diff --git a/.editorconfig b/.editorconfig index 73b614047e4..7de29a1dfa8 100644 --- a/.editorconfig +++ b/.editorconfig @@ -23,6 +23,11 @@ ij_json_spaces_within_braces = false ij_json_spaces_within_brackets = false ij_json_wrap_long_lines = false +[*.{kt,kts}] +indent_size = 2 +max_line_length = 150 +ij_kotlin_packages_to_use_import_on_demand = unset + [{*.markdown,*.md}] ij_markdown_force_one_space_after_blockquote_symbol = true ij_markdown_force_one_space_after_header_symbol = true diff --git a/.env b/.env index 97b349f0435..dd1bd56961c 100644 --- a/.env +++ b/.env @@ -26,7 +26,7 @@ WORKSPACE_DOCKER_MOUNT=airbyte_workspace # be the same as *_ROOT. # Issue: https://github.com/airbytehq/airbyte/issues/578 LOCAL_ROOT=/tmp/airbyte_local -LOCAL_DOCKER_MOUNT=/tmp/airbyte_local +LOCAL_DOCKER_MOUNT=oss_local_root # todo (cgardens) - hack to handle behavior change in docker compose. *_PARENT directories MUST # already exist on the host filesystem and MUST be parents of *_ROOT. # Issue: https://github.com/airbytehq/airbyte/issues/577 @@ -34,6 +34,7 @@ HACK_LOCAL_ROOT_PARENT=/tmp # Storage type STORAGE_TYPE=local +STORAGE_BUCKET_ACTIVITY_PAYLOAD=payload-storage STORAGE_BUCKET_LOG=airbyte-dev-logs STORAGE_BUCKET_STATE=state-storage STORAGE_BUCKET_WORKLOAD_OUTPUT=state-storage @@ -65,17 +66,17 @@ CONFIGS_DATABASE_MINIMUM_FLYWAY_MIGRATION_VERSION=0.40.23.002 TEMPORAL_HOST=airbyte-temporal:7233 INTERNAL_API_HOST=airbyte-server:8001 INTERNAL_API_URL=http://airbyte-server:8001 -CONNECTOR_BUILDER_API_HOST=airbyte-connector-builder-server:80 +CONNECTOR_BUILDER_API_HOST=airbyte-connector-builder-server:8080 WEBAPP_URL=http://localhost:8000/ WORKLOAD_API_HOST=workload-api-server:8007 WORKLOAD_API_URL=http://workload-api-server:8007 # Although not present as an env var, required for webapp configuration. 
CONNECTOR_BUILDER_API_URL=/connector-builder-api AIRBYTE_API_HOST=airbyte-api-server:8006 -CONNECTOR_BUILDER_SERVER_API_HOST=http://airbyte-connector-builder-server:80 +CONNECTOR_BUILDER_SERVER_API_HOST=http://airbyte-connector-builder-server:8080 # Replace with the commented-out line below to use a locally-run connector-builder-server # image, e.g. when developing the CDK's builder server command runner. -# CONNECTOR_BUILDER_SERVER_API_HOST=http://host.docker.internal:80 +# CONNECTOR_BUILDER_SERVER_API_HOST=http://host.docker.internal:8080 ### JOBS ### # Relevant to scaling. diff --git a/.github/actions/match-github-to-slack-user/action.yml b/.github/actions/match-github-to-slack-user/action.yml new file mode 100644 index 00000000000..90fff4ddbd0 --- /dev/null +++ b/.github/actions/match-github-to-slack-user/action.yml @@ -0,0 +1,25 @@ +# This action will try to match git commit author (GITHUB_ACTOR) with Slack user +# and add it to GITHUB_OUTPUT +# Following env variables should be provided. +# Provided by Github: +# GITHUB_ACTOR: commit author +# GITHUB_REPOSITORY: name of the repo we check the commit author, e.g. "airbytehq/airbyte-platform-internal" +# Required: +# AIRBYTE_HQ_BOT_SLACK_TOKEN: ${{ secrets.AIRBYTE_HQ_BOT_SLACK_TOKEN }} +# AIRBYTE_TEAM_BOT_SLACK_TOKEN: ${{ secrets.AIRBYTE_TEAM_BOT_SLACK_TOKEN }} +# GITHUB_API_TOKEN: ${{ secrets.GITHUB_TOKEN }} + +name: 'Match Github user to Slack user' +description: 'Match Github user to Slack by email or full name in Github profile.' 
+outputs: + slack_user_ids: + description: 'Comma separated slack user IDs that match to GITHUB_ACTOR (Github username)' + value: ${{ steps.match-github-to-slack-user.outputs.slack_user_ids }} +runs: + using: 'composite' + steps: + - name: Match github user to slack user + id: match-github-to-slack-user + run: | + ./tools/bin/match_github_user_to_slack + shell: bash diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md index a5e02d08d78..87cbb220b03 100644 --- a/.github/pull_request_template.md +++ b/.github/pull_request_template.md @@ -1,22 +1,23 @@ ## What -*Describe what the change is solving* -*It helps to add screenshots if it affects the frontend.* + ## How -*Describe the solution* + ## Recommended reading order -1. `x.java` -2. `y.java` +1. `x.kt` +2. `y.kt` -## Can this PR be safely reverted / rolled back? -*If you know that your PR is backwards-compatible and can be simply reverted or rolled back, check the YES box.* - -*Otherwise if your PR has a breaking change, like a database migration for example, check the NO box.* - -*If unsure, leave it blank.* +## Can this PR be safely reverted and rolled back? + - [ ] YES 💚 - [ ] NO ❌ - -## 🚨 User Impact 🚨 -Are there any breaking changes? What is the end result perceived by the user? If yes, please merge this PR with the 🚨🚨 emoji so changelog authors can further highlight this if needed. diff --git a/.github/workflows/gradle.yml b/.github/workflows/gradle.yml index dcc9e372a6b..61b4b029f7e 100644 --- a/.github/workflows/gradle.yml +++ b/.github/workflows/gradle.yml @@ -1,4 +1,7 @@ -name: Airbyte Platform CI +# The goal of this build is to make sure that OSS contributors can build the project and run the tests +# so that they can develop locally. It is NOT a release verification. As such, we just run build and +# unit test. No additional acceptance test, etc. 
+name: Airbyte Platform OSS Developer Build env: S3_BUILD_CACHE_ACCESS_KEY_ID: ${{ secrets.SELF_RUNNER_AWS_ACCESS_KEY_ID }} @@ -24,7 +27,6 @@ on: permissions: write-all jobs: - # COMMON TASKS ensure-images-exist: name: "Ensure all required Docker images exist on Dockerhub" timeout-minutes: 10 @@ -94,275 +96,18 @@ jobs: # - run: | # echo '${{ toJSON(needs) }}' - ## BUILDS - ## Frontend Test - # In case of self-hosted EC2 errors, remove this block. - start-frontend-runner: - name: "Frontend: Start EC2 Runner" - needs: - - changes - # Because scheduled builds on main require us to skip the changes job. Use always() to force this to run on main. - if: | - needs.changes.outputs.frontend == 'true' || needs.changes.outputs.build == 'true' || github.ref == 'refs/heads/main' - || (always() && needs.changes.outputs.backend == 'true') - timeout-minutes: 10 - runs-on: ubuntu-latest - outputs: - label: ${{ steps.start-ec2-runner.outputs.label }} - ec2-instance-id: ${{ steps.start-ec2-runner.outputs.ec2-instance-id }} - steps: - - name: Checkout Airbyte - uses: actions/checkout@v3 - - name: Check PAT rate limits - run: | - ./tools/bin/find_non_rate_limited_PAT \ - ${{ secrets.GH_PAT_BUILD_RUNNER_OSS }} \ - ${{ secrets.GH_PAT_BUILD_RUNNER_BACKUP }} - - name: Start AWS Runner - id: start-ec2-runner - uses: ./.github/actions/start-aws-runner - with: - aws-access-key-id: ${{ secrets.SELF_RUNNER_AWS_ACCESS_KEY_ID }} - aws-secret-access-key: ${{ secrets.SELF_RUNNER_AWS_SECRET_ACCESS_KEY }} - github-token: ${{ env.PAT }} - frontend-build: - name: "Frontend: Build" - needs: - - start-frontend-runner - runs-on: ${{ needs.start-frontend-runner.outputs.label }} - steps: - - name: Checkout Airbyte - uses: actions/checkout@v3 - # We need to fetch at least one more commmit for the Chromatic action not to fail - # but since we don't do screenshot comparison we don't need to fetch the full history. 
- with: - fetch-depth: 2 - - - name: Cache Build Artifacts - uses: ./.github/actions/cache-build-artifacts - with: - cache-key: ${{ secrets.CACHE_VERSION }} - cache-python: "false" - - - uses: actions/setup-java@v3 - with: - distribution: "zulu" - java-version: "21" - - - uses: actions/setup-python@v4 - with: - python-version: "3.9" - - - name: Set up CI Gradle Properties - run: | - mkdir -p ~/.gradle/ - cat > ~/.gradle/gradle.properties < ~/.gradle/gradle.properties < ~/.gradle/gradle.properties <?, + metadata: Map, ) } @@ -104,26 +105,23 @@ class SegmentTrackingClient( override fun identify(workspaceId: UUID) { val deployment: Deployment = deploymentFetcher.get() val trackingIdentity: TrackingIdentity = trackingIdentityFetcher.apply(workspaceId) - val identityMetadata: MutableMap = HashMap() - - // deployment - identityMetadata[AIRBYTE_VERSION_KEY] = deployment.getDeploymentVersion() - identityMetadata["deployment_mode"] = deployment.getDeploymentMode() - identityMetadata["deployment_env"] = deployment.getDeploymentEnvironment() - identityMetadata["deployment_id"] = deployment.getDeploymentId().toString() - - // workspace (includes info that in the future we would store in an organization) - identityMetadata["anonymized"] = trackingIdentity.isAnonymousDataCollection() - identityMetadata["subscribed_newsletter"] = trackingIdentity.isNews() - identityMetadata["subscribed_security"] = trackingIdentity.isSecurityUpdates() - if (trackingIdentity.email != null) { - identityMetadata["email"] = trackingIdentity.email - } - - // other - if (airbyteRole.isNotBlank()) { - identityMetadata[AIRBYTE_ROLE] = airbyteRole - } + val identityMetadata: Map = + buildMap { + // deployment + put(AIRBYTE_VERSION_KEY, deployment.getDeploymentVersion()) + put("deployment_mode", deployment.getDeploymentMode()) + put("deployment_env", deployment.getDeploymentEnvironment()) + put("deployment_id", deployment.getDeploymentId().toString()) + + // workspace (includes info that in the future we 
would store in an organization) + put("anonymized", trackingIdentity.isAnonymousDataCollection()) + put("subscribed_newsletter", trackingIdentity.isNews()) + put("subscribed_security", trackingIdentity.isSecurityUpdates()) + trackingIdentity.email?.let { put("email", it) } + + // other + airbyteRole.takeIf { it.isNotBlank() }?.let { put(AIRBYTE_ROLE, it) } + } val joinKey: String = trackingIdentity.customerId.toString() segmentAnalyticsClient.analyticsClient.enqueue( @@ -145,32 +143,34 @@ class SegmentTrackingClient( workspaceId: UUID, action: String?, ) { - track(workspaceId, action, emptyMap()) + track(workspaceId, action, emptyMap()) } override fun track( workspaceId: UUID, action: String?, - metadata: Map?, + metadata: Map, ) { - val mapCopy: MutableMap = java.util.HashMap(metadata) val deployment: Deployment = deploymentFetcher.get() val trackingIdentity: TrackingIdentity = trackingIdentityFetcher.apply(workspaceId) - val airbyteSource: Optional = getAirbyteSource() - mapCopy[AIRBYTE_SOURCE] = airbyteSource.orElse(UNKNOWN) - - // Always add these traits. - mapCopy[AIRBYTE_VERSION_KEY] = deployment.getDeploymentVersion() - mapCopy[CUSTOMER_ID_KEY] = trackingIdentity.customerId - mapCopy[AIRBYTE_DEPLOYMENT_ID] = deployment.getDeploymentId().toString() - mapCopy[AIRBYTE_DEPLOYMENT_MODE] = deployment.getDeploymentMode() - mapCopy[AIRBYTE_TRACKED_AT] = Instant.now().toString() - if (metadata!!.isNotEmpty()) { - if (trackingIdentity.email != null) { - mapCopy["email"] = trackingIdentity.email + val mapCopy: Map = + buildMap { + putAll(metadata) + put(AIRBYTE_SOURCE, getAirbyteSource() ?: UNKNOWN) + + // Always add these traits. 
+ put(AIRBYTE_VERSION_KEY, deployment.getDeploymentVersion()) + put(CUSTOMER_ID_KEY, trackingIdentity.customerId) + put(AIRBYTE_DEPLOYMENT_ID, deployment.getDeploymentId().toString()) + put(AIRBYTE_DEPLOYMENT_MODE, deployment.getDeploymentMode()) + put(AIRBYTE_TRACKED_AT, Instant.now().toString()) + if (metadata.isNotEmpty()) { + if (trackingIdentity.email != null) { + put("email", trackingIdentity.email) + } + } } - } val joinKey: String = trackingIdentity.customerId.toString() segmentAnalyticsClient.analyticsClient.enqueue( @@ -180,12 +180,12 @@ class SegmentTrackingClient( ) } - private fun getAirbyteSource(): Optional { + private fun getAirbyteSource(): String? { val currentRequest = ServerRequestContext.currentRequest() return if (currentRequest.isPresent) { - Optional.ofNullable(currentRequest.get().headers[AIRBYTE_ANALYTIC_SOURCE_HEADER]) + currentRequest.get().headers[AIRBYTE_ANALYTIC_SOURCE_HEADER] } else { - Optional.empty() + null } } @@ -326,7 +326,7 @@ class LoggingTrackingClient( override fun track( workspaceId: UUID, action: String?, - metadata: Map?, + metadata: Map, ) { val deployment: Deployment = deploymentFetcher.get() val trackingIdentity: TrackingIdentity = trackingIdentityFetcher.apply(workspaceId) @@ -337,9 +337,11 @@ class LoggingTrackingClient( } @Singleton -class DeploymentFetcher( +@CacheConfig("analytics-tracking-deployments") +open class DeploymentFetcher( @Named("deploymentSupplier") val deploymentFetcher: Supplier, ) : Supplier { + @Cacheable override fun get(): Deployment { val deploymentMetadata = deploymentFetcher.get() return Deployment(deploymentMetadata) @@ -347,9 +349,11 @@ class DeploymentFetcher( } @Singleton -class TrackingIdentityFetcher( +@CacheConfig("analytics-tracking-identity") +open class TrackingIdentityFetcher( @Named("workspaceFetcher") val workspaceFetcher: Function, ) : Function { + @Cacheable override fun apply(workspaceId: UUID): TrackingIdentity { val workspaceRead = workspaceFetcher.apply(workspaceId) val 
email: String? = diff --git a/airbyte-analytics/src/test/kotlin/io/airbyte/analytics/BlockingShutdownAnalyticsPluginTest.kt b/airbyte-analytics/src/test/kotlin/io/airbyte/analytics/BlockingShutdownAnalyticsPluginTest.kt index c38bf12066c..836c48869a2 100644 --- a/airbyte-analytics/src/test/kotlin/io/airbyte/analytics/BlockingShutdownAnalyticsPluginTest.kt +++ b/airbyte-analytics/src/test/kotlin/io/airbyte/analytics/BlockingShutdownAnalyticsPluginTest.kt @@ -35,7 +35,7 @@ class BlockingShutdownAnalyticsPluginTest { val bodyJson = "{}" val client: Client = mockk() val response: Response = mockk() - val flushInterval = 120L + val flushInterval = 3L val writeKey = "write-key" val plugin = BlockingShutdownAnalyticsPlugin(flushInterval) @@ -54,12 +54,13 @@ class BlockingShutdownAnalyticsPluginTest { .builder(writeKey) .client(client) .flushInterval(flushInterval, TimeUnit.SECONDS) + .flushQueueSize(5001) .plugin(plugin) .build() assertDoesNotThrow { CompletableFuture.supplyAsync { - for (i in 0..50000) { + for (i in 0..5000) { val builder = TrackMessage.builder("track").userId("user-id").properties(mapOf("property" to "value")) analytics.enqueue(builder) } diff --git a/airbyte-analytics/src/test/kotlin/io/airbyte/analytics/SegmentTrackingClientTest.kt b/airbyte-analytics/src/test/kotlin/io/airbyte/analytics/SegmentTrackingClientTest.kt index 9d95a8ce32f..2f0e0362c42 100644 --- a/airbyte-analytics/src/test/kotlin/io/airbyte/analytics/SegmentTrackingClientTest.kt +++ b/airbyte-analytics/src/test/kotlin/io/airbyte/analytics/SegmentTrackingClientTest.kt @@ -103,7 +103,7 @@ class SegmentTrackingClientTest { verify(exactly = 1) { analytics.enqueue(any()) } val actual = builderSlot.captured.build() - val expectedTraits: Map? 
= + val expectedTraits: Map = mapOf( "airbyte_role" to "role", SegmentTrackingClient.AIRBYTE_VERSION_KEY to airbyteVersion.serialize(), @@ -124,7 +124,7 @@ class SegmentTrackingClientTest { val builderSlot = slot() every { analytics.enqueue(capture(builderSlot)) } returns Unit - val metadata: Map? = + val metadata: Map = mapOf( SegmentTrackingClient.AIRBYTE_VERSION_KEY to airbyteVersion.serialize(), "user_id" to identity.customerId, @@ -147,7 +147,7 @@ class SegmentTrackingClientTest { val builderSlot = slot() every { analytics.enqueue(capture(builderSlot)) } returns Unit - val metadata: Map? = + val metadata: Map = mapOf( SegmentTrackingClient.AIRBYTE_VERSION_KEY to airbyteVersion.serialize(), EMAIL_KEY to EMAIL, @@ -177,7 +177,7 @@ class SegmentTrackingClientTest { every { httpRequest.headers } returns httpHeaders ServerRequestContext.with(httpRequest) { - val metadata: Map? = + val metadata: Map = mapOf( SegmentTrackingClient.AIRBYTE_VERSION_KEY to airbyteVersion.serialize(), EMAIL_KEY to EMAIL, diff --git a/airbyte-api-server/Dockerfile b/airbyte-api-server/Dockerfile index babb3e8ca0e..3fb85849db1 100644 --- a/airbyte-api-server/Dockerfile +++ b/airbyte-api-server/Dockerfile @@ -1,12 +1,16 @@ -ARG JDK_IMAGE=airbyte/airbyte-base-java-image:2.1.0 -FROM ${JDK_IMAGE} AS server +ARG JDK_IMAGE=airbyte/airbyte-base-java-image:3.2.1 + +FROM scratch as builder +WORKDIR /app +ADD airbyte-app.tar /app + +FROM ${JDK_IMAGE} EXPOSE 8006 5005 ENV APPLICATION airbyte-api-server ENV VERSION ${VERSION} WORKDIR /app - -# This is automatically unzipped by Docker -ADD airbyte-app.tar /app +COPY --chown=airbyte:airbyte --from=builder /app /app +USER airbyte:airbyte # wait for upstream dependencies to become available before starting server ENTRYPOINT ["/bin/bash", "-c", "airbyte-app/bin/${APPLICATION}"] diff --git a/airbyte-api-server/build.gradle.kts b/airbyte-api-server/build.gradle.kts index 4d2b3e4c2f9..94e87a6eb06 100644 --- a/airbyte-api-server/build.gradle.kts +++ 
b/airbyte-api-server/build.gradle.kts @@ -1,100 +1,106 @@ import java.util.Properties plugins { - id("io.airbyte.gradle.jvm.app") - id("io.airbyte.gradle.jvm.lib") - id("io.airbyte.gradle.publish") - id("io.airbyte.gradle.docker") - kotlin("jvm") - kotlin("kapt") + id("io.airbyte.gradle.jvm.app") + id("io.airbyte.gradle.jvm.lib") + id("io.airbyte.gradle.publish") + id("io.airbyte.gradle.docker") + kotlin("jvm") + kotlin("kapt") } dependencies { - kapt(platform(libs.micronaut.platform)) - kapt(libs.bundles.micronaut.annotation.processor) - kapt(libs.micronaut.jaxrs.processor) + kapt(platform(libs.micronaut.platform)) + kapt(libs.bundles.micronaut.annotation.processor) + kapt(libs.micronaut.jaxrs.processor) - kaptTest(platform(libs.micronaut.platform)) - kaptTest(libs.bundles.micronaut.test.annotation.processor) - kaptTest(libs.micronaut.jaxrs.processor) + kaptTest(platform(libs.micronaut.platform)) + kaptTest(libs.bundles.micronaut.test.annotation.processor) + kaptTest(libs.micronaut.jaxrs.processor) - annotationProcessor(platform(libs.micronaut.platform)) - annotationProcessor(libs.bundles.micronaut.annotation.processor) - annotationProcessor(libs.micronaut.jaxrs.processor) + annotationProcessor(platform(libs.micronaut.platform)) + annotationProcessor(libs.bundles.micronaut.annotation.processor) + annotationProcessor(libs.micronaut.jaxrs.processor) - implementation(project(":airbyte-analytics")) - implementation(project(":airbyte-api")) - implementation(project(":airbyte-commons")) - implementation(project(":airbyte-config:config-models")) - implementation(platform(libs.micronaut.platform)) - implementation(libs.cron.utils) - implementation(libs.log4j.slf4j2.impl) - implementation(libs.bundles.jackson) - implementation(libs.bundles.micronaut) - implementation(libs.bundles.micronaut.data.jdbc) - implementation(libs.bundles.micronaut.metrics) - implementation(libs.micronaut.jaxrs.server) - implementation(libs.micronaut.problem.json) - 
implementation(libs.micronaut.security) - implementation(libs.sentry.java) - implementation(libs.swagger.annotations) - implementation(libs.jakarta.ws.rs.api) - implementation(libs.airbyte.protocol) + implementation(project(":airbyte-analytics")) + implementation(project(":airbyte-api")) + implementation(project(":airbyte-commons")) + implementation(project(":airbyte-config:config-models")) + implementation(platform(libs.micronaut.platform)) + implementation(libs.cron.utils) + implementation(libs.log4j.slf4j2.impl) + implementation(libs.bundles.jackson) + implementation(libs.bundles.micronaut) + implementation(libs.bundles.micronaut.cache) + implementation(libs.bundles.micronaut.data.jdbc) + implementation(libs.bundles.micronaut.metrics) + implementation(libs.micronaut.jaxrs.server) + implementation(libs.micronaut.problem.json) + implementation(libs.micronaut.security) + implementation(libs.sentry.java) + implementation(libs.swagger.annotations) + implementation(libs.jakarta.ws.rs.api) + implementation(libs.airbyte.protocol) - runtimeOnly(libs.javax.databind) - runtimeOnly(libs.snakeyaml) + runtimeOnly(libs.javax.databind) + runtimeOnly(libs.snakeyaml) - testAnnotationProcessor(platform(libs.micronaut.platform)) - testAnnotationProcessor(libs.bundles.micronaut.test.annotation.processor) - testAnnotationProcessor(libs.micronaut.jaxrs.processor) + testAnnotationProcessor(platform(libs.micronaut.platform)) + testAnnotationProcessor(libs.bundles.micronaut.test.annotation.processor) + testAnnotationProcessor(libs.micronaut.jaxrs.processor) - testImplementation(project(":airbyte-test-utils")) - testImplementation(libs.bundles.micronaut.test) - testImplementation(libs.postgresql) - testImplementation(libs.platform.testcontainers.postgresql) - testImplementation(libs.mockwebserver) - testImplementation(libs.mockito.inline) + testImplementation(project(":airbyte-test-utils")) + testImplementation(libs.bundles.micronaut.test) + testImplementation(libs.postgresql) + 
testImplementation(libs.platform.testcontainers.postgresql) + testImplementation(libs.mockwebserver) + testImplementation(libs.mockito.inline) + testImplementation(libs.mockk) } kapt { - correctErrorTypes = true + correctErrorTypes = true } val env = Properties().apply { - load(rootProject.file(".env.dev").inputStream()) + load(rootProject.file(".env.dev").inputStream()) } airbyte { - application { - mainClass = "io.airbyte.api.server.ApplicationKt" - defaultJvmArgs = listOf("-XX:+ExitOnOutOfMemoryError", "-XX:MaxRAMPercentage=75.0") + application { + mainClass = "io.airbyte.api.server.ApplicationKt" + defaultJvmArgs = listOf("-XX:+ExitOnOutOfMemoryError", "-XX:MaxRAMPercentage=75.0") - @Suppress("UNCHECKED_CAST") - localEnvVars.putAll(env.toMutableMap() as Map) - localEnvVars.putAll(mapOf( - "AIRBYTE_ROLE" to (System.getenv("AIRBYTE_ROLE") ?: "undefined"), - "AIRBYTE_VERSION" to env["VERSION"].toString(), - "MICRONAUT_ENVIRONMENTS" to "control-plane", - "SERVICE_NAME" to project.name, - "TRACKING_STRATEGY" to env["TRACKING_STRATEGY"].toString(), - )) - } - docker { - imageName = "airbyte-api-server" - } + @Suppress("UNCHECKED_CAST") + localEnvVars.putAll(env.toMutableMap() as Map) + localEnvVars.putAll( + mapOf( + "AIRBYTE_ROLE" to (System.getenv("AIRBYTE_ROLE") ?: "undefined"), + "AIRBYTE_VERSION" to env["VERSION"].toString(), + "MICRONAUT_ENVIRONMENTS" to "control-plane", + "SERVICE_NAME" to project.name, + "TRACKING_STRATEGY" to env["TRACKING_STRATEGY"].toString(), + ) + ) + } + docker { + imageName = "airbyte-api-server" + } } tasks.named("test") { - environment(mapOf( - "AIRBYTE_VERSION" to env["VERSION"], - "MICRONAUT_ENVIRONMENTS" to "test", - "SERVICE_NAME" to project.name, - )) + environment( + mapOf( + "AIRBYTE_VERSION" to env["VERSION"], + "MICRONAUT_ENVIRONMENTS" to "test", + "SERVICE_NAME" to project.name, + ) + ) } // Even though Kotlin is excluded on Spotbugs, this projects) // still runs into spotbug issues. 
Working theory is that) // generated code is being picked up. Disable as a short-term fix.) tasks.named("spotbugsMain") { - enabled = false + enabled = false } diff --git a/airbyte-api-server/src/main/kotlin/io/airbyte/api/server/apiTracking/TrackingHelper.kt b/airbyte-api-server/src/main/kotlin/io/airbyte/api/server/apiTracking/TrackingHelper.kt index 06f03e247af..52facb295c7 100644 --- a/airbyte-api-server/src/main/kotlin/io/airbyte/api/server/apiTracking/TrackingHelper.kt +++ b/airbyte-api-server/src/main/kotlin/io/airbyte/api/server/apiTracking/TrackingHelper.kt @@ -131,7 +131,7 @@ class TrackingHelper(private val trackingClient: TrackingClient) { trackingClient.track( userId, AIRBYTE_API_CALL, - payload as Map?, + payload.toMap(), ) } diff --git a/airbyte-api-server/src/main/kotlin/io/airbyte/api/server/controllers/ConnectionsController.kt b/airbyte-api-server/src/main/kotlin/io/airbyte/api/server/controllers/ConnectionsController.kt index 2ef65f9dd66..9f6d7e70ea1 100644 --- a/airbyte-api-server/src/main/kotlin/io/airbyte/api/server/controllers/ConnectionsController.kt +++ b/airbyte-api-server/src/main/kotlin/io/airbyte/api/server/controllers/ConnectionsController.kt @@ -103,7 +103,14 @@ open class ConnectionsController( for (streamConfiguration in connectionCreateRequest.configurations.streams) { val validStreamAndConfig = validStreams[streamConfiguration.name] val schemaStream = validStreamAndConfig!!.stream - val schemaConfig = validStreamAndConfig.config + val updatedValidStreamAndConfig = AirbyteStreamAndConfiguration() + updatedValidStreamAndConfig.stream = schemaStream + updatedValidStreamAndConfig.config = + AirbyteCatalogHelper.updateAirbyteStreamConfiguration( + validStreamAndConfig.config, + schemaStream, + streamConfiguration, + ) val validDestinationSyncModes = trackingHelper.callWithTracker( @@ -116,23 +123,21 @@ open class ConnectionsController( // set user configs trackingHelper.callWithTracker( { - 
AirbyteCatalogHelper.setAndValidateStreamConfig( - streamConfiguration, - validDestinationSyncModes, - schemaStream!!, - schemaConfig!!, + AirbyteCatalogHelper.validateStreamConfig( + streamConfiguration = streamConfiguration, + validDestinationSyncModes = validDestinationSyncModes, + airbyteStream = schemaStream!!, ) }, CONNECTIONS_PATH, POST, userId, ) - configuredCatalog!!.addStreamsItem(validStreamAndConfig) + configuredCatalog!!.addStreamsItem(updatedValidStreamAndConfig) } } else { // no user supplied stream configs, return all streams with full refresh overwrite - configuredCatalog = airbyteCatalogFromDiscoverSchema - AirbyteCatalogHelper.setAllStreamsFullRefreshOverwrite(configuredCatalog!!) + configuredCatalog = AirbyteCatalogHelper.updateAllStreamsFullRefreshOverwrite(airbyteCatalogFromDiscoverSchema) } val finalConfiguredCatalog = configuredCatalog @@ -328,7 +333,14 @@ open class ConnectionsController( for (streamConfiguration in connectionPatchRequest.configurations.streams) { val validStreamAndConfig = validStreams[streamConfiguration.name] val schemaStream = validStreamAndConfig!!.stream - val schemaConfig = validStreamAndConfig.config + val updatedValidStreamAndConfig = AirbyteStreamAndConfiguration() + updatedValidStreamAndConfig.stream = schemaStream + updatedValidStreamAndConfig.config = + AirbyteCatalogHelper.updateAirbyteStreamConfiguration( + validStreamAndConfig.config, + schemaStream, + streamConfiguration, + ) val validDestinationSyncModes = trackingHelper.callWithTracker( @@ -341,18 +353,17 @@ open class ConnectionsController( // set user configs trackingHelper.callWithTracker( { - AirbyteCatalogHelper.setAndValidateStreamConfig( - streamConfiguration, - validDestinationSyncModes, - schemaStream!!, - schemaConfig!!, + AirbyteCatalogHelper.validateStreamConfig( + streamConfiguration = streamConfiguration, + validDestinationSyncModes = validDestinationSyncModes, + airbyteStream = schemaStream!!, ) }, CONNECTIONS_PATH, POST, userId, ) - 
configuredCatalog!!.addStreamsItem(validStreamAndConfig) + configuredCatalog!!.addStreamsItem(updatedValidStreamAndConfig) } } else { // no user supplied stream configs, return all existing streams diff --git a/airbyte-api-server/src/main/kotlin/io/airbyte/api/server/helpers/AirbyteCatalogHelper.kt b/airbyte-api-server/src/main/kotlin/io/airbyte/api/server/helpers/AirbyteCatalogHelper.kt index cd65f0cb471..4ab234a1d20 100644 --- a/airbyte-api-server/src/main/kotlin/io/airbyte/api/server/helpers/AirbyteCatalogHelper.kt +++ b/airbyte-api-server/src/main/kotlin/io/airbyte/api/server/helpers/AirbyteCatalogHelper.kt @@ -56,9 +56,19 @@ object AirbyteCatalogHelper { * * @param config config to be set */ - fun setConfigDefaultFullRefreshOverwrite(config: AirbyteStreamConfiguration?) { - config!!.syncMode = SyncMode.FULL_REFRESH - config.destinationSyncMode = DestinationSyncMode.OVERWRITE + fun updateConfigDefaultFullRefreshOverwrite(config: AirbyteStreamConfiguration?): AirbyteStreamConfiguration { + val updatedStreamConfiguration = AirbyteStreamConfiguration() + config?.let { + updatedStreamConfiguration.aliasName = config.aliasName + updatedStreamConfiguration.cursorField = config.cursorField + updatedStreamConfiguration.fieldSelectionEnabled = config.fieldSelectionEnabled + updatedStreamConfiguration.selected = config.selected + updatedStreamConfiguration.selectedFields = config.selectedFields + updatedStreamConfiguration.suggested = config.suggested + } + updatedStreamConfiguration.destinationSyncMode = DestinationSyncMode.OVERWRITE + updatedStreamConfiguration.syncMode = SyncMode.FULL_REFRESH + return updatedStreamConfiguration } /** @@ -66,11 +76,20 @@ object AirbyteCatalogHelper { * * @param airbyteCatalog The catalog to be modified */ - fun setAllStreamsFullRefreshOverwrite(airbyteCatalog: AirbyteCatalog) { - for (schemaStreams in airbyteCatalog.streams) { - val config = schemaStreams.config!! 
- setConfigDefaultFullRefreshOverwrite(config) + fun updateAllStreamsFullRefreshOverwrite(airbyteCatalog: AirbyteCatalog?): AirbyteCatalog { + val updatedAirbyteCatalog = AirbyteCatalog() + airbyteCatalog?.let { + updatedAirbyteCatalog.streams = + it.streams.stream().map { stream: AirbyteStreamAndConfiguration -> + val updatedAirbyteStreamAndConfiguration = + AirbyteStreamAndConfiguration() + updatedAirbyteStreamAndConfiguration.config = updateConfigDefaultFullRefreshOverwrite(stream.config) + updatedAirbyteStreamAndConfiguration.stream = stream.stream + updatedAirbyteStreamAndConfiguration + }.toList() } + + return updatedAirbyteCatalog } /** @@ -154,6 +173,85 @@ object AirbyteCatalogHelper { // check that the first seconds and hour values are not * } + fun updateAirbyteStreamConfiguration( + config: AirbyteStreamConfiguration?, + airbyteStream: AirbyteStream?, + streamConfiguration: StreamConfiguration, + ): AirbyteStreamConfiguration { + val updatedStreamConfiguration = AirbyteStreamConfiguration() + // Set stream config as selected + updatedStreamConfiguration.selected = true + updatedStreamConfiguration.aliasName = config?.aliasName + updatedStreamConfiguration.fieldSelectionEnabled = config?.fieldSelectionEnabled + updatedStreamConfiguration.suggested = config?.suggested + + if (streamConfiguration.syncMode == null) { + updatedStreamConfiguration.syncMode = SyncMode.FULL_REFRESH + updatedStreamConfiguration.destinationSyncMode = DestinationSyncMode.OVERWRITE + updatedStreamConfiguration.cursorField = config?.cursorField + updatedStreamConfiguration.primaryKey = config?.primaryKey + } else { + when (streamConfiguration.syncMode) { + ConnectionSyncModeEnum.FULL_REFRESH_APPEND -> { + updatedStreamConfiguration.syncMode = SyncMode.FULL_REFRESH + updatedStreamConfiguration.destinationSyncMode = DestinationSyncMode.APPEND + updatedStreamConfiguration.cursorField = config?.cursorField + updatedStreamConfiguration.primaryKey = config?.primaryKey + } + + 
ConnectionSyncModeEnum.INCREMENTAL_APPEND -> { + updatedStreamConfiguration.syncMode(SyncMode.INCREMENTAL) + updatedStreamConfiguration.destinationSyncMode(DestinationSyncMode.APPEND) + updatedStreamConfiguration.cursorField(selectCursorField(airbyteStream, streamConfiguration)) + updatedStreamConfiguration.primaryKey(selectPrimaryKey(airbyteStream, streamConfiguration)) + } + + ConnectionSyncModeEnum.INCREMENTAL_DEDUPED_HISTORY -> { + updatedStreamConfiguration.syncMode = SyncMode.INCREMENTAL + updatedStreamConfiguration.destinationSyncMode = DestinationSyncMode.APPEND_DEDUP + updatedStreamConfiguration.cursorField = selectCursorField(airbyteStream, streamConfiguration) + updatedStreamConfiguration.primaryKey = selectPrimaryKey(airbyteStream, streamConfiguration) + } + + else -> { + updatedStreamConfiguration.syncMode = SyncMode.FULL_REFRESH + updatedStreamConfiguration.destinationSyncMode = DestinationSyncMode.OVERWRITE + updatedStreamConfiguration.cursorField = config?.cursorField + updatedStreamConfiguration.primaryKey = config?.primaryKey + } + } + } + + return updatedStreamConfiguration + } + + private fun selectCursorField( + airbyteStream: AirbyteStream?, + streamConfiguration: StreamConfiguration, + ): List? { + return if (airbyteStream?.sourceDefinedCursor != null && airbyteStream.sourceDefinedCursor!!) { + airbyteStream.defaultCursorField + } else if (streamConfiguration.cursorField != null && streamConfiguration.cursorField.isNotEmpty()) { + streamConfiguration.cursorField + } else { + airbyteStream?.defaultCursorField + } + } + + private fun selectPrimaryKey( + airbyteStream: AirbyteStream?, + streamConfiguration: StreamConfiguration, + ): List>? 
{ + // if no source defined primary key + return if (airbyteStream?.sourceDefinedPrimaryKey == null || airbyteStream.sourceDefinedPrimaryKey!!.isEmpty()) { + streamConfiguration.primaryKey + } else if (streamConfiguration.primaryKey == null || streamConfiguration.primaryKey.isEmpty()) { + airbyteStream.sourceDefinedPrimaryKey + } else { + listOf() + } + } + /** * Validates a stream's configurations and sets those configurations in the * `AirbyteStreamConfiguration` object. Logic comes from @@ -162,19 +260,14 @@ object AirbyteCatalogHelper { * @param streamConfiguration The configuration input of a specific stream provided by the caller. * @param validDestinationSyncModes All the valid destination sync modes for a destination * @param airbyteStream The immutable schema defined by the source - * @param config The configuration of a stream consumed by the config-api * @return True if no exceptions. Needed so it can be used inside TrackingHelper.callWithTracker */ - fun setAndValidateStreamConfig( + fun validateStreamConfig( streamConfiguration: StreamConfiguration, - validDestinationSyncModes: List, + validDestinationSyncModes: List, airbyteStream: AirbyteStream, - config: AirbyteStreamConfiguration, ): Boolean { - // Set stream config as selected - config.selected = true if (streamConfiguration.syncMode == null) { - setConfigDefaultFullRefreshOverwrite(config) return true } @@ -187,46 +280,33 @@ object AirbyteCatalogHelper { validCombinedSyncModes, ) } - when (streamConfiguration.syncMode) { - ConnectionSyncModeEnum.FULL_REFRESH_APPEND -> { - config.syncMode = SyncMode.FULL_REFRESH - config.destinationSyncMode = DestinationSyncMode.APPEND - } + when (streamConfiguration.syncMode) { ConnectionSyncModeEnum.INCREMENTAL_APPEND -> { - config.syncMode = SyncMode.INCREMENTAL - config.destinationSyncMode = DestinationSyncMode.APPEND - setAndValidateCursorField(streamConfiguration.cursorField, airbyteStream, config) + validateCursorField(streamConfiguration.cursorField, 
airbyteStream) } ConnectionSyncModeEnum.INCREMENTAL_DEDUPED_HISTORY -> { - config.syncMode = SyncMode.INCREMENTAL - config.destinationSyncMode = DestinationSyncMode.APPEND_DEDUP - setAndValidateCursorField(streamConfiguration.cursorField, airbyteStream, config) - setAndValidatePrimaryKey(streamConfiguration.primaryKey, airbyteStream, config) + validateCursorField(streamConfiguration.cursorField, airbyteStream) + validatePrimaryKey(streamConfiguration.primaryKey, airbyteStream) } - else -> { - // always valid - setConfigDefaultFullRefreshOverwrite(config) - } + else -> {} } return true } - private fun setAndValidateCursorField( + private fun validateCursorField( cursorField: List?, airbyteStream: AirbyteStream, - config: AirbyteStreamConfiguration, ) { if (airbyteStream.sourceDefinedCursor != null && airbyteStream.sourceDefinedCursor!!) { if (!cursorField.isNullOrEmpty()) { // if cursor given is not empty and is NOT the same as the default, throw error - if (java.util.Set.copyOf(cursorField) != java.util.Set.copyOf(airbyteStream.defaultCursorField)) { + if (cursorField != airbyteStream.defaultCursorField) { throw ConnectionConfigurationProblem.sourceDefinedCursorFieldProblem(airbyteStream.name, airbyteStream.defaultCursorField!!) 
} } - config.cursorField = airbyteStream.defaultCursorField // this probably isn't necessary and should be already set } else { if (!cursorField.isNullOrEmpty()) { // validate cursor field @@ -234,26 +314,24 @@ object AirbyteCatalogHelper { if (!validCursorFields.contains(cursorField)) { throw ConnectionConfigurationProblem.invalidCursorField(airbyteStream.name, validCursorFields) } - config.cursorField = cursorField } else { // no default or given cursor field if (airbyteStream.defaultCursorField == null || airbyteStream.defaultCursorField!!.isEmpty()) { throw ConnectionConfigurationProblem.missingCursorField(airbyteStream.name) } - config.cursorField = airbyteStream.defaultCursorField // this probably isn't necessary and should be already set } } } - private fun setAndValidatePrimaryKey( + private fun validatePrimaryKey( primaryKey: List>?, airbyteStream: AirbyteStream, - config: AirbyteStreamConfiguration, ) { // if no source defined primary key if (airbyteStream.sourceDefinedPrimaryKey == null || airbyteStream.sourceDefinedPrimaryKey!!.isEmpty()) { if (!primaryKey.isNullOrEmpty()) { // validate primary key + val validPrimaryKey: List> = getStreamFields(airbyteStream.jsonSchema!!) // todo maybe check that they don't provide the same primary key twice? 
@@ -262,7 +340,6 @@ object AirbyteCatalogHelper { throw ConnectionConfigurationProblem.invalidPrimaryKey(airbyteStream.name, validPrimaryKey) } } - config.primaryKey = primaryKey } else { throw ConnectionConfigurationProblem.missingPrimaryKey(airbyteStream.name) } @@ -270,8 +347,6 @@ object AirbyteCatalogHelper { // source defined primary key exists if (!primaryKey.isNullOrEmpty()) { throw ConnectionConfigurationProblem.primaryKeyAlreadyDefined(airbyteStream.name) - } else { - config.primaryKey = airbyteStream.sourceDefinedPrimaryKey // this probably isn't necessary and should be already set } } } @@ -287,7 +362,7 @@ object AirbyteCatalogHelper { validSourceSyncModes: List?, validDestinationSyncModes: List, ): Set { - val validCombinedSyncModes: MutableSet = HashSet() + val validCombinedSyncModes: MutableSet = mutableSetOf() for (sourceSyncMode in validSourceSyncModes!!) { for (destinationSyncMode in validDestinationSyncModes) { val combinedSyncMode: ConnectionSyncModeEnum? = @@ -313,8 +388,7 @@ object AirbyteCatalogHelper { fun getStreamFields(connectorSchema: JsonNode): List> { val yamlMapper = ObjectMapper(YAMLFactory()) val streamFields: MutableList> = ArrayList() - val spec: JsonNode - spec = + val spec: JsonNode = try { yamlMapper.readTree(connectorSchema.traverse()) } catch (e: IOException) { @@ -328,14 +402,14 @@ object AirbyteCatalogHelper { val propertyFields = paths.fields() while (propertyFields.hasNext()) { val (propertyName, nestedProperties) = propertyFields.next() - streamFields.add(java.util.List.of(propertyName)) + streamFields.add(listOf(propertyName)) // retrieve nested paths for (entry in getStreamFields(nestedProperties)) { if (entry.isEmpty()) { continue } - val streamFieldPath: MutableList = ArrayList(java.util.List.of(propertyName)) + val streamFieldPath: MutableList = mutableListOf(propertyName) streamFieldPath.addAll(entry) streamFields.add(streamFieldPath) } diff --git a/airbyte-api-server/src/main/resources/application.yml 
b/airbyte-api-server/src/main/resources/application.yml index f9cdb48c361..7ffe8095bfb 100644 --- a/airbyte-api-server/src/main/resources/application.yml +++ b/airbyte-api-server/src/main/resources/application.yml @@ -1,6 +1,15 @@ micronaut: application: name: airbyte-api-server + caches: + # used by the analytics tracking client to cache calls to resolve the deployment and identity (workspace) for + # track events + analytics-tracking-deployments: + charset: "UTF-8" + expire-after-access: 10m + analytics-tracking-identity: + charset: "UTF-8" + expire-after-access: 10m env: cloud-deduction: true executors: @@ -64,9 +73,9 @@ airbyte: persistence: ${SECRET_PERSISTENCE:TESTING_CONFIG_DB_TABLE} store: aws: - access-key: ${AWS_ACCESS_KEY:} - secret-key: ${AWS_SECRET_ACCESS_KEY:} - region: ${AWS_REGION:} + access-key: ${AWS_SECRET_MANAGER_ACCESS_KEY_ID:} + secret-key: ${AWS_SECRET_MANAGER_SECRET_ACCESS_KEY:} + region: ${AWS_SECRET_MANAGER_REGION:} gcp: credentials: ${SECRET_STORE_GCP_CREDENTIALS:} project-id: ${SECRET_STORE_GCP_PROJECT_ID:} @@ -92,6 +101,9 @@ endpoints: beans: enabled: true sensitive: false + caches: + enabled: true + sensitive: false env: enabled: true sensitive: false diff --git a/airbyte-api-server/src/test/kotlin/io/airbyte/api/server/helpers/AirbyteCatalogHelperTest.kt b/airbyte-api-server/src/test/kotlin/io/airbyte/api/server/helpers/AirbyteCatalogHelperTest.kt new file mode 100644 index 00000000000..a4a70d8406d --- /dev/null +++ b/airbyte-api-server/src/test/kotlin/io/airbyte/api/server/helpers/AirbyteCatalogHelperTest.kt @@ -0,0 +1,603 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.api.server.helpers + +import io.airbyte.airbyte_api.model.generated.ConnectionSchedule +import io.airbyte.airbyte_api.model.generated.ConnectionSyncModeEnum +import io.airbyte.airbyte_api.model.generated.ScheduleTypeEnum +import io.airbyte.airbyte_api.model.generated.StreamConfiguration +import io.airbyte.airbyte_api.model.generated.StreamConfigurations +import io.airbyte.api.client.model.generated.AirbyteCatalog +import io.airbyte.api.client.model.generated.AirbyteStream +import io.airbyte.api.client.model.generated.AirbyteStreamAndConfiguration +import io.airbyte.api.client.model.generated.AirbyteStreamConfiguration +import io.airbyte.api.client.model.generated.DestinationSyncMode +import io.airbyte.api.client.model.generated.SelectedFieldInfo +import io.airbyte.api.client.model.generated.SyncMode +import io.airbyte.api.server.problems.ConnectionConfigurationProblem +import io.airbyte.commons.json.Jsons +import io.mockk.every +import io.mockk.mockk +import org.junit.jupiter.api.Assertions.assertEquals +import org.junit.jupiter.api.Assertions.assertFalse +import org.junit.jupiter.api.Assertions.assertThrows +import org.junit.jupiter.api.Assertions.assertTrue +import org.junit.jupiter.api.Test +import org.junit.jupiter.params.ParameterizedTest +import org.junit.jupiter.params.provider.EnumSource + +internal class AirbyteCatalogHelperTest { + @Test + internal fun `test that a stream configuration is not empty`() { + val streamConfigurations: StreamConfigurations = mockk() + + every { streamConfigurations.streams } returns listOf(mockk()) + + assertTrue(AirbyteCatalogHelper.hasStreamConfigurations(streamConfigurations)) + } + + @Test + internal fun `test that a stream configuration is empty`() { + val streamConfigurations: StreamConfigurations = mockk() + + every { streamConfigurations.streams } returns listOf() + + assertFalse(AirbyteCatalogHelper.hasStreamConfigurations(streamConfigurations)) + + every { streamConfigurations.streams } 
returns null + + assertFalse(AirbyteCatalogHelper.hasStreamConfigurations(streamConfigurations)) + + assertFalse(AirbyteCatalogHelper.hasStreamConfigurations(null)) + } + + @Test + internal fun `test that a copy of the AirbyteStreamConfiguration is returned when it is updated to full refresh overwrite mode`() { + val originalStreamConfiguration = createAirbyteStreamConfiguration() + + val updatedStreamConfiguration = AirbyteCatalogHelper.updateConfigDefaultFullRefreshOverwrite(config = originalStreamConfiguration) + assertFalse(originalStreamConfiguration === updatedStreamConfiguration) + assertEquals(SyncMode.FULL_REFRESH, updatedStreamConfiguration.syncMode) + assertEquals(DestinationSyncMode.OVERWRITE, updatedStreamConfiguration.destinationSyncMode) + } + + @Test + internal fun `test that a copy of the AirbyteCatalog is returned when all of its streams are updated to full refresh overwrite mode`() { + val originalAirbyteCatalog = createAirbyteCatalog() + val updatedAirbyteCatalog = AirbyteCatalogHelper.updateAllStreamsFullRefreshOverwrite(airbyteCatalog = originalAirbyteCatalog) + assertFalse(originalAirbyteCatalog === updatedAirbyteCatalog) + updatedAirbyteCatalog.streams.stream().forEach { stream -> + assertEquals(SyncMode.FULL_REFRESH, stream.config?.syncMode) + assertEquals(DestinationSyncMode.OVERWRITE, stream.config?.destinationSyncMode) + } + } + + @Test + internal fun `test that streams can be validated`() { + val referenceCatalog = createAirbyteCatalog() + val streamConfiguration = StreamConfiguration() + streamConfiguration.name = "name1" + val streamConfigurations = StreamConfigurations() + streamConfigurations.streams = listOf(streamConfiguration) + + assertTrue(AirbyteCatalogHelper.validateStreams(referenceCatalog = referenceCatalog, streamConfigurations = streamConfigurations)) + } + + @Test + internal fun `test that a stream with an invalid name is considered to be invalid`() { + val referenceCatalog = createAirbyteCatalog() + val 
streamConfiguration = StreamConfiguration() + streamConfiguration.name = "unknown" + val streamConfigurations = StreamConfigurations() + streamConfigurations.streams = listOf(streamConfiguration) + + val throwable = + assertThrows(ConnectionConfigurationProblem::class.java) { + AirbyteCatalogHelper.validateStreams(referenceCatalog = referenceCatalog, streamConfigurations = streamConfigurations) + } + assertEquals(true, throwable.message?.contains("Invalid stream found")) + } + + @Test + internal fun `test that streams with duplicate streams is considered to be invalid`() { + val referenceCatalog = createAirbyteCatalog() + val streamConfiguration1 = StreamConfiguration() + streamConfiguration1.name = "name1" + val streamConfiguration2 = StreamConfiguration() + streamConfiguration2.name = "name1" + val streamConfigurations = StreamConfigurations() + streamConfigurations.streams = listOf(streamConfiguration1, streamConfiguration2) + + val throwable = + assertThrows(ConnectionConfigurationProblem::class.java) { + AirbyteCatalogHelper.validateStreams(referenceCatalog = referenceCatalog, streamConfigurations = streamConfigurations) + } + assertEquals(true, throwable.message?.contains("Duplicate stream found in configuration")) + } + + @Test + internal fun `test that valid streams can be retrieved from the AirbyteCatalog`() { + val airbyteCatalog = createAirbyteCatalog() + val validStreamNames = AirbyteCatalogHelper.getValidStreams(airbyteCatalog = airbyteCatalog) + assertEquals(airbyteCatalog.streams.map { it.stream?.name }.toSet(), validStreamNames.keys) + } + + @Test + internal fun `test that the cron configuration can be validated`() { + val connectionSchedule = ConnectionSchedule() + connectionSchedule.scheduleType = ScheduleTypeEnum.CRON + connectionSchedule.cronExpression = "0 15 10 * * ? 
* UTC" + assertTrue(AirbyteCatalogHelper.validateCronConfiguration(connectionSchedule = connectionSchedule)) + assertFalse(connectionSchedule.cronExpression.contains("UTC")) + + connectionSchedule.scheduleType = ScheduleTypeEnum.MANUAL + assertTrue(AirbyteCatalogHelper.validateCronConfiguration(connectionSchedule = connectionSchedule)) + + assertTrue(AirbyteCatalogHelper.validateCronConfiguration(connectionSchedule = null)) + } + + @Test + internal fun `test that the cron configuration with a missing cron expression is invalid`() { + val connectionSchedule = ConnectionSchedule() + connectionSchedule.scheduleType = ScheduleTypeEnum.CRON + connectionSchedule.cronExpression = null + + val throwable = + assertThrows(ConnectionConfigurationProblem::class.java) { + AirbyteCatalogHelper.validateCronConfiguration(connectionSchedule = connectionSchedule) + } + assertEquals(true, throwable.message?.contains("Missing cron expression in the schedule.")) + } + + @Test + internal fun `test that the cron configuration with an invalid cron expression length is invalid`() { + val connectionSchedule = ConnectionSchedule() + connectionSchedule.scheduleType = ScheduleTypeEnum.CRON + connectionSchedule.cronExpression = "0 15 10 * * ? 
* * * *" + + val throwable = + assertThrows(ConnectionConfigurationProblem::class.java) { + AirbyteCatalogHelper.validateCronConfiguration(connectionSchedule = connectionSchedule) + } + assertEquals(true, throwable.message?.contains("Cron expression contains 10 parts but we expect one of [6, 7]")) + } + + @Test + internal fun `test that the cron configuration with an invalid cron expression is invalid`() { + val connectionSchedule = ConnectionSchedule() + connectionSchedule.scheduleType = ScheduleTypeEnum.CRON + connectionSchedule.cronExpression = "not a valid cron expression string" + + val throwable = + assertThrows(ConnectionConfigurationProblem::class.java) { + AirbyteCatalogHelper.validateCronConfiguration(connectionSchedule = connectionSchedule) + } + assertEquals(true, throwable.message?.contains("Failed to parse cron expression. Invalid chars in expression!")) + } + + @ParameterizedTest + @EnumSource(ConnectionSyncModeEnum::class) + internal fun `test that when a stream configuration is updated, the corret sync modes are set based on the stream configuration`( + connectionSyncMode: ConnectionSyncModeEnum, + ) { + val cursorField = "cursor" + val primayKeyColumn = "primary" + val airbyteStream = AirbyteStream() + val airbyteStreamConfiguration = createAirbyteStreamConfiguration() + val streamConfiguration = StreamConfiguration() + streamConfiguration.syncMode = connectionSyncMode + streamConfiguration.cursorField = listOf(cursorField) + streamConfiguration.primaryKey = listOf(listOf(primayKeyColumn)) + + val updatedAirbyteStreamConfiguration = + AirbyteCatalogHelper.updateAirbyteStreamConfiguration( + config = airbyteStreamConfiguration, + airbyteStream = airbyteStream, + streamConfiguration = streamConfiguration, + ) + assertEquals(true, updatedAirbyteStreamConfiguration.selected) + assertEquals(getSyncMode(connectionSyncMode), updatedAirbyteStreamConfiguration.syncMode) + assertEquals(getDestinationSyncMode(connectionSyncMode), 
updatedAirbyteStreamConfiguration.destinationSyncMode) + } + + @Test + internal fun `test that when a stream configuration does not have a configured sync mode, the updated configuration uses full refresh overwrite`() { + val cursorField = "cursor" + val primayKeyColumn = "primary" + val airbyteStream = AirbyteStream() + val airbyteStreamConfiguration = createAirbyteStreamConfiguration() + val streamConfiguration = StreamConfiguration() + streamConfiguration.syncMode = null + streamConfiguration.cursorField = listOf(cursorField) + streamConfiguration.primaryKey = listOf(listOf(primayKeyColumn)) + + val updatedAirbyteStreamConfiguration = + AirbyteCatalogHelper.updateAirbyteStreamConfiguration( + config = airbyteStreamConfiguration, + airbyteStream = airbyteStream, + streamConfiguration = streamConfiguration, + ) + + assertEquals(true, updatedAirbyteStreamConfiguration.selected) + assertEquals(SyncMode.FULL_REFRESH, updatedAirbyteStreamConfiguration.syncMode) + assertEquals(DestinationSyncMode.OVERWRITE, updatedAirbyteStreamConfiguration.destinationSyncMode) + } + + @Test + internal fun `test that when validating a stream without a sync mode, the sync mode is set to full refresh and the stream is considered valid`() { + val airbyteStream = AirbyteStream() + val streamConfiguration = StreamConfiguration() + streamConfiguration.syncMode = null + val airbyteStreamConfiguration = + AirbyteCatalogHelper.updateAirbyteStreamConfiguration( + config = createAirbyteStreamConfiguration(), + airbyteStream = airbyteStream, + streamConfiguration = streamConfiguration, + ) + + assertTrue(AirbyteCatalogHelper.validateStreamConfig(streamConfiguration, listOf(), airbyteStream)) + assertEquals(SyncMode.FULL_REFRESH, airbyteStreamConfiguration.syncMode) + assertEquals(DestinationSyncMode.OVERWRITE, airbyteStreamConfiguration.destinationSyncMode) + assertEquals(true, airbyteStreamConfiguration.selected) + } + + @Test + internal fun `test that if the stream configuration contains an 
invalid sync mode, the stream is considered invalid`() { + val airbyteStream = AirbyteStream() + val streamConfiguration = StreamConfiguration() + airbyteStream.supportedSyncModes = listOf(SyncMode.INCREMENTAL) + streamConfiguration.syncMode = ConnectionSyncModeEnum.FULL_REFRESH_OVERWRITE + streamConfiguration.name = "stream-name" + + val throwable = + assertThrows(ConnectionConfigurationProblem::class.java) { + AirbyteCatalogHelper.validateStreamConfig( + streamConfiguration = streamConfiguration, + validDestinationSyncModes = listOf(DestinationSyncMode.OVERWRITE), + airbyteStream = airbyteStream, + ) + } + assertEquals(true, throwable.message?.contains("Cannot set sync mode to ${streamConfiguration.syncMode} for stream")) + } + + @Test + internal fun `test that a stream configuration with FULL_REFRESH_APPEND is always considered to be valid`() { + val airbyteStream = AirbyteStream() + val streamConfiguration = StreamConfiguration() + airbyteStream.supportedSyncModes = listOf(SyncMode.FULL_REFRESH) + streamConfiguration.syncMode = ConnectionSyncModeEnum.FULL_REFRESH_APPEND + val airbyteStreamConfiguration = + AirbyteCatalogHelper.updateAirbyteStreamConfiguration( + createAirbyteStreamConfiguration(), + airbyteStream, + streamConfiguration, + ) + + assertTrue( + AirbyteCatalogHelper.validateStreamConfig( + streamConfiguration = streamConfiguration, + validDestinationSyncModes = listOf(DestinationSyncMode.APPEND), + airbyteStream = airbyteStream, + ), + ) + assertEquals(SyncMode.FULL_REFRESH, airbyteStreamConfiguration.syncMode) + assertEquals(DestinationSyncMode.APPEND, airbyteStreamConfiguration.destinationSyncMode) + } + + @Test + internal fun `test that a stream configuration with FULL_REFRESH_OVERWRITE is always considered to be valid`() { + val airbyteStream = AirbyteStream() + val streamConfiguration = StreamConfiguration() + airbyteStream.supportedSyncModes = listOf(SyncMode.FULL_REFRESH) + streamConfiguration.syncMode = 
ConnectionSyncModeEnum.FULL_REFRESH_OVERWRITE + val airbyteStreamConfiguration = + AirbyteCatalogHelper.updateAirbyteStreamConfiguration( + config = createAirbyteStreamConfiguration(), + airbyteStream = airbyteStream, + streamConfiguration = streamConfiguration, + ) + + assertTrue( + AirbyteCatalogHelper.validateStreamConfig( + streamConfiguration = streamConfiguration, + validDestinationSyncModes = listOf(DestinationSyncMode.OVERWRITE), + airbyteStream = airbyteStream, + ), + ) + assertEquals(SyncMode.FULL_REFRESH, airbyteStreamConfiguration.syncMode) + assertEquals(DestinationSyncMode.OVERWRITE, airbyteStreamConfiguration.destinationSyncMode) + } + + @Test + internal fun `test that a stream configuration with INCREMENTAL_APPEND is only valid if the source defined cursor field is also valid`() { + val cursorField = "cursor" + val airbyteStream = AirbyteStream() + val airbyteStreamConfiguration = createAirbyteStreamConfiguration() + val streamConfiguration = StreamConfiguration() + airbyteStream.defaultCursorField = listOf(cursorField) + airbyteStream.sourceDefinedCursor = true + airbyteStream.supportedSyncModes = listOf(SyncMode.INCREMENTAL) + streamConfiguration.cursorField = listOf(cursorField) + streamConfiguration.syncMode = ConnectionSyncModeEnum.INCREMENTAL_APPEND + + assertTrue( + AirbyteCatalogHelper.validateStreamConfig( + streamConfiguration = streamConfiguration, + validDestinationSyncModes = listOf(DestinationSyncMode.APPEND), + airbyteStream = airbyteStream, + ), + ) + assertEquals(SyncMode.INCREMENTAL, airbyteStreamConfiguration.syncMode) + assertEquals(DestinationSyncMode.APPEND, airbyteStreamConfiguration.destinationSyncMode) + assertEquals(listOf(cursorField), airbyteStreamConfiguration.cursorField) + } + + @Test + internal fun `test that a stream configuration with INCREMENTAL_APPEND is invalid if the source defined cursor field is invalid`() { + val cursorField = "cursor" + val streamName = "stream-name" + val airbyteStream = AirbyteStream() + 
val streamConfiguration = StreamConfiguration() + airbyteStream.defaultCursorField = listOf(cursorField) + airbyteStream.name = streamName + airbyteStream.sourceDefinedCursor = true + airbyteStream.supportedSyncModes = listOf(SyncMode.INCREMENTAL) + streamConfiguration.cursorField = listOf("other") + streamConfiguration.name = airbyteStream.name + streamConfiguration.syncMode = ConnectionSyncModeEnum.INCREMENTAL_APPEND + val airbyteStreamConfiguration = + AirbyteCatalogHelper.updateAirbyteStreamConfiguration( + config = createAirbyteStreamConfiguration(), + airbyteStream = airbyteStream, + streamConfiguration = streamConfiguration, + ) + + val throwable = + assertThrows(ConnectionConfigurationProblem::class.java) { + AirbyteCatalogHelper.validateStreamConfig( + streamConfiguration = streamConfiguration, + validDestinationSyncModes = listOf(DestinationSyncMode.APPEND), + airbyteStream = airbyteStream, + ) + } + assertEquals(true, throwable.message?.contains("Do not include a cursor field configuration for this stream")) + assertEquals(SyncMode.INCREMENTAL, airbyteStreamConfiguration.syncMode) + assertEquals(DestinationSyncMode.APPEND, airbyteStreamConfiguration.destinationSyncMode) + } + + @Test + internal fun `test that a stream configuration with INCREMENTAL_APPEND is only valid if the source cursor field is also valid`() { + val cursorField = "cursor" + val airbyteStream = AirbyteStream() + val streamConfiguration = StreamConfiguration() + airbyteStream.defaultCursorField = listOf(cursorField) + airbyteStream.jsonSchema = Jsons.deserialize("{\"properties\": {\"$cursorField\": {}}}") + airbyteStream.sourceDefinedCursor = false + airbyteStream.supportedSyncModes = listOf(SyncMode.INCREMENTAL) + streamConfiguration.cursorField = listOf(cursorField) + streamConfiguration.syncMode = ConnectionSyncModeEnum.INCREMENTAL_APPEND + val airbyteStreamConfiguration = + AirbyteCatalogHelper.updateAirbyteStreamConfiguration( + config = createAirbyteStreamConfiguration(), + 
airbyteStream = airbyteStream, + streamConfiguration = streamConfiguration, + ) + + assertTrue( + AirbyteCatalogHelper.validateStreamConfig( + streamConfiguration = streamConfiguration, + validDestinationSyncModes = listOf(DestinationSyncMode.APPEND), + airbyteStream = airbyteStream, + ), + ) + assertEquals(SyncMode.INCREMENTAL, airbyteStreamConfiguration.syncMode) + assertEquals(DestinationSyncMode.APPEND, airbyteStreamConfiguration.destinationSyncMode) + assertEquals(listOf(cursorField), airbyteStreamConfiguration.cursorField) + } + + @Test + internal fun `test that a stream configuration with INCREMENTAL_APPEND is invalid if the source cursor field is invalid`() { + val cursorField = "cursor" + val otherCursorField = "other" + val airbyteStream = AirbyteStream() + val streamConfiguration = StreamConfiguration() + airbyteStream.defaultCursorField = listOf(otherCursorField) + airbyteStream.jsonSchema = Jsons.deserialize("{\"properties\": {\"$otherCursorField\": {}}}") + airbyteStream.name = "name" + airbyteStream.sourceDefinedCursor = false + airbyteStream.supportedSyncModes = listOf(SyncMode.INCREMENTAL) + streamConfiguration.cursorField = listOf(cursorField) + streamConfiguration.syncMode = ConnectionSyncModeEnum.INCREMENTAL_APPEND + val airbyteStreamConfiguration = + AirbyteCatalogHelper.updateAirbyteStreamConfiguration( + config = createAirbyteStreamConfiguration(), + airbyteStream = airbyteStream, + streamConfiguration = streamConfiguration, + ) + + val throwable = + assertThrows(ConnectionConfigurationProblem::class.java) { + AirbyteCatalogHelper.validateStreamConfig( + streamConfiguration = streamConfiguration, + validDestinationSyncModes = listOf(DestinationSyncMode.APPEND), + airbyteStream = airbyteStream, + ) + } + assertEquals( + true, + throwable.message?.contains( + "Invalid cursor field for stream: ${airbyteStream.name}. 
The list of valid cursor fields include: [[$otherCursorField]]", + ), + ) + assertEquals(SyncMode.INCREMENTAL, airbyteStreamConfiguration.syncMode) + assertEquals(DestinationSyncMode.APPEND, airbyteStreamConfiguration.destinationSyncMode) + assertEquals(listOf(cursorField), airbyteStreamConfiguration.cursorField) + } + + @Test + internal fun `test that a stream configuration with INCREMENTAL_APPEND is invalid if there is no cursor field`() { + val cursorField = "cursor" + val airbyteStream = AirbyteStream() + val streamConfiguration = StreamConfiguration() + airbyteStream.defaultCursorField = listOf() + airbyteStream.jsonSchema = Jsons.deserialize("{\"properties\": {\"$cursorField\": {}}}") + airbyteStream.name = "name" + airbyteStream.sourceDefinedCursor = false + airbyteStream.supportedSyncModes = listOf(SyncMode.INCREMENTAL) + streamConfiguration.cursorField = listOf() + streamConfiguration.syncMode = ConnectionSyncModeEnum.INCREMENTAL_APPEND + val airbyteStreamConfiguration = + AirbyteCatalogHelper.updateAirbyteStreamConfiguration( + config = createAirbyteStreamConfiguration(), + airbyteStream = airbyteStream, + streamConfiguration = streamConfiguration, + ) + + val throwable = + assertThrows(ConnectionConfigurationProblem::class.java) { + AirbyteCatalogHelper.validateStreamConfig( + streamConfiguration = streamConfiguration, + validDestinationSyncModes = listOf(DestinationSyncMode.APPEND), + airbyteStream = airbyteStream, + ) + } + assertEquals( + true, + throwable.message?.contains( + "No default cursor field for stream: ${airbyteStream.name}. 
Please include a cursor field configuration for this stream.", + ), + ) + assertEquals(SyncMode.INCREMENTAL, airbyteStreamConfiguration.syncMode) + assertEquals(DestinationSyncMode.APPEND, airbyteStreamConfiguration.destinationSyncMode) + } + + @Test + internal fun `test that an INCREMENTAL_DEDUPED_HISTORY stream is only valid if the source defined cursor and primary key field are also valid`() { + val cursorField = "cursor" + val primaryKey = "primary" + val airbyteStream = AirbyteStream() + val streamConfiguration = StreamConfiguration() + airbyteStream.defaultCursorField = listOf(cursorField) + airbyteStream.jsonSchema = Jsons.deserialize("{\"properties\": {\"$cursorField\": {}, \"$primaryKey\": {}}}") + airbyteStream.sourceDefinedCursor = true + airbyteStream.supportedSyncModes = listOf(SyncMode.INCREMENTAL) + streamConfiguration.cursorField = listOf(cursorField) + streamConfiguration.primaryKey = listOf(listOf(primaryKey)) + streamConfiguration.syncMode = ConnectionSyncModeEnum.INCREMENTAL_DEDUPED_HISTORY + val airbyteStreamConfiguration = + AirbyteCatalogHelper.updateAirbyteStreamConfiguration( + config = createAirbyteStreamConfiguration(), + airbyteStream = airbyteStream, + streamConfiguration = streamConfiguration, + ) + + assertTrue( + AirbyteCatalogHelper.validateStreamConfig( + streamConfiguration = streamConfiguration, + validDestinationSyncModes = listOf(DestinationSyncMode.APPEND_DEDUP), + airbyteStream = airbyteStream, + ), + ) + assertEquals(SyncMode.INCREMENTAL, airbyteStreamConfiguration.syncMode) + assertEquals(DestinationSyncMode.APPEND_DEDUP, airbyteStreamConfiguration.destinationSyncMode) + assertEquals(listOf(cursorField), airbyteStreamConfiguration.cursorField) + assertEquals(listOf(listOf(primaryKey)), airbyteStreamConfiguration.primaryKey) + } + + @Test + internal fun `test that an INCREMENTAL_DEDUPED_HISTORY stream is only valid if the source cursor field and primary key field are also valid`() { + val cursorField = "cursor" + val 
primaryKey = "primary" + val airbyteStream = AirbyteStream() + val streamConfiguration = StreamConfiguration() + airbyteStream.defaultCursorField = listOf(cursorField) + airbyteStream.jsonSchema = Jsons.deserialize("{\"properties\": {\"$cursorField\": {}, \"$primaryKey\": {}}}") + airbyteStream.sourceDefinedCursor = false + airbyteStream.supportedSyncModes = listOf(SyncMode.INCREMENTAL) + streamConfiguration.cursorField = listOf(cursorField) + streamConfiguration.primaryKey = listOf(listOf(primaryKey)) + streamConfiguration.syncMode = ConnectionSyncModeEnum.INCREMENTAL_DEDUPED_HISTORY + val airbyteStreamConfiguration = + AirbyteCatalogHelper.updateAirbyteStreamConfiguration( + config = createAirbyteStreamConfiguration(), + airbyteStream = airbyteStream, + streamConfiguration = streamConfiguration, + ) + + assertTrue( + AirbyteCatalogHelper.validateStreamConfig( + streamConfiguration = streamConfiguration, + validDestinationSyncModes = listOf(DestinationSyncMode.APPEND_DEDUP), + airbyteStream = airbyteStream, + ), + ) + assertEquals(SyncMode.INCREMENTAL, airbyteStreamConfiguration.syncMode) + assertEquals(DestinationSyncMode.APPEND_DEDUP, airbyteStreamConfiguration.destinationSyncMode) + assertEquals(listOf(cursorField), airbyteStreamConfiguration.cursorField) + assertEquals(listOf(listOf(primaryKey)), airbyteStreamConfiguration.primaryKey) + } + + @Test + internal fun `test that the combined sync modes are valid`() { + val validSourceSyncModes = listOf(SyncMode.FULL_REFRESH) + val validDestinationSyncModes = listOf(DestinationSyncMode.OVERWRITE) + + val combinedSyncModes = + AirbyteCatalogHelper.validCombinedSyncModes( + validSourceSyncModes = validSourceSyncModes, + validDestinationSyncModes = validDestinationSyncModes, + ) + assertEquals(1, combinedSyncModes.size) + assertEquals(listOf(ConnectionSyncModeEnum.FULL_REFRESH_OVERWRITE).first(), combinedSyncModes.first()) + } + + private fun createAirbyteCatalog(): AirbyteCatalog { + val airbyteCatalog = 
AirbyteCatalog() + val streams = mutableListOf() + for (i in 1..5) { + val streamAndConfiguration = AirbyteStreamAndConfiguration() + val stream = AirbyteStream() + stream.name = "name$i" + stream.namespace = "namespace" + streamAndConfiguration.stream = stream + streamAndConfiguration.config = createAirbyteStreamConfiguration() + streams += streamAndConfiguration + } + airbyteCatalog.streams(streams) + return airbyteCatalog + } + + private fun createAirbyteStreamConfiguration(): AirbyteStreamConfiguration { + val airbyteStreamConfiguration = AirbyteStreamConfiguration() + airbyteStreamConfiguration.aliasName = "alias" + airbyteStreamConfiguration.cursorField = listOf("cursor") + airbyteStreamConfiguration.destinationSyncMode = DestinationSyncMode.APPEND + airbyteStreamConfiguration.fieldSelectionEnabled = true + airbyteStreamConfiguration.primaryKey = listOf(listOf("primary")) + airbyteStreamConfiguration.selected = false + airbyteStreamConfiguration.selectedFields = listOf(SelectedFieldInfo()) + airbyteStreamConfiguration.suggested = false + airbyteStreamConfiguration.syncMode = SyncMode.INCREMENTAL + return airbyteStreamConfiguration + } + + private fun getSyncMode(connectionSyncMode: ConnectionSyncModeEnum): SyncMode { + return when (connectionSyncMode) { + ConnectionSyncModeEnum.FULL_REFRESH_OVERWRITE -> SyncMode.FULL_REFRESH + ConnectionSyncModeEnum.FULL_REFRESH_APPEND -> SyncMode.FULL_REFRESH + ConnectionSyncModeEnum.INCREMENTAL_APPEND -> SyncMode.INCREMENTAL + ConnectionSyncModeEnum.INCREMENTAL_DEDUPED_HISTORY -> SyncMode.INCREMENTAL + } + } + + private fun getDestinationSyncMode(connectionSyncMode: ConnectionSyncModeEnum): DestinationSyncMode { + return when (connectionSyncMode) { + ConnectionSyncModeEnum.FULL_REFRESH_OVERWRITE -> DestinationSyncMode.OVERWRITE + ConnectionSyncModeEnum.FULL_REFRESH_APPEND -> DestinationSyncMode.APPEND + ConnectionSyncModeEnum.INCREMENTAL_APPEND -> DestinationSyncMode.APPEND + 
ConnectionSyncModeEnum.INCREMENTAL_DEDUPED_HISTORY -> DestinationSyncMode.APPEND_DEDUP + } + } +} diff --git a/airbyte-api/build.gradle b/airbyte-api/build.gradle index 311af90d7e5..070a35aacc0 100644 --- a/airbyte-api/build.gradle +++ b/airbyte-api/build.gradle @@ -16,7 +16,7 @@ def workloadSpecFile = "$projectDir/src/main/openapi/workload-openapi.yaml" def connectorBuilderServerSpecFile = project(":airbyte-connector-builder-server").file("src/main/openapi/openapi.yaml").getPath() def genApiServer = tasks.register("generateApiServer", GenerateTask) { - def serverOutputDir = "$buildDir/generated/api/server" + def serverOutputDir = "${getLayout().buildDirectory.get()}/generated/api/server" inputs.file specFile outputs.dir serverOutputDir @@ -69,7 +69,7 @@ def genApiServer = tasks.register("generateApiServer", GenerateTask) { } def genApiClient = tasks.register("generateApiClient", GenerateTask) { - def clientOutputDir = "$buildDir/generated/api/client" + def clientOutputDir = "${getLayout().buildDirectory.get()}/generated/api/client" inputs.file specFile outputs.dir clientOutputDir @@ -107,7 +107,7 @@ def genApiClient = tasks.register("generateApiClient", GenerateTask) { } def genApiClient2 = tasks.register("genApiClient2", GenerateTask) { - def clientOutputDir = "$buildDir/generated/api/client2" + def clientOutputDir = "${getLayout().buildDirectory.get()}/generated/api/client2" inputs.file specFile outputs.dir clientOutputDir @@ -136,22 +136,23 @@ def genApiClient2 = tasks.register("genApiClient2", GenerateTask) { generateApiDocumentation = false configOptions = [ + enumPropertyNaming : "UPPERCASE", generatePom : "false", interfaceOnly : "true" ] doLast { - def apiClientPath = 'build/generated/api/client2/src/main/kotlin/org/openapitools/client/infrastructure/ApiClient.kt' + def apiClientPath = "${clientOutputDir}/src/main/kotlin/org/openapitools/client/infrastructure/ApiClient.kt" updateApiClientWithFailsafe(apiClientPath) - 
updateDomainClientsWithFailsafe('build/generated/api/client2/src/main/kotlin/io/airbyte/api/client2/generated') + updateDomainClientsWithFailsafe("${clientOutputDir}/src/main/kotlin/io/airbyte/api/client2/generated") // a JsonNode adapter needs to be added to the kotlin client's serializer to handle JsonNode fields in requests - updateApiClientSerializerWithJsonNodeAdapter('build/generated/api/client2/src/main/kotlin/org/openapitools/client/infrastructure/Serializer.kt') + updateApiClientSerializerWithJsonNodeAdapter("${clientOutputDir}/src/main/kotlin/org/openapitools/client/infrastructure/Serializer.kt") } } def genApiDocs = tasks.register("generateApiDocs", GenerateTask) { - def docsOutputDir = "$buildDir/generated/api/docs" + def docsOutputDir = "${getLayout().buildDirectory.get()}/generated/api/docs" generatorName = "html" inputSpec = specFile @@ -183,7 +184,7 @@ def genApiDocs = tasks.register("generateApiDocs", GenerateTask) { } def genPublicApiServer = tasks.register('generatePublicApiServer', GenerateTask) { - def serverOutputDir = "$buildDir/generated/public_api/server" + def serverOutputDir = "${getLayout().buildDirectory.get()}/generated/public_api/server" inputs.file specFile outputs.dir serverOutputDir @@ -227,7 +228,7 @@ def genPublicApiServer = tasks.register('generatePublicApiServer', GenerateTask) } def genAirbyteApiServer = tasks.register('generateAirbyteApiServer', GenerateTask) { - def serverOutputDir = "$buildDir/generated/airbyte_api/server" + def serverOutputDir = "${getLayout().buildDirectory.get()}/generated/airbyte_api/server" inputs.file airbyteApiSpecFile outputs.dir serverOutputDir @@ -270,7 +271,7 @@ def genAirbyteApiServer = tasks.register('generateAirbyteApiServer', GenerateTas // TODO: Linked to document okhhtp def genWorkloadApiClient = tasks.register("genWorkloadApiClient", GenerateTask) { - def clientOutputDir = "$buildDir/generated/workloadapi/client" + def clientOutputDir = 
"${getLayout().buildDirectory.get()}/generated/workloadapi/client" inputs.file workloadSpecFile outputs.dir clientOutputDir @@ -304,9 +305,9 @@ def genWorkloadApiClient = tasks.register("genWorkloadApiClient", GenerateTask) ] doLast { - def apiClientPath = 'build/generated/workloadapi/client/src/main/kotlin/io/airbyte/workload/api/client/generated/infrastructure/ApiClient.kt' + def apiClientPath = "${clientOutputDir}/src/main/kotlin/io/airbyte/workload/api/client/generated/infrastructure/ApiClient.kt" updateApiClientWithFailsafe(apiClientPath) - def generatedDomainClientsPath = 'build/generated/workloadapi/client/src/main/kotlin/io/airbyte/workload/api/client/generated' + def generatedDomainClientsPath = "${clientOutputDir}/src/main/kotlin/io/airbyte/workload/api/client/generated" updateDomainClientsWithFailsafe(generatedDomainClientsPath) // the kotlin client (as opposed to the java client) doesn't include the response body in the exception message. updateDomainClientsToIncludeHttpResponseBodyOnClientException(generatedDomainClientsPath) @@ -316,7 +317,7 @@ def genWorkloadApiClient = tasks.register("genWorkloadApiClient", GenerateTask) } def genConnectorBuilderServerApiClient = tasks.register("genConnectorBuilderServerApiClient", GenerateTask) { - def clientOutputDir = "$buildDir/generated/connectorbuilderserverapi/client" + def clientOutputDir = "${getLayout().buildDirectory.get()}/generated/connectorbuilderserverapi/client" inputs.file connectorBuilderServerSpecFile outputs.dir clientOutputDir @@ -345,9 +346,9 @@ def genConnectorBuilderServerApiClient = tasks.register("genConnectorBuilderServ doLast { // Delete file generated by the client2 task - def dir = file('build/generated/connectorbuilderserverapi/client/src/main/kotlin/org').deleteDir() + def dir = file("${clientOutputDir}/src/main/kotlin/org").deleteDir() - def generatedDomainClientsPath = 
'build/generated/connectorbuilderserverapi/client/src/main/kotlin/io/airbyte/connectorbuilderserver/api/client/generated' + def generatedDomainClientsPath = "${clientOutputDir}/src/main/kotlin/io/airbyte/connectorbuilderserver/api/client/generated" updateDomainClientsWithFailsafe(generatedDomainClientsPath) // the kotlin client (as opposed to the java client) doesn't include the response body in the exception message. updateDomainClientsToIncludeHttpResponseBodyOnClientException(generatedDomainClientsPath) @@ -385,20 +386,24 @@ dependencies { kapt libs.v3.swagger.annotations api libs.bundles.micronaut.annotation - api(libs.kotlin.logging) + api libs.micronaut.http + api libs.bundles.micronaut.metrics + api libs.failsafe.okhttp + api libs.okhttp + api libs.guava + api libs.java.jwt + api libs.google.auth.library.oauth2.http + api libs.kotlin.logging implementation platform(libs.micronaut.platform) implementation libs.bundles.micronaut implementation libs.commons.io - implementation libs.failsafe.okhttp - implementation libs.guava implementation libs.jakarta.annotation.api implementation libs.jakarta.ws.rs.api implementation libs.jakarta.validation.api implementation libs.jackson.datatype implementation libs.jackson.databind implementation libs.moshi.kotlin - implementation libs.okhttp implementation libs.openapi.jackson.databind.nullable implementation libs.reactor.core implementation libs.slf4j.api @@ -470,7 +475,12 @@ private def updateApiClientWithFailsafe(def clientPath) { 'val response = client.newCall(request).execute()', '''val call = client.newCall(request) val failsafeCall = FailsafeCall.with(policy).compose(call) - val response: Response = failsafeCall.execute()''') + val response: Response = failsafeCall.execute() + + return response.use { processResponse(response) } + } + + protected inline fun processResponse(response: Response): ApiResponse {''') // add imports if not exist if (!apiClientFileText.contains("import dev.failsafe.RetryPolicy")) { diff 
--git a/airbyte-api/src/main/java/io/airbyte/api/client/AirbyteApiClient.java b/airbyte-api/src/main/java/io/airbyte/api/client/AirbyteApiClient.java index ae4cb5134a4..21066fb7164 100644 --- a/airbyte-api/src/main/java/io/airbyte/api/client/AirbyteApiClient.java +++ b/airbyte-api/src/main/java/io/airbyte/api/client/AirbyteApiClient.java @@ -64,7 +64,6 @@ public class AirbyteApiClient { private final DestinationDefinitionSpecificationApi destinationSpecificationApi; private final JobsApi jobsApi; private final JobRetryStatesApi jobRetryStatesApi; - private final PatchedLogsApi logsApi; private final OperationApi operationApi; private final SourceDefinitionApi sourceDefinitionApi; private final SourceApi sourceApi; @@ -87,7 +86,6 @@ public AirbyteApiClient(final ApiClient apiClient) { destinationSpecificationApi = new DestinationDefinitionSpecificationApi(apiClient); jobsApi = new JobsApi(apiClient); jobRetryStatesApi = new JobRetryStatesApi(apiClient); - logsApi = new PatchedLogsApi(apiClient); operationApi = new OperationApi(apiClient); sourceDefinitionApi = new SourceDefinitionApi(apiClient); sourceApi = new SourceApi(apiClient); @@ -147,10 +145,6 @@ public WorkspaceApi getWorkspaceApi() { return workspaceApi; } - public PatchedLogsApi getLogsApi() { - return logsApi; - } - public OperationApi getOperationApi() { return operationApi; } diff --git a/airbyte-api/src/main/java/io/airbyte/api/client/PatchedLogsApi.java b/airbyte-api/src/main/java/io/airbyte/api/client/PatchedLogsApi.java deleted file mode 100644 index 9a2aaf2061b..00000000000 --- a/airbyte-api/src/main/java/io/airbyte/api/client/PatchedLogsApi.java +++ /dev/null @@ -1,138 +0,0 @@ -/* - * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.api.client; - -import com.fasterxml.jackson.databind.ObjectMapper; -import io.airbyte.api.client.invoker.generated.ApiClient; -import io.airbyte.api.client.invoker.generated.ApiException; -import io.airbyte.api.client.invoker.generated.ApiResponse; -import io.airbyte.api.client.model.generated.LogsRequestBody; -import java.io.File; -import java.io.IOException; -import java.io.InputStream; -import java.net.URI; -import java.net.http.HttpClient; -import java.net.http.HttpRequest; -import java.net.http.HttpResponse; -import java.time.Duration; -import java.util.function.Consumer; -import org.apache.commons.io.FileUtils; - -/** - * This class is a copy of {@link io.airbyte.api.client.generated.LogsApi} except it allows Accept: - * text/plain. Without this modification, {@link io.airbyte.api.client.generated.LogsApi} returns a - * 406 because the generated code requests the wrong response type. - */ -public class PatchedLogsApi { - - private final HttpClient memberVarHttpClient; - private final ObjectMapper memberVarObjectMapper; - private final String memberVarBaseUri; - private final Consumer memberVarInterceptor; - private final Duration memberVarReadTimeout; - private final Consumer> memberVarResponseInterceptor; - - public PatchedLogsApi() { - this(new ApiClient()); - } - - public PatchedLogsApi(final ApiClient apiClient) { - memberVarHttpClient = apiClient.getHttpClient(); - memberVarObjectMapper = apiClient.getObjectMapper(); - memberVarBaseUri = apiClient.getBaseUri(); - memberVarInterceptor = apiClient.getRequestInterceptor(); - memberVarReadTimeout = apiClient.getReadTimeout(); - memberVarResponseInterceptor = apiClient.getResponseInterceptor(); - } - - /** - * Get logs. 
- * - * @param logsRequestBody (required) - * @return File - * @throws ApiException if fails to make API call - */ - public File getLogs(final LogsRequestBody logsRequestBody) throws ApiException { - final ApiResponse localVarResponse = getLogsWithHttpInfo(logsRequestBody); - return localVarResponse.getData(); - } - - /** - * Get logs. - * - * @param logsRequestBody (required) - * @return ApiResponse<File> - * @throws ApiException if fails to make API call - */ - public ApiResponse getLogsWithHttpInfo(final LogsRequestBody logsRequestBody) throws ApiException { - final HttpRequest.Builder localVarRequestBuilder = getLogsRequestBuilder(logsRequestBody); - try { - final HttpResponse localVarResponse = memberVarHttpClient.send( - localVarRequestBuilder.build(), - HttpResponse.BodyHandlers.ofInputStream()); - if (memberVarResponseInterceptor != null) { - memberVarResponseInterceptor.accept(localVarResponse); - } - if (isErrorResponse(localVarResponse)) { - throw new ApiException(localVarResponse.statusCode(), - "getLogs call received non-success response", - localVarResponse.headers(), - localVarResponse.body() == null ? 
null : new String(localVarResponse.body().readAllBytes())); - } - - final File tmpFile = File.createTempFile("patched-logs-api", "response"); // CHANGED - tmpFile.deleteOnExit(); // CHANGED - - FileUtils.copyInputStreamToFile(localVarResponse.body(), tmpFile); // CHANGED - - return new ApiResponse( - localVarResponse.statusCode(), - localVarResponse.headers().map(), - tmpFile // CHANGED - ); - } catch (final IOException e) { - throw new ApiException(e); - } catch (final InterruptedException e) { - Thread.currentThread().interrupt(); - throw new ApiException(e); - } - } - - private Boolean isErrorResponse(final HttpResponse httpResponse) { - return httpResponse.statusCode() / 100 != 2; - } - - private HttpRequest.Builder getLogsRequestBuilder(final LogsRequestBody logsRequestBody) throws ApiException { - // verify the required parameter 'logsRequestBody' is set - if (logsRequestBody == null) { - throw new ApiException(400, "Missing the required parameter 'logsRequestBody' when calling getLogs"); - } - - final HttpRequest.Builder localVarRequestBuilder = HttpRequest.newBuilder(); - - final String localVarPath = "/v1/logs/get"; - - localVarRequestBuilder.uri(URI.create(memberVarBaseUri + localVarPath)); - - localVarRequestBuilder.header("Content-Type", "application/json"); - - localVarRequestBuilder.header("Accept", "text/plain"); // CHANGED - - try { - final byte[] localVarPostBody = memberVarObjectMapper.writeValueAsBytes(logsRequestBody); - localVarRequestBuilder.method("POST", HttpRequest.BodyPublishers.ofByteArray(localVarPostBody)); - } catch (final IOException e) { - throw new ApiException(e); - } - if (memberVarReadTimeout != null) { - localVarRequestBuilder.timeout(memberVarReadTimeout); - } - if (memberVarInterceptor != null) { - memberVarInterceptor.accept(localVarRequestBuilder); - } - return localVarRequestBuilder; - } - -} diff --git a/airbyte-api/src/main/kotlin/AirbyteApiClient2.kt b/airbyte-api/src/main/kotlin/AirbyteApiClient2.kt index 
ace9f4a9c1e..8af917ca932 100644 --- a/airbyte-api/src/main/kotlin/AirbyteApiClient2.kt +++ b/airbyte-api/src/main/kotlin/AirbyteApiClient2.kt @@ -16,15 +16,23 @@ import io.airbyte.api.client2.generated.HealthApi import io.airbyte.api.client2.generated.JobRetryStatesApi import io.airbyte.api.client2.generated.JobsApi import io.airbyte.api.client2.generated.OperationApi +import io.airbyte.api.client2.generated.OrganizationApi +import io.airbyte.api.client2.generated.PermissionApi import io.airbyte.api.client2.generated.SecretsPersistenceConfigApi import io.airbyte.api.client2.generated.SourceApi import io.airbyte.api.client2.generated.SourceDefinitionApi import io.airbyte.api.client2.generated.SourceDefinitionSpecificationApi import io.airbyte.api.client2.generated.StateApi import io.airbyte.api.client2.generated.StreamStatusesApi +import io.airbyte.api.client2.generated.UserApi +import io.airbyte.api.client2.generated.WebBackendApi import io.airbyte.api.client2.generated.WorkspaceApi +import io.micronaut.context.annotation.Requires +import io.micronaut.context.annotation.Value +import jakarta.inject.Named +import jakarta.inject.Singleton import okhttp3.OkHttpClient -import java.io.IOException +import okhttp3.Response /** * This class wraps all the generated API clients and provides a single entry point. This class is meant @@ -45,52 +53,39 @@ import java.io.IOException *
  • 3. Integrate failsafe (https://failsafe.dev/) for circuit breaking / retry
  • * policies. * - *

    - * todo (cgardens): The LogsApi is intentionally not included because in the java client we had to do some - * work to set the correct headers in the generated code. At some point we will need to test that that - * functionality works in the new client (and if necessary, patch it). Context: https://github.com/airbytehq/airbyte/pull/1799 */ @Suppress("MemberVisibilityCanBePrivate") +@Singleton +@Requires(property = "airbyte.internal-api.base-path") class AirbyteApiClient2 @JvmOverloads constructor( - basePath: String, - policy: RetryPolicy = RetryPolicy.ofDefaults(), - var httpClient: OkHttpClient = OkHttpClient(), - throwOn5xx: Boolean = true, + @Value("\${airbyte.internal-api.base-path}") basePath: String, + @Named("airbyteApiClientRetryPolicy") policy: RetryPolicy, + @Named("airbyteApiOkHttpClient") httpClient: OkHttpClient, ) { - init { - if (throwOn5xx) { - httpClient = httpClient.newBuilder().addInterceptor(ThrowOn5xxInterceptor()).build() - } - } - + val attemptApi = AttemptApi(basePath = basePath, client = httpClient, policy = policy) val connectionApi = ConnectionApi(basePath = basePath, client = httpClient, policy = policy) val connectorBuilderProjectApi = ConnectorBuilderProjectApi(basePath = basePath, client = httpClient, policy = policy) val deploymentMetadataApi = DeploymentMetadataApi(basePath = basePath, client = httpClient, policy = policy) - val destinationDefinitionApi = DestinationDefinitionApi(basePath = basePath, client = httpClient, policy = policy) val destinationApi = DestinationApi(basePath = basePath, client = httpClient, policy = policy) - val destinationSpecificationApi = DestinationDefinitionSpecificationApi(basePath = basePath, client = httpClient, policy = policy) + val destinationDefinitionApi = DestinationDefinitionApi(basePath = basePath, client = httpClient, policy = policy) + val destinationDefinitionSpecificationApi = + DestinationDefinitionSpecificationApi(basePath = basePath, client = httpClient, policy = policy) + val 
healthApi = HealthApi(basePath = basePath, client = httpClient, policy = policy) val jobsApi = JobsApi(basePath = basePath, client = httpClient, policy = policy) val jobRetryStatesApi = JobRetryStatesApi(basePath = basePath, client = httpClient, policy = policy) val operationApi = OperationApi(basePath = basePath, client = httpClient, policy = policy) - val sourceDefinitionApi = SourceDefinitionApi(basePath = basePath, client = httpClient, policy = policy) + val organizationApi = OrganizationApi(basePath = basePath, client = httpClient, policy = policy) + val permissionApi = PermissionApi(basePath = basePath, client = httpClient, policy = policy) + val secretPersistenceConfigApi = SecretsPersistenceConfigApi(basePath = basePath, client = httpClient, policy = policy) val sourceApi = SourceApi(basePath = basePath, client = httpClient, policy = policy) - val sourceDefinitionSpecificationApi = SourceDefinitionSpecificationApi(basePath = basePath, client = httpClient, policy = policy) - val workspaceApi = WorkspaceApi(basePath = basePath, client = httpClient, policy = policy) - val healthApi = HealthApi(basePath = basePath, client = httpClient, policy = policy) - val attemptApi = AttemptApi(basePath = basePath, client = httpClient, policy = policy) + val sourceDefinitionApi = SourceDefinitionApi(basePath = basePath, client = httpClient, policy = policy) + val sourceDefinitionSpecificationApi = + SourceDefinitionSpecificationApi(basePath = basePath, client = httpClient, policy = policy) val stateApi = StateApi(basePath = basePath, client = httpClient, policy = policy) val streamStatusesApi = StreamStatusesApi(basePath = basePath, client = httpClient, policy = policy) - val secretPersistenceConfigApi = SecretsPersistenceConfigApi(basePath = basePath, client = httpClient, policy = policy) - } - -class ThrowOn5xxInterceptor : okhttp3.Interceptor { - override fun intercept(chain: okhttp3.Interceptor.Chain): okhttp3.Response { - val response = chain.proceed(chain.request()) - 
if (response.code >= 500) { - throw IOException("HTTP error: ${response.code} ${response.message}") - } - return response + val userApi = UserApi(basePath = basePath, client = httpClient, policy = policy) + val webBackendApi = WebBackendApi(basePath = basePath, client = httpClient, policy = policy) + val workspaceApi = WorkspaceApi(basePath = basePath, client = httpClient, policy = policy) } -} diff --git a/airbyte-api/src/main/kotlin/WorkloadApiClient.kt b/airbyte-api/src/main/kotlin/WorkloadApiClient.kt index 1e54abbec10..dbab77deb68 100644 --- a/airbyte-api/src/main/kotlin/WorkloadApiClient.kt +++ b/airbyte-api/src/main/kotlin/WorkloadApiClient.kt @@ -6,6 +6,10 @@ package io.airbyte.api.client import dev.failsafe.RetryPolicy import io.airbyte.workload.api.client.generated.WorkloadApi +import io.micronaut.context.annotation.Requires +import io.micronaut.context.annotation.Value +import jakarta.inject.Named +import jakarta.inject.Singleton import okhttp3.OkHttpClient import okhttp3.Response @@ -31,14 +35,16 @@ import okhttp3.Response *

    */ @SuppressWarnings("Parameter") +@Singleton +@Requires(property = "airbyte.workload-api.base-path") class WorkloadApiClient { - var workloadApi: WorkloadApi + val workloadApi: WorkloadApi @JvmOverloads constructor( - basePath: String, - policy: RetryPolicy = RetryPolicy.ofDefaults(), - httpClient: OkHttpClient = OkHttpClient(), + @Value("\${airbyte.workload-api.base-path}") basePath: String, + @Named("workloadApiClientRetryPolicy") policy: RetryPolicy, + @Named("workloadApiOkHttpClient") httpClient: OkHttpClient, ) { workloadApi = WorkloadApi(basePath = basePath, client = httpClient, policy = policy) } diff --git a/airbyte-api/src/main/kotlin/auth/AirbyteAuthHeader.kt b/airbyte-api/src/main/kotlin/auth/AirbyteAuthHeader.kt new file mode 100644 index 00000000000..b0c06239fdc --- /dev/null +++ b/airbyte-api/src/main/kotlin/auth/AirbyteAuthHeader.kt @@ -0,0 +1,14 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.api.client.auth + +/** + * Defines the custom Airbyte authentication header. + */ +interface AirbyteAuthHeader { + fun getHeaderName(): String + + fun getHeaderValue(): String +} diff --git a/airbyte-api/src/main/kotlin/auth/AirbyteAuthHeaderInterceptor.kt b/airbyte-api/src/main/kotlin/auth/AirbyteAuthHeaderInterceptor.kt new file mode 100644 index 00000000000..5fe6b59c89a --- /dev/null +++ b/airbyte-api/src/main/kotlin/auth/AirbyteAuthHeaderInterceptor.kt @@ -0,0 +1,35 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.api.client.auth + +import io.micronaut.context.annotation.Value +import io.micronaut.http.HttpHeaders +import jakarta.inject.Singleton +import okhttp3.Interceptor +import okhttp3.Request +import okhttp3.Response +import java.util.Optional + +/** + * Adds a custom Airbyte authentication header to requests made by a client. 
+ */ +@Singleton +class AirbyteAuthHeaderInterceptor( + private val authHeaders: Optional, + @Value("\${micronaut.application.name}") private val userAgent: String, +) : Interceptor { + override fun intercept(chain: Interceptor.Chain): Response { + val originalRequest: Request = chain.request() + val builder: Request.Builder = originalRequest.newBuilder() + + if (originalRequest.header(HttpHeaders.USER_AGENT) == null) { + builder.addHeader(HttpHeaders.USER_AGENT, formatUserAgent(userAgent)) + } + + authHeaders.ifPresent { h -> builder.addHeader(h.getHeaderName(), h.getHeaderValue()) } + + return chain.proceed(builder.build()) + } +} diff --git a/airbyte-api/src/main/kotlin/auth/InternalApiAuthenticationInterceptor.kt b/airbyte-api/src/main/kotlin/auth/InternalApiAuthenticationInterceptor.kt index fdc2e8f041c..2e4f744debf 100644 --- a/airbyte-api/src/main/kotlin/auth/InternalApiAuthenticationInterceptor.kt +++ b/airbyte-api/src/main/kotlin/auth/InternalApiAuthenticationInterceptor.kt @@ -1,3 +1,7 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. 
+ */ + package io.airbyte.api.client.auth import com.google.common.base.CaseFormat @@ -9,7 +13,11 @@ import okhttp3.Interceptor import okhttp3.Request import okhttp3.Response -private val LOGGER = KotlinLogging.logger {} +private val logger = KotlinLogging.logger {} + +fun formatUserAgent(userAgent: String): String { + return CaseFormat.LOWER_HYPHEN.to(CaseFormat.UPPER_CAMEL, userAgent) +} @Singleton class InternalApiAuthenticationInterceptor( @@ -22,14 +30,14 @@ class InternalApiAuthenticationInterceptor( val builder: Request.Builder = originalRequest.newBuilder() if (originalRequest.header(HttpHeaders.USER_AGENT) == null) { - builder.addHeader(HttpHeaders.USER_AGENT, CaseFormat.LOWER_HYPHEN.to(CaseFormat.UPPER_CAMEL, userAgent)) + builder.addHeader(HttpHeaders.USER_AGENT, formatUserAgent(userAgent)) } if (authHeaderName.isNotBlank() && authHeaderValue.isNotBlank()) { - LOGGER.debug { "Adding authorization header..." } + logger.debug { "Adding authorization header..." } builder.addHeader(authHeaderName, authHeaderValue) } else { - LOGGER.debug { "Bearer token not provided." } + logger.debug { "Bearer token not provided." } } return chain.proceed(builder.build()) diff --git a/airbyte-commons-auth/src/main/kotlin/io/airbyte/commons/auth/AuthenticationInterceptor.kt b/airbyte-api/src/main/kotlin/auth/WorkloadApiAuthenticationInterceptor.kt similarity index 57% rename from airbyte-commons-auth/src/main/kotlin/io/airbyte/commons/auth/AuthenticationInterceptor.kt rename to airbyte-api/src/main/kotlin/auth/WorkloadApiAuthenticationInterceptor.kt index a5ac636c988..3687a3fb886 100644 --- a/airbyte-commons-auth/src/main/kotlin/io/airbyte/commons/auth/AuthenticationInterceptor.kt +++ b/airbyte-api/src/main/kotlin/auth/WorkloadApiAuthenticationInterceptor.kt @@ -1,4 +1,8 @@ -package io.airbyte.commons.auth +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.api.client.auth import io.github.oshai.kotlinlogging.KotlinLogging import io.micronaut.context.annotation.Value @@ -9,31 +13,33 @@ import okhttp3.Request import okhttp3.Response import java.util.Base64 -private val LOGGER = KotlinLogging.logger {} +private val logger = KotlinLogging.logger {} @Singleton -class AuthenticationInterceptor( +class WorkloadApiAuthenticationInterceptor( @Value("\${airbyte.workload-api.bearer-token}") private val bearerToken: String, + @Value("\${micronaut.application.name}") private val userAgent: String, ) : Interceptor { override fun intercept(chain: Interceptor.Chain): Response { val originalRequest: Request = chain.request() val builder: Request.Builder = originalRequest.newBuilder() - builder.header(HttpHeaders.USER_AGENT, USER_AGENT_VALUE) + if (originalRequest.header(HttpHeaders.USER_AGENT) == null) { + builder.addHeader(HttpHeaders.USER_AGENT, formatUserAgent(userAgent)) + } if (bearerToken.isNotBlank()) { - LOGGER.debug { "Adding authorization header..." } + logger.debug { "Adding authorization header..." } val encodedBearerToken = Base64.getEncoder().encodeToString(bearerToken.toByteArray()) - builder.header(HttpHeaders.AUTHORIZATION, "$BEARER_TOKEN_PREFIX $encodedBearerToken") + builder.addHeader(HttpHeaders.AUTHORIZATION, "$BEARER_TOKEN_PREFIX $encodedBearerToken") } else { - LOGGER.debug { "Bearer token not provided." } + logger.debug { "Bearer token not provided." } } return chain.proceed(builder.build()) } companion object { - const val USER_AGENT_VALUE = "WorkloadLauncherApp" const val BEARER_TOKEN_PREFIX = "Bearer" } } diff --git a/airbyte-api/src/main/kotlin/client/ThrowOn5xxInterceptor.kt b/airbyte-api/src/main/kotlin/client/ThrowOn5xxInterceptor.kt new file mode 100644 index 00000000000..52d722b9968 --- /dev/null +++ b/airbyte-api/src/main/kotlin/client/ThrowOn5xxInterceptor.kt @@ -0,0 +1,26 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.api.client + +import io.micronaut.context.annotation.Requires +import io.micronaut.http.HttpStatus +import jakarta.inject.Named +import jakarta.inject.Singleton +import okhttp3.Interceptor +import okhttp3.Response +import java.io.IOException + +@Singleton +@Named("throwOn5xxInterceptor") +@Requires(property = "airbyte.internal.throws-on-5xx", value = "true", defaultValue = "true") +class ThrowOn5xxInterceptor : Interceptor { + override fun intercept(chain: Interceptor.Chain): Response { + val response = chain.proceed(chain.request()) + if (response.code >= HttpStatus.INTERNAL_SERVER_ERROR.code) { + throw IOException("HTTP error: ${response.code} ${response.message}") + } + return response + } +} diff --git a/airbyte-api/src/main/kotlin/config/ClientSupportFactory.kt b/airbyte-api/src/main/kotlin/config/ClientSupportFactory.kt new file mode 100644 index 00000000000..c2e8c357f05 --- /dev/null +++ b/airbyte-api/src/main/kotlin/config/ClientSupportFactory.kt @@ -0,0 +1,181 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.api.client.config + +import dev.failsafe.RetryPolicy +import io.airbyte.api.client.ThrowOn5xxInterceptor +import io.airbyte.api.client.auth.AirbyteAuthHeaderInterceptor +import io.airbyte.api.client.auth.InternalApiAuthenticationInterceptor +import io.airbyte.api.client.auth.WorkloadApiAuthenticationInterceptor +import io.github.oshai.kotlinlogging.KotlinLogging +import io.micrometer.core.instrument.MeterRegistry +import io.micronaut.context.annotation.Factory +import io.micronaut.context.annotation.Requires +import io.micronaut.context.annotation.Value +import jakarta.inject.Named +import jakarta.inject.Singleton +import okhttp3.HttpUrl +import okhttp3.OkHttpClient +import okhttp3.Response +import org.openapitools.client.infrastructure.ClientException +import org.openapitools.client.infrastructure.ServerException +import java.io.IOException +import java.time.Duration +import java.util.Optional + +private val logger = KotlinLogging.logger {} + +@Factory +class ClientSupportFactory { + @Singleton + @Named("airbyteApiClientRetryPolicy") + @Requires(property = "airbyte.internal-api.base-path") + fun defaultAirbyteApiRetryPolicy( + @Value("\${airbyte.internal-api.retries.delay-seconds:2}") retryDelaySeconds: Long, + @Value("\${airbyte.inernal-api.retries.max:5}") maxRetries: Int, + meterRegistry: Optional, + ): RetryPolicy { + return generateDefaultRetryPolicy(retryDelaySeconds, maxRetries, meterRegistry, "airbyte-api-client") + } + + @Singleton + @Named("airbyteApiOkHttpClient") + @Requires(property = "airbyte.internal-api.base-path") + fun defaultAirbyteApiOkHttpClient( + @Value("\${airbyte.internal-api.connect-timeout-seconds}") connectTimeoutSeconds: Long, + @Value("\${airbyte.internal-api.read-timeout-seconds}") readTimeoutSeconds: Long, + internalApiAuthenticationInterceptor: InternalApiAuthenticationInterceptor, + airbyteAuthHeaderInterceptor: AirbyteAuthHeaderInterceptor, + @Named("throwOn5xxInterceptor") throwOn5xxInterceptor: 
Optional, + ): OkHttpClient { + val builder: OkHttpClient.Builder = OkHttpClient.Builder() + builder.addInterceptor(internalApiAuthenticationInterceptor) + builder.addInterceptor(airbyteAuthHeaderInterceptor) + throwOn5xxInterceptor.ifPresent { builder.addInterceptor(it) } + builder.readTimeout(Duration.ofSeconds(readTimeoutSeconds)) + builder.connectTimeout(Duration.ofSeconds(connectTimeoutSeconds)) + return builder.build() + } + + @Singleton + @Named("workloadApiClientRetryPolicy") + @Requires(property = "airbyte.workload-api.base-path") + fun defaultWorkloadApiRetryPolicy( + @Value("\${airbyte.internal-api.retries.delay-seconds:2}") retryDelaySeconds: Long, + @Value("\${airbyte.inernal-api.retries.max:5}") maxRetries: Int, + meterRegistry: Optional, + ): RetryPolicy { + return generateDefaultRetryPolicy(retryDelaySeconds, maxRetries, meterRegistry, "workload-api-client") + } + + @Singleton + @Named("workloadApiOkHttpClient") + @Requires(property = "airbyte.workload-api.base-path") + fun defaultWorkloadApiOkHttpClient( + @Value("\${airbyte.workload-api.connect-timeout-seconds}") connectTimeoutSeconds: Long, + @Value("\${airbyte.workload-api.read-timeout-seconds}") readTimeoutSeconds: Long, + workloadApiAuthenticationInterceptor: WorkloadApiAuthenticationInterceptor, + airbyteAuthHeaderInterceptor: AirbyteAuthHeaderInterceptor, + ): OkHttpClient { + val builder: OkHttpClient.Builder = OkHttpClient.Builder() + builder.addInterceptor(workloadApiAuthenticationInterceptor) + builder.addInterceptor(airbyteAuthHeaderInterceptor) + builder.readTimeout(Duration.ofSeconds(readTimeoutSeconds)) + builder.connectTimeout(Duration.ofSeconds(connectTimeoutSeconds)) + return builder.build() + } + + private fun generateDefaultRetryPolicy( + retryDelaySeconds: Long, + maxRetries: Int, + meterRegistry: Optional, + metricPrefix: String, + ): RetryPolicy { + val metricTags = arrayOf("max-retries", maxRetries.toString()) + return RetryPolicy.builder() + .handle( + listOf( + 
IllegalStateException::class.java, + IOException::class.java, + UnsupportedOperationException::class.java, + ClientException::class.java, + ServerException::class.java, + ), + ) + // TODO move these metrics into a centralized metric registry as part of the MetricClient refactor/cleanup + .onAbort { l -> + logger.warn { "Attempt aborted. Attempt count ${l.attemptCount}" } + meterRegistry.ifPresent { + r -> + r.counter( + "$metricPrefix.abort", + *metricTags, + *arrayOf("retry-attempt", l.attemptCount.toString(), "method", l.result.request.method), + *getUrlTags(l.result.request.url), + ).increment() + } + } + .onFailure { l -> + logger.error(l.exception) { "Failed to call ${l.result.request.url}. Last response: ${l.result}" } + meterRegistry.ifPresent { + r -> + r.counter( + "$metricPrefix.failure", + *metricTags, + *arrayOf("retry-attempt", l.attemptCount.toString(), "method", l.result.request.method), + *getUrlTags(l.result.request.url), + ).increment() + } + } + .onRetry { l -> + logger.warn { "Retry attempt ${l.attemptCount} of $maxRetries. Last response: ${l.lastResult}" } + meterRegistry.ifPresent { + r -> + r.counter( + "$metricPrefix.retry", + *metricTags, + *arrayOf("retry-attempt", l.attemptCount.toString(), "method", l.lastResult.request.method), + *getUrlTags(l.lastResult.request.url), + ).increment() + } + } + .onRetriesExceeded { l -> + logger.error(l.exception) { "Retry attempts exceeded." } + meterRegistry.ifPresent { + r -> + r.counter( + "$metricPrefix.retries_exceeded", + *metricTags, + *arrayOf("retry-attempt", l.attemptCount.toString(), "method", l.result.request.method), + *getUrlTags(l.result.request.url), + ).increment() + } + } + .onSuccess { l -> + logger.debug { "Successfully called ${l.result.request.url}. 
Response: ${l.result}, isRetry: ${l.isRetry}" } + meterRegistry.ifPresent { + r -> + r.counter( + "$metricPrefix.success", + *metricTags, + *arrayOf("retry-attempt", l.attemptCount.toString(), "method", l.result.request.method), + *getUrlTags(l.result.request.url), + ).increment() + } + } + .withDelay(Duration.ofSeconds(retryDelaySeconds)) + .withMaxRetries(maxRetries) + .build() + } + + private fun getUrlTags(httpUrl: HttpUrl): Array { + val last = httpUrl.pathSegments.last() + if (last.contains("[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}".toRegex())) { + return arrayOf("url", httpUrl.toString().removeSuffix(last), "workload-id", last) + } else { + return arrayOf("url", httpUrl.toString()) + } + } +} diff --git a/airbyte-api/src/main/kotlin/config/InternalApiAuthenticationFactory.kt b/airbyte-api/src/main/kotlin/config/InternalApiAuthenticationFactory.kt new file mode 100644 index 00000000000..e787d2c6d01 --- /dev/null +++ b/airbyte-api/src/main/kotlin/config/InternalApiAuthenticationFactory.kt @@ -0,0 +1,102 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.api.client.config + +import com.auth0.jwt.JWT +import com.auth0.jwt.JWTCreator +import com.google.auth.oauth2.ServiceAccountCredentials +import io.github.oshai.kotlinlogging.KotlinLogging +import io.micronaut.context.annotation.Factory +import io.micronaut.context.annotation.Primary +import io.micronaut.context.annotation.Prototype +import io.micronaut.context.annotation.Requires +import io.micronaut.context.annotation.Value +import jakarta.inject.Named +import jakarta.inject.Singleton +import java.io.FileInputStream +import java.security.interfaces.RSAPrivateKey +import java.util.Date +import java.util.concurrent.TimeUnit + +private val logger = KotlinLogging.logger {} + +@Factory +class InternalApiAuthenticationFactory { + @Primary + @Singleton + @Requires(property = "airbyte.internal-api.base-path") + @Requires(property = "airbyte.acceptance.test.enabled", value = "true") + @Named(INTERNAL_API_AUTH_TOKEN_BEAN_NAME) + fun testInternalApiAuthToken( + @Value("\${airbyte.internal-api.auth-header.value}") airbyteApiAuthHeaderValue: String, + ): String { + return airbyteApiAuthHeaderValue + } + + @Singleton + @Requires(property = "airbyte.internal-api.base-path") + @Requires(property = "airbyte.acceptance.test.enabled", value = "false") + @Requires(env = [CONTROL_PLANE]) + @Named(INTERNAL_API_AUTH_TOKEN_BEAN_NAME) + fun controlPlaneInternalApiAuthToken( + @Value("\${airbyte.internal-api.auth-header.value}") airbyteApiAuthHeaderValue: String, + ): String { + return airbyteApiAuthHeaderValue + } + + /** + * Generate an auth token based on configs. This is called by the Api Client's requestInterceptor + * for each request. Using Prototype annotation here to make sure each time it's used it will + * generate a new JWT Signature if it's on data plane. 
+ * + * + * For Data Plane workers, generate a signed JWT as described here: + * https://cloud.google.com/endpoints/docs/openapi/service-account-authentication + */ + @Prototype + @Requires(property = "airbyte.internal-api.base-path") + @Requires(property = "airbyte.acceptance.test.enabled", value = "false") + @Requires(env = [DATA_PLANE]) + @Named(INTERNAL_API_AUTH_TOKEN_BEAN_NAME) + fun dataPlaneInternalApiAuthToken( + @Value("\${airbyte.control.plane.auth-endpoint}") controlPlaneAuthEndpoint: String, + @Value("\${airbyte.data.plane.service-account.email}") dataPlaneServiceAccountEmail: String, + @Value("\${airbyte.data.plane.service-account.credentials-path}") dataPlaneServiceAccountCredentialsPath: String, + ): String { + return try { + val now = Date() + val expTime = + Date(System.currentTimeMillis() + TimeUnit.MINUTES.toMillis(JWT_TTL_MINUTES.toLong())) + // Build the JWT payload + val token: JWTCreator.Builder = + JWT.create() + .withIssuedAt(now) + .withExpiresAt(expTime) + .withIssuer(dataPlaneServiceAccountEmail) + .withAudience(controlPlaneAuthEndpoint) + .withSubject(dataPlaneServiceAccountEmail) + .withClaim(CLAIM_NAME, dataPlaneServiceAccountEmail) + + // TODO multi-cloud phase 2: check performance of on-demand token generation in load testing. might + // need to pull some of this outside of this method which is called for every API request + val stream = FileInputStream(dataPlaneServiceAccountCredentialsPath) + val cred = ServiceAccountCredentials.fromStream(stream) + val key = cred.privateKey as RSAPrivateKey + val algorithm: com.auth0.jwt.algorithms.Algorithm = com.auth0.jwt.algorithms.Algorithm.RSA256(null, key) + return "Bearer " + token.sign(algorithm) + } catch (e: Exception) { + logger.error(e) { "An issue occurred while generating a data plane auth token. Defaulting to empty string. 
Error Message: {}" } + return "" + } + } + + companion object { + const val CLAIM_NAME = "email" + const val CONTROL_PLANE = "control-plane" + const val DATA_PLANE = "data-plane" + const val INTERNAL_API_AUTH_TOKEN_BEAN_NAME = "internalApiAuthToken" + const val JWT_TTL_MINUTES = 5 + } +} diff --git a/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/config/StatsDRegistryConfigurer.kt b/airbyte-api/src/main/kotlin/config/StatsDRegistryConfigurer.kt similarity index 80% rename from airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/config/StatsDRegistryConfigurer.kt rename to airbyte-api/src/main/kotlin/config/StatsDRegistryConfigurer.kt index 7bce2bc1c63..cc1ad6e147d 100644 --- a/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/config/StatsDRegistryConfigurer.kt +++ b/airbyte-api/src/main/kotlin/config/StatsDRegistryConfigurer.kt @@ -1,4 +1,8 @@ -package io.airbyte.workers.config +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.api.client.config import io.github.oshai.kotlinlogging.KotlinLogging import io.micrometer.statsd.StatsdMeterRegistry @@ -12,10 +16,6 @@ import jakarta.inject.Singleton private val logger = KotlinLogging.logger {} -// TODO Temporarily copy this from the workload-launcher. Ultimately, this will move to airbyte-metrics/metrics-lib -// and would provide a mechanism to override/add additional tags and/or define the tags that will be included -// in metrics - /** * Custom Micronaut {@link MeterRegistryConfigurer} used to ensure that a common set of tags are * added to every Micrometer registry. Specifically, this class ensures that the tags for the @@ -28,11 +28,11 @@ private val logger = KotlinLogging.logger {} @RequiresMetrics class StatsDRegistryConfigurer : MeterRegistryConfigurer, Ordered { override fun configure(meterRegistry: StatsdMeterRegistry?) { - /* - * Use a LinkedHashSet to maintain order as items are added to the set. 
This ensures that the items - * are output as key1, value1, key2, value2, etc in order to maintain the relationship between key - * value pairs. - */ + /* + * Use a LinkedHashSet to maintain order as items are added to the set. This ensures that the items + * are output as key1, value1, key2, value2, etc in order to maintain the relationship between key + * value pairs. + */ val tags: MutableSet = LinkedHashSet() possiblyAddTag(DATA_DOG_SERVICE_TAG, "service", tags) diff --git a/airbyte-api/src/main/openapi/cloud-config.yaml b/airbyte-api/src/main/openapi/cloud-config.yaml index 7e9b002e3e3..00a7390a6a4 100644 --- a/airbyte-api/src/main/openapi/cloud-config.yaml +++ b/airbyte-api/src/main/openapi/cloud-config.yaml @@ -213,6 +213,25 @@ paths: $ref: "#/components/schemas/CreateKeycloakUserResponseBody" "409": $ref: "#/components/responses/ExceptionResponse" + /v1/users/send_verification_email: + post: + tags: + - user + summary: Triggers a verification email to be sent to the user + operationId: sendVerificationEmail + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/UserIdRequestBody" + required: true + responses: + "204": + description: The verification email was sent successfully. 
+ "404": + $ref: "#/components/responses/NotFoundResponse" + "422": + $ref: "#/components/responses/InvalidInputResponse" # CLOUD_WORKSPACE /v1/cloud_workspaces/create: post: @@ -863,9 +882,6 @@ components: $ref: "#/components/schemas/UserStatus" companyName: type: string - email: - type: string - format: email news: type: boolean UserStatus: diff --git a/airbyte-api/src/main/openapi/config.yaml b/airbyte-api/src/main/openapi/config.yaml index 2ea54289f7a..9ed388c77af 100644 --- a/airbyte-api/src/main/openapi/config.yaml +++ b/airbyte-api/src/main/openapi/config.yaml @@ -313,6 +313,29 @@ paths: $ref: "#/components/responses/NotFoundResponse" "422": $ref: "#/components/responses/InvalidInputResponse" + /v1/workspaces/get_by_connection_id_with_tombstone: + post: + tags: + - workspace + summary: Find workspace by connection id including the tombstone ones + operationId: getWorkspaceByConnectionIdWithTombstone + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/ConnectionIdRequestBody" + required: true + responses: + "200": + description: Successful operation + content: + application/json: + schema: + $ref: "#/components/schemas/WorkspaceRead" + "404": + $ref: "#/components/responses/NotFoundResponse" + "422": + $ref: "#/components/responses/InvalidInputResponse" /v1/workspaces/get_organization_info: post: tags: @@ -2195,6 +2218,29 @@ paths: $ref: "#/components/responses/NotFoundResponse" "422": $ref: "#/components/responses/InvalidInputResponse" + /v1/connections/getForJob: + post: + tags: + - connection + summary: Get a connection for a given jobId + operationId: getConnectionForJob + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/ConnectionAndJobIdRequestBody" + required: true + responses: + "200": + description: Successful operation + content: + application/json: + schema: + $ref: "#/components/schemas/ConnectionRead" + "404": + $ref: "#/components/responses/NotFoundResponse" + "422": + $ref: 
"#/components/responses/InvalidInputResponse" /v1/connections/list_by_actor_definition: post: tags: @@ -2399,6 +2445,29 @@ paths: $ref: "#/components/responses/NotFoundResponse" "422": $ref: "#/components/responses/InvalidInputResponse" + /v1/connections/clear: + post: + tags: + - connection + summary: Clear the data for the connection. Deletes data generated by the connection in the destination. Clear any cursors back to initial state. + operationId: clearConnection + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/ConnectionIdRequestBody" + required: true + responses: + "200": + description: Successful operation + content: + application/json: + schema: + $ref: "#/components/schemas/JobInfoRead" + "404": + $ref: "#/components/responses/NotFoundResponse" + "422": + $ref: "#/components/responses/InvalidInputResponse" /v1/connections/auto_disable: post: tags: @@ -2424,6 +2493,29 @@ paths: $ref: "#/components/schemas/InternalOperationResult" "404": $ref: "#/components/responses/NotFoundResponse" + /v1/connections/clear/stream: + post: + tags: + - connection + summary: Clear the data for a specific stream in the connection. Deletes data generated by the stream in the destination. Clear any cursors back to initial state. 
+ operationId: clearConnectionStream + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/ConnectionStreamRequestBody" + required: true + responses: + "200": + description: Successful operation + content: + application/json: + schema: + $ref: "#/components/schemas/JobInfoRead" + "404": + $ref: "#/components/responses/NotFoundResponse" + "422": + $ref: "#/components/responses/InvalidInputResponse" /v1/connections/reset/stream: post: tags: @@ -2447,6 +2539,29 @@ $ref: "#/components/responses/NotFoundResponse" "422": $ref: "#/components/responses/InvalidInputResponse" + /v1/connections/refresh: + post: + tags: + - connection + summary: Refresh the data for specific streams in the connection. If no stream is specified or the list of streams is empty, all the streams will be refreshed. Resets any cursors back to initial state. + operationId: refreshConnectionStream + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/ConnectionStreamRefreshRequestBody" + required: true + responses: + "200": + description: Successful operation + content: + application/json: + schema: + $ref: "#/components/schemas/BooleanRead" + "404": + $ref: "#/components/responses/NotFoundResponse" + "422": + $ref: "#/components/responses/InvalidInputResponse" /v1/connections/get_task_queue_name: post: tags: @@ -3761,12 +3876,8 @@ $ref: "#/components/schemas/PermissionUpdate" required: true responses: - "200": + "204": description: Successful operation - content: - application/json: - schema: - $ref: "#/components/schemas/PermissionRead" "403": $ref: "#/components/responses/ForbiddenResponse" "404": @@ -4597,13 +4708,13 @@ $ref: "#/components/schemas/UserInvitationCreateRequestBody" responses: "201": - description: Successfully created user invitation. + description: Successfully processed user invitation create request. 
content: application/json: schema: - $ref: "#/components/schemas/UserInvitationRead" + $ref: "#/components/schemas/UserInvitationCreateResponse" - /v1/user_invitations/{inviteCode}: + /v1/user_invitations/by_code/{inviteCode}: get: summary: Get a user invitation by its unique code (not primary key ID) tags: @@ -4626,6 +4737,27 @@ paths: "404": $ref: "#/components/responses/NotFoundResponse" + /v1/user_invitations/list_pending: + post: + summary: List pending invitations + tags: + - user_invitation + operationId: listPendingInvitations + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/UserInvitationListRequestBody" + responses: + "200": + description: A list of pending user invitations, or an empty list if no invitations are found. + content: + application/json: + schema: + type: array + items: + $ref: "#/components/schemas/UserInvitationRead" + /v1/user_invitations/accept: post: summary: Accept a user invitation @@ -6055,6 +6187,7 @@ components: type: object required: - name + - organizationId properties: email: type: string @@ -6088,6 +6221,7 @@ components: required: - id - name + - organizationId properties: id: type: string @@ -6324,6 +6458,8 @@ components: $ref: "#/components/schemas/WebhookConfigRead" organizationId: $ref: "#/components/schemas/OrganizationId" + tombstone: + type: boolean WorkspaceOrganizationInfoRead: type: object description: Limited info about a workspace's organization that is safe to expose to workspace readers who are not members of the org. 
@@ -6485,6 +6621,8 @@ components: type: string draftManifest: $ref: "#/components/schemas/DeclarativeManifest" + yamlManifest: + type: string ConnectorBuilderProjectDetailsRead: type: object required: @@ -7727,6 +7865,16 @@ components: properties: connectionId: $ref: "#/components/schemas/ConnectionId" + ConnectionAndJobIdRequestBody: + type: object + required: + - connectionId + - jobId + properties: + connectionId: + $ref: "#/components/schemas/ConnectionId" + jobId: + $ref: "#/components/schemas/JobId" ConnectionUptimeHistoryRequestBody: type: object required: @@ -7778,6 +7926,17 @@ components: type: array items: $ref: "#/components/schemas/ConnectionStream" + ConnectionStreamRefreshRequestBody: + type: object + required: + - connectionId + properties: + connectionId: + $ref: "#/components/schemas/ConnectionId" + streams: + type: array + items: + $ref: "#/components/schemas/ConnectionStream" DbMigrationRequestBody: type: object required: @@ -8874,9 +9033,6 @@ components: $ref: "#/components/schemas/UserStatus" companyName: type: string - email: - type: string - format: email news: type: boolean metadata: @@ -9066,6 +9222,18 @@ components: type: array items: $ref: "#/components/schemas/SelectedFieldInfo" + minimumGenerationId: + type: integer + format: int64 + description: The minimum generation Id of the stream + generationId: + type: integer + format: int64 + description: Generation Id of the stream + syncId: + type: integer + format: int64 + description: Sync id of the stream. SelectedFieldInfo: type: object # TODO(mfsiega-airbyte): point to thorough documentation on nested fields and paths. 
@@ -9096,6 +9264,7 @@ components: - get_spec - sync - reset_connection + - refresh JobCreate: type: object required: @@ -9517,6 +9686,7 @@ components: - refresh_schema - heartbeat_timeout - destination_timeout + - transient_error AttemptStatus: type: string enum: @@ -10640,6 +10810,7 @@ components: type: object required: - edition + - version - webappUrl - initialSetupComplete - defaultUserId @@ -10652,6 +10823,9 @@ components: enum: - community - pro + version: + description: release version for the corresponding edition + type: string licenseType: type: string enum: @@ -11186,6 +11360,26 @@ components: scopeId: type: string format: uuid + UserInvitationCreateResponse: + type: object + properties: + inviteCode: + type: string + description: The created invite code, if the request resulted in a new invitation being created. + directlyAdded: + type: boolean + description: True if the request resulted in the user being directly added, without a created invitation. + UserInvitationListRequestBody: + type: object + required: + - scopeType + - scopeId + properties: + scopeType: + $ref: "#/components/schemas/ScopeType" + scopeId: + type: string + format: uuid UserInvitationRead: type: object required: diff --git a/airbyte-api/src/test/kotlin/io/airbyte/api/client/AirbyteApiClientTest.kt b/airbyte-api/src/test/kotlin/io/airbyte/api/client/AirbyteApiClientTest.kt new file mode 100644 index 00000000000..4c05e62e314 --- /dev/null +++ b/airbyte-api/src/test/kotlin/io/airbyte/api/client/AirbyteApiClientTest.kt @@ -0,0 +1,112 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.api.client2 + +import dev.failsafe.RetryPolicy +import io.mockk.mockk +import okhttp3.OkHttpClient +import okhttp3.Response +import org.junit.jupiter.api.Assertions.assertEquals +import org.junit.jupiter.api.Assertions.assertNotNull +import org.junit.jupiter.api.Test + +class AirbyteApiClientTest { + @Test + fun `test that the Airbyte API client creates the underlying API objects with the provided configuration`() { + val basePath = "base-path" + val client: OkHttpClient = mockk() + val policy: RetryPolicy = mockk() + + val airbyteApiClient = AirbyteApiClient2(basePath, policy, client) + assertNotNull(airbyteApiClient.attemptApi) + assertEquals(client, airbyteApiClient.attemptApi.client) + assertEquals(policy, airbyteApiClient.attemptApi.policy) + assertEquals(basePath, airbyteApiClient.attemptApi.baseUrl) + assertNotNull(airbyteApiClient.connectionApi) + assertEquals(client, airbyteApiClient.connectionApi.client) + assertEquals(policy, airbyteApiClient.connectionApi.policy) + assertEquals(basePath, airbyteApiClient.connectionApi.baseUrl) + assertNotNull(airbyteApiClient.connectorBuilderProjectApi) + assertEquals(client, airbyteApiClient.connectorBuilderProjectApi.client) + assertEquals(policy, airbyteApiClient.connectorBuilderProjectApi.policy) + assertEquals(basePath, airbyteApiClient.connectorBuilderProjectApi.baseUrl) + assertNotNull(airbyteApiClient.deploymentMetadataApi) + assertEquals(client, airbyteApiClient.deploymentMetadataApi.client) + assertEquals(policy, airbyteApiClient.deploymentMetadataApi.policy) + assertEquals(basePath, airbyteApiClient.deploymentMetadataApi.baseUrl) + assertNotNull(airbyteApiClient.destinationApi) + assertEquals(client, airbyteApiClient.destinationApi.client) + assertEquals(policy, airbyteApiClient.destinationApi.policy) + assertEquals(basePath, airbyteApiClient.destinationApi.baseUrl) + assertNotNull(airbyteApiClient.destinationDefinitionApi) + assertEquals(client, 
airbyteApiClient.destinationDefinitionApi.client) + assertEquals(policy, airbyteApiClient.destinationDefinitionApi.policy) + assertEquals(basePath, airbyteApiClient.destinationDefinitionApi.baseUrl) + assertNotNull(airbyteApiClient.destinationDefinitionSpecificationApi) + assertEquals(client, airbyteApiClient.destinationDefinitionSpecificationApi.client) + assertEquals(policy, airbyteApiClient.destinationDefinitionSpecificationApi.policy) + assertEquals(basePath, airbyteApiClient.destinationDefinitionSpecificationApi.baseUrl) + assertNotNull(airbyteApiClient.healthApi) + assertEquals(client, airbyteApiClient.healthApi.client) + assertEquals(policy, airbyteApiClient.healthApi.policy) + assertEquals(basePath, airbyteApiClient.healthApi.baseUrl) + assertNotNull(airbyteApiClient.jobsApi) + assertEquals(client, airbyteApiClient.jobsApi.client) + assertEquals(policy, airbyteApiClient.jobsApi.policy) + assertEquals(basePath, airbyteApiClient.jobsApi.baseUrl) + assertNotNull(airbyteApiClient.jobRetryStatesApi) + assertEquals(client, airbyteApiClient.jobRetryStatesApi.client) + assertEquals(policy, airbyteApiClient.jobRetryStatesApi.policy) + assertEquals(basePath, airbyteApiClient.jobRetryStatesApi.baseUrl) + assertNotNull(airbyteApiClient.operationApi) + assertEquals(client, airbyteApiClient.operationApi.client) + assertEquals(policy, airbyteApiClient.operationApi.policy) + assertEquals(basePath, airbyteApiClient.operationApi.baseUrl) + assertNotNull(airbyteApiClient.organizationApi) + assertEquals(client, airbyteApiClient.organizationApi.client) + assertEquals(policy, airbyteApiClient.organizationApi.policy) + assertEquals(basePath, airbyteApiClient.organizationApi.baseUrl) + assertNotNull(airbyteApiClient.permissionApi) + assertEquals(client, airbyteApiClient.permissionApi.client) + assertEquals(policy, airbyteApiClient.permissionApi.policy) + assertEquals(basePath, airbyteApiClient.permissionApi.baseUrl) + assertNotNull(airbyteApiClient.secretPersistenceConfigApi) + 
assertEquals(client, airbyteApiClient.secretPersistenceConfigApi.client) + assertEquals(policy, airbyteApiClient.secretPersistenceConfigApi.policy) + assertEquals(basePath, airbyteApiClient.secretPersistenceConfigApi.baseUrl) + assertNotNull(airbyteApiClient.sourceApi) + assertEquals(client, airbyteApiClient.sourceApi.client) + assertEquals(policy, airbyteApiClient.sourceApi.policy) + assertEquals(basePath, airbyteApiClient.sourceApi.baseUrl) + assertNotNull(airbyteApiClient.sourceDefinitionApi) + assertEquals(client, airbyteApiClient.sourceDefinitionApi.client) + assertEquals(policy, airbyteApiClient.sourceDefinitionApi.policy) + assertEquals(basePath, airbyteApiClient.sourceDefinitionApi.baseUrl) + assertNotNull(airbyteApiClient.sourceDefinitionSpecificationApi) + assertEquals(client, airbyteApiClient.sourceDefinitionSpecificationApi.client) + assertEquals(policy, airbyteApiClient.sourceDefinitionSpecificationApi.policy) + assertEquals(basePath, airbyteApiClient.sourceDefinitionSpecificationApi.baseUrl) + assertNotNull(airbyteApiClient.stateApi) + assertEquals(client, airbyteApiClient.stateApi.client) + assertEquals(policy, airbyteApiClient.stateApi.policy) + assertEquals(basePath, airbyteApiClient.stateApi.baseUrl) + assertNotNull(airbyteApiClient.streamStatusesApi) + assertEquals(client, airbyteApiClient.streamStatusesApi.client) + assertEquals(policy, airbyteApiClient.streamStatusesApi.policy) + assertEquals(basePath, airbyteApiClient.streamStatusesApi.baseUrl) + assertNotNull(airbyteApiClient.userApi) + assertEquals(client, airbyteApiClient.userApi.client) + assertEquals(policy, airbyteApiClient.userApi.policy) + assertEquals(basePath, airbyteApiClient.userApi.baseUrl) + assertNotNull(airbyteApiClient.webBackendApi) + assertEquals(client, airbyteApiClient.webBackendApi.client) + assertEquals(policy, airbyteApiClient.webBackendApi.policy) + assertEquals(basePath, airbyteApiClient.webBackendApi.baseUrl) + assertNotNull(airbyteApiClient.workspaceApi) + 
assertEquals(client, airbyteApiClient.workspaceApi.client) + assertEquals(policy, airbyteApiClient.workspaceApi.policy) + assertEquals(basePath, airbyteApiClient.workspaceApi.baseUrl) + } +} diff --git a/airbyte-api/src/test/kotlin/io/airbyte/api/client/ThrowOn5xxInterceptorTest.kt b/airbyte-api/src/test/kotlin/io/airbyte/api/client/ThrowOn5xxInterceptorTest.kt new file mode 100644 index 00000000000..10b0cbf2d47 --- /dev/null +++ b/airbyte-api/src/test/kotlin/io/airbyte/api/client/ThrowOn5xxInterceptorTest.kt @@ -0,0 +1,63 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.api.client + +import io.micronaut.http.HttpStatus +import io.mockk.every +import io.mockk.mockk +import io.mockk.verify +import okhttp3.Interceptor +import okhttp3.Request +import okhttp3.Response +import org.junit.jupiter.api.Assertions.assertEquals +import org.junit.jupiter.api.Test +import org.junit.jupiter.api.assertDoesNotThrow +import org.junit.jupiter.api.assertThrows +import java.io.IOException + +internal class ThrowOn5xxInterceptorTest { + @Test + internal fun `test that when the response has an error status code, an exception is thrown`() { + val statusCode = HttpStatus.INTERNAL_SERVER_ERROR.code + val responseMessage = "error" + val chain: Interceptor.Chain = + mockk { + every { request() } returns mockk() + every { proceed(any()) } returns + mockk { + every { code } returns statusCode + every { message } returns responseMessage + } + } + + val interceptor = ThrowOn5xxInterceptor() + + val e = + assertThrows { + interceptor.intercept(chain) + } + assertEquals("HTTP error: $statusCode $responseMessage", e.message) + verify(exactly = 1) { chain.proceed(any()) } + } + + @Test + internal fun `test that when the response is not an error, an exception is not thrown`() { + val chain: Interceptor.Chain = + mockk { + every { request() } returns mockk() + every { proceed(any()) } returns + mockk { + every { code } returns HttpStatus.OK.code + } + } 
+ + val interceptor = ThrowOn5xxInterceptor() + + assertDoesNotThrow { + interceptor.intercept(chain) + } + verify(exactly = 1) { chain.proceed(any()) } + } +} diff --git a/airbyte-api/src/test/kotlin/io/airbyte/api/client/WorkloadApiTest.kt b/airbyte-api/src/test/kotlin/io/airbyte/api/client/WorkloadApiTest.kt index 0f2eb60c42d..41156805619 100644 --- a/airbyte-api/src/test/kotlin/io/airbyte/api/client/WorkloadApiTest.kt +++ b/airbyte-api/src/test/kotlin/io/airbyte/api/client/WorkloadApiTest.kt @@ -23,7 +23,7 @@ class WorkloadApiTest { const val MESSAGE = "message" const val BODY = "body" const val STATUS_CODE = 400 - const val BASE_PATH = "basepath" + const val BASE_PATH = "http://basepath" } @Test diff --git a/airbyte-api/src/test/kotlin/io/airbyte/api/client/auth/AirbyteAuthHeaderInterceptorTest.kt b/airbyte-api/src/test/kotlin/io/airbyte/api/client/auth/AirbyteAuthHeaderInterceptorTest.kt new file mode 100644 index 00000000000..6a1e3bced82 --- /dev/null +++ b/airbyte-api/src/test/kotlin/io/airbyte/api/client/auth/AirbyteAuthHeaderInterceptorTest.kt @@ -0,0 +1,71 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.api.client.auth + +import io.micronaut.http.HttpHeaders +import io.mockk.every +import io.mockk.mockk +import io.mockk.verify +import okhttp3.Interceptor +import okhttp3.Request +import okhttp3.Response +import org.junit.jupiter.api.Test +import java.util.Optional + +internal class AirbyteAuthHeaderInterceptorTest { + @Test + internal fun `test that when the Airbyte auth header is provided, the authentication header is added`() { + val applicationName = "the-application-name" + val headerName = "header-name" + val headerValue = "header-value" + val authHeader = + object : AirbyteAuthHeader { + override fun getHeaderName(): String { + return headerName + } + + override fun getHeaderValue(): String { + return headerValue + } + } + val interceptor = AirbyteAuthHeaderInterceptor(Optional.of(authHeader), applicationName) + val chain: Interceptor.Chain = mockk() + val builder: Request.Builder = mockk() + val request: Request = mockk() + + every { builder.addHeader(any(), any()) } returns (builder) + every { builder.build() } returns (mockk()) + every { request.header(HttpHeaders.USER_AGENT) } returns null + every { request.newBuilder() } returns (builder) + every { chain.request() } returns (request) + every { chain.proceed(any()) } returns (mockk()) + + interceptor.intercept(chain) + + verify { builder.addHeader(HttpHeaders.USER_AGENT, formatUserAgent(applicationName)) } + verify { builder.addHeader(headerName, headerValue) } + } + + @Test + internal fun `test that when the Airbyte auth header is not provided, the authentication header is not added`() { + val applicationName = "the-application-name" + val interceptor = AirbyteAuthHeaderInterceptor(Optional.empty(), applicationName) + val chain: Interceptor.Chain = mockk() + val builder: Request.Builder = mockk() + val request: Request = mockk() + + every { builder.addHeader(any(), any()) } returns (builder) + every { builder.build() } returns (mockk()) + every { 
request.header(HttpHeaders.USER_AGENT) } returns null + every { request.newBuilder() } returns (builder) + every { chain.request() } returns (request) + every { chain.proceed(any()) } returns (mockk()) + + interceptor.intercept(chain) + + verify { builder.addHeader(HttpHeaders.USER_AGENT, formatUserAgent(applicationName)) } + verify(exactly = 0) { builder.addHeader(any(), not(formatUserAgent(applicationName))) } + } +} diff --git a/airbyte-api/src/test/kotlin/io/airbyte/api/client/auth/InternalApiAuthenticationInterceptorTest.kt b/airbyte-api/src/test/kotlin/io/airbyte/api/client/auth/InternalApiAuthenticationInterceptorTest.kt new file mode 100644 index 00000000000..50db82e2daa --- /dev/null +++ b/airbyte-api/src/test/kotlin/io/airbyte/api/client/auth/InternalApiAuthenticationInterceptorTest.kt @@ -0,0 +1,85 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.api.client.auth + +import io.micronaut.http.HttpHeaders +import io.mockk.every +import io.mockk.mockk +import io.mockk.verify +import okhttp3.Interceptor +import okhttp3.Request +import okhttp3.Response +import org.junit.jupiter.api.Test + +internal class InternalApiAuthenticationInterceptorTest { + @Test + internal fun `test that when the internal API header name is not blank, the authentication header is added`() { + val applicationName = "the-application-name" + val internalApiHeaderName = "header-name" + val authHeaderValue = "the token" + val interceptor = InternalApiAuthenticationInterceptor(internalApiHeaderName, authHeaderValue, applicationName) + val chain: Interceptor.Chain = mockk() + val builder: Request.Builder = mockk() + val request: Request = mockk() + + every { builder.addHeader(any(), any()) } returns (builder) + every { builder.build() } returns (mockk()) + every { request.header(HttpHeaders.USER_AGENT) } returns null + every { request.newBuilder() } returns (builder) + every { chain.request() } returns (request) + every { chain.proceed(any()) } 
returns (mockk()) + + interceptor.intercept(chain) + + verify { builder.addHeader(HttpHeaders.USER_AGENT, formatUserAgent(applicationName)) } + verify { builder.addHeader(internalApiHeaderName, authHeaderValue) } + } + + @Test + internal fun `test that when the internal API header name is not blank but the token value is missing, the authentication header is not added`() { + val applicationName = "the-application-name" + val internalApiHeaderName = "header-name" + val authHeaderValue = "the token" + val interceptor = InternalApiAuthenticationInterceptor(internalApiHeaderName, "", applicationName) + val chain: Interceptor.Chain = mockk() + val builder: Request.Builder = mockk() + val request: Request = mockk() + + every { builder.addHeader(any(), any()) } returns (builder) + every { builder.build() } returns (mockk()) + every { request.header(HttpHeaders.USER_AGENT) } returns null + every { request.newBuilder() } returns (builder) + every { chain.request() } returns (request) + every { chain.proceed(any()) } returns (mockk()) + + interceptor.intercept(chain) + + verify { builder.addHeader(HttpHeaders.USER_AGENT, formatUserAgent(applicationName)) } + verify(exactly = 0) { builder.addHeader(any(), authHeaderValue) } + } + + @Test + internal fun `test that when the internal API header name is blank, the authentication header is not added`() { + val applicationName = "the-application-name" + val internalApiHeaderName = "" + val authHeaderValue = "the token" + val interceptor = InternalApiAuthenticationInterceptor(internalApiHeaderName, authHeaderValue, applicationName) + val chain: Interceptor.Chain = mockk() + val builder: Request.Builder = mockk() + val request: Request = mockk() + + every { builder.addHeader(any(), any()) } returns (builder) + every { builder.build() } returns (mockk()) + every { request.header(HttpHeaders.USER_AGENT) } returns null + every { request.newBuilder() } returns (builder) + every { chain.request() } returns (request) + every { 
chain.proceed(any()) } returns (mockk()) + + interceptor.intercept(chain) + + verify { builder.addHeader(HttpHeaders.USER_AGENT, formatUserAgent(applicationName)) } + verify(exactly = 0) { builder.addHeader(any(), authHeaderValue) } + } +} diff --git a/airbyte-api/src/test/kotlin/io/airbyte/api/client/auth/WorkloadApiAuthenticationInterceptorTest.kt b/airbyte-api/src/test/kotlin/io/airbyte/api/client/auth/WorkloadApiAuthenticationInterceptorTest.kt new file mode 100644 index 00000000000..f893b2c2db0 --- /dev/null +++ b/airbyte-api/src/test/kotlin/io/airbyte/api/client/auth/WorkloadApiAuthenticationInterceptorTest.kt @@ -0,0 +1,63 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.api.client.auth + +import io.airbyte.api.client.auth.WorkloadApiAuthenticationInterceptor.Companion.BEARER_TOKEN_PREFIX +import io.micronaut.http.HttpHeaders +import io.mockk.every +import io.mockk.mockk +import io.mockk.verify +import okhttp3.Interceptor +import okhttp3.Request +import okhttp3.Response +import org.junit.jupiter.api.Test +import java.util.Base64 + +internal class WorkloadApiAuthenticationInterceptorTest { + @Test + internal fun `test that when the bearer token is not blank, the authentication header is added`() { + val applicationName = "the-application-name" + val bearerToken = "a bearer token" + val expectedBearerToken = Base64.getEncoder().encodeToString(bearerToken.toByteArray()) + val interceptor = WorkloadApiAuthenticationInterceptor(bearerToken, applicationName) + val chain: Interceptor.Chain = mockk() + val builder: Request.Builder = mockk() + val request: Request = mockk() + + every { builder.addHeader(any(), any()) } returns (builder) + every { builder.build() } returns (mockk()) + every { request.header(HttpHeaders.USER_AGENT) } returns null + every { request.newBuilder() } returns (builder) + every { chain.request() } returns (request) + every { chain.proceed(any()) } returns (mockk()) + + 
interceptor.intercept(chain) + + verify { builder.addHeader(HttpHeaders.USER_AGENT, formatUserAgent(applicationName)) } + verify { builder.addHeader(HttpHeaders.AUTHORIZATION, "$BEARER_TOKEN_PREFIX $expectedBearerToken") } + } + + @Test + internal fun `test that when the bearer token is blank, the authentication header is not added`() { + val applicationName = "the-application-name" + val bearerToken = "" + val interceptor = WorkloadApiAuthenticationInterceptor(bearerToken, applicationName) + val chain: Interceptor.Chain = mockk() + val builder: Request.Builder = mockk() + val request: Request = mockk() + + every { builder.addHeader(any(), any()) } returns (builder) + every { builder.build() } returns (mockk()) + every { request.header(HttpHeaders.USER_AGENT) } returns null + every { request.newBuilder() } returns (builder) + every { chain.request() } returns (request) + every { chain.proceed(any()) } returns (mockk()) + + interceptor.intercept(chain) + + verify { builder.addHeader(HttpHeaders.USER_AGENT, formatUserAgent(applicationName)) } + verify(exactly = 0) { builder.addHeader(HttpHeaders.AUTHORIZATION, "$BEARER_TOKEN_PREFIX $bearerToken") } + } +} diff --git a/airbyte-base-java-image/Dockerfile b/airbyte-base-java-image/Dockerfile deleted file mode 100644 index c51bbd06982..00000000000 --- a/airbyte-base-java-image/Dockerfile +++ /dev/null @@ -1,13 +0,0 @@ -FROM amazoncorretto:21 - -ARG DOCKER_BUILD_ARCH=amd64 - -WORKDIR /app - -RUN yum install -y tar - -# Add the Datadog Java APM agent -ADD https://dtdg.co/latest-java-tracer dd-java-agent.jar - -# Add the OpenTelemetry Java APM agent -ADD https://github.com/open-telemetry/opentelemetry-java-instrumentation/releases/latest/download/opentelemetry-javaagent.jar opentelemetry-javaagent.jar diff --git a/airbyte-base-java-python-image/Dockerfile b/airbyte-base-java-python-image/Dockerfile deleted file mode 100644 index 166388eca1c..00000000000 --- a/airbyte-base-java-python-image/Dockerfile +++ /dev/null @@ -1,13 
+0,0 @@ -FROM airbyte/airbyte-base-java-image:2.1.0 - -RUN yum update -y && \ - yum groupinstall -y "Development Tools" && \ - yum install -y openssl11-devel bzip2-devel libffi-devel zlib-devel sqlite-devel xz-devel - -ENV PYTHON_VERSION=3.9.11 - -# Set up python -RUN git clone https://github.com/pyenv/pyenv.git ~/.pyenv -ENV PYENV_ROOT /root/.pyenv -ENV PATH ${PYENV_ROOT}/shims:${PYENV_ROOT}/bin:$PATH -RUN pyenv install ${PYTHON_VERSION} && pyenv global ${PYTHON_VERSION} diff --git a/airbyte-base-java-worker-image/Dockerfile b/airbyte-base-java-worker-image/Dockerfile deleted file mode 100644 index 4d3bfeb8eff..00000000000 --- a/airbyte-base-java-worker-image/Dockerfile +++ /dev/null @@ -1,9 +0,0 @@ -FROM airbyte/airbyte-base-java-image:2.1.0 - -ARG TARGETPLATFORM - -RUN amazon-linux-extras install -y docker -RUN yum install -y jq tar && yum clean all - -RUN curl -LO "https://dl.k8s.io/release/$(curl -L -s https://dl.k8s.io/release/stable.txt)/bin/$TARGETPLATFORM/kubectl" \ - && chmod +x kubectl && mv kubectl /usr/local/bin/ diff --git a/airbyte-bootloader/Dockerfile b/airbyte-bootloader/Dockerfile index 040fe3ead7b..661c601de76 100644 --- a/airbyte-bootloader/Dockerfile +++ b/airbyte-bootloader/Dockerfile @@ -1,5 +1,12 @@ -ARG JDK_IMAGE=airbyte/airbyte-base-java-image:2.1.0 -FROM ${JDK_IMAGE} +ARG JDK_IMAGE=airbyte/airbyte-base-java-image:3.2.1 + +FROM scratch as builder WORKDIR /app ADD airbyte-app.tar /app + +FROM ${JDK_IMAGE} +WORKDIR /app +COPY --chown=airbyte:airbyte --from=builder /app /app +USER airbyte:airbyte + ENTRYPOINT ["/bin/bash", "-c", "airbyte-app/bin/airbyte-bootloader"] diff --git a/airbyte-bootloader/build.gradle.kts b/airbyte-bootloader/build.gradle.kts index be0509dea66..0b5e10e82e9 100644 --- a/airbyte-bootloader/build.gradle.kts +++ b/airbyte-bootloader/build.gradle.kts @@ -1,81 +1,83 @@ import java.util.Properties plugins { - id("io.airbyte.gradle.jvm.app") - id("io.airbyte.gradle.docker") - id("io.airbyte.gradle.publish") + 
id("io.airbyte.gradle.jvm.app") + id("io.airbyte.gradle.docker") + id("io.airbyte.gradle.publish") } configurations.all { - resolutionStrategy { - force(libs.flyway.core, libs.jooq) - } + resolutionStrategy { + force(libs.flyway.core, libs.jooq) + } } dependencies { - compileOnly(libs.lombok) - annotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut - annotationProcessor(platform(libs.micronaut.platform)) - annotationProcessor(libs.bundles.micronaut.annotation.processor) + compileOnly(libs.lombok) + annotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut + annotationProcessor(platform(libs.micronaut.platform)) + annotationProcessor(libs.bundles.micronaut.annotation.processor) - implementation(platform(libs.micronaut.platform)) - implementation(libs.bundles.micronaut) - implementation(libs.bundles.flyway) - implementation(libs.jooq) - implementation(libs.guava) + implementation(platform(libs.micronaut.platform)) + implementation(libs.bundles.micronaut) + implementation(libs.bundles.flyway) + implementation(libs.jooq) + implementation(libs.guava) - implementation(project(":airbyte-commons")) - implementation(project(":airbyte-commons-micronaut")) - implementation(project(":airbyte-config:init")) - implementation(project(":airbyte-config:specs")) - implementation(project(":airbyte-config:config-models")) - implementation(project(":airbyte-config:config-persistence")) - implementation(project(":airbyte-config:config-secrets")) - implementation(project(":airbyte-data")) - implementation(project(":airbyte-db:db-lib")) - implementation(project(":airbyte-metrics:metrics-lib")) - implementation(project(":airbyte-json-validation")) - implementation(project(":airbyte-featureflag")) - implementation(libs.airbyte.protocol) - implementation(project(":airbyte-persistence:job-persistence")) + implementation(project(":airbyte-commons")) + implementation(project(":airbyte-commons-micronaut")) + implementation(project(":airbyte-config:init")) + 
implementation(project(":airbyte-config:specs")) + implementation(project(":airbyte-config:config-models")) + implementation(project(":airbyte-config:config-persistence")) + implementation(project(":airbyte-config:config-secrets")) + implementation(project(":airbyte-data")) + implementation(project(":airbyte-db:db-lib")) + implementation(project(":airbyte-metrics:metrics-lib")) + implementation(project(":airbyte-json-validation")) + implementation(project(":airbyte-featureflag")) + implementation(libs.airbyte.protocol) + implementation(project(":airbyte-persistence:job-persistence")) - runtimeOnly(libs.snakeyaml) + runtimeOnly(libs.snakeyaml) - testCompileOnly(libs.lombok) - testAnnotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut - testAnnotationProcessor(platform(libs.micronaut.platform)) - testAnnotationProcessor(libs.bundles.micronaut.annotation.processor) - testAnnotationProcessor(libs.bundles.micronaut.test.annotation.processor) + testCompileOnly(libs.lombok) + testAnnotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut + testAnnotationProcessor(platform(libs.micronaut.platform)) + testAnnotationProcessor(libs.bundles.micronaut.annotation.processor) + testAnnotationProcessor(libs.bundles.micronaut.test.annotation.processor) - testImplementation(libs.bundles.micronaut.test) - testImplementation(libs.bundles.junit) - testImplementation(libs.junit.jupiter.system.stubs) - testImplementation(libs.platform.testcontainers.postgresql) - testImplementation(libs.bundles.junit) - testImplementation(libs.assertj.core) - testImplementation(libs.junit.pioneer) + testImplementation(libs.bundles.micronaut.test) + testImplementation(libs.bundles.junit) + testImplementation(libs.junit.jupiter.system.stubs) + testImplementation(libs.platform.testcontainers.postgresql) + testImplementation(libs.bundles.junit) + testImplementation(libs.assertj.core) + testImplementation(libs.junit.pioneer) - testRuntimeOnly(libs.junit.jupiter.engine) + 
testRuntimeOnly(libs.junit.jupiter.engine) } val env = Properties().apply { - load(rootProject.file(".env.dev").inputStream()) + load(rootProject.file(".env.dev").inputStream()) } airbyte { - application { - mainClass = "io.airbyte.bootloader.Application" - defaultJvmArgs = listOf("-XX:+ExitOnOutOfMemoryError", "-XX:MaxRAMPercentage=75.0") - @Suppress("UNCHECKED_CAST") - localEnvVars.putAll(env.toMutableMap() as Map) - localEnvVars.putAll(mapOf( - "AIRBYTE_ROLE" to (System.getenv("AIRBYTE_ROLE") ?: "undefined"), - "AIRBYTE_VERSION" to env["VERSION"].toString(), - "DATABASE_URL" to "jdbc:postgresql://localhost:5432/airbyte", - )) - } + application { + mainClass = "io.airbyte.bootloader.Application" + defaultJvmArgs = listOf("-XX:+ExitOnOutOfMemoryError", "-XX:MaxRAMPercentage=75.0") + @Suppress("UNCHECKED_CAST") + localEnvVars.putAll(env.toMutableMap() as Map) + localEnvVars.putAll( + mapOf( + "AIRBYTE_ROLE" to (System.getenv("AIRBYTE_ROLE") ?: "undefined"), + "AIRBYTE_VERSION" to env["VERSION"].toString(), + "DATABASE_URL" to "jdbc:postgresql://localhost:5432/airbyte", + ) + ) + } - docker { - imageName = "bootloader" - } + docker { + imageName = "bootloader" + } } diff --git a/airbyte-bootloader/src/main/java/io/airbyte/bootloader/config/DatabaseBeanFactory.java b/airbyte-bootloader/src/main/java/io/airbyte/bootloader/config/DatabaseBeanFactory.java index b0faff25df5..b1ddc19aa4d 100644 --- a/airbyte-bootloader/src/main/java/io/airbyte/bootloader/config/DatabaseBeanFactory.java +++ b/airbyte-bootloader/src/main/java/io/airbyte/bootloader/config/DatabaseBeanFactory.java @@ -42,6 +42,7 @@ import java.io.IOException; import javax.sql.DataSource; import org.flywaydb.core.Flyway; +import org.flywaydb.database.postgresql.PostgreSQLConfigurationExtension; import org.jooq.DSLContext; /** @@ -103,14 +104,21 @@ public Flyway configFlyway(@Named("config") final FlywayConfigurationProperties public Flyway jobsFlyway(@Named("jobs") final FlywayConfigurationProperties 
jobsFlywayConfigurationProperties, @Named("jobs") final DataSource jobsDataSource, @Value("${airbyte.bootloader.migration-baseline-version}") final String baselineVersion) { - return jobsFlywayConfigurationProperties.getFluentConfiguration() + final var flywayConfiguration = jobsFlywayConfigurationProperties.getFluentConfiguration() .dataSource(unwrapDataSource(jobsDataSource)) .baselineVersion(baselineVersion) .baselineDescription(BASELINE_DESCRIPTION) .baselineOnMigrate(BASELINE_ON_MIGRATION) .installedBy(INSTALLED_BY) - .table(String.format("airbyte_%s_migrations", "jobs")) - .load(); + .table(String.format("airbyte_%s_migrations", "jobs")); + + // Setting the transactional lock to false allows us run queries outside transactions + // without hanging. This enables creating indexes concurrently (i.e. without locking tables) + flywayConfiguration.getPluginRegister() + .getPlugin(PostgreSQLConfigurationExtension.class) + .setTransactionalLock(false); + + return flywayConfiguration.load(); } @Singleton diff --git a/airbyte-bootloader/src/test/java/io/airbyte/bootloader/BootloaderTest.java b/airbyte-bootloader/src/test/java/io/airbyte/bootloader/BootloaderTest.java index a623712032e..1da5d11698a 100644 --- a/airbyte-bootloader/src/test/java/io/airbyte/bootloader/BootloaderTest.java +++ b/airbyte-bootloader/src/test/java/io/airbyte/bootloader/BootloaderTest.java @@ -30,6 +30,8 @@ import io.airbyte.config.secrets.SecretsRepositoryWriter; import io.airbyte.config.specs.DefinitionsProvider; import io.airbyte.config.specs.LocalDefinitionsProvider; +import io.airbyte.data.helpers.ActorDefinitionVersionUpdater; +import io.airbyte.data.services.ScopedConfigurationService; import io.airbyte.data.services.SecretPersistenceConfigService; import io.airbyte.data.services.impls.jooq.ActorDefinitionServiceJooqImpl; import io.airbyte.data.services.impls.jooq.CatalogServiceJooqImpl; @@ -95,8 +97,8 @@ class BootloaderTest { // ⚠️ This line should change with every new migration to 
show that you meant to make a new // migration to the prod database - private static final String CURRENT_CONFIGS_MIGRATION_VERSION = "0.50.41.007"; - private static final String CURRENT_JOBS_MIGRATION_VERSION = "0.50.4.001"; + private static final String CURRENT_CONFIGS_MIGRATION_VERSION = "0.55.1.003"; + private static final String CURRENT_JOBS_MIGRATION_VERSION = "0.57.2.001"; private static final String CDK_VERSION = "1.2.3"; @BeforeEach @@ -145,18 +147,27 @@ void testBootloaderAppBlankDb() throws Exception { final SecretsRepositoryWriter secretsRepositoryWriter = mock(SecretsRepositoryWriter.class); final SecretPersistenceConfigService secretPersistenceConfigService = mock(SecretPersistenceConfigService.class); val connectionService = new ConnectionServiceJooqImpl(configDatabase); + val actorDefinitionService = new ActorDefinitionServiceJooqImpl(configDatabase); + val scopedConfigurationService = mock(ScopedConfigurationService.class); + val actorDefinitionVersionUpdater = new ActorDefinitionVersionUpdater( + featureFlagClient, + connectionService, + actorDefinitionService, + scopedConfigurationService); val destinationService = new DestinationServiceJooqImpl(configDatabase, featureFlagClient, secretsRepositoryReader, secretsRepositoryWriter, secretPersistenceConfigService, - connectionService); + connectionService, + actorDefinitionVersionUpdater); val sourceService = new SourceServiceJooqImpl(configDatabase, featureFlagClient, secretsRepositoryReader, secretsRepositoryWriter, secretPersistenceConfigService, - connectionService); + connectionService, + actorDefinitionVersionUpdater); val configRepository = new ConfigRepository( new ActorDefinitionServiceJooqImpl(configDatabase), new CatalogServiceJooqImpl(configDatabase), @@ -190,7 +201,6 @@ void testBootloaderAppBlankDb() throws Exception { val actorDefinitionVersionHelper = new ActorDefinitionVersionHelper(configRepository, new NoOpDefinitionVersionOverrideProvider(), new 
NoOpDefinitionVersionOverrideProvider(), featureFlagClient); - val actorDefinitionService = new ActorDefinitionServiceJooqImpl(configDatabase); val supportStateUpdater = new SupportStateUpdater(actorDefinitionService, sourceService, destinationService, DeploymentMode.OSS, actorDefinitionVersionHelper, breakingChangeNotificationHelper, @@ -244,6 +254,13 @@ void testRequiredVersionUpgradePredicate() throws Exception { val configDatabase = new ConfigsDatabaseTestProvider(configsDslContext, configsFlyway).create(false); val jobDatabase = new JobsDatabaseTestProvider(jobsDslContext, jobsFlyway).create(false); val connectionService = new ConnectionServiceJooqImpl(configDatabase); + val actorDefinitionService = new ActorDefinitionServiceJooqImpl(configDatabase); + val scopedConfigurationService = mock(ScopedConfigurationService.class); + val actorDefinitionVersionUpdater = new ActorDefinitionVersionUpdater( + featureFlagClient, + connectionService, + actorDefinitionService, + scopedConfigurationService); val configRepository = new ConfigRepository( new ActorDefinitionServiceJooqImpl(configDatabase), new CatalogServiceJooqImpl(configDatabase), @@ -254,7 +271,8 @@ void testRequiredVersionUpgradePredicate() throws Exception { mock(SecretsRepositoryReader.class), mock(SecretsRepositoryWriter.class), mock(SecretPersistenceConfigService.class), - connectionService), + connectionService, + actorDefinitionVersionUpdater), new OAuthServiceJooqImpl(configDatabase, featureFlagClient, mock(SecretsRepositoryReader.class), @@ -265,7 +283,8 @@ void testRequiredVersionUpgradePredicate() throws Exception { mock(SecretsRepositoryReader.class), mock(SecretsRepositoryWriter.class), mock(SecretPersistenceConfigService.class), - connectionService), + connectionService, + actorDefinitionVersionUpdater), new WorkspaceServiceJooqImpl(configDatabase, featureFlagClient, mock(SecretsRepositoryReader.class), @@ -286,19 +305,20 @@ void testRequiredVersionUpgradePredicate() throws Exception { val 
actorDefinitionVersionHelper = new ActorDefinitionVersionHelper(configRepository, new NoOpDefinitionVersionOverrideProvider(), new NoOpDefinitionVersionOverrideProvider(), featureFlagClient); - val actorDefinitionService = new ActorDefinitionServiceJooqImpl(configDatabase); val sourceService = new SourceServiceJooqImpl(configDatabase, featureFlagClient, mock(SecretsRepositoryReader.class), mock(SecretsRepositoryWriter.class), mock(SecretPersistenceConfigService.class), - connectionService); + connectionService, + actorDefinitionVersionUpdater); val destinationService = new DestinationServiceJooqImpl(configDatabase, featureFlagClient, mock(SecretsRepositoryReader.class), mock(SecretsRepositoryWriter.class), mock(SecretPersistenceConfigService.class), - connectionService); + connectionService, + actorDefinitionVersionUpdater); val supportStateUpdater = new SupportStateUpdater(actorDefinitionService, sourceService, destinationService, DeploymentMode.OSS, actorDefinitionVersionHelper, breakingChangeNotificationHelper, featureFlagClient); @@ -381,6 +401,13 @@ void testPostLoadExecutionExecutes() throws Exception { val configDatabase = new ConfigsDatabaseTestProvider(configsDslContext, configsFlyway).create(false); val jobDatabase = new JobsDatabaseTestProvider(jobsDslContext, jobsFlyway).create(false); val connectionService = new ConnectionServiceJooqImpl(configDatabase); + val actorDefinitionService = new ActorDefinitionServiceJooqImpl(configDatabase); + val scopedConfigurationService = mock(ScopedConfigurationService.class); + val actorDefinitionVersionUpdater = new ActorDefinitionVersionUpdater( + featureFlagClient, + connectionService, + actorDefinitionService, + scopedConfigurationService); val configRepository = new ConfigRepository( new ActorDefinitionServiceJooqImpl(configDatabase), new CatalogServiceJooqImpl(configDatabase), @@ -391,7 +418,8 @@ void testPostLoadExecutionExecutes() throws Exception { mock(SecretsRepositoryReader.class), 
mock(SecretsRepositoryWriter.class), mock(SecretPersistenceConfigService.class), - connectionService), + connectionService, + actorDefinitionVersionUpdater), new OAuthServiceJooqImpl(configDatabase, featureFlagClient, mock(SecretsRepositoryReader.class), @@ -402,7 +430,8 @@ void testPostLoadExecutionExecutes() throws Exception { mock(SecretsRepositoryReader.class), mock(SecretsRepositoryWriter.class), mock(SecretPersistenceConfigService.class), - connectionService), + connectionService, + actorDefinitionVersionUpdater), new WorkspaceServiceJooqImpl(configDatabase, featureFlagClient, mock(SecretsRepositoryReader.class), diff --git a/airbyte-bootloader/src/test/java/io/airbyte/bootloader/helpers/NoOpDefinitionVersionOverrideProvider.java b/airbyte-bootloader/src/test/java/io/airbyte/bootloader/helpers/NoOpDefinitionVersionOverrideProvider.java index 743c089c144..17efcf0aafe 100644 --- a/airbyte-bootloader/src/test/java/io/airbyte/bootloader/helpers/NoOpDefinitionVersionOverrideProvider.java +++ b/airbyte-bootloader/src/test/java/io/airbyte/bootloader/helpers/NoOpDefinitionVersionOverrideProvider.java @@ -6,6 +6,7 @@ import io.airbyte.config.ActorDefinitionVersion; import io.airbyte.config.ActorType; +import io.airbyte.config.persistence.ActorDefinitionVersionHelper.ActorDefinitionVersionWithOverrideStatus; import io.airbyte.config.persistence.version_overrides.DefinitionVersionOverrideProvider; import java.util.Optional; import java.util.UUID; @@ -18,11 +19,11 @@ public class NoOpDefinitionVersionOverrideProvider implements DefinitionVersionOverrideProvider { @Override - public Optional getOverride(final ActorType actorType, - final UUID actorDefinitionId, - final UUID workspaceId, - @Nullable final UUID actorId, - final ActorDefinitionVersion defaultVersion) { + public Optional getOverride(final ActorType actorType, + final UUID actorDefinitionId, + final UUID workspaceId, + @Nullable final UUID actorId, + final ActorDefinitionVersion defaultVersion) { return 
Optional.empty(); } diff --git a/airbyte-commons-auth/build.gradle.kts b/airbyte-commons-auth/build.gradle.kts index 60eb54ada69..55f82472a47 100644 --- a/airbyte-commons-auth/build.gradle.kts +++ b/airbyte-commons-auth/build.gradle.kts @@ -1,36 +1,36 @@ plugins { - id("io.airbyte.gradle.jvm.lib") - id("io.airbyte.gradle.publish") - kotlin("jvm") - kotlin("kapt") + id("io.airbyte.gradle.jvm.lib") + id("io.airbyte.gradle.publish") + kotlin("jvm") + kotlin("kapt") } dependencies { - compileOnly(libs.lombok) - annotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut - annotationProcessor(platform(libs.micronaut.platform)) - annotationProcessor(libs.bundles.micronaut.annotation.processor) + compileOnly(libs.lombok) + annotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut + annotationProcessor(platform(libs.micronaut.platform)) + annotationProcessor(libs.bundles.micronaut.annotation.processor) - kapt(libs.bundles.micronaut.annotation.processor) + kapt(libs.bundles.micronaut.annotation.processor) - implementation(platform(libs.micronaut.platform)) - implementation(libs.bundles.keycloak.client) - implementation(libs.bundles.micronaut) - implementation(libs.failsafe.okhttp) - implementation(libs.kotlin.logging) - implementation(libs.okhttp) - implementation(project(":airbyte-commons")) + implementation(platform(libs.micronaut.platform)) + implementation(libs.bundles.keycloak.client) + implementation(libs.bundles.micronaut) + implementation(libs.failsafe.okhttp) + implementation(libs.kotlin.logging) + implementation(libs.okhttp) + implementation(project(":airbyte-commons")) - testAnnotationProcessor(platform(libs.micronaut.platform)) - testAnnotationProcessor(libs.bundles.micronaut.test.annotation.processor) + testAnnotationProcessor(platform(libs.micronaut.platform)) + testAnnotationProcessor(libs.bundles.micronaut.test.annotation.processor) - testImplementation(libs.bundles.micronaut.test) - testImplementation(libs.mockito.inline) - 
testImplementation(libs.mockk) + testImplementation(libs.bundles.micronaut.test) + testImplementation(libs.mockito.inline) + testImplementation(libs.mockk) } tasks.named("test") { - maxHeapSize = "2g" + maxHeapSize = "2g" } // The DuplicatesStrategy will be required while this module is mixture of kotlin and java _with_ lombok dependencies. @@ -39,5 +39,5 @@ tasks.named("test") { // keepJavacAnnotationProcessors enabled, which causes duplicate META-INF files to be generated. // Once lombok has been removed, this can also be removed. tasks.withType().configureEach { - duplicatesStrategy = DuplicatesStrategy.EXCLUDE + duplicatesStrategy = DuplicatesStrategy.EXCLUDE } diff --git a/airbyte-commons-auth/src/main/java/io/airbyte/commons/auth/config/AirbyteKeycloakConfiguration.java b/airbyte-commons-auth/src/main/java/io/airbyte/commons/auth/config/AirbyteKeycloakConfiguration.java index 95e63303aaf..243991a23ad 100644 --- a/airbyte-commons-auth/src/main/java/io/airbyte/commons/auth/config/AirbyteKeycloakConfiguration.java +++ b/airbyte-commons-auth/src/main/java/io/airbyte/commons/auth/config/AirbyteKeycloakConfiguration.java @@ -32,7 +32,6 @@ public class AirbyteKeycloakConfiguration { String clientId; String redirectUri; String webClientId; - String accountClientId; String username; String password; Boolean resetRealm; diff --git a/airbyte-commons-auth/src/main/java/io/airbyte/commons/auth/config/AuthOidcConfiguration.java b/airbyte-commons-auth/src/main/java/io/airbyte/commons/auth/config/AuthOidcConfiguration.java new file mode 100644 index 00000000000..53ac339b453 --- /dev/null +++ b/airbyte-commons-auth/src/main/java/io/airbyte/commons/auth/config/AuthOidcConfiguration.java @@ -0,0 +1,14 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.commons.auth.config; + +import io.micronaut.context.annotation.ConfigurationProperties; + +@ConfigurationProperties("airbyte.auth.identity-provider.oidc") +public record AuthOidcConfiguration( + String domain, + String appName, + String clientId, + String clientSecret) {} diff --git a/airbyte-commons-auth/src/main/java/io/airbyte/commons/auth/config/IdentityProviderConfiguration.java b/airbyte-commons-auth/src/main/java/io/airbyte/commons/auth/config/IdentityProviderConfiguration.java index bdeca475d16..c94b80aa854 100644 --- a/airbyte-commons-auth/src/main/java/io/airbyte/commons/auth/config/IdentityProviderConfiguration.java +++ b/airbyte-commons-auth/src/main/java/io/airbyte/commons/auth/config/IdentityProviderConfiguration.java @@ -32,4 +32,12 @@ public enum ProviderType { String clientId; String clientSecret; + // Eventually, AuthOidcConfiguration will simply replace this class. + // For now, we want to support airbyte.auth.identity-providers for backwards-compatibility. + public OidcConfig toOidcConfig() { + final OidcConfig oidcConfig = new OidcConfig(domain, appName, clientId, clientSecret); + log.info("Converted IdentityProviderConfiguration to OidcConfig: {}", oidcConfig); + return oidcConfig; + } + } diff --git a/airbyte-commons-auth/src/main/java/io/airbyte/commons/auth/config/OidcConfig.java b/airbyte-commons-auth/src/main/java/io/airbyte/commons/auth/config/OidcConfig.java new file mode 100644 index 00000000000..c4a68899847 --- /dev/null +++ b/airbyte-commons-auth/src/main/java/io/airbyte/commons/auth/config/OidcConfig.java @@ -0,0 +1,7 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.commons.auth.config; + +public record OidcConfig(String domain, String appName, String clientId, String clientSecret) {} diff --git a/airbyte-commons-auth/src/main/java/io/airbyte/commons/auth/config/OidcConfigFactory.java b/airbyte-commons-auth/src/main/java/io/airbyte/commons/auth/config/OidcConfigFactory.java new file mode 100644 index 00000000000..a177bb98156 --- /dev/null +++ b/airbyte-commons-auth/src/main/java/io/airbyte/commons/auth/config/OidcConfigFactory.java @@ -0,0 +1,61 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.commons.auth.config; + +import io.micronaut.context.annotation.Factory; +import io.micronaut.context.annotation.Requires; +import jakarta.inject.Singleton; +import java.util.List; +import lombok.extern.slf4j.Slf4j; + +/** + * This factory class is used to create an {@link OidcConfig} bean. It is only necessary because we + * are transitioning from using `airbyte.auth.identity-providers` to `airbyte.auth.oidc`. This + * factory creates conditional beans based on whether `airbyte.auth.oidc` is defined in + * `application.yml`, to allow backwards-compatibility with `airbyte.auth.identity-providers` in + * `airbyte.yml` for now. + */ +@Factory +@Slf4j +public class OidcConfigFactory { + + /** + * This bean is used when `airbyte.auth.identity-provider.type` is set to `oidc` in + * `application.yml`. This is the preferred way to configure OIDC, so this bean will take precedence + * over the other bean. 
+ */ + @Singleton + @Requires(property = "airbyte.auth.identity-provider.type", + value = "oidc") + public OidcConfig createOidcConfig(final AuthOidcConfiguration authOidcConfiguration) { + return new OidcConfig( + authOidcConfiguration.domain(), + authOidcConfiguration.appName(), + authOidcConfiguration.clientId(), + authOidcConfiguration.clientSecret()); + } + + /** + * This bean is used for backwards-compatibility with `airbyte.auth.identity-providers` in + * `airbyte.yml`. Eventually, we will remove support for `airbyte.auth.identity-providers` and only + * use `airbyte.auth.identity-provider`. + */ + @Singleton + @Requires(missingProperty = "airbyte.auth.identity-provider") + @Requires(property = "airbyte.auth.identity-providers") + public OidcConfig createOidcConfigFromIdentityProviderConfigurations(final List identityProviderConfigurations) { + // throw an error if there are multiple IDPs configured. We're moving away from supporting a list of + // IDPs, but for backwards-compatibility, we still support a list of IDPs in `airbyte.yml` as long + // as it contains only one entry. + if (identityProviderConfigurations.size() > 1) { + log.error("Only one identity provider is supported. Found {} identity providers.", identityProviderConfigurations.size()); + throw new RuntimeException("Only one identity provider is supported."); + } + + log.warn("DEPRECATION WARNING: Using `auth.identity-providers` is deprecated. 
Please use `auth.oidc` in your airbyte.yaml file instead."); + return identityProviderConfigurations.getFirst().toOidcConfig(); + } + +} diff --git a/airbyte-commons-auth/src/test/java/io/airbyte/commons/auth/config/IdentityProviderConfigurationTest.java b/airbyte-commons-auth/src/test/java/io/airbyte/commons/auth/config/IdentityProviderConfigurationTest.java new file mode 100644 index 00000000000..67a3adef484 --- /dev/null +++ b/airbyte-commons-auth/src/test/java/io/airbyte/commons/auth/config/IdentityProviderConfigurationTest.java @@ -0,0 +1,38 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.commons.auth.config; + +import io.micronaut.context.annotation.Property; +import io.micronaut.test.extensions.junit5.annotation.MicronautTest; +import jakarta.inject.Inject; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; + +@MicronautTest +public class IdentityProviderConfigurationTest { + + @Inject + IdentityProviderConfiguration identityProviderConfiguration; + + @Test + @Property(name = "airbyte.auth.identity-providers[0].type", + value = "oidc") + @Property(name = "airbyte.auth.identity-providers[0].domain", + value = "testdomain") + @Property(name = "airbyte.auth.identity-providers[0].appName", + value = "testApp") + @Property(name = "airbyte.auth.identity-providers[0].clientId", + value = "testClientId") + @Property(name = "airbyte.auth.identity-providers[0].clientSecret", + value = "testClientSecret") + void testToAuthOidcConfiguration() { + final OidcConfig result = identityProviderConfiguration.toOidcConfig(); + Assertions.assertEquals("testdomain", result.domain()); + Assertions.assertEquals("testApp", result.appName()); + Assertions.assertEquals("testClientId", result.clientId()); + Assertions.assertEquals("testClientSecret", result.clientSecret()); + } + +} diff --git a/airbyte-commons-auth/src/test/java/io/airbyte/commons/auth/config/OidcConfigFactoryTest.java 
b/airbyte-commons-auth/src/test/java/io/airbyte/commons/auth/config/OidcConfigFactoryTest.java new file mode 100644 index 00000000000..900187d8068 --- /dev/null +++ b/airbyte-commons-auth/src/test/java/io/airbyte/commons/auth/config/OidcConfigFactoryTest.java @@ -0,0 +1,102 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.commons.auth.config; + +import io.micronaut.context.BeanContext; +import io.micronaut.context.annotation.Property; +import io.micronaut.test.extensions.junit5.annotation.MicronautTest; +import jakarta.inject.Inject; +import java.util.Optional; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; + +@MicronautTest(rebuildContext = true) +public class OidcConfigFactoryTest { + + @Inject + BeanContext beanContext; + + @Test + void testCreateOidcConfigNoAuthPropertiesSet() { + final Optional oidcConfig = beanContext.findBean(OidcConfig.class); + Assertions.assertTrue(oidcConfig.isEmpty()); + } + + @Test + @Property(name = "airbyte.auth.identity-provider.type", + value = "oidc") + @Property(name = "airbyte.auth.identity-provider.oidc.domain", + value = "https://testdomain.com") + @Property(name = "airbyte.auth.identity-provider.oidc.appName", + value = "testApp") + @Property(name = "airbyte.auth.identity-provider.oidc.clientId", + value = "testClientId") + @Property(name = "airbyte.auth.identity-provider.oidc.clientSecret", + value = "testClientSecret") + @Property(name = "airbyte.auth.identity-providers[0].type", + value = "oidc") + @Property(name = "airbyte.auth.identity-providers[0].domain", + value = "https://ignoreddomain.com") + @Property(name = "airbyte.auth.identity-providers[0].appName", + value = "ignoredApp") + @Property(name = "airbyte.auth.identity-providers[0].clientId", + value = "ignoredClientId") + @Property(name = "airbyte.auth.identity-providers[0].clientSecret", + value = "ignoredClientSecret") + void testCreateOidcConfig() { + final Optional oidcConfig = 
beanContext.findBean(OidcConfig.class); + Assertions.assertTrue(oidcConfig.isPresent()); + Assertions.assertEquals("https://testdomain.com", oidcConfig.get().domain()); + Assertions.assertEquals("testApp", oidcConfig.get().appName()); + Assertions.assertEquals("testClientId", oidcConfig.get().clientId()); + Assertions.assertEquals("testClientSecret", oidcConfig.get().clientSecret()); + } + + @Test + @Property(name = "airbyte.auth.identity-providers[0].type", + value = "oidc") + @Property(name = "airbyte.auth.identity-providers[0].domain", + value = "https://testdomain.com") + @Property(name = "airbyte.auth.identity-providers[0].appName", + value = "testApp") + @Property(name = "airbyte.auth.identity-providers[0].clientId", + value = "testClientId") + @Property(name = "airbyte.auth.identity-providers[0].clientSecret", + value = "testClientSecret") + void testCreateOidcConfigFromIdentityProviderConfigurations() { + final Optional oidcConfig = beanContext.findBean(OidcConfig.class); + Assertions.assertTrue(oidcConfig.isPresent()); + Assertions.assertEquals("https://testdomain.com", oidcConfig.get().domain()); + Assertions.assertEquals("testApp", oidcConfig.get().appName()); + Assertions.assertEquals("testClientId", oidcConfig.get().clientId()); + Assertions.assertEquals("testClientSecret", oidcConfig.get().clientSecret()); + } + + @Test + @Property(name = "airbyte.auth.identity-providers[0].type", + value = "oidc") + @Property(name = "airbyte.auth.identity-providers[0].domain", + value = "https://testdomain.com") + @Property(name = "airbyte.auth.identity-providers[0].appName", + value = "testApp") + @Property(name = "airbyte.auth.identity-providers[0].clientId", + value = "testClientId") + @Property(name = "airbyte.auth.identity-providers[0].clientSecret", + value = "testClientSecret") + @Property(name = "airbyte.auth.identity-providers[1].type", + value = "oidc") + @Property(name = "airbyte.auth.identity-providers[1].domain", + value = "https://testdomain2.com") + 
@Property(name = "airbyte.auth.identity-providers[1].appName", + value = "testApp2") + @Property(name = "airbyte.auth.identity-providers[1].clientId", + value = "testClientId2") + @Property(name = "airbyte.auth.identity-providers[1].clientSecret", + value = "testClientSecret2") + void testCreateOidcConfigFromIdentityProviderConfigurationsThrowsIfMultiple() { + Assertions.assertThrows(RuntimeException.class, () -> beanContext.findBean(OidcConfig.class)); + } + +} diff --git a/airbyte-commons-auth/src/test/kotlin/io/airbyte/commons/auth/AuthenticationInterceptorTest.kt b/airbyte-commons-auth/src/test/kotlin/io/airbyte/commons/auth/AuthenticationInterceptorTest.kt deleted file mode 100644 index 14c424a0461..00000000000 --- a/airbyte-commons-auth/src/test/kotlin/io/airbyte/commons/auth/AuthenticationInterceptorTest.kt +++ /dev/null @@ -1,56 +0,0 @@ -package io.airbyte.commons.auth - -import io.airbyte.commons.auth.AuthenticationInterceptor.Companion.BEARER_TOKEN_PREFIX -import io.airbyte.commons.auth.AuthenticationInterceptor.Companion.USER_AGENT_VALUE -import io.micronaut.http.HttpHeaders -import io.mockk.every -import io.mockk.mockk -import io.mockk.verify -import okhttp3.Interceptor -import okhttp3.Request -import okhttp3.Response -import org.junit.jupiter.api.Test -import java.util.Base64 - -class AuthenticationInterceptorTest { - @Test - fun `test that when the bearer token is not blank, the authentication header is added`() { - val bearerToken = "a bearer token" - val expectedBearerToken = Base64.getEncoder().encodeToString(bearerToken.toByteArray()) - val interceptor = AuthenticationInterceptor(bearerToken) - val chain: Interceptor.Chain = mockk() - val builder: Request.Builder = mockk() - val request: Request = mockk() - - every { builder.header(any(), any()) }.returns(builder) - every { builder.build() }.returns(mockk()) - every { request.newBuilder() }.returns(builder) - every { chain.request() }.returns(request) - every { chain.proceed(any()) 
}.returns(mockk()) - - interceptor.intercept(chain) - - verify { builder.header(HttpHeaders.USER_AGENT, USER_AGENT_VALUE) } - verify { builder.header(HttpHeaders.AUTHORIZATION, "$BEARER_TOKEN_PREFIX $expectedBearerToken") } - } - - @Test - fun `test that when the bearer token is blank, the authentication header is not added`() { - val bearerToken = "" - val interceptor = AuthenticationInterceptor(bearerToken) - val chain: Interceptor.Chain = mockk() - val builder: Request.Builder = mockk() - val request: Request = mockk() - - every { builder.header(any(), any()) }.returns(builder) - every { builder.build() }.returns(mockk()) - every { request.newBuilder() }.returns(builder) - every { chain.request() }.returns(request) - every { chain.proceed(any()) }.returns(mockk()) - - interceptor.intercept(chain) - - verify { builder.header(HttpHeaders.USER_AGENT, USER_AGENT_VALUE) } - verify(exactly = 0) { builder.header(HttpHeaders.AUTHORIZATION, "$BEARER_TOKEN_PREFIX $bearerToken") } - } -} diff --git a/airbyte-commons-converters/build.gradle.kts b/airbyte-commons-converters/build.gradle.kts index 531e53ff050..fafa7eeb404 100644 --- a/airbyte-commons-converters/build.gradle.kts +++ b/airbyte-commons-converters/build.gradle.kts @@ -1,36 +1,36 @@ plugins { - id("io.airbyte.gradle.jvm.lib") - id("io.airbyte.gradle.publish") + id("io.airbyte.gradle.jvm.lib") + id("io.airbyte.gradle.publish") } dependencies { - annotationProcessor(platform(libs.micronaut.platform)) - annotationProcessor(libs.bundles.micronaut.annotation.processor) + annotationProcessor(platform(libs.micronaut.platform)) + annotationProcessor(libs.bundles.micronaut.annotation.processor) - implementation(platform(libs.micronaut.platform)) - implementation(libs.bundles.micronaut) + implementation(platform(libs.micronaut.platform)) + implementation(libs.bundles.micronaut) - implementation(libs.apache.commons.text) + implementation(libs.apache.commons.text) - implementation(project(":airbyte-api")) - 
implementation(project(":airbyte-commons")) - implementation(project(":airbyte-config:config-models")) - implementation(project(":airbyte-config:config-persistence")) - implementation(project(":airbyte-json-validation")) - implementation(project(":airbyte-persistence:job-persistence")) - implementation(libs.airbyte.protocol) - implementation(libs.guava) - implementation(libs.slf4j.api) - implementation(libs.bundles.datadog) + implementation(project(":airbyte-api")) + implementation(project(":airbyte-commons")) + implementation(project(":airbyte-config:config-models")) + implementation(project(":airbyte-config:config-persistence")) + implementation(project(":airbyte-json-validation")) + implementation(project(":airbyte-persistence:job-persistence")) + implementation(libs.airbyte.protocol) + implementation(libs.guava) + implementation(libs.slf4j.api) + implementation(libs.bundles.datadog) - testAnnotationProcessor(platform(libs.micronaut.platform)) - testAnnotationProcessor(libs.bundles.micronaut.test.annotation.processor) - testAnnotationProcessor(libs.jmh.annotations) + testAnnotationProcessor(platform(libs.micronaut.platform)) + testAnnotationProcessor(libs.bundles.micronaut.test.annotation.processor) + testAnnotationProcessor(libs.jmh.annotations) - testImplementation(libs.bundles.micronaut.test) - testRuntimeOnly(libs.junit.jupiter.engine) - testImplementation(libs.bundles.junit) - testImplementation(libs.assertj.core) + testImplementation(libs.bundles.micronaut.test) + testRuntimeOnly(libs.junit.jupiter.engine) + testImplementation(libs.bundles.junit) + testImplementation(libs.assertj.core) - testImplementation(libs.junit.pioneer) + testImplementation(libs.junit.pioneer) } diff --git a/airbyte-commons-converters/src/main/java/io/airbyte/commons/converters/CatalogClientConverters.java b/airbyte-commons-converters/src/main/java/io/airbyte/commons/converters/CatalogClientConverters.java index 745380a41cc..4bd31c0fc6f 100644 --- 
a/airbyte-commons-converters/src/main/java/io/airbyte/commons/converters/CatalogClientConverters.java +++ b/airbyte-commons-converters/src/main/java/io/airbyte/commons/converters/CatalogClientConverters.java @@ -138,7 +138,10 @@ private static ConfiguredAirbyteStream toConfiguredStreamProtocol(final io.airby .withSyncMode(Enums.convertTo(config.getSyncMode(), io.airbyte.protocol.models.SyncMode.class)) .withDestinationSyncMode(Enums.convertTo(config.getDestinationSyncMode(), io.airbyte.protocol.models.DestinationSyncMode.class)) .withPrimaryKey(config.getPrimaryKey()) - .withCursorField(config.getCursorField()); + .withCursorField(config.getCursorField()) + .withGenerationId(config.getGenerationId()) + .withMinimumGenerationId(config.getMinimumGenerationId()) + .withSyncId(config.getSyncId()); } /** diff --git a/airbyte-commons-converters/src/main/java/io/airbyte/commons/converters/ConfigReplacer.java b/airbyte-commons-converters/src/main/java/io/airbyte/commons/converters/ConfigReplacer.java index 313c58b10d1..de7757f20d6 100644 --- a/airbyte-commons-converters/src/main/java/io/airbyte/commons/converters/ConfigReplacer.java +++ b/airbyte-commons-converters/src/main/java/io/airbyte/commons/converters/ConfigReplacer.java @@ -10,6 +10,9 @@ import io.airbyte.config.AllowedHosts; import io.airbyte.config.constants.AlwaysAllowedHosts; import java.io.IOException; +import java.net.MalformedURLException; +import java.net.URI; +import java.net.URISyntaxException; import java.util.ArrayList; import java.util.HashMap; import java.util.List; @@ -64,7 +67,7 @@ public AllowedHosts getAllowedHosts(final AllowedHosts allowedHosts, final JsonN } if (value != null) { - valuesMap.put(fullKey, value); + valuesMap.put(fullKey, sanitize(value)); } } else if (type == JsonToken.START_OBJECT) { if (jsonParser.getCurrentName() != null) { @@ -98,4 +101,30 @@ public AllowedHosts getAllowedHosts(final AllowedHosts allowedHosts, final JsonN return resolvedAllowedHosts; } + public String 
sanitize(String s) { + try { + final String withProtocol = s.contains("://") ? s : "x://" + s; + final URI uri = new URI(withProtocol); + return uri.toURL().getHost(); + } catch (MalformedURLException | URISyntaxException e) { + // some hosts will be provided from the connector config with a protocol, like ftp://site.com or + // mongodb+srv://cluster0.abcd1.mongodb.net + String[] parts = s.split("://"); + s = parts.length > 1 ? parts[1] : parts[0]; + + // some hosts might have a trailing path. We only want the first chunk in all cases (e.g. + // http://site.com/path/foo/bar) + parts = s.split("/"); + s = parts[0]; + + // some hosts will have a username or password, like https://user:passowrd@site.com + parts = s.split("@"); + s = parts.length > 1 ? parts[1] : parts[0]; + + // remove slashes - we only want hostnames, not paths + s = s.replace("/", ""); + return s; + } + } + } diff --git a/airbyte-commons-license/build.gradle.kts b/airbyte-commons-license/build.gradle.kts index 7fc4b9b0ffa..e9c5f9dfe5e 100644 --- a/airbyte-commons-license/build.gradle.kts +++ b/airbyte-commons-license/build.gradle.kts @@ -1,29 +1,29 @@ plugins { - id("io.airbyte.gradle.jvm.lib") - id("io.airbyte.gradle.publish") + id("io.airbyte.gradle.jvm.lib") + id("io.airbyte.gradle.publish") } dependencies { - compileOnly(libs.lombok) - annotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut - annotationProcessor(platform(libs.micronaut.platform)) - annotationProcessor(libs.bundles.micronaut.annotation.processor) + compileOnly(libs.lombok) + annotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut + annotationProcessor(platform(libs.micronaut.platform)) + annotationProcessor(libs.bundles.micronaut.annotation.processor) - implementation(platform(libs.micronaut.platform)) - implementation(libs.bundles.micronaut) - implementation(libs.guava) + implementation(platform(libs.micronaut.platform)) + implementation(libs.bundles.micronaut) + implementation(libs.guava) - 
implementation(project(":airbyte-commons")) - implementation(project(":airbyte-commons-micronaut")) - implementation(project(":airbyte-config:config-models")) + implementation(project(":airbyte-commons")) + implementation(project(":airbyte-commons-micronaut")) + implementation(project(":airbyte-config:config-models")) - testAnnotationProcessor(platform(libs.micronaut.platform)) - testAnnotationProcessor(libs.bundles.micronaut.test.annotation.processor) + testAnnotationProcessor(platform(libs.micronaut.platform)) + testAnnotationProcessor(libs.bundles.micronaut.test.annotation.processor) - testImplementation(libs.bundles.micronaut.test) - testImplementation(libs.mockito.inline) + testImplementation(libs.bundles.micronaut.test) + testImplementation(libs.mockito.inline) } tasks.named("test") { - maxHeapSize = "2g" + maxHeapSize = "2g" } diff --git a/airbyte-commons-micronaut-security/build.gradle.kts b/airbyte-commons-micronaut-security/build.gradle.kts index 51f6bc23022..3e725a866d8 100644 --- a/airbyte-commons-micronaut-security/build.gradle.kts +++ b/airbyte-commons-micronaut-security/build.gradle.kts @@ -1,30 +1,30 @@ plugins { - id("io.airbyte.gradle.jvm.lib") - id("io.airbyte.gradle.publish") + id("io.airbyte.gradle.jvm.lib") + id("io.airbyte.gradle.publish") } dependencies { - compileOnly(libs.lombok) - annotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut + compileOnly(libs.lombok) + annotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut - annotationProcessor(platform(libs.micronaut.platform)) - annotationProcessor(libs.bundles.micronaut.annotation.processor) + annotationProcessor(platform(libs.micronaut.platform)) + annotationProcessor(libs.bundles.micronaut.annotation.processor) - implementation(platform(libs.micronaut.platform)) - implementation(libs.bundles.micronaut) - implementation(libs.micronaut.security) + implementation(platform(libs.micronaut.platform)) + implementation(libs.bundles.micronaut) + 
implementation(libs.micronaut.security) - implementation(project(":airbyte-commons")) + implementation(project(":airbyte-commons")) - testCompileOnly(libs.lombok) - testAnnotationProcessor(libs.lombok) - testAnnotationProcessor(platform(libs.micronaut.platform)) - testAnnotationProcessor(libs.bundles.micronaut.test.annotation.processor) + testCompileOnly(libs.lombok) + testAnnotationProcessor(libs.lombok) + testAnnotationProcessor(platform(libs.micronaut.platform)) + testAnnotationProcessor(libs.bundles.micronaut.test.annotation.processor) - testImplementation(libs.bundles.micronaut.test) - testImplementation(libs.mockito.inline) + testImplementation(libs.bundles.micronaut.test) + testImplementation(libs.mockito.inline) } tasks.named("test") { - maxHeapSize = "2g" + maxHeapSize = "2g" } diff --git a/airbyte-commons-micronaut/build.gradle.kts b/airbyte-commons-micronaut/build.gradle.kts index 6ff31d9bf6b..972c5096088 100644 --- a/airbyte-commons-micronaut/build.gradle.kts +++ b/airbyte-commons-micronaut/build.gradle.kts @@ -1,28 +1,28 @@ plugins { - id("io.airbyte.gradle.jvm.lib") - id("io.airbyte.gradle.publish") + id("io.airbyte.gradle.jvm.lib") + id("io.airbyte.gradle.publish") } dependencies { - compileOnly(libs.lombok) - annotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut - annotationProcessor(platform(libs.micronaut.platform)) - annotationProcessor(libs.bundles.micronaut.annotation.processor) + compileOnly(libs.lombok) + annotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut + annotationProcessor(platform(libs.micronaut.platform)) + annotationProcessor(libs.bundles.micronaut.annotation.processor) - implementation(platform(libs.micronaut.platform)) - implementation(libs.bundles.micronaut) - implementation(libs.micronaut.security) + implementation(platform(libs.micronaut.platform)) + implementation(libs.bundles.micronaut) + implementation(libs.micronaut.security) - implementation(project(":airbyte-commons")) - 
implementation(project(":airbyte-config:config-models")) + implementation(project(":airbyte-commons")) + implementation(project(":airbyte-config:config-models")) - testAnnotationProcessor(platform(libs.micronaut.platform)) - testAnnotationProcessor(libs.bundles.micronaut.test.annotation.processor) + testAnnotationProcessor(platform(libs.micronaut.platform)) + testAnnotationProcessor(libs.bundles.micronaut.test.annotation.processor) - testImplementation(libs.bundles.micronaut.test) - testImplementation(libs.mockito.inline) + testImplementation(libs.bundles.micronaut.test) + testImplementation(libs.mockito.inline) } tasks.named("test") { - maxHeapSize = "2g" + maxHeapSize = "2g" } diff --git a/airbyte-commons-protocol/build.gradle.kts b/airbyte-commons-protocol/build.gradle.kts index e337070f6e3..22a55a5b0f4 100644 --- a/airbyte-commons-protocol/build.gradle.kts +++ b/airbyte-commons-protocol/build.gradle.kts @@ -1,27 +1,27 @@ plugins { - id("io.airbyte.gradle.jvm") - id("io.airbyte.gradle.publish") + id("io.airbyte.gradle.jvm") + id("io.airbyte.gradle.publish") } dependencies { - compileOnly(libs.lombok) - annotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut - annotationProcessor(libs.bundles.micronaut.annotation.processor) + compileOnly(libs.lombok) + annotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut + annotationProcessor(libs.bundles.micronaut.annotation.processor) - implementation(project(":airbyte-commons")) - implementation(project(":airbyte-json-validation")) + implementation(project(":airbyte-commons")) + implementation(project(":airbyte-json-validation")) - implementation(libs.bundles.micronaut.annotation) - implementation(libs.airbyte.protocol) - implementation(libs.guava) - implementation(libs.bundles.jackson) + implementation(libs.bundles.micronaut.annotation) + implementation(libs.airbyte.protocol) + implementation(libs.guava) + implementation(libs.bundles.jackson) - 
testAnnotationProcessor(libs.bundles.micronaut.test.annotation.processor) + testAnnotationProcessor(libs.bundles.micronaut.test.annotation.processor) - testImplementation(libs.bundles.micronaut.test) - testImplementation(libs.bundles.junit) - testImplementation(libs.assertj.core) - testImplementation(libs.junit.pioneer) + testImplementation(libs.bundles.micronaut.test) + testImplementation(libs.bundles.junit) + testImplementation(libs.assertj.core) + testImplementation(libs.junit.pioneer) - testRuntimeOnly(libs.junit.jupiter.engine) + testRuntimeOnly(libs.junit.jupiter.engine) } diff --git a/airbyte-commons-server/build.gradle.kts b/airbyte-commons-server/build.gradle.kts index 5ee46f69885..d4632552000 100644 --- a/airbyte-commons-server/build.gradle.kts +++ b/airbyte-commons-server/build.gradle.kts @@ -1,98 +1,100 @@ plugins { - id("io.airbyte.gradle.jvm.lib") - id("io.airbyte.gradle.publish") - id("org.jetbrains.kotlin.jvm") - id("org.jetbrains.kotlin.kapt") + id("io.airbyte.gradle.jvm.lib") + id("io.airbyte.gradle.publish") + kotlin("jvm") + kotlin("kapt") } configurations.all { - resolutionStrategy { - // Ensure that the versions defined in deps.toml are used - // instead of versions from transitive dependencies - force(libs.flyway.core, libs.s3, libs.aws.java.sdk.s3) - } + resolutionStrategy { + // Ensure that the versions defined in deps.toml are used + // instead of versions from transitive dependencies + force(libs.flyway.core, libs.s3, libs.aws.java.sdk.s3) + } } dependencies { - compileOnly(libs.lombok) - annotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut + compileOnly(libs.lombok) + annotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut - kapt(platform(libs.micronaut.platform)) - kapt(libs.bundles.micronaut.annotation.processor) - kapt(libs.micronaut.jaxrs.processor) + kapt(platform(libs.micronaut.platform)) + kapt(libs.bundles.micronaut.annotation.processor) + kapt(libs.micronaut.jaxrs.processor) - 
kaptTest(platform(libs.micronaut.platform)) - kaptTest(libs.bundles.micronaut.test.annotation.processor) + kaptTest(platform(libs.micronaut.platform)) + kaptTest(libs.bundles.micronaut.test.annotation.processor) - annotationProcessor(platform(libs.micronaut.platform)) - annotationProcessor(libs.bundles.micronaut.annotation.processor) - annotationProcessor(libs.micronaut.jaxrs.processor) + annotationProcessor(platform(libs.micronaut.platform)) + annotationProcessor(libs.bundles.micronaut.annotation.processor) + annotationProcessor(libs.micronaut.jaxrs.processor) - implementation(platform(libs.micronaut.platform)) - implementation(libs.bundles.micronaut) - implementation(libs.micronaut.cache.caffeine) - implementation(libs.micronaut.inject) - implementation(libs.micronaut.jaxrs.server) - implementation(libs.micronaut.security) - implementation(libs.bundles.flyway) - implementation(libs.s3) - implementation(libs.aws.java.sdk.s3) - implementation(libs.sts) - implementation(libs.bundles.apache) - implementation(libs.slugify) - implementation(libs.quartz.scheduler) - implementation(libs.temporal.sdk) - implementation(libs.swagger.annotations) - implementation(libs.bundles.log4j) - implementation(libs.commons.io) - implementation(project(":airbyte-analytics")) - implementation(project(":airbyte-api")) - implementation(project(":airbyte-commons")) - implementation(project(":airbyte-commons-auth")) - implementation(project(":airbyte-commons-converters")) - implementation(project(":airbyte-commons-license")) - implementation(project(":airbyte-commons-temporal")) - implementation(project(":airbyte-commons-temporal-core")) - implementation(project(":airbyte-commons-with-dependencies")) - implementation(project(":airbyte-config:init")) - implementation(project(":airbyte-config:config-models")) - implementation(project(":airbyte-config:config-persistence")) - implementation(project(":airbyte-config:config-secrets")) - implementation(project(":airbyte-config:specs")) - 
implementation(project(":airbyte-data")) - implementation(project(":airbyte-featureflag")) - implementation(project(":airbyte-metrics:metrics-lib")) - implementation(project(":airbyte-db:db-lib")) - implementation(project(":airbyte-json-validation")) - implementation(project(":airbyte-oauth")) - implementation(libs.airbyte.protocol) - implementation(project(":airbyte-persistence:job-persistence")) - implementation(project(":airbyte-worker-models")) - implementation(project(":airbyte-notification")) + implementation(platform(libs.micronaut.platform)) + implementation(libs.bundles.micronaut) + implementation(libs.micronaut.cache.caffeine) + implementation(libs.micronaut.inject) + implementation(libs.micronaut.jaxrs.server) + implementation(libs.micronaut.security) + implementation(libs.bundles.micronaut.data.jdbc) + implementation(libs.bundles.micronaut.kotlin) + implementation(libs.bundles.flyway) + implementation(libs.s3) + implementation(libs.aws.java.sdk.s3) + implementation(libs.sts) + implementation(libs.bundles.apache) + implementation(libs.slugify) + implementation(libs.quartz.scheduler) + implementation(libs.temporal.sdk) + implementation(libs.swagger.annotations) + implementation(libs.bundles.log4j) + implementation(libs.commons.io) + implementation(project(":airbyte-analytics")) + implementation(project(":airbyte-api")) + implementation(project(":airbyte-commons")) + implementation(project(":airbyte-commons-auth")) + implementation(project(":airbyte-commons-converters")) + implementation(project(":airbyte-commons-license")) + implementation(project(":airbyte-commons-temporal")) + implementation(project(":airbyte-commons-temporal-core")) + implementation(project(":airbyte-commons-with-dependencies")) + implementation(project(":airbyte-config:init")) + implementation(project(":airbyte-config:config-models")) + implementation(project(":airbyte-config:config-persistence")) + implementation(project(":airbyte-config:config-secrets")) + 
implementation(project(":airbyte-config:specs")) + implementation(project(":airbyte-data")) + implementation(project(":airbyte-featureflag")) + implementation(project(":airbyte-metrics:metrics-lib")) + implementation(project(":airbyte-db:db-lib")) + implementation(project(":airbyte-json-validation")) + implementation(project(":airbyte-oauth")) + implementation(libs.airbyte.protocol) + implementation(project(":airbyte-persistence:job-persistence")) + implementation(project(":airbyte-worker-models")) + implementation(project(":airbyte-notification")) - testAnnotationProcessor(libs.bundles.micronaut.test.annotation.processor) + testAnnotationProcessor(libs.bundles.micronaut.test.annotation.processor) - testImplementation(project(":airbyte-test-utils")) - testImplementation(libs.postgresql) - testImplementation(libs.platform.testcontainers.postgresql) - testImplementation(libs.mockwebserver) - testImplementation(libs.mockito.inline) - testImplementation(libs.bundles.junit) - testImplementation(libs.assertj.core) - testImplementation(libs.junit.pioneer) - testImplementation(libs.bundles.micronaut.test) - testImplementation(libs.micronaut.http) - testImplementation(libs.mockk) + testImplementation(project(":airbyte-test-utils")) + testImplementation(libs.postgresql) + testImplementation(libs.platform.testcontainers.postgresql) + testImplementation(libs.mockwebserver) + testImplementation(libs.mockito.inline) + testImplementation(libs.bundles.junit) + testImplementation(libs.assertj.core) + testImplementation(libs.junit.pioneer) + testImplementation(libs.bundles.micronaut.test) + testImplementation(libs.micronaut.http) + testImplementation(libs.mockk) - testRuntimeOnly(libs.junit.jupiter.engine) + testRuntimeOnly(libs.junit.jupiter.engine) } // Even though Kotlin is excluded on Spotbugs, this project // still runs into spotbug issues. Working theory is that // generated code is being picked up. Disable as a short-term fix. 
tasks.named("spotbugsMain") { - enabled = false + enabled = false } tasks.withType() { - duplicatesStrategy = DuplicatesStrategy.EXCLUDE + duplicatesStrategy = DuplicatesStrategy.EXCLUDE } \ No newline at end of file diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/converters/WorkspaceConverter.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/converters/WorkspaceConverter.java new file mode 100644 index 00000000000..f7c568eba50 --- /dev/null +++ b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/converters/WorkspaceConverter.java @@ -0,0 +1,38 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.commons.server.converters; + +import io.airbyte.api.model.generated.Geography; +import io.airbyte.api.model.generated.WorkspaceRead; +import io.airbyte.commons.enums.Enums; +import io.airbyte.config.StandardWorkspace; + +public class WorkspaceConverter { + + public static WorkspaceRead domainToApiModel(final StandardWorkspace workspace) { + final WorkspaceRead result = new WorkspaceRead() + .workspaceId(workspace.getWorkspaceId()) + .customerId(workspace.getCustomerId()) + .email(workspace.getEmail()) + .name(workspace.getName()) + .slug(workspace.getSlug()) + .initialSetupComplete(workspace.getInitialSetupComplete()) + .displaySetupWizard(workspace.getDisplaySetupWizard()) + .anonymousDataCollection(workspace.getAnonymousDataCollection()) + .news(workspace.getNews()) + .securityUpdates(workspace.getSecurityUpdates()) + .notifications(NotificationConverter.toApiList(workspace.getNotifications())) + .notificationSettings(NotificationSettingsConverter.toApi(workspace.getNotificationSettings())) + .defaultGeography(Enums.convertTo(workspace.getDefaultGeography(), Geography.class)) + .organizationId(workspace.getOrganizationId()) + .tombstone(workspace.getTombstone()); + // Add read-only webhook configs. 
+ if (workspace.getWebhookOperationConfigs() != null) { + result.setWebhookConfigs(WorkspaceWebhookConfigsConverter.toApiReads(workspace.getWebhookOperationConfigs())); + } + return result; + } + +} diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/errors/ConflictException.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/errors/ConflictException.java new file mode 100644 index 00000000000..e9925cc276f --- /dev/null +++ b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/errors/ConflictException.java @@ -0,0 +1,28 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.commons.server.errors; + +import io.micronaut.http.HttpStatus; + +/** + * Exception when a request conflicts with the current state of the server. For example, trying to + * accept an invitation that was already accepted. + */ +public class ConflictException extends KnownException { + + public ConflictException(final String message) { + super(message); + } + + public ConflictException(final String message, final Throwable cause) { + super(message, cause); + } + + @Override + public int getHttpCode() { + return HttpStatus.CONFLICT.getCode(); + } + +} diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/AttemptHandler.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/AttemptHandler.java index 8728b98ed7c..a1a3c43dab9 100644 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/AttemptHandler.java +++ b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/AttemptHandler.java @@ -4,6 +4,7 @@ package io.airbyte.commons.server.handlers; +import com.google.common.annotations.VisibleForTesting; import io.airbyte.api.model.generated.AttemptInfoRead; import io.airbyte.api.model.generated.AttemptStats; import io.airbyte.api.model.generated.CreateNewAttemptNumberResponse; @@ -20,19 +21,29 @@ import 
io.airbyte.commons.server.handlers.helpers.JobCreationAndStatusUpdateHelper; import io.airbyte.commons.temporal.TemporalUtils; import io.airbyte.config.AttemptFailureSummary; +import io.airbyte.config.JobConfig; import io.airbyte.config.JobOutput; import io.airbyte.config.StandardSyncOutput; import io.airbyte.config.StreamSyncStats; import io.airbyte.config.SyncStats; import io.airbyte.config.helpers.LogClientSingleton; +import io.airbyte.config.persistence.StatePersistence; +import io.airbyte.featureflag.Connection; +import io.airbyte.featureflag.DeleteFullRefreshState; +import io.airbyte.featureflag.FeatureFlagClient; import io.airbyte.metrics.lib.OssMetricsRegistry; import io.airbyte.persistence.job.JobPersistence; import io.airbyte.persistence.job.models.Job; +import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; +import io.airbyte.protocol.models.StreamDescriptor; +import io.airbyte.protocol.models.SyncMode; import jakarta.inject.Named; import jakarta.inject.Singleton; import java.io.IOException; import java.nio.file.Path; import java.util.Optional; +import java.util.Set; +import java.util.UUID; import java.util.stream.Collectors; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -44,20 +55,27 @@ public class AttemptHandler { private static final Logger LOGGER = LoggerFactory.getLogger(AttemptHandler.class); + private static final int FIRST_ATTEMPT = 0; private final JobPersistence jobPersistence; + private final StatePersistence statePersistence; private final JobConverter jobConverter; private final JobCreationAndStatusUpdateHelper jobCreationAndStatusUpdateHelper; private final Path workspaceRoot; + private final FeatureFlagClient featureFlagClient; public AttemptHandler(final JobPersistence jobPersistence, + final StatePersistence statePersistence, final JobConverter jobConverter, + final FeatureFlagClient featureFlagClient, final JobCreationAndStatusUpdateHelper jobCreationAndStatusUpdateHelper, @Named("workspaceRoot") final Path 
workspaceRoot) { this.jobPersistence = jobPersistence; + this.statePersistence = statePersistence; this.jobConverter = jobConverter; this.jobCreationAndStatusUpdateHelper = jobCreationAndStatusUpdateHelper; + this.featureFlagClient = featureFlagClient; this.workspaceRoot = workspaceRoot; } @@ -69,15 +87,46 @@ public CreateNewAttemptNumberResponse createNewAttemptNumber(final long jobId) t throw new UnprocessableContentException(String.format("Could not find jobId: %s", jobId), e); } - final Path jobRoot = TemporalUtils.getJobRoot(workspaceRoot, String.valueOf(jobId), (long) job.getAttemptsCount()); + final Path jobRoot = TemporalUtils.getJobRoot(workspaceRoot, String.valueOf(jobId), job.getAttemptsCount()); final Path logFilePath = jobRoot.resolve(LogClientSingleton.LOG_FILENAME); final int persistedAttemptNumber = jobPersistence.createAttempt(jobId, logFilePath); + + // We cannot easily do this in a transaction as the attempt and state tables are in separate logical + // databases. + final var removeFullRefreshStreamState = + job.getConfigType().equals(JobConfig.ConfigType.SYNC) || job.getConfigType().equals(JobConfig.ConfigType.REFRESH); + if (removeFullRefreshStreamState) { + if (featureFlagClient.boolVariation(DeleteFullRefreshState.INSTANCE, new Connection(job.getScope()))) { + LOGGER.info("Clearing full refresh state.."); + final var stateToClear = getFullRefreshStreams(job.getConfig().getSync().getConfiguredAirbyteCatalog(), job.getId()); + if (!stateToClear.isEmpty()) { + statePersistence.bulkDelete(UUID.fromString(job.getScope()), stateToClear); + } + } + } + jobCreationAndStatusUpdateHelper.emitJobToReleaseStagesMetric(OssMetricsRegistry.ATTEMPT_CREATED_BY_RELEASE_STAGE, job); jobCreationAndStatusUpdateHelper.emitAttemptCreatedEvent(job, persistedAttemptNumber); return new CreateNewAttemptNumberResponse().attemptNumber(persistedAttemptNumber); } + @VisibleForTesting + Set getFullRefreshStreams(ConfiguredAirbyteCatalog catalog, long id) { + if (catalog == 
null) { + throw new BadRequestException("Missing configured catalog for job: " + id); + } + final var configuredStreams = catalog.getStreams(); + if (configuredStreams == null) { + throw new BadRequestException("Missing configured catalog stream for job: " + id); + } + + return configuredStreams.stream() + .filter(s -> s.getSyncMode().equals(SyncMode.FULL_REFRESH)) + .map(s -> new StreamDescriptor().withName(s.getStream().getName()).withNamespace(s.getStream().getNamespace())) + .collect(Collectors.toSet()); + } + public AttemptInfoRead getAttemptForJob(final long jobId, final int attemptNo) throws IOException { final Optional read = jobPersistence.getAttemptForJob(jobId, attemptNo) .map(jobConverter::getAttemptInfoRead); diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/ConnectionsHandler.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/ConnectionsHandler.java index 2f67ccbb889..0fbb25b106b 100644 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/ConnectionsHandler.java +++ b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/ConnectionsHandler.java @@ -5,8 +5,6 @@ package io.airbyte.commons.server.handlers; import static io.airbyte.commons.converters.ConnectionHelper.validateCatalogDoesntContainDuplicateStreamNames; -import static io.airbyte.persistence.job.JobNotifier.CONNECTION_DISABLED_NOTIFICATION; -import static io.airbyte.persistence.job.JobNotifier.CONNECTION_DISABLED_WARNING_NOTIFICATION; import static io.airbyte.persistence.job.models.Job.REPLICATION_TYPES; import static java.time.temporal.ChronoUnit.DAYS; @@ -61,7 +59,6 @@ import io.airbyte.config.DestinationConnection; import io.airbyte.config.FieldSelectionData; import io.airbyte.config.Geography; -import io.airbyte.config.JobConfig; import io.airbyte.config.JobConfig.ConfigType; import io.airbyte.config.JobOutput; import io.airbyte.config.JobSyncConfig.NamespaceDefinitionType; @@ -79,6 
+76,9 @@ import io.airbyte.config.persistence.ActorDefinitionVersionHelper; import io.airbyte.config.persistence.ConfigNotFoundException; import io.airbyte.config.persistence.ConfigRepository; +import io.airbyte.config.persistence.StreamGenerationRepository; +import io.airbyte.config.persistence.domain.Generation; +import io.airbyte.config.persistence.helper.CatalogGenerationSetter; import io.airbyte.featureflag.CheckWithCatalog; import io.airbyte.featureflag.FeatureFlagClient; import io.airbyte.featureflag.Workspace; @@ -105,6 +105,7 @@ import java.io.IOException; import java.time.Instant; import java.time.LocalDate; +import java.time.LocalTime; import java.time.ZoneId; import java.time.ZonedDateTime; import java.util.ArrayList; @@ -147,9 +148,12 @@ public class ConnectionsHandler { private final Integer maxDaysOfOnlyFailedJobsBeforeConnectionDisable; private final Integer maxFailedJobsInARowBeforeConnectionDisable; private final int maxJobLookback = 10; + private final StreamRefreshesHandler streamRefreshesHandler; + private final StreamGenerationRepository streamGenerationRepository; + private final CatalogGenerationSetter catalogGenerationSetter; @Inject - public ConnectionsHandler( + public ConnectionsHandler(final StreamRefreshesHandler streamRefreshesHandler, final JobPersistence jobPersistence, final ConfigRepository configRepository, @Named("uuidGenerator") final Supplier uuidGenerator, @@ -162,7 +166,9 @@ public ConnectionsHandler( final ConnectorDefinitionSpecificationHandler connectorSpecHandler, final JobNotifier jobNotifier, @Value("${airbyte.server.connection.disable.max-days}") final Integer maxDaysOfOnlyFailedJobsBeforeConnectionDisable, - @Value("${airbyte.server.connection.disable.max-jobs}") final Integer maxFailedJobsInARowBeforeConnectionDisable) { + @Value("${airbyte.server.connection.disable.max-jobs}") final Integer maxFailedJobsInARowBeforeConnectionDisable, + final StreamGenerationRepository streamGenerationRepository, + final 
CatalogGenerationSetter catalogGenerationSetter) { this.jobPersistence = jobPersistence; this.configRepository = configRepository; this.uuidGenerator = uuidGenerator; @@ -176,6 +182,9 @@ public ConnectionsHandler( this.jobNotifier = jobNotifier; this.maxDaysOfOnlyFailedJobsBeforeConnectionDisable = maxDaysOfOnlyFailedJobsBeforeConnectionDisable; this.maxFailedJobsInARowBeforeConnectionDisable = maxFailedJobsInARowBeforeConnectionDisable; + this.streamRefreshesHandler = streamRefreshesHandler; + this.streamGenerationRepository = streamGenerationRepository; + this.catalogGenerationSetter = catalogGenerationSetter; } /** @@ -337,9 +346,6 @@ InternalOperationResult autoDisableConnection(final UUID connectionId, final Ins } else if (numFailures == maxFailedJobsInARowBeforeConnectionDisableWarning && !warningPreviouslySentForMaxDays) { // warn if number of consecutive failures hits 50% of MaxFailedJobsInARow jobNotifier.autoDisableConnectionWarning(optionalLastJob.get(), attemptStats); - // explicitly send to email if customer.io api key is set, since email notification cannot be set by - // configs through UI yet - jobNotifier.notifyJobByEmail(null, CONNECTION_DISABLED_WARNING_NOTIFICATION, optionalLastJob.get(), attemptStats); return new InternalOperationResult().succeeded(false); } @@ -371,9 +377,6 @@ InternalOperationResult autoDisableConnection(final UUID connectionId, final Ins if (firstReplicationOlderThanMaxDisableWarningDays && successOlderThanPrevFailureByMaxWarningDays) { jobNotifier.autoDisableConnectionWarning(optionalLastJob.get(), attemptStats); - // explicitly send to email if customer.io api key is set, since email notification cannot be set by - // configs through UI yet - jobNotifier.notifyJobByEmail(null, CONNECTION_DISABLED_WARNING_NOTIFICATION, optionalLastJob.get(), attemptStats); } return new InternalOperationResult().succeeded(false); } @@ -387,9 +390,6 @@ private void disableConnection(final StandardSync standardSync, final Job lastJo 
attemptStats.add(jobPersistence.getAttemptStats(lastJob.getId(), attempt.getAttemptNumber())); } jobNotifier.autoDisableConnection(lastJob, attemptStats); - // explicitly send to email if customer.io api key is set, since email notification cannot be set by - // configs through UI yet - jobNotifier.notifyJobByEmail(null, CONNECTION_DISABLED_NOTIFICATION, lastJob, attemptStats); } private int getDaysSinceTimestamp(final long currentTimestampInSeconds, final long timestampInSeconds) { @@ -718,6 +718,11 @@ public ConnectionRead getConnection(final UUID connectionId) return buildConnectionRead(connectionId); } + public ConnectionRead getConnectionForJob(final UUID connectionId, final Long jobId) + throws JsonValidationException, IOException, ConfigNotFoundException { + return buildConnectionRead(connectionId, jobId); + } + public CatalogDiff getDiff(final AirbyteCatalog oldCatalog, final AirbyteCatalog newCatalog, final ConfiguredAirbyteCatalog configuredCatalog) throws JsonValidationException { return new CatalogDiff().transforms(CatalogHelpers.getCatalogDiff( @@ -805,6 +810,7 @@ public Optional getConnectionAirbyteCatalog(final UUID connectio public void deleteConnection(final UUID connectionId) throws JsonValidationException, ConfigNotFoundException, IOException { connectionHelper.deleteConnection(connectionId); eventRunner.forceDeleteConnection(connectionId); + streamRefreshesHandler.deleteRefreshesForConnection(connectionId); } private ConnectionRead buildConnectionRead(final UUID connectionId) @@ -813,6 +819,31 @@ private ConnectionRead buildConnectionRead(final UUID connectionId) return ApiPojoConverters.internalToConnectionRead(standardSync); } + private ConnectionRead buildConnectionRead(final UUID connectionId, final Long jobId) + throws ConfigNotFoundException, IOException, JsonValidationException { + final StandardSync standardSync = configRepository.getStandardSync(connectionId); + final Job job = jobPersistence.getJob(jobId); + final List generations = 
streamGenerationRepository.getMaxGenerationOfStreamsForConnectionId(connectionId); + final Optional catalogWithGeneration; + if (job.getConfigType() == ConfigType.SYNC) { + catalogWithGeneration = Optional.of(catalogGenerationSetter.updateCatalogWithGenerationAndSyncInformation( + standardSync.getCatalog(), + jobId, + List.of(), + generations)); + } else if (job.getConfigType() == ConfigType.REFRESH) { + catalogWithGeneration = Optional.of(catalogGenerationSetter.updateCatalogWithGenerationAndSyncInformation( + standardSync.getCatalog(), + jobId, + job.getConfig().getRefresh().getStreamsToRefresh(), + generations)); + } else { + catalogWithGeneration = Optional.empty(); + } + catalogWithGeneration.ifPresent(updatedCatalog -> standardSync.setCatalog(updatedCatalog)); + return ApiPojoConverters.internalToConnectionRead(standardSync); + } + public ConnectionReadList listConnectionsForWorkspaces(final ListConnectionsForWorkspacesRequestBody listConnectionsForWorkspacesRequestBody) throws IOException { @@ -870,7 +901,7 @@ public List getConnectionStatuses( final List connectionIds = connectionStatusesRequestBody.getConnectionIds(); final List result = new ArrayList<>(); for (final UUID connectionId : connectionIds) { - final List jobs = jobPersistence.listJobs(Set.of(JobConfig.ConfigType.SYNC, JobConfig.ConfigType.RESET_CONNECTION), + final List jobs = jobPersistence.listJobs(REPLICATION_TYPES, connectionId.toString(), maxJobLookback); final boolean isRunning = jobs.stream().anyMatch(job -> JobStatus.NON_TERMINAL_STATUSES.contains(job.getStatus())); @@ -923,7 +954,7 @@ public List getConnectionDataHistory(final Connec final ZoneId requestZone = ZoneId.of(connectionDataHistoryRequestBody.getTimezone()); // Start time in designated timezone - final ZonedDateTime endTimeInUserTimeZone = Instant.now().atZone(ZoneId.of(connectionDataHistoryRequestBody.getTimezone())); + final ZonedDateTime endTimeInUserTimeZone = 
Instant.now().atZone(requestZone).toLocalDate().atTime(LocalTime.MAX).atZone(requestZone); final ZonedDateTime startTimeInUserTimeZone = endTimeInUserTimeZone.toLocalDate().atStartOfDay(requestZone).minusDays(29); // Convert start time to UTC (since that's what the database uses) final Instant startTimeInUTC = startTimeInUserTimeZone.toInstant(); @@ -1122,8 +1153,11 @@ public void trackSchemaChange(final UUID workspaceId, final UUID connectionId, f payload.put("connection_id", connectionId); payload.put("schema_change_event_date", changeEventTimeline); payload.put("stream_change_type", streamTransform.getTransformType().toString()); - payload.put("stream_namespace", streamTransform.getStreamDescriptor().getNamespace()); - payload.put("stream_name", streamTransform.getStreamDescriptor().getName()); + StreamDescriptor streamDescriptor = streamTransform.getStreamDescriptor(); + if (streamDescriptor.getNamespace() != null) { + payload.put("stream_namespace", streamDescriptor.getNamespace()); + } + payload.put("stream_name", streamDescriptor.getName()); if (streamTransform.getTransformType() == TransformTypeEnum.UPDATE_STREAM) { payload.put("stream_field_changes", Jsons.serialize(streamTransform.getUpdateStream())); } diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/ConnectorBuilderProjectsHandler.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/ConnectorBuilderProjectsHandler.java index d828a355ca6..17c1339f485 100644 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/ConnectorBuilderProjectsHandler.java +++ b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/ConnectorBuilderProjectsHandler.java @@ -27,6 +27,7 @@ import io.airbyte.api.model.generated.SourceDefinitionIdBody; import io.airbyte.api.model.generated.WorkspaceIdRequestBody; import io.airbyte.commons.json.Jsons; +import io.airbyte.commons.server.handlers.helpers.BuilderProjectUpdater; import 
io.airbyte.commons.server.handlers.helpers.DeclarativeSourceManifestInjector; import io.airbyte.config.ActorDefinitionVersion; import io.airbyte.config.ConfigSchema; @@ -77,6 +78,8 @@ public class ConnectorBuilderProjectsHandler { private final ConfigRepository configRepository; + + private final BuilderProjectUpdater buildProjectUpdater; private final Supplier uuidSupplier; private final DeclarativeSourceManifestInjector manifestInjector; private final CdkVersionProvider cdkVersionProvider; @@ -89,11 +92,12 @@ public class ConnectorBuilderProjectsHandler { private final JsonSecretsProcessor secretsProcessor; private final ConnectorBuilderServerApi connectorBuilderServerApiClient; - static final String SPEC_FIELD = "spec"; - static final String CONNECTION_SPECIFICATION_FIELD = "connection_specification"; + public static final String SPEC_FIELD = "spec"; + public static final String CONNECTION_SPECIFICATION_FIELD = "connection_specification"; @Inject public ConnectorBuilderProjectsHandler(final ConfigRepository configRepository, + final BuilderProjectUpdater builderProjectUpdater, final CdkVersionProvider cdkVersionProvider, @Named("uuidGenerator") final Supplier uuidSupplier, final DeclarativeSourceManifestInjector manifestInjector, @@ -106,6 +110,7 @@ public ConnectorBuilderProjectsHandler(final ConfigRepository configRepository, @Named("jsonSecretsProcessorWithCopy") final JsonSecretsProcessor secretsProcessor, final ConnectorBuilderServerApi connectorBuilderServerApiClient) { this.configRepository = configRepository; + this.buildProjectUpdater = builderProjectUpdater; this.cdkVersionProvider = cdkVersionProvider; this.uuidSupplier = uuidSupplier; this.manifestInjector = manifestInjector; @@ -154,22 +159,12 @@ public ConnectorBuilderProjectIdWithWorkspaceId createConnectorBuilderProject(fi } public void updateConnectorBuilderProject(final ExistingConnectorBuilderProjectWithWorkspaceId projectUpdate) - throws IOException, ConfigNotFoundException { + throws 
ConfigNotFoundException, IOException { + final ConnectorBuilderProject connectorBuilderProject = configRepository.getConnectorBuilderProject(projectUpdate.getBuilderProjectId(), false); validateProjectUnderRightWorkspace(connectorBuilderProject, projectUpdate.getWorkspaceId()); - if (connectorBuilderProject.getActorDefinitionId() != null) { - configRepository.updateBuilderProjectAndActorDefinition(projectUpdate.getBuilderProjectId(), - projectUpdate.getWorkspaceId(), - projectUpdate.getBuilderProject().getName(), - projectUpdate.getBuilderProject().getDraftManifest(), - connectorBuilderProject.getActorDefinitionId()); - } else { - configRepository.writeBuilderProjectDraft(projectUpdate.getBuilderProjectId(), - projectUpdate.getWorkspaceId(), - projectUpdate.getBuilderProject().getName(), - projectUpdate.getBuilderProject().getDraftManifest()); - } + buildProjectUpdater.persistBuilderProjectUpdate(projectUpdate); } public void deleteConnectorBuilderProject(final ConnectorBuilderProjectIdWithWorkspaceId projectDelete) @@ -392,7 +387,6 @@ private ConnectorBuilderHttpRequest convertHttpRequest(@Nullable final HttpReque ? 
new ConnectorBuilderHttpRequest() .url(request.getUrl()) .httpMethod(ConnectorBuilderHttpRequest.HttpMethodEnum.fromString(request.getHttpMethod().getValue())) - .parameters(request.getParameters()) .body(request.getBody()) .headers(request.getHeaders()) : null; diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/ConnectorDocumentationHandler.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/ConnectorDocumentationHandler.java index a9f5bd8c8db..e48b8c1df77 100644 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/ConnectorDocumentationHandler.java +++ b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/ConnectorDocumentationHandler.java @@ -52,25 +52,15 @@ public ConnectorDocumentationRead getConnectorDocumentation(final ConnectorDocum final String dockerRepo = actorDefinitionVersion.getDockerRepository(); final String version = actorDefinitionVersion.getDockerImageTag(); - // prioritize versioned over latest, then inapp over full - final Optional versionedInappDocString = remoteDefinitionsProvider.getConnectorDocumentation(dockerRepo, version, true); - if (versionedInappDocString.isPresent()) { - return new ConnectorDocumentationRead().doc(versionedInappDocString.get()); + // prioritize versioned over latest + final Optional versionedDocString = remoteDefinitionsProvider.getConnectorDocumentation(dockerRepo, version); + if (versionedDocString.isPresent()) { + return new ConnectorDocumentationRead().doc(versionedDocString.get()); } - final Optional versionedFullDocString = remoteDefinitionsProvider.getConnectorDocumentation(dockerRepo, version, false); - if (versionedFullDocString.isPresent()) { - return new ConnectorDocumentationRead().doc(versionedFullDocString.get()); - } - - final Optional latestInappDocString = remoteDefinitionsProvider.getConnectorDocumentation(dockerRepo, LATEST, true); - if (latestInappDocString.isPresent()) { - return new 
ConnectorDocumentationRead().doc(latestInappDocString.get()); - } - - final Optional latestFullDocString = remoteDefinitionsProvider.getConnectorDocumentation(dockerRepo, LATEST, false); - if (latestFullDocString.isPresent()) { - return new ConnectorDocumentationRead().doc(latestFullDocString.get()); + final Optional latestDocString = remoteDefinitionsProvider.getConnectorDocumentation(dockerRepo, LATEST); + if (latestDocString.isPresent()) { + return new ConnectorDocumentationRead().doc(latestDocString.get()); } throw new NotFoundException(String.format("Could not find any documentation for connector %s", dockerRepo)); diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/DestinationHandler.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/DestinationHandler.java index c523016a134..18e80cb68fe 100644 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/DestinationHandler.java +++ b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/DestinationHandler.java @@ -38,6 +38,7 @@ import io.airbyte.config.persistence.ConfigRepository; import io.airbyte.config.persistence.ConfigRepository.ResourcesQueryPaginated; import io.airbyte.config.secrets.JsonSecretsProcessor; +import io.airbyte.data.helpers.ActorDefinitionVersionUpdater; import io.airbyte.data.services.DestinationService; import io.airbyte.featureflag.FeatureFlagClient; import io.airbyte.featureflag.UseIconUrlInApiResponse; @@ -73,6 +74,7 @@ public class DestinationHandler { private final DestinationService destinationService; private final FeatureFlagClient featureFlagClient; private final ActorDefinitionHandlerHelper actorDefinitionHandlerHelper; + private final ActorDefinitionVersionUpdater actorDefinitionVersionUpdater; @VisibleForTesting public DestinationHandler(final ConfigRepository configRepository, @@ -85,7 +87,8 @@ public DestinationHandler(final ConfigRepository configRepository, final 
ActorDefinitionVersionHelper actorDefinitionVersionHelper, final DestinationService destinationService, final FeatureFlagClient featureFlagClient, - final ActorDefinitionHandlerHelper actorDefinitionHandlerHelper) { + final ActorDefinitionHandlerHelper actorDefinitionHandlerHelper, + final ActorDefinitionVersionUpdater actorDefinitionVersionUpdater) { this.configRepository = configRepository; this.validator = integrationSchemaValidation; this.connectionsHandler = connectionsHandler; @@ -97,6 +100,7 @@ public DestinationHandler(final ConfigRepository configRepository, this.destinationService = destinationService; this.featureFlagClient = featureFlagClient; this.actorDefinitionHandlerHelper = actorDefinitionHandlerHelper; + this.actorDefinitionVersionUpdater = actorDefinitionVersionUpdater; } public DestinationRead createDestination(final DestinationCreate destinationCreate) @@ -231,7 +235,7 @@ public void upgradeDestinationVersion(final DestinationIdRequestBody destination final DestinationConnection destinationConnection = configRepository.getDestinationConnection(destinationIdRequestBody.getDestinationId()); final StandardDestinationDefinition destinationDefinition = configRepository.getStandardDestinationDefinition(destinationConnection.getDestinationDefinitionId()); - configRepository.setActorDefaultVersion(destinationIdRequestBody.getDestinationId(), destinationDefinition.getDefaultVersionId()); + actorDefinitionVersionUpdater.upgradeActorVersion(destinationConnection, destinationDefinition); } public DestinationRead getDestination(final DestinationIdRequestBody destinationIdRequestBody) diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/InstanceConfigurationHandler.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/InstanceConfigurationHandler.java index 2cff3212fdc..a08dc6cb739 100644 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/InstanceConfigurationHandler.java +++ 
b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/InstanceConfigurationHandler.java @@ -14,6 +14,7 @@ import io.airbyte.commons.auth.config.AirbyteKeycloakConfiguration; import io.airbyte.commons.enums.Enums; import io.airbyte.commons.license.ActiveAirbyteLicense; +import io.airbyte.commons.version.AirbyteVersion; import io.airbyte.config.Configs.AirbyteEdition; import io.airbyte.config.Organization; import io.airbyte.config.StandardWorkspace; @@ -40,6 +41,7 @@ public class InstanceConfigurationHandler { private final String webappUrl; private final AirbyteEdition airbyteEdition; + private final AirbyteVersion airbyteVersion; private final Optional airbyteKeycloakConfiguration; private final Optional activeAirbyteLicense; private final WorkspacePersistence workspacePersistence; @@ -55,6 +57,7 @@ public class InstanceConfigurationHandler { public InstanceConfigurationHandler(@Value("${airbyte.webapp-url:null}") final String webappUrl, @Value("${airbyte.tracking.strategy:}") final String trackingStrategy, final AirbyteEdition airbyteEdition, + final AirbyteVersion airbyteVersion, final Optional airbyteKeycloakConfiguration, final Optional activeAirbyteLicense, final WorkspacePersistence workspacePersistence, @@ -64,6 +67,7 @@ public InstanceConfigurationHandler(@Value("${airbyte.webapp-url:null}") final S this.webappUrl = webappUrl; this.trackingStrategy = trackingStrategy; this.airbyteEdition = airbyteEdition; + this.airbyteVersion = airbyteVersion; this.airbyteKeycloakConfiguration = airbyteKeycloakConfiguration; this.activeAirbyteLicense = activeAirbyteLicense; this.workspacePersistence = workspacePersistence; @@ -79,6 +83,7 @@ public InstanceConfigurationResponse getInstanceConfiguration() throws IOExcepti return new InstanceConfigurationResponse() .webappUrl(webappUrl) .edition(Enums.convertTo(airbyteEdition, EditionEnum.class)) + .version(airbyteVersion.serialize()) .licenseType(getLicenseType()) .auth(getAuthConfiguration()) 
.initialSetupComplete(initialSetupComplete) diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/JobHistoryHandler.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/JobHistoryHandler.java index 1a2d5a5c40a..7cd5c41b0db 100644 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/JobHistoryHandler.java +++ b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/JobHistoryHandler.java @@ -5,6 +5,7 @@ package io.airbyte.commons.server.handlers; import static io.airbyte.featureflag.ContextKt.ANONYMOUS; +import static io.airbyte.persistence.job.models.Job.SYNC_REPLICATION_TYPES; import com.google.common.base.Preconditions; import io.airbyte.api.model.generated.AttemptInfoRead; @@ -62,7 +63,6 @@ import jakarta.inject.Singleton; import java.io.IOException; import java.util.ArrayList; -import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -140,6 +140,7 @@ public JobReadList listJobsFor(final JobListRequestBody request) throws IOExcept .stream() .map(type -> Enums.convertTo(type, JobConfig.ConfigType.class)) .collect(Collectors.toSet()); + final String configId = request.getConfigId(); final int pageSize = (request.getPagination() != null && request.getPagination().getPageSize() != null) ? request.getPagination().getPageSize() @@ -341,9 +342,17 @@ private static Map getStreamsToSyncMode(Job jo } private static List extractStreams(Job job) { - return job.getConfig().getSync() != null - ? job.getConfig().getSync().getConfiguredAirbyteCatalog().getStreams() - : List.of(); + if (job.getConfigType() == ConfigType.SYNC) { + return job.getConfig().getSync() != null + ? job.getConfig().getSync().getConfiguredAirbyteCatalog().getStreams() + : List.of(); + } else if (job.getConfigType() == ConfigType.REFRESH) { + return job.getConfig().getRefresh() != null + ? 
job.getConfig().getRefresh().getConfiguredAirbyteCatalog().getStreams() + : List.of(); + } else { + return List.of(); + } } public JobInfoRead getJobInfo(final JobIdRequestBody jobIdRequestBody) throws IOException { @@ -402,7 +411,7 @@ public JobDebugInfoRead getJobDebugInfo(final JobIdRequestBody jobIdRequestBody) public Optional getLatestRunningSyncJob(final UUID connectionId) throws IOException { final List nonTerminalSyncJobsForConnection = jobPersistence.listJobsForConnectionWithStatuses( connectionId, - Collections.singleton(ConfigType.SYNC), + SYNC_REPLICATION_TYPES, JobStatus.NON_TERMINAL_STATUSES); // there *should* only be a single running sync job for a connection, but diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/JobInputHandler.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/JobInputHandler.java index cde865497c9..2bceb84571f 100644 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/JobInputHandler.java +++ b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/JobInputHandler.java @@ -7,6 +7,8 @@ import static io.airbyte.config.helpers.ResourceRequirementsUtils.getResourceRequirementsForJobType; import static io.airbyte.metrics.lib.ApmTraceConstants.Tags.ATTEMPT_NUMBER_KEY; import static io.airbyte.metrics.lib.ApmTraceConstants.Tags.JOB_ID_KEY; +import static io.airbyte.persistence.job.models.Job.REPLICATION_TYPES; +import static io.airbyte.persistence.job.models.Job.SYNC_REPLICATION_TYPES; import com.fasterxml.jackson.databind.JsonNode; import io.airbyte.api.model.generated.CheckInput; @@ -35,6 +37,7 @@ import io.airbyte.config.JobResetConnectionConfig; import io.airbyte.config.JobSyncConfig; import io.airbyte.config.JobTypeResourceLimit.JobType; +import io.airbyte.config.RefreshConfig; import io.airbyte.config.ResetSourceConfiguration; import io.airbyte.config.ResourceRequirements; import io.airbyte.config.SourceConnection; @@ -144,7 
+147,7 @@ public Object getJobInput(final SyncInput input) { final JobConfig.ConfigType jobConfigType = job.getConfig().getConfigType(); - if (JobConfig.ConfigType.SYNC.equals(jobConfigType)) { + if (SYNC_REPLICATION_TYPES.contains(jobConfigType)) { final SourceConnection source = configRepository.getSourceConnection(standardSync.getSourceId()); sourceVersion = actorDefinitionVersionHelper.getSourceVersion( configRepository.getStandardSourceDefinition(source.getSourceDefinitionId()), @@ -226,8 +229,6 @@ public Object getJobInput(final SyncInput input) { .withDestinationConfiguration(attemptSyncConfig.getDestinationConfiguration()) .withOperationSequence(config.getOperationSequence()) .withWebhookOperationConfigs(config.getWebhookOperationConfigs()) - .withCatalog(config.getConfiguredAirbyteCatalog()) - .withState(attemptSyncConfig.getState()) .withSyncResourceRequirements(config.getSyncResourceRequirements()) .withConnectionId(connectionId) .withWorkspaceId(config.getWorkspaceId()) @@ -355,12 +356,32 @@ private JobSyncConfig getJobSyncConfig(final long jobId, final JobConfig jobConf .withIsDestinationCustomConnector(resetConnection.getIsDestinationCustomConnector()) .withWebhookOperationConfigs(resetConnection.getWebhookOperationConfigs()) .withWorkspaceId(resetConnection.getWorkspaceId()); + } else if (JobConfig.ConfigType.REFRESH.equals(jobConfigType)) { + final RefreshConfig refreshConfig = jobConfig.getRefresh(); + + return new JobSyncConfig() + .withNamespaceDefinition(refreshConfig.getNamespaceDefinition()) + .withNamespaceFormat(refreshConfig.getNamespaceFormat()) + .withPrefix(refreshConfig.getPrefix()) + .withSourceDockerImage(refreshConfig.getSourceDockerImage()) + .withSourceProtocolVersion(refreshConfig.getSourceProtocolVersion()) + .withSourceDefinitionVersionId(refreshConfig.getSourceDefinitionVersionId()) + .withDestinationDockerImage(refreshConfig.getDestinationDockerImage()) + 
.withDestinationProtocolVersion(refreshConfig.getDestinationProtocolVersion()) + .withDestinationDefinitionVersionId(refreshConfig.getDestinationDefinitionVersionId()) + .withConfiguredAirbyteCatalog(refreshConfig.getConfiguredAirbyteCatalog()) + .withOperationSequence(refreshConfig.getOperationSequence()) + .withSyncResourceRequirements(refreshConfig.getSyncResourceRequirements()) + .withIsSourceCustomConnector(refreshConfig.getIsSourceCustomConnector()) + .withIsDestinationCustomConnector(refreshConfig.getIsDestinationCustomConnector()) + .withWebhookOperationConfigs(refreshConfig.getWebhookOperationConfigs()) + .withWorkspaceId(refreshConfig.getWorkspaceId()); } else { throw new IllegalStateException( String.format("Unexpected config type %s for job %d. The only supported config types for this activity are (%s)", jobConfigType, jobId, - List.of(JobConfig.ConfigType.SYNC, JobConfig.ConfigType.RESET_CONNECTION))); + REPLICATION_TYPES)); } } diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/JobsHandler.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/JobsHandler.java index f959ddaaa01..a7ab2251e42 100644 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/JobsHandler.java +++ b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/JobsHandler.java @@ -78,9 +78,9 @@ public InternalOperationResult jobFailure(final JobFailureRequest input) { attemptStats.add(jobPersistence.getAttemptStats(jobId, attempt.getAttemptNumber())); } if (!job.getConfigType().equals(JobConfig.ConfigType.RESET_CONNECTION)) { - jobNotifier.failJob(input.getReason(), job, attemptStats); + jobNotifier.failJob(job, attemptStats); } - jobCreationAndStatusUpdateHelper.emitJobToReleaseStagesMetric(OssMetricsRegistry.JOB_FAILED_BY_RELEASE_STAGE, job); + jobCreationAndStatusUpdateHelper.emitJobToReleaseStagesMetric(OssMetricsRegistry.JOB_FAILED_BY_RELEASE_STAGE, job, input); final UUID 
connectionId = UUID.fromString(job.getScope()); if (!connectionId.equals(input.getConnectionId())) { @@ -160,7 +160,7 @@ public InternalOperationResult jobSuccessWithAttemptNumber(final JobSuccessWithA if (!job.getConfigType().equals(JobConfig.ConfigType.RESET_CONNECTION)) { jobNotifier.successJob(job, attemptStats); } - jobCreationAndStatusUpdateHelper.emitJobToReleaseStagesMetric(OssMetricsRegistry.JOB_SUCCEEDED_BY_RELEASE_STAGE, job); + jobCreationAndStatusUpdateHelper.emitJobToReleaseStagesMetric(OssMetricsRegistry.JOB_SUCCEEDED_BY_RELEASE_STAGE, job, input); jobCreationAndStatusUpdateHelper.trackCompletion(job, JobStatus.SUCCEEDED); return new InternalOperationResult().succeeded(true); @@ -238,7 +238,6 @@ public void persistJobCancellation(final UUID connectionId, final long jobId, fi attemptStats.add(jobPersistence.getAttemptStats(jobId, attempt.getAttemptNumber())); } jobCreationAndStatusUpdateHelper.emitJobToReleaseStagesMetric(OssMetricsRegistry.JOB_CANCELLED_BY_RELEASE_STAGE, job); - jobNotifier.failJob("Job was cancelled", job, attemptStats); jobCreationAndStatusUpdateHelper.trackCompletion(job, JobStatus.FAILED); } catch (final IOException e) { jobCreationAndStatusUpdateHelper.trackCompletionForInternalFailure(jobId, connectionId, attemptNumber, diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/OAuthHandler.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/OAuthHandler.java index 0b61e10476e..bdc560d2cc1 100644 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/OAuthHandler.java +++ b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/OAuthHandler.java @@ -262,11 +262,11 @@ public OAuthConsentRead getDestinationOAuthConsent(final DestinationOauthConsent public CompleteOAuthResponse completeSourceOAuthHandleReturnSecret(final CompleteSourceOauthRequest completeSourceOauthRequest) throws JsonValidationException, ConfigNotFoundException, 
IOException { - final CompleteOAuthResponse oAuthTokens = completeSourceOAuth(completeSourceOauthRequest); - if (oAuthTokens != null && completeSourceOauthRequest.getReturnSecretCoordinate()) { - return writeOAuthResponseSecret(completeSourceOauthRequest.getWorkspaceId(), oAuthTokens); + final CompleteOAuthResponse completeOAuthResponse = completeSourceOAuth(completeSourceOauthRequest); + if (completeOAuthResponse != null && completeSourceOauthRequest.getReturnSecretCoordinate()) { + return writeOAuthResponseSecret(completeSourceOauthRequest.getWorkspaceId(), completeOAuthResponse); } else { - return oAuthTokens; + return completeOAuthResponse; } } diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/OrganizationsHandler.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/OrganizationsHandler.java index bbe77adfb96..8b4da769343 100644 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/OrganizationsHandler.java +++ b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/OrganizationsHandler.java @@ -10,6 +10,7 @@ import io.airbyte.api.model.generated.OrganizationRead; import io.airbyte.api.model.generated.OrganizationReadList; import io.airbyte.api.model.generated.OrganizationUpdateRequestBody; +import io.airbyte.commons.server.errors.ConflictException; import io.airbyte.config.ConfigSchema; import io.airbyte.config.Organization; import io.airbyte.config.Permission; @@ -17,7 +18,8 @@ import io.airbyte.config.persistence.ConfigNotFoundException; import io.airbyte.config.persistence.ConfigRepository.ResourcesByUserQueryPaginated; import io.airbyte.config.persistence.OrganizationPersistence; -import io.airbyte.config.persistence.PermissionPersistence; +import io.airbyte.data.services.PermissionRedundantException; +import io.airbyte.data.services.PermissionService; import jakarta.inject.Inject; import jakarta.inject.Named; import jakarta.inject.Singleton; @@ -28,8 
+30,6 @@ import java.util.function.Supplier; import java.util.stream.Collectors; import org.jooq.tools.StringUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; /** * OrganizationHandler for handling organization resource related operation. @@ -39,18 +39,17 @@ @Singleton public class OrganizationsHandler { - private static final Logger LOGGER = LoggerFactory.getLogger(OrganizationsHandler.class); - private final PermissionPersistence permissionPersistence; + private final PermissionService permissionService; private final OrganizationPersistence organizationPersistence; private final Supplier uuidGenerator; @Inject public OrganizationsHandler(final OrganizationPersistence organizationPersistence, - final PermissionPersistence permissionPersistence, + final PermissionService permissionService, @Named("uuidGenerator") final Supplier uuidGenerator) { this.organizationPersistence = organizationPersistence; - this.permissionPersistence = permissionPersistence; + this.permissionService = permissionService; this.uuidGenerator = uuidGenerator; } @@ -80,13 +79,17 @@ public OrganizationRead createOrganization(final OrganizationCreateRequestBody o .withPba(pba) .withOrgLevelBilling(orgLevelBilling); organizationPersistence.createOrganization(organization); - // Also create an OrgAdmin permission. - final Permission orgAdminPermission = new Permission() - .withPermissionId(uuidGenerator.get()) - .withUserId(userId) - .withOrganizationId(orgId) - .withPermissionType(PermissionType.ORGANIZATION_ADMIN); - permissionPersistence.writePermission(orgAdminPermission); + + try { + // Also create an OrgAdmin permission. 
+ permissionService.createPermission(new Permission() + .withPermissionId(uuidGenerator.get()) + .withUserId(userId) + .withOrganizationId(orgId) + .withPermissionType(PermissionType.ORGANIZATION_ADMIN)); + } catch (final PermissionRedundantException e) { + throw new ConflictException(e.getMessage(), e); + } return buildOrganizationRead(organization); } diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/PermissionHandler.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/PermissionHandler.java index 8bebee24eac..2cbe9121542 100644 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/PermissionHandler.java +++ b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/PermissionHandler.java @@ -17,13 +17,16 @@ import io.airbyte.api.model.generated.PermissionsCheckMultipleWorkspacesRequest; import io.airbyte.commons.enums.Enums; import io.airbyte.commons.lang.Exceptions; +import io.airbyte.commons.server.errors.ConflictException; import io.airbyte.commons.server.errors.OperationNotAllowedException; import io.airbyte.config.ConfigSchema; import io.airbyte.config.Permission; import io.airbyte.config.helpers.PermissionHelper; import io.airbyte.config.persistence.ConfigNotFoundException; import io.airbyte.config.persistence.PermissionPersistence; -import io.airbyte.config.persistence.SQLOperationNotAllowedException; +import io.airbyte.data.services.PermissionRedundantException; +import io.airbyte.data.services.PermissionService; +import io.airbyte.data.services.RemoveLastOrgAdminPermissionException; import io.airbyte.data.services.WorkspaceService; import io.airbyte.validation.json.JsonValidationException; import jakarta.inject.Named; @@ -35,7 +38,6 @@ import java.util.UUID; import java.util.function.Supplier; import java.util.stream.Collectors; -import org.jooq.exception.DataAccessException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -50,14 +52,17 @@ 
public class PermissionHandler { private final Supplier uuidGenerator; private final PermissionPersistence permissionPersistence; private final WorkspaceService workspaceService; + private final PermissionService permissionService; public PermissionHandler( final PermissionPersistence permissionPersistence, final WorkspaceService workspaceService, - @Named("uuidGenerator") final Supplier uuidGenerator) { + @Named("uuidGenerator") final Supplier uuidGenerator, + final PermissionService permissionService) { this.uuidGenerator = uuidGenerator; this.permissionPersistence = permissionPersistence; this.workspaceService = workspaceService; + this.permissionService = permissionService; } /** @@ -91,15 +96,11 @@ public PermissionRead createPermission(final PermissionCreate permissionCreate) .withWorkspaceId(permissionCreate.getWorkspaceId()) .withOrganizationId(permissionCreate.getOrganizationId()); - permissionPersistence.writePermission(permission); - final PermissionRead result; try { - result = buildPermissionRead(permissionId); - } catch (final ConfigNotFoundException ex) { - LOGGER.error("Config not found for permissionId: {} in CreatePermission.", permissionId); - throw new IOException(ex); + return buildPermissionRead(permissionService.createPermission(permission)); + } catch (final PermissionRedundantException e) { + throw new ConflictException(e.getMessage(), e); } - return result; } private Permission getPermissionById(final UUID permissionId) throws ConfigNotFoundException, IOException { @@ -185,12 +186,11 @@ public PermissionRead getPermission(final PermissionIdRequestBody permissionIdRe * "workspace_xxx"/"instance_admin" * * @param permissionUpdate The permission update. - * @return The updated permission. * @throws IOException if unable to update the permissions. * @throws ConfigNotFoundException if unable to update the permissions. * @throws OperationNotAllowedException if update is prevented by business logic. 
*/ - public PermissionRead updatePermission(final PermissionUpdate permissionUpdate) + public void updatePermission(final PermissionUpdate permissionUpdate) throws IOException, ConfigNotFoundException, OperationNotAllowedException, JsonValidationException { // INSTANCE_ADMIN permissions are only created in special cases, so we block them here. @@ -207,15 +207,10 @@ public PermissionRead updatePermission(final PermissionUpdate permissionUpdate) .withWorkspaceId(existingPermission.getWorkspaceId()) // cannot be updated .withUserId(existingPermission.getUserId()); // cannot be updated try { - permissionPersistence.writePermission(updatedPermission); - } catch (final DataAccessException e) { - if (e.getCause() instanceof SQLOperationNotAllowedException) { - throw new OperationNotAllowedException(e.getCause().getMessage(), e); - } else { - throw new IOException(e); - } + permissionService.updatePermission(updatedPermission); + } catch (final RemoveLastOrgAdminPermissionException e) { + throw new ConflictException(e.getMessage(), e); } - return buildPermissionRead(permissionUpdate.getPermissionId()); } /** @@ -399,32 +394,35 @@ public PermissionReadList listPermissionsByUser(final UUID userId) throws IOExce * Deletes a permission. * * @param permissionIdRequestBody The permission to be deleted. - * @throws IOException if unable to delete the permission. - * @throws OperationNotAllowedException if deletion is prevented by business logic. + * @throws ConflictException if deletion is prevented by business logic. 
*/ - public void deletePermission(final PermissionIdRequestBody permissionIdRequestBody) throws IOException { + public void deletePermission(final PermissionIdRequestBody permissionIdRequestBody) { try { - permissionPersistence.deletePermissionById(permissionIdRequestBody.getPermissionId()); - } catch (final DataAccessException e) { - if (e.getCause() instanceof SQLOperationNotAllowedException) { - throw new OperationNotAllowedException(e.getCause().getMessage(), e); - } else { - throw new IOException(e); - } + permissionService.deletePermission(permissionIdRequestBody.getPermissionId()); + } catch (final RemoveLastOrgAdminPermissionException e) { + throw new ConflictException(e.getMessage(), e); } } /** * Delete all permission records that match a particular userId and workspaceId. */ - public void deleteUserFromWorkspace(final PermissionDeleteUserFromWorkspaceRequestBody deleteUserFromWorkspaceRequestBody) throws IOException { + public void deleteUserFromWorkspace(final PermissionDeleteUserFromWorkspaceRequestBody deleteUserFromWorkspaceRequestBody) + throws IOException { final UUID userId = deleteUserFromWorkspaceRequestBody.getUserIdToRemove(); final UUID workspaceId = deleteUserFromWorkspaceRequestBody.getWorkspaceId(); // delete all workspace-level permissions that match the userId and workspaceId - permissionPersistence.listPermissionsByUser(userId).stream() + final List userWorkspacePermissionIds = permissionPersistence.listPermissionsByUser(userId).stream() .filter(permission -> permission.getWorkspaceId() != null && permission.getWorkspaceId().equals(workspaceId)) - .forEach(permission -> Exceptions.toRuntime(() -> permissionPersistence.deletePermissionById(permission.getPermissionId()))); + .map(Permission::getPermissionId) + .toList(); + + try { + permissionService.deletePermissions(userWorkspacePermissionIds); + } catch (final RemoveLastOrgAdminPermissionException e) { + throw new ConflictException(e.getMessage(), e); + } } } diff --git 
a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/SchedulerHandler.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/SchedulerHandler.java index 28eb8372761..f975f7da396 100644 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/SchedulerHandler.java +++ b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/SchedulerHandler.java @@ -82,6 +82,7 @@ import io.airbyte.config.persistence.ConfigNotFoundException; import io.airbyte.config.persistence.ConfigRepository; import io.airbyte.config.persistence.StreamResetPersistence; +import io.airbyte.config.persistence.domain.StreamRefresh; import io.airbyte.config.secrets.SecretsRepositoryWriter; import io.airbyte.config.secrets.persistence.RuntimeSecretPersistence; import io.airbyte.data.services.SecretPersistenceConfigService; @@ -161,6 +162,7 @@ public class SchedulerHandler { private final ConnectorDefinitionSpecificationHandler connectorDefinitionSpecificationHandler; private final WorkspaceService workspaceService; private final SecretPersistenceConfigService secretPersistenceConfigService; + private final StreamRefreshesHandler streamRefreshesHandler; @VisibleForTesting public SchedulerHandler(final ConfigRepository configRepository, @@ -184,7 +186,8 @@ public SchedulerHandler(final ConfigRepository configRepository, final JobTracker jobTracker, final ConnectorDefinitionSpecificationHandler connectorDefinitionSpecificationHandler, final WorkspaceService workspaceService, - final SecretPersistenceConfigService secretPersistenceConfigService) { + final SecretPersistenceConfigService secretPersistenceConfigService, + final StreamRefreshesHandler streamRefreshesHandler) { this.configRepository = configRepository; this.secretsRepositoryWriter = secretsRepositoryWriter; this.synchronousSchedulerClient = synchronousSchedulerClient; @@ -210,6 +213,7 @@ public SchedulerHandler(final ConfigRepository configRepository, 
configRepository, jobNotifier, jobTracker); + this.streamRefreshesHandler = streamRefreshesHandler; } public CheckConnectionRead checkSourceConnectionFromSourceId(final SourceIdRequestBody sourceIdRequestBody) @@ -585,6 +589,7 @@ public JobInfoRead createJob(final JobCreate jobCreate) throws JsonValidationExc final StandardSync standardSync = configRepository.getStandardSync(jobCreate.getConnectionId()); final List streamsToReset = streamResetPersistence.getStreamResets(jobCreate.getConnectionId()); log.info("Found the following streams to reset for connection {}: {}", jobCreate.getConnectionId(), streamsToReset); + final List streamsToRefresh = streamRefreshesHandler.getRefreshesForConnection(jobCreate.getConnectionId()); if (!streamsToReset.isEmpty()) { final DestinationConnection destination = configRepository.getDestinationConnection(standardSync.getDestinationId()); @@ -631,9 +636,17 @@ public JobInfoRead createJob(final JobCreate jobCreate) throws JsonValidationExc ? jobPersistence.getLastReplicationJob(standardSync.getConnectionId()).orElseThrow(() -> new RuntimeException("No job available")).getId() : jobIdOptional.get(); + return jobConverter.getJobInfoRead(jobPersistence.getJob(jobId)); + } else if (!streamsToRefresh.isEmpty()) { + final long jobId = jobFactory.createRefresh(jobCreate.getConnectionId(), streamsToRefresh); + + log.info("New refresh job created, with id: " + jobId); + final Job job = jobPersistence.getJob(jobId); + jobCreationAndStatusUpdateHelper.emitJobToReleaseStagesMetric(OssMetricsRegistry.JOB_CREATED_BY_RELEASE_STAGE, job); + return jobConverter.getJobInfoRead(jobPersistence.getJob(jobId)); } else { - final long jobId = jobFactory.create(jobCreate.getConnectionId()); + final long jobId = jobFactory.createSync(jobCreate.getConnectionId()); log.info("New job created, with id: " + jobId); final Job job = jobPersistence.getJob(jobId); diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/SourceHandler.java 
b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/SourceHandler.java index b644a4417e4..09a40e7dc4c 100644 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/SourceHandler.java +++ b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/SourceHandler.java @@ -48,6 +48,7 @@ import io.airbyte.config.secrets.SecretCoordinate; import io.airbyte.config.secrets.SecretsRepositoryReader; import io.airbyte.config.secrets.persistence.RuntimeSecretPersistence; +import io.airbyte.data.helpers.ActorDefinitionVersionUpdater; import io.airbyte.data.services.SecretPersistenceConfigService; import io.airbyte.data.services.SourceService; import io.airbyte.data.services.WorkspaceService; @@ -85,6 +86,7 @@ public class SourceHandler { private final JsonSecretsProcessor secretsProcessor; private final OAuthConfigSupplier oAuthConfigSupplier; private final ActorDefinitionVersionHelper actorDefinitionVersionHelper; + private final ActorDefinitionVersionUpdater actorDefinitionVersionUpdater; private final FeatureFlagClient featureFlagClient; private final SourceService sourceService; private final WorkspaceService workspaceService; @@ -105,7 +107,8 @@ public SourceHandler(final ConfigRepository configRepository, final SourceService sourceService, final WorkspaceService workspaceService, final SecretPersistenceConfigService secretPersistenceConfigService, - final ActorDefinitionHandlerHelper actorDefinitionHandlerHelper) { + final ActorDefinitionHandlerHelper actorDefinitionHandlerHelper, + final ActorDefinitionVersionUpdater actorDefinitionVersionUpdater) { this.configRepository = configRepository; this.secretsRepositoryReader = secretsRepositoryReader; validator = integrationSchemaValidation; @@ -120,6 +123,7 @@ public SourceHandler(final ConfigRepository configRepository, this.workspaceService = workspaceService; this.secretPersistenceConfigService = secretPersistenceConfigService; this.actorDefinitionHandlerHelper = 
actorDefinitionHandlerHelper; + this.actorDefinitionVersionUpdater = actorDefinitionVersionUpdater; } public SourceRead createSourceWithOptionalSecret(final SourceCreate sourceCreate) @@ -240,7 +244,7 @@ public void upgradeSourceVersion(final SourceIdRequestBody sourceIdRequestBody) throws IOException, JsonValidationException, ConfigNotFoundException { final SourceConnection sourceConnection = configRepository.getSourceConnection(sourceIdRequestBody.getSourceId()); final StandardSourceDefinition sourceDefinition = configRepository.getStandardSourceDefinition(sourceConnection.getSourceDefinitionId()); - configRepository.setActorDefaultVersion(sourceIdRequestBody.getSourceId(), sourceDefinition.getDefaultVersionId()); + actorDefinitionVersionUpdater.upgradeActorVersion(sourceConnection, sourceDefinition); } public SourceRead getSource(final SourceIdRequestBody sourceIdRequestBody) diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/UserHandler.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/UserHandler.java index 7b4178aaa54..44158f34d09 100644 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/UserHandler.java +++ b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/UserHandler.java @@ -25,6 +25,7 @@ import io.airbyte.api.model.generated.UserUpdate; import io.airbyte.api.model.generated.UserWithPermissionInfoRead; import io.airbyte.api.model.generated.UserWithPermissionInfoReadList; +import io.airbyte.api.model.generated.WorkspaceCreateWithId; import io.airbyte.api.model.generated.WorkspaceIdRequestBody; import io.airbyte.api.model.generated.WorkspaceRead; import io.airbyte.api.model.generated.WorkspaceReadList; @@ -35,7 +36,9 @@ import io.airbyte.commons.auth.config.InitialUserConfiguration; import io.airbyte.commons.enums.Enums; import io.airbyte.commons.json.Jsons; +import io.airbyte.commons.server.errors.ConflictException; import 
io.airbyte.commons.server.errors.OperationNotAllowedException; +import io.airbyte.commons.server.handlers.helpers.WorkspaceHelpersKt; import io.airbyte.commons.server.support.UserAuthenticationResolver; import io.airbyte.config.ConfigSchema; import io.airbyte.config.Organization; @@ -49,6 +52,8 @@ import io.airbyte.config.persistence.PermissionPersistence; import io.airbyte.config.persistence.SQLOperationNotAllowedException; import io.airbyte.config.persistence.UserPersistence; +import io.airbyte.data.services.PermissionRedundantException; +import io.airbyte.data.services.PermissionService; import io.airbyte.validation.json.JsonValidationException; import jakarta.inject.Named; import jakarta.inject.Singleton; @@ -74,31 +79,37 @@ public class UserHandler { private final Supplier uuidGenerator; private final UserPersistence userPersistence; private final PermissionPersistence permissionPersistence; + private final PermissionService permissionService; private final PermissionHandler permissionHandler; private final WorkspacesHandler workspacesHandler; private final OrganizationPersistence organizationPersistence; private final UserAuthenticationResolver userAuthenticationResolver; private final Optional initialUserConfiguration; + private final ResourceBootstrapHandlerInterface resourceBootstrapHandler; @VisibleForTesting public UserHandler( final UserPersistence userPersistence, final PermissionPersistence permissionPersistence, + final PermissionService permissionService, final OrganizationPersistence organizationPersistence, final PermissionHandler permissionHandler, final WorkspacesHandler workspacesHandler, @Named("uuidGenerator") final Supplier uuidGenerator, final UserAuthenticationResolver userAuthenticationResolver, - final Optional initialUserConfiguration) { + final Optional initialUserConfiguration, + final ResourceBootstrapHandlerInterface resourceBootstrapHandler) { this.uuidGenerator = uuidGenerator; this.userPersistence = userPersistence; 
this.organizationPersistence = organizationPersistence; this.permissionPersistence = permissionPersistence; + this.permissionService = permissionService; this.workspacesHandler = workspacesHandler; this.permissionHandler = permissionHandler; this.userAuthenticationResolver = userAuthenticationResolver; this.initialUserConfiguration = initialUserConfiguration; + this.resourceBootstrapHandler = resourceBootstrapHandler; } /** @@ -221,18 +232,13 @@ public UserRead updateUser(final UserUpdate userUpdate) throws ConfigNotFoundExc final User user = buildUser(userRead); - // We do not allow update on these fields: userId, authUserId, authProvider. + // We do not allow update on these fields: userId, authUserId, authProvider, and email boolean hasUpdate = false; if (userUpdate.getName() != null) { user.setName(userUpdate.getName()); hasUpdate = true; } - if (userUpdate.getEmail() != null) { - user.setEmail(userUpdate.getEmail()); - hasUpdate = true; - } - if (userUpdate.getCompanyName() != null) { user.setCompanyName(userUpdate.getCompanyName()); hasUpdate = true; @@ -299,7 +305,6 @@ private void deleteUser(final UserRead userRead) throws ConfigNotFoundException, .authProvider(userRead.getAuthProvider()) .status(UserStatus.DISABLED) .companyName(userRead.getCompanyName()) - .email(userRead.getEmail()) .news(userRead.getNews()); updateUser(userUpdate); } @@ -381,13 +386,16 @@ private UserRead createUserFromIncomingUser(final User incomingUser, final UserA return createUser(userCreate); } - private void handleUserPermissionsAndWorkspace(final UserRead createdUser) throws IOException, JsonValidationException, ConfigNotFoundException { + private void handleUserPermissionsAndWorkspace(final UserRead createdUser) + throws IOException, JsonValidationException, ConfigNotFoundException { createInstanceAdminPermissionIfInitialUser(createdUser); final Optional ssoOrg = getSsoOrganizationIfExists(); if (ssoOrg.isPresent()) { + // SSO users will have some additional logic but will 
ultimately call createDefaultWorkspaceForUser handleSsoUser(createdUser, ssoOrg.get()); } else { - handleNonSsoUser(createdUser); + // non-SSO users will just create a default workspace + createDefaultWorkspaceForUser(createdUser, Optional.empty()); } } @@ -415,28 +423,42 @@ private void handleSsoUser(final UserRead user, final Organization organization) new ListWorkspacesInOrganizationRequestBody().organizationId(organization.getOrganizationId())); if (orgWorkspaces.getWorkspaces().isEmpty()) { - final WorkspaceRead defaultWorkspace = createDefaultWorkspaceForUser(user, Optional.of(organization)); - createPermissionForUserAndWorkspace(user.getUserId(), defaultWorkspace.getWorkspaceId(), PermissionType.WORKSPACE_ADMIN); + // Now calls bootstrap which includes all permissions and updates userRead. + createDefaultWorkspaceForUser(user, Optional.of(organization)); } } - private void handleNonSsoUser(final UserRead user) throws JsonValidationException, ConfigNotFoundException, IOException { - final WorkspaceRead defaultWorkspace = createDefaultWorkspaceForUser(user, Optional.empty()); - createPermissionForUserAndWorkspace(user.getUserId(), defaultWorkspace.getWorkspaceId(), PermissionType.WORKSPACE_ADMIN); - } - - private WorkspaceRead createDefaultWorkspaceForUser(final UserRead createdUser, final Optional organization) + protected void createDefaultWorkspaceForUser(final UserRead user, final Optional organization) throws JsonValidationException, IOException, ConfigNotFoundException { - final WorkspaceRead defaultWorkspace = workspacesHandler.createDefaultWorkspaceForUser(createdUser, organization); + // Only do this if the user doesn't already have a default workspace. 
+ if (user.getDefaultWorkspaceId() != null) { + return; + } + + // Logic stolen from workspaceHandler.createDefaultWorkspaceForUser + final String companyName = user.getCompanyName(); + final String email = user.getEmail(); + final Boolean news = user.getNews(); + // otherwise, create a default workspace for this user + final WorkspaceCreateWithId workspaceCreate = new WorkspaceCreateWithId() + .name(WorkspaceHelpersKt.getDefaultWorkspaceName(organization, companyName, email)) + .organizationId(organization.map(Organization::getOrganizationId).orElse(null)) + .email(email) + .news(news) + .anonymousDataCollection(false) + .securityUpdates(false) + .displaySetupWizard(true) + .id(uuidGenerator.get()); + + final WorkspaceRead defaultWorkspace = resourceBootstrapHandler.bootStrapWorkspaceForCurrentUser(workspaceCreate); // set default workspace id in User table final UserUpdate userUpdateDefaultWorkspace = new UserUpdate() - .userId(createdUser.getUserId()) + .userId(user.getUserId()) .defaultWorkspaceId(defaultWorkspace.getWorkspaceId()); updateUser(userUpdateDefaultWorkspace); - return defaultWorkspace; } private Optional getSsoOrganizationIfExists() throws IOException { @@ -460,7 +482,7 @@ private void createPermissionForUserAndWorkspace(final UUID userId, final UUID w .permissionType(permissionType)); } - private void createInstanceAdminPermissionIfInitialUser(final UserRead createdUser) throws IOException, JsonValidationException { + private void createInstanceAdminPermissionIfInitialUser(final UserRead createdUser) { if (initialUserConfiguration.isEmpty()) { // do nothing if initial_user bean is not present. 
return; @@ -482,12 +504,16 @@ private void createInstanceAdminPermissionIfInitialUser(final UserRead createdUs LOGGER.info("creating instance_admin permission for user ID {} because their email matches this instance's configured initial_user", createdUser.getUserId()); - permissionPersistence.writePermission(new Permission() - .withPermissionId(uuidGenerator.get()) - .withUserId(createdUser.getUserId()) - .withPermissionType(Permission.PermissionType.INSTANCE_ADMIN) - .withOrganizationId(null) - .withWorkspaceId(null)); + try { + permissionService.createPermission(new Permission() + .withPermissionId(uuidGenerator.get()) + .withUserId(createdUser.getUserId()) + .withPermissionType(Permission.PermissionType.INSTANCE_ADMIN) + .withOrganizationId(null) + .withWorkspaceId(null)); + } catch (final PermissionRedundantException e) { + throw new ConflictException(e.getMessage(), e); + } } private WorkspaceUserReadList buildWorkspaceUserReadList(final List userPermissions, final UUID workspaceId) { diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/WebBackendConnectionsHandler.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/WebBackendConnectionsHandler.java index 90d479bb149..d9e24e9db0f 100644 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/WebBackendConnectionsHandler.java +++ b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/WebBackendConnectionsHandler.java @@ -63,6 +63,11 @@ import io.airbyte.config.persistence.ConfigNotFoundException; import io.airbyte.config.persistence.ConfigRepository; import io.airbyte.config.persistence.ConfigRepository.StandardSyncQuery; +import io.airbyte.featureflag.Connection; +import io.airbyte.featureflag.FeatureFlagClient; +import io.airbyte.featureflag.Multi; +import io.airbyte.featureflag.UseClear; +import io.airbyte.featureflag.Workspace; import io.airbyte.persistence.job.models.JobStatusSummary; import 
io.airbyte.protocol.models.ConfiguredAirbyteCatalog; import io.airbyte.validation.json.JsonValidationException; @@ -101,6 +106,7 @@ public class WebBackendConnectionsHandler { @Deprecated private final ConfigRepository configRepositoryDoNotUse; private final ActorDefinitionVersionHelper actorDefinitionVersionHelper; + private final FeatureFlagClient featureFlagClient; public WebBackendConnectionsHandler(final ConnectionsHandler connectionsHandler, final StateHandler stateHandler, @@ -111,7 +117,8 @@ public WebBackendConnectionsHandler(final ConnectionsHandler connectionsHandler, final OperationsHandler operationsHandler, final EventRunner eventRunner, final ConfigRepository configRepositoryDoNotUse, - final ActorDefinitionVersionHelper actorDefinitionVersionHelper) { + final ActorDefinitionVersionHelper actorDefinitionVersionHelper, + final FeatureFlagClient featureFlagClient) { this.connectionsHandler = connectionsHandler; this.stateHandler = stateHandler; this.sourceHandler = sourceHandler; @@ -122,6 +129,7 @@ public WebBackendConnectionsHandler(final ConnectionsHandler connectionsHandler, this.eventRunner = eventRunner; this.configRepositoryDoNotUse = configRepositoryDoNotUse; this.actorDefinitionVersionHelper = actorDefinitionVersionHelper; + this.featureFlagClient = featureFlagClient; } public WebBackendWorkspaceStateResult getWorkspaceState(final WebBackendWorkspaceState webBackendWorkspaceState) throws IOException { @@ -553,6 +561,7 @@ public WebBackendConnectionRead webBackendUpdateConnection(final WebBackendConne final UUID connectionId = webBackendConnectionPatch.getConnectionId(); final ConnectionRead originalConnectionRead = connectionsHandler.getConnection(connectionId); boolean breakingChange = originalConnectionRead.getBreakingChange() != null && originalConnectionRead.getBreakingChange(); + boolean shouldRunSyncAfterClear = false; // If there have been changes to the sync catalog, check whether these changes result in or fix a // broken connection 
@@ -577,6 +586,10 @@ public WebBackendConnectionRead webBackendUpdateConnection(final WebBackendConne connectionsHandler.getDiff(newAirbyteCatalog, CatalogConverter.toApi(mostRecentAirbyteCatalog, sourceVersion), CatalogConverter.toConfiguredProtocol(newAirbyteCatalog)); breakingChange = containsBreakingChange(catalogDiff); + + shouldRunSyncAfterClear = !featureFlagClient.boolVariation(UseClear.INSTANCE, new Multi(List.of( + new Connection(connectionId), + new Workspace(source.getWorkspaceId())))); } } @@ -595,7 +608,7 @@ public WebBackendConnectionRead webBackendUpdateConnection(final WebBackendConne final ConnectionRead updatedConnectionRead = connectionsHandler.updateConnection(connectionPatch); // detect if any streams need to be reset based on the patch and initial catalog, if so, reset them - resetStreamsIfNeeded(webBackendConnectionPatch, oldConfiguredCatalog, updatedConnectionRead, originalConnectionRead); + resetStreamsIfNeeded(webBackendConnectionPatch, oldConfiguredCatalog, updatedConnectionRead, originalConnectionRead, shouldRunSyncAfterClear); /* * This catalog represents the full catalog that was used to create the configured catalog. It will * have all streams that were present at the time. It will have no configuration set. 
@@ -621,7 +634,8 @@ public WebBackendConnectionRead webBackendUpdateConnection(final WebBackendConne private void resetStreamsIfNeeded(final WebBackendConnectionUpdate webBackendConnectionPatch, final ConfiguredAirbyteCatalog oldConfiguredCatalog, final ConnectionRead updatedConnectionRead, - final ConnectionRead oldConnectionRead) + final ConnectionRead oldConnectionRead, + final boolean shouldRunSyncAfterClear) throws IOException, JsonValidationException, ConfigNotFoundException { final UUID connectionId = webBackendConnectionPatch.getConnectionId(); @@ -650,7 +664,8 @@ private void resetStreamsIfNeeded(final WebBackendConnectionUpdate webBackendCon } eventRunner.resetConnection( connectionId, - streamsToReset, true); + streamsToReset, + shouldRunSyncAfterClear); } } } @@ -798,7 +813,8 @@ private record Stream(String name, String namespace) { } - private boolean containsBreakingChange(final CatalogDiff diff) { + @VisibleForTesting + protected boolean containsBreakingChange(final CatalogDiff diff) { for (final StreamTransform streamTransform : diff.getTransforms()) { if (streamTransform.getTransformType() != TransformTypeEnum.UPDATE_STREAM) { continue; diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/WorkspacesHandler.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/WorkspacesHandler.java index da13b8adac6..1ca54aceb7b 100644 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/WorkspacesHandler.java +++ b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/WorkspacesHandler.java @@ -4,6 +4,8 @@ package io.airbyte.commons.server.handlers; +import static io.airbyte.config.persistence.ConfigNotFoundException.NO_ORGANIZATION_FOR_WORKSPACE; + import com.github.slugify.Slugify; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Preconditions; @@ -12,7 +14,6 @@ import io.airbyte.api.model.generated.ConnectionIdRequestBody; 
import io.airbyte.api.model.generated.ConnectionRead; import io.airbyte.api.model.generated.DestinationRead; -import io.airbyte.api.model.generated.Geography; import io.airbyte.api.model.generated.ListResourcesForWorkspacesRequestBody; import io.airbyte.api.model.generated.ListWorkspacesByUserRequestBody; import io.airbyte.api.model.generated.ListWorkspacesInOrganizationRequestBody; @@ -36,9 +37,12 @@ import io.airbyte.commons.server.converters.ApiPojoConverters; import io.airbyte.commons.server.converters.NotificationConverter; import io.airbyte.commons.server.converters.NotificationSettingsConverter; +import io.airbyte.commons.server.converters.WorkspaceConverter; import io.airbyte.commons.server.converters.WorkspaceWebhookConfigsConverter; +import io.airbyte.commons.server.errors.BadObjectSchemaKnownException; import io.airbyte.commons.server.errors.InternalServerKnownException; import io.airbyte.commons.server.errors.ValueConflictKnownException; +import io.airbyte.commons.server.handlers.helpers.WorkspaceHelpersKt; import io.airbyte.config.Organization; import io.airbyte.config.StandardWorkspace; import io.airbyte.config.persistence.ConfigNotFoundException; @@ -112,29 +116,6 @@ public WorkspacesHandler(final ConfigRepository configRepository, this.trackingClient = trackingClient; } - private static WorkspaceRead buildWorkspaceRead(final StandardWorkspace workspace) { - final WorkspaceRead result = new WorkspaceRead() - .workspaceId(workspace.getWorkspaceId()) - .customerId(workspace.getCustomerId()) - .email(workspace.getEmail()) - .name(workspace.getName()) - .slug(workspace.getSlug()) - .initialSetupComplete(workspace.getInitialSetupComplete()) - .displaySetupWizard(workspace.getDisplaySetupWizard()) - .anonymousDataCollection(workspace.getAnonymousDataCollection()) - .news(workspace.getNews()) - .securityUpdates(workspace.getSecurityUpdates()) - .notifications(NotificationConverter.toApiList(workspace.getNotifications())) - 
.notificationSettings(NotificationSettingsConverter.toApi(workspace.getNotificationSettings())) - .defaultGeography(Enums.convertTo(workspace.getDefaultGeography(), Geography.class)) - .organizationId(workspace.getOrganizationId()); - // Add read-only webhook configs. - if (workspace.getWebhookOperationConfigs() != null) { - result.setWebhookConfigs(WorkspaceWebhookConfigsConverter.toApiReads(workspace.getWebhookOperationConfigs())); - } - return result; - } - public WorkspaceRead createWorkspace(final WorkspaceCreate workspaceCreate) throws JsonValidationException, IOException, ValueConflictKnownException, ConfigNotFoundException { @@ -158,6 +139,13 @@ public WorkspaceRead createWorkspace(final WorkspaceCreate workspaceCreate) public WorkspaceRead createWorkspaceIfNotExist(final WorkspaceCreateWithId workspaceCreateWithId) throws JsonValidationException, IOException, ValueConflictKnownException, ConfigNotFoundException { + // We expect that the caller is specifying the workspace ID. + // Since this code is currently only called by OSS, it's enforced in the public API and the UI + // currently. 
+ if (workspaceCreateWithId.getOrganizationId() == null) { + throw new BadObjectSchemaKnownException("Workspace missing org ID."); + } + final String email = workspaceCreateWithId.getEmail(); final Boolean anonymousDataCollection = workspaceCreateWithId.getAnonymousDataCollection(); final Boolean news = workspaceCreateWithId.getNews(); @@ -208,7 +196,7 @@ public WorkspaceRead createDefaultWorkspaceForUser(final UserRead user, final Op final Boolean news = user.getNews(); // otherwise, create a default workspace for this user final WorkspaceCreate workspaceCreate = new WorkspaceCreate() - .name(getDefaultWorkspaceName(organization, companyName, email)) + .name(WorkspaceHelpersKt.getDefaultWorkspaceName(organization, companyName, email)) .organizationId(organization.map(Organization::getOrganizationId).orElse(null)) .email(email) .news(news) @@ -218,24 +206,6 @@ public WorkspaceRead createDefaultWorkspaceForUser(final UserRead user, final Op return createWorkspace(workspaceCreate); } - private String getDefaultWorkspaceName(final Optional organization, final String companyName, final String email) { - String defaultWorkspaceName = ""; - if (organization.isPresent()) { - // use organization name as default workspace name - defaultWorkspaceName = organization.get().getName().trim(); - } - // if organization name is not available or empty, use user's company name (note: this is an - // optional field) - if (defaultWorkspaceName.isEmpty() && companyName != null) { - defaultWorkspaceName = companyName.trim(); - } - // if company name is still empty, use user's email (note: this is a required field) - if (defaultWorkspaceName.isEmpty()) { - defaultWorkspaceName = email; - } - return defaultWorkspaceName; - } - public void deleteWorkspace(final WorkspaceIdRequestBody workspaceIdRequestBody) throws JsonValidationException, IOException, ConfigNotFoundException { // get existing implementation @@ -301,7 +271,7 @@ private NotificationSettings 
patchNotificationSettingsWithDefaultValue(final Wor public WorkspaceReadList listWorkspaces() throws JsonValidationException, IOException { final List reads = configRepository.listStandardWorkspaces(false).stream() - .map(WorkspacesHandler::buildWorkspaceRead) + .map(WorkspaceConverter::domainToApiModel) .collect(Collectors.toList()); return new WorkspaceReadList().workspaces(reads); } @@ -316,7 +286,7 @@ public WorkspaceReadList listAllWorkspacesPaginated(final ListResourcesForWorksp listResourcesForWorkspacesRequestBody.getPagination().getRowOffset(), listResourcesForWorkspacesRequestBody.getNameContains())) .stream() - .map(WorkspacesHandler::buildWorkspaceRead) + .map(WorkspaceConverter::domainToApiModel) .collect(Collectors.toList()); return new WorkspaceReadList().workspaces(reads); } @@ -331,7 +301,7 @@ public WorkspaceReadList listWorkspacesPaginated(final ListResourcesForWorkspace final List reads = standardWorkspaces .stream() - .map(WorkspacesHandler::buildWorkspaceRead) + .map(WorkspaceConverter::domainToApiModel) .collect(Collectors.toList()); return new WorkspaceReadList().workspaces(reads); } @@ -341,7 +311,7 @@ public WorkspaceRead getWorkspace(final WorkspaceIdRequestBody workspaceIdReques final UUID workspaceId = workspaceIdRequestBody.getWorkspaceId(); final boolean includeTombstone = workspaceIdRequestBody.getIncludeTombstone() != null ? 
workspaceIdRequestBody.getIncludeTombstone() : false; final StandardWorkspace workspace = configRepository.getStandardWorkspaceNoSecrets(workspaceId, includeTombstone); - return buildWorkspaceRead(workspace); + return WorkspaceConverter.domainToApiModel(workspace); } public WorkspaceOrganizationInfoRead getWorkspaceOrganizationInfo(final WorkspaceIdRequestBody workspaceIdRequestBody) @@ -349,7 +319,7 @@ public WorkspaceOrganizationInfoRead getWorkspaceOrganizationInfo(final Workspac final UUID workspaceId = workspaceIdRequestBody.getWorkspaceId(); final Optional organization = organizationPersistence.getOrganizationByWorkspaceId(workspaceId); if (organization.isEmpty()) { - throw new ConfigNotFoundException("ORGANIZATION_FOR_WORKSPACE", workspaceId.toString()); + throw new ConfigNotFoundException(NO_ORGANIZATION_FOR_WORKSPACE, workspaceId.toString()); } return buildWorkspaceOrganizationInfoRead(organization.get()); } @@ -358,12 +328,14 @@ public WorkspaceOrganizationInfoRead getWorkspaceOrganizationInfo(final Workspac public WorkspaceRead getWorkspaceBySlug(final SlugRequestBody slugRequestBody) throws IOException, ConfigNotFoundException { // for now we assume there is one workspace and it has a default uuid. 
final StandardWorkspace workspace = configRepository.getWorkspaceBySlug(slugRequestBody.getSlug(), false); - return buildWorkspaceRead(workspace); + return WorkspaceConverter.domainToApiModel(workspace); } - public WorkspaceRead getWorkspaceByConnectionId(final ConnectionIdRequestBody connectionIdRequestBody) throws ConfigNotFoundException { - final StandardWorkspace workspace = configRepository.getStandardWorkspaceFromConnection(connectionIdRequestBody.getConnectionId(), false); - return buildWorkspaceRead(workspace); + public WorkspaceRead getWorkspaceByConnectionId(final ConnectionIdRequestBody connectionIdRequestBody, boolean includeTombstone) + throws ConfigNotFoundException { + final StandardWorkspace workspace = + configRepository.getStandardWorkspaceFromConnection(connectionIdRequestBody.getConnectionId(), includeTombstone); + return WorkspaceConverter.domainToApiModel(workspace); } public WorkspaceReadList listWorkspacesInOrganization(final ListWorkspacesInOrganizationRequestBody request) throws IOException { @@ -376,13 +348,13 @@ public WorkspaceReadList listWorkspacesInOrganization(final ListWorkspacesInOrga false, request.getPagination().getPageSize(), request.getPagination().getRowOffset()), nameContains) .stream() - .map(WorkspacesHandler::buildWorkspaceRead) + .map(WorkspaceConverter::domainToApiModel) .collect(Collectors.toList()); } else { standardWorkspaces = workspacePersistence .listWorkspacesByOrganizationId(request.getOrganizationId(), false, nameContains) .stream() - .map(WorkspacesHandler::buildWorkspaceRead) + .map(WorkspaceConverter::domainToApiModel) .collect(Collectors.toList()); } return new WorkspaceReadList().workspaces(standardWorkspaces); @@ -397,13 +369,13 @@ private WorkspaceReadList listWorkspacesByInstanceAdminUser(final ListWorkspaces false, request.getPagination().getPageSize(), request.getPagination().getRowOffset(), nameContains) .stream() - .map(WorkspacesHandler::buildWorkspaceRead) + 
.map(WorkspaceConverter::domainToApiModel) .collect(Collectors.toList()); } else { standardWorkspaces = workspacePersistence .listWorkspacesByInstanceAdminUser(false, nameContains) .stream() - .map(WorkspacesHandler::buildWorkspaceRead) + .map(WorkspaceConverter::domainToApiModel) .collect(Collectors.toList()); } return new WorkspaceReadList().workspaces(standardWorkspaces); @@ -425,13 +397,13 @@ public WorkspaceReadList listWorkspacesByUser(final ListWorkspacesByUserRequestB false, request.getPagination().getPageSize(), request.getPagination().getRowOffset()), nameContains) .stream() - .map(WorkspacesHandler::buildWorkspaceRead) + .map(WorkspaceConverter::domainToApiModel) .collect(Collectors.toList()); } else { standardWorkspaces = workspacePersistence .listActiveWorkspacesByUserId(request.getUserId(), nameContains) .stream() - .map(WorkspacesHandler::buildWorkspaceRead) + .map(WorkspaceConverter::domainToApiModel) .collect(Collectors.toList()); } return new WorkspaceReadList().workspaces(standardWorkspaces); @@ -448,7 +420,7 @@ public WorkspaceRead updateWorkspace(final WorkspaceUpdate workspacePatch) throw validateWorkspacePatch(workspace, workspacePatch); - LOGGER.debug("Initial WorkspaceRead: {}", buildWorkspaceRead(workspace)); + LOGGER.debug("Initial WorkspaceRead: {}", WorkspaceConverter.domainToApiModel(workspace)); applyPatchToStandardWorkspace(workspace, workspacePatch); @@ -509,7 +481,7 @@ public void setFeedbackDone(final WorkspaceGiveFeedback workspaceGiveFeedback) private WorkspaceRead buildWorkspaceReadFromId(final UUID workspaceId) throws ConfigNotFoundException, IOException, JsonValidationException { final StandardWorkspace workspace = configRepository.getStandardWorkspaceNoSecrets(workspaceId, false); - return buildWorkspaceRead(workspace); + return WorkspaceConverter.domainToApiModel(workspace); } private WorkspaceOrganizationInfoRead buildWorkspaceOrganizationInfoRead(final Organization organization) { @@ -588,7 +560,7 @@ private WorkspaceRead 
persistStandardWorkspace(final StandardWorkspace workspace } catch (final io.airbyte.data.exceptions.ConfigNotFoundException e) { throw new ConfigNotFoundException(e.getType(), e.getConfigId()); } - return buildWorkspaceRead(workspace); + return WorkspaceConverter.domainToApiModel(workspace); } } diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/BuilderProjectUpdater.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/BuilderProjectUpdater.java new file mode 100644 index 00000000000..824a8d02c3c --- /dev/null +++ b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/BuilderProjectUpdater.java @@ -0,0 +1,15 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.commons.server.handlers.helpers; + +import io.airbyte.api.model.generated.ExistingConnectorBuilderProjectWithWorkspaceId; +import io.airbyte.config.persistence.ConfigNotFoundException; +import java.io.IOException; + +public interface BuilderProjectUpdater { + + void persistBuilderProjectUpdate(final ExistingConnectorBuilderProjectWithWorkspaceId projectUpdate) throws ConfigNotFoundException, IOException; + +} diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/CatalogConverter.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/CatalogConverter.java index 62df19e7de4..c57c5a43548 100644 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/CatalogConverter.java +++ b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/CatalogConverter.java @@ -73,7 +73,10 @@ public static io.airbyte.api.model.generated.AirbyteCatalog toApi(final Configur .primaryKey(configuredStream.getPrimaryKey()) .aliasName(Names.toAlphanumericAndUnderscore(configuredStream.getStream().getName())) .selected(true) - 
.fieldSelectionEnabled(getStreamHasFieldSelectionEnabled(fieldSelectionData, streamDescriptor)); + .fieldSelectionEnabled(getStreamHasFieldSelectionEnabled(fieldSelectionData, streamDescriptor)) + .generationId(configuredStream.getGenerationId()) + .minimumGenerationId(configuredStream.getMinimumGenerationId()) + .syncId(configuredStream.getSyncId()); if (configuration.getFieldSelectionEnabled()) { final List selectedColumns = new ArrayList<>(); // TODO(mfsiega-airbyte): support nested fields here. diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/CompositeBuilderProjectUpdater.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/CompositeBuilderProjectUpdater.java new file mode 100644 index 00000000000..caf2e9a3249 --- /dev/null +++ b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/CompositeBuilderProjectUpdater.java @@ -0,0 +1,32 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.commons.server.handlers.helpers; + +import io.airbyte.api.model.generated.ExistingConnectorBuilderProjectWithWorkspaceId; +import io.airbyte.config.persistence.ConfigNotFoundException; +import java.io.IOException; +import java.util.List; + +public class CompositeBuilderProjectUpdater implements BuilderProjectUpdater { + /* + * Update multiple builder project updaters sequentially. The update method is intentionally not + * atomic as this is an experimental features. We don't want a problematic updater to prevent others + * from succeeding. This means it is possible for them to get out of sync. 
+ */ + + private final List updaters; + + public CompositeBuilderProjectUpdater(final List updaters) { + this.updaters = updaters; + } + + @Override + public void persistBuilderProjectUpdate(ExistingConnectorBuilderProjectWithWorkspaceId projectUpdate) throws ConfigNotFoundException, IOException { + for (BuilderProjectUpdater updater : updaters) { + updater.persistBuilderProjectUpdate(projectUpdate); + } + } + +} diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/ConfigRepositoryBuilderProjectUpdater.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/ConfigRepositoryBuilderProjectUpdater.java new file mode 100644 index 00000000000..2225a2316e8 --- /dev/null +++ b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/ConfigRepositoryBuilderProjectUpdater.java @@ -0,0 +1,41 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.commons.server.handlers.helpers; + +import io.airbyte.api.model.generated.ExistingConnectorBuilderProjectWithWorkspaceId; +import io.airbyte.config.ConnectorBuilderProject; +import io.airbyte.config.persistence.ConfigNotFoundException; +import io.airbyte.config.persistence.ConfigRepository; +import java.io.IOException; + +public class ConfigRepositoryBuilderProjectUpdater implements BuilderProjectUpdater { + + private final ConfigRepository configRepository; + + public ConfigRepositoryBuilderProjectUpdater(final ConfigRepository configRepository) { + + this.configRepository = configRepository; + } + + @Override + public void persistBuilderProjectUpdate(ExistingConnectorBuilderProjectWithWorkspaceId projectUpdate) throws ConfigNotFoundException, IOException { + final ConnectorBuilderProject connectorBuilderProject = configRepository.getConnectorBuilderProject(projectUpdate.getBuilderProjectId(), false); + + if (connectorBuilderProject.getActorDefinitionId() != null) { + 
configRepository.updateBuilderProjectAndActorDefinition(projectUpdate.getBuilderProjectId(), + projectUpdate.getWorkspaceId(), + projectUpdate.getBuilderProject().getName(), + projectUpdate.getBuilderProject().getDraftManifest(), + connectorBuilderProject.getActorDefinitionId()); + } else { + configRepository.writeBuilderProjectDraft(projectUpdate.getBuilderProjectId(), + projectUpdate.getWorkspaceId(), + projectUpdate.getBuilderProject().getName(), + projectUpdate.getBuilderProject().getDraftManifest()); + } + + } + +} diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/JobCreationAndStatusUpdateHelper.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/JobCreationAndStatusUpdateHelper.java index b6e28e9379c..16815844a55 100644 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/JobCreationAndStatusUpdateHelper.java +++ b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/JobCreationAndStatusUpdateHelper.java @@ -4,12 +4,15 @@ package io.airbyte.commons.server.handlers.helpers; +import static io.airbyte.config.JobConfig.ConfigType.REFRESH; import static io.airbyte.config.JobConfig.ConfigType.SYNC; import static io.airbyte.metrics.lib.ApmTraceConstants.Tags.FAILURE_ORIGINS_KEY; import static io.airbyte.metrics.lib.ApmTraceConstants.Tags.FAILURE_TYPES_KEY; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Preconditions; +import io.airbyte.api.model.generated.JobFailureRequest; +import io.airbyte.api.model.generated.JobSuccessWithAttemptNumberRequest; import io.airbyte.commons.enums.Enums; import io.airbyte.commons.server.JobStatus; import io.airbyte.config.ActorDefinitionVersion; @@ -65,7 +68,7 @@ public class JobCreationAndStatusUpdateHelper { ReleaseStage.BETA, 3, ReleaseStage.GENERALLY_AVAILABLE, 4); private static final Comparator RELEASE_STAGE_COMPARATOR = 
Comparator.comparingInt(RELEASE_STAGE_ORDER::get); - public static final Set SYNC_CONFIG_SET = Set.of(SYNC); + public static final Set SYNC_CONFIG_SET = Set.of(SYNC, REFRESH); private final JobPersistence jobPersistence; private final ConfigRepository configRepository; @@ -154,7 +157,7 @@ public void failNonTerminalJobs(final UUID connectionId) throws IOException { attemptStats.add(jobPersistence.getAttemptStats(jobId, attempt.getAttemptNumber())); } final Job failedJob = jobPersistence.getJob(jobId); - jobNotifier.failJob("Failing job in order to start from clean job state for new temporal workflow run.", failedJob, attemptStats); + jobNotifier.failJob(failedJob, attemptStats); trackCompletion(failedJob, JobStatus.FAILED); } } @@ -251,6 +254,8 @@ public List getJobToReleaseStages(final Job job) throws IOExceptio final List actorDefVersionIds = switch (job.getConfig().getConfigType()) { case SYNC -> List.of(job.getConfig().getSync().getDestinationDefinitionVersionId(), job.getConfig().getSync().getSourceDefinitionVersionId()); case RESET_CONNECTION -> List.of(job.getConfig().getResetConnection().getDestinationDefinitionVersionId()); + case REFRESH -> List.of(job.getConfig().getRefresh().getSourceDefinitionVersionId(), + job.getConfig().getRefresh().getDestinationDefinitionVersionId()); default -> throw new IllegalArgumentException("Unexpected config type: " + job.getConfigType()); }; @@ -258,6 +263,45 @@ public List getJobToReleaseStages(final Job job) throws IOExceptio } public void emitJobToReleaseStagesMetric(final OssMetricsRegistry metric, final Job job) throws IOException { + emitToReleaseStagesMetricHelper(metric, job, Collections.emptyList()); + } + + public void emitJobToReleaseStagesMetric(final OssMetricsRegistry metric, final Job job, final JobSuccessWithAttemptNumberRequest input) + throws IOException { + List additionalAttributes = new ArrayList<>(); + if (job.getConfigType() == SYNC) { + final var sync = job.getConfig().getSync(); + 
additionalAttributes.add(new MetricAttribute(MetricTags.SOURCE_ID, sync.getSourceDefinitionVersionId().toString())); + additionalAttributes.add(new MetricAttribute(MetricTags.SOURCE_IMAGE, sync.getSourceDockerImage())); + additionalAttributes.add(new MetricAttribute(MetricTags.DESTINATION_IMAGE, sync.getDestinationDockerImage())); + additionalAttributes.add(new MetricAttribute(MetricTags.WORKSPACE_ID, sync.getWorkspaceId().toString())); + additionalAttributes.add(new MetricAttribute(MetricTags.CONNECTION_ID, input.getConnectionId().toString())); + } else if (job.getConfigType() == REFRESH) { + final var refresh = job.getConfig().getRefresh(); + additionalAttributes.add(new MetricAttribute(MetricTags.SOURCE_ID, refresh.getSourceDefinitionVersionId().toString())); + additionalAttributes.add(new MetricAttribute(MetricTags.SOURCE_IMAGE, refresh.getSourceDockerImage())); + additionalAttributes.add(new MetricAttribute(MetricTags.DESTINATION_IMAGE, refresh.getDestinationDockerImage())); + additionalAttributes.add(new MetricAttribute(MetricTags.WORKSPACE_ID, refresh.getWorkspaceId().toString())); + additionalAttributes.add(new MetricAttribute(MetricTags.CONNECTION_ID, input.getConnectionId().toString())); + } + emitToReleaseStagesMetricHelper(metric, job, additionalAttributes); + } + + public void emitJobToReleaseStagesMetric(final OssMetricsRegistry metric, final Job job, final JobFailureRequest input) throws IOException { + List additionalAttributes = new ArrayList<>(); + if (job.getConfigType() == SYNC) { + final var sync = job.getConfig().getSync(); + additionalAttributes.add(new MetricAttribute(MetricTags.SOURCE_ID, sync.getSourceDefinitionVersionId().toString())); + additionalAttributes.add(new MetricAttribute(MetricTags.SOURCE_IMAGE, sync.getSourceDockerImage())); + additionalAttributes.add(new MetricAttribute(MetricTags.DESTINATION_IMAGE, sync.getDestinationDockerImage())); + additionalAttributes.add(new MetricAttribute(MetricTags.WORKSPACE_ID, 
sync.getWorkspaceId().toString())); + additionalAttributes.add(new MetricAttribute(MetricTags.CONNECTION_ID, input.getConnectionId().toString())); + } + emitToReleaseStagesMetricHelper(metric, job, additionalAttributes); + } + + private void emitToReleaseStagesMetricHelper(final OssMetricsRegistry metric, final Job job, List additionalAttributes) + throws IOException { final var releaseStages = getJobToReleaseStages(job); if (releaseStages.isEmpty()) { return; @@ -265,8 +309,11 @@ public void emitJobToReleaseStagesMetric(final OssMetricsRegistry metric, final for (final ReleaseStage stage : releaseStages) { if (stage != null) { - MetricClientFactory.getMetricClient().count(metric, 1, - new MetricAttribute(MetricTags.RELEASE_STAGE, MetricTags.getReleaseStage(stage))); + List attributes = new ArrayList<>(); + attributes.add(new MetricAttribute(MetricTags.RELEASE_STAGE, MetricTags.getReleaseStage(stage))); + attributes.addAll(additionalAttributes); + + MetricClientFactory.getMetricClient().count(metric, 1, attributes.toArray(new MetricAttribute[0])); } } } diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/LocalFileSystemBuilderProjectUpdater.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/LocalFileSystemBuilderProjectUpdater.java new file mode 100644 index 00000000000..8bf1cb47f77 --- /dev/null +++ b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/LocalFileSystemBuilderProjectUpdater.java @@ -0,0 +1,48 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.commons.server.handlers.helpers; + +import io.airbyte.api.model.generated.ExistingConnectorBuilderProjectWithWorkspaceId; +import java.io.File; +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Paths; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class LocalFileSystemBuilderProjectUpdater implements BuilderProjectUpdater { + + private static final Logger LOGGER = LoggerFactory.getLogger(LocalFileSystemBuilderProjectUpdater.class); + + @Override + public void persistBuilderProjectUpdate(ExistingConnectorBuilderProjectWithWorkspaceId projectUpdate) throws IOException { + try { + writeJsonNodeToYamlFile(projectUpdate.getBuilderProject().getYamlManifest(), "/connectors", projectUpdate.getBuilderProject().getName()); + } catch (Exception e) { + /* + * While this flow is only meant to be used for local development, we swallow all exceptions to + * ensure this cannot affect the platform. Users can look through the logs if they suspect this is + * failing + */ + LOGGER.warn("Error writing manifest to local filesystem. Exception: {}. 
Builder Project: {}", e, projectUpdate.getBuilderProject()); + } + } + + public static void writeJsonNodeToYamlFile(String manifest, String basePath, String projectName) throws IOException { + + // Construct the file path + String filePath = Paths.get(basePath, "source-" + projectName, "source_" + projectName, "manifest.yaml").toString(); + + File file = new File(filePath); + + // Only try writing the file already exists + // This isn't meant to be used for creating new connectors + // We can revisit the flow in the future + if (file.exists()) { + Files.write(Paths.get(filePath), manifest.getBytes()); + } + } + +} diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/scheduling/AirbyteTaskExecutors.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/scheduling/AirbyteTaskExecutors.java index 50c28472c13..0205d1bc73e 100644 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/scheduling/AirbyteTaskExecutors.java +++ b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/scheduling/AirbyteTaskExecutors.java @@ -23,4 +23,10 @@ public interface AirbyteTaskExecutors extends TaskExecutors { */ String SCHEDULER = "scheduler"; + /** + * The name of the {@link java.util.concurrent.ExecutorService} used for endpoints that belong to + * the public API. 
+ */ + String PUBLIC_API = "public-api"; + } diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/support/AuthNettyServerCustomizer.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/support/AuthNettyServerCustomizer.java index d45abaf9bd5..ca50a04e5ad 100644 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/support/AuthNettyServerCustomizer.java +++ b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/support/AuthNettyServerCustomizer.java @@ -30,11 +30,20 @@ public class AuthNettyServerCustomizer implements BeanCreatedEventListener getRbacRoles(final String authUserId, final HttpRequest request) { - final Map headerMap = request.getHeaders().asMap(String.class, String.class); - + public Collection getRbacRoles(final String authUserId, Map headerMap) { final List workspaceIds = headerResolver.resolveWorkspace(headerMap); final List organizationIds = headerResolver.resolveOrganization(headerMap); final Set targetAuthUserIds = headerResolver.resolveAuthUserIds(headerMap); @@ -68,6 +66,11 @@ public Collection getRbacRoles(final String authUserId, final HttpReques return roles; } + public Collection getRbacRoles(final String authUserId, final HttpRequest request) { + final Map headerMap = request.getHeaders().asMap(String.class, String.class); + return getRbacRoles(authUserId, headerMap); + } + public static Set getInstanceAdminRoles() { final Set roles = new HashSet<>(); roles.addAll(AuthRole.buildAuthRolesSet(AuthRole.ADMIN)); diff --git a/airbyte-commons-server/src/main/kotlin/io/airbyte/commons/server/Constants.kt b/airbyte-commons-server/src/main/kotlin/io/airbyte/commons/server/Constants.kt new file mode 100644 index 00000000000..e174525f4c3 --- /dev/null +++ b/airbyte-commons-server/src/main/kotlin/io/airbyte/commons/server/Constants.kt @@ -0,0 +1,3 @@ +package io.airbyte.commons.server + +const val API_DOC_URL = "https://reference.airbyte.com" diff --git 
a/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/authorization/AirbyteApiAuthorizationHelper.kt b/airbyte-commons-server/src/main/kotlin/io/airbyte/commons/server/authorization/ApiAuthorizationHelper.kt similarity index 72% rename from airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/authorization/AirbyteApiAuthorizationHelper.kt rename to airbyte-commons-server/src/main/kotlin/io/airbyte/commons/server/authorization/ApiAuthorizationHelper.kt index 2b363caa7a6..d8ea98443db 100644 --- a/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/authorization/AirbyteApiAuthorizationHelper.kt +++ b/airbyte-commons-server/src/main/kotlin/io/airbyte/commons/server/authorization/ApiAuthorizationHelper.kt @@ -1,18 +1,21 @@ -package io.airbyte.server.apis.publicapi.authorization +package io.airbyte.commons.server.authorization import io.airbyte.api.model.generated.PermissionCheckRead import io.airbyte.api.model.generated.PermissionType import io.airbyte.api.model.generated.PermissionsCheckMultipleWorkspacesRequest +import io.airbyte.commons.auth.AuthRoleInterface import io.airbyte.commons.json.Jsons +import io.airbyte.commons.server.errors.problems.ForbiddenProblem import io.airbyte.commons.server.handlers.PermissionHandler import io.airbyte.commons.server.support.AuthenticationHeaderResolver import io.airbyte.commons.server.support.AuthenticationHttpHeaders.CONNECTION_ID_HEADER import io.airbyte.commons.server.support.AuthenticationHttpHeaders.DESTINATION_ID_HEADER import io.airbyte.commons.server.support.AuthenticationHttpHeaders.JOB_ID_HEADER +import io.airbyte.commons.server.support.AuthenticationHttpHeaders.ORGANIZATION_ID_HEADER import io.airbyte.commons.server.support.AuthenticationHttpHeaders.SOURCE_ID_HEADER import io.airbyte.commons.server.support.AuthenticationHttpHeaders.WORKSPACE_IDS_HEADER import io.airbyte.commons.server.support.CurrentUserService -import io.airbyte.server.apis.publicapi.problems.ForbiddenProblem +import 
io.airbyte.commons.server.support.RbacRoleHelper import io.github.oshai.kotlinlogging.KotlinLogging import jakarta.inject.Singleton import java.util.UUID @@ -20,14 +23,15 @@ import java.util.UUID private val logger = KotlinLogging.logger {} /** - * Authorization helper for the Airbyte API. Responsible for checking whether or not a user has access to the requested resources and should be called - * for any Airbyte API endpoint that requires authorization and doesn't go through the CloudAuthenticationProvider. + * Authorization helper. Responsible for checking whether a user has access to the requested resources and should be called + * for any API endpoint that requires authorization and doesn't go through the CloudAuthenticationProvider. */ @Singleton -class AirbyteApiAuthorizationHelper( +class ApiAuthorizationHelper( private val authorizationHeaderResolver: AuthenticationHeaderResolver, private val permissionHandler: PermissionHandler, private val currentUserService: CurrentUserService, + private val rbacRoleHelper: RbacRoleHelper, ) { private fun resolveIdsToWorkspaceIds( ids: List, @@ -53,6 +57,9 @@ class AirbyteApiAuthorizationHelper( Scope.JOB -> { buildPropertiesMapForJob(ids.first()) } + Scope.ORGANIZATION -> { + throw ForbiddenProblem("Cannot resolve organization Ids to workspace Ids.") + } } return authorizationHeaderResolver.resolveWorkspace(properties) } @@ -133,14 +140,18 @@ class AirbyteApiAuthorizationHelper( permissionTypes: Set, ) { logger.debug { "Checking workspace permissions for $ids in scope [${scope.name}]." } - if (ids.isEmpty() && scope != Scope.WORKSPACES) { - throw ForbiddenProblem("No Ids provided for scope: ${scope.name}.") - } + + // Workspace IDs are optional for WORKSPACES scope because the controller endpoint should infer in that case. if (ids.isEmpty() && scope == Scope.WORKSPACES) { logger.debug { "Empty workspaceIds, controller endpoint will pull all permissioned workspaces." 
} return } + // Disallow empty ids for other scopes + if (ids.isEmpty()) { + throw ForbiddenProblem("No Ids provided for scope: ${scope.name}.") + } + if (permissionHandler.isUserInstanceAdmin(userId)) { logger.debug { "User $userId is an instance admin, short circuiting auth check." } return @@ -178,6 +189,60 @@ class AirbyteApiAuthorizationHelper( return false } + /** + * Ensures the user has the required roles to access a given resource. + * + * Should use ApiAuthorizationHelper#buildRequiredRolesSet to build the required roles set. + */ + fun ensureUserHasAnyRequiredRoleOrThrow( + scope: Scope, + resourceIds: List, + requiredRoles: Set, + ) { + val authUserId = currentUserService.currentUser.authUserId + val idHeaderMap = buildIdHeaderMap(resourceIds, scope) + val userRoles = rbacRoleHelper.getRbacRoles(authUserId, idHeaderMap).toSet() + if (userRoles.intersect(requiredRoles.map(AuthRoleInterface::getLabel).toSet()).isEmpty()) { + throw ForbiddenProblem("User does not have any of the required roles to access resource(s) $resourceIds of type [${scope.name}].") + } + } + + /** + * Just resolves Ids to either a workspace or organization per the scope. 
+ */ + private fun buildIdHeaderMap( + ids: List, + scope: Scope, + ): Map { + return when (scope) { + Scope.WORKSPACE -> { + buildPropertiesMapForWorkspaces(ids) + } + Scope.WORKSPACES -> { + buildPropertiesMapForWorkspaces(ids) + } + Scope.SOURCE -> { + buildPropertiesMapForSource(ids.first()) + } + Scope.DESTINATION -> { + buildPropertiesMapForDestination(ids.first()) + } + Scope.CONNECTION -> { + buildPropertiesMapForConnection(ids.first()) + } + Scope.JOB -> { + buildPropertiesMapForJob(ids.first()) + } + Scope.ORGANIZATION -> { + buildPropertiesMapForOrganization(ids.first()) + } + } + } + + private fun buildPropertiesMapForOrganization(id: String): Map { + return mapOf(Scope.ORGANIZATION.mappedHeaderProperty to id) + } + private fun buildPropertiesMapForConnection(id: String): Map { return mapOf(Scope.CONNECTION.mappedHeaderProperty to id) } @@ -206,4 +271,5 @@ enum class Scope(val mappedHeaderProperty: String) { SOURCE(SOURCE_ID_HEADER), DESTINATION(DESTINATION_ID_HEADER), JOB(JOB_ID_HEADER), + ORGANIZATION(ORGANIZATION_ID_HEADER), } diff --git a/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/errorHandlers/AbstractThrowableProblemHandler.kt b/airbyte-commons-server/src/main/kotlin/io/airbyte/commons/server/errors/problems/AbstractThrowableProblemHandler.kt similarity index 85% rename from airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/errorHandlers/AbstractThrowableProblemHandler.kt rename to airbyte-commons-server/src/main/kotlin/io/airbyte/commons/server/errors/problems/AbstractThrowableProblemHandler.kt index 0706afc3a14..d3bbd9e55d1 100644 --- a/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/errorHandlers/AbstractThrowableProblemHandler.kt +++ b/airbyte-commons-server/src/main/kotlin/io/airbyte/commons/server/errors/problems/AbstractThrowableProblemHandler.kt @@ -1,10 +1,6 @@ -/* -* Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. 
-*/ -package io.airbyte.server.apis.publicapi.errorHandlers +package io.airbyte.commons.server.errors.problems import io.airbyte.commons.json.Jsons -import io.airbyte.commons.server.errors.problems.AbstractThrowableProblem import io.micronaut.context.annotation.Requires import io.micronaut.http.HttpRequest import io.micronaut.http.HttpResponse diff --git a/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/problems/BadRequestProblem.kt b/airbyte-commons-server/src/main/kotlin/io/airbyte/commons/server/errors/problems/BadRequestProblem.kt similarity index 64% rename from airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/problems/BadRequestProblem.kt rename to airbyte-commons-server/src/main/kotlin/io/airbyte/commons/server/errors/problems/BadRequestProblem.kt index ceb440b8d5e..14d8cf85117 100644 --- a/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/problems/BadRequestProblem.kt +++ b/airbyte-commons-server/src/main/kotlin/io/airbyte/commons/server/errors/problems/BadRequestProblem.kt @@ -1,11 +1,6 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.server.apis.publicapi.problems +package io.airbyte.commons.server.errors.problems -import io.airbyte.commons.server.errors.problems.AbstractThrowableProblem -import io.airbyte.server.apis.publicapi.constants.API_DOC_URL +import io.airbyte.commons.server.API_DOC_URL import io.micronaut.http.HttpStatus import java.io.Serial import java.net.URI diff --git a/airbyte-commons-server/src/main/kotlin/io/airbyte/commons/server/errors/problems/ConflictProblem.kt b/airbyte-commons-server/src/main/kotlin/io/airbyte/commons/server/errors/problems/ConflictProblem.kt new file mode 100644 index 00000000000..f684cd42a9b --- /dev/null +++ b/airbyte-commons-server/src/main/kotlin/io/airbyte/commons/server/errors/problems/ConflictProblem.kt @@ -0,0 +1,23 @@ +package io.airbyte.commons.server.errors.problems + +import io.airbyte.commons.server.API_DOC_URL +import io.micronaut.http.HttpStatus +import java.io.Serial +import java.net.URI + +/** + * Thrown when a user attempts to start a sync run while one is already running. + */ +class ConflictProblem(message: String?) 
: AbstractThrowableProblem( + TYPE, + TITLE, + HttpStatus.CONFLICT, + message, +) { + companion object { + @Serial + private val serialVersionUID = 1L + private val TYPE = URI.create("$API_DOC_URL/reference/errors#409-state-conflict") + private const val TITLE = "state-conflict" + } +} diff --git a/airbyte-commons-server/src/main/kotlin/io/airbyte/commons/server/errors/problems/ConnectionConfigurationProblem.kt b/airbyte-commons-server/src/main/kotlin/io/airbyte/commons/server/errors/problems/ConnectionConfigurationProblem.kt new file mode 100644 index 00000000000..f30b5c663af --- /dev/null +++ b/airbyte-commons-server/src/main/kotlin/io/airbyte/commons/server/errors/problems/ConnectionConfigurationProblem.kt @@ -0,0 +1,124 @@ +package io.airbyte.commons.server.errors.problems + +import io.airbyte.commons.server.API_DOC_URL +import io.airbyte.public_api.model.generated.ConnectionSyncModeEnum +import io.micronaut.http.HttpStatus +import jakarta.validation.Valid +import java.io.Serial +import java.net.URI + +/** + * Thrown when a configuration for a connection is not valid. + */ +class ConnectionConfigurationProblem private constructor(message: String) : AbstractThrowableProblem( + TYPE, + TITLE, + HttpStatus.BAD_REQUEST, + "The body of the request contains an invalid connection configuration. $message", +) { + companion object { + @Serial + private val serialVersionUID = 1L + private val TYPE = URI.create("$API_DOC_URL/reference/errors") + private const val TITLE = "bad-request" + + fun handleSyncModeProblem( + connectionSyncMode: @Valid ConnectionSyncModeEnum?, + streamName: String, + validSyncModes: Set, + ): ConnectionConfigurationProblem { + return ConnectionConfigurationProblem( + "Cannot set sync mode to $connectionSyncMode for stream $streamName. Valid sync modes are: $validSyncModes", + ) + } + + fun invalidStreamName(validStreamNames: Collection): ConnectionConfigurationProblem { + return ConnectionConfigurationProblem( + "Invalid stream found. 
The list of valid streams include: $validStreamNames.", + ) + } + + fun duplicateStream(streamName: String): ConnectionConfigurationProblem { + return ConnectionConfigurationProblem("Duplicate stream found in configuration for: $streamName.") + } + + fun sourceDefinedCursorFieldProblem( + streamName: String, + cursorField: List, + ): ConnectionConfigurationProblem { + return ConnectionConfigurationProblem( + "Cursor Field " + cursorField + " is already defined by source for stream: " + streamName + + ". Do not include a cursor field configuration for this stream.", + ) + } + + fun missingCursorField(streamName: String): ConnectionConfigurationProblem { + return ConnectionConfigurationProblem( + "No default cursor field for stream: $streamName. Please include a cursor field configuration for this stream.", + ) + } + + fun invalidCursorField( + streamName: String, + validFields: List?>, + ): ConnectionConfigurationProblem { + return ConnectionConfigurationProblem( + "Invalid cursor field for stream: $streamName. The list of valid cursor fields include: $validFields.", + ) + } + + fun missingPrimaryKey(streamName: String): ConnectionConfigurationProblem { + return ConnectionConfigurationProblem( + "No default primary key for stream: $streamName. Please include a primary key configuration for this stream.", + ) + } + + fun primaryKeyAlreadyDefined( + streamName: String, + allowedPrimaryKey: List>, + ): ConnectionConfigurationProblem { + return ConnectionConfigurationProblem( + "Primary key for stream: $streamName is already pre-defined. Please remove the primaryKey or provide the value as $allowedPrimaryKey.", + ) + } + + fun invalidPrimaryKey( + streamName: String, + validFields: List?>, + ): ConnectionConfigurationProblem { + return ConnectionConfigurationProblem( + "Invalid cursor field for stream: $streamName. 
The list of valid primary keys fields: $validFields.", + ) + } + + fun duplicatePrimaryKey( + streamName: String, + key: List?>, + ): ConnectionConfigurationProblem { + return ConnectionConfigurationProblem( + "Duplicate primary key detected for stream: $streamName, please don't provide the same column more than once. Key: $key", + ) + } + + fun invalidCronExpressionUnderOneHour(cronExpression: String): ConnectionConfigurationProblem { + return ConnectionConfigurationProblem( + "The cron expression " + cronExpression + + " is not valid or is less than the one hour minimum. The seconds and minutes values cannot be `*`.", + ) + } + + fun invalidCronExpression( + cronExpression: String, + message: String?, + ): ConnectionConfigurationProblem { + return ConnectionConfigurationProblem( + "The cron expression $cronExpression is not valid. Error: $message" + + ". Please check the cron expression format at https://www.quartz-scheduler.org/documentation/quartz-2.3.0/tutorials/crontrigger.html", + ) + } + + fun missingCronExpression(): ConnectionConfigurationProblem { + return ConnectionConfigurationProblem("Missing cron expression in the schedule.") + } + } +} diff --git a/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/problems/ForbiddenProblem.kt b/airbyte-commons-server/src/main/kotlin/io/airbyte/commons/server/errors/problems/ForbiddenProblem.kt similarity index 63% rename from airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/problems/ForbiddenProblem.kt rename to airbyte-commons-server/src/main/kotlin/io/airbyte/commons/server/errors/problems/ForbiddenProblem.kt index a779fa3e1cc..32476d1f957 100644 --- a/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/problems/ForbiddenProblem.kt +++ b/airbyte-commons-server/src/main/kotlin/io/airbyte/commons/server/errors/problems/ForbiddenProblem.kt @@ -1,11 +1,6 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.server.apis.publicapi.problems +package io.airbyte.commons.server.errors.problems -import io.airbyte.commons.server.errors.problems.AbstractThrowableProblem -import io.airbyte.server.apis.publicapi.constants.API_DOC_URL +import io.airbyte.commons.server.API_DOC_URL import io.micronaut.http.HttpStatus import java.io.Serial import java.net.URI diff --git a/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/problems/InvalidApiKeyProblem.kt b/airbyte-commons-server/src/main/kotlin/io/airbyte/commons/server/errors/problems/InvalidApiKeyProblem.kt similarity index 66% rename from airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/problems/InvalidApiKeyProblem.kt rename to airbyte-commons-server/src/main/kotlin/io/airbyte/commons/server/errors/problems/InvalidApiKeyProblem.kt index d30d46f9fd4..3d92cc098bc 100644 --- a/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/problems/InvalidApiKeyProblem.kt +++ b/airbyte-commons-server/src/main/kotlin/io/airbyte/commons/server/errors/problems/InvalidApiKeyProblem.kt @@ -1,11 +1,6 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.server.apis.publicapi.problems +package io.airbyte.commons.server.errors.problems -import io.airbyte.commons.server.errors.problems.AbstractThrowableProblem -import io.airbyte.server.apis.publicapi.constants.API_DOC_URL +import io.airbyte.commons.server.API_DOC_URL import io.micronaut.http.HttpStatus import java.io.Serial import java.net.URI diff --git a/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/problems/InvalidRedirectUrlProblem.kt b/airbyte-commons-server/src/main/kotlin/io/airbyte/commons/server/errors/problems/InvalidRedirectUrlProblem.kt similarity index 68% rename from airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/problems/InvalidRedirectUrlProblem.kt rename to airbyte-commons-server/src/main/kotlin/io/airbyte/commons/server/errors/problems/InvalidRedirectUrlProblem.kt index b6a25a29250..0e89d58091a 100644 --- a/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/problems/InvalidRedirectUrlProblem.kt +++ b/airbyte-commons-server/src/main/kotlin/io/airbyte/commons/server/errors/problems/InvalidRedirectUrlProblem.kt @@ -1,11 +1,6 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.server.apis.publicapi.problems +package io.airbyte.commons.server.errors.problems -import io.airbyte.commons.server.errors.problems.AbstractThrowableProblem -import io.airbyte.server.apis.publicapi.constants.API_DOC_URL +import io.airbyte.commons.server.API_DOC_URL import io.micronaut.http.HttpStatus import java.io.Serial import java.net.URI diff --git a/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/problems/NoUserFoundProblem.kt b/airbyte-commons-server/src/main/kotlin/io/airbyte/commons/server/errors/problems/NoUserFoundProblem.kt similarity index 71% rename from airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/problems/NoUserFoundProblem.kt rename to airbyte-commons-server/src/main/kotlin/io/airbyte/commons/server/errors/problems/NoUserFoundProblem.kt index e3fd51f846a..32fec780242 100644 --- a/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/problems/NoUserFoundProblem.kt +++ b/airbyte-commons-server/src/main/kotlin/io/airbyte/commons/server/errors/problems/NoUserFoundProblem.kt @@ -1,11 +1,6 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.server.apis.publicapi.problems +package io.airbyte.commons.server.errors.problems -import io.airbyte.commons.server.errors.problems.AbstractThrowableProblem -import io.airbyte.server.apis.publicapi.constants.API_DOC_URL +import io.airbyte.commons.server.API_DOC_URL import io.micronaut.http.HttpStatus import java.io.Serial import java.net.URI diff --git a/airbyte-commons-server/src/main/kotlin/io/airbyte/commons/server/errors/problems/OAuthCallbackFailureProblem.kt b/airbyte-commons-server/src/main/kotlin/io/airbyte/commons/server/errors/problems/OAuthCallbackFailureProblem.kt new file mode 100644 index 00000000000..3e5887a8da6 --- /dev/null +++ b/airbyte-commons-server/src/main/kotlin/io/airbyte/commons/server/errors/problems/OAuthCallbackFailureProblem.kt @@ -0,0 +1,20 @@ +package io.airbyte.commons.server.errors.problems + +import io.airbyte.commons.server.API_DOC_URL +import io.micronaut.http.HttpStatus +import java.io.Serial +import java.net.URI + +class OAuthCallbackFailureProblem(message: String?) 
: AbstractThrowableProblem( + TYPE, + TITLE, + HttpStatus.INTERNAL_SERVER_ERROR, + String.format("Unexpected problem completing OAuth: %s", message), +) { + companion object { + @Serial + private val serialVersionUID = 1L + private val TYPE = URI.create("$API_DOC_URL/reference/errors#oauth-callback-failure") + private const val TITLE = "oauth-callback-failure" + } +} diff --git a/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/problems/ResourceNotFoundProblem.kt b/airbyte-commons-server/src/main/kotlin/io/airbyte/commons/server/errors/problems/ResourceNotFoundProblem.kt similarity index 69% rename from airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/problems/ResourceNotFoundProblem.kt rename to airbyte-commons-server/src/main/kotlin/io/airbyte/commons/server/errors/problems/ResourceNotFoundProblem.kt index 231dc2d18d7..705ce615424 100644 --- a/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/problems/ResourceNotFoundProblem.kt +++ b/airbyte-commons-server/src/main/kotlin/io/airbyte/commons/server/errors/problems/ResourceNotFoundProblem.kt @@ -1,11 +1,6 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.server.apis.publicapi.problems +package io.airbyte.commons.server.errors.problems -import io.airbyte.commons.server.errors.problems.AbstractThrowableProblem -import io.airbyte.server.apis.publicapi.constants.API_DOC_URL +import io.airbyte.commons.server.API_DOC_URL import io.micronaut.http.HttpStatus import java.io.Serial import java.net.URI diff --git a/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/problems/SyncConflictProblem.kt b/airbyte-commons-server/src/main/kotlin/io/airbyte/commons/server/errors/problems/SyncConflictProblem.kt similarity index 66% rename from airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/problems/SyncConflictProblem.kt rename to airbyte-commons-server/src/main/kotlin/io/airbyte/commons/server/errors/problems/SyncConflictProblem.kt index c88c16fec7f..9f8b4ee2fef 100644 --- a/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/problems/SyncConflictProblem.kt +++ b/airbyte-commons-server/src/main/kotlin/io/airbyte/commons/server/errors/problems/SyncConflictProblem.kt @@ -1,11 +1,6 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.server.apis.publicapi.problems +package io.airbyte.commons.server.errors.problems -import io.airbyte.commons.server.errors.problems.AbstractThrowableProblem -import io.airbyte.server.apis.publicapi.constants.API_DOC_URL +import io.airbyte.commons.server.API_DOC_URL import io.micronaut.http.HttpStatus import java.io.Serial import java.net.URI diff --git a/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/problems/UnexpectedProblem.kt b/airbyte-commons-server/src/main/kotlin/io/airbyte/commons/server/errors/problems/UnexpectedProblem.kt similarity index 80% rename from airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/problems/UnexpectedProblem.kt rename to airbyte-commons-server/src/main/kotlin/io/airbyte/commons/server/errors/problems/UnexpectedProblem.kt index 6c055442e96..1a4416d21dd 100644 --- a/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/problems/UnexpectedProblem.kt +++ b/airbyte-commons-server/src/main/kotlin/io/airbyte/commons/server/errors/problems/UnexpectedProblem.kt @@ -1,11 +1,6 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.server.apis.publicapi.problems +package io.airbyte.commons.server.errors.problems -import io.airbyte.commons.server.errors.problems.AbstractThrowableProblem -import io.airbyte.server.apis.publicapi.constants.API_DOC_URL +import io.airbyte.commons.server.API_DOC_URL import io.micronaut.http.HttpStatus import java.io.Serial import java.net.URI diff --git a/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/problems/UnknownValueProblem.kt b/airbyte-commons-server/src/main/kotlin/io/airbyte/commons/server/errors/problems/UnknownValueProblem.kt similarity index 66% rename from airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/problems/UnknownValueProblem.kt rename to airbyte-commons-server/src/main/kotlin/io/airbyte/commons/server/errors/problems/UnknownValueProblem.kt index 6059a34a20c..4d69b3431ce 100644 --- a/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/problems/UnknownValueProblem.kt +++ b/airbyte-commons-server/src/main/kotlin/io/airbyte/commons/server/errors/problems/UnknownValueProblem.kt @@ -1,11 +1,6 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.server.apis.publicapi.problems +package io.airbyte.commons.server.errors.problems -import io.airbyte.commons.server.errors.problems.AbstractThrowableProblem -import io.airbyte.server.apis.publicapi.constants.API_DOC_URL +import io.airbyte.commons.server.API_DOC_URL import io.micronaut.http.HttpStatus import java.io.Serial import java.net.URI diff --git a/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/problems/UnprocessableEntityProblem.kt b/airbyte-commons-server/src/main/kotlin/io/airbyte/commons/server/errors/problems/UnprocessableEntityProblem.kt similarity index 73% rename from airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/problems/UnprocessableEntityProblem.kt rename to airbyte-commons-server/src/main/kotlin/io/airbyte/commons/server/errors/problems/UnprocessableEntityProblem.kt index 4f887c45fe3..9ef13c03067 100644 --- a/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/problems/UnprocessableEntityProblem.kt +++ b/airbyte-commons-server/src/main/kotlin/io/airbyte/commons/server/errors/problems/UnprocessableEntityProblem.kt @@ -1,11 +1,6 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.server.apis.publicapi.problems +package io.airbyte.commons.server.errors.problems -import io.airbyte.commons.server.errors.problems.AbstractThrowableProblem -import io.airbyte.server.apis.publicapi.constants.API_DOC_URL +import io.airbyte.commons.server.API_DOC_URL import io.micronaut.http.HttpStatus import java.io.Serial import java.net.URI diff --git a/airbyte-commons-server/src/main/kotlin/io/airbyte/commons/server/handlers/ResourceBootstrapHandler.kt b/airbyte-commons-server/src/main/kotlin/io/airbyte/commons/server/handlers/ResourceBootstrapHandler.kt new file mode 100644 index 00000000000..373638d4587 --- /dev/null +++ b/airbyte-commons-server/src/main/kotlin/io/airbyte/commons/server/handlers/ResourceBootstrapHandler.kt @@ -0,0 +1,171 @@ +package io.airbyte.commons.server.handlers + +import io.airbyte.api.model.generated.WorkspaceCreateWithId +import io.airbyte.api.model.generated.WorkspaceRead +import io.airbyte.commons.auth.OrganizationAuthRole +import io.airbyte.commons.server.authorization.ApiAuthorizationHelper +import io.airbyte.commons.server.authorization.Scope +import io.airbyte.commons.server.converters.WorkspaceConverter +import io.airbyte.commons.server.errors.ApplicationErrorKnownException +import io.airbyte.commons.server.handlers.helpers.buildStandardWorkspace +import io.airbyte.commons.server.support.CurrentUserService +import io.airbyte.config.ConfigSchema +import io.airbyte.config.Organization +import io.airbyte.config.Permission +import io.airbyte.config.Permission.PermissionType +import io.airbyte.config.User +import io.airbyte.data.exceptions.ConfigNotFoundException +import io.airbyte.data.services.OrganizationService +import io.airbyte.data.services.PermissionRedundantException +import io.airbyte.data.services.PermissionService +import io.airbyte.data.services.WorkspaceService +import jakarta.inject.Named +import jakarta.inject.Singleton +import org.slf4j.LoggerFactory +import java.util.UUID +import 
java.util.function.Supplier + +val DEFAULT_WORKSPACE_PERMISSION_TYPE = PermissionType.WORKSPACE_ADMIN +val DEFAULT_ORGANIZATION_PERMISSION_TYPE = PermissionType.ORGANIZATION_ADMIN + +@Singleton +open class ResourceBootstrapHandler( + @Named("uuidGenerator") private val uuidSupplier: Supplier, + private val workspaceService: WorkspaceService, + private val organizationService: OrganizationService, + private val permissionService: PermissionService, + private val currentUserService: CurrentUserService, + private val apiAuthorizationHelper: ApiAuthorizationHelper, +) : ResourceBootstrapHandlerInterface { + companion object { + val LOGGER = LoggerFactory.getLogger(ResourceBootstrapHandler::class.java) + } + + /** + * This is for bootstrapping a workspace and all the necessary links (organization) and permissions (workspace & organization). + */ + override fun bootStrapWorkspaceForCurrentUser(workspaceCreateWithId: WorkspaceCreateWithId): WorkspaceRead { + val user = currentUserService.getCurrentUser() + // The organization to use to set up the new workspace + val organization = + when (val organizationId = workspaceCreateWithId.organizationId) { + null -> findOrCreateOrganizationAndPermission(user) + else -> + organizationService.getOrganization(organizationId).orElseThrow { + ConfigNotFoundException( + ConfigSchema.ORGANIZATION, + "Attempted to bootstrap workspace but couldn't find existing organization $organizationId", + ) + } + } + + // Ensure user has the required permissions to create a workspace + val allowedRoles = setOf(OrganizationAuthRole.ORGANIZATION_ADMIN, OrganizationAuthRole.ORGANIZATION_EDITOR) + apiAuthorizationHelper.ensureUserHasAnyRequiredRoleOrThrow(Scope.ORGANIZATION, listOf(organization.organizationId.toString()), allowedRoles) + + val standardWorkspace = buildStandardWorkspace(workspaceCreateWithId, organization, uuidSupplier) + workspaceService.writeWorkspaceWithSecrets(standardWorkspace) + + val workspacePermission = 
buildDefaultWorkspacePermission(user.userId, standardWorkspace.workspaceId) + + kotlin.runCatching { permissionService.createPermission(workspacePermission) }.onFailure { e -> + when (e) { + is PermissionRedundantException -> + LOGGER.info( + "Skipped redundant workspace permission creation for workspace ${standardWorkspace.workspaceId}", + ) + else -> throw e + } + } + + return WorkspaceConverter.domainToApiModel(standardWorkspace) + } + + public fun findOrCreateOrganizationAndPermission(user: User): Organization { + findExistingOrganization(user)?.let { return it } + + val organization = + Organization().apply { + this.organizationId = uuidSupplier.get() + this.userId = user.userId + this.name = getDefaultOrganizationName(user) + this.email = user.email + this.orgLevelBilling = false + this.pba = false + } + organizationService.writeOrganization(organization) + + val organizationPermission = buildDefaultOrganizationPermission(user.userId, organization.organizationId) + permissionService.createPermission(organizationPermission) + return organization + } + + /** + * Tries to find an existing organization for the user. Permission checks will happen elsewhere. + */ + open fun findExistingOrganization(user: User): Organization? { + val organizationPermissionList = permissionService.getPermissionsForUser(user.userId).filter { it.organizationId != null } + + val hasSingleOrganization = organizationPermissionList.size == 1 + val hasNoOrganization = organizationPermissionList.isEmpty() + + val organizationId = + when { + hasSingleOrganization -> { + organizationPermissionList.first().organizationId.let { + LOGGER.info( + "User {} is associated with only one organization with ID {}", + user.userId, + it, + ) + it + } + } + hasNoOrganization -> { + LOGGER.info("User {} is associated with no organization.", user.userId) + null + } + else -> throw ApplicationErrorKnownException("User is associated with more than one organization. 
Please specify an organization id.") + } + + return organizationId?.let { organizationService.getOrganization(it).orElse(null) } + } + + private fun buildDefaultWorkspacePermission( + userId: UUID, + workspaceId: UUID, + ): Permission { + return Permission().apply { + this.userId = userId + this.workspaceId = workspaceId + this.permissionType = DEFAULT_WORKSPACE_PERMISSION_TYPE + this.permissionId = uuidSupplier.get() + } + } + + private fun buildDefaultOrganizationPermission( + userId: UUID, + organizationId: UUID, + ): Permission { + return Permission().apply { + this.userId = userId + this.organizationId = organizationId + this.permissionType = DEFAULT_ORGANIZATION_PERMISSION_TYPE + this.permissionId = uuidSupplier.get() + } + } + + private fun getDefaultOrganizationName(user: User): String { + when { + user.companyName != null -> { + return "${user.companyName}'s Organization" + } + user.name != null -> { + return "${user.name}'s Organization" + } + else -> { + return "${user.email.split("@").first()}'s Organization" + } + } + } +} diff --git a/airbyte-commons-server/src/main/kotlin/io/airbyte/commons/server/handlers/ResourceBootstrapHandlerInterface.kt b/airbyte-commons-server/src/main/kotlin/io/airbyte/commons/server/handlers/ResourceBootstrapHandlerInterface.kt new file mode 100644 index 00000000000..bd17e0de4c0 --- /dev/null +++ b/airbyte-commons-server/src/main/kotlin/io/airbyte/commons/server/handlers/ResourceBootstrapHandlerInterface.kt @@ -0,0 +1,8 @@ +package io.airbyte.commons.server.handlers + +import io.airbyte.api.model.generated.WorkspaceCreateWithId +import io.airbyte.api.model.generated.WorkspaceRead + +interface ResourceBootstrapHandlerInterface { + fun bootStrapWorkspaceForCurrentUser(workspaceCreateWithId: WorkspaceCreateWithId): WorkspaceRead +} diff --git a/airbyte-commons-server/src/main/kotlin/io/airbyte/commons/server/handlers/StreamRefreshesHandler.kt 
b/airbyte-commons-server/src/main/kotlin/io/airbyte/commons/server/handlers/StreamRefreshesHandler.kt new file mode 100644 index 00000000000..3b07e1b3138 --- /dev/null +++ b/airbyte-commons-server/src/main/kotlin/io/airbyte/commons/server/handlers/StreamRefreshesHandler.kt @@ -0,0 +1,95 @@ +package io.airbyte.commons.server.handlers + +import io.airbyte.api.model.generated.ConnectionStream +import io.airbyte.commons.server.scheduler.EventRunner +import io.airbyte.config.persistence.StreamRefreshesRepository +import io.airbyte.config.persistence.domain.StreamRefresh +import io.airbyte.data.services.ConnectionService +import io.airbyte.data.services.WorkspaceService +import io.airbyte.featureflag.ActivateRefreshes +import io.airbyte.featureflag.Connection +import io.airbyte.featureflag.FeatureFlagClient +import io.airbyte.featureflag.Multi +import io.airbyte.featureflag.Workspace +import io.airbyte.protocol.models.StreamDescriptor +import jakarta.inject.Singleton +import java.util.UUID + +@Singleton +class StreamRefreshesHandler( + private val connectionService: ConnectionService, + private val streamRefreshesRepository: StreamRefreshesRepository, + private val eventRunner: EventRunner, + private val workspaceService: WorkspaceService, + private val featureFlagClient: FeatureFlagClient, +) { + fun deleteRefreshesForConnection(connectionId: UUID) { + streamRefreshesRepository.deleteByConnectionId(connectionId) + } + + open fun createRefreshesForConnection( + connectionId: UUID, + streams: List, + ): Boolean { + val workspaceId = workspaceService.getStandardWorkspaceFromConnection(connectionId, false).workspaceId + val shouldRunRefresh = + featureFlagClient.boolVariation( + ActivateRefreshes, + Multi( + listOf( + Workspace(workspaceId), + Connection(connectionId), + ), + ), + ) + + if (!shouldRunRefresh) { + return false + } + + val streamDescriptors: List = + if (streams.isNotEmpty()) { + connectionStreamsToStreamDescriptors(streams) + } else { + 
connectionService.getAllStreamsForConnection(connectionId) + } + + createRefreshesForStreams(connectionId, streamDescriptors) + + eventRunner.startNewManualSync(connectionId) + + return true + } + + open fun getRefreshesForConnection(connectionId: UUID): List { + return streamRefreshesRepository.findByConnectionId(connectionId) + } + + private fun createRefreshesForStreams( + connectionId: UUID, + streams: List, + ) { + val streamRefreshes: List = streamDescriptorsToStreamRefreshes(connectionId, streams) + + streamRefreshesRepository.saveAll(streamRefreshes) + } + + companion object { + open fun connectionStreamsToStreamDescriptors(connectionStreams: List): List { + return connectionStreams.map { connectionStream -> + StreamDescriptor() + .withName(connectionStream.streamName) + .withNamespace(connectionStream.streamNamespace) + } + } + + open fun streamDescriptorsToStreamRefreshes( + connectionId: UUID, + streamDescriptors: List, + ): List { + return streamDescriptors.map { streamDescriptor -> + StreamRefresh(connectionId = connectionId, streamName = streamDescriptor.name, streamNamespace = streamDescriptor.namespace) + } + } + } +} diff --git a/airbyte-commons-server/src/main/kotlin/io/airbyte/commons/server/handlers/helpers/WorkspaceHelpers.kt b/airbyte-commons-server/src/main/kotlin/io/airbyte/commons/server/handlers/helpers/WorkspaceHelpers.kt new file mode 100644 index 00000000000..40b697b2146 --- /dev/null +++ b/airbyte-commons-server/src/main/kotlin/io/airbyte/commons/server/handlers/helpers/WorkspaceHelpers.kt @@ -0,0 +1,104 @@ +package io.airbyte.commons.server.handlers.helpers + +import io.airbyte.api.model.generated.NotificationItem +import io.airbyte.api.model.generated.NotificationSettings +import io.airbyte.api.model.generated.NotificationType +import io.airbyte.api.model.generated.WorkspaceCreateWithId +import io.airbyte.commons.enums.Enums +import io.airbyte.commons.server.converters.NotificationConverter +import 
io.airbyte.commons.server.converters.NotificationSettingsConverter +import io.airbyte.commons.server.converters.WorkspaceWebhookConfigsConverter +import io.airbyte.config.Geography +import io.airbyte.config.Organization +import io.airbyte.config.StandardWorkspace +import java.util.Optional +import java.util.UUID +import java.util.function.Supplier + +// These helpers exist so that we can get some of the utility of working with workspaces but without needing to inject WorkspacesHandler + +fun buildStandardWorkspace( + workspaceCreateWithId: WorkspaceCreateWithId, + organization: Organization, + uuidSupplier: Supplier, +): StandardWorkspace { + val email = workspaceCreateWithId.email + val anonymousDataCollection = workspaceCreateWithId.anonymousDataCollection + val news = workspaceCreateWithId.news + val securityUpdates = workspaceCreateWithId.securityUpdates + val displaySetupWizard = workspaceCreateWithId.displaySetupWizard + + // if not set on the workspaceCreate, set the defaultGeography to AUTO + val defaultGeography = + if (workspaceCreateWithId.defaultGeography != null) { + Enums.convertTo( + workspaceCreateWithId.defaultGeography, + Geography::class.java, + ) + } else { + Geography.AUTO + } + + // NotificationSettings from input will be patched with default values. 
+ val notificationSettings: NotificationSettings = patchNotificationSettingsWithDefaultValue(workspaceCreateWithId) + + return StandardWorkspace().apply { + this.workspaceId = workspaceCreateWithId.id ?: uuidSupplier.get() + this.customerId = uuidSupplier.get() // "customer_id" should be deprecated + this.name = workspaceCreateWithId.name + this.slug = uuidSupplier.get().toString() + this.initialSetupComplete = false + this.anonymousDataCollection = anonymousDataCollection ?: false + this.news = news ?: false + this.securityUpdates = securityUpdates ?: false + this.displaySetupWizard = displaySetupWizard ?: false + this.tombstone = false + this.notifications = NotificationConverter.toConfigList(workspaceCreateWithId.notifications) + this.notificationSettings = NotificationSettingsConverter.toConfig(notificationSettings) + this.defaultGeography = defaultGeography + this.webhookOperationConfigs = WorkspaceWebhookConfigsConverter.toPersistenceWrite(workspaceCreateWithId.webhookConfigs, uuidSupplier) + this.organizationId = organization.organizationId + this.email = email + } +} + +private fun patchNotificationSettingsWithDefaultValue(workspaceCreateWithId: WorkspaceCreateWithId): NotificationSettings { + val defaultNotificationType = NotificationItem().addNotificationTypeItem(NotificationType.CUSTOMERIO) + return NotificationSettings().apply { + this.sendOnSuccess = workspaceCreateWithId.notificationSettings?.sendOnSuccess ?: NotificationItem().notificationType(emptyList()) + this.sendOnFailure = workspaceCreateWithId.notificationSettings?.sendOnFailure ?: defaultNotificationType + this.sendOnConnectionUpdate = workspaceCreateWithId.notificationSettings?.sendOnConnectionUpdate ?: defaultNotificationType + + this.sendOnConnectionUpdateActionRequired = + workspaceCreateWithId.notificationSettings?.sendOnConnectionUpdateActionRequired ?: defaultNotificationType + + this.sendOnSyncDisabled = workspaceCreateWithId.notificationSettings?.sendOnSyncDisabled ?: 
defaultNotificationType + this.sendOnSyncDisabledWarning = workspaceCreateWithId.notificationSettings?.sendOnSyncDisabledWarning ?: defaultNotificationType + this.sendOnBreakingChangeWarning = workspaceCreateWithId.notificationSettings?.sendOnBreakingChangeWarning ?: defaultNotificationType + + this.sendOnBreakingChangeSyncsDisabled = + workspaceCreateWithId.notificationSettings?.sendOnBreakingChangeSyncsDisabled ?: defaultNotificationType + } +} + +fun getDefaultWorkspaceName( + organization: Optional, + companyName: String?, + email: String, +): String { + var defaultWorkspaceName = "" + if (organization.isPresent) { + // use organization name as default workspace name + defaultWorkspaceName = organization.get().name.trim() + } + // if organization name is not available or empty, use user's company name (note: this is an + // optional field) + if (defaultWorkspaceName.isEmpty() && companyName != null) { + defaultWorkspaceName = companyName.trim() + } + // if company name is still empty, use user's email (note: this is a required field) + if (defaultWorkspaceName.isEmpty()) { + defaultWorkspaceName = email + } + return defaultWorkspaceName +} diff --git a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/AttemptHandlerTest.java b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/AttemptHandlerTest.java index 9b9ba8baf02..a7b65266c93 100644 --- a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/AttemptHandlerTest.java +++ b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/AttemptHandlerTest.java @@ -4,6 +4,7 @@ package io.airbyte.commons.server.handlers; +import static org.assertj.core.api.Assertions.assertThat; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertNull; @@ -13,7 +14,9 @@ import static org.mockito.ArgumentMatchers.anyInt; import static 
org.mockito.ArgumentMatchers.anyLong; import static org.mockito.Mockito.doThrow; +import static org.mockito.Mockito.mock; import static org.mockito.Mockito.never; +import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; import com.fasterxml.jackson.databind.JsonNode; @@ -38,17 +41,27 @@ import io.airbyte.config.AttemptFailureSummary; import io.airbyte.config.FailureReason; import io.airbyte.config.FailureReason.FailureOrigin; +import io.airbyte.config.JobConfig; import io.airbyte.config.JobOutput; +import io.airbyte.config.JobSyncConfig; import io.airbyte.config.NormalizationSummary; import io.airbyte.config.StandardSyncOutput; import io.airbyte.config.StandardSyncSummary; import io.airbyte.config.StandardSyncSummary.ReplicationStatus; import io.airbyte.config.SyncStats; import io.airbyte.config.helpers.LogClientSingleton; +import io.airbyte.config.persistence.StatePersistence; +import io.airbyte.featureflag.FeatureFlagClient; +import io.airbyte.featureflag.TestClient; import io.airbyte.persistence.job.JobPersistence; import io.airbyte.persistence.job.models.Attempt; import io.airbyte.persistence.job.models.AttemptStatus; import io.airbyte.persistence.job.models.Job; +import io.airbyte.protocol.models.AirbyteStream; +import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; +import io.airbyte.protocol.models.ConfiguredAirbyteStream; +import io.airbyte.protocol.models.StreamDescriptor; +import io.airbyte.protocol.models.SyncMode; import java.io.IOException; import java.nio.file.Path; import java.time.Instant; @@ -56,14 +69,17 @@ import java.util.List; import java.util.Map; import java.util.Optional; +import java.util.Set; import java.util.UUID; import java.util.stream.Stream; import org.assertj.core.api.Assertions; import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Nested; import org.junit.jupiter.api.Test; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.Arguments; import 
org.junit.jupiter.params.provider.MethodSource; +import org.junit.jupiter.params.provider.ValueSource; import org.mockito.ArgumentCaptor; import org.mockito.Mockito; @@ -71,9 +87,11 @@ class AttemptHandlerTest { JobConverter jobConverter; JobPersistence jobPersistence; + StatePersistence statePersistence; Path path; AttemptHandler handler; JobCreationAndStatusUpdateHelper helper; + FeatureFlagClient ffClient; private static final UUID CONNECTION_ID = UUID.randomUUID(); private static final long JOB_ID = 10002L; @@ -96,11 +114,14 @@ class AttemptHandlerTest { @BeforeEach public void init() { - jobPersistence = Mockito.mock(JobPersistence.class); - jobConverter = Mockito.mock(JobConverter.class); - path = Mockito.mock(Path.class); - helper = Mockito.mock(JobCreationAndStatusUpdateHelper.class); - handler = new AttemptHandler(jobPersistence, jobConverter, helper, path); + jobPersistence = mock(JobPersistence.class); + statePersistence = mock(StatePersistence.class); + jobConverter = mock(JobConverter.class); + path = mock(Path.class); + helper = mock(JobCreationAndStatusUpdateHelper.class); + ffClient = mock(TestClient.class); + + handler = new AttemptHandler(jobPersistence, statePersistence, jobConverter, ffClient, helper, path); } @Test @@ -118,7 +139,7 @@ void testInternalWorkerHandlerSetsTemporalWorkflowId() throws Exception { assertTrue(handler.setWorkflowInAttempt(requestBody).getSucceeded()); - Mockito.verify(jobPersistence).setAttemptTemporalWorkflowInfo(jobIdCapture.capture(), attemptNumberCapture.capture(), workflowIdCapture.capture(), + verify(jobPersistence).setAttemptTemporalWorkflowInfo(jobIdCapture.capture(), attemptNumberCapture.capture(), workflowIdCapture.capture(), queueCapture.capture()); assertEquals(ATTEMPT_NUMBER, attemptNumberCapture.getValue()); @@ -145,7 +166,7 @@ void testInternalWorkerHandlerSetsTemporalWorkflowIdThrows() throws Exception { assertFalse(handler.setWorkflowInAttempt(requestBody).getSucceeded()); - 
Mockito.verify(jobPersistence).setAttemptTemporalWorkflowInfo(jobIdCapture.capture(), attemptNumberCapture.capture(), workflowIdCapture.capture(), + verify(jobPersistence).setAttemptTemporalWorkflowInfo(jobIdCapture.capture(), attemptNumberCapture.capture(), workflowIdCapture.capture(), queueCapture.capture()); assertEquals(ATTEMPT_NUMBER, attemptNumberCapture.getValue()); @@ -180,7 +201,7 @@ void testInternalHandlerSetsAttemptSyncConfig() throws Exception { assertTrue(handler.saveSyncConfig(requestBody).getSucceeded()); - Mockito.verify(jobPersistence).writeAttemptSyncConfig(jobIdCapture.capture(), attemptNumberCapture.capture(), attemptSyncConfigCapture.capture()); + verify(jobPersistence).writeAttemptSyncConfig(jobIdCapture.capture(), attemptNumberCapture.capture(), attemptSyncConfigCapture.capture()); final io.airbyte.config.AttemptSyncConfig expectedAttemptSyncConfig = ApiPojoConverters.attemptSyncConfigToInternal(attemptSyncConfig); @@ -192,33 +213,121 @@ void testInternalHandlerSetsAttemptSyncConfig() throws Exception { @Test void createAttemptNumber() throws IOException { final int attemptNumber = 1; - final Job mJob = Mockito.mock(Job.class); - Mockito.when(mJob.getAttemptsCount()) - .thenReturn(ATTEMPT_NUMBER); + final var connId = UUID.randomUUID(); + final Job mJob = mock(Job.class); + when(mJob.getConfigType()).thenReturn(JobConfig.ConfigType.SYNC); + when(mJob.getAttemptsCount()).thenReturn(ATTEMPT_NUMBER); + when(mJob.getScope()).thenReturn(connId.toString()); + + final var mConfig = mock(JobConfig.class); + when(mJob.getConfig()).thenReturn(mConfig); + + final var mDyncConfig = mock(JobSyncConfig.class); + when(mConfig.getSync()).thenReturn(mDyncConfig); + when(mDyncConfig.getWorkspaceId()).thenReturn(UUID.randomUUID()); - Mockito.when(jobPersistence.getJob(JOB_ID)) - .thenReturn(mJob); + final var mCatalog = mock(ConfiguredAirbyteCatalog.class); + when(mDyncConfig.getConfiguredAirbyteCatalog()).thenReturn(mCatalog); - 
Mockito.when(path.resolve(Mockito.anyString())) - .thenReturn(path); + when(jobPersistence.getJob(JOB_ID)).thenReturn(mJob); + when(path.resolve(Mockito.anyString())).thenReturn(path); - final Path expectedRoot = TemporalUtils.getJobRoot(path, String.valueOf(JOB_ID), (long) ATTEMPT_NUMBER); + final Path expectedRoot = TemporalUtils.getJobRoot(path, String.valueOf(JOB_ID), ATTEMPT_NUMBER); final Path expectedLogPath = expectedRoot.resolve(LogClientSingleton.LOG_FILENAME); - Mockito.when(jobPersistence.createAttempt(JOB_ID, expectedLogPath)) + when(jobPersistence.createAttempt(JOB_ID, expectedLogPath)) .thenReturn(attemptNumber); + when(ffClient.boolVariation(any(), any())).thenReturn(true); final CreateNewAttemptNumberResponse output = handler.createNewAttemptNumber(JOB_ID); - Assertions.assertThat(output.getAttemptNumber()).isEqualTo(attemptNumber); + assertThat(output.getAttemptNumber()).isEqualTo(attemptNumber); + } + + @Nested + class ClearFullRefreshStreamStateFirstAttempt { + + @Test + void getFullRefreshStreamsShouldOnlyReturnFullRefreshStreams() { + final var connId = UUID.randomUUID(); + final Job mJob = mock(Job.class); + when(mJob.getConfigType()).thenReturn(JobConfig.ConfigType.SYNC); + when(mJob.getScope()).thenReturn(connId.toString()); + + final var mJobConfig = mock(JobConfig.class); + when(mJob.getConfig()).thenReturn(mJobConfig); + + final var mSyncConfig = mock(JobSyncConfig.class); + when(mJobConfig.getSync()).thenReturn(mSyncConfig); + + final var mCatalog = mock(ConfiguredAirbyteCatalog.class); + when(mSyncConfig.getConfiguredAirbyteCatalog()).thenReturn(mCatalog); + + when(mCatalog.getStreams()).thenReturn(List.of( + new ConfiguredAirbyteStream().withSyncMode(SyncMode.FULL_REFRESH).withStream(new AirbyteStream().withName("full")), + new ConfiguredAirbyteStream().withSyncMode(SyncMode.INCREMENTAL).withStream(new AirbyteStream().withName("incre")), + new ConfiguredAirbyteStream().withSyncMode(SyncMode.FULL_REFRESH).withStream(new 
AirbyteStream().withName("full").withNamespace("name")), + new ConfiguredAirbyteStream().withSyncMode(SyncMode.INCREMENTAL).withStream(new AirbyteStream().withName("incre").withNamespace("name")))); + + final var streams = handler.getFullRefreshStreams(mCatalog, 1); + final var exp = Set.of(new StreamDescriptor().withName("full"), new StreamDescriptor().withName("full").withNamespace("name")); + assertEquals(exp, streams); + } + + @ParameterizedTest() + @ValueSource(ints = {0, 1, 2, 3}) // four attempt numbers + void createAttemptShouldAlwaysDeleteFullRefreshStreamState(int attemptNumber) throws IOException { + final var connId = UUID.randomUUID(); + final Job mJob = mock(Job.class); + when(mJob.getConfigType()).thenReturn(JobConfig.ConfigType.SYNC); + when(mJob.getAttemptsCount()).thenReturn(0); + when(mJob.getScope()).thenReturn(connId.toString()); + + final var mConfig = mock(JobConfig.class); + when(mJob.getConfig()).thenReturn(mConfig); + + final var mDyncConfig = mock(JobSyncConfig.class); + when(mConfig.getSync()).thenReturn(mDyncConfig); + when(mDyncConfig.getWorkspaceId()).thenReturn(UUID.randomUUID()); + + when(jobPersistence.getJob(JOB_ID)).thenReturn(mJob); + + when(path.resolve(Mockito.anyString())).thenReturn(path); + when(ffClient.boolVariation(any(), any())).thenReturn(true); + + final Path expectedRoot = TemporalUtils.getJobRoot(path, String.valueOf(JOB_ID), ATTEMPT_NUMBER); + final Path expectedLogPath = expectedRoot.resolve(LogClientSingleton.LOG_FILENAME); + + final var mCatalog = mock(ConfiguredAirbyteCatalog.class); + when(mDyncConfig.getConfiguredAirbyteCatalog()).thenReturn(mCatalog); + + when(mCatalog.getStreams()).thenReturn(List.of( + new ConfiguredAirbyteStream().withSyncMode(SyncMode.FULL_REFRESH).withStream(new AirbyteStream().withName("full")), + new ConfiguredAirbyteStream().withSyncMode(SyncMode.INCREMENTAL).withStream(new AirbyteStream().withName("incre")), + new ConfiguredAirbyteStream().withSyncMode(SyncMode.FULL_REFRESH).withStream(new 
AirbyteStream().withName("full").withNamespace("name")), + new ConfiguredAirbyteStream().withSyncMode(SyncMode.INCREMENTAL).withStream(new AirbyteStream().withName("incre").withNamespace("name")))); + + when(jobPersistence.createAttempt(JOB_ID, expectedLogPath)).thenReturn(attemptNumber); + + final CreateNewAttemptNumberResponse output = handler.createNewAttemptNumber(JOB_ID); + assertThat(output.getAttemptNumber()).isEqualTo(attemptNumber); + + ArgumentCaptor captor1 = ArgumentCaptor.forClass(UUID.class); + ArgumentCaptor> captor2 = ArgumentCaptor.forClass(Set.class); + verify(statePersistence).bulkDelete(captor1.capture(), captor2.capture()); + assertEquals(connId, captor1.getValue()); + assertEquals(Set.of(new StreamDescriptor().withName("full"), new StreamDescriptor().withName("full").withNamespace("name")), + captor2.getValue()); + } + } @Test - void createAttemptNumberWithUnownJobId() throws IOException { - final Job mJob = Mockito.mock(Job.class); - Mockito.when(mJob.getAttemptsCount()) + void createAttemptNumberWithUnknownJobId() throws IOException { + final Job mJob = mock(Job.class); + when(mJob.getAttemptsCount()) .thenReturn(ATTEMPT_NUMBER); - Mockito.when(jobPersistence.getJob(JOB_ID)) + when(jobPersistence.getJob(JOB_ID)) .thenThrow(new RuntimeException("unknown jobId " + JOB_ID)); Assertions.assertThatThrownBy(() -> handler.createNewAttemptNumber(JOB_ID)) @@ -299,27 +408,27 @@ void getAttemptCombinedStatsReturnsStats() throws Exception { void failAttemptSyncSummaryOutputPresent() throws IOException { handler.failAttempt(ATTEMPT_NUMBER, JOB_ID, failureSummary, standardSyncOutput); - Mockito.verify(jobPersistence).failAttempt(JOB_ID, ATTEMPT_NUMBER); - Mockito.verify(jobPersistence).writeOutput(JOB_ID, ATTEMPT_NUMBER, jobOutput); - Mockito.verify(jobPersistence).writeAttemptFailureSummary(JOB_ID, ATTEMPT_NUMBER, failureSummary); + verify(jobPersistence).failAttempt(JOB_ID, ATTEMPT_NUMBER); + verify(jobPersistence).writeOutput(JOB_ID, ATTEMPT_NUMBER, 
jobOutput); + verify(jobPersistence).writeAttemptFailureSummary(JOB_ID, ATTEMPT_NUMBER, failureSummary); } @Test void failAttemptSyncSummaryOutputNotPresent() throws IOException { handler.failAttempt(ATTEMPT_NUMBER, JOB_ID, failureSummary, null); - Mockito.verify(jobPersistence).failAttempt(JOB_ID, ATTEMPT_NUMBER); - Mockito.verify(jobPersistence, never()).writeOutput(JOB_ID, ATTEMPT_NUMBER, jobOutput); - Mockito.verify(jobPersistence).writeAttemptFailureSummary(JOB_ID, ATTEMPT_NUMBER, failureSummary); + verify(jobPersistence).failAttempt(JOB_ID, ATTEMPT_NUMBER); + verify(jobPersistence, never()).writeOutput(JOB_ID, ATTEMPT_NUMBER, jobOutput); + verify(jobPersistence).writeAttemptFailureSummary(JOB_ID, ATTEMPT_NUMBER, failureSummary); } @Test void failAttemptSyncSummaryNotPresent() throws IOException { handler.failAttempt(ATTEMPT_NUMBER, JOB_ID, null, standardSyncOutput); - Mockito.verify(jobPersistence).failAttempt(JOB_ID, ATTEMPT_NUMBER); - Mockito.verify(jobPersistence).writeOutput(JOB_ID, ATTEMPT_NUMBER, jobOutput); - Mockito.verify(jobPersistence).writeAttemptFailureSummary(JOB_ID, ATTEMPT_NUMBER, null); + verify(jobPersistence).failAttempt(JOB_ID, ATTEMPT_NUMBER); + verify(jobPersistence).writeOutput(JOB_ID, ATTEMPT_NUMBER, jobOutput); + verify(jobPersistence).writeAttemptFailureSummary(JOB_ID, ATTEMPT_NUMBER, null); } @ParameterizedTest diff --git a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/ConnectionsHandlerTest.java b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/ConnectionsHandlerTest.java index c1f188a80a1..92c6cd0e9c4 100644 --- a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/ConnectionsHandlerTest.java +++ b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/ConnectionsHandlerTest.java @@ -91,6 +91,7 @@ import io.airbyte.config.JobOutput; import io.airbyte.config.JobOutput.OutputType; import io.airbyte.config.JobSyncConfig; +import 
io.airbyte.config.RefreshConfig; import io.airbyte.config.Schedule; import io.airbyte.config.Schedule.TimeUnit; import io.airbyte.config.ScheduleData; @@ -109,8 +110,12 @@ import io.airbyte.config.persistence.ActorDefinitionVersionHelper.ActorDefinitionVersionWithOverrideStatus; import io.airbyte.config.persistence.ConfigNotFoundException; import io.airbyte.config.persistence.ConfigRepository; +import io.airbyte.config.persistence.StreamGenerationRepository; +import io.airbyte.config.persistence.domain.Generation; +import io.airbyte.config.persistence.helper.CatalogGenerationSetter; import io.airbyte.config.secrets.JsonSecretsProcessor; import io.airbyte.config.secrets.SecretsRepositoryReader; +import io.airbyte.data.helpers.ActorDefinitionVersionUpdater; import io.airbyte.data.services.DestinationService; import io.airbyte.data.services.SecretPersistenceConfigService; import io.airbyte.data.services.SourceService; @@ -138,7 +143,9 @@ import java.nio.charset.StandardCharsets; import java.time.Instant; import java.time.LocalDate; +import java.time.LocalTime; import java.time.ZoneId; +import java.time.ZonedDateTime; import java.time.temporal.ChronoUnit; import java.util.ArrayList; import java.util.Arrays; @@ -212,6 +219,7 @@ class ConnectionsHandlerTest { private ConnectionHelper connectionHelper; private TestClient featureFlagClient; private ActorDefinitionVersionHelper actorDefinitionVersionHelper; + private ActorDefinitionVersionUpdater actorDefinitionVersionUpdater; private ConnectorDefinitionSpecificationHandler connectorDefinitionSpecificationHandler; private JsonSchemaValidator validator; @@ -228,8 +236,11 @@ class ConnectionsHandlerTest { private DestinationHandler destinationHandler; private SourceHandler sourceHandler; + private StreamRefreshesHandler streamRefreshesHandler; private JobNotifier jobNotifier; private Job job; + private StreamGenerationRepository streamGenerationRepository; + private CatalogGenerationSetter catalogGenerationSetter; 
@SuppressWarnings("unchecked") @BeforeEach @@ -315,6 +326,7 @@ void setUp() throws IOException, JsonValidationException, ConfigNotFoundExceptio .withGeography(Geography.US); jobPersistence = mock(JobPersistence.class); + streamRefreshesHandler = mock(StreamRefreshesHandler.class); configRepository = mock(ConfigRepository.class); uuidGenerator = mock(Supplier.class); workspaceHelper = mock(WorkspaceHelper.class); @@ -322,6 +334,7 @@ void setUp() throws IOException, JsonValidationException, ConfigNotFoundExceptio eventRunner = mock(EventRunner.class); connectionHelper = mock(ConnectionHelper.class); actorDefinitionVersionHelper = mock(ActorDefinitionVersionHelper.class); + actorDefinitionVersionUpdater = mock(ActorDefinitionVersionUpdater.class); connectorDefinitionSpecificationHandler = mock(ConnectorDefinitionSpecificationHandler.class); validator = mock(JsonSchemaValidator.class); secretsProcessor = mock(JsonSecretsProcessor.class); @@ -348,7 +361,8 @@ void setUp() throws IOException, JsonValidationException, ConfigNotFoundExceptio actorDefinitionVersionHelper, destinationService, featureFlagClient, - actorDefinitionHandlerHelper); + actorDefinitionHandlerHelper, + actorDefinitionVersionUpdater); sourceHandler = new SourceHandler(configRepository, secretsRepositoryReader, validator, @@ -358,12 +372,15 @@ void setUp() throws IOException, JsonValidationException, ConfigNotFoundExceptio configurationUpdate, oAuthConfigSupplier, actorDefinitionVersionHelper, featureFlagClient, sourceService, workspaceService, secretPersistenceConfigService, - actorDefinitionHandlerHelper); + actorDefinitionHandlerHelper, + actorDefinitionVersionUpdater); matchSearchHandler = new MatchSearchHandler(configRepository, destinationHandler, sourceHandler); jobNotifier = mock(JobNotifier.class); featureFlagClient = mock(TestClient.class); job = mock(Job.class); + streamGenerationRepository = mock(StreamGenerationRepository.class); + catalogGenerationSetter = 
mock(CatalogGenerationSetter.class); when(workspaceHelper.getWorkspaceForSourceIdIgnoreExceptions(sourceId)).thenReturn(workspaceId); when(workspaceHelper.getWorkspaceForDestinationIdIgnoreExceptions(destinationId)).thenReturn(workspaceId); when(workspaceHelper.getWorkspaceForOperationIdIgnoreExceptions(operationId)).thenReturn(workspaceId); @@ -376,6 +393,7 @@ class UnMockedConnectionHelper { @BeforeEach void setUp() throws JsonValidationException, ConfigNotFoundException, IOException { connectionsHandler = new ConnectionsHandler( + streamRefreshesHandler, jobPersistence, configRepository, uuidGenerator, @@ -388,7 +406,9 @@ void setUp() throws JsonValidationException, ConfigNotFoundException, IOExceptio connectorDefinitionSpecificationHandler, jobNotifier, MAX_DAYS_OF_ONLY_FAILED_JOBS, - MAX_FAILURE_JOBS_IN_A_ROW); + MAX_FAILURE_JOBS_IN_A_ROW, + streamGenerationRepository, + catalogGenerationSetter); when(uuidGenerator.get()).thenReturn(standardSync.getConnectionId()); final StandardSourceDefinition sourceDefinition = new StandardSourceDefinition() @@ -421,6 +441,70 @@ void testGetConnection() throws JsonValidationException, ConfigNotFoundException assertEquals(ConnectionHelpers.generateExpectedConnectionRead(standardSync), actualConnectionRead); } + @Test + void testGetConnectionForJob() throws JsonValidationException, ConfigNotFoundException, IOException { + final Long jobId = 456L; + + when(configRepository.getStandardSync(standardSync.getConnectionId())) + .thenReturn(standardSync); + when(jobPersistence.getJob(jobId)).thenReturn(new Job( + jobId, + ConfigType.SYNC, + null, + null, + null, + null, + null, + 0, + 0)); + List generations = List.of(new Generation("name", null, 1)); + when(streamGenerationRepository.getMaxGenerationOfStreamsForConnectionId(standardSync.getConnectionId())).thenReturn(generations); + when(catalogGenerationSetter.updateCatalogWithGenerationAndSyncInformation( + standardSync.getCatalog(), + jobId, + List.of(), + 
generations)).thenReturn(standardSync.getCatalog()); + + final ConnectionRead actualConnectionRead = connectionsHandler.getConnectionForJob(standardSync.getConnectionId(), jobId); + + assertEquals(ConnectionHelpers.generateExpectedConnectionRead(standardSync), actualConnectionRead); + } + + @Test + void testGetConnectionForJobWithRefresh() throws JsonValidationException, ConfigNotFoundException, IOException { + final Long jobId = 456L; + + List refreshStreamDescriptors = + List.of(new io.airbyte.protocol.models.StreamDescriptor().withName("name")); + + final JobConfig config = new JobConfig() + .withRefresh(new RefreshConfig().withStreamsToRefresh(refreshStreamDescriptors)); + + when(configRepository.getStandardSync(standardSync.getConnectionId())) + .thenReturn(standardSync); + when(jobPersistence.getJob(jobId)).thenReturn(new Job( + jobId, + ConfigType.REFRESH, + null, + config, + null, + null, + null, + 0, + 0)); + List generations = List.of(new Generation("name", null, 1)); + when(streamGenerationRepository.getMaxGenerationOfStreamsForConnectionId(standardSync.getConnectionId())).thenReturn(generations); + when(catalogGenerationSetter.updateCatalogWithGenerationAndSyncInformation( + standardSync.getCatalog(), + jobId, + refreshStreamDescriptors, + generations)).thenReturn(standardSync.getCatalog()); + + final ConnectionRead actualConnectionRead = connectionsHandler.getConnectionForJob(standardSync.getConnectionId(), jobId); + + assertEquals(ConnectionHelpers.generateExpectedConnectionRead(standardSync), actualConnectionRead); + } + @Test void testListConnectionsForWorkspace() throws JsonValidationException, ConfigNotFoundException, IOException { when(configRepository.listWorkspaceStandardSyncs(source.getWorkspaceId(), false)) @@ -634,6 +718,7 @@ void testDeleteConnection() throws JsonValidationException, ConfigNotFoundExcept connectionsHandler.deleteConnection(connectionId); verify(connectionHelper).deleteConnection(connectionId); + 
verify(streamRefreshesHandler).deleteRefreshesForConnection(connectionId); } @Test @@ -711,7 +796,6 @@ void testWarningNotificationsForAutoDisablingMaxNumFailures() throws IOException assertFalse(internalOperationResult.getSucceeded()); verify(configRepository, Mockito.never()).writeStandardSync(any()); verify(jobNotifier, Mockito.never()).autoDisableConnection(any(), any()); - verify(jobNotifier, times(1)).notifyJobByEmail(any(), any(), any(), any()); verify(jobNotifier, times(1)).autoDisableConnectionWarning(any(), any()); } @@ -731,7 +815,6 @@ void testWarningNotificationsForAutoDisablingMaxDaysOfFailure() throws IOExcepti assertFalse(internalOperationResult.getSucceeded()); verify(configRepository, Mockito.never()).writeStandardSync(any()); verify(jobNotifier, Mockito.never()).autoDisableConnection(any(), any()); - verify(jobNotifier, times(1)).notifyJobByEmail(any(), any(), any(), any()); verify(jobNotifier, times(1)).autoDisableConnectionWarning(any(), any()); } @@ -753,7 +836,6 @@ void testWarningNotificationsDoesNotSpam() throws IOException, JsonValidationExc assertFalse(internalOperationResult.getSucceeded()); verify(configRepository, Mockito.never()).writeStandardSync(any()); verify(jobNotifier, Mockito.never()).autoDisableConnection(any(), any()); - verify(jobNotifier, Mockito.never()).notifyJobByEmail(any(), any(), any(), any()); verify(jobNotifier, Mockito.never()).autoDisableConnectionWarning(any(), any()); } @@ -775,7 +857,6 @@ void testWarningNotificationsDoesNotSpamAfterConsecutiveFailures() throws IOExce assertFalse(internalOperationResult.getSucceeded()); verify(configRepository, Mockito.never()).writeStandardSync(any()); verify(jobNotifier, Mockito.never()).autoDisableConnection(any(), any()); - verify(jobNotifier, Mockito.never()).notifyJobByEmail(any(), any(), any(), any()); verify(jobNotifier, Mockito.never()).autoDisableConnectionWarning(any(), any()); } @@ -794,7 +875,6 @@ void testOnlyFailuresButFirstJobYoungerThanMaxDaysWarning() throws 
IOException, assertFalse(internalOperationResult.getSucceeded()); verify(configRepository, Mockito.never()).writeStandardSync(any()); verify(jobNotifier, Mockito.never()).autoDisableConnection(any(), any()); - verify(jobNotifier, Mockito.never()).notifyJobByEmail(any(), any(), any(), any()); verify(jobNotifier, Mockito.never()).autoDisableConnectionWarning(any(), any()); } @@ -834,7 +914,6 @@ void testLessThanMaxFailuresInARow() throws IOException, JsonValidationException assertFalse(internalOperationResult.getSucceeded()); verify(configRepository, Mockito.never()).writeStandardSync(any()); verify(jobNotifier, Mockito.never()).autoDisableConnection(any(), any()); - verify(jobNotifier, Mockito.never()).notifyJobByEmail(any(), any(), any(), any()); verify(jobNotifier, Mockito.never()).autoDisableConnectionWarning(any(), any()); } @@ -850,7 +929,6 @@ void testNoRuns() throws IOException, JsonValidationException, ConfigNotFoundExc assertFalse(internalOperationResult.getSucceeded()); verify(configRepository, Mockito.never()).writeStandardSync(any()); verify(jobNotifier, Mockito.never()).autoDisableConnection(any(), any()); - verify(jobNotifier, Mockito.never()).notifyJobByEmail(any(), any(), any(), any()); verify(jobNotifier, Mockito.never()).autoDisableConnectionWarning(any(), any()); } @@ -883,7 +961,6 @@ void testIgnoreOnlyCancelledRuns() throws IOException, JsonValidationException, assertFalse(internalOperationResult.getSucceeded()); verify(configRepository, Mockito.never()).writeStandardSync(any()); verify(jobNotifier, Mockito.never()).autoDisableConnection(any(), any()); - verify(jobNotifier, Mockito.never()).notifyJobByEmail(any(), any(), any(), any()); } private void verifyDisabled() throws IOException { @@ -891,7 +968,6 @@ private void verifyDisabled() throws IOException { argThat(standardSync -> (standardSync.getStatus().equals(Status.INACTIVE) && standardSync.getConnectionId().equals(connectionId)))); verify(configRepository, 
times(1)).writeStandardSync(standardSync); verify(jobNotifier, times(1)).autoDisableConnection(eq(job), any()); - verify(jobNotifier, times(1)).notifyJobByEmail(any(), any(), eq(job), any()); verify(jobNotifier, Mockito.never()).autoDisableConnectionWarning(any(), any()); } @@ -1495,6 +1571,7 @@ class ConnectionHistory { @BeforeEach void setUp() { connectionsHandler = new ConnectionsHandler( + streamRefreshesHandler, jobPersistence, configRepository, uuidGenerator, @@ -1507,7 +1584,9 @@ void setUp() { connectorDefinitionSpecificationHandler, jobNotifier, MAX_DAYS_OF_ONLY_FAILED_JOBS, - MAX_FAILURE_JOBS_IN_A_ROW); + MAX_FAILURE_JOBS_IN_A_ROW, + streamGenerationRepository, + catalogGenerationSetter); } private Attempt generateMockAttempt(final Instant attemptTime, final long recordsSynced) { @@ -1584,8 +1663,13 @@ void testDataHistoryWithEmptyResponse() throws IOException { @DisplayName("Aggregates data correctly") void testDataHistoryAggregation() throws IOException { final UUID connectionId = UUID.randomUUID(); - final Instant endTime = Instant.now(); - final Instant startTime = endTime.minus(29, ChronoUnit.DAYS); + + final ZonedDateTime endTimeZoned = ZonedDateTime.now(ZoneId.of(TIMEZONE_LOS_ANGELES)).with(LocalTime.MAX); + final Instant endTime = endTimeZoned.toInstant(); + + final ZonedDateTime startTimeZoned = endTimeZoned.minusDays(29).with(LocalTime.MIN); + final Instant startTime = startTimeZoned.toInstant(); + final long attempt1Records = 100L; final long attempt2Records = 150L; final long attempt3Records = 200L; @@ -1608,7 +1692,7 @@ void testDataHistoryAggregation() throws IOException { attempt.getJobInfo().getId(), attempt.getAttempt().getOutput().map(output -> output.getSync().getStandardSyncSummary().getTotalStats().getRecordsCommitted()) .orElse(0L), - attempt.getAttempt().getEndedAtInSecond().map(endedAt -> endedAt).orElse(0L))).toList(); + attempt.getAttempt().getEndedAtInSecond().orElse(0L))).toList(); 
when(jobPersistence.listRecordsCommittedForConnectionAfterTimestamp(eq(connectionId), any(Instant.class))) .thenReturn(jobsAndRecords); @@ -1637,10 +1721,9 @@ class GetConnectionStreamHistory { @DisplayName("Handles empty history response") void testStreamHistoryWithEmptyResponse() throws IOException { final UUID connectionId = UUID.randomUUID(); - final String timezone = "America/Los_Angeles"; final ConnectionStreamHistoryRequestBody requestBody = new ConnectionStreamHistoryRequestBody() .connectionId(connectionId) - .timezone(timezone); + .timezone(TIMEZONE_LOS_ANGELES); when(jobPersistence.listAttemptsForConnectionAfterTimestamp(eq(connectionId), eq(ConfigType.SYNC), any(Instant.class))) .thenReturn(Collections.emptyList()); @@ -1741,6 +1824,7 @@ class StreamConfigurationDiff { @BeforeEach void setUp() { connectionsHandler = new ConnectionsHandler( + streamRefreshesHandler, jobPersistence, configRepository, uuidGenerator, @@ -1753,7 +1837,9 @@ void setUp() { connectorDefinitionSpecificationHandler, jobNotifier, MAX_DAYS_OF_ONLY_FAILED_JOBS, - MAX_FAILURE_JOBS_IN_A_ROW); + MAX_FAILURE_JOBS_IN_A_ROW, + streamGenerationRepository, + catalogGenerationSetter); } @Test @@ -2143,8 +2229,9 @@ void testConnectionStatus() new Job(0L, JobConfig.ConfigType.SYNC, connectionId.toString(), null, null, JobStatus.RUNNING, 1001L, 1000L, 1002L), new Job(0L, JobConfig.ConfigType.SYNC, connectionId.toString(), null, List.of(failedAttempt), JobStatus.FAILED, 901L, 900L, 902L), new Job(0L, JobConfig.ConfigType.SYNC, connectionId.toString(), null, null, JobStatus.SUCCEEDED, 801L, 800L, 802L)); - when(jobPersistence.listJobs(Set.of(JobConfig.ConfigType.SYNC, JobConfig.ConfigType.RESET_CONNECTION), connectionId.toString(), 10)) - .thenReturn(jobs); + when(jobPersistence.listJobs(REPLICATION_TYPES, + connectionId.toString(), 10)) + .thenReturn(jobs); final ConnectionStatusesRequestBody req = new ConnectionStatusesRequestBody().connectionIds(List.of(connectionId)); final List status = 
connectionsHandler.getConnectionStatuses(req); assertEquals(1, status.size()); @@ -2224,6 +2311,7 @@ void setup() throws IOException, JsonValidationException, ConfigNotFoundExceptio when(workspaceHelper.getWorkspaceForSourceIdIgnoreExceptions(SOURCE_ID)).thenReturn(WORKSPACE_ID); when(workspaceHelper.getWorkspaceForDestinationIdIgnoreExceptions(DESTINATION_ID)).thenReturn(WORKSPACE_ID); connectionsHandler = new ConnectionsHandler( + streamRefreshesHandler, jobPersistence, configRepository, uuidGenerator, @@ -2236,7 +2324,9 @@ void setup() throws IOException, JsonValidationException, ConfigNotFoundExceptio connectorDefinitionSpecificationHandler, jobNotifier, MAX_DAYS_OF_ONLY_FAILED_JOBS, - MAX_FAILURE_JOBS_IN_A_ROW); + MAX_FAILURE_JOBS_IN_A_ROW, + streamGenerationRepository, + catalogGenerationSetter); } @Test diff --git a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/ConnectorBuilderProjectsHandlerTest.java b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/ConnectorBuilderProjectsHandlerTest.java index d7b84353f89..a6d82215a73 100644 --- a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/ConnectorBuilderProjectsHandlerTest.java +++ b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/ConnectorBuilderProjectsHandlerTest.java @@ -42,6 +42,7 @@ import io.airbyte.api.model.generated.SourceDefinitionIdBody; import io.airbyte.api.model.generated.WorkspaceIdRequestBody; import io.airbyte.commons.json.Jsons; +import io.airbyte.commons.server.handlers.helpers.BuilderProjectUpdater; import io.airbyte.commons.server.handlers.helpers.DeclarativeSourceManifestInjector; import io.airbyte.config.ActorDefinitionConfigInjection; import io.airbyte.config.ActorDefinitionVersion; @@ -113,6 +114,7 @@ class ConnectorBuilderProjectsHandlerTest { } private ConfigRepository configRepository; + private BuilderProjectUpdater builderProjectUpdater; private ConnectorBuilderProjectsHandler 
connectorBuilderProjectsHandler; private Supplier uuidSupplier; private DeclarativeSourceManifestInjector manifestInjector; @@ -167,6 +169,7 @@ class ConnectorBuilderProjectsHandlerTest { @BeforeEach void setUp() throws JsonProcessingException { configRepository = mock(ConfigRepository.class); + builderProjectUpdater = mock(BuilderProjectUpdater.class); uuidSupplier = mock(Supplier.class); manifestInjector = mock(DeclarativeSourceManifestInjector.class); cdkVersionProvider = mock(CdkVersionProvider.class); @@ -184,7 +187,8 @@ void setUp() throws JsonProcessingException { workspaceId = UUID.randomUUID(); connectorBuilderProjectsHandler = - new ConnectorBuilderProjectsHandler(configRepository, cdkVersionProvider, uuidSupplier, manifestInjector, workspaceService, featureFlagClient, + new ConnectorBuilderProjectsHandler(configRepository, builderProjectUpdater, cdkVersionProvider, uuidSupplier, manifestInjector, + workspaceService, featureFlagClient, secretsRepositoryReader, secretsRepositoryWriter, secretPersistenceConfigService, connectorBuilderService, secretsProcessor, connectorBuilderServerApiClient); } @@ -233,46 +237,9 @@ void testUpdateConnectorBuilderProject() throws IOException, ConfigNotFoundExcep connectorBuilderProjectsHandler.updateConnectorBuilderProject(update); - verify(configRepository, times(1)) - .writeBuilderProjectDraft( - project.getBuilderProjectId(), project.getWorkspaceId(), project.getName(), project.getManifestDraft()); - } - - @Test - void givenActorDefinitionAssociatedWithProjectWhenUpdateConnectorBuilderProjectThenUpdateProjectAndDefinition() throws Exception { - when(configRepository.getConnectorBuilderProject(A_BUILDER_PROJECT_ID, false)).thenReturn(anyBuilderProject() - .withBuilderProjectId(A_BUILDER_PROJECT_ID) - .withWorkspaceId(A_WORKSPACE_ID) - .withActorDefinitionId(A_SOURCE_DEFINITION_ID)); - - connectorBuilderProjectsHandler.updateConnectorBuilderProject(new ExistingConnectorBuilderProjectWithWorkspaceId() - .builderProject(new 
ConnectorBuilderProjectDetails() - .name(A_SOURCE_NAME) - .draftManifest(A_MANIFEST)) - .workspaceId(A_WORKSPACE_ID) - .builderProjectId(A_BUILDER_PROJECT_ID)); - - verify(configRepository, times(1)) - .updateBuilderProjectAndActorDefinition( - A_BUILDER_PROJECT_ID, A_WORKSPACE_ID, A_SOURCE_NAME, A_MANIFEST, A_SOURCE_DEFINITION_ID); - } - - @Test - @DisplayName("updateConnectorBuilderProject should update an existing project removing the draft") - void testUpdateConnectorBuilderProjectWipeDraft() throws IOException, ConfigNotFoundException { - final ConnectorBuilderProject project = generateBuilderProject(); - - when(configRepository.getConnectorBuilderProject(project.getBuilderProjectId(), false)).thenReturn(project); - - final ExistingConnectorBuilderProjectWithWorkspaceId update = new ExistingConnectorBuilderProjectWithWorkspaceId() - .builderProject(new ConnectorBuilderProjectDetails().name(project.getName())) - .workspaceId(workspaceId).builderProjectId(project.getBuilderProjectId()); - - connectorBuilderProjectsHandler.updateConnectorBuilderProject(update); - - verify(configRepository, times(1)) - .writeBuilderProjectDraft( - project.getBuilderProjectId(), project.getWorkspaceId(), project.getName(), null); + verify(builderProjectUpdater, times(1)) + .persistBuilderProjectUpdate( + update); } @Test @@ -664,7 +631,7 @@ private void testStreamReadForProject(ConnectorBuilderProject project, JsonNode final String responseBody = "[" + Jsons.serialize(record1) + "," + Jsons.serialize(record2) + "]"; final String requestUrl = "https://api.com/users"; final int responseStatus = 200; - final HttpRequest httpRequest = new HttpRequest(requestUrl, HttpMethod.GET, null, null, null); + final HttpRequest httpRequest = new HttpRequest(requestUrl, HttpMethod.GET, null, null); final HttpResponse httpResponse = new HttpResponse(responseStatus, responseBody, null); final StreamRead streamRead = new StreamRead(Collections.emptyList(), List.of( new 
StreamReadSlicesInner(List.of(new StreamReadSlicesInnerPagesInner(List.of(record1, record2), httpRequest, httpResponse)), null, null)), diff --git a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/ConnectorDocumentationHandlerTest.java b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/ConnectorDocumentationHandlerTest.java index 3730234c1b7..6250962ecc4 100644 --- a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/ConnectorDocumentationHandlerTest.java +++ b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/ConnectorDocumentationHandlerTest.java @@ -58,10 +58,8 @@ class ConnectorDocumentationHandlerTest { new ActorDefinitionVersion().withDockerRepository(DESTINATION_DOCKER_REPO).withDockerImageTag( DESTINATION_VERSION_LATEST); - private static final String FULL_DOC_CONTENTS_OLD = "The full doc contents for the old version"; - private static final String INAPP_DOC_CONTENTS_OLD = "The inapp doc contents for the old version"; - private static final String FULL_DOC_CONTENTS_LATEST = "The full doc contents for the latest version"; - private static final String INAPP_DOC_CONTENTS_LATEST = "The inapp doc contents for the latest version"; + private static final String DOC_CONTENTS_OLD = "The doc contents for the old version"; + private static final String DOC_CONTENTS_LATEST = "The doc contents for the latest version"; @BeforeEach void setup() { @@ -83,7 +81,7 @@ void testNoSourceDocumentationFound() throws JsonValidationException, ConfigNotF when(configRepository.getStandardSourceDefinition(sourceDefinitionId)).thenReturn(SOURCE_DEFINITION); when(actorDefinitionVersionHelper.getSourceVersion(SOURCE_DEFINITION, workspaceId, sourceId)).thenReturn(SOURCE_DEFINITION_VERSION_OLD); - when(remoteDefinitionsProvider.getConnectorDocumentation(any(), any(), any())).thenReturn(Optional.empty()); + when(remoteDefinitionsProvider.getConnectorDocumentation(any(), 
any())).thenReturn(Optional.empty()); final ConnectorDocumentationRequestBody request = new ConnectorDocumentationRequestBody().actorType(ActorType.SOURCE) .actorDefinitionId(sourceDefinitionId).workspaceId(workspaceId).actorId(sourceId); @@ -92,172 +90,82 @@ void testNoSourceDocumentationFound() throws JsonValidationException, ConfigNotF } @Test - void testGetVersionedInappExistingSourceDocumentation() throws JsonValidationException, ConfigNotFoundException, IOException { + void testGetVersionedExistingSourceDocumentation() throws JsonValidationException, ConfigNotFoundException, IOException { final UUID sourceDefinitionId = UUID.randomUUID(); final UUID sourceId = UUID.randomUUID(); final UUID workspaceId = UUID.randomUUID(); when(configRepository.getStandardSourceDefinition(sourceDefinitionId)).thenReturn(SOURCE_DEFINITION); when(actorDefinitionVersionHelper.getSourceVersion(SOURCE_DEFINITION, workspaceId, sourceId)).thenReturn(SOURCE_DEFINITION_VERSION_OLD); - when(remoteDefinitionsProvider.getConnectorDocumentation(SOURCE_DOCKER_REPO, SOURCE_VERSION_OLD, true)) - .thenReturn(Optional.of(INAPP_DOC_CONTENTS_OLD)); + when(remoteDefinitionsProvider.getConnectorDocumentation(SOURCE_DOCKER_REPO, SOURCE_VERSION_OLD)) + .thenReturn(Optional.of(DOC_CONTENTS_OLD)); final ConnectorDocumentationRequestBody request = new ConnectorDocumentationRequestBody().actorType(ActorType.SOURCE) .actorDefinitionId(sourceDefinitionId).workspaceId(workspaceId).actorId(sourceId); final ConnectorDocumentationRead expectedResult = - new ConnectorDocumentationRead().doc(INAPP_DOC_CONTENTS_OLD); + new ConnectorDocumentationRead().doc(DOC_CONTENTS_OLD); final ConnectorDocumentationRead actualResult = connectorDocumentationHandler.getConnectorDocumentation(request); assertEquals(expectedResult, actualResult); } @Test - void testGetVersionedFullExistingSourceDocumentation() throws JsonValidationException, ConfigNotFoundException, IOException { + void testGetLatestExistingSourceDocumentation() 
throws JsonValidationException, ConfigNotFoundException, IOException { final UUID sourceDefinitionId = UUID.randomUUID(); final UUID sourceId = UUID.randomUUID(); final UUID workspaceId = UUID.randomUUID(); when(configRepository.getStandardSourceDefinition(sourceDefinitionId)).thenReturn(SOURCE_DEFINITION); when(actorDefinitionVersionHelper.getSourceVersion(SOURCE_DEFINITION, workspaceId, sourceId)).thenReturn(SOURCE_DEFINITION_VERSION_OLD); - when(remoteDefinitionsProvider.getConnectorDocumentation(SOURCE_DOCKER_REPO, SOURCE_VERSION_OLD, true)).thenReturn(Optional.empty()); - when(remoteDefinitionsProvider.getConnectorDocumentation(SOURCE_DOCKER_REPO, SOURCE_VERSION_OLD, false)) - .thenReturn(Optional.of(FULL_DOC_CONTENTS_OLD)); + when(remoteDefinitionsProvider.getConnectorDocumentation(SOURCE_DOCKER_REPO, SOURCE_VERSION_OLD)).thenReturn(Optional.empty()); + when(remoteDefinitionsProvider.getConnectorDocumentation(SOURCE_DOCKER_REPO, LATEST)).thenReturn(Optional.of(DOC_CONTENTS_LATEST)); final ConnectorDocumentationRequestBody request = new ConnectorDocumentationRequestBody().actorType(ActorType.SOURCE) .actorDefinitionId(sourceDefinitionId).workspaceId(workspaceId).actorId(sourceId); final ConnectorDocumentationRead expectedResult = - new ConnectorDocumentationRead().doc(FULL_DOC_CONTENTS_OLD); + new ConnectorDocumentationRead().doc(DOC_CONTENTS_LATEST); final ConnectorDocumentationRead actualResult = connectorDocumentationHandler.getConnectorDocumentation(request); assertEquals(expectedResult, actualResult); } @Test - void testGetLatestInappExistingSourceDocumentation() throws JsonValidationException, ConfigNotFoundException, IOException { - final UUID sourceDefinitionId = UUID.randomUUID(); - final UUID sourceId = UUID.randomUUID(); - final UUID workspaceId = UUID.randomUUID(); - when(configRepository.getStandardSourceDefinition(sourceDefinitionId)).thenReturn(SOURCE_DEFINITION); - when(actorDefinitionVersionHelper.getSourceVersion(SOURCE_DEFINITION, 
workspaceId, sourceId)).thenReturn(SOURCE_DEFINITION_VERSION_OLD); - - when(remoteDefinitionsProvider.getConnectorDocumentation(SOURCE_DOCKER_REPO, SOURCE_VERSION_OLD, true)).thenReturn(Optional.empty()); - when(remoteDefinitionsProvider.getConnectorDocumentation(SOURCE_DOCKER_REPO, SOURCE_VERSION_OLD, false)).thenReturn(Optional.empty()); - when(remoteDefinitionsProvider.getConnectorDocumentation(SOURCE_DOCKER_REPO, LATEST, true)).thenReturn(Optional.of(INAPP_DOC_CONTENTS_LATEST)); - - final ConnectorDocumentationRequestBody request = new ConnectorDocumentationRequestBody().actorType(ActorType.SOURCE) - .actorDefinitionId(sourceDefinitionId).workspaceId(workspaceId).actorId(sourceId); - - final ConnectorDocumentationRead expectedResult = - new ConnectorDocumentationRead().doc(INAPP_DOC_CONTENTS_LATEST); - final ConnectorDocumentationRead actualResult = connectorDocumentationHandler.getConnectorDocumentation(request); - - assertEquals(expectedResult, actualResult); - } - - @Test - void testGetLatestFullExistingSourceDocumentation() throws JsonValidationException, ConfigNotFoundException, IOException { - final UUID sourceDefinitionId = UUID.randomUUID(); - final UUID sourceId = UUID.randomUUID(); - final UUID workspaceId = UUID.randomUUID(); - when(configRepository.getStandardSourceDefinition(sourceDefinitionId)).thenReturn(SOURCE_DEFINITION); - when(actorDefinitionVersionHelper.getSourceVersion(SOURCE_DEFINITION, workspaceId, sourceId)).thenReturn(SOURCE_DEFINITION_VERSION_OLD); - - when(remoteDefinitionsProvider.getConnectorDocumentation(SOURCE_DOCKER_REPO, SOURCE_VERSION_OLD, true)).thenReturn(Optional.empty()); - when(remoteDefinitionsProvider.getConnectorDocumentation(SOURCE_DOCKER_REPO, SOURCE_VERSION_OLD, false)).thenReturn(Optional.empty()); - when(remoteDefinitionsProvider.getConnectorDocumentation(SOURCE_DOCKER_REPO, LATEST, true)).thenReturn(Optional.empty()); - when(remoteDefinitionsProvider.getConnectorDocumentation(SOURCE_DOCKER_REPO, LATEST, 
false)).thenReturn(Optional.of(FULL_DOC_CONTENTS_LATEST)); - - final ConnectorDocumentationRequestBody request = new ConnectorDocumentationRequestBody().actorType(ActorType.SOURCE) - .actorDefinitionId(sourceDefinitionId).workspaceId(workspaceId).actorId(sourceId); - - final ConnectorDocumentationRead expectedResult = - new ConnectorDocumentationRead().doc(FULL_DOC_CONTENTS_LATEST); - final ConnectorDocumentationRead actualResult = connectorDocumentationHandler.getConnectorDocumentation(request); - - assertEquals(expectedResult, actualResult); - } - - @Test - void testGetVersionedInappNewSourceDocumentation() throws JsonValidationException, ConfigNotFoundException, IOException { + void testGetVersionedNewSourceDocumentation() throws JsonValidationException, ConfigNotFoundException, IOException { final UUID sourceDefinitionId = UUID.randomUUID(); final UUID workspaceId = UUID.randomUUID(); when(configRepository.getStandardSourceDefinition(sourceDefinitionId)).thenReturn(SOURCE_DEFINITION); when(actorDefinitionVersionHelper.getSourceVersion(SOURCE_DEFINITION, workspaceId, null)).thenReturn(SOURCE_DEFINITION_VERSION_LATEST); - when(remoteDefinitionsProvider.getConnectorDocumentation(SOURCE_DOCKER_REPO, SOURCE_VERSION_LATEST, true)) - .thenReturn(Optional.of(INAPP_DOC_CONTENTS_LATEST)); + when(remoteDefinitionsProvider.getConnectorDocumentation(SOURCE_DOCKER_REPO, SOURCE_VERSION_LATEST)) + .thenReturn(Optional.of(DOC_CONTENTS_LATEST)); final ConnectorDocumentationRequestBody request = new ConnectorDocumentationRequestBody().actorType(ActorType.SOURCE).actorDefinitionId(sourceDefinitionId).workspaceId(workspaceId); final ConnectorDocumentationRead expectedResult = - new ConnectorDocumentationRead().doc(INAPP_DOC_CONTENTS_LATEST); + new ConnectorDocumentationRead().doc(DOC_CONTENTS_LATEST); final ConnectorDocumentationRead actualResult = connectorDocumentationHandler.getConnectorDocumentation(request); assertEquals(expectedResult, actualResult); } @Test - void 
testGetVersionedFullNewSourceDocumentation() throws JsonValidationException, ConfigNotFoundException, IOException { + void testGetLatestNewSourceDocumentation() throws JsonValidationException, ConfigNotFoundException, IOException { final UUID sourceDefinitionId = UUID.randomUUID(); final UUID workspaceId = UUID.randomUUID(); when(configRepository.getStandardSourceDefinition(sourceDefinitionId)).thenReturn(SOURCE_DEFINITION); when(actorDefinitionVersionHelper.getSourceVersion(SOURCE_DEFINITION, workspaceId, null)).thenReturn(SOURCE_DEFINITION_VERSION_LATEST); - when(remoteDefinitionsProvider.getConnectorDocumentation(SOURCE_DOCKER_REPO, SOURCE_VERSION_LATEST, true)).thenReturn(Optional.empty()); - when(remoteDefinitionsProvider.getConnectorDocumentation(SOURCE_DOCKER_REPO, SOURCE_VERSION_LATEST, false)) - .thenReturn(Optional.of(FULL_DOC_CONTENTS_LATEST)); + when(remoteDefinitionsProvider.getConnectorDocumentation(SOURCE_DOCKER_REPO, SOURCE_VERSION_LATEST)).thenReturn(Optional.empty()); + when(remoteDefinitionsProvider.getConnectorDocumentation(SOURCE_DOCKER_REPO, LATEST)).thenReturn(Optional.of(DOC_CONTENTS_LATEST)); final ConnectorDocumentationRequestBody request = new ConnectorDocumentationRequestBody().actorType(ActorType.SOURCE).actorDefinitionId(sourceDefinitionId).workspaceId(workspaceId); final ConnectorDocumentationRead expectedResult = - new ConnectorDocumentationRead().doc(FULL_DOC_CONTENTS_LATEST); - final ConnectorDocumentationRead actualResult = connectorDocumentationHandler.getConnectorDocumentation(request); - - assertEquals(expectedResult, actualResult); - } - - @Test - void testGetLatestInappNewSourceDocumentation() throws JsonValidationException, ConfigNotFoundException, IOException { - final UUID sourceDefinitionId = UUID.randomUUID(); - final UUID workspaceId = UUID.randomUUID(); - when(configRepository.getStandardSourceDefinition(sourceDefinitionId)).thenReturn(SOURCE_DEFINITION); - 
when(actorDefinitionVersionHelper.getSourceVersion(SOURCE_DEFINITION, workspaceId, null)).thenReturn(SOURCE_DEFINITION_VERSION_LATEST); - - when(remoteDefinitionsProvider.getConnectorDocumentation(SOURCE_DOCKER_REPO, SOURCE_VERSION_LATEST, true)).thenReturn(Optional.empty()); - when(remoteDefinitionsProvider.getConnectorDocumentation(SOURCE_DOCKER_REPO, SOURCE_VERSION_LATEST, false)).thenReturn(Optional.empty()); - when(remoteDefinitionsProvider.getConnectorDocumentation(SOURCE_DOCKER_REPO, LATEST, true)).thenReturn(Optional.of(INAPP_DOC_CONTENTS_LATEST)); - - final ConnectorDocumentationRequestBody request = - new ConnectorDocumentationRequestBody().actorType(ActorType.SOURCE).actorDefinitionId(sourceDefinitionId).workspaceId(workspaceId); - - final ConnectorDocumentationRead expectedResult = - new ConnectorDocumentationRead().doc(INAPP_DOC_CONTENTS_LATEST); - final ConnectorDocumentationRead actualResult = connectorDocumentationHandler.getConnectorDocumentation(request); - - assertEquals(expectedResult, actualResult); - } - - @Test - void testGetLatestFullNewSourceDocumentation() throws JsonValidationException, ConfigNotFoundException, IOException { - final UUID sourceDefinitionId = UUID.randomUUID(); - final UUID workspaceId = UUID.randomUUID(); - when(configRepository.getStandardSourceDefinition(sourceDefinitionId)).thenReturn(SOURCE_DEFINITION); - when(actorDefinitionVersionHelper.getSourceVersion(SOURCE_DEFINITION, workspaceId, null)).thenReturn(SOURCE_DEFINITION_VERSION_LATEST); - - when(remoteDefinitionsProvider.getConnectorDocumentation(SOURCE_DOCKER_REPO, SOURCE_VERSION_LATEST, true)).thenReturn(Optional.empty()); - when(remoteDefinitionsProvider.getConnectorDocumentation(SOURCE_DOCKER_REPO, SOURCE_VERSION_LATEST, false)).thenReturn(Optional.empty()); - when(remoteDefinitionsProvider.getConnectorDocumentation(SOURCE_DOCKER_REPO, LATEST, true)).thenReturn(Optional.empty()); - when(remoteDefinitionsProvider.getConnectorDocumentation(SOURCE_DOCKER_REPO, 
LATEST, false)).thenReturn(Optional.of(FULL_DOC_CONTENTS_LATEST)); - - final ConnectorDocumentationRequestBody request = - new ConnectorDocumentationRequestBody().actorType(ActorType.SOURCE).actorDefinitionId(sourceDefinitionId).workspaceId(workspaceId); - - final ConnectorDocumentationRead expectedResult = - new ConnectorDocumentationRead().doc(FULL_DOC_CONTENTS_LATEST); + new ConnectorDocumentationRead().doc(DOC_CONTENTS_LATEST); final ConnectorDocumentationRead actualResult = connectorDocumentationHandler.getConnectorDocumentation(request); assertEquals(expectedResult, actualResult); @@ -273,7 +181,7 @@ void testNoDestinationDocumentationFound() throws JsonValidationException, Confi when(actorDefinitionVersionHelper.getDestinationVersion(DESTINATION_DEFINITION, workspaceId, destinationId)) .thenReturn(DESTINATION_DEFINITION_VERSION_OLD); - when(remoteDefinitionsProvider.getConnectorDocumentation(any(), any(), any())).thenReturn(Optional.empty()); + when(remoteDefinitionsProvider.getConnectorDocumentation(any(), any())).thenReturn(Optional.empty()); final ConnectorDocumentationRequestBody request = new ConnectorDocumentationRequestBody().actorType(ActorType.DESTINATION) .actorDefinitionId(destinationDefinitionId).workspaceId(workspaceId).actorId(destinationId); @@ -282,7 +190,7 @@ void testNoDestinationDocumentationFound() throws JsonValidationException, Confi } @Test - void testGetVersionedInappExistingDestinationDocumentation() throws JsonValidationException, ConfigNotFoundException, IOException { + void testGetVersionedExistingDestinationDocumentation() throws JsonValidationException, ConfigNotFoundException, IOException { final UUID destinationDefinitionId = UUID.randomUUID(); final UUID destinationId = UUID.randomUUID(); final UUID workspaceId = UUID.randomUUID(); @@ -290,21 +198,21 @@ void testGetVersionedInappExistingDestinationDocumentation() throws JsonValidati when(actorDefinitionVersionHelper.getDestinationVersion(DESTINATION_DEFINITION, workspaceId, 
destinationId)) .thenReturn(DESTINATION_DEFINITION_VERSION_OLD); - when(remoteDefinitionsProvider.getConnectorDocumentation(DESTINATION_DOCKER_REPO, DESTINATION_VERSION_OLD, true)) - .thenReturn(Optional.of(INAPP_DOC_CONTENTS_OLD)); + when(remoteDefinitionsProvider.getConnectorDocumentation(DESTINATION_DOCKER_REPO, DESTINATION_VERSION_OLD)) + .thenReturn(Optional.of(DOC_CONTENTS_OLD)); final ConnectorDocumentationRequestBody request = new ConnectorDocumentationRequestBody().actorType(ActorType.DESTINATION) .actorDefinitionId(destinationDefinitionId).workspaceId(workspaceId).actorId(destinationId); final ConnectorDocumentationRead expectedResult = - new ConnectorDocumentationRead().doc(INAPP_DOC_CONTENTS_OLD); + new ConnectorDocumentationRead().doc(DOC_CONTENTS_OLD); final ConnectorDocumentationRead actualResult = connectorDocumentationHandler.getConnectorDocumentation(request); assertEquals(expectedResult, actualResult); } @Test - void testGetVersionedFullExistingDestinationDocumentation() throws JsonValidationException, ConfigNotFoundException, IOException { + void testGetLatestExistingDestinationDocumentation() throws JsonValidationException, ConfigNotFoundException, IOException { final UUID destinationDefinitionId = UUID.randomUUID(); final UUID destinationId = UUID.randomUUID(); final UUID workspaceId = UUID.randomUUID(); @@ -312,156 +220,59 @@ void testGetVersionedFullExistingDestinationDocumentation() throws JsonValidatio when(actorDefinitionVersionHelper.getDestinationVersion(DESTINATION_DEFINITION, workspaceId, destinationId)) .thenReturn(DESTINATION_DEFINITION_VERSION_OLD); - when(remoteDefinitionsProvider.getConnectorDocumentation(DESTINATION_DOCKER_REPO, DESTINATION_VERSION_OLD, true)).thenReturn(Optional.empty()); - when(remoteDefinitionsProvider.getConnectorDocumentation(DESTINATION_DOCKER_REPO, DESTINATION_VERSION_OLD, false)) - .thenReturn(Optional.of(FULL_DOC_CONTENTS_OLD)); + 
when(remoteDefinitionsProvider.getConnectorDocumentation(DESTINATION_DOCKER_REPO, DESTINATION_VERSION_OLD)).thenReturn(Optional.empty()); + when(remoteDefinitionsProvider.getConnectorDocumentation(DESTINATION_DOCKER_REPO, LATEST)) + .thenReturn(Optional.of(DOC_CONTENTS_LATEST)); final ConnectorDocumentationRequestBody request = new ConnectorDocumentationRequestBody().actorType(ActorType.DESTINATION) .actorDefinitionId(destinationDefinitionId).workspaceId(workspaceId).actorId(destinationId); final ConnectorDocumentationRead expectedResult = - new ConnectorDocumentationRead().doc(FULL_DOC_CONTENTS_OLD); + new ConnectorDocumentationRead().doc(DOC_CONTENTS_LATEST); final ConnectorDocumentationRead actualResult = connectorDocumentationHandler.getConnectorDocumentation(request); assertEquals(expectedResult, actualResult); } @Test - void testGetLatestInappExistingDestinationDocumentation() throws JsonValidationException, ConfigNotFoundException, IOException { - final UUID destinationDefinitionId = UUID.randomUUID(); - final UUID destinationId = UUID.randomUUID(); - final UUID workspaceId = UUID.randomUUID(); - when(configRepository.getStandardDestinationDefinition(destinationDefinitionId)).thenReturn(DESTINATION_DEFINITION); - when(actorDefinitionVersionHelper.getDestinationVersion(DESTINATION_DEFINITION, workspaceId, destinationId)) - .thenReturn(DESTINATION_DEFINITION_VERSION_OLD); - - when(remoteDefinitionsProvider.getConnectorDocumentation(DESTINATION_DOCKER_REPO, DESTINATION_VERSION_OLD, true)).thenReturn(Optional.empty()); - when(remoteDefinitionsProvider.getConnectorDocumentation(DESTINATION_DOCKER_REPO, DESTINATION_VERSION_OLD, false)).thenReturn(Optional.empty()); - when(remoteDefinitionsProvider.getConnectorDocumentation(DESTINATION_DOCKER_REPO, LATEST, true)) - .thenReturn(Optional.of(INAPP_DOC_CONTENTS_LATEST)); - - final ConnectorDocumentationRequestBody request = new ConnectorDocumentationRequestBody().actorType(ActorType.DESTINATION) - 
.actorDefinitionId(destinationDefinitionId).workspaceId(workspaceId).actorId(destinationId); - - final ConnectorDocumentationRead expectedResult = - new ConnectorDocumentationRead().doc(INAPP_DOC_CONTENTS_LATEST); - final ConnectorDocumentationRead actualResult = connectorDocumentationHandler.getConnectorDocumentation(request); - - assertEquals(expectedResult, actualResult); - } - - @Test - void testGetLatestFullExistingDestinationDocumentation() throws JsonValidationException, ConfigNotFoundException, IOException { - final UUID destinationDefinitionId = UUID.randomUUID(); - final UUID destinationId = UUID.randomUUID(); - final UUID workspaceId = UUID.randomUUID(); - when(configRepository.getStandardDestinationDefinition(destinationDefinitionId)).thenReturn(DESTINATION_DEFINITION); - when(actorDefinitionVersionHelper.getDestinationVersion(DESTINATION_DEFINITION, workspaceId, destinationId)) - .thenReturn(DESTINATION_DEFINITION_VERSION_OLD); - - when(remoteDefinitionsProvider.getConnectorDocumentation(DESTINATION_DOCKER_REPO, DESTINATION_VERSION_OLD, true)).thenReturn(Optional.empty()); - when(remoteDefinitionsProvider.getConnectorDocumentation(DESTINATION_DOCKER_REPO, DESTINATION_VERSION_OLD, false)).thenReturn(Optional.empty()); - when(remoteDefinitionsProvider.getConnectorDocumentation(DESTINATION_DOCKER_REPO, LATEST, true)).thenReturn(Optional.empty()); - when(remoteDefinitionsProvider.getConnectorDocumentation(DESTINATION_DOCKER_REPO, LATEST, false)) - .thenReturn(Optional.of(FULL_DOC_CONTENTS_LATEST)); - - final ConnectorDocumentationRequestBody request = new ConnectorDocumentationRequestBody().actorType(ActorType.DESTINATION) - .actorDefinitionId(destinationDefinitionId).workspaceId(workspaceId).actorId(destinationId); - - final ConnectorDocumentationRead expectedResult = - new ConnectorDocumentationRead().doc(FULL_DOC_CONTENTS_LATEST); - final ConnectorDocumentationRead actualResult = connectorDocumentationHandler.getConnectorDocumentation(request); - - 
assertEquals(expectedResult, actualResult); - } - - @Test - void testGetVersionedInappNewDestinationDocumentation() throws JsonValidationException, ConfigNotFoundException, IOException { - final UUID destinationDefinitionId = UUID.randomUUID(); - final UUID workspaceId = UUID.randomUUID(); - when(configRepository.getStandardDestinationDefinition(destinationDefinitionId)).thenReturn(DESTINATION_DEFINITION); - when(actorDefinitionVersionHelper.getDestinationVersion(DESTINATION_DEFINITION, workspaceId, null)) - .thenReturn(DESTINATION_DEFINITION_VERSION_LATEST); - - when(remoteDefinitionsProvider.getConnectorDocumentation(DESTINATION_DOCKER_REPO, DESTINATION_VERSION_LATEST, true)) - .thenReturn(Optional.of(INAPP_DOC_CONTENTS_LATEST)); - - final ConnectorDocumentationRequestBody request = - new ConnectorDocumentationRequestBody().actorType(ActorType.DESTINATION).actorDefinitionId(destinationDefinitionId).workspaceId(workspaceId); - - final ConnectorDocumentationRead expectedResult = - new ConnectorDocumentationRead().doc(INAPP_DOC_CONTENTS_LATEST); - final ConnectorDocumentationRead actualResult = connectorDocumentationHandler.getConnectorDocumentation(request); - - assertEquals(expectedResult, actualResult); - } - - @Test - void testGetVersionedFullNewDestinationDocumentation() throws JsonValidationException, ConfigNotFoundException, IOException { - final UUID destinationDefinitionId = UUID.randomUUID(); - final UUID workspaceId = UUID.randomUUID(); - when(configRepository.getStandardDestinationDefinition(destinationDefinitionId)).thenReturn(DESTINATION_DEFINITION); - when(actorDefinitionVersionHelper.getDestinationVersion(DESTINATION_DEFINITION, workspaceId, null)) - .thenReturn(DESTINATION_DEFINITION_VERSION_LATEST); - - when(remoteDefinitionsProvider.getConnectorDocumentation(DESTINATION_DOCKER_REPO, DESTINATION_VERSION_LATEST, true)).thenReturn(Optional.empty()); - when(remoteDefinitionsProvider.getConnectorDocumentation(DESTINATION_DOCKER_REPO, 
DESTINATION_VERSION_LATEST, false)) - .thenReturn(Optional.of(FULL_DOC_CONTENTS_LATEST)); - - final ConnectorDocumentationRequestBody request = - new ConnectorDocumentationRequestBody().actorType(ActorType.DESTINATION).actorDefinitionId(destinationDefinitionId).workspaceId(workspaceId); - - final ConnectorDocumentationRead expectedResult = - new ConnectorDocumentationRead().doc(FULL_DOC_CONTENTS_LATEST); - final ConnectorDocumentationRead actualResult = connectorDocumentationHandler.getConnectorDocumentation(request); - - assertEquals(expectedResult, actualResult); - } - - @Test - void testGetLatestInappNewDestinationDocumentation() throws JsonValidationException, ConfigNotFoundException, IOException { + void testGetVersionedNewDestinationDocumentation() throws JsonValidationException, ConfigNotFoundException, IOException { final UUID destinationDefinitionId = UUID.randomUUID(); final UUID workspaceId = UUID.randomUUID(); when(configRepository.getStandardDestinationDefinition(destinationDefinitionId)).thenReturn(DESTINATION_DEFINITION); when(actorDefinitionVersionHelper.getDestinationVersion(DESTINATION_DEFINITION, workspaceId, null)) .thenReturn(DESTINATION_DEFINITION_VERSION_LATEST); - when(remoteDefinitionsProvider.getConnectorDocumentation(DESTINATION_DOCKER_REPO, DESTINATION_VERSION_LATEST, true)).thenReturn(Optional.empty()); - when(remoteDefinitionsProvider.getConnectorDocumentation(DESTINATION_DOCKER_REPO, DESTINATION_VERSION_LATEST, false)) - .thenReturn(Optional.empty()); - when(remoteDefinitionsProvider.getConnectorDocumentation(DESTINATION_DOCKER_REPO, LATEST, true)) - .thenReturn(Optional.of(INAPP_DOC_CONTENTS_LATEST)); + when(remoteDefinitionsProvider.getConnectorDocumentation(DESTINATION_DOCKER_REPO, DESTINATION_VERSION_LATEST)) + .thenReturn(Optional.of(DOC_CONTENTS_LATEST)); final ConnectorDocumentationRequestBody request = new 
ConnectorDocumentationRequestBody().actorType(ActorType.DESTINATION).actorDefinitionId(destinationDefinitionId).workspaceId(workspaceId); final ConnectorDocumentationRead expectedResult = - new ConnectorDocumentationRead().doc(INAPP_DOC_CONTENTS_LATEST); + new ConnectorDocumentationRead().doc(DOC_CONTENTS_LATEST); final ConnectorDocumentationRead actualResult = connectorDocumentationHandler.getConnectorDocumentation(request); assertEquals(expectedResult, actualResult); } @Test - void testGetLatestFullNewDestinationDocumentation() throws JsonValidationException, ConfigNotFoundException, IOException { + void testGetLatestNewDestinationDocumentation() throws JsonValidationException, ConfigNotFoundException, IOException { final UUID destinationDefinitionId = UUID.randomUUID(); final UUID workspaceId = UUID.randomUUID(); when(configRepository.getStandardDestinationDefinition(destinationDefinitionId)).thenReturn(DESTINATION_DEFINITION); when(actorDefinitionVersionHelper.getDestinationVersion(DESTINATION_DEFINITION, workspaceId, null)) .thenReturn(DESTINATION_DEFINITION_VERSION_LATEST); - when(remoteDefinitionsProvider.getConnectorDocumentation(DESTINATION_DOCKER_REPO, DESTINATION_VERSION_LATEST, true)).thenReturn(Optional.empty()); - when(remoteDefinitionsProvider.getConnectorDocumentation(DESTINATION_DOCKER_REPO, DESTINATION_VERSION_LATEST, false)) + when(remoteDefinitionsProvider.getConnectorDocumentation(DESTINATION_DOCKER_REPO, DESTINATION_VERSION_LATEST)) .thenReturn(Optional.empty()); - when(remoteDefinitionsProvider.getConnectorDocumentation(DESTINATION_DOCKER_REPO, LATEST, true)).thenReturn(Optional.empty()); - when(remoteDefinitionsProvider.getConnectorDocumentation(DESTINATION_DOCKER_REPO, LATEST, false)) - .thenReturn(Optional.of(FULL_DOC_CONTENTS_LATEST)); + when(remoteDefinitionsProvider.getConnectorDocumentation(DESTINATION_DOCKER_REPO, LATEST)) + .thenReturn(Optional.of(DOC_CONTENTS_LATEST)); final ConnectorDocumentationRequestBody request = new 
ConnectorDocumentationRequestBody().actorType(ActorType.DESTINATION).actorDefinitionId(destinationDefinitionId).workspaceId(workspaceId); final ConnectorDocumentationRead expectedResult = - new ConnectorDocumentationRead().doc(FULL_DOC_CONTENTS_LATEST); + new ConnectorDocumentationRead().doc(DOC_CONTENTS_LATEST); final ConnectorDocumentationRead actualResult = connectorDocumentationHandler.getConnectorDocumentation(request); assertEquals(expectedResult, actualResult); diff --git a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/DestinationHandlerTest.java b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/DestinationHandlerTest.java index 1552d97ecc6..5c4aeb30d13 100644 --- a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/DestinationHandlerTest.java +++ b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/DestinationHandlerTest.java @@ -38,6 +38,7 @@ import io.airbyte.config.persistence.ConfigNotFoundException; import io.airbyte.config.persistence.ConfigRepository; import io.airbyte.config.secrets.JsonSecretsProcessor; +import io.airbyte.data.helpers.ActorDefinitionVersionUpdater; import io.airbyte.data.services.DestinationService; import io.airbyte.featureflag.TestClient; import io.airbyte.featureflag.UseIconUrlInApiResponse; @@ -69,6 +70,7 @@ class DestinationHandlerTest { private ConnectorSpecification connectorSpecification; private OAuthConfigSupplier oAuthConfigSupplier; private ActorDefinitionVersionHelper actorDefinitionVersionHelper; + private ActorDefinitionVersionUpdater actorDefinitionVersionUpdater; private TestClient featureFlagClient; private ActorDefinitionHandlerHelper actorDefinitionHandlerHelper; @@ -93,6 +95,7 @@ void setUp() throws IOException, JsonValidationException, ConfigNotFoundExceptio destinationService = mock(DestinationService.class); featureFlagClient = mock(TestClient.class); actorDefinitionHandlerHelper = 
mock(ActorDefinitionHandlerHelper.class); + actorDefinitionVersionUpdater = mock(ActorDefinitionVersionUpdater.class); when(featureFlagClient.boolVariation(UseIconUrlInApiResponse.INSTANCE, new Workspace(ANONYMOUS))) .thenReturn(true); @@ -129,7 +132,8 @@ void setUp() throws IOException, JsonValidationException, ConfigNotFoundExceptio actorDefinitionVersionHelper, destinationService, featureFlagClient, - actorDefinitionHandlerHelper); + actorDefinitionHandlerHelper, + actorDefinitionVersionUpdater); when(actorDefinitionVersionHelper.getDestinationVersionWithOverrideStatus(standardDestinationDefinition, destinationConnection.getWorkspaceId(), destinationConnection.getDestinationId())).thenReturn(destinationDefinitionVersionWithOverrideStatus); @@ -243,19 +247,15 @@ void testUpdateDestination() void testUpgradeDestinationVersion() throws IOException, JsonValidationException, ConfigNotFoundException { final DestinationIdRequestBody requestBody = new DestinationIdRequestBody().destinationId(destinationConnection.getDestinationId()); - final UUID newDefaultVersionId = UUID.randomUUID(); - final StandardDestinationDefinition destinationDefinitionWithNewVersion = Jsons.clone(standardDestinationDefinition) - .withDefaultVersionId(newDefaultVersionId); - when(configRepository.getDestinationConnection(destinationConnection.getDestinationId())) .thenReturn(destinationConnection); - when(configRepository.getStandardDestinationDefinition(destinationDefinitionWithNewVersion.getDestinationDefinitionId())) - .thenReturn(destinationDefinitionWithNewVersion); + when(configRepository.getStandardDestinationDefinition(standardDestinationDefinition.getDestinationDefinitionId())) + .thenReturn(standardDestinationDefinition); destinationHandler.upgradeDestinationVersion(requestBody); - // validate that we set the actor version to the actor definition (global) default version - verify(configRepository).setActorDefaultVersion(destinationConnection.getDestinationId(), newDefaultVersionId); + 
// validate that we call the actorDefinitionVersionUpdater to upgrade the version to global default + verify(actorDefinitionVersionUpdater).upgradeActorVersion(destinationConnection, standardDestinationDefinition); } @Test diff --git a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/InstanceConfigurationHandlerTest.java b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/InstanceConfigurationHandlerTest.java index 07a8962d6bb..339745633fb 100644 --- a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/InstanceConfigurationHandlerTest.java +++ b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/InstanceConfigurationHandlerTest.java @@ -21,6 +21,7 @@ import io.airbyte.commons.license.ActiveAirbyteLicense; import io.airbyte.commons.license.AirbyteLicense; import io.airbyte.commons.license.AirbyteLicense.LicenseType; +import io.airbyte.commons.version.AirbyteVersion; import io.airbyte.config.Configs.AirbyteEdition; import io.airbyte.config.Organization; import io.airbyte.config.StandardWorkspace; @@ -92,6 +93,7 @@ void testGetInstanceConfiguration(final boolean isPro, final boolean isInitialSe final InstanceConfigurationResponse expected = new InstanceConfigurationResponse() .edition(isPro ? EditionEnum.PRO : EditionEnum.COMMUNITY) + .version("0.50.1") .webappUrl(WEBAPP_URL) .licenseType(isPro ? LicenseTypeEnum.PRO : null) .auth(isPro ? 
new AuthConfiguration() @@ -126,6 +128,7 @@ void testGetInstanceConfigurationTrackingStrategy(final String envValue, final T WEBAPP_URL, envValue, AirbyteEdition.COMMUNITY, + new AirbyteVersion("0.50.1"), Optional.empty(), Optional.empty(), mWorkspacePersistence, @@ -185,6 +188,7 @@ void testSetupInstanceConfiguration(final boolean userNamePresent, final boolean final InstanceConfigurationResponse expected = new InstanceConfigurationResponse() .edition(EditionEnum.PRO) + .version("0.50.1") .webappUrl(WEBAPP_URL) .licenseType(LicenseTypeEnum.PRO) .auth(new AuthConfiguration() @@ -258,6 +262,7 @@ private InstanceConfigurationHandler getInstanceConfigurationHandler(final boole WEBAPP_URL, "logging", isPro ? AirbyteEdition.PRO : AirbyteEdition.COMMUNITY, + new AirbyteVersion("0.50.1"), isPro ? Optional.of(keycloakConfiguration) : Optional.empty(), isPro ? Optional.of(activeAirbyteLicense) : Optional.empty(), mWorkspacePersistence, diff --git a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/JobHistoryHandlerTest.java b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/JobHistoryHandlerTest.java index c59fe30c607..45e68d032fb 100644 --- a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/JobHistoryHandlerTest.java +++ b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/JobHistoryHandlerTest.java @@ -5,6 +5,7 @@ package io.airbyte.commons.server.handlers; import static io.airbyte.featureflag.ContextKt.ANONYMOUS; +import static io.airbyte.persistence.job.models.Job.SYNC_REPLICATION_TYPES; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.ArgumentMatchers.anyInt; @@ -97,7 +98,7 @@ class JobHistoryHandlerTest { private static final long JOB_ID = 100L; private static final String JOB_CONFIG_ID = "ef296385-6796-413f-ac1b-49c4caba3f2b"; private static final JobStatus JOB_STATUS = 
JobStatus.SUCCEEDED; - private static final JobConfig.ConfigType CONFIG_TYPE = JobConfig.ConfigType.CHECK_CONNECTION_SOURCE; + private static final JobConfig.ConfigType CONFIG_TYPE = ConfigType.SYNC; private static final JobConfigType CONFIG_TYPE_FOR_API = JobConfigType.CHECK_CONNECTION_SOURCE; private static final JobConfig JOB_CONFIG = new JobConfig() .withConfigType(CONFIG_TYPE) @@ -367,7 +368,7 @@ void testListJobsFor() throws IOException { final var secondJobId = JOB_ID + 100; final var createdAt2 = CREATED_AT + 1000; final var secondJobAttempt = createAttempt(0, secondJobId, createdAt2, AttemptStatus.SUCCEEDED); - final var secondJob = new Job(secondJobId, ConfigType.DISCOVER_SCHEMA, JOB_CONFIG_ID, JOB_CONFIG, ImmutableList.of(secondJobAttempt), + final var secondJob = new Job(secondJobId, ConfigType.SYNC, JOB_CONFIG_ID, JOB_CONFIG, ImmutableList.of(secondJobAttempt), JobStatus.SUCCEEDED, null, createdAt2, createdAt2); final Set configTypes = Set.of( @@ -615,7 +616,7 @@ void testGetLatestRunningSyncJob() throws IOException { when(jobPersistence.listJobsForConnectionWithStatuses( connectionId, - Collections.singleton(ConfigType.SYNC), + SYNC_REPLICATION_TYPES, JobStatus.NON_TERMINAL_STATUSES)).thenReturn(List.of(newerRunningJob, olderRunningJob)); final Optional expectedJob = Optional.of(JobConverter.getJobRead(newerRunningJob)); @@ -631,7 +632,7 @@ void testGetLatestRunningSyncJobWhenNone() throws IOException { when(jobPersistence.listJobsForConnectionWithStatuses( connectionId, - Collections.singleton(ConfigType.SYNC), + SYNC_REPLICATION_TYPES, JobStatus.NON_TERMINAL_STATUSES)).thenReturn(Collections.emptyList()); final Optional actual = jobHistoryHandler.getLatestRunningSyncJob(connectionId); diff --git a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/JobInputHandlerTest.java b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/JobInputHandlerTest.java index e9096cd61af..9ecdcda9a96 100644 --- 
a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/JobInputHandlerTest.java +++ b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/JobInputHandlerTest.java @@ -191,8 +191,6 @@ void testGetSyncWorkflowInput() throws JsonValidationException, ConfigNotFoundEx .withDestinationId(DESTINATION_ID) .withSourceConfiguration(SOURCE_CONFIG_WITH_OAUTH_AND_INJECTED_CONFIG) .withDestinationConfiguration(DESTINATION_CONFIG_WITH_OAUTH) - .withState(STATE) - .withCatalog(jobSyncConfig.getConfiguredAirbyteCatalog()) .withIsReset(false); final JobRunConfig expectedJobRunConfig = new JobRunConfig() @@ -266,8 +264,6 @@ void testGetResetSyncWorkflowInput() throws IOException, ApiException, JsonValid .withDestinationId(DESTINATION_ID) .withSourceConfiguration(Jsons.emptyObject()) .withDestinationConfiguration(DESTINATION_CONFIG_WITH_OAUTH) - .withState(STATE) - .withCatalog(jobResetConfig.getConfiguredAirbyteCatalog()) .withWebhookOperationConfigs(jobResetConfig.getWebhookOperationConfigs()) .withIsReset(true); diff --git a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/JobsHandlerTest.java b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/JobsHandlerTest.java index 4df860bc9b6..d1a3481d7db 100644 --- a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/JobsHandlerTest.java +++ b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/JobsHandlerTest.java @@ -211,7 +211,6 @@ void persistJobCancellationSuccess() throws Exception { verify(jobPersistence).failAttempt(JOB_ID, ATTEMPT_NUMBER); verify(jobPersistence).writeAttemptFailureSummary(JOB_ID, ATTEMPT_NUMBER, failureSummary); verify(jobPersistence).cancelJob(JOB_ID); - verify(jobNotifier).failJob(eq("Job was cancelled"), eq(mockJob), any()); verify(helper).trackCompletion(any(), eq(JobStatus.FAILED)); } @@ -294,7 +293,7 @@ void setJobFailure() throws IOException { mSyncConfig.getState()); 
verify(jobPersistence).failJob(JOB_ID); - verify(jobNotifier).failJob(eq(failureReason), Mockito.any(), any()); + verify(jobNotifier).failJob(Mockito.any(), any()); verify(jobErrorReporter).reportSyncJobFailure(CONNECTION_ID, failureSummary, expectedReportingContext, expectedAttemptConfig); } @@ -324,8 +323,33 @@ void setJobFailureWithNullJobSyncConfig() throws IOException { jobsHandler.jobFailure(new JobFailureRequest().jobId(JOB_ID).attemptNumber(1).connectionId(CONNECTION_ID).reason(failureReason)); verify(jobPersistence).failJob(JOB_ID); - verify(jobNotifier).failJob(eq(failureReason), Mockito.any(), any()); + verify(jobNotifier).failJob(Mockito.any(), any()); verify(jobErrorReporter).reportSyncJobFailure(eq(CONNECTION_ID), eq(failureSummary), Mockito.any(), Mockito.any()); } + @Test + void testCancelledJobsDoNotNotify() throws IOException { + + final AttemptFailureSummary failureSummary = new AttemptFailureSummary() + .withFailures(Collections.singletonList( + new FailureReason() + .withFailureOrigin(FailureOrigin.SOURCE))); + + final Attempt mAttempt = Mockito.mock(Attempt.class); + Mockito.when(mAttempt.getFailureSummary()).thenReturn(Optional.of(failureSummary)); + + final JobConfig mJobConfig = Mockito.mock(JobConfig.class); + Mockito.when(mJobConfig.getSync()).thenReturn(null); + + final Job mJob = Mockito.mock(Job.class); + Mockito.when(mJob.getScope()).thenReturn(CONNECTION_ID.toString()); + Mockito.when(mJob.getConfig()).thenReturn(mJobConfig); + Mockito.when(mJob.getLastFailedAttempt()).thenReturn(Optional.of(mAttempt)); + Mockito.when(mJob.getConfigType()).thenReturn(SYNC); + Mockito.when(jobPersistence.getJob(JOB_ID)).thenReturn(mJob); + + jobsHandler.persistJobCancellation(CONNECTION_ID, JOB_ID, ATTEMPT_NUMBER, failureSummary); + verify(jobNotifier, never()).failJob(Mockito.any(), any()); + } + } diff --git a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/OrganizationsHandlerTest.java 
b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/OrganizationsHandlerTest.java index 45cc226a9a0..d8d3c318bc3 100644 --- a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/OrganizationsHandlerTest.java +++ b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/OrganizationsHandlerTest.java @@ -6,7 +6,6 @@ import static org.junit.jupiter.api.Assertions.assertEquals; import static org.mockito.ArgumentMatchers.any; -import static org.mockito.Mockito.doNothing; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -19,7 +18,7 @@ import io.airbyte.api.model.generated.Pagination; import io.airbyte.config.Organization; import io.airbyte.config.persistence.OrganizationPersistence; -import io.airbyte.config.persistence.PermissionPersistence; +import io.airbyte.data.services.PermissionService; import java.util.List; import java.util.Optional; import java.util.UUID; @@ -36,17 +35,17 @@ class OrganizationsHandlerTest { private static final String ORGANIZATION_SSO_REALM = "realm"; private static final Organization ORGANIZATION = new Organization().withOrganizationId(ORGANIZATION_ID_1).withEmail(ORGANIZATION_EMAIL).withName(ORGANIZATION_NAME); - private PermissionPersistence permissionPersistence; + private PermissionService permissionService; private OrganizationPersistence organizationPersistence; private Supplier uuidSupplier; private OrganizationsHandler organizationsHandler; @BeforeEach void setup() { - permissionPersistence = mock(PermissionPersistence.class); + permissionService = mock(PermissionService.class); uuidSupplier = mock(Supplier.class); organizationPersistence = mock(OrganizationPersistence.class); - organizationsHandler = new OrganizationsHandler(organizationPersistence, permissionPersistence, uuidSupplier); + organizationsHandler = new OrganizationsHandler(organizationPersistence, permissionService, uuidSupplier); } @Test @@ -56,7 +55,6 @@ void 
testCreateOrganization() throws Exception { .withName(ORGANIZATION_NAME); when(uuidSupplier.get()).thenReturn(ORGANIZATION_ID_1); when(organizationPersistence.createOrganization(newOrganization)).thenReturn(newOrganization); - doNothing().when(permissionPersistence).writePermission(any()); final OrganizationRead result = organizationsHandler.createOrganization( new OrganizationCreateRequestBody().organizationName(ORGANIZATION_NAME).email(ORGANIZATION_EMAIL)); diff --git a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/PermissionHandlerTest.java b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/PermissionHandlerTest.java index 379b517b139..3eaf74740b2 100644 --- a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/PermissionHandlerTest.java +++ b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/PermissionHandlerTest.java @@ -9,8 +9,8 @@ import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.doThrow; import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.never; import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.verifyNoMoreInteractions; import static org.mockito.Mockito.when; import io.airbyte.api.model.generated.PermissionCheckRead; @@ -23,14 +23,15 @@ import io.airbyte.api.model.generated.PermissionUpdate; import io.airbyte.api.model.generated.PermissionsCheckMultipleWorkspacesRequest; import io.airbyte.commons.enums.Enums; -import io.airbyte.commons.server.errors.OperationNotAllowedException; +import io.airbyte.commons.server.errors.ConflictException; import io.airbyte.config.Permission; import io.airbyte.config.Permission.PermissionType; import io.airbyte.config.StandardWorkspace; import io.airbyte.config.User; import io.airbyte.config.persistence.PermissionPersistence; -import io.airbyte.config.persistence.SQLOperationNotAllowedException; import io.airbyte.data.exceptions.ConfigNotFoundException; 
+import io.airbyte.data.services.PermissionService; +import io.airbyte.data.services.RemoveLastOrgAdminPermissionException; import io.airbyte.data.services.WorkspaceService; import io.airbyte.validation.json.JsonValidationException; import java.io.IOException; @@ -39,7 +40,6 @@ import java.util.Set; import java.util.UUID; import java.util.function.Supplier; -import org.jooq.exception.DataAccessException; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Nested; @@ -50,18 +50,19 @@ @SuppressWarnings("PMD.AvoidDuplicateLiterals") class PermissionHandlerTest { - public static final String BLOCKED = "blocked"; private Supplier uuidSupplier; private PermissionPersistence permissionPersistence; private WorkspaceService workspaceService; private PermissionHandler permissionHandler; + private PermissionService permissionService; @BeforeEach void setUp() { permissionPersistence = mock(PermissionPersistence.class); uuidSupplier = mock(Supplier.class); workspaceService = mock(WorkspaceService.class); - permissionHandler = new PermissionHandler(permissionPersistence, workspaceService, uuidSupplier); + permissionService = mock(PermissionService.class); + permissionHandler = new PermissionHandler(permissionPersistence, workspaceService, uuidSupplier, permissionService); } @Test @@ -88,15 +89,15 @@ class CreatePermission { .withPermissionType(PermissionType.WORKSPACE_ADMIN); @Test - void testCreatePermission() throws IOException, JsonValidationException { + void testCreatePermission() throws Exception { final List existingPermissions = List.of(); - when(permissionPersistence.listPermissionsByUser(any())).thenReturn(existingPermissions); + when(permissionService.getPermissionsForUser(any())).thenReturn(existingPermissions); when(uuidSupplier.get()).thenReturn(PERMISSION_ID); - when(permissionPersistence.getPermission(any())).thenReturn(Optional.of(PERMISSION)); final PermissionCreate permissionCreate = new 
PermissionCreate() .permissionType(io.airbyte.api.model.generated.PermissionType.WORKSPACE_OWNER) .userId(USER_ID) .workspaceId(WORKSPACE_ID); + when(permissionService.createPermission(any())).thenReturn(PERMISSION); final PermissionRead actualRead = permissionHandler.createPermission(permissionCreate); final PermissionRead expectedRead = new PermissionRead() .permissionId(PERMISSION_ID) @@ -163,30 +164,14 @@ void updatesPermission() throws Exception { .permissionId(PERMISSION_WORKSPACE_READER.getPermissionId()) .permissionType(io.airbyte.api.model.generated.PermissionType.WORKSPACE_ADMIN); // changing to workspace_admin - final PermissionRead expectedPermissionRead = new PermissionRead() - .permissionId(PERMISSION_WORKSPACE_READER.getPermissionId()) - .permissionType(io.airbyte.api.model.generated.PermissionType.WORKSPACE_ADMIN) - .userId(PERMISSION_WORKSPACE_READER.getUserId()) - .workspaceId(PERMISSION_WORKSPACE_READER.getWorkspaceId()); - - // after the update, getPermission will be called to build the response, so we need to mock it with - // the updated permission type - when(permissionPersistence.getPermission(PERMISSION_WORKSPACE_READER.getPermissionId())) - .thenReturn(Optional.of(new Permission() - .withPermissionId(PERMISSION_WORKSPACE_READER.getPermissionId()) - .withPermissionType(PermissionType.WORKSPACE_ADMIN) // updated - .withWorkspaceId(PERMISSION_WORKSPACE_READER.getWorkspaceId()) - .withUserId(PERMISSION_WORKSPACE_READER.getUserId()))); - - final PermissionRead actualPermissionRead = permissionHandler.updatePermission(update); + permissionHandler.updatePermission(update); - verify(permissionPersistence).writePermission(new Permission() + verify(permissionService).updatePermission(new Permission() .withPermissionId(PERMISSION_WORKSPACE_READER.getPermissionId()) .withPermissionType(PermissionType.WORKSPACE_ADMIN) .withUserId(PERMISSION_WORKSPACE_READER.getUserId()) .withWorkspaceId(PERMISSION_WORKSPACE_READER.getWorkspaceId()) 
.withOrganizationId(null)); - assertEquals(expectedPermissionRead, actualPermissionRead); } @Test @@ -198,25 +183,24 @@ void testUpdateToInstanceAdminPermissionThrows() { } @Test - void throwsOperationNotAllowedIfPersistenceBlocksUpdate() throws Exception { + void throwsConflictExceptionIfServiceBlocksUpdate() throws Exception { final PermissionUpdate update = new PermissionUpdate() .permissionId(PERMISSION_ORGANIZATION_ADMIN.getPermissionId()) .permissionType(io.airbyte.api.model.generated.PermissionType.ORGANIZATION_EDITOR); // changing to organization_editor - doThrow(new DataAccessException(BLOCKED, new SQLOperationNotAllowedException(BLOCKED))).when(permissionPersistence).writePermission(any()); - assertThrows(OperationNotAllowedException.class, () -> permissionHandler.updatePermission(update)); + doThrow(RemoveLastOrgAdminPermissionException.class).when(permissionService).updatePermission(any()); + assertThrows(ConflictException.class, () -> permissionHandler.updatePermission(update)); } @Test - void workspacePermissionUpdatesDoNotModifyIdFields() - throws JsonValidationException, io.airbyte.config.persistence.ConfigNotFoundException, IOException { + void workspacePermissionUpdatesDoNotModifyIdFields() throws Exception { final PermissionUpdate workspacePermissionUpdate = new PermissionUpdate() .permissionId(PERMISSION_WORKSPACE_READER.getPermissionId()) .permissionType(io.airbyte.api.model.generated.PermissionType.WORKSPACE_EDITOR); // changing to workspace_editor permissionHandler.updatePermission(workspacePermissionUpdate); - verify(permissionPersistence).writePermission(new Permission() + verify(permissionService).updatePermission(new Permission() .withPermissionId(PERMISSION_WORKSPACE_READER.getPermissionId()) .withPermissionType(PermissionType.WORKSPACE_EDITOR) .withWorkspaceId(PERMISSION_WORKSPACE_READER.getWorkspaceId()) // workspace ID preserved from original permission @@ -224,15 +208,14 @@ void workspacePermissionUpdatesDoNotModifyIdFields() } @Test 
- void organizationPermissionUpdatesDoNotModifyIdFields() - throws JsonValidationException, io.airbyte.config.persistence.ConfigNotFoundException, IOException { + void organizationPermissionUpdatesDoNotModifyIdFields() throws Exception { final PermissionUpdate orgPermissionUpdate = new PermissionUpdate() .permissionId(PERMISSION_ORGANIZATION_ADMIN.getPermissionId()) .permissionType(io.airbyte.api.model.generated.PermissionType.ORGANIZATION_EDITOR); // changing to organization_editor permissionHandler.updatePermission(orgPermissionUpdate); - verify(permissionPersistence).writePermission(new Permission() + verify(permissionService).updatePermission(new Permission() .withPermissionId(PERMISSION_ORGANIZATION_ADMIN.getPermissionId()) .withPermissionType(PermissionType.ORGANIZATION_EDITOR) .withOrganizationId(PERMISSION_ORGANIZATION_ADMIN.getOrganizationId()) // organization ID preserved from original permission @@ -271,14 +254,14 @@ void deletesPermission() throws Exception { permissionHandler.deletePermission(new PermissionIdRequestBody().permissionId(PERMISSION_WORKSPACE_READER.getPermissionId())); - verify(permissionPersistence).deletePermissionById(PERMISSION_WORKSPACE_READER.getPermissionId()); + verify(permissionService).deletePermission(PERMISSION_WORKSPACE_READER.getPermissionId()); } @Test - void throwsOperationNotAllowedIfPersistenceBlocks() throws Exception { - doThrow(new DataAccessException(BLOCKED, new SQLOperationNotAllowedException(BLOCKED))).when(permissionPersistence) - .deletePermissionById(any()); - assertThrows(OperationNotAllowedException.class, () -> permissionHandler.deletePermission( + void throwsConflictIfPersistenceBlocks() throws Exception { + doThrow(RemoveLastOrgAdminPermissionException.class).when(permissionService).deletePermission(any()); + + assertThrows(ConflictException.class, () -> permissionHandler.deletePermission( new PermissionIdRequestBody().permissionId(PERMISSION_ORGANIZATION_ADMIN.getPermissionId()))); } @@ -714,7 +697,7 @@ 
class DeleteUserFromWorkspace { private static final UUID USER_ID = UUID.randomUUID(); @Test - void testDeleteUserFromWorkspace() throws IOException { + void testDeleteUserFromWorkspace() throws Exception { // should be deleted final Permission workspacePermission = new Permission() .withPermissionId(UUID.randomUUID()) @@ -736,42 +719,16 @@ void testDeleteUserFromWorkspace() throws IOException { .withOrganizationId(UUID.randomUUID()) .withPermissionType(PermissionType.ORGANIZATION_ADMIN); - // should not be deleted, different user - final Permission otherUserPermission = new Permission() - .withPermissionId(UUID.randomUUID()) - .withUserId(UUID.randomUUID()) - .withWorkspaceId(WORKSPACE_ID) - .withPermissionType(PermissionType.WORKSPACE_ADMIN); - when(permissionPersistence.listPermissionsByUser(USER_ID)).thenReturn( List.of(workspacePermission, otherWorkspacePermission, orgPermission)); permissionHandler.deleteUserFromWorkspace(new PermissionDeleteUserFromWorkspaceRequestBody().userIdToRemove(USER_ID).workspaceId(WORKSPACE_ID)); // verify the intended permission was deleted - verify(permissionPersistence).deletePermissionById(workspacePermission.getPermissionId()); + verify(permissionService).deletePermissions(List.of(workspacePermission.getPermissionId())); // verify the other permissions were not deleted - verify(permissionPersistence, never()).deletePermissionById(otherWorkspacePermission.getPermissionId()); - verify(permissionPersistence, never()).deletePermissionById(otherUserPermission.getPermissionId()); - verify(permissionPersistence, never()).deletePermissionById(orgPermission.getPermissionId()); - } - - @Test - void testDeleteUserFromWorkspaceThrows() throws IOException { - final Permission permission = new Permission() - .withPermissionId(UUID.randomUUID()) - .withUserId(USER_ID) - .withWorkspaceId(WORKSPACE_ID) - .withPermissionType(PermissionType.WORKSPACE_ADMIN); - - 
when(permissionPersistence.listPermissionsByUser(USER_ID)).thenReturn(List.of(permission)); - - doThrow(new IOException()).when(permissionPersistence).deletePermissionById(permission.getPermissionId()); - - assertThrows(RuntimeException.class, () -> permissionHandler.deleteUserFromWorkspace(new PermissionDeleteUserFromWorkspaceRequestBody() - .userIdToRemove(USER_ID) - .workspaceId(WORKSPACE_ID))); + verifyNoMoreInteractions(permissionService); } } diff --git a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/SchedulerHandlerTest.java b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/SchedulerHandlerTest.java index 41611f5a757..c679057a28c 100644 --- a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/SchedulerHandlerTest.java +++ b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/SchedulerHandlerTest.java @@ -108,6 +108,7 @@ import io.airbyte.config.persistence.ConfigNotFoundException; import io.airbyte.config.persistence.ConfigRepository; import io.airbyte.config.persistence.StreamResetPersistence; +import io.airbyte.config.persistence.domain.StreamRefresh; import io.airbyte.config.secrets.SecretsRepositoryWriter; import io.airbyte.data.services.SecretPersistenceConfigService; import io.airbyte.data.services.WorkspaceService; @@ -265,6 +266,7 @@ class SchedulerHandlerTest { private ConnectorDefinitionSpecificationHandler connectorDefinitionSpecificationHandler; private WorkspaceService workspaceService; private SecretPersistenceConfigService secretPersistenceConfigService; + private StreamRefreshesHandler streamRefreshesHandler; @BeforeEach void setup() throws JsonValidationException, ConfigNotFoundException, IOException { @@ -313,6 +315,9 @@ void setup() throws JsonValidationException, ConfigNotFoundException, IOExceptio .supportedDestinationSyncModes( List.of(io.airbyte.api.model.generated.DestinationSyncMode.OVERWRITE, 
io.airbyte.api.model.generated.DestinationSyncMode.APPEND))); + streamRefreshesHandler = mock(StreamRefreshesHandler.class); + when(streamRefreshesHandler.getRefreshesForConnection(any())).thenReturn(new ArrayList<>()); + schedulerHandler = new SchedulerHandler( configRepository, secretsRepositoryWriter, @@ -335,13 +340,14 @@ void setup() throws JsonValidationException, ConfigNotFoundException, IOExceptio jobTracker, connectorDefinitionSpecificationHandler, workspaceService, - secretPersistenceConfigService); + secretPersistenceConfigService, + streamRefreshesHandler); } @Test @DisplayName("Test job creation") void createJob() throws JsonValidationException, ConfigNotFoundException, IOException { - Mockito.when(jobFactory.create(CONNECTION_ID)) + Mockito.when(jobFactory.createSync(CONNECTION_ID)) .thenReturn(JOB_ID); Mockito.when(configRepository.getStandardSync(CONNECTION_ID)) .thenReturn(Mockito.mock(StandardSync.class)); @@ -355,6 +361,27 @@ void createJob() throws JsonValidationException, ConfigNotFoundException, IOExce Assertions.assertThat(output.getJob().getId()).isEqualTo(JOB_ID); } + @Test + @DisplayName("Test refresh job creation") + void createRefreshJob() throws JsonValidationException, ConfigNotFoundException, IOException { + when(jobFactory.createRefresh(eq(CONNECTION_ID), any())) + .thenReturn(JOB_ID); + when(configRepository.getStandardSync(CONNECTION_ID)) + .thenReturn(mock(StandardSync.class)); + when(jobPersistence.getJob(JOB_ID)) + .thenReturn(job); + when(jobConverter.getJobInfoRead(job)) + .thenReturn(new JobInfoRead().job(new JobRead().id(JOB_ID))); + when(streamRefreshesHandler.getRefreshesForConnection(CONNECTION_ID)) + .thenReturn(List.of( + new StreamRefresh(UUID.randomUUID(), CONNECTION_ID, "name", "namespace", null))); + + final JobInfoRead output = schedulerHandler.createJob(new JobCreate().connectionId(CONNECTION_ID)); + + verify(jobFactory).createRefresh(eq(CONNECTION_ID), any()); + 
Assertions.assertThat(output.getJob().getId()).isEqualTo(JOB_ID); + } + @Test @DisplayName("Test reset job creation") void createResetJob() throws JsonValidationException, ConfigNotFoundException, IOException { diff --git a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/SourceHandlerTest.java b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/SourceHandlerTest.java index 19dea91fba8..9ffe80a2f49 100644 --- a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/SourceHandlerTest.java +++ b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/SourceHandlerTest.java @@ -54,6 +54,7 @@ import io.airbyte.config.secrets.JsonSecretsProcessor; import io.airbyte.config.secrets.SecretCoordinate; import io.airbyte.config.secrets.SecretsRepositoryReader; +import io.airbyte.data.helpers.ActorDefinitionVersionUpdater; import io.airbyte.data.services.SecretPersistenceConfigService; import io.airbyte.data.services.SourceService; import io.airbyte.data.services.WorkspaceService; @@ -95,6 +96,7 @@ class SourceHandlerTest { private ConnectorSpecification connectorSpecification; private OAuthConfigSupplier oAuthConfigSupplier; private ActorDefinitionVersionHelper actorDefinitionVersionHelper; + private ActorDefinitionVersionUpdater actorDefinitionVersionUpdater; private TestClient featureFlagClient; private static final String SHOES = "shoes"; @@ -103,8 +105,8 @@ class SourceHandlerTest { Field.of(SKU, JsonSchemaType.STRING)); private static final String ICON_URL = "https://connectors.airbyte.com/files/metadata/airbyte/destination-test/latest/icon.svg"; - private static boolean IS_VERSION_OVERRIDE_APPLIED = true; - private static SupportState SUPPORT_STATE = SupportState.SUPPORTED; + private static final boolean IS_VERSION_OVERRIDE_APPLIED = true; + private static final SupportState SUPPORT_STATE = SupportState.SUPPORTED; private SourceService sourceService; private WorkspaceService workspaceService; 
@@ -130,6 +132,7 @@ void setUp() throws IOException { workspaceService = mock(WorkspaceService.class); secretPersistenceConfigService = mock(SecretPersistenceConfigService.class); actorDefinitionHandlerHelper = mock(ActorDefinitionHandlerHelper.class); + actorDefinitionVersionUpdater = mock(ActorDefinitionVersionUpdater.class); connectorSpecification = ConnectorSpecificationHelpers.generateConnectorSpecification(); @@ -163,7 +166,8 @@ void setUp() throws IOException { configurationUpdate, oAuthConfigSupplier, actorDefinitionVersionHelper, featureFlagClient, sourceService, workspaceService, secretPersistenceConfigService, - actorDefinitionHandlerHelper); + actorDefinitionHandlerHelper, + actorDefinitionVersionUpdater); } @Test @@ -264,18 +268,14 @@ void testUpdateSource() throws JsonValidationException, ConfigNotFoundException, void testUpgradeSourceVersion() throws JsonValidationException, ConfigNotFoundException, IOException { final SourceIdRequestBody sourceIdRequestBody = new SourceIdRequestBody().sourceId(sourceConnection.getSourceId()); - final UUID newDefaultVersionId = UUID.randomUUID(); - final StandardSourceDefinition sourceDefinitionWithNewVersion = Jsons.clone(standardSourceDefinition) - .withDefaultVersionId(newDefaultVersionId); - when(configRepository.getSourceConnection(sourceConnection.getSourceId())).thenReturn(sourceConnection); - when(configRepository.getStandardSourceDefinition(sourceDefinitionWithNewVersion.getSourceDefinitionId())) - .thenReturn(sourceDefinitionWithNewVersion); + when(configRepository.getStandardSourceDefinition(standardSourceDefinition.getSourceDefinitionId())) + .thenReturn(standardSourceDefinition); sourceHandler.upgradeSourceVersion(sourceIdRequestBody); - // validate that we set the actor version to the actor definition (global) default version - verify(configRepository).setActorDefaultVersion(sourceConnection.getSourceId(), newDefaultVersionId); + // validate that we call the actorDefinitionVersionUpdater to upgrade the 
version to global default + verify(actorDefinitionVersionUpdater).upgradeActorVersion(sourceConnection, standardSourceDefinition); } @Test diff --git a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/UserHandlerTest.java b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/UserHandlerTest.java index d44cbb83480..3725d23cd04 100644 --- a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/UserHandlerTest.java +++ b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/UserHandlerTest.java @@ -10,7 +10,6 @@ import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.anyString; import static org.mockito.ArgumentMatchers.argThat; -import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.inOrder; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.never; @@ -50,6 +49,7 @@ import io.airbyte.config.persistence.OrganizationPersistence; import io.airbyte.config.persistence.PermissionPersistence; import io.airbyte.config.persistence.UserPersistence; +import io.airbyte.data.services.PermissionService; import io.airbyte.validation.json.JsonValidationException; import java.io.IOException; import java.util.Arrays; @@ -83,6 +83,7 @@ class UserHandlerTest { OrganizationsHandler organizationsHandler; JwtUserAuthenticationResolver jwtUserAuthenticationResolver; InitialUserConfiguration initialUserConfiguration; + PermissionService permissionService; private static final UUID USER_ID = UUID.randomUUID(); private static final String USER_NAME = "user 1"; @@ -98,11 +99,13 @@ class UserHandlerTest { .withAuthProvider(AuthProvider.GOOGLE_IDENTITY_PLATFORM) .withStatus(Status.INVITED) .withName(USER_NAME); + private ResourceBootstrapHandler resourceBootstrapHandler; @BeforeEach void setUp() { userPersistence = mock(UserPersistence.class); permissionPersistence = mock(PermissionPersistence.class); + permissionService = 
mock(PermissionService.class); permissionHandler = mock(PermissionHandler.class); workspacesHandler = mock(WorkspacesHandler.class); organizationPersistence = mock(OrganizationPersistence.class); @@ -110,9 +113,11 @@ void setUp() { uuidSupplier = mock(Supplier.class); jwtUserAuthenticationResolver = mock(JwtUserAuthenticationResolver.class); initialUserConfiguration = mock(InitialUserConfiguration.class); + resourceBootstrapHandler = mock(ResourceBootstrapHandler.class); - userHandler = new UserHandler(userPersistence, permissionPersistence, organizationPersistence, permissionHandler, workspacesHandler, - uuidSupplier, jwtUserAuthenticationResolver, Optional.of(initialUserConfiguration)); + userHandler = + new UserHandler(userPersistence, permissionPersistence, permissionService, organizationPersistence, permissionHandler, workspacesHandler, + uuidSupplier, jwtUserAuthenticationResolver, Optional.of(initialUserConfiguration), resourceBootstrapHandler); } @Test @@ -324,7 +329,7 @@ void setUp() throws IOException, JsonValidationException, ConfigNotFoundExceptio when(jwtUserAuthenticationResolver.resolveUser(NEW_AUTH_USER_ID)).thenReturn(newUser); when(uuidSupplier.get()).thenReturn(NEW_USER_ID); when(userPersistence.getUser(NEW_USER_ID)).thenReturn(Optional.of(newUser)); - when(workspacesHandler.createDefaultWorkspaceForUser(any(), any())).thenReturn(defaultWorkspace); + when(resourceBootstrapHandler.bootStrapWorkspaceForCurrentUser(any())).thenReturn(defaultWorkspace); } @ParameterizedTest @@ -351,8 +356,9 @@ void testNewUserCreation(final AuthProvider authProvider, } else { // replace default user handler with one that doesn't use initial user config (ie to test what // happens in Cloud) - userHandler = new UserHandler(userPersistence, permissionPersistence, organizationPersistence, permissionHandler, workspacesHandler, - uuidSupplier, jwtUserAuthenticationResolver, Optional.empty()); + userHandler = new UserHandler(userPersistence, permissionPersistence, 
permissionService, organizationPersistence, permissionHandler, + workspacesHandler, + uuidSupplier, jwtUserAuthenticationResolver, Optional.empty(), resourceBootstrapHandler); } if (isFirstOrgUser) { @@ -371,6 +377,9 @@ void testNewUserCreation(final AuthProvider authProvider, when(workspacesHandler.listWorkspacesInOrganization( new ListWorkspacesInOrganizationRequestBody().organizationId(ORGANIZATION.getOrganizationId()))).thenReturn( new WorkspaceReadList().workspaces(List.of(defaultWorkspace))); + if (newUser.getDefaultWorkspaceId() == null) { + newUser.setDefaultWorkspaceId(defaultWorkspace.getWorkspaceId()); + } } else { when(workspacesHandler.listWorkspacesInOrganization(any())).thenReturn(new WorkspaceReadList().workspaces(List.of())); } @@ -390,7 +399,7 @@ void testNewUserCreation(final AuthProvider authProvider, verifyUserRead(userRead, apiAuthProvider); verifyInstanceAdminPermissionCreation(initialUserEmail, initialUserPresent); verifyOrganizationPermissionCreation(ssoRealm, isFirstOrgUser); - verifyDefaultWorkspaceCreation(ssoRealm, isDefaultWorkspaceForOrgPresent, userPersistenceInOrder); + verifyDefaultWorkspaceCreation(isDefaultWorkspaceForOrgPresent, userPersistenceInOrder); } private void verifyCreatedUser(final AuthProvider expectedAuthProvider, final InOrder inOrder) throws IOException { @@ -400,38 +409,19 @@ private void verifyCreatedUser(final AuthProvider expectedAuthProvider, final In && user.getAuthProvider().equals(expectedAuthProvider))); } - private void verifyDefaultWorkspaceCreation(final String ssoRealm, final Boolean isDefaultWorkspaceForOrgPresent, final InOrder inOrder) - throws IOException, JsonValidationException, ConfigNotFoundException { - boolean workspaceCreated = false; - - if (ssoRealm == null) { - // always create a default workspace for non-SSO users - verify(workspacesHandler).createDefaultWorkspaceForUser( - argThat(user -> user.getUserId().equals(NEW_USER_ID)), - eq(Optional.empty())); - workspaceCreated = true; - - } 
else { - if (!isDefaultWorkspaceForOrgPresent) { - // create a default workspace for the org if one doesn't yet exist - verify(workspacesHandler).createDefaultWorkspaceForUser( - argThat(user -> user.getUserId().equals(NEW_USER_ID)), - argThat(org -> org.orElseThrow().getOrganizationId().equals(ORGANIZATION.getOrganizationId()))); - workspaceCreated = true; - - } else { - // never create an additional workspace for the org if one already exists. - verify(workspacesHandler, never()).createDefaultWorkspaceForUser(any(), any()); - } - } - if (workspaceCreated) { + private void verifyDefaultWorkspaceCreation(final Boolean isDefaultWorkspaceForOrgPresent, final InOrder inOrder) + throws IOException { + // No need to deal with other vars because SSO users and first org users etc. are all directed + // through the same codepath now. + if (!isDefaultWorkspaceForOrgPresent) { + // create a default workspace for the org if one doesn't yet exist + verify(resourceBootstrapHandler).bootStrapWorkspaceForCurrentUser(any()); // if a workspace was created, verify that the user's defaultWorkspaceId was updated // and that a workspaceAdmin permission was created for them. inOrder.verify(userPersistence).writeUser(argThat(user -> user.getDefaultWorkspaceId().equals(WORKSPACE_ID))); - verify(permissionHandler).createPermission(new PermissionCreate() - .permissionType(io.airbyte.api.model.generated.PermissionType.WORKSPACE_ADMIN) - .workspaceId(WORKSPACE_ID) - .userId(NEW_USER_ID)); + } else { + // never create an additional workspace for the org if one already exists. + verify(resourceBootstrapHandler, never()).bootStrapWorkspaceForCurrentUser(any()); } } @@ -443,17 +433,17 @@ private void verifyUserRead(final UserRead userRead, final io.airbyte.api.model. 
} private void verifyInstanceAdminPermissionCreation(final String initialUserEmail, final boolean initialUserPresent) - throws IOException { + throws Exception { // instance_admin permissions should only ever be created when the initial user config is present // (which should never be true in Cloud). // also, if the initial user email is null or doesn't match the new user's email, no instance_admin // permission should be created if (!initialUserPresent || initialUserEmail == null || !initialUserEmail.equalsIgnoreCase(NEW_EMAIL)) { - verify(permissionPersistence, never()) - .writePermission(argThat(permission -> permission.getPermissionType().equals(PermissionType.INSTANCE_ADMIN))); + verify(permissionService, never()) + .createPermission(argThat(permission -> permission.getPermissionType().equals(PermissionType.INSTANCE_ADMIN))); } else { // otherwise, instance_admin permission should be created - verify(permissionPersistence).writePermission(argThat( + verify(permissionService).createPermission(argThat( permission -> permission.getPermissionType().equals(PermissionType.INSTANCE_ADMIN) && permission.getUserId().equals(NEW_USER_ID))); } } diff --git a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/WebBackendConnectionsHandlerTest.java b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/WebBackendConnectionsHandlerTest.java index bffd7a95f59..5652e2d3b6a 100644 --- a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/WebBackendConnectionsHandlerTest.java +++ b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/WebBackendConnectionsHandlerTest.java @@ -11,9 +11,11 @@ import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.anyBoolean; import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.doReturn; import static org.mockito.Mockito.inOrder; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.never; 
+import static org.mockito.Mockito.spy; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; @@ -100,11 +102,14 @@ import io.airbyte.config.persistence.ConfigRepository.StandardSyncQuery; import io.airbyte.config.secrets.JsonSecretsProcessor; import io.airbyte.config.secrets.SecretsRepositoryReader; +import io.airbyte.data.helpers.ActorDefinitionVersionUpdater; import io.airbyte.data.services.DestinationService; import io.airbyte.data.services.SecretPersistenceConfigService; import io.airbyte.data.services.SourceService; import io.airbyte.data.services.WorkspaceService; +import io.airbyte.featureflag.FeatureFlagClient; import io.airbyte.featureflag.TestClient; +import io.airbyte.featureflag.UseClear; import io.airbyte.featureflag.UseIconUrlInApiResponse; import io.airbyte.featureflag.Workspace; import io.airbyte.persistence.job.factory.OAuthConfigSupplier; @@ -129,6 +134,8 @@ import java.util.stream.Collectors; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.ValueSource; import org.mockito.ArgumentCaptor; import org.mockito.InOrder; @@ -154,6 +161,7 @@ class WebBackendConnectionsHandlerTest { private ConfigRepository configRepository; private ActorDefinitionVersionHelper actorDefinitionVersionHelper; private ActorDefinitionHandlerHelper actorDefinitionHandlerHelper; + private final FeatureFlagClient featureFlagClient = mock(TestClient.class); private static final String STREAM1 = "stream1"; private static final String STREAM2 = "stream2"; @@ -181,13 +189,13 @@ void setup() throws IOException, JsonValidationException, ConfigNotFoundExceptio final ConfigurationUpdate configurationUpdate = mock(ConfigurationUpdate.class); final OAuthConfigSupplier oAuthConfigSupplier = mock(OAuthConfigSupplier.class); final DestinationService destinationService = mock(DestinationService.class); + 
final ActorDefinitionVersionUpdater actorDefinitionVersionUpdater = mock(ActorDefinitionVersionUpdater.class); final SecretsRepositoryReader secretsRepositoryReader = mock(SecretsRepositoryReader.class); final SourceService sourceService = mock(SourceService.class); final WorkspaceService workspaceService = mock(WorkspaceService.class); final SecretPersistenceConfigService secretPersistenceConfigService = mock(SecretPersistenceConfigService.class); - final TestClient featureFlagClient = mock(TestClient.class); final Supplier uuidGenerator = mock(Supplier.class); when(featureFlagClient.boolVariation(UseIconUrlInApiResponse.INSTANCE, new Workspace(ANONYMOUS))) .thenReturn(true); @@ -202,7 +210,8 @@ void setup() throws IOException, JsonValidationException, ConfigNotFoundExceptio actorDefinitionVersionHelper, destinationService, featureFlagClient, - actorDefinitionHandlerHelper); + actorDefinitionHandlerHelper, + actorDefinitionVersionUpdater); final SourceHandler sourceHandler = new SourceHandler(configRepository, secretsRepositoryReader, @@ -213,9 +222,10 @@ void setup() throws IOException, JsonValidationException, ConfigNotFoundExceptio configurationUpdate, oAuthConfigSupplier, actorDefinitionVersionHelper, featureFlagClient, sourceService, workspaceService, secretPersistenceConfigService, - actorDefinitionHandlerHelper); + actorDefinitionHandlerHelper, + actorDefinitionVersionUpdater); - wbHandler = new WebBackendConnectionsHandler( + wbHandler = spy(new WebBackendConnectionsHandler( connectionsHandler, stateHandler, sourceHandler, @@ -225,7 +235,8 @@ void setup() throws IOException, JsonValidationException, ConfigNotFoundExceptio operationsHandler, eventRunner, configRepository, - actorDefinitionVersionHelper); + actorDefinitionVersionHelper, + featureFlagClient)); final StandardSourceDefinition sourceDefinition = new StandardSourceDefinition() .withSourceDefinitionId(UUID.randomUUID()) @@ -923,8 +934,11 @@ void testUpdateConnectionWithOperations() throws 
JsonValidationException, Config verify(operationsHandler, times(1)).updateOperation(operationUpdate); } - @Test - void testUpdateConnectionWithUpdatedSchemaLegacy() throws JsonValidationException, ConfigNotFoundException, IOException { + @ParameterizedTest + @ValueSource(booleans = {true, false}) + void testUpdateConnectionWithUpdatedSchemaLegacy(boolean isClear) throws JsonValidationException, ConfigNotFoundException, IOException { + when(featureFlagClient.boolVariation(eq(UseClear.INSTANCE), any())).thenReturn(isClear); + final WebBackendConnectionUpdate updateBody = new WebBackendConnectionUpdate() .namespaceDefinition(expected.getNamespaceDefinition()) .namespaceFormat(expected.getNamespaceFormat()) @@ -968,9 +982,11 @@ void testUpdateConnectionWithUpdatedSchemaLegacy() throws JsonValidationExceptio when(configRepository.getAllStreamsForConnection(expected.getConnectionId())).thenReturn(connectionStreams); final ManualOperationResult successfulResult = ManualOperationResult.builder().jobId(Optional.empty()).failingReason(Optional.empty()).build(); - when(eventRunner.resetConnection(any(), any(), anyBoolean())).thenReturn(successfulResult); + when(eventRunner.resetConnection(any(), any(), eq(!isClear))).thenReturn(successfulResult); when(eventRunner.startNewManualSync(any())).thenReturn(successfulResult); + when(configRepository.getMostRecentActorCatalogForSource(any())).thenReturn(Optional.of(new ActorCatalog().withCatalog(Jsons.emptyObject()))); + final WebBackendConnectionRead result = wbHandler.webBackendUpdateConnection(updateBody); assertEquals(expectedWithNewSchema.getSyncCatalog(), result.getSyncCatalog()); @@ -980,11 +996,14 @@ void testUpdateConnectionWithUpdatedSchemaLegacy() throws JsonValidationExceptio verify(schedulerHandler, times(0)).syncConnection(connectionId); verify(connectionsHandler, times(1)).updateConnection(any()); final InOrder orderVerifier = inOrder(eventRunner); - orderVerifier.verify(eventRunner, 
times(1)).resetConnection(connectionId.getConnectionId(), connectionStreams, true); + orderVerifier.verify(eventRunner, times(1)).resetConnection(connectionId.getConnectionId(), connectionStreams, !isClear); } - @Test - void testUpdateConnectionWithUpdatedSchemaPerStream() throws JsonValidationException, ConfigNotFoundException, IOException { + @ParameterizedTest + @ValueSource(booleans = {true, false}) + void testUpdateConnectionWithUpdatedSchemaPerStream(boolean isClear) throws JsonValidationException, ConfigNotFoundException, IOException { + when(featureFlagClient.boolVariation(eq(UseClear.INSTANCE), any())).thenReturn(isClear); + final WebBackendConnectionUpdate updateBody = new WebBackendConnectionUpdate() .namespaceDefinition(expected.getNamespaceDefinition()) .namespaceFormat(expected.getNamespaceFormat()) @@ -1034,9 +1053,12 @@ void testUpdateConnectionWithUpdatedSchemaPerStream() throws JsonValidationExcep when(connectionsHandler.getConnection(expected.getConnectionId())).thenReturn(connectionRead); final ManualOperationResult successfulResult = ManualOperationResult.builder().jobId(Optional.empty()).failingReason(Optional.empty()).build(); - when(eventRunner.resetConnection(any(), any(), anyBoolean())).thenReturn(successfulResult); + when(eventRunner.resetConnection(any(), any(), eq(!isClear))).thenReturn(successfulResult); when(eventRunner.startNewManualSync(any())).thenReturn(successfulResult); + when(configRepository.getMostRecentActorCatalogForSource(any())).thenReturn(Optional.of(new ActorCatalog().withCatalog(Jsons.emptyObject()))); + doReturn(false).when(wbHandler).containsBreakingChange(any()); + final WebBackendConnectionRead result = wbHandler.webBackendUpdateConnection(updateBody); assertEquals(expectedWithNewSchema.getSyncCatalog(), result.getSyncCatalog()); @@ -1051,7 +1073,7 @@ void testUpdateConnectionWithUpdatedSchemaPerStream() throws JsonValidationExcep new io.airbyte.protocol.models.StreamDescriptor().withName("updateStream"), new 
io.airbyte.protocol.models.StreamDescriptor().withName("configUpdateStream"), new io.airbyte.protocol.models.StreamDescriptor().withName("removeStream")), - true); + !isClear); } @Test diff --git a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/WorkspacesHandlerTest.java b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/WorkspacesHandlerTest.java index 7caf2ca36ee..58a848a36e1 100644 --- a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/WorkspacesHandlerTest.java +++ b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/WorkspacesHandlerTest.java @@ -277,7 +277,8 @@ void testCreateWorkspace() throws JsonValidationException, IOException, ConfigNo .notificationSettings(generateApiNotificationSettingsWithDefaultValue()) .defaultGeography(GEOGRAPHY_US) .webhookConfigs(List.of(new WebhookConfigRead().id(uuid).name(TEST_NAME))) - .organizationId(ORGANIZATION_ID); + .organizationId(ORGANIZATION_ID) + .tombstone(false); assertEquals(expectedRead, actualRead); } @@ -326,7 +327,8 @@ void testCreateWorkspaceIfNotExist() throws JsonValidationException, IOException .notificationSettings(generateApiNotificationSettingsWithDefaultValue()) .defaultGeography(GEOGRAPHY_US) .webhookConfigs(List.of(new WebhookConfigRead().id(uuid).name(TEST_NAME))) - .organizationId(ORGANIZATION_ID); + .organizationId(ORGANIZATION_ID) + .tombstone(false); assertEquals(expectedRead, actualRead); assertEquals(expectedRead, secondActualRead); @@ -343,7 +345,8 @@ void testCreateWorkspaceWithMinimumInput() throws JsonValidationException, IOExc final WorkspaceCreate workspaceCreate = new WorkspaceCreate() .name(NEW_WORKSPACE) - .email(TEST_EMAIL); + .email(TEST_EMAIL) + .organizationId(ORGANIZATION_ID); final WorkspaceRead actualRead = workspacesHandler.createWorkspace(workspaceCreate); final WorkspaceRead expectedRead = new WorkspaceRead() @@ -360,7 +363,9 @@ void testCreateWorkspaceWithMinimumInput() throws 
JsonValidationException, IOExc .notifications(List.of()) .notificationSettings(generateDefaultApiNotificationSettings()) .defaultGeography(GEOGRAPHY_AUTO) - .webhookConfigs(Collections.emptyList()); + .webhookConfigs(Collections.emptyList()) + .tombstone(false) + .organizationId(ORGANIZATION_ID); assertEquals(expectedRead, actualRead); } @@ -384,7 +389,8 @@ void testCreateWorkspaceDuplicateSlug() throws JsonValidationException, IOExcept .news(false) .anonymousDataCollection(false) .securityUpdates(false) - .notifications(Collections.emptyList()); + .notifications(Collections.emptyList()) + .organizationId(ORGANIZATION_ID); final WorkspaceRead actualRead = workspacesHandler.createWorkspace(workspaceCreate); final WorkspaceRead expectedRead = new WorkspaceRead() @@ -401,7 +407,9 @@ void testCreateWorkspaceDuplicateSlug() throws JsonValidationException, IOExcept .notifications(Collections.emptyList()) .notificationSettings(generateDefaultApiNotificationSettings()) .defaultGeography(GEOGRAPHY_AUTO) - .webhookConfigs(Collections.emptyList()); + .webhookConfigs(Collections.emptyList()) + .tombstone(false) + .organizationId(ORGANIZATION_ID); assertTrue(actualRead.getSlug().startsWith(workspace.getSlug())); assertNotEquals(workspace.getSlug(), actualRead.getSlug()); @@ -463,7 +471,8 @@ void testListWorkspaces() throws JsonValidationException, IOException { .notifications(List.of(generateApiNotification())) .notificationSettings(generateApiNotificationSettings()) .defaultGeography(GEOGRAPHY_AUTO) - .organizationId(ORGANIZATION_ID); + .organizationId(ORGANIZATION_ID) + .tombstone(false); final WorkspaceRead expectedWorkspaceRead2 = new WorkspaceRead() .workspaceId(workspace2.getWorkspaceId()) @@ -479,7 +488,8 @@ void testListWorkspaces() throws JsonValidationException, IOException { .notifications(List.of(generateApiNotification())) .notificationSettings(generateApiNotificationSettings()) .defaultGeography(GEOGRAPHY_AUTO) - .organizationId(ORGANIZATION_ID); + 
.organizationId(ORGANIZATION_ID) + .tombstone(false); final WorkspaceReadList actualWorkspaceReadList = workspacesHandler.listWorkspaces(); @@ -509,31 +519,18 @@ void testGetWorkspace() throws JsonValidationException, ConfigNotFoundException, .notificationSettings(generateApiNotificationSettings()) .defaultGeography(GEOGRAPHY_AUTO) .webhookConfigs(List.of(new WebhookConfigRead().id(WEBHOOK_CONFIG_ID).name(TEST_NAME))) - .organizationId(ORGANIZATION_ID); + .organizationId(ORGANIZATION_ID) + .tombstone(false); assertEquals(workspaceRead, workspacesHandler.getWorkspace(workspaceIdRequestBody)); } @Test - void testGetWorkspaceBySlug() throws JsonValidationException, ConfigNotFoundException, IOException { + void testGetWorkspaceBySlug() throws ConfigNotFoundException, IOException { when(configRepository.getWorkspaceBySlug("default", false)).thenReturn(workspace); final SlugRequestBody slugRequestBody = new SlugRequestBody().slug("default"); - final WorkspaceRead workspaceRead = new WorkspaceRead() - .workspaceId(workspace.getWorkspaceId()) - .customerId(workspace.getCustomerId()) - .email(TEST_EMAIL) - .name(workspace.getName()) - .slug(workspace.getSlug()) - .initialSetupComplete(workspace.getInitialSetupComplete()) - .displaySetupWizard(workspace.getDisplaySetupWizard()) - .news(workspace.getNews()) - .anonymousDataCollection(workspace.getAnonymousDataCollection()) - .securityUpdates(workspace.getSecurityUpdates()) - .notifications(NotificationConverter.toApiList(workspace.getNotifications())) - .notificationSettings(NotificationSettingsConverter.toApi(workspace.getNotificationSettings())) - .defaultGeography(GEOGRAPHY_AUTO) - .organizationId(ORGANIZATION_ID); + final WorkspaceRead workspaceRead = getWorkspaceReadPerWorkspace(workspace); assertEquals(workspaceRead, workspacesHandler.getWorkspaceBySlug(slugRequestBody)); } @@ -543,7 +540,13 @@ void testGetWorkspaceByConnectionId() throws ConfigNotFoundException { final UUID connectionId = UUID.randomUUID(); 
when(configRepository.getStandardWorkspaceFromConnection(connectionId, false)).thenReturn(workspace); final ConnectionIdRequestBody connectionIdRequestBody = new ConnectionIdRequestBody().connectionId(connectionId); - final WorkspaceRead workspaceRead = new WorkspaceRead() + final WorkspaceRead workspaceRead = getWorkspaceReadPerWorkspace(workspace); + + assertEquals(workspaceRead, workspacesHandler.getWorkspaceByConnectionId(connectionIdRequestBody, false)); + } + + private WorkspaceRead getWorkspaceReadPerWorkspace(StandardWorkspace workspace) { + return new WorkspaceRead() .workspaceId(workspace.getWorkspaceId()) .customerId(workspace.getCustomerId()) .email(TEST_EMAIL) @@ -557,9 +560,8 @@ void testGetWorkspaceByConnectionId() throws ConfigNotFoundException { .notifications(NotificationConverter.toApiList(workspace.getNotifications())) .notificationSettings(NotificationSettingsConverter.toApi(workspace.getNotificationSettings())) .defaultGeography(GEOGRAPHY_AUTO) - .organizationId(ORGANIZATION_ID); - - assertEquals(workspaceRead, workspacesHandler.getWorkspaceByConnectionId(connectionIdRequestBody)); + .organizationId(ORGANIZATION_ID) + .tombstone(workspace.getTombstone()); } @Test @@ -568,7 +570,7 @@ void testGetWorkspaceByConnectionIdOnConfigNotFound() throws ConfigNotFoundExcep when(configRepository.getStandardWorkspaceFromConnection(connectionId, false)) .thenThrow(new ConfigNotFoundException("something", connectionId.toString())); final ConnectionIdRequestBody connectionIdRequestBody = new ConnectionIdRequestBody().connectionId(connectionId); - assertThrows(ConfigNotFoundException.class, () -> workspacesHandler.getWorkspaceByConnectionId(connectionIdRequestBody)); + assertThrows(ConfigNotFoundException.class, () -> workspacesHandler.getWorkspaceByConnectionId(connectionIdRequestBody, false)); } @ParameterizedTest @@ -660,7 +662,8 @@ void testUpdateWorkspace() .notificationSettings(generateApiNotificationSettings()) .defaultGeography(GEOGRAPHY_US) 
.webhookConfigs(List.of(new WebhookConfigRead().name(TEST_NAME).id(WEBHOOK_CONFIG_ID))) - .organizationId(ORGANIZATION_ID); + .organizationId(ORGANIZATION_ID) + .tombstone(false); final StandardWorkspace expectedWorkspaceWithSecrets = new StandardWorkspace() .withWorkspaceId(workspace.getWorkspaceId()) @@ -678,7 +681,8 @@ void testUpdateWorkspace() .withNotificationSettings(generateNotificationSettings()) .withDefaultGeography(Geography.US) .withWebhookOperationConfigs(SECRET_WEBHOOK_CONFIGS) - .withOrganizationId(ORGANIZATION_ID); + .withOrganizationId(ORGANIZATION_ID) + .withTombstone(false); verify(workspaceService).writeWorkspaceWithSecrets(expectedWorkspaceWithSecrets); @@ -766,7 +770,8 @@ void testUpdateWorkspaceNoNameUpdate() throws JsonValidationException, ConfigNot .notifications(List.of(generateApiNotification())) .notificationSettings(generateApiNotificationSettings()) .defaultGeography(GEOGRAPHY_AUTO) - .organizationId(ORGANIZATION_ID); + .organizationId(ORGANIZATION_ID) + .tombstone(false); verify(configRepository).writeStandardWorkspaceNoSecrets(expectedWorkspace); @@ -801,7 +806,8 @@ void testWorkspaceUpdateOrganization() .notifications(NotificationConverter.toApiList(workspace.getNotifications())) .notificationSettings(NotificationSettingsConverter.toApi(workspace.getNotificationSettings())) .defaultGeography(GEOGRAPHY_AUTO) - .organizationId(newOrgId); + .organizationId(newOrgId) + .tombstone(false); final WorkspaceRead actualWorkspaceRead = workspacesHandler.updateWorkspaceOrganization(workspaceUpdateOrganization); verify(workspaceService).writeStandardWorkspaceNoSecrets(expectedWorkspace); @@ -836,7 +842,8 @@ void testWorkspacePatchUpdate() throws JsonValidationException, ConfigNotFoundEx .notifications(NotificationConverter.toApiList(workspace.getNotifications())) .notificationSettings(NotificationSettingsConverter.toApi(workspace.getNotificationSettings())) .defaultGeography(GEOGRAPHY_AUTO) - .organizationId(ORGANIZATION_ID); + 
.organizationId(ORGANIZATION_ID) + .tombstone(false); final WorkspaceRead actualWorkspaceRead = workspacesHandler.updateWorkspace(workspaceUpdate); verify(configRepository).writeStandardWorkspaceNoSecrets(expectedWorkspace); @@ -873,7 +880,8 @@ void testWorkspaceIsWrittenThroughSecretsWriter() .securityUpdates(false) .notifications(List.of(generateApiNotification())) .notificationSettings(generateApiNotificationSettings()) - .defaultGeography(GEOGRAPHY_US); + .defaultGeography(GEOGRAPHY_US) + .organizationId(ORGANIZATION_ID); final WorkspaceRead actualRead = workspacesHandler.createWorkspace(workspaceCreate); final WorkspaceRead expectedRead = new WorkspaceRead() @@ -890,7 +898,9 @@ void testWorkspaceIsWrittenThroughSecretsWriter() .notifications(List.of(generateApiNotification())) .notificationSettings(generateApiNotificationSettingsWithDefaultValue()) .defaultGeography(GEOGRAPHY_US) - .webhookConfigs(Collections.emptyList()); + .webhookConfigs(Collections.emptyList()) + .tombstone(false) + .organizationId(ORGANIZATION_ID); assertEquals(expectedRead, actualRead); verify(workspaceService, times(1)).writeWorkspaceWithSecrets(any()); diff --git a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/helpers/CompositeBuilderProjectUpdaterTest.java b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/helpers/CompositeBuilderProjectUpdaterTest.java new file mode 100644 index 00000000000..14c351ff77d --- /dev/null +++ b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/helpers/CompositeBuilderProjectUpdaterTest.java @@ -0,0 +1,35 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.commons.server.handlers.helpers; + +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; + +import io.airbyte.api.model.generated.ExistingConnectorBuilderProjectWithWorkspaceId; +import io.airbyte.config.persistence.ConfigNotFoundException; +import java.io.IOException; +import java.util.List; +import org.junit.Test; +import org.junit.jupiter.api.DisplayName; + +public class CompositeBuilderProjectUpdaterTest { + + @Test + @DisplayName("updateConnectorBuilderProject should call updateConnectorBuilderProject on underlying updaters") + public void testUpdateCompositeBuilderProjectUpdaterDelegates() throws ConfigNotFoundException, IOException { + final ExistingConnectorBuilderProjectWithWorkspaceId update = mock(ExistingConnectorBuilderProjectWithWorkspaceId.class); + final BuilderProjectUpdater updaterA = mock(BuilderProjectUpdater.class); + final BuilderProjectUpdater updaterB = mock(BuilderProjectUpdater.class); + CompositeBuilderProjectUpdater projectUpdater = new CompositeBuilderProjectUpdater(List.of(updaterA, updaterB)); + projectUpdater.persistBuilderProjectUpdate(update); + + verify(updaterA, times(1)) + .persistBuilderProjectUpdate(update); + verify(updaterB, times(1)) + .persistBuilderProjectUpdate(update); + } + +} diff --git a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/helpers/ConfigRepositoryBuilderProjectUpdaterTest.java b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/helpers/ConfigRepositoryBuilderProjectUpdaterTest.java new file mode 100644 index 00000000000..b29b633cd48 --- /dev/null +++ b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/helpers/ConfigRepositoryBuilderProjectUpdaterTest.java @@ -0,0 +1,151 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.commons.server.handlers.helpers; + +import static io.airbyte.commons.server.handlers.ConnectorBuilderProjectsHandler.CONNECTION_SPECIFICATION_FIELD; +import static io.airbyte.commons.server.handlers.ConnectorBuilderProjectsHandler.SPEC_FIELD; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.node.ObjectNode; +import io.airbyte.api.model.generated.ConnectorBuilderProjectDetails; +import io.airbyte.api.model.generated.ExistingConnectorBuilderProjectWithWorkspaceId; +import io.airbyte.commons.json.Jsons; +import io.airbyte.config.ConnectorBuilderProject; +import io.airbyte.config.persistence.ConfigNotFoundException; +import io.airbyte.config.persistence.ConfigRepository; +import java.io.IOException; +import java.util.UUID; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.DisplayName; +import org.junit.jupiter.api.Test; + +public class ConfigRepositoryBuilderProjectUpdaterTest { + + private final JsonNode draftManifest = addSpec(Jsons.deserialize("{\"test\":123,\"empty\":{\"array_in_object\":[]}}")); + + private static final UUID A_SOURCE_DEFINITION_ID = UUID.randomUUID(); + private static final UUID A_BUILDER_PROJECT_ID = UUID.randomUUID(); + private static final UUID A_WORKSPACE_ID = UUID.randomUUID(); + private static final String A_DESCRIPTION = "a description"; + private static final String A_SOURCE_NAME = "a source name"; + private static final String A_NAME = "a name"; + private static final String A_DOCUMENTATION_URL = "http://documentation.url"; + private static final JsonNode A_MANIFEST; + private static final JsonNode A_SPEC; + + static { + try { + A_MANIFEST = new 
ObjectMapper().readTree("{\"a_manifest\": \"manifest_value\"}"); + A_SPEC = new ObjectMapper().readTree("{\"a_spec\": \"spec_value\"}"); + } catch (final JsonProcessingException e) { + throw new RuntimeException(e); + } + } + + private final String specString = + """ + { + "type": "object", + "properties": { + "username": { + "type": "string" + }, + "password": { + "type": "string", + "airbyte_secret": true + } + } + }"""; + + private ConfigRepository configRepository; + private UUID workspaceId; + private ConfigRepositoryBuilderProjectUpdater projectUpdater; + + @BeforeEach + void setUp() { + configRepository = mock(ConfigRepository.class); + projectUpdater = new ConfigRepositoryBuilderProjectUpdater(configRepository); + } + + @Test + @DisplayName("updateConnectorBuilderProject should update an existing project removing the draft") + void testUpdateConnectorBuilderProjectWipeDraft() throws IOException, ConfigNotFoundException { + final ConnectorBuilderProject project = generateBuilderProject(); + + when(configRepository.getConnectorBuilderProject(project.getBuilderProjectId(), false)).thenReturn(project); + + final ExistingConnectorBuilderProjectWithWorkspaceId update = new ExistingConnectorBuilderProjectWithWorkspaceId() + .builderProject(new ConnectorBuilderProjectDetails().name(project.getName())) + .workspaceId(workspaceId).builderProjectId(project.getBuilderProjectId()); + + projectUpdater.persistBuilderProjectUpdate(update); + + verify(configRepository, times(1)) + .writeBuilderProjectDraft( + project.getBuilderProjectId(), project.getWorkspaceId(), project.getName(), null); + } + + @Test + @DisplayName("updateConnectorBuilderProject should update an existing project") + void testUpdateConnectorBuilderProject() throws IOException, ConfigNotFoundException { + final ConnectorBuilderProject project = generateBuilderProject(); + + when(configRepository.getConnectorBuilderProject(project.getBuilderProjectId(), false)).thenReturn(project); + + final 
ExistingConnectorBuilderProjectWithWorkspaceId update = new ExistingConnectorBuilderProjectWithWorkspaceId() + .builderProject(new ConnectorBuilderProjectDetails() + .name(project.getName()) + .draftManifest(project.getManifestDraft())) + .workspaceId(workspaceId) + .builderProjectId(project.getBuilderProjectId()); + + projectUpdater.persistBuilderProjectUpdate(update); + + verify(configRepository, times(1)) + .writeBuilderProjectDraft( + project.getBuilderProjectId(), project.getWorkspaceId(), project.getName(), project.getManifestDraft()); + } + + @Test + void givenActorDefinitionAssociatedWithProjectWhenUpdateConnectorBuilderProjectThenUpdateProjectAndDefinition() throws Exception { + when(configRepository.getConnectorBuilderProject(A_BUILDER_PROJECT_ID, false)).thenReturn(anyBuilderProject() + .withBuilderProjectId(A_BUILDER_PROJECT_ID) + .withWorkspaceId(A_WORKSPACE_ID) + .withActorDefinitionId(A_SOURCE_DEFINITION_ID)); + + projectUpdater.persistBuilderProjectUpdate(new ExistingConnectorBuilderProjectWithWorkspaceId() + .builderProject(new ConnectorBuilderProjectDetails() + .name(A_SOURCE_NAME) + .draftManifest(A_MANIFEST)) + .workspaceId(A_WORKSPACE_ID) + .builderProjectId(A_BUILDER_PROJECT_ID)); + + verify(configRepository, times(1)) + .updateBuilderProjectAndActorDefinition( + A_BUILDER_PROJECT_ID, A_WORKSPACE_ID, A_SOURCE_NAME, A_MANIFEST, A_SOURCE_DEFINITION_ID); + } + + private ConnectorBuilderProject generateBuilderProject() throws JsonProcessingException { + final UUID projectId = UUID.randomUUID(); + return new ConnectorBuilderProject().withBuilderProjectId(projectId).withWorkspaceId(workspaceId).withName("Test project") + .withHasDraft(true).withManifestDraft(draftManifest); + } + + private JsonNode addSpec(JsonNode manifest) { + final JsonNode spec = Jsons.deserialize("{\"" + CONNECTION_SPECIFICATION_FIELD + "\":" + specString + "}"); + return ((ObjectNode) Jsons.clone(manifest)).set(SPEC_FIELD, spec); + } + + private static ConnectorBuilderProject 
anyBuilderProject() { + return new ConnectorBuilderProject(); + } + +} diff --git a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/helpers/JobCreationAndStatusUpdateHelperTest.java b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/helpers/JobCreationAndStatusUpdateHelperTest.java index 667cdd40d06..f13622e4d2c 100644 --- a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/helpers/JobCreationAndStatusUpdateHelperTest.java +++ b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/helpers/JobCreationAndStatusUpdateHelperTest.java @@ -147,8 +147,8 @@ void failNonTerminalJobs() throws IOException { verify(mJobPersistence).writeAttemptFailureSummary(eq(runningJob.getId()), eq(attemptNo2), any()); verify(mJobPersistence).getJob(runningJob.getId()); verify(mJobPersistence).getJob(pendingJob.getId()); - verify(mJobNotifier).failJob(any(), eq(runningJob), any()); - verify(mJobNotifier).failJob(any(), eq(pendingJob), any()); + verify(mJobNotifier).failJob(eq(runningJob), any()); + verify(mJobNotifier).failJob(eq(pendingJob), any()); verify(mJobTracker).trackSync(runningJob, JobState.FAILED); verify(mJobTracker).trackSync(pendingJob, JobState.FAILED); verify(mJobPersistence).listJobsForConnectionWithStatuses(Fixtures.CONNECTION_ID, Job.REPLICATION_TYPES, JobStatus.NON_TERMINAL_STATUSES); diff --git a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/support/AuthNettyServerCustomizerTest.java b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/support/AuthNettyServerCustomizerTest.java index 6028acb482c..29aefde0401 100644 --- a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/support/AuthNettyServerCustomizerTest.java +++ b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/support/AuthNettyServerCustomizerTest.java @@ -24,6 +24,9 @@ class AuthNettyServerCustomizerTest { private static final Integer MAX_CONTENT_LENGTH = 1024; + private 
static final Integer MAX_INITIAL_LINE_LENGTH = 4096; + private static final Integer MAX_HEADER_SIZE = 8192; + private static final Integer MAX_CHUNK_SIZE = 8192; private AuthorizationServerHandler authorizationServerHandler; @@ -32,7 +35,8 @@ class AuthNettyServerCustomizerTest { @BeforeEach void setup() { authorizationServerHandler = Mockito.mock(AuthorizationServerHandler.class); - customizer = new AuthNettyServerCustomizer(authorizationServerHandler, MAX_CONTENT_LENGTH); + customizer = new AuthNettyServerCustomizer( + authorizationServerHandler, MAX_CONTENT_LENGTH, MAX_INITIAL_LINE_LENGTH, MAX_HEADER_SIZE, MAX_CHUNK_SIZE); } @Test diff --git a/airbyte-commons-server/src/test/kotlin/io/airbyte/commons/server/authorization/ApiAuthorizationHelperTest.kt b/airbyte-commons-server/src/test/kotlin/io/airbyte/commons/server/authorization/ApiAuthorizationHelperTest.kt new file mode 100644 index 00000000000..4be584e7270 --- /dev/null +++ b/airbyte-commons-server/src/test/kotlin/io/airbyte/commons/server/authorization/ApiAuthorizationHelperTest.kt @@ -0,0 +1,179 @@ +package io.airbyte.commons.server.authorization + +import io.airbyte.api.model.generated.PermissionCheckRead +import io.airbyte.api.model.generated.PermissionType +import io.airbyte.commons.auth.OrganizationAuthRole +import io.airbyte.commons.auth.WorkspaceAuthRole +import io.airbyte.commons.server.errors.problems.ForbiddenProblem +import io.airbyte.commons.server.handlers.PermissionHandler +import io.airbyte.commons.server.support.AuthenticationHeaderResolver +import io.airbyte.commons.server.support.AuthenticationHttpHeaders.ORGANIZATION_ID_HEADER +import io.airbyte.commons.server.support.CurrentUserService +import io.airbyte.commons.server.support.RbacRoleHelper +import io.mockk.every +import io.mockk.mockk +import org.junit.jupiter.api.BeforeEach +import org.junit.jupiter.api.Test +import org.junit.jupiter.api.assertDoesNotThrow +import org.junit.jupiter.api.assertThrows +import java.util.UUID + +class 
ApiAuthorizationHelperTest { + private val authenticationHeaderResolver = mockk() + private val currentUserService = mockk() + private val permissionHandler = mockk() + private val rbacRoleHelper = mockk() + private val apiAuthorizationHelper = ApiAuthorizationHelper(authenticationHeaderResolver, permissionHandler, currentUserService, rbacRoleHelper) + + private val userId = UUID.randomUUID() + private val authUserId = UUID.randomUUID() + private val workspaceId = UUID.randomUUID() + private val organizationId = UUID.randomUUID() + + @BeforeEach + fun setup() { + every { authenticationHeaderResolver.resolveWorkspace(any()) } returns listOf(workspaceId) + every { authenticationHeaderResolver.resolveOrganization(any()) } returns listOf(organizationId) + every { currentUserService.currentUser.userId } returns userId + every { currentUserService.currentUser.authUserId } returns authUserId.toString() + } + + @Test + fun `test checkWorkspacePermissions for instance admin`() { + every { permissionHandler.isUserInstanceAdmin(any()) } returns true + // shouldn't matter because we're an instance admin. 
+ every { permissionHandler.permissionsCheckMultipleWorkspaces(any()) } returns + PermissionCheckRead().message("no").status(PermissionCheckRead.StatusEnum.FAILED) + val ids = listOf(UUID.randomUUID().toString()) + val scope = Scope.WORKSPACE + val permissionTypes = setOf(PermissionType.WORKSPACE_EDITOR, PermissionType.ORGANIZATION_EDITOR) + assertDoesNotThrow { + apiAuthorizationHelper.checkWorkspacePermissions(ids, scope, userId, permissionTypes) + } + } + + @Test + fun `test checkWorkspacePermissions with empty workspace Ids`() { + every { permissionHandler.isUserInstanceAdmin(any()) } returns false + + val permissionTypes = setOf(PermissionType.WORKSPACE_EDITOR, PermissionType.ORGANIZATION_EDITOR) + + for (scope in Scope.entries) { + if (scope == Scope.WORKSPACES) { + // Allow empty ids for WORKSPACES scope specifically + assertDoesNotThrow { + apiAuthorizationHelper.checkWorkspacePermissions(emptyList(), scope, userId, permissionTypes) + } + } else { + // Disallow empty ids for other scopes + assertThrows { + apiAuthorizationHelper.checkWorkspacePermissions(emptyList(), scope, userId, permissionTypes) + } + } + } + } + + @Test + fun `test checkWorkspacePermissions with null workspace Ids`() { + every { permissionHandler.isUserInstanceAdmin(any()) } returns false + + val ids = listOf(UUID.randomUUID().toString()) + val permissionTypes = setOf(PermissionType.WORKSPACE_EDITOR, PermissionType.ORGANIZATION_EDITOR) + + // can't resolve workspaces + every { authenticationHeaderResolver.resolveWorkspace(any()) } returns null + + assertThrows { + apiAuthorizationHelper.checkWorkspacePermissions(ids, Scope.WORKSPACE, userId, permissionTypes) + } + } + + @Test + fun `test checkWorkspacePermissions for passing and failing permission checks`() { + every { permissionHandler.isUserInstanceAdmin(any()) } returns false + + val ids = listOf(UUID.randomUUID().toString()) + val scope = Scope.WORKSPACES + val permissionTypes = setOf(PermissionType.WORKSPACE_EDITOR, 
PermissionType.ORGANIZATION_EDITOR) + + // as long as we have one permission type that passes, we pass the overall check + every { permissionHandler.permissionsCheckMultipleWorkspaces(any()) } returnsMany + listOf( + PermissionCheckRead().message("no").status(PermissionCheckRead.StatusEnum.FAILED), + PermissionCheckRead().message("yes").status(PermissionCheckRead.StatusEnum.SUCCEEDED), + ) + + assertDoesNotThrow { + apiAuthorizationHelper.checkWorkspacePermissions(ids, scope, userId, permissionTypes) + } + + // if no permission types pass, we fail the overall check + every { permissionHandler.permissionsCheckMultipleWorkspaces(any()) } returnsMany + listOf( + PermissionCheckRead().message("no").status(PermissionCheckRead.StatusEnum.FAILED), + PermissionCheckRead().message("no again").status(PermissionCheckRead.StatusEnum.FAILED), + ) + + assertThrows { + apiAuthorizationHelper.checkWorkspacePermissions(ids, scope, userId, permissionTypes) + } + } + + @Test + fun `test ensureUserHasAnyRequiredRoleOrThrow for org ID`() { + val requiredRoles = setOf(OrganizationAuthRole.ORGANIZATION_EDITOR, OrganizationAuthRole.ORGANIZATION_ADMIN) + + // You're an org editor, and we require editor/admin -> pass + every { + rbacRoleHelper.getRbacRoles(authUserId.toString(), mapOf(ORGANIZATION_ID_HEADER to organizationId.toString())) + } returns setOf(OrganizationAuthRole.ORGANIZATION_EDITOR.label) + assertDoesNotThrow { + apiAuthorizationHelper.ensureUserHasAnyRequiredRoleOrThrow(Scope.ORGANIZATION, listOf(organizationId.toString()), requiredRoles) + } + + // You're an org reader, and we require editor/admin -> fail + every { + rbacRoleHelper.getRbacRoles(authUserId.toString(), mapOf(ORGANIZATION_ID_HEADER to organizationId.toString())) + } returns setOf(OrganizationAuthRole.ORGANIZATION_READER.label) + assertThrows { + apiAuthorizationHelper.ensureUserHasAnyRequiredRoleOrThrow(Scope.ORGANIZATION, listOf(organizationId.toString()), requiredRoles) + } + } + + @Test + fun `test 
ensureUserHasAnyRequiredRoleOrThrow for common required roles`() { + val requiredRoles = setOf(OrganizationAuthRole.ORGANIZATION_ADMIN, WorkspaceAuthRole.WORKSPACE_ADMIN) + + // You're a workspace admin ONLY, and we require either org admin OR workspace admin -> pass + every { + rbacRoleHelper.getRbacRoles(authUserId.toString(), mapOf(ORGANIZATION_ID_HEADER to organizationId.toString())) + } returns setOf(WorkspaceAuthRole.WORKSPACE_ADMIN.label) + assertDoesNotThrow { + apiAuthorizationHelper.ensureUserHasAnyRequiredRoleOrThrow(Scope.ORGANIZATION, listOf(organizationId.toString()), requiredRoles) + } + + // You're an organization admin, and we require either org or workspace admin -> pass + every { + rbacRoleHelper.getRbacRoles(authUserId.toString(), mapOf(ORGANIZATION_ID_HEADER to organizationId.toString())) + } returns setOf(OrganizationAuthRole.ORGANIZATION_ADMIN.label) + assertDoesNotThrow { + apiAuthorizationHelper.ensureUserHasAnyRequiredRoleOrThrow(Scope.ORGANIZATION, listOf(organizationId.toString()), requiredRoles) + } + + // You're only an org member, but you're a workspace admin -> pass + every { + rbacRoleHelper.getRbacRoles(authUserId.toString(), mapOf(ORGANIZATION_ID_HEADER to organizationId.toString())) + } returns setOf(WorkspaceAuthRole.WORKSPACE_ADMIN.label, OrganizationAuthRole.ORGANIZATION_MEMBER.label) + assertDoesNotThrow { + apiAuthorizationHelper.ensureUserHasAnyRequiredRoleOrThrow(Scope.ORGANIZATION, listOf(organizationId.toString()), requiredRoles) + } + + // You're a workspace editor AND org member, and we require admin -> fail + every { + rbacRoleHelper.getRbacRoles(authUserId.toString(), mapOf(ORGANIZATION_ID_HEADER to organizationId.toString())) + } returns setOf(WorkspaceAuthRole.WORKSPACE_EDITOR.label, OrganizationAuthRole.ORGANIZATION_MEMBER.label) + assertThrows { + apiAuthorizationHelper.ensureUserHasAnyRequiredRoleOrThrow(Scope.ORGANIZATION, listOf(organizationId.toString()), requiredRoles) + } + } +} diff --git 
a/airbyte-commons-server/src/test/kotlin/io/airbyte/commons/server/handlers/StreamRefreshesHandlerTest.kt b/airbyte-commons-server/src/test/kotlin/io/airbyte/commons/server/handlers/StreamRefreshesHandlerTest.kt new file mode 100644 index 00000000000..d5290e16f61 --- /dev/null +++ b/airbyte-commons-server/src/test/kotlin/io/airbyte/commons/server/handlers/StreamRefreshesHandlerTest.kt @@ -0,0 +1,161 @@ +package io.airbyte.commons.server.handlers + +import io.airbyte.api.model.generated.ConnectionStream +import io.airbyte.commons.server.handlers.StreamRefreshesHandler.Companion.connectionStreamsToStreamDescriptors +import io.airbyte.commons.server.handlers.StreamRefreshesHandler.Companion.streamDescriptorsToStreamRefreshes +import io.airbyte.commons.server.scheduler.EventRunner +import io.airbyte.config.StandardWorkspace +import io.airbyte.config.persistence.StreamRefreshesRepository +import io.airbyte.config.persistence.domain.StreamRefresh +import io.airbyte.data.services.ConnectionService +import io.airbyte.data.services.WorkspaceService +import io.airbyte.featureflag.ActivateRefreshes +import io.airbyte.featureflag.Connection +import io.airbyte.featureflag.FeatureFlagClient +import io.airbyte.featureflag.Multi +import io.airbyte.featureflag.Workspace +import io.airbyte.protocol.models.StreamDescriptor +import io.mockk.called +import io.mockk.clearAllMocks +import io.mockk.every +import io.mockk.mockk +import io.mockk.verify +import io.mockk.verifyOrder +import org.junit.jupiter.api.Assertions.assertEquals +import org.junit.jupiter.api.Assertions.assertFalse +import org.junit.jupiter.api.Assertions.assertTrue +import org.junit.jupiter.api.BeforeEach +import org.junit.jupiter.api.Test +import java.util.UUID + +internal class StreamRefreshesHandlerTest { + private val connectionService: ConnectionService = mockk() + private val streamRefreshesRepository: StreamRefreshesRepository = mockk() + private val eventRunner: EventRunner = mockk() + private val 
workspaceService: WorkspaceService = mockk() + private val featureFlagClient: FeatureFlagClient = mockk() + + private val streamRefreshesHandler = + StreamRefreshesHandler( + connectionService, + streamRefreshesRepository, + eventRunner, + workspaceService, + featureFlagClient, + ) + + private val workspaceId = UUID.randomUUID() + private val connectionId = UUID.randomUUID() + private val ffContext = + Multi( + listOf( + Workspace(workspaceId), + Connection(connectionId), + ), + ) + private val connectionStream = + listOf( + ConnectionStream().streamName("name1").streamNamespace("namespace1"), + ConnectionStream().streamName("name2"), + ) + private val streamDescriptors = + listOf( + StreamDescriptor().withName("name1").withNamespace("namespace1"), + StreamDescriptor().withName("name2"), + ) + + @BeforeEach + fun reset() { + clearAllMocks() + every { + workspaceService.getStandardWorkspaceFromConnection(connectionId, false) + } returns StandardWorkspace().withWorkspaceId(workspaceId) + } + + @Test + fun `test that nothing is submitted if the flag is disabled`() { + every { featureFlagClient.boolVariation(ActivateRefreshes, ffContext) } returns false + + val result = streamRefreshesHandler.createRefreshesForConnection(connectionId, listOf()) + + assertFalse(result) + + verify { + listOf( + streamRefreshesRepository.saveAll(any>()), + eventRunner.startNewManualSync(connectionId), + ) wasNot called + } + } + + @Test + fun `test that the refreshes entries are properly created`() { + every { featureFlagClient.boolVariation(ActivateRefreshes, ffContext) } returns true + every { streamRefreshesRepository.saveAll(any>()) } returns listOf() + every { eventRunner.startNewManualSync(connectionId) } returns null + + val result = streamRefreshesHandler.createRefreshesForConnection(connectionId, connectionStream) + + assertTrue(result) + + verifyOrder { + streamRefreshesRepository.saveAll(any>()) + eventRunner.startNewManualSync(connectionId) + } + } + + @Test + fun `test that 
the refreshes entries are properly created for all the streams if the provided list is empty`() { + every { featureFlagClient.boolVariation(ActivateRefreshes, ffContext) } returns true + every { streamRefreshesRepository.saveAll(any>()) } returns listOf() + every { eventRunner.startNewManualSync(connectionId) } returns null + every { connectionService.getAllStreamsForConnection(connectionId) } returns streamDescriptors + + val result = streamRefreshesHandler.createRefreshesForConnection(connectionId, listOf()) + + assertTrue(result) + + verifyOrder { + streamRefreshesRepository.saveAll(any>()) + eventRunner.startNewManualSync(connectionId) + } + } + + @Test + fun `test the conversion from connection stream to stream descriptors`() { + val result = connectionStreamsToStreamDescriptors(connectionStream) + + assertEquals(streamDescriptors, result) + } + + @Test + fun `test the conversion from stream descriptors to stream refreshes`() { + val expected = + listOf( + StreamRefresh(connectionId = connectionId, streamName = "name1", streamNamespace = "namespace1"), + StreamRefresh(connectionId = connectionId, streamName = "name2", streamNamespace = null), + ) + + val result = streamDescriptorsToStreamRefreshes(connectionId, streamDescriptors) + + assertEquals(2, result.size) + result.stream().forEach({ + assertEquals(connectionId, it.connectionId) + if (it.streamNamespace == null) { + assertEquals("name2", it.streamName) + } else if (it.streamNamespace == "namespace1") { + assertEquals("name1", it.streamName) + } else { + throw RuntimeException("Unexpected streamNamespace {${it.streamNamespace}}") + } + }) + } + + @Test + fun `test delete`() { + val connectionId: UUID = UUID.randomUUID() + every { streamRefreshesRepository.deleteByConnectionId(connectionId) }.returns(Unit) + streamRefreshesHandler.deleteRefreshesForConnection(connectionId) + verify { streamRefreshesRepository.deleteByConnectionId(connectionId) } + } +} diff --git 
a/airbyte-commons-temporal-core/build.gradle.kts b/airbyte-commons-temporal-core/build.gradle.kts index 1ba09a2244d..c2bcd6a3ca5 100644 --- a/airbyte-commons-temporal-core/build.gradle.kts +++ b/airbyte-commons-temporal-core/build.gradle.kts @@ -1,21 +1,21 @@ plugins { - id("io.airbyte.gradle.jvm.lib") - id("io.airbyte.gradle.publish") - kotlin("jvm") + id("io.airbyte.gradle.jvm.lib") + id("io.airbyte.gradle.publish") + kotlin("jvm") } dependencies { - implementation(libs.bundles.temporal) - implementation(libs.failsafe) + implementation(libs.bundles.temporal) + implementation(libs.failsafe) - // We do not want dependency on(databases from this library.) - implementation(project(":airbyte-commons")) - implementation(project(":airbyte-metrics:metrics-lib")) + // We do not want dependency on(databases from this library.) + implementation(project(":airbyte-commons")) + implementation(project(":airbyte-metrics:metrics-lib")) - testImplementation(libs.assertj.core) - testImplementation(libs.bundles.junit) - testImplementation(libs.junit.pioneer) - testImplementation(libs.mockito.inline) - testImplementation(libs.temporal.testing) - testRuntimeOnly(libs.junit.jupiter.engine) + testImplementation(libs.assertj.core) + testImplementation(libs.bundles.junit) + testImplementation(libs.junit.pioneer) + testImplementation(libs.mockito.inline) + testImplementation(libs.temporal.testing) + testRuntimeOnly(libs.junit.jupiter.engine) } diff --git a/airbyte-commons-temporal-core/src/main/java/io/airbyte/commons/temporal/utils/ActivityFailureClassifier.kt b/airbyte-commons-temporal-core/src/main/java/io/airbyte/commons/temporal/utils/ActivityFailureClassifier.kt new file mode 100644 index 00000000000..cfcc55ca9bc --- /dev/null +++ b/airbyte-commons-temporal-core/src/main/java/io/airbyte/commons/temporal/utils/ActivityFailureClassifier.kt @@ -0,0 +1,46 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. 
+ */ +package io.airbyte.commons.temporal.utils + +import io.temporal.api.enums.v1.TimeoutType +import io.temporal.failure.ActivityFailure +import io.temporal.failure.TimeoutFailure + +object ActivityFailureClassifier { + @JvmStatic + fun classifyException(e: Exception): TemporalFailureReason = + when (e) { + is ActivityFailure -> + when (e.cause) { + is TimeoutFailure -> + when ((e.cause as TimeoutFailure).timeoutType) { + // ScheduleToClose or StartToClose happen when the activity runs longer than the configured timeout. + // This is most likely an issue with the computation itself more than the infra. + TimeoutType.TIMEOUT_TYPE_SCHEDULE_TO_CLOSE, TimeoutType.TIMEOUT_TYPE_START_TO_CLOSE -> TemporalFailureReason.OPERATION_TIMEOUT + + // This is because we failed our background heartbeat. + // Either the app in charge of heartbeat disappeared or got stuck. + TimeoutType.TIMEOUT_TYPE_HEARTBEAT -> TemporalFailureReason.HEARTBEAT + + // We consider the rest as infra issue, we were most likely not able to start the task within the allocated time. + // Here is most likely TimeoutType.TIMEOUT_TYPE_SCHEDULE_TO_START or TimeoutType.UNRECOGNIZED + else -> TemporalFailureReason.SCHEDULER_OVERLOADED + } + + // This is a temporal error unrelated to a timeout. We do not have a more precised classification at the moment. 
+ else -> TemporalFailureReason.NOT_A_TIMEOUT + } + + // This isn't an ActivityFailure exception, should be classified outside of this method + else -> TemporalFailureReason.UNKNOWN + } + + enum class TemporalFailureReason { + UNKNOWN, + NOT_A_TIMEOUT, + SCHEDULER_OVERLOADED, + HEARTBEAT, + OPERATION_TIMEOUT, + } +} diff --git a/airbyte-commons-temporal-core/src/main/java/io/airbyte/commons/temporal/utils/PayloadChecker.java b/airbyte-commons-temporal-core/src/main/java/io/airbyte/commons/temporal/utils/PayloadChecker.java index 50eca140307..06d4ff5be23 100644 --- a/airbyte-commons-temporal-core/src/main/java/io/airbyte/commons/temporal/utils/PayloadChecker.java +++ b/airbyte-commons-temporal-core/src/main/java/io/airbyte/commons/temporal/utils/PayloadChecker.java @@ -7,6 +7,9 @@ import com.fasterxml.jackson.databind.JsonNode; import io.airbyte.commons.json.Jsons; import io.airbyte.commons.temporal.exception.SizeLimitException; +import io.airbyte.metrics.lib.MetricAttribute; +import io.airbyte.metrics.lib.MetricClient; +import io.airbyte.metrics.lib.OssMetricsRegistry; import java.util.HashMap; import java.util.Iterator; import java.util.Map; @@ -23,28 +26,48 @@ */ public class PayloadChecker { + private MetricClient metricClient; + private static final Logger log = LoggerFactory.getLogger(PayloadChecker.class); public static final int MAX_PAYLOAD_SIZE_BYTES = 4 * 1024 * 1024; + public PayloadChecker(final MetricClient metricClient) { + this.metricClient = metricClient; + } + + /** + * Validate the payload size fits within temporal message size limits. + * + * @param data to validate + * @param type of data + * @return data if size is valid + * @throws SizeLimitException if payload size exceeds temporal limits. + */ + public T validatePayloadSize(final T data) { + return validatePayloadSize(data, new MetricAttribute[0]); + } + /** * Validate the payload size fits within temporal message size limits. 
* * @param data to validate * @param type of data + * @param attrs for metric reporting * @return data if size is valid * @throws SizeLimitException if payload size exceeds temporal limits. */ - public static T validatePayloadSize(final T data) { + public T validatePayloadSize(final T data, final MetricAttribute[] attrs) { final String serializedData = Jsons.serialize(data); if (serializedData.length() > MAX_PAYLOAD_SIZE_BYTES) { emitInspectionLog(data); + metricClient.count(OssMetricsRegistry.PAYLOAD_SIZE_EXCEEDED, 1, attrs); throw new SizeLimitException(String.format("Complete result exceeds size limit (%s of %s)", serializedData.length(), MAX_PAYLOAD_SIZE_BYTES)); } return data; } - private static void emitInspectionLog(final T data) { + private void emitInspectionLog(final T data) { final JsonNode jsonData = Jsons.jsonNode(data); final Map inspectionMap = new HashMap<>(); for (Iterator it = jsonData.fieldNames(); it.hasNext();) { diff --git a/airbyte-commons-temporal-core/src/test/java/io/airbyte/commons/temporal/utils/PayloadCheckerTest.java b/airbyte-commons-temporal-core/src/test/java/io/airbyte/commons/temporal/utils/PayloadCheckerTest.java index e35fad370a7..17a9adc768e 100644 --- a/airbyte-commons-temporal-core/src/test/java/io/airbyte/commons/temporal/utils/PayloadCheckerTest.java +++ b/airbyte-commons-temporal-core/src/test/java/io/airbyte/commons/temporal/utils/PayloadCheckerTest.java @@ -6,24 +6,30 @@ import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.mockito.Mockito.mock; import io.airbyte.commons.temporal.exception.SizeLimitException; +import io.airbyte.metrics.lib.MetricClient; import org.junit.jupiter.api.Test; class PayloadCheckerTest { + MetricClient mMetricClient = mock(MetricClient.class); + + PayloadChecker payloadChecker = new PayloadChecker(mMetricClient); + record Payload(String data) {} @Test void testValidPayloadSize() { final Payload p = new 
Payload("1".repeat(PayloadChecker.MAX_PAYLOAD_SIZE_BYTES - "{\"data\":\"\"}".length())); - assertEquals(p, PayloadChecker.validatePayloadSize(p)); + assertEquals(p, payloadChecker.validatePayloadSize(p)); } @Test void testInvalidPayloadSize() { final Payload p = new Payload("1".repeat(PayloadChecker.MAX_PAYLOAD_SIZE_BYTES)); - assertThrows(SizeLimitException.class, () -> PayloadChecker.validatePayloadSize(p)); + assertThrows(SizeLimitException.class, () -> payloadChecker.validatePayloadSize(p)); } } diff --git a/airbyte-commons-temporal/build.gradle.kts b/airbyte-commons-temporal/build.gradle.kts index 3b3d55857c7..421fe0e64e0 100644 --- a/airbyte-commons-temporal/build.gradle.kts +++ b/airbyte-commons-temporal/build.gradle.kts @@ -1,42 +1,42 @@ plugins { - id("io.airbyte.gradle.jvm.lib") - id("io.airbyte.gradle.publish") + id("io.airbyte.gradle.jvm.lib") + id("io.airbyte.gradle.publish") } dependencies { - compileOnly(libs.lombok) - annotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut - annotationProcessor(platform(libs.micronaut.platform)) - annotationProcessor(libs.bundles.micronaut.annotation.processor) + compileOnly(libs.lombok) + annotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut + annotationProcessor(platform(libs.micronaut.platform)) + annotationProcessor(libs.bundles.micronaut.annotation.processor) - implementation(platform(libs.micronaut.platform)) - implementation(libs.bundles.micronaut) - implementation(libs.bundles.temporal) - implementation(libs.bundles.apache) - implementation(libs.failsafe) + implementation(platform(libs.micronaut.platform)) + implementation(libs.bundles.micronaut) + implementation(libs.bundles.temporal) + implementation(libs.bundles.apache) + implementation(libs.failsafe) - implementation(project(":airbyte-commons")) - implementation(project(":airbyte-commons-temporal-core")) - implementation(project(":airbyte-config:config-models")) - 
implementation(project(":airbyte-config:config-persistence")) - implementation(project(":airbyte-featureflag")) - implementation(project(":airbyte-metrics:metrics-lib")) - implementation(project(":airbyte-notification")) - implementation(project(":airbyte-persistence:job-persistence")) - implementation(libs.airbyte.protocol) - implementation(project(":airbyte-worker-models")) - implementation(project(":airbyte-api")) - implementation(project(":airbyte-json-validation")) + implementation(project(":airbyte-commons")) + implementation(project(":airbyte-commons-temporal-core")) + implementation(project(":airbyte-config:config-models")) + implementation(project(":airbyte-config:config-persistence")) + implementation(project(":airbyte-featureflag")) + implementation(project(":airbyte-metrics:metrics-lib")) + implementation(project(":airbyte-notification")) + implementation(project(":airbyte-persistence:job-persistence")) + implementation(libs.airbyte.protocol) + implementation(project(":airbyte-worker-models")) + implementation(project(":airbyte-api")) + implementation(project(":airbyte-json-validation")) - testAnnotationProcessor(platform(libs.micronaut.platform)) - testAnnotationProcessor(libs.bundles.micronaut.test.annotation.processor) + testAnnotationProcessor(platform(libs.micronaut.platform)) + testAnnotationProcessor(libs.bundles.micronaut.test.annotation.processor) - testImplementation(libs.temporal.testing) - // Needed to be able to mock final class) - testImplementation(libs.mockito.inline) - testRuntimeOnly(libs.junit.jupiter.engine) - testImplementation(libs.bundles.junit) - testImplementation(libs.assertj.core) + testImplementation(libs.temporal.testing) + // Needed to be able to mock final class) + testImplementation(libs.mockito.inline) + testRuntimeOnly(libs.junit.jupiter.engine) + testImplementation(libs.bundles.junit) + testImplementation(libs.assertj.core) - testImplementation(libs.junit.pioneer) + testImplementation(libs.junit.pioneer) } diff --git 
a/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/ConnectionManagerUtils.java b/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/ConnectionManagerUtils.java index 78d60f76ca1..79e8886af28 100644 --- a/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/ConnectionManagerUtils.java +++ b/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/ConnectionManagerUtils.java @@ -77,27 +77,6 @@ public ConnectionManagerWorkflow signalWorkflowAndRepairIfNecessary(final UUID c return signalWorkflowAndRepairIfNecessary(connectionId, signalMethod, Optional.empty()); } - /** - * Attempts to send a signal to the existing ConnectionManagerWorkflow for the provided connection. - * - * If the workflow is unreachable, this will restart the workflow and send the signal in a single - * batched request. Batching is used to avoid race conditions between starting the workflow and - * executing the signal. - * - * @param connectionId the connection ID to execute this operation for - * @param signalMethod a function that takes in a connection manager workflow and executes a signal - * method on it, with 1 argument - * @param signalArgument the single argument to be input to the signal - * @return the healthy connection manager workflow that was signaled - * @throws DeletedWorkflowException if the connection manager workflow was deleted - */ - public ConnectionManagerWorkflow signalWorkflowAndRepairIfNecessary(final UUID connectionId, - final Function> signalMethod, - final T signalArgument) - throws DeletedWorkflowException { - return signalWorkflowAndRepairIfNecessary(connectionId, signalMethod, Optional.of(signalArgument)); - } - // This method unifies the logic of the above two, by using the optional signalArgument parameter to // indicate if an argument is being provided to the signal or not. 
// Keeping this private and only exposing the above methods outside this class provides a strict diff --git a/airbyte-commons-temporal/src/test/java/io/airbyte/commons/temporal/ConnectionManageUtilsTest.java b/airbyte-commons-temporal/src/test/java/io/airbyte/commons/temporal/ConnectionManageUtilsTest.java new file mode 100644 index 00000000000..2e2ffdf8e00 --- /dev/null +++ b/airbyte-commons-temporal/src/test/java/io/airbyte/commons/temporal/ConnectionManageUtilsTest.java @@ -0,0 +1,40 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.commons.temporal; + +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +import io.airbyte.commons.temporal.exception.DeletedWorkflowException; +import io.airbyte.commons.temporal.scheduling.ConnectionManagerWorkflow; +import io.airbyte.metrics.lib.MetricClient; +import io.temporal.client.BatchRequest; +import io.temporal.client.WorkflowOptions; +import java.util.UUID; +import org.junit.jupiter.api.Test; + +public class ConnectionManageUtilsTest { + + @Test + void signalAndRepairIfNeceesaryWhenNoWorkflowWillCreate() throws DeletedWorkflowException { + final var mWorkflow = mock(WorkflowClientWrapped.class); + final var mMetric = mock(MetricClient.class); + final var cid = UUID.randomUUID(); + + when(mWorkflow.newWorkflowStub(any(), any(WorkflowOptions.class))) + .thenReturn(mock(ConnectionManagerWorkflow.class)); + when(mWorkflow.newSignalWithStartRequest()).thenReturn(mock(BatchRequest.class)); + + final var utils = new ConnectionManagerUtils(mWorkflow, mMetric); + utils.signalWorkflowAndRepairIfNecessary(cid, (workflow) -> null); + // Because we do not mock the getConnectionManagerWorkflow call, the underlying call throws an + // exception + // and the logic recreates it. 
+ verify(mWorkflow).signalWithStart(any()); + } + +} diff --git a/airbyte-commons-temporal/src/test/java/io/airbyte/commons/temporal/TemporalClientTest.java b/airbyte-commons-temporal/src/test/java/io/airbyte/commons/temporal/TemporalClientTest.java index 97804c368e7..70b1b1e9f50 100644 --- a/airbyte-commons-temporal/src/test/java/io/airbyte/commons/temporal/TemporalClientTest.java +++ b/airbyte-commons-temporal/src/test/java/io/airbyte/commons/temporal/TemporalClientTest.java @@ -426,8 +426,9 @@ void testStartNewManualSyncAlreadyRunning() { @DisplayName("Test startNewManualSync repairs the workflow if it is in a bad state") void testStartNewManualSyncRepairsBadWorkflowState() { final ConnectionManagerWorkflow mTerminatedConnectionManagerWorkflow = mock(ConnectionManagerWorkflow.class); - when(mTerminatedConnectionManagerWorkflow.getState()) - .thenThrow(new IllegalStateException(EXCEPTION_MESSAGE)); + + // This simulates a workflow that is in a bad state. + when(mTerminatedConnectionManagerWorkflow.getState()).thenThrow(new IllegalStateException(EXCEPTION_MESSAGE)); when(mTerminatedConnectionManagerWorkflow.getJobInformation()).thenReturn(new JobInformation(JOB_ID, ATTEMPT_ID)); final ConnectionManagerWorkflow mNewConnectionManagerWorkflow = mock(ConnectionManagerWorkflow.class); @@ -631,8 +632,9 @@ void testResetConnectionSuccessAndContinue() throws IOException { @DisplayName("Test resetConnection repairs the workflow if it is in a bad state") void testResetConnectionRepairsBadWorkflowState() throws IOException { final ConnectionManagerWorkflow mTerminatedConnectionManagerWorkflow = mock(ConnectionManagerWorkflow.class); - when(mTerminatedConnectionManagerWorkflow.getState()) - .thenThrow(new IllegalStateException(EXCEPTION_MESSAGE)); + + // This simulates a workflow that is in a bad state. 
+ when(mTerminatedConnectionManagerWorkflow.getState()).thenThrow(new IllegalStateException(EXCEPTION_MESSAGE)); when(mTerminatedConnectionManagerWorkflow.getJobInformation()).thenReturn(new JobInformation(JOB_ID, ATTEMPT_ID)); final ConnectionManagerWorkflow mNewConnectionManagerWorkflow = mock(ConnectionManagerWorkflow.class); diff --git a/airbyte-commons-with-dependencies/build.gradle.kts b/airbyte-commons-with-dependencies/build.gradle.kts index 8e1f72bf6ea..b10042f6fdb 100644 --- a/airbyte-commons-with-dependencies/build.gradle.kts +++ b/airbyte-commons-with-dependencies/build.gradle.kts @@ -1,21 +1,21 @@ plugins { - id("io.airbyte.gradle.jvm.lib") - id("io.airbyte.gradle.publish") + id("io.airbyte.gradle.jvm.lib") + id("io.airbyte.gradle.publish") } dependencies { - compileOnly(libs.lombok) - annotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut - annotationProcessor(libs.bundles.micronaut.annotation.processor) + compileOnly(libs.lombok) + annotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut + annotationProcessor(libs.bundles.micronaut.annotation.processor) - implementation(project(":airbyte-commons")) - implementation(project(":airbyte-commons-temporal")) - implementation(project(":airbyte-config:config-models")) + implementation(project(":airbyte-commons")) + implementation(project(":airbyte-commons-temporal")) + implementation(project(":airbyte-config:config-models")) - implementation(libs.guava) + implementation(libs.guava) - testAnnotationProcessor(libs.bundles.micronaut.test.annotation.processor) + testAnnotationProcessor(libs.bundles.micronaut.test.annotation.processor) - testImplementation(libs.mockito.core) - testImplementation(libs.bundles.micronaut.test) + testImplementation(libs.mockito.core) + testImplementation(libs.bundles.micronaut.test) } diff --git a/airbyte-commons-with-dependencies/src/main/java/io/airbyte/commons/workers/config/WorkerConfigs.java 
b/airbyte-commons-with-dependencies/src/main/java/io/airbyte/commons/workers/config/WorkerConfigs.java index f073cafa47a..a7ff7fe0c92 100644 --- a/airbyte-commons-with-dependencies/src/main/java/io/airbyte/commons/workers/config/WorkerConfigs.java +++ b/airbyte-commons-with-dependencies/src/main/java/io/airbyte/commons/workers/config/WorkerConfigs.java @@ -19,7 +19,7 @@ */ public class WorkerConfigs { - private static final String DEFAULT_JOB_KUBE_BUSYBOX_IMAGE = "busybox:1.35"; + public static final String DEFAULT_JOB_KUBE_BUSYBOX_IMAGE = "busybox:1.35"; private static final String DEFAULT_JOB_KUBE_CURL_IMAGE = "curlimages/curl:7.87.0"; private static final String DEFAULT_JOB_KUBE_SOCAT_IMAGE = "alpine/socat:1.7.4.4-r0"; private final Configs.WorkerEnvironment workerEnvironment; diff --git a/airbyte-commons-worker/build.gradle.kts b/airbyte-commons-worker/build.gradle.kts index bea90cf1601..871e1a1a6eb 100644 --- a/airbyte-commons-worker/build.gradle.kts +++ b/airbyte-commons-worker/build.gradle.kts @@ -1,112 +1,112 @@ plugins { - id("io.airbyte.gradle.jvm.lib") - id("io.airbyte.gradle.publish") - kotlin("jvm") - kotlin("kapt") + id("io.airbyte.gradle.jvm.lib") + id("io.airbyte.gradle.publish") + kotlin("jvm") + kotlin("kapt") } configurations.all { - resolutionStrategy { - force(libs.platform.testcontainers.postgresql) - } + resolutionStrategy { + force(libs.platform.testcontainers.postgresql) + } } dependencies { - compileOnly(libs.lombok) - annotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut - annotationProcessor(platform(libs.micronaut.platform)) - annotationProcessor(libs.bundles.micronaut.annotation.processor) + compileOnly(libs.lombok) + annotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut + annotationProcessor(platform(libs.micronaut.platform)) + annotationProcessor(libs.bundles.micronaut.annotation.processor) - kapt(libs.bundles.micronaut.annotation.processor) + kapt(libs.bundles.micronaut.annotation.processor) - 
implementation(platform(libs.micronaut.platform)) - implementation(libs.bundles.micronaut) - implementation(libs.bundles.micronaut.metrics) - implementation(libs.micronaut.http) - implementation(libs.kotlin.logging) - implementation(libs.bundles.micronaut.kotlin) - implementation(libs.micronaut.jackson.databind) - implementation(libs.bundles.kubernetes.client) - implementation(libs.java.jwt) - implementation(libs.gson) - implementation(libs.guava) - implementation(libs.temporal.sdk) { - exclude(module = "guava") - } - implementation(libs.apache.ant) - implementation(libs.apache.commons.text) - implementation(libs.bundles.datadog) - implementation(libs.commons.io) - implementation(libs.bundles.apache) - implementation(libs.bundles.log4j) - implementation(libs.failsafe.okhttp) - implementation(libs.google.cloud.storage) - implementation(libs.okhttp) - implementation(libs.aws.java.sdk.s3) - implementation(libs.aws.java.sdk.sts) - implementation(libs.s3) - implementation(libs.sts) + implementation(platform(libs.micronaut.platform)) + implementation(libs.bundles.micronaut) + implementation(libs.bundles.micronaut.metrics) + implementation(libs.micronaut.http) + implementation(libs.kotlin.logging) + implementation(libs.bundles.micronaut.kotlin) + implementation(libs.micronaut.jackson.databind) + implementation(libs.bundles.kubernetes.client) + implementation(libs.java.jwt) + implementation(libs.gson) + implementation(libs.guava) + implementation(libs.temporal.sdk) { + exclude(module = "guava") + } + implementation(libs.apache.ant) + implementation(libs.apache.commons.text) + implementation(libs.bundles.datadog) + implementation(libs.commons.io) + implementation(libs.bundles.apache) + implementation(libs.bundles.log4j) + implementation(libs.failsafe.okhttp) + implementation(libs.google.cloud.storage) + implementation(libs.okhttp) + implementation(libs.aws.java.sdk.s3) + implementation(libs.aws.java.sdk.sts) + implementation(libs.s3) + implementation(libs.sts) - 
implementation(project(":airbyte-api")) - implementation(project(":airbyte-analytics")) - implementation(project(":airbyte-commons")) - implementation(project(":airbyte-commons-auth")) - implementation(project(":airbyte-commons-converters")) - implementation(project(":airbyte-commons-protocol")) - implementation(project(":airbyte-commons-temporal")) - implementation(project(":airbyte-commons-temporal-core")) - implementation(project(":airbyte-commons-with-dependencies")) - implementation(project(":airbyte-config:config-models")) - implementation(project(":airbyte-config:config-persistence")) - implementation(project(":airbyte-config:config-secrets")) - implementation(project(":airbyte-featureflag")) - implementation(project(":airbyte-json-validation")) - implementation(project(":airbyte-metrics:metrics-lib")) - implementation(project(":airbyte-persistence:job-persistence")) - implementation(libs.airbyte.protocol) - implementation(project(":airbyte-worker-models")) - implementation(libs.jakarta.validation.api) + implementation(project(":airbyte-api")) + implementation(project(":airbyte-analytics")) + implementation(project(":airbyte-commons")) + implementation(project(":airbyte-commons-auth")) + implementation(project(":airbyte-commons-converters")) + implementation(project(":airbyte-commons-protocol")) + implementation(project(":airbyte-commons-temporal")) + implementation(project(":airbyte-commons-temporal-core")) + implementation(project(":airbyte-commons-with-dependencies")) + implementation(project(":airbyte-config:config-models")) + implementation(project(":airbyte-config:config-persistence")) + implementation(project(":airbyte-config:config-secrets")) + implementation(project(":airbyte-featureflag")) + implementation(project(":airbyte-json-validation")) + implementation(project(":airbyte-metrics:metrics-lib")) + implementation(project(":airbyte-persistence:job-persistence")) + implementation(libs.airbyte.protocol) + 
implementation(project(":airbyte-worker-models")) + implementation(libs.jakarta.validation.api) - testCompileOnly(libs.lombok) - testAnnotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut - testAnnotationProcessor(platform(libs.micronaut.platform)) - testAnnotationProcessor(libs.bundles.micronaut.annotation.processor) - testAnnotationProcessor(libs.bundles.micronaut.test.annotation.processor) - testAnnotationProcessor(libs.jmh.annotations) + testCompileOnly(libs.lombok) + testAnnotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut + testAnnotationProcessor(platform(libs.micronaut.platform)) + testAnnotationProcessor(libs.bundles.micronaut.annotation.processor) + testAnnotationProcessor(libs.bundles.micronaut.test.annotation.processor) + testAnnotationProcessor(libs.jmh.annotations) - kaptTest(platform(libs.micronaut.platform)) - kaptTest(libs.bundles.micronaut.annotation.processor) - kaptTest(libs.bundles.micronaut.test.annotation.processor) + kaptTest(platform(libs.micronaut.platform)) + kaptTest(libs.bundles.micronaut.annotation.processor) + kaptTest(libs.bundles.micronaut.test.annotation.processor) - testImplementation(libs.bundles.micronaut.test) - testImplementation(libs.mockk) - testImplementation(libs.json.path) - testImplementation(libs.bundles.mockito.inline) - testImplementation(libs.mockk) - testImplementation(variantOf(libs.opentracing.util) { classifier("tests") }) - testImplementation(libs.postgresql) - testImplementation(libs.platform.testcontainers.postgresql) - testImplementation(libs.jmh.core) - testImplementation(libs.jmh.annotations) - testImplementation(libs.docker.java) - testImplementation(libs.docker.java.transport.httpclient5) - testImplementation(libs.reactor.test) - testImplementation(libs.mockk) - testImplementation(libs.bundles.junit) - testImplementation(libs.assertj.core) - testImplementation(libs.junit.pioneer) - testImplementation(libs.mockk) + testImplementation(libs.bundles.micronaut.test) + 
testImplementation(libs.mockk) + testImplementation(libs.json.path) + testImplementation(libs.bundles.mockito.inline) + testImplementation(libs.mockk) + testImplementation(variantOf(libs.opentracing.util) { classifier("tests") }) + testImplementation(libs.postgresql) + testImplementation(libs.platform.testcontainers.postgresql) + testImplementation(libs.jmh.core) + testImplementation(libs.jmh.annotations) + testImplementation(libs.docker.java) + testImplementation(libs.docker.java.transport.httpclient5) + testImplementation(libs.reactor.test) + testImplementation(libs.mockk) + testImplementation(libs.bundles.junit) + testImplementation(libs.assertj.core) + testImplementation(libs.junit.pioneer) + testImplementation(libs.mockk) - testRuntimeOnly(libs.junit.jupiter.engine) - testRuntimeOnly(libs.javax.databind) + testRuntimeOnly(libs.junit.jupiter.engine) + testRuntimeOnly(libs.javax.databind) } tasks.named("test") { - maxHeapSize = "10g" + maxHeapSize = "10g" - useJUnitPlatform { - excludeTags("cloud-storage") - } + useJUnitPlatform { + excludeTags("cloud-storage") + } } // The DuplicatesStrategy will be required while this module is mixture of kotlin and java _with_ lombok dependencies.) @@ -115,5 +115,5 @@ tasks.named("test") { // keepJavacAnnotationProcessors enabled, which causes duplicate META-INF files to be generated.) // Once lombok has been removed, this can also be removed.) 
tasks.withType().configureEach { - duplicatesStrategy = DuplicatesStrategy.EXCLUDE + duplicatesStrategy = DuplicatesStrategy.EXCLUDE } diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/ReplicationInputHydrator.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/ReplicationInputHydrator.java index f973fc32445..eaf0ff3d095 100644 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/ReplicationInputHydrator.java +++ b/airbyte-commons-worker/src/main/java/io/airbyte/workers/ReplicationInputHydrator.java @@ -11,6 +11,7 @@ import io.airbyte.api.client.generated.SecretsPersistenceConfigApi; import io.airbyte.api.client.generated.StateApi; import io.airbyte.api.client.invoker.generated.ApiException; +import io.airbyte.api.client.model.generated.ConnectionAndJobIdRequestBody; import io.airbyte.api.client.model.generated.ConnectionIdRequestBody; import io.airbyte.api.client.model.generated.ConnectionRead; import io.airbyte.api.client.model.generated.ConnectionState; @@ -32,8 +33,11 @@ import io.airbyte.config.helpers.StateMessageHelper; import io.airbyte.config.secrets.SecretsRepositoryReader; import io.airbyte.config.secrets.persistence.RuntimeSecretPersistence; +import io.airbyte.featureflag.ActivateRefreshes; import io.airbyte.featureflag.AutoBackfillOnNewColumns; +import io.airbyte.featureflag.Connection; import io.airbyte.featureflag.FeatureFlagClient; +import io.airbyte.featureflag.Multi; import io.airbyte.featureflag.Organization; import io.airbyte.featureflag.UseRuntimeSecretPersistence; import io.airbyte.featureflag.Workspace; @@ -42,6 +46,7 @@ import io.airbyte.workers.helper.BackfillHelper; import io.airbyte.workers.models.RefreshSchemaActivityOutput; import io.airbyte.workers.models.ReplicationActivityInput; +import java.util.List; import java.util.UUID; import org.jetbrains.annotations.NotNull; import org.slf4j.Logger; @@ -83,12 +88,23 @@ public ReplicationInputHydrator(final ConnectionApi connectionApi, * @throws Exception 
from the Airbyte API */ public ReplicationInput getHydratedReplicationInput(final ReplicationActivityInput replicationActivityInput) throws Exception { + + final boolean canRunRefreshes = featureFlagClient.boolVariation(ActivateRefreshes.INSTANCE, new Multi( + List.of( + new Workspace(replicationActivityInput.getWorkspaceId()), + new Connection(replicationActivityInput.getConnectionId())))); // Retrieve the connection, which we need in a few places. - final ConnectionRead connectionInfo = - AirbyteApiClient + final ConnectionRead connectionInfo = canRunRefreshes ? AirbyteApiClient + .retryWithJitterThrows( + () -> connectionApi.getConnectionForJob(new ConnectionAndJobIdRequestBody() + .connectionId(replicationActivityInput.getConnectionId()) + .jobId(Long.parseLong(replicationActivityInput.getJobRunConfig().getJobId()))), + "retrieve the connection") + : AirbyteApiClient .retryWithJitterThrows( () -> connectionApi.getConnection(new ConnectionIdRequestBody().connectionId(replicationActivityInput.getConnectionId())), "retrieve the connection"); + final ConfiguredAirbyteCatalog catalog = retrieveCatalog(connectionInfo); if (replicationActivityInput.getIsReset()) { // If this is a reset, we need to set the streams being reset to Full Refresh | Overwrite. 
diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/general/BufferedReplicationWorker.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/general/BufferedReplicationWorker.java index 5b5547bbcdd..1531ca8a6f0 100644 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/general/BufferedReplicationWorker.java +++ b/airbyte-commons-worker/src/main/java/io/airbyte/workers/general/BufferedReplicationWorker.java @@ -23,10 +23,12 @@ import io.airbyte.persistence.job.models.ReplicationInput; import io.airbyte.protocol.models.AirbyteMessage; import io.airbyte.protocol.models.AirbyteMessage.Type; +import io.airbyte.protocol.models.AirbyteTraceMessage; import io.airbyte.workers.RecordSchemaValidator; import io.airbyte.workers.context.ReplicationContext; import io.airbyte.workers.context.ReplicationFeatureFlags; import io.airbyte.workers.exception.WorkerException; +import io.airbyte.workers.helper.StreamStatusCompletionTracker; import io.airbyte.workers.internal.AirbyteDestination; import io.airbyte.workers.internal.AirbyteSource; import io.airbyte.workers.internal.DestinationTimeoutMonitor; @@ -35,9 +37,11 @@ import io.airbyte.workers.internal.exception.SourceException; import io.airbyte.workers.internal.syncpersistence.SyncPersistence; import java.nio.file.Path; +import java.util.List; import java.util.Map; import java.util.Optional; import java.util.OptionalInt; +import java.util.UUID; import java.util.concurrent.CompletableFuture; import java.util.concurrent.CompletionException; import java.util.concurrent.ExecutionException; @@ -80,6 +84,7 @@ public class BufferedReplicationWorker implements ReplicationWorker { private final Stopwatch writeToDestStopwatch; private final Stopwatch readFromDestStopwatch; private final Stopwatch processFromDestStopwatch; + private final StreamStatusCompletionTracker streamStatusCompletionTracker; private static final int sourceMaxBufferSize = 1000; private static final int destinationMaxBufferSize = 1000; 
@@ -96,9 +101,10 @@ public BufferedReplicationWorker(final String jobId, final ReplicationFeatureFlagReader replicationFeatureFlagReader, final ReplicationWorkerHelper replicationWorkerHelper, final DestinationTimeoutMonitor destinationTimeoutMonitor, - final BufferedReplicationWorkerType bufferedReplicationWorkerType) { + final BufferedReplicationWorkerType bufferedReplicationWorkerType, + final StreamStatusCompletionTracker streamStatusCompletionTracker) { this(jobId, attempt, source, destination, syncPersistence, recordSchemaValidator, srcHeartbeatTimeoutChaperone, replicationFeatureFlagReader, - replicationWorkerHelper, destinationTimeoutMonitor, bufferedReplicationWorkerType, OptionalInt.empty()); + replicationWorkerHelper, destinationTimeoutMonitor, bufferedReplicationWorkerType, OptionalInt.empty(), streamStatusCompletionTracker); } public BufferedReplicationWorker(final String jobId, @@ -112,7 +118,8 @@ public BufferedReplicationWorker(final String jobId, final ReplicationWorkerHelper replicationWorkerHelper, final DestinationTimeoutMonitor destinationTimeoutMonitor, final BufferedReplicationWorkerType bufferedReplicationWorkerType, - final OptionalInt pollTimeOutDurationForQueue) { + final OptionalInt pollTimeOutDurationForQueue, + final StreamStatusCompletionTracker streamStatusCompletionTracker) { this.jobId = jobId; this.attempt = attempt; this.source = source; @@ -140,6 +147,7 @@ public BufferedReplicationWorker(final String jobId, this.writeToDestStopwatch = new Stopwatch(); this.readFromDestStopwatch = new Stopwatch(); this.processFromDestStopwatch = new Stopwatch(); + this.streamStatusCompletionTracker = streamStatusCompletionTracker; } @Trace(operationName = WORKER_OPERATION_NAME) @@ -152,7 +160,7 @@ public ReplicationOutput run(final ReplicationInput replicationInput, final Path try { final ReplicationContext replicationContext = getReplicationContext(replicationInput); final ReplicationFeatureFlags flags = 
replicationFeatureFlagReader.readReplicationFeatureFlags(); - replicationWorkerHelper.initialize(replicationContext, flags, jobRoot); + replicationWorkerHelper.initialize(replicationContext, flags, jobRoot, replicationInput.getCatalog()); final CloseableWithTimeout destinationWithCloseTimeout = new CloseableWithTimeout(destination, mdc, flags); // note: resources are closed in the opposite order in which they are declared. thus source will be @@ -278,10 +286,13 @@ private void trackFailures(final V value, final Throwable t) { } private ReplicationContext getReplicationContext(final ReplicationInput replicationInput) { + + final UUID sourceDefinitionId = replicationWorkerHelper.getSourceDefinitionIdForSourceId(replicationInput.getSourceId()); + final UUID destinationDefinitionId = replicationWorkerHelper.getDestinationDefinitionIdForDestinationId(replicationInput.getDestinationId()); return new ReplicationContext(replicationInput.getIsReset(), replicationInput.getConnectionId(), replicationInput.getSourceId(), replicationInput.getDestinationId(), Long.parseLong(jobId), attempt, replicationInput.getWorkspaceId(), replicationInput.getSourceLauncherConfig().getDockerImage(), - replicationInput.getDestinationLauncherConfig().getDockerImage()); + replicationInput.getDestinationLauncherConfig().getDockerImage(), sourceDefinitionId, destinationDefinitionId); } @Override @@ -345,7 +356,12 @@ private void readFromSource() { while (!replicationWorkerHelper.getShouldAbort() && !(sourceIsFinished = sourceIsFinished()) && !messagesFromSourceQueue.isClosed()) { final Optional messageOptional = source.attemptRead(); if (messageOptional.isPresent()) { - while (!replicationWorkerHelper.getShouldAbort() && !messagesFromSourceQueue.add(messageOptional.get()) + final AirbyteMessage message = messageOptional.get(); + if (message.getType() == Type.TRACE && message.getTrace().getType() == AirbyteTraceMessage.Type.STREAM_STATUS) { + 
streamStatusCompletionTracker.track(message.getTrace().getStreamStatus()); + } + + while (!replicationWorkerHelper.getShouldAbort() && !messagesFromSourceQueue.add(message) && !messagesFromSourceQueue.isClosed()) { Thread.sleep(100); } @@ -434,6 +450,11 @@ private void writeToDestination() { destination.accept(message); } } + + final List statusMessageToSend = replicationWorkerHelper.getStreamStatusToSend(source.getExitValue()); + for (AirbyteMessage airbyteMessage : statusMessageToSend) { + destination.accept(airbyteMessage); + } } finally { destination.notifyEndOfInput(); } diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/general/DefaultReplicationWorker.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/general/DefaultReplicationWorker.java index ac18c0a58f0..6357a29666d 100644 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/general/DefaultReplicationWorker.java +++ b/airbyte-commons-worker/src/main/java/io/airbyte/workers/general/DefaultReplicationWorker.java @@ -18,10 +18,12 @@ import io.airbyte.persistence.job.models.ReplicationInput; import io.airbyte.protocol.models.AirbyteMessage; import io.airbyte.protocol.models.AirbyteMessage.Type; +import io.airbyte.protocol.models.AirbyteTraceMessage; import io.airbyte.workers.RecordSchemaValidator; import io.airbyte.workers.context.ReplicationContext; import io.airbyte.workers.context.ReplicationFeatureFlags; import io.airbyte.workers.exception.WorkerException; +import io.airbyte.workers.helper.StreamStatusCompletionTracker; import io.airbyte.workers.internal.AirbyteDestination; import io.airbyte.workers.internal.AirbyteSource; import io.airbyte.workers.internal.DestinationTimeoutMonitor; @@ -30,6 +32,7 @@ import io.airbyte.workers.internal.exception.SourceException; import io.airbyte.workers.internal.syncpersistence.SyncPersistence; import java.nio.file.Path; +import java.util.List; import java.util.Map; import java.util.Optional; import 
java.util.concurrent.CompletableFuture; @@ -77,6 +80,7 @@ public class DefaultReplicationWorker implements ReplicationWorker { private final RecordSchemaValidator recordSchemaValidator; private final HeartbeatTimeoutChaperone srcHeartbeatTimeoutChaperone; private final ReplicationFeatureFlagReader replicationFeatureFlagReader; + private final StreamStatusCompletionTracker streamStatusCompletionTracker; private static final int executorShutdownGracePeriodInSeconds = 10; @@ -89,7 +93,8 @@ public DefaultReplicationWorker(final String jobId, final HeartbeatTimeoutChaperone srcHeartbeatTimeoutChaperone, final ReplicationFeatureFlagReader replicationFeatureFlagReader, final ReplicationWorkerHelper replicationWorkerHelper, - final DestinationTimeoutMonitor destinationTimeoutMonitor) { + final DestinationTimeoutMonitor destinationTimeoutMonitor, + final StreamStatusCompletionTracker streamStatusCompletionTracker) { this.jobId = jobId; this.attempt = attempt; this.destinationTimeoutMonitor = destinationTimeoutMonitor; @@ -102,6 +107,7 @@ public DefaultReplicationWorker(final String jobId, this.executors = Executors.newFixedThreadPool(5); this.recordSchemaValidator = recordSchemaValidator; this.srcHeartbeatTimeoutChaperone = srcHeartbeatTimeoutChaperone; + this.streamStatusCompletionTracker = streamStatusCompletionTracker; this.replicationFeatureFlagReader = replicationFeatureFlagReader; this.hasFailed = new AtomicBoolean(false); @@ -125,7 +131,6 @@ public final ReplicationOutput run(final ReplicationInput replicationInput, fina LOGGER.info("start sync worker. 
job id: {} attempt id: {}", jobId, attempt); LineGobbler.startSection("REPLICATION"); - try { LOGGER.info("configured sync modes: {}", replicationInput.getCatalog().getStreams() .stream() @@ -135,11 +140,12 @@ public final ReplicationOutput run(final ReplicationInput replicationInput, fina new ReplicationContext(replicationInput.getIsReset(), replicationInput.getConnectionId(), replicationInput.getSourceId(), replicationInput.getDestinationId(), Long.parseLong(jobId), attempt, replicationInput.getWorkspaceId(), replicationInput.getSourceLauncherConfig().getDockerImage(), - replicationInput.getDestinationLauncherConfig().getDockerImage()); + replicationInput.getDestinationLauncherConfig().getDockerImage(), + replicationWorkerHelper.getSourceDefinitionIdForSourceId(replicationInput.getSourceId()), + replicationWorkerHelper.getDestinationDefinitionIdForDestinationId(replicationInput.getDestinationId())); final ReplicationFeatureFlags flags = replicationFeatureFlagReader.readReplicationFeatureFlags(); - replicationWorkerHelper.initialize(replicationContext, flags, jobRoot); - + replicationWorkerHelper.initialize(replicationContext, flags, jobRoot, replicationInput.getCatalog()); replicate(jobRoot, replicationInput, flags); return replicationWorkerHelper.getReplicationOutput(); @@ -186,7 +192,8 @@ private void replicate(final Path jobRoot, source, destination, replicationWorkerHelper, - mdc), executors) + mdc, + streamStatusCompletionTracker), executors) .whenComplete((msg, ex) -> { if (ex != null) { ApmTraceUtils.addExceptionToTrace(ex); @@ -330,7 +337,8 @@ private static Runnable readFromDstRunnable(final AirbyteDestination destination private static Runnable readFromSrcAndWriteToDstRunnable(final AirbyteSource source, final AirbyteDestination destination, final ReplicationWorkerHelper replicationWorkerHelper, - final Map mdc) { + final Map mdc, + final StreamStatusCompletionTracker streamStatusCompletionTracker) { return () -> { MDC.setContextMap(mdc); 
LOGGER.info("Replication thread started."); @@ -346,6 +354,9 @@ private static Runnable readFromSrcAndWriteToDstRunnable(final AirbyteSource sou if (messageOptional.isPresent()) { final AirbyteMessage airbyteMessage = messageOptional.get(); + if (airbyteMessage.getType() == Type.TRACE && airbyteMessage.getTrace().getType() == AirbyteTraceMessage.Type.STREAM_STATUS) { + streamStatusCompletionTracker.track(airbyteMessage.getTrace().getStreamStatus()); + } final Optional processedAirbyteMessage = replicationWorkerHelper.processMessageFromSource(airbyteMessage); @@ -373,6 +384,12 @@ private static Runnable readFromSrcAndWriteToDstRunnable(final AirbyteSource sou } replicationWorkerHelper.endOfSource(); + List statusMessageToSend = replicationWorkerHelper.getStreamStatusToSend(source.getExitValue()); + + for (AirbyteMessage airbyteMessage : statusMessageToSend) { + destination.accept(airbyteMessage); + } + try { destination.notifyEndOfInput(); } catch (final Exception e) { diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/general/ReplicationWorkerFactory.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/general/ReplicationWorkerFactory.java index 8a04a8bbb8b..8d09ef48652 100644 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/general/ReplicationWorkerFactory.java +++ b/airbyte-commons-worker/src/main/java/io/airbyte/workers/general/ReplicationWorkerFactory.java @@ -9,6 +9,7 @@ import io.airbyte.analytics.TrackingClient; import io.airbyte.api.client.AirbyteApiClient; +import io.airbyte.api.client.WorkloadApiClient; import io.airbyte.api.client.generated.DestinationApi; import io.airbyte.api.client.generated.SourceApi; import io.airbyte.api.client.generated.SourceDefinitionApi; @@ -44,6 +45,7 @@ import io.airbyte.workers.WorkerMetricReporter; import io.airbyte.workers.WorkerUtils; import io.airbyte.workers.helper.AirbyteMessageDataExtractor; +import io.airbyte.workers.helper.StreamStatusCompletionTracker; import 
io.airbyte.workers.internal.AirbyteDestination; import io.airbyte.workers.internal.AirbyteMapper; import io.airbyte.workers.internal.AirbyteSource; @@ -59,10 +61,10 @@ import io.airbyte.workers.internal.syncpersistence.SyncPersistence; import io.airbyte.workers.internal.syncpersistence.SyncPersistenceFactory; import io.airbyte.workers.process.AirbyteIntegrationLauncherFactory; -import io.airbyte.workload.api.client.generated.WorkloadApi; import io.micronaut.context.annotation.Value; import io.micronaut.core.util.CollectionUtils; import jakarta.inject.Singleton; +import java.time.Clock; import java.time.Duration; import java.util.ArrayList; import java.util.List; @@ -86,7 +88,6 @@ public class ReplicationWorkerFactory { private final AirbyteIntegrationLauncherFactory airbyteIntegrationLauncherFactory; private final SourceApi sourceApi; private final SourceDefinitionApi sourceDefinitionApi; - private final DestinationApi destinationApi; private final SyncPersistenceFactory syncPersistenceFactory; private final AirbyteMessageDataExtractor airbyteMessageDataExtractor; private final FeatureFlagClient featureFlagClient; @@ -94,27 +95,31 @@ public class ReplicationWorkerFactory { private final MetricClient metricClient; private final ReplicationAirbyteMessageEventPublishingHelper replicationAirbyteMessageEventPublishingHelper; private final TrackingClient trackingClient; - private final WorkloadApi workloadApi; + private final WorkloadApiClient workloadApiClient; private final boolean workloadEnabled; + private final DestinationApi destinationApi; + private final StreamStatusCompletionTracker streamStatusCompletionTracker; + private final Clock clock; public ReplicationWorkerFactory( final AirbyteIntegrationLauncherFactory airbyteIntegrationLauncherFactory, final AirbyteMessageDataExtractor airbyteMessageDataExtractor, final SourceApi sourceApi, final SourceDefinitionApi sourceDefinitionApi, - final DestinationApi destinationApi, final SyncPersistenceFactory 
syncPersistenceFactory, final FeatureFlagClient featureFlagClient, final FeatureFlags featureFlags, final ReplicationAirbyteMessageEventPublishingHelper replicationAirbyteMessageEventPublishingHelper, final MetricClient metricClient, - final WorkloadApi workloadApi, + final WorkloadApiClient workloadApiClient, final TrackingClient trackingClient, - @Value("${airbyte.workload.enabled}") final boolean workloadEnabled) { + @Value("${airbyte.workload.enabled}") final boolean workloadEnabled, + final DestinationApi destinationApi, + final StreamStatusCompletionTracker streamStatusCompletionTracker, + final Clock clock) { this.airbyteIntegrationLauncherFactory = airbyteIntegrationLauncherFactory; this.sourceApi = sourceApi; this.sourceDefinitionApi = sourceDefinitionApi; - this.destinationApi = destinationApi; this.syncPersistenceFactory = syncPersistenceFactory; this.airbyteMessageDataExtractor = airbyteMessageDataExtractor; this.replicationAirbyteMessageEventPublishingHelper = replicationAirbyteMessageEventPublishingHelper; @@ -122,9 +127,12 @@ public ReplicationWorkerFactory( this.featureFlagClient = featureFlagClient; this.featureFlags = featureFlags; this.metricClient = metricClient; - this.workloadApi = workloadApi; + this.workloadApiClient = workloadApiClient; this.workloadEnabled = workloadEnabled; this.trackingClient = trackingClient; + this.destinationApi = destinationApi; + this.streamStatusCompletionTracker = streamStatusCompletionTracker; + this.clock = clock; } /** @@ -175,8 +183,8 @@ public ReplicationWorker create(final ReplicationInput replicationInput, return createReplicationWorker(airbyteSource, airbyteDestination, messageTracker, syncPersistence, recordSchemaValidator, fieldSelector, heartbeatTimeoutChaperone, featureFlagClient, jobRunConfig, replicationInput, airbyteMessageDataExtractor, replicationAirbyteMessageEventPublishingHelper, - onReplicationRunning, metricClient, destinationTimeout, workloadApi, workloadEnabled, analyticsMessageTracker, - 
workloadId); + onReplicationRunning, metricClient, destinationTimeout, workloadApiClient, workloadEnabled, analyticsMessageTracker, + workloadId, sourceApi, destinationApi, streamStatusCompletionTracker, clock); } /** @@ -222,10 +230,12 @@ private static HeartbeatMonitor createHeartbeatMonitor(final UUID sourceDefiniti final Long maxSecondsBetweenMessages = sourceDefinitionId != null ? AirbyteApiClient.retryWithJitter(() -> sourceDefinitionApi .getSourceDefinition(new SourceDefinitionIdRequestBody().sourceDefinitionId(sourceDefinitionId)), "get the source definition") .getMaxSecondsBetweenMessages() : null; + if (maxSecondsBetweenMessages != null) { - // reset jobs use an empty source to induce resetting all data in destination. return new HeartbeatMonitor(Duration.ofSeconds(maxSecondsBetweenMessages)); } + + // reset jobs use an empty source to induce resetting all data in destination. log.warn("An error occurred while fetch the max seconds between messages for this source. We are using a default of 24 hours"); return new HeartbeatMonitor(Duration.ofSeconds(TimeUnit.HOURS.toSeconds(24))); } @@ -313,10 +323,14 @@ private static ReplicationWorker createReplicationWorker(final AirbyteSource sou final VoidCallable onReplicationRunning, final MetricClient metricClient, final DestinationTimeoutMonitor destinationTimeout, - final WorkloadApi workloadApi, + final WorkloadApiClient workloadApiClient, final boolean workloadEnabled, final AnalyticsMessageTracker analyticsMessageTracker, - final Optional workloadId) { + final Optional workloadId, + final SourceApi sourceApi, + final DestinationApi destinationApi, + final StreamStatusCompletionTracker streamStatusCompletionTracker, + final Clock clock) { final Context flagContext = getFeatureFlagContext(replicationInput); final String workerImpl = featureFlagClient.stringVariation(ReplicationWorkerImpl.INSTANCE, flagContext); return buildReplicationWorkerInstance( @@ -340,10 +354,15 @@ private static ReplicationWorker 
createReplicationWorker(final AirbyteSource sou onReplicationRunning, metricClient, destinationTimeout, - workloadApi, + workloadApiClient, workloadEnabled, analyticsMessageTracker, - workloadId); + workloadId, + featureFlagClient, + sourceApi, + destinationApi, + streamStatusCompletionTracker, + clock); } private static Context getFeatureFlagContext(final ReplicationInput replicationInput) { @@ -385,25 +404,31 @@ private static ReplicationWorker buildReplicationWorkerInstance(final String wor final VoidCallable onReplicationRunning, final MetricClient metricClient, final DestinationTimeoutMonitor destinationTimeout, - final WorkloadApi workloadApi, + final WorkloadApiClient workloadApiClient, final boolean workloadEnabled, final AnalyticsMessageTracker analyticsMessageTracker, - final Optional workloadId) { + final Optional workloadId, + final FeatureFlagClient featureFlagClient, + final SourceApi sourceApi, + final DestinationApi destinationApi, + final StreamStatusCompletionTracker streamStatusCompletionTracker, + final Clock clock) { final ReplicationWorkerHelper replicationWorkerHelper = new ReplicationWorkerHelper(airbyteMessageDataExtractor, fieldSelector, mapper, messageTracker, syncPersistence, - messageEventPublishingHelper, new ThreadedTimeTracker(), onReplicationRunning, workloadApi, - workloadEnabled, analyticsMessageTracker, workloadId); + messageEventPublishingHelper, new ThreadedTimeTracker(), onReplicationRunning, workloadApiClient, + workloadEnabled, analyticsMessageTracker, workloadId, sourceApi, destinationApi, streamStatusCompletionTracker); final Optional bufferedReplicationWorkerType = bufferedReplicationWorkerType(workerImpl); if (bufferedReplicationWorkerType.isPresent()) { metricClient.count(OssMetricsRegistry.REPLICATION_WORKER_CREATED, 1, new MetricAttribute(MetricTags.IMPLEMENTATION, workerImpl)); return new BufferedReplicationWorker(jobId, attempt, source, destination, syncPersistence, recordSchemaValidator, 
srcHeartbeatTimeoutChaperone, replicationFeatureFlagReader, replicationWorkerHelper, destinationTimeout, - bufferedReplicationWorkerType.get()); + bufferedReplicationWorkerType.get(), streamStatusCompletionTracker); } else { metricClient.count(OssMetricsRegistry.REPLICATION_WORKER_CREATED, 1, new MetricAttribute(MetricTags.IMPLEMENTATION, "default")); return new DefaultReplicationWorker(jobId, attempt, source, destination, syncPersistence, recordSchemaValidator, - srcHeartbeatTimeoutChaperone, replicationFeatureFlagReader, replicationWorkerHelper, destinationTimeout); + srcHeartbeatTimeoutChaperone, replicationFeatureFlagReader, replicationWorkerHelper, destinationTimeout, + new StreamStatusCompletionTracker(featureFlagClient, clock)); } } diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/helper/FailureHelper.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/helper/FailureHelper.java index d594fcd1a0f..90d1e562491 100644 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/helper/FailureHelper.java +++ b/airbyte-commons-worker/src/main/java/io/airbyte/workers/helper/FailureHelper.java @@ -12,6 +12,7 @@ import io.airbyte.config.FailureReason.FailureOrigin; import io.airbyte.config.FailureReason.FailureType; import io.airbyte.config.Metadata; +import io.airbyte.config.StreamDescriptor; import io.airbyte.protocol.models.AirbyteTraceMessage; import java.util.Comparator; import java.util.List; @@ -102,11 +103,19 @@ public static FailureReason genericFailure(final AirbyteTraceMessage m, final Lo failureType = FailureType.SYSTEM_ERROR; } } + StreamDescriptor streamDescriptor = null; + if (m.getError().getStreamDescriptor() != null) { + streamDescriptor = new StreamDescriptor() + .withNamespace(m.getError().getStreamDescriptor().getNamespace()) + .withName(m.getError().getStreamDescriptor().getName()); + } + return new FailureReason() .withInternalMessage(m.getError().getInternalMessage()) 
.withExternalMessage(m.getError().getMessage()) .withStacktrace(m.getError().getStackTrace()) .withTimestamp(m.getEmittedAt().longValue()) + .withStreamDescriptor(streamDescriptor) .withFailureType(failureType) .withMetadata(traceMessageMetadata(jobId, attemptNumber)); } @@ -202,11 +211,18 @@ public static FailureReason sourceHeartbeatFailure(final Throwable t, * @param attemptNumber attempt number * @return failure reason */ - public static FailureReason destinationTimeoutFailure(final Throwable t, final Long jobId, final Integer attemptNumber) { + public static FailureReason destinationTimeoutFailure(final Throwable t, + final Long jobId, + final Integer attemptNumber, + final String humanReadableThreshold, + final String timeBetweenLastAction) { + final var errorMessage = String.format( + "Airbyte detected that the Destination didn't make progress in the last %s, exceeding the configured %s threshold. Airbyte will try reading again on the next sync. Please see https://docs.airbyte.com/understanding-airbyte/heartbeats for more info.", + timeBetweenLastAction, humanReadableThreshold); return connectorCommandFailure(t, jobId, attemptNumber, ConnectorCommand.WRITE) .withFailureOrigin(FailureOrigin.DESTINATION) .withFailureType(FailureType.DESTINATION_TIMEOUT) - .withExternalMessage("Something went wrong when calling the destination. 
The destination seems stuck"); + .withExternalMessage(errorMessage); } /** diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/DefaultAirbyteDestination.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/DefaultAirbyteDestination.java index d9af849cd67..ec8f8f6cd56 100644 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/DefaultAirbyteDestination.java +++ b/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/DefaultAirbyteDestination.java @@ -18,10 +18,7 @@ import io.airbyte.commons.protocol.DefaultProtocolSerializer; import io.airbyte.commons.protocol.ProtocolSerializer; import io.airbyte.config.WorkerDestinationConfig; -import io.airbyte.metrics.lib.MetricAttribute; import io.airbyte.metrics.lib.MetricClient; -import io.airbyte.metrics.lib.MetricTags; -import io.airbyte.metrics.lib.OssMetricsRegistry; import io.airbyte.protocol.models.AirbyteMessage; import io.airbyte.protocol.models.AirbyteMessage.Type; import io.airbyte.workers.WorkerUtils; @@ -60,7 +57,7 @@ public class DefaultAirbyteDestination implements AirbyteDestination { private final AirbyteStreamFactory streamFactory; private final AirbyteMessageBufferedWriterFactory messageWriterFactory; private final ProtocolSerializer protocolSerializer; - private final MetricClient metricClient; + private final MessageMetricsTracker messageMetricsTracker; private final AtomicBoolean inputHasEnded = new AtomicBoolean(false); @@ -69,7 +66,6 @@ public class DefaultAirbyteDestination implements AirbyteDestination { private Iterator messageIterator = null; private Integer exitValue = null; private final DestinationTimeoutMonitor destinationTimeoutMonitor; - private MetricAttribute connectionAttribute = null; @VisibleForTesting public DefaultAirbyteDestination(final IntegrationLauncher integrationLauncher, @@ -79,9 +75,7 @@ public DefaultAirbyteDestination(final IntegrationLauncher integrationLauncher, 
VersionedAirbyteStreamFactory.noMigrationVersionedAirbyteStreamFactory( LOGGER, CONTAINER_LOG_MDC_BUILDER, - Optional.empty(), - Runtime.getRuntime().maxMemory(), - new VersionedAirbyteStreamFactory.InvalidLineFailureConfiguration(false, false), + new VersionedAirbyteStreamFactory.InvalidLineFailureConfiguration(false), new GsonPksExtractor()), new DefaultAirbyteMessageBufferedWriterFactory(), new DefaultProtocolSerializer(), @@ -101,7 +95,7 @@ public DefaultAirbyteDestination(final IntegrationLauncher integrationLauncher, this.messageWriterFactory = messageWriterFactory; this.protocolSerializer = protocolSerializer; this.destinationTimeoutMonitor = destinationTimeoutMonitor; - this.metricClient = metricClient; + this.messageMetricsTracker = new MessageMetricsTracker(metricClient); } @Override @@ -109,7 +103,7 @@ public void start(final WorkerDestinationConfig destinationConfig, final Path jo Preconditions.checkState(destinationProcess == null); if (destinationConfig.getConnectionId() != null) { - connectionAttribute = new MetricAttribute(MetricTags.CONNECTION_ID, destinationConfig.getConnectionId().toString()); + messageMetricsTracker.trackConnectionId(destinationConfig.getConnectionId()); } LOGGER.info("Running destination..."); @@ -132,12 +126,7 @@ public void start(final WorkerDestinationConfig destinationConfig, final Path jo @Override public void accept(final AirbyteMessage message) throws IOException { - final MetricAttribute typeAttribute = new MetricAttribute(MetricTags.MESSAGE_TYPE, message.getType().toString()); - if (connectionAttribute != null) { - metricClient.count(OssMetricsRegistry.WORKER_DESTINATION_MESSAGE_SENT, 1, connectionAttribute, typeAttribute); - } else { - metricClient.count(OssMetricsRegistry.WORKER_DESTINATION_MESSAGE_SENT, 1, typeAttribute); - } + messageMetricsTracker.trackDestSent(message.getType()); destinationTimeoutMonitor.startAcceptTimer(); acceptWithNoTimeoutMonitor(message); destinationTimeoutMonitor.resetAcceptTimer(); @@ 
-170,6 +159,8 @@ public void notifyEndOfInputWithNoTimeoutMonitor() throws IOException { @Override public void close() throws Exception { + emitDestinationMessageCountMetrics(); + if (destinationProcess == null) { LOGGER.debug("Destination process already exited"); return; @@ -190,6 +181,8 @@ public void close() throws Exception { @Override public void cancel() throws Exception { + emitDestinationMessageCountMetrics(); + LOGGER.info("Attempting to cancel destination process..."); if (destinationProcess == null) { @@ -229,14 +222,14 @@ public Optional attemptRead() { final Optional m = Optional.ofNullable(messageIterator.hasNext() ? messageIterator.next() : null); if (m.isPresent()) { - final MetricAttribute typeAttribute = new MetricAttribute(MetricTags.MESSAGE_TYPE, m.get().getType().toString()); - if (connectionAttribute != null) { - metricClient.count(OssMetricsRegistry.WORKER_DESTINATION_MESSAGE_READ, 1, connectionAttribute, typeAttribute); - } else { - metricClient.count(OssMetricsRegistry.WORKER_DESTINATION_MESSAGE_READ, 1, typeAttribute); - } + messageMetricsTracker.trackDestRead(m.get().getType()); } return m; } + private void emitDestinationMessageCountMetrics() { + messageMetricsTracker.flushDestReadCountMetric(); + messageMetricsTracker.flushDestSentCountMetric(); + } + } diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/DefaultAirbyteSource.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/DefaultAirbyteSource.java index a52b601d96c..62a8ea9190b 100644 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/DefaultAirbyteSource.java +++ b/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/DefaultAirbyteSource.java @@ -22,10 +22,7 @@ import io.airbyte.commons.logging.MdcScope.Builder; import io.airbyte.commons.protocol.ProtocolSerializer; import io.airbyte.config.WorkerSourceConfig; -import io.airbyte.metrics.lib.MetricAttribute; import io.airbyte.metrics.lib.MetricClient; 
-import io.airbyte.metrics.lib.MetricTags; -import io.airbyte.metrics.lib.OssMetricsRegistry; import io.airbyte.protocol.models.AirbyteMessage; import io.airbyte.protocol.models.AirbyteMessage.Type; import io.airbyte.workers.WorkerUtils; @@ -64,13 +61,12 @@ public class DefaultAirbyteSource implements AirbyteSource { private final AirbyteStreamFactory streamFactory; private final ProtocolSerializer protocolSerializer; private final HeartbeatMonitor heartbeatMonitor; - private final MetricClient metricClient; + private final MessageMetricsTracker messageMetricsTracker; private Process sourceProcess = null; private Iterator messageIterator = null; private Integer exitValue = null; private final boolean featureFlagLogConnectorMsgs; - private MetricAttribute connectionAttribute = null; public DefaultAirbyteSource(final IntegrationLauncher integrationLauncher, final AirbyteStreamFactory streamFactory, @@ -83,7 +79,7 @@ public DefaultAirbyteSource(final IntegrationLauncher integrationLauncher, this.protocolSerializer = protocolSerializer; this.heartbeatMonitor = heartbeatMonitor; this.featureFlagLogConnectorMsgs = featureFlags.logConnectorMessages(); - this.metricClient = metricClient; + this.messageMetricsTracker = new MessageMetricsTracker(metricClient); } @Override @@ -91,7 +87,7 @@ public void start(final WorkerSourceConfig sourceConfig, final Path jobRoot, fin Preconditions.checkState(sourceProcess == null); if (connectionId != null) { - connectionAttribute = new MetricAttribute(MetricTags.CONNECTION_ID, connectionId.toString()); + messageMetricsTracker.trackConnectionId(connectionId); } sourceProcess = integrationLauncher.read(jobRoot, @@ -152,18 +148,15 @@ public Optional attemptRead() { final Optional m = Optional.ofNullable(messageIterator.hasNext() ? 
messageIterator.next() : null); if (m.isPresent()) { - final MetricAttribute typeAttribute = new MetricAttribute(MetricTags.MESSAGE_TYPE, m.get().getType().toString()); - if (connectionAttribute != null) { - metricClient.count(OssMetricsRegistry.WORKER_SOURCE_MESSAGE_READ, 1, connectionAttribute, typeAttribute); - } else { - metricClient.count(OssMetricsRegistry.WORKER_SOURCE_MESSAGE_READ, 1, typeAttribute); - } + messageMetricsTracker.trackSourceRead(m.get().getType()); } return m; } @Override public void close() throws Exception { + emitSourceMessageReadCountMetric(); + if (sourceProcess == null) { LOGGER.debug("Source process already exited"); return; @@ -184,6 +177,8 @@ public void close() throws Exception { @Trace(operationName = WORKER_OPERATION_NAME) @Override public void cancel() throws Exception { + emitSourceMessageReadCountMetric(); + LOGGER.info("Attempting to cancel source process..."); if (sourceProcess == null) { @@ -195,6 +190,10 @@ public void cancel() throws Exception { } } + private void emitSourceMessageReadCountMetric() { + messageMetricsTracker.flushSourceReadCountMetric(); + } + private void logInitialStateAsJSON(final WorkerSourceConfig sourceConfig) { if (!featureFlagLogConnectorMsgs) { return; diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/DestinationTimeoutMonitor.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/DestinationTimeoutMonitor.java index b40eb47fff9..1d064aa07d4 100644 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/DestinationTimeoutMonitor.java +++ b/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/DestinationTimeoutMonitor.java @@ -20,6 +20,7 @@ import java.util.concurrent.Executors; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicReference; +import org.apache.commons.lang3.time.DurationFormatUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -46,6 +47,8 @@ public class 
DestinationTimeoutMonitor implements AutoCloseable { private final AtomicReference currentAcceptCallStartTime = new AtomicReference<>(null); private final AtomicReference currentNotifyEndOfInputCallStartTime = new AtomicReference<>(null); + private final AtomicReference timeSinceLastAction = new AtomicReference<>(null); + private final UUID workspaceId; private ExecutorService lazyExecutorService; private final UUID connectionId; @@ -127,7 +130,7 @@ public void runWithTimeoutThread(final CompletableFuture runnableFuture) t LOGGER.info("thread status... timeout thread: {} , replication thread: {}", timeoutMonitorFuture.isDone(), runnableFuture.isDone()); if (timeoutMonitorFuture.isDone() && !runnableFuture.isDone()) { - onTimeout(runnableFuture); + onTimeout(runnableFuture, timeout.toMillis(), timeSinceLastAction.get()); } timeoutMonitorFuture.cancel(true); @@ -184,11 +187,11 @@ public void resetNotifyEndOfInputTimer() { currentNotifyEndOfInputCallStartTime.set(null); } - private void onTimeout(final CompletableFuture runnableFuture) { + private void onTimeout(final CompletableFuture runnableFuture, final long threshold, final long timeSinceLastAction) { if (throwExceptionOnTimeout) { runnableFuture.cancel(true); - throw new TimeoutException("Destination has timed out"); + throw new TimeoutException(threshold, timeSinceLastAction); } else { LOGGER.info("Destination has timed out but exception is not thrown due to feature " + "flag being disabled for workspace {} and connection {}", workspaceId, connectionId); @@ -228,10 +231,12 @@ private boolean hasTimedOutOnAccept() { // by the time we get here, currentAcceptCallStartTime might have already been reset. 
// this won't be a problem since we are not getting the start time from currentAcceptCallStartTime // but from startTime - if (System.currentTimeMillis() - startTime > timeout.toMillis()) { + final var timeSince = System.currentTimeMillis() - startTime; + if (timeSince > timeout.toMillis()) { LOGGER.error("Destination has timed out on accept call"); metricClient.count(OssMetricsRegistry.WORKER_DESTINATION_ACCEPT_TIMEOUT, 1, new MetricAttribute(MetricTags.CONNECTION_ID, connectionId.toString())); + timeSinceLastAction.set(timeSince); return true; } } @@ -245,10 +250,12 @@ private boolean hasTimedOutOnNotifyEndOfInput() { // by the time we get here, currentNotifyEndOfInputCallStartTime might have already been reset. // this won't be a problem since we are not getting the start time from // currentNotifyEndOfInputCallStartTime but from startTime - if (System.currentTimeMillis() - startTime > timeout.toMillis()) { + final var timeSince = System.currentTimeMillis() - startTime; + if (timeSince > timeout.toMillis()) { LOGGER.error("Destination has timed out on notifyEndOfInput call"); metricClient.count(OssMetricsRegistry.WORKER_DESTINATION_NOTIFY_END_OF_INPUT_TIMEOUT, 1, new MetricAttribute(MetricTags.CONNECTION_ID, connectionId.toString())); + timeSinceLastAction.set(timeSince); return true; } } @@ -270,8 +277,15 @@ public void close() throws Exception { public static class TimeoutException extends RuntimeException { - public TimeoutException(final String message) { - super(message); + public final String humanReadableThreshold; + public final String humanReadableTimeSinceLastAction; + + public TimeoutException(final long thresholdMs, final long timeSinceLastActionMs) { + super(String.format("Last action %s ago, exceeding the threshold of %s.", + DurationFormatUtils.formatDurationWords(timeSinceLastActionMs, true, true), + DurationFormatUtils.formatDurationWords(thresholdMs, true, true))); + this.humanReadableThreshold = 
DurationFormatUtils.formatDurationWords(thresholdMs, true, true); + this.humanReadableTimeSinceLastAction = DurationFormatUtils.formatDurationWords(timeSinceLastActionMs, true, true); } } diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/HeartbeatTimeoutChaperone.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/HeartbeatTimeoutChaperone.java index 1bb36342c9f..b205c234cb5 100644 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/HeartbeatTimeoutChaperone.java +++ b/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/HeartbeatTimeoutChaperone.java @@ -100,7 +100,8 @@ public HeartbeatTimeoutChaperone(final HeartbeatMonitor heartbeatMonitor, * @throws ExecutionException - throw is the runnable throw an exception */ public void runWithHeartbeatThread(final CompletableFuture runnableFuture) throws ExecutionException { - LOGGER.info("Starting source heartbeat check. Will check every {} minutes.", timeoutCheckDuration.toMinutes()); + LOGGER.info("Starting source heartbeat check. 
Will check threshold of {} seconds, every {} minutes.", + heartbeatMonitor.getHeartbeatFreshnessThreshold().toSeconds(), timeoutCheckDuration.toMinutes()); final CompletableFuture heartbeatFuture = CompletableFuture.runAsync(customMonitor.orElse(this::monitor), getLazyExecutorService()); try { @@ -127,9 +128,9 @@ public void runWithHeartbeatThread(final CompletableFuture runnableFuture) new MetricAttribute(MetricTags.CONNECTION_ID, connectionId.toString()), new MetricAttribute(MetricTags.KILLED, "true"), new MetricAttribute(MetricTags.SOURCE_IMAGE, sourceDockerImage)); - final var threshold = heartbeatMonitor.getHeartbeatFreshnessThreshold().getSeconds(); - final var timeBetweenLastRecord = heartbeatMonitor.getTimeSinceLastBeat().orElse(Duration.ZERO).getSeconds(); - throw new HeartbeatTimeoutException(threshold, timeBetweenLastRecord); + final var thresholdMs = heartbeatMonitor.getHeartbeatFreshnessThreshold().toMillis(); + final var timeBetweenLastRecordMs = heartbeatMonitor.getTimeSinceLastBeat().orElse(Duration.ZERO).toMillis(); + throw new HeartbeatTimeoutException(thresholdMs, timeBetweenLastRecordMs); } else { LOGGER.info("Do not terminate as feature flag is disable"); metricClient.count(OssMetricsRegistry.SOURCE_HEARTBEAT_FAILURE, 1, @@ -188,12 +189,12 @@ public static class HeartbeatTimeoutException extends RuntimeException { public final String humanReadableThreshold; public final String humanReadableTimeSinceLastRec; - public HeartbeatTimeoutException(final long threshold, final long timeBetweenLastRecord) { - super(String.format("Last record saw %s ago, exceeding the threshold of %s.", - DurationFormatUtils.formatDurationWords(timeBetweenLastRecord, true, true), - DurationFormatUtils.formatDurationWords(threshold, true, true))); - this.humanReadableThreshold = DurationFormatUtils.formatDurationWords(threshold, true, true); - this.humanReadableTimeSinceLastRec = DurationFormatUtils.formatDurationWords(threshold, true, true); + public 
HeartbeatTimeoutException(final long thresholdMs, final long timeBetweenLastRecordMs) { + super(String.format("Last record seen %s ago, exceeding the threshold of %s.", + DurationFormatUtils.formatDurationWords(timeBetweenLastRecordMs, true, true), + DurationFormatUtils.formatDurationWords(thresholdMs, true, true))); + this.humanReadableThreshold = DurationFormatUtils.formatDurationWords(thresholdMs, true, true); + this.humanReadableTimeSinceLastRec = DurationFormatUtils.formatDurationWords(timeBetweenLastRecordMs, true, true); } } diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/MessageMetricsTracker.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/MessageMetricsTracker.java new file mode 100644 index 00000000000..95fb15976a6 --- /dev/null +++ b/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/MessageMetricsTracker.java @@ -0,0 +1,87 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.workers.internal; + +import io.airbyte.metrics.lib.MetricAttribute; +import io.airbyte.metrics.lib.MetricClient; +import io.airbyte.metrics.lib.MetricTags; +import io.airbyte.metrics.lib.OssMetricsRegistry; +import io.airbyte.protocol.models.AirbyteMessage.Type; +import java.util.UUID; +import java.util.concurrent.atomic.AtomicLong; + +/** + * Helper to emit metrics around messages exchanged with connectors. 
+ */ +public class MessageMetricsTracker { + + private static final MetricAttribute RECORD_ATTRIBUTE = new MetricAttribute(MetricTags.MESSAGE_TYPE, Type.RECORD.toString()); + private static final MetricAttribute STATE_ATTRIBUTE = new MetricAttribute(MetricTags.MESSAGE_TYPE, Type.STATE.toString()); + + private final MetricClient metricClient; + private final AtomicLong destRecordReadCount = new AtomicLong(); + private final AtomicLong destStateReadCount = new AtomicLong(); + private final AtomicLong destRecordSentCount = new AtomicLong(); + private final AtomicLong destStateSentCount = new AtomicLong(); + private final AtomicLong sourceRecordReadCount = new AtomicLong(); + private final AtomicLong sourceStateReadCount = new AtomicLong(); + private MetricAttribute connectionAttribute = null; + + public MessageMetricsTracker(final MetricClient metricClient) { + this.metricClient = metricClient; + } + + public void trackConnectionId(final UUID connectionId) { + connectionAttribute = new MetricAttribute(MetricTags.CONNECTION_ID, connectionId.toString()); + } + + public void trackDestRead(final Type type) { + if (type == Type.RECORD) { + destRecordReadCount.incrementAndGet(); + } else if (type == Type.STATE) { + destStateReadCount.incrementAndGet(); + } + } + + public void trackDestSent(final Type type) { + if (type == Type.RECORD) { + destRecordSentCount.incrementAndGet(); + } else if (type == Type.STATE) { + destStateSentCount.incrementAndGet(); + } + } + + public void trackSourceRead(final Type type) { + if (type == Type.RECORD) { + sourceRecordReadCount.incrementAndGet(); + } else if (type == Type.STATE) { + sourceStateReadCount.incrementAndGet(); + } + } + + public void flushDestReadCountMetric() { + emitMetric(OssMetricsRegistry.WORKER_DESTINATION_MESSAGE_READ, destRecordReadCount, RECORD_ATTRIBUTE); + emitMetric(OssMetricsRegistry.WORKER_DESTINATION_MESSAGE_READ, destStateReadCount, STATE_ATTRIBUTE); + } + + public void flushDestSentCountMetric() { + 
emitMetric(OssMetricsRegistry.WORKER_DESTINATION_MESSAGE_SENT, destRecordSentCount, RECORD_ATTRIBUTE); + emitMetric(OssMetricsRegistry.WORKER_DESTINATION_MESSAGE_SENT, destStateSentCount, STATE_ATTRIBUTE); + } + + public void flushSourceReadCountMetric() { + emitMetric(OssMetricsRegistry.WORKER_SOURCE_MESSAGE_READ, sourceRecordReadCount, RECORD_ATTRIBUTE); + emitMetric(OssMetricsRegistry.WORKER_SOURCE_MESSAGE_READ, sourceStateReadCount, STATE_ATTRIBUTE); + } + + private void emitMetric(final OssMetricsRegistry metric, final AtomicLong value, final MetricAttribute typeAttribute) { + if (connectionAttribute != null) { + metricClient.count(metric, value.getAndSet(0), connectionAttribute, typeAttribute); + } else { + metricClient.count(metric, value.getAndSet(0), typeAttribute); + } + } + +} diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/VersionedAirbyteStreamFactory.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/VersionedAirbyteStreamFactory.java index 3ccd0ae1864..0883cdc1e13 100644 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/VersionedAirbyteStreamFactory.java +++ b/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/VersionedAirbyteStreamFactory.java @@ -31,16 +31,12 @@ import io.airbyte.workers.helper.GsonPksExtractor; import java.io.BufferedReader; import java.io.IOException; -import java.lang.reflect.InvocationTargetException; import java.nio.charset.StandardCharsets; -import java.text.CharacterIterator; -import java.text.StringCharacterIterator; import java.util.List; import java.util.Optional; import java.util.UUID; import java.util.function.Predicate; import java.util.stream.Stream; -import org.joda.time.DateTime; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -59,16 +55,13 @@ @SuppressWarnings("PMD.MoreThanOneLogger") public class VersionedAirbyteStreamFactory implements AirbyteStreamFactory { - public record InvalidLineFailureConfiguration(boolean 
failTooLongRecords, boolean printLongRecordPks) {} + public record InvalidLineFailureConfiguration(boolean printLongRecordPks) {} - public static final String RECORD_TOO_LONG = "Record is too long, the size is: "; + private static final Logger DEFAULT_LOGGER = LoggerFactory.getLogger(VersionedAirbyteStreamFactory.class); - private static final Logger LOGGER = LoggerFactory.getLogger(VersionedAirbyteStreamFactory.class); - private static final double MAX_SIZE_RATIO = 0.8; - private static final long DEFAULT_MEMORY_LIMIT = Runtime.getRuntime().maxMemory(); - private static final MdcScope.Builder DEFAULT_MDC_SCOPE = MdcScope.DEFAULT_BUILDER; + @VisibleForTesting + static final MdcScope.Builder DEFAULT_MDC_SCOPE = MdcScope.DEFAULT_BUILDER; - private static final Logger DEFAULT_LOGGER = LOGGER; private static final Version fallbackVersion = new Version("0.2.0"); // Buffer size to use when detecting the protocol version. @@ -78,15 +71,13 @@ public record InvalidLineFailureConfiguration(boolean failTooLongRecords, boolea private static final int BUFFER_READ_AHEAD_LIMIT = 2 * 1024 * 1024; // 2 megabytes private static final int MESSAGES_LOOK_AHEAD_FOR_DETECTION = 10; private static final String TYPE_FIELD_NAME = "type"; - private static final int MAXIMUM_CHARACTERS_ALLOWED = 5_000_000; + private static final int MAXIMUM_CHARACTERS_ALLOWED = 20_000_000; // BASIC PROCESSING FIELDS protected final Logger logger; - private final long maxMemory; private final Optional connectionId; private final MdcScope.Builder containerLogMdcBuilder; - private final Optional> exceptionClass; // VERSION RELATED FIELDS private final AirbyteMessageSerDeProvider serDeProvider; @@ -108,9 +99,9 @@ public record InvalidLineFailureConfiguration(boolean failTooLongRecords, boolea * @return a VersionedAirbyteStreamFactory that does not perform any migration. 
*/ @VisibleForTesting - public static VersionedAirbyteStreamFactory noMigrationVersionedAirbyteStreamFactory(final boolean failTooLongRecords) { - return noMigrationVersionedAirbyteStreamFactory(LOGGER, MdcScope.DEFAULT_BUILDER, Optional.empty(), Runtime.getRuntime().maxMemory(), - new InvalidLineFailureConfiguration(failTooLongRecords, false), new GsonPksExtractor()); + public static VersionedAirbyteStreamFactory noMigrationVersionedAirbyteStreamFactory() { + return noMigrationVersionedAirbyteStreamFactory(DEFAULT_LOGGER, MdcScope.DEFAULT_BUILDER, + new InvalidLineFailureConfiguration(false), new GsonPksExtractor()); } /** @@ -121,8 +112,6 @@ public static VersionedAirbyteStreamFactory noMigrationVersionedAirbyteStreamFac @VisibleForTesting public static VersionedAirbyteStreamFactory noMigrationVersionedAirbyteStreamFactory(final Logger logger, final MdcScope.Builder mdcBuilder, - final Optional> clazz, - final long maxMemory, final InvalidLineFailureConfiguration conf, final GsonPksExtractor gsonPksExtractor) { final AirbyteMessageSerDeProvider provider = new AirbyteMessageSerDeProvider( @@ -138,8 +127,7 @@ public static VersionedAirbyteStreamFactory noMigrationVersionedAirbyteStreamFac new AirbyteProtocolVersionedMigratorFactory(airbyteMessageMigrator, configuredAirbyteCatalogMigrator); return new VersionedAirbyteStreamFactory<>(provider, fac, AirbyteProtocolVersion.DEFAULT_AIRBYTE_PROTOCOL_VERSION, Optional.empty(), - Optional.empty(), logger, - mdcBuilder, clazz, maxMemory, conf, gsonPksExtractor); + Optional.empty(), logger, mdcBuilder, conf, gsonPksExtractor); } public VersionedAirbyteStreamFactory(final AirbyteMessageSerDeProvider serDeProvider, @@ -148,11 +136,10 @@ public VersionedAirbyteStreamFactory(final AirbyteMessageSerDeProvider serDeProv final Optional connectionId, final Optional configuredAirbyteCatalog, final MdcScope.Builder containerLogMdcBuilder, - final Optional> exceptionClass, final InvalidLineFailureConfiguration 
invalidLineFailureConfiguration, final GsonPksExtractor gsonPksExtractor) { - this(serDeProvider, migratorFactory, protocolVersion, connectionId, configuredAirbyteCatalog, LOGGER, containerLogMdcBuilder, exceptionClass, - Runtime.getRuntime().maxMemory(), invalidLineFailureConfiguration, gsonPksExtractor); + this(serDeProvider, migratorFactory, protocolVersion, connectionId, configuredAirbyteCatalog, DEFAULT_LOGGER, containerLogMdcBuilder, + invalidLineFailureConfiguration, gsonPksExtractor); } public VersionedAirbyteStreamFactory(final AirbyteMessageSerDeProvider serDeProvider, @@ -160,11 +147,10 @@ public VersionedAirbyteStreamFactory(final AirbyteMessageSerDeProvider serDeProv final Version protocolVersion, final Optional connectionId, final Optional configuredAirbyteCatalog, - final Optional> exceptionClass, final InvalidLineFailureConfiguration invalidLineFailureConfiguration, final GsonPksExtractor gsonPksExtractor) { - this(serDeProvider, migratorFactory, protocolVersion, connectionId, configuredAirbyteCatalog, DEFAULT_LOGGER, DEFAULT_MDC_SCOPE, exceptionClass, - DEFAULT_MEMORY_LIMIT, invalidLineFailureConfiguration, gsonPksExtractor); + this(serDeProvider, migratorFactory, protocolVersion, connectionId, configuredAirbyteCatalog, DEFAULT_LOGGER, DEFAULT_MDC_SCOPE, + invalidLineFailureConfiguration, gsonPksExtractor); } public VersionedAirbyteStreamFactory(final AirbyteMessageSerDeProvider serDeProvider, @@ -174,15 +160,11 @@ public VersionedAirbyteStreamFactory(final AirbyteMessageSerDeProvider serDeProv final Optional configuredAirbyteCatalog, final Logger logger, final MdcScope.Builder containerLogMdcBuilder, - final Optional> exceptionClass, - final long maxMemory, final InvalidLineFailureConfiguration invalidLineFailureConfiguration, final GsonPksExtractor gsonPksExtractor) { // TODO AirbyteProtocolPredicate needs to be updated to be protocol version aware this.logger = logger; this.containerLogMdcBuilder = containerLogMdcBuilder; - this.exceptionClass = 
exceptionClass; - this.maxMemory = maxMemory; this.gsonPksExtractor = gsonPksExtractor; Preconditions.checkNotNull(protocolVersion); @@ -238,20 +220,6 @@ private Stream addLineReadLogic(final BufferedReader bufferedRea .peek(str -> { final long messageSize = str.getBytes(StandardCharsets.UTF_8).length; metricClient.distribution(OssMetricsRegistry.JSON_STRING_LENGTH, messageSize); - - if (exceptionClass.isPresent()) { - if (messageSize > maxMemory * MAX_SIZE_RATIO) { - connectionId.ifPresent(id -> metricClient.count(OssMetricsRegistry.RECORD_SIZE_ERROR, 1, - new MetricAttribute(MetricTags.CONNECTION_ID, id.toString()))); - final String errorMessage = String.format( - "Airbyte has received a message at %s UTC which is larger than %s (size: %s). " - + "The sync has been failed to prevent running out of memory.", - DateTime.now(), - humanReadableByteCountSI(maxMemory), - humanReadableByteCountSI(messageSize)); - throwExceptionClass(errorMessage); - } - } }) .flatMap(this::toAirbyteMessage) .filter(this::filterLog); @@ -341,21 +309,6 @@ protected void internalLog(final AirbyteLogMessage logMessage) { } } - // Human-readable byte size from - // https://stackoverflow.com/questions/3758606/how-can-i-convert-byte-size-into-a-human-readable-format-in-java - @SuppressWarnings("PMD.AvoidReassigningParameters") - private String humanReadableByteCountSI(long bytes) { - if (-1000 < bytes && bytes < 1000) { - return bytes + " B"; - } - final CharacterIterator ci = new StringCharacterIterator("kMGTPE"); - while (bytes <= -999_950 || bytes >= 999_950) { - bytes /= 1000; - ci.next(); - } - return String.format("%.1f %cB", bytes / 1000.0, ci.current()); - } - /** * For every incoming message, *

    @@ -366,6 +319,8 @@ private String humanReadableByteCountSI(long bytes) { * 3. upgrade the message to the platform version, if needed. */ protected Stream toAirbyteMessage(final String line) { + logLargeRecordWarning(line); + Optional m = deserializer.deserializeExact(line); if (m.isPresent()) { @@ -379,10 +334,29 @@ protected Stream toAirbyteMessage(final String line) { return upgradeMessage(m.get()); } - handleCannotDeserialize(line); + logMalformedLogMessage(line); return m.stream(); } + private void logLargeRecordWarning(final String line) { + try (final MdcScope ignored = containerLogMdcBuilder.build()) { + if (line.length() >= MAXIMUM_CHARACTERS_ALLOWED) { + connectionId.ifPresentOrElse(c -> MetricClientFactory.getMetricClient().count(OssMetricsRegistry.LINE_SKIPPED_TOO_LONG, 1, + new MetricAttribute(MetricTags.CONNECTION_ID, c.toString())), + () -> MetricClientFactory.getMetricClient().count(OssMetricsRegistry.LINE_SKIPPED_TOO_LONG, 1)); + MetricClientFactory.getMetricClient().distribution(OssMetricsRegistry.TOO_LONG_LINES_DISTRIBUTION, line.length()); + if (invalidLineFailureConfiguration.printLongRecordPks) { + logger.warn("[LARGE RECORD] Risk of Destinations not being able to properly handle: " + line.length()); + configuredAirbyteCatalog.ifPresent( + airbyteCatalog -> logger + .warn("[LARGE RECORD] The primary keys of the long record are: " + gsonPksExtractor.extractPks(airbyteCatalog, line))); + } + } + } catch (final Exception e) { + throw e; + } + } + /** * If a line cannot be deserialized into an AirbyteMessage, either: *

    @@ -397,36 +371,22 @@ protected Stream toAirbyteMessage(final String line) { *

    * */ - private void handleCannotDeserialize(final String line) { + private void logMalformedLogMessage(final String line) { try (final MdcScope ignored = containerLogMdcBuilder.build()) { - if (line.length() >= MAXIMUM_CHARACTERS_ALLOWED) { - MetricClientFactory.getMetricClient().count(OssMetricsRegistry.LINE_SKIPPED_TOO_LONG, 1); - MetricClientFactory.getMetricClient().distribution(OssMetricsRegistry.TOO_LONG_LINES_DISTRIBUTION, line.length()); - if (invalidLineFailureConfiguration.printLongRecordPks) { - LOGGER.error("[LARGE RECORD] A record is too long with size: " + line.length()); - configuredAirbyteCatalog.ifPresent( - airbyteCatalog -> LOGGER - .error("[LARGE RECORD] The primary keys of the long record are: " + gsonPksExtractor.extractPks(airbyteCatalog, line))); - } - if (invalidLineFailureConfiguration.failTooLongRecords) { - if (exceptionClass.isPresent()) { - throwExceptionClass("One record is too big and can't be processed, the sync will be failed"); - } else { - throw new IllegalStateException(RECORD_TOO_LONG + line.length()); - } - } - } - if (line.toLowerCase().replaceAll("\\s", "").contains("{\"type\":\"record\",\"record\":")) { // Connectors can sometimes log error messages from failing to parse an AirbyteRecordMessage. // Filter on record into debug to try and prevent such cases. Though this catches non-record // messages, this is ok as we rather be safe than sorry. 
+ logger.warn("Could not parse the string received from source, it seems to be a record message"); connectionId.ifPresentOrElse(c -> MetricClientFactory.getMetricClient().count(OssMetricsRegistry.LINE_SKIPPED_WITH_RECORD, 1, new MetricAttribute(MetricTags.CONNECTION_ID, c.toString())), () -> MetricClientFactory.getMetricClient().count(OssMetricsRegistry.LINE_SKIPPED_WITH_RECORD, 1)); logger.debug(line); } else { - MetricClientFactory.getMetricClient().count(OssMetricsRegistry.NON_AIRBYTE_MESSAGE_LOG_LINE, 1); + connectionId.ifPresentOrElse( + c -> MetricClientFactory.getMetricClient().count(OssMetricsRegistry.NON_AIRBYTE_MESSAGE_LOG_LINE, 1, + new MetricAttribute(MetricTags.CONNECTION_ID, c.toString())), + () -> MetricClientFactory.getMetricClient().count(OssMetricsRegistry.NON_AIRBYTE_MESSAGE_LOG_LINE, 1)); logger.info(line); } } catch (final Exception e) { @@ -434,15 +394,6 @@ private void handleCannotDeserialize(final String line) { } } - private void throwExceptionClass(final String message) { - try { - throw exceptionClass.get().getConstructor(String.class) - .newInstance(message); - } catch (final InstantiationException | IllegalAccessException | InvocationTargetException | NoSuchMethodException ex) { - throw new RuntimeException(ex); - } - } - protected Stream upgradeMessage(final AirbyteMessage msg) { try { final AirbyteMessage message = migrator.upgrade(msg, configuredAirbyteCatalog); diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/models/StateWithId.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/models/StateWithId.java new file mode 100644 index 00000000000..f24d7f6e4e2 --- /dev/null +++ b/airbyte-commons-worker/src/main/java/io/airbyte/workers/models/StateWithId.java @@ -0,0 +1,44 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.workers.models; + +import io.airbyte.protocol.models.AirbyteMessage; +import io.airbyte.protocol.models.AirbyteStateMessage; +import java.util.OptionalInt; +import java.util.concurrent.atomic.AtomicInteger; + +public class StateWithId { + + private static final String ID = "id"; + + public static AirbyteMessage attachIdToStateMessageFromSource(final AirbyteMessage message) { + if (message.getType() == AirbyteMessage.Type.STATE) { + message.getState().setAdditionalProperty(ID, StateIdProvider.getNextId()); + } + return message; + } + + public static OptionalInt getIdFromStateMessage(final AirbyteMessage message) { + if (message.getType() == AirbyteMessage.Type.STATE) { + return OptionalInt.of(getIdFromStateMessage(message.getState())); + } + return OptionalInt.empty(); + } + + public static int getIdFromStateMessage(final AirbyteStateMessage message) { + return (int) message.getAdditionalProperties().get(ID); + } + + private static class StateIdProvider { + + private static final AtomicInteger id = new AtomicInteger(0); + + public static int getNextId() { + return id.incrementAndGet(); + } + + } + +} diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/process/AirbyteIntegrationLauncherFactory.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/process/AirbyteIntegrationLauncherFactory.java index e4e36e708a1..a0c344f464b 100644 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/process/AirbyteIntegrationLauncherFactory.java +++ b/airbyte-commons-worker/src/main/java/io/airbyte/workers/process/AirbyteIntegrationLauncherFactory.java @@ -11,7 +11,6 @@ import io.airbyte.commons.protocol.VersionedProtocolSerializer; import io.airbyte.config.SyncResourceRequirements; import io.airbyte.featureflag.Connection; -import io.airbyte.featureflag.FailSyncIfTooBig; import io.airbyte.featureflag.FeatureFlagClient; import io.airbyte.featureflag.Multi; import io.airbyte.featureflag.PrintLongRecordPks; @@ -29,8 +28,6 @@ 
import io.airbyte.workers.internal.HeartbeatMonitor; import io.airbyte.workers.internal.VersionedAirbyteMessageBufferedWriterFactory; import io.airbyte.workers.internal.VersionedAirbyteStreamFactory; -import io.airbyte.workers.internal.exception.DestinationException; -import io.airbyte.workers.internal.exception.SourceException; import jakarta.inject.Singleton; import java.util.Collections; import java.util.List; @@ -107,21 +104,14 @@ public AirbyteSource createAirbyteSource(final IntegrationLauncherConfig sourceL final HeartbeatMonitor heartbeatMonitor) { final IntegrationLauncher sourceLauncher = createIntegrationLauncher(sourceLauncherConfig, syncResourceRequirements); - final boolean failTooLongRecords = featureFlagClient.boolVariation(FailSyncIfTooBig.INSTANCE, - new Multi(List.of( - new Connection(sourceLauncherConfig.getConnectionId()), - new Workspace(sourceLauncherConfig.getWorkspaceId())))); - final boolean printLongRecordPks = featureFlagClient.boolVariation(PrintLongRecordPks.INSTANCE, new Multi(List.of( new Connection(sourceLauncherConfig.getConnectionId()), new Workspace(sourceLauncherConfig.getWorkspaceId())))); return new DefaultAirbyteSource(sourceLauncher, - getStreamFactory(sourceLauncherConfig, configuredAirbyteCatalog, SourceException.class, DefaultAirbyteSource.CONTAINER_LOG_MDC_BUILDER, - new VersionedAirbyteStreamFactory.InvalidLineFailureConfiguration( - failTooLongRecords, - printLongRecordPks)), + getStreamFactory(sourceLauncherConfig, configuredAirbyteCatalog, DefaultAirbyteSource.CONTAINER_LOG_MDC_BUILDER, + new VersionedAirbyteStreamFactory.InvalidLineFailureConfiguration(printLongRecordPks)), heartbeatMonitor, getProtocolSerializer(sourceLauncherConfig), featureFlags, @@ -144,9 +134,8 @@ public AirbyteDestination createAirbyteDestination(final IntegrationLauncherConf return new DefaultAirbyteDestination(destinationLauncher, getStreamFactory(destinationLauncherConfig, configuredAirbyteCatalog, - DestinationException.class, 
DefaultAirbyteDestination.CONTAINER_LOG_MDC_BUILDER, - new VersionedAirbyteStreamFactory.InvalidLineFailureConfiguration(false, false)), + new VersionedAirbyteStreamFactory.InvalidLineFailureConfiguration(false)), new VersionedAirbyteMessageBufferedWriterFactory(serDeProvider, migratorFactory, destinationLauncherConfig.getProtocolVersion(), Optional.of(configuredAirbyteCatalog)), getProtocolSerializer(destinationLauncherConfig), @@ -160,11 +149,10 @@ private VersionedProtocolSerializer getProtocolSerializer(final IntegrationLaunc private AirbyteStreamFactory getStreamFactory(final IntegrationLauncherConfig launcherConfig, final ConfiguredAirbyteCatalog configuredAirbyteCatalog, - final Class exceptionClass, final MdcScope.Builder mdcScopeBuilder, final VersionedAirbyteStreamFactory.InvalidLineFailureConfiguration invalidLineFailureConfiguration) { return new VersionedAirbyteStreamFactory<>(serDeProvider, migratorFactory, launcherConfig.getProtocolVersion(), - Optional.of(launcherConfig.getConnectionId()), Optional.of(configuredAirbyteCatalog), mdcScopeBuilder, Optional.of(exceptionClass), + Optional.of(launcherConfig.getConnectionId()), Optional.of(configuredAirbyteCatalog), mdcScopeBuilder, invalidLineFailureConfiguration, gsonPksExtractor); } diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/process/AsyncOrchestratorPodProcess.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/process/AsyncOrchestratorPodProcess.java index e35ba77b1ef..42e9ce29960 100644 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/process/AsyncOrchestratorPodProcess.java +++ b/airbyte-commons-worker/src/main/java/io/airbyte/workers/process/AsyncOrchestratorPodProcess.java @@ -4,6 +4,8 @@ package io.airbyte.workers.process; +import static io.airbyte.commons.workers.config.WorkerConfigs.DEFAULT_JOB_KUBE_BUSYBOX_IMAGE; + import io.airbyte.commons.io.IOs; import io.airbyte.commons.json.Jsons; import io.airbyte.config.ResourceRequirements; @@ -15,13 
+17,18 @@ import io.airbyte.workers.storage.StorageClient; import io.airbyte.workers.workload.JobOutputDocStore; import io.airbyte.workers.workload.exception.DocStoreAccessException; +import io.fabric8.kubernetes.api.model.CapabilitiesBuilder; import io.fabric8.kubernetes.api.model.ContainerBuilder; import io.fabric8.kubernetes.api.model.ContainerPort; import io.fabric8.kubernetes.api.model.DeletionPropagation; import io.fabric8.kubernetes.api.model.EnvVar; import io.fabric8.kubernetes.api.model.Pod; import io.fabric8.kubernetes.api.model.PodBuilder; +import io.fabric8.kubernetes.api.model.PodSecurityContextBuilder; +import io.fabric8.kubernetes.api.model.SeccompProfileBuilder; import io.fabric8.kubernetes.api.model.SecretVolumeSourceBuilder; +import io.fabric8.kubernetes.api.model.SecurityContext; +import io.fabric8.kubernetes.api.model.SecurityContextBuilder; import io.fabric8.kubernetes.api.model.StatusDetails; import io.fabric8.kubernetes.api.model.Toleration; import io.fabric8.kubernetes.api.model.TolerationBuilder; @@ -44,6 +51,7 @@ import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.Objects; import java.util.Optional; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicReference; @@ -488,9 +496,11 @@ public void create(final Map allLabels, final List containerPorts = KubePodProcess.createContainerPortList(portMap); containerPorts.add(new ContainerPort(serverPort, null, null, null, null)); + final String initImageName = resolveInitContainerImageName(); + final var initContainer = new ContainerBuilder() .withName(KubePodProcess.INIT_CONTAINER_NAME) - .withImage("busybox:1.35") + .withImage(initImageName) .withVolumeMounts(volumeMounts) .withCommand(List.of( "sh", @@ -513,6 +523,7 @@ public void create(final Map allLabels, """, KubePodProcess.CONFIG_DIR, KubePodProcess.SUCCESS_FILE_NAME))) + .withSecurityContext(containerSecurityContext()) .build(); final var mainContainer = new ContainerBuilder() 
@@ -523,6 +534,7 @@ public void create(final Map allLabels, .withEnv(envVars) .withPorts(containerPorts) .withVolumeMounts(volumeMounts) + .withSecurityContext(containerSecurityContext()) .build(); final Pod podToCreate = new PodBuilder() @@ -543,6 +555,7 @@ public void create(final Map allLabels, .withVolumes(volumes) .withNodeSelector(nodeSelectors) .withTolerations(buildPodTolerations(tolerations)) + .withSecurityContext(new PodSecurityContextBuilder().withFsGroup(1000L).build()) .endSpec() .build(); @@ -582,6 +595,11 @@ public void create(final Map allLabels, copyFilesToKubeConfigVolumeMain(createdPod, updatedFileMap); } + private String resolveInitContainerImageName() { + final String initImageNameFromEnv = environmentVariables.get(io.airbyte.commons.envvar.EnvVar.JOB_KUBE_BUSYBOX_IMAGE.toString()); + return Objects.requireNonNullElse(initImageNameFromEnv, DEFAULT_JOB_KUBE_BUSYBOX_IMAGE); + } + private Toleration[] buildPodTolerations(final List tolerations) { if (tolerations == null || tolerations.isEmpty()) { return null; @@ -640,4 +658,24 @@ private void copyFilesToKubeConfigVolumeMain(final Pod podDefinition, final Map< } } + /** + * Returns a SecurityContext specific to containers. + * + * @return SecurityContext if ROOTLESS_WORKLOAD is enabled, null otherwise. 
+ */ + private SecurityContext containerSecurityContext() { + if (Boolean.parseBoolean(io.airbyte.commons.envvar.EnvVar.ROOTLESS_WORKLOAD.fetch("false"))) { + return new SecurityContextBuilder() + .withAllowPrivilegeEscalation(false) + .withRunAsGroup(1000L) + .withRunAsUser(1000L) + .withReadOnlyRootFilesystem(false) + .withRunAsNonRoot(true) + .withCapabilities(new CapabilitiesBuilder().addAllToDrop(List.of("ALL")).build()) + .withSeccompProfile(new SeccompProfileBuilder().withType("RuntimeDefault").build()) + .build(); + } + return null; + } + } diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/process/KubePodProcess.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/process/KubePodProcess.java index 89cb90dbd69..af9ee28707b 100644 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/process/KubePodProcess.java +++ b/airbyte-commons-worker/src/main/java/io/airbyte/workers/process/KubePodProcess.java @@ -21,6 +21,7 @@ import io.airbyte.metrics.lib.OssMetricsRegistry; import io.airbyte.workers.helper.ConnectorApmSupportHelper; import io.airbyte.workers.models.SecretMetadata; +import io.fabric8.kubernetes.api.model.CapabilitiesBuilder; import io.fabric8.kubernetes.api.model.Container; import io.fabric8.kubernetes.api.model.ContainerBuilder; import io.fabric8.kubernetes.api.model.ContainerPort; @@ -32,9 +33,14 @@ import io.fabric8.kubernetes.api.model.Pod; import io.fabric8.kubernetes.api.model.PodBuilder; import io.fabric8.kubernetes.api.model.PodFluent; +import io.fabric8.kubernetes.api.model.PodSecurityContext; +import io.fabric8.kubernetes.api.model.PodSecurityContextBuilder; import io.fabric8.kubernetes.api.model.Quantity; import io.fabric8.kubernetes.api.model.ResourceRequirementsBuilder; +import io.fabric8.kubernetes.api.model.SeccompProfileBuilder; import io.fabric8.kubernetes.api.model.SecretKeySelector; +import io.fabric8.kubernetes.api.model.SecurityContext; +import 
io.fabric8.kubernetes.api.model.SecurityContextBuilder; import io.fabric8.kubernetes.api.model.Toleration; import io.fabric8.kubernetes.api.model.TolerationBuilder; import io.fabric8.kubernetes.api.model.Volume; @@ -213,6 +219,7 @@ private static Container getInit(final boolean usesStdin, .withCommand("sh", "-c", initCommand) .withResources(getResourceRequirementsBuilder(resourceRequirements).build()) .withVolumeMounts(mainVolumeMounts) + .withSecurityContext(containerSecurityContext()) .build(); } @@ -286,7 +293,8 @@ private static Container getMain(final FeatureFlagClient featureFlagClient, .withCommand("sh", "-c", mainCommand) .withEnv(allEnvVars) .withWorkingDir(CONFIG_DIR) - .withVolumeMounts(mainVolumeMounts); + .withVolumeMounts(mainVolumeMounts) + .withSecurityContext(containerSecurityContext()); final ResourceRequirementsBuilder resourceRequirementsBuilder = getResourceRequirementsBuilder(resourceRequirements); if (resourceRequirementsBuilder != null) { @@ -521,6 +529,7 @@ public KubePodProcess(final String processRunnerHost, .withVolumeMounts(pipeVolumeMount, terminationVolumeMount) .withResources(getResourceRequirementsBuilder(podResourceRequirements.stdIn()).build()) .withImagePullPolicy(sidecarImagePullPolicy) + .withSecurityContext(containerSecurityContext()) .build(); final Container relayStdout = new ContainerBuilder() @@ -530,6 +539,7 @@ public KubePodProcess(final String processRunnerHost, .withVolumeMounts(pipeVolumeMount, terminationVolumeMount) .withResources(getResourceRequirementsBuilder(podResourceRequirements.stdOut()).build()) .withImagePullPolicy(sidecarImagePullPolicy) + .withSecurityContext(containerSecurityContext()) .build(); final Container relayStderr = new ContainerBuilder() @@ -539,6 +549,7 @@ public KubePodProcess(final String processRunnerHost, .withVolumeMounts(pipeVolumeMount, terminationVolumeMount) .withResources(getResourceRequirementsBuilder(podResourceRequirements.stdErr()).build()) 
.withImagePullPolicy(sidecarImagePullPolicy) + .withSecurityContext(containerSecurityContext()) .build(); final List socatContainers; @@ -594,6 +605,7 @@ public KubePodProcess(final String processRunnerHost, .withVolumeMounts(terminationVolumeMount) .withResources(getResourceRequirementsBuilder(podResourceRequirements.heartbeat()).build()) .withImagePullPolicy(sidecarImagePullPolicy) + .withSecurityContext(containerSecurityContext(100, 101)) // uid=100(curl_user) gid=101(curl_group) .build(); final List containers = Lists.concat(List.of(main, callHeartbeatServer), socatContainers); @@ -622,6 +634,7 @@ public KubePodProcess(final String processRunnerHost, .withInitContainers(init) .withContainers(containers) .withVolumes(pipeVolume, configVolume, terminationVolume, tmpVolume) + .withSecurityContext(podSecurityContext()) .endSpec() .build(); @@ -977,4 +990,50 @@ private static String prependPodInfo(final String message, final String podNames return String.format("(pod: %s / %s) - %s", podNamespace, podName, message); } + /** + * Returns a PodSecurityContext specific to the pod. + * + * @return PodSecurityContext if ROOTLESS_WORKLOAD is enabled, null otherwise. + */ + private static PodSecurityContext podSecurityContext(final long user, final long group) { + if (Boolean.parseBoolean(io.airbyte.commons.envvar.EnvVar.ROOTLESS_WORKLOAD.fetch("false"))) { + return new PodSecurityContextBuilder() + .withRunAsUser(user) + .withRunAsGroup(group) + .withFsGroup(group) + .withRunAsNonRoot(true) + .withSeccompProfile(new SeccompProfileBuilder().withType("RuntimeDefault").build()) + .build(); + } + + return null; + } + + private static PodSecurityContext podSecurityContext() { + return podSecurityContext(1000, 1000); + } + + /** + * Returns a SecurityContext specific to containers. + * + * @return SecurityContext if ROOTLESS_WORKLOAD is enabled, null otherwise. 
+ */ + private static SecurityContext containerSecurityContext(final long user, final long group) { + if (Boolean.parseBoolean(io.airbyte.commons.envvar.EnvVar.ROOTLESS_WORKLOAD.fetch("false"))) { + return new SecurityContextBuilder() + .withRunAsUser(user) + .withRunAsGroup(group) + .withAllowPrivilegeEscalation(false) + .withReadOnlyRootFilesystem(false) + .withCapabilities(new CapabilitiesBuilder().addAllToDrop(List.of("ALL")).build()) + .build(); + } + + return null; + } + + private static SecurityContext containerSecurityContext() { + return containerSecurityContext(1000, 1000); + } + } diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/sync/WorkloadApiWorker.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/sync/WorkloadApiWorker.java index d0995cbb3ba..e20c0e6c568 100644 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/sync/WorkloadApiWorker.java +++ b/airbyte-commons-worker/src/main/java/io/airbyte/workers/sync/WorkloadApiWorker.java @@ -7,6 +7,7 @@ import static io.airbyte.config.helpers.LogClientSingleton.fullLogPath; import io.airbyte.api.client.AirbyteApiClient; +import io.airbyte.api.client.WorkloadApiClient; import io.airbyte.api.client.invoker.generated.ApiException; import io.airbyte.api.client.model.generated.ConnectionIdRequestBody; import io.airbyte.api.client.model.generated.Geography; @@ -30,7 +31,6 @@ import io.airbyte.workers.workload.JobOutputDocStore; import io.airbyte.workers.workload.WorkloadIdGenerator; import io.airbyte.workers.workload.exception.DocStoreAccessException; -import io.airbyte.workload.api.client.generated.WorkloadApi; import io.airbyte.workload.api.client.model.generated.Workload; import io.airbyte.workload.api.client.model.generated.WorkloadCancelRequest; import io.airbyte.workload.api.client.model.generated.WorkloadCreateRequest; @@ -48,7 +48,6 @@ import java.util.UUID; import java.util.concurrent.CancellationException; import java.util.function.Function; -import 
org.openapitools.client.infrastructure.ClientException; import org.openapitools.client.infrastructure.ServerException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -58,7 +57,7 @@ */ public class WorkloadApiWorker implements Worker { - private static final int HTTP_CONFLICT_CODE = 409; + private static final int HTTP_CONFLICT_CODE = HttpStatus.CONFLICT.getCode(); private static final String DESTINATION = "destination"; private static final String SOURCE = "source"; @@ -66,7 +65,8 @@ public class WorkloadApiWorker implements Worker TERMINAL_STATUSES = Set.of(WorkloadStatus.CANCELLED, WorkloadStatus.FAILURE, WorkloadStatus.SUCCESS); private final JobOutputDocStore jobOutputDocStore; private final AirbyteApiClient apiClient; - private final WorkloadApi workloadApi; + private final WorkloadApiClient workloadApiClient; + private final WorkloadClient workloadClient; private final WorkloadIdGenerator workloadIdGenerator; private final ReplicationActivityInput input; private final FeatureFlagClient featureFlagClient; @@ -75,13 +75,15 @@ public class WorkloadApiWorker implements Worker output; output = fetchReplicationOutput(workloadId, (location) -> { @@ -228,29 +234,9 @@ private Context getFeatureFlagContext() { new Destination(input.getDestinationId()))); } - private void createWorkload(final WorkloadCreateRequest workloadCreateRequest) { - try { - workloadApi.workloadCreate(workloadCreateRequest); - } catch (final ClientException e) { - /* - * The Workload API returns a 304 response when the request to execute the workload has already been - * created. That response is handled in the form of a ClientException by the generated OpenAPI - * client. We do not want to cause the Temporal workflow to retry, so catch it and log the - * information so that the workflow will continue. - */ - if (e.getStatusCode() == HttpStatus.CONFLICT.getCode()) { - log.warn("Workload {} already created and in progress. 
Continuing...", workloadCreateRequest.getWorkloadId()); - } else { - throw new RuntimeException(e); - } - } catch (final IOException e) { - throw new RuntimeException(e); - } - } - private Workload getWorkload(final String workloadId) { try { - return workloadApi.workloadGet(workloadId); + return workloadApiClient.getWorkloadApi().workloadGet(workloadId); } catch (final IOException e) { throw new RuntimeException(e); } diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/test_utils/AirbyteMessageUtils.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/test_utils/AirbyteMessageUtils.java index d7cf84d7f78..fea45080944 100644 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/test_utils/AirbyteMessageUtils.java +++ b/airbyte-commons-worker/src/main/java/io/airbyte/workers/test_utils/AirbyteMessageUtils.java @@ -244,6 +244,22 @@ public static AirbyteMessage createStatusTraceMessage(final StreamDescriptor str .withTrace(airbyteTraceMessage); } + public static AirbyteMessage createStreamStatusTraceMessageWithType(final StreamDescriptor stream, + final AirbyteStreamStatusTraceMessage.AirbyteStreamStatus status) { + final AirbyteStreamStatusTraceMessage airbyteStreamStatusTraceMessage = new AirbyteStreamStatusTraceMessage() + .withStatus(status) + .withStreamDescriptor(stream); + + final AirbyteTraceMessage airbyteTraceMessage = new AirbyteTraceMessage() + .withEmittedAt(null) + .withType(AirbyteTraceMessage.Type.STREAM_STATUS) + .withStreamStatus(airbyteStreamStatusTraceMessage); + + return new AirbyteMessage() + .withType(Type.TRACE) + .withTrace(airbyteTraceMessage); + } + public static AirbyteMessage createAnalyticsTraceMessage(final String type, final String value) { final AirbyteAnalyticsTraceMessage airbyteAnalyticsTraceMessage = new AirbyteAnalyticsTraceMessage() .withType(type) diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/test_utils/TestConfigHelpers.java 
b/airbyte-commons-worker/src/main/java/io/airbyte/workers/test_utils/TestConfigHelpers.java index e3a8bbce704..9f1d8ccde55 100644 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/test_utils/TestConfigHelpers.java +++ b/airbyte-commons-worker/src/main/java/io/airbyte/workers/test_utils/TestConfigHelpers.java @@ -62,9 +62,7 @@ public static ImmutablePair createSyncConfig(fi .withSourceId(replicationInput.getSourceId()) .withDestinationId(replicationInput.getDestinationId()) .withDestinationConfiguration(replicationInput.getDestinationConfiguration()) - .withCatalog(replicationInput.getCatalog()) .withSourceConfiguration(replicationInput.getSourceConfiguration()) - .withState(replicationInput.getState()) .withOperationSequence(replicationInput.getOperationSequence()) .withWorkspaceId(replicationInput.getWorkspaceId()) .withConnectionContext(new ConnectionContext().withOrganizationId(organizationId))); diff --git a/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/config/HelperBeanFactory.kt b/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/config/HelperBeanFactory.kt new file mode 100644 index 00000000000..419505fb6cf --- /dev/null +++ b/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/config/HelperBeanFactory.kt @@ -0,0 +1,13 @@ +package io.airbyte.workers.config + +import io.micronaut.context.annotation.Factory +import jakarta.inject.Singleton +import java.time.Clock + +@Factory +class HelperBeanFactory { + @Singleton + fun getClock(): Clock { + return Clock.systemUTC() + } +} diff --git a/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/config/WorkloadApiClientFactory.kt b/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/config/WorkloadApiClientFactory.kt deleted file mode 100644 index 234bfc17481..00000000000 --- a/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/config/WorkloadApiClientFactory.kt +++ /dev/null @@ -1,129 +0,0 @@ -/* - * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.workers.config - -import dev.failsafe.RetryPolicy -import io.airbyte.api.client.WorkloadApiClient -import io.airbyte.commons.auth.AuthenticationInterceptor -import io.airbyte.workload.api.client.generated.WorkloadApi -import io.github.oshai.kotlinlogging.KotlinLogging -import io.micrometer.core.instrument.MeterRegistry -import io.micronaut.context.annotation.Factory -import io.micronaut.context.annotation.Value -import jakarta.inject.Singleton -import okhttp3.HttpUrl -import okhttp3.OkHttpClient -import okhttp3.Response -import org.openapitools.client.infrastructure.ClientException -import org.openapitools.client.infrastructure.ServerException -import java.io.IOException -import java.time.Duration -import java.util.Optional - -private val logger = KotlinLogging.logger {} - -@Factory -class WorkloadApiClientFactory { - @Singleton - fun workloadApiClient( - @Value("\${airbyte.workload-api.base-path}") workloadApiBasePath: String, - @Value("\${airbyte.workload-api.connect-timeout-seconds}") connectTimeoutSeconds: Long, - @Value("\${airbyte.workload-api.read-timeout-seconds}") readTimeoutSeconds: Long, - @Value("\${airbyte.workload-api.retries.delay-seconds}") retryDelaySeconds: Long, - @Value("\${airbyte.workload-api.retries.max}") maxRetries: Int, - authenticationInterceptor: AuthenticationInterceptor, - meterRegistry: Optional, - ): WorkloadApi { - val builder: OkHttpClient.Builder = OkHttpClient.Builder() - builder.addInterceptor(authenticationInterceptor) - builder.readTimeout(Duration.ofSeconds(readTimeoutSeconds)) - builder.connectTimeout(Duration.ofSeconds(connectTimeoutSeconds)) - - val okHttpClient: OkHttpClient = builder.build() - val metricTags = arrayOf("max-retries", maxRetries.toString()) - - val retryPolicy: RetryPolicy = - RetryPolicy.builder() - .handle( - listOf( - IllegalStateException::class.java, - IOException::class.java, - UnsupportedOperationException::class.java, - ClientException::class.java, - 
ServerException::class.java, - ), - ) - // TODO move these metrics into a centralized metric registery as part of the MetricClient refactor/cleanup - .onAbort { l -> - logger.warn { "Attempt aborted. Attempt count ${l.attemptCount}" } - meterRegistry.ifPresent { r -> - r.counter( - "workload_api_client.abort", - *metricTags, - *arrayOf("retry-attempt", l.attemptCount.toString(), "method", l.result.request.method), - *getUrlTags(l.result.request.url), - ).increment() - } - } - .onFailure { l -> - logger.error(l.exception) { "Failed to call $workloadApiBasePath. Last response: ${l.result}" } - meterRegistry.ifPresent { r -> - r.counter( - "workload_api_client.failure", - *metricTags, - *arrayOf("retry-attempt", l.attemptCount.toString(), "method", l.result.request.method), - *getUrlTags(l.result.request.url), - ).increment() - } - } - .onRetry { l -> - logger.warn { "Retry attempt ${l.attemptCount} of $maxRetries. Last response: ${l.lastResult}" } - meterRegistry.ifPresent { r -> - r.counter( - "workload_api_client.retry", - *metricTags, - *arrayOf("retry-attempt", l.attemptCount.toString(), "url", "method", l.lastResult.request.method), - *getUrlTags(l.lastResult.request.url), - ).increment() - } - } - .onRetriesExceeded { l -> - logger.error(l.exception) { "Retry attempts exceeded." } - meterRegistry.ifPresent { r -> - r.counter( - "workload_api_client.retries_exceeded", - *metricTags, - *arrayOf("retry-attempt", l.attemptCount.toString(), "method", l.result.request.method), - *getUrlTags(l.result.request.url), - ).increment() - } - } - .onSuccess { l -> - logger.debug { "Successfully called ${l.result.request.url}. 
Response: ${l.result}, isRetry: ${l.isRetry}" } - meterRegistry.ifPresent { r -> - r.counter( - "workload_api_client.success", - *metricTags, - *arrayOf("retry-attempt", l.attemptCount.toString(), "method", l.result.request.method), - *getUrlTags(l.result.request.url), - ).increment() - } - } - .withDelay(Duration.ofSeconds(retryDelaySeconds)) - .withMaxRetries(maxRetries) - .build() - - return WorkloadApiClient(workloadApiBasePath, retryPolicy, okHttpClient).workloadApi - } - - private fun getUrlTags(httpUrl: HttpUrl): Array { - val last = httpUrl.pathSegments.last() - if (last.contains("[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}".toRegex())) { - return arrayOf("url", httpUrl.toString().removeSuffix(last), "workload-id", last) - } else { - return arrayOf("url", httpUrl.toString()) - } - } -} diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/context/ReplicationContext.java b/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/context/ReplicationContext.kt similarity index 52% rename from airbyte-commons-worker/src/main/java/io/airbyte/workers/context/ReplicationContext.java rename to airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/context/ReplicationContext.kt index e302c303869..c91afa685a6 100644 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/context/ReplicationContext.java +++ b/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/context/ReplicationContext.kt @@ -1,10 +1,6 @@ -/* - * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.workers.context; +package io.airbyte.workers.context -import java.util.UUID; +import java.util.UUID /** * Context of a Replication. @@ -20,13 +16,19 @@ * @param workspaceId The workspace ID associated with the sync. * @param sourceImage The name and version of the source image. * @param destinationImage The name and version of the destination image. 
+ * @param sourceDefinitionId The source definition ID associated with the sync + * @param destinationDefinitionId The destination definition ID associated with the sync */ -public record ReplicationContext(boolean isReset, - UUID connectionId, - UUID sourceId, - UUID destinationId, - Long jobId, - Integer attempt, - UUID workspaceId, - String sourceImage, - String destinationImage) {} +data class ReplicationContext( + val isReset: Boolean, + val connectionId: UUID, + val sourceId: UUID, + val destinationId: UUID, + val jobId: Long, + val attempt: Int, + val workspaceId: UUID, + val sourceImage: String, + val destinationImage: String, + val sourceDefinitionId: UUID, + val destinationDefinitionId: UUID, +) diff --git a/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/general/ReplicationWorkerHelper.kt b/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/general/ReplicationWorkerHelper.kt index 75c94c3e990..e0592c5f8f8 100644 --- a/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/general/ReplicationWorkerHelper.kt +++ b/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/general/ReplicationWorkerHelper.kt @@ -7,6 +7,11 @@ package io.airbyte.workers.general import com.fasterxml.jackson.core.JsonProcessingException import com.fasterxml.jackson.databind.ObjectMapper import com.google.common.annotations.VisibleForTesting +import io.airbyte.api.client.WorkloadApiClient +import io.airbyte.api.client.generated.DestinationApi +import io.airbyte.api.client.generated.SourceApi +import io.airbyte.api.client.model.generated.DestinationIdRequestBody +import io.airbyte.api.client.model.generated.SourceIdRequestBody import io.airbyte.api.client.model.generated.StreamStatusIncompleteRunCause import io.airbyte.commons.concurrency.VoidCallable import io.airbyte.commons.converters.ThreadedTimeTracker @@ -30,6 +35,7 @@ import io.airbyte.protocol.models.AirbyteMessage.Type import io.airbyte.protocol.models.AirbyteStateMessage import 
io.airbyte.protocol.models.AirbyteStateStats import io.airbyte.protocol.models.AirbyteTraceMessage +import io.airbyte.protocol.models.ConfiguredAirbyteCatalog import io.airbyte.protocol.models.StreamDescriptor import io.airbyte.workers.WorkerUtils import io.airbyte.workers.context.ReplicationContext @@ -37,6 +43,7 @@ import io.airbyte.workers.context.ReplicationFeatureFlags import io.airbyte.workers.exception.WorkloadHeartbeatException import io.airbyte.workers.helper.AirbyteMessageDataExtractor import io.airbyte.workers.helper.FailureHelper +import io.airbyte.workers.helper.StreamStatusCompletionTracker import io.airbyte.workers.internal.AirbyteDestination import io.airbyte.workers.internal.AirbyteMapper import io.airbyte.workers.internal.AirbyteSource @@ -53,19 +60,20 @@ import io.airbyte.workers.internal.bookkeeping.getTotalStats import io.airbyte.workers.internal.exception.DestinationException import io.airbyte.workers.internal.exception.SourceException import io.airbyte.workers.internal.syncpersistence.SyncPersistence -import io.airbyte.workload.api.client.generated.WorkloadApi +import io.airbyte.workers.models.StateWithId.attachIdToStateMessageFromSource import io.airbyte.workload.api.client.model.generated.WorkloadHeartbeatRequest import io.github.oshai.kotlinlogging.KotlinLogging import io.micronaut.http.HttpStatus import org.apache.commons.io.FileUtils -import org.openapitools.client.infrastructure.ClientException import org.slf4j.MDC import java.nio.file.Path import java.time.Duration import java.time.Instant import java.util.Collections import java.util.Optional +import java.util.UUID import java.util.concurrent.atomic.AtomicBoolean +import io.airbyte.workload.api.client.generated.infrastructure.ClientException as GeneratedClientException private val logger = KotlinLogging.logger { } @@ -78,10 +86,13 @@ class ReplicationWorkerHelper( private val replicationAirbyteMessageEventPublishingHelper: ReplicationAirbyteMessageEventPublishingHelper, private val 
timeTracker: ThreadedTimeTracker, private val onReplicationRunning: VoidCallable, - private val workloadApi: WorkloadApi, + private val workloadApiClient: WorkloadApiClient, private val workloadEnabled: Boolean, private val analyticsMessageTracker: AnalyticsMessageTracker, private val workloadId: Optional, + private val sourceApi: SourceApi, + private val destinationApi: DestinationApi, + private val streamStatusCompletionTracker: StreamStatusCompletionTracker, ) { private val metricClient = MetricClientFactory.getMetricClient() private val metricAttrs: MutableList = mutableListOf() @@ -135,7 +146,7 @@ class ReplicationWorkerHelper( throw RuntimeException("workloadId should always be present") } logger.info { "Sending workload heartbeat" } - workloadApi.workloadHeartbeat( + workloadApiClient.workloadApi.workloadHeartbeat( WorkloadHeartbeatRequest(workloadId.get()), ) lastSuccessfulHeartbeat = Instant.now() @@ -145,13 +156,15 @@ class ReplicationWorkerHelper( * Workload should stop because it is no longer expected to be running. 
* See [io.airbyte.workload.api.WorkloadApi.workloadHeartbeat] */ - if (e is ClientException && e.statusCode == HttpStatus.GONE.code) { - logger.warn(e) { "Received kill response from API, shutting down heartbeat" } + if (e is GeneratedClientException && e.statusCode == HttpStatus.GONE.code) { + metricClient.count(OssMetricsRegistry.HEARTBEAT_TERMINAL_SHUTDOWN, 1, *metricAttrs.toTypedArray()) markCancelled() return@Runnable } else if (Duration.between(lastSuccessfulHeartbeat, Instant.now()) > heartbeatTimeoutDuration) { logger.warn(e) { "Have not been able to update heartbeat for more than the timeout duration, shutting down heartbeat" } + metricClient.count(OssMetricsRegistry.HEARTBEAT_CONNECTIVITY_FAILURE_SHUTDOWN, 1, *metricAttrs.toTypedArray()) markFailed() + abort() trackFailure(WorkloadHeartbeatException("Workload Heartbeat Error", e)) return@Runnable } @@ -166,6 +179,7 @@ class ReplicationWorkerHelper( ctx: ReplicationContext, replicationFeatureFlags: ReplicationFeatureFlags, jobRoot: Path, + configuredAirbyteCatalog: ConfiguredAirbyteCatalog, ) { timeTracker.trackReplicationStartTime() @@ -180,6 +194,7 @@ class ReplicationWorkerHelper( } ApmTraceUtils.addTagsToTrace(ctx.connectionId, ctx.attempt.toLong(), ctx.jobId.toString(), jobRoot) + streamStatusCompletionTracker.startTracking(configuredAirbyteCatalog, ctx) } fun startDestination( @@ -285,6 +300,10 @@ class ReplicationWorkerHelper( internalProcessMessageFromDestination(message) } + fun getStreamStatusToSend(exitValue: Int): List { + return streamStatusCompletionTracker.finalize(exitValue, mapper) + } + @JvmOverloads @Throws(JsonProcessingException::class) fun getReplicationOutput(performanceMetrics: PerformanceMetrics? 
= null): ReplicationOutput { @@ -417,15 +436,24 @@ class ReplicationWorkerHelper( // internally we always want to deal with the state message we got from the // source, so we only modify the state message after processing it, right before we send it to the // destination - return internalProcessMessageFromSource(sourceRawMessage) + return attachIdToStateMessageFromSource(sourceRawMessage) + .let { internalProcessMessageFromSource(it) } .let { mapper.mapMessage(it) } - .let { Optional.of(it) } + .let { Optional.ofNullable(it) } } fun isWorkerV2TestEnabled(): Boolean { return workloadEnabled } + fun getSourceDefinitionIdForSourceId(sourceId: UUID): UUID { + return sourceApi.getSource(SourceIdRequestBody().sourceId(sourceId)).sourceDefinitionId + } + + fun getDestinationDefinitionIdForDestinationId(destinationId: UUID): UUID { + return destinationApi.getDestination(DestinationIdRequestBody().destinationId(destinationId)).destinationDefinitionId + } + private fun getTotalStats( timeTracker: ThreadedTimeTracker, hasReplicationCompleted: Boolean, @@ -448,8 +476,7 @@ class ReplicationWorkerHelper( val failures = mutableListOf() // only .setFailures() if a failure occurred or if there is an AirbyteErrorTraceMessage - messageTracker.errorTraceMessageFailure(context.jobId, context.attempt) - ?.let { failures.add(it) } + failures.addAll(messageTracker.errorTraceMessageFailure(context.jobId, context.attempt)) failures.addAll(replicationFailures) @@ -479,7 +506,14 @@ class ReplicationWorkerHelper( ex.humanReadableThreshold, ex.humanReadableTimeSinceLastRec, ) - is DestinationTimeoutMonitor.TimeoutException -> FailureHelper.destinationTimeoutFailure(ex, jobId, attempt) + is DestinationTimeoutMonitor.TimeoutException -> + FailureHelper.destinationTimeoutFailure( + ex, + jobId, + attempt, + ex.humanReadableThreshold, + ex.humanReadableTimeSinceLastAction, + ) is WorkloadHeartbeatException -> FailureHelper.platformFailure(ex, jobId, attempt, ex.message) else -> 
FailureHelper.replicationFailure(ex, jobId, attempt) } diff --git a/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/helper/StreamStatusCompletionTracker.kt b/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/helper/StreamStatusCompletionTracker.kt new file mode 100644 index 00000000000..9ab99be332f --- /dev/null +++ b/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/helper/StreamStatusCompletionTracker.kt @@ -0,0 +1,97 @@ +package io.airbyte.workers.helper + +import io.airbyte.featureflag.ActivateRefreshes +import io.airbyte.featureflag.Connection +import io.airbyte.featureflag.DestinationDefinition +import io.airbyte.featureflag.FeatureFlagClient +import io.airbyte.featureflag.Multi +import io.airbyte.featureflag.SourceDefinition +import io.airbyte.featureflag.Workspace +import io.airbyte.protocol.models.AirbyteMessage +import io.airbyte.protocol.models.AirbyteStreamStatusTraceMessage +import io.airbyte.protocol.models.AirbyteTraceMessage +import io.airbyte.protocol.models.ConfiguredAirbyteCatalog +import io.airbyte.protocol.models.StreamDescriptor +import io.airbyte.workers.context.ReplicationContext +import io.airbyte.workers.exception.WorkerException +import io.airbyte.workers.internal.AirbyteMapper +import jakarta.inject.Singleton +import java.time.Clock + +@Singleton +class StreamStatusCompletionTracker( + private val featureFlagClient: FeatureFlagClient, + private val clock: Clock, +) { + private val hasCompletedStatus = mutableMapOf() + private var shouldEmitStreamStatus = false + + open fun startTracking( + configuredAirbyteCatalog: ConfiguredAirbyteCatalog, + replicationContext: ReplicationContext, + ) { + shouldEmitStreamStatus = + featureFlagClient.boolVariation( + ActivateRefreshes, + Multi( + listOf( + Workspace(replicationContext.workspaceId), + Connection(replicationContext.connectionId), + SourceDefinition(replicationContext.sourceDefinitionId), + DestinationDefinition(replicationContext.destinationDefinitionId), + ), + 
), + ) + + if (shouldEmitStreamStatus) { + configuredAirbyteCatalog.streams.forEach { stream -> + hasCompletedStatus[StreamDescriptor().withName(stream.stream.name).withNamespace(stream.stream.namespace)] = false + } + } + } + + open fun track(streamStatus: AirbyteStreamStatusTraceMessage) { + if (shouldEmitStreamStatus && streamStatus.status == AirbyteStreamStatusTraceMessage.AirbyteStreamStatus.COMPLETE) { + hasCompletedStatus[streamStatus.streamDescriptor] ?: run { + throw WorkerException("A stream status has been detected for a stream not present in the catalog") + } + hasCompletedStatus[streamStatus.streamDescriptor] = true + } + } + + open fun finalize( + exitCode: Int, + namespacingMapper: AirbyteMapper, + ): List { + if (!shouldEmitStreamStatus) { + return listOf() + } + return if (0 == exitCode) { + streamDescriptorsToCompleteStatusMessage(hasCompletedStatus.keys, namespacingMapper) + } else { + streamDescriptorsToCompleteStatusMessage(hasCompletedStatus.filter { it.value }.keys, namespacingMapper) + } + } + + private fun streamDescriptorsToCompleteStatusMessage( + streamDescriptors: Set, + namespacingMapper: AirbyteMapper, + ): List { + return streamDescriptors.map { + namespacingMapper.mapMessage( + AirbyteMessage() + .withType(AirbyteMessage.Type.TRACE) + .withTrace( + AirbyteTraceMessage() + .withType(AirbyteTraceMessage.Type.STREAM_STATUS) + .withEmittedAt(clock.millis().toDouble()) + .withStreamStatus( + AirbyteStreamStatusTraceMessage() + .withStatus(AirbyteStreamStatusTraceMessage.AirbyteStreamStatus.COMPLETE) + .withStreamDescriptor(it), + ), + ), + ) + } + } +} diff --git a/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/internal/AnalyticsMessageTracker.kt b/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/internal/AnalyticsMessageTracker.kt index a256b9ed5a0..f48abba9746 100644 --- a/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/internal/AnalyticsMessageTracker.kt +++ 
b/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/internal/AnalyticsMessageTracker.kt @@ -45,7 +45,7 @@ class AnalyticsMessageTracker(private val trackingClient: TrackingClient) { } } - private fun generateAnalyticsMetadata(currentMessages: List): Map? { + private fun generateAnalyticsMetadata(currentMessages: List): Map { val context = requireNotNull(ctx) val jsonList: ArrayNode = Jsons.arrayNode() jsonList.addAll(currentMessages) diff --git a/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/internal/Mapper.kt b/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/internal/Mapper.kt index d01d5fc542e..14849a378f6 100644 --- a/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/internal/Mapper.kt +++ b/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/internal/Mapper.kt @@ -6,6 +6,7 @@ import io.airbyte.config.JobSyncConfig.NamespaceDefinitionType import io.airbyte.protocol.models.AirbyteMessage import io.airbyte.protocol.models.AirbyteMessage.Type import io.airbyte.protocol.models.AirbyteStateMessage.AirbyteStateType +import io.airbyte.protocol.models.AirbyteTraceMessage import io.airbyte.protocol.models.ConfiguredAirbyteCatalog import io.github.oshai.kotlinlogging.KotlinLogging @@ -89,6 +90,15 @@ class NamespacingMapper streamDescriptor.namespace = destinationNamespace streamDescriptor.name = destinationStreamName } + Type.TRACE -> + with(message.trace) { + if (this.type != AirbyteTraceMessage.Type.STREAM_STATUS) { + return@with + } + val streamDescriptor = this.streamStatus.streamDescriptor + streamDescriptor.name = transformStreamName(streamDescriptor.name, streamPrefix) + streamDescriptor.namespace = transformNamespace(streamDescriptor.namespace) + } else -> Unit } diff --git a/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/internal/bookkeeping/AirbyteMessageTracker.kt b/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/internal/bookkeeping/AirbyteMessageTracker.kt index de60459c143..d55ef490de5 100644 
--- a/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/internal/bookkeeping/AirbyteMessageTracker.kt +++ b/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/internal/bookkeeping/AirbyteMessageTracker.kt @@ -10,6 +10,7 @@ import io.airbyte.workers.helper.FailureHelper import io.airbyte.workers.internal.stateaggregator.DefaultStateAggregator import io.airbyte.workers.internal.stateaggregator.StateAggregator import io.github.oshai.kotlinlogging.KotlinLogging +import java.util.ArrayList private val logger = KotlinLogging.logger {} @@ -72,17 +73,12 @@ class AirbyteMessageTracker( fun errorTraceMessageFailure( jobId: Long, attempt: Int, - ): FailureReason? { - val srcMsg = srcErrorTraceMsgs.firstOrNull() - val dstMsg = dstErrorTraceMsgs.firstOrNull() - - return when { - srcMsg == null && dstMsg == null -> null - srcMsg != null && dstMsg == null -> FailureHelper.sourceFailure(srcMsg, jobId, attempt) - srcMsg == null && dstMsg != null -> FailureHelper.destinationFailure(dstMsg, jobId, attempt) - srcMsg != null && dstMsg != null && srcMsg.emittedAt <= dstMsg.emittedAt -> FailureHelper.sourceFailure(srcMsg, jobId, attempt) - else -> FailureHelper.destinationFailure(dstMsg, jobId, attempt) - } + ): List { + val allErrors = + srcErrorTraceMsgs.map { + FailureHelper.sourceFailure(it, jobId, attempt) + } + dstErrorTraceMsgs.map { FailureHelper.destinationFailure(it, jobId, attempt) } + return allErrors.sortedBy { it.getTimestamp() } } /** diff --git a/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/internal/bookkeeping/ParallelStreamStatsTracker.kt b/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/internal/bookkeeping/ParallelStreamStatsTracker.kt index e1500b36efb..40873cf68dd 100644 --- a/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/internal/bookkeeping/ParallelStreamStatsTracker.kt +++ b/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/internal/bookkeeping/ParallelStreamStatsTracker.kt @@ -7,6 +7,7 @@ import 
io.airbyte.config.SyncStats import io.airbyte.featureflag.Connection import io.airbyte.featureflag.EmitStateStatsToSegment import io.airbyte.featureflag.FeatureFlagClient +import io.airbyte.featureflag.LogsForStripeChecksumDebugging import io.airbyte.featureflag.Multi import io.airbyte.featureflag.Workspace import io.airbyte.metrics.lib.MetricAttribute @@ -57,6 +58,11 @@ class ParallelStreamStatsTracker( featureFlagClient.boolVariation(EmitStateStatsToSegment, connectionContext) } + private val logsForStripeChecksumDebugging: Boolean by lazy { + val connectionContext = Multi(listOf(Connection(connectionId), Workspace(workspaceId))) + featureFlagClient.boolVariation(LogsForStripeChecksumDebugging, connectionContext) + } + @Volatile private var hasEstimatesErrors = false @@ -118,7 +124,11 @@ class ParallelStreamStatsTracker( else -> { val statsTracker = getOrCreateStreamStatsTracker(getNameNamespacePair(stateMessage)) statsTracker.trackStateFromSource(stateMessage) - updateChecksumValidationStatus(statsTracker.areStreamStatsReliable(), AirbyteMessageOrigin.SOURCE, getNameNamespacePair(stateMessage)) + updateChecksumValidationStatus( + statsTracker.areStreamStatsReliable(), + AirbyteMessageOrigin.SOURCE, + getNameNamespacePair(stateMessage), + ) validateStateChecksum( stateMessage, statsTracker.getTrackedEmittedRecordsSinceLastStateMessage().toDouble(), @@ -228,6 +238,7 @@ class ParallelStreamStatsTracker( val errorMessage = "${origin.name.lowercase().replaceFirstChar { it.uppercase() }} state message checksum is invalid: " + "state source record count $sourceRecordCount does not equal state destination record count $destinationRecordCount" + + ". Please note that the destination count matches the platform count" + if (includeStreamInLogs) " for stream ${getNameNamespacePair(stateMessage)}." else "." 
logger.error { errorMessage } emitChecksumMetrics(CHECKSUM_PLATFORM_DESTINATION_MISMATCH) @@ -242,6 +253,11 @@ class ParallelStreamStatsTracker( if (includeStreamInLogs) " for stream ${getNameNamespacePair(stateMessage)}." else "." } } + } else { + logger.info { + "Source state count is not available for comparison with destination count, " + + "but destination count matches the platform count." + } } } else { logger.info { @@ -307,15 +323,19 @@ class ParallelStreamStatsTracker( if (!shouldEmitStateStatsToSegment(stateMessage)) { return } - val payload: MutableMap = HashMap() - payload["connection_id"] = connectionId.toString() - payload["job_id"] = jobId.toString() - payload["attempt_number"] = attemptNumber.toString() - payload["state_origin"] = stateOrigin - payload["record_count"] = recordCount.toString() - payload["valid_data"] = checksumValidationEnabled.toString() - payload["state_type"] = stateMessage.type.toString() - payload["state_hash"] = stateMessage.getStateHashCode(Hashing.murmur3_32_fixed()).toString() + val payload: MutableMap = + mutableMapOf( + "connection_id" to connectionId.toString(), + "job_id" to jobId.toString(), + "attempt_number" to attemptNumber.toString(), + "state_origin" to stateOrigin, + "record_count" to recordCount.toString(), + "valid_data" to checksumValidationEnabled.toString(), + "state_type" to stateMessage.type.toString(), + "state_hash" to stateMessage.getStateHashCode(Hashing.murmur3_32_fixed()).toString(), + "state_id" to stateMessage.getStateIdForStatsTracking().toString(), + ) + if (stateMessage.type == AirbyteStateMessage.AirbyteStateType.STREAM) { val nameNamespacePair = getNameNamespacePair(stateMessage) if (nameNamespacePair.namespace != null) { @@ -538,6 +558,7 @@ class ParallelStreamStatsTracker( return StreamStatsTracker( nameNamespacePair = pair, metricClient = metricClient, + logsForStripeChecksumDebugging = logsForStripeChecksumDebugging, ).also { streamTrackers[pair] = it } } } diff --git 
a/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/internal/bookkeeping/StatsTracker.kt b/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/internal/bookkeeping/StatsTracker.kt index 823a698b547..3f09f6d3bbc 100644 --- a/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/internal/bookkeeping/StatsTracker.kt +++ b/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/internal/bookkeeping/StatsTracker.kt @@ -1,7 +1,6 @@ package io.airbyte.workers.internal.bookkeeping import com.google.common.hash.HashFunction -import com.google.common.hash.Hashing import com.google.common.util.concurrent.AtomicDouble import io.airbyte.commons.json.Jsons import io.airbyte.metrics.lib.MetricClient @@ -10,6 +9,7 @@ import io.airbyte.protocol.models.AirbyteEstimateTraceMessage import io.airbyte.protocol.models.AirbyteRecordMessage import io.airbyte.protocol.models.AirbyteStateMessage import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair +import io.airbyte.workers.models.StateWithId import io.github.oshai.kotlinlogging.KotlinLogging import java.time.LocalDateTime import java.time.temporal.ChronoUnit @@ -72,7 +72,7 @@ data class EmittedStatsCounters( * state. 
*/ private data class StagedStats( - val stateHash: Int, + val stateId: Int, val stateMessage: AirbyteStateMessage, val emittedStatsCounters: EmittedStatsCounters, val receivedTime: LocalDateTime, @@ -94,14 +94,15 @@ private val logger = KotlinLogging.logger { } class StreamStatsTracker( val nameNamespacePair: AirbyteStreamNameNamespacePair, private val metricClient: MetricClient, + private val logsForStripeChecksumDebugging: Boolean, ) { val streamStats = StreamStatsCounters() - private val hashFunction = Hashing.murmur3_32_fixed() - private val stateHashes = ConcurrentHashMap.newKeySet() + private val stateIds = ConcurrentHashMap.newKeySet() private val stagedStatsList = ConcurrentLinkedQueue() private var emittedStats = EmittedStatsCounters() private var previousEmittedStats = EmittedStatsCounters() private var previousStateMessageReceivedAt: LocalDateTime? = null + private var alreadyLogged: Boolean = false /** * Bookkeeping for when a record message is read. @@ -130,6 +131,14 @@ class StreamStatsTracker( emittedRecordsCount.incrementAndGet() emittedBytesCount.addAndGet(estimatedBytesSize) } + + if (logsForStripeChecksumDebugging && !alreadyLogged && stateIds.size > 0) { + logger.info { + "Received records for the stream ${nameNamespacePair.namespace}:${nameNamespacePair.name}, " + + " after receiving a state message" + } + alreadyLogged = true + } } /** @@ -149,8 +158,8 @@ class StreamStatsTracker( return } - val stateHash: Int = stateMessage.getStateHashCode(hashFunction) - if (!stateHashes.add(stateHash)) { + val stateId: Int = stateMessage.getStateIdForStatsTracking() + if (!this.stateIds.add(stateId)) { // State collision detected, it means that state tracking is compromised for this stream. // Rather than reporting incorrect data, we skip all operations that involve state tracking. 
streamStats.unreliableStateOperations.set(true) @@ -168,7 +177,7 @@ class StreamStatsTracker( previousEmittedStats = emittedStats emittedStats = EmittedStatsCounters() - stagedStatsList.add(StagedStats(stateHash, stateMessage, previousEmittedStats, currentTime)) + stagedStatsList.add(StagedStats(stateId, stateMessage, previousEmittedStats, currentTime)) // Updating state checkpointing metrics // previousStateMessageReceivedAt is null when it's the first state message of a stream. @@ -208,12 +217,12 @@ class StreamStatsTracker( return } - val stateHash: Int = stateMessage.getStateHashCode(hashFunction) - if (!stateHashes.contains(stateHash)) { + val stateId: Int = stateMessage.getStateIdForStatsTracking() + if (!stateIds.contains(stateId)) { metricClient.count(OssMetricsRegistry.STATE_ERROR_UNKNOWN_FROM_DESTINATION, 1) logger.warn { "Unexpected state from destination for stream ${nameNamespacePair.namespace}:${nameNamespacePair.name}, " + - "$stateHash not found in the stored stateHashes" + "$stateId not found in the stored stateIds" } return } else if (stagedStatsList.isEmpty()) { @@ -225,31 +234,39 @@ class StreamStatsTracker( return } - logger.info { "Hash of the state message received from the destination $stateHash" } + logger.debug { "Id of the state message received from the destination $stateId" } var stagedStats: StagedStats? 
= null // un-stage stats until the stateMessage while (!stagedStatsList.isEmpty()) { stagedStats = stagedStatsList.poll() - logger.info { - "removing ${stagedStats.stateHash} from the stored stateHashes for the stream " + + logger.debug { + "removing ${stagedStats.stateId} from the stored stateIds for the stream " + "${nameNamespacePair.namespace}:${nameNamespacePair.name}, " + "state received time ${stagedStats.receivedTime}" + "stagedStatsList size after poll: ${stagedStatsList.size}, " + - "stateHashes size before removal ${stateHashes.size}" + "stateIds size before removal ${stateIds.size}" } - // Cleaning up stateHashes as we go to avoid un-staging on duplicate or our of order state messages - stateHashes.remove(stagedStats.stateHash) + // Cleaning up stateIds as we go to avoid un-staging on duplicate or our of order state messages + stateIds.remove(stagedStats.stateId) // Increment committed stats as we are un-staging stats streamStats.committedBytesCount.addAndGet(stagedStats.emittedStatsCounters.emittedBytesCount.get()) streamStats.committedRecordsCount.addAndGet(stagedStats.emittedStatsCounters.remittedRecordsCount.get()) - if (stagedStats.stateHash == stateHash) { + if (stagedStats.stateId == stateId) { break } } + if (logsForStripeChecksumDebugging) { + logger.info { + "Received state message back from destination for the stream , " + + "${nameNamespacePair.namespace}:${nameNamespacePair.name}, " + + "committed record count is ${streamStats.committedRecordsCount} , total records at this point is ${streamStats.emittedRecordsCount} " + } + } + // Updating state checkpointing metrics stagedStats?.receivedTime?.until(currentTime, ChronoUnit.SECONDS)?.let { durationBetweenStateEmittedAndCommitted -> streamStats.maxSecondsBetweenStateEmittedAndCommitted.accumulate(durationBetweenStateEmittedAndCommitted) @@ -277,10 +294,10 @@ class StreamStatsTracker( } fun getTrackedCommittedRecordsSinceLastStateMessage(stateMessage: AirbyteStateMessage): Long { - val 
stateHashCode = stateMessage.getStateHashCode(hashFunction) - val stagedStats: StagedStats? = stagedStatsList.find { it.stateHash == stateHashCode } + val stateId = stateMessage.getStateIdForStatsTracking() + val stagedStats: StagedStats? = stagedStatsList.find { it.stateId == stateId } if (stagedStats == null) { - logger.warn { "Could not find the state message with hash $stateHashCode in the stagedStatsList" } + logger.warn { "Could not find the state message with id $stateId in the stagedStatsList" } } return stagedStats?.emittedStatsCounters?.remittedRecordsCount?.get() ?: 0 } @@ -298,6 +315,8 @@ fun AirbyteStateMessage.getStateHashCode(hashFunction: HashFunction): Int = else -> hashFunction.hashBytes(Jsons.serialize(data).toByteArray()).hashCode() } +fun AirbyteStateMessage.getStateIdForStatsTracking(): Int = StateWithId.getIdFromStateMessage(this) + private fun updateMean( previousMean: Double, previousCount: Long, diff --git a/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/internal/syncpersistence/SyncPersistence.kt b/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/internal/syncpersistence/SyncPersistence.kt index 793700353d3..a51ef2e00b9 100644 --- a/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/internal/syncpersistence/SyncPersistence.kt +++ b/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/internal/syncpersistence/SyncPersistence.kt @@ -4,17 +4,12 @@ import datadog.trace.api.Trace import io.airbyte.api.client.AirbyteApiClient import io.airbyte.api.client.generated.AttemptApi import io.airbyte.api.client.generated.StateApi -import io.airbyte.api.client.invoker.generated.ApiException import io.airbyte.api.client.model.generated.AttemptStats import io.airbyte.api.client.model.generated.AttemptStreamStats -import io.airbyte.api.client.model.generated.ConnectionIdRequestBody import io.airbyte.api.client.model.generated.ConnectionState import io.airbyte.api.client.model.generated.ConnectionStateCreateOrUpdate -import 
io.airbyte.api.client.model.generated.ConnectionStateType import io.airbyte.api.client.model.generated.SaveStatsRequestBody import io.airbyte.commons.converters.StateConverter -import io.airbyte.config.StateType -import io.airbyte.config.StateWrapper import io.airbyte.config.SyncStats import io.airbyte.config.helpers.StateMessageHelper import io.airbyte.metrics.lib.MetricAttribute @@ -25,7 +20,6 @@ import io.airbyte.metrics.lib.OssMetricsRegistry import io.airbyte.protocol.models.AirbyteEstimateTraceMessage import io.airbyte.protocol.models.AirbyteRecordMessage import io.airbyte.protocol.models.AirbyteStateMessage -import io.airbyte.protocol.models.CatalogHelpers import io.airbyte.protocol.models.ConfiguredAirbyteCatalog import io.airbyte.workers.internal.bookkeeping.SyncStatsTracker import io.airbyte.workers.internal.bookkeeping.getPerStreamStats @@ -83,7 +77,6 @@ class SyncPersistenceImpl ) : SyncPersistence, SyncStatsTracker by syncStatsTracker { private var stateBuffer = stateAggregatorFactory.create() private var stateFlushFuture: ScheduledFuture<*>? = null - private var onlyFlushAtTheEnd = false private var isReceivingStats = false private var stateToFlush: StateAggregator? = null private var statsToPersist: SaveStatsRequestBody? = null @@ -130,30 +123,11 @@ class SyncPersistenceImpl metricClient.count(OssMetricsRegistry.STATE_BUFFERING, 1) stateBuffer.ingest(stateMessage) - startBackgroundFlushStateTask(connectionId, stateMessage) + startBackgroundFlushStateTask(connectionId) } - private fun startBackgroundFlushStateTask( - connectionId: UUID, - stateMessage: AirbyteStateMessage, - ) { - if (stateFlushFuture != null || onlyFlushAtTheEnd) { - return - } - - // Fetch the current persisted state to see if it is a state migration. - // In case of a state migration, we only flush at the end of the sync to avoid dropping states in - // case of a sync failure - val currentPersistedState: ConnectionState? 
= - try { - stateApi.getState(ConnectionIdRequestBody().connectionId(connectionId)) - } catch (e: ApiException) { - logger.warn(e) { "Failed to check current state for connectionId $connectionId, it will be retried next time we see a state" } - return - } - if (isMigration(currentPersistedState, stateMessage) && stateMessage.type == AirbyteStateMessage.AirbyteStateType.STREAM) { - logger.info { "State type migration from LEGACY to STREAM detected, all states will be persisted at the end of the sync" } - onlyFlushAtTheEnd = true + private fun startBackgroundFlushStateTask(connectionId: UUID) { + if (stateFlushFuture != null) { return } @@ -220,9 +194,6 @@ class SyncPersistenceImpl if (hasStatesToFlush()) { // we still have data to flush prepareDataForFlush() - if (onlyFlushAtTheEnd) { - validateStreamMigration() - } try { retryWithJitterThrows("Flush States from SyncPersistenceImpl") { doFlushState() @@ -333,16 +304,6 @@ class SyncPersistenceImpl metricClient.count(OssMetricsRegistry.STATE_COMMIT_ATTEMPT_SUCCESSFUL, 1) } - private fun isMigration( - currentPersistedState: ConnectionState?, - stateMessage: AirbyteStateMessage, - ): Boolean { - return ( - !isStateEmpty(currentPersistedState) && currentPersistedState?.stateType == ConnectionStateType.LEGACY && - stateMessage.type != AirbyteStateMessage.AirbyteStateType.LEGACY - ) - } - private fun doFlushStats() { if (!hasStatsToFlush()) { return @@ -364,17 +325,6 @@ class SyncPersistenceImpl private fun hasStatsToFlush(): Boolean = isReceivingStats && statsToPersist != null - private fun validateStreamMigration() { - val state = stateToFlush?.getAggregated() ?: return - - StateMessageHelper.getTypedState(state.state) - .getOrNull() - ?.takeIf { it.stateType == StateType.STREAM } - ?.let { - validateStreamStates(it, catalog) - } - } - /** * Wraps RetryWithJitterThrows for testing. * @@ -454,29 +404,3 @@ private fun MetricClient.emitFailedStatsCloseMetrics(connectionId: UUID?) { val attribute: MetricAttribute? 
= connectionId?.let { MetricAttribute(MetricTags.CONNECTION_ID, it.toString()) } count(OssMetricsRegistry.STATS_COMMIT_NOT_ATTEMPTED, 1, attribute) } - -/** - * Validate that the LEGACY -> STREAM migration is correct - * - * During the migration, we will lose any previous stream state that isn't in the new state. To - * avoid a potential loss of state, we ensure that all the incremental streams are present in the - * new state. - * - * @param state the new state we want to persist - * @param configuredCatalog the configured catalog of the connection of state - */ -fun validateStreamStates( - state: StateWrapper, - configuredCatalog: ConfiguredAirbyteCatalog, -) { - val stateStreamDescriptors = state.stateMessages.map { it.stream.streamDescriptor }.toList() - - CatalogHelpers.extractIncrementalStreamDescriptors(configuredCatalog) - .find { !stateStreamDescriptors.contains(it) } - ?.let { - throw IllegalStateException( - "Job ran during migration from Legacy State to Per Stream State. One of the streams that did not have state is: " + - "(namespace: ${it.namespace}, name: ${it.name}). 
Job must be retried in order to properly store state.", - ) - } -} diff --git a/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/storage/StorageClient.kt b/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/storage/StorageClient.kt index 01563bb2b82..b4944c80994 100644 --- a/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/storage/StorageClient.kt +++ b/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/storage/StorageClient.kt @@ -67,6 +67,7 @@ enum class DocumentType(val prefix: Path) { LOGS(prefix = Path.of("/job-logging")), STATE(prefix = Path.of("/state")), WORKLOAD_OUTPUT(prefix = Path.of("/workload/output")), + ACTIVITY_PAYLOADS(prefix = Path.of("/activity-payloads")), } /** @@ -299,6 +300,7 @@ internal fun GcsStorageConfig.gcsClient(): Storage { */ internal fun MinioStorageConfig.s3Client(): S3Client = S3Client.builder() + .serviceConfiguration { it.pathStyleAccessEnabled(true) } .credentialsProvider { AwsBasicCredentials.create(this@s3Client.accessKey, this@s3Client.secretAccessKey) } .endpointOverride(URI(this@s3Client.endpoint)) // The region isn't actually used but is required. Set to us-east-1 based on https://github.com/minio/minio/discussions/15063. 
@@ -331,4 +333,5 @@ fun StorageConfig.bucketName(type: DocumentType): String = DocumentType.STATE -> this.buckets.state DocumentType.WORKLOAD_OUTPUT -> this.buckets.workloadOutput DocumentType.LOGS -> this.buckets.log + DocumentType.ACTIVITY_PAYLOADS -> this.buckets.activityPayload } diff --git a/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/storage/activities/ActivityPayloadStorageClient.kt b/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/storage/activities/ActivityPayloadStorageClient.kt new file mode 100644 index 00000000000..0d761a76d7f --- /dev/null +++ b/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/storage/activities/ActivityPayloadStorageClient.kt @@ -0,0 +1,124 @@ +package io.airbyte.workers.storage.activities + +import io.airbyte.commons.json.JsonSerde +import io.airbyte.metrics.lib.ApmTraceUtils +import io.airbyte.metrics.lib.MetricAttribute +import io.airbyte.metrics.lib.MetricClient +import io.airbyte.metrics.lib.MetricTags +import io.airbyte.metrics.lib.OssMetricsRegistry +import io.airbyte.workers.storage.StorageClient +import io.github.oshai.kotlinlogging.KotlinLogging + +private val logger = KotlinLogging.logger {} + +/** + * Writes and reads activity payloads to and from the configured object store. + * Currently just handles JSON serialization, but can be updated as necessary. + * */ +class ActivityPayloadStorageClient( + private val storageClientRaw: StorageClient, + private val jsonSerde: JsonSerde, + private val metricClient: MetricClient, +) { + /** + * It reads the object from the location described by the given [uri] and unmarshals it from JSON. + * Any Exceptions thrown by the raw object storage client or json deserializer will be forwarded to the caller. + * + * @return the unmarshalled object on a hit and null on a miss. + */ + inline fun readJSON(uri: ActivityPayloadURI): T? 
{ + return readJSON(uri, T::class.java) + } + + /** + * It reads the object from the location described by the given [uri] and unmarshals it from JSON to [target] class. + * Any Exceptions thrown by the raw object storage client or json deserializer will be forwarded to the caller. + * + * @return the unmarshalled object on a hit and null on a miss. + */ + fun readJSON( + uri: ActivityPayloadURI, + target: Class, + ): T? { + metricClient.count(OssMetricsRegistry.ACTIVITY_PAYLOAD_READ_FROM_DOC_STORE, 1) + + return storageClientRaw.read(uri.id) + ?.let { jsonSerde.deserialize(it, target) } + } + + /** + * It marshals the given object to JSON and writes it to object storage at a location determined by the given [uri]. + * Any Exceptions thrown by the raw object storage client or json serializer will be forwarded to the caller. + * + * @return Unit + */ + fun writeJSON( + uri: ActivityPayloadURI, + payload: T, + ) { + metricClient.count(OssMetricsRegistry.ACTIVITY_PAYLOAD_WRITTEN_TO_DOC_STORE, 1) + + return storageClientRaw.write(uri.id, jsonSerde.serialize(payload)) + } + + /** + * It reads the object from the location described by the given [uri] and unmarshals it from JSON to [target] class + * and compares it to the [expected] recording a metric based on the result. + * + * Any Exceptions thrown by the raw object storage client or json serializer will be forwarded to the caller. 
+ * + * @return the object passed for comparison + */ + fun validateOutput( + uri: ActivityPayloadURI?, + target: Class, + expected: T, + comparator: Comparator, + attrs: List, + ): T { + if (uri == null) { + val baseAttrs = attrs + MetricAttribute(MetricTags.URI_NULL, true.toString()) + metricClient.count(OssMetricsRegistry.PAYLOAD_FAILURE_READ, 1, *baseAttrs.toTypedArray()) + + return expected + } + + ApmTraceUtils.addTagsToTrace(mapOf(Pair(MetricTags.URI_ID, uri.id), Pair(MetricTags.URI_VERSION, uri.version))) + + val baseAttrs = + attrs + + listOf( + MetricAttribute(MetricTags.URI_NULL, false.toString()), + MetricAttribute(MetricTags.URI_ID, uri.id), + MetricAttribute(MetricTags.URI_VERSION, uri.version), + MetricAttribute(MetricTags.PAYLOAD_NAME, target.name), + ) + + val remote: T? + try { + remote = readJSON(uri, target) + } catch (e: Exception) { + logger.error { e } + + ApmTraceUtils.addExceptionToTrace(e) + val attrsWithException = + baseAttrs + MetricAttribute(MetricTags.FAILURE_CAUSE, e.javaClass.simpleName) + + metricClient.count(OssMetricsRegistry.PAYLOAD_FAILURE_READ, 1, *attrsWithException.toTypedArray()) + + return expected + } + + val match = comparator.compare(expected, remote) == 0 + val miss = remote == null + + val attrsWithMatch = + baseAttrs + + MetricAttribute(MetricTags.IS_MATCH, match.toString()) + + MetricAttribute(MetricTags.IS_MISS, miss.toString()) + + metricClient.count(OssMetricsRegistry.PAYLOAD_VALIDATION_RESULT, 1, *attrsWithMatch.toTypedArray()) + + return expected + } +} diff --git a/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/storage/activities/ActivityPayloadURI.kt b/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/storage/activities/ActivityPayloadURI.kt new file mode 100644 index 00000000000..c0890c9ecba --- /dev/null +++ b/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/storage/activities/ActivityPayloadURI.kt @@ -0,0 +1,43 @@ +package io.airbyte.workers.storage.activities + +import 
java.util.UUID +import io.airbyte.config.ActivityPayloadURI as OpenApi + +enum class ActivityPayloadURIVersion { + V1, +} + +class ActivityPayloadURI( + val id: String, + val version: String = ActivityPayloadURIVersion.V1.name, +) { + companion object Factory { + @JvmStatic + fun v1( + connectionId: UUID, + jobId: Long, + attemptNumber: Int, + payloadName: String, + ): ActivityPayloadURI { + return ActivityPayloadURI("${connectionId}_${jobId}_${attemptNumber}_$payloadName", ActivityPayloadURIVersion.V1.name) + } + + @JvmStatic + fun fromOpenApi(dto: OpenApi?): ActivityPayloadURI? { + if (dto == null || dto.version == null || dto.id == null) { + return null + } + + return ActivityPayloadURI( + version = dto.version, + id = dto.id, + ) + } + } + + fun toOpenApi(): OpenApi { + return OpenApi() + .withId(id) + .withVersion(version) + } +} diff --git a/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/storage/activities/NaiveEqualityComparator.kt b/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/storage/activities/NaiveEqualityComparator.kt new file mode 100644 index 00000000000..10d8d4e18f7 --- /dev/null +++ b/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/storage/activities/NaiveEqualityComparator.kt @@ -0,0 +1,8 @@ +package io.airbyte.workers.storage.activities + +class NaiveEqualityComparator : Comparator { + override fun compare( + o1: T?, + o2: T?, + ): Int = if (o1 == o2) 0 else 1 +} diff --git a/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/storage/activities/OutputStorageClient.kt b/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/storage/activities/OutputStorageClient.kt new file mode 100644 index 00000000000..c432c9a8ede --- /dev/null +++ b/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/storage/activities/OutputStorageClient.kt @@ -0,0 +1,86 @@ +package io.airbyte.workers.storage.activities + +import io.airbyte.metrics.lib.ApmTraceUtils +import io.airbyte.metrics.lib.MetricAttribute +import 
io.airbyte.metrics.lib.MetricClient +import io.airbyte.metrics.lib.MetricTags +import io.airbyte.metrics.lib.OssMetricsRegistry +import io.github.oshai.kotlinlogging.KotlinLogging +import java.util.UUID +import io.airbyte.config.ActivityPayloadURI as OpenApiURI + +private val logger = KotlinLogging.logger {} + +/** + * Client for writing per-attempt outputs to object storage. This is for outputs that are not directly + * operationalized against, but are useful for debugging and troubleshooting purposes. + */ +class OutputStorageClient + @JvmOverloads + constructor( + private val storageClient: ActivityPayloadStorageClient, + private val metricClient: MetricClient, + private val payloadName: String, + private val target: Class, + private val comparator: Comparator = NaiveEqualityComparator(), + ) { + /** + * Persists an object to storage id-ed by connection, job and attempt number. + */ + fun persist( + obj: T?, + connectionId: UUID, + jobId: Long, + attemptNumber: Int, + metricAttributes: Array, + ): OpenApiURI? { + if (obj == null) return null + + val uri = ActivityPayloadURI.v1(connectionId, jobId, attemptNumber, payloadName) + + try { + storageClient.writeJSON(uri, obj) + } catch (e: Exception) { + val attrs = + listOf(*metricAttributes) + + listOf( + MetricAttribute(MetricTags.URI_ID, uri.id), + MetricAttribute(MetricTags.URI_VERSION, uri.version), + MetricAttribute(MetricTags.FAILURE_CAUSE, e.javaClass.simpleName), + MetricAttribute(MetricTags.PAYLOAD_NAME, payloadName), + ) + + ApmTraceUtils.addExceptionToTrace(e) + ApmTraceUtils.addTagsToTrace(attrs) + + logger.error { "Failure writing $payloadName to object storage." } + logger.error { "Message: ${e.message}" } + logger.error { "Stack Trace: ${e.stackTrace}" } + + metricClient.count(OssMetricsRegistry.PAYLOAD_FAILURE_WRITE, 1, *attrs.toTypedArray()) + } + + return uri.toOpenApi() + } + + /** + * Queries object storage based on the provided uri. Emits a metric on whether it's a match. 
+ */ + fun validate( + expected: T?, + uri: OpenApiURI, + attrs: List, + ) { + if (expected == null) return + + val domainUri = ActivityPayloadURI.fromOpenApi(uri) ?: return + + storageClient.validateOutput( + domainUri, + target, + expected, + comparator, + attrs, + ) + } + } diff --git a/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/sync/OrchestratorConstants.kt b/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/sync/OrchestratorConstants.kt index e4b8faf317b..a74b04fff06 100644 --- a/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/sync/OrchestratorConstants.kt +++ b/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/sync/OrchestratorConstants.kt @@ -27,6 +27,9 @@ object OrchestratorConstants { const val SIDECAR_INPUT = "sidecarInput.json" const val WORKLOAD_ID_FILE = "workload.txt" + // See the application.yml of the container-orchestrator for value + const val SERVER_PORT = 9000 + // define two ports for stdout/stderr usage on the container orchestrator pod const val PORT1 = 9877 const val PORT2 = 9878 @@ -80,6 +83,7 @@ object OrchestratorConstants { EnvVar.AWS_SECRET_ACCESS_KEY, EnvVar.DD_AGENT_HOST, EnvVar.DD_DOGSTATSD_PORT, + EnvVar.DOCKER_HOST, EnvVar.GOOGLE_APPLICATION_CREDENTIALS, EnvVar.JOB_DEFAULT_ENV_MAP, EnvVar.JOB_ISOLATED_KUBE_NODE_SELECTORS, @@ -107,8 +111,10 @@ object OrchestratorConstants { EnvVar.MINIO_ENDPOINT, EnvVar.OTEL_COLLECTOR_ENDPOINT, EnvVar.PUBLISH_METRICS, + EnvVar.ROOTLESS_WORKLOAD, EnvVar.SOCAT_KUBE_CPU_LIMIT, EnvVar.SOCAT_KUBE_CPU_REQUEST, + EnvVar.STORAGE_BUCKET_ACTIVITY_PAYLOAD, EnvVar.STORAGE_BUCKET_LOG, EnvVar.STORAGE_BUCKET_STATE, EnvVar.STORAGE_BUCKET_WORKLOAD_OUTPUT, diff --git a/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/sync/WorkloadClient.kt b/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/sync/WorkloadClient.kt new file mode 100644 index 00000000000..5d05822176a --- /dev/null +++ 
b/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/sync/WorkloadClient.kt @@ -0,0 +1,129 @@ +package io.airbyte.workers.sync + +import io.airbyte.api.client.WorkloadApiClient +import io.airbyte.config.ConnectorJobOutput +import io.airbyte.config.FailureReason +import io.airbyte.workers.workload.JobOutputDocStore +import io.airbyte.workload.api.client.model.generated.Workload +import io.airbyte.workload.api.client.model.generated.WorkloadCreateRequest +import io.airbyte.workload.api.client.model.generated.WorkloadStatus +import io.github.oshai.kotlinlogging.KotlinLogging +import io.micronaut.http.HttpStatus +import jakarta.inject.Singleton +import org.openapitools.client.infrastructure.ClientException +import java.io.IOException +import kotlin.jvm.optionals.getOrElse +import kotlin.time.Duration.Companion.seconds + +private val logger = KotlinLogging.logger { } + +/** + * WorkloadClient that abstracts common interactions with the workload-api. + * This client should be preferred over direct usage of the WorkloadApiClient. + */ +@Singleton +class WorkloadClient(private val workloadApiClient: WorkloadApiClient, private val jobOutputDocStore: JobOutputDocStore) { + companion object { + val TERMINAL_STATUSES = setOf(WorkloadStatus.SUCCESS, WorkloadStatus.FAILURE, WorkloadStatus.CANCELLED) + } + + fun createWorkload(workloadCreateRequest: WorkloadCreateRequest) { + try { + workloadApiClient.workloadApi.workloadCreate(workloadCreateRequest) + } catch (e: ClientException) { + /* + * The Workload API returns a 409 response when the request to execute the workload has already been + * created. That response is handled in the form of a ClientException by the generated OpenAPI + * client. We do not want to cause the Temporal workflow to retry, so catch it and log the + * information so that the workflow will continue. + */ + if (e.statusCode == HttpStatus.CONFLICT.code) { + logger.warn { "Workload ${workloadCreateRequest.workloadId} already created and in progress. 
Continuing..." } + } else { + throw RuntimeException(e) + } + } catch (e: IOException) { + throw RuntimeException(e) + } + } + + fun waitForWorkload( + workloadId: String, + pollingFrequencyInSeconds: Int, + ) { + try { + var workload = workloadApiClient.workloadApi.workloadGet(workloadId) + while (!isWorkloadTerminal(workload)) { + Thread.sleep(pollingFrequencyInSeconds.seconds.inWholeMilliseconds) + workload = workloadApiClient.workloadApi.workloadGet(workloadId) + } + } catch (e: IOException) { + throw RuntimeException(e) + } catch (e: InterruptedException) { + throw RuntimeException(e) + } + } + + fun getConnectorJobOutput( + workloadId: String, + onFailure: (FailureReason) -> ConnectorJobOutput, + ): ConnectorJobOutput { + return Result.runCatching { + jobOutputDocStore.read(workloadId).orElseThrow() + }.fold( + onFailure = { t -> onFailure(handleMissingConnectorJobOutput(workloadId, t)) }, + onSuccess = { x -> x }, + ) + } + + private fun handleMissingConnectorJobOutput( + workloadId: String, + t: Throwable?, + ): FailureReason { + return Result.runCatching { + val workload = workloadApiClient.workloadApi.workloadGet(workloadId) + + return when (workload.status) { + // This is pretty bad, the workload succeeded, but we failed to read the output + WorkloadStatus.SUCCESS -> + FailureReason() + .withFailureOrigin(FailureReason.FailureOrigin.AIRBYTE_PLATFORM) + .withFailureType(FailureReason.FailureType.SYSTEM_ERROR) + .withExternalMessage("Failed to read the output") + .withInternalMessage("Failed to read the output of a successful workload $workloadId") + .withStacktrace(t?.stackTraceToString()) + + // do some classification from workload.terminationSource + WorkloadStatus.CANCELLED, WorkloadStatus.FAILURE -> + FailureReason() + .withFailureOrigin( + when (workload.terminationSource) { + "source" -> FailureReason.FailureOrigin.SOURCE + "destination" -> FailureReason.FailureOrigin.DESTINATION + else -> FailureReason.FailureOrigin.AIRBYTE_PLATFORM + }, + ) + 
.withExternalMessage("Workload terminated by ${workload.terminationSource}") + .withInternalMessage(workload.terminationReason) + + // We should never be in this situation, workload is still running not having an output is expected, + // we should not be trying to read the output of a non-terminal workload. + else -> + FailureReason() + .withFailureOrigin(FailureReason.FailureOrigin.AIRBYTE_PLATFORM) + .withFailureType(FailureReason.FailureType.SYSTEM_ERROR) + .withExternalMessage("Expected error in the platform") + .withInternalMessage("$workloadId isn't in a terminal state, no output available") + } + }.getOrElse { + FailureReason() + .withFailureOrigin(FailureReason.FailureOrigin.AIRBYTE_PLATFORM) + .withFailureType(FailureReason.FailureType.TRANSIENT_ERROR) + .withExternalMessage("Platform failure") + .withInternalMessage("Unable to reach the workload-api") + .withStacktrace(it.stackTraceToString()) + } + } + + private fun isWorkloadTerminal(workload: Workload): Boolean = workload.status in TERMINAL_STATUSES +} diff --git a/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/temporal/FailureConverter.kt b/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/temporal/FailureConverter.kt new file mode 100644 index 00000000000..f1e3bd1975e --- /dev/null +++ b/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/temporal/FailureConverter.kt @@ -0,0 +1,67 @@ +package io.airbyte.workers.temporal + +import io.airbyte.commons.temporal.utils.ActivityFailureClassifier +import io.airbyte.config.ActorType +import io.airbyte.config.FailureReason +import org.apache.commons.lang3.exception.ExceptionUtils +import org.slf4j.LoggerFactory +import java.lang.String +import kotlin.time.Duration +import kotlin.time.toKotlinDuration + +class FailureConverter { + @JvmOverloads + fun getFailureReason( + commandName: String, + actorType: ActorType, + e: Exception, + timeout: java.time.Duration? 
= null, + ): FailureReason = getFailureReason(commandName, actorType, e, timeout?.toKotlinDuration()) + + fun getFailureReason( + commandName: String, + actorType: ActorType, + e: Exception, + timeout: Duration?, + ): FailureReason { + val failureReason = + FailureReason() + .withFailureOrigin(if (actorType == ActorType.SOURCE) FailureReason.FailureOrigin.SOURCE else FailureReason.FailureOrigin.DESTINATION) + .withStacktrace(ExceptionUtils.getStackTrace(e)) + val classifiedExc = ActivityFailureClassifier.classifyException(e) + LoggerFactory.getLogger("test").error("exception classified as $classifiedExc") + when (classifiedExc) { + ActivityFailureClassifier.TemporalFailureReason.HEARTBEAT -> + failureReason + .withFailureOrigin(FailureReason.FailureOrigin.AIRBYTE_PLATFORM) + .withFailureType(FailureReason.FailureType.SYSTEM_ERROR) + .withExternalMessage("$commandName connection failed because of an internal error.") + .withInternalMessage("$commandName pod failed to heartbeat, verify resource and heath of the worker/check pods.") + + ActivityFailureClassifier.TemporalFailureReason.SCHEDULER_OVERLOADED -> + failureReason + .withFailureOrigin(FailureReason.FailureOrigin.AIRBYTE_PLATFORM) + .withFailureType(FailureReason.FailureType.TRANSIENT_ERROR) + .withExternalMessage("Airbyte Platform is experiencing a higher than usual load, please try again later.") + .withInternalMessage("$commandName wasn't able to start within the expected time, verify scheduler and worker load.") + + ActivityFailureClassifier.TemporalFailureReason.OPERATION_TIMEOUT -> + failureReason + .withExternalMessage("$commandName took too long.") + .withInternalMessage("$commandName exceeded the timeout${timeout?.let { " of ${it.inWholeMinutes} minutes" }.orEmpty()}.") + + ActivityFailureClassifier.TemporalFailureReason.UNKNOWN, ActivityFailureClassifier.TemporalFailureReason.NOT_A_TIMEOUT -> + failureReason + .withFailureOrigin(FailureReason.FailureOrigin.AIRBYTE_PLATFORM) + 
.withExternalMessage("$commandName failed because of an internal error") + .withInternalMessage("$commandName failed because of an internal error") + + else -> + failureReason + .withFailureOrigin(FailureReason.FailureOrigin.AIRBYTE_PLATFORM) + .withExternalMessage("$commandName failed because of an internal error") + .withInternalMessage("$commandName failed because of an internal error") + } + return failureReason + } +} diff --git a/airbyte-commons-worker/src/test/java/io/airbyte/workers/ReplicationInputHydratorTest.java b/airbyte-commons-worker/src/test/java/io/airbyte/workers/ReplicationInputHydratorTest.java index 633c54768e5..1c16f6f6aa4 100644 --- a/airbyte-commons-worker/src/test/java/io/airbyte/workers/ReplicationInputHydratorTest.java +++ b/airbyte-commons-worker/src/test/java/io/airbyte/workers/ReplicationInputHydratorTest.java @@ -5,6 +5,8 @@ package io.airbyte.workers; import static org.junit.Assert.assertEquals; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -21,6 +23,7 @@ import io.airbyte.api.client.model.generated.AirbyteStreamAndConfiguration; import io.airbyte.api.client.model.generated.AirbyteStreamConfiguration; import io.airbyte.api.client.model.generated.CatalogDiff; +import io.airbyte.api.client.model.generated.ConnectionAndJobIdRequestBody; import io.airbyte.api.client.model.generated.ConnectionIdRequestBody; import io.airbyte.api.client.model.generated.ConnectionRead; import io.airbyte.api.client.model.generated.ConnectionState; @@ -39,6 +42,7 @@ import io.airbyte.config.SyncResourceRequirements; import io.airbyte.config.helpers.StateMessageHelper; import io.airbyte.config.secrets.SecretsRepositoryReader; +import io.airbyte.featureflag.ActivateRefreshes; import io.airbyte.featureflag.AutoBackfillOnNewColumns; import io.airbyte.featureflag.FeatureFlagClient; import io.airbyte.featureflag.Flag; @@ -51,7 +55,8 
@@ import java.util.List; import java.util.UUID; import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.ValueSource; /** * Tests for the replication activity specifically. @@ -99,7 +104,8 @@ class ReplicationInputHydratorTest { } }] """)); - private static final JobRunConfig JOB_RUN_CONFIG = new JobRunConfig(); + private static final Long JOB_ID = 123L; + private static final JobRunConfig JOB_RUN_CONFIG = new JobRunConfig().withJobId(JOB_ID.toString()); private static final IntegrationLauncherConfig DESTINATION_LAUNCHER_CONFIG = new IntegrationLauncherConfig(); private static final IntegrationLauncherConfig SOURCE_LAUNCHER_CONFIG = new IntegrationLauncherConfig(); private static final SyncResourceRequirements SYNC_RESOURCE_REQUIREMENTS = new SyncResourceRequirements(); @@ -133,9 +139,6 @@ void setup() throws ApiException { when(airbyteApiClient.getStateApi()).thenReturn(stateApi); when(airbyteApiClient.getJobsApi()).thenReturn(jobsApi); when(airbyteApiClient.getSecretPersistenceConfigApi()).thenReturn(secretsPersistenceConfigApi); - when(connectionApi.getConnection(new ConnectionIdRequestBody().connectionId(CONNECTION_ID))).thenReturn(new ConnectionRead() - .connectionId(CONNECTION_ID) - .syncCatalog(SYNC_CATALOG)); when(stateApi.getState(new ConnectionIdRequestBody().connectionId(CONNECTION_ID))).thenReturn(CONNECTION_STATE_RESPONSE); } @@ -172,8 +175,14 @@ private ReplicationActivityInput getDefaultReplicationActivityInputForTest() { false); } - @Test - void testGenerateReplicationInputRetrievesInputs() throws Exception { + @ParameterizedTest + @ValueSource(booleans = {true, false}) + void testGenerateReplicationInputRetrievesInputs(final boolean withRefresh) throws Exception { + if (withRefresh) { + mockRefresh(); + } else { + mockNonRefresh(); + } // Verify that we get the state and catalog from the API. 
final ReplicationInputHydrator replicationInputHydrator = getReplicationInputHydrator(); @@ -184,8 +193,14 @@ void testGenerateReplicationInputRetrievesInputs() throws Exception { assertEquals(TEST_STREAM_NAME, replicationInput.getCatalog().getStreams().get(0).getStream().getName()); } - @Test - void testGenerateReplicationInputHandlesResets() throws Exception { + @ParameterizedTest + @ValueSource(booleans = {true, false}) + void testGenerateReplicationInputHandlesResets(final boolean withRefresh) throws Exception { + if (withRefresh) { + mockRefresh(); + } else { + mockNonRefresh(); + } // Verify that if the sync is a reset, we retrieve the job info and handle the streams accordingly. final ReplicationInputHydrator replicationInputHydrator = getReplicationInputHydrator(); final ReplicationActivityInput input = getDefaultReplicationActivityInputForTest(); @@ -193,20 +208,23 @@ void testGenerateReplicationInputHandlesResets() throws Exception { when(jobsApi.getLastReplicationJob(new ConnectionIdRequestBody().connectionId(CONNECTION_ID))).thenReturn( new JobOptionalRead().job(new JobRead().resetConfig(new ResetConfig().streamsToReset(List.of( new StreamDescriptor().name(TEST_STREAM_NAME).namespace(TEST_STREAM_NAMESPACE)))))); - when(connectionApi.getConnection(new ConnectionIdRequestBody().connectionId(CONNECTION_ID))).thenReturn(new ConnectionRead() - .connectionId(CONNECTION_ID) - .syncCatalog(SYNC_CATALOG)); final var replicationInput = replicationInputHydrator.getHydratedReplicationInput(input); assertEquals(1, replicationInput.getCatalog().getStreams().size()); assertEquals(io.airbyte.protocol.models.SyncMode.FULL_REFRESH, replicationInput.getCatalog().getStreams().get(0).getSyncMode()); } - @Test - void testGenerateReplicationInputHandlesBackfills() throws Exception { + @ParameterizedTest + @ValueSource(booleans = {true, false}) + void testGenerateReplicationInputHandlesBackfills(final boolean withRefresh) throws Exception { + if (withRefresh) { + 
mockRefresh(); + } else { + mockNonRefresh(); + } // Verify that if backfill is enabled, and we have an appropriate diff, then we clear the state for // the affected streams. mockEnableFeatureFlagForWorkspace(AutoBackfillOnNewColumns.INSTANCE, WORKSPACE_ID); - mockEnableBackfillForConnection(); + mockEnableBackfillForConnection(withRefresh); final ReplicationInputHydrator replicationInputHydrator = getReplicationInputHydrator(); final ReplicationActivityInput input = getDefaultReplicationActivityInputForTest(); input.setSchemaRefreshOutput(new RefreshSchemaActivityOutput(CATALOG_DIFF)); @@ -219,11 +237,36 @@ private void mockEnableFeatureFlagForWorkspace(final Flag flag, final U when(featureFlagClient.boolVariation(flag, new Workspace(workspaceId))).thenReturn(true); } - private void mockEnableBackfillForConnection() throws ApiException { - when(connectionApi.getConnection(new ConnectionIdRequestBody().connectionId(CONNECTION_ID))).thenReturn(new ConnectionRead() - .connectionId(CONNECTION_ID) - .syncCatalog(SYNC_CATALOG) - .backfillPreference(SchemaChangeBackfillPreference.ENABLED)); + private void mockEnableBackfillForConnection(final boolean withRefresh) throws ApiException { + if (withRefresh) { + when(connectionApi.getConnectionForJob(new ConnectionAndJobIdRequestBody().connectionId(CONNECTION_ID).jobId(JOB_ID))) + .thenReturn(new ConnectionRead() + .connectionId(CONNECTION_ID) + .syncCatalog(SYNC_CATALOG) + .backfillPreference(SchemaChangeBackfillPreference.ENABLED)); + } else { + when(connectionApi.getConnection(new ConnectionIdRequestBody().connectionId(CONNECTION_ID))) + .thenReturn(new ConnectionRead() + .connectionId(CONNECTION_ID) + .syncCatalog(SYNC_CATALOG) + .backfillPreference(SchemaChangeBackfillPreference.ENABLED)); + } + } + + private void mockRefresh() throws ApiException { + when(featureFlagClient.boolVariation(eq(ActivateRefreshes.INSTANCE), any())).thenReturn(true); + when(connectionApi.getConnectionForJob(new 
ConnectionAndJobIdRequestBody().connectionId(CONNECTION_ID).jobId(JOB_ID))) + .thenReturn(new ConnectionRead() + .connectionId(CONNECTION_ID) + .syncCatalog(SYNC_CATALOG)); + } + + private void mockNonRefresh() throws ApiException { + when(featureFlagClient.boolVariation(eq(ActivateRefreshes.INSTANCE), any())).thenReturn(false); + when(connectionApi.getConnection(new ConnectionIdRequestBody().connectionId(CONNECTION_ID))) + .thenReturn(new ConnectionRead() + .connectionId(CONNECTION_ID) + .syncCatalog(SYNC_CATALOG)); } } diff --git a/airbyte-commons-worker/src/test/java/io/airbyte/workers/StateWithIdTest.java b/airbyte-commons-worker/src/test/java/io/airbyte/workers/StateWithIdTest.java new file mode 100644 index 00000000000..0911e49bd19 --- /dev/null +++ b/airbyte-commons-worker/src/test/java/io/airbyte/workers/StateWithIdTest.java @@ -0,0 +1,129 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.workers; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotEquals; + +import com.fasterxml.jackson.databind.JsonNode; +import io.airbyte.commons.json.Jsons; +import io.airbyte.protocol.models.AirbyteGlobalState; +import io.airbyte.protocol.models.AirbyteMessage; +import io.airbyte.protocol.models.AirbyteStateMessage; +import io.airbyte.protocol.models.AirbyteStateStats; +import io.airbyte.protocol.models.AirbyteStreamState; +import io.airbyte.protocol.models.StreamDescriptor; +import io.airbyte.workers.models.StateWithId; +import java.util.Collections; +import java.util.Optional; +import java.util.Queue; +import java.util.Random; +import java.util.UUID; +import java.util.concurrent.LinkedBlockingQueue; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.ValueSource; +import org.testcontainers.shaded.com.google.common.collect.ImmutableMap; + +public class StateWithIdTest { + + 
private static final Queue EXPECTED_IDS = new LinkedBlockingQueue<>(); + + static { + EXPECTED_IDS.add(1); + EXPECTED_IDS.add(2); + EXPECTED_IDS.add(3); + EXPECTED_IDS.add(4); + } + + @ParameterizedTest + @ValueSource(strings = { + "{\"{\\\"schema\\\":null,\\\"payload\\\":[\\\"db_jagkjrgxhw\\\",{\\\"server\\\":\\\"db_jagkjrgxhw\\\"}]}\":" + + "\"{\\\"last_snapshot_record\\\":true,\\\"lsn\\\":23896935,\\\"txId\\\":505,\\\"ts_usec\\\":1659422332985000,\\\"snapshot\\\":true}\"}", + "{\"[\\\"db_jagkjrgxhw\\\",{\\\"server\\\":\\\"db_jagkjrgxhw\\\"}]}\":\"{\\\"last_snapshot_record\\\":true," + + "\\\"lsn\\\":23896935,\\\"txId\\\":505,\\\"ts_usec\\\":1659422332985000,\\\"snapshot\\\":true}\"}", + "{\"[\\\"db_jagkjrgxhw\\\",{\\\"server\\\":\\\"db_jagkjrgxhw\\\"}]\":\"{\\\"transaction_id\\\":null,\\\"lsn\\\":" + + "23896935,\\\"txId\\\":505,\\\"ts_usec\\\":1677520006097984}\"}" + }) + public void globalStateTest(final String cdcState) { + final Random random = new Random(); + final double recordCount = random.nextDouble(); + final String cursorName = UUID.randomUUID().toString(); + + final AirbyteStateMessage originalState = getAirbyteGlobalStateMessage(cdcState, recordCount, cursorName); + final AirbyteMessage originalMessage = new AirbyteMessage().withType(AirbyteMessage.Type.STATE) + .withState(originalState); + + final AirbyteStateMessage copyOfOriginalState = getAirbyteGlobalStateMessage(cdcState, recordCount, cursorName); + final AirbyteMessage copyOfOriginal = new AirbyteMessage().withType(AirbyteMessage.Type.STATE) + .withState(copyOfOriginalState); + + assertEquals(originalMessage, copyOfOriginal); + final Integer expectedId = EXPECTED_IDS.poll(); + final AirbyteMessage stateMessageWithIdAdded = StateWithId.attachIdToStateMessageFromSource(copyOfOriginal); + assertNotEquals(originalMessage, stateMessageWithIdAdded); + assertEquals(originalMessage.getState().getGlobal(), stateMessageWithIdAdded.getState().getGlobal()); + assertEquals(expectedId, 
StateWithId.getIdFromStateMessage(stateMessageWithIdAdded).orElseThrow()); + + final String serializedMessage = Jsons.serialize(stateMessageWithIdAdded); + Optional deserializedMessage = Jsons.tryDeserializeExact(serializedMessage, AirbyteMessage.class); + assertEquals(stateMessageWithIdAdded, deserializedMessage.orElseThrow()); + assertEquals(originalMessage.getState().getGlobal(), deserializedMessage.orElseThrow().getState().getGlobal()); + assertEquals(expectedId, StateWithId.getIdFromStateMessage(deserializedMessage.orElseThrow()).orElseThrow()); + } + + @Test + public void streamStateTest() { + final Random random = new Random(); + final double recordCount = random.nextDouble(); + final String cursorName = UUID.randomUUID().toString(); + + final AirbyteStateMessage originalState = getAirbyteStreamStateMessage(recordCount, cursorName); + final AirbyteMessage originalMessage = new AirbyteMessage().withType(AirbyteMessage.Type.STATE) + .withState(originalState); + + final AirbyteStateMessage copyOfOriginalState = getAirbyteStreamStateMessage(recordCount, cursorName); + final AirbyteMessage copyOfOriginal = new AirbyteMessage().withType(AirbyteMessage.Type.STATE) + .withState(copyOfOriginalState); + + assertEquals(originalMessage, copyOfOriginal); + final Integer expectedId = EXPECTED_IDS.poll(); + final AirbyteMessage stateMessageWithIdAdded = StateWithId.attachIdToStateMessageFromSource(copyOfOriginal); + assertNotEquals(originalMessage, stateMessageWithIdAdded); + assertEquals(originalMessage.getState().getGlobal(), stateMessageWithIdAdded.getState().getGlobal()); + assertEquals(expectedId, StateWithId.getIdFromStateMessage(stateMessageWithIdAdded).orElseThrow()); + + final String serializedMessage = Jsons.serialize(stateMessageWithIdAdded); + Optional deserializedMessage = Jsons.tryDeserializeExact(serializedMessage, AirbyteMessage.class); + assertEquals(stateMessageWithIdAdded, deserializedMessage.orElseThrow()); + 
assertEquals(originalMessage.getState().getStream(), deserializedMessage.orElseThrow().getState().getStream()); + assertEquals(expectedId, StateWithId.getIdFromStateMessage(deserializedMessage.orElseThrow()).orElseThrow()); + } + + private static AirbyteStateMessage getAirbyteStreamStateMessage(final double recordCount, final String cursorName) { + final AirbyteStreamState streamState = new AirbyteStreamState() + .withStreamState(Jsons.jsonNode(ImmutableMap.of(cursorName, 1))) + .withStreamDescriptor(new StreamDescriptor().withName(cursorName).withNamespace(cursorName)); + final AirbyteStateStats airbyteStateStats = new AirbyteStateStats().withRecordCount(recordCount); + return new AirbyteStateMessage() + .withType(AirbyteStateMessage.AirbyteStateType.STREAM) + .withStream(streamState) + .withSourceStats(airbyteStateStats); + } + + private static AirbyteStateMessage getAirbyteGlobalStateMessage(final String cdcState, final double recordCount, final String cursorName) { + final JsonNode cdcStateAsJson = Jsons.deserialize(cdcState); + final AirbyteGlobalState globalState = new AirbyteGlobalState().withSharedState(cdcStateAsJson).withStreamStates(Collections.singletonList( + new AirbyteStreamState() + .withStreamState(Jsons.jsonNode(ImmutableMap.of(cursorName, 1))) + .withStreamDescriptor(new StreamDescriptor().withName(cursorName).withNamespace(cursorName)))); + final AirbyteStateStats airbyteStateStats = new AirbyteStateStats().withRecordCount(recordCount); + return new AirbyteStateMessage() + .withType(AirbyteStateMessage.AirbyteStateType.GLOBAL) + .withGlobal(globalState) + .withSourceStats(airbyteStateStats); + } + +} diff --git a/airbyte-commons-worker/src/test/java/io/airbyte/workers/general/BufferedReplicationWorkerTest.java b/airbyte-commons-worker/src/test/java/io/airbyte/workers/general/BufferedReplicationWorkerTest.java index 9ee0246aecd..eb588f0836e 100644 --- 
a/airbyte-commons-worker/src/test/java/io/airbyte/workers/general/BufferedReplicationWorkerTest.java +++ b/airbyte-commons-worker/src/test/java/io/airbyte/workers/general/BufferedReplicationWorkerTest.java @@ -28,8 +28,9 @@ class BufferedReplicationWorkerTest extends ReplicationWorkerTest { ReplicationWorker getDefaultReplicationWorker(final boolean fieldSelectionEnabled) { final var fieldSelector = new FieldSelector(recordSchemaValidator, workerMetricReporter, fieldSelectionEnabled, false); replicationWorkerHelper = spy(new ReplicationWorkerHelper(airbyteMessageDataExtractor, fieldSelector, mapper, messageTracker, syncPersistence, - replicationAirbyteMessageEventPublishingHelper, new ThreadedTimeTracker(), onReplicationRunning, workloadApi, false, analyticsMessageTracker, - Optional.empty())); + replicationAirbyteMessageEventPublishingHelper, new ThreadedTimeTracker(), onReplicationRunning, workloadApiClient, false, + analyticsMessageTracker, + Optional.empty(), sourceApi, destinationApi, streamStatusCompletionTracker)); return new BufferedReplicationWorker( JOB_ID, JOB_ATTEMPT, @@ -42,7 +43,8 @@ replicationAirbyteMessageEventPublishingHelper, new ThreadedTimeTracker(), onRep replicationWorkerHelper, destinationTimeoutMonitor, getQueueType(), - OptionalInt.of(1)); + OptionalInt.of(1), + streamStatusCompletionTracker); } public BufferedReplicationWorkerType getQueueType() { diff --git a/airbyte-commons-worker/src/test/java/io/airbyte/workers/general/DefaultReplicationWorkerTest.java b/airbyte-commons-worker/src/test/java/io/airbyte/workers/general/DefaultReplicationWorkerTest.java index fb0438445d5..f14f1b57e96 100644 --- a/airbyte-commons-worker/src/test/java/io/airbyte/workers/general/DefaultReplicationWorkerTest.java +++ b/airbyte-commons-worker/src/test/java/io/airbyte/workers/general/DefaultReplicationWorkerTest.java @@ -21,8 +21,9 @@ class DefaultReplicationWorkerTest extends ReplicationWorkerTest { ReplicationWorker getDefaultReplicationWorker(final boolean 
fieldSelectionEnabled) { final var fieldSelector = new FieldSelector(recordSchemaValidator, workerMetricReporter, fieldSelectionEnabled, false); replicationWorkerHelper = spy(new ReplicationWorkerHelper(airbyteMessageDataExtractor, fieldSelector, mapper, messageTracker, syncPersistence, - replicationAirbyteMessageEventPublishingHelper, new ThreadedTimeTracker(), onReplicationRunning, workloadApi, false, analyticsMessageTracker, - Optional.empty())); + replicationAirbyteMessageEventPublishingHelper, new ThreadedTimeTracker(), onReplicationRunning, workloadApiClient, false, + analyticsMessageTracker, + Optional.empty(), sourceApi, destinationApi, streamStatusCompletionTracker)); return new DefaultReplicationWorker( JOB_ID, JOB_ATTEMPT, @@ -33,7 +34,8 @@ replicationAirbyteMessageEventPublishingHelper, new ThreadedTimeTracker(), onRep heartbeatTimeoutChaperone, replicationFeatureFlagReader, replicationWorkerHelper, - destinationTimeoutMonitor); + destinationTimeoutMonitor, + streamStatusCompletionTracker); } // DefaultReplicationWorkerTests. 
diff --git a/airbyte-commons-worker/src/test/java/io/airbyte/workers/general/ReplicationWorkerHelperTest.java b/airbyte-commons-worker/src/test/java/io/airbyte/workers/general/ReplicationWorkerHelperTest.java index 4c52d03824b..fbb4cbb8166 100644 --- a/airbyte-commons-worker/src/test/java/io/airbyte/workers/general/ReplicationWorkerHelperTest.java +++ b/airbyte-commons-worker/src/test/java/io/airbyte/workers/general/ReplicationWorkerHelperTest.java @@ -17,6 +17,9 @@ import static org.mockito.Mockito.when; import com.fasterxml.jackson.core.JsonProcessingException; +import io.airbyte.api.client.WorkloadApiClient; +import io.airbyte.api.client.generated.DestinationApi; +import io.airbyte.api.client.generated.SourceApi; import io.airbyte.commons.concurrency.VoidCallable; import io.airbyte.commons.converters.ThreadedTimeTracker; import io.airbyte.persistence.job.models.ReplicationInput; @@ -29,6 +32,7 @@ import io.airbyte.workers.context.ReplicationContext; import io.airbyte.workers.context.ReplicationFeatureFlags; import io.airbyte.workers.helper.AirbyteMessageDataExtractor; +import io.airbyte.workers.helper.StreamStatusCompletionTracker; import io.airbyte.workers.internal.AirbyteDestination; import io.airbyte.workers.internal.AirbyteMapper; import io.airbyte.workers.internal.AirbyteSource; @@ -54,6 +58,8 @@ class ReplicationWorkerHelperTest { private AirbyteMessageTracker messageTracker; private SyncPersistence syncPersistence; private AnalyticsMessageTracker analyticsMessageTracker; + private StreamStatusCompletionTracker streamStatusCompletionTracker; + private WorkloadApiClient workloadApiClient; @BeforeEach void setUp() { @@ -62,7 +68,10 @@ void setUp() { syncPersistence = mock(SyncPersistence.class); messageTracker = mock(AirbyteMessageTracker.class); analyticsMessageTracker = mock(AnalyticsMessageTracker.class); + streamStatusCompletionTracker = mock(StreamStatusCompletionTracker.class); + workloadApiClient = mock(WorkloadApiClient.class); 
when(messageTracker.getSyncStatsTracker()).thenReturn(syncStatsTracker); + when(workloadApiClient.getWorkloadApi()).thenReturn(mock(WorkloadApi.class)); replicationWorkerHelper = spy(new ReplicationWorkerHelper( mock(AirbyteMessageDataExtractor.class), mock(FieldSelector.class), @@ -72,20 +81,27 @@ void setUp() { mock(ReplicationAirbyteMessageEventPublishingHelper.class), mock(ThreadedTimeTracker.class), mock(VoidCallable.class), - mock(WorkloadApi.class), + workloadApiClient, false, analyticsMessageTracker, - Optional.empty())); + Optional.empty(), + mock(SourceApi.class), + mock(DestinationApi.class), + streamStatusCompletionTracker)); } @Test void testGetReplicationOutput() throws JsonProcessingException { // Need to pass in a replication context + final ConfiguredAirbyteCatalog catalog = new ConfiguredAirbyteCatalog().withAdditionalProperty("test", "test"); + final ReplicationContext replicationContext = new ReplicationContext(true, UUID.randomUUID(), UUID.randomUUID(), UUID.randomUUID(), 0L, + 1, UUID.randomUUID(), SOURCE_IMAGE, DESTINATION_IMAGE, UUID.randomUUID(), UUID.randomUUID()); replicationWorkerHelper.initialize( - new ReplicationContext(true, UUID.randomUUID(), UUID.randomUUID(), UUID.randomUUID(), 0L, - 1, UUID.randomUUID(), SOURCE_IMAGE, DESTINATION_IMAGE), + replicationContext, mock(ReplicationFeatureFlags.class), - mock(Path.class)); + mock(Path.class), + catalog); + verify(streamStatusCompletionTracker).startTracking(catalog, replicationContext); // Need to have a configured catalog for getReplicationOutput replicationWorkerHelper.startDestination( mock(AirbyteDestination.class), @@ -106,12 +122,13 @@ void testGetReplicationOutput() throws JsonProcessingException { void testAnalyticsMessageHandling() { final ReplicationContext context = new ReplicationContext(true, UUID.randomUUID(), UUID.randomUUID(), UUID.randomUUID(), 0L, - 1, UUID.randomUUID(), SOURCE_IMAGE, DESTINATION_IMAGE); + 1, UUID.randomUUID(), SOURCE_IMAGE, DESTINATION_IMAGE, 
UUID.randomUUID(), UUID.randomUUID()); // Need to pass in a replication context replicationWorkerHelper.initialize( context, mock(ReplicationFeatureFlags.class), - mock(Path.class)); + mock(Path.class), + mock(ConfiguredAirbyteCatalog.class)); // Need to have a configured catalog for getReplicationOutput replicationWorkerHelper.startDestination( mock(AirbyteDestination.class), diff --git a/airbyte-commons-worker/src/test/java/io/airbyte/workers/general/ReplicationWorkerTest.java b/airbyte-commons-worker/src/test/java/io/airbyte/workers/general/ReplicationWorkerTest.java index 4a839e40474..6cbc5449fcf 100644 --- a/airbyte-commons-worker/src/test/java/io/airbyte/workers/general/ReplicationWorkerTest.java +++ b/airbyte-commons-worker/src/test/java/io/airbyte/workers/general/ReplicationWorkerTest.java @@ -31,6 +31,11 @@ import static org.mockito.Mockito.when; import com.fasterxml.jackson.databind.node.ObjectNode; +import io.airbyte.api.client.WorkloadApiClient; +import io.airbyte.api.client.generated.DestinationApi; +import io.airbyte.api.client.generated.SourceApi; +import io.airbyte.api.client.model.generated.DestinationRead; +import io.airbyte.api.client.model.generated.SourceRead; import io.airbyte.api.client.model.generated.StreamStatusIncompleteRunCause; import io.airbyte.commons.concurrency.VoidCallable; import io.airbyte.commons.converters.ConnectorConfigUpdater; @@ -61,6 +66,7 @@ import io.airbyte.protocol.models.AirbyteLogMessage.Level; import io.airbyte.protocol.models.AirbyteMessage; import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair; +import io.airbyte.protocol.models.AirbyteStreamStatusTraceMessage; import io.airbyte.protocol.models.AirbyteTraceMessage; import io.airbyte.protocol.models.Config; import io.airbyte.protocol.models.StreamDescriptor; @@ -73,6 +79,7 @@ import io.airbyte.workers.exception.WorkerException; import io.airbyte.workers.helper.AirbyteMessageDataExtractor; import io.airbyte.workers.helper.FailureHelper; +import 
io.airbyte.workers.helper.StreamStatusCompletionTracker; import io.airbyte.workers.internal.AirbyteDestination; import io.airbyte.workers.internal.AirbyteSource; import io.airbyte.workers.internal.AnalyticsMessageTracker; @@ -143,12 +150,15 @@ abstract class ReplicationWorkerTest { protected static final AirbyteMessage STATE_MESSAGE = AirbyteMessageUtils.createStateMessage(STREAM_NAME, "checkpoint", "1"); protected static final AirbyteTraceMessage ERROR_TRACE_MESSAGE = AirbyteMessageUtils.createErrorTraceMessage("a connector error occurred", Double.valueOf(123)); + protected static final Config CONNECTOR_CONFIG = new Config().withAdditionalProperty("my_key", "my_new_value"); protected static final AirbyteMessage CONFIG_MESSAGE = AirbyteMessageUtils.createConfigControlMessage(CONNECTOR_CONFIG, 1D); protected static final String STREAM1 = "stream1"; protected static final String NAMESPACE = "namespace"; protected static final String INDUCED_EXCEPTION = "induced exception"; + protected static final UUID SOURCE_DEFINITION_ID = UUID.randomUUID(); + protected static final UUID DESTINATION_DEFINITION_ID = UUID.randomUUID(); protected Path jobRoot; protected SimpleAirbyteSource sourceStub; @@ -175,8 +185,12 @@ abstract class ReplicationWorkerTest { protected ReplicationWorkerHelper replicationWorkerHelper; protected WorkloadApi workloadApi; + protected WorkloadApiClient workloadApiClient; protected AnalyticsMessageTracker analyticsMessageTracker; + protected SourceApi sourceApi; + protected DestinationApi destinationApi; + protected StreamStatusCompletionTracker streamStatusCompletionTracker; ReplicationWorker getDefaultReplicationWorker() { return getDefaultReplicationWorker(false); @@ -221,9 +235,17 @@ void setup() throws Exception { destinationTimeoutMonitor = mock(DestinationTimeoutMonitor.class); replicationAirbyteMessageEventPublishingHelper = mock(ReplicationAirbyteMessageEventPublishingHelper.class); workloadApi = mock(WorkloadApi.class); + workloadApiClient = 
mock(WorkloadApiClient.class); + when(workloadApiClient.getWorkloadApi()).thenReturn(workloadApi); analyticsMessageTracker = mock(AnalyticsMessageTracker.class); + sourceApi = mock(SourceApi.class); + when(sourceApi.getSource(any())).thenReturn(new SourceRead().sourceDefinitionId(SOURCE_DEFINITION_ID)); + destinationApi = mock(DestinationApi.class); + when(destinationApi.getDestination(any())).thenReturn(new DestinationRead().destinationDefinitionId(DESTINATION_DEFINITION_ID)); + streamStatusCompletionTracker = mock(StreamStatusCompletionTracker.class); + when(messageTracker.getSyncStatsTracker()).thenReturn(syncStatsTracker); when(mapper.mapCatalog(destinationConfig.getCatalog())).thenReturn(destinationConfig.getCatalog()); @@ -532,22 +554,25 @@ void testReplicationRunnableSourceUpdateConfig() throws Exception { verify(replicationAirbyteMessageEventPublishingHelper).publishStatusEvent(new ReplicationAirbyteMessageEvent(AirbyteMessageOrigin.SOURCE, CONFIG_MESSAGE, new ReplicationContext(false, replicationInput.getConnectionId(), replicationInput.getSourceId(), replicationInput.getDestinationId(), - Long.valueOf(JOB_ID), JOB_ATTEMPT, replicationInput.getWorkspaceId(), SOURCE_IMAGE, DESTINATION_IMAGE))); + Long.valueOf(JOB_ID), JOB_ATTEMPT, replicationInput.getWorkspaceId(), SOURCE_IMAGE, DESTINATION_IMAGE, SOURCE_DEFINITION_ID, + DESTINATION_DEFINITION_ID))); } @Test void testSourceConfigPersistError() throws Exception { sourceStub.setMessages(CONFIG_MESSAGE); - final String persistErrorMessage = "there was a problem persisting the new config"; doThrow(new RuntimeException(persistErrorMessage)) .when(replicationAirbyteMessageEventPublishingHelper) .publishStatusEvent(new ReplicationAirbyteMessageEvent(AirbyteMessageOrigin.SOURCE, CONFIG_MESSAGE, new ReplicationContext(false, replicationInput.getConnectionId(), replicationInput.getSourceId(), replicationInput.getDestinationId(), - Long.valueOf(JOB_ID), JOB_ATTEMPT, replicationInput.getWorkspaceId(), SOURCE_IMAGE, 
DESTINATION_IMAGE))); + Long.valueOf(JOB_ID), JOB_ATTEMPT, replicationInput.getWorkspaceId(), SOURCE_IMAGE, DESTINATION_IMAGE, SOURCE_DEFINITION_ID, + DESTINATION_DEFINITION_ID))); final ReplicationWorker worker = getDefaultReplicationWorker(); + doReturn(SOURCE_DEFINITION_ID).when(replicationWorkerHelper).getSourceDefinitionIdForSourceId(replicationInput.getSourceId()); + doReturn(DESTINATION_DEFINITION_ID).when(replicationWorkerHelper).getDestinationDefinitionIdForDestinationId(replicationInput.getDestinationId()); final ReplicationOutput output = worker.run(replicationInput, jobRoot); assertEquals(ReplicationStatus.FAILED, output.getReplicationAttemptSummary().getStatus()); @@ -566,7 +591,8 @@ void testReplicationRunnableDestinationUpdateConfig() throws Exception { verify(replicationAirbyteMessageEventPublishingHelper).publishStatusEvent(new ReplicationAirbyteMessageEvent(AirbyteMessageOrigin.DESTINATION, CONFIG_MESSAGE, new ReplicationContext(false, replicationInput.getConnectionId(), replicationInput.getSourceId(), replicationInput.getDestinationId(), - Long.valueOf(JOB_ID), JOB_ATTEMPT, replicationInput.getWorkspaceId(), SOURCE_IMAGE, DESTINATION_IMAGE))); + Long.valueOf(JOB_ID), JOB_ATTEMPT, replicationInput.getWorkspaceId(), SOURCE_IMAGE, DESTINATION_IMAGE, SOURCE_DEFINITION_ID, + DESTINATION_DEFINITION_ID))); } @Test @@ -580,7 +606,8 @@ void testDestinationConfigPersistError() throws Exception { .publishStatusEvent(new ReplicationAirbyteMessageEvent(AirbyteMessageOrigin.DESTINATION, CONFIG_MESSAGE, new ReplicationContext(false, replicationInput.getConnectionId(), replicationInput.getSourceId(), replicationInput.getDestinationId(), - Long.valueOf(JOB_ID), JOB_ATTEMPT, replicationInput.getWorkspaceId(), SOURCE_IMAGE, DESTINATION_IMAGE))); + Long.valueOf(JOB_ID), JOB_ATTEMPT, replicationInput.getWorkspaceId(), SOURCE_IMAGE, DESTINATION_IMAGE, SOURCE_DEFINITION_ID, + DESTINATION_DEFINITION_ID))); final ReplicationWorker worker = getDefaultReplicationWorker(); 
@@ -605,7 +632,7 @@ void testReplicationRunnableDestinationFailure() throws Exception { @Test void testReplicationRunnableDestinationFailureViaTraceMessage() throws Exception { final FailureReason failureReason = FailureHelper.destinationFailure(ERROR_TRACE_MESSAGE, Long.valueOf(JOB_ID), JOB_ATTEMPT); - when(messageTracker.errorTraceMessageFailure(Long.parseLong(JOB_ID), JOB_ATTEMPT)).thenReturn(failureReason); + when(messageTracker.errorTraceMessageFailure(Long.parseLong(JOB_ID), JOB_ATTEMPT)).thenReturn(List.of(failureReason)); final ReplicationWorker worker = getDefaultReplicationWorker(); @@ -1137,12 +1164,21 @@ void testGetFailureReason() { assertEquals(failureReason.getFailureOrigin(), FailureOrigin.SOURCE); failureReason = ReplicationWorkerHelper.getFailureReason(new DestinationException(""), jobId, attempt); assertEquals(failureReason.getFailureOrigin(), FailureOrigin.DESTINATION); - failureReason = ReplicationWorkerHelper.getFailureReason(new HeartbeatTimeoutChaperone.HeartbeatTimeoutException(10, 15), jobId, attempt); + failureReason = ReplicationWorkerHelper.getFailureReason(new HeartbeatTimeoutChaperone.HeartbeatTimeoutException(10000, 15000), jobId, attempt); assertEquals(failureReason.getFailureOrigin(), FailureOrigin.SOURCE); assertEquals(failureReason.getFailureType(), FailureReason.FailureType.HEARTBEAT_TIMEOUT); + assertEquals( + "Airbyte detected that the Source didn't send any records in the last 15 seconds, exceeding the configured 10 seconds threshold. Airbyte will try reading again on the next sync. 
Please see https://docs.airbyte.com/understanding-airbyte/heartbeats for more info.", + failureReason.getExternalMessage()); + assertEquals("Last record seen 15 seconds ago, exceeding the threshold of 10 seconds.", failureReason.getInternalMessage()); failureReason = ReplicationWorkerHelper.getFailureReason(new RuntimeException(), jobId, attempt); assertEquals(failureReason.getFailureOrigin(), FailureOrigin.REPLICATION); - failureReason = ReplicationWorkerHelper.getFailureReason(new TimeoutException(""), jobId, attempt); + failureReason = ReplicationWorkerHelper.getFailureReason(new TimeoutException(10000, 15000), jobId, attempt); + assertEquals( + "Airbyte detected that the Destination didn't make progress in the last 15 seconds, exceeding the configured 10 seconds threshold. Airbyte will try reading again on the next sync. Please see https://docs.airbyte.com/understanding-airbyte/heartbeats for more info.", + failureReason.getExternalMessage()); + assertEquals("Last action 15 seconds ago, exceeding the threshold of 10 seconds.", failureReason.getInternalMessage()); + System.out.println(failureReason.getInternalMessage()); assertEquals(failureReason.getFailureOrigin(), FailureOrigin.DESTINATION); assertEquals(failureReason.getFailureType(), FailureType.DESTINATION_TIMEOUT); } @@ -1171,6 +1207,35 @@ void testCallHeartbeat() throws WorkerException { verify(replicationWorkerHelper).getWorkloadStatusHeartbeat(any()); } + @Test + void testStreamStatusCompletionTracking() throws Exception { + sourceStub.setMessages(RECORD_MESSAGE1); + + final ReplicationWorker worker = getDefaultReplicationWorker(); + + worker.run(replicationInput, jobRoot); + + verify(streamStatusCompletionTracker).startTracking(any(), any()); + + verify(streamStatusCompletionTracker).finalize(0, mapper); + } + + @Test + void testStreamStatusCompletionTrackingTrackSourceMessage() throws Exception { + + AirbyteMessage streamStatus = AirbyteMessageUtils.createStatusTraceMessage(new 
StreamDescriptor().withName(STREAM_NAME), + AirbyteStreamStatusTraceMessage.AirbyteStreamStatus.COMPLETE); + sourceStub.setMessages(RECORD_MESSAGE1, streamStatus); + + final ReplicationWorker worker = getDefaultReplicationWorker(); + + worker.run(replicationInput, jobRoot); + + verify(streamStatusCompletionTracker).startTracking(any(), any()); + verify(streamStatusCompletionTracker).track(streamStatus.getTrace().getStreamStatus()); + verify(streamStatusCompletionTracker).finalize(0, mapper); + } + private ReplicationContext simpleContext(final boolean isReset) { return new ReplicationContext( isReset, @@ -1181,7 +1246,9 @@ private ReplicationContext simpleContext(final boolean isReset) { JOB_ATTEMPT, replicationInput.getWorkspaceId(), SOURCE_IMAGE, - DESTINATION_IMAGE); + DESTINATION_IMAGE, + SOURCE_DEFINITION_ID, + DESTINATION_DEFINITION_ID); } } diff --git a/airbyte-commons-worker/src/test/java/io/airbyte/workers/general/performance/BufferedReplicationWorkerPerformanceTest.java b/airbyte-commons-worker/src/test/java/io/airbyte/workers/general/performance/BufferedReplicationWorkerPerformanceTest.java index 3c48cfab333..4cada03a333 100644 --- a/airbyte-commons-worker/src/test/java/io/airbyte/workers/general/performance/BufferedReplicationWorkerPerformanceTest.java +++ b/airbyte-commons-worker/src/test/java/io/airbyte/workers/general/performance/BufferedReplicationWorkerPerformanceTest.java @@ -11,6 +11,7 @@ import io.airbyte.workers.general.ReplicationWorker; import io.airbyte.workers.general.ReplicationWorkerHelper; import io.airbyte.workers.helper.AirbyteMessageDataExtractor; +import io.airbyte.workers.helper.StreamStatusCompletionTracker; import io.airbyte.workers.internal.AirbyteDestination; import io.airbyte.workers.internal.AirbyteMapper; import io.airbyte.workers.internal.AirbyteSource; @@ -42,10 +43,11 @@ public ReplicationWorker getReplicationWorker(final String jobId, final AirbyteMessageDataExtractor airbyteMessageDataExtractor, final 
ReplicationAirbyteMessageEventPublishingHelper messageEventPublishingHelper, final ReplicationWorkerHelper replicationWorkerHelper, - final DestinationTimeoutMonitor destinationTimeoutMonitor) { + final DestinationTimeoutMonitor destinationTimeoutMonitor, + final StreamStatusCompletionTracker streamStatusCompletionTracker) { return new BufferedReplicationWorker(jobId, attempt, source, destination, syncPersistence, recordSchemaValidator, srcHeartbeatTimeoutChaperone, replicationFeatureFlagReader, replicationWorkerHelper, destinationTimeoutMonitor, - BufferedReplicationWorkerType.BUFFERED_WITH_LINKED_BLOCKING_QUEUE); + BufferedReplicationWorkerType.BUFFERED_WITH_LINKED_BLOCKING_QUEUE, streamStatusCompletionTracker); } public static void main(final String[] args) throws IOException, InterruptedException { diff --git a/airbyte-commons-worker/src/test/java/io/airbyte/workers/general/performance/DefaultReplicationWorkerPerformanceTest.java b/airbyte-commons-worker/src/test/java/io/airbyte/workers/general/performance/DefaultReplicationWorkerPerformanceTest.java index b0bb8eb88a4..a87da89def1 100644 --- a/airbyte-commons-worker/src/test/java/io/airbyte/workers/general/performance/DefaultReplicationWorkerPerformanceTest.java +++ b/airbyte-commons-worker/src/test/java/io/airbyte/workers/general/performance/DefaultReplicationWorkerPerformanceTest.java @@ -10,6 +10,7 @@ import io.airbyte.workers.general.ReplicationWorker; import io.airbyte.workers.general.ReplicationWorkerHelper; import io.airbyte.workers.helper.AirbyteMessageDataExtractor; +import io.airbyte.workers.helper.StreamStatusCompletionTracker; import io.airbyte.workers.internal.AirbyteDestination; import io.airbyte.workers.internal.AirbyteMapper; import io.airbyte.workers.internal.AirbyteSource; @@ -41,9 +42,11 @@ public ReplicationWorker getReplicationWorker(final String jobId, final AirbyteMessageDataExtractor airbyteMessageDataExtractor, final ReplicationAirbyteMessageEventPublishingHelper 
messageEventPublishingHelper, final ReplicationWorkerHelper replicationWorkerHelper, - final DestinationTimeoutMonitor destinationTimeoutMonitor) { + final DestinationTimeoutMonitor destinationTimeoutMonitor, + final StreamStatusCompletionTracker streamStatusCompletionTracker) { return new DefaultReplicationWorker(jobId, attempt, source, destination, syncPersistence, recordSchemaValidator, - srcHeartbeatTimeoutChaperone, replicationFeatureFlagReader, replicationWorkerHelper, destinationTimeoutMonitor); + srcHeartbeatTimeoutChaperone, replicationFeatureFlagReader, replicationWorkerHelper, destinationTimeoutMonitor, + streamStatusCompletionTracker); } public static void main(final String[] args) throws IOException, InterruptedException { diff --git a/airbyte-commons-worker/src/test/java/io/airbyte/workers/general/performance/ReplicationWorkerPerformanceTest.java b/airbyte-commons-worker/src/test/java/io/airbyte/workers/general/performance/ReplicationWorkerPerformanceTest.java index 25c097353ca..96d22ebf07a 100644 --- a/airbyte-commons-worker/src/test/java/io/airbyte/workers/general/performance/ReplicationWorkerPerformanceTest.java +++ b/airbyte-commons-worker/src/test/java/io/airbyte/workers/general/performance/ReplicationWorkerPerformanceTest.java @@ -10,6 +10,9 @@ import static org.mockito.Mockito.when; import io.airbyte.api.client.AirbyteApiClient; +import io.airbyte.api.client.WorkloadApiClient; +import io.airbyte.api.client.generated.DestinationApi; +import io.airbyte.api.client.generated.SourceApi; import io.airbyte.commons.converters.ConnectorConfigUpdater; import io.airbyte.commons.converters.ThreadedTimeTracker; import io.airbyte.commons.features.EnvVariableFeatureFlags; @@ -39,6 +42,7 @@ import io.airbyte.workers.general.ReplicationWorker; import io.airbyte.workers.general.ReplicationWorkerHelper; import io.airbyte.workers.helper.AirbyteMessageDataExtractor; +import io.airbyte.workers.helper.StreamStatusCompletionTracker; import 
io.airbyte.workers.internal.AirbyteDestination; import io.airbyte.workers.internal.AirbyteMapper; import io.airbyte.workers.internal.AirbyteSource; @@ -90,7 +94,8 @@ public abstract ReplicationWorker getReplicationWorker(final String jobId, final AirbyteMessageDataExtractor airbyteMessageDataExtractor, final ReplicationAirbyteMessageEventPublishingHelper messageEventPublishingHelper, final ReplicationWorkerHelper replicationWorkerHelper, - final DestinationTimeoutMonitor destinationTimeoutMonitor); + final DestinationTimeoutMonitor destinationTimeoutMonitor, + final StreamStatusCompletionTracker streamStatusCompletionTracker); /** * Hook up the DefaultReplicationWorker to a test harness with an insanely quick Source @@ -143,7 +148,7 @@ public void executeOneSync() throws InterruptedException { catalogMigrator.initialize(); final var migratorFactory = new AirbyteProtocolVersionedMigratorFactory(msgMigrator, catalogMigrator); - final var versionFac = VersionedAirbyteStreamFactory.noMigrationVersionedAirbyteStreamFactory(false); + final var versionFac = VersionedAirbyteStreamFactory.noMigrationVersionedAirbyteStreamFactory(); final HeartbeatMonitor heartbeatMonitor = new HeartbeatMonitor(DEFAULT_HEARTBEAT_FRESHNESS_THRESHOLD); final var versionedAbSource = new DefaultAirbyteSource(integrationLauncher, versionFac, heartbeatMonitor, migratorFactory.getProtocolSerializer(new Version("0.2.0")), @@ -177,12 +182,15 @@ public void executeOneSync() throws InterruptedException { final boolean fieldSelectionEnabled = false; final FieldSelector fieldSelector = new FieldSelector(validator, metricReporter, fieldSelectionEnabled, false); + final WorkloadApiClient workloadApiClient = mock(WorkloadApiClient.class); + when(workloadApiClient.getWorkloadApi()).thenReturn(mock(WorkloadApi.class)); final ReplicationWorkerHelper replicationWorkerHelper = new ReplicationWorkerHelper(airbyteMessageDataExtractor, fieldSelector, dstNamespaceMapper, messageTracker, syncPersistence, - 
replicationAirbyteMessageEventPublishingHelper, new ThreadedTimeTracker(), () -> {}, mock(WorkloadApi.class), false, + replicationAirbyteMessageEventPublishingHelper, new ThreadedTimeTracker(), () -> {}, workloadApiClient, false, analyticsMessageTracker, - Optional.empty()); + Optional.empty(), mock(SourceApi.class), mock(DestinationApi.class), mock(StreamStatusCompletionTracker.class)); + final StreamStatusCompletionTracker streamStatusCompletionTracker = mock(StreamStatusCompletionTracker.class); final var worker = getReplicationWorker("1", 0, versionedAbSource, @@ -197,7 +205,8 @@ replicationAirbyteMessageEventPublishingHelper, new ThreadedTimeTracker(), () -> airbyteMessageDataExtractor, replicationAirbyteMessageEventPublishingHelper, replicationWorkerHelper, - destinationTimeoutMonitor); + destinationTimeoutMonitor, + streamStatusCompletionTracker); final AtomicReference output = new AtomicReference<>(); final Thread workerThread = new Thread(() -> { try { diff --git a/airbyte-commons-worker/src/test/java/io/airbyte/workers/helper/FailureHelperTest.java b/airbyte-commons-worker/src/test/java/io/airbyte/workers/helper/FailureHelperTest.java index bee9a0f49e9..5a5fd150c40 100644 --- a/airbyte-commons-worker/src/test/java/io/airbyte/workers/helper/FailureHelperTest.java +++ b/airbyte-commons-worker/src/test/java/io/airbyte/workers/helper/FailureHelperTest.java @@ -16,6 +16,7 @@ import io.airbyte.config.Metadata; import io.airbyte.protocol.models.AirbyteErrorTraceMessage; import io.airbyte.protocol.models.AirbyteTraceMessage; +import io.airbyte.protocol.models.StreamDescriptor; import io.airbyte.workers.exception.WorkerException; import io.airbyte.workers.helper.FailureHelper.ConnectorCommand; import io.airbyte.workers.test_utils.AirbyteMessageUtils; @@ -24,6 +25,7 @@ import java.util.List; import java.util.Map; import java.util.Set; +import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; class FailureHelperTest { @@ -72,12 +74,45 @@ void 
testGenericFailureFromTrace() throws Exception { assertEquals(FailureType.CONFIG_ERROR, failureReason.getFailureType()); } + @Test + void testFailureWithTransientFailureType() { + final AirbyteTraceMessage traceMessage = + AirbyteMessageUtils.createErrorTraceMessage("sample trace message", 10.0, AirbyteErrorTraceMessage.FailureType.TRANSIENT_ERROR); + final FailureReason reason = FailureHelper.genericFailure(traceMessage, 1034L, 0); + assertEquals(FailureType.TRANSIENT_ERROR, reason.getFailureType()); + } + @Test void testGenericFailureFromTraceNoFailureType() throws Exception { final FailureReason failureReason = FailureHelper.genericFailure(TRACE_MESSAGE, Long.valueOf(12345), 1); assertEquals(failureReason.getFailureType(), FailureType.SYSTEM_ERROR); } + @Test + void testExtractStreamDescriptor() { + String name = "users"; + String namespace = "public"; + final AirbyteTraceMessage traceMessage = + AirbyteMessageUtils.createErrorTraceMessage("a error with a stream", 80.0, AirbyteErrorTraceMessage.FailureType.SYSTEM_ERROR); + traceMessage.getError().setStreamDescriptor(new StreamDescriptor().withName(name).withNamespace(namespace)); + final FailureReason failureReason = FailureHelper.genericFailure(traceMessage, 1934L, 0); + Assertions.assertNotNull(failureReason.getStreamDescriptor()); + assertEquals(failureReason.getStreamDescriptor().getName(), name); + assertEquals(failureReason.getStreamDescriptor().getNamespace(), namespace); + } + + @Test + void testExtractStreamDescriptorNoNamespace() { + String name = "users"; + final AirbyteTraceMessage traceMessage = + AirbyteMessageUtils.createErrorTraceMessage("a error with a stream", 80.0, AirbyteErrorTraceMessage.FailureType.SYSTEM_ERROR); + traceMessage.getError().setStreamDescriptor(new StreamDescriptor().withName(name)); + final FailureReason failureReason = FailureHelper.genericFailure(traceMessage, 1934L, 0); + Assertions.assertNotNull(failureReason.getStreamDescriptor()); + 
assertEquals(failureReason.getStreamDescriptor().getName(), name); + assertNull(failureReason.getStreamDescriptor().getNamespace()); + } + @Test void testConnectorCommandFailure() { final Throwable t = new RuntimeException(); diff --git a/airbyte-commons-worker/src/test/java/io/airbyte/workers/internal/HeartBeatTimeoutChaperoneTest.java b/airbyte-commons-worker/src/test/java/io/airbyte/workers/internal/HeartBeatTimeoutChaperoneTest.java index 275cc023fa0..3ae658273bc 100644 --- a/airbyte-commons-worker/src/test/java/io/airbyte/workers/internal/HeartBeatTimeoutChaperoneTest.java +++ b/airbyte-commons-worker/src/test/java/io/airbyte/workers/internal/HeartBeatTimeoutChaperoneTest.java @@ -5,6 +5,8 @@ package io.airbyte.workers.internal; import static org.junit.jupiter.api.Assertions.assertDoesNotThrow; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.mock; @@ -24,7 +26,6 @@ import java.util.UUID; import java.util.concurrent.CompletableFuture; import java.util.concurrent.TimeUnit; -import org.assertj.core.api.Assertions; import org.junit.jupiter.api.Test; class HeartBeatTimeoutChaperoneTest { @@ -40,6 +41,8 @@ class HeartBeatTimeoutChaperoneTest { @Test void testFailHeartbeat() { when(featureFlagClient.boolVariation(eq(ShouldFailSyncIfHeartbeatFailure.INSTANCE), any())).thenReturn(true); + when(heartbeatMonitor.getHeartbeatFreshnessThreshold()).thenReturn(Duration.ofSeconds(1)); + final HeartbeatTimeoutChaperone heartbeatTimeoutChaperone = new HeartbeatTimeoutChaperone( heartbeatMonitor, timeoutCheckDuration, @@ -49,14 +52,17 @@ void testFailHeartbeat() { connectionId, metricClient); - Assertions.assertThatThrownBy(() -> heartbeatTimeoutChaperone.runWithHeartbeatThread(CompletableFuture.runAsync(() -> { - try { - Thread.sleep(Long.MAX_VALUE); - } catch (final 
InterruptedException e) { - throw new RuntimeException(e); - } - }))) - .isInstanceOf(HeartbeatTimeoutChaperone.HeartbeatTimeoutException.class); + final var thrown = assertThrows(HeartbeatTimeoutChaperone.HeartbeatTimeoutException.class, + () -> heartbeatTimeoutChaperone.runWithHeartbeatThread(CompletableFuture.runAsync(() -> { + try { + Thread.sleep(Long.MAX_VALUE); + } catch (final InterruptedException e) { + throw new RuntimeException(e); + } + }))); + + assertEquals("Last record seen 0 seconds ago, exceeding the threshold of 1 second.", thrown.getMessage()); + verify(metricClient, times(1)).count(OssMetricsRegistry.SOURCE_HEARTBEAT_FAILURE, 1, new MetricAttribute(MetricTags.CONNECTION_ID, connectionId.toString()), new MetricAttribute(MetricTags.KILLED, "true"), diff --git a/airbyte-commons-worker/src/test/java/io/airbyte/workers/internal/NamespacingMapperTest.java b/airbyte-commons-worker/src/test/java/io/airbyte/workers/internal/NamespacingMapperTest.java index 925d9baadca..b637f6f0ad1 100644 --- a/airbyte-commons-worker/src/test/java/io/airbyte/workers/internal/NamespacingMapperTest.java +++ b/airbyte-commons-worker/src/test/java/io/airbyte/workers/internal/NamespacingMapperTest.java @@ -13,10 +13,12 @@ import io.airbyte.config.JobSyncConfig.NamespaceDefinitionType; import io.airbyte.protocol.models.AirbyteMessage; import io.airbyte.protocol.models.AirbyteStateMessage.AirbyteStateType; +import io.airbyte.protocol.models.AirbyteStreamStatusTraceMessage; import io.airbyte.protocol.models.CatalogHelpers; import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; import io.airbyte.protocol.models.Field; import io.airbyte.protocol.models.JsonSchemaType; +import io.airbyte.protocol.models.StreamDescriptor; import io.airbyte.workers.test_utils.AirbyteMessageUtils; import java.util.Map; import org.junit.jupiter.api.BeforeEach; @@ -38,6 +40,9 @@ class NamespacingMapperTest { Field.of(FIELD_NAME, JsonSchemaType.STRING)); private AirbyteMessage recordMessage; private 
AirbyteMessage stateMessage; + private final AirbyteMessage streamStatusMessage = AirbyteMessageUtils.createStreamStatusTraceMessageWithType( + new StreamDescriptor().withName(STREAM_NAME).withNamespace(INPUT_NAMESPACE), + AirbyteStreamStatusTraceMessage.AirbyteStreamStatus.COMPLETE); private Map destinationToSourceNamespaceAndStreamName; private static AirbyteMessage createRecordMessage() { @@ -84,6 +89,15 @@ void testSourceNamespace() { final AirbyteMessage actualMessage = mapper.mapMessage(recordMessage); assertEquals(expectedMessage, actualMessage); + + final AirbyteMessage expectedStreamStatusMessage = AirbyteMessageUtils.createStreamStatusTraceMessageWithType( + new StreamDescriptor().withName(OUTPUT_PREFIX + STREAM_NAME), + AirbyteStreamStatusTraceMessage.AirbyteStreamStatus.COMPLETE); + expectedStreamStatusMessage.getTrace().getStreamStatus().getStreamDescriptor().withNamespace(INPUT_NAMESPACE); + + final AirbyteMessage actualStreamStatusMessage = mapper.mapMessage(streamStatusMessage); + + assertEquals(expectedStreamStatusMessage, actualStreamStatusMessage); } @Test @@ -106,10 +120,19 @@ void testEmptySourceNamespace() { assertEquals(originalMessage, recordMessage); originalMessage.getRecord().withNamespace(null); + final AirbyteMessage originalStreamStatusMessage = Jsons.clone(streamStatusMessage); + assertEquals(originalStreamStatusMessage, streamStatusMessage); + originalStreamStatusMessage.getTrace().getStreamStatus().getStreamDescriptor().withNamespace(null); + final AirbyteMessage expectedMessage = AirbyteMessageUtils.createRecordMessage(OUTPUT_PREFIX + STREAM_NAME, FIELD_NAME, BLUE); expectedMessage.getRecord().withNamespace(null); final AirbyteMessage actualMessage = mapper.mapMessage(originalMessage); + final AirbyteMessage expectedStreamStatusMessage = AirbyteMessageUtils.createStreamStatusTraceMessageWithType( + new StreamDescriptor().withName(OUTPUT_PREFIX + STREAM_NAME), + AirbyteStreamStatusTraceMessage.AirbyteStreamStatus.COMPLETE); + final 
AirbyteMessage actualStreamStatusMessage = mapper.mapMessage(originalStreamStatusMessage); + assertEquals(expectedMessage, actualMessage); } @@ -134,6 +157,12 @@ void testDestinationNamespace() { final AirbyteMessage expectedMessage = AirbyteMessageUtils.createRecordMessage(OUTPUT_PREFIX + STREAM_NAME, FIELD_NAME, BLUE); final AirbyteMessage actualMessage = mapper.mapMessage(recordMessage); assertEquals(expectedMessage, actualMessage); + + final AirbyteMessage expectedStreamStatusMessage = AirbyteMessageUtils.createStreamStatusTraceMessageWithType( + new StreamDescriptor().withName(OUTPUT_PREFIX + STREAM_NAME), + AirbyteStreamStatusTraceMessage.AirbyteStreamStatus.COMPLETE); + final AirbyteMessage actualStreamStatusMessage = mapper.mapMessage(streamStatusMessage); + assertEquals(expectedStreamStatusMessage, actualStreamStatusMessage); } @Test @@ -162,6 +191,14 @@ void testCustomFormatWithVariableNamespace() { final AirbyteMessage actualMessage = mapper.mapMessage(recordMessage); assertEquals(expectedMessage, actualMessage); + + final AirbyteMessage expectedStreamStatusMessage = AirbyteMessageUtils.createStreamStatusTraceMessageWithType( + new StreamDescriptor().withName(OUTPUT_PREFIX + STREAM_NAME), + AirbyteStreamStatusTraceMessage.AirbyteStreamStatus.COMPLETE); + expectedStreamStatusMessage.getTrace().getStreamStatus().getStreamDescriptor().withNamespace(expectedNamespace); + final AirbyteMessage actualStreamStatusMessage = mapper.mapMessage(streamStatusMessage); + + assertEquals(expectedStreamStatusMessage, actualStreamStatusMessage); } @Test @@ -190,6 +227,14 @@ void testCustomFormatWithoutVariableNamespace() { final AirbyteMessage actualMessage = mapper.mapMessage(recordMessage); assertEquals(expectedMessage, actualMessage); + + final AirbyteMessage expectedStreamStatusMessage = AirbyteMessageUtils.createStreamStatusTraceMessageWithType( + new StreamDescriptor().withName(OUTPUT_PREFIX + STREAM_NAME), + 
AirbyteStreamStatusTraceMessage.AirbyteStreamStatus.COMPLETE); + expectedStreamStatusMessage.getTrace().getStreamStatus().getStreamDescriptor().withNamespace(expectedNamespace); + final AirbyteMessage actualStreamStatusMessage = mapper.mapMessage(streamStatusMessage); + + assertEquals(expectedStreamStatusMessage, actualStreamStatusMessage); } @Test @@ -220,6 +265,18 @@ void testEmptyCustomFormatWithVariableNamespace() { final AirbyteMessage actualMessage = mapper.mapMessage(originalMessage); assertEquals(expectedMessage, actualMessage); + + final AirbyteMessage originalStreamStatusMessage = Jsons.clone(streamStatusMessage); + assertEquals(originalStreamStatusMessage, streamStatusMessage); + originalStreamStatusMessage.getTrace().getStreamStatus().getStreamDescriptor().withNamespace(null); + + final AirbyteMessage expectedStreamStatusMessage = AirbyteMessageUtils.createStreamStatusTraceMessageWithType( + new StreamDescriptor().withName(OUTPUT_PREFIX + STREAM_NAME), + AirbyteStreamStatusTraceMessage.AirbyteStreamStatus.COMPLETE); + expectedStreamStatusMessage.getTrace().getStreamStatus().getStreamDescriptor().withNamespace(null); + final AirbyteMessage actualStreamStatusMessage = mapper.mapMessage(originalStreamStatusMessage); + + assertEquals(expectedStreamStatusMessage, actualStreamStatusMessage); } @Test diff --git a/airbyte-commons-worker/src/test/java/io/airbyte/workers/internal/VersionedAirbyteStreamFactoryTest.java b/airbyte-commons-worker/src/test/java/io/airbyte/workers/internal/VersionedAirbyteStreamFactoryTest.java index aab2b98fa08..29b3e40f38f 100644 --- a/airbyte-commons-worker/src/test/java/io/airbyte/workers/internal/VersionedAirbyteStreamFactoryTest.java +++ b/airbyte-commons-worker/src/test/java/io/airbyte/workers/internal/VersionedAirbyteStreamFactoryTest.java @@ -5,12 +5,12 @@ package io.airbyte.workers.internal; import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertThrows; import static 
org.mockito.ArgumentMatchers.anyString; import static org.mockito.Mockito.atLeastOnce; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.spy; import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.verifyNoMoreInteractions; import io.airbyte.commons.json.Jsons; import io.airbyte.commons.logging.MdcScope.Builder; @@ -42,6 +42,7 @@ import java.util.stream.Collectors; import java.util.stream.Stream; import org.assertj.core.api.Assertions; +import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.DisplayName; import org.junit.jupiter.api.Nested; @@ -50,6 +51,7 @@ import org.junit.jupiter.params.provider.ValueSource; import org.junit.platform.commons.util.ClassLoaderUtils; import org.slf4j.Logger; +import org.slf4j.LoggerFactory; class VersionedAirbyteStreamFactoryTest { @@ -68,7 +70,12 @@ class ParseMessages { @BeforeEach void setup() { - logger = mock(Logger.class); + logger = spy(LoggerFactory.getLogger(VersionedAirbyteStreamFactoryTest.class)); + } + + @AfterEach() + void afterEach() { + verifyNoMoreInteractions(logger); } @Test @@ -79,7 +86,6 @@ void testValid() { final Stream expectedStream = Stream.of(record1); assertEquals(expectedStream.collect(Collectors.toList()), messageStream.collect(Collectors.toList())); - verify(logger).info("Reading messages from protocol version {}{}", "0.2.0", ""); } @Test @@ -156,22 +162,6 @@ void testFailValidation() { verify(logger, atLeastOnce()).error(anyString(), anyString()); } - @Test - void testFailsSize() { - final AirbyteMessage record1 = AirbyteMessageUtils.createRecordMessage(STREAM_NAME, FIELD_NAME, "green"); - - final InputStream inputStream = new ByteArrayInputStream(record1.toString().getBytes(StandardCharsets.UTF_8)); - final BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(inputStream, StandardCharsets.UTF_8)); - - final Stream messageStream = - VersionedAirbyteStreamFactory - 
.noMigrationVersionedAirbyteStreamFactory(logger, new Builder(), Optional.of(RuntimeException.class), 1L, - new VersionedAirbyteStreamFactory.InvalidLineFailureConfiguration(false, false), gsonPksExtractor) - .create(bufferedReader); - - assertThrows(RuntimeException.class, () -> messageStream.toList()); - } - @ParameterizedTest @ValueSource(strings = { // Missing closing bracket. @@ -184,13 +174,16 @@ void testFailsSize() { "{\"type\": \"record\", \"record\": {\"stream\": \"transactions\", \"data\": {\"transaction_id\": Infinity }}}"}) void testMalformedRecordShouldOnlyDebugLog(final String invalidRecord) { stringToMessageStream(invalidRecord).collect(Collectors.toList()); + verifyBlankedRecordRecordWarning(); verify(logger).debug(invalidRecord); } - private VersionedAirbyteStreamFactory getFactory(final boolean failTooLongMessage) { + private VersionedAirbyteStreamFactory getFactory() { return VersionedAirbyteStreamFactory - .noMigrationVersionedAirbyteStreamFactory(logger, new Builder(), Optional.of(RuntimeException.class), 100000L, - new VersionedAirbyteStreamFactory.InvalidLineFailureConfiguration(failTooLongMessage, false), + .noMigrationVersionedAirbyteStreamFactory( + logger, + new Builder(), + new VersionedAirbyteStreamFactory.InvalidLineFailureConfiguration(false), gsonPksExtractor); } @@ -202,56 +195,65 @@ private VersionedAirbyteStreamFactory getFactory(final boolean failTooLongMessag @Test void testToAirbyteMessageValid() { final String messageLine = String.format(VALID_MESSAGE_TEMPLATE, "hello"); - Assertions.assertThat(getFactory(false).toAirbyteMessage(messageLine)).hasSize(1); + Assertions.assertThat(getFactory().toAirbyteMessage(messageLine)).hasSize(1); } @Test void testToAirbyteMessageRandomLog() { - Assertions.assertThat(getFactory(false).toAirbyteMessage("I should not be send on the same channel than the airbyte messages")) + final String randomLog = "I should not be send on the same channel than the airbyte messages"; + 
Assertions.assertThat(getFactory().toAirbyteMessage(randomLog)) .isEmpty(); + verify(logger).info(randomLog); } @Test void testToAirbyteMessageMixedUpRecordShouldOnlyDebugLog() { final String messageLine = "It shouldn't be here" + String.format(VALID_MESSAGE_TEMPLATE, "hello"); - getFactory(false).toAirbyteMessage(messageLine); + getFactory().toAirbyteMessage(messageLine); + verifyBlankedRecordRecordWarning(); verify(logger).debug(messageLine); } @Test void testToAirbyteMessageMixedUpRecordFailureDisable() { final String messageLine = "It shouldn't be here" + String.format(VALID_MESSAGE_TEMPLATE, "hello"); - Assertions.assertThat(getFactory(false).toAirbyteMessage(messageLine)).isEmpty(); - } - - @Test - void testToAirbyteMessageVeryLongMessageFail() { - final StringBuilder longStringBuilder = new StringBuilder(5_000_000); - for (int i = 0; i < 25_000_000; i++) { - longStringBuilder.append("a"); - } - final String messageLine = String.format(VALID_MESSAGE_TEMPLATE, longStringBuilder); - assertThrows(RuntimeException.class, () -> getFactory(true).toAirbyteMessage(messageLine)); + Assertions.assertThat(getFactory().toAirbyteMessage(messageLine)).isEmpty(); + verifyBlankedRecordRecordWarning(); + verify(logger).debug(messageLine); } @Test void testToAirbyteMessageVeryLongMessageDontFail() { - final StringBuilder longStringBuilder = new StringBuilder(5_000_000); + // This roughly corresponds to a 25_000_000 * 2 bytes string. 
+ final StringBuilder longStringBuilder = new StringBuilder(25_000_000); for (int i = 0; i < 25_000_000; i++) { longStringBuilder.append("a"); } final String messageLine = String.format(VALID_MESSAGE_TEMPLATE, longStringBuilder); - Assertions.assertThat(getFactory(false).toAirbyteMessage(messageLine)).isEmpty(); + Assertions.assertThat(getFactory().toAirbyteMessage(messageLine)).isNotEmpty(); } private Stream stringToMessageStream(final String inputString) { final InputStream inputStream = new ByteArrayInputStream(inputString.getBytes(StandardCharsets.UTF_8)); final BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(inputStream, StandardCharsets.UTF_8)); - return VersionedAirbyteStreamFactory - .noMigrationVersionedAirbyteStreamFactory(logger, new Builder(), Optional.of(RuntimeException.class), 100000L, - new VersionedAirbyteStreamFactory.InvalidLineFailureConfiguration(false, false), + + final var stream = VersionedAirbyteStreamFactory + .noMigrationVersionedAirbyteStreamFactory( + logger, + new Builder(), + new VersionedAirbyteStreamFactory.InvalidLineFailureConfiguration(false), gsonPksExtractor) .create(bufferedReader); + verifyStreamHeader(); + return stream; + } + + private void verifyBlankedRecordRecordWarning() { + verify(logger).warn("Could not parse the string received from source, it seems to be a record message"); + } + + private void verifyStreamHeader() { + verify(logger).info("Reading messages from protocol version {}{}", "0.2.0", ""); } } @@ -282,8 +284,8 @@ void beforeEach() { void testCreate() { final Version initialVersion = new Version("0.1.2"); final VersionedAirbyteStreamFactory streamFactory = - new VersionedAirbyteStreamFactory<>(serDeProvider, migratorFactory, initialVersion, Optional.empty(), Optional.empty(), Optional.empty(), - new VersionedAirbyteStreamFactory.InvalidLineFailureConfiguration(false, false), + new VersionedAirbyteStreamFactory<>(serDeProvider, migratorFactory, initialVersion, Optional.empty(), 
Optional.empty(), + new VersionedAirbyteStreamFactory.InvalidLineFailureConfiguration(false), gsonPksExtractor); final BufferedReader bufferedReader = new BufferedReader(new StringReader("")); @@ -296,8 +298,8 @@ void testCreate() { void testCreateWithVersionDetection() { final Version initialVersion = new Version("0.0.0"); final VersionedAirbyteStreamFactory streamFactory = - new VersionedAirbyteStreamFactory<>(serDeProvider, migratorFactory, initialVersion, Optional.empty(), Optional.empty(), Optional.empty(), - new VersionedAirbyteStreamFactory.InvalidLineFailureConfiguration(false, false), + new VersionedAirbyteStreamFactory<>(serDeProvider, migratorFactory, initialVersion, Optional.empty(), Optional.empty(), + new VersionedAirbyteStreamFactory.InvalidLineFailureConfiguration(false), gsonPksExtractor) .withDetectVersion(true); @@ -313,8 +315,8 @@ void testCreateWithVersionDetection() { void testCreateWithVersionDetectionFallback() { final Version initialVersion = new Version("0.0.6"); final VersionedAirbyteStreamFactory streamFactory = - new VersionedAirbyteStreamFactory<>(serDeProvider, migratorFactory, initialVersion, Optional.empty(), Optional.empty(), Optional.empty(), - new VersionedAirbyteStreamFactory.InvalidLineFailureConfiguration(false, false), + new VersionedAirbyteStreamFactory<>(serDeProvider, migratorFactory, initialVersion, Optional.empty(), Optional.empty(), + new VersionedAirbyteStreamFactory.InvalidLineFailureConfiguration(false), gsonPksExtractor) .withDetectVersion(true); @@ -330,8 +332,8 @@ void testCreateWithVersionDetectionFallback() { void testCreateWithVersionDetectionWithoutSpecMessage() { final Version initialVersion = new Version("0.0.1"); final VersionedAirbyteStreamFactory streamFactory = - new VersionedAirbyteStreamFactory<>(serDeProvider, migratorFactory, initialVersion, Optional.empty(), Optional.empty(), Optional.empty(), - new VersionedAirbyteStreamFactory.InvalidLineFailureConfiguration(false, false), + new 
VersionedAirbyteStreamFactory<>(serDeProvider, migratorFactory, initialVersion, Optional.empty(), Optional.empty(), + new VersionedAirbyteStreamFactory.InvalidLineFailureConfiguration(false), gsonPksExtractor) .withDetectVersion(true); diff --git a/airbyte-commons-worker/src/test/java/io/airbyte/workers/internal/bookkeeping/AirbyteMessageTrackerTest.java b/airbyte-commons-worker/src/test/java/io/airbyte/workers/internal/bookkeeping/AirbyteMessageTrackerTest.java index 44529479f0b..742293ae029 100644 --- a/airbyte-commons-worker/src/test/java/io/airbyte/workers/internal/bookkeeping/AirbyteMessageTrackerTest.java +++ b/airbyte-commons-worker/src/test/java/io/airbyte/workers/internal/bookkeeping/AirbyteMessageTrackerTest.java @@ -5,7 +5,7 @@ package io.airbyte.workers.internal.bookkeeping; import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.verifyNoInteractions; @@ -18,6 +18,9 @@ import io.airbyte.protocol.models.StreamDescriptor; import io.airbyte.workers.helper.FailureHelper; import io.airbyte.workers.test_utils.AirbyteMessageUtils; +import java.util.ArrayList; +import java.util.List; +import java.util.stream.Stream; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; @@ -193,8 +196,12 @@ void testErrorTraceMessageFailureWithMultipleTraceErrors() { messageTracker.acceptFromDestination(dstMsg1); messageTracker.acceptFromDestination(dstMsg2); - final FailureReason failureReason = FailureHelper.sourceFailure(srcMsg1.getTrace(), Long.valueOf(123), 1); - assertEquals(messageTracker.errorTraceMessageFailure(123L, 1), failureReason); + List failureReasons = new ArrayList<>(); + failureReasons.addAll( + Stream.of(srcMsg1, srcMsg2).map(m -> 
FailureHelper.sourceFailure(m.getTrace(), Long.valueOf(123), 1)).toList()); + failureReasons.addAll( + Stream.of(dstMsg1, dstMsg2).map(m -> FailureHelper.destinationFailure(m.getTrace(), Long.valueOf(123), 1)).toList()); + assertEquals(messageTracker.errorTraceMessageFailure(123L, 1), failureReasons); } @Test @@ -203,12 +210,12 @@ void testErrorTraceMessageFailureWithOneTraceError() { messageTracker.acceptFromDestination(destMessage); final FailureReason failureReason = FailureHelper.destinationFailure(destMessage.getTrace(), Long.valueOf(123), 1); - assertEquals(messageTracker.errorTraceMessageFailure(123L, 1), failureReason); + assertEquals(messageTracker.errorTraceMessageFailure(123L, 1), List.of(failureReason)); } @Test void testErrorTraceMessageFailureWithNoTraceErrors() { - assertNull(messageTracker.errorTraceMessageFailure(123L, 1)); + assertTrue(messageTracker.errorTraceMessageFailure(123L, 1).isEmpty()); } } diff --git a/airbyte-commons-worker/src/test/java/io/airbyte/workers/internal/bookkeeping/StreamStatusTrackerTest.java b/airbyte-commons-worker/src/test/java/io/airbyte/workers/internal/bookkeeping/StreamStatusTrackerTest.java index 8b967f02493..c6fd2e84bd7 100644 --- a/airbyte-commons-worker/src/test/java/io/airbyte/workers/internal/bookkeeping/StreamStatusTrackerTest.java +++ b/airbyte-commons-worker/src/test/java/io/airbyte/workers/internal/bookkeeping/StreamStatusTrackerTest.java @@ -64,6 +64,8 @@ class StreamStatusTrackerTest { private static final UUID SOURCE_ID = UUID.randomUUID(); private static final UUID STREAM_ID = UUID.randomUUID(); private static final UUID WORKSPACE_ID = UUID.randomUUID(); + private static final UUID SOURCE_DEFINITION_ID = UUID.randomUUID(); + private static final UUID DESTINATION_DEFINITION_ID = UUID.randomUUID(); private static final Duration TIMESTAMP = Duration.of(12345L, ChronoUnit.MILLIS); private AirbyteApiClient airbyteApiClient; @@ -101,8 +103,7 @@ void testCurrentStatusNoStatus() { void 
testTrackingStartedStatus(final boolean isReset) throws ApiException { final AirbyteMessageOrigin airbyteMessageOrigin = AirbyteMessageOrigin.SOURCE; final AirbyteMessage airbyteMessage = createAirbyteMessage(streamDescriptor, STARTED, TIMESTAMP); - final ReplicationContext replicationContext = - new ReplicationContext(isReset, CONNECTION_ID, DESTINATION_ID, SOURCE_ID, JOB_ID, ATTEMPT, WORKSPACE_ID, SOURCE_IMAGE, DESTINATION_IMAGE); + final ReplicationContext replicationContext = getDefaultContext(isReset); final ReplicationAirbyteMessageEvent event = new ReplicationAirbyteMessageEvent(airbyteMessageOrigin, airbyteMessage, replicationContext); final StreamStatusCreateRequestBody expected = new StreamStatusCreateRequestBody() .streamName(streamDescriptor.getName()) @@ -115,7 +116,7 @@ void testTrackingStartedStatus(final boolean isReset) throws ApiException { .transitionedAt(TIMESTAMP.toMillis()) .workspaceId(WORKSPACE_ID); final StreamStatusKey streamStatusKey = new StreamStatusKey(streamDescriptor.getName(), streamDescriptor.getNamespace(), - replicationContext.workspaceId(), replicationContext.connectionId(), replicationContext.jobId(), replicationContext.attempt()); + replicationContext.getWorkspaceId(), replicationContext.getConnectionId(), replicationContext.getJobId(), replicationContext.getAttempt()); final StreamStatusRead streamStatusRead = new StreamStatusRead() .attemptNumber(ATTEMPT) .connectionId(CONNECTION_ID) @@ -143,8 +144,7 @@ void testTrackingRunningStatus() throws ApiException { final AirbyteMessageOrigin airbyteMessageOrigin = AirbyteMessageOrigin.SOURCE; final AirbyteMessage startedAirbyteMessage = createAirbyteMessage(streamDescriptor, STARTED, TIMESTAMP); final AirbyteMessage runningAirbyteMessage = createAirbyteMessage(streamDescriptor, AirbyteStreamStatus.RUNNING, TIMESTAMP); - final ReplicationContext replicationContext = - new ReplicationContext(false, CONNECTION_ID, DESTINATION_ID, SOURCE_ID, JOB_ID, ATTEMPT, WORKSPACE_ID, SOURCE_IMAGE, 
DESTINATION_IMAGE); + final ReplicationContext replicationContext = getDefaultContext(false); final ReplicationAirbyteMessageEvent startedEvent = new ReplicationAirbyteMessageEvent(airbyteMessageOrigin, startedAirbyteMessage, replicationContext); final ReplicationAirbyteMessageEvent runningEvent = @@ -161,7 +161,7 @@ void testTrackingRunningStatus() throws ApiException { .transitionedAt(TIMESTAMP.toMillis()) .workspaceId(WORKSPACE_ID); final StreamStatusKey streamStatusKey = new StreamStatusKey(streamDescriptor.getName(), streamDescriptor.getNamespace(), - replicationContext.workspaceId(), replicationContext.connectionId(), replicationContext.jobId(), replicationContext.attempt()); + replicationContext.getWorkspaceId(), replicationContext.getConnectionId(), replicationContext.getJobId(), replicationContext.getAttempt()); when(streamStatusesApi.createStreamStatus(any())).thenReturn(new StreamStatusRead().id(STREAM_ID)); when(airbyteApiClient.getStreamStatusesApi()).thenReturn(streamStatusesApi); @@ -180,8 +180,7 @@ void testTrackingCompleteSourceOnly() throws ApiException { final AirbyteMessage startedAirbyteMessage = createAirbyteMessage(streamDescriptor, STARTED, TIMESTAMP); final AirbyteMessage runningAirbyteMessage = createAirbyteMessage(streamDescriptor, AirbyteStreamStatus.RUNNING, TIMESTAMP); final AirbyteMessage sourceCompleteAirbyteMessage = createAirbyteMessage(streamDescriptor, COMPLETE, TIMESTAMP); - final ReplicationContext replicationContext = - new ReplicationContext(false, CONNECTION_ID, DESTINATION_ID, SOURCE_ID, JOB_ID, ATTEMPT, WORKSPACE_ID, SOURCE_IMAGE, DESTINATION_IMAGE); + final ReplicationContext replicationContext = getDefaultContext(false); final ReplicationAirbyteMessageEvent startedEvent = new ReplicationAirbyteMessageEvent(airbyteMessageOrigin, startedAirbyteMessage, replicationContext); final ReplicationAirbyteMessageEvent runningEvent = @@ -189,7 +188,7 @@ void testTrackingCompleteSourceOnly() throws ApiException { final 
ReplicationAirbyteMessageEvent sourceEvent = new ReplicationAirbyteMessageEvent(airbyteMessageOrigin, sourceCompleteAirbyteMessage, replicationContext); final StreamStatusKey streamStatusKey = new StreamStatusKey(streamDescriptor.getName(), streamDescriptor.getNamespace(), - replicationContext.workspaceId(), replicationContext.connectionId(), replicationContext.jobId(), replicationContext.attempt()); + replicationContext.getWorkspaceId(), replicationContext.getConnectionId(), replicationContext.getJobId(), replicationContext.getAttempt()); when(streamStatusesApi.createStreamStatus(any())).thenReturn(new StreamStatusRead().id(STREAM_ID)); when(airbyteApiClient.getStreamStatusesApi()).thenReturn(streamStatusesApi); @@ -208,8 +207,7 @@ void testTrackingCompleteDestinationOnly() throws ApiException { final AirbyteMessage startedAirbyteMessage = createAirbyteMessage(streamDescriptor, STARTED, TIMESTAMP); final AirbyteMessage runningAirbyteMessage = createAirbyteMessage(streamDescriptor, AirbyteStreamStatus.RUNNING, TIMESTAMP); final AirbyteMessage destinationCompleteAirbyteMessage = createAirbyteMessage(streamDescriptor, COMPLETE, TIMESTAMP); - final ReplicationContext replicationContext = - new ReplicationContext(false, CONNECTION_ID, DESTINATION_ID, SOURCE_ID, JOB_ID, ATTEMPT, WORKSPACE_ID, SOURCE_IMAGE, DESTINATION_IMAGE); + final ReplicationContext replicationContext = getDefaultContext(false); final ReplicationAirbyteMessageEvent startedEvent = new ReplicationAirbyteMessageEvent(airbyteMessageOrigin, startedAirbyteMessage, replicationContext); final ReplicationAirbyteMessageEvent runningEvent = @@ -217,7 +215,7 @@ void testTrackingCompleteDestinationOnly() throws ApiException { final ReplicationAirbyteMessageEvent destinationEvent = new ReplicationAirbyteMessageEvent(AirbyteMessageOrigin.DESTINATION, destinationCompleteAirbyteMessage, replicationContext); final StreamStatusKey streamStatusKey = new StreamStatusKey(streamDescriptor.getName(), 
streamDescriptor.getNamespace(), - replicationContext.workspaceId(), replicationContext.connectionId(), replicationContext.jobId(), replicationContext.attempt()); + replicationContext.getWorkspaceId(), replicationContext.getConnectionId(), replicationContext.getJobId(), replicationContext.getAttempt()); when(streamStatusesApi.createStreamStatus(any())).thenReturn(new StreamStatusRead().id(STREAM_ID)); when(airbyteApiClient.getStreamStatusesApi()).thenReturn(streamStatusesApi); @@ -238,8 +236,7 @@ void testTrackingCompleteSourceAndCompleteDestination(final boolean isReset) thr final AirbyteMessage runningAirbyteMessage = createAirbyteMessage(streamDescriptor, AirbyteStreamStatus.RUNNING, TIMESTAMP); final AirbyteMessage destinationCompleteAirbyteMessage = createAirbyteMessage(streamDescriptor, COMPLETE, TIMESTAMP); final AirbyteMessage sourceCompleteAirbyteMessage = createAirbyteMessage(streamDescriptor, COMPLETE, TIMESTAMP); - final ReplicationContext replicationContext = - new ReplicationContext(isReset, CONNECTION_ID, DESTINATION_ID, SOURCE_ID, JOB_ID, ATTEMPT, WORKSPACE_ID, SOURCE_IMAGE, DESTINATION_IMAGE); + final ReplicationContext replicationContext = getDefaultContext(isReset); final ReplicationAirbyteMessageEvent startedEvent = new ReplicationAirbyteMessageEvent(airbyteMessageOrigin, startedAirbyteMessage, replicationContext); final ReplicationAirbyteMessageEvent runningEvent = @@ -260,7 +257,7 @@ void testTrackingCompleteSourceAndCompleteDestination(final boolean isReset) thr .transitionedAt(TIMESTAMP.toMillis()) .workspaceId(WORKSPACE_ID); final StreamStatusKey streamStatusKey = new StreamStatusKey(streamDescriptor.getName(), streamDescriptor.getNamespace(), - replicationContext.workspaceId(), replicationContext.connectionId(), replicationContext.jobId(), replicationContext.attempt()); + replicationContext.getWorkspaceId(), replicationContext.getConnectionId(), replicationContext.getJobId(), replicationContext.getAttempt()); 
when(streamStatusesApi.createStreamStatus(any())).thenReturn(new StreamStatusRead().id(STREAM_ID)); when(airbyteApiClient.getStreamStatusesApi()).thenReturn(streamStatusesApi); @@ -286,8 +283,7 @@ void testTrackingCompleteDestinationAndCompleteSource(final boolean isReset) thr final AirbyteMessage runningAirbyteMessage = createAirbyteMessage(streamDescriptor, AirbyteStreamStatus.RUNNING, TIMESTAMP); final AirbyteMessage destinationCompleteAirbyteMessage = createAirbyteMessage(streamDescriptor, COMPLETE, TIMESTAMP); final AirbyteMessage sourceCompleteAirbyteMessage = createAirbyteMessage(streamDescriptor, COMPLETE, TIMESTAMP); - final ReplicationContext replicationContext = - new ReplicationContext(isReset, CONNECTION_ID, DESTINATION_ID, SOURCE_ID, JOB_ID, ATTEMPT, WORKSPACE_ID, SOURCE_IMAGE, DESTINATION_IMAGE); + final ReplicationContext replicationContext = getDefaultContext(isReset); final ReplicationAirbyteMessageEvent startedEvent = new ReplicationAirbyteMessageEvent(airbyteMessageOrigin, startedAirbyteMessage, replicationContext); final ReplicationAirbyteMessageEvent runningEvent = @@ -308,7 +304,7 @@ void testTrackingCompleteDestinationAndCompleteSource(final boolean isReset) thr .transitionedAt(TIMESTAMP.toMillis()) .workspaceId(WORKSPACE_ID); final StreamStatusKey streamStatusKey = new StreamStatusKey(streamDescriptor.getName(), streamDescriptor.getNamespace(), - replicationContext.workspaceId(), replicationContext.connectionId(), replicationContext.jobId(), replicationContext.attempt()); + replicationContext.getWorkspaceId(), replicationContext.getConnectionId(), replicationContext.getJobId(), replicationContext.getAttempt()); when(streamStatusesApi.createStreamStatus(any())).thenReturn(new StreamStatusRead().id(STREAM_ID)); when(airbyteApiClient.getStreamStatusesApi()).thenReturn(streamStatusesApi); @@ -338,8 +334,7 @@ void testTrackingIncompleteSourceOnly(final boolean isReset) throws ApiException final AirbyteMessage startedAirbyteMessage = 
createAirbyteMessage(streamDescriptor, STARTED, TIMESTAMP); final AirbyteMessage runningAirbyteMessage = createAirbyteMessage(streamDescriptor, AirbyteStreamStatus.RUNNING, TIMESTAMP); final AirbyteMessage sourceIncompleteAirbyteMessage = createAirbyteMessage(streamDescriptor, AirbyteStreamStatus.INCOMPLETE, TIMESTAMP); - final ReplicationContext replicationContext = - new ReplicationContext(isReset, CONNECTION_ID, DESTINATION_ID, SOURCE_ID, JOB_ID, ATTEMPT, WORKSPACE_ID, SOURCE_IMAGE, DESTINATION_IMAGE); + final ReplicationContext replicationContext = getDefaultContext(isReset); final var incompleteRunCause = StreamStatusIncompleteRunCause.FAILED; final ReplicationAirbyteMessageEvent startedEvent = new ReplicationAirbyteMessageEvent(airbyteMessageOrigin, startedAirbyteMessage, replicationContext); @@ -360,7 +355,7 @@ void testTrackingIncompleteSourceOnly(final boolean isReset) throws ApiException .transitionedAt(TIMESTAMP.toMillis()) .workspaceId(WORKSPACE_ID); final StreamStatusKey streamStatusKey = new StreamStatusKey(streamDescriptor.getName(), streamDescriptor.getNamespace(), - replicationContext.workspaceId(), replicationContext.connectionId(), replicationContext.jobId(), replicationContext.attempt()); + replicationContext.getWorkspaceId(), replicationContext.getConnectionId(), replicationContext.getJobId(), replicationContext.getAttempt()); when(streamStatusesApi.createStreamStatus(any())).thenReturn(new StreamStatusRead().id(STREAM_ID)); when(airbyteApiClient.getStreamStatusesApi()).thenReturn(streamStatusesApi); @@ -383,8 +378,7 @@ void testTrackingIncompleteDestinationOnly(final boolean isReset) throws ApiExce final AirbyteMessage startedAirbyteMessage = createAirbyteMessage(streamDescriptor, STARTED, TIMESTAMP); final AirbyteMessage runningAirbyteMessage = createAirbyteMessage(streamDescriptor, AirbyteStreamStatus.RUNNING, TIMESTAMP); final AirbyteMessage destinationIncompleteAirbyteMessage = createAirbyteMessage(streamDescriptor, 
AirbyteStreamStatus.INCOMPLETE, TIMESTAMP); - final ReplicationContext replicationContext = - new ReplicationContext(isReset, CONNECTION_ID, DESTINATION_ID, SOURCE_ID, JOB_ID, ATTEMPT, WORKSPACE_ID, SOURCE_IMAGE, DESTINATION_IMAGE); + final ReplicationContext replicationContext = getDefaultContext(isReset); final var incompleteRunCause = StreamStatusIncompleteRunCause.FAILED; final ReplicationAirbyteMessageEvent startedEvent = new ReplicationAirbyteMessageEvent(airbyteMessageOrigin, startedAirbyteMessage, replicationContext); @@ -406,7 +400,7 @@ void testTrackingIncompleteDestinationOnly(final boolean isReset) throws ApiExce .transitionedAt(TIMESTAMP.toMillis()) .workspaceId(WORKSPACE_ID); final StreamStatusKey streamStatusKey = new StreamStatusKey(streamDescriptor.getName(), streamDescriptor.getNamespace(), - replicationContext.workspaceId(), replicationContext.connectionId(), replicationContext.jobId(), replicationContext.attempt()); + replicationContext.getWorkspaceId(), replicationContext.getConnectionId(), replicationContext.getJobId(), replicationContext.getAttempt()); when(streamStatusesApi.createStreamStatus(any())).thenReturn(new StreamStatusRead().id(STREAM_ID)); when(airbyteApiClient.getStreamStatusesApi()).thenReturn(streamStatusesApi); @@ -430,8 +424,7 @@ void testTrackingIncompleteSourceAndIncompleteDestination(final boolean isReset) final AirbyteMessage runningAirbyteMessage = createAirbyteMessage(streamDescriptor, AirbyteStreamStatus.RUNNING, TIMESTAMP); final AirbyteMessage destinationIncompleteAirbyteMessage = createAirbyteMessage(streamDescriptor, AirbyteStreamStatus.INCOMPLETE, TIMESTAMP); final AirbyteMessage sourceIncompleteAirbyteMessage = createAirbyteMessage(streamDescriptor, AirbyteStreamStatus.INCOMPLETE, TIMESTAMP); - final ReplicationContext replicationContext = - new ReplicationContext(isReset, CONNECTION_ID, DESTINATION_ID, SOURCE_ID, JOB_ID, ATTEMPT, WORKSPACE_ID, SOURCE_IMAGE, DESTINATION_IMAGE); + final ReplicationContext 
replicationContext = getDefaultContext(isReset); final var incompleteRunCause = StreamStatusIncompleteRunCause.FAILED; final ReplicationAirbyteMessageEvent startedEvent = new ReplicationAirbyteMessageEvent(airbyteMessageOrigin, startedAirbyteMessage, replicationContext); @@ -455,7 +448,7 @@ void testTrackingIncompleteSourceAndIncompleteDestination(final boolean isReset) .transitionedAt(TIMESTAMP.toMillis()) .workspaceId(WORKSPACE_ID); final StreamStatusKey streamStatusKey = new StreamStatusKey(streamDescriptor.getName(), streamDescriptor.getNamespace(), - replicationContext.workspaceId(), replicationContext.connectionId(), replicationContext.jobId(), replicationContext.attempt()); + replicationContext.getWorkspaceId(), replicationContext.getConnectionId(), replicationContext.getJobId(), replicationContext.getAttempt()); when(streamStatusesApi.createStreamStatus(any())).thenReturn(new StreamStatusRead().id(STREAM_ID)); when(airbyteApiClient.getStreamStatusesApi()).thenReturn(streamStatusesApi); @@ -481,8 +474,7 @@ void testTrackingIncompleteDestinationAndIncompleteSource(final boolean isReset) final AirbyteMessage runningAirbyteMessage = createAirbyteMessage(streamDescriptor, AirbyteStreamStatus.RUNNING, TIMESTAMP); final AirbyteMessage destinationIncompleteAirbyteMessage = createAirbyteMessage(streamDescriptor, AirbyteStreamStatus.INCOMPLETE, TIMESTAMP); final AirbyteMessage sourceIncompleteAirbyteMessage = createAirbyteMessage(streamDescriptor, AirbyteStreamStatus.INCOMPLETE, TIMESTAMP); - final ReplicationContext replicationContext = - new ReplicationContext(isReset, CONNECTION_ID, DESTINATION_ID, SOURCE_ID, JOB_ID, ATTEMPT, WORKSPACE_ID, SOURCE_IMAGE, DESTINATION_IMAGE); + final ReplicationContext replicationContext = getDefaultContext(isReset); final var incompleteRunCause = StreamStatusIncompleteRunCause.FAILED; final ReplicationAirbyteMessageEvent startedEvent = new ReplicationAirbyteMessageEvent(airbyteMessageOrigin, startedAirbyteMessage, 
replicationContext); @@ -506,7 +498,7 @@ void testTrackingIncompleteDestinationAndIncompleteSource(final boolean isReset) .transitionedAt(TIMESTAMP.toMillis()) .workspaceId(WORKSPACE_ID); final StreamStatusKey streamStatusKey = new StreamStatusKey(streamDescriptor.getName(), streamDescriptor.getNamespace(), - replicationContext.workspaceId(), replicationContext.connectionId(), replicationContext.jobId(), replicationContext.attempt()); + replicationContext.getWorkspaceId(), replicationContext.getConnectionId(), replicationContext.getJobId(), replicationContext.getAttempt()); when(streamStatusesApi.createStreamStatus(any())).thenReturn(new StreamStatusRead().id(STREAM_ID)); when(airbyteApiClient.getStreamStatusesApi()).thenReturn(streamStatusesApi); @@ -532,8 +524,7 @@ void testTrackingIncompleteSourceAndCompleteDestination(final boolean isReset) t final AirbyteMessage runningAirbyteMessage = createAirbyteMessage(streamDescriptor, AirbyteStreamStatus.RUNNING, TIMESTAMP); final AirbyteMessage destinationCompleteAirbyteMessage = createAirbyteMessage(streamDescriptor, COMPLETE, TIMESTAMP); final AirbyteMessage sourceIncompleteAirbyteMessage = createAirbyteMessage(streamDescriptor, AirbyteStreamStatus.INCOMPLETE, TIMESTAMP); - final ReplicationContext replicationContext = - new ReplicationContext(isReset, CONNECTION_ID, DESTINATION_ID, SOURCE_ID, JOB_ID, ATTEMPT, WORKSPACE_ID, SOURCE_IMAGE, DESTINATION_IMAGE); + final ReplicationContext replicationContext = getDefaultContext(isReset); final var incompleteRunCause = StreamStatusIncompleteRunCause.FAILED; final ReplicationAirbyteMessageEvent startedEvent = new ReplicationAirbyteMessageEvent(airbyteMessageOrigin, startedAirbyteMessage, replicationContext); @@ -556,7 +547,7 @@ void testTrackingIncompleteSourceAndCompleteDestination(final boolean isReset) t .transitionedAt(TIMESTAMP.toMillis()) .workspaceId(WORKSPACE_ID); final StreamStatusKey streamStatusKey = new StreamStatusKey(streamDescriptor.getName(), 
streamDescriptor.getNamespace(), - replicationContext.workspaceId(), replicationContext.connectionId(), replicationContext.jobId(), replicationContext.attempt()); + replicationContext.getWorkspaceId(), replicationContext.getConnectionId(), replicationContext.getJobId(), replicationContext.getAttempt()); when(streamStatusesApi.createStreamStatus(any())).thenReturn(new StreamStatusRead().id(STREAM_ID)); when(airbyteApiClient.getStreamStatusesApi()).thenReturn(streamStatusesApi); @@ -582,8 +573,7 @@ void testTrackingCompleteDestinationAndIncompleteSource(final boolean isReset) t final AirbyteMessage runningAirbyteMessage = createAirbyteMessage(streamDescriptor, AirbyteStreamStatus.RUNNING, TIMESTAMP); final AirbyteMessage destinationCompleteAirbyteMessage = createAirbyteMessage(streamDescriptor, COMPLETE, TIMESTAMP); final AirbyteMessage sourceIncompleteAirbyteMessage = createAirbyteMessage(streamDescriptor, AirbyteStreamStatus.INCOMPLETE, TIMESTAMP); - final ReplicationContext replicationContext = - new ReplicationContext(isReset, CONNECTION_ID, DESTINATION_ID, SOURCE_ID, JOB_ID, ATTEMPT, WORKSPACE_ID, SOURCE_IMAGE, DESTINATION_IMAGE); + final ReplicationContext replicationContext = getDefaultContext(isReset); final var incompleteRunCause = StreamStatusIncompleteRunCause.FAILED; final ReplicationAirbyteMessageEvent startedEvent = new ReplicationAirbyteMessageEvent(airbyteMessageOrigin, startedAirbyteMessage, replicationContext); @@ -606,7 +596,7 @@ void testTrackingCompleteDestinationAndIncompleteSource(final boolean isReset) t .transitionedAt(TIMESTAMP.toMillis()) .workspaceId(WORKSPACE_ID); final StreamStatusKey streamStatusKey = new StreamStatusKey(streamDescriptor.getName(), streamDescriptor.getNamespace(), - replicationContext.workspaceId(), replicationContext.connectionId(), replicationContext.jobId(), replicationContext.attempt()); + replicationContext.getWorkspaceId(), replicationContext.getConnectionId(), replicationContext.getJobId(), 
replicationContext.getAttempt()); when(streamStatusesApi.createStreamStatus(any())).thenReturn(new StreamStatusRead().id(STREAM_ID)); when(airbyteApiClient.getStreamStatusesApi()).thenReturn(streamStatusesApi); @@ -632,8 +622,7 @@ void testTrackingCompleteSourceAndIncompleteDestination(final boolean isReset) t final AirbyteMessage runningAirbyteMessage = createAirbyteMessage(streamDescriptor, AirbyteStreamStatus.RUNNING, TIMESTAMP); final AirbyteMessage destinationIncompleteAirbyteMessage = createAirbyteMessage(streamDescriptor, AirbyteStreamStatus.INCOMPLETE, TIMESTAMP); final AirbyteMessage sourceCompleteAirbyteMessage = createAirbyteMessage(streamDescriptor, COMPLETE, TIMESTAMP); - final ReplicationContext replicationContext = - new ReplicationContext(isReset, CONNECTION_ID, DESTINATION_ID, SOURCE_ID, JOB_ID, ATTEMPT, WORKSPACE_ID, SOURCE_IMAGE, DESTINATION_IMAGE); + final ReplicationContext replicationContext = getDefaultContext(isReset); final var incompleteRunCause = StreamStatusIncompleteRunCause.FAILED; final ReplicationAirbyteMessageEvent startedEvent = new ReplicationAirbyteMessageEvent(airbyteMessageOrigin, startedAirbyteMessage, replicationContext); @@ -657,7 +646,7 @@ void testTrackingCompleteSourceAndIncompleteDestination(final boolean isReset) t .transitionedAt(TIMESTAMP.toMillis()) .workspaceId(WORKSPACE_ID); final StreamStatusKey streamStatusKey = new StreamStatusKey(streamDescriptor.getName(), streamDescriptor.getNamespace(), - replicationContext.workspaceId(), replicationContext.connectionId(), replicationContext.jobId(), replicationContext.attempt()); + replicationContext.getWorkspaceId(), replicationContext.getConnectionId(), replicationContext.getJobId(), replicationContext.getAttempt()); when(streamStatusesApi.createStreamStatus(any())).thenReturn(new StreamStatusRead().id(STREAM_ID)); when(airbyteApiClient.getStreamStatusesApi()).thenReturn(streamStatusesApi); @@ -683,8 +672,7 @@ void testTrackingIncompleteDestinationAndCompleteSource(final 
boolean isReset) t final AirbyteMessage runningAirbyteMessage = createAirbyteMessage(streamDescriptor, AirbyteStreamStatus.RUNNING, TIMESTAMP); final AirbyteMessage destinationIncompleteAirbyteMessage = createAirbyteMessage(streamDescriptor, AirbyteStreamStatus.INCOMPLETE, TIMESTAMP); final AirbyteMessage sourceCompleteAirbyteMessage = createAirbyteMessage(streamDescriptor, COMPLETE, TIMESTAMP); - final ReplicationContext replicationContext = - new ReplicationContext(isReset, CONNECTION_ID, DESTINATION_ID, SOURCE_ID, JOB_ID, ATTEMPT, WORKSPACE_ID, SOURCE_IMAGE, DESTINATION_IMAGE); + final ReplicationContext replicationContext = getDefaultContext(isReset); final var incompleteRunCause = StreamStatusIncompleteRunCause.FAILED; final ReplicationAirbyteMessageEvent startedEvent = new ReplicationAirbyteMessageEvent(airbyteMessageOrigin, startedAirbyteMessage, replicationContext); @@ -708,7 +696,7 @@ void testTrackingIncompleteDestinationAndCompleteSource(final boolean isReset) t .transitionedAt(TIMESTAMP.toMillis()) .workspaceId(WORKSPACE_ID); final StreamStatusKey streamStatusKey = new StreamStatusKey(streamDescriptor.getName(), streamDescriptor.getNamespace(), - replicationContext.workspaceId(), replicationContext.connectionId(), replicationContext.jobId(), replicationContext.attempt()); + replicationContext.getWorkspaceId(), replicationContext.getConnectionId(), replicationContext.getJobId(), replicationContext.getAttempt()); when(streamStatusesApi.createStreamStatus(any())).thenReturn(new StreamStatusRead().id(STREAM_ID)); when(airbyteApiClient.getStreamStatusesApi()).thenReturn(streamStatusesApi); @@ -733,8 +721,7 @@ void testTrackingInternalIncomplete(final boolean isReset) throws ApiException { final AirbyteMessage startedAirbyteMessage = createAirbyteMessage(streamDescriptor, STARTED, TIMESTAMP); final AirbyteMessage runningAirbyteMessage = createAirbyteMessage(streamDescriptor, AirbyteStreamStatus.RUNNING, TIMESTAMP); final AirbyteMessage 
sourceIncompleteAirbyteMessage = createAirbyteMessage(streamDescriptor, AirbyteStreamStatus.INCOMPLETE, TIMESTAMP); - final ReplicationContext replicationContext = - new ReplicationContext(isReset, CONNECTION_ID, DESTINATION_ID, SOURCE_ID, JOB_ID, ATTEMPT, WORKSPACE_ID, SOURCE_IMAGE, DESTINATION_IMAGE); + final ReplicationContext replicationContext = getDefaultContext(isReset); final var incompleteRunCause = StreamStatusIncompleteRunCause.FAILED; final ReplicationAirbyteMessageEvent startedEvent = new ReplicationAirbyteMessageEvent(airbyteMessageOrigin, startedAirbyteMessage, replicationContext); @@ -755,7 +742,7 @@ void testTrackingInternalIncomplete(final boolean isReset) throws ApiException { .transitionedAt(TIMESTAMP.toMillis()) .workspaceId(WORKSPACE_ID); final StreamStatusKey streamStatusKey = new StreamStatusKey(streamDescriptor.getName(), streamDescriptor.getNamespace(), - replicationContext.workspaceId(), replicationContext.connectionId(), replicationContext.jobId(), replicationContext.attempt()); + replicationContext.getWorkspaceId(), replicationContext.getConnectionId(), replicationContext.getJobId(), replicationContext.getAttempt()); when(streamStatusesApi.createStreamStatus(any())).thenReturn(new StreamStatusRead().id(STREAM_ID)); when(airbyteApiClient.getStreamStatusesApi()).thenReturn(streamStatusesApi); @@ -776,8 +763,7 @@ void testTrackingInternalIncomplete(final boolean isReset) throws ApiException { void testTrackingOutOfOrderStartedStatus() throws ApiException { final AirbyteMessageOrigin airbyteMessageOrigin = AirbyteMessageOrigin.SOURCE; final AirbyteMessage airbyteMessage = createAirbyteMessage(streamDescriptor, STARTED, TIMESTAMP); - final ReplicationContext replicationContext = - new ReplicationContext(false, CONNECTION_ID, DESTINATION_ID, SOURCE_ID, JOB_ID, ATTEMPT, WORKSPACE_ID, SOURCE_IMAGE, DESTINATION_IMAGE); + final ReplicationContext replicationContext = getDefaultContext(false); final ReplicationAirbyteMessageEvent event = new 
ReplicationAirbyteMessageEvent(airbyteMessageOrigin, airbyteMessage, replicationContext); final StreamStatusCreateRequestBody expected = new StreamStatusCreateRequestBody() .streamName(streamDescriptor.getName()) @@ -790,7 +776,7 @@ void testTrackingOutOfOrderStartedStatus() throws ApiException { .transitionedAt(TIMESTAMP.toMillis()) .workspaceId(WORKSPACE_ID); final StreamStatusKey streamStatusKey = new StreamStatusKey(streamDescriptor.getName(), streamDescriptor.getNamespace(), - replicationContext.workspaceId(), replicationContext.connectionId(), replicationContext.jobId(), replicationContext.attempt()); + replicationContext.getWorkspaceId(), replicationContext.getConnectionId(), replicationContext.getJobId(), replicationContext.getAttempt()); when(streamStatusesApi.createStreamStatus(any())).thenReturn(new StreamStatusRead().id(STREAM_ID)); when(airbyteApiClient.getStreamStatusesApi()).thenReturn(streamStatusesApi); @@ -807,8 +793,7 @@ void testTrackingOutOfOrderRunningStatus() throws ApiException { final AirbyteMessageOrigin airbyteMessageOrigin = AirbyteMessageOrigin.SOURCE; final AirbyteMessage startedAirbyteMessage = createAirbyteMessage(streamDescriptor, STARTED, TIMESTAMP); final AirbyteMessage runningAirbyteMessage = createAirbyteMessage(streamDescriptor, AirbyteStreamStatus.RUNNING, TIMESTAMP); - final ReplicationContext replicationContext = - new ReplicationContext(false, CONNECTION_ID, DESTINATION_ID, SOURCE_ID, JOB_ID, ATTEMPT, WORKSPACE_ID, SOURCE_IMAGE, DESTINATION_IMAGE); + final ReplicationContext replicationContext = getDefaultContext(false); final ReplicationAirbyteMessageEvent startedEvent = new ReplicationAirbyteMessageEvent(airbyteMessageOrigin, startedAirbyteMessage, replicationContext); final ReplicationAirbyteMessageEvent runningEvent = @@ -825,7 +810,7 @@ void testTrackingOutOfOrderRunningStatus() throws ApiException { .transitionedAt(TIMESTAMP.toMillis()) .workspaceId(WORKSPACE_ID); final StreamStatusKey streamStatusKey = new 
StreamStatusKey(streamDescriptor.getName(), streamDescriptor.getNamespace(), - replicationContext.workspaceId(), replicationContext.connectionId(), replicationContext.jobId(), replicationContext.attempt()); + replicationContext.getWorkspaceId(), replicationContext.getConnectionId(), replicationContext.getJobId(), replicationContext.getAttempt()); when(streamStatusesApi.createStreamStatus(any())).thenReturn(new StreamStatusRead().id(STREAM_ID)); when(airbyteApiClient.getStreamStatusesApi()).thenReturn(streamStatusesApi); @@ -846,14 +831,13 @@ void testTrackingOutOfOrderCompleteStatus() throws ApiException { final AirbyteMessageOrigin airbyteMessageOrigin = AirbyteMessageOrigin.SOURCE; final AirbyteMessage destinationStoppedAirbyteMessage = createAirbyteMessage(streamDescriptor, COMPLETE, TIMESTAMP); final AirbyteMessage sourceStoppedAirbyteMessage = createAirbyteMessage(streamDescriptor, COMPLETE, TIMESTAMP); - final ReplicationContext replicationContext = - new ReplicationContext(false, CONNECTION_ID, DESTINATION_ID, SOURCE_ID, JOB_ID, ATTEMPT, WORKSPACE_ID, SOURCE_IMAGE, DESTINATION_IMAGE); + final ReplicationContext replicationContext = getDefaultContext(false); final ReplicationAirbyteMessageEvent destinationEvent = new ReplicationAirbyteMessageEvent(airbyteMessageOrigin, destinationStoppedAirbyteMessage, replicationContext); final ReplicationAirbyteMessageEvent sourceEvent = new ReplicationAirbyteMessageEvent(airbyteMessageOrigin, sourceStoppedAirbyteMessage, replicationContext); final StreamStatusKey streamStatusKey = new StreamStatusKey(streamDescriptor.getName(), streamDescriptor.getNamespace(), - replicationContext.workspaceId(), replicationContext.connectionId(), replicationContext.jobId(), replicationContext.attempt()); + replicationContext.getWorkspaceId(), replicationContext.getConnectionId(), replicationContext.getJobId(), replicationContext.getAttempt()); streamStatusTracker.track(sourceEvent); streamStatusTracker.track(destinationEvent); @@ -868,14 
+852,13 @@ void testTrackingOutOfOrderIncompleteStatus() throws ApiException { final AirbyteMessageOrigin airbyteMessageOrigin = AirbyteMessageOrigin.SOURCE; final AirbyteMessage destinationStoppedAirbyteMessage = createAirbyteMessage(streamDescriptor, AirbyteStreamStatus.INCOMPLETE, TIMESTAMP); final AirbyteMessage sourceStoppedAirbyteMessage = createAirbyteMessage(streamDescriptor, AirbyteStreamStatus.INCOMPLETE, TIMESTAMP); - final ReplicationContext replicationContext = - new ReplicationContext(false, CONNECTION_ID, DESTINATION_ID, SOURCE_ID, JOB_ID, ATTEMPT, WORKSPACE_ID, SOURCE_IMAGE, DESTINATION_IMAGE); + final ReplicationContext replicationContext = getDefaultContext(false); final ReplicationAirbyteMessageEvent destinationEvent = new ReplicationAirbyteMessageEvent(airbyteMessageOrigin, destinationStoppedAirbyteMessage, replicationContext); final ReplicationAirbyteMessageEvent sourceEvent = new ReplicationAirbyteMessageEvent(airbyteMessageOrigin, sourceStoppedAirbyteMessage, replicationContext); final StreamStatusKey streamStatusKey = new StreamStatusKey(streamDescriptor.getName(), streamDescriptor.getNamespace(), - replicationContext.workspaceId(), replicationContext.connectionId(), replicationContext.jobId(), replicationContext.attempt()); + replicationContext.getWorkspaceId(), replicationContext.getConnectionId(), replicationContext.getJobId(), replicationContext.getAttempt()); streamStatusTracker.track(sourceEvent); streamStatusTracker.track(destinationEvent); @@ -888,8 +871,7 @@ void testTrackingOutOfOrderIncompleteStatus() throws ApiException { @ParameterizedTest @ValueSource(booleans = {true, false}) void testForceCompletionRunning(final boolean isReset) throws ApiException { - final ReplicationContext replicationContext = - new ReplicationContext(isReset, CONNECTION_ID, DESTINATION_ID, SOURCE_ID, JOB_ID, ATTEMPT, WORKSPACE_ID, SOURCE_IMAGE, DESTINATION_IMAGE); + final ReplicationContext replicationContext = getDefaultContext(isReset); final 
AirbyteMessage startedAirbyteMessage = createAirbyteMessage(streamDescriptor, STARTED, TIMESTAMP); final AirbyteMessage runningAirbyteMessage = createAirbyteMessage(streamDescriptor, AirbyteStreamStatus.RUNNING, TIMESTAMP); @@ -913,7 +895,7 @@ void testForceCompletionRunning(final boolean isReset) throws ApiException { .transitionedAt(TIMESTAMP.toMillis()) .workspaceId(WORKSPACE_ID); final StreamStatusKey streamStatusKey = new StreamStatusKey(streamDescriptor.getName(), streamDescriptor.getNamespace(), - replicationContext.workspaceId(), replicationContext.connectionId(), replicationContext.jobId(), replicationContext.attempt()); + replicationContext.getWorkspaceId(), replicationContext.getConnectionId(), replicationContext.getJobId(), replicationContext.getAttempt()); when(streamStatusesApi.createStreamStatus(any())).thenReturn(new StreamStatusRead().id(STREAM_ID)); when(airbyteApiClient.getStreamStatusesApi()).thenReturn(streamStatusesApi); @@ -929,8 +911,7 @@ void testForceCompletionRunning(final boolean isReset) throws ApiException { @ParameterizedTest @ValueSource(booleans = {true, false}) void testForceCompletionPartiallyComplete(final boolean isReset) throws ApiException { - final ReplicationContext replicationContext = - new ReplicationContext(isReset, CONNECTION_ID, DESTINATION_ID, SOURCE_ID, JOB_ID, ATTEMPT, WORKSPACE_ID, SOURCE_IMAGE, DESTINATION_IMAGE); + final ReplicationContext replicationContext = getDefaultContext(isReset); final AirbyteMessage startedAirbyteMessage = createAirbyteMessage(streamDescriptor, STARTED, TIMESTAMP); final AirbyteMessage runningAirbyteMessage = createAirbyteMessage(streamDescriptor, AirbyteStreamStatus.RUNNING, TIMESTAMP); @@ -957,7 +938,7 @@ void testForceCompletionPartiallyComplete(final boolean isReset) throws ApiExcep .transitionedAt(TIMESTAMP.toMillis()) .workspaceId(WORKSPACE_ID); final StreamStatusKey streamStatusKey = new StreamStatusKey(streamDescriptor.getName(), streamDescriptor.getNamespace(), - 
replicationContext.workspaceId(), replicationContext.connectionId(), replicationContext.jobId(), replicationContext.attempt()); + replicationContext.getWorkspaceId(), replicationContext.getConnectionId(), replicationContext.getJobId(), replicationContext.getAttempt()); when(streamStatusesApi.createStreamStatus(any())).thenReturn(new StreamStatusRead().id(STREAM_ID)); when(airbyteApiClient.getStreamStatusesApi()).thenReturn(streamStatusesApi); @@ -977,8 +958,7 @@ void testForceCompletionPartiallyComplete(final boolean isReset) throws ApiExcep @ParameterizedTest @ValueSource(booleans = {true, false}) void testForceCompletionAlreadyIncomplete(final boolean isReset) throws ApiException { - final ReplicationContext replicationContext = - new ReplicationContext(isReset, CONNECTION_ID, DESTINATION_ID, SOURCE_ID, JOB_ID, ATTEMPT, WORKSPACE_ID, SOURCE_IMAGE, DESTINATION_IMAGE); + final ReplicationContext replicationContext = getDefaultContext(isReset); final AirbyteMessage startedAirbyteMessage = createAirbyteMessage(streamDescriptor, STARTED, TIMESTAMP); final AirbyteMessage runningAirbyteMessage = createAirbyteMessage(streamDescriptor, AirbyteStreamStatus.RUNNING, TIMESTAMP); @@ -1010,7 +990,7 @@ void testForceCompletionAlreadyIncomplete(final boolean isReset) throws ApiExcep .transitionedAt(TIMESTAMP.toMillis()) .workspaceId(WORKSPACE_ID); final StreamStatusKey streamStatusKey = new StreamStatusKey(streamDescriptor.getName(), streamDescriptor.getNamespace(), - replicationContext.workspaceId(), replicationContext.connectionId(), replicationContext.jobId(), replicationContext.attempt()); + replicationContext.getWorkspaceId(), replicationContext.getConnectionId(), replicationContext.getJobId(), replicationContext.getAttempt()); when(streamStatusesApi.createStreamStatus(any())).thenReturn(new StreamStatusRead().id(STREAM_ID)); when(airbyteApiClient.getStreamStatusesApi()).thenReturn(streamStatusesApi); @@ -1028,8 +1008,7 @@ void testForceCompletionAlreadyIncomplete(final 
boolean isReset) throws ApiExcep @ParameterizedTest @ValueSource(booleans = {true, false}) void testForceCompletionAlreadyComplete(final boolean isReset) throws ApiException { - final ReplicationContext replicationContext = - new ReplicationContext(isReset, CONNECTION_ID, DESTINATION_ID, SOURCE_ID, JOB_ID, ATTEMPT, WORKSPACE_ID, SOURCE_IMAGE, DESTINATION_IMAGE); + final ReplicationContext replicationContext = getDefaultContext(isReset); final AirbyteMessage startedAirbyteMessage = createAirbyteMessage(streamDescriptor, STARTED, TIMESTAMP); final AirbyteMessage runningAirbyteMessage = createAirbyteMessage(streamDescriptor, AirbyteStreamStatus.RUNNING, TIMESTAMP); @@ -1059,7 +1038,7 @@ void testForceCompletionAlreadyComplete(final boolean isReset) throws ApiExcepti .transitionedAt(TIMESTAMP.toMillis()) .workspaceId(WORKSPACE_ID); final StreamStatusKey streamStatusKey = new StreamStatusKey(streamDescriptor.getName(), streamDescriptor.getNamespace(), - replicationContext.workspaceId(), replicationContext.connectionId(), replicationContext.jobId(), replicationContext.attempt()); + replicationContext.getWorkspaceId(), replicationContext.getConnectionId(), replicationContext.getJobId(), replicationContext.getAttempt()); when(streamStatusesApi.createStreamStatus(any())).thenReturn(new StreamStatusRead().id(STREAM_ID)); when(airbyteApiClient.getStreamStatusesApi()).thenReturn(streamStatusesApi); @@ -1080,11 +1059,10 @@ void testForceCompletionDifferentConnectionId(final boolean isReset) throws ApiE final Integer attempt = 2; final Long jobId = 2L; final UUID connectionId = UUID.randomUUID(); - final ReplicationContext replicationContext1 = - new ReplicationContext(isReset, CONNECTION_ID, DESTINATION_ID, SOURCE_ID, JOB_ID, ATTEMPT, WORKSPACE_ID, SOURCE_IMAGE, DESTINATION_IMAGE); + final ReplicationContext replicationContext1 = getDefaultContext(isReset); final ReplicationContext replicationContext2 = new ReplicationContext(isReset, connectionId, UUID.randomUUID(), 
UUID.randomUUID(), jobId, attempt, WORKSPACE_ID, SOURCE_IMAGE, - DESTINATION_IMAGE); + DESTINATION_IMAGE, SOURCE_DEFINITION_ID, DESTINATION_DEFINITION_ID); final AirbyteMessage startedAirbyteMessage = createAirbyteMessage(streamDescriptor, STARTED, TIMESTAMP); final AirbyteMessage runningAirbyteMessage = createAirbyteMessage(streamDescriptor, AirbyteStreamStatus.RUNNING, TIMESTAMP); @@ -1108,7 +1086,8 @@ void testForceCompletionDifferentConnectionId(final boolean isReset) throws ApiE .transitionedAt(TIMESTAMP.toMillis()) .workspaceId(WORKSPACE_ID); final StreamStatusKey streamStatusKey = new StreamStatusKey(streamDescriptor.getName(), streamDescriptor.getNamespace(), - replicationContext1.workspaceId(), replicationContext1.connectionId(), replicationContext1.jobId(), replicationContext1.attempt()); + replicationContext1.getWorkspaceId(), replicationContext1.getConnectionId(), replicationContext1.getJobId(), + replicationContext1.getAttempt()); when(streamStatusesApi.createStreamStatus(any())).thenReturn(new StreamStatusRead().id(STREAM_ID)); when(airbyteApiClient.getStreamStatusesApi()).thenReturn(streamStatusesApi); @@ -1124,8 +1103,7 @@ void testForceCompletionDifferentConnectionId(final boolean isReset) throws ApiE @ParameterizedTest @ValueSource(booleans = {true, false}) void testForceCompletionHandleException(final boolean isReset) throws ApiException { - final ReplicationContext replicationContext = - new ReplicationContext(isReset, CONNECTION_ID, DESTINATION_ID, SOURCE_ID, JOB_ID, ATTEMPT, WORKSPACE_ID, SOURCE_IMAGE, DESTINATION_IMAGE); + final ReplicationContext replicationContext = getDefaultContext(isReset); final AirbyteMessage startedAirbyteMessage = createAirbyteMessage(streamDescriptor, STARTED, TIMESTAMP); final AirbyteMessage forceCompletionMessage = createAirbyteMessage(new StreamDescriptor(), COMPLETE, TIMESTAMP); @@ -1146,7 +1124,7 @@ void testForceCompletionHandleException(final boolean isReset) throws ApiExcepti 
.transitionedAt(TIMESTAMP.toMillis()) .workspaceId(WORKSPACE_ID); final StreamStatusKey streamStatusKey = new StreamStatusKey(streamDescriptor.getName(), streamDescriptor.getNamespace(), - replicationContext.workspaceId(), replicationContext.connectionId(), replicationContext.jobId(), replicationContext.attempt()); + replicationContext.getWorkspaceId(), replicationContext.getConnectionId(), replicationContext.getJobId(), replicationContext.getAttempt()); when(streamStatusesApi.createStreamStatus(any())).thenReturn(new StreamStatusRead()); when(airbyteApiClient.getStreamStatusesApi()).thenReturn(streamStatusesApi); @@ -1167,4 +1145,18 @@ private AirbyteMessage createAirbyteMessage(final StreamDescriptor streamDescrip return new AirbyteMessage().withType(Type.TRACE).withTrace(traceMessage); } + private ReplicationContext getDefaultContext(boolean isReset) { + return new ReplicationContext(isReset, + CONNECTION_ID, + DESTINATION_ID, + SOURCE_ID, + JOB_ID, + ATTEMPT, + WORKSPACE_ID, + SOURCE_IMAGE, + DESTINATION_IMAGE, + SOURCE_DEFINITION_ID, + DESTINATION_DEFINITION_ID); + } + } diff --git a/airbyte-commons-worker/src/test/java/io/airbyte/workers/internal/bookkeeping/events/AirbyteControlMessageEventListenerTest.java b/airbyte-commons-worker/src/test/java/io/airbyte/workers/internal/bookkeeping/events/AirbyteControlMessageEventListenerTest.java index 466b822f140..80b5c27c8ac 100644 --- a/airbyte-commons-worker/src/test/java/io/airbyte/workers/internal/bookkeeping/events/AirbyteControlMessageEventListenerTest.java +++ b/airbyte-commons-worker/src/test/java/io/airbyte/workers/internal/bookkeeping/events/AirbyteControlMessageEventListenerTest.java @@ -53,7 +53,7 @@ void testDestinationControlMessage() { when(airbyteControlMessage.getType()).thenReturn(AirbyteControlMessage.Type.CONNECTOR_CONFIG); when(airbyteMessage.getType()).thenReturn(Type.CONTROL); when(airbyteMessage.getControl()).thenReturn(airbyteControlMessage); - 
when(ReplicationContext.destinationId()).thenReturn(destinationId); + when(ReplicationContext.getDestinationId()).thenReturn(destinationId); final ReplicationAirbyteMessageEvent replicationAirbyteMessageEvent = new ReplicationAirbyteMessageEvent(airbyteMessageOrigin, airbyteMessage, ReplicationContext); @@ -77,7 +77,7 @@ void testSourceControlMessage() { when(airbyteControlMessage.getType()).thenReturn(AirbyteControlMessage.Type.CONNECTOR_CONFIG); when(airbyteMessage.getType()).thenReturn(Type.CONTROL); when(airbyteMessage.getControl()).thenReturn(airbyteControlMessage); - when(ReplicationContext.sourceId()).thenReturn(sourceId); + when(ReplicationContext.getSourceId()).thenReturn(sourceId); final ReplicationAirbyteMessageEvent replicationAirbyteMessageEvent = new ReplicationAirbyteMessageEvent(airbyteMessageOrigin, airbyteMessage, ReplicationContext); diff --git a/airbyte-commons-worker/src/test/java/io/airbyte/workers/internal/sync/WorkloadApiWorkerTest.kt b/airbyte-commons-worker/src/test/java/io/airbyte/workers/internal/sync/WorkloadApiWorkerTest.kt index 5c9472b62c8..8775d55d9cf 100644 --- a/airbyte-commons-worker/src/test/java/io/airbyte/workers/internal/sync/WorkloadApiWorkerTest.kt +++ b/airbyte-commons-worker/src/test/java/io/airbyte/workers/internal/sync/WorkloadApiWorkerTest.kt @@ -1,6 +1,7 @@ package io.airbyte.workers.internal.sync import io.airbyte.api.client.AirbyteApiClient +import io.airbyte.api.client.WorkloadApiClient import io.airbyte.api.client.generated.ConnectionApi import io.airbyte.api.client.model.generated.ConnectionRead import io.airbyte.api.client.model.generated.Geography @@ -17,6 +18,7 @@ import io.airbyte.workers.internal.exception.SourceException import io.airbyte.workers.models.ReplicationActivityInput import io.airbyte.workers.storage.StorageClient import io.airbyte.workers.sync.WorkloadApiWorker +import io.airbyte.workers.sync.WorkloadClient import io.airbyte.workers.workload.JobOutputDocStore import 
io.airbyte.workers.workload.WorkloadIdGenerator import io.airbyte.workload.api.client.generated.WorkloadApi @@ -40,6 +42,7 @@ internal class WorkloadApiWorkerTest { private var apiClient: AirbyteApiClient = mockk() private var connectionApi: ConnectionApi = mockk() private var workloadApi: WorkloadApi = mockk() + private var workloadApiClient: WorkloadApiClient = mockk() private var featureFlagClient: FeatureFlagClient = mockk() private var jobOutputDocStore: JobOutputDocStore = mockk() private lateinit var replicationActivityInput: ReplicationActivityInput @@ -50,6 +53,7 @@ internal class WorkloadApiWorkerTest { @BeforeEach fun beforeEach() { every { apiClient.connectionApi } returns connectionApi + every { workloadApiClient.workloadApi } returns workloadApi featureFlagClient = TestClient() jobRoot = Path.of("test", "path") replicationActivityInput = ReplicationActivityInput() @@ -58,7 +62,8 @@ internal class WorkloadApiWorkerTest { WorkloadApiWorker( jobOutputDocStore, apiClient, - workloadApi, + workloadApiClient, + WorkloadClient(workloadApiClient, jobOutputDocStore), workloadIdGenerator, replicationActivityInput, featureFlagClient, @@ -70,7 +75,6 @@ internal class WorkloadApiWorkerTest { val jobId = 13L val attemptNumber = 37 val workloadId = "my-workload" - val expectedDocPrefix = "testNs/orchestrator-repl-job-$jobId-attempt-$attemptNumber" val expectedOutput = ReplicationOutput() .withReplicationAttemptSummary(ReplicationAttemptSummary().withStatus(StandardSyncSummary.ReplicationStatus.COMPLETED)) @@ -93,7 +97,6 @@ internal class WorkloadApiWorkerTest { val jobId = 13L val attemptNumber = 37 val workloadId = "my-workload" - val expectedDocPrefix = "testNs/orchestrator-repl-job-$jobId-attempt-$attemptNumber" val expectedOutput = ReplicationOutput() .withReplicationAttemptSummary(ReplicationAttemptSummary().withStatus(StandardSyncSummary.ReplicationStatus.COMPLETED)) @@ -117,7 +120,6 @@ internal class WorkloadApiWorkerTest { val jobId = 313L val attemptNumber 
= 37 val workloadId = "my-workload" - val expectedDocPrefix = "testNs/orchestrator-repl-job-$jobId-attempt-$attemptNumber" val expectedOutput = ReplicationOutput() .withReplicationAttemptSummary(ReplicationAttemptSummary().withStatus(StandardSyncSummary.ReplicationStatus.COMPLETED)) @@ -237,7 +239,7 @@ internal class WorkloadApiWorkerTest { assertThrows { workloadApiWorker.run(replicationInput, jobRoot) } } - fun initializeReplicationInput( + private fun initializeReplicationInput( jobId: Long, attemptNumber: Int, ) { @@ -262,7 +264,7 @@ internal class WorkloadApiWorkerTest { } } - fun mockWorkload( + private fun mockWorkload( status: WorkloadStatus, terminationSource: String? = null, terminationReason: String? = null, diff --git a/airbyte-commons-worker/src/test/java/io/airbyte/workers/internal/syncpersistence/SyncPersistenceImplTest.java b/airbyte-commons-worker/src/test/java/io/airbyte/workers/internal/syncpersistence/SyncPersistenceImplTest.java index 670e8ea10f3..05b0a9e6131 100644 --- a/airbyte-commons-worker/src/test/java/io/airbyte/workers/internal/syncpersistence/SyncPersistenceImplTest.java +++ b/airbyte-commons-worker/src/test/java/io/airbyte/workers/internal/syncpersistence/SyncPersistenceImplTest.java @@ -24,7 +24,6 @@ import io.airbyte.api.client.generated.AttemptApi; import io.airbyte.api.client.generated.StateApi; import io.airbyte.api.client.invoker.generated.ApiException; -import io.airbyte.api.client.model.generated.ConnectionIdRequestBody; import io.airbyte.api.client.model.generated.ConnectionState; import io.airbyte.api.client.model.generated.ConnectionStateCreateOrUpdate; import io.airbyte.api.client.model.generated.ConnectionStateType; @@ -34,12 +33,9 @@ import io.airbyte.protocol.models.AirbyteGlobalState; import io.airbyte.protocol.models.AirbyteRecordMessage; import io.airbyte.protocol.models.AirbyteStateMessage; -import io.airbyte.protocol.models.AirbyteStream; import io.airbyte.protocol.models.AirbyteStreamState; import 
io.airbyte.protocol.models.ConfiguredAirbyteCatalog; -import io.airbyte.protocol.models.ConfiguredAirbyteStream; import io.airbyte.protocol.models.StreamDescriptor; -import io.airbyte.protocol.models.SyncMode; import io.airbyte.workers.internal.bookkeeping.SyncStatsTracker; import io.airbyte.workers.internal.stateaggregator.StateAggregatorFactory; import java.util.List; @@ -107,7 +103,6 @@ void afterEach() throws Exception { void testPersistHappyPath() throws ApiException { final AirbyteStateMessage stateA1 = getStreamState("A", 1); syncPersistence.persist(connectionId, stateA1); - verify(stateApi).getState(any()); verify(executorService).scheduleAtFixedRate(any(Runnable.class), eq(0L), eq(flushPeriod), eq(TimeUnit.SECONDS)); clearInvocations(executorService, stateApi); @@ -395,69 +390,6 @@ void testLegacyStatesAreGettingIntoTheScheduledFlushLogic() throws Exception { assertTrue(Jsons.serialize(captor.getValue()).contains("myOtherState2")); } - @Test - void testLegacyStateMigrationToStreamAreOnlyFlushedAtTheEnd() throws Exception { - // Migration is defined by current state returned from the API is LEGACY, and we are trying to - // persist a non LEGACY state - when(stateApi.getState(new ConnectionIdRequestBody().connectionId(connectionId))) - .thenReturn(new ConnectionState().state(Jsons.deserialize("{\"state\":\"some_state\"}")).stateType(ConnectionStateType.LEGACY)); - - final AirbyteStateMessage message = getStreamState("migration1", 12); - syncPersistence.persist(connectionId, message); - verify(stateApi).getState(new ConnectionIdRequestBody().connectionId(connectionId)); - verify(executorService, never()).scheduleAtFixedRate(any(), anyLong(), anyLong(), any()); - - reset(stateApi); - - // Since we're delaying the flush, executorService should not have been called - // We also want to make sure we are not calling getState every time - final AirbyteStateMessage otherMessage = getStreamState("migration2", 10); - syncPersistence.persist(connectionId, 
otherMessage); - verify(stateApi, never()).getState(new ConnectionIdRequestBody().connectionId(connectionId)); - verify(executorService, never()).scheduleAtFixedRate(any(), anyLong(), anyLong(), any()); - - when(executorService.awaitTermination(anyLong(), any())).thenReturn(true); - when(catalog.getStreams()).thenReturn(List.of( - new ConfiguredAirbyteStream().withStream(new AirbyteStream().withName("migration1")).withSyncMode(SyncMode.INCREMENTAL), - new ConfiguredAirbyteStream().withStream(new AirbyteStream().withName("migration2")).withSyncMode(SyncMode.INCREMENTAL))); - syncPersistence.close(); - verifyStateUpdateApiCall(List.of(message, otherMessage)); - } - - @Test - void testLegacyStateMigrationToGlobalGettingIntoTheScheduledFlushLogic() throws ApiException, InterruptedException { - // Migration is defined by current state returned from the API is LEGACY, and we are trying to - // persist a non LEGACY state - when(stateApi.getState(new ConnectionIdRequestBody().connectionId(connectionId))) - .thenReturn(new ConnectionState().state(Jsons.deserialize("{\"state\":\"some_state\"}")).stateType(ConnectionStateType.LEGACY)); - - final AirbyteStateMessage message = getGlobalState(14); - syncPersistence.persist(connectionId, message); - verify(stateApi).getState(new ConnectionIdRequestBody().connectionId(connectionId)); - verify(executorService).scheduleAtFixedRate(any(), anyLong(), anyLong(), any()); - } - - @Test - void testDoNotStartThreadUntilStateCheckSucceeds() throws ApiException { - when(stateApi.getState(any())) - .thenThrow(new ApiException()) - .thenReturn(null); - - final AirbyteStateMessage s1 = getStreamState("stream 1", 9); - syncPersistence.persist(connectionId, s1); - // First getState failed, we should not have started the thread or persisted states - verify(executorService, never()).scheduleAtFixedRate(any(), anyLong(), anyLong(), any()); - verify(stateApi, never()).createOrUpdateState(any()); - - final AirbyteStateMessage s2 = 
getStreamState("stream 2", 19); - syncPersistence.persist(connectionId, s2); - verify(executorService).scheduleAtFixedRate(any(), anyLong(), anyLong(), any()); - - // Since the first state check failed, we should be flushing both states on the first flush - actualFlushMethod.getValue().run(); - verifyStateUpdateApiCall(List.of(s1, s2)); - } - @Test void testSyncStatsTrackerWrapping() { syncPersistence.updateStats(new AirbyteRecordMessage()); diff --git a/airbyte-commons-worker/src/test/java/io/airbyte/workers/process/DockerProcessFactoryTest.java b/airbyte-commons-worker/src/test/java/io/airbyte/workers/process/DockerProcessFactoryTest.java index d6074dbc68b..a5af70e6dad 100644 --- a/airbyte-commons-worker/src/test/java/io/airbyte/workers/process/DockerProcessFactoryTest.java +++ b/airbyte-commons-worker/src/test/java/io/airbyte/workers/process/DockerProcessFactoryTest.java @@ -17,6 +17,7 @@ import io.airbyte.commons.io.IOs; import io.airbyte.commons.io.LineGobbler; import io.airbyte.commons.json.Jsons; +import io.airbyte.commons.logging.MdcScope; import io.airbyte.commons.workers.config.WorkerConfigs; import io.airbyte.commons.workers.config.WorkerConfigsProvider; import io.airbyte.commons.workers.config.WorkerConfigsProvider.ResourceType; @@ -31,6 +32,8 @@ import java.util.List; import java.util.Map; import java.util.UUID; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; import java.util.concurrent.TimeUnit; import java.util.stream.Stream; import lombok.extern.slf4j.Slf4j; @@ -157,10 +160,13 @@ void testEnvMapSet() throws IOException, WorkerException, InterruptedException { final StringBuilder out = new StringBuilder(); final StringBuilder err = new StringBuilder(); - LineGobbler.gobble(process.getInputStream(), out::append); - LineGobbler.gobble(process.getErrorStream(), err::append); - + final ExecutorService stdoutGobblerExecutor = Executors.newSingleThreadExecutor(); + final ExecutorService stderrGobblerExecutor = 
Executors.newSingleThreadExecutor(); + LineGobbler.gobble(process.getInputStream(), out::append, "unused", MdcScope.DEFAULT_BUILDER, stdoutGobblerExecutor); + LineGobbler.gobble(process.getErrorStream(), err::append, "unused", MdcScope.DEFAULT_BUILDER, stderrGobblerExecutor); WorkerUtils.gentleClose(process, 20, TimeUnit.SECONDS); + stdoutGobblerExecutor.awaitTermination(10, TimeUnit.SECONDS); + stderrGobblerExecutor.awaitTermination(10, TimeUnit.SECONDS); assertEquals(0, process.exitValue(), String.format("Process failed with stdout: %s and stderr: %s", out, err)); assertEquals("ENV_VAR_1=ENV_VALUE_1", out.toString(), String.format("Output did not contain the expected string. stdout: %s", out)); diff --git a/airbyte-commons-worker/src/test/java/io/airbyte/workers/utils/ConfigReplacerTest.java b/airbyte-commons-worker/src/test/java/io/airbyte/workers/utils/ConfigReplacerTest.java index 9aedbe46620..ad15bb7d262 100644 --- a/airbyte-commons-worker/src/test/java/io/airbyte/workers/utils/ConfigReplacerTest.java +++ b/airbyte-commons-worker/src/test/java/io/airbyte/workers/utils/ConfigReplacerTest.java @@ -30,10 +30,11 @@ class ConfigReplacerTest { @SuppressWarnings("PMD.AvoidUsingHardCodedIP") void getAllowedHostsGeneralTest() throws IOException { final AllowedHosts allowedHosts = new AllowedHosts(); - final List hosts = new ArrayList(); + final List hosts = new ArrayList<>(); hosts.add("localhost"); hosts.add("static-site.com"); hosts.add("${host}"); + hosts.add("${host_with_extras}"); hosts.add("${number}"); hosts.add("${subdomain}.vendor.com"); hosts.add("${tunnel_method.tunnel_host}"); @@ -43,6 +44,7 @@ void getAllowedHostsGeneralTest() throws IOException { expected.add("localhost"); expected.add("static-site.com"); expected.add("foo.com"); + expected.add("protected-site.com"); expected.add("123"); expected.add("account.vendor.com"); expected.add("1.2.3.4"); @@ -50,6 +52,7 @@ void getAllowedHostsGeneralTest() throws IOException { final String configJson = 
"{\"host\": \"foo.com\", " + + "\"host_with_extras\": \"ftp://user:password@protected-site.com/some-route\", " + "\"number\": 123, " + "\"subdomain\": \"account\", " + "\"password\": \"abc123\", " @@ -57,6 +60,7 @@ void getAllowedHostsGeneralTest() throws IOException { final JsonNode config = mapper.readValue(configJson, JsonNode.class); final AllowedHosts response = replacer.getAllowedHosts(allowedHosts, config); + System.out.println(response.getHosts()); assertThat(response.getHosts()).isEqualTo(expected); } @@ -115,4 +119,14 @@ void alwaysAllowedHostsListIsImmutable() { } } + @Test + void sanitization() { + assertThat(replacer.sanitize("basic.com")).isEqualTo("basic.com"); + assertThat(replacer.sanitize("http://basic.com")).isEqualTo("basic.com"); + assertThat(replacer.sanitize("http://user@basic.com")).isEqualTo("basic.com"); + assertThat(replacer.sanitize("http://user:password@basic.com")).isEqualTo("basic.com"); + assertThat(replacer.sanitize("http://user:password@basic.com/some/path")).isEqualTo("basic.com"); + assertThat(replacer.sanitize("mongo+srv://user:password@basic.com/some/path")).isEqualTo("basic.com"); + } + } diff --git a/airbyte-commons-worker/src/test/kotlin/io/airbyte/workers/helper/StreamStatusCompletionTrackerTest.kt b/airbyte-commons-worker/src/test/kotlin/io/airbyte/workers/helper/StreamStatusCompletionTrackerTest.kt new file mode 100644 index 00000000000..bce1d2972b6 --- /dev/null +++ b/airbyte-commons-worker/src/test/kotlin/io/airbyte/workers/helper/StreamStatusCompletionTrackerTest.kt @@ -0,0 +1,168 @@ +package io.airbyte.workers.helper + +import io.airbyte.featureflag.ActivateRefreshes +import io.airbyte.featureflag.Connection +import io.airbyte.featureflag.DestinationDefinition +import io.airbyte.featureflag.FeatureFlagClient +import io.airbyte.featureflag.Multi +import io.airbyte.featureflag.SourceDefinition +import io.airbyte.featureflag.Workspace +import io.airbyte.protocol.models.AirbyteMessage +import 
io.airbyte.protocol.models.AirbyteStream +import io.airbyte.protocol.models.AirbyteStreamStatusTraceMessage +import io.airbyte.protocol.models.AirbyteTraceMessage +import io.airbyte.protocol.models.ConfiguredAirbyteCatalog +import io.airbyte.protocol.models.ConfiguredAirbyteStream +import io.airbyte.protocol.models.StreamDescriptor +import io.airbyte.workers.context.ReplicationContext +import io.airbyte.workers.internal.AirbyteMapper +import io.mockk.every +import io.mockk.mockk +import org.junit.jupiter.api.Assertions.assertEquals +import org.junit.jupiter.api.BeforeEach +import org.junit.jupiter.api.Test +import java.time.Clock +import java.util.UUID + +internal class StreamStatusCompletionTrackerTest { + private val featureFlagClient: FeatureFlagClient = mockk() + private val clock: Clock = mockk() + private val mapper: AirbyteMapper = mockk() + + private val streamStatusCompletionTracker = StreamStatusCompletionTracker(featureFlagClient, clock) + + private val catalog = + ConfiguredAirbyteCatalog() + .withStreams( + listOf( + ConfiguredAirbyteStream().withStream(AirbyteStream().withName("name1")), + ConfiguredAirbyteStream().withStream(AirbyteStream().withName("name2").withNamespace("namespace2")), + ), + ) + + private val connectionId = UUID.randomUUID() + private val workspaceId = UUID.randomUUID() + private val sourceDefinitionId = UUID.randomUUID() + private val destinationDefinitionId = UUID.randomUUID() + private val featureFlagContext = + Multi( + listOf( + Workspace(workspaceId), + Connection(connectionId), + SourceDefinition(sourceDefinitionId), + DestinationDefinition(destinationDefinitionId), + ), + ) + private val replicationContext = + ReplicationContext( + false, + connectionId, + UUID.randomUUID(), + UUID.randomUUID(), + 0, + 0, + workspaceId, + "", + "", + sourceDefinitionId, + destinationDefinitionId, + ) + + @BeforeEach + fun init() { + every { clock.millis() } returns 1 + every { mapper.mapMessage(any()) } returnsArgument 0 + } + + @Test + 
fun `test that we get all the streams if the exit code is 0 and no stream status is send`() { + every { featureFlagClient.boolVariation(ActivateRefreshes, featureFlagContext) } returns true + + streamStatusCompletionTracker.startTracking(catalog, replicationContext) + val result = streamStatusCompletionTracker.finalize(0, mapper) + + assertEquals( + listOf( + getStreamStatusCompletedMessage("name1"), + getStreamStatusCompletedMessage("name2", "namespace2"), + ), + result, + ) + } + + @Test + fun `test that we get all the streams if the exit code is 0 and some stream status is send`() { + every { featureFlagClient.boolVariation(ActivateRefreshes, featureFlagContext) } returns true + + streamStatusCompletionTracker.startTracking(catalog, replicationContext) + streamStatusCompletionTracker.track(getStreamStatusCompletedMessage("name1").trace.streamStatus) + val result = streamStatusCompletionTracker.finalize(0, mapper) + + assertEquals( + listOf( + getStreamStatusCompletedMessage("name1"), + getStreamStatusCompletedMessage("name2", "namespace2"), + ), + result, + ) + } + + @Test + fun `test that we get no streams if the exit code is 1 and no stream status is send`() { + every { featureFlagClient.boolVariation(ActivateRefreshes, featureFlagContext) } returns true + + streamStatusCompletionTracker.startTracking(catalog, replicationContext) + val result = streamStatusCompletionTracker.finalize(1, mapper) + + assertEquals(listOf(), result) + } + + @Test + fun `test that we get the status of the streams that send a status if the exit code is 1 and no stream status is send`() { + every { featureFlagClient.boolVariation(ActivateRefreshes, featureFlagContext) } returns true + + streamStatusCompletionTracker.startTracking(catalog, replicationContext) + streamStatusCompletionTracker.track(getStreamStatusCompletedMessage("name1").trace.streamStatus) + val result = streamStatusCompletionTracker.finalize(1, mapper) + + assertEquals( + listOf( + 
getStreamStatusCompletedMessage("name1"), + ), + result, + ) + } + + @Test + fun `test that no message is send if the flag is false`() { + every { featureFlagClient.boolVariation(ActivateRefreshes, featureFlagContext) } returns false + + streamStatusCompletionTracker.startTracking(catalog, replicationContext) + streamStatusCompletionTracker.track(getStreamStatusCompletedMessage("name1").trace.streamStatus) + val result = streamStatusCompletionTracker.finalize(0, mapper) + + assertEquals(listOf(), result) + } + + private fun getStreamStatusCompletedMessage( + name: String, + namespace: String? = null, + ): AirbyteMessage { + return AirbyteMessage() + .withType(AirbyteMessage.Type.TRACE) + .withTrace( + AirbyteTraceMessage() + .withType(AirbyteTraceMessage.Type.STREAM_STATUS) + .withEmittedAt(1.0) + .withStreamStatus( + AirbyteStreamStatusTraceMessage() + .withStatus(AirbyteStreamStatusTraceMessage.AirbyteStreamStatus.COMPLETE) + .withStreamDescriptor( + StreamDescriptor() + .withName(name) + .withNamespace(namespace), + ), + ), + ) + } +} diff --git a/airbyte-commons-worker/src/test/kotlin/io/airbyte/workers/internal/AnalyticsMessageTrackerTest.kt b/airbyte-commons-worker/src/test/kotlin/io/airbyte/workers/internal/AnalyticsMessageTrackerTest.kt index 8e0ba931add..c7cb98f99c1 100644 --- a/airbyte-commons-worker/src/test/kotlin/io/airbyte/workers/internal/AnalyticsMessageTrackerTest.kt +++ b/airbyte-commons-worker/src/test/kotlin/io/airbyte/workers/internal/AnalyticsMessageTrackerTest.kt @@ -26,6 +26,8 @@ class AnalyticsMessageTrackerTest { private lateinit var trackingClient: TrackingClient private lateinit var analyticsMessageTracker: AnalyticsMessageTracker private lateinit var ctx: ReplicationContext + private val sourceDefinitionId = UUID.randomUUID() + private val destinationDefinitionId = UUID.randomUUID() @BeforeEach fun setUp() { @@ -35,7 +37,7 @@ class AnalyticsMessageTrackerTest { ctx = ReplicationContext( false, UUID.randomUUID(), UUID.randomUUID(), 
UUID.randomUUID(), - 1, 1, UUID.randomUUID(), SOURCE_IMAGE, DESTINATION_IMAGE, + 1, 1, UUID.randomUUID(), SOURCE_IMAGE, DESTINATION_IMAGE, sourceDefinitionId, destinationDefinitionId, ) analyticsMessageTracker.ctx = ctx } diff --git a/airbyte-commons-worker/src/test/kotlin/io/airbyte/workers/internal/bookkeeping/ParallelStreamStatsTrackerTest.kt b/airbyte-commons-worker/src/test/kotlin/io/airbyte/workers/internal/bookkeeping/ParallelStreamStatsTrackerTest.kt index 01dd68f537a..b40fbeceafa 100644 --- a/airbyte-commons-worker/src/test/kotlin/io/airbyte/workers/internal/bookkeeping/ParallelStreamStatsTrackerTest.kt +++ b/airbyte-commons-worker/src/test/kotlin/io/airbyte/workers/internal/bookkeeping/ParallelStreamStatsTrackerTest.kt @@ -15,6 +15,7 @@ import io.airbyte.metrics.lib.MetricClient import io.airbyte.metrics.lib.OssMetricsRegistry import io.airbyte.protocol.models.AirbyteEstimateTraceMessage import io.airbyte.protocol.models.AirbyteGlobalState +import io.airbyte.protocol.models.AirbyteMessage import io.airbyte.protocol.models.AirbyteRecordMessage import io.airbyte.protocol.models.AirbyteStateMessage import io.airbyte.protocol.models.AirbyteStateStats @@ -23,12 +24,15 @@ import io.airbyte.protocol.models.AirbyteStreamState import io.airbyte.protocol.models.StreamDescriptor import io.airbyte.workers.context.ReplicationFeatureFlags import io.airbyte.workers.exception.InvalidChecksumException +import io.airbyte.workers.models.StateWithId import io.airbyte.workers.test_utils.AirbyteMessageUtils import io.github.oshai.kotlinlogging.KotlinLogging import io.mockk.every import io.mockk.mockk import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.Assertions.assertEquals import org.junit.jupiter.api.Assertions.assertFalse +import org.junit.jupiter.api.Assertions.assertTrue import org.junit.jupiter.api.BeforeEach import org.junit.jupiter.api.Test import org.junit.jupiter.api.assertDoesNotThrow @@ -529,7 +533,7 @@ class ParallelStreamStatsTrackerTest { 
@Test fun testNoStatsForNullStreamAreReturned() { // Checking for LegacyStates - val legacyState = AirbyteMessageUtils.createStateMessage(1337).state + val legacyState = StateWithId.attachIdToStateMessageFromSource(AirbyteMessageUtils.createStateMessage(1337)).state statsTracker.updateSourceStatesStats(legacyState) statsTracker.updateDestinationStateStats(legacyState) @@ -545,7 +549,7 @@ class ParallelStreamStatsTrackerTest { Assertions.assertTrue(statsTracker.getStreamToCommittedBytes().isEmpty()) // Checking for GlobalStates - val globalState = AirbyteMessageUtils.createGlobalStateMessage(1337).state + val globalState = createGlobalState(1337) statsTracker.updateSourceStatesStats(globalState) statsTracker.updateDestinationStateStats(globalState) @@ -560,23 +564,31 @@ class ParallelStreamStatsTrackerTest { val namespace = "namespace" val recordCount = 10 val stateMessage1 = - AirbyteStateMessage() - .withStream( - AirbyteStreamState() - .withStreamDescriptor(StreamDescriptor().withName(name).withNamespace(namespace)) - .withStreamState(Jsons.jsonNode(mapOf("id" to 10))), - ) - .withSourceStats(AirbyteStateStats().withRecordCount(recordCount.toDouble())) - .withType(AirbyteStateMessage.AirbyteStateType.STREAM) + StateWithId.attachIdToStateMessageFromSource( + AirbyteMessage().withType(AirbyteMessage.Type.STATE).withState( + AirbyteStateMessage() + .withStream( + AirbyteStreamState() + .withStreamDescriptor(StreamDescriptor().withName(name).withNamespace(namespace)) + .withStreamState(Jsons.jsonNode(mapOf("id" to 10))), + ) + .withSourceStats(AirbyteStateStats().withRecordCount(recordCount.toDouble())) + .withType(AirbyteStateMessage.AirbyteStateType.STREAM), + ), + ) val stateMessage2 = - AirbyteStateMessage() - .withStream( - AirbyteStreamState() - .withStreamDescriptor(StreamDescriptor().withName(name).withNamespace(namespace)) - .withStreamState(Jsons.jsonNode(mapOf("id" to 20))), - ) - 
.withSourceStats(AirbyteStateStats().withRecordCount(recordCount.toDouble())) - .withType(AirbyteStateMessage.AirbyteStateType.STREAM) + StateWithId.attachIdToStateMessageFromSource( + AirbyteMessage().withType(AirbyteMessage.Type.STATE).withState( + AirbyteStateMessage() + .withStream( + AirbyteStreamState() + .withStreamDescriptor(StreamDescriptor().withName(name).withNamespace(namespace)) + .withStreamState(Jsons.jsonNode(mapOf("id" to 20))), + ) + .withSourceStats(AirbyteStateStats().withRecordCount(recordCount.toDouble())) + .withType(AirbyteStateMessage.AirbyteStateType.STREAM), + ), + ) val replicationFeatureFlags: ReplicationFeatureFlags = mockk() every { replicationFeatureFlags.failOnInvalidChecksum } returns true @@ -585,12 +597,12 @@ class ParallelStreamStatsTrackerTest { trackRecords(recordCount, name, namespace) // First assert that the checksums match - statsTracker.updateSourceStatesStats(stateMessage1) + statsTracker.updateSourceStatesStats(stateMessage1.state) trackRecords(recordCount - 2, name, namespace) Assertions.assertThrows(InvalidChecksumException::class.java) { - statsTracker.updateSourceStatesStats(stateMessage2) + statsTracker.updateSourceStatesStats(stateMessage2.state) } } @@ -600,25 +612,33 @@ class ParallelStreamStatsTrackerTest { val namespace = "namespace" val recordCount = 10 val stateMessage1 = - AirbyteStateMessage() - .withStream( - AirbyteStreamState() - .withStreamDescriptor(StreamDescriptor().withName(name).withNamespace(namespace)) - .withStreamState(Jsons.jsonNode(mapOf("id" to 10))), - ) - .withSourceStats(AirbyteStateStats().withRecordCount(recordCount.toDouble())) - .withDestinationStats(AirbyteStateStats().withRecordCount(recordCount.toDouble())) - .withType(AirbyteStateMessage.AirbyteStateType.STREAM) + StateWithId.attachIdToStateMessageFromSource( + AirbyteMessage().withType(AirbyteMessage.Type.STATE).withState( + AirbyteStateMessage() + .withStream( + AirbyteStreamState() + 
.withStreamDescriptor(StreamDescriptor().withName(name).withNamespace(namespace)) + .withStreamState(Jsons.jsonNode(mapOf("id" to 10))), + ) + .withSourceStats(AirbyteStateStats().withRecordCount(recordCount.toDouble())) + .withDestinationStats(AirbyteStateStats().withRecordCount(recordCount.toDouble())) + .withType(AirbyteStateMessage.AirbyteStateType.STREAM), + ), + ) val stateMessage2 = - AirbyteStateMessage() - .withStream( - AirbyteStreamState() - .withStreamDescriptor(StreamDescriptor().withName(name).withNamespace(namespace)) - .withStreamState(Jsons.jsonNode(mapOf("id" to 20))), - ) - .withSourceStats(AirbyteStateStats().withRecordCount(recordCount.toDouble() - 2)) - .withDestinationStats(AirbyteStateStats().withRecordCount(recordCount.toDouble())) - .withType(AirbyteStateMessage.AirbyteStateType.STREAM) + StateWithId.attachIdToStateMessageFromSource( + AirbyteMessage().withType(AirbyteMessage.Type.STATE).withState( + AirbyteStateMessage() + .withStream( + AirbyteStreamState() + .withStreamDescriptor(StreamDescriptor().withName(name).withNamespace(namespace)) + .withStreamState(Jsons.jsonNode(mapOf("id" to 20))), + ) + .withSourceStats(AirbyteStateStats().withRecordCount(recordCount.toDouble() - 2)) + .withDestinationStats(AirbyteStateStats().withRecordCount(recordCount.toDouble())) + .withType(AirbyteStateMessage.AirbyteStateType.STREAM), + ), + ) val replicationFeatureFlags: ReplicationFeatureFlags = mockk() every { replicationFeatureFlags.failOnInvalidChecksum } returns true @@ -627,15 +647,15 @@ class ParallelStreamStatsTrackerTest { trackRecords(recordCount, name, namespace) // First assert that the checksums match - statsTracker.updateSourceStatesStats(stateMessage1) - statsTracker.updateDestinationStateStats(stateMessage1) + statsTracker.updateSourceStatesStats(stateMessage1.state) + statsTracker.updateDestinationStateStats(stateMessage1.state) trackRecords(recordCount - 2, name, namespace) - statsTracker.updateSourceStatesStats(stateMessage2) + 
statsTracker.updateSourceStatesStats(stateMessage2.state) Assertions.assertThrows(InvalidChecksumException::class.java) { - statsTracker.updateDestinationStateStats(stateMessage2) + statsTracker.updateDestinationStateStats(stateMessage2.state) } } @@ -645,25 +665,33 @@ class ParallelStreamStatsTrackerTest { val namespace = "namespace" val recordCount = 10 val stateMessage1 = - AirbyteStateMessage() - .withStream( - AirbyteStreamState() - .withStreamDescriptor(StreamDescriptor().withName(name).withNamespace(namespace)) - .withStreamState(Jsons.jsonNode(mapOf("id" to 10))), - ) - .withSourceStats(AirbyteStateStats().withRecordCount(recordCount.toDouble())) - .withDestinationStats(AirbyteStateStats().withRecordCount(recordCount.toDouble())) - .withType(AirbyteStateMessage.AirbyteStateType.STREAM) + StateWithId.attachIdToStateMessageFromSource( + AirbyteMessage().withType(AirbyteMessage.Type.STATE).withState( + AirbyteStateMessage() + .withStream( + AirbyteStreamState() + .withStreamDescriptor(StreamDescriptor().withName(name).withNamespace(namespace)) + .withStreamState(Jsons.jsonNode(mapOf("id" to 10))), + ) + .withSourceStats(AirbyteStateStats().withRecordCount(recordCount.toDouble())) + .withDestinationStats(AirbyteStateStats().withRecordCount(recordCount.toDouble())) + .withType(AirbyteStateMessage.AirbyteStateType.STREAM), + ), + ) val stateMessage2 = - AirbyteStateMessage() - .withStream( - AirbyteStreamState() - .withStreamDescriptor(StreamDescriptor().withName(name).withNamespace(namespace)) - .withStreamState(Jsons.jsonNode(mapOf("id" to 20))), - ) - .withSourceStats(AirbyteStateStats().withRecordCount(recordCount.toDouble())) - .withDestinationStats(AirbyteStateStats().withRecordCount(recordCount.toDouble())) - .withType(AirbyteStateMessage.AirbyteStateType.STREAM) + StateWithId.attachIdToStateMessageFromSource( + AirbyteMessage().withType(AirbyteMessage.Type.STATE).withState( + AirbyteStateMessage() + .withStream( + AirbyteStreamState() + 
.withStreamDescriptor(StreamDescriptor().withName(name).withNamespace(namespace)) + .withStreamState(Jsons.jsonNode(mapOf("id" to 20))), + ) + .withSourceStats(AirbyteStateStats().withRecordCount(recordCount.toDouble())) + .withDestinationStats(AirbyteStateStats().withRecordCount(recordCount.toDouble())) + .withType(AirbyteStateMessage.AirbyteStateType.STREAM), + ), + ) val replicationFeatureFlags: ReplicationFeatureFlags = mockk() every { replicationFeatureFlags.failOnInvalidChecksum } returns true @@ -672,15 +700,15 @@ class ParallelStreamStatsTrackerTest { trackRecords(recordCount, name, namespace) // First assert that the checksums match - statsTracker.updateSourceStatesStats(stateMessage1) - statsTracker.updateDestinationStateStats(stateMessage1) + statsTracker.updateSourceStatesStats(stateMessage1.state) + statsTracker.updateDestinationStateStats(stateMessage1.state) trackRecords(recordCount, name, namespace) - statsTracker.updateSourceStatesStats(stateMessage2) + statsTracker.updateSourceStatesStats(stateMessage2.state) Assertions.assertThrows(InvalidChecksumException::class.java) { - stateMessage2.sourceStats.recordCount = (recordCount - 2).toDouble() - statsTracker.updateDestinationStateStats(stateMessage2) + stateMessage2.state.sourceStats.recordCount = (recordCount - 2).toDouble() + statsTracker.updateDestinationStateStats(stateMessage2.state) } } @@ -690,25 +718,33 @@ class ParallelStreamStatsTrackerTest { val namespace = "namespace" val recordCount = 10 val stateMessage1 = - AirbyteStateMessage() - .withStream( - AirbyteStreamState() - .withStreamDescriptor(StreamDescriptor().withName(name).withNamespace(namespace)) - .withStreamState(Jsons.jsonNode(mapOf("id" to 10))), - ) - .withSourceStats(AirbyteStateStats().withRecordCount(recordCount.toDouble())) - .withDestinationStats(AirbyteStateStats().withRecordCount(recordCount.toDouble())) - .withType(AirbyteStateMessage.AirbyteStateType.STREAM) + StateWithId.attachIdToStateMessageFromSource( + 
AirbyteMessage().withType(AirbyteMessage.Type.STATE).withState( + AirbyteStateMessage() + .withStream( + AirbyteStreamState() + .withStreamDescriptor(StreamDescriptor().withName(name).withNamespace(namespace)) + .withStreamState(Jsons.jsonNode(mapOf("id" to 10))), + ) + .withSourceStats(AirbyteStateStats().withRecordCount(recordCount.toDouble())) + .withDestinationStats(AirbyteStateStats().withRecordCount(recordCount.toDouble())) + .withType(AirbyteStateMessage.AirbyteStateType.STREAM), + ), + ) val stateMessage2 = - AirbyteStateMessage() - .withStream( - AirbyteStreamState() - .withStreamDescriptor(StreamDescriptor().withName(name).withNamespace(namespace)) - .withStreamState(Jsons.jsonNode(mapOf("id" to 20))), - ) - .withSourceStats(AirbyteStateStats().withRecordCount(recordCount.toDouble())) - .withDestinationStats(AirbyteStateStats().withRecordCount(recordCount.toDouble())) - .withType(AirbyteStateMessage.AirbyteStateType.STREAM) + StateWithId.attachIdToStateMessageFromSource( + AirbyteMessage().withType(AirbyteMessage.Type.STATE).withState( + AirbyteStateMessage() + .withStream( + AirbyteStreamState() + .withStreamDescriptor(StreamDescriptor().withName(name).withNamespace(namespace)) + .withStreamState(Jsons.jsonNode(mapOf("id" to 20))), + ) + .withSourceStats(AirbyteStateStats().withRecordCount(recordCount.toDouble())) + .withDestinationStats(AirbyteStateStats().withRecordCount(recordCount.toDouble())) + .withType(AirbyteStateMessage.AirbyteStateType.STREAM), + ), + ) val replicationFeatureFlags: ReplicationFeatureFlags = mockk() every { replicationFeatureFlags.failOnInvalidChecksum } returns true @@ -717,17 +753,55 @@ class ParallelStreamStatsTrackerTest { assertDoesNotThrow { trackRecords(recordCount, name, namespace) // First assert that the checksums match - statsTracker.updateSourceStatesStats(stateMessage1) - statsTracker.updateDestinationStateStats(stateMessage1) + statsTracker.updateSourceStatesStats(stateMessage1.state) + 
statsTracker.updateDestinationStateStats(stateMessage1.state) trackRecords(recordCount, name, namespace) - statsTracker.updateSourceStatesStats(stateMessage2) - statsTracker.updateDestinationStateStats(stateMessage2) + statsTracker.updateSourceStatesStats(stateMessage2.state) + statsTracker.updateDestinationStateStats(stateMessage2.state) } } @Test internal fun `test that no exception is raised when the state message checksum comparison is disabled due to collisions`() { + val name = "name" + val namespace = "namespace" + val recordCount = 10 + val stateMessage1 = + StateWithId.attachIdToStateMessageFromSource( + AirbyteMessage().withType(AirbyteMessage.Type.STATE).withState( + AirbyteStateMessage() + .withStream( + AirbyteStreamState() + .withStreamDescriptor(StreamDescriptor().withName(name).withNamespace(namespace)) + .withStreamState(Jsons.jsonNode(mapOf("id" to 10))), + ) + .withSourceStats(AirbyteStateStats().withRecordCount(recordCount.toDouble())) + .withDestinationStats(AirbyteStateStats().withRecordCount(recordCount.toDouble())) + .withType(AirbyteStateMessage.AirbyteStateType.STREAM), + ), + ) + + val replicationFeatureFlags: ReplicationFeatureFlags = mockk() + every { replicationFeatureFlags.failOnInvalidChecksum } returns true + statsTracker.setReplicationFeatureFlags(replicationFeatureFlags) + + assertDoesNotThrow { + trackRecords(recordCount, name, namespace) + // First assert that the checksums match + statsTracker.updateSourceStatesStats(stateMessage1.state) + + trackRecords(recordCount, name, namespace) + statsTracker.updateSourceStatesStats(stateMessage1.state) + + statsTracker.updateDestinationStateStats(stateMessage1.state) + statsTracker.updateDestinationStateStats(stateMessage1.state) + } + assertFalse(statsTracker.isChecksumValidationEnabled()) + } + + @Test + internal fun `test that hash collision doesnt happen when same state messages arrive`() { val name = "name" val namespace = "namespace" val recordCount = 10 @@ -741,6 +815,23 @@ class 
ParallelStreamStatsTrackerTest { .withSourceStats(AirbyteStateStats().withRecordCount(recordCount.toDouble())) .withDestinationStats(AirbyteStateStats().withRecordCount(recordCount.toDouble())) .withType(AirbyteStateMessage.AirbyteStateType.STREAM) + val copyOfStateMessage1 = + AirbyteStateMessage() + .withStream( + AirbyteStreamState() + .withStreamDescriptor(StreamDescriptor().withName(name).withNamespace(namespace)) + .withStreamState(Jsons.jsonNode(mapOf("id" to 10))), + ) + .withSourceStats(AirbyteStateStats().withRecordCount(recordCount.toDouble())) + .withDestinationStats(AirbyteStateStats().withRecordCount(recordCount.toDouble())) + .withType(AirbyteStateMessage.AirbyteStateType.STREAM) + + assertEquals(stateMessage1, copyOfStateMessage1) + val state = StateWithId.attachIdToStateMessageFromSource(AirbyteMessage().withType(AirbyteMessage.Type.STATE).withState(stateMessage1)).state + val state2 = + StateWithId.attachIdToStateMessageFromSource( + AirbyteMessage().withType(AirbyteMessage.Type.STATE).withState(copyOfStateMessage1), + ).state val replicationFeatureFlags: ReplicationFeatureFlags = mockk() every { replicationFeatureFlags.failOnInvalidChecksum } returns true @@ -749,15 +840,15 @@ class ParallelStreamStatsTrackerTest { assertDoesNotThrow { trackRecords(recordCount, name, namespace) // First assert that the checksums match - statsTracker.updateSourceStatesStats(stateMessage1) + statsTracker.updateSourceStatesStats(state) trackRecords(recordCount, name, namespace) - statsTracker.updateSourceStatesStats(stateMessage1) + statsTracker.updateSourceStatesStats(state2) - statsTracker.updateDestinationStateStats(stateMessage1) - statsTracker.updateDestinationStateStats(stateMessage1) + statsTracker.updateDestinationStateStats(state) + statsTracker.updateDestinationStateStats(state2) } - assertFalse(statsTracker.isChecksumValidationEnabled()) + assertTrue(statsTracker.isChecksumValidationEnabled()) } @Test @@ -769,43 +860,51 @@ class 
ParallelStreamStatsTrackerTest { val recordCountStream1 = 10 val recordCountStream2 = 15 val stateMessage1 = - AirbyteStateMessage() - .withGlobal( - AirbyteGlobalState() - .withSharedState(Jsons.jsonNode(mapOf("wal" to 10))) - .withStreamStates( - listOf( - AirbyteStreamState() - .withStreamDescriptor(StreamDescriptor().withName(name1).withNamespace(namespace1)) - .withStreamState(Jsons.jsonNode(mapOf("id" to 10))), - AirbyteStreamState() - .withStreamDescriptor(StreamDescriptor().withName(name2).withNamespace(namespace2)) - .withStreamState(Jsons.jsonNode(mapOf("id" to 15))), - ), - ), - ) - .withSourceStats(AirbyteStateStats().withRecordCount((recordCountStream1 + recordCountStream2).toDouble())) - .withDestinationStats(AirbyteStateStats().withRecordCount((recordCountStream1 + recordCountStream2).toDouble())) - .withType(AirbyteStateMessage.AirbyteStateType.GLOBAL) + StateWithId.attachIdToStateMessageFromSource( + AirbyteMessage().withType(AirbyteMessage.Type.STATE).withState( + AirbyteStateMessage() + .withGlobal( + AirbyteGlobalState() + .withSharedState(Jsons.jsonNode(mapOf("wal" to 10))) + .withStreamStates( + listOf( + AirbyteStreamState() + .withStreamDescriptor(StreamDescriptor().withName(name1).withNamespace(namespace1)) + .withStreamState(Jsons.jsonNode(mapOf("id" to 10))), + AirbyteStreamState() + .withStreamDescriptor(StreamDescriptor().withName(name2).withNamespace(namespace2)) + .withStreamState(Jsons.jsonNode(mapOf("id" to 15))), + ), + ), + ) + .withSourceStats(AirbyteStateStats().withRecordCount((recordCountStream1 + recordCountStream2).toDouble())) + .withDestinationStats(AirbyteStateStats().withRecordCount((recordCountStream1 + recordCountStream2).toDouble())) + .withType(AirbyteStateMessage.AirbyteStateType.GLOBAL), + ), + ) val stateMessage2 = - AirbyteStateMessage() - .withGlobal( - AirbyteGlobalState() - .withSharedState(Jsons.jsonNode(mapOf("wal" to 20))) - .withStreamStates( - listOf( - AirbyteStreamState() - 
.withStreamDescriptor(StreamDescriptor().withName(name1).withNamespace(namespace1)) - .withStreamState(Jsons.jsonNode(mapOf("id" to 20))), - AirbyteStreamState() - .withStreamDescriptor(StreamDescriptor().withName(name2).withNamespace(namespace2)) - .withStreamState(Jsons.jsonNode(mapOf("id" to 30))), - ), - ), - ) - .withSourceStats(AirbyteStateStats().withRecordCount((recordCountStream1 + recordCountStream2).toDouble())) - .withDestinationStats(AirbyteStateStats().withRecordCount((recordCountStream1 + recordCountStream2).toDouble())) - .withType(AirbyteStateMessage.AirbyteStateType.GLOBAL) + StateWithId.attachIdToStateMessageFromSource( + AirbyteMessage().withType(AirbyteMessage.Type.STATE).withState( + AirbyteStateMessage() + .withGlobal( + AirbyteGlobalState() + .withSharedState(Jsons.jsonNode(mapOf("wal" to 20))) + .withStreamStates( + listOf( + AirbyteStreamState() + .withStreamDescriptor(StreamDescriptor().withName(name1).withNamespace(namespace1)) + .withStreamState(Jsons.jsonNode(mapOf("id" to 20))), + AirbyteStreamState() + .withStreamDescriptor(StreamDescriptor().withName(name2).withNamespace(namespace2)) + .withStreamState(Jsons.jsonNode(mapOf("id" to 30))), + ), + ), + ) + .withSourceStats(AirbyteStateStats().withRecordCount((recordCountStream1 + recordCountStream2).toDouble())) + .withDestinationStats(AirbyteStateStats().withRecordCount((recordCountStream1 + recordCountStream2).toDouble())) + .withType(AirbyteStateMessage.AirbyteStateType.GLOBAL), + ), + ) val replicationFeatureFlags: ReplicationFeatureFlags = mockk() every { replicationFeatureFlags.failOnInvalidChecksum } returns true @@ -816,13 +915,13 @@ class ParallelStreamStatsTrackerTest { trackRecords(recordCountStream2, name2, namespace2) // First assert that the checksums match - statsTracker.updateSourceStatesStats(stateMessage1) - statsTracker.updateDestinationStateStats(stateMessage1) + statsTracker.updateSourceStatesStats(stateMessage1.state) + 
statsTracker.updateDestinationStateStats(stateMessage1.state) trackRecords(recordCountStream1, name1, namespace1) trackRecords(recordCountStream2, name2, namespace2) - statsTracker.updateSourceStatesStats(stateMessage2) - statsTracker.updateDestinationStateStats(stateMessage2) + statsTracker.updateSourceStatesStats(stateMessage2.state) + statsTracker.updateDestinationStateStats(stateMessage2.state) } } @@ -835,43 +934,51 @@ class ParallelStreamStatsTrackerTest { val recordCountStream1 = 10 val recordCountStream2 = 15 val stateMessage1 = - AirbyteStateMessage() - .withGlobal( - AirbyteGlobalState() - .withSharedState(Jsons.jsonNode(mapOf("wal" to 10))) - .withStreamStates( - listOf( - AirbyteStreamState() - .withStreamDescriptor(StreamDescriptor().withName(name1).withNamespace(namespace1)) - .withStreamState(Jsons.jsonNode(mapOf("id" to 10))), - AirbyteStreamState() - .withStreamDescriptor(StreamDescriptor().withName(name2).withNamespace(namespace2)) - .withStreamState(Jsons.jsonNode(mapOf("id" to 15))), - ), - ), - ) - .withSourceStats(AirbyteStateStats().withRecordCount((recordCountStream1 + recordCountStream2).toDouble())) - .withDestinationStats(AirbyteStateStats().withRecordCount((recordCountStream1 + recordCountStream2).toDouble())) - .withType(AirbyteStateMessage.AirbyteStateType.GLOBAL) + StateWithId.attachIdToStateMessageFromSource( + AirbyteMessage().withType(AirbyteMessage.Type.STATE).withState( + AirbyteStateMessage() + .withGlobal( + AirbyteGlobalState() + .withSharedState(Jsons.jsonNode(mapOf("wal" to 10))) + .withStreamStates( + listOf( + AirbyteStreamState() + .withStreamDescriptor(StreamDescriptor().withName(name1).withNamespace(namespace1)) + .withStreamState(Jsons.jsonNode(mapOf("id" to 10))), + AirbyteStreamState() + .withStreamDescriptor(StreamDescriptor().withName(name2).withNamespace(namespace2)) + .withStreamState(Jsons.jsonNode(mapOf("id" to 15))), + ), + ), + ) + .withSourceStats(AirbyteStateStats().withRecordCount((recordCountStream1 + 
recordCountStream2).toDouble())) + .withDestinationStats(AirbyteStateStats().withRecordCount((recordCountStream1 + recordCountStream2).toDouble())) + .withType(AirbyteStateMessage.AirbyteStateType.GLOBAL), + ), + ) val stateMessage2 = - AirbyteStateMessage() - .withGlobal( - AirbyteGlobalState() - .withSharedState(Jsons.jsonNode(mapOf("wal" to 20))) - .withStreamStates( - listOf( - AirbyteStreamState() - .withStreamDescriptor(StreamDescriptor().withName(name1).withNamespace(namespace1)) - .withStreamState(Jsons.jsonNode(mapOf("id" to 20))), - AirbyteStreamState() - .withStreamDescriptor(StreamDescriptor().withName(name2).withNamespace(namespace2)) - .withStreamState(Jsons.jsonNode(mapOf("id" to 30))), - ), - ), - ) - .withSourceStats(AirbyteStateStats().withRecordCount((recordCountStream1 + recordCountStream2).toDouble())) - .withDestinationStats(AirbyteStateStats().withRecordCount((recordCountStream1 + recordCountStream2).toDouble())) - .withType(AirbyteStateMessage.AirbyteStateType.GLOBAL) + StateWithId.attachIdToStateMessageFromSource( + AirbyteMessage().withType(AirbyteMessage.Type.STATE).withState( + AirbyteStateMessage() + .withGlobal( + AirbyteGlobalState() + .withSharedState(Jsons.jsonNode(mapOf("wal" to 20))) + .withStreamStates( + listOf( + AirbyteStreamState() + .withStreamDescriptor(StreamDescriptor().withName(name1).withNamespace(namespace1)) + .withStreamState(Jsons.jsonNode(mapOf("id" to 20))), + AirbyteStreamState() + .withStreamDescriptor(StreamDescriptor().withName(name2).withNamespace(namespace2)) + .withStreamState(Jsons.jsonNode(mapOf("id" to 30))), + ), + ), + ) + .withSourceStats(AirbyteStateStats().withRecordCount((recordCountStream1 + recordCountStream2).toDouble())) + .withDestinationStats(AirbyteStateStats().withRecordCount((recordCountStream1 + recordCountStream2).toDouble())) + .withType(AirbyteStateMessage.AirbyteStateType.GLOBAL), + ), + ) val replicationFeatureFlags: ReplicationFeatureFlags = mockk() every { 
replicationFeatureFlags.failOnInvalidChecksum } returns true @@ -882,14 +989,14 @@ class ParallelStreamStatsTrackerTest { trackRecords(recordCountStream2, name2, namespace2) // First assert that the checksums match - statsTracker.updateSourceStatesStats(stateMessage1) + statsTracker.updateSourceStatesStats(stateMessage1.state) trackRecords(recordCountStream1, name1, namespace1) trackRecords(recordCountStream2, name2, namespace2) - statsTracker.updateSourceStatesStats(stateMessage2) + statsTracker.updateSourceStatesStats(stateMessage2.state) - statsTracker.updateDestinationStateStats(stateMessage1) - statsTracker.updateDestinationStateStats(stateMessage2) + statsTracker.updateDestinationStateStats(stateMessage1.state) + statsTracker.updateDestinationStateStats(stateMessage2.state) } } @@ -902,24 +1009,28 @@ class ParallelStreamStatsTrackerTest { val recordCountStream1 = 10 val recordCountStream2 = 15 val stateMessage1 = - AirbyteStateMessage() - .withGlobal( - AirbyteGlobalState() - .withSharedState(Jsons.jsonNode(mapOf("wal" to 10))) - .withStreamStates( - listOf( - AirbyteStreamState() - .withStreamDescriptor(StreamDescriptor().withName(name1).withNamespace(namespace1)) - .withStreamState(Jsons.jsonNode(mapOf("id" to 10))), - AirbyteStreamState() - .withStreamDescriptor(StreamDescriptor().withName(name2).withNamespace(namespace2)) - .withStreamState(Jsons.jsonNode(mapOf("id" to 15))), - ), - ), - ) - .withSourceStats(AirbyteStateStats().withRecordCount((recordCountStream1 + recordCountStream2).toDouble())) - .withDestinationStats(AirbyteStateStats().withRecordCount((recordCountStream1 + recordCountStream2).toDouble())) - .withType(AirbyteStateMessage.AirbyteStateType.GLOBAL) + StateWithId.attachIdToStateMessageFromSource( + AirbyteMessage().withType(AirbyteMessage.Type.STATE).withState( + AirbyteStateMessage() + .withGlobal( + AirbyteGlobalState() + .withSharedState(Jsons.jsonNode(mapOf("wal" to 10))) + .withStreamStates( + listOf( + AirbyteStreamState() + 
.withStreamDescriptor(StreamDescriptor().withName(name1).withNamespace(namespace1)) + .withStreamState(Jsons.jsonNode(mapOf("id" to 10))), + AirbyteStreamState() + .withStreamDescriptor(StreamDescriptor().withName(name2).withNamespace(namespace2)) + .withStreamState(Jsons.jsonNode(mapOf("id" to 15))), + ), + ), + ) + .withSourceStats(AirbyteStateStats().withRecordCount((recordCountStream1 + recordCountStream2).toDouble())) + .withDestinationStats(AirbyteStateStats().withRecordCount((recordCountStream1 + recordCountStream2).toDouble())) + .withType(AirbyteStateMessage.AirbyteStateType.GLOBAL), + ), + ) val replicationFeatureFlags: ReplicationFeatureFlags = mockk() every { replicationFeatureFlags.failOnInvalidChecksum } returns true @@ -930,14 +1041,14 @@ class ParallelStreamStatsTrackerTest { trackRecords(recordCountStream2, name2, namespace2) // First assert that the checksums match - statsTracker.updateSourceStatesStats(stateMessage1) + statsTracker.updateSourceStatesStats(stateMessage1.state) trackRecords(recordCountStream1, name1, namespace1) trackRecords(recordCountStream2, name2, namespace2) - statsTracker.updateSourceStatesStats(stateMessage1) + statsTracker.updateSourceStatesStats(stateMessage1.state) - statsTracker.updateDestinationStateStats(stateMessage1) - statsTracker.updateDestinationStateStats(stateMessage1) + statsTracker.updateDestinationStateStats(stateMessage1.state) + statsTracker.updateDestinationStateStats(stateMessage1.state) } assertFalse(statsTracker.isChecksumValidationEnabled()) @@ -952,43 +1063,51 @@ class ParallelStreamStatsTrackerTest { val recordCountStream1 = 10 val recordCountStream2 = 15 val stateMessage1 = - AirbyteStateMessage() - .withGlobal( - AirbyteGlobalState() - .withSharedState(Jsons.jsonNode(mapOf("wal" to 10))) - .withStreamStates( - listOf( - AirbyteStreamState() - .withStreamDescriptor(StreamDescriptor().withName(name1).withNamespace(namespace1)) - .withStreamState(Jsons.jsonNode(mapOf("id" to 10))), - 
AirbyteStreamState() - .withStreamDescriptor(StreamDescriptor().withName(name2).withNamespace(namespace2)) - .withStreamState(Jsons.jsonNode(mapOf("id" to 15))), - ), - ), - ) - .withSourceStats(AirbyteStateStats().withRecordCount((recordCountStream1 + recordCountStream2).toDouble())) - .withDestinationStats(AirbyteStateStats().withRecordCount((recordCountStream1 + recordCountStream2).toDouble())) - .withType(AirbyteStateMessage.AirbyteStateType.GLOBAL) + StateWithId.attachIdToStateMessageFromSource( + AirbyteMessage().withType(AirbyteMessage.Type.STATE).withState( + AirbyteStateMessage() + .withGlobal( + AirbyteGlobalState() + .withSharedState(Jsons.jsonNode(mapOf("wal" to 10))) + .withStreamStates( + listOf( + AirbyteStreamState() + .withStreamDescriptor(StreamDescriptor().withName(name1).withNamespace(namespace1)) + .withStreamState(Jsons.jsonNode(mapOf("id" to 10))), + AirbyteStreamState() + .withStreamDescriptor(StreamDescriptor().withName(name2).withNamespace(namespace2)) + .withStreamState(Jsons.jsonNode(mapOf("id" to 15))), + ), + ), + ) + .withSourceStats(AirbyteStateStats().withRecordCount((recordCountStream1 + recordCountStream2).toDouble())) + .withDestinationStats(AirbyteStateStats().withRecordCount((recordCountStream1 + recordCountStream2).toDouble())) + .withType(AirbyteStateMessage.AirbyteStateType.GLOBAL), + ), + ) val stateMessage2 = - AirbyteStateMessage() - .withGlobal( - AirbyteGlobalState() - .withSharedState(Jsons.jsonNode(mapOf("wal" to 20))) - .withStreamStates( - listOf( - AirbyteStreamState() - .withStreamDescriptor(StreamDescriptor().withName(name1).withNamespace(namespace1)) - .withStreamState(Jsons.jsonNode(mapOf("id" to 20))), - AirbyteStreamState() - .withStreamDescriptor(StreamDescriptor().withName(name2).withNamespace(namespace2)) - .withStreamState(Jsons.jsonNode(mapOf("id" to 30))), - ), - ), - ) - .withSourceStats(AirbyteStateStats().withRecordCount((recordCountStream1 + recordCountStream2).toDouble())) - 
.withDestinationStats(AirbyteStateStats().withRecordCount((recordCountStream1 + recordCountStream2).toDouble())) - .withType(AirbyteStateMessage.AirbyteStateType.GLOBAL) + StateWithId.attachIdToStateMessageFromSource( + AirbyteMessage().withType(AirbyteMessage.Type.STATE).withState( + AirbyteStateMessage() + .withGlobal( + AirbyteGlobalState() + .withSharedState(Jsons.jsonNode(mapOf("wal" to 20))) + .withStreamStates( + listOf( + AirbyteStreamState() + .withStreamDescriptor(StreamDescriptor().withName(name1).withNamespace(namespace1)) + .withStreamState(Jsons.jsonNode(mapOf("id" to 20))), + AirbyteStreamState() + .withStreamDescriptor(StreamDescriptor().withName(name2).withNamespace(namespace2)) + .withStreamState(Jsons.jsonNode(mapOf("id" to 30))), + ), + ), + ) + .withSourceStats(AirbyteStateStats().withRecordCount((recordCountStream1 + recordCountStream2).toDouble())) + .withDestinationStats(AirbyteStateStats().withRecordCount((recordCountStream1 + recordCountStream2).toDouble())) + .withType(AirbyteStateMessage.AirbyteStateType.GLOBAL), + ), + ) val replicationFeatureFlags: ReplicationFeatureFlags = mockk() every { replicationFeatureFlags.failOnInvalidChecksum } returns true @@ -998,15 +1117,15 @@ class ParallelStreamStatsTrackerTest { trackRecords(recordCountStream2, name2, namespace2) // First assert that the checksums match - statsTracker.updateSourceStatesStats(stateMessage1) - statsTracker.updateDestinationStateStats(stateMessage1) + statsTracker.updateSourceStatesStats(stateMessage1.state) + statsTracker.updateDestinationStateStats(stateMessage1.state) trackRecords(recordCountStream1, name1, namespace1) trackRecords(recordCountStream2, name2, namespace2) - statsTracker.updateSourceStatesStats(stateMessage2) + statsTracker.updateSourceStatesStats(stateMessage2.state) Assertions.assertThrows(InvalidChecksumException::class.java) { - stateMessage2.sourceStats.recordCount = recordCountStream1.toDouble() - 
statsTracker.updateDestinationStateStats(stateMessage2) + stateMessage2.state.sourceStats.recordCount = recordCountStream1.toDouble() + statsTracker.updateDestinationStateStats(stateMessage2.state) } } @@ -1145,7 +1264,11 @@ class ParallelStreamStatsTrackerTest { streamName: String, value: Int, ): AirbyteStateMessage { - return AirbyteMessageUtils.createStreamStateMessage(streamName, value) + return StateWithId.attachIdToStateMessageFromSource( + AirbyteMessage() + .withType(AirbyteMessage.Type.STATE) + .withState(AirbyteMessageUtils.createStreamStateMessage(streamName, value)), + ).state } private fun createGlobalState( @@ -1156,8 +1279,17 @@ class ParallelStreamStatsTrackerTest { for (streamName in streamNames) { streamStates.add(AirbyteMessageUtils.createStreamState(streamName).withStreamState(Jsons.jsonNode(value))) } - return AirbyteStateMessage().withType(AirbyteStateMessage.AirbyteStateType.GLOBAL) - .withGlobal(AirbyteGlobalState().withStreamStates(streamStates)) + return StateWithId.attachIdToStateMessageFromSource( + AirbyteMessage() + .withType(AirbyteMessage.Type.STATE) + .withState( + AirbyteStateMessage().withType(AirbyteStateMessage.AirbyteStateType.GLOBAL) + .withGlobal( + AirbyteGlobalState() + .withStreamStates(streamStates), + ), + ), + ).state } private fun trackRecords( diff --git a/airbyte-commons-worker/src/test/kotlin/io/airbyte/workers/storage/StorageClientFactoryTest.kt b/airbyte-commons-worker/src/test/kotlin/io/airbyte/workers/storage/StorageClientFactoryTest.kt index d11e2def434..06bfb87a272 100644 --- a/airbyte-commons-worker/src/test/kotlin/io/airbyte/workers/storage/StorageClientFactoryTest.kt +++ b/airbyte-commons-worker/src/test/kotlin/io/airbyte/workers/storage/StorageClientFactoryTest.kt @@ -25,7 +25,7 @@ import software.amazon.awssdk.services.s3.S3Client * When upgrading to Micronaut 4, the `@get:Primary` and `@get:Bean` annotations might be replaceable with @MockBean. 
*/ -private val bucket = StorageBucketConfig(log = "log", state = "state", workloadOutput = "workload") +private val bucket = StorageBucketConfig(log = "log", state = "state", workloadOutput = "workload", activityPayload = "payload") @MicronautTest @Property(name = STORAGE_TYPE, value = "local") diff --git a/airbyte-commons-worker/src/test/kotlin/io/airbyte/workers/storage/StorageClientTest.kt b/airbyte-commons-worker/src/test/kotlin/io/airbyte/workers/storage/StorageClientTest.kt index 1b7eacc884f..08848e355cc 100644 --- a/airbyte-commons-worker/src/test/kotlin/io/airbyte/workers/storage/StorageClientTest.kt +++ b/airbyte-commons-worker/src/test/kotlin/io/airbyte/workers/storage/StorageClientTest.kt @@ -35,7 +35,7 @@ private const val KEY = "a" private const val DOC1 = "hello" private const val DOC2 = "bye" -private val buckets = StorageBucketConfig(log = "log", state = "state", workloadOutput = "workload") +private val buckets = StorageBucketConfig(log = "log", state = "state", workloadOutput = "workload", activityPayload = "payload") class DocumentTypeTest { @Test diff --git a/airbyte-commons-worker/src/test/kotlin/io/airbyte/workers/storage/activities/ActivityPayloadStorageClientTest.kt b/airbyte-commons-worker/src/test/kotlin/io/airbyte/workers/storage/activities/ActivityPayloadStorageClientTest.kt new file mode 100644 index 00000000000..f91871ed1aa --- /dev/null +++ b/airbyte-commons-worker/src/test/kotlin/io/airbyte/workers/storage/activities/ActivityPayloadStorageClientTest.kt @@ -0,0 +1,166 @@ +package io.airbyte.workers.storage.activities + +import io.airbyte.commons.json.JsonSerde +import io.airbyte.config.StandardSyncOutput +import io.airbyte.metrics.lib.MetricClient +import io.airbyte.metrics.lib.OssMetricsRegistry +import io.airbyte.workers.models.RefreshSchemaActivityOutput +import io.airbyte.workers.storage.StorageClient +import io.mockk.every +import io.mockk.impl.annotations.MockK +import io.mockk.junit5.MockKExtension +import io.mockk.verify 
+import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.BeforeEach +import org.junit.jupiter.api.Test +import org.junit.jupiter.api.extension.ExtendWith + +@ExtendWith(MockKExtension::class) +class ActivityPayloadStorageClientTest { + @MockK + private lateinit var metricClient: MetricClient + + @MockK + private lateinit var storageClientRaw: StorageClient + + @MockK + private lateinit var serde: JsonSerde + + private lateinit var client: ActivityPayloadStorageClient + + private var comparator = NaiveEqualityComparator() + + @BeforeEach + fun setup() { + client = ActivityPayloadStorageClient(storageClientRaw, serde, metricClient) + + every { metricClient.count(any(), any(), *anyVararg()) } returns Unit + + every { storageClientRaw.write(any(), any()) } returns Unit + + every { storageClientRaw.read(any()) } returns "" + } + + @Test + fun `readJSON reads json and unmarshalls to specified class for a given uri`() { + val syncOutput = StandardSyncOutput().withAdditionalProperty("some", "unique-value-1") + val refreshOutput = RefreshSchemaActivityOutput() + + every { + storageClientRaw.read("sync-output") + } returns "serialized-sync-output" + + every { + serde.deserialize("serialized-sync-output", StandardSyncOutput::class.java) + } returns syncOutput + + val result1 = client.readJSON(ActivityPayloadURI("sync-output")) + + Assertions.assertEquals(syncOutput, result1) + + every { + storageClientRaw.read("refresh-output") + } returns "serialized-refresh-output" + + every { + serde.deserialize("serialized-refresh-output", RefreshSchemaActivityOutput::class.java) + } returns refreshOutput + + val result2 = client.readJSON(ActivityPayloadURI("refresh-output")) + + Assertions.assertEquals(refreshOutput, result2) + } + + @Test + fun `readJSON handles null`() { + every { + storageClientRaw.read("sync-output") + } returns null + + val result = client.readJSON(ActivityPayloadURI("sync-output")) + + Assertions.assertNull(result) + } + + @Test + fun `writeJSON 
serializes to json and writes to a given uri`() { + val syncOutput = StandardSyncOutput().withAdditionalProperty("some", "unique-value-1") + + every { + serde.serialize(syncOutput) + } returns "serialized-sync-output" + + client.writeJSON(ActivityPayloadURI("sync-output"), syncOutput) + + verify { storageClientRaw.write("sync-output", "serialized-sync-output") } + } + + @Test + fun `validateOutput records a result for a match`() { + val uri = ActivityPayloadURI("id", "version") + val syncOutput = StandardSyncOutput().withAdditionalProperty("some", "unique-value-1") + + every { serde.deserialize(any(), StandardSyncOutput::class.java) } returns syncOutput + + client.validateOutput(uri, StandardSyncOutput::class.java, syncOutput, comparator, listOf()) + + verify { + metricClient.count(OssMetricsRegistry.PAYLOAD_VALIDATION_RESULT, 1, *anyVararg()) + } + } + + @Test + fun `validateOutput records a result for a mismatch`() { + val uri = ActivityPayloadURI("id", "version") + val syncOutput1 = StandardSyncOutput().withAdditionalProperty("some", "unique-value-1") + val syncOutput2 = StandardSyncOutput().withAdditionalProperty("some", "unique-value-2") + + every { serde.deserialize(any(), StandardSyncOutput::class.java) } returns syncOutput2 + + client.validateOutput(uri, StandardSyncOutput::class.java, syncOutput1, comparator, listOf()) + + verify { + metricClient.count(OssMetricsRegistry.PAYLOAD_VALIDATION_RESULT, 1, *anyVararg()) + } + } + + @Test + fun `validateOutput records a result for a read miss`() { + val uri = ActivityPayloadURI("id", "version") + val syncOutput = StandardSyncOutput().withAdditionalProperty("some", "unique-value-1") + + every { storageClientRaw.read(uri.id) } returns null + + client.validateOutput(uri, StandardSyncOutput::class.java, syncOutput, comparator, listOf()) + + verify { + metricClient.count(OssMetricsRegistry.PAYLOAD_VALIDATION_RESULT, 1, *anyVararg()) + } + } + + @Test + fun `validateOutput records read failure for null uri`() { + val 
uri = null + val syncOutput = StandardSyncOutput().withAdditionalProperty("some", "unique-value-1") + + client.validateOutput(uri, StandardSyncOutput::class.java, syncOutput, comparator, listOf()) + + verify { + metricClient.count(OssMetricsRegistry.PAYLOAD_FAILURE_READ, 1, *anyVararg()) + } + } + + @Test + fun `validateOutput records read failure on client read exception`() { + val uri = ActivityPayloadURI("id", "version") + val syncOutput = StandardSyncOutput().withAdditionalProperty("some", "unique-value-1") + + every { storageClientRaw.read(uri.id) } throws RuntimeException("yikes") + + client.validateOutput(uri, StandardSyncOutput::class.java, syncOutput, comparator, listOf()) + + verify { + metricClient.count(OssMetricsRegistry.PAYLOAD_FAILURE_READ, 1, *anyVararg()) + } + } +} diff --git a/airbyte-commons-worker/src/test/kotlin/io/airbyte/workers/storage/activities/OutputStorageClientTest.kt b/airbyte-commons-worker/src/test/kotlin/io/airbyte/workers/storage/activities/OutputStorageClientTest.kt new file mode 100644 index 00000000000..d540bdfe7e2 --- /dev/null +++ b/airbyte-commons-worker/src/test/kotlin/io/airbyte/workers/storage/activities/OutputStorageClientTest.kt @@ -0,0 +1,67 @@ +package io.airbyte.workers.storage.activities + +import io.airbyte.metrics.lib.MetricClient +import io.airbyte.workers.storage.activities.OutputStorageClientTest.Fixtures.ATTEMPT_NUMBER +import io.airbyte.workers.storage.activities.OutputStorageClientTest.Fixtures.CONNECTION_ID +import io.airbyte.workers.storage.activities.OutputStorageClientTest.Fixtures.JOB_ID +import io.airbyte.workers.storage.activities.OutputStorageClientTest.Fixtures.TEST_PAYLOAD_NAME +import io.mockk.every +import io.mockk.impl.annotations.MockK +import io.mockk.junit5.MockKExtension +import io.mockk.verify +import org.junit.jupiter.api.BeforeEach +import org.junit.jupiter.api.Test +import org.junit.jupiter.api.assertDoesNotThrow +import org.junit.jupiter.api.extension.ExtendWith +import java.util.UUID + 
+@ExtendWith(MockKExtension::class) +class OutputStorageClientTest { + @MockK + private lateinit var metricClient: MetricClient + + @MockK + private lateinit var storageClient: ActivityPayloadStorageClient + + private lateinit var client: OutputStorageClient + + class TestClass(value1: String, value2: Long) + + @BeforeEach + fun setup() { + client = OutputStorageClient(storageClient, metricClient, TEST_PAYLOAD_NAME, TestClass::class.java) + + every { metricClient.count(any(), any(), *anyVararg()) } returns Unit + } + + @Test + fun `persist writes json to storage`() { + val obj = TestClass("test", 123) + client.persist(obj, CONNECTION_ID, JOB_ID, ATTEMPT_NUMBER, arrayOf()) + + verify(exactly = 1) { storageClient.writeJSON(any(), obj) } + } + + @Test + fun `persist short circuits if input null`() { + client.persist(null, CONNECTION_ID, JOB_ID, ATTEMPT_NUMBER, arrayOf()) + + verify(exactly = 0) { storageClient.writeJSON(any(), any()) } + } + + @Test + fun `persist swallows exceptions`() { + val obj = TestClass("test", 123) + + every { storageClient.writeJSON(any(), any()) } throws Exception("bang") + + assertDoesNotThrow { client.persist(obj, CONNECTION_ID, JOB_ID, ATTEMPT_NUMBER, arrayOf()) } + } + + object Fixtures { + const val TEST_PAYLOAD_NAME = "test-payload" + val CONNECTION_ID: UUID = UUID.randomUUID() + const val JOB_ID = 9987124L + const val ATTEMPT_NUMBER = 2 + } +} diff --git a/airbyte-commons/build.gradle.kts b/airbyte-commons/build.gradle.kts index cf905a15798..c9a7c3ca3ea 100644 --- a/airbyte-commons/build.gradle.kts +++ b/airbyte-commons/build.gradle.kts @@ -1,55 +1,58 @@ import de.undercouch.gradle.tasks.download.Download plugins { - id("io.airbyte.gradle.jvm.lib") - id("io.airbyte.gradle.publish") - alias(libs.plugins.de.undercouch.download) - kotlin("jvm") + id("io.airbyte.gradle.jvm.lib") + id("io.airbyte.gradle.publish") + alias(libs.plugins.de.undercouch.download) + kotlin("jvm") } dependencies { - compileOnly(libs.lombok) - 
annotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut - annotationProcessor(libs.log4j.core) - - implementation(libs.bundles.jackson) - implementation(libs.guava) - implementation(libs.bundles.slf4j) - implementation(libs.commons.io) - implementation(libs.bundles.apache) - implementation(libs.google.cloud.storage) - implementation(libs.bundles.log4j) - implementation(libs.airbyte.protocol) - - // this dependency is an exception to the above rule because it is only used INTERNALLY to the commons library. - implementation(libs.json.path) - - testImplementation(libs.bundles.junit) - testImplementation(libs.assertj.core) - testImplementation(libs.junit.pioneer) - - testRuntimeOnly(libs.junit.jupiter.engine) + compileOnly(libs.lombok) + annotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut + annotationProcessor(libs.log4j.core) + + implementation(libs.bundles.jackson) + implementation(libs.guava) + implementation(libs.bundles.slf4j) + implementation(libs.commons.io) + implementation(libs.bundles.apache) + implementation(libs.google.cloud.storage) + implementation(libs.bundles.log4j) + implementation(libs.airbyte.protocol) + + // this dependency is an exception to the above rule because it is only used INTERNALLY to the commons library. 
+ implementation(libs.json.path) + + testImplementation(libs.bundles.junit) + testImplementation(libs.assertj.core) + testImplementation(libs.junit.pioneer) + + testRuntimeOnly(libs.junit.jupiter.engine) } airbyte { - spotless { - excludes = listOf("src/main/resources/seed/specs_secrets_mask.yaml") - } + spotless { + excludes = listOf("src/main/resources/seed/specs_secrets_mask.yaml") + } } val downloadSpecSecretMask = tasks.register("downloadSpecSecretMask") { - src("https://connectors.airbyte.com/files/registries/v0/specs_secrets_mask.yaml") - dest(File(projectDir, "src/main/resources/seed/specs_secrets_mask.yaml")) - overwrite(true) + src("https://connectors.airbyte.com/files/registries/v0/specs_secrets_mask.yaml") + dest(File(projectDir, "src/main/resources/seed/specs_secrets_mask.yaml")) + overwrite(true) + onlyIfModified(true) } tasks.named("processResources") { - dependsOn(downloadSpecSecretMask) + dependsOn(downloadSpecSecretMask) } tasks.named("test") { - environment(mapOf( - "Z_TESTING_PURPOSES_ONLY_1" to "value-defined", - "Z_TESTING_PURPOSES_ONLY_2" to " ", - )) + environment( + mapOf( + "Z_TESTING_PURPOSES_ONLY_1" to "value-defined", + "Z_TESTING_PURPOSES_ONLY_2" to " ", + ) + ) } diff --git a/airbyte-commons/src/main/java/io/airbyte/commons/auth/AuthRole.java b/airbyte-commons/src/main/java/io/airbyte/commons/auth/AuthRole.java index d6964fe9f38..8851cf630d7 100644 --- a/airbyte-commons/src/main/java/io/airbyte/commons/auth/AuthRole.java +++ b/airbyte-commons/src/main/java/io/airbyte/commons/auth/AuthRole.java @@ -37,7 +37,7 @@ * so by definition they have some access to the instance.)

  • * */ -public enum AuthRole { +public enum AuthRole implements AuthRoleInterface { OWNER(500, AuthRoleConstants.OWNER), ADMIN(400, AuthRoleConstants.ADMIN), diff --git a/airbyte-commons/src/main/java/io/airbyte/commons/auth/AuthRoleInterface.java b/airbyte-commons/src/main/java/io/airbyte/commons/auth/AuthRoleInterface.java new file mode 100644 index 00000000000..32325d2a1b3 --- /dev/null +++ b/airbyte-commons/src/main/java/io/airbyte/commons/auth/AuthRoleInterface.java @@ -0,0 +1,19 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.commons.auth; + +/** + * This interface allows us to approximate a discriminated union of the implementers (AuthRole, + * WorkspaceAuthRole, and OrganizationAuthRole). This allows our the + * ApiAuthorizationHelper.kt#ensureUserHasAnyRequiredRoleOrThrow to accept a list containing any of + * the types without forcing us to use strings for everything. + */ +public interface AuthRoleInterface { + + int getAuthority(); + + String getLabel(); + +} diff --git a/airbyte-commons/src/main/java/io/airbyte/commons/auth/OrganizationAuthRole.java b/airbyte-commons/src/main/java/io/airbyte/commons/auth/OrganizationAuthRole.java index b001c62cd48..c1441c1e986 100644 --- a/airbyte-commons/src/main/java/io/airbyte/commons/auth/OrganizationAuthRole.java +++ b/airbyte-commons/src/main/java/io/airbyte/commons/auth/OrganizationAuthRole.java @@ -16,7 +16,7 @@ * organization leveled auth role and workspace leveled auth roles. See AuthRole.java for more * information. 
*/ -public enum OrganizationAuthRole { +public enum OrganizationAuthRole implements AuthRoleInterface { ORGANIZATION_ADMIN(400, AuthRoleConstants.ORGANIZATION_ADMIN), ORGANIZATION_EDITOR(300, AuthRoleConstants.ORGANIZATION_EDITOR), diff --git a/airbyte-commons/src/main/java/io/airbyte/commons/auth/WorkspaceAuthRole.java b/airbyte-commons/src/main/java/io/airbyte/commons/auth/WorkspaceAuthRole.java index 862460dc017..556d7ba0065 100644 --- a/airbyte-commons/src/main/java/io/airbyte/commons/auth/WorkspaceAuthRole.java +++ b/airbyte-commons/src/main/java/io/airbyte/commons/auth/WorkspaceAuthRole.java @@ -11,7 +11,7 @@ import java.util.stream.Collectors; import java.util.stream.Stream; -public enum WorkspaceAuthRole { +public enum WorkspaceAuthRole implements AuthRoleInterface { WORKSPACE_ADMIN(400, AuthRoleConstants.WORKSPACE_ADMIN), WORKSPACE_EDITOR(300, AuthRoleConstants.WORKSPACE_EDITOR), diff --git a/airbyte-commons/src/main/java/io/airbyte/commons/features/EnvVariableFeatureFlags.java b/airbyte-commons/src/main/java/io/airbyte/commons/features/EnvVariableFeatureFlags.java index 50d549a50cb..1e888236cde 100644 --- a/airbyte-commons/src/main/java/io/airbyte/commons/features/EnvVariableFeatureFlags.java +++ b/airbyte-commons/src/main/java/io/airbyte/commons/features/EnvVariableFeatureFlags.java @@ -18,7 +18,6 @@ public class EnvVariableFeatureFlags implements FeatureFlags { // Set this value to true to see all messages from the source to destination, set to one second // emission public static final String LOG_CONNECTOR_MESSAGES = "LOG_CONNECTOR_MESSAGES"; - public static final String NEED_STATE_VALIDATION = "NEED_STATE_VALIDATION"; public static final String APPLY_FIELD_SELECTION = "APPLY_FIELD_SELECTION"; public static final String FIELD_SELECTION_WORKSPACES = "FIELD_SELECTION_WORKSPACES"; @@ -43,11 +42,6 @@ public boolean logConnectorMessages() { return getEnvOrDefault(LOG_CONNECTOR_MESSAGES, false, Boolean::parseBoolean); } - @Override - public boolean 
needStateValidation() { - return getEnvOrDefault(NEED_STATE_VALIDATION, true, Boolean::parseBoolean); - } - @Override public boolean applyFieldSelection() { return getEnvOrDefault(APPLY_FIELD_SELECTION, false, Boolean::parseBoolean); diff --git a/airbyte-commons/src/main/java/io/airbyte/commons/features/FeatureFlags.java b/airbyte-commons/src/main/java/io/airbyte/commons/features/FeatureFlags.java index 8fa34ee2a54..fbbf6239d15 100644 --- a/airbyte-commons/src/main/java/io/airbyte/commons/features/FeatureFlags.java +++ b/airbyte-commons/src/main/java/io/airbyte/commons/features/FeatureFlags.java @@ -16,8 +16,6 @@ public interface FeatureFlags { boolean logConnectorMessages(); - boolean needStateValidation(); - /** * Return true if field selection should be applied. See also fieldSelectionWorkspaces. * diff --git a/airbyte-commons/src/main/java/io/airbyte/commons/io/LineGobbler.java b/airbyte-commons/src/main/java/io/airbyte/commons/io/LineGobbler.java index e59e919560e..21f5cb55c75 100644 --- a/airbyte-commons/src/main/java/io/airbyte/commons/io/LineGobbler.java +++ b/airbyte-commons/src/main/java/io/airbyte/commons/io/LineGobbler.java @@ -68,8 +68,28 @@ public static void gobble(final InputStream is, final Consumer consumer, * @param mdcScopeBuilder mdc scope to be used during consumption */ public static void gobble(final InputStream is, final Consumer consumer, final String caller, final MdcScope.Builder mdcScopeBuilder) { + gobble(is, consumer, caller, mdcScopeBuilder, Executors.newSingleThreadExecutor()); + } + + /** + * Connect an input stream to be consumed by consumer with an {@link MdcScope}, caller label, and + * executor. + * + * Passing the executor lets you wait to ensure that all lines have been gobbled, since it happens + * asynchronously. 
+ * + * @param is input stream + * @param consumer consumer + * @param caller name of caller + * @param mdcScopeBuilder mdc scope to be used during consumption + * @param executor executor to run gobbling + */ + public static void gobble(final InputStream is, + final Consumer consumer, + final String caller, + final MdcScope.Builder mdcScopeBuilder, + final ExecutorService executor) { if (is != null) { - final ExecutorService executor = Executors.newSingleThreadExecutor(); final Map mdc = MDC.getCopyOfContextMap(); final var gobbler = new LineGobbler(is, consumer, executor, mdc, caller, mdcScopeBuilder); executor.submit(gobbler); @@ -121,14 +141,6 @@ public static void endSection(final String message) { this(is, consumer, executor, mdc, GENERIC, MdcScope.DEFAULT_BUILDER); } - LineGobbler(final InputStream is, - final Consumer consumer, - final ExecutorService executor, - final Map mdc, - final MdcScope.Builder mdcScopeBuilder) { - this(is, consumer, executor, mdc, GENERIC, mdcScopeBuilder); - } - LineGobbler(final InputStream is, final Consumer consumer, final ExecutorService executor, diff --git a/airbyte-commons/src/main/java/io/airbyte/commons/json/JsonSerde.kt b/airbyte-commons/src/main/java/io/airbyte/commons/json/JsonSerde.kt new file mode 100644 index 00000000000..f3d1c6583ec --- /dev/null +++ b/airbyte-commons/src/main/java/io/airbyte/commons/json/JsonSerde.kt @@ -0,0 +1,19 @@ +package io.airbyte.commons.json + +/** + * Serde: _Ser_ialization + _de_serialization + * + * Singleton wrapper around Jsons for use with DI and allow testability via mocking. Add methods here as prudent. + */ +class JsonSerde { + fun serialize(obj: T): String { + return Jsons.serialize(obj) + } + + fun deserialize( + json: String, + target: Class, + ): T? 
{ + return Jsons.deserialize(json, target) + } +} diff --git a/airbyte-commons/src/main/java/io/airbyte/commons/json/Jsons.java b/airbyte-commons/src/main/java/io/airbyte/commons/json/Jsons.java index 0171620958f..1e7b141116b 100644 --- a/airbyte-commons/src/main/java/io/airbyte/commons/json/Jsons.java +++ b/airbyte-commons/src/main/java/io/airbyte/commons/json/Jsons.java @@ -8,6 +8,7 @@ import static java.util.stream.Collectors.toMap; import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.core.StreamReadConstraints; import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.core.util.DefaultPrettyPrinter; import com.fasterxml.jackson.core.util.Separators; @@ -45,8 +46,18 @@ @SuppressWarnings({"PMD.AvoidReassigningParameters", "PMD.AvoidCatchingThrowable"}) public class Jsons { + private static final StreamReadConstraints STREAM_READ_CONSTRAINTS = StreamReadConstraints + .builder() + .maxStringLength(Integer.MAX_VALUE) + .build(); + // Object Mapper is thread-safe private static final ObjectMapper OBJECT_MAPPER = MoreMappers.initMapper(); + + static { + OBJECT_MAPPER.getFactory().setStreamReadConstraints(STREAM_READ_CONSTRAINTS); + } + /** * Exact ObjectMapper preserves float information by using the Java Big Decimal type. 
*/ @@ -55,6 +66,7 @@ public class Jsons { static { OBJECT_MAPPER_EXACT = MoreMappers.initMapper(); OBJECT_MAPPER_EXACT.enable(DeserializationFeature.USE_BIG_DECIMAL_FOR_FLOATS); + OBJECT_MAPPER_EXACT.getFactory().setStreamReadConstraints(STREAM_READ_CONSTRAINTS); } private static final ObjectWriter OBJECT_WRITER = OBJECT_MAPPER.writer(new JsonPrettyPrinter()); diff --git a/airbyte-commons/src/main/java/io/airbyte/commons/logging/MaskedDataInterceptor.java b/airbyte-commons/src/main/java/io/airbyte/commons/logging/MaskedDataInterceptor.java index d11418ac725..5e748db126e 100644 --- a/airbyte-commons/src/main/java/io/airbyte/commons/logging/MaskedDataInterceptor.java +++ b/airbyte-commons/src/main/java/io/airbyte/commons/logging/MaskedDataInterceptor.java @@ -11,9 +11,11 @@ import io.airbyte.commons.json.Jsons; import io.airbyte.commons.yaml.Yamls; import java.nio.charset.Charset; +import java.util.List; import java.util.Map; import java.util.Optional; import java.util.Set; +import java.util.regex.Pattern; import java.util.stream.Collectors; import org.apache.commons.io.IOUtils; import org.apache.logging.log4j.Logger; @@ -42,10 +44,55 @@ public class MaskedDataInterceptor implements RewritePolicy { protected static final Logger logger = StatusLogger.getLogger(); + /** + * Regular expression pattern flag that enables case in-sensitive matching. + */ + private static final String CASE_INSENSITIVE_FLAG = "(?i)"; + + // This is a little circuitous, but it gets the regex syntax highlighting in intelliJ to work. + private static final String DESTINATION_ERROR_PREFIX = Pattern.compile("^(?.*destination.*\\s+>\\s+ERROR.+)").pattern(); + + /** + * Regular expression replacement pattern for applying the mask to PII log messages. 
+ */ + private static final String KNOWN_PII_LOG_MESSAGE_REPLACEMENT_PATTERN = + "${destinationPrefix}${messagePrefix}" + AirbyteSecretConstants.SECRETS_MASK; + + /** + * Delimiter used as part of the regular expression pattern for applying the mask to property + * values. + */ + private static final String PROPERTY_MATCHING_PATTERN_DELIMITER = "|"; + + /** + * Regular expression pattern prefix for applying the mask to property values. + */ + private static final String PROPERTY_MATCHING_PATTERN_PREFIX = "\"("; + + /** + * Regular expression pattern suffix for applying the mask to property values. + */ + private static final String PROPERTY_MATCHING_PATTERN_SUFFIX = ")\"\\s*:\\s*(\"(?:[^\"\\\\]|\\\\.)*\"|\\[[^]\\[]*]|\\d+)"; + + /** + * Name of the key in the mask YAML file that contains the list of maskable properties. + */ + private static final String PROPERTIES_KEY = "properties"; + + /** + * Regular expression pattern used to replace a key/value property with a masked value while + * maintaining the property key/name. + */ + private static final String REPLACEMENT_PATTERN = "\"$1\":\"" + AirbyteSecretConstants.SECRETS_MASK + "\""; + /** * The pattern used to determine if a message contains sensitive data. */ - private final Optional pattern; + private final Optional pattern; + + private static final List KNOWN_PII_PATTERNS = List.of( + Pattern.compile(DESTINATION_ERROR_PREFIX + "(?Received\\s+invalid\\s+message:)(.+)$"), + Pattern.compile(DESTINATION_ERROR_PREFIX + "(?org\\.jooq\\.exception\\.DataAccessException: SQL.+values\\s+\\()(.+)$")); @PluginFactory public static MaskedDataInterceptor createPolicy( @@ -82,11 +129,21 @@ public LogEvent rewrite(final LogEvent source) { * @return The possibly masked log message. 
*/ private String applyMask(final String message) { - if (pattern.isPresent()) { - return message.replaceAll(pattern.get(), "\"$1\":\"" + AirbyteSecretConstants.SECRETS_MASK + "\""); - } else { - return message; - } + final String piiScrubbedMessage = removeKnownPii(message); + return pattern.map(p -> p.matcher(piiScrubbedMessage).replaceAll(REPLACEMENT_PATTERN)) + .orElse(piiScrubbedMessage); + } + + /** + * Removes known PII from the message. + * + * @param message the log line + * @return a redacted log line + */ + private static String removeKnownPii(final String message) { + return KNOWN_PII_PATTERNS.stream() + .reduce(message, (msg, pattern) -> pattern.matcher(msg).replaceAll( + KNOWN_PII_LOG_MESSAGE_REPLACEMENT_PATTERN), (a, b) -> a); } /** @@ -100,7 +157,7 @@ private Set getMaskableProperties(final String specMaskFile) { try { final String maskFileContents = IOUtils.toString(getClass().getResourceAsStream(specMaskFile), Charset.defaultCharset()); final Map> properties = Jsons.object(Yamls.deserialize(maskFileContents), new TypeReference<>() {}); - return properties.getOrDefault("properties", Set.of()); + return properties.getOrDefault(PROPERTIES_KEY, Set.of()); } catch (final Exception e) { logger.error("Unable to load mask data from '{}': {}.", specMaskFile, e.getMessage()); return Set.of(); @@ -113,9 +170,9 @@ private Set getMaskableProperties(final String specMaskFile) { * @param specMaskFile The spec mask file. * @return The regular expression pattern used to find maskable properties. */ - private Optional buildPattern(final String specMaskFile) { + private Optional buildPattern(final String specMaskFile) { final Set maskableProperties = getMaskableProperties(specMaskFile); - return !maskableProperties.isEmpty() ? Optional.of(generatePattern(maskableProperties)) : Optional.empty(); + return !maskableProperties.isEmpty() ? 
Optional.of(Pattern.compile(generatePattern(maskableProperties))) : Optional.empty(); } /** @@ -126,10 +183,10 @@ private Optional buildPattern(final String specMaskFile) { */ private String generatePattern(final Set properties) { final StringBuilder builder = new StringBuilder(); - builder.append("(?i)"); // case insensitive - builder.append("\"("); - builder.append(properties.stream().collect(Collectors.joining("|"))); - builder.append(")\"\\s*:\\s*(\"(?:[^\"\\\\]|\\\\.)*\"|\\[[^]\\[]*]|\\d+)"); + builder.append(CASE_INSENSITIVE_FLAG); + builder.append(PROPERTY_MATCHING_PATTERN_PREFIX); + builder.append(properties.stream().collect(Collectors.joining(PROPERTY_MATCHING_PATTERN_DELIMITER))); + builder.append(PROPERTY_MATCHING_PATTERN_SUFFIX); return builder.toString(); } diff --git a/airbyte-commons/src/main/java/io/airbyte/commons/version/AirbyteVersion.java b/airbyte-commons/src/main/java/io/airbyte/commons/version/AirbyteVersion.java index f6a22b06908..78f1590d2d4 100644 --- a/airbyte-commons/src/main/java/io/airbyte/commons/version/AirbyteVersion.java +++ b/airbyte-commons/src/main/java/io/airbyte/commons/version/AirbyteVersion.java @@ -19,27 +19,6 @@ public AirbyteVersion(final String major, final String minor, final String patch super(major, minor, patch); } - /** - * Test if versions are compatible. Only the major and minor part of the Version is taken into - * account. 
- * - * @param version1 to test - * @param version2 to test - * @throws IllegalStateException if they are not compatible - */ - public static void assertIsCompatible(final AirbyteVersion version1, final AirbyteVersion version2) throws IllegalStateException { - if (!isCompatible(version1, version2)) { - throw new IllegalStateException(getErrorMessage(version1, version2)); - } - } - - private static String getErrorMessage(final AirbyteVersion version1, final AirbyteVersion version2) { - return String.format( - "Version mismatch between %s and %s.\n" - + "Please upgrade or reset your Airbyte Database, see more at https://docs.airbyte.io/operator-guides/upgrading-airbyte", - version1.serialize(), version2.serialize()); - } - @Override public String toString() { return "AirbyteVersion{" @@ -50,29 +29,4 @@ public String toString() { + '}'; } - /** - * Convert a version to itself without its patch version. - * - * @param airbyteVersion to convert - * @return version without patch - */ - public static AirbyteVersion versionWithoutPatch(final AirbyteVersion airbyteVersion) { - final String versionWithoutPatch = "" + airbyteVersion.getMajorVersion() - + "." - + airbyteVersion.getMinorVersion() - + ".0-" - + airbyteVersion.serialize().replace("\n", "").strip().split("-")[1]; - return new AirbyteVersion(versionWithoutPatch); - } - - /** - * Convert a string representation of a version to itself without its patch version. 
- * - * @param airbyteVersion to convert - * @return version without patch - */ - public static AirbyteVersion versionWithoutPatch(final String airbyteVersion) { - return versionWithoutPatch(new AirbyteVersion(airbyteVersion)); - } - } diff --git a/airbyte-commons/src/main/kotlin/io/airbyte/commons/envvar/EnvVar.kt b/airbyte-commons/src/main/kotlin/io/airbyte/commons/envvar/EnvVar.kt index a4af450ae98..017ec4370bd 100644 --- a/airbyte-commons/src/main/kotlin/io/airbyte/commons/envvar/EnvVar.kt +++ b/airbyte-commons/src/main/kotlin/io/airbyte/commons/envvar/EnvVar.kt @@ -31,6 +31,7 @@ enum class EnvVar { DD_VERSION, DEPLOYMENT_ENV, DEPLOYMENT_MODE, + DOCKER_HOST, DOCKER_NETWORK, FEATURE_FLAG_CLIENT, @@ -70,9 +71,12 @@ enum class EnvVar { OTEL_COLLECTOR_ENDPOINT, + PATH_TO_CONNECTORS, + PUBLISH_METRICS, REMOTE_DATAPLANE_SERVICEACCOUNTS, + ROOTLESS_WORKLOAD, SERVICE_NAME, SIDECAR_KUBE_CPU_LIMIT, @@ -81,6 +85,7 @@ enum class EnvVar { SIDECAR_MEMORY_REQUEST, SOCAT_KUBE_CPU_LIMIT, SOCAT_KUBE_CPU_REQUEST, + STORAGE_BUCKET_ACTIVITY_PAYLOAD, STORAGE_BUCKET_LOG, STORAGE_BUCKET_STATE, STORAGE_BUCKET_WORKLOAD_OUTPUT, diff --git a/airbyte-commons/src/test/java/io/airbyte/commons/logging/MaskedDataInterceptorTest.java b/airbyte-commons/src/test/java/io/airbyte/commons/logging/MaskedDataInterceptorTest.java index 23f4d566c1c..af81de9f6c5 100644 --- a/airbyte-commons/src/test/java/io/airbyte/commons/logging/MaskedDataInterceptorTest.java +++ b/airbyte-commons/src/test/java/io/airbyte/commons/logging/MaskedDataInterceptorTest.java @@ -32,6 +32,28 @@ class MaskedDataInterceptorTest { private static final String JSON_WITHOUT_SECRETS = "{\"prop1\":\"test\",\"" + OTHER + "\":{\"prop2\":\"value\",\"prop3\":1234}}"; public static final String TEST_SPEC_SECRET_MASK_YAML = "/test_spec_secret_mask.yaml"; + public static final String TEST_LOGGED_RECORD_CONTENTS = + "2024-03-21 12:19:08 \u001B[43mdestination\u001B[0m > ERROR 
i.a.c.i.b.Destination$ShimToSerializedAirbyteMessageConsumer(consumeMessage):120 " + + "Received invalid message: {\"type\":\"RECORD\",\"record\":{\"namespace\":\""; + public static final String REDACTED_LOGGED_RECORD_CONTENTS = + "2024-03-21 12:19:08 \u001B[43mdestination\u001B[0m > ERROR i.a.c.i.b.Destination$ShimToSerializedAirbyteMessageConsumer(consumeMessage):120 " + + "Received invalid message:" + + AirbyteSecretConstants.SECRETS_MASK; + public static final String TEST_LOGGED_SQL_VALUES = + "2024-03-19 20:03:43 \u001B[43mdestination\u001B[0m > ERROR pool-4-thread-1 i.a.c.i.d.a.FlushWorkers(flush$lambda$6):192 Flush Worker (632c9) " + + "-- flush worker " + + "error: java.lang.RuntimeException: org.jooq.exception.DataAccessException: SQL [insert into " + + "\"airbyte_internal\".\"public_raw__stream_foo\" (_airbyte_raw_id, _airbyte_data, _airbyte_meta, _airbyte_extracted_at, " + + "_airbyte_loaded_at) values ('UUID', a bunch of other stuff"; + + public static final String REDACTED_LOGGED_SQL_VALUES = + "2024-03-19 20:03:43 \u001B[43mdestination\u001B[0m > ERROR pool-4-thread-1 i.a.c.i.d.a.FlushWorkers(flush$lambda$6):192 Flush Worker (632c9) " + + "-- flush worker " + + "error: java.lang.RuntimeException: org.jooq.exception.DataAccessException: SQL [insert into " + + "\"airbyte_internal\".\"public_raw__stream_foo\" (_airbyte_raw_id, _airbyte_data, _airbyte_meta, _airbyte_extracted_at, " + + "_airbyte_loaded_at) values (" + + AirbyteSecretConstants.SECRETS_MASK; + @Test void testMaskingMessageWithStringSecret() { final Message message = mock(Message.class); @@ -125,4 +147,30 @@ void testMissingMaskingFileDoesNotPreventLogging() { }); } + @Test + void testMaskingMessageWithSqlValues() { + final Message message = mock(Message.class); + final LogEvent logEvent = mock(LogEvent.class); + when(message.getFormattedMessage()).thenReturn(TEST_LOGGED_SQL_VALUES); + when(logEvent.getMessage()).thenReturn(message); + + final MaskedDataInterceptor interceptor = 
MaskedDataInterceptor.createPolicy(TEST_SPEC_SECRET_MASK_YAML); + + final LogEvent result = interceptor.rewrite(logEvent); + assertEquals(REDACTED_LOGGED_SQL_VALUES, result.getMessage().getFormattedMessage()); + } + + @Test + void testMaskingMessageWithRecordContents() { + final Message message = mock(Message.class); + final LogEvent logEvent = mock(LogEvent.class); + when(message.getFormattedMessage()).thenReturn(TEST_LOGGED_RECORD_CONTENTS); + when(logEvent.getMessage()).thenReturn(message); + + final MaskedDataInterceptor interceptor = MaskedDataInterceptor.createPolicy(TEST_SPEC_SECRET_MASK_YAML); + + final LogEvent result = interceptor.rewrite(logEvent); + assertEquals(REDACTED_LOGGED_RECORD_CONTENTS, result.getMessage().getFormattedMessage()); + } + } diff --git a/airbyte-commons/src/test/java/io/airbyte/commons/version/AirbyteVersionTest.java b/airbyte-commons/src/test/java/io/airbyte/commons/version/AirbyteVersionTest.java index 571c4e01eb7..b48e34163b2 100644 --- a/airbyte-commons/src/test/java/io/airbyte/commons/version/AirbyteVersionTest.java +++ b/airbyte-commons/src/test/java/io/airbyte/commons/version/AirbyteVersionTest.java @@ -105,12 +105,6 @@ void testSerialize() { assertEquals(nonDevVersion, new AirbyteVersion(nonDevVersion).serialize()); } - @Test - void testCheckVersion() { - AirbyteVersion.assertIsCompatible(new AirbyteVersion("3.2.1"), new AirbyteVersion("3.2.1")); - assertThrows(IllegalStateException.class, () -> AirbyteVersion.assertIsCompatible(new AirbyteVersion("1.2.3"), new AirbyteVersion("3.2.1"))); - } - @Test void testCheckOnlyPatchVersion() { assertFalse(new AirbyteVersion(VERSION_678).checkOnlyPatchVersionIsUpdatedComparedTo(new AirbyteVersion(VERSION_678))); diff --git a/airbyte-config/config-models/build.gradle.kts b/airbyte-config/config-models/build.gradle.kts index 87d850b99d9..008e84125da 100644 --- a/airbyte-config/config-models/build.gradle.kts +++ b/airbyte-config/config-models/build.gradle.kts @@ -2,93 +2,93 @@ import 
org.gradle.api.tasks.testing.logging.TestLogEvent import org.jsonschema2pojo.SourceType plugins { - id("io.airbyte.gradle.jvm.lib") - id("io.airbyte.gradle.publish") - id("com.github.eirnym.js2p") - kotlin("jvm") - kotlin("kapt") + id("io.airbyte.gradle.jvm.lib") + id("io.airbyte.gradle.publish") + id("com.github.eirnym.js2p") + kotlin("jvm") + kotlin("kapt") } dependencies { - compileOnly(libs.lombok) - annotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut - annotationProcessor(libs.bundles.micronaut.annotation.processor) - - kapt(libs.bundles.micronaut.annotation.processor) - - api(libs.bundles.micronaut.annotation) - - implementation(project(":airbyte-json-validation")) - implementation(project(":airbyte-commons")) - - implementation(platform(libs.fasterxml)) - implementation(libs.bundles.jackson) - implementation(libs.spotbugs.annotations) - implementation(libs.guava) - implementation(libs.micronaut.kotlin.extension.functions) - implementation(libs.google.cloud.storage) - implementation(libs.aws.java.sdk.s3) - implementation(libs.aws.java.sdk.sts) - implementation(libs.s3) - implementation(libs.sts) - implementation(libs.bundles.apache) - implementation(libs.airbyte.protocol) - implementation(libs.commons.io) - - testImplementation(libs.bundles.junit) - testImplementation(libs.assertj.core) - testImplementation(libs.junit.pioneer) - testImplementation(libs.bundles.micronaut.test) - - testRuntimeOnly(libs.junit.jupiter.engine) + compileOnly(libs.lombok) + annotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut + annotationProcessor(libs.bundles.micronaut.annotation.processor) + + kapt(libs.bundles.micronaut.annotation.processor) + + api(libs.bundles.micronaut.annotation) + + implementation(project(":airbyte-json-validation")) + implementation(project(":airbyte-commons")) + + implementation(platform(libs.fasterxml)) + implementation(libs.bundles.jackson) + implementation(libs.spotbugs.annotations) + implementation(libs.guava) 
+ implementation(libs.micronaut.kotlin.extension.functions) + implementation(libs.google.cloud.storage) + implementation(libs.aws.java.sdk.s3) + implementation(libs.aws.java.sdk.sts) + implementation(libs.s3) + implementation(libs.sts) + implementation(libs.bundles.apache) + implementation(libs.airbyte.protocol) + implementation(libs.commons.io) + + testImplementation(libs.bundles.junit) + testImplementation(libs.assertj.core) + testImplementation(libs.junit.pioneer) + testImplementation(libs.bundles.micronaut.test) + + testRuntimeOnly(libs.junit.jupiter.engine) } jsonSchema2Pojo { - setSourceType(SourceType.YAMLSCHEMA.name) - setSource(files("${sourceSets["main"].output.resourcesDir}/types")) - targetDirectory = file("$buildDir/generated/src/gen/java/") + setSourceType(SourceType.YAMLSCHEMA.name) + setSource(files("${sourceSets["main"].output.resourcesDir}/types")) + targetDirectory = file("$buildDir/generated/src/gen/java/") - targetPackage = "io.airbyte.config" - useLongIntegers = true + targetPackage = "io.airbyte.config" + useLongIntegers = true - removeOldOutput = true + removeOldOutput = true - generateBuilders = true - includeConstructors = false - includeSetters = true - serializable = true + generateBuilders = true + includeConstructors = false + includeSetters = true + serializable = true } tasks.named("test") { - useJUnitPlatform { - excludeTags("log4j2-config", "logger-client") - } + useJUnitPlatform { + excludeTags("log4j2-config", "logger-client") + } } tasks.named("compileKotlin") { - dependsOn(tasks.named("generateJsonSchema2Pojo")) + dependsOn(tasks.named("generateJsonSchema2Pojo")) } tasks.register("log4j2IntegrationTest") { - useJUnitPlatform { - includeTags("log4j2-config") - } - testLogging { - events = setOf(TestLogEvent.PASSED, TestLogEvent.SKIPPED, TestLogEvent.FAILED) - } + useJUnitPlatform { + includeTags("log4j2-config") + } + testLogging { + events = setOf(TestLogEvent.PASSED, TestLogEvent.SKIPPED, TestLogEvent.FAILED) + } } 
tasks.register("logClientsIntegrationTest") { - useJUnitPlatform { - includeTags("logger-client") - } - testLogging { - events = setOf(TestLogEvent.PASSED, TestLogEvent.SKIPPED, TestLogEvent.FAILED) - } + useJUnitPlatform { + includeTags("logger-client") + } + testLogging { + events = setOf(TestLogEvent.PASSED, TestLogEvent.SKIPPED, TestLogEvent.FAILED) + } } afterEvaluate { - tasks.named("kaptGenerateStubsKotlin") { - dependsOn(tasks.named("generateJsonSchema2Pojo")) - } + tasks.named("kaptGenerateStubsKotlin") { + dependsOn(tasks.named("generateJsonSchema2Pojo")) + } } diff --git a/airbyte-config/config-models/src/main/java/io/airbyte/config/EnvConfigs.java b/airbyte-config/config-models/src/main/java/io/airbyte/config/EnvConfigs.java index 6548e5ec4e1..340d4798252 100644 --- a/airbyte-config/config-models/src/main/java/io/airbyte/config/EnvConfigs.java +++ b/airbyte-config/config-models/src/main/java/io/airbyte/config/EnvConfigs.java @@ -54,7 +54,6 @@ public class EnvConfigs implements Configs { private static final String DEFAULT_JOB_KUBE_MAIN_CONTAINER_IMAGE_PULL_POLICY = "IfNotPresent"; private static final String DEFAULT_JOB_KUBE_SIDECAR_CONTAINER_IMAGE_PULL_POLICY = "IfNotPresent"; private static final String DEFAULT_JOB_KUBE_SOCAT_IMAGE = "alpine/socat:1.7.4.4-r0"; - private static final String DEFAULT_JOB_KUBE_BUSYBOX_IMAGE = "busybox:1.35"; public static final int DEFAULT_FAILED_JOBS_IN_A_ROW_BEFORE_CONNECTION_DISABLE = 100; public static final int DEFAULT_DAYS_OF_ONLY_FAILED_JOBS_BEFORE_CONNECTION_DISABLE = 14; @@ -87,7 +86,8 @@ private StorageConfig getLogConfiguration() { final var buckets = new StorageBucketConfig( getEnsureEnv(EnvVar.STORAGE_BUCKET_LOG), getEnsureEnv(EnvVar.STORAGE_BUCKET_STATE), - getEnsureEnv(EnvVar.STORAGE_BUCKET_WORKLOAD_OUTPUT)); + getEnsureEnv(EnvVar.STORAGE_BUCKET_WORKLOAD_OUTPUT), + getEnsureEnv(EnvVar.STORAGE_BUCKET_ACTIVITY_PAYLOAD)); return switch (getEnsureEnv(EnvVar.STORAGE_TYPE)) { case "GCS" -> new GcsStorageConfig( 
diff --git a/airbyte-config/config-models/src/main/java/io/airbyte/config/storage/MinioS3ClientFactory.java b/airbyte-config/config-models/src/main/java/io/airbyte/config/storage/MinioS3ClientFactory.java deleted file mode 100644 index b6956feb76f..00000000000 --- a/airbyte-config/config-models/src/main/java/io/airbyte/config/storage/MinioS3ClientFactory.java +++ /dev/null @@ -1,46 +0,0 @@ -/* - * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.config.storage; - -import java.net.URI; -import java.net.URISyntaxException; -import java.util.function.Supplier; -import software.amazon.awssdk.auth.credentials.AwsBasicCredentials; -import software.amazon.awssdk.regions.Region; -import software.amazon.awssdk.services.s3.S3Client; - -/** - * When using minio, we can still leverage the S3Client, we just slightly change what information we - * pass to it. Takes in the constructor our standard format for minio configuration and provides a - * factory that uses that configuration to create an S3Client. - */ -@SuppressWarnings("PMD.AvoidThrowingRawExceptionTypes") -public class MinioS3ClientFactory implements Supplier { - - private final MinioStorageConfig config; - - public MinioS3ClientFactory(final MinioStorageConfig config) { - this.config = config; - } - - @Override - public S3Client get() { - final var builder = S3Client.builder(); - - // The Minio S3 client. - final var minioEndpoint = config.getEndpoint(); - try { - final var minioUri = new URI(minioEndpoint); - builder.credentialsProvider(() -> AwsBasicCredentials.create(config.getAccessKey(), config.getSecretAccessKey())); - builder.endpointOverride(minioUri); - builder.region(Region.US_EAST_1); // Although this is not used, the S3 client will error out if this is not set. Set a stub value. 
- } catch (final URISyntaxException e) { - throw new RuntimeException("Error creating S3 log client to Minio", e); - } - - return builder.build(); - } - -} diff --git a/airbyte-config/config-models/src/main/kotlin/io/airbyte/config/helpers/LogConfigs.kt b/airbyte-config/config-models/src/main/kotlin/io/airbyte/config/helpers/LogConfigs.kt index cfbc3541505..3f2ff208f48 100644 --- a/airbyte-config/config-models/src/main/kotlin/io/airbyte/config/helpers/LogConfigs.kt +++ b/airbyte-config/config-models/src/main/kotlin/io/airbyte/config/helpers/LogConfigs.kt @@ -17,7 +17,7 @@ class LogConfigs( val EMPTY: LogConfigs = LogConfigs( LocalStorageConfig( - StorageBucketConfig("log", "state", "workload"), + StorageBucketConfig("log", "state", "workload", "payload"), "/tmp/local-storage", ), ) diff --git a/airbyte-config/config-models/src/main/kotlin/io/airbyte/config/storage/MinioS3ClientFactory.kt b/airbyte-config/config-models/src/main/kotlin/io/airbyte/config/storage/MinioS3ClientFactory.kt new file mode 100644 index 00000000000..d5f192f2a94 --- /dev/null +++ b/airbyte-config/config-models/src/main/kotlin/io/airbyte/config/storage/MinioS3ClientFactory.kt @@ -0,0 +1,32 @@ +package io.airbyte.config.storage + +import software.amazon.awssdk.auth.credentials.AwsBasicCredentials +import software.amazon.awssdk.regions.Region +import software.amazon.awssdk.services.s3.S3Client +import java.net.URI +import java.util.function.Supplier + +class MinioS3ClientFactory(private val config: MinioStorageConfig) : Supplier { + override fun get(): S3Client = + runCatching { + val minioUri = URI(config.endpoint) + + with(S3Client.builder()) { + serviceConfiguration { + it.pathStyleAccessEnabled(true) + } + credentialsProvider { + AwsBasicCredentials.create( + config.accessKey, + config.secretAccessKey, + ) + } + endpointOverride(minioUri) + // Although this is not used, the S3 client will error out if this is not set. 
+ region(Region.US_EAST_1) + build() + } + }.getOrElse { + throw RuntimeException("Error creating S3 log client to Minio", it) + } +} diff --git a/airbyte-config/config-models/src/main/kotlin/io/airbyte/config/storage/StorageConfig.kt b/airbyte-config/config-models/src/main/kotlin/io/airbyte/config/storage/StorageConfig.kt index 9c5d0bd4809..c544d9449a5 100644 --- a/airbyte-config/config-models/src/main/kotlin/io/airbyte/config/storage/StorageConfig.kt +++ b/airbyte-config/config-models/src/main/kotlin/io/airbyte/config/storage/StorageConfig.kt @@ -64,6 +64,7 @@ data class GcsStorageConfig( put(EnvVar.STORAGE_BUCKET_LOG, buckets.log) put(EnvVar.STORAGE_BUCKET_STATE, buckets.state) put(EnvVar.STORAGE_BUCKET_WORKLOAD_OUTPUT, buckets.workloadOutput) + put(EnvVar.STORAGE_BUCKET_ACTIVITY_PAYLOAD, buckets.activityPayload) put(EnvVar.STORAGE_TYPE, StorageType.GCS.name) put(EnvVar.GOOGLE_APPLICATION_CREDENTIALS, applicationCredentials) }.mapKeys { it.key.name } @@ -91,6 +92,7 @@ data class S3StorageConfig( put(EnvVar.STORAGE_BUCKET_LOG, buckets.log) put(EnvVar.STORAGE_BUCKET_STATE, buckets.state) put(EnvVar.STORAGE_BUCKET_WORKLOAD_OUTPUT, buckets.workloadOutput) + put(EnvVar.STORAGE_BUCKET_ACTIVITY_PAYLOAD, buckets.activityPayload) put(EnvVar.STORAGE_TYPE, StorageType.S3.name) accessKey?.let { put(EnvVar.AWS_ACCESS_KEY_ID, accessKey) @@ -124,6 +126,7 @@ data class MinioStorageConfig( put(EnvVar.STORAGE_BUCKET_LOG, buckets.log) put(EnvVar.STORAGE_BUCKET_STATE, buckets.state) put(EnvVar.STORAGE_BUCKET_WORKLOAD_OUTPUT, buckets.workloadOutput) + put(EnvVar.STORAGE_BUCKET_ACTIVITY_PAYLOAD, buckets.activityPayload) put(EnvVar.STORAGE_TYPE, StorageType.MINIO.name) put(EnvVar.AWS_ACCESS_KEY_ID, accessKey) put(EnvVar.AWS_SECRET_ACCESS_KEY, secretAccessKey) @@ -149,6 +152,7 @@ class LocalStorageConfig( put(EnvVar.STORAGE_BUCKET_LOG, buckets.log) put(EnvVar.STORAGE_BUCKET_STATE, buckets.state) put(EnvVar.STORAGE_BUCKET_WORKLOAD_OUTPUT, buckets.workloadOutput) + 
put(EnvVar.STORAGE_BUCKET_ACTIVITY_PAYLOAD, buckets.activityPayload) put(EnvVar.STORAGE_TYPE, StorageType.LOCAL.name) put(EnvVar.LOCAL_ROOT, root) }.mapKeys { it.key.name } @@ -162,6 +166,7 @@ data class StorageBucketConfig( @Value("\${$STORAGE_BUCKET.log}") val log: String, @Value("\${$STORAGE_BUCKET.state}") val state: String, @Value("\${$STORAGE_BUCKET.workload-output}") val workloadOutput: String, + @Value("\${$STORAGE_BUCKET.activity-payload}") val activityPayload: String, ) /** diff --git a/airbyte-config/config-models/src/main/resources/types/ActivityPayloadURI.yaml b/airbyte-config/config-models/src/main/resources/types/ActivityPayloadURI.yaml new file mode 100644 index 00000000000..5ef4f09ae60 --- /dev/null +++ b/airbyte-config/config-models/src/main/resources/types/ActivityPayloadURI.yaml @@ -0,0 +1,15 @@ +--- +"$schema": http://json-schema.org/draft-07/schema# +"$id": https://github.com/airbytehq/airbyte/blob/master/airbyte-config/models/src/main/resources/types/ActivityPayloadURI.yaml +title: ActivityPayloadURI +description: URI struct for activity payloads +type: object +additionalProperties: true +required: + - id + - version +properties: + id: + type: string + version: + type: string diff --git a/airbyte-config/config-models/src/main/resources/types/ConfigOriginType.yaml b/airbyte-config/config-models/src/main/resources/types/ConfigOriginType.yaml index 86d43438051..3f272b097f0 100644 --- a/airbyte-config/config-models/src/main/resources/types/ConfigOriginType.yaml +++ b/airbyte-config/config-models/src/main/resources/types/ConfigOriginType.yaml @@ -6,3 +6,4 @@ description: ScopedConfiguration origin types type: string enum: - user + - breaking_change diff --git a/airbyte-config/config-models/src/main/resources/types/FailureReason.yaml b/airbyte-config/config-models/src/main/resources/types/FailureReason.yaml index e99cf18954e..cd5c5fbfe45 100644 --- a/airbyte-config/config-models/src/main/resources/types/FailureReason.yaml +++ 
b/airbyte-config/config-models/src/main/resources/types/FailureReason.yaml @@ -29,6 +29,7 @@ properties: - refresh_schema - heartbeat_timeout - destination_timeout + - transient_error internalMessage: description: Human readable failure description for consumption by technical system operators, like Airbyte engineers or OSS users. type: string @@ -47,3 +48,6 @@ properties: type: boolean timestamp: type: integer + streamDescriptor: + description: The name of the stream that caused the failure when available. + "$ref": StreamDescriptor.yaml diff --git a/airbyte-config/config-models/src/main/resources/types/InvitationStatus.yaml b/airbyte-config/config-models/src/main/resources/types/InvitationStatus.yaml index 6f0fdcab0f3..0c13f937544 100644 --- a/airbyte-config/config-models/src/main/resources/types/InvitationStatus.yaml +++ b/airbyte-config/config-models/src/main/resources/types/InvitationStatus.yaml @@ -9,3 +9,4 @@ enum: - accepted - cancelled - declined + - expired diff --git a/airbyte-config/config-models/src/main/resources/types/JobConfig.yaml b/airbyte-config/config-models/src/main/resources/types/JobConfig.yaml index 8698c0792dd..79534182e84 100644 --- a/airbyte-config/config-models/src/main/resources/types/JobConfig.yaml +++ b/airbyte-config/config-models/src/main/resources/types/JobConfig.yaml @@ -17,6 +17,7 @@ properties: - getSpec - sync - resetConnection + - refresh checkConnection: "$ref": JobCheckConnectionConfig.yaml discoverCatalog: @@ -27,3 +28,5 @@ properties: "$ref": JobSyncConfig.yaml resetConnection: "$ref": JobResetConnectionConfig.yaml + refresh: + "$ref": RefreshConfig.yaml diff --git a/airbyte-config/config-models/src/main/resources/types/JobType.yaml b/airbyte-config/config-models/src/main/resources/types/JobType.yaml index 86df39230c2..34697834998 100644 --- a/airbyte-config/config-models/src/main/resources/types/JobType.yaml +++ b/airbyte-config/config-models/src/main/resources/types/JobType.yaml @@ -12,3 +12,4 @@ enum: - reset_connection 
- connection_updater - replicate + - refresh diff --git a/airbyte-config/config-models/src/main/resources/types/RefreshConfig.yaml b/airbyte-config/config-models/src/main/resources/types/RefreshConfig.yaml new file mode 100644 index 00000000000..e6cef43fa88 --- /dev/null +++ b/airbyte-config/config-models/src/main/resources/types/RefreshConfig.yaml @@ -0,0 +1,75 @@ +--- +"$schema": http://json-schema.org/draft-07/schema# +"$id": https://github.com/airbytehq/airbyte/blob/master/airbyte-config/models/src/main/resources/types/JobSyncConfig.yaml +title: RefreshConfig +description: configuration of the refresh operation +type: object +additionalProperties: true +required: + - configuredAirbyteCatalog + - sourceDockerImage + - destinationDockerImage + - streamsToRefresh +properties: + streamsToRefresh: + type: array + items: + type: object + existingJavaType: io.airbyte.protocol.models.StreamDescriptor + namespaceDefinition: + "$ref": NamespaceDefinitionType.yaml + namespaceFormat: + type: string + default: null + example: "${SOURCE_NAMESPACE}" + prefix: + description: Prefix that will be prepended to the name of each stream when it is written to the destination. + type: string + configuredAirbyteCatalog: + description: the configured airbyte catalog + type: object + existingJavaType: io.airbyte.protocol.models.ConfiguredAirbyteCatalog + sourceDockerImage: + description: Image name of the source with tag. + type: string + sourceProtocolVersion: + description: Airbyte Protocol Version of the source + type: object + existingJavaType: io.airbyte.commons.version.Version + destinationDockerImage: + description: Image name of the destination with tag. 
+ type: string + destinationProtocolVersion: + description: Airbyte Protocol Version of the destination + type: object + existingJavaType: io.airbyte.commons.version.Version + operationSequence: + description: Sequence of configurations of operations to apply as part of the sync + type: array + items: + "$ref": StandardSyncOperation.yaml + webhookOperationConfigs: + description: The webhook operation configs belonging to this workspace. Must conform to WebhookOperationConfigs.yaml. + type: object + existingJavaType: com.fasterxml.jackson.databind.JsonNode + syncResourceRequirements: + description: Resource requirements to use for the sync + $ref: SyncResourceRequirements.yaml + isSourceCustomConnector: + description: determine if the source running image is a custom connector. + type: boolean + isDestinationCustomConnector: + description: determine if the destination running image is a custom connector. + type: boolean + workspaceId: + description: The id of the workspace associated with the sync + type: string + format: uuid + sourceDefinitionVersionId: + description: The id of the source definition version used for the sync + type: string + format: uuid + destinationDefinitionVersionId: + description: The id of the destination definition version used for the sync + type: string + format: uuid diff --git a/airbyte-config/config-models/src/main/resources/types/StandardSyncInput.yaml b/airbyte-config/config-models/src/main/resources/types/StandardSyncInput.yaml index dcae92f5811..33527acf726 100644 --- a/airbyte-config/config-models/src/main/resources/types/StandardSyncInput.yaml +++ b/airbyte-config/config-models/src/main/resources/types/StandardSyncInput.yaml @@ -10,7 +10,6 @@ required: - sourceConfiguration - destinationId - destinationConfiguration - - catalog properties: namespaceDefinition: "$ref": NamespaceDefinitionType.yaml @@ -46,14 +45,6 @@ properties: description: The webhook operation configs belonging to this workspace. 
See webhookOperationConfigs in StandardWorkspace.yaml. type: object existingJavaType: com.fasterxml.jackson.databind.JsonNode - catalog: - description: the configured airbyte catalog - type: object - # necessary because the configuration declaration is in a separate package. - existingJavaType: io.airbyte.protocol.models.ConfiguredAirbyteCatalog - state: - description: optional state of the previous run. this object is defined per integration. - "$ref": State.yaml syncResourceRequirements: description: Resource requirements to use for the sync $ref: SyncResourceRequirements.yaml diff --git a/airbyte-config/config-models/src/main/resources/types/StandardSyncOutput.yaml b/airbyte-config/config-models/src/main/resources/types/StandardSyncOutput.yaml index 6dc0e2ae6c7..16516c32756 100644 --- a/airbyte-config/config-models/src/main/resources/types/StandardSyncOutput.yaml +++ b/airbyte-config/config-models/src/main/resources/types/StandardSyncOutput.yaml @@ -7,8 +7,6 @@ type: object additionalProperties: true required: - standardSyncSummary - - state - - output_catalog properties: standardSyncSummary: "$ref": StandardSyncSummary.yaml @@ -16,11 +14,9 @@ properties: "$ref": NormalizationSummary.yaml webhookOperationSummary: "$ref": WebhookOperationSummary.yaml - state: - "$ref": State.yaml - output_catalog: - existingJavaType: io.airbyte.protocol.models.ConfiguredAirbyteCatalog failures: type: array items: "$ref": FailureReason.yaml + catalogUri: + "$ref": ActivityPayloadURI.yaml diff --git a/airbyte-config/config-models/src/main/resources/types/StandardSyncSummary.yaml b/airbyte-config/config-models/src/main/resources/types/StandardSyncSummary.yaml index d71017712fc..f13f19c22c6 100644 --- a/airbyte-config/config-models/src/main/resources/types/StandardSyncSummary.yaml +++ b/airbyte-config/config-models/src/main/resources/types/StandardSyncSummary.yaml @@ -36,3 +36,5 @@ properties: "$ref": StreamSyncStats.yaml performanceMetrics: "$ref": PerformanceMetrics.yaml + 
streamCount: + type: integer diff --git a/airbyte-config/config-models/src/main/resources/types/StreamDescriptor.yaml b/airbyte-config/config-models/src/main/resources/types/StreamDescriptor.yaml new file mode 100644 index 00000000000..417df3e84f3 --- /dev/null +++ b/airbyte-config/config-models/src/main/resources/types/StreamDescriptor.yaml @@ -0,0 +1,12 @@ +--- +"$schema": http://json-schema.org/draft-07/schema# +"$id": https://github.com/airbytehq/airbyte-platform/blob/main/airbyte-config/config-models/src/main/resources/types/StreamDescriptor.yaml +title: StreamDescriptor +type: object +required: +additionalProperties: true +properties: + namespace: + type: string + name: + type: string diff --git a/airbyte-config/config-models/src/main/resources/types/UserInvitation.yaml b/airbyte-config/config-models/src/main/resources/types/UserInvitation.yaml index 3ea0ea6dc08..851beb72d4d 100644 --- a/airbyte-config/config-models/src/main/resources/types/UserInvitation.yaml +++ b/airbyte-config/config-models/src/main/resources/types/UserInvitation.yaml @@ -13,6 +13,7 @@ required: - scopeType - permissionType - status + - expiresAt additionalProperties: true properties: id: @@ -29,6 +30,10 @@ properties: description: Email address of the user who is being invited type: string format: email + acceptedByUserId: + description: ID of the user who accepted the invitation + type: string + format: uuid scopeId: description: ID of the workspace/organization that the user is being invited to type: string @@ -50,3 +55,7 @@ properties: description: last updated timestamp of the invitation type: integer format: int64 + expiresAt: + description: Timestamp at which the invitation will expire + type: integer + format: int64 diff --git a/airbyte-config/config-models/src/test/java/io/airbyte/config/helpers/GcsLogsIntTest.java b/airbyte-config/config-models/src/test/java/io/airbyte/config/helpers/GcsLogsIntTest.java index cf11f762c20..b0c2a813e18 100644 --- 
a/airbyte-config/config-models/src/test/java/io/airbyte/config/helpers/GcsLogsIntTest.java +++ b/airbyte-config/config-models/src/test/java/io/airbyte/config/helpers/GcsLogsIntTest.java @@ -27,10 +27,7 @@ class GcsLogsIntTest { private static Storage getClientFactory() { return new DefaultGcsClientFactory(new GcsStorageConfig( - new StorageBucketConfig( - System.getenv(EnvVar.STORAGE_BUCKET_LOG.name()), - "", - ""), + new StorageBucketConfig(System.getenv(EnvVar.STORAGE_BUCKET_LOG.name()), "", "", ""), System.getenv(EnvVar.GOOGLE_APPLICATION_CREDENTIALS.name()))).get(); } diff --git a/airbyte-config/config-models/src/test/java/io/airbyte/config/helpers/S3LogsTest.java b/airbyte-config/config-models/src/test/java/io/airbyte/config/helpers/S3LogsTest.java index 0b3ff96f969..6e45d09792c 100644 --- a/airbyte-config/config-models/src/test/java/io/airbyte/config/helpers/S3LogsTest.java +++ b/airbyte-config/config-models/src/test/java/io/airbyte/config/helpers/S3LogsTest.java @@ -39,7 +39,7 @@ class S3LogsTest { Objects.requireNonNull(region); LOG_CONFIGS = new LogConfigs(new S3StorageConfig( - new StorageBucketConfig(bucketLog, "state", "workload"), + new StorageBucketConfig(bucketLog, "state", "workload", "payload"), EnvVar.AWS_ACCESS_KEY_ID.fetch(), EnvVar.AWS_SECRET_ACCESS_KEY.fetch(), region)); diff --git a/airbyte-config/config-models/src/test/java/io/airbyte/config/storage/CloudLogsClientTest.java b/airbyte-config/config-models/src/test/java/io/airbyte/config/storage/CloudLogsClientTest.java index 40b91dc06f9..cd62490926b 100644 --- a/airbyte-config/config-models/src/test/java/io/airbyte/config/storage/CloudLogsClientTest.java +++ b/airbyte-config/config-models/src/test/java/io/airbyte/config/storage/CloudLogsClientTest.java @@ -11,7 +11,7 @@ class CloudLogsClientTest { @Test void testGcs() { - final var bucket = new StorageBucketConfig("log", "state", "workload"); + final var bucket = new StorageBucketConfig("log", "state", "workload", "payload"); final var config 
= new GcsStorageConfig(bucket, "path/to/google/secret"); new DefaultGcsClientFactory(config); } diff --git a/airbyte-config/config-models/src/test/java/io/airbyte/config/storage/DefaultS3ClientFactoryTest.java b/airbyte-config/config-models/src/test/java/io/airbyte/config/storage/DefaultS3ClientFactoryTest.java index ef1bcf3d7ce..db4a3203602 100644 --- a/airbyte-config/config-models/src/test/java/io/airbyte/config/storage/DefaultS3ClientFactoryTest.java +++ b/airbyte-config/config-models/src/test/java/io/airbyte/config/storage/DefaultS3ClientFactoryTest.java @@ -12,7 +12,7 @@ class DefaultS3ClientFactoryTest { @Test void testS3() { - final var bucket = new StorageBucketConfig("log", "state", "workload"); + final var bucket = new StorageBucketConfig("log", "state", "workload", "payload"); final var config = new S3StorageConfig(bucket, "access-key", "access-key-secret", "us-east-1"); assertDoesNotThrow(() -> new DefaultS3ClientFactory(config).get()); diff --git a/airbyte-config/config-models/src/test/java/io/airbyte/config/storage/MinioS3ClientFactoryTest.java b/airbyte-config/config-models/src/test/java/io/airbyte/config/storage/MinioS3ClientFactoryTest.java deleted file mode 100644 index 0900899872d..00000000000 --- a/airbyte-config/config-models/src/test/java/io/airbyte/config/storage/MinioS3ClientFactoryTest.java +++ /dev/null @@ -1,21 +0,0 @@ -/* - * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.config.storage; - -import static org.junit.jupiter.api.Assertions.assertDoesNotThrow; - -import org.junit.jupiter.api.Test; - -class MinioS3ClientFactoryTest { - - @Test - void testMinio() { - final var bucket = new StorageBucketConfig("log", "state", "workload"); - final var config = new MinioStorageConfig(bucket, "access", "secret", "http://endpoint.test"); - - assertDoesNotThrow(() -> new MinioS3ClientFactory(config).get()); - } - -} diff --git a/airbyte-config/config-models/src/test/kotlin/io/airbyte/config/helpers/CloudLogsTest.kt b/airbyte-config/config-models/src/test/kotlin/io/airbyte/config/helpers/CloudLogsTest.kt index a166ab8a09c..a0e7de99300 100644 --- a/airbyte-config/config-models/src/test/kotlin/io/airbyte/config/helpers/CloudLogsTest.kt +++ b/airbyte-config/config-models/src/test/kotlin/io/airbyte/config/helpers/CloudLogsTest.kt @@ -7,7 +7,7 @@ import io.airbyte.config.storage.StorageBucketConfig import org.junit.jupiter.api.Assertions.assertTrue import org.junit.jupiter.api.Test -private val buckets = StorageBucketConfig(log = "log", state = "state", workloadOutput = "workload") +private val buckets = StorageBucketConfig(log = "log", state = "state", workloadOutput = "workload", activityPayload = "payload") class CloudLogsTest { @Test diff --git a/airbyte-config/config-models/src/test/kotlin/io/airbyte/config/storage/MinioS3ClientFactoryTest.kt b/airbyte-config/config-models/src/test/kotlin/io/airbyte/config/storage/MinioS3ClientFactoryTest.kt new file mode 100644 index 00000000000..86e3969570d --- /dev/null +++ b/airbyte-config/config-models/src/test/kotlin/io/airbyte/config/storage/MinioS3ClientFactoryTest.kt @@ -0,0 +1,14 @@ +package io.airbyte.config.storage + +import org.junit.jupiter.api.Test +import org.junit.jupiter.api.assertDoesNotThrow + +val buckets = StorageBucketConfig(log = "log", state = "state", workloadOutput = "workload", activityPayload = "payload") +val config = MinioStorageConfig(buckets = buckets, 
accessKey = "access", secretAccessKey = "secret", endpoint = "http://endpoint.test") + +class MinioS3ClientFactoryTest { + @Test + fun `minio doesn't throw exception`() { + assertDoesNotThrow { MinioS3ClientFactory(config).get() } + } +} diff --git a/airbyte-config/config-models/src/test/kotlin/io/airbyte/config/storage/StorageConfigTest.kt b/airbyte-config/config-models/src/test/kotlin/io/airbyte/config/storage/StorageConfigTest.kt index 4765349f8b9..c6e871b9089 100644 --- a/airbyte-config/config-models/src/test/kotlin/io/airbyte/config/storage/StorageConfigTest.kt +++ b/airbyte-config/config-models/src/test/kotlin/io/airbyte/config/storage/StorageConfigTest.kt @@ -8,13 +8,10 @@ import org.junit.jupiter.api.Assertions.assertFalse import org.junit.jupiter.api.Assertions.assertTrue import org.junit.jupiter.api.Test -private const val TYPE = "$STORAGE_ROOT.type" -private const val PROP_BUCKET_LOG = "$STORAGE_BUCKET.log" -private const val PROP_BUCKET_STATE = "$STORAGE_BUCKET.state" -private const val PROP_BUCKET_WORKLOAD = "$STORAGE_BUCKET.workload-output" private const val VAL_BUCKET_LOG = "log" private const val VAL_BUCKET_STATE = "state" private const val VAL_BUCKET_WORKLOAD = "workload" +private const val VAL_BUCKET_ACTIVITY_PAYLOAD = "payload" @MicronautTest(environments = ["storage-local"]) class LocalStorageConfigTest { @@ -28,6 +25,7 @@ class LocalStorageConfigTest { assertEquals(VAL_BUCKET_LOG, buckets.log) assertEquals(VAL_BUCKET_STATE, buckets.state) assertEquals(VAL_BUCKET_WORKLOAD, buckets.workloadOutput) + assertEquals(VAL_BUCKET_ACTIVITY_PAYLOAD, buckets.activityPayload) assertEquals("/tmp", root) } } @@ -41,6 +39,7 @@ class LocalStorageConfigTest { EnvVar.STORAGE_BUCKET_LOG to VAL_BUCKET_LOG, EnvVar.STORAGE_BUCKET_STATE to VAL_BUCKET_STATE, EnvVar.STORAGE_BUCKET_WORKLOAD_OUTPUT to VAL_BUCKET_WORKLOAD, + EnvVar.STORAGE_BUCKET_ACTIVITY_PAYLOAD to VAL_BUCKET_ACTIVITY_PAYLOAD, EnvVar.LOCAL_ROOT to "/tmp", ).mapKeys { it.key.name } @@ -61,6 +60,7 @@ class 
GcsStorageConfigTest { assertEquals(VAL_BUCKET_LOG, buckets.log) assertEquals(VAL_BUCKET_STATE, buckets.state) assertEquals(VAL_BUCKET_WORKLOAD, buckets.workloadOutput) + assertEquals(VAL_BUCKET_ACTIVITY_PAYLOAD, buckets.activityPayload) assertEquals("credz", applicationCredentials) } } @@ -74,6 +74,7 @@ class GcsStorageConfigTest { EnvVar.STORAGE_BUCKET_LOG to VAL_BUCKET_LOG, EnvVar.STORAGE_BUCKET_STATE to VAL_BUCKET_STATE, EnvVar.STORAGE_BUCKET_WORKLOAD_OUTPUT to VAL_BUCKET_WORKLOAD, + EnvVar.STORAGE_BUCKET_ACTIVITY_PAYLOAD to VAL_BUCKET_ACTIVITY_PAYLOAD, EnvVar.GOOGLE_APPLICATION_CREDENTIALS to "credz", ).mapKeys { it.key.name } @@ -94,6 +95,7 @@ class MinioStorageConfigTest { assertEquals(VAL_BUCKET_LOG, buckets.log) assertEquals(VAL_BUCKET_STATE, buckets.state) assertEquals(VAL_BUCKET_WORKLOAD, buckets.workloadOutput) + assertEquals(VAL_BUCKET_ACTIVITY_PAYLOAD, buckets.activityPayload) assertEquals("access", this.accessKey) assertEquals("secret-access", secretAccessKey) assertEquals("endpoint", endpoint) @@ -109,6 +111,7 @@ class MinioStorageConfigTest { EnvVar.STORAGE_BUCKET_LOG to VAL_BUCKET_LOG, EnvVar.STORAGE_BUCKET_STATE to VAL_BUCKET_STATE, EnvVar.STORAGE_BUCKET_WORKLOAD_OUTPUT to VAL_BUCKET_WORKLOAD, + EnvVar.STORAGE_BUCKET_ACTIVITY_PAYLOAD to VAL_BUCKET_ACTIVITY_PAYLOAD, EnvVar.AWS_ACCESS_KEY_ID to "access", EnvVar.AWS_SECRET_ACCESS_KEY to "secret-access", EnvVar.MINIO_ENDPOINT to "endpoint", @@ -131,6 +134,7 @@ class S3StorageConfigTest { assertEquals(VAL_BUCKET_LOG, buckets.log) assertEquals(VAL_BUCKET_STATE, buckets.state) assertEquals(VAL_BUCKET_WORKLOAD, buckets.workloadOutput) + assertEquals(VAL_BUCKET_ACTIVITY_PAYLOAD, buckets.activityPayload) assertEquals("access", this.accessKey) assertEquals("secret-access", secretAccessKey) assertEquals("us-moon-1", region) @@ -146,6 +150,7 @@ class S3StorageConfigTest { EnvVar.STORAGE_BUCKET_LOG to VAL_BUCKET_LOG, EnvVar.STORAGE_BUCKET_STATE to VAL_BUCKET_STATE, EnvVar.STORAGE_BUCKET_WORKLOAD_OUTPUT to 
VAL_BUCKET_WORKLOAD, + EnvVar.STORAGE_BUCKET_ACTIVITY_PAYLOAD to VAL_BUCKET_ACTIVITY_PAYLOAD, EnvVar.AWS_ACCESS_KEY_ID to "access", EnvVar.AWS_SECRET_ACCESS_KEY to "secret-access", EnvVar.AWS_DEFAULT_REGION to "us-moon-1", @@ -162,7 +167,7 @@ class StorageConfigTest { val private = "nobody" val public = "everybody" val masked = "*******" - val buckets = StorageBucketConfig(log = "log", state = "state", workloadOutput = "workload") + val buckets = StorageBucketConfig(log = "log", state = "state", workloadOutput = "workload", activityPayload = "payload") val gcs = GcsStorageConfig(buckets = buckets, applicationCredentials = private) val minio = diff --git a/airbyte-config/config-models/src/test/resources/application-storage-gcs.yml b/airbyte-config/config-models/src/test/resources/application-storage-gcs.yml index 96ba5b1c1c5..c7c304622d4 100644 --- a/airbyte-config/config-models/src/test/resources/application-storage-gcs.yml +++ b/airbyte-config/config-models/src/test/resources/application-storage-gcs.yml @@ -6,5 +6,6 @@ airbyte: log: log state: state workload-output: workload + activity-payload: payload gcs: application-credentials: credz diff --git a/airbyte-config/config-models/src/test/resources/application-storage-local.yml b/airbyte-config/config-models/src/test/resources/application-storage-local.yml index a8ee0625ee2..a6a1629b6ed 100644 --- a/airbyte-config/config-models/src/test/resources/application-storage-local.yml +++ b/airbyte-config/config-models/src/test/resources/application-storage-local.yml @@ -6,5 +6,6 @@ airbyte: log: log state: state workload-output: workload + activity-payload: payload local: root: /tmp diff --git a/airbyte-config/config-models/src/test/resources/application-storage-minio.yml b/airbyte-config/config-models/src/test/resources/application-storage-minio.yml index 09cbb508371..d1fbd5ab192 100644 --- a/airbyte-config/config-models/src/test/resources/application-storage-minio.yml +++ 
b/airbyte-config/config-models/src/test/resources/application-storage-minio.yml @@ -6,6 +6,7 @@ airbyte: log: log state: state workload-output: workload + activity-payload: payload minio: access-key: access secret-access-key: secret-access diff --git a/airbyte-config/config-models/src/test/resources/application-storage-s3.yml b/airbyte-config/config-models/src/test/resources/application-storage-s3.yml index 6c7c5d3c79e..e4b69917311 100644 --- a/airbyte-config/config-models/src/test/resources/application-storage-s3.yml +++ b/airbyte-config/config-models/src/test/resources/application-storage-s3.yml @@ -6,6 +6,7 @@ airbyte: log: log state: state workload-output: workload + activity-payload: payload s3: access-key: access secret-access-key: secret-access diff --git a/airbyte-config/config-persistence/build.gradle.kts b/airbyte-config/config-persistence/build.gradle.kts index 398c62c7fe3..4981c623f20 100644 --- a/airbyte-config/config-persistence/build.gradle.kts +++ b/airbyte-config/config-persistence/build.gradle.kts @@ -1,56 +1,77 @@ plugins { - id("io.airbyte.gradle.jvm.lib") - id("io.airbyte.gradle.publish") - `java-test-fixtures` + id("io.airbyte.gradle.jvm.lib") + id("io.airbyte.gradle.publish") + `java-test-fixtures` + kotlin("jvm") + kotlin("kapt") } configurations.all { - exclude(group = "io.micronaut.flyway") + exclude(group = "io.micronaut.flyway") } dependencies { - compileOnly(libs.lombok) - annotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut - annotationProcessor(libs.bundles.micronaut.annotation.processor) - - api(libs.bundles.micronaut.annotation) - - implementation(project(":airbyte-commons")) - implementation(project(":airbyte-commons-protocol")) - implementation(project(":airbyte-config:config-models")) - implementation(project(":airbyte-config:specs")) - implementation(project(":airbyte-data")) - implementation(project(":airbyte-db:db-lib")) - implementation(project(":airbyte-db:jooq")) - 
implementation(project(":airbyte-featureflag")) - implementation(project(":airbyte-json-validation")) - implementation(libs.airbyte.protocol) - implementation(project(":airbyte-metrics:metrics-lib")) - implementation(libs.bundles.apache) - implementation(libs.google.cloud.storage) - implementation(libs.commons.io) - implementation(libs.jackson.databind) - - testImplementation(libs.hamcrest.all) - testImplementation(libs.platform.testcontainers.postgresql) - testImplementation(libs.bundles.flyway) - testImplementation(libs.mockito.inline) - testImplementation(project(":airbyte-test-utils")) - testImplementation(libs.bundles.junit) - testImplementation(libs.assertj.core) - testImplementation(libs.junit.pioneer) - - testRuntimeOnly(libs.junit.jupiter.engine) - - integrationTestImplementation(project(":airbyte-config:config-persistence")) - - testFixturesApi(libs.jackson.databind) - testFixturesApi(libs.guava) - testFixturesApi(project(":airbyte-json-validation")) - testFixturesApi(project(":airbyte-commons")) - testFixturesApi(project(":airbyte-config:config-models")) - testFixturesApi(project(":airbyte-config:config-secrets")) - testFixturesApi(libs.airbyte.protocol) - testFixturesApi(libs.lombok) - testFixturesAnnotationProcessor(libs.lombok) + compileOnly(libs.lombok) + annotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut + annotationProcessor(libs.bundles.micronaut.annotation.processor) + + kapt(platform(libs.micronaut.platform)) + kapt(libs.bundles.micronaut.annotation.processor) + + api(libs.bundles.micronaut.annotation) + + implementation(project(":airbyte-commons")) + implementation(project(":airbyte-commons-protocol")) + implementation(project(":airbyte-config:config-models")) + implementation(project(":airbyte-config:specs")) + implementation(project(":airbyte-data")) + implementation(project(":airbyte-db:db-lib")) + implementation(project(":airbyte-db:jooq")) + implementation(project(":airbyte-featureflag")) + 
implementation(project(":airbyte-json-validation")) + implementation(libs.airbyte.protocol) + implementation(project(":airbyte-metrics:metrics-lib")) + implementation(libs.bundles.apache) + implementation(libs.google.cloud.storage) + implementation(libs.commons.io) + implementation(libs.jackson.databind) + implementation(libs.bundles.micronaut.data.jdbc) + implementation(libs.bundles.micronaut.kotlin) + + testImplementation(libs.hamcrest.all) + testImplementation(libs.platform.testcontainers.postgresql) + testImplementation(libs.bundles.flyway) + testImplementation(libs.mockito.inline) + testImplementation(project(":airbyte-test-utils")) + testImplementation(libs.bundles.junit) + testImplementation(libs.assertj.core) + testImplementation(libs.junit.pioneer) + testImplementation(libs.bundles.micronaut.test) + testImplementation(libs.mockk) + + kaptTest(platform(libs.micronaut.platform)) + kaptTest(libs.bundles.micronaut.test.annotation.processor) + + testRuntimeOnly(libs.junit.jupiter.engine) + + integrationTestImplementation(project(":airbyte-config:config-persistence")) + + testFixturesApi(libs.jackson.databind) + testFixturesApi(libs.guava) + testFixturesApi(project(":airbyte-json-validation")) + testFixturesApi(project(":airbyte-commons")) + testFixturesApi(project(":airbyte-config:config-models")) + testFixturesApi(project(":airbyte-config:config-secrets")) + testFixturesApi(libs.airbyte.protocol) + testFixturesApi(libs.lombok) + testFixturesAnnotationProcessor(libs.lombok) +} + +// The DuplicatesStrategy will be required while this module is mixture of kotlin and java _with_ lombok dependencies.) +// Kapt, by default, runs all annotation(processors and disables annotation(processing by javac, however) +// this default behavior(breaks the lombok java annotation(processor. To avoid(lombok breaking, ksp(has) +// keepJavacAnnotationProcessors enabled, which causes duplicate META-INF files to be generated.) 
+// Once lombok has been removed, this can also be removed.) +tasks.withType().configureEach { + duplicatesStrategy = DuplicatesStrategy.EXCLUDE } diff --git a/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/ActorDefinitionVersionHelper.java b/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/ActorDefinitionVersionHelper.java index f7222442b52..632e9ef4b9c 100644 --- a/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/ActorDefinitionVersionHelper.java +++ b/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/ActorDefinitionVersionHelper.java @@ -125,7 +125,7 @@ public ActorDefinitionVersionWithOverrideStatus getSourceVersionWithOverrideStat throws ConfigNotFoundException, IOException, JsonValidationException { final ActorDefinitionVersion defaultVersion = getDefaultSourceVersion(sourceDefinition, workspaceId, actorId); - Optional versionOverride = Optional.empty(); + Optional versionOverride = Optional.empty(); if (featureFlagClient.boolVariation(EnableConfigurationOverrideProvider.INSTANCE, new Workspace(workspaceId))) { versionOverride = configOverrideProvider.getOverride( @@ -145,7 +145,7 @@ public ActorDefinitionVersionWithOverrideStatus getSourceVersionWithOverrideStat defaultVersion); } - return new ActorDefinitionVersionWithOverrideStatus(versionOverride.orElse(defaultVersion), versionOverride.isPresent()); + return versionOverride.orElse(new ActorDefinitionVersionWithOverrideStatus(defaultVersion, false)); } /** @@ -189,7 +189,7 @@ public ActorDefinitionVersionWithOverrideStatus getDestinationVersionWithOverrid throws ConfigNotFoundException, IOException, JsonValidationException { final ActorDefinitionVersion defaultVersion = getDefaultDestinationVersion(destinationDefinition, workspaceId, actorId); - Optional versionOverride = Optional.empty(); + Optional versionOverride = Optional.empty(); if 
(featureFlagClient.boolVariation(EnableConfigurationOverrideProvider.INSTANCE, new Workspace(workspaceId))) { versionOverride = configOverrideProvider.getOverride( @@ -209,7 +209,7 @@ public ActorDefinitionVersionWithOverrideStatus getDestinationVersionWithOverrid defaultVersion); } - return new ActorDefinitionVersionWithOverrideStatus(versionOverride.orElse(defaultVersion), versionOverride.isPresent()); + return versionOverride.orElse(new ActorDefinitionVersionWithOverrideStatus(defaultVersion, false)); } /** diff --git a/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/ConfigNotFoundException.java b/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/ConfigNotFoundException.java index 06d96d10d9e..d178f932ca9 100644 --- a/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/ConfigNotFoundException.java +++ b/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/ConfigNotFoundException.java @@ -12,6 +12,11 @@ */ public class ConfigNotFoundException extends Exception { + // This is a specific error type that is used when an organization cannot be found + // from a given workspace. Workspaces will soon require an organization, so this + // error is temporary and will be removed once the requirement is enforced. 
+ public static final String NO_ORGANIZATION_FOR_WORKSPACE = "NO_ORGANIZATION_FOR_WORKSPACE"; + private static final long serialVersionUID = 836273627; private final String type; private final String configId; diff --git a/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/ConfigRepository.java b/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/ConfigRepository.java index 8d9a22f9d57..b6529bf823d 100644 --- a/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/ConfigRepository.java +++ b/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/ConfigRepository.java @@ -833,25 +833,6 @@ public void writeSourceConnectionNoSecrets(final SourceConnection partialSource) sourceService.writeSourceConnectionNoSecrets(partialSource); } - /** - * Delete a source by id. - * - * @param sourceId source id - * @return true if a source was deleted, false otherwise. - * @throws JsonValidationException - throws if returned sources are invalid - * @throws io.airbyte.data.exceptions.ConfigNotFoundException - throws if no source with that id can - * be found. - * @throws IOException - you never know when you IO - */ - @Deprecated - public boolean deleteSource(final UUID sourceId) throws JsonValidationException, ConfigNotFoundException, IOException { - try { - return sourceService.deleteSource(sourceId); - } catch (final io.airbyte.data.exceptions.ConfigNotFoundException e) { - throw new ConfigNotFoundException(e.getType(), e.getConfigId()); - } - } - /** * Returns all sources in the database. Does not contain secrets. To hydrate with secrets see the * config-secrets module. 
diff --git a/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/PermissionPersistence.java b/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/PermissionPersistence.java index a8f0a45105d..61e86618e11 100644 --- a/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/PermissionPersistence.java +++ b/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/PermissionPersistence.java @@ -8,7 +8,6 @@ import static io.airbyte.db.instance.configs.jooq.generated.Tables.PERMISSION; import static io.airbyte.db.instance.configs.jooq.generated.Tables.USER; import static org.jooq.impl.DSL.asterisk; -import static org.jooq.impl.DSL.field; import static org.jooq.impl.DSL.select; import io.airbyte.commons.enums.Enums; @@ -19,8 +18,6 @@ import io.airbyte.db.Database; import io.airbyte.db.ExceptionWrappingDatabase; import java.io.IOException; -import java.sql.SQLException; -import java.time.OffsetDateTime; import java.util.List; import java.util.Optional; import java.util.UUID; @@ -28,7 +25,6 @@ import org.jooq.DSLContext; import org.jooq.Record; import org.jooq.Result; -import org.jooq.impl.DSL; /** * Permission Persistence. @@ -42,93 +38,10 @@ public class PermissionPersistence { private final ExceptionWrappingDatabase database; - public static final String PRIMARY_KEY = "id"; - public PermissionPersistence(final Database database) { this.database = new ExceptionWrappingDatabase(database); } - /** - * Create or update Permission. - * - * @param permission permission to write into database. - * @throws IOException in case of a db error. 
- */ - public void writePermission(final Permission permission) throws IOException { - final io.airbyte.db.instance.configs.jooq.generated.enums.PermissionType permissionType = - PermissionPersistenceHelper.convertConfigPermissionTypeToJooqPermissionType(permission.getPermissionType()); - - database.transaction(ctx -> { - final OffsetDateTime timestamp = OffsetDateTime.now(); - final boolean isExistingConfig = ctx.fetchExists(select() - .from(PERMISSION) - .where(PERMISSION.ID.eq(permission.getPermissionId()))); - - if (isExistingConfig) { - updatePermission(ctx, permission, timestamp); - } else { - ctx.insertInto(PERMISSION) - .set(PERMISSION.ID, permission.getPermissionId()) - .set(PERMISSION.PERMISSION_TYPE, permissionType) - .set(PERMISSION.USER_ID, permission.getUserId()) - .set(PERMISSION.WORKSPACE_ID, permission.getWorkspaceId()) - .set(PERMISSION.ORGANIZATION_ID, permission.getOrganizationId()) - .set(PERMISSION.CREATED_AT, timestamp) - .set(PERMISSION.UPDATED_AT, timestamp) - .execute(); - } - return null; - }); - } - - private void updatePermission(final DSLContext transactionCtx, final Permission updatedPermission, final OffsetDateTime timestamp) - throws SQLException { - - final Permission priorPermission; - try { - priorPermission = getPermission(updatedPermission.getPermissionId()).orElseThrow(); - } catch (final IOException e) { - throw new SQLException(e); - } - - final io.airbyte.db.instance.configs.jooq.generated.enums.PermissionType priorPermissionType = - PermissionPersistenceHelper.convertConfigPermissionTypeToJooqPermissionType(priorPermission.getPermissionType()); - - final io.airbyte.db.instance.configs.jooq.generated.enums.PermissionType newPermissionType = - PermissionPersistenceHelper.convertConfigPermissionTypeToJooqPermissionType(updatedPermission.getPermissionType()); - - transactionCtx.update(PERMISSION) - .set(PERMISSION.PERMISSION_TYPE, newPermissionType) - .set(PERMISSION.WORKSPACE_ID, updatedPermission.getWorkspaceId()) - 
.set(PERMISSION.ORGANIZATION_ID, updatedPermission.getOrganizationId()) - .set(PERMISSION.USER_ID, updatedPermission.getUserId()) - .set(PERMISSION.UPDATED_AT, timestamp) - .where(PERMISSION.ID.eq(updatedPermission.getPermissionId())) - .execute(); - - // if the updated permission started off as an organization admin, check to see if the org still - // has an admin after the update is applied. if not, roll back the transaction. - final boolean wasOrganizationAdminUpdate = - priorPermissionType.equals(io.airbyte.db.instance.configs.jooq.generated.enums.PermissionType.organization_admin); - - // use priorPermission instead of updatedPermission in case the organization ID changed in the - // update. - if (wasOrganizationAdminUpdate && countOrganizationAdmins(transactionCtx, priorPermission.getOrganizationId()) < 1) { - // trigger a transaction rollback - throw new SQLOperationNotAllowedException( - "Preventing update that would have removed the last OrganizationAdmin from organization " + updatedPermission.getOrganizationId()); - } - } - - private int countOrganizationAdmins(final DSLContext ctx, final UUID organizationId) { - // fetch the count of permission records with type OrganizationAdmin and in the indicated - // organizationId - return ctx.fetchCount(select() - .from(PERMISSION) - .where(PERMISSION.PERMISSION_TYPE.eq(io.airbyte.db.instance.configs.jooq.generated.enums.PermissionType.organization_admin)) - .and(PERMISSION.ORGANIZATION_ID.eq(organizationId))); - } - /** * Get a permission by permission Id. * @@ -193,38 +106,6 @@ private Permission createPermissionFromRecord(final Record record) { .withOrganizationId(record.get(PERMISSION.ORGANIZATION_ID)); } - /** - * Delete Permissions by id. 
- * - * - */ - public boolean deletePermissionById(final UUID permissionId) throws IOException { - return database.transaction(ctx -> { - final Permission deletedPermission; - try { - deletedPermission = getPermission(permissionId).orElseThrow(); - } catch (final IOException e) { - throw new SQLException(e); - } - final int modifiedCount = ctx.deleteFrom(PERMISSION).where(field(DSL.name(PRIMARY_KEY)).eq(permissionId)).execute(); - - // return early if nothing was deleted - if (modifiedCount == 0) { - return false; - } - - // check if this deletion removed the last OrganizationAdmin from the organization - final boolean wasOrganizationAdminDeletion = deletedPermission.getPermissionType().equals(PermissionType.ORGANIZATION_ADMIN); - if (wasOrganizationAdminDeletion && countOrganizationAdmins(ctx, deletedPermission.getOrganizationId()) < 1) { - // trigger a rollback by throwing an exception - throw new SQLOperationNotAllowedException( - "Rolling back delete that would have removed the last OrganizationAdmin from organization " + deletedPermission.getOrganizationId()); - } - - return modifiedCount > 0; - }); - } - /** * List all users with permissions to the workspace. Note it does not take organization info into * account. 
@@ -238,10 +119,6 @@ public List listUsersInWorkspace(final UUID workspaceId) throws return this.database.query(ctx -> listPermissionsForWorkspace(ctx, workspaceId)); } - public List listUserPermissionsGrantingWorkspaceAccess(final UUID workspaceId) throws IOException { - return this.database.query(ctx -> listPermissionsForWorkspace(ctx, workspaceId)); - } - public List listInstanceAdminUsers() throws IOException { return this.database.query(ctx -> listInstanceAdminPermissions(ctx)); } diff --git a/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/RefreshJobStateUpdater.java b/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/RefreshJobStateUpdater.java new file mode 100644 index 00000000000..75c3d3918ed --- /dev/null +++ b/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/RefreshJobStateUpdater.java @@ -0,0 +1,74 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.config.persistence; + +import io.airbyte.config.StateType; +import io.airbyte.config.StateWrapper; +import io.airbyte.config.persistence.domain.StreamRefresh; +import io.airbyte.protocol.models.AirbyteGlobalState; +import io.airbyte.protocol.models.AirbyteStateMessage; +import io.airbyte.protocol.models.AirbyteStreamState; +import io.airbyte.protocol.models.StreamDescriptor; +import jakarta.inject.Singleton; +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.Set; +import java.util.UUID; +import java.util.stream.Collectors; + +@Singleton +public class RefreshJobStateUpdater { + + private final StatePersistence statePersistence; + + public RefreshJobStateUpdater(final StatePersistence statePersistence) { + this.statePersistence = statePersistence; + } + + public void updateStateWrapperForRefresh(final UUID connectionId, final StateWrapper currentState, final List streamsToRefresh) + throws IOException { + final StateWrapper 
updatedState = new StateWrapper(); + final Set streamDescriptorsToRefresh = streamsToRefresh + .stream() + .map(c -> new StreamDescriptor().withName(c.getStreamName()).withNamespace(c.getStreamNamespace())) + .collect(Collectors.toSet()); + + switch (currentState.getStateType()) { + case GLOBAL -> { + final List streamStatesToRetain = new ArrayList<>(); + final AirbyteStateMessage currentGlobalStateMessage = currentState.getGlobal(); + final List currentStreamStates = currentGlobalStateMessage.getGlobal().getStreamStates(); + for (final AirbyteStreamState streamState : currentStreamStates) { + final StreamDescriptor streamDescriptor = streamState.getStreamDescriptor(); + if (!streamDescriptorsToRefresh.contains(streamDescriptor)) { + streamStatesToRetain.add(streamState); + } + } + updatedState.setStateType(StateType.GLOBAL); + updatedState.setGlobal(new AirbyteStateMessage() + .withType(AirbyteStateMessage.AirbyteStateType.GLOBAL) + .withGlobal(new AirbyteGlobalState() + .withSharedState(streamStatesToRetain.isEmpty() ? 
null : currentGlobalStateMessage.getGlobal().getSharedState()) + .withStreamStates(streamStatesToRetain))); + + } + case STREAM -> { + final List streamStatesToRetain = new ArrayList<>(); + for (final AirbyteStateMessage stateMessage : currentState.getStateMessages()) { + final StreamDescriptor streamDescriptor = stateMessage.getStream().getStreamDescriptor(); + if (!streamDescriptorsToRefresh.contains(streamDescriptor)) { + streamStatesToRetain.add(stateMessage); + } + } + updatedState.setStateType(StateType.STREAM); + updatedState.setStateMessages(streamStatesToRetain); + } + default -> updatedState.setStateType(StateType.LEGACY); + } + statePersistence.updateOrCreateState(connectionId, updatedState); + } + +} diff --git a/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/StatePersistence.java b/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/StatePersistence.java index 672cb4f8702..8f5387d2ab8 100644 --- a/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/StatePersistence.java +++ b/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/StatePersistence.java @@ -111,6 +111,19 @@ public void updateOrCreateState(final UUID connectionId, final StateWrapper stat }); } + public void bulkDelete(final UUID connectionId, final Set fullRefreshStreams) throws IOException { + final var conditions = fullRefreshStreams.stream().map(stream -> { + var nameCondition = DSL.field(DSL.name(STATE.STREAM_NAME.getName())).eq(stream.getName()); + var connCondition = DSL.field(DSL.name(STATE.CONNECTION_ID.getName())).eq(connectionId); + var namespaceCondition = stream.getNamespace() == null + ? 
DSL.field(DSL.name(STATE.NAMESPACE.getName())).isNull() + : DSL.field(DSL.name(STATE.NAMESPACE.getName())).eq(stream.getNamespace()); + + return DSL.and(namespaceCondition, nameCondition, connCondition); + }).reduce(DSL.noCondition(), DSL::or); + this.database.transaction(ctx -> ctx.deleteFrom(STATE).where(conditions).execute()); + } + private static void clearLegacyState(final DSLContext ctx, final UUID connectionId) { final StateUpdateBatch stateUpdateBatch = new StateUpdateBatch(); writeStateToDb(ctx, connectionId, null, null, StateType.LEGACY, null, stateUpdateBatch); diff --git a/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/UserPersistence.java b/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/UserPersistence.java index 2382add7e01..514fc7e40de 100644 --- a/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/UserPersistence.java +++ b/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/UserPersistence.java @@ -183,9 +183,9 @@ private User createUserFromRecord(final Record record) { return new User() .withUserId(record.get(USER.ID)) .withName(record.get(USER.NAME)) - .withAuthUserId(record.get(USER.AUTH_USER_ID)) - .withAuthProvider(record.get(USER.AUTH_PROVIDER) == null ? null - : Enums.toEnum(record.get(USER.AUTH_PROVIDER, String.class), io.airbyte.config.AuthProvider.class).orElseThrow()) + .withAuthUserId(record.get(AUTH_USER.AUTH_USER_ID)) + .withAuthProvider(record.get(AUTH_USER.AUTH_PROVIDER) == null ? null + : Enums.toEnum(record.get(AUTH_USER.AUTH_PROVIDER, String.class), io.airbyte.config.AuthProvider.class).orElseThrow()) .withDefaultWorkspaceId(record.get(USER.DEFAULT_WORKSPACE_ID)) .withStatus(record.get(USER.STATUS) == null ? 
null : Enums.toEnum(record.get(USER.STATUS, String.class), User.Status.class).orElseThrow()) .withCompanyName(record.get(USER.COMPANY_NAME)) @@ -278,6 +278,19 @@ public Optional getUserByEmail(final String email) throws IOException { return Optional.of(createUserFromRecord(result.get(0))); } + /** + * Fetch all users with a given email address. + */ + public List getUsersByEmail(final String email) throws IOException { + return database.query(ctx -> ctx + .select(asterisk()) + .from(USER) + .where(USER.EMAIL.eq(email)).fetch()) + .stream() + .map(this::createUserFromRecord) + .toList(); + } + /** * Get the default user if it exists by looking up the hardcoded default user id. */ diff --git a/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/version_overrides/ConfigurationDefinitionVersionOverrideProvider.java b/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/version_overrides/ConfigurationDefinitionVersionOverrideProvider.java index e25e1109caf..7f963928e0f 100644 --- a/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/version_overrides/ConfigurationDefinitionVersionOverrideProvider.java +++ b/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/version_overrides/ConfigurationDefinitionVersionOverrideProvider.java @@ -6,15 +6,24 @@ import io.airbyte.config.ActorDefinitionVersion; import io.airbyte.config.ActorType; +import io.airbyte.config.ConfigOriginType; import io.airbyte.config.ConfigResourceType; import io.airbyte.config.ConfigScopeType; import io.airbyte.config.ScopedConfiguration; import io.airbyte.config.StandardWorkspace; +import io.airbyte.config.persistence.ActorDefinitionVersionHelper.ActorDefinitionVersionWithOverrideStatus; import io.airbyte.data.exceptions.ConfigNotFoundException; import io.airbyte.data.services.ActorDefinitionService; import io.airbyte.data.services.ScopedConfigurationService; import 
io.airbyte.data.services.WorkspaceService; import io.airbyte.data.services.shared.ConnectorVersionKey; +import io.airbyte.featureflag.FeatureFlagClient; +import io.airbyte.featureflag.UseActorScopedDefaultVersions; +import io.airbyte.featureflag.UseBreakingChangeScopedConfigs; +import io.airbyte.featureflag.Workspace; +import io.airbyte.metrics.lib.MetricAttribute; +import io.airbyte.metrics.lib.MetricClient; +import io.airbyte.metrics.lib.OssMetricsRegistry; import io.airbyte.validation.json.JsonValidationException; import jakarta.inject.Named; import jakarta.inject.Singleton; @@ -32,13 +41,19 @@ public class ConfigurationDefinitionVersionOverrideProvider implements Definitio private final WorkspaceService workspaceService; private final ActorDefinitionService actorDefinitionService; private final ScopedConfigurationService scopedConfigurationService; + private final FeatureFlagClient featureFlagClient; + private final MetricClient metricClient; public ConfigurationDefinitionVersionOverrideProvider(final WorkspaceService workspaceService, final ActorDefinitionService actorDefinitionService, - final ScopedConfigurationService scopedConfigurationService) { + final ScopedConfigurationService scopedConfigurationService, + final FeatureFlagClient featureFlagClient, + final MetricClient metricClient) { this.workspaceService = workspaceService; this.actorDefinitionService = actorDefinitionService; this.scopedConfigurationService = scopedConfigurationService; + this.featureFlagClient = featureFlagClient; + this.metricClient = metricClient; } private UUID getOrganizationId(final UUID workspaceId) { @@ -71,17 +86,37 @@ private Optional getScopedConfig(final UUID actorDefinition } @Override - public Optional getOverride(final ActorType actorType, - final UUID actorDefinitionId, - final UUID workspaceId, - final @Nullable UUID actorId, - final ActorDefinitionVersion defaultVersion) { - - final Optional config = getScopedConfig(actorDefinitionId, workspaceId, actorId); - if 
(config.isPresent()) { + public Optional getOverride(final ActorType actorType, + final UUID actorDefinitionId, + final UUID workspaceId, + final @Nullable UUID actorId, + final ActorDefinitionVersion defaultVersion) { + + final Optional optConfig = getScopedConfig(actorDefinitionId, workspaceId, actorId); + if (optConfig.isPresent()) { + final ScopedConfiguration config = optConfig.get(); + if (config.getOriginType() == ConfigOriginType.BREAKING_CHANGE) { + if (featureFlagClient.boolVariation(UseActorScopedDefaultVersions.INSTANCE, new Workspace(workspaceId))) { + // If the above feature flag is off, defaultVersion won't consider breaking change setbacks, + // so metrics wouldn't be accurate + final String status = defaultVersion.getVersionId().toString().equals(config.getValue()) ? "ok" : "invalid"; + metricClient.count(OssMetricsRegistry.CONNECTOR_BREAKING_CHANGE_PIN_SERVED, 1, + new MetricAttribute("workspace_id", workspaceId.toString()), + new MetricAttribute("actor_id", actorId != null ? 
actorId.toString() : "null"), + new MetricAttribute("actor_default_version", defaultVersion.getVersionId().toString()), + new MetricAttribute("pinned_version", config.getValue()), + new MetricAttribute("status", status)); + } + + if (!featureFlagClient.boolVariation(UseBreakingChangeScopedConfigs.INSTANCE, new Workspace(workspaceId))) { + return Optional.empty(); + } + } + try { - final ActorDefinitionVersion version = actorDefinitionService.getActorDefinitionVersion(UUID.fromString(config.get().getValue())); - return Optional.of(version); + final ActorDefinitionVersion version = actorDefinitionService.getActorDefinitionVersion(UUID.fromString(config.getValue())); + final boolean isManualOverride = config.getOriginType() == ConfigOriginType.USER; + return Optional.of(new ActorDefinitionVersionWithOverrideStatus(version, isManualOverride)); } catch (final ConfigNotFoundException | IOException e) { throw new RuntimeException(e); } diff --git a/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/version_overrides/DefinitionVersionOverrideProvider.java b/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/version_overrides/DefinitionVersionOverrideProvider.java index 6731d1c61a4..482abb4e1ab 100644 --- a/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/version_overrides/DefinitionVersionOverrideProvider.java +++ b/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/version_overrides/DefinitionVersionOverrideProvider.java @@ -6,6 +6,7 @@ import io.airbyte.config.ActorDefinitionVersion; import io.airbyte.config.ActorType; +import io.airbyte.config.persistence.ActorDefinitionVersionHelper.ActorDefinitionVersionWithOverrideStatus; import jakarta.annotation.Nullable; import java.util.Optional; import java.util.UUID; @@ -17,10 +18,10 @@ */ public interface DefinitionVersionOverrideProvider { - Optional getOverride(final ActorType actorType, - final UUID 
actorDefinitionId, - final UUID workspaceId, - @Nullable final UUID actorId, - final ActorDefinitionVersion defaultVersion); + Optional getOverride(final ActorType actorType, + final UUID actorDefinitionId, + final UUID workspaceId, + @Nullable final UUID actorId, + final ActorDefinitionVersion defaultVersion); } diff --git a/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/version_overrides/FeatureFlagDefinitionVersionOverrideProvider.java b/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/version_overrides/FeatureFlagDefinitionVersionOverrideProvider.java index 9d023f45b2e..6a0c96b5e07 100644 --- a/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/version_overrides/FeatureFlagDefinitionVersionOverrideProvider.java +++ b/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/version_overrides/FeatureFlagDefinitionVersionOverrideProvider.java @@ -8,6 +8,7 @@ import io.airbyte.commons.version.Version; import io.airbyte.config.ActorDefinitionVersion; import io.airbyte.config.ActorType; +import io.airbyte.config.persistence.ActorDefinitionVersionHelper.ActorDefinitionVersionWithOverrideStatus; import io.airbyte.config.persistence.ActorDefinitionVersionResolver; import io.airbyte.featureflag.ConnectorVersionOverride; import io.airbyte.featureflag.Context; @@ -85,11 +86,11 @@ public List getContexts(final ActorType actorType, final UUID actorDefi } @Override - public Optional getOverride(final ActorType actorType, - final UUID actorDefinitionId, - final UUID workspaceId, - @Nullable final UUID actorId, - final ActorDefinitionVersion defaultVersion) { + public Optional getOverride(final ActorType actorType, + final UUID actorDefinitionId, + final UUID workspaceId, + @Nullable final UUID actorId, + final ActorDefinitionVersion defaultVersion) { final List contexts = getContexts(actorType, actorDefinitionId, workspaceId, actorId); final String overrideTag = 
featureFlagClient.stringVariation(ConnectorVersionOverride.INSTANCE, new Multi(contexts)); @@ -111,7 +112,7 @@ public Optional getOverride(final ActorType actorType, } LOGGER.info("Using connector version override for definition {} with tag {}", actorDefinitionId, overrideTag); } - return version; + return version.map(v -> new ActorDefinitionVersionWithOverrideStatus(v, true)); } catch (final IOException e) { LOGGER.error("Failed to read or persist actor definition version for definition {} with tag {}", actorDefinitionId, diff --git a/airbyte-config/config-persistence/src/main/kotlin/io/airbyte/config/persistence/StreamGenerationRepository.kt b/airbyte-config/config-persistence/src/main/kotlin/io/airbyte/config/persistence/StreamGenerationRepository.kt new file mode 100644 index 00000000000..b40b1b6cc75 --- /dev/null +++ b/airbyte-config/config-persistence/src/main/kotlin/io/airbyte/config/persistence/StreamGenerationRepository.kt @@ -0,0 +1,26 @@ +package io.airbyte.config.persistence + +import io.airbyte.config.persistence.domain.Generation +import io.airbyte.config.persistence.domain.StreamGeneration +import io.micronaut.data.annotation.Query +import io.micronaut.data.jdbc.annotation.JdbcRepository +import io.micronaut.data.model.query.builder.sql.Dialect +import io.micronaut.data.repository.PageableRepository +import java.util.UUID + +@JdbcRepository(dialect = Dialect.POSTGRES, dataSource = "config") +interface StreamGenerationRepository : PageableRepository { + fun findByConnectionId(connectionId: UUID): List + + fun deleteByConnectionId(connectionId: UUID) + + @Query( + value = """ + SELECT stream_name, stream_namespace, MAX(generation_id) as generation_id + FROM stream_generation + WHERE connection_id = :connectionId + GROUP BY (stream_name, stream_namespace) + """, + ) + fun getMaxGenerationOfStreamsForConnectionId(connectionId: UUID): List +} diff --git 
a/airbyte-config/config-persistence/src/main/kotlin/io/airbyte/config/persistence/StreamRefreshesRepository.kt b/airbyte-config/config-persistence/src/main/kotlin/io/airbyte/config/persistence/StreamRefreshesRepository.kt new file mode 100644 index 00000000000..de4051b9236 --- /dev/null +++ b/airbyte-config/config-persistence/src/main/kotlin/io/airbyte/config/persistence/StreamRefreshesRepository.kt @@ -0,0 +1,31 @@ +package io.airbyte.config.persistence + +import io.airbyte.config.persistence.domain.StreamRefresh +import io.micronaut.data.annotation.Query +import io.micronaut.data.jdbc.annotation.JdbcRepository +import io.micronaut.data.model.query.builder.sql.Dialect +import io.micronaut.data.repository.PageableRepository +import java.util.UUID + +@JdbcRepository(dialect = Dialect.POSTGRES, dataSource = "config") +interface StreamRefreshesRepository : PageableRepository { + fun findByConnectionId(connectionId: UUID): List + + fun deleteByConnectionId(connectionId: UUID) + + @Query( + value = """ + DELETE FROM stream_refreshes + WHERE connection_id = :connectionId + AND stream_name = :streamName + AND ((:streamNamespace) IS NULL OR stream_namespace = :streamNamespace) + """, + ) + fun deleteByConnectionIdAndStreamNameAndStreamNamespace( + connectionId: UUID, + streamName: String, + streamNamespace: String?, + ) + + fun existsByConnectionId(connectionId: UUID): Boolean +} diff --git a/airbyte-config/config-persistence/src/main/kotlin/io/airbyte/config/persistence/domain/StreamGeneration.kt b/airbyte-config/config-persistence/src/main/kotlin/io/airbyte/config/persistence/domain/StreamGeneration.kt new file mode 100644 index 00000000000..489c2c0b0c2 --- /dev/null +++ b/airbyte-config/config-persistence/src/main/kotlin/io/airbyte/config/persistence/domain/StreamGeneration.kt @@ -0,0 +1,51 @@ +package io.airbyte.config.persistence.domain + +import io.micronaut.core.annotation.NonNull +import io.micronaut.core.annotation.Nullable +import 
io.micronaut.data.annotation.DateCreated +import io.micronaut.data.annotation.DateUpdated +import io.micronaut.data.annotation.Id +import io.micronaut.data.annotation.MappedEntity +import jakarta.persistence.Column +import java.time.OffsetDateTime +import java.util.UUID + +@MappedEntity("stream_generation") +data class StreamGeneration( + @field:Id + @NonNull + var id: UUID? = UUID.randomUUID(), + @Column(name = "connection_id") + @NonNull + var connectionId: UUID, + @Column(name = "stream_name") + @NonNull + var streamName: String, + @Column(name = "stream_namespace") + @Nullable + var streamNamespace: String? = null, + @Column(name = "generation_id") + @NonNull + var generationId: Long, + @Column(name = "start_job_id") + @NonNull + var startJobId: Long, + @Column(name = "created_at") + @DateCreated + var createdAt: OffsetDateTime? = null, + @Column(name = "updated_at") + @DateUpdated + var updatedAt: OffsetDateTime? = null, +) + +@MappedEntity +data class Generation( + @Column(name = "stream_name") + @NonNull + val streamName: String, + @Column(name = "stream_namespace") + @Nullable + val streamNamespace: String? 
= null, + @Column(name = "generation_id") + val generationId: Long, +) diff --git a/airbyte-config/config-persistence/src/main/kotlin/io/airbyte/config/persistence/domain/StreamRefresh.kt b/airbyte-config/config-persistence/src/main/kotlin/io/airbyte/config/persistence/domain/StreamRefresh.kt new file mode 100644 index 00000000000..56a9684af0a --- /dev/null +++ b/airbyte-config/config-persistence/src/main/kotlin/io/airbyte/config/persistence/domain/StreamRefresh.kt @@ -0,0 +1,29 @@ +package io.airbyte.config.persistence.domain + +import io.micronaut.core.annotation.NonNull +import io.micronaut.core.annotation.Nullable +import io.micronaut.data.annotation.DateCreated +import io.micronaut.data.annotation.Id +import io.micronaut.data.annotation.MappedEntity +import jakarta.persistence.Column +import java.time.OffsetDateTime +import java.util.UUID + +@MappedEntity("stream_refreshes") +data class StreamRefresh( + @field:Id + @NonNull + var id: UUID? = UUID.randomUUID(), + @Column(name = "connection_id") + @NonNull + var connectionId: UUID, + @Column(name = "stream_name") + @NonNull + var streamName: String, + @Column(name = "stream_namespace") + @Nullable + var streamNamespace: String? = null, + @Column(name = "created_at") + @DateCreated + var createdAt: OffsetDateTime? 
= null, +) diff --git a/airbyte-config/config-persistence/src/main/kotlin/io/airbyte/config/persistence/helper/CatalogGenerationSetter.kt b/airbyte-config/config-persistence/src/main/kotlin/io/airbyte/config/persistence/helper/CatalogGenerationSetter.kt new file mode 100644 index 00000000000..56110f3a275 --- /dev/null +++ b/airbyte-config/config-persistence/src/main/kotlin/io/airbyte/config/persistence/helper/CatalogGenerationSetter.kt @@ -0,0 +1,49 @@ +package io.airbyte.config.persistence.helper + +import io.airbyte.commons.json.Jsons +import io.airbyte.config.persistence.domain.Generation +import io.airbyte.config.persistence.domain.StreamRefresh +import io.airbyte.protocol.models.ConfiguredAirbyteCatalog +import io.airbyte.protocol.models.StreamDescriptor +import jakarta.inject.Singleton + +@Singleton +class CatalogGenerationSetter { + fun updateCatalogWithGenerationAndSyncInformation( + catalog: ConfiguredAirbyteCatalog, + jobId: Long, + streamRefreshes: List, + generations: List, + ): ConfiguredAirbyteCatalog { + val generationByStreamDescriptor: Map = getCurrentGenerationByStreamDescriptor(generations) + + val catalogCopy = Jsons.clone(catalog) + + catalogCopy.streams.forEach { + configuredAirbyteStream -> + val streamDescriptor = + StreamDescriptor().withName( + configuredAirbyteStream.stream.name, + ).withNamespace(configuredAirbyteStream.stream.namespace) + val currentGeneration = generationByStreamDescriptor.getOrDefault(streamDescriptor, 0) + val isRefresh = streamRefreshes.contains(streamDescriptor) + + configuredAirbyteStream.syncId = jobId + configuredAirbyteStream.generationId = currentGeneration + configuredAirbyteStream.minimumGenerationId = if (isRefresh) currentGeneration else 0 + } + + return catalogCopy + } + + private fun getCurrentGenerationByStreamDescriptor(generations: List): Map { + return generations + .map { StreamDescriptor().withName(it.streamName).withNamespace(it.streamNamespace) to it.generationId }.toMap() + } + + private fun 
getStreamRefreshesAsStreamDescriptors(streamRefreshes: List): Set { + return streamRefreshes.map { + StreamDescriptor().withName(it.streamName).withNamespace(it.streamNamespace) + }.toHashSet() + } +} diff --git a/airbyte-config/config-persistence/src/main/kotlin/io/airbyte/config/persistence/helper/GenerationBumper.kt b/airbyte-config/config-persistence/src/main/kotlin/io/airbyte/config/persistence/helper/GenerationBumper.kt new file mode 100644 index 00000000000..491217b06f2 --- /dev/null +++ b/airbyte-config/config-persistence/src/main/kotlin/io/airbyte/config/persistence/helper/GenerationBumper.kt @@ -0,0 +1,70 @@ +package io.airbyte.config.persistence.helper + +import io.airbyte.config.persistence.StreamGenerationRepository +import io.airbyte.config.persistence.domain.Generation +import io.airbyte.config.persistence.domain.StreamGeneration +import io.airbyte.config.persistence.domain.StreamRefresh +import io.airbyte.protocol.models.StreamDescriptor +import jakarta.inject.Singleton +import java.util.UUID + +@Singleton +class GenerationBumper(val streamGenerationRepository: StreamGenerationRepository) { + /** + * This is increasing the generation of the stream being refreshed. + * For each stream being refreshed, it fetches the current generation and then create a new entry in the generation + * table with the generation being bumped. 
+ * @param connectionId - the connectionId of the generation being increased + * @param jobId - The current jobId + * @param streamRefreshes - List of the stream being refreshed + */ + fun updateGenerationForStreams( + connectionId: UUID, + jobId: Long, + streamRefreshes: List, + ) { + val streamDescriptors: Set = + streamRefreshes + .map { StreamDescriptor().withName(it.streamName).withNamespace(it.streamNamespace) }.toHashSet() + + val currentMaxGeneration: List = streamGenerationRepository.getMaxGenerationOfStreamsForConnectionId(connectionId) + + val streamDescriptorWithoutAGeneration = + streamDescriptors.filter { + val missingInGeneration = + currentMaxGeneration.find { generation: Generation -> + generation.streamName == it.name && generation.streamNamespace == it.namespace + } == null + missingInGeneration + } + + val newGenerations = + streamDescriptorWithoutAGeneration.map { + Generation( + streamName = it.name, + streamNamespace = it.namespace, + generationId = 0L, + ) + } + + val generationToUpdate: List = + currentMaxGeneration.filter { + val streamDescriptor = StreamDescriptor().withName(it.streamName).withNamespace(it.streamNamespace) + streamDescriptors.contains(streamDescriptor) + } + newGenerations + + val updatedStreamGeneration = + generationToUpdate.map { + StreamGeneration( + id = UUID.randomUUID(), + connectionId = connectionId, + streamName = it.streamName, + streamNamespace = it.streamNamespace, + generationId = it.generationId + 1, + startJobId = jobId, + ) + } + + streamGenerationRepository.saveAll(updatedStreamGeneration) + } +} diff --git a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/ActorDefinitionBreakingChangePersistenceTest.java b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/ActorDefinitionBreakingChangePersistenceTest.java index 9c0449c6ce8..425dae73784 100644 --- 
a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/ActorDefinitionBreakingChangePersistenceTest.java +++ b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/ActorDefinitionBreakingChangePersistenceTest.java @@ -19,7 +19,10 @@ import io.airbyte.config.SupportLevel; import io.airbyte.config.secrets.SecretsRepositoryReader; import io.airbyte.config.secrets.SecretsRepositoryWriter; +import io.airbyte.data.helpers.ActorDefinitionVersionUpdater; +import io.airbyte.data.services.ActorDefinitionService; import io.airbyte.data.services.ConnectionService; +import io.airbyte.data.services.ScopedConfigurationService; import io.airbyte.data.services.SecretPersistenceConfigService; import io.airbyte.data.services.impls.jooq.ActorDefinitionServiceJooqImpl; import io.airbyte.data.services.impls.jooq.CatalogServiceJooqImpl; @@ -120,6 +123,10 @@ void setup() throws SQLException, JsonValidationException, IOException { final SecretPersistenceConfigService secretPersistenceConfigService = mock(SecretPersistenceConfigService.class); final ConnectionService connectionService = mock(ConnectionService.class); + final ScopedConfigurationService scopedConfigurationService = mock(ScopedConfigurationService.class); + final ActorDefinitionService actorDefinitionService = new ActorDefinitionServiceJooqImpl(database); + final ActorDefinitionVersionUpdater actorDefinitionVersionUpdater = + new ActorDefinitionVersionUpdater(featureFlagClient, connectionService, actorDefinitionService, scopedConfigurationService); configRepository = spy( new ConfigRepository( new ActorDefinitionServiceJooqImpl(database), @@ -131,7 +138,8 @@ void setup() throws SQLException, JsonValidationException, IOException { secretsRepositoryReader, secretsRepositoryWriter, secretPersistenceConfigService, - connectionService), + connectionService, + actorDefinitionVersionUpdater), new OAuthServiceJooqImpl(database, featureFlagClient, secretsRepositoryReader, @@ -142,7 
+150,8 @@ void setup() throws SQLException, JsonValidationException, IOException { secretsRepositoryReader, secretsRepositoryWriter, secretPersistenceConfigService, - connectionService), + connectionService, + actorDefinitionVersionUpdater), new WorkspaceServiceJooqImpl(database, featureFlagClient, secretsRepositoryReader, diff --git a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/ActorDefinitionPersistenceTest.java b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/ActorDefinitionPersistenceTest.java index 59b9b04cf08..b236e3171ff 100644 --- a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/ActorDefinitionPersistenceTest.java +++ b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/ActorDefinitionPersistenceTest.java @@ -25,7 +25,10 @@ import io.airbyte.config.SupportLevel; import io.airbyte.config.secrets.SecretsRepositoryReader; import io.airbyte.config.secrets.SecretsRepositoryWriter; +import io.airbyte.data.helpers.ActorDefinitionVersionUpdater; +import io.airbyte.data.services.ActorDefinitionService; import io.airbyte.data.services.ConnectionService; +import io.airbyte.data.services.ScopedConfigurationService; import io.airbyte.data.services.SecretPersistenceConfigService; import io.airbyte.data.services.impls.jooq.ActorDefinitionServiceJooqImpl; import io.airbyte.data.services.impls.jooq.CatalogServiceJooqImpl; @@ -55,6 +58,7 @@ class ActorDefinitionPersistenceTest extends BaseConfigDatabaseTest { + private static final String TEST_DEFAULT_MAX_SECONDS = "3600"; private static final UUID WORKSPACE_ID = UUID.randomUUID(); private static final String DOCKER_IMAGE_TAG = "0.0.1"; @@ -65,13 +69,18 @@ void setup() throws SQLException { truncateAllTables(); final FeatureFlagClient featureFlagClient = mock(TestClient.class); - when(featureFlagClient.stringVariation(eq(HeartbeatMaxSecondsBetweenMessages.INSTANCE), 
any(SourceDefinition.class))).thenReturn("3600"); + when(featureFlagClient.stringVariation(eq(HeartbeatMaxSecondsBetweenMessages.INSTANCE), any(SourceDefinition.class))) + .thenReturn(TEST_DEFAULT_MAX_SECONDS); final SecretsRepositoryReader secretsRepositoryReader = mock(SecretsRepositoryReader.class); final SecretsRepositoryWriter secretsRepositoryWriter = mock(SecretsRepositoryWriter.class); final SecretPersistenceConfigService secretPersistenceConfigService = mock(SecretPersistenceConfigService.class); + final ScopedConfigurationService scopedConfigurationService = mock(ScopedConfigurationService.class); final ConnectionService connectionService = new ConnectionServiceJooqImpl(database); + final ActorDefinitionService actorDefinitionService = new ActorDefinitionServiceJooqImpl(database); + final ActorDefinitionVersionUpdater actorDefinitionVersionUpdater = + new ActorDefinitionVersionUpdater(featureFlagClient, connectionService, actorDefinitionService, scopedConfigurationService); configRepository = spy( new ConfigRepository( new ActorDefinitionServiceJooqImpl(database), @@ -83,7 +92,8 @@ void setup() throws SQLException { secretsRepositoryReader, secretsRepositoryWriter, secretPersistenceConfigService, - connectionService), + connectionService, + actorDefinitionVersionUpdater), new OAuthServiceJooqImpl(database, featureFlagClient, secretsRepositoryReader, @@ -94,7 +104,8 @@ void setup() throws SQLException { secretsRepositoryReader, secretsRepositoryWriter, secretPersistenceConfigService, - connectionService), + connectionService, + actorDefinitionVersionUpdater), new WorkspaceServiceJooqImpl(database, featureFlagClient, secretsRepositoryReader, @@ -123,8 +134,19 @@ void testSourceDefinitionDefaultMaxSeconds() throws JsonValidationException, Con } @Test - void testSourceDefinitionMaxSeconds() throws JsonValidationException, ConfigNotFoundException, IOException { - 
assertReturnsSrcDef(createBaseSourceDefWithoutMaxSecondsBetweenMessages().withMaxSecondsBetweenMessages(1L)); + void testSourceDefinitionMaxSecondsGreaterThenDefaultShouldReturnConfigured() throws JsonValidationException, ConfigNotFoundException, IOException { + assertReturnsSrcDef( + createBaseSourceDefWithoutMaxSecondsBetweenMessages().withMaxSecondsBetweenMessages(Long.parseLong(TEST_DEFAULT_MAX_SECONDS) + 1)); + } + + @Test + void testSourceDefinitionMaxSecondsLessThenDefaultShouldReturnDefault() throws JsonValidationException, ConfigNotFoundException, IOException { + final var def = createBaseSourceDefWithoutMaxSecondsBetweenMessages().withMaxSecondsBetweenMessages(1L); + final ActorDefinitionVersion actorDefinitionVersion = createBaseActorDefVersion(def.getSourceDefinitionId()); + configRepository.writeConnectorMetadata(def, actorDefinitionVersion); + final var exp = + def.withDefaultVersionId(actorDefinitionVersion.getVersionId()).withMaxSecondsBetweenMessages(Long.parseLong(TEST_DEFAULT_MAX_SECONDS)); + assertEquals(exp, configRepository.getStandardSourceDefinition(def.getSourceDefinitionId())); } private void assertReturnsSrcDef(final StandardSourceDefinition srcDef) throws ConfigNotFoundException, IOException, JsonValidationException { diff --git a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/ActorDefinitionVersionHelperTest.java b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/ActorDefinitionVersionHelperTest.java index 803cae16408..6ef5dd547a6 100644 --- a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/ActorDefinitionVersionHelperTest.java +++ b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/ActorDefinitionVersionHelperTest.java @@ -46,6 +46,7 @@ import org.junit.jupiter.api.Test; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.CsvSource; +import 
org.junit.jupiter.params.provider.ValueSource; class ActorDefinitionVersionHelperTest { @@ -131,10 +132,11 @@ void testGetSourceVersionFromActorDefault() throws ConfigNotFoundException, IOEx assertFalse(versionWithOverrideStatus.isOverrideApplied()); } - @Test - void testGetSourceVersionWithConfigOverride() throws ConfigNotFoundException, IOException, JsonValidationException { + @ParameterizedTest + @ValueSource(booleans = {true, false}) + void testGetSourceVersionWithConfigOverride(final boolean isOverrideApplied) throws ConfigNotFoundException, IOException, JsonValidationException { when(mConfigOverrideProvider.getOverride(ActorType.SOURCE, ACTOR_DEFINITION_ID, WORKSPACE_ID, ACTOR_ID, DEFAULT_VERSION)) - .thenReturn(Optional.of(OVERRIDDEN_VERSION)); + .thenReturn(Optional.of(new ActorDefinitionVersionWithOverrideStatus(OVERRIDDEN_VERSION, isOverrideApplied))); final StandardSourceDefinition sourceDefinition = new StandardSourceDefinition() .withSourceDefinitionId(ACTOR_DEFINITION_ID) @@ -143,7 +145,7 @@ void testGetSourceVersionWithConfigOverride() throws ConfigNotFoundException, IO final ActorDefinitionVersionWithOverrideStatus versionWithOverrideStatus = actorDefinitionVersionHelper.getSourceVersionWithOverrideStatus(sourceDefinition, WORKSPACE_ID, ACTOR_ID); assertEquals(OVERRIDDEN_VERSION, versionWithOverrideStatus.actorDefinitionVersion()); - assertTrue(versionWithOverrideStatus.isOverrideApplied()); + assertEquals(isOverrideApplied, versionWithOverrideStatus.isOverrideApplied()); verify(mConfigOverrideProvider).getOverride(ActorType.SOURCE, ACTOR_DEFINITION_ID, WORKSPACE_ID, ACTOR_ID, DEFAULT_VERSION); verifyNoInteractions(mFFOverrideProvider); @@ -152,7 +154,7 @@ void testGetSourceVersionWithConfigOverride() throws ConfigNotFoundException, IO @Test void testGetSourceVersionWithOverride() throws ConfigNotFoundException, IOException, JsonValidationException { when(mFFOverrideProvider.getOverride(ActorType.SOURCE, ACTOR_DEFINITION_ID, WORKSPACE_ID, ACTOR_ID, 
DEFAULT_VERSION)) - .thenReturn(Optional.of(OVERRIDDEN_VERSION)); + .thenReturn(Optional.of(new ActorDefinitionVersionWithOverrideStatus(OVERRIDDEN_VERSION, true))); final StandardSourceDefinition sourceDefinition = new StandardSourceDefinition() .withSourceDefinitionId(ACTOR_DEFINITION_ID) @@ -192,7 +194,7 @@ void testGetSourceVersionForWorkspaceWithActorScopedFF() throws ConfigNotFoundEx @Test void testGetSourceVersionForWorkspaceWithOverride() throws ConfigNotFoundException, IOException, JsonValidationException { when(mFFOverrideProvider.getOverride(ActorType.SOURCE, ACTOR_DEFINITION_ID, WORKSPACE_ID, null, DEFAULT_VERSION)) - .thenReturn(Optional.of(OVERRIDDEN_VERSION)); + .thenReturn(Optional.of(new ActorDefinitionVersionWithOverrideStatus(OVERRIDDEN_VERSION, true))); final StandardSourceDefinition sourceDefinition = new StandardSourceDefinition() .withSourceDefinitionId(ACTOR_DEFINITION_ID) @@ -208,7 +210,7 @@ void testGetSourceVersionForWorkspaceWithOverride() throws ConfigNotFoundExcepti @Test void testGetSourceVersionForWorkspaceWithConfigOverride() throws ConfigNotFoundException, IOException, JsonValidationException { when(mConfigOverrideProvider.getOverride(ActorType.SOURCE, ACTOR_DEFINITION_ID, WORKSPACE_ID, null, DEFAULT_VERSION)) - .thenReturn(Optional.of(OVERRIDDEN_VERSION)); + .thenReturn(Optional.of(new ActorDefinitionVersionWithOverrideStatus(OVERRIDDEN_VERSION, true))); final StandardSourceDefinition sourceDefinition = new StandardSourceDefinition() .withSourceDefinitionId(ACTOR_DEFINITION_ID) @@ -251,7 +253,7 @@ void testGetDestinationVersionFromActorDefault() throws ConfigNotFoundException, @Test void testGetDestinationVersionWithOverride() throws ConfigNotFoundException, IOException, JsonValidationException { when(mFFOverrideProvider.getOverride(ActorType.DESTINATION, ACTOR_DEFINITION_ID, WORKSPACE_ID, ACTOR_ID, DEFAULT_VERSION)) - .thenReturn(Optional.of(OVERRIDDEN_VERSION)); + .thenReturn(Optional.of(new 
ActorDefinitionVersionWithOverrideStatus(OVERRIDDEN_VERSION, true))); final StandardDestinationDefinition destinationDefinition = new StandardDestinationDefinition() .withDestinationDefinitionId(ACTOR_DEFINITION_ID) @@ -269,7 +271,7 @@ void testGetDestinationVersionWithOverride() throws ConfigNotFoundException, IOE @Test void testGetDestinationVersionWithConfigOverride() throws ConfigNotFoundException, IOException, JsonValidationException { when(mConfigOverrideProvider.getOverride(ActorType.DESTINATION, ACTOR_DEFINITION_ID, WORKSPACE_ID, ACTOR_ID, DEFAULT_VERSION)) - .thenReturn(Optional.of(OVERRIDDEN_VERSION)); + .thenReturn(Optional.of(new ActorDefinitionVersionWithOverrideStatus(OVERRIDDEN_VERSION, true))); final StandardDestinationDefinition destinationDefinition = new StandardDestinationDefinition() .withDestinationDefinitionId(ACTOR_DEFINITION_ID) @@ -309,7 +311,7 @@ void testGetDestinationVersionForWorkspaceWithActorScopedFF() throws ConfigNotFo @Test void testGetDestinationVersionForWorkspaceWithOverride() throws ConfigNotFoundException, IOException, JsonValidationException { when(mFFOverrideProvider.getOverride(ActorType.DESTINATION, ACTOR_DEFINITION_ID, WORKSPACE_ID, null, DEFAULT_VERSION)) - .thenReturn(Optional.of(OVERRIDDEN_VERSION)); + .thenReturn(Optional.of(new ActorDefinitionVersionWithOverrideStatus(OVERRIDDEN_VERSION, true))); final StandardDestinationDefinition destinationDefinition = new StandardDestinationDefinition() .withDestinationDefinitionId(ACTOR_DEFINITION_ID) @@ -325,7 +327,7 @@ void testGetDestinationVersionForWorkspaceWithOverride() throws ConfigNotFoundEx @Test void testGetDestinationVersionForWorkspaceWithConfigOverride() throws ConfigNotFoundException, IOException, JsonValidationException { when(mConfigOverrideProvider.getOverride(ActorType.DESTINATION, ACTOR_DEFINITION_ID, WORKSPACE_ID, null, DEFAULT_VERSION)) - .thenReturn(Optional.of(OVERRIDDEN_VERSION)); + .thenReturn(Optional.of(new 
ActorDefinitionVersionWithOverrideStatus(OVERRIDDEN_VERSION, true))); final StandardDestinationDefinition destinationDefinition = new StandardDestinationDefinition() .withDestinationDefinitionId(ACTOR_DEFINITION_ID) @@ -435,7 +437,7 @@ void testGetActiveWorkspaceSyncsWithSourceVersionIds() throws JsonValidationExce .thenReturn(sourceWithOverride); when(mFFOverrideProvider.getOverride(ActorType.SOURCE, sourceDefinition.getSourceDefinitionId(), WORKSPACE_ID, sourceWithOverride.getSourceId(), ADV_2_0_0)) - .thenReturn(Optional.of(ADV_1_0_0)); + .thenReturn(Optional.of(new ActorDefinitionVersionWithOverrideStatus(ADV_1_0_0, true))); final List unsupportedVersionIds = List.of(ADV_1_0_0.getVersionId(), ADV_2_0_0.getVersionId()); when(mConfigRepository.listSourcesWithVersionIds(unsupportedVersionIds)) @@ -544,7 +546,7 @@ void testGetActiveWorkspaceSyncsWithDestinationVersionIds() throws JsonValidatio when(mFFOverrideProvider.getOverride(ActorType.DESTINATION, destinationDefinition.getDestinationDefinitionId(), WORKSPACE_ID, destinationWithOverride.getDestinationId(), ADV_2_0_0)) - .thenReturn(Optional.of(ADV_1_0_0)); + .thenReturn(Optional.of(new ActorDefinitionVersionWithOverrideStatus(ADV_1_0_0, true))); final List unsupportedVersionIds = List.of(ADV_1_0_0.getVersionId(), ADV_2_0_0.getVersionId()); when(mConfigRepository.listDestinationsWithVersionIds(unsupportedVersionIds)) diff --git a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/ActorDefinitionVersionPersistenceTest.java b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/ActorDefinitionVersionPersistenceTest.java index 0c4fab61297..e1b5239b3a3 100644 --- a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/ActorDefinitionVersionPersistenceTest.java +++ b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/ActorDefinitionVersionPersistenceTest.java @@ -28,7 +28,10 @@ import io.airbyte.config.SupportLevel; 
import io.airbyte.config.secrets.SecretsRepositoryReader; import io.airbyte.config.secrets.SecretsRepositoryWriter; +import io.airbyte.data.helpers.ActorDefinitionVersionUpdater; +import io.airbyte.data.services.ActorDefinitionService; import io.airbyte.data.services.ConnectionService; +import io.airbyte.data.services.ScopedConfigurationService; import io.airbyte.data.services.SecretPersistenceConfigService; import io.airbyte.data.services.impls.jooq.ActorDefinitionServiceJooqImpl; import io.airbyte.data.services.impls.jooq.CatalogServiceJooqImpl; @@ -122,6 +125,10 @@ void beforeEach() throws Exception { final SecretPersistenceConfigService secretPersistenceConfigService = mock(SecretPersistenceConfigService.class); final ConnectionService connectionService = mock(ConnectionService.class); + final ScopedConfigurationService scopedConfigurationService = mock(ScopedConfigurationService.class); + final ActorDefinitionService actorDefinitionService = new ActorDefinitionServiceJooqImpl(database); + final ActorDefinitionVersionUpdater actorDefinitionVersionUpdater = + new ActorDefinitionVersionUpdater(featureFlagClient, connectionService, actorDefinitionService, scopedConfigurationService); configRepository = spy( new ConfigRepository( new ActorDefinitionServiceJooqImpl(database), @@ -133,7 +140,8 @@ void beforeEach() throws Exception { secretsRepositoryReader, secretsRepositoryWriter, secretPersistenceConfigService, - connectionService), + connectionService, + actorDefinitionVersionUpdater), new OAuthServiceJooqImpl(database, featureFlagClient, secretsRepositoryReader, @@ -144,7 +152,8 @@ void beforeEach() throws Exception { secretsRepositoryReader, secretsRepositoryWriter, secretPersistenceConfigService, - connectionService), + connectionService, + actorDefinitionVersionUpdater), new WorkspaceServiceJooqImpl(database, featureFlagClient, secretsRepositoryReader, diff --git 
a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/ActorPersistenceTest.java b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/ActorPersistenceTest.java index 7749f6628ad..4cb813d8942 100644 --- a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/ActorPersistenceTest.java +++ b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/ActorPersistenceTest.java @@ -18,7 +18,10 @@ import io.airbyte.config.StandardWorkspace; import io.airbyte.config.secrets.SecretsRepositoryReader; import io.airbyte.config.secrets.SecretsRepositoryWriter; +import io.airbyte.data.helpers.ActorDefinitionVersionUpdater; +import io.airbyte.data.services.ActorDefinitionService; import io.airbyte.data.services.ConnectionService; +import io.airbyte.data.services.ScopedConfigurationService; import io.airbyte.data.services.SecretPersistenceConfigService; import io.airbyte.data.services.impls.jooq.ActorDefinitionServiceJooqImpl; import io.airbyte.data.services.impls.jooq.CatalogServiceJooqImpl; @@ -60,6 +63,10 @@ void setup() throws SQLException, IOException, JsonValidationException { final SecretPersistenceConfigService secretPersistenceConfigService = mock(SecretPersistenceConfigService.class); final ConnectionService connectionService = mock(ConnectionService.class); + final ScopedConfigurationService scopedConfigurationService = mock(ScopedConfigurationService.class); + final ActorDefinitionService actorDefinitionService = new ActorDefinitionServiceJooqImpl(database); + final ActorDefinitionVersionUpdater actorDefinitionVersionUpdater = + new ActorDefinitionVersionUpdater(featureFlagClient, connectionService, actorDefinitionService, scopedConfigurationService); configRepository = spy( new ConfigRepository( new ActorDefinitionServiceJooqImpl(database), @@ -71,7 +78,8 @@ void setup() throws SQLException, IOException, JsonValidationException { secretsRepositoryReader, 
secretsRepositoryWriter, secretPersistenceConfigService, - connectionService), + connectionService, + actorDefinitionVersionUpdater), new OAuthServiceJooqImpl(database, featureFlagClient, secretsRepositoryReader, @@ -82,7 +90,8 @@ void setup() throws SQLException, IOException, JsonValidationException { secretsRepositoryReader, secretsRepositoryWriter, secretPersistenceConfigService, - connectionService), + connectionService, + actorDefinitionVersionUpdater), new WorkspaceServiceJooqImpl(database, featureFlagClient, secretsRepositoryReader, diff --git a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/ConfigInjectionTest.java b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/ConfigInjectionTest.java index c57b73dbd91..89de9608bf5 100644 --- a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/ConfigInjectionTest.java +++ b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/ConfigInjectionTest.java @@ -19,7 +19,10 @@ import io.airbyte.config.SupportLevel; import io.airbyte.config.secrets.SecretsRepositoryReader; import io.airbyte.config.secrets.SecretsRepositoryWriter; +import io.airbyte.data.helpers.ActorDefinitionVersionUpdater; +import io.airbyte.data.services.ActorDefinitionService; import io.airbyte.data.services.ConnectionService; +import io.airbyte.data.services.ScopedConfigurationService; import io.airbyte.data.services.SecretPersistenceConfigService; import io.airbyte.data.services.impls.jooq.ActorDefinitionServiceJooqImpl; import io.airbyte.data.services.impls.jooq.CatalogServiceJooqImpl; @@ -62,6 +65,10 @@ void beforeEach() throws Exception { final SecretPersistenceConfigService secretPersistenceConfigService = mock(SecretPersistenceConfigService.class); final ConnectionService connectionService = mock(ConnectionService.class); + final ScopedConfigurationService scopedConfigurationService = mock(ScopedConfigurationService.class); + final 
ActorDefinitionService actorDefinitionService = new ActorDefinitionServiceJooqImpl(database); + final ActorDefinitionVersionUpdater actorDefinitionVersionUpdater = + new ActorDefinitionVersionUpdater(featureFlagClient, connectionService, actorDefinitionService, scopedConfigurationService); configRepository = new ConfigRepository( new ActorDefinitionServiceJooqImpl(database), new CatalogServiceJooqImpl(database), @@ -72,7 +79,8 @@ void beforeEach() throws Exception { secretsRepositoryReader, secretsRepositoryWriter, secretPersistenceConfigService, - connectionService), + connectionService, + actorDefinitionVersionUpdater), new OAuthServiceJooqImpl(database, featureFlagClient, secretsRepositoryReader, @@ -83,7 +91,8 @@ void beforeEach() throws Exception { secretsRepositoryReader, secretsRepositoryWriter, secretPersistenceConfigService, - connectionService), + connectionService, + actorDefinitionVersionUpdater), new WorkspaceServiceJooqImpl(database, featureFlagClient, secretsRepositoryReader, diff --git a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/ConfigRepositoryE2EReadWriteTest.java b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/ConfigRepositoryE2EReadWriteTest.java index c750f565f67..594d0240895 100644 --- a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/ConfigRepositoryE2EReadWriteTest.java +++ b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/ConfigRepositoryE2EReadWriteTest.java @@ -41,7 +41,10 @@ import io.airbyte.config.persistence.ConfigRepository.StandardSyncQuery; import io.airbyte.config.secrets.SecretsRepositoryReader; import io.airbyte.config.secrets.SecretsRepositoryWriter; +import io.airbyte.data.helpers.ActorDefinitionVersionUpdater; +import io.airbyte.data.services.ActorDefinitionService; import io.airbyte.data.services.ConnectionService; +import io.airbyte.data.services.ScopedConfigurationService; import 
io.airbyte.data.services.SecretPersistenceConfigService; import io.airbyte.data.services.impls.jooq.ActorDefinitionServiceJooqImpl; import io.airbyte.data.services.impls.jooq.CatalogServiceJooqImpl; @@ -105,8 +108,12 @@ void setup() throws IOException, JsonValidationException, SQLException { final SecretsRepositoryReader secretsRepositoryReader = mock(SecretsRepositoryReader.class); final SecretsRepositoryWriter secretsRepositoryWriter = mock(SecretsRepositoryWriter.class); final SecretPersistenceConfigService secretPersistenceConfigService = mock(SecretPersistenceConfigService.class); + final ScopedConfigurationService scopedConfigurationService = mock(ScopedConfigurationService.class); final ConnectionService connectionService = new ConnectionServiceJooqImpl(database); + final ActorDefinitionService actorDefinitionService = new ActorDefinitionServiceJooqImpl(database); + final ActorDefinitionVersionUpdater actorDefinitionVersionUpdater = + new ActorDefinitionVersionUpdater(featureFlagClient, connectionService, actorDefinitionService, scopedConfigurationService); configRepository = spy( new ConfigRepository( new ActorDefinitionServiceJooqImpl(database), @@ -118,7 +125,8 @@ void setup() throws IOException, JsonValidationException, SQLException { secretsRepositoryReader, secretsRepositoryWriter, secretPersistenceConfigService, - connectionService), + connectionService, + actorDefinitionVersionUpdater), new OAuthServiceJooqImpl(database, featureFlagClient, secretsRepositoryReader, @@ -129,7 +137,8 @@ void setup() throws IOException, JsonValidationException, SQLException { secretsRepositoryReader, secretsRepositoryWriter, secretPersistenceConfigService, - connectionService), + connectionService, + actorDefinitionVersionUpdater), new WorkspaceServiceJooqImpl(database, featureFlagClient, secretsRepositoryReader, diff --git a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/ConnectorBuilderProjectPersistenceTest.java 
b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/ConnectorBuilderProjectPersistenceTest.java index bdb3ec8280b..0a39219d001 100644 --- a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/ConnectorBuilderProjectPersistenceTest.java +++ b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/ConnectorBuilderProjectPersistenceTest.java @@ -26,7 +26,10 @@ import io.airbyte.config.SupportLevel; import io.airbyte.config.secrets.SecretsRepositoryReader; import io.airbyte.config.secrets.SecretsRepositoryWriter; +import io.airbyte.data.helpers.ActorDefinitionVersionUpdater; +import io.airbyte.data.services.ActorDefinitionService; import io.airbyte.data.services.ConnectionService; +import io.airbyte.data.services.ScopedConfigurationService; import io.airbyte.data.services.SecretPersistenceConfigService; import io.airbyte.data.services.impls.jooq.ActorDefinitionServiceJooqImpl; import io.airbyte.data.services.impls.jooq.CatalogServiceJooqImpl; @@ -91,6 +94,10 @@ void beforeEach() throws Exception { final SecretPersistenceConfigService secretPersistenceConfigService = mock(SecretPersistenceConfigService.class); final ConnectionService connectionService = mock(ConnectionService.class); + final ScopedConfigurationService scopedConfigurationService = mock(ScopedConfigurationService.class); + final ActorDefinitionService actorDefinitionService = new ActorDefinitionServiceJooqImpl(database); + final ActorDefinitionVersionUpdater actorDefinitionVersionUpdater = + new ActorDefinitionVersionUpdater(featureFlagClient, connectionService, actorDefinitionService, scopedConfigurationService); configRepository = new ConfigRepository( new ActorDefinitionServiceJooqImpl(database), new CatalogServiceJooqImpl(database), @@ -101,7 +108,8 @@ void beforeEach() throws Exception { secretsRepositoryReader, secretsRepositoryWriter, secretPersistenceConfigService, - connectionService), + connectionService, + 
actorDefinitionVersionUpdater), new OAuthServiceJooqImpl(database, featureFlagClient, secretsRepositoryReader, @@ -112,7 +120,8 @@ void beforeEach() throws Exception { secretsRepositoryReader, secretsRepositoryWriter, secretPersistenceConfigService, - connectionService), + connectionService, + actorDefinitionVersionUpdater), new WorkspaceServiceJooqImpl(database, featureFlagClient, secretsRepositoryReader, diff --git a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/ConnectorMetadataPersistenceTest.java b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/ConnectorMetadataPersistenceTest.java index 08852777b37..49825fe6f9b 100644 --- a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/ConnectorMetadataPersistenceTest.java +++ b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/ConnectorMetadataPersistenceTest.java @@ -33,7 +33,10 @@ import io.airbyte.config.SupportLevel; import io.airbyte.config.secrets.SecretsRepositoryReader; import io.airbyte.config.secrets.SecretsRepositoryWriter; +import io.airbyte.data.helpers.ActorDefinitionVersionUpdater; +import io.airbyte.data.services.ActorDefinitionService; import io.airbyte.data.services.ConnectionService; +import io.airbyte.data.services.ScopedConfigurationService; import io.airbyte.data.services.SecretPersistenceConfigService; import io.airbyte.data.services.impls.jooq.ActorDefinitionServiceJooqImpl; import io.airbyte.data.services.impls.jooq.CatalogServiceJooqImpl; @@ -58,6 +61,7 @@ import java.util.Map; import java.util.Optional; import java.util.UUID; +import org.jooq.exception.DataAccessException; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.junit.jupiter.params.ParameterizedTest; @@ -92,6 +96,10 @@ void setup() throws SQLException, JsonValidationException, IOException { final SecretPersistenceConfigService secretPersistenceConfigService = 
mock(SecretPersistenceConfigService.class); final ConnectionService connectionService = mock(ConnectionService.class); + final ScopedConfigurationService scopedConfigurationService = mock(ScopedConfigurationService.class); + final ActorDefinitionService actorDefinitionService = new ActorDefinitionServiceJooqImpl(database); + final ActorDefinitionVersionUpdater actorDefinitionVersionUpdater = + new ActorDefinitionVersionUpdater(featureFlagClient, connectionService, actorDefinitionService, scopedConfigurationService); configRepository = new ConfigRepository( new ActorDefinitionServiceJooqImpl(database), new CatalogServiceJooqImpl(database), @@ -102,7 +110,8 @@ void setup() throws SQLException, JsonValidationException, IOException { secretsRepositoryReader, secretsRepositoryWriter, secretPersistenceConfigService, - connectionService), + connectionService, + actorDefinitionVersionUpdater), new OAuthServiceJooqImpl(database, featureFlagClient, secretsRepositoryReader, @@ -113,7 +122,8 @@ void setup() throws SQLException, JsonValidationException, IOException { secretsRepositoryReader, secretsRepositoryWriter, secretPersistenceConfigService, - connectionService), + connectionService, + actorDefinitionVersionUpdater), new WorkspaceServiceJooqImpl(database, featureFlagClient, secretsRepositoryReader, @@ -498,22 +508,23 @@ void testTransactionRollbackOnFailure() throws IOException, JsonValidationExcept assertEquals(initialSourceDefinitionDefaultVersionId, initialSourceDefaultVersionId); // Introduce a breaking change between 0.0.1 and UPGRADE_IMAGE_TAG to make the upgrade breaking, but - // with a tag that will - // fail validation. We want to check that the state is rolled back correctly. - final String invalidUpgradeTag = "1.0"; + // with a version that will fail to write (due to null docker repo). + // We want to check that the state is rolled back correctly. 
+ final String invalidVersion = "1.0.0"; final List breakingChangesForDef = - List.of(MockData.actorDefinitionBreakingChange("1.0.0").withActorDefinitionId(sourceDefId)); + List.of(MockData.actorDefinitionBreakingChange(invalidVersion).withActorDefinitionId(sourceDefId)); final UUID newVersionId = UUID.randomUUID(); final ActorDefinitionVersion newVersion = MockData.actorDefinitionVersion() .withActorDefinitionId(sourceDefId) .withVersionId(newVersionId) - .withDockerImageTag(invalidUpgradeTag) + .withDockerRepository(null) + .withDockerImageTag(invalidVersion) .withDocumentationUrl("https://www.something.new"); final StandardSourceDefinition updatedSourceDefinition = Jsons.clone(sourceDefinition).withName("updated name"); - assertThrows(IllegalArgumentException.class, + assertThrows(DataAccessException.class, () -> configRepository.writeConnectorMetadata(updatedSourceDefinition, newVersion, breakingChangesForDef)); final UUID sourceDefinitionDefaultVersionIdAfterFailedUpgrade = @@ -523,7 +534,7 @@ void testTransactionRollbackOnFailure() throws IOException, JsonValidationExcept final StandardSourceDefinition sourceDefinitionAfterFailedUpgrade = configRepository.getStandardSourceDefinition(sourceDefId); final Optional newActorDefinitionVersionAfterFailedUpgrade = - configRepository.getActorDefinitionVersion(sourceDefId, invalidUpgradeTag); + configRepository.getActorDefinitionVersion(sourceDefId, invalidVersion); final ActorDefinitionVersion defaultActorDefinitionVersionAfterFailedUpgrade = configRepository.getActorDefinitionVersion(sourceDefinitionDefaultVersionIdAfterFailedUpgrade); diff --git a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/DeclarativeManifestPersistenceTest.java b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/DeclarativeManifestPersistenceTest.java index 1a72e5bb0c4..def646b230d 100644 --- 
a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/DeclarativeManifestPersistenceTest.java +++ b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/DeclarativeManifestPersistenceTest.java @@ -20,7 +20,10 @@ import io.airbyte.config.StandardSourceDefinition; import io.airbyte.config.secrets.SecretsRepositoryReader; import io.airbyte.config.secrets.SecretsRepositoryWriter; +import io.airbyte.data.helpers.ActorDefinitionVersionUpdater; +import io.airbyte.data.services.ActorDefinitionService; import io.airbyte.data.services.ConnectionService; +import io.airbyte.data.services.ScopedConfigurationService; import io.airbyte.data.services.SecretPersistenceConfigService; import io.airbyte.data.services.impls.jooq.ActorDefinitionServiceJooqImpl; import io.airbyte.data.services.impls.jooq.CatalogServiceJooqImpl; @@ -79,6 +82,10 @@ void beforeEach() throws Exception { final SecretPersistenceConfigService secretPersistenceConfigService = mock(SecretPersistenceConfigService.class); final ConnectionService connectionService = mock(ConnectionService.class); + final ScopedConfigurationService scopedConfigurationService = mock(ScopedConfigurationService.class); + final ActorDefinitionService actorDefinitionService = new ActorDefinitionServiceJooqImpl(database); + final ActorDefinitionVersionUpdater actorDefinitionVersionUpdater = + new ActorDefinitionVersionUpdater(featureFlagClient, connectionService, actorDefinitionService, scopedConfigurationService); configRepository = new ConfigRepository( new ActorDefinitionServiceJooqImpl(database), new CatalogServiceJooqImpl(database), @@ -89,7 +96,8 @@ void beforeEach() throws Exception { secretsRepositoryReader, secretsRepositoryWriter, secretPersistenceConfigService, - connectionService), + connectionService, + actorDefinitionVersionUpdater), new OAuthServiceJooqImpl(database, featureFlagClient, secretsRepositoryReader, @@ -100,7 +108,8 @@ void beforeEach() throws Exception { 
secretsRepositoryReader, secretsRepositoryWriter, secretPersistenceConfigService, - connectionService), + connectionService, + actorDefinitionVersionUpdater), new WorkspaceServiceJooqImpl(database, featureFlagClient, secretsRepositoryReader, diff --git a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/OrganizationPersistenceTest.java b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/OrganizationPersistenceTest.java index b870c8845a7..14da7b8444f 100644 --- a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/OrganizationPersistenceTest.java +++ b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/OrganizationPersistenceTest.java @@ -38,7 +38,6 @@ class OrganizationPersistenceTest extends BaseConfigDatabaseTest { private OrganizationPersistence organizationPersistence; private UserPersistence userPersistence; - private PermissionPersistence permissionPersistence; private WorkspaceService workspaceService; private TestClient featureFlagClient; private SecretsRepositoryReader secretsRepositoryReader; @@ -47,7 +46,6 @@ class OrganizationPersistenceTest extends BaseConfigDatabaseTest { @BeforeEach void beforeEach() throws Exception { - permissionPersistence = new PermissionPersistence(database); userPersistence = new UserPersistence(database); organizationPersistence = new OrganizationPersistence(database); featureFlagClient = new TestClient(); @@ -206,7 +204,7 @@ void testListOrganizationsByUserId(final Boolean withKeywordSearch, final Boolea .withPba(false) .withOrgLevelBilling(false)); // grant user an admin access to org 1 - permissionPersistence.writePermission(new Permission() + BaseConfigDatabaseTest.writePermission(new Permission() .withPermissionId(UUID.randomUUID()) .withOrganizationId(orgId1) .withUserId(userId) @@ -221,7 +219,7 @@ void testListOrganizationsByUserId(final Boolean withKeywordSearch, final Boolea .withPba(false) 
.withOrgLevelBilling(false)); // grant user an editor access to org 2 - permissionPersistence.writePermission(new Permission() + BaseConfigDatabaseTest.writePermission(new Permission() .withPermissionId(UUID.randomUUID()) .withOrganizationId(orgId2) .withUserId(userId) @@ -236,7 +234,7 @@ void testListOrganizationsByUserId(final Boolean withKeywordSearch, final Boolea .withPba(false) .withOrgLevelBilling(false)); // grant user a read access to org 3 - permissionPersistence.writePermission(new Permission() + BaseConfigDatabaseTest.writePermission(new Permission() .withPermissionId(UUID.randomUUID()) .withOrganizationId(orgId3) .withUserId(userId) diff --git a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/PermissionPersistenceTest.java b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/PermissionPersistenceTest.java index 08685754b0e..c3168c0b50d 100644 --- a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/PermissionPersistenceTest.java +++ b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/PermissionPersistenceTest.java @@ -14,6 +14,7 @@ import io.airbyte.config.UserPermission; import io.airbyte.config.secrets.SecretsRepositoryReader; import io.airbyte.config.secrets.SecretsRepositoryWriter; +import io.airbyte.data.helpers.ActorDefinitionVersionUpdater; import io.airbyte.data.services.ConnectionService; import io.airbyte.data.services.SecretPersistenceConfigService; import io.airbyte.data.services.impls.jooq.ActorDefinitionServiceJooqImpl; @@ -27,15 +28,11 @@ import io.airbyte.featureflag.FeatureFlagClient; import io.airbyte.featureflag.TestClient; import io.airbyte.test.utils.BaseConfigDatabaseTest; -import io.airbyte.validation.json.JsonValidationException; import java.io.IOException; import java.util.List; import java.util.Optional; -import java.util.UUID; -import org.jooq.exception.DataAccessException; import org.junit.jupiter.api.Assertions; 
import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Nested; import org.junit.jupiter.api.Test; class PermissionPersistenceTest extends BaseConfigDatabaseTest { @@ -51,7 +48,7 @@ void beforeEach() throws Exception { setupTestData(); } - private void setupTestData() throws IOException, JsonValidationException { + private void setupTestData() throws Exception { final UserPersistence userPersistence = new UserPersistence(database); final FeatureFlagClient featureFlagClient = mock(TestClient.class); final SecretsRepositoryReader secretsRepositoryReader = mock(SecretsRepositoryReader.class); @@ -59,6 +56,7 @@ private void setupTestData() throws IOException, JsonValidationException { final SecretPersistenceConfigService secretPersistenceConfigService = mock(SecretPersistenceConfigService.class); final ConnectionService connectionService = mock(ConnectionService.class); + final ActorDefinitionVersionUpdater actorDefinitionVersionUpdater = mock(ActorDefinitionVersionUpdater.class); final ConfigRepository configRepository = new ConfigRepository( new ActorDefinitionServiceJooqImpl(database), new CatalogServiceJooqImpl(database), @@ -69,7 +67,8 @@ private void setupTestData() throws IOException, JsonValidationException { secretsRepositoryReader, secretsRepositoryWriter, secretPersistenceConfigService, - connectionService), + connectionService, + actorDefinitionVersionUpdater), new OAuthServiceJooqImpl(database, featureFlagClient, secretsRepositoryReader, @@ -80,7 +79,8 @@ private void setupTestData() throws IOException, JsonValidationException { secretsRepositoryReader, secretsRepositoryWriter, secretPersistenceConfigService, - connectionService), + connectionService, + actorDefinitionVersionUpdater), new WorkspaceServiceJooqImpl(database, featureFlagClient, secretsRepositoryReader, @@ -102,7 +102,7 @@ private void setupTestData() throws IOException, JsonValidationException { // write permission table for (final Permission permission : 
MockData.permissions()) { - permissionPersistence.writePermission(permission); + BaseConfigDatabaseTest.writePermission(permission); } } @@ -145,12 +145,6 @@ void listPermissionByWorkspaceTest() throws IOException { Assertions.assertEquals(2, permissions.size()); } - @Test - void deletePermissionByIdTest() throws IOException { - permissionPersistence.deletePermissionById(MockData.PERMISSION_ID_4); - Assertions.assertEquals(Optional.empty(), permissionPersistence.getPermission(MockData.PERMISSION_ID_4)); - } - @Test void listUsersInOrganizationTest() throws IOException { final List userPermissions = permissionPersistence.listUsersInOrganization(MockData.ORGANIZATION_ID_1); @@ -226,227 +220,4 @@ void isAuthUserInstanceAdmin() throws IOException { Assertions.assertFalse(permissionPersistence.isAuthUserInstanceAdmin(user2.getAuthUserId())); } - @Nested - class WritePermission { - - @Test - void createNewPermission() throws IOException { - final Permission permission = new Permission() - .withPermissionId(UUID.randomUUID()) - .withOrganizationId(MockData.ORGANIZATION_ID_1) - .withPermissionType(PermissionType.ORGANIZATION_ADMIN) - .withUserId(MockData.CREATOR_USER_ID_1); - - Assertions.assertDoesNotThrow(() -> permissionPersistence.writePermission(permission)); - Assertions.assertEquals(permission, permissionPersistence.getPermission(permission.getPermissionId()).orElseThrow()); - } - - @Test - void createPermissionExceptionTest() { - // writing permissions against Permission table constraint should throw db exception. 
- - // invalid permission 1: permission type cannot be null - final Permission invalidPermission1 = new Permission() - .withPermissionId(UUID.randomUUID()) - .withUserId(UUID.randomUUID()) - .withOrganizationId(UUID.randomUUID()) - .withPermissionType(null); - - // invalid permission 2: for workspace level permission, org id should be null and workspace id - // cannot be null - final Permission invalidPermission2 = new Permission() - .withPermissionId(UUID.randomUUID()) - .withUserId(UUID.randomUUID()) - .withOrganizationId(UUID.randomUUID()) - .withPermissionType(PermissionType.WORKSPACE_OWNER); - - // invalid permission 3: for organization level permission, org id cannot be null and workspace id - // should be null - final Permission invalidPermission3 = new Permission() - .withPermissionId(UUID.randomUUID()) - .withUserId(UUID.randomUUID()) - .withWorkspaceId(UUID.randomUUID()) - .withPermissionType(PermissionType.ORGANIZATION_MEMBER); - - Assertions.assertThrows(DataAccessException.class, () -> permissionPersistence.writePermission(invalidPermission1)); - Assertions.assertThrows(DataAccessException.class, () -> permissionPersistence.writePermission(invalidPermission2)); - Assertions.assertThrows(DataAccessException.class, () -> permissionPersistence.writePermission(invalidPermission3)); - } - - } - - /** - * Note that while the Persistence layer allows updates to ID fields, the API layer does not. Since - * blocking such updates is an explicit API-level concern, our persistence layer tests cover updates - * to ID fields. 
- */ - @Nested - class UpdatePermission { - - final Permission instanceAdminPermission = MockData.permission1; - final Permission workspaceAdminPermission = MockData.permission4; - final Permission organizationReaderPermission = MockData.permission7; - - @Test - void updateWorkspacePermission() throws IOException { - final Permission update = workspaceAdminPermission - .withPermissionType(PermissionType.WORKSPACE_READER) // change to a different workspace-level permission type - .withWorkspaceId(MockData.WORKSPACE_ID_2) // change to a different workspace ID - .withUserId(MockData.CREATOR_USER_ID_1); // change to a different user ID - - Assertions.assertDoesNotThrow(() -> permissionPersistence.writePermission(update)); - final Permission updated = permissionPersistence.getPermission(update.getPermissionId()).orElseThrow(); - - Assertions.assertEquals(update, updated); - } - - @Test - void updateOrganizationPermission() throws IOException { - final Permission update = organizationReaderPermission - .withPermissionType(PermissionType.ORGANIZATION_EDITOR) // change to a different organization-level permission type - .withOrganizationId(MockData.ORGANIZATION_ID_3) // change to a different organization ID - .withUserId(MockData.CREATOR_USER_ID_1); // change to a different user ID - - Assertions.assertDoesNotThrow(() -> permissionPersistence.writePermission(update)); - final Permission updated = permissionPersistence.getPermission(update.getPermissionId()).orElseThrow(); - - Assertions.assertEquals(update, updated); - } - - @Test - void updateInstanceAdminPermission() throws IOException { - final Permission update = instanceAdminPermission - .withUserId(MockData.CREATOR_USER_ID_2); // change to a different user ID - - Assertions.assertDoesNotThrow(() -> permissionPersistence.writePermission(update)); - final Permission updated = permissionPersistence.getPermission(update.getPermissionId()).orElseThrow(); - - 
Assertions.assertEquals(instanceAdminPermission.getPermissionId(), updated.getPermissionId()); - Assertions.assertEquals(PermissionType.INSTANCE_ADMIN, updated.getPermissionType()); - Assertions.assertEquals(MockData.CREATOR_USER_ID_2, updated.getUserId()); - } - - @Test - void shouldNotUpdateInstanceAdminPermissionTypeToOthers() { - final Permission update = new Permission() - .withPermissionId(instanceAdminPermission.getPermissionId()) - .withPermissionType(PermissionType.ORGANIZATION_EDITOR); // another permission type - Assertions.assertThrows(DataAccessException.class, () -> permissionPersistence.writePermission(update)); - } - - @Test - void shouldNotUpdateWorkspaceLevelPermissionTypeToOrganizationLevelPermissions() { - final Permission update = new Permission() - .withPermissionId(workspaceAdminPermission.getPermissionId()) - .withPermissionType(PermissionType.ORGANIZATION_EDITOR); // org level permission type - Assertions.assertThrows(DataAccessException.class, () -> permissionPersistence.writePermission(update)); - } - - @Test - void shouldNotUpdateOrganizationLevelPermissionTypeToWorkspaceLevelPermissions() { - final Permission update = new Permission() - .withPermissionId(organizationReaderPermission.getPermissionId()) - .withPermissionType(PermissionType.WORKSPACE_ADMIN); // workspace level permission type - Assertions.assertThrows(DataAccessException.class, () -> permissionPersistence.writePermission(update)); - } - - } - - @Nested - class SpecializedCases { - - @Test - void cannotDeleteLastOrganizationAdmin() throws IOException { - final Permission orgAdmin1 = new Permission() - .withPermissionId(UUID.randomUUID()) - .withOrganizationId(MockData.ORGANIZATION_ID_2) - .withPermissionType(PermissionType.ORGANIZATION_ADMIN) - .withUserId(MockData.CREATOR_USER_ID_1); - final Permission orgAdmin2 = new Permission() - .withPermissionId(UUID.randomUUID()) - .withOrganizationId(MockData.ORGANIZATION_ID_2) - 
.withPermissionType(PermissionType.ORGANIZATION_ADMIN) - .withUserId(MockData.CREATOR_USER_ID_2); - - permissionPersistence.writePermission(orgAdmin1); - permissionPersistence.writePermission(orgAdmin2); - - Assertions.assertDoesNotThrow(() -> permissionPersistence.deletePermissionById(orgAdmin1.getPermissionId())); - final DataAccessException thrown = - Assertions.assertThrows(DataAccessException.class, () -> permissionPersistence.deletePermissionById(orgAdmin2.getPermissionId())); - - Assertions.assertInstanceOf(SQLOperationNotAllowedException.class, thrown.getCause()); - - // make sure the last org-admin permission is still present in the DB - Assertions.assertEquals(orgAdmin2, permissionPersistence.getPermission(orgAdmin2.getPermissionId()).orElseThrow()); - } - - @Test - void cannotDemoteLastOrganizationAdmin() throws IOException { - final Permission orgAdmin1 = new Permission() - .withPermissionId(UUID.randomUUID()) - .withOrganizationId(MockData.ORGANIZATION_ID_2) - .withPermissionType(PermissionType.ORGANIZATION_ADMIN) - .withUserId(MockData.CREATOR_USER_ID_1); - final Permission orgAdmin2 = new Permission() - .withPermissionId(UUID.randomUUID()) - .withOrganizationId(MockData.ORGANIZATION_ID_2) - .withPermissionType(PermissionType.ORGANIZATION_ADMIN) - .withUserId(MockData.CREATOR_USER_ID_2); - - permissionPersistence.writePermission(orgAdmin1); - permissionPersistence.writePermission(orgAdmin2); - - Assertions.assertDoesNotThrow(() -> permissionPersistence.writePermission(orgAdmin1.withPermissionType(PermissionType.ORGANIZATION_EDITOR))); - - final Permission demotionUpdate = orgAdmin2 - .withPermissionId(orgAdmin2.getPermissionId()) - .withPermissionType(PermissionType.ORGANIZATION_EDITOR); - - final DataAccessException thrown = Assertions.assertThrows(DataAccessException.class, - () -> permissionPersistence.writePermission(demotionUpdate)); - - Assertions.assertInstanceOf(SQLOperationNotAllowedException.class, thrown.getCause()); - - // make sure the 
last org-admin is still an org-admin, ie the update did not persist - Assertions.assertEquals( - PermissionType.ORGANIZATION_ADMIN, - permissionPersistence.getPermission(orgAdmin2.getPermissionId()).orElseThrow().getPermissionType()); - } - - @Test - void cannotChangeLastOrganizationAdminToADifferentOrg() throws IOException { - final Permission orgAdmin1 = new Permission() - .withPermissionId(UUID.randomUUID()) - .withOrganizationId(MockData.ORGANIZATION_ID_2) - .withPermissionType(PermissionType.ORGANIZATION_ADMIN) - .withUserId(MockData.CREATOR_USER_ID_1); - final Permission orgAdmin2 = new Permission() - .withPermissionId(UUID.randomUUID()) - .withOrganizationId(MockData.ORGANIZATION_ID_2) - .withPermissionType(PermissionType.ORGANIZATION_ADMIN) - .withUserId(MockData.CREATOR_USER_ID_2); - - permissionPersistence.writePermission(orgAdmin1); - permissionPersistence.writePermission(orgAdmin2); - - Assertions.assertDoesNotThrow(() -> permissionPersistence.writePermission(orgAdmin1.withPermissionType(PermissionType.ORGANIZATION_EDITOR))); - - final Permission demotionUpdate = orgAdmin2 - .withPermissionId(orgAdmin2.getPermissionId()) - .withOrganizationId(MockData.ORGANIZATION_ID_3); - - final DataAccessException thrown = Assertions.assertThrows(DataAccessException.class, - () -> permissionPersistence.writePermission(demotionUpdate)); - - Assertions.assertInstanceOf(SQLOperationNotAllowedException.class, thrown.getCause()); - - // make sure the last org-admin is still in the original org, ie the update did not persist - Assertions.assertEquals( - MockData.ORGANIZATION_ID_2, - permissionPersistence.getPermission(orgAdmin2.getPermissionId()).orElseThrow().getOrganizationId()); - } - - } - } diff --git a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/RefreshJobStateUpdaterTest.java b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/RefreshJobStateUpdaterTest.java new file mode 100644 index 
00000000000..154053c45a3 --- /dev/null +++ b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/RefreshJobStateUpdaterTest.java @@ -0,0 +1,136 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.config.persistence; + +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; + +import com.fasterxml.jackson.databind.JsonNode; +import com.google.common.collect.ImmutableMap; +import io.airbyte.commons.json.Jsons; +import io.airbyte.config.StateType; +import io.airbyte.config.StateWrapper; +import io.airbyte.config.persistence.domain.StreamRefresh; +import io.airbyte.protocol.models.AirbyteGlobalState; +import io.airbyte.protocol.models.AirbyteStateMessage; +import io.airbyte.protocol.models.AirbyteStreamState; +import io.airbyte.protocol.models.StreamDescriptor; +import java.io.IOException; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.UUID; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +public class RefreshJobStateUpdaterTest { + + private StatePersistence statePersistence; + private RefreshJobStateUpdater refreshJobStateUpdater; + + @BeforeEach + public void init() { + statePersistence = mock(StatePersistence.class); + refreshJobStateUpdater = new RefreshJobStateUpdater(statePersistence); + } + + @Test + public void streamStateTest() throws IOException { + final UUID connectionId = UUID.randomUUID(); + final String streamToRefresh = "name"; + final String streamToNotRefresh = "stream-not-refresh"; + final String streamNamespace = "namespace"; + final AirbyteStateMessage stateMessageFromRefreshStream = new AirbyteStateMessage() + .withType(AirbyteStateMessage.AirbyteStateType.STREAM) + .withStream(new AirbyteStreamState() + .withStreamDescriptor(new StreamDescriptor().withName(streamToRefresh).withNamespace(streamNamespace)) + .withStreamState(Jsons.jsonNode(ImmutableMap.of("cursor", 
1)))); + + final AirbyteStateMessage stateMessageFromNonRefreshStream = new AirbyteStateMessage() + .withType(AirbyteStateMessage.AirbyteStateType.STREAM) + .withStream(new AirbyteStreamState() + .withStreamDescriptor(new StreamDescriptor().withName(streamToNotRefresh).withNamespace(streamNamespace)) + .withStreamState(Jsons.jsonNode(ImmutableMap.of("cursor-2", 2)))); + + final StateWrapper stateWrapper = new StateWrapper().withStateType(StateType.STREAM) + .withStateMessages(Arrays.asList(stateMessageFromRefreshStream, stateMessageFromNonRefreshStream)); + + refreshJobStateUpdater.updateStateWrapperForRefresh(connectionId, stateWrapper, + List.of(new StreamRefresh(UUID.randomUUID(), connectionId, streamToRefresh, streamNamespace, null))); + final StateWrapper expected = + new StateWrapper().withStateType(StateType.STREAM).withStateMessages(Collections.singletonList(stateMessageFromNonRefreshStream)); + verify(statePersistence).updateOrCreateState(connectionId, expected); + } + + @Test + public void globalStateTest() throws IOException { + final UUID connectionId = UUID.randomUUID(); + final String streamToRefresh = "name"; + final String streamToNotRefresh = "stream-not-refresh"; + final String streamNamespace = "namespace"; + final AirbyteStreamState stateMessageFromRefreshStream = new AirbyteStreamState() + .withStreamDescriptor(new StreamDescriptor().withName(streamToRefresh).withNamespace(streamNamespace)) + .withStreamState(Jsons.jsonNode(ImmutableMap.of("cursor", 1))); + + final AirbyteStreamState stateMessageFromNonRefreshStream = new AirbyteStreamState() + .withStreamDescriptor(new StreamDescriptor().withName(streamToNotRefresh).withNamespace(streamNamespace)) + .withStreamState(Jsons.jsonNode(ImmutableMap.of("cursor-2", 2))); + + final JsonNode sharedState = Jsons.jsonNode(ImmutableMap.of("shared-state", 5)); + final AirbyteStateMessage existingStateMessage = new AirbyteStateMessage() + .withType(AirbyteStateMessage.AirbyteStateType.GLOBAL) + 
.withGlobal(new AirbyteGlobalState().withSharedState(sharedState) + .withStreamStates(Arrays.asList(stateMessageFromRefreshStream, stateMessageFromNonRefreshStream))); + + final StateWrapper stateWrapper = new StateWrapper().withStateType(StateType.GLOBAL).withGlobal(existingStateMessage); + + final AirbyteStateMessage expectedStateMessage = new AirbyteStateMessage() + .withType(AirbyteStateMessage.AirbyteStateType.GLOBAL) + .withGlobal( + new AirbyteGlobalState().withSharedState(sharedState).withStreamStates(Collections.singletonList(stateMessageFromNonRefreshStream))); + + refreshJobStateUpdater.updateStateWrapperForRefresh(connectionId, stateWrapper, + List.of(new StreamRefresh(UUID.randomUUID(), connectionId, streamToRefresh, streamNamespace, null))); + + final StateWrapper expected = new StateWrapper().withStateType(StateType.GLOBAL).withGlobal(expectedStateMessage); + verify(statePersistence).updateOrCreateState(connectionId, expected); + } + + @Test + public void fullGlobalState() throws IOException { + final UUID connectionId = UUID.randomUUID(); + final String streamToRefresh = "name"; + final String streamToRefresh2 = "stream-refresh2"; + final String streamNamespace = "namespace"; + + final AirbyteStreamState stateMessageFromRefreshStream = new AirbyteStreamState() + .withStreamDescriptor(new StreamDescriptor().withName(streamToRefresh).withNamespace(streamNamespace)) + .withStreamState(Jsons.jsonNode(ImmutableMap.of("cursor", 1))); + + final AirbyteStreamState stateMessageFromNonRefreshStream = new AirbyteStreamState() + .withStreamDescriptor(new StreamDescriptor().withName(streamToRefresh2).withNamespace(streamNamespace)) + .withStreamState(Jsons.jsonNode(ImmutableMap.of("cursor-2", 2))); + + final JsonNode sharedState = Jsons.jsonNode(ImmutableMap.of("shared-state", 5)); + + final AirbyteStateMessage existingStateMessage = new AirbyteStateMessage() + .withType(AirbyteStateMessage.AirbyteStateType.GLOBAL) + .withGlobal(new 
AirbyteGlobalState().withSharedState(sharedState) + .withStreamStates(Arrays.asList(stateMessageFromRefreshStream, stateMessageFromNonRefreshStream))); + + final StateWrapper stateWrapper = new StateWrapper().withStateType(StateType.GLOBAL).withGlobal(existingStateMessage); + + refreshJobStateUpdater.updateStateWrapperForRefresh(connectionId, stateWrapper, + List.of(new StreamRefresh(UUID.randomUUID(), connectionId, streamToRefresh, streamNamespace, null), + new StreamRefresh(UUID.randomUUID(), connectionId, streamToRefresh2, streamNamespace, null))); + final AirbyteStateMessage expectedStateMessage = new AirbyteStateMessage() + .withType(AirbyteStateMessage.AirbyteStateType.GLOBAL) + .withGlobal(new AirbyteGlobalState().withSharedState(null).withStreamStates(Collections.emptyList())); + + final StateWrapper expected = new StateWrapper().withStateType(StateType.GLOBAL).withGlobal(expectedStateMessage); + verify(statePersistence).updateOrCreateState(connectionId, expected); + } + +} diff --git a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/StandardSyncPersistenceTest.java b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/StandardSyncPersistenceTest.java index ec25c9018c7..60bdf1bbe4e 100644 --- a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/StandardSyncPersistenceTest.java +++ b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/StandardSyncPersistenceTest.java @@ -37,7 +37,10 @@ import io.airbyte.config.persistence.ConfigRepository.StandardSyncQuery; import io.airbyte.config.secrets.SecretsRepositoryReader; import io.airbyte.config.secrets.SecretsRepositoryWriter; +import io.airbyte.data.helpers.ActorDefinitionVersionUpdater; +import io.airbyte.data.services.ActorDefinitionService; import io.airbyte.data.services.ConnectionService; +import io.airbyte.data.services.ScopedConfigurationService; import 
io.airbyte.data.services.SecretPersistenceConfigService; import io.airbyte.data.services.impls.jooq.ActorDefinitionServiceJooqImpl; import io.airbyte.data.services.impls.jooq.CatalogServiceJooqImpl; @@ -99,8 +102,12 @@ void beforeEach() throws Exception { final SecretsRepositoryReader secretsRepositoryReader = mock(SecretsRepositoryReader.class); final SecretsRepositoryWriter secretsRepositoryWriter = mock(SecretsRepositoryWriter.class); final SecretPersistenceConfigService secretPersistenceConfigService = mock(SecretPersistenceConfigService.class); + final ScopedConfigurationService scopedConfigurationService = mock(ScopedConfigurationService.class); final ConnectionService connectionService = new ConnectionServiceJooqImpl(database); + final ActorDefinitionService actorDefinitionService = new ActorDefinitionServiceJooqImpl(database); + final ActorDefinitionVersionUpdater actorDefinitionVersionUpdater = + new ActorDefinitionVersionUpdater(featureFlagClient, connectionService, actorDefinitionService, scopedConfigurationService); configRepository = new ConfigRepository( new ActorDefinitionServiceJooqImpl(database), new CatalogServiceJooqImpl(database), @@ -111,7 +118,8 @@ void beforeEach() throws Exception { secretsRepositoryReader, secretsRepositoryWriter, secretPersistenceConfigService, - connectionService), + connectionService, + actorDefinitionVersionUpdater), new OAuthServiceJooqImpl(database, featureFlagClient, secretsRepositoryReader, @@ -122,7 +130,8 @@ void beforeEach() throws Exception { secretsRepositoryReader, secretsRepositoryWriter, secretPersistenceConfigService, - connectionService), + connectionService, + actorDefinitionVersionUpdater), new WorkspaceServiceJooqImpl(database, featureFlagClient, secretsRepositoryReader, diff --git a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/StatePersistenceTest.java b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/StatePersistenceTest.java index 
590046312f6..065b06bc395 100644 --- a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/StatePersistenceTest.java +++ b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/StatePersistenceTest.java @@ -21,7 +21,10 @@ import io.airbyte.config.StateWrapper; import io.airbyte.config.secrets.SecretsRepositoryReader; import io.airbyte.config.secrets.SecretsRepositoryWriter; +import io.airbyte.data.helpers.ActorDefinitionVersionUpdater; +import io.airbyte.data.services.ActorDefinitionService; import io.airbyte.data.services.ConnectionService; +import io.airbyte.data.services.ScopedConfigurationService; import io.airbyte.data.services.SecretPersistenceConfigService; import io.airbyte.data.services.impls.jooq.ActorDefinitionServiceJooqImpl; import io.airbyte.data.services.impls.jooq.CatalogServiceJooqImpl; @@ -48,6 +51,7 @@ import java.util.Collections; import java.util.List; import java.util.Optional; +import java.util.Set; import java.util.UUID; import org.jooq.JSONB; import org.jooq.impl.DSL; @@ -70,17 +74,21 @@ class StatePersistenceTest extends BaseConfigDatabaseTest { void beforeEach() throws DatabaseInitializationException, IOException, JsonValidationException, SQLException { truncateAllTables(); - setupTestData(); + connectionId = setupTestData(); statePersistence = new StatePersistence(database); } - private void setupTestData() throws JsonValidationException, IOException { + private UUID setupTestData() throws JsonValidationException, IOException { final FeatureFlagClient featureFlagClient = mock(TestClient.class); final SecretsRepositoryReader secretsRepositoryReader = mock(SecretsRepositoryReader.class); final SecretsRepositoryWriter secretsRepositoryWriter = mock(SecretsRepositoryWriter.class); final SecretPersistenceConfigService secretPersistenceConfigService = mock(SecretPersistenceConfigService.class); + final ScopedConfigurationService scopedConfigurationService = 
mock(ScopedConfigurationService.class); final ConnectionService connectionService = new ConnectionServiceJooqImpl(database); + final ActorDefinitionService actorDefinitionService = new ActorDefinitionServiceJooqImpl(database); + final ActorDefinitionVersionUpdater actorDefinitionVersionUpdater = + new ActorDefinitionVersionUpdater(featureFlagClient, connectionService, actorDefinitionService, scopedConfigurationService); final ConfigRepository configRepository = new ConfigRepository( new ActorDefinitionServiceJooqImpl(database), new CatalogServiceJooqImpl(database), @@ -91,7 +99,8 @@ private void setupTestData() throws JsonValidationException, IOException { secretsRepositoryReader, secretsRepositoryWriter, secretPersistenceConfigService, - connectionService), + connectionService, + actorDefinitionVersionUpdater), new OAuthServiceJooqImpl(database, featureFlagClient, secretsRepositoryReader, @@ -102,7 +111,8 @@ private void setupTestData() throws JsonValidationException, IOException { secretsRepositoryReader, secretsRepositoryWriter, secretPersistenceConfigService, - connectionService), + connectionService, + actorDefinitionVersionUpdater), new WorkspaceServiceJooqImpl(database, featureFlagClient, secretsRepositoryReader, @@ -130,7 +140,7 @@ private void setupTestData() throws JsonValidationException, IOException { configRepository.writeDestinationConnectionNoSecrets(destinationConnection); configRepository.writeStandardSync(sync); - connectionId = sync.getConnectionId(); + return sync.getConnectionId(); } @Test @@ -594,6 +604,221 @@ void testStatePersistenceLegacyWriteConsistency() throws IOException, SQLExcepti Assertions.assertEquals(readStates.get(0).getState(), stateWrapper.getLegacyState()); } + @Test + void testBulkDeletePerStream() throws IOException { + final StateWrapper perStreamToModify = new StateWrapper() + .withStateType(StateType.STREAM) + .withStateMessages(Arrays.asList( + new AirbyteStateMessage() + .withType(AirbyteStateType.STREAM) + 
.withStream(new AirbyteStreamState() + .withStreamDescriptor(new StreamDescriptor().withName("del-1").withNamespace("del-n1")) + .withStreamState(Jsons.deserialize(""))), + new AirbyteStateMessage() + .withType(AirbyteStateType.STREAM) + .withStream(new AirbyteStreamState() + .withStreamDescriptor(new StreamDescriptor().withName("keep-1")) + .withStreamState(Jsons.deserialize(""))), + new AirbyteStateMessage() + .withType(AirbyteStateType.STREAM) + .withStream(new AirbyteStreamState() + .withStreamDescriptor(new StreamDescriptor().withName("del-2")) + .withStreamState(Jsons.deserialize(""))), + new AirbyteStateMessage() + .withType(AirbyteStateType.STREAM) + .withStream(new AirbyteStreamState() + .withStreamDescriptor(new StreamDescriptor().withName("del-1").withNamespace("del-n2")) + .withStreamState(Jsons.deserialize(""))), + new AirbyteStateMessage() + .withType(AirbyteStateType.STREAM) + .withStream(new AirbyteStreamState() + .withStreamDescriptor(new StreamDescriptor().withName("keep-1").withNamespace("keep-n1")) + .withStreamState(Jsons.deserialize(""))))); + statePersistence.updateOrCreateState(connectionId, clone(perStreamToModify)); + + final var toDelete = Set.of( + new StreamDescriptor().withName("del-1").withNamespace("del-n1"), + new StreamDescriptor().withName("del-2"), + new StreamDescriptor().withName("del-1").withNamespace("del-n2")); + statePersistence.bulkDelete(connectionId, toDelete); + + var curr = statePersistence.getCurrentState(connectionId); + final StateWrapper exp = new StateWrapper() + .withStateType(StateType.STREAM) + .withStateMessages(Arrays.asList( + new AirbyteStateMessage() + .withType(AirbyteStateType.STREAM) + .withStream(new AirbyteStreamState() + .withStreamDescriptor(new StreamDescriptor().withName("keep-1")) + .withStreamState(Jsons.deserialize(""))), + new AirbyteStateMessage() + .withType(AirbyteStateType.STREAM) + .withStream(new AirbyteStreamState() + .withStreamDescriptor(new 
StreamDescriptor().withName("keep-1").withNamespace("keep-n1")) + .withStreamState(Jsons.deserialize(""))))); + assertEquals(exp, curr.get()); + } + + @Test + void testBulkDeleteGlobal() throws IOException { + final StateWrapper globalToModify = new StateWrapper() + .withStateType(StateType.GLOBAL) + .withGlobal(new AirbyteStateMessage() + .withType(AirbyteStateType.GLOBAL) + .withGlobal(new AirbyteGlobalState() + .withSharedState(Jsons.deserialize("\"woot\"")) + .withStreamStates(Arrays.asList( + new AirbyteStreamState() + .withStreamDescriptor(new StreamDescriptor().withName("del-1").withNamespace("del-n1")) + .withStreamState(Jsons.deserialize("")), + new AirbyteStreamState() + .withStreamDescriptor(new StreamDescriptor().withName("keep-1")) + .withStreamState(Jsons.deserialize("")), + new AirbyteStreamState() + .withStreamDescriptor(new StreamDescriptor().withName("del-2")) + .withStreamState(Jsons.deserialize("")), + new AirbyteStreamState() + .withStreamDescriptor(new StreamDescriptor().withName("del-1").withNamespace("del-n2")) + .withStreamState(Jsons.deserialize("")), + new AirbyteStreamState() + .withStreamDescriptor(new StreamDescriptor().withName("keep-1").withNamespace("keep-n1")) + .withStreamState(Jsons.deserialize("")))))); + + statePersistence.updateOrCreateState(connectionId, clone(globalToModify)); + + final var toDelete = Set.of( + new StreamDescriptor().withName("del-1").withNamespace("del-n1"), + new StreamDescriptor().withName("del-2"), + new StreamDescriptor().withName("del-1").withNamespace("del-n2")); + statePersistence.bulkDelete(connectionId, toDelete); + + var curr = statePersistence.getCurrentState(connectionId); + final StateWrapper exp = new StateWrapper() + .withStateType(StateType.GLOBAL) + .withGlobal(new AirbyteStateMessage() + .withType(AirbyteStateType.GLOBAL) + .withGlobal(new AirbyteGlobalState() + .withSharedState(Jsons.deserialize("\"woot\"")) + .withStreamStates(Arrays.asList( + new AirbyteStreamState() + 
.withStreamDescriptor(new StreamDescriptor().withName("keep-1")) + .withStreamState(Jsons.deserialize("")), + new AirbyteStreamState() + .withStreamDescriptor(new StreamDescriptor().withName("keep-1").withNamespace("keep-n1")) + .withStreamState(Jsons.deserialize("")))))); + assertEquals(exp, curr.get()); + } + + @Test + void testBulkDeleteCorrectConnection() throws IOException, JsonValidationException { + final StateWrapper globalToModify = new StateWrapper() + .withStateType(StateType.GLOBAL) + .withGlobal(new AirbyteStateMessage() + .withType(AirbyteStateType.GLOBAL) + .withGlobal(new AirbyteGlobalState() + .withSharedState(Jsons.deserialize("\"woot\"")) + .withStreamStates(Arrays.asList( + new AirbyteStreamState() + .withStreamDescriptor(new StreamDescriptor().withName("del-1").withNamespace("del-n1")) + .withStreamState(Jsons.deserialize("")), + new AirbyteStreamState() + .withStreamDescriptor(new StreamDescriptor().withName("keep-1")) + .withStreamState(Jsons.deserialize("")), + new AirbyteStreamState() + .withStreamDescriptor(new StreamDescriptor().withName("del-2")) + .withStreamState(Jsons.deserialize("")))))); + + statePersistence.updateOrCreateState(connectionId, clone(globalToModify)); + + final var secondConn = setupSecondConnection(); + statePersistence.updateOrCreateState(secondConn, clone(globalToModify)); + + final var toDelete = Set.of( + new StreamDescriptor().withName("del-1").withNamespace("del-n1"), + new StreamDescriptor().withName("del-2")); + statePersistence.bulkDelete(connectionId, toDelete); + + var curr = statePersistence.getCurrentState(connectionId); + final StateWrapper exp = new StateWrapper() + .withStateType(StateType.GLOBAL) + .withGlobal(new AirbyteStateMessage() + .withType(AirbyteStateType.GLOBAL) + .withGlobal(new AirbyteGlobalState() + .withSharedState(Jsons.deserialize("\"woot\"")) + .withStreamStates(Collections.singletonList( + new AirbyteStreamState() + .withStreamDescriptor(new StreamDescriptor().withName("keep-1")) + 
.withStreamState(Jsons.deserialize("")))))); + assertEquals(exp, curr.get()); + + var untouched = statePersistence.getCurrentState(secondConn); + assertEquals(globalToModify, untouched.get()); + } + + private UUID setupSecondConnection() throws JsonValidationException, IOException { + final FeatureFlagClient featureFlagClient = mock(TestClient.class); + final SecretsRepositoryReader secretsRepositoryReader = mock(SecretsRepositoryReader.class); + final SecretsRepositoryWriter secretsRepositoryWriter = mock(SecretsRepositoryWriter.class); + final SecretPersistenceConfigService secretPersistenceConfigService = mock(SecretPersistenceConfigService.class); + final ScopedConfigurationService scopedConfigurationService = mock(ScopedConfigurationService.class); + + final ConnectionService connectionService = new ConnectionServiceJooqImpl(database); + final ActorDefinitionService actorDefinitionService = new ActorDefinitionServiceJooqImpl(database); + final ActorDefinitionVersionUpdater actorDefinitionVersionUpdater = + new ActorDefinitionVersionUpdater(featureFlagClient, connectionService, actorDefinitionService, scopedConfigurationService); + final ConfigRepository configRepository = new ConfigRepository( + new ActorDefinitionServiceJooqImpl(database), + new CatalogServiceJooqImpl(database), + connectionService, + new ConnectorBuilderServiceJooqImpl(database), + new DestinationServiceJooqImpl(database, + featureFlagClient, + secretsRepositoryReader, + secretsRepositoryWriter, + secretPersistenceConfigService, + connectionService, + actorDefinitionVersionUpdater), + new OAuthServiceJooqImpl(database, + featureFlagClient, + secretsRepositoryReader, + secretPersistenceConfigService), + new OperationServiceJooqImpl(database), + new SourceServiceJooqImpl(database, + featureFlagClient, + secretsRepositoryReader, + secretsRepositoryWriter, + secretPersistenceConfigService, + connectionService, + actorDefinitionVersionUpdater), + new WorkspaceServiceJooqImpl(database, + 
featureFlagClient, + secretsRepositoryReader, + secretsRepositoryWriter, + secretPersistenceConfigService)); + + final StandardWorkspace workspace = MockData.standardWorkspaces().get(0); + final StandardSourceDefinition sourceDefinition = MockData.publicSourceDefinition(); + final SourceConnection sourceConnection = MockData.sourceConnections().get(0); + final ActorDefinitionVersion actorDefinitionVersion = MockData.actorDefinitionVersion() + .withActorDefinitionId(sourceDefinition.getSourceDefinitionId()) + .withVersionId(sourceDefinition.getDefaultVersionId()); + final StandardDestinationDefinition destinationDefinition = MockData.grantableDestinationDefinition1(); + final ActorDefinitionVersion actorDefinitionVersion2 = MockData.actorDefinitionVersion() + .withActorDefinitionId(destinationDefinition.getDestinationDefinitionId()) + .withVersionId(destinationDefinition.getDefaultVersionId()); + final DestinationConnection destinationConnection = MockData.destinationConnections().get(1); + // we don't need sync operations in this test suite, zero them out. 
+ final StandardSync sync = Jsons.clone(MockData.standardSyncs().get(1)).withOperationIds(Collections.emptyList()); + + configRepository.writeStandardWorkspaceNoSecrets(workspace); + configRepository.writeConnectorMetadata(sourceDefinition, actorDefinitionVersion); + configRepository.writeSourceConnectionNoSecrets(sourceConnection); + configRepository.writeConnectorMetadata(destinationDefinition, actorDefinitionVersion2); + configRepository.writeDestinationConnectionNoSecrets(destinationConnection); + configRepository.writeStandardSync(sync); + return sync.getConnectionId(); + } + private StateWrapper clone(final StateWrapper state) { return switch (state.getStateType()) { case LEGACY -> new StateWrapper() diff --git a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/SyncOperationPersistenceTest.java b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/SyncOperationPersistenceTest.java index 3c6b97b8847..79eccbeabfe 100644 --- a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/SyncOperationPersistenceTest.java +++ b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/SyncOperationPersistenceTest.java @@ -18,6 +18,7 @@ import io.airbyte.config.StandardWorkspace; import io.airbyte.config.secrets.SecretsRepositoryReader; import io.airbyte.config.secrets.SecretsRepositoryWriter; +import io.airbyte.data.helpers.ActorDefinitionVersionUpdater; import io.airbyte.data.services.ConnectionService; import io.airbyte.data.services.SecretPersistenceConfigService; import io.airbyte.data.services.impls.jooq.ActorDefinitionServiceJooqImpl; @@ -92,6 +93,7 @@ void beforeEach() throws Exception { final SecretPersistenceConfigService secretPersistenceConfigService = mock(SecretPersistenceConfigService.class); final ConnectionService connectionService = mock(ConnectionService.class); + final ActorDefinitionVersionUpdater actorDefinitionVersionUpdater = 
mock(ActorDefinitionVersionUpdater.class); configRepository = new ConfigRepository( new ActorDefinitionServiceJooqImpl(database), new CatalogServiceJooqImpl(database), @@ -102,7 +104,8 @@ void beforeEach() throws Exception { secretsRepositoryReader, secretsRepositoryWriter, secretPersistenceConfigService, - connectionService), + connectionService, + actorDefinitionVersionUpdater), new OAuthServiceJooqImpl(database, featureFlagClient, secretsRepositoryReader, @@ -113,7 +116,8 @@ void beforeEach() throws Exception { secretsRepositoryReader, secretsRepositoryWriter, secretPersistenceConfigService, - connectionService), + connectionService, + actorDefinitionVersionUpdater), new WorkspaceServiceJooqImpl(database, featureFlagClient, secretsRepositoryReader, diff --git a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/UserPersistenceTest.java b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/UserPersistenceTest.java index 06ff904c55e..c9c59e0cc76 100644 --- a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/UserPersistenceTest.java +++ b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/UserPersistenceTest.java @@ -15,6 +15,7 @@ import io.airbyte.config.User; import io.airbyte.config.secrets.SecretsRepositoryReader; import io.airbyte.config.secrets.SecretsRepositoryWriter; +import io.airbyte.data.helpers.ActorDefinitionVersionUpdater; import io.airbyte.data.services.ConnectionService; import io.airbyte.data.services.SecretPersistenceConfigService; import io.airbyte.data.services.impls.jooq.ActorDefinitionServiceJooqImpl; @@ -54,6 +55,7 @@ void setup() { final SecretPersistenceConfigService secretPersistenceConfigService = mock(SecretPersistenceConfigService.class); final ConnectionService connectionService = mock(ConnectionService.class); + final ActorDefinitionVersionUpdater actorDefinitionVersionUpdater = mock(ActorDefinitionVersionUpdater.class); 
configRepository = new ConfigRepository( new ActorDefinitionServiceJooqImpl(database), new CatalogServiceJooqImpl(database), @@ -64,7 +66,8 @@ void setup() { secretsRepositoryReader, secretsRepositoryWriter, secretPersistenceConfigService, - connectionService), + connectionService, + actorDefinitionVersionUpdater), new OAuthServiceJooqImpl(database, featureFlagClient, secretsRepositoryReader, @@ -75,7 +78,8 @@ void setup() { secretsRepositoryReader, secretsRepositoryWriter, secretPersistenceConfigService, - connectionService), + connectionService, + actorDefinitionVersionUpdater), new WorkspaceServiceJooqImpl(database, featureFlagClient, secretsRepositoryReader, @@ -144,6 +148,16 @@ void getUserByEmailTest() throws IOException { } } + @Test + void getUsersByEmailTest() throws IOException { + for (final User user : MockData.dupEmailUsers()) { + userPersistence.writeUser(user); + } + + final List usersWithSameEmail = userPersistence.getUsersByEmail(MockData.DUP_EMAIL); + Assertions.assertEquals(new HashSet<>(MockData.dupEmailUsers()), new HashSet<>(usersWithSameEmail)); + } + @Test void deleteUserByIdTest() throws IOException { userPersistence.deleteUserById(MockData.CREATOR_USER_ID_1); @@ -274,7 +288,6 @@ class UserAccessTests { void setup() throws IOException, JsonValidationException, SQLException { truncateAllTables(); - final PermissionPersistence permissionPersistence = new PermissionPersistence(database); final OrganizationPersistence organizationPersistence = new OrganizationPersistence(database); organizationPersistence.createOrganization(ORG); @@ -289,7 +302,7 @@ void setup() throws IOException, JsonValidationException, SQLException { for (final Permission permission : List.of(ORG_MEMBER_USER_PERMISSION, ORG_READER_PERMISSION, WORKSPACE_2_READER_PERMISSION, WORKSPACE_3_READER_PERMISSION, BOTH_USER_WORKSPACE_PERMISSION, BOTH_USER_ORGANIZATION_PERMISSION)) { - permissionPersistence.writePermission(permission); + 
BaseConfigDatabaseTest.writePermission(permission); } } diff --git a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/WorkspaceFilterTest.java b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/WorkspaceFilterTest.java index 9371b9d6cf2..1a98873eec3 100644 --- a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/WorkspaceFilterTest.java +++ b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/WorkspaceFilterTest.java @@ -15,6 +15,7 @@ import io.airbyte.config.secrets.SecretsRepositoryReader; import io.airbyte.config.secrets.SecretsRepositoryWriter; +import io.airbyte.data.helpers.ActorDefinitionVersionUpdater; import io.airbyte.data.services.ConnectionService; import io.airbyte.data.services.SecretPersistenceConfigService; import io.airbyte.data.services.impls.jooq.ActorDefinitionServiceJooqImpl; @@ -133,6 +134,7 @@ void beforeEach() { final SecretPersistenceConfigService secretPersistenceConfigService = mock(SecretPersistenceConfigService.class); final ConnectionService connectionService = mock(ConnectionService.class); + final ActorDefinitionVersionUpdater actorDefinitionVersionUpdater = mock(ActorDefinitionVersionUpdater.class); configRepository = new ConfigRepository( new ActorDefinitionServiceJooqImpl(database), new CatalogServiceJooqImpl(database), @@ -143,7 +145,8 @@ void beforeEach() { secretsRepositoryReader, secretsRepositoryWriter, secretPersistenceConfigService, - connectionService), + connectionService, + actorDefinitionVersionUpdater), new OAuthServiceJooqImpl(database, featureFlagClient, secretsRepositoryReader, @@ -154,7 +157,8 @@ void beforeEach() { secretsRepositoryReader, secretsRepositoryWriter, secretPersistenceConfigService, - connectionService), + connectionService, + actorDefinitionVersionUpdater), new WorkspaceServiceJooqImpl(database, featureFlagClient, secretsRepositoryReader, diff --git 
a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/WorkspacePersistenceTest.java b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/WorkspacePersistenceTest.java index 6f48c4c2e4c..2bd01503104 100644 --- a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/WorkspacePersistenceTest.java +++ b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/WorkspacePersistenceTest.java @@ -31,9 +31,11 @@ import io.airbyte.config.persistence.ConfigRepository.ResourcesByUserQueryPaginated; import io.airbyte.config.secrets.SecretsRepositoryReader; import io.airbyte.config.secrets.SecretsRepositoryWriter; +import io.airbyte.data.helpers.ActorDefinitionVersionUpdater; +import io.airbyte.data.services.ActorDefinitionService; import io.airbyte.data.services.ConnectionService; +import io.airbyte.data.services.ScopedConfigurationService; import io.airbyte.data.services.SecretPersistenceConfigService; -import io.airbyte.data.services.WorkspaceService; import io.airbyte.data.services.impls.jooq.ActorDefinitionServiceJooqImpl; import io.airbyte.data.services.impls.jooq.CatalogServiceJooqImpl; import io.airbyte.data.services.impls.jooq.ConnectorBuilderServiceJooqImpl; @@ -70,9 +72,7 @@ class WorkspacePersistenceTest extends BaseConfigDatabaseTest { private ConfigRepository configRepository; private WorkspacePersistence workspacePersistence; - private PermissionPersistence permissionPersistence; private UserPersistence userPersistence; - private WorkspaceService workspaceService; private FeatureFlagClient featureFlagClient; private SecretsRepositoryReader secretsRepositoryReader; private SecretsRepositoryWriter secretsRepositoryWriter; @@ -84,10 +84,12 @@ void setup() throws Exception { secretsRepositoryReader = mock(SecretsRepositoryReader.class); secretsRepositoryWriter = mock(SecretsRepositoryWriter.class); secretPersistenceConfigService = 
mock(SecretPersistenceConfigService.class); - workspaceService = spy(new WorkspaceServiceJooqImpl(database, featureFlagClient, secretsRepositoryReader, secretsRepositoryWriter, - secretPersistenceConfigService)); final ConnectionService connectionService = mock(ConnectionService.class); + final ScopedConfigurationService scopedConfigurationService = mock(ScopedConfigurationService.class); + final ActorDefinitionService actorDefinitionService = new ActorDefinitionServiceJooqImpl(database); + final ActorDefinitionVersionUpdater actorDefinitionVersionUpdater = + new ActorDefinitionVersionUpdater(featureFlagClient, connectionService, actorDefinitionService, scopedConfigurationService); configRepository = spy( new ConfigRepository( new ActorDefinitionServiceJooqImpl(database), @@ -99,7 +101,8 @@ void setup() throws Exception { secretsRepositoryReader, secretsRepositoryWriter, secretPersistenceConfigService, - connectionService), + connectionService, + actorDefinitionVersionUpdater), new OAuthServiceJooqImpl(database, featureFlagClient, secretsRepositoryReader, @@ -110,14 +113,14 @@ void setup() throws Exception { secretsRepositoryReader, secretsRepositoryWriter, secretPersistenceConfigService, - connectionService), + connectionService, + actorDefinitionVersionUpdater), new WorkspaceServiceJooqImpl(database, featureFlagClient, secretsRepositoryReader, secretsRepositoryWriter, secretPersistenceConfigService))); workspacePersistence = new WorkspacePersistence(database); - permissionPersistence = new PermissionPersistence(database); userPersistence = new UserPersistence(database); final OrganizationPersistence organizationPersistence = new OrganizationPersistence(database); @@ -438,14 +441,14 @@ void testListWorkspacesByUserIdWithKeywordWithPagination() throws Exception { configRepository.writeStandardWorkspaceNoSecrets(workspace2); // create a workspace permission for workspace 1 - permissionPersistence.writePermission(new Permission() + 
BaseConfigDatabaseTest.writePermission(new Permission() .withPermissionId(UUID.randomUUID()) .withWorkspaceId(workspaceId1) .withUserId(userId) .withPermissionType(PermissionType.WORKSPACE_OWNER)); // create an org permission that should grant access to workspace 2 and 3 - permissionPersistence.writePermission(new Permission() + BaseConfigDatabaseTest.writePermission(new Permission() .withPermissionId(UUID.randomUUID()) .withOrganizationId(MockData.ORGANIZATION_ID_2) .withUserId(userId) @@ -501,14 +504,14 @@ void testListWorkspacesByUserIdWithoutKeywordWithoutPagination() throws Exceptio configRepository.writeStandardWorkspaceNoSecrets(workspace3); // create a workspace-level permission for workspace 1 - permissionPersistence.writePermission(new Permission() + BaseConfigDatabaseTest.writePermission(new Permission() .withPermissionId(UUID.randomUUID()) .withWorkspaceId(workspace1Id) .withUserId(userId) .withPermissionType(PermissionType.WORKSPACE_READER)); // create an org-level permission that should grant access to workspace 2 - permissionPersistence.writePermission(new Permission() + BaseConfigDatabaseTest.writePermission(new Permission() .withPermissionId(UUID.randomUUID()) .withOrganizationId(MockData.ORGANIZATION_ID_2) .withUserId(userId) @@ -516,7 +519,7 @@ void testListWorkspacesByUserIdWithoutKeywordWithoutPagination() throws Exceptio // create an org-member permission that should NOT grant access to workspace 3, because // org-member is too low of a permission to grant read-access to workspaces in the org. 
- permissionPersistence.writePermission(new Permission() + BaseConfigDatabaseTest.writePermission(new Permission() .withPermissionId(UUID.randomUUID()) .withOrganizationId(MockData.ORGANIZATION_ID_3) .withUserId(userId) diff --git a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/version_overrides/ConfigurationDefinitionVersionOverrideProviderTest.java b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/version_overrides/ConfigurationDefinitionVersionOverrideProviderTest.java index ca61c9a74a0..ade11d17e79 100644 --- a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/version_overrides/ConfigurationDefinitionVersionOverrideProviderTest.java +++ b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/version_overrides/ConfigurationDefinitionVersionOverrideProviderTest.java @@ -7,6 +7,8 @@ import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertThrows; import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.verifyNoInteractions; @@ -17,6 +19,7 @@ import io.airbyte.config.ActorDefinitionVersion; import io.airbyte.config.ActorType; import io.airbyte.config.AllowedHosts; +import io.airbyte.config.ConfigOriginType; import io.airbyte.config.ConfigResourceType; import io.airbyte.config.ConfigSchema; import io.airbyte.config.ConfigScopeType; @@ -25,11 +28,20 @@ import io.airbyte.config.ScopedConfiguration; import io.airbyte.config.StandardWorkspace; import io.airbyte.config.SuggestedStreams; +import io.airbyte.config.persistence.ActorDefinitionVersionHelper.ActorDefinitionVersionWithOverrideStatus; import io.airbyte.data.exceptions.ConfigNotFoundException; import 
io.airbyte.data.services.ActorDefinitionService; import io.airbyte.data.services.ScopedConfigurationService; import io.airbyte.data.services.WorkspaceService; import io.airbyte.data.services.shared.ConnectorVersionKey; +import io.airbyte.featureflag.FeatureFlagClient; +import io.airbyte.featureflag.TestClient; +import io.airbyte.featureflag.UseActorScopedDefaultVersions; +import io.airbyte.featureflag.UseBreakingChangeScopedConfigs; +import io.airbyte.featureflag.Workspace; +import io.airbyte.metrics.lib.MetricAttribute; +import io.airbyte.metrics.lib.MetricClient; +import io.airbyte.metrics.lib.OssMetricsRegistry; import io.airbyte.protocol.models.ConnectorSpecification; import io.airbyte.validation.json.JsonValidationException; import java.io.IOException; @@ -39,6 +51,8 @@ import java.util.UUID; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.ValueSource; class ConfigurationDefinitionVersionOverrideProviderTest { @@ -65,6 +79,7 @@ class ConfigurationDefinitionVersionOverrideProviderTest { .withNormalizationTag("tag") .withNormalizationIntegrationType("bigquery"); private static final ActorDefinitionVersion DEFAULT_VERSION = new ActorDefinitionVersion() + .withVersionId(UUID.randomUUID()) .withDockerRepository(DOCKER_REPOSITORY) .withActorDefinitionId(ACTOR_DEFINITION_ID) .withDockerImageTag(DOCKER_IMAGE_TAG) @@ -77,6 +92,7 @@ class ConfigurationDefinitionVersionOverrideProviderTest { .withSupportsDbt(true) .withNormalizationConfig(NORMALIZATION_CONFIG); private static final ActorDefinitionVersion OVERRIDE_VERSION = new ActorDefinitionVersion() + .withVersionId(UUID.randomUUID()) .withDockerRepository(DOCKER_REPOSITORY) .withActorDefinitionId(ACTOR_DEFINITION_ID) .withDockerImageTag(DOCKER_IMAGE_TAG_2) @@ -92,6 +108,8 @@ class ConfigurationDefinitionVersionOverrideProviderTest { private WorkspaceService mWorkspaceService; private 
ActorDefinitionService mActorDefinitionService; private ScopedConfigurationService mScopedConfigurationService; + private FeatureFlagClient mFeatureFlagClient; + private MetricClient mMetricClient; private ConfigurationDefinitionVersionOverrideProvider overrideProvider; @BeforeEach @@ -99,10 +117,19 @@ void setup() throws JsonValidationException, ConfigNotFoundException, IOExceptio mWorkspaceService = mock(WorkspaceService.class); mActorDefinitionService = mock(ActorDefinitionService.class); mScopedConfigurationService = mock(ScopedConfigurationService.class); - overrideProvider = new ConfigurationDefinitionVersionOverrideProvider(mWorkspaceService, mActorDefinitionService, mScopedConfigurationService); + mFeatureFlagClient = mock(TestClient.class); + mMetricClient = mock(MetricClient.class); + overrideProvider = new ConfigurationDefinitionVersionOverrideProvider(mWorkspaceService, mActorDefinitionService, mScopedConfigurationService, + mFeatureFlagClient, mMetricClient); when(mWorkspaceService.getStandardWorkspaceNoSecrets(WORKSPACE_ID, true)) .thenReturn(new StandardWorkspace().withOrganizationId(ORGANIZATION_ID)); + + when(mFeatureFlagClient.boolVariation(eq(UseActorScopedDefaultVersions.INSTANCE), any())) + .thenReturn(true); + + when(mFeatureFlagClient.boolVariation(eq(UseBreakingChangeScopedConfigs.INSTANCE), any())) + .thenReturn(true); } @Test @@ -113,7 +140,7 @@ void testGetVersionNoOverride() { ConfigScopeType.ACTOR, ACTOR_ID))) .thenReturn(Optional.empty()); - final Optional optResult = + final Optional optResult = overrideProvider.getOverride(ActorType.SOURCE, ACTOR_DEFINITION_ID, WORKSPACE_ID, ACTOR_ID, DEFAULT_VERSION); assertTrue(optResult.isEmpty()); @@ -125,8 +152,9 @@ void testGetVersionNoOverride() { verifyNoInteractions(mActorDefinitionService); } - @Test - void testGetVersionWithOverride() throws ConfigNotFoundException, IOException { + @ParameterizedTest + @ValueSource(strings = {"user", "breaking_change"}) + void 
testGetVersionWithOverride(final String originTypeStr) throws ConfigNotFoundException, IOException { final UUID versionId = UUID.randomUUID(); final ScopedConfiguration versionConfig = new ScopedConfiguration() .withId(UUID.randomUUID()) @@ -134,7 +162,8 @@ void testGetVersionWithOverride() throws ConfigNotFoundException, IOException { .withScopeId(WORKSPACE_ID) .withResourceType(ConfigResourceType.ACTOR_DEFINITION) .withResourceId(ACTOR_DEFINITION_ID) - .withValue(versionId.toString()); + .withValue(versionId.toString()) + .withOriginType(ConfigOriginType.fromValue(originTypeStr)); when(mScopedConfigurationService.getScopedConfiguration(ConnectorVersionKey.INSTANCE, ConfigResourceType.ACTOR_DEFINITION, ACTOR_DEFINITION_ID, Map.of( @@ -145,11 +174,14 @@ void testGetVersionWithOverride() throws ConfigNotFoundException, IOException { when(mActorDefinitionService.getActorDefinitionVersion(versionId)).thenReturn(OVERRIDE_VERSION); - final Optional optResult = + final Optional optResult = overrideProvider.getOverride(ActorType.SOURCE, ACTOR_DEFINITION_ID, WORKSPACE_ID, ACTOR_ID, DEFAULT_VERSION); assertTrue(optResult.isPresent()); - assertEquals(OVERRIDE_VERSION, optResult.get()); + assertEquals(OVERRIDE_VERSION, optResult.get().actorDefinitionVersion()); + + final boolean expectedOverrideStatus = originTypeStr.equals("user"); + assertEquals(expectedOverrideStatus, optResult.get().isOverrideApplied()); verify(mScopedConfigurationService).getScopedConfiguration(ConnectorVersionKey.INSTANCE, ConfigResourceType.ACTOR_DEFINITION, ACTOR_DEFINITION_ID, Map.of( @@ -161,8 +193,9 @@ void testGetVersionWithOverride() throws ConfigNotFoundException, IOException { verifyNoMoreInteractions(mScopedConfigurationService, mActorDefinitionService); } - @Test - void testGetVersionWithOverrideNoActor() throws ConfigNotFoundException, IOException { + @ParameterizedTest + @ValueSource(strings = {"user", "breaking_change"}) + void testGetVersionWithOverrideNoActor(final String originTypeStr) 
throws ConfigNotFoundException, IOException { final UUID versionId = UUID.randomUUID(); final ScopedConfiguration versionConfig = new ScopedConfiguration() .withId(UUID.randomUUID()) @@ -170,7 +203,8 @@ void testGetVersionWithOverrideNoActor() throws ConfigNotFoundException, IOExcep .withScopeId(WORKSPACE_ID) .withResourceType(ConfigResourceType.ACTOR_DEFINITION) .withResourceId(ACTOR_DEFINITION_ID) - .withValue(versionId.toString()); + .withValue(versionId.toString()) + .withOriginType(ConfigOriginType.fromValue(originTypeStr)); when(mScopedConfigurationService.getScopedConfiguration(ConnectorVersionKey.INSTANCE, ConfigResourceType.ACTOR_DEFINITION, ACTOR_DEFINITION_ID, Map.of( @@ -180,11 +214,14 @@ void testGetVersionWithOverrideNoActor() throws ConfigNotFoundException, IOExcep when(mActorDefinitionService.getActorDefinitionVersion(versionId)).thenReturn(OVERRIDE_VERSION); - final Optional optResult = + final Optional optResult = overrideProvider.getOverride(ActorType.SOURCE, ACTOR_DEFINITION_ID, WORKSPACE_ID, null, DEFAULT_VERSION); assertTrue(optResult.isPresent()); - assertEquals(OVERRIDE_VERSION, optResult.get()); + assertEquals(OVERRIDE_VERSION, optResult.get().actorDefinitionVersion()); + + final boolean expectedOverrideStatus = originTypeStr.equals("user"); + assertEquals(expectedOverrideStatus, optResult.get().isOverrideApplied()); verify(mScopedConfigurationService).getScopedConfiguration(ConnectorVersionKey.INSTANCE, ConfigResourceType.ACTOR_DEFINITION, ACTOR_DEFINITION_ID, Map.of( @@ -229,4 +266,75 @@ void testThrowsIfVersionIdDoesNotExist() throws ConfigNotFoundException, IOExcep verifyNoMoreInteractions(mScopedConfigurationService, mActorDefinitionService); } + @ParameterizedTest + @ValueSource(booleans = {true, false}) + void testBCPinIntegrityMetricsEmitted(final Boolean withMismatchedVersions) throws ConfigNotFoundException, IOException { + final UUID versionId = OVERRIDE_VERSION.getVersionId(); + final ScopedConfiguration breakingChangePin = new 
ScopedConfiguration() + .withId(UUID.randomUUID()) + .withScopeType(ConfigScopeType.WORKSPACE) + .withScopeId(WORKSPACE_ID) + .withResourceType(ConfigResourceType.ACTOR_DEFINITION) + .withResourceId(ACTOR_DEFINITION_ID) + .withValue(versionId.toString()) + .withOriginType(ConfigOriginType.BREAKING_CHANGE); + + when(mScopedConfigurationService.getScopedConfiguration(ConnectorVersionKey.INSTANCE, ConfigResourceType.ACTOR_DEFINITION, ACTOR_DEFINITION_ID, + Map.of( + ConfigScopeType.ORGANIZATION, ORGANIZATION_ID, + ConfigScopeType.WORKSPACE, WORKSPACE_ID, + ConfigScopeType.ACTOR, ACTOR_ID))) + .thenReturn(Optional.of(breakingChangePin)); + + when(mActorDefinitionService.getActorDefinitionVersion(versionId)).thenReturn(OVERRIDE_VERSION); + + final ActorDefinitionVersion defaultVersion = withMismatchedVersions ? DEFAULT_VERSION : OVERRIDE_VERSION; + + final Optional optResult = + overrideProvider.getOverride(ActorType.SOURCE, ACTOR_DEFINITION_ID, WORKSPACE_ID, ACTOR_ID, defaultVersion); + + assertTrue(optResult.isPresent()); + assertEquals(OVERRIDE_VERSION, optResult.get().actorDefinitionVersion()); + + verify(mMetricClient).count(OssMetricsRegistry.CONNECTOR_BREAKING_CHANGE_PIN_SERVED, 1, + new MetricAttribute("workspace_id", WORKSPACE_ID.toString()), + new MetricAttribute("actor_id", ACTOR_ID.toString()), + new MetricAttribute("actor_default_version", defaultVersion.getVersionId().toString()), + new MetricAttribute("pinned_version", versionId.toString()), + new MetricAttribute("status", withMismatchedVersions ? 
"invalid" : "ok")); + } + + @Test + void testBCPinIntegrityMetricsNotEmittedWhenFFOff() throws ConfigNotFoundException, IOException { + when(mFeatureFlagClient.boolVariation(UseActorScopedDefaultVersions.INSTANCE, new Workspace(WORKSPACE_ID))) + .thenReturn(false); + + final UUID versionId = OVERRIDE_VERSION.getVersionId(); + final ScopedConfiguration breakingChangePin = new ScopedConfiguration() + .withId(UUID.randomUUID()) + .withScopeType(ConfigScopeType.WORKSPACE) + .withScopeId(WORKSPACE_ID) + .withResourceType(ConfigResourceType.ACTOR_DEFINITION) + .withResourceId(ACTOR_DEFINITION_ID) + .withValue(versionId.toString()) + .withOriginType(ConfigOriginType.BREAKING_CHANGE); + + when(mScopedConfigurationService.getScopedConfiguration(ConnectorVersionKey.INSTANCE, ConfigResourceType.ACTOR_DEFINITION, ACTOR_DEFINITION_ID, + Map.of( + ConfigScopeType.ORGANIZATION, ORGANIZATION_ID, + ConfigScopeType.WORKSPACE, WORKSPACE_ID, + ConfigScopeType.ACTOR, ACTOR_ID))) + .thenReturn(Optional.of(breakingChangePin)); + + when(mActorDefinitionService.getActorDefinitionVersion(versionId)).thenReturn(OVERRIDE_VERSION); + + final Optional optResult = + overrideProvider.getOverride(ActorType.SOURCE, ACTOR_DEFINITION_ID, WORKSPACE_ID, ACTOR_ID, DEFAULT_VERSION); + + assertTrue(optResult.isPresent()); + assertEquals(OVERRIDE_VERSION, optResult.get().actorDefinitionVersion()); + + verifyNoInteractions(mMetricClient); + } + } diff --git a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/version_overrides/FeatureFlagDefinitionVersionOverrideProviderTest.java b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/version_overrides/FeatureFlagDefinitionVersionOverrideProviderTest.java index 2a51361b36f..2787b7acf9a 100644 --- a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/version_overrides/FeatureFlagDefinitionVersionOverrideProviderTest.java +++ 
b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/version_overrides/FeatureFlagDefinitionVersionOverrideProviderTest.java @@ -24,6 +24,7 @@ import io.airbyte.config.NormalizationDestinationDefinitionConfig; import io.airbyte.config.ReleaseStage; import io.airbyte.config.SuggestedStreams; +import io.airbyte.config.persistence.ActorDefinitionVersionHelper.ActorDefinitionVersionWithOverrideStatus; import io.airbyte.config.persistence.ActorDefinitionVersionResolver; import io.airbyte.featureflag.ConnectorVersionOverride; import io.airbyte.featureflag.Context; @@ -108,7 +109,7 @@ void setup() { @Test void testGetVersionNoOverride() { - final Optional optResult = + final Optional optResult = overrideProvider.getOverride(ActorType.SOURCE, ACTOR_DEFINITION_ID, WORKSPACE_ID, ACTOR_ID, DEFAULT_VERSION); assertTrue(optResult.isEmpty()); @@ -122,11 +123,13 @@ void testGetVersionWithOverride() throws IOException { when(mActorDefinitionVersionResolver.resolveVersionForTag(ACTOR_DEFINITION_ID, ActorType.SOURCE, DOCKER_REPOSITORY, DOCKER_IMAGE_TAG_2)) .thenReturn(Optional.of(OVERRIDE_VERSION)); - final Optional optResult = + final Optional optResult = overrideProvider.getOverride(ActorType.SOURCE, ACTOR_DEFINITION_ID, WORKSPACE_ID, ACTOR_ID, DEFAULT_VERSION); - assertEquals(OVERRIDE_VERSION, optResult.orElse(null)); + assertTrue(optResult.isPresent()); + assertEquals(OVERRIDE_VERSION, optResult.get().actorDefinitionVersion()); + assertTrue(optResult.get().isOverrideApplied()); verify(mActorDefinitionVersionResolver).resolveVersionForTag(ACTOR_DEFINITION_ID, ActorType.SOURCE, DOCKER_REPOSITORY, DOCKER_IMAGE_TAG_2); verifyNoMoreInteractions(mActorDefinitionVersionResolver); } @@ -138,7 +141,7 @@ void testOverrideIsEmptyIfVersionDoesNotResolve() throws IOException { when(mActorDefinitionVersionResolver.resolveVersionForTag(ACTOR_DEFINITION_ID, ActorType.SOURCE, DOCKER_REPOSITORY, DOCKER_IMAGE_TAG_2)) .thenReturn(Optional.empty()); - final Optional 
optResult = + final Optional optResult = overrideProvider.getOverride(ActorType.SOURCE, ACTOR_DEFINITION_ID, WORKSPACE_ID, ACTOR_ID, DEFAULT_VERSION); diff --git a/airbyte-config/config-persistence/src/test/kotlin/io/airbyte/config/persistence/RepositoryTestSetup.kt b/airbyte-config/config-persistence/src/test/kotlin/io/airbyte/config/persistence/RepositoryTestSetup.kt new file mode 100644 index 00000000000..e5bf540a567 --- /dev/null +++ b/airbyte-config/config-persistence/src/test/kotlin/io/airbyte/config/persistence/RepositoryTestSetup.kt @@ -0,0 +1,213 @@ +package io.airbyte.config.persistence + +import io.airbyte.config.ActorDefinitionVersion +import io.airbyte.config.DestinationConnection +import io.airbyte.config.Geography +import io.airbyte.config.SourceConnection +import io.airbyte.config.StandardDestinationDefinition +import io.airbyte.config.StandardSourceDefinition +import io.airbyte.config.StandardSync +import io.airbyte.config.StandardWorkspace +import io.airbyte.config.SupportLevel +import io.airbyte.data.helpers.ActorDefinitionVersionUpdater +import io.airbyte.data.services.impls.jooq.ActorDefinitionServiceJooqImpl +import io.airbyte.data.services.impls.jooq.DestinationServiceJooqImpl +import io.airbyte.data.services.impls.jooq.SourceServiceJooqImpl +import io.airbyte.data.services.impls.jooq.WorkspaceServiceJooqImpl +import io.airbyte.db.factory.DSLContextFactory +import io.airbyte.db.instance.test.TestDatabaseProviders +import io.micronaut.context.ApplicationContext +import io.micronaut.context.env.PropertySource +import io.micronaut.data.connection.jdbc.advice.DelegatingDataSource +import io.mockk.every +import io.mockk.mockk +import org.jooq.DSLContext +import org.jooq.SQLDialect +import org.junit.jupiter.api.AfterAll +import org.junit.jupiter.api.BeforeAll +import org.testcontainers.containers.PostgreSQLContainer +import java.util.UUID +import javax.sql.DataSource + +open class RepositoryTestSetup { + companion object { + val connectionId1 = 
UUID.randomUUID() + val connectionId2 = UUID.randomUUID() + private lateinit var context: ApplicationContext + private lateinit var jooqDslContext: DSLContext + + // we run against an actual database to ensure micronaut data and jooq properly integrate + private val container: PostgreSQLContainer<*> = + PostgreSQLContainer("postgres:13-alpine") + .withDatabaseName("airbyte") + .withUsername("docker") + .withPassword("docker") + + @BeforeAll + @JvmStatic + fun setup() { + container.start() + // set the micronaut datasource properties to match our container we started up + context = + ApplicationContext.run( + PropertySource.of( + "test", + mapOf( + "datasources.config.driverClassName" to "org.postgresql.Driver", + "datasources.config.db-type" to "postgres", + "datasources.config.dialect" to "POSTGRES", + "datasources.config.url" to container.jdbcUrl, + "datasources.config.username" to container.username, + "datasources.config.password" to container.password, + ), + ), + ) + + // removes micronaut transactional wrapper that doesn't play nice with our non-micronaut factories + val dataSource = (context.getBean(DataSource::class.java) as DelegatingDataSource).targetDataSource + jooqDslContext = DSLContextFactory.create(dataSource, SQLDialect.POSTGRES) + val databaseProviders = TestDatabaseProviders(dataSource, jooqDslContext) + + // this line is what runs the migrations + val database = databaseProviders.createNewConfigsDatabase() + + val workspaceId = UUID.randomUUID() + val workspaceService = + WorkspaceServiceJooqImpl( + database, + mockk(), + mockk(), + mockk(), + mockk(), + ) + + workspaceService.writeStandardWorkspaceNoSecrets( + StandardWorkspace() + .withWorkspaceId(workspaceId) + .withDefaultGeography(Geography.US) + .withName("") + .withSlug("") + .withInitialSetupComplete(true), + ) + + val actorDefinitionUpdate: ActorDefinitionVersionUpdater = mockk() + + every { actorDefinitionUpdate.updateSourceDefaultVersion(any(), any(), any()) } returns Unit + every { 
actorDefinitionUpdate.updateDestinationDefaultVersion(any(), any(), any()) } returns Unit + + val sourceJooq = + SourceServiceJooqImpl( + database, + mockk(), + mockk(), + mockk(), + mockk(), + mockk(), + actorDefinitionUpdate, + ) + + val sourceDefinitionId = UUID.randomUUID() + val sourceDefinitionVersionId = UUID.randomUUID() + + sourceJooq.writeConnectorMetadata( + StandardSourceDefinition() + .withSourceDefinitionId(sourceDefinitionId) + .withName("sourceDef"), + ActorDefinitionVersion() + .withVersionId(sourceDefinitionVersionId) + .withActorDefinitionId(sourceDefinitionId) + .withDockerRepository("") + .withDockerImageTag("") + .withSupportState(ActorDefinitionVersion.SupportState.SUPPORTED) + .withSupportLevel(SupportLevel.CERTIFIED), + listOf(), + ) + + val actorDefinitionService = + ActorDefinitionServiceJooqImpl( + database, + ) + actorDefinitionService.updateActorDefinitionDefaultVersionId(sourceDefinitionId, sourceDefinitionVersionId) + + val sourceId = UUID.randomUUID() + sourceJooq.writeSourceConnectionNoSecrets( + SourceConnection() + .withSourceId(sourceId) + .withName("source") + .withSourceDefinitionId(sourceDefinitionId) + .withDefaultVersionId(sourceDefinitionVersionId) + .withWorkspaceId(workspaceId), + ) + + val destinationService = + DestinationServiceJooqImpl( + database, + mockk(), + mockk(), + mockk(), + mockk(), + mockk(), + actorDefinitionUpdate, + ) + + val destinationDefinitionId = UUID.randomUUID() + val destinationDefinitionVersionId = UUID.randomUUID() + destinationService.writeConnectorMetadata( + StandardDestinationDefinition() + .withDestinationDefinitionId(destinationDefinitionId) + .withName("sourceDef"), + ActorDefinitionVersion() + .withVersionId(destinationDefinitionVersionId) + .withActorDefinitionId(destinationDefinitionId) + .withDockerRepository("") + .withDockerImageTag("") + .withSupportState(ActorDefinitionVersion.SupportState.SUPPORTED) + .withSupportLevel(SupportLevel.CERTIFIED), + listOf(), + ) + + 
actorDefinitionService.updateActorDefinitionDefaultVersionId(destinationDefinitionId, destinationDefinitionVersionId) + + val destinationId = UUID.randomUUID() + destinationService.writeDestinationConnectionNoSecrets( + DestinationConnection() + .withDestinationId(destinationId) + .withName("destination") + .withDestinationDefinitionId(destinationDefinitionId) + .withDefaultVersionId(destinationDefinitionVersionId) + .withWorkspaceId(workspaceId), + ) + + val connectionRepo = StandardSyncPersistence(database) + connectionRepo.writeStandardSync( + StandardSync() + .withConnectionId(connectionId1) + .withGeography(Geography.US) + .withSourceId(sourceId) + .withDestinationId(destinationId) + .withName("not null") + .withBreakingChange(true), + ) + + connectionRepo.writeStandardSync( + StandardSync() + .withConnectionId(connectionId2) + .withGeography(Geography.US) + .withSourceId(sourceId) + .withDestinationId(destinationId) + .withName("not null") + .withBreakingChange(true), + ) + } + + @AfterAll + @JvmStatic + fun dbDown() { + container.close() + } + } + + fun getRepository(clazz: Class): T { + return context.getBean(clazz) + } +} diff --git a/airbyte-config/config-persistence/src/test/kotlin/io/airbyte/config/persistence/StreamGenerationRepositoryTest.kt b/airbyte-config/config-persistence/src/test/kotlin/io/airbyte/config/persistence/StreamGenerationRepositoryTest.kt new file mode 100644 index 00000000000..62ac76a1209 --- /dev/null +++ b/airbyte-config/config-persistence/src/test/kotlin/io/airbyte/config/persistence/StreamGenerationRepositoryTest.kt @@ -0,0 +1,120 @@ +package io.airbyte.config.persistence + +import io.airbyte.config.persistence.domain.Generation +import io.airbyte.config.persistence.domain.StreamGeneration +import io.micronaut.context.env.Environment +import io.micronaut.test.extensions.junit5.annotation.MicronautTest +import org.assertj.core.api.Assertions.assertThat +import org.junit.jupiter.api.AfterEach +import 
org.junit.jupiter.api.Assertions.assertEquals +import org.junit.jupiter.api.Assertions.assertTrue +import org.junit.jupiter.api.Test + +@MicronautTest(environments = [Environment.TEST]) +class StreamGenerationRepositoryTest : RepositoryTestSetup() { + @AfterEach + fun cleanDb() { + getRepository(StreamGenerationRepository::class.java).deleteAll() + } + + @Test + fun `test db insertion`() { + val streamGeneration = + StreamGeneration( + connectionId = connectionId1, + streamName = "sname", + streamNamespace = "snamespace", + generationId = 0, + startJobId = 0, + ) + + getRepository(StreamGenerationRepository::class.java).save(streamGeneration) + + assertEquals(1, getRepository(StreamGenerationRepository::class.java).findByConnectionId(streamGeneration.connectionId).size) + } + + @Test + fun `find by connection id and stream name`() { + val streamGeneration = + StreamGeneration( + connectionId = connectionId1, + streamName = "sname1", + streamNamespace = "snamespace1", + generationId = 0, + startJobId = 0, + ) + + getRepository(StreamGenerationRepository::class.java).save(streamGeneration) + + val streamGeneration2 = + StreamGeneration( + connectionId = connectionId1, + streamName = "sname2", + streamNamespace = "snamespace2", + generationId = 1, + startJobId = 1, + ) + + getRepository(StreamGenerationRepository::class.java).save(streamGeneration2) + + val streamGeneration3 = + StreamGeneration( + connectionId = connectionId2, + streamName = "sname3", + generationId = 2, + startJobId = 2, + ) + + getRepository(StreamGenerationRepository::class.java).save(streamGeneration3) + + val streamGenerationForConnectionIds = getRepository(StreamGenerationRepository::class.java).findByConnectionId(connectionId1) + assertEquals(2, streamGenerationForConnectionIds.size) + + val maxGenerationOfStreamsByConnectionId1 = + getRepository( + StreamGenerationRepository::class.java, + ).getMaxGenerationOfStreamsForConnectionId(connectionId1) + val expectedRecord1 = Generation("sname1", 
"snamespace1", 0) + val expectedRecord2 = Generation("sname2", "snamespace2", 1) + assertEquals(2, maxGenerationOfStreamsByConnectionId1.size) + assertThat(maxGenerationOfStreamsByConnectionId1).containsExactlyInAnyOrder(expectedRecord1, expectedRecord2) + + val maxGenerationOfStreamsByConnectionId2 = + getRepository( + StreamGenerationRepository::class.java, + ).getMaxGenerationOfStreamsForConnectionId(connectionId2) + assertEquals(1, maxGenerationOfStreamsByConnectionId2.size) + val expectedRecord3 = Generation(streamName = "sname3", generationId = 2) + assertThat(maxGenerationOfStreamsByConnectionId2).containsExactlyInAnyOrder(expectedRecord3) + } + + @Test + fun `delete by connection id`() { + val streamGeneration = + StreamGeneration( + connectionId = connectionId1, + streamName = "sname1", + streamNamespace = "snamespace1", + generationId = 0, + startJobId = 0, + ) + + getRepository(StreamGenerationRepository::class.java).save(streamGeneration) + + val streamGeneration2 = + StreamGeneration( + connectionId = connectionId2, + streamName = "sname2", + streamNamespace = "sname2", + generationId = 1, + startJobId = 1, + ) + + getRepository(StreamGenerationRepository::class.java).save(streamGeneration2) + + getRepository(StreamGenerationRepository::class.java).deleteByConnectionId(streamGeneration.connectionId) + + assertTrue(getRepository(StreamGenerationRepository::class.java).findByConnectionId(streamGeneration.connectionId).isEmpty()) + assertTrue(getRepository(StreamGenerationRepository::class.java).findByConnectionId(streamGeneration2.connectionId).isNotEmpty()) + } +} diff --git a/airbyte-config/config-persistence/src/test/kotlin/io/airbyte/config/persistence/StreamRefreshesRepositoryTest.kt b/airbyte-config/config-persistence/src/test/kotlin/io/airbyte/config/persistence/StreamRefreshesRepositoryTest.kt new file mode 100644 index 00000000000..fbb6f7cd0d5 --- /dev/null +++ 
b/airbyte-config/config-persistence/src/test/kotlin/io/airbyte/config/persistence/StreamRefreshesRepositoryTest.kt @@ -0,0 +1,140 @@ +package io.airbyte.config.persistence + +import io.airbyte.config.persistence.domain.StreamRefresh +import io.micronaut.context.env.Environment +import io.micronaut.test.extensions.junit5.annotation.MicronautTest +import org.junit.jupiter.api.AfterEach +import org.junit.jupiter.api.Assertions.assertEquals +import org.junit.jupiter.api.Assertions.assertTrue +import org.junit.jupiter.api.Test + +@MicronautTest(environments = [Environment.TEST]) +class StreamRefreshesRepositoryTest : RepositoryTestSetup() { + @AfterEach + fun cleanDb() { + getRepository(StreamRefreshesRepository::class.java).deleteAll() + } + + @Test + fun `test db insertion`() { + val streamRefresh = + StreamRefresh( + connectionId = connectionId1, + streamName = "sname", + streamNamespace = "snamespace", + ) + + getRepository(StreamRefreshesRepository::class.java).save(streamRefresh) + assertTrue(getRepository(StreamRefreshesRepository::class.java).existsByConnectionId(streamRefresh.connectionId)) + } + + @Test + fun `find by connection id`() { + val streamRefresh1 = + StreamRefresh( + connectionId = connectionId1, + streamName = "sname1", + streamNamespace = "snamespace1", + ) + + getRepository(StreamRefreshesRepository::class.java).save(streamRefresh1) + + val streamRefresh2 = + StreamRefresh( + connectionId = connectionId1, + streamName = "sname2", + streamNamespace = "snamespace2", + ) + + getRepository(StreamRefreshesRepository::class.java).save(streamRefresh2) + + val streamRefresh3 = + StreamRefresh( + connectionId = connectionId2, + streamName = "sname3", + streamNamespace = "snamespace3", + ) + + getRepository(StreamRefreshesRepository::class.java).save(streamRefresh3) + + assertEquals(2, getRepository(StreamRefreshesRepository::class.java).findByConnectionId(connectionId1).size) + } + + @Test + fun `delete by connection id`() { + val streamRefresh1 = + 
StreamRefresh( + connectionId = connectionId1, + streamName = "sname1", + streamNamespace = "snamespace1", + ) + + getRepository(StreamRefreshesRepository::class.java).save(streamRefresh1) + + val streamRefresh2 = + StreamRefresh( + connectionId = connectionId2, + streamName = "sname2", + streamNamespace = "snamespace2", + ) + + getRepository(StreamRefreshesRepository::class.java).save(streamRefresh2) + + getRepository(StreamRefreshesRepository::class.java).deleteByConnectionId(streamRefresh1.connectionId) + + assertTrue(getRepository(StreamRefreshesRepository::class.java).findByConnectionId(streamRefresh1.connectionId).isEmpty()) + assertTrue(getRepository(StreamRefreshesRepository::class.java).findByConnectionId(streamRefresh2.connectionId).isNotEmpty()) + } + + @Test + fun `delete by connection id and stream name and namespace`() { + val streamRefresh1 = + StreamRefresh( + connectionId = connectionId1, + streamName = "sname1", + streamNamespace = "snamespace1", + ) + + val streamRefresh2 = + StreamRefresh( + connectionId = connectionId1, + streamName = "sname2", + streamNamespace = "snamespace2", + ) + + val streamRefresh3 = + StreamRefresh( + connectionId = connectionId1, + streamName = "sname3", + ) + + getRepository(StreamRefreshesRepository::class.java).saveAll(listOf(streamRefresh1, streamRefresh2, streamRefresh3)) + + getRepository( + StreamRefreshesRepository::class.java, + ).deleteByConnectionIdAndStreamNameAndStreamNamespace(connectionId1, streamRefresh3.streamName, streamRefresh3.streamNamespace) + val refreshes: List = getRepository(StreamRefreshesRepository::class.java).findByConnectionId(connectionId1) + assertEquals(2, refreshes.size) + refreshes.forEach { + assertEquals(connectionId1, it.connectionId) + if (streamRefresh1.streamName.equals(it.streamName)) { + assertEquals(streamRefresh1.streamNamespace, it.streamNamespace) + } else if (streamRefresh2.streamName.equals(it.streamName)) { + assertEquals(streamRefresh2.streamNamespace, 
it.streamNamespace) + } else { + throw RuntimeException("Unknown stream name " + it.streamName) + } + } + + getRepository( + StreamRefreshesRepository::class.java, + ).deleteByConnectionIdAndStreamNameAndStreamNamespace(connectionId1, streamRefresh2.streamName, streamRefresh2.streamNamespace) + val refreshes2: List = getRepository(StreamRefreshesRepository::class.java).findByConnectionId(connectionId1) + assertEquals(1, refreshes2.size) + refreshes2.forEach { + assertEquals(connectionId1, it.connectionId) + assertEquals(streamRefresh1.streamName, (it.streamName)) + assertEquals(streamRefresh1.streamNamespace, it.streamNamespace) + } + } +} diff --git a/airbyte-config/config-persistence/src/test/kotlin/io/airbyte/config/persistence/helper/CatalogGenerationSetterTest.kt b/airbyte-config/config-persistence/src/test/kotlin/io/airbyte/config/persistence/helper/CatalogGenerationSetterTest.kt new file mode 100644 index 00000000000..ec62c72d03e --- /dev/null +++ b/airbyte-config/config-persistence/src/test/kotlin/io/airbyte/config/persistence/helper/CatalogGenerationSetterTest.kt @@ -0,0 +1,111 @@ +package io.airbyte.config.persistence.helper + +import io.airbyte.config.persistence.domain.Generation +import io.airbyte.protocol.models.AirbyteStream +import io.airbyte.protocol.models.ConfiguredAirbyteCatalog +import io.airbyte.protocol.models.ConfiguredAirbyteStream +import io.airbyte.protocol.models.StreamDescriptor +import org.junit.jupiter.api.Assertions.assertEquals +import org.junit.jupiter.api.BeforeEach +import org.junit.jupiter.api.Test +import java.util.UUID + +class CatalogGenerationSetterTest { + private val catalogGenerationSetter = CatalogGenerationSetter() + + private val catalog = + ConfiguredAirbyteCatalog().withStreams( + listOf( + ConfiguredAirbyteStream().withStream( + AirbyteStream() + .withName("name1") + .withNamespace("namespace1"), + ), + ConfiguredAirbyteStream().withStream( + AirbyteStream() + .withName("name2") + .withNamespace("namespace2"), + ), 
+ ), + ) + + private val generations = + listOf( + Generation( + streamName = "name1", + streamNamespace = "namespace1", + generationId = 1L, + ), + Generation( + streamName = "name2", + streamNamespace = "namespace2", + generationId = 2L, + ), + ) + + val jobId = 3L + val connectionId = UUID.randomUUID() + + @BeforeEach + fun init() { + } + + @Test + fun `test that no refresh truncation is performed if there is no refresh`() { + val updatedCatalog = + catalogGenerationSetter.updateCatalogWithGenerationAndSyncInformation( + catalog = catalog, + jobId = jobId, + streamRefreshes = listOf(), + generations = generations, + ) + + updatedCatalog.streams.forEach { + assertEquals(0L, it.minimumGenerationId) + assertEquals(jobId, it.syncId) + } + } + + @Test + fun `test that truncation are properly requested`() { + val updatedCatalog = + catalogGenerationSetter.updateCatalogWithGenerationAndSyncInformation( + catalog = catalog, + jobId = jobId, + streamRefreshes = + listOf( + StreamDescriptor().withName("name1").withNamespace("namespace1"), + StreamDescriptor().withName("name2").withNamespace("namespace2"), + ), + generations = generations, + ) + + updatedCatalog.streams.forEach { + assertEquals(it.generationId, it.minimumGenerationId) + assertEquals(jobId, it.syncId) + } + } + + @Test + fun `test that truncation are properly requested when partial`() { + val updatedCatalog = + catalogGenerationSetter.updateCatalogWithGenerationAndSyncInformation( + catalog = catalog, + jobId = jobId, + streamRefreshes = listOf(StreamDescriptor().withName("name1").withNamespace("namespace1")), + generations = generations, + ) + + updatedCatalog.streams.forEach { + if (it.stream.name == "name1" && it.stream.namespace == "namespace1") { + assertEquals(it.generationId, it.minimumGenerationId) + assertEquals(jobId, it.syncId) + assertEquals(1L, it.generationId) + } else { + assertEquals(0L, it.minimumGenerationId) + assertEquals(jobId, it.syncId) + assertEquals(2L, it.generationId) + } + } + } 
+} diff --git a/airbyte-config/config-persistence/src/test/kotlin/io/airbyte/config/persistence/helper/GenerationBumperTest.kt b/airbyte-config/config-persistence/src/test/kotlin/io/airbyte/config/persistence/helper/GenerationBumperTest.kt new file mode 100644 index 00000000000..468afa5600e --- /dev/null +++ b/airbyte-config/config-persistence/src/test/kotlin/io/airbyte/config/persistence/helper/GenerationBumperTest.kt @@ -0,0 +1,91 @@ +package io.airbyte.config.persistence.helper + +import io.airbyte.config.persistence.StreamGenerationRepository +import io.airbyte.config.persistence.domain.Generation +import io.airbyte.config.persistence.domain.StreamGeneration +import io.airbyte.config.persistence.domain.StreamRefresh +import io.mockk.every +import io.mockk.mockk +import io.mockk.slot +import org.junit.jupiter.api.Assertions.assertEquals +import org.junit.jupiter.api.Test +import java.util.UUID + +class GenerationBumperTest { + val streamGenerationRepository: StreamGenerationRepository = mockk() + + val generationBumper = GenerationBumper(streamGenerationRepository) + + val connectionId = UUID.randomUUID() + val jobId = 456L + + val generations = + listOf( + Generation( + streamName = "name1", + streamNamespace = "namespace1", + generationId = 42, + ), + Generation( + streamName = "name2", + streamNamespace = "namespace2", + generationId = 42, + ), + ) + + @Test + fun `increase the generation properly`() { + every { streamGenerationRepository.getMaxGenerationOfStreamsForConnectionId(connectionId) } returns generations + val generationSlot = slot>() + every { streamGenerationRepository.saveAll(capture(generationSlot)) } returns listOf() + + generationBumper.updateGenerationForStreams( + connectionId, + jobId, + listOf( + StreamRefresh( + connectionId = connectionId, + streamName = "name1", + streamNamespace = "namespace1", + ), + ), + ) + + val capturedStreamGenerations = generationSlot.captured + assertEquals(1, capturedStreamGenerations.size) + + val 
streamGeneration = capturedStreamGenerations[0] + assertEquals("name1", streamGeneration.streamName) + assertEquals("namespace1", streamGeneration.streamNamespace) + assertEquals(43, streamGeneration.generationId) + assertEquals(jobId, streamGeneration.startJobId) + } + + @Test + fun `increase the generation properly if generation is missing`() { + every { streamGenerationRepository.getMaxGenerationOfStreamsForConnectionId(connectionId) } returns generations + val generationSlot = slot>() + every { streamGenerationRepository.saveAll(capture(generationSlot)) } returns listOf() + + generationBumper.updateGenerationForStreams( + connectionId, + jobId, + listOf( + StreamRefresh( + connectionId = connectionId, + streamName = "name3", + streamNamespace = "namespace3", + ), + ), + ) + + val capturedStreamGenerations = generationSlot.captured + assertEquals(1, capturedStreamGenerations.size) + + val streamGeneration = capturedStreamGenerations[0] + assertEquals("name3", streamGeneration.streamName) + assertEquals("namespace3", streamGeneration.streamNamespace) + assertEquals(1L, streamGeneration.generationId) + assertEquals(jobId, streamGeneration.startJobId) + } +} diff --git a/airbyte-config/config-persistence/src/testFixtures/java/io/airbyte/config/persistence/MockData.java b/airbyte-config/config-persistence/src/testFixtures/java/io/airbyte/config/persistence/MockData.java index fa8a7beb162..127ebf1d8af 100644 --- a/airbyte-config/config-persistence/src/testFixtures/java/io/airbyte/config/persistence/MockData.java +++ b/airbyte-config/config-persistence/src/testFixtures/java/io/airbyte/config/persistence/MockData.java @@ -132,6 +132,9 @@ public class MockData { static final UUID CREATOR_USER_ID_3 = UUID.randomUUID(); static final UUID CREATOR_USER_ID_4 = UUID.randomUUID(); static final UUID CREATOR_USER_ID_5 = UUID.randomUUID(); + static final UUID DUP_EMAIL_USER_ID_1 = UUID.randomUUID(); + static final UUID DUP_EMAIL_USER_ID_2 = UUID.randomUUID(); + static final 
String DUP_EMAIL = "dup-email@airbyte.io"; // Permission static final UUID PERMISSION_ID_1 = UUID.randomUUID(); @@ -295,6 +298,34 @@ public static List users() { return Arrays.asList(user1, user2, user3, user4, user5); } + public static List dupEmailUsers() { + final User dupEmailUser1 = new User() + .withUserId(DUP_EMAIL_USER_ID_1) + .withName("dup-email-user-1") + .withAuthUserId(DUP_EMAIL_USER_ID_1.toString()) + .withAuthProvider(AuthProvider.KEYCLOAK) + .withDefaultWorkspaceId(null) + .withStatus(User.Status.REGISTERED) + .withCompanyName("dup-user-company") + .withEmail(DUP_EMAIL) + .withNews(true) + .withUiMetadata(null); + + final User dupEmailUser2 = new User() + .withUserId(DUP_EMAIL_USER_ID_2) + .withName("dup-email-user-2") + .withAuthUserId(DUP_EMAIL_USER_ID_2.toString()) + .withAuthProvider(AuthProvider.KEYCLOAK) + .withDefaultWorkspaceId(null) + .withStatus(User.Status.REGISTERED) + .withCompanyName("dup-user-company") + .withEmail(DUP_EMAIL) + .withNews(true) + .withUiMetadata(null); + + return Arrays.asList(dupEmailUser1, dupEmailUser2); + } + public static List permissions() { return Arrays.asList(permission1, permission2, permission3, permission4, permission5, permission6, permission7); } diff --git a/airbyte-config/config-secrets/build.gradle.kts b/airbyte-config/config-secrets/build.gradle.kts index 9c6ad784c90..51fab71214c 100644 --- a/airbyte-config/config-secrets/build.gradle.kts +++ b/airbyte-config/config-secrets/build.gradle.kts @@ -1,58 +1,58 @@ plugins { - id("io.airbyte.gradle.jvm.lib") - id("io.airbyte.gradle.publish") - `java-test-fixtures` - kotlin("jvm") - kotlin("kapt") + id("io.airbyte.gradle.jvm.lib") + id("io.airbyte.gradle.publish") + `java-test-fixtures` + kotlin("jvm") + kotlin("kapt") } dependencies { - kapt(platform(libs.micronaut.platform)) - kapt(libs.bundles.micronaut.annotation.processor) + kapt(platform(libs.micronaut.platform)) + kapt(libs.bundles.micronaut.annotation.processor) - 
api(libs.bundles.micronaut.annotation) - api(libs.bundles.micronaut.kotlin) - api(libs.kotlin.logging) - api(libs.slf4j.api) - api(libs.bundles.log4j) - api(libs.micronaut.jackson.databind) - api(libs.google.cloud.storage) - api(libs.micronaut.jooq) - api(libs.guava) - api(libs.bundles.secret.hydration) - api(libs.airbyte.protocol) - api(libs.jakarta.transaction.api) - api(libs.micronaut.data.tx) - api(libs.aws.java.sdk.sts) - api(project(":airbyte-commons")) + api(libs.bundles.micronaut.annotation) + api(libs.bundles.micronaut.kotlin) + api(libs.kotlin.logging) + api(libs.slf4j.api) + api(libs.bundles.log4j) + api(libs.micronaut.jackson.databind) + api(libs.google.cloud.storage) + api(libs.micronaut.jooq) + api(libs.guava) + api(libs.bundles.secret.hydration) + api(libs.airbyte.protocol) + api(libs.jakarta.transaction.api) + api(libs.micronaut.data.tx) + api(libs.aws.java.sdk.sts) + api(project(":airbyte-commons")) - /* - * Marked as "implementation" to avoid leaking these dependencies to services - * that only use the retrieval side of the secret infrastructure. The services - * that do need these dependencies will already have them declared, as they will - * need to define singletons from these modules in order for everything work. - */ - implementation(project(":airbyte-config:config-models")) - implementation(project(":airbyte-json-validation")) + /* + * Marked as "implementation" to avoid leaking these dependencies to services + * that only use the retrieval side of the secret infrastructure. The services + * that do need these dependencies will already have them declared, as they will + * need to define singletons from these modules in order for everything work. 
+ */ + implementation(project(":airbyte-config:config-models")) + implementation(project(":airbyte-json-validation")) - testAnnotationProcessor(platform(libs.micronaut.platform)) - testAnnotationProcessor(libs.bundles.micronaut.test.annotation.processor) - testImplementation(libs.bundles.micronaut.test) - testImplementation(libs.mockk) - testImplementation(libs.kotlin.test.runner.junit5) - testImplementation(libs.bundles.junit) - testImplementation(libs.assertj.core) - testImplementation(libs.airbyte.protocol) - testImplementation(libs.apache.commons.lang) - testImplementation(libs.testcontainers.vault) - testImplementation(testFixtures(project(":airbyte-config:config-persistence"))) + testAnnotationProcessor(platform(libs.micronaut.platform)) + testAnnotationProcessor(libs.bundles.micronaut.test.annotation.processor) + testImplementation(libs.bundles.micronaut.test) + testImplementation(libs.mockk) + testImplementation(libs.kotlin.test.runner.junit5) + testImplementation(libs.bundles.junit) + testImplementation(libs.assertj.core) + testImplementation(libs.airbyte.protocol) + testImplementation(libs.apache.commons.lang) + testImplementation(libs.testcontainers.vault) + testImplementation(testFixtures(project(":airbyte-config:config-persistence"))) } // This is a workaround related to kaptBuild errors. It seems to be because there are no tests in cloud-airbyte-api-server. 
// TODO: this should be removed when we move to kotlin 1.9.20 // TODO: we should write tests afterEvaluate { - tasks.named("kaptGenerateStubsTestKotlin") { - enabled = false - } + tasks.named("kaptGenerateStubsTestKotlin") { + enabled = false + } } \ No newline at end of file diff --git a/airbyte-config/config-secrets/src/main/kotlin/secrets/SecretsHelpers.kt b/airbyte-config/config-secrets/src/main/kotlin/secrets/SecretsHelpers.kt index c3a371b247b..580000c81f6 100644 --- a/airbyte-config/config-secrets/src/main/kotlin/secrets/SecretsHelpers.kt +++ b/airbyte-config/config-secrets/src/main/kotlin/secrets/SecretsHelpers.kt @@ -496,52 +496,6 @@ object SecretsHelpers { return SecretCoordinate(coordinateBase, version) } - /** - * This method takes in the key (JSON key or HMAC key) of a workspace service account as a secret - * and generates a co-ordinate for the secret so that the secret can be written in secret - * persistence at the generated co-ordinate. - * - * @param newSecret The JSON key or HMAC key value - * @param secretReader To read the value from secret persistence for comparison with the new value - * @param workspaceId of the service account - * @param uuidSupplier provided to allow a test case to produce known UUIDs in order for easy * - * fixture creation. 
- * @param oldSecretCoordinate a nullable full coordinate (base+version) retrieved from the * - * previous config - * @param keyType HMAC ot JSON key - * @return a coordinate (versioned reference to where the secret is stored in the persistence) - */ - fun convertServiceAccountCredsToSecret( - newSecret: String, - secretReader: ReadOnlySecretPersistence, - workspaceId: UUID, - uuidSupplier: Supplier, - oldSecretCoordinate: JsonNode?, - keyType: String, - ): SecretCoordinateToPayload { - val oldSecretFullCoordinate = - if (oldSecretCoordinate != null && oldSecretCoordinate.has(COORDINATE_FIELD)) oldSecretCoordinate[COORDINATE_FIELD].asText() else null - val coordinateForStagingConfig: SecretCoordinate = - getSecretCoordinate( - "service_account_" + keyType + "_", - newSecret, - secretReader, - workspaceId, - uuidSupplier, - oldSecretFullCoordinate, - ) - return SecretCoordinateToPayload( - coordinateForStagingConfig, - newSecret, - Jsons.jsonNode>( - java.util.Map.of( - COORDINATE_FIELD, - coordinateForStagingConfig.fullCoordinate, - ), - ), - ) - } - /** * Takes in the secret coordinate in form of a JSON and fetches the secret from the store. 
* diff --git a/airbyte-config/config-secrets/src/main/kotlin/secrets/SecretsRepositoryWriter.kt b/airbyte-config/config-secrets/src/main/kotlin/secrets/SecretsRepositoryWriter.kt index 372b8fc4ba4..f1f3f3b48c5 100644 --- a/airbyte-config/config-secrets/src/main/kotlin/secrets/SecretsRepositoryWriter.kt +++ b/airbyte-config/config-secrets/src/main/kotlin/secrets/SecretsRepositoryWriter.kt @@ -13,11 +13,15 @@ import io.airbyte.validation.json.JsonValidationException import io.github.oshai.kotlinlogging.KotlinLogging import io.micronaut.context.annotation.Requires import jakarta.inject.Singleton +import java.time.Duration +import java.time.Instant import java.util.Optional import java.util.UUID private val logger = KotlinLogging.logger {} +private val EPHEMERAL_SECRET_LIFE_DURATION = Duration.ofHours(2) + /** * This class takes secrets as arguments but never returns a secrets as return values (even the ones * that are passed in as arguments). It is responsible for writing connector secrets to the correct @@ -178,6 +182,8 @@ open class SecretsRepositoryWriter( * Takes in a connector configuration with secrets. Saves the secrets and returns the configuration * object with the secrets removed and replaced with pointers to the environment secret persistence. * + * This method is intended for ephemeral secrets, hence the lack of workspace. + * * @param fullConfig full config * @param spec connector specification * @return partial config @@ -186,13 +192,21 @@ open class SecretsRepositoryWriter( fullConfig: JsonNode, spec: ConnectorSpecification, ): JsonNode { - return splitSecretConfig(NO_WORKSPACE, fullConfig, spec, secretPersistence) + return splitSecretConfig( + NO_WORKSPACE, + fullConfig, + spec, + secretPersistence, + Instant.now().plus(EPHEMERAL_SECRET_LIFE_DURATION), + ) } /** * Takes in a connector configuration with secrets. 
Saves the secrets and returns the configuration * object with the secrets removed and replaced with pointers to the provided runtime secret persistence. * + * This method is intended for ephemeral secrets, hence the lack of workspace. + * * @param fullConfig full config * @param spec connector specification * @param runtimeSecretPersistence runtime secret persistence @@ -203,7 +217,13 @@ open class SecretsRepositoryWriter( spec: ConnectorSpecification, runtimeSecretPersistence: RuntimeSecretPersistence, ): JsonNode { - return splitSecretConfig(NO_WORKSPACE, fullConfig, spec, runtimeSecretPersistence) + return splitSecretConfig( + NO_WORKSPACE, + fullConfig, + spec, + runtimeSecretPersistence, + Instant.now().plus(EPHEMERAL_SECRET_LIFE_DURATION), + ) } private fun splitSecretConfig( @@ -211,6 +231,7 @@ open class SecretsRepositoryWriter( fullConfig: JsonNode, spec: ConnectorSpecification, secretPersistence: SecretPersistence, + expireTime: Instant? = null, ): JsonNode { val splitSecretConfig: SplitSecretConfig = SecretsHelpers.splitConfig( @@ -219,8 +240,9 @@ open class SecretsRepositoryWriter( spec.connectionSpecification, secretPersistence, ) + // modify this to add expire time splitSecretConfig.getCoordinateToPayload().forEach { (coordinate: SecretCoordinate, payload: String) -> - secretPersistence.write(coordinate, payload) + secretPersistence.writeWithExpiry(coordinate, payload, expireTime) } return splitSecretConfig.partialConfig } diff --git a/airbyte-config/config-secrets/src/main/kotlin/secrets/persistence/AwsSecretManagerPersistence.kt b/airbyte-config/config-secrets/src/main/kotlin/secrets/persistence/AwsSecretManagerPersistence.kt index 16fb3e330cf..2230bdd0f38 100644 --- a/airbyte-config/config-secrets/src/main/kotlin/secrets/persistence/AwsSecretManagerPersistence.kt +++ b/airbyte-config/config-secrets/src/main/kotlin/secrets/persistence/AwsSecretManagerPersistence.kt @@ -118,7 +118,7 @@ class AwsSecretManagerPersistence(private val awsClient: 
AwsClient, private val * * @param coordinate SecretCoordinate to delete. */ - private fun deleteSecret(coordinate: SecretCoordinate) { + override fun delete(coordinate: SecretCoordinate) { awsClient.client.deleteSecret( DeleteSecretRequest() .withSecretId(coordinate.coordinateBase) @@ -180,7 +180,7 @@ class AwsClient( if (serializedConfig == null) { logger.debug { "fetching access key/secret key based AWS secret manager" } AWSSecretsManagerClientBuilder.standard().withRegion(awsRegion).apply { - if (awsAccessKey != null && awsSecretKey != null) { + if (!awsAccessKey.isNullOrEmpty() && !awsSecretKey.isNullOrEmpty()) { withCredentials(AWSStaticCredentialsProvider(BasicAWSCredentials(awsAccessKey, awsSecretKey))) } }.build() @@ -188,7 +188,7 @@ class AwsClient( logger.debug { "fetching role based AWS secret manager" } val stsClient = AWSSecurityTokenServiceClientBuilder.standard().withRegion(awsRegion).apply { - if (awsAccessKey != null && awsSecretKey != null) { + if (!awsAccessKey.isNullOrEmpty() && !awsSecretKey.isNullOrEmpty()) { withCredentials(AWSStaticCredentialsProvider(BasicAWSCredentials(awsAccessKey, awsSecretKey))) } }.build() diff --git a/airbyte-config/config-secrets/src/main/kotlin/secrets/persistence/GoogleSecretManagerPersistence.kt b/airbyte-config/config-secrets/src/main/kotlin/secrets/persistence/GoogleSecretManagerPersistence.kt index ebe822fd8eb..d638e6d56be 100644 --- a/airbyte-config/config-secrets/src/main/kotlin/secrets/persistence/GoogleSecretManagerPersistence.kt +++ b/airbyte-config/config-secrets/src/main/kotlin/secrets/persistence/GoogleSecretManagerPersistence.kt @@ -7,6 +7,7 @@ package io.airbyte.config.secrets.persistence import com.google.api.gax.core.FixedCredentialsProvider import com.google.api.gax.rpc.NotFoundException import com.google.auth.oauth2.ServiceAccountCredentials +import com.google.cloud.Timestamp import com.google.cloud.secretmanager.v1.ProjectName import com.google.cloud.secretmanager.v1.Replication import 
com.google.cloud.secretmanager.v1.Secret @@ -24,6 +25,7 @@ import jakarta.inject.Named import jakarta.inject.Singleton import java.io.ByteArrayInputStream import java.nio.charset.StandardCharsets +import java.time.Instant private val logger = KotlinLogging.logger {} @@ -52,7 +54,7 @@ class GoogleSecretManagerPersistence( return response.payload.data.toStringUtf8() } } catch (e: NotFoundException) { - logger.warn(e) { "Unable to locate secret for coordinate ${coordinate.fullCoordinate}." } + logger.warn { "Unable to locate secret for coordinate ${coordinate.fullCoordinate}." } return "" } catch (e: Exception) { logger.error(e) { "Unable to read secret for coordinate ${coordinate.fullCoordinate}. " } @@ -64,15 +66,7 @@ class GoogleSecretManagerPersistence( coordinate: SecretCoordinate, payload: String, ) { - googleSecretManagerServiceClient.createClient().use { client -> - if (read(coordinate).isEmpty()) { - val secretBuilder = Secret.newBuilder().setReplication(replicationPolicy) - client.createSecret(ProjectName.of(gcpProjectId), coordinate.fullCoordinate, secretBuilder.build()) - } - val name = SecretName.of(gcpProjectId, coordinate.fullCoordinate) - val secretPayload = SecretPayload.newBuilder().setData(ByteString.copyFromUtf8(payload)).build() - client.addSecretVersion(name, secretPayload) - } + writeWithExpiry(coordinate, payload) } companion object { @@ -88,6 +82,36 @@ class GoogleSecretManagerPersistence( .setAutomatic(Replication.Automatic.newBuilder().build()) .build() } + + override fun writeWithExpiry( + coordinate: SecretCoordinate, + payload: String, + expiry: Instant?, + ) { + googleSecretManagerServiceClient.createClient().use { client -> + if (read(coordinate).isEmpty()) { + val secretBuilder = Secret.newBuilder().setReplication(replicationPolicy) + + expiry?.let { + val expireTime = com.google.protobuf.Timestamp.newBuilder().setSeconds(it.epochSecond).build() + secretBuilder.setExpireTime(expireTime) + } + + 
client.createSecret(ProjectName.of(gcpProjectId), coordinate.fullCoordinate, secretBuilder.build()) + } + + val name = SecretName.of(gcpProjectId, coordinate.fullCoordinate) + val secretPayload = SecretPayload.newBuilder().setData(ByteString.copyFromUtf8(payload)).build() + client.addSecretVersion(name, secretPayload) + } + } + + override fun delete(coordinate: SecretCoordinate) { + googleSecretManagerServiceClient.createClient().use { client -> + val secretName = SecretName.of(gcpProjectId, coordinate.fullCoordinate) + client.deleteSecret(secretName) + } + } } @Singleton diff --git a/airbyte-config/config-secrets/src/main/kotlin/secrets/persistence/LocalTestingSecretPersistence.kt b/airbyte-config/config-secrets/src/main/kotlin/secrets/persistence/LocalTestingSecretPersistence.kt index 83c69b9fdc8..b694da88faa 100644 --- a/airbyte-config/config-secrets/src/main/kotlin/secrets/persistence/LocalTestingSecretPersistence.kt +++ b/airbyte-config/config-secrets/src/main/kotlin/secrets/persistence/LocalTestingSecretPersistence.kt @@ -60,4 +60,8 @@ open class LocalTestingSecretPersistence( coordinate.fullCoordinate, ).execute() } + + override fun delete(coordinate: SecretCoordinate) { + return + } } diff --git a/airbyte-config/config-secrets/src/main/kotlin/secrets/persistence/NoOpSecretPersistence.kt b/airbyte-config/config-secrets/src/main/kotlin/secrets/persistence/NoOpSecretPersistence.kt index a66a9bdbd01..7b6ea560bf9 100644 --- a/airbyte-config/config-secrets/src/main/kotlin/secrets/persistence/NoOpSecretPersistence.kt +++ b/airbyte-config/config-secrets/src/main/kotlin/secrets/persistence/NoOpSecretPersistence.kt @@ -19,4 +19,8 @@ class NoOpSecretPersistence : SecretPersistence { ) { return } + + override fun delete(coordinate: SecretCoordinate) { + return + } } diff --git a/airbyte-config/config-secrets/src/main/kotlin/secrets/persistence/RuntimeSecretPersistence.kt b/airbyte-config/config-secrets/src/main/kotlin/secrets/persistence/RuntimeSecretPersistence.kt 
index 484e5cc7da1..4313cd502ae 100644 --- a/airbyte-config/config-secrets/src/main/kotlin/secrets/persistence/RuntimeSecretPersistence.kt +++ b/airbyte-config/config-secrets/src/main/kotlin/secrets/persistence/RuntimeSecretPersistence.kt @@ -77,6 +77,10 @@ class RuntimeSecretPersistence(private val secretPersistenceConfig: SecretPersis secretPersistence.write(coordinate, payload) } + override fun delete(coordinate: SecretCoordinate) { + return + } + private fun buildAwsSecretManager(configuration: Map): AwsSecretManagerPersistence { // We default to ACCESS_KEY auth val authType = configuration["auth_type"]?.uppercase() ?: AwsAuthType.ACCESS_KEY.value diff --git a/airbyte-config/config-secrets/src/main/kotlin/secrets/persistence/SecretPersistence.kt b/airbyte-config/config-secrets/src/main/kotlin/secrets/persistence/SecretPersistence.kt index 55d00074252..d82f6e30962 100644 --- a/airbyte-config/config-secrets/src/main/kotlin/secrets/persistence/SecretPersistence.kt +++ b/airbyte-config/config-secrets/src/main/kotlin/secrets/persistence/SecretPersistence.kt @@ -5,6 +5,7 @@ package io.airbyte.config.secrets.persistence import io.airbyte.config.secrets.SecretCoordinate +import java.time.Instant /** * Provides a read-only interface to a backing secrets store similar to [SecretPersistence]. @@ -36,4 +37,15 @@ interface SecretPersistence : ReadOnlySecretPersistence { coordinate: SecretCoordinate, payload: String, ) + + fun writeWithExpiry( + coordinate: SecretCoordinate, + payload: String, + expiry: Instant? = null, + ) { + // Default implementation does not support expiry. 
+ write(coordinate, payload) + } + + fun delete(coordinate: SecretCoordinate) } diff --git a/airbyte-config/config-secrets/src/main/kotlin/secrets/persistence/VaultSecretPersistence.kt b/airbyte-config/config-secrets/src/main/kotlin/secrets/persistence/VaultSecretPersistence.kt index 41ff319e5b5..16a764010fb 100644 --- a/airbyte-config/config-secrets/src/main/kotlin/secrets/persistence/VaultSecretPersistence.kt +++ b/airbyte-config/config-secrets/src/main/kotlin/secrets/persistence/VaultSecretPersistence.kt @@ -57,6 +57,10 @@ class VaultSecretPersistence( } } + override fun delete(coordinate: SecretCoordinate) { + return + } + companion object { private const val SECRET_KEY = "value" } diff --git a/airbyte-config/config-secrets/src/test/kotlin/secrets/persistence/AwsSecretManagerPersistenceTest.kt b/airbyte-config/config-secrets/src/test/kotlin/secrets/persistence/AwsSecretManagerPersistenceTest.kt index 970e92b6959..579c1e53640 100644 --- a/airbyte-config/config-secrets/src/test/kotlin/secrets/persistence/AwsSecretManagerPersistenceTest.kt +++ b/airbyte-config/config-secrets/src/test/kotlin/secrets/persistence/AwsSecretManagerPersistenceTest.kt @@ -7,6 +7,8 @@ package io.airbyte.config.secrets.persistence import com.amazonaws.secretsmanager.caching.SecretCache import com.amazonaws.services.secretsmanager.AWSSecretsManager import com.amazonaws.services.secretsmanager.model.CreateSecretResult +import com.amazonaws.services.secretsmanager.model.DeleteSecretRequest +import com.amazonaws.services.secretsmanager.model.DeleteSecretResult import com.amazonaws.services.secretsmanager.model.DescribeSecretResult import com.amazonaws.services.secretsmanager.model.GetSecretValueResult import com.amazonaws.services.secretsmanager.model.ResourceNotFoundException @@ -174,4 +176,26 @@ class AwsSecretManagerPersistenceTest { verify { mockAwsClient.updateSecret(any()) } } + + @Test + fun `test deleting a secret via the client deletes the secret`() { + val secret = "secret value" + 
val coordinate = SecretCoordinate.fromFullCoordinate("secret_coordinate_v1") + val mockClient: AwsClient = mockk() + val mockCache: AwsCache = mockk() + val mockAwsCache: SecretCache = mockk() + val mockAwsClient: AWSSecretsManager = mockk() + val persistence = AwsSecretManagerPersistence(mockClient, mockCache) + every { mockAwsCache.getSecretString(any()) } returns secret + every { mockAwsClient.deleteSecret(any()) } returns mockk() + every { mockCache.cache } returns mockAwsCache + every { mockClient.client } returns mockAwsClient + every { mockClient.serializedConfig } returns null + every { mockClient.kmsKeyArn } returns null + every { mockClient.tags } returns emptyMap() + + persistence.delete(coordinate) + + verify { mockAwsClient.deleteSecret(any()) } + } } diff --git a/airbyte-config/config-secrets/src/test/kotlin/secrets/persistence/GoogleSecretManagerPersistenceTest.kt b/airbyte-config/config-secrets/src/test/kotlin/secrets/persistence/GoogleSecretManagerPersistenceTest.kt index 462fff0a76e..5209ce7b812 100644 --- a/airbyte-config/config-secrets/src/test/kotlin/secrets/persistence/GoogleSecretManagerPersistenceTest.kt +++ b/airbyte-config/config-secrets/src/test/kotlin/secrets/persistence/GoogleSecretManagerPersistenceTest.kt @@ -16,12 +16,17 @@ import com.google.cloud.secretmanager.v1.SecretVersion import com.google.cloud.secretmanager.v1.SecretVersionName import com.google.protobuf.ByteString import io.airbyte.config.secrets.SecretCoordinate +import io.airbyte.config.secrets.persistence.GoogleSecretManagerPersistence.Companion.replicationPolicy import io.grpc.Status +import io.mockk.Runs import io.mockk.every +import io.mockk.just import io.mockk.mockk import io.mockk.verify import org.junit.jupiter.api.Assertions import org.junit.jupiter.api.Test +import java.time.Duration +import java.time.Instant class GoogleSecretManagerPersistenceTest { @Test @@ -102,6 +107,43 @@ class GoogleSecretManagerPersistenceTest { verify { 
mockGoogleClient.addSecretVersion(any(), any()) } } + @Test + fun `test writing a secret with expiry via the client creates the secret with expiry`() { + val secret = "secret value" + val projectId = "test" + val coordinate = SecretCoordinate.fromFullCoordinate("secret_coordinate_v1") + val mockClient: GoogleSecretManagerServiceClient = mockk() + val mockGoogleClient: SecretManagerServiceClient = mockk() + val mockResponse: AccessSecretVersionResponse = mockk() + val mockPayload: SecretPayload = mockk() + val persistence = GoogleSecretManagerPersistence(projectId, mockClient) + + every { mockPayload.data } returns ByteString.copyFromUtf8(secret) + every { mockResponse.payload } returns mockPayload + every { mockGoogleClient.accessSecretVersion(ofType(SecretVersionName::class)) } throws + NotFoundException( + NullPointerException("test"), + GrpcStatusCode.of( + Status.Code.NOT_FOUND, + ), + false, + ) + every { mockGoogleClient.createSecret(any(), any(), any()) } returns mockk() + every { mockGoogleClient.addSecretVersion(any(), any()) } returns mockk() + every { mockGoogleClient.close() } returns Unit + every { mockClient.createClient() } returns mockGoogleClient + + val expiry = Instant.now().plus(Duration.ofMinutes(1)) + persistence.writeWithExpiry(coordinate, secret, expiry) + + val sb = + Secret.newBuilder().setReplication( + replicationPolicy, + ).setExpireTime(com.google.protobuf.Timestamp.newBuilder().setSeconds(expiry.epochSecond).build()).build() + verify { mockGoogleClient.createSecret(ProjectName.of("test"), coordinate.fullCoordinate, sb) } + verify { mockGoogleClient.addSecretVersion(any(), any()) } + } + @Test fun `test writing a secret via the client updates an existing secret`() { val secret = "secret value" @@ -124,4 +166,26 @@ class GoogleSecretManagerPersistenceTest { verify { mockGoogleClient.addSecretVersion(any(), any()) } } + + @Test + fun `test deleting a secret via the client deletes the secret`() { + val secret = "secret value" + val 
projectId = "test" + val coordinate = SecretCoordinate.fromFullCoordinate("secret_coordinate_v1") + val mockClient: GoogleSecretManagerServiceClient = mockk() + val mockGoogleClient: SecretManagerServiceClient = mockk() + val mockResponse: AccessSecretVersionResponse = mockk() + val mockPayload: SecretPayload = mockk() + val persistence = GoogleSecretManagerPersistence(projectId, mockClient) + + every { mockPayload.data } returns ByteString.copyFromUtf8(secret) + every { mockResponse.payload } returns mockPayload + every { mockClient.createClient() } returns mockGoogleClient + every { mockGoogleClient.deleteSecret(ofType(SecretName::class)) } just Runs + every { mockGoogleClient.close() } returns Unit + + persistence.delete(coordinate) + + verify { mockGoogleClient.deleteSecret(any()) } + } } diff --git a/airbyte-config/config-secrets/src/testFixtures/kotlin/secrets/MemorySecretPersistence.kt b/airbyte-config/config-secrets/src/testFixtures/kotlin/secrets/MemorySecretPersistence.kt index b5712e4770f..96e43b92b74 100644 --- a/airbyte-config/config-secrets/src/testFixtures/kotlin/secrets/MemorySecretPersistence.kt +++ b/airbyte-config/config-secrets/src/testFixtures/kotlin/secrets/MemorySecretPersistence.kt @@ -23,6 +23,10 @@ class MemorySecretPersistence : SecretPersistence { secretMap[coordinate] = payload } + override fun delete(coordinate: SecretCoordinate) { + secretMap.remove(coordinate) + } + val map: Map get() = secretMap.toMutableMap() } diff --git a/airbyte-config/init/Dockerfile b/airbyte-config/init/Dockerfile index b18390e8d59..fd9cd7d20eb 100644 --- a/airbyte-config/init/Dockerfile +++ b/airbyte-config/init/Dockerfile @@ -1,4 +1,4 @@ -ARG ALPINE_IMAGE=alpine:3.13 +ARG ALPINE_IMAGE=alpine:3.18 FROM ${ALPINE_IMAGE} AS seed WORKDIR /app diff --git a/airbyte-config/init/readme.md b/airbyte-config/init/README.md similarity index 100% rename from airbyte-config/init/readme.md rename to airbyte-config/init/README.md diff --git 
a/airbyte-config/init/build.gradle.kts b/airbyte-config/init/build.gradle.kts index d7db99f93d5..1c1dd12b3a5 100644 --- a/airbyte-config/init/build.gradle.kts +++ b/airbyte-config/init/build.gradle.kts @@ -1,51 +1,51 @@ plugins { - id("io.airbyte.gradle.jvm.lib") - id("io.airbyte.gradle.docker") - id("io.airbyte.gradle.publish") + id("io.airbyte.gradle.jvm.lib") + id("io.airbyte.gradle.docker") + id("io.airbyte.gradle.publish") } dependencies { - compileOnly(libs.lombok) - annotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut - annotationProcessor(libs.bundles.micronaut.annotation.processor) - api(libs.bundles.micronaut.annotation) + compileOnly(libs.lombok) + annotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut + annotationProcessor(libs.bundles.micronaut.annotation.processor) + api(libs.bundles.micronaut.annotation) - implementation(project(":airbyte-commons")) - implementation("commons-cli:commons-cli:1.4") - implementation(project(":airbyte-config:specs")) - implementation(project(":airbyte-config:config-models")) - implementation(project(":airbyte-config:config-persistence")) - implementation(project(":airbyte-data")) - implementation(project(":airbyte-featureflag")) - implementation(project(":airbyte-notification")) - implementation(project(":airbyte-metrics:metrics-lib")) - implementation(project(":airbyte-persistence:job-persistence")) - implementation(libs.airbyte.protocol) - implementation(project(":airbyte-json-validation")) - implementation(libs.guava) + implementation(project(":airbyte-commons")) + implementation("commons-cli:commons-cli:1.4") + implementation(project(":airbyte-config:specs")) + implementation(project(":airbyte-config:config-models")) + implementation(project(":airbyte-config:config-persistence")) + implementation(project(":airbyte-data")) + implementation(project(":airbyte-featureflag")) + implementation(project(":airbyte-notification")) + implementation(project(":airbyte-metrics:metrics-lib")) 
+ implementation(project(":airbyte-persistence:job-persistence")) + implementation(libs.airbyte.protocol) + implementation(project(":airbyte-json-validation")) + implementation(libs.guava) - testImplementation(project(":airbyte-test-utils")) - testRuntimeOnly(libs.junit.jupiter.engine) - testImplementation(libs.bundles.junit) - testImplementation(libs.assertj.core) - testImplementation(libs.junit.pioneer) + testImplementation(project(":airbyte-test-utils")) + testRuntimeOnly(libs.junit.jupiter.engine) + testImplementation(libs.bundles.junit) + testImplementation(libs.assertj.core) + testImplementation(libs.junit.pioneer) } airbyte { - docker { - imageName = "init" - } + docker { + imageName = "init" + } } val copyScripts = tasks.register("copyScripts") { - from("scripts") - into("build/airbyte/docker/bin/scripts") + from("scripts") + into("build/airbyte/docker/bin/scripts") } tasks.named("dockerBuildImage") { - dependsOn(copyScripts) + dependsOn(copyScripts) } tasks.processResources { - from("${project.rootDir}/airbyte-connector-builder-resources") + from("${project.rootDir}/airbyte-connector-builder-resources") } diff --git a/airbyte-config/init/src/main/resources/icons/propel.svg b/airbyte-config/init/src/main/resources/icons/propel.svg new file mode 100644 index 00000000000..3c8d80a317d --- /dev/null +++ b/airbyte-config/init/src/main/resources/icons/propel.svg @@ -0,0 +1,23 @@ + + + + + + + + + + + + + + + + + + + + + + + diff --git a/airbyte-config/init/src/main/resources/icons/zenhub.svg b/airbyte-config/init/src/main/resources/icons/zenhub.svg new file mode 100644 index 00000000000..ccca65a2b49 --- /dev/null +++ b/airbyte-config/init/src/main/resources/icons/zenhub.svg @@ -0,0 +1,21 @@ + + + + + + + + + + + + + + + + + + + + + diff --git a/airbyte-config/specs/build.gradle.kts b/airbyte-config/specs/build.gradle.kts index d08c1907a40..beecd09b264 100644 --- a/airbyte-config/specs/build.gradle.kts +++ b/airbyte-config/specs/build.gradle.kts @@ -1,53 +1,54 @@ 
import de.undercouch.gradle.tasks.download.Download plugins { - id("io.airbyte.gradle.jvm.lib") - id("io.airbyte.gradle.publish") - id("de.undercouch.download") version "5.4.0" + id("io.airbyte.gradle.jvm.lib") + id("io.airbyte.gradle.publish") + id("de.undercouch.download") version "5.4.0" } dependencies { - compileOnly(libs.lombok) - annotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut - annotationProcessor(libs.bundles.micronaut.annotation.processor) - - api(libs.bundles.micronaut.annotation) - - implementation(project(":airbyte-commons")) - implementation(project(":airbyte-config:config-models")) - implementation(project(":airbyte-json-validation")) - - implementation(libs.commons.cli) - implementation(libs.commons.io) - implementation(platform(libs.fasterxml)) - implementation(libs.bundles.jackson) - implementation(libs.google.cloud.storage) - implementation(libs.micronaut.cache.caffeine) - implementation(libs.airbyte.protocol) - implementation(libs.okhttp) - - testRuntimeOnly(libs.junit.jupiter.engine) - testImplementation(libs.bundles.junit) - testImplementation(libs.assertj.core) - testImplementation(libs.mockwebserver) - testImplementation(libs.junit.pioneer) + compileOnly(libs.lombok) + annotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut + annotationProcessor(libs.bundles.micronaut.annotation.processor) + + api(libs.bundles.micronaut.annotation) + + implementation(project(":airbyte-commons")) + implementation(project(":airbyte-config:config-models")) + implementation(project(":airbyte-json-validation")) + + implementation(libs.commons.cli) + implementation(libs.commons.io) + implementation(platform(libs.fasterxml)) + implementation(libs.bundles.jackson) + implementation(libs.google.cloud.storage) + implementation(libs.micronaut.cache.caffeine) + implementation(libs.airbyte.protocol) + implementation(libs.okhttp) + + testRuntimeOnly(libs.junit.jupiter.engine) + testImplementation(libs.bundles.junit) + 
testImplementation(libs.assertj.core) + testImplementation(libs.mockwebserver) + testImplementation(libs.junit.pioneer) } airbyte { - spotless { - excludes = listOf( - "src/main/resources/seed/oss_registry.json", - "src/main/resources/seed/local_oss_registry.json", - ) - } + spotless { + excludes = listOf( + "src/main/resources/seed/oss_registry.json", + "src/main/resources/seed/local_oss_registry.json", + ) + } } val downloadConnectorRegistry = tasks.register("downloadConnectorRegistry") { - src("https://connectors.airbyte.com/files/registries/v0/oss_registry.json") - dest(File(projectDir, "src/main/resources/seed/local_oss_registry.json")) - overwrite( true) + src("https://connectors.airbyte.com/files/registries/v0/oss_registry.json") + dest(File(projectDir, "src/main/resources/seed/local_oss_registry.json")) + overwrite(true) + onlyIfModified(true) } tasks.processResources { - dependsOn(downloadConnectorRegistry) + dependsOn(downloadConnectorRegistry) } diff --git a/airbyte-config/specs/src/main/java/io/airbyte/config/specs/RemoteDefinitionsProvider.java b/airbyte-config/specs/src/main/java/io/airbyte/config/specs/RemoteDefinitionsProvider.java index f558555a035..ffefb83b59a 100644 --- a/airbyte-config/specs/src/main/java/io/airbyte/config/specs/RemoteDefinitionsProvider.java +++ b/airbyte-config/specs/src/main/java/io/airbyte/config/specs/RemoteDefinitionsProvider.java @@ -168,9 +168,9 @@ URL getRegistryEntryUrl(final String connectorName, final String version) { } @VisibleForTesting - URL getDocUrl(final String connectorRepository, final String version, final Boolean inapp) { + URL getDocUrl(final String connectorRepository, final String version) { try { - return remoteRegistryBaseUrl.resolve(String.format("metadata/%s/%s/doc%s", connectorRepository, version, inapp ? 
".inapp.md" : ".md")).toURL(); + return remoteRegistryBaseUrl.resolve(String.format("metadata/%s/%s/doc.md", connectorRepository, version)).toURL(); } catch (final MalformedURLException e) { throw new RuntimeException("Invalid URL format", e); } @@ -229,8 +229,8 @@ Optional getConnectorRegistryEntryJson(final String connectorName, fin * * @return Optional containing the connector doc if it can be found, or empty otherwise. */ - public Optional getConnectorDocumentation(final String connectorRepository, final String version, final Boolean inapp) { - final URL docUrl = getDocUrl(connectorRepository, version, inapp); + public Optional getConnectorDocumentation(final String connectorRepository, final String version) { + final URL docUrl = getDocUrl(connectorRepository, version); final Request request = new Request.Builder() .url(docUrl) .header(ACCEPT, MediaType.APPLICATION_JSON) @@ -247,7 +247,7 @@ public Optional getConnectorDocumentation(final String connectorReposito } } catch (final IOException e) { throw new RuntimeException( - String.format("Failed to fetch %s connector documentation for %s:%s", inapp ? 
"inapp" : "full", connectorRepository, version), e); + String.format("Failed to fetch connector documentation for %s:%s", connectorRepository, version), e); } } diff --git a/airbyte-config/specs/src/test/java/io/airbyte/config/specs/RemoteDefinitionsProviderTest.java b/airbyte-config/specs/src/test/java/io/airbyte/config/specs/RemoteDefinitionsProviderTest.java index c656bcdd55e..e7d0a54191c 100644 --- a/airbyte-config/specs/src/test/java/io/airbyte/config/specs/RemoteDefinitionsProviderTest.java +++ b/airbyte-config/specs/src/test/java/io/airbyte/config/specs/RemoteDefinitionsProviderTest.java @@ -273,7 +273,7 @@ void testGetConnectorDocumentation() { final RemoteDefinitionsProvider remoteDefinitionsProvider = new RemoteDefinitionsProvider(baseUrl, DEPLOYMENT_MODE, TimeUnit.SECONDS.toMillis(30)); - final Optional documentationResult = remoteDefinitionsProvider.getConnectorDocumentation(CONNECTOR_REPOSITORY, CONNECTOR_VERSION, false); + final Optional documentationResult = remoteDefinitionsProvider.getConnectorDocumentation(CONNECTOR_REPOSITORY, CONNECTOR_VERSION); assertTrue(documentationResult.isPresent()); assertEquals(documentationResult.get(), connectorDocumentationBody); } diff --git a/airbyte-connector-builder-resources/CDK_VERSION b/airbyte-connector-builder-resources/CDK_VERSION index afed694eede..83dcd12cbe8 100644 --- a/airbyte-connector-builder-resources/CDK_VERSION +++ b/airbyte-connector-builder-resources/CDK_VERSION @@ -1 +1 @@ -0.65.0 +0.83.0 diff --git a/airbyte-connector-builder-server/Dockerfile b/airbyte-connector-builder-server/Dockerfile index ef0d3631df6..e7e71bbe55a 100644 --- a/airbyte-connector-builder-server/Dockerfile +++ b/airbyte-connector-builder-server/Dockerfile @@ -1,8 +1,8 @@ -ARG BASE_IMAGE=airbyte/airbyte-base-java-python-image:1.1.0 -FROM ${BASE_IMAGE} AS connector-builder-server +ARG JAVA_PYTHON_BASE_IMAGE_VERSION=2.1.0 +FROM airbyte/airbyte-base-java-python-image:${JAVA_PYTHON_BASE_IMAGE_VERSION} AS connector-builder-server # 
Set up CDK requirements -ARG CDK_VERSION=0.65.0 +ARG CDK_VERSION=0.83.0 ENV CDK_PYTHON=${PYENV_ROOT}/versions/${PYTHON_VERSION}/bin/python ENV CDK_ENTRYPOINT ${PYENV_ROOT}/versions/${PYTHON_VERSION}/lib/python3.9/site-packages/airbyte_cdk/connector_builder/main.py # Set up CDK @@ -18,7 +18,10 @@ ENV VERSION ${VERSION} WORKDIR /app # This is automatically unzipped by Docker +USER root ADD airbyte-app.tar /app +RUN chown -R airbyte:airbyte /app +USER airbyte:airbyte # wait for upstream dependencies to become available before starting server ENTRYPOINT ["/bin/bash", "-c", "airbyte-app/bin/${APPLICATION}"] diff --git a/airbyte-connector-builder-server/README.md b/airbyte-connector-builder-server/README.md index 3e69357b68c..714cfa73d38 100644 --- a/airbyte-connector-builder-server/README.md +++ b/airbyte-connector-builder-server/README.md @@ -22,25 +22,67 @@ export CDK_PYTHON= export CDK_ENTRYPOINT= ``` +Example commands: +``` +export CDK_PYTHON=~/code/airbyte/airbyte-cdk/python/.venv/bin/python +export CDK_ENTRYPOINT=~/code/airbyte/airbyte-cdk/python/airbyte_cdk/connector_builder/main.py +``` + Then run the server (You can also do this w/o build) ```bash ./gradlew -p oss airbyte-connector-builder-server:run ``` -The server is now reachable on localhost:80 +The server is now reachable on localhost:8080 If you want to run the full platform with this local instance, you must edit the `.env` file as follows: ``` bash # replace this -CONNECTOR_BUILDER_SERVER_API_HOST=http://airbyte-connector-builder-server:80 +CONNECTOR_BUILDER_SERVER_API_HOST=http://airbyte-connector-builder-server:8080 # with this -CONNECTOR_BUILDER_SERVER_API_HOST=http://host.docker.internal:80 +CONNECTOR_BUILDER_SERVER_API_HOST=http://host.docker.internal:8080 ``` Note: there are two different, but very similarly-named, environment variables; you must edit `CONNECTOR_BUILDER_SERVER_API_HOST`, not `CONNECTOR_BUILDER_API_HOST`. 
+### Running the platform with support for custom components (docker-compose only) + +1. Run the OSS platform locally with builder docker-compose extension + 1. Example command: PATH_TO_CONNECTORS=/Users/alex/code/airbyte/airbyte-integrations/connectors docker compose -f docker-compose.yaml -f docker-compose.builder.yaml up + 2. Where PATH_TO_CONNECTORS points to the airbyte-integrations/connectors subdirectory in the opensource airbyte repository +2. Open the connector builder and develop your connector +3. When needing a custom component: + 1. Switch to the YAML view + 2. Define the custom component +4. Write the custom component and its unit tests +5. Run test read + +Note that connector modules are added to the path at startup time. The platform instance must be restarted if you add a new connector module. + +Follow these additional instructions if the connector requires 3rd party libraries that are not available in the CDK: + +Developing connectors that require 3rd party libraries can be done by running the connector-builder-server locally and pointing to a custom virtual environment. + +1. Create a virtual environment and install the CDK + any 3rd party library required +2. export CDK_PYTHON= + - `CDK_PYTHON` should point to the virtual environment's python executable (example: `export CDK_PYTHON=~/code/airbyte/airbyte-cdk/python/.venv/bin/python`) +3. export CDK_ENTRYPOINT= +4. ./gradlew -p oss airbyte-connector-builder-server:run + 1. The server is now reachable on localhost:8080 +5. Update the server to point to port 8080 by editing .env and replacing + + ``` + CONNECTOR_BUILDER_SERVER_API_HOST=http://airbyte-connector-builder-server:8080 + ``` + with + ``` + CONNECTOR_BUILDER_SERVER_API_HOST=http://host.docker.internal:8080 + ``` + +6. 
Follow the standard instructions + ## OpenAPI generation Run it via Gradle by running this from the Airbyte project root: diff --git a/airbyte-connector-builder-server/build.gradle.kts b/airbyte-connector-builder-server/build.gradle.kts index 36528111045..1cfe65ef841 100644 --- a/airbyte-connector-builder-server/build.gradle.kts +++ b/airbyte-connector-builder-server/build.gradle.kts @@ -1,159 +1,166 @@ import com.bmuschko.gradle.docker.tasks.image.DockerBuildImage -import java.util.Properties import org.openapitools.generator.gradle.plugin.tasks.GenerateTask +import java.util.Properties plugins { - id("io.airbyte.gradle.jvm.app") - id("io.airbyte.gradle.docker") - id("org.openapi.generator") - id("io.airbyte.gradle.publish") + id("io.airbyte.gradle.jvm.app") + id("io.airbyte.gradle.docker") + id("org.openapi.generator") + id("io.airbyte.gradle.publish") } dependencies { - // Micronaut dependencies) - annotationProcessor(platform(libs.micronaut.platform)) - annotationProcessor(libs.bundles.micronaut.annotation.processor) - - implementation(libs.jackson.datatype) - implementation("com.googlecode.json-simple:json-simple:1.1.1") - - // Cloud service dependencies. 
These are not strictly necessary yet, but likely needed for any full-fledged cloud service) - implementation(libs.bundles.datadog) - // implementation(libs.bundles.temporal uncomment this when we start using temporal to invoke connector commands) - implementation(libs.sentry.java) - implementation(libs.guava) - implementation(platform(libs.micronaut.platform)) - implementation(libs.bundles.micronaut) - implementation(libs.micronaut.http) - implementation(libs.micronaut.security) - implementation(libs.jakarta.annotation.api) - implementation(libs.jakarta.ws.rs.api) - - implementation(project(":airbyte-commons")) - - // OpenAPI code generation(dependencies) - implementation(libs.swagger.annotations) - - // Internal dependencies) - implementation(project(":airbyte-commons")) - implementation(project(":airbyte-commons-protocol")) - implementation(project(":airbyte-commons-server")) - implementation(project(":airbyte-commons-worker")) - implementation(project(":airbyte-config:config-models")) - implementation(project(":airbyte-config:config-persistence")) - implementation(project(":airbyte-config:init")) - implementation(project(":airbyte-metrics:metrics-lib")) - - implementation(libs.airbyte.protocol) - - runtimeOnly(libs.snakeyaml) - - testRuntimeOnly(libs.junit.jupiter.engine) - testImplementation(libs.bundles.junit) - testImplementation(libs.assertj.core) - - testImplementation(libs.junit.pioneer) + // Micronaut dependencies) + annotationProcessor(platform(libs.micronaut.platform)) + annotationProcessor(libs.bundles.micronaut.annotation.processor) + + implementation(libs.jackson.datatype) + implementation("com.googlecode.json-simple:json-simple:1.1.1") + + // Cloud service dependencies. 
These are not strictly necessary yet, but likely needed for any full-fledged cloud service) + implementation(libs.bundles.datadog) + // implementation(libs.bundles.temporal uncomment this when we start using temporal to invoke connector commands) + implementation(libs.sentry.java) + implementation(libs.guava) + implementation(platform(libs.micronaut.platform)) + implementation(libs.bundles.micronaut) + implementation(libs.bundles.micronaut.cache) + implementation(libs.micronaut.http) + implementation(libs.micronaut.security) + implementation(libs.jakarta.annotation.api) + implementation(libs.jakarta.ws.rs.api) + + implementation(project(":airbyte-commons")) + + // OpenAPI code generation(dependencies) + implementation(libs.swagger.annotations) + + // Internal dependencies) + implementation(project(":airbyte-commons")) + implementation(project(":airbyte-commons-protocol")) + implementation(project(":airbyte-commons-server")) + implementation(project(":airbyte-commons-worker")) + implementation(project(":airbyte-config:config-models")) + implementation(project(":airbyte-config:config-persistence")) + implementation(project(":airbyte-config:init")) + implementation(project(":airbyte-metrics:metrics-lib")) + + implementation(libs.airbyte.protocol) + + runtimeOnly(libs.snakeyaml) + + testRuntimeOnly(libs.junit.jupiter.engine) + testImplementation(libs.bundles.junit) + testImplementation(libs.assertj.core) + + testImplementation(libs.junit.pioneer) } val env = Properties().apply { - load(rootProject.file(".env.dev").inputStream()) + load(rootProject.file(".env.dev").inputStream()) } airbyte { - application { - mainClass = "io.airbyte.connector_builder.MicronautConnectorBuilderServerRunner" - defaultJvmArgs = listOf("-XX:+ExitOnOutOfMemoryError", "-XX:MaxRAMPercentage=75.0") - @Suppress("UNCHECKED_CAST") - localEnvVars.putAll(env.toMap() as Map) - localEnvVars.putAll(mapOf( - "AIRBYTE_ROLE" to (System.getenv("AIRBYTE_ROLE") ?: ""), + application { + mainClass = 
"io.airbyte.connector_builder.MicronautConnectorBuilderServerRunner" + defaultJvmArgs = listOf("-XX:+ExitOnOutOfMemoryError", "-XX:MaxRAMPercentage=75.0") + @Suppress("UNCHECKED_CAST") + localEnvVars.putAll(env.toMap() as Map) + localEnvVars.putAll( + mapOf( + "AIRBYTE_ROLE" to (System.getenv("AIRBYTE_ROLE") ?: ""), "AIRBYTE_VERSION" to env["VERSION"].toString(), // path to CDK virtual environment) - "CDK_PYTHON" to (System.getenv("CDK_PYTHON") ?: ""), + "CDK_PYTHON" to (System.getenv("CDK_PYTHON") ?: ""), // path to CDK connector builder"s main.py) "CDK_ENTRYPOINT" to (System.getenv("CDK_ENTRYPOINT") ?: ""), - )) - } - docker { - imageName = "connector-builder-server" - } + ) + ) + } + docker { + imageName = "connector-builder-server" + } } val generateOpenApiServer = tasks.register("generateOpenApiServer") { - val specFile = "$projectDir/src/main/openapi/openapi.yaml" - inputs.file(specFile) - inputSpec = specFile - outputDir = "$buildDir/generated/api/server" - - generatorName = "jaxrs-spec" - apiPackage = "io.airbyte.connector_builder.api.generated" - invokerPackage = "io.airbyte.connector_builder.api.invoker.generated" - modelPackage = "io.airbyte.connector_builder.api.model.generated" - - schemaMappings.putAll(mapOf( - "ConnectorConfig" to "com.fasterxml.jackson.databind.JsonNode", - "ConnectorManifest" to "com.fasterxml.jackson.databind.JsonNode", - )) - - // Our spec does not have nullable, but if it changes, this would be a gotcha that we would want to avoid) - configOptions.putAll(mapOf( - "dateLibrary" to "java8", - "generatePom" to "false", - "interfaceOnly" to "true", - /*) - JAX-RS generator does not respect nullable properties defined in the OpenApi Spec. - It means that if a field is not nullable but not set it is still returning a null value for this field in the serialized json. - The below Jackson annotation(is made to only(keep non null values in serialized json. 
- We are not yet using nullable=true properties in our OpenApi so this is a valid(workaround at the moment to circumvent the default JAX-RS behavior described above. - Feel free to read the conversation(on https://github.com/airbytehq/airbyte/pull/13370 for more details. - */ - "additionalModelTypeAnnotations" to "\n@com.fasterxml.jackson.annotation.JsonInclude(com.fasterxml.jackson.annotation.JsonInclude.Include.NON_NULL)", - )) - - doLast { - updateToJakartaApi(file("${outputDir.get()}/src/gen/java/${apiPackage.get().replace(".", "/")}")) - updateToJakartaApi(file("${outputDir.get()}/src/gen/java/${modelPackage.get().replace(".", "/")}")) - } + val specFile = "$projectDir/src/main/openapi/openapi.yaml" + inputs.file(specFile) + inputSpec = specFile + outputDir = "$buildDir/generated/api/server" + + generatorName = "jaxrs-spec" + apiPackage = "io.airbyte.connector_builder.api.generated" + invokerPackage = "io.airbyte.connector_builder.api.invoker.generated" + modelPackage = "io.airbyte.connector_builder.api.model.generated" + + schemaMappings.putAll( + mapOf( + "ConnectorConfig" to "com.fasterxml.jackson.databind.JsonNode", + "ConnectorManifest" to "com.fasterxml.jackson.databind.JsonNode", + ) + ) + + // Our spec does not have nullable, but if it changes, this would be a gotcha that we would want to avoid) + configOptions.putAll( + mapOf( + "dateLibrary" to "java8", + "generatePom" to "false", + "interfaceOnly" to "true", + /*) + JAX-RS generator does not respect nullable properties defined in the OpenApi Spec. + It means that if a field is not nullable but not set it is still returning a null value for this field in the serialized json. + The below Jackson annotation(is made to only(keep non null values in serialized json. + We are not yet using nullable=true properties in our OpenApi so this is a valid(workaround at the moment to circumvent the default JAX-RS behavior described above. 
+ Feel free to read the conversation(on https://github.com/airbytehq/airbyte/pull/13370 for more details. + */ + "additionalModelTypeAnnotations" to "\n@com.fasterxml.jackson.annotation.JsonInclude(com.fasterxml.jackson.annotation.JsonInclude.Include.NON_NULL)", + ) + ) + + doLast { + updateToJakartaApi(file("${outputDir.get()}/src/gen/java/${apiPackage.get().replace(".", "/")}")) + updateToJakartaApi(file("${outputDir.get()}/src/gen/java/${modelPackage.get().replace(".", "/")}")) + } } tasks.named("compileJava") { - dependsOn(generateOpenApiServer) + dependsOn(generateOpenApiServer) } //// Ensures that the generated models are compiled during the build step so they are available for use at runtime) sourceSets { - main { - java { - srcDirs("$buildDir/generated/api/server/src/gen/java") - } - resources { - srcDir("$projectDir/src/main/openapi/") - } + main { + java { + srcDirs("$buildDir/generated/api/server/src/gen/java") + } + resources { + srcDir("$projectDir/src/main/openapi/") } + } } val copyPythonDeps = tasks.register("copyPythonDependencies") { - from("$projectDir/requirements.txt") - into("$buildDir/airbyte/docker/") + from("$projectDir/requirements.txt") + into("$buildDir/airbyte/docker/") } // tasks.named("dockerBuildImage") { - // Set build args - // Current CDK version(used by the Connector Builder and workers running Connector Builder connectors - val cdkVersion: String = File(project.projectDir.parentFile, "airbyte-connector-builder-resources/CDK_VERSION").readText().trim() - buildArgs.put("CDK_VERSION", cdkVersion) + // Set build args + // Current CDK version(used by the Connector Builder and workers running Connector Builder connectors + val cdkVersion: String = File(project.projectDir.parentFile, "airbyte-connector-builder-resources/CDK_VERSION").readText().trim() + buildArgs.put("CDK_VERSION", cdkVersion) - dependsOn(copyPythonDeps, generateOpenApiServer) + dependsOn(copyPythonDeps, generateOpenApiServer) } -private fun 
updateToJakartaApi(srcDir:File) { - srcDir.walk().forEach { file -> - if(file.isFile) { - var contents = file.readText() - contents = contents.replace("javax.ws.rs", "jakarta.ws.rs") - .replace("javax.validation", "jakarta.validation") - .replace("javax.annotation", "jakarta.annotation") - file.writeText(contents) - } +private fun updateToJakartaApi(srcDir: File) { + srcDir.walk().forEach { file -> + if (file.isFile) { + var contents = file.readText() + contents = contents.replace("javax.ws.rs", "jakarta.ws.rs") + .replace("javax.validation", "jakarta.validation") + .replace("javax.annotation", "jakarta.annotation") + file.writeText(contents) } + } } diff --git a/airbyte-connector-builder-server/requirements.in b/airbyte-connector-builder-server/requirements.in index 7b51c89f5aa..64353fc18a6 100644 --- a/airbyte-connector-builder-server/requirements.in +++ b/airbyte-connector-builder-server/requirements.in @@ -1 +1 @@ -airbyte-cdk==0.65.0 +airbyte-cdk==0.83.0 diff --git a/airbyte-connector-builder-server/requirements.txt b/airbyte-connector-builder-server/requirements.txt index b3d69874c7e..08e45da3c9e 100644 --- a/airbyte-connector-builder-server/requirements.txt +++ b/airbyte-connector-builder-server/requirements.txt @@ -4,9 +4,9 @@ # # pip-compile # -airbyte-cdk==0.65.0 +airbyte-cdk==0.83.0 # via -r requirements.in -airbyte-protocol-models==0.5.1 +airbyte-protocol-models==0.9.0 # via airbyte-cdk attrs==23.2.0 # via @@ -17,57 +17,82 @@ backoff==2.2.1 # via airbyte-cdk bracex==2.4 # via wcmatch -cachetools==5.3.2 +cachetools==5.3.3 # via airbyte-cdk cattrs==23.2.3 # via requests-cache certifi==2024.2.2 # via requests +cffi==1.16.0 + # via cryptography charset-normalizer==3.3.2 # via requests +cryptography==42.0.5 + # via airbyte-cdk deprecated==1.2.14 # via airbyte-cdk dpath==2.0.8 # via airbyte-cdk -exceptiongroup==1.2.0 +exceptiongroup==1.2.1 # via cattrs genson==1.2.2 # via airbyte-cdk -idna==3.6 +idna==3.7 # via requests isodate==0.6.1 # via airbyte-cdk 
jinja2==3.1.3 # via airbyte-cdk -jsonref==0.3.0 +jsonpatch==1.33 + # via langchain-core +jsonpointer==2.4 + # via jsonpatch +jsonref==0.2 # via airbyte-cdk jsonschema==3.2.0 # via airbyte-cdk +langchain-core==0.1.42 + # via airbyte-cdk +langsmith==0.1.49 + # via langchain-core markupsafe==2.1.5 # via jinja2 +orjson==3.10.1 + # via langsmith +packaging==23.2 + # via langchain-core pendulum==2.1.2 # via airbyte-cdk platformdirs==4.2.0 # via requests-cache -pydantic==1.10.14 +pycparser==2.22 + # via cffi +pydantic==1.10.15 # via # airbyte-cdk # airbyte-protocol-models + # langchain-core + # langsmith +pyjwt==2.8.0 + # via airbyte-cdk pyrate-limiter==3.1.1 # via airbyte-cdk pyrsistent==0.20.0 # via jsonschema -python-dateutil==2.8.2 +python-dateutil==2.9.0.post0 # via # airbyte-cdk # pendulum pytzdata==2020.1 # via pendulum pyyaml==6.0.1 - # via airbyte-cdk + # via + # airbyte-cdk + # langchain-core requests==2.31.0 # via # airbyte-cdk + # langsmith # requests-cache requests-cache==1.2.0 # via airbyte-cdk @@ -77,7 +102,9 @@ six==1.16.0 # jsonschema # python-dateutil # url-normalize -typing-extensions==4.9.0 +tenacity==8.2.3 + # via langchain-core +typing-extensions==4.11.0 # via # cattrs # pydantic diff --git a/airbyte-connector-builder-server/src/main/java/io/airbyte/connector_builder/command_runner/ProcessOutputParser.java b/airbyte-connector-builder-server/src/main/java/io/airbyte/connector_builder/command_runner/ProcessOutputParser.java index e9c7c5028b7..9c811c5b788 100644 --- a/airbyte-connector-builder-server/src/main/java/io/airbyte/connector_builder/command_runner/ProcessOutputParser.java +++ b/airbyte-connector-builder-server/src/main/java/io/airbyte/connector_builder/command_runner/ProcessOutputParser.java @@ -4,15 +4,12 @@ package io.airbyte.connector_builder.command_runner; -import static io.airbyte.workers.internal.VersionedAirbyteStreamFactory.RECORD_TOO_LONG; - import datadog.trace.api.Trace; import io.airbyte.commons.io.IOs; import 
io.airbyte.connector_builder.TracingHelper; import io.airbyte.connector_builder.exceptions.AirbyteCdkInvalidInputException; import io.airbyte.connector_builder.exceptions.CdkProcessException; import io.airbyte.connector_builder.exceptions.CdkUnknownException; -import io.airbyte.connector_builder.exceptions.UnprocessableEntityException; import io.airbyte.connector_builder.requester.AirbyteCdkRequesterImpl; import io.airbyte.protocol.models.AirbyteMessage; import io.airbyte.protocol.models.AirbyteMessage.Type; @@ -52,9 +49,6 @@ AirbyteRecordMessage parse( } catch (final NullPointerException exc) { throwCdkException(process, cdkCommand); } catch (final IllegalStateException e) { - if (e.getMessage().contains(RECORD_TOO_LONG)) { - throw new UnprocessableEntityException("API response is too large. Reduce the size by requesting smaller pages or time intervals.", e); - } throw e; } diff --git a/airbyte-connector-builder-server/src/main/java/io/airbyte/connector_builder/command_runner/SynchronousPythonCdkCommandRunner.java b/airbyte-connector-builder-server/src/main/java/io/airbyte/connector_builder/command_runner/SynchronousPythonCdkCommandRunner.java index 70b386a03a4..715fe458106 100644 --- a/airbyte-connector-builder-server/src/main/java/io/airbyte/connector_builder/command_runner/SynchronousPythonCdkCommandRunner.java +++ b/airbyte-connector-builder-server/src/main/java/io/airbyte/connector_builder/command_runner/SynchronousPythonCdkCommandRunner.java @@ -31,6 +31,10 @@ public class SynchronousPythonCdkCommandRunner implements SynchronousCdkCommandR private final String python; private final String cdkEntrypoint; + // `:` separated path to the modules that will be imported by the Python executable + // Custom components must be in one of these modules to be loaded + private final String pythonPath; + private static final Logger LOGGER = LoggerFactory.getLogger(SynchronousPythonCdkCommandRunner.class); @Inject @@ -38,11 +42,13 @@ public 
SynchronousPythonCdkCommandRunner( final AirbyteFileWriter writer, final AirbyteStreamFactory streamFactory, final String python, - final String cdkEntrypoint) { + final String cdkEntrypoint, + final String pythonPath) { this.writer = writer; this.streamFactory = streamFactory; this.python = python; this.cdkEntrypoint = cdkEntrypoint; + this.pythonPath = pythonPath; } /** @@ -84,16 +90,22 @@ AirbyteCdkProcess start( catalog.getFilepath()); LOGGER.debug("Preparing command for {}: {}", cdkCommand, Joiner.on(" ").join(command)); final ProcessBuilder processBuilder = new ProcessBuilder(command); + addPythonPathToSubprocessEnvironment(processBuilder); + final AirbyteCdkPythonProcess cdkProcess = new AirbyteCdkPythonProcess( writer, config, catalog, processBuilder); cdkProcess.start(); return cdkProcess; } - AirbyteArgument write(final String name, final String contents) throws IOException { + private AirbyteArgument write(final String name, final String contents) throws IOException { final AirbyteArgument arg = new AirbyteArgument(this.writer); arg.setUpArg(name, contents); return arg; } + private void addPythonPathToSubprocessEnvironment(ProcessBuilder processBuilder) { + processBuilder.environment().put("PYTHONPATH", this.pythonPath); + } + } diff --git a/airbyte-connector-builder-server/src/main/java/io/airbyte/connector_builder/config/ApplicationBeanFactory.java b/airbyte-connector-builder-server/src/main/java/io/airbyte/connector_builder/config/ApplicationBeanFactory.java index aee13ee18a8..bcb21696518 100644 --- a/airbyte-connector-builder-server/src/main/java/io/airbyte/connector_builder/config/ApplicationBeanFactory.java +++ b/airbyte-connector-builder-server/src/main/java/io/airbyte/connector_builder/config/ApplicationBeanFactory.java @@ -12,6 +12,11 @@ import io.airbyte.workers.internal.VersionedAirbyteStreamFactory; import io.micronaut.context.annotation.Factory; import jakarta.inject.Singleton; +import java.io.File; +import java.util.Arrays; +import 
java.util.List; +import java.util.Optional; +import java.util.stream.Collectors; /** * Defines the instantiation of handler classes. @@ -43,9 +48,38 @@ public SynchronousCdkCommandRunner synchronousPythonCdkCommandRunner() { return new SynchronousPythonCdkCommandRunner( new AirbyteFileWriterImpl(), // This should eventually be constructed via DI. - VersionedAirbyteStreamFactory.noMigrationVersionedAirbyteStreamFactory(true), + VersionedAirbyteStreamFactory.noMigrationVersionedAirbyteStreamFactory(), this.getPython(), - this.getCdkEntrypoint()); + this.getCdkEntrypoint(), + this.getPythonPath()); + } + + private String getPythonPath() { + String pathToConnectors = getPathToConnectors(); + List subdirectories = listSubdirectories(pathToConnectors); + return createPythonPathFromListOfPaths(pathToConnectors, subdirectories); + } + + private String getPathToConnectors() { + return "/connectors"; + } + + private static List listSubdirectories(String path) { + File file = new File(path); + String[] directories = file.list((current, name) -> new File(current, name).isDirectory()); + return Optional.ofNullable(directories).stream() + .flatMap(Arrays::stream) + .collect(Collectors.toList()); + } + + static String createPythonPathFromListOfPaths(String path, List subdirectories) { + /* + * Creates a `:`-separated path of all connector directories. The connector directories that contain + * a python module can then be imported. 
+ */ + return subdirectories.stream() + .map(subdirectory -> path + "/" + subdirectory) + .collect(Collectors.joining(":")); } } diff --git a/airbyte-connector-builder-server/src/main/java/io/airbyte/connector_builder/requester/AirbyteCdkRequesterImpl.java b/airbyte-connector-builder-server/src/main/java/io/airbyte/connector_builder/requester/AirbyteCdkRequesterImpl.java index 0356c04a7dd..6e40b51241b 100644 --- a/airbyte-connector-builder-server/src/main/java/io/airbyte/connector_builder/requester/AirbyteCdkRequesterImpl.java +++ b/airbyte-connector-builder-server/src/main/java/io/airbyte/connector_builder/requester/AirbyteCdkRequesterImpl.java @@ -53,7 +53,7 @@ public class AirbyteCdkRequesterImpl implements AirbyteCdkRequester { "json_schema": {}, "supported_sync_modes": ["full_refresh", "incremental"] }, - "sync_mode": "full_refresh", + "sync_mode": "incremental", "destination_sync_mode": "overwrite" } ] diff --git a/airbyte-connector-builder-server/src/main/openapi/openapi.yaml b/airbyte-connector-builder-server/src/main/openapi/openapi.yaml index 9ac26451acc..bc85e0a6ce4 100644 --- a/airbyte-connector-builder-server/src/main/openapi/openapi.yaml +++ b/airbyte-connector-builder-server/src/main/openapi/openapi.yaml @@ -218,9 +218,6 @@ components: url: type: string description: URL that the request was sent to - parameters: - type: object - description: The request parameters that were set on the HTTP request, if any body: type: string description: The body of the HTTP request, if present diff --git a/airbyte-connector-builder-server/src/main/resources/application.yml b/airbyte-connector-builder-server/src/main/resources/application.yml index e5057520694..14df703e3c7 100644 --- a/airbyte-connector-builder-server/src/main/resources/application.yml +++ b/airbyte-connector-builder-server/src/main/resources/application.yml @@ -1,6 +1,15 @@ micronaut: application: name: airbyte-connector-builder-server + caches: + # used by the analytics tracking client to cache 
calls to resolve the deployment and identity (workspace) for + # track events + analytics-tracking-deployments: + charset: "UTF-8" + expire-after-access: 10m + analytics-tracking-identity: + charset: "UTF-8" + expire-after-access: 10m env: cloud-deduction: true metrics: @@ -16,7 +25,7 @@ micronaut: authentication-provider-strategy: ALL enabled: ${API_AUTHORIZATION_ENABLED:false} server: - port: 80 + port: 8080 cors: enabled: true netty: @@ -24,6 +33,7 @@ micronaut: enabled: ${HTTP_ACCESS_LOG_ENABLED:true} aggregator: max-content-length: 52428800 # 50MB + max-header-size: ${NETTY_MAX_HEADER_SIZE:32768} endpoints: v1/manifest_template: enable: true @@ -77,6 +87,9 @@ endpoints: beans: enabled: true sensitive: false + caches: + enabled: true + sensitive: false env: enabled: true sensitive: false diff --git a/airbyte-connector-builder-server/src/test/java/io/airbyte/connector_builder/command_runner/MockSynchronousPythonCdkCommandRunner.java b/airbyte-connector-builder-server/src/test/java/io/airbyte/connector_builder/command_runner/MockSynchronousPythonCdkCommandRunner.java index 3f0d5ab24e2..78844b3e5f5 100644 --- a/airbyte-connector-builder-server/src/test/java/io/airbyte/connector_builder/command_runner/MockSynchronousPythonCdkCommandRunner.java +++ b/airbyte-connector-builder-server/src/test/java/io/airbyte/connector_builder/command_runner/MockSynchronousPythonCdkCommandRunner.java @@ -27,7 +27,7 @@ public class MockSynchronousPythonCdkCommandRunner extends SynchronousPythonCdkC public MockSynchronousPythonCdkCommandRunner( final AirbyteFileWriter writer, final AirbyteStreamFactory streamFactory) { - super(writer, streamFactory, "", ""); + super(writer, streamFactory, "", "", ""); } public MockSynchronousPythonCdkCommandRunner( @@ -38,7 +38,7 @@ public MockSynchronousPythonCdkCommandRunner( final InputStream inputStream, final InputStream errorStream, final OutputStream outputStream) { - super(writer, streamFactory, "", ""); + super(writer, streamFactory, "", "", 
""); this.shouldThrow = shouldThrow; this.exitCode = exitCode; this.inputStream = inputStream; diff --git a/airbyte-connector-builder-server/src/test/java/io/airbyte/connector_builder/config/ApplicationBeanFactoryTest.java b/airbyte-connector-builder-server/src/test/java/io/airbyte/connector_builder/config/ApplicationBeanFactoryTest.java new file mode 100644 index 00000000000..eb2396ad2be --- /dev/null +++ b/airbyte-connector-builder-server/src/test/java/io/airbyte/connector_builder/config/ApplicationBeanFactoryTest.java @@ -0,0 +1,38 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.connector_builder.config; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +import com.google.common.collect.Lists; +import java.util.List; +import org.junit.jupiter.api.Test; + +public class ApplicationBeanFactoryTest { + + private static final String ROOT_PATH = "/path/to/root"; + + @Test + void testCreatePythonPathFromListOfPaths() { + List subdirectories = Lists.newArrayList("source-connector", "destination-connector"); + String pythonpath = ApplicationBeanFactory.createPythonPathFromListOfPaths(ROOT_PATH, subdirectories); + + String expectedPythonPath = "/path/to/root/source-connector:/path/to/root/destination-connector"; + + assertEquals(expectedPythonPath, pythonpath); + } + + @Test + void testCreatePythonPathFromListOfPathsNoSubdirectories() { + // This test case verifies the scenario where no local files are mounted + List subdirectories = Lists.newArrayList(); + String pythonpath = ApplicationBeanFactory.createPythonPathFromListOfPaths(ROOT_PATH, subdirectories); + + String expectedPythonPath = ""; + + assertEquals(expectedPythonPath, pythonpath); + } + +} diff --git a/airbyte-connector-builder-server/src/test/java/io/airbyte/connector_builder/controllers/ConnectorBuilderControllerIntegrationTest.java 
b/airbyte-connector-builder-server/src/test/java/io/airbyte/connector_builder/controllers/ConnectorBuilderControllerIntegrationTest.java index 6522a8a94bb..4df64ab7723 100644 --- a/airbyte-connector-builder-server/src/test/java/io/airbyte/connector_builder/controllers/ConnectorBuilderControllerIntegrationTest.java +++ b/airbyte-connector-builder-server/src/test/java/io/airbyte/connector_builder/controllers/ConnectorBuilderControllerIntegrationTest.java @@ -69,7 +69,7 @@ class ConnectorBuilderControllerIntegrationTest { void setup() { this.healthHandler = mock(HealthHandler.class); this.writer = new MockAirbyteFileWriterImpl(); - this.streamFactory = VersionedAirbyteStreamFactory.noMigrationVersionedAirbyteStreamFactory(false); + this.streamFactory = VersionedAirbyteStreamFactory.noMigrationVersionedAirbyteStreamFactory(); } @BeforeAll diff --git a/airbyte-connector-builder-server/src/test/java/io/airbyte/connector_builder/fixtures/RecordStreamRead.json b/airbyte-connector-builder-server/src/test/java/io/airbyte/connector_builder/fixtures/RecordStreamRead.json index 1d53e62a5a4..5b2a57a8808 100644 --- a/airbyte-connector-builder-server/src/test/java/io/airbyte/connector_builder/fixtures/RecordStreamRead.json +++ b/airbyte-connector-builder-server/src/test/java/io/airbyte/connector_builder/fixtures/RecordStreamRead.json @@ -10,8 +10,7 @@ { "records": [], "request": { - "url": "https://api.courier.com/messages", - "parameters": { "page_size": ["1"] }, + "url": "https://api.courier.com/messages?page_size=1", "body": null, "headers": { "User-Agent": "python-requests/2.28.2", diff --git a/airbyte-connector-builder-server/src/test/java/io/airbyte/connector_builder/requester/AirbyteCdkRequesterImplTest.java b/airbyte-connector-builder-server/src/test/java/io/airbyte/connector_builder/requester/AirbyteCdkRequesterImplTest.java index d029e50d5d5..43eae71e48b 100644 --- 
a/airbyte-connector-builder-server/src/test/java/io/airbyte/connector_builder/requester/AirbyteCdkRequesterImplTest.java +++ b/airbyte-connector-builder-server/src/test/java/io/airbyte/connector_builder/requester/AirbyteCdkRequesterImplTest.java @@ -64,7 +64,7 @@ ArgumentCaptor testReadStreamSuccess(final Integer recordLimit, final In + "\"2023-11-01T00:00:00+00:00\", \"listItem\": \"item\"}, \"state\": {\"airbyte\": \"state\"}}, {\"pages\": []}]," + "\"inferred_schema\": {\"schema\": 1}, \"latest_config_update\": { \"config_key\": \"config_value\"}," + "\"auxiliary_requests\": [{\"title\": \"Refresh token\",\"description\": \"Obtains access token\",\"request\": {\"url\": " - + "\"https://a-url.com/oauth2/v1/tokens/bearer\",\"parameters\": null,\"headers\": {\"Content-Type\": " + + "\"https://a-url.com/oauth2/v1/tokens/bearer\",\"headers\": {\"Content-Type\": " + "\"application/x-www-form-urlencoded\"},\"http_method\": \"POST\",\"body\": \"a_request_body\"},\"response\": {\"status\": 200," + "\"body\": \"a_response_body\",\"headers\": {\"Date\": \"Tue, 11 Jul 2023 16:28:10 GMT\"}}}]}"); final ArgumentCaptor configCaptor = ArgumentCaptor.forClass(String.class); diff --git a/airbyte-connector-sidecar/Dockerfile b/airbyte-connector-sidecar/Dockerfile index 58466c6ba08..d645421e2a7 100644 --- a/airbyte-connector-sidecar/Dockerfile +++ b/airbyte-connector-sidecar/Dockerfile @@ -1,4 +1,10 @@ -FROM amazoncorretto:21 AS connector-sidecar +ARG JAVA_WORKER_BASE_IMAGE_VERSION=2.2.0 + +FROM scratch as builder +WORKDIR /app +ADD airbyte-app.tar /app + +FROM airbyte/airbyte-base-java-worker-image:${JAVA_WORKER_BASE_IMAGE_VERSION} ARG DOCKER_BUILD_ARCH=amd64 @@ -8,12 +14,9 @@ ARG VERSION=dev ENV APPLICATION airbyte-connector-sidecar ENV VERSION=${VERSION} -WORKDIR /app - -COPY WellKnownTypes.json /app - -# Move connector-sidecar app -ADD airbyte-app.tar /app +COPY --chown=airbyte:airbyte WellKnownTypes.json /app +COPY --chown=airbyte:airbyte --from=builder /app /app +USER 
airbyte:airbyte # wait for upstream dependencies to become available before starting server ENTRYPOINT ["/bin/bash", "-c", "/app/airbyte-app/bin/${APPLICATION}"] diff --git a/airbyte-connector-sidecar/build.gradle.kts b/airbyte-connector-sidecar/build.gradle.kts index 1ba2240e423..1830f46de07 100644 --- a/airbyte-connector-sidecar/build.gradle.kts +++ b/airbyte-connector-sidecar/build.gradle.kts @@ -6,144 +6,146 @@ import java.util.Properties import java.util.zip.ZipFile plugins { - id("io.airbyte.gradle.jvm.app") - id("io.airbyte.gradle.publish") - id("io.airbyte.gradle.docker") - kotlin("jvm") - kotlin("kapt") + id("io.airbyte.gradle.jvm.app") + id("io.airbyte.gradle.publish") + id("io.airbyte.gradle.docker") + kotlin("jvm") + kotlin("kapt") } buildscript { - repositories { - mavenCentral() - } - dependencies { - // necessary to convert the well_know_types from yaml to json - val jacksonVersion = libs.versions.fasterxml.version.get() - classpath("com.fasterxml.jackson.dataformat:jackson-dataformat-yaml:$jacksonVersion") - classpath("com.fasterxml.jackson.module:jackson-module-kotlin:$jacksonVersion") - } + repositories { + mavenCentral() + } + dependencies { + // necessary to convert the well_know_types from yaml to json + val jacksonVersion = libs.versions.fasterxml.version.get() + classpath("com.fasterxml.jackson.dataformat:jackson-dataformat-yaml:$jacksonVersion") + classpath("com.fasterxml.jackson.module:jackson-module-kotlin:$jacksonVersion") + } } val airbyteProtocol by configurations.creating configurations.all { - resolutionStrategy { - // Ensure that the versions defined in deps.toml are used) - // instead of versions from transitive dependencies) - // Force to avoid(updated version brought in transitively from Micronaut 3.8+) - // that is incompatible with our current Helm setup) - force (libs.s3, libs.aws.java.sdk.s3) - } + resolutionStrategy { + // Ensure that the versions defined in deps.toml are used) + // instead of versions from transitive 
dependencies) + // Force to avoid(updated version brought in transitively from Micronaut 3.8+) + // that is incompatible with our current Helm setup) + force(libs.s3, libs.aws.java.sdk.s3) + } } configurations.all { - exclude(group = "io.micronaut", module = "micronaut-http-server-netty") - exclude(group = "io.micronaut.openapi") - exclude(group = "io.micronaut.flyway") - exclude(group = "io.micronaut.sql") + exclude(group = "io.micronaut", module = "micronaut-http-server-netty") + exclude(group = "io.micronaut.openapi") + exclude(group = "io.micronaut.flyway") + exclude(group = "io.micronaut.sql") } dependencies { - kapt(platform(libs.micronaut.platform)) - kapt(libs.bundles.micronaut.annotation.processor) - - implementation(platform(libs.micronaut.platform)) - implementation(libs.bundles.log4j) - implementation(libs.bundles.micronaut.light) - implementation(libs.google.cloud.storage) - implementation(libs.java.jwt) - implementation(libs.kotlin.logging) - implementation(libs.micronaut.jackson.databind) - implementation(libs.slf4j.api) - - implementation(project(":airbyte-api")) - implementation(project(":airbyte-commons")) - implementation(project(":airbyte-commons-converters")) - implementation(project(":airbyte-commons-protocol")) - implementation(project(":airbyte-commons-temporal")) - implementation(project(":airbyte-commons-worker")) - implementation(project(":airbyte-config:config-models")) - implementation(project(":airbyte-metrics:metrics-lib")) // necessary for doc store - implementation(project(":airbyte-worker-models")) - implementation(libs.airbyte.protocol) - - runtimeOnly(libs.snakeyaml) - runtimeOnly(libs.kotlin.reflect) - runtimeOnly(libs.appender.log4j2) - runtimeOnly(libs.bundles.bouncycastle) // cryptography package - - kaptTest(platform(libs.micronaut.platform)) - kaptTest(libs.bundles.micronaut.annotation.processor) - kaptTest(libs.bundles.micronaut.test.annotation.processor) - - testImplementation(libs.bundles.micronaut.test) - 
testImplementation(libs.mockk) - testImplementation(libs.kotlin.test.runner.junit5) - testImplementation(libs.bundles.junit) - testImplementation(libs.assertj.core) - testImplementation(libs.airbyte.protocol) - testImplementation(libs.apache.commons.lang) - - airbyteProtocol(libs.airbyte.protocol) { - isTransitive = false - } + kapt(platform(libs.micronaut.platform)) + kapt(libs.bundles.micronaut.annotation.processor) + + implementation(platform(libs.micronaut.platform)) + implementation(libs.bundles.log4j) + implementation(libs.bundles.micronaut.light) + implementation(libs.google.cloud.storage) + implementation(libs.java.jwt) + implementation(libs.kotlin.logging) + implementation(libs.micronaut.jackson.databind) + implementation(libs.slf4j.api) + + implementation(project(":airbyte-api")) + implementation(project(":airbyte-commons")) + implementation(project(":airbyte-commons-converters")) + implementation(project(":airbyte-commons-protocol")) + implementation(project(":airbyte-commons-temporal")) + implementation(project(":airbyte-commons-worker")) + implementation(project(":airbyte-config:config-models")) + implementation(project(":airbyte-metrics:metrics-lib")) // necessary for doc store + implementation(project(":airbyte-worker-models")) + implementation(libs.airbyte.protocol) + + runtimeOnly(libs.snakeyaml) + runtimeOnly(libs.kotlin.reflect) + runtimeOnly(libs.appender.log4j2) + runtimeOnly(libs.bundles.bouncycastle) // cryptography package + + kaptTest(platform(libs.micronaut.platform)) + kaptTest(libs.bundles.micronaut.annotation.processor) + kaptTest(libs.bundles.micronaut.test.annotation.processor) + + testImplementation(libs.bundles.micronaut.test) + testImplementation(libs.mockk) + testImplementation(libs.kotlin.test.runner.junit5) + testImplementation(libs.bundles.junit) + testImplementation(libs.assertj.core) + testImplementation(libs.airbyte.protocol) + testImplementation(libs.apache.commons.lang) + + airbyteProtocol(libs.airbyte.protocol) { + 
isTransitive = false + } } val env = Properties().apply { - load(rootProject.file(".env.dev").inputStream()) + load(rootProject.file(".env.dev").inputStream()) } airbyte { - application { - mainClass.set("io.airbyte.connectorSidecar.ApplicationKt") - defaultJvmArgs = listOf("-XX:+ExitOnOutOfMemoryError", "-XX:MaxRAMPercentage=75.0") - @Suppress("UNCHECKED_CAST") - localEnvVars.putAll(env.toMutableMap() as Map) - localEnvVars.putAll(mapOf( - "AIRBYTE_VERSION" to env["VERSION"].toString(), - "DATA_PLANE_ID" to "local", - "MICRONAUT_ENVIRONMENTS" to "test" - )) - } - docker { - imageName.set("connector-sidecar") - } + application { + mainClass.set("io.airbyte.connectorSidecar.ApplicationKt") + defaultJvmArgs = listOf("-XX:+ExitOnOutOfMemoryError", "-XX:MaxRAMPercentage=75.0") + @Suppress("UNCHECKED_CAST") + localEnvVars.putAll(env.toMutableMap() as Map) + localEnvVars.putAll( + mapOf( + "AIRBYTE_VERSION" to env["VERSION"].toString(), + "DATA_PLANE_ID" to "local", + "MICRONAUT_ENVIRONMENTS" to "test" + ) + ) + } + docker { + imageName.set("connector-sidecar") + } } // Duplicated from :airbyte-worker, eventually, this should be handled in :airbyte-protocol) val generateWellKnownTypes = tasks.register("generateWellKnownTypes") { - inputs.files(airbyteProtocol) // declaring inputs) - val targetFile = project.file("build/airbyte/docker/WellKnownTypes.json") - outputs.file(targetFile) // declaring outputs) - - doLast { - val wellKnownTypesYamlPath = "airbyte_protocol/well_known_types.yaml" - airbyteProtocol.files.forEach { - val zip = ZipFile(it) - val entry = zip.getEntry(wellKnownTypesYamlPath) - - val wellKnownTypesYaml = zip.getInputStream(entry).bufferedReader().use { reader -> reader.readText() } - val rawJson = yamlToJson(wellKnownTypesYaml) - targetFile.getParentFile().mkdirs() - targetFile.writeText(rawJson) - } + inputs.files(airbyteProtocol) // declaring inputs) + val targetFile = project.file("build/airbyte/docker/WellKnownTypes.json") + outputs.file(targetFile) 
// declaring outputs) + + doLast { + val wellKnownTypesYamlPath = "airbyte_protocol/well_known_types.yaml" + airbyteProtocol.files.forEach { + val zip = ZipFile(it) + val entry = zip.getEntry(wellKnownTypesYamlPath) + + val wellKnownTypesYaml = zip.getInputStream(entry).bufferedReader().use { reader -> reader.readText() } + val rawJson = yamlToJson(wellKnownTypesYaml) + targetFile.getParentFile().mkdirs() + targetFile.writeText(rawJson) } + } } tasks.named("dockerBuildImage") { - dependsOn(generateWellKnownTypes) + dependsOn(generateWellKnownTypes) } fun yamlToJson(rawYaml: String): String { - val mappedYaml: Any = YAMLMapper().registerKotlinModule().readValue(rawYaml) - return ObjectMapper().registerKotlinModule().writeValueAsString(mappedYaml) + val mappedYaml: Any = YAMLMapper().registerKotlinModule().readValue(rawYaml) + return ObjectMapper().registerKotlinModule().writeValueAsString(mappedYaml) } // This is a workaround related to kaptBuild errors. It seems to be because there are no tests in cloud-airbyte-api-server. 
// TODO: this should be removed when we move to kotlin 1.9.20 // TODO: we should write tests afterEvaluate { - tasks.named("kaptGenerateStubsTestKotlin") { - enabled = false - } + tasks.named("kaptGenerateStubsTestKotlin") { + enabled = false + } } diff --git a/airbyte-connector-sidecar/src/main/kotlin/io/airbyte/connectorSidecar/ConnectorMessageProcessor.kt b/airbyte-connector-sidecar/src/main/kotlin/io/airbyte/connectorSidecar/ConnectorMessageProcessor.kt index 399b8eac463..c5bef6d646a 100644 --- a/airbyte-connector-sidecar/src/main/kotlin/io/airbyte/connectorSidecar/ConnectorMessageProcessor.kt +++ b/airbyte-connector-sidecar/src/main/kotlin/io/airbyte/connectorSidecar/ConnectorMessageProcessor.kt @@ -92,7 +92,7 @@ class ConnectorMessageProcessor( val errorMessage: String = String.format("Lost connection to the connector") throw WorkerException(errorMessage, e) } catch (e: Exception) { - throw WorkerException("Unexpected error while getting checking connection.", e) + throw WorkerException("Unexpected error performing $operationType.", e) } } @@ -155,7 +155,7 @@ class ConnectorMessageProcessor( .withMessage(result.connectionStatus.message) jobOutput.checkConnection = output } else if (failureReason.isEmpty) { - throw WorkerException("Error checking connection status: no status nor failure reason were outputted") + throw WorkerException("Error checking connection status: no status nor failure reason provided") } OperationType.DISCOVER -> @@ -165,7 +165,7 @@ class ConnectorMessageProcessor( .writeDiscoverCatalogResult(buildSourceDiscoverSchemaWriteRequestBody(input.discoveryInput, result.catalog)) jobOutput.discoverCatalogId = apiResult.catalogId } else if (failureReason.isEmpty) { - throw WorkerException("Error checking connection status: no status nor failure reason were outputted") + throw WorkerException("Error discovering catalog: no failure reason provided") } OperationType.SPEC -> diff --git 
a/airbyte-connector-sidecar/src/main/kotlin/io/airbyte/connectorSidecar/ConnectorWatcher.kt b/airbyte-connector-sidecar/src/main/kotlin/io/airbyte/connectorSidecar/ConnectorWatcher.kt index d18875bbb5b..0855275229f 100644 --- a/airbyte-connector-sidecar/src/main/kotlin/io/airbyte/connectorSidecar/ConnectorWatcher.kt +++ b/airbyte-connector-sidecar/src/main/kotlin/io/airbyte/connectorSidecar/ConnectorWatcher.kt @@ -2,6 +2,7 @@ package io.airbyte.connectorSidecar import com.google.common.annotations.VisibleForTesting import com.google.common.base.Stopwatch +import io.airbyte.api.client.WorkloadApiClient import io.airbyte.commons.json.Jsons import io.airbyte.commons.protocol.AirbyteMessageSerDeProvider import io.airbyte.commons.protocol.AirbyteProtocolVersionedMigratorFactory @@ -21,7 +22,6 @@ import io.airbyte.workers.internal.VersionedAirbyteStreamFactory.InvalidLineFail import io.airbyte.workers.models.SidecarInput import io.airbyte.workers.sync.OrchestratorConstants import io.airbyte.workers.workload.JobOutputDocStore -import io.airbyte.workload.api.client.generated.WorkloadApi import io.airbyte.workload.api.client.model.generated.WorkloadFailureRequest import io.airbyte.workload.api.client.model.generated.WorkloadSuccessRequest import io.github.oshai.kotlinlogging.KotlinLogging @@ -47,7 +47,7 @@ class ConnectorWatcher( val serDeProvider: AirbyteMessageSerDeProvider, val airbyteProtocolVersionedMigratorFactory: AirbyteProtocolVersionedMigratorFactory, val gsonPksExtractor: GsonPksExtractor, - val workloadApi: WorkloadApi, + val workloadApiClient: WorkloadApiClient, val jobOutputDocStore: JobOutputDocStore, ) { fun run() { @@ -61,7 +61,13 @@ class ConnectorWatcher( while (!areNeededFilesPresent()) { Thread.sleep(100) if (fileTimeoutReach(stopwatch)) { - failWorkload(workloadId, null) + logger.warn { "Failed to find output files from connector within timeout $fileTimeoutMinutes. Is the connector still running?" 
} + val failureReason = + FailureReason() + .withFailureOrigin(FailureReason.FailureOrigin.UNKNOWN) + .withExternalMessage("Failed to find output files from connector within timeout $fileTimeoutMinutes.") + + failWorkload(workloadId, failureReason) exitFileNotFound() // The return is needed for the test return @@ -112,7 +118,7 @@ class ConnectorWatcher( } } jobOutputDocStore.write(workloadId, connectorOutput) - workloadApi.workloadSuccess(WorkloadSuccessRequest(workloadId)) + workloadApiClient.workloadApi.workloadSuccess(WorkloadSuccessRequest(workloadId)) } catch (e: Exception) { logger.error(e) { "Error performing operation: ${e.javaClass.name}" } @@ -153,24 +159,26 @@ class ConnectorWatcher( }, Optional.empty(), Optional.empty(), - Optional.empty>(), - InvalidLineFailureConfiguration(false, false), + InvalidLineFailureConfiguration(false), gsonPksExtractor, ) } @VisibleForTesting fun exitProperly() { + logger.info { "Deliberately exiting process with code 0." } exitProcess(0) } @VisibleForTesting fun exitInternalError() { + logger.info { "Deliberately exiting process with code 1." } exitProcess(1) } @VisibleForTesting fun exitFileNotFound() { + logger.info { "Deliberately exiting process with code 2." } exitProcess(2) } @@ -239,8 +247,9 @@ class ConnectorWatcher( workloadId: String, failureReason: FailureReason?, ) { + logger.info { "Failing workload $workloadId." 
} if (failureReason != null) { - workloadApi.workloadFailure( + workloadApiClient.workloadApi.workloadFailure( WorkloadFailureRequest( workloadId, failureReason.failureOrigin.value(), @@ -248,7 +257,7 @@ class ConnectorWatcher( ), ) } else { - workloadApi.workloadFailure(WorkloadFailureRequest(workloadId)) + workloadApiClient.workloadApi.workloadFailure(WorkloadFailureRequest(workloadId)) } } } diff --git a/airbyte-connector-sidecar/src/main/resources/application.yml b/airbyte-connector-sidecar/src/main/resources/application.yml index 7343b8f7653..767d69a1c5a 100644 --- a/airbyte-connector-sidecar/src/main/resources/application.yml +++ b/airbyte-connector-sidecar/src/main/resources/application.yml @@ -25,6 +25,7 @@ airbyte: log: ${STORAGE_BUCKET_LOG} state: ${STORAGE_BUCKET_STATE} workload-output: ${STORAGE_BUCKET_WORKLOAD_OUTPUT} + activity-payload: ${STORAGE_BUCKET_ACTIVITY_PAYLOAD} gcs: application-credentials: ${GOOGLE_APPLICATION_CREDENTIALS:} local: @@ -48,6 +49,8 @@ airbyte: credentials-path: ${DATA_PLANE_SERVICE_ACCOUNT_CREDENTIALS_PATH:} email: ${DATA_PLANE_SERVICE_ACCOUNT_EMAIL:} sidecar: + # Can we bump this value? Does it need to be configured per operation? + # Should we pass it in from the launcher? 
file-timeout-minutes: ${SIDECAR_FILE_TIMEOUT_MINUTES:9} workload-api: base-path: ${WORKLOAD_API_HOST:} diff --git a/airbyte-connector-sidecar/src/test/kotlin/io/airbyte/connectorSidecar/ConnectorWatchTest.kt b/airbyte-connector-sidecar/src/test/kotlin/io/airbyte/connectorSidecar/ConnectorWatchTest.kt index 204b58bcee7..2c495917a79 100644 --- a/airbyte-connector-sidecar/src/test/kotlin/io/airbyte/connectorSidecar/ConnectorWatchTest.kt +++ b/airbyte-connector-sidecar/src/test/kotlin/io/airbyte/connectorSidecar/ConnectorWatchTest.kt @@ -1,5 +1,6 @@ package io.airbyte.connectorSidecar +import io.airbyte.api.client.WorkloadApiClient import io.airbyte.commons.protocol.AirbyteMessageSerDeProvider import io.airbyte.commons.protocol.AirbyteProtocolVersionedMigratorFactory import io.airbyte.config.ActorType @@ -50,6 +51,9 @@ class ConnectorWatchTest { @MockK private lateinit var workloadApi: WorkloadApi + @MockK + private lateinit var workloadApiClient: WorkloadApiClient + @MockK private lateinit var jobOutputDocStore: JobOutputDocStore @@ -66,6 +70,8 @@ class ConnectorWatchTest { @BeforeEach fun init() { + every { workloadApiClient.workloadApi } returns workloadApi + connectorWatcher = spyk( ConnectorWatcher( @@ -76,7 +82,7 @@ class ConnectorWatchTest { serDeProvider, airbyteProtocolVersionedMigratorFactory, gsonPksExtractor, - workloadApi, + workloadApiClient, jobOutputDocStore, ), ) @@ -192,12 +198,12 @@ class ConnectorWatchTest { every { connectorWatcher.exitFileNotFound() } returns Unit - every { workloadApi.workloadFailure(WorkloadFailureRequest(workloadId)) } returns Unit + every { workloadApi.workloadFailure(any()) } returns Unit connectorWatcher.run() verifyOrder { - workloadApi.workloadFailure(WorkloadFailureRequest(workloadId)) + workloadApi.workloadFailure(any()) connectorWatcher.exitFileNotFound() } } diff --git a/airbyte-container-orchestrator/Dockerfile b/airbyte-container-orchestrator/Dockerfile index 615f06aa9bf..7321567c686 100644 --- 
a/airbyte-container-orchestrator/Dockerfile +++ b/airbyte-container-orchestrator/Dockerfile @@ -1,4 +1,10 @@ -FROM airbyte/airbyte-base-java-worker:1.0.0 +ARG JAVA_WORKER_BASE_IMAGE_VERSION=2.2.0 + +FROM scratch as builder +WORKDIR /app +ADD airbyte-app.tar /app + +FROM airbyte/airbyte-base-java-worker-image:${JAVA_WORKER_BASE_IMAGE_VERSION} # Don't change this manually. Bump version expects to make moves based on this string ARG VERSION=dev @@ -7,11 +13,9 @@ ENV APPLICATION airbyte-container-orchestrator ENV VERSION=${VERSION} WORKDIR /app - -COPY WellKnownTypes.json /app - -# Move orchestrator app -ADD airbyte-app.tar /app +COPY --chown=airbyte:airbyte WellKnownTypes.json /app +COPY --chown=airbyte:airbyte --from=builder /app /app +USER airbyte:airbyte # wait for upstream dependencies to become available before starting server ENTRYPOINT ["/bin/bash", "-c", "/app/airbyte-app/bin/${APPLICATION}"] diff --git a/airbyte-container-orchestrator/build.gradle.kts b/airbyte-container-orchestrator/build.gradle.kts index 3ce31bbfef1..17ce6488b62 100644 --- a/airbyte-container-orchestrator/build.gradle.kts +++ b/airbyte-container-orchestrator/build.gradle.kts @@ -6,122 +6,123 @@ import com.fasterxml.jackson.module.kotlin.registerKotlinModule import java.util.zip.ZipFile buildscript { - repositories { - mavenCentral() - } - dependencies { - // necessary to convert the well_know_types from yaml to json - val jacksonVersion = libs.versions.fasterxml.version.get() - classpath("com.fasterxml.jackson.dataformat:jackson-dataformat-yaml:$jacksonVersion") - classpath("com.fasterxml.jackson.module:jackson-module-kotlin:$jacksonVersion") - } + repositories { + mavenCentral() + } + dependencies { + // necessary to convert the well_know_types from yaml to json + val jacksonVersion = libs.versions.fasterxml.version.get() + classpath("com.fasterxml.jackson.dataformat:jackson-dataformat-yaml:$jacksonVersion") + classpath("com.fasterxml.jackson.module:jackson-module-kotlin:$jacksonVersion") 
+ } } plugins { - id("io.airbyte.gradle.jvm.app") - id("io.airbyte.gradle.docker") - id("io.airbyte.gradle.publish") - kotlin("jvm") - kotlin("kapt") + id("io.airbyte.gradle.jvm.app") + id("io.airbyte.gradle.docker") + id("io.airbyte.gradle.publish") + kotlin("jvm") + kotlin("kapt") } val airbyteProtocol by configurations.creating configurations.all { - resolutionStrategy { - // Ensure that the versions defined in deps.toml are used) - // instead of versions from transitive dependencies) - // Force to avoid(updated version brought in transitively from Micronaut 3.8+) - // that is incompatible with our current Helm setup) - force (libs.s3, libs.aws.java.sdk.s3) - } + resolutionStrategy { + // Ensure that the versions defined in deps.toml are used) + // instead of versions from transitive dependencies) + // Force to avoid(updated version brought in transitively from Micronaut 3.8+) + // that is incompatible with our current Helm setup) + force(libs.s3, libs.aws.java.sdk.s3) + } } dependencies { - compileOnly(libs.lombok) - annotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut - annotationProcessor(platform(libs.micronaut.platform)) - annotationProcessor(libs.bundles.micronaut.annotation.processor) - - implementation(platform(libs.micronaut.platform)) - implementation(libs.bundles.micronaut) - implementation(libs.bundles.micronaut.metrics) - implementation(libs.guava) - implementation(libs.s3) - implementation(libs.aws.java.sdk.s3) - implementation(libs.sts) - implementation(libs.kubernetes.client) - implementation(libs.bundles.datadog) - implementation(libs.bundles.log4j) - - implementation(project(":airbyte-api")) - implementation(project(":airbyte-commons")) - implementation(project(":airbyte-config:config-models")) - implementation(project(":airbyte-commons-converters")) - implementation(project(":airbyte-commons-protocol")) - implementation(project(":airbyte-commons-micronaut")) - implementation(project(":airbyte-commons-micronaut-security")) 
- implementation(project(":airbyte-commons-temporal")) - implementation(project(":airbyte-commons-with-dependencies")) - implementation(project(":airbyte-commons-worker")) - implementation(project(":airbyte-config:init")) - implementation(project(":airbyte-featureflag")) - implementation(project(":airbyte-json-validation")) - implementation(libs.airbyte.protocol) - implementation(project(":airbyte-metrics:metrics-lib")) - implementation(project(":airbyte-worker-models")) - - runtimeOnly(libs.snakeyaml) - - testAnnotationProcessor(platform(libs.micronaut.platform)) - testAnnotationProcessor(libs.bundles.micronaut.test.annotation.processor) - - testImplementation(libs.bundles.micronaut.test) - testImplementation(libs.bundles.mockito.inline) - testImplementation(libs.bundles.bouncycastle) - testImplementation(libs.postgresql) - testImplementation(libs.platform.testcontainers) - testImplementation(libs.platform.testcontainers.postgresql) - - airbyteProtocol(libs.airbyte.protocol) { - isTransitive = false - } + compileOnly(libs.lombok) + annotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut + annotationProcessor(platform(libs.micronaut.platform)) + annotationProcessor(libs.bundles.micronaut.annotation.processor) + + implementation(platform(libs.micronaut.platform)) + implementation(libs.bundles.micronaut) + implementation(libs.bundles.micronaut.cache) + implementation(libs.bundles.micronaut.metrics) + implementation(libs.guava) + implementation(libs.s3) + implementation(libs.aws.java.sdk.s3) + implementation(libs.sts) + implementation(libs.kubernetes.client) + implementation(libs.bundles.datadog) + implementation(libs.bundles.log4j) + + implementation(project(":airbyte-api")) + implementation(project(":airbyte-commons")) + implementation(project(":airbyte-config:config-models")) + implementation(project(":airbyte-commons-converters")) + implementation(project(":airbyte-commons-protocol")) + implementation(project(":airbyte-commons-micronaut")) + 
implementation(project(":airbyte-commons-micronaut-security")) + implementation(project(":airbyte-commons-temporal")) + implementation(project(":airbyte-commons-with-dependencies")) + implementation(project(":airbyte-commons-worker")) + implementation(project(":airbyte-config:init")) + implementation(project(":airbyte-featureflag")) + implementation(project(":airbyte-json-validation")) + implementation(libs.airbyte.protocol) + implementation(project(":airbyte-metrics:metrics-lib")) + implementation(project(":airbyte-worker-models")) + + runtimeOnly(libs.snakeyaml) + + testAnnotationProcessor(platform(libs.micronaut.platform)) + testAnnotationProcessor(libs.bundles.micronaut.test.annotation.processor) + + testImplementation(libs.bundles.micronaut.test) + testImplementation(libs.bundles.mockito.inline) + testImplementation(libs.bundles.bouncycastle) + testImplementation(libs.postgresql) + testImplementation(libs.platform.testcontainers) + testImplementation(libs.platform.testcontainers.postgresql) + + airbyteProtocol(libs.airbyte.protocol) { + isTransitive = false + } } airbyte { - application { - mainClass = "io.airbyte.container_orchestrator.Application" - defaultJvmArgs = listOf("-XX:+ExitOnOutOfMemoryError", "-XX:MaxRAMPercentage=75.0") - } - docker { - imageName = "container-orchestrator" - } + application { + mainClass = "io.airbyte.container_orchestrator.Application" + defaultJvmArgs = listOf("-XX:+ExitOnOutOfMemoryError", "-XX:MaxRAMPercentage=75.0") + } + docker { + imageName = "container-orchestrator" + } } // Duplicated from :airbyte-worker, eventually, this should be handled in :airbyte-protocol) val generateWellKnownTypes = tasks.register("generateWellKnownTypes") { - inputs.files(airbyteProtocol) // declaring inputs) - val targetFile = project.file("build/airbyte/docker/WellKnownTypes.json") - outputs.file(targetFile) // declaring outputs) - - doLast { - val wellKnownTypesYamlPath = "airbyte_protocol/well_known_types.yaml" - 
airbyteProtocol.files.forEach { - val zip = ZipFile(it) - val entry = zip.getEntry(wellKnownTypesYamlPath) - - val wellKnownTypesYaml = zip.getInputStream(entry).bufferedReader().use { reader -> reader.readText() } - val rawJson = yamlToJson(wellKnownTypesYaml) - targetFile.getParentFile().mkdirs() - targetFile.writeText(rawJson) - } + inputs.files(airbyteProtocol) // declaring inputs) + val targetFile = project.file("build/airbyte/docker/WellKnownTypes.json") + outputs.file(targetFile) // declaring outputs) + + doLast { + val wellKnownTypesYamlPath = "airbyte_protocol/well_known_types.yaml" + airbyteProtocol.files.forEach { + val zip = ZipFile(it) + val entry = zip.getEntry(wellKnownTypesYamlPath) + + val wellKnownTypesYaml = zip.getInputStream(entry).bufferedReader().use { reader -> reader.readText() } + val rawJson = yamlToJson(wellKnownTypesYaml) + targetFile.getParentFile().mkdirs() + targetFile.writeText(rawJson) } + } } tasks.named("dockerBuildImage") { - dependsOn(generateWellKnownTypes) + dependsOn(generateWellKnownTypes) } fun yamlToJson(rawYaml: String): String { - val mappedYaml: Any = YAMLMapper().registerKotlinModule().readValue(rawYaml) - return ObjectMapper().registerKotlinModule().writeValueAsString(mappedYaml) + val mappedYaml: Any = YAMLMapper().registerKotlinModule().readValue(rawYaml) + return ObjectMapper().registerKotlinModule().writeValueAsString(mappedYaml) } diff --git a/airbyte-container-orchestrator/src/main/java/io/airbyte/container_orchestrator/config/ContainerOrchestratorFactory.java b/airbyte-container-orchestrator/src/main/java/io/airbyte/container_orchestrator/config/ContainerOrchestratorFactory.java index 6130ed8a2ab..9cab558d6da 100644 --- a/airbyte-container-orchestrator/src/main/java/io/airbyte/container_orchestrator/config/ContainerOrchestratorFactory.java +++ b/airbyte-container-orchestrator/src/main/java/io/airbyte/container_orchestrator/config/ContainerOrchestratorFactory.java @@ -4,6 +4,7 @@ package 
io.airbyte.container_orchestrator.config; +import io.airbyte.api.client.WorkloadApiClient; import io.airbyte.commons.envvar.EnvVar; import io.airbyte.commons.features.EnvVariableFeatureFlags; import io.airbyte.commons.features.FeatureFlags; @@ -36,7 +37,6 @@ import io.airbyte.workers.sync.ReplicationLauncherWorker; import io.airbyte.workers.workload.JobOutputDocStore; import io.airbyte.workers.workload.WorkloadIdGenerator; -import io.airbyte.workload.api.client.generated.WorkloadApi; import io.fabric8.kubernetes.client.DefaultKubernetesClient; import io.micronaut.context.annotation.Factory; import io.micronaut.context.annotation.Prototype; @@ -120,13 +120,13 @@ JobOrchestrator jobOrchestrator( final JobRunConfig jobRunConfig, final ReplicationWorkerFactory replicationWorkerFactory, final AsyncStateManager asyncStateManager, - final WorkloadApi workloadApi, + final WorkloadApiClient workloadApiClient, final WorkloadIdGenerator workloadIdGenerator, @Value("${airbyte.workload.enabled}") final boolean workloadEnabled, final JobOutputDocStore jobOutputDocStore) { return switch (application) { case ReplicationLauncherWorker.REPLICATION -> new ReplicationJobOrchestrator(configDir, envConfigs, jobRunConfig, - replicationWorkerFactory, asyncStateManager, workloadApi, workloadIdGenerator, workloadEnabled, jobOutputDocStore); + replicationWorkerFactory, asyncStateManager, workloadApiClient, workloadIdGenerator, workloadEnabled, jobOutputDocStore); case NormalizationLauncherWorker.NORMALIZATION -> new NormalizationJobOrchestrator(envConfigs, processFactory, jobRunConfig, asyncStateManager); case DbtLauncherWorker.DBT -> new DbtJobOrchestrator(envConfigs, workerConfigsProvider, processFactory, jobRunConfig, asyncStateManager); case AsyncOrchestratorPodProcess.NO_OP -> new NoOpOrchestrator(); diff --git a/airbyte-container-orchestrator/src/main/java/io/airbyte/container_orchestrator/orchestrator/ReplicationJobOrchestrator.java 
b/airbyte-container-orchestrator/src/main/java/io/airbyte/container_orchestrator/orchestrator/ReplicationJobOrchestrator.java index 0558dd3d396..2018ad4ac24 100644 --- a/airbyte-container-orchestrator/src/main/java/io/airbyte/container_orchestrator/orchestrator/ReplicationJobOrchestrator.java +++ b/airbyte-container-orchestrator/src/main/java/io/airbyte/container_orchestrator/orchestrator/ReplicationJobOrchestrator.java @@ -11,6 +11,7 @@ import com.google.common.annotations.VisibleForTesting; import datadog.trace.api.Trace; +import io.airbyte.api.client.WorkloadApiClient; import io.airbyte.commons.json.Jsons; import io.airbyte.commons.temporal.TemporalUtils; import io.airbyte.config.Configs; @@ -31,7 +32,6 @@ import io.airbyte.workers.sync.ReplicationLauncherWorker; import io.airbyte.workers.workload.JobOutputDocStore; import io.airbyte.workers.workload.WorkloadIdGenerator; -import io.airbyte.workload.api.client.generated.WorkloadApi; import io.airbyte.workload.api.client.model.generated.WorkloadCancelRequest; import io.airbyte.workload.api.client.model.generated.WorkloadFailureRequest; import io.airbyte.workload.api.client.model.generated.WorkloadSuccessRequest; @@ -55,7 +55,7 @@ public class ReplicationJobOrchestrator implements JobOrchestrator failureReason) throws IOException { if (failureReason.isPresent()) { - workloadApi.workloadFailure(new WorkloadFailureRequest(workloadId, + workloadApiClient.getWorkloadApi().workloadFailure(new WorkloadFailureRequest(workloadId, failureReason.get().getFailureOrigin().value(), failureReason.get().getExternalMessage())); } else { - workloadApi.workloadFailure(new WorkloadFailureRequest(workloadId, null, null)); + workloadApiClient.getWorkloadApi().workloadFailure(new WorkloadFailureRequest(workloadId, null, null)); } } private void succeedWorkload(final String workloadId) throws IOException { - workloadApi.workloadSuccess(new WorkloadSuccessRequest(workloadId)); + workloadApiClient.getWorkloadApi().workloadSuccess(new 
WorkloadSuccessRequest(workloadId)); } private void markJobRunning() { diff --git a/airbyte-container-orchestrator/src/main/resources/application-k8s.yml b/airbyte-container-orchestrator/src/main/resources/application-k8s.yml new file mode 100644 index 00000000000..2226fc3d3ce --- /dev/null +++ b/airbyte-container-orchestrator/src/main/resources/application-k8s.yml @@ -0,0 +1,9 @@ +micronaut: + caches: + # used by the analytics tracking client to cache calls to resolve the deployment and identity (workspace) for + # track events + # We overwrite the expiry in orchestrator to avoid making the workspace API call again and again for state stats metrics + analytics-tracking-deployments: + expire-after-access: 24h + analytics-tracking-identity: + expire-after-access: 24h diff --git a/airbyte-container-orchestrator/src/main/resources/application.yml b/airbyte-container-orchestrator/src/main/resources/application.yml index 9fdf0bb73ad..29d16fffe1e 100644 --- a/airbyte-container-orchestrator/src/main/resources/application.yml +++ b/airbyte-container-orchestrator/src/main/resources/application.yml @@ -1,10 +1,19 @@ micronaut: application: name: airbyte-container-orchestrator + caches: + # used by the analytics tracking client to cache calls to resolve the deployment and identity (workspace) for + # track events + analytics-tracking-deployments: + charset: "UTF-8" + expire-after-access: 10m + analytics-tracking-identity: + charset: "UTF-8" + expire-after-access: 10m env: cloud-deduction: true server: - port: 9000 + port: 9000 # If this value is modified, update the SERVER_PORT constant in OrchestratorConstants.kt executors: control-message: type: fixed @@ -39,6 +48,7 @@ airbyte: log: ${STORAGE_BUCKET_LOG} state: ${STORAGE_BUCKET_STATE} workload-output: ${STORAGE_BUCKET_WORKLOAD_OUTPUT} + activity-payload: ${STORAGE_BUCKET_ACTIVITY_PAYLOAD} gcs: application-credentials: ${GOOGLE_APPLICATION_CREDENTIALS:} local: @@ -129,6 +139,9 @@ endpoints: beans: enabled: true sensitive: 
false + caches: + enabled: true + sensitive: false env: enabled: true sensitive: false diff --git a/airbyte-container-orchestrator/src/test/java/io/airbyte/container_orchestrator/config/ContainerOrchestratorFactoryTest.java b/airbyte-container-orchestrator/src/test/java/io/airbyte/container_orchestrator/config/ContainerOrchestratorFactoryTest.java index 08166c624ef..a7adaea2408 100644 --- a/airbyte-container-orchestrator/src/test/java/io/airbyte/container_orchestrator/config/ContainerOrchestratorFactoryTest.java +++ b/airbyte-container-orchestrator/src/test/java/io/airbyte/container_orchestrator/config/ContainerOrchestratorFactoryTest.java @@ -10,6 +10,7 @@ import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.Mockito.mock; +import io.airbyte.api.client.WorkloadApiClient; import io.airbyte.commons.envvar.EnvVar; import io.airbyte.commons.features.FeatureFlags; import io.airbyte.commons.workers.config.WorkerConfigs; @@ -29,7 +30,6 @@ import io.airbyte.workers.sync.ReplicationLauncherWorker; import io.airbyte.workers.workload.JobOutputDocStore; import io.airbyte.workers.workload.WorkloadIdGenerator; -import io.airbyte.workload.api.client.generated.WorkloadApi; import io.micronaut.context.annotation.Bean; import io.micronaut.context.annotation.Replaces; import io.micronaut.context.env.Environment; @@ -69,7 +69,7 @@ class ContainerOrchestratorFactoryTest { JobRunConfig jobRunConfig; @Inject - WorkloadApi workloadApi; + WorkloadApiClient workloadApiClient; @Inject ReplicationWorkerFactory replicationWorkerFactory; @@ -124,29 +124,29 @@ void jobOrchestrator() { final var repl = factory.jobOrchestrator( ReplicationLauncherWorker.REPLICATION, configDir, envConfigs, processFactory, workerConfigsProvider, jobRunConfig, replicationWorkerFactory, - asyncStateManager, workloadApi, new WorkloadIdGenerator(), false, jobOutputDocStore); + asyncStateManager, workloadApiClient, new WorkloadIdGenerator(), false, jobOutputDocStore); 
assertEquals("Replication", repl.getOrchestratorName()); final var norm = factory.jobOrchestrator( NormalizationLauncherWorker.NORMALIZATION, configDir, envConfigs, processFactory, workerConfigsProvider, jobRunConfig, replicationWorkerFactory, - asyncStateManager, workloadApi, new WorkloadIdGenerator(), false, jobOutputDocStore); + asyncStateManager, workloadApiClient, new WorkloadIdGenerator(), false, jobOutputDocStore); assertEquals("Normalization", norm.getOrchestratorName()); final var dbt = factory.jobOrchestrator( DbtLauncherWorker.DBT, configDir, envConfigs, processFactory, workerConfigsProvider, jobRunConfig, - replicationWorkerFactory, asyncStateManager, workloadApi, new WorkloadIdGenerator(), false, jobOutputDocStore); + replicationWorkerFactory, asyncStateManager, workloadApiClient, new WorkloadIdGenerator(), false, jobOutputDocStore); assertEquals("DBT Transformation", dbt.getOrchestratorName()); final var noop = factory.jobOrchestrator( AsyncOrchestratorPodProcess.NO_OP, configDir, envConfigs, processFactory, workerConfigsProvider, jobRunConfig, replicationWorkerFactory, - asyncStateManager, workloadApi, new WorkloadIdGenerator(), false, jobOutputDocStore); + asyncStateManager, workloadApiClient, new WorkloadIdGenerator(), false, jobOutputDocStore); assertEquals("NO_OP", noop.getOrchestratorName()); var caught = false; try { factory.jobOrchestrator("does not exist", configDir, envConfigs, processFactory, workerConfigsProvider, jobRunConfig, replicationWorkerFactory, - asyncStateManager, workloadApi, new WorkloadIdGenerator(), false, jobOutputDocStore); + asyncStateManager, workloadApiClient, new WorkloadIdGenerator(), false, jobOutputDocStore); } catch (final Exception e) { caught = true; } diff --git a/airbyte-container-orchestrator/src/test/java/io/airbyte/container_orchestrator/orchestrator/ReplicationJobOrchestratorTest.java 
b/airbyte-container-orchestrator/src/test/java/io/airbyte/container_orchestrator/orchestrator/ReplicationJobOrchestratorTest.java index cddb7d66f6f..7de9a8b04dc 100644 --- a/airbyte-container-orchestrator/src/test/java/io/airbyte/container_orchestrator/orchestrator/ReplicationJobOrchestratorTest.java +++ b/airbyte-container-orchestrator/src/test/java/io/airbyte/container_orchestrator/orchestrator/ReplicationJobOrchestratorTest.java @@ -13,6 +13,7 @@ import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; +import io.airbyte.api.client.WorkloadApiClient; import io.airbyte.config.Configs; import io.airbyte.config.ReplicationAttemptSummary; import io.airbyte.config.ReplicationOutput; @@ -43,6 +44,7 @@ class ReplicationJobOrchestratorTest { private ReplicationWorkerFactory replicationWorkerFactory; private WorkloadApi workloadApi; + private WorkloadApiClient workloadApiClient; private WorkloadIdGenerator workloadIdGenerator; private ReplicationWorker replicationWorker; @@ -50,8 +52,11 @@ class ReplicationJobOrchestratorTest { void setUp() { replicationWorkerFactory = mock(ReplicationWorkerFactory.class); workloadApi = mock(WorkloadApi.class); + workloadApiClient = mock(WorkloadApiClient.class); workloadIdGenerator = mock(WorkloadIdGenerator.class); replicationWorker = mock(ReplicationWorker.class); + + when(workloadApiClient.getWorkloadApi()).thenReturn(workloadApi); } @Test @@ -69,7 +74,7 @@ void testRunWithWorkloadEnabledRunCancelled() throws Exception { jobRunConfig, replicationWorkerFactory, mock(AsyncStateManager.class), - workloadApi, + workloadApiClient, workloadIdGenerator, true, mock(JobOutputDocStore.class)); @@ -99,7 +104,7 @@ void testRunWithWorkloadEnabledRunCompleted() throws Exception { jobRunConfig, replicationWorkerFactory, mock(AsyncStateManager.class), - workloadApi, + workloadApiClient, workloadIdGenerator, true, mock(JobOutputDocStore.class)); @@ -128,7 +133,7 @@ void testRunWithWorkloadEnabledRunFailed() throws Exception { 
jobRunConfig, replicationWorkerFactory, mock(AsyncStateManager.class), - workloadApi, + workloadApiClient, workloadIdGenerator, true, mock(JobOutputDocStore.class)); @@ -157,7 +162,7 @@ void testRunWithWorkloadEnabledRunThrowsException() throws Exception { jobRunConfig, replicationWorkerFactory, mock(AsyncStateManager.class), - workloadApi, + workloadApiClient, workloadIdGenerator, true, mock(JobOutputDocStore.class)); diff --git a/airbyte-container-orchestrator/src/test/resources/application-test.yml b/airbyte-container-orchestrator/src/test/resources/application-test.yml index b9073d36e8e..cb2b3e2bf42 100644 --- a/airbyte-container-orchestrator/src/test/resources/application-test.yml +++ b/airbyte-container-orchestrator/src/test/resources/application-test.yml @@ -58,6 +58,7 @@ airbyte: log: ${STORAGE_BUCKET_LOG:log} state: ${STORAGE_BUCKET_STATE:state} workload-output: ${STORAGE_BUCKET_WORKLOAD_OUTPUT:workload-output} + activity-payload: ${STORAGE_BUCKET_ACTIVITY_PAYLOAD:activity-payload} gcs: application-credentials: ${GOOGLE_APPLICATION_CREDENTIALS:not-blank} local: diff --git a/airbyte-cron/Dockerfile b/airbyte-cron/Dockerfile index 3c0abb1bfbe..2fbf270cbd6 100644 --- a/airbyte-cron/Dockerfile +++ b/airbyte-cron/Dockerfile @@ -1,5 +1,12 @@ -ARG JDK_IMAGE=airbyte/airbyte-base-java-image:2.1.0 -FROM ${JDK_IMAGE} +ARG JDK_IMAGE=airbyte/airbyte-base-java-image:3.2.1 + +FROM scratch as builder WORKDIR /app ADD airbyte-app.tar /app + +FROM ${JDK_IMAGE} +WORKDIR /app +COPY --chown=airbyte:airbyte --from=builder /app /app +USER airbyte:airbyte + ENTRYPOINT ["/bin/bash", "-c", "airbyte-app/bin/airbyte-cron"] diff --git a/airbyte-cron/build.gradle.kts b/airbyte-cron/build.gradle.kts index 03aafeb83f3..032063cb4ab 100644 --- a/airbyte-cron/build.gradle.kts +++ b/airbyte-cron/build.gradle.kts @@ -1,76 +1,80 @@ import java.util.Properties plugins { - id("io.airbyte.gradle.jvm.app") - id("io.airbyte.gradle.docker") - id("io.airbyte.gradle.publish") - kotlin("jvm") - 
kotlin("kapt") + id("io.airbyte.gradle.jvm.app") + id("io.airbyte.gradle.docker") + id("io.airbyte.gradle.publish") + kotlin("jvm") + kotlin("kapt") } dependencies { - compileOnly(libs.lombok) - annotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut - annotationProcessor(platform(libs.micronaut.platform)) - annotationProcessor(libs.bundles.micronaut.annotation.processor) - kapt(libs.bundles.micronaut.annotation.processor) + compileOnly(libs.lombok) + annotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut + annotationProcessor(platform(libs.micronaut.platform)) + annotationProcessor(libs.bundles.micronaut.annotation.processor) + kapt(libs.bundles.micronaut.annotation.processor) - implementation(platform(libs.micronaut.platform)) - implementation(libs.bundles.micronaut) - implementation(libs.bundles.micronaut.metrics) - implementation(libs.bundles.kubernetes.client) - implementation(libs.bundles.temporal) - implementation(libs.bundles.datadog) - implementation(libs.failsafe) - implementation(libs.failsafe.okhttp) - implementation(libs.java.jwt) - implementation(libs.kotlin.logging) - implementation(libs.okhttp) - implementation(libs.sentry.java) - implementation(libs.lombok) - implementation(libs.commons.io) + implementation(platform(libs.micronaut.platform)) + implementation(libs.bundles.micronaut) + implementation(libs.bundles.micronaut.cache) + implementation(libs.bundles.micronaut.metrics) + implementation(libs.bundles.kubernetes.client) + implementation(libs.bundles.temporal) + implementation(libs.bundles.datadog) + implementation(libs.failsafe) + implementation(libs.failsafe.okhttp) + implementation(libs.java.jwt) + implementation(libs.kotlin.logging) + implementation(libs.okhttp) + implementation(libs.sentry.java) + implementation(libs.lombok) + implementation(libs.commons.io) - implementation(project(":airbyte-api")) - implementation(project(":airbyte-analytics")) - implementation(project(":airbyte-commons")) - 
implementation(project(":airbyte-commons-auth")) - implementation(project(":airbyte-commons-micronaut")) - implementation(project(":airbyte-commons-temporal")) - implementation(project(":airbyte-config:config-models")) - implementation(project(":airbyte-config:config-persistence")) - implementation(project(":airbyte-config:init")) - implementation(project(":airbyte-json-validation")) - implementation(project(":airbyte-data")) - implementation(project(":airbyte-db:db-lib")) - implementation(project(":airbyte-featureflag")) - implementation(project(":airbyte-metrics:metrics-lib")) - implementation(project(":airbyte-persistence:job-persistence")) + implementation(project(":airbyte-api")) + implementation(project(":airbyte-analytics")) + implementation(project(":airbyte-commons")) + implementation(project(":airbyte-commons-auth")) + implementation(project(":airbyte-commons-micronaut")) + implementation(project(":airbyte-commons-temporal")) + implementation(project(":airbyte-config:config-models")) + implementation(project(":airbyte-config:config-persistence")) + implementation(project(":airbyte-config:init")) + implementation(project(":airbyte-json-validation")) + implementation(project(":airbyte-data")) + implementation(project(":airbyte-db:db-lib")) + implementation(project(":airbyte-featureflag")) + implementation(project(":airbyte-metrics:metrics-lib")) + implementation(project(":airbyte-persistence:job-persistence")) - runtimeOnly(libs.snakeyaml) + runtimeOnly(libs.snakeyaml) - testImplementation(libs.bundles.junit) - testImplementation(libs.mockk) + testImplementation(libs.bundles.junit) + testImplementation(libs.mockk) } -val env = Properties().apply { +val env = + Properties().apply { load(rootProject.file(".env.dev").inputStream()) -} + } airbyte { - application { - mainClass = "io.airbyte.cron.MicronautCronRunner" - defaultJvmArgs = listOf("-XX:+ExitOnOutOfMemoryError", "-XX:MaxRAMPercentage=75.0") - @Suppress("UNCHECKED_CAST") - 
localEnvVars.putAll(env.toMap() as Map) - localEnvVars.putAll(mapOf( - "AIRBYTE_ROLE" to (System.getenv("AIRBYTE_ROLE") ?: "undefined"), - "AIRBYTE_VERSION" to env["VERSION"].toString(), - )) - } + application { + mainClass = "io.airbyte.cron.MicronautCronRunner" + defaultJvmArgs = listOf("-XX:+ExitOnOutOfMemoryError", "-XX:MaxRAMPercentage=75.0") + @Suppress("UNCHECKED_CAST") + localEnvVars.putAll(env.toMap() as Map) + localEnvVars.putAll( + mapOf( + "AIRBYTE_ROLE" to (System.getenv("AIRBYTE_ROLE") ?: "undefined"), + "AIRBYTE_VERSION" to env["VERSION"].toString(), + ), + ) + } - docker { - imageName = "cron" - } + docker { + imageName = "cron" + } } // The DuplicatesStrategy will be required while this module is mixture of kotlin and java _with_ lombok dependencies.) @@ -79,5 +83,5 @@ airbyte { // keepJavacAnnotationProcessors enabled, which causes duplicate META-INF files to be generated.) // Once lombok has been removed, this can also be removed.) tasks.withType().configureEach { - duplicatesStrategy = DuplicatesStrategy.EXCLUDE + duplicatesStrategy = DuplicatesStrategy.EXCLUDE } diff --git a/airbyte-cron/src/main/java/io/airbyte/cron/config/ApiBeanFactory.kt b/airbyte-cron/src/main/java/io/airbyte/cron/config/ApiBeanFactory.kt deleted file mode 100644 index 868bb998c97..00000000000 --- a/airbyte-cron/src/main/java/io/airbyte/cron/config/ApiBeanFactory.kt +++ /dev/null @@ -1,134 +0,0 @@ -package io.airbyte.cron.config - -import dev.failsafe.RetryPolicy -import io.airbyte.api.client.WorkloadApiClient -import io.airbyte.commons.auth.AuthenticationInterceptor -import io.airbyte.commons.temporal.config.WorkerMode -import io.airbyte.workload.api.client.generated.WorkloadApi -import io.github.oshai.kotlinlogging.KotlinLogging -import io.micrometer.core.instrument.MeterRegistry -import io.micronaut.context.annotation.Factory -import io.micronaut.context.annotation.Value -import io.micronaut.context.env.Environment -import jakarta.inject.Named -import 
jakarta.inject.Singleton -import okhttp3.HttpUrl -import okhttp3.OkHttpClient -import okhttp3.Response -import org.openapitools.client.infrastructure.ClientException -import org.openapitools.client.infrastructure.ServerException -import java.io.IOException -import java.time.Duration -import java.util.Optional - -private val logger = KotlinLogging.logger {} - -@Factory -class ApiBeanFactory { - @Singleton - fun workloadApiClient( - @Value("\${airbyte.workload-api.base-path}") workloadApiBasePath: String, - @Value("\${airbyte.workload-api.connect-timeout-seconds}") connectTimeoutSeconds: Long, - @Value("\${airbyte.workload-api.read-timeout-seconds}") readTimeoutSeconds: Long, - @Value("\${airbyte.workload-api.retries.delay-seconds}") retryDelaySeconds: Long, - @Value("\${airbyte.workload-api.retries.max}") maxRetries: Int, - authenticationInterceptor: AuthenticationInterceptor, - meterRegistry: Optional, - ): WorkloadApi { - val builder: OkHttpClient.Builder = OkHttpClient.Builder() - builder.addInterceptor(authenticationInterceptor) - builder.readTimeout(Duration.ofSeconds(readTimeoutSeconds)) - builder.connectTimeout(Duration.ofSeconds(connectTimeoutSeconds)) - - val okHttpClient: OkHttpClient = builder.build() - val metricTags = arrayOf("max-retries", maxRetries.toString()) - - val retryPolicy: RetryPolicy = - RetryPolicy.builder() - .handle( - listOf( - IllegalStateException::class.java, - IOException::class.java, - UnsupportedOperationException::class.java, - ClientException::class.java, - ServerException::class.java, - ), - ) - // TODO move these metrics into a centralized metric registery as part of the MetricClient refactor/cleanup - .onAbort { l -> - logger.warn { "Attempt aborted. 
Attempt count ${l.attemptCount}" } - meterRegistry.ifPresent { r -> - r.counter( - "workload_api_client.abort", - *metricTags, - *arrayOf("retry-attempt", l.attemptCount.toString(), "method", l.result.request.method), - *getUrlTags(l.result.request.url), - ).increment() - } - } - .onFailure { l -> - logger.error(l.exception) { "Failed to call ${l.result.request.url}. Last response: ${l.result}" } - meterRegistry.ifPresent { r -> - r.counter( - "workload_api_client.failure", - *metricTags, - *arrayOf("retry-attempt", l.attemptCount.toString(), "method", l.result.request.method), - *getUrlTags(l.result.request.url), - ).increment() - } - } - .onRetry { l -> - logger.warn { "Retry attempt ${l.attemptCount} of $maxRetries. Last response: ${l.lastResult}" } - meterRegistry.ifPresent { r -> - r.counter( - "workload_api_client.retry", - *metricTags, - *arrayOf("retry-attempt", l.attemptCount.toString(), "method", l.lastResult.request.method), - *getUrlTags(l.lastResult.request.url), - ).increment() - } - } - .onRetriesExceeded { l -> - logger.error(l.exception) { "Retry attempts exceeded." } - meterRegistry.ifPresent { r -> - r.counter( - "workload_api_client.retries_exceeded", - *metricTags, - *arrayOf("retry-attempt", l.attemptCount.toString(), "method", l.result.request.method), - *getUrlTags(l.result.request.url), - ).increment() - } - } - .onSuccess { l -> - logger.debug { "Successfully called ${l.result.request.url}. 
Response: ${l.result}, isRetry: ${l.isRetry}" } - meterRegistry.ifPresent { r -> - r.counter( - "workload_api_client.success", - *metricTags, - *arrayOf("retry-attempt", l.attemptCount.toString(), "method", l.result.request.method), - *getUrlTags(l.result.request.url), - ).increment() - } - } - .withDelay(Duration.ofSeconds(retryDelaySeconds)) - .withMaxRetries(maxRetries) - .build() - - return WorkloadApiClient(workloadApiBasePath, retryPolicy, okHttpClient).workloadApi - } - - @Singleton - @Named("internalApiScheme") - fun internalApiScheme(environment: Environment): String { - return if (environment.activeNames.contains(WorkerMode.CONTROL_PLANE)) "http" else "https" - } - - private fun getUrlTags(httpUrl: HttpUrl): Array { - val last = httpUrl.pathSegments.last() - if (last.contains("[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}".toRegex())) { - return arrayOf("url", httpUrl.toString().removeSuffix(last), "workload-id", last) - } else { - return arrayOf("url", httpUrl.toString()) - } - } -} diff --git a/airbyte-cron/src/main/java/io/airbyte/cron/jobs/WorkloadMonitor.kt b/airbyte-cron/src/main/java/io/airbyte/cron/jobs/WorkloadMonitor.kt index a40b2731f93..ddcb3d86309 100644 --- a/airbyte-cron/src/main/java/io/airbyte/cron/jobs/WorkloadMonitor.kt +++ b/airbyte-cron/src/main/java/io/airbyte/cron/jobs/WorkloadMonitor.kt @@ -1,14 +1,13 @@ package io.airbyte.cron.jobs import datadog.trace.api.Trace -import io.airbyte.featureflag.FeatureFlagClient +import io.airbyte.api.client.WorkloadApiClient import io.airbyte.metrics.annotations.Instrument import io.airbyte.metrics.annotations.Tag import io.airbyte.metrics.lib.MetricAttribute import io.airbyte.metrics.lib.MetricClient import io.airbyte.metrics.lib.MetricTags import io.airbyte.metrics.lib.OssMetricsRegistry -import io.airbyte.workload.api.client.generated.WorkloadApi import io.airbyte.workload.api.client.model.generated.ExpiredDeadlineWorkloadListRequest import 
io.airbyte.workload.api.client.model.generated.LongRunningWorkloadRequest import io.airbyte.workload.api.client.model.generated.Workload @@ -32,10 +31,9 @@ private val logger = KotlinLogging.logger { } value = "true", ) open class WorkloadMonitor( - private val workloadApi: WorkloadApi, + private val workloadApiClient: WorkloadApiClient, @Property(name = "airbyte.workload.monitor.non-sync-workload-timeout") private val nonSyncWorkloadTimeout: Duration, @Property(name = "airbyte.workload.monitor.sync-workload-timeout") private val syncWorkloadTimeout: Duration, - private val featureFlagClient: FeatureFlagClient, private val metricClient: MetricClient, private val timeProvider: (ZoneId) -> OffsetDateTime = OffsetDateTime::now, ) { @@ -59,7 +57,7 @@ open class WorkloadMonitor( logger.info { "Checking for not started workloads." } val oldestStartedTime = timeProvider(ZoneOffset.UTC) val notStartedWorkloads = - workloadApi.workloadListWithExpiredDeadline( + workloadApiClient.workloadApi.workloadListWithExpiredDeadline( ExpiredDeadlineWorkloadListRequest( oldestStartedTime, status = listOf(WorkloadStatus.CLAIMED), @@ -80,7 +78,7 @@ open class WorkloadMonitor( logger.info { "Checking for not claimed workloads." } val oldestClaimTime = timeProvider(ZoneOffset.UTC) val notClaimedWorkloads = - workloadApi.workloadListWithExpiredDeadline( + workloadApiClient.workloadApi.workloadListWithExpiredDeadline( ExpiredDeadlineWorkloadListRequest( oldestClaimTime, status = listOf(WorkloadStatus.PENDING), @@ -102,7 +100,7 @@ open class WorkloadMonitor( logger.info { "Checking for non heartbeating workloads." 
} val oldestHeartbeatTime = timeProvider(ZoneOffset.UTC) val nonHeartbeatingWorkloads = - workloadApi.workloadListWithExpiredDeadline( + workloadApiClient.workloadApi.workloadListWithExpiredDeadline( ExpiredDeadlineWorkloadListRequest( oldestHeartbeatTime, status = listOf(WorkloadStatus.RUNNING, WorkloadStatus.LAUNCHED), @@ -123,7 +121,7 @@ open class WorkloadMonitor( open fun cancelRunningForTooLongNonSyncWorkloads() { logger.info { "Checking for workloads running for too long with timeout value $nonSyncWorkloadTimeout" } val nonHeartbeatingWorkloads = - workloadApi.workloadListOldNonSync( + workloadApiClient.workloadApi.workloadListOldNonSync( LongRunningWorkloadRequest( createdBefore = timeProvider(ZoneOffset.UTC).minus(nonSyncWorkloadTimeout), ), @@ -143,7 +141,7 @@ open class WorkloadMonitor( open fun cancelRunningForTooLongSyncWorkloads() { logger.info { "Checking for sync workloads running for too long with timeout value $syncWorkloadTimeout" } val nonHeartbeatingWorkloads = - workloadApi.workloadListOldSync( + workloadApiClient.workloadApi.workloadListOldSync( LongRunningWorkloadRequest( createdBefore = timeProvider(ZoneOffset.UTC).minus(syncWorkloadTimeout), ), @@ -161,7 +159,7 @@ open class WorkloadMonitor( var status = "fail" try { logger.info { "Cancelling workload ${it.id}, reason: $reason" } - workloadApi.workloadFailure(WorkloadFailureRequest(workloadId = it.id, reason = reason, source = source)) + workloadApiClient.workloadApi.workloadFailure(WorkloadFailureRequest(workloadId = it.id, reason = reason, source = source)) status = "ok" } catch (e: Exception) { logger.warn(e) { "Failed to cancel workload ${it.id}" } diff --git a/airbyte-cron/src/main/resources/application.yml b/airbyte-cron/src/main/resources/application.yml index 9397777cf98..ce823361448 100644 --- a/airbyte-cron/src/main/resources/application.yml +++ b/airbyte-cron/src/main/resources/application.yml @@ -1,6 +1,17 @@ micronaut: application: name: airbyte-cron + caches: + # used by the 
analytics tracking client to cache calls to resolve the deployment and identity (workspace) for + # track events + analytics-tracking-deployments: + charset: "UTF-8" + expire-after-access: 10m + analytics-tracking-identity: + charset: "UTF-8" + expire-after-access: 10m + remote-definitions-provider: + expire-after-write: 15s env: cloud-deduction: true metrics: @@ -12,9 +23,6 @@ micronaut: step: ${MICROMETER_METRICS_STEP:PT30S} host: ${STATSD_HOST:localhost} port: ${STATSD_PORT:8125} - caches: - remote-definitions-provider: - expire-after-write: 15s server: port: 9001 @@ -106,6 +114,9 @@ endpoints: beans: enabled: true sensitive: false + caches: + enabled: true + sensitive: false env: enabled: true sensitive: false diff --git a/airbyte-cron/src/test/kotlin/io/airbyte/cron/jobs/WorkloadMonitorTest.kt b/airbyte-cron/src/test/kotlin/io/airbyte/cron/jobs/WorkloadMonitorTest.kt index 9a51fa7c4a9..5dd2fc3616f 100644 --- a/airbyte-cron/src/test/kotlin/io/airbyte/cron/jobs/WorkloadMonitorTest.kt +++ b/airbyte-cron/src/test/kotlin/io/airbyte/cron/jobs/WorkloadMonitorTest.kt @@ -1,7 +1,6 @@ package io.airbyte.cron.jobs -import io.airbyte.featureflag.FeatureFlagClient -import io.airbyte.featureflag.TestClient +import io.airbyte.api.client.WorkloadApiClient import io.airbyte.metrics.lib.MetricAttribute import io.airbyte.metrics.lib.MetricClient import io.airbyte.metrics.lib.MetricTags @@ -32,8 +31,8 @@ class WorkloadMonitorTest { lateinit var currentTime: OffsetDateTime lateinit var metricClient: MetricClient lateinit var workloadApi: WorkloadApi + lateinit var workloadApiClient: WorkloadApiClient lateinit var workloadMonitor: WorkloadMonitor - lateinit var featureFlagClient: FeatureFlagClient @BeforeEach fun beforeEach() { @@ -41,14 +40,14 @@ class WorkloadMonitorTest { mockk().also { every { it.count(any(), any(), *anyVararg()) } returns Unit } - featureFlagClient = TestClient(emptyMap()) workloadApi = mockk() + workloadApiClient = mockk() + every { 
workloadApiClient.workloadApi } returns workloadApi workloadMonitor = WorkloadMonitor( - workloadApi = workloadApi, + workloadApiClient = workloadApiClient, nonSyncWorkloadTimeout = nonSyncTimeout, syncWorkloadTimeout = syncTimeout, - featureFlagClient = featureFlagClient, metricClient = metricClient, timeProvider = { _: ZoneId -> currentTime }, ) diff --git a/airbyte-data/build.gradle.kts b/airbyte-data/build.gradle.kts index 539912933ac..b1a8d31f515 100644 --- a/airbyte-data/build.gradle.kts +++ b/airbyte-data/build.gradle.kts @@ -1,60 +1,60 @@ plugins { - id("io.airbyte.gradle.jvm.lib") - id("io.airbyte.gradle.publish") - id("org.jetbrains.kotlin.jvm") - id("org.jetbrains.kotlin.kapt") - `java-test-fixtures` + id("io.airbyte.gradle.jvm.lib") + id("io.airbyte.gradle.publish") + id("org.jetbrains.kotlin.jvm") + id("org.jetbrains.kotlin.kapt") + `java-test-fixtures` } dependencies { - compileOnly(libs.lombok) - annotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut - - api(libs.bundles.micronaut.annotation) - - kapt(platform(libs.micronaut.platform)) - kapt(libs.bundles.micronaut.annotation.processor) - - kaptTest(platform(libs.micronaut.platform)) - kaptTest(libs.bundles.micronaut.test.annotation.processor) - - implementation(libs.bundles.apache) - implementation(libs.bundles.jackson) - implementation(libs.bundles.micronaut.data.jdbc) - implementation(libs.guava) - implementation(project(":airbyte-commons")) - implementation(project(":airbyte-commons-auth")) - implementation(project(":airbyte-commons-protocol")) - implementation(project(":airbyte-commons-license")) - implementation(project(":airbyte-config:config-models")) - implementation(project(":airbyte-config:config-secrets")) - implementation(project(":airbyte-db:db-lib")) - implementation(project(":airbyte-db:jooq")) - implementation(project(":airbyte-json-validation")) - implementation(project(":airbyte-featureflag")) - implementation(libs.airbyte.protocol) - // For Keycloak Application 
Management - implementation(libs.bundles.keycloak.client) - - testCompileOnly(libs.lombok) - testAnnotationProcessor(libs.lombok) - - testImplementation(libs.bundles.micronaut.test) - testImplementation(libs.postgresql) - testImplementation(libs.platform.testcontainers.postgresql) - testImplementation(libs.mockk) - testImplementation(project(":airbyte-test-utils")) - testImplementation(libs.bundles.junit) - - // TODO: flip this import - MockData should live in airbyte-data's testFixtures - // and be imported in this manner by config-persistence - // We can move the BaseConfigDatasets to airbyte-data's testFixtures as well. - testImplementation(testFixtures(project(":airbyte-config:config-persistence"))) + compileOnly(libs.lombok) + annotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut + + api(libs.bundles.micronaut.annotation) + + kapt(platform(libs.micronaut.platform)) + kapt(libs.bundles.micronaut.annotation.processor) + + kaptTest(platform(libs.micronaut.platform)) + kaptTest(libs.bundles.micronaut.test.annotation.processor) + + implementation(libs.bundles.apache) + implementation(libs.bundles.jackson) + implementation(libs.bundles.micronaut.data.jdbc) + implementation(libs.guava) + implementation(project(":airbyte-commons")) + implementation(project(":airbyte-commons-auth")) + implementation(project(":airbyte-commons-protocol")) + implementation(project(":airbyte-commons-license")) + implementation(project(":airbyte-config:config-models")) + implementation(project(":airbyte-config:config-secrets")) + implementation(project(":airbyte-db:db-lib")) + implementation(project(":airbyte-db:jooq")) + implementation(project(":airbyte-json-validation")) + implementation(project(":airbyte-featureflag")) + implementation(libs.airbyte.protocol) + // For Keycloak Application Management + implementation(libs.bundles.keycloak.client) + + testCompileOnly(libs.lombok) + testAnnotationProcessor(libs.lombok) + + testImplementation(libs.bundles.micronaut.test) + 
testImplementation(libs.postgresql) + testImplementation(libs.platform.testcontainers.postgresql) + testImplementation(libs.mockk) + testImplementation(project(":airbyte-test-utils")) + testImplementation(libs.bundles.junit) + + // TODO: flip this import - MockData should live in airbyte-data's testFixtures + // and be imported in this manner by config-persistence + // We can move the BaseConfigDatasets to airbyte-data's testFixtures as well. + testImplementation(testFixtures(project(":airbyte-config:config-persistence"))) } // Even though Kotlin is excluded on Spotbugs, this project // still runs into spotbug issues. Working theory is that // generated code is being picked up. Disable as a short-term fix. tasks.named("spotbugsMain") { - enabled = false + enabled = false } diff --git a/airbyte-data/src/main/java/io/airbyte/data/services/ActorDefinitionService.java b/airbyte-data/src/main/java/io/airbyte/data/services/ActorDefinitionService.java index f03b44fca51..141cf923873 100644 --- a/airbyte-data/src/main/java/io/airbyte/data/services/ActorDefinitionService.java +++ b/airbyte-data/src/main/java/io/airbyte/data/services/ActorDefinitionService.java @@ -8,6 +8,7 @@ import io.airbyte.config.ActorDefinitionBreakingChange; import io.airbyte.config.ActorDefinitionVersion; import io.airbyte.data.exceptions.ConfigNotFoundException; +import io.airbyte.data.services.shared.ActorWorkspaceOrganizationIds; import java.io.IOException; import java.util.List; import java.util.Map; @@ -44,8 +45,18 @@ public interface ActorDefinitionService { List getActorDefinitionVersions(List actorDefinitionVersionIds) throws IOException; + void updateActorDefinitionDefaultVersionId(final UUID actorDefinitionId, final UUID versionId) throws IOException; + + Optional getDefaultVersionForActorDefinitionIdOptional(final UUID actorDefinitionId) throws IOException; + void setActorDefaultVersion(UUID actorId, UUID actorDefinitionVersionId) throws IOException; + void setActorDefaultVersions(List 
actorId, UUID actorDefinitionVersionId) throws IOException; + + Set getActorsWithDefaultVersionId(UUID defaultVersionId) throws IOException; + + List getActorIdsForDefinition(UUID actorDefinitionId) throws IOException; + List listBreakingChangesForActorDefinition(UUID actorDefinitionId) throws IOException; void setActorDefinitionVersionSupportStates(List actorDefinitionVersionIds, ActorDefinitionVersion.SupportState supportState) diff --git a/airbyte-data/src/main/java/io/airbyte/data/services/SecretPersistenceConfigService.java b/airbyte-data/src/main/java/io/airbyte/data/services/SecretPersistenceConfigService.java index eb185457569..863b6f4112d 100644 --- a/airbyte-data/src/main/java/io/airbyte/data/services/SecretPersistenceConfigService.java +++ b/airbyte-data/src/main/java/io/airbyte/data/services/SecretPersistenceConfigService.java @@ -17,8 +17,6 @@ public interface SecretPersistenceConfigService { SecretPersistenceConfig getSecretPersistenceConfig(ScopeType scope, UUID scopeId) throws IOException, ConfigNotFoundException; - Optional getSecretPersistenceCoordinate(UUID workspaceId, UUID organizationId) throws IOException; - Optional createOrUpdateSecretPersistenceConfig(ScopeType scope, UUID scopeId, SecretPersistenceType secretPersistenceType, diff --git a/airbyte-data/src/main/java/io/airbyte/data/services/SourceService.java b/airbyte-data/src/main/java/io/airbyte/data/services/SourceService.java index f419c7ba2a5..f7d56b4a712 100644 --- a/airbyte-data/src/main/java/io/airbyte/data/services/SourceService.java +++ b/airbyte-data/src/main/java/io/airbyte/data/services/SourceService.java @@ -43,8 +43,6 @@ public interface SourceService { void writeSourceConnectionNoSecrets(SourceConnection partialSource) throws IOException; - boolean deleteSource(UUID sourceId) throws JsonValidationException, ConfigNotFoundException, IOException; - List listSourceConnection() throws IOException; List listWorkspaceSourceConnection(UUID workspaceId) throws IOException; diff 
--git a/airbyte-data/src/main/java/io/airbyte/data/services/impls/jooq/ActorDefinitionServiceJooqImpl.java b/airbyte-data/src/main/java/io/airbyte/data/services/impls/jooq/ActorDefinitionServiceJooqImpl.java index f6bfef8a539..08f6562233b 100644 --- a/airbyte-data/src/main/java/io/airbyte/data/services/impls/jooq/ActorDefinitionServiceJooqImpl.java +++ b/airbyte-data/src/main/java/io/airbyte/data/services/impls/jooq/ActorDefinitionServiceJooqImpl.java @@ -23,6 +23,7 @@ import io.airbyte.config.ScopeType; import io.airbyte.data.exceptions.ConfigNotFoundException; import io.airbyte.data.services.ActorDefinitionService; +import io.airbyte.data.services.shared.ActorWorkspaceOrganizationIds; import io.airbyte.db.Database; import io.airbyte.db.ExceptionWrappingDatabase; import io.airbyte.db.instance.configs.jooq.generated.Tables; @@ -290,6 +291,47 @@ public void setActorDefaultVersion(final UUID actorId, final UUID actorDefinitio .execute()); } + @Override + public void setActorDefaultVersions(final List actorIds, final UUID actorDefinitionVersionId) throws IOException { + database.query(ctx -> ctx.update(Tables.ACTOR) + .set(Tables.ACTOR.DEFAULT_VERSION_ID, actorDefinitionVersionId) + .set(Tables.ACTOR.UPDATED_AT, OffsetDateTime.now()) + .where(Tables.ACTOR.ID.in(actorIds)) + .execute()); + } + + @Override + public Set getActorsWithDefaultVersionId(final UUID defaultVersionId) throws IOException { + return database.query(ctx -> ctx.select(ACTOR.ID) + .from(ACTOR) + .where(ACTOR.DEFAULT_VERSION_ID.eq(defaultVersionId)) + .fetch() + .stream() + .map(r -> r.get(ACTOR.ID)) + .collect(Collectors.toSet())); + } + + @Override + public List getActorIdsForDefinition(final UUID actorDefinitionId) throws IOException { + return database.query(ctx -> ctx.select(ACTOR.ID, ACTOR.WORKSPACE_ID, WORKSPACE.ORGANIZATION_ID) + .from(ACTOR) + .join(WORKSPACE).on(ACTOR.WORKSPACE_ID.eq(WORKSPACE.ID)) + .where(ACTOR.ACTOR_DEFINITION_ID.eq(actorDefinitionId)) + .fetch() + .stream() + .map(record 
-> new ActorWorkspaceOrganizationIds(record.get(ACTOR.ID), record.get(ACTOR.WORKSPACE_ID), record.get(WORKSPACE.ORGANIZATION_ID))) + .toList()); + } + + @Override + public void updateActorDefinitionDefaultVersionId(final UUID actorDefinitionId, final UUID versionId) throws IOException { + database.query(ctx -> ctx.update(ACTOR_DEFINITION) + .set(ACTOR_DEFINITION.DEFAULT_VERSION_ID, versionId) + .set(ACTOR_DEFINITION.UPDATED_AT, OffsetDateTime.now()) + .where(ACTOR_DEFINITION.ID.eq(actorDefinitionId)) + .execute()); + } + /** * Get the list of breaking changes available affecting an actor definition. * @@ -477,7 +519,7 @@ private ActorDefinitionVersion getDefaultVersionForActorDefinitionId(final UUID } private ActorDefinitionVersion getDefaultVersionForActorDefinitionId(final UUID actorDefinitionId, final DSLContext ctx) { - return getDefaultVersionForActorDefinitionIdOptional(actorDefinitionId, ctx).orElseThrow(); + return ConnectorMetadataJooqHelper.getDefaultVersionForActorDefinitionIdOptional(actorDefinitionId, ctx).orElseThrow(); } /** @@ -486,15 +528,9 @@ private ActorDefinitionVersion getDefaultVersionForActorDefinitionId(final UUID * the case is if we are in the process of inserting and have already written the source definition, * but not yet set its default version. 
*/ - private Optional getDefaultVersionForActorDefinitionIdOptional(final UUID actorDefinitionId, final DSLContext ctx) { - return ctx.select(Tables.ACTOR_DEFINITION_VERSION.asterisk()) - .from(ACTOR_DEFINITION) - .join(ACTOR_DEFINITION_VERSION).on(Tables.ACTOR_DEFINITION_VERSION.ID.eq(Tables.ACTOR_DEFINITION.DEFAULT_VERSION_ID)) - .where(ACTOR_DEFINITION.ID.eq(actorDefinitionId)) - .fetch() - .stream() - .findFirst() - .map(DbConverter::buildActorDefinitionVersion); + @Override + public Optional getDefaultVersionForActorDefinitionIdOptional(final UUID actorDefinitionId) throws IOException { + return database.query(ctx -> ConnectorMetadataJooqHelper.getDefaultVersionForActorDefinitionIdOptional(actorDefinitionId, ctx)); } /** diff --git a/airbyte-data/src/main/java/io/airbyte/data/services/impls/jooq/ConnectionServiceJooqImpl.java b/airbyte-data/src/main/java/io/airbyte/data/services/impls/jooq/ConnectionServiceJooqImpl.java index 466a0f6e01e..32d4a32b8a8 100644 --- a/airbyte-data/src/main/java/io/airbyte/data/services/impls/jooq/ConnectionServiceJooqImpl.java +++ b/airbyte-data/src/main/java/io/airbyte/data/services/impls/jooq/ConnectionServiceJooqImpl.java @@ -480,9 +480,10 @@ private Set getEarlySyncJobsFromResult(final Result result) { } /** - * This query retrieves billable sync jobs (job status: INCOMPLETE, SUCCEEDED and CANCELLED) for - * connections that have been created in the past 7 days OR finds the first successful sync jobs for - * their corresponding connections. These results are used to mark these early syncs as free. + * This query retrieves billable sync jobs (jobs in a terminal status - succeeded, cancelled, + * failed) for connections that have been created in the past 7 days OR finds the first successful + * sync jobs for their corresponding connections. These results are used to mark these early syncs + * as free. */ private static final String EARLY_SYNC_JOB_QUERY = // Find the first successful sync job ID for every connection. 
@@ -500,7 +501,9 @@ private Set getEarlySyncJobsFromResult(final Result result) { + " FROM jobs j" + " LEFT JOIN connection c ON c.id = UUID(j.scope)" + " LEFT JOIN FirstSuccessfulJobIdByConnection min_j_ids ON j.id = min_j_ids.min_job_id" - + " WHERE j.status IN ('succeeded', 'incomplete', 'cancelled')" + // Consider only jobs that are in a generally accepted terminal status + // io/airbyte/persistence/job/models/JobStatus.java:23 + + " WHERE j.status IN ('succeeded', 'cancelled', 'failed')" + " AND j.config_type = 'sync'" + " AND c.id IS NOT NULL" // Keep a job if it was created within 7 days of its connection's creation, diff --git a/airbyte-data/src/main/java/io/airbyte/data/services/impls/jooq/ConnectorMetadataJooqHelper.java b/airbyte-data/src/main/java/io/airbyte/data/services/impls/jooq/ConnectorMetadataJooqHelper.java index ceafec45ba1..46e6216824d 100644 --- a/airbyte-data/src/main/java/io/airbyte/data/services/impls/jooq/ConnectorMetadataJooqHelper.java +++ b/airbyte-data/src/main/java/io/airbyte/data/services/impls/jooq/ConnectorMetadataJooqHelper.java @@ -4,35 +4,21 @@ package io.airbyte.data.services.impls.jooq; -import static io.airbyte.db.instance.configs.jooq.generated.Tables.ACTOR; import static io.airbyte.db.instance.configs.jooq.generated.Tables.ACTOR_DEFINITION; import static io.airbyte.db.instance.configs.jooq.generated.Tables.ACTOR_DEFINITION_VERSION; -import static io.airbyte.featureflag.ContextKt.ANONYMOUS; -import com.google.common.annotations.VisibleForTesting; import io.airbyte.commons.enums.Enums; import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.version.Version; import io.airbyte.config.ActorDefinitionBreakingChange; import io.airbyte.config.ActorDefinitionVersion; -import io.airbyte.config.BreakingChangeScope; -import io.airbyte.config.helpers.BreakingChangeScopeFactory; -import io.airbyte.config.helpers.StreamBreakingChangeScope; -import io.airbyte.data.services.ConnectionService; import 
io.airbyte.db.instance.configs.jooq.generated.Tables; import io.airbyte.db.instance.configs.jooq.generated.enums.ReleaseStage; import io.airbyte.db.instance.configs.jooq.generated.enums.SupportLevel; -import io.airbyte.featureflag.FeatureFlagClient; -import io.airbyte.featureflag.UseBreakingChangeScopes; -import io.airbyte.featureflag.Workspace; -import java.io.IOException; import java.time.LocalDate; import java.time.OffsetDateTime; -import java.util.HashSet; import java.util.List; import java.util.Objects; import java.util.Optional; -import java.util.Set; import java.util.UUID; import java.util.stream.Collectors; import org.jooq.DSLContext; @@ -40,19 +26,11 @@ import org.jooq.Query; /** - * Helper class for logic related to connector metadata (breaking changes, actor definition - * versions) that is the same regardless of whether we are working with a source or destination. + * Helper class used to read/write connector metadata from associated tables (breaking changes, + * actor definition versions). */ public class ConnectorMetadataJooqHelper { - private final FeatureFlagClient featureFlagClient; - private final ConnectionService connectionService; - - public ConnectorMetadataJooqHelper(final FeatureFlagClient featureFlagClient, final ConnectionService connectionService) { - this.featureFlagClient = featureFlagClient; - this.connectionService = connectionService; - } - /** * Write an actor definition version. * @@ -188,109 +166,6 @@ public static Optional getActorDefinitionVersion(final U .map(DbConverter::buildActorDefinitionVersion); } - /** - * Set the ActorDefinitionVersion for a given tag as the default version for the associated actor - * definition. Check docker image tag on the new ADV; if an ADV exists for that tag, set the - * existing ADV for the tag as the default. Otherwise, insert the new ADV and set it as the default. 
- * - * @param actorDefinitionVersion new actor definition version - */ - public void setActorDefinitionVersionForTagAsDefault(final ActorDefinitionVersion actorDefinitionVersion, - final List breakingChangesForDefinition, - final DSLContext ctx) { - final ActorDefinitionVersion writtenADV = writeActorDefinitionVersion(actorDefinitionVersion, ctx); - setActorDefinitionVersionAsDefaultVersion(writtenADV, breakingChangesForDefinition, ctx); - } - - private void setActorDefinitionVersionAsDefaultVersion(final ActorDefinitionVersion actorDefinitionVersion, - final List breakingChangesForDefinition, - final DSLContext ctx) { - if (actorDefinitionVersion.getVersionId() == null) { - throw new RuntimeException("Can't set an actorDefinitionVersion as default without it having a versionId."); - } - - final Optional currentDefaultVersion = - getDefaultVersionForActorDefinitionIdOptional(actorDefinitionVersion.getActorDefinitionId(), ctx); - currentDefaultVersion - .ifPresent(currentDefault -> { - final Set actorsToUpgrade = getActorsToUpgrade(currentDefault, actorDefinitionVersion, breakingChangesForDefinition, ctx); - updateActorsDefaultVersion(actorsToUpgrade, actorDefinitionVersion.getVersionId(), ctx); - }); - - updateActorDefinitionDefaultVersionId(actorDefinitionVersion.getActorDefinitionId(), actorDefinitionVersion.getVersionId(), ctx); - } - - @VisibleForTesting - public Set getActorsToUpgrade(final ActorDefinitionVersion currentDefaultVersion, - final ActorDefinitionVersion newVersion, - final List breakingChangesForDefinition, - final DSLContext ctx) { - final List breakingChangesForUpgrade = getBreakingChangesForUpgrade( - currentDefaultVersion.getDockerImageTag(), newVersion.getDockerImageTag(), breakingChangesForDefinition); - - final Set upgradeCandidates = getActorsOnDefaultVersion(currentDefaultVersion.getVersionId(), ctx); - - breakingChangesForUpgrade.forEach(breakingChange -> { - final Set actorsImpactedByBreakingChange = 
getActorsAffectedByBreakingChange(upgradeCandidates, breakingChange); - upgradeCandidates.removeAll(actorsImpactedByBreakingChange); - }); - - return upgradeCandidates; - } - - @VisibleForTesting - public Set getActorsAffectedByBreakingChange(final Set actorIds, - final ActorDefinitionBreakingChange breakingChange) { - if (!featureFlagClient.boolVariation(UseBreakingChangeScopes.INSTANCE, new Workspace(ANONYMOUS))) { - return actorIds; - } - - final List scopedImpact = breakingChange.getScopedImpact(); - if (breakingChange.getScopedImpact() == null || breakingChange.getScopedImpact().isEmpty()) { - return actorIds; - } - - final Set actorsImpactedByBreakingChange = new HashSet<>(); - scopedImpact.forEach((impactScope) -> { - switch (impactScope.getScopeType()) { - case STREAM: { - final StreamBreakingChangeScope streamBreakingChangeScope = BreakingChangeScopeFactory.createStreamBreakingChangeScope(impactScope); - actorsImpactedByBreakingChange.addAll(getActorsInStreamBreakingChangeScope(actorIds, streamBreakingChangeScope)); - break; - } - default: - throw new RuntimeException("Unsupported breaking change scope type: " + impactScope.getScopeType()); - } - }); - return actorsImpactedByBreakingChange; - } - - private Set getActorsInStreamBreakingChangeScope(final Set actorIdsToFilter, - final StreamBreakingChangeScope streamBreakingChangeScope) { - return actorIdsToFilter - .stream() - .filter(actorId -> getActorSyncsAnyListedStream(actorId, streamBreakingChangeScope.getImpactedScopes())) - .collect(Collectors.toSet()); - } - - private boolean getActorSyncsAnyListedStream(final UUID actorId, final List streamNames) { - try { - return connectionService.actorSyncsAnyListedStream(actorId, streamNames); - } catch (final IOException e) { - throw new RuntimeException(e); - } - } - - private static Set getActorsOnDefaultVersion(final UUID defaultVersionId, final DSLContext ctx) { - return ctx.select(ACTOR.ID) - .from(ACTOR) - 
.where(ACTOR.DEFAULT_VERSION_ID.eq(defaultVersionId)) - .fetch() - .stream() - .map(record -> record.get(ACTOR.ID)) - .collect(Collectors.toSet()); - } - /** * Get an optional ADV for an actor definition's default version. The optional will be empty if the * defaultVersionId of the actor definition is set to null in the DB. The only time this should be @@ -308,57 +183,6 @@ public static Optional getDefaultVersionForActorDefiniti .map(DbConverter::buildActorDefinitionVersion); } - private static void updateActorsDefaultVersion(final Set actorIds, - final UUID newDefaultVersionId, - final DSLContext ctx) { - ctx.update(ACTOR) - .set(ACTOR.UPDATED_AT, OffsetDateTime.now()) - .set(ACTOR.DEFAULT_VERSION_ID, newDefaultVersionId) - .where(ACTOR.ID.in(actorIds)) - .execute(); - } - - /** - * Given a current version and a version to upgrade to, and a list of breaking changes, determine - * which breaking changes, if any, apply to upgrading from the current version to the version to - * upgrade to. - * - * @param currentDockerImageTag version to upgrade from - * @param dockerImageTagForUpgrade version to upgrade to - * @param breakingChangesForDef a list of breaking changes to check - * @return list of applicable breaking changes - */ - @VisibleForTesting - public static List getBreakingChangesForUpgrade(final String currentDockerImageTag, - final String dockerImageTagForUpgrade, - final List breakingChangesForDef) { - if (breakingChangesForDef.isEmpty()) { - // If there aren't breaking changes, early exit in order to avoid trying to parse versions. - // This is helpful for custom connectors or local dev images for connectors that don't have - // breaking changes. - return List.of(); - } - - final Version currentVersion = new Version(currentDockerImageTag); - final Version versionToUpgradeTo = new Version(dockerImageTagForUpgrade); - - if (versionToUpgradeTo.lessThanOrEqualTo(currentVersion)) { - // When downgrading, we don't take into account breaking changes. 
- return List.of(); - } - - return breakingChangesForDef.stream().filter(breakingChange -> currentVersion.lessThan(breakingChange.getVersion()) - && versionToUpgradeTo.greaterThanOrEqualTo(breakingChange.getVersion())).collect(Collectors.toList()); - } - - private static void updateActorDefinitionDefaultVersionId(final UUID actorDefinitionId, final UUID versionId, final DSLContext ctx) { - ctx.update(ACTOR_DEFINITION) - .set(ACTOR_DEFINITION.UPDATED_AT, OffsetDateTime.now()) - .set(ACTOR_DEFINITION.DEFAULT_VERSION_ID, versionId) - .where(ACTOR_DEFINITION.ID.eq(actorDefinitionId)) - .execute(); - } - /** * Writes a list of actor definition breaking changes in one transaction. Updates entries if they * already exist. diff --git a/airbyte-data/src/main/java/io/airbyte/data/services/impls/jooq/DbConverter.java b/airbyte-data/src/main/java/io/airbyte/data/services/impls/jooq/DbConverter.java index c41be5d2d29..764ba3726ad 100644 --- a/airbyte-data/src/main/java/io/airbyte/data/services/impls/jooq/DbConverter.java +++ b/airbyte-data/src/main/java/io/airbyte/data/services/impls/jooq/DbConverter.java @@ -253,6 +253,17 @@ public static DestinationConnection buildDestinationConnection(final Record reco * @return source definition */ public static StandardSourceDefinition buildStandardSourceDefinition(final Record record, final long defaultMaxSecondsBetweenMessages) { + var maxSecondsBetweenMessage = record.get(ACTOR_DEFINITION.MAX_SECONDS_BETWEEN_MESSAGES) == null + ? defaultMaxSecondsBetweenMessages + : record.get(ACTOR_DEFINITION.MAX_SECONDS_BETWEEN_MESSAGES).longValue(); + + // All sources are starting to set this field according to their rate limits. As a + // safeguard for sources with rate limits that are too low e.g. minutes etc, we default to + // our defaults. One day, we'll relax this, be conservative for now. 
+ if (maxSecondsBetweenMessage < defaultMaxSecondsBetweenMessages) { + maxSecondsBetweenMessage = defaultMaxSecondsBetweenMessages; + } + return new StandardSourceDefinition() .withSourceDefinitionId(record.get(ACTOR_DEFINITION.ID)) .withDefaultVersionId(record.get(ACTOR_DEFINITION.DEFAULT_VERSION_ID)) @@ -267,9 +278,7 @@ public static StandardSourceDefinition buildStandardSourceDefinition(final Recor .withResourceRequirements(record.get(ACTOR_DEFINITION.RESOURCE_REQUIREMENTS) == null ? null : Jsons.deserialize(record.get(ACTOR_DEFINITION.RESOURCE_REQUIREMENTS).data(), ActorDefinitionResourceRequirements.class)) - .withMaxSecondsBetweenMessages(record.get(ACTOR_DEFINITION.MAX_SECONDS_BETWEEN_MESSAGES) == null - ? defaultMaxSecondsBetweenMessages - : record.get(ACTOR_DEFINITION.MAX_SECONDS_BETWEEN_MESSAGES).longValue()); + .withMaxSecondsBetweenMessages(maxSecondsBetweenMessage); } /** diff --git a/airbyte-data/src/main/java/io/airbyte/data/services/impls/jooq/DestinationServiceJooqImpl.java b/airbyte-data/src/main/java/io/airbyte/data/services/impls/jooq/DestinationServiceJooqImpl.java index 1a108fa6491..a7846fc1dc0 100644 --- a/airbyte-data/src/main/java/io/airbyte/data/services/impls/jooq/DestinationServiceJooqImpl.java +++ b/airbyte-data/src/main/java/io/airbyte/data/services/impls/jooq/DestinationServiceJooqImpl.java @@ -6,7 +6,6 @@ import static io.airbyte.db.instance.configs.jooq.generated.Tables.ACTOR; import static io.airbyte.db.instance.configs.jooq.generated.Tables.ACTOR_DEFINITION; -import static io.airbyte.db.instance.configs.jooq.generated.Tables.ACTOR_DEFINITION_VERSION; import static io.airbyte.db.instance.configs.jooq.generated.Tables.ACTOR_DEFINITION_WORKSPACE_GRANT; import static io.airbyte.db.instance.configs.jooq.generated.Tables.WORKSPACE; import static org.jooq.impl.DSL.asterisk; @@ -28,6 +27,7 @@ import io.airbyte.config.secrets.SecretsRepositoryWriter; import io.airbyte.config.secrets.persistence.RuntimeSecretPersistence; import 
io.airbyte.data.exceptions.ConfigNotFoundException; +import io.airbyte.data.helpers.ActorDefinitionVersionUpdater; import io.airbyte.data.services.ConnectionService; import io.airbyte.data.services.DestinationService; import io.airbyte.data.services.SecretPersistenceConfigService; @@ -81,7 +81,7 @@ public class DestinationServiceJooqImpl implements DestinationService { private final SecretsRepositoryWriter secretsRepositoryWriter; private final SecretPersistenceConfigService secretPersistenceConfigService; private final ConnectionService connectionService; - private final ConnectorMetadataJooqHelper connectorMetadataJooqHelper; + private final ActorDefinitionVersionUpdater actorDefinitionVersionUpdater; @VisibleForTesting public DestinationServiceJooqImpl(@Named("configDatabase") final Database database, @@ -89,14 +89,15 @@ public DestinationServiceJooqImpl(@Named("configDatabase") final Database databa final SecretsRepositoryReader secretsRepositoryReader, final SecretsRepositoryWriter secretsRepositoryWriter, final SecretPersistenceConfigService secretPersistenceConfigService, - final ConnectionService connectionService) { + final ConnectionService connectionService, + final ActorDefinitionVersionUpdater actorDefinitionVersionUpdater) { this.database = new ExceptionWrappingDatabase(database); this.connectionService = connectionService; this.featureFlagClient = featureFlagClient; this.secretsRepositoryReader = secretsRepositoryReader; this.secretsRepositoryWriter = secretsRepositoryWriter; this.secretPersistenceConfigService = secretPersistenceConfigService; - this.connectorMetadataJooqHelper = new ConnectorMetadataJooqHelper(featureFlagClient, connectionService); + this.actorDefinitionVersionUpdater = actorDefinitionVersionUpdater; } /** @@ -404,6 +405,8 @@ public void writeCustomConnectorMetadata( io.airbyte.db.instance.configs.jooq.generated.enums.ScopeType.valueOf(scopeType.toString()), ctx); return null; }); + + 
actorDefinitionVersionUpdater.updateDestinationDefaultVersion(destinationDefinition, defaultVersion, List.of()); } /** @@ -426,6 +429,9 @@ public void writeConnectorMetadata(final StandardDestinationDefinition destinati writeConnectorMetadata(destinationDefinition, actorDefinitionVersion, breakingChangesForDefinition, ctx); return null; }); + + // FIXME(pedro): this should be moved out of this service + actorDefinitionVersionUpdater.updateDestinationDefaultVersion(destinationDefinition, actorDefinitionVersion, breakingChangesForDefinition); } /** @@ -470,7 +476,7 @@ private void writeConnectorMetadata(final StandardDestinationDefinition destinat final DSLContext ctx) { writeStandardDestinationDefinition(Collections.singletonList(destinationDefinition), ctx); ConnectorMetadataJooqHelper.writeActorDefinitionBreakingChanges(breakingChangesForDefinition, ctx); - connectorMetadataJooqHelper.setActorDefinitionVersionForTagAsDefault(actorDefinitionVersion, breakingChangesForDefinition, ctx); + ConnectorMetadataJooqHelper.writeActorDefinitionVersion(actorDefinitionVersion, ctx); } private Stream destDefQuery(final Optional destDefId, final boolean includeTombstone) throws IOException { @@ -591,18 +597,7 @@ private void writeDestinationConnection(final List config } private ActorDefinitionVersion getDefaultVersionForActorDefinitionId(final UUID actorDefinitionId, final DSLContext ctx) { - return getDefaultVersionForActorDefinitionIdOptional(actorDefinitionId, ctx).orElseThrow(); - } - - private Optional getDefaultVersionForActorDefinitionIdOptional(final UUID actorDefinitionId, final DSLContext ctx) { - return ctx.select(Tables.ACTOR_DEFINITION_VERSION.asterisk()) - .from(ACTOR_DEFINITION) - .join(ACTOR_DEFINITION_VERSION).on(Tables.ACTOR_DEFINITION_VERSION.ID.eq(Tables.ACTOR_DEFINITION.DEFAULT_VERSION_ID)) - .where(ACTOR_DEFINITION.ID.eq(actorDefinitionId)) - .fetch() - .stream() - .findFirst() - .map(DbConverter::buildActorDefinitionVersion); + return 
ConnectorMetadataJooqHelper.getDefaultVersionForActorDefinitionIdOptional(actorDefinitionId, ctx).orElseThrow(); } private Condition includeTombstones(final Field tombstoneField, final boolean includeTombstones) { diff --git a/airbyte-data/src/main/java/io/airbyte/data/services/impls/jooq/SecretPersistenceConfigServiceJooqImpl.java b/airbyte-data/src/main/java/io/airbyte/data/services/impls/jooq/SecretPersistenceConfigServiceJooqImpl.java index 3de8dadcb82..6cb0103a9ab 100644 --- a/airbyte-data/src/main/java/io/airbyte/data/services/impls/jooq/SecretPersistenceConfigServiceJooqImpl.java +++ b/airbyte-data/src/main/java/io/airbyte/data/services/impls/jooq/SecretPersistenceConfigServiceJooqImpl.java @@ -6,8 +6,6 @@ import static io.airbyte.db.instance.configs.jooq.generated.tables.SecretPersistenceConfig.SECRET_PERSISTENCE_CONFIG; import static org.jooq.impl.DSL.asterisk; -import static org.jooq.impl.DSL.field; -import static org.jooq.impl.DSL.inline; import com.fasterxml.jackson.databind.JsonNode; import io.airbyte.commons.json.Jsons; @@ -75,33 +73,6 @@ public SecretPersistenceConfig getSecretPersistenceConfig(final ScopeType scope, throw new ConfigNotFoundException(ConfigSchema.SECRET_PERSISTENCE_CONFIG, List.of(scope, scopeId).toString()); } - /** - * Retrieve secret persistence configs in order of precedence: workspace -> organization. - * - * @param workspaceId workspace ID - * @param organizationId organization ID - * @return Optional secret persistence config for the first scope found. 
- * @throws IOException it could happen - */ - @Override - public Optional getSecretPersistenceCoordinate(final UUID workspaceId, final UUID organizationId) throws IOException { - final Result result = database.query(ctx -> { - final SelectJoinStep query = ctx.select(asterisk(), inline(1).as(SORT_ORDER)).from(SECRET_PERSISTENCE_CONFIG); - return query.where( - SECRET_PERSISTENCE_CONFIG.SCOPE_TYPE.eq(SecretPersistenceScopeType.workspace), - SECRET_PERSISTENCE_CONFIG.SCOPE_ID.eq(workspaceId)) - .unionAll( - ctx.select(asterisk(), inline(2).as(SORT_ORDER)).from(SECRET_PERSISTENCE_CONFIG) - .where( - SECRET_PERSISTENCE_CONFIG.SCOPE_TYPE.eq(SecretPersistenceScopeType.organization), - SECRET_PERSISTENCE_CONFIG.SCOPE_ID.eq(organizationId))) - .orderBy(field(SORT_ORDER).asc()) - .limit(1).fetch(); - }); - - return result.stream().findFirst().map(DbConverter::buildSecretPersistenceCoordinate); - } - @Override public Optional createOrUpdateSecretPersistenceConfig(final ScopeType scope, final UUID scopeId, diff --git a/airbyte-data/src/main/java/io/airbyte/data/services/impls/jooq/SourceServiceJooqImpl.java b/airbyte-data/src/main/java/io/airbyte/data/services/impls/jooq/SourceServiceJooqImpl.java index ca34f9c9498..63fca04a09e 100644 --- a/airbyte-data/src/main/java/io/airbyte/data/services/impls/jooq/SourceServiceJooqImpl.java +++ b/airbyte-data/src/main/java/io/airbyte/data/services/impls/jooq/SourceServiceJooqImpl.java @@ -10,7 +10,6 @@ import static io.airbyte.db.instance.configs.jooq.generated.Tables.WORKSPACE; import static io.airbyte.featureflag.ContextKt.ANONYMOUS; import static org.jooq.impl.DSL.asterisk; -import static org.jooq.impl.DSL.field; import static org.jooq.impl.DSL.noCondition; import static org.jooq.impl.DSL.select; @@ -29,6 +28,7 @@ import io.airbyte.config.secrets.SecretsRepositoryWriter; import io.airbyte.config.secrets.persistence.RuntimeSecretPersistence; import io.airbyte.data.exceptions.ConfigNotFoundException; +import 
io.airbyte.data.helpers.ActorDefinitionVersionUpdater; import io.airbyte.data.services.ConnectionService; import io.airbyte.data.services.SecretPersistenceConfigService; import io.airbyte.data.services.SourceService; @@ -73,7 +73,6 @@ import org.jooq.Record1; import org.jooq.Result; import org.jooq.SelectJoinStep; -import org.jooq.Table; import org.jooq.impl.DSL; @Slf4j @@ -88,21 +87,22 @@ public class SourceServiceJooqImpl implements SourceService { private final SecretsRepositoryWriter secretsRepositoryWriter; private final SecretPersistenceConfigService secretPersistenceConfigService; private final ConnectionService connectionService; - private final ConnectorMetadataJooqHelper connectorMetadataJooqHelper; + private final ActorDefinitionVersionUpdater actorDefinitionVersionUpdater; public SourceServiceJooqImpl(@Named("configDatabase") final Database database, final FeatureFlagClient featureFlagClient, final SecretsRepositoryReader secretsRepositoryReader, final SecretsRepositoryWriter secretsRepositoryWriter, final SecretPersistenceConfigService secretPersistenceConfigService, - final ConnectionService connectionService) { + final ConnectionService connectionService, + final ActorDefinitionVersionUpdater actorDefinitionVersionUpdater) { this.database = new ExceptionWrappingDatabase(database); this.connectionService = connectionService; this.featureFlagClient = featureFlagClient; this.secretRepositoryReader = secretsRepositoryReader; this.secretsRepositoryWriter = secretsRepositoryWriter; this.secretPersistenceConfigService = secretPersistenceConfigService; - this.connectorMetadataJooqHelper = new ConnectorMetadataJooqHelper(featureFlagClient, connectionService); + this.actorDefinitionVersionUpdater = actorDefinitionVersionUpdater; } /** @@ -285,21 +285,6 @@ public void writeSourceConnectionNoSecrets(final SourceConnection partialSource) }); } - /** - * Delete a source by id. - * - * @param sourceId - * @return true if a source was deleted, false otherwise. 
- * @throws JsonValidationException - throws if returned sources are invalid - * @throws ConfigNotFoundException - throws if no source with that id can be found. - * @throws IOException - you never know when you IO - */ - @Override - public boolean deleteSource(final UUID sourceId) - throws JsonValidationException, ConfigNotFoundException, IOException { - return deleteById(ACTOR, sourceId); - } - /** * Returns all sources in the database. Does not contain secrets. * @@ -417,6 +402,9 @@ public void writeConnectorMetadata(final StandardSourceDefinition sourceDefiniti writeConnectorMetadata(sourceDefinition, actorDefinitionVersion, breakingChangesForDefinition, ctx); return null; }); + + // FIXME(pedro): this should be moved out of this service + actorDefinitionVersionUpdater.updateSourceDefaultVersion(sourceDefinition, actorDefinitionVersion, breakingChangesForDefinition); } @Override @@ -431,6 +419,8 @@ public void writeCustomConnectorMetadata(final StandardSourceDefinition sourceDe io.airbyte.db.instance.configs.jooq.generated.enums.ScopeType.valueOf(scopeType.toString()), ctx); return null; }); + + actorDefinitionVersionUpdater.updateSourceDefaultVersion(sourceDefinition, defaultVersion, List.of()); } /** @@ -459,7 +449,7 @@ public List listSourcesWithVersionIds( * @param sourceDefinitionId to retrieve the default max seconds between messages for. 
* @return */ - private Long retrieveDefaultMaxSecondsBetweenMessages(UUID sourceDefinitionId) { + private Long retrieveDefaultMaxSecondsBetweenMessages(final UUID sourceDefinitionId) { return Long.parseLong(featureFlagClient.stringVariation(HeartbeatMaxSecondsBetweenMessages.INSTANCE, new SourceDefinition(sourceDefinitionId))); } @@ -485,7 +475,7 @@ private void writeConnectorMetadata(final StandardSourceDefinition sourceDefinit final DSLContext ctx) { writeStandardSourceDefinition(Collections.singletonList(sourceDefinition), ctx); ConnectorMetadataJooqHelper.writeActorDefinitionBreakingChanges(breakingChangesForDefinition, ctx); - connectorMetadataJooqHelper.setActorDefinitionVersionForTagAsDefault(actorDefinitionVersion, breakingChangesForDefinition, ctx); + ConnectorMetadataJooqHelper.writeActorDefinitionVersion(actorDefinitionVersion, ctx); } private Stream sourceDefQuery(final Optional sourceDefId, final boolean includeTombstone) throws IOException { @@ -560,7 +550,7 @@ private Result actorDefinitionsJoinedWithGrants(final UUID scopeId, .fetch()); } - public Optional getOrganizationIdFromWorkspaceId(final UUID scopeId) throws IOException { + private Optional getOrganizationIdFromWorkspaceId(final UUID scopeId) throws IOException { final Optional> optionalRecord = database.query(ctx -> ctx.select(WORKSPACE.ORGANIZATION_ID).from(WORKSPACE) .where(WORKSPACE.ID.eq(scopeId)).fetchOptional()); return optionalRecord.map(Record1::value1); @@ -573,7 +563,7 @@ private Entry actorDefinitionWithGrantStatus(final Record outerJ return Map.entry(actorDefinition, granted); } - static void writeStandardSourceDefinition(final List configs, final DSLContext ctx) { + private static void writeStandardSourceDefinition(final List configs, final DSLContext ctx) { final OffsetDateTime timestamp = OffsetDateTime.now(); configs.forEach((standardSourceDefinition) -> { final boolean isExistingConfig = ctx.fetchExists(DSL.select() @@ -670,30 +660,7 @@ private void 
writeSourceConnection(final List configs, final D } private ActorDefinitionVersion getDefaultVersionForActorDefinitionId(final UUID actorDefinitionId, final DSLContext ctx) { - return getDefaultVersionForActorDefinitionIdOptional(actorDefinitionId, ctx).orElseThrow(); - } - - /** - * Get an optional ADV for an actor definition's default version. The optional will be empty if the - * defaultVersionId of the actor definition is set to null in the DB. The only time this should be - * the case is if we are in the process of inserting and have already written the source definition, - * but not yet set its default version. - */ - private Optional getDefaultVersionForActorDefinitionIdOptional(final UUID actorDefinitionId, final DSLContext ctx) { - return ConnectorMetadataJooqHelper.getDefaultVersionForActorDefinitionIdOptional(actorDefinitionId, ctx); - } - - /** - * Deletes all records with given id. If it deletes anything, returns true. Otherwise, false. - * - * @param table - table from which to delete the record - * @param id - id of the record to delete - * @return true if anything was deleted, otherwise false. 
- * @throws IOException - you never know when you io - */ - @SuppressWarnings("SameParameterValue") - private boolean deleteById(final Table table, final UUID id) throws IOException { - return database.transaction(ctx -> ctx.deleteFrom(table)).where(field(DSL.name(PRIMARY_KEY)).eq(id)).execute() > 0; + return ConnectorMetadataJooqHelper.getDefaultVersionForActorDefinitionIdOptional(actorDefinitionId, ctx).orElseThrow(); } private Stream listSourceQuery(final Optional configId) throws IOException { @@ -780,7 +747,7 @@ public void writeSourceConnectionWithSecrets( writeSourceConnectionNoSecrets(partialSource); } - public Optional getSourceIfExists(final UUID sourceId) { + private Optional getSourceIfExists(final UUID sourceId) { try { return Optional.of(getSourceConnection(sourceId)); } catch (final ConfigNotFoundException | JsonValidationException | IOException e) { diff --git a/airbyte-data/src/main/java/io/airbyte/data/services/impls/keycloak/ApplicationServiceKeycloakImpl.java b/airbyte-data/src/main/java/io/airbyte/data/services/impls/keycloak/ApplicationServiceKeycloakImpl.java index 31e96fe5d26..660e8ee7201 100644 --- a/airbyte-data/src/main/java/io/airbyte/data/services/impls/keycloak/ApplicationServiceKeycloakImpl.java +++ b/airbyte-data/src/main/java/io/airbyte/data/services/impls/keycloak/ApplicationServiceKeycloakImpl.java @@ -112,22 +112,22 @@ public Application createApplication(final User user, final String name) { /** * List all Applications for a user. * - * @param userId The user to list Applications for. + * @param user The user to list Applications for. * @return The list of Applications for the user. 
*/ @Override - public List listApplicationsByUser(final User userId) { - final var users = keycloakAdminClient + public List listApplicationsByUser(final User user) { + final var clientUsers = keycloakAdminClient .realm(keycloakConfiguration.getClientRealm()) .users() - .searchByAttributes(USER_ID + ":" + userId.getAuthUserId()); + .searchByAttributes(USER_ID + ":" + user.getAuthUserId()); final var existingClient = new ArrayList(); - for (final var user : users) { + for (final var clientUser : clientUsers) { final var client = keycloakAdminClient .realm(keycloakConfiguration.getClientRealm()) .clients() - .findByClientId(user.getAttributes().get(CLIENT_ID).getFirst()) + .findByClientId(clientUser.getAttributes().get(CLIENT_ID).getFirst()) .stream() .findFirst(); @@ -159,13 +159,10 @@ public Optional deleteApplication(final User user, final String app return Optional.empty(); } - // Get the user_id attribute from the client - final var userId = client.get().getAttributes().getOrDefault(USER_ID, null); - if (userId == null) { - throw new BadRequestException("Client does not have a user_id attribute"); - } + final var userApplications = listApplicationsByUser(user); - if (!userId.equals(String.valueOf(user.getAuthUserId()))) { + // Only allow the user to delete their own Applications. + if (userApplications.stream().noneMatch(application -> application.getClientId().equals(applicationId))) { throw new BadRequestException("You do not have permission to delete this Application"); } diff --git a/airbyte-data/src/main/java/io/airbyte/data/services/shared/ActorWorkspaceOrganizationIds.java b/airbyte-data/src/main/java/io/airbyte/data/services/shared/ActorWorkspaceOrganizationIds.java new file mode 100644 index 00000000000..5cd9f2f6b0f --- /dev/null +++ b/airbyte-data/src/main/java/io/airbyte/data/services/shared/ActorWorkspaceOrganizationIds.java @@ -0,0 +1,19 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.data.services.shared; + +import java.util.UUID; +import javax.annotation.Nullable; + +/** + * A record that represents IDs for an actor and its associated workspace and organization. + * + * @param actorId - actor ID + * @param workspaceId - workspace ID + * @param organizationId - organization ID + */ +public record ActorWorkspaceOrganizationIds(UUID actorId, UUID workspaceId, @Nullable UUID organizationId) { + +} diff --git a/airbyte-data/src/main/kotlin/io/airbyte/data/helpers/ActorDefinitionVersionUpdater.kt b/airbyte-data/src/main/kotlin/io/airbyte/data/helpers/ActorDefinitionVersionUpdater.kt new file mode 100644 index 00000000000..c7ddf3e4f6a --- /dev/null +++ b/airbyte-data/src/main/kotlin/io/airbyte/data/helpers/ActorDefinitionVersionUpdater.kt @@ -0,0 +1,397 @@ +package io.airbyte.data.helpers + +import com.google.common.annotations.VisibleForTesting +import io.airbyte.commons.version.Version +import io.airbyte.config.ActorDefinitionBreakingChange +import io.airbyte.config.ActorDefinitionVersion +import io.airbyte.config.ActorType +import io.airbyte.config.BreakingChangeScope +import io.airbyte.config.ConfigOriginType +import io.airbyte.config.ConfigResourceType +import io.airbyte.config.ConfigScopeType +import io.airbyte.config.DestinationConnection +import io.airbyte.config.ScopedConfiguration +import io.airbyte.config.SourceConnection +import io.airbyte.config.StandardDestinationDefinition +import io.airbyte.config.StandardSourceDefinition +import io.airbyte.config.helpers.BreakingChangeScopeFactory +import io.airbyte.config.helpers.StreamBreakingChangeScope +import io.airbyte.data.services.ActorDefinitionService +import io.airbyte.data.services.ConnectionService +import io.airbyte.data.services.ScopedConfigurationService +import io.airbyte.data.services.shared.ConfigScopeMapWithId +import io.airbyte.data.services.shared.ConnectorVersionKey +import io.airbyte.featureflag.ANONYMOUS +import 
io.airbyte.featureflag.FeatureFlagClient +import io.airbyte.featureflag.UseBreakingChangeScopes +import io.airbyte.featureflag.Workspace +import jakarta.inject.Singleton +import java.io.IOException +import java.util.UUID +import java.util.stream.Collectors + +@Singleton +class ActorDefinitionVersionUpdater( + private val featureFlagClient: FeatureFlagClient, + private val connectionService: ConnectionService, + private val actorDefinitionService: ActorDefinitionService, + private val scopedConfigurationService: ScopedConfigurationService, +) { + fun updateDestinationDefaultVersion( + destinationDefinition: StandardDestinationDefinition, + newDefaultVersion: ActorDefinitionVersion, + breakingChangesForDefinition: List, + ) { + return updateDefaultVersion( + destinationDefinition.destinationDefinitionId, + newDefaultVersion, + breakingChangesForDefinition, + ) + } + + fun updateSourceDefaultVersion( + sourceDefinition: StandardSourceDefinition, + newDefaultVersion: ActorDefinitionVersion, + breakingChangesForDefinition: List, + ) { + return updateDefaultVersion( + sourceDefinition.sourceDefinitionId, + newDefaultVersion, + breakingChangesForDefinition, + ) + } + + /** + * Upgrade the source to the latest version, opting-in to any breaking changes that may exist. + */ + fun upgradeActorVersion( + source: SourceConnection, + sourceDefinition: StandardSourceDefinition, + ) { + return upgradeActorVersion( + source.sourceId, + sourceDefinition.sourceDefinitionId, + sourceDefinition.defaultVersionId, + ActorType.SOURCE, + ) + } + + /** + * Upgrade the destination to the latest version, opting-in to any breaking changes that may exist. 
+ */ + fun upgradeActorVersion( + destination: DestinationConnection, + destinationDefinition: StandardDestinationDefinition, + ) { + return upgradeActorVersion( + destination.destinationId, + destinationDefinition.destinationDefinitionId, + destinationDefinition.defaultVersionId, + ActorType.DESTINATION, + ) + } + + @VisibleForTesting + internal fun upgradeActorVersion( + actorId: UUID, + actorDefinitionId: UUID, + newVersionId: UUID, + actorType: ActorType, + ) { + val versionPinConfigOpt = + scopedConfigurationService.getScopedConfiguration( + ConnectorVersionKey.key, + ConfigResourceType.ACTOR_DEFINITION, + actorDefinitionId, + ConfigScopeType.ACTOR, + actorId, + ) + + versionPinConfigOpt.ifPresent { versionPinConfig -> + if (versionPinConfig.originType != ConfigOriginType.BREAKING_CHANGE) { + throw IllegalStateException("This %s is manually pinned to a version, and therefore cannot be upgraded.".format(actorType)) + } + + scopedConfigurationService.deleteScopedConfiguration(versionPinConfig.id) + } + + actorDefinitionService.setActorDefaultVersion(actorId, newVersionId) + } + + @VisibleForTesting + internal fun updateDefaultVersion( + actorDefinitionId: UUID, + newDefaultVersion: ActorDefinitionVersion, + breakingChangesForDefinition: List, + ) { + if (newDefaultVersion.versionId == null) { + throw RuntimeException("Can't set an actorDefinitionVersion as default without it having a versionId.") + } + + val currentDefaultVersionOpt = actorDefinitionService.getDefaultVersionForActorDefinitionIdOptional(actorDefinitionId) + currentDefaultVersionOpt.ifPresent { currentDefaultVersion -> + val breakingChangesForUpgrade = + getBreakingChangesForUpgrade( + currentDefaultVersion.dockerImageTag, + newDefaultVersion.dockerImageTag, + breakingChangesForDefinition, + ) + + // Old: update actor.default_version_id for unaffected actors + val actorsToUpgrade = getActorsToUpgrade(currentDefaultVersion, breakingChangesForUpgrade) + 
actorDefinitionService.setActorDefaultVersions(actorsToUpgrade.stream().toList(), newDefaultVersion.versionId) + + // New: determine which actors should NOT be upgraded, and pin those back + processBreakingChangesForUpgrade(currentDefaultVersion, breakingChangesForUpgrade) + } + + actorDefinitionService.updateActorDefinitionDefaultVersionId(actorDefinitionId, newDefaultVersion.versionId) + + // New: for breaking changes that have been rolled back, clear old pins that may have been created + processBreakingChangePinRollbacks(actorDefinitionId, newDefaultVersion, breakingChangesForDefinition) + } + + private fun getConfigScopeMaps(actorDefinitionId: UUID): Collection { + val actorScopes = actorDefinitionService.getActorIdsForDefinition(actorDefinitionId) + return actorScopes.map { + ConfigScopeMapWithId( + it.actorId, + mapOf( + ConfigScopeType.ACTOR to it.actorId, + ConfigScopeType.WORKSPACE to it.workspaceId, + ConfigScopeType.ORGANIZATION to it.organizationId, + ), + ) + } + } + + private fun getActorIdsToPinForBreakingChange( + actorDefinitionId: UUID, + breakingChange: ActorDefinitionBreakingChange, + configScopeMaps: Collection, + ): Set { + // upgrade candidates: any actor that doesn't have a pin on it + // this must happen in order, for each BC, so when processing multiple breaking changes at once we + // determine affected actors correctly + val upgradeCandidates = getUpgradeCandidates(actorDefinitionId, configScopeMaps) + + // actors to pin: any actor from candidates (no pins) that is impacted by a breaking change + return getActorsAffectedByBreakingChange(upgradeCandidates, breakingChange) + } + + @VisibleForTesting + internal fun processBreakingChangesForUpgrade( + currentDefaultVersion: ActorDefinitionVersion, + breakingChangesForUpgrade: List, + ) { + if (breakingChangesForUpgrade.isEmpty()) return + + val actorDefinitionId = currentDefaultVersion.actorDefinitionId + val configScopeMaps = getConfigScopeMaps(actorDefinitionId) + for (breakingChange in 
breakingChangesForUpgrade) { + val actorIdsToPin = getActorIdsToPinForBreakingChange(actorDefinitionId, breakingChange, configScopeMaps) + if (actorIdsToPin.isNotEmpty()) { + // create the pins + createBreakingChangePinsForActors(actorIdsToPin, currentDefaultVersion, breakingChange) + } + } + } + + /** + * For breaking changes that have been rolled back, clear old pins that may have been created. + * Removing the pins will cause the actors to use the new default version. + */ + @VisibleForTesting + internal fun processBreakingChangePinRollbacks( + actorDefinitionId: UUID, + newDefaultVersion: ActorDefinitionVersion, + breakingChangesForDef: List, + ) { + val rolledBackBreakingChanges = + getBreakingChangesAfterVersion( + newDefaultVersion.dockerImageTag, + breakingChangesForDef, + ) + + if (rolledBackBreakingChanges.isEmpty()) return + + val scopedConfigsToRemove = + scopedConfigurationService.listScopedConfigurationsWithOrigins( + ConnectorVersionKey.key, + ConfigResourceType.ACTOR_DEFINITION, + actorDefinitionId, + ConfigOriginType.BREAKING_CHANGE, + rolledBackBreakingChanges.map { it.version.serialize() }, + ) + + if (scopedConfigsToRemove.isNotEmpty()) { + scopedConfigurationService.deleteScopedConfigurations(scopedConfigsToRemove.map { it.id }) + } + } + + @VisibleForTesting + internal fun getUpgradeCandidates( + actorDefinitionId: UUID, + configScopeMaps: Collection, + ): Set { + val scopedConfigs = + scopedConfigurationService.getScopedConfigurations( + ConnectorVersionKey, + ConfigResourceType.ACTOR_DEFINITION, + actorDefinitionId, + configScopeMaps.toList(), + ) + + // upgrade candidates are all those actorIds that don't have a version config + return configScopeMaps.stream() + .filter { !scopedConfigs.containsKey(it.id) } + .map { it.id } + .collect(Collectors.toSet()) + } + + @VisibleForTesting + internal fun createBreakingChangePinsForActors( + actorIds: Set, + currentVersion: ActorDefinitionVersion, + breakingChange: ActorDefinitionBreakingChange, + ) 
{ + val scopedConfigurationsToCreate = + actorIds.map { actorId -> + ScopedConfiguration() + .withId(UUID.randomUUID()) + .withKey(ConnectorVersionKey.key) + .withValue(currentVersion.versionId.toString()) + .withResourceType(ConfigResourceType.ACTOR_DEFINITION) + .withResourceId(currentVersion.actorDefinitionId) + .withScopeType(ConfigScopeType.ACTOR) + .withScopeId(actorId) + .withOriginType(ConfigOriginType.BREAKING_CHANGE) + .withOrigin(breakingChange.version.serialize()) + }.toList() + scopedConfigurationService.insertScopedConfigurations(scopedConfigurationsToCreate) + } + + @VisibleForTesting + fun getActorsToUpgrade( + currentDefaultVersion: ActorDefinitionVersion, + breakingChangesForUpgrade: List, + ): Set { + val upgradeCandidates = actorDefinitionService.getActorsWithDefaultVersionId(currentDefaultVersion.versionId).toMutableSet() + + for (breakingChange in breakingChangesForUpgrade) { + val actorsImpactedByBreakingChange = getActorsAffectedByBreakingChange(upgradeCandidates, breakingChange) + upgradeCandidates.removeAll(actorsImpactedByBreakingChange) + } + + return upgradeCandidates + } + + @VisibleForTesting + fun getActorsAffectedByBreakingChange( + actorIds: Set, + breakingChange: ActorDefinitionBreakingChange, + ): Set { + if (!featureFlagClient.boolVariation(UseBreakingChangeScopes, Workspace(ANONYMOUS))) { + return actorIds + } + + val scopedImpact = breakingChange.scopedImpact + if (breakingChange.scopedImpact == null || breakingChange.scopedImpact.isEmpty()) { + return actorIds + } + + val actorsImpactedByBreakingChange: MutableSet = HashSet() + for (impactScope in scopedImpact) { + if (impactScope.scopeType == BreakingChangeScope.ScopeType.STREAM) { + val streamBreakingChangeScope = BreakingChangeScopeFactory.createStreamBreakingChangeScope(impactScope) + actorsImpactedByBreakingChange.addAll(getActorsInStreamBreakingChangeScope(actorIds, streamBreakingChangeScope)) + } else { + throw RuntimeException("Unsupported breaking change scope type: 
" + impactScope.scopeType) + } + } + + return actorsImpactedByBreakingChange + } + + private fun getActorsInStreamBreakingChangeScope( + actorIdsToFilter: Set, + streamBreakingChangeScope: StreamBreakingChangeScope, + ): Set { + return actorIdsToFilter + .stream() + .filter { actorId: UUID -> + getActorSyncsAnyListedStream( + actorId, + streamBreakingChangeScope.impactedScopes, + ) + } + .collect(Collectors.toSet()) + } + + private fun getActorSyncsAnyListedStream( + actorId: UUID, + streamNames: List, + ): Boolean { + try { + return connectionService.actorSyncsAnyListedStream(actorId, streamNames) + } catch (e: IOException) { + throw java.lang.RuntimeException(e) + } + } + + /** + * Given a current version and a version to upgrade to, and a list of breaking changes, determine + * which breaking changes, if any, apply to upgrading from the current version to the version to + * upgrade to. + * + * @param currentDockerImageTag version to upgrade from + * @param dockerImageTagForUpgrade version to upgrade to + * @param breakingChangesForDef a list of breaking changes to check + * @return list of applicable breaking changes + */ + @VisibleForTesting + internal fun getBreakingChangesForUpgrade( + currentDockerImageTag: String, + dockerImageTagForUpgrade: String, + breakingChangesForDef: List, + ): List { + if (breakingChangesForDef.isEmpty()) { + // If there aren't breaking changes, early exit in order to avoid trying to parse versions. + // This is helpful for custom connectors or local dev images for connectors that don't have + // breaking changes. + return listOf() + } + + val currentVersion = Version(currentDockerImageTag) + val versionToUpgradeTo = Version(dockerImageTagForUpgrade) + + if (versionToUpgradeTo.lessThanOrEqualTo(currentVersion)) { + // When downgrading, we don't take into account breaking changes. 
+ return listOf() + } + + return breakingChangesForDef.stream().filter { breakingChange -> + ( + currentVersion.lessThan(breakingChange.version) && + versionToUpgradeTo.greaterThanOrEqualTo(breakingChange.version) + ) + }.sorted { bc1, bc2 -> bc1.version.versionCompareTo(bc2.version) }.toList() + } + + /** + * Given a new image tag, and a list of breaking changes, determine which breaking changes, if any, + * are after the new version (i.e. are not applicable to the new version). + */ + @VisibleForTesting + internal fun getBreakingChangesAfterVersion( + newImageTag: String, + breakingChangesForDef: List, + ): List { + if (breakingChangesForDef.isEmpty()) { + return listOf() + } + + val newVersion = Version(newImageTag) + return breakingChangesForDef.filter { it.version.greaterThan(newVersion) }.toList() + } +} diff --git a/airbyte-data/src/main/kotlin/io/airbyte/data/repositories/ConnectionTimelineEventRepository.kt b/airbyte-data/src/main/kotlin/io/airbyte/data/repositories/ConnectionTimelineEventRepository.kt new file mode 100644 index 00000000000..26c20f01a09 --- /dev/null +++ b/airbyte-data/src/main/kotlin/io/airbyte/data/repositories/ConnectionTimelineEventRepository.kt @@ -0,0 +1,12 @@ +package io.airbyte.data.repositories + +import io.airbyte.data.repositories.entities.ConnectionTimelineEvent +import io.micronaut.data.jdbc.annotation.JdbcRepository +import io.micronaut.data.model.query.builder.sql.Dialect +import io.micronaut.data.repository.PageableRepository +import java.util.UUID + +@JdbcRepository(dialect = Dialect.POSTGRES, dataSource = "config") +interface ConnectionTimelineEventRepository : PageableRepository { + fun findByConnectionId(connectionId: UUID): List +} diff --git a/airbyte-data/src/main/kotlin/io/airbyte/data/repositories/PermissionRepository.kt b/airbyte-data/src/main/kotlin/io/airbyte/data/repositories/PermissionRepository.kt index 547a94b45e6..570d2d405e9 100644 --- 
a/airbyte-data/src/main/kotlin/io/airbyte/data/repositories/PermissionRepository.kt +++ b/airbyte-data/src/main/kotlin/io/airbyte/data/repositories/PermissionRepository.kt @@ -11,4 +11,12 @@ import java.util.UUID * NOTE: eventually this will fully replace the PermissionPersistence class. */ @JdbcRepository(dialect = Dialect.POSTGRES, dataSource = "config") -interface PermissionRepository : PageableRepository +interface PermissionRepository : PageableRepository { + fun findByIdIn(permissionIds: List): List + + fun findByUserId(userId: UUID): List + + fun findByOrganizationId(organizationId: UUID): List + + fun deleteByIdIn(permissionIds: List) +} diff --git a/airbyte-data/src/main/kotlin/io/airbyte/data/repositories/ScopedConfigurationRepository.kt b/airbyte-data/src/main/kotlin/io/airbyte/data/repositories/ScopedConfigurationRepository.kt index 15d9b6ac310..7cde7d6daa3 100644 --- a/airbyte-data/src/main/kotlin/io/airbyte/data/repositories/ScopedConfigurationRepository.kt +++ b/airbyte-data/src/main/kotlin/io/airbyte/data/repositories/ScopedConfigurationRepository.kt @@ -1,6 +1,7 @@ package io.airbyte.data.repositories import io.airbyte.data.repositories.entities.ScopedConfiguration +import io.airbyte.db.instance.configs.jooq.generated.enums.ConfigOriginType import io.airbyte.db.instance.configs.jooq.generated.enums.ConfigResourceType import io.airbyte.db.instance.configs.jooq.generated.enums.ConfigScopeType import io.micronaut.data.jdbc.annotation.JdbcRepository @@ -26,5 +27,15 @@ interface ScopedConfigurationRepository : PageableRepository, ): List + fun findByKeyAndResourceTypeAndResourceIdAndOriginTypeAndOriginInList( + key: String, + resourceType: ConfigResourceType, + resourceId: UUID, + originType: ConfigOriginType, + origins: List, + ): List + fun findByKey(key: String): List + + fun deleteByIdInList(ids: List) } diff --git a/airbyte-data/src/main/kotlin/io/airbyte/data/repositories/UserInvitationRepository.kt 
b/airbyte-data/src/main/kotlin/io/airbyte/data/repositories/UserInvitationRepository.kt index 85513a69b9b..ce3a19632f9 100644 --- a/airbyte-data/src/main/kotlin/io/airbyte/data/repositories/UserInvitationRepository.kt +++ b/airbyte-data/src/main/kotlin/io/airbyte/data/repositories/UserInvitationRepository.kt @@ -1,6 +1,8 @@ package io.airbyte.data.repositories import io.airbyte.data.repositories.entities.UserInvitation +import io.airbyte.data.services.impls.data.mappers.EntityInvitationStatus +import io.airbyte.data.services.impls.data.mappers.EntityScopeType import io.micronaut.data.jdbc.annotation.JdbcRepository import io.micronaut.data.model.query.builder.sql.Dialect import io.micronaut.data.repository.PageableRepository @@ -10,4 +12,17 @@ import java.util.UUID @JdbcRepository(dialect = Dialect.POSTGRES, dataSource = "config") interface UserInvitationRepository : PageableRepository { fun findByInviteCode(inviteCode: String): Optional + + fun findByStatusAndScopeTypeAndScopeId( + status: EntityInvitationStatus, + scopeType: EntityScopeType, + scopeId: UUID, + ): List + + fun findByStatusAndScopeTypeAndScopeIdAndInvitedEmail( + status: EntityInvitationStatus, + scopeType: EntityScopeType, + scopeId: UUID, + invitedEmail: String, + ): List } diff --git a/airbyte-data/src/main/kotlin/io/airbyte/data/repositories/entities/ConnectionTimelineEvent.kt b/airbyte-data/src/main/kotlin/io/airbyte/data/repositories/entities/ConnectionTimelineEvent.kt new file mode 100644 index 00000000000..ea80b04629a --- /dev/null +++ b/airbyte-data/src/main/kotlin/io/airbyte/data/repositories/entities/ConnectionTimelineEvent.kt @@ -0,0 +1,24 @@ +package io.airbyte.data.repositories.entities + +import io.micronaut.data.annotation.AutoPopulated +import io.micronaut.data.annotation.DateCreated +import io.micronaut.data.annotation.Id +import io.micronaut.data.annotation.MappedEntity +import io.micronaut.data.annotation.TypeDef +import io.micronaut.data.model.DataType +import 
java.time.OffsetDateTime +import java.util.UUID + +@MappedEntity("connection_timeline_event") +data class ConnectionTimelineEvent( + @field:Id + @AutoPopulated + var id: UUID? = null, + var connectionId: UUID, + var userId: UUID? = null, + var eventType: String, + @field:TypeDef(type = DataType.JSON) + var summary: String? = null, + @DateCreated + var createdAt: java.time.OffsetDateTime? = null, +) diff --git a/airbyte-data/src/main/kotlin/io/airbyte/data/repositories/entities/UserInvitation.kt b/airbyte-data/src/main/kotlin/io/airbyte/data/repositories/entities/UserInvitation.kt index 521dab53d01..3bc0dacd5c8 100644 --- a/airbyte-data/src/main/kotlin/io/airbyte/data/repositories/entities/UserInvitation.kt +++ b/airbyte-data/src/main/kotlin/io/airbyte/data/repositories/entities/UserInvitation.kt @@ -3,8 +3,10 @@ package io.airbyte.data.repositories.entities import io.airbyte.db.instance.configs.jooq.generated.enums.InvitationStatus import io.airbyte.db.instance.configs.jooq.generated.enums.PermissionType import io.airbyte.db.instance.configs.jooq.generated.enums.ScopeType +import io.micronaut.core.annotation.Nullable import io.micronaut.data.annotation.AutoPopulated import io.micronaut.data.annotation.DateCreated +import io.micronaut.data.annotation.DateUpdated import io.micronaut.data.annotation.Id import io.micronaut.data.annotation.MappedEntity import io.micronaut.data.annotation.TypeDef @@ -19,6 +21,8 @@ data class UserInvitation( var inviteCode: String, var inviterUserId: UUID, var invitedEmail: String, + @Nullable + var acceptedByUserId: UUID? = null, var scopeId: UUID, @field:TypeDef(type = DataType.OBJECT) var scopeType: ScopeType, @@ -28,6 +32,7 @@ data class UserInvitation( var status: InvitationStatus, @DateCreated var createdAt: java.time.OffsetDateTime? = null, - @DateCreated + @DateUpdated var updatedAt: java.time.OffsetDateTime? 
= null, + var expiresAt: java.time.OffsetDateTime, ) diff --git a/airbyte-data/src/main/kotlin/io/airbyte/data/services/ConnectionTimelineEventService.kt b/airbyte-data/src/main/kotlin/io/airbyte/data/services/ConnectionTimelineEventService.kt new file mode 100644 index 00000000000..43e35a4c7c4 --- /dev/null +++ b/airbyte-data/src/main/kotlin/io/airbyte/data/services/ConnectionTimelineEventService.kt @@ -0,0 +1,7 @@ +package io.airbyte.data.services + +import io.airbyte.data.repositories.entities.ConnectionTimelineEvent + +interface ConnectionTimelineEventService { + fun writeEvent(event: ConnectionTimelineEvent): ConnectionTimelineEvent +} diff --git a/airbyte-data/src/main/kotlin/io/airbyte/data/services/PermissionService.kt b/airbyte-data/src/main/kotlin/io/airbyte/data/services/PermissionService.kt new file mode 100644 index 00000000000..8ec67ce41f3 --- /dev/null +++ b/airbyte-data/src/main/kotlin/io/airbyte/data/services/PermissionService.kt @@ -0,0 +1,48 @@ +package io.airbyte.data.services + +import io.airbyte.config.Permission +import java.util.UUID + +/** + * A service that manages permissions. + */ +interface PermissionService { + /** + * Get all permissions for a given user. + */ + fun getPermissionsForUser(userId: UUID): List + + /** + * Delete a permission by its unique id. + */ + @Throws(RemoveLastOrgAdminPermissionException::class) + fun deletePermission(permissionId: UUID) + + /** + * Delete a list of permissions by their unique ids. + */ + @Throws(RemoveLastOrgAdminPermissionException::class) + fun deletePermissions(permissionIds: List) + + /** + * Create a permission. + */ + @Throws(PermissionRedundantException::class) + fun createPermission(permission: Permission): Permission + + /** + * Update a permission + */ + @Throws(RemoveLastOrgAdminPermissionException::class) + fun updatePermission(permission: Permission) +} + +/** + * Exception thrown when an operation on a permission cannot be performed because it is redundant. 
+ */ +class PermissionRedundantException(message: String) : Exception(message) + +/** + * Exception thrown when attempting an operation on a permission that would result in an organization without any org-admin. + */ +class RemoveLastOrgAdminPermissionException(message: String) : Exception(message) diff --git a/airbyte-data/src/main/kotlin/io/airbyte/data/services/ScopedConfigurationService.kt b/airbyte-data/src/main/kotlin/io/airbyte/data/services/ScopedConfigurationService.kt index 251e4bee983..229f4e68a5e 100644 --- a/airbyte-data/src/main/kotlin/io/airbyte/data/services/ScopedConfigurationService.kt +++ b/airbyte-data/src/main/kotlin/io/airbyte/data/services/ScopedConfigurationService.kt @@ -1,12 +1,16 @@ package io.airbyte.data.services +import io.airbyte.config.ConfigOriginType import io.airbyte.config.ConfigResourceType import io.airbyte.config.ConfigScopeType import io.airbyte.config.ScopedConfiguration +import io.airbyte.data.services.shared.ConfigScopeMapWithId import io.airbyte.data.services.shared.ScopedConfigurationKey import java.util.Optional import java.util.UUID +data class KeyedScopeMap(val key: String, val scopeMap: Map) + /** * A service that manages scoped configurations. */ @@ -51,11 +55,33 @@ interface ScopedConfigurationService { scopes: Map, ): Optional + /** + * Get scoped configurations for multiple key, resource and scope map (in batch). + * + * This will resolve the configuration by evaluating the scopes in the priority order defined by the given key. + * Scopes included in the map must be defined as a supported scope in the key definition (see ScopedConfigurationKey). + * + * IDs in the provided list of scope maps should be unique. + * The same ID used in the input list will be used as the key in the output map, and the value will be the resolved configuration. + * If no configuration exists for an ID, it will not be included in the output map. 
+ */ + fun getScopedConfigurations( + configKey: ScopedConfigurationKey, + resourceType: ConfigResourceType, + resourceId: UUID, + scopeMaps: List, + ): Map + /** * Write a scoped configuration. */ fun writeScopedConfiguration(scopedConfiguration: ScopedConfiguration): ScopedConfiguration + /** + * Insert multiple configurations. + */ + fun insertScopedConfigurations(scopedConfigurations: List): List + /** * List all scoped configurations. */ @@ -77,8 +103,24 @@ interface ScopedConfigurationService { scopeIds: List, ): List + /** + * List scoped configurations with given origin values for an origin type. + */ + fun listScopedConfigurationsWithOrigins( + key: String, + resourceType: ConfigResourceType, + resourceId: UUID, + originType: ConfigOriginType, + origins: List, + ): List + /** * Delete a scoped configuration by id. */ fun deleteScopedConfiguration(configId: UUID) + + /** + * Delete multiple configurations by their IDs. + */ + fun deleteScopedConfigurations(configIds: List) } diff --git a/airbyte-data/src/main/kotlin/io/airbyte/data/services/UserInvitationService.kt b/airbyte-data/src/main/kotlin/io/airbyte/data/services/UserInvitationService.kt index 3cd14dd5936..fddff4d05e4 100644 --- a/airbyte-data/src/main/kotlin/io/airbyte/data/services/UserInvitationService.kt +++ b/airbyte-data/src/main/kotlin/io/airbyte/data/services/UserInvitationService.kt @@ -1,5 +1,6 @@ package io.airbyte.data.services +import io.airbyte.config.ScopeType import io.airbyte.config.UserInvitation import java.util.UUID @@ -12,17 +13,27 @@ interface UserInvitationService { */ fun getUserInvitationByInviteCode(inviteCode: String): UserInvitation + /** + * Get a list of pending invitations for a given scope type and scope id. + */ + fun getPendingInvitations( + scopeType: ScopeType, + scopeId: UUID, + ): List + /** * Create a new user invitation. 
*/ + @Throws(InvitationDuplicateException::class) fun createUserInvitation(invitation: UserInvitation): UserInvitation /** * Accept a user invitation and create resulting permission record. */ + @Throws(InvitationStatusUnexpectedException::class) fun acceptUserInvitation( inviteCode: String, - invitedUserId: UUID, + acceptingUserId: UUID, ): UserInvitation /** @@ -36,5 +47,18 @@ interface UserInvitationService { /** * Cancel a user invitation. */ + @Throws(InvitationStatusUnexpectedException::class) fun cancelUserInvitation(inviteCode: String): UserInvitation } + +/** + * Exception thrown when an operation on an invitation cannot be performed because it has an + * unexpected status. For instance, trying to accept an invitation that is not pending. + */ +class InvitationStatusUnexpectedException(message: String) : Exception(message) + +/** + * Exception thrown when trying to create a duplicate invitation, ie creating new invitation with + * the same email and scope as an existing pending invitation. 
+ */ +class InvitationDuplicateException(message: String) : Exception(message) diff --git a/airbyte-data/src/main/kotlin/io/airbyte/data/services/impls/data/ConnectionTimelineEventServiceImpl.kt b/airbyte-data/src/main/kotlin/io/airbyte/data/services/impls/data/ConnectionTimelineEventServiceImpl.kt new file mode 100644 index 00000000000..9c73e088b38 --- /dev/null +++ b/airbyte-data/src/main/kotlin/io/airbyte/data/services/impls/data/ConnectionTimelineEventServiceImpl.kt @@ -0,0 +1,13 @@ +package io.airbyte.data.services.impls.data + +import io.airbyte.data.repositories.ConnectionTimelineEventRepository +import io.airbyte.data.repositories.entities.ConnectionTimelineEvent +import io.airbyte.data.services.ConnectionTimelineEventService +import jakarta.inject.Singleton + +@Singleton +class ConnectionTimelineEventServiceImpl(private val repository: ConnectionTimelineEventRepository) : ConnectionTimelineEventService { + override fun writeEvent(event: ConnectionTimelineEvent): ConnectionTimelineEvent { + return repository.save(event) + } +} diff --git a/airbyte-data/src/main/kotlin/io/airbyte/data/services/impls/data/PermissionServiceDataImpl.kt b/airbyte-data/src/main/kotlin/io/airbyte/data/services/impls/data/PermissionServiceDataImpl.kt new file mode 100644 index 00000000000..78c6468473d --- /dev/null +++ b/airbyte-data/src/main/kotlin/io/airbyte/data/services/impls/data/PermissionServiceDataImpl.kt @@ -0,0 +1,170 @@ +package io.airbyte.data.services.impls.data + +import io.airbyte.commons.auth.OrganizationAuthRole +import io.airbyte.commons.auth.WorkspaceAuthRole +import io.airbyte.config.ConfigSchema +import io.airbyte.config.Permission +import io.airbyte.data.exceptions.ConfigNotFoundException +import io.airbyte.data.repositories.PermissionRepository +import io.airbyte.data.services.PermissionRedundantException +import io.airbyte.data.services.PermissionService +import io.airbyte.data.services.RemoveLastOrgAdminPermissionException +import 
io.airbyte.data.services.WorkspaceService +import io.airbyte.data.services.impls.data.mappers.toConfigModel +import io.airbyte.data.services.impls.data.mappers.toEntity +import io.airbyte.db.instance.configs.jooq.generated.enums.PermissionType +import io.micronaut.transaction.annotation.Transactional +import jakarta.inject.Singleton +import java.util.UUID + +@Singleton +open class PermissionServiceDataImpl( + private val workspaceService: WorkspaceService, + private val permissionRepository: PermissionRepository, +) : PermissionService { + override fun getPermissionsForUser(userId: UUID): List { + return permissionRepository.findByUserId(userId).map { it.toConfigModel() } + } + + @Transactional("config") + override fun deletePermission(permissionId: UUID) { + throwIfDeletingLastOrgAdmin(listOf(permissionId)) + permissionRepository.deleteById(permissionId) + } + + @Transactional("config") + override fun deletePermissions(permissionIds: List) { + throwIfDeletingLastOrgAdmin(permissionIds) + permissionRepository.deleteByIdIn(permissionIds) + } + + @Transactional("config") + override fun createPermission(permission: Permission): Permission { + val existingUserPermissions = getPermissionsForUser(permission.userId).toSet() + + // throw if new permission would be redundant + if (isRedundantWorkspacePermission(permission, existingUserPermissions)) { + throw PermissionRedundantException( + "Permission type ${permission.permissionType} would be redundant for user ${permission.userId}. 
Preventing creation.", + ) + } + + // remove any permissions that would be made redundant by adding in the new permission + deletePermissionsMadeRedundantByPermission(permission, existingUserPermissions) + + return permissionRepository.save(permission.toEntity()).toConfigModel() + } + + @Transactional("config") + override fun updatePermission(permission: Permission) { + // throw early if the update would remove the last org admin + throwIfUpdateWouldRemoveLastOrgAdmin(permission) + + val otherPermissionsForUser = getPermissionsForUser(permission.userId).filter { it.permissionId != permission.permissionId }.toSet() + + // remove the permission being updated if it is now redundant. + if (isRedundantWorkspacePermission(permission, otherPermissionsForUser)) { + permissionRepository.deleteById(permission.permissionId) + return + } + + // remove any permissions that would be made redundant by adding in the newly-updated permission + deletePermissionsMadeRedundantByPermission(permission, otherPermissionsForUser) + + permissionRepository.update(permission.toEntity()).toConfigModel() + } + + private fun deletePermissionsMadeRedundantByPermission( + permission: Permission, + otherPermissions: Set, + ) { + otherPermissions.filter { isRedundantWorkspacePermission(it, otherPermissions - it + permission) } + .map { it.permissionId } + .takeIf { it.isNotEmpty() } + ?.let { permissionRepository.deleteByIdIn(it) } + } + + private fun throwIfDeletingLastOrgAdmin(permissionIdsToDelete: List) { + // get all org admin permissions being deleted, if any + val deletedOrgAdminPermissions = + permissionRepository.findByIdIn(permissionIdsToDelete).filter { + it.permissionType == PermissionType.organization_admin + } + + // group deleted org admin permission IDs by organization ID + val orgIdToDeletedOrgAdminPermissionIds = deletedOrgAdminPermissions.groupBy({ it.organizationId!! }, { it.id!! 
}) + + // for each group, make sure the last org-admin isn't being deleted + orgIdToDeletedOrgAdminPermissionIds.forEach { + (orgId, deletedOrgAdminIds) -> + throwIfDeletingLastOrgAdminForOrg(orgId, deletedOrgAdminIds.toSet()) + } + } + + private fun throwIfDeletingLastOrgAdminForOrg( + orgId: UUID, + deletedOrgAdminPermissionIds: Set, + ) { + // get all other permissions for the organization that are not being deleted + val otherOrgPermissions = permissionRepository.findByOrganizationId(orgId).filter { it.id !in deletedOrgAdminPermissionIds } + + // if there are no other org-admin permissions remaining in the org, throw an exception + if (otherOrgPermissions.none { it.permissionType == PermissionType.organization_admin }) { + throw RemoveLastOrgAdminPermissionException("Cannot delete the last admin in Organization $orgId.") + } + } + + private fun throwIfUpdateWouldRemoveLastOrgAdmin(updatedPermission: Permission) { + // return early if the permission is not for an organization + val orgId = updatedPermission.organizationId ?: return + + // get the current state of the permission in the database + val priorPermission = + permissionRepository.findById(updatedPermission.permissionId) + .orElseThrow { ConfigNotFoundException(ConfigSchema.PERMISSION, "Permission not found: ${updatedPermission.permissionId}") } + + // return early if the permission was not an org admin prior to the update + if (priorPermission.permissionType != PermissionType.organization_admin) { + return + } + + // get all other permissions for the organization + val otherOrgPermissions = permissionRepository.findByOrganizationId(orgId).filter { it.id != updatedPermission.permissionId } + + // if the permission being updated is the last org admin, throw an exception + if (otherOrgPermissions.none { it.permissionType == PermissionType.organization_admin }) { + throw RemoveLastOrgAdminPermissionException("Cannot demote the last admin in Organization $orgId.") + } + } + + private fun 
isRedundantWorkspacePermission( + permission: Permission, + existingUserPermissions: Set, + ): Boolean { + // only workspace permissions can be redundant + val workspaceId = permission.workspaceId ?: return false + + // if the workspace is not in an organization, it cannot have redundant permissions + val orgIdForWorkspace = workspaceService.getOrganizationIdFromWorkspaceId(workspaceId).orElse(null) ?: return false + + // if the user has no org-level permission, the workspace permission cannot be redundant + val existingOrgPermission = existingUserPermissions.find { it.organizationId == orgIdForWorkspace } ?: return false + + // if the new permission is less than or equal to the existing org-level permission, it is redundant + return getAuthority(permission.permissionType) <= getAuthority(existingOrgPermission.permissionType) + } + + private fun getAuthority(permissionType: Permission.PermissionType): Int { + return when (permissionType) { + Permission.PermissionType.INSTANCE_ADMIN -> throw IllegalArgumentException("INSTANCE_ADMIN permissions are not supported") + Permission.PermissionType.ORGANIZATION_ADMIN -> OrganizationAuthRole.ORGANIZATION_ADMIN.authority + Permission.PermissionType.ORGANIZATION_EDITOR -> OrganizationAuthRole.ORGANIZATION_EDITOR.authority + Permission.PermissionType.ORGANIZATION_READER -> OrganizationAuthRole.ORGANIZATION_READER.authority + Permission.PermissionType.ORGANIZATION_MEMBER -> OrganizationAuthRole.ORGANIZATION_MEMBER.authority + Permission.PermissionType.WORKSPACE_OWNER -> WorkspaceAuthRole.WORKSPACE_ADMIN.authority + Permission.PermissionType.WORKSPACE_ADMIN -> WorkspaceAuthRole.WORKSPACE_ADMIN.authority + Permission.PermissionType.WORKSPACE_EDITOR -> WorkspaceAuthRole.WORKSPACE_EDITOR.authority + Permission.PermissionType.WORKSPACE_READER -> WorkspaceAuthRole.WORKSPACE_READER.authority + } + } +} diff --git a/airbyte-data/src/main/kotlin/io/airbyte/data/services/impls/data/ScopedConfigurationServiceDataImpl.kt 
b/airbyte-data/src/main/kotlin/io/airbyte/data/services/impls/data/ScopedConfigurationServiceDataImpl.kt index 3c6b1b77927..46b8c0ef0d4 100644 --- a/airbyte-data/src/main/kotlin/io/airbyte/data/services/impls/data/ScopedConfigurationServiceDataImpl.kt +++ b/airbyte-data/src/main/kotlin/io/airbyte/data/services/impls/data/ScopedConfigurationServiceDataImpl.kt @@ -1,5 +1,6 @@ package io.airbyte.data.services.impls.data +import io.airbyte.config.ConfigOriginType import io.airbyte.config.ConfigResourceType import io.airbyte.config.ConfigSchema import io.airbyte.config.ConfigScopeType @@ -9,6 +10,7 @@ import io.airbyte.data.repositories.ScopedConfigurationRepository import io.airbyte.data.services.ScopedConfigurationService import io.airbyte.data.services.impls.data.mappers.toConfigModel import io.airbyte.data.services.impls.data.mappers.toEntity +import io.airbyte.data.services.shared.ConfigScopeMapWithId import io.airbyte.data.services.shared.ScopedConfigurationKey import jakarta.inject.Singleton import java.util.Optional @@ -66,30 +68,58 @@ class ScopedConfigurationServiceDataImpl(private val repository: ScopedConfigura resourceId: UUID, scopes: Map, ): Optional { - scopes.keys.forEach { - if (!configKey.supportedScopes.contains(it)) { - throw IllegalArgumentException("Scope type $it is not supported by key ${configKey.key}") + val id = UUID.randomUUID() + val configsRes = + getScopedConfigurations( + configKey, + resourceType, + resourceId, + listOf(ConfigScopeMapWithId(id, scopes)), + ) + return Optional.ofNullable(configsRes[id]) + } + + override fun getScopedConfigurations( + configKey: ScopedConfigurationKey, + resourceType: ConfigResourceType, + resourceId: UUID, + scopeMaps: List, + ): Map { + val idsPerScopeType = + scopeMaps + .flatMap { it.scopeMap.entries } + .filter { it.value != null } + .groupBy({ it.key }, { it.value!! 
}) + + for (scopeType in idsPerScopeType.keys) { + if (!configKey.supportedScopes.contains(scopeType)) { + throw IllegalArgumentException("Scope type $scopeType is not supported by key ${configKey.key}") } } - // Later down could optimize this to only make one query. - for (scope in configKey.supportedScopes) { - if (scopes.containsKey(scope)) { - val scopedConfig = - getScopedConfiguration( - configKey.key, - resourceType, - resourceId, - scope, - scopes.getValue(scope), - ) - if (scopedConfig.isPresent) { - return scopedConfig + // Fetch all configs at once per type. This means max 1 query per scope type regardless of the number of entries in the input map. + val configsPerScopeType = + idsPerScopeType.mapValues { (scopeType, ids) -> + listScopedConfigurationsWithScopes(configKey.key, resourceType, resourceId, scopeType, ids.toSet().toList()) + } + + val outMap = mutableMapOf() + for (scopeMapWithId in scopeMaps) { + // Evaluate in priority order as defined by the config key. + val scopeMap = scopeMapWithId.scopeMap + for (scope in configKey.supportedScopes) { + if (scopeMap.containsKey(scope)) { + val scopeId = scopeMap.getValue(scope) ?: continue + val scopedConfig = configsPerScopeType[scope]?.find { it.scopeId == scopeId } + if (scopedConfig != null) { + outMap[scopeMapWithId.id] = scopedConfig + break + } } } } - return Optional.empty() + return outMap } override fun writeScopedConfiguration(scopedConfiguration: ScopedConfiguration): ScopedConfiguration { @@ -100,6 +130,10 @@ class ScopedConfigurationServiceDataImpl(private val repository: ScopedConfigura return repository.save(scopedConfiguration.toEntity()).toConfigModel() } + override fun insertScopedConfigurations(scopedConfigurations: List): List { + return repository.saveAll(scopedConfigurations.map { it.toEntity() }).map { it.toConfigModel() } + } + override fun listScopedConfigurations(): List { return repository.findAll().map { it.toConfigModel() }.toList() } @@ -124,7 +158,27 @@ class 
ScopedConfigurationServiceDataImpl(private val repository: ScopedConfigura ).map { it.toConfigModel() }.toList() } + override fun listScopedConfigurationsWithOrigins( + key: String, + resourceType: ConfigResourceType, + resourceId: UUID, + originType: ConfigOriginType, + origins: List, + ): List { + return repository.findByKeyAndResourceTypeAndResourceIdAndOriginTypeAndOriginInList( + key, + resourceType.toEntity(), + resourceId, + originType.toEntity(), + origins, + ).map { it.toConfigModel() }.toList() + } + override fun deleteScopedConfiguration(configId: UUID) { repository.deleteById(configId) } + + override fun deleteScopedConfigurations(configIds: List) { + repository.deleteByIdInList(configIds) + } } diff --git a/airbyte-data/src/main/kotlin/io/airbyte/data/services/impls/data/UserInvitationServiceDataImpl.kt b/airbyte-data/src/main/kotlin/io/airbyte/data/services/impls/data/UserInvitationServiceDataImpl.kt index 85458742715..dfcb6def191 100644 --- a/airbyte-data/src/main/kotlin/io/airbyte/data/services/impls/data/UserInvitationServiceDataImpl.kt +++ b/airbyte-data/src/main/kotlin/io/airbyte/data/services/impls/data/UserInvitationServiceDataImpl.kt @@ -1,18 +1,23 @@ package io.airbyte.data.services.impls.data import io.airbyte.config.ConfigSchema -import io.airbyte.config.InvitationStatus -import io.airbyte.config.Permission import io.airbyte.config.ScopeType import io.airbyte.config.UserInvitation import io.airbyte.data.exceptions.ConfigNotFoundException import io.airbyte.data.repositories.PermissionRepository import io.airbyte.data.repositories.UserInvitationRepository +import io.airbyte.data.repositories.entities.Permission +import io.airbyte.data.services.InvitationDuplicateException +import io.airbyte.data.services.InvitationStatusUnexpectedException import io.airbyte.data.services.UserInvitationService +import io.airbyte.data.services.impls.data.mappers.EntityInvitationStatus +import io.airbyte.data.services.impls.data.mappers.EntityScopeType +import 
io.airbyte.data.services.impls.data.mappers.EntityUserInvitation import io.airbyte.data.services.impls.data.mappers.toConfigModel import io.airbyte.data.services.impls.data.mappers.toEntity import io.micronaut.transaction.annotation.Transactional import jakarta.inject.Singleton +import java.time.OffsetDateTime import java.util.UUID @Singleton @@ -26,41 +31,73 @@ open class UserInvitationServiceDataImpl( }.toConfigModel() } + override fun getPendingInvitations( + scopeType: ScopeType, + scopeId: UUID, + ): List { + return userInvitationRepository.findByStatusAndScopeTypeAndScopeId( + EntityInvitationStatus.pending, + scopeType.toEntity(), + scopeId, + ).map { it.toConfigModel() } + } + override fun createUserInvitation(invitation: UserInvitation): UserInvitation { + // throw an exception if a pending invitation already exists for the same email and scope + val existingInvitations = + userInvitationRepository.findByStatusAndScopeTypeAndScopeIdAndInvitedEmail( + EntityInvitationStatus.pending, + invitation.scopeType.toEntity(), + invitation.scopeId, + invitation.invitedEmail, + ) + if (existingInvitations.isNotEmpty()) { + throw InvitationDuplicateException( + "A pending invitation already exists for InvitedEmail: ${invitation.invitedEmail}, ScopeType: ${invitation.scopeType} " + + "and ScopeId: ${invitation.scopeId}", + ) + } + return userInvitationRepository.save(invitation.toEntity()).toConfigModel() } @Transactional("config") override fun acceptUserInvitation( inviteCode: String, - invitedUserId: UUID, + acceptingUserId: UUID, ): UserInvitation { // fetch the invitation by code val invitation = userInvitationRepository.findByInviteCode(inviteCode).orElseThrow { ConfigNotFoundException(ConfigSchema.USER_INVITATION, inviteCode) - }.toConfigModel() + } - if (invitation.status != InvitationStatus.PENDING) { - throw IllegalStateException("Invitation status is not pending: ${invitation.status}") + // mark the invitation status as expired if expiresAt is in the past + if 
(invitation.expiresAt.isBefore(OffsetDateTime.now())) { + invitation.status = EntityInvitationStatus.expired + userInvitationRepository.update(invitation) } + // throw an exception if the invitation is not pending. Note that this will also + // catch the case where the invitation is expired. + throwIfNotPending(invitation) + // create a new permission record according to the invitation - val permission = - Permission().apply { - userId = invitedUserId - permissionType = invitation.permissionType - when (invitation.scopeType) { - ScopeType.ORGANIZATION -> organizationId = invitation.scopeId - ScopeType.WORKSPACE -> workspaceId = invitation.scopeId - else -> throw IllegalStateException("Unknown scope type: ${invitation.scopeType}") - } + Permission( + id = UUID.randomUUID(), + userId = acceptingUserId, + permissionType = invitation.permissionType, + ).apply { + when (invitation.scopeType) { + EntityScopeType.organization -> organizationId = invitation.scopeId + EntityScopeType.workspace -> workspaceId = invitation.scopeId } - permissionRepository.save(permission.toEntity()) + }.let { permissionRepository.save(it) } - // update the invitation status to accepted - invitation.status = InvitationStatus.ACCEPTED - val updatedInvitation = userInvitationRepository.update(invitation.toEntity()) + // mark the invitation as accepted + invitation.status = EntityInvitationStatus.accepted + invitation.acceptedByUserId = acceptingUserId + val updatedInvitation = userInvitationRepository.update(invitation) return updatedInvitation.toConfigModel() } @@ -73,6 +110,25 @@ open class UserInvitationServiceDataImpl( } override fun cancelUserInvitation(inviteCode: String): UserInvitation { - TODO("Not yet implemented") + val invitation = + userInvitationRepository.findByInviteCode(inviteCode).orElseThrow { + ConfigNotFoundException(ConfigSchema.USER_INVITATION, inviteCode) + } + + throwIfNotPending(invitation) + + invitation.status = EntityInvitationStatus.cancelled + val updatedInvitation 
= userInvitationRepository.update(invitation) + + return updatedInvitation.toConfigModel() + } + + private fun throwIfNotPending(invitation: EntityUserInvitation) { + if (invitation.status != EntityInvitationStatus.pending) { + throw InvitationStatusUnexpectedException( + "Expected invitation for ScopeType: ${invitation.scopeType} and ScopeId: ${invitation.scopeId} to " + + "be PENDING, but instead it had Status: ${invitation.status}", + ) + } } } diff --git a/airbyte-data/src/main/kotlin/io/airbyte/data/services/impls/data/mappers/PermissionTypeMapper.kt b/airbyte-data/src/main/kotlin/io/airbyte/data/services/impls/data/mappers/PermissionTypeMapper.kt index a00a3abe24b..0f40bfe3978 100644 --- a/airbyte-data/src/main/kotlin/io/airbyte/data/services/impls/data/mappers/PermissionTypeMapper.kt +++ b/airbyte-data/src/main/kotlin/io/airbyte/data/services/impls/data/mappers/PermissionTypeMapper.kt @@ -12,12 +12,13 @@ fun EntityPermissionType.toConfigModel(): ModelPermissionType { EntityPermissionType.organization_editor -> ModelPermissionType.ORGANIZATION_EDITOR EntityPermissionType.organization_reader -> ModelPermissionType.ORGANIZATION_READER EntityPermissionType.organization_member -> ModelPermissionType.ORGANIZATION_MEMBER - else -> throw IllegalArgumentException("Unexpected permission type: $this") + EntityPermissionType.instance_admin -> ModelPermissionType.INSTANCE_ADMIN } } fun ModelPermissionType.toEntity(): EntityPermissionType { return when (this) { + ModelPermissionType.WORKSPACE_OWNER -> EntityPermissionType.workspace_admin ModelPermissionType.WORKSPACE_ADMIN -> EntityPermissionType.workspace_admin ModelPermissionType.WORKSPACE_EDITOR -> EntityPermissionType.workspace_editor ModelPermissionType.WORKSPACE_READER -> EntityPermissionType.workspace_reader @@ -25,6 +26,6 @@ fun ModelPermissionType.toEntity(): EntityPermissionType { ModelPermissionType.ORGANIZATION_EDITOR -> EntityPermissionType.organization_editor ModelPermissionType.ORGANIZATION_READER -> 
EntityPermissionType.organization_reader ModelPermissionType.ORGANIZATION_MEMBER -> EntityPermissionType.organization_member - else -> throw IllegalArgumentException("Unexpected permission type: $this") + ModelPermissionType.INSTANCE_ADMIN -> EntityPermissionType.instance_admin } } diff --git a/airbyte-data/src/main/kotlin/io/airbyte/data/services/impls/data/mappers/ScopedConfigurationMapper.kt b/airbyte-data/src/main/kotlin/io/airbyte/data/services/impls/data/mappers/ScopedConfigurationMapper.kt index 28276d0e4ed..5a3f282156e 100644 --- a/airbyte-data/src/main/kotlin/io/airbyte/data/services/impls/data/mappers/ScopedConfigurationMapper.kt +++ b/airbyte-data/src/main/kotlin/io/airbyte/data/services/impls/data/mappers/ScopedConfigurationMapper.kt @@ -43,12 +43,14 @@ fun ModelConfigResourceType.toEntity(): EntityConfigResourceType { fun EntityConfigOriginType.toConfigModel(): ModelConfigOriginType { return when (this) { EntityConfigOriginType.user -> ModelConfigOriginType.USER + EntityConfigOriginType.breaking_change -> ModelConfigOriginType.BREAKING_CHANGE } } fun ModelConfigOriginType.toEntity(): EntityConfigOriginType { return when (this) { ModelConfigOriginType.USER -> EntityConfigOriginType.user + ModelConfigOriginType.BREAKING_CHANGE -> EntityConfigOriginType.breaking_change } } diff --git a/airbyte-data/src/main/kotlin/io/airbyte/data/services/impls/data/mappers/UserInvitationMapper.kt b/airbyte-data/src/main/kotlin/io/airbyte/data/services/impls/data/mappers/UserInvitationMapper.kt index 1f1f09c7781..62ed56f01da 100644 --- a/airbyte-data/src/main/kotlin/io/airbyte/data/services/impls/data/mappers/UserInvitationMapper.kt +++ b/airbyte-data/src/main/kotlin/io/airbyte/data/services/impls/data/mappers/UserInvitationMapper.kt @@ -32,6 +32,7 @@ fun EntityInvitationStatus.toConfigModel(): ModelInvitationStatus { EntityInvitationStatus.accepted -> ModelInvitationStatus.ACCEPTED EntityInvitationStatus.cancelled -> ModelInvitationStatus.CANCELLED 
EntityInvitationStatus.declined -> ModelInvitationStatus.DECLINED + EntityInvitationStatus.expired -> ModelInvitationStatus.EXPIRED } } @@ -41,6 +42,7 @@ fun ModelInvitationStatus.toEntity(): EntityInvitationStatus { ModelInvitationStatus.ACCEPTED -> EntityInvitationStatus.accepted ModelInvitationStatus.CANCELLED -> EntityInvitationStatus.cancelled ModelInvitationStatus.DECLINED -> EntityInvitationStatus.declined + ModelInvitationStatus.EXPIRED -> EntityInvitationStatus.expired } } @@ -50,12 +52,14 @@ fun EntityUserInvitation.toConfigModel(): ModelUserInvitation { .withInviteCode(this.inviteCode) .withInviterUserId(this.inviterUserId) .withInvitedEmail(this.invitedEmail) + .withAcceptedByUserId(this.acceptedByUserId) .withScopeId(this.scopeId) .withScopeType(this.scopeType.toConfigModel()) .withPermissionType(this.permissionType.toConfigModel()) .withStatus(this.status.toConfigModel()) .withCreatedAt(this.createdAt?.toEpochSecond()) .withUpdatedAt(this.updatedAt?.toEpochSecond()) + .withExpiresAt(this.expiresAt.toEpochSecond()) } fun ModelUserInvitation.toEntity(): EntityUserInvitation { @@ -64,11 +68,13 @@ fun ModelUserInvitation.toEntity(): EntityUserInvitation { inviteCode = this.inviteCode, inviterUserId = this.inviterUserId, invitedEmail = this.invitedEmail, + acceptedByUserId = this.acceptedByUserId, scopeId = this.scopeId, scopeType = this.scopeType.toEntity(), permissionType = this.permissionType.toEntity(), status = this.status.toEntity(), createdAt = this.createdAt?.let { OffsetDateTime.ofInstant(Instant.ofEpochSecond(it), ZoneOffset.UTC) }, updatedAt = this.updatedAt?.let { OffsetDateTime.ofInstant(Instant.ofEpochSecond(it), ZoneOffset.UTC) }, + expiresAt = OffsetDateTime.ofInstant(Instant.ofEpochSecond(this.expiresAt), ZoneOffset.UTC), ) } diff --git a/airbyte-data/src/main/kotlin/io/airbyte/data/services/shared/ConfigScopeMapWithId.kt b/airbyte-data/src/main/kotlin/io/airbyte/data/services/shared/ConfigScopeMapWithId.kt new file mode 100644 index 
00000000000..d1dda266716 --- /dev/null +++ b/airbyte-data/src/main/kotlin/io/airbyte/data/services/shared/ConfigScopeMapWithId.kt @@ -0,0 +1,13 @@ +package io.airbyte.data.services.shared + +import io.airbyte.config.ConfigScopeType +import java.util.UUID + +/** + * Data class to associate an ID with a given ScopedConfiguration scope map. + * This is used for resolving scopes in bulk, see ScopedConfigurationService. + */ +data class ConfigScopeMapWithId( + val id: UUID, + val scopeMap: Map, +) diff --git a/airbyte-data/src/test/java/io/airbyte/data/services/impls/jooq/ActorDefinitionServiceJooqImplTest.java b/airbyte-data/src/test/java/io/airbyte/data/services/impls/jooq/ActorDefinitionServiceJooqImplTest.java new file mode 100644 index 00000000000..b89ef7bbac4 --- /dev/null +++ b/airbyte-data/src/test/java/io/airbyte/data/services/impls/jooq/ActorDefinitionServiceJooqImplTest.java @@ -0,0 +1,121 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.data.services.impls.jooq; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +import io.airbyte.commons.json.Jsons; +import io.airbyte.config.ActorDefinitionVersion; +import io.airbyte.config.SourceConnection; +import io.airbyte.config.StandardSourceDefinition; +import io.airbyte.config.secrets.SecretsRepositoryReader; +import io.airbyte.config.secrets.SecretsRepositoryWriter; +import io.airbyte.data.exceptions.ConfigNotFoundException; +import io.airbyte.data.helpers.ActorDefinitionVersionUpdater; +import io.airbyte.data.services.ConnectionService; +import io.airbyte.data.services.ScopedConfigurationService; +import io.airbyte.data.services.SecretPersistenceConfigService; +import io.airbyte.data.services.SourceService; +import 
io.airbyte.data.services.shared.ActorWorkspaceOrganizationIds; +import io.airbyte.featureflag.FeatureFlagClient; +import io.airbyte.featureflag.HeartbeatMaxSecondsBetweenMessages; +import io.airbyte.featureflag.TestClient; +import io.airbyte.test.utils.BaseConfigDatabaseTest; +import io.airbyte.validation.json.JsonValidationException; +import java.io.IOException; +import java.util.List; +import java.util.Set; +import java.util.UUID; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +class ActorDefinitionServiceJooqImplTest extends BaseConfigDatabaseTest { + + private JooqTestDbSetupHelper jooqTestDbSetupHelper; + private SourceService sourceService; + private ActorDefinitionServiceJooqImpl actorDefinitionService; + + @BeforeEach + void setUp() throws JsonValidationException, ConfigNotFoundException, IOException { + this.actorDefinitionService = new ActorDefinitionServiceJooqImpl(database); + + final FeatureFlagClient featureFlagClient = mock(TestClient.class); + when(featureFlagClient.stringVariation(eq(HeartbeatMaxSecondsBetweenMessages.INSTANCE), any())).thenReturn("3600"); + + final SecretsRepositoryReader secretsRepositoryReader = mock(SecretsRepositoryReader.class); + final SecretsRepositoryWriter secretsRepositoryWriter = mock(SecretsRepositoryWriter.class); + final SecretPersistenceConfigService secretPersistenceConfigService = mock(SecretPersistenceConfigService.class); + final ConnectionService connectionService = mock(ConnectionService.class); + final ScopedConfigurationService scopedConfigurationService = mock(ScopedConfigurationService.class); + final ActorDefinitionVersionUpdater actorDefinitionVersionUpdater = + new ActorDefinitionVersionUpdater(featureFlagClient, connectionService, actorDefinitionService, scopedConfigurationService); + this.sourceService = new SourceServiceJooqImpl(database, featureFlagClient, secretsRepositoryReader, secretsRepositoryWriter, + secretPersistenceConfigService, connectionService, 
actorDefinitionVersionUpdater); + + jooqTestDbSetupHelper = new JooqTestDbSetupHelper(); + jooqTestDbSetupHelper.setupForVersionUpgradeTest(); + } + + @Test + void testSetActorDefaultVersions() throws IOException { + final UUID actorId = jooqTestDbSetupHelper.getSource().getSourceId(); + final UUID otherActorId = UUID.randomUUID(); + final SourceConnection otherSource = Jsons.clone(jooqTestDbSetupHelper.getSource()).withSourceId(otherActorId); + sourceService.writeSourceConnectionNoSecrets(otherSource); + + final ActorDefinitionVersion newVersion = + Jsons.clone(jooqTestDbSetupHelper.getSourceDefinitionVersion()).withVersionId(UUID.randomUUID()).withDockerImageTag("5.0.0"); + actorDefinitionService.writeActorDefinitionVersion(newVersion); + + actorDefinitionService.setActorDefaultVersions(List.of(actorId), newVersion.getVersionId()); + + final Set actorsOnNewDefaultVersion = actorDefinitionService.getActorsWithDefaultVersionId(newVersion.getVersionId()); + assertEquals(Set.of(actorId), actorsOnNewDefaultVersion); + + final Set actorsOnOldDefaultVersion = + actorDefinitionService.getActorsWithDefaultVersionId(jooqTestDbSetupHelper.getInitialSourceDefaultVersionId()); + assertEquals(Set.of(otherActorId), actorsOnOldDefaultVersion); + } + + @Test + void testGetActorsWithDefaultVersionId() throws IOException { + final UUID actorId = jooqTestDbSetupHelper.getSource().getSourceId(); + final Set actorIds = actorDefinitionService.getActorsWithDefaultVersionId(jooqTestDbSetupHelper.getInitialSourceDefaultVersionId()); + assertEquals(Set.of(actorId), actorIds); + } + + @Test + void updateActorDefinitionDefaultVersionId() throws JsonValidationException, ConfigNotFoundException, IOException { + final UUID actorDefinitionId = jooqTestDbSetupHelper.getSourceDefinition().getSourceDefinitionId(); + final StandardSourceDefinition sourceDefinition = sourceService.getStandardSourceDefinition(actorDefinitionId); + assertEquals(sourceDefinition.getDefaultVersionId(), 
jooqTestDbSetupHelper.getInitialSourceDefaultVersionId()); + + final ActorDefinitionVersion newVersion = + Jsons.clone(jooqTestDbSetupHelper.getSourceDefinitionVersion()).withVersionId(UUID.randomUUID()).withDockerImageTag("5.0.0"); + actorDefinitionService.writeActorDefinitionVersion(newVersion); + + actorDefinitionService.updateActorDefinitionDefaultVersionId(actorDefinitionId, newVersion.getVersionId()); + + final StandardSourceDefinition updatedSourceDefinition = sourceService.getStandardSourceDefinition(actorDefinitionId); + assertEquals(updatedSourceDefinition.getDefaultVersionId(), newVersion.getVersionId()); + } + + @Test + void testGetActorIdsForDefinition() throws IOException { + final UUID actorDefinitionId = jooqTestDbSetupHelper.getSourceDefinition().getSourceDefinitionId(); + + final UUID sourceActorId = jooqTestDbSetupHelper.getSource().getSourceId(); + final UUID workspaceId = jooqTestDbSetupHelper.getWorkspace().getWorkspaceId(); + final UUID organizationId = jooqTestDbSetupHelper.getOrganization().getOrganizationId(); + + final List actorIds = actorDefinitionService.getActorIdsForDefinition(actorDefinitionId); + assertEquals(List.of(new ActorWorkspaceOrganizationIds(sourceActorId, workspaceId, organizationId)), actorIds); + } + +} diff --git a/airbyte-data/src/test/java/io/airbyte/data/services/impls/jooq/ConnectorMetadataJooqHelperTest.java b/airbyte-data/src/test/java/io/airbyte/data/services/impls/jooq/ConnectorMetadataJooqHelperTest.java deleted file mode 100644 index ecb7b8f5146..00000000000 --- a/airbyte-data/src/test/java/io/airbyte/data/services/impls/jooq/ConnectorMetadataJooqHelperTest.java +++ /dev/null @@ -1,212 +0,0 @@ -/* - * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.data.services.impls.jooq; - -import static io.airbyte.featureflag.ContextKt.ANONYMOUS; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertTrue; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - -import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.version.Version; -import io.airbyte.config.ActorDefinitionBreakingChange; -import io.airbyte.config.ActorDefinitionVersion; -import io.airbyte.config.BreakingChangeScope; -import io.airbyte.config.BreakingChangeScope.ScopeType; -import io.airbyte.config.SourceConnection; -import io.airbyte.config.StandardSourceDefinition; -import io.airbyte.config.persistence.MockData; -import io.airbyte.data.exceptions.ConfigNotFoundException; -import io.airbyte.data.services.ConnectionService; -import io.airbyte.featureflag.FeatureFlagClient; -import io.airbyte.featureflag.TestClient; -import io.airbyte.featureflag.UseBreakingChangeScopes; -import io.airbyte.featureflag.Workspace; -import io.airbyte.test.utils.BaseConfigDatabaseTest; -import io.airbyte.validation.json.JsonValidationException; -import java.io.IOException; -import java.sql.SQLException; -import java.util.List; -import java.util.Set; -import java.util.UUID; -import java.util.stream.Stream; -import org.junit.jupiter.params.ParameterizedTest; -import org.junit.jupiter.params.provider.Arguments; -import org.junit.jupiter.params.provider.MethodSource; -import org.junit.jupiter.params.provider.ValueSource; - -public class ConnectorMetadataJooqHelperTest extends BaseConfigDatabaseTest { - - private static final UUID ACTOR_DEFINITION_ID = UUID.randomUUID(); - private static final String UPGRADE_IMAGE_TAG = "1.0.0"; - private final FeatureFlagClient featureFlagClient; - private final ConnectionService connectionService; - private final ConnectorMetadataJooqHelper connectorMetadataJooqHelper; - - public ConnectorMetadataJooqHelperTest() { - 
this.featureFlagClient = mock(TestClient.class); - this.connectionService = mock(ConnectionService.class); - this.connectorMetadataJooqHelper = new ConnectorMetadataJooqHelper(featureFlagClient, connectionService); - } - - private static Stream getBreakingChangesForUpgradeMethodSource() { - return Stream.of( - // Version increases - Arguments.of("0.0.1", "2.0.0", List.of("1.0.0", "2.0.0")), - Arguments.of("1.0.0", "1.0.1", List.of()), - Arguments.of("1.0.0", "1.1.0", List.of()), - Arguments.of("1.0.1", "1.1.0", List.of()), - Arguments.of("1.0.0", "2.0.1", List.of("2.0.0")), - Arguments.of("1.0.1", "2.0.0", List.of("2.0.0")), - Arguments.of("1.0.0", "2.0.1", List.of("2.0.0")), - Arguments.of("1.0.1", "2.0.1", List.of("2.0.0")), - Arguments.of("2.0.0", "2.0.0", List.of()), - // Version decreases - should never have breaking changes - Arguments.of("2.0.0", "0.0.1", List.of()), - Arguments.of("1.0.1", "1.0.0", List.of()), - Arguments.of("1.1.0", "1.0.0", List.of()), - Arguments.of("1.1.0", "1.0.1", List.of()), - Arguments.of("2.0.0", "1.0.0", List.of()), - Arguments.of("2.0.0", "1.0.1", List.of()), - Arguments.of("2.0.1", "1.0.0", List.of()), - Arguments.of("2.0.1", "1.0.1", List.of()), - Arguments.of("2.0.0", "2.0.0", List.of())); - } - - @ParameterizedTest - @MethodSource("getBreakingChangesForUpgradeMethodSource") - void testGetBreakingChangesForUpgradeWithActorDefBreakingChanges(final String initialImageTag, - final String upgradeImageTag, - final List expectedBreakingChangeVersions) { - final List expectedBreakingChangeVersionsForUpgrade = expectedBreakingChangeVersions.stream().map(Version::new).toList(); - final List breakingChangesForDef = List.of( - new ActorDefinitionBreakingChange() - .withActorDefinitionId(ACTOR_DEFINITION_ID) - .withVersion(new Version("1.0.0")) - .withMessage("Breaking change 1") - .withUpgradeDeadline("2021-01-01") - .withMigrationDocumentationUrl("https://docs.airbyte.io/migration-guides/1.0.0"), - new ActorDefinitionBreakingChange() - 
.withActorDefinitionId(ACTOR_DEFINITION_ID) - .withVersion(new Version("2.0.0")) - .withMessage("Breaking change 2") - .withUpgradeDeadline("2020-08-09") - .withMigrationDocumentationUrl("https://docs.airbyte.io/migration-guides/2.0.0")); - final List breakingChangesForUpgrade = - ConnectorMetadataJooqHelper.getBreakingChangesForUpgrade(initialImageTag, upgradeImageTag, breakingChangesForDef); - final List actualBreakingChangeVersionsForUpgrade = - breakingChangesForUpgrade.stream().map(ActorDefinitionBreakingChange::getVersion).toList(); - assertEquals(expectedBreakingChangeVersionsForUpgrade.size(), actualBreakingChangeVersionsForUpgrade.size()); - assertTrue(actualBreakingChangeVersionsForUpgrade.containsAll(expectedBreakingChangeVersionsForUpgrade)); - } - - @ParameterizedTest - @MethodSource("getBreakingChangesForUpgradeMethodSource") - void testGetBreakingChangesForUpgradeWithNoActorDefinitionBreakingChanges(final String initialImageTag, - final String upgradeImageTag, - final List expectedBreakingChangeVersions) { - final List breakingChangesForDef = List.of(); - assertTrue(ConnectorMetadataJooqHelper.getBreakingChangesForUpgrade(initialImageTag, upgradeImageTag, breakingChangesForDef).isEmpty()); - } - - @ParameterizedTest - @ValueSource(booleans = {true, false}) - void testGetActorsForNonBreakingUpgrade(final boolean useBreakingChangeScopes) - throws JsonValidationException, ConfigNotFoundException, IOException, SQLException { - when(featureFlagClient.boolVariation(UseBreakingChangeScopes.INSTANCE, new Workspace(ANONYMOUS))).thenReturn(useBreakingChangeScopes); - - // Setup and get setup info - final JooqTestDbSetupHelper jooqTestDbSetupHelper = new JooqTestDbSetupHelper(); - jooqTestDbSetupHelper.setupForVersionUpgradeTest(); - final ActorDefinitionVersion actorDefinitionVersion = jooqTestDbSetupHelper.getSourceDefinitionVersion(); - final UUID actorIdOnInitialVersion = jooqTestDbSetupHelper.getSource().getSourceId(); - - // Create a new version of the 
actor definition - final UUID newVersionId = UUID.randomUUID(); - final ActorDefinitionVersion newActorDefinitionVersion = Jsons.clone(actorDefinitionVersion) - .withVersionId(newVersionId).withDockerImageTag(UPGRADE_IMAGE_TAG); - - final Set actorsToUpgrade = - database.query(ctx -> connectorMetadataJooqHelper.getActorsToUpgrade(actorDefinitionVersion, newActorDefinitionVersion, List.of(), ctx)); - - // All actors should get upgraded - assertEquals(Set.of(actorIdOnInitialVersion), actorsToUpgrade); - } - - @ParameterizedTest - @ValueSource(booleans = {true, false}) - void testGetActorsForBreakingUpgrade(final Boolean useBreakingChangeScopes) - throws JsonValidationException, ConfigNotFoundException, IOException, SQLException { - when(featureFlagClient.boolVariation(UseBreakingChangeScopes.INSTANCE, new Workspace(ANONYMOUS))).thenReturn(useBreakingChangeScopes); - - // Setup and get setup info - final JooqTestDbSetupHelper jooqTestDbSetupHelper = new JooqTestDbSetupHelper(); - jooqTestDbSetupHelper.setupForVersionUpgradeTest(); - final StandardSourceDefinition actorDefinition = jooqTestDbSetupHelper.getSourceDefinition(); - final ActorDefinitionVersion actorDefinitionVersion = jooqTestDbSetupHelper.getSourceDefinitionVersion(); - final SourceConnection actorNotSyncingAffectedStream = jooqTestDbSetupHelper.getSource(); - - // Create a new version of the destination, with a stream-scoped breaking change - final ActorDefinitionBreakingChange streamScopedBreakingChange = - MockData.actorDefinitionBreakingChange(UPGRADE_IMAGE_TAG).withActorDefinitionId(actorDefinitionVersion.getActorDefinitionId()) - .withScopedImpact(List.of(new BreakingChangeScope().withScopeType(ScopeType.STREAM).withImpactedScopes(List.of("affected_stream")))); - final UUID newVersionId = UUID.randomUUID(); - final ActorDefinitionVersion newActorDefinitionVersion = Jsons.clone(actorDefinitionVersion) - .withVersionId(newVersionId).withDockerImageTag(UPGRADE_IMAGE_TAG); - - // Create a second actor 
that syncs an affected stream - final SourceConnection actorSyncingAffectedStream = jooqTestDbSetupHelper.createActorForActorDefinition(actorDefinition); - when(connectionService.actorSyncsAnyListedStream(actorSyncingAffectedStream.getSourceId(), List.of("affected_stream"))).thenReturn(true); - - final Set actorsToUpgrade = database.query(ctx -> connectorMetadataJooqHelper.getActorsToUpgrade(actorDefinitionVersion, - newActorDefinitionVersion, List.of(streamScopedBreakingChange), ctx)); - - if (useBreakingChangeScopes) { - // Unaffected actors will be upgraded - assertEquals(Set.of(actorNotSyncingAffectedStream.getSourceId()), actorsToUpgrade); - } else { - // No actors will be upgraded - assertEquals(Set.of(), actorsToUpgrade); - } - - } - - @ParameterizedTest - @ValueSource(booleans = {true, false}) - void testGetActorsAffectedByBreakingChange(final Boolean useBreakingChangeScopes) - throws JsonValidationException, ConfigNotFoundException, IOException, SQLException { - when(featureFlagClient.boolVariation(UseBreakingChangeScopes.INSTANCE, new Workspace(ANONYMOUS))).thenReturn(useBreakingChangeScopes); - - // Setup and get setup info - final JooqTestDbSetupHelper jooqTestDbSetupHelper = new JooqTestDbSetupHelper(); - jooqTestDbSetupHelper.setupForVersionUpgradeTest(); - final StandardSourceDefinition actorDefinition = jooqTestDbSetupHelper.getSourceDefinition(); - final ActorDefinitionVersion actorDefinitionVersion = jooqTestDbSetupHelper.getSourceDefinitionVersion(); - final UUID actorNotSyncingAffectedStreamId = jooqTestDbSetupHelper.getSource().getSourceId(); - - // Create a new version of the destination, with a stream-scoped breaking change - final ActorDefinitionBreakingChange streamScopedBreakingChange = - MockData.actorDefinitionBreakingChange(UPGRADE_IMAGE_TAG).withActorDefinitionId(actorDefinitionVersion.getActorDefinitionId()) - .withScopedImpact(List.of(new 
BreakingChangeScope().withScopeType(ScopeType.STREAM).withImpactedScopes(List.of("affected_stream")))); - - // Create a second actor that syncs an affected stream - final SourceConnection actorSyncingAffectedStream = jooqTestDbSetupHelper.createActorForActorDefinition(actorDefinition); - final UUID actorSyncingAffectedStreamId = actorSyncingAffectedStream.getSourceId(); - when(connectionService.actorSyncsAnyListedStream(actorSyncingAffectedStreamId, List.of("affected_stream"))).thenReturn(true); - - final Set actorsAffectedByBreakingChange = connectorMetadataJooqHelper.getActorsAffectedByBreakingChange( - Set.of(actorSyncingAffectedStream.getSourceId(), actorNotSyncingAffectedStreamId), streamScopedBreakingChange); - - if (useBreakingChangeScopes) { - // Affected actors depend on scopes - assertEquals(Set.of(actorSyncingAffectedStreamId), actorsAffectedByBreakingChange); - } else { - // All actors are affected by breaking change - assertEquals(Set.of(actorSyncingAffectedStreamId, actorNotSyncingAffectedStreamId), actorsAffectedByBreakingChange); - } - } - -} diff --git a/airbyte-data/src/test/java/io/airbyte/data/services/impls/jooq/DestinationServiceJooqImplTest.java b/airbyte-data/src/test/java/io/airbyte/data/services/impls/jooq/DestinationServiceJooqImplTest.java deleted file mode 100644 index b0be0a51e94..00000000000 --- a/airbyte-data/src/test/java/io/airbyte/data/services/impls/jooq/DestinationServiceJooqImplTest.java +++ /dev/null @@ -1,133 +0,0 @@ -/* - * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.data.services.impls.jooq; - -import static io.airbyte.featureflag.ContextKt.ANONYMOUS; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.eq; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.verifyNoMoreInteractions; -import static org.mockito.Mockito.when; - -import io.airbyte.commons.json.Jsons; -import io.airbyte.config.ActorDefinitionBreakingChange; -import io.airbyte.config.ActorDefinitionVersion; -import io.airbyte.config.BreakingChangeScope; -import io.airbyte.config.BreakingChangeScope.ScopeType; -import io.airbyte.config.DestinationConnection; -import io.airbyte.config.SourceConnection; -import io.airbyte.config.StandardDestinationDefinition; -import io.airbyte.config.persistence.MockData; -import io.airbyte.config.secrets.SecretsRepositoryReader; -import io.airbyte.config.secrets.SecretsRepositoryWriter; -import io.airbyte.data.exceptions.ConfigNotFoundException; -import io.airbyte.data.services.ConnectionService; -import io.airbyte.data.services.SecretPersistenceConfigService; -import io.airbyte.featureflag.FeatureFlagClient; -import io.airbyte.featureflag.HeartbeatMaxSecondsBetweenMessages; -import io.airbyte.featureflag.SourceDefinition; -import io.airbyte.featureflag.TestClient; -import io.airbyte.featureflag.UseBreakingChangeScopes; -import io.airbyte.featureflag.Workspace; -import io.airbyte.test.utils.BaseConfigDatabaseTest; -import io.airbyte.validation.json.JsonValidationException; -import java.io.IOException; -import java.util.List; -import java.util.UUID; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.params.ParameterizedTest; -import org.junit.jupiter.params.provider.ValueSource; - -public class DestinationServiceJooqImplTest extends BaseConfigDatabaseTest { - - private static final String UPGRADE_IMAGE_TAG = "0.0.2"; - 
private DestinationServiceJooqImpl destinationServiceJooqImpl; - private FeatureFlagClient featureFlagClient; - private SourceServiceJooqImpl sourceServiceJooqImpl; - private ConnectionService connectionService; - - @BeforeEach - void setup() { - this.featureFlagClient = mock(TestClient.class); - when(featureFlagClient.stringVariation(eq(HeartbeatMaxSecondsBetweenMessages.INSTANCE), any(SourceDefinition.class))).thenReturn("3600"); - - final SecretsRepositoryReader secretsRepositoryReader = mock(SecretsRepositoryReader.class); - final SecretsRepositoryWriter secretsRepositoryWriter = mock(SecretsRepositoryWriter.class); - final SecretPersistenceConfigService secretPersistenceConfigService = mock(SecretPersistenceConfigService.class); - connectionService = mock(ConnectionService.class); - this.destinationServiceJooqImpl = new DestinationServiceJooqImpl(database, - featureFlagClient, - secretsRepositoryReader, - secretsRepositoryWriter, - secretPersistenceConfigService, connectionService); - // We don't actually need this service in the test, we just use it for extra validating. - // I'd take it out to keep it 'clean', but really this should be happening in a service - // That handles both sources and destinations. They're in the same table that we modify, - // So it's safer to have this to ensure we're modifying only the destination as expected. 
- this.sourceServiceJooqImpl = new SourceServiceJooqImpl(database, - featureFlagClient, - secretsRepositoryReader, - secretsRepositoryWriter, - secretPersistenceConfigService, connectionService); - when(featureFlagClient.boolVariation(UseBreakingChangeScopes.INSTANCE, new Workspace(ANONYMOUS))).thenReturn(true); - } - - @ParameterizedTest - @ValueSource(booleans = {true, false}) - void testScopedImpactAffectsBreakingChangeImpact(final boolean actorIsInBreakingChangeScope) - throws IOException, JsonValidationException, ConfigNotFoundException { - when(featureFlagClient.boolVariation(UseBreakingChangeScopes.INSTANCE, new Workspace(ANONYMOUS))).thenReturn(true); - - // Setup and get setup info - final JooqTestDbSetupHelper jooqTestDbSetupHelper = new JooqTestDbSetupHelper(); - jooqTestDbSetupHelper.setupForVersionUpgradeTest(); - final SourceConnection source = jooqTestDbSetupHelper.getSource(); - final DestinationConnection destination = jooqTestDbSetupHelper.getDestination(); - final StandardDestinationDefinition destinationDefinition = jooqTestDbSetupHelper.getDestinationDefinition(); - - // Create a new version of the destination, with a stream-scoped breaking change - final ActorDefinitionBreakingChange streamScopedBreakingChange = - MockData.actorDefinitionBreakingChange(UPGRADE_IMAGE_TAG).withActorDefinitionId(destinationDefinition.getDestinationDefinitionId()) - .withScopedImpact(List.of(new BreakingChangeScope().withScopeType(ScopeType.STREAM).withImpactedScopes(List.of("affected_stream")))); - final UUID newVersionId = UUID.randomUUID(); - final ActorDefinitionVersion newDestinationVersion = Jsons.clone(jooqTestDbSetupHelper.getDestinationDefinitionVersion()) - .withVersionId(newVersionId).withDockerImageTag(UPGRADE_IMAGE_TAG); - - // Write new version - // TODO: after uncoupling the transaction, this test will move to ApplyDefinitionsHelper. 
- // When we do that we can mock `actorIsInBreakingChangeScope` instead of the further down - // actorSyncsAnyListedStream - when(connectionService.actorSyncsAnyListedStream(destination.getDestinationId(), List.of("affected_stream"))) - .thenReturn(actorIsInBreakingChangeScope); - - destinationServiceJooqImpl.writeConnectorMetadata(destinationDefinition, newDestinationVersion, List.of(streamScopedBreakingChange)); - verify(featureFlagClient).boolVariation(UseBreakingChangeScopes.INSTANCE, new Workspace(ANONYMOUS)); - verify(connectionService).actorSyncsAnyListedStream(destination.getDestinationId(), List.of("affected_stream")); - - // Get the destination definition and actor versions after the upgrade - final UUID destinationDefinitionDefaultVersionIdAfterUpgrade = - destinationServiceJooqImpl.getStandardDestinationDefinition(destinationDefinition.getDestinationDefinitionId()).getDefaultVersionId(); - final UUID destinationDefaultVersionIdAfterUpgrade = - destinationServiceJooqImpl.getDestinationConnection(destination.getDestinationId()).getDefaultVersionId(); - - // The destination definition should always get the new version - assertEquals(newVersionId, destinationDefinitionDefaultVersionIdAfterUpgrade); - // The source actor's version should not get messed with - assertEquals(jooqTestDbSetupHelper.getInitialSourceDefaultVersionId(), - sourceServiceJooqImpl.getSourceConnection(source.getSourceId()).getDefaultVersionId()); - - if (actorIsInBreakingChangeScope) { - // Assert actor is held back - assertEquals(jooqTestDbSetupHelper.getInitialDestinationDefaultVersionId(), destinationDefaultVersionIdAfterUpgrade); - } else { - // Assert actor is upgraded to the new version - assertEquals(newVersionId, destinationDefaultVersionIdAfterUpgrade); - } - verifyNoMoreInteractions(featureFlagClient, connectionService); - } - -} diff --git a/airbyte-data/src/test/java/io/airbyte/data/services/impls/jooq/JooqTestDbSetupHelper.java 
b/airbyte-data/src/test/java/io/airbyte/data/services/impls/jooq/JooqTestDbSetupHelper.java index 89fa4f2d622..1eadc9bbc63 100644 --- a/airbyte-data/src/test/java/io/airbyte/data/services/impls/jooq/JooqTestDbSetupHelper.java +++ b/airbyte-data/src/test/java/io/airbyte/data/services/impls/jooq/JooqTestDbSetupHelper.java @@ -15,6 +15,7 @@ import io.airbyte.config.ActorDefinitionVersion; import io.airbyte.config.DestinationConnection; import io.airbyte.config.Geography; +import io.airbyte.config.Organization; import io.airbyte.config.SourceConnection; import io.airbyte.config.StandardDestinationDefinition; import io.airbyte.config.StandardSourceDefinition; @@ -23,7 +24,10 @@ import io.airbyte.config.secrets.SecretsRepositoryReader; import io.airbyte.config.secrets.SecretsRepositoryWriter; import io.airbyte.data.exceptions.ConfigNotFoundException; +import io.airbyte.data.helpers.ActorDefinitionVersionUpdater; +import io.airbyte.data.services.ActorDefinitionService; import io.airbyte.data.services.ConnectionService; +import io.airbyte.data.services.ScopedConfigurationService; import io.airbyte.data.services.SecretPersistenceConfigService; import io.airbyte.featureflag.HeartbeatMaxSecondsBetweenMessages; import io.airbyte.featureflag.SourceDefinition; @@ -42,11 +46,15 @@ public class JooqTestDbSetupHelper extends BaseConfigDatabaseTest { private final SourceServiceJooqImpl sourceServiceJooqImpl; private final DestinationServiceJooqImpl destinationServiceJooqImpl; private final WorkspaceServiceJooqImpl workspaceServiceJooqImpl; + private final OrganizationServiceJooqImpl organizationServiceJooqImpl; private final TestClient featureFlagClient; + private final UUID ORGANIZATION_ID = UUID.randomUUID(); private final UUID WORKSPACE_ID = UUID.randomUUID(); private final UUID SOURCE_DEFINITION_ID = UUID.randomUUID(); private final UUID DESTINATION_DEFINITION_ID = UUID.randomUUID(); @Getter + private Organization organization; + @Getter private StandardWorkspace workspace; 
@Getter private StandardSourceDefinition sourceDefinition; @@ -71,29 +79,40 @@ public JooqTestDbSetupHelper() { final SecretsRepositoryWriter secretsRepositoryWriter = mock(SecretsRepositoryWriter.class); final SecretPersistenceConfigService secretPersistenceConfigService = mock(SecretPersistenceConfigService.class); final ConnectionService connectionService = mock(ConnectionService.class); + final ScopedConfigurationService scopedConfigurationService = mock(ScopedConfigurationService.class); when(featureFlagClient.stringVariation(eq(HeartbeatMaxSecondsBetweenMessages.INSTANCE), any(SourceDefinition.class))).thenReturn("3600"); + final ActorDefinitionService actorDefinitionService = new ActorDefinitionServiceJooqImpl(database); + final ActorDefinitionVersionUpdater actorDefinitionVersionUpdater = + new ActorDefinitionVersionUpdater(featureFlagClient, connectionService, actorDefinitionService, scopedConfigurationService); this.destinationServiceJooqImpl = new DestinationServiceJooqImpl(database, featureFlagClient, secretsRepositoryReader, secretsRepositoryWriter, secretPersistenceConfigService, - connectionService); + connectionService, + actorDefinitionVersionUpdater); this.sourceServiceJooqImpl = new SourceServiceJooqImpl(database, featureFlagClient, secretsRepositoryReader, secretsRepositoryWriter, secretPersistenceConfigService, - connectionService); + connectionService, + actorDefinitionVersionUpdater); this.workspaceServiceJooqImpl = new WorkspaceServiceJooqImpl(database, featureFlagClient, secretsRepositoryReader, secretsRepositoryWriter, secretPersistenceConfigService); + this.organizationServiceJooqImpl = new OrganizationServiceJooqImpl(database); } public void setupForVersionUpgradeTest() throws IOException, JsonValidationException, ConfigNotFoundException { + // Create org + organization = createBaseOrganization(); + organizationServiceJooqImpl.writeOrganization(organization); + // Create workspace workspace = createBaseWorkspace(); 
workspaceServiceJooqImpl.writeStandardWorkspaceNoSecrets(createBaseWorkspace()); @@ -172,9 +191,19 @@ private SourceConnection createBaseSourceActor() { .withName("source"); } + private Organization createBaseOrganization() { + return new Organization() + .withOrganizationId(ORGANIZATION_ID) + .withName("organization") + .withEmail("org@airbyte.io") + .withPba(false) + .withOrgLevelBilling(false); + } + private StandardWorkspace createBaseWorkspace() { return new StandardWorkspace() .withWorkspaceId(WORKSPACE_ID) + .withOrganizationId(ORGANIZATION_ID) .withName("default") .withSlug("workspace-slug") .withInitialSetupComplete(false) diff --git a/airbyte-data/src/test/java/io/airbyte/data/services/impls/jooq/SourceServiceJooqImplTest.java b/airbyte-data/src/test/java/io/airbyte/data/services/impls/jooq/SourceServiceJooqImplTest.java deleted file mode 100644 index 306462d2973..00000000000 --- a/airbyte-data/src/test/java/io/airbyte/data/services/impls/jooq/SourceServiceJooqImplTest.java +++ /dev/null @@ -1,133 +0,0 @@ -/* - * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.data.services.impls.jooq; - -import static io.airbyte.featureflag.ContextKt.ANONYMOUS; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.eq; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.verifyNoMoreInteractions; -import static org.mockito.Mockito.when; - -import io.airbyte.commons.json.Jsons; -import io.airbyte.config.ActorDefinitionBreakingChange; -import io.airbyte.config.ActorDefinitionVersion; -import io.airbyte.config.BreakingChangeScope; -import io.airbyte.config.BreakingChangeScope.ScopeType; -import io.airbyte.config.DestinationConnection; -import io.airbyte.config.SourceConnection; -import io.airbyte.config.StandardSourceDefinition; -import io.airbyte.config.persistence.MockData; -import io.airbyte.config.secrets.SecretsRepositoryReader; -import io.airbyte.config.secrets.SecretsRepositoryWriter; -import io.airbyte.data.exceptions.ConfigNotFoundException; -import io.airbyte.data.services.ConnectionService; -import io.airbyte.data.services.SecretPersistenceConfigService; -import io.airbyte.featureflag.FeatureFlagClient; -import io.airbyte.featureflag.HeartbeatMaxSecondsBetweenMessages; -import io.airbyte.featureflag.SourceDefinition; -import io.airbyte.featureflag.TestClient; -import io.airbyte.featureflag.UseBreakingChangeScopes; -import io.airbyte.featureflag.Workspace; -import io.airbyte.test.utils.BaseConfigDatabaseTest; -import io.airbyte.validation.json.JsonValidationException; -import java.io.IOException; -import java.util.List; -import java.util.UUID; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.params.ParameterizedTest; -import org.junit.jupiter.params.provider.ValueSource; - -public class SourceServiceJooqImplTest extends BaseConfigDatabaseTest { - - private static final String UPGRADE_IMAGE_TAG = "0.0.2"; - private 
SourceServiceJooqImpl sourceServiceJooqImpl; - private FeatureFlagClient featureFlagClient; - private DestinationServiceJooqImpl destinationServiceJooqImpl; - private ConnectionService connectionService; - - @BeforeEach - void setup() { - this.featureFlagClient = mock(TestClient.class); - when(featureFlagClient.stringVariation(eq(HeartbeatMaxSecondsBetweenMessages.INSTANCE), any(SourceDefinition.class))).thenReturn("3600"); - - final SecretsRepositoryReader secretsRepositoryReader = mock(SecretsRepositoryReader.class); - final SecretsRepositoryWriter secretsRepositoryWriter = mock(SecretsRepositoryWriter.class); - final SecretPersistenceConfigService secretPersistenceConfigService = mock(SecretPersistenceConfigService.class); - connectionService = mock(ConnectionService.class); - this.sourceServiceJooqImpl = new SourceServiceJooqImpl(database, - featureFlagClient, - secretsRepositoryReader, - secretsRepositoryWriter, - secretPersistenceConfigService, connectionService); - // We don't actually need this service in the test, we just use it for extra validating. - // I'd take it out to keep it 'clean', but really this should be happening in a service - // That handles both destinations and sources. They're in the same table that we modify, - // So it's safer to have this to ensure we're modifying only the source as expected. 
- this.destinationServiceJooqImpl = new DestinationServiceJooqImpl(database, - featureFlagClient, - secretsRepositoryReader, - secretsRepositoryWriter, - secretPersistenceConfigService, connectionService); - when(featureFlagClient.boolVariation(UseBreakingChangeScopes.INSTANCE, new Workspace(ANONYMOUS))).thenReturn(true); - when(featureFlagClient.stringVariation(eq(HeartbeatMaxSecondsBetweenMessages.INSTANCE), any(SourceDefinition.class))).thenReturn("3600"); - } - - @ParameterizedTest - @ValueSource(booleans = {true, false}) - void testScopedImpactAffectsBreakingChangeImpact(final boolean actorIsInBreakingChangeScope) - throws IOException, JsonValidationException, ConfigNotFoundException { - when(featureFlagClient.boolVariation(UseBreakingChangeScopes.INSTANCE, new Workspace(ANONYMOUS))).thenReturn(true); - - // Setup and get setup info - final JooqTestDbSetupHelper jooqTestDbSetupHelper = new JooqTestDbSetupHelper(); - jooqTestDbSetupHelper.setupForVersionUpgradeTest(); - final DestinationConnection destination = jooqTestDbSetupHelper.getDestination(); - final SourceConnection source = jooqTestDbSetupHelper.getSource(); - final StandardSourceDefinition sourceDefinition = jooqTestDbSetupHelper.getSourceDefinition(); - - // Create a new version of the source, with a stream-scoped breaking change - final ActorDefinitionBreakingChange streamScopedBreakingChange = - MockData.actorDefinitionBreakingChange(UPGRADE_IMAGE_TAG).withActorDefinitionId(sourceDefinition.getSourceDefinitionId()) - .withScopedImpact(List.of(new BreakingChangeScope().withScopeType(ScopeType.STREAM).withImpactedScopes(List.of("affected_stream")))); - final UUID newVersionId = UUID.randomUUID(); - final ActorDefinitionVersion newSourceVersion = Jsons.clone(jooqTestDbSetupHelper.getSourceDefinitionVersion()) - .withVersionId(newVersionId).withDockerImageTag(UPGRADE_IMAGE_TAG); - - // Write new version - // TODO: after uncoupling the transaction, this test will move to ApplyDefinitionsHelper. 
- // When we do that we can mock `actorIsInBreakingChangeScope` instead of the further down - // actorSyncsAnyListedStream - when(connectionService.actorSyncsAnyListedStream(source.getSourceId(), List.of("affected_stream"))).thenReturn(actorIsInBreakingChangeScope); - - sourceServiceJooqImpl.writeConnectorMetadata(sourceDefinition, newSourceVersion, List.of(streamScopedBreakingChange)); - verify(featureFlagClient).boolVariation(UseBreakingChangeScopes.INSTANCE, new Workspace(ANONYMOUS)); - verify(connectionService).actorSyncsAnyListedStream(source.getSourceId(), List.of("affected_stream")); - - // Get the source definition and actor versions after the upgrade - final UUID sourceDefinitionDefaultVersionIdAfterUpgrade = - sourceServiceJooqImpl.getStandardSourceDefinition(sourceDefinition.getSourceDefinitionId()).getDefaultVersionId(); - final UUID sourceDefaultVersionIdAfterUpgrade = - sourceServiceJooqImpl.getSourceConnection(source.getSourceId()).getDefaultVersionId(); - - // The source definition should always get the new version - assertEquals(newVersionId, sourceDefinitionDefaultVersionIdAfterUpgrade); - // The destination actor's version should not get messed with - assertEquals(jooqTestDbSetupHelper.getInitialDestinationDefaultVersionId(), - destinationServiceJooqImpl.getDestinationConnection(destination.getDestinationId()).getDefaultVersionId()); - - if (actorIsInBreakingChangeScope) { - // Assert actor is held back - assertEquals(jooqTestDbSetupHelper.getInitialSourceDefaultVersionId(), sourceDefaultVersionIdAfterUpgrade); - } else { - // Assert actor is upgraded to the new version - assertEquals(newVersionId, sourceDefaultVersionIdAfterUpgrade); - } - verifyNoMoreInteractions(connectionService); - } - -} diff --git a/airbyte-data/src/test/kotlin/io/airbyte/data/helpers/ActorDefinitionVersionUpdaterTest.kt b/airbyte-data/src/test/kotlin/io/airbyte/data/helpers/ActorDefinitionVersionUpdaterTest.kt new file mode 100644 index 00000000000..3021f5c99b4 --- 
/dev/null +++ b/airbyte-data/src/test/kotlin/io/airbyte/data/helpers/ActorDefinitionVersionUpdaterTest.kt @@ -0,0 +1,753 @@ +package io.airbyte.data.helpers + +import io.airbyte.commons.version.Version +import io.airbyte.config.ActorDefinitionBreakingChange +import io.airbyte.config.ActorDefinitionVersion +import io.airbyte.config.ActorType +import io.airbyte.config.BreakingChangeScope +import io.airbyte.config.ConfigOriginType +import io.airbyte.config.ConfigResourceType +import io.airbyte.config.ConfigScopeType +import io.airbyte.config.ScopedConfiguration +import io.airbyte.config.persistence.MockData +import io.airbyte.data.services.ActorDefinitionService +import io.airbyte.data.services.ConnectionService +import io.airbyte.data.services.ScopedConfigurationService +import io.airbyte.data.services.shared.ActorWorkspaceOrganizationIds +import io.airbyte.data.services.shared.ConfigScopeMapWithId +import io.airbyte.data.services.shared.ConnectorVersionKey +import io.airbyte.featureflag.ANONYMOUS +import io.airbyte.featureflag.TestClient +import io.airbyte.featureflag.UseBreakingChangeScopes +import io.airbyte.featureflag.Workspace +import io.mockk.clearAllMocks +import io.mockk.every +import io.mockk.mockk +import io.mockk.slot +import io.mockk.verify +import io.mockk.verifyAll +import org.junit.jupiter.api.Assertions.assertEquals +import org.junit.jupiter.api.Assertions.assertNotNull +import org.junit.jupiter.api.Assertions.assertTrue +import org.junit.jupiter.api.BeforeEach +import org.junit.jupiter.api.Test +import org.junit.jupiter.api.assertThrows +import org.junit.jupiter.params.ParameterizedTest +import org.junit.jupiter.params.provider.Arguments +import org.junit.jupiter.params.provider.MethodSource +import org.junit.jupiter.params.provider.ValueSource +import java.util.Optional +import java.util.UUID +import java.util.stream.Stream + +internal class ActorDefinitionVersionUpdaterTest { + private val connectionService = mockk<ConnectionService>() + private val 
actorDefinitionService = mockk<ActorDefinitionService>(relaxed = true) + private val scopedConfigurationService = mockk<ScopedConfigurationService>(relaxed = true) + private val featureFlagClient = mockk<TestClient>() + private val actorDefinitionVersionUpdater = + ActorDefinitionVersionUpdater( + featureFlagClient, + connectionService, + actorDefinitionService, + scopedConfigurationService, + ) + + companion object { + val ACTOR_DEFINITION_ID: UUID = UUID.randomUUID() + + val DEFAULT_VERSION: ActorDefinitionVersion = + ActorDefinitionVersion() + .withVersionId(UUID.randomUUID()) + .withActorDefinitionId(ACTOR_DEFINITION_ID) + .withDockerImageTag("1.0.0") + + val NEW_VERSION: ActorDefinitionVersion = + ActorDefinitionVersion() + .withVersionId(UUID.randomUUID()) + .withActorDefinitionId(ACTOR_DEFINITION_ID) + .withDockerImageTag("2.0.0") + + val STREAM_SCOPED_BREAKING_CHANGE: ActorDefinitionBreakingChange = + MockData.actorDefinitionBreakingChange(NEW_VERSION.dockerImageTag) + .withActorDefinitionId(ACTOR_DEFINITION_ID) + .withScopedImpact( + listOf( + BreakingChangeScope().withScopeType(BreakingChangeScope.ScopeType.STREAM).withImpactedScopes(listOf("affected_stream")), + ), + ) + + @JvmStatic + fun getBreakingChangesForUpgradeMethodSource(): Stream<Arguments> { + return Stream.of( + // Version increases + Arguments.of("0.0.1", "2.0.0", listOf("1.0.0", "2.0.0")), + Arguments.of("1.0.0", "1.0.1", listOf()), + Arguments.of("1.0.0", "1.1.0", listOf()), + Arguments.of("1.0.1", "1.1.0", listOf()), + Arguments.of("1.0.0", "2.0.1", listOf("2.0.0")), + Arguments.of("1.0.1", "2.0.0", listOf("2.0.0")), + Arguments.of("1.0.0", "2.0.1", listOf("2.0.0")), + Arguments.of("1.0.1", "2.0.1", listOf("2.0.0")), + // Version decreases - should never have breaking changes + Arguments.of("2.0.0", "2.0.0", listOf()), + Arguments.of("2.0.0", "0.0.1", listOf()), + Arguments.of("1.0.1", "1.0.0", listOf()), + Arguments.of("1.1.0", "1.0.0", listOf()), + Arguments.of("1.1.0", "1.0.1", listOf()), + Arguments.of("2.0.0", 
"1.0.1", listOf()), + Arguments.of("2.0.1", "1.0.0", listOf()), + Arguments.of("2.0.1", "1.0.1", listOf()), + Arguments.of("2.0.0", "2.0.0", listOf()), + ) + } + + @JvmStatic + fun getBreakingChangesAfterVersionMethodSource(): List<Arguments> { + return listOf( + Arguments.of("0.1.0", listOf("1.0.0", "2.0.0", "3.0.0")), + Arguments { arrayOf("1.0.0", listOf("2.0.0", "3.0.0")) }, + Arguments { arrayOf("2.0.0", listOf("3.0.0")) }, + Arguments { arrayOf("3.0.0", listOf()) }, + Arguments { arrayOf("4.0.0", listOf()) }, + ) + } + } + + @BeforeEach + fun reset() { + clearAllMocks() + } + + @ParameterizedTest + @ValueSource(booleans = [true, false]) + fun testScopedImpactAffectsBreakingChangeImpact(actorIsInBreakingChangeScope: Boolean) { + every { + featureFlagClient.boolVariation(UseBreakingChangeScopes, Workspace(ANONYMOUS)) + } returns true + + every { + actorDefinitionService.getDefaultVersionForActorDefinitionIdOptional(ACTOR_DEFINITION_ID) + } returns Optional.of(DEFAULT_VERSION) + + val actorId = UUID.randomUUID() + val workspaceId = UUID.randomUUID() + val organizationId = UUID.randomUUID() + + every { + actorDefinitionService.getActorsWithDefaultVersionId(DEFAULT_VERSION.versionId) + } returns setOf(actorId) + + every { + actorDefinitionService.getActorIdsForDefinition(ACTOR_DEFINITION_ID) + } returns listOf(ActorWorkspaceOrganizationIds(actorId, workspaceId, organizationId)) + + every { + connectionService.actorSyncsAnyListedStream(actorId, listOf("affected_stream")) + } returns actorIsInBreakingChangeScope + + val configsToWriteSlot = slot<List<ScopedConfiguration>>() + every { + scopedConfigurationService.insertScopedConfigurations(capture(configsToWriteSlot)) + } returns listOf() + + actorDefinitionVersionUpdater.updateDefaultVersion( + ACTOR_DEFINITION_ID, + NEW_VERSION, + listOf(STREAM_SCOPED_BREAKING_CHANGE), + ) + + verifyAll { + featureFlagClient.boolVariation(UseBreakingChangeScopes, Workspace(ANONYMOUS)) + 
actorDefinitionService.getDefaultVersionForActorDefinitionIdOptional(ACTOR_DEFINITION_ID) + actorDefinitionService.getActorsWithDefaultVersionId(DEFAULT_VERSION.versionId) + actorDefinitionService.getActorIdsForDefinition(ACTOR_DEFINITION_ID) + connectionService.actorSyncsAnyListedStream(actorId, listOf("affected_stream")) + scopedConfigurationService.getScopedConfigurations( + ConnectorVersionKey, + ConfigResourceType.ACTOR_DEFINITION, + ACTOR_DEFINITION_ID, + listOf( + ConfigScopeMapWithId( + actorId, + mapOf( + ConfigScopeType.ACTOR to actorId, + ConfigScopeType.WORKSPACE to workspaceId, + ConfigScopeType.ORGANIZATION to organizationId, + ), + ), + ), + ) + + // Actor definition should always get the new version + actorDefinitionService.updateActorDefinitionDefaultVersionId(ACTOR_DEFINITION_ID, NEW_VERSION.versionId) + + if (actorIsInBreakingChangeScope) { + // Assert actor is not updated + actorDefinitionService.setActorDefaultVersions(listOf(), NEW_VERSION.versionId) + + // Assert pins are created + scopedConfigurationService.insertScopedConfigurations(any()) + assertEquals(1, configsToWriteSlot.captured.size) + + val capturedConfig = configsToWriteSlot.captured[0] + assertEquals( + ScopedConfiguration() + .withKey(ConnectorVersionKey.key) + .withValue(DEFAULT_VERSION.versionId.toString()) + .withResourceType(ConfigResourceType.ACTOR_DEFINITION) + .withResourceId(ACTOR_DEFINITION_ID) + .withScopeType(ConfigScopeType.ACTOR) + .withScopeId(actorId) + .withOriginType(ConfigOriginType.BREAKING_CHANGE) + .withOrigin(STREAM_SCOPED_BREAKING_CHANGE.version.serialize()), + capturedConfig.withId(null), + ) + } else { + // Assert actor is upgraded to the new version + actorDefinitionService.setActorDefaultVersions(listOf(actorId), NEW_VERSION.versionId) + } + } + + if (!actorIsInBreakingChangeScope) { + verify(exactly = 0) { + scopedConfigurationService.insertScopedConfigurations(any()) + } + } + } + + @ParameterizedTest + @ValueSource(booleans = [true, false]) + 
fun testGetActorsForNonBreakingUpgrade(useBreakingChangeScopes: Boolean) { + every { + featureFlagClient.boolVariation(UseBreakingChangeScopes, Workspace(ANONYMOUS)) + } returns useBreakingChangeScopes + + val actorIdOnInitialVersion = UUID.randomUUID() + every { + actorDefinitionService.getActorsWithDefaultVersionId(DEFAULT_VERSION.versionId) + } returns setOf(actorIdOnInitialVersion) + + val actorsToUpgrade = actorDefinitionVersionUpdater.getActorsToUpgrade(DEFAULT_VERSION, listOf()) + + // All actors should get upgraded + assertEquals(setOf(actorIdOnInitialVersion), actorsToUpgrade) + } + + @ParameterizedTest + @ValueSource(booleans = [true, false]) + fun testGetActorsForBreakingUpgrade(useBreakingChangeScopes: Boolean) { + every { + featureFlagClient.boolVariation(UseBreakingChangeScopes, Workspace(ANONYMOUS)) + } returns useBreakingChangeScopes + + // Set up an actor that syncs an affected stream + val actorNotSyncingAffectedStream = UUID.randomUUID() + val actorSyncingAffectedStream = UUID.randomUUID() + + every { + actorDefinitionService.getActorsWithDefaultVersionId(DEFAULT_VERSION.versionId) + } returns setOf(actorSyncingAffectedStream, actorNotSyncingAffectedStream) + + every { + connectionService.actorSyncsAnyListedStream(actorSyncingAffectedStream, listOf("affected_stream")) + } returns true + + every { + connectionService.actorSyncsAnyListedStream(actorNotSyncingAffectedStream, listOf("affected_stream")) + } returns false + + val actorsToUpgrade = + actorDefinitionVersionUpdater.getActorsToUpgrade( + DEFAULT_VERSION, + listOf(STREAM_SCOPED_BREAKING_CHANGE), + ) + + if (useBreakingChangeScopes) { + // Unaffected actors will be upgraded + assertEquals(setOf(actorNotSyncingAffectedStream), actorsToUpgrade) + } else { + // No actors will be upgraded + assertEquals(setOf(), actorsToUpgrade) + } + + verifyAll { + featureFlagClient.boolVariation(UseBreakingChangeScopes, Workspace(ANONYMOUS)) + 
actorDefinitionService.getActorsWithDefaultVersionId(DEFAULT_VERSION.versionId) + if (useBreakingChangeScopes) { + connectionService.actorSyncsAnyListedStream(actorSyncingAffectedStream, listOf("affected_stream")) + connectionService.actorSyncsAnyListedStream(actorNotSyncingAffectedStream, listOf("affected_stream")) + } + } + } + + @ParameterizedTest + @ValueSource(booleans = [true, false]) + fun testGetActorsAffectedByBreakingChange(useBreakingChangeScopes: Boolean) { + every { + featureFlagClient.boolVariation(UseBreakingChangeScopes, Workspace(ANONYMOUS)) + } returns useBreakingChangeScopes + + val actorSyncingAffectedStreamId = UUID.randomUUID() + val actorNotSyncingAffectedStreamId = UUID.randomUUID() + + every { + connectionService.actorSyncsAnyListedStream(actorNotSyncingAffectedStreamId, listOf("affected_stream")) + } returns false + + every { + connectionService.actorSyncsAnyListedStream(actorSyncingAffectedStreamId, listOf("affected_stream")) + } returns true + + val actorsAffectedByBreakingChange = + actorDefinitionVersionUpdater.getActorsAffectedByBreakingChange( + setOf(actorSyncingAffectedStreamId, actorNotSyncingAffectedStreamId), + STREAM_SCOPED_BREAKING_CHANGE, + ) + + if (useBreakingChangeScopes) { + // Affected actors depend on scopes + assertEquals(setOf(actorSyncingAffectedStreamId), actorsAffectedByBreakingChange) + } else { + // All actors are affected by breaking change + assertEquals(setOf(actorSyncingAffectedStreamId, actorNotSyncingAffectedStreamId), actorsAffectedByBreakingChange) + } + } + + @ParameterizedTest + @MethodSource("getBreakingChangesForUpgradeMethodSource") + fun testGetBreakingChangesForUpgradeWithActorDefBreakingChanges( + initialImageTag: String, + upgradeImageTag: String, + expectedBreakingChangeVersions: List, + ) { + val expectedBreakingChangeVersionsForUpgrade = expectedBreakingChangeVersions.stream().map { version: String -> Version(version) }.toList() + val breakingChangesForDef = + listOf( + 
ActorDefinitionBreakingChange() + .withActorDefinitionId(ACTOR_DEFINITION_ID) + .withVersion(Version("1.0.0")) + .withMessage("Breaking change 1") + .withUpgradeDeadline("2021-01-01") + .withMigrationDocumentationUrl("https://docs.airbyte.io/migration-guides/1.0.0"), + ActorDefinitionBreakingChange() + .withActorDefinitionId(ACTOR_DEFINITION_ID) + .withVersion(Version("2.0.0")) + .withMessage("Breaking change 2") + .withUpgradeDeadline("2020-08-09") + .withMigrationDocumentationUrl("https://docs.airbyte.io/migration-guides/2.0.0"), + ) + val breakingChangesForUpgrade = + actorDefinitionVersionUpdater.getBreakingChangesForUpgrade(initialImageTag, upgradeImageTag, breakingChangesForDef) + val actualBreakingChangeVersionsForUpgrade = + breakingChangesForUpgrade.stream().map { obj: ActorDefinitionBreakingChange -> obj.version } + .toList() + assertEquals(expectedBreakingChangeVersionsForUpgrade.size, actualBreakingChangeVersionsForUpgrade.size) + assertTrue(actualBreakingChangeVersionsForUpgrade.containsAll(expectedBreakingChangeVersionsForUpgrade)) + } + + @ParameterizedTest + @MethodSource("getBreakingChangesForUpgradeMethodSource") + fun testGetBreakingChangesForUpgradeWithNoActorDefinitionBreakingChanges( + initialImageTag: String, + upgradeImageTag: String, + expectedBreakingChangeVersions: List, + ) { + val breakingChangesForDef = listOf() + assertTrue(actorDefinitionVersionUpdater.getBreakingChangesForUpgrade(initialImageTag, upgradeImageTag, breakingChangesForDef).isEmpty()) + } + + @Test + fun testUpgradeActorVersionWithBCPin() { + val actorId = UUID.randomUUID() + val actorDefinitionId = UUID.randomUUID() + val newVersionId = UUID.randomUUID() + + val breakingChangePinConfig = + ScopedConfiguration() + .withId(UUID.randomUUID()) + .withOriginType(ConfigOriginType.BREAKING_CHANGE) + + every { + scopedConfigurationService.getScopedConfiguration( + ConnectorVersionKey.key, + ConfigResourceType.ACTOR_DEFINITION, + actorDefinitionId, + ConfigScopeType.ACTOR, + 
actorId, + ) + } returns Optional.of(breakingChangePinConfig) + + actorDefinitionVersionUpdater.upgradeActorVersion(actorId, actorDefinitionId, newVersionId, ActorType.SOURCE) + + verifyAll { + scopedConfigurationService.getScopedConfiguration( + ConnectorVersionKey.key, + ConfigResourceType.ACTOR_DEFINITION, + actorDefinitionId, + ConfigScopeType.ACTOR, + actorId, + ) + + scopedConfigurationService.deleteScopedConfiguration(breakingChangePinConfig.id) + actorDefinitionService.setActorDefaultVersion(actorId, newVersionId) + } + } + + @Test + fun testUpgradeActorVersionWithManualPinThrowsError() { + val actorId = UUID.randomUUID() + val actorDefinitionId = UUID.randomUUID() + val newVersionId = UUID.randomUUID() + + val manualPinConfig = + ScopedConfiguration() + .withId(UUID.randomUUID()) + .withOriginType(ConfigOriginType.USER) + + every { + scopedConfigurationService.getScopedConfiguration( + ConnectorVersionKey.key, + ConfigResourceType.ACTOR_DEFINITION, + actorDefinitionId, + ConfigScopeType.ACTOR, + actorId, + ) + } returns Optional.of(manualPinConfig) + + assertThrows { + actorDefinitionVersionUpdater.upgradeActorVersion(actorId, actorDefinitionId, newVersionId, ActorType.SOURCE) + } + + verifyAll { + scopedConfigurationService.getScopedConfiguration( + ConnectorVersionKey.key, + ConfigResourceType.ACTOR_DEFINITION, + actorDefinitionId, + ConfigScopeType.ACTOR, + actorId, + ) + } + } + + @Test + fun testUpgradeActorVersionWithNoPins() { + val actorId = UUID.randomUUID() + val actorDefinitionId = UUID.randomUUID() + val newVersionId = UUID.randomUUID() + + every { + scopedConfigurationService.getScopedConfiguration( + ConnectorVersionKey.key, + ConfigResourceType.ACTOR_DEFINITION, + actorDefinitionId, + ConfigScopeType.ACTOR, + actorId, + ) + } returns Optional.empty() + + actorDefinitionVersionUpdater.upgradeActorVersion(actorId, actorDefinitionId, newVersionId, ActorType.SOURCE) + + verifyAll { + scopedConfigurationService.getScopedConfiguration( + 
ConnectorVersionKey.key, + ConfigResourceType.ACTOR_DEFINITION, + actorDefinitionId, + ConfigScopeType.ACTOR, + actorId, + ) + + actorDefinitionService.setActorDefaultVersion(actorId, newVersionId) + } + } + + @Test + fun testProcessBreakingChangesForUpgrade() { + every { + featureFlagClient.boolVariation(UseBreakingChangeScopes, Workspace(ANONYMOUS)) + } returns true + + val pinnedActorId = UUID.randomUUID() + val withImpactedStreamActorId = UUID.randomUUID() + val noImpactedStreamActorId = UUID.randomUUID() + val noImpactedStreamActorId2 = UUID.randomUUID() + + val actors = + listOf( + ActorWorkspaceOrganizationIds(pinnedActorId, UUID.randomUUID(), UUID.randomUUID()), + ActorWorkspaceOrganizationIds(withImpactedStreamActorId, UUID.randomUUID(), UUID.randomUUID()), + ActorWorkspaceOrganizationIds(noImpactedStreamActorId, UUID.randomUUID(), UUID.randomUUID()), + ActorWorkspaceOrganizationIds(noImpactedStreamActorId2, UUID.randomUUID(), UUID.randomUUID()), + ) + + val currentVersion = DEFAULT_VERSION + val limitedScopeBreakingChange = STREAM_SCOPED_BREAKING_CHANGE + val breakingChange = MockData.actorDefinitionBreakingChange("3.0.0") + val breakingChangesForUpgrade = listOf(limitedScopeBreakingChange, breakingChange) + + every { + actorDefinitionService.getActorIdsForDefinition(ACTOR_DEFINITION_ID) + } returns actors + + val scopeMaps = actors.map { idsToConfigScopeMap(it) } + + // Setup: as we process the breaking changes, the pinned actors returned will include actors pinned due to the prior breaking change + every { + scopedConfigurationService.getScopedConfigurations( + ConnectorVersionKey, + ConfigResourceType.ACTOR_DEFINITION, + ACTOR_DEFINITION_ID, + scopeMaps, + ) + } returnsMany + listOf( + mapOf( + pinnedActorId to ScopedConfiguration(), + ), + mapOf( + pinnedActorId to ScopedConfiguration(), + withImpactedStreamActorId to ScopedConfiguration(), + ), + mapOf( + pinnedActorId to ScopedConfiguration(), + withImpactedStreamActorId to ScopedConfiguration(), + 
noImpactedStreamActorId to ScopedConfiguration(), + noImpactedStreamActorId2 to ScopedConfiguration(), + ), + ) + + // Setup: For the limited-impact breaking change, only mock that the targeted actor is syncing the affected stream + every { + connectionService.actorSyncsAnyListedStream(any(), any()) + } returns false + + every { + connectionService.actorSyncsAnyListedStream(withImpactedStreamActorId, listOf("affected_stream")) + } returns true + + // Collect written configs to perform assertions + val capturedConfigsToWrite = mutableListOf>() + every { + scopedConfigurationService.insertScopedConfigurations(capture(capturedConfigsToWrite)) + } returns listOf() + + // Act: call method under test + actorDefinitionVersionUpdater.processBreakingChangesForUpgrade( + currentVersion, + breakingChangesForUpgrade, + ) + + verify { + actorDefinitionService.getActorIdsForDefinition(ACTOR_DEFINITION_ID) + connectionService.actorSyncsAnyListedStream(noImpactedStreamActorId, listOf("affected_stream")) + } + + // Assert: we get pinned actors and insert new pins for each processed breaking change (2) + verify(exactly = 2) { + scopedConfigurationService.getScopedConfigurations( + ConnectorVersionKey, + ConfigResourceType.ACTOR_DEFINITION, + ACTOR_DEFINITION_ID, + scopeMaps, + ) + scopedConfigurationService.insertScopedConfigurations(any()) + } + + assertEquals(2, capturedConfigsToWrite.size) + + // Assert: limited-impact breaking change should pin the actor with the affected stream + val configsForScopedBC = capturedConfigsToWrite[0] + assertEquals(1, configsForScopedBC.size) + val expectedConfig1 = buildBreakingChangeScopedConfig(withImpactedStreamActorId, limitedScopeBreakingChange) + assertEquals(expectedConfig1, configsForScopedBC[0].withId(null)) + + // Assert: breaking change should pin all remaining unpinned actors + val configsForGlobalBC = capturedConfigsToWrite[1].sortedBy { it.scopeId } + assertEquals(2, configsForGlobalBC.size) + val sortedExpectedIds = 
listOf(noImpactedStreamActorId, noImpactedStreamActorId2).sorted() + val expectedConfig2 = buildBreakingChangeScopedConfig(sortedExpectedIds[0], breakingChange) + assertEquals(expectedConfig2, configsForGlobalBC[0].withId(null)) + + val expectedConfig3 = buildBreakingChangeScopedConfig(sortedExpectedIds[1], breakingChange) + assertEquals(expectedConfig3, configsForGlobalBC[1].withId(null)) + } + + @Test + fun testGetUpgradeCandidates() { + val pinnedActorId = UUID.randomUUID() + val pinnedActorId2 = UUID.randomUUID() + val unpinnedActorId = UUID.randomUUID() + val unpinnedActorId2 = UUID.randomUUID() + val configScopeMaps = + listOf( + ConfigScopeMapWithId(pinnedActorId, mapOf()), + ConfigScopeMapWithId(pinnedActorId2, mapOf()), + ConfigScopeMapWithId(unpinnedActorId, mapOf()), + ConfigScopeMapWithId(unpinnedActorId2, mapOf()), + ) + + every { + scopedConfigurationService.getScopedConfigurations( + ConnectorVersionKey, + ConfigResourceType.ACTOR_DEFINITION, + ACTOR_DEFINITION_ID, + configScopeMaps, + ) + } returns + mapOf( + pinnedActorId to ScopedConfiguration(), + pinnedActorId2 to ScopedConfiguration(), + ) + + val upgradeCandidates = actorDefinitionVersionUpdater.getUpgradeCandidates(ACTOR_DEFINITION_ID, configScopeMaps) + + assertEquals(2, upgradeCandidates.size) + assertEquals(setOf(unpinnedActorId, unpinnedActorId2), upgradeCandidates) + + verifyAll { + scopedConfigurationService.getScopedConfigurations( + ConnectorVersionKey, + ConfigResourceType.ACTOR_DEFINITION, + ACTOR_DEFINITION_ID, + configScopeMaps, + ) + } + } + + @Test + fun testCreateBreakingChangePinsForActors() { + val actorIds = setOf(UUID.randomUUID(), UUID.randomUUID()) + + val scopedConfigsCapture = slot>() + + every { + scopedConfigurationService.insertScopedConfigurations(capture(scopedConfigsCapture)) + } returns listOf() + + actorDefinitionVersionUpdater.createBreakingChangePinsForActors(actorIds, DEFAULT_VERSION, STREAM_SCOPED_BREAKING_CHANGE) + + verify(exactly = 1) { + 
scopedConfigurationService.insertScopedConfigurations(any()) + } + + assertEquals(2, scopedConfigsCapture.captured.size) + for (actorId in actorIds) { + val capturedConfig = scopedConfigsCapture.captured.find { it.scopeId == actorId } + assertNotNull(capturedConfig) + assertNotNull(capturedConfig!!.id) + + val expectedConfig = buildBreakingChangeScopedConfig(actorId, STREAM_SCOPED_BREAKING_CHANGE) + assertEquals(expectedConfig, capturedConfig.withId(null)) + } + } + + @ParameterizedTest + @MethodSource("getBreakingChangesAfterVersionMethodSource") + fun testGetBreakingChangesAfterVersion( + versionTag: String, + expectedBreakingChanges: List, + ) { + val breakingChanges = + listOf( + MockData.actorDefinitionBreakingChange("1.0.0"), + MockData.actorDefinitionBreakingChange("2.0.0"), + MockData.actorDefinitionBreakingChange("3.0.0"), + ) + + val actualBreakingChanges = + actorDefinitionVersionUpdater.getBreakingChangesAfterVersion( + versionTag, + breakingChanges, + ).map { it.version.serialize() }.toList() + + assertEquals(expectedBreakingChanges, actualBreakingChanges) + } + + @Test + fun testGetBreakingChangesAfterVersionWithNoBreakingChanges() { + val actualBreakingChanges = + actorDefinitionVersionUpdater.getBreakingChangesAfterVersion( + "1.0.0", + listOf(), + ) + + assertEquals(listOf(), actualBreakingChanges) + } + + @Test + fun testProcessBreakingChangePinRollbacks() { + val oldBC = MockData.actorDefinitionBreakingChange("1.0.0") + val currentVersionBC = MockData.actorDefinitionBreakingChange("2.0.0") + val rolledBackBC = MockData.actorDefinitionBreakingChange("3.0.0") + + val allBreakingChanges = listOf(oldBC, currentVersionBC, rolledBackBC) + val idsPinnedForV3 = listOf(UUID.randomUUID(), UUID.randomUUID()) + + every { + scopedConfigurationService.listScopedConfigurationsWithOrigins( + ConnectorVersionKey.key, + ConfigResourceType.ACTOR_DEFINITION, + ACTOR_DEFINITION_ID, + ConfigOriginType.BREAKING_CHANGE, + listOf(rolledBackBC.version.serialize()), + ) + 
} returns idsPinnedForV3.map { buildBreakingChangeScopedConfig(it, rolledBackBC).withId(it) } + + actorDefinitionVersionUpdater.processBreakingChangePinRollbacks(ACTOR_DEFINITION_ID, NEW_VERSION, allBreakingChanges) + + verifyAll { + scopedConfigurationService.listScopedConfigurationsWithOrigins( + ConnectorVersionKey.key, + ConfigResourceType.ACTOR_DEFINITION, + ACTOR_DEFINITION_ID, + ConfigOriginType.BREAKING_CHANGE, + listOf(rolledBackBC.version.serialize()), + ) + + scopedConfigurationService.deleteScopedConfigurations(idsPinnedForV3) + } + } + + @Test + fun testProcessBreakingChangePinRollbacksWithNoBCsToRollBack() { + val breakingChanges = + listOf( + MockData.actorDefinitionBreakingChange("1.0.0"), + MockData.actorDefinitionBreakingChange("2.0.0"), + ) + + actorDefinitionVersionUpdater.processBreakingChangePinRollbacks(ACTOR_DEFINITION_ID, NEW_VERSION, breakingChanges) + + verify(exactly = 0) { + scopedConfigurationService.listScopedConfigurationsWithOrigins(any(), any(), any(), any(), any()) + scopedConfigurationService.deleteScopedConfigurations(any()) + } + } + + private fun buildBreakingChangeScopedConfig( + actorId: UUID, + breakingChange: ActorDefinitionBreakingChange, + ): ScopedConfiguration { + return ScopedConfiguration() + .withKey(ConnectorVersionKey.key) + .withValue(DEFAULT_VERSION.versionId.toString()) + .withResourceType(ConfigResourceType.ACTOR_DEFINITION) + .withResourceId(ACTOR_DEFINITION_ID) + .withScopeType(ConfigScopeType.ACTOR) + .withScopeId(actorId) + .withOriginType(ConfigOriginType.BREAKING_CHANGE) + .withOrigin(breakingChange.version.serialize()) + } + + private fun idsToConfigScopeMap(awoIds: ActorWorkspaceOrganizationIds): ConfigScopeMapWithId { + return ConfigScopeMapWithId( + awoIds.actorId, + mapOf( + ConfigScopeType.ACTOR to awoIds.actorId, + ConfigScopeType.WORKSPACE to awoIds.workspaceId, + ConfigScopeType.ORGANIZATION to awoIds.organizationId, + ), + ) + } +} diff --git 
a/airbyte-data/src/test/kotlin/io/airbyte/data/repositories/AbstractConfigRepositoryTest.kt b/airbyte-data/src/test/kotlin/io/airbyte/data/repositories/AbstractConfigRepositoryTest.kt index 7c2c1f043f4..4654789a0d5 100644 --- a/airbyte-data/src/test/kotlin/io/airbyte/data/repositories/AbstractConfigRepositoryTest.kt +++ b/airbyte-data/src/test/kotlin/io/airbyte/data/repositories/AbstractConfigRepositoryTest.kt @@ -39,6 +39,10 @@ abstract class AbstractConfigRepositoryTest>( fun setupBase() { container.start() + // occasionally, the container is not yet accepting connections even though start() has returned. + // this createConnection() call will block until the container is ready to accept connections. + container.createConnection("").use { } + // set the micronaut datasource properties to match our container we started up context = ApplicationContext.run( diff --git a/airbyte-data/src/test/kotlin/io/airbyte/data/repositories/ConnectionTimelineEventRepositoryTest.kt b/airbyte-data/src/test/kotlin/io/airbyte/data/repositories/ConnectionTimelineEventRepositoryTest.kt new file mode 100644 index 00000000000..0706e33ef3d --- /dev/null +++ b/airbyte-data/src/test/kotlin/io/airbyte/data/repositories/ConnectionTimelineEventRepositoryTest.kt @@ -0,0 +1,41 @@ +package io.airbyte.data.repositories + +import io.airbyte.data.repositories.entities.ConnectionTimelineEvent +import io.airbyte.db.instance.configs.jooq.generated.Keys +import io.airbyte.db.instance.configs.jooq.generated.Tables +import io.micronaut.test.extensions.junit5.annotation.MicronautTest +import org.junit.jupiter.api.BeforeAll +import org.junit.jupiter.api.Test +import java.util.UUID + +@MicronautTest +internal class ConnectionTimelineEventRepositoryTest : AbstractConfigRepositoryTest( + ConnectionTimelineEventRepository::class, +) { + companion object { + @BeforeAll + @JvmStatic + fun setup() { + // so we don't have to deal with making users as well + jooqDslContext.alterTable( + 
Tables.CONNECTION_TIMELINE_EVENT, + ).dropForeignKey(Keys.CONNECTION_TIMELINE_EVENT__CONNECTION_TIMELINE_EVENT_CONNECTION_ID_FKEY.constraint()).execute() + } + } + + @Test + fun `test db insertion`() { + val eventId = java.util.UUID.randomUUID() + val event = + ConnectionTimelineEvent( + connectionId = UUID.randomUUID(), + eventType = "Test", + ) + + val saved = repository.save(event) + assert(repository.count() == 1L) + + val persistedEvent = repository.findById(saved.id!!).get() + assert(persistedEvent.connectionId == event.connectionId) + } +} diff --git a/airbyte-data/src/test/kotlin/io/airbyte/data/repositories/PermissionRepositoryTest.kt b/airbyte-data/src/test/kotlin/io/airbyte/data/repositories/PermissionRepositoryTest.kt index 3fbe72171c0..31c5e9d0375 100644 --- a/airbyte-data/src/test/kotlin/io/airbyte/data/repositories/PermissionRepositoryTest.kt +++ b/airbyte-data/src/test/kotlin/io/airbyte/data/repositories/PermissionRepositoryTest.kt @@ -9,6 +9,7 @@ import org.junit.jupiter.api.Assertions.assertEquals import org.junit.jupiter.api.Assertions.assertNotNull import org.junit.jupiter.api.Assertions.assertNull import org.junit.jupiter.api.BeforeAll +import org.junit.jupiter.api.BeforeEach import org.junit.jupiter.api.Test import java.util.UUID @@ -18,12 +19,18 @@ internal class PermissionRepositoryTest : AbstractConfigRepositoryTest(UserInvitationRepository::class) { companion object { const val INVITE_CODE = "some-code" + val EXPIRES_AT = OffsetDateTime.now(ZoneOffset.UTC).plusDays(7).truncatedTo(java.time.temporal.ChronoUnit.SECONDS) + + val userInvitation = + UserInvitation( + inviteCode = INVITE_CODE, + inviterUserId = UUID.randomUUID(), + invitedEmail = "invited@airbyte.io", + scopeId = UUID.randomUUID(), + scopeType = ScopeType.workspace, + permissionType = PermissionType.workspace_admin, + status = InvitationStatus.pending, + expiresAt = EXPIRES_AT, + ) @BeforeAll @JvmStatic @@ -49,40 +65,216 @@ internal class UserInvitationRepositoryTest : 
AbstractConfigRepositoryTest + val actual = actualWorkspaceInvites.find { it.id == expected.id } + assert(actual != null) + assertInvitationEquals(expected, actual!!) + } + + // for each organization invitation found, make sure that it has a match by calling assertInvitationEquals + expectedOrgMatches.forEach { expected -> + val actual = actualOrgInvites.find { it.id == expected.id } + assert(actual != null) + assertInvitationEquals(expected, actual!!) + } + } + + @Test + fun `test find by status and scope type and scope id and invited email`() { + val workspaceId = UUID.randomUUID() + val otherWorkspaceId = UUID.randomUUID() + val matchingStatus = InvitationStatus.pending + val otherStatus = InvitationStatus.accepted + val matchingEmail = "matching@airbyte.io" + val otherEmail = "other@airbyte.io" + + val matchingInvite = + userInvitation.copy( + id = UUID.randomUUID(), + inviteCode = UUID.randomUUID().toString(), + scopeId = workspaceId, + status = matchingStatus, + invitedEmail = matchingEmail, + ) + repository.save(matchingInvite) + + val anotherMatchingInvite = + matchingInvite.copy( + id = UUID.randomUUID(), + inviteCode = UUID.randomUUID().toString(), + ) + repository.save(anotherMatchingInvite) + + val wrongEmailInvite = + matchingInvite.copy( + id = UUID.randomUUID(), + inviteCode = UUID.randomUUID().toString(), + invitedEmail = otherEmail, + ) + repository.save(wrongEmailInvite) + + val wrongWorkspaceInvite = + matchingInvite.copy( + id = UUID.randomUUID(), + inviteCode = UUID.randomUUID().toString(), + scopeId = otherWorkspaceId, + ) + repository.save(wrongWorkspaceInvite) + + val wrongStatusInvite = + matchingInvite.copy( + id = UUID.randomUUID(), + inviteCode = UUID.randomUUID().toString(), + status = otherStatus, + ) + repository.save(wrongStatusInvite) + + val wrongEverythingInvite = + userInvitation.copy( + id = UUID.randomUUID(), + inviteCode = UUID.randomUUID().toString(), + invitedEmail = otherEmail, + scopeId = otherWorkspaceId, + status = 
otherStatus, + ) + repository.save(wrongEverythingInvite) + + val expectedMatches = listOf(matchingInvite, anotherMatchingInvite) + val actualMatches = + repository.findByStatusAndScopeTypeAndScopeIdAndInvitedEmail( + matchingStatus, + EntityScopeType.workspace, + workspaceId, + matchingEmail, + ) + + // for each invitation found, make sure that it has a match by calling assertInvitationEquals + expectedMatches.forEach { expected -> + val actual = actualMatches.find { it.id == expected.id } + assert(actual != null) + assertInvitationEquals(expected, actual!!) + } + } } diff --git a/airbyte-data/src/test/kotlin/io/airbyte/data/services/impls/data/PermissionServiceDataImplTest.kt b/airbyte-data/src/test/kotlin/io/airbyte/data/services/impls/data/PermissionServiceDataImplTest.kt new file mode 100644 index 00000000000..20d304ef3e5 --- /dev/null +++ b/airbyte-data/src/test/kotlin/io/airbyte/data/services/impls/data/PermissionServiceDataImplTest.kt @@ -0,0 +1,595 @@ +package io.airbyte.data.services.impls.data + +import io.airbyte.config.Permission +import io.airbyte.config.Permission.PermissionType +import io.airbyte.data.repositories.PermissionRepository +import io.airbyte.data.services.PermissionRedundantException +import io.airbyte.data.services.RemoveLastOrgAdminPermissionException +import io.airbyte.data.services.WorkspaceService +import io.airbyte.data.services.impls.data.mappers.toEntity +import io.mockk.Runs +import io.mockk.confirmVerified +import io.mockk.every +import io.mockk.just +import io.mockk.mockk +import io.mockk.verify +import org.junit.jupiter.api.Assertions.assertEquals +import org.junit.jupiter.api.BeforeEach +import org.junit.jupiter.api.Nested +import org.junit.jupiter.api.Test +import org.junit.jupiter.api.assertThrows +import java.util.Optional +import java.util.UUID + +class PermissionServiceDataImplTest { + private val testUserId = UUID.randomUUID() + + private lateinit var workspaceService: WorkspaceService + private lateinit var 
permissionRepository: PermissionRepository + private lateinit var permissionService: PermissionServiceDataImpl + + @BeforeEach + fun setUp() { + workspaceService = mockk() + permissionRepository = mockk() + permissionService = PermissionServiceDataImpl(workspaceService, permissionRepository) + } + + @Nested + inner class GetPermissionsForUser { + @Test + fun `getPermissionsForUser should fetch from repository`() { + val permissions = + listOf( + Permission().apply { + permissionId = UUID.randomUUID() + userId = testUserId + workspaceId = UUID.randomUUID() + permissionType = PermissionType.WORKSPACE_ADMIN + }, + Permission().apply { + permissionId = UUID.randomUUID() + userId = testUserId + organizationId = UUID.randomUUID() + permissionType = PermissionType.ORGANIZATION_ADMIN + }, + ) + + every { permissionRepository.findByUserId(testUserId) } returns permissions.map { it.toEntity() } + + val result = permissionService.getPermissionsForUser(testUserId) + + assertEquals(result.toSet(), permissions.toSet()) + + verify { permissionRepository.findByUserId(testUserId) } + confirmVerified(permissionRepository) + } + } + + @Nested + inner class DeletePermission { + @Test + fun `deletePermission should delete from repository if not the last org-admin`() { + val permId = UUID.randomUUID() + val orgId = UUID.randomUUID() + + val permissionToDelete = + Permission().apply { + permissionId = permId + userId = testUserId + organizationId = orgId + permissionType = PermissionType.ORGANIZATION_ADMIN + } + + every { permissionRepository.findByIdIn(listOf(permId)) } returns listOf(permissionToDelete.toEntity()) + + every { permissionRepository.findByOrganizationId(orgId) } returns + listOf( + permissionToDelete, + Permission().apply { + permissionId = UUID.randomUUID() + userId = UUID.randomUUID() + organizationId = orgId + permissionType = PermissionType.ORGANIZATION_ADMIN // another org admin exists for a different user + }, + ).map { it.toEntity() } + + every { 
permissionRepository.deleteById(permId) } just Runs + + permissionService.deletePermission(permId) + + verify { permissionRepository.findByIdIn(listOf(permId)) } + verify { permissionRepository.findByOrganizationId(orgId) } + verify { permissionRepository.deleteById(permId) } + confirmVerified(permissionRepository) + } + + @Test + fun `deletePermission should throw when deleting last org admin`() { + val permId = UUID.randomUUID() + val orgId = UUID.randomUUID() + + val permissionToDelete = + Permission().apply { + permissionId = permId + userId = testUserId + organizationId = orgId + permissionType = PermissionType.ORGANIZATION_ADMIN + } + + every { permissionRepository.findByIdIn(listOf(permId)) } returns listOf(permissionToDelete.toEntity()) + + every { permissionRepository.findByOrganizationId(orgId) } returns + listOf( + permissionToDelete, + Permission().apply { + permissionId = UUID.randomUUID() + userId = UUID.randomUUID() + organizationId = orgId + permissionType = PermissionType.ORGANIZATION_EDITOR // only other perm in the org is editor, so throw + }, + ).map { it.toEntity() } + + assertThrows { permissionService.deletePermission(permId) } + + verify { permissionRepository.findByIdIn(listOf(permId)) } + verify { permissionRepository.findByOrganizationId(orgId) } + verify(exactly = 0) { permissionRepository.deleteById(any()) } + confirmVerified(permissionRepository) + } + } + + @Nested + inner class DeletePermissions { + @Test + fun `deletePermissions should delete from repository when not deleting the last org admin`() { + val permId1 = UUID.randomUUID() + val permId2 = UUID.randomUUID() + val orgId = UUID.randomUUID() + + val permissionToDelete1 = + Permission().apply { + permissionId = permId1 + userId = testUserId + organizationId = orgId + permissionType = PermissionType.ORGANIZATION_ADMIN + } + + val permissionToDelete2 = + Permission().apply { + permissionId = permId2 + userId = testUserId + organizationId = orgId + permissionType = 
PermissionType.ORGANIZATION_EDITOR + } + + every { permissionRepository.findByIdIn(listOf(permId1, permId2)) } returns + listOf( + permissionToDelete1.toEntity(), + permissionToDelete2.toEntity(), + ) + + every { permissionRepository.findByOrganizationId(orgId) } returns + listOf( + permissionToDelete1, + permissionToDelete2, + Permission().apply { + permissionId = UUID.randomUUID() + userId = UUID.randomUUID() + organizationId = orgId + permissionType = PermissionType.ORGANIZATION_ADMIN // another org admin exists for a different user, so don't throw + }, + ).map { it.toEntity() } + + every { permissionRepository.deleteByIdIn(listOf(permId1, permId2)) } just Runs + + permissionService.deletePermissions(listOf(permId1, permId2)) + + verify { permissionRepository.findByIdIn(listOf(permId1, permId2)) } + verify { permissionRepository.findByOrganizationId(orgId) } + verify { permissionRepository.deleteByIdIn(listOf(permId1, permId2)) } + confirmVerified(permissionRepository) + } + + @Test + fun `deletePermissions should throw when deleting the last org admin`() { + val permId1 = UUID.randomUUID() + val permId2 = UUID.randomUUID() + val orgId1 = UUID.randomUUID() + val orgId2 = UUID.randomUUID() + + val permissionToDelete1 = + Permission().apply { + permissionId = permId1 + userId = testUserId + organizationId = orgId1 + permissionType = PermissionType.ORGANIZATION_ADMIN // not the last admin in org 1 + } + + val permissionToDelete2 = + Permission().apply { + permissionId = permId2 + userId = testUserId + organizationId = orgId2 + permissionType = PermissionType.ORGANIZATION_ADMIN // is the last admin in org 2, should throw + } + + every { permissionRepository.findByIdIn(listOf(permId1, permId2)) } returns + listOf( + permissionToDelete1.toEntity(), + permissionToDelete2.toEntity(), + ) + + every { permissionRepository.findByOrganizationId(orgId1) } returns + listOf( + permissionToDelete1, + Permission().apply { + permissionId = UUID.randomUUID() + userId = 
UUID.randomUUID() + organizationId = orgId1 + permissionType = PermissionType.ORGANIZATION_ADMIN // another admin exists in org 1, so this doesn't cause the throw + }, + ).map { it.toEntity() } + + every { permissionRepository.findByOrganizationId(orgId2) } returns + listOf( + permissionToDelete2, + Permission().apply { + permissionId = UUID.randomUUID() + userId = UUID.randomUUID() + organizationId = orgId2 + permissionType = PermissionType.ORGANIZATION_EDITOR // only other perm in org 2 is editor, so this causes a throw + }, + ).map { it.toEntity() } + + assertThrows { permissionService.deletePermissions(listOf(permId1, permId2)) } + + verify { permissionRepository.findByIdIn(listOf(permId1, permId2)) } + verify { permissionRepository.findByOrganizationId(orgId1) } + verify { permissionRepository.findByOrganizationId(orgId2) } + verify(exactly = 0) { permissionRepository.deleteByIdIn(any()) } + confirmVerified(permissionRepository) + } + } + + @Nested + inner class CreatePermission { + @Test + fun `createPermission should save permission when no redundant permissions exist`() { + val existingOrgPermission = + Permission().apply { + permissionId = UUID.randomUUID() + userId = testUserId + organizationId = UUID.randomUUID() + permissionType = PermissionType.ORGANIZATION_EDITOR + } + val existingPermissionDifferentOrg = + Permission().apply { + permissionId = UUID.randomUUID() + userId = testUserId + organizationId = UUID.randomUUID() + permissionType = PermissionType.ORGANIZATION_ADMIN // different org than new permission + } + val newPermission = + Permission().apply { + permissionId = UUID.randomUUID() + userId = testUserId + workspaceId = UUID.randomUUID() + permissionType = PermissionType.WORKSPACE_ADMIN // higher than existing org permission, not redundant + } + + every { permissionRepository.findByUserId(testUserId) } returns + listOf( + existingOrgPermission.toEntity(), + existingPermissionDifferentOrg.toEntity(), + ) + every { 
workspaceService.getOrganizationIdFromWorkspaceId(newPermission.workspaceId) } returns + Optional.of( + existingOrgPermission.organizationId, + ) + every { permissionRepository.save(newPermission.toEntity()) } returns newPermission.toEntity() + + val result = permissionService.createPermission(newPermission) + + assertEquals(result, newPermission) + + verify { permissionRepository.findByUserId(testUserId) } + verify(exactly = 1) { permissionRepository.save(newPermission.toEntity()) } + confirmVerified(permissionRepository) + } + + @Test + fun `createPermission should throw when redundant permission is detected`() { + val existingOrgPermission = + Permission().apply { + permissionId = UUID.randomUUID() + userId = testUserId + organizationId = UUID.randomUUID() + permissionType = PermissionType.ORGANIZATION_ADMIN + } + val newPermission = + Permission().apply { + permissionId = UUID.randomUUID() + userId = testUserId + workspaceId = UUID.randomUUID() + permissionType = PermissionType.WORKSPACE_ADMIN // equal to existing org permission, redundant + } + + // new permission is for a workspace that belongs to the existing permission's org + every { workspaceService.getOrganizationIdFromWorkspaceId(newPermission.workspaceId) } returns + Optional.of( + existingOrgPermission.organizationId, + ) + every { permissionRepository.findByUserId(testUserId) } returns listOf(existingOrgPermission.toEntity()) + + assertThrows { permissionService.createPermission(newPermission) } + + // nothing saved or deleted + verify(exactly = 0) { permissionRepository.save(any()) } + verify(exactly = 0) { permissionRepository.deleteById(any()) } + } + + @Test + fun `createPermission should work for instance admin permissions`() { + val newPermission = + Permission().apply { + permissionId = UUID.randomUUID() + userId = testUserId + permissionType = PermissionType.INSTANCE_ADMIN + } + + every { permissionRepository.findByUserId(testUserId) } returns emptyList() + every { 
permissionRepository.save(newPermission.toEntity()) } returns newPermission.toEntity() + + val result = permissionService.createPermission(newPermission) + + assertEquals(result, newPermission) + + verify { permissionRepository.findByUserId(testUserId) } + verify(exactly = 1) { permissionRepository.save(newPermission.toEntity()) } + confirmVerified(permissionRepository) + } + } + + @Nested + inner class UpdatePermission { + @Nested + inner class UpdateWorkspacePermission { + @Test + fun `updatePermission should update workspace permission when not redundant`() { + val existingOrgPermission = + Permission().apply { + permissionId = UUID.randomUUID() + userId = testUserId + organizationId = UUID.randomUUID() + permissionType = PermissionType.ORGANIZATION_READER // lower than updated permission, so nothing redundant + } + val existingPermissionDifferentOrg = + Permission().apply { + permissionId = UUID.randomUUID() + userId = testUserId + organizationId = UUID.randomUUID() + permissionType = PermissionType.ORGANIZATION_ADMIN // different org than new permission, so nothing redundant + } + val workspacePermissionPreUpdate = + Permission().apply { + permissionId = UUID.randomUUID() + userId = testUserId + workspaceId = UUID.randomUUID() + permissionType = PermissionType.WORKSPACE_ADMIN + } + val updatedWorkspacePermission = + Permission().apply { + permissionId = workspacePermissionPreUpdate.permissionId + userId = workspacePermissionPreUpdate.userId + workspaceId = workspacePermissionPreUpdate.workspaceId + permissionType = PermissionType.WORKSPACE_EDITOR // update from admin to editor + } + + every { permissionRepository.findByUserId(testUserId) } returns + listOf( + existingOrgPermission.toEntity(), + existingPermissionDifferentOrg.toEntity(), + workspacePermissionPreUpdate.toEntity(), + ) + every { workspaceService.getOrganizationIdFromWorkspaceId(workspacePermissionPreUpdate.workspaceId) } returns + Optional.of( + existingOrgPermission.organizationId, + ) + every { 
permissionRepository.update(updatedWorkspacePermission.toEntity()) } returns updatedWorkspacePermission.toEntity() + + permissionService.updatePermission(updatedWorkspacePermission) + + verify { permissionRepository.findByUserId(testUserId) } + verify(exactly = 1) { permissionRepository.update(updatedWorkspacePermission.toEntity()) } + confirmVerified(permissionRepository) + } + + @Test + fun `updatePermission should delete updated workspace permission when made redundant`() { + val existingOrgPermission = + Permission().apply { + permissionId = UUID.randomUUID() + userId = testUserId + organizationId = UUID.randomUUID() + permissionType = PermissionType.ORGANIZATION_EDITOR // higher than updated permission, so update becomes redundant + } + val workspacePermissionPreUpdate = + Permission().apply { + permissionId = UUID.randomUUID() + userId = testUserId + workspaceId = UUID.randomUUID() + permissionType = PermissionType.WORKSPACE_ADMIN + } + val workspacePermissionUpdated = + Permission().apply { + permissionId = workspacePermissionPreUpdate.permissionId + userId = workspacePermissionPreUpdate.userId + workspaceId = workspacePermissionPreUpdate.workspaceId + permissionType = PermissionType.WORKSPACE_READER // update from admin to reader, permission is now redundant + } + + every { permissionRepository.findByUserId(testUserId) } returns + listOf( + existingOrgPermission.toEntity(), + workspacePermissionPreUpdate.toEntity(), + ) + every { workspaceService.getOrganizationIdFromWorkspaceId(workspacePermissionPreUpdate.workspaceId) } returns + Optional.of( + existingOrgPermission.organizationId, + ) + every { permissionRepository.update(workspacePermissionUpdated.toEntity()) } returns workspacePermissionUpdated.toEntity() + every { permissionRepository.deleteById(workspacePermissionUpdated.permissionId) } just Runs + + permissionService.updatePermission(workspacePermissionUpdated) + + verify { permissionRepository.findByUserId(testUserId) } + verify(exactly = 0) { 
permissionRepository.update(any()) } // no update because deleted instead + verify(exactly = 1) { permissionRepository.deleteById(workspacePermissionUpdated.permissionId) } + confirmVerified(permissionRepository) + } + } + + @Nested + inner class UpdateOrgPermission { + @Test + fun `updatePermission should delete any workspace permissions that are made redundant by updating an org permission`() { + val existingWorkspacePermission = + Permission().apply { + permissionId = UUID.randomUUID() + userId = testUserId + workspaceId = UUID.randomUUID() + permissionType = PermissionType.WORKSPACE_ADMIN // will be made redundant by updated org permission + } + val orgPermissionPreUpdate = + Permission().apply { + permissionId = UUID.randomUUID() + userId = testUserId + organizationId = UUID.randomUUID() + permissionType = PermissionType.ORGANIZATION_READER + } + val updatedOrgPermission = + Permission().apply { + permissionId = orgPermissionPreUpdate.permissionId + userId = orgPermissionPreUpdate.userId + organizationId = orgPermissionPreUpdate.organizationId + permissionType = PermissionType.ORGANIZATION_ADMIN // update from org reader to admin + } + + every { permissionRepository.findById(orgPermissionPreUpdate.permissionId) } returns Optional.of(orgPermissionPreUpdate.toEntity()) + every { permissionRepository.findByUserId(testUserId) } returns + listOf( + existingWorkspacePermission.toEntity(), + orgPermissionPreUpdate.toEntity(), + ) + every { workspaceService.getOrganizationIdFromWorkspaceId(existingWorkspacePermission.workspaceId) } returns + Optional.of( + orgPermissionPreUpdate.organizationId, + ) + every { permissionRepository.update(updatedOrgPermission.toEntity()) } returns updatedOrgPermission.toEntity() + every { permissionRepository.deleteByIdIn(listOf(existingWorkspacePermission.permissionId)) } just Runs + + permissionService.updatePermission(updatedOrgPermission) + + verify { permissionRepository.findById(orgPermissionPreUpdate.permissionId) } + verify { 
permissionRepository.findByUserId(testUserId) } + verify(exactly = 1) { permissionRepository.update(updatedOrgPermission.toEntity()) } + verify(exactly = 1) { permissionRepository.deleteByIdIn(listOf(existingWorkspacePermission.permissionId)) } + confirmVerified(permissionRepository) + } + + @Test + fun `updatePermission should throw if demoting the last org admin`() { + val orgId = UUID.randomUUID() + + val existingOtherOrgPermission = + Permission().apply { + permissionId = UUID.randomUUID() + userId = testUserId + organizationId = orgId + permissionType = PermissionType.ORGANIZATION_EDITOR // other org permission is not admin + } + val orgPermissionPreUpdate = + Permission().apply { + permissionId = UUID.randomUUID() + userId = testUserId + organizationId = orgId + permissionType = PermissionType.ORGANIZATION_ADMIN + } + val orgPermissionUpdated = + Permission().apply { + permissionId = orgPermissionPreUpdate.permissionId + userId = orgPermissionPreUpdate.userId + organizationId = orgPermissionPreUpdate.organizationId + permissionType = + PermissionType.ORGANIZATION_EDITOR // org permission update is from admin to editor, throws because it's the last admin + } + + every { permissionRepository.findById(orgPermissionPreUpdate.permissionId) } returns Optional.of(orgPermissionPreUpdate.toEntity()) + every { permissionRepository.findByOrganizationId(orgId) } returns + listOf( + existingOtherOrgPermission.toEntity(), + orgPermissionPreUpdate.toEntity(), + ) + + assertThrows { permissionService.updatePermission(orgPermissionUpdated) } + + verify { permissionRepository.findById(orgPermissionPreUpdate.permissionId) } + verify { permissionRepository.findByOrganizationId(orgId) } + verify(exactly = 0) { permissionRepository.update(any()) } + confirmVerified(permissionRepository) + } + + @Test + fun `updatePermission should allow org admin demotion if another org admin exists`() { + val orgId = UUID.randomUUID() + + val existingOtherOrgPermission = + Permission().apply { + 
permissionId = UUID.randomUUID() + userId = testUserId + organizationId = orgId + permissionType = PermissionType.ORGANIZATION_ADMIN // other org permission is admin + } + val orgPermissionPreUpdate = + Permission().apply { + permissionId = UUID.randomUUID() + userId = testUserId + organizationId = orgId + permissionType = PermissionType.ORGANIZATION_ADMIN + } + val orgPermissionUpdated = + Permission().apply { + permissionId = orgPermissionPreUpdate.permissionId + userId = orgPermissionPreUpdate.userId + organizationId = orgPermissionPreUpdate.organizationId + permissionType = PermissionType.ORGANIZATION_EDITOR // org permission update is from admin to editor + } + + every { permissionRepository.findByUserId(testUserId) } returns + listOf( + existingOtherOrgPermission.toEntity(), + orgPermissionPreUpdate.toEntity(), + ) + every { permissionRepository.findById(orgPermissionPreUpdate.permissionId) } returns + Optional.of( + orgPermissionPreUpdate.toEntity(), + ) + every { permissionRepository.findByOrganizationId(orgId) } returns + listOf( + existingOtherOrgPermission.toEntity(), + orgPermissionPreUpdate.toEntity(), + ) + every { permissionRepository.update(orgPermissionUpdated.toEntity()) } returns orgPermissionUpdated.toEntity() + + permissionService.updatePermission(orgPermissionUpdated) + + verify { permissionRepository.findByUserId(testUserId) } + verify { permissionRepository.findById(orgPermissionPreUpdate.permissionId) } + verify { permissionRepository.findByOrganizationId(orgId) } + verify(exactly = 1) { permissionRepository.update(orgPermissionUpdated.toEntity()) } + confirmVerified(permissionRepository) + } + } + } +} diff --git a/airbyte-data/src/test/kotlin/io/airbyte/data/services/impls/data/ScopedConfigurationServiceDataImplTest.kt b/airbyte-data/src/test/kotlin/io/airbyte/data/services/impls/data/ScopedConfigurationServiceDataImplTest.kt index 286617f7ab0..40d82d923bd 100644 --- 
a/airbyte-data/src/test/kotlin/io/airbyte/data/services/impls/data/ScopedConfigurationServiceDataImplTest.kt +++ b/airbyte-data/src/test/kotlin/io/airbyte/data/services/impls/data/ScopedConfigurationServiceDataImplTest.kt @@ -5,15 +5,15 @@ import io.airbyte.data.repositories.ScopedConfigurationRepository import io.airbyte.data.repositories.entities.ScopedConfiguration import io.airbyte.data.services.impls.data.mappers.ModelConfigScopeType import io.airbyte.data.services.impls.data.mappers.toConfigModel +import io.airbyte.data.services.shared.ConfigScopeMapWithId import io.airbyte.data.services.shared.ScopedConfigurationKey import io.airbyte.db.instance.configs.jooq.generated.enums.ConfigOriginType -import io.airbyte.db.instance.configs.jooq.generated.enums.ConfigResourceType -import io.airbyte.db.instance.configs.jooq.generated.enums.ConfigScopeType import io.mockk.clearAllMocks import io.mockk.every import io.mockk.justRun import io.mockk.mockk import io.mockk.verify +import io.mockk.verifyAll import org.junit.jupiter.api.BeforeEach import org.junit.jupiter.api.Test import org.junit.jupiter.api.assertThrows @@ -227,24 +227,24 @@ internal class ScopedConfigurationServiceDataImplTest { ) every { - scopedConfigurationRepository.getByKeyAndResourceTypeAndResourceIdAndScopeTypeAndScopeId( + scopedConfigurationRepository.findByKeyAndResourceTypeAndResourceIdAndScopeTypeAndScopeIdInList( configKey.key, EntityConfigResourceType.actor_definition, resourceId, EntityConfigScopeType.workspace, - workspaceId, + listOf(workspaceId), ) - } returns null + } returns listOf() every { - scopedConfigurationRepository.getByKeyAndResourceTypeAndResourceIdAndScopeTypeAndScopeId( + scopedConfigurationRepository.findByKeyAndResourceTypeAndResourceIdAndScopeTypeAndScopeIdInList( configKey.key, EntityConfigResourceType.actor_definition, resourceId, EntityConfigScopeType.organization, - organizationId, + listOf(organizationId), ) - } returns config + } returns listOf(config) val 
retrievedConfig = scopedConfigurationService.getScopedConfiguration( @@ -260,19 +260,19 @@ internal class ScopedConfigurationServiceDataImplTest { assert(retrievedConfig.get() == config.toConfigModel()) verify { - scopedConfigurationRepository.getByKeyAndResourceTypeAndResourceIdAndScopeTypeAndScopeId( + scopedConfigurationRepository.findByKeyAndResourceTypeAndResourceIdAndScopeTypeAndScopeIdInList( configKey.key, EntityConfigResourceType.actor_definition, resourceId, EntityConfigScopeType.workspace, - workspaceId, + listOf(workspaceId), ) - scopedConfigurationRepository.getByKeyAndResourceTypeAndResourceIdAndScopeTypeAndScopeId( + scopedConfigurationRepository.findByKeyAndResourceTypeAndResourceIdAndScopeTypeAndScopeIdInList( configKey.key, EntityConfigResourceType.actor_definition, resourceId, EntityConfigScopeType.organization, - organizationId, + listOf(organizationId), ) } } @@ -298,6 +298,152 @@ internal class ScopedConfigurationServiceDataImplTest { } } + @Test + fun `test bulk get configurations by key, resource and scope map`() { + val configKey = + ScopedConfigurationKey( + key = "test-key", + supportedScopes = listOf(ModelConfigScopeType.WORKSPACE, ModelConfigScopeType.ORGANIZATION), + ) + + val resourceId = UUID.randomUUID() + + val organizationId = UUID.randomUUID() + val workspaceId1 = UUID.randomUUID() + val workspaceId2 = UUID.randomUUID() + + val organizationId2 = UUID.randomUUID() + val workspaceId3 = UUID.randomUUID() + + val orgConfig = + ScopedConfiguration( + id = UUID.randomUUID(), + key = configKey.key, + value = "value", + scopeType = EntityConfigScopeType.organization, + scopeId = organizationId, + resourceType = EntityConfigResourceType.actor_definition, + resourceId = resourceId, + originType = ConfigOriginType.user, + origin = "my_user_id", + description = "my_description", + ) + + val workspace1Config = + ScopedConfiguration( + id = UUID.randomUUID(), + key = configKey.key, + value = "value2", + scopeType = 
EntityConfigScopeType.workspace, + scopeId = workspaceId1, + resourceType = EntityConfigResourceType.actor_definition, + resourceId = resourceId, + originType = ConfigOriginType.user, + origin = "my_user_id", + description = "my_description", + ) + + every { + scopedConfigurationRepository.findByKeyAndResourceTypeAndResourceIdAndScopeTypeAndScopeIdInList( + configKey.key, + EntityConfigResourceType.actor_definition, + resourceId, + EntityConfigScopeType.workspace, + listOf(workspaceId1, workspaceId2, workspaceId3), + ) + } returns listOf(workspace1Config) + + every { + scopedConfigurationRepository.findByKeyAndResourceTypeAndResourceIdAndScopeTypeAndScopeIdInList( + configKey.key, + EntityConfigResourceType.actor_definition, + resourceId, + EntityConfigScopeType.organization, + listOf(organizationId, organizationId2), + ) + } returns listOf(orgConfig) + + val retrievedConfigs = + scopedConfigurationService.getScopedConfigurations( + configKey, + ModelConfigResourceType.ACTOR_DEFINITION, + resourceId, + listOf( + ConfigScopeMapWithId( + workspaceId1, + mapOf( + ModelConfigScopeType.WORKSPACE to workspaceId1, + ModelConfigScopeType.ORGANIZATION to organizationId, + ), + ), + ConfigScopeMapWithId( + workspaceId2, + mapOf( + ModelConfigScopeType.WORKSPACE to workspaceId2, + ModelConfigScopeType.ORGANIZATION to organizationId, + ), + ), + ConfigScopeMapWithId( + workspaceId3, + mapOf( + ModelConfigScopeType.WORKSPACE to workspaceId3, + ModelConfigScopeType.ORGANIZATION to organizationId2, + ), + ), + ), + ) + + // keys that have a config are in the result map, with the resolved config as the value + assert(retrievedConfigs[workspaceId1] == workspace1Config.toConfigModel()) + assert(retrievedConfigs[workspaceId2] == orgConfig.toConfigModel()) + + // keys that don't have a config are not included in the result map + assert(!retrievedConfigs.containsKey(workspaceId3)) + + verifyAll { + 
scopedConfigurationRepository.findByKeyAndResourceTypeAndResourceIdAndScopeTypeAndScopeIdInList( + configKey.key, + EntityConfigResourceType.actor_definition, + resourceId, + EntityConfigScopeType.workspace, + listOf(workspaceId1, workspaceId2, workspaceId3), + ) + scopedConfigurationRepository.findByKeyAndResourceTypeAndResourceIdAndScopeTypeAndScopeIdInList( + configKey.key, + EntityConfigResourceType.actor_definition, + resourceId, + EntityConfigScopeType.organization, + listOf(organizationId, organizationId2), + ) + } + } + + @Test + fun `test bulk get configurations with unsupported scope in map throws`() { + val configKey = + ScopedConfigurationKey( + key = "test-key-mismatched-supported-scope-3", + supportedScopes = listOf(ModelConfigScopeType.WORKSPACE), + ) + + assertThrows { + scopedConfigurationService.getScopedConfigurations( + configKey, + ModelConfigResourceType.ACTOR_DEFINITION, + UUID.randomUUID(), + listOf( + ConfigScopeMapWithId( + UUID.randomUUID(), + mapOf( + ModelConfigScopeType.ACTOR to UUID.randomUUID(), + ModelConfigScopeType.WORKSPACE to UUID.randomUUID(), + ), + ), + ), + ) + } + } + @Test fun `test get non-existent configuration by resource, scope and key returns empty opt`() { val scopeId = UUID.randomUUID() @@ -342,8 +488,8 @@ internal class ScopedConfigurationServiceDataImplTest { ) every { - scopedConfigurationRepository.getByKeyAndResourceTypeAndResourceIdAndScopeTypeAndScopeId(any(), any(), any(), any(), any()) - } returns null + scopedConfigurationRepository.findByKeyAndResourceTypeAndResourceIdAndScopeTypeAndScopeIdInList(any(), any(), any(), any(), any()) + } returns listOf() val retrievedConfig = scopedConfigurationService.getScopedConfiguration( @@ -359,19 +505,19 @@ internal class ScopedConfigurationServiceDataImplTest { assert(retrievedConfig.isEmpty) verify { - scopedConfigurationRepository.getByKeyAndResourceTypeAndResourceIdAndScopeTypeAndScopeId( + 
scopedConfigurationRepository.findByKeyAndResourceTypeAndResourceIdAndScopeTypeAndScopeIdInList( configKey.key, EntityConfigResourceType.actor_definition, resourceId, EntityConfigScopeType.workspace, - workspaceId, + listOf(workspaceId), ) - scopedConfigurationRepository.getByKeyAndResourceTypeAndResourceIdAndScopeTypeAndScopeId( + scopedConfigurationRepository.findByKeyAndResourceTypeAndResourceIdAndScopeTypeAndScopeIdInList( configKey.key, EntityConfigResourceType.actor_definition, resourceId, EntityConfigScopeType.organization, - organizationId, + listOf(organizationId), ) } } @@ -440,6 +586,48 @@ internal class ScopedConfigurationServiceDataImplTest { } } + @Test + fun `test bulk insert new configurations`() { + val resourceId = UUID.randomUUID() + + val config = + ScopedConfiguration( + id = UUID.randomUUID(), + key = "key", + value = "value", + scopeType = EntityConfigScopeType.workspace, + scopeId = UUID.randomUUID(), + resourceType = EntityConfigResourceType.actor_definition, + resourceId = resourceId, + originType = ConfigOriginType.user, + origin = "my_user_id", + description = "my_description", + ) + + val config2 = + ScopedConfiguration( + id = UUID.randomUUID(), + key = "key", + value = "value", + scopeType = EntityConfigScopeType.workspace, + scopeId = UUID.randomUUID(), + resourceType = EntityConfigResourceType.actor_definition, + resourceId = resourceId, + originType = ConfigOriginType.user, + origin = "my_user_id", + description = "my_description", + ) + + every { scopedConfigurationRepository.saveAll(listOf(config, config2)) } returns listOf(config, config2) + + val res = scopedConfigurationService.insertScopedConfigurations(listOf(config.toConfigModel(), config2.toConfigModel())) + assert(res == listOf(config.toConfigModel(), config2.toConfigModel())) + + verifyAll { + scopedConfigurationRepository.saveAll(listOf(config, config2)) + } + } + @Test fun `test list configurations`() { val resourceId = UUID.randomUUID() @@ -561,9 +749,9 @@ internal 
class ScopedConfigurationServiceDataImplTest { every { scopedConfigurationRepository.findByKeyAndResourceTypeAndResourceIdAndScopeTypeAndScopeIdInList( "key", - ConfigResourceType.actor_definition, + EntityConfigResourceType.actor_definition, resourceId, - ConfigScopeType.workspace, + EntityConfigScopeType.workspace, listOf(config.scopeId, config2.scopeId), ) } returns listOf(config, config2) @@ -581,9 +769,9 @@ internal class ScopedConfigurationServiceDataImplTest { verify { scopedConfigurationRepository.findByKeyAndResourceTypeAndResourceIdAndScopeTypeAndScopeIdInList( "key", - ConfigResourceType.actor_definition, + EntityConfigResourceType.actor_definition, resourceId, - ConfigScopeType.workspace, + EntityConfigScopeType.workspace, listOf(config.scopeId, config2.scopeId), ) } @@ -599,4 +787,15 @@ internal class ScopedConfigurationServiceDataImplTest { verify { scopedConfigurationRepository.deleteById(configId) } } + + @Test + fun `test delete multiple scoped configuration`() { + val configIds = listOf(UUID.randomUUID(), UUID.randomUUID()) + + justRun { scopedConfigurationRepository.deleteByIdInList(configIds) } + + scopedConfigurationService.deleteScopedConfigurations(configIds) + + verifyAll { scopedConfigurationRepository.deleteByIdInList(configIds) } + } } diff --git a/airbyte-data/src/test/kotlin/io/airbyte/data/services/impls/data/UserInvitationServiceDataImplTest.kt b/airbyte-data/src/test/kotlin/io/airbyte/data/services/impls/data/UserInvitationServiceDataImplTest.kt index 34845646bfd..6e006e32f2d 100644 --- a/airbyte-data/src/test/kotlin/io/airbyte/data/services/impls/data/UserInvitationServiceDataImplTest.kt +++ b/airbyte-data/src/test/kotlin/io/airbyte/data/services/impls/data/UserInvitationServiceDataImplTest.kt @@ -1,15 +1,19 @@ package io.airbyte.data.services.impls.data +import io.airbyte.config.ScopeType import io.airbyte.data.exceptions.ConfigNotFoundException import io.airbyte.data.repositories.PermissionRepository import 
io.airbyte.data.repositories.UserInvitationRepository import io.airbyte.data.repositories.entities.Permission import io.airbyte.data.repositories.entities.UserInvitation +import io.airbyte.data.services.InvitationDuplicateException +import io.airbyte.data.services.InvitationStatusUnexpectedException +import io.airbyte.data.services.impls.data.mappers.EntityInvitationStatus +import io.airbyte.data.services.impls.data.mappers.EntityPermissionType +import io.airbyte.data.services.impls.data.mappers.EntityScopeType import io.airbyte.data.services.impls.data.mappers.toConfigModel -import io.airbyte.db.instance.configs.jooq.generated.enums.InvitationStatus -import io.airbyte.db.instance.configs.jooq.generated.enums.PermissionType -import io.airbyte.db.instance.configs.jooq.generated.enums.ScopeType import io.mockk.clearAllMocks +import io.mockk.confirmVerified import io.mockk.every import io.mockk.mockk import io.mockk.verify @@ -35,11 +39,12 @@ internal class UserInvitationServiceDataImplTest { inviterUserId = UUID.randomUUID(), invitedEmail = "invited@airbyte.io", scopeId = UUID.randomUUID(), - scopeType = ScopeType.workspace, - permissionType = PermissionType.workspace_admin, - status = InvitationStatus.pending, + scopeType = EntityScopeType.workspace, + permissionType = EntityPermissionType.workspace_admin, + status = EntityInvitationStatus.pending, createdAt = OffsetDateTime.now(ZoneOffset.UTC).truncatedTo(java.time.temporal.ChronoUnit.SECONDS), updatedAt = OffsetDateTime.now(ZoneOffset.UTC).truncatedTo(java.time.temporal.ChronoUnit.SECONDS), + expiresAt = OffsetDateTime.now(ZoneOffset.UTC).plusDays(7).truncatedTo(java.time.temporal.ChronoUnit.SECONDS), ) @BeforeEach @@ -68,6 +73,7 @@ internal class UserInvitationServiceDataImplTest { @Test fun `test create user invitation`() { + every { userInvitationRepository.findByStatusAndScopeTypeAndScopeIdAndInvitedEmail(any(), any(), any(), any()) } returns emptyList() every { userInvitationRepository.save(invitation) } 
returns invitation val result = userInvitationService.createUserInvitation(invitation.toConfigModel()) @@ -76,10 +82,23 @@ internal class UserInvitationServiceDataImplTest { verify { userInvitationRepository.save(invitation) } } + @Test + fun `test create duplicate user invitation throws`() { + every { userInvitationRepository.findByStatusAndScopeTypeAndScopeIdAndInvitedEmail(any(), any(), any(), any()) } returns listOf(invitation) + + assertThrows { userInvitationService.createUserInvitation(invitation.toConfigModel()) } + + verify(exactly = 0) { userInvitationRepository.save(invitation) } + } + @Test fun `test accept user invitation`() { val invitedUserId = UUID.randomUUID() - val expectedUpdatedInvitation = invitation.copy(status = InvitationStatus.accepted) + val expectedUpdatedInvitation = + invitation.copy( + status = EntityInvitationStatus.accepted, + acceptedByUserId = invitedUserId, + ) every { userInvitationRepository.findByInviteCode(invitation.inviteCode) } returns Optional.of(invitation) every { userInvitationRepository.update(expectedUpdatedInvitation) } returns expectedUpdatedInvitation @@ -104,9 +123,9 @@ internal class UserInvitationServiceDataImplTest { } @ParameterizedTest - @EnumSource(value = InvitationStatus::class) - fun `test accept user invitation fails if not pending`(status: InvitationStatus) { - if (status == InvitationStatus.pending) { + @EnumSource(value = EntityInvitationStatus::class) + fun `test accept user invitation fails if not pending`(status: EntityInvitationStatus) { + if (status == EntityInvitationStatus.pending) { return // not testing this case } @@ -115,8 +134,66 @@ internal class UserInvitationServiceDataImplTest { every { userInvitationRepository.findByInviteCode(invitation.inviteCode) } returns Optional.of(invitation) - assertThrows { userInvitationService.acceptUserInvitation(invitation.inviteCode, invitedUserId) } + assertThrows { userInvitationService.acceptUserInvitation(invitation.inviteCode, invitedUserId) } 
verify(exactly = 0) { userInvitationRepository.update(any()) } } + + @Test + fun `test accept user invitation fails if expired`() { + val invitedUserId = UUID.randomUUID() + val expiredInvitation = + invitation.copy( + status = EntityInvitationStatus.pending, + expiresAt = OffsetDateTime.now(ZoneOffset.UTC).minusDays(1), + ) + val expectedUpdatedInvitation = expiredInvitation.copy(status = EntityInvitationStatus.expired) + + every { userInvitationRepository.findByInviteCode(expiredInvitation.inviteCode) } returns Optional.of(expiredInvitation) + every { userInvitationRepository.update(expectedUpdatedInvitation) } returns expectedUpdatedInvitation + + assertThrows { userInvitationService.acceptUserInvitation(expiredInvitation.inviteCode, invitedUserId) } + + verify { userInvitationRepository.update(expectedUpdatedInvitation) } + } + + @Test + fun `test get pending invitations`() { + val workspaceId = UUID.randomUUID() + val organizationId = UUID.randomUUID() + val mockWorkspaceInvitations = listOf(invitation, invitation.copy(id = UUID.randomUUID())) + val mockOrganizationInvitations = listOf(invitation.copy(id = UUID.randomUUID()), invitation.copy(id = UUID.randomUUID())) + + every { + userInvitationRepository.findByStatusAndScopeTypeAndScopeId(EntityInvitationStatus.pending, EntityScopeType.workspace, workspaceId) + } returns mockWorkspaceInvitations + every { + userInvitationRepository.findByStatusAndScopeTypeAndScopeId(EntityInvitationStatus.pending, EntityScopeType.organization, organizationId) + } returns mockOrganizationInvitations + + val workspaceResult = userInvitationService.getPendingInvitations(ScopeType.WORKSPACE, workspaceId) + val organizationResult = userInvitationService.getPendingInvitations(ScopeType.ORGANIZATION, organizationId) + + verify(exactly = 1) { + userInvitationRepository.findByStatusAndScopeTypeAndScopeId(EntityInvitationStatus.pending, EntityScopeType.workspace, workspaceId) + } + verify(exactly = 1) { + 
userInvitationRepository.findByStatusAndScopeTypeAndScopeId(EntityInvitationStatus.pending, EntityScopeType.organization, organizationId) + } + confirmVerified(userInvitationRepository) + + assert(workspaceResult == mockWorkspaceInvitations.map { it.toConfigModel() }) + assert(organizationResult == mockOrganizationInvitations.map { it.toConfigModel() }) + } + + @Test + fun `test cancel invitation`() { + val expectedUpdatedInvitation = invitation.copy(status = EntityInvitationStatus.cancelled) + every { userInvitationRepository.findByInviteCode(invitation.inviteCode) } returns Optional.of(invitation) + every { userInvitationRepository.update(expectedUpdatedInvitation) } returns expectedUpdatedInvitation + + userInvitationService.cancelUserInvitation(invitation.inviteCode) + + verify { userInvitationRepository.update(expectedUpdatedInvitation) } + } } diff --git a/airbyte-db/db-lib/build.gradle.kts b/airbyte-db/db-lib/build.gradle.kts index 537eb5708f1..d69e9fb976f 100644 --- a/airbyte-db/db-lib/build.gradle.kts +++ b/airbyte-db/db-lib/build.gradle.kts @@ -1,115 +1,115 @@ plugins { - id("io.airbyte.gradle.jvm.lib") - id("io.airbyte.gradle.docker") - id("io.airbyte.gradle.publish") + id("io.airbyte.gradle.jvm.lib") + id("io.airbyte.gradle.docker") + id("io.airbyte.gradle.publish") } // Add a configuration(for our migrations(tasks defined below to encapsulate their dependencies) val migrations by configurations.creating { - extendsFrom(configurations.getByName("implementation")) + extendsFrom(configurations.getByName("implementation")) } configurations.all { - exclude(group = "io.micronaut.flyway") - resolutionStrategy { - force (libs.platform.testcontainers.postgresql) - } + exclude(group = "io.micronaut.flyway") + resolutionStrategy { + force(libs.platform.testcontainers.postgresql) + } } airbyte { - docker { - imageName = "db" - } + docker { + imageName = "db" + } } dependencies { - api(libs.hikaricp) - api(libs.jooq.meta) - api(libs.jooq) - api(libs.postgresql) - - 
implementation(project(":airbyte-commons")) - implementation(libs.airbyte.protocol) - implementation(project(":airbyte-json-validation")) - implementation(project(":airbyte-config:config-models")) - implementation(libs.bundles.flyway) - implementation(libs.guava) - implementation(platform(libs.fasterxml)) - implementation(libs.bundles.jackson) - implementation(libs.commons.io) - - migrations(libs.platform.testcontainers.postgresql) - migrations(sourceSets["main"].output) - - // Mark as compile Only to avoid leaking transitively to connectors) - compileOnly(libs.platform.testcontainers.postgresql) - - // These are required because gradle might be using lower version of Jna from other) - // library transitive dependency. Can be removed if we can figure out which library is the cause.) - // Refer: https://github.com/testcontainers/testcontainers-java/issues/3834#issuecomment-825409079) - implementation(libs.jna) - implementation(libs.jna.platform) - - testImplementation(project(":airbyte-test-utils")) - testImplementation(libs.apache.commons.lang) - testImplementation(libs.platform.testcontainers.postgresql) - testRuntimeOnly(libs.junit.jupiter.engine) - testImplementation(libs.bundles.junit) - testImplementation(libs.assertj.core) - - testImplementation(libs.junit.pioneer) - testImplementation(libs.json.assert) + api(libs.hikaricp) + api(libs.jooq.meta) + api(libs.jooq) + api(libs.postgresql) + + implementation(project(":airbyte-commons")) + implementation(libs.airbyte.protocol) + implementation(project(":airbyte-json-validation")) + implementation(project(":airbyte-config:config-models")) + implementation(libs.bundles.flyway) + implementation(libs.guava) + implementation(platform(libs.fasterxml)) + implementation(libs.bundles.jackson) + implementation(libs.commons.io) + + migrations(libs.platform.testcontainers.postgresql) + migrations(sourceSets["main"].output) + + // Mark as compile Only to avoid leaking transitively to connectors) + 
compileOnly(libs.platform.testcontainers.postgresql) + + // These are required because gradle might be using lower version of Jna from other) + // library transitive dependency. Can be removed if we can figure out which library is the cause.) + // Refer: https://github.com/testcontainers/testcontainers-java/issues/3834#issuecomment-825409079) + implementation(libs.jna) + implementation(libs.jna.platform) + + testImplementation(project(":airbyte-test-utils")) + testImplementation(libs.apache.commons.lang) + testImplementation(libs.platform.testcontainers.postgresql) + testRuntimeOnly(libs.junit.jupiter.engine) + testImplementation(libs.bundles.junit) + testImplementation(libs.assertj.core) + + testImplementation(libs.junit.pioneer) + testImplementation(libs.json.assert) } tasks.register("newConfigsMigration") { - mainClass = "io.airbyte.db.instance.development.MigrationDevCenter" - classpath = files(migrations.files) - args = listOf("configs", "create") - dependsOn(tasks.named("classes")) + mainClass = "io.airbyte.db.instance.development.MigrationDevCenter" + classpath = files(migrations.files) + args = listOf("configs", "create") + dependsOn(tasks.named("classes")) } tasks.register("runConfigsMigration") { - mainClass = "io.airbyte.db.instance.development.MigrationDevCenter" - classpath = files(migrations.files) - args = listOf("configs", "migrate") - dependsOn(tasks.named("classes")) + mainClass = "io.airbyte.db.instance.development.MigrationDevCenter" + classpath = files(migrations.files) + args = listOf("configs", "migrate") + dependsOn(tasks.named("classes")) } tasks.register("dumpConfigsSchema") { - mainClass = "io.airbyte.db.instance.development.MigrationDevCenter" - classpath = files(migrations.files) - args = listOf("configs", "dump_schema") - dependsOn(tasks.named("classes")) + mainClass = "io.airbyte.db.instance.development.MigrationDevCenter" + classpath = files(migrations.files) + args = listOf("configs", "dump_schema") + 
dependsOn(tasks.named("classes")) } tasks.register("newJobsMigration") { - mainClass = "io.airbyte.db.instance.development.MigrationDevCenter" - classpath = files(migrations.files) - args = listOf("jobs", "create") - dependsOn(tasks.named("classes")) + mainClass = "io.airbyte.db.instance.development.MigrationDevCenter" + classpath = files(migrations.files) + args = listOf("jobs", "create") + dependsOn(tasks.named("classes")) } tasks.register("runJobsMigration") { - mainClass = "io.airbyte.db.instance.development.MigrationDevCenter" - classpath = files(migrations.files) - args = listOf( "jobs", "migrate") - dependsOn(tasks.named("classes")) + mainClass = "io.airbyte.db.instance.development.MigrationDevCenter" + classpath = files(migrations.files) + args = listOf("jobs", "migrate") + dependsOn(tasks.named("classes")) } tasks.register("dumpJobsSchema") { - mainClass = "io.airbyte.db.instance.development.MigrationDevCenter" - classpath = files(migrations.files) - args = listOf("jobs", "dump_schema") - dependsOn(tasks.named("classes")) + mainClass = "io.airbyte.db.instance.development.MigrationDevCenter" + classpath = files(migrations.files) + args = listOf("jobs", "dump_schema") + dependsOn(tasks.named("classes")) } val copyInitSql = tasks.register("copyInitSql") { - from("src/main/resources") { - include("init.sql") - } - into("build/airbyte/docker/bin") + from("src/main/resources") { + include("init.sql") + } + into("build/airbyte/docker/bin") } tasks.named("dockerBuildImage") { - dependsOn(copyInitSql) + dependsOn(copyInitSql) } diff --git a/airbyte-db/db-lib/src/main/java/io/airbyte/db/factory/FlywayFactory.java b/airbyte-db/db-lib/src/main/java/io/airbyte/db/factory/FlywayFactory.java index 0c7b23ca563..45dab989001 100644 --- a/airbyte-db/db-lib/src/main/java/io/airbyte/db/factory/FlywayFactory.java +++ b/airbyte-db/db-lib/src/main/java/io/airbyte/db/factory/FlywayFactory.java @@ -6,6 +6,7 @@ import javax.sql.DataSource; import org.flywaydb.core.Flyway; +import 
org.flywaydb.database.postgresql.PostgreSQLConfigurationExtension; /** * Temporary factory class that provides convenience methods for creating a {@link Flyway} @@ -68,15 +69,22 @@ public static Flyway create(final DataSource dataSource, final String baselineDescription, final boolean baselineOnMigrate, final String... migrationFileLocations) { - return Flyway.configure() + final var flywayConfiguration = Flyway.configure() .dataSource(dataSource) .baselineVersion(baselineVersion) .baselineDescription(baselineDescription) .baselineOnMigrate(baselineOnMigrate) .installedBy(installedBy) .table(String.format(MIGRATION_TABLE_FORMAT, dbIdentifier)) - .locations(migrationFileLocations) - .load(); + .locations(migrationFileLocations); + + // Setting the transactional lock to false allows us run queries outside transactions + // without hanging. This enables creating indexes concurrently (i.e. without locking tables) + flywayConfiguration.getPluginRegister() + .getPlugin(PostgreSQLConfigurationExtension.class) + .setTransactionalLock(false); + + return flywayConfiguration.load(); } } diff --git a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_50_41_008__AddConnectionTimeline.java b/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_50_41_008__AddConnectionTimeline.java new file mode 100644 index 00000000000..b6aef5b81c8 --- /dev/null +++ b/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_50_41_008__AddConnectionTimeline.java @@ -0,0 +1,61 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.db.instance.configs.migrations; + +import static org.jooq.impl.DSL.foreignKey; +import static org.jooq.impl.DSL.primaryKey; +import static org.jooq.impl.DSL.table; + +import java.time.OffsetDateTime; +import java.util.UUID; +import org.flywaydb.core.api.migration.BaseJavaMigration; +import org.flywaydb.core.api.migration.Context; +import org.jooq.DSLContext; +import org.jooq.Field; +import org.jooq.JSONB; +import org.jooq.impl.DSL; +import org.jooq.impl.SQLDataType; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class V0_50_41_008__AddConnectionTimeline extends BaseJavaMigration { + + private static final Logger LOGGER = LoggerFactory.getLogger(V0_50_41_008__AddConnectionTimeline.class); + + private static final Field idField = DSL.field("id", SQLDataType.UUID.nullable(false)); + + private static final Field connectionIdField = DSL.field("connection_id", SQLDataType.UUID.nullable(false)); + + private static final Field userIdField = DSL.field("user_id", SQLDataType.UUID.nullable(true)); + + private static final Field eventTypeField = DSL.field("event_type", SQLDataType.VARCHAR.nullable(false)); + + private static final Field summaryField = DSL.field("summary", SQLDataType.JSONB.nullable(true)); + + private static final Field createdAtField = DSL.field("created_at", SQLDataType.TIMESTAMPWITHTIMEZONE.nullable(false)); + public static final String TABLE_NAME = "connection_timeline_event"; + + @Override + public void migrate(final Context context) throws Exception { + LOGGER.info("Running migration: {}", this.getClass().getSimpleName()); + + final DSLContext ctx = DSL.using(context.getConnection()); + ctx.createTable(TABLE_NAME) + .columns(idField, + connectionIdField, + userIdField, + eventTypeField, + summaryField, + createdAtField) + .constraints(primaryKey(idField), + foreignKey(connectionIdField).references("connection", "id").onDeleteCascade(), + foreignKey(userIdField).references("user", "id")) + .execute(); + 
ctx.createIndexIfNotExists("idx_connection_timeline_connection_id") + .on(table(TABLE_NAME), connectionIdField.asc(), createdAtField.desc(), eventTypeField.asc()) + .execute(); + } + +} diff --git a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_50_41_009__AddBreakingChangeConfigOrigin.java b/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_50_41_009__AddBreakingChangeConfigOrigin.java new file mode 100644 index 00000000000..1e1fa35957f --- /dev/null +++ b/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_50_41_009__AddBreakingChangeConfigOrigin.java @@ -0,0 +1,68 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.db.instance.configs.migrations; + +import org.flywaydb.core.api.migration.BaseJavaMigration; +import org.flywaydb.core.api.migration.Context; +import org.jooq.Catalog; +import org.jooq.DSLContext; +import org.jooq.EnumType; +import org.jooq.Schema; +import org.jooq.impl.DSL; +import org.jooq.impl.SchemaImpl; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class V0_50_41_009__AddBreakingChangeConfigOrigin extends BaseJavaMigration { + + private static final Logger LOGGER = LoggerFactory.getLogger(V0_50_41_009__AddBreakingChangeConfigOrigin.class); + private static final String CONFIG_ORIGIN_TYPE = "config_origin_type"; + private static final String BREAKING_CHANGE = "breaking_change"; + + @Override + public void migrate(final Context context) throws Exception { + LOGGER.info("Running migration: {}", this.getClass().getSimpleName()); + + // Warning: please do not use any jOOQ generated code to write a migration. + // As database schema changes, the generated jOOQ code can be deprecated. So + // old migration may not compile if there is any generated code. 
+ final DSLContext ctx = DSL.using(context.getConnection()); + ctx.alterType(CONFIG_ORIGIN_TYPE).addValue(BREAKING_CHANGE).execute(); + } + + enum ConfigOriginType implements EnumType { + + USER("user"), + BREAKING_CHANGE("breaking_change"); + + private final String literal; + + ConfigOriginType(final String literal) { + this.literal = literal; + } + + @Override + public Catalog getCatalog() { + return getSchema() == null ? null : getSchema().getCatalog(); + } + + @Override + public Schema getSchema() { + return new SchemaImpl(DSL.name("public"), null); + } + + @Override + public String getName() { + return CONFIG_ORIGIN_TYPE; + } + + @Override + public String getLiteral() { + return literal; + } + + } + +} diff --git a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_50_41_010__AddConditionalMutexKeyIndexToWorkloads.java b/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_50_41_010__AddConditionalMutexKeyIndexToWorkloads.java new file mode 100644 index 00000000000..0a0574532be --- /dev/null +++ b/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_50_41_010__AddConditionalMutexKeyIndexToWorkloads.java @@ -0,0 +1,34 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.db.instance.configs.migrations; + +import java.util.List; +import org.flywaydb.core.api.migration.BaseJavaMigration; +import org.flywaydb.core.api.migration.Context; +import org.jooq.DSLContext; +import org.jooq.impl.DSL; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +// TODO: update migration description in the class name +public class V0_50_41_010__AddConditionalMutexKeyIndexToWorkloads extends BaseJavaMigration { + + private static final Logger LOGGER = LoggerFactory.getLogger(V0_50_41_010__AddConditionalMutexKeyIndexToWorkloads.class); + + @Override + public void migrate(final Context context) throws Exception { + LOGGER.info("Running migration: {}", this.getClass().getSimpleName()); + + // Warning: please do not use any jOOQ generated code to write a migration. + // As database schema changes, the generated jOOQ code can be deprecated. So + // old migration may not compile if there is any generated code. + final DSLContext ctx = DSL.using(context.getConnection()); + ctx.createIndexIfNotExists("active_workload_by_mutex_idx") + .on("workload", "mutex_key") + .where(DSL.field("status").in(List.of("pending", "claimed", "launched", "running"))) + .execute(); + } + +} diff --git a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_50_41_011__AddUserInvitationAcceptedByAndExpiration.java b/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_50_41_011__AddUserInvitationAcceptedByAndExpiration.java new file mode 100644 index 00000000000..004ab50f338 --- /dev/null +++ b/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_50_41_011__AddUserInvitationAcceptedByAndExpiration.java @@ -0,0 +1,77 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.db.instance.configs.migrations; + +import static io.airbyte.db.instance.DatabaseConstants.USER_INVITATION_TABLE; +import static io.airbyte.db.instance.DatabaseConstants.USER_TABLE; +import static org.jooq.impl.DSL.foreignKey; + +import java.sql.Timestamp; +import java.util.UUID; +import org.flywaydb.core.api.migration.BaseJavaMigration; +import org.flywaydb.core.api.migration.Context; +import org.jooq.DSLContext; +import org.jooq.Field; +import org.jooq.impl.DSL; +import org.jooq.impl.SQLDataType; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * Add accepted_by_user_id column and expires_at column to user_invitations table. Also add expired + * status to invitation_status enum. + */ +public class V0_50_41_011__AddUserInvitationAcceptedByAndExpiration extends BaseJavaMigration { + + private static final Logger LOGGER = LoggerFactory.getLogger(V0_50_41_011__AddUserInvitationAcceptedByAndExpiration.class); + + private static final String ACCEPTED_BY_USER_ID = "accepted_by_user_id"; + private static final String EXPIRES_AT = "expires_at"; + private static final String INVITATION_STATUS = "invitation_status"; + private static final String EXPIRED = "expired"; + + private static final Field ACCEPTED_BY_USER_ID_COLUMN = DSL.field(ACCEPTED_BY_USER_ID, SQLDataType.UUID.nullable(true)); + private static final Field EXPIRES_AT_COLUMN = DSL.field(EXPIRES_AT, SQLDataType.TIMESTAMP.nullable(false)); + + @Override + public void migrate(final Context context) throws Exception { + LOGGER.info("Running migration: {}", this.getClass().getSimpleName()); + + final DSLContext ctx = DSL.using(context.getConnection()); + + addAcceptedByUserIdColumnAndIndex(ctx); + addExpiresAtColumnAndIndex(ctx); + addExpiredStatus(ctx); + } + + static void addAcceptedByUserIdColumnAndIndex(final DSLContext ctx) { + ctx.alterTable(USER_INVITATION_TABLE) + .addColumnIfNotExists(ACCEPTED_BY_USER_ID_COLUMN) + .execute(); + + 
ctx.alterTable(USER_INVITATION_TABLE) + .add(foreignKey(ACCEPTED_BY_USER_ID) + .references(USER_TABLE, "id") + .onDeleteCascade()) + .execute(); + + ctx.createIndex("user_invitation_accepted_by_user_id_index") + .on(USER_INVITATION_TABLE, ACCEPTED_BY_USER_ID) + .execute(); + } + + static void addExpiresAtColumnAndIndex(final DSLContext ctx) { + ctx.alterTable(USER_INVITATION_TABLE).addColumnIfNotExists(EXPIRES_AT_COLUMN).execute(); + + ctx.createIndex("user_invitation_expires_at_index") + .on(USER_INVITATION_TABLE, EXPIRES_AT) + .execute(); + } + + static void addExpiredStatus(final DSLContext ctx) { + ctx.alterType(INVITATION_STATUS).addValue(EXPIRED).execute(); + } + +} diff --git a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_50_41_012__BreakingChangePinDataMigration.java b/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_50_41_012__BreakingChangePinDataMigration.java new file mode 100644 index 00000000000..542f3773537 --- /dev/null +++ b/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_50_41_012__BreakingChangePinDataMigration.java @@ -0,0 +1,216 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.db.instance.configs.migrations; + +import com.google.common.annotations.VisibleForTesting; +import io.airbyte.commons.version.Version; +import io.airbyte.db.instance.configs.migrations.V0_50_33_014__AddScopedConfigurationTable.ConfigResourceType; +import io.airbyte.db.instance.configs.migrations.V0_50_33_014__AddScopedConfigurationTable.ConfigScopeType; +import io.airbyte.db.instance.configs.migrations.V0_50_41_009__AddBreakingChangeConfigOrigin.ConfigOriginType; +import jakarta.annotation.Nullable; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.UUID; +import org.flywaydb.core.api.migration.BaseJavaMigration; +import org.flywaydb.core.api.migration.Context; +import org.jooq.DSLContext; +import org.jooq.Field; +import org.jooq.Record; +import org.jooq.Table; +import org.jooq.impl.DSL; +import org.jooq.impl.SQLDataType; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class V0_50_41_012__BreakingChangePinDataMigration extends BaseJavaMigration { + + private static final String CONNECTOR_VERSION_KEY = "connector_version"; + private static final Table ACTOR = DSL.table("actor"); + private static final Table ACTOR_DEFINITION = DSL.table("actor_definition"); + private static final Table ACTOR_DEFINITION_BREAKING_CHANGE = DSL.table("actor_definition_breaking_change"); + private static final Table WORKSPACE = DSL.table("workspace"); + private static final Table SCOPED_CONFIGURATION = DSL.table("scoped_configuration"); + private static final Field ID = DSL.field("id", SQLDataType.UUID); + private static final Field KEY = DSL.field("key", SQLDataType.VARCHAR); + private static final Field RESOURCE_TYPE = DSL.field("resource_type", ConfigResourceType.class); + private static final Field RESOURCE_ID = DSL.field("resource_id", SQLDataType.UUID); + private static final Field SCOPE_TYPE = DSL.field("scope_type", ConfigScopeType.class); + private static final 
Field SCOPE_ID = DSL.field("scope_id", SQLDataType.UUID); + private static final Field VALUE = DSL.field("value", SQLDataType.VARCHAR); + private static final Field DESCRIPTION = DSL.field("description", SQLDataType.VARCHAR); + private static final Field ORIGIN_TYPE = DSL.field("origin_type", ConfigOriginType.class); + private static final Field ORIGIN = DSL.field("origin", SQLDataType.VARCHAR); + + private static final Logger LOGGER = LoggerFactory.getLogger(V0_50_41_012__BreakingChangePinDataMigration.class); + + @Override + public void migrate(final Context context) throws Exception { + LOGGER.info("Running migration: {}", this.getClass().getSimpleName()); + + // Warning: please do not use any jOOQ generated code to write a migration. + // As database schema changes, the generated jOOQ code can be deprecated. So + // old migration may not compile if there is any generated code. + final DSLContext ctx = DSL.using(context.getConnection()); + migrateBreakingChangePins(ctx); + } + + @VisibleForTesting + public void migrateBreakingChangePins(final DSLContext ctx) { + final List actorDefinitions = getActorDefinitions(ctx); + for (final ActorDefinition actorDefinition : actorDefinitions) { + migrateBreakingChangePinsForDefinition(ctx, actorDefinition); + } + } + + private void migrateBreakingChangePinsForDefinition(final DSLContext ctx, final ActorDefinition actorDefinition) { + final List unpinnedActorsNotOnDefaultVersion = getUnpinnedActorsNotOnDefaultVersion(ctx, actorDefinition); + final List breakingChangeVersions = getBreakingChangeVersionsForDefinition(ctx, actorDefinition.actorDefinitionId); + for (final Actor actor : unpinnedActorsNotOnDefaultVersion) { + final String originatingBreakingChange = getOriginatingBreakingChangeForVersion(ctx, actor.defaultVersionId, breakingChangeVersions); + createScopedConfiguration(ctx, actorDefinition.actorDefinitionId, originatingBreakingChange, ConfigScopeType.ACTOR, actor.actorId, + actor.defaultVersionId); + } + } + + 
private List getUnpinnedActorsNotOnDefaultVersion(final DSLContext ctx, final ActorDefinition actorDefinition) { + final List actors = getActorsNotOnDefaultVersion(ctx, actorDefinition); + final List actorIdsWithConfig = + getIdsWithConfig(ctx, actorDefinition.actorDefinitionId, ConfigScopeType.ACTOR, actors.stream().map(Actor::actorId).toList()); + final List workspaceIdsWithConfig = + getIdsWithConfig(ctx, actorDefinition.actorDefinitionId, ConfigScopeType.WORKSPACE, actors.stream().map(Actor::workspaceId).toList()); + final List orgIdsWithConfig = + getIdsWithConfig(ctx, actorDefinition.actorDefinitionId, ConfigScopeType.ORGANIZATION, actors.stream().map(Actor::organizationId).toList()); + + return actors.stream() + .filter(actor -> !actorIdsWithConfig.contains(actor.actorId())) + .filter(actor -> !workspaceIdsWithConfig.contains(actor.workspaceId())) + .filter(actor -> !orgIdsWithConfig.contains(actor.organizationId())) + .toList(); + } + + final Map versionBreakingChangeCache = new HashMap<>(); + + private String getOriginatingBreakingChangeForVersion(final DSLContext ctx, final UUID versionId, final List breakingChangeVersions) { + if (versionBreakingChangeCache.containsKey(versionId)) { + return versionBreakingChangeCache.get(versionId); + } + + final ActorDefinitionVersion version = getActorDefinitionVersion(ctx, versionId); + final Version pinnedVersion = new Version(version.dockerImageTag); + + final Optional breakingVersion = breakingChangeVersions.stream() + .filter(breakingChangeVersion -> breakingChangeVersion.greaterThan(pinnedVersion)) + .findFirst(); + + if (breakingVersion.isEmpty()) { + throw new IllegalStateException(String.format( + "Could not find a corresponding breaking change for pinned version %s on actor definition ID %s. 
" + + "Overriding actor versions without a breaking change is not supported.", + version.dockerImageTag, version.actorDefinitionId)); + } + + final String originatingBreakingChange = breakingVersion.get().serialize(); + versionBreakingChangeCache.put(versionId, originatingBreakingChange); + return originatingBreakingChange; + } + + private List getIdsWithConfig(final DSLContext ctx, + final UUID actorDefinitionId, + final ConfigScopeType scopeType, + final List scopeIds) { + return ctx.select(SCOPE_ID) + .from(SCOPED_CONFIGURATION) + .where(KEY.eq(CONNECTOR_VERSION_KEY)) + .and(RESOURCE_TYPE.eq(ConfigResourceType.ACTOR_DEFINITION)) + .and(RESOURCE_ID.eq(actorDefinitionId)) + .and(SCOPE_TYPE.eq(scopeType)) + .and(SCOPE_ID.in(scopeIds)) + .fetch() + .map(r -> r.get(SCOPE_ID)); + } + + private void createScopedConfiguration(final DSLContext ctx, + final UUID actorDefinitionId, + final String breakingChangeVersionTag, + final ConfigScopeType scopeType, + final UUID scopeId, + final UUID pinnedVersionId) { + ctx.insertInto(SCOPED_CONFIGURATION) + .columns(ID, KEY, RESOURCE_TYPE, RESOURCE_ID, SCOPE_TYPE, SCOPE_ID, ORIGIN_TYPE, ORIGIN, VALUE, DESCRIPTION) + .values( + UUID.randomUUID(), + CONNECTOR_VERSION_KEY, + ConfigResourceType.ACTOR_DEFINITION, actorDefinitionId, + scopeType, scopeId, + ConfigOriginType.BREAKING_CHANGE, breakingChangeVersionTag, + pinnedVersionId.toString(), + "Automated breaking change pin migration") + .execute(); + } + + private List getActorDefinitions(final DSLContext ctx) { + final Field id = DSL.field("id", SQLDataType.UUID); + final Field defaultVersionId = DSL.field("default_version_id", SQLDataType.UUID); + + return ctx.select(id, defaultVersionId) + .from(ACTOR_DEFINITION) + .fetch() + .map(r -> new ActorDefinition(r.get(id), r.get(defaultVersionId))); + } + + private ActorDefinitionVersion getActorDefinitionVersion(final DSLContext ctx, final UUID versionId) { + final Field id = DSL.field("id", SQLDataType.UUID); + final Field 
actorDefinitionId = DSL.field("actor_definition_id", SQLDataType.UUID); + final Field dockerImageTag = DSL.field("docker_image_tag", SQLDataType.VARCHAR); + + return ctx.select(id, actorDefinitionId, dockerImageTag) + .from("actor_definition_version") + .where(id.eq(versionId)) + .fetchOne(r -> new ActorDefinitionVersion(r.get(id), r.get(actorDefinitionId), r.get(dockerImageTag))); + } + + private List getBreakingChangeVersionsForDefinition(final DSLContext ctx, final UUID actorDefinitionId) { + final Field actorDefinitionIdField = DSL.field("actor_definition_id", SQLDataType.UUID); + final Field version = DSL.field("version", SQLDataType.VARCHAR); + + return ctx.select(version) + .from(ACTOR_DEFINITION_BREAKING_CHANGE) + .where(actorDefinitionIdField.eq(actorDefinitionId)) + .fetch() + .map(r -> new Version(r.get(version))) + .stream().sorted(Version::versionCompareTo) + .toList(); + } + + private List getActorsNotOnDefaultVersion(final DSLContext ctx, final ActorDefinition actorDefinition) { + // Actor fields + final Field actorId = DSL.field("actor.id", SQLDataType.UUID); + final Field actorWorkspaceId = DSL.field("actor.workspace_id", SQLDataType.UUID); + final Field actorDefaultVersionId = DSL.field("actor.default_version_id", SQLDataType.UUID); + final Field actorDefinitionId = DSL.field("actor.actor_definition_id", SQLDataType.UUID); + + // Workspace fields + final Field workspaceId = DSL.field("workspace.id", SQLDataType.UUID); + final Field workspaceOrgId = DSL.field("workspace.organization_id", SQLDataType.UUID); + + return ctx.select(actorId, actorWorkspaceId, workspaceOrgId, actorDefaultVersionId) + .from(ACTOR) + .join(WORKSPACE).on(workspaceId.eq(actorWorkspaceId)) + .where(actorDefinitionId.eq(actorDefinition.actorDefinitionId)) + .and(actorDefaultVersionId.ne(actorDefinition.defaultVersionId)) + .fetch() + .map(r -> new Actor(r.get(actorId), r.get(actorWorkspaceId), r.get(workspaceOrgId), r.get(actorDefaultVersionId))); + + } + + record 
ActorDefinition(UUID actorDefinitionId, UUID defaultVersionId) {} + + record ActorDefinitionVersion(UUID versionId, UUID actorDefinitionId, String dockerImageTag) {} + + record Actor(UUID actorId, UUID workspaceId, @Nullable UUID organizationId, UUID defaultVersionId) {} + +} diff --git a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_55_1_001__AddRefreshesTable.java b/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_55_1_001__AddRefreshesTable.java new file mode 100644 index 00000000000..526e3fcfe20 --- /dev/null +++ b/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_55_1_001__AddRefreshesTable.java @@ -0,0 +1,59 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.db.instance.configs.migrations; + +import static org.jooq.impl.DSL.currentOffsetDateTime; +import static org.jooq.impl.DSL.foreignKey; +import static org.jooq.impl.DSL.primaryKey; + +import java.time.OffsetDateTime; +import java.util.UUID; +import org.flywaydb.core.api.migration.BaseJavaMigration; +import org.flywaydb.core.api.migration.Context; +import org.jooq.DSLContext; +import org.jooq.Field; +import org.jooq.impl.DSL; +import org.jooq.impl.SQLDataType; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class V0_55_1_001__AddRefreshesTable extends BaseJavaMigration { + + private static final Logger LOGGER = LoggerFactory.getLogger(V0_55_1_001__AddRefreshesTable.class); + private static final String STREAM_REFRESHES_TABLE = "stream_refreshes"; + + private static final Field connectionId = DSL.field("connection_id", SQLDataType.UUID.nullable(false)); + private static final Field streamName = DSL.field("stream_name", SQLDataType.VARCHAR.nullable(false)); + private static final Field streamNamespace = DSL.field("stream_namespace", SQLDataType.VARCHAR.nullable(true)); + + private static final Field createdAtField = + DSL.field("created_at", 
SQLDataType.TIMESTAMPWITHTIMEZONE.nullable(false).defaultValue(currentOffsetDateTime())); + + @Override + public void migrate(final Context context) throws Exception { + LOGGER.info("Running migration: {}", this.getClass().getSimpleName()); + + // Warning: please do not use any jOOQ generated code to write a migration. + // As database schema changes, the generated jOOQ code can be deprecated. So + // old migration may not compile if there is any generated code. + final DSLContext ctx = DSL.using(context.getConnection()); + createRefreshTable(ctx); + } + + static void createRefreshTable(final DSLContext ctx) { + + ctx.createTable(STREAM_REFRESHES_TABLE) + .columns(connectionId, + streamName, + streamNamespace, + createdAtField) + .constraints( + primaryKey(connectionId, streamName, streamNamespace), + foreignKey(connectionId).references("connection", "id").onDeleteCascade()) + .execute(); + ctx.createIndexIfNotExists("stream_refreshes_connection_id_idx").on(STREAM_REFRESHES_TABLE, connectionId.getName()).execute(); + } + +} diff --git a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_55_1_002__AddGenerationTable.java b/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_55_1_002__AddGenerationTable.java new file mode 100644 index 00000000000..87eefadbe1a --- /dev/null +++ b/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_55_1_002__AddGenerationTable.java @@ -0,0 +1,73 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.db.instance.configs.migrations; + +import static org.jooq.impl.DSL.currentOffsetDateTime; +import static org.jooq.impl.DSL.foreignKey; +import static org.jooq.impl.DSL.primaryKey; + +import java.time.OffsetDateTime; +import java.util.UUID; +import org.flywaydb.core.api.migration.BaseJavaMigration; +import org.flywaydb.core.api.migration.Context; +import org.jooq.DSLContext; +import org.jooq.Field; +import org.jooq.impl.DSL; +import org.jooq.impl.SQLDataType; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class V0_55_1_002__AddGenerationTable extends BaseJavaMigration { + + private static final Logger LOGGER = LoggerFactory.getLogger(V0_55_1_002__AddGenerationTable.class); + + static final String STREAM_GENERATION_TABLE_NAME = "stream_generation"; + + @Override + public void migrate(final Context context) throws Exception { + LOGGER.info("Running migration: {}", this.getClass().getSimpleName()); + + // Warning: please do not use any jOOQ generated code to write a migration. + // As database schema changes, the generated jOOQ code can be deprecated. So + // old migration may not compile if there is any generated code. 
+ final DSLContext ctx = DSL.using(context.getConnection()); + createGenerationTable(ctx); + } + + static void createGenerationTable(final DSLContext ctx) { + final Field id = DSL.field("id", SQLDataType.UUID.nullable(false)); + final Field connectionId = DSL.field("connection_id", SQLDataType.UUID.nullable(false)); + final Field streamName = DSL.field("stream_name", SQLDataType.VARCHAR.nullable(false)); + final Field streamNamespace = DSL.field("stream_namespace", SQLDataType.VARCHAR.nullable(true)); + final Field generationId = DSL.field("generation_id", SQLDataType.BIGINT.nullable(false)); + final Field startJobId = DSL.field("start_job_id", SQLDataType.BIGINT.nullable(false)); + final Field createdAt = + DSL.field("created_at", SQLDataType.TIMESTAMPWITHTIMEZONE.nullable(false).defaultValue(currentOffsetDateTime())); + final Field updatedAt = + DSL.field("updated_at", SQLDataType.TIMESTAMPWITHTIMEZONE.nullable(false).defaultValue(currentOffsetDateTime())); + + ctx.createTable(STREAM_GENERATION_TABLE_NAME) + .columns(id, + connectionId, + streamName, + streamNamespace, + generationId, + startJobId, + createdAt, + updatedAt) + .constraints( + primaryKey(id), + foreignKey(connectionId).references("connection", "id").onDeleteCascade()) + .execute(); + + final String indexCreationQuery = String.format("CREATE INDEX ON %s USING btree (%s, %s, %s DESC)", + STREAM_GENERATION_TABLE_NAME, connectionId.getName(), streamName.getName(), generationId.getName()); + final String indexCreationQuery2 = String.format("CREATE INDEX ON %s USING btree (%s, %s, %s, %s DESC)", + STREAM_GENERATION_TABLE_NAME, connectionId.getName(), streamName.getName(), streamNamespace.getName(), generationId.getName()); + ctx.execute(indexCreationQuery); + ctx.execute(indexCreationQuery2); + } + +} diff --git a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_55_1_003__EditRefreshTable.java 
b/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_55_1_003__EditRefreshTable.java new file mode 100644 index 00000000000..fbb5f641800 --- /dev/null +++ b/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_55_1_003__EditRefreshTable.java @@ -0,0 +1,73 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.db.instance.configs.migrations; + +import static org.jooq.impl.DSL.currentOffsetDateTime; +import static org.jooq.impl.DSL.foreignKey; +import static org.jooq.impl.DSL.primaryKey; + +import java.time.OffsetDateTime; +import java.util.UUID; +import org.flywaydb.core.api.migration.BaseJavaMigration; +import org.flywaydb.core.api.migration.Context; +import org.jooq.DSLContext; +import org.jooq.Field; +import org.jooq.impl.DSL; +import org.jooq.impl.SQLDataType; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class V0_55_1_003__EditRefreshTable extends BaseJavaMigration { + + private static final Logger LOGGER = LoggerFactory.getLogger(V0_55_1_003__EditRefreshTable.class); + + static final String STREAM_REFRESHES_TABLE = "stream_refreshes"; + + private static final Field connectionId = DSL.field("connection_id", SQLDataType.UUID.nullable(false)); + private static final Field streamName = DSL.field("stream_name", SQLDataType.VARCHAR.nullable(false)); + private static final Field streamNamespace = DSL.field("stream_namespace", SQLDataType.VARCHAR.nullable(true)); + + private static final Field createdAtField = + DSL.field("created_at", SQLDataType.TIMESTAMPWITHTIMEZONE.nullable(false).defaultValue(currentOffsetDateTime())); + + @Override + public void migrate(final Context context) throws Exception { + LOGGER.info("Running migration: {}", this.getClass().getSimpleName()); + + // Warning: please do not use any jOOQ generated code to write a migration. + // As database schema changes, the generated jOOQ code can be deprecated. 
So + // old migration may not compile if there is any generated code. + final DSLContext ctx = DSL.using(context.getConnection()); + editRefreshTable(ctx); + } + + static void editRefreshTable(final DSLContext ctx) { + ctx.truncate(STREAM_REFRESHES_TABLE).execute(); + ctx.dropTable(STREAM_REFRESHES_TABLE).execute(); + + final Field id = DSL.field("id", SQLDataType.UUID.nullable(false)); + ctx.createTable(STREAM_REFRESHES_TABLE) + .columns(id, + connectionId, + streamName, + streamNamespace, + createdAtField) + .constraints( + primaryKey(id), + foreignKey(connectionId).references("connection", "id").onDeleteCascade()) + .execute(); + + final String indexCreationQuery = String.format("CREATE INDEX ON %s USING btree (%s)", + STREAM_REFRESHES_TABLE, connectionId.getName()); + final String indexCreationQuery2 = String.format("CREATE INDEX ON %s USING btree (%s, %s)", + STREAM_REFRESHES_TABLE, connectionId.getName(), streamName.getName()); + final String indexCreationQuery3 = String.format("CREATE INDEX ON %s USING btree (%s, %s, %s)", + STREAM_REFRESHES_TABLE, connectionId.getName(), streamName.getName(), streamNamespace.getName()); + ctx.execute(indexCreationQuery); + ctx.execute(indexCreationQuery2); + ctx.execute(indexCreationQuery3); + } + +} diff --git a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/development/DevDatabaseMigrator.java b/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/development/DevDatabaseMigrator.java index 16242324a43..034d513031f 100644 --- a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/development/DevDatabaseMigrator.java +++ b/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/development/DevDatabaseMigrator.java @@ -17,6 +17,7 @@ import org.flywaydb.core.api.configuration.FluentConfiguration; import org.flywaydb.core.api.output.BaselineResult; import org.flywaydb.core.api.output.MigrateResult; +import org.flywaydb.database.postgresql.PostgreSQLConfigurationExtension; import org.slf4j.Logger; import 
org.slf4j.LoggerFactory; @@ -76,7 +77,7 @@ public String dumpSchema() { private static FluentConfiguration getBaselineConfig(final FlywayDatabaseMigrator fullMigrator) { final Configuration fullConfig = fullMigrator.getFlyway().getConfiguration(); - return Flyway.configure() + final var flywayConfiguration = Flyway.configure() .dataSource(fullConfig.getDataSource()) .baselineVersion(fullConfig.getBaselineVersion()) .baselineDescription(fullConfig.getBaselineDescription()) @@ -84,6 +85,14 @@ private static FluentConfiguration getBaselineConfig(final FlywayDatabaseMigrato .installedBy(fullConfig.getInstalledBy()) .table(fullConfig.getTable()) .locations(fullConfig.getLocations()); + + // Setting the transactional lock to false allows us run queries outside transactions + // without hanging. This enables creating indexes concurrently (i.e. without locking tables) + flywayConfiguration.getPluginRegister() + .getPlugin(PostgreSQLConfigurationExtension.class) + .setTransactionalLock(false); + + return flywayConfiguration; } /** diff --git a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/jobs/migrations/V0_50_4_002__AddScopeStatusCreatedAtIndex.java b/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/jobs/migrations/V0_50_4_002__AddScopeStatusCreatedAtIndex.java new file mode 100644 index 00000000000..cec6135cc20 --- /dev/null +++ b/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/jobs/migrations/V0_50_4_002__AddScopeStatusCreatedAtIndex.java @@ -0,0 +1,36 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.db.instance.jobs.migrations; + +import org.flywaydb.core.api.migration.BaseJavaMigration; +import org.flywaydb.core.api.migration.Context; +import org.jooq.DSLContext; +import org.jooq.impl.DSL; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class V0_50_4_002__AddScopeStatusCreatedAtIndex extends BaseJavaMigration { + + private static final Logger LOGGER = LoggerFactory.getLogger(V0_50_4_002__AddScopeStatusCreatedAtIndex.class); + + static final String SCOPE_STATUS_CREATED_AT_INDEX_NAME = "scope_status_created_at_idx"; + + @Override + public void migrate(final Context context) throws Exception { + LOGGER.info("Running migration: {}", this.getClass().getSimpleName()); + + final DSLContext ctx = DSL.using(context.getConnection()); + ctx.query("CREATE INDEX CONCURRENTLY IF NOT EXISTS " + SCOPE_STATUS_CREATED_AT_INDEX_NAME + " ON jobs(scope, status, created_at DESC)").execute(); + } + + // This prevents flyway from automatically wrapping the migration in a transaction. + // This is important because indexes cannot be created concurrently (i.e. without locking) from + // within a transaction. + @Override + public boolean canExecuteInTransaction() { + return false; + } + +} diff --git a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/jobs/migrations/V0_50_4_003__AddScopeCreatedAtScopeNonTerminalIndexes.java b/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/jobs/migrations/V0_50_4_003__AddScopeCreatedAtScopeNonTerminalIndexes.java new file mode 100644 index 00000000000..f269b4bdd36 --- /dev/null +++ b/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/jobs/migrations/V0_50_4_003__AddScopeCreatedAtScopeNonTerminalIndexes.java @@ -0,0 +1,46 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.db.instance.jobs.migrations; + +import static io.airbyte.db.instance.jobs.migrations.V0_50_4_002__AddScopeStatusCreatedAtIndex.SCOPE_STATUS_CREATED_AT_INDEX_NAME; + +import org.flywaydb.core.api.migration.BaseJavaMigration; +import org.flywaydb.core.api.migration.Context; +import org.jooq.DSLContext; +import org.jooq.impl.DSL; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class V0_50_4_003__AddScopeCreatedAtScopeNonTerminalIndexes extends BaseJavaMigration { + + private static final Logger LOGGER = LoggerFactory.getLogger(V0_50_4_003__AddScopeCreatedAtScopeNonTerminalIndexes.class); + + @Override + public void migrate(final Context context) throws Exception { + LOGGER.info("Running migration: {}", this.getClass().getSimpleName()); + + final DSLContext ctx = DSL.using(context.getConnection()); + // helps with the general sorting of jobs by latest per connection + ctx.query("CREATE INDEX CONCURRENTLY IF NOT EXISTS scope_created_at_idx ON jobs(scope, created_at DESC)").execute(); + + // helps for looking for active jobs + ctx.query( + "CREATE INDEX CONCURRENTLY IF NOT EXISTS scope_non_terminal_status_idx ON jobs(scope, status) " + + "WHERE status NOT IN ('failed', 'succeeded', 'cancelled')") + .execute(); + + // remove other index, as these two are more performant + ctx.query("DROP INDEX CONCURRENTLY " + SCOPE_STATUS_CREATED_AT_INDEX_NAME).execute(); + } + + // This prevents flyway from automatically wrapping the migration in a transaction. + // This is important because indexes cannot be created concurrently (i.e. without locking) from + // within a transaction. 
+ @Override + public boolean canExecuteInTransaction() { + return false; + } + +} diff --git a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/jobs/migrations/V0_57_2_001__AddRefreshJobType.java b/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/jobs/migrations/V0_57_2_001__AddRefreshJobType.java new file mode 100644 index 00000000000..707f9bc0c65 --- /dev/null +++ b/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/jobs/migrations/V0_57_2_001__AddRefreshJobType.java @@ -0,0 +1,24 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.db.instance.jobs.migrations; + +import org.flywaydb.core.api.migration.BaseJavaMigration; +import org.flywaydb.core.api.migration.Context; +import org.jooq.DSLContext; +import org.jooq.impl.DSL; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class V0_57_2_001__AddRefreshJobType extends BaseJavaMigration { + + private static final Logger LOGGER = LoggerFactory.getLogger(V0_57_2_001__AddRefreshJobType.class); + + @Override + public void migrate(Context context) throws Exception { + final DSLContext ctx = DSL.using(context.getConnection()); + ctx.alterType("job_config_type").addValue("refresh").execute(); + } + +} diff --git a/airbyte-db/db-lib/src/main/resources/configs_database/schema_dump.txt b/airbyte-db/db-lib/src/main/resources/configs_database/schema_dump.txt index 7c32e20c9b5..2fad95ffcc4 100644 --- a/airbyte-db/db-lib/src/main/resources/configs_database/schema_dump.txt +++ b/airbyte-db/db-lib/src/main/resources/configs_database/schema_dump.txt @@ -171,6 +171,15 @@ create table "public"."connection_operation" ( "updated_at" timestamp(6) with time zone not null default current_timestamp, constraint "connection_operation_pkey" primary key ("id", "connection_id", "operation_id") ); +create table "public"."connection_timeline_event" ( + "id" uuid not null, + "connection_id" uuid not null, + "user_id" uuid, + "event_type" varchar(2147483647) not null, + 
"summary" jsonb, + "created_at" timestamp(6) with time zone not null, + constraint "connection_timeline_event_pkey" primary key ("id") +); create table "public"."connector_builder_project" ( "id" uuid not null, "workspace_id" uuid not null, @@ -312,6 +321,25 @@ create table "public"."state" ( constraint "state_pkey" primary key ("id", "connection_id"), constraint "state__connection_id__stream_name__namespace__uq" unique ("connection_id", "stream_name", "namespace") ); +create table "public"."stream_generation" ( + "id" uuid not null, + "connection_id" uuid not null, + "stream_name" varchar(2147483647) not null, + "stream_namespace" varchar(2147483647), + "generation_id" bigint not null, + "start_job_id" bigint not null, + "created_at" timestamp(6) with time zone not null default current_timestamp, + "updated_at" timestamp(6) with time zone not null default current_timestamp, + constraint "stream_generation_pkey" primary key ("id") +); +create table "public"."stream_refreshes" ( + "id" uuid not null, + "connection_id" uuid not null, + "stream_name" varchar(2147483647) not null, + "stream_namespace" varchar(2147483647), + "created_at" timestamp(6) with time zone not null default current_timestamp, + constraint "stream_refreshes_pkey" primary key ("id") +); create table "public"."stream_reset" ( "id" uuid not null, "connection_id" uuid not null, @@ -348,6 +376,8 @@ create table "public"."user_invitation" ( "updated_at" timestamp(6) with time zone not null default current_timestamp, "scope_id" uuid not null, "scope_type" scope_type not null, + "accepted_by_user_id" uuid, + "expires_at" timestamp(6) not null, constraint "user_invitation_pkey" primary key ("id"), constraint "user_invitation_invite_code_key" unique ("invite_code") ); @@ -421,6 +451,7 @@ create index "connection_destination_id_idx" on "public"."connection"("destinati create index "connection_source_id_idx" on "public"."connection"("source_id" asc); create index "connection_status_idx" on 
"public"."connection"("status" asc); create index "connection_operation_connection_id_idx" on "public"."connection_operation"("connection_id" asc); +create index "idx_connection_timeline_connection_id" on "public"."connection_timeline_event"("connection_id" asc, "created_at" desc, "event_type" asc); create index "connector_builder_project_workspace_idx" on "public"."connector_builder_project"("workspace_id" asc); create index "organization_email_domain_email_domain_idx" on "public"."organization_email_domain"("email_domain" asc); create index "organization_email_domain_organization_id_idx" on "public"."organization_email_domain"("organization_id" asc); @@ -430,13 +461,22 @@ create index "permission_workspace_id_idx" on "public"."permission"("workspace_i create index "connection_idx" on "public"."schema_management"("connection_id" asc); create index "sso_config_keycloak_realm_idx" on "public"."sso_config"("keycloak_realm" asc); create index "sso_config_organization_id_idx" on "public"."sso_config"("organization_id" asc); +create index "stream_generation_connection_id_stream_name_generation_id_idx" on "public"."stream_generation"("connection_id" asc, "stream_name" asc, "generation_id" desc); +create index "stream_generation_connection_id_stream_name_stream_namespac_idx" on "public"."stream_generation"("connection_id" asc, "stream_name" asc, "stream_namespace" asc, "generation_id" desc); +create index "stream_refreshes_connection_id_idx" on "public"."stream_refreshes"("connection_id" asc); +create index "stream_refreshes_connection_id_stream_name_idx" on "public"."stream_refreshes"("connection_id" asc, "stream_name" asc); +create index "stream_refreshes_connection_id_stream_name_stream_namespace_idx" on "public"."stream_refreshes"("connection_id" asc, "stream_name" asc, "stream_namespace" asc); create index "connection_id_stream_name_namespace_idx" on "public"."stream_reset"("connection_id" asc, "stream_name" asc, "stream_namespace" asc); create index 
"user_auth_provider_auth_user_id_idx" on "public"."user"("auth_provider" asc, "auth_user_id" asc); create index "user_email_idx" on "public"."user"("email" asc); +create index "user_invitation_accepted_by_user_id_index" on "public"."user_invitation"("accepted_by_user_id" asc); +create index "user_invitation_expires_at_index" on "public"."user_invitation"("expires_at" asc); create index "user_invitation_invite_code_idx" on "public"."user_invitation"("invite_code" asc); create index "user_invitation_invited_email_idx" on "public"."user_invitation"("invited_email" asc); create index "user_invitation_scope_id_index" on "public"."user_invitation"("scope_id" asc); create index "user_invitation_scope_type_and_scope_id_index" on "public"."user_invitation"("scope_type" asc, "scope_id" asc); +create index "active_workload_by_mutex_idx" on "public"."workload"("mutex_key" asc) +where ((status = ANY (ARRAY['pending'::workload_status, 'claimed'::workload_status, 'launched'::workload_status, 'running'::workload_status]))); create index "workload_deadline_idx" on "public"."workload"("deadline" asc) where ((deadline IS NOT NULL)); create index "workload_mutex_idx" on "public"."workload"("mutex_key" asc); @@ -459,6 +499,8 @@ alter table "public"."connection" add constraint "connection_destination_id_fkey alter table "public"."connection" add constraint "connection_source_id_fkey" foreign key ("source_id") references "public"."actor" ("id"); alter table "public"."connection_operation" add constraint "connection_operation_connection_id_fkey" foreign key ("connection_id") references "public"."connection" ("id"); alter table "public"."connection_operation" add constraint "connection_operation_operation_id_fkey" foreign key ("operation_id") references "public"."operation" ("id"); +alter table "public"."connection_timeline_event" add constraint "connection_timeline_event_connection_id_fkey" foreign key ("connection_id") references "public"."connection" ("id"); +alter table 
"public"."connection_timeline_event" add constraint "connection_timeline_event_user_id_fkey" foreign key ("user_id") references "public"."user" ("id"); alter table "public"."notification_configuration" add constraint "notification_configuration_connection_id_fkey" foreign key ("connection_id") references "public"."connection" ("id"); alter table "public"."operation" add constraint "operation_workspace_id_fkey" foreign key ("workspace_id") references "public"."workspace" ("id"); alter table "public"."organization_email_domain" add constraint "organization_email_domain_organization_id_fkey" foreign key ("organization_id") references "public"."organization" ("id"); @@ -468,7 +510,10 @@ alter table "public"."permission" add constraint "permission_workspace_id_fkey" alter table "public"."schema_management" add constraint "schema_management_connection_id_fkey" foreign key ("connection_id") references "public"."connection" ("id"); alter table "public"."sso_config" add constraint "sso_config_organization_id_fkey" foreign key ("organization_id") references "public"."organization" ("id"); alter table "public"."state" add constraint "state_connection_id_fkey" foreign key ("connection_id") references "public"."connection" ("id"); +alter table "public"."stream_generation" add constraint "stream_generation_connection_id_fkey" foreign key ("connection_id") references "public"."connection" ("id"); +alter table "public"."stream_refreshes" add constraint "stream_refreshes_connection_id_fkey" foreign key ("connection_id") references "public"."connection" ("id"); alter table "public"."user" add constraint "user_default_workspace_id_fkey" foreign key ("default_workspace_id") references "public"."workspace" ("id"); +alter table "public"."user_invitation" add constraint "user_invitation_accepted_by_user_id_fkey" foreign key ("accepted_by_user_id") references "public"."user" ("id"); alter table "public"."user_invitation" add constraint "user_invitation_inviter_user_id_fkey" foreign key 
("inviter_user_id") references "public"."user" ("id"); alter table "public"."workload_label" add constraint "workload_label_workload_id_fkey" foreign key ("workload_id") references "public"."workload" ("id"); alter table "public"."workspace" add constraint "workspace_organization_id_fkey" foreign key ("organization_id") references "public"."organization" ("id"); diff --git a/airbyte-db/db-lib/src/main/resources/jobs_database/schema_dump.txt b/airbyte-db/db-lib/src/main/resources/jobs_database/schema_dump.txt index 4b2ff7dcb33..b642c9dcce0 100644 --- a/airbyte-db/db-lib/src/main/resources/jobs_database/schema_dump.txt +++ b/airbyte-db/db-lib/src/main/resources/jobs_database/schema_dump.txt @@ -127,6 +127,9 @@ create unique index "job_attempt_idx" on "public"."attempts"("job_id" asc, "atte create index "jobs_config_type_idx" on "public"."jobs"("config_type" asc); create index "jobs_scope_idx" on "public"."jobs"("scope" asc); create index "jobs_status_idx" on "public"."jobs"("status" asc); +create index "scope_created_at_idx" on "public"."jobs"("scope" asc, "created_at" desc); +create index "scope_non_terminal_status_idx" on "public"."jobs"("scope" asc, "status" asc) +where ((status <> ALL (ARRAY['failed'::job_status, 'succeeded'::job_status, 'cancelled'::job_status]))); create index "normalization_summary_attempt_id_idx" on "public"."normalization_summaries"("attempt_id" asc); create index "retry_state_connection_id_idx" on "public"."retry_states"("connection_id" asc); create index "retry_state_job_id_idx" on "public"."retry_states"("job_id" asc); diff --git a/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/configs/AbstractConfigsDatabaseTest.java b/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/configs/AbstractConfigsDatabaseTest.java index 8a22ed7fc87..38bca86a4dd 100644 --- a/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/configs/AbstractConfigsDatabaseTest.java +++ 
b/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/configs/AbstractConfigsDatabaseTest.java @@ -6,8 +6,8 @@ import io.airbyte.db.Database; import io.airbyte.db.init.DatabaseInitializationException; -import io.airbyte.db.instance.AbstractDatabaseTest; import io.airbyte.db.instance.test.TestDatabaseProviders; +import io.airbyte.test.utils.AbstractDatabaseTest; import java.io.IOException; import javax.sql.DataSource; import org.jooq.DSLContext; diff --git a/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/configs/migrations/V0_30_22_001__Store_last_sync_state_test.java b/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/configs/migrations/V0_30_22_001__Store_last_sync_state_test.java index 3bce25bf50f..a4e0079099f 100644 --- a/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/configs/migrations/V0_30_22_001__Store_last_sync_state_test.java +++ b/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/configs/migrations/V0_30_22_001__Store_last_sync_state_test.java @@ -11,10 +11,7 @@ import static io.airbyte.db.instance.configs.migrations.V0_30_22_001__Store_last_sync_state.COLUMN_UPDATED_AT; import static io.airbyte.db.instance.configs.migrations.V0_30_22_001__Store_last_sync_state.TABLE_AIRBYTE_CONFIGS; import static io.airbyte.db.instance.configs.migrations.V0_30_22_001__Store_last_sync_state.getStandardSyncState; -import static org.jooq.impl.DSL.field; -import static org.jooq.impl.DSL.table; import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertNotEquals; import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -22,9 +19,6 @@ import io.airbyte.commons.json.Jsons; import io.airbyte.config.ConfigSchema; import io.airbyte.config.Configs; -import io.airbyte.config.JobOutput; -import io.airbyte.config.JobOutput.OutputType; -import io.airbyte.config.StandardSyncOutput; import 
io.airbyte.config.StandardSyncState; import io.airbyte.config.State; import io.airbyte.db.Database; @@ -33,20 +27,13 @@ import io.airbyte.db.instance.jobs.JobsDatabaseTestProvider; import jakarta.annotation.Nullable; import java.io.IOException; -import java.sql.Connection; import java.sql.SQLException; import java.time.OffsetDateTime; import java.util.Collections; import java.util.Set; import java.util.UUID; import java.util.concurrent.TimeUnit; -import org.flywaydb.core.api.configuration.Configuration; -import org.flywaydb.core.api.migration.Context; import org.jooq.DSLContext; -import org.jooq.Field; -import org.jooq.JSONB; -import org.jooq.Table; -import org.jooq.impl.SQLDataType; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.MethodOrderer; import org.junit.jupiter.api.Order; @@ -58,27 +45,11 @@ @TestMethodOrder(MethodOrderer.OrderAnnotation.class) class V0_30_22_001__Store_last_sync_state_test extends AbstractConfigsDatabaseTest { - private static final OffsetDateTime TIMESTAMP = OffsetDateTime.now(); - - private static final Table JOBS_TABLE = table("jobs"); - private static final Field JOB_ID_FIELD = field("id", SQLDataType.BIGINT); - private static final Field JOB_SCOPE_FIELD = field("scope", SQLDataType.VARCHAR); - private static final Field JOB_CREATED_AT_FIELD = field("created_at", SQLDataType.TIMESTAMPWITHTIMEZONE); - - private static final Table ATTEMPTS_TABLE = table("attempts"); - private static final Field ATTEMPT_ID_FIELD = field("id", SQLDataType.BIGINT); - private static final Field ATTEMPT_JOB_ID_FIELD = field("job_id", SQLDataType.BIGINT); - private static final Field ATTEMPT_NUMBER_FIELD = field("attempt_number", SQLDataType.INTEGER); - private static final Field ATTEMPT_OUTPUT_FIELD = field("output", SQLDataType.JSONB); - private static final Field ATTEMPT_CREATED_AT_FIELD = field("created_at", SQLDataType.TIMESTAMPWITHTIMEZONE); - - private static final UUID CONNECTION_1_ID = UUID.randomUUID(); private static final 
UUID CONNECTION_2_ID = UUID.randomUUID(); private static final UUID CONNECTION_3_ID = UUID.randomUUID(); private static final State CONNECTION_2_STATE = Jsons.deserialize("{ \"state\": { \"cursor\": 2222 } }", State.class); private static final State CONNECTION_3_STATE = Jsons.deserialize("{ \"state\": { \"cursor\": 3333 } }", State.class); - private static final State CONNECTION_OLD_STATE = Jsons.deserialize("{ \"state\": { \"cursor\": -1 } }", State.class); private static final StandardSyncState STD_CONNECTION_STATE_2 = getStandardSyncState(CONNECTION_2_ID, CONNECTION_2_STATE); private static final StandardSyncState STD_CONNECTION_STATE_3 = getStandardSyncState(CONNECTION_3_ID, CONNECTION_3_STATE); @@ -108,43 +79,6 @@ void testGetJobsDatabase() { .getJobsDatabase(configs.getDatabaseUser(), configs.getDatabasePassword(), configs.getDatabaseUrl()).isPresent()); } - @Test - @Order(20) - void testGetStandardSyncStates() throws Exception { - jobDatabase.query(ctx -> { - // Connection 1 has 1 job, no attempt. - // This is to test that connection without no state is not returned. - createJob(ctx, CONNECTION_1_ID, 30); - - // Connection 2 has two jobs, each has one attempt. - // This is to test that only the state from the latest job is returned. - final long job21 = createJob(ctx, CONNECTION_2_ID, 10); - final long job22 = createJob(ctx, CONNECTION_2_ID, 20); - assertNotEquals(job21, job22); - createAttempt(ctx, job21, 1, createAttemptOutput(CONNECTION_OLD_STATE), 11); - createAttempt(ctx, job22, 1, createAttemptOutput(CONNECTION_2_STATE), 21); - - // Connection 3 has two jobs. - // The first job has multiple attempts. Its third attempt has the latest state. - // The second job has two attempts with no state. - // This is to test that only the state from the latest attempt is returned. 
- final long job31 = createJob(ctx, CONNECTION_3_ID, 5); - final long job32 = createJob(ctx, CONNECTION_3_ID, 15); - assertNotEquals(job31, job32); - createAttempt(ctx, job31, 1, createAttemptOutput(CONNECTION_OLD_STATE), 6); - createAttempt(ctx, job31, 2, null, 7); - createAttempt(ctx, job31, 3, createAttemptOutput(CONNECTION_3_STATE), 8); - createAttempt(ctx, job31, 4, null, 9); - createAttempt(ctx, job31, 5, null, 10); - createAttempt(ctx, job32, 1, null, 20); - createAttempt(ctx, job32, 2, null, 25); - - assertEquals(STD_CONNECTION_STATES, V0_30_22_001__Store_last_sync_state.getStandardSyncStates(jobDatabase)); - - return null; - }); - } - @Test @Order(30) void testCopyData() throws SQLException { @@ -175,100 +109,6 @@ void testCopyData() throws SQLException { }); } - /** - * Clear the table and test the migration end-to-end. - */ - @Test - @Order(40) - void testMigration() throws Exception { - jobDatabase.query(ctx -> ctx.deleteFrom(TABLE_AIRBYTE_CONFIGS) - .where(COLUMN_CONFIG_TYPE.eq(ConfigSchema.STANDARD_SYNC_STATE.name())) - .execute()); - - final var migration = new V0_30_22_001__Store_last_sync_state(); - // this context is a flyway class - final Context context = new Context() { - - @Override - public Configuration getConfiguration() { - final Configuration configuration = mock(Configuration.class); - when(configuration.getUser()).thenReturn(container.getUsername()); - when(configuration.getPassword()).thenReturn(container.getPassword()); - when(configuration.getUrl()).thenReturn(container.getJdbcUrl()); - return configuration; - } - - @Override - public Connection getConnection() { - try { - return dataSource.getConnection(); - } catch (final SQLException e) { - throw new RuntimeException(e); - } - } - - }; - migration.migrate(context); - jobDatabase.query(ctx -> { - checkSyncStates(ctx, STD_CONNECTION_STATES, null); - return null; - }); - } - - /** - * Create a job record whose scope equals to the passed in connection id, and return the job id. 
- * - * @param creationOffset Set the creation timestamp to {@code TIMESTAMP} + this passed in offset. - */ - private static long createJob(final DSLContext ctx, final UUID connectionId, final long creationOffset) { - final int insertCount = ctx.insertInto(JOBS_TABLE) - .set(JOB_SCOPE_FIELD, connectionId.toString()) - .set(JOB_CREATED_AT_FIELD, TIMESTAMP.plusDays(creationOffset)) - .execute(); - assertEquals(1, insertCount); - - return ctx.select(JOB_ID_FIELD) - .from(JOBS_TABLE) - .where(JOB_SCOPE_FIELD.eq(connectionId.toString())) - .orderBy(JOB_CREATED_AT_FIELD.desc()) - .limit(1) - .fetchOne() - .get(JOB_ID_FIELD); - } - - /* - * @param creationOffset Set the creation timestamp to {@code TIMESTAMP} + this passed in offset. - */ - private static void createAttempt(final DSLContext ctx, - final long jobId, - final int attemptNumber, - final JobOutput attemptOutput, - final long creationOffset) { - final int insertCount = ctx.insertInto(ATTEMPTS_TABLE) - .set(ATTEMPT_JOB_ID_FIELD, jobId) - .set(ATTEMPT_NUMBER_FIELD, attemptNumber) - .set(ATTEMPT_OUTPUT_FIELD, JSONB.valueOf(Jsons.serialize(attemptOutput))) - .set(ATTEMPT_CREATED_AT_FIELD, TIMESTAMP.plusDays(creationOffset)) - .execute(); - assertEquals(1, insertCount); - - ctx.select(ATTEMPT_ID_FIELD) - .from(ATTEMPTS_TABLE) - .where(ATTEMPT_JOB_ID_FIELD.eq(jobId), ATTEMPT_NUMBER_FIELD.eq(attemptNumber)) - .fetchOne() - .get(ATTEMPT_ID_FIELD); - } - - /** - * Create an JobOutput object whose output type is StandardSyncOutput. - * - * @param state The state object within a StandardSyncOutput. 
- */ - private static JobOutput createAttemptOutput(final State state) { - final StandardSyncOutput standardSyncOutput = new StandardSyncOutput().withState(state); - return new JobOutput().withOutputType(OutputType.SYNC).withSync(standardSyncOutput); - } - private static void checkSyncStates(final DSLContext ctx, final Set standardSyncStates, @Nullable final OffsetDateTime expectedTimestamp) { diff --git a/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/configs/migrations/V0_50_41_009__AddBreakingChangeConfigOriginTest.java b/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/configs/migrations/V0_50_41_009__AddBreakingChangeConfigOriginTest.java new file mode 100644 index 00000000000..e58355fbe4f --- /dev/null +++ b/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/configs/migrations/V0_50_41_009__AddBreakingChangeConfigOriginTest.java @@ -0,0 +1,74 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.db.instance.configs.migrations; + +import static org.junit.jupiter.api.Assertions.assertThrows; + +import io.airbyte.db.factory.FlywayFactory; +import io.airbyte.db.instance.configs.AbstractConfigsDatabaseTest; +import io.airbyte.db.instance.configs.ConfigsDatabaseMigrator; +import io.airbyte.db.instance.configs.migrations.V0_50_33_014__AddScopedConfigurationTable.ConfigResourceType; +import io.airbyte.db.instance.configs.migrations.V0_50_33_014__AddScopedConfigurationTable.ConfigScopeType; +import io.airbyte.db.instance.configs.migrations.V0_50_41_009__AddBreakingChangeConfigOrigin.ConfigOriginType; +import io.airbyte.db.instance.development.DevDatabaseMigrator; +import java.util.UUID; +import org.flywaydb.core.Flyway; +import org.flywaydb.core.api.migration.BaseJavaMigration; +import org.jooq.DSLContext; +import org.jooq.impl.DSL; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +public class V0_50_41_009__AddBreakingChangeConfigOriginTest extends AbstractConfigsDatabaseTest 
{ + + @BeforeEach + void beforeEach() { + final Flyway flyway = + FlywayFactory.create(dataSource, "V0_50_41_009__AddBreakingChangeConfigOriginTest", ConfigsDatabaseMigrator.DB_IDENTIFIER, + ConfigsDatabaseMigrator.MIGRATION_FILE_LOCATION); + final ConfigsDatabaseMigrator configsDbMigrator = new ConfigsDatabaseMigrator(database, flyway); + + final BaseJavaMigration previousMigration = new V0_50_41_009__AddBreakingChangeConfigOrigin(); + final DevDatabaseMigrator devConfigsDbMigrator = new DevDatabaseMigrator(configsDbMigrator, previousMigration.getVersion()); + devConfigsDbMigrator.createBaseline(); + } + + private static void insertConfigWithOriginType( + final DSLContext ctx, + final ConfigOriginType originType) { + ctx.insertInto(DSL.table("scoped_configuration")) + .columns( + DSL.field("id"), + DSL.field("key"), + DSL.field("resource_type"), + DSL.field("resource_id"), + DSL.field("scope_type"), + DSL.field("scope_id"), + DSL.field("value"), + DSL.field("origin_type"), + DSL.field("origin")) + .values( + UUID.randomUUID(), + "some_key", + ConfigResourceType.ACTOR_DEFINITION, + UUID.randomUUID(), + ConfigScopeType.ACTOR, + UUID.randomUUID(), + "my_value", + originType, + "origin_ref") + .execute(); + } + + @Test + void testBreakingChangeOriginScopedConfig() { + final DSLContext ctx = getDslContext(); + + insertConfigWithOriginType(ctx, ConfigOriginType.BREAKING_CHANGE); // does not throw + + assertThrows(IllegalArgumentException.class, () -> insertConfigWithOriginType(ctx, ConfigOriginType.valueOf("foo"))); + } + +} diff --git a/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/configs/migrations/V0_50_41_012__BreakingChangePinDataMigrationTest.java b/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/configs/migrations/V0_50_41_012__BreakingChangePinDataMigrationTest.java new file mode 100644 index 00000000000..a20526534c5 --- /dev/null +++ 
b/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/configs/migrations/V0_50_41_012__BreakingChangePinDataMigrationTest.java @@ -0,0 +1,279 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.db.instance.configs.migrations; + +import io.airbyte.db.factory.FlywayFactory; +import io.airbyte.db.instance.configs.AbstractConfigsDatabaseTest; +import io.airbyte.db.instance.configs.ConfigsDatabaseMigrator; +import io.airbyte.db.instance.configs.migrations.V0_32_8_001__AirbyteConfigDatabaseDenormalization.ActorType; +import io.airbyte.db.instance.configs.migrations.V0_50_33_014__AddScopedConfigurationTable.ConfigResourceType; +import io.airbyte.db.instance.configs.migrations.V0_50_33_014__AddScopedConfigurationTable.ConfigScopeType; +import io.airbyte.db.instance.configs.migrations.V0_50_41_006__AlterSupportLevelAddArchived.SupportLevel; +import io.airbyte.db.instance.configs.migrations.V0_50_41_009__AddBreakingChangeConfigOrigin.ConfigOriginType; +import io.airbyte.db.instance.development.DevDatabaseMigrator; +import java.sql.Date; +import java.time.LocalDate; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.UUID; +import java.util.stream.Stream; +import javax.annotation.Nullable; +import org.flywaydb.core.Flyway; +import org.flywaydb.core.api.migration.BaseJavaMigration; +import org.jooq.DSLContext; +import org.jooq.JSONB; +import org.jooq.impl.DSL; +import org.jooq.impl.SQLDataType; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; + +public class V0_50_41_012__BreakingChangePinDataMigrationTest extends AbstractConfigsDatabaseTest { + + private V0_50_41_012__BreakingChangePinDataMigration migration; + + @BeforeEach + void beforeEach() { + final Flyway flyway = + FlywayFactory.create(dataSource, 
"V0_50_41_012__BreakingChangePinDataMigrationTest", ConfigsDatabaseMigrator.DB_IDENTIFIER, + ConfigsDatabaseMigrator.MIGRATION_FILE_LOCATION); + final ConfigsDatabaseMigrator configsDbMigrator = new ConfigsDatabaseMigrator(database, flyway); + + final BaseJavaMigration previousMigration = new V0_50_41_009__AddBreakingChangeConfigOrigin(); + final DevDatabaseMigrator devConfigsDbMigrator = new DevDatabaseMigrator(configsDbMigrator, previousMigration.getVersion()); + devConfigsDbMigrator.createBaseline(); + + migration = new V0_50_41_012__BreakingChangePinDataMigration(); + } + + static Stream testMethodSource() { + return Stream.of( + // Already on latest (3.1.0), no BC pin + Arguments.of("3.1.0", List.of(), null), + + // Held back on an older version should create pin with correct BC as origin + Arguments.of("0.1.0", List.of(), "1.0.0"), + Arguments.of("1.0.0", List.of(), "2.0.0"), + + // Actors already pinned (at any level) should be ignored + Arguments.of("1.0.0", List.of(ConfigScopeType.ACTOR), null), + Arguments.of("1.0.0", List.of(ConfigScopeType.WORKSPACE), null), + Arguments.of("1.0.0", List.of(ConfigScopeType.ORGANIZATION), null), + Arguments.of("1.0.0", List.of(ConfigScopeType.ACTOR, ConfigScopeType.WORKSPACE, ConfigScopeType.ORGANIZATION), null)); + } + + @ParameterizedTest + @MethodSource("testMethodSource") + void testBreakingChangeOriginScopedConfig(final String actorVersion, + final List existingConfigScopes, + @Nullable final String expectedBCOrigin) { + final DSLContext ctx = getDslContext(); + + // ignore all foreign key constraints + ctx.execute("SET session_replication_role = replica;"); + + final UUID actorDefinitionId = UUID.randomUUID(); + createActorDefinition(ctx, actorDefinitionId); + + final UUID defaultVersionId = UUID.randomUUID(); + final String defaultVersionTag = "3.1.0"; + createActorDefinitionVersion(ctx, defaultVersionId, actorDefinitionId, defaultVersionTag); + setActorDefinitionDefaultVersion(ctx, actorDefinitionId, 
defaultVersionId); + + UUID actorVersionId = defaultVersionId; + if (!actorVersion.equals(defaultVersionTag)) { + actorVersionId = UUID.randomUUID(); + createActorDefinitionVersion(ctx, actorVersionId, actorDefinitionId, actorVersion); + } + + final UUID workspaceId = UUID.randomUUID(); + final UUID organizationId = UUID.randomUUID(); + createWorkspace(ctx, workspaceId, organizationId); + + final UUID actorId = UUID.randomUUID(); + createActor(ctx, actorId, workspaceId, actorDefinitionId, actorVersionId); + + for (final ConfigScopeType existingConfigScope : existingConfigScopes) { + final UUID scopeId; + switch (existingConfigScope) { + case ACTOR -> scopeId = actorId; + case WORKSPACE -> scopeId = workspaceId; + case ORGANIZATION -> scopeId = organizationId; + default -> throw new IllegalArgumentException("Unexpected config scope type: " + existingConfigScope); + } + createScopedConfig(ctx, actorDefinitionId, existingConfigScope, scopeId, ConfigOriginType.USER, "userId", actorVersion); + } + + final List breakingChanges = List.of("1.0.0", "2.0.0", "3.0.0"); + for (final String breakingChange : breakingChanges) { + createBreakingChange(ctx, actorDefinitionId, breakingChange); + } + + // run migration + migration.migrateBreakingChangePins(ctx); + + // get pin and assert it's correct + final Optional> scopedConfig = getScopedConfig(ctx, actorDefinitionId, actorId); + if (expectedBCOrigin == null) { + assert (scopedConfig.isEmpty()); + } else { + assert (scopedConfig.isPresent()); + assert (scopedConfig.get().get("value").equals(actorVersionId.toString())); + assert (scopedConfig.get().get("origin").equals(expectedBCOrigin)); + } + + } + + private static void createActorDefinition(final DSLContext ctx, final UUID actorDefinitionId) { + ctx.insertInto(DSL.table("actor_definition")) + .columns( + DSL.field("id"), + DSL.field("name"), + DSL.field("actor_type")) + .values( + actorDefinitionId, + "postgres", + ActorType.source) + .execute(); + } + + private static void 
setActorDefinitionDefaultVersion(final DSLContext ctx, final UUID actorDefinitionId, final UUID defaultVersionId) { + ctx.update(DSL.table("actor_definition")) + .set(DSL.field("default_version_id"), defaultVersionId) + .where(DSL.field("id").eq(actorDefinitionId)) + .execute(); + } + + private static void createActorDefinitionVersion(final DSLContext ctx, + final UUID actorDefinitionVersionId, + final UUID actorDefinitionId, + final String version) { + ctx.insertInto(DSL.table("actor_definition_version")) + .columns( + DSL.field("id"), + DSL.field("actor_definition_id"), + DSL.field("docker_repository"), + DSL.field("docker_image_tag"), + DSL.field("support_level"), + DSL.field("spec", SQLDataType.JSONB)) + .values( + actorDefinitionVersionId, + actorDefinitionId, + "airbyte/postgres", + version, + SupportLevel.community, + JSONB.valueOf("{}")) + .execute(); + } + + private static void createActor(final DSLContext ctx, + final UUID actorId, + final UUID workspaceId, + final UUID actorDefinitionId, + final UUID defaultVersionId) { + ctx.insertInto(DSL.table("actor")) + .columns( + DSL.field("id"), + DSL.field("name"), + DSL.field("actor_type"), + DSL.field("workspace_id"), + DSL.field("actor_definition_id"), + DSL.field("default_version_id"), + DSL.field("configuration", SQLDataType.JSONB)) + .values( + actorId, + "postgres", + ActorType.source, + workspaceId, + actorDefinitionId, + defaultVersionId, + JSONB.valueOf("{}")) + .execute(); + } + + private static void createWorkspace(final DSLContext ctx, final UUID workspaceId, final UUID organizationId) { + ctx.insertInto(DSL.table("workspace")) + .columns( + DSL.field("id"), + DSL.field("name"), + DSL.field("slug"), + DSL.field("initial_setup_complete"), + DSL.field("organization_id")) + .values( + workspaceId, + "workspace", + "workspace", + true, + organizationId) + .execute(); + } + + private static void createScopedConfig( + final DSLContext ctx, + final UUID actorDefinitionId, + final ConfigScopeType scopeType, 
+ final UUID scopeId, + final ConfigOriginType originType, + final String origin, + final String value) { + ctx.insertInto(DSL.table("scoped_configuration")) + .columns( + DSL.field("id"), + DSL.field("key"), + DSL.field("resource_type"), + DSL.field("resource_id"), + DSL.field("scope_type"), + DSL.field("scope_id"), + DSL.field("value"), + DSL.field("origin_type"), + DSL.field("origin")) + .values( + UUID.randomUUID(), + "connector_version", + ConfigResourceType.ACTOR_DEFINITION, + actorDefinitionId, + scopeType, + scopeId, + value, + originType, + origin) + .execute(); + } + + private static Optional> getScopedConfig(final DSLContext ctx, final UUID actorDefinitionId, final UUID scopeId) { + return ctx.select(DSL.field("value"), DSL.field("origin")) + .from(DSL.table("scoped_configuration")) + .where(DSL.field("resource_type").eq(ConfigResourceType.ACTOR_DEFINITION) + .and(DSL.field("resource_id").eq(actorDefinitionId)) + .and(DSL.field("scope_type").eq(ConfigScopeType.ACTOR)) + .and(DSL.field("scope_id").eq(scopeId)) + .and(DSL.field("origin_type").eq(ConfigOriginType.BREAKING_CHANGE))) + .fetchOptional() + .map(r -> r + .map(record -> Map.of( + "value", record.get(DSL.field("value", String.class)), + "origin", record.get(DSL.field("origin", String.class))))); + } + + private static void createBreakingChange(final DSLContext ctx, final UUID actorDefinitionId, final String version) { + ctx.insertInto(DSL.table("actor_definition_breaking_change")) + .columns( + DSL.field("actor_definition_id"), + DSL.field("version"), + DSL.field("migration_documentation_url"), + DSL.field("message"), + DSL.field("upgrade_deadline", SQLDataType.DATE)) + .values( + actorDefinitionId, + version, + "https://docs.airbyte.io/", + "Breaking change", + Date.valueOf(LocalDate.now())) + .execute(); + } + +} diff --git a/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/configs/migrations/V0_55_1_002__AddGenerationTableTest.java 
b/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/configs/migrations/V0_55_1_002__AddGenerationTableTest.java new file mode 100644 index 00000000000..fb1f4608fa2 --- /dev/null +++ b/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/configs/migrations/V0_55_1_002__AddGenerationTableTest.java @@ -0,0 +1,77 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.db.instance.configs.migrations; + +import static io.airbyte.db.instance.configs.migrations.V0_55_1_002__AddGenerationTable.STREAM_GENERATION_TABLE_NAME; +import static org.jooq.impl.DSL.field; +import static org.jooq.impl.DSL.table; +import static org.junit.jupiter.api.Assertions.*; + +import io.airbyte.db.factory.FlywayFactory; +import io.airbyte.db.instance.configs.AbstractConfigsDatabaseTest; +import io.airbyte.db.instance.configs.ConfigsDatabaseMigrator; +import io.airbyte.db.instance.development.DevDatabaseMigrator; +import java.util.Set; +import java.util.stream.Collectors; +import org.flywaydb.core.Flyway; +import org.flywaydb.core.api.migration.BaseJavaMigration; +import org.jooq.DSLContext; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +public class V0_55_1_002__AddGenerationTableTest extends AbstractConfigsDatabaseTest { + + @BeforeEach + void beforeEach() { + final Flyway flyway = + FlywayFactory.create(dataSource, "V0_55_1_001__AddRefreshesTable", ConfigsDatabaseMigrator.DB_IDENTIFIER, + ConfigsDatabaseMigrator.MIGRATION_FILE_LOCATION); + final ConfigsDatabaseMigrator configsDbMigrator = new ConfigsDatabaseMigrator(database, flyway); + + final BaseJavaMigration previousMigration = new V0_55_1_001__AddRefreshesTable(); + final DevDatabaseMigrator devConfigsDbMigrator = new DevDatabaseMigrator(configsDbMigrator, previousMigration.getVersion()); + devConfigsDbMigrator.createBaseline(); + } + + @Test + public void test() { + final DSLContext dslContext = getDslContext(); + final boolean tableExists = 
generationTableExists(dslContext); + + assertFalse(tableExists); + + V0_55_1_002__AddGenerationTable.createGenerationTable(dslContext); + + final boolean tableExistsPostMigration = generationTableExists(dslContext); + + assertTrue(tableExistsPostMigration); + + final Set index = dslContext.select() + .from(table("pg_indexes")) + .where(field("tablename").eq(STREAM_GENERATION_TABLE_NAME)) + .fetch() + .stream() + .map(c -> c.getValue("indexdef", String.class)) + .collect(Collectors.toSet()); + assertEquals(3, index.size()); + assertTrue(index.contains("CREATE UNIQUE INDEX stream_generation_pkey ON public.stream_generation USING btree (id)")); + assertTrue(index.contains( + "CREATE INDEX stream_generation_connection_id_stream_name_generation_id_idx " + + "ON public.stream_generation USING btree (connection_id, stream_name, generation_id DESC)")); + assertTrue(index.contains( + "CREATE INDEX stream_generation_connection_id_stream_name_stream_namespac_idx ON public.stream_generation " + + "USING btree (connection_id, stream_name, stream_namespace, generation_id DESC)")); + } + + private static boolean generationTableExists(final DSLContext dslContext) { + final int size = dslContext.select() + .from(table("pg_tables")) + .where(field("tablename").eq(STREAM_GENERATION_TABLE_NAME)) + .fetch() + .size(); + return size > 0; + } + +} diff --git a/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/configs/migrations/V0_55_1_003__EditRefreshTableTest.java b/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/configs/migrations/V0_55_1_003__EditRefreshTableTest.java new file mode 100644 index 00000000000..e195a13501a --- /dev/null +++ b/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/configs/migrations/V0_55_1_003__EditRefreshTableTest.java @@ -0,0 +1,63 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.db.instance.configs.migrations; + +import static io.airbyte.db.instance.configs.migrations.V0_55_1_003__EditRefreshTable.STREAM_REFRESHES_TABLE; +import static org.jooq.impl.DSL.field; +import static org.jooq.impl.DSL.table; +import static org.junit.jupiter.api.Assertions.*; +import static org.junit.jupiter.api.Assertions.assertTrue; + +import io.airbyte.db.factory.FlywayFactory; +import io.airbyte.db.instance.configs.AbstractConfigsDatabaseTest; +import io.airbyte.db.instance.configs.ConfigsDatabaseMigrator; +import io.airbyte.db.instance.development.DevDatabaseMigrator; +import java.util.Set; +import java.util.stream.Collectors; +import org.flywaydb.core.Flyway; +import org.flywaydb.core.api.migration.BaseJavaMigration; +import org.jooq.DSLContext; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +public class V0_55_1_003__EditRefreshTableTest extends AbstractConfigsDatabaseTest { + + @BeforeEach + void beforeEach() { + final Flyway flyway = + FlywayFactory.create(dataSource, "V0_55_1_002__AddGenerationTable", ConfigsDatabaseMigrator.DB_IDENTIFIER, + ConfigsDatabaseMigrator.MIGRATION_FILE_LOCATION); + final ConfigsDatabaseMigrator configsDbMigrator = new ConfigsDatabaseMigrator(database, flyway); + + final BaseJavaMigration previousMigration = new V0_55_1_002__AddGenerationTable(); + final DevDatabaseMigrator devConfigsDbMigrator = new DevDatabaseMigrator(configsDbMigrator, previousMigration.getVersion()); + devConfigsDbMigrator.createBaseline(); + } + + @Test + public void test() { + final DSLContext dslContext = getDslContext(); + V0_55_1_003__EditRefreshTable.editRefreshTable(dslContext); + final Set index = dslContext.select() + .from(table("pg_indexes")) + .where(field("tablename").eq(STREAM_REFRESHES_TABLE)) + .fetch() + .stream() + .map(c -> c.getValue("indexdef", String.class)) + .collect(Collectors.toSet()); + assertEquals(4, index.size()); + assertTrue(index.contains( + "CREATE UNIQUE INDEX 
stream_refreshes_pkey ON public.stream_refreshes USING btree (id)")); + assertTrue(index.contains( + "CREATE INDEX stream_refreshes_connection_id_idx ON public.stream_refreshes USING btree (connection_id)")); + assertTrue(index.contains( + "CREATE INDEX stream_refreshes_connection_id_stream_name_idx ON public.stream_refreshes " + + "USING btree (connection_id, stream_name)")); + assertTrue(index.contains( + "CREATE INDEX stream_refreshes_connection_id_stream_name_stream_namespace_idx ON public.stream_refreshes" + + " USING btree (connection_id, stream_name, stream_namespace)")); + } + +} diff --git a/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/jobs/AbstractJobsDatabaseTest.java b/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/jobs/AbstractJobsDatabaseTest.java index 5796e03d2cc..28ab26442a8 100644 --- a/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/jobs/AbstractJobsDatabaseTest.java +++ b/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/jobs/AbstractJobsDatabaseTest.java @@ -6,8 +6,8 @@ import io.airbyte.db.Database; import io.airbyte.db.init.DatabaseInitializationException; -import io.airbyte.db.instance.AbstractDatabaseTest; import io.airbyte.db.instance.test.TestDatabaseProviders; +import io.airbyte.test.utils.AbstractDatabaseTest; import java.io.IOException; import javax.sql.DataSource; import org.jooq.DSLContext; diff --git a/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/toys/ToysDatabaseMigratorTest.java b/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/toys/ToysDatabaseMigratorTest.java index 34d520c50cd..c484163ef7f 100644 --- a/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/toys/ToysDatabaseMigratorTest.java +++ b/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/toys/ToysDatabaseMigratorTest.java @@ -11,9 +11,9 @@ import io.airbyte.db.check.DatabaseAvailabilityCheck; import io.airbyte.db.factory.FlywayFactory; import io.airbyte.db.init.DatabaseInitializationException; -import 
io.airbyte.db.instance.AbstractDatabaseTest; import io.airbyte.db.instance.DatabaseConstants; import io.airbyte.db.instance.DatabaseMigrator; +import io.airbyte.test.utils.AbstractDatabaseTest; import java.io.IOException; import javax.sql.DataSource; import org.flywaydb.core.Flyway; diff --git a/airbyte-db/jooq/build.gradle.kts b/airbyte-db/jooq/build.gradle.kts index 8de680c5d64..00684268d53 100644 --- a/airbyte-db/jooq/build.gradle.kts +++ b/airbyte-db/jooq/build.gradle.kts @@ -1,114 +1,114 @@ import nu.studer.gradle.jooq.JooqGenerate plugins { - id("io.airbyte.gradle.jvm.lib") - id("io.airbyte.gradle.publish") - alias(libs.plugins.nu.studer.jooq) + id("io.airbyte.gradle.jvm.lib") + id("io.airbyte.gradle.publish") + alias(libs.plugins.nu.studer.jooq) } configurations.all { - resolutionStrategy { - force(libs.platform.testcontainers.postgresql) - } + resolutionStrategy { + force(libs.platform.testcontainers.postgresql) + } } dependencies { - implementation(libs.jooq.meta) - implementation(libs.jooq) - implementation(libs.postgresql) - implementation(libs.bundles.flyway) - implementation(project(":airbyte-db:db-lib")) + implementation(libs.jooq.meta) + implementation(libs.jooq) + implementation(libs.postgresql) + implementation(libs.bundles.flyway) + implementation(project(":airbyte-db:db-lib")) - // jOOQ code generation) - implementation(libs.jooq.codegen) - implementation(libs.platform.testcontainers.postgresql) + // jOOQ code generation) + implementation(libs.jooq.codegen) + implementation(libs.platform.testcontainers.postgresql) - // These are required because gradle might be using lower version of Jna from other - // library transitive dependency. Can be removed if we can figure out which library is the cause. 
- // Refer: https://github.com/testcontainers/testcontainers-java/issues/3834#issuecomment-825409079 - implementation(libs.jna) - implementation(libs.jna.platform) + // These are required because gradle might be using lower version of Jna from other + // library transitive dependency. Can be removed if we can figure out which library is the cause. + // Refer: https://github.com/testcontainers/testcontainers-java/issues/3834#issuecomment-825409079 + implementation(libs.jna) + implementation(libs.jna.platform) - // The jOOQ code generator(only has access to classes added to the jooqGenerator configuration - jooqGenerator(project(":airbyte-db:db-lib")) { - isTransitive = false - } - jooqGenerator(project(":airbyte-commons")) { - isTransitive = false - } - jooqGenerator(project(":airbyte-config:config-models")) { - isTransitive = false - } - jooqGenerator(libs.bundles.flyway) - jooqGenerator(libs.guava) - jooqGenerator(libs.hikaricp) - jooqGenerator(libs.jackson.datatype) - jooqGenerator(libs.postgresql) - jooqGenerator(libs.slf4j.simple) - jooqGenerator(libs.platform.testcontainers.postgresql) + // The jOOQ code generator(only has access to classes added to the jooqGenerator configuration + jooqGenerator(project(":airbyte-db:db-lib")) { + isTransitive = false + } + jooqGenerator(project(":airbyte-commons")) { + isTransitive = false + } + jooqGenerator(project(":airbyte-config:config-models")) { + isTransitive = false + } + jooqGenerator(libs.bundles.flyway) + jooqGenerator(libs.guava) + jooqGenerator(libs.hikaricp) + jooqGenerator(libs.jackson.datatype) + jooqGenerator(libs.postgresql) + jooqGenerator(libs.slf4j.simple) + jooqGenerator(libs.platform.testcontainers.postgresql) } jooq { - version = libs.versions.jooq - edition = nu.studer.gradle.jooq.JooqEdition.OSS + version = libs.versions.jooq + edition = nu.studer.gradle.jooq.JooqEdition.OSS - configurations { - create("configsDatabase") { - generateSchemaSourceOnCompilation = true - jooqConfiguration.apply { - 
generator.apply { - name = "org.jooq.codegen.DefaultGenerator" - database.apply { - name = "io.airbyte.db.instance.configs.ConfigsFlywayMigrationDatabase" - inputSchema = "public" - excludes = "airbyte_configs_migrations" - } - target.apply { - packageName = "io.airbyte.db.instance.configs.jooq.generated" - directory = "build/generated/configsDatabase/src/main/java" - } - } - } + configurations { + create("configsDatabase") { + generateSchemaSourceOnCompilation = true + jooqConfiguration.apply { + generator.apply { + name = "org.jooq.codegen.DefaultGenerator" + database.apply { + name = "io.airbyte.db.instance.configs.ConfigsFlywayMigrationDatabase" + inputSchema = "public" + excludes = "airbyte_configs_migrations" + } + target.apply { + packageName = "io.airbyte.db.instance.configs.jooq.generated" + directory = "build/generated/configsDatabase/src/main/java" + } } + } + } - create("jobsDatabase") { - generateSchemaSourceOnCompilation = true - jooqConfiguration.apply { - generator.apply { - name = "org.jooq.codegen.DefaultGenerator" - database.apply { - name = "io.airbyte.db.instance.jobs.JobsFlywayMigrationDatabase" - inputSchema = "public" - excludes = "airbyte_jobs_migrations" - } - target.apply { - packageName = "io.airbyte.db.instance.jobs.jooq.generated" - directory = "build/generated/jobsDatabase/src/main/java" - } - } - } + create("jobsDatabase") { + generateSchemaSourceOnCompilation = true + jooqConfiguration.apply { + generator.apply { + name = "org.jooq.codegen.DefaultGenerator" + database.apply { + name = "io.airbyte.db.instance.jobs.JobsFlywayMigrationDatabase" + inputSchema = "public" + excludes = "airbyte_jobs_migrations" + } + target.apply { + packageName = "io.airbyte.db.instance.jobs.jooq.generated" + directory = "build/generated/jobsDatabase/src/main/java" + } } + } } + } } sourceSets["main"].java { - srcDirs( - tasks.named("generateConfigsDatabaseJooq").flatMap { it.outputDir }, - tasks.named("generateJobsDatabaseJooq").flatMap { it.outputDir }, 
- ) + srcDirs( + tasks.named("generateConfigsDatabaseJooq").flatMap { it.outputDir }, + tasks.named("generateJobsDatabaseJooq").flatMap { it.outputDir }, + ) } sourceSets["main"].java { - srcDirs("$buildDir/generated/configsDatabase/src/main/java", "$buildDir/generated/jobsDatabase/src/main/java") + srcDirs("$buildDir/generated/configsDatabase/src/main/java", "$buildDir/generated/jobsDatabase/src/main/java") } tasks.named("generateConfigsDatabaseJooq") { - allInputsDeclared = true - outputs.cacheIf { true } + allInputsDeclared = true + outputs.cacheIf { true } } tasks.named("generateJobsDatabaseJooq") { - allInputsDeclared = true - outputs.cacheIf { true } + allInputsDeclared = true + outputs.cacheIf { true } } diff --git a/airbyte-featureflag/src/main/kotlin/FlagDefinitions.kt b/airbyte-featureflag/src/main/kotlin/FlagDefinitions.kt index e4a598583a8..62c340af8b9 100644 --- a/airbyte-featureflag/src/main/kotlin/FlagDefinitions.kt +++ b/airbyte-featureflag/src/main/kotlin/FlagDefinitions.kt @@ -17,8 +17,6 @@ object LogConnectorMessages : EnvVar(envVar = "LOG_CONNECTOR_MESSAGES") object AutoDetectSchema : EnvVar(envVar = "AUTO_DETECT_SCHEMA") -object NeedStateValidation : EnvVar(envVar = "NEED_STATE_VALIDATION") - object RemoveValidationLimit : Temporary(key = "validation.removeValidationLimit", default = false) object NormalizationInDestination : Temporary(key = "connectors.normalizationInDestination", default = "") @@ -77,8 +75,6 @@ object ReplicationWorkerImpl : Permanent(key = "platform.replication-wor object UseResourceRequirementsVariant : Permanent(key = "platform.resource-requirements-variant", default = "default") -object UseParallelStreamStatsTracker : Temporary(key = "platform.use-parallel-stream-stats-tracker", default = false) - object SuccessiveCompleteFailureLimit : Temporary(key = "complete-failures.max-successive", default = -1) object TotalCompleteFailureLimit : Temporary(key = "complete-failures.max-total", default = -1) @@ -111,6 +107,8 @@ object 
ConnectorApmEnabled : Permanent(key = "connectors.apm-enabled", object AutoRechargeEnabled : Permanent(key = "billing.autoRecharge", default = false) +object UseBreakingChangeScopedConfigs : Temporary(key = "connectors.useBreakingChangeScopedConfigs", default = false) + // NOTE: this is deprecated in favor of FieldSelectionEnabled and will be removed once that flag is fully deployed. object FieldSelectionWorkspaces : EnvVar(envVar = "FIELD_SELECTION_WORKSPACES") { override fun enabled(ctx: Context): Boolean { @@ -144,8 +142,6 @@ object FieldSelectionWorkspaces : EnvVar(envVar = "FIELD_SELECTION_WORKSPACES") object RunSocatInConnectorContainer : Temporary(key = "platform.run-socat-in-connector-container", default = false) -object FailSyncIfTooBig : Temporary(key = "platform.fail-sync-if-too-big", default = false) - object DefaultOrgForNewWorkspace : Temporary(key = "platform.set-default-org-for-new-workspace", default = false) object WorkloadHeartbeatRate : Permanent(key = "workload.heartbeat.rate", default = 5) @@ -170,6 +166,8 @@ object UseWorkloadApi : Temporary(key = "platform.use-workload-api", de object EmitStateStatsToSegment : Temporary(key = "platform.emit-state-stats-segment", default = true) +object LogsForStripeChecksumDebugging : Temporary(key = "platform.logs-for-stripe-checksum-debug", default = false) + object AddInitialCreditsForWorkspace : Temporary(key = "add-credits-at-workspace-creation-for-org", default = 0) object WorkloadApiRouting : Permanent(key = "workload-api-routing", default = "workload_default") @@ -186,8 +184,18 @@ object FailSyncOnInvalidChecksum : Temporary(key = "platform.fail-sync- object HydrateAggregatedStats : Temporary(key = "platform.hydrate-aggregated-stats", default = true) -object BillingCronScopeChangeTimestamp : Permanent(key = "platform.billing-cron-scope-change-timestamp", default = "1735711200") - object UseWorkloadApiForDiscover : Temporary(key = "platform.use-workload-api-for-discover", default = false) object 
UseWorkloadApiForSpec : Temporary(key = "platform.use-workload-api-for-spec", default = false) + +object ActivateRefreshes : Temporary(key = "platform.activate-refreshes", default = false) + +object WriteOutputCatalogToObjectStorage : Temporary(key = "platform.write-output-catalog-to-object-storage", default = false) + +object NullOutputCatalogOnSyncOutput : Temporary(key = "platform.null-output-catalog-on-sync-output", default = false) + +object UseCustomK8sInitCheck : Temporary(key = "platform.use-custom-k8s-init-check", default = true) + +object DeleteFullRefreshState : Temporary(key = "platform.delete-full-refresh-state", default = false) + +object UseClear : Temporary(key = "connection.clearNotReset", default = false) diff --git a/airbyte-json-validation/build.gradle.kts b/airbyte-json-validation/build.gradle.kts index fed6824bd5a..9dc6ad460ec 100644 --- a/airbyte-json-validation/build.gradle.kts +++ b/airbyte-json-validation/build.gradle.kts @@ -1,18 +1,18 @@ plugins { - id("io.airbyte.gradle.jvm.lib") - id("io.airbyte.gradle.publish") + id("io.airbyte.gradle.jvm.lib") + id("io.airbyte.gradle.publish") } dependencies { - implementation(project(":airbyte-commons")) - implementation(libs.guava) - implementation("com.networknt:json-schema-validator:1.0.72") - // needed so that we can follow $ref when parsing json. jackson does not support this natively. - implementation("me.andrz.jackson:jackson-json-reference-core:0.3.2") + implementation(project(":airbyte-commons")) + implementation(libs.guava) + implementation("com.networknt:json-schema-validator:1.0.72") + // needed so that we can follow $ref when parsing json. jackson does not support this natively. 
+ implementation("me.andrz.jackson:jackson-json-reference-core:0.3.2") - testRuntimeOnly(libs.junit.jupiter.engine) - testImplementation(libs.bundles.junit) - testImplementation(libs.assertj.core) + testRuntimeOnly(libs.junit.jupiter.engine) + testImplementation(libs.bundles.junit) + testImplementation(libs.assertj.core) - testImplementation(libs.junit.pioneer) + testImplementation(libs.junit.pioneer) } diff --git a/airbyte-keycloak-setup/Dockerfile b/airbyte-keycloak-setup/Dockerfile index 560ddde30d6..b02ae3769b1 100644 --- a/airbyte-keycloak-setup/Dockerfile +++ b/airbyte-keycloak-setup/Dockerfile @@ -1,5 +1,12 @@ -ARG JDK_IMAGE=airbyte/airbyte-base-java-image:2.1.0 -FROM ${JDK_IMAGE} AS keycloak-setup +ARG JDK_IMAGE=airbyte/airbyte-base-java-image:3.2.1 + +FROM scratch as builder WORKDIR /app ADD airbyte-app.tar /app + +FROM ${JDK_IMAGE} AS keycloak-setup +WORKDIR /app +COPY --chown=airbyte:airbyte --from=builder /app /app +USER airbyte:airbyte + ENTRYPOINT ["/bin/bash", "-c", "airbyte-app/bin/airbyte-keycloak-setup"] diff --git a/airbyte-keycloak-setup/build.gradle.kts b/airbyte-keycloak-setup/build.gradle.kts index eaba71edfbd..e364ab896ee 100644 --- a/airbyte-keycloak-setup/build.gradle.kts +++ b/airbyte-keycloak-setup/build.gradle.kts @@ -1,43 +1,48 @@ import java.util.Properties plugins { - id("io.airbyte.gradle.jvm.app") - id("io.airbyte.gradle.docker") - id("io.airbyte.gradle.publish") + id("io.airbyte.gradle.jvm.app") + id("io.airbyte.gradle.docker") + id("io.airbyte.gradle.publish") } dependencies { - compileOnly(libs.lombok) - annotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut - annotationProcessor(platform(libs.micronaut.platform)) - annotationProcessor(libs.bundles.micronaut.annotation.processor) - - implementation( platform(libs.micronaut.platform)) - implementation( libs.bundles.micronaut) - implementation( libs.bundles.keycloak.client) - - implementation(project(":airbyte-commons")) - 
implementation(project(":airbyte-commons-auth")) - implementation(project(":airbyte-commons-micronaut")) - implementation(project(":airbyte-commons-micronaut-security")) - - testAnnotationProcessor(platform(libs.micronaut.platform)) - testAnnotationProcessor(libs.bundles.micronaut.test.annotation.processor) - - testImplementation(libs.bundles.micronaut.test) - testImplementation(libs.bundles.junit) - testImplementation(libs.junit.jupiter.system.stubs) + compileOnly(libs.lombok) + annotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut + annotationProcessor(platform(libs.micronaut.platform)) + annotationProcessor(libs.bundles.micronaut.annotation.processor) + + implementation(platform(libs.micronaut.platform)) + implementation(libs.bundles.micronaut) + implementation(libs.bundles.keycloak.client) + + implementation(project(":airbyte-commons")) + implementation(project(":airbyte-commons-auth")) + implementation(project(":airbyte-commons-micronaut")) + implementation(project(":airbyte-commons-micronaut-security")) + implementation(project(":airbyte-data")) + implementation(project(":airbyte-db:db-lib")) + implementation(project(":airbyte-db:jooq")) + + testAnnotationProcessor(platform(libs.micronaut.platform)) + testAnnotationProcessor(libs.bundles.micronaut.test.annotation.processor) + + testImplementation(libs.bundles.micronaut.test) + testImplementation(libs.bundles.junit) + testImplementation(libs.junit.jupiter.system.stubs) + + testImplementation(project(":airbyte-test-utils")) } val env = Properties().apply { - load(rootProject.file(".env.dev").inputStream()) + load(rootProject.file(".env.dev").inputStream()) } airbyte { - application { - mainClass = "io.airbyte.keycloak.setup.Application" - defaultJvmArgs = listOf("-XX:+ExitOnOutOfMemoryError", "-XX:MaxRAMPercentage=75.0") - } - docker { - imageName = "keycloak-setup" - } + application { + mainClass = "io.airbyte.keycloak.setup.Application" + defaultJvmArgs = 
listOf("-XX:+ExitOnOutOfMemoryError", "-XX:MaxRAMPercentage=75.0") + } + docker { + imageName = "keycloak-setup" + } } diff --git a/airbyte-keycloak-setup/src/main/java/io/airbyte/keycloak/setup/AccountClientUpdater.java b/airbyte-keycloak-setup/src/main/java/io/airbyte/keycloak/setup/AccountClientUpdater.java deleted file mode 100644 index 72021d77232..00000000000 --- a/airbyte-keycloak-setup/src/main/java/io/airbyte/keycloak/setup/AccountClientUpdater.java +++ /dev/null @@ -1,43 +0,0 @@ -/* - * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.keycloak.setup; - -import io.airbyte.commons.auth.config.AirbyteKeycloakConfiguration; -import io.micronaut.context.annotation.Value; -import jakarta.inject.Singleton; -import java.util.List; -import lombok.extern.slf4j.Slf4j; -import org.keycloak.admin.client.resource.RealmResource; -import org.keycloak.representations.idm.ClientRepresentation; - -/** - * This class provides functionality to update account client settings. It includes a method to - * change home url for the account client. 
- */ -@Singleton -@Slf4j -public class AccountClientUpdater { - - private final String webappUrl; - private final AirbyteKeycloakConfiguration keycloakConfiguration; - - public AccountClientUpdater(@Value("${airbyte.webapp-url}") final String webappUrl, - final AirbyteKeycloakConfiguration keycloakConfiguration) { - this.webappUrl = webappUrl; - this.keycloakConfiguration = keycloakConfiguration; - } - - public void updateAccountClientHomeUrl(final RealmResource airbyteRealm) { - List clients = airbyteRealm.clients().findAll(); - ClientRepresentation clientRepresentation = clients.stream() - .filter(client -> keycloakConfiguration.getAccountClientId().equals(client.getClientId())) - .findFirst() - .orElseThrow(() -> new RuntimeException("Account client not found")); - clientRepresentation.setBaseUrl(webappUrl); - - airbyteRealm.clients().get(clientRepresentation.getId()).update(clientRepresentation); - } - -} diff --git a/airbyte-keycloak-setup/src/main/java/io/airbyte/keycloak/setup/ClientScopeCreator.java b/airbyte-keycloak-setup/src/main/java/io/airbyte/keycloak/setup/ClientScopeConfigurator.java similarity index 72% rename from airbyte-keycloak-setup/src/main/java/io/airbyte/keycloak/setup/ClientScopeCreator.java rename to airbyte-keycloak-setup/src/main/java/io/airbyte/keycloak/setup/ClientScopeConfigurator.java index 395c05aeb57..49a9c14f292 100644 --- a/airbyte-keycloak-setup/src/main/java/io/airbyte/keycloak/setup/ClientScopeCreator.java +++ b/airbyte-keycloak-setup/src/main/java/io/airbyte/keycloak/setup/ClientScopeConfigurator.java @@ -8,6 +8,7 @@ import jakarta.ws.rs.core.Response; import java.util.Arrays; import java.util.Map; +import java.util.Optional; import lombok.extern.slf4j.Slf4j; import org.keycloak.admin.client.resource.RealmResource; import org.keycloak.representations.idm.ClientScopeRepresentation; @@ -19,7 +20,7 @@ */ @Singleton @Slf4j -public class ClientScopeCreator { +public class ClientScopeConfigurator { public static final int 
HTTP_STATUS_CREATED = 201; @@ -28,14 +29,27 @@ public class ClientScopeCreator { * * @param keycloakRealm the realm to create the client scope in */ - public void createClientScope(final RealmResource keycloakRealm) { + public void configureClientScope(final RealmResource keycloakRealm) { final ClientScopeRepresentation clientScopeRepresentation = createClientScopeRepresentation(); - final Response response = keycloakRealm.clientScopes().create(clientScopeRepresentation); - if (response.getStatus() == HTTP_STATUS_CREATED) { - log.info("ClientScope {} created successfully.", clientScopeRepresentation.getName()); + final Optional existingClientScope = keycloakRealm.clientScopes().findAll().stream() + .filter(scope -> scope.getName().equals(clientScopeRepresentation.getName())) + .findFirst(); + + if (existingClientScope.isPresent()) { + clientScopeRepresentation.setId(existingClientScope.get().getId()); + keycloakRealm.clientScopes().get(existingClientScope.get().getId()).update(clientScopeRepresentation); } else { - log.info("Failed to create Client Scope. 
Status: " + response.getStatusInfo().getReasonPhrase()); + try (final Response response = keycloakRealm.clientScopes().create(clientScopeRepresentation)) { + if (response.getStatus() == HTTP_STATUS_CREATED) { + log.info("ClientScope {} created successfully.", clientScopeRepresentation.getName()); + } else { + final String errorMessage = String.format("Failed to create Client Scope.\nReason: %s\n Response: %s", + response.getStatusInfo().getReasonPhrase(), response.readEntity(String.class)); + log.error(errorMessage); + throw new RuntimeException(errorMessage); + } + } } } diff --git a/airbyte-keycloak-setup/src/main/java/io/airbyte/keycloak/setup/ConfigDbResetHelper.java b/airbyte-keycloak-setup/src/main/java/io/airbyte/keycloak/setup/ConfigDbResetHelper.java new file mode 100644 index 00000000000..dc5316e8d37 --- /dev/null +++ b/airbyte-keycloak-setup/src/main/java/io/airbyte/keycloak/setup/ConfigDbResetHelper.java @@ -0,0 +1,52 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.keycloak.setup; + +import io.airbyte.db.Database; +import io.airbyte.db.instance.configs.jooq.generated.Tables; +import io.airbyte.db.instance.configs.jooq.generated.enums.AuthProvider; +import jakarta.inject.Named; +import jakarta.inject.Singleton; +import java.sql.SQLException; + +/** + * Helper to reset the Config DB state as part of a Keycloak Realm Reset. Cleans up old User records + * that would otherwise become orphaned when the Keycloak Realm is recreated from scratch, and + * assigns new Keycloak auth IDs to SSO logins. + */ +@Singleton +public class ConfigDbResetHelper { + + private final Database configDb; + + public ConfigDbResetHelper(@Named("configDatabase") final Database configDb) { + this.configDb = configDb; + } + + public void deleteConfigDbUsers() throws SQLException { + // DO NOT REMOVE THIS CRITICAL CHECK. 
+ throwIfMultipleOrganizations(); + + this.configDb.query(ctx -> ctx.deleteFrom(Tables.USER) + .where(Tables.USER.AUTH_PROVIDER.eq(AuthProvider.keycloak)) + .execute()); + } + + /** + * This reset operation would be detrimental if it runs in any sort of multi-organization instance. + * It relies on an assumption that all keycloak-backed users are part of the same + * organization/realm. If the one-and-only realm is reset, we know the users will be orphaned. This + * check is an extra layer of protection in case this code were somehow run in a multi-org + * environment like Airbyte Cloud or any future multi-org setup. + */ + private void throwIfMultipleOrganizations() throws SQLException { + final var orgCount = this.configDb.query(ctx -> ctx.fetchCount(Tables.ORGANIZATION)); + if (orgCount > 1) { + throw new IllegalStateException("Multiple organizations found in ConfigDb. " + + "This is not supported with the KEYCLOAK_RESET_REALM process."); + } + } + +} diff --git a/airbyte-keycloak-setup/src/main/java/io/airbyte/keycloak/setup/ConfigurationMapService.java b/airbyte-keycloak-setup/src/main/java/io/airbyte/keycloak/setup/ConfigurationMapService.java index 8798ee137f5..93fdddddaa1 100644 --- a/airbyte-keycloak-setup/src/main/java/io/airbyte/keycloak/setup/ConfigurationMapService.java +++ b/airbyte-keycloak-setup/src/main/java/io/airbyte/keycloak/setup/ConfigurationMapService.java @@ -5,7 +5,7 @@ package io.airbyte.keycloak.setup; import io.airbyte.commons.auth.config.AirbyteKeycloakConfiguration; -import io.airbyte.commons.auth.config.IdentityProviderConfiguration; +import io.airbyte.commons.auth.config.OidcConfig; import io.micronaut.context.annotation.Value; import jakarta.inject.Singleton; import java.util.HashMap; @@ -19,6 +19,8 @@ @Singleton public class ConfigurationMapService { + public static final String HTTPS_PREFIX = "https://"; + public static final String WELL_KNOWN_OPENID_CONFIGURATION_SUFFIX = ".well-known/openid-configuration"; private final String 
webappUrl; private final AirbyteKeycloakConfiguration keycloakConfiguration; @@ -29,38 +31,45 @@ public ConfigurationMapService(@Value("${airbyte.webapp-url}") final String weba } public Map importProviderFrom(final RealmResource keycloakRealm, - final IdentityProviderConfiguration provider, + final OidcConfig oidcConfig, String keycloakProviderId) { Map map = new HashMap<>(); map.put("providerId", keycloakProviderId); - map.put("fromUrl", getProviderDiscoveryUrl(provider)); + map.put("fromUrl", getProviderDiscoveryUrl(oidcConfig)); return keycloakRealm.identityProviders().importFrom(map); } - public Map setupProviderConfig(final IdentityProviderConfiguration provider, Map configMap) { + public Map setupProviderConfig(final OidcConfig oidcConfig, Map configMap) { Map config = new HashMap<>(); // Copy all keys from configMap to the result map config.putAll(configMap); // Explicitly set required keys - config.put("clientId", provider.getClientId()); - config.put("clientSecret", provider.getClientSecret()); + config.put("clientId", oidcConfig.clientId()); + config.put("clientSecret", oidcConfig.clientSecret()); config.put("defaultScope", "openid email profile"); - config.put("redirectUris", getProviderRedirectUrl(provider)); + config.put("redirectUris", getProviderRedirectUrl(oidcConfig)); config.put("backchannelSupported", "true"); config.put("backchannel_logout_session_supported", "true"); return config; } - private String getProviderRedirectUrl(final IdentityProviderConfiguration provider) { + private String getProviderRedirectUrl(final OidcConfig oidcConfig) { final String webappUrlWithTrailingSlash = webappUrl.endsWith("/") ? 
webappUrl : webappUrl + "/"; - return webappUrlWithTrailingSlash + "auth/realms/" + keycloakConfiguration.getAirbyteRealm() + "/broker/" + provider.getAppName() + "/endpoint"; + return webappUrlWithTrailingSlash + "auth/realms/" + keycloakConfiguration.getAirbyteRealm() + "/broker/" + oidcConfig.appName() + "/endpoint"; } - private String getProviderDiscoveryUrl(final IdentityProviderConfiguration provider) { - final String domainWithTrailingSlash = provider.getDomain().endsWith("/") ? provider.getDomain() : provider.getDomain() + "/"; - return "https://" + domainWithTrailingSlash + ".well-known/openid-configuration"; + private String getProviderDiscoveryUrl(final OidcConfig oidcConfig) { + String domain = oidcConfig.domain(); + if (!domain.startsWith(HTTPS_PREFIX)) { + domain = HTTPS_PREFIX + domain; + } + if (!domain.endsWith(WELL_KNOWN_OPENID_CONFIGURATION_SUFFIX)) { + domain = domain.endsWith("/") ? domain : domain + "/"; + domain = domain + WELL_KNOWN_OPENID_CONFIGURATION_SUFFIX; + } + return domain; } } diff --git a/airbyte-keycloak-setup/src/main/java/io/airbyte/keycloak/setup/DatabaseBeanFactory.java b/airbyte-keycloak-setup/src/main/java/io/airbyte/keycloak/setup/DatabaseBeanFactory.java new file mode 100644 index 00000000000..c39add63d7c --- /dev/null +++ b/airbyte-keycloak-setup/src/main/java/io/airbyte/keycloak/setup/DatabaseBeanFactory.java @@ -0,0 +1,23 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.keycloak.setup; + +import io.airbyte.data.services.shared.DataSourceUnwrapper; +import io.airbyte.db.Database; +import io.micronaut.context.annotation.Factory; +import jakarta.inject.Named; +import jakarta.inject.Singleton; +import org.jooq.DSLContext; + +@Factory +public class DatabaseBeanFactory { + + @Singleton + @Named("configDatabase") + public Database configDatabase(@Named("config") final DSLContext dslContext) { + return new Database(DataSourceUnwrapper.unwrapContext(dslContext)); + } + +} diff --git a/airbyte-keycloak-setup/src/main/java/io/airbyte/keycloak/setup/IdentityProvidersConfigurator.java b/airbyte-keycloak-setup/src/main/java/io/airbyte/keycloak/setup/IdentityProvidersConfigurator.java new file mode 100644 index 00000000000..203f2b29e27 --- /dev/null +++ b/airbyte-keycloak-setup/src/main/java/io/airbyte/keycloak/setup/IdentityProvidersConfigurator.java @@ -0,0 +1,125 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.keycloak.setup; + +import io.airbyte.commons.auth.config.OidcConfig; +import jakarta.inject.Singleton; +import jakarta.ws.rs.core.Response; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import lombok.extern.slf4j.Slf4j; +import org.keycloak.admin.client.resource.RealmResource; +import org.keycloak.representations.idm.IdentityProviderRepresentation; + +/** + * This class is responsible for configuring an identity provider. It creates and manages various + * identity providers for authentication purposes. 
+ */ +@Singleton +@Slf4j +public class IdentityProvidersConfigurator { + + static final String AIRBYTE_MANAGED_IDP_KEY = "airbyte-managed-idp"; + static final String AIRBYTE_MANAGED_IDP_VALUE = "true"; + private static final String KEYCLOAK_PROVIDER_ID = "oidc"; // OIDC is the only supported provider ID for now + + private final ConfigurationMapService configurationMapService; + private final Optional oidcConfig; + + public IdentityProvidersConfigurator(final ConfigurationMapService configurationMapService, + final Optional oidcConfig) { + this.configurationMapService = configurationMapService; + this.oidcConfig = oidcConfig; + } + + public void configureIdp(final RealmResource keycloakRealm) { + if (oidcConfig.isEmpty()) { + log.info("No identity provider configuration found. Skipping IDP setup."); + return; + } + + final IdentityProviderRepresentation idp = buildIdpFromConfig(keycloakRealm, oidcConfig.get()); + + final List existingIdps = keycloakRealm.identityProviders().findAll(); + // if no IDPs exist, create one and mark it as airbyte-managed + if (existingIdps.isEmpty()) { + log.info("No existing identity providers found. Creating new IDP."); + createNewIdp(keycloakRealm, idp); + return; + } + + // Look for an IDP with the AIRBYTE_MANAGED_IDP_KEY/VALUE in its config. This allows keycloak-setup + // to programmatically + // configure a specific IDP, even if the realm contains multiple. + final List existingManagedIdps = existingIdps.stream() + .filter(existingIdp -> existingIdp.getConfig().getOrDefault(AIRBYTE_MANAGED_IDP_KEY, "false").equals(AIRBYTE_MANAGED_IDP_VALUE)) + .toList(); + + if (existingManagedIdps.size() > 1) { + log.warn( + "Found multiple IDPs with Config entry {}={}. This isn't supported, as keycloak-setup only supports one managed IDP. Skipping IDP update.", + AIRBYTE_MANAGED_IDP_KEY, AIRBYTE_MANAGED_IDP_VALUE); + return; + } + + if (existingManagedIdps.size() == 1) { + log.info("Found existing managed IDP. 
Updating it."); + updateExistingIdp(keycloakRealm, existingManagedIdps.getFirst(), idp); + return; + } + + // if no managed IDPs exist, but there is exactly one IDP, update it and mark it as airbyte-managed + if (existingIdps.size() == 1) { + log.info("Found exactly one existing IDP. Updating it and marking it as airbyte-managed."); + updateExistingIdp(keycloakRealm, existingIdps.getFirst(), idp); + return; + } + + // if there are multiple IDPs and none are managed, log a warning and do nothing. + log.warn("Multiple identity providers exist and none are marked as airbyte-managed. Skipping IDP update. If you want your OIDC configuration to " + + "apply to a specific IDP, please add a Config entry with key {} and value {} to that IDP and try again.", + AIRBYTE_MANAGED_IDP_KEY, AIRBYTE_MANAGED_IDP_VALUE); + } + + private void createNewIdp(final RealmResource keycloakRealm, final IdentityProviderRepresentation idp) { + try (final Response response = keycloakRealm.identityProviders().create(idp)) { + if (response.getStatus() == Response.Status.CREATED.getStatusCode()) { + log.info("Identity Provider {} created successfully!", idp.getAlias()); + } else { + final String errorMessage = String.format("Failed to create Identity Provider.\nReason: %s\nResponse: %s", + response.getStatusInfo().getReasonPhrase(), response.readEntity(String.class)); + log.error(errorMessage); + throw new RuntimeException(errorMessage); + } + } + } + + private void updateExistingIdp(final RealmResource keycloakRealm, + final IdentityProviderRepresentation existingIdp, + final IdentityProviderRepresentation updatedIdp) { + // In order to apply the updated IDP configuration to the existing IDP within Keycloak, we need to + // set the internal ID of the existing IDP. 
+ updatedIdp.setInternalId(existingIdp.getInternalId()); + keycloakRealm.identityProviders().get(existingIdp.getAlias()).update(updatedIdp); + } + + private IdentityProviderRepresentation buildIdpFromConfig(final RealmResource keycloakRealm, final OidcConfig oidcConfig) { + final IdentityProviderRepresentation idp = new IdentityProviderRepresentation(); + idp.setAlias(oidcConfig.appName()); + idp.setProviderId(KEYCLOAK_PROVIDER_ID); + idp.setEnabled(true); + + final Map configMap = configurationMapService.importProviderFrom(keycloakRealm, oidcConfig, idp.getProviderId()); + final Map config = configurationMapService.setupProviderConfig(oidcConfig, configMap); + + // mark the IDP as airbyte-managed so that it can be programmatically updated in the future. + config.put(AIRBYTE_MANAGED_IDP_KEY, AIRBYTE_MANAGED_IDP_VALUE); + idp.setConfig(config); + + return idp; + } + +} diff --git a/airbyte-keycloak-setup/src/main/java/io/airbyte/keycloak/setup/IdentityProvidersCreator.java b/airbyte-keycloak-setup/src/main/java/io/airbyte/keycloak/setup/IdentityProvidersCreator.java deleted file mode 100644 index 1200fdb5850..00000000000 --- a/airbyte-keycloak-setup/src/main/java/io/airbyte/keycloak/setup/IdentityProvidersCreator.java +++ /dev/null @@ -1,83 +0,0 @@ -/* - * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.keycloak.setup; - -import io.airbyte.commons.auth.config.IdentityProviderConfiguration; -import jakarta.inject.Singleton; -import jakarta.ws.rs.core.Response; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import lombok.extern.slf4j.Slf4j; -import org.keycloak.admin.client.resource.RealmResource; -import org.keycloak.representations.idm.IdentityProviderRepresentation; - -/** - * This class is responsible for creating identity providers. It creates and manages various - * identity providers for authentication purposes. 
- */ -@Singleton -@Slf4j -public class IdentityProvidersCreator { - - // static map of ProviderType to Keycloak provider id - private static final Map PROVIDER_TYPE_TO_KEYCLOAK_PROVIDER_ID = new HashMap<>(); - - static { - PROVIDER_TYPE_TO_KEYCLOAK_PROVIDER_ID.put(IdentityProviderConfiguration.ProviderType.OKTA, "oidc"); - PROVIDER_TYPE_TO_KEYCLOAK_PROVIDER_ID.put(IdentityProviderConfiguration.ProviderType.OIDC, "oidc"); - } - - private final List identityProviderConfigurations; - private final ConfigurationMapService configurationMapService; - - public IdentityProvidersCreator(final List identityProviderConfigurations, - final ConfigurationMapService configurationMapService) { - this.identityProviderConfigurations = identityProviderConfigurations; - this.configurationMapService = configurationMapService; - } - - public void createIdps(final RealmResource keycloakRealm) { - // Create Identity Providers - if (identityProviderConfigurations == null || identityProviderConfigurations.isEmpty()) { - log.info("No identity providers configured. 
Skipping IDP setup."); - return; - } - - for (final IdentityProviderConfiguration provider : identityProviderConfigurations) { - try { - createIdp(keycloakRealm, provider); - } catch (final RuntimeException e) { - log.error("Failed to create identity provider for provider: {}", provider.getAppName(), e); - throw e; - } - } - log.info("Identity providers created."); - } - - private void createIdp(final RealmResource keycloakRealm, final IdentityProviderConfiguration provider) { - log.info("Creating identity provider: {}", provider); - - final IdentityProviderRepresentation idp = new IdentityProviderRepresentation(); - idp.setAlias(provider.getAppName()); - idp.setProviderId(PROVIDER_TYPE_TO_KEYCLOAK_PROVIDER_ID.get(provider.getType())); - idp.setEnabled(true); - - final Map configMap = configurationMapService.importProviderFrom(keycloakRealm, provider, idp.getProviderId()); - final Map config = configurationMapService.setupProviderConfig(provider, configMap); - idp.setConfig(config); - - final Response idpResponse = keycloakRealm.identityProviders().create(idp); - - if (idpResponse.getStatus() == Response.Status.CREATED.getStatusCode()) { - log.info("Identity Provider {} created successfully!", provider.getAppName()); - } else { - final String error = String.format("Failed to create Identity Provider. 
Status: %s", idpResponse.getStatusInfo().getReasonPhrase()); - log.error(error); - throw new RuntimeException(error); - } - } - -} diff --git a/airbyte-keycloak-setup/src/main/java/io/airbyte/keycloak/setup/KeycloakServer.java b/airbyte-keycloak-setup/src/main/java/io/airbyte/keycloak/setup/KeycloakServer.java index 336bd6b2b1c..ce30aac7655 100644 --- a/airbyte-keycloak-setup/src/main/java/io/airbyte/keycloak/setup/KeycloakServer.java +++ b/airbyte-keycloak-setup/src/main/java/io/airbyte/keycloak/setup/KeycloakServer.java @@ -5,7 +5,9 @@ package io.airbyte.keycloak.setup; import io.airbyte.commons.auth.config.AirbyteKeycloakConfiguration; +import io.micronaut.context.annotation.Value; import jakarta.inject.Singleton; +import java.util.Map; import lombok.extern.slf4j.Slf4j; import org.keycloak.admin.client.Keycloak; import org.keycloak.admin.client.resource.RealmResource; @@ -19,62 +21,47 @@ @Slf4j public class KeycloakServer { + private static final String FRONTEND_URL_ATTRIBUTE = "frontendUrl"; + private final Keycloak keycloakAdminClient; private final KeycloakAdminClientProvider keycloakAdminClientProvider; private final AirbyteKeycloakConfiguration keycloakConfiguration; - private final UserCreator userCreator; - private final WebClientCreator webClientCreator; - private final IdentityProvidersCreator identityProvidersCreator; - private final AccountClientUpdater accountClientUpdater; - private ClientScopeCreator clientScopeCreator; + private final UserConfigurator userConfigurator; + private final WebClientConfigurator webClientConfigurator; + private final IdentityProvidersConfigurator identityProvidersConfigurator; + private final ClientScopeConfigurator clientScopeConfigurator; + private final String webappUrl; public KeycloakServer(final KeycloakAdminClientProvider keycloakAdminClientProvider, final AirbyteKeycloakConfiguration keycloakConfiguration, - final UserCreator userCreator, - final WebClientCreator webClientCreator, - final 
IdentityProvidersCreator identityProvidersCreator, - final AccountClientUpdater accountClientUpdater, - final ClientScopeCreator clientScopeCreator) { + final UserConfigurator userConfigurator, + final WebClientConfigurator webClientConfigurator, + final IdentityProvidersConfigurator identityProvidersConfigurator, + final ClientScopeConfigurator clientScopeConfigurator, + @Value("${airbyte.webapp-url}") final String webappUrl) { this.keycloakAdminClientProvider = keycloakAdminClientProvider; this.keycloakConfiguration = keycloakConfiguration; - this.userCreator = userCreator; - this.webClientCreator = webClientCreator; - this.identityProvidersCreator = identityProvidersCreator; - this.accountClientUpdater = accountClientUpdater; - this.clientScopeCreator = clientScopeCreator; + this.userConfigurator = userConfigurator; + this.webClientConfigurator = webClientConfigurator; + this.identityProvidersConfigurator = identityProvidersConfigurator; + this.clientScopeConfigurator = clientScopeConfigurator; this.keycloakAdminClient = initializeKeycloakAdminClient(); + this.webappUrl = webappUrl; } - public void createAirbyteRealm() { - if (doesRealmExist()) { - log.info("Realm {} already exists, nothing to be done.", keycloakConfiguration.getAirbyteRealm()); - return; - } - log.info("Creating realm {}...", keycloakConfiguration.getAirbyteRealm()); - createRealm(); - configureRealm(); - log.info("Realm created successfully."); - } - - public void recreateAirbyteRealm() { - if (!doesRealmExist()) { - log.info("Ignoring reset because realm {} does not exist. 
Creating it...", keycloakConfiguration.getAirbyteRealm()); - createAirbyteRealm(); - return; + public void setupAirbyteRealm() { + if (airbyteRealmDoesNotExist()) { + log.info("Creating realm {}...", keycloakConfiguration.getAirbyteRealm()); + createRealm(); + log.info("Realm created successfully."); } - - log.info("Recreating realm {}...", keycloakConfiguration.getAirbyteRealm()); - final RealmResource airbyteRealm = keycloakAdminClient.realm(keycloakConfiguration.getAirbyteRealm()); - airbyteRealm.remove(); - log.info("Realm removed successfully. Recreating..."); - createRealm(); configureRealm(); - log.info("Realm recreated successfully."); + log.info("Realm configured successfully."); } - private boolean doesRealmExist() { + private boolean airbyteRealmDoesNotExist() { return keycloakAdminClient.realms().findAll().stream() - .anyMatch(realmRepresentation -> realmRepresentation.getRealm().equals(keycloakConfiguration.getAirbyteRealm())); + .noneMatch(realmRepresentation -> realmRepresentation.getRealm().equals(keycloakConfiguration.getAirbyteRealm())); } private void createRealm() { @@ -83,6 +70,18 @@ private void createRealm() { keycloakAdminClient.realms().create(airbyteRealmRepresentation); } + private void configureRealm() { + final RealmResource airbyteRealm = keycloakAdminClient.realm(keycloakConfiguration.getAirbyteRealm()); + + // ensure webapp-url is applied as the frontendUrl before other configurations are updated + updateRealmFrontendUrl(airbyteRealm, webappUrl); + + userConfigurator.configureUser(airbyteRealm); + webClientConfigurator.configureWebClient(airbyteRealm); + identityProvidersConfigurator.configureIdp(airbyteRealm); + clientScopeConfigurator.configureClientScope(airbyteRealm); + } + private RealmRepresentation buildRealmRepresentation() { final RealmRepresentation airbyteRealmRepresentation = new RealmRepresentation(); airbyteRealmRepresentation.setRealm(keycloakConfiguration.getAirbyteRealm()); @@ -91,14 +90,12 @@ private 
RealmRepresentation buildRealmRepresentation() { return airbyteRealmRepresentation; } - private void configureRealm() { - final RealmResource airbyteRealm = keycloakAdminClient.realm(keycloakConfiguration.getAirbyteRealm()); - - userCreator.createUser(airbyteRealm); - webClientCreator.createWebClient(airbyteRealm); - identityProvidersCreator.createIdps(airbyteRealm); - accountClientUpdater.updateAccountClientHomeUrl(airbyteRealm); - clientScopeCreator.createClientScope(airbyteRealm); + private void updateRealmFrontendUrl(final RealmResource realm, final String webappUrl) { + final RealmRepresentation realmRep = realm.toRepresentation(); + final Map attributes = realmRep.getAttributesOrEmpty(); + attributes.put(FRONTEND_URL_ATTRIBUTE, webappUrl + keycloakConfiguration.getBasePath()); + realmRep.setAttributes(attributes); + realm.update(realmRep); } private Keycloak initializeKeycloakAdminClient() { @@ -117,4 +114,25 @@ public final String getKeycloakServerUrl() { return keycloakConfiguration.getProtocol() + "://" + keycloakConfiguration.getHost() + basePathWithLeadingSlash; } + // Should no longer be needed now that the realm is always updated on each run. + // Leaving it in for now in case any issues pop up and users need a way to reset their realm + // from scratch. We should remove this once we're confident that users no longer ever need to + // do this hard reset. + @Deprecated + public void destroyAndRecreateAirbyteRealm() { + if (airbyteRealmDoesNotExist()) { + log.info("Ignoring reset because realm {} does not exist. Creating it...", keycloakConfiguration.getAirbyteRealm()); + setupAirbyteRealm(); + return; + } + log.info("Recreating realm {}...", keycloakConfiguration.getAirbyteRealm()); + final RealmResource airbyteRealm = keycloakAdminClient.realm(keycloakConfiguration.getAirbyteRealm()); + airbyteRealm.remove(); + log.info("Realm removed successfully. Recreating..."); + createRealm(); + log.info("Realm recreated successfully. 
Configuring..."); + configureRealm(); + log.info("Realm configured successfully."); + } + } diff --git a/airbyte-keycloak-setup/src/main/java/io/airbyte/keycloak/setup/KeycloakSetup.java b/airbyte-keycloak-setup/src/main/java/io/airbyte/keycloak/setup/KeycloakSetup.java index aa3df086a2b..c7cbc0cb57c 100644 --- a/airbyte-keycloak-setup/src/main/java/io/airbyte/keycloak/setup/KeycloakSetup.java +++ b/airbyte-keycloak-setup/src/main/java/io/airbyte/keycloak/setup/KeycloakSetup.java @@ -9,6 +9,7 @@ import io.micronaut.http.HttpResponse; import io.micronaut.http.client.HttpClient; import jakarta.inject.Singleton; +import java.sql.SQLException; import lombok.extern.slf4j.Slf4j; /** @@ -23,14 +24,17 @@ public class KeycloakSetup { private final HttpClient httpClient; private final KeycloakServer keycloakServer; private final AirbyteKeycloakConfiguration keycloakConfiguration; + private final ConfigDbResetHelper configDbResetHelper; public KeycloakSetup( final HttpClient httpClient, final KeycloakServer keycloakServer, - final AirbyteKeycloakConfiguration keycloakConfiguration) { + final AirbyteKeycloakConfiguration keycloakConfiguration, + final ConfigDbResetHelper configDbResetHelper) { this.httpClient = httpClient; this.keycloakServer = keycloakServer; this.keycloakConfiguration = keycloakConfiguration; + this.configDbResetHelper = configDbResetHelper; } public void run() { @@ -43,9 +47,18 @@ public void run() { log.info("Starting admin Keycloak client with url: {}", keycloakUrl); if (keycloakConfiguration.getResetRealm()) { - keycloakServer.recreateAirbyteRealm(); + keycloakServer.destroyAndRecreateAirbyteRealm(); + log.info("Successfully destroyed and recreated Airbyte Realm. Now deleting Airbyte User/Permission records..."); + try { + configDbResetHelper.deleteConfigDbUsers(); + } catch (SQLException e) { + log.error("Encountered an error while cleaning up Airbyte User/Permission records. 
" + + "You likely need to re-run this KEYCLOAK_RESET_REALM operation.", e); + throw new RuntimeException(e); + } + log.info("Successfully cleaned existing Airbyte User/Permission records. Reset finished successfully."); } else { - keycloakServer.createAirbyteRealm(); + keycloakServer.setupAirbyteRealm(); } } finally { keycloakServer.closeKeycloakAdminClient(); diff --git a/airbyte-keycloak-setup/src/main/java/io/airbyte/keycloak/setup/UserCreator.java b/airbyte-keycloak-setup/src/main/java/io/airbyte/keycloak/setup/UserConfigurator.java similarity index 53% rename from airbyte-keycloak-setup/src/main/java/io/airbyte/keycloak/setup/UserCreator.java rename to airbyte-keycloak-setup/src/main/java/io/airbyte/keycloak/setup/UserConfigurator.java index 5c57d453bc9..4340a7ca907 100644 --- a/airbyte-keycloak-setup/src/main/java/io/airbyte/keycloak/setup/UserCreator.java +++ b/airbyte-keycloak-setup/src/main/java/io/airbyte/keycloak/setup/UserConfigurator.java @@ -8,6 +8,7 @@ import jakarta.inject.Singleton; import jakarta.ws.rs.core.Response; import java.util.Arrays; +import java.util.Optional; import lombok.extern.slf4j.Slf4j; import org.keycloak.admin.client.resource.RealmResource; import org.keycloak.representations.idm.CredentialRepresentation; @@ -18,36 +19,43 @@ */ @Singleton @Slf4j -public class UserCreator { +public class UserConfigurator { public static final int HTTP_STATUS_CREATED = 201; private final InitialUserConfiguration initialUserConfiguration; - public UserCreator(final InitialUserConfiguration initialUserConfiguration) { + public UserConfigurator(final InitialUserConfiguration initialUserConfiguration) { this.initialUserConfiguration = initialUserConfiguration; } - public void createUser(final RealmResource keycloakRealm) { - final boolean userAlreadyExists = !keycloakRealm.users().search(initialUserConfiguration.getUsername()).isEmpty(); - if (userAlreadyExists) { - log.info("User {} already exists, nothing to be done.", 
initialUserConfiguration.getUsername()); - return; - } + public void configureUser(final RealmResource keycloakRealm) { + final UserRepresentation userConfig = getUserRepresentationFromConfig(); - final UserRepresentation user = createUserRepresentation(); - final Response response = keycloakRealm.users().create(user); + final Optional existingUser = keycloakRealm.users().searchByEmail(userConfig.getEmail(), true) + .stream() + .findFirst(); - if (response.getStatus() == HTTP_STATUS_CREATED) { - log.info("User {} created successfully.", user.getFirstName()); + if (existingUser.isPresent()) { + userConfig.setId(existingUser.get().getId()); + keycloakRealm.users().get(existingUser.get().getId()).update(userConfig); } else { - log.info("Failed to create user. Status: " + response.getStatusInfo().getReasonPhrase()); + try (final Response response = keycloakRealm.users().create(userConfig)) { + if (response.getStatus() == HTTP_STATUS_CREATED) { + log.info(userConfig.getUsername() + " user created successfully. 
Status: " + response.getStatusInfo()); + } else { + final String errorMessage = String.format("Failed to create %s user.\nReason: %s\nResponse: %s", userConfig.getUsername(), + response.getStatusInfo().getReasonPhrase(), response.readEntity(String.class)); + log.error(errorMessage); + throw new RuntimeException(errorMessage); + } + } } } - UserRepresentation createUserRepresentation() { + UserRepresentation getUserRepresentationFromConfig() { final UserRepresentation user = new UserRepresentation(); - user.setUsername(initialUserConfiguration.getUsername()); + user.setUsername(initialUserConfiguration.getEmail()); user.setEnabled(true); user.setEmail(initialUserConfiguration.getEmail()); user.setFirstName(initialUserConfiguration.getFirstName()); diff --git a/airbyte-keycloak-setup/src/main/java/io/airbyte/keycloak/setup/WebClientCreator.java b/airbyte-keycloak-setup/src/main/java/io/airbyte/keycloak/setup/WebClientConfigurator.java similarity index 52% rename from airbyte-keycloak-setup/src/main/java/io/airbyte/keycloak/setup/WebClientCreator.java rename to airbyte-keycloak-setup/src/main/java/io/airbyte/keycloak/setup/WebClientConfigurator.java index 2a9e9da1a86..e3e4e986d32 100644 --- a/airbyte-keycloak-setup/src/main/java/io/airbyte/keycloak/setup/WebClientCreator.java +++ b/airbyte-keycloak-setup/src/main/java/io/airbyte/keycloak/setup/WebClientConfigurator.java @@ -11,6 +11,7 @@ import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.Optional; import lombok.extern.slf4j.Slf4j; import org.keycloak.admin.client.resource.RealmResource; import org.keycloak.representations.idm.ClientRepresentation; @@ -21,7 +22,7 @@ */ @Singleton @Slf4j -public class WebClientCreator { +public class WebClientConfigurator { public static final int HTTP_STATUS_CREATED = 201; private static final String LOCAL_OSS_DEV_URI = "https://localhost:3000/*"; @@ -30,44 +31,59 @@ public class WebClientCreator { private final AirbyteKeycloakConfiguration 
keycloakConfiguration; private final String webappUrl; - public WebClientCreator(@Value("${airbyte.webapp-url}") final String webappUrl, - final AirbyteKeycloakConfiguration keycloakConfiguration) { + public WebClientConfigurator(@Value("${airbyte.webapp-url}") final String webappUrl, + final AirbyteKeycloakConfiguration keycloakConfiguration) { this.webappUrl = webappUrl; this.keycloakConfiguration = keycloakConfiguration; } - public void createWebClient(final RealmResource keycloakRealm) { - final ClientRepresentation client = createClientRepresentation(); - final Response clientResponse = keycloakRealm.clients().create(client); - handleClientCreationResponse(clientResponse, client.getClientId()); + public void configureWebClient(final RealmResource keycloakRealm) { + final ClientRepresentation clientConfig = getClientRepresentationFromConfig(); + + final Optional existingClient = keycloakRealm.clients().findByClientId(clientConfig.getClientId()) + .stream() + .findFirst(); + + if (existingClient.isPresent()) { + keycloakRealm.clients().get(existingClient.get().getId()).update(applyConfigToExistingClientRepresentation(existingClient.get())); + log.info(clientConfig.getClientId() + " client updated successfully."); + } else { + try (final Response response = keycloakRealm.clients().create(clientConfig)) { + if (response.getStatus() == HTTP_STATUS_CREATED) { + log.info(clientConfig.getClientId() + " client created successfully. 
Status: " + response.getStatusInfo()); + } else { + final String errorMessage = String.format("Failed to create %s client.\nReason: %s\nResponse: %s", clientConfig.getClientId(), + response.getStatusInfo().getReasonPhrase(), response.readEntity(String.class)); + log.error(errorMessage); + throw new RuntimeException(errorMessage); + } + } + } } - ClientRepresentation createClientRepresentation() { + ClientRepresentation getClientRepresentationFromConfig() { final ClientRepresentation client = new ClientRepresentation(); client.setClientId(keycloakConfiguration.getWebClientId()); client.setPublicClient(true); // Client authentication disabled client.setDirectAccessGrantsEnabled(true); // Standard flow authentication client.setRedirectUris(getWebClientRedirectUris(webappUrl)); - client.setBaseUrl(webappUrl); client.setAttributes(getClientAttributes()); return client; } + private ClientRepresentation applyConfigToExistingClientRepresentation(final ClientRepresentation clientRepresentation) { + // only change the attributes that come from external configuration + clientRepresentation.setRedirectUris(getWebClientRedirectUris(webappUrl)); + return clientRepresentation; + } + private Map getClientAttributes() { final Map attributeMap = new HashMap<>(); attributeMap.put("access.token.lifespan", "180"); return attributeMap; } - private void handleClientCreationResponse(final Response clientResponse, final String webClientId) { - if (clientResponse.getStatus() == HTTP_STATUS_CREATED) { - log.info(webClientId + " client created successfully. Status: " + clientResponse.getStatusInfo()); - } else { - log.info("Failed to create " + webClientId + " client. Status: " + clientResponse.getStatusInfo().getReasonPhrase()); - } - } - private List getWebClientRedirectUris(final String webappUrl) { final String normalizedWebappUrl = webappUrl.endsWith("/") ? 
webappUrl : webappUrl + "/"; return List.of(normalizedWebappUrl + "*", LOCAL_OSS_DEV_URI, LOCAL_CLOUD_DEV_URI); diff --git a/airbyte-keycloak-setup/src/main/resources/application.yml b/airbyte-keycloak-setup/src/main/resources/application.yml index 36588e8c90c..33fb103de24 100644 --- a/airbyte-keycloak-setup/src/main/resources/application.yml +++ b/airbyte-keycloak-setup/src/main/resources/application.yml @@ -14,7 +14,19 @@ airbyte: client-id: ${KEYCLOAK_CLIENT_ID:admin-cli} redirect-uri: ${KEYCLOAK_REDIRECT_URI:`http://localhost:8000/*`} web-client-id: ${KEYCLOAK_WEB_CLIENT_ID:airbyte-webapp} - account-client-id: ${KEYCLOAK_ACCOUNT_CLIENT_ID:account} username: ${KEYCLOAK_ADMIN_USER:} password: ${KEYCLOAK_ADMIN_PASSWORD:} reset-realm: ${KEYCLOAK_RESET_REALM:false} + +datasources: + config: + connection-test-query: SELECT 1 + connection-timeout: 30000 + maximum-pool-size: 10 + minimum-idle: 0 + idle-timeout: 600000 + initialization-fail-timeout: -1 # Disable fail fast checking to avoid issues due to other pods not being started in time + url: ${DATABASE_URL} + driverClassName: org.postgresql.Driver + username: ${DATABASE_USER} + password: ${DATABASE_PASSWORD} diff --git a/airbyte-keycloak-setup/src/test/java/io/airbyte/keycloak/setup/AccountClientUpdaterTest.java b/airbyte-keycloak-setup/src/test/java/io/airbyte/keycloak/setup/AccountClientUpdaterTest.java deleted file mode 100644 index 70f227179a4..00000000000 --- a/airbyte-keycloak-setup/src/test/java/io/airbyte/keycloak/setup/AccountClientUpdaterTest.java +++ /dev/null @@ -1,86 +0,0 @@ -/* - * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.keycloak.setup; - -import static org.junit.jupiter.api.Assertions.assertDoesNotThrow; -import static org.junit.jupiter.api.Assertions.assertThrows; -import static org.mockito.Mockito.doNothing; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; - -import io.airbyte.commons.auth.config.AirbyteKeycloakConfiguration; -import java.util.Arrays; -import java.util.List; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.extension.ExtendWith; -import org.keycloak.admin.client.resource.ClientResource; -import org.keycloak.admin.client.resource.ClientsResource; -import org.keycloak.admin.client.resource.RealmResource; -import org.keycloak.representations.idm.ClientRepresentation; -import org.mockito.InjectMocks; -import org.mockito.Mock; -import org.mockito.junit.jupiter.MockitoExtension; - -@ExtendWith(MockitoExtension.class) -class AccountClientUpdaterTest { - - private static final String WEBAPP_URL = "http://localhost:8000"; - private static final String ACCOUNT_CLIENT_ID = "account"; - - @Mock - private RealmResource realmResource; - @Mock - private ClientsResource clientsResource; - @Mock - private ClientResource clientResource; - @Mock - private AirbyteKeycloakConfiguration keycloakConfiguration; - @InjectMocks - private AccountClientUpdater accountClientUpdater; - - @BeforeEach - void setUp() { - when(keycloakConfiguration.getAccountClientId()).thenReturn(ACCOUNT_CLIENT_ID); - accountClientUpdater = new AccountClientUpdater(WEBAPP_URL, keycloakConfiguration); - } - - @Test - void testUpdateAccountClientHomeUrl() { - ClientRepresentation accountClient = new ClientRepresentation(); - accountClient.setClientId(ACCOUNT_CLIENT_ID); - - ClientRepresentation anotherClient = new ClientRepresentation(); - anotherClient.setClientId("another-account"); - - List clients = Arrays.asList(accountClient, anotherClient); - - 
when(realmResource.clients()).thenReturn(clientsResource); - when(clientsResource.findAll()).thenReturn(clients); - when(clientsResource.get(accountClient.getId())).thenReturn(clientResource); - - doNothing().when(clientResource).update(accountClient); - - assertDoesNotThrow(() -> accountClientUpdater.updateAccountClientHomeUrl(realmResource)); - - verify(realmResource, times(2)).clients(); - verify(clientsResource).findAll(); - verify(clientsResource).get(accountClient.getId()); - verify(clientResource).update(accountClient); - } - - @Test - void testUpdateAccountClientHomeUrl_ClientNotFound() { - ClientRepresentation clientRepresentation = new ClientRepresentation(); - clientRepresentation.setClientId("differentClientId"); - - when(realmResource.clients()).thenReturn(clientsResource); - when(clientsResource.findAll()).thenReturn(List.of(clientRepresentation)); - - assertThrows(RuntimeException.class, () -> accountClientUpdater.updateAccountClientHomeUrl(realmResource)); - } - -} diff --git a/airbyte-keycloak-setup/src/test/java/io/airbyte/keycloak/setup/ConfigDbResetHelperTest.java b/airbyte-keycloak-setup/src/test/java/io/airbyte/keycloak/setup/ConfigDbResetHelperTest.java new file mode 100644 index 00000000000..0e83fce350c --- /dev/null +++ b/airbyte-keycloak-setup/src/test/java/io/airbyte/keycloak/setup/ConfigDbResetHelperTest.java @@ -0,0 +1,108 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.keycloak.setup; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; + +import io.airbyte.db.instance.configs.jooq.generated.Tables; +import io.airbyte.db.instance.configs.jooq.generated.enums.AuthProvider; +import io.airbyte.db.instance.configs.jooq.generated.enums.PermissionType; +import io.airbyte.test.utils.BaseConfigDatabaseTest; +import java.sql.SQLException; +import java.util.UUID; +import org.jooq.impl.TableImpl; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +public class ConfigDbResetHelperTest extends BaseConfigDatabaseTest { + + private static final UUID KEYCLOAK_USER_1_ID = UUID.randomUUID(); + private static final UUID KEYCLOAK_USER_2_ID = UUID.randomUUID(); + private static final UUID NON_KEYCLOAK_USER_ID = UUID.randomUUID(); + private static final UUID ORGANIZATION_ID = UUID.randomUUID(); + + private ConfigDbResetHelper configDbResetHelper; + + @BeforeEach + void setUp() throws Exception { + configDbResetHelper = new ConfigDbResetHelper(database); + truncateAllTables(); + + // Pre-populate the database with test data + database.transaction(ctx -> { + ctx.insertInto(Tables.ORGANIZATION, Tables.ORGANIZATION.ID, Tables.ORGANIZATION.NAME, Tables.ORGANIZATION.EMAIL) + .values(ORGANIZATION_ID, "Org", "org@airbyte.io") + .execute(); + + // Insert sample users, some with AuthProvider as keycloak and one with a different AuthProvider + ctx.insertInto(Tables.USER, Tables.USER.ID, Tables.USER.AUTH_PROVIDER, Tables.USER.AUTH_USER_ID, Tables.USER.EMAIL, Tables.USER.NAME) + .values(KEYCLOAK_USER_1_ID, AuthProvider.keycloak, UUID.randomUUID().toString(), "one@airbyte.io", "User One") + .values(KEYCLOAK_USER_2_ID, AuthProvider.keycloak, UUID.randomUUID().toString(), "two@airbyte.io", "User Two") + .values(NON_KEYCLOAK_USER_ID, AuthProvider.airbyte, UUID.randomUUID().toString(), "three@airbyte.io", "User Three") + .execute(); + + // 
Insert permissions for these users + ctx.insertInto(Tables.PERMISSION, Tables.PERMISSION.ID, Tables.PERMISSION.USER_ID, Tables.PERMISSION.ORGANIZATION_ID, + Tables.PERMISSION.PERMISSION_TYPE) + .values(UUID.randomUUID(), KEYCLOAK_USER_1_ID, ORGANIZATION_ID, PermissionType.organization_admin) + .values(UUID.randomUUID(), KEYCLOAK_USER_2_ID, ORGANIZATION_ID, PermissionType.organization_member) + .values(UUID.randomUUID(), NON_KEYCLOAK_USER_ID, null, PermissionType.instance_admin) + .execute(); + + return null; + }); + } + + @Test + void throwsIfMultipleOrgsDetected() throws Exception { + // Insert a second organization + database.query(ctx -> { + ctx.insertInto(Tables.ORGANIZATION, Tables.ORGANIZATION.ID, Tables.ORGANIZATION.NAME, Tables.ORGANIZATION.EMAIL) + .values(UUID.randomUUID(), "Org 2", "org2@airbyte.io") + .execute(); + return null; + }); + + // Expect an exception to be thrown when the helper is invoked + assertThrows(IllegalStateException.class, () -> configDbResetHelper.deleteConfigDbUsers()); + + // Expect no records to be deleted + assertEquals(3, countRowsInTable(Tables.USER)); + assertEquals(3, countRowsInTable(Tables.PERMISSION)); + } + + @Test + void deleteConfigDbUsers_KeycloakUsersExist_UsersAndPermissionsDeleted() throws SQLException { + // Before deletion, assert the initial state of the database + assertEquals(3, countRowsInTable(Tables.USER)); + assertEquals(3, countRowsInTable(Tables.PERMISSION)); + + // Perform the deletion operation + configDbResetHelper.deleteConfigDbUsers(); + + // Assert the state of the database after deletion + // Expecting users with AuthProvider keycloak and their permissions to be deleted + assertEquals(1, countRowsInTable(Tables.USER)); + assertEquals(1, countRowsInTable(Tables.PERMISSION)); + + // Assert that the remaining user is the one not backed by keycloak + final var remainingUserAuthProvider = database.query(ctx -> ctx.select(Tables.USER.AUTH_PROVIDER) + .from(Tables.USER) + 
.fetchOne(Tables.USER.AUTH_PROVIDER)); + assertEquals(AuthProvider.airbyte, remainingUserAuthProvider); + + // Assert that the remaining permission is the one not associated with a keycloak user + final var remainingPermissionType = database.query(ctx -> ctx.select(Tables.PERMISSION.PERMISSION_TYPE) + .from(Tables.PERMISSION) + .fetchOne(Tables.PERMISSION.PERMISSION_TYPE)); + assertEquals(PermissionType.instance_admin, remainingPermissionType); + } + + private int countRowsInTable(final TableImpl table) throws SQLException { + return database.query(ctx -> ctx.selectCount().from(table).fetchOne(0, int.class)); + } + +} diff --git a/airbyte-keycloak-setup/src/test/java/io/airbyte/keycloak/setup/ConfigurationMapServiceTest.java b/airbyte-keycloak-setup/src/test/java/io/airbyte/keycloak/setup/ConfigurationMapServiceTest.java index f34348e0fe7..7aeb2393e52 100644 --- a/airbyte-keycloak-setup/src/test/java/io/airbyte/keycloak/setup/ConfigurationMapServiceTest.java +++ b/airbyte-keycloak-setup/src/test/java/io/airbyte/keycloak/setup/ConfigurationMapServiceTest.java @@ -8,12 +8,14 @@ import static org.mockito.Mockito.when; import io.airbyte.commons.auth.config.AirbyteKeycloakConfiguration; -import io.airbyte.commons.auth.config.IdentityProviderConfiguration; +import io.airbyte.commons.auth.config.OidcConfig; import java.util.HashMap; import java.util.Map; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.ValueSource; import org.keycloak.admin.client.resource.IdentityProvidersResource; import org.keycloak.admin.client.resource.RealmResource; import org.mockito.InjectMocks; @@ -29,7 +31,7 @@ class ConfigurationMapServiceTest { @Mock private IdentityProvidersResource identityProvidersResource; @Mock - private IdentityProviderConfiguration identityProviderConfiguration; + private OidcConfig oidcConfig; @Mock 
private AirbyteKeycloakConfiguration keycloakConfiguration; @InjectMocks @@ -40,9 +42,17 @@ public void setUp() { configurationMapService = new ConfigurationMapService(WEBAPP_URL, keycloakConfiguration); } - @Test - void testImportProviderFrom() { - when(identityProviderConfiguration.getDomain()).thenReturn("trial-577.okta.com"); + @ParameterizedTest + @ValueSource(strings = { + "trial-577.okta.com", + "https://trial-577.okta.com", + "trial-577.okta.com/.well-known/openid-configuration", + "https://trial-577.okta.com/.well-known/openid-configuration", + "trial-577.okta.com/", + "https://trial-577.okta.com/", + }) + void testImportProviderFrom(String url) { + when(oidcConfig.domain()).thenReturn(url); when(realmResource.identityProviders()).thenReturn(identityProvidersResource); Map importFromMap = new HashMap<>(); @@ -62,7 +72,7 @@ void testImportProviderFrom() { when(identityProvidersResource.importFrom(importFromMap)).thenReturn(expected); Map actual = - configurationMapService.importProviderFrom(realmResource, identityProviderConfiguration, "oidc"); + configurationMapService.importProviderFrom(realmResource, oidcConfig, "oidc"); assertEquals(expected, actual); } @@ -77,10 +87,10 @@ void testSetupProviderConfig() { "issuer", "https://trial-577.okta.com/oauth2/default", "jwksUrl", "https://trial-577.okta.com/oauth2/default/v1/keys"); - when(identityProviderConfiguration.getClientId()).thenReturn("clientId"); - when(identityProviderConfiguration.getClientSecret()).thenReturn("clientSecret"); + when(oidcConfig.clientId()).thenReturn("clientId"); + when(oidcConfig.clientSecret()).thenReturn("clientSecret"); - Map result = configurationMapService.setupProviderConfig(identityProviderConfiguration, configMap); + Map result = configurationMapService.setupProviderConfig(oidcConfig, configMap); assertEquals("clientId", result.get("clientId")); assertEquals("clientSecret", result.get("clientSecret")); diff --git 
a/airbyte-keycloak-setup/src/test/java/io/airbyte/keycloak/setup/IdentityProvidersConfiguratorTest.java b/airbyte-keycloak-setup/src/test/java/io/airbyte/keycloak/setup/IdentityProvidersConfiguratorTest.java new file mode 100644 index 00000000000..efa50dc4a2b --- /dev/null +++ b/airbyte-keycloak-setup/src/test/java/io/airbyte/keycloak/setup/IdentityProvidersConfiguratorTest.java @@ -0,0 +1,205 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.keycloak.setup; + +import static io.airbyte.keycloak.setup.IdentityProvidersConfigurator.AIRBYTE_MANAGED_IDP_KEY; +import static io.airbyte.keycloak.setup.IdentityProvidersConfigurator.AIRBYTE_MANAGED_IDP_VALUE; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.argThat; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.never; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.verifyNoInteractions; +import static org.mockito.Mockito.verifyNoMoreInteractions; +import static org.mockito.Mockito.when; + +import io.airbyte.commons.auth.config.OidcConfig; +import jakarta.ws.rs.core.Response; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Nested; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.keycloak.admin.client.resource.IdentityProviderResource; +import org.keycloak.admin.client.resource.IdentityProvidersResource; +import org.keycloak.admin.client.resource.RealmResource; +import org.keycloak.representations.idm.IdentityProviderRepresentation; +import org.mockito.InjectMocks; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; + 
+@ExtendWith(MockitoExtension.class) +class IdentityProvidersConfiguratorTest { + + @Mock + private RealmResource realmResource; + @Mock + private ConfigurationMapService configurationMapService; + @Mock + private OidcConfig oidcConfig; + @Mock + private IdentityProvidersResource identityProvidersResource; + @Mock + private IdentityProviderResource identityProviderResource; + @Mock + private IdentityProviderRepresentation identityProviderRepresentation; + @InjectMocks + private IdentityProvidersConfigurator identityProvidersConfigurator; + + @BeforeEach + void setUp() { + identityProvidersConfigurator = new IdentityProvidersConfigurator(configurationMapService, Optional.of(oidcConfig)); + } + + @Nested + class ConfigureIdp { + + @Test + void testNoOidcConfig() { + identityProvidersConfigurator = new IdentityProvidersConfigurator(configurationMapService, Optional.empty()); + + identityProvidersConfigurator.configureIdp(realmResource); + + verifyNoInteractions(realmResource); + } + + @Test + void testNoExistingIdp() { + when(realmResource.identityProviders()).thenReturn(identityProvidersResource); + when(identityProvidersResource.findAll()).thenReturn(Collections.emptyList()); + + final Response response = mock(Response.class); + when(response.getStatus()).thenReturn(201); + when(identityProvidersResource.create(any(IdentityProviderRepresentation.class))).thenReturn(response); + + final Map importedMap = mock(HashMap.class); + final Map configMap = mock(HashMap.class); + when(configurationMapService.importProviderFrom(realmResource, oidcConfig, "oidc")) + .thenReturn(importedMap); + when(configurationMapService.setupProviderConfig(oidcConfig, importedMap)) + .thenReturn(configMap); + + identityProvidersConfigurator.configureIdp(realmResource); + + // verify the idp is created with the correct config + verify(identityProvidersResource, times(1)).create(argThat(idp -> idp.getConfig().equals(configMap))); + // verify that the idp is marked as managed by Airbyte + 
verify(configMap, times(1)).put(AIRBYTE_MANAGED_IDP_KEY, AIRBYTE_MANAGED_IDP_VALUE); + } + + @Test + void testOneExistingIdpNotMarked() { + when(realmResource.identityProviders()).thenReturn(identityProvidersResource); + when(identityProvidersResource.findAll()).thenReturn(List.of(identityProviderRepresentation)); + when(identityProviderRepresentation.getInternalId()).thenReturn("some-internal-id"); + when(identityProviderRepresentation.getAlias()).thenReturn("some-alias"); + when(identityProvidersResource.get("some-alias")).thenReturn(identityProviderResource); + + final Map importedMap = mock(HashMap.class); + final Map configMap = mock(HashMap.class); + when(configurationMapService.importProviderFrom(realmResource, oidcConfig, "oidc")) + .thenReturn(importedMap); + when(configurationMapService.setupProviderConfig(oidcConfig, importedMap)) + .thenReturn(configMap); + + identityProvidersConfigurator.configureIdp(realmResource); + + // verify the existing idp (based on internal id) is updated with new config + verify(identityProviderResource, times(1)).update( + argThat(idp -> idp.getConfig().equals(configMap) && idp.getInternalId().equals("some-internal-id"))); + // verify that the idp is marked as managed by Airbyte + verify(configMap, times(1)).put(AIRBYTE_MANAGED_IDP_KEY, AIRBYTE_MANAGED_IDP_VALUE); + } + + @Test + void testMultipleExistingIdpOnlyOneMarked() { + final IdentityProviderRepresentation unmarkedIdp = mock(IdentityProviderRepresentation.class); + when(unmarkedIdp.getConfig()).thenReturn(Map.of()); // does not contain marked key + when(identityProviderRepresentation.getConfig()).thenReturn(Map.of(AIRBYTE_MANAGED_IDP_KEY, AIRBYTE_MANAGED_IDP_VALUE)); + + when(realmResource.identityProviders()).thenReturn(identityProvidersResource); + when(identityProvidersResource.findAll()).thenReturn(List.of(unmarkedIdp, identityProviderRepresentation)); + when(identityProviderRepresentation.getInternalId()).thenReturn("some-internal-id"); + 
when(identityProviderRepresentation.getAlias()).thenReturn("some-alias"); + when(identityProvidersResource.get("some-alias")).thenReturn(identityProviderResource); + + final Map importedMap = mock(HashMap.class); + final Map configMap = mock(HashMap.class); + when(configurationMapService.importProviderFrom(realmResource, oidcConfig, "oidc")) + .thenReturn(importedMap); + when(configurationMapService.setupProviderConfig(oidcConfig, importedMap)) + .thenReturn(configMap); + + identityProvidersConfigurator.configureIdp(realmResource); + + // verify the marked idp is updated with new config + verify(identityProviderResource, times(1)).update( + argThat(idp -> idp.getConfig().equals(configMap) && idp.getInternalId().equals("some-internal-id"))); + // verify the unmarkedIdp was examined, but not touched + verify(unmarkedIdp, times(1)).getConfig(); + verifyNoMoreInteractions(unmarkedIdp); + } + + @Test + void testMultipleExistingIdpsMultipleMarked() { + final IdentityProviderRepresentation otherMarkedIdp = mock(IdentityProviderRepresentation.class); + when(otherMarkedIdp.getConfig()).thenReturn(Map.of(AIRBYTE_MANAGED_IDP_KEY, AIRBYTE_MANAGED_IDP_VALUE)); + when(identityProviderRepresentation.getConfig()).thenReturn(Map.of(AIRBYTE_MANAGED_IDP_KEY, AIRBYTE_MANAGED_IDP_VALUE)); + + when(realmResource.identityProviders()).thenReturn(identityProvidersResource); + when(identityProvidersResource.findAll()).thenReturn(List.of(otherMarkedIdp, identityProviderRepresentation)); + + identityProvidersConfigurator.configureIdp(realmResource); + + // verify that no creates or updates took place, because multiple idps were marked and could not be + // distinguished + verify(identityProviderResource, never()).update(any()); + verify(identityProviderResource, never()).update(any()); + } + + @Test + void testMultipleExistingIdpsNoneMarked() { + final IdentityProviderRepresentation otherUnmarkedIdp = mock(IdentityProviderRepresentation.class); + 
when(otherUnmarkedIdp.getConfig()).thenReturn(Map.of()); + when(identityProviderRepresentation.getConfig()).thenReturn(Map.of()); + + when(realmResource.identityProviders()).thenReturn(identityProvidersResource); + when(identityProvidersResource.findAll()).thenReturn(List.of(otherUnmarkedIdp, identityProviderRepresentation)); + + identityProvidersConfigurator.configureIdp(realmResource); + + // verify that no creates or updates took place, because multiple idps could not be distinguished and + // none were marked + verify(identityProviderResource, never()).update(any()); + verify(identityProviderResource, never()).update(any()); + } + + @Test + void testCreateFailureThrows() { + when(realmResource.identityProviders()).thenReturn(identityProvidersResource); + when(identityProvidersResource.findAll()).thenReturn(Collections.emptyList()); + when(identityProvidersResource.create(any(IdentityProviderRepresentation.class))) + .thenReturn(Response.status(Response.Status.BAD_REQUEST).build()); + + final Map configMap = new HashMap<>(); + when(configurationMapService.importProviderFrom(realmResource, oidcConfig, "oidc")) + .thenReturn(configMap); + when(configurationMapService.setupProviderConfig(oidcConfig, configMap)) + .thenReturn(configMap); + + assertThrows(RuntimeException.class, () -> { + identityProvidersConfigurator.configureIdp(realmResource); + }); + } + + } + +} diff --git a/airbyte-keycloak-setup/src/test/java/io/airbyte/keycloak/setup/IdentityProvidersCreatorTest.java b/airbyte-keycloak-setup/src/test/java/io/airbyte/keycloak/setup/IdentityProvidersCreatorTest.java deleted file mode 100644 index c411e840ceb..00000000000 --- a/airbyte-keycloak-setup/src/test/java/io/airbyte/keycloak/setup/IdentityProvidersCreatorTest.java +++ /dev/null @@ -1,99 +0,0 @@ -/* - * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.keycloak.setup; - -import static org.junit.jupiter.api.Assertions.assertThrows; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; - -import io.airbyte.commons.auth.config.IdentityProviderConfiguration; -import jakarta.ws.rs.core.Response; -import java.util.Collections; -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.extension.ExtendWith; -import org.keycloak.admin.client.resource.IdentityProvidersResource; -import org.keycloak.admin.client.resource.RealmResource; -import org.keycloak.representations.idm.IdentityProviderRepresentation; -import org.mockito.InjectMocks; -import org.mockito.Mock; -import org.mockito.junit.jupiter.MockitoExtension; - -@ExtendWith(MockitoExtension.class) -class IdentityProvidersCreatorTest { - - @Mock - private RealmResource realmResource; - @Mock - private ConfigurationMapService configurationMapService; - @Mock - private IdentityProviderConfiguration identityProviderConfiguration; - @Mock - private IdentityProvidersResource identityProvidersResource; - @InjectMocks - private IdentityProvidersCreator identityProvidersCreator; - - @BeforeEach - void setUp() { - identityProvidersCreator = new IdentityProvidersCreator(Collections.singletonList(identityProviderConfiguration), - configurationMapService); - } - - @Test - void testCreateIdps() { - when(realmResource.identityProviders()).thenReturn(identityProvidersResource); - - final Response response = mock(Response.class); - when(response.getStatus()).thenReturn(201); - when(identityProvidersResource.create(any(IdentityProviderRepresentation.class))).thenReturn(response); - - identityProvidersCreator.createIdps(realmResource); - - 
verify(identityProvidersResource).create(any(IdentityProviderRepresentation.class)); - } - - @Test - void testCreateIdps_Success() { - when(realmResource.identityProviders()).thenReturn(identityProvidersResource); - when(identityProvidersResource.create(any(IdentityProviderRepresentation.class))) - .thenReturn(Response.status(Response.Status.CREATED).build()); - when(identityProviderConfiguration.getType()).thenReturn(IdentityProviderConfiguration.ProviderType.OIDC); - - final Map configMap = new HashMap<>(); - when(configurationMapService.importProviderFrom(realmResource, identityProviderConfiguration, "oidc")) - .thenReturn(configMap); - when(configurationMapService.setupProviderConfig(identityProviderConfiguration, configMap)) - .thenReturn(configMap); - - identityProvidersCreator.createIdps(realmResource); - - verify(realmResource, times(1)).identityProviders(); - verify(identityProvidersResource, times(1)).create(any(IdentityProviderRepresentation.class)); - } - - @Test - void testCreateIdps_Failure() { - when(realmResource.identityProviders()).thenReturn(identityProvidersResource); - when(identityProvidersResource.create(any(IdentityProviderRepresentation.class))) - .thenReturn(Response.status(Response.Status.BAD_REQUEST).build()); - when(identityProviderConfiguration.getType()).thenReturn(IdentityProviderConfiguration.ProviderType.OIDC); - - final Map configMap = new HashMap<>(); - when(configurationMapService.importProviderFrom(realmResource, identityProviderConfiguration, "oidc")) - .thenReturn(configMap); - when(configurationMapService.setupProviderConfig(identityProviderConfiguration, configMap)) - .thenReturn(configMap); - - assertThrows(RuntimeException.class, () -> { - identityProvidersCreator.createIdps(realmResource); - }); - } - -} diff --git a/airbyte-keycloak-setup/src/test/java/io/airbyte/keycloak/setup/KeycloakServerTest.java b/airbyte-keycloak-setup/src/test/java/io/airbyte/keycloak/setup/KeycloakServerTest.java index 7d7f814ea33..4ad158fe1fe 
100644 --- a/airbyte-keycloak-setup/src/test/java/io/airbyte/keycloak/setup/KeycloakServerTest.java +++ b/airbyte-keycloak-setup/src/test/java/io/airbyte/keycloak/setup/KeycloakServerTest.java @@ -6,6 +6,7 @@ import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.mockito.ArgumentMatchers.argThat; import static org.mockito.Mockito.any; import static org.mockito.Mockito.anyString; import static org.mockito.Mockito.times; @@ -27,27 +28,31 @@ class KeycloakServerTest { private static final String REALM_NAME = "airbyte"; + private static final String WEBAPP_URL = "http://localhost:8000"; + private static final String AUTH_PATH = "/auth"; + private static final String FRONTEND_URL_ATTRIBUTE = "frontendUrl"; @Mock private KeycloakAdminClientProvider keycloakAdminClientProvider; @Mock private AirbyteKeycloakConfiguration keycloakConfiguration; @Mock - private UserCreator userCreator; + private UserConfigurator userConfigurator; @Mock - private WebClientCreator webClientCreator; + private WebClientConfigurator webClientConfigurator; @Mock - private IdentityProvidersCreator identityProvidersCreator; + private IdentityProvidersConfigurator identityProvidersConfigurator; @Mock - private AccountClientUpdater accountClientUpdater; - @Mock - private ClientScopeCreator clientScopeCreator; + private ClientScopeConfigurator clientScopeConfigurator; @Mock private Keycloak keycloakAdminClient; @Mock private RealmsResource realmsResource; @Mock private RealmResource airbyteRealm; + @Mock + private RealmRepresentation airbyteRealmRep; + private KeycloakServer keycloakServer; @BeforeEach @@ -62,26 +67,28 @@ void setUp() { when(keycloakAdminClient.realms()).thenReturn(realmsResource); when(realmsResource.findAll()).thenReturn(Collections.emptyList()); when(keycloakAdminClient.realm(anyString())).thenReturn(airbyteRealm); + when(airbyteRealm.toRepresentation()).thenReturn(airbyteRealmRep); keycloakServer = 
new KeycloakServer(keycloakAdminClientProvider, keycloakConfiguration, - userCreator, - webClientCreator, - identityProvidersCreator, - accountClientUpdater, - clientScopeCreator); + userConfigurator, + webClientConfigurator, + identityProvidersConfigurator, + clientScopeConfigurator, + WEBAPP_URL); } @Test - void testCreateAirbyteRealm() { - keycloakServer.createAirbyteRealm(); + void testSetupAirbyteRealmWhenRealmDoesNotExist() { + keycloakServer.setupAirbyteRealm(); verify(realmsResource, times(1)).findAll(); verify(realmsResource, times(1)).create(any()); - verify(userCreator, times(1)).createUser(airbyteRealm); - verify(webClientCreator, times(1)).createWebClient(airbyteRealm); - verify(identityProvidersCreator, times(1)).createIdps(airbyteRealm); - verify(accountClientUpdater, times(1)).updateAccountClientHomeUrl(airbyteRealm); + verify(userConfigurator, times(1)).configureUser(airbyteRealm); + verify(webClientConfigurator, times(1)).configureWebClient(airbyteRealm); + verify(identityProvidersConfigurator, times(1)).configureIdp(airbyteRealm); + verify(airbyteRealmRep, times(1)).setAttributes(argThat(map -> map.get(FRONTEND_URL_ATTRIBUTE).equals(WEBAPP_URL + AUTH_PATH))); + verify(airbyteRealm, times(1)).update(airbyteRealmRep); } @Test @@ -92,19 +99,20 @@ void testCreateAirbyteRealmWhenRealmAlreadyExists() { when(realmsResource.findAll()).thenReturn(Collections.singletonList(existingRealm)); when(keycloakConfiguration.getAirbyteRealm()).thenReturn(REALM_NAME); - keycloakServer.createAirbyteRealm(); + keycloakServer.setupAirbyteRealm(); verify(realmsResource, times(1)).findAll(); - verify(realmsResource, times(0)).create(any()); - verify(userCreator, times(0)).createUser(any()); - verify(webClientCreator, times(0)).createWebClient(any()); - verify(identityProvidersCreator, times(0)).createIdps(any()); - verify(accountClientUpdater, times(0)).updateAccountClientHomeUrl(any()); + verify(realmsResource, times(0)).create(any()); // create not called, but other 
configuration methods should be called every time + verify(userConfigurator, times(1)).configureUser(any()); + verify(webClientConfigurator, times(1)).configureWebClient(any()); + verify(identityProvidersConfigurator, times(1)).configureIdp(any()); + verify(airbyteRealmRep, times(1)).setAttributes(argThat(map -> map.get(FRONTEND_URL_ATTRIBUTE).equals(WEBAPP_URL + AUTH_PATH))); + verify(airbyteRealm, times(1)).update(airbyteRealmRep); } @Test void testBuildRealmRepresentation() { - keycloakServer.createAirbyteRealm(); + keycloakServer.setupAirbyteRealm(); final ArgumentCaptor realmRepresentationCaptor = ArgumentCaptor.forClass(RealmRepresentation.class); verify(realmsResource).create(realmRepresentationCaptor.capture()); @@ -121,29 +129,31 @@ void testRecreateAirbyteRealm() { existingRealm.setRealm(REALM_NAME); when(realmsResource.findAll()).thenReturn(Collections.singletonList(existingRealm)); - keycloakServer.recreateAirbyteRealm(); + keycloakServer.destroyAndRecreateAirbyteRealm(); verify(airbyteRealm, times(1)).remove(); verify(realmsResource, times(1)).create(any()); - verify(userCreator, times(1)).createUser(airbyteRealm); - verify(webClientCreator, times(1)).createWebClient(airbyteRealm); - verify(identityProvidersCreator, times(1)).createIdps(airbyteRealm); - verify(accountClientUpdater, times(1)).updateAccountClientHomeUrl(airbyteRealm); + verify(userConfigurator, times(1)).configureUser(airbyteRealm); + verify(webClientConfigurator, times(1)).configureWebClient(airbyteRealm); + verify(identityProvidersConfigurator, times(1)).configureIdp(airbyteRealm); + verify(airbyteRealmRep, times(1)).setAttributes(argThat(map -> map.get(FRONTEND_URL_ATTRIBUTE).equals(WEBAPP_URL + AUTH_PATH))); + verify(airbyteRealm, times(1)).update(airbyteRealmRep); } @Test void testRecreateAirbyteRealmWhenRealmDoesNotExist() { when(realmsResource.findAll()).thenReturn(Collections.emptyList()); - keycloakServer.recreateAirbyteRealm(); + keycloakServer.destroyAndRecreateAirbyteRealm(); 
// should behave the same as createAirbyteRealm in this case. verify(airbyteRealm, times(0)).remove(); verify(realmsResource, times(1)).create(any()); - verify(userCreator, times(1)).createUser(airbyteRealm); - verify(webClientCreator, times(1)).createWebClient(airbyteRealm); - verify(identityProvidersCreator, times(1)).createIdps(airbyteRealm); - verify(accountClientUpdater, times(1)).updateAccountClientHomeUrl(airbyteRealm); + verify(userConfigurator, times(1)).configureUser(airbyteRealm); + verify(webClientConfigurator, times(1)).configureWebClient(airbyteRealm); + verify(identityProvidersConfigurator, times(1)).configureIdp(airbyteRealm); + verify(airbyteRealmRep, times(1)).setAttributes(argThat(map -> map.get(FRONTEND_URL_ATTRIBUTE).equals(WEBAPP_URL + AUTH_PATH))); + verify(airbyteRealm, times(1)).update(airbyteRealmRep); } } diff --git a/airbyte-keycloak-setup/src/test/java/io/airbyte/keycloak/setup/KeycloakSetupTest.java b/airbyte-keycloak-setup/src/test/java/io/airbyte/keycloak/setup/KeycloakSetupTest.java index b9e4c05fd42..c62b77dbcd8 100644 --- a/airbyte-keycloak-setup/src/test/java/io/airbyte/keycloak/setup/KeycloakSetupTest.java +++ b/airbyte-keycloak-setup/src/test/java/io/airbyte/keycloak/setup/KeycloakSetupTest.java @@ -33,6 +33,8 @@ class KeycloakSetupTest { private KeycloakServer keycloakServer; @Mock private AirbyteKeycloakConfiguration keycloakConfiguration; + @Mock + private ConfigDbResetHelper configDbResetHelper; private KeycloakSetup keycloakSetup; @@ -45,17 +47,18 @@ void setup() { when(blockingHttpClient.exchange(any(HttpRequest.class), eq(String.class))) .thenReturn(HttpResponse.ok()); - keycloakSetup = new KeycloakSetup(httpClient, keycloakServer, keycloakConfiguration); + keycloakSetup = new KeycloakSetup(httpClient, keycloakServer, keycloakConfiguration, configDbResetHelper); } @Test - void testRun() { + void testRun() throws Exception { keycloakSetup.run(); verify(httpClient).toBlocking(); 
verify(blockingHttpClient).exchange(any(HttpRequest.class), eq(String.class)); - verify(keycloakServer).createAirbyteRealm(); + verify(keycloakServer).setupAirbyteRealm(); verify(keycloakServer).closeKeycloakAdminClient(); + verify(configDbResetHelper, never()).deleteConfigDbUsers(); } @Test @@ -67,18 +70,19 @@ void testRunThrowsException() { verify(keycloakServer).getKeycloakServerUrl(); verify(httpClient.toBlocking()).exchange(any(HttpRequest.class), eq(String.class)); - verify(keycloakServer, never()).createAirbyteRealm(); // Should not be called if exception is thrown + verify(keycloakServer, never()).setupAirbyteRealm(); // Should not be called if exception is thrown verify(keycloakServer).closeKeycloakAdminClient(); } @Test - void testResetRealm() { + void testResetRealm() throws Exception { when(keycloakConfiguration.getResetRealm()).thenReturn(true); keycloakSetup.run(); - verify(keycloakServer, times(0)).createAirbyteRealm(); - verify(keycloakServer, times(1)).recreateAirbyteRealm(); + verify(keycloakServer, times(0)).setupAirbyteRealm(); + verify(keycloakServer, times(1)).destroyAndRecreateAirbyteRealm(); + verify(configDbResetHelper, times(1)).deleteConfigDbUsers(); } } diff --git a/airbyte-keycloak-setup/src/test/java/io/airbyte/keycloak/setup/UserConfiguratorTest.java b/airbyte-keycloak-setup/src/test/java/io/airbyte/keycloak/setup/UserConfiguratorTest.java new file mode 100644 index 00000000000..f5aab08c4eb --- /dev/null +++ b/airbyte-keycloak-setup/src/test/java/io/airbyte/keycloak/setup/UserConfiguratorTest.java @@ -0,0 +1,145 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.keycloak.setup; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.mockito.ArgumentMatchers.argThat; +import static org.mockito.Mockito.any; +import static org.mockito.Mockito.never; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +import io.airbyte.commons.auth.config.InitialUserConfiguration; +import jakarta.ws.rs.core.Response; +import java.util.Collections; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.keycloak.admin.client.resource.RealmResource; +import org.keycloak.admin.client.resource.UserResource; +import org.keycloak.admin.client.resource.UsersResource; +import org.keycloak.representations.idm.CredentialRepresentation; +import org.keycloak.representations.idm.UserRepresentation; +import org.mockito.Mock; +import org.mockito.MockitoAnnotations; + +class UserConfiguratorTest { + + private static final String EMAIL = "jon@airbyte.io"; + private static final String FIRST_NAME = "Jon"; + private static final String LAST_NAME = "Smith"; + private static final String PASSWORD = "airbytePassword"; + private static final String KEYCLOAK_USER_ID = "some-id"; + + // set up a static Keycloak UserRepresentation based on the constants above + private static final UserRepresentation USER_REPRESENTATION = new UserRepresentation(); + + static { + USER_REPRESENTATION.setId(KEYCLOAK_USER_ID); + USER_REPRESENTATION.setUsername(EMAIL); + USER_REPRESENTATION.setEmail(EMAIL); + USER_REPRESENTATION.setFirstName(FIRST_NAME); + USER_REPRESENTATION.setLastName(LAST_NAME); + USER_REPRESENTATION.setEnabled(true); + + final CredentialRepresentation credentialRepresentation = new CredentialRepresentation(); + credentialRepresentation.setType(CredentialRepresentation.PASSWORD); + credentialRepresentation.setValue(PASSWORD); + credentialRepresentation.setTemporary(false); + + 
USER_REPRESENTATION.setCredentials(Collections.singletonList(credentialRepresentation)); + } + + private UserConfigurator userConfigurator; + @Mock + private InitialUserConfiguration initialUserConfiguration; + @Mock + private RealmResource realmResource; + @Mock + private UsersResource usersResource; + @Mock + private UserResource userResource; + @Mock + private Response response; + + @BeforeEach + void setUp() { + MockitoAnnotations.openMocks(this); + + when(initialUserConfiguration.getEmail()).thenReturn(EMAIL); + when(initialUserConfiguration.getFirstName()).thenReturn(FIRST_NAME); + when(initialUserConfiguration.getLastName()).thenReturn(LAST_NAME); + when(initialUserConfiguration.getPassword()).thenReturn(PASSWORD); + + when(realmResource.users()).thenReturn(usersResource); + when(usersResource.create(any(UserRepresentation.class))).thenReturn(response); + + when(usersResource.get(KEYCLOAK_USER_ID)).thenReturn(userResource); + when(response.getStatusInfo()).thenReturn(Response.Status.OK); + + userConfigurator = new UserConfigurator(initialUserConfiguration); + } + + @Test + void testConfigureUser() { + when(response.getStatus()).thenReturn(201); + + userConfigurator.configureUser(realmResource); + + verify(usersResource).create(argThat(userRepresentation -> userRepresentation.getId() == null + && userRepresentation.getUsername().equals(EMAIL) + && userRepresentation.getEmail().equals(EMAIL) + && userRepresentation.getFirstName().equals(FIRST_NAME) + && userRepresentation.getLastName().equals(LAST_NAME) + && userRepresentation.isEnabled() + && userRepresentation.getCredentials().size() == 1 + && userRepresentation.getCredentials().getFirst().getType().equals(CredentialRepresentation.PASSWORD) + && userRepresentation.getCredentials().getFirst().getValue().equals(PASSWORD) + && !userRepresentation.getCredentials().getFirst().isTemporary() + && userRepresentation.getCredentials().equals(USER_REPRESENTATION.getCredentials()))); + } + + @Test + void 
testConfigureUserAlreadyExists() { + when(usersResource.searchByEmail(EMAIL, true)).thenReturn(Collections.singletonList(USER_REPRESENTATION)); + + userConfigurator.configureUser(realmResource); + + verify(usersResource, never()).create(any()); + verify(userResource).update(argThat(userRepresentation -> userRepresentation.getId().equals(USER_REPRESENTATION.getId()) + && userRepresentation.getUsername().equals(USER_REPRESENTATION.getUsername()) + && userRepresentation.getEmail().equals(USER_REPRESENTATION.getEmail()) + && userRepresentation.getFirstName().equals(USER_REPRESENTATION.getFirstName()) + && userRepresentation.getLastName().equals(USER_REPRESENTATION.getLastName()) + && userRepresentation.isEnabled() == USER_REPRESENTATION.isEnabled() + && userRepresentation.getCredentials().equals(USER_REPRESENTATION.getCredentials()))); + } + + @Test + void testConfigureUserRepresentation() { + when(initialUserConfiguration.getEmail()).thenReturn(EMAIL); + when(initialUserConfiguration.getFirstName()).thenReturn(FIRST_NAME); + when(initialUserConfiguration.getLastName()).thenReturn(LAST_NAME); + + final UserRepresentation userRepresentation = userConfigurator.getUserRepresentationFromConfig(); + + assertEquals(EMAIL, userRepresentation.getUsername()); // we want to set the username to the configured email + assertEquals(EMAIL, userRepresentation.getEmail()); + assertEquals(FIRST_NAME, userRepresentation.getFirstName()); + assertEquals(LAST_NAME, userRepresentation.getLastName()); + } + + @Test + void testCreateCredentialRepresentation() { + when(initialUserConfiguration.getPassword()).thenReturn(PASSWORD); + + final CredentialRepresentation credentialRepresentation = userConfigurator.createCredentialRepresentation(); + + assertFalse(credentialRepresentation.isTemporary()); + assertEquals(CredentialRepresentation.PASSWORD, credentialRepresentation.getType()); + assertEquals(PASSWORD, credentialRepresentation.getValue()); + } + +} diff --git 
a/airbyte-keycloak-setup/src/test/java/io/airbyte/keycloak/setup/UserCreatorTest.java b/airbyte-keycloak-setup/src/test/java/io/airbyte/keycloak/setup/UserCreatorTest.java deleted file mode 100644 index 3d5e8f348a9..00000000000 --- a/airbyte-keycloak-setup/src/test/java/io/airbyte/keycloak/setup/UserCreatorTest.java +++ /dev/null @@ -1,112 +0,0 @@ -/* - * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.keycloak.setup; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertFalse; -import static org.mockito.ArgumentMatchers.anyInt; -import static org.mockito.ArgumentMatchers.anyString; -import static org.mockito.Mockito.any; -import static org.mockito.Mockito.never; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; - -import io.airbyte.commons.auth.config.InitialUserConfiguration; -import jakarta.ws.rs.core.Response; -import java.util.Collections; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; -import org.keycloak.admin.client.resource.RealmResource; -import org.keycloak.admin.client.resource.UserResource; -import org.keycloak.admin.client.resource.UsersResource; -import org.keycloak.representations.idm.CredentialRepresentation; -import org.keycloak.representations.idm.UserRepresentation; -import org.mockito.Mock; -import org.mockito.MockitoAnnotations; - -class UserCreatorTest { - - private static final String USERNAME = "jon"; - private static final String EMAIL = "jon@airbyte.io"; - private static final String FIRST_NAME = "Jon"; - private static final String LAST_NAME = "Smith"; - private static final String PASSWORD = "airbytePassword"; - private UserCreator userCreator; - @Mock - private InitialUserConfiguration initialUserConfiguration; - @Mock - private RealmResource realmResource; - @Mock - private UsersResource usersResource; - @Mock - private UserResource userResource; - @Mock - private 
Response response; - - @BeforeEach - void setUp() { - MockitoAnnotations.openMocks(this); - - when(initialUserConfiguration.getUsername()).thenReturn(USERNAME); - when(initialUserConfiguration.getEmail()).thenReturn(EMAIL); - when(initialUserConfiguration.getFirstName()).thenReturn(FIRST_NAME); - when(initialUserConfiguration.getLastName()).thenReturn(LAST_NAME); - when(initialUserConfiguration.getPassword()).thenReturn(PASSWORD); - - when(realmResource.users()).thenReturn(usersResource); - when(usersResource.create(any(UserRepresentation.class))).thenReturn(response); - - when(usersResource.get(anyString())).thenReturn(userResource); - when(usersResource.search(anyString(), anyInt(), anyInt())).thenReturn(Collections.singletonList(new UserRepresentation())); - when(response.getStatusInfo()).thenReturn(Response.Status.OK); - - userCreator = new UserCreator(initialUserConfiguration); - } - - @Test - void testCreateUser() { - when(response.getStatus()).thenReturn(201); - - userCreator.createUser(realmResource); - - verify(usersResource).create(any(UserRepresentation.class)); - } - - @Test - void testCreateUserAlreadyExists() { - when(usersResource.search(anyString())).thenReturn(Collections.singletonList(new UserRepresentation())); - - userCreator.createUser(realmResource); - - verify(usersResource, never()).create(any()); - } - - @Test - void testCreateUserRepresentation() { - when(initialUserConfiguration.getUsername()).thenReturn(USERNAME); - when(initialUserConfiguration.getEmail()).thenReturn(EMAIL); - when(initialUserConfiguration.getFirstName()).thenReturn(FIRST_NAME); - when(initialUserConfiguration.getLastName()).thenReturn(LAST_NAME); - - final UserRepresentation userRepresentation = userCreator.createUserRepresentation(); - - assertEquals(USERNAME, userRepresentation.getUsername()); - assertEquals(EMAIL, userRepresentation.getEmail()); - assertEquals(FIRST_NAME, userRepresentation.getFirstName()); - assertEquals(LAST_NAME, 
userRepresentation.getLastName()); - } - - @Test - void testCreateCredentialRepresentation() { - when(initialUserConfiguration.getPassword()).thenReturn(PASSWORD); - - final CredentialRepresentation credentialRepresentation = userCreator.createCredentialRepresentation(); - - assertFalse(credentialRepresentation.isTemporary()); - assertEquals(CredentialRepresentation.PASSWORD, credentialRepresentation.getType()); - assertEquals(PASSWORD, credentialRepresentation.getValue()); - } - -} diff --git a/airbyte-keycloak-setup/src/test/java/io/airbyte/keycloak/setup/WebClientCreatorTest.java b/airbyte-keycloak-setup/src/test/java/io/airbyte/keycloak/setup/WebClientConfiguratorTest.java similarity index 87% rename from airbyte-keycloak-setup/src/test/java/io/airbyte/keycloak/setup/WebClientCreatorTest.java rename to airbyte-keycloak-setup/src/test/java/io/airbyte/keycloak/setup/WebClientConfiguratorTest.java index 22a70d8ad8f..d58f13b80e9 100644 --- a/airbyte-keycloak-setup/src/test/java/io/airbyte/keycloak/setup/WebClientCreatorTest.java +++ b/airbyte-keycloak-setup/src/test/java/io/airbyte/keycloak/setup/WebClientConfiguratorTest.java @@ -23,7 +23,7 @@ import org.mockito.junit.jupiter.MockitoExtension; @ExtendWith(MockitoExtension.class) -class WebClientCreatorTest { +class WebClientConfiguratorTest { private static final String WEBAPP_URL = "http://localhost:8000"; private static final String WEB_CLIENT_ID = "airbyte-okta"; @@ -36,12 +36,12 @@ class WebClientCreatorTest { @Mock private Response response; @InjectMocks - private WebClientCreator webClientCreator; + private WebClientConfigurator webClientConfigurator; @BeforeEach void setUp() { when(keycloakConfiguration.getWebClientId()).thenReturn(WEB_CLIENT_ID); - webClientCreator = new WebClientCreator(WEBAPP_URL, keycloakConfiguration); + webClientConfigurator = new WebClientConfigurator(WEBAPP_URL, keycloakConfiguration); } @Test @@ -50,7 +50,7 @@ void testCreateWebClient() { 
when(clientsResource.create(any(ClientRepresentation.class))).thenReturn(response); when(response.getStatus()).thenReturn(201); - webClientCreator.createWebClient(realmResource); + webClientConfigurator.configureWebClient(realmResource); verify(clientsResource).create(any(ClientRepresentation.class)); } @@ -59,12 +59,11 @@ void testCreateWebClient() { void testCreateClientRepresentation() { when(keycloakConfiguration.getWebClientId()).thenReturn(WEB_CLIENT_ID); - final ClientRepresentation clientRepresentation = webClientCreator.createClientRepresentation(); + final ClientRepresentation clientRepresentation = webClientConfigurator.getClientRepresentationFromConfig(); assertEquals(WEB_CLIENT_ID, clientRepresentation.getClientId()); assertTrue(clientRepresentation.isPublicClient()); assertTrue(clientRepresentation.isDirectAccessGrantsEnabled()); - assertEquals(WEBAPP_URL, clientRepresentation.getBaseUrl()); assertEquals("180", clientRepresentation.getAttributes().get("access.token.lifespan")); } diff --git a/airbyte-keycloak/Dockerfile b/airbyte-keycloak/Dockerfile index 4c6457d9c76..7f9158f8a6b 100644 --- a/airbyte-keycloak/Dockerfile +++ b/airbyte-keycloak/Dockerfile @@ -9,6 +9,23 @@ FROM airbyte/mirrored-keycloak:23.0.3 WORKDIR /opt/keycloak COPY bin/scripts/entrypoint.sh entrypoint.sh -COPY bin/themes/airbyte-keycloak-theme themes/airbyte-keycloak-theme +COPY bin/themes themes + +# Doing this instead of creating a separate file and copying it to ensure that we get any keycloak updates to this conf file. +RUN cp conf/cache-ispn.xml conf/cache-ispn-override.xml && \ +sed -i conf/cache-ispn-override.xml -e 's///g' && \ +sed -i conf/cache-ispn-override.xml -e 's///g' && \ +# Make sure that the two lines we wanted to be there are actually there +# i.e. 
keycloak didn't change its config file +grep '' conf/cache-ispn-override.xml -q && \ +grep '' conf/cache-ispn-override.xml -q && \ +# Create the directory for the infinispan global-state persistence +mkdir -p /opt/keycloak/data/infinispan && \ +# Inserting the block after the start tag +sed -i '/<\/global-state>' conf/cache-ispn-override.xml && \ +# Make sure that the block is actually there +# i.e. keycloak didn't change its config file +grep '' conf/cache-ispn-override.xml -q + ENTRYPOINT ["./entrypoint.sh"] diff --git a/airbyte-keycloak/build.gradle.kts b/airbyte-keycloak/build.gradle.kts index 1c44ffcc4f2..001ef13adff 100644 --- a/airbyte-keycloak/build.gradle.kts +++ b/airbyte-keycloak/build.gradle.kts @@ -1,24 +1,24 @@ plugins { - id("io.airbyte.gradle.docker") - id("io.airbyte.gradle.publish") + id("io.airbyte.gradle.docker") + id("io.airbyte.gradle.publish") } airbyte { - docker { - imageName = "keycloak" - } + docker { + imageName = "keycloak" + } } val copyTheme = tasks.register("copyTheme") { - from("themes") - into("build/airbyte/docker/bin/themes") + from("themes") + into("build/airbyte/docker/bin/themes") } val copyScripts = tasks.register("copyScripts") { - from("scripts") - into("build/airbyte/docker/bin/scripts") + from("scripts") + into("build/airbyte/docker/bin/scripts") } tasks.named("dockerBuildImage") { - dependsOn(copyScripts, copyTheme) + dependsOn(copyScripts, copyTheme) } diff --git a/airbyte-keycloak/scripts/entrypoint.sh b/airbyte-keycloak/scripts/entrypoint.sh index de58afa63d7..a56dc8dd3c0 100755 --- a/airbyte-keycloak/scripts/entrypoint.sh +++ b/airbyte-keycloak/scripts/entrypoint.sh @@ -6,13 +6,22 @@ export KC_DB=postgres export KC_DB_URL=$KEYCLOAK_DATABASE_URL export KC_DB_USERNAME=$KEYCLOAK_DATABASE_USERNAME export KC_DB_PASSWORD=$KEYCLOAK_DATABASE_PASSWORD -export KC_HOSTNAME_URL=$KEYCLOAK_HOSTNAME_URL export KC_HTTP_PORT=$KEYCLOAK_PORT -export KC_HOSTNAME_ADMIN_URL=$KEYCLOAK_HOSTNAME_ADMIN_URL -bin/kc.sh build --cache=ispn 
 --cache-stack=kubernetes --health-enabled=true --http-relative-path /auth +if [ -n "$KEYCLOAK_HOSTNAME_URL" ]; then + # leave this unset if planning to configure frontendUrl at the realm level. + export KC_HOSTNAME_URL=$KEYCLOAK_HOSTNAME_URL +fi + +if [ -n "$KEYCLOAK_HOSTNAME_ADMIN_URL" ]; then + # leave this unset to let the admin console url be based on the incoming request. + export KC_HOSTNAME_ADMIN_URL=$KEYCLOAK_HOSTNAME_ADMIN_URL +fi + +# cache-config is relative to conf directory +bin/kc.sh build --cache=ispn --cache-stack=kubernetes --health-enabled=true --http-relative-path /auth --cache-config-file=cache-ispn-override.xml bin/kc.sh start --optimized --proxy edge --hostname-strict false # Uncomment to disable caching, which is useful for theme development -# bin/kc.sh start --optimized --proxy edge --hostname-strict false --spi-theme-static-max-age=-1 --spi-theme-cache-themes=false --spi-theme-cache-templates=false \ No newline at end of file +# bin/kc.sh start --optimized --proxy edge --hostname-strict false --spi-theme-static-max-age=-1 --spi-theme-cache-themes=false --spi-theme-cache-templates=false diff --git a/airbyte-keycloak/themes/README.md b/airbyte-keycloak/themes/README.md new file mode 100644 index 00000000000..6cb0697c69c --- /dev/null +++ b/airbyte-keycloak/themes/README.md @@ -0,0 +1,40 @@ +# Airbyte Keycloak theme + +This directory contains [keycloak themes](https://www.keycloak.org/docs/latest/server_development/#_themes) for Airbyte Cloud and Self Managed Enterprise. + +The `airbyte-keycloak-theme` is based on the built-in Keycloak `common/base` theme, which provides minimal HTML templates and internationalized strings that the Airbyte theme builds upon. The `airbyte-cloud` theme extends `airbyte-keycloak-theme` with some cloud-specific styling. + +## Developing the theme + +The development process is currently not very streamlined, and there is no development mode or frontend build process to speak of.
The template files in this directory can be edited individually, then the docker image must be rebuilt and the cluster can be started to view/test the changes made. + +First, the entrypoint script for Keycloak must be altered to disable caching. The `entrypoint.sh` script located in `airbyte-keycloak/scripts` has a commented startup script that should be enabled: + +```sh +# Uncomment to disable caching, which is useful for theme development +# bin/kc.sh start --optimized --proxy edge --hostname-strict false --spi-theme-static-max-age=-1 --spi-theme-cache-themes=false --spi-theme-cache-templates=false +``` + +After this, the `airbyte-keycloak` image needs to be built with gradle: + +`./gradlew -p oss :airbyte-keycloak:assemble`. + +Then you can use the `make deploy` command to deploy Airbyte Cloud locally, or use helm directly to redeploy `airbyte-keycloak` for a Self Managed Enterprise instance.
Alternatively it can be set in the terraform provider or via the Airbyte Java SDK. \ No newline at end of file diff --git a/airbyte-keycloak/themes/airbyte-cloud/email/messages/messages_en.properties b/airbyte-keycloak/themes/airbyte-cloud/email/messages/messages_en.properties new file mode 100644 index 00000000000..82eba373e5d --- /dev/null +++ b/airbyte-keycloak/themes/airbyte-cloud/email/messages/messages_en.properties @@ -0,0 +1,2 @@ +executeActionsSubject=Verify your email for Airbyte +executeActionsBodyHtml=

    Hello,

    Follow this link to verify your email address.

    {0}

    This link will expire within {4}.

    If you didn’t ask to verify this address, you can ignore this email.

    Thanks,

    Your Airbyte team

    diff --git a/airbyte-keycloak/themes/airbyte-cloud/email/theme.properties b/airbyte-keycloak/themes/airbyte-cloud/email/theme.properties new file mode 100644 index 00000000000..f1dbb7215d4 --- /dev/null +++ b/airbyte-keycloak/themes/airbyte-cloud/email/theme.properties @@ -0,0 +1 @@ +parent=base \ No newline at end of file diff --git a/airbyte-keycloak/themes/airbyte-cloud/login/login.ftl b/airbyte-keycloak/themes/airbyte-cloud/login/login.ftl new file mode 100644 index 00000000000..73ad4174f8c --- /dev/null +++ b/airbyte-keycloak/themes/airbyte-cloud/login/login.ftl @@ -0,0 +1,116 @@ +<#import "template.ftl" as layout> +<@layout.registrationLayout displayMessage=!messagesPerField.existsError('username','password') displayInfo=realm.password && realm.registrationAllowed && !registrationDisabled??; section> + <#if section = "header"> + ${msg("loginAccountTitle")} + <#elseif section = "form"> +
    +
    + <#if realm.password> +
    + <#if !usernameHidden??> +
    + + + + + <#if messagesPerField.existsError('username','password')> + + ${kcSanitize(messagesPerField.getFirstError('username','password'))?no_esc} + + + +
    + + +
    + + +
    + + +
    + + <#if usernameHidden?? && messagesPerField.existsError('username','password')> + + ${kcSanitize(messagesPerField.getFirstError('username','password'))?no_esc} + + + +
    + +
    +
    + <#if realm.rememberMe && !usernameHidden??> +
    + +
    + +
    +
    + <#if realm.resetPasswordAllowed> + ${msg("doForgotPassword")} + +
    + +
    + +
    + value="${auth.selectedCredential}"/> + +
    +
    + +
    +
    + + <#elseif section = "info" > + <#if realm.password && realm.registrationAllowed && !registrationDisabled??> +
    +
    + ${msg("noAccount")} ${msg("doRegister")} +
    +
    + + <#elseif section = "socialProviders" > + <#if realm.password && social.providers??> +
    +
    +

    ${msg("identity-provider-login-label")}

    + + +
    + +
    By signing up and continuing, you agree to our Terms of Service and Privacy Policy.
    + + + + diff --git a/airbyte-keycloak/themes/airbyte-cloud/login/messages/messages_en.properties b/airbyte-keycloak/themes/airbyte-cloud/login/messages/messages_en.properties new file mode 100644 index 00000000000..aa7dd365ae7 --- /dev/null +++ b/airbyte-keycloak/themes/airbyte-cloud/login/messages/messages_en.properties @@ -0,0 +1,10 @@ +registerTitle=Create your Airbyte account +noAccount=Don''t have an account? +doRegister=Sign up +missingFirstNameMessage=Required +missingLastNameMessage=Required +missingEmailMessage=Required +missingPasswordMessage=Required +invalidUserMessage=Invalid email or password. +emailInstruction=Enter your email address and we will send you instructions about how to reset your password. +expiredActionTokenNoSessionMessage=The link you followed to get here has expired. \ No newline at end of file diff --git a/airbyte-keycloak/themes/airbyte-cloud/login/register.ftl b/airbyte-keycloak/themes/airbyte-cloud/login/register.ftl new file mode 100644 index 00000000000..d3a7e0b7427 --- /dev/null +++ b/airbyte-keycloak/themes/airbyte-cloud/login/register.ftl @@ -0,0 +1,162 @@ +<#import "template.ftl" as layout> +<#import "register-commons.ftl" as registerCommons> +<@layout.registrationLayout displayMessage=!messagesPerField.existsError('firstName','lastName','email','username','password','password-confirm','termsAccepted'); section> + <#if section = "header"> + ${msg("registerTitle")} + <#elseif section = "form"> +
    +
    +
    + +
    +
    + + + <#if messagesPerField.existsError('firstName')> + + ${kcSanitize(messagesPerField.get('firstName'))?no_esc} + + +
    +
    + +
    +
    + +
    +
    + + + <#if messagesPerField.existsError('lastName')> + + ${kcSanitize(messagesPerField.get('lastName'))?no_esc} + + +
    +
    + +
    +
    + +
    +
    + + + <#if messagesPerField.existsError('email')> + + ${kcSanitize(messagesPerField.get('email'))?no_esc} + + +
    +
    + + <#if !realm.registrationEmailAsUsername> +
    +
    + +
    +
    + + + <#if messagesPerField.existsError('username')> + + ${kcSanitize(messagesPerField.get('username'))?no_esc} + + +
    +
    + + + <#if passwordRequired??> +
    +
    + +
    +
    +
    + + +
    + + + <#if messagesPerField.existsError('password')> + + ${kcSanitize(messagesPerField.get('password'))?no_esc} + + +
    +
    + +
    +
    + +
    +
    +
    + + +
    + + <#if messagesPerField.existsError('password-confirm')> + + ${kcSanitize(messagesPerField.get('password-confirm'))?no_esc} + + +
    +
    + + + <@registerCommons.termsAcceptance/> + + <#if recaptchaRequired??> +
    +
    +
    +
    +
    + + + + +
    By signing up and continuing, you agree to our Terms of Service and Privacy Policy.
    +
    + + + \ No newline at end of file diff --git a/airbyte-keycloak/themes/airbyte-cloud/login/resources/css/login-form.css b/airbyte-keycloak/themes/airbyte-cloud/login/resources/css/login-form.css new file mode 100644 index 00000000000..b8d1bae051c --- /dev/null +++ b/airbyte-keycloak/themes/airbyte-cloud/login/resources/css/login-form.css @@ -0,0 +1,11 @@ +#kc-form-login .ab-form-group:has(input[name="rememberMe"]) { + display: flex; + justify-content: space-between; + flex-direction: row-reverse; + font-size: var(--font-size-lg); +} + +/** "Forgot password" link */ +#kc-form-login .ab-form-group:has(input[name="rememberMe"]) .ab-form-options-wrapper a:link { + color: var(--color-grey-400); +} \ No newline at end of file diff --git a/airbyte-keycloak/themes/airbyte-cloud/login/resources/css/register-form.css b/airbyte-keycloak/themes/airbyte-cloud/login/resources/css/register-form.css new file mode 100644 index 00000000000..8b6615a8d12 --- /dev/null +++ b/airbyte-keycloak/themes/airbyte-cloud/login/resources/css/register-form.css @@ -0,0 +1,37 @@ +#kc-page-title { + display: block; +} + +form#kc-register-form { + display: grid; + grid-template-columns: 1fr 1fr; + gap: var(--spacing-xl); +} + +#kc-register-form .ab-form-group { + grid-column: span 2; + margin-top: 0; +} + +#kc-register-form .ab-form-group:has(label[for="firstName"]), +#kc-register-form .ab-form-group:has(label[for="lastName"]) { + grid-column: span 1; + margin-top: 0; +} + +#kc-register-form .ab-form-group:has(#kc-form-buttons) { + display: flex; + justify-content: space-between; + align-items: center; + gap: var(--spacing-xl); +} + +#backToApplication, +#kc-register-form .ab-form-group:has(#kc-form-buttons) .ab-form-options-wrapper a { + color: var(--color-grey-400); + font-size: var(--font-size-lg); +} + +.ab-terms-of-servic { + grid-column: span 2; +} \ No newline at end of file diff --git a/airbyte-keycloak/themes/airbyte-cloud/login/resources/css/reset-password-form.css 
b/airbyte-keycloak/themes/airbyte-cloud/login/resources/css/reset-password-form.css new file mode 100644 index 00000000000..a87e88804b0 --- /dev/null +++ b/airbyte-keycloak/themes/airbyte-cloud/login/resources/css/reset-password-form.css @@ -0,0 +1,33 @@ +form#kc-reset-password-form { + display: grid; + grid-template-columns: 1fr 1fr; + gap: var(--spacing-xl); +} + +#kc-reset-password-form .ab-form-group { + grid-column: span 2; + margin-top: 0; +} + +#kc-reset-password-form .ab-form-group:has(label[for="firstName"]), +#kc-reset-password-form .ab-form-group:has(label[for="lastName"]) { + grid-column: span 1; + margin-top: 0; +} + +#kc-reset-password-form .ab-form-group:has(#kc-form-buttons) { + display: flex; + justify-content: space-between; + align-items: center; + gap: var(--spacing-xl); +} + +#kc-info-wrapper { + margin-top: var(--spacing-xl); + font-size: var(--font-size-lg); +} + +#kc-reset-password-form .ab-form-group:has(#kc-form-buttons) .ab-form-options-wrapper a { + color: var(--color-grey-400); + font-size: var(--font-size-lg); +} \ No newline at end of file diff --git a/airbyte-keycloak/themes/airbyte-cloud/login/resources/css/styles.css b/airbyte-keycloak/themes/airbyte-cloud/login/resources/css/styles.css new file mode 100644 index 00000000000..95f217de78b --- /dev/null +++ b/airbyte-keycloak/themes/airbyte-cloud/login/resources/css/styles.css @@ -0,0 +1,77 @@ +#kc-social-providers ul { + justify-content: center; + flex-direction: row; +} + +#kc-registration { + margin-top: var(--spacing-xl); + display: flex; + justify-content: center; +} + +#kc-registration a { + border: none; +} + +#kc-social-providers ul { + gap: var(--spacing-xl); +} + +#kc-social-providers ul a { + border: none; + width: 24px; + height: 24px; + font-size: 0; + background-repeat: no-repeat; + background-size: contain; + padding: 0; +} + +#kc-social-providers ul a#social-github { + background-image: url(../img/github-logo.svg); +} + +#kc-social-providers ul a#social-google { + 
background-image: url(../img/google-logo.svg); +} + +#kc-registration { + font-size: var(--font-size-sm); +} + +#kc-registration a { + display: inline-block; + color: var(--color-grey-400); + border: 1px solid var(--color-grey-300); + border-radius: 6px; + padding: 8px 10px; + text-decoration: none; + font-size: var(--font-size-sm); + margin-left: var(--spacing-md); +} + +#kc-page-title { + display: block; + font-weight: 500; + font-size: 16px; + text-align: center; + margin-top: 0; + margin-bottom: var(--spacing-2xl); +} + +.ab-terms-of-service { + color: var(--color-text); + font-size: var(--font-size-md); +} + +.ab-login-page .ab-terms-of-service a:link, +.ab-login-page .ab-terms-of-service a:visited { + color: var(--color-blue-400); + text-decoration: none; +} + +.ab-login-page .checkbox label { + display: flex; + align-items: center; + gap: var(--spacing-sm); +} \ No newline at end of file diff --git a/airbyte-keycloak/themes/airbyte-cloud/login/resources/img/github-logo.svg b/airbyte-keycloak/themes/airbyte-cloud/login/resources/img/github-logo.svg new file mode 100644 index 00000000000..f4e5118a9c8 --- /dev/null +++ b/airbyte-keycloak/themes/airbyte-cloud/login/resources/img/github-logo.svg @@ -0,0 +1,23 @@ + + + + + + + + + diff --git a/airbyte-keycloak/themes/airbyte-cloud/login/resources/img/google-logo.svg b/airbyte-keycloak/themes/airbyte-cloud/login/resources/img/google-logo.svg new file mode 100644 index 00000000000..bc558e76542 --- /dev/null +++ b/airbyte-keycloak/themes/airbyte-cloud/login/resources/img/google-logo.svg @@ -0,0 +1,63 @@ + + + + + + + + + + + + + + + + + + + + + + diff --git a/airbyte-keycloak/themes/airbyte-cloud/login/resources/js/fullstory.js b/airbyte-keycloak/themes/airbyte-cloud/login/resources/js/fullstory.js new file mode 100644 index 00000000000..e1d48a0dabc --- /dev/null +++ b/airbyte-keycloak/themes/airbyte-cloud/login/resources/js/fullstory.js @@ -0,0 +1,20 @@ +window['_fs_host'] = 'fullstory.com'; +window['_fs_script'] = 
'edge.fullstory.com/s/fs.js'; +window['_fs_org'] = '13AXQ4'; +window['_fs_namespace'] = 'FS'; +!function(m,n,e,t,l,o,g,y){var s,f,a=function(h){ +return!(h in m)||(m.console&&m.console.log&&m.console.log('FullStory namespace conflict. Please set window["_fs_namespace"].'),!1)}(e) +;function p(b){var h,d=[];function j(){h&&(d.forEach((function(b){var d;try{d=b[h[0]]&&b[h[0]](h[1])}catch(h){return void(b[3]&&b[3](h))} +d&&d.then?d.then(b[2],b[3]):b[2]&&b[2](d)})),d.length=0)}function r(b){return function(d){h||(h=[b,d],j())}}return b(r(0),r(1)),{ +then:function(b,h){return p((function(r,i){d.push([b,h,r,i]),j()}))}}}a&&(g=m[e]=function(){var b=function(b,d,j,r){function i(i,c){ +h(b,d,j,i,c,r)}r=r||2;var c,u=/Async$/;return u.test(b)?(b=b.replace(u,""),"function"==typeof Promise?new Promise(i):p(i)):h(b,d,j,c,c,r)} +;function h(h,d,j,r,i,c){return b._api?b._api(h,d,j,r,i,c):(b.q&&b.q.push([h,d,j,r,i,c]),null)}return b.q=[],b}(),y=function(b){function h(h){ +"function"==typeof h[4]&&h[4](new Error(b))}var d=g.q;if(d){for(var j=0;j `Themes`. 
diff --git a/airbyte-keycloak/themes/airbyte-keycloak-theme/login/messages/messages_en.properties b/airbyte-keycloak/themes/airbyte-keycloak-theme/login/messages/messages_en.properties index 5932f51b1d7..c2c94fad740 100644 --- a/airbyte-keycloak/themes/airbyte-keycloak-theme/login/messages/messages_en.properties +++ b/airbyte-keycloak/themes/airbyte-keycloak-theme/login/messages/messages_en.properties @@ -2,4 +2,7 @@ doLogIn=Log in loginAccountTitle=Log in to Airbyte usernameOrEmail=Your work email password=Enter your password -backToApplication=« Back to login \ No newline at end of file +backToApplication=Back to Airbyte +backToLogin=Back to login +identity-provider-login-label=Or log in with +errorTitle=Authentication error \ No newline at end of file diff --git a/airbyte-keycloak/themes/airbyte-keycloak-theme/login/resources/css/styles.css b/airbyte-keycloak/themes/airbyte-keycloak-theme/login/resources/css/base.css similarity index 95% rename from airbyte-keycloak/themes/airbyte-keycloak-theme/login/resources/css/styles.css rename to airbyte-keycloak/themes/airbyte-keycloak-theme/login/resources/css/base.css index 1123f60eabb..a68db59de63 100644 --- a/airbyte-keycloak/themes/airbyte-keycloak-theme/login/resources/css/styles.css +++ b/airbyte-keycloak/themes/airbyte-keycloak-theme/login/resources/css/base.css @@ -119,6 +119,7 @@ body { font-family: var(--font-sans-serif); color: var(--color-text); background-color: var(--color-grey-40); + accent-color: var(--color-blue-400); } input { @@ -138,12 +139,19 @@ input { padding-bottom: 20vh; } +.ab-login-page a:link, +.ab-login-page a:visited { + color: var(--color-grey-400); +} + .ab-alert { + max-width: 410px; margin-block: var(--spacing-xl); background: var(--color-yellow-100); font-size: var(--font-size-lg); padding: var(--spacing-lg); border-radius: 6px; + line-height: 1.5; } .ab-form-card { @@ -153,8 +161,7 @@ input { box-shadow: var(--box-shadow); border-radius: 10px; padding: 150px 20px 20px; - min-width: 
410px; - max-width: 800px; + width: 410px; } .ab-form-group { @@ -211,7 +218,7 @@ input { #kc-form-buttons { display: flex; - width: 100%; + align-items: flex-end; } #kc-form-buttons input[type='submit'] { @@ -254,6 +261,10 @@ input { padding: 0; } +#kc-social-providers ul li { + list-style: none; +} + #kc-social-providers ul a { display: block; padding: 10px 14px; @@ -265,3 +276,7 @@ input { text-decoration: none; color: var(--color-text); } + +[data-password-toggle] { + display: none; +} \ No newline at end of file diff --git a/airbyte-keycloak/themes/airbyte-keycloak-theme/login/theme.properties b/airbyte-keycloak/themes/airbyte-keycloak-theme/login/theme.properties index c32db71c462..3dc7e417762 100644 --- a/airbyte-keycloak/themes/airbyte-keycloak-theme/login/theme.properties +++ b/airbyte-keycloak/themes/airbyte-keycloak-theme/login/theme.properties @@ -1,5 +1,5 @@ parent=base -styles=css/styles.css css/fonts.css +styles=css/base.css css/fonts.css kcLoginClass=ab-login-page diff --git a/airbyte-metrics/readme.md b/airbyte-metrics/README.md similarity index 100% rename from airbyte-metrics/readme.md rename to airbyte-metrics/README.md diff --git a/airbyte-metrics/metrics-lib/src/main/java/io/airbyte/metrics/lib/ApmTraceUtils.java b/airbyte-metrics/metrics-lib/src/main/java/io/airbyte/metrics/lib/ApmTraceUtils.java index 25d1b898feb..49f1aab078b 100644 --- a/airbyte-metrics/metrics-lib/src/main/java/io/airbyte/metrics/lib/ApmTraceUtils.java +++ b/airbyte-metrics/metrics-lib/src/main/java/io/airbyte/metrics/lib/ApmTraceUtils.java @@ -19,8 +19,10 @@ import java.io.StringWriter; import java.nio.file.Path; import java.util.HashMap; +import java.util.List; import java.util.Map; import java.util.UUID; +import java.util.stream.Collectors; /** * Collection of utility methods to help with performance tracing. 
@@ -37,6 +39,20 @@ public class ApmTraceUtils { */ public static final String TAG_PREFIX = "metadata"; + /** + * Converts the provided metric attributes to tags and adds them to the currently active span, if + * one exists.
    + * All tags added via this method will use the default {@link #TAG_PREFIX} namespace. + * + * @param attrs A list of attributes to be converted to tags and added to the currently active span. + */ + public static void addTagsToTrace(final List attrs) { + final Map tags = attrs.stream() + .collect(Collectors.toMap(MetricAttribute::key, MetricAttribute::value)); + + addTagsToTrace(tags, TAG_PREFIX); + } + /** * Adds all the provided tags to the currently active span, if one exists.
    * All tags added via this method will use the default {@link #TAG_PREFIX} namespace. diff --git a/airbyte-metrics/metrics-lib/src/main/java/io/airbyte/metrics/lib/MetricEmittingApps.java b/airbyte-metrics/metrics-lib/src/main/java/io/airbyte/metrics/lib/MetricEmittingApps.java index aa70d2dcee2..bbbbf91b892 100644 --- a/airbyte-metrics/metrics-lib/src/main/java/io/airbyte/metrics/lib/MetricEmittingApps.java +++ b/airbyte-metrics/metrics-lib/src/main/java/io/airbyte/metrics/lib/MetricEmittingApps.java @@ -24,6 +24,7 @@ @AllArgsConstructor public enum MetricEmittingApps implements MetricEmittingApp { + BILLING("billing"), BOOTLOADER("bootloader"), CRON("cron"), METRICS_REPORTER("metrics-reporter"), diff --git a/airbyte-metrics/metrics-lib/src/main/java/io/airbyte/metrics/lib/MetricTags.java b/airbyte-metrics/metrics-lib/src/main/java/io/airbyte/metrics/lib/MetricTags.java index f4a0bfa90d8..31f88599ac7 100644 --- a/airbyte-metrics/metrics-lib/src/main/java/io/airbyte/metrics/lib/MetricTags.java +++ b/airbyte-metrics/metrics-lib/src/main/java/io/airbyte/metrics/lib/MetricTags.java @@ -46,7 +46,7 @@ public class MetricTags { public static final String NOTIFICATION_CLIENT = "notification_client"; public static final String RECORD_COUNT_TYPE = "record_count_type"; public static final String RELEASE_STAGE = "release_stage"; - public static final String RESET_WORKFLOW_FAILURE_CAUSE = "failure_cause"; + public static final String FAILURE_CAUSE = "failure_cause"; public static final String SOURCE_ID = "source_id"; public static final String SOURCE_IMAGE = "source_image"; public static final String STATUS = "status"; @@ -56,6 +56,14 @@ public class MetricTags { public static final String USER_TYPE = "user_type"; // real user, service account, data plane user, etc public static final String WILL_RETRY = "will_retry"; + // payload metric tags + public static final String URI_NULL = "uri_null"; + public static final String URI_ID = "uri_id"; + public static final String 
URI_VERSION = "uri_version"; + public static final String PAYLOAD_NAME = "payload_name"; + public static final String IS_MATCH = "is_match"; + public static final String IS_MISS = "is_miss"; + public static String getReleaseStage(final ReleaseStage stage) { return stage != null ? stage.value() : UNKNOWN; } diff --git a/airbyte-metrics/metrics-lib/src/main/java/io/airbyte/metrics/lib/OssMetricsRegistry.java b/airbyte-metrics/metrics-lib/src/main/java/io/airbyte/metrics/lib/OssMetricsRegistry.java index fd43365d249..de9f9f0c58b 100644 --- a/airbyte-metrics/metrics-lib/src/main/java/io/airbyte/metrics/lib/OssMetricsRegistry.java +++ b/airbyte-metrics/metrics-lib/src/main/java/io/airbyte/metrics/lib/OssMetricsRegistry.java @@ -122,6 +122,10 @@ public enum OssMetricsRegistry implements MetricsRegistry { MetricEmittingApps.CRON, // Actually `cron` or `bootloader` based on which metric client calls the code "connector_registry_definition_processed", "increments when a connector registry definition is processed by the ApplyDefinitionsHelper"), + CONNECTOR_BREAKING_CHANGE_PIN_SERVED( + MetricEmittingApps.SERVER, + "connector_breaking_change_pin_served", + "increments when a breaking change pin is served"), EST_NUM_METRICS_EMITTED_BY_REPORTER( MetricEmittingApps.METRICS_REPORTER, "est_num_metrics_emitted_by_reporter", @@ -146,10 +150,6 @@ public enum OssMetricsRegistry implements MetricsRegistry { MetricEmittingApps.WORKER, "json_string_length", "string length of a raw json string"), - RECORD_SIZE_ERROR( - MetricEmittingApps.WORKER, - "record_size_error", - "length of a raw record json string exceeding the limit"), KUBE_POD_PROCESS_CREATE_TIME_MILLISECS( MetricEmittingApps.WORKER, "kube_pod_process_create_time_millisecs", @@ -408,17 +408,49 @@ public enum OssMetricsRegistry implements MetricsRegistry { "destination_deserialization_error", "When a sync failed with a deserialization error from the destination"), + HEARTBEAT_TERMINAL_SHUTDOWN(MetricEmittingApps.ORCHESTRATOR, + 
"heartbeat_terminal_shutdown", + "When the heartbeat receives a terminal response from the server, and we shut down the orchestrator"), + + HEARTBEAT_CONNECTIVITY_FAILURE_SHUTDOWN(MetricEmittingApps.ORCHESTRATOR, + "heartbeat_connectivity_failure_shutdown", + "When the heartbeat cannot communicate with the server, and we shut down the orchestrator"), + SIDECAR_CHECK(MetricEmittingApps.SIDECAR_ORCHESTRATOR, "sidecar_check", - "Exit of the connetor sidecar"), + "Exit of the connector sidecar"), CATALOG_DISCOVERY(MetricEmittingApps.SIDECAR_ORCHESTRATOR, "catalog_discover", - "Exit of the connetor sidecar"), + "Exit of the connector sidecar"), SPEC(MetricEmittingApps.SIDECAR_ORCHESTRATOR, "spec", - "Result of the spec operation"); + "Result of the spec operation"), + + ACTIVITY_PAYLOAD_READ_FROM_DOC_STORE(MetricEmittingApps.WORKER, + "activity_payload_read_from_doc_store", + "An activity payload was read from the doc store."), + + ACTIVITY_PAYLOAD_WRITTEN_TO_DOC_STORE(MetricEmittingApps.WORKER, + "activity_payload_written_to_doc_store", + "An activity payload was written to the doc store."), + + PAYLOAD_SIZE_EXCEEDED(MetricEmittingApps.WORKER, + "payload_size_exceeded", + "Detected payload size was over 4mb Temporal limit"), + + PAYLOAD_FAILURE_WRITE(MetricEmittingApps.WORKER, + "payload_failure_write", + "Failure writing the activity payload to storage."), + + PAYLOAD_FAILURE_READ(MetricEmittingApps.WORKER, + "payload_failure_read", + "Failure reading the activity payload from storage."), + + PAYLOAD_VALIDATION_RESULT(MetricEmittingApps.WORKER, + "payload_validation_result", + "The result of the comparing the payload in object storage to the one passed from temporal."); private final MetricEmittingApp application; private final String metricName; diff --git a/airbyte-metrics/metrics-lib/src/test/java/io/airbyte/metrics/lib/ApmTraceUtilsTest.java b/airbyte-metrics/metrics-lib/src/test/java/io/airbyte/metrics/lib/ApmTraceUtilsTest.java index 77f453f68aa..372d7335526 
100644 --- a/airbyte-metrics/metrics-lib/src/test/java/io/airbyte/metrics/lib/ApmTraceUtilsTest.java +++ b/airbyte-metrics/metrics-lib/src/test/java/io/airbyte/metrics/lib/ApmTraceUtilsTest.java @@ -29,6 +29,7 @@ import java.io.PrintWriter; import java.io.StringWriter; import java.nio.file.Path; +import java.util.List; import java.util.Map; import java.util.UUID; import org.junit.After; @@ -65,6 +66,21 @@ void testAddingTags() { verify(span, times(1)).setTag(String.format(TAG_FORMAT, TAG_PREFIX, TAG_2), VALUE_2); } + @Test + void convertsAndAddsAttributes() { + final Span span = mock(Span.class); + final Tracer tracer = mock(Tracer.class); + when(tracer.activeSpan()).thenReturn(span); + + GlobalTracerTestUtil.setGlobalTracerUnconditionally(tracer); + + final var attrs = List.of(new MetricAttribute(TAG_1, VALUE_1), new MetricAttribute(TAG_2, VALUE_2)); + ApmTraceUtils.addTagsToTrace(attrs); + + verify(span, times(1)).setTag(String.format(TAG_FORMAT, TAG_PREFIX, TAG_1), VALUE_1); + verify(span, times(1)).setTag(String.format(TAG_FORMAT, TAG_PREFIX, TAG_2), VALUE_2); + } + @Test void testAddingTagsWithPrefix() { final Span span = mock(Span.class); diff --git a/airbyte-metrics/reporter/Dockerfile b/airbyte-metrics/reporter/Dockerfile index af4b2ccfa83..bc9696a01a5 100644 --- a/airbyte-metrics/reporter/Dockerfile +++ b/airbyte-metrics/reporter/Dockerfile @@ -1,5 +1,12 @@ -ARG JDK_IMAGE=airbyte/airbyte-base-java-image:2.1.0 -FROM ${JDK_IMAGE} +ARG JDK_IMAGE=airbyte/airbyte-base-java-image:3.2.1 + +FROM scratch as builder WORKDIR /app ADD airbyte-app.tar /app + +FROM ${JDK_IMAGE} +WORKDIR /app +COPY --chown=airbyte:airbyte --from=builder /app /app +USER airbyte:airbyte + ENTRYPOINT ["/bin/bash", "-c", "airbyte-app/bin/airbyte-metrics-reporter"] diff --git a/airbyte-metrics/reporter/build.gradle.kts b/airbyte-metrics/reporter/build.gradle.kts index ae093138cba..3709f9b5d8c 100644 --- a/airbyte-metrics/reporter/build.gradle.kts +++ 
b/airbyte-metrics/reporter/build.gradle.kts @@ -1,55 +1,55 @@ plugins { - id("io.airbyte.gradle.jvm.app") - id("io.airbyte.gradle.docker") - id("io.airbyte.gradle.publish") + id("io.airbyte.gradle.jvm.app") + id("io.airbyte.gradle.docker") + id("io.airbyte.gradle.publish") } configurations { - create("jdbc") + create("jdbc") } configurations.all { - resolutionStrategy { - force (libs.jooq) - } + resolutionStrategy { + force(libs.jooq) + } } dependencies { - annotationProcessor(platform(libs.micronaut.platform)) - annotationProcessor(libs.bundles.micronaut.annotation.processor) + annotationProcessor(platform(libs.micronaut.platform)) + annotationProcessor(libs.bundles.micronaut.annotation.processor) - implementation(platform(libs.micronaut.platform)) - implementation(libs.bundles.micronaut) + implementation(platform(libs.micronaut.platform)) + implementation(libs.bundles.micronaut) - implementation(project(":airbyte-config:config-models")) - implementation(project(":airbyte-db:jooq")) - implementation(project(":airbyte-db:db-lib")) - implementation(project(":airbyte-metrics:metrics-lib")) - implementation(libs.jooq) + implementation(project(":airbyte-config:config-models")) + implementation(project(":airbyte-db:jooq")) + implementation(project(":airbyte-db:db-lib")) + implementation(project(":airbyte-metrics:metrics-lib")) + implementation(libs.jooq) - runtimeOnly(libs.snakeyaml) + runtimeOnly(libs.snakeyaml) - testAnnotationProcessor(platform(libs.micronaut.platform)) - testAnnotationProcessor(libs.bundles.micronaut.test.annotation.processor) + testAnnotationProcessor(platform(libs.micronaut.platform)) + testAnnotationProcessor(libs.bundles.micronaut.test.annotation.processor) - testImplementation(project(":airbyte-test-utils")) - testImplementation(libs.bundles.micronaut.test) - testImplementation(libs.postgresql) - testImplementation(libs.platform.testcontainers.postgresql) - testRuntimeOnly(libs.junit.jupiter.engine) - testImplementation(libs.bundles.junit) - 
testImplementation(libs.assertj.core) + testImplementation(project(":airbyte-test-utils")) + testImplementation(libs.bundles.micronaut.test) + testImplementation(libs.postgresql) + testImplementation(libs.platform.testcontainers.postgresql) + testRuntimeOnly(libs.junit.jupiter.engine) + testImplementation(libs.bundles.junit) + testImplementation(libs.assertj.core) - testImplementation(libs.junit.pioneer) + testImplementation(libs.junit.pioneer) } airbyte { - application { - name = "airbyte-metrics-reporter" - mainClass = "io.airbyte.metrics.reporter.Application" - defaultJvmArgs = listOf("-XX:+ExitOnOutOfMemoryError", "-XX:MaxRAMPercentage=75.0") - } - docker { - imageName = "metrics-reporter" - } + application { + name = "airbyte-metrics-reporter" + mainClass = "io.airbyte.metrics.reporter.Application" + defaultJvmArgs = listOf("-XX:+ExitOnOutOfMemoryError", "-XX:MaxRAMPercentage=75.0") + } + docker { + imageName = "metrics-reporter" + } } diff --git a/airbyte-micronaut-temporal/build.gradle.kts b/airbyte-micronaut-temporal/build.gradle.kts index bc8bb26c7dd..bc33187eb8d 100644 --- a/airbyte-micronaut-temporal/build.gradle.kts +++ b/airbyte-micronaut-temporal/build.gradle.kts @@ -1,25 +1,25 @@ plugins { - id("io.airbyte.gradle.jvm.lib") - id("io.airbyte.gradle.publish") + id("io.airbyte.gradle.jvm.lib") + id("io.airbyte.gradle.publish") } dependencies { - annotationProcessor(platform(libs.micronaut.platform)) - annotationProcessor(libs.bundles.micronaut.annotation.processor) + annotationProcessor(platform(libs.micronaut.platform)) + annotationProcessor(libs.bundles.micronaut.annotation.processor) - implementation(libs.bundles.micronaut) - implementation(libs.byte.buddy) - implementation(libs.guava) - implementation(libs.spring.core) - implementation(libs.temporal.sdk) { - exclude( module = "guava") - } + implementation(libs.bundles.micronaut) + implementation(libs.byte.buddy) + implementation(libs.guava) + implementation(libs.spring.core) + 
implementation(libs.temporal.sdk) { + exclude(module = "guava") + } - implementation(project(":airbyte-commons-temporal-core")) + implementation(project(":airbyte-commons-temporal-core")) - testImplementation(libs.assertj.core) - testImplementation(libs.bundles.junit) - testImplementation(libs.junit.pioneer) - testImplementation(libs.mockito.inline) - testRuntimeOnly(libs.junit.jupiter.engine) + testImplementation(libs.assertj.core) + testImplementation(libs.bundles.junit) + testImplementation(libs.junit.pioneer) + testImplementation(libs.mockito.inline) + testRuntimeOnly(libs.junit.jupiter.engine) } diff --git a/airbyte-notification/readme.md b/airbyte-notification/README.md similarity index 100% rename from airbyte-notification/readme.md rename to airbyte-notification/README.md diff --git a/airbyte-notification/build.gradle.kts b/airbyte-notification/build.gradle.kts index a37faed343f..92e8713b160 100644 --- a/airbyte-notification/build.gradle.kts +++ b/airbyte-notification/build.gradle.kts @@ -1,39 +1,39 @@ plugins { - id("io.airbyte.gradle.jvm.lib") - id("io.airbyte.gradle.publish") - id("org.jetbrains.kotlin.jvm") - id("org.jetbrains.kotlin.kapt") + id("io.airbyte.gradle.jvm.lib") + id("io.airbyte.gradle.publish") + id("org.jetbrains.kotlin.jvm") + id("org.jetbrains.kotlin.kapt") } dependencies { - compileOnly(libs.lombok) - annotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut + compileOnly(libs.lombok) + annotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut - kapt(libs.bundles.micronaut.annotation.processor) + kapt(libs.bundles.micronaut.annotation.processor) - implementation(project(":airbyte-api")) - implementation(project(":airbyte-commons")) - implementation(project(":airbyte-config:config-models")) - implementation(project(":airbyte-json-validation")) - implementation(project(":airbyte-metrics:metrics-lib")) - implementation(libs.okhttp) - implementation("org.apache.httpcomponents:httpclient:4.5.13") - 
implementation("org.commonmark:commonmark:0.21.0") + implementation(project(":airbyte-api")) + implementation(project(":airbyte-commons")) + implementation(project(":airbyte-config:config-models")) + implementation(project(":airbyte-json-validation")) + implementation(project(":airbyte-metrics:metrics-lib")) + implementation(libs.okhttp) + implementation("org.apache.httpcomponents:httpclient:4.5.13") + implementation("org.commonmark:commonmark:0.21.0") - implementation(libs.guava) - implementation(libs.bundles.apache) - implementation(libs.commons.io) - implementation(platform(libs.fasterxml)) - implementation(libs.bundles.jackson) - // TODO remove this, it"s used for String.isEmpty check) - implementation(libs.bundles.log4j) + implementation(libs.guava) + implementation(libs.bundles.apache) + implementation(libs.commons.io) + implementation(platform(libs.fasterxml)) + implementation(libs.bundles.jackson) + // TODO remove this, it"s used for String.isEmpty check) + implementation(libs.bundles.log4j) - testImplementation(libs.mockk) - testRuntimeOnly(libs.junit.jupiter.engine) - testImplementation(libs.bundles.junit) - testImplementation(libs.assertj.core) + testImplementation(libs.mockk) + testRuntimeOnly(libs.junit.jupiter.engine) + testImplementation(libs.bundles.junit) + testImplementation(libs.assertj.core) - testImplementation(libs.junit.pioneer) - testImplementation(libs.mockito.inline) - testImplementation(libs.mockwebserver) + testImplementation(libs.junit.pioneer) + testImplementation(libs.mockito.inline) + testImplementation(libs.mockwebserver) } diff --git a/airbyte-notification/src/main/java/io/airbyte/notification/CustomerIoEmailNotificationSender.java b/airbyte-notification/src/main/java/io/airbyte/notification/CustomerIoEmailNotificationSender.java index 97752805b43..a5e49bc0095 100644 --- a/airbyte-notification/src/main/java/io/airbyte/notification/CustomerIoEmailNotificationSender.java +++ 
b/airbyte-notification/src/main/java/io/airbyte/notification/CustomerIoEmailNotificationSender.java @@ -5,6 +5,7 @@ package io.airbyte.notification; import io.airbyte.commons.resources.MoreResources; +import io.micronaut.context.annotation.Replaces; import io.micronaut.context.annotation.Requires; import io.micronaut.context.annotation.Value; import jakarta.inject.Named; @@ -26,6 +27,7 @@ @Singleton @Requires(property = "airbyte.notification.customerio.apikey", notEquals = "") +@Replaces(FakeCustomerIoEmailNotificationSender.class) public class CustomerIoEmailNotificationSender implements NotificationSender { public static final MediaType JSON = MediaType.get("application/json; charset=utf-8"); diff --git a/airbyte-notification/src/main/java/io/airbyte/notification/CustomerioNotificationClient.java b/airbyte-notification/src/main/java/io/airbyte/notification/CustomerioNotificationClient.java index ece3d19a4fa..12dbb0a49e0 100644 --- a/airbyte-notification/src/main/java/io/airbyte/notification/CustomerioNotificationClient.java +++ b/airbyte-notification/src/main/java/io/airbyte/notification/CustomerioNotificationClient.java @@ -27,14 +27,12 @@ import java.time.format.DateTimeFormatter; import java.util.List; import java.util.Map; -import java.util.UUID; import okhttp3.Interceptor; import okhttp3.MediaType; import okhttp3.OkHttpClient; import okhttp3.Request; import okhttp3.RequestBody; import okhttp3.Response; -import org.apache.commons.lang3.NotImplementedException; import org.apache.commons.lang3.StringUtils; import org.apache.http.HttpHeaders; import org.commonmark.node.Node; @@ -63,8 +61,8 @@ public class CustomerioNotificationClient extends NotificationClient { private static final Logger LOGGER = LoggerFactory.getLogger(CustomerioNotificationClient.class); - private static final String AUTO_DISABLE_TRANSACTION_MESSAGE_ID = "7"; - private static final String AUTO_DISABLE_WARNING_TRANSACTION_MESSAGE_ID = "8"; + private static final String 
AUTO_DISABLE_TRANSACTION_MESSAGE_ID = "29"; + private static final String AUTO_DISABLE_WARNING_TRANSACTION_MESSAGE_ID = "30"; private static final String BREAKING_CHANGE_WARNING_BROADCAST_ID = "32"; private static final String BREAKING_CHANGE_SYNCS_DISABLED_BROADCAST_ID = "33"; private static final String SCHEMA_CHANGE_TRANSACTION_ID = "25"; @@ -77,7 +75,6 @@ public class CustomerioNotificationClient extends NotificationClient { private static final String CUSTOMERIO_EMAIL_API_ENDPOINT = "v1/send/email"; private static final String CAMPAIGNS_PATH_SEGMENT = "campaigns"; private static final String CUSTOMERIO_BROADCAST_API_ENDPOINT_TEMPLATE = "v1/" + CAMPAIGNS_PATH_SEGMENT + "/%s/triggers"; - private static final String AUTO_DISABLE_NOTIFICATION_TEMPLATE_PATH = "customerio/auto_disable_notification_template.json"; private static final String CUSTOMERIO_TYPE = "customerio"; @@ -159,29 +156,21 @@ public boolean notifyJobSuccess(final SyncSummary summary, // airbyte-config/models/src/main/resources/types/CustomerioNotificationConfiguration.yaml // instead of being passed in @Override - public boolean notifyConnectionDisabled(final String receiverEmail, - final String sourceConnector, - final String destinationConnector, - final String jobDescription, - final UUID workspaceId, - final UUID connectionId) + public boolean notifyConnectionDisabled(final SyncSummary summary, + final String receiverEmail) throws IOException { - final String requestBody = renderTemplate(AUTO_DISABLE_NOTIFICATION_TEMPLATE_PATH, AUTO_DISABLE_TRANSACTION_MESSAGE_ID, receiverEmail, - receiverEmail, sourceConnector, destinationConnector, jobDescription, workspaceId.toString(), connectionId.toString()); - return notifyByEmail(requestBody); + ObjectNode node = buildSyncCompletedJson(summary, receiverEmail, AUTO_DISABLE_TRANSACTION_MESSAGE_ID); + String payload = Jsons.serialize(node); + return notifyByEmail(payload); } @Override - public boolean notifyConnectionDisableWarning(final String 
receiverEmail, - final String sourceConnector, - final String destinationConnector, - final String jobDescription, - final UUID workspaceId, - final UUID connectionId) + public boolean notifyConnectionDisableWarning(final SyncSummary summary, + final String receiverEmail) throws IOException { - final String requestBody = renderTemplate(AUTO_DISABLE_NOTIFICATION_TEMPLATE_PATH, AUTO_DISABLE_WARNING_TRANSACTION_MESSAGE_ID, receiverEmail, - receiverEmail, sourceConnector, destinationConnector, jobDescription, workspaceId.toString(), connectionId.toString()); - return notifyByEmail(requestBody); + ObjectNode node = buildSyncCompletedJson(summary, receiverEmail, AUTO_DISABLE_WARNING_TRANSACTION_MESSAGE_ID); + String payload = Jsons.serialize(node); + return notifyByEmail(payload); } @Override @@ -213,16 +202,6 @@ public boolean notifyBreakingChangeSyncsDisabled(final List receiverEmai "connector_version_migration_url", breakingChange.getMigrationDocumentationUrl())); } - @Override - public boolean notifySuccess(final String message) { - throw new NotImplementedException(); - } - - @Override - public boolean notifyFailure(final String message) { - throw new NotImplementedException(); - } - @Override public boolean notifySchemaPropagated(final SchemaUpdateNotification notification, final String recipient) diff --git a/airbyte-notification/src/main/java/io/airbyte/notification/FakeCustomerIoEmailNotificationSender.java b/airbyte-notification/src/main/java/io/airbyte/notification/FakeCustomerIoEmailNotificationSender.java index 20bcc399fe4..d5f4beea51d 100644 --- a/airbyte-notification/src/main/java/io/airbyte/notification/FakeCustomerIoEmailNotificationSender.java +++ b/airbyte-notification/src/main/java/io/airbyte/notification/FakeCustomerIoEmailNotificationSender.java @@ -4,7 +4,6 @@ package io.airbyte.notification; -import io.micronaut.context.annotation.Requires; import jakarta.inject.Singleton; import lombok.extern.slf4j.Slf4j; @@ -13,7 +12,6 @@ */ @Slf4j @Singleton 
-@Requires(env = "local-test") public class FakeCustomerIoEmailNotificationSender extends CustomerIoEmailNotificationSender { FakeCustomerIoEmailNotificationSender() { diff --git a/airbyte-notification/src/main/java/io/airbyte/notification/NotificationClient.java b/airbyte-notification/src/main/java/io/airbyte/notification/NotificationClient.java index 0707dfad27d..4cfdd7554ec 100644 --- a/airbyte-notification/src/main/java/io/airbyte/notification/NotificationClient.java +++ b/airbyte-notification/src/main/java/io/airbyte/notification/NotificationClient.java @@ -11,7 +11,6 @@ import io.airbyte.notification.messages.SyncSummary; import java.io.IOException; import java.util.List; -import java.util.UUID; /** * Client for trigger notifications (regardless of notification type e.g. slack or email). @@ -28,20 +27,12 @@ public abstract boolean notifyJobSuccess(final SyncSummary summary, final String receiverEmail) throws IOException, InterruptedException; - public abstract boolean notifyConnectionDisabled(String receiverEmail, - String sourceConnector, - String destinationConnector, - String jobDescription, - UUID workspaceId, - UUID connectionId) + public abstract boolean notifyConnectionDisabled(final SyncSummary summary, + final String receiverEmail) throws IOException, InterruptedException; - public abstract boolean notifyConnectionDisableWarning(String receiverEmail, - String sourceConnector, - String destinationConnector, - String jobDescription, - UUID workspaceId, - UUID connectionId) + public abstract boolean notifyConnectionDisableWarning(final SyncSummary summary, + final String receiverEmail) throws IOException, InterruptedException; public abstract boolean notifyBreakingChangeWarning(List receiverEmails, @@ -56,10 +47,6 @@ public abstract boolean notifyBreakingChangeSyncsDisabled(List receiverE final ActorDefinitionBreakingChange breakingChange) throws IOException, InterruptedException; - public abstract boolean notifySuccess(String message) throws 
IOException, InterruptedException; - - public abstract boolean notifyFailure(String message) throws IOException, InterruptedException; - public abstract boolean notifySchemaPropagated(final SchemaUpdateNotification notification, final String recipient) throws IOException, InterruptedException; diff --git a/airbyte-notification/src/main/java/io/airbyte/notification/SlackNotificationClient.java b/airbyte-notification/src/main/java/io/airbyte/notification/SlackNotificationClient.java index 07cf8b05778..70b6a8ac38a 100644 --- a/airbyte-notification/src/main/java/io/airbyte/notification/SlackNotificationClient.java +++ b/airbyte-notification/src/main/java/io/airbyte/notification/SlackNotificationClient.java @@ -28,7 +28,7 @@ import java.net.http.HttpResponse; import java.util.Comparator; import java.util.List; -import java.util.UUID; +import java.util.Optional; import org.apache.logging.log4j.util.Strings; import org.jetbrains.annotations.NotNull; import org.slf4j.Logger; @@ -67,7 +67,7 @@ public boolean notifyJobFailure(final SyncSummary summary, summary.getErrorMessage(), summary.getConnection().getUrl(), String.valueOf(summary.getJobId())); - return notifyJson(buildJobCompletedNotification(summary, legacyMessage).toJsonNode()); + return notifyJson(buildJobCompletedNotification(summary, "Sync failure occurred", legacyMessage, Optional.empty()).toJsonNode()); } @Override @@ -82,19 +82,26 @@ public boolean notifyJobSuccess(final SyncSummary summary, summary.getErrorMessage(), summary.getConnection().getUrl(), String.valueOf(summary.getJobId())); - return notifyJson(buildJobCompletedNotification(summary, legacyMessage).toJsonNode()); + return notifyJson(buildJobCompletedNotification(summary, "Sync completed", legacyMessage, Optional.empty()).toJsonNode()); } @NotNull - static Notification buildJobCompletedNotification(final SyncSummary summary, final String text) { + static Notification buildJobCompletedNotification(final SyncSummary summary, + final String titleText, + 
final String legacyText, + final Optional topContent) { Notification notification = new Notification(); - notification.setText(text); + notification.setText(legacyText); Section title = notification.addSection(); String connectionLink = Notification.createLink(summary.getConnection().getName(), summary.getConnection().getUrl()); - String titleText = summary.isSuccess() ? "Sync completed" : "Sync failure occurred"; title.setText(String.format("%s: %s", titleText, connectionLink)); - Section description = notification.addSection(); + if (topContent.isPresent()) { + final Section topSection = notification.addSection(); + topSection.setText(topContent.get()); + } + + Section description = notification.addSection(); final Field sourceLabel = description.addField(); sourceLabel.setType("mrkdwn"); sourceLabel.setText("*Source:*"); @@ -131,59 +138,48 @@ static Notification buildJobCompletedNotification(final SyncSummary summary, fin Section summarySection = notification.addSection(); summarySection.setText(String.format(""" *Sync Summary:* - %d record(s) loaded / %d record(s) extracted - %s loaded / %s extracted + %d record(s) extracted / %d record(s) loaded + %s extracted / %s loaded """, - summary.getRecordsCommitted(), summary.getRecordsEmitted(), - summary.getBytesCommittedFormatted(), summary.getBytesEmittedFormatted())); + summary.getRecordsEmitted(), summary.getRecordsCommitted(), + summary.getBytesEmittedFormatted(), summary.getBytesCommittedFormatted())); return notification; } @Override - public boolean notifyConnectionDisabled(final String receiverEmail, - final String sourceConnector, - final String destinationConnector, - final String jobDescription, - final UUID workspaceId, - final UUID connectionId) + public boolean notifyConnectionDisabled(final SyncSummary summary, + final String receiverEmail) throws IOException, InterruptedException { - final String message = renderTemplate( + String legacyMessage = renderTemplate( 
"slack/auto_disable_slack_notification_template.txt", - sourceConnector, - destinationConnector, - jobDescription, - workspaceId.toString(), - connectionId.toString()); - - final String webhookUrl = config.getWebhook(); - if (!Strings.isEmpty(webhookUrl)) { - return notify(message); - } - return false; + summary.getSource().getName(), + summary.getDestination().getName(), + summary.getErrorMessage(), + summary.getWorkspace().getId().toString(), + summary.getConnection().getId().toString()); + String message = """ + Your connection has been repeatedly failing and has been automatically disabled. + """; + return notifyJson(buildJobCompletedNotification(summary, "Connection disabled", legacyMessage, Optional.of(message)).toJsonNode()); } @Override - public boolean notifyConnectionDisableWarning(final String receiverEmail, - final String sourceConnector, - final String destinationConnector, - final String jobDescription, - final UUID workspaceId, - final UUID connectionId) + public boolean notifyConnectionDisableWarning(final SyncSummary summary, + final String receiverEmail) throws IOException, InterruptedException { - final String message = renderTemplate( + String legacyMessage = renderTemplate( "slack/auto_disable_warning_slack_notification_template.txt", - sourceConnector, - destinationConnector, - jobDescription, - workspaceId.toString(), - connectionId.toString()); - - final String webhookUrl = config.getWebhook(); - if (!Strings.isEmpty(webhookUrl)) { - return notify(message); - } - return false; + summary.getSource().getName(), + summary.getDestination().getName(), + summary.getErrorMessage(), + summary.getWorkspace().getId().toString(), + summary.getConnection().getId().toString()); + String message = """ + Your connection has been repeatedly failing. Please address any issues to ensure your syncs continue to run. 
+ """; + return notifyJson( + buildJobCompletedNotification(summary, "Warning - repeated connection failures", legacyMessage, Optional.of(message)).toJsonNode()); } @Override @@ -354,24 +350,6 @@ private boolean notifyJson(final JsonNode node) throws IOException, InterruptedE } } - @Override - public boolean notifySuccess(final String message) throws IOException, InterruptedException { - final String webhookUrl = config.getWebhook(); - if (!Strings.isEmpty(webhookUrl)) { - return notify(message); - } - return false; - } - - @Override - public boolean notifyFailure(final String message) throws IOException, InterruptedException { - final String webhookUrl = config.getWebhook(); - if (!Strings.isEmpty(webhookUrl)) { - return notify(message); - } - return false; - } - @Override public String getNotificationClientType() { return SLACK_CLIENT; diff --git a/airbyte-notification/src/main/resources/customerio/auto_disable_notification_template.json b/airbyte-notification/src/main/resources/customerio/auto_disable_notification_template.json deleted file mode 100644 index 4469fe417bb..00000000000 --- a/airbyte-notification/src/main/resources/customerio/auto_disable_notification_template.json +++ /dev/null @@ -1,20 +0,0 @@ -{ - "transactional_message_id": "%s", - "to": "%s", - "identifiers": { - "email": "%s" - }, - "message_data": { - "source": "%s", - "destination": "%s", - "job_description": "%s", - "workspace_id": "%s", - "connection_id": "%s" - }, - - "disable_message_retention": false, - "send_to_unsubscribed": true, - "tracked": false, - "queue_draft": false, - "disable_css_preprocessing": true -} diff --git a/airbyte-notification/src/test/java/io/airbyte/notification/CustomerioNotificationClientTest.java b/airbyte-notification/src/test/java/io/airbyte/notification/CustomerioNotificationClientTest.java index 06d565d2499..5e67ae7ea84 100644 --- a/airbyte-notification/src/test/java/io/airbyte/notification/CustomerioNotificationClientTest.java +++ 
b/airbyte-notification/src/test/java/io/airbyte/notification/CustomerioNotificationClientTest.java @@ -172,9 +172,22 @@ void testNotifyBreakingChangeSyncsDisabled() throws IOException, InterruptedExce void testNotifyConnectionDisabled() throws IOException, InterruptedException { mockWebServer.enqueue(new MockResponse()); + SyncSummary summary = SyncSummary.builder() + .workspace(WorkspaceInfo.builder().id(WORKSPACE_ID).build()) + .destination(DestinationInfo.builder().name(RANDOM_INPUT).build()) + .source(SourceInfo.builder().name(RANDOM_INPUT).build()) + .connection(ConnectionInfo.builder().id(CONNECTION_ID).build()) + .startedAt(Instant.ofEpochSecond(1000000)) + .finishedAt(Instant.ofEpochSecond(2000000)) + .isSuccess(false) + .bytesEmitted(123240L) + .bytesCommitted(9000L) + .recordsEmitted(780) + .recordsCommitted(600) + .errorMessage(RANDOM_INPUT) + .build(); final boolean result = - customerioNotificationClient.notifyConnectionDisabled(WORKSPACE.getEmail(), RANDOM_INPUT, RANDOM_INPUT, RANDOM_INPUT, WORKSPACE_ID, - CONNECTION_ID); + customerioNotificationClient.notifyConnectionDisabled(summary, WORKSPACE.getEmail()); assertTrue(result); diff --git a/airbyte-notification/src/test/java/io/airbyte/notification/SlackNotificationClientTest.java b/airbyte-notification/src/test/java/io/airbyte/notification/SlackNotificationClientTest.java index a80a337b969..5e1116d4f02 100644 --- a/airbyte-notification/src/test/java/io/airbyte/notification/SlackNotificationClientTest.java +++ b/airbyte-notification/src/test/java/io/airbyte/notification/SlackNotificationClientTest.java @@ -78,15 +78,6 @@ void tearDown() { server.stop(1); } - @Test - void testBadResponseWrongNotificationMessage() throws IOException, InterruptedException { - final String message = UUID.randomUUID().toString(); - server.createContext(TEST_PATH, new ServerHandler("Message mismatched")); - final SlackNotificationClient client = - new SlackNotificationClient(new 
SlackNotificationConfiguration().withWebhook(WEBHOOK_URL + server.getAddress().getPort() + TEST_PATH)); - assertThrows(IOException.class, () -> client.notifyFailure(message)); - } - @Test void testBadWebhookUrl() { final SlackNotificationClient client = @@ -125,16 +116,6 @@ void testEmptyWebhookUrl() throws IOException, InterruptedException { assertFalse(client.notifyJobFailure(summary, null)); } - @Test - void testNotify() throws IOException, InterruptedException { - final String message = UUID.randomUUID().toString(); - server.createContext(TEST_PATH, new ServerHandler(message)); - final SlackNotificationClient client = - new SlackNotificationClient(new SlackNotificationConfiguration().withWebhook(WEBHOOK_URL + server.getAddress().getPort() + TEST_PATH)); - assertTrue(client.notifyFailure(message)); - assertTrue(client.notifySuccess(message)); - } - @Test void testNotifyJobFailure() throws IOException, InterruptedException { server.createContext(TEST_PATH, new ServerHandler(EXPECTED_FAIL_MESSAGE)); @@ -188,7 +169,14 @@ void testNotifyConnectionDisabled() throws IOException, InterruptedException { server.createContext(TEST_PATH, new ServerHandler(expectedNotificationMessage)); final SlackNotificationClient client = new SlackNotificationClient(new SlackNotificationConfiguration().withWebhook(WEBHOOK_URL + server.getAddress().getPort() + TEST_PATH)); - assertTrue(client.notifyConnectionDisabled("", SOURCE_TEST, DESTINATION_TEST, "job description.", WORKSPACE_ID, CONNECTION_ID)); + SyncSummary summary = SyncSummary.builder() + .workspace(WorkspaceInfo.builder().id(WORKSPACE_ID).build()) + .destination(DestinationInfo.builder().name(DESTINATION_TEST).build()) + .source(SourceInfo.builder().name(SOURCE_TEST).build()) + .connection(ConnectionInfo.builder().id(CONNECTION_ID).name(CONNECTION_NAME).url("http://connection").build()) + .errorMessage("job description.") + .build(); + assertTrue(client.notifyConnectionDisabled(summary, "")); } @SuppressWarnings("LineLength") 
@@ -208,7 +196,14 @@ void testNotifyConnectionDisabledWarning() throws IOException, InterruptedExcept server.createContext(TEST_PATH, new ServerHandler(expectedNotificationWarningMessage)); final SlackNotificationClient client = new SlackNotificationClient(new SlackNotificationConfiguration().withWebhook(WEBHOOK_URL + server.getAddress().getPort() + TEST_PATH)); - assertTrue(client.notifyConnectionDisableWarning("", SOURCE_TEST, DESTINATION_TEST, "job description.", WORKSPACE_ID, CONNECTION_ID)); + SyncSummary summary = SyncSummary.builder() + .workspace(WorkspaceInfo.builder().id(WORKSPACE_ID).build()) + .destination(DestinationInfo.builder().name(DESTINATION_TEST).build()) + .source(SourceInfo.builder().name(SOURCE_TEST).build()) + .connection(ConnectionInfo.builder().id(CONNECTION_ID).name(CONNECTION_NAME).url("http://connection").build()) + .errorMessage("job description.") + .build(); + assertTrue(client.notifyConnectionDisableWarning(summary, "")); } @Test @@ -364,7 +359,7 @@ public void handle(final HttpExchange t) throws IOException { response = "No notification message or message missing `text` node"; t.sendResponseHeaders(500, response.length()); } else { - response = String.format("Wrong notification messge: %s", message.get("text").asText()); + response = String.format("Wrong notification message: %s", message.get("text").asText()); t.sendResponseHeaders(500, response.length()); } final OutputStream os = t.getResponseBody(); diff --git a/airbyte-oauth/readme.md b/airbyte-oauth/README.md similarity index 100% rename from airbyte-oauth/readme.md rename to airbyte-oauth/README.md diff --git a/airbyte-oauth/build.gradle.kts b/airbyte-oauth/build.gradle.kts index 7ad77afa77f..34a0b33d075 100644 --- a/airbyte-oauth/build.gradle.kts +++ b/airbyte-oauth/build.gradle.kts @@ -1,26 +1,26 @@ plugins { - id("io.airbyte.gradle.jvm.lib") - id("io.airbyte.gradle.publish") + id("io.airbyte.gradle.jvm.lib") + id("io.airbyte.gradle.publish") } dependencies { - 
implementation(platform("com.fasterxml.jackson:jackson-bom:2.13.0")) - implementation(libs.bundles.jackson) - implementation(libs.guava) - implementation(libs.google.cloud.storage) - implementation(libs.bundles.apache) - implementation(libs.appender.log4j2) - implementation(libs.aws.java.sdk.s3) - implementation(libs.aws.java.sdk.sts) + implementation(platform("com.fasterxml.jackson:jackson-bom:2.13.0")) + implementation(libs.bundles.jackson) + implementation(libs.guava) + implementation(libs.google.cloud.storage) + implementation(libs.bundles.apache) + implementation(libs.appender.log4j2) + implementation(libs.aws.java.sdk.s3) + implementation(libs.aws.java.sdk.sts) - implementation(project(":airbyte-commons")) - implementation(project(":airbyte-config:config-models")) - implementation(project(":airbyte-config:config-persistence")) - implementation(project(":airbyte-json-validation")) - implementation(libs.airbyte.protocol) + implementation(project(":airbyte-commons")) + implementation(project(":airbyte-config:config-models")) + implementation(project(":airbyte-config:config-persistence")) + implementation(project(":airbyte-json-validation")) + implementation(libs.airbyte.protocol) - testRuntimeOnly(libs.junit.jupiter.engine) - testImplementation(libs.bundles.junit) - testImplementation(libs.assertj.core) - testImplementation(libs.junit.pioneer) + testRuntimeOnly(libs.junit.jupiter.engine) + testImplementation(libs.bundles.junit) + testImplementation(libs.assertj.core) + testImplementation(libs.junit.pioneer) } diff --git a/airbyte-oauth/src/main/java/io/airbyte/oauth/OAuthImplementationFactory.java b/airbyte-oauth/src/main/java/io/airbyte/oauth/OAuthImplementationFactory.java index 6e7ecb93a92..ab363bd80c9 100644 --- a/airbyte-oauth/src/main/java/io/airbyte/oauth/OAuthImplementationFactory.java +++ b/airbyte-oauth/src/main/java/io/airbyte/oauth/OAuthImplementationFactory.java @@ -20,6 +20,7 @@ import io.airbyte.oauth.flows.LeverOAuthFlow; import 
io.airbyte.oauth.flows.LinkedinAdsOAuthFlow; import io.airbyte.oauth.flows.MailchimpOAuthFlow; +import io.airbyte.oauth.flows.MicrosoftAzureBlobStorageOAuthFlow; import io.airbyte.oauth.flows.MicrosoftBingAdsOAuthFlow; import io.airbyte.oauth.flows.MicrosoftOneDriveOAuthFlow; import io.airbyte.oauth.flows.MicrosoftSharepointOAuthFlow; @@ -76,6 +77,7 @@ public OAuthImplementationFactory(final HttpClient httpClient) { builder.put("airbyte/source-amazon-ads", new AmazonAdsOAuthFlow(httpClient)); builder.put("airbyte/source-amazon-seller-partner", new AmazonSellerPartnerOAuthFlow(httpClient)); builder.put("airbyte/source-asana", new AsanaOAuthFlow(httpClient)); + builder.put("airbyte/source-azure-blob-storage", new MicrosoftAzureBlobStorageOAuthFlow(httpClient)); builder.put("airbyte/source-bing-ads", new MicrosoftBingAdsOAuthFlow(httpClient)); builder.put("airbyte/source-drift", new DriftOAuthFlow(httpClient)); builder.put("airbyte/source-facebook-marketing", new FacebookMarketingOAuthFlow(httpClient)); diff --git a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/MicrosoftAzureBlobStorageOAuthFlow.java b/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/MicrosoftAzureBlobStorageOAuthFlow.java new file mode 100644 index 00000000000..11636bce6fc --- /dev/null +++ b/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/MicrosoftAzureBlobStorageOAuthFlow.java @@ -0,0 +1,89 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.oauth.flows; + +import com.fasterxml.jackson.databind.JsonNode; +import com.google.common.annotations.VisibleForTesting; +import com.google.common.collect.ImmutableMap; +import io.airbyte.oauth.BaseOAuth2Flow; +import java.io.IOException; +import java.net.URISyntaxException; +import java.net.http.HttpClient; +import java.util.Map; +import java.util.UUID; +import java.util.function.Supplier; +import org.apache.http.client.utils.URIBuilder; + +/** + * Microsoft Azure Blob Storage OAuth. 
+ */ +public class MicrosoftAzureBlobStorageOAuthFlow extends BaseOAuth2Flow { + + private static final String fieldName = "tenant_id"; + + public MicrosoftAzureBlobStorageOAuthFlow(final HttpClient httpClient) { + super(httpClient); + } + + @VisibleForTesting + public MicrosoftAzureBlobStorageOAuthFlow(final HttpClient httpClient, final Supplier stateSupplier) { + super(httpClient, stateSupplier); + } + + private String getScopes() { + return "offline_access%20https://storage.azure.com/.default"; + } + + @Override + protected String formatConsentUrl(final UUID definitionId, + final String clientId, + final String redirectUrl, + final JsonNode inputOAuthConfiguration) + throws IOException { + + final String tenantId; + try { + tenantId = getConfigValueUnsafe(inputOAuthConfiguration, fieldName); + } catch (final IllegalArgumentException e) { + throw new IOException("Failed to get " + fieldName + " value from input configuration", e); + } + + try { + return new URIBuilder() + .setScheme("https") + .setHost("login.microsoftonline.com") + .setPath(tenantId + "/oauth2/v2.0/authorize") + .addParameter("client_id", clientId) + .addParameter("response_type", "code") + .addParameter("redirect_uri", redirectUrl) + .addParameter("response_mode", "query") + .addParameter("state", getState()) + .build().toString() + "&scope=" + getScopes(); + } catch (final URISyntaxException e) { + throw new IOException("Failed to format Consent URL for OAuth flow", e); + } + } + + @Override + protected Map getAccessTokenQueryParameters(final String clientId, + final String clientSecret, + final String authCode, + final String redirectUrl) { + return ImmutableMap.builder() + .put("client_id", clientId) + .put("client_secret", clientSecret) + .put("code", authCode) + .put("redirect_uri", redirectUrl) + .put("grant_type", "authorization_code") + .build(); + } + + @Override + protected String getAccessTokenUrl(final JsonNode inputOAuthConfiguration) { + final String tenantId = 
getConfigValueUnsafe(inputOAuthConfiguration, fieldName); + return "https://login.microsoftonline.com/" + tenantId + "/oauth2/v2.0/token"; + } + +} diff --git a/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/MicrosoftAzureBlobStorageOAuthFlowTest.java b/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/MicrosoftAzureBlobStorageOAuthFlowTest.java new file mode 100644 index 00000000000..e56baf4e2f4 --- /dev/null +++ b/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/MicrosoftAzureBlobStorageOAuthFlowTest.java @@ -0,0 +1,52 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.oauth.flows; + +import com.fasterxml.jackson.databind.JsonNode; +import io.airbyte.commons.json.Jsons; +import io.airbyte.oauth.BaseOAuthFlow; +import java.util.Map; +import org.junit.jupiter.api.Test; + +@SuppressWarnings("PMD.JUnitTestsShouldIncludeAssert") +class MicrosoftAzureBlobStorageOAuthFlowTest extends BaseOAuthFlowTest { + + @Override + protected BaseOAuthFlow getOAuthFlow() { + return new MicrosoftAzureBlobStorageOAuthFlow(getHttpClient(), this::getConstantState); + } + + @Override + protected String getExpectedConsentUrl() { + return "https://login.microsoftonline.com/test_tenant_id/oauth2/v2.0/authorize?client_id=test_client_id&response_type=code&redirect_uri=https%3A%2F%2Fairbyte.io&response_mode=query&state=state&scope=offline_access%20https://storage.azure.com/.default"; + } + + @Override + protected JsonNode getInputOAuthConfiguration() { + return Jsons.jsonNode(Map.of("tenant_id", "test_tenant_id")); + } + + @Override + protected JsonNode getUserInputFromConnectorConfigSpecification() { + return getJsonSchema(Map.of("tenant_id", Map.of("type", "string"))); + } + + @Test + @Override + void testEmptyInputCompleteDestinationOAuth() {} + + @Test + @Override + void testDeprecatedCompleteDestinationOAuth() {} + + @Test + @Override + void testDeprecatedCompleteSourceOAuth() {} + + @Test + @Override + void 
testEmptyInputCompleteSourceOAuth() {} + +} diff --git a/airbyte-persistence/job-persistence/build.gradle.kts b/airbyte-persistence/job-persistence/build.gradle.kts index 07a23d70f55..041ed7041c2 100644 --- a/airbyte-persistence/job-persistence/build.gradle.kts +++ b/airbyte-persistence/job-persistence/build.gradle.kts @@ -1,59 +1,67 @@ plugins { - id("io.airbyte.gradle.jvm.lib") - id("io.airbyte.gradle.publish") + id("io.airbyte.gradle.jvm.lib") + id("io.airbyte.gradle.publish") + kotlin("jvm") + kotlin("kapt") } configurations.all { - resolutionStrategy { - force(libs.platform.testcontainers.postgresql) - } + resolutionStrategy { + force(libs.platform.testcontainers.postgresql) + } } dependencies { - compileOnly(libs.lombok) - annotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut - - implementation(platform(libs.fasterxml)) - implementation(libs.bundles.jackson) - implementation(libs.spotbugs.annotations) - implementation(libs.guava) - implementation(libs.commons.io) - implementation(libs.bundles.apache) - // TODO: remove this, it's pulled in for a Strings.notEmpty() check - implementation(libs.bundles.log4j) - - implementation(project(":airbyte-commons")) - implementation(project(":airbyte-commons-protocol")) - implementation(project(":airbyte-oauth")) - implementation(project(":airbyte-config:config-models")) - implementation(project(":airbyte-db:jooq")) - implementation(project(":airbyte-db:db-lib")) - implementation(libs.airbyte.protocol) - implementation(project(":airbyte-config:config-persistence")) - implementation(project(":airbyte-featureflag")) - implementation(project(":airbyte-json-validation")) - implementation(project(":airbyte-notification")) - implementation(project(":airbyte-analytics")) - implementation(project(":airbyte-metrics:metrics-lib")) - - implementation(libs.sentry.java) - implementation(libs.otel.semconv) - implementation(libs.otel.sdk) - implementation(libs.otel.sdk.testing) - implementation(libs.micrometer.statsd) 
- implementation(platform(libs.otel.bom)) - implementation("io.opentelemetry:opentelemetry-api") - implementation("io.opentelemetry:opentelemetry-sdk") - implementation("io.opentelemetry:opentelemetry-exporter-otlp") - implementation(libs.apache.commons.collections) - implementation(libs.datadog.statsd.client) - - testImplementation(project(":airbyte-config:config-persistence")) - testImplementation(project(":airbyte-test-utils")) - testImplementation(libs.platform.testcontainers.postgresql) - testRuntimeOnly(libs.junit.jupiter.engine) - testImplementation(libs.bundles.junit) - testImplementation(libs.assertj.core) - - testImplementation(libs.junit.pioneer) + compileOnly(libs.lombok) + annotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut + annotationProcessor(libs.bundles.micronaut.annotation.processor) + + kapt(platform(libs.micronaut.platform)) + kapt(libs.bundles.micronaut.annotation.processor) + + implementation(platform(libs.fasterxml)) + implementation(libs.bundles.jackson) + implementation(libs.spotbugs.annotations) + implementation(libs.guava) + implementation(libs.commons.io) + implementation(libs.bundles.apache) + // TODO: remove this, it's pulled in for a Strings.notEmpty() check + implementation(libs.bundles.log4j) + + implementation(project(":airbyte-commons")) + implementation(project(":airbyte-commons-protocol")) + implementation(project(":airbyte-oauth")) + implementation(project(":airbyte-config:config-models")) + implementation(project(":airbyte-db:jooq")) + implementation(project(":airbyte-db:db-lib")) + implementation(libs.airbyte.protocol) + implementation(project(":airbyte-config:config-persistence")) + implementation(project(":airbyte-featureflag")) + implementation(project(":airbyte-json-validation")) + implementation(project(":airbyte-notification")) + implementation(project(":airbyte-analytics")) + implementation(project(":airbyte-metrics:metrics-lib")) + + implementation(libs.sentry.java) + 
implementation(libs.otel.semconv) + implementation(libs.otel.sdk) + implementation(libs.otel.sdk.testing) + implementation(libs.micrometer.statsd) + implementation(platform(libs.otel.bom)) + implementation("io.opentelemetry:opentelemetry-api") + implementation("io.opentelemetry:opentelemetry-sdk") + implementation("io.opentelemetry:opentelemetry-exporter-otlp") + implementation(libs.apache.commons.collections) + implementation(libs.datadog.statsd.client) + implementation(libs.bundles.micronaut.data.jdbc) + implementation(libs.bundles.micronaut.kotlin) + + testImplementation(project(":airbyte-config:config-persistence")) + testImplementation(project(":airbyte-test-utils")) + testImplementation(libs.platform.testcontainers.postgresql) + testRuntimeOnly(libs.junit.jupiter.engine) + testImplementation(libs.bundles.junit) + testImplementation(libs.assertj.core) + + testImplementation(libs.junit.pioneer) } diff --git a/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/DefaultJobCreator.java b/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/DefaultJobCreator.java index 64a796aa28f..c79bf47d78c 100644 --- a/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/DefaultJobCreator.java +++ b/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/DefaultJobCreator.java @@ -14,6 +14,7 @@ import io.airbyte.config.JobResetConnectionConfig; import io.airbyte.config.JobSyncConfig; import io.airbyte.config.JobTypeResourceLimit.JobType; +import io.airbyte.config.RefreshConfig; import io.airbyte.config.ResetSourceConfiguration; import io.airbyte.config.ResourceRequirements; import io.airbyte.config.ResourceRequirementsType; @@ -23,10 +24,17 @@ import io.airbyte.config.StandardSourceDefinition.SourceType; import io.airbyte.config.StandardSync; import io.airbyte.config.StandardSyncOperation; +import io.airbyte.config.StateWrapper; import io.airbyte.config.SyncResourceRequirements; import 
io.airbyte.config.SyncResourceRequirementsKey; import io.airbyte.config.helpers.ResourceRequirementsUtils; +import io.airbyte.config.persistence.RefreshJobStateUpdater; +import io.airbyte.config.persistence.StatePersistence; +import io.airbyte.config.persistence.StreamRefreshesRepository; +import io.airbyte.config.persistence.domain.StreamRefresh; +import io.airbyte.config.persistence.helper.GenerationBumper; import io.airbyte.config.provider.ResourceRequirementsProvider; +import io.airbyte.featureflag.ActivateRefreshes; import io.airbyte.featureflag.Connection; import io.airbyte.featureflag.Context; import io.airbyte.featureflag.DestResourceOverrides; @@ -63,13 +71,25 @@ public class DefaultJobCreator implements JobCreator { private final JobPersistence jobPersistence; private final ResourceRequirementsProvider resourceRequirementsProvider; private final FeatureFlagClient featureFlagClient; + private final GenerationBumper generationBumper; + private final StatePersistence statePersistence; + private final RefreshJobStateUpdater refreshJobStateUpdater; + private final StreamRefreshesRepository streamRefreshesRepository; public DefaultJobCreator(final JobPersistence jobPersistence, final ResourceRequirementsProvider resourceRequirementsProvider, - final FeatureFlagClient featureFlagClient) { + final FeatureFlagClient featureFlagClient, + final GenerationBumper generationBumper, + final StatePersistence statePersistence, + final RefreshJobStateUpdater refreshJobStateUpdater, + final StreamRefreshesRepository streamRefreshesRepository) { this.jobPersistence = jobPersistence; this.resourceRequirementsProvider = resourceRequirementsProvider; this.featureFlagClient = featureFlagClient; + this.generationBumper = generationBumper; + this.statePersistence = statePersistence; + this.refreshJobStateUpdater = refreshJobStateUpdater; + this.streamRefreshesRepository = streamRefreshesRepository; } @Override @@ -115,6 +135,86 @@ public Optional createSyncJob(final 
SourceConnection source, return jobPersistence.enqueueJob(standardSync.getConnectionId().toString(), jobConfig); } + @Override + public Optional createRefreshConnection(final StandardSync standardSync, + final String sourceDockerImageName, + final Version sourceProtocolVersion, + final String destinationDockerImageName, + final Version destinationProtocolVersion, + final List standardSyncOperations, + @Nullable final JsonNode webhookOperationConfigs, + final StandardSourceDefinition sourceDefinition, + final StandardDestinationDefinition destinationDefinition, + final ActorDefinitionVersion sourceDefinitionVersion, + final ActorDefinitionVersion destinationDefinitionVersion, + final UUID workspaceId, + final List streamsToRefresh) + throws IOException { + final boolean canRunRefreshes = featureFlagClient.boolVariation(ActivateRefreshes.INSTANCE, new Multi( + List.of( + new Workspace(workspaceId), + new Connection(standardSync.getConnectionId()), + new SourceDefinition(sourceDefinition.getSourceDefinitionId()), + new DestinationDefinition(destinationDefinition.getDestinationDefinitionId())))); + + if (!canRunRefreshes) { + throw new IllegalStateException("Trying to create a refresh job for a destination which doesn't support refreshes"); + } + + final SyncResourceRequirements syncResourceRequirements = + getSyncResourceRequirements(workspaceId, standardSync, sourceDefinition, destinationDefinition, false); + + final RefreshConfig refreshConfig = new RefreshConfig() + .withNamespaceDefinition(standardSync.getNamespaceDefinition()) + .withNamespaceFormat(standardSync.getNamespaceFormat()) + .withPrefix(standardSync.getPrefix()) + .withSourceDockerImage(sourceDockerImageName) + .withSourceProtocolVersion(sourceProtocolVersion) + .withDestinationDockerImage(destinationDockerImageName) + .withDestinationProtocolVersion(destinationProtocolVersion) + .withOperationSequence(standardSyncOperations) + .withWebhookOperationConfigs(webhookOperationConfigs) + 
.withConfiguredAirbyteCatalog(standardSync.getCatalog()) + .withSyncResourceRequirements(syncResourceRequirements) + .withIsSourceCustomConnector(sourceDefinition.getCustom()) + .withIsDestinationCustomConnector(destinationDefinition.getCustom()) + .withWorkspaceId(workspaceId) + .withSourceDefinitionVersionId(sourceDefinitionVersion.getVersionId()) + .withDestinationDefinitionVersionId(destinationDefinitionVersion.getVersionId()) + .withStreamsToRefresh( + streamsToRefresh.stream().map(streamRefresh -> new StreamDescriptor() + .withName(streamRefresh.getStreamName()) + .withNamespace(streamRefresh.getStreamNamespace())).toList()); + + final JobConfig jobConfig = new JobConfig() + .withConfigType(ConfigType.REFRESH) + .withRefresh(refreshConfig); + + final Optional maybeJobId = jobPersistence.enqueueJob(standardSync.getConnectionId().toString(), jobConfig); + + if (maybeJobId.isPresent()) { + final long jobId = maybeJobId.get(); + generationBumper.updateGenerationForStreams(standardSync.getConnectionId(), jobId, streamsToRefresh); + + final Optional currentState = statePersistence.getCurrentState(standardSync.getConnectionId()); + updateStateAndDeleteRefreshes(standardSync.getConnectionId(), streamsToRefresh, currentState); + } + + return maybeJobId; + } + + // TODO: Add Transactional annotation + private void updateStateAndDeleteRefreshes(final UUID connectionId, + final List streamsToRefresh, + final Optional currentState) + throws IOException { + if (currentState.isPresent()) { + refreshJobStateUpdater.updateStateWrapperForRefresh(connectionId, currentState.get(), streamsToRefresh); + } + streamsToRefresh.forEach( + s -> streamRefreshesRepository.deleteByConnectionIdAndStreamNameAndStreamNamespace(connectionId, s.getStreamName(), s.getStreamNamespace())); + } + @Override public Optional createResetConnectionJob(final DestinationConnection destination, final StandardSync standardSync, diff --git 
a/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/DefaultJobPersistence.java b/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/DefaultJobPersistence.java index cffe860d677..93a37a510d0 100644 --- a/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/DefaultJobPersistence.java +++ b/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/DefaultJobPersistence.java @@ -9,6 +9,7 @@ import static io.airbyte.db.instance.jobs.jooq.generated.Tables.NORMALIZATION_SUMMARIES; import static io.airbyte.db.instance.jobs.jooq.generated.Tables.STREAM_STATS; import static io.airbyte.db.instance.jobs.jooq.generated.Tables.SYNC_STATS; +import static io.airbyte.persistence.job.models.JobStatus.TERMINAL_STATUSES; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; @@ -403,6 +404,10 @@ private static JobConfig parseJobConfigFromString(final String jobConfigString) // TODO feature flag this for data types rollout // CatalogMigrationV1Helper.upgradeSchemaIfNeeded(jobConfig.getResetConnection().getConfiguredAirbyteCatalog()); CatalogMigrationV1Helper.downgradeSchemaIfNeeded(jobConfig.getResetConnection().getConfiguredAirbyteCatalog()); + } else if (jobConfig.getConfigType() == ConfigType.REFRESH && jobConfig.getRefresh() != null) { + // TODO feature flag this for data types rollout + // CatalogMigrationV1Helper.upgradeSchemaIfNeeded(jobConfig.getRefresh().getConfiguredAirbyteCatalog()); + CatalogMigrationV1Helper.downgradeSchemaIfNeeded(jobConfig.getRefresh().getConfiguredAirbyteCatalog()); } return jobConfig; } @@ -434,10 +439,6 @@ private static JobOutput parseJobOutputFromString(final String jobOutputString) // TODO feature flag this for data types rollout // CatalogMigrationV1Helper.upgradeSchemaIfNeeded(jobOutput.getDiscoverCatalog().getCatalog()); 
CatalogMigrationV1Helper.downgradeSchemaIfNeeded(jobOutput.getDiscoverCatalog().getCatalog()); - } else if (jobOutput.getOutputType() == OutputType.SYNC && jobOutput.getSync() != null) { - // TODO feature flag this for data types rollout - // CatalogMigrationV1Helper.upgradeSchemaIfNeeded(jobOutput.getSync().getOutputCatalog()); - CatalogMigrationV1Helper.downgradeSchemaIfNeeded(jobOutput.getSync().getOutputCatalog()); } return jobOutput; } @@ -520,7 +521,7 @@ public Optional enqueueJob(final String scope, final JobConfig jobConfig) ? String.format("WHERE NOT EXISTS (SELECT 1 FROM jobs WHERE config_type IN (%s) AND scope = '%s' AND status NOT IN (%s)) ", Job.REPLICATION_TYPES.stream().map(DefaultJobPersistence::toSqlName).map(Names::singleQuote).collect(Collectors.joining(",")), scope, - JobStatus.TERMINAL_STATUSES.stream().map(DefaultJobPersistence::toSqlName).map(Names::singleQuote).collect(Collectors.joining(","))) + TERMINAL_STATUSES.stream().map(DefaultJobPersistence::toSqlName).map(Names::singleQuote).collect(Collectors.joining(","))) : ""; return jobDatabase.query( @@ -935,12 +936,10 @@ public List listJobs(final Set configTypes, .and(updatedAtStart == null ? DSL.noCondition() : JOBS.UPDATED_AT.ge(updatedAtStart)) .and(updatedAtEnd == null ? 
DSL.noCondition() : JOBS.UPDATED_AT.le(updatedAtEnd)) .orderBy(JOBS.CREATED_AT.desc(), JOBS.ID.desc()) - .limit(limit) - .offset(offset) .getSQL(ParamType.INLINED) + ") AS jobs"; LOGGER.debug("jobs subquery: {}", jobsSubquery); - return getJobsFromResult(ctx.fetch(jobSelectAndJoin(jobsSubquery) + buildJobOrderByString(orderByField, orderByMethod))); + return getJobsFromResult(ctx.fetch(jobSelectAndJoin(jobsSubquery) + buildJobOrderByString(orderByField, orderByMethod, limit, offset))); }); } @@ -961,7 +960,7 @@ public List listJobs(final Set configTypes, return jobDatabase.query(ctx -> { final String jobsSubquery = "(" + ctx.select(JOBS.asterisk()).from(JOBS) .join(Tables.CONNECTION) - .on(Tables.CONNECTION.ID.eq(JOBS.SCOPE.cast(UUID.class))) + .on(Tables.CONNECTION.ID.cast(String.class).eq(JOBS.SCOPE)) .join(Tables.ACTOR) .on(Tables.ACTOR.ID.eq(Tables.CONNECTION.SOURCE_ID)) .where(JOBS.CONFIG_TYPE.in(configTypeSqlNames(configTypes))) @@ -975,11 +974,9 @@ public List listJobs(final Set configTypes, .and(updatedAtStart == null ? DSL.noCondition() : JOBS.UPDATED_AT.ge(updatedAtStart)) .and(updatedAtEnd == null ? DSL.noCondition() : JOBS.UPDATED_AT.le(updatedAtEnd)) .orderBy(JOBS.CREATED_AT.desc(), JOBS.ID.desc()) - .limit(limit) - .offset(offset) .getSQL(ParamType.INLINED) + ") AS jobs"; - return getJobsFromResult(ctx.fetch(jobSelectAndJoin(jobsSubquery) + buildJobOrderByString(orderByField, orderByMethod))); + return getJobsFromResult(ctx.fetch(jobSelectAndJoin(jobsSubquery) + buildJobOrderByString(orderByField, orderByMethod, limit, offset))); }); } @@ -1091,12 +1088,11 @@ public List listRecordsCommittedForConnectionAfterTimestam + "FROM jobs " + "JOIN attempts ON jobs.id = attempts.job_id " + WHERE - + "CAST(config_type AS VARCHAR) = ? AND " + + "CAST(jobs.config_type AS VARCHAR) in " + toSqlInFragment(Job.SYNC_REPLICATION_TYPES) + AND + SCOPE_CLAUSE + "CAST(jobs.status AS VARCHAR) = ? AND " + "attempts.ended_at > ? 
" + "ORDER BY attempts.ended_at ASC", - toSqlName(ConfigType.SYNC), connectionId.toString(), toSqlName(JobStatus.SUCCEEDED), timeConvertedIntoLocalDateTime)) @@ -1156,10 +1152,9 @@ public Optional getLastReplicationJob(final UUID connectionId) throws IOExc public Optional getLastSyncJob(final UUID connectionId) throws IOException { return jobDatabase.query(ctx -> ctx .fetch(BASE_JOB_SELECT_AND_JOIN + WHERE - + "CAST(jobs.config_type AS VARCHAR) = ? " + AND + + "CAST(jobs.config_type AS VARCHAR) in " + toSqlInFragment(Job.SYNC_REPLICATION_TYPES) + AND + "scope = ? " + ORDER_BY_JOB_CREATED_AT_DESC + LIMIT_1, - toSqlName(ConfigType.SYNC), connectionId.toString()) .stream() .findFirst() @@ -1179,10 +1174,9 @@ public List getLastSyncJobForConnections(final List conn return jobDatabase.query(ctx -> ctx .fetch("SELECT DISTINCT ON (scope) jobs.scope, jobs.created_at, jobs.status " + " FROM jobs " - + WHERE + "CAST(jobs.config_type AS VARCHAR) = ? " + + WHERE + "CAST(jobs.config_type AS VARCHAR) in " + toSqlInFragment(Job.SYNC_REPLICATION_TYPES) + AND + scopeInList(connectionIds) - + "ORDER BY scope, created_at DESC", - toSqlName(ConfigType.SYNC)) + + "ORDER BY scope, created_at DESC") .stream() .map(r -> new JobStatusSummary(UUID.fromString(r.get("scope", String.class)), getEpoch(r, "created_at"), JobStatus.valueOf(r.get("status", String.class).toUpperCase()))) @@ -1201,11 +1195,10 @@ public List getRunningSyncJobForConnections(final List connectionIds) return jobDatabase.query(ctx -> ctx .fetch("SELECT DISTINCT ON (scope) * FROM jobs " - + WHERE + "CAST(jobs.config_type AS VARCHAR) = ? 
" + + WHERE + "CAST(jobs.config_type AS VARCHAR) in " + toSqlInFragment(Job.SYNC_REPLICATION_TYPES) + AND + scopeInList(connectionIds) + AND + JOB_STATUS_IS_NON_TERMINAL - + "ORDER BY scope, created_at DESC", - toSqlName(ConfigType.SYNC)) + + "ORDER BY scope, created_at DESC") .stream() .flatMap(r -> getJobOptional(ctx, r.get("id", Long.class)).stream()) .collect(Collectors.toList())); @@ -1275,6 +1268,19 @@ public List getAttemptNormalizationStatusesForJob(fi Optional.ofNullable(record.get(SYNC_STATS.RECORDS_COMMITTED)), record.get(NORMALIZATION_SUMMARIES.FAILURES) != null))); } + @Override + public void updateJobConfig(Long jobId, JobConfig config) throws IOException { + jobDatabase.query(ctx -> { + + ctx.update(JOBS) + .set(JOBS.CONFIG, JSONB.valueOf(Jsons.serialize(config))) + .set(JOBS.UPDATED_AT, OffsetDateTime.now()) + .where(JOBS.ID.eq(jobId)) + .execute(); + return null; + }); + } + @Override public Optional getVersion() throws IOException { return getMetadata(AirbyteVersion.AIRBYTE_VERSION_KEY_NAME).findFirst(); @@ -1437,7 +1443,7 @@ private String removeUnsupportedUnicode(final String value) { return value != null ? 
value.replaceAll("\\u0000|\\\\u0000", "") : null; } - private String buildJobOrderByString(final String orderByField, final String orderByMethod) { + private String buildJobOrderByString(final String orderByField, final String orderByMethod, final int limit, final int offset) { // Set up maps and values final Map fieldMap = Map.of( OrderByField.CREATED_AT, JOBS.CREATED_AT.getName(), @@ -1457,7 +1463,7 @@ private String buildJobOrderByString(final String orderByField, final String ord sortMethod = orderByMethod.toUpperCase(); } - return String.format("ORDER BY jobs.%s %s ", field, sortMethod); + return String.format("ORDER BY jobs.%s %s LIMIT %d OFFSET %d", field, sortMethod, limit, offset); } private enum OrderByField { diff --git a/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/JobCreator.java b/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/JobCreator.java index ef9c216b881..b6aff1a7dba 100644 --- a/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/JobCreator.java +++ b/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/JobCreator.java @@ -13,6 +13,7 @@ import io.airbyte.config.StandardSourceDefinition; import io.airbyte.config.StandardSync; import io.airbyte.config.StandardSyncOperation; +import io.airbyte.config.persistence.domain.StreamRefresh; import io.airbyte.protocol.models.StreamDescriptor; import jakarta.annotation.Nullable; import java.io.IOException; @@ -75,4 +76,19 @@ Optional createResetConnectionJob(DestinationConnection destination, UUID workspaceId) throws IOException; + Optional createRefreshConnection(final StandardSync standardSync, + final String sourceDockerImageName, + final Version sourceProtocolVersion, + final String destinationDockerImageName, + final Version destinationProtocolVersion, + final List standardSyncOperations, + @Nullable final JsonNode webhookOperationConfigs, + final StandardSourceDefinition sourceDefinition, + 
final StandardDestinationDefinition destinationDefinition, + final ActorDefinitionVersion sourceDefinitionVersion, + final ActorDefinitionVersion destinationDefinitionVersion, + final UUID workspaceId, + final List streamsToRefresh) + throws IOException; + } diff --git a/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/JobNotifier.java b/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/JobNotifier.java index 2f60e5be383..487cae93b0a 100644 --- a/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/JobNotifier.java +++ b/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/JobNotifier.java @@ -7,13 +7,13 @@ import static io.airbyte.metrics.lib.MetricTags.NOTIFICATION_CLIENT; import static io.airbyte.metrics.lib.MetricTags.NOTIFICATION_TRIGGER; -import com.google.common.base.Strings; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableMap.Builder; import io.airbyte.analytics.TrackingClient; import io.airbyte.commons.map.MoreMaps; import io.airbyte.config.ActorDefinitionVersion; import io.airbyte.config.DestinationConnection; +import io.airbyte.config.FailureReason; import io.airbyte.config.Notification.NotificationType; import io.airbyte.config.NotificationItem; import io.airbyte.config.NotificationSettings; @@ -36,6 +36,7 @@ import io.airbyte.notification.messages.SourceInfo; import io.airbyte.notification.messages.SyncSummary; import io.airbyte.notification.messages.WorkspaceInfo; +import io.airbyte.persistence.job.models.Attempt; import io.airbyte.persistence.job.models.Job; import io.airbyte.persistence.job.models.JobStatus; import io.airbyte.persistence.job.tracker.TrackingMetadata; @@ -84,18 +85,17 @@ public JobNotifier(final WebUrlHelper webUrlHelper, this.actorDefinitionVersionHelper = actorDefinitionVersionHelper; } - private void notifyJob(final String reason, final String action, final Job job, List attemptStats) { + 
private void notifyJob(final String action, final Job job, List attemptStats) { try { final UUID workspaceId = workspaceHelper.getWorkspaceForJobIdIgnoreExceptions(job.getId()); final StandardWorkspace workspace = configRepository.getStandardWorkspaceNoSecrets(workspaceId, true); - notifyJob(reason, action, job, attemptStats, workspace); + notifyJob(action, job, attemptStats, workspace); } catch (final Exception e) { LOGGER.error("Unable to read configuration:", e); } } - private void notifyJob(final String reason, - final String action, + private void notifyJob(final String action, final Job job, final List attempts, final StandardWorkspace workspace) { @@ -138,8 +138,8 @@ private void notifyJob(final String reason, } } } - final NotificationItem notificationItem = createAndSend(notificationSettings, action, connectionId, - destinationDefinition, job, reason, sourceDefinition, standardSync, workspace, source, destination, + final NotificationItem notificationItem = createAndSend(notificationSettings, action, + job, standardSync, workspace, source, destination, syncStats); if (notificationItem != null) { @@ -196,31 +196,6 @@ private void submitToMetricClient(final String action, final String notification metricTriggerAttribute); } - /** - * This method allows for the alert to be sent without the customerio configuration set in the - * database. - *

    - * This is only needed because there is no UI element to allow for users to create that - * configuration. - *

    - * Once that exists, this can be removed and we should be using `notifyJobByEmail`. The alert is - * sent to the email associated with the workspace. - * - * @param reason for notification - * @param action tracking action for telemetry - * @param job job notification is for - * @param attemptStats sync stats for each attempts - */ - public void notifyJobByEmail(final String reason, final String action, final Job job, List attemptStats) { - try { - final UUID workspaceId = workspaceHelper.getWorkspaceForJobIdIgnoreExceptions(job.getId()); - final StandardWorkspace workspace = configRepository.getStandardWorkspaceNoSecrets(workspaceId, true); - notifyJob(reason, action, job, attemptStats, workspace); - } catch (final Exception e) { - LOGGER.error("Unable to read configuration:", e); - } - } - private String getJobDescription(final Job job, final String reason) { final Instant jobStartedDate = Instant.ofEpochSecond(job.getStartedAtInSecond().orElse(job.getCreatedAtInSecond())); final DateTimeFormatter formatter = DateTimeFormatter.ofLocalizedDateTime(FormatStyle.FULL) @@ -235,20 +210,20 @@ private String getJobDescription(final Job job, final String reason) { formatter.format(jobStartedDate), durationString, reason); } - public void failJob(final String reason, final Job job, List attemptStats) { - notifyJob(reason, FAILURE_NOTIFICATION, job, attemptStats); + public void failJob(final Job job, List attemptStats) { + notifyJob(FAILURE_NOTIFICATION, job, attemptStats); } public void successJob(final Job job, List attemptStats) { - notifyJob(null, SUCCESS_NOTIFICATION, job, attemptStats); + notifyJob(SUCCESS_NOTIFICATION, job, attemptStats); } public void autoDisableConnection(final Job job, List attemptStats) { - notifyJob(null, CONNECTION_DISABLED_NOTIFICATION, job, attemptStats); + notifyJob(CONNECTION_DISABLED_NOTIFICATION, job, attemptStats); } public void autoDisableConnectionWarning(final Job job, List attemptStats) { - notifyJob(null, 
CONNECTION_DISABLED_WARNING_NOTIFICATION, job, attemptStats); + notifyJob(CONNECTION_DISABLED_WARNING_NOTIFICATION, job, attemptStats); } private void sendNotification(final NotificationItem notificationItem, @@ -276,23 +251,24 @@ private void sendNotification(final NotificationItem notificationItem, private NotificationItem createAndSend(final NotificationSettings notificationSettings, final String action, - final UUID connectionId, - final StandardDestinationDefinition destinationDefinition, final Job job, - final String reason, - final StandardSourceDefinition sourceDefinition, final StandardSync standardSync, final StandardWorkspace workspace, final SourceConnection source, final DestinationConnection destination, final SyncStats syncStats) { NotificationItem notificationItem = null; - final String sourceConnector = sourceDefinition.getName(); - final String destinationConnector = destinationDefinition.getName(); - final String failReason = Strings.isNullOrEmpty(reason) ? "" : String.format(", as the %s", reason); - final String jobDescription = getJobDescription(job, failReason); final UUID workspaceId = workspace.getWorkspaceId(); + // Error message we show in the notification is the first failure reason of the last attempt if + // available + // If it is not available, default to null + final String failureMessage = job.getLastAttempt() + .flatMap(Attempt::getFailureSummary) + .flatMap(s -> s.getFailures().stream().findFirst()) + .map(FailureReason::getExternalMessage) + .orElse(null); + SyncSummary.SyncSummaryBuilder summaryBuilder = SyncSummary.builder() .workspace(WorkspaceInfo.builder() .name(workspace.getName()).id(workspaceId).url(webUrlHelper.getWorkspaceUrl(workspaceId)).build()) @@ -308,7 +284,7 @@ private NotificationItem createAndSend(final NotificationSettings notificationSe .finishedAt(Instant.ofEpochSecond(job.getUpdatedAtInSecond())) .isSuccess(job.getStatus() == JobStatus.SUCCEEDED) .jobId(job.getId()) - .errorMessage(reason); + 
.errorMessage(failureMessage); if (syncStats != null) { long bytesEmitted = syncStats.getBytesEmitted() != null ? syncStats.getBytesEmitted() : 0; @@ -335,13 +311,11 @@ private NotificationItem createAndSend(final NotificationSettings notificationSe } else if (CONNECTION_DISABLED_NOTIFICATION.equalsIgnoreCase(action)) { notificationItem = notificationSettings.getSendOnSyncDisabled(); sendNotification(notificationItem, CONNECTION_DISABLED_NOTIFICATION, - (notificationClient) -> notificationClient.notifyConnectionDisabled(workspace.getEmail(), - sourceConnector, destinationConnector, jobDescription, workspace.getWorkspaceId(), connectionId)); + (notificationClient) -> notificationClient.notifyConnectionDisabled(summary, workspace.getEmail())); } else if (CONNECTION_DISABLED_WARNING_NOTIFICATION.equalsIgnoreCase(action)) { notificationItem = notificationSettings.getSendOnSyncDisabledWarning(); sendNotification(notificationItem, CONNECTION_DISABLED_WARNING_NOTIFICATION, - (notificationClient) -> notificationClient.notifyConnectionDisableWarning(workspace.getEmail(), - sourceConnector, destinationConnector, jobDescription, workspace.getWorkspaceId(), connectionId)); + (notificationClient) -> notificationClient.notifyConnectionDisableWarning(summary, workspace.getEmail())); } } else { LOGGER.warn("Unable to send notification: notification settings are not present."); diff --git a/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/JobPersistence.java b/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/JobPersistence.java index ef04209ce4a..64beb3c9d9e 100644 --- a/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/JobPersistence.java +++ b/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/JobPersistence.java @@ -417,6 +417,8 @@ List listJobStatusAndTimestampWithConnection(UUID con List getAttemptNormalizationStatusesForJob(final Long jobId) throws IOException; + void 
updateJobConfig(Long jobId, JobConfig config) throws IOException; + /** * Convenience POJO for various stats data structures. * diff --git a/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/WorkspaceHelper.java b/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/WorkspaceHelper.java index 3a913ab65b9..2e9e6fb5453 100644 --- a/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/WorkspaceHelper.java +++ b/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/WorkspaceHelper.java @@ -4,13 +4,14 @@ package io.airbyte.persistence.job; +import static io.airbyte.persistence.job.models.Job.REPLICATION_TYPES; + import com.google.common.base.Preconditions; import com.google.common.cache.CacheBuilder; import com.google.common.cache.CacheLoader; import com.google.common.cache.LoadingCache; import io.airbyte.commons.functional.CheckedSupplier; import io.airbyte.config.DestinationConnection; -import io.airbyte.config.JobConfig; import io.airbyte.config.SourceConnection; import io.airbyte.config.StandardSync; import io.airbyte.config.StandardSyncOperation; @@ -98,7 +99,7 @@ public UUID load(@NonNull final Long jobId) throws ConfigNotFoundException, IOEx if (job == null) { throw new ConfigNotFoundException(Job.class.toString(), jobId.toString()); } - if (job.getConfigType() == JobConfig.ConfigType.SYNC || job.getConfigType() == JobConfig.ConfigType.RESET_CONNECTION) { + if (REPLICATION_TYPES.contains(job.getConfigType())) { return getWorkspaceForConnectionIdIgnoreExceptions(UUID.fromString(job.getScope())); } else { throw new IllegalArgumentException("Only sync/reset jobs are associated with workspaces! 
A " + job.getConfigType() + " job was requested!"); diff --git a/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/errorreporter/JobErrorReporter.java b/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/errorreporter/JobErrorReporter.java index 55fa1800f84..8a0112735c8 100644 --- a/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/errorreporter/JobErrorReporter.java +++ b/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/errorreporter/JobErrorReporter.java @@ -56,7 +56,7 @@ public class JobErrorReporter { private static final String JOB_ID_KEY = "job_id"; private static final ImmutableSet UNSUPPORTED_FAILURETYPES = - ImmutableSet.of(FailureType.CONFIG_ERROR, FailureType.MANUAL_CANCELLATION); + ImmutableSet.of(FailureType.CONFIG_ERROR, FailureType.MANUAL_CANCELLATION, FailureType.TRANSIENT_ERROR); private final ConfigRepository configRepository; private final DeploymentMode deploymentMode; diff --git a/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/factory/DefaultSyncJobFactory.java b/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/factory/DefaultSyncJobFactory.java index 76dfe0aef70..77c984cd8f9 100644 --- a/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/factory/DefaultSyncJobFactory.java +++ b/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/factory/DefaultSyncJobFactory.java @@ -19,8 +19,10 @@ import io.airbyte.config.persistence.ConfigInjector; import io.airbyte.config.persistence.ConfigNotFoundException; import io.airbyte.config.persistence.ConfigRepository; +import io.airbyte.config.persistence.domain.StreamRefresh; import io.airbyte.persistence.job.DefaultJobCreator; import io.airbyte.persistence.job.WorkspaceHelper; +import io.airbyte.persistence.job.helper.model.JobCreatorInput; import io.airbyte.validation.json.JsonValidationException; 
import java.io.IOException; import java.util.List; @@ -56,66 +58,25 @@ public DefaultSyncJobFactory(final boolean connectorSpecificResourceDefaultsEnab } @Override - public Long create(final UUID connectionId) { + public Long createSync(final UUID connectionId) { try { - final StandardSync standardSync = configRepository.getStandardSync(connectionId); - final UUID workspaceId = workspaceHelper.getWorkspaceForSourceId(standardSync.getSourceId()); - final StandardWorkspace workspace = configRepository.getStandardWorkspaceNoSecrets(workspaceId, true); - final SourceConnection sourceConnection = configRepository.getSourceConnection(standardSync.getSourceId()); - final DestinationConnection destinationConnection = configRepository.getDestinationConnection(standardSync.getDestinationId()); - final JsonNode sourceConfiguration = oAuthConfigSupplier.injectSourceOAuthParameters( - sourceConnection.getSourceDefinitionId(), - sourceConnection.getSourceId(), - sourceConnection.getWorkspaceId(), - sourceConnection.getConfiguration()); - sourceConnection.withConfiguration(configInjector.injectConfig(sourceConfiguration, sourceConnection.getSourceDefinitionId())); - final JsonNode destinationConfiguration = oAuthConfigSupplier.injectDestinationOAuthParameters( - destinationConnection.getDestinationDefinitionId(), - destinationConnection.getDestinationId(), - destinationConnection.getWorkspaceId(), - destinationConnection.getConfiguration()); - destinationConnection - .withConfiguration(configInjector.injectConfig(destinationConfiguration, destinationConnection.getDestinationDefinitionId())); - final StandardSourceDefinition sourceDefinition = configRepository - .getStandardSourceDefinition(sourceConnection.getSourceDefinitionId()); - final StandardDestinationDefinition destinationDefinition = configRepository - .getStandardDestinationDefinition(destinationConnection.getDestinationDefinitionId()); - - final ActorDefinitionVersion sourceVersion = - 
actorDefinitionVersionHelper.getSourceVersion(sourceDefinition, workspaceId, standardSync.getSourceId()); - final ActorDefinitionVersion destinationVersion = - actorDefinitionVersionHelper.getDestinationVersion(destinationDefinition, workspaceId, standardSync.getDestinationId()); - - final String sourceImageName = sourceVersion.getDockerRepository() + ":" + sourceVersion.getDockerImageTag(); - final String destinationImageName = destinationVersion.getDockerRepository() + ":" + destinationVersion.getDockerImageTag(); - - final List standardSyncOperations = Lists.newArrayList(); - for (final var operationId : standardSync.getOperationIds()) { - final StandardSyncOperation standardSyncOperation = configRepository.getStandardSyncOperation(operationId); - standardSyncOperations.add(standardSyncOperation); - } - - // for OSS users, make it possible to ignore default actor-level resource requirements - if (!connectorSpecificResourceDefaultsEnabled) { - sourceDefinition.setResourceRequirements(null); - destinationDefinition.setResourceRequirements(null); - } + final JobCreatorInput jobCreatorInput = getJobCreatorInput(connectionId); return jobCreator.createSyncJob( - sourceConnection, - destinationConnection, - standardSync, - sourceImageName, - new Version(sourceVersion.getProtocolVersion()), - destinationImageName, - new Version(destinationVersion.getProtocolVersion()), - standardSyncOperations, - workspace.getWebhookOperationConfigs(), - sourceDefinition, - destinationDefinition, - sourceVersion, - destinationVersion, - workspace.getWorkspaceId()) + jobCreatorInput.getSource(), + jobCreatorInput.getDestination(), + jobCreatorInput.getStandardSync(), + jobCreatorInput.getSourceDockerImageName(), + jobCreatorInput.getSourceProtocolVersion(), + jobCreatorInput.getDestinationDockerImageName(), + jobCreatorInput.getDestinationProtocolVersion(), + jobCreatorInput.getStandardSyncOperations(), + jobCreatorInput.getWebhookOperationConfigs(), + 
jobCreatorInput.getSourceDefinition(), + jobCreatorInput.getDestinationDefinition(), + jobCreatorInput.getSourceDefinitionVersion(), + jobCreatorInput.getDestinationDefinitionVersion(), + jobCreatorInput.getWorkspaceId()) .orElseThrow(() -> new IllegalStateException("We shouldn't be trying to create a new sync job if there is one running already.")); } catch (final IOException | JsonValidationException | ConfigNotFoundException e) { @@ -123,4 +84,91 @@ public Long create(final UUID connectionId) { } } + @Override + public Long createRefresh(UUID connectionId, List streamsToRefresh) { + try { + final JobCreatorInput jobCreatorInput = getJobCreatorInput(connectionId); + + return jobCreator.createRefreshConnection( + jobCreatorInput.getStandardSync(), + jobCreatorInput.getSourceDockerImageName(), + jobCreatorInput.getSourceProtocolVersion(), + jobCreatorInput.getDestinationDockerImageName(), + jobCreatorInput.getDestinationProtocolVersion(), + jobCreatorInput.getStandardSyncOperations(), + jobCreatorInput.getWebhookOperationConfigs(), + jobCreatorInput.getSourceDefinition(), + jobCreatorInput.getDestinationDefinition(), + jobCreatorInput.getSourceDefinitionVersion(), + jobCreatorInput.getDestinationDefinitionVersion(), + jobCreatorInput.getWorkspaceId(), + streamsToRefresh) + .orElseThrow(() -> new IllegalStateException("We shouldn't be trying to create a new sync job if there is one running already.")); + + } catch (final IOException | JsonValidationException | ConfigNotFoundException e) { + throw new RuntimeException(e); + } + } + + private JobCreatorInput getJobCreatorInput(UUID connectionId) throws JsonValidationException, ConfigNotFoundException, IOException { + final StandardSync standardSync = configRepository.getStandardSync(connectionId); + final UUID workspaceId = workspaceHelper.getWorkspaceForSourceId(standardSync.getSourceId()); + final StandardWorkspace workspace = configRepository.getStandardWorkspaceNoSecrets(workspaceId, true); + final 
SourceConnection sourceConnection = configRepository.getSourceConnection(standardSync.getSourceId()); + final DestinationConnection destinationConnection = configRepository.getDestinationConnection(standardSync.getDestinationId()); + final JsonNode sourceConfiguration = oAuthConfigSupplier.injectSourceOAuthParameters( + sourceConnection.getSourceDefinitionId(), + sourceConnection.getSourceId(), + sourceConnection.getWorkspaceId(), + sourceConnection.getConfiguration()); + sourceConnection.withConfiguration(configInjector.injectConfig(sourceConfiguration, sourceConnection.getSourceDefinitionId())); + final JsonNode destinationConfiguration = oAuthConfigSupplier.injectDestinationOAuthParameters( + destinationConnection.getDestinationDefinitionId(), + destinationConnection.getDestinationId(), + destinationConnection.getWorkspaceId(), + destinationConnection.getConfiguration()); + destinationConnection + .withConfiguration(configInjector.injectConfig(destinationConfiguration, destinationConnection.getDestinationDefinitionId())); + final StandardSourceDefinition sourceDefinition = configRepository + .getStandardSourceDefinition(sourceConnection.getSourceDefinitionId()); + final StandardDestinationDefinition destinationDefinition = configRepository + .getStandardDestinationDefinition(destinationConnection.getDestinationDefinitionId()); + + final ActorDefinitionVersion sourceVersion = + actorDefinitionVersionHelper.getSourceVersion(sourceDefinition, workspaceId, standardSync.getSourceId()); + final ActorDefinitionVersion destinationVersion = + actorDefinitionVersionHelper.getDestinationVersion(destinationDefinition, workspaceId, standardSync.getDestinationId()); + + final String sourceImageName = sourceVersion.getDockerRepository() + ":" + sourceVersion.getDockerImageTag(); + final String destinationImageName = destinationVersion.getDockerRepository() + ":" + destinationVersion.getDockerImageTag(); + + final List standardSyncOperations = Lists.newArrayList(); + for (final 
var operationId : standardSync.getOperationIds()) { + final StandardSyncOperation standardSyncOperation = configRepository.getStandardSyncOperation(operationId); + standardSyncOperations.add(standardSyncOperation); + } + + // for OSS users, make it possible to ignore default actor-level resource requirements + if (!connectorSpecificResourceDefaultsEnabled) { + sourceDefinition.setResourceRequirements(null); + destinationDefinition.setResourceRequirements(null); + } + + return new JobCreatorInput( + sourceConnection, + destinationConnection, + standardSync, + sourceImageName, + new Version(sourceVersion.getProtocolVersion()), + destinationImageName, + new Version(destinationVersion.getProtocolVersion()), + standardSyncOperations, + workspace.getWebhookOperationConfigs(), + sourceDefinition, + destinationDefinition, + sourceVersion, + destinationVersion, + workspaceId); + } + } diff --git a/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/factory/SyncJobFactory.java b/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/factory/SyncJobFactory.java index 97c2cae991e..b41869aa6ec 100644 --- a/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/factory/SyncJobFactory.java +++ b/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/factory/SyncJobFactory.java @@ -4,6 +4,8 @@ package io.airbyte.persistence.job.factory; +import io.airbyte.config.persistence.domain.StreamRefresh; +import java.util.List; import java.util.UUID; /** @@ -17,6 +19,14 @@ public interface SyncJobFactory { * @param connectionId connection id * @return job id */ - Long create(UUID connectionId); + Long createSync(UUID connectionId); + + /** + * Create refresh job for given connection id. 
+ * + * @param connectionId connection id + * @return job id + */ + Long createRefresh(UUID connectionId, List streamsToRefresh); } diff --git a/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/models/Job.java b/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/models/Job.java index ebef2cfbeb7..1f38331bf0f 100644 --- a/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/models/Job.java +++ b/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/models/Job.java @@ -22,7 +22,8 @@ */ public class Job { - public static final Set REPLICATION_TYPES = EnumSet.of(ConfigType.SYNC, ConfigType.RESET_CONNECTION); + public static final Set REPLICATION_TYPES = EnumSet.of(ConfigType.SYNC, ConfigType.RESET_CONNECTION, ConfigType.REFRESH); + public static final Set SYNC_REPLICATION_TYPES = EnumSet.of(ConfigType.SYNC, ConfigType.REFRESH); private final long id; private final ConfigType configType; @@ -187,19 +188,6 @@ public Optional getLastFailedAttempt() { .findFirst(); } - /** - * Get the last attempt by created_at for the job that had an output. - * - * @return the last attempt. empty optional, if there have been no attempts with outputs. - */ - public Optional getLastAttemptWithOutput() { - return getAttempts() - .stream() - .sorted(Comparator.comparing(Attempt::getCreatedAtInSecond).reversed()) - .filter(a -> a.getOutput().isPresent() && a.getOutput().get().getSync() != null && a.getOutput().get().getSync().getState() != null) - .findFirst(); - } - /** * Get the last attempt by created_at for the job. 
* diff --git a/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/tracker/JobTracker.java b/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/tracker/JobTracker.java index cf6a6a379a0..126394ad101 100644 --- a/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/tracker/JobTracker.java +++ b/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/tracker/JobTracker.java @@ -4,6 +4,8 @@ package io.airbyte.persistence.job.tracker; +import static io.airbyte.persistence.job.models.Job.REPLICATION_TYPES; +import static io.airbyte.persistence.job.models.Job.SYNC_REPLICATION_TYPES; import static java.util.Collections.emptyMap; import static java.util.Collections.singletonMap; @@ -52,6 +54,7 @@ import java.util.Map; import java.util.Map.Entry; import java.util.Optional; +import java.util.Set; import java.util.UUID; /** @@ -211,7 +214,7 @@ public void trackDiscover(final UUID jobId, public void trackSync(final Job job, final JobState jobState) { Exceptions.swallow(() -> { final ConfigType configType = job.getConfigType(); - final boolean allowedJob = configType == ConfigType.SYNC || configType == ConfigType.RESET_CONNECTION; + final boolean allowedJob = REPLICATION_TYPES.contains(configType); Preconditions.checkArgument(allowedJob, "Job type " + configType + " is not allowed!"); final long jobId = job.getId(); final Optional lastAttempt = job.getLastAttempt(); @@ -227,7 +230,12 @@ public void trackSync(final Job job, final JobState jobState) { final ActorDefinitionVersion destinationVersion = actorDefinitionVersionHelper.getDestinationVersion(destinationDefinition, workspaceId, standardSync.getDestinationId()); - final Map jobMetadata = generateJobMetadata(String.valueOf(jobId), configType, job.getAttemptsCount()); + final List jobsHistory = jobPersistence.listJobsIncludingId( + Set.of(ConfigType.SYNC, ConfigType.RESET_CONNECTION, ConfigType.REFRESH), connectionId.toString(), 
jobId, 2); + + final Optional previousJob = jobsHistory.stream().filter(jobHistory -> jobHistory.getId() != jobId).findFirst(); + + final Map jobMetadata = generateJobMetadata(String.valueOf(jobId), configType, job.getAttemptsCount(), previousJob); final Map jobAttemptMetadata = generateJobAttemptMetadata(jobId, jobState); final Map sourceDefMetadata = generateSourceDefinitionMetadata(sourceDefinition, sourceVersion); final Map destinationDefMetadata = generateDestinationDefinitionMetadata(destinationDefinition, destinationVersion); @@ -276,7 +284,7 @@ public void trackSyncForInternalFailure(final Long jobId, final ActorDefinitionVersion destinationVersion = actorDefinitionVersionHelper.getDestinationVersion(destinationDefinition, workspaceId, standardSync.getDestinationId()); - final Map jobMetadata = generateJobMetadata(String.valueOf(jobId), null, attempts); + final Map jobMetadata = generateJobMetadata(String.valueOf(jobId), null, attempts, Optional.empty()); final Map jobAttemptMetadata = generateJobAttemptMetadata(jobId, jobState); final Map sourceDefMetadata = generateSourceDefinitionMetadata(sourceDefinition, sourceVersion); final Map destinationDefMetadata = generateDestinationDefinitionMetadata(destinationDefinition, destinationVersion); @@ -303,7 +311,7 @@ private Map generateSyncConfigMetadata( @Nullable final AttemptSyncConfig attemptSyncConfig, final JsonNode sourceConfigSchema, final JsonNode destinationConfigSchema) { - if (config.getConfigType() == ConfigType.SYNC) { + if (SYNC_REPLICATION_TYPES.contains(config.getConfigType())) { final Map actorConfigMetadata = new HashMap<>(); if (attemptSyncConfig != null) { @@ -317,7 +325,15 @@ private Map generateSyncConfigMetadata( mapToJsonString(configToMetadata(destinationConfiguration, destinationConfigSchema))); } - final Map catalogMetadata = getCatalogMetadata(config.getSync().getConfiguredAirbyteCatalog()); + final Map catalogMetadata; + if (config.getConfigType() == ConfigType.SYNC) { + 
catalogMetadata = getCatalogMetadata(config.getSync().getConfiguredAirbyteCatalog()); + } else if (config.getConfigType() == ConfigType.REFRESH) { + catalogMetadata = getCatalogMetadata(config.getRefresh().getConfiguredAirbyteCatalog()); + } else { + // This is not possible + throw new IllegalStateException("This should not be reacheable"); + } return MoreMaps.merge(actorConfigMetadata, catalogMetadata); } else { return emptyMap(); @@ -498,17 +514,25 @@ private Map generateSourceDefinitionMetadata(final StandardSourc } private Map generateJobMetadata(final String jobId, final ConfigType configType) { - return generateJobMetadata(jobId, configType, 0); + return generateJobMetadata(jobId, configType, 0, Optional.empty()); } - private Map generateJobMetadata(final String jobId, final @Nullable ConfigType configType, final int attempt) { + @VisibleForTesting + Map generateJobMetadata(final String jobId, + final @Nullable ConfigType configType, + final int attempt, + final Optional previousJob) { final Map metadata = new HashMap<>(); if (configType != null) { metadata.put("job_type", configType); } metadata.put("job_id", jobId); metadata.put("attempt_id", attempt); - + previousJob.ifPresent(job -> { + if (job.getConfigType() != null) { + metadata.put("previous_job_type", job.getConfigType()); + } + }); return Collections.unmodifiableMap(metadata); } diff --git a/airbyte-persistence/job-persistence/src/main/kotlin/io/airbyte/persistence/job/helper/model/JobCreatorInput.kt b/airbyte-persistence/job-persistence/src/main/kotlin/io/airbyte/persistence/job/helper/model/JobCreatorInput.kt new file mode 100644 index 00000000000..8665dd9dc8d --- /dev/null +++ b/airbyte-persistence/job-persistence/src/main/kotlin/io/airbyte/persistence/job/helper/model/JobCreatorInput.kt @@ -0,0 +1,29 @@ +package io.airbyte.persistence.job.helper.model + +import com.fasterxml.jackson.databind.JsonNode +import io.airbyte.commons.version.Version +import io.airbyte.config.ActorDefinitionVersion 
+import io.airbyte.config.DestinationConnection +import io.airbyte.config.SourceConnection +import io.airbyte.config.StandardDestinationDefinition +import io.airbyte.config.StandardSourceDefinition +import io.airbyte.config.StandardSync +import io.airbyte.config.StandardSyncOperation +import java.util.UUID + +data class JobCreatorInput( + val source: SourceConnection, + val destination: DestinationConnection, + val standardSync: StandardSync, + val sourceDockerImageName: String, + val sourceProtocolVersion: Version, + val destinationDockerImageName: String, + val destinationProtocolVersion: Version, + val standardSyncOperations: List, + val webhookOperationConfigs: JsonNode?, + val sourceDefinition: StandardSourceDefinition, + val destinationDefinition: StandardDestinationDefinition, + val sourceDefinitionVersion: ActorDefinitionVersion, + val destinationDefinitionVersion: ActorDefinitionVersion, + val workspaceId: UUID, +) diff --git a/airbyte-persistence/job-persistence/src/test/java/io/airbyte/persistence/job/DefaultJobCreatorTest.java b/airbyte-persistence/job-persistence/src/test/java/io/airbyte/persistence/job/DefaultJobCreatorTest.java index d973642571f..62d009e2acb 100644 --- a/airbyte-persistence/job-persistence/src/test/java/io/airbyte/persistence/job/DefaultJobCreatorTest.java +++ b/airbyte-persistence/job-persistence/src/test/java/io/airbyte/persistence/job/DefaultJobCreatorTest.java @@ -5,8 +5,11 @@ package io.airbyte.persistence.job; import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyLong; +import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; @@ -28,6 +31,7 @@ import io.airbyte.config.JobTypeResourceLimit.JobType; 
import io.airbyte.config.OperatorNormalization; import io.airbyte.config.OperatorNormalization.Option; +import io.airbyte.config.RefreshConfig; import io.airbyte.config.ResetSourceConfiguration; import io.airbyte.config.ResourceRequirements; import io.airbyte.config.ResourceRequirementsType; @@ -38,13 +42,24 @@ import io.airbyte.config.StandardSync; import io.airbyte.config.StandardSyncOperation; import io.airbyte.config.StandardSyncOperation.OperatorType; +import io.airbyte.config.StateType; +import io.airbyte.config.StateWrapper; import io.airbyte.config.SyncResourceRequirements; import io.airbyte.config.SyncResourceRequirementsKey; +import io.airbyte.config.persistence.RefreshJobStateUpdater; +import io.airbyte.config.persistence.StatePersistence; +import io.airbyte.config.persistence.StreamRefreshesRepository; +import io.airbyte.config.persistence.domain.StreamRefresh; +import io.airbyte.config.persistence.helper.CatalogGenerationSetter; +import io.airbyte.config.persistence.helper.GenerationBumper; import io.airbyte.config.provider.ResourceRequirementsProvider; +import io.airbyte.featureflag.ActivateRefreshes; import io.airbyte.featureflag.DestResourceOverrides; +import io.airbyte.featureflag.FeatureFlagClient; import io.airbyte.featureflag.OrchestratorResourceOverrides; import io.airbyte.featureflag.SourceResourceOverrides; import io.airbyte.featureflag.TestClient; +import io.airbyte.featureflag.UseResourceRequirementsVariant; import io.airbyte.protocol.models.CatalogHelpers; import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; import io.airbyte.protocol.models.ConfiguredAirbyteStream; @@ -99,11 +114,16 @@ class DefaultJobCreatorTest { private static final UUID WORKSPACE_ID = UUID.randomUUID(); private JobPersistence jobPersistence; + private StatePersistence statePersistence; + private RefreshJobStateUpdater refreshJobStateUpdater; + private StreamRefreshesRepository streamRefreshesRepository; private JobCreator jobCreator; private 
ResourceRequirementsProvider resourceRequirementsProvider; private ResourceRequirements workerResourceRequirements; private ResourceRequirements sourceResourceRequirements; private ResourceRequirements destResourceRequirements; + private GenerationBumper generationBumper; + private CatalogGenerationSetter catalogGenerationSetter; private static final JsonNode PERSISTED_WEBHOOK_CONFIGS; @@ -179,9 +199,10 @@ class DefaultJobCreatorTest { String.format("{\"webhookConfigs\": [{\"id\": \"%s\", \"name\": \"%s\", \"authToken\": {\"_secret\": \"a-secret_v1\"}}]}", WEBHOOK_CONFIG_ID, WEBHOOK_NAME)); - STANDARD_SOURCE_DEFINITION = new StandardSourceDefinition().withCustom(false); - STANDARD_SOURCE_DEFINITION_WITH_SOURCE_TYPE = new StandardSourceDefinition().withSourceType(SourceType.DATABASE).withCustom(false); - STANDARD_DESTINATION_DEFINITION = new StandardDestinationDefinition().withCustom(false); + STANDARD_SOURCE_DEFINITION = new StandardSourceDefinition().withSourceDefinitionId(UUID.randomUUID()).withCustom(false); + STANDARD_SOURCE_DEFINITION_WITH_SOURCE_TYPE = + new StandardSourceDefinition().withSourceDefinitionId(UUID.randomUUID()).withSourceType(SourceType.DATABASE).withCustom(false); + STANDARD_DESTINATION_DEFINITION = new StandardDestinationDefinition().withDestinationDefinitionId(UUID.randomUUID()).withCustom(false); SOURCE_DEFINITION_VERSION = new ActorDefinitionVersion().withVersionId(UUID.randomUUID()); DESTINATION_DEFINITION_VERSION = new ActorDefinitionVersion().withVersionId(UUID.randomUUID()); @@ -190,6 +211,9 @@ class DefaultJobCreatorTest { @BeforeEach void setup() { jobPersistence = mock(JobPersistence.class); + statePersistence = mock(StatePersistence.class); + refreshJobStateUpdater = new RefreshJobStateUpdater(statePersistence); + streamRefreshesRepository = mock(StreamRefreshesRepository.class); workerResourceRequirements = new ResourceRequirements() .withCpuLimit("0.2") .withCpuRequest("0.2") @@ -208,49 +232,174 @@ void setup() { 
resourceRequirementsProvider = mock(ResourceRequirementsProvider.class); when(resourceRequirementsProvider.getResourceRequirements(any(), any(), any())) .thenReturn(workerResourceRequirements); - jobCreator = new DefaultJobCreator(jobPersistence, resourceRequirementsProvider, new TestClient()); + generationBumper = mock(GenerationBumper.class); + catalogGenerationSetter = mock(CatalogGenerationSetter.class); + jobCreator = + new DefaultJobCreator(jobPersistence, resourceRequirementsProvider, new TestClient(), generationBumper, statePersistence, + refreshJobStateUpdater, + streamRefreshesRepository); + } + + @Test + void testCreateRefreshJob() throws IOException { + final String streamToRefresh = "name"; + final String streamNamespace = "namespace"; + + final FeatureFlagClient mFeatureFlagClient = mock(TestClient.class); + when(mFeatureFlagClient.boolVariation(eq(ActivateRefreshes.INSTANCE), any())).thenReturn(true); + when(mFeatureFlagClient.stringVariation(eq(UseResourceRequirementsVariant.INSTANCE), any())).thenReturn("default"); + when(jobPersistence.enqueueJob(any(), any())).thenReturn(Optional.of(1L)); + + final StateWrapper stateWrapper = new StateWrapper().withStateType(StateType.STREAM) + .withStateMessages(List.of()); + + when(statePersistence.getCurrentState(STANDARD_SYNC.getConnectionId())).thenReturn(Optional.of(stateWrapper)); + + jobCreator = + new DefaultJobCreator(jobPersistence, resourceRequirementsProvider, mFeatureFlagClient, generationBumper, + statePersistence, + refreshJobStateUpdater, + streamRefreshesRepository); + + final Optional expectedSourceType = Optional.of("database"); + final ResourceRequirements destStderrResourceRequirements = new ResourceRequirements().withCpuLimit("10"); + final ResourceRequirements destStdinResourceRequirements = new ResourceRequirements().withCpuLimit("11"); + final ResourceRequirements destStdoutResourceRequirements = new ResourceRequirements().withCpuLimit("12"); + final ResourceRequirements 
heartbeatResourceRequirements = new ResourceRequirements().withCpuLimit("13"); + final ResourceRequirements srcStderrResourceRequirements = new ResourceRequirements().withCpuLimit("14"); + final ResourceRequirements srcStdoutResourceRequirements = new ResourceRequirements().withCpuLimit("14"); + + mockResourcesRequirement(expectedSourceType, + destStderrResourceRequirements, + destStdinResourceRequirements, + destStdoutResourceRequirements, + heartbeatResourceRequirements, + srcStderrResourceRequirements, + srcStdoutResourceRequirements); + + final SyncResourceRequirements expectedSyncResourceRequirements = getExpectedResourcesRequirement(destStderrResourceRequirements, + destStdinResourceRequirements, + destStdoutResourceRequirements, + heartbeatResourceRequirements, + srcStderrResourceRequirements, + srcStdoutResourceRequirements); + + final RefreshConfig refreshConfig = getRefreshConfig(expectedSyncResourceRequirements, List.of( + new StreamDescriptor().withName(streamToRefresh).withNamespace(streamNamespace))); + + final JobConfig jobConfig = new JobConfig() + .withConfigType(ConfigType.REFRESH) + .withRefresh(refreshConfig); + + final String expectedScope = STANDARD_SYNC.getConnectionId().toString(); + when(jobPersistence.enqueueJob(expectedScope, jobConfig)).thenReturn(Optional.of(JOB_ID)); + + List refreshes = + List.of(new StreamRefresh(UUID.randomUUID(), STANDARD_SYNC.getConnectionId(), streamToRefresh, streamNamespace, null)); + List refreshesSD = + List.of(new StreamDescriptor().withName(streamToRefresh).withNamespace(streamNamespace)); + + when(catalogGenerationSetter.updateCatalogWithGenerationAndSyncInformation(any(), anyLong(), any(), any())) + .thenReturn(STANDARD_SYNC.getCatalog()); + + jobCreator.createRefreshConnection( + STANDARD_SYNC, + SOURCE_IMAGE_NAME, + SOURCE_PROTOCOL_VERSION, + DESTINATION_IMAGE_NAME, + DESTINATION_PROTOCOL_VERSION, + List.of(STANDARD_SYNC_OPERATION), + PERSISTED_WEBHOOK_CONFIGS, + 
STANDARD_SOURCE_DEFINITION_WITH_SOURCE_TYPE, + STANDARD_DESTINATION_DEFINITION, + SOURCE_DEFINITION_VERSION, + DESTINATION_DEFINITION_VERSION, + WORKSPACE_ID, + refreshes); + + verify(jobPersistence).enqueueJob(expectedScope, jobConfig); + verify(generationBumper).updateGenerationForStreams(STANDARD_SYNC.getConnectionId(), JOB_ID, refreshes); + + final StateWrapper expected = + new StateWrapper().withStateType(StateType.STREAM); + + verify(statePersistence).updateOrCreateState(STANDARD_SYNC.getConnectionId(), expected); + verify(streamRefreshesRepository).deleteByConnectionIdAndStreamNameAndStreamNamespace(STANDARD_SYNC.getConnectionId(), streamToRefresh, + streamNamespace); + } + + private static RefreshConfig getRefreshConfig(final SyncResourceRequirements expectedSyncResourceRequirements, + final List streamToRefresh) { + return new RefreshConfig() + .withNamespaceDefinition(STANDARD_SYNC.getNamespaceDefinition()) + .withNamespaceFormat(STANDARD_SYNC.getNamespaceFormat()) + .withPrefix(STANDARD_SYNC.getPrefix()) + .withSourceDockerImage(SOURCE_IMAGE_NAME) + .withSourceProtocolVersion(SOURCE_PROTOCOL_VERSION) + .withDestinationDockerImage(DESTINATION_IMAGE_NAME) + .withDestinationProtocolVersion(DESTINATION_PROTOCOL_VERSION) + .withConfiguredAirbyteCatalog(STANDARD_SYNC.getCatalog()) + .withOperationSequence(List.of(STANDARD_SYNC_OPERATION)) + .withSyncResourceRequirements(expectedSyncResourceRequirements) + .withWebhookOperationConfigs(PERSISTED_WEBHOOK_CONFIGS) + .withIsSourceCustomConnector(false) + .withIsDestinationCustomConnector(false) + .withWorkspaceId(WORKSPACE_ID) + .withSourceDefinitionVersionId(SOURCE_DEFINITION_VERSION.getVersionId()) + .withDestinationDefinitionVersionId(DESTINATION_DEFINITION_VERSION.getVersionId()) + .withStreamsToRefresh(streamToRefresh); + } + + @Test + void testFailToCreateRefreshIfNotAllowed() { + final FeatureFlagClient mFeatureFlagClient = mock(TestClient.class); + 
when(mFeatureFlagClient.boolVariation(eq(ActivateRefreshes.INSTANCE), any())).thenReturn(false); + jobCreator = + new DefaultJobCreator(jobPersistence, resourceRequirementsProvider, mFeatureFlagClient, generationBumper, + statePersistence, + refreshJobStateUpdater, + streamRefreshesRepository); + + assertThrows(IllegalStateException.class, () -> jobCreator.createRefreshConnection( + STANDARD_SYNC, + SOURCE_IMAGE_NAME, + SOURCE_PROTOCOL_VERSION, + DESTINATION_IMAGE_NAME, + DESTINATION_PROTOCOL_VERSION, + List.of(STANDARD_SYNC_OPERATION), + PERSISTED_WEBHOOK_CONFIGS, + STANDARD_SOURCE_DEFINITION_WITH_SOURCE_TYPE, + STANDARD_DESTINATION_DEFINITION, + SOURCE_DEFINITION_VERSION, + DESTINATION_DEFINITION_VERSION, + WORKSPACE_ID, + List.of())); } @Test void testCreateSyncJob() throws IOException { final Optional expectedSourceType = Optional.of("database"); - when(resourceRequirementsProvider.getResourceRequirements(ResourceRequirementsType.ORCHESTRATOR, expectedSourceType, DEFAULT_VARIANT)) - .thenReturn(workerResourceRequirements); - when(resourceRequirementsProvider.getResourceRequirements(ResourceRequirementsType.SOURCE, expectedSourceType, DEFAULT_VARIANT)) - .thenReturn(sourceResourceRequirements); - when(resourceRequirementsProvider.getResourceRequirements(ResourceRequirementsType.DESTINATION, expectedSourceType, DEFAULT_VARIANT)) - .thenReturn(destResourceRequirements); - // More explicit resource requirements to verify data mapping final ResourceRequirements destStderrResourceRequirements = new ResourceRequirements().withCpuLimit("10"); - when(resourceRequirementsProvider.getResourceRequirements(ResourceRequirementsType.DESTINATION_STDERR, expectedSourceType, DEFAULT_VARIANT)) - .thenReturn(destStderrResourceRequirements); final ResourceRequirements destStdinResourceRequirements = new ResourceRequirements().withCpuLimit("11"); - when(resourceRequirementsProvider.getResourceRequirements(ResourceRequirementsType.DESTINATION_STDIN, expectedSourceType, 
DEFAULT_VARIANT)) - .thenReturn(destStdinResourceRequirements); final ResourceRequirements destStdoutResourceRequirements = new ResourceRequirements().withCpuLimit("12"); - when(resourceRequirementsProvider.getResourceRequirements(ResourceRequirementsType.DESTINATION_STDOUT, expectedSourceType, DEFAULT_VARIANT)) - .thenReturn(destStdoutResourceRequirements); final ResourceRequirements heartbeatResourceRequirements = new ResourceRequirements().withCpuLimit("13"); - when(resourceRequirementsProvider.getResourceRequirements(ResourceRequirementsType.HEARTBEAT, expectedSourceType, DEFAULT_VARIANT)) - .thenReturn(heartbeatResourceRequirements); final ResourceRequirements srcStderrResourceRequirements = new ResourceRequirements().withCpuLimit("14"); - when(resourceRequirementsProvider.getResourceRequirements(ResourceRequirementsType.SOURCE_STDERR, expectedSourceType, DEFAULT_VARIANT)) - .thenReturn(srcStderrResourceRequirements); final ResourceRequirements srcStdoutResourceRequirements = new ResourceRequirements().withCpuLimit("14"); - when(resourceRequirementsProvider.getResourceRequirements(ResourceRequirementsType.SOURCE_STDOUT, expectedSourceType, DEFAULT_VARIANT)) - .thenReturn(srcStdoutResourceRequirements); - final SyncResourceRequirements expectedSyncResourceRequirements = new SyncResourceRequirements() - .withConfigKey(new SyncResourceRequirementsKey().withVariant(DEFAULT_VARIANT).withSubType("database")) - .withDestination(destResourceRequirements) - .withDestinationStdErr(destStderrResourceRequirements) - .withDestinationStdIn(destStdinResourceRequirements) - .withDestinationStdOut(destStdoutResourceRequirements) - .withOrchestrator(workerResourceRequirements) - .withHeartbeat(heartbeatResourceRequirements) - .withSource(sourceResourceRequirements) - .withSourceStdErr(srcStderrResourceRequirements) - .withSourceStdOut(srcStdoutResourceRequirements); + mockResourcesRequirement(expectedSourceType, + destStderrResourceRequirements, + destStdinResourceRequirements, 
+ destStdoutResourceRequirements, + heartbeatResourceRequirements, + srcStderrResourceRequirements, + srcStdoutResourceRequirements); + + final SyncResourceRequirements expectedSyncResourceRequirements = getExpectedResourcesRequirement(destStderrResourceRequirements, + destStdinResourceRequirements, + destStdoutResourceRequirements, + heartbeatResourceRequirements, + srcStderrResourceRequirements, + srcStdoutResourceRequirements); final JobSyncConfig jobSyncConfig = new JobSyncConfig() .withNamespaceDefinition(STANDARD_SYNC.getNamespaceDefinition()) @@ -294,6 +443,53 @@ void testCreateSyncJob() throws IOException { verify(jobPersistence).enqueueJob(expectedScope, jobConfig); } + private void mockResourcesRequirement(final Optional expectedSourceType, + final ResourceRequirements destStderrResourceRequirements, + final ResourceRequirements destStdinResourceRequirements, + final ResourceRequirements destStdoutResourceRequirements, + final ResourceRequirements heartbeatResourceRequirements, + final ResourceRequirements srcStderrResourceRequirements, + final ResourceRequirements srcStdoutResourceRequirements) { + when(resourceRequirementsProvider.getResourceRequirements(ResourceRequirementsType.ORCHESTRATOR, expectedSourceType, DEFAULT_VARIANT)) + .thenReturn(workerResourceRequirements); + when(resourceRequirementsProvider.getResourceRequirements(ResourceRequirementsType.SOURCE, expectedSourceType, DEFAULT_VARIANT)) + .thenReturn(sourceResourceRequirements); + when(resourceRequirementsProvider.getResourceRequirements(ResourceRequirementsType.DESTINATION, expectedSourceType, DEFAULT_VARIANT)) + .thenReturn(destResourceRequirements); + // More explicit resource requirements to verify data mapping + when(resourceRequirementsProvider.getResourceRequirements(ResourceRequirementsType.DESTINATION_STDERR, expectedSourceType, DEFAULT_VARIANT)) + .thenReturn(destStderrResourceRequirements); + 
when(resourceRequirementsProvider.getResourceRequirements(ResourceRequirementsType.DESTINATION_STDIN, expectedSourceType, DEFAULT_VARIANT)) + .thenReturn(destStdinResourceRequirements); + when(resourceRequirementsProvider.getResourceRequirements(ResourceRequirementsType.DESTINATION_STDOUT, expectedSourceType, DEFAULT_VARIANT)) + .thenReturn(destStdoutResourceRequirements); + when(resourceRequirementsProvider.getResourceRequirements(ResourceRequirementsType.HEARTBEAT, expectedSourceType, DEFAULT_VARIANT)) + .thenReturn(heartbeatResourceRequirements); + when(resourceRequirementsProvider.getResourceRequirements(ResourceRequirementsType.SOURCE_STDERR, expectedSourceType, DEFAULT_VARIANT)) + .thenReturn(srcStderrResourceRequirements); + when(resourceRequirementsProvider.getResourceRequirements(ResourceRequirementsType.SOURCE_STDOUT, expectedSourceType, DEFAULT_VARIANT)) + .thenReturn(srcStdoutResourceRequirements); + } + + private SyncResourceRequirements getExpectedResourcesRequirement(final ResourceRequirements destStderrResourceRequirements, + final ResourceRequirements destStdinResourceRequirements, + final ResourceRequirements destStdoutResourceRequirements, + final ResourceRequirements heartbeatResourceRequirements, + final ResourceRequirements srcStderrResourceRequirements, + final ResourceRequirements srcStdoutResourceRequirements) { + return new SyncResourceRequirements() + .withConfigKey(new SyncResourceRequirementsKey().withVariant(DEFAULT_VARIANT).withSubType("database")) + .withDestination(destResourceRequirements) + .withDestinationStdErr(destStderrResourceRequirements) + .withDestinationStdIn(destStdinResourceRequirements) + .withDestinationStdOut(destStdoutResourceRequirements) + .withOrchestrator(workerResourceRequirements) + .withHeartbeat(heartbeatResourceRequirements) + .withSource(sourceResourceRequirements) + .withSourceStdErr(srcStderrResourceRequirements) + .withSourceStdOut(srcStdoutResourceRequirements); + } + @Test void 
testCreateSyncJobEnsureNoQueuing() throws IOException { final JobSyncConfig jobSyncConfig = new JobSyncConfig() @@ -475,9 +671,11 @@ void testCreateSyncJobSourceAndDestinationResourceReqs() throws IOException { DESTINATION_PROTOCOL_VERSION, List.of(STANDARD_SYNC_OPERATION), null, - new StandardSourceDefinition().withResourceRequirements(new ActorDefinitionResourceRequirements().withDefault(sourceResourceRequirements)), - new StandardDestinationDefinition().withResourceRequirements(new ActorDefinitionResourceRequirements().withJobSpecific(List.of( - new JobTypeResourceLimit().withJobType(JobType.SYNC).withResourceRequirements(destResourceRequirements)))), + new StandardSourceDefinition().withSourceDefinitionId(UUID.randomUUID()) + .withResourceRequirements(new ActorDefinitionResourceRequirements().withDefault(sourceResourceRequirements)), + new StandardDestinationDefinition().withDestinationDefinitionId(UUID.randomUUID()) + .withResourceRequirements(new ActorDefinitionResourceRequirements().withJobSpecific(List.of( + new JobTypeResourceLimit().withJobType(JobType.SYNC).withResourceRequirements(destResourceRequirements)))), SOURCE_DEFINITION_VERSION, DESTINATION_DEFINITION_VERSION, WORKSPACE_ID); @@ -548,7 +746,10 @@ void testDestinationResourceReqsOverrides(final String cpuReqOverride, .withMemoryRequest("800Mi"); final var jobCreator = new DefaultJobCreator(jobPersistence, resourceRequirementsProvider, - new TestClient(Map.of(DestResourceOverrides.INSTANCE.getKey(), Jsons.serialize(overrides)))); + new TestClient(Map.of(DestResourceOverrides.INSTANCE.getKey(), Jsons.serialize(overrides))), generationBumper, + statePersistence, + refreshJobStateUpdater, + streamRefreshesRepository); jobCreator.createSyncJob( SOURCE_CONNECTION, @@ -560,9 +761,11 @@ void testDestinationResourceReqsOverrides(final String cpuReqOverride, DESTINATION_PROTOCOL_VERSION, List.of(STANDARD_SYNC_OPERATION), null, - new StandardSourceDefinition().withResourceRequirements(new 
ActorDefinitionResourceRequirements().withDefault(sourceResourceRequirements)), - new StandardDestinationDefinition().withResourceRequirements(new ActorDefinitionResourceRequirements().withJobSpecific(List.of( - new JobTypeResourceLimit().withJobType(JobType.SYNC).withResourceRequirements(originalReqs)))), + new StandardSourceDefinition().withSourceDefinitionId(UUID.randomUUID()) + .withResourceRequirements(new ActorDefinitionResourceRequirements().withDefault(sourceResourceRequirements)), + new StandardDestinationDefinition().withDestinationDefinitionId(UUID.randomUUID()) + .withResourceRequirements(new ActorDefinitionResourceRequirements().withJobSpecific(List.of( + new JobTypeResourceLimit().withJobType(JobType.SYNC).withResourceRequirements(originalReqs)))), SOURCE_DEFINITION_VERSION, DESTINATION_DEFINITION_VERSION, WORKSPACE_ID); @@ -612,7 +815,10 @@ void testOrchestratorResourceReqsOverrides(final String cpuReqOverride, .withMemoryRequest("800Mi"); final var jobCreator = new DefaultJobCreator(jobPersistence, resourceRequirementsProvider, - new TestClient(Map.of(OrchestratorResourceOverrides.INSTANCE.getKey(), Jsons.serialize(overrides)))); + new TestClient(Map.of(OrchestratorResourceOverrides.INSTANCE.getKey(), Jsons.serialize(overrides))), generationBumper, + statePersistence, + refreshJobStateUpdater, + streamRefreshesRepository); final var standardSync = new StandardSync() .withConnectionId(UUID.randomUUID()) @@ -637,8 +843,10 @@ void testOrchestratorResourceReqsOverrides(final String cpuReqOverride, DESTINATION_PROTOCOL_VERSION, List.of(STANDARD_SYNC_OPERATION), null, - new StandardSourceDefinition().withResourceRequirements(new ActorDefinitionResourceRequirements().withDefault(sourceResourceRequirements)), - new StandardDestinationDefinition().withResourceRequirements(new ActorDefinitionResourceRequirements().withDefault(destResourceRequirements)), + new StandardSourceDefinition().withSourceDefinitionId(UUID.randomUUID()) + .withResourceRequirements(new 
ActorDefinitionResourceRequirements().withDefault(sourceResourceRequirements)), + new StandardDestinationDefinition().withDestinationDefinitionId(UUID.randomUUID()) + .withResourceRequirements(new ActorDefinitionResourceRequirements().withDefault(destResourceRequirements)), SOURCE_DEFINITION_VERSION, DESTINATION_DEFINITION_VERSION, WORKSPACE_ID); @@ -688,7 +896,10 @@ void testSourceResourceReqsOverrides(final String cpuReqOverride, .withMemoryRequest("800Mi"); final var jobCreator = new DefaultJobCreator(jobPersistence, resourceRequirementsProvider, - new TestClient(Map.of(SourceResourceOverrides.INSTANCE.getKey(), Jsons.serialize(overrides)))); + new TestClient(Map.of(SourceResourceOverrides.INSTANCE.getKey(), Jsons.serialize(overrides))), generationBumper, + statePersistence, + refreshJobStateUpdater, + streamRefreshesRepository); jobCreator.createSyncJob( SOURCE_CONNECTION, @@ -700,9 +911,11 @@ void testSourceResourceReqsOverrides(final String cpuReqOverride, DESTINATION_PROTOCOL_VERSION, List.of(STANDARD_SYNC_OPERATION), null, - new StandardSourceDefinition().withResourceRequirements(new ActorDefinitionResourceRequirements().withJobSpecific(List.of( - new JobTypeResourceLimit().withJobType(JobType.SYNC).withResourceRequirements(originalReqs)))), - new StandardDestinationDefinition().withResourceRequirements(new ActorDefinitionResourceRequirements().withDefault(destResourceRequirements)), + new StandardSourceDefinition().withSourceDefinitionId(UUID.randomUUID()) + .withResourceRequirements(new ActorDefinitionResourceRequirements().withJobSpecific(List.of( + new JobTypeResourceLimit().withJobType(JobType.SYNC).withResourceRequirements(originalReqs)))), + new StandardDestinationDefinition().withDestinationDefinitionId(UUID.randomUUID()) + .withResourceRequirements(new ActorDefinitionResourceRequirements().withDefault(destResourceRequirements)), SOURCE_DEFINITION_VERSION, DESTINATION_DEFINITION_VERSION, WORKSPACE_ID); @@ -744,7 +957,10 @@ void 
ignoresOverridesIfJsonStringWeird(final String weirdness) throws IOExceptio .withMemoryRequest("800Mi"); final var jobCreator = new DefaultJobCreator(jobPersistence, resourceRequirementsProvider, - new TestClient(Map.of(DestResourceOverrides.INSTANCE.getKey(), Jsons.serialize(weirdness)))); + new TestClient(Map.of(DestResourceOverrides.INSTANCE.getKey(), Jsons.serialize(weirdness))), generationBumper, + statePersistence, + refreshJobStateUpdater, + streamRefreshesRepository); jobCreator.createSyncJob( SOURCE_CONNECTION, @@ -756,9 +972,11 @@ void ignoresOverridesIfJsonStringWeird(final String weirdness) throws IOExceptio DESTINATION_PROTOCOL_VERSION, List.of(STANDARD_SYNC_OPERATION), null, - new StandardSourceDefinition().withResourceRequirements(new ActorDefinitionResourceRequirements().withDefault(sourceResourceRequirements)), - new StandardDestinationDefinition().withResourceRequirements(new ActorDefinitionResourceRequirements().withJobSpecific(List.of( - new JobTypeResourceLimit().withJobType(JobType.SYNC).withResourceRequirements(originalReqs)))), + new StandardSourceDefinition().withSourceDefinitionId(UUID.randomUUID()) + .withResourceRequirements(new ActorDefinitionResourceRequirements().withDefault(sourceResourceRequirements)), + new StandardDestinationDefinition().withDestinationDefinitionId(UUID.randomUUID()) + .withResourceRequirements(new ActorDefinitionResourceRequirements().withJobSpecific(List.of( + new JobTypeResourceLimit().withJobType(JobType.SYNC).withResourceRequirements(originalReqs)))), SOURCE_DEFINITION_VERSION, DESTINATION_DEFINITION_VERSION, WORKSPACE_ID); diff --git a/airbyte-persistence/job-persistence/src/test/java/io/airbyte/persistence/job/DefaultJobPersistenceTest.java b/airbyte-persistence/job-persistence/src/test/java/io/airbyte/persistence/job/DefaultJobPersistenceTest.java index d215448f72a..faa6ef99332 100644 --- a/airbyte-persistence/job-persistence/src/test/java/io/airbyte/persistence/job/DefaultJobPersistenceTest.java +++ 
b/airbyte-persistence/job-persistence/src/test/java/io/airbyte/persistence/job/DefaultJobPersistenceTest.java @@ -240,6 +240,37 @@ private Result getJobRecord(final long jobId) throws SQLException { return jobDatabase.query(ctx -> ctx.fetch(DefaultJobPersistence.BASE_JOB_SELECT_AND_JOIN + "WHERE jobs.id = ?", jobId)); } + @Test + @DisplayName("Properly update a config") + void testUpdateConfig() throws IOException { + final long jobId = jobPersistence.enqueueJob(SCOPE, SYNC_JOB_CONFIG).orElseThrow(); + + final Job actual = jobPersistence.getJob(jobId); + + assertEquals(SYNC_JOB_CONFIG, actual.getConfig()); + + jobPersistence.updateJobConfig(jobId, SPEC_JOB_CONFIG); + final Job actualAfterUpdate = jobPersistence.getJob(jobId); + + assertEquals(SPEC_JOB_CONFIG, actualAfterUpdate.getConfig()); + } + + @Test + @DisplayName("Properly update a config without modifying other jobs") + void testUpdateConfigOnly1Job() throws IOException { + final long jobId = jobPersistence.enqueueJob(SCOPE, SYNC_JOB_CONFIG).orElseThrow(); + final long jobId2 = jobPersistence.enqueueJob(UUID.randomUUID().toString(), SYNC_JOB_CONFIG).orElseThrow(); + + final Job actual = jobPersistence.getJob(jobId); + + assertEquals(SYNC_JOB_CONFIG, actual.getConfig()); + + jobPersistence.updateJobConfig(jobId, SPEC_JOB_CONFIG); + final Job actualJob2AfterUpdate = jobPersistence.getJob(jobId2); + + assertEquals(SYNC_JOB_CONFIG, actualJob2AfterUpdate.getConfig()); + } + @Test @DisplayName("Should set a job to incomplete if an attempt fails") void testCompleteAttemptFailed() throws IOException { @@ -2034,6 +2065,35 @@ void testListJobsWithMultipleAttemptsInDescOrder() throws IOException { assertEquals(jobId2, actualList.get(0).getId()); } + @Test + @DisplayName("Should apply limits after ordering by the key provided by the caller") + void testListJobsOrderedByUpdatedAt() throws IOException { + + final var jobId1 = jobPersistence.enqueueJob(SCOPE, SPEC_JOB_CONFIG).orElseThrow(); + final var job1Attempt1 = 
jobPersistence.createAttempt(jobId1, LOG_PATH); + + final var laterTime = NOW.plusSeconds(1000); + when(timeSupplier.get()).thenReturn(laterTime); + final var jobId2 = jobPersistence.enqueueJob(SCOPE, SPEC_JOB_CONFIG).orElseThrow(); + final var job2Attempt1LogPath = LOG_PATH.resolve("3"); + final var job2Attempt1 = jobPersistence.createAttempt(jobId2, job2Attempt1LogPath); + jobPersistence.succeedAttempt(jobId2, job2Attempt1); + + final var evenLaterTime = NOW.plusSeconds(3000); + when(timeSupplier.get()).thenReturn(evenLaterTime); + jobPersistence.succeedAttempt(jobId1, job1Attempt1); + + String configId = null; + final List updatedAtJobs = + jobPersistence.listJobs(Set.of(SPEC_JOB_CONFIG.getConfigType()), configId, 1, 0, null, null, null, null, null, "UPDATED_AT", "ASC"); + assertEquals(1, updatedAtJobs.size()); + assertEquals(jobId2, updatedAtJobs.get(0).getId()); + final List createdAtJobs = + jobPersistence.listJobs(Set.of(SPEC_JOB_CONFIG.getConfigType()), configId, 1, 0, null, null, null, null, null, "CREATED_AT", "ASC"); + assertEquals(1, createdAtJobs.size()); + assertEquals(jobId1, createdAtJobs.get(0).getId()); + } + @Test @DisplayName("Should list jobs across all connections in any status") void testListJobsWithNoFilters() throws IOException { diff --git a/airbyte-persistence/job-persistence/src/test/java/io/airbyte/persistence/job/JobNotifierTest.java b/airbyte-persistence/job-persistence/src/test/java/io/airbyte/persistence/job/JobNotifierTest.java index 40ec379ff8e..3603dd7129b 100644 --- a/airbyte-persistence/job-persistence/src/test/java/io/airbyte/persistence/job/JobNotifierTest.java +++ b/airbyte-persistence/job-persistence/src/test/java/io/airbyte/persistence/job/JobNotifierTest.java @@ -4,6 +4,7 @@ package io.airbyte.persistence.job; +import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.never; import static org.mockito.Mockito.verify; @@ -109,13 +110,13 @@ void setup() throws 
Exception { .thenReturn(new SourceConnection().withWorkspaceId(WORKSPACE_ID).withSourceId(SOURCE_ID).withName(SOURCE_NAME)); when(configRepository.getDestinationConnection(DESTINATION_ID)) .thenReturn(new DestinationConnection().withWorkspaceId(WORKSPACE_ID).withDestinationId(DESTINATION_ID).withName(DESTINATION_NAME)); - when(configRepository.getSourceDefinitionFromConnection(ArgumentMatchers.any())).thenReturn(sourceDefinition); - when(configRepository.getDestinationDefinitionFromConnection(ArgumentMatchers.any())).thenReturn(destinationDefinition); - when(configRepository.getStandardSourceDefinition(ArgumentMatchers.any())).thenReturn(sourceDefinition); - when(configRepository.getStandardDestinationDefinition(ArgumentMatchers.any())).thenReturn(destinationDefinition); + when(configRepository.getSourceDefinitionFromConnection(any())).thenReturn(sourceDefinition); + when(configRepository.getDestinationDefinitionFromConnection(any())).thenReturn(destinationDefinition); + when(configRepository.getStandardSourceDefinition(any())).thenReturn(sourceDefinition); + when(configRepository.getStandardDestinationDefinition(any())).thenReturn(destinationDefinition); when(configRepository.getStandardWorkspaceNoSecrets(WORKSPACE_ID, true)).thenReturn(getWorkspace()); when(workspaceHelper.getWorkspaceForJobIdIgnoreExceptions(job.getId())).thenReturn(WORKSPACE_ID); - when(notificationClient.notifyJobFailure(ArgumentMatchers.any(), ArgumentMatchers.anyString())).thenReturn(true); + when(notificationClient.notifyJobFailure(any(), ArgumentMatchers.anyString())).thenReturn(true); when(actorDefinitionVersionHelper.getSourceVersion(sourceDefinition, WORKSPACE_ID, SOURCE_ID)).thenReturn(actorDefinitionVersion); when(actorDefinitionVersionHelper.getDestinationVersion(destinationDefinition, WORKSPACE_ID, DESTINATION_ID)).thenReturn(actorDefinitionVersion); } @@ -123,10 +124,10 @@ void setup() throws Exception { @Test void testFailJob() throws IOException, InterruptedException, 
JsonValidationException, ConfigNotFoundException { List attemptStats = new ArrayList<>(); - jobNotifier.failJob("JobNotifierTest was running", job, attemptStats); + jobNotifier.failJob(job, attemptStats); final DateTimeFormatter formatter = DateTimeFormatter.ofLocalizedDateTime(FormatStyle.FULL).withZone(ZoneId.systemDefault()); SyncSummary summary = SyncSummary.builder().build(); - verify(notificationClient).notifyJobFailure(ArgumentMatchers.any(), ArgumentMatchers.eq(null)); + verify(notificationClient).notifyJobFailure(any(), ArgumentMatchers.eq(null)); final Builder metadata = ImmutableMap.builder(); metadata.put("connection_id", UUID.fromString(job.getScope())); @@ -144,21 +145,37 @@ void testFailJob() throws IOException, InterruptedException, JsonValidationExcep @Test void testSuccessfulJobDoNotSendNotificationPerSettings() - throws IOException, InterruptedException, JsonValidationException, ConfigNotFoundException { + throws IOException, InterruptedException { List attemptStats = new ArrayList<>(); jobNotifier.successJob(job, attemptStats); - verify(notificationClient, never()).notifySuccess(ArgumentMatchers.any()); + } + + @Test + void testSuccessfulJobSendNotification() throws IOException, InterruptedException, JsonValidationException, ConfigNotFoundException { + + NotificationItem item = new NotificationItem() + .withNotificationType(List.of(NotificationType.SLACK)) + .withSlackConfiguration(new SlackNotificationConfiguration() + .withWebhook("http://webhook")); + StandardWorkspace workspace = getWorkspace(); + NotificationSettings sendNotificationOnSuccessSetting = new NotificationSettings() + .withSendOnSuccess(item); + workspace.setNotificationSettings(sendNotificationOnSuccessSetting); + when(jobNotifier.getNotificationClientsFromNotificationItem(item)).thenReturn(List.of(notificationClient)); + + when(configRepository.getStandardWorkspaceNoSecrets(WORKSPACE_ID, true)).thenReturn(workspace); + List attemptStats = new ArrayList<>(); + 
jobNotifier.successJob(job, attemptStats); + verify(notificationClient).notifyJobSuccess(any(), any()); } @Test void testSendOnSyncDisabledWarning() - throws IOException, InterruptedException, JsonValidationException, ConfigNotFoundException { + throws IOException, InterruptedException { List attemptStats = new ArrayList<>(); jobNotifier.autoDisableConnectionWarning(job, attemptStats); - verify(notificationClient, never()).notifyConnectionDisableWarning(ArgumentMatchers.any(), ArgumentMatchers.any(), ArgumentMatchers.any(), - ArgumentMatchers.any(), ArgumentMatchers.any(), ArgumentMatchers.any()); - verify(customerIoNotificationClient).notifyConnectionDisableWarning(ArgumentMatchers.any(), ArgumentMatchers.any(), ArgumentMatchers.any(), - ArgumentMatchers.any(), ArgumentMatchers.any(), ArgumentMatchers.any()); + verify(notificationClient, never()).notifyConnectionDisableWarning(any(), any()); + verify(customerIoNotificationClient).notifyConnectionDisableWarning(any(), any()); } @Test @@ -166,10 +183,8 @@ void testSendOnSyncDisabled() throws IOException, InterruptedException, JsonValidationException, ConfigNotFoundException { List attemptStats = new ArrayList<>(); jobNotifier.autoDisableConnection(job, attemptStats); - verify(notificationClient).notifyConnectionDisabled(ArgumentMatchers.any(), ArgumentMatchers.any(), ArgumentMatchers.any(), - ArgumentMatchers.any(), ArgumentMatchers.any(), ArgumentMatchers.any()); - verify(customerIoNotificationClient).notifyConnectionDisabled(ArgumentMatchers.any(), ArgumentMatchers.any(), ArgumentMatchers.any(), - ArgumentMatchers.any(), ArgumentMatchers.any(), ArgumentMatchers.any()); + verify(notificationClient).notifyConnectionDisabled(any(), any()); + verify(customerIoNotificationClient).notifyConnectionDisabled(any(), any()); } @Test diff --git a/airbyte-persistence/job-persistence/src/test/java/io/airbyte/persistence/job/factory/DefaultSyncJobFactoryTest.java 
b/airbyte-persistence/job-persistence/src/test/java/io/airbyte/persistence/job/factory/DefaultSyncJobFactoryTest.java index 95321dc475f..bb3da2b6896 100644 --- a/airbyte-persistence/job-persistence/src/test/java/io/airbyte/persistence/job/factory/DefaultSyncJobFactoryTest.java +++ b/airbyte-persistence/job-persistence/src/test/java/io/airbyte/persistence/job/factory/DefaultSyncJobFactoryTest.java @@ -134,7 +134,7 @@ void createSyncJobFromConnectionId() throws JsonValidationException, ConfigNotFo final SyncJobFactory factory = new DefaultSyncJobFactory(true, jobCreator, configRepository, oAuthConfigSupplier, configInjector, workspaceHelper, actorDefinitionVersionHelper); - final long actualJobId = factory.create(connectionId); + final long actualJobId = factory.createSync(connectionId); assertEquals(jobId, actualJobId); verify(jobCreator) diff --git a/airbyte-persistence/job-persistence/src/test/java/io/airbyte/persistence/job/tracker/JobTrackerTest.java b/airbyte-persistence/job-persistence/src/test/java/io/airbyte/persistence/job/tracker/JobTrackerTest.java index 32715ca7988..a5f5cb44b54 100644 --- a/airbyte-persistence/job-persistence/src/test/java/io/airbyte/persistence/job/tracker/JobTrackerTest.java +++ b/airbyte-persistence/job-persistence/src/test/java/io/airbyte/persistence/job/tracker/JobTrackerTest.java @@ -32,6 +32,7 @@ import io.airbyte.config.JobSyncConfig.NamespaceDefinitionType; import io.airbyte.config.Metadata; import io.airbyte.config.NormalizationSummary; +import io.airbyte.config.RefreshConfig; import io.airbyte.config.Schedule; import io.airbyte.config.Schedule.TimeUnit; import io.airbyte.config.StandardCheckConnectionOutput; @@ -329,6 +330,11 @@ void testTrackSync() throws ConfigNotFoundException, IOException, JsonValidation testAsynchronous(ConfigType.SYNC, SYNC_CONFIG_METADATA); } + @Test + void testTrackRefresh() throws ConfigNotFoundException, IOException, JsonValidationException { + testAsynchronous(ConfigType.REFRESH, 
SYNC_CONFIG_METADATA); + } + @Test void testTrackSyncForInternalFailure() throws JsonValidationException, ConfigNotFoundException, IOException { final Long jobId = 12345L; @@ -500,6 +506,20 @@ void testConfigToMetadata() throws IOException { assertEquals(expected, actual); } + @Test + void testGenerateMetadata() { + final String jobId = "shouldBeLong"; + final int attemptId = 2; + final ConfigType configType = ConfigType.REFRESH; + final Job previousJob = new Job(0, ConfigType.RESET_CONNECTION, null, null, null, null, null, 0L, 0L); + + final Map metadata = jobTracker.generateJobMetadata(jobId, configType, attemptId, Optional.of(previousJob)); + assertEquals(jobId, metadata.get("job_id")); + assertEquals(attemptId, metadata.get("attempt_id")); + assertEquals(configType, metadata.get("job_type")); + assertEquals(ConfigType.RESET_CONNECTION, metadata.get("previous_job_type")); + } + void testAsynchronousAttempt(final ConfigType configType) throws ConfigNotFoundException, IOException, JsonValidationException { testAsynchronousAttempt(configType, getJobWithAttemptsMock(configType, LONG_JOB_ID), Collections.emptyMap()); } @@ -617,9 +637,6 @@ private Job getJobMock(final ConfigType configType, final long jobId) throws Con .withSyncMode(SyncMode.FULL_REFRESH) .withDestinationSyncMode(DestinationSyncMode.APPEND))); - final JobSyncConfig jobSyncConfig = new JobSyncConfig() - .withConfiguredAirbyteCatalog(catalog); - final AttemptSyncConfig attemptSyncConfig = new AttemptSyncConfig() .withSourceConfiguration(Jsons.jsonNode(ImmutableMap.of("key", "some_value"))) .withDestinationConfiguration(Jsons.jsonNode(ImmutableMap.of("key", false))); @@ -628,8 +645,15 @@ private Job getJobMock(final ConfigType configType, final long jobId) throws Con when(jobConfig.getConfigType()).thenReturn(configType); if (configType == ConfigType.SYNC) { + final JobSyncConfig jobSyncConfig = new JobSyncConfig() + .withConfiguredAirbyteCatalog(catalog); 
when(jobConfig.getSync()).thenReturn(jobSyncConfig); } + if (configType == ConfigType.REFRESH) { + final RefreshConfig refreshConfig = new RefreshConfig() + .withConfiguredAirbyteCatalog(catalog); + when(jobConfig.getRefresh()).thenReturn(refreshConfig); + } final Attempt attempt = mock(Attempt.class); when(attempt.getSyncConfig()).thenReturn(Optional.of(attemptSyncConfig)); diff --git a/airbyte-proxy/Dockerfile b/airbyte-proxy/Dockerfile index 056dd2b7fd8..607cfef033d 100644 --- a/airbyte-proxy/Dockerfile +++ b/airbyte-proxy/Dockerfile @@ -10,9 +10,9 @@ ENV VERSION ${VERSION} RUN apt-get update -y && apt-get install -y apache2-utils && rm -rf /var/lib/apt/lists/* # This variable can be used to update the destination containers that Nginx proxies to. -ENV PROXY_PASS_WEB "http://airbyte-webapp:80" +ENV PROXY_PASS_WEB "http://airbyte-webapp:8080" ENV PROXY_PASS_API "http://airbyte-server:8001" -ENV CONNECTOR_BUILDER_SERVER_API "http://airbyte-connector-builder-server:80" +ENV CONNECTOR_BUILDER_SERVER_API "http://airbyte-connector-builder-server:8080" ENV PROXY_PASS_AIRBYTE_API_SERVER "http://airbyte-api-server:8006" # Nginx config file diff --git a/airbyte-proxy/build.gradle.kts b/airbyte-proxy/build.gradle.kts index 95fd2f83bad..50e0d5510b2 100644 --- a/airbyte-proxy/build.gradle.kts +++ b/airbyte-proxy/build.gradle.kts @@ -1,40 +1,40 @@ plugins { - id("io.airbyte.gradle.jvm") - id("io.airbyte.gradle.docker") - id("io.airbyte.gradle.publish") + id("io.airbyte.gradle.jvm") + id("io.airbyte.gradle.docker") + id("io.airbyte.gradle.publish") } airbyte { - docker { - imageName = "proxy" - } + docker { + imageName = "proxy" + } } val prepareBuild = tasks.register("prepareBuild") { - from(layout.projectDirectory.file("nginx-auth.conf.template")) - from(layout.projectDirectory.file("nginx-no-auth.conf.template")) - from(layout.projectDirectory.file("run.sh")) - from(layout.projectDirectory.file("401.html")) + from(layout.projectDirectory.file("nginx-auth.conf.template")) + 
from(layout.projectDirectory.file("nginx-no-auth.conf.template")) + from(layout.projectDirectory.file("run.sh")) + from(layout.projectDirectory.file("401.html")) - into(layout.buildDirectory.dir("airbyte/docker")) + into(layout.buildDirectory.dir("airbyte/docker")) } tasks.named("dockerBuildImage") { - dependsOn(prepareBuild) - inputs.file("../.env") + dependsOn(prepareBuild) + inputs.file("../.env") } val bashTest = tasks.register("bashTest") { - inputs.file(layout.projectDirectory.file("nginx-auth.conf.template")) - inputs.file(layout.projectDirectory.file("nginx-no-auth.conf.template")) - inputs.file(layout.projectDirectory.file("run.sh")) - inputs.file(layout.projectDirectory.file("401.html")) - outputs.upToDateWhen { true } - dependsOn(tasks.named("dockerBuildImage")) - commandLine("./test.sh") + inputs.file(layout.projectDirectory.file("nginx-auth.conf.template")) + inputs.file(layout.projectDirectory.file("nginx-no-auth.conf.template")) + inputs.file(layout.projectDirectory.file("run.sh")) + inputs.file(layout.projectDirectory.file("401.html")) + outputs.upToDateWhen { true } + dependsOn(tasks.named("dockerBuildImage")) + commandLine("./test.sh") } // we can"t override the "test" command, so we can make our bash test a dependency) tasks.named("test") { - dependsOn(bashTest) + dependsOn(bashTest) } diff --git a/airbyte-server/Dockerfile b/airbyte-server/Dockerfile index 9869c458c4d..1e7865f87c9 100644 --- a/airbyte-server/Dockerfile +++ b/airbyte-server/Dockerfile @@ -1,17 +1,17 @@ -ARG JDK_IMAGE=airbyte/airbyte-base-java-image:2.1.0 -FROM ${JDK_IMAGE} AS server +ARG JDK_IMAGE=airbyte/airbyte-base-java-image:3.2.1 -EXPOSE 8000 5005 +FROM scratch as builder +WORKDIR /app +ADD airbyte-app.tar /app +FROM ${JDK_IMAGE} AS server +EXPOSE 8000 5005 ARG VERSION=dev - ENV APPLICATION airbyte-server ENV VERSION ${VERSION} - WORKDIR /app - -# This is automatically unzipped by Docker -ADD airbyte-app.tar /app +COPY --chown=airbyte:airbyte --from=builder /app /app +USER 
airbyte:airbyte # wait for upstream dependencies to become available before starting server ENTRYPOINT ["/bin/bash", "-c", "airbyte-app/bin/${APPLICATION}"] diff --git a/airbyte-server/readme.md b/airbyte-server/README.md similarity index 100% rename from airbyte-server/readme.md rename to airbyte-server/README.md diff --git a/airbyte-server/build.gradle.kts b/airbyte-server/build.gradle.kts index 36b22f08082..33918acbab2 100644 --- a/airbyte-server/build.gradle.kts +++ b/airbyte-server/build.gradle.kts @@ -1,179 +1,186 @@ import java.util.Properties plugins { - id("io.airbyte.gradle.jvm.app") - id("io.airbyte.gradle.docker") - id("io.airbyte.gradle.publish") - kotlin("jvm") - kotlin("kapt") + id("io.airbyte.gradle.jvm.app") + id("io.airbyte.gradle.docker") + id("io.airbyte.gradle.publish") + kotlin("jvm") + kotlin("kapt") } configurations.all { - resolutionStrategy { - // Ensure that the versions defined in deps.toml are used) - // instead of versions from transitive dependencies) - // Force to avoid updated version(brought in transitively from Micronaut 3.8+) - // that is incompatible with our current Helm setup) - force (libs.flyway.core, libs.s3, libs.aws.java.sdk.s3, libs.sts, libs.aws.java.sdk.sts) - } + resolutionStrategy { + // Ensure that the versions defined in deps.toml are used) + // instead of versions from transitive dependencies) + // Force to avoid updated version(brought in transitively from Micronaut 3.8+) + // that is incompatible with our current Helm setup) + force(libs.flyway.core, libs.s3, libs.aws.java.sdk.s3, libs.sts, libs.aws.java.sdk.sts) + } } dependencies { - compileOnly(libs.lombok) - annotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut - annotationProcessor(platform(libs.micronaut.platform)) - annotationProcessor(libs.bundles.micronaut.annotation.processor) - annotationProcessor(libs.micronaut.jaxrs.processor) - - kapt(platform(libs.micronaut.platform)) - kapt(libs.bundles.micronaut.annotation.processor) - 
kapt(libs.micronaut.jaxrs.processor) - - implementation(platform(libs.micronaut.platform)) - implementation(libs.bundles.micronaut) - implementation(libs.bundles.micronaut.data.jdbc) - implementation(libs.bundles.micronaut.metrics) - implementation(libs.micronaut.jaxrs.server) - implementation(libs.micronaut.http) - implementation(libs.micronaut.security) - implementation(libs.bundles.flyway) - implementation(libs.s3) - implementation(libs.sts) - implementation(libs.aws.java.sdk.s3) - implementation(libs.aws.java.sdk.sts) - implementation(libs.reactor.core) - implementation(libs.slugify) - implementation(libs.temporal.sdk) - implementation(libs.bundles.datadog) - implementation(libs.sentry.java) - implementation(libs.swagger.annotations) - implementation(libs.google.cloud.storage) - implementation(libs.cron.utils) - implementation(libs.log4j.slf4j2.impl) // Because cron-utils uses slf4j 2.0+ - implementation(libs.jakarta.ws.rs.api) - implementation(libs.jakarta.validation.api ) - - implementation(project(":airbyte-analytics")) - implementation(project(":airbyte-api")) - implementation(project(":airbyte-commons")) - implementation(project(":airbyte-commons-auth")) - implementation(project(":airbyte-commons-converters")) - implementation(project(":airbyte-commons-license")) - implementation(project(":airbyte-commons-micronaut")) - implementation(project(":airbyte-commons-micronaut-security")) - implementation(project(":airbyte-commons-temporal")) - implementation(project(":airbyte-commons-temporal-core")) - implementation(project(":airbyte-commons-server")) - implementation(project(":airbyte-commons-with-dependencies")) - implementation(project(":airbyte-config:init")) - implementation(project(":airbyte-config:config-models")) - implementation(project(":airbyte-config:config-persistence")) - implementation(project(":airbyte-config:config-secrets")) - implementation(project(":airbyte-config:specs")) - implementation(project(":airbyte-data")) - 
implementation(project(":airbyte-featureflag")) - implementation(project(":airbyte-metrics:metrics-lib")) - implementation(project(":airbyte-db:db-lib")) - implementation(project(":airbyte-db:jooq")) - implementation(project(":airbyte-json-validation")) - implementation(project(":airbyte-notification")) - implementation(project(":airbyte-oauth")) - implementation(libs.airbyte.protocol) - implementation(project(":airbyte-persistence:job-persistence")) - - runtimeOnly(libs.javax.databind) - - // Required for local database secret hydration) - runtimeOnly(libs.hikaricp) - runtimeOnly(libs.h2.database) - - testCompileOnly(libs.lombok) - testAnnotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut - testAnnotationProcessor(platform(libs.micronaut.platform)) - testAnnotationProcessor(libs.bundles.micronaut.annotation.processor) - testAnnotationProcessor(libs.micronaut.jaxrs.processor) - testAnnotationProcessor(libs.bundles.micronaut.test.annotation.processor) - - testImplementation(libs.bundles.micronaut.test) - testImplementation(project(":airbyte-test-utils")) - testImplementation(libs.postgresql) - testImplementation(libs.platform.testcontainers.postgresql) - testImplementation(libs.mockwebserver) - testImplementation(libs.mockito.inline) - testImplementation(libs.reactor.test) - testImplementation(libs.bundles.junit) - testImplementation(libs.assertj.core) - testImplementation(libs.junit.pioneer) - testImplementation(libs.mockk) - testImplementation(libs.micronaut.http.client) - - testRuntimeOnly(libs.junit.jupiter.engine) + compileOnly(libs.lombok) + annotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut + annotationProcessor(platform(libs.micronaut.platform)) + annotationProcessor(libs.bundles.micronaut.annotation.processor) + annotationProcessor(libs.micronaut.jaxrs.processor) + + kapt(platform(libs.micronaut.platform)) + kapt(libs.bundles.micronaut.annotation.processor) + kapt(libs.micronaut.jaxrs.processor) + + 
implementation(platform(libs.micronaut.platform)) + implementation(libs.bundles.micronaut) + implementation(libs.bundles.micronaut.cache) + implementation(libs.bundles.micronaut.data.jdbc) + implementation(libs.bundles.micronaut.metrics) + implementation(libs.micronaut.jaxrs.server) + implementation(libs.micronaut.http) + implementation(libs.micronaut.security) + implementation(libs.bundles.flyway) + implementation(libs.s3) + implementation(libs.sts) + implementation(libs.aws.java.sdk.s3) + implementation(libs.aws.java.sdk.sts) + implementation(libs.reactor.core) + implementation(libs.slugify) + implementation(libs.temporal.sdk) + implementation(libs.bundles.datadog) + implementation(libs.sentry.java) + implementation(libs.swagger.annotations) + implementation(libs.google.cloud.storage) + implementation(libs.cron.utils) + implementation(libs.log4j.slf4j2.impl) // Because cron-utils uses slf4j 2.0+ + implementation(libs.jakarta.ws.rs.api) + implementation(libs.jakarta.validation.api) + + implementation(project(":airbyte-analytics")) + implementation(project(":airbyte-api")) + implementation(project(":airbyte-commons")) + implementation(project(":airbyte-commons-auth")) + implementation(project(":airbyte-commons-converters")) + implementation(project(":airbyte-commons-license")) + implementation(project(":airbyte-commons-micronaut")) + implementation(project(":airbyte-commons-micronaut-security")) + implementation(project(":airbyte-commons-temporal")) + implementation(project(":airbyte-commons-temporal-core")) + implementation(project(":airbyte-commons-server")) + implementation(project(":airbyte-commons-with-dependencies")) + implementation(project(":airbyte-config:init")) + implementation(project(":airbyte-config:config-models")) + implementation(project(":airbyte-config:config-persistence")) + implementation(project(":airbyte-config:config-secrets")) + implementation(project(":airbyte-config:specs")) + implementation(project(":airbyte-data")) + 
implementation(project(":airbyte-featureflag")) + implementation(project(":airbyte-metrics:metrics-lib")) + implementation(project(":airbyte-db:db-lib")) + implementation(project(":airbyte-db:jooq")) + implementation(project(":airbyte-json-validation")) + implementation(project(":airbyte-notification")) + implementation(project(":airbyte-oauth")) + implementation(libs.airbyte.protocol) + implementation(project(":airbyte-persistence:job-persistence")) + + runtimeOnly(libs.javax.databind) + + // Required for local database secret hydration) + runtimeOnly(libs.hikaricp) + runtimeOnly(libs.h2.database) + + testCompileOnly(libs.lombok) + testAnnotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut + testAnnotationProcessor(platform(libs.micronaut.platform)) + testAnnotationProcessor(libs.bundles.micronaut.annotation.processor) + testAnnotationProcessor(libs.micronaut.jaxrs.processor) + testAnnotationProcessor(libs.bundles.micronaut.test.annotation.processor) + + testImplementation(libs.bundles.micronaut.test) + testImplementation(project(":airbyte-test-utils")) + testImplementation(libs.postgresql) + testImplementation(libs.platform.testcontainers.postgresql) + testImplementation(libs.mockwebserver) + testImplementation(libs.mockito.inline) + testImplementation(libs.reactor.test) + testImplementation(libs.bundles.junit) + testImplementation(libs.assertj.core) + testImplementation(libs.junit.pioneer) + testImplementation(libs.mockk) + testImplementation(libs.micronaut.http.client) + + testRuntimeOnly(libs.junit.jupiter.engine) } // we want to be able to access the generated db files from config/init when we build the server docker image.) 
val copySeed = tasks.register("copySeed") { - from ("${project(":airbyte-config:init").buildDir}/resources/main/config") - into ("$buildDir/config_init/resources/main/config") - dependsOn(project(":airbyte-config:init").tasks.named("processResources")) + from("${project(":airbyte-config:init").buildDir}/resources/main/config") + into("$buildDir/config_init/resources/main/config") + dependsOn(project(":airbyte-config:init").tasks.named("processResources")) } // need to make sure that the files are in the resource directory before copying.) // tests require the seed to exist.) tasks.named("test") { - dependsOn(copySeed) + dependsOn(copySeed) } tasks.named("assemble") { - dependsOn(copySeed) + dependsOn(copySeed) } val env = Properties().apply { - load(rootProject.file(".env.dev").inputStream()) + load(rootProject.file(".env.dev").inputStream()) } airbyte { - application { - mainClass = "io.airbyte.server.Application" - defaultJvmArgs = listOf("-XX:+ExitOnOutOfMemoryError", "-XX:MaxRAMPercentage=75.0") - @Suppress("UNCHECKED_CAST") - localEnvVars.putAll(env.toMap() as Map) - localEnvVars.putAll(mapOf( - "AIRBYTE_ROLE" to (System.getenv("AIRBYTE_ROLE") ?: "undefined"), - "AIRBYTE_VERSION" to env["VERSION"].toString(), - "DATABASE_USER" to env["DATABASE_USER"].toString(), - "DATABASE_PASSWORD" to env["DATABASE_PASSWORD"].toString(), - "CONFIG_DATABASE_USER" to (env["CONFIG_DATABASE_USER"]?.toString() ?: ""), - "CONFIG_DATABASE_PASSWORD" to (env["CONFIG_DATABASE_PASSWORD"]?.toString() ?: ""), - // we map the docker pg db to port 5433 so it does not conflict with other pg instances. 
- "DATABASE_URL" to "jdbc:postgresql://localhost:5433/${env["DATABASE_DB"]}", - "CONFIG_DATABASE_URL" to "jdbc:postgresql://localhost:5433/${env["CONFIG_DATABASE_DB"]}", - "RUN_DATABASE_MIGRATION_ON_STARTUP" to "true", - "WORKSPACE_ROOT" to env["WORKSPACE_ROOT"].toString(), - "CONFIG_ROOT" to "/tmp/airbyte_config", - "TRACKING_STRATEGY" to env["TRACKING_STRATEGY"].toString(), - "TEMPORAL_HOST" to "localhost:7233", - "MICRONAUT_ENVIRONMENTS" to "control-plane", - )) - } - - docker { - imageName = "server" - } - - spotbugs { - excludes = listOf(" \n" + - " \n" + - " \n" + - " \n" + - " ") - } + application { + mainClass = "io.airbyte.server.Application" + defaultJvmArgs = listOf("-XX:+ExitOnOutOfMemoryError", "-XX:MaxRAMPercentage=75.0") + @Suppress("UNCHECKED_CAST") + localEnvVars.putAll(env.toMap() as Map) + localEnvVars.putAll( + mapOf( + "AIRBYTE_ROLE" to (System.getenv("AIRBYTE_ROLE") ?: "undefined"), + "AIRBYTE_VERSION" to env["VERSION"].toString(), + "DATABASE_USER" to env["DATABASE_USER"].toString(), + "DATABASE_PASSWORD" to env["DATABASE_PASSWORD"].toString(), + "CONFIG_DATABASE_USER" to (env["CONFIG_DATABASE_USER"]?.toString() ?: ""), + "CONFIG_DATABASE_PASSWORD" to (env["CONFIG_DATABASE_PASSWORD"]?.toString() ?: ""), + // we map the docker pg db to port 5433 so it does not conflict with other pg instances. 
+ "DATABASE_URL" to "jdbc:postgresql://localhost:5433/${env["DATABASE_DB"]}", + "CONFIG_DATABASE_URL" to "jdbc:postgresql://localhost:5433/${env["CONFIG_DATABASE_DB"]}", + "RUN_DATABASE_MIGRATION_ON_STARTUP" to "true", + "WORKSPACE_ROOT" to env["WORKSPACE_ROOT"].toString(), + "CONFIG_ROOT" to "/tmp/airbyte_config", + "TRACKING_STRATEGY" to env["TRACKING_STRATEGY"].toString(), + "TEMPORAL_HOST" to "localhost:7233", + "MICRONAUT_ENVIRONMENTS" to "control-plane", + ) + ) + } + + docker { + imageName = "server" + } + + spotbugs { + excludes = listOf( + " \n" + + " \n" + + " \n" + + " \n" + + " " + ) + } } tasks.named("test") { - environment(mapOf( - "AIRBYTE_VERSION" to env["VERSION"], - "MICRONAUT_ENVIRONMENTS" to "test", - "SERVICE_NAME" to project.name, - )) + environment( + mapOf( + "AIRBYTE_VERSION" to env["VERSION"], + "MICRONAUT_ENVIRONMENTS" to "test", + "SERVICE_NAME" to project.name, + ) + ) } // The DuplicatesStrategy will be required while this module is mixture of kotlin and java _with_ lombok dependencies.) @@ -182,5 +189,5 @@ tasks.named("test") { // keepJavacAnnotationProcessors enabled, which causes duplicate META-INF files to be generated.) // Once lombok has been removed, this can also be removed.) 
tasks.withType().configureEach { - duplicatesStrategy = DuplicatesStrategy.EXCLUDE + duplicatesStrategy = DuplicatesStrategy.EXCLUDE } \ No newline at end of file diff --git a/airbyte-server/src/main/java/io/airbyte/server/apis/ConnectionApiController.java b/airbyte-server/src/main/java/io/airbyte/server/apis/ConnectionApiController.java index 5163598a126..6eef192118a 100644 --- a/airbyte-server/src/main/java/io/airbyte/server/apis/ConnectionApiController.java +++ b/airbyte-server/src/main/java/io/airbyte/server/apis/ConnectionApiController.java @@ -12,6 +12,8 @@ import io.airbyte.api.generated.ConnectionApi; import io.airbyte.api.model.generated.ActorDefinitionRequestBody; +import io.airbyte.api.model.generated.BooleanRead; +import io.airbyte.api.model.generated.ConnectionAndJobIdRequestBody; import io.airbyte.api.model.generated.ConnectionAutoPropagateResult; import io.airbyte.api.model.generated.ConnectionAutoPropagateSchemaChange; import io.airbyte.api.model.generated.ConnectionCreate; @@ -25,6 +27,7 @@ import io.airbyte.api.model.generated.ConnectionStatusesRequestBody; import io.airbyte.api.model.generated.ConnectionStreamHistoryReadItem; import io.airbyte.api.model.generated.ConnectionStreamHistoryRequestBody; +import io.airbyte.api.model.generated.ConnectionStreamRefreshRequestBody; import io.airbyte.api.model.generated.ConnectionStreamRequestBody; import io.airbyte.api.model.generated.ConnectionSyncProgressReadItem; import io.airbyte.api.model.generated.ConnectionSyncResultRead; @@ -41,6 +44,7 @@ import io.airbyte.commons.server.handlers.MatchSearchHandler; import io.airbyte.commons.server.handlers.OperationsHandler; import io.airbyte.commons.server.handlers.SchedulerHandler; +import io.airbyte.commons.server.handlers.StreamRefreshesHandler; import io.airbyte.commons.server.scheduling.AirbyteTaskExecutors; import io.airbyte.commons.temporal.TemporalJobType; import io.airbyte.commons.temporal.scheduling.RouterService; @@ -54,11 +58,14 @@ import 
io.micronaut.scheduling.annotation.ExecuteOn; import io.micronaut.security.annotation.Secured; import io.micronaut.security.rules.SecurityRule; +import java.util.ArrayList; import java.util.List; +import lombok.extern.slf4j.Slf4j; @Controller("/api/v1/connections") @Context @Secured(SecurityRule.IS_AUTHENTICATED) +@Slf4j public class ConnectionApiController implements ConnectionApi { private final ConnectionsHandler connectionsHandler; @@ -67,19 +74,22 @@ public class ConnectionApiController implements ConnectionApi { private final RouterService routerService; private final StreamStatusesHandler streamStatusesHandler; private final MatchSearchHandler matchSearchHandler; + private final StreamRefreshesHandler streamRefreshesHandler; public ConnectionApiController(final ConnectionsHandler connectionsHandler, final OperationsHandler operationsHandler, final SchedulerHandler schedulerHandler, final RouterService routerService, final StreamStatusesHandler streamStatusesHandler, - final MatchSearchHandler matchSearchHandler) { + final MatchSearchHandler matchSearchHandler, + final StreamRefreshesHandler streamRefreshesHandler) { this.connectionsHandler = connectionsHandler; this.operationsHandler = operationsHandler; this.schedulerHandler = schedulerHandler; this.routerService = routerService; this.streamStatusesHandler = streamStatusesHandler; this.matchSearchHandler = matchSearchHandler; + this.streamRefreshesHandler = streamRefreshesHandler; } @Override @@ -124,6 +134,16 @@ public ConnectionReadList listConnectionsForWorkspacesPaginated( return ApiHelper.execute(() -> connectionsHandler.listConnectionsForWorkspaces(listConnectionsForWorkspacesRequestBody)); } + @Post(uri = "/refresh") + @Secured({WORKSPACE_EDITOR, ORGANIZATION_EDITOR}) + @ExecuteOn(AirbyteTaskExecutors.SCHEDULER) + @Override + public BooleanRead refreshConnectionStream(@Body final ConnectionStreamRefreshRequestBody connectionStreamRefreshRequestBody) { + return ApiHelper.execute(() -> new 
BooleanRead().value(streamRefreshesHandler.createRefreshesForConnection( + connectionStreamRefreshRequestBody.getConnectionId(), + connectionStreamRefreshRequestBody.getStreams() != null ? connectionStreamRefreshRequestBody.getStreams() : new ArrayList<>()))); + } + @Override @Post(uri = "/list_all") @Secured({WORKSPACE_READER, ORGANIZATION_READER}) @@ -162,6 +182,15 @@ public List getConnectionDataHistory(@Body final return ApiHelper.execute(() -> connectionsHandler.getConnectionDataHistory(connectionDataHistoryRequestBody)); } + @Override + @Post(uri = "/getForJob") + @Secured({WORKSPACE_READER, ORGANIZATION_READER}) + @ExecuteOn(AirbyteTaskExecutors.IO) + public ConnectionRead getConnectionForJob(@Body final ConnectionAndJobIdRequestBody connectionAndJobIdRequestBody) { + return ApiHelper.execute( + () -> connectionsHandler.getConnectionForJob(connectionAndJobIdRequestBody.getConnectionId(), connectionAndJobIdRequestBody.getJobId())); + } + @Override @Post(uri = "/status") @Secured({WORKSPACE_READER, ORGANIZATION_READER}) @@ -234,6 +263,22 @@ public JobInfoRead resetConnectionStream(@Body final ConnectionStreamRequestBody return ApiHelper.execute(() -> schedulerHandler.resetConnectionStream(connectionStreamRequestBody)); } + @Override + @Post(uri = "/clear") + @Secured({WORKSPACE_EDITOR, ORGANIZATION_EDITOR}) + @ExecuteOn(AirbyteTaskExecutors.SCHEDULER) + public JobInfoRead clearConnection(@Body ConnectionIdRequestBody connectionIdRequestBody) { + return ApiHelper.execute(() -> schedulerHandler.resetConnection(connectionIdRequestBody)); + } + + @Override + @Post(uri = "/clear/stream") + @Secured({WORKSPACE_EDITOR, ORGANIZATION_EDITOR}) + @ExecuteOn(AirbyteTaskExecutors.SCHEDULER) + public JobInfoRead clearConnectionStream(@Body ConnectionStreamRequestBody connectionStreamRequestBody) { + return ApiHelper.execute(() -> schedulerHandler.resetConnectionStream(connectionStreamRequestBody)); + } + @Override @Post(uri = "/apply_schema_change") 
@Secured({WORKSPACE_EDITOR, ORGANIZATION_EDITOR}) diff --git a/airbyte-server/src/main/java/io/airbyte/server/apis/InstanceConfigurationApiController.java b/airbyte-server/src/main/java/io/airbyte/server/apis/InstanceConfigurationApiController.java index 2180387605c..d127c926e3d 100644 --- a/airbyte-server/src/main/java/io/airbyte/server/apis/InstanceConfigurationApiController.java +++ b/airbyte-server/src/main/java/io/airbyte/server/apis/InstanceConfigurationApiController.java @@ -14,6 +14,7 @@ import io.micronaut.security.annotation.Secured; import io.micronaut.security.rules.SecurityRule; +// this controller is only usable in self-managed versions of Airbyte. Not Cloud! @Controller("/api/v1/instance_configuration") @Secured(SecurityRule.IS_ANONYMOUS) public class InstanceConfigurationApiController implements InstanceConfigurationApi { diff --git a/airbyte-server/src/main/java/io/airbyte/server/apis/PermissionApiController.java b/airbyte-server/src/main/java/io/airbyte/server/apis/PermissionApiController.java index 15079cfc05a..2419284f5ce 100644 --- a/airbyte-server/src/main/java/io/airbyte/server/apis/PermissionApiController.java +++ b/airbyte-server/src/main/java/io/airbyte/server/apis/PermissionApiController.java @@ -81,10 +81,11 @@ public PermissionRead getPermission(@Body final PermissionIdRequestBody permissi @Secured({ORGANIZATION_ADMIN, WORKSPACE_ADMIN}) @Post("/update") @Override - public PermissionRead updatePermission(@Body final PermissionUpdate permissionUpdate) { - return ApiHelper.execute(() -> { + public void updatePermission(@Body final PermissionUpdate permissionUpdate) { + ApiHelper.execute(() -> { validatePermissionUpdate(permissionUpdate); - return permissionHandler.updatePermission(permissionUpdate); + permissionHandler.updatePermission(permissionUpdate); + return null; }); } diff --git a/airbyte-server/src/main/java/io/airbyte/server/apis/UserInvitationApiController.java 
b/airbyte-server/src/main/java/io/airbyte/server/apis/UserInvitationApiController.java index 0dc272ab07c..83faa81a361 100644 --- a/airbyte-server/src/main/java/io/airbyte/server/apis/UserInvitationApiController.java +++ b/airbyte-server/src/main/java/io/airbyte/server/apis/UserInvitationApiController.java @@ -4,23 +4,32 @@ package io.airbyte.server.apis; -import static io.airbyte.commons.auth.AuthRoleConstants.ORGANIZATION_EDITOR; -import static io.airbyte.commons.auth.AuthRoleConstants.WORKSPACE_EDITOR; +import static io.airbyte.commons.auth.AuthRoleConstants.ORGANIZATION_ADMIN; +import static io.airbyte.commons.auth.AuthRoleConstants.ORGANIZATION_READER; +import static io.airbyte.commons.auth.AuthRoleConstants.WORKSPACE_ADMIN; +import static io.airbyte.commons.auth.AuthRoleConstants.WORKSPACE_READER; import io.airbyte.api.generated.UserInvitationApi; import io.airbyte.api.model.generated.InviteCodeRequestBody; import io.airbyte.api.model.generated.UserInvitationCreateRequestBody; +import io.airbyte.api.model.generated.UserInvitationCreateResponse; +import io.airbyte.api.model.generated.UserInvitationListRequestBody; import io.airbyte.api.model.generated.UserInvitationRead; +import io.airbyte.commons.server.errors.OperationNotAllowedException; import io.airbyte.commons.server.support.CurrentUserService; import io.airbyte.config.User; import io.airbyte.server.handlers.UserInvitationHandler; +import io.airbyte.server.helpers.UserInvitationAuthorizationHelper; import io.micronaut.http.annotation.Body; import io.micronaut.http.annotation.Controller; import io.micronaut.http.annotation.Get; +import io.micronaut.http.annotation.Post; import io.micronaut.security.annotation.Secured; import io.micronaut.security.rules.SecurityRule; import jakarta.ws.rs.Path; import jakarta.ws.rs.PathParam; +import java.util.List; +import java.util.UUID; import lombok.extern.slf4j.Slf4j; @Slf4j @@ -30,14 +39,18 @@ public class UserInvitationApiController implements UserInvitationApi { 
private final UserInvitationHandler userInvitationHandler; private final CurrentUserService currentUserService; + private final UserInvitationAuthorizationHelper userInvitationAuthorizationHelper; - public UserInvitationApiController(final UserInvitationHandler userInvitationHandler, final CurrentUserService currentUserService) { + public UserInvitationApiController(final UserInvitationHandler userInvitationHandler, + final CurrentUserService currentUserService, + final UserInvitationAuthorizationHelper userInvitationAuthorizationHelper) { this.currentUserService = currentUserService; this.userInvitationHandler = userInvitationHandler; + this.userInvitationAuthorizationHelper = userInvitationAuthorizationHelper; } @Get - @Path("/{inviteCode}") + @Path("/by_code/{inviteCode}") @Override public UserInvitationRead getUserInvitation(@PathParam("inviteCode") final String inviteCode) { return ApiHelper.execute(() -> { @@ -49,13 +62,21 @@ public UserInvitationRead getUserInvitation(@PathParam("inviteCode") final Strin }); } + @Post + @Path("/list_pending") @Override - @Secured({WORKSPACE_EDITOR, ORGANIZATION_EDITOR}) - public UserInvitationRead createUserInvitation(@Body final UserInvitationCreateRequestBody invitationCreateRequestBody) { + @Secured({WORKSPACE_READER, ORGANIZATION_READER}) + public List listPendingInvitations(@Body final UserInvitationListRequestBody invitationListRequestBody) { + return userInvitationHandler.getPendingInvitations(invitationListRequestBody); + } + + @Override + @Secured({WORKSPACE_ADMIN, ORGANIZATION_ADMIN}) + public UserInvitationCreateResponse createUserInvitation(@Body final UserInvitationCreateRequestBody invitationCreateRequestBody) { return ApiHelper.execute(() -> { final User currentUser = currentUserService.getCurrentUser(); - return userInvitationHandler.create(invitationCreateRequestBody, currentUser); + return userInvitationHandler.createInvitationOrPermission(invitationCreateRequestBody, currentUser); }); } @@ -80,8 +101,21 @@ 
public UserInvitationRead declineUserInvitation(@Body final InviteCodeRequestBod @Override public UserInvitationRead cancelUserInvitation(@Body final InviteCodeRequestBody inviteCodeRequestBody) { - // TODO only invite creator cancel the invitation - throw new RuntimeException("Not yet implemented"); + // note: this endpoint is accessible to all authenticated users, but `authorizeInvitationAdmin` + // throws a 403 if a non-admin user of the invitation's scope tries to cancel it. + return ApiHelper.execute(() -> { + authorizeInvitationAdmin(inviteCodeRequestBody.getInviteCode()); + return userInvitationHandler.cancel(inviteCodeRequestBody); + }); + } + + private void authorizeInvitationAdmin(final String inviteCode) { + final UUID currentUserId = currentUserService.getCurrentUser().getUserId(); + try { + userInvitationAuthorizationHelper.authorizeInvitationAdmin(inviteCode, currentUserId); + } catch (final Exception e) { + throw new OperationNotAllowedException("Admin authorization failed for invite code: " + inviteCode + " and user id: " + currentUserId, e); + } } } diff --git a/airbyte-server/src/main/java/io/airbyte/server/apis/WebBackendApiController.java b/airbyte-server/src/main/java/io/airbyte/server/apis/WebBackendApiController.java index 7a8619559d8..370d2b77e96 100644 --- a/airbyte-server/src/main/java/io/airbyte/server/apis/WebBackendApiController.java +++ b/airbyte-server/src/main/java/io/airbyte/server/apis/WebBackendApiController.java @@ -25,13 +25,13 @@ import io.airbyte.api.model.generated.WebBackendWorkspaceState; import io.airbyte.api.model.generated.WebBackendWorkspaceStateResult; import io.airbyte.commons.lang.MoreBooleans; +import io.airbyte.commons.server.authorization.ApiAuthorizationHelper; +import io.airbyte.commons.server.authorization.Scope; import io.airbyte.commons.server.handlers.WebBackendCheckUpdatesHandler; import io.airbyte.commons.server.handlers.WebBackendConnectionsHandler; import 
io.airbyte.commons.server.handlers.WebBackendGeographiesHandler; import io.airbyte.commons.server.scheduling.AirbyteTaskExecutors; import io.airbyte.metrics.lib.TracingHelper; -import io.airbyte.server.apis.publicapi.authorization.AirbyteApiAuthorizationHelper; -import io.airbyte.server.apis.publicapi.authorization.Scope; import io.micronaut.http.annotation.Body; import io.micronaut.http.annotation.Controller; import io.micronaut.http.annotation.Post; @@ -47,16 +47,16 @@ public class WebBackendApiController implements WebBackendApi { private final WebBackendConnectionsHandler webBackendConnectionsHandler; private final WebBackendGeographiesHandler webBackendGeographiesHandler; private final WebBackendCheckUpdatesHandler webBackendCheckUpdatesHandler; - private final AirbyteApiAuthorizationHelper airbyteApiAuthorizationHelper; + private final ApiAuthorizationHelper apiAuthorizationHelper; public WebBackendApiController(final WebBackendConnectionsHandler webBackendConnectionsHandler, final WebBackendGeographiesHandler webBackendGeographiesHandler, final WebBackendCheckUpdatesHandler webBackendCheckUpdatesHandler, - final AirbyteApiAuthorizationHelper airbyteApiAuthorizationHelper) { + final ApiAuthorizationHelper apiAuthorizationHelper) { this.webBackendConnectionsHandler = webBackendConnectionsHandler; this.webBackendGeographiesHandler = webBackendGeographiesHandler; this.webBackendCheckUpdatesHandler = webBackendCheckUpdatesHandler; - this.airbyteApiAuthorizationHelper = airbyteApiAuthorizationHelper; + this.apiAuthorizationHelper = apiAuthorizationHelper; } @Post("/state/get_type") @@ -99,7 +99,7 @@ public WebBackendConnectionRead webBackendGetConnection(@Body final WebBackendCo if (MoreBooleans.isTruthy(webBackendConnectionRequestBody.getWithRefreshedCatalog())) { // only allow refresh catalog if the user is at least a workspace editor or // organization editor for the connection's workspace - airbyteApiAuthorizationHelper.checkWorkspacePermissions( + 
apiAuthorizationHelper.checkWorkspacePermissions( webBackendConnectionRequestBody.getConnectionId().toString(), Scope.CONNECTION, Set.of(PermissionType.WORKSPACE_EDITOR, PermissionType.ORGANIZATION_EDITOR)); diff --git a/airbyte-server/src/main/java/io/airbyte/server/apis/WorkspaceApiController.java b/airbyte-server/src/main/java/io/airbyte/server/apis/WorkspaceApiController.java index 7cb8dcda796..62d6fe3c83f 100644 --- a/airbyte-server/src/main/java/io/airbyte/server/apis/WorkspaceApiController.java +++ b/airbyte-server/src/main/java/io/airbyte/server/apis/WorkspaceApiController.java @@ -32,11 +32,11 @@ import io.airbyte.api.model.generated.WorkspaceUpdate; import io.airbyte.api.model.generated.WorkspaceUpdateName; import io.airbyte.api.model.generated.WorkspaceUpdateOrganization; +import io.airbyte.commons.server.errors.problems.ForbiddenProblem; import io.airbyte.commons.server.handlers.PermissionHandler; import io.airbyte.commons.server.handlers.WorkspacesHandler; import io.airbyte.commons.server.scheduling.AirbyteTaskExecutors; import io.airbyte.commons.server.support.CurrentUserService; -import io.airbyte.server.apis.publicapi.problems.ForbiddenProblem; import io.micronaut.http.HttpStatus; import io.micronaut.http.annotation.Body; import io.micronaut.http.annotation.Controller; @@ -205,7 +205,15 @@ public WorkspaceRead updateWorkspaceOrganization(@Body final WorkspaceUpdateOrga @ExecuteOn(AirbyteTaskExecutors.IO) @Override public WorkspaceRead getWorkspaceByConnectionId(@Body final ConnectionIdRequestBody connectionIdRequestBody) { - return ApiHelper.execute(() -> workspacesHandler.getWorkspaceByConnectionId(connectionIdRequestBody)); + return ApiHelper.execute(() -> workspacesHandler.getWorkspaceByConnectionId(connectionIdRequestBody, false)); + } + + @Post("/get_by_connection_id_with_tombstone") + @Secured({WORKSPACE_READER, ORGANIZATION_READER}) + @ExecuteOn(AirbyteTaskExecutors.IO) + @Override + public WorkspaceRead 
getWorkspaceByConnectionIdWithTombstone(@Body final ConnectionIdRequestBody connectionIdRequestBody) { + return ApiHelper.execute(() -> workspacesHandler.getWorkspaceByConnectionId(connectionIdRequestBody, true)); } @Override diff --git a/airbyte-server/src/main/java/io/airbyte/server/config/ApplicationBeanFactory.java b/airbyte-server/src/main/java/io/airbyte/server/config/ApplicationBeanFactory.java index 899ee4c6452..be13711c1ea 100644 --- a/airbyte-server/src/main/java/io/airbyte/server/config/ApplicationBeanFactory.java +++ b/airbyte-server/src/main/java/io/airbyte/server/config/ApplicationBeanFactory.java @@ -7,6 +7,10 @@ import io.airbyte.analytics.TrackingClient; import io.airbyte.commons.features.EnvVariableFeatureFlags; import io.airbyte.commons.features.FeatureFlags; +import io.airbyte.commons.server.handlers.helpers.BuilderProjectUpdater; +import io.airbyte.commons.server.handlers.helpers.CompositeBuilderProjectUpdater; +import io.airbyte.commons.server.handlers.helpers.ConfigRepositoryBuilderProjectUpdater; +import io.airbyte.commons.server.handlers.helpers.LocalFileSystemBuilderProjectUpdater; import io.airbyte.commons.server.scheduler.EventRunner; import io.airbyte.commons.server.scheduler.TemporalEventRunner; import io.airbyte.commons.temporal.TemporalClient; @@ -16,6 +20,10 @@ import io.airbyte.config.persistence.ActorDefinitionVersionHelper; import io.airbyte.config.persistence.ConfigInjector; import io.airbyte.config.persistence.ConfigRepository; +import io.airbyte.config.persistence.RefreshJobStateUpdater; +import io.airbyte.config.persistence.StatePersistence; +import io.airbyte.config.persistence.StreamRefreshesRepository; +import io.airbyte.config.persistence.helper.GenerationBumper; import io.airbyte.config.secrets.JsonSecretsProcessor; import io.airbyte.featureflag.FeatureFlagClient; import io.airbyte.metrics.lib.MetricClient; @@ -38,6 +46,7 @@ import jakarta.inject.Singleton; import java.net.http.HttpClient; import java.nio.file.Path; 
+import java.util.List; import java.util.UUID; import java.util.function.Supplier; @@ -87,8 +96,14 @@ public JobNotifier jobNotifier( @Singleton public DefaultJobCreator defaultJobCreator(final JobPersistence jobPersistence, final WorkerConfigsProvider workerConfigsProvider, - final FeatureFlagClient featureFlagClient) { - return new DefaultJobCreator(jobPersistence, workerConfigsProvider, featureFlagClient); + final FeatureFlagClient featureFlagClient, + final GenerationBumper generationBumper, + final StatePersistence statePersistence, + final RefreshJobStateUpdater refreshJobStateUpdater, + final StreamRefreshesRepository streamRefreshesRepository) { + return new DefaultJobCreator(jobPersistence, workerConfigsProvider, featureFlagClient, generationBumper, + statePersistence, refreshJobStateUpdater, + streamRefreshesRepository); } @SuppressWarnings("ParameterName") @@ -163,4 +178,15 @@ public HttpClient httpClient() { return HttpClient.newBuilder().version(HttpClient.Version.HTTP_1_1).build(); } + @Singleton + public BuilderProjectUpdater builderProjectUpdater(ConfigRepository configRepository) { + final var pathToConnectors = io.airbyte.commons.envvar.EnvVar.PATH_TO_CONNECTORS.fetch(); + ConfigRepositoryBuilderProjectUpdater configRepositoryProjectUpdater = new ConfigRepositoryBuilderProjectUpdater(configRepository); + if (pathToConnectors == null || pathToConnectors.isEmpty()) { + return configRepositoryProjectUpdater; + } else { + return new CompositeBuilderProjectUpdater(List.of(configRepositoryProjectUpdater, new LocalFileSystemBuilderProjectUpdater())); + } + } + } diff --git a/airbyte-server/src/main/java/io/airbyte/server/handlers/UserInvitationHandler.java b/airbyte-server/src/main/java/io/airbyte/server/handlers/UserInvitationHandler.java index f9a2af80095..3f2b6831f5d 100644 --- a/airbyte-server/src/main/java/io/airbyte/server/handlers/UserInvitationHandler.java +++ b/airbyte-server/src/main/java/io/airbyte/server/handlers/UserInvitationHandler.java @@ 
-4,15 +4,33 @@ package io.airbyte.server.handlers; +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.Sets; +import io.airbyte.analytics.TrackingClient; import io.airbyte.api.model.generated.InviteCodeRequestBody; +import io.airbyte.api.model.generated.PermissionCreate; +import io.airbyte.api.model.generated.PermissionType; import io.airbyte.api.model.generated.UserInvitationCreateRequestBody; +import io.airbyte.api.model.generated.UserInvitationCreateResponse; +import io.airbyte.api.model.generated.UserInvitationListRequestBody; import io.airbyte.api.model.generated.UserInvitationRead; +import io.airbyte.commons.server.errors.ConflictException; import io.airbyte.commons.server.errors.OperationNotAllowedException; +import io.airbyte.commons.server.handlers.PermissionHandler; +import io.airbyte.config.ConfigSchema; import io.airbyte.config.InvitationStatus; +import io.airbyte.config.Permission; +import io.airbyte.config.ScopeType; import io.airbyte.config.User; import io.airbyte.config.UserInvitation; -import io.airbyte.config.persistence.ConfigNotFoundException; +import io.airbyte.config.persistence.PermissionPersistence; +import io.airbyte.config.persistence.UserPersistence; +import io.airbyte.data.exceptions.ConfigNotFoundException; +import io.airbyte.data.services.InvitationDuplicateException; +import io.airbyte.data.services.InvitationStatusUnexpectedException; +import io.airbyte.data.services.OrganizationService; import io.airbyte.data.services.UserInvitationService; +import io.airbyte.data.services.WorkspaceService; import io.airbyte.notification.CustomerIoEmailConfig; import io.airbyte.notification.CustomerIoEmailNotificationSender; import io.airbyte.persistence.job.WebUrlHelper; @@ -20,24 +38,53 @@ import io.airbyte.validation.json.JsonValidationException; import jakarta.inject.Singleton; import java.io.IOException; +import java.time.OffsetDateTime; +import java.util.List; +import java.util.Optional; +import java.util.Set; 
import java.util.UUID; +import java.util.stream.Collectors; +import lombok.extern.slf4j.Slf4j; @Singleton +@Slf4j public class UserInvitationHandler { + static final String ACCEPT_INVITE_PATH = "/accept-invite?inviteCode="; + static final int INVITE_EXPIRATION_DAYS = 7; + static final String USER_INVITED = "User Invited"; + final UserInvitationService service; final UserInvitationMapper mapper; final WebUrlHelper webUrlHelper; final CustomerIoEmailNotificationSender customerIoEmailNotificationSender; + final WorkspaceService workspaceService; + final OrganizationService organizationService; + final UserPersistence userPersistence; + final PermissionPersistence permissionPersistence; + final PermissionHandler permissionHandler; + final TrackingClient trackingClient; public UserInvitationHandler(final UserInvitationService service, final UserInvitationMapper mapper, final CustomerIoEmailNotificationSender customerIoEmailNotificationSender, - final WebUrlHelper webUrlHelper) { + final WebUrlHelper webUrlHelper, + final WorkspaceService workspaceService, + final OrganizationService organizationService, + final UserPersistence userPersistence, + final PermissionPersistence permissionPersistence, + final PermissionHandler permissionHandler, + final TrackingClient trackingClient) { this.service = service; this.mapper = mapper; this.webUrlHelper = webUrlHelper; this.customerIoEmailNotificationSender = customerIoEmailNotificationSender; + this.workspaceService = workspaceService; + this.organizationService = organizationService; + this.userPersistence = userPersistence; + this.permissionPersistence = permissionPersistence; + this.permissionHandler = permissionHandler; + this.trackingClient = trackingClient; } public UserInvitationRead getByInviteCode(final String inviteCode, final User currentUser) { @@ -50,31 +97,214 @@ public UserInvitationRead getByInviteCode(final String inviteCode, final User cu return mapper.toApi(invitation); } - public UserInvitationRead 
create(final UserInvitationCreateRequestBody req, final User currentUser) - throws JsonValidationException, ConfigNotFoundException, IOException { + public List getPendingInvitations(final UserInvitationListRequestBody invitationListRequestBody) { + final ScopeType scopeType = mapper.toDomain(invitationListRequestBody.getScopeType()); + final List invitations = service.getPendingInvitations(scopeType, invitationListRequestBody.getScopeId()); + + return invitations.stream() + .map(mapper::toApi) + .collect(Collectors.toList()); + } + + /** + * Creates either a new {@link UserInvitation}, or a new {@link Permission} for the invited email + * address, depending on whether the email address is already associated with a User within the + * relevant organization. + */ + public UserInvitationCreateResponse createInvitationOrPermission(final UserInvitationCreateRequestBody req, final User currentUser) + throws IOException, JsonValidationException, ConfigNotFoundException { + + final UserInvitationCreateResponse response; + final boolean wasDirectAdd = attemptDirectAddEmailToOrg(req, currentUser); + + if (wasDirectAdd) { + return new UserInvitationCreateResponse().directlyAdded(true); + } else { + try { + final UserInvitation invitation = createUserInvitationForNewOrgEmail(req, currentUser); + response = new UserInvitationCreateResponse().directlyAdded(false).inviteCode(invitation.getInviteCode()); + trackUserInvited(req, currentUser); + return response; + } catch (final InvitationDuplicateException e) { + throw new ConflictException(e.getMessage()); + } + } + } + + private void trackUserInvited(final UserInvitationCreateRequestBody requestBody, final User currentUser) { + try { + switch (requestBody.getScopeType()) { + case ORGANIZATION -> { + // Implement once we support org-level invitations + } + case WORKSPACE -> trackUserInvitedToWorkspace(requestBody.getScopeId(), + requestBody.getInvitedEmail(), + currentUser.getEmail(), + currentUser.getUserId(), + 
getInvitedResourceName(requestBody), + requestBody.getPermissionType()); + default -> throw new IllegalArgumentException("Unexpected scope type: " + requestBody.getScopeType()); + } + } catch (final Exception e) { + // log the error, but don't throw an exception to prevent a user-facing error + log.error("Failed to track user invited", e); + } + } + + private void trackUserInvitedToWorkspace(final UUID workspaceId, + final String email, + final String inviterUserEmail, + final UUID inviterUserId, + final String workspaceName, + final PermissionType permissionType) { + trackingClient.track(workspaceId, + USER_INVITED, + ImmutableMap.builder() + .put("email", email) + .put("inviter_user_email", inviterUserEmail) + .put("inviter_user_id", inviterUserId) + .put("role", permissionType) + .put("workspace_id", workspaceId) + .put("workspace_name", workspaceName) + .put("invited_from", "unspecified") // Note: currently we don't have a way to specify this, carryover from old cloud-only invite system + .build()); + } + + /** + * Attempts to add the invited email address to the requested workspace/organization directly. + * Searches for existing users with the invited email address, who are also currently members of the + * requested organization. If any such users are found, a new permission is created for each user + * via the {@link PermissionHandler}, and an email notification is sent to the email. 
+ */ + private boolean attemptDirectAddEmailToOrg(final UserInvitationCreateRequestBody req, final User currentUser) + throws IOException, JsonValidationException, ConfigNotFoundException { + + final Optional orgId = getOrgIdFromCreateRequest(req); + if (orgId.isEmpty()) { + log.info("No orgId found for scopeId {}, will not direct add.", req.getScopeId()); + return false; + } + + final Set orgUserIdsWithEmail = getOrgUserIdsWithEmail(orgId.get(), req.getInvitedEmail()); + + if (orgUserIdsWithEmail.isEmpty()) { + // indicates that there will be no 'direct add', so the invitation creation path should be + // taken instead. + log.info("No existing org users with email, will not direct add."); + return false; + } + + // TODO - simplify once we enforce email uniqueness in User table. + for (final UUID userId : orgUserIdsWithEmail) { + directAddPermissionForExistingUser(req, userId); + } + + // TODO - update customer.io template to support organization-level invitations, right now the + // template contains hardcoded language about workspaces. + customerIoEmailNotificationSender.sendNotificationOnInvitingExistingUser( + new CustomerIoEmailConfig(req.getInvitedEmail()), currentUser.getName(), getInvitedResourceName(req)); + + // indicates that the email was processed via the 'direct add' path, so no invitation will be + // created. 
+ return true; + } + + private Set getOrgUserIdsWithEmail(final UUID orgId, final String email) throws IOException { + log.info("orgId: " + orgId); + + final Set userIdsWithEmail = userPersistence.getUsersByEmail(email).stream() + .map(User::getUserId) + .collect(Collectors.toSet()); + + log.info("userIdsWithEmail: " + userIdsWithEmail); + + final Set existingOrgUserIds = permissionPersistence.listUsersInOrganization(orgId).stream() + .map(userPermission -> userPermission.getUser().getUserId()) + .collect(Collectors.toSet()); + + log.info("existingOrgUserIds: " + existingOrgUserIds); + + final Set intersection = Sets.intersection(userIdsWithEmail, existingOrgUserIds); + + log.info("intersection: " + intersection); + + return intersection; + } + + private Optional getOrgIdFromCreateRequest(final UserInvitationCreateRequestBody req) throws IOException { + return switch (req.getScopeType()) { + case ORGANIZATION -> Optional.of(req.getScopeId()); + case WORKSPACE -> workspaceService.getOrganizationIdFromWorkspaceId(req.getScopeId()); + }; + } + + private void directAddPermissionForExistingUser(final UserInvitationCreateRequestBody req, final UUID existingUserId) + throws JsonValidationException, IOException { + final var permissionCreate = new PermissionCreate() + .userId(existingUserId) + .permissionType(req.getPermissionType()); + + switch (req.getScopeType()) { + case ORGANIZATION -> permissionCreate.setOrganizationId(req.getScopeId()); + case WORKSPACE -> permissionCreate.setWorkspaceId(req.getScopeId()); + default -> throw new IllegalArgumentException("Unexpected scope type: " + req.getScopeType()); + } + + permissionHandler.createPermission(permissionCreate); + } + + /** + * Creates a new {@link UserInvitation} for the invited email address, and sends an email that + * contains a link that can be used to accept the invitation by its unique inviteCode. 
Note that + * this method only handles the path where the invited email address is not already associated with + * a User inside the relevant organization. + */ + private UserInvitation createUserInvitationForNewOrgEmail(final UserInvitationCreateRequestBody req, final User currentUser) + throws InvitationDuplicateException { final UserInvitation model = mapper.toDomain(req); model.setInviterUserId(currentUser.getUserId()); // For now, inviteCodes are simply UUIDs that are converted to strings, to virtually guarantee - // uniqueness. - // The column itself is a string, so if UUIDs prove to be cumbersome or too long, we can always - // switch to - // a different method of generating shorter, unique inviteCodes. + // uniqueness. The column itself is a string, so if UUIDs prove to be cumbersome or too long, + // we can always switch to a different method of generating shorter, unique inviteCodes. model.setInviteCode(UUID.randomUUID().toString()); // New UserInvitations are always created with a status of PENDING. model.setStatus(InvitationStatus.PENDING); + // For now, new UserInvitations are created with a fixed expiration timestamp. 
+ model.setExpiresAt(OffsetDateTime.now().plusDays(INVITE_EXPIRATION_DAYS).toEpochSecond()); + final UserInvitation saved = service.createUserInvitation(model); + log.info("created invitation {}", saved); + // send invite email to the user // the email content includes the name of the inviter and the invite link // the invite link should look like cloud.airbyte.com/accept-invite?inviteCode=randomCodeHere - final String inviteLink = webUrlHelper.getBaseUrl() + "/accept-invite?inviteCode=" + saved.getInviteCode(); + final String inviteLink = webUrlHelper.getBaseUrl() + ACCEPT_INVITE_PATH + saved.getInviteCode(); customerIoEmailNotificationSender.sendInviteToUser(new CustomerIoEmailConfig(req.getInvitedEmail()), currentUser.getName(), inviteLink); - return mapper.toApi(saved); + return saved; + } + + /** + * Returns either the Workspace name or Organization name, depending on the scope of the invite. + */ + private String getInvitedResourceName(final UserInvitationCreateRequestBody req) + throws IOException, JsonValidationException, ConfigNotFoundException { + switch (req.getScopeType()) { + case ORGANIZATION -> { + return organizationService.getOrganization(req.getScopeId()) + .orElseThrow(() -> new ConfigNotFoundException(ConfigSchema.ORGANIZATION, req.getScopeId())) + .getName(); + } + case WORKSPACE -> { + return workspaceService.getStandardWorkspaceNoSecrets(req.getScopeId(), false).getName(); + } + default -> throw new IllegalArgumentException("Unexpected scope type: " + req.getScopeType()); + } } public UserInvitationRead accept(final InviteCodeRequestBody req, final User currentUser) { @@ -84,17 +314,23 @@ public UserInvitationRead accept(final InviteCodeRequestBody req, final User cur throw new OperationNotAllowedException("Invited email does not match current user email."); } - // TODO - ensure that only org-level invitation can be accepted by a user currently logged into that - // org. 
- // email is not enough, because a user can have multiple logins with the same associated email, ie - // if they sign in through both SSO and via email/password. - final UserInvitation accepted = service.acceptUserInvitation(req.getInviteCode(), currentUser.getUserId()); + try { + final UserInvitation accepted = service.acceptUserInvitation(req.getInviteCode(), currentUser.getUserId()); + return mapper.toApi(accepted); + } catch (final InvitationStatusUnexpectedException e) { + throw new ConflictException(e.getMessage()); + } + } - return mapper.toApi(accepted); + public UserInvitationRead cancel(final InviteCodeRequestBody req) { + try { + final UserInvitation canceled = service.cancelUserInvitation(req.getInviteCode()); + return mapper.toApi(canceled); + } catch (final InvitationStatusUnexpectedException e) { + throw new ConflictException(e.getMessage()); + } } // TODO implement `decline` - // TODO implement `cancel` - } diff --git a/airbyte-server/src/main/java/io/airbyte/server/pro/AirbyteAuthInternalTokenValidator.java b/airbyte-server/src/main/java/io/airbyte/server/pro/AirbyteAuthInternalTokenValidator.java index 3bce5c37adc..ee4b7016275 100644 --- a/airbyte-server/src/main/java/io/airbyte/server/pro/AirbyteAuthInternalTokenValidator.java +++ b/airbyte-server/src/main/java/io/airbyte/server/pro/AirbyteAuthInternalTokenValidator.java @@ -4,6 +4,8 @@ package io.airbyte.server.pro; +import static io.airbyte.config.persistence.UserPersistence.DEFAULT_USER_ID; + import io.airbyte.commons.auth.AirbyteAuthConstants; import io.airbyte.commons.license.annotation.RequiresAirbyteProEnabled; import io.airbyte.commons.server.support.RbacRoleHelper; @@ -31,7 +33,7 @@ public class AirbyteAuthInternalTokenValidator implements TokenValidator validateToken(final String token, final HttpRequest request) { if (validateAirbyteAuthInternalToken(token)) { return Flux.create(emitter -> { - emitter.next(getAuthentication(token)); + emitter.next(getAuthentication()); 
emitter.complete(); }); } else { @@ -44,10 +46,10 @@ private Boolean validateAirbyteAuthInternalToken(final String token) { return AirbyteAuthConstants.VALID_INTERNAL_SERVICE_NAMES.contains(token); } - private Authentication getAuthentication(final String token) { + private Authentication getAuthentication() { // set the Authentication username to the token value, which must be a valid internal service name. // for now, all internal services get instance admin roles. - return Authentication.build(token, RbacRoleHelper.getInstanceAdminRoles()); + return Authentication.build(DEFAULT_USER_ID.toString(), RbacRoleHelper.getInstanceAdminRoles()); } } diff --git a/airbyte-server/src/main/java/io/airbyte/server/pro/KeycloakTokenValidator.java b/airbyte-server/src/main/java/io/airbyte/server/pro/KeycloakTokenValidator.java index d5a85e84073..9f2dfe030d5 100644 --- a/airbyte-server/src/main/java/io/airbyte/server/pro/KeycloakTokenValidator.java +++ b/airbyte-server/src/main/java/io/airbyte/server/pro/KeycloakTokenValidator.java @@ -14,21 +14,20 @@ import io.airbyte.commons.server.support.JwtTokenParser; import io.airbyte.commons.server.support.RbacRoleHelper; import io.micrometer.common.util.StringUtils; -import io.micronaut.http.HttpHeaders; import io.micronaut.http.HttpRequest; -import io.micronaut.http.HttpResponse; -import io.micronaut.http.HttpStatus; -import io.micronaut.http.MediaType; -import io.micronaut.http.client.HttpClient; import io.micronaut.security.authentication.Authentication; import io.micronaut.security.authentication.AuthenticationException; import io.micronaut.security.token.validator.TokenValidator; +import jakarta.inject.Named; import jakarta.inject.Singleton; import java.nio.charset.StandardCharsets; import java.util.Base64; import java.util.Collection; import java.util.HashSet; import lombok.extern.slf4j.Slf4j; +import okhttp3.OkHttpClient; +import okhttp3.Request; +import okhttp3.Response; import org.reactivestreams.Publisher; import 
reactor.core.publisher.Mono; @@ -42,14 +41,14 @@ @SuppressWarnings({"PMD.PreserveStackTrace", "PMD.UseTryWithResources", "PMD.UnusedFormalParameter", "PMD.UnusedPrivateMethod"}) public class KeycloakTokenValidator implements TokenValidator> { - private final HttpClient client; + private final OkHttpClient client; private final AirbyteKeycloakConfiguration keycloakConfiguration; private final RbacRoleHelper rbacRoleHelper; - public KeycloakTokenValidator(final HttpClient httpClient, + public KeycloakTokenValidator(@Named("keycloakTokenValidatorHttpClient") final OkHttpClient okHttpClient, final AirbyteKeycloakConfiguration keycloakConfiguration, final RbacRoleHelper rbacRoleHelper) { - this.client = httpClient; + this.client = okHttpClient; this.keycloakConfiguration = keycloakConfiguration; this.rbacRoleHelper = rbacRoleHelper; } @@ -100,27 +99,25 @@ private Authentication getAuthentication(final String token, final HttpRequest validateTokenWithKeycloak(final String token) { - final HttpRequest httpRequest = buildHttpRequest(token); - - return Mono.from(client.exchange(httpRequest, String.class)) - .flatMap(this::handleResponse) - .doOnError(e -> log.error("Failed to validate access token.", e)) - .onErrorReturn(false) - .doOnTerminate(() -> client.close()); - } - - private HttpRequest buildHttpRequest(final String token) { - return HttpRequest.GET(keycloakConfiguration.getKeycloakUserInfoEndpoint()) - .header(HttpHeaders.AUTHORIZATION, "Bearer " + token) - .contentType(MediaType.APPLICATION_JSON); - } - - private Mono handleResponse(final HttpResponse response) { - if (response.getStatus().equals(HttpStatus.OK)) { - return validateUserInfo(response.body()); - } else { - log.warn("Non-200 response from userinfo endpoint: {}", response.getStatus()); - return Mono.just(false); + final okhttp3.Request request = new Request.Builder() + .addHeader(org.apache.http.HttpHeaders.CONTENT_TYPE, "application/json") + .addHeader(org.apache.http.HttpHeaders.AUTHORIZATION, 
"Bearer " + token) + .url(keycloakConfiguration.getKeycloakUserInfoEndpoint()) + .get() + .build(); + + try (final Response response = client.newCall(request).execute()) { + if (response.isSuccessful()) { + assert response.body() != null; + final String responseBody = response.body().string(); + return validateUserInfo(responseBody); + } else { + log.warn("Non-200 response from userinfo endpoint: {}", response.code()); + return Mono.just(false); + } + } catch (final Exception e) { + log.error("Failed to validate access token.", e); + return Mono.error(e); } } diff --git a/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/apiTracking/TrackingHelper.kt b/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/apiTracking/TrackingHelper.kt index 18d9e55d0e9..fb2d1e19412 100644 --- a/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/apiTracking/TrackingHelper.kt +++ b/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/apiTracking/TrackingHelper.kt @@ -131,7 +131,7 @@ class TrackingHelper(private val trackingClient: TrackingClient) { trackingClient.track( userId, AIRBYTE_API_CALL, - payload as Map?, + payload.toMap(), ) } diff --git a/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/constants/ServerConstants.kt b/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/constants/ServerConstants.kt index 1bf5f6c3c57..682a61e1e81 100644 --- a/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/constants/ServerConstants.kt +++ b/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/constants/ServerConstants.kt @@ -30,6 +30,7 @@ val DELETE = io.micronaut.http.HttpMethod.DELETE.name val PUT = io.micronaut.http.HttpMethod.PUT.name const val WORKSPACE_IDS = "workspaceIds" +const val JOB_TYPE = "jobType" const val INCLUDE_DELETED = "includeDeleted" const val OAUTH_CALLBACK_PATH = "$ROOT_PATH/v1/oauth/callback" diff --git 
a/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/controllers/ConnectionsController.kt b/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/controllers/ConnectionsController.kt index 701b6a859f7..c685bd38c59 100644 --- a/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/controllers/ConnectionsController.kt +++ b/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/controllers/ConnectionsController.kt @@ -9,6 +9,8 @@ import io.airbyte.api.model.generated.AirbyteStreamAndConfiguration import io.airbyte.api.model.generated.DestinationSyncMode import io.airbyte.api.model.generated.PermissionType import io.airbyte.api.model.generated.SourceDiscoverSchemaRead +import io.airbyte.commons.server.authorization.ApiAuthorizationHelper +import io.airbyte.commons.server.authorization.Scope import io.airbyte.commons.server.scheduling.AirbyteTaskExecutors import io.airbyte.commons.server.support.CurrentUserService import io.airbyte.public_api.generated.PublicConnectionsApi @@ -16,8 +18,6 @@ import io.airbyte.public_api.model.generated.ConnectionCreateRequest import io.airbyte.public_api.model.generated.ConnectionResponse import io.airbyte.public_api.model.generated.DestinationResponse import io.airbyte.server.apis.publicapi.apiTracking.TrackingHelper -import io.airbyte.server.apis.publicapi.authorization.AirbyteApiAuthorizationHelper -import io.airbyte.server.apis.publicapi.authorization.Scope import io.airbyte.server.apis.publicapi.constants.CONNECTIONS_PATH import io.airbyte.server.apis.publicapi.constants.CONNECTIONS_WITH_ID_PATH import io.airbyte.server.apis.publicapi.constants.DELETE @@ -26,6 +26,7 @@ import io.airbyte.server.apis.publicapi.constants.POST import io.airbyte.server.apis.publicapi.constants.PUT import io.airbyte.server.apis.publicapi.helpers.AirbyteCatalogHelper import io.airbyte.server.apis.publicapi.services.ConnectionService +import io.airbyte.server.apis.publicapi.services.DestinationService import 
io.airbyte.server.apis.publicapi.services.SourceService import io.micronaut.http.annotation.Body import io.micronaut.http.annotation.Controller @@ -38,7 +39,6 @@ import jakarta.validation.constraints.NotNull import jakarta.ws.rs.Path import jakarta.ws.rs.PathParam import jakarta.ws.rs.core.Response -import services.DestinationService import java.util.Objects import java.util.UUID @@ -49,13 +49,13 @@ open class ConnectionsController( private val sourceService: SourceService, private val destinationService: DestinationService, private val trackingHelper: TrackingHelper, - private val airbyteApiAuthorizationHelper: AirbyteApiAuthorizationHelper, + private val apiAuthorizationHelper: ApiAuthorizationHelper, private val currentUserService: CurrentUserService, ) : PublicConnectionsApi { - @ExecuteOn(AirbyteTaskExecutors.IO) + @ExecuteOn(AirbyteTaskExecutors.PUBLIC_API) override fun publicCreateConnection(connectionCreateRequest: ConnectionCreateRequest): Response { val userId: UUID = currentUserService.currentUser.userId - airbyteApiAuthorizationHelper.checkWorkspacePermissions( + apiAuthorizationHelper.checkWorkspacePermissions( listOf(connectionCreateRequest.destinationId.toString()), Scope.DESTINATION, userId, @@ -63,9 +63,9 @@ open class ConnectionsController( ) trackingHelper.callWithTracker({ - AirbyteCatalogHelper.validateCronConfiguration( - connectionCreateRequest.schedule, - ) + connectionCreateRequest.schedule?.let { + AirbyteCatalogHelper.validateCronConfiguration(it) + } }, CONNECTIONS_PATH, POST, userId) // get destination response to retrieve workspace id as well as input for destination sync modes @@ -95,7 +95,7 @@ open class ConnectionsController( val validStreams: Map = AirbyteCatalogHelper.getValidStreams( - Objects.requireNonNull(airbyteCatalogFromDiscoverSchema), + Objects.requireNonNull(airbyteCatalogFromDiscoverSchema), ) // check user configs @@ -117,7 +117,14 @@ open class ConnectionsController( for (streamConfiguration in 
connectionCreateRequest.configurations.streams) { val validStreamAndConfig = validStreams[streamConfiguration.name] val schemaStream = validStreamAndConfig!!.stream - val schemaConfig = validStreamAndConfig.config + val updatedValidStreamAndConfig = AirbyteStreamAndConfiguration() + updatedValidStreamAndConfig.stream = schemaStream + updatedValidStreamAndConfig.config = + AirbyteCatalogHelper.updateAirbyteStreamConfiguration( + validStreamAndConfig.config, + schemaStream, + streamConfiguration, + ) val validDestinationSyncModes = trackingHelper.callWithTracker( @@ -130,23 +137,21 @@ open class ConnectionsController( // set user configs trackingHelper.callWithTracker( { - AirbyteCatalogHelper.setAndValidateStreamConfig( - streamConfiguration, - validDestinationSyncModes, - schemaStream!!, - schemaConfig!!, + AirbyteCatalogHelper.validateStreamConfig( + streamConfiguration = streamConfiguration, + validDestinationSyncModes = validDestinationSyncModes, + airbyteStream = schemaStream, ) }, CONNECTIONS_PATH, POST, userId, ) - configuredCatalog!!.addStreamsItem(validStreamAndConfig) + configuredCatalog!!.addStreamsItem(updatedValidStreamAndConfig) } } else { // no user supplied stream configs, return all streams with full refresh overwrite - configuredCatalog = airbyteCatalogFromDiscoverSchema - AirbyteCatalogHelper.setAllStreamsFullRefreshOverwrite(configuredCatalog!!) + configuredCatalog = AirbyteCatalogHelper.updateAllStreamsFullRefreshOverwrite(airbyteCatalogFromDiscoverSchema!!) 
} val finalConfiguredCatalog = configuredCatalog @@ -172,10 +177,10 @@ open class ConnectionsController( } @Path("/{connectionId}") - @ExecuteOn(AirbyteTaskExecutors.IO) + @ExecuteOn(AirbyteTaskExecutors.PUBLIC_API) override fun publicDeleteConnection(connectionId: UUID): Response { val userId: UUID = currentUserService.currentUser.userId - airbyteApiAuthorizationHelper.checkWorkspacePermissions( + apiAuthorizationHelper.checkWorkspacePermissions( listOf(connectionId.toString()), Scope.CONNECTION, userId, @@ -205,10 +210,10 @@ open class ConnectionsController( } @Path("/{connectionId}") - @ExecuteOn(AirbyteTaskExecutors.IO) + @ExecuteOn(AirbyteTaskExecutors.PUBLIC_API) override fun publicGetConnection(connectionId: UUID): Response { val userId: UUID = currentUserService.currentUser.userId - airbyteApiAuthorizationHelper.checkWorkspacePermissions( + apiAuthorizationHelper.checkWorkspacePermissions( listOf(connectionId.toString()), Scope.CONNECTION, userId, @@ -232,7 +237,7 @@ open class ConnectionsController( .build() } - @ExecuteOn(AirbyteTaskExecutors.IO) + @ExecuteOn(AirbyteTaskExecutors.PUBLIC_API) override fun listConnections( workspaceIds: List?, includeDeleted: Boolean?, @@ -240,7 +245,7 @@ open class ConnectionsController( offset: Int?, ): Response { val userId: UUID = currentUserService.currentUser.userId - airbyteApiAuthorizationHelper.checkWorkspacePermissions( + apiAuthorizationHelper.checkWorkspacePermissions( workspaceIds?.map { it.toString() } ?: emptyList(), Scope.WORKSPACES, userId, @@ -270,7 +275,7 @@ open class ConnectionsController( @Patch @Path("/{connectionId}") - @ExecuteOn(AirbyteTaskExecutors.IO) + @ExecuteOn(AirbyteTaskExecutors.PUBLIC_API) override fun patchConnection( @PathParam(value = "connectionId") connectionId: UUID, @Valid @Body @NotNull connectionPatchRequest: @@ -278,7 +283,7 @@ open class ConnectionsController( io.airbyte.public_api.model.generated.ConnectionPatchRequest, ): Response { val userId: UUID = 
currentUserService.currentUser.userId - airbyteApiAuthorizationHelper.checkWorkspacePermissions( + apiAuthorizationHelper.checkWorkspacePermissions( listOf(connectionId.toString()), Scope.CONNECTION, userId, @@ -288,9 +293,9 @@ open class ConnectionsController( // validate cron timing configurations trackingHelper.callWithTracker( { - AirbyteCatalogHelper.validateCronConfiguration( - connectionPatchRequest.schedule, - ) + connectionPatchRequest.schedule?.let { + AirbyteCatalogHelper.validateCronConfiguration(it) + } }, CONNECTIONS_WITH_ID_PATH, PUT, @@ -332,7 +337,7 @@ open class ConnectionsController( val validStreams: Map = AirbyteCatalogHelper.getValidStreams( - Objects.requireNonNull(airbyteCatalogFromDiscoverSchema), + Objects.requireNonNull(airbyteCatalogFromDiscoverSchema), ) // check user configs @@ -354,7 +359,14 @@ open class ConnectionsController( for (streamConfiguration in connectionPatchRequest.configurations.streams) { val validStreamAndConfig = validStreams[streamConfiguration.name] val schemaStream = validStreamAndConfig!!.stream - val schemaConfig = validStreamAndConfig.config + val updatedValidStreamAndConfig = AirbyteStreamAndConfiguration() + updatedValidStreamAndConfig.stream = schemaStream + updatedValidStreamAndConfig.config = + AirbyteCatalogHelper.updateAirbyteStreamConfiguration( + validStreamAndConfig.config, + schemaStream, + streamConfiguration, + ) val validDestinationSyncModes = trackingHelper.callWithTracker( @@ -367,18 +379,17 @@ open class ConnectionsController( // set user configs trackingHelper.callWithTracker( { - AirbyteCatalogHelper.setAndValidateStreamConfig( - streamConfiguration, - validDestinationSyncModes, - schemaStream!!, - schemaConfig!!, + AirbyteCatalogHelper.validateStreamConfig( + streamConfiguration = streamConfiguration, + validDestinationSyncModes = validDestinationSyncModes, + airbyteStream = schemaStream, ) }, CONNECTIONS_PATH, POST, userId, ) - configuredCatalog!!.addStreamsItem(validStreamAndConfig) + 
configuredCatalog!!.addStreamsItem(updatedValidStreamAndConfig) } } else { // no user supplied stream configs, return all existing streams diff --git a/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/controllers/DefaultController.kt b/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/controllers/DefaultController.kt index fc586132a52..5d892d18cf6 100644 --- a/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/controllers/DefaultController.kt +++ b/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/controllers/DefaultController.kt @@ -17,7 +17,7 @@ open class DefaultController() : PublicRootApi { @Value("\${airbyte.internal.documentation.host}") var documentationHost: String? = null - @ExecuteOn(AirbyteTaskExecutors.IO) + @ExecuteOn(AirbyteTaskExecutors.PUBLIC_API) override fun getDocumentation(): Response { return Response .status(302) diff --git a/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/controllers/DestinationsController.kt b/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/controllers/DestinationsController.kt index c1d3b53228a..bb952459225 100644 --- a/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/controllers/DestinationsController.kt +++ b/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/controllers/DestinationsController.kt @@ -6,6 +6,9 @@ package io.airbyte.server.apis.publicapi.controllers import com.fasterxml.jackson.databind.node.ObjectNode import io.airbyte.api.model.generated.PermissionType +import io.airbyte.commons.server.authorization.ApiAuthorizationHelper +import io.airbyte.commons.server.authorization.Scope +import io.airbyte.commons.server.errors.problems.UnprocessableEntityProblem import io.airbyte.commons.server.scheduling.AirbyteTaskExecutors import io.airbyte.commons.server.support.CurrentUserService import io.airbyte.public_api.generated.PublicDestinationsApi @@ -13,8 +16,6 @@ import 
io.airbyte.public_api.model.generated.DestinationCreateRequest import io.airbyte.public_api.model.generated.DestinationPatchRequest import io.airbyte.public_api.model.generated.DestinationPutRequest import io.airbyte.server.apis.publicapi.apiTracking.TrackingHelper -import io.airbyte.server.apis.publicapi.authorization.AirbyteApiAuthorizationHelper -import io.airbyte.server.apis.publicapi.authorization.Scope import io.airbyte.server.apis.publicapi.constants.DELETE import io.airbyte.server.apis.publicapi.constants.DESTINATIONS_PATH import io.airbyte.server.apis.publicapi.constants.DESTINATIONS_WITH_ID_PATH @@ -26,7 +27,7 @@ import io.airbyte.server.apis.publicapi.constants.PUT import io.airbyte.server.apis.publicapi.helpers.getActorDefinitionIdFromActorName import io.airbyte.server.apis.publicapi.helpers.removeDestinationType import io.airbyte.server.apis.publicapi.mappers.DESTINATION_NAME_TO_DEFINITION_ID -import io.airbyte.server.apis.publicapi.problems.UnprocessableEntityProblem +import io.airbyte.server.apis.publicapi.services.DestinationService import io.micronaut.http.annotation.Controller import io.micronaut.http.annotation.Patch import io.micronaut.scheduling.annotation.ExecuteOn @@ -34,7 +35,6 @@ import io.micronaut.security.annotation.Secured import io.micronaut.security.rules.SecurityRule import jakarta.ws.rs.Path import jakarta.ws.rs.core.Response -import services.DestinationService import java.util.UUID @Controller(DESTINATIONS_PATH) @@ -42,13 +42,13 @@ import java.util.UUID open class DestinationsController( private val destinationService: DestinationService, private val trackingHelper: TrackingHelper, - private val airbyteApiAuthorizationHelper: AirbyteApiAuthorizationHelper, + private val apiAuthorizationHelper: ApiAuthorizationHelper, private val currentUserService: CurrentUserService, ) : PublicDestinationsApi { - @ExecuteOn(AirbyteTaskExecutors.IO) + @ExecuteOn(AirbyteTaskExecutors.PUBLIC_API) override fun 
publicCreateDestination(destinationCreateRequest: DestinationCreateRequest): Response { val userId: UUID = currentUserService.currentUser.userId - airbyteApiAuthorizationHelper.checkWorkspacePermissions( + apiAuthorizationHelper.checkWorkspacePermissions( listOf(destinationCreateRequest.workspaceId.toString()), Scope.WORKSPACE, userId, @@ -99,10 +99,10 @@ open class DestinationsController( .build() } - @ExecuteOn(AirbyteTaskExecutors.IO) + @ExecuteOn(AirbyteTaskExecutors.PUBLIC_API) override fun publicDeleteDestination(destinationId: UUID): Response { val userId: UUID = currentUserService.currentUser.userId - airbyteApiAuthorizationHelper.checkWorkspacePermissions( + apiAuthorizationHelper.checkWorkspacePermissions( listOf(destinationId.toString()), Scope.DESTINATION, userId, @@ -131,10 +131,10 @@ open class DestinationsController( .build() } - @ExecuteOn(AirbyteTaskExecutors.IO) + @ExecuteOn(AirbyteTaskExecutors.PUBLIC_API) override fun publicGetDestination(destinationId: UUID): Response { val userId: UUID = currentUserService.currentUser.userId - airbyteApiAuthorizationHelper.checkWorkspacePermissions( + apiAuthorizationHelper.checkWorkspacePermissions( listOf(destinationId.toString()), Scope.DESTINATION, userId, @@ -163,7 +163,7 @@ open class DestinationsController( .build() } - @ExecuteOn(AirbyteTaskExecutors.IO) + @ExecuteOn(AirbyteTaskExecutors.PUBLIC_API) override fun listDestinations( workspaceIds: MutableList?, includeDeleted: Boolean?, @@ -171,7 +171,7 @@ open class DestinationsController( offset: Int?, ): Response { val userId: UUID = currentUserService.currentUser.userId - airbyteApiAuthorizationHelper.checkWorkspacePermissions( + apiAuthorizationHelper.checkWorkspacePermissions( workspaceIds?.map { it.toString() } ?: emptyList(), Scope.WORKSPACES, userId, @@ -201,13 +201,13 @@ open class DestinationsController( @Path("/{destinationId}") @Patch - @ExecuteOn(AirbyteTaskExecutors.IO) + @ExecuteOn(AirbyteTaskExecutors.PUBLIC_API) override fun 
patchDestination( destinationId: UUID, destinationPatchRequest: DestinationPatchRequest, ): Response { val userId: UUID = currentUserService.currentUser.userId - airbyteApiAuthorizationHelper.checkWorkspacePermissions( + apiAuthorizationHelper.checkWorkspacePermissions( listOf(destinationId.toString()), Scope.DESTINATION, userId, @@ -241,13 +241,13 @@ open class DestinationsController( } @Path("/{destinationId}") - @ExecuteOn(AirbyteTaskExecutors.IO) + @ExecuteOn(AirbyteTaskExecutors.PUBLIC_API) override fun putDestination( destinationId: UUID, destinationPutRequest: DestinationPutRequest, ): Response { val userId: UUID = currentUserService.currentUser.userId - airbyteApiAuthorizationHelper.checkWorkspacePermissions( + apiAuthorizationHelper.checkWorkspacePermissions( listOf(destinationId.toString()), Scope.DESTINATION, userId, diff --git a/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/controllers/JobsController.kt b/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/controllers/JobsController.kt index 61800810cad..f8b86d125f7 100644 --- a/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/controllers/JobsController.kt +++ b/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/controllers/JobsController.kt @@ -4,10 +4,10 @@ package io.airbyte.server.apis.publicapi.controllers -import io.airbyte.api.model.generated.JobListForWorkspacesRequestBody.OrderByFieldEnum -import io.airbyte.api.model.generated.JobListForWorkspacesRequestBody.OrderByMethodEnum import io.airbyte.api.model.generated.PermissionType -import io.airbyte.commons.enums.Enums +import io.airbyte.commons.server.authorization.ApiAuthorizationHelper +import io.airbyte.commons.server.authorization.Scope +import io.airbyte.commons.server.errors.problems.UnprocessableEntityProblem import io.airbyte.commons.server.scheduling.AirbyteTaskExecutors import io.airbyte.commons.server.support.CurrentUserService import 
io.airbyte.public_api.generated.PublicJobsApi @@ -16,17 +16,15 @@ import io.airbyte.public_api.model.generated.JobCreateRequest import io.airbyte.public_api.model.generated.JobStatusEnum import io.airbyte.public_api.model.generated.JobTypeEnum import io.airbyte.server.apis.publicapi.apiTracking.TrackingHelper -import io.airbyte.server.apis.publicapi.authorization.AirbyteApiAuthorizationHelper -import io.airbyte.server.apis.publicapi.authorization.Scope import io.airbyte.server.apis.publicapi.constants.DELETE import io.airbyte.server.apis.publicapi.constants.GET import io.airbyte.server.apis.publicapi.constants.JOBS_PATH import io.airbyte.server.apis.publicapi.constants.JOBS_WITH_ID_PATH import io.airbyte.server.apis.publicapi.constants.POST import io.airbyte.server.apis.publicapi.filters.JobsFilter -import io.airbyte.server.apis.publicapi.problems.BadRequestProblem -import io.airbyte.server.apis.publicapi.problems.UnprocessableEntityProblem +import io.airbyte.server.apis.publicapi.helpers.orderByToFieldAndMethod import io.airbyte.server.apis.publicapi.services.ConnectionService +import io.airbyte.server.apis.publicapi.services.JobService import io.micronaut.http.annotation.Controller import io.micronaut.scheduling.annotation.ExecuteOn import io.micronaut.security.annotation.Secured @@ -36,7 +34,6 @@ import jakarta.ws.rs.GET import jakarta.ws.rs.Path import jakarta.ws.rs.PathParam import jakarta.ws.rs.core.Response -import services.JobService import java.time.OffsetDateTime import java.util.UUID @@ -46,17 +43,17 @@ open class JobsController( private val jobService: JobService, private val connectionService: ConnectionService, private val trackingHelper: TrackingHelper, - private val airbyteApiAuthorizationHelper: AirbyteApiAuthorizationHelper, + private val apiAuthorizationHelper: ApiAuthorizationHelper, private val currentUserService: CurrentUserService, ) : PublicJobsApi { @DELETE @Path("/{jobId}") - @ExecuteOn(AirbyteTaskExecutors.IO) + 
@ExecuteOn(AirbyteTaskExecutors.PUBLIC_API) override fun publicCancelJob( @PathParam("jobId") jobId: Long, ): Response { val userId: UUID = currentUserService.currentUser.userId - airbyteApiAuthorizationHelper.checkWorkspacePermissions( + apiAuthorizationHelper.checkWorkspacePermissions( listOf(jobId.toString()), Scope.JOB, userId, @@ -86,10 +83,10 @@ open class JobsController( .build() } - @ExecuteOn(AirbyteTaskExecutors.IO) + @ExecuteOn(AirbyteTaskExecutors.PUBLIC_API) override fun publicCreateJob(jobCreateRequest: JobCreateRequest): Response { val userId: UUID = currentUserService.currentUser.userId - airbyteApiAuthorizationHelper.checkWorkspacePermissions( + apiAuthorizationHelper.checkWorkspacePermissions( listOf(jobCreateRequest.connectionId.toString()), Scope.CONNECTION, userId, @@ -163,12 +160,12 @@ open class JobsController( @GET @Path("/{jobId}") - @ExecuteOn(AirbyteTaskExecutors.IO) + @ExecuteOn(AirbyteTaskExecutors.PUBLIC_API) override fun getJob( @PathParam("jobId") jobId: Long, ): Response { val userId: UUID = currentUserService.currentUser.userId - airbyteApiAuthorizationHelper.checkWorkspacePermissions( + apiAuthorizationHelper.checkWorkspacePermissions( listOf(jobId.toString()), Scope.JOB, userId, @@ -198,7 +195,7 @@ open class JobsController( .build() } - @ExecuteOn(AirbyteTaskExecutors.IO) + @ExecuteOn(AirbyteTaskExecutors.PUBLIC_API) override fun listJobs( connectionId: UUID?, limit: Int?, @@ -214,14 +211,14 @@ open class JobsController( ): Response { val userId: UUID = currentUserService.currentUser.userId if (connectionId != null) { - airbyteApiAuthorizationHelper.checkWorkspacePermissions( + apiAuthorizationHelper.checkWorkspacePermissions( listOf(connectionId.toString()), Scope.CONNECTION, userId, PermissionType.WORKSPACE_READER, ) } else { - airbyteApiAuthorizationHelper.checkWorkspacePermissions( + apiAuthorizationHelper.checkWorkspacePermissions( workspaceIds?.map { it.toString() } ?: emptyList(), Scope.WORKSPACES, userId, @@ -286,23 
+283,4 @@ open class JobsController( .entity(jobsResponse) .build() } - - private fun orderByToFieldAndMethod(orderBy: String?): Pair { - var field: OrderByFieldEnum = OrderByFieldEnum.CREATEDAT - var method: OrderByMethodEnum = OrderByMethodEnum.ASC - if (orderBy != null) { - val pattern: java.util.regex.Pattern = java.util.regex.Pattern.compile("([a-zA-Z0-9]+)|(ASC|DESC)") - val matcher: java.util.regex.Matcher = pattern.matcher(orderBy) - if (!matcher.find()) { - throw BadRequestProblem("Invalid order by clause provided: $orderBy") - } - field = - Enums.toEnum(matcher.group(1), OrderByFieldEnum::class.java) - .orElseThrow { BadRequestProblem("Invalid order by clause provided: $orderBy") } - method = - Enums.toEnum(matcher.group(2), OrderByMethodEnum::class.java) - .orElseThrow { BadRequestProblem("Invalid order by clause provided: $orderBy") } - } - return Pair(field, method) - } } diff --git a/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/controllers/SourcesController.kt b/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/controllers/SourcesController.kt index fc4f6c86184..f7bb2b272c3 100644 --- a/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/controllers/SourcesController.kt +++ b/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/controllers/SourcesController.kt @@ -6,6 +6,9 @@ package io.airbyte.server.apis.publicapi.controllers import com.fasterxml.jackson.databind.node.ObjectNode import io.airbyte.api.model.generated.PermissionType +import io.airbyte.commons.server.authorization.ApiAuthorizationHelper +import io.airbyte.commons.server.authorization.Scope +import io.airbyte.commons.server.errors.problems.UnprocessableEntityProblem import io.airbyte.commons.server.scheduling.AirbyteTaskExecutors import io.airbyte.commons.server.support.CurrentUserService import io.airbyte.public_api.generated.PublicSourcesApi @@ -14,8 +17,6 @@ import io.airbyte.public_api.model.generated.SourceCreateRequest import 
io.airbyte.public_api.model.generated.SourcePatchRequest import io.airbyte.public_api.model.generated.SourcePutRequest import io.airbyte.server.apis.publicapi.apiTracking.TrackingHelper -import io.airbyte.server.apis.publicapi.authorization.AirbyteApiAuthorizationHelper -import io.airbyte.server.apis.publicapi.authorization.Scope import io.airbyte.server.apis.publicapi.constants.DELETE import io.airbyte.server.apis.publicapi.constants.GET import io.airbyte.server.apis.publicapi.constants.PATCH @@ -27,7 +28,6 @@ import io.airbyte.server.apis.publicapi.constants.SOURCE_TYPE import io.airbyte.server.apis.publicapi.helpers.getActorDefinitionIdFromActorName import io.airbyte.server.apis.publicapi.helpers.removeSourceTypeNode import io.airbyte.server.apis.publicapi.mappers.SOURCE_NAME_TO_DEFINITION_ID -import io.airbyte.server.apis.publicapi.problems.UnprocessableEntityProblem import io.airbyte.server.apis.publicapi.services.SourceService import io.micronaut.http.annotation.Controller import io.micronaut.http.annotation.Patch @@ -44,13 +44,13 @@ import java.util.UUID open class SourcesController( private val sourceService: SourceService, private val trackingHelper: TrackingHelper, - private val airbyteApiAuthorizationHelper: AirbyteApiAuthorizationHelper, + private val apiAuthorizationHelper: ApiAuthorizationHelper, private val currentUserService: CurrentUserService, ) : PublicSourcesApi { - @ExecuteOn(AirbyteTaskExecutors.IO) + @ExecuteOn(AirbyteTaskExecutors.PUBLIC_API) override fun publicCreateSource(sourceCreateRequest: SourceCreateRequest): Response { val userId: UUID = currentUserService.currentUser.userId - airbyteApiAuthorizationHelper.checkWorkspacePermissions( + apiAuthorizationHelper.checkWorkspacePermissions( listOf(sourceCreateRequest.workspaceId.toString()), Scope.WORKSPACE, userId, @@ -102,10 +102,10 @@ open class SourcesController( .build() } - @ExecuteOn(AirbyteTaskExecutors.IO) + @ExecuteOn(AirbyteTaskExecutors.PUBLIC_API) override fun 
publicDeleteSource(sourceId: UUID): Response { val userId: UUID = currentUserService.currentUser.userId - airbyteApiAuthorizationHelper.checkWorkspacePermissions( + apiAuthorizationHelper.checkWorkspacePermissions( listOf(sourceId.toString()), Scope.SOURCE, userId, @@ -135,10 +135,10 @@ open class SourcesController( .build() } - @ExecuteOn(AirbyteTaskExecutors.IO) + @ExecuteOn(AirbyteTaskExecutors.PUBLIC_API) override fun publicGetSource(sourceId: UUID): Response { val userId: UUID = currentUserService.currentUser.userId - airbyteApiAuthorizationHelper.checkWorkspacePermissions( + apiAuthorizationHelper.checkWorkspacePermissions( listOf(sourceId.toString()), Scope.SOURCE, userId, @@ -168,10 +168,10 @@ open class SourcesController( .build() } - @ExecuteOn(AirbyteTaskExecutors.IO) + @ExecuteOn(AirbyteTaskExecutors.PUBLIC_API) override fun initiateOAuth(initiateOauthRequest: InitiateOauthRequest): Response { val userId: UUID = currentUserService.currentUser.userId - airbyteApiAuthorizationHelper.checkWorkspacePermissions( + apiAuthorizationHelper.checkWorkspacePermissions( listOf(initiateOauthRequest.workspaceId.toString()), Scope.WORKSPACE, userId, @@ -180,7 +180,7 @@ open class SourcesController( return sourceService.controllerInitiateOAuth(initiateOauthRequest) } - @ExecuteOn(AirbyteTaskExecutors.IO) + @ExecuteOn(AirbyteTaskExecutors.PUBLIC_API) override fun listSources( workspaceIds: MutableList?, includeDeleted: Boolean?, @@ -188,8 +188,8 @@ open class SourcesController( offset: Int?, ): Response { val userId: UUID = currentUserService.currentUser.userId - airbyteApiAuthorizationHelper.checkWorkspacePermissions( - workspaceIds?.map { toString() } ?: emptyList(), + apiAuthorizationHelper.checkWorkspacePermissions( + workspaceIds?.map { it.toString() } ?: emptyList(), Scope.WORKSPACES, userId, PermissionType.WORKSPACE_READER, @@ -219,13 +219,13 @@ open class SourcesController( @Patch @Path("/{sourceId}") - @ExecuteOn(AirbyteTaskExecutors.IO) + 
@ExecuteOn(AirbyteTaskExecutors.PUBLIC_API) override fun patchSource( sourceId: UUID, sourcePatchRequest: SourcePatchRequest, ): Response { val userId: UUID = currentUserService.currentUser.userId - airbyteApiAuthorizationHelper.checkWorkspacePermissions( + apiAuthorizationHelper.checkWorkspacePermissions( listOf(sourceId.toString()), Scope.SOURCE, userId, @@ -259,13 +259,13 @@ open class SourcesController( } @Path("/{sourceId}") - @ExecuteOn(AirbyteTaskExecutors.IO) + @ExecuteOn(AirbyteTaskExecutors.PUBLIC_API) override fun putSource( sourceId: UUID, sourcePutRequest: SourcePutRequest, ): Response { val userId: UUID = currentUserService.currentUser.userId - airbyteApiAuthorizationHelper.checkWorkspacePermissions( + apiAuthorizationHelper.checkWorkspacePermissions( listOf(sourceId.toString()), Scope.SOURCE, userId, diff --git a/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/controllers/StreamsController.kt b/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/controllers/StreamsController.kt index f3a226d8c17..dbfcde9d95c 100644 --- a/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/controllers/StreamsController.kt +++ b/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/controllers/StreamsController.kt @@ -11,17 +11,18 @@ import io.airbyte.api.model.generated.AirbyteStreamAndConfiguration import io.airbyte.api.model.generated.DestinationSyncMode import io.airbyte.api.model.generated.PermissionType import io.airbyte.api.model.generated.SyncMode +import io.airbyte.commons.server.authorization.ApiAuthorizationHelper +import io.airbyte.commons.server.authorization.Scope +import io.airbyte.commons.server.errors.problems.UnexpectedProblem import io.airbyte.commons.server.scheduling.AirbyteTaskExecutors import io.airbyte.commons.server.support.CurrentUserService import io.airbyte.public_api.generated.PublicStreamsApi import io.airbyte.public_api.model.generated.ConnectionSyncModeEnum import 
io.airbyte.public_api.model.generated.StreamProperties import io.airbyte.server.apis.publicapi.apiTracking.TrackingHelper -import io.airbyte.server.apis.publicapi.authorization.AirbyteApiAuthorizationHelper -import io.airbyte.server.apis.publicapi.authorization.Scope import io.airbyte.server.apis.publicapi.constants.GET import io.airbyte.server.apis.publicapi.constants.STREAMS_PATH -import io.airbyte.server.apis.publicapi.problems.UnexpectedProblem +import io.airbyte.server.apis.publicapi.services.DestinationService import io.airbyte.server.apis.publicapi.services.SourceService import io.micronaut.http.HttpStatus import io.micronaut.http.annotation.Controller @@ -30,7 +31,6 @@ import io.micronaut.security.annotation.Secured import io.micronaut.security.rules.SecurityRule import jakarta.ws.rs.core.Response import org.slf4j.LoggerFactory -import services.DestinationService import java.io.IOException import java.util.UUID @@ -40,14 +40,14 @@ class StreamsController( private val sourceService: SourceService, private val destinationService: DestinationService, private val trackingHelper: TrackingHelper, - private val airbyteApiAuthorizationHelper: AirbyteApiAuthorizationHelper, + private val apiAuthorizationHelper: ApiAuthorizationHelper, private val currentUserService: CurrentUserService, ) : PublicStreamsApi { companion object { private val log: org.slf4j.Logger? 
= LoggerFactory.getLogger(StreamsController::class.java) } - @ExecuteOn(AirbyteTaskExecutors.IO) + @ExecuteOn(AirbyteTaskExecutors.PUBLIC_API) override fun getStreamProperties( sourceId: UUID, destinationId: UUID?, @@ -55,13 +55,13 @@ class StreamsController( ): Response { // Check permission for source and destination val userId: UUID = currentUserService.currentUser.userId - airbyteApiAuthorizationHelper.checkWorkspacePermissions( + apiAuthorizationHelper.checkWorkspacePermissions( listOf(sourceId.toString()), Scope.SOURCE, userId, PermissionType.WORKSPACE_READER, ) - airbyteApiAuthorizationHelper.checkWorkspacePermissions( + apiAuthorizationHelper.checkWorkspacePermissions( listOf(destinationId!!.toString()), Scope.DESTINATION, userId, diff --git a/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/controllers/WorkspacesController.kt b/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/controllers/WorkspacesController.kt index fb9e3182b1e..768a19567d9 100644 --- a/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/controllers/WorkspacesController.kt +++ b/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/controllers/WorkspacesController.kt @@ -5,14 +5,16 @@ package io.airbyte.server.apis.publicapi.controllers import io.airbyte.api.model.generated.PermissionType +import io.airbyte.commons.auth.OrganizationAuthRole +import io.airbyte.commons.server.authorization.ApiAuthorizationHelper +import io.airbyte.commons.server.authorization.Scope import io.airbyte.commons.server.scheduling.AirbyteTaskExecutors import io.airbyte.commons.server.support.CurrentUserService +import io.airbyte.config.persistence.OrganizationPersistence.DEFAULT_ORGANIZATION_ID import io.airbyte.public_api.generated.PublicWorkspacesApi import io.airbyte.public_api.model.generated.WorkspaceCreateRequest import io.airbyte.public_api.model.generated.WorkspaceOAuthCredentialsRequest import io.airbyte.public_api.model.generated.WorkspaceUpdateRequest 
-import io.airbyte.server.apis.publicapi.authorization.AirbyteApiAuthorizationHelper -import io.airbyte.server.apis.publicapi.authorization.Scope import io.airbyte.server.apis.publicapi.constants.WORKSPACES_PATH import io.airbyte.server.apis.publicapi.services.WorkspaceService import io.github.oshai.kotlinlogging.KotlinLogging @@ -30,18 +32,18 @@ val logger = KotlinLogging.logger {} @Controller(WORKSPACES_PATH) @Secured(SecurityRule.IS_AUTHENTICATED) open class WorkspacesController( - private val workspaceService: WorkspaceService, - private val airbyteApiAuthorizationHelper: AirbyteApiAuthorizationHelper, + protected val workspaceService: WorkspaceService, + private val apiAuthorizationHelper: ApiAuthorizationHelper, private val currentUserService: CurrentUserService, ) : PublicWorkspacesApi { @Path("/{workspaceId}/oauthCredentials") - @ExecuteOn(AirbyteTaskExecutors.IO) + @ExecuteOn(AirbyteTaskExecutors.PUBLIC_API) override fun createOrUpdateWorkspaceOAuthCredentials( workspaceId: UUID?, workspaceOAuthCredentialsRequest: WorkspaceOAuthCredentialsRequest?, ): Response { val userId: UUID = currentUserService.currentUser.userId - airbyteApiAuthorizationHelper.checkWorkspacePermissions( + apiAuthorizationHelper.checkWorkspacePermissions( listOf(workspaceId!!.toString()), Scope.WORKSPACE, userId, @@ -53,17 +55,22 @@ open class WorkspacesController( ) } - @ExecuteOn(AirbyteTaskExecutors.IO) + @ExecuteOn(AirbyteTaskExecutors.PUBLIC_API) override fun publicCreateWorkspace(workspaceCreateRequest: WorkspaceCreateRequest?): Response { - // As long as user is authenticated, they can proceed. + // Now that we have orgs everywhere, ensure the user is at least an organization editor + apiAuthorizationHelper.ensureUserHasAnyRequiredRoleOrThrow( + Scope.ORGANIZATION, + listOf(DEFAULT_ORGANIZATION_ID.toString()), + setOf(OrganizationAuthRole.ORGANIZATION_EDITOR), + ) return workspaceService.controllerCreateWorkspace(workspaceCreateRequest!!) 
} @Path("/{workspaceId}") - @ExecuteOn(AirbyteTaskExecutors.IO) + @ExecuteOn(AirbyteTaskExecutors.PUBLIC_API) override fun publicDeleteWorkspace(workspaceId: UUID?): Response { val userId: UUID = currentUserService.currentUser.userId - airbyteApiAuthorizationHelper.checkWorkspacePermissions( + apiAuthorizationHelper.checkWorkspacePermissions( listOf(workspaceId!!.toString()), Scope.WORKSPACE, userId, @@ -73,10 +80,10 @@ open class WorkspacesController( } @Path("/{workspaceId}") - @ExecuteOn(AirbyteTaskExecutors.IO) + @ExecuteOn(AirbyteTaskExecutors.PUBLIC_API) override fun publicGetWorkspace(workspaceId: UUID?): Response { val userId: UUID = currentUserService.currentUser.userId - airbyteApiAuthorizationHelper.checkWorkspacePermissions( + apiAuthorizationHelper.checkWorkspacePermissions( listOf(workspaceId!!.toString()), Scope.WORKSPACE, userId, @@ -85,7 +92,7 @@ open class WorkspacesController( return workspaceService.controllerGetWorkspace(workspaceId) } - @ExecuteOn(AirbyteTaskExecutors.IO) + @ExecuteOn(AirbyteTaskExecutors.PUBLIC_API) override fun publicListWorkspaces( workspaceIds: MutableList?, includeDeleted: Boolean?, @@ -94,7 +101,7 @@ open class WorkspacesController( ): Response { val userId: UUID = currentUserService.currentUser.userId logger.debug { "listing workspaces: $workspaceIds" } - airbyteApiAuthorizationHelper.checkWorkspacePermissions( + apiAuthorizationHelper.checkWorkspacePermissions( workspaceIds?.map { it.toString() } ?: emptyList(), Scope.WORKSPACES, userId, @@ -110,13 +117,13 @@ open class WorkspacesController( @Patch @Path("/{workspaceId}") - @ExecuteOn(AirbyteTaskExecutors.IO) + @ExecuteOn(AirbyteTaskExecutors.PUBLIC_API) override fun publicUpdateWorkspace( workspaceId: UUID?, workspaceUpdateRequest: WorkspaceUpdateRequest?, ): Response { val userId: UUID = currentUserService.currentUser.userId - airbyteApiAuthorizationHelper.checkWorkspacePermissions( + apiAuthorizationHelper.checkWorkspacePermissions( listOf(workspaceId!!.toString()), 
Scope.WORKSPACE, userId, diff --git a/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/errorHandlers/ConfigClientErrorHandler.kt b/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/errorHandlers/ConfigClientErrorHandler.kt index 9f280000539..bfdd87a8e20 100644 --- a/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/errorHandlers/ConfigClientErrorHandler.kt +++ b/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/errorHandlers/ConfigClientErrorHandler.kt @@ -7,20 +7,31 @@ package io.airbyte.server.apis.publicapi.errorHandlers import io.airbyte.commons.json.Jsons +import io.airbyte.commons.server.errors.ValueConflictKnownException import io.airbyte.commons.server.errors.problems.AbstractThrowableProblem +import io.airbyte.commons.server.errors.problems.BadRequestProblem +import io.airbyte.commons.server.errors.problems.ConflictProblem +import io.airbyte.commons.server.errors.problems.InvalidApiKeyProblem +import io.airbyte.commons.server.errors.problems.OAuthCallbackFailureProblem +import io.airbyte.commons.server.errors.problems.ResourceNotFoundProblem +import io.airbyte.commons.server.errors.problems.SyncConflictProblem +import io.airbyte.commons.server.errors.problems.UnexpectedProblem +import io.airbyte.commons.server.errors.problems.UnprocessableEntityProblem import io.airbyte.config.persistence.ConfigNotFoundException import io.airbyte.public_api.model.generated.ConnectionCreateRequest import io.airbyte.server.apis.publicapi.constants.MESSAGE -import io.airbyte.server.apis.publicapi.problems.InvalidApiKeyProblem -import io.airbyte.server.apis.publicapi.problems.ResourceNotFoundProblem -import io.airbyte.server.apis.publicapi.problems.SyncConflictProblem -import io.airbyte.server.apis.publicapi.problems.UnexpectedProblem -import io.airbyte.server.apis.publicapi.problems.UnprocessableEntityProblem +import io.airbyte.server.apis.publicapi.exceptions.OAuthCallbackException import 
io.airbyte.validation.json.JsonValidationException import io.micronaut.http.HttpResponse import io.micronaut.http.HttpStatus import java.io.IOException +const val DEFAULT_CONFLICT_MESSAGE = "Could not fulfill request" +const val DEFAULT_INTERNAL_SERVER_ERROR_MESSAGE = + "An unexpected problem has occurred. If this is an error that needs to be addressed, please submit a pull request or github issue." +const val DEFAULT_UNPROCESSABLE_ENTITY_MESSAGE = "The body of the request was not understood" +const val JOB_NOT_RUNNING_MESSAGE = "Job is not currently running" + /** * Maps config API client response statuses to problems. */ @@ -38,21 +49,19 @@ object ConfigClientErrorHandler { when (response.status) { HttpStatus.NOT_FOUND -> throw ResourceNotFoundProblem(resourceId) HttpStatus.CONFLICT -> { - val couldNotFulfillRequest = "Could not fulfill request" val message: String = response.getBody(MutableMap::class.java) - .orElseGet { mutableMapOf(Pair(MESSAGE, couldNotFulfillRequest)) } - .getOrDefault(MESSAGE, couldNotFulfillRequest).toString() + .orElseGet { mutableMapOf(Pair(MESSAGE, DEFAULT_CONFLICT_MESSAGE)) } + .getOrDefault(MESSAGE, DEFAULT_CONFLICT_MESSAGE).toString() throw SyncConflictProblem(message) } HttpStatus.UNAUTHORIZED -> throw InvalidApiKeyProblem() HttpStatus.UNPROCESSABLE_ENTITY -> { - val defaultErrorMessage = "The body of the request was not understood" val message: String = response.getBody(MutableMap::class.java) - .orElseGet { mutableMapOf(Pair(MESSAGE, defaultErrorMessage)) } - .getOrDefault(MESSAGE, defaultErrorMessage).toString() + .orElseGet { mutableMapOf(Pair(MESSAGE, DEFAULT_UNPROCESSABLE_ENTITY_MESSAGE)) } + .getOrDefault(MESSAGE, DEFAULT_UNPROCESSABLE_ENTITY_MESSAGE).toString() // Exclude the part of a schema validation message that's ugly if it's there throw UnprocessableEntityProblem(message.split("\nSchema".toRegex()).dropLastWhile { it.isEmpty() }.toTypedArray()[0]) } @@ -73,53 +82,47 @@ object ConfigClientErrorHandler { ) { when 
(throwable) { is ConfigNotFoundException -> throw ResourceNotFoundProblem(resourceId) - is SyncConflictProblem -> { - val couldNotFulfillRequest = "Could not fulfill request" - val message = - Jsons.deserialize(throwable.message, MutableMap::class.java).orEmpty() - .getOrDefault(MESSAGE, couldNotFulfillRequest).toString() + is ValueConflictKnownException -> { + val message = Jsons.serialize(mapOf(MESSAGE to (throwable.message ?: DEFAULT_CONFLICT_MESSAGE))) throw SyncConflictProblem(message) } - is JsonValidationException, is IOException -> { - val defaultErrorMessage = "The body of the request was not understood" - val message: String = - Jsons.deserialize(throwable.message, MutableMap::class.java).orEmpty() - .getOrDefault(MESSAGE, defaultErrorMessage).toString() - // Exclude the part of a schema validation message that's ugly if it's there - throw UnprocessableEntityProblem(message.split("\nSchema".toRegex()).dropLastWhile { it.isEmpty() }.toTypedArray()[0]) - } - else -> throw UnexpectedProblem(HttpStatus.INTERNAL_SERVER_ERROR) - } - } - /** - * Maps sync exceptions to problems. - * - * @param throwable throwable - * @param resourceId resource ID passed in with the request - */ - fun handleSyncError( - throwable: Throwable, - resourceId: String?, - ) { - when (throwable) { - is ConfigNotFoundException -> throw ResourceNotFoundProblem(resourceId) - is SyncConflictProblem -> { - val couldNotFulfillRequest = "Could not fulfill request" + is IllegalStateException -> { + // Many of the job failures share this exception type. + // If a job has already been canceled it throws this exception with a cryptic message. 
+ val isFailedCancellation = throwable.message?.contains("Failed to cancel") val message = - Jsons.deserialize(throwable.message, MutableMap::class.java).orEmpty() - .getOrDefault(MESSAGE, couldNotFulfillRequest).toString() - throw SyncConflictProblem(message) + if (isFailedCancellation == true) { + Jsons.serialize(mapOf(MESSAGE to JOB_NOT_RUNNING_MESSAGE)) + } else { + Jsons.serialize(mapOf(MESSAGE to (throwable.message ?: DEFAULT_CONFLICT_MESSAGE))) + } + throw ConflictProblem(message) } - is JsonValidationException, is IOException -> { - val defaultErrorMessage = "The body of the request was not understood" - val message: String = - Jsons.deserialize(throwable.message, MutableMap::class.java).orEmpty() - .getOrDefault(MESSAGE, defaultErrorMessage).toString() - // Exclude the part of a schema validation message that's ugly if it's there - throw UnprocessableEntityProblem(message.split("\nSchema".toRegex()).dropLastWhile { it.isEmpty() }.toTypedArray()[0]) + + is JsonValidationException -> { + val error = throwable.message?.substringBefore("\nSchema:") ?: DEFAULT_UNPROCESSABLE_ENTITY_MESSAGE + val message = Jsons.serialize(mapOf(MESSAGE to error)) + throw UnprocessableEntityProblem(message) + } + + is IOException -> { + val message = Jsons.serialize(mapOf(MESSAGE to (throwable.message ?: DEFAULT_UNPROCESSABLE_ENTITY_MESSAGE))) + throw UnprocessableEntityProblem(message) + } + + is OAuthCallbackException -> { + throw OAuthCallbackFailureProblem(throwable.message) + } + + else -> { + val message = throwable.message ?: DEFAULT_INTERNAL_SERVER_ERROR_MESSAGE + if (message.contains("Could not find job with id")) { + throw ConflictProblem(JOB_NOT_RUNNING_MESSAGE) + } else { + throw UnexpectedProblem(HttpStatus.INTERNAL_SERVER_ERROR, message) + } } - else -> throw UnexpectedProblem(HttpStatus.INTERNAL_SERVER_ERROR) } } @@ -154,8 +157,10 @@ object ConfigClientErrorHandler { * @param response HttpResponse, most likely from the config api */ private fun 
passThroughBadStatusCode(response: HttpResponse<*>) { - if (response.status.code >= HttpStatus.BAD_REQUEST.code) { - throw UnexpectedProblem(response.status) + if (response.status.code >= HttpStatus.INTERNAL_SERVER_ERROR.code) { + throw UnexpectedProblem(response.status, response.body()?.toString()) + } else if (response.status.code >= HttpStatus.BAD_REQUEST.code) { + throw BadRequestProblem("${response.status.reason}: ${response.body}") } } } diff --git a/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/exceptions/OAuthCallbackException.kt b/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/exceptions/OAuthCallbackException.kt new file mode 100644 index 00000000000..08bba8d69fc --- /dev/null +++ b/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/exceptions/OAuthCallbackException.kt @@ -0,0 +1,3 @@ +package io.airbyte.server.apis.publicapi.exceptions + +class OAuthCallbackException(message: String?) : Exception(message) diff --git a/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/helpers/AirbyteCatalogHelper.kt b/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/helpers/AirbyteCatalogHelper.kt index 15375c946ee..84183d803f7 100644 --- a/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/helpers/AirbyteCatalogHelper.kt +++ b/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/helpers/AirbyteCatalogHelper.kt @@ -18,15 +18,16 @@ import io.airbyte.api.model.generated.AirbyteStreamAndConfiguration import io.airbyte.api.model.generated.AirbyteStreamConfiguration import io.airbyte.api.model.generated.DestinationSyncMode import io.airbyte.api.model.generated.SyncMode +import io.airbyte.commons.server.errors.problems.ConnectionConfigurationProblem +import io.airbyte.commons.server.errors.problems.ConnectionConfigurationProblem.Companion.duplicateStream +import io.airbyte.commons.server.errors.problems.ConnectionConfigurationProblem.Companion.invalidStreamName +import 
io.airbyte.commons.server.errors.problems.UnexpectedProblem import io.airbyte.public_api.model.generated.AirbyteApiConnectionSchedule import io.airbyte.public_api.model.generated.ConnectionSyncModeEnum import io.airbyte.public_api.model.generated.ScheduleTypeEnum import io.airbyte.public_api.model.generated.StreamConfiguration import io.airbyte.public_api.model.generated.StreamConfigurations import io.airbyte.server.apis.publicapi.mappers.ConnectionReadMapper -import io.airbyte.server.apis.publicapi.problems.ConnectionConfigurationProblem.Companion.duplicateStream -import io.airbyte.server.apis.publicapi.problems.ConnectionConfigurationProblem.Companion.invalidStreamName -import io.airbyte.server.apis.publicapi.problems.UnexpectedProblem import io.micronaut.http.HttpStatus import jakarta.validation.Valid import org.slf4j.LoggerFactory @@ -56,9 +57,19 @@ object AirbyteCatalogHelper { * * @param config config to be set */ - fun setConfigDefaultFullRefreshOverwrite(config: AirbyteStreamConfiguration?) 
{ - config!!.syncMode = SyncMode.FULL_REFRESH - config.destinationSyncMode = DestinationSyncMode.OVERWRITE + fun updateConfigDefaultFullRefreshOverwrite(config: AirbyteStreamConfiguration?): AirbyteStreamConfiguration { + val updatedStreamConfiguration = AirbyteStreamConfiguration() + config?.let { + updatedStreamConfiguration.aliasName = config.aliasName + updatedStreamConfiguration.cursorField = config.cursorField + updatedStreamConfiguration.fieldSelectionEnabled = config.fieldSelectionEnabled + updatedStreamConfiguration.selected = config.selected + updatedStreamConfiguration.selectedFields = config.selectedFields + updatedStreamConfiguration.suggested = config.suggested + } + updatedStreamConfiguration.destinationSyncMode = DestinationSyncMode.OVERWRITE + updatedStreamConfiguration.syncMode = SyncMode.FULL_REFRESH + return updatedStreamConfiguration } /** @@ -66,11 +77,17 @@ object AirbyteCatalogHelper { * * @param airbyteCatalog The catalog to be modified */ - fun setAllStreamsFullRefreshOverwrite(airbyteCatalog: AirbyteCatalog) { - for (schemaStreams in airbyteCatalog.streams) { - val config = schemaStreams.config!! 
- setConfigDefaultFullRefreshOverwrite(config) - } + fun updateAllStreamsFullRefreshOverwrite(airbyteCatalog: AirbyteCatalog): AirbyteCatalog { + val updatedAirbyteCatalog = AirbyteCatalog() + updatedAirbyteCatalog.streams = + airbyteCatalog.streams.stream().map { stream: AirbyteStreamAndConfiguration -> + val updatedAirbyteStreamAndConfiguration = AirbyteStreamAndConfiguration() + updatedAirbyteStreamAndConfiguration.config = updateConfigDefaultFullRefreshOverwrite(stream.config) + updatedAirbyteStreamAndConfiguration.stream = stream.stream + updatedAirbyteStreamAndConfiguration + }.toList() + + return updatedAirbyteCatalog } /** @@ -123,7 +140,7 @@ object AirbyteCatalogHelper { if (connectionSchedule != null) { if (connectionSchedule.scheduleType != null && connectionSchedule.scheduleType === ScheduleTypeEnum.CRON) { if (connectionSchedule.cronExpression == null) { - throw io.airbyte.server.apis.publicapi.problems.ConnectionConfigurationProblem.missingCronExpression() + throw ConnectionConfigurationProblem.missingCronExpression() } try { if (connectionSchedule.cronExpression.endsWith("UTC")) { @@ -141,16 +158,11 @@ object AirbyteCatalogHelper { } catch (e: NumberFormatException) { log.debug("Invalid cron expression: " + connectionSchedule.cronExpression) log.debug("NumberFormatException: $e") - throw io.airbyte.server.apis.publicapi.problems.ConnectionConfigurationProblem.invalidCronExpressionUnderOneHour( - connectionSchedule.cronExpression, - ) + throw ConnectionConfigurationProblem.invalidCronExpressionUnderOneHour(connectionSchedule.cronExpression) } catch (e: IllegalArgumentException) { log.debug("Invalid cron expression: " + connectionSchedule.cronExpression) log.debug("IllegalArgumentException: $e") - throw io.airbyte.server.apis.publicapi.problems.ConnectionConfigurationProblem.invalidCronExpression( - connectionSchedule.cronExpression, - e.message, - ) + throw ConnectionConfigurationProblem.invalidCronExpression(connectionSchedule.cronExpression, 
e.message) } } } @@ -159,6 +171,80 @@ object AirbyteCatalogHelper { // check that the first seconds and hour values are not * } + fun updateAirbyteStreamConfiguration( + config: AirbyteStreamConfiguration, + airbyteStream: AirbyteStream, + streamConfiguration: StreamConfiguration, + ): AirbyteStreamConfiguration { + val updatedStreamConfiguration = AirbyteStreamConfiguration() + // Set stream config as selected + updatedStreamConfiguration.selected = true + updatedStreamConfiguration.aliasName = config.aliasName + updatedStreamConfiguration.fieldSelectionEnabled = config.fieldSelectionEnabled + updatedStreamConfiguration.suggested = config.suggested + + if (streamConfiguration.syncMode == null) { + updatedStreamConfiguration.syncMode = SyncMode.FULL_REFRESH + updatedStreamConfiguration.destinationSyncMode = DestinationSyncMode.OVERWRITE + updatedStreamConfiguration.cursorField = config.cursorField + updatedStreamConfiguration.primaryKey = config.primaryKey + } else { + when (streamConfiguration.syncMode) { + ConnectionSyncModeEnum.FULL_REFRESH_APPEND -> { + updatedStreamConfiguration.syncMode = SyncMode.FULL_REFRESH + updatedStreamConfiguration.destinationSyncMode = DestinationSyncMode.APPEND + updatedStreamConfiguration.cursorField = config.cursorField + updatedStreamConfiguration.primaryKey = config.primaryKey + } + + ConnectionSyncModeEnum.INCREMENTAL_APPEND -> { + updatedStreamConfiguration.syncMode(SyncMode.INCREMENTAL) + updatedStreamConfiguration.destinationSyncMode(DestinationSyncMode.APPEND) + updatedStreamConfiguration.cursorField(selectCursorField(airbyteStream, streamConfiguration)) + updatedStreamConfiguration.primaryKey(selectPrimaryKey(airbyteStream, streamConfiguration)) + } + + ConnectionSyncModeEnum.INCREMENTAL_DEDUPED_HISTORY -> { + updatedStreamConfiguration.syncMode = SyncMode.INCREMENTAL + updatedStreamConfiguration.destinationSyncMode = DestinationSyncMode.APPEND_DEDUP + updatedStreamConfiguration.cursorField = 
selectCursorField(airbyteStream, streamConfiguration) + updatedStreamConfiguration.primaryKey = selectPrimaryKey(airbyteStream, streamConfiguration) + } + + else -> { + updatedStreamConfiguration.syncMode = SyncMode.FULL_REFRESH + updatedStreamConfiguration.destinationSyncMode = DestinationSyncMode.OVERWRITE + updatedStreamConfiguration.cursorField = config.cursorField + updatedStreamConfiguration.primaryKey = config.primaryKey + } + } + } + + return updatedStreamConfiguration + } + + private fun selectCursorField( + airbyteStream: AirbyteStream, + streamConfiguration: StreamConfiguration, + ): List? { + return if (airbyteStream.sourceDefinedCursor != null && airbyteStream.sourceDefinedCursor!!) { + airbyteStream.defaultCursorField + } else if (streamConfiguration.cursorField != null && streamConfiguration.cursorField.isNotEmpty()) { + streamConfiguration.cursorField + } else { + airbyteStream.defaultCursorField + } + } + + private fun selectPrimaryKey( + airbyteStream: AirbyteStream, + streamConfiguration: StreamConfiguration, + ): List>? { + return (airbyteStream.sourceDefinedPrimaryKey ?: emptyList()).ifEmpty { + streamConfiguration.primaryKey + } + } + /** * Validates a stream's configurations and sets those configurations in the * `AirbyteStreamConfiguration` object. Logic comes from @@ -167,119 +253,100 @@ object AirbyteCatalogHelper { * @param streamConfiguration The configuration input of a specific stream provided by the caller. * @param validDestinationSyncModes All the valid destination sync modes for a destination * @param airbyteStream The immutable schema defined by the source - * @param config The configuration of a stream consumed by the config-api * @return True if no exceptions. 
Needed so it can be used inside TrackingHelper.callWithTracker */ - fun setAndValidateStreamConfig( + fun validateStreamConfig( streamConfiguration: StreamConfiguration, - validDestinationSyncModes: List, + validDestinationSyncModes: List, airbyteStream: AirbyteStream, - config: AirbyteStreamConfiguration, ): Boolean { - // Set stream config as selected - config.selected = true if (streamConfiguration.syncMode == null) { - setConfigDefaultFullRefreshOverwrite(config) return true } // validate that sync and destination modes are valid val validCombinedSyncModes: Set = validCombinedSyncModes(airbyteStream.supportedSyncModes, validDestinationSyncModes) if (!validCombinedSyncModes.contains(streamConfiguration.syncMode)) { - throw io.airbyte.server.apis.publicapi.problems.ConnectionConfigurationProblem.handleSyncModeProblem( + throw ConnectionConfigurationProblem.handleSyncModeProblem( streamConfiguration.syncMode, streamConfiguration.name, validCombinedSyncModes, ) } - when (streamConfiguration.syncMode) { - ConnectionSyncModeEnum.FULL_REFRESH_APPEND -> { - config.syncMode = SyncMode.FULL_REFRESH - config.destinationSyncMode = DestinationSyncMode.APPEND - } + when (streamConfiguration.syncMode) { ConnectionSyncModeEnum.INCREMENTAL_APPEND -> { - config.syncMode = SyncMode.INCREMENTAL - config.destinationSyncMode = DestinationSyncMode.APPEND - setAndValidateCursorField(streamConfiguration.cursorField, airbyteStream, config) + validateCursorField(streamConfiguration.cursorField, airbyteStream) } ConnectionSyncModeEnum.INCREMENTAL_DEDUPED_HISTORY -> { - config.syncMode = SyncMode.INCREMENTAL - config.destinationSyncMode = DestinationSyncMode.APPEND_DEDUP - setAndValidateCursorField(streamConfiguration.cursorField, airbyteStream, config) - setAndValidatePrimaryKey(streamConfiguration.primaryKey, airbyteStream, config) + validateCursorField(streamConfiguration.cursorField, airbyteStream) + validatePrimaryKey(streamConfiguration.primaryKey, airbyteStream) } - else -> { - // 
always valid - setConfigDefaultFullRefreshOverwrite(config) - } + else -> {} } return true } - private fun setAndValidateCursorField( + private fun validateCursorField( cursorField: List?, airbyteStream: AirbyteStream, - config: AirbyteStreamConfiguration, ) { if (airbyteStream.sourceDefinedCursor != null && airbyteStream.sourceDefinedCursor!!) { if (!cursorField.isNullOrEmpty()) { // if cursor given is not empty and is NOT the same as the default, throw error - if (java.util.Set.copyOf(cursorField) != java.util.Set.copyOf(airbyteStream.defaultCursorField)) { - throw io.airbyte.server.apis.publicapi.problems.ConnectionConfigurationProblem.sourceDefinedCursorFieldProblem( - airbyteStream.name, - airbyteStream.defaultCursorField!!, - ) + if (java.util.Set.copyOf(cursorField) != java.util.Set.copyOf(airbyteStream.defaultCursorField)) { + throw ConnectionConfigurationProblem.sourceDefinedCursorFieldProblem(airbyteStream.name, airbyteStream.defaultCursorField!!) } } - config.cursorField = airbyteStream.defaultCursorField // this probably isn't necessary and should be already set } else { if (!cursorField.isNullOrEmpty()) { // validate cursor field val validCursorFields: List> = getStreamFields(airbyteStream.jsonSchema!!) 
if (!validCursorFields.contains(cursorField)) { - throw io.airbyte.server.apis.publicapi.problems.ConnectionConfigurationProblem.invalidCursorField(airbyteStream.name, validCursorFields) + throw ConnectionConfigurationProblem.invalidCursorField(airbyteStream.name, validCursorFields) } - config.cursorField = cursorField } else { // no default or given cursor field if (airbyteStream.defaultCursorField == null || airbyteStream.defaultCursorField!!.isEmpty()) { - throw io.airbyte.server.apis.publicapi.problems.ConnectionConfigurationProblem.missingCursorField(airbyteStream.name) + throw ConnectionConfigurationProblem.missingCursorField(airbyteStream.name) } - config.cursorField = airbyteStream.defaultCursorField // this probably isn't necessary and should be already set } } } - private fun setAndValidatePrimaryKey( + private fun validatePrimaryKey( primaryKey: List>?, airbyteStream: AirbyteStream, - config: AirbyteStreamConfiguration, ) { - // if no source defined primary key - if (airbyteStream.sourceDefinedPrimaryKey == null || airbyteStream.sourceDefinedPrimaryKey!!.isEmpty()) { - if (!primaryKey.isNullOrEmpty()) { - // validate primary key - val validPrimaryKey: List> = getStreamFields(airbyteStream.jsonSchema!!) - - // todo maybe check that they don't provide the same primary key twice? - for (singlePrimaryKey in primaryKey) { - if (!validPrimaryKey.contains(singlePrimaryKey)) { // todo double check if the .contains() for list of strings works as intended - throw io.airbyte.server.apis.publicapi.problems.ConnectionConfigurationProblem.invalidPrimaryKey(airbyteStream.name, validPrimaryKey) - } - } - config.primaryKey = primaryKey - } else { - throw io.airbyte.server.apis.publicapi.problems.ConnectionConfigurationProblem.missingPrimaryKey(airbyteStream.name) + // Validate that if a source defined primary key exists, that's the one we use. + // Currently, UI only supports this and there's likely assumptions baked into the platform that mean this needs to be true. 
+ val sourceDefinedPrimaryKeyExists = !airbyteStream.sourceDefinedPrimaryKey.isNullOrEmpty() + val configuredPrimaryKeyExists = !primaryKey.isNullOrEmpty() + + if (sourceDefinedPrimaryKeyExists && configuredPrimaryKeyExists) { + if (airbyteStream.sourceDefinedPrimaryKey != primaryKey) { + throw ConnectionConfigurationProblem.primaryKeyAlreadyDefined(airbyteStream.name, airbyteStream.sourceDefinedPrimaryKey) } - } else { - // source defined primary key exists - if (!primaryKey.isNullOrEmpty()) { - throw io.airbyte.server.apis.publicapi.problems.ConnectionConfigurationProblem.primaryKeyAlreadyDefined(airbyteStream.name) - } else { - config.primaryKey = airbyteStream.sourceDefinedPrimaryKey // this probably isn't necessary and should be already set + } + + // Ensure that we've passed at least some kind of primary key + val noPrimaryKey = !configuredPrimaryKeyExists && !sourceDefinedPrimaryKeyExists + if (noPrimaryKey) { + throw ConnectionConfigurationProblem.missingPrimaryKey(airbyteStream.name) + } + + // Validate the actual key passed in + val validPrimaryKey: List> = getStreamFields(airbyteStream.jsonSchema!!) + + for (singlePrimaryKey in primaryKey!!) 
{ + if (!validPrimaryKey.contains(singlePrimaryKey)) { // todo double check if the .contains() for list of strings works as intended + throw ConnectionConfigurationProblem.invalidPrimaryKey(airbyteStream.name, validPrimaryKey) + } + + if (singlePrimaryKey.distinct() != singlePrimaryKey) { + throw ConnectionConfigurationProblem.duplicatePrimaryKey(airbyteStream.name, primaryKey) } } } diff --git a/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/helpers/JobsHelper.kt b/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/helpers/JobsHelper.kt new file mode 100644 index 00000000000..1a7dd58b5db --- /dev/null +++ b/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/helpers/JobsHelper.kt @@ -0,0 +1,26 @@ +package io.airbyte.server.apis.publicapi.helpers + +import io.airbyte.api.model.generated.JobListForWorkspacesRequestBody +import io.airbyte.commons.enums.Enums +import io.airbyte.commons.server.errors.problems.BadRequestProblem + +fun orderByToFieldAndMethod( + orderBy: String?, +): Pair { + var field: JobListForWorkspacesRequestBody.OrderByFieldEnum = JobListForWorkspacesRequestBody.OrderByFieldEnum.CREATEDAT + var method: JobListForWorkspacesRequestBody.OrderByMethodEnum = JobListForWorkspacesRequestBody.OrderByMethodEnum.ASC + if (orderBy != null) { + val pattern: java.util.regex.Pattern = java.util.regex.Pattern.compile("([a-zA-Z0-9]+)\\|(ASC|DESC)") + val matcher: java.util.regex.Matcher = pattern.matcher(orderBy) + if (!matcher.find()) { + throw BadRequestProblem("Invalid order by clause provided: $orderBy") + } + field = + Enums.toEnum(matcher.group(1), JobListForWorkspacesRequestBody.OrderByFieldEnum::class.java) + .orElseThrow { BadRequestProblem("Invalid order by clause provided: $orderBy") } + method = + Enums.toEnum(matcher.group(2), JobListForWorkspacesRequestBody.OrderByMethodEnum::class.java) + .orElseThrow { BadRequestProblem("Invalid order by clause provided: $orderBy") } + } + return Pair(field, method) +} diff 
--git a/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/helpers/NameToDefinitionMappingHelper.kt b/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/helpers/NameToDefinitionMappingHelper.kt index 951014fbd95..10a85029030 100644 --- a/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/helpers/NameToDefinitionMappingHelper.kt +++ b/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/helpers/NameToDefinitionMappingHelper.kt @@ -4,7 +4,7 @@ package io.airbyte.server.apis.publicapi.helpers -import io.airbyte.server.apis.publicapi.problems.UnknownValueProblem +import io.airbyte.commons.server.errors.problems.UnknownValueProblem import java.util.UUID fun getActorDefinitionIdFromActorName( diff --git a/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/helpers/OAuthHelper.kt b/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/helpers/OAuthHelper.kt index 940d36dad03..2fefd30ca32 100644 --- a/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/helpers/OAuthHelper.kt +++ b/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/helpers/OAuthHelper.kt @@ -4,11 +4,8 @@ package io.airbyte.server.apis.publicapi.helpers -import com.fasterxml.jackson.databind.JsonNode -import com.fasterxml.jackson.databind.ObjectMapper -import io.airbyte.server.apis.publicapi.problems.InvalidRedirectUrlProblem +import io.airbyte.commons.server.errors.problems.InvalidRedirectUrlProblem import org.slf4j.LoggerFactory -import java.io.IOException import java.net.URI /** @@ -17,8 +14,6 @@ import java.net.URI object OAuthHelper { private const val TEMP_OAUTH_STATE_KEY = "temp_oauth_state" private const val HTTPS = "https" - private val OBJECT_MAPPER = ObjectMapper() - private const val PROPERTIES = "properties" private val log = LoggerFactory.getLogger(OAuthHelper.javaClass) fun buildTempOAuthStateKey(state: String): String { @@ -43,36 +38,4 @@ object OAuthHelper { throw 
InvalidRedirectUrlProblem("Redirect URL must conform to RFC 2396 - https://www.ietf.org/rfc/rfc2396.txt") } } - - private fun extractFromCompleteOutputSpecification(outputSpecification: JsonNode): List> { - val properties = outputSpecification[PROPERTIES] - val paths = properties.findValues("path_in_connector_config") - return paths.stream().map> { node: JsonNode? -> - try { - return@map OBJECT_MAPPER.readerForListOf(String::class.java).readValue(node) as List - } catch (e: IOException) { - throw RuntimeException(e) - } - }.toList() - } - - /** - * Create a list with alternating elements of property, list[n]. Used to spoof a connector - * specification for splitting out secrets. - * - * @param property property to put in front of each list element - * @param list list to insert elements into - * @return new list with alternating elements - */ - private fun alternatingList( - property: String, - list: List, - ): List { - val result: MutableList = ArrayList(list.size * 2) - for (item in list) { - result.add(property) - result.add(item) - } - return result - } } diff --git a/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/mappers/ConnectionsResponseMapper.kt b/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/mappers/ConnectionsResponseMapper.kt index d4bb17e4b64..da0d6c87240 100644 --- a/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/mappers/ConnectionsResponseMapper.kt +++ b/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/mappers/ConnectionsResponseMapper.kt @@ -40,6 +40,8 @@ object ConnectionsResponseMapper { PaginationMapper.getBuilder(apiHost, removePublicApiPathPrefix(CONNECTIONS_PATH)) .queryParam(WORKSPACE_IDS, PaginationMapper.uuidListToQueryString(workspaceIds)) .queryParam(INCLUDE_DELETED, includeDeleted) + + if (workspaceIds.isNotEmpty()) uriBuilder.queryParam(WORKSPACE_IDS, PaginationMapper.uuidListToQueryString(workspaceIds)) val connectionsResponse = ConnectionsResponse() 
connectionsResponse.next = PaginationMapper.getNextUrl(connectionReadList.connections, limit, offset, uriBuilder) connectionsResponse.previous = PaginationMapper.getPreviousUrl(limit, offset, uriBuilder) diff --git a/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/mappers/DestinationsResponseMapper.kt b/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/mappers/DestinationsResponseMapper.kt index 9c54b68f24a..486b7a44343 100644 --- a/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/mappers/DestinationsResponseMapper.kt +++ b/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/mappers/DestinationsResponseMapper.kt @@ -40,8 +40,8 @@ object DestinationsResponseMapper { ): DestinationsResponse { val uriBuilder = PaginationMapper.getBuilder(apiHost, removePublicApiPathPrefix(DESTINATIONS_PATH)) - .queryParam(WORKSPACE_IDS, PaginationMapper.uuidListToQueryString(workspaceIds)) .queryParam(INCLUDE_DELETED, includeDeleted) + if (workspaceIds.isNotEmpty()) uriBuilder.queryParam(WORKSPACE_IDS, PaginationMapper.uuidListToQueryString(workspaceIds)) val destinationsResponse = DestinationsResponse() destinationsResponse.next = PaginationMapper.getNextUrl(destinationReadList.destinations, limit, offset, uriBuilder) destinationsResponse.previous = PaginationMapper.getPreviousUrl(limit, offset, uriBuilder) diff --git a/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/mappers/JobsResponseMapper.kt b/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/mappers/JobsResponseMapper.kt index ba761a731cc..5145f12bc4e 100644 --- a/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/mappers/JobsResponseMapper.kt +++ b/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/mappers/JobsResponseMapper.kt @@ -11,6 +11,8 @@ import io.airbyte.public_api.model.generated.JobResponse import io.airbyte.public_api.model.generated.JobTypeEnum import io.airbyte.public_api.model.generated.JobsResponse 
import io.airbyte.server.apis.publicapi.constants.JOBS_PATH +import io.airbyte.server.apis.publicapi.constants.JOB_TYPE +import io.airbyte.server.apis.publicapi.constants.WORKSPACE_IDS import io.airbyte.server.apis.publicapi.helpers.removePublicApiPathPrefix import java.util.UUID @@ -47,7 +49,7 @@ object JobsResponseMapper { }.map { obj: JobWithAttemptsRead? -> JobResponseMapper.from(obj!!) }.toList() val uriBuilder = PaginationMapper.getBuilder(apiHost, removePublicApiPathPrefix(JOBS_PATH)) - .queryParam("jobType", jobType) + .queryParam(JOB_TYPE, jobType) .queryParam("connectionId", connectionId) val jobsResponse = JobsResponse() jobsResponse.next = PaginationMapper.getNextUrl(jobs, limit, offset, uriBuilder) @@ -69,7 +71,7 @@ object JobsResponseMapper { */ fun from( jobsList: JobReadList, - workspaceIds: List?, + workspaceIds: List, jobType: JobTypeEnum?, limit: Int, offset: Int, @@ -81,10 +83,12 @@ object JobsResponseMapper { j.job!!.configType, ) }.map { obj: JobWithAttemptsRead? -> JobResponseMapper.from(obj!!) 
}.toList() + val uriBuilder = PaginationMapper.getBuilder(apiHost, removePublicApiPathPrefix(JOBS_PATH)) - .queryParam("jobType", jobType) - .queryParam("workspaceIds", workspaceIds) + .queryParam(JOB_TYPE, jobType) + if (workspaceIds.isNotEmpty()) uriBuilder.queryParam(WORKSPACE_IDS, PaginationMapper.uuidListToQueryString(workspaceIds)) + val jobsResponse = JobsResponse() jobsResponse.next = PaginationMapper.getNextUrl(jobs, limit, offset, uriBuilder) jobsResponse.previous = PaginationMapper.getPreviousUrl(limit, offset, uriBuilder) diff --git a/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/mappers/PaginationMapper.kt b/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/mappers/PaginationMapper.kt index 85d1400d6fa..5d7c6939a51 100644 --- a/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/mappers/PaginationMapper.kt +++ b/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/mappers/PaginationMapper.kt @@ -44,8 +44,8 @@ object PaginationMapper { limit: Int, offset: Int, ): Optional { - // If we have no more entries or we had no entries this page, just return empty - no next URL - return if (CollectionUtils.isEmpty(collection) || collection.size < limit) { + // If we have no more entries, or we had no entries this page, just return empty - no next URL + return if (CollectionUtils.isEmpty(collection)) { Optional.empty() } else { Optional.of(offset + limit) diff --git a/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/mappers/SourcesResponseMapper.kt b/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/mappers/SourcesResponseMapper.kt index a644212364e..3edff0663d2 100644 --- a/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/mappers/SourcesResponseMapper.kt +++ b/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/mappers/SourcesResponseMapper.kt @@ -38,8 +38,10 @@ object SourcesResponseMapper { ): SourcesResponse { val uriBuilder = 
PaginationMapper.getBuilder(apiHost, removePublicApiPathPrefix(SOURCES_PATH)) - .queryParam(WORKSPACE_IDS, PaginationMapper.uuidListToQueryString(workspaceIds)) .queryParam(INCLUDE_DELETED, includeDeleted) + + if (workspaceIds.isNotEmpty()) uriBuilder.queryParam(WORKSPACE_IDS, PaginationMapper.uuidListToQueryString(workspaceIds)) + val sourcesResponse = SourcesResponse() sourcesResponse.next = PaginationMapper.getNextUrl(sourceReadList.sources, limit, offset, uriBuilder) sourcesResponse.previous = PaginationMapper.getPreviousUrl(limit, offset, uriBuilder) diff --git a/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/mappers/WorkspacesResponseMapper.kt b/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/mappers/WorkspacesResponseMapper.kt index 5bfe4b3d8f5..8f75a25866d 100644 --- a/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/mappers/WorkspacesResponseMapper.kt +++ b/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/mappers/WorkspacesResponseMapper.kt @@ -39,8 +39,9 @@ object WorkspacesResponseMapper { ): WorkspacesResponse { val uriBuilder = PaginationMapper.getBuilder(apiHost, removePublicApiPathPrefix(WORKSPACES_PATH)) - .queryParam(WORKSPACE_IDS, PaginationMapper.uuidListToQueryString(workspaceIds)) .queryParam(INCLUDE_DELETED, includeDeleted) + if (workspaceIds.isNotEmpty()) uriBuilder.queryParam(WORKSPACE_IDS, PaginationMapper.uuidListToQueryString(workspaceIds)) + val workspacesResponse = WorkspacesResponse() workspacesResponse.next = PaginationMapper.getNextUrl(workspaceReadList.workspaces, limit, offset, uriBuilder) diff --git a/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/netty/LoggingNettyChannelHandler.kt b/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/netty/LoggingNettyChannelHandler.kt index 878a6635f56..a098e19d174 100644 --- a/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/netty/LoggingNettyChannelHandler.kt +++ 
b/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/netty/LoggingNettyChannelHandler.kt @@ -1,5 +1,6 @@ package io.airbyte.server.apis.publicapi.netty +import io.airbyte.server.apis.publicapi.constants.ROOT_PATH import io.netty.buffer.ByteBuf import io.netty.channel.ChannelDuplexHandler import io.netty.channel.ChannelHandlerContext @@ -35,7 +36,6 @@ class LoggingNettyChannelHandler : ChannelDuplexHandler() { writer = CaptureWriter() request = NettyHttpRequest(message as HttpRequest) request!!.register(writer!!) - log.info("[{}] {}", request!!.method, request!!.requestURI) } if (request == null) { return @@ -85,6 +85,12 @@ class LoggingNettyChannelHandler : ChannelDuplexHandler() { } private fun capture() { + if ((request?.requestURI?.contains(ROOT_PATH) == false)) { + request = null + response = null + writer = null + return + } log.info("Request: [{}] -- {}", request!!.requestId, request!!.getLogString()) log.info("Response: [{}] -- {}", request!!.requestId, response!!.getLogString()) request = null diff --git a/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/problems/ConnectionConfigurationProblem.kt b/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/problems/ConnectionConfigurationProblem.kt deleted file mode 100644 index cf7ff9d67be..00000000000 --- a/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/problems/ConnectionConfigurationProblem.kt +++ /dev/null @@ -1,117 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.server.apis.publicapi.problems - -import io.airbyte.commons.server.errors.problems.AbstractThrowableProblem -import io.airbyte.public_api.model.generated.ConnectionSyncModeEnum -import io.airbyte.server.apis.publicapi.constants.API_DOC_URL -import io.micronaut.http.HttpStatus -import jakarta.validation.Valid -import java.io.Serial -import java.net.URI - -/** - * Thrown when a configuration for a connection is not valid. 
- */ -class ConnectionConfigurationProblem private constructor(message: String) : AbstractThrowableProblem( - io.airbyte.server.apis.publicapi.problems.ConnectionConfigurationProblem.Companion.TYPE, - io.airbyte.server.apis.publicapi.problems.ConnectionConfigurationProblem.Companion.TITLE, - HttpStatus.BAD_REQUEST, - "The body of the request contains an invalid connection configuration. $message", -) { - companion object { - @Serial - private val serialVersionUID = 1L - private val TYPE = URI.create("$API_DOC_URL/reference/errors") - private const val TITLE = "bad-request" - - fun handleSyncModeProblem( - connectionSyncMode: @Valid ConnectionSyncModeEnum?, - streamName: String, - validSyncModes: Set, - ): io.airbyte.server.apis.publicapi.problems.ConnectionConfigurationProblem { - return io.airbyte.server.apis.publicapi.problems.ConnectionConfigurationProblem( - "Cannot set sync mode to $connectionSyncMode for stream $streamName. Valid sync modes are: $validSyncModes", - ) - } - - fun invalidStreamName(validStreamNames: Collection): io.airbyte.server.apis.publicapi.problems.ConnectionConfigurationProblem { - return io.airbyte.server.apis.publicapi.problems.ConnectionConfigurationProblem( - "Invalid stream found. The list of valid streams include: $validStreamNames.", - ) - } - - fun duplicateStream(streamName: String): io.airbyte.server.apis.publicapi.problems.ConnectionConfigurationProblem { - return io.airbyte.server.apis.publicapi.problems.ConnectionConfigurationProblem("Duplicate stream found in configuration for: $streamName.") - } - - fun sourceDefinedCursorFieldProblem( - streamName: String, - cursorField: List, - ): io.airbyte.server.apis.publicapi.problems.ConnectionConfigurationProblem { - return io.airbyte.server.apis.publicapi.problems.ConnectionConfigurationProblem( - "Cursor Field " + cursorField + " is already defined by source for stream: " + streamName + - ". 
Do not include a cursor field configuration for this stream.", - ) - } - - fun missingCursorField(streamName: String): io.airbyte.server.apis.publicapi.problems.ConnectionConfigurationProblem { - return io.airbyte.server.apis.publicapi.problems.ConnectionConfigurationProblem( - "No default cursor field for stream: $streamName. Please include a cursor field configuration for this stream.", - ) - } - - fun invalidCursorField( - streamName: String, - validFields: List?>, - ): io.airbyte.server.apis.publicapi.problems.ConnectionConfigurationProblem { - return io.airbyte.server.apis.publicapi.problems.ConnectionConfigurationProblem( - "Invalid cursor field for stream: $streamName. The list of valid cursor fields include: $validFields.", - ) - } - - fun missingPrimaryKey(streamName: String): io.airbyte.server.apis.publicapi.problems.ConnectionConfigurationProblem { - return io.airbyte.server.apis.publicapi.problems.ConnectionConfigurationProblem( - "No default primary key for stream: $streamName. Please include a primary key configuration for this stream.", - ) - } - - fun primaryKeyAlreadyDefined(streamName: String): io.airbyte.server.apis.publicapi.problems.ConnectionConfigurationProblem { - return io.airbyte.server.apis.publicapi.problems.ConnectionConfigurationProblem( - "Primary key for stream: $streamName is already pre-defined. Please do NOT include a primary key configuration for this stream.", - ) - } - - fun invalidPrimaryKey( - streamName: String, - validFields: List?>, - ): io.airbyte.server.apis.publicapi.problems.ConnectionConfigurationProblem { - return io.airbyte.server.apis.publicapi.problems.ConnectionConfigurationProblem( - "Invalid cursor field for stream: $streamName. 
The list of valid primary keys fields: $validFields.", - ) - } - - fun invalidCronExpressionUnderOneHour(cronExpression: String): io.airbyte.server.apis.publicapi.problems.ConnectionConfigurationProblem { - return io.airbyte.server.apis.publicapi.problems.ConnectionConfigurationProblem( - "The cron expression " + cronExpression + - " is not valid or is less than the one hour minimum. The seconds and minutes values cannot be `*`.", - ) - } - - fun invalidCronExpression( - cronExpression: String, - message: String?, - ): io.airbyte.server.apis.publicapi.problems.ConnectionConfigurationProblem { - return io.airbyte.server.apis.publicapi.problems.ConnectionConfigurationProblem( - "The cron expression $cronExpression is not valid. Error: $message" + - ". Please check the cron expression format at https://www.quartz-scheduler.org/documentation/quartz-2.3.0/tutorials/crontrigger.html", - ) - } - - fun missingCronExpression(): io.airbyte.server.apis.publicapi.problems.ConnectionConfigurationProblem { - return io.airbyte.server.apis.publicapi.problems.ConnectionConfigurationProblem("Missing cron expression in the schedule.") - } - } -} diff --git a/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/services/ConnectionService.kt b/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/services/ConnectionService.kt index d78030fe33a..7a658db3187 100644 --- a/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/services/ConnectionService.kt +++ b/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/services/ConnectionService.kt @@ -10,6 +10,7 @@ import io.airbyte.api.model.generated.ConnectionRead import io.airbyte.api.model.generated.ConnectionUpdate import io.airbyte.api.model.generated.ListConnectionsForWorkspacesRequestBody import io.airbyte.api.model.generated.Pagination +import io.airbyte.commons.server.errors.problems.UnexpectedProblem import io.airbyte.commons.server.handlers.ConnectionsHandler import 
io.airbyte.commons.server.support.CurrentUserService import io.airbyte.public_api.model.generated.ConnectionCreateRequest @@ -23,7 +24,6 @@ import io.airbyte.server.apis.publicapi.mappers.ConnectionCreateMapper import io.airbyte.server.apis.publicapi.mappers.ConnectionReadMapper import io.airbyte.server.apis.publicapi.mappers.ConnectionUpdateMapper import io.airbyte.server.apis.publicapi.mappers.ConnectionsResponseMapper -import io.airbyte.server.apis.publicapi.problems.UnexpectedProblem import io.micronaut.context.annotation.Secondary import io.micronaut.context.annotation.Value import io.micronaut.http.HttpStatus diff --git a/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/services/DestinationService.kt b/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/services/DestinationService.kt index 5d33930004b..a5fa35b7769 100644 --- a/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/services/DestinationService.kt +++ b/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/services/DestinationService.kt @@ -2,7 +2,7 @@ * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
*/ -package services +package io.airbyte.server.apis.publicapi.services import io.airbyte.api.model.generated.DestinationCreate import io.airbyte.api.model.generated.DestinationDefinitionIdWithWorkspaceId @@ -26,7 +26,6 @@ import io.airbyte.server.apis.publicapi.helpers.getActorDefinitionIdFromActorNam import io.airbyte.server.apis.publicapi.mappers.DESTINATION_NAME_TO_DEFINITION_ID import io.airbyte.server.apis.publicapi.mappers.DestinationReadMapper import io.airbyte.server.apis.publicapi.mappers.DestinationsResponseMapper -import io.airbyte.server.apis.publicapi.services.UserService import io.micronaut.context.annotation.Secondary import io.micronaut.context.annotation.Value import jakarta.inject.Singleton diff --git a/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/services/JobService.kt b/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/services/JobService.kt index 6b74a7595cf..d90286774f0 100644 --- a/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/services/JobService.kt +++ b/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/services/JobService.kt @@ -2,7 +2,7 @@ * Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
*/ -package services +package io.airbyte.server.apis.publicapi.services import io.airbyte.api.model.generated.ConnectionIdRequestBody import io.airbyte.api.model.generated.JobConfigType @@ -12,6 +12,7 @@ import io.airbyte.api.model.generated.JobListForWorkspacesRequestBody.OrderByFie import io.airbyte.api.model.generated.JobListForWorkspacesRequestBody.OrderByMethodEnum import io.airbyte.api.model.generated.JobListRequestBody import io.airbyte.api.model.generated.Pagination +import io.airbyte.commons.server.errors.problems.UnprocessableEntityProblem import io.airbyte.commons.server.handlers.JobHistoryHandler import io.airbyte.commons.server.handlers.SchedulerHandler import io.airbyte.commons.server.support.CurrentUserService @@ -23,8 +24,6 @@ import io.airbyte.server.apis.publicapi.errorHandlers.ConfigClientErrorHandler import io.airbyte.server.apis.publicapi.filters.JobsFilter import io.airbyte.server.apis.publicapi.mappers.JobResponseMapper import io.airbyte.server.apis.publicapi.mappers.JobsResponseMapper -import io.airbyte.server.apis.publicapi.problems.UnprocessableEntityProblem -import io.airbyte.server.apis.publicapi.services.UserService import io.micronaut.context.annotation.Secondary import io.micronaut.context.annotation.Value import jakarta.inject.Singleton @@ -104,9 +103,11 @@ class JobServiceImpl( override fun cancelJob(jobId: Long): JobResponse { val jobIdRequestBody = JobIdRequestBody().id(jobId) val result = - kotlin.runCatching { schedulerHandler.cancelJob(jobIdRequestBody) } + kotlin.runCatching { + schedulerHandler.cancelJob(jobIdRequestBody) + } .onFailure { - log.error("reset job error $it") + log.error("cancel job error $it") ConfigClientErrorHandler.handleError(it, jobId.toString()) } log.debug(HTTP_RESPONSE_BODY_DEBUG_MESSAGE + result) diff --git a/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/services/SourceService.kt b/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/services/SourceService.kt index 
2bd18b8c030..1d0bf0f07dc 100644 --- a/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/services/SourceService.kt +++ b/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/services/SourceService.kt @@ -12,6 +12,7 @@ import io.airbyte.api.model.generated.SourceDiscoverSchemaRead import io.airbyte.api.model.generated.SourceDiscoverSchemaRequestBody import io.airbyte.api.model.generated.SourceIdRequestBody import io.airbyte.api.model.generated.SourceUpdate +import io.airbyte.commons.server.errors.problems.UnexpectedProblem import io.airbyte.commons.server.handlers.SchedulerHandler import io.airbyte.commons.server.handlers.SourceHandler import io.airbyte.commons.server.support.CurrentUserService @@ -25,7 +26,6 @@ import io.airbyte.server.apis.publicapi.constants.HTTP_RESPONSE_BODY_DEBUG_MESSA import io.airbyte.server.apis.publicapi.errorHandlers.ConfigClientErrorHandler import io.airbyte.server.apis.publicapi.mappers.SourceReadMapper import io.airbyte.server.apis.publicapi.mappers.SourcesResponseMapper -import io.airbyte.server.apis.publicapi.problems.UnexpectedProblem import io.micronaut.context.annotation.Secondary import io.micronaut.context.annotation.Value import io.micronaut.http.HttpStatus diff --git a/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/services/UserService.kt b/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/services/UserService.kt index bf92e5ed3ba..705aaa8bdb3 100644 --- a/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/services/UserService.kt +++ b/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/services/UserService.kt @@ -4,8 +4,8 @@ package io.airbyte.server.apis.publicapi.services -import io.airbyte.api.client.model.generated.WorkspaceReadList import io.airbyte.api.model.generated.ListWorkspacesByUserRequestBody +import io.airbyte.api.model.generated.WorkspaceReadList import io.airbyte.commons.server.handlers.UserHandler import 
io.airbyte.commons.server.handlers.WorkspacesHandler import io.airbyte.server.apis.publicapi.constants.HTTP_RESPONSE_BODY_DEBUG_MESSAGE @@ -42,7 +42,7 @@ open class UserServiceImpl( } log.debug(HTTP_RESPONSE_BODY_DEBUG_MESSAGE + result) - val workspaceReadList: WorkspaceReadList = result.getOrDefault(WorkspaceReadList().workspaces(emptyList())) as WorkspaceReadList + val workspaceReadList: WorkspaceReadList = result.getOrDefault(WorkspaceReadList().workspaces(emptyList())) return workspaceReadList.workspaces.map { it.workspaceId } } } diff --git a/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/services/WorkspaceService.kt b/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/services/WorkspaceService.kt index 1c1ac7153ce..2de3aac667a 100644 --- a/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/services/WorkspaceService.kt +++ b/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/services/WorkspaceService.kt @@ -8,8 +8,10 @@ import io.airbyte.api.model.generated.ListResourcesForWorkspacesRequestBody import io.airbyte.api.model.generated.Pagination import io.airbyte.api.model.generated.WorkspaceCreate import io.airbyte.api.model.generated.WorkspaceIdRequestBody +import io.airbyte.api.model.generated.WorkspaceUpdateName import io.airbyte.commons.server.handlers.WorkspacesHandler import io.airbyte.commons.server.support.CurrentUserService +import io.airbyte.config.persistence.OrganizationPersistence.DEFAULT_ORGANIZATION_ID import io.airbyte.public_api.model.generated.WorkspaceCreateRequest import io.airbyte.public_api.model.generated.WorkspaceOAuthCredentialsRequest import io.airbyte.public_api.model.generated.WorkspaceResponse @@ -19,6 +21,7 @@ import io.airbyte.server.apis.publicapi.apiTracking.TrackingHelper import io.airbyte.server.apis.publicapi.constants.DELETE import io.airbyte.server.apis.publicapi.constants.GET import io.airbyte.server.apis.publicapi.constants.HTTP_RESPONSE_BODY_DEBUG_MESSAGE +import 
io.airbyte.server.apis.publicapi.constants.PATCH import io.airbyte.server.apis.publicapi.constants.POST import io.airbyte.server.apis.publicapi.constants.WORKSPACES_PATH import io.airbyte.server.apis.publicapi.constants.WORKSPACES_WITH_ID_PATH @@ -91,7 +94,13 @@ open class WorkspaceServiceImpl( * Creates a workspace. */ override fun createWorkspace(workspaceCreateRequest: WorkspaceCreateRequest): WorkspaceResponse { - val workspaceCreate = WorkspaceCreate().name(workspaceCreateRequest.name) + // For now this should always be true in OSS. + val organizationId = DEFAULT_ORGANIZATION_ID + + val workspaceCreate = + WorkspaceCreate().name( + workspaceCreateRequest.name, + ).email(currentUserService.currentUser.email).organizationId(organizationId) val result = kotlin.runCatching { workspacesHandler.createWorkspace(workspaceCreate) } .onFailure { @@ -127,21 +136,49 @@ open class WorkspaceServiceImpl( } /** - * No-op in OSS. + * Updates a workspace name in OSS. */ override fun updateWorkspace( workspaceId: UUID, workspaceUpdateRequest: WorkspaceUpdateRequest, ): WorkspaceResponse { - // Update workspace in the cloud version of the airbyte API currently only supports name updates, but we don't have name updates in OSS. - return WorkspaceResponse() + val workspaceUpdate = + WorkspaceUpdateName().apply { + this.name = workspaceUpdateRequest.name + this.workspaceId = workspaceId + } + val result = + kotlin.runCatching { workspacesHandler.updateWorkspaceName(workspaceUpdate) } + .onFailure { + log.error("Error for updateWorkspace", it) + ConfigClientErrorHandler.handleError(it, workspaceId.toString()) + } + log.debug(HTTP_RESPONSE_BODY_DEBUG_MESSAGE + result) + return WorkspaceResponseMapper.from(result.getOrNull()!!) 
} override fun controllerUpdateWorkspace( workspaceId: UUID, workspaceUpdateRequest: WorkspaceUpdateRequest, ): Response { - return Response.status(Response.Status.NOT_IMPLEMENTED).build() + val userId: UUID = currentUserService.currentUser.userId + + val workspaceResponse: Any = + trackingHelper.callWithTracker( + { updateWorkspace(workspaceId, workspaceUpdateRequest) }, + WORKSPACES_WITH_ID_PATH, + PATCH, + userId, + ) + trackingHelper.trackSuccess( + WORKSPACES_WITH_ID_PATH, + PATCH, + userId, + ) + return Response + .status(Response.Status.OK.statusCode) + .entity(workspaceResponse) + .build() } /** diff --git a/airbyte-server/src/main/kotlin/io/airbyte/server/config/HttpClientFactory.kt b/airbyte-server/src/main/kotlin/io/airbyte/server/config/HttpClientFactory.kt new file mode 100644 index 00000000000..1af5a1e1c14 --- /dev/null +++ b/airbyte-server/src/main/kotlin/io/airbyte/server/config/HttpClientFactory.kt @@ -0,0 +1,20 @@ +package io.airbyte.server.config + +import io.micronaut.context.annotation.Factory +import jakarta.inject.Named +import jakarta.inject.Singleton +import okhttp3.OkHttpClient + +@Factory +class HttpClientFactory { + /** + * Create a new instance of {@link OkHttpClient} for use with the Keycloak token validator. + * For now, this is a simple instance with no additional configuration, but this can be + * tuned if needed for calls made within the {@link KeycloakTokenValidator}. 
+ */ + @Singleton + @Named("keycloakTokenValidatorHttpClient") + fun okHttpClient(): OkHttpClient { + return OkHttpClient() + } +} diff --git a/airbyte-server/src/main/kotlin/io/airbyte/server/helpers/UserInvitationAuthorizationHelper.kt b/airbyte-server/src/main/kotlin/io/airbyte/server/helpers/UserInvitationAuthorizationHelper.kt new file mode 100644 index 00000000000..66cb2f2b939 --- /dev/null +++ b/airbyte-server/src/main/kotlin/io/airbyte/server/helpers/UserInvitationAuthorizationHelper.kt @@ -0,0 +1,78 @@ +package io.airbyte.server.helpers + +import io.airbyte.api.model.generated.PermissionCheckRead.StatusEnum +import io.airbyte.api.model.generated.PermissionCheckRequest +import io.airbyte.api.model.generated.PermissionType +import io.airbyte.commons.server.errors.OperationNotAllowedException +import io.airbyte.commons.server.handlers.PermissionHandler +import io.airbyte.config.ScopeType +import io.airbyte.data.services.UserInvitationService +import jakarta.inject.Singleton +import java.util.UUID + +/** + * Helper class for performing authorization checks related to User Invitations. + */ +@Singleton +class UserInvitationAuthorizationHelper( + private val userInvitationService: UserInvitationService, + private val permissionHandler: PermissionHandler, +) { + /** + * Authorizes a user as an admin for a given invitation. Based on the scope of the invitation, + * checks if the user has necessary workspace/organization admin permissions. + * + * @throws OperationNotAllowedException if authorization fails. 
+ */ + @Throws(OperationNotAllowedException::class) + fun authorizeInvitationAdmin( + inviteCode: String, + userId: UUID, + ) { + try { + val invitation = userInvitationService.getUserInvitationByInviteCode(inviteCode) + when (invitation.scopeType) { + ScopeType.WORKSPACE -> authorizeWorkspaceInvitationAdmin(invitation.scopeId, userId) + ScopeType.ORGANIZATION -> authorizeOrganizationInvitationAdmin(invitation.scopeId, userId) + null -> throw OperationNotAllowedException("Invitation $inviteCode has no scope type") + } + } catch (e: Exception) { + // always explicitly throw a 403 if anything goes wrong during authorization + throw OperationNotAllowedException("Could not authorize $userId for invitation $inviteCode", e) + } + } + + private fun authorizeWorkspaceInvitationAdmin( + workspaceId: UUID, + userId: UUID, + ) { + val result = + permissionHandler.checkPermissions( + PermissionCheckRequest() + .userId(userId) + .permissionType(PermissionType.WORKSPACE_ADMIN) + .workspaceId(workspaceId), + ) + + if (!result.status.equals(StatusEnum.SUCCEEDED)) { + throw OperationNotAllowedException("User $userId is not an admin of workspace $workspaceId") + } + } + + private fun authorizeOrganizationInvitationAdmin( + organizationId: UUID, + userId: UUID, + ) { + val result = + permissionHandler.checkPermissions( + PermissionCheckRequest() + .userId(userId) + .permissionType(PermissionType.ORGANIZATION_ADMIN) + .organizationId(organizationId), + ) + + if (!result.status.equals(StatusEnum.SUCCEEDED)) { + throw OperationNotAllowedException("User $userId is not an admin of organization $organizationId") + } + } +} diff --git a/airbyte-server/src/main/resources/application.yml b/airbyte-server/src/main/resources/application.yml index 99a0dbd0d7c..f11ba9e23b0 100644 --- a/airbyte-server/src/main/resources/application.yml +++ b/airbyte-server/src/main/resources/application.yml @@ -1,15 +1,28 @@ micronaut: application: name: airbyte-server + caches: + # used by the analytics tracking 
client to cache calls to resolve the deployment and identity (workspace) for + # track events + analytics-tracking-deployments: + charset: "UTF-8" + expire-after-access: 10m + analytics-tracking-identity: + charset: "UTF-8" + expire-after-access: 10m env: cloud-deduction: true executors: + # We set our max pool size for the config DB to 20 in 10-values.yml files at the time of writing this. health: type: fixed - n-threads: ${HEALTH_TASK_EXECUTOR_THREADS:10} # Match the data source max pool size below + n-threads: ${HEALTH_TASK_EXECUTOR_THREADS:3} io: type: fixed - n-threads: ${IO_TASK_EXECUTOR_THREADS:10} # Match the data source max pool size below + n-threads: ${IO_TASK_EXECUTOR_THREADS:10} + public-api: + type: fixed + n-threads: ${PUBLIC_API_EXECUTOR_THREADS:5} scheduler: type: fixed n-threads: ${SCHEDULER_TASK_EXECUTOR_THREADS:25} @@ -46,6 +59,7 @@ micronaut: max-content-length: 52428800 # 50MB access-logger: enabled: ${HTTP_ACCESS_LOG_ENABLED:true} + max-header-size: ${NETTY_MAX_HEADER_SIZE:32768} idle-timeout: ${HTTP_IDLE_TIMEOUT:5m} http: client: @@ -72,6 +86,7 @@ airbyte: log: ${STORAGE_BUCKET_LOG} state: ${STORAGE_BUCKET_STATE} workload-output: ${STORAGE_BUCKET_WORKLOAD_OUTPUT} + activity-payload: ${STORAGE_BUCKET_ACTIVITY_PAYLOAD} gcs: application-credentials: ${GOOGLE_APPLICATION_CREDENTIALS:} local: @@ -108,9 +123,9 @@ airbyte: persistence: ${SECRET_PERSISTENCE:TESTING_CONFIG_DB_TABLE} store: aws: - access-key: ${AWS_ACCESS_KEY:} - secret-key: ${AWS_SECRET_ACCESS_KEY:} - region: ${AWS_REGION:} + access-key: ${AWS_SECRET_MANAGER_ACCESS_KEY_ID:} + secret-key: ${AWS_SECRET_MANAGER_SECRET_ACCESS_KEY:} + region: ${AWS_SECRET_MANAGER_REGION:} kmsKeyArn: ${AWS_KMS_KEY_ARN:} tags: ${AWS_SECRET_MANAGER_SECRET_TAGS:} gcp: @@ -344,6 +359,9 @@ endpoints: beans: enabled: true sensitive: false + caches: + enabled: true + sensitive: false env: enabled: true sensitive: false diff --git a/airbyte-server/src/test/java/io/airbyte/server/KeycloakTokenValidatorTest.java 
b/airbyte-server/src/test/java/io/airbyte/server/KeycloakTokenValidatorTest.java index f2f2123f6ee..76c27651aa3 100644 --- a/airbyte-server/src/test/java/io/airbyte/server/KeycloakTokenValidatorTest.java +++ b/airbyte-server/src/test/java/io/airbyte/server/KeycloakTokenValidatorTest.java @@ -14,19 +14,19 @@ import io.airbyte.server.pro.KeycloakTokenValidator; import io.micronaut.http.HttpHeaders; import io.micronaut.http.HttpRequest; -import io.micronaut.http.HttpResponse; -import io.micronaut.http.client.HttpClient; import io.micronaut.http.netty.NettyHttpHeaders; import io.micronaut.security.authentication.Authentication; import java.net.URI; -import java.net.URISyntaxException; import java.util.Collection; import java.util.Set; -import org.junit.jupiter.api.AfterEach; +import okhttp3.Call; +import okhttp3.OkHttpClient; +import okhttp3.Request; +import okhttp3.Response; +import okhttp3.ResponseBody; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.reactivestreams.Publisher; -import reactor.core.publisher.Mono; import reactor.test.StepVerifier; @SuppressWarnings("PMD.JUnitTestsShouldIncludeAssert") @@ -34,15 +34,36 @@ class KeycloakTokenValidatorTest { private static final String LOCALHOST = "http://localhost"; private static final String URI_PATH = "/some/path"; + private static final String VALID_ACCESS_TOKEN = + """ + eyJhbGciOiJSUzI1NiIsInR5cCIgOiAiSldUIiwia2lkIiA6ICIwM095c3pkWmNrZFd6Mk84d0ZFRkZVblJPLVJrN1lGLWZzRm1kWG1Q + bHdBIn0.eyJleHAiOjE2ODY4MTEwNTAsImlhdCI6MTY4NjgxMDg3MCwiYXV0aF90aW1lIjoxNjg2ODA3MTAzLCJqdGkiOiI1YzZhYTQ0 + Yi02ZDRlLTRkMTktOWQ0NC02YmY0ZjRlMzM5OTYiLCJpc3MiOiJodHRwOi8vbG9jYWxob3N0OjgwMDAvYXV0aC9yZWFsbXMvbWFzdGVy + IiwiYXVkIjpbIm1hc3Rlci1yZWFsbSIsImFjY291bnQiXSwic3ViIjoiMGYwY2JmOWEtMjRjMi00NmNjLWI1ODItZDFmZjJjMGQ1ZWY1 + IiwidHlwIjoiQmVhcmVyIiwiYXpwIjoiYWlyYnl0ZS13ZWJhcHAiLCJzZXNzaW9uX3N0YXRlIjoiN2FhOTdmYTEtYTI1Mi00NmQ0LWE0 + 
NTMtOTE2Y2E3M2E4NmQ4IiwiYWNyIjoiMSIsImFsbG93ZWQtb3JpZ2lucyI6WyJodHRwOi8vbG9jYWxob3N0OjgwMDAiXSwicmVhbG1f + YWNjZXNzIjp7InJvbGVzIjpbImNyZWF0ZS1yZWFsbSIsImRlZmF1bHQtcm9sZXMtbWFzdGVyIiwib2ZmbGluZV9hY2Nlc3MiLCJhZG1p + biIsInVtYV9hdXRob3JpemF0aW9uIl19LCJyZXNvdXJjZV9hY2Nlc3MiOnsibWFzdGVyLXJlYWxtIjp7InJvbGVzIjpbInZpZXctaWRl + bnRpdHktcHJvdmlkZXJzIiwidmlldy1yZWFsbSIsIm1hbmFnZS1pZGVudGl0eS1wcm92aWRlcnMiLCJpbXBlcnNvbmF0aW9uIiwiY3Jl + YXRlLWNsaWVudCIsIm1hbmFnZS11c2VycyIsInF1ZXJ5LXJlYWxtcyIsInZpZXctYXV0aG9yaXphdGlvbiIsInF1ZXJ5LWNsaWVudHMi + LCJxdWVyeS11c2VycyIsIm1hbmFnZS1ldmVudHMiLCJtYW5hZ2UtcmVhbG0iLCJ2aWV3LWV2ZW50cyIsInZpZXctdXNlcnMiLCJ2aWV3 + LWNsaWVudHMiLCJtYW5hZ2UtYXV0aG9yaXphdGlvbiIsIm1hbmFnZS1jbGllbnRzIiwicXVlcnktZ3JvdXBzIl19LCJhY2NvdW50Ijp7 + InJvbGVzIjpbIm1hbmFnZS1hY2NvdW50IiwibWFuYWdlLWFjY291bnQtbGlua3MiLCJ2aWV3LXByb2ZpbGUiXX19LCJzY29wZSI6Im9w + ZW5pZCBwcm9maWxlIGVtYWlsIiwic2lkIjoiN2FhOTdmYTEtYTI1Mi00NmQ0LWE0NTMtOTE2Y2E3M2E4NmQ4IiwiZW1haWxfdmVyaWZp + ZWQiOmZhbHNlLCJwcmVmZXJyZWRfdXNlcm5hbWUiOiJhZG1pbiJ9.fTqnLrU4vtcqvqW88RGLe81EUZ48TwYt6i-EdRttPfYs6BkkR4L + WKbJYv0HLbJYYjalLvAuGg5ELUvyjNiZqyP4yzlCqlZvNSwtiGG8fROj5XutMyVd3jxxAsTNntHw-EX7dT9Z6_EeQlV3tVBl_yvNh-1y + 4bujH25omDr080fmuU-4ug6PT7rxbIEjMjgQMiJQ7_B-2DXjq4bGwuB8js5kDEADJNiZjs1PLd4Cri2qC14I_CE1RcEgM4CA_oY48M13 + DdKDaG0rH2B4zu7PD6PIMp8vgt9lq7FKh1QBfBdgDXCCbLe3RdOAua5QyeDztGyTwP7FghRLIUoK1kSbMww + """.replace("\n", "").replace("\r", ""); private KeycloakTokenValidator keycloakTokenValidator; - private HttpClient httpClient; + private OkHttpClient httpClient; private AirbyteKeycloakConfiguration keycloakConfiguration; private RbacRoleHelper rbacRoleHelper; @BeforeEach void setUp() { - httpClient = mock(HttpClient.class); + httpClient = mock(OkHttpClient.class); keycloakConfiguration = mock(AirbyteKeycloakConfiguration.class); when(keycloakConfiguration.getKeycloakUserInfoEndpoint()).thenReturn(LOCALHOST + URI_PATH); @@ -51,53 +72,34 @@ void setUp() { keycloakTokenValidator = new KeycloakTokenValidator(httpClient, 
keycloakConfiguration, rbacRoleHelper); } - @AfterEach - void tearDown() { - httpClient.close(); - } - @Test void testValidateToken() throws Exception { final URI uri = new URI(LOCALHOST + URI_PATH); - final String accessToken = """ - eyJhbGciOiJSUzI1NiIsInR5cCIgOiAiSldUIiwia2lkIiA6ICIwM095c3pkWmNrZFd6Mk84d0ZFRkZVblJPLVJrN1lGLWZzRm1kWG1Q - bHdBIn0.eyJleHAiOjE2ODY4MTEwNTAsImlhdCI6MTY4NjgxMDg3MCwiYXV0aF90aW1lIjoxNjg2ODA3MTAzLCJqdGkiOiI1YzZhYTQ0 - Yi02ZDRlLTRkMTktOWQ0NC02YmY0ZjRlMzM5OTYiLCJpc3MiOiJodHRwOi8vbG9jYWxob3N0OjgwMDAvYXV0aC9yZWFsbXMvbWFzdGVy - IiwiYXVkIjpbIm1hc3Rlci1yZWFsbSIsImFjY291bnQiXSwic3ViIjoiMGYwY2JmOWEtMjRjMi00NmNjLWI1ODItZDFmZjJjMGQ1ZWY1 - IiwidHlwIjoiQmVhcmVyIiwiYXpwIjoiYWlyYnl0ZS13ZWJhcHAiLCJzZXNzaW9uX3N0YXRlIjoiN2FhOTdmYTEtYTI1Mi00NmQ0LWE0 - NTMtOTE2Y2E3M2E4NmQ4IiwiYWNyIjoiMSIsImFsbG93ZWQtb3JpZ2lucyI6WyJodHRwOi8vbG9jYWxob3N0OjgwMDAiXSwicmVhbG1f - YWNjZXNzIjp7InJvbGVzIjpbImNyZWF0ZS1yZWFsbSIsImRlZmF1bHQtcm9sZXMtbWFzdGVyIiwib2ZmbGluZV9hY2Nlc3MiLCJhZG1p - biIsInVtYV9hdXRob3JpemF0aW9uIl19LCJyZXNvdXJjZV9hY2Nlc3MiOnsibWFzdGVyLXJlYWxtIjp7InJvbGVzIjpbInZpZXctaWRl - bnRpdHktcHJvdmlkZXJzIiwidmlldy1yZWFsbSIsIm1hbmFnZS1pZGVudGl0eS1wcm92aWRlcnMiLCJpbXBlcnNvbmF0aW9uIiwiY3Jl - YXRlLWNsaWVudCIsIm1hbmFnZS11c2VycyIsInF1ZXJ5LXJlYWxtcyIsInZpZXctYXV0aG9yaXphdGlvbiIsInF1ZXJ5LWNsaWVudHMi - LCJxdWVyeS11c2VycyIsIm1hbmFnZS1ldmVudHMiLCJtYW5hZ2UtcmVhbG0iLCJ2aWV3LWV2ZW50cyIsInZpZXctdXNlcnMiLCJ2aWV3 - LWNsaWVudHMiLCJtYW5hZ2UtYXV0aG9yaXphdGlvbiIsIm1hbmFnZS1jbGllbnRzIiwicXVlcnktZ3JvdXBzIl19LCJhY2NvdW50Ijp7 - InJvbGVzIjpbIm1hbmFnZS1hY2NvdW50IiwibWFuYWdlLWFjY291bnQtbGlua3MiLCJ2aWV3LXByb2ZpbGUiXX19LCJzY29wZSI6Im9w - ZW5pZCBwcm9maWxlIGVtYWlsIiwic2lkIjoiN2FhOTdmYTEtYTI1Mi00NmQ0LWE0NTMtOTE2Y2E3M2E4NmQ4IiwiZW1haWxfdmVyaWZp - ZWQiOmZhbHNlLCJwcmVmZXJyZWRfdXNlcm5hbWUiOiJhZG1pbiJ9.fTqnLrU4vtcqvqW88RGLe81EUZ48TwYt6i-EdRttPfYs6BkkR4L - WKbJYv0HLbJYYjalLvAuGg5ELUvyjNiZqyP4yzlCqlZvNSwtiGG8fROj5XutMyVd3jxxAsTNntHw-EX7dT9Z6_EeQlV3tVBl_yvNh-1y - 
4bujH25omDr080fmuU-4ug6PT7rxbIEjMjgQMiJQ7_B-2DXjq4bGwuB8js5kDEADJNiZjs1PLd4Cri2qC14I_CE1RcEgM4CA_oY48M13 - DdKDaG0rH2B4zu7PD6PIMp8vgt9lq7FKh1QBfBdgDXCCbLe3RdOAua5QyeDztGyTwP7FghRLIUoK1kSbMww"""; final String expectedUserId = "0f0cbf9a-24c2-46cc-b582-d1ff2c0d5ef5"; - final HttpRequest httpRequest = mock(HttpRequest.class); + // set up mocked incoming request + final HttpRequest httpRequest = mock(HttpRequest.class); final NettyHttpHeaders headers = new NettyHttpHeaders(); - final String accessTokenWithoutNewline = accessToken.replace("\n", "").replace("\r", ""); - headers.add(HttpHeaders.AUTHORIZATION, "Bearer " + accessTokenWithoutNewline); + headers.add(HttpHeaders.AUTHORIZATION, "Bearer " + VALID_ACCESS_TOKEN); + when(httpRequest.getUri()).thenReturn(uri); + when(httpRequest.getHeaders()).thenReturn(headers); + // set up mock http response from Keycloak final String responseBody = "{\"sub\":\"0f0cbf9a-24c2-46cc-b582-d1ff2c0d5ef5\",\"preferred_username\":\"airbyte\"}"; - final HttpResponse userInfoResponse = HttpResponse - .ok(responseBody) - .header(HttpHeaders.CONTENT_TYPE, "application/json"); + final Response userInfoResponse = mock(Response.class); + final ResponseBody userInfoResponseBody = mock(ResponseBody.class); + when(userInfoResponseBody.string()).thenReturn(responseBody); + when(userInfoResponse.body()).thenReturn(userInfoResponseBody); + when(userInfoResponse.code()).thenReturn(200); + when(userInfoResponse.isSuccessful()).thenReturn(true); - when(httpRequest.getUri()).thenReturn(uri); - when(httpRequest.getHeaders()).thenReturn(headers); - when(httpClient.exchange(any(HttpRequest.class), eq(String.class))) - .thenReturn(Mono.just(userInfoResponse)); + final Call call = mock(Call.class); + when(call.execute()).thenReturn(userInfoResponse); + when(httpClient.newCall(any(Request.class))).thenReturn(call); - final Publisher responsePublisher = keycloakTokenValidator.validateToken(accessTokenWithoutNewline, httpRequest); + final Publisher 
responsePublisher = keycloakTokenValidator.validateToken(VALID_ACCESS_TOKEN, httpRequest); - Set mockedRoles = + final Set mockedRoles = Set.of("ORGANIZATION_ADMIN", "ORGANIZATION_EDITOR", "ORGANIZATION_READER", "ORGANIZATION_MEMBER", "ADMIN", "EDITOR", "READER"); when(rbacRoleHelper.getRbacRoles(eq(expectedUserId), any(HttpRequest.class))) @@ -109,25 +111,30 @@ void testValidateToken() throws Exception { } @Test - void testKeycloakValidationFailure() throws URISyntaxException { - // this token is missing 'sub' claim thus the validation should fail + void testKeycloakValidationFailureNoSubClaim() throws Exception { final URI uri = new URI(LOCALHOST + URI_PATH); - final String accessToken = "Bearer invalid-opHsFNA"; + + // set up mocked incoming request final HttpRequest httpRequest = mock(HttpRequest.class); final NettyHttpHeaders headers = new NettyHttpHeaders(); - headers.add(HttpHeaders.AUTHORIZATION, accessToken); - - final String responseBody = "{\"preferred_username\":\"airbyte\"}"; - final HttpResponse userInfoResponse = HttpResponse - .ok(responseBody) - .header(HttpHeaders.CONTENT_TYPE, "application/json"); - + headers.add(HttpHeaders.AUTHORIZATION, "Bearer " + VALID_ACCESS_TOKEN); when(httpRequest.getUri()).thenReturn(uri); when(httpRequest.getHeaders()).thenReturn(headers); - when(httpClient.exchange(any(HttpRequest.class), eq(String.class))) - .thenReturn(Mono.just(userInfoResponse)); - final Publisher responsePublisher = keycloakTokenValidator.validateToken(accessToken, httpRequest); + // set up mock http response from Keycloak that lacks a `sub` claim + final String responseBody = "{\"preferred_username\":\"airbyte\"}"; + final Response userInfoResponse = mock(Response.class); + final ResponseBody userInfoResponseBody = mock(ResponseBody.class); + when(userInfoResponseBody.string()).thenReturn(responseBody); + when(userInfoResponse.body()).thenReturn(userInfoResponseBody); + when(userInfoResponse.code()).thenReturn(200); + 
when(userInfoResponse.isSuccessful()).thenReturn(true); + + final Call call = mock(Call.class); + when(call.execute()).thenReturn(userInfoResponse); + when(httpClient.newCall(any(Request.class))).thenReturn(call); + + final Publisher responsePublisher = keycloakTokenValidator.validateToken(VALID_ACCESS_TOKEN, httpRequest); // Verify the stream remains empty. StepVerifier.create(responsePublisher) diff --git a/airbyte-server/src/test/java/io/airbyte/server/apis/InstanceConfigurationApiControllerTest.java b/airbyte-server/src/test/java/io/airbyte/server/apis/InstanceConfigurationApiControllerTest.java index b01e053170a..3da92c52152 100644 --- a/airbyte-server/src/test/java/io/airbyte/server/apis/InstanceConfigurationApiControllerTest.java +++ b/airbyte-server/src/test/java/io/airbyte/server/apis/InstanceConfigurationApiControllerTest.java @@ -43,7 +43,7 @@ InstanceConfigurationHandler mmInstanceConfigurationHandler() { static String PATH = "/api/v1/instance_configuration"; @Test - void testGetInstanceConfiguration() throws ConfigNotFoundException, IOException { + void testGetInstanceConfiguration() throws IOException { when(instanceConfigurationHandler.getInstanceConfiguration()) .thenReturn(new InstanceConfigurationResponse()); diff --git a/airbyte-server/src/test/java/io/airbyte/server/apis/PermissionApiControllerTest.java b/airbyte-server/src/test/java/io/airbyte/server/apis/PermissionApiControllerTest.java index 247a783f8e9..eb4ff27ceb9 100644 --- a/airbyte-server/src/test/java/io/airbyte/server/apis/PermissionApiControllerTest.java +++ b/airbyte-server/src/test/java/io/airbyte/server/apis/PermissionApiControllerTest.java @@ -46,12 +46,10 @@ void testGetPermission() throws ConfigNotFoundException, IOException { } @Test - void testUpdatePermission() throws ConfigNotFoundException, IOException, JsonValidationException { + void testUpdatePermission() throws ConfigNotFoundException, IOException { final UUID userId = UUID.randomUUID(); 
Mockito.when(permissionHandler.getPermission(Mockito.any())) .thenReturn(new PermissionRead().userId(userId)); - Mockito.when(permissionHandler.updatePermission(Mockito.any())) - .thenReturn(new PermissionRead().userId(userId)); final String path = "/api/v1/permissions/update"; testEndpointStatus( HttpRequest.POST(path, new PermissionUpdate().permissionId(UUID.randomUUID())), @@ -59,7 +57,7 @@ void testUpdatePermission() throws ConfigNotFoundException, IOException, JsonVal } @Test - void testDeletePermission() throws IOException { + void testDeletePermission() { Mockito.doNothing().when(permissionHandler).deletePermission(Mockito.any()); final String path = "/api/v1/permissions/delete"; testEndpointStatus( diff --git a/airbyte-server/src/test/java/io/airbyte/server/apis/WebBackendApiTest.java b/airbyte-server/src/test/java/io/airbyte/server/apis/WebBackendApiTest.java index 5b0ad3fa038..1ac02ffdf67 100644 --- a/airbyte-server/src/test/java/io/airbyte/server/apis/WebBackendApiTest.java +++ b/airbyte-server/src/test/java/io/airbyte/server/apis/WebBackendApiTest.java @@ -13,9 +13,9 @@ import io.airbyte.api.model.generated.WebBackendConnectionRequestBody; import io.airbyte.api.model.generated.WebBackendGeographiesListResult; import io.airbyte.api.model.generated.WebBackendWorkspaceStateResult; +import io.airbyte.commons.server.authorization.ApiAuthorizationHelper; +import io.airbyte.commons.server.errors.problems.ForbiddenProblem; import io.airbyte.config.persistence.ConfigNotFoundException; -import io.airbyte.server.apis.publicapi.authorization.AirbyteApiAuthorizationHelper; -import io.airbyte.server.apis.publicapi.problems.ForbiddenProblem; import io.airbyte.validation.json.JsonValidationException; import io.micronaut.context.annotation.Primary; import io.micronaut.context.annotation.Requires; @@ -35,7 +35,7 @@ @SuppressWarnings("PMD.JUnitTestsShouldIncludeAssert") class WebBackendApiTest extends BaseControllerTest { - private AirbyteApiAuthorizationHelper 
airbyteApiAuthorizationHelper; + private ApiAuthorizationHelper apiAuthorizationHelper; // Due to some strange interaction between Micronaut 3, Java, and Kotlin, the only way to // mock this Kotlin dependency is to annotate it with @Bean instead of @MockBean, and to @@ -43,13 +43,13 @@ class WebBackendApiTest extends BaseControllerTest { // back to BaseControllerTest and use @MockBean after we upgrade to Micronaut 4. @Singleton @Primary - AirbyteApiAuthorizationHelper mmAirbyteApiAuthorizationHelper() { - return airbyteApiAuthorizationHelper; + ApiAuthorizationHelper mmAirbyteApiAuthorizationHelper() { + return apiAuthorizationHelper; } @BeforeEach void setup() { - airbyteApiAuthorizationHelper = Mockito.mock(AirbyteApiAuthorizationHelper.class); + apiAuthorizationHelper = Mockito.mock(ApiAuthorizationHelper.class); } @Test @@ -100,7 +100,7 @@ void testWebBackendGetConnection() throws JsonValidationException, ConfigNotFoun .doNothing() // first call that makes it here passes auth check .doNothing() // second call that makes it here passes auth check but 404s .doThrow(new ForbiddenProblem("forbidden")) // third call fails auth check and 403s - .when(airbyteApiAuthorizationHelper).checkWorkspacePermissions(Mockito.anyString(), Mockito.any(), Mockito.any()); + .when(apiAuthorizationHelper).checkWorkspacePermissions(Mockito.anyString(), Mockito.any(), Mockito.any()); // first call doesn't activate checkWorkspacePermissions because withRefreshedCatalog is false testEndpointStatus( diff --git a/airbyte-server/src/test/java/io/airbyte/server/apis/WorkspaceApiTest.java b/airbyte-server/src/test/java/io/airbyte/server/apis/WorkspaceApiTest.java index a480bc8253a..9e3a5909fa3 100644 --- a/airbyte-server/src/test/java/io/airbyte/server/apis/WorkspaceApiTest.java +++ b/airbyte-server/src/test/java/io/airbyte/server/apis/WorkspaceApiTest.java @@ -4,6 +4,8 @@ package io.airbyte.server.apis; +import static org.mockito.ArgumentMatchers.anyBoolean; + import 
io.airbyte.api.model.generated.PermissionCheckRead; import io.airbyte.api.model.generated.PermissionCheckRead.StatusEnum; import io.airbyte.api.model.generated.SourceDefinitionIdRequestBody; @@ -199,7 +201,7 @@ void testUpdateWorkspaceName() throws JsonValidationException, ConfigNotFoundExc @Test void testGetWorkspaceByConnectionId() throws ConfigNotFoundException { - Mockito.when(workspacesHandler.getWorkspaceByConnectionId(Mockito.any())) + Mockito.when(workspacesHandler.getWorkspaceByConnectionId(Mockito.any(), anyBoolean())) .thenReturn(new WorkspaceRead()) .thenThrow(new ConfigNotFoundException("", "")); final String path = "/api/v1/workspaces/get_by_connection_id"; diff --git a/airbyte-server/src/test/java/io/airbyte/server/handlers/UserInvitationHandlerTest.java b/airbyte-server/src/test/java/io/airbyte/server/handlers/UserInvitationHandlerTest.java index c2fb29e26b4..36bd0d5ae04 100644 --- a/airbyte-server/src/test/java/io/airbyte/server/handlers/UserInvitationHandlerTest.java +++ b/airbyte-server/src/test/java/io/airbyte/server/handlers/UserInvitationHandlerTest.java @@ -4,88 +4,455 @@ package io.airbyte.server.handlers; +import static io.airbyte.config.Permission.PermissionType.WORKSPACE_ADMIN; +import static io.airbyte.server.handlers.UserInvitationHandler.ACCEPT_INVITE_PATH; +import static io.airbyte.server.handlers.UserInvitationHandler.USER_INVITED; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.anyString; -import static org.mockito.Mock.Strictness.LENIENT; +import static org.mockito.ArgumentMatchers.anyMap; +import static 
org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.doThrow; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.never; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.verifyNoMoreInteractions; import static org.mockito.Mockito.when; +import io.airbyte.analytics.TrackingClient; +import io.airbyte.api.model.generated.InviteCodeRequestBody; +import io.airbyte.api.model.generated.PermissionCreate; +import io.airbyte.api.model.generated.PermissionType; import io.airbyte.api.model.generated.UserInvitationCreateRequestBody; +import io.airbyte.api.model.generated.UserInvitationCreateResponse; +import io.airbyte.api.model.generated.UserInvitationListRequestBody; import io.airbyte.api.model.generated.UserInvitationRead; +import io.airbyte.commons.server.errors.ConflictException; +import io.airbyte.commons.server.errors.OperationNotAllowedException; +import io.airbyte.commons.server.handlers.PermissionHandler; import io.airbyte.config.InvitationStatus; +import io.airbyte.config.ScopeType; +import io.airbyte.config.StandardWorkspace; import io.airbyte.config.User; import io.airbyte.config.UserInvitation; -import io.airbyte.config.persistence.ConfigNotFoundException; +import io.airbyte.config.UserPermission; +import io.airbyte.config.persistence.PermissionPersistence; +import io.airbyte.config.persistence.UserPersistence; +import io.airbyte.data.services.InvitationDuplicateException; +import io.airbyte.data.services.InvitationStatusUnexpectedException; +import io.airbyte.data.services.OrganizationService; import io.airbyte.data.services.UserInvitationService; +import io.airbyte.data.services.WorkspaceService; import io.airbyte.notification.CustomerIoEmailConfig; import io.airbyte.notification.CustomerIoEmailNotificationSender; import io.airbyte.persistence.job.WebUrlHelper; import io.airbyte.server.handlers.api_domain_mapping.UserInvitationMapper; -import 
io.airbyte.validation.json.JsonValidationException; -import java.io.IOException; +import java.util.Collections; +import java.util.List; +import java.util.Optional; +import java.util.Set; import java.util.UUID; import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Nested; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.ArgumentCaptor; import org.mockito.Mock; import org.mockito.junit.jupiter.MockitoExtension; @ExtendWith(MockitoExtension.class) public class UserInvitationHandlerTest { - @Mock(strictness = LENIENT) + @Mock UserInvitationService service; - - @Mock(strictness = LENIENT) + @Mock UserInvitationMapper mapper; - - @Mock(strictness = LENIENT) + @Mock CustomerIoEmailNotificationSender customerIoEmailNotificationSender; - - @Mock(strictness = LENIENT) + @Mock WebUrlHelper webUrlHelper; + @Mock + WorkspaceService workspaceService; + @Mock + OrganizationService organizationService; + @Mock + UserPersistence userPersistence; + @Mock + PermissionPersistence permissionPersistence; + @Mock + PermissionHandler permissionHandler; + @Mock + TrackingClient trackingClient; UserInvitationHandler handler; @BeforeEach void setup() { - handler = new UserInvitationHandler(service, mapper, customerIoEmailNotificationSender, webUrlHelper); + handler = new UserInvitationHandler(service, mapper, customerIoEmailNotificationSender, webUrlHelper, workspaceService, organizationService, + userPersistence, permissionPersistence, permissionHandler, trackingClient); + } + + @Nested + class CreateInvitationOrPermission { + + private static final User CURRENT_USER = new User().withUserId(UUID.randomUUID()).withEmail("current-user@airbyte.io").withName("Current User"); + private static final String WEBAPP_BASE_URL = "https://test.airbyte.io"; + private static final String INVITED_EMAIL = "invited@airbyte.io"; + private static final UUID WORKSPACE_ID = UUID.randomUUID(); + private static final String WORKSPACE_NAME = 
"workspace-name"; + private static final UUID ORG_ID = UUID.randomUUID(); + private static final UserInvitationCreateRequestBody USER_INVITATION_CREATE_REQUEST_BODY = new UserInvitationCreateRequestBody() + .invitedEmail(INVITED_EMAIL) + .scopeType(io.airbyte.api.model.generated.ScopeType.WORKSPACE) + .scopeId(WORKSPACE_ID) + .permissionType(io.airbyte.api.model.generated.PermissionType.WORKSPACE_ADMIN); + + @Nested + class CreateAndSendInvitation { + + private static final UserInvitation USER_INVITATION = new UserInvitation() + .withInvitedEmail(INVITED_EMAIL) + .withScopeType(ScopeType.WORKSPACE) + .withScopeId(WORKSPACE_ID) + .withPermissionType(WORKSPACE_ADMIN); + + private void setupSendInvitationMocks() throws Exception { + when(webUrlHelper.getBaseUrl()).thenReturn(WEBAPP_BASE_URL); + when(service.createUserInvitation(USER_INVITATION)).thenReturn(USER_INVITATION); + when(workspaceService.getStandardWorkspaceNoSecrets(WORKSPACE_ID, false)).thenReturn(new StandardWorkspace().withName(WORKSPACE_NAME)); + } + + @BeforeEach + void setup() { + when(mapper.toDomain(USER_INVITATION_CREATE_REQUEST_BODY)).thenReturn(USER_INVITATION); + } + + @Test + void testNewEmailWorkspaceInOrg() throws Exception { + setupSendInvitationMocks(); + + // the workspace is in an org. + when(workspaceService.getOrganizationIdFromWorkspaceId(WORKSPACE_ID)).thenReturn(Optional.of(ORG_ID)); + + // no existing user has the invited email. + when(userPersistence.getUsersByEmail(INVITED_EMAIL)).thenReturn(Collections.emptyList()); + + // call the handler method under test. + final UserInvitationCreateResponse result = handler.createInvitationOrPermission(USER_INVITATION_CREATE_REQUEST_BODY, CURRENT_USER); + + // make sure correct invite was created, email was sent, and result is correct. + verifyInvitationCreatedAndEmailSentResult(result); + } + + @Test + void testWorkspaceNotInAnyOrg() throws Exception { + setupSendInvitationMocks(); + + // the workspace is not in any org. 
+ when(workspaceService.getOrganizationIdFromWorkspaceId(WORKSPACE_ID)).thenReturn(Optional.empty()); + + // call the handler method under test. + final UserInvitationCreateResponse result = handler.createInvitationOrPermission(USER_INVITATION_CREATE_REQUEST_BODY, CURRENT_USER); + + // make sure correct invite was created, email was sent, and result is correct. + verifyInvitationCreatedAndEmailSentResult(result); + } + + @Test + void testExistingEmailButNotInWorkspaceOrg() throws Exception { + setupSendInvitationMocks(); + + // the workspace is in an org. + when(workspaceService.getOrganizationIdFromWorkspaceId(WORKSPACE_ID)).thenReturn(Optional.of(ORG_ID)); + + // a user with the email exists, but is not in the workspace's org. + final User userWithEmail = new User().withUserId(UUID.randomUUID()).withEmail(INVITED_EMAIL); + when(userPersistence.getUsersByEmail(INVITED_EMAIL)).thenReturn(List.of(userWithEmail)); + + // the org has a user with a different email, but not the one we're inviting. + final User otherUserInOrg = new User().withUserId(UUID.randomUUID()).withEmail("other@airbyte.io"); + when(permissionPersistence.listUsersInOrganization(ORG_ID)).thenReturn(List.of(new UserPermission().withUser(otherUserInOrg))); + + // call the handler method under test. + final UserInvitationCreateResponse result = handler.createInvitationOrPermission(USER_INVITATION_CREATE_REQUEST_BODY, CURRENT_USER); + + // make sure correct invite was created, email was sent, and result is correct. 
+ verifyInvitationCreatedAndEmailSentResult(result); + } + + @Test + void testThrowsConflictExceptionOnDuplicateInvitation() throws Exception { + when(service.createUserInvitation(USER_INVITATION)).thenThrow(new InvitationDuplicateException("duplicate")); + + assertThrows(ConflictException.class, () -> handler.createInvitationOrPermission(USER_INVITATION_CREATE_REQUEST_BODY, CURRENT_USER)); + } + + private void verifyInvitationCreatedAndEmailSentResult(final UserInvitationCreateResponse result) throws Exception { + verify(mapper, times(1)).toDomain(USER_INVITATION_CREATE_REQUEST_BODY); + + // capture and verify the invitation that is saved by the service. + final ArgumentCaptor savedUserInvitationCaptor = ArgumentCaptor.forClass(UserInvitation.class); + verify(service, times(1)).createUserInvitation(savedUserInvitationCaptor.capture()); + final UserInvitation capturedUserInvitation = savedUserInvitationCaptor.getValue(); + + // make sure an invite code and pending status were set on the saved invitation + assertNotNull(capturedUserInvitation.getInviteCode()); + assertEquals(InvitationStatus.PENDING, capturedUserInvitation.getStatus()); + + // make sure an expiration time was set on the invitation + assertNotNull(capturedUserInvitation.getExpiresAt()); + + // make sure the email sender was called with the correct inputs. 
+ final ArgumentCaptor emailConfigCaptor = ArgumentCaptor.forClass(CustomerIoEmailConfig.class); + final ArgumentCaptor inviteLinkCaptor = ArgumentCaptor.forClass(String.class); + + verify(customerIoEmailNotificationSender, times(1)).sendInviteToUser( + emailConfigCaptor.capture(), + eq(CURRENT_USER.getName()), + inviteLinkCaptor.capture()); + + final CustomerIoEmailConfig capturedEmailConfig = emailConfigCaptor.getValue(); + assertEquals(INVITED_EMAIL, capturedEmailConfig.getTo()); + + final String capturedInviteLink = inviteLinkCaptor.getValue(); + assertEquals(WEBAPP_BASE_URL + ACCEPT_INVITE_PATH + capturedUserInvitation.getInviteCode(), capturedInviteLink); + + // make sure no other emails are sent. + verifyNoMoreInteractions(customerIoEmailNotificationSender); + + // make sure we never created a permission, because the invitation path was taken instead. + verify(permissionHandler, times(0)).createPermission(any()); + + // make sure the final result is correct + assertFalse(result.getDirectlyAdded()); + assertEquals(capturedUserInvitation.getInviteCode(), result.getInviteCode()); + + // verify we sent an invitation tracking event + verify(trackingClient, times(1)).track( + eq(WORKSPACE_ID), + eq(USER_INVITED), + anyMap()); + } + + } + + @Nested + class DirectlyAddPermission { + + @BeforeEach + void setup() throws Exception { + when(workspaceService.getStandardWorkspaceNoSecrets(WORKSPACE_ID, false)).thenReturn(new StandardWorkspace().withName(WORKSPACE_NAME)); + } + + @Test + void testExistingEmailInsideWorkspaceOrg() throws Exception { + when(workspaceService.getOrganizationIdFromWorkspaceId(WORKSPACE_ID)).thenReturn(Optional.of(ORG_ID)); + + // set up three users with the same email, two in the workspace's org and one outside of it. 
+ final User matchingUserInOrg1 = new User().withUserId(UUID.randomUUID()).withEmail(INVITED_EMAIL); + final User matchingUserInOrg2 = new User().withUserId(UUID.randomUUID()).withEmail(INVITED_EMAIL); + final User matchingUserNotInOrg = new User().withUserId(UUID.randomUUID()).withEmail(INVITED_EMAIL); + when(userPersistence.getUsersByEmail(INVITED_EMAIL)).thenReturn(List.of(matchingUserInOrg1, matchingUserInOrg2, matchingUserNotInOrg)); + + // set up three users inside the workspace's org, two with the same email and one with a different + // email. + final User otherUserInOrg = new User().withUserId(UUID.randomUUID()).withEmail("other@airbyte.io"); + when(permissionPersistence.listUsersInOrganization(ORG_ID)).thenReturn(List.of( + new UserPermission().withUser(matchingUserInOrg1), + new UserPermission().withUser(matchingUserInOrg2), + new UserPermission().withUser(otherUserInOrg))); + + // call the handler method under test. + final UserInvitationCreateResponse result = handler.createInvitationOrPermission(USER_INVITATION_CREATE_REQUEST_BODY, CURRENT_USER); + + // make sure permissions were created, appropriate email was sent, and result is correct. + verifyPermissionAddedResult(Set.of(matchingUserInOrg1.getUserId(), matchingUserInOrg2.getUserId()), result); + } + + private void verifyPermissionAddedResult(final Set expectedUserIds, final UserInvitationCreateResponse result) throws Exception { + // capture and verify the permissions that are created by the permission handler. + final ArgumentCaptor permissionCreateCaptor = ArgumentCaptor.forClass(PermissionCreate.class); + verify(permissionHandler, times(expectedUserIds.size())).createPermission(permissionCreateCaptor.capture()); + verifyNoMoreInteractions(permissionHandler); + + // verify one captured permissionCreate per expected userId. 
+ final List capturedPermissionCreateValues = permissionCreateCaptor.getAllValues(); + assertEquals(expectedUserIds.size(), capturedPermissionCreateValues.size()); + + for (final PermissionCreate capturedPermissionCreate : capturedPermissionCreateValues) { + assertEquals(WORKSPACE_ID, capturedPermissionCreate.getWorkspaceId()); + assertEquals(PermissionType.WORKSPACE_ADMIN, capturedPermissionCreate.getPermissionType()); + assertTrue(expectedUserIds.contains(capturedPermissionCreate.getUserId())); + } + + // make sure the email sender was called with the correct inputs. + final ArgumentCaptor emailConfigCaptor = ArgumentCaptor.forClass(CustomerIoEmailConfig.class); + verify(customerIoEmailNotificationSender, times(1)).sendNotificationOnInvitingExistingUser( + emailConfigCaptor.capture(), + eq(CURRENT_USER.getName()), + eq(WORKSPACE_NAME)); + + assertEquals(INVITED_EMAIL, emailConfigCaptor.getValue().getTo()); + + // make sure no other emails are sent. + verifyNoMoreInteractions(customerIoEmailNotificationSender); + + // make sure we never created a user invitation, because the add-permission path was taken instead. + verify(service, times(0)).createUserInvitation(any()); + + // make sure the final result is correct + assertTrue(result.getDirectlyAdded()); + assertNull(result.getInviteCode()); + + // we don't send a "user invited" event when a user is directly added to a workspace. 
+ verify(trackingClient, never()).track(any(), any(), any()); + } + + } + + } + + @Nested + class AcceptInvitation { + + private static final String INVITE_CODE = "invite-code"; + private static final InviteCodeRequestBody INVITE_CODE_REQUEST_BODY = new InviteCodeRequestBody().inviteCode(INVITE_CODE); + private static final String CURRENT_USER_EMAIL = "current@airbyte.io"; + private static final User CURRENT_USER = new User().withUserId(UUID.randomUUID()).withEmail(CURRENT_USER_EMAIL); + + @Test + void testEmailMatches() throws Exception { + final UserInvitation invitation = new UserInvitation() + .withInviteCode(INVITE_CODE) + .withInvitedEmail(CURRENT_USER_EMAIL); + + final UserInvitationRead invitationRead = mock(UserInvitationRead.class); + + when(service.getUserInvitationByInviteCode(INVITE_CODE)).thenReturn(invitation); + when(service.acceptUserInvitation(INVITE_CODE, CURRENT_USER.getUserId())) + .thenReturn(invitation); + + when(mapper.toApi(invitation)).thenReturn(invitationRead); + + final UserInvitationRead result = handler.accept(INVITE_CODE_REQUEST_BODY, CURRENT_USER); + + verify(service, times(1)).acceptUserInvitation(INVITE_CODE, CURRENT_USER.getUserId()); + verifyNoMoreInteractions(service); + + // make sure the result is whatever the mapper outputs. + assertEquals(invitationRead, result); + } + + @Test + void testEmailDoesNotMatch() throws Exception { + final UserInvitation invitation = new UserInvitation() + .withInviteCode(INVITE_CODE) + .withInvitedEmail("different@airbyte.io"); + + when(service.getUserInvitationByInviteCode(INVITE_CODE)).thenReturn(invitation); + + assertThrows(OperationNotAllowedException.class, () -> handler.accept(INVITE_CODE_REQUEST_BODY, CURRENT_USER)); + + // make sure the service method to accept the invitation was never called. 
+ verify(service, times(0)).acceptUserInvitation(any(), any()); + } + + @Test + void testInvitationStatusUnexpected() throws Exception { + final UserInvitation invitation = new UserInvitation() + .withInviteCode(INVITE_CODE) + .withInvitedEmail(CURRENT_USER_EMAIL); + + when(service.getUserInvitationByInviteCode(INVITE_CODE)).thenReturn(invitation); + + doThrow(new InvitationStatusUnexpectedException("not pending")) + .when(service).acceptUserInvitation(INVITE_CODE, CURRENT_USER.getUserId()); + + assertThrows(ConflictException.class, () -> handler.accept(INVITE_CODE_REQUEST_BODY, CURRENT_USER)); + } + + @Test + void testInvitationExpired() throws Exception { + final UserInvitation invitation = new UserInvitation() + .withInviteCode(INVITE_CODE) + .withInvitedEmail(CURRENT_USER_EMAIL); + + when(service.getUserInvitationByInviteCode(INVITE_CODE)).thenReturn(invitation); + doThrow(new InvitationStatusUnexpectedException("expired")) + .when(service).acceptUserInvitation(INVITE_CODE, CURRENT_USER.getUserId()); + + assertThrows(ConflictException.class, () -> handler.accept(INVITE_CODE_REQUEST_BODY, CURRENT_USER)); + } + + } + + @Nested + class CancelInvitation { + + @Test + void testCancelInvitationCallsService() throws Exception { + final String inviteCode = "invite-code"; + final InviteCodeRequestBody req = new InviteCodeRequestBody().inviteCode(inviteCode); + + final UserInvitation cancelledInvitation = new UserInvitation() + .withInviteCode(inviteCode) + .withInvitedEmail("invited@airbyte.io") + .withStatus(InvitationStatus.CANCELLED); + + when(service.cancelUserInvitation(inviteCode)).thenReturn(cancelledInvitation); + when(mapper.toApi(cancelledInvitation)).thenReturn(mock(UserInvitationRead.class)); + + final UserInvitationRead result = handler.cancel(req); + + verify(service, times(1)).cancelUserInvitation(inviteCode); + verifyNoMoreInteractions(service); + } + + @Test + void testCancelInvitationThrowsConflictExceptionOnUnexpectedStatus() throws Exception { + 
final String inviteCode = "invite-code"; + final InviteCodeRequestBody req = new InviteCodeRequestBody().inviteCode(inviteCode); + + when(service.cancelUserInvitation(inviteCode)).thenThrow(new InvitationStatusUnexpectedException("unexpected status")); + + assertThrows(ConflictException.class, () -> handler.cancel(req)); + } + } @Test - public void testCreateUserInvitation() throws JsonValidationException, ConfigNotFoundException, IOException { - // mocked data - UserInvitationCreateRequestBody req = new UserInvitationCreateRequestBody(); - req.setInvitedEmail("test@example.com"); - User currentUser = new User(); - UUID currentUserId = UUID.randomUUID(); - currentUser.setUserId(currentUserId); - currentUser.setName("inviterName"); - UserInvitation saved = new UserInvitation(); - saved.setInviteCode("randomCode"); - saved.setInviterUserId(currentUserId); - saved.setStatus(InvitationStatus.PENDING); - UserInvitationRead expected = new UserInvitationRead(); - expected.setInviteCode(saved.getInviteCode()); - expected.setInvitedEmail(req.getInvitedEmail()); - expected.setInviterUserId(currentUserId); - - when(mapper.toDomain(req)).thenReturn(new UserInvitation()); - when(service.createUserInvitation(any(UserInvitation.class))).thenReturn(saved); - when(webUrlHelper.getBaseUrl()).thenReturn("cloud.airbyte.com"); - when(mapper.toApi(saved)).thenReturn(expected); - - final UserInvitationRead result = handler.create(req, currentUser); - - verify(mapper, times(1)).toDomain(req); - verify(service, times(1)).createUserInvitation(any(UserInvitation.class)); - verify(webUrlHelper, times(1)).getBaseUrl(); - verify(customerIoEmailNotificationSender, times(1)) - .sendInviteToUser(any(CustomerIoEmailConfig.class), anyString(), anyString()); - - assert result != null; - assert result.equals(expected); + void getPendingInvitationsTest() { + final UUID workspaceId = UUID.randomUUID(); + final UUID organizationId = UUID.randomUUID(); + final List workspaceInvitations = 
List.of(mock(UserInvitation.class), mock(UserInvitation.class)); + final List organizationInvitations = List.of(mock(UserInvitation.class), mock(UserInvitation.class), mock(UserInvitation.class)); + + when(service.getPendingInvitations(ScopeType.WORKSPACE, workspaceId)).thenReturn(workspaceInvitations); + when(service.getPendingInvitations(ScopeType.ORGANIZATION, organizationId)).thenReturn(organizationInvitations); + + when(mapper.toDomain(io.airbyte.api.model.generated.ScopeType.WORKSPACE)).thenReturn(ScopeType.WORKSPACE); + when(mapper.toDomain(io.airbyte.api.model.generated.ScopeType.ORGANIZATION)).thenReturn(ScopeType.ORGANIZATION); + when(mapper.toApi(any(UserInvitation.class))).thenReturn(mock(UserInvitationRead.class)); + + final List workspaceResult = handler.getPendingInvitations(new UserInvitationListRequestBody() + .scopeType(io.airbyte.api.model.generated.ScopeType.WORKSPACE) + .scopeId(workspaceId)); + final List organizationResult = handler.getPendingInvitations(new UserInvitationListRequestBody() + .scopeType(io.airbyte.api.model.generated.ScopeType.ORGANIZATION) + .scopeId(organizationId)); + + assertEquals(workspaceInvitations.size(), workspaceResult.size()); + assertEquals(organizationInvitations.size(), organizationResult.size()); + + verify(service, times(1)).getPendingInvitations(ScopeType.WORKSPACE, workspaceId); + verify(service, times(1)).getPendingInvitations(ScopeType.ORGANIZATION, organizationId); + + verify(mapper, times(workspaceInvitations.size() + organizationInvitations.size())).toApi(any(UserInvitation.class)); } } diff --git a/airbyte-server/src/test/kotlin/authorization/AirbyteApiAuthorizationHelperTest.kt b/airbyte-server/src/test/kotlin/authorization/AirbyteApiAuthorizationHelperTest.kt deleted file mode 100644 index eeec04f3282..00000000000 --- a/airbyte-server/src/test/kotlin/authorization/AirbyteApiAuthorizationHelperTest.kt +++ /dev/null @@ -1,114 +0,0 @@ -package authorization - -import 
io.airbyte.api.model.generated.PermissionCheckRead -import io.airbyte.api.model.generated.PermissionType -import io.airbyte.commons.server.handlers.PermissionHandler -import io.airbyte.commons.server.support.AuthenticationHeaderResolver -import io.airbyte.commons.server.support.CurrentUserService -import io.airbyte.server.apis.publicapi.authorization.AirbyteApiAuthorizationHelper -import io.airbyte.server.apis.publicapi.authorization.Scope -import io.airbyte.server.apis.publicapi.problems.ForbiddenProblem -import io.mockk.every -import io.mockk.mockk -import org.junit.jupiter.api.BeforeEach -import org.junit.jupiter.api.Test -import org.junit.jupiter.api.assertDoesNotThrow -import org.junit.jupiter.api.assertThrows -import java.util.UUID - -class AirbyteApiAuthorizationHelperTest { - private val authenticationHeaderResolver = mockk() - private val currentUserService = mockk() - private val permissionHandler = mockk() - private val airbyteApiAuthorizationHelper = AirbyteApiAuthorizationHelper(authenticationHeaderResolver, permissionHandler, currentUserService) - - private val userId = UUID.randomUUID() - private val workspaceId = UUID.randomUUID() - - @BeforeEach - fun setup() { - every { authenticationHeaderResolver.resolveWorkspace(any()) } returns listOf(workspaceId) - every { currentUserService.currentUser.userId } returns userId - } - - @Test - fun `test checkWorkspacePermissions for instance admin`() { - every { permissionHandler.isUserInstanceAdmin(any()) } returns true - // shouldn't matter because we're an instance admin. 
- every { permissionHandler.permissionsCheckMultipleWorkspaces(any()) } returns - PermissionCheckRead().message("no").status(PermissionCheckRead.StatusEnum.FAILED) - val ids = listOf(UUID.randomUUID().toString()) - val scope = Scope.WORKSPACE - val permissionTypes = setOf(PermissionType.WORKSPACE_EDITOR, PermissionType.ORGANIZATION_EDITOR) - assertDoesNotThrow { - airbyteApiAuthorizationHelper.checkWorkspacePermissions(ids, scope, userId, permissionTypes) - } - } - - @Test - fun `test checkWorkspacePermissions with empty workspace Ids`() { - every { permissionHandler.isUserInstanceAdmin(any()) } returns false - - val permissionTypes = setOf(PermissionType.WORKSPACE_EDITOR, PermissionType.ORGANIZATION_EDITOR) - - for (scope in Scope.entries) { - if (scope == Scope.WORKSPACES) { - // Allow empty ids for WORKSPACES scope specifically - assertDoesNotThrow { - airbyteApiAuthorizationHelper.checkWorkspacePermissions(emptyList(), scope, userId, permissionTypes) - } - } else { - // Disallow empty ids for other scopes - assertThrows { - airbyteApiAuthorizationHelper.checkWorkspacePermissions(emptyList(), scope, userId, permissionTypes) - } - } - } - } - - @Test - fun `test checkWorkspacePermissions with null workspace Ids`() { - every { permissionHandler.isUserInstanceAdmin(any()) } returns false - - val ids = listOf(UUID.randomUUID().toString()) - val permissionTypes = setOf(PermissionType.WORKSPACE_EDITOR, PermissionType.ORGANIZATION_EDITOR) - - // can't resolve workspaces - every { authenticationHeaderResolver.resolveWorkspace(any()) } returns null - - assertThrows { - airbyteApiAuthorizationHelper.checkWorkspacePermissions(ids, Scope.WORKSPACE, userId, permissionTypes) - } - } - - @Test - fun `test checkWorkspacePermissions for passing and failing permission checks`() { - every { permissionHandler.isUserInstanceAdmin(any()) } returns false - - val ids = listOf(UUID.randomUUID().toString()) - val scope = Scope.WORKSPACES - val permissionTypes = 
setOf(PermissionType.WORKSPACE_EDITOR, PermissionType.ORGANIZATION_EDITOR) - - // as long as we have one permission type that passes, we pass the overall check - every { permissionHandler.permissionsCheckMultipleWorkspaces(any()) } returnsMany - listOf( - PermissionCheckRead().message("no").status(PermissionCheckRead.StatusEnum.FAILED), - PermissionCheckRead().message("yes").status(PermissionCheckRead.StatusEnum.SUCCEEDED), - ) - - assertDoesNotThrow { - airbyteApiAuthorizationHelper.checkWorkspacePermissions(ids, scope, userId, permissionTypes) - } - - // if no permission types pass, we fail the overall check - every { permissionHandler.permissionsCheckMultipleWorkspaces(any()) } returnsMany - listOf( - PermissionCheckRead().message("no").status(PermissionCheckRead.StatusEnum.FAILED), - PermissionCheckRead().message("no again").status(PermissionCheckRead.StatusEnum.FAILED), - ) - - assertThrows { - airbyteApiAuthorizationHelper.checkWorkspacePermissions(ids, scope, userId, permissionTypes) - } - } -} diff --git a/airbyte-server/src/test/kotlin/io/airbyte/server/apis/publicapi/errorHandlers/ConfigClientErrorHandlerTest.kt b/airbyte-server/src/test/kotlin/io/airbyte/server/apis/publicapi/errorHandlers/ConfigClientErrorHandlerTest.kt new file mode 100644 index 00000000000..ca72443dc9a --- /dev/null +++ b/airbyte-server/src/test/kotlin/io/airbyte/server/apis/publicapi/errorHandlers/ConfigClientErrorHandlerTest.kt @@ -0,0 +1,108 @@ +package io.airbyte.server.apis.publicapi.errorHandlers + +import io.airbyte.commons.json.Jsons +import io.airbyte.commons.server.errors.ValueConflictKnownException +import io.airbyte.commons.server.errors.problems.BadRequestProblem +import io.airbyte.commons.server.errors.problems.ConflictProblem +import io.airbyte.commons.server.errors.problems.InvalidApiKeyProblem +import io.airbyte.commons.server.errors.problems.ResourceNotFoundProblem +import io.airbyte.commons.server.errors.problems.SyncConflictProblem +import 
io.airbyte.commons.server.errors.problems.UnexpectedProblem +import io.airbyte.commons.server.errors.problems.UnprocessableEntityProblem +import io.airbyte.config.persistence.ConfigNotFoundException +import io.airbyte.validation.json.JsonSchemaValidator +import io.airbyte.validation.json.JsonValidationException +import io.micronaut.http.HttpResponse +import io.micronaut.http.HttpResponseFactory +import io.micronaut.http.HttpStatus +import org.junit.jupiter.api.Test +import org.junit.jupiter.api.assertDoesNotThrow +import org.junit.jupiter.api.assertThrows +import java.util.UUID + +class ConfigClientErrorHandlerTest { + private val resourceId = UUID.randomUUID() + + private val httpResponseFactory = HttpResponseFactory.INSTANCE + + @Test + fun `test that it can handle errors for an HttpResponse`() { + val notFoundResponse = HttpResponse.notFound("body") + assertThrows { ConfigClientErrorHandler.handleError(notFoundResponse, resourceId.toString()) } + + val conflictResponse = + httpResponseFactory.status(HttpStatus.CONFLICT, "test") + .body(mapOf("message" to "test")) + assertThrows { ConfigClientErrorHandler.handleError(conflictResponse, resourceId.toString()) } + + val unauthorizedResponse = httpResponseFactory.status(HttpStatus.UNAUTHORIZED) + assertThrows { ConfigClientErrorHandler.handleError(unauthorizedResponse, resourceId.toString()) } + + val unprocessibleEntityResponse = + httpResponseFactory.status(HttpStatus.UNPROCESSABLE_ENTITY, "test") + .body(mapOf("message" to "test")) + assertThrows { ConfigClientErrorHandler.handleError(unprocessibleEntityResponse, resourceId.toString()) } + + val badRequestResponse = httpResponseFactory.status(HttpStatus.BAD_REQUEST, "test") + assertThrows { ConfigClientErrorHandler.handleError(badRequestResponse, resourceId.toString()) } + + val unexpectedResponse = httpResponseFactory.status(HttpStatus.INTERNAL_SERVER_ERROR, "test") + assertThrows { ConfigClientErrorHandler.handleError(unexpectedResponse, resourceId.toString()) } 
+ } + + @Test + fun `test that it can handle throwables`() { + assertThrows { ConfigClientErrorHandler.handleError(ConfigNotFoundException("test", "test"), resourceId.toString()) } + + assertThrows { ConfigClientErrorHandler.handleError(ValueConflictKnownException("test"), resourceId.toString()) } + + assertThrows { ConfigClientErrorHandler.handleError(IllegalStateException(), resourceId.toString()) } + + assertThrows { ConfigClientErrorHandler.handleError(JsonValidationException("test"), resourceId.toString()) } + } + + @Test + fun `test that it can handle JSON validation errors gracefully`() { + val schema = + Jsons.deserialize( + """ + { + "type": "object", + "title": "Pokeapi Spec", + "${"$"}schema": "http://json-schema.org/draft-07/schema#", + "required": [ + "pokemon_name" + ], + "properties": { + "pokemon_name": { + "enum": [ + "bulbasaur", + "ivysaur" + ], + "type": "string", + "title": "Pokemon Name" + } + } + } + """.trimIndent(), + ) + + runCatching { JsonSchemaValidator().ensure(schema, Jsons.deserialize("{\"test\": \"test\"}")) } + .onFailure { assertThrows { ConfigClientErrorHandler.handleError(it, resourceId.toString()) } } + } + + @Test + fun `test that it can handle job cancellation failures gracefully`() { + val failureReason = "Could not find job with id: -1" + assertThrows(JOB_NOT_RUNNING_MESSAGE) { + ConfigClientErrorHandler.handleError(RuntimeException(failureReason), resourceId.toString()) + } + } + + @Test + fun `test that it doesn't throw on non-error http responses`() { + assertDoesNotThrow { + ConfigClientErrorHandler.handleError(HttpResponse.ok(), resourceId.toString()) + } + } +} diff --git a/airbyte-server/src/test/kotlin/io/airbyte/server/apis/publicapi/helpers/AirbyteCatalogHelperTest.kt b/airbyte-server/src/test/kotlin/io/airbyte/server/apis/publicapi/helpers/AirbyteCatalogHelperTest.kt new file mode 100644 index 00000000000..1b131766076 --- /dev/null +++ 
b/airbyte-server/src/test/kotlin/io/airbyte/server/apis/publicapi/helpers/AirbyteCatalogHelperTest.kt @@ -0,0 +1,601 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.server.apis.publicapi.helpers + +import io.airbyte.api.model.generated.AirbyteCatalog +import io.airbyte.api.model.generated.AirbyteStream +import io.airbyte.api.model.generated.AirbyteStreamAndConfiguration +import io.airbyte.api.model.generated.AirbyteStreamConfiguration +import io.airbyte.api.model.generated.DestinationSyncMode +import io.airbyte.api.model.generated.SelectedFieldInfo +import io.airbyte.api.model.generated.SyncMode +import io.airbyte.commons.json.Jsons +import io.airbyte.commons.server.errors.problems.ConnectionConfigurationProblem +import io.airbyte.public_api.model.generated.AirbyteApiConnectionSchedule +import io.airbyte.public_api.model.generated.ConnectionSyncModeEnum +import io.airbyte.public_api.model.generated.ScheduleTypeEnum +import io.airbyte.public_api.model.generated.StreamConfiguration +import io.airbyte.public_api.model.generated.StreamConfigurations +import io.mockk.every +import io.mockk.mockk +import org.junit.jupiter.api.Assertions.assertEquals +import org.junit.jupiter.api.Assertions.assertFalse +import org.junit.jupiter.api.Assertions.assertThrows +import org.junit.jupiter.api.Assertions.assertTrue +import org.junit.jupiter.api.Test +import org.junit.jupiter.params.ParameterizedTest +import org.junit.jupiter.params.provider.EnumSource + +internal class AirbyteCatalogHelperTest { + @Test + internal fun `test that a stream configuration is not empty`() { + val streamConfigurations: StreamConfigurations = mockk() + + every { streamConfigurations.streams } returns listOf(mockk()) + + assertTrue(AirbyteCatalogHelper.hasStreamConfigurations(streamConfigurations)) + } + + @Test + internal fun `test that a stream configuration is empty`() { + val streamConfigurations: StreamConfigurations = mockk() + + every { 
streamConfigurations.streams } returns listOf() + + assertFalse(AirbyteCatalogHelper.hasStreamConfigurations(streamConfigurations)) + + every { streamConfigurations.streams } returns null + + assertFalse(AirbyteCatalogHelper.hasStreamConfigurations(streamConfigurations)) + + assertFalse(AirbyteCatalogHelper.hasStreamConfigurations(null)) + } + + @Test + internal fun `test that a copy of the AirbyteStreamConfiguration is returned when it is updated to full refresh overwrite mode`() { + val originalStreamConfiguration = createAirbyteStreamConfiguration() + + val updatedStreamConfiguration = AirbyteCatalogHelper.updateConfigDefaultFullRefreshOverwrite(config = originalStreamConfiguration) + assertFalse(originalStreamConfiguration === updatedStreamConfiguration) + assertEquals(SyncMode.FULL_REFRESH, updatedStreamConfiguration.syncMode) + assertEquals(DestinationSyncMode.OVERWRITE, updatedStreamConfiguration.destinationSyncMode) + } + + @Test + internal fun `test that a copy of the AirbyteCatalog is returned when all of its streams are updated to full refresh overwrite mode`() { + val originalAirbyteCatalog = createAirbyteCatalog() + val updatedAirbyteCatalog = AirbyteCatalogHelper.updateAllStreamsFullRefreshOverwrite(airbyteCatalog = originalAirbyteCatalog) + assertFalse(originalAirbyteCatalog === updatedAirbyteCatalog) + updatedAirbyteCatalog.streams.stream().forEach { stream -> + assertEquals(SyncMode.FULL_REFRESH, stream.config?.syncMode) + assertEquals(DestinationSyncMode.OVERWRITE, stream.config?.destinationSyncMode) + } + } + + @Test + internal fun `test that streams can be validated`() { + val referenceCatalog = createAirbyteCatalog() + val streamConfiguration = StreamConfiguration() + streamConfiguration.name = "name1" + val streamConfigurations = StreamConfigurations() + streamConfigurations.streams = listOf(streamConfiguration) + + assertTrue(AirbyteCatalogHelper.validateStreams(referenceCatalog = referenceCatalog, streamConfigurations = 
streamConfigurations)) + } + + @Test + internal fun `test that a stream with an invalid name is considered to be invalid`() { + val referenceCatalog = createAirbyteCatalog() + val streamConfiguration = StreamConfiguration() + streamConfiguration.name = "unknown" + val streamConfigurations = StreamConfigurations() + streamConfigurations.streams = listOf(streamConfiguration) + + val throwable = + assertThrows(ConnectionConfigurationProblem::class.java) { + AirbyteCatalogHelper.validateStreams(referenceCatalog = referenceCatalog, streamConfigurations = streamConfigurations) + } + assertEquals(true, throwable.message?.contains("Invalid stream found")) + } + + @Test + internal fun `test that streams with duplicate streams is considered to be invalid`() { + val referenceCatalog = createAirbyteCatalog() + val streamConfiguration1 = StreamConfiguration() + streamConfiguration1.name = "name1" + val streamConfiguration2 = StreamConfiguration() + streamConfiguration2.name = "name1" + val streamConfigurations = StreamConfigurations() + streamConfigurations.streams = listOf(streamConfiguration1, streamConfiguration2) + + val throwable = + assertThrows(ConnectionConfigurationProblem::class.java) { + AirbyteCatalogHelper.validateStreams(referenceCatalog = referenceCatalog, streamConfigurations = streamConfigurations) + } + assertEquals(true, throwable.message?.contains("Duplicate stream found in configuration")) + } + + @Test + internal fun `test that valid streams can be retrieved from the AirbyteCatalog`() { + val airbyteCatalog = createAirbyteCatalog() + val validStreamNames = AirbyteCatalogHelper.getValidStreams(airbyteCatalog = airbyteCatalog) + assertEquals(airbyteCatalog.streams.map { it.stream?.name }.toSet(), validStreamNames.keys) + } + + @Test + internal fun `test that the cron configuration can be validated`() { + val connectionSchedule = AirbyteApiConnectionSchedule() + connectionSchedule.scheduleType = ScheduleTypeEnum.CRON + connectionSchedule.cronExpression = "0 
15 10 * * ? * UTC" + assertTrue(AirbyteCatalogHelper.validateCronConfiguration(connectionSchedule = connectionSchedule)) + assertFalse(connectionSchedule.cronExpression.contains("UTC")) + + connectionSchedule.scheduleType = ScheduleTypeEnum.MANUAL + assertTrue(AirbyteCatalogHelper.validateCronConfiguration(connectionSchedule = connectionSchedule)) + } + + @Test + internal fun `test that the cron configuration with a missing cron expression is invalid`() { + val connectionSchedule = AirbyteApiConnectionSchedule() + connectionSchedule.scheduleType = ScheduleTypeEnum.CRON + connectionSchedule.cronExpression = null + + val throwable = + assertThrows(ConnectionConfigurationProblem::class.java) { + AirbyteCatalogHelper.validateCronConfiguration(connectionSchedule = connectionSchedule) + } + assertEquals(true, throwable.message?.contains("Missing cron expression in the schedule.")) + } + + @Test + internal fun `test that the cron configuration with an invalid cron expression length is invalid`() { + val connectionSchedule = AirbyteApiConnectionSchedule() + connectionSchedule.scheduleType = ScheduleTypeEnum.CRON + connectionSchedule.cronExpression = "0 15 10 * * ? 
* * * *" + + val throwable = + assertThrows(ConnectionConfigurationProblem::class.java) { + AirbyteCatalogHelper.validateCronConfiguration(connectionSchedule = connectionSchedule) + } + assertEquals(true, throwable.message?.contains("Cron expression contains 10 parts but we expect one of [6, 7]")) + } + + @Test + internal fun `test that the cron configuration with an invalid cron expression is invalid`() { + val connectionSchedule = AirbyteApiConnectionSchedule() + connectionSchedule.scheduleType = ScheduleTypeEnum.CRON + connectionSchedule.cronExpression = "not a valid cron expression string" + + val throwable = + assertThrows(ConnectionConfigurationProblem::class.java) { + AirbyteCatalogHelper.validateCronConfiguration(connectionSchedule = connectionSchedule) + } + assertEquals(true, throwable.message?.contains("Failed to parse cron expression. Invalid chars in expression!")) + } + + @ParameterizedTest + @EnumSource(ConnectionSyncModeEnum::class) + internal fun `test that when a stream configuration is updated, the correct sync modes are set based on the stream configuration`( + connectionSyncMode: ConnectionSyncModeEnum, + ) { + val cursorField = "cursor" + val primaryKeyColumn = "primary" + val airbyteStream = AirbyteStream() + val airbyteStreamConfiguration = createAirbyteStreamConfiguration() + val streamConfiguration = StreamConfiguration() + streamConfiguration.syncMode = connectionSyncMode + streamConfiguration.cursorField = listOf(cursorField) + streamConfiguration.primaryKey = listOf(listOf(primaryKeyColumn)) + + val updatedAirbyteStreamConfiguration = + AirbyteCatalogHelper.updateAirbyteStreamConfiguration( + config = airbyteStreamConfiguration, + airbyteStream = airbyteStream, + streamConfiguration = streamConfiguration, + ) + assertEquals(true, updatedAirbyteStreamConfiguration.selected) + assertEquals(getSyncMode(connectionSyncMode), updatedAirbyteStreamConfiguration.syncMode) + assertEquals(getDestinationSyncMode(connectionSyncMode), 
updatedAirbyteStreamConfiguration.destinationSyncMode) + } + + @Test + internal fun `test that when a stream configuration does not have a configured sync mode, the updated configuration uses full refresh overwrite`() { + val cursorField = "cursor" + val primaryKeyColumn = "primary" + val airbyteStream = AirbyteStream() + val airbyteStreamConfiguration = createAirbyteStreamConfiguration() + val streamConfiguration = StreamConfiguration() + streamConfiguration.syncMode = null + streamConfiguration.cursorField = listOf(cursorField) + streamConfiguration.primaryKey = listOf(listOf(primaryKeyColumn)) + + val updatedAirbyteStreamConfiguration = + AirbyteCatalogHelper.updateAirbyteStreamConfiguration( + config = airbyteStreamConfiguration, + airbyteStream = airbyteStream, + streamConfiguration = streamConfiguration, + ) + + assertEquals(true, updatedAirbyteStreamConfiguration.selected) + assertEquals(SyncMode.FULL_REFRESH, updatedAirbyteStreamConfiguration.syncMode) + assertEquals(DestinationSyncMode.OVERWRITE, updatedAirbyteStreamConfiguration.destinationSyncMode) + } + + @Test + internal fun `test that when validating a stream without a sync mode, the sync mode is set to full refresh and the stream is considered valid`() { + val airbyteStream = AirbyteStream() + val streamConfiguration = StreamConfiguration() + streamConfiguration.syncMode = null + val airbyteStreamConfiguration = + AirbyteCatalogHelper.updateAirbyteStreamConfiguration( + config = createAirbyteStreamConfiguration(), + airbyteStream = airbyteStream, + streamConfiguration = streamConfiguration, + ) + + assertTrue(AirbyteCatalogHelper.validateStreamConfig(streamConfiguration, listOf(), airbyteStream)) + assertEquals(SyncMode.FULL_REFRESH, airbyteStreamConfiguration.syncMode) + assertEquals(DestinationSyncMode.OVERWRITE, airbyteStreamConfiguration.destinationSyncMode) + assertEquals(true, airbyteStreamConfiguration.selected) + } + + @Test + internal fun `test that if the stream configuration contains an
invalid sync mode, the stream is considered invalid`() { + val airbyteStream = AirbyteStream() + val streamConfiguration = StreamConfiguration() + airbyteStream.supportedSyncModes = listOf(SyncMode.INCREMENTAL) + streamConfiguration.syncMode = ConnectionSyncModeEnum.FULL_REFRESH_OVERWRITE + streamConfiguration.name = "stream-name" + + val throwable = + assertThrows(ConnectionConfigurationProblem::class.java) { + AirbyteCatalogHelper.validateStreamConfig( + streamConfiguration = streamConfiguration, + validDestinationSyncModes = listOf(DestinationSyncMode.OVERWRITE), + airbyteStream = airbyteStream, + ) + } + assertEquals(true, throwable.message?.contains("Cannot set sync mode to ${streamConfiguration.syncMode} for stream")) + } + + @Test + internal fun `test that a stream configuration with FULL_REFRESH_APPEND is always considered to be valid`() { + val airbyteStream = AirbyteStream() + val streamConfiguration = StreamConfiguration() + airbyteStream.supportedSyncModes = listOf(SyncMode.FULL_REFRESH) + streamConfiguration.syncMode = ConnectionSyncModeEnum.FULL_REFRESH_APPEND + val airbyteStreamConfiguration = + AirbyteCatalogHelper.updateAirbyteStreamConfiguration( + createAirbyteStreamConfiguration(), + airbyteStream, + streamConfiguration, + ) + + assertTrue( + AirbyteCatalogHelper.validateStreamConfig( + streamConfiguration = streamConfiguration, + validDestinationSyncModes = listOf(DestinationSyncMode.APPEND), + airbyteStream = airbyteStream, + ), + ) + assertEquals(SyncMode.FULL_REFRESH, airbyteStreamConfiguration.syncMode) + assertEquals(DestinationSyncMode.APPEND, airbyteStreamConfiguration.destinationSyncMode) + } + + @Test + internal fun `test that a stream configuration with FULL_REFRESH_OVERWRITE is always considered to be valid`() { + val airbyteStream = AirbyteStream() + val streamConfiguration = StreamConfiguration() + airbyteStream.supportedSyncModes = listOf(SyncMode.FULL_REFRESH) + streamConfiguration.syncMode = 
ConnectionSyncModeEnum.FULL_REFRESH_OVERWRITE + val airbyteStreamConfiguration = + AirbyteCatalogHelper.updateAirbyteStreamConfiguration( + config = createAirbyteStreamConfiguration(), + airbyteStream = airbyteStream, + streamConfiguration = streamConfiguration, + ) + + assertTrue( + AirbyteCatalogHelper.validateStreamConfig( + streamConfiguration = streamConfiguration, + validDestinationSyncModes = listOf(DestinationSyncMode.OVERWRITE), + airbyteStream = airbyteStream, + ), + ) + assertEquals(SyncMode.FULL_REFRESH, airbyteStreamConfiguration.syncMode) + assertEquals(DestinationSyncMode.OVERWRITE, airbyteStreamConfiguration.destinationSyncMode) + } + + @Test + internal fun `test that a stream configuration with INCREMENTAL_APPEND is only valid if the source defined cursor field is also valid`() { + val cursorField = "cursor" + val airbyteStream = AirbyteStream() + val airbyteStreamConfiguration = createAirbyteStreamConfiguration() + val streamConfiguration = StreamConfiguration() + airbyteStream.defaultCursorField = listOf(cursorField) + airbyteStream.sourceDefinedCursor = true + airbyteStream.supportedSyncModes = listOf(SyncMode.INCREMENTAL) + streamConfiguration.cursorField = listOf(cursorField) + streamConfiguration.syncMode = ConnectionSyncModeEnum.INCREMENTAL_APPEND + + assertTrue( + AirbyteCatalogHelper.validateStreamConfig( + streamConfiguration = streamConfiguration, + validDestinationSyncModes = listOf(DestinationSyncMode.APPEND), + airbyteStream = airbyteStream, + ), + ) + assertEquals(SyncMode.INCREMENTAL, airbyteStreamConfiguration.syncMode) + assertEquals(DestinationSyncMode.APPEND, airbyteStreamConfiguration.destinationSyncMode) + assertEquals(listOf(cursorField), airbyteStreamConfiguration.cursorField) + } + + @Test + internal fun `test that a stream configuration with INCREMENTAL_APPEND is invalid if the source defined cursor field is invalid`() { + val cursorField = "cursor" + val streamName = "stream-name" + val airbyteStream = AirbyteStream() + 
val streamConfiguration = StreamConfiguration() + airbyteStream.defaultCursorField = listOf(cursorField) + airbyteStream.name = streamName + airbyteStream.sourceDefinedCursor = true + airbyteStream.supportedSyncModes = listOf(SyncMode.INCREMENTAL) + streamConfiguration.cursorField = listOf("other") + streamConfiguration.name = airbyteStream.name + streamConfiguration.syncMode = ConnectionSyncModeEnum.INCREMENTAL_APPEND + val airbyteStreamConfiguration = + AirbyteCatalogHelper.updateAirbyteStreamConfiguration( + config = createAirbyteStreamConfiguration(), + airbyteStream = airbyteStream, + streamConfiguration = streamConfiguration, + ) + + val throwable = + assertThrows(ConnectionConfigurationProblem::class.java) { + AirbyteCatalogHelper.validateStreamConfig( + streamConfiguration = streamConfiguration, + validDestinationSyncModes = listOf(DestinationSyncMode.APPEND), + airbyteStream = airbyteStream, + ) + } + assertEquals(true, throwable.message?.contains("Do not include a cursor field configuration for this stream")) + assertEquals(SyncMode.INCREMENTAL, airbyteStreamConfiguration.syncMode) + assertEquals(DestinationSyncMode.APPEND, airbyteStreamConfiguration.destinationSyncMode) + } + + @Test + internal fun `test that a stream configuration with INCREMENTAL_APPEND is only valid if the source cursor field is also valid`() { + val cursorField = "cursor" + val airbyteStream = AirbyteStream() + val streamConfiguration = StreamConfiguration() + airbyteStream.defaultCursorField = listOf(cursorField) + airbyteStream.jsonSchema = Jsons.deserialize("{\"properties\": {\"$cursorField\": {}}}") + airbyteStream.sourceDefinedCursor = false + airbyteStream.supportedSyncModes = listOf(SyncMode.INCREMENTAL) + streamConfiguration.cursorField = listOf(cursorField) + streamConfiguration.syncMode = ConnectionSyncModeEnum.INCREMENTAL_APPEND + val airbyteStreamConfiguration = + AirbyteCatalogHelper.updateAirbyteStreamConfiguration( + config = createAirbyteStreamConfiguration(), + 
airbyteStream = airbyteStream, + streamConfiguration = streamConfiguration, + ) + + assertTrue( + AirbyteCatalogHelper.validateStreamConfig( + streamConfiguration = streamConfiguration, + validDestinationSyncModes = listOf(DestinationSyncMode.APPEND), + airbyteStream = airbyteStream, + ), + ) + assertEquals(SyncMode.INCREMENTAL, airbyteStreamConfiguration.syncMode) + assertEquals(DestinationSyncMode.APPEND, airbyteStreamConfiguration.destinationSyncMode) + assertEquals(listOf(cursorField), airbyteStreamConfiguration.cursorField) + } + + @Test + internal fun `test that a stream configuration with INCREMENTAL_APPEND is invalid if the source cursor field is invalid`() { + val cursorField = "cursor" + val otherCursorField = "other" + val airbyteStream = AirbyteStream() + val streamConfiguration = StreamConfiguration() + airbyteStream.defaultCursorField = listOf(otherCursorField) + airbyteStream.jsonSchema = Jsons.deserialize("{\"properties\": {\"$otherCursorField\": {}}}") + airbyteStream.name = "name" + airbyteStream.sourceDefinedCursor = false + airbyteStream.supportedSyncModes = listOf(SyncMode.INCREMENTAL) + streamConfiguration.cursorField = listOf(cursorField) + streamConfiguration.syncMode = ConnectionSyncModeEnum.INCREMENTAL_APPEND + val airbyteStreamConfiguration = + AirbyteCatalogHelper.updateAirbyteStreamConfiguration( + config = createAirbyteStreamConfiguration(), + airbyteStream = airbyteStream, + streamConfiguration = streamConfiguration, + ) + + val throwable = + assertThrows(ConnectionConfigurationProblem::class.java) { + AirbyteCatalogHelper.validateStreamConfig( + streamConfiguration = streamConfiguration, + validDestinationSyncModes = listOf(DestinationSyncMode.APPEND), + airbyteStream = airbyteStream, + ) + } + assertEquals( + true, + throwable.message?.contains( + "Invalid cursor field for stream: ${airbyteStream.name}. 
The list of valid cursor fields include: [[$otherCursorField]]", + ), + ) + assertEquals(SyncMode.INCREMENTAL, airbyteStreamConfiguration.syncMode) + assertEquals(DestinationSyncMode.APPEND, airbyteStreamConfiguration.destinationSyncMode) + assertEquals(listOf(cursorField), airbyteStreamConfiguration.cursorField) + } + + @Test + internal fun `test that a stream configuration with INCREMENTAL_APPEND is invalid if there is no cursor field`() { + val cursorField = "cursor" + val airbyteStream = AirbyteStream() + val streamConfiguration = StreamConfiguration() + airbyteStream.defaultCursorField = listOf() + airbyteStream.jsonSchema = Jsons.deserialize("{\"properties\": {\"$cursorField\": {}}}") + airbyteStream.name = "name" + airbyteStream.sourceDefinedCursor = false + airbyteStream.supportedSyncModes = listOf(SyncMode.INCREMENTAL) + streamConfiguration.cursorField = listOf() + streamConfiguration.syncMode = ConnectionSyncModeEnum.INCREMENTAL_APPEND + val airbyteStreamConfiguration = + AirbyteCatalogHelper.updateAirbyteStreamConfiguration( + config = createAirbyteStreamConfiguration(), + airbyteStream = airbyteStream, + streamConfiguration = streamConfiguration, + ) + + val throwable = + assertThrows(ConnectionConfigurationProblem::class.java) { + AirbyteCatalogHelper.validateStreamConfig( + streamConfiguration = streamConfiguration, + validDestinationSyncModes = listOf(DestinationSyncMode.APPEND), + airbyteStream = airbyteStream, + ) + } + assertEquals( + true, + throwable.message?.contains( + "No default cursor field for stream: ${airbyteStream.name}. 
Please include a cursor field configuration for this stream.", + ), + ) + assertEquals(SyncMode.INCREMENTAL, airbyteStreamConfiguration.syncMode) + assertEquals(DestinationSyncMode.APPEND, airbyteStreamConfiguration.destinationSyncMode) + } + + @Test + internal fun `test that an INCREMENTAL_DEDUPED_HISTORY stream is only valid if the source defined cursor and primary key field are also valid`() { + val cursorField = "cursor" + val primaryKey = "primary" + val airbyteStream = AirbyteStream() + val streamConfiguration = StreamConfiguration() + airbyteStream.defaultCursorField = listOf(cursorField) + airbyteStream.jsonSchema = Jsons.deserialize("{\"properties\": {\"$cursorField\": {}, \"$primaryKey\": {}}}") + airbyteStream.sourceDefinedCursor = true + airbyteStream.supportedSyncModes = listOf(SyncMode.INCREMENTAL) + streamConfiguration.cursorField = listOf(cursorField) + streamConfiguration.primaryKey = listOf(listOf(primaryKey)) + streamConfiguration.syncMode = ConnectionSyncModeEnum.INCREMENTAL_DEDUPED_HISTORY + val airbyteStreamConfiguration = + AirbyteCatalogHelper.updateAirbyteStreamConfiguration( + config = createAirbyteStreamConfiguration(), + airbyteStream = airbyteStream, + streamConfiguration = streamConfiguration, + ) + + assertTrue( + AirbyteCatalogHelper.validateStreamConfig( + streamConfiguration = streamConfiguration, + validDestinationSyncModes = listOf(DestinationSyncMode.APPEND_DEDUP), + airbyteStream = airbyteStream, + ), + ) + assertEquals(SyncMode.INCREMENTAL, airbyteStreamConfiguration.syncMode) + assertEquals(DestinationSyncMode.APPEND_DEDUP, airbyteStreamConfiguration.destinationSyncMode) + assertEquals(listOf(cursorField), airbyteStreamConfiguration.cursorField) + assertEquals(listOf(listOf(primaryKey)), airbyteStreamConfiguration.primaryKey) + } + + @Test + internal fun `test that an INCREMENTAL_DEDUPED_HISTORY stream is only valid if the source cursor field and primary key field are also valid`() { + val cursorField = "cursor" + val 
primaryKey = "primary" + val airbyteStream = AirbyteStream() + val streamConfiguration = StreamConfiguration() + airbyteStream.defaultCursorField = listOf(cursorField) + airbyteStream.jsonSchema = Jsons.deserialize("{\"properties\": {\"$cursorField\": {}, \"$primaryKey\": {}}}") + airbyteStream.sourceDefinedCursor = false + airbyteStream.supportedSyncModes = listOf(SyncMode.INCREMENTAL) + streamConfiguration.cursorField = listOf(cursorField) + streamConfiguration.primaryKey = listOf(listOf(primaryKey)) + streamConfiguration.syncMode = ConnectionSyncModeEnum.INCREMENTAL_DEDUPED_HISTORY + val airbyteStreamConfiguration = + AirbyteCatalogHelper.updateAirbyteStreamConfiguration( + config = createAirbyteStreamConfiguration(), + airbyteStream = airbyteStream, + streamConfiguration = streamConfiguration, + ) + + assertTrue( + AirbyteCatalogHelper.validateStreamConfig( + streamConfiguration = streamConfiguration, + validDestinationSyncModes = listOf(DestinationSyncMode.APPEND_DEDUP), + airbyteStream = airbyteStream, + ), + ) + assertEquals(SyncMode.INCREMENTAL, airbyteStreamConfiguration.syncMode) + assertEquals(DestinationSyncMode.APPEND_DEDUP, airbyteStreamConfiguration.destinationSyncMode) + assertEquals(listOf(cursorField), airbyteStreamConfiguration.cursorField) + assertEquals(listOf(listOf(primaryKey)), airbyteStreamConfiguration.primaryKey) + } + + @Test + internal fun `test that the combined sync modes are valid`() { + val validSourceSyncModes = listOf(SyncMode.FULL_REFRESH) + val validDestinationSyncModes = listOf(DestinationSyncMode.OVERWRITE) + + val combinedSyncModes = + AirbyteCatalogHelper.validCombinedSyncModes( + validSourceSyncModes = validSourceSyncModes, + validDestinationSyncModes = validDestinationSyncModes, + ) + assertEquals(1, combinedSyncModes.size) + assertEquals(listOf(ConnectionSyncModeEnum.FULL_REFRESH_OVERWRITE).first(), combinedSyncModes.first()) + } + + private fun createAirbyteCatalog(): AirbyteCatalog { + val airbyteCatalog = 
AirbyteCatalog() + val streams = mutableListOf() + for (i in 1..5) { + val streamAndConfiguration = AirbyteStreamAndConfiguration() + val stream = AirbyteStream() + stream.name = "name$i" + stream.namespace = "namespace" + streamAndConfiguration.stream = stream + streamAndConfiguration.config = createAirbyteStreamConfiguration() + streams += streamAndConfiguration + } + airbyteCatalog.streams(streams) + return airbyteCatalog + } + + private fun createAirbyteStreamConfiguration(): AirbyteStreamConfiguration { + val airbyteStreamConfiguration = AirbyteStreamConfiguration() + airbyteStreamConfiguration.aliasName = "alias" + airbyteStreamConfiguration.cursorField = listOf("cursor") + airbyteStreamConfiguration.destinationSyncMode = DestinationSyncMode.APPEND + airbyteStreamConfiguration.fieldSelectionEnabled = true + airbyteStreamConfiguration.primaryKey = listOf(listOf("primary")) + airbyteStreamConfiguration.selected = false + airbyteStreamConfiguration.selectedFields = listOf(SelectedFieldInfo()) + airbyteStreamConfiguration.suggested = false + airbyteStreamConfiguration.syncMode = SyncMode.INCREMENTAL + return airbyteStreamConfiguration + } + + private fun getSyncMode(connectionSyncMode: ConnectionSyncModeEnum): SyncMode { + return when (connectionSyncMode) { + ConnectionSyncModeEnum.FULL_REFRESH_OVERWRITE -> SyncMode.FULL_REFRESH + ConnectionSyncModeEnum.FULL_REFRESH_APPEND -> SyncMode.FULL_REFRESH + ConnectionSyncModeEnum.INCREMENTAL_APPEND -> SyncMode.INCREMENTAL + ConnectionSyncModeEnum.INCREMENTAL_DEDUPED_HISTORY -> SyncMode.INCREMENTAL + } + } + + private fun getDestinationSyncMode(connectionSyncMode: ConnectionSyncModeEnum): DestinationSyncMode { + return when (connectionSyncMode) { + ConnectionSyncModeEnum.FULL_REFRESH_OVERWRITE -> DestinationSyncMode.OVERWRITE + ConnectionSyncModeEnum.FULL_REFRESH_APPEND -> DestinationSyncMode.APPEND + ConnectionSyncModeEnum.INCREMENTAL_APPEND -> DestinationSyncMode.APPEND + 
ConnectionSyncModeEnum.INCREMENTAL_DEDUPED_HISTORY -> DestinationSyncMode.APPEND_DEDUP + } + } +} diff --git a/airbyte-server/src/test/kotlin/io/airbyte/server/apis/publicapi/helpers/JobsHelperTest.kt b/airbyte-server/src/test/kotlin/io/airbyte/server/apis/publicapi/helpers/JobsHelperTest.kt new file mode 100644 index 00000000000..1478812dce2 --- /dev/null +++ b/airbyte-server/src/test/kotlin/io/airbyte/server/apis/publicapi/helpers/JobsHelperTest.kt @@ -0,0 +1,16 @@ +package io.airbyte.server.apis.publicapi.helpers + +import io.airbyte.api.model.generated.JobListForWorkspacesRequestBody +import org.junit.jupiter.api.Assertions.assertEquals +import org.junit.jupiter.api.Test + +class JobsHelperTest { + @Test + fun `it should return the correct field and method pair`() { + val result = orderByToFieldAndMethod("createdAt|ASC") + assertEquals(result.first.ordinal, JobListForWorkspacesRequestBody.OrderByFieldEnum.CREATEDAT.ordinal) + assertEquals(result.first::class, JobListForWorkspacesRequestBody.OrderByFieldEnum.CREATEDAT::class) + assertEquals(result.second.ordinal, JobListForWorkspacesRequestBody.OrderByMethodEnum.ASC.ordinal) + assertEquals(result.second::class, JobListForWorkspacesRequestBody.OrderByMethodEnum.ASC::class) + } +} diff --git a/airbyte-server/src/test/kotlin/io/airbyte/server/apis/publicapi/helpers/OAuthHelperTest.kt b/airbyte-server/src/test/kotlin/io/airbyte/server/apis/publicapi/helpers/OAuthHelperTest.kt new file mode 100644 index 00000000000..4fa4fd08f31 --- /dev/null +++ b/airbyte-server/src/test/kotlin/io/airbyte/server/apis/publicapi/helpers/OAuthHelperTest.kt @@ -0,0 +1,16 @@ +package io.airbyte.server.apis.publicapi.helpers + +import io.airbyte.commons.server.errors.problems.InvalidRedirectUrlProblem +import org.junit.jupiter.api.Test +import org.junit.jupiter.api.assertDoesNotThrow +import org.junit.jupiter.api.assertThrows + +class OAuthHelperTest { + @Test + fun `it should reject invalid redirect URLs for our initiateOAuth endpoint`() 
{ + assertThrows { OAuthHelper.validateRedirectUrl(null) } + assertThrows { OAuthHelper.validateRedirectUrl("http://example.com") } + assertThrows { OAuthHelper.validateRedirectUrl("this isn't a URL") } + assertDoesNotThrow { OAuthHelper.validateRedirectUrl("https://test-site.com/path/and/stuff?query=params") } + } +} diff --git a/airbyte-server/src/test/kotlin/helpers/PathHelperTest.kt b/airbyte-server/src/test/kotlin/io/airbyte/server/apis/publicapi/helpers/PathHelperTest.kt similarity index 74% rename from airbyte-server/src/test/kotlin/helpers/PathHelperTest.kt rename to airbyte-server/src/test/kotlin/io/airbyte/server/apis/publicapi/helpers/PathHelperTest.kt index 61447fe6e2e..d043a1b6284 100644 --- a/airbyte-server/src/test/kotlin/helpers/PathHelperTest.kt +++ b/airbyte-server/src/test/kotlin/io/airbyte/server/apis/publicapi/helpers/PathHelperTest.kt @@ -1,6 +1,5 @@ -package helpers +package io.airbyte.server.apis.publicapi.helpers -import io.airbyte.server.apis.publicapi.helpers.removePublicApiPathPrefix import org.junit.jupiter.api.Assertions.assertEquals import org.junit.jupiter.api.Test diff --git a/airbyte-server/src/test/kotlin/mappers/ConnectionCreateMapperTest.kt b/airbyte-server/src/test/kotlin/io/airbyte/server/apis/publicapi/mappers/ConnectionCreateMapperTest.kt similarity index 97% rename from airbyte-server/src/test/kotlin/mappers/ConnectionCreateMapperTest.kt rename to airbyte-server/src/test/kotlin/io/airbyte/server/apis/publicapi/mappers/ConnectionCreateMapperTest.kt index 33cdcbb8f0d..45df7eb69de 100644 --- a/airbyte-server/src/test/kotlin/mappers/ConnectionCreateMapperTest.kt +++ b/airbyte-server/src/test/kotlin/io/airbyte/server/apis/publicapi/mappers/ConnectionCreateMapperTest.kt @@ -1,4 +1,4 @@ -package mappers +package io.airbyte.server.apis.publicapi.mappers import io.airbyte.api.model.generated.AirbyteCatalog import io.airbyte.api.model.generated.ConnectionCreate @@ -13,7 +13,6 @@ import 
io.airbyte.public_api.model.generated.ConnectionStatusEnum import io.airbyte.public_api.model.generated.GeographyEnum import io.airbyte.public_api.model.generated.NamespaceDefinitionEnum import io.airbyte.public_api.model.generated.ScheduleTypeEnum -import io.airbyte.server.apis.publicapi.mappers.ConnectionCreateMapper import org.junit.jupiter.api.Assertions.assertEquals import org.junit.jupiter.api.Test import java.util.UUID diff --git a/airbyte-server/src/test/kotlin/mappers/ConnectionReadMapperTest.kt b/airbyte-server/src/test/kotlin/io/airbyte/server/apis/publicapi/mappers/ConnectionReadMapperTest.kt similarity index 95% rename from airbyte-server/src/test/kotlin/mappers/ConnectionReadMapperTest.kt rename to airbyte-server/src/test/kotlin/io/airbyte/server/apis/publicapi/mappers/ConnectionReadMapperTest.kt index 0580a96208f..87456647c7f 100644 --- a/airbyte-server/src/test/kotlin/mappers/ConnectionReadMapperTest.kt +++ b/airbyte-server/src/test/kotlin/io/airbyte/server/apis/publicapi/mappers/ConnectionReadMapperTest.kt @@ -1,11 +1,10 @@ -package mappers +package io.airbyte.server.apis.publicapi.mappers import io.airbyte.api.model.generated.ConnectionRead import io.airbyte.api.model.generated.ConnectionScheduleType import io.airbyte.api.model.generated.ConnectionStatus import io.airbyte.api.model.generated.Geography import io.airbyte.api.model.generated.NamespaceDefinitionType -import io.airbyte.server.apis.publicapi.mappers.ConnectionReadMapper import org.junit.jupiter.api.Test import java.util.UUID diff --git a/airbyte-server/src/test/kotlin/mappers/ConnectionUpdateMapperTest.kt b/airbyte-server/src/test/kotlin/io/airbyte/server/apis/publicapi/mappers/ConnectionUpdateMapperTest.kt similarity index 97% rename from airbyte-server/src/test/kotlin/mappers/ConnectionUpdateMapperTest.kt rename to airbyte-server/src/test/kotlin/io/airbyte/server/apis/publicapi/mappers/ConnectionUpdateMapperTest.kt index 2099ad16f0f..6d8e3965152 100644 --- 
a/airbyte-server/src/test/kotlin/mappers/ConnectionUpdateMapperTest.kt +++ b/airbyte-server/src/test/kotlin/io/airbyte/server/apis/publicapi/mappers/ConnectionUpdateMapperTest.kt @@ -1,4 +1,4 @@ -package mappers +package io.airbyte.server.apis.publicapi.mappers import io.airbyte.api.model.generated.AirbyteCatalog import io.airbyte.api.model.generated.ConnectionScheduleType @@ -14,7 +14,6 @@ import io.airbyte.public_api.model.generated.GeographyEnumNoDefault import io.airbyte.public_api.model.generated.NamespaceDefinitionEnumNoDefault import io.airbyte.public_api.model.generated.NonBreakingSchemaUpdatesBehaviorEnumNoDefault import io.airbyte.public_api.model.generated.ScheduleTypeEnum -import io.airbyte.server.apis.publicapi.mappers.ConnectionUpdateMapper import org.junit.jupiter.api.Assertions import org.junit.jupiter.api.Test import java.util.UUID diff --git a/airbyte-server/src/test/kotlin/io/airbyte/server/apis/publicapi/mappers/DestinationReadMapperTest.kt b/airbyte-server/src/test/kotlin/io/airbyte/server/apis/publicapi/mappers/DestinationReadMapperTest.kt new file mode 100644 index 00000000000..a06a5de5dcf --- /dev/null +++ b/airbyte-server/src/test/kotlin/io/airbyte/server/apis/publicapi/mappers/DestinationReadMapperTest.kt @@ -0,0 +1,30 @@ +package io.airbyte.server.apis.publicapi.mappers + +import io.airbyte.api.model.generated.DestinationRead +import io.airbyte.commons.json.Jsons +import org.junit.jupiter.api.Assertions.assertEquals +import org.junit.jupiter.api.Test +import java.util.UUID + +class DestinationReadMapperTest { + @Test + fun `from should convert a DestinationRead object from the config api to a DestinationResponse`() { + // Given + val destinationRead = DestinationRead() + destinationRead.destinationId = UUID.randomUUID() + destinationRead.name = "destinationName" + destinationRead.destinationDefinitionId = UUID.randomUUID() + destinationRead.workspaceId = UUID.randomUUID() + destinationRead.connectionConfiguration = Jsons.deserialize("{}") 
+ + // When + val destinationResponse = DestinationReadMapper.from(destinationRead) + + // Then + assertEquals(destinationRead.destinationId, destinationResponse.destinationId) + assertEquals(destinationRead.name, destinationResponse.name) + assertEquals(DEFINITION_ID_TO_DESTINATION_NAME[destinationRead.destinationDefinitionId], destinationResponse.destinationType) + assertEquals(destinationRead.workspaceId, destinationResponse.workspaceId) + assertEquals(destinationRead.connectionConfiguration, destinationResponse.configuration) + } +} diff --git a/airbyte-server/src/test/kotlin/io/airbyte/server/apis/publicapi/mappers/JobResponseMapperTest.kt b/airbyte-server/src/test/kotlin/io/airbyte/server/apis/publicapi/mappers/JobResponseMapperTest.kt new file mode 100644 index 00000000000..3c28ff8c906 --- /dev/null +++ b/airbyte-server/src/test/kotlin/io/airbyte/server/apis/publicapi/mappers/JobResponseMapperTest.kt @@ -0,0 +1,47 @@ +package io.airbyte.server.apis.publicapi.mappers + +import io.airbyte.api.model.generated.JobAggregatedStats +import io.airbyte.api.model.generated.JobConfigType +import io.airbyte.api.model.generated.JobInfoRead +import io.airbyte.api.model.generated.JobRead +import io.airbyte.api.model.generated.JobStatus +import org.junit.jupiter.api.Assertions.assertEquals +import org.junit.jupiter.api.Test +import java.util.UUID + +class JobResponseMapperTest { + @Test + fun `from should convert a JobRead object from the config api to a JobResponse`() { + // Given + val jobRead = + JobRead().apply { + this.id = 1L + this.status = JobStatus.FAILED + this.configId = UUID.randomUUID().toString() + this.configType = JobConfigType.SYNC + this.createdAt = 1L + this.updatedAt = 2L + this.aggregatedStats = + JobAggregatedStats().apply { + this.bytesCommitted = 12345 + this.recordsCommitted = 67890 + } + } + val jobInfoRead = JobInfoRead() + jobInfoRead.job = jobRead + + // When + val jobResponse = JobResponseMapper.from(jobInfoRead) + + // Then + 
assertEquals(jobResponse.jobId, jobRead.id) + assertEquals(jobResponse.status.toString(), jobRead.status.toString()) + assertEquals(jobResponse.connectionId, UUID.fromString(jobRead.configId)) + assertEquals(jobResponse.jobType.toString(), jobRead.configType.toString()) + assertEquals(jobResponse.startTime, "1970-01-01T00:00:01Z") + assertEquals(jobResponse.lastUpdatedAt, "1970-01-01T00:00:02Z") + assertEquals(jobResponse.duration, "PT1S") + assertEquals(jobResponse.bytesSynced, jobRead.aggregatedStats.bytesCommitted) + assertEquals(jobResponse.rowsSynced, jobRead.aggregatedStats.recordsCommitted) + } +} diff --git a/airbyte-server/src/test/kotlin/io/airbyte/server/apis/publicapi/mappers/PaginationMapperTest.kt b/airbyte-server/src/test/kotlin/io/airbyte/server/apis/publicapi/mappers/PaginationMapperTest.kt new file mode 100644 index 00000000000..113eb213c57 --- /dev/null +++ b/airbyte-server/src/test/kotlin/io/airbyte/server/apis/publicapi/mappers/PaginationMapperTest.kt @@ -0,0 +1,73 @@ +package io.airbyte.server.apis.publicapi.mappers + +import io.airbyte.public_api.model.generated.SourceDefinitionRead.SourceTypeEnum +import io.airbyte.server.apis.publicapi.constants.SOURCES_PATH +import io.airbyte.server.apis.publicapi.helpers.removePublicApiPathPrefix +import org.junit.jupiter.api.Assertions.assertEquals +import org.junit.jupiter.api.Test +import java.util.UUID + +class PaginationMapperTest { + private val publicApiHost = "https://api.airbyte.com" + + @Test + fun `test that it outputs a correct URL`() { + val paginationMapper = + PaginationMapper.getBuilder(publicApiHost, removePublicApiPathPrefix(SOURCES_PATH)) + .queryParam("string", "string") + .queryParam("int", 1) + .queryParam("enum", SourceTypeEnum.API) + + assertEquals("$publicApiHost/v1/sources?string=string&int=1&enum=api", paginationMapper.build().toString()) + } + + @Test + fun `test that it can generate next URLs`() { + val lessResultsBuilder = PaginationMapper.getBuilder(publicApiHost, 
removePublicApiPathPrefix(SOURCES_PATH)) + PaginationMapper.getNextUrl(listOf("a", "b", "c"), 4, 0, lessResultsBuilder) + assertEquals( + "$publicApiHost/v1/sources?limit=4&offset=4", + lessResultsBuilder.build().toString(), + ) + + val noResultsBuilder = PaginationMapper.getBuilder(publicApiHost, removePublicApiPathPrefix(SOURCES_PATH)) + PaginationMapper.getNextUrl(emptyList(), 4, 0, noResultsBuilder) + assertEquals( + "$publicApiHost/v1/sources", + noResultsBuilder.build().toString(), + ) + + val offsetLimitBuilder = PaginationMapper.getBuilder(publicApiHost, removePublicApiPathPrefix(SOURCES_PATH)) + PaginationMapper.getNextUrl(listOf("a", "b", "c"), 2, 0, offsetLimitBuilder) + assertEquals( + "$publicApiHost/v1/sources?limit=2&offset=2", + offsetLimitBuilder.build().toString(), + ) + } + + @Test + fun `test that it can generate prev URLs`() { + var prevPageBuilder = PaginationMapper.getBuilder(publicApiHost, removePublicApiPathPrefix(SOURCES_PATH)) + PaginationMapper.getPreviousUrl(4, 8, prevPageBuilder) + assertEquals( + "$publicApiHost/v1/sources?limit=4&offset=4", + prevPageBuilder.build().toString(), + ) + + var noPrevPageBuilder = PaginationMapper.getBuilder(publicApiHost, removePublicApiPathPrefix(SOURCES_PATH)) + PaginationMapper.getPreviousUrl(2, 0, noPrevPageBuilder) + assertEquals( + "$publicApiHost/v1/sources", + noPrevPageBuilder.build().toString(), + ) + } + + @Test + fun `uuid list to qs`() { + val uuids = listOf(UUID.randomUUID(), UUID.randomUUID(), UUID.randomUUID()) + val (first, second, third) = uuids + assertEquals("$first,$second,$third", PaginationMapper.uuidListToQueryString(uuids)) + + assertEquals("", PaginationMapper.uuidListToQueryString(emptyList())) + } +} diff --git a/airbyte-server/src/test/kotlin/io/airbyte/server/apis/publicapi/mappers/SourceReadMapperTest.kt b/airbyte-server/src/test/kotlin/io/airbyte/server/apis/publicapi/mappers/SourceReadMapperTest.kt new file mode 100644 index 00000000000..aaf1788ab75 --- /dev/null +++ 
b/airbyte-server/src/test/kotlin/io/airbyte/server/apis/publicapi/mappers/SourceReadMapperTest.kt @@ -0,0 +1,32 @@ +package io.airbyte.server.apis.publicapi.mappers + +import io.airbyte.api.model.generated.SourceRead +import io.airbyte.commons.json.Jsons +import org.junit.jupiter.api.Assertions.assertEquals +import org.junit.jupiter.api.Test +import java.util.UUID + +class SourceReadMapperTest { + @Test + fun `from should convert a SourceRead object from the config api to a SourceResponse`() { + // Given + val sourceRead = + SourceRead().apply { + this.sourceId = UUID.randomUUID() + this.name = "sourceName" + this.sourceDefinitionId = UUID.randomUUID() + this.workspaceId = UUID.randomUUID() + this.connectionConfiguration = Jsons.deserialize("{}") + } + + // When + val sourceResponse = SourceReadMapper.from(sourceRead) + + // Then + assertEquals(sourceRead.sourceId, sourceResponse.sourceId) + assertEquals(sourceRead.name, sourceResponse.name) + assertEquals(DEFINITION_ID_TO_SOURCE_NAME[sourceRead.sourceDefinitionId], sourceResponse.sourceType) + assertEquals(sourceRead.workspaceId, sourceResponse.workspaceId) + assertEquals(sourceRead.connectionConfiguration, sourceResponse.configuration) + } +} diff --git a/airbyte-server/src/test/kotlin/io/airbyte/server/apis/publicapi/mappers/WorkspaceResonseMapperTest.kt b/airbyte-server/src/test/kotlin/io/airbyte/server/apis/publicapi/mappers/WorkspaceResonseMapperTest.kt new file mode 100644 index 00000000000..730d7560025 --- /dev/null +++ b/airbyte-server/src/test/kotlin/io/airbyte/server/apis/publicapi/mappers/WorkspaceResonseMapperTest.kt @@ -0,0 +1,26 @@ +package io.airbyte.server.apis.publicapi.mappers + +import io.airbyte.api.model.generated.WorkspaceRead +import org.junit.jupiter.api.Assertions.assertEquals +import org.junit.jupiter.api.Test +import java.util.UUID + +class WorkspaceResonseMapperTest { + @Test + fun `from should convert a WorkspaceRead object from the config api to a WorkspaceResponse`() { + // Given + 
val workspaceRead = + WorkspaceRead().apply { + this.workspaceId = UUID.randomUUID() + this.name = "workspaceName" + this.email = "workspaceEmail@gmail.com" + } + + // When + val workspaceResponse = WorkspaceResponseMapper.from(workspaceRead) + + // Then + assertEquals(workspaceRead.workspaceId, workspaceResponse.workspaceId) + assertEquals(workspaceRead.name, workspaceResponse.name) + } +} diff --git a/airbyte-server/src/test/kotlin/io/airbyte/server/apis/publicapi/services/JobServiceTest.kt b/airbyte-server/src/test/kotlin/io/airbyte/server/apis/publicapi/services/JobServiceTest.kt new file mode 100644 index 00000000000..667f192e553 --- /dev/null +++ b/airbyte-server/src/test/kotlin/io/airbyte/server/apis/publicapi/services/JobServiceTest.kt @@ -0,0 +1,64 @@ +package io.airbyte.server.apis.publicapi.services + +import io.airbyte.commons.server.errors.ValueConflictKnownException +import io.airbyte.commons.server.errors.problems.ConflictProblem +import io.airbyte.commons.server.errors.problems.SyncConflictProblem +import io.airbyte.commons.server.handlers.SchedulerHandler +import io.airbyte.server.apis.publicapi.errorHandlers.JOB_NOT_RUNNING_MESSAGE +import io.micronaut.test.annotation.MockBean +import io.micronaut.test.extensions.junit5.annotation.MicronautTest +import io.mockk.every +import io.mockk.mockk +import jakarta.inject.Inject +import org.junit.jupiter.api.Test +import org.junit.jupiter.api.assertThrows +import java.util.UUID + +@MicronautTest +class JobServiceTest { + @Inject + private lateinit var jobService: JobServiceImpl + + private val connectionId = UUID.randomUUID() + + private val schedulerHandler = mockk() + + @MockBean(SchedulerHandler::class) + fun schedulerHandler(): SchedulerHandler { + return schedulerHandler + } + + @Test + fun `test sync already running value conflict known exception`() { + val failureReason = "A sync is already running for: $connectionId" + val schedulerHandler = schedulerHandler() + every { 
schedulerHandler.syncConnection(any()) } throws + ValueConflictKnownException(failureReason) + + assertThrows(failureReason) { jobService.sync(connectionId) } + } + + @Test + fun `test sync already running illegal state exception`() { + val failureReason = "A sync is already running for: $connectionId" + val schedulerHandler = schedulerHandler() + every { schedulerHandler.syncConnection(any()) } throws + IllegalStateException(failureReason) + + assertThrows(failureReason) { jobService.sync(connectionId) } + } + + @Test + fun `test cancel non-running sync`() { + // This is a real error message that we can get. + // Happens because after canceling a job we go to the job persistence to fetch it but have no ID + + val couldNotFindJobMessage = "Could not find job with id: -1" + every { schedulerHandler.syncConnection(any()) } throws RuntimeException(couldNotFindJobMessage) + assertThrows(JOB_NOT_RUNNING_MESSAGE) { jobService.sync(connectionId) } + + val failureReason = "Failed to cancel job with id: -1" + every { schedulerHandler.syncConnection(any()) } throws IllegalStateException(failureReason) + assertThrows(JOB_NOT_RUNNING_MESSAGE) { jobService.sync(connectionId) } + } +} diff --git a/airbyte-server/src/test/kotlin/io/airbyte/server/helpers/UserInvitationAuthorizationHelperTest.kt b/airbyte-server/src/test/kotlin/io/airbyte/server/helpers/UserInvitationAuthorizationHelperTest.kt new file mode 100644 index 00000000000..aa35bcb2fd2 --- /dev/null +++ b/airbyte-server/src/test/kotlin/io/airbyte/server/helpers/UserInvitationAuthorizationHelperTest.kt @@ -0,0 +1,59 @@ +package io.airbyte.server.helpers + +import io.airbyte.api.model.generated.PermissionCheckRead +import io.airbyte.commons.server.errors.OperationNotAllowedException +import io.airbyte.commons.server.handlers.PermissionHandler +import io.airbyte.config.ScopeType +import io.airbyte.config.UserInvitation +import io.airbyte.data.services.UserInvitationService +import io.mockk.every +import io.mockk.mockk 
+import org.junit.jupiter.api.BeforeEach +import org.junit.jupiter.api.Test +import org.junit.jupiter.api.assertDoesNotThrow +import org.junit.jupiter.api.assertThrows +import org.junit.jupiter.params.ParameterizedTest +import org.junit.jupiter.params.provider.EnumSource +import java.util.UUID + +class UserInvitationAuthorizationHelperTest { + private val userInvitationService = mockk() + private val permissionHandler = mockk() + + private lateinit var authorizationHelper: UserInvitationAuthorizationHelper + private val userId = UUID.randomUUID() + private val inviteCode = "test-invite-code" + private val invitation = UserInvitation().withInviteCode(inviteCode).withScopeId(UUID.randomUUID()) + + @BeforeEach + fun setUp() { + authorizationHelper = UserInvitationAuthorizationHelper(userInvitationService, permissionHandler) + } + + @ParameterizedTest + @EnumSource(ScopeType::class) + fun `successful permission check does not throw`(scopeType: ScopeType) { + invitation.scopeType = scopeType + every { userInvitationService.getUserInvitationByInviteCode(inviteCode) } returns invitation + every { permissionHandler.checkPermissions(any()) } returns PermissionCheckRead().status(PermissionCheckRead.StatusEnum.SUCCEEDED) + + assertDoesNotThrow { authorizationHelper.authorizeInvitationAdmin(inviteCode, userId) } + } + + @ParameterizedTest + @EnumSource(ScopeType::class) + fun `failed permission check throws`(scopeType: ScopeType) { + invitation.scopeType = scopeType + every { userInvitationService.getUserInvitationByInviteCode(inviteCode) } returns invitation + every { permissionHandler.checkPermissions(any()) } returns PermissionCheckRead().status(PermissionCheckRead.StatusEnum.FAILED) + + assertThrows { authorizationHelper.authorizeInvitationAdmin(inviteCode, userId) } + } + + @Test + fun `authorizeInvitationAdmin should handle exceptions from UserInvitationService`() { + every { userInvitationService.getUserInvitationByInviteCode(inviteCode) } throws 
RuntimeException("Service exception") + + assertThrows { authorizationHelper.authorizeInvitationAdmin(inviteCode, userId) } + } +} diff --git a/airbyte-server/src/test/resources/application-test.yml b/airbyte-server/src/test/resources/application-test.yml index ad1f81ab238..76e87b13c24 100644 --- a/airbyte-server/src/test/resources/application-test.yml +++ b/airbyte-server/src/test/resources/application-test.yml @@ -27,6 +27,9 @@ airbyte: log: log state: state workload-output: workload + activity-payload: payload + api: + host: https://api.airbyte.com datasources: config: diff --git a/airbyte-temporal/Dockerfile b/airbyte-temporal/Dockerfile index c04074c7d4b..9e1cf243964 100644 --- a/airbyte-temporal/Dockerfile +++ b/airbyte-temporal/Dockerfile @@ -1,5 +1,5 @@ # A test describe in the README is available to test a version update -FROM temporalio/auto-setup:1.22.3 +FROM temporalio/auto-setup:1.22.7 ENV TEMPORAL_HOME /etc/temporal diff --git a/airbyte-temporal/build.gradle.kts b/airbyte-temporal/build.gradle.kts index d699ef53873..2014497b8a4 100644 --- a/airbyte-temporal/build.gradle.kts +++ b/airbyte-temporal/build.gradle.kts @@ -1,20 +1,20 @@ plugins { - id("io.airbyte.gradle.jvm.lib") - id("io.airbyte.gradle.docker") - id("io.airbyte.gradle.publish") + id("io.airbyte.gradle.jvm.lib") + id("io.airbyte.gradle.docker") + id("io.airbyte.gradle.publish") } airbyte { - docker { - imageName = "temporal" - } + docker { + imageName = "temporal" + } } val copyScripts = tasks.register("copyScripts") { - from("scripts") - into("build/airbyte/docker/") + from("scripts") + into("build/airbyte/docker/") } tasks.named("dockerBuildImage") { - dependsOn(copyScripts) + dependsOn(copyScripts) } diff --git a/airbyte-test-utils/README.md b/airbyte-test-utils/README.md new file mode 100644 index 00000000000..fb22cc4b538 --- /dev/null +++ b/airbyte-test-utils/README.md @@ -0,0 +1,20 @@ +# airbyte-test-utils + +Shared Java code for executing TestContainers and other helpers. 
+ +## Stage databases setup + +When we run acceptance tests on an environment that is not `stage`, a test container will be used for each connector that requires a database. Each test container will be used for only one test case, and it will be deleted once the test case completes. + +When we run acceptance tests on stage, things are slightly more complex, but we try to have the same behavior. Instead of using a test container for each connector that requires a database, we will use a CloudSQL database for each connector. Similarly to the test containers, each CloudSQL database will be used for only one test case, and it will be deleted once the test case completes. + +It's important to understand how are the different components communicating when running on stage. + +![Stage network setup](stage_network_setup.png) + +- It is possible to communicate with the `CloudSQL Instance` from both private IP and public ip +- One same `CloudSQL Instance` is use for all the tests, but each test case will create their own databases inside this instance. +- We run the acceptance tests from a `AWS Test Runner` (EC2 instances), which are behind Tailscale, so they can communicate with the CloudSQL instance using its private IP. We need to be able to access the CloudSQL instance from these test runners since the tests will access these databases to validate their content. +- The only IPs that are allowed to connect to the CloudSQL instance via its public IP are the ones that belong to stage Dataplanes (both `GCP Dataplane` and `AWS Dataplane`). Note that this is not a workaround for the sake of our tests, this is the same setup that real users have. 
+ + diff --git a/airbyte-test-utils/build.gradle.kts b/airbyte-test-utils/build.gradle.kts index f35125c69b1..c85d5a828f1 100644 --- a/airbyte-test-utils/build.gradle.kts +++ b/airbyte-test-utils/build.gradle.kts @@ -1,39 +1,44 @@ plugins { - id("io.airbyte.gradle.jvm.lib") - id("io.airbyte.gradle.publish") + id("io.airbyte.gradle.jvm.lib") + id("io.airbyte.gradle.publish") } configurations.all { - exclude( group = "io.micronaut.jaxrs") - exclude( group = "io.micronaut.sql") + exclude(group = "io.micronaut.jaxrs") + exclude(group = "io.micronaut.sql") - resolutionStrategy { - // Force to avoid(updated version(brought in transitively from Micronaut) - force(libs.platform.testcontainers.postgresql) - } + resolutionStrategy { + // Force to avoid(updated version(brought in transitively from Micronaut) + force(libs.platform.testcontainers.postgresql) + } } dependencies { - api(project(":airbyte-db:db-lib")) - implementation(project(":airbyte-commons")) - implementation(project(":airbyte-api")) - implementation(project(":airbyte-commons-temporal")) - implementation(project(":airbyte-commons-worker")) + api(project(":airbyte-db:db-lib")) + api(project(":airbyte-db:jooq")) + api(project(":airbyte-config:config-models")) + api(project(":airbyte-config:config-persistence")) + implementation(project(":airbyte-commons")) + implementation(project(":airbyte-api")) + implementation(project(":airbyte-commons-temporal")) + implementation(project(":airbyte-commons-worker")) - implementation(libs.bundles.kubernetes.client) - implementation(libs.bundles.flyway) - implementation(libs.temporal.sdk) + implementation(libs.bundles.kubernetes.client) + implementation(libs.bundles.flyway) + implementation(libs.temporal.sdk) + implementation(libs.google.cloud.api.client) + implementation(libs.google.cloud.sqladmin) - api(libs.junit.jupiter.api) + api(libs.junit.jupiter.api) - // Mark as compile only(to avoid leaking transitively to connectors - 
compileOnly(libs.platform.testcontainers.postgresql) + // Mark as compile only(to avoid leaking transitively to connectors + compileOnly(libs.platform.testcontainers.postgresql) - testImplementation(libs.platform.testcontainers.postgresql) + testImplementation(libs.platform.testcontainers.postgresql) - testRuntimeOnly(libs.junit.jupiter.engine) - testImplementation(libs.bundles.junit) - testImplementation(libs.assertj.core) + testRuntimeOnly(libs.junit.jupiter.engine) + testImplementation(libs.bundles.junit) + testImplementation(libs.assertj.core) - testImplementation(libs.junit.pioneer) + testImplementation(libs.junit.pioneer) } diff --git a/airbyte-test-utils/readme.md b/airbyte-test-utils/readme.md deleted file mode 100644 index f75ba4e74b0..00000000000 --- a/airbyte-test-utils/readme.md +++ /dev/null @@ -1,3 +0,0 @@ -# airbyte-test-utils - -Shared Java code for executing TestContainers and other helpers. diff --git a/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/AbstractDatabaseTest.java b/airbyte-test-utils/src/main/java/io/airbyte/test/utils/AbstractDatabaseTest.java similarity index 96% rename from airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/AbstractDatabaseTest.java rename to airbyte-test-utils/src/main/java/io/airbyte/test/utils/AbstractDatabaseTest.java index a32031f52de..c8b06b9f8d7 100644 --- a/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/AbstractDatabaseTest.java +++ b/airbyte-test-utils/src/main/java/io/airbyte/test/utils/AbstractDatabaseTest.java @@ -2,13 +2,13 @@ * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. 
*/ -package io.airbyte.db.instance; +package io.airbyte.test.utils; import io.airbyte.db.Database; import io.airbyte.db.factory.DSLContextFactory; import io.airbyte.db.factory.DataSourceFactory; import io.airbyte.db.init.DatabaseInitializationException; -import io.airbyte.test.utils.Databases; +import io.airbyte.db.instance.DatabaseMigrator; import java.io.IOException; import javax.sql.DataSource; import org.jooq.DSLContext; diff --git a/airbyte-test-utils/src/main/java/io/airbyte/test/utils/AcceptanceTestHarness.java b/airbyte-test-utils/src/main/java/io/airbyte/test/utils/AcceptanceTestHarness.java index 4065a2ba6f3..05179fc9830 100644 --- a/airbyte-test-utils/src/main/java/io/airbyte/test/utils/AcceptanceTestHarness.java +++ b/airbyte-test-utils/src/main/java/io/airbyte/test/utils/AcceptanceTestHarness.java @@ -10,8 +10,6 @@ import static org.junit.jupiter.api.Assertions.assertTrue; import com.fasterxml.jackson.databind.JsonNode; -import com.github.dockerjava.api.DockerClient; -import com.github.dockerjava.api.model.Network; import com.google.common.collect.Lists; import com.google.common.collect.Sets; import com.google.common.io.Resources; @@ -105,8 +103,6 @@ import io.airbyte.db.factory.DataSourceFactory; import io.airbyte.db.jdbc.JdbcUtils; import io.airbyte.test.container.AirbyteTestContainer; -import io.fabric8.kubernetes.client.DefaultKubernetesClient; -import io.fabric8.kubernetes.client.KubernetesClient; import io.temporal.client.WorkflowClient; import io.temporal.serviceclient.WorkflowServiceStubs; import java.io.File; @@ -116,6 +112,7 @@ import java.net.URISyntaxException; import java.net.UnknownHostException; import java.nio.file.Path; +import java.security.GeneralSecurityException; import java.sql.SQLException; import java.time.Duration; import java.time.Instant; @@ -133,7 +130,6 @@ import org.junit.jupiter.api.Assertions; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import org.testcontainers.DockerClientFactory; import 
org.testcontainers.containers.PostgreSQLContainer; import org.testcontainers.shaded.org.apache.commons.lang3.RandomStringUtils; import org.testcontainers.utility.DockerImageName; @@ -159,6 +155,7 @@ public class AcceptanceTestHarness { private static final Logger LOGGER = LoggerFactory.getLogger(AcceptanceTestHarness.class); + private static final UUID DEFAULT_ORGANIZATION_ID = UUID.fromString("00000000-0000-0000-0000-000000000000"); private static final String DOCKER_COMPOSE_FILE_NAME = "docker-compose.yaml"; // assume env file is one directory level up from airbyte-tests. private static final File ENV_FILE = Path.of(System.getProperty("user.dir")).getParent().resolve(".env").toFile(); @@ -182,9 +179,6 @@ public class AcceptanceTestHarness { private static final String SOURCE_USERNAME = "sourceusername"; public static final String SOURCE_PASSWORD = "hunter2"; public static final String PUBLIC_SCHEMA_NAME = "public"; - public static final String STAGING_SCHEMA_NAME = "staging"; - public static final String COOL_EMPLOYEES_TABLE_NAME = "cool_employees"; - public static final String AWESOME_PEOPLE_TABLE_NAME = "awesome_people"; public static final String PUBLIC = "public"; private static final String DEFAULT_POSTGRES_INIT_SQL_FILE = "postgres_init.sql"; @@ -194,20 +188,21 @@ public class AcceptanceTestHarness { public static final int MAX_TRIES = 5; public static final int MAX_ALLOWED_SECOND_PER_RUN = 120; + private static final String CLOUD_SQL_DATABASE_PREFIX = "acceptance_test_"; + // NOTE: we include `INCOMPLETE` here because the job may still retry; see // https://docs.airbyte.com/understanding-airbyte/jobs/. 
public static final Set IN_PROGRESS_JOB_STATUSES = Set.of(JobStatus.PENDING, JobStatus.INCOMPLETE, JobStatus.RUNNING); private static final String KUBE_PROCESS_RUNNER_HOST = java.util.Optional.ofNullable(System.getenv("KUBE_PROCESS_RUNNER_HOST")).orElse(""); - private static final String DOCKER_NETWORK = java.util.Optional.ofNullable(System.getenv("DOCKER_NETWORK")).orElse("bridge"); - private static boolean isKube; private static boolean isMinikube; private static boolean isGke; private static boolean isMac; private static boolean useExternalDeployment; private static boolean ensureCleanSlate; + private CloudSqlDatabaseProvisioner cloudSqlDatabaseProvisioner; /** * When the acceptance tests are run against a local instance of docker-compose or KUBE then these @@ -216,6 +211,9 @@ public class AcceptanceTestHarness { */ private PostgreSQLContainer sourcePsql; private PostgreSQLContainer destinationPsql; + private String sourceDatabaseName; + private String destinationDatabaseName; + private AirbyteTestContainer airbyteTestContainer; private AirbyteApiClient apiClient; @@ -223,8 +221,6 @@ public class AcceptanceTestHarness { private final UUID defaultWorkspaceId; private final String postgresSqlInitFile; - private KubernetesClient kubernetesClient; - private final List sourceIds = Lists.newArrayList(); private final List connectionIds = Lists.newArrayList(); private final List destinationIds = Lists.newArrayList(); @@ -232,11 +228,13 @@ public class AcceptanceTestHarness { private final List sourceDefinitionIds = Lists.newArrayList(); private DataSource sourceDataSource; private DataSource destinationDataSource; - private String postgresPassword; - public KubernetesClient getKubernetesClient() { - return kubernetesClient; - } + private String gcpProjectId; + private String cloudSqlInstanceId; + private String cloudSqlInstanceUsername; + private String cloudSqlInstancePassword; + private String cloudSqlInstancePrivateIp; + private String cloudSqlInstancePublicIp; 
public void removeConnection(final UUID connection) { connectionIds.remove(connection); @@ -246,7 +244,7 @@ public AcceptanceTestHarness(final AirbyteApiClient apiClient, final WebBackendApi webBackendApi, final UUID defaultWorkspaceId, final String postgresSqlInitFile) - throws URISyntaxException, IOException, InterruptedException { + throws URISyntaxException, IOException, InterruptedException, GeneralSecurityException { // reads env vars to assign static variables assignEnvVars(); this.apiClient = apiClient; @@ -258,28 +256,23 @@ public AcceptanceTestHarness(final AirbyteApiClient apiClient, throw new RuntimeException("KUBE Flag should also be enabled if GKE flag is enabled"); } if (!isGke) { - // we attach the container to the appropriate network since there are environments where we use one - // other than the default - final DockerClient dockerClient = DockerClientFactory.lazyClient(); - final List dockerNetworks = dockerClient.listNetworksCmd().withNameFilter(DOCKER_NETWORK).exec(); - final Network dockerNetwork = dockerNetworks.get(0); - final org.testcontainers.containers.Network containerNetwork = - org.testcontainers.containers.Network.builder().id(dockerNetwork.getId()).build(); - sourcePsql = (PostgreSQLContainer) new PostgreSQLContainer(SOURCE_POSTGRES_IMAGE_NAME) - .withNetwork(containerNetwork); + sourcePsql = new PostgreSQLContainer(SOURCE_POSTGRES_IMAGE_NAME); sourcePsql.withUsername(SOURCE_USERNAME) .withPassword(SOURCE_PASSWORD); sourcePsql.start(); - destinationPsql = (PostgreSQLContainer) new PostgreSQLContainer(DESTINATION_POSTGRES_IMAGE_NAME) - .withNetwork(containerNetwork); + destinationPsql = new PostgreSQLContainer(DESTINATION_POSTGRES_IMAGE_NAME); destinationPsql.start(); - } - - if (isKube && !isGke) { - // TODO(mfsiega-airbyte): get the Kube client to work with GKE tests. We don't use it yet but we - // will want to someday. 
- kubernetesClient = new DefaultKubernetesClient(); + } else { + this.cloudSqlDatabaseProvisioner = new CloudSqlDatabaseProvisioner(); + sourceDatabaseName = cloudSqlDatabaseProvisioner.createDatabase( + gcpProjectId, + cloudSqlInstanceId, + generateRandomCloudSqlDatabaseName()); + destinationDatabaseName = cloudSqlDatabaseProvisioner.createDatabase( + gcpProjectId, + cloudSqlInstanceId, + generateRandomCloudSqlDatabaseName()); } // by default use airbyte deployment governed by a test container. @@ -303,7 +296,7 @@ public AcceptanceTestHarness(final AirbyteApiClient apiClient, } public AcceptanceTestHarness(final AirbyteApiClient apiClient, final WebBackendApi webBackendApi, final UUID defaultWorkspaceId) - throws URISyntaxException, IOException, InterruptedException { + throws URISyntaxException, IOException, InterruptedException, GeneralSecurityException { this(apiClient, webBackendApi, defaultWorkspaceId, DEFAULT_POSTGRES_INIT_SQL_FILE); } @@ -328,29 +321,36 @@ public void stopDbAndContainers() { public void setup() throws SQLException, URISyntaxException, IOException, ApiException { if (isGke) { // Prepare the database data sources. - LOGGER.info("postgresPassword: {}", postgresPassword); - sourceDataSource = GKEPostgresConfig.getSourceDataSource(postgresPassword); - destinationDataSource = GKEPostgresConfig.getDestinationDataSource(postgresPassword); + LOGGER.info("postgresPassword: {}", cloudSqlInstancePassword); + sourceDataSource = GKEPostgresConfig.getDataSource( + cloudSqlInstanceUsername, + cloudSqlInstancePassword, + cloudSqlInstancePrivateIp, + sourceDatabaseName); + destinationDataSource = GKEPostgresConfig.getDataSource( + cloudSqlInstanceUsername, + cloudSqlInstancePassword, + cloudSqlInstancePrivateIp, + destinationDatabaseName); // seed database. 
GKEPostgresConfig.runSqlScript(Path.of(MoreResources.readResourceAsFile(postgresSqlInitFile).toURI()), getSourceDatabase()); } else { PostgreSQLContainerHelper.runSqlScript(MountableFile.forClasspathResource(postgresSqlInitFile), sourcePsql); - destinationPsql = new PostgreSQLContainer("postgres:13-alpine"); - destinationPsql.start(); - sourceDataSource = Databases.createDataSource(sourcePsql); destinationDataSource = Databases.createDataSource(destinationPsql); - } - // Pinning Postgres destination version - final DestinationDefinitionRead postgresDestDef = getPostgresDestinationDefinition(); - if (!postgresDestDef.getDockerImageTag().equals(POSTGRES_DESTINATION_CONNECTOR_VERSION)) { - LOGGER.info("Setting postgres destination connector to version {}...", POSTGRES_DESTINATION_CONNECTOR_VERSION); - try { - updateDestinationDefinitionVersion(postgresDestDef.getDestinationDefinitionId(), POSTGRES_DESTINATION_CONNECTOR_VERSION); - } catch (final ApiException e) { - LOGGER.error("Error while updating destination definition version", e); + // Pinning Postgres destination version. This doesn't work on GKE since the + // airbyte-cron will revert this change. On GKE we are pinning the version by + // adding an entry to the scoped_configuration table. 
+ final DestinationDefinitionRead postgresDestDef = getPostgresDestinationDefinition(); + if (!postgresDestDef.getDockerImageTag().equals(POSTGRES_DESTINATION_CONNECTOR_VERSION)) { + LOGGER.info("Setting postgres destination connector to version {}...", POSTGRES_DESTINATION_CONNECTOR_VERSION); + try { + updateDestinationDefinitionVersion(postgresDestDef.getDestinationDefinitionId(), POSTGRES_DESTINATION_CONNECTOR_VERSION); + } catch (final ApiException e) { + LOGGER.error("Error while updating destination definition version", e); + } } } } @@ -379,8 +379,18 @@ public void cleanup() { if (isGke) { DataSourceFactory.close(sourceDataSource); DataSourceFactory.close(destinationDataSource); + + cloudSqlDatabaseProvisioner.deleteDatabase( + gcpProjectId, + cloudSqlInstanceId, + sourceDatabaseName); + cloudSqlDatabaseProvisioner.deleteDatabase( + gcpProjectId, + cloudSqlInstanceId, + destinationDatabaseName); } else { destinationPsql.stop(); + sourcePsql.stop(); } // TODO(mfsiega-airbyte): clean up created source definitions. } catch (final Exception e) { @@ -448,9 +458,12 @@ private void assignEnvVars() { && System.getenv("USE_EXTERNAL_DEPLOYMENT").equalsIgnoreCase("true"); ensureCleanSlate = System.getenv("ENSURE_CLEAN_SLATE") != null && System.getenv("ENSURE_CLEAN_SLATE").equalsIgnoreCase("true"); - postgresPassword = System.getenv("POSTGRES_PASSWORD") != null - ? 
System.getenv("POSTGRES_PASSWORD") - : "admin123"; + gcpProjectId = System.getenv("GCP_PROJECT_ID"); + cloudSqlInstanceId = System.getenv("CLOUD_SQL_INSTANCE_ID"); + cloudSqlInstanceUsername = System.getenv("CLOUD_SQL_INSTANCE_USERNAME"); + cloudSqlInstancePassword = System.getenv("CLOUD_SQL_INSTANCE_PASSWORD"); + cloudSqlInstancePrivateIp = System.getenv("CLOUD_SQL_INSTANCE_PRIVATE_IP"); + cloudSqlInstancePublicIp = System.getenv("CLOUD_SQL_INSTANCE_PUBLIC_IP"); } private WorkflowClient getWorkflowClient() { @@ -745,7 +758,7 @@ public OperationRead createNormalizationOperation() { return createNormalizationOperation(defaultWorkspaceId); } - private OperationRead createNormalizationOperation(final UUID workspaceId) { + public OperationRead createNormalizationOperation(final UUID workspaceId) { final OperatorConfiguration normalizationConfig = new OperatorConfiguration() .operatorType(OperatorType.NORMALIZATION).normalization(new OperatorNormalization().option( OperatorNormalization.OptionEnum.BASIC)); @@ -783,25 +796,29 @@ public List retrieveRecordsFromDatabase(final Database database, final } public JsonNode getSourceDbConfig() { - return getDbConfig(sourcePsql, false, false, Type.SOURCE); + return getDbConfig(sourcePsql, false, false, sourceDatabaseName); } public JsonNode getDestinationDbConfig() { - return getDbConfig(destinationPsql, false, true, Type.DESTINATION); + return getDbConfig(destinationPsql, false, true, destinationDatabaseName); } public JsonNode getDestinationDbConfigWithHiddenPassword() { - return getDbConfig(destinationPsql, true, true, Type.DESTINATION); + return getDbConfig(destinationPsql, true, true, destinationDatabaseName); } public JsonNode getDbConfig(final PostgreSQLContainer psql, final boolean hiddenPassword, final boolean withSchema, - final Type connectorType) { + final String databaseName) { try { final Map dbConfig = - (isKube && isGke) ? GKEPostgresConfig.dbConfig(connectorType, hiddenPassword ? 
null : postgresPassword, withSchema) - : localConfig(psql, hiddenPassword, withSchema); + (isKube && isGke) ? GKEPostgresConfig.dbConfig( + hiddenPassword ? null : cloudSqlInstancePassword, + withSchema, + cloudSqlInstanceUsername, + cloudSqlInstancePublicIp, + databaseName) : localConfig(psql, hiddenPassword, withSchema); final var config = Jsons.jsonNode(dbConfig); LOGGER.info("Using db config: {}", Jsons.toPrettyString(config)); return config; @@ -814,9 +831,7 @@ private Map localConfig(final PostgreSQLContainer psql, final boolean hiddenPassword, final boolean withSchema) { final Map dbConfig = new HashMap<>(); - // don't use psql.getHost() directly since the ip we need differs depending on environment - // NOTE: Use the container ip IFF we aren't on the "bridge" network - dbConfig.put(JdbcUtils.HOST_KEY, DOCKER_NETWORK.equals("bridge") ? getHostname() : psql.getHost()); + dbConfig.put(JdbcUtils.HOST_KEY, getHostname()); if (hiddenPassword) { dbConfig.put(JdbcUtils.PASSWORD_KEY, "**********"); @@ -878,6 +893,20 @@ public SourceDefinitionRead createE2eSourceDefinition(final UUID workspaceId) { return sourceDefinitionRead; } + public SourceDefinitionRead createPostgresSourceDefinition(final UUID workspaceId, final String dockerImageTag) throws Exception { + final var sourceDefinitionRead = AirbyteApiClient.retryWithJitterThrows( + () -> apiClient.getSourceDefinitionApi().createCustomSourceDefinition(new CustomSourceDefinitionCreate() + .workspaceId(workspaceId) + .sourceDefinition(new SourceDefinitionCreate() + .name("Custom Postgres Source") + .dockerRepository("airbyte/source-postgres") + .dockerImageTag(dockerImageTag) + .documentationUrl(URI.create("https://example.com")))), + "create customer source definition", JITTER_MAX_INTERVAL_SECS, FINAL_INTERVAL_SECS, MAX_TRIES); + sourceDefinitionIds.add(sourceDefinitionRead.getSourceDefinitionId()); + return sourceDefinitionRead; + } + public DestinationDefinitionRead createE2eDestinationDefinition(final UUID 
workspaceId) throws Exception { return AirbyteApiClient.retryWithJitterThrows(() -> apiClient.getDestinationDefinitionApi() .createCustomDestinationDefinition(new CustomDestinationDefinitionCreate() @@ -1217,7 +1246,7 @@ public void createWorkspaceWithId(UUID workspaceId) throws Exception { .createWorkspaceIfNotExist(new WorkspaceCreateWithId() .id(workspaceId) .email("acceptance-tests@airbyte.io") - .name("Airbyte Acceptance Tests" + UUID.randomUUID())), + .name("Airbyte Acceptance Tests" + UUID.randomUUID()).organizationId(DEFAULT_ORGANIZATION_ID)), "create workspace", 10, FINAL_INTERVAL_SECS, MAX_TRIES); } @@ -1231,14 +1260,6 @@ public StreamStatusReadList getStreamStatuses(UUID connectionId, Long jobId, Int "get stream statuses", JITTER_MAX_INTERVAL_SECS, FINAL_INTERVAL_SECS, MAX_TRIES); } - /** - * Connector type. - */ - public enum Type { - SOURCE, - DESTINATION - } - public void setIncrementalAppendSyncMode(final AirbyteCatalog airbyteCatalog, final List cursorField) { airbyteCatalog.getStreams().forEach(stream -> { stream.getConfig().syncMode(SyncMode.INCREMENTAL) @@ -1307,4 +1328,8 @@ public void compareCatalog(AirbyteCatalog actual) { assertEquals(expected, actual); } + private static String generateRandomCloudSqlDatabaseName() { + return CLOUD_SQL_DATABASE_PREFIX + UUID.randomUUID(); + } + } diff --git a/airbyte-test-utils/src/main/java/io/airbyte/test/utils/BaseConfigDatabaseTest.java b/airbyte-test-utils/src/main/java/io/airbyte/test/utils/BaseConfigDatabaseTest.java index 15fc475086e..b915348d49e 100644 --- a/airbyte-test-utils/src/main/java/io/airbyte/test/utils/BaseConfigDatabaseTest.java +++ b/airbyte-test-utils/src/main/java/io/airbyte/test/utils/BaseConfigDatabaseTest.java @@ -4,6 +4,10 @@ package io.airbyte.test.utils; +import static io.airbyte.db.instance.configs.jooq.generated.Tables.PERMISSION; + +import io.airbyte.config.Permission; +import io.airbyte.config.persistence.PermissionPersistenceHelper; import io.airbyte.db.Database; import 
io.airbyte.db.factory.DSLContextFactory; import io.airbyte.db.factory.DataSourceFactory; @@ -11,9 +15,11 @@ import io.airbyte.db.init.DatabaseInitializationException; import io.airbyte.db.instance.configs.ConfigsDatabaseMigrator; import io.airbyte.db.instance.configs.ConfigsDatabaseTestProvider; +import io.airbyte.db.instance.configs.jooq.generated.Tables; import io.airbyte.db.instance.test.TestDatabaseProviders; import java.io.IOException; import java.sql.SQLException; +import java.time.OffsetDateTime; import javax.sql.DataSource; import org.flywaydb.core.Flyway; import org.jooq.DSLContext; @@ -123,6 +129,7 @@ protected static void truncateAllTables() throws SQLException { actor_definition_config_injection, actor_oauth_parameter, auth_user, + connection_timeline_event, connection, connection_operation, connector_builder_project, @@ -134,6 +141,8 @@ protected static void truncateAllTables() throws SQLException { schema_management, state, stream_reset, + stream_refreshes, + stream_generation, \"user\", user_invitation, sso_config, @@ -143,6 +152,30 @@ protected static void truncateAllTables() throws SQLException { """)); } + /** + * This method used to live on PermissionPersistence, but it was deprecated in favor of the new + * PermissionService backed by a Micronaut Data repository. Many tests depended on this method, so + * rather than keep it in the deprecated PermissionPersistence, a simplified version is implemented + * here for tests only. 
+ */ + protected static void writePermission(final Permission permission) throws SQLException { + final io.airbyte.db.instance.configs.jooq.generated.enums.PermissionType permissionType = + PermissionPersistenceHelper.convertConfigPermissionTypeToJooqPermissionType(permission.getPermissionType()); + + final OffsetDateTime timestamp = OffsetDateTime.now(); + + database.query(ctx -> ctx + .insertInto(Tables.PERMISSION) + .set(PERMISSION.ID, permission.getPermissionId()) + .set(PERMISSION.PERMISSION_TYPE, permissionType) + .set(PERMISSION.USER_ID, permission.getUserId()) + .set(PERMISSION.WORKSPACE_ID, permission.getWorkspaceId()) + .set(PERMISSION.ORGANIZATION_ID, permission.getOrganizationId()) + .set(PERMISSION.CREATED_AT, timestamp) + .set(PERMISSION.UPDATED_AT, timestamp) + .execute()); + } + private static void createDbContainer() { container = new PostgreSQLContainer<>("postgres:13-alpine") .withDatabaseName("airbyte") diff --git a/airbyte-test-utils/src/main/java/io/airbyte/test/utils/CloudSqlDatabaseProvisioner.java b/airbyte-test-utils/src/main/java/io/airbyte/test/utils/CloudSqlDatabaseProvisioner.java new file mode 100644 index 00000000000..3057ec467fc --- /dev/null +++ b/airbyte-test-utils/src/main/java/io/airbyte/test/utils/CloudSqlDatabaseProvisioner.java @@ -0,0 +1,109 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.test.utils; + +import com.google.api.client.googleapis.javanet.GoogleNetHttpTransport; +import com.google.api.client.googleapis.json.GoogleJsonResponseException; +import com.google.api.client.json.gson.GsonFactory; +import com.google.api.services.sqladmin.SQLAdmin; +import com.google.api.services.sqladmin.model.Database; +import com.google.api.services.sqladmin.model.Operation; +import com.google.auth.http.HttpCredentialsAdapter; +import com.google.auth.oauth2.GoogleCredentials; +import com.google.common.annotations.VisibleForTesting; +import java.io.IOException; +import java.security.GeneralSecurityException; +import java.util.concurrent.Callable; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * Creates and deletes GCP CloudSQL databases. + */ +public class CloudSqlDatabaseProvisioner { + + private static final Logger LOGGER = LoggerFactory.getLogger(CloudSqlDatabaseProvisioner.class); + + private static final String SQL_OPERATION_DONE_STATUS = "DONE"; + private static final int DEFAULT_MAX_POLL_ATTEMPTS = 10; + private static final int DEFAULT_MAX_API_CALL_ATTEMPTS = 10; + private static final String APPLICATION_NAME = "cloud-sql-database-provisioner"; + + private final SQLAdmin sqlAdmin; + private final int maxPollAttempts; + private final int maxApiCallAttempts; + + @VisibleForTesting + CloudSqlDatabaseProvisioner(SQLAdmin sqlAdmin, int maxPollAttempts, int maxApiCallAttempts) { + this.sqlAdmin = sqlAdmin; + this.maxPollAttempts = maxPollAttempts; + this.maxApiCallAttempts = maxApiCallAttempts; + } + + public CloudSqlDatabaseProvisioner() throws GeneralSecurityException, IOException { + this.sqlAdmin = new SQLAdmin.Builder( + GoogleNetHttpTransport.newTrustedTransport(), + GsonFactory.getDefaultInstance(), + new HttpCredentialsAdapter(GoogleCredentials.getApplicationDefault())).setApplicationName(APPLICATION_NAME).build(); + this.maxPollAttempts = DEFAULT_MAX_POLL_ATTEMPTS; + this.maxApiCallAttempts = 
DEFAULT_MAX_API_CALL_ATTEMPTS; + } + + public synchronized String createDatabase(String projectId, String instanceId, String databaseName) throws IOException, InterruptedException { + Database database = new Database().setName(databaseName); + Operation operation = runWithRetry(() -> sqlAdmin.databases().insert(projectId, instanceId, database).execute()); + pollOperation(projectId, operation.getName()); + + return databaseName; + } + + public synchronized void deleteDatabase(String projectId, String instanceId, String databaseName) throws IOException, InterruptedException { + Operation operation = runWithRetry(() -> sqlAdmin.databases().delete(projectId, instanceId, databaseName).execute()); + pollOperation(projectId, operation.getName()); + } + + /** + * Database operations are asynchronous. This method polls the operation until it is done. + */ + @VisibleForTesting + void pollOperation(String projectId, String operationName) throws IOException, InterruptedException { + int pollAttempts = 0; + while (pollAttempts < maxPollAttempts) { + Operation operation = sqlAdmin.operations().get(projectId, operationName).execute(); + if (operation.getStatus().equals(SQL_OPERATION_DONE_STATUS)) { + return; + } + Thread.sleep(1000); + pollAttempts += 1; + } + + throw new RuntimeException("Operation " + operationName + " did not complete successfully"); + } + + /** + * If there's another operation already in progress in one same cloudsql instance then the api will + * return a 409 error. This method will retry api calls that return a 409 error. 
+ */ + @VisibleForTesting + Operation runWithRetry(Callable callable) throws InterruptedException { + int attempts = 0; + while (attempts < maxApiCallAttempts) { + try { + return callable.call(); + } catch (Exception e) { + if (e instanceof GoogleJsonResponseException && ((GoogleJsonResponseException) e).getStatusCode() == 409) { + attempts++; + LOGGER.info("Attempt " + attempts + " failed with 409 error"); + LOGGER.info("Exception thrown by API: " + e.getMessage()); + Thread.sleep(1000); + } else { + throw new RuntimeException(e); + } + } + } + throw new RuntimeException("Max retries exceeded. Could not complete operation."); + } + +} diff --git a/airbyte-test-utils/src/main/java/io/airbyte/test/utils/GKEPostgresConfig.java b/airbyte-test-utils/src/main/java/io/airbyte/test/utils/GKEPostgresConfig.java index 11d789d4787..9f18f42ba71 100644 --- a/airbyte-test-utils/src/main/java/io/airbyte/test/utils/GKEPostgresConfig.java +++ b/airbyte-test-utils/src/main/java/io/airbyte/test/utils/GKEPostgresConfig.java @@ -8,7 +8,6 @@ import io.airbyte.db.factory.DataSourceFactory; import io.airbyte.db.factory.DatabaseDriver; import io.airbyte.db.jdbc.JdbcUtils; -import io.airbyte.test.utils.AcceptanceTestHarness.Type; import java.io.IOException; import java.nio.charset.StandardCharsets; import java.nio.file.Path; @@ -20,33 +19,24 @@ /** * This class is used to provide information related to the test databases for running the - * {@link AcceptanceTestHarness} on GKE. We launch 2 postgres databases in GKE as pods which act as - * source and destination and the tests run against them. In order to allow the test instance to - * connect to these databases we use port forwarding Refer - * tools/bin/gke-kube-acceptance-test/acceptance_test_kube_gke.sh for more info + * {@link AcceptanceTestHarness} on GKE. 
*/ class GKEPostgresConfig { - // NOTE: these two hosts refer to services named `acceptance-test-postgres-[source|destination]-svc` - // in the `acceptance-tests` namespace, running in the same cluster as the check/discover/sync - // workers. - // - // The namespace here needs to be in sync with the namespaces created in - // tools/bin/gke-kube-acceptance-test/acceptance_test_kube_gke.sh. - private static final String SOURCE_HOST = "acceptance-test-postgres-source-svc.acceptance-tests.svc.cluster.local"; - private static final String DESTINATION_HOST = "acceptance-test-postgres-destination-svc.acceptance-tests.svc.cluster.local"; private static final Integer PORT = 5432; - private static final String USERNAME = "postgresadmin"; - private static final String DB = "postgresdb"; - static Map dbConfig(final Type connectorType, final String password, final boolean withSchema) { + static Map dbConfig(final String password, + final boolean withSchema, + String username, + String cloudSqlInstanceIp, + String databaseName) { final Map dbConfig = new HashMap<>(); - dbConfig.put(JdbcUtils.HOST_KEY, connectorType == Type.SOURCE ? SOURCE_HOST : DESTINATION_HOST); + dbConfig.put(JdbcUtils.HOST_KEY, cloudSqlInstanceIp); dbConfig.put(JdbcUtils.PASSWORD_KEY, password == null ? "**********" : password); dbConfig.put(JdbcUtils.PORT_KEY, PORT); - dbConfig.put(JdbcUtils.DATABASE_KEY, DB); - dbConfig.put(JdbcUtils.USERNAME_KEY, USERNAME); + dbConfig.put(JdbcUtils.DATABASE_KEY, databaseName); + dbConfig.put(JdbcUtils.USERNAME_KEY, username); dbConfig.put(JdbcUtils.JDBC_URL_PARAMS, "connectTimeout=60"); if (withSchema) { @@ -56,20 +46,9 @@ static Map dbConfig(final Type connectorType, final String passw return dbConfig; } - static DataSource getDestinationDataSource(final String password) { - // Note: we set the connection timeout to 30s. 
The underlying Hikari default is also 30s -- - // https://github.com/brettwooldridge/HikariCP#frequently-used -- but our DataSourceFactory - // overrides that to MAX_INTEGER unless we explicitly specify it. - return DataSourceFactory.create(USERNAME, password, DatabaseDriver.POSTGRESQL.getDriverClassName(), - "jdbc:postgresql://localhost:4000/postgresdb", Map.of(PGProperty.CONNECT_TIMEOUT.getName(), "60")); - } - - static DataSource getSourceDataSource(final String password) { - // Note: we set the connection timeout to 30s. The underlying Hikari default is also 30s -- - // https://github.com/brettwooldridge/HikariCP#frequently-used -- but our DataSourceFactory - // overrides that to MAX_INTEGER unless we explicitly specify it. - return DataSourceFactory.create(USERNAME, password, DatabaseDriver.POSTGRESQL.getDriverClassName(), - "jdbc:postgresql://localhost:2000/postgresdb", Map.of(PGProperty.CONNECT_TIMEOUT.getName(), "60")); + static DataSource getDataSource(final String username, final String password, String cloudSqlInstanceIp, String databaseName) { + return DataSourceFactory.create(username, password, DatabaseDriver.POSTGRESQL.getDriverClassName(), + "jdbc:postgresql://" + cloudSqlInstanceIp + ":5432/" + databaseName, Map.of(PGProperty.CONNECT_TIMEOUT.getName(), "60")); } static void runSqlScript(final Path scriptFilePath, final Database db) throws SQLException, IOException { diff --git a/airbyte-test-utils/src/test/java/io/airbyte/test/utils/CloudSqlDatabaseProvisionerTest.java b/airbyte-test-utils/src/test/java/io/airbyte/test/utils/CloudSqlDatabaseProvisionerTest.java new file mode 100644 index 00000000000..8bf975eb163 --- /dev/null +++ b/airbyte-test-utils/src/test/java/io/airbyte/test/utils/CloudSqlDatabaseProvisionerTest.java @@ -0,0 +1,141 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.test.utils; + +import static org.junit.jupiter.api.Assertions.assertDoesNotThrow; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyString; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +import com.google.api.client.googleapis.json.GoogleJsonResponseException; +import com.google.api.services.sqladmin.SQLAdmin; +import com.google.api.services.sqladmin.SQLAdmin.Operations; +import com.google.api.services.sqladmin.model.Database; +import com.google.api.services.sqladmin.model.Operation; +import java.io.IOException; +import java.util.concurrent.Callable; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; + +@ExtendWith(MockitoExtension.class) +class CloudSqlDatabaseProvisionerTest { + + private static final String PROJECT_ID = "project-id"; + private static final String INSTANCE_ID = "instance-id"; + private static final String DATABASE_NAME = "database-name"; + private static final int POLL_ATTEMPTS = 2; + private static final int API_CALL_ATTEMPTS = 2; + + @Mock + private SQLAdmin sqlAdmin; + @Mock + private SQLAdmin.Databases databases; + @Mock + private Operations operations; + @Mock + private Operations.Get getOperation; + @Mock + private SQLAdmin.Databases.Insert insertDatabase; + @Mock + private SQLAdmin.Databases.Delete deleteDatabase; + @Mock + private Operation operation; + @Mock + private GoogleJsonResponseException googleJsonResponseException; + @Mock + private Callable callable; + + private CloudSqlDatabaseProvisioner provisioner; + + @BeforeEach + void setUp() { + provisioner = new CloudSqlDatabaseProvisioner(sqlAdmin, POLL_ATTEMPTS, API_CALL_ATTEMPTS); + } + + @Test + void 
testCreateDatabase() throws IOException, InterruptedException { + mockOperation(); + when(operation.getStatus()).thenReturn("DONE"); + when(sqlAdmin.databases()).thenReturn(databases); + when(databases.insert(anyString(), anyString(), any(Database.class))).thenReturn(insertDatabase); + when(insertDatabase.execute()).thenReturn(operation); + when(operation.getName()).thenReturn("operation-name"); + + provisioner.createDatabase(PROJECT_ID, INSTANCE_ID, DATABASE_NAME); + + verify(databases).insert(PROJECT_ID, INSTANCE_ID, new Database().setName(DATABASE_NAME)); + verify(insertDatabase).execute(); + } + + @Test + void testDeleteDatabase() throws IOException, InterruptedException { + mockOperation(); + when(operation.getStatus()).thenReturn("DONE"); + when(sqlAdmin.databases()).thenReturn(databases); + when(databases.delete(anyString(), anyString(), anyString())).thenReturn(deleteDatabase); + when(deleteDatabase.execute()).thenReturn(operation); + when(operation.getName()).thenReturn("operation-name"); + + provisioner.deleteDatabase(PROJECT_ID, INSTANCE_ID, DATABASE_NAME); + + verify(databases).delete(PROJECT_ID, INSTANCE_ID, DATABASE_NAME); + verify(deleteDatabase).execute(); + } + + @Test + void testPollOperationNotDoneAfterMaxStatusChecks() throws IOException { + mockOperation(); + when(operation.getStatus()) + .thenReturn("PENDING") + .thenReturn("RUNNING") + .thenReturn("DONE"); + assertThrows(RuntimeException.class, () -> provisioner.pollOperation(PROJECT_ID, "operation-name")); + } + + @Test + void testPollOperationDoneBeforeMaxStatusChecks() throws IOException { + mockOperation(); + when(operation.getStatus()) + .thenReturn("PENDING") + .thenReturn("DONE"); + assertDoesNotThrow(() -> provisioner.pollOperation(PROJECT_ID, "operation-name")); + } + + private void mockOperation() throws IOException { + when(sqlAdmin.operations()).thenReturn(operations); + when(operations.get(eq(PROJECT_ID), anyString())).thenReturn(getOperation); + 
when(getOperation.execute()).thenReturn(operation); + } + + @Test + void testMoreThanMaxAttempts() throws Exception { + when(callable.call()).thenThrow(googleJsonResponseException); + when(googleJsonResponseException.getStatusCode()).thenReturn(409); + assertThrows(RuntimeException.class, () -> provisioner.runWithRetry(callable)); + } + + @Test + void testNoRetry() throws Exception { + when(callable.call()).thenThrow(new RuntimeException()); + assertThrows(RuntimeException.class, () -> provisioner.runWithRetry(callable)); + } + + @Test + void testOneRetry() throws Exception { + when(googleJsonResponseException.getStatusCode()).thenReturn(409); + when(callable.call()) + .thenThrow(googleJsonResponseException) + .thenReturn(null); + + assertDoesNotThrow(() -> provisioner.runWithRetry(callable)); + } + +} diff --git a/airbyte-test-utils/stage_network_setup.png b/airbyte-test-utils/stage_network_setup.png new file mode 100644 index 00000000000..4ac000d9b58 Binary files /dev/null and b/airbyte-test-utils/stage_network_setup.png differ diff --git a/airbyte-tests/build.gradle.kts b/airbyte-tests/build.gradle.kts index f000df1eb71..89804a67fab 100644 --- a/airbyte-tests/build.gradle.kts +++ b/airbyte-tests/build.gradle.kts @@ -1,38 +1,38 @@ import org.gradle.api.tasks.testing.logging.TestLogEvent plugins { - id("io.airbyte.gradle.jvm.lib") + id("io.airbyte.gradle.jvm.lib") } @Suppress("UnstableApiUsage") testing { - registerTestSuite(name="acceptanceTest", type="acceptance-test", dirName="test-acceptance") { - implementation.add(project()) - - implementation(project(":airbyte-api")) - implementation(project(":airbyte-commons")) - implementation(project(":airbyte-commons-auth")) - implementation(project(":airbyte-commons-temporal")) - implementation(project(":airbyte-config:config-models")) - implementation(project(":airbyte-config:config-persistence")) - implementation(project(":airbyte-db:db-lib")) - implementation(project(":airbyte-tests")) - 
implementation(project(":airbyte-test-utils")) - implementation(project(":airbyte-commons-worker")) - - - - implementation(libs.failsafe) - implementation(libs.jackson.databind) - implementation(libs.okhttp) - implementation(libs.temporal.sdk) - implementation(libs.platform.testcontainers.postgresql) - implementation(libs.postgresql) - - // needed for fabric to connect to k8s. - runtimeOnly(libs.bouncycastle.bcpkix) - runtimeOnly(libs.bouncycastle.bcprov) - } + registerTestSuite(name = "acceptanceTest", type = "acceptance-test", dirName = "test-acceptance") { + implementation.add(project()) + + implementation(project(":airbyte-api")) + implementation(project(":airbyte-commons")) + implementation(project(":airbyte-commons-auth")) + implementation(project(":airbyte-commons-temporal")) + implementation(project(":airbyte-config:config-models")) + implementation(project(":airbyte-config:config-persistence")) + implementation(project(":airbyte-db:db-lib")) + implementation(project(":airbyte-tests")) + implementation(project(":airbyte-test-utils")) + implementation(project(":airbyte-commons-worker")) + + + + implementation(libs.failsafe) + implementation(libs.jackson.databind) + implementation(libs.okhttp) + implementation(libs.temporal.sdk) + implementation(libs.platform.testcontainers.postgresql) + implementation(libs.postgresql) + + // needed for fabric to connect to k8s. 
+ runtimeOnly(libs.bouncycastle.bcpkix) + runtimeOnly(libs.bouncycastle.bcprov) + } } /** @@ -44,58 +44,62 @@ testing { */ @Suppress("UnstableApiUsage") fun registerTestSuite(name: String, type: String, dirName: String, deps: JvmComponentDependencies.() -> Unit) { - testing { - suites.register(name) { - testType.set(type) - - deps(dependencies) - - sources { - java { - setSrcDirs(listOf("src/$dirName/java")) - } - resources { - setSrcDirs(listOf("src/$dirName/resources")) - } - } - - targets.all { - testTask.configure { - testLogging { - events = setOf(TestLogEvent.PASSED, TestLogEvent.FAILED) - } - shouldRunAfter(suites.named("test")) - // Ensure they re-run since these are integration tests. - outputs.upToDateWhen { false } - } - } + testing { + suites.register(name) { + testType.set(type) + + deps(dependencies) + + sources { + java { + setSrcDirs(listOf("src/$dirName/java")) + } + resources { + setSrcDirs(listOf("src/$dirName/resources")) } + } - configurations.named("${name}Implementation") { - extendsFrom(configurations.getByName("testImplementation")) + targets.all { + testTask.configure { + + val parallelExecutionEnabled = System.getenv()["TESTS_PARALLEL_EXECUTION_ENABLED"] ?: "true" + systemProperties = mapOf("junit.jupiter.execution.parallel.enabled" to parallelExecutionEnabled) + + testLogging { + events = setOf(TestLogEvent.PASSED, TestLogEvent.FAILED, TestLogEvent.STARTED, TestLogEvent.SKIPPED) + } + shouldRunAfter(suites.named("test")) + // Ensure they re-run since these are integration tests. 
+ outputs.upToDateWhen { false } } + } } + + configurations.named("${name}Implementation") { + extendsFrom(configurations.getByName("testImplementation")) + } + } } configurations.configureEach { - // Temporary hack to avoid dependency conflicts - exclude(group="io.micronaut.email") + // Temporary hack to avoid dependency conflicts + exclude(group = "io.micronaut.email") } dependencies { - implementation(project(":airbyte-api")) - implementation(project(":airbyte-container-orchestrator")) + implementation(project(":airbyte-api")) + implementation(project(":airbyte-container-orchestrator")) - testImplementation("com.airbyte:api:0.39.2") + testImplementation("com.airbyte:api:0.39.2") - implementation(libs.bundles.kubernetes.client) - implementation(libs.platform.testcontainers) + implementation(libs.bundles.kubernetes.client) + implementation(libs.platform.testcontainers) - testImplementation(libs.bundles.junit) - testImplementation(libs.assertj.core) - testImplementation(libs.junit.pioneer) + testImplementation(libs.bundles.junit) + testImplementation(libs.assertj.core) + testImplementation(libs.junit.pioneer) } tasks.withType().configureEach { - duplicatesStrategy = DuplicatesStrategy.INCLUDE + duplicatesStrategy = DuplicatesStrategy.INCLUDE } diff --git a/airbyte-tests/src/test-acceptance/java/io/airbyte/test/acceptance/AcceptanceTestsResources.java b/airbyte-tests/src/test-acceptance/java/io/airbyte/test/acceptance/AcceptanceTestsResources.java index 0cbb62fa5aa..72d7dcc2e67 100644 --- a/airbyte-tests/src/test-acceptance/java/io/airbyte/test/acceptance/AcceptanceTestsResources.java +++ b/airbyte-tests/src/test-acceptance/java/io/airbyte/test/acceptance/AcceptanceTestsResources.java @@ -5,6 +5,7 @@ package io.airbyte.test.acceptance; import static io.airbyte.commons.auth.AirbyteAuthConstants.X_AIRBYTE_AUTH_HEADER; +import static io.airbyte.config.persistence.OrganizationPersistence.DEFAULT_ORGANIZATION_ID; import static 
io.airbyte.test.acceptance.AcceptanceTestConstants.IS_ENTERPRISE_TRUE; import static io.airbyte.test.acceptance.AcceptanceTestConstants.X_AIRBYTE_AUTH_HEADER_TEST_CLIENT_VALUE; @@ -42,6 +43,7 @@ import java.io.IOException; import java.net.URI; import java.net.URISyntaxException; +import java.security.GeneralSecurityException; import java.sql.SQLException; import java.time.Duration; import java.util.Collections; @@ -276,7 +278,7 @@ void runSmallSyncForAWorkspaceId(final UUID workspaceId) throws Exception { StreamStatusJobType.SYNC); } - void init() throws URISyntaxException, IOException, InterruptedException, ApiException { + void init() throws URISyntaxException, IOException, InterruptedException, ApiException, GeneralSecurityException { // TODO(mfsiega-airbyte): clean up and centralize the way we do config. final boolean isGke = System.getenv().containsKey(IS_GKE); // Set up the API client. @@ -318,7 +320,8 @@ void init() throws URISyntaxException, IOException, InterruptedException, ApiExc // NOTE: the API client can't create workspaces in GKE deployments, so we need to provide a // workspace ID in that environment. workspaceId = System.getenv(AIRBYTE_ACCEPTANCE_TEST_WORKSPACE_ID) == null ? 
apiClient.getWorkspaceApi() - .createWorkspace(new WorkspaceCreate().email("acceptance-tests@airbyte.io").name("Airbyte Acceptance Tests" + UUID.randomUUID())) + .createWorkspace(new WorkspaceCreate().email("acceptance-tests@airbyte.io").name("Airbyte Acceptance Tests" + UUID.randomUUID()) + .organizationId(DEFAULT_ORGANIZATION_ID)) .getWorkspaceId() : UUID.fromString(System.getenv(AIRBYTE_ACCEPTANCE_TEST_WORKSPACE_ID)); LOGGER.info("workspaceId = " + workspaceId); diff --git a/airbyte-tests/src/test-acceptance/java/io/airbyte/test/acceptance/AdvancedAcceptanceTests.java b/airbyte-tests/src/test-acceptance/java/io/airbyte/test/acceptance/AdvancedAcceptanceTests.java index 7331bcb527b..0d270edc9b0 100644 --- a/airbyte-tests/src/test-acceptance/java/io/airbyte/test/acceptance/AdvancedAcceptanceTests.java +++ b/airbyte-tests/src/test-acceptance/java/io/airbyte/test/acceptance/AdvancedAcceptanceTests.java @@ -7,45 +7,25 @@ import static io.airbyte.commons.auth.AirbyteAuthConstants.X_AIRBYTE_AUTH_HEADER; import static io.airbyte.test.acceptance.AcceptanceTestConstants.IS_ENTERPRISE_TRUE; import static io.airbyte.test.acceptance.AcceptanceTestConstants.X_AIRBYTE_AUTH_HEADER_TEST_CLIENT_VALUE; -import static io.airbyte.test.utils.AcceptanceTestHarness.COLUMN_ID; import static io.airbyte.test.utils.AcceptanceTestHarness.PUBLIC_SCHEMA_NAME; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertNotNull; -import static org.junit.jupiter.api.Assertions.assertTrue; -import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Lists; -import com.google.common.collect.Sets; import io.airbyte.api.client.AirbyteApiClient; import io.airbyte.api.client.invoker.generated.ApiClient; -import io.airbyte.api.client.invoker.generated.ApiException; import io.airbyte.api.client.model.generated.AirbyteCatalog; -import io.airbyte.api.client.model.generated.AirbyteStream; -import 
io.airbyte.api.client.model.generated.AttemptInfoRead; -import io.airbyte.api.client.model.generated.ConnectionState; import io.airbyte.api.client.model.generated.DestinationDefinitionIdRequestBody; import io.airbyte.api.client.model.generated.DestinationDefinitionRead; -import io.airbyte.api.client.model.generated.DestinationRead; import io.airbyte.api.client.model.generated.DestinationSyncMode; import io.airbyte.api.client.model.generated.JobInfoRead; -import io.airbyte.api.client.model.generated.JobRead; -import io.airbyte.api.client.model.generated.JobStatus; import io.airbyte.api.client.model.generated.SourceDefinitionIdRequestBody; import io.airbyte.api.client.model.generated.SourceDefinitionRead; import io.airbyte.api.client.model.generated.SourceDiscoverSchemaRead; -import io.airbyte.api.client.model.generated.SourceRead; import io.airbyte.api.client.model.generated.StreamStatusJobType; import io.airbyte.api.client.model.generated.StreamStatusRunState; import io.airbyte.api.client.model.generated.SyncMode; -import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.lang.MoreBooleans; import io.airbyte.test.utils.AcceptanceTestHarness; import io.airbyte.test.utils.Asserts; import io.airbyte.test.utils.TestConnectionCreate; -import java.io.IOException; import java.net.URI; -import java.net.URISyntaxException; -import java.util.List; import java.util.Optional; import java.util.UUID; import org.junit.jupiter.api.AfterAll; @@ -90,7 +70,7 @@ class AdvancedAcceptanceTests { private static final String AIRBYTE_SERVER_HOST = Optional.ofNullable(System.getenv("AIRBYTE_SERVER_HOST")).orElse("http://localhost:8001"); @BeforeAll - static void init() throws URISyntaxException, IOException, InterruptedException, ApiException { + static void init() throws Exception { final URI url = new URI(AIRBYTE_SERVER_HOST); final var apiClient = new AirbyteApiClient( new ApiClient().setScheme(url.getScheme()) @@ -152,147 +132,10 @@ void testManualSync() throws Exception { 
Asserts.assertSourceAndDestinationDbRawRecordsInSync(testHarness.getSourceDatabase(), testHarness.getDestinationDatabase(), PUBLIC_SCHEMA_NAME, conn.getNamespaceFormat(), false, false); - LOGGER.info("===== before stream"); Asserts.assertStreamStatuses(testHarness, workspaceId, connectionId, connectionSyncRead.getJob().getId(), StreamStatusRunState.COMPLETE, StreamStatusJobType.SYNC); testHarness.cleanup(); } - @Test - void testCheckpointing() throws Exception { - final SourceDefinitionRead sourceDefinition = testHarness.createE2eSourceDefinition(workspaceId); - final DestinationDefinitionRead destinationDefinition = testHarness.createE2eDestinationDefinition(workspaceId); - - final SourceRead source = testHarness.createSource( - "E2E Test Source -" + UUID.randomUUID(), - workspaceId, - sourceDefinition.getSourceDefinitionId(), - Jsons.jsonNode(ImmutableMap.builder() - .put(TYPE, "EXCEPTION_AFTER_N") - .put("throw_after_n_records", 100) - .build())); - - final DestinationRead destination = testHarness.createDestination( - "E2E Test Destination -" + UUID.randomUUID(), - workspaceId, - destinationDefinition.getDestinationDefinitionId(), - Jsons.jsonNode(ImmutableMap.of(TYPE, "SILENT"))); - - final UUID sourceId = source.getSourceId(); - final UUID destinationId = destination.getDestinationId(); - final SourceDiscoverSchemaRead discoverResult = testHarness.discoverSourceSchemaWithId(sourceId); - final AirbyteCatalog catalog = discoverResult.getCatalog(); - final AirbyteStream stream = catalog.getStreams().get(0).getStream(); - - assertEquals( - Lists.newArrayList(SyncMode.FULL_REFRESH, SyncMode.INCREMENTAL), - stream.getSupportedSyncModes()); - assertTrue(MoreBooleans.isTruthy(stream.getSourceDefinedCursor())); - - final SyncMode syncMode = SyncMode.INCREMENTAL; - final DestinationSyncMode destinationSyncMode = DestinationSyncMode.APPEND; - catalog.getStreams().forEach(s -> s.getConfig() - .syncMode(syncMode) - .cursorField(List.of(COLUMN_ID)) - .selected(true) - 
.destinationSyncMode(destinationSyncMode)); - final UUID connectionId = - testHarness.createConnection(new TestConnectionCreate.Builder( - sourceId, - destinationId, - catalog, - discoverResult.getCatalogId()).build()) - .getConnectionId(); - final JobInfoRead connectionSyncRead1 = testHarness.syncConnection(connectionId); - - // wait to get out of pending. - final JobRead runningJob = testHarness.waitWhileJobHasStatus(connectionSyncRead1.getJob(), Sets.newHashSet(JobStatus.PENDING)); - // wait to get out of running. - testHarness.waitWhileJobHasStatus(runningJob, Sets.newHashSet(JobStatus.RUNNING)); - // now cancel it so that we freeze state! - try { - testHarness.cancelSync(connectionSyncRead1.getJob().getId()); - } catch (final Exception e) { - LOGGER.error("error:", e); - } - - final ConnectionState connectionState = testHarness.waitForConnectionState(connectionId); - - /* - * the source is set to emit a state message every 5th message. because of the multithreaded nature, - * we can't guarantee exactly what checkpoint will be registered. what we can do is send enough - * messages to make sure that we check point at least once. - */ - assertNotNull(connectionState.getState()); - assertTrue(connectionState.getState().get(COLUMN1).isInt()); - LOGGER.info("state value: {}", connectionState.getState().get(COLUMN1).asInt()); - assertTrue(connectionState.getState().get(COLUMN1).asInt() > 0); - assertEquals(0, connectionState.getState().get(COLUMN1).asInt() % 5); - } - - // verify that when the worker uses backpressure from pipes that no records are lost. 
- @Test - void testBackpressure() throws Exception { - final SourceDefinitionRead sourceDefinition = testHarness.createE2eSourceDefinition(workspaceId); - final DestinationDefinitionRead destinationDefinition = testHarness.createE2eDestinationDefinition(workspaceId); - - final SourceRead source = testHarness.createSource( - "E2E Test Source -" + UUID.randomUUID(), - workspaceId, - sourceDefinition.getSourceDefinitionId(), - Jsons.jsonNode(ImmutableMap.builder() - .put(TYPE, "INFINITE_FEED") - .put("max_records", 5000) - .build())); - - final DestinationRead destination = testHarness.createDestination( - "E2E Test Destination -" + UUID.randomUUID(), - workspaceId, - destinationDefinition.getDestinationDefinitionId(), - Jsons.jsonNode(ImmutableMap.builder() - .put(TYPE, "THROTTLED") - .put("millis_per_record", 1) - .build())); - - final UUID sourceId = source.getSourceId(); - final UUID destinationId = destination.getDestinationId(); - final SourceDiscoverSchemaRead discoverResult = testHarness.discoverSourceSchemaWithId(sourceId); - final AirbyteCatalog catalog = discoverResult.getCatalog(); - catalog.getStreams().forEach(s -> s.getConfig().selected(true)); - - final UUID connectionId = - testHarness.createConnection(new TestConnectionCreate.Builder( - sourceId, - destinationId, - catalog, - discoverResult.getCatalogId()).build()) - .getConnectionId(); - final JobInfoRead connectionSyncRead1 = testHarness.syncConnection(connectionId); - - // wait to get out of pending. - final JobRead runningJob = testHarness.waitWhileJobHasStatus(connectionSyncRead1.getJob(), Sets.newHashSet(JobStatus.PENDING)); - // wait to get out of running. 
- testHarness.waitWhileJobHasStatus(runningJob, Sets.newHashSet(JobStatus.RUNNING)); - - final JobInfoRead jobInfo = testHarness.getJobInfoRead(runningJob.getId()); - final AttemptInfoRead attemptInfoRead = jobInfo.getAttempts().get(jobInfo.getAttempts().size() - 1); - assertNotNull(attemptInfoRead); - - int expectedMessageNumber = 0; - final int max = 10_000; - for (final String logLine : attemptInfoRead.getLogs().getLogLines()) { - if (expectedMessageNumber > max) { - break; - } - - if (logLine.contains("received record: ") && logLine.contains("\"type\": \"RECORD\"")) { - assertTrue( - logLine.contains(String.format("\"column1\": \"%s\"", expectedMessageNumber)), - String.format("Expected %s but got: %s", expectedMessageNumber, logLine)); - expectedMessageNumber++; - } - } - } - } diff --git a/airbyte-tests/src/test-acceptance/java/io/airbyte/test/acceptance/ApiAcceptanceTests.java b/airbyte-tests/src/test-acceptance/java/io/airbyte/test/acceptance/ApiAcceptanceTests.java index 853cdae6a8d..85000ecf2fb 100644 --- a/airbyte-tests/src/test-acceptance/java/io/airbyte/test/acceptance/ApiAcceptanceTests.java +++ b/airbyte-tests/src/test-acceptance/java/io/airbyte/test/acceptance/ApiAcceptanceTests.java @@ -37,21 +37,12 @@ import io.airbyte.test.utils.AcceptanceTestHarness; import io.airbyte.test.utils.Asserts; import io.airbyte.test.utils.TestConnectionCreate; -import java.io.IOException; -import java.net.URISyntaxException; -import java.sql.SQLException; import java.util.List; import java.util.Set; import java.util.UUID; -import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.MethodOrderer; import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.TestInstance; -import org.junit.jupiter.api.TestInstance.Lifecycle; -import org.junit.jupiter.api.TestMethodOrder; import 
org.junit.jupiter.api.condition.DisabledIfEnvironmentVariable; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -75,47 +66,29 @@ "PMD.AvoidDuplicateLiterals"}) @DisabledIfEnvironmentVariable(named = "SKIP_BASIC_ACCEPTANCE_TESTS", matches = "true") -@TestMethodOrder(MethodOrderer.OrderAnnotation.class) -@TestInstance(Lifecycle.PER_CLASS) class ApiAcceptanceTests { private static final Logger LOGGER = LoggerFactory.getLogger(ApiAcceptanceTests.class); - private static final AcceptanceTestsResources testResources = new AcceptanceTestsResources(); - - static final String SLOW_TEST_IN_GKE = - "TODO(https://github.com/airbytehq/airbyte-platform-internal/issues/5181): re-enable slow tests in GKE"; - static final String DUPLICATE_TEST_IN_GKE = + private static final String DUPLICATE_TEST_IN_GKE = "TODO(https://github.com/airbytehq/airbyte-platform-internal/issues/5182): eliminate test duplication"; - static final String TYPE = "type"; - static final String E2E_TEST_SOURCE = "E2E Test Source -"; - static final String INFINITE_FEED = "INFINITE_FEED"; - static final String MESSAGE_INTERVAL = "message_interval"; - static final String MAX_RECORDS = "max_records"; - static final String FIELD = "field"; - static final String ID_AND_NAME = "id_and_name"; - AcceptanceTestHarness testHarness; - UUID workspaceId; - - @BeforeAll - void init() throws URISyntaxException, IOException, InterruptedException, ApiException { + + private AcceptanceTestsResources testResources; + private AcceptanceTestHarness testHarness; + private UUID workspaceId; + + @BeforeEach + void setup() throws Exception { + testResources = new AcceptanceTestsResources(); testResources.init(); testHarness = testResources.getTestHarness(); workspaceId = testResources.getWorkspaceId(); - } - - @BeforeEach - void setup() throws SQLException, URISyntaxException, IOException, ApiException { testResources.setup(); } @AfterEach void tearDown() { testResources.tearDown(); - } - - @AfterAll - static void end() { 
testResources.end(); } diff --git a/airbyte-tests/src/test-acceptance/java/io/airbyte/test/acceptance/ConnectorBuilderTests.java b/airbyte-tests/src/test-acceptance/java/io/airbyte/test/acceptance/ConnectorBuilderTests.java index f1f51799a75..59eb5320cce 100644 --- a/airbyte-tests/src/test-acceptance/java/io/airbyte/test/acceptance/ConnectorBuilderTests.java +++ b/airbyte-tests/src/test-acceptance/java/io/airbyte/test/acceptance/ConnectorBuilderTests.java @@ -4,6 +4,7 @@ package io.airbyte.test.acceptance; +import static io.airbyte.config.persistence.OrganizationPersistence.DEFAULT_ORGANIZATION_ID; import static org.junit.jupiter.api.Assertions.assertEquals; import com.fasterxml.jackson.core.JsonProcessingException; @@ -32,10 +33,7 @@ import io.airbyte.test.utils.AcceptanceTestHarness; import io.airbyte.test.utils.Databases; import io.airbyte.test.utils.SchemaTableNamePair; -import java.io.IOException; import java.net.URI; -import java.net.URISyntaxException; -import java.sql.SQLException; import java.util.Optional; import java.util.Set; import java.util.UUID; @@ -166,7 +164,7 @@ public class ConnectorBuilderTests { } @BeforeAll - static void init() throws URISyntaxException, IOException, InterruptedException, ApiException, SQLException { + static void init() throws Exception { final URI url = new URI(AIRBYTE_SERVER_HOST); final var underlyingApiClient = new ApiClient().setScheme(url.getScheme()) .setHost(url.getHost()) @@ -174,7 +172,8 @@ static void init() throws URISyntaxException, IOException, InterruptedException, .setBasePath("/api"); apiClient = new AirbyteApiClient(underlyingApiClient); workspaceId = apiClient.getWorkspaceApi() - .createWorkspace(new WorkspaceCreate().email("acceptance-tests@airbyte.io").name("Airbyte Acceptance Tests" + UUID.randomUUID().toString())) + .createWorkspace(new WorkspaceCreate().email("acceptance-tests@airbyte.io").name("Airbyte Acceptance Tests" + UUID.randomUUID().toString()) + .organizationId(DEFAULT_ORGANIZATION_ID)) 
.getWorkspaceId(); testHarness = new AcceptanceTestHarness(apiClient, null, workspaceId); testHarness.setup(); diff --git a/airbyte-tests/src/test-acceptance/java/io/airbyte/test/acceptance/SchemaManagementTests.java b/airbyte-tests/src/test-acceptance/java/io/airbyte/test/acceptance/SchemaManagementTests.java index a42216cdcba..4813efa54c1 100644 --- a/airbyte-tests/src/test-acceptance/java/io/airbyte/test/acceptance/SchemaManagementTests.java +++ b/airbyte-tests/src/test-acceptance/java/io/airbyte/test/acceptance/SchemaManagementTests.java @@ -4,6 +4,7 @@ package io.airbyte.test.acceptance; +import static io.airbyte.config.persistence.OrganizationPersistence.DEFAULT_ORGANIZATION_ID; import static org.junit.Assert.assertNotEquals; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertFalse; @@ -38,18 +39,18 @@ import java.io.IOException; import java.net.URI; import java.net.URISyntaxException; +import java.security.GeneralSecurityException; import java.util.List; import java.util.Optional; import java.util.UUID; import java.util.concurrent.TimeUnit; import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.TestInstance; -import org.junit.jupiter.api.TestInstance.Lifecycle; import org.junit.jupiter.api.Timeout; import org.junit.jupiter.api.condition.DisabledIfEnvironmentVariable; +import org.junit.jupiter.api.parallel.Execution; +import org.junit.jupiter.api.parallel.ExecutionMode; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -60,7 +61,7 @@ matches = "true") @Timeout(value = 2, unit = TimeUnit.MINUTES) // Default timeout of 2 minutes; individual tests should override if they need longer. 
-@TestInstance(Lifecycle.PER_CLASS) +@Execution(ExecutionMode.CONCURRENT) class SchemaManagementTests { private static final Logger LOGGER = LoggerFactory.getLogger(SchemaManagementTests.class); @@ -73,15 +74,12 @@ class SchemaManagementTests { private static final String AIRBYTE_AUTH_HEADER = "eyJ1c2VyX2lkIjogImNsb3VkLWFwaSIsICJlbWFpbF92ZXJpZmllZCI6ICJ0cnVlIn0K"; private static final String AIRBYTE_ACCEPTANCE_TEST_WORKSPACE_ID = "AIRBYTE_ACCEPTANCE_TEST_WORKSPACE_ID"; private static final String AIRBYTE_SERVER_HOST = Optional.ofNullable(System.getenv("AIRBYTE_SERVER_HOST")).orElse("http://localhost:8001"); - public static final int JITTER_MAX_INTERVAL_SECS = 10; - public static final int FINAL_INTERVAL_SECS = 60; - public static final int MAX_TRIES = 3; public static final String A_NEW_COLUMN = "a_new_column"; public static final String FIELD_NAME = "name"; private static final int DEFAULT_VALUE = 50; - private static AcceptanceTestHarness testHarness; - private static ConnectionRead createdConnection; - private static ConnectionRead createdConnectionWithSameSource; + private AcceptanceTestHarness testHarness; + private ConnectionRead createdConnection; + private ConnectionRead createdConnectionWithSameSource; private void createTestConnections() throws Exception { final UUID sourceId = testHarness.createPostgresSource().getSourceId(); @@ -118,8 +116,7 @@ private void createTestConnections() throws Exception { .build()); } - @BeforeAll - static void init() throws ApiException, URISyntaxException, IOException, InterruptedException { + void init() throws ApiException, URISyntaxException, IOException, InterruptedException, GeneralSecurityException { // TODO(mfsiega-airbyte): clean up and centralize the way we do config. final boolean isGke = System.getenv().containsKey(IS_GKE); // Set up the API client. 
@@ -144,7 +141,8 @@ static void init() throws ApiException, URISyntaxException, IOException, Interru final var webBackendApi = new WebBackendApi(underlyingWebBackendApiClient); final UUID workspaceId = System.getenv().get(AIRBYTE_ACCEPTANCE_TEST_WORKSPACE_ID) == null ? apiClient.getWorkspaceApi() - .createWorkspace(new WorkspaceCreate().email("acceptance-tests@airbyte.io").name("Airbyte Acceptance Tests" + UUID.randomUUID())) + .createWorkspace(new WorkspaceCreate().email("acceptance-tests@airbyte.io").name("Airbyte Acceptance Tests" + UUID.randomUUID()) + .organizationId(DEFAULT_ORGANIZATION_ID)) .getWorkspaceId() : UUID.fromString(System.getenv().get(AIRBYTE_ACCEPTANCE_TEST_WORKSPACE_ID)); @@ -153,6 +151,7 @@ static void init() throws ApiException, URISyntaxException, IOException, Interru @BeforeEach void beforeEach() throws Exception { + init(); LOGGER.debug("Executing test case setup"); testHarness.setup(); createTestConnections(); diff --git a/airbyte-tests/src/test-acceptance/java/io/airbyte/test/acceptance/SyncAcceptanceTests.java b/airbyte-tests/src/test-acceptance/java/io/airbyte/test/acceptance/SyncAcceptanceTests.java index 7a402466220..afd56a5d870 100644 --- a/airbyte-tests/src/test-acceptance/java/io/airbyte/test/acceptance/SyncAcceptanceTests.java +++ b/airbyte-tests/src/test-acceptance/java/io/airbyte/test/acceptance/SyncAcceptanceTests.java @@ -4,14 +4,11 @@ package io.airbyte.test.acceptance; -import static io.airbyte.test.acceptance.AcceptanceTestsResources.DISABLE_TEMPORAL_TESTS_IN_GKE; import static io.airbyte.test.acceptance.AcceptanceTestsResources.FINAL_INTERVAL_SECS; -import static io.airbyte.test.acceptance.AcceptanceTestsResources.GERALT; import static io.airbyte.test.acceptance.AcceptanceTestsResources.IS_GKE; import static io.airbyte.test.acceptance.AcceptanceTestsResources.JITTER_MAX_INTERVAL_SECS; +import static io.airbyte.test.acceptance.AcceptanceTestsResources.KUBE; import static 
io.airbyte.test.acceptance.AcceptanceTestsResources.MAX_TRIES; -import static io.airbyte.test.acceptance.AcceptanceTestsResources.STATE_AFTER_SYNC_ONE; -import static io.airbyte.test.acceptance.AcceptanceTestsResources.STATE_AFTER_SYNC_TWO; import static io.airbyte.test.acceptance.AcceptanceTestsResources.TRUE; import static io.airbyte.test.acceptance.AcceptanceTestsResources.WITHOUT_SCD_TABLE; import static io.airbyte.test.acceptance.AcceptanceTestsResources.WITH_SCD_TABLE; @@ -20,33 +17,20 @@ import static io.airbyte.test.utils.AcceptanceTestHarness.PUBLIC; import static io.airbyte.test.utils.AcceptanceTestHarness.PUBLIC_SCHEMA_NAME; import static io.airbyte.test.utils.AcceptanceTestHarness.STREAM_NAME; -import static org.junit.jupiter.api.Assertions.assertDoesNotThrow; import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertNotNull; -import static org.junit.jupiter.api.Assertions.assertThrows; -import static org.junit.jupiter.api.Assertions.assertTrue; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.node.ObjectNode; import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Sets; import io.airbyte.api.client.AirbyteApiClient; import io.airbyte.api.client.invoker.generated.ApiException; import io.airbyte.api.client.model.generated.AirbyteCatalog; -import io.airbyte.api.client.model.generated.AttemptInfoRead; -import io.airbyte.api.client.model.generated.AttemptStatus; import io.airbyte.api.client.model.generated.CheckConnectionRead; import io.airbyte.api.client.model.generated.ConnectionRead; import io.airbyte.api.client.model.generated.ConnectionScheduleData; -import io.airbyte.api.client.model.generated.ConnectionScheduleDataBasicSchedule; -import io.airbyte.api.client.model.generated.ConnectionScheduleDataBasicSchedule.TimeUnitEnum; import io.airbyte.api.client.model.generated.ConnectionScheduleDataCron; import 
io.airbyte.api.client.model.generated.ConnectionScheduleType; -import io.airbyte.api.client.model.generated.ConnectionState; -import io.airbyte.api.client.model.generated.DestinationDefinitionRead; -import io.airbyte.api.client.model.generated.DestinationRead; import io.airbyte.api.client.model.generated.DestinationSyncMode; -import io.airbyte.api.client.model.generated.JobConfigType; import io.airbyte.api.client.model.generated.JobInfoRead; import io.airbyte.api.client.model.generated.JobRead; import io.airbyte.api.client.model.generated.JobStatus; @@ -56,45 +40,33 @@ import io.airbyte.api.client.model.generated.SourceDiscoverSchemaRead; import io.airbyte.api.client.model.generated.SourceRead; import io.airbyte.api.client.model.generated.StreamDescriptor; -import io.airbyte.api.client.model.generated.StreamState; import io.airbyte.api.client.model.generated.StreamStatusJobType; import io.airbyte.api.client.model.generated.StreamStatusRunState; import io.airbyte.api.client.model.generated.SyncMode; import io.airbyte.api.client.model.generated.WebBackendConnectionUpdate; import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.temporal.scheduling.state.WorkflowState; import io.airbyte.db.Database; import io.airbyte.test.utils.AcceptanceTestHarness; import io.airbyte.test.utils.Asserts; import io.airbyte.test.utils.Databases; import io.airbyte.test.utils.SchemaTableNamePair; import io.airbyte.test.utils.TestConnectionCreate; -import io.temporal.client.WorkflowQueryException; -import java.io.IOException; -import java.net.URISyntaxException; -import java.sql.SQLException; import java.time.Duration; -import java.util.Collections; import java.util.List; -import java.util.Optional; import java.util.Set; import java.util.UUID; import java.util.stream.Collectors; import org.jooq.impl.DSL; import org.jooq.impl.SQLDataType; -import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.Assertions; -import 
org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Disabled; -import org.junit.jupiter.api.MethodOrderer; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.TestInfo; -import org.junit.jupiter.api.TestInstance; -import org.junit.jupiter.api.TestInstance.Lifecycle; -import org.junit.jupiter.api.TestMethodOrder; import org.junit.jupiter.api.condition.DisabledIfEnvironmentVariable; +import org.junit.jupiter.api.condition.EnabledIfEnvironmentVariable; +import org.junit.jupiter.api.parallel.Execution; +import org.junit.jupiter.api.parallel.ExecutionMode; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -113,17 +85,17 @@ * these tests, the assert statement we would need to put in to check nullability is just as good as * throwing the NPE as they will be effectively the same at run time. */ -@SuppressWarnings({"PMD.JUnitTestsShouldIncludeAssert", "DataFlowIssue", "SqlDialectInspection", "SqlNoDataSourceInspection", +@SuppressWarnings({"PMD.JUnitTestsShouldIncludeAssert", "DataFlowIssue", "SqlDialectInspection", + "SqlNoDataSourceInspection", "PMD.AvoidDuplicateLiterals"}) @DisabledIfEnvironmentVariable(named = "SKIP_BASIC_ACCEPTANCE_TESTS", matches = "true") -@TestMethodOrder(MethodOrderer.OrderAnnotation.class) -@TestInstance(Lifecycle.PER_CLASS) +@Execution(ExecutionMode.CONCURRENT) class SyncAcceptanceTests { private static final Logger LOGGER = LoggerFactory.getLogger(SyncAcceptanceTests.class); - private static final AcceptanceTestsResources testResources = new AcceptanceTestsResources(); + private AcceptanceTestsResources testResources; static final String SLOW_TEST_IN_GKE = "TODO(https://github.com/airbytehq/airbyte-platform-internal/issues/5181): re-enable slow tests in GKE"; @@ -139,25 +111,18 @@ class SyncAcceptanceTests { AcceptanceTestHarness testHarness; UUID workspaceId; - @BeforeAll - void init() throws URISyntaxException, IOException, InterruptedException, ApiException { + 
@BeforeEach + void setup() throws Exception { + testResources = new AcceptanceTestsResources(); testResources.init(); testHarness = testResources.getTestHarness(); workspaceId = testResources.getWorkspaceId(); - } - - @BeforeEach - void setup() throws SQLException, URISyntaxException, IOException, ApiException { testResources.setup(); } @AfterEach void tearDown() { testResources.tearDown(); - } - - @AfterAll - static void end() { testResources.end(); } @@ -296,71 +261,6 @@ void testCronSync() throws Exception { testHarness.removeConnection(connectionId); } - @Test - void testMultipleSchemasAndTablesSync() throws Exception { - // create tables in the staging schema - testHarness.runSqlScriptInSource("postgres_second_schema_multiple_tables.sql"); - - final UUID sourceId = testHarness.createPostgresSource().getSourceId(); - final UUID destinationId = testHarness.createPostgresDestination().getDestinationId(); - final SourceDiscoverSchemaRead discoverResult = testHarness.discoverSourceSchemaWithId(sourceId); - final AirbyteCatalog catalog = discoverResult.getCatalog(); - - final SyncMode srcSyncMode = SyncMode.FULL_REFRESH; - final DestinationSyncMode dstSyncMode = DestinationSyncMode.OVERWRITE; - catalog.getStreams().forEach(s -> s.getConfig().syncMode(srcSyncMode).selected(true).destinationSyncMode(dstSyncMode)); - final var conn = testHarness - .createConnection(new TestConnectionCreate.Builder( - sourceId, - destinationId, - catalog, - discoverResult.getCatalogId()) - .build()); - final var connectionId = conn.getConnectionId(); - final JobInfoRead connectionSyncRead = testHarness.syncConnection(connectionId); - testHarness.waitForSuccessfulJob(connectionSyncRead.getJob()); - Asserts.assertSourceAndDestinationDbRawRecordsInSync( - testHarness.getSourceDatabase(), testHarness.getDestinationDatabase(), - Set.of(PUBLIC_SCHEMA_NAME, "staging"), conn.getNamespaceFormat(), false, false); - Asserts.assertStreamStatuses(testHarness, workspaceId, connectionId, 
connectionSyncRead.getJob().getId(), StreamStatusRunState.COMPLETE, - StreamStatusJobType.SYNC); - } - - @Test - @DisabledIfEnvironmentVariable(named = IS_GKE, - matches = TRUE, - disabledReason = "The different way of interacting with the source db causes errors") - void testMultipleSchemasSameTablesSync() throws Exception { - // create tables in another schema - testHarness.runSqlScriptInSource("postgres_separate_schema_same_table.sql"); - - final UUID sourceId = testHarness.createPostgresSource().getSourceId(); - final UUID destinationId = testHarness.createPostgresDestination().getDestinationId(); - final SourceDiscoverSchemaRead discoverResult = testHarness.discoverSourceSchemaWithId(sourceId); - final AirbyteCatalog catalog = discoverResult.getCatalog(); - - final SyncMode srcSyncMode = SyncMode.FULL_REFRESH; - final DestinationSyncMode dstSyncMode = DestinationSyncMode.OVERWRITE; - catalog.getStreams().forEach(s -> s.getConfig().syncMode(srcSyncMode).selected(true).destinationSyncMode(dstSyncMode)); - final var conn = - testHarness.createConnectionSourceNamespace(new TestConnectionCreate.Builder( - sourceId, - destinationId, - catalog, - discoverResult.getCatalogId()) - .build()); - - final var connectionId = conn.getConnectionId(); - final JobInfoRead connectionSyncRead = testHarness.syncConnection(connectionId); - testHarness.waitForSuccessfulJob(connectionSyncRead.getJob()); - Asserts.assertSourceAndDestinationDbRawRecordsInSync( - testHarness.getSourceDatabase(), testHarness.getDestinationDatabase(), PUBLIC_SCHEMA_NAME, - conn.getNamespaceFormat().replace("${SOURCE_NAMESPACE}", PUBLIC), false, - WITHOUT_SCD_TABLE); - Asserts.assertStreamStatuses(testHarness, workspaceId, connectionId, connectionSyncRead.getJob().getId(), StreamStatusRunState.COMPLETE, - StreamStatusJobType.SYNC); - } - @Test @DisabledIfEnvironmentVariable(named = IS_GKE, matches = TRUE, @@ -421,381 +321,9 @@ void testIncrementalDedupeSync() throws Exception { @Test void 
testIncrementalSync() throws Exception { - testResources.runIncrementalSyncForAWorkspaceId(workspaceId); } - @Test - @DisabledIfEnvironmentVariable(named = IS_GKE, - matches = TRUE, - disabledReason = DISABLE_TEMPORAL_TESTS_IN_GKE) - void testUpdateConnectionWhenWorkflowUnreachable() throws Exception { - // This test only covers the specific behavior of updating a connection that does not have an - // underlying temporal workflow. - // Also, this test doesn't verify correctness of the schedule update applied, as adding the ability - // to query a workflow for its current - // schedule is out of scope for the issue (https://github.com/airbytehq/airbyte/issues/11215). This - // test just ensures that the underlying workflow - // is running after the update method is called. - final UUID sourceId = testHarness.createPostgresSource().getSourceId(); - final UUID destinationId = testHarness.createPostgresDestination().getDestinationId(); - final SourceDiscoverSchemaRead discoverResult = testHarness.discoverSourceSchemaWithId(sourceId); - final AirbyteCatalog catalog = discoverResult.getCatalog(); - catalog.getStreams().forEach(s -> s.getConfig() - .syncMode(SyncMode.INCREMENTAL) - .selected(true) - .cursorField(List.of(COLUMN_ID)) - .destinationSyncMode(DestinationSyncMode.APPEND_DEDUP) - .primaryKey(List.of(List.of(COLUMN_NAME)))); - - LOGGER.info("Testing connection update when temporal is in a terminal state"); - final UUID connectionId = - testHarness.createConnection(new TestConnectionCreate.Builder( - sourceId, - destinationId, - catalog, - discoverResult.getCatalogId()) - .build()) - .getConnectionId(); - - testHarness.terminateTemporalWorkflow(connectionId); - // This should throw an exception since the workflow is terminated and does not exist. 
- assertThrows(WorkflowQueryException.class, () -> testHarness.getWorkflowState(connectionId)); - - // we should still be able to update the connection when the temporal workflow is in this state - testHarness.updateConnectionSchedule( - connectionId, - ConnectionScheduleType.BASIC, - new ConnectionScheduleData().basicSchedule(new ConnectionScheduleDataBasicSchedule().timeUnit(TimeUnitEnum.HOURS).units(1L))); - // updateConnection should recreate the workflow. Querying for it should not throw an exception. - assertDoesNotThrow(() -> testHarness.getWorkflowState(connectionId)); - } - - @Test - @DisabledIfEnvironmentVariable(named = IS_GKE, - matches = TRUE, - disabledReason = DISABLE_TEMPORAL_TESTS_IN_GKE) - void testManualSyncRepairsWorkflowWhenWorkflowUnreachable() throws Exception { - // This test only covers the specific behavior of updating a connection that does not have an - // underlying temporal workflow. - final SourceDefinitionRead sourceDefinition = testHarness.createE2eSourceDefinition( - workspaceId); - final SourceRead source = testHarness.createSource( - E2E_TEST_SOURCE + UUID.randomUUID(), - workspaceId, - sourceDefinition.getSourceDefinitionId(), - Jsons.jsonNode(ImmutableMap.builder() - .put(TYPE, INFINITE_FEED) - .put(MAX_RECORDS, 5000) - .put(MESSAGE_INTERVAL, 100) - .build())); - final UUID sourceId = source.getSourceId(); - final UUID destinationId = testHarness.createPostgresDestination().getDestinationId(); - final SourceDiscoverSchemaRead discoverResult = testHarness.discoverSourceSchemaWithId(sourceId); - final AirbyteCatalog catalog = discoverResult.getCatalog(); - catalog.getStreams().forEach(s -> s.getConfig() - .syncMode(SyncMode.INCREMENTAL) - .selected(true) - .cursorField(List.of(COLUMN_ID)) - .destinationSyncMode(DestinationSyncMode.APPEND_DEDUP) - .primaryKey(List.of(List.of(COLUMN_NAME)))); - - LOGGER.info("Testing manual sync when temporal is in a terminal state"); - final UUID connectionId = - testHarness.createConnection(new 
TestConnectionCreate.Builder( - sourceId, - destinationId, - catalog, - discoverResult.getCatalogId()) - .build()) - .getConnectionId(); - - LOGGER.info("Starting first manual sync"); - final JobInfoRead firstJobInfo = testHarness.syncConnection(connectionId); - LOGGER.info("Terminating workflow during first sync"); - testHarness.terminateTemporalWorkflow(connectionId); - - LOGGER.info("Submitted another manual sync"); - testHarness.syncConnection(connectionId); - - LOGGER.info("Waiting for workflow to be recreated..."); - Thread.sleep(500); - - final WorkflowState workflowState = testHarness.getWorkflowState(connectionId); - assertTrue(workflowState.isRunning()); - assertTrue(workflowState.isSkipScheduling()); - - // verify that the first manual sync was marked as failed - final JobInfoRead terminatedJobInfo = testHarness.getJobInfoRead(firstJobInfo.getJob().getId()); - assertEquals(JobStatus.FAILED, terminatedJobInfo.getJob().getStatus()); - } - - @Test - @DisabledIfEnvironmentVariable(named = IS_GKE, - matches = TRUE, - disabledReason = DISABLE_TEMPORAL_TESTS_IN_GKE) - void testResetConnectionRepairsWorkflowWhenWorkflowUnreachable() throws Exception { - // This test only covers the specific behavior of updating a connection that does not have an - // underlying temporal workflow. 
- final UUID sourceId = testHarness.createPostgresSource().getSourceId(); - final UUID destinationId = testHarness.createPostgresDestination().getDestinationId(); - final SourceDiscoverSchemaRead discoverResult = testHarness.discoverSourceSchemaWithId(sourceId); - final AirbyteCatalog catalog = discoverResult.getCatalog(); - catalog.getStreams().forEach(s -> s.getConfig() - .selected(true) - .syncMode(SyncMode.INCREMENTAL) - .cursorField(List.of(COLUMN_ID)) - .destinationSyncMode(DestinationSyncMode.APPEND_DEDUP) - .primaryKey(List.of(List.of(COLUMN_NAME)))); - - LOGGER.info("Testing reset connection when temporal is in a terminal state"); - final UUID connectionId = - testHarness.createConnection(new TestConnectionCreate.Builder( - sourceId, - destinationId, - catalog, - discoverResult.getCatalogId()) - .build()) - .getConnectionId(); - - testHarness.terminateTemporalWorkflow(connectionId); - - final JobInfoRead jobInfoRead = testHarness.resetConnection(connectionId); - assertEquals(JobConfigType.RESET_CONNECTION, jobInfoRead.getJob().getConfigType()); - } - - @Test - void testResetCancelsRunningSync() throws Exception { - final SourceDefinitionRead sourceDefinition = testHarness.createE2eSourceDefinition( - workspaceId); - - final SourceRead source = testHarness.createSource( - E2E_TEST_SOURCE + UUID.randomUUID(), - workspaceId, - sourceDefinition.getSourceDefinitionId(), - Jsons.jsonNode(ImmutableMap.builder() - .put(TYPE, INFINITE_FEED) - .put(MESSAGE_INTERVAL, 1000) - .put(MAX_RECORDS, Duration.ofMinutes(5).toSeconds()) - .build())); - - final UUID sourceId = source.getSourceId(); - final UUID destinationId = testHarness.createPostgresDestination().getDestinationId(); - final SourceDiscoverSchemaRead discoverResult = testHarness.discoverSourceSchemaWithId(sourceId); - final AirbyteCatalog catalog = discoverResult.getCatalog(); - final SyncMode srcSyncMode = SyncMode.FULL_REFRESH; - final DestinationSyncMode dstSyncMode = DestinationSyncMode.OVERWRITE; - 
catalog.getStreams().forEach(s -> s.getConfig().syncMode(srcSyncMode).selected(true).destinationSyncMode(dstSyncMode)); - final UUID connectionId = - testHarness.createConnection(new TestConnectionCreate.Builder( - sourceId, - destinationId, - catalog, - discoverResult.getCatalogId()) - .build()) - .getConnectionId(); - final JobInfoRead connectionSyncRead = testHarness.syncConnection(connectionId); - - // wait to get out of PENDING - final JobRead jobRead = testHarness.waitWhileJobHasStatus(connectionSyncRead.getJob(), Set.of(JobStatus.PENDING)); - assertEquals(JobStatus.RUNNING, jobRead.getStatus()); - - // send reset request while sync is still running - final JobInfoRead jobInfoRead = testHarness.resetConnection(connectionId); - - // verify that sync job was cancelled - final JobRead connectionSyncReadAfterReset = testHarness.getJobInfoRead(connectionSyncRead.getJob().getId()).getJob(); - assertEquals(JobStatus.CANCELLED, connectionSyncReadAfterReset.getStatus()); - - // wait for the reset to complete - testHarness.waitForSuccessfulJob(jobInfoRead.getJob()); - // TODO enable once stream status for resets has been fixed - // testHarness.assertStreamStatuses(workspaceId, connectionId, StreamStatusRunState.COMPLETE, - // StreamStatusJobType.RESET); - } - - @Test - @DisabledIfEnvironmentVariable(named = IS_GKE, - matches = TRUE, - disabledReason = SLOW_TEST_IN_GKE) - void testSyncAfterUpgradeToPerStreamState(final TestInfo testInfo) throws Exception { - LOGGER.info("Starting {}", testInfo.getDisplayName()); - final SourceRead source = testHarness.createPostgresSource(); - final UUID sourceId = source.getSourceId(); - final UUID sourceDefinitionId = source.getSourceDefinitionId(); - final UUID destinationId = testHarness.createPostgresDestination().getDestinationId(); - final SourceDiscoverSchemaRead discoverResult = testHarness.discoverSourceSchemaWithId(sourceId); - final AirbyteCatalog catalog = discoverResult.getCatalog(); - - // Fetch the current/most recent 
source definition version - final SourceDefinitionRead sourceDefinitionRead = testHarness.getSourceDefinition(sourceDefinitionId); - final String currentSourceDefintionVersion = sourceDefinitionRead.getDockerImageTag(); - - // Set the source to a version that does not support per-stream state - LOGGER.info("Setting source connector to pre-per-stream state version {}...", - AcceptanceTestHarness.POSTGRES_SOURCE_LEGACY_CONNECTOR_VERSION); - testHarness.updateSourceDefinitionVersion(sourceDefinitionId, AcceptanceTestHarness.POSTGRES_SOURCE_LEGACY_CONNECTOR_VERSION); - - catalog.getStreams().forEach(s -> s.getConfig() - .syncMode(SyncMode.INCREMENTAL) - .selected(true) - .cursorField(List.of(COLUMN_ID)) - .destinationSyncMode(DestinationSyncMode.APPEND)); - final var conn = - testHarness.createConnection(new TestConnectionCreate.Builder( - sourceId, - destinationId, - catalog, - discoverResult.getCatalogId()) - .build()); - LOGGER.info("Beginning {} sync 1", testInfo.getDisplayName()); - - final var connectionId = conn.getConnectionId(); - final JobInfoRead connectionSyncRead1 = testHarness.syncConnection(connectionId); - testHarness.waitForSuccessfulJob(connectionSyncRead1.getJob()); - LOGGER.info(STATE_AFTER_SYNC_ONE, testHarness.getConnectionState(connectionId)); - - Asserts.assertSourceAndDestinationDbRawRecordsInSync( - testHarness.getSourceDatabase(), testHarness.getDestinationDatabase(), PUBLIC_SCHEMA_NAME, - conn.getNamespaceFormat(), - false, WITHOUT_SCD_TABLE); - - // Set source to a version that supports per-stream state - testHarness.updateSourceDefinitionVersion(sourceDefinitionId, currentSourceDefintionVersion); - LOGGER.info("Upgraded source connector per-stream state supported version {}.", currentSourceDefintionVersion); - - // add new records and run again. - final Database src = testHarness.getSourceDatabase(); - final var dst = testHarness.getDestinationDatabase(); - // get contents of source before mutating records. 
- final List expectedRecords = testHarness.retrieveRecordsFromDatabase(src, STREAM_NAME); - expectedRecords.add(Jsons.jsonNode(ImmutableMap.builder().put(COLUMN_ID, 6).put(COLUMN_NAME, GERALT).build())); - // add a new record - src.query(ctx -> ctx.execute("INSERT INTO id_and_name(id, name) VALUES(6, 'geralt')")); - // mutate a record that was already synced with out updating its cursor value. if we are actually - // full refreshing, this record will appear in the output and cause the test to fail. if we are, - // correctly, doing incremental, we will not find this value in the destination. - src.query(ctx -> ctx.execute("UPDATE id_and_name SET name='yennefer' WHERE id=2")); - - LOGGER.info("Starting {} sync 2", testInfo.getDisplayName()); - final JobInfoRead connectionSyncRead2 = testHarness.syncConnection(connectionId); - testHarness.waitForSuccessfulJob(connectionSyncRead2.getJob()); - LOGGER.info(STATE_AFTER_SYNC_TWO, testHarness.getConnectionState(connectionId)); - - Asserts.assertRawDestinationContains(dst, expectedRecords, conn.getNamespaceFormat(), STREAM_NAME); - - // reset back to no data. - LOGGER.info("Starting {} reset", testInfo.getDisplayName()); - final JobInfoRead jobInfoRead = testHarness.resetConnection(connectionId); - testHarness.waitWhileJobHasStatus(jobInfoRead.getJob(), - Sets.newHashSet(JobStatus.PENDING, JobStatus.RUNNING, JobStatus.INCOMPLETE, JobStatus.FAILED)); - // This is a band-aid to prevent some race conditions where the job status was updated but we may - // still be cleaning up some data in the reset table. 
This would be an argument for reworking the - // source of truth of the replication workflow state to be in DB rather than in Memory and - // serialized automagically by temporal - testHarness.waitWhileJobIsRunning(jobInfoRead.getJob(), Duration.ofMinutes(1)); - - LOGGER.info("state after reset: {}", testHarness.getConnectionState(connectionId)); - - Asserts.assertRawDestinationContains(dst, Collections.emptyList(), conn.getNamespaceFormat(), STREAM_NAME); - - // sync one more time. verify it is the equivalent of a full refresh. - final String expectedState = - """ - { - "cursor":"6", - "version":2, - "state_type":"cursor_based", - "stream_name":"id_and_name", - "cursor_field":["id"], - "stream_namespace":"public", - "cursor_record_count":1}" - """; - LOGGER.info("Starting {} sync 3", testInfo.getDisplayName()); - final JobInfoRead connectionSyncRead3 = testHarness.syncConnection(connectionId); - testHarness.waitForSuccessfulJob(connectionSyncRead3.getJob()); - final ConnectionState state = testHarness.getConnectionState(connectionId); - LOGGER.info("state after sync 3: {}", state); - - Asserts.assertSourceAndDestinationDbRawRecordsInSync( - testHarness.getSourceDatabase(), testHarness.getDestinationDatabase(), PUBLIC_SCHEMA_NAME, - conn.getNamespaceFormat(), - false, WITHOUT_SCD_TABLE); - assertNotNull(state.getStreamState()); - assertEquals(1, state.getStreamState().size()); - final StreamState idAndNameState = state.getStreamState().get(0); - assertEquals(new StreamDescriptor().namespace(PUBLIC).name(STREAM_NAME), idAndNameState.getStreamDescriptor()); - assertEquals(Jsons.deserialize(expectedState), idAndNameState.getStreamState()); - } - - @Test - @DisabledIfEnvironmentVariable(named = IS_GKE, - matches = TRUE, - disabledReason = SLOW_TEST_IN_GKE) - void testSyncAfterUpgradeToPerStreamStateWithNoNewData(final TestInfo testInfo) throws Exception { - LOGGER.info("Starting {}", testInfo.getDisplayName()); - final SourceRead source = 
testHarness.createPostgresSource(); - final UUID sourceId = source.getSourceId(); - final UUID sourceDefinitionId = source.getSourceDefinitionId(); - final UUID destinationId = testHarness.createPostgresDestination().getDestinationId(); - final SourceDiscoverSchemaRead discoverResult = testHarness.discoverSourceSchemaWithId(sourceId); - final AirbyteCatalog catalog = discoverResult.getCatalog(); - - // Fetch the current/most recent source definition version - final SourceDefinitionRead sourceDefinitionRead = testHarness.getSourceDefinition(sourceDefinitionId); - final String currentSourceDefintionVersion = sourceDefinitionRead.getDockerImageTag(); - - // Set the source to a version that does not support per-stream state - LOGGER.info("Setting source connector to pre-per-stream state version {}...", - AcceptanceTestHarness.POSTGRES_SOURCE_LEGACY_CONNECTOR_VERSION); - testHarness.updateSourceDefinitionVersion(sourceDefinitionId, AcceptanceTestHarness.POSTGRES_SOURCE_LEGACY_CONNECTOR_VERSION); - - catalog.getStreams().forEach(s -> s.getConfig() - .syncMode(SyncMode.INCREMENTAL) - .selected(true) - .cursorField(List.of(COLUMN_ID)) - .destinationSyncMode(DestinationSyncMode.APPEND)); - final var conn = - testHarness.createConnection(new TestConnectionCreate.Builder( - sourceId, - destinationId, - catalog, - discoverResult.getCatalogId()) - .build()); - LOGGER.info("Beginning {} sync 1", testInfo.getDisplayName()); - final var connectionId = conn.getConnectionId(); - final JobInfoRead connectionSyncRead1 = testHarness.syncConnection(connectionId); - testHarness.waitForSuccessfulJob(connectionSyncRead1.getJob()); - LOGGER.info(STATE_AFTER_SYNC_ONE, testHarness.getConnectionState(connectionId)); - - Asserts.assertSourceAndDestinationDbRawRecordsInSync( - testHarness.getSourceDatabase(), testHarness.getDestinationDatabase(), PUBLIC_SCHEMA_NAME, - conn.getNamespaceFormat(), - false, WITHOUT_SCD_TABLE); - - // Set source to a version that supports per-stream state - 
testHarness.updateSourceDefinitionVersion(sourceDefinitionId, currentSourceDefintionVersion); - LOGGER.info("Upgraded source connector per-stream state supported version {}.", currentSourceDefintionVersion); - - // sync one more time. verify that nothing has been synced - LOGGER.info("Starting {} sync 2", testInfo.getDisplayName()); - final JobInfoRead connectionSyncRead2 = testHarness.syncConnection(connectionId); - testHarness.waitForSuccessfulJob(connectionSyncRead2.getJob()); - LOGGER.info(STATE_AFTER_SYNC_TWO, testHarness.getConnectionState(connectionId)); - - final JobInfoRead syncJob = testHarness.getJobInfoRead(connectionSyncRead2.getJob().getId()); - final Optional result = syncJob.getAttempts().stream() - .min((a, b) -> Long.compare(b.getAttempt().getEndedAt(), a.getAttempt().getEndedAt())); - - assertTrue(result.isPresent()); - assertEquals(0, result.get().getAttempt().getRecordsSynced()); - assertEquals(0, result.get().getAttempt().getTotalStats().getRecordsEmitted()); - Asserts.assertSourceAndDestinationDbRawRecordsInSync( - testHarness.getSourceDatabase(), testHarness.getDestinationDatabase(), PUBLIC_SCHEMA_NAME, - conn.getNamespaceFormat(), - false, WITHOUT_SCD_TABLE); - } - @Test @DisabledIfEnvironmentVariable(named = IS_GKE, matches = TRUE, @@ -840,7 +368,10 @@ void testMultipleSchemasAndTablesSyncAndReset() throws Exception { assertDestinationDbEmpty(testHarness.getDestinationDatabase()); } + // TODO (Angel): Enable once we fix the docker compose tests @Test + @EnabledIfEnvironmentVariable(named = KUBE, + matches = TRUE) @DisabledIfEnvironmentVariable(named = IS_GKE, matches = TRUE, disabledReason = SLOW_TEST_IN_GKE) @@ -854,8 +385,10 @@ void testPartialResetResetAllWhenSchemaIsModified(final TestInfo testInfo) throw ctx.createTableIfNotExists(additionalTable) .columns(DSL.field("id", SQLDataType.INTEGER), DSL.field(FIELD, SQLDataType.VARCHAR)).execute(); ctx.truncate(additionalTable).execute(); - 
ctx.insertInto(DSL.table(additionalTable)).columns(DSL.field("id"), DSL.field(FIELD)).values(1, "1").execute(); - ctx.insertInto(DSL.table(additionalTable)).columns(DSL.field("id"), DSL.field(FIELD)).values(2, "2").execute(); + ctx.insertInto(DSL.table(additionalTable)).columns(DSL.field("id"), DSL.field(FIELD)).values(1, + "1").execute(); + ctx.insertInto(DSL.table(additionalTable)).columns(DSL.field("id"), DSL.field(FIELD)).values(2, + "2").execute(); return null; }); UUID sourceId = testHarness.createPostgresSource().getSourceId(); @@ -896,15 +429,18 @@ void testPartialResetResetAllWhenSchemaIsModified(final TestInfo testInfo) throw // Update with refreshed catalog AirbyteCatalog refreshedCatalog = testHarness.discoverSourceSchemaWithoutCache(sourceId); refreshedCatalog.getStreams().forEach(s -> s.getConfig().selected(true)); - WebBackendConnectionUpdate update = testHarness.getUpdateInput(connection, refreshedCatalog, operation); + WebBackendConnectionUpdate update = testHarness.getUpdateInput(connection, refreshedCatalog, + operation); testHarness.webBackendUpdateConnection(update); // Wait until the sync from the UpdateConnection is finished - final JobRead syncFromTheUpdate1 = testHarness.waitUntilTheNextJobIsStarted(connection.getConnectionId(), syncRead.getJob().getId()); + final JobRead syncFromTheUpdate1 = + testHarness.waitUntilTheNextJobIsStarted(connection.getConnectionId(), + syncRead.getJob().getId()); testHarness.waitForSuccessfulJob(syncFromTheUpdate1); - // We do not check that the source and the dest are in sync here because removing a stream doesn't - // remove that + // We do not check that the source and the dest are in sync here because removing a stream + // doesn't remove that Asserts.assertStreamStateContainsStream(testHarness, connection.getConnectionId(), List.of( new StreamDescriptor().name(ID_AND_NAME).namespace(PUBLIC))); @@ -918,8 +454,10 @@ void testPartialResetResetAllWhenSchemaIsModified(final TestInfo testInfo) throw 
ctx.createTableIfNotExists(additionalTable) .columns(DSL.field("id", SQLDataType.INTEGER), DSL.field(FIELD, SQLDataType.VARCHAR)).execute(); ctx.truncate(additionalTable).execute(); - ctx.insertInto(DSL.table(additionalTable)).columns(DSL.field("id"), DSL.field(FIELD)).values(3, "3").execute(); - ctx.insertInto(DSL.table(additionalTable)).columns(DSL.field("id"), DSL.field(FIELD)).values(4, "4").execute(); + ctx.insertInto(DSL.table(additionalTable)).columns(DSL.field("id"), DSL.field(FIELD)).values(3, + "3").execute(); + ctx.insertInto(DSL.table(additionalTable)).columns(DSL.field("id"), DSL.field(FIELD)).values(4, + "4").execute(); return null; }); @@ -929,11 +467,13 @@ void testPartialResetResetAllWhenSchemaIsModified(final TestInfo testInfo) throw update = testHarness.getUpdateInput(connection, refreshedCatalog, operation); testHarness.webBackendUpdateConnection(update); - final JobRead syncFromTheUpdate2 = testHarness.waitUntilTheNextJobIsStarted(connection.getConnectionId(), syncFromTheUpdate1.getId()); + final JobRead syncFromTheUpdate2 = + testHarness.waitUntilTheNextJobIsStarted(connection.getConnectionId(), + syncFromTheUpdate1.getId()); testHarness.waitForSuccessfulJob(syncFromTheUpdate2); - // We do not check that the source and the dest are in sync here because removing a stream doesn't - // remove that + // We do not check that the source and the dest are in sync here because removing a stream + // doesn't remove that Asserts.assertSourceAndDestinationDbRawRecordsInSync( testHarness.getSourceDatabase(), testHarness.getDestinationDatabase(), PUBLIC_SCHEMA_NAME, connection.getNamespaceFormat(), true, WITHOUT_SCD_TABLE); @@ -947,12 +487,15 @@ void testPartialResetResetAllWhenSchemaIsModified(final TestInfo testInfo) throw sourceDb.query(ctx -> { ctx.dropTableIfExists(additionalTable).execute(); ctx.createTableIfNotExists(additionalTable) - .columns(DSL.field("id", SQLDataType.INTEGER), DSL.field(FIELD, SQLDataType.VARCHAR), DSL.field("another_field", 
SQLDataType.VARCHAR)) + .columns(DSL.field("id", SQLDataType.INTEGER), DSL.field(FIELD, SQLDataType.VARCHAR), + DSL.field("another_field", SQLDataType.VARCHAR)) .execute(); ctx.truncate(additionalTable).execute(); - ctx.insertInto(DSL.table(additionalTable)).columns(DSL.field("id"), DSL.field(FIELD), DSL.field("another_field")).values(3, "3", "three") + ctx.insertInto(DSL.table(additionalTable)).columns(DSL.field("id"), DSL.field(FIELD), + DSL.field("another_field")).values(3, "3", "three") .execute(); - ctx.insertInto(DSL.table(additionalTable)).columns(DSL.field("id"), DSL.field(FIELD), DSL.field("another_field")).values(4, "4", "four") + ctx.insertInto(DSL.table(additionalTable)).columns(DSL.field("id"), DSL.field(FIELD), + DSL.field("another_field")).values(4, "4", "four") .execute(); return null; }); @@ -963,11 +506,13 @@ void testPartialResetResetAllWhenSchemaIsModified(final TestInfo testInfo) throw update = testHarness.getUpdateInput(connection, refreshedCatalog, operation); testHarness.webBackendUpdateConnection(update); - final JobRead syncFromTheUpdate3 = testHarness.waitUntilTheNextJobIsStarted(connection.getConnectionId(), syncFromTheUpdate2.getId()); + final JobRead syncFromTheUpdate3 = + testHarness.waitUntilTheNextJobIsStarted(connection.getConnectionId(), + syncFromTheUpdate2.getId()); testHarness.waitForSuccessfulJob(syncFromTheUpdate3); - // We do not check that the source and the dest are in sync here because removing a stream doesn't - // remove that + // We do not check that the source and the dest are in sync here because removing a stream + // doesn't remove that Asserts.assertSourceAndDestinationDbRawRecordsInSync( testHarness.getSourceDatabase(), testHarness.getDestinationDatabase(), PUBLIC_SCHEMA_NAME, connection.getNamespaceFormat(), true, WITHOUT_SCD_TABLE); @@ -976,7 +521,6 @@ void testPartialResetResetAllWhenSchemaIsModified(final TestInfo testInfo) throw new StreamDescriptor().name(additionalTable).namespace(PUBLIC))); } - // TODO: 
this test needs a cleanup @Test void testIncrementalDedupeSyncRemoveOneColumn() throws Exception { final UUID sourceId = testHarness.createPostgresSource().getSourceId(); @@ -1036,70 +580,6 @@ void testIncrementalDedupeSyncRemoveOneColumn() throws Exception { testHarness.assertNormalizedDestinationContainsIdColumn(conn.getNamespaceFormat(), expectedNormalizedRecords); } - @Test - @Disabled - void testFailureTimeout() throws Exception { - final SourceDefinitionRead sourceDefinition = testHarness.createE2eSourceDefinition( - workspaceId); - final DestinationDefinitionRead destinationDefinition = testHarness.createE2eDestinationDefinition( - workspaceId); - - final SourceRead source = testHarness.createSource( - E2E_TEST_SOURCE + UUID.randomUUID(), - workspaceId, - sourceDefinition.getSourceDefinitionId(), - Jsons.jsonNode(ImmutableMap.builder() - .put(TYPE, INFINITE_FEED) - .put(MAX_RECORDS, 1000) - .put(MESSAGE_INTERVAL, 100) - .build())); - - // Destination fails after processing 5 messages, so the job should fail after the graceful close - // timeout of 1 minute - final DestinationRead destination = testHarness.createDestination( - "E2E Test Destination -" + UUID.randomUUID(), - workspaceId, - destinationDefinition.getDestinationDefinitionId(), - Jsons.jsonNode(ImmutableMap.builder() - .put(TYPE, "FAILING") - .put("num_messages", 5) - .build())); - - final UUID sourceId = source.getSourceId(); - final UUID destinationId = destination.getDestinationId(); - final SourceDiscoverSchemaRead discoverResult = testHarness.discoverSourceSchemaWithId(sourceId); - final AirbyteCatalog catalog = discoverResult.getCatalog(); - - final UUID connectionId = - testHarness.createConnection(new TestConnectionCreate.Builder( - sourceId, - destinationId, - catalog, - discoverResult.getCatalogId()) - .build()) - .getConnectionId(); - - final JobInfoRead connectionSyncRead1 = testHarness.syncConnection(connectionId); - - // wait to get out of pending. 
- final JobRead runningJob = testHarness.waitWhileJobHasStatus(connectionSyncRead1.getJob(), Sets.newHashSet(JobStatus.PENDING)); - - // wait for job for max of 3 minutes, by which time the job attempt should have failed - testHarness.waitWhileJobHasStatus(runningJob, Sets.newHashSet(JobStatus.RUNNING), Duration.ofMinutes(3)); - - final JobInfoRead jobInfo = testHarness.getJobInfoRead(runningJob.getId()); - // Only look at the first attempt. It's possible that in the time between leaving RUNNING and - // retrieving the job info, we'll have started a new attempt. - final AttemptInfoRead attemptInfoRead = jobInfo.getAttempts().get(0); - - // assert that the job attempt failed, and cancel the job regardless of status to prevent retries - try { - assertEquals(AttemptStatus.FAILED, attemptInfoRead.getAttempt().getStatus()); - } finally { - testHarness.cancelSync(runningJob.getId()); - } - } - static void assertDestinationDbEmpty(final Database dst) throws Exception { final Set destinationTables = Databases.listAllTables(dst); diff --git a/airbyte-tests/src/test-acceptance/java/io/airbyte/test/acceptance/WorkloadBasicAcceptanceTests.java b/airbyte-tests/src/test-acceptance/java/io/airbyte/test/acceptance/WorkloadBasicAcceptanceTests.java index 8fde77fa427..572f662f8dd 100644 --- a/airbyte-tests/src/test-acceptance/java/io/airbyte/test/acceptance/WorkloadBasicAcceptanceTests.java +++ b/airbyte-tests/src/test-acceptance/java/io/airbyte/test/acceptance/WorkloadBasicAcceptanceTests.java @@ -10,18 +10,12 @@ import static io.airbyte.test.acceptance.AcceptanceTestsResources.TRUE; import static org.junit.jupiter.api.Assertions.assertEquals; -import io.airbyte.api.client.invoker.generated.ApiException; import io.airbyte.api.client.model.generated.AirbyteCatalog; import io.airbyte.api.client.model.generated.CheckConnectionRead; import io.airbyte.api.client.model.generated.CheckConnectionRead.StatusEnum; -import java.io.IOException; -import java.net.URISyntaxException; -import 
java.sql.SQLException; import java.util.UUID; -import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.condition.DisabledIfEnvironmentVariable; @@ -34,19 +28,15 @@ */ public class WorkloadBasicAcceptanceTests { - static final AcceptanceTestsResources testResources = new AcceptanceTestsResources(); + AcceptanceTestsResources testResources = new AcceptanceTestsResources(); static final UUID RUN_WITH_WORKLOAD_WITHOUT_DOC_STORE_WORKSPACE_ID = UUID.fromString("3d2985a0-a412-45f4-9124-e15800b739be"); static final UUID RUN_CHECK_WITH_WORKLOAD_WORKSPACE_ID = UUID.fromString("1bdcfb61-219b-4290-be4f-12f9ac5461be"); static final UUID RUN_DISCOVER_WITH_WORKLOAD_WORKSPACE_ID = UUID.fromString("3851861d-ac0b-440c-bd60-408cf9e7fc0e"); - @BeforeAll - static void init() throws URISyntaxException, IOException, InterruptedException, ApiException { - testResources.init(); - } - @BeforeEach - void setup() throws SQLException, URISyntaxException, IOException, ApiException { + void setup() throws Exception { + testResources.init(); testResources.setup(); } @@ -55,18 +45,20 @@ void tearDown() { testResources.tearDown(); } - @AfterAll - static void end() { + @AfterEach + void end() { testResources.end(); } @Test + @EnabledIfEnvironmentVariable(named = KUBE, + matches = TRUE) @DisabledIfEnvironmentVariable(named = IS_GKE, matches = TRUE, disabledReason = DISABLE_TEMPORAL_TESTS_IN_GKE) void testSyncWithWorkload() throws Exception { - // Create workspace with static ID for test which is used in the flags.yaml to perform an override - // in order to exercise the workload path. + // Create workspace with static ID for test which is used in the flags.yaml to perform an + // override in order to exercise the workload path. 
testResources.getTestHarness().createWorkspaceWithId(RUN_WITH_WORKLOAD_WITHOUT_DOC_STORE_WORKSPACE_ID); testResources.runSmallSyncForAWorkspaceId(RUN_WITH_WORKLOAD_WITHOUT_DOC_STORE_WORKSPACE_ID); diff --git a/airbyte-tests/src/test-acceptance/resources/junit-platform.properties b/airbyte-tests/src/test-acceptance/resources/junit-platform.properties index 9e63e3e8955..1fee5143b70 100644 --- a/airbyte-tests/src/test-acceptance/resources/junit-platform.properties +++ b/airbyte-tests/src/test-acceptance/resources/junit-platform.properties @@ -1,2 +1 @@ -junit.jupiter.execution.parallel.enabled=true junit.jupiter.execution.parallel.mode.classes.default=concurrent diff --git a/airbyte-webapp/.eslintLegacyFolderStructure.js b/airbyte-webapp/.eslintLegacyFolderStructure.js index 3e27f12ffa5..4e3b3a10f8b 100644 --- a/airbyte-webapp/.eslintLegacyFolderStructure.js +++ b/airbyte-webapp/.eslintLegacyFolderStructure.js @@ -99,9 +99,6 @@ module.exports = [ "src/views/Connector/ConnectorCard/useAnalyticsTrackFunctions.tsx", "src/views/Connector/ConnectorCard/ConnectorCard.tsx", "src/views/Connector/ConnectorCard/index.tsx", - "src/views/Connector/RequestConnectorModal/RequestConnectorModal.tsx", - "src/views/Connector/RequestConnectorModal/types.ts", - "src/views/Connector/RequestConnectorModal/index.tsx", "src/views/Connector/ConnectorDocumentationLayout/ResourceNotAvailable/useAnalyticsTrackFunctions.tsx", "src/views/Connector/ConnectorDocumentationLayout/ResourceNotAvailable/ResourceNotAvailable.tsx", "src/views/Connector/ConnectorDocumentationLayout/ResourceNotAvailable/index.tsx", diff --git a/airbyte-webapp/.gitignore b/airbyte-webapp/.gitignore index a00b2a3d5a3..a4963661feb 100644 --- a/airbyte-webapp/.gitignore +++ b/airbyte-webapp/.gitignore @@ -34,9 +34,6 @@ storybook-static/ # Generated by our build-info plugin /public/buildInfo.json -# Generated at build time -/public/docs - # Generated file to make the CDK version available in the webapp 
/src/components/connectorBuilder/cdk.ts diff --git a/airbyte-webapp/.ignore b/airbyte-webapp/.ignore new file mode 100644 index 00000000000..f3be8705c21 --- /dev/null +++ b/airbyte-webapp/.ignore @@ -0,0 +1,4 @@ +# Allow VSCode to find some files that are gitignored in file search +!.experiments.json +!.env.* +!src/core/api/generated/** diff --git a/airbyte-webapp/.storybook/withProvider.tsx b/airbyte-webapp/.storybook/withProvider.tsx index e37d7e9c8ef..ae491451483 100644 --- a/airbyte-webapp/.storybook/withProvider.tsx +++ b/airbyte-webapp/.storybook/withProvider.tsx @@ -7,7 +7,6 @@ import { QueryClientProvider, QueryClient } from "@tanstack/react-query"; import messages from "../src/locales/en.json"; import { FeatureService } from "../src/core/services/features"; -import { ConfigServiceProvider, config } from "../src/core/config"; import { DocumentationPanelProvider } from "../src/views/Connector/ConnectorDocumentationLayout/DocumentationPanelContext"; import { AppMonitoringServiceProvider } from "../src/hooks/services/AppMonitoringService"; import { AirbyteThemeProvider } from "../src/hooks/theme/useAirbyteTheme"; @@ -33,13 +32,11 @@ export const withProviders = (getStory: Parameters[0]) => ( b: (chunk) => {chunk}, }} > - - - - {getStory()} - - - + + + {getStory()} + + diff --git a/airbyte-webapp/Dockerfile b/airbyte-webapp/Dockerfile index d47cf95d3af..b396496a25c 100644 --- a/airbyte-webapp/Dockerfile +++ b/airbyte-webapp/Dockerfile @@ -1,8 +1,17 @@ -ARG NGINX_IMAGE=nginx:alpine +ARG NGINX_IMAGE=nginxinc/nginx-unprivileged:alpine3.18 FROM ${NGINX_IMAGE} -EXPOSE 80 +EXPOSE 8080 + +USER root COPY bin/build /usr/share/nginx/html -RUN find /usr/share/nginx/html -type d -exec chmod 755 '{}' \; -o -type f -exec chmod 644 '{}' \; + +RUN < { + val ignores = mutableListOf() + f.forEachLine { line -> + //ignore comments and empty lines + if (!line.startsWith('#') && line.isNotEmpty()) { + ignores.add(line) + } + } + return ignores +} + + +// Use the node version that's 
defined in the .nvmrc file +val nodeVersion = file("${projectDir}/.nvmrc").readText().trim() + +// Read pnpm version to use from package.json engines.pnpm entry +val parsedJson = JsonSlurper().parse(FileReader("${projectDir}/package.json")) as Map<*, *> // Cast to Map +val engines = parsedJson["engines"] as? Map<*, *> // Safely cast to Map if 'engines' exists +val pnpmVer = engines?.get("pnpm")?.toString()?.trim() // Extract 'pnpm' as String and trim + +/** + * A list of all files outside the webapp folder, that the webapp build depends on, i.e. + * if those change we can't reuse a cached build. + */ +val outsideWebappDependencies = listOf( + "../airbyte-api/src/main/openapi/config.yaml", + "../airbyte-api/src/main/openapi/cloud-config.yaml", + "../airbyte-connector-builder-server/src/main/openapi/openapi.yaml", + "../airbyte-connector-builder-resources/CDK_VERSION", +) + +configure { + download = true + version = nodeVersion + pnpmVersion = pnpmVer + distBaseUrl = "https://nodejs.org/dist" +} + +tasks.named("pnpmInstall") { + /* + Add patches folder to inputs of pnpmInstall task, since it has pnpm-lock.yml as an output + thus wouldn't rerun in case a patch get changed + */ + inputs.dir("patches") +} + +// fileTree to watch node_modules, but exclude the .cache dir since that might have changes on every build +val nodeModules = fileTree("node_modules") { + exclude(".cache") +} + +/** + * All files inside the webapp folder that aren't gitignored + */ +val allFiles = fileTree(".") { + exclude(parseIgnoreFile(file("../.gitignore"))) + exclude(parseIgnoreFile(file(".gitignore"))) + exclude(parseIgnoreFile(file("./src/core/api/generated/.gitignore"))) + exclude(parseIgnoreFile(file("./src/core/api/types/.gitignore"))) +} + +tasks.register("pnpmBuild") { + dependsOn(tasks.named("pnpmInstall")) + + environment.put("VERSION", rootProject.ext.get("version") as String) + + args = listOf("build") + + // The WEBAPP_BUILD_CLOUD_ENV environment variable is an input for this task, 
since it changes for which env we're building the webapp + inputs.property("cloudEnv", System.getenv("WEBAPP_BUILD_CLOUD_ENV") ?: "") + inputs.files(allFiles, outsideWebappDependencies) + + outputs.dir("build/app") +} + +tasks.register("test") { + dependsOn(tasks.named("pnpmInstall")) + + args = listOf("run", "test:ci") + inputs.files(allFiles, outsideWebappDependencies) + + /* + The test has no outputs, thus we always treat the outputs up to date + as long as the inputs have not changed + */ + outputs.upToDateWhen { true } +} + +tasks.register("e2etest") { + dependsOn(tasks.named("pnpmInstall")) + + /* + If the cypressWebappKey property has been set from the outside (see tools/bin/e2e_test.sh) + we'll record the cypress session, otherwise we're not recording + */ + val recordCypress = project.hasProperty("cypressWebappKey") && project.property("cypressWebappKey") as Boolean + if (recordCypress) { + environment.put("CYPRESS_KEY", project.property("cypressWebappKey") as String) + args = listOf("run", "cypress:ci:record") + } else { + args = listOf("run", "cypress:ci") + } + + /* + Mark the outputs as never up to date, to ensure we always run the tests. + We want this because they are e2e tests and can depend on other factors e.g., external dependencies. + */ + outputs.upToDateWhen { false } +} + +tasks.register("cloudE2eTest") { + dependsOn(tasks.named("pnpmInstall")) + val recordCypress = project.hasProperty("cypressCloudWebappKey") && project.property("cypressCloudWebappKey") as Boolean + if (recordCypress) { + environment.put("CYPRESS_KEY", project.property("cypressCloudWebappKey") as String) + args = listOf("run", "cloud-test:stage:record") + } else { + args = listOf("run", "cloud-test:stage") + } + + /* + Mark the outputs as never up to date, to ensure we always run the tests. + We want this because they are e2e tests and can depend on other factors e.g., external dependencies. 
+ */ + outputs.upToDateWhen { false } +} + +//tasks.register("validateLinks") { +// dependsOn(tasks.named("pnpmInstall")) +// +// args = listOf("run", "validate-links") +// +// inputs.file("scripts/validate-links.ts") +// inputs.file("src/core/utils/links.ts") +// +// // Configure the up-to-date check to always run in CI environments +// outputs.upToDateWhen { +// System.getenv("CI") == null +// } +//} + +tasks.register("buildStorybook") { + dependsOn(tasks.named("pnpmInstall")) + + args = listOf("run", "build:storybook") + + inputs.files(allFiles, outsideWebappDependencies) + + outputs.dir("build/storybook") + + environment = mapOf( + "NODE_OPTIONS" to "--max_old_space_size=8192" + ) +} + +tasks.register("copyBuildOutput") { + dependsOn(tasks.named("copyDocker"), tasks.named("pnpmBuild")) + + from("${project.projectDir}/build/app") + into("build/docker/bin/build") +} + +tasks.register("copyNginx") { + dependsOn(tasks.named("copyDocker")) + + from("${project.projectDir}/nginx") + into("build/docker/bin/nginx") +} + +// Those tasks should be run as part of the "check" task +tasks.named("check") { + dependsOn(/* tasks.named("validateLinks"), */ tasks.named("test")) +} + +tasks.named("build") { + dependsOn(tasks.named("buildStorybook")) +} + +tasks.named("buildDockerImage") { + dependsOn(tasks.named("copyDocker"), tasks.named("copyNginx"), tasks.named("copyBuildOutput")) +} + +// Include some cloud-specific tasks only in the airbyte-platform-internal environment +if (file("${project.projectDir}/../../cloud/cloud-webapp/cloud-tasks.gradle").exists()) { + apply(from = "${project.projectDir}/../../cloud/cloud-webapp/cloud-tasks.gradle") +} diff --git a/airbyte-webapp/cypress/cloud-e2e/cloud-login.cy.ts b/airbyte-webapp/cypress/cloud-e2e/cloud-login.cy.ts index 79ff80a0edb..39ef971ea53 100644 --- a/airbyte-webapp/cypress/cloud-e2e/cloud-login.cy.ts +++ b/airbyte-webapp/cypress/cloud-e2e/cloud-login.cy.ts @@ -7,10 +7,10 @@ describe("manually logging in and out of airbyte 
cloud", () => { cy.visit("/"); // unauthenticated users are redirected to /login assertOnLoginPage(); - - cy.get("[data-testid='login.email']").type(testUser.email); - cy.get("[data-testid='login.password']").type(testUser.password); - cy.get("[data-testid='login.submit']").click(); + cy.get("button").contains("Continue with Email").click(); + cy.get("input[name=username]").type(testUser.email); + cy.get("input[name=password]").type(testUser.password); + cy.get("input[name=login]").click(); cy.hasNavigatedTo("/workspaces"); cy.selectWorkspace(); diff --git a/airbyte-webapp/cypress/commands/cloud.ts b/airbyte-webapp/cypress/commands/cloud.ts index 4feaa8998dc..02e4371dc9e 100644 --- a/airbyte-webapp/cypress/commands/cloud.ts +++ b/airbyte-webapp/cypress/commands/cloud.ts @@ -16,9 +16,11 @@ Cypress.Commands.add("login", (user: TestUserCredentials = testUser) => { } cy.visit("/login"); - cy.get("[data-testid='login.email']", { timeout: 10000 }).type(user.email); - cy.get("[data-testid='login.password']").type(user.password); - cy.get("[data-testid='login.submit']").click(); + cy.get("button").contains("Continue with Email").click(); + cy.get("input[name=username]").type(testUser.email); + cy.get("input[name=password]").type(testUser.password); + cy.get("input[name=login]").click(); + cy.hasNavigatedTo("/workspaces"); }); diff --git a/airbyte-webapp/cypress/commands/common.ts b/airbyte-webapp/cypress/commands/common.ts index 2dc5e5a0cea..22719fb6335 100644 --- a/airbyte-webapp/cypress/commands/common.ts +++ b/airbyte-webapp/cypress/commands/common.ts @@ -32,7 +32,12 @@ export const openConnectorPage = (name: string) => { export const deleteEntity = () => { cy.get("button[data-id='open-delete-modal']").click(); - cy.get("button[data-id='delete']").click(); + cy.get("input[id='confirmation-text']") + .invoke("attr", "placeholder") + .then((placeholder) => { + cy.get("input[id='confirmation-text']").type(placeholder ?? 
""); + cy.get("button[data-id='delete']").click(); + }); }; export const clearApp = () => { diff --git a/airbyte-webapp/cypress/commands/connectorBuilder.ts b/airbyte-webapp/cypress/commands/connectorBuilder.ts index 29bed847934..34c7603d46d 100644 --- a/airbyte-webapp/cypress/commands/connectorBuilder.ts +++ b/airbyte-webapp/cypress/commands/connectorBuilder.ts @@ -133,7 +133,7 @@ const SCHEMA = ' "$schema": "http://json-schema.org/schema#",\n' + ' "properties": {\n' + ' "name": {\n' + - ' "type": "string"\n' + + ' "type": [\n "string",\n "null"\n ]\n' + " }\n" + " },\n" + ' "type": "object"\n' + diff --git a/airbyte-webapp/cypress/commands/source.ts b/airbyte-webapp/cypress/commands/source.ts index eeef91a9ceb..727021e88bc 100644 --- a/airbyte-webapp/cypress/commands/source.ts +++ b/airbyte-webapp/cypress/commands/source.ts @@ -64,6 +64,7 @@ export const updateSource = (name: string, field: string, value: string, isDropd }; export const deleteSource = (name: string) => { + cy.log(`Deleting source ${name}`); cy.intercept("/api/v1/sources/delete").as("deleteSource"); goToSourcePage(); openConnectorPage(name); diff --git a/airbyte-webapp/cypress/e2e/base.cy.ts b/airbyte-webapp/cypress/e2e/base.cy.ts index b4f59dd559d..f6f1a0e4220 100644 --- a/airbyte-webapp/cypress/e2e/base.cy.ts +++ b/airbyte-webapp/cypress/e2e/base.cy.ts @@ -1,20 +1,4 @@ describe("Error handling view", () => { - it("Shows Version Mismatch page", () => { - cy.intercept("/api/v1/**", { - statusCode: 500, - body: { - error: - "Version mismatch between 0.0.1-ci and 0.0.2-ci.\nPlease upgrade or reset your Airbyte Database, see more at https://docs.airbyte.io/operator-guides/upgrading-airbyte", - }, - }); - - cy.on("uncaught:exception", () => false); - - cy.visit("/"); - - cy.get("div").contains("Version mismatch between 0.0.1-ci and 0.0.2-ci.").should("exist"); - }); - it("Shows Server Unavailable page", () => { cy.intercept("/api/v1/**", { statusCode: 502, @@ -25,6 +9,6 @@ describe("Error handling 
view", () => { cy.visit("/"); - cy.get("div").contains("Cannot reach server. The server may still be starting up.").should("exist"); + cy.get("p").contains("Airbyte is temporarily unavailable.").should("exist"); }); }); diff --git a/airbyte-webapp/cypress/e2e/connection/autoDetectSchema.cy.ts b/airbyte-webapp/cypress/e2e/connection/autoDetectSchema.cy.ts index 61a84949836..5c4f26558a4 100644 --- a/airbyte-webapp/cypress/e2e/connection/autoDetectSchema.cy.ts +++ b/airbyte-webapp/cypress/e2e/connection/autoDetectSchema.cy.ts @@ -80,7 +80,7 @@ describe("Connection - Auto-detect schema changes", () => { it("does not show non-breaking change on list page", () => { connectionListPage.visit(); connectionListPage.getSchemaChangeIcon(connection, "non_breaking").should("not.exist"); - connectionListPage.getManualSyncButton(connection).should("be.enabled"); + connectionListPage.getConnectionStateSwitch(connection).should("be.checked").and("be.enabled"); }); it("shows non-breaking change that can be saved after refresh", () => { @@ -149,7 +149,7 @@ describe("Connection - Auto-detect schema changes", () => { it("shows breaking change on list page", () => { connectionListPage.visit(); connectionListPage.getSchemaChangeIcon(connection, "breaking").should("exist"); - connectionListPage.getManualSyncButton(connection).should("be.disabled"); + connectionListPage.getConnectionStateSwitch(connection).should("not.be.checked").and("not.be.enabled"); }); it("shows breaking change that can be saved after refresh and fix", () => { diff --git a/airbyte-webapp/cypress/e2e/connection/configuration.cy.ts b/airbyte-webapp/cypress/e2e/connection/configuration.cy.ts index f370282ebeb..be5b9189bf8 100644 --- a/airbyte-webapp/cypress/e2e/connection/configuration.cy.ts +++ b/airbyte-webapp/cypress/e2e/connection/configuration.cy.ts @@ -410,9 +410,9 @@ describe("Connection Configuration", () => { createNewConnectionViaApi(pokeApiSource, jsonDestination).then((connectionResponse) => { connection = 
connectionResponse; visit(connection); + connectionSettings.goToSettingsPage(); + deleteEntity(); }); - connectionSettings.goToSettingsPage(); - deleteEntity(); }); }); @@ -537,15 +537,6 @@ describe("Connection Configuration", () => { }); }); }); - describe("Transformations tab", () => { - it("cannot edit Custom transformations form settings", () => { - cy.get("@postgresConnection").then((connection) => { - cy.visit(`/${RoutePaths.Connections}/${connection.connectionId}/${ConnectionRoutePaths.Transformation}`); - - cy.get('form[data-testid="custom-transformation-form"]').children("fieldset").should("be.disabled"); - }); - }); - }); }); describe("Disabled connection", () => { diff --git a/airbyte-webapp/cypress/e2e/connection/streamDetails.cy.ts b/airbyte-webapp/cypress/e2e/connection/streamDetails.cy.ts index edea5719619..9c10297e9c7 100644 --- a/airbyte-webapp/cypress/e2e/connection/streamDetails.cy.ts +++ b/airbyte-webapp/cypress/e2e/connection/streamDetails.cy.ts @@ -1,14 +1,14 @@ import { + createNewConnectionViaApi, createPostgresDestinationViaApi, createPostgresSourceViaApi, - createNewConnectionViaApi, } from "@cy/commands/connection"; import { - WebBackendConnectionRead, DestinationRead, DestinationSyncMode, - SyncMode, SourceRead, + SyncMode, + WebBackendConnectionRead, } from "@src/core/api/types/AirbyteClient"; import { requestDeleteConnection, requestDeleteDestination, requestDeleteSource } from "commands/api"; import { runDbQuery } from "commands/db/db"; @@ -36,27 +36,31 @@ describe("Connection - Stream details", () => { let destination: DestinationRead; let connection: WebBackendConnectionRead; - before(() => { - dropTables(); - - runDbQuery(getCreateUsersTableQuery("users"), createUserCarsTableQuery, createTableWithLotsOfColumnsQuery); - - createPostgresSourceViaApi().then((pgSource) => { - source = pgSource; - createPostgresDestinationViaApi().then((pgDestination) => { - destination = pgDestination; - createNewConnectionViaApi(source, 
destination).then((connectionResponse) => { - connection = connectionResponse; + // setup logic adapted from https://stackoverflow.com/questions/71285827/cypress-e2e-before-hook-not-working-on-retries/71377694#71377694 + // to allow retrying, as Cypress doesn't retry if `before` throws an error + let isBackendSetup = false; + let isError = false; + const setup = () => { + if (isBackendSetup === false) { + dropTables(); + + runDbQuery(getCreateUsersTableQuery("users"), createUserCarsTableQuery, createTableWithLotsOfColumnsQuery); + + return createPostgresSourceViaApi().then((pgSource) => { + source = pgSource; + createPostgresDestinationViaApi().then((pgDestination) => { + destination = pgDestination; + createNewConnectionViaApi(source, destination).then((connectionResponse) => { + connection = connectionResponse; + isBackendSetup = true; + }); }); }); - }); - }); - - beforeEach(() => { - connectionPage.visit(connection, "replication"); - }); + } + return cy.get("body"); // return a Cypress chainable so it can be 'then'ed + }; - after(() => { + const cleanup = () => { if (connection) { requestDeleteConnection({ connectionId: connection.connectionId }); } @@ -68,8 +72,26 @@ describe("Connection - Stream details", () => { } dropTables(); + }; + + beforeEach(() => { + cy.once("fail", (err) => { + isError = true; + throw err; + }); + if (isError) { + cleanup(); + isError = false; + } + + // @ts-expect-error the .then() signature between the two possibilities don't exactly match + setup().then(() => { + connectionPage.visit(connection, "replication"); + }); }); + after(cleanup); + describe("basics", () => { beforeEach(() => { streamRow.showStreamDetails(); @@ -81,11 +103,17 @@ describe("Connection - Stream details", () => { const fieldTypes = ["String", "Integer", "String", "Datetime"]; streamDetails.isSyncStreamDisabled(); + streamDetails.isSelectSyncModeHidden(); streamDetails.isNamespace("public"); streamDetails.isStreamName("users"); streamDetails.areFieldsValid({ 
names: fieldNames, dataTypes: fieldTypes }); }); + it("show sync mode dropdown if stream is enabled", () => { + streamDetails.enableSyncStream(); + streamDetails.isSelectSyncModeVisible(); + }); + it("closes", () => { streamDetails.close(); streamDetails.isClosed(); @@ -125,6 +153,8 @@ describe("Connection - Stream details", () => { const cursor = "created_at"; const primaryKeys = ["car_id", "user_id"]; + userCarsStreamRow.toggleStreamSync(); + userCarsStreamRow.isStreamSyncEnabled(true); userCarsStreamRow.selectSyncMode(SyncMode.incremental, DestinationSyncMode.append_dedup); userCarsStreamRow.showStreamDetails(); @@ -140,10 +170,13 @@ describe("Connection - Stream details", () => { describe("sync mode", () => { const userCarsStreamRow = new StreamRowPageObject("public", "user_cars"); + it("can select cursor and primary key", () => { const cursor = "created_at"; const primaryKeys = ["car_id", "user_id"]; + userCarsStreamRow.toggleStreamSync(); + userCarsStreamRow.isStreamSyncEnabled(true); userCarsStreamRow.showStreamDetails(); streamDetails.selectSyncMode(SyncMode.incremental, DestinationSyncMode.append_dedup); @@ -162,6 +195,8 @@ describe("Connection - Stream details", () => { const columnsStreamRow = new StreamRowPageObject("public", "columns"); it("selects cursors for stream with many fields", () => { + columnsStreamRow.toggleStreamSync(); + columnsStreamRow.isStreamSyncEnabled(true); columnsStreamRow.selectSyncMode(SyncMode.incremental, DestinationSyncMode.append); columnsStreamRow.showStreamDetails(); diff --git a/airbyte-webapp/cypress/e2e/connection/syncModes.cy.ts b/airbyte-webapp/cypress/e2e/connection/syncModes.cy.ts index 0620cacb35b..860a95e38d3 100644 --- a/airbyte-webapp/cypress/e2e/connection/syncModes.cy.ts +++ b/airbyte-webapp/cypress/e2e/connection/syncModes.cy.ts @@ -4,12 +4,12 @@ import { createPostgresSourceViaApi, } from "@cy/commands/connection"; import { - WebBackendConnectionRead, + AirbyteStreamConfiguration, DestinationRead, 
DestinationSyncMode, SourceRead, SyncMode, - AirbyteStreamConfiguration, + WebBackendConnectionRead, } from "@src/core/api/types/AirbyteClient"; import { requestDeleteConnection, requestDeleteDestination, requestDeleteSource } from "commands/api"; import { runDbQuery } from "commands/db/db"; @@ -419,4 +419,49 @@ describe("Connection - sync modes", () => { usersStreamRow.hasNoSourceDefinedPrimaryKeys(); }); }); + + describe("Track stream(config) user-configured changes", () => { + it("should NOT show reset stream modal if sync mode was changed in disabled stream", () => { + const accountsStreamRow = streamsTable.getRow("public", "accounts"); + + // disable the stream and save + accountsStreamRow.toggleStreamSync(); + saveConnectionAndAssertStreams( + { + namespace: "public", + name: "accounts", + config: { + syncMode: SyncMode.full_refresh, + destinationSyncMode: DestinationSyncMode.overwrite, + }, + }, + { expectModal: false } + ); + + // leave the page and come back + connectionPage.visit(connection, "transformation", { interceptGetHandler: modifyAccountsTableInterceptHandler }); + connectionPage.visit(connection, "replication", { interceptGetHandler: modifyAccountsTableInterceptHandler }); + + // enable stream, change sync mode, then disable stream and save + accountsStreamRow.toggleStreamSync(); + accountsStreamRow.selectSyncMode(SyncMode.full_refresh, DestinationSyncMode.append); + accountsStreamRow.toggleStreamSync(); + saveConnectionAndAssertStreams( + { + namespace: "public", + name: "accounts", + config: { + /** + * Note: changes in disabled streams aren't persisted at all to the db, + * so anything user-configured on a disabled stream can/should be ignored. 
+ * So the sync should be the same as before + */ + syncMode: SyncMode.full_refresh, + destinationSyncMode: DestinationSyncMode.overwrite, + }, + }, + { expectModal: false } + ); + }); + }); }); diff --git a/airbyte-webapp/cypress/pages/connection/StreamDetailsPageObject.ts b/airbyte-webapp/cypress/pages/connection/StreamDetailsPageObject.ts index 2485b4df24d..df5fb2f1c73 100644 --- a/airbyte-webapp/cypress/pages/connection/StreamDetailsPageObject.ts +++ b/airbyte-webapp/cypress/pages/connection/StreamDetailsPageObject.ts @@ -157,6 +157,18 @@ export class StreamDetailsPageObject { }); } + isSelectSyncModeVisible() { + cy.get(streamDetailsPanel).within(() => { + cy.get(syncModeSelectButton).should("be.visible"); + }); + } + + isSelectSyncModeHidden() { + cy.get(streamDetailsPanel).within(() => { + cy.get(syncModeSelectButton).should("not.exist"); + }); + } + selectCursor(fieldName: string) { getRowByFieldName(fieldName).within(() => { cy.get(cursorRadioButton).parent().click({ scrollBehavior: false }); diff --git a/airbyte-webapp/cypress/pages/connection/connectionListPageObject.ts b/airbyte-webapp/cypress/pages/connection/connectionListPageObject.ts index f571df8d5ed..928d4c86995 100644 --- a/airbyte-webapp/cypress/pages/connection/connectionListPageObject.ts +++ b/airbyte-webapp/cypress/pages/connection/connectionListPageObject.ts @@ -1,9 +1,10 @@ import { WebBackendConnectionListItem } from "@src/core/api/types/AirbyteClient"; import { getWorkspaceId } from "commands/api/workspace"; -const statusCell = (connectionId: string) => `[data-testId='statusCell-${connectionId}']`; +const schemaChangeCell = (connectionId: string) => `[data-testid='link-replication-${connectionId}']`; + const changesStatusIcon = (type: string) => `[data-testId='changesStatusIcon-${type}']`; -const manualSyncButton = "button[data-testId='manual-sync-button']"; +const connectionStateSwitch = (connectionId: string) => `[data-testId='connection-state-switch-${connectionId}']`; const 
newConnectionButton = "[data-testid='new-connection-button']"; export const visit = () => { @@ -13,10 +14,10 @@ export const visit = () => { }; export const getSchemaChangeIcon = (connection: WebBackendConnectionListItem, type: "breaking" | "non_breaking") => - cy.get(`${statusCell(connection.connectionId)} ${changesStatusIcon(type)}`); + cy.get(`${schemaChangeCell(connection.connectionId)} ${changesStatusIcon(type)}`); -export const getManualSyncButton = (connection: WebBackendConnectionListItem) => - cy.get(`${statusCell(connection.connectionId)} ${manualSyncButton}`); +export const getConnectionStateSwitch = (connection: WebBackendConnectionListItem) => + cy.get(`${connectionStateSwitch(connection.connectionId)}`); export const clickNewConnectionButton = () => { cy.get(newConnectionButton).click(); diff --git a/airbyte-webapp/knip.jsonc b/airbyte-webapp/knip.jsonc new file mode 100644 index 00000000000..0cec89c8536 --- /dev/null +++ b/airbyte-webapp/knip.jsonc @@ -0,0 +1,30 @@ +{ + "$schema": "https://unpkg.com/knip@5/schema.json", + "typescript": true, + "vite": true, + "project": [ + "src/**/*.{js,ts,tsx}!" 
+ ], + "ignore": [ + // Ignore all generated files + "src/core/api/generated/**", + // Ignore storybook files + "**/*.stories.tsx", + "**/*.docs.tsx", + "**/*.docs-utils.{ts,tsx}", + // Ignore test specific files + "src/test-utils/**", + "**/__mocks__/**", + "**/*.mocks.{ts,tsx}", + + // Debug utility meant to be used only temporary for local debugging + "src/core/utils/useMemoDebug.ts" + ], + "ignoreDependencies": [ + // Knip doesn't detect the import to this from a SASS file + "normalize.css", + // Knip does not detect the pure `/// ` import for this + "@types/segment-analytics" + ], + "include": ["files", "dependencies"] +} \ No newline at end of file diff --git a/airbyte-webapp/nginx/cloud.conf.template b/airbyte-webapp/nginx/cloud.conf.template index 6de816c0042..4a31db0a64a 100644 --- a/airbyte-webapp/nginx/cloud.conf.template +++ b/airbyte-webapp/nginx/cloud.conf.template @@ -1,6 +1,6 @@ server { - listen 80; - listen [::]:80; + listen 8080; + listen [::]:8080; server_name localhost; gzip on; diff --git a/airbyte-webapp/nginx/default.conf.template b/airbyte-webapp/nginx/default.conf.template index 837c2e31d3f..20ae11429dc 100644 --- a/airbyte-webapp/nginx/default.conf.template +++ b/airbyte-webapp/nginx/default.conf.template @@ -11,8 +11,8 @@ upstream keycloak { } server { - listen 80; - listen [::]:80; + listen 8080; + listen [::]:8080; server_name localhost; add_header Content-Security-Policy "script-src * 'unsafe-inline'; worker-src 'self' blob:;"; diff --git a/airbyte-webapp/orval.config.ts b/airbyte-webapp/orval.config.ts index 5827f1efe60..fbacb429127 100644 --- a/airbyte-webapp/orval.config.ts +++ b/airbyte-webapp/orval.config.ts @@ -91,6 +91,7 @@ const createApi = (inputSpecFile: string, name: string, apiFn?: ApiFn, excludedP }; }; +// IMPORTANT: Whenever you change/add OpenAPI specs here, make sure to also adjust the outsideWebappDependencies list in build.gradle.kts export default defineConfig({ api: 
createApi("../airbyte-api/src/main/openapi/config.yaml", "AirbyteClient", "apiCall", [ // Required to exclude, due to us not being able to convert JSON parameters diff --git a/airbyte-webapp/package.json b/airbyte-webapp/package.json index 9a0914e084d..49c6757c093 100644 --- a/airbyte-webapp/package.json +++ b/airbyte-webapp/package.json @@ -23,7 +23,7 @@ "pretest": "TS_NODE_TRANSPILE_ONLY=true pnpm run generate-client", "pretest:ci": "TS_NODE_TRANSPILE_ONLY=true pnpm run generate-client", "test": "jest --watch", - "test:ci": "jest --watchAll=false --silent", + "test:ci": "JEST_RETRIES=3 jest --watchAll=false --silent", "test:coverage": "jest --coverage --watchAll=false", "format": "prettier --write 'src/**/*.{ts,tsx}'", "prebuild:storybook": "TS_NODE_TRANSPILE_ONLY=true pnpm run generate-client", @@ -32,6 +32,7 @@ "lint": "eslint --ext .js,.ts,.tsx --ignore-path .gitignore .", "stylelint": "stylelint 'src/**/*.{css,scss}'", "stylelint-check": "stylelint-config-prettier-scss-check", + "unused-code": "knip --production --reporter markdown || { echo '[^] The above files and dependencies are not used within the code base. 
Please delete them or explicitly ignore them in knip.jsonc.'; exit 1; }", "license-check": "node ./scripts/license-check.js", "generate-client": "./scripts/load-declarative-schema.sh && orval", "validate-links": "ts-node --skip-project ./scripts/validate-links.ts", @@ -70,7 +71,6 @@ "@tanstack/react-query-devtools": "^4.29.6", "@tanstack/react-table": "^8.7.0", "@types/diff": "^5.0.7", - "@types/node-fetch": "^2.6.8", "@types/path-browserify": "^1.0.1", "@types/segment-analytics": "^0.0.36", "@types/semver": "^7.3.13", @@ -84,11 +84,12 @@ "date-fns": "^2.29.3", "dayjs": "^1.11.3", "diff": "^5.1.0", + "escape-string-regexp": "^5.0.0", "firebase": "^10.5.0", - "flat": "^5.0.2", "framer-motion": "^6.3.11", "js-yaml": "^4.1.0", "json-schema": "^0.4.0", + "keycloak-js": "^23.0.7", "launchdarkly-js-client-sdk": "^3.1.0", "lodash": "^4.17.21", "markdown-to-jsx": "^7.3.2", @@ -117,21 +118,13 @@ "react-virtuoso": "^4.6.2", "react-widgets": "^4.6.1", "recharts": "^2.1.13", - "rehype-slug": "^5.0.1", - "rehype-urls": "^1.1.1", "remark": "^14.0.3", - "remark-directive": "^2.0.1", - "remark-frontmatter": "^4.0.1", "remark-gfm": "^3.0.0", "rxjs": "^7.8.1", "sanitize-html": "^2.11.0", - "sass": "^1.70.0", "semver": "^7.5.4", "ts-pattern": "^4.2.1", "typesafe-actions": "^5.1.0", - "unified": "^10.1.2", - "unist-util-visit": "^4.1.2", - "url": "^0.11.3", "uuid": "^9.0.1", "yup": "^0.32.11" }, @@ -156,12 +149,12 @@ "@testing-library/react": "^14.2.1", "@testing-library/user-event": "^14.5.2", "@types/byte-size": "^8.1.1", - "@types/flat": "^5.0.4", "@types/jest": "^29.5.12", "@types/js-yaml": "^4.0.8", "@types/json-schema": "^7.0.14", "@types/lodash": "^4.14.200", "@types/node": "^18.11.9", + "@types/node-fetch": "^2.6.11", "@types/react": "^18.2.34", "@types/react-datepicker": "^4.8.0", "@types/react-dom": "^18.2.14", @@ -200,6 +193,7 @@ "history": "^5.3.0", "jest": "^29.7.0", "jest-environment-jsdom": "^29.7.0", + "knip": "^5.2.2", "license-checker": "^25.0.1", "lint-staged": 
"^12.3.7", "meow": "^9.0.0", @@ -210,6 +204,7 @@ "pg-promise": "^10.15.4", "prettier": "^3.0.3", "react-select-event": "^5.5.0", + "sass": "^1.70.0", "start-server-and-test": "^2.0.3", "storybook": "^7.6.12", "storybook-dark-mode": "^3.0.3", diff --git a/airbyte-webapp/packages/vite-plugins/doc-middleware.ts b/airbyte-webapp/packages/vite-plugins/doc-middleware.ts index 84f49c5730a..5e3e72eaf02 100644 --- a/airbyte-webapp/packages/vite-plugins/doc-middleware.ts +++ b/airbyte-webapp/packages/vite-plugins/doc-middleware.ts @@ -10,11 +10,11 @@ const localDocMiddleware = (docsPath: string): Plugin => { return { name: "airbyte/doc-middleware-local", configureServer(server: ViteDevServer) { - // Serve the docs used in the sidebar. During building Gradle will copy those into the docker image - // Relavant gradle task :airbyte-webapp:copyDocs + // Serve the docs used in the sidebar. In dev mode, docs are served from either the local airbyte repository, + // or github if that repository is not found. server.middlewares.use("/docs/integrations", express.static(docsPath) as Connect.NextHandleFunction); // Don't fallback to default handling (serve index.html) for not found files, but make sure they 404 out properly - // so trying to load the `.inapp.md` files will properly function and fail if the doc doesn't exist. + // so that a clear error message is displayed in the documentation panel if the file is not found. 
server.middlewares.use("/docs/integrations", (req, res) => { res.statusCode = 404; res.end(`404 - ${docsPath}${req.url} not found`); diff --git a/airbyte-webapp/packages/vite-plugins/environment-variables.ts b/airbyte-webapp/packages/vite-plugins/environment-variables.ts index 14acc7e000b..1771e9aec95 100644 --- a/airbyte-webapp/packages/vite-plugins/environment-variables.ts +++ b/airbyte-webapp/packages/vite-plugins/environment-variables.ts @@ -21,7 +21,7 @@ export function environmentVariables(): Plugin { const cloudEnv = process.env.WEBAPP_BUILD_CLOUD_ENV; if (cloudEnv) { console.log(`☁️ Getting env file for cloud environment ${chalk.green(cloudEnv)}\n`); - const envDirPath = path.join(ROOT_PATH, `../../cloud-webapp/envs/`, cloudEnv); + const envDirPath = path.join(ROOT_PATH, `../../cloud/cloud-webapp/envs/`, cloudEnv); // loadEnv will not throw if you give it a non-existent path, so we explicitly check here if (!fs.existsSync(path.join(envDirPath, `.env`))) { diff --git a/airbyte-webapp/pnpm-lock.yaml b/airbyte-webapp/pnpm-lock.yaml index b504017e5dc..94e3a04af53 100644 --- a/airbyte-webapp/pnpm-lock.yaml +++ b/airbyte-webapp/pnpm-lock.yaml @@ -57,9 +57,6 @@ dependencies: '@types/diff': specifier: ^5.0.7 version: 5.0.7 - '@types/node-fetch': - specifier: ^2.6.8 - version: 2.6.8 '@types/path-browserify': specifier: ^1.0.1 version: 1.0.1 @@ -99,12 +96,12 @@ dependencies: diff: specifier: ^5.1.0 version: 5.1.0 + escape-string-regexp: + specifier: ^5.0.0 + version: 5.0.0 firebase: specifier: ^10.5.0 version: 10.5.0 - flat: - specifier: ^5.0.2 - version: 5.0.2 framer-motion: specifier: ^6.3.11 version: 6.5.1(react-dom@18.2.0)(react@18.2.0) @@ -114,6 +111,9 @@ dependencies: json-schema: specifier: ^0.4.0 version: 0.4.0 + keycloak-js: + specifier: ^23.0.7 + version: 23.0.7 launchdarkly-js-client-sdk: specifier: ^3.1.0 version: 3.1.0 @@ -198,21 +198,9 @@ dependencies: recharts: specifier: ^2.1.13 version: 2.3.2(prop-types@15.8.1)(react-dom@18.2.0)(react@18.2.0) - 
rehype-slug: - specifier: ^5.0.1 - version: 5.1.0 - rehype-urls: - specifier: ^1.1.1 - version: 1.1.1 remark: specifier: ^14.0.3 version: 14.0.3 - remark-directive: - specifier: ^2.0.1 - version: 2.0.1 - remark-frontmatter: - specifier: ^4.0.1 - version: 4.0.1 remark-gfm: specifier: ^3.0.0 version: 3.0.1 @@ -222,9 +210,6 @@ dependencies: sanitize-html: specifier: ^2.11.0 version: 2.11.0 - sass: - specifier: ^1.70.0 - version: 1.70.0 semver: specifier: ^7.5.4 version: 7.5.4 @@ -234,15 +219,6 @@ dependencies: typesafe-actions: specifier: ^5.1.0 version: 5.1.0 - unified: - specifier: ^10.1.2 - version: 10.1.2 - unist-util-visit: - specifier: ^4.1.2 - version: 4.1.2 - url: - specifier: ^0.11.3 - version: 0.11.3 uuid: specifier: ^9.0.1 version: 9.0.1 @@ -311,9 +287,6 @@ devDependencies: '@types/byte-size': specifier: ^8.1.1 version: 8.1.1 - '@types/flat': - specifier: ^5.0.4 - version: 5.0.4 '@types/jest': specifier: ^29.5.12 version: 29.5.12 @@ -329,6 +302,9 @@ devDependencies: '@types/node': specifier: ^18.11.9 version: 18.16.3 + '@types/node-fetch': + specifier: ^2.6.11 + version: 2.6.11 '@types/react': specifier: ^18.2.34 version: 18.2.34 @@ -443,6 +419,9 @@ devDependencies: jest-environment-jsdom: specifier: ^29.7.0 version: 29.7.0 + knip: + specifier: ^5.2.2 + version: 5.2.2(@types/node@18.16.3)(typescript@5.0.2) license-checker: specifier: ^25.0.1 version: 25.0.1 @@ -473,6 +452,9 @@ devDependencies: react-select-event: specifier: ^5.5.0 version: 5.5.1 + sass: + specifier: ^1.70.0 + version: 1.70.0 start-server-and-test: specifier: ^2.0.3 version: 2.0.3 @@ -2263,6 +2245,31 @@ packages: resolution: {integrity: sha512-EsBwpc7hBUJWAsNPBmJy4hxWx12v6bshQsldrVmjxJoc3isbxhOrF2IcCpaXxfvq03NwkI7sbsOLXbYuqF/8Ww==} dev: false + /@ericcornelissen/bash-parser@0.5.2: + resolution: {integrity: sha512-4pIMTa1nEFfMXitv7oaNEWOdM+zpOZavesa5GaiWTgda6Zk32CFGxjUp/iIaN0PwgUW1yTq/fztSjbpE8SLGZQ==} + engines: {node: '>=4'} + dependencies: + array-last: 1.3.0 + babylon: 6.18.0 + 
compose-function: 3.0.3 + deep-freeze: 0.0.1 + filter-iterator: 0.0.1 + filter-obj: 1.1.0 + has-own-property: 0.1.0 + identity-function: 1.0.0 + is-iterable: 1.1.1 + iterable-lookahead: 1.0.0 + lodash.curry: 4.1.1 + magic-string: 0.16.0 + map-obj: 2.0.0 + object-pairs: 0.1.0 + object-values: 1.0.0 + reverse-arguments: 1.0.0 + shell-quote-word: 1.0.1 + to-pascal-case: 1.0.0 + unescape-js: 1.1.4 + dev: true + /@esbuild/aix-ppc64@0.19.12: resolution: {integrity: sha512-bmoCYyWdEL3wDQIVbcyzRyeKLgk2WtWLTWz1ZIAZF/EGbNOwSA6ew3PftJ1PqMiOOGu0OyFMzG53L0zqIpPeNA==} engines: {node: '>=12'} @@ -3913,11 +3920,24 @@ packages: run-parallel: 1.2.0 dev: true + /@nodelib/fs.scandir@3.0.0: + resolution: {integrity: sha512-ktI9+PxfHYtKjF3cLTUAh2N+b8MijCRPNwKJNqTVdL0gB0QxLU2rIRaZ1t71oEa3YBDE6bukH1sR0+CDnpp/Mg==} + engines: {node: '>=16.14.0'} + dependencies: + '@nodelib/fs.stat': 3.0.0 + run-parallel: 1.2.0 + dev: true + /@nodelib/fs.stat@2.0.5: resolution: {integrity: sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==} engines: {node: '>= 8'} dev: true + /@nodelib/fs.stat@3.0.0: + resolution: {integrity: sha512-2tQOI38s19P9i7X/Drt0v8iMA+KMsgdhB/dyPER+e+2Y8L1Z7QvnuRdW/uLuf5YRFUYmnj4bMA6qCuZHFI1GDQ==} + engines: {node: '>=16.14.0'} + dev: true + /@nodelib/fs.walk@1.2.8: resolution: {integrity: sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==} engines: {node: '>= 8'} @@ -3926,6 +3946,67 @@ packages: fastq: 1.17.1 dev: true + /@nodelib/fs.walk@2.0.0: + resolution: {integrity: sha512-54voNDBobGdMl3BUXSu7UaDh1P85PGHWlJ5e0XhPugo1JulOyCtp2I+5ri4wplGDJ8QGwPEQW7/x3yTLU7yF1A==} + engines: {node: '>=16.14.0'} + dependencies: + '@nodelib/fs.scandir': 3.0.0 + fastq: 1.17.1 + dev: true + + /@npmcli/git@5.0.4: + resolution: {integrity: sha512-nr6/WezNzuYUppzXRaYu/W4aT5rLxdXqEFupbh6e/ovlYFQ8hpu1UUPV3Ir/YTl+74iXl2ZOMlGzudh9ZPUchQ==} + engines: {node: ^16.14.0 || >=18.0.0} + dependencies: + 
'@npmcli/promise-spawn': 7.0.1 + lru-cache: 10.2.0 + npm-pick-manifest: 9.0.0 + proc-log: 3.0.0 + promise-inflight: 1.0.1 + promise-retry: 2.0.1 + semver: 7.5.4 + which: 4.0.0 + transitivePeerDependencies: + - bluebird + dev: true + + /@npmcli/map-workspaces@3.0.4: + resolution: {integrity: sha512-Z0TbvXkRbacjFFLpVpV0e2mheCh+WzQpcqL+4xp49uNJOxOnIAPZyXtUxZ5Qn3QBTGKA11Exjd9a5411rBrhDg==} + engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} + dependencies: + '@npmcli/name-from-folder': 2.0.0 + glob: 10.3.10 + minimatch: 9.0.3 + read-package-json-fast: 3.0.2 + dev: true + + /@npmcli/name-from-folder@2.0.0: + resolution: {integrity: sha512-pwK+BfEBZJbKdNYpHHRTNBwBoqrN/iIMO0AiGvYsp3Hoaq0WbgGSWQR6SCldZovoDpY3yje5lkFUe6gsDgJ2vg==} + engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} + dev: true + + /@npmcli/package-json@5.0.0: + resolution: {integrity: sha512-OI2zdYBLhQ7kpNPaJxiflofYIpkNLi+lnGdzqUOfRmCF3r2l1nadcjtCYMJKv/Utm/ZtlffaUuTiAktPHbc17g==} + engines: {node: ^16.14.0 || >=18.0.0} + dependencies: + '@npmcli/git': 5.0.4 + glob: 10.3.10 + hosted-git-info: 7.0.1 + json-parse-even-better-errors: 3.0.1 + normalize-package-data: 6.0.0 + proc-log: 3.0.0 + semver: 7.5.4 + transitivePeerDependencies: + - bluebird + dev: true + + /@npmcli/promise-spawn@7.0.1: + resolution: {integrity: sha512-P4KkF9jX3y+7yFUxgcUdDtLy+t4OlDGuEBLNs57AZsfSfg+uV6MLndqGpnl4831ggaEdXwR50XFoZP4VFtHolg==} + engines: {node: ^16.14.0 || >=18.0.0} + dependencies: + which: 4.0.0 + dev: true + /@orval/angular@6.19.1(openapi-types@12.1.3): resolution: {integrity: sha512-8yd1Z75r8VBFFfednXz3eEIrVeEZX+HF6urH0t9EfifJBmO62kSwtJop3UoR3WXq6J4m+ck3nZ4lR9oJ7LeL9g==} dependencies: @@ -4034,6 +4115,130 @@ packages: engines: {node: ^12.20.0 || ^14.18.0 || >=16.0.0} dev: true + /@pnpm/constants@7.1.1: + resolution: {integrity: sha512-31pZqMtjwV+Vaq7MaPrT1EoDFSYwye3dp6BiHIGRJmVThCQwySRKM7hCvqqI94epNkqFAAYoWrNynWoRYosGdw==} + engines: {node: '>=16.14'} + dev: true + + /@pnpm/core-loggers@9.0.6(@pnpm/logger@5.0.0): + 
resolution: {integrity: sha512-iK67SGbp+06bA/elpg51wygPFjNA7JKHtKkpLxqXXHw+AjFFBC3f2OznJsCIuDK6HdGi5UhHLYqo5QxJ2gMqJQ==} + engines: {node: '>=16.14'} + peerDependencies: + '@pnpm/logger': ^5.0.0 + dependencies: + '@pnpm/logger': 5.0.0 + '@pnpm/types': 9.4.2 + dev: true + + /@pnpm/error@5.0.3: + resolution: {integrity: sha512-ONJU5cUeoeJSy50qOYsMZQHTA/9QKmGgh1ATfEpCLgtbdwqUiwD9MxHNeXUYYI/pocBCz6r1ZCFqiQvO+8SUKA==} + engines: {node: '>=16.14'} + dependencies: + '@pnpm/constants': 7.1.1 + dev: true + + /@pnpm/fetching-types@5.0.0: + resolution: {integrity: sha512-o9gdO1v8Uc5P2fBBuW6GSpfTqIivQmQlqjQJdFiQX0m+tgxlrMRneIg392jZuc6fk7kFqjLheInlslgJfwY+4Q==} + engines: {node: '>=16.14'} + dependencies: + '@zkochan/retry': 0.2.0 + node-fetch: 3.0.0-beta.9 + transitivePeerDependencies: + - domexception + dev: true + + /@pnpm/graceful-fs@3.2.0: + resolution: {integrity: sha512-vRoXJxscDpHak7YE9SqCkzfrayn+Lw+YueOeHIPEqkgokrHeYgYeONoc2kGh0ObHaRtNSsonozVfJ456kxLNvA==} + engines: {node: '>=16.14'} + dependencies: + graceful-fs: 4.2.11 + dev: true + + /@pnpm/logger@5.0.0: + resolution: {integrity: sha512-YfcB2QrX+Wx1o6LD1G2Y2fhDhOix/bAY/oAnMpHoNLsKkWIRbt1oKLkIFvxBMzLwAEPqnYWguJrYC+J6i4ywbw==} + engines: {node: '>=12.17'} + dependencies: + bole: 5.0.11 + ndjson: 2.0.0 + dev: true + + /@pnpm/npm-package-arg@1.0.0: + resolution: {integrity: sha512-oQYP08exi6mOPdAZZWcNIGS+KKPsnNwUBzSuAEGWuCcqwMAt3k/WVCqVIXzBxhO5sP2b43og69VHmPj6IroKqw==} + engines: {node: '>=14.6'} + dependencies: + hosted-git-info: 4.1.0 + semver: 7.5.4 + validate-npm-package-name: 4.0.0 + dev: true + + /@pnpm/npm-resolver@18.1.1(@pnpm/logger@5.0.0): + resolution: {integrity: sha512-NptzncmMD5ZMimbjWkGpMzuBRhlCY+sh7mzypPdBOTNlh5hmEQe/VaRKjNK4V9/b0C/llElkvIePL6acybu86w==} + engines: {node: '>=16.14'} + peerDependencies: + '@pnpm/logger': ^5.0.0 + dependencies: + '@pnpm/core-loggers': 9.0.6(@pnpm/logger@5.0.0) + '@pnpm/error': 5.0.3 + '@pnpm/fetching-types': 5.0.0 + '@pnpm/graceful-fs': 3.2.0 + '@pnpm/logger': 5.0.0 + 
'@pnpm/resolve-workspace-range': 5.0.1 + '@pnpm/resolver-base': 11.1.0 + '@pnpm/types': 9.4.2 + '@zkochan/retry': 0.2.0 + encode-registry: 3.0.1 + load-json-file: 6.2.0 + lru-cache: 10.2.0 + normalize-path: 3.0.0 + p-limit: 3.1.0 + p-memoize: 4.0.1 + parse-npm-tarball-url: 3.0.0 + path-temp: 2.1.0 + ramda: /@pnpm/ramda@0.28.1 + rename-overwrite: 5.0.0 + semver: 7.5.4 + ssri: 10.0.5 + version-selector-type: 3.0.0 + transitivePeerDependencies: + - domexception + dev: true + + /@pnpm/ramda@0.28.1: + resolution: {integrity: sha512-zcAG+lvU0fMziNeGXpPyCyCJYp5ZVrPElEE4t14jAmViaihohocZ+dDkcRIyAomox8pQsuZnv1EyHR+pOhmUWw==} + dev: true + + /@pnpm/resolve-workspace-range@5.0.1: + resolution: {integrity: sha512-yQ0pMthlw8rTgS/C9hrjne+NEnnSNevCjtdodd7i15I59jMBYciHifZ/vjg0NY+Jl+USTc3dBE+0h/4tdYjMKg==} + engines: {node: '>=16.14'} + dependencies: + semver: 7.5.4 + dev: true + + /@pnpm/resolver-base@11.1.0: + resolution: {integrity: sha512-y2qKaj18pwe1VWc3YXEitdYFo+WqOOt60aqTUuOVkJAirUzz0DzuYh3Ifct4znYWPdgUXHaN5DMphNF5iL85rA==} + engines: {node: '>=16.14'} + dependencies: + '@pnpm/types': 9.4.2 + dev: true + + /@pnpm/types@9.4.2: + resolution: {integrity: sha512-g1hcF8Nv4gd76POilz9gD4LITAPXOe5nX4ijgr8ixCbLQZfcpYiMfJ+C1RlMNRUDo8vhlNB4O3bUlxmT6EAQXA==} + engines: {node: '>=16.14'} + dev: true + + /@pnpm/workspace.pkgs-graph@2.0.15(@pnpm/logger@5.0.0): + resolution: {integrity: sha512-Txxd5FzzVfBfGCTngISaxFlJzZhzdS8BUrCEtAWJfZOFbQzpWy27rzkaS7TaWW2dHiFcCVYzPI/2vgxfeRansA==} + engines: {node: '>=16.14'} + dependencies: + '@pnpm/npm-package-arg': 1.0.0 + '@pnpm/npm-resolver': 18.1.1(@pnpm/logger@5.0.0) + '@pnpm/resolve-workspace-range': 5.0.1 + ramda: /@pnpm/ramda@0.28.1 + transitivePeerDependencies: + - '@pnpm/logger' + - domexception + dev: true + /@popperjs/core@2.11.6: resolution: {integrity: sha512-50/17A98tWUfQ176raKiOGXuYpLyyVMkxxG6oylzL3BPOlA6ADGdK7EYunSa4I064xerltq9TGXs8HmOk5E+vw==} @@ -4822,6 +5027,16 @@ packages: '@sinonjs/commons': 3.0.0 dev: true + 
/@snyk/github-codeowners@1.1.0: + resolution: {integrity: sha512-lGFf08pbkEac0NYgVf4hdANpAgApRjNByLXB+WBip3qj1iendOIyAwP2GKkKbQMNVy2r1xxDf0ssfWscoiC+Vw==} + engines: {node: '>=8.10'} + hasBin: true + dependencies: + commander: 4.1.1 + ignore: 5.3.1 + p-map: 4.0.0 + dev: true + /@stoplight/better-ajv-errors@1.0.3(ajv@8.12.0): resolution: {integrity: sha512-0p9uXkuB22qGdNfy3VeEhxkU5uwvp/KrBTAbrLBURv6ilxIVwanKwjMc41lQfIVgPGcOkmLbTolfFrSsueu7zA==} engines: {node: ^12.20 || >= 14.13} @@ -5511,7 +5726,7 @@ packages: '@storybook/types': 7.6.12 '@types/find-cache-dir': 3.2.1 '@types/node': 18.16.3 - '@types/node-fetch': 2.6.8 + '@types/node-fetch': 2.6.11 '@types/pretty-hrtime': 1.0.3 chalk: 4.1.2 esbuild: 0.18.20 @@ -6348,10 +6563,6 @@ packages: resolution: {integrity: sha512-frsJrz2t/CeGifcu/6uRo4b+SzAwT4NYCVPu1GN8IB9XTzrpPkGuV0tmh9mN+/L0PklAlsC3u5Fxt0ju00LXIw==} dev: true - /@types/flat@5.0.4: - resolution: {integrity: sha512-Qy8SOE+x5F40jebCj4vPoA/Y6J2Zd6h9ihMPYP8U8//dDEOK4Yu0ShOTKqEmWPR+4AdV4sls/VCaR6azEFvKKg==} - dev: true - /@types/glob@7.2.0: resolution: {integrity: sha512-ZUxbzKl0IfJILTS6t7ip5fQQM/J3TJYubDm3nMbgubNNYS62eXeUpoLUC8/7fJNiFYHTrGPQn7hspDUzIHX3UA==} dependencies: @@ -6470,11 +6681,12 @@ packages: resolution: {integrity: sha512-iiUgKzV9AuaEkZqkOLDIvlQiL6ltuZd9tGcW3gwpnX8JbuiuhFlEGmmFXEXkN50Cvq7Os88IY2v0dkDqXYWVgA==} dev: false - /@types/node-fetch@2.6.8: - resolution: {integrity: sha512-nnH5lV9QCMPsbEVdTb5Y+F3GQxLSw1xQgIydrb2gSfEavRPs50FnMr+KUaa+LoPSqibm2N+ZZxH7lavZlAT4GA==} + /@types/node-fetch@2.6.11: + resolution: {integrity: sha512-24xFj9R5+rfQJLRyM56qh+wnVSYhyXC2tkoBndtY0U+vubqNsYXGjufB2nn8Q6gt0LrARwL6UBtMCSVCwl4B1g==} dependencies: '@types/node': 18.16.3 form-data: 4.0.0 + dev: true /@types/node@14.18.43: resolution: {integrity: sha512-n3eFEaoem0WNwLux+k272P0+aq++5o05bA9CfiwKPdYPB5ZambWKdWoeHy7/OJiizMhzg27NLaZ6uzjLTzXceQ==} @@ -6495,6 +6707,10 @@ packages: resolution: {integrity: 
sha512-rUSqIy7fAfK6sRasdFCukWO4S77pXcTxViURlLdo1VKuekTDS8ASMdX1LA0TFlbzT3fZgFlgQTCrqmJBuTHpxA==} dev: false + /@types/picomatch@2.3.3: + resolution: {integrity: sha512-Yll76ZHikRFCyz/pffKGjrCwe/le2CDwOP5F210KQo27kpRE46U2rDnzikNlVn6/ezH3Mhn46bJMTfeVTtcYMg==} + dev: true + /@types/postcss-modules-local-by-default@4.0.1: resolution: {integrity: sha512-8Vf6MA7x68/XKCgIOFGGDtkfKzcIp0LuQcYGUqG+Ip1kIDIlTekH1u147obRXXSdv44q9HtT2sJQGWQkzLjMuQ==} dependencies: @@ -6950,6 +7166,18 @@ packages: tslib: 1.14.1 dev: true + /@zkochan/retry@0.2.0: + resolution: {integrity: sha512-WhB+2B/ZPlW2Xy/kMJBrMbqecWXcbDDgn0K0wKBAgO2OlBTz1iLJrRWduo+DGGn0Akvz1Lu4Xvls7dJojximWw==} + engines: {node: '>=10'} + dev: true + + /@zkochan/rimraf@2.1.3: + resolution: {integrity: sha512-mCfR3gylCzPC+iqdxEA6z5SxJeOgzgbwmyxanKriIne5qZLswDe/M43aD3p5MNzwzXRhbZg/OX+MpES6Zk1a6A==} + engines: {node: '>=12.10'} + dependencies: + rimraf: 3.0.2 + dev: true + /abab@2.0.6: resolution: {integrity: sha512-j2afSsaIENvHZN2B8GOpF566vZ5WVk5opAiMTvWgaQT8DkbOqsTfvNAvHoRGU2zzP8cPoqys+xHTRDWW8L+/BA==} dev: true @@ -7150,6 +7378,7 @@ packages: dependencies: normalize-path: 3.0.0 picomatch: 2.3.1 + dev: true /app-root-dir@1.0.2: resolution: {integrity: sha512-jlpIfsOoNoafl92Sz//64uQHGSyMrD2vYG5d8o2a4qGvyNCvXur7bzIsWtAC/6flI2RYAp3kv8rsfBtaLm7w0g==} @@ -7194,6 +7423,10 @@ packages: dequal: 2.0.3 dev: true + /arity-n@1.0.4: + resolution: {integrity: sha512-fExL2kFDC1Q2DUOx3whE/9KoN66IzkY4b4zUHUBFM1ojEYjZZYDcUW3bek/ufGionX9giIKDC5redH2IlGqcQQ==} + dev: true + /array-buffer-byte-length@1.0.0: resolution: {integrity: sha512-LPuwb2P+NrQw3XhxGc36+XSvuBPopovXYTR9Ew++Du9Yb/bx5AzBfrIsBoj0EZUifjQU+sHL21sseZ3jerWO/A==} dependencies: @@ -7229,6 +7462,13 @@ packages: is-string: 1.0.7 dev: true + /array-last@1.3.0: + resolution: {integrity: sha512-eOCut5rXlI6aCOS7Z7kCplKRKyiFQ6dHFBem4PwlwKeNFk2/XxTrhRh5T9PyaEWGy/NHTZWbY+nsZlNFJu9rYg==} + engines: {node: '>=0.10.0'} + dependencies: + is-number: 4.0.0 + dev: true + /array-union@2.1.0: 
resolution: {integrity: sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw==} engines: {node: '>=8'} @@ -7388,6 +7628,7 @@ packages: /asynckit@0.4.0: resolution: {integrity: sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==} + dev: true /at-least-node@1.0.0: resolution: {integrity: sha512-+q/t7Ekv1EDY2l6Gda6LLiX14rU9TV20Wa3ofeQmwPFZbOMo9DXrLbOjFaaclkXKWidIaopwAObQDqwWtGUjqg==} @@ -7558,6 +7799,11 @@ packages: babel-preset-current-node-syntax: 1.0.1(@babel/core@7.23.9) dev: true + /babylon@6.18.0: + resolution: {integrity: sha512-q/UEjfGJ2Cm3oKV71DJz9d25TPnq5rhBVL2Q4fA5wcC3jcrdn7+SssEybFIxwAvvP+YCsCYNKughoF33GxgycQ==} + hasBin: true + dev: true + /backslash@0.2.0: resolution: {integrity: sha512-Avs+8FUZ1HF/VFP4YWwHQZSGzRPm37ukU1JQYQWijuHhtXdOuAzcZ8PcAzfIw898a8PyBzdn+RtnKA6MzW0X2A==} dev: true @@ -7598,6 +7844,7 @@ packages: /binary-extensions@2.2.0: resolution: {integrity: sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA==} engines: {node: '>=8'} + dev: true /bl@4.1.0: resolution: {integrity: sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w==} @@ -7635,6 +7882,13 @@ packages: - supports-color dev: true + /bole@5.0.11: + resolution: {integrity: sha512-KB0Ye0iMAW5BnNbnLfMSQcnI186hKUzE2fpkZWqcxsoTR7eqzlTidSOMYPHJOn/yR7VGH7uSZp37qH9q2Et0zQ==} + dependencies: + fast-safe-stringify: 2.1.1 + individual: 3.0.0 + dev: true + /boolbase@1.0.0: resolution: {integrity: sha512-JZOSA7Mo9sNGB8+UjSgzdLtokWAky1zbztM3WRLCbZ70/3cTANmQmOdR7y2g+J0e2WXywy1yS468tY+IruqEww==} dev: true @@ -7664,6 +7918,7 @@ packages: engines: {node: '>=8'} dependencies: fill-range: 7.0.1 + dev: true /broadcast-channel@7.0.0: resolution: {integrity: sha512-a2tW0Ia1pajcPBOGUF2jXlDnvE9d5/dg6BG9h60OmRUcZVr/veUrU8vEQFwwQIhwG3KVzYwSk3v2nRRGFgQDXQ==} @@ -7725,6 +7980,12 @@ packages: resolution: {integrity: 
sha512-uYBjakWipfaO/bXI7E8rq6kpwHRZK5cNYrUv2OzZSI/FvmdMyXJ2tG9dKcjEC5YHmHpUAwsargWIZNWdxb/bnQ==} dev: true + /builtins@5.0.1: + resolution: {integrity: sha512-qwVpFEHNfhYJIzNRBvd2C1kyo6jz3ZSMPyyuR47OPdiKWlbYnZNyDWuyR175qDnAJLiCo5fBBqPb3RiXgWlkOQ==} + dependencies: + semver: 7.5.4 + dev: true + /bytes@3.0.0: resolution: {integrity: sha512-pMhOfFDPiv9t5jjIXkHosWmkSyQbvsgEVNkz0ERHbuLh2T/7j4Mqqpz523Fe8MVY89KC6Sh/QfS2sM+SjgFDcw==} engines: {node: '>= 0.8'} @@ -7751,6 +8012,7 @@ packages: function-bind: 1.1.2 get-intrinsic: 1.2.2 set-function-length: 1.1.1 + dev: true /call-me-maybe@1.0.2: resolution: {integrity: sha512-HpX65o1Hnr9HH25ojC1YGs7HCQLq0GCOibSaWER0eNpgJ/Z1MZv2mTc7+xh6WOPxbRVcmgbv4hGU+uSQ/2xFZQ==} @@ -7825,22 +8087,10 @@ packages: engines: {node: '>=10'} dev: true - /character-entities-html4@2.1.0: - resolution: {integrity: sha512-1v7fgQRj6hnSwFpq1Eu0ynr/CDEw0rXo2B61qXrLNdHZmPKgb7fqS1a2JwF0rISo9q77jDI8VMEHoApn8qDoZA==} - dev: false - - /character-entities-legacy@3.0.0: - resolution: {integrity: sha512-RpPp0asT/6ufRm//AJVwpViZbGM/MkjQFxJccQRHmISF/22NBtsHqAWmL+/pmkPWoIUJdWyeVleTl1wydHATVQ==} - dev: false - /character-entities@2.0.2: resolution: {integrity: sha512-shx7oQ0Awen/BRIdkjkvz54PnEEI/EjwXDSIZp86/KKdbafHh1Df/RYGBhn4hbe2+uKC9FnT5UCEdyPz3ai9hQ==} dev: false - /character-reference-invalid@2.0.1: - resolution: {integrity: sha512-iBZ4F4wRbyORVsu0jPV7gXkOsGYjGHPmAyv+HiHG8gi5PtC9KI2j1+v8/tlibRvjoWX027ypmG/n0HtO5t7unw==} - dev: false - /check-more-types@2.24.0: resolution: {integrity: sha512-Pj779qHxV2tuapviy1bSZNEL1maXr13bPYpsvSDB68HlYcYuhlDrmGd63i0JHMCLKzc7rUSNIrpdJlhVlNwrxA==} engines: {node: '>= 0.8.0'} @@ -7859,6 +8109,7 @@ packages: readdirp: 3.6.0 optionalDependencies: fsevents: 2.3.3 + dev: true /chownr@1.1.4: resolution: {integrity: sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg==} @@ -7967,6 +8218,7 @@ packages: /clone@1.0.4: resolution: {integrity: 
sha512-JQHZ2QMW6l3aH/j6xCqQThY/9OH4D/9ls34cgkUBiEeocRTU04tHfKPBsUK1PqZCUQM7GiA0IIXJSuXHI64Kbg==} engines: {node: '>=0.8'} + requiresBuild: true dev: true /clsx@1.2.1: @@ -8013,6 +8265,7 @@ packages: engines: {node: '>= 0.8'} dependencies: delayed-stream: 1.0.0 + dev: true /comma-separated-tokens@2.0.3: resolution: {integrity: sha512-Fu4hJdvzeylCfQPp9SGWidpzrMs7tTrlu6Vb8XGaRGck8QSNZJJp538Wrb60Lax4fPwR64ViY468OIUTbRlGZg==} @@ -8022,6 +8275,11 @@ packages: resolution: {integrity: sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==} dev: true + /commander@4.1.1: + resolution: {integrity: sha512-NOKm8xhkzAjzFx8B2v5OAHT+u5pRQc2UCa2Vq9jYL/31o2wi9mxBA7LIFs3sV5VSC49z6pEhfbMULvShKj26WA==} + engines: {node: '>= 6'} + dev: true + /commander@6.2.1: resolution: {integrity: sha512-U7VdrJFnJgo4xjrHpTzu0yrHPGImdsmD95ZlgYSEajAn2JKzDhDTPG9kBTefmObL2w/ngeZnilk+OV9CG3d7UA==} engines: {node: '>= 6'} @@ -8055,6 +8313,12 @@ packages: resolution: {integrity: sha512-LNZQXhqUvqUTotpZ00qLSaify3b4VFD588aRr8MKFw4CMUr98ytzCW5wDH5qx/DEY5kCDXcbcRuCqL0szEf2tg==} dev: true + /compose-function@3.0.3: + resolution: {integrity: sha512-xzhzTJ5eC+gmIzvZq+C3kCJHsp9os6tJkrigDRZclyGtOKINbZtE8n1Tzmeh32jW+BUDPbvZpibwvJHBLGMVwg==} + dependencies: + arity-n: 1.0.4 + dev: true + /compressible@2.0.18: resolution: {integrity: sha512-AF3r7P5dWxL8MxyITRMlORQNaOA2IkAFaTr4k7BUumjPtRpGDTZpl0Pb1XCO6JeDCBdp126Cgs9sMxqSjgYyRg==} engines: {node: '>= 0.6'} @@ -8481,6 +8745,11 @@ packages: resolution: {integrity: sha512-ND9qDTLc6diwj+Xe5cdAgVTbLVdXbtxTJRXRhli8Mowuaan+0EJOtdqJ0QCHNSSPyoXGx9HX2/VMnKeC34AChA==} dev: true + /data-uri-to-buffer@3.0.1: + resolution: {integrity: sha512-WboRycPNsVw3B3TL559F7kuBUM4d8CgMEvk6xEJlOp7OBPjt6G7z8WMWlD2rOFZLk6OYfFIUGsCOWzcQH9K2og==} + engines: {node: '>= 6'} + dev: true + /data-urls@3.0.2: resolution: {integrity: sha512-Jy/tj3ldjZJo63sVAvg6LHt2mHvl4V6AgRAmNDtLdm7faqtsx+aJG42rsyCo9JCoRVKwPFzKlIPx3DIibwSIaQ==} engines: {node: '>=12'} @@ -8618,6 
+8887,10 @@ packages: which-typed-array: 1.1.14 dev: true + /deep-freeze@0.0.1: + resolution: {integrity: sha512-Z+z8HiAvsGwmjqlphnHW5oz6yWlOwu6EQfFTjmeTWlDeda3FS2yv3jhq35TX/ewmsnqB+RX2IdsIOyjJCQN5tg==} + dev: true + /deep-is@0.1.4: resolution: {integrity: sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==} dev: true @@ -8641,6 +8914,7 @@ packages: /defaults@1.0.4: resolution: {integrity: sha512-eFuaLoy/Rxalv2kr+lqMlUnrDWV+3j4pljOIJgLIhI058IQfWJ7vXhyEIHu+HtC738klGALYxOKDO0bQP3tg8A==} + requiresBuild: true dependencies: clone: 1.0.4 dev: true @@ -8652,6 +8926,7 @@ packages: get-intrinsic: 1.2.2 gopd: 1.0.1 has-property-descriptors: 1.0.1 + dev: true /define-lazy-prop@2.0.0: resolution: {integrity: sha512-Ds09qNh8yw3khSjiJjiUInaGX9xlqZDY7JVryGxdxV7NPeuqQfplOpQ66yJFZut3jLa5zOwkXw1g9EI2uKh4Og==} @@ -8688,6 +8963,7 @@ packages: /delayed-stream@1.0.0: resolution: {integrity: sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==} engines: {node: '>=0.4.0'} + dev: true /depd@2.0.0: resolution: {integrity: sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==} @@ -8867,6 +9143,14 @@ packages: resolution: {integrity: sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==} dev: true + /easy-table@1.2.0: + resolution: {integrity: sha512-OFzVOv03YpvtcWGe5AayU5G2hgybsg3iqA6drU8UaoZyB9jLGMTrz9+asnLp/E+6qPh88yEI1gvyZFZ41dmgww==} + dependencies: + ansi-regex: 5.0.1 + optionalDependencies: + wcwidth: 1.0.1 + dev: true + /ecc-jsbn@0.1.2: resolution: {integrity: sha512-eh9O+hwRHNbG4BLTjEl3nw044CkGm5X6LoaCf7LPp7UU8Qrt47JYNi6nPX8xjW97TKGKm1ouctg0QSpZe9qrnw==} dependencies: @@ -8902,6 +9186,13 @@ packages: resolution: {integrity: sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==} dev: true + /encode-registry@3.0.1: + resolution: {integrity: 
sha512-6qOwkl1g0fv0DN3Y3ggr2EaZXN71aoAqPp3p/pVaWSBSIo+YjLOWN61Fva43oVyQNPf7kgm8lkudzlzojwE2jw==} + engines: {node: '>=10'} + dependencies: + mem: 8.1.1 + dev: true + /encodeurl@1.0.2: resolution: {integrity: sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w==} engines: {node: '>= 0.8'} @@ -8951,6 +9242,10 @@ packages: hasBin: true dev: true + /err-code@2.0.3: + resolution: {integrity: sha512-2bmlRpNKBxT/CRmPOlyISQpNj+qSeYvcym/uT0Jx2bMOlKLtSy1ZmLuVxSEKKyor/N5yhvp/ZiG1oE3DEYMSFA==} + dev: true + /errno@0.1.8: resolution: {integrity: sha512-dJ6oBr5SQ1VSd9qkk7ByRgb/1SH4JZjCHSW/mr63/QcXO9zLVxvJ6Oy13nio03rxpSnVDDjFor75SjVeZWPW/A==} hasBin: true @@ -10059,12 +10354,6 @@ packages: reusify: 1.0.4 dev: true - /fault@2.0.1: - resolution: {integrity: sha512-WtySTkS4OKev5JtpHXnib4Gxiurzh5NCGvWrFaZ34m6JehfTUhKZvn9njTfw48t6JumVQOmrKqpmGcdwxnhqBQ==} - dependencies: - format: 0.2.2 - dev: false - /faye-websocket@0.11.4: resolution: {integrity: sha512-CzbClwlXAuiRQAlUyfqPgvPoNKTckTPGfwZV4ZdAhVcP2lh9KUxJg2b5GkE7XbjKQ3YJnQ9z6D9ntLAlB+tP8g==} engines: {node: '>=0.8.0'} @@ -10084,6 +10373,16 @@ packages: pend: 1.2.0 dev: true + /fetch-blob@2.1.2: + resolution: {integrity: sha512-YKqtUDwqLyfyMnmbw8XD6Q8j9i/HggKtPEI+pZ1+8bvheBu78biSmNaXWusx1TauGqtUUGx/cBb1mKdq2rLYow==} + engines: {node: ^10.17.0 || >=12.3.0} + peerDependencies: + domexception: '*' + peerDependenciesMeta: + domexception: + optional: true + dev: true + /fetch-readablestream@0.2.0: resolution: {integrity: sha512-qu4mXWf4wus4idBIN/kVH+XSer8IZ9CwHP+Pd7DL7TuKNC1hP7ykon4kkBjwJF3EMX2WsFp4hH7gU7CyL7ucXw==} dev: false @@ -10131,6 +10430,16 @@ packages: engines: {node: '>=8'} dependencies: to-regex-range: 5.0.1 + dev: true + + /filter-iterator@0.0.1: + resolution: {integrity: sha512-v4lhL7Qa8XpbW3LN46CEnmhGk3eHZwxfNl5at20aEkreesht4YKb/Ba3BUIbnPhAC/r3dmu7ABaGk6MAvh2alA==} + dev: true + + /filter-obj@1.1.0: + resolution: {integrity: 
sha512-8rXg1ZnX7xzy2NGDVkBVaAy+lSlPNwad13BtgSlLuxfIslyt5Vg64U7tFcCt4WS1R0hvtnQybT/IyCkGZ3DpXQ==} + engines: {node: '>=0.10.0'} + dev: true /filter-obj@5.1.0: resolution: {integrity: sha512-qWeTREPoT7I0bifpPUXtxkZJ1XJzxWtfoWWkdVGqa+eCr3SHW/Ocp89o8vLvbUuQnadybJpjOKu4V+RwO6sGng==} @@ -10249,11 +10558,6 @@ packages: rimraf: 5.0.5 dev: true - /flat@5.0.2: - resolution: {integrity: sha512-b6suED+5/3rTpUBdG1gupIl8MPFCAMA0QXwmljLhvCUKcUvdE4gWky9zpuGCcXHOsz4J9wPGNWq6OKpmIzz3hQ==} - hasBin: true - dev: false - /flatted@3.2.9: resolution: {integrity: sha512-36yxDn5H7OFZQla0/jFJmbIKTdZAQHngCedGxiMmpNfEZM0sdEeT+WczLQrjK6D7o2aiyLYDnkw0R3JK0Qv1RQ==} dev: true @@ -10309,16 +10613,12 @@ packages: asynckit: 0.4.0 combined-stream: 1.0.8 mime-types: 2.1.35 + dev: true /format-util@1.0.5: resolution: {integrity: sha512-varLbTj0e0yVyRpqQhuWV+8hlePAgaoFRhNFj50BNjEIrw1/DphHSObtqwskVCPWNgzwPoQrZAbfa/SBiicNeg==} dev: true - /format@0.2.2: - resolution: {integrity: sha512-wzsgA6WOq+09wrU1tsJ09udeR/YZRaeArL9e1wPbFg3GG2yDnC2ldKpxs4xunpFF9DgqCqOIra3bc1HWrJ37Ww==} - engines: {node: '>=0.4.x'} - dev: false - /forwarded@0.2.0: resolution: {integrity: sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow==} engines: {node: '>= 0.6'} @@ -10414,6 +10714,7 @@ packages: engines: {node: ^8.16.0 || ^10.6.0 || >=11.0.0} os: [darwin] requiresBuild: true + dev: true optional: true /function-bind@1.1.2: @@ -10449,6 +10750,7 @@ packages: has-proto: 1.0.1 has-symbols: 1.0.3 hasown: 2.0.0 + dev: true /get-intrinsic@1.2.3: resolution: {integrity: sha512-JIcZczvcMVE7AUOP+X72bh8HqHBRxFdz5PDHYtNG/lE3yk9b3KZBJlwFcTyPYjg3L4RLLmZJzvjxhaZVapxFrQ==} @@ -10553,15 +10855,12 @@ packages: resolution: {integrity: sha512-wIh+gKBI9Nshz2o46B0B3f5k/W+WI9ZAv6y5Dn5WJ5SK1t0TnDimB4WE5rmTD05ZAIn8HALCZVmCsvj0w0v0lw==} dev: true - /github-slugger@2.0.0: - resolution: {integrity: sha512-IaOQ9puYtjrkq7Y0Ygl9KDZnrf/aiUJYUpVf89y8kyaxbRG7Y1SrX/jaumrv81vc61+kiMempujsM3Yw7w5qcw==} - dev: false - 
/glob-parent@5.1.2: resolution: {integrity: sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==} engines: {node: '>= 6'} dependencies: is-glob: 4.0.3 + dev: true /glob-parent@6.0.2: resolution: {integrity: sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==} @@ -10698,6 +10997,7 @@ packages: resolution: {integrity: sha512-d65bNlIadxvpb/A2abVdlqKqV563juRnZ1Wtk6s1sIR8uNsXR70xqIzVqxVf1eTqDunwT2MkczEeaezCKTZhwA==} dependencies: get-intrinsic: 1.2.2 + dev: true /graceful-fs@4.2.11: resolution: {integrity: sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==} @@ -10750,18 +11050,25 @@ packages: engines: {node: '>=8'} dev: true + /has-own-property@0.1.0: + resolution: {integrity: sha512-14qdBKoonU99XDhWcFKZTShK+QV47qU97u8zzoVo9cL5TZ3BmBHXogItSt9qJjR0KUMFRhcCW8uGIGl8nkl7Aw==} + dev: true + /has-property-descriptors@1.0.1: resolution: {integrity: sha512-VsX8eaIewvas0xnvinAe9bw4WfIeODpGYikiWYLH+dma0Jw6KHYqWiWfhQlgOVK8D6PvjubK5Uc4P0iIhIcNVg==} dependencies: get-intrinsic: 1.2.2 + dev: true /has-proto@1.0.1: resolution: {integrity: sha512-7qE+iP+O+bgF9clE5+UoBFzE65mlBiVj3tKCrlNQ0Ogwm0BjpT/gK4SlLYDMybDh5I3TCTKnPPa0oMG7JDYrhg==} engines: {node: '>= 0.4'} + dev: true /has-symbols@1.0.3: resolution: {integrity: sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A==} engines: {node: '>= 0.4'} + dev: true /has-tostringtag@1.0.0: resolution: {integrity: sha512-kFjcSNhnlGV1kyoGk7OXKSawH5JOb/LzUc5w9B02hOTO0dfFRjbHQKvg1d6cf3HbeUmtU9VbbV3qzZ2Teh97WQ==} @@ -10783,26 +11090,6 @@ packages: dependencies: function-bind: 1.1.2 - /hast-util-has-property@1.0.4: - resolution: {integrity: sha512-ghHup2voGfgFoHMGnaLHOjbYFACKrRh9KFttdCzMCbFoBMJXiNi2+XTrPP8+q6cDJM/RSqlCfVWrjp1H201rZg==} - dev: false - - /hast-util-has-property@2.0.1: - resolution: {integrity: 
sha512-X2+RwZIMTMKpXUzlotatPzWj8bspCymtXH3cfG3iQKV+wPF53Vgaqxi/eLqGck0wKq1kS9nvoB1wchbCPEL8sg==} - dev: false - - /hast-util-heading-rank@2.1.1: - resolution: {integrity: sha512-iAuRp+ESgJoRFJbSyaqsfvJDY6zzmFoEnL1gtz1+U8gKtGGj1p0CVlysuUAUjq95qlZESHINLThwJzNGmgGZxA==} - dependencies: - '@types/hast': 2.3.4 - dev: false - - /hast-util-to-string@2.0.0: - resolution: {integrity: sha512-02AQ3vLhuH3FisaMM+i/9sm4OXGSq1UhOOCpTLLQtHdL3tZt7qil69r8M8iDkZYyC0HCFylcYoP+8IO7ddta1A==} - dependencies: - '@types/hast': 2.3.4 - dev: false - /hast-util-whitespace@2.0.1: resolution: {integrity: sha512-nAxA0v8+vXSBDt3AnRUNjyRIQ0rD+ntpbAp4LnPkumc5M9yUbSMa4XDU9Q6etY4f1Wp4bNgvc1yjiZtsTTrSng==} dev: false @@ -10834,6 +11121,13 @@ packages: lru-cache: 6.0.0 dev: true + /hosted-git-info@7.0.1: + resolution: {integrity: sha512-+K84LB1DYwMHoHSgaOY/Jfhw3ucPmSET5v98Ke/HdNSw4a0UktWzyW1mjhjpuxxTqOOsfWT/7iVshHmVZ4IpOA==} + engines: {node: ^16.14.0 || >=18.0.0} + dependencies: + lru-cache: 10.2.0 + dev: true + /hpagent@1.2.0: resolution: {integrity: sha512-A91dYTeIB6NoXG+PxTQpCCDDnfHsW9kc06Lvpu1TEe9gnd6ZFeiBoRO9JvzEv6xK7EX97/dUE8g/vBMTqTS3CA==} engines: {node: '>=14'} @@ -11001,6 +11295,10 @@ packages: resolution: {integrity: sha512-gchesWBzyvGHRO9W8tzUWFDycow5gwjvFKfyV9FF32Y7F50yZMp7mP+T2mJIWFx49zicqyC4uefHM17o6xKIVQ==} dev: false + /identity-function@1.0.0: + resolution: {integrity: sha512-kNrgUK0qI+9qLTBidsH85HjDLpZfrrS0ElquKKe/fJFdB3D7VeKdXXEvOPDUHSHOzdZKCAAaQIWWyp0l2yq6pw==} + dev: true + /ieee754@1.2.1: resolution: {integrity: sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==} dev: true @@ -11033,6 +11331,7 @@ packages: /immutable@4.3.5: resolution: {integrity: sha512-8eabxkth9gZatlwl5TBuJnCsoTADlL6ftEr7A4qgdaTsPyreilDSnUk57SO+jfKcNtxPa22U5KK6DSeAYhpBJw==} + dev: true /import-fresh@3.3.0: resolution: {integrity: sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw==} @@ -11060,6 +11359,10 @@ packages: 
engines: {node: '>=8'} dev: true + /individual@3.0.0: + resolution: {integrity: sha512-rUY5vtT748NMRbEMrTNiFfy29BgGZwGXUi2NFUVMWQrogSLzlJvQV9eeMWi+g1aVaQ53tpyLAQtd5x/JH0Nh1g==} + dev: true + /inflight@1.0.6: resolution: {integrity: sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==} dependencies: @@ -11133,17 +11436,6 @@ packages: engines: {node: '>=8'} dev: true - /is-alphabetical@2.0.1: - resolution: {integrity: sha512-FWyyY60MeTNyeSRpkM2Iry0G9hpr7/9kD40mD/cGQEuilcZYS4okz8SN2Q6rLCJ8gbCt6fN+rC+6tMGS99LaxQ==} - dev: false - - /is-alphanumerical@2.0.1: - resolution: {integrity: sha512-hmbYhX/9MUMF5uh7tOXyK/n0ZvWpad5caBA17GsC6vyuCqaWliRG5K1qS9inmUhEMaOBIW7/whAnSwveW/LtZw==} - dependencies: - is-alphabetical: 2.0.1 - is-decimal: 2.0.1 - dev: false - /is-arguments@1.1.1: resolution: {integrity: sha512-8Q7EARjzEnKpt/PCD7e1cgUS0a6X8u5tdSiMqXhojOdoV9TsMsiO+9VLC5vAmO8N7/GmXn7yjR8qnA6bVAEzfA==} engines: {node: '>= 0.4'} @@ -11171,10 +11463,6 @@ packages: /is-arrayish@0.2.1: resolution: {integrity: sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg==} - /is-arrayish@0.3.2: - resolution: {integrity: sha512-eVRqCvVlZbuw3GrM63ovNSNAeA1K16kaR/LRY/92w0zxQ5/1YzwblUX652i4Xs9RwAGjW9d9y6X88t8OaAJfWQ==} - dev: false - /is-async-function@2.0.0: resolution: {integrity: sha512-Y1JXKrfykRJGdlDwdKlLpLyMIiWqWvuSd17TvZk68PLAOGOoF4Xyav1z0Xhoi+gCYjZVeC5SI+hYFOfvXmGRCA==} engines: {node: '>= 0.4'} @@ -11193,6 +11481,7 @@ packages: engines: {node: '>=8'} dependencies: binary-extensions: 2.2.0 + dev: true /is-boolean-object@1.1.2: resolution: {integrity: sha512-gDYaKHJmnj4aWxyj6YHyXVpdQawtVLHU5cb+eztPGczf6cjuTdwve5ZIEfgXqH4e57An1D1AKf8CZ3kYrQRqYA==} @@ -11231,10 +11520,6 @@ packages: has-tostringtag: 1.0.0 dev: true - /is-decimal@2.0.1: - resolution: {integrity: sha512-AAB9hiomQs5DXWcRB1rqsxGUstbRroFOPPVAomNk/3XHR5JyEZChOyTWe2oayKnsSsr/kcGqF+z6yuH6HHpN0A==} - dev: false - /is-deflate@1.0.0: resolution: 
{integrity: sha512-YDoFpuZWu1VRXlsnlYMzKyVRITXj7Ej/V9gXQ2/pAe7X1J7M/RNOqaIYi6qUn+B7nGyB9pDXrv02dsB58d2ZAQ==} dev: true @@ -11248,6 +11533,7 @@ packages: /is-extglob@2.1.1: resolution: {integrity: sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==} engines: {node: '>=0.10.0'} + dev: true /is-finalizationregistry@1.0.2: resolution: {integrity: sha512-0by5vtUJs8iFQb5TYUHHPudOR+qXYIMKtiUzvLIZITZUjknFmziyBJuLhVRc+Ds0dREFlskDNJKYIdIzu/9pfw==} @@ -11281,16 +11567,13 @@ packages: engines: {node: '>=0.10.0'} dependencies: is-extglob: 2.1.1 + dev: true /is-gzip@1.0.0: resolution: {integrity: sha512-rcfALRIb1YewtnksfRIHGcIY93QnK8BIQ/2c9yDYcG/Y6+vRoJuTWBmmSEbyLLYtXm7q35pHOHbZFQBaLrhlWQ==} engines: {node: '>=0.10.0'} dev: true - /is-hexadecimal@2.0.1: - resolution: {integrity: sha512-DgZQp241c8oO6cA1SbTEWiXeoxV42vlcJxgH+B3hi1AiqqKruZR3ZGF8In3fj4+/y/7rHvlOZLZtgJ/4ttYGZg==} - dev: false - /is-installed-globally@0.4.0: resolution: {integrity: sha512-iwGqO3J21aaSkC7jWnHP/difazwS7SFeIqxv6wEtLU8Y5KlzFTjyqcSIT0d8s4+dDhKytsk9PJZ2BkS5eZwQRQ==} engines: {node: '>=10'} @@ -11304,6 +11587,11 @@ packages: engines: {node: '>=8'} dev: true + /is-iterable@1.1.1: + resolution: {integrity: sha512-EdOZCr0NsGE00Pot+x1ZFx9MJK3C6wy91geZpXwvwexDLJvA4nzYyZf7r+EIwSeVsOLDdBz7ATg9NqKTzuNYuQ==} + engines: {node: '>= 4'} + dev: true + /is-map@2.0.2: resolution: {integrity: sha512-cOZFQQozTha1f4MxLFzlgKYPTyj26picdZTx82hbc/Xf4K/tZOOXSCkMvU4pKioRXGDLJRn0GM7Upe7kR721yg==} dev: true @@ -11328,9 +11616,15 @@ packages: has-tostringtag: 1.0.0 dev: true + /is-number@4.0.0: + resolution: {integrity: sha512-rSklcAIlf1OmFdyAqbnWTLVelsQ58uvZ66S/ZyawjWqIviTWCjg2PzVGw8WUA+nNuPTqb4wgA+NszrJ+08LlgQ==} + engines: {node: '>=0.10.0'} + dev: true + /is-number@7.0.0: resolution: {integrity: sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==} engines: {node: '>=0.12.0'} + dev: true /is-path-cwd@2.2.0: resolution: {integrity: 
sha512-w942bTcih8fdJPJmQHFzkS76NEP8Kzzvmw92cXsazb8intwLqPibPPdXf4ANdKV3rYMuuQYGIWtvz9JilB3NFQ==} @@ -11483,6 +11777,11 @@ packages: resolution: {integrity: sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==} dev: true + /isexe@3.1.1: + resolution: {integrity: sha512-LpB/54B+/2J5hqQ7imZHfdU31OlgQqx7ZicVlkm9kzg9/w8GKLEcFfJl/t7DCEDueOyBAD6zCCwTO6Fzs0NoEQ==} + engines: {node: '>=16'} + dev: true + /isobject@3.0.1: resolution: {integrity: sha512-WhB9zCku7EGTj/HQQRz5aUQEUeoQZH2bWcltRErOpymJ4boYE6wL9Tbr23krRPSZ+C5zqNSrSw+Cc7sZZ4b7vg==} engines: {node: '>=0.10.0'} @@ -11556,6 +11855,11 @@ packages: istanbul-lib-report: 3.0.1 dev: true + /iterable-lookahead@1.0.0: + resolution: {integrity: sha512-hJnEP2Xk4+44DDwJqUQGdXal5VbyeWLaPyDl2AQc242Zr7iqz4DgpQOrEzglWVMGHMDCkguLHEKxd1+rOsmgSQ==} + engines: {node: '>=4'} + dev: true + /iterator.prototype@1.1.2: resolution: {integrity: sha512-DR33HMMr8EzwuRL8Y9D3u2BMj8+RqSE850jfGu59kS7tbmPLzGkZmVSfyCFSDxuZiEY6Rzt3T2NA/qU+NwVj1w==} dependencies: @@ -12020,7 +12324,12 @@ packages: - ts-node dev: true - /joi@17.12.1: + /jiti@1.21.0: + resolution: {integrity: sha512-gFqAIbuKyyso/3G2qhiO2OM6shY6EPP/R0+mkDbyspxKazh8BXDC5FiFsUjlczgdNz/vfra0da2y+aHrusLG/Q==} + hasBin: true + dev: true + + /joi@17.12.1: resolution: {integrity: sha512-vtxmq+Lsc5SlfqotnfVjlViWfOL9nt/avKNbKYizwf6gsCfq9NYY/ceYRMFD8XDdrjJ9abJyScWmhmIiy+XRtQ==} dependencies: '@hapi/hoek': 9.3.0 @@ -12034,6 +12343,10 @@ packages: resolution: {integrity: sha512-HvdH2LzI/EAZcUwA8+0nKNtWHqS+ZmijLA30RwZA0bo7ToCckjK5MkGhjED9KoRcXO6BaGI3I9UIzSA1FKFPOQ==} dev: false + /js-sha256@0.10.1: + resolution: {integrity: sha512-5obBtsz9301ULlsgggLg542s/jqtddfOpV5KJc4hajc9JV8GeY2gZHSVpYBn4nWqAUTJ9v+xwtbJ1mIBgIH5Vw==} + dev: false + /js-tokens@4.0.0: resolution: {integrity: sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==} @@ -12159,6 +12472,11 @@ packages: /json-parse-even-better-errors@2.3.1: resolution: 
{integrity: sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==} + /json-parse-even-better-errors@3.0.1: + resolution: {integrity: sha512-aatBvbL26wVUCLmbWdCpeu9iF5wOyWpagiKkInA+kfws3sWdBrTnsvN2CKcyCYyUrc7rebNBlK6+kteg7ksecg==} + engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} + dev: true + /json-schema-ref-parser@5.1.3: resolution: {integrity: sha512-CpDFlBwz/6la78hZxyB9FECVKGYjIIl3Ms3KLqFj99W7IIb7D00/RDgc++IGB4BBALl0QRhh5m4q5WNSopvLtQ==} deprecated: Please switch to @apidevtools/json-schema-ref-parser @@ -12260,6 +12578,19 @@ packages: resolution: {integrity: sha512-UfpWE/VZn0iP50d8cz9NrZLM9lSWhcJ+0Gt/nm4by88UL+J1SiKN8/5dkjMmbEzwL2CAe+67GsegCbIKtbp75A==} dev: false + /jwt-decode@4.0.0: + resolution: {integrity: sha512-+KJGIyHgkGuIq3IEBNftfhW/LfWhXUIY6OmyVWjliu5KH1y0fw7VQ8YndE2O4qZdMSd9SqbnC8GOcZEy0Om7sA==} + engines: {node: '>=18'} + dev: false + + /keycloak-js@23.0.7: + resolution: {integrity: sha512-OmszsKzBhhm5yP4W1q/tMd+nNnKpOAdeVYcoGhphlv8Fj1bNk4wRTYzp7pn5BkvueLz7fhvKHz7uOc33524YrA==} + dependencies: + base64-js: 1.5.1 + js-sha256: 0.10.1 + jwt-decode: 4.0.0 + dev: false + /keyv@4.5.4: resolution: {integrity: sha512-oxVHkHR/EJf2CNXnWxRLW6mg7JyCCUcG0DtEGmL2ctUo1PNTin1PUil+r/+4r5MpVgC/fn1kjsx7mjSujKqIpw==} dependencies: @@ -12281,6 +12612,43 @@ packages: engines: {node: '>=6'} dev: false + /knip@5.2.2(@types/node@18.16.3)(typescript@5.0.2): + resolution: {integrity: sha512-4HMMUFk34KOE37NzmDnxWhBH6WMfStqN5jTPGXS7lq+Z6WvQxjWMo/ewuhPje4+BBN6LYWw+aiQOsbo9FHYhpw==} + engines: {node: '>=18.6.0'} + hasBin: true + peerDependencies: + '@types/node': '>=18' + typescript: '>=5.0.4' + dependencies: + '@ericcornelissen/bash-parser': 0.5.2 + '@nodelib/fs.walk': 2.0.0 + '@npmcli/map-workspaces': 3.0.4 + '@npmcli/package-json': 5.0.0 + '@pnpm/logger': 5.0.0 + '@pnpm/workspace.pkgs-graph': 2.0.15(@pnpm/logger@5.0.0) + '@snyk/github-codeowners': 1.1.0 + '@types/node': 18.16.3 + '@types/picomatch': 2.3.3 + easy-table: 1.2.0 + 
fast-glob: 3.3.2 + jiti: 1.21.0 + js-yaml: 4.1.0 + micromatch: 4.0.5 + minimist: 1.2.8 + picocolors: 1.0.0 + picomatch: 4.0.1 + pretty-ms: 9.0.0 + smol-toml: 1.1.4 + strip-json-comments: 5.0.1 + summary: 2.1.0 + typescript: 5.0.2 + zod: 3.22.4 + zod-validation-error: 3.0.3(zod@3.22.4) + transitivePeerDependencies: + - bluebird + - domexception + dev: true + /known-css-properties@0.29.0: resolution: {integrity: sha512-Ne7wqW7/9Cz54PDt4I3tcV+hAyat8ypyOGzYRJQfdxnnjeWsTxt1cy8pjvvKeI5kfXuyvULyeeAvwvvtAX3ayQ==} dev: true @@ -12449,6 +12817,16 @@ packages: wrap-ansi: 7.0.0 dev: true + /load-json-file@6.2.0: + resolution: {integrity: sha512-gUD/epcRms75Cw8RT1pUdHugZYM5ce64ucs2GEISABwkRsOQr0q2wm/MV2TKThycIe5e0ytRweW2RZxclogCdQ==} + engines: {node: '>=8'} + dependencies: + graceful-fs: 4.2.11 + parse-json: 5.2.0 + strip-bom: 4.0.0 + type-fest: 0.6.0 + dev: true + /locate-path@3.0.0: resolution: {integrity: sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A==} engines: {node: '>=6'} @@ -12478,6 +12856,10 @@ packages: /lodash.camelcase@4.3.0: resolution: {integrity: sha512-TwuEnCnxbc3rAvhf/LbG7tJUDzhqXyFnv3dtzLOPgCG/hODL7WFnsbwktkD7yUV0RrreP/l1PALq/YSg6VvjlA==} + /lodash.curry@4.1.1: + resolution: {integrity: sha512-/u14pXGviLaweY5JI0IUzgzF2J6Ne8INyzAZjImcryjgkZ+ebruBxy2/JaOOkTqScddcYtakjhSaeemV8lR0tA==} + dev: true + /lodash.debounce@4.0.8: resolution: {integrity: sha512-FT1yDzDYEoYWhnSGnpE/4Kj1fLZkDFyqRb7fNt6FdYOSxlUWAtp42Eh6Wb0rGIv/m9Bgo7x4GhQbm5Ys4SG5ow==} dev: true @@ -12598,6 +12980,12 @@ packages: hasBin: true dev: true + /magic-string@0.16.0: + resolution: {integrity: sha512-c4BEos3y6G2qO0B9X7K0FVLOPT9uGrjYwYRLFmDqyl5YMboUviyecnXWp94fJTSMwPw2/sf+CEYt5AGpmklkkQ==} + dependencies: + vlq: 0.2.3 + dev: true + /magic-string@0.25.9: resolution: {integrity: sha512-RmF0AsMzgt25qzqqLc1+MbHmhdx0ojF2Fvs4XnOqz2ZOBXzzkEwc/dJQZCYHAn7v1jbVOjAZfK8msRn4BxO4VQ==} dependencies: @@ -12650,11 +13038,23 @@ packages: tmpl: 1.0.5 dev: true + 
/map-age-cleaner@0.1.3: + resolution: {integrity: sha512-bJzx6nMoP6PDLPBFmg7+xRKeFZvFboMrGlxmNj9ClvX53KrmvM5bXFXEWjbz4cz1AFn+jWJ9z/DJSz7hrs0w3w==} + engines: {node: '>=6'} + dependencies: + p-defer: 1.0.0 + dev: true + /map-obj@1.0.1: resolution: {integrity: sha512-7N/q3lyZ+LVCp7PzuxrJr4KMbBE2hW7BT7YNia330OFxIf4d3r5zVpicP2650l7CPN6RM9zOJRl3NGpqSiw3Eg==} engines: {node: '>=0.10.0'} dev: true + /map-obj@2.0.0: + resolution: {integrity: sha512-TzQSV2DiMYgoF5RycneKVUzIa9bQsj/B3tTgsE3dOGqlzHnGIDaC7XBE7grnA+8kZPnfqSGFe95VHc2oc0VFUQ==} + engines: {node: '>=4'} + dev: true + /map-obj@4.3.0: resolution: {integrity: sha512-hdN1wVrZbb29eBGiGjJbeP8JbKjq1urkHJ/LIP/NY48MZ1QVXUsQBV1G1zvYFHn1XE06cwjBsOI2K3Ulnj1YXQ==} engines: {node: '>=8'} @@ -12705,17 +13105,6 @@ packages: unist-util-visit: 4.1.2 dev: false - /mdast-util-directive@2.2.2: - resolution: {integrity: sha512-6BuW4dFkCbTIf9peVMXdtWylI6ovMidVjnHyJpx7IDhwk3GosIgUs87Rl3x6T6kP5iAf1qIE3lMn6CgWw40d+g==} - dependencies: - '@types/mdast': 3.0.10 - '@types/unist': 2.0.6 - mdast-util-to-markdown: 1.5.0 - parse-entities: 4.0.0 - stringify-entities: 4.0.3 - unist-util-visit-parents: 5.1.1 - dev: false - /mdast-util-find-and-replace@2.2.1: resolution: {integrity: sha512-SobxkQXFAdd4b5WmEakmkVoh18icjQRxGy5OWTCzgsLRm1Fu/KCtwD1HIQSsmq5ZRjVH0Ehwg6/Fn3xIUk+nKw==} dependencies: @@ -12743,12 +13132,6 @@ packages: - supports-color dev: false - /mdast-util-frontmatter@1.0.0: - resolution: {integrity: sha512-7itKvp0arEVNpCktOET/eLFAYaZ+0cNjVtFtIPxgQ5tV+3i+D4SDDTjTzPWl44LT59PC+xdx+glNTawBdF98Mw==} - dependencies: - micromark-extension-frontmatter: 1.0.0 - dev: false - /mdast-util-gfm-autolink-literal@1.0.2: resolution: {integrity: sha512-FzopkOd4xTTBeGXhXSBU0OCDDh5lUj2rd+HQqG92Ld+jL4lpUfgX2AT2OHAVP9aEeDKp7G92fuooSZcYJA3cRg==} dependencies: @@ -12868,6 +13251,22 @@ packages: engines: {node: '>= 0.6'} dev: true + /mem@6.1.1: + resolution: {integrity: 
sha512-Ci6bIfq/UgcxPTYa8dQQ5FY3BzKkT894bwXWXxC/zqs0XgMO2cT20CGkOqda7gZNkmK5VP4x89IGZ6K7hfbn3Q==} + engines: {node: '>=8'} + dependencies: + map-age-cleaner: 0.1.3 + mimic-fn: 3.1.0 + dev: true + + /mem@8.1.1: + resolution: {integrity: sha512-qFCFUDs7U3b8mBDPyz5EToEKoAkgCzqquIgi9nkkR9bixxOVOre+09lbuH7+9Kn2NFpm56M3GUWVbU2hQgdACA==} + engines: {node: '>=10'} + dependencies: + map-age-cleaner: 0.1.3 + mimic-fn: 3.1.0 + dev: true + /memoize-one@6.0.0: resolution: {integrity: sha512-rkpe71W0N0c0Xz6QD0eJETuWAJGnJ9afsl1srmwPrI+yBCkge5EycXXbYRyvL29zZVUWQCY7InPRCv3GDXuZNw==} dev: false @@ -12940,26 +13339,6 @@ packages: uvu: 0.5.6 dev: false - /micromark-extension-directive@2.1.2: - resolution: {integrity: sha512-brqLEztt14/73snVXYsq9Cv6ng67O+Sy69ZuM0s8ZhN/GFI9rnyXyj0Y0DaCwi648vCImv7/U1H5TzR7wMv5jw==} - dependencies: - micromark-factory-space: 1.0.0 - micromark-factory-whitespace: 1.0.0 - micromark-util-character: 1.1.0 - micromark-util-symbol: 1.0.1 - micromark-util-types: 1.0.2 - parse-entities: 4.0.0 - uvu: 0.5.6 - dev: false - - /micromark-extension-frontmatter@1.0.0: - resolution: {integrity: sha512-EXjmRnupoX6yYuUJSQhrQ9ggK0iQtQlpi6xeJzVD5xscyAI+giqco5fdymayZhJMbIFecjnE2yz85S9NzIgQpg==} - dependencies: - fault: 2.0.1 - micromark-util-character: 1.1.0 - micromark-util-symbol: 1.0.1 - dev: false - /micromark-extension-gfm-autolink-literal@1.0.3: resolution: {integrity: sha512-i3dmvU0htawfWED8aHMMAzAVp/F0Z+0bPh3YrbTPPL1v4YAlCZpy5rBO5p0LPYiZo0zFVkoYh7vDU7yQSiCMjg==} dependencies: @@ -13199,12 +13578,14 @@ packages: /mime-db@1.52.0: resolution: {integrity: sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==} engines: {node: '>= 0.6'} + dev: true /mime-types@2.1.35: resolution: {integrity: sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==} engines: {node: '>= 0.6'} dependencies: mime-db: 1.52.0 + dev: true /mime@1.6.0: resolution: {integrity: 
sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==} @@ -13223,6 +13604,11 @@ packages: engines: {node: '>=6'} dev: true + /mimic-fn@3.1.0: + resolution: {integrity: sha512-Ysbi9uYW9hFyfrThdDEQuykN4Ey6BuwPD2kpI5ES/nFTDn/98yxYNLZJcgUAKPT/mcrLLKaGzJR9YVxJrIdASQ==} + engines: {node: '>=8'} + dev: true + /mimic-fn@4.0.0: resolution: {integrity: sha512-vqiC06CuhBTUdZH+RYl8sFrL096vA45Ok5ISO6sE/Mr1jRbGH4Csnhi8f3wKVl7x8mO4Au7Ir9D3Oyv1VYMFJw==} engines: {node: '>=12'} @@ -13381,6 +13767,18 @@ packages: resolution: {integrity: sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==} dev: true + /ndjson@2.0.0: + resolution: {integrity: sha512-nGl7LRGrzugTtaFcJMhLbpzJM6XdivmbkdlaGcrk/LXg2KL/YBC6z1g70xh0/al+oFuVFP8N8kiWRucmeEH/qQ==} + engines: {node: '>=10'} + hasBin: true + dependencies: + json-stringify-safe: 5.0.1 + minimist: 1.2.8 + readable-stream: 3.6.2 + split2: 3.2.2 + through2: 4.0.2 + dev: true + /needle@3.3.1: resolution: {integrity: sha512-6k0YULvhpw+RoLNiQCRKOl09Rv1dPLr8hHnVjHqdolKwDrdNyk+Hmrthi4lIGPPz3r39dLx0hsF5s40sZ3Us4Q==} engines: {node: '>= 4.4.x'} @@ -13463,6 +13861,16 @@ packages: whatwg-url: 5.0.0 dev: true + /node-fetch@3.0.0-beta.9: + resolution: {integrity: sha512-RdbZCEynH2tH46+tj0ua9caUHVWrd/RHnRfvly2EVdqGmI3ndS1Vn/xjm5KuGejDt2RNDQsVRLPNd2QPwcewVg==} + engines: {node: ^10.17 || >=12.3} + dependencies: + data-uri-to-buffer: 3.0.1 + fetch-blob: 2.1.2 + transitivePeerDependencies: + - domexception + dev: true + /node-int64@0.4.0: resolution: {integrity: sha512-O5lz91xSOeoXP6DulyHfllpq+Eg00MWitZIbtPfoSEvqIHdl5gfcY6hYzDWnj0qD5tz52PI08u9qUvSVeUBeHw==} dev: true @@ -13512,18 +13920,61 @@ packages: validate-npm-package-license: 3.0.4 dev: true + /normalize-package-data@6.0.0: + resolution: {integrity: sha512-UL7ELRVxYBHBgYEtZCXjxuD5vPxnmvMGq0jp/dGPKKrN7tfsBh2IY7TlJ15WWwdjRWD3RJbnsygUurTK3xkPkg==} + engines: {node: ^16.14.0 || >=18.0.0} + dependencies: + hosted-git-info: 7.0.1 
+ is-core-module: 2.13.1 + semver: 7.5.4 + validate-npm-package-license: 3.0.4 + dev: true + /normalize-path@3.0.0: resolution: {integrity: sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==} engines: {node: '>=0.10.0'} + dev: true /normalize.css@8.0.1: resolution: {integrity: sha512-qizSNPO93t1YUuUhP22btGOo3chcvDFqFaj2TRybP0DMxkHOCTYwp3n34fel4a31ORXy4m1Xq0Gyqpb5m33qIg==} dev: false + /npm-install-checks@6.3.0: + resolution: {integrity: sha512-W29RiK/xtpCGqn6f3ixfRYGk+zRyr+Ew9F2E20BfXxT5/euLdA/Nm7fO7OeTGuAmTs30cpgInyJ0cYe708YTZw==} + engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} + dependencies: + semver: 7.5.4 + dev: true + /npm-normalize-package-bin@1.0.1: resolution: {integrity: sha512-EPfafl6JL5/rU+ot6P3gRSCpPDW5VmIzX959Ob1+ySFUuuYHWHekXpwdUZcKP5C+DS4GEtdJluwBjnsNDl+fSA==} dev: true + /npm-normalize-package-bin@3.0.1: + resolution: {integrity: sha512-dMxCf+zZ+3zeQZXKxmyuCKlIDPGuv8EF940xbkC4kQVDTtqoh6rJFO+JTKSA6/Rwi0getWmtuy4Itup0AMcaDQ==} + engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} + dev: true + + /npm-package-arg@11.0.1: + resolution: {integrity: sha512-M7s1BD4NxdAvBKUPqqRW957Xwcl/4Zvo8Aj+ANrzvIPzGJZElrH7Z//rSaec2ORcND6FHHLnZeY8qgTpXDMFQQ==} + engines: {node: ^16.14.0 || >=18.0.0} + dependencies: + hosted-git-info: 7.0.1 + proc-log: 3.0.0 + semver: 7.5.4 + validate-npm-package-name: 5.0.0 + dev: true + + /npm-pick-manifest@9.0.0: + resolution: {integrity: sha512-VfvRSs/b6n9ol4Qb+bDwNGUXutpy76x6MARw/XssevE0TnctIKcmklJZM5Z7nqs5z5aW+0S63pgCNbpkUNNXBg==} + engines: {node: ^16.14.0 || >=18.0.0} + dependencies: + npm-install-checks: 6.3.0 + npm-normalize-package-bin: 3.0.1 + npm-package-arg: 11.0.1 + semver: 7.5.4 + dev: true + /npm-run-path@4.0.1: resolution: {integrity: sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw==} engines: {node: '>=8'} @@ -13611,6 +14062,7 @@ packages: /object-inspect@1.13.1: resolution: {integrity: 
sha512-5qoj1RUiKOMsCCNLV1CBiPYE10sziTsnmNxkAI/rZhiD63CF7IqdFGC/XzjWjpSgLf0LxXX3bDFIh0E18f6UhQ==} + dev: true /object-is@1.1.5: resolution: {integrity: sha512-3cyDsyHgtmi7I7DfSSI2LDp6SK2lwvtbg0p0R1e0RvTqF5ceGx+K2dfSjm1bKDMVCFEDAQvy+o8c6a7VujOddw==} @@ -13625,6 +14077,15 @@ packages: engines: {node: '>= 0.4'} dev: true + /object-pairs@0.1.0: + resolution: {integrity: sha512-3ECr6K831I4xX/Mduxr9UC+HPOz/d6WKKYj9p4cmC8Lg8p7g8gitzsxNX5IWlSIgFWN/a4JgrJaoAMKn20oKwA==} + dev: true + + /object-values@1.0.0: + resolution: {integrity: sha512-+8hwcz/JnQ9EpLIXzN0Rs7DLsBpJNT/xYehtB/jU93tHYr5BFEO8E+JGQNOSqE7opVzz5cGksKFHt7uUJVLSjQ==} + engines: {node: '>=0.10.0'} + dev: true + /object.assign@4.1.4: resolution: {integrity: sha512-1mxKf0e58bvyjSCtKYY4sRe9itRk3PJpquJOjeIkz885CczcI4IvJJDLPS72oowuSh+pBxUFROpX+TU++hxhZQ==} engines: {node: '>= 0.4'} @@ -13842,6 +14303,11 @@ packages: resolution: {integrity: sha512-o6E5qJV5zkAbIDNhGSIlyOhScKXgQrSRMilfph0clDfM0nEnBOlKlH4sWDmG95BW/CvwNz0vmm7dJVtU2KlMiA==} dev: true + /p-defer@1.0.0: + resolution: {integrity: sha512-wB3wfAxZpk2AzOfUMJNL+d36xothRSyj8EXOa4f6GMqYDN9BJaaSISbsk+wS9abmnebVw95C2Kb5t85UmpCxuw==} + engines: {node: '>=4'} + dev: true + /p-finally@1.0.0: resolution: {integrity: sha512-LICb2p9CB7FS+0eR1oqWnHhp0FljGLZCWBE9aix0Uye9W8LTQPwMTYVGWQWIw9RdQiDg4+epXQODwIYJtSJaow==} engines: {node: '>=4'} @@ -13889,6 +14355,14 @@ packages: aggregate-error: 3.1.0 dev: true + /p-memoize@4.0.1: + resolution: {integrity: sha512-km0sP12uE0dOZ5qP+s7kGVf07QngxyG0gS8sYFvFWhqlgzOsSy+m71aUejf/0akxj5W7gE//2G74qTv6b4iMog==} + engines: {node: '>=10'} + dependencies: + mem: 6.1.1 + mimic-fn: 3.1.0 + dev: true + /p-queue@6.6.2: resolution: {integrity: sha512-RwFpb72c/BhQLEXIZ5K2e+AhgNVmIejGlTgiB9MzZ0e93GRvqZ7uSi0dvRF7/XIXDeNkra2fNHBxTyPDGySpjQ==} engines: {node: '>=8'} @@ -13930,19 +14404,6 @@ packages: dependencies: callsites: 3.1.0 - /parse-entities@4.0.0: - resolution: {integrity: 
sha512-5nk9Fn03x3rEhGaX1FU6IDwG/k+GxLXlFAkgrbM1asuAFl3BhdQWvASaIsmwWypRNcZKHPYnIuOSfIWEyEQnPQ==} - dependencies: - '@types/unist': 2.0.6 - character-entities: 2.0.2 - character-entities-legacy: 3.0.0 - character-reference-invalid: 2.0.1 - decode-named-character-reference: 1.0.2 - is-alphanumerical: 2.0.1 - is-decimal: 2.0.1 - is-hexadecimal: 2.0.1 - dev: false - /parse-json@5.2.0: resolution: {integrity: sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg==} engines: {node: '>=8'} @@ -13952,11 +14413,23 @@ packages: json-parse-even-better-errors: 2.3.1 lines-and-columns: 1.2.4 + /parse-ms@4.0.0: + resolution: {integrity: sha512-TXfryirbmq34y8QBwgqCVLi+8oA3oWx2eAnSn62ITyEhEYaWRlVZ2DvMM9eZbMs/RfxPu/PK/aBLyGj4IrqMHw==} + engines: {node: '>=18'} + dev: true + /parse-node-version@1.0.1: resolution: {integrity: sha512-3YHlOa/JgH6Mnpr05jP9eDG254US9ek25LyIxZlDItp2iJtwyaXQb57lBYLdT3MowkUFYEV2XXNAYIPlESvJlA==} engines: {node: '>= 0.10'} dev: true + /parse-npm-tarball-url@3.0.0: + resolution: {integrity: sha512-InpdgIdNe5xWMEUcrVQUniQKwnggBtJ7+SCwh7zQAZwbbIYZV9XdgJyhtmDSSvykFyQXoe4BINnzKTfCwWLs5g==} + engines: {node: '>=8.15'} + dependencies: + semver: 6.3.1 + dev: true + /parse-srcset@1.0.2: resolution: {integrity: sha512-/2qh0lav6CmI15FzA3i/2Bzk2zCgQhGMkvhOhKNcBVQ1ldgpbfiNTVslmooUmWJcADi1f1kIeynbDRVzNlfR6Q==} dev: false @@ -14020,6 +14493,13 @@ packages: minipass: 4.2.5 dev: true + /path-temp@2.1.0: + resolution: {integrity: sha512-cMMJTAZlion/RWRRC48UbrDymEIt+/YSD/l8NqjneyDw2rDOBQcP5yRkMB4CYGn47KMhZvbblBP7Z79OsMw72w==} + engines: {node: '>=8.15'} + dependencies: + unique-string: 2.0.0 + dev: true + /path-to-regexp@0.1.7: resolution: {integrity: sha512-5DFkuoqlv1uYQKxy8omFBeJPQcdoE07Kv2sferDCrAq1ohOU+MSDswDIbnx3YAM60qIOnYa53wBhXW0EbMonrQ==} dev: true @@ -14133,6 +14613,12 @@ packages: /picomatch@2.3.1: resolution: {integrity: sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==} engines: 
{node: '>=8.6'} + dev: true + + /picomatch@4.0.1: + resolution: {integrity: sha512-xUXwsxNjwTQ8K3GnT4pCJm+xq3RUPQbmkYJTP5aFIfNIvbcc/4MUxgBaaRSZJ6yGJZiGSyYlM6MzwTsRk8SYCg==} + engines: {node: '>=12'} + dev: true /pidtree@0.5.0: resolution: {integrity: sha512-9nxspIM7OpZuhBxPg73Zvyq7j1QMPMPsGKTqRc2XOaFQauDvoNz9fM1Wdkjmeo7l9GXOZiRs97sPkuayl39wjA==} @@ -14389,10 +14875,22 @@ packages: engines: {node: '>= 0.8'} dev: true + /pretty-ms@9.0.0: + resolution: {integrity: sha512-E9e9HJ9R9NasGOgPaPE8VMeiPKAyWR5jcFpNnwIejslIhWqdqOrb2wShBsncMPUb+BcCd2OPYfh7p2W6oemTng==} + engines: {node: '>=18'} + dependencies: + parse-ms: 4.0.0 + dev: true + /printable-characters@1.0.42: resolution: {integrity: sha512-dKp+C4iXWK4vVYZmYSd0KBH5F/h1HoZRsbJ82AVKRO3PEo8L4lBS/vLwhVtpwwuYcoIsVY+1JYKR268yn480uQ==} dev: true + /proc-log@3.0.0: + resolution: {integrity: sha512-++Vn7NS4Xf9NacaU9Xq3URUuqZETPsf8L4j5/ckhaRYsfPeRyzGw+iDjFhV/Jr3uNmTvvddEJFWh5R1gRgUH8A==} + engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} + dev: true + /process-nextick-args@2.0.1: resolution: {integrity: sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==} dev: true @@ -14407,6 +14905,23 @@ packages: engines: {node: '>=0.4.0'} dev: true + /promise-inflight@1.0.1: + resolution: {integrity: sha512-6zWPyEOFaQBJYcGMHBKTKJ3u6TBsnMFOIZSa6ce1e/ZrrsOlnHRHbabMjLiBYKp+n44X9eUI6VUPaukCXHuG4g==} + peerDependencies: + bluebird: '*' + peerDependenciesMeta: + bluebird: + optional: true + dev: true + + /promise-retry@2.0.1: + resolution: {integrity: sha512-y+WKFlBR8BGXnsNlIHFGPZmyDf3DFMoLhaflAnyZgV6rG6xu+JwesTo2Q9R6XwYmtmwAFCkAk3e35jEdoeh/3g==} + engines: {node: '>=10'} + dependencies: + err-code: 2.0.3 + retry: 0.12.0 + dev: true + /prompts@2.4.2: resolution: {integrity: sha512-NxNv/kLguCA7p3jE8oL2aEBsrJWgAakBpgmgK6lpPWV+WuOmY6r2/zbAVnP+T8bQlA0nzHXSJSJW0Hq7ylaD2Q==} engines: {node: '>= 6'} @@ -14515,10 +15030,6 @@ packages: pump: 2.0.1 dev: true - /punycode@1.4.1: - resolution: {integrity: 
sha512-jmYNElW7yvO7TV33CjSmvSiE2yco3bV2czu/OzDKdMNVZQWfxCblURLhf+47syQRBntjfLdd/H0egrzIG+oaFQ==} - dev: false - /punycode@2.3.1: resolution: {integrity: sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==} engines: {node: '>=6'} @@ -14567,6 +15078,7 @@ packages: engines: {node: '>=0.6'} dependencies: side-channel: 1.0.4 + dev: true /query-string@8.1.0: resolution: {integrity: sha512-BFQeWxJOZxZGix7y+SByG3F36dA0AbTy9o6pSmKFcFz7DAj0re9Frkty3saBn3nHo3D0oZJ/+rx3r8H8r8Jbpw==} @@ -15200,6 +15712,14 @@ packages: graceful-fs: 4.2.11 dev: true + /read-package-json-fast@3.0.2: + resolution: {integrity: sha512-0J+Msgym3vrLOUB3hzQCuZHII0xkNGCtz/HJH9xZshwv9DbDwkw1KaE3gx/e2J5rpEY5rtOy6cyhKOPrkP7FZw==} + engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} + dependencies: + json-parse-even-better-errors: 3.0.1 + npm-normalize-package-bin: 3.0.1 + dev: true + /read-package-json@2.1.2: resolution: {integrity: sha512-D1KmuLQr6ZSJS0tW8hf3WGpRlwszJOXZ3E8Yd/DNRaM5d+1wVRZdHlpGBLAuovjr28LbWvjpWkBHMxpRGGjzNA==} dependencies: @@ -15264,6 +15784,7 @@ packages: engines: {node: '>=8.10.0'} dependencies: picomatch: 2.3.1 + dev: true /recast@0.23.4: resolution: {integrity: sha512-qtEDqIZGVcSZCHniWwZWbRy79Dc6Wp3kT/UmDA2RJKBPg7+7k51aQBZirHmUGn5uvHf2rg8DkjizrN26k61ATw==} @@ -15390,35 +15911,6 @@ packages: jsesc: 0.5.0 dev: true - /rehype-slug@5.1.0: - resolution: {integrity: sha512-Gf91dJoXneiorNEnn+Phx97CO7oRMrpi+6r155tTxzGuLtm+QrI4cTwCa9e1rtePdL4i9tSO58PeSS6HWfgsiw==} - dependencies: - '@types/hast': 2.3.4 - github-slugger: 2.0.0 - hast-util-has-property: 2.0.1 - hast-util-heading-rank: 2.1.1 - hast-util-to-string: 2.0.0 - unified: 10.1.2 - unist-util-visit: 4.1.2 - dev: false - - /rehype-urls@1.1.1: - resolution: {integrity: sha512-ct9Kb/nAL6oe/O5fDc0xjiqm8Z9xgXdorOdDhZAWx7awucyiuYXU7Dax+23Gu24nnGwtdaCW6zslKAYzlEW1lw==} - dependencies: - hast-util-has-property: 1.0.4 - stdopt: 2.2.0 - unist-util-visit: 1.4.1 - dev: false - - /remark-directive@2.0.1: - 
resolution: {integrity: sha512-oosbsUAkU/qmUE78anLaJePnPis4ihsE7Agp0T/oqTzvTea8pOiaYEtfInU/+xMOVTS9PN5AhGOiaIVe4GD8gw==} - dependencies: - '@types/mdast': 3.0.10 - mdast-util-directive: 2.2.2 - micromark-extension-directive: 2.1.2 - unified: 10.1.2 - dev: false - /remark-external-links@8.0.0: resolution: {integrity: sha512-5vPSX0kHoSsqtdftSHhIYofVINC8qmp0nctkeU9YoJwV3YfiBRiI6cbFRJ0oI/1F9xS+bopXG0m2KS8VFscuKA==} dependencies: @@ -15429,15 +15921,6 @@ packages: unist-util-visit: 2.0.3 dev: true - /remark-frontmatter@4.0.1: - resolution: {integrity: sha512-38fJrB0KnmD3E33a5jZC/5+gGAC2WKNiPw1/fdXJvijBlhA7RCsvJklrYJakS0HedninvaCYW8lQGf9C918GfA==} - dependencies: - '@types/mdast': 3.0.10 - mdast-util-frontmatter: 1.0.0 - micromark-extension-frontmatter: 1.0.0 - unified: 10.1.2 - dev: false - /remark-gfm@3.0.1: resolution: {integrity: sha512-lEFDoi2PICJyNrACFOfDD3JlLkuSbOa5Wd8EPt06HUdptv8Gn0bxYTdbU/XXQ3swAPkEaGxxPN9cbnMHvVu1Ig==} dependencies: @@ -15499,6 +15982,14 @@ packages: resolution: {integrity: sha512-7pXIJqJOq5tFgG1A2Zxti3Ht8jJF337m4sowbuHsW30ZnkQFnDzy9qBNhgzX8ZLW4+UBcXiiR7SwR6pokHsxiA==} dev: false + /rename-overwrite@5.0.0: + resolution: {integrity: sha512-vSxE5Ww7Jnyotvaxi3Dj0vOMoojH8KMkBfs9xYeW/qNfJiLTcC1fmwTjrbGUq3mQSOCxkG0DbdcvwTUrpvBN4w==} + engines: {node: '>=12.10'} + dependencies: + '@zkochan/rimraf': 2.1.3 + fs-extra: 10.1.0 + dev: true + /request-progress@3.0.0: resolution: {integrity: sha512-MnWzEHHaxHO2iWiQuHrUPBi/1WeBf5PkxQqNyNvLl9VAYSdXkP8tQ3pBSeCPD+yw0v0Aq1zosWLz0BdeXpWwZg==} dependencies: @@ -15586,11 +16077,20 @@ packages: signal-exit: 3.0.7 dev: true + /retry@0.12.0: + resolution: {integrity: sha512-9LkiTwjUh6rT555DtE9rTX+BKByPfrMzEAtnlEtdEwr3Nkffwiihqe2bWADg+OQRjt9gl6ICdmB/ZFDCGAtSow==} + engines: {node: '>= 4'} + dev: true + /reusify@1.0.4: resolution: {integrity: sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==} engines: {iojs: '>=1.0.0', node: '>=0.10.0'} dev: true + /reverse-arguments@1.0.0: + 
resolution: {integrity: sha512-/x8uIPdTafBqakK0TmPNJzgkLP+3H+yxpUJhCQHsLBg1rYEVNR2D8BRYNWQhVBjyOd7oo1dZRVzIkwMY2oqfYQ==} + dev: true + /rfdc@1.3.0: resolution: {integrity: sha512-V2hovdzFbOi77/WajaSMXk2OLm+xNIeQdMMuB7icj7bk6zi2F8GGAxigcnDFpJHbNyNcgyJDiP+8nOrY5cZGrA==} dev: true @@ -15739,6 +16239,7 @@ packages: chokidar: 3.5.3 immutable: 4.3.5 source-map-js: 1.0.2 + dev: true /sax@1.2.4: resolution: {integrity: sha512-NqVDv9TpANUjFm0N8uM5GxL36UgKi9/atZw+x7YFnQ8ckwFGKrl4xX4yWtrey3UJm5nP1kUbnYgLopqWNSRhWw==} @@ -15825,6 +16326,7 @@ packages: get-intrinsic: 1.2.2 gopd: 1.0.1 has-property-descriptors: 1.0.1 + dev: true /set-function-name@2.0.1: resolution: {integrity: sha512-tMNCiqYVkXIZgc2Hnoy2IvC/f8ezc5koaRFkCjrpWzGpCd3qbZXPzVy9MAZzK1ch/X0jvSkojys3oqJN0qCmdA==} @@ -15867,6 +16369,10 @@ packages: engines: {node: '>=8'} dev: true + /shell-quote-word@1.0.1: + resolution: {integrity: sha512-lT297f1WLAdq0A4O+AknIFRP6kkiI3s8C913eJ0XqBxJbZPGWUNkRQk2u8zk4bEAjUJ5i+fSLwB6z1HzeT+DEg==} + dev: true + /should-equal@2.0.0: resolution: {integrity: sha512-ZP36TMrK9euEuWQYBig9W55WPC7uo37qzAEmbjHz4gfyuXrEUgF8cUvQVO+w+d3OMfPvSRQJ22lSm8MQJ43LTA==} dependencies: @@ -15911,6 +16417,7 @@ packages: call-bind: 1.0.5 get-intrinsic: 1.2.2 object-inspect: 1.13.1 + dev: true /signal-exit@3.0.7: resolution: {integrity: sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==} @@ -15967,6 +16474,11 @@ packages: resolution: {integrity: sha512-NwrtjCg+lZoqhFU8fOwl4ay2ei8PaqCBOUV3/ektPY9trO1yQ1oXEfmHAhKArUVUr/hOHvy5f6AdP17dCM0zMw==} dev: true + /smol-toml@1.1.4: + resolution: {integrity: sha512-Y0OT8HezWsTNeEOSVxDnKOW/AyNXHQ4BwJNbAXlLTF5wWsBvrcHhIkE5Rf8kQMLmgf7nDX3PVOlgC6/Aiggu3Q==} + engines: {node: '>= 18', pnpm: '>= 8'} + dev: true + /snake-case@3.0.4: resolution: {integrity: sha512-LAOh4z89bGQvl9pFfNF8V146i7o7/CqFPbqzYgP+yYzDIDeS9HaNFtXABamRW+AQzEVODcvE79ljJ+8a9YSdMg==} dependencies: @@ -16075,6 +16587,12 @@ packages: engines: {node: '>=12'} dev: false + 
/split2@3.2.2: + resolution: {integrity: sha512-9NThjpgZnifTkJpzTZ7Eue85S49QwpNhZTq6GRJwObb6jnLFNGB7Qm73V5HewTROPyxD0C29xqmaI68bQtV+hg==} + dependencies: + readable-stream: 3.6.2 + dev: true + /split2@4.2.0: resolution: {integrity: sha512-UcjcJOWknrNkF6PLX83qcHM6KHgVKNkV62Y8a5uYDVv9ydGQVwAHMKqHdJje1VTWpljG0WYpCDhrCdAOYH4TWg==} engines: {node: '>= 10.x'} @@ -16110,6 +16628,13 @@ packages: tweetnacl: 0.14.5 dev: true + /ssri@10.0.5: + resolution: {integrity: sha512-bSf16tAFkGeRlUNDjXu8FzaMQt6g2HZJrun7mtMbIPOddxt3GLMSz5VWUWcqTJUPfLEaDIepGxv+bYQW49596A==} + engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} + dependencies: + minipass: 7.0.4 + dev: true + /stack-generator@2.0.10: resolution: {integrity: sha512-mwnua/hkqM6pF4k8SnmZ2zfETsRUpWXREfA/goT8SLCV4iOFa4bzOX2nDipWAZFPTjLvQB82f5yaodMVhK0yJQ==} dependencies: @@ -16175,12 +16700,6 @@ packages: engines: {node: '>= 0.8'} dev: true - /stdopt@2.2.0: - resolution: {integrity: sha512-D/p41NgXOkcj1SeGhfXOwv9z1K6EV3sjAUY5aeepVbgEHv7DpKWLTjhjScyzMWAQCAgUQys1mjH0eArm4cjRGw==} - dependencies: - is-arrayish: 0.3.2 - dev: false - /stop-iteration-iterator@1.0.0: resolution: {integrity: sha512-iCGQj+0l0HOdZ2AEeBADlsRC+vsnDsZsbdSiH1yNSjcfKM7fdpCMfqAL/dwF5BLiw/XhRft/Wax6zQbhq2BcjQ==} engines: {node: '>= 0.4'} @@ -16275,6 +16794,10 @@ packages: strip-ansi: 7.1.0 dev: true + /string.fromcodepoint@0.2.1: + resolution: {integrity: sha512-n69H31OnxSGSZyZbgBlvYIXlrMhJQ0dQAX1js1QDhpaUH6zmU3QYlj07bCwCNlPOu3oRXIubGPl2gDGnHsiCqg==} + dev: true + /string.prototype.matchall@4.0.10: resolution: {integrity: sha512-rGXbGmOEosIQi6Qva94HUjgPs9vKW+dkG7Y8Q5O2OYkWL6wFaTRZO8zM4mhP94uX55wgyrXzfS2aGtGzUL7EJQ==} dependencies: @@ -16326,13 +16849,6 @@ packages: safe-buffer: 5.2.1 dev: true - /stringify-entities@4.0.3: - resolution: {integrity: sha512-BP9nNHMhhfcMbiuQKCqMjhDP5yBCAxsPu4pHFFzJ6Alo9dZgY4VLDPutXqIjpRiMoKdp7Av85Gr73Q5uH9k7+g==} - dependencies: - character-entities-html4: 2.1.0 - character-entities-legacy: 3.0.0 - dev: false - /strip-ansi@6.0.1: 
resolution: {integrity: sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==} engines: {node: '>=8'} @@ -16385,6 +16901,11 @@ packages: engines: {node: '>=8'} dev: true + /strip-json-comments@5.0.1: + resolution: {integrity: sha512-0fk9zBqO67Nq5M/m45qHCJxylV/DhBlIOVExqgOMiCCrzrhU6tCibRXNqE3jwJLftzE9SNuZtYbpzcO+i9FiKw==} + engines: {node: '>=14.16'} + dev: true + /style-to-object@0.3.0: resolution: {integrity: sha512-CzFnRRXhzWIdItT3OmF8SQfWyahHhjq3HwcMNCNLn+N7klOOqPjMeG/4JSu77D7ypZdGvSzvkrbyeTMizz2VrA==} dependencies: @@ -16553,6 +17074,10 @@ packages: - supports-color dev: true + /summary@2.1.0: + resolution: {integrity: sha512-nMIjMrd5Z2nuB2RZCKJfFMjgS3fygbeyGk9PxPPaJR1RIcyN9yn4A63Isovzm3ZtQuEkLBVgMdPup8UeLH7aQw==} + dev: true + /superjson@1.12.3: resolution: {integrity: sha512-0j+U70KUtP8+roVPbwfqkyQI7lBt7ETnuA7KXbTDX3mCKiD/4fXs2ldKSMdt0MCfpTwiMxo20yFU3vu6ewETpQ==} engines: {node: '>=10'} @@ -16767,6 +17292,12 @@ packages: xtend: 4.0.2 dev: true + /through2@4.0.2: + resolution: {integrity: sha512-iOqSav00cVxEEICeD7TjLB1sueEL+81Wpzp2bY17uZjZN0pWZPuo4suZ/61VujxmqSGFfgOcNuTZ85QJwNZQpw==} + dependencies: + readable-stream: 3.6.2 + dev: true + /through@2.3.8: resolution: {integrity: sha512-w89qg7PI8wAdvX60bMDP+bFoD5Dvhm9oLheFp5O4a2QF0cSBGsBX4qZmadPMvVqlLJBBci+WqGGOAPvcDeNSVg==} dev: true @@ -16790,11 +17321,28 @@ packages: resolution: {integrity: sha512-/OaKK0xYrs3DmxRYqL/yDc+FxFUVYhDlXMhRmv3z915w2HF1tnN1omB354j8VUGO/hbRzyD6Y3sA7v7GS/ceog==} engines: {node: '>=4'} + /to-no-case@1.0.2: + resolution: {integrity: sha512-Z3g735FxuZY8rodxV4gH7LxClE4H0hTIyHNIHdk+vpQxjLm0cwnKXq/OFVZ76SOQmto7txVcwSCwkU5kqp+FKg==} + dev: true + + /to-pascal-case@1.0.0: + resolution: {integrity: sha512-QGMWHqM6xPrcQW57S23c5/3BbYb0Tbe9p+ur98ckRnGDwD4wbbtDiYI38CfmMKNB5Iv0REjs5SNDntTwvDxzZA==} + dependencies: + to-space-case: 1.0.0 + dev: true + /to-regex-range@5.0.1: resolution: {integrity: 
sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==} engines: {node: '>=8.0'} dependencies: is-number: 7.0.0 + dev: true + + /to-space-case@1.0.0: + resolution: {integrity: sha512-rLdvwXZ39VOn1IxGL3V6ZstoTbwLRckQmn/U8ZDLuWwIXNpuZDhQ3AiRUlhTbOXFVE9C+dR51wM0CBDhk31VcA==} + dependencies: + to-no-case: 1.0.2 + dev: true /tocbot@4.25.0: resolution: {integrity: sha512-kE5wyCQJ40hqUaRVkyQ4z5+4juzYsv/eK+aqD97N62YH0TxFhzJvo22RUQQZdO3YnXAk42ZOfOpjVdy+Z0YokA==} @@ -17167,6 +17715,12 @@ packages: react-lifecycles-compat: 3.0.4 dev: false + /unescape-js@1.1.4: + resolution: {integrity: sha512-42SD8NOQEhdYntEiUQdYq/1V/YHwr1HLwlHuTJB5InVVdOSbgI6xu8jK5q65yIzuFCfczzyDF/7hbGzVbyCw0g==} + dependencies: + string.fromcodepoint: 0.2.1 + dev: true + /unicode-canonical-property-names-ecmascript@2.0.0: resolution: {integrity: sha512-yY5PpDlfVIU5+y/BSCxAJRBIS1Zc2dDG3Ujq+sR0U+JjUevW2JhocOF+soROYDSaAezOzOKuyyixhD6mBknSmQ==} engines: {node: '>=4'} @@ -17219,10 +17773,6 @@ packages: resolution: {integrity: sha512-TiWE6DVtVe7Ye2QxOVW9kqybs6cZexNwTwSMVgkfjEReqy/xwGpAXb99OxktoWwmL+Z+Epb0Dn8/GNDYP1wnUw==} dev: false - /unist-util-is@3.0.0: - resolution: {integrity: sha512-sVZZX3+kspVNmLWBPAB6r+7D9ZgAFPNWm66f7YNb420RlQSbn+n8rG8dGZSkrER7ZIXGQYNm5pqC3v3HopH24A==} - dev: false - /unist-util-is@4.1.0: resolution: {integrity: sha512-ZOQSsnce92GrxSqlnEEseX0gi7GH9zTJZ0p9dtu87WRb/37mMPO2Ilx1s/t9vBHrFhbgweUwb+t7cIn5dxPhZg==} dev: true @@ -17243,12 +17793,6 @@ packages: '@types/unist': 2.0.6 dev: false - /unist-util-visit-parents@2.1.2: - resolution: {integrity: sha512-DyN5vD4NE3aSeB+PXYNKxzGsfocxp6asDc2XXE3b0ekO2BaRUpBicbbUygfSvYfUz1IkmjFR1YF7dPklraMZ2g==} - dependencies: - unist-util-is: 3.0.0 - dev: false - /unist-util-visit-parents@3.1.1: resolution: {integrity: sha512-1KROIZWo6bcMrZEwiH2UrXDyalAa0uqzWCxCJj6lPOvTve2WkfgCytoDTPaMnodXh1WrXOq0haVYHj99ynJlsg==} dependencies: @@ -17263,12 +17807,6 @@ packages: unist-util-is: 5.1.1 dev: false - 
/unist-util-visit@1.4.1: - resolution: {integrity: sha512-AvGNk7Bb//EmJZyhtRUnNMEpId/AZ5Ph/KUpTI09WHQuDZHKovQ1oEv3mfmKpWKtoMzyMC4GLBm1Zy5k12fjIw==} - dependencies: - unist-util-visit-parents: 2.1.2 - dev: false - /unist-util-visit@2.0.3: resolution: {integrity: sha512-iJ4/RczbJMkD0712mGktuGpm/U4By4FfDonL7N/9tATGIF4imikjOuagyMY53tnZq3NP6BcmlrHhEKAfGWjh7Q==} dependencies: @@ -17351,13 +17889,6 @@ packages: requires-port: 1.0.0 dev: true - /url@0.11.3: - resolution: {integrity: sha512-6hxOLGfZASQK/cijlZnZJTq8OXAkt/3YGfQX45vvMYXpZoo8NdWZcY73K108Jf759lS1Bv/8wXnHDTSz17dSRw==} - dependencies: - punycode: 1.4.1 - qs: 6.11.2 - dev: false - /use-callback-ref@1.3.1(@types/react@18.2.34)(react@18.2.0): resolution: {integrity: sha512-Lg4Vx1XZQauB42Hw3kK7JM6yjVjgFmFC5/Ab797s79aARomD2nEErc4mCgM8EZrARLmmbWpi5DGCadmK50DcAQ==} engines: {node: '>=10'} @@ -17494,6 +18025,20 @@ packages: builtins: 1.0.3 dev: true + /validate-npm-package-name@4.0.0: + resolution: {integrity: sha512-mzR0L8ZDktZjpX4OB46KT+56MAhl4EIazWP/+G/HPGuvfdaqg4YsCdtOm6U9+LOFyYDoh4dpnpxZRB9MQQns5Q==} + engines: {node: ^12.13.0 || ^14.15.0 || >=16.0.0} + dependencies: + builtins: 5.0.1 + dev: true + + /validate-npm-package-name@5.0.0: + resolution: {integrity: sha512-YuKoXDAhBYxY7SfOKxHBDoSyENFeW5VvIIQp2TGQuit8gpK6MnWaQelBKxso72DoxTZfZdcP3W90LqpSkgPzLQ==} + engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} + dependencies: + builtins: 5.0.1 + dev: true + /validator@13.11.0: resolution: {integrity: sha512-Ii+sehpSfZy+At5nPdnyMhx78fEoPDkR2XW/zimHEL3MyGJQOCQ7WeP20jPYRz7ZCpcKLB21NxuXHF3bxjStBQ==} engines: {node: '>= 0.10'} @@ -17513,6 +18058,13 @@ packages: extsprintf: 1.3.0 dev: true + /version-selector-type@3.0.0: + resolution: {integrity: sha512-PSvMIZS7C1MuVNBXl/CDG2pZq8EXy/NW2dHIdm3bVP5N0PC8utDK8ttXLXj44Gn3J0lQE3U7Mpm1estAOd+eiA==} + engines: {node: '>=10.13'} + dependencies: + semver: 7.5.4 + dev: true + /vfile-message@3.1.3: resolution: {integrity: 
sha512-0yaU+rj2gKAyEk12ffdSbBfjnnj+b1zqTBv3OQCTn8yEB02bsPizwdBPrLJjHnK+cU9EMMcUnNv938XcZIkmdA==} dependencies: @@ -17671,6 +18223,10 @@ packages: fsevents: 2.3.3 dev: true + /vlq@0.2.3: + resolution: {integrity: sha512-DRibZL6DsNhIgYQ+wNdWDL2SL3bKPlVrRiBqV5yuMm++op8W4kGFtaQfCs4KEJn0wBZcHVHJ3eoywX8983k1ow==} + dev: true + /vscode-jsonrpc@6.0.0: resolution: {integrity: sha512-wnJA4BnEjOSyFMvjZdpiOwhSq9uDoK8e/kpRJDTaMYzwlkrhG1fwDIZI94CLsLzlCK5cIbMMtFlJlfR57Lavmg==} engines: {node: '>=8.0.0 || >=10.0.0'} @@ -17898,6 +18454,14 @@ packages: isexe: 2.0.0 dev: true + /which@4.0.0: + resolution: {integrity: sha512-GlaYyEb07DPxYCKhKzplCWBJtvxZcZMrL+4UkrTSJHHPyZU4mYYTv3qaOe77H7EODLSSopAUFAc6W8U4yqvscg==} + engines: {node: ^16.13.0 || >=18.0.0} + hasBin: true + dependencies: + isexe: 3.1.1 + dev: true + /wordwrap@1.0.0: resolution: {integrity: sha512-gvVzJFlPycKc5dZN4yPkP8w7Dc37BtP1yczEneOb4uq34pXZcvrtRTmWV8W+Ume+XCxKgbjM+nevkyFPMybd4Q==} dev: true @@ -18098,6 +18662,19 @@ packages: toposort: 2.0.2 dev: false + /zod-validation-error@3.0.3(zod@3.22.4): + resolution: {integrity: sha512-cETTrcMq3Ze58vhdR0zD37uJm/694I6mAxcf/ei5bl89cC++fBNxrC2z8lkFze/8hVMPwrbtrwXHR2LB50fpHw==} + engines: {node: '>=18.0.0'} + peerDependencies: + zod: ^3.18.0 + dependencies: + zod: 3.22.4 + dev: true + + /zod@3.22.4: + resolution: {integrity: sha512-iC+8Io04lddc+mVqQ9AZ7OQ2MrUKGN+oIQyq1vemgt46jwCwLfhq7/pwnBnNXXXZb8VTVLKwp9EDkx+ryxIWmg==} + dev: true + /zwitch@2.0.4: resolution: {integrity: sha512-bXE4cR/kVZhKZX/RjPEflHaKVhUVl85noU3v6b8apfQEc1x4A+zBxjZ4lN8LqGd6WZ3dl98pY4o717VFmoPp+A==} dev: false diff --git a/airbyte-webapp/release.dockerfile b/airbyte-webapp/release.dockerfile deleted file mode 100644 index 4a81ec0dfc5..00000000000 --- a/airbyte-webapp/release.dockerfile +++ /dev/null @@ -1,13 +0,0 @@ -ARG BUILD_IMAGE -FROM ${BUILD_IMAGE} AS builder - - -FROM nginx:alpine AS release - -EXPOSE 80 - -ARG SRC_DIR=/workspace/oss/airbyte-webapp/build/app/build/app - -COPY --from=builder ${SRC_DIR} 
/usr/share/nginx/html -RUN find /usr/share/nginx/html -type d -exec chmod 755 '{}' \; -o -type f -exec chmod 644 '{}' \; -COPY nginx/default.conf.template /etc/nginx/templates/default.conf.template diff --git a/airbyte-webapp/scripts/load-declarative-schema.sh b/airbyte-webapp/scripts/load-declarative-schema.sh index d7eeba5c3a8..dd32b87d4dd 100755 --- a/airbyte-webapp/scripts/load-declarative-schema.sh +++ b/airbyte-webapp/scripts/load-declarative-schema.sh @@ -19,7 +19,8 @@ then TARGET_FILE="build/declarative_component_schema-${CDK_VERSION}.yaml" if [ ! -f "$TARGET_FILE" ]; then echo "Downloading CDK manifest schema $CDK_VERSION from pypi" - curl -L https://pypi.python.org/packages/source/a/airbyte-cdk/airbyte-cdk-${CDK_VERSION}.tar.gz | tar -xzO airbyte-cdk-${CDK_VERSION}/airbyte_cdk/sources/declarative/declarative_component_schema.yaml > ${TARGET_FILE} + pypi_url=$(curl -s https://pypi.org/pypi/airbyte-cdk/${CDK_VERSION}/json | jq -r '.urls[] | select(.packagetype == "sdist") | .url') + curl $pypi_url | tar -xzO airbyte_cdk-${CDK_VERSION}/airbyte_cdk/sources/declarative/declarative_component_schema.yaml > ${TARGET_FILE} else echo "Found cached CDK manifest schema $CDK_VERSION" fi diff --git a/airbyte-webapp/scripts/local-cloud-dev.js b/airbyte-webapp/scripts/local-cloud-dev.js index 10727859474..f5d20c210a1 100644 --- a/airbyte-webapp/scripts/local-cloud-dev.js +++ b/airbyte-webapp/scripts/local-cloud-dev.js @@ -8,7 +8,7 @@ if (!process.env.CLOUD_ENV) { return; } -const envFile = path.resolve(__dirname, "../../../cloud-webapp/development", `.env.${process.env.CLOUD_ENV}`); +const envFile = path.resolve(__dirname, "../../../cloud/cloud-webapp/development", `.env.${process.env.CLOUD_ENV}`); if (!fs.existsSync(envFile)) { console.error( diff --git a/airbyte-webapp/src/App.tsx b/airbyte-webapp/src/App.tsx index a14b366aa03..8f467618c85 100644 --- a/airbyte-webapp/src/App.tsx +++ b/airbyte-webapp/src/App.tsx @@ -2,7 +2,6 @@ import React, { Suspense } from "react"; 
import { HelmetProvider } from "react-helmet-async"; import { createBrowserRouter, RouterProvider } from "react-router-dom"; -import { ApiErrorBoundary } from "components/common/ApiErrorBoundary"; import { DevToolsToggle } from "components/DevToolsToggle"; import { QueryProvider, useGetInstanceConfiguration } from "core/api"; @@ -10,7 +9,7 @@ import { InstanceConfigurationResponseEdition, InstanceConfigurationResponseTrackingStrategy, } from "core/api/types/AirbyteClient"; -import { config, ConfigServiceProvider } from "core/config"; +import { DefaultErrorBoundary } from "core/errors"; import { AnalyticsProvider } from "core/services/analytics"; import { OSSAuthService } from "core/services/auth"; import { defaultOssFeatures, defaultEnterpriseFeatures, FeatureService } from "core/services/features"; @@ -66,13 +65,11 @@ const App: React.FC = () => { }> - - - - - - - + + + + + diff --git a/airbyte-webapp/src/area/connection/components/AttemptDetails/AttemptDetails.tsx b/airbyte-webapp/src/area/connection/components/AttemptDetails/AttemptDetails.tsx index d0e4e61b57a..3010786d81e 100644 --- a/airbyte-webapp/src/area/connection/components/AttemptDetails/AttemptDetails.tsx +++ b/airbyte-webapp/src/area/connection/components/AttemptDetails/AttemptDetails.tsx @@ -5,7 +5,7 @@ import { FormattedDate, FormattedMessage, FormattedTimeParts, useIntl } from "re import { FlexContainer } from "components/ui/Flex"; import { Text } from "components/ui/Text"; -import { AttemptRead, AttemptStatus, FailureReason, FailureType } from "core/api/types/AirbyteClient"; +import { AttemptRead, AttemptStats, AttemptStatus, FailureReason, FailureType } from "core/api/types/AirbyteClient"; import { formatBytes } from "core/utils/numberHelper"; import styles from "./AttemptDetails.module.scss"; @@ -23,6 +23,7 @@ interface AttemptDetailsProps { isPartialSuccess?: boolean; showEndedAt?: boolean; showFailureMessage?: boolean; + aggregatedAttemptStats?: AttemptStats; } export const AttemptDetails: 
React.FC = ({ @@ -32,6 +33,7 @@ export const AttemptDetails: React.FC = ({ isPartialSuccess, showEndedAt = false, showFailureMessage = true, + aggregatedAttemptStats, }) => { const { formatMessage } = useIntl(); @@ -87,7 +89,7 @@ export const AttemptDetails: React.FC = ({ )} - {formatBytes(attempt?.totalStats?.bytesEmitted)} + {formatBytes(aggregatedAttemptStats?.bytesEmitted || attempt?.totalStats?.bytesEmitted)} | @@ -95,7 +97,7 @@ export const AttemptDetails: React.FC = ({ @@ -104,7 +106,7 @@ export const AttemptDetails: React.FC = ({ diff --git a/airbyte-webapp/src/area/connection/components/JobHistoryItem/JobHistoryItem.module.scss b/airbyte-webapp/src/area/connection/components/JobHistoryItem/JobHistoryItem.module.scss index 8138cf2246f..d40fbd469e9 100644 --- a/airbyte-webapp/src/area/connection/components/JobHistoryItem/JobHistoryItem.module.scss +++ b/airbyte-webapp/src/area/connection/components/JobHistoryItem/JobHistoryItem.module.scss @@ -24,6 +24,7 @@ &__summary { overflow: hidden; + flex-grow: 1; } &__modalLoading { diff --git a/airbyte-webapp/src/area/connection/components/JobHistoryItem/JobHistoryItem.tsx b/airbyte-webapp/src/area/connection/components/JobHistoryItem/JobHistoryItem.tsx index 9521f286c21..4f306e39ddb 100644 --- a/airbyte-webapp/src/area/connection/components/JobHistoryItem/JobHistoryItem.tsx +++ b/airbyte-webapp/src/area/connection/components/JobHistoryItem/JobHistoryItem.tsx @@ -7,7 +7,6 @@ import { Box } from "components/ui/Box"; import { Button } from "components/ui/Button"; import { DropdownMenu, DropdownMenuOptionType } from "components/ui/DropdownMenu"; import { FlexContainer } from "components/ui/Flex"; -import { Icon } from "components/ui/Icon"; import { LoadingSpinner } from "components/ui/LoadingSpinner"; import { Spinner } from "components/ui/Spinner"; import { Text } from "components/ui/Text"; @@ -20,7 +19,7 @@ import { getJobCreatedAt } from "area/connection/utils/jobs"; import { useCurrentWorkspaceId } from 
"area/workspace/utils"; import { useCurrentWorkspace, useGetDebugInfoJobManual } from "core/api"; import { copyToClipboard } from "core/utils/clipboard"; -import { FILE_TYPE_DOWNLOAD, downloadFile, fileizeString } from "core/utils/file"; +import { downloadFile, FILE_TYPE_DOWNLOAD, fileizeString } from "core/utils/file"; import { useAppMonitoringService } from "hooks/services/AppMonitoringService"; import { useConnectionEditService } from "hooks/services/ConnectionEdit/ConnectionEditService"; import { useModalService } from "hooks/services/Modal"; @@ -216,7 +215,7 @@ export const JobHistoryItem: React.FC = ({ jobWithAttempts ]} onChange={handleClick} > - {() => + + )} + + )} + {failureUiDetails && isSecondaryMessageExpanded && ( + + {failureUiDetails.secondaryMessage} )} diff --git a/airbyte-webapp/src/area/connection/components/JobHistoryItem/JobStatusLabel.tsx b/airbyte-webapp/src/area/connection/components/JobHistoryItem/JobStatusLabel.tsx index d33efde3823..cf000717ca8 100644 --- a/airbyte-webapp/src/area/connection/components/JobHistoryItem/JobStatusLabel.tsx +++ b/airbyte-webapp/src/area/connection/components/JobHistoryItem/JobStatusLabel.tsx @@ -5,17 +5,23 @@ import { Text } from "components/ui/Text"; import { JobWithAttempts } from "area/connection/types/jobs"; import { isJobPartialSuccess, getJobAttempts, getJobStatus } from "area/connection/utils/jobs"; import { JobStatus } from "core/api/types/AirbyteClient"; +import { useExperiment } from "hooks/services/Experiment"; interface JobStatusLabelProps { jobWithAttempts: JobWithAttempts; } export const JobStatusLabel: React.FC = ({ jobWithAttempts }) => { + const sayClearInsteadOfReset = useExperiment("connection.clearNotReset", false); + const attempts = getJobAttempts(jobWithAttempts); const jobStatus = getJobStatus(jobWithAttempts); const jobIsPartialSuccess = isJobPartialSuccess(attempts); const streamsToReset = "job" in jobWithAttempts ? 
jobWithAttempts.job.resetConfig?.streamsToReset : undefined; - const jobConfigType = jobWithAttempts.job.configType; + const jobConfigType = + sayClearInsteadOfReset && jobWithAttempts.job.configType === "reset_connection" + ? "clear_data" + : jobWithAttempts.job.configType; let status = ""; if (jobIsPartialSuccess) { diff --git a/airbyte-webapp/src/area/connection/components/JobLogsModal/DownloadLogsButton.tsx b/airbyte-webapp/src/area/connection/components/JobLogsModal/DownloadLogsButton.tsx index 7bfd20f3bb0..323f1221f80 100644 --- a/airbyte-webapp/src/area/connection/components/JobLogsModal/DownloadLogsButton.tsx +++ b/airbyte-webapp/src/area/connection/components/JobLogsModal/DownloadLogsButton.tsx @@ -2,11 +2,10 @@ import React from "react"; import { useIntl } from "react-intl"; import { Button } from "components/ui/Button"; -import { Icon } from "components/ui/Icon"; import { CleanedLogLines } from "area/connection/components/JobHistoryItem/useCleanLogs"; import { useCurrentWorkspace } from "core/api"; -import { FILE_TYPE_DOWNLOAD, downloadFile, fileizeString } from "core/utils/file"; +import { downloadFile, FILE_TYPE_DOWNLOAD, fileizeString } from "core/utils/file"; interface DownloadButtonProps { logLines: CleanedLogLines; @@ -31,7 +30,7 @@ export const DownloadLogsButton: React.FC = ({ logLines, fi title={formatMessage({ id: "jobHistory.logs.downloadLogs", })} - icon={} + icon="download" /> ); }; diff --git a/airbyte-webapp/src/area/connection/components/JobLogsModal/JobLogsModal.tsx b/airbyte-webapp/src/area/connection/components/JobLogsModal/JobLogsModal.tsx index 2f72161fa0c..67faec71557 100644 --- a/airbyte-webapp/src/area/connection/components/JobLogsModal/JobLogsModal.tsx +++ b/airbyte-webapp/src/area/connection/components/JobLogsModal/JobLogsModal.tsx @@ -13,7 +13,7 @@ import { LogSearchInput } from "area/connection/components/JobHistoryItem/LogSea import { JobLogOrigins, KNOWN_LOG_ORIGINS, useCleanLogs } from 
"area/connection/components/JobHistoryItem/useCleanLogs"; import { VirtualLogs } from "area/connection/components/JobHistoryItem/VirtualLogs"; import { LinkToAttemptButton } from "area/connection/components/JobLogsModal/LinkToAttemptButton"; -import { useAttemptForJob, useJobInfoWithoutLogs } from "core/api"; +import { useAttemptCombinedStatsForJob, useAttemptForJob, useJobInfoWithoutLogs } from "core/api"; import { AttemptStatusIcon } from "./AttemptStatusIcon"; import { DownloadLogsButton } from "./DownloadLogsButton"; @@ -36,6 +36,12 @@ export const JobLogsModal: React.FC = ({ jobId, initialAttemp initialAttemptId ?? job.attempts[job.attempts.length - 1].attempt.id ); const jobAttempt = useAttemptForJob(jobId, selectedAttemptId); + const aggregatedAttemptStats = useAttemptCombinedStatsForJob(jobId, selectedAttemptId, { + refetchInterval() { + // if the attempt hasn't ended refetch every 2.5 seconds + return jobAttempt.attempt.endedAt ? false : 2500; + }, + }); const { logLines, origins } = useCleanLogs(jobAttempt); const [selectedLogOrigins, setSelectedLogOrigins] = useState( KNOWN_LOG_ORIGINS.map(({ key }) => key) @@ -181,7 +187,13 @@ export const JobLogsModal: React.FC = ({ jobId, initialAttemp isDisabled={job.attempts.length === 1} /> - + diff --git a/airbyte-webapp/src/area/connection/components/JobLogsModal/JobLogsModalFailureMessage.module.scss b/airbyte-webapp/src/area/connection/components/JobLogsModal/JobLogsModalFailureMessage.module.scss index 29ff0e73840..a3b1ce43d99 100644 --- a/airbyte-webapp/src/area/connection/components/JobLogsModal/JobLogsModalFailureMessage.module.scss +++ b/airbyte-webapp/src/area/connection/components/JobLogsModal/JobLogsModalFailureMessage.module.scss @@ -3,16 +3,5 @@ .internalFailureContainer { border-radius: variables.$border-radius-md; - background-color: colors.$red-50; padding-bottom: variables.$spacing-md; } - -.internalFailureReason { - max-height: 300px; - overflow-y: auto; - overflow-x: auto; - white-space: nowrap; 
- background: colors.$red-30; - padding: variables.$spacing-sm variables.$spacing-md; - font-family: monospace; -} diff --git a/airbyte-webapp/src/area/connection/components/JobLogsModal/JobLogsModalFailureMessage.tsx b/airbyte-webapp/src/area/connection/components/JobLogsModal/JobLogsModalFailureMessage.tsx index 4821f44c31d..0edcd04e866 100644 --- a/airbyte-webapp/src/area/connection/components/JobLogsModal/JobLogsModalFailureMessage.tsx +++ b/airbyte-webapp/src/area/connection/components/JobLogsModal/JobLogsModalFailureMessage.tsx @@ -1,4 +1,3 @@ -import { useMemo } from "react"; import { FormattedMessage, useIntl } from "react-intl"; import { Box } from "components/ui/Box"; @@ -8,6 +7,7 @@ import { Message } from "components/ui/Message"; import { AttemptFailureSummary, FailureType } from "core/api/types/AirbyteClient"; import { copyToClipboard } from "core/utils/clipboard"; +import { failureUiDetailsFromReason } from "core/utils/errorStatusMessage"; import { useNotificationService } from "hooks/services/Notification"; import styles from "./JobLogsModalFailureMessage.module.scss"; @@ -19,33 +19,22 @@ interface JobLogsModalFailureMessageProps { export const JobLogsModalFailureMessage: React.FC = ({ failureSummary }) => { const { registerNotification } = useNotificationService(); const { formatMessage } = useIntl(); + const failureUiDetails = failureUiDetailsFromReason(failureSummary?.failures[0], formatMessage); - const internalFailureReason = useMemo(() => failureSummary?.failures[0]?.internalMessage, [failureSummary]); - - const externalFailureReason = useMemo(() => failureSummary?.failures[0]?.externalMessage, [failureSummary]); - - const failureToShow = useMemo( - () => - !failureSummary || - failureSummary?.failures.some(({ failureType }) => failureType === FailureType.manual_cancellation) - ? "none" - : failureSummary?.failures[0]?.internalMessage - ? "internal" - : failureSummary?.failures[0]?.externalMessage - ? 
"external" - : "unknown", - [failureSummary] + const isFailureCancellation = failureSummary?.failures.some( + ({ failureType }) => failureType === FailureType.manual_cancellation ); + const showFailureMessage = !isFailureCancellation && failureUiDetails; - if (failureToShow === "none") { + if (!showFailureMessage) { return null; } const onCopyTextBtnClick = async () => { - if (!internalFailureReason) { + if (!failureUiDetails.secondaryMessage) { return; } - await copyToClipboard(internalFailureReason); + await copyToClipboard(failureUiDetails.secondaryMessage); registerNotification({ type: "success", @@ -56,35 +45,27 @@ export const JobLogsModalFailureMessage: React.FC - {failureToShow === "internal" && ( -

    - - - +
    + + + + {failureUiDetails.secondaryMessage && ( - - } - /> - - -
    {internalFailureReason}
    -
    -
    - )} - - {failureToShow === "external" && ( - } - /> - )} - - {failureToShow === "unknown" && } />} + )} + + } + > + {failureUiDetails.secondaryMessage} + +
    ); }; diff --git a/airbyte-webapp/src/area/connection/components/JobLogsModal/LinkToAttemptButton.tsx b/airbyte-webapp/src/area/connection/components/JobLogsModal/LinkToAttemptButton.tsx index a8f7267538d..0dc9e3e986f 100644 --- a/airbyte-webapp/src/area/connection/components/JobLogsModal/LinkToAttemptButton.tsx +++ b/airbyte-webapp/src/area/connection/components/JobLogsModal/LinkToAttemptButton.tsx @@ -3,7 +3,6 @@ import { FormattedMessage, useIntl } from "react-intl"; import { useDebounce } from "react-use"; import { Button } from "components/ui/Button"; -import { Icon } from "components/ui/Icon"; import { Tooltip } from "components/ui/Tooltip"; import { buildAttemptLink } from "area/connection/utils/attemptLink"; @@ -39,7 +38,7 @@ export const LinkToAttemptButton: React.FC = ({ jobId, attemptId }) => { onClick={onCopyLink} title={formatMessage({ id: "connection.copyLogLink" })} aria-label={formatMessage({ id: "connection.copyLogLink" })} - icon={} + icon="link" /> } > diff --git a/airbyte-webapp/src/area/connection/components/UptimeStatusGraph/UptimeStatusGraph.module.scss b/airbyte-webapp/src/area/connection/components/UptimeStatusGraph/UptimeStatusGraph.module.scss index 29e8533bd97..4660f30789c 100644 --- a/airbyte-webapp/src/area/connection/components/UptimeStatusGraph/UptimeStatusGraph.module.scss +++ b/airbyte-webapp/src/area/connection/components/UptimeStatusGraph/UptimeStatusGraph.module.scss @@ -6,6 +6,6 @@ greenVar: colors.$green; darkBlueVar: colors.$dark-blue-300; redVar: colors.$red; - blackVar: colors.$black; - emptyVar: colors.$white; + blackVar: colors.$inverse; + emptyVar: colors.$foreground; } diff --git a/airbyte-webapp/src/area/connector/utils/destinations.json b/airbyte-webapp/src/area/connector/utils/destinations.json index a3189be25cc..f06846603a4 100644 --- a/airbyte-webapp/src/area/connector/utils/destinations.json +++ b/airbyte-webapp/src/area/connector/utils/destinations.json @@ -13,6 +13,7 @@ "DatabricksLakehouse": 
"072d5540-f236-4294-ba7c-ade8fd918496", "DynamoDb": "8ccd8909-4e99-4141-b48d-4984b70b2d89", "E2ETesting": "2eb65e87-983a-4fd7-b3e3-9d9dc6eb8537", + "EndToEndTesting": "a7bcc9d8-13b3-4e49-b80d-d020b90045e3", "ElasticSearch": "68f351a7-2745-4bef-ad7f-996b8e51bb8c", "Firebolt": "18081484-02a5-4662-8dba-b270b582f321", "GoogleCloudStorageGcs": "ca8f6566-e555-4b40-943a-545bf123117a", @@ -22,12 +23,14 @@ "Kinesis": "6d1d66d4-26ab-4602-8d32-f85894b04955", "LocalCsv": "8be1cf83-fde1-477f-a4ad-318d23c9f3c6", "LocalJson": "a625d593-bba5-4a1c-a53d-2d246268a816", + "Milvus": "65de8962-48c9-11ee-be56-0242ac120002", "Mqtt": "f3802bc4-5406-4752-9e8d-01e504ca8194", "MsSqlServer": "d4353156-9217-4cad-8dd7-c108fd4f74cf", "MeiliSearch": "af7c921e-5892-4ff2-b6c1-4a5ab258fb7e", "MongoDb": "8b746512-8c2e-6ac1-4adc-b59faafd473c", "MySql": "ca81ee7c-3163-4246-af40-094cc31e5e42", "Oracle": "3986776d-2319-4de9-8af8-db14c0996e72", + "Pinecone": "3d2b6f84-7f0d-4e3f-a5e5-7c7d4b50eabd", "Postgres": "25c5221d-dce2-4163-ade9-739ef790f503", "Pulsar": "2340cbba-358e-11ec-8d3d-0242ac130203", "RabbitMq": "e06ad785-ad6f-4647-b2e8-3027a5c59454", @@ -46,5 +49,6 @@ "LocalSqLite": "b76be0a6-27dc-4560-95f6-2623da0bd7b6", "TiDb": "06ec60c7-7468-45c0-91ac-174f6e1a788b", "Typesense": "36be8dc6-9851-49af-b776-9d4c30e4ab6a", - "YugabyteDb": "2300fdcf-a532-419f-9f24-a014336e7966" + "YugabyteDb": "2300fdcf-a532-419f-9f24-a014336e7966", + "Weaviate": "7b7d7a0d-954c-45a0-bcfc-39a634b97736" } diff --git a/airbyte-webapp/src/area/connector/utils/sources.json b/airbyte-webapp/src/area/connector/utils/sources.json index 6a692ca5525..21d88eaa5b4 100644 --- a/airbyte-webapp/src/area/connector/utils/sources.json +++ b/airbyte-webapp/src/area/connector/utils/sources.json @@ -54,6 +54,7 @@ "Dv360": "1356e1d9-977f-4057-ad4b-65f25329cf61", "DynamoDb": "50401137-8871-4c5a-abb7-1f5fda35545a", "E2ETesting": "d53f9084-fa6b-4a5a-976c-5b8392f4ad8a", + "EndToEndTesting": "50bd8338-7c4e-46f1-8c7f-3ef95de19fdd", "EmailOctopus": 
"46b25e70-c980-4590-a811-8deaf50ee09f", "ExchangeRatesApi": "e2b40e36-aa0e-4bed-b41b-bcea6fa348b1", "FacebookMarketing": "e7778cfc-e97c-4458-9ecb-b4f2bba8946c", diff --git a/airbyte-webapp/src/area/settings/components/SettingsLayout.module.scss b/airbyte-webapp/src/area/settings/components/SettingsLayout.module.scss new file mode 100644 index 00000000000..30a08f51ef8 --- /dev/null +++ b/airbyte-webapp/src/area/settings/components/SettingsLayout.module.scss @@ -0,0 +1,29 @@ +@use "scss/variables"; +@use "scss/colors"; + +.settings { + width: 100%; + height: 100%; + + &__breadcrumbs { + flex: 0 0 auto; + background-color: colors.$foreground; + border-bottom: variables.$border-thin solid colors.$grey-100; + height: variables.$height-breadcrumb; + display: flex; + align-items: center; + } + + &__main { + overflow: hidden; + flex: 1 1 auto; + display: flex; + gap: variables.$spacing-xl; + } + + &__content { + padding: variables.$spacing-xl; + background: colors.$foreground; + overflow-y: auto; + } +} diff --git a/airbyte-webapp/src/area/settings/components/SettingsLayout.tsx b/airbyte-webapp/src/area/settings/components/SettingsLayout.tsx new file mode 100644 index 00000000000..716f5e3b09c --- /dev/null +++ b/airbyte-webapp/src/area/settings/components/SettingsLayout.tsx @@ -0,0 +1,44 @@ +import { useIntl } from "react-intl"; + +import { HeadTitle } from "components/common/HeadTitle"; +import { Box } from "components/ui/Box"; +import { Breadcrumbs } from "components/ui/Breadcrumbs"; +import { FlexContainer, FlexItem } from "components/ui/Flex"; + +import { useCurrentOrganizationInfo, useCurrentWorkspace } from "core/api"; +import { FeatureItem, useFeature } from "core/services/features"; + +import styles from "./SettingsLayout.module.scss"; + +export const SettingsLayoutContent: React.FC = ({ children }) => { + return ( + + {children} + + ); +}; + +export const SettingsLayout: React.FC = ({ children }) => { + const { formatMessage } = useIntl(); + const { name: 
workspaceName } = useCurrentWorkspace(); + const organization = useCurrentOrganizationInfo(); + const multiWorkspaceUi = useFeature(FeatureItem.MultiWorkspaceUI); + + const breadcrumbs = [ + { label: formatMessage({ id: "sidebar.settings" }) }, + ...(organization && multiWorkspaceUi ? [{ label: organization.organizationName }] : []), + ...(multiWorkspaceUi ? [{ label: workspaceName }] : []), + ]; + + return ( + <> + + + + + +
    {children}
    +
    + + ); +}; diff --git a/airbyte-webapp/src/components/settings/SettingsNavigation/SettingsNavigation.module.scss b/airbyte-webapp/src/area/settings/components/SettingsNavigation.module.scss similarity index 95% rename from airbyte-webapp/src/components/settings/SettingsNavigation/SettingsNavigation.module.scss rename to airbyte-webapp/src/area/settings/components/SettingsNavigation.module.scss index 911f422f95c..83ef63fcef0 100644 --- a/airbyte-webapp/src/components/settings/SettingsNavigation/SettingsNavigation.module.scss +++ b/airbyte-webapp/src/area/settings/components/SettingsNavigation.module.scss @@ -2,8 +2,8 @@ @use "scss/variables"; .settingsNavigation { - min-width: 165px; - max-width: 200px; + width: 200px; + padding: variables.$spacing-xl; &__blockTitle { text-transform: uppercase; diff --git a/airbyte-webapp/src/components/settings/SettingsNavigation/SettingsNavigation.tsx b/airbyte-webapp/src/area/settings/components/SettingsNavigation.tsx similarity index 100% rename from airbyte-webapp/src/components/settings/SettingsNavigation/SettingsNavigation.tsx rename to airbyte-webapp/src/area/settings/components/SettingsNavigation.tsx diff --git a/airbyte-webapp/src/area/workspace/components/UpdateWorkspaceNameForm.tsx b/airbyte-webapp/src/area/workspace/components/UpdateWorkspaceNameForm.tsx index e8efc61758b..84e1b686e95 100644 --- a/airbyte-webapp/src/area/workspace/components/UpdateWorkspaceNameForm.tsx +++ b/airbyte-webapp/src/area/workspace/components/UpdateWorkspaceNameForm.tsx @@ -64,7 +64,7 @@ export const UpdateWorkspaceNameForm = () => { fieldType="input" label={formatMessage({ id: "settings.workspaceSettings.updateWorkspaceNameForm.name.label" })} /> - {canUpdateWorkspace && } + {canUpdateWorkspace && } ); }; diff --git a/airbyte-webapp/src/area/workspace/utils/ConfirmWorkspaceDeletionModal.module.scss b/airbyte-webapp/src/area/workspace/utils/ConfirmWorkspaceDeletionModal.module.scss deleted file mode 100644 index 063c73a2b44..00000000000 
--- a/airbyte-webapp/src/area/workspace/utils/ConfirmWorkspaceDeletionModal.module.scss +++ /dev/null @@ -1,7 +0,0 @@ -@use "scss/variables"; - -.form { - display: flex; - margin-top: variables.$spacing-xl; - gap: variables.$spacing-sm; -} diff --git a/airbyte-webapp/src/area/workspace/utils/useConfirmWorkspaceDeletionModal.tsx b/airbyte-webapp/src/area/workspace/utils/useConfirmWorkspaceDeletionModal.tsx deleted file mode 100644 index 18e18d05cfc..00000000000 --- a/airbyte-webapp/src/area/workspace/utils/useConfirmWorkspaceDeletionModal.tsx +++ /dev/null @@ -1,93 +0,0 @@ -import { UseMutateAsyncFunction } from "@tanstack/react-query"; -import { useState } from "react"; -import { FormattedMessage, useIntl } from "react-intl"; -import { useNavigate } from "react-router-dom"; - -import { Box } from "components/ui/Box"; -import { Button } from "components/ui/Button"; -import { Input } from "components/ui/Input"; -import { Text } from "components/ui/Text"; - -import { WorkspaceRead } from "core/api/types/AirbyteClient"; -import { useModalService } from "hooks/services/Modal"; -import { useNotificationService } from "hooks/services/Notification"; -import { RoutePaths } from "pages/routePaths"; - -import styles from "./ConfirmWorkspaceDeletionModal.module.scss"; - -const WorkspaceDeletionModalContent = ({ onSubmit, workspace }: { onSubmit: () => void; workspace: WorkspaceRead }) => { - const [confirmationInput, setConfirmationInput] = useState(""); - const isConfirmationValid = confirmationInput === workspace.name; - return ( -
    - - - - -
    - setConfirmationInput(event.target.value)} - /> - -
    -
    -
    - ); -}; - -/** - * Returns a function that can be used to open a confirmation modal for deleting a - * workspace. The user must type the workspace name in a confirmation input in order to - * proceed with the deletion. - * - * @param workspace - the workspace to delete - * @param deleteWorkspace - the API function which will actually delete the workspace upon successful confirmation - */ -export const useConfirmWorkspaceDeletionModal = ( - workspace: WorkspaceRead, - deleteWorkspace: UseMutateAsyncFunction -) => { - const { formatMessage } = useIntl(); - const { registerNotification } = useNotificationService(); - const navigate = useNavigate(); - const { openModal } = useModalService(); - - return async () => { - const result = await openModal<"confirm">({ - title: formatMessage( - { - id: "settings.workspaceSettings.deleteWorkspace.confirmation.title", - }, - { name: workspace.name } - ), - content: ({ onClose }) => ( - onClose("confirm")} /> - ), - size: "md", - }); - - // type "closed" and reason "confirm" indicates a successful confirmation; "canceled" [sic] is its counterpart - // when the user backs out - if (result.type === "closed" && result.reason === "confirm") { - try { - await deleteWorkspace(workspace.workspaceId); - registerNotification({ - id: "settings.workspace.delete.success", - text: formatMessage({ id: "settings.workspaceSettings.delete.success" }), - type: "success", - }); - navigate(`/${RoutePaths.Workspaces}`); - } catch { - registerNotification({ - id: "settings.workspace.delete.error", - text: formatMessage({ id: "settings.workspaceSettings.delete.error" }), - type: "error", - }); - } - } - }; -}; diff --git a/airbyte-webapp/src/components/ArrayOfObjectsEditor/components/EditorRow.tsx b/airbyte-webapp/src/components/ArrayOfObjectsEditor/components/EditorRow.tsx index f1d5dd5e3f7..2919ab4f483 100644 --- a/airbyte-webapp/src/components/ArrayOfObjectsEditor/components/EditorRow.tsx +++ 
b/airbyte-webapp/src/components/ArrayOfObjectsEditor/components/EditorRow.tsx @@ -41,7 +41,7 @@ export const EditorRow: React.FC = ({ name, id, description, onE arial-label={formatMessage({ id: "form.edit" })} onClick={() => onEdit(index)} disabled={disabled} - icon={} + icon="pencil" /> - ))} - - )} - - + + {version.isActive && ( + + + + )} + {isLoading && version.version === selectedVersion && } + + + ))} + + )} + ); }; const VersionChanger = ({ project, canUpdateConnector }: { project: BuilderProject; canUpdateConnector: boolean }) => { - const [changeInProgress, setChangeInProgress] = useState(false); + const { openModal } = useModalService(); + const { formatMessage } = useIntl(); + if (project.version === "draft") { return ( ); } + + const openVersionChangeModal = () => + openModal({ + title: formatMessage({ id: "connectorBuilder.changeVersionModal.title" }, { name: project.name }), + size: "sm", + content: ({ onComplete }) => , + }); + return ( - <> - - {changeInProgress && setChangeInProgress(false)} project={project} />} - + ); }; @@ -233,7 +233,7 @@ export const ConnectorBuilderProjectTable = ({ type="button" variant="clear" disabled={Boolean(props.row.original.sourceDefinitionId)} - icon={} + icon="trash" onClick={() => { unregisterNotificationById(NOTIFICATION_ID); openConfirmationModal({ @@ -302,6 +302,7 @@ export const ConnectorBuilderProjectTable = ({ data={projects} className={styles.table} sorting={false} + stickyHeaders={false} initialSortBy={[{ id: "name", desc: false }]} /> ); diff --git a/airbyte-webapp/src/components/EntityTable/ConnectionTable.tsx b/airbyte-webapp/src/components/EntityTable/ConnectionTable.tsx index b46d8aafe09..83d52f00250 100644 --- a/airbyte-webapp/src/components/EntityTable/ConnectionTable.tsx +++ b/airbyte-webapp/src/components/EntityTable/ConnectionTable.tsx @@ -6,16 +6,14 @@ import { Link } from "components/ui/Link"; import { Table } from "components/ui/Table"; import { useCurrentWorkspaceLink } from 
"area/workspace/utils"; -import { ConnectionScheduleType, SchemaChange } from "core/api/types/AirbyteClient"; -import { FeatureItem, useFeature } from "core/services/features"; import { RoutePaths } from "pages/routePaths"; -import ConnectionSettingsCell from "./components/ConnectionSettingsCell"; import { ConnectionStatusCell } from "./components/ConnectionStatusCell"; import { ConnectorNameCell } from "./components/ConnectorNameCell"; import { FrequencyCell } from "./components/FrequencyCell"; import { LastSyncCell } from "./components/LastSyncCell"; -import { StatusCell } from "./components/StatusCell"; +import { SchemaChangeCell } from "./components/SchemaChangeCell"; +import { StateSwitchCell } from "./components/StateSwitchCell"; import { StreamsStatusCell } from "./components/StreamStatusCell"; import styles from "./ConnectionTable.module.scss"; import { ConnectionTableDataItem } from "./types"; @@ -28,7 +26,6 @@ interface ConnectionTableProps { const ConnectionTable: React.FC = ({ data, entity, variant }) => { const createLink = useCurrentWorkspaceLink(); - const allowAutoDetectSchema = useFeature(FeatureItem.AllowAutoDetectSchema); const streamCentricUIEnabled = false; const columnHelper = createColumnHelper(); @@ -158,28 +155,29 @@ const ConnectionTable: React.FC = ({ data, entity, variant thClassName: styles.thEnabled, }, cell: (props) => ( - ), enableSorting: false, }), - columnHelper.accessor("connectionId", { + columnHelper.accessor("schemaChange", { header: "", meta: { thClassName: styles.thConnectionSettings, }, - cell: (props) => , + cell: (props) => ( + + ), enableSorting: false, }), ], - [columnHelper, createLink, entity, allowAutoDetectSchema] + [columnHelper, createLink, entity] ); return ( diff --git a/airbyte-webapp/src/components/EntityTable/components/ConnectionSettingsCell.module.scss b/airbyte-webapp/src/components/EntityTable/components/ConnectionSettingsCell.module.scss deleted file mode 100644 index fd1bf72927d..00000000000 --- 
a/airbyte-webapp/src/components/EntityTable/components/ConnectionSettingsCell.module.scss +++ /dev/null @@ -1,28 +0,0 @@ -@use "scss/variables"; -@use "scss/colors"; - -.button { - min-width: 17px; - font-size: 17px; - border: none; - background: none; - padding: 0; -} - -.link { - color: transparent; - padding: variables.$spacing-xs variables.$spacing-sm; - - &:focus, - &:hover { - color: colors.$grey-600 !important; - } - - tr:hover & { - color: colors.$grey-400; - } -} - -.icon { - color: inherit; -} diff --git a/airbyte-webapp/src/components/EntityTable/components/ConnectionSettingsCell.tsx b/airbyte-webapp/src/components/EntityTable/components/ConnectionSettingsCell.tsx deleted file mode 100644 index 7de3a818713..00000000000 --- a/airbyte-webapp/src/components/EntityTable/components/ConnectionSettingsCell.tsx +++ /dev/null @@ -1,33 +0,0 @@ -import React from "react"; - -import { Icon } from "components/ui/Icon"; -import { Link } from "components/ui/Link"; - -import { useCurrentWorkspace } from "hooks/services/useWorkspace"; -import { ConnectionRoutePaths, RoutePaths } from "pages/routePaths"; - -import styles from "./ConnectionSettingsCell.module.scss"; - -interface IProps { - id: string; -} - -const ConnectorCell: React.FC = ({ id }) => { - const { workspaceId } = useCurrentWorkspace(); - - const openSettings = (event: React.MouseEvent) => { - event.stopPropagation(); - }; - - const settingPath = `/${RoutePaths.Workspaces}/${workspaceId}/${RoutePaths.Connections}/${id}/${ConnectionRoutePaths.Replication}`; - - return ( - - ); -}; - -export default ConnectorCell; diff --git a/airbyte-webapp/src/components/EntityTable/components/SchemaChangeCell.tsx b/airbyte-webapp/src/components/EntityTable/components/SchemaChangeCell.tsx new file mode 100644 index 00000000000..fd2f9b41d26 --- /dev/null +++ b/airbyte-webapp/src/components/EntityTable/components/SchemaChangeCell.tsx @@ -0,0 +1,28 @@ +import React from "react"; + +import { Link } from "components/ui/Link"; + 
+import { ConnectionId, SchemaChange } from "core/api/types/AirbyteClient"; +import { FeatureItem, useFeature } from "core/services/features"; +import { ConnectionRoutePaths } from "pages/routePaths"; + +import { ChangesStatusIcon } from "./ChangesStatusIcon"; + +interface SchemaChangeCellProps { + connectionId: ConnectionId; + schemaChange: SchemaChange; +} + +export const SchemaChangeCell: React.FC = ({ connectionId, schemaChange }) => { + const allowAutoDetectSchema = useFeature(FeatureItem.AllowAutoDetectSchema); + + if (!allowAutoDetectSchema || schemaChange !== SchemaChange.breaking) { + return null; + } + + return ( + + + + ); +}; diff --git a/airbyte-webapp/src/components/EntityTable/components/StateSwitchCell.test.tsx b/airbyte-webapp/src/components/EntityTable/components/StateSwitchCell.test.tsx new file mode 100644 index 00000000000..edef2e92104 --- /dev/null +++ b/airbyte-webapp/src/components/EntityTable/components/StateSwitchCell.test.tsx @@ -0,0 +1,70 @@ +import { render } from "@testing-library/react"; + +import { TestSuspenseBoundary, TestWrapper } from "test-utils"; +import { mockWorkspace } from "test-utils/mock-data/mockWorkspace"; + +import { StateSwitchCell } from "./StateSwitchCell"; + +jest.mock("core/api", () => ({ + useCurrentWorkspace: jest.fn(() => mockWorkspace), + useUpdateConnection: jest.fn(() => ({ + mutateAsync: jest.fn(), + isLoading: false, + })), +})); + +jest.mock("core/utils/rbac", () => ({ + useIntent: jest.fn(() => true), +})); + +const mockId = "mock-id"; + +describe(`${StateSwitchCell.name}`, () => { + it("renders enabled switch", () => { + const { getByTestId } = render( + + + , + { + wrapper: TestWrapper, + } + ); + + const switchElement = getByTestId("connection-state-switch-mock-id"); + + expect(switchElement).toBeEnabled(); + expect(switchElement).toBeChecked(); + }); + + it("renders disabled switch when connection has `breaking` changes", () => { + const { getByTestId } = render( + + + , + { + wrapper: TestWrapper, + 
} + ); + + expect(getByTestId("connection-state-switch-mock-id")).toBeDisabled(); + }); + + it("renders disabled switch when connection is in loading state", () => { + jest.doMock("core/api", () => ({ + useUpdateConnection: jest.fn(() => ({ + isLoading: true, + })), + })); + + const { getByTestId } = render( + + + , + { + wrapper: TestWrapper, + } + ); + + expect(getByTestId("connection-state-switch-mock-id")).toBeDisabled(); + }); +}); diff --git a/airbyte-webapp/src/components/EntityTable/components/StateSwitchCell.tsx b/airbyte-webapp/src/components/EntityTable/components/StateSwitchCell.tsx new file mode 100644 index 00000000000..4a82cd035e4 --- /dev/null +++ b/airbyte-webapp/src/components/EntityTable/components/StateSwitchCell.tsx @@ -0,0 +1,45 @@ +import React from "react"; + +import { FlexContainer } from "components/ui/Flex"; +import { Switch } from "components/ui/Switch"; + +import { useCurrentWorkspace, useUpdateConnection } from "core/api"; +import { ConnectionId, ConnectionStatus, SchemaChange } from "core/api/types/AirbyteClient"; +import { useIntent } from "core/utils/rbac"; +import { useAnalyticsTrackFunctions } from "hooks/services/ConnectionEdit/useAnalyticsTrackFunctions"; + +interface StateSwitchCellProps { + connectionId: ConnectionId; + enabled?: boolean; + schemaChange?: SchemaChange; +} + +export const StateSwitchCell: React.FC = ({ connectionId, enabled, schemaChange }) => { + const { trackConnectionStatusUpdate } = useAnalyticsTrackFunctions(); + const { workspaceId } = useCurrentWorkspace(); + const canEditConnection = useIntent("EditConnection", { workspaceId }); + const { mutateAsync: updateConnection, isLoading } = useUpdateConnection(); + + const onChange = async ({ target: { checked } }: React.ChangeEvent) => { + const updatedConnection = await updateConnection({ + connectionId, + status: checked ? 
ConnectionStatus.active : ConnectionStatus.inactive, + }); + trackConnectionStatusUpdate(updatedConnection); + }; + + const isDisabled = schemaChange === SchemaChange.breaking || !canEditConnection || isLoading; + + return ( + + + + ); +}; diff --git a/airbyte-webapp/src/components/EntityTable/components/StatusCell.module.scss b/airbyte-webapp/src/components/EntityTable/components/StatusCell.module.scss deleted file mode 100644 index 663c904fc6d..00000000000 --- a/airbyte-webapp/src/components/EntityTable/components/StatusCell.module.scss +++ /dev/null @@ -1,7 +0,0 @@ -.container { - display: flex; - flex-direction: row; - align-content: center; - justify-content: space-between; - width: 120px; -} diff --git a/airbyte-webapp/src/components/EntityTable/components/StatusCell.test.tsx b/airbyte-webapp/src/components/EntityTable/components/StatusCell.test.tsx deleted file mode 100644 index b5e6fc4aa9c..00000000000 --- a/airbyte-webapp/src/components/EntityTable/components/StatusCell.test.tsx +++ /dev/null @@ -1,83 +0,0 @@ -import { render, waitFor } from "@testing-library/react"; - -import { TestWrapper, TestSuspenseBoundary, mockConnection } from "test-utils"; -import { mockWorkspace } from "test-utils/mock-data/mockWorkspace"; - -import { StatusCell } from "./StatusCell"; - -jest.mock("core/api", () => ({ - useConnectionList: jest.fn(() => ({ - connections: [], - })), - useCurrentWorkspace: jest.fn(() => mockWorkspace), - useSyncConnection: jest.fn(() => ({ - mutateAsync: jest.fn(), - })), - useUpdateConnection: jest.fn(() => ({ - mutateAsync: jest.fn(), - isLoading: false, - })), -})); - -jest.mock("core/utils/rbac", () => ({ - useIntent: jest.fn(() => true), -})); - -const mockId = "mock-id"; - -describe("", () => { - it("renders switch when connection has schedule", () => { - const { getByTestId } = render( - - - , - { - wrapper: TestWrapper, - } - ); - - const switchElement = getByTestId("enable-connection-switch"); - - expect(switchElement).toBeEnabled(); - 
expect(switchElement).toBeChecked(); - }); - - it("renders button when connection does not have schedule", async () => { - const { getByTestId } = render( - - - , - { - wrapper: TestWrapper, - } - ); - - await waitFor(() => expect(getByTestId("manual-sync-button")).toBeEnabled()); - }); - - it("disables switch when hasBreakingChange is true", () => { - const { getByTestId } = render( - - - , - { - wrapper: TestWrapper, - } - ); - - expect(getByTestId("enable-connection-switch")).toBeDisabled(); - }); - - it("disables manual sync button when hasBreakingChange is true", () => { - const { getByTestId } = render( - - - , - { - wrapper: TestWrapper, - } - ); - - expect(getByTestId("manual-sync-button")).toBeDisabled(); - }); -}); diff --git a/airbyte-webapp/src/components/EntityTable/components/StatusCell.tsx b/airbyte-webapp/src/components/EntityTable/components/StatusCell.tsx deleted file mode 100644 index 9b07a619d09..00000000000 --- a/airbyte-webapp/src/components/EntityTable/components/StatusCell.tsx +++ /dev/null @@ -1,51 +0,0 @@ -import React from "react"; - -import { Link } from "components/ui/Link"; - -import { SchemaChange, WebBackendConnectionListItem } from "core/api/types/AirbyteClient"; -import { FeatureItem, useFeature } from "core/services/features"; -import { ConnectionRoutePaths } from "pages/routePaths"; - -import { ChangesStatusIcon } from "./ChangesStatusIcon"; -import styles from "./StatusCell.module.scss"; -import { StatusCellControl } from "./StatusCellControl"; - -interface StatusCellProps { - hasBreakingChange?: boolean; - enabled?: boolean; - isSyncing?: boolean; - isManual?: boolean; - id: string; - schemaChange?: SchemaChange; - connection: WebBackendConnectionListItem; -} - -export const StatusCell: React.FC = ({ - enabled, - isManual, - id, - isSyncing, - schemaChange, - hasBreakingChange, - connection, -}) => { - const allowAutoDetectSchema = useFeature(FeatureItem.AllowAutoDetectSchema); - - return ( -
    - - {allowAutoDetectSchema && hasBreakingChange && ( - - - - )} -
    - ); -}; diff --git a/airbyte-webapp/src/components/EntityTable/components/StatusCellControl.tsx b/airbyte-webapp/src/components/EntityTable/components/StatusCellControl.tsx deleted file mode 100644 index bd5fadd4c73..00000000000 --- a/airbyte-webapp/src/components/EntityTable/components/StatusCellControl.tsx +++ /dev/null @@ -1,92 +0,0 @@ -import React from "react"; -import { FormattedMessage } from "react-intl"; - -import { Button } from "components/ui/Button"; -import { Switch } from "components/ui/Switch"; - -import { useCurrentWorkspace, useSyncConnection, useUpdateConnection } from "core/api"; -import { ConnectionStatus, WebBackendConnectionListItem } from "core/api/types/AirbyteClient"; -import { Action, Namespace, getFrequencyFromScheduleData, useAnalyticsService } from "core/services/analytics"; -import { useIntent } from "core/utils/rbac"; - -interface StatusCellControlProps { - hasBreakingChange?: boolean; - enabled?: boolean; - isSyncing?: boolean; - isManual?: boolean; - id: string; - connection: WebBackendConnectionListItem; -} - -export const StatusCellControl: React.FC = ({ - enabled, - isManual, - id, - isSyncing, - hasBreakingChange, - connection, -}) => { - const analyticsService = useAnalyticsService(); - const { mutateAsync: updateConnection, isLoading } = useUpdateConnection(); - const { mutateAsync: syncConnection, isLoading: isSyncStarting } = useSyncConnection(); - - const { workspaceId } = useCurrentWorkspace(); - const canEditConnection = useIntent("EditConnection", { workspaceId }); - const canSyncConnection = useIntent("SyncConnection", { workspaceId }); - - const onRunManualSync = (event: React.SyntheticEvent) => { - event.stopPropagation(); - - if (connection) { - syncConnection(connection); - } - }; - - if (!isManual) { - const onSwitchChange = async (event: React.SyntheticEvent) => { - event.stopPropagation(); - await updateConnection({ - connectionId: id, - status: enabled ? 
ConnectionStatus.inactive : ConnectionStatus.active, - }).then((updatedConnection) => { - const action = updatedConnection.status === ConnectionStatus.active ? Action.REENABLE : Action.DISABLE; - - analyticsService.track(Namespace.CONNECTION, action, { - frequency: getFrequencyFromScheduleData(connection.scheduleData), - connector_source: connection.source?.sourceName, - connector_source_definition_id: connection.source?.sourceDefinitionId, - connector_destination: connection.destination?.destinationName, - connector_destination_definition_id: connection.destination?.destinationDefinitionId, - }); - }); - }; - - return ( - // this is so we can stop event propagation so the row doesn't receive the click and redirect - // eslint-disable-next-line jsx-a11y/no-static-element-interactions -
    event.stopPropagation()} - onKeyPress={(event: React.SyntheticEvent) => event.stopPropagation()} - > - -
    - ); - } - - return ( - - ); -}; diff --git a/airbyte-webapp/src/components/InitialBadge/InitialBadge.module.scss b/airbyte-webapp/src/components/InitialBadge/InitialBadge.module.scss index 5df450252b8..1fc97d5e36c 100644 --- a/airbyte-webapp/src/components/InitialBadge/InitialBadge.module.scss +++ b/airbyte-webapp/src/components/InitialBadge/InitialBadge.module.scss @@ -48,4 +48,5 @@ width: 24px; height: 24px; border-radius: variables.$border-radius-xs; + aspect-ratio: 1 / 1; } diff --git a/airbyte-webapp/src/components/JobFailure/JobFailure.tsx b/airbyte-webapp/src/components/JobFailure/JobFailure.tsx index 29842b386f0..c50ee379817 100644 --- a/airbyte-webapp/src/components/JobFailure/JobFailure.tsx +++ b/airbyte-webapp/src/components/JobFailure/JobFailure.tsx @@ -145,7 +145,7 @@ export const JobFailure: React.FC = ({ job, fallbackMessage }) expanded={isStacktraceExpanded} messageId="jobs.failure.expandStacktrace" /> - {isStacktraceExpanded && } + {isStacktraceExpanded && } )} {job.logs?.logLines && job.logs.logLines.length > 0 && ( @@ -156,7 +156,7 @@ export const JobFailure: React.FC = ({ job, fallbackMessage }) messageId="jobs.failure.expandLogs" icon={} /> - {isLogsExpanded && } + {isLogsExpanded && } )} diff --git a/airbyte-webapp/src/components/Logs/Logs.tsx b/airbyte-webapp/src/components/Logs/Logs.tsx index cb9afd9a386..80257b3829c 100644 --- a/airbyte-webapp/src/components/Logs/Logs.tsx +++ b/airbyte-webapp/src/components/Logs/Logs.tsx @@ -7,6 +7,7 @@ import styles from "./Logs.module.scss"; interface LogsProps { logsArray?: string[]; maxRows?: number; + follow?: boolean; } const ROW_HEIGHT = 19; @@ -19,7 +20,7 @@ function trimLogs(logs: string[]) { return trimmedLogs; } -const Logs: React.FC = ({ logsArray, maxRows = 21 }) => { +const Logs: React.FC = ({ logsArray, maxRows = 21, follow }) => { const trimmedLogs = trimLogs(logsArray || []); const logsJoin = trimmedLogs.length ? 
trimmedLogs.join("\n") : "No logs available"; @@ -41,7 +42,7 @@ const Logs: React.FC = ({ logsArray, maxRows = 21 }) => { lineClassName={styles.logLine} highlightLineClassName={styles.highlightLogLine} selectableLines - follow + follow={follow} style={{ background: "transparent" }} scrollToLine={undefined} highlight={[]} diff --git a/airbyte-webapp/src/components/NotificationSettingsForm/NotificationSettingsForm.tsx b/airbyte-webapp/src/components/NotificationSettingsForm/NotificationSettingsForm.tsx index 56e2e74d5f2..54569a92264 100644 --- a/airbyte-webapp/src/components/NotificationSettingsForm/NotificationSettingsForm.tsx +++ b/airbyte-webapp/src/components/NotificationSettingsForm/NotificationSettingsForm.tsx @@ -185,7 +185,7 @@ export const NotificationSettingsForm: React.FC = () => { )} - + ); diff --git a/airbyte-webapp/src/components/WorkspaceEmailForm/WorkspaceEmailForm.tsx b/airbyte-webapp/src/components/WorkspaceEmailForm/WorkspaceEmailForm.tsx index b9f0798e134..01de92ea1ac 100644 --- a/airbyte-webapp/src/components/WorkspaceEmailForm/WorkspaceEmailForm.tsx +++ b/airbyte-webapp/src/components/WorkspaceEmailForm/WorkspaceEmailForm.tsx @@ -68,14 +68,13 @@ export const WorkspaceEmailForm = () => { name="email" fieldType="input" - inline - description={formatMessage({ id: "settings.notifications.emailRecipient" })} + labelTooltip={formatMessage({ id: "settings.notifications.emailRecipient" })} label={formatMessage({ id: "settings.workspaceSettings.updateWorkspaceNameForm.email.label" })} placeholder={formatMessage({ id: "settings.workspaceSettings.updateWorkspaceNameForm.email.placeholder", })} /> - + ); }; diff --git a/airbyte-webapp/src/components/common/ApiErrorBoundary/ApiErrorBoundary.tsx b/airbyte-webapp/src/components/common/ApiErrorBoundary/ApiErrorBoundary.tsx deleted file mode 100644 index 6234f23733b..00000000000 --- a/airbyte-webapp/src/components/common/ApiErrorBoundary/ApiErrorBoundary.tsx +++ /dev/null @@ -1,161 +0,0 @@ -import { 
useQueryErrorResetBoundary } from "@tanstack/react-query"; -import React from "react"; -import { FormattedMessage } from "react-intl"; -import { NavigateFunction, useNavigate } from "react-router-dom"; -import { useLocation } from "react-use"; -import { LocationSensorState } from "react-use/lib/useLocation"; - -import { CommonRequestError, isVersionError } from "core/api"; -import { isFormBuildError } from "core/form/FormBuildError"; -import { trackError } from "core/utils/datadog"; -import { TrackErrorFn } from "hooks/services/AppMonitoringService"; -import { ErrorOccurredView } from "views/common/ErrorOccurredView"; -import { ResourceNotFoundErrorBoundary } from "views/common/ResourceNotFoundErrorBoundary"; -import { StartOverErrorView } from "views/common/StartOverErrorView"; - -import { ServerUnavailableView } from "./ServerUnavailableView"; - -interface ApiErrorBoundaryState { - errorId?: string; - message?: string; - didRetry?: boolean; - retryDelay?: number; -} - -enum ErrorId { - VersionMismatch = "version.mismatch", - FormBuild = "form.build", - ServerUnavailable = "server.unavailable", - UnknownError = "unknown", -} - -interface ApiErrorBoundaryHookProps { - location: LocationSensorState; - onRetry?: () => void; - navigate: NavigateFunction; - trackError: TrackErrorFn; -} - -interface ApiErrorBoundaryProps { - onError?: (errorId?: string) => void; -} - -const RETRY_DELAY = 2500; - -class ApiErrorBoundaryComponent extends React.Component< - React.PropsWithChildren, - ApiErrorBoundaryState -> { - state: ApiErrorBoundaryState = { - retryDelay: RETRY_DELAY, - }; - - static getDerivedStateFromError(error: { message: string; status?: number; __type?: string }): ApiErrorBoundaryState { - // Update state so the next render will show the fallback UI. 
- if (isVersionError(error)) { - return { errorId: ErrorId.VersionMismatch, message: error.message }; - } - - if (isFormBuildError(error)) { - return { errorId: ErrorId.FormBuild, message: error.message }; - } - - const isNetworkBoundaryMessage = error.message === "Failed to fetch"; - const is502 = error.status === 502; - - if (isNetworkBoundaryMessage || is502) { - return { errorId: ErrorId.ServerUnavailable, didRetry: false }; - } - - return { errorId: ErrorId.UnknownError, didRetry: false }; - } - - componentDidUpdate(prevProps: ApiErrorBoundaryHookProps) { - const { location } = this.props; - - if (location !== prevProps.location) { - this.setState({ errorId: undefined, didRetry: false }); - this.props.onError?.(undefined); - } else { - this.props.onError?.(this.state.errorId); - } - } - - componentDidCatch(error: Error) { - const context = { - errorBoundary: this.constructor.name, - requestStatus: error instanceof CommonRequestError ? error.status : undefined, - }; - - this.props.trackError(error, context); - } - - retry = () => { - this.setState((state) => ({ - didRetry: true, - errorId: undefined, - retryDelay: Math.round((state?.retryDelay || RETRY_DELAY) * 1.2), - })); - this.props.onRetry?.(); - }; - - render(): React.ReactNode { - const { navigate, children } = this.props; - const { errorId, didRetry, message, retryDelay } = this.state; - - if (errorId === ErrorId.VersionMismatch) { - return ; - } - - if (errorId === ErrorId.FormBuild) { - return ( - - -
    - - - } - docLink="https://docs.airbyte.com/connector-development/connector-specification-reference/#airbyte-modifications-to-jsonschema" - /> - ); - } - - if (errorId === ErrorId.ServerUnavailable && !didRetry) { - return ; - } - - return !errorId ? ( - } trackError={this.props.trackError}> - {children} - - ) : ( - } - ctaButtonText={} - onCtaButtonClick={() => { - navigate(".."); - }} - /> - ); - } -} - -export const ApiErrorBoundary: React.FC> = ({ children, ...props }) => { - const { reset } = useQueryErrorResetBoundary(); - const location = useLocation(); - const navigate = useNavigate(); - - return ( - - {children} - - ); -}; diff --git a/airbyte-webapp/src/components/common/ApiErrorBoundary/ServerUnavailableView.tsx b/airbyte-webapp/src/components/common/ApiErrorBoundary/ServerUnavailableView.tsx deleted file mode 100644 index 7553d5e6000..00000000000 --- a/airbyte-webapp/src/components/common/ApiErrorBoundary/ServerUnavailableView.tsx +++ /dev/null @@ -1,27 +0,0 @@ -import React, { useEffect } from "react"; -import { FormattedMessage } from "react-intl"; - -import { ErrorOccurredView } from "views/common/ErrorOccurredView"; - -interface ServerUnavailableViewProps { - onRetryClick: () => void; - retryDelay: number; -} - -export const ServerUnavailableView: React.FC = ({ onRetryClick, retryDelay }) => { - useEffect(() => { - const timer: ReturnType = setTimeout(() => { - onRetryClick(); - }, retryDelay); - return () => clearTimeout(timer); - // eslint-disable-next-line react-hooks/exhaustive-deps - }, []); - - return ( - } - ctaButtonText={} - onCtaButtonClick={onRetryClick} - /> - ); -}; diff --git a/airbyte-webapp/src/components/common/ApiErrorBoundary/index.ts b/airbyte-webapp/src/components/common/ApiErrorBoundary/index.ts deleted file mode 100644 index 0f3fb4d7305..00000000000 --- a/airbyte-webapp/src/components/common/ApiErrorBoundary/index.ts +++ /dev/null @@ -1 +0,0 @@ -export * from "./ApiErrorBoundary"; diff --git 
a/airbyte-webapp/src/components/common/ConfirmationModal/ConfirmationModal.tsx b/airbyte-webapp/src/components/common/ConfirmationModal/ConfirmationModal.tsx index abfc63193c5..252be9cd5e5 100644 --- a/airbyte-webapp/src/components/common/ConfirmationModal/ConfirmationModal.tsx +++ b/airbyte-webapp/src/components/common/ConfirmationModal/ConfirmationModal.tsx @@ -1,49 +1,89 @@ +import isString from "lodash/isString"; import React from "react"; import { FormattedMessage } from "react-intl"; +import { Box } from "components/ui/Box"; import { Button } from "components/ui/Button"; +import { FlexContainer } from "components/ui/Flex"; +import { Input } from "components/ui/Input"; import { Modal } from "components/ui/Modal"; +import { Text } from "components/ui/Text"; + +import useLoadingState from "hooks/useLoadingState"; import styles from "./ConfirmationModal.module.scss"; -import useLoadingState from "../../../hooks/useLoadingState"; export interface ConfirmationModalProps { - onClose: () => void; - title: string; - text: string; + title: string | React.ReactNode; + text: string | React.ReactNode; textValues?: Record; - submitButtonText: string; + onCancel: () => void; onSubmit: () => void; - submitButtonDataId?: string; cancelButtonText?: string; + confirmationText?: string; + submitButtonText: string; + submitButtonDataId?: string; additionalContent?: React.ReactNode; submitButtonVariant?: "danger" | "primary"; } export const ConfirmationModal: React.FC = ({ - onClose, title, text, additionalContent, textValues, + onCancel, onSubmit, submitButtonText, submitButtonDataId, cancelButtonText, + confirmationText, submitButtonVariant = "danger", }) => { const { isLoading, startAction } = useLoadingState(); const onSubmitBtnClick = () => startAction({ action: async () => onSubmit() }); + const [confirmationValue, setConfirmationValue] = React.useState(""); return ( - } testId="confirmationModal"> + : title} + testId="confirmationModal" + >
    - + {isString(text) ? : text} {additionalContent} + {confirmationText && ( + + + {/* eslint-disable-next-line jsx-a11y/label-has-associated-control -- eslint loses the input even though it has an "htmlFor" */} + + setConfirmationValue(event.target.value)} + value={confirmationValue} + /> + + + )}
    + + + + + + + + + + + + + + + + ); +}; diff --git a/airbyte-webapp/src/components/common/ConnectionDangerBlock/index.ts b/airbyte-webapp/src/components/common/ConnectionDangerBlock/index.ts new file mode 100644 index 00000000000..aef00e1e882 --- /dev/null +++ b/airbyte-webapp/src/components/common/ConnectionDangerBlock/index.ts @@ -0,0 +1 @@ +export * from "./ConnectionDangerBlock"; diff --git a/airbyte-webapp/src/components/common/DeleteBlock/DeleteBlock.module.scss b/airbyte-webapp/src/components/common/ConnectionDeleteBlock/ConnectionDeleteBlock.module.scss similarity index 100% rename from airbyte-webapp/src/components/common/DeleteBlock/DeleteBlock.module.scss rename to airbyte-webapp/src/components/common/ConnectionDeleteBlock/ConnectionDeleteBlock.module.scss diff --git a/airbyte-webapp/src/components/common/DeleteBlock/DeleteBlock.tsx b/airbyte-webapp/src/components/common/ConnectionDeleteBlock/ConnectionDeleteBlock.tsx similarity index 58% rename from airbyte-webapp/src/components/common/DeleteBlock/DeleteBlock.tsx rename to airbyte-webapp/src/components/common/ConnectionDeleteBlock/ConnectionDeleteBlock.tsx index 27a64f3d197..5413997f105 100644 --- a/airbyte-webapp/src/components/common/DeleteBlock/DeleteBlock.tsx +++ b/airbyte-webapp/src/components/common/ConnectionDeleteBlock/ConnectionDeleteBlock.tsx @@ -6,30 +6,30 @@ import { Card } from "components/ui/Card"; import { FlexContainer } from "components/ui/Flex"; import { Text } from "components/ui/Text"; +import { useDeleteConnection } from "core/api"; +import { useConnectionEditService } from "hooks/services/ConnectionEdit/ConnectionEditService"; import { useConnectionFormService } from "hooks/services/ConnectionForm/ConnectionFormService"; import { useDeleteModal } from "hooks/useDeleteModal"; -import styles from "./DeleteBlock.module.scss"; +import styles from "./ConnectionDeleteBlock.module.scss"; -interface DeleteBlockProps { - type: "source" | "destination" | "connection"; - onDelete: () => 
Promise; - modalAdditionalContent?: React.ReactNode; -} - -export const DeleteBlock: React.FC = ({ type, onDelete }) => { +export const ConnectionDeleteBlock: React.FC = () => { const { mode } = useConnectionFormService(); - const onDeleteButtonClick = useDeleteModal(type, onDelete); + const { connection } = useConnectionEditService(); + const { mutateAsync: deleteConnection } = useDeleteConnection(); + const onDelete = () => deleteConnection(connection); + + const onDeleteButtonClick = useDeleteModal("connection", onDelete, undefined, connection.name); return ( - + - + diff --git a/airbyte-webapp/src/components/common/ConnectionDeleteBlock/index.ts b/airbyte-webapp/src/components/common/ConnectionDeleteBlock/index.ts new file mode 100644 index 00000000000..eb0cf7da406 --- /dev/null +++ b/airbyte-webapp/src/components/common/ConnectionDeleteBlock/index.ts @@ -0,0 +1 @@ +export * from "./ConnectionDeleteBlock"; diff --git a/airbyte-webapp/src/components/common/DeleteBlock/index.ts b/airbyte-webapp/src/components/common/DeleteBlock/index.ts deleted file mode 100644 index efdf28e28c2..00000000000 --- a/airbyte-webapp/src/components/common/DeleteBlock/index.ts +++ /dev/null @@ -1 +0,0 @@ -export * from "./DeleteBlock"; diff --git a/airbyte-webapp/src/components/common/HeadTitle/HeadTitle.tsx b/airbyte-webapp/src/components/common/HeadTitle/HeadTitle.tsx index 7a4c43354fe..624981695cb 100644 --- a/airbyte-webapp/src/components/common/HeadTitle/HeadTitle.tsx +++ b/airbyte-webapp/src/components/common/HeadTitle/HeadTitle.tsx @@ -2,9 +2,10 @@ import React from "react"; import { Helmet } from "react-helmet-async"; import { useIntl } from "react-intl"; +import { useCurrentWorkspaceId } from "area/workspace/utils"; +import { useGetWorkspace } from "core/api"; import { useAuthService } from "core/services/auth"; import { useLocalStorage } from "core/utils/useLocalStorage"; -import { useCurrentWorkspace } from "hooks/services/useWorkspace"; const AIRBYTE = "Airbyte"; const 
SEPARATOR = "|"; @@ -33,12 +34,13 @@ interface WorkspacePrefixedTitleProps { } const WorkspacePrefixedTitle: React.FC = ({ title }) => { - const workspace = useCurrentWorkspace(); + const workspaceId = useCurrentWorkspaceId(); + const workspace = useGetWorkspace(workspaceId, { enabled: !!workspaceId }); return ( {title} diff --git a/airbyte-webapp/src/components/common/Version/Version.tsx b/airbyte-webapp/src/components/common/Version/Version.tsx index 98383640430..c5f86678e7f 100644 --- a/airbyte-webapp/src/components/common/Version/Version.tsx +++ b/airbyte-webapp/src/components/common/Version/Version.tsx @@ -2,10 +2,9 @@ import React from "react"; import { Text } from "components/ui/Text"; -import { useConfig } from "core/config"; +import { config } from "core/config"; export const Version: React.FC = () => { - const config = useConfig(); return ( {config.version} diff --git a/airbyte-webapp/src/components/connection/CatalogDiffModal/CatalogDiffModal.stories.tsx b/airbyte-webapp/src/components/connection/CatalogDiffModal/CatalogDiffModal.stories.tsx index d613bf9dd9c..b20108c4d3d 100644 --- a/airbyte-webapp/src/components/connection/CatalogDiffModal/CatalogDiffModal.stories.tsx +++ b/airbyte-webapp/src/components/connection/CatalogDiffModal/CatalogDiffModal.stories.tsx @@ -1,4 +1,4 @@ -import { ComponentStory, ComponentMeta } from "@storybook/react"; +import { ComponentMeta, ComponentStory } from "@storybook/react"; import { FormattedMessage } from "react-intl"; import { Modal } from "components/ui/Modal"; @@ -17,7 +17,7 @@ const Template: ComponentStory = (args) => { return ( }> - null} /> + null} /> ); diff --git a/airbyte-webapp/src/components/connection/CatalogDiffModal/CatalogDiffModal.test.tsx b/airbyte-webapp/src/components/connection/CatalogDiffModal/CatalogDiffModal.test.tsx index 68cbd0aa4da..6c6d298a97f 100644 --- a/airbyte-webapp/src/components/connection/CatalogDiffModal/CatalogDiffModal.test.tsx +++ 
b/airbyte-webapp/src/components/connection/CatalogDiffModal/CatalogDiffModal.test.tsx @@ -157,7 +157,7 @@ describe("catalog diff modal", () => { { + onComplete={() => { return null; }} /> @@ -206,7 +206,7 @@ describe("catalog diff modal", () => { { + onComplete={() => { return null; }} /> @@ -227,7 +227,7 @@ describe("catalog diff modal", () => { { + onComplete={() => { return null; }} /> @@ -248,7 +248,7 @@ describe("catalog diff modal", () => { { + onComplete={() => { return null; }} /> diff --git a/airbyte-webapp/src/components/connection/CatalogDiffModal/CatalogDiffModal.tsx b/airbyte-webapp/src/components/connection/CatalogDiffModal/CatalogDiffModal.tsx index f6c29bc3cd5..405805e6701 100644 --- a/airbyte-webapp/src/components/connection/CatalogDiffModal/CatalogDiffModal.tsx +++ b/airbyte-webapp/src/components/connection/CatalogDiffModal/CatalogDiffModal.tsx @@ -14,10 +14,10 @@ import { getSortedDiff } from "./utils"; interface CatalogDiffModalProps { catalogDiff: CatalogDiff; catalog: AirbyteCatalog; - onClose: () => void; + onComplete: () => void; } -export const CatalogDiffModal: React.FC = ({ catalogDiff, catalog, onClose }) => { +export const CatalogDiffModal: React.FC = ({ catalogDiff, catalog, onComplete }) => { const { newItems, removedItems, changedItems } = useMemo( () => getSortedDiff(catalogDiff.transforms), [catalogDiff.transforms] @@ -33,7 +33,7 @@ export const CatalogDiffModal: React.FC = ({ catalogDiff,
    - diff --git a/airbyte-webapp/src/components/connection/ConnectionForm/CreateConnectionFormControls.tsx b/airbyte-webapp/src/components/connection/ConnectionForm/CreateConnectionFormControls.tsx index 91f3bdd13d7..217e542ec4c 100644 --- a/airbyte-webapp/src/components/connection/ConnectionForm/CreateConnectionFormControls.tsx +++ b/airbyte-webapp/src/components/connection/ConnectionForm/CreateConnectionFormControls.tsx @@ -36,7 +36,7 @@ export const CreateConnectionFormControls: React.FC = () => { }); return ( - + {errorMessage} diff --git a/airbyte-webapp/src/components/connection/ConnectionForm/CustomTransformationsFormField.tsx b/airbyte-webapp/src/components/connection/ConnectionForm/CustomTransformationsFormField.tsx index 5c6ee27ca94..bcdc1845136 100644 --- a/airbyte-webapp/src/components/connection/ConnectionForm/CustomTransformationsFormField.tsx +++ b/airbyte-webapp/src/components/connection/ConnectionForm/CustomTransformationsFormField.tsx @@ -17,7 +17,7 @@ export const CustomTransformationsFormField: React.FC = () => { const { fields, append, remove, update, move } = useFieldArray({ name: "transformations", }); - const { openModal, closeModal } = useModalService(); + const { openModal } = useModalService(); const defaultTransformation: OperationCreate = useMemo( () => ({ @@ -36,10 +36,10 @@ export const CustomTransformationsFormField: React.FC = () => { ); const openEditModal = (transformationItemIndex?: number) => - openModal({ + openModal({ size: "xl", title: , - content: () => ( + content: ({ onComplete, onCancel }) => ( { isDefined(transformationItemIndex) ? 
update(transformationItemIndex, transformation) : append(transformation); - closeModal(); + onComplete(); }} - onCancel={closeModal} + onCancel={onCancel} /> ), }); diff --git a/airbyte-webapp/src/components/connection/ConnectionForm/DestinationStreamPrefixNameFormField.tsx b/airbyte-webapp/src/components/connection/ConnectionForm/DestinationStreamPrefixNameFormField.tsx index d98ba2bbbe6..be5fc0eb277 100644 --- a/airbyte-webapp/src/components/connection/ConnectionForm/DestinationStreamPrefixNameFormField.tsx +++ b/airbyte-webapp/src/components/connection/ConnectionForm/DestinationStreamPrefixNameFormField.tsx @@ -21,7 +21,7 @@ import { export const DestinationStreamPrefixNameFormField = () => { const { formatMessage } = useIntl(); const { setValue, control } = useFormContext(); - const { openModal, closeModal } = useModalService(); + const { openModal } = useModalService(); const prefix = useWatch({ name: "prefix", control }); const destinationStreamNamesChange = useCallback( @@ -39,20 +39,23 @@ export const DestinationStreamPrefixNameFormField = () => { const openDestinationStreamNamesModal = useCallback( () => - openModal({ + openModal({ size: "sm", title: , - content: () => ( + content: ({ onComplete, onCancel }) => ( { + destinationStreamNamesChange(values); + onComplete(); + }} /> ), }), - [closeModal, destinationStreamNamesChange, openModal, prefix] + [destinationStreamNamesChange, openModal, prefix] ); return ( diff --git a/airbyte-webapp/src/components/connection/ConnectionForm/NamespaceDefinitionFormField.tsx b/airbyte-webapp/src/components/connection/ConnectionForm/NamespaceDefinitionFormField.tsx index 92c54a04daa..ec5d84c66a2 100644 --- a/airbyte-webapp/src/components/connection/ConnectionForm/NamespaceDefinitionFormField.tsx +++ b/airbyte-webapp/src/components/connection/ConnectionForm/NamespaceDefinitionFormField.tsx @@ -14,11 +14,11 @@ import { useModalService } from "hooks/services/Modal"; import { FormConnectionFormValues } from "./formConfig"; 
import { FormFieldLayout } from "./FormFieldLayout"; import { namespaceDefinitionOptions } from "./types"; -import { DestinationNamespaceModal, DestinationNamespaceFormValues } from "../DestinationNamespaceModal"; +import { DestinationNamespaceFormValues, DestinationNamespaceModal } from "../DestinationNamespaceModal"; export const NamespaceDefinitionFormField = () => { const { setValue, control } = useFormContext(); - const { openModal, closeModal } = useModalService(); + const { openModal } = useModalService(); const namespaceDefinition = useWatch({ name: "namespaceDefinition", control }); const namespaceFormat = useWatch({ name: "namespaceFormat", control }); @@ -38,21 +38,24 @@ export const NamespaceDefinitionFormField = () => { const openDestinationNamespaceModal = useCallback( () => - openModal({ + openModal({ size: "lg", title: , - content: () => ( + content: ({ onComplete, onCancel }) => ( { + destinationNamespaceChange(values); + onComplete(); + }} /> ), }), - [closeModal, destinationNamespaceChange, namespaceDefinition, namespaceFormat, openModal] + [destinationNamespaceChange, namespaceDefinition, namespaceFormat, openModal] ); return ( diff --git a/airbyte-webapp/src/components/connection/ConnectionForm/ScheduleFormField/useBasicFrequencyDropdownData.tsx b/airbyte-webapp/src/components/connection/ConnectionForm/ScheduleFormField/useBasicFrequencyDropdownData.tsx index e5a0bb08e96..45567885948 100644 --- a/airbyte-webapp/src/components/connection/ConnectionForm/ScheduleFormField/useBasicFrequencyDropdownData.tsx +++ b/airbyte-webapp/src/components/connection/ConnectionForm/ScheduleFormField/useBasicFrequencyDropdownData.tsx @@ -3,6 +3,7 @@ import { useIntl } from "react-intl"; import { Option } from "components/ui/ListBox"; +import { ConnectorIds } from "area/connector/utils"; import { ConnectionScheduleDataBasicSchedule, WebBackendConnectionRead } from "core/api/types/AirbyteClient"; export const BASIC_FREQUENCY_DEFAULT_VALUE: 
ConnectionScheduleDataBasicSchedule = { units: 24, timeUnit: "hours" }; @@ -34,6 +35,10 @@ export const frequencyConfig: ConnectionScheduleDataBasicSchedule[] = [ BASIC_FREQUENCY_DEFAULT_VALUE, ]; +export const SOURCE_SPECIFIC_FREQUENCY_DEFAULT: Record = { + [ConnectorIds.Sources.MongoDb]: { units: 6, timeUnit: "hours" }, +}; + export const useBasicFrequencyDropdownData = ( additionalFrequency: WebBackendConnectionRead["scheduleData"] ): Array> => { diff --git a/airbyte-webapp/src/components/connection/ConnectionForm/SyncCatalogCard.tsx b/airbyte-webapp/src/components/connection/ConnectionForm/SyncCatalogCard.tsx index 4a574f49064..90fbd9e120c 100644 --- a/airbyte-webapp/src/components/connection/ConnectionForm/SyncCatalogCard.tsx +++ b/airbyte-webapp/src/components/connection/ConnectionForm/SyncCatalogCard.tsx @@ -11,11 +11,11 @@ import { Button } from "components/ui/Button"; import { Card } from "components/ui/Card"; import { FlexContainer } from "components/ui/Flex"; import { Heading } from "components/ui/Heading"; -import { Icon } from "components/ui/Icon"; import { LoadingBackdrop } from "components/ui/LoadingBackdrop"; import { naturalComparatorBy } from "core/utils/objects"; import { useConnectionFormService } from "hooks/services/ConnectionForm/ConnectionFormService"; +import { useExperiment } from "hooks/services/Experiment"; import { FormConnectionFormValues, SyncStreamFieldWithId } from "./formConfig"; import { useRefreshSourceSchemaWithConfirmationOnDirty } from "./refreshSourceSchemaWithConfirmationOnDirty"; @@ -46,6 +46,7 @@ export const SyncCatalogCard: React.FC = () => { name: "syncCatalog.streams", control, }); + const isSimplifiedCreation = useExperiment("connection.simplifiedCreation", false); const watchedPrefix = useWatch({ name: "prefix", control }); const watchedNamespaceDefinition = useWatch({ name: "namespaceDefinition", control }); @@ -87,12 +88,17 @@ export const SyncCatalogCard: React.FC = () => { }; }, [locationState?.action, 
locationState?.namespace, locationState?.streamName, filteredStreams]); + let cardTitle = mode === "readonly" ? "form.dataSync.readonly" : "form.dataSync"; + if (isSimplifiedCreation) { + cardTitle = mode === "readonly" ? "connectionForm.selectStreams.readonly" : "connectionForm.selectStreams"; + } + return ( - + {mode !== "readonly" && ( diff --git a/airbyte-webapp/src/components/connection/ConnectionForm/__snapshots__/formConfig.test.ts.snap b/airbyte-webapp/src/components/connection/ConnectionForm/__snapshots__/formConfig.test.ts.snap index 3cfd96bb56a..8651a786ebc 100644 --- a/airbyte-webapp/src/components/connection/ConnectionForm/__snapshots__/formConfig.test.ts.snap +++ b/airbyte-webapp/src/components/connection/ConnectionForm/__snapshots__/formConfig.test.ts.snap @@ -11,6 +11,7 @@ exports[`#useInitialFormValues should generate initial values w/ 'not create' mo "normalization": "basic", "notifySchemaChanges": true, "prefix": "", + "scheduleData": undefined, "scheduleType": "manual", "syncCatalog": { "streams": [ @@ -1566,12 +1567,14 @@ exports[`#useInitialFormValues should generate initial values w/ 'not create' mo { "backfillPreference": "disabled", "geography": "auto", + "name": "Scrafty <> Heroku Postgres", "namespaceDefinition": "source", "namespaceFormat": "\${SOURCE_NAMESPACE}", "nonBreakingChangesPreference": "ignore", "normalization": "basic", "notifySchemaChanges": true, "prefix": "", + "scheduleData": undefined, "scheduleType": "manual", "syncCatalog": { "streams": [ @@ -3134,6 +3137,7 @@ exports[`#useInitialFormValues should generate initial values w/ no 'not create' "normalization": "basic", "notifySchemaChanges": true, "prefix": "", + "scheduleData": undefined, "scheduleType": "manual", "syncCatalog": { "streams": [ diff --git a/airbyte-webapp/src/components/connection/ConnectionForm/calculateInitialCatalog.test.ts b/airbyte-webapp/src/components/connection/ConnectionForm/calculateInitialCatalog.test.ts index 09085efc7c6..ba0fa51b4da 100644 --- 
a/airbyte-webapp/src/components/connection/ConnectionForm/calculateInitialCatalog.test.ts +++ b/airbyte-webapp/src/components/connection/ConnectionForm/calculateInitialCatalog.test.ts @@ -14,7 +14,7 @@ import { analyzeSyncCatalogBreakingChanges } from "./calculateInitialCatalog"; const mockSyncSchemaStream: AirbyteStreamAndConfiguration = { stream: { sourceDefinedCursor: true, - defaultCursorField: ["source_cursor"], + defaultCursorField: ["new_source_cursor"], sourceDefinedPrimaryKey: [["new_primary_key"]], jsonSchema: {}, name: "test", @@ -31,6 +31,26 @@ const mockSyncSchemaStream: AirbyteStreamAndConfiguration = { }, }; +const mockSyncSchemaStreamUserDefined: AirbyteStreamAndConfiguration = { + stream: { + sourceDefinedCursor: true, + defaultCursorField: [], + sourceDefinedPrimaryKey: [], + jsonSchema: {}, + name: "test", + namespace: "namespace-test", + supportedSyncModes: [], + }, + config: { + destinationSyncMode: DestinationSyncMode.append, + selected: false, + syncMode: SyncMode.full_refresh, + cursorField: ["old_cursor"], + primaryKey: [["old_primary_key"]], + aliasName: "", + }, +}; + describe("analyzeSyncCatalogBreakingChanges", () => { it("should return syncCatalog unchanged when schemaChange is no_change and catalogDiff is undefined", () => { const syncCatalog: AirbyteCatalog = { streams: [mockSyncSchemaStream] }; @@ -62,7 +82,7 @@ describe("analyzeSyncCatalogBreakingChanges", () => { ], }; const result = analyzeSyncCatalogBreakingChanges(syncCatalog, catalogDiff, SchemaChange.breaking); - expect(result.streams[0].config?.primaryKey).toEqual([]); + expect(result.streams[0].config?.primaryKey).toEqual([["new_primary_key"]]); }); it("should return syncCatalog with transformed streams when there are breaking changes - cursor", () => { @@ -83,6 +103,48 @@ describe("analyzeSyncCatalogBreakingChanges", () => { ], }; const result = analyzeSyncCatalogBreakingChanges(syncCatalog, catalogDiff, SchemaChange.breaking); + 
expect(result.streams[0].config?.cursorField).toEqual(["new_source_cursor"]); + }); + + it("should return syncCatalog with transformed streams when there are breaking changes - primaryKey - user-defined", () => { + const syncCatalog: AirbyteCatalog = { streams: [mockSyncSchemaStreamUserDefined] }; + const catalogDiff: CatalogDiff = { + transforms: [ + { + transformType: StreamTransformTransformType.update_stream, + streamDescriptor: { name: "test", namespace: "namespace-test" }, + updateStream: [ + { + breaking: true, + transformType: FieldTransformTransformType.remove_field, + fieldName: ["old_primary_key"], + }, + ], + }, + ], + }; + const result = analyzeSyncCatalogBreakingChanges(syncCatalog, catalogDiff, SchemaChange.breaking); + expect(result.streams[0].config?.primaryKey).toEqual([]); + }); + + it("should return syncCatalog with transformed streams when there are breaking changes - cursor - user-defined", () => { + const syncCatalog: AirbyteCatalog = { streams: [mockSyncSchemaStreamUserDefined] }; + const catalogDiff: CatalogDiff = { + transforms: [ + { + transformType: StreamTransformTransformType.update_stream, + streamDescriptor: { name: "test", namespace: "namespace-test" }, + updateStream: [ + { + breaking: true, + transformType: FieldTransformTransformType.remove_field, + fieldName: ["old_cursor"], + }, + ], + }, + ], + }; + const result = analyzeSyncCatalogBreakingChanges(syncCatalog, catalogDiff, SchemaChange.breaking); expect(result.streams[0].config?.cursorField).toEqual([]); }); diff --git a/airbyte-webapp/src/components/connection/ConnectionForm/calculateInitialCatalog.ts b/airbyte-webapp/src/components/connection/ConnectionForm/calculateInitialCatalog.ts index e33988585ab..ef8d6f0fe06 100644 --- a/airbyte-webapp/src/components/connection/ConnectionForm/calculateInitialCatalog.ts +++ b/airbyte-webapp/src/components/connection/ConnectionForm/calculateInitialCatalog.ts @@ -19,20 +19,18 @@ const clearBreakingFieldChanges = ( } const { primaryKey, 
cursorField } = nodeStream.config; + const stream = nodeStream.stream; let clearPrimaryKey = false; let clearCursorField = false; - for (const streamTransformation of breakingChangesByStream) { if (!streamTransformation.updateStream || !streamTransformation.updateStream?.length) { continue; } - // get all of the removed field paths for this transformation const breakingFieldPaths = streamTransformation.updateStream .filter(({ breaking }) => breaking) .map((update) => update.fieldName); - // if there is a primary key in the config, and any of its field paths were removed, we'll be clearing it if ( !!primaryKey?.length && @@ -40,20 +38,26 @@ const clearBreakingFieldChanges = ( ) { clearPrimaryKey = true; } - // if there is a cursor field, and any of its field path was removed, we'll be clearing it if (!!cursorField?.length && breakingFieldPaths.some((path) => isEqual(path, cursorField))) { clearCursorField = true; } } - if (clearPrimaryKey || clearCursorField) { return { ...nodeStream, config: { ...nodeStream.config, - primaryKey: clearPrimaryKey ? [] : nodeStream.config.primaryKey, - cursorField: clearCursorField ? [] : nodeStream.config.cursorField, + primaryKey: stream?.sourceDefinedPrimaryKey // it's possible there's a new source-defined primary key, in which case that should take precedence + ? stream?.sourceDefinedPrimaryKey + : clearPrimaryKey + ? [] + : nodeStream.config.primaryKey, + cursorField: nodeStream.stream?.defaultCursorField + ? nodeStream.stream?.defaultCursorField // likewise, a source-defined cursor should never be cleared + : clearCursorField + ? 
[] + : nodeStream.config.cursorField, }, }; } diff --git a/airbyte-webapp/src/components/connection/ConnectionForm/formConfig.test.ts b/airbyte-webapp/src/components/connection/ConnectionForm/formConfig.test.ts index eb2790f5429..84dd8ca3015 100644 --- a/airbyte-webapp/src/components/connection/ConnectionForm/formConfig.test.ts +++ b/airbyte-webapp/src/components/connection/ConnectionForm/formConfig.test.ts @@ -26,7 +26,7 @@ describe("#useInitialFormValues", () => { it("should generate initial values w/ 'not create' mode: true", () => { const { result } = renderHook(() => useInitialFormValues(mockConnection, mockDestinationDefinitionVersion, true)); expect(result.current).toMatchSnapshot(); - expect(result.current.name).toBeUndefined(); + expect(result.current.name).toBeDefined(); }); // This is a low-priority test diff --git a/airbyte-webapp/src/components/connection/ConnectionForm/formConfig.tsx b/airbyte-webapp/src/components/connection/ConnectionForm/formConfig.tsx index 408eb4612fe..a6930b33d0c 100644 --- a/airbyte-webapp/src/components/connection/ConnectionForm/formConfig.tsx +++ b/airbyte-webapp/src/components/connection/ConnectionForm/formConfig.tsx @@ -19,14 +19,14 @@ import { SchemaChangeBackfillPreference, } from "core/api/types/AirbyteClient"; import { FeatureItem, useFeature } from "core/services/features"; -import { - ConnectionOrPartialConnection, - useConnectionFormService, -} from "hooks/services/ConnectionForm/ConnectionFormService"; +import { ConnectionOrPartialConnection } from "hooks/services/ConnectionForm/ConnectionFormService"; import { useExperiment } from "hooks/services/Experiment"; import { analyzeSyncCatalogBreakingChanges } from "./calculateInitialCatalog"; -import { BASIC_FREQUENCY_DEFAULT_VALUE } from "./ScheduleFormField/useBasicFrequencyDropdownData"; +import { + BASIC_FREQUENCY_DEFAULT_VALUE, + SOURCE_SPECIFIC_FREQUENCY_DEFAULT, +} from "./ScheduleFormField/useBasicFrequencyDropdownData"; import { createConnectionValidationSchema 
} from "./schema"; import { DbtOperationRead } from "../TransformationForm"; @@ -34,7 +34,7 @@ import { DbtOperationRead } from "../TransformationForm"; * react-hook-form form values type for the connection form */ export interface FormConnectionFormValues { - name?: string; + name: string; scheduleType: ConnectionScheduleType; scheduleData?: ConnectionScheduleData; namespaceDefinition: NamespaceDefinitionType; @@ -71,11 +71,10 @@ export const SUPPORTED_MODES: Array<[SyncMode, DestinationSyncMode]> = [ export const useConnectionValidationSchema = () => { const allowSubOneHourCronExpressions = useFeature(FeatureItem.AllowSyncSubOneHourCronExpressions); const allowAutoDetectSchema = useFeature(FeatureItem.AllowAutoDetectSchema); - const { mode } = useConnectionFormService(); return useMemo( - () => createConnectionValidationSchema(mode, allowSubOneHourCronExpressions, allowAutoDetectSchema), - [allowAutoDetectSchema, allowSubOneHourCronExpressions, mode] + () => createConnectionValidationSchema(allowSubOneHourCronExpressions, allowAutoDetectSchema), + [allowAutoDetectSchema, allowSubOneHourCronExpressions] ); }; @@ -119,23 +118,16 @@ export const useInitialFormValues = ( return useMemo(() => { const initialValues: FormConnectionFormValues = { - // set name field - ...(isEditMode - ? {} - : { - name: connection.name ?? `${connection.source.name} → ${connection.destination.name}`, - }), + name: connection.name ?? `${connection.source.name} → ${connection.destination.name}`, scheduleType: connection.scheduleType ?? ConnectionScheduleType.basic, - // set scheduleData field if it's defined, otherwise there is no need to set it - ...{ - ...(connection.scheduleData - ? { scheduleData: connection.scheduleData } - : connection.scheduleType === ConnectionScheduleType.manual - ? {} - : { - scheduleData: { basicSchedule: BASIC_FREQUENCY_DEFAULT_VALUE }, - }), - }, + scheduleData: connection.scheduleData + ? 
connection.scheduleData + : connection.scheduleType === ConnectionScheduleType.manual + ? undefined + : { + basicSchedule: + SOURCE_SPECIFIC_FREQUENCY_DEFAULT[connection.source?.sourceDefinitionId] ?? BASIC_FREQUENCY_DEFAULT_VALUE, + }, namespaceDefinition: connection.namespaceDefinition || NamespaceDefinitionType.destination, // set connection's namespaceFormat if it's defined, otherwise there is no need to set it ...{ @@ -165,9 +157,9 @@ export const useInitialFormValues = ( return initialValues; }, [ - isEditMode, connection.name, connection.source.name, + connection.source?.sourceDefinitionId, connection.destination.name, connection.scheduleType, connection.scheduleData, @@ -182,6 +174,7 @@ export const useInitialFormValues = ( defaultNonBreakingChangesPreference, workspace.defaultGeography, destDefinitionVersion.supportsDbt, + isEditMode, syncCatalog, catalogDiff, schemaChange, diff --git a/airbyte-webapp/src/components/connection/ConnectionForm/frequencyConfig.ts b/airbyte-webapp/src/components/connection/ConnectionForm/frequencyConfig.ts deleted file mode 100644 index 3c8245bf34f..00000000000 --- a/airbyte-webapp/src/components/connection/ConnectionForm/frequencyConfig.ts +++ /dev/null @@ -1,32 +0,0 @@ -import { ConnectionScheduleDataBasicSchedule } from "core/api/types/AirbyteClient"; - -export const frequencyConfig: ConnectionScheduleDataBasicSchedule[] = [ - { - units: 1, - timeUnit: "hours", - }, - { - units: 2, - timeUnit: "hours", - }, - { - units: 3, - timeUnit: "hours", - }, - { - units: 6, - timeUnit: "hours", - }, - { - units: 8, - timeUnit: "hours", - }, - { - units: 12, - timeUnit: "hours", - }, - { - units: 24, - timeUnit: "hours", - }, -]; diff --git a/airbyte-webapp/src/components/connection/ConnectionForm/schema.ts b/airbyte-webapp/src/components/connection/ConnectionForm/schema.ts index 06b4a3f460a..0eb22be5cc2 100644 --- a/airbyte-webapp/src/components/connection/ConnectionForm/schema.ts +++ 
b/airbyte-webapp/src/components/connection/ConnectionForm/schema.ts @@ -16,7 +16,6 @@ import { SyncMode, SchemaChangeBackfillPreference, } from "core/api/types/AirbyteClient"; -import { ConnectionFormMode } from "hooks/services/ConnectionForm/ConnectionFormService"; import { dbtOperationReadOrCreateSchema } from "../TransformationForm"; @@ -103,6 +102,9 @@ const streamConfigSchema: SchemaOf = yup.object({ .optional(), aliasName: yup.string().optional(), primaryKey: yup.array().of(yup.array().of(yup.string())).optional(), + minimumGenerationId: yup.number().optional(), + generationId: yup.number().optional(), + syncId: yup.number().optional(), }); export const streamAndConfigurationSchema: SchemaOf = yup.object({ @@ -184,15 +186,13 @@ export const namespaceFormatSchema = yup.string().when("namespaceDefinition", { * generate yup schema for the create connection form */ export const createConnectionValidationSchema = ( - mode: ConnectionFormMode, allowSubOneHourCronExpressions: boolean, allowAutoDetectSchema: boolean ) => yup .object({ - // The connection name during Editing is handled separately from the form - name: mode === "create" ? 
yup.string().required("form.empty.error") : yup.string().notRequired(), - // scheduleType can't de 'undefined', make it required() + name: yup.string().required("form.empty.error"), + // scheduleType can't be 'undefined', make it required() scheduleType: yup.mixed().oneOf(Object.values(ConnectionScheduleType)).required(), scheduleData: getScheduleDataSchema(allowSubOneHourCronExpressions), namespaceDefinition: namespaceDefinitionSchema.required("form.empty.error"), diff --git a/airbyte-webapp/src/components/connection/ConnectionHeaderControls/ConnectionHeaderControls.module.scss b/airbyte-webapp/src/components/connection/ConnectionHeaderControls/ConnectionHeaderControls.module.scss new file mode 100644 index 00000000000..421b431efdc --- /dev/null +++ b/airbyte-webapp/src/components/connection/ConnectionHeaderControls/ConnectionHeaderControls.module.scss @@ -0,0 +1,3 @@ +.switch { + width: 90px; +} diff --git a/airbyte-webapp/src/components/connection/ConnectionHeaderControls/ConnectionHeaderControls.tsx b/airbyte-webapp/src/components/connection/ConnectionHeaderControls/ConnectionHeaderControls.tsx new file mode 100644 index 00000000000..5cafbaa6bd8 --- /dev/null +++ b/airbyte-webapp/src/components/connection/ConnectionHeaderControls/ConnectionHeaderControls.tsx @@ -0,0 +1,115 @@ +import React from "react"; +import { FormattedMessage } from "react-intl"; +import { useNavigate } from "react-router-dom"; + +import { Box } from "components/ui/Box"; +import { Button } from "components/ui/Button"; +import { FlexContainer } from "components/ui/Flex"; +import { SwitchNext } from "components/ui/SwitchNext"; +import { Text } from "components/ui/Text"; +import { Tooltip } from "components/ui/Tooltip"; + +import { ConnectionStatus } from "core/api/types/AirbyteClient"; +import { useSchemaChanges } from "hooks/connection/useSchemaChanges"; +import { useConnectionEditService } from "hooks/services/ConnectionEdit/ConnectionEditService"; +import { useConnectionFormService } from 
"hooks/services/ConnectionForm/ConnectionFormService"; +import { useExperiment } from "hooks/services/Experiment"; +import { ConnectionRoutePaths } from "pages/routePaths"; + +import styles from "./ConnectionHeaderControls.module.scss"; +import { FormattedScheduleDataMessage } from "./FormattedScheduleDataMessage"; +import { useConnectionStatus } from "../ConnectionStatus/useConnectionStatus"; +import { useConnectionSyncContext } from "../ConnectionSync/ConnectionSyncContext"; +import { FreeHistoricalSyncIndicator } from "../EnabledControl/FreeHistoricalSyncIndicator"; + +export const ConnectionHeaderControls: React.FC = () => { + const { mode } = useConnectionFormService(); + const { connection, updateConnectionStatus, connectionUpdating } = useConnectionEditService(); + const { hasBreakingSchemaChange } = useSchemaChanges(connection.schemaChange); + const navigate = useNavigate(); + const sayClearInsteadOfReset = useExperiment("connection.clearNotReset", false); + + const connectionStatus = useConnectionStatus(connection.connectionId ?? ""); + const isReadOnly = mode === "readonly"; + + const { syncStarting, cancelStarting, cancelJob, syncConnection, connectionEnabled, resetStarting, jobResetRunning } = + useConnectionSyncContext(); + + const onScheduleBtnClick = () => { + navigate(`${ConnectionRoutePaths.Settings}`, { + state: { action: "scheduleType" }, + }); + }; + + const onChangeStatus = async (checked: boolean) => + await updateConnectionStatus(checked ? 
ConnectionStatus.active : ConnectionStatus.inactive); + + const isDisabled = isReadOnly || syncStarting || cancelStarting || resetStarting; + const isStartSyncBtnDisabled = isDisabled || !connectionEnabled; + const isCancelBtnDisabled = isDisabled || connectionUpdating; + const isSwitchDisabled = isDisabled || hasBreakingSchemaChange; + + return ( + + + + + + } + placement="top" + > + + + {!connectionStatus.isRunning && ( + + )} + {connectionStatus.isRunning && cancelJob && ( + + )} + + + + + ); +}; diff --git a/airbyte-webapp/src/components/connection/ConnectionHeaderControls/FormattedScheduleDataMessage.test.tsx b/airbyte-webapp/src/components/connection/ConnectionHeaderControls/FormattedScheduleDataMessage.test.tsx new file mode 100644 index 00000000000..ba81396a96f --- /dev/null +++ b/airbyte-webapp/src/components/connection/ConnectionHeaderControls/FormattedScheduleDataMessage.test.tsx @@ -0,0 +1,58 @@ +import { render } from "@testing-library/react"; + +import { TestWrapper } from "test-utils"; + +import { ConnectionScheduleData, ConnectionScheduleDataBasicScheduleTimeUnit } from "core/api/types/AirbyteClient"; + +import { FormattedScheduleDataMessage, FormattedScheduleDataMessageProps } from "./FormattedScheduleDataMessage"; + +describe("FormattedScheduleDataMessage", () => { + const renderComponent = (props: FormattedScheduleDataMessageProps) => { + return render( + + + + ); + }; + + it("should render 'Manual' schedule type if scheduleData wasn't provided", () => { + const { getByText } = renderComponent({ scheduleType: "manual" }); + expect(getByText("Manual")).toBeInTheDocument(); + }); + + it("should render '24 hours' schedule type", () => { + const scheduleData = { + basicSchedule: { + units: 24, + timeUnit: "hours" as ConnectionScheduleDataBasicScheduleTimeUnit, + }, + }; + const { getByText } = renderComponent({ scheduleType: "basic", scheduleData }); + expect(getByText("Every 24 hours")).toBeInTheDocument(); + }); + + it("should render 'Cron' 
schedule type with humanized format", () => { + const scheduleData = { + cron: { + cronExpression: "0 0 14 ? * THU" as string, + cronTimeZone: "UTC", + }, + }; + const { getByText } = renderComponent({ scheduleType: "cron", scheduleData }); + expect(getByText("At 02:00 PM, only on Thursday")).toBeInTheDocument(); + }); + + it("should NOT render anything", () => { + const scheduleData = { + basic: { + units: 24, + timeUnit: "hours" as ConnectionScheduleDataBasicScheduleTimeUnit, + }, + }; + const { queryByText } = renderComponent({ + scheduleType: "cron", + scheduleData: scheduleData as unknown as ConnectionScheduleData, // for testing purposes + }); + expect(queryByText("24")).toBeNull(); + }); +}); diff --git a/airbyte-webapp/src/components/connection/ConnectionHeaderControls/FormattedScheduleDataMessage.tsx b/airbyte-webapp/src/components/connection/ConnectionHeaderControls/FormattedScheduleDataMessage.tsx new file mode 100644 index 00000000000..d8c16a46eb7 --- /dev/null +++ b/airbyte-webapp/src/components/connection/ConnectionHeaderControls/FormattedScheduleDataMessage.tsx @@ -0,0 +1,45 @@ +import React from "react"; +import { FormattedMessage } from "react-intl"; + +import { ConnectionScheduleData, ConnectionScheduleType } from "core/api/types/AirbyteClient"; +import { humanizeCron } from "core/utils/cron"; + +export interface FormattedScheduleDataMessageProps { + scheduleType?: ConnectionScheduleType; + scheduleData?: ConnectionScheduleData; +} + +/** + * Formats schedule data based on the schedule type and schedule data. + * If schedule type is "manual" returns "Manual". + * If schedule type is "basic" returns "Every {units} {timeUnit}". + * If schedule type is "cron" returns humanized cron expression. 
+ * @param scheduleType + * @param scheduleData + */ +export const FormattedScheduleDataMessage: React.FC = ({ + scheduleType, + scheduleData, +}: { + scheduleType?: ConnectionScheduleType; + scheduleData?: ConnectionScheduleData; +}) => { + if (scheduleType === "manual") { + return ; + } + + if (scheduleType === "basic" && scheduleData?.basicSchedule) { + return ( + + ); + } + + if (scheduleType === "cron" && scheduleData?.cron) { + return <>{humanizeCron(scheduleData.cron.cronExpression)}; + } + + return null; +}; diff --git a/airbyte-webapp/src/components/connection/ConnectionHeaderControls/index.ts b/airbyte-webapp/src/components/connection/ConnectionHeaderControls/index.ts new file mode 100644 index 00000000000..77b3a955a0b --- /dev/null +++ b/airbyte-webapp/src/components/connection/ConnectionHeaderControls/index.ts @@ -0,0 +1 @@ +export { ConnectionHeaderControls } from "./ConnectionHeaderControls"; diff --git a/airbyte-webapp/src/components/connection/ConnectionStatusIndicator/ConnectionStatusIndicator.module.scss b/airbyte-webapp/src/components/connection/ConnectionStatusIndicator/ConnectionStatusIndicator.module.scss index 6d5fab33906..630b78b4d22 100644 --- a/airbyte-webapp/src/components/connection/ConnectionStatusIndicator/ConnectionStatusIndicator.module.scss +++ b/airbyte-webapp/src/components/connection/ConnectionStatusIndicator/ConnectionStatusIndicator.module.scss @@ -4,8 +4,10 @@ .status { position: relative; + transform: scale(1.2); .icon { + transform: scale(1.1); width: 20px; height: 20px; display: flex; @@ -34,9 +36,3 @@ } } } - -.spinner { - position: absolute; - top: -1px; - left: -1px; -} diff --git a/airbyte-webapp/src/components/connection/ConnectionSync/ConnectionSyncButtons.module.scss b/airbyte-webapp/src/components/connection/ConnectionSync/ConnectionSyncButtons.module.scss index 4bd0d346ed8..461f138259d 100644 --- a/airbyte-webapp/src/components/connection/ConnectionSync/ConnectionSyncButtons.module.scss +++ 
b/airbyte-webapp/src/components/connection/ConnectionSync/ConnectionSyncButtons.module.scss @@ -1,8 +1,9 @@ +@use "scss/colors"; + :export { syncIconHeight: 15px; } -.buttons { - display: flex; - gap: 5px; +.clearDataLabel > p { + color: colors.$red; } diff --git a/airbyte-webapp/src/components/connection/ConnectionSync/ConnectionSyncButtons.tsx b/airbyte-webapp/src/components/connection/ConnectionSync/ConnectionSyncButtons.tsx index 30f6c98680e..4280dd5af09 100644 --- a/airbyte-webapp/src/components/connection/ConnectionSync/ConnectionSyncButtons.tsx +++ b/airbyte-webapp/src/components/connection/ConnectionSync/ConnectionSyncButtons.tsx @@ -1,15 +1,19 @@ +import classNames from "classnames"; import { useCallback } from "react"; import { FormattedMessage, useIntl } from "react-intl"; +import { Box } from "components/ui/Box"; import { Button, ButtonVariant } from "components/ui/Button"; import { DropdownMenu, DropdownMenuOptionType } from "components/ui/DropdownMenu"; import { FlexContainer } from "components/ui/Flex"; -import { Icon } from "components/ui/Icon"; +import { Text } from "components/ui/Text"; import { ConnectionStatus } from "core/api/types/AirbyteClient"; import { useConfirmationModalService } from "hooks/services/ConfirmationModal"; import { useConnectionFormService } from "hooks/services/ConnectionForm/ConnectionFormService"; +import { useExperiment } from "hooks/services/Experiment"; +import styles from "./ConnectionSyncButtons.module.scss"; import { useConnectionSyncContext } from "./ConnectionSyncContext"; import { useConnectionStatus } from "../ConnectionStatus/useConnectionStatus"; @@ -41,24 +45,43 @@ export const ConnectionSyncButtons: React.FC = ({ } = useConnectionSyncContext(); const { mode, connection } = useConnectionFormService(); const isReadOnly = mode === "readonly"; + const sayClearInsteadOfReset = useExperiment("connection.clearNotReset", false); const connectionStatus = useConnectionStatus(connection.connectionId ?? 
""); const { openConfirmationModal, closeConfirmationModal } = useConfirmationModalService(); const resetWithModal = useCallback(() => { - openConfirmationModal({ - text: `form.resetDataText`, - title: `form.resetData`, - submitButtonText: "form.reset", - cancelButtonText: "form.noNeed", - onSubmit: async () => { - await resetStreams(); - closeConfirmationModal(); - }, - submitButtonDataId: "reset", - }); - }, [closeConfirmationModal, openConfirmationModal, resetStreams]); + sayClearInsteadOfReset + ? openConfirmationModal({ + title: , + text: "connection.actions.clearData.confirm.text", + additionalContent: ( + + + + + + ), + submitButtonText: "connection.stream.actions.clearData.confirm.submit", + cancelButtonText: "connection.stream.actions.clearData.confirm.cancel", + onSubmit: async () => { + await resetStreams(); + closeConfirmationModal(); + }, + }) + : openConfirmationModal({ + text: `form.resetDataText`, + title: `form.resetData`, + submitButtonText: "form.reset", + cancelButtonText: "form.noNeed", + onSubmit: async () => { + await resetStreams(); + closeConfirmationModal(); + }, + submitButtonDataId: "reset", + }); + }, [closeConfirmationModal, openConfirmationModal, resetStreams, sayClearInsteadOfReset]); const handleDropdownMenuOptionClick = (optionClicked: DropdownMenuOptionType) => { switch (optionClicked.value) { @@ -73,7 +96,7 @@ export const ConnectionSyncButtons: React.FC = ({ {!connectionStatus.isRunning && ( )} @@ -101,16 +130,19 @@ export const ConnectionSyncButtons: React.FC = ({ data-testid="job-history-dropdown-menu" options={[ { - displayName: formatMessage({ id: "connection.resetData" }), + displayName: formatMessage({ + id: sayClearInsteadOfReset ? 
"connection.stream.actions.clearData" : "connection.resetData", + }), value: ContextMenuOptions.ResetData, disabled: connectionStatus.isRunning || connection.status !== ConnectionStatus.active || mode === "readonly", "data-testid": "reset-data-dropdown-option", + className: classNames({ [styles.clearDataLabel]: sayClearInsteadOfReset }), }, ]} onChange={handleDropdownMenuOptionClick} > - {() => diff --git a/airbyte-webapp/src/components/connection/CreateConnection/CreateNewSource.tsx b/airbyte-webapp/src/components/connection/CreateConnection/CreateNewSource.tsx index 2fbb6b5f2fe..ddef3623f17 100644 --- a/airbyte-webapp/src/components/connection/CreateConnection/CreateNewSource.tsx +++ b/airbyte-webapp/src/components/connection/CreateConnection/CreateNewSource.tsx @@ -5,10 +5,9 @@ import { PageContainer } from "components/PageContainer"; import { SelectConnector } from "components/source/SelectConnector"; import { Box } from "components/ui/Box"; import { Button } from "components/ui/Button"; -import { Icon } from "components/ui/Icon"; import { useSuggestedSources } from "area/connector/utils"; -import { useSourceDefinitionList, useCreateSource } from "core/api"; +import { useCreateSource, useSourceDefinitionList } from "core/api"; import { AppActionCodes, useAppMonitoringService } from "hooks/services/AppMonitoringService"; import { useFormChangeTrackerService } from "hooks/services/FormChangeTracker"; import { SourceForm, SourceFormValues } from "pages/source/CreateSourcePage/SourceForm"; @@ -60,7 +59,7 @@ export const CreateNewSource: React.FC = () => { - diff --git a/airbyte-webapp/src/components/connection/CreateConnection/RadioButtonTiles.module.scss b/airbyte-webapp/src/components/connection/CreateConnection/RadioButtonTiles.module.scss index 42eceeb2aee..7c4d758171a 100644 --- a/airbyte-webapp/src/components/connection/CreateConnection/RadioButtonTiles.module.scss +++ b/airbyte-webapp/src/components/connection/CreateConnection/RadioButtonTiles.module.scss @@ 
-24,6 +24,13 @@ &--disabled { cursor: not-allowed; } + + &--light.radioButtonTiles__toggle--light { + border: none; + outline: none; + padding-top: 0; + padding-bottom: 0; + } } &__dot { diff --git a/airbyte-webapp/src/components/connection/CreateConnection/RadioButtonTiles.tsx b/airbyte-webapp/src/components/connection/CreateConnection/RadioButtonTiles.tsx index e2e3bb2fcf0..9d917dac53b 100644 --- a/airbyte-webapp/src/components/connection/CreateConnection/RadioButtonTiles.tsx +++ b/airbyte-webapp/src/components/connection/CreateConnection/RadioButtonTiles.tsx @@ -1,6 +1,5 @@ import classNames from "classnames"; import { ComponentProps } from "react"; -import { FormattedMessage, useIntl } from "react-intl"; import { Box } from "components/ui/Box"; import { FlexContainer, FlexItem } from "components/ui/Flex"; @@ -11,9 +10,8 @@ import { SelectedIndicatorDot } from "./SelectedIndicatorDot"; interface RadioButtonTilesOption { value: T; - label: string; - labelValues?: ComponentProps["values"]; - description: string; + label: React.ReactNode; + description: React.ReactNode; extra?: React.ReactNode; disabled?: boolean; } @@ -24,6 +22,7 @@ interface RadioButtonTilesProps { onSelectRadioButton: (value: T) => void; name: string; direction?: ComponentProps["direction"]; + light?: boolean; } export const RadioButtonTiles = ({ @@ -32,46 +31,45 @@ export const RadioButtonTiles = ({ selectedValue, name, direction, -}: RadioButtonTilesProps) => { - const { formatMessage } = useIntl(); - return ( - - {options.map((option) => ( - - onSelectRadioButton(option.value)} - className={styles.radioButtonTiles__hiddenInput} - data-testid={`radio-button-tile-${name}-${option.value}`} - /> - + + ))} + +); diff --git a/airbyte-webapp/src/components/connection/CreateConnection/SelectDestination.module.scss b/airbyte-webapp/src/components/connection/CreateConnection/SelectDestination.module.scss index 45ffca0140e..67f84d77f76 100644 --- 
a/airbyte-webapp/src/components/connection/CreateConnection/SelectDestination.module.scss +++ b/airbyte-webapp/src/components/connection/CreateConnection/SelectDestination.module.scss @@ -1,5 +1,6 @@ -@forward "src/components/ui/Button/Button.module.scss"; +// stylelint-disable-next-line airbyte/no-use-renaming +@use "scss/mixins"; .linkText { - text-decoration: none; + @include mixins.link-text; } diff --git a/airbyte-webapp/src/components/connection/CreateConnection/SelectDestination.tsx b/airbyte-webapp/src/components/connection/CreateConnection/SelectDestination.tsx index dbf07bc112a..3cce3965f29 100644 --- a/airbyte-webapp/src/components/connection/CreateConnection/SelectDestination.tsx +++ b/airbyte-webapp/src/components/connection/CreateConnection/SelectDestination.tsx @@ -1,6 +1,6 @@ import classNames from "classnames"; import { useMemo } from "react"; -import { FormattedMessage } from "react-intl"; +import { FormattedMessage, useIntl } from "react-intl"; import { useSearchParams } from "react-router-dom"; import { CloudInviteUsersHint } from "components/CloudInviteUsersHint"; @@ -13,6 +13,7 @@ import { Link } from "components/ui/Link"; import { useCurrentWorkspaceLink } from "area/workspace/utils"; import { useConnectionList, useDestinationList } from "core/api"; +import { PageTrackingCodes, useTrackPage } from "core/services/analytics"; import { useExperiment } from "hooks/services/Experiment"; import { ConnectionRoutePaths, RoutePaths } from "pages/routePaths"; @@ -29,6 +30,8 @@ export const DESTINATION_TYPE_PARAM = "destinationType"; export const DESTINATION_ID_PARAM = "destinationId"; export const SelectDestination: React.FC = () => { + useTrackPage(PageTrackingCodes.CONNECTIONS_NEW_DEFINE_DESTINATION); + const { formatMessage } = useIntl(); const { destinations } = useDestinationList(); const connectionList = useConnectionList(); const [searchParams, setSearchParams] = useSearchParams(); @@ -89,14 +92,14 @@ export const SelectDestination: React.FC = 
() => { options={[ { value: EXISTING_DESTINATION_TYPE, - label: "connectionForm.destinationExisting", - description: "connectionForm.destinationExistingDescription", + label: formatMessage({ id: "connectionForm.destinationExisting" }), + description: formatMessage({ id: "connectionForm.destinationExistingDescription" }), disabled: destinations.length === 0, }, { value: NEW_DESTINATION_TYPE, - label: "connectionForm.destinationNew", - description: "connectionForm.destinationNewDescription", + label: formatMessage({ id: "connectionForm.destinationNew" }), + description: formatMessage({ id: "connectionForm.destinationNewDescription" }), }, ]} selectedValue={selectedDestinationType} @@ -121,7 +124,7 @@ export const SelectDestination: React.FC = () => { diff --git a/airbyte-webapp/src/components/connection/CreateConnection/SelectSource.tsx b/airbyte-webapp/src/components/connection/CreateConnection/SelectSource.tsx index b508558473d..b83fa55791d 100644 --- a/airbyte-webapp/src/components/connection/CreateConnection/SelectSource.tsx +++ b/airbyte-webapp/src/components/connection/CreateConnection/SelectSource.tsx @@ -1,5 +1,5 @@ import { useMemo } from "react"; -import { FormattedMessage } from "react-intl"; +import { FormattedMessage, useIntl } from "react-intl"; import { useSearchParams } from "react-router-dom"; import { CloudInviteUsersHint } from "components/CloudInviteUsersHint"; @@ -10,6 +10,7 @@ import { FlexContainer } from "components/ui/Flex"; import { Heading } from "components/ui/Heading"; import { useSourceDefinitionList, useSourceList } from "core/api"; +import { PageTrackingCodes, useTrackPage } from "core/services/analytics"; import { CreateNewSource, SOURCE_DEFINITION_PARAM } from "./CreateNewSource"; import { RadioButtonTiles } from "./RadioButtonTiles"; @@ -23,6 +24,8 @@ export const SOURCE_TYPE_PARAM = "sourceType"; export const SOURCE_ID_PARAM = "sourceId"; export const SelectSource: React.FC = () => { + 
useTrackPage(PageTrackingCodes.CONNECTIONS_NEW_DEFINE_SOURCE); + const { formatMessage } = useIntl(); const { sources } = useSourceList(); const { sourceDefinitionMap } = useSourceDefinitionList(); const [searchParams, setSearchParams] = useSearchParams(); @@ -77,14 +80,14 @@ export const SelectSource: React.FC = () => { options={[ { value: EXISTING_SOURCE_TYPE, - label: "connectionForm.sourceExisting", - description: "connectionForm.sourceExistingDescription", + label: formatMessage({ id: "connectionForm.sourceExisting" }), + description: formatMessage({ id: "connectionForm.sourceExistingDescription" }), disabled: sources.length === 0, }, { value: NEW_SOURCE_TYPE, - label: "onboarding.sourceSetUp", - description: "onboarding.sourceSetUp.description", + label: formatMessage({ id: "onboarding.sourceSetUp" }), + description: formatMessage({ id: "onboarding.sourceSetUp.description" }), }, ]} selectedValue={selectedSourceType} diff --git a/airbyte-webapp/src/components/connection/CreateConnectionForm/CreateConnectionForm.test.tsx b/airbyte-webapp/src/components/connection/CreateConnectionForm/CreateConnectionForm.test.tsx index 8d29cd597af..f349964d032 100644 --- a/airbyte-webapp/src/components/connection/CreateConnectionForm/CreateConnectionForm.test.tsx +++ b/airbyte-webapp/src/components/connection/CreateConnectionForm/CreateConnectionForm.test.tsx @@ -17,7 +17,6 @@ import { import { mockTheme } from "test-utils/mock-data/mockTheme"; import { mocked, TestWrapper, useMockIntersectionObserver } from "test-utils/testutils"; -import type { SchemaError } from "core/api"; import { useDiscoverSchema } from "core/api"; import { defaultOssFeatures, FeatureItem } from "core/services/features"; @@ -46,7 +45,7 @@ jest.mock("core/api", () => ({ useSourceDefinition: () => mockSourceDefinition, useDestinationDefinition: () => mockDestinationDefinition, useDiscoverSchema: jest.fn(() => mockBaseUseDiscoverSchema), - LogsRequestError: 
jest.requireActual("core/api/errors").LogsRequestError, + ErrorWithJobInfo: jest.requireActual("core/api/errors").ErrorWithJobInfo, })); jest.mock("area/connector/utils", () => ({ @@ -101,7 +100,7 @@ describe("CreateConnectionForm", () => { it("should render with an error", async () => { mocked(useDiscoverSchema).mockImplementationOnce(() => ({ ...mockBaseUseDiscoverSchema, - schemaErrorStatus: new Error("Test Error") as SchemaError, + schemaErrorStatus: new Error("Test Error"), })); const renderResult = await render(); diff --git a/airbyte-webapp/src/components/connection/CreateConnectionForm/CreateConnectionForm.tsx b/airbyte-webapp/src/components/connection/CreateConnectionForm/CreateConnectionForm.tsx index 8281f083a9d..17a91297b5b 100644 --- a/airbyte-webapp/src/components/connection/CreateConnectionForm/CreateConnectionForm.tsx +++ b/airbyte-webapp/src/components/connection/CreateConnectionForm/CreateConnectionForm.tsx @@ -47,7 +47,7 @@ const CreateConnectionFormInner: React.FC = () => { const validationSchema = useConnectionValidationSchema(); - const useSimpliedCreation = useExperiment("connection.simplifiedCreation", false); + const isSimplifiedCreation = useExperiment("connection.simplifiedCreation", false); const onSubmit = useCallback( async ({ normalization, transformations, ...restFormValues }: FormConnectionFormValues) => { @@ -76,7 +76,7 @@ const CreateConnectionFormInner: React.FC = () => { navigate(`../../connections/${createdConnection.connectionId}`); const willSyncAfterCreation = restFormValues.scheduleType === ConnectionScheduleType.basic; - if (useSimpliedCreation && willSyncAfterCreation) { + if (isSimplifiedCreation && willSyncAfterCreation) { registerNotification({ id: "onboarding.firstSyncStarted", text: formatMessage({ id: "onboarding.firstSyncStarted" }), @@ -96,7 +96,7 @@ const CreateConnectionFormInner: React.FC = () => { navigate, setSubmitError, workspaceId, - useSimpliedCreation, + isSimplifiedCreation, registerNotification, 
formatMessage, ] @@ -112,7 +112,7 @@ const CreateConnectionFormInner: React.FC = () => { formTrackerId={CREATE_CONNECTION_FORM_ID} > - {useSimpliedCreation ? ( + {isSimplifiedCreation ? ( ) : ( <> diff --git a/airbyte-webapp/src/components/connection/CreateConnectionForm/SchemaError.tsx b/airbyte-webapp/src/components/connection/CreateConnectionForm/SchemaError.tsx index bc4dbe88288..eb2c4eaa93b 100644 --- a/airbyte-webapp/src/components/connection/CreateConnectionForm/SchemaError.tsx +++ b/airbyte-webapp/src/components/connection/CreateConnectionForm/SchemaError.tsx @@ -4,9 +4,8 @@ import { JobFailure } from "components/JobFailure"; import { Button } from "components/ui/Button"; import { Card } from "components/ui/Card"; import { FlexContainer } from "components/ui/Flex"; -import { Icon } from "components/ui/Icon"; -import { SchemaError as SchemaErrorType, LogsRequestError } from "core/api"; +import { ErrorWithJobInfo } from "core/api"; import styles from "./SchemaError.module.scss"; @@ -14,16 +13,16 @@ export const SchemaError = ({ schemaError, refreshSchema, }: { - schemaError: Exclude; + schemaError: Error; refreshSchema: () => Promise; }) => { - const job = LogsRequestError.extractJobInfo(schemaError); + const job = ErrorWithJobInfo.getJobInfo(schemaError); return ( - diff --git a/airbyte-webapp/src/components/connection/CreateConnectionForm/SimplifiedConnectionCreation/ConnectorNamespaceConfiguration.ts b/airbyte-webapp/src/components/connection/CreateConnectionForm/SimplifiedConnectionCreation/ConnectorNamespaceConfiguration.ts new file mode 100644 index 00000000000..6f08f94637c --- /dev/null +++ b/airbyte-webapp/src/components/connection/CreateConnectionForm/SimplifiedConnectionCreation/ConnectorNamespaceConfiguration.ts @@ -0,0 +1,19 @@ +import { ConnectorIds } from "area/connector/utils"; + +export const SourceNamespaceConfiguration = { + [ConnectorIds.Sources.E2ETesting]: { supportsNamespaces: false }, + [ConnectorIds.Sources.EndToEndTesting]: { 
supportsNamespaces: false }, +} as const; + +export const DestinationNamespaceConfiguration = { + [ConnectorIds.Destinations.BigQuery]: { supportsNamespaces: true, defaultNamespacePath: "dataset_id" }, + [ConnectorIds.Destinations.E2ETesting]: { supportsNamespaces: false }, + [ConnectorIds.Destinations.EndToEndTesting]: { supportsNamespaces: false }, + [ConnectorIds.Destinations.Milvus]: { supportsNamespaces: true }, + [ConnectorIds.Destinations.Pinecone]: { supportsNamespaces: true }, + [ConnectorIds.Destinations.Postgres]: { supportsNamespaces: true, defaultNamespacePath: "schema" }, + [ConnectorIds.Destinations.Redshift]: { supportsNamespaces: true, defaultNamespacePath: "schema" }, + [ConnectorIds.Destinations.S3]: { supportsNamespaces: true }, + [ConnectorIds.Destinations.Snowflake]: { supportsNamespaces: true, defaultNamespacePath: "schema" }, + [ConnectorIds.Destinations.Weaviate]: { supportsNamespaces: false }, +} as const; diff --git a/airbyte-webapp/src/components/connection/CreateConnectionForm/SimplifiedConnectionCreation/InputContainer.module.scss b/airbyte-webapp/src/components/connection/CreateConnectionForm/SimplifiedConnectionCreation/InputContainer.module.scss index bbeccf47473..320dafb6570 100644 --- a/airbyte-webapp/src/components/connection/CreateConnectionForm/SimplifiedConnectionCreation/InputContainer.module.scss +++ b/airbyte-webapp/src/components/connection/CreateConnectionForm/SimplifiedConnectionCreation/InputContainer.module.scss @@ -1,3 +1,28 @@ +@use "scss/colors"; +@use "scss/variables"; + +@keyframes highlight { + 0%, + 50% { + position: relative; + box-shadow: variables.$box-shadow-highlight colors.$blue-200; + z-index: 1; + } + + 99% { + z-index: 1; + } + + 100% { + box-shadow: 0 0 0 0 transparent; + z-index: 0; + } +} + .container { - width: 250px; + width: 300px; + + &.highlighted { + animation: highlight 2s ease-out; + } } diff --git 
a/airbyte-webapp/src/components/connection/CreateConnectionForm/SimplifiedConnectionCreation/InputContainer.tsx b/airbyte-webapp/src/components/connection/CreateConnectionForm/SimplifiedConnectionCreation/InputContainer.tsx index f4cd0cd00c2..43f3f0fb387 100644 --- a/airbyte-webapp/src/components/connection/CreateConnectionForm/SimplifiedConnectionCreation/InputContainer.tsx +++ b/airbyte-webapp/src/components/connection/CreateConnectionForm/SimplifiedConnectionCreation/InputContainer.tsx @@ -1,5 +1,39 @@ +import classNames from "classnames"; +import { useState } from "react"; +import { Location, useLocation, useNavigate } from "react-router-dom"; +import { useEffectOnce } from "react-use"; + import styles from "./InputContainer.module.scss"; -export const InputContainer: React.FC = ({ children }) => { - return
    {children}
    ; +export interface LocationWithState extends Location { + state: { action?: "scheduleType" }; +} + +export const InputContainer: React.FC> = ({ + children, + highlightAfterRedirect, +}) => { + const [highlighted, setHighlighted] = useState(false); + const navigate = useNavigate(); + const { state: locationState, pathname } = useLocation() as LocationWithState; + + useEffectOnce(() => { + let highlightTimeout: number; + + if (highlightAfterRedirect && locationState?.action === "scheduleType") { + // remove the redirection info from the location state + navigate(pathname, { replace: true }); + + setHighlighted(true); + highlightTimeout = window.setTimeout(() => { + setHighlighted(false); + }, 1500); + } + + return () => { + window.clearTimeout(highlightTimeout); + }; + }); + + return
    {children}
    ; }; diff --git a/airbyte-webapp/src/components/connection/CreateConnectionForm/SimplifiedConnectionCreation/SimplfiedSchemaChangesFormField.tsx b/airbyte-webapp/src/components/connection/CreateConnectionForm/SimplifiedConnectionCreation/SimplfiedSchemaChangesFormField.tsx index 2c7437b8d42..51ac3da9964 100644 --- a/airbyte-webapp/src/components/connection/CreateConnectionForm/SimplifiedConnectionCreation/SimplfiedSchemaChangesFormField.tsx +++ b/airbyte-webapp/src/components/connection/CreateConnectionForm/SimplifiedConnectionCreation/SimplfiedSchemaChangesFormField.tsx @@ -1,18 +1,26 @@ import { ComponentProps, useMemo } from "react"; import { Controller, useFormContext, useWatch } from "react-hook-form"; -import { FormattedMessage } from "react-intl"; +import { FormattedMessage, useIntl } from "react-intl"; import { FormConnectionFormValues } from "components/connection/ConnectionForm/formConfig"; import { FormFieldLayout } from "components/connection/ConnectionForm/FormFieldLayout"; import { RadioButtonTiles } from "components/connection/CreateConnection/RadioButtonTiles"; import { ControlLabels } from "components/LabeledControl"; +import { Box } from "components/ui/Box"; +import { ListBox } from "components/ui/ListBox"; import { Message } from "components/ui/Message"; import { Text } from "components/ui/Text"; import { NonBreakingChangesPreference } from "core/api/types/AirbyteClient"; import { useConnectionFormService } from "hooks/services/ConnectionForm/ConnectionFormService"; -export const SimplfiedSchemaChangesFormField = () => { +import { InputContainer } from "./InputContainer"; + +export const SimplfiedSchemaChangesFormField: React.FC<{ isCreating: boolean; disabled?: boolean }> = ({ + isCreating, + disabled, +}) => { + const { formatMessage } = useIntl(); const { connection, mode } = useConnectionFormService(); const { setValue, control } = useFormContext(); @@ -37,11 +45,13 @@ export const SimplfiedSchemaChangesFormField = () => { ]; return 
supportedPreferences.map((value) => ({ value, - label: `connectionForm.nonBreakingChangesPreference.autopropagation.${value}.next`, - description: `connectionForm.nonBreakingChangesPreference.autopropagation.${value}.description`, + label: formatMessage({ id: `connectionForm.nonBreakingChangesPreference.autopropagation.${value}.next` }), + description: formatMessage({ + id: `connectionForm.nonBreakingChangesPreference.autopropagation.${value}.description`, + }), "data-testid": value, })); - }, []); + }, [formatMessage]); return ( { - +
    } /> - setValue("nonBreakingChangesPreference", value, { shouldDirty: true })} - /> - {showAutoPropagationMessage && ( - } + {isCreating ? ( + setValue("nonBreakingChangesPreference", value, { shouldDirty: true })} /> + ) : ( + + + setValue("nonBreakingChangesPreference", value, { shouldDirty: true }) + } + selectedValue={field.value} + /> + + )} + {showAutoPropagationMessage && ( + + } + /> + )} )} diff --git a/airbyte-webapp/src/components/connection/CreateConnectionForm/SimplifiedConnectionCreation/SimplifiedBackfillFormField.tsx b/airbyte-webapp/src/components/connection/CreateConnectionForm/SimplifiedConnectionCreation/SimplifiedBackfillFormField.tsx index 6fee04de891..e02ef2874e2 100644 --- a/airbyte-webapp/src/components/connection/CreateConnectionForm/SimplifiedConnectionCreation/SimplifiedBackfillFormField.tsx +++ b/airbyte-webapp/src/components/connection/CreateConnectionForm/SimplifiedConnectionCreation/SimplifiedBackfillFormField.tsx @@ -13,7 +13,7 @@ import { Text } from "components/ui/Text"; import { SchemaChangeBackfillPreference } from "core/api/types/AirbyteClient"; import { isCloudApp } from "core/utils/app"; -export const SimplifiedBackfillFormField = () => { +export const SimplifiedBackfillFormField: React.FC<{ disabled?: boolean }> = ({ disabled }) => { const { control } = useFormContext(); const [controlId] = useState(`input-control-${uniqueId()}`); @@ -53,6 +53,7 @@ export const SimplifiedBackfillFormField = () => { ); }} size="lg" + disabled={disabled} /> )} diff --git a/airbyte-webapp/src/components/connection/CreateConnectionForm/SimplifiedConnectionCreation/SimplifiedConnectionConfiguration.module.scss b/airbyte-webapp/src/components/connection/CreateConnectionForm/SimplifiedConnectionCreation/SimplifiedConnectionConfiguration.module.scss index 4f5e6ef50b3..ecbc6448098 100644 --- a/airbyte-webapp/src/components/connection/CreateConnectionForm/SimplifiedConnectionCreation/SimplifiedConnectionConfiguration.module.scss +++ 
b/airbyte-webapp/src/components/connection/CreateConnectionForm/SimplifiedConnectionCreation/SimplifiedConnectionConfiguration.module.scss @@ -1,49 +1,25 @@ @use "scss/colors"; @use "scss/variables"; +@use "scss/mixins"; .linkText { - // base "button" - display: inline-flex; - align-items: center; - justify-content: center; - text-decoration: none; - border-radius: variables.$border-radius-sm; - font-weight: 600; - cursor: pointer; - background-color: colors.$white; - - // secondary - color: colors.$grey-400; - border: 1px solid colors.$grey-300; - - // sizeXS - height: variables.$button-height-xs; - font-size: variables.$font-size-sm; - line-height: 15px; - padding: 10px; + @include mixins.link-text; } -.advancedSettings { - cursor: pointer; - background: transparent; - border: none; - text-decoration: underline; - color: colors.$blue; - font-weight: 600; - padding: 0; -} +.nextLink { + @include mixins.base-button; -.hideOverflow { - // keep the hr under advanced settings from overflowing - overflow-x: hidden; -} + // primary + color: colors.$white; + background-color: colors.$blue-400; + border: 0; -.hr { - border-width: 0; - border-top: 1px solid colors.$grey-100; - transform: scaleX(2); // expand out of its container to the card edge -} + &:hover { + background-color: colors.$blue-500; + color: colors.$white; + } -.hidden { - display: none; + &:active { + background-color: colors.$blue-600; + } } diff --git a/airbyte-webapp/src/components/connection/CreateConnectionForm/SimplifiedConnectionCreation/SimplifiedConnectionConfiguration.tsx b/airbyte-webapp/src/components/connection/CreateConnectionForm/SimplifiedConnectionCreation/SimplifiedConnectionConfiguration.tsx index c2ab4cdeb84..91b02d7f44d 100644 --- a/airbyte-webapp/src/components/connection/CreateConnectionForm/SimplifiedConnectionCreation/SimplifiedConnectionConfiguration.tsx +++ 
b/airbyte-webapp/src/components/connection/CreateConnectionForm/SimplifiedConnectionCreation/SimplifiedConnectionConfiguration.tsx @@ -14,27 +14,18 @@ import { Box } from "components/ui/Box"; import { Button } from "components/ui/Button"; import { Card } from "components/ui/Card"; import { FlexContainer } from "components/ui/Flex"; -import { Icon } from "components/ui/Icon"; import { Link } from "components/ui/Link"; import { Text } from "components/ui/Text"; import { useGetDestinationFromSearchParams, useGetSourceFromSearchParams } from "area/connector/utils"; import { useCurrentWorkspaceLink } from "area/workspace/utils"; -import { FeatureItem, useFeature } from "core/services/features"; +import { PageTrackingCodes, useTrackPage } from "core/services/analytics"; import { useConnectionFormService } from "hooks/services/ConnectionForm/ConnectionFormService"; -import { useExperiment } from "hooks/services/Experiment"; import { useFormChangeTrackerService } from "hooks/services/FormChangeTracker"; import { ConnectionRoutePaths, RoutePaths } from "pages/routePaths"; -import { SimplfiedSchemaChangesFormField } from "./SimplfiedSchemaChangesFormField"; -import { SimplifiedBackfillFormField } from "./SimplifiedBackfillFormField"; import styles from "./SimplifiedConnectionConfiguration.module.scss"; -import { SimplfiedConnectionDataResidencyFormField } from "./SimplifiedConnectionDataResidencyFormField"; -import { SimplifiedConnectionNameFormField } from "./SimplifiedConnectionNameFormField"; -import { SimplifiedConnectionScheduleFormField } from "./SimplifiedConnectionScheduleFormField"; -import { SimplifiedDestinationNamespaceFormField } from "./SimplifiedDestinationNamespaceFormField"; -import { SimplifiedDestinationStreamPrefixNameFormField } from "./SimplifiedDestinationStreamPrefixNameFormField"; -import { SimplifiedSchemaChangeNotificationFormField } from "./SimplifiedSchemaChangeNotificationFormField"; +import { SimplifiedConnectionsSettingsCard } from 
"./SimplifiedConnectionSettingsCard"; import { SimplifiedSchemaQuestionnaire } from "./SimplifiedSchemaQuestionnaire"; import { CREATE_CONNECTION_FORM_ID } from "../CreateConnectionForm"; @@ -64,6 +55,7 @@ export const SimplifiedConnectionConfiguration: React.FC = () => { }; const SimplifiedConnectionCreationReplication: React.FC = () => { + useTrackPage(PageTrackingCodes.CONNECTIONS_NEW_SELECT_STREAMS); const { formatMessage } = useIntl(); const { isDirty } = useFormState(); const { trackFormChange } = useFormChangeTrackerService(); @@ -75,7 +67,10 @@ const SimplifiedConnectionCreationReplication: React.FC = () => { return ( <> - + @@ -84,61 +79,33 @@ const SimplifiedConnectionCreationReplication: React.FC = () => { }; const SimplifiedConnectionCreationConfigureConnection: React.FC = () => { + useTrackPage(PageTrackingCodes.CONNECTIONS_NEW_CONFIGURE_CONNECTION); const { formatMessage } = useIntl(); - const [isAdvancedOpen, setIsAdvancedOpen] = React.useState(false); - const canEditDataGeographies = useFeature(FeatureItem.AllowChangeDataGeographies); - const canBackfillNewColumns = useExperiment("platform.auto-backfill-on-new-columns", false); const { isDirty } = useFormState(); const { trackFormChange } = useFormChangeTrackerService(); + const source = useGetSourceFromSearchParams(); + const destination = useGetDestinationFromSearchParams(); + // if the user is navigating from the first step the form may be dirty useMount(() => { trackFormChange(CREATE_CONNECTION_FORM_ID, isDirty); }); return ( - - - - - - - - - - - - - - - - {isAdvancedOpen &&
    } - - {/* using styles.hidden to show/hide as residency field makes an http request for geographies */} - {/* which triggers a suspense boundary - none of the places for a suspense fallback are good UX */} - {/* so always render, making the geography request as part of the initial page load */} - - {canEditDataGeographies && } - - - {canBackfillNewColumns && } - -
    -
    + ); }; const FirstNav: React.FC = () => { const createLink = useCurrentWorkspaceLink(); - const source = useGetSourceFromSearchParams(); const destination = useGetDestinationFromSearchParams(); + const source = useGetSourceFromSearchParams(); const { isValid, errors } = useFormState(); const { trigger } = useFormContext(); @@ -185,7 +152,7 @@ const FirstNav: React.FC = () => { ), search: `?${SOURCE_ID_PARAM}=${source.sourceId}&${DESTINATION_ID_PARAM}=${destination.destinationId}`, }} - className={classNames(styles.linkText)} + className={classNames(styles.nextLink)} onClick={() => { // we're navigating to the next step which retains the creation form's state clearFormChange(CREATE_CONNECTION_FORM_ID); @@ -194,7 +161,7 @@ const FirstNav: React.FC = () => { ) : ( - )} diff --git a/airbyte-webapp/src/components/connection/CreateConnectionForm/SimplifiedConnectionCreation/SimplifiedConnectionDataResidencyFormField.tsx b/airbyte-webapp/src/components/connection/CreateConnectionForm/SimplifiedConnectionCreation/SimplifiedConnectionDataResidencyFormField.tsx index 05bc5e20941..9b15d1a1bff 100644 --- a/airbyte-webapp/src/components/connection/CreateConnectionForm/SimplifiedConnectionCreation/SimplifiedConnectionDataResidencyFormField.tsx +++ b/airbyte-webapp/src/components/connection/CreateConnectionForm/SimplifiedConnectionCreation/SimplifiedConnectionDataResidencyFormField.tsx @@ -11,7 +11,7 @@ import { Text } from "components/ui/Text"; import { InputContainer } from "./InputContainer"; -export const SimplfiedConnectionDataResidencyFormField = () => { +export const SimplfiedConnectionDataResidencyFormField: React.FC<{ disabled: boolean }> = ({ disabled }) => { const { control } = useFormContext(); const [controlId] = useState(`input-control-${uniqueId()}`); @@ -30,7 +30,7 @@ export const SimplfiedConnectionDataResidencyFormField = () => { } /> - name={field.name} /> + name={field.name} disabled={disabled} /> )} diff --git 
a/airbyte-webapp/src/components/connection/CreateConnectionForm/SimplifiedConnectionCreation/SimplifiedConnectionNameFormField.tsx b/airbyte-webapp/src/components/connection/CreateConnectionForm/SimplifiedConnectionCreation/SimplifiedConnectionNameFormField.tsx index 0a758741c93..533bbd4825d 100644 --- a/airbyte-webapp/src/components/connection/CreateConnectionForm/SimplifiedConnectionCreation/SimplifiedConnectionNameFormField.tsx +++ b/airbyte-webapp/src/components/connection/CreateConnectionForm/SimplifiedConnectionCreation/SimplifiedConnectionNameFormField.tsx @@ -6,6 +6,7 @@ import { FormattedMessage, useIntl } from "react-intl"; import { FormConnectionFormValues } from "components/connection/ConnectionForm/formConfig"; import { FormFieldLayout } from "components/connection/ConnectionForm/FormFieldLayout"; import { ControlLabels } from "components/LabeledControl"; +import { Box } from "components/ui/Box"; import { FlexContainer } from "components/ui/Flex"; import { Input } from "components/ui/Input"; import { Text } from "components/ui/Text"; @@ -21,7 +22,7 @@ export const SimplifiedConnectionNameFormField = () => { ( + render={({ field, fieldState }) => ( { onChange={field.onChange} /> + {fieldState.error && ( + + + + + + )} )} /> diff --git a/airbyte-webapp/src/components/connection/CreateConnectionForm/SimplifiedConnectionCreation/SimplifiedConnectionScheduleFormField.tsx b/airbyte-webapp/src/components/connection/CreateConnectionForm/SimplifiedConnectionCreation/SimplifiedConnectionScheduleFormField.tsx index 0368c688127..5f550687d0a 100644 --- a/airbyte-webapp/src/components/connection/CreateConnectionForm/SimplifiedConnectionCreation/SimplifiedConnectionScheduleFormField.tsx +++ b/airbyte-webapp/src/components/connection/CreateConnectionForm/SimplifiedConnectionCreation/SimplifiedConnectionScheduleFormField.tsx @@ -32,21 +32,24 @@ import { useConnectionFormService } from "hooks/services/ConnectionForm/Connecti import { InputContainer } from 
"./InputContainer"; import styles from "./SimplifiedConnectionScheduleFormField.module.scss"; -export const SimplifiedConnectionScheduleFormField = () => { +export const SimplifiedConnectionScheduleFormField: React.FC<{ disabled: boolean }> = ({ disabled }) => { const watchedScheduleType = useWatch({ name: "scheduleType" }); return ( <> - - {watchedScheduleType === ConnectionScheduleType.basic && } - {watchedScheduleType === ConnectionScheduleType.cron && } + + {watchedScheduleType === ConnectionScheduleType.basic && ( + + )} + {watchedScheduleType === ConnectionScheduleType.cron && } ); }; -const SimplfieidScheduleTypeFormControl = () => { +const SimplifiedScheduleTypeFormControl: React.FC<{ disabled: boolean }> = ({ disabled }) => { const { formatMessage } = useIntl(); const { setValue, control } = useFormContext(); + const { defaultValues } = useFormState(); const [controlId] = useState(`input-control-${uniqueId()}`); const scheduleTypeOptions: Array> = [ @@ -74,19 +77,33 @@ const SimplfieidScheduleTypeFormControl = () => { ]; const onScheduleTypeSelect = (value: ConnectionScheduleType): void => { + setValue("scheduleType", value, { shouldValidate: true }); + // reset scheduleData since we don't need it for manual if (value === ConnectionScheduleType.manual) { - setValue("scheduleData", undefined, { shouldValidate: true }); + setValue("scheduleData", undefined, { shouldValidate: true, shouldDirty: true }); return; } // set default basic schedule if (value === ConnectionScheduleType.basic) { - setValue("scheduleData", { basicSchedule: BASIC_FREQUENCY_DEFAULT_VALUE }, { shouldValidate: true }); + setValue( + "scheduleData", + // @ts-expect-error react-hook-form makes every value in defaultValues optional + // which doesn't match our types or usage + { basicSchedule: defaultValues?.scheduleData?.basicSchedule ?? 
BASIC_FREQUENCY_DEFAULT_VALUE }, + { shouldValidate: true, shouldDirty: true } + ); return; } // set default cron schedule if (value === ConnectionScheduleType.cron) { - setValue("scheduleData", { cron: CRON_DEFAULT_VALUE }, { shouldValidate: true }); + setValue( + "scheduleData", + // @ts-expect-error react-hook-form makes every value in defaultValues optional + // which doesn't match our types or usage + { cron: defaultValues?.scheduleData?.cron ?? CRON_DEFAULT_VALUE }, + { shouldValidate: true, shouldDirty: true } + ); } }; @@ -109,12 +126,12 @@ const SimplfieidScheduleTypeFormControl = () => {
    } /> - + + isDisabled={disabled} id={controlId} options={scheduleTypeOptions} onSelect={(value) => { - field.onChange(value); onScheduleTypeSelect(value); }} selectedValue={field.value} @@ -127,7 +144,7 @@ const SimplfieidScheduleTypeFormControl = () => { ); }; -const SimplifiedBasicScheduleFormControl: React.FC = () => { +const SimplifiedBasicScheduleFormControl: React.FC<{ disabled: boolean }> = ({ disabled }) => { const { connection } = useConnectionFormService(); const { setValue, control } = useFormContext(); const [controlId] = useState(`input-control-${uniqueId()}`); @@ -162,6 +179,7 @@ const SimplifiedBasicScheduleFormControl: React.FC = () => { /> + isDisabled={disabled} id={controlId} options={frequencies} onSelect={onBasicScheduleSelect} @@ -175,7 +193,7 @@ const SimplifiedBasicScheduleFormControl: React.FC = () => { ); }; -const SimplifiedCronScheduleFormControl: React.FC = () => { +const SimplifiedCronScheduleFormControl: React.FC<{ disabled: boolean }> = ({ disabled }) => { const [debouncedErrorMessage, setDebouncedErrorMessage] = useState(""); const [debouncedCronDescription, setDebouncedCronDescription] = useState(""); const [controlId] = useState(`input-control-${uniqueId()}`); @@ -232,6 +250,7 @@ const SimplifiedCronScheduleFormControl: React.FC = () => { { /> = ({ + title, + isCreating, + source, + destination, + isDeprecated = false, +}) => { + const [isAdvancedOpen, setIsAdvancedOpen] = useState(false); + const canEditDataGeographies = useFeature(FeatureItem.AllowChangeDataGeographies); + const canBackfillNewColumns = useExperiment("platform.auto-backfill-on-new-columns", false); + + return ( + + + + + {isCreating && ( + + )} + {isCreating && } + + + + {/* readonly mode disables all elements, including buttons, from the fieldset */} + {/* to keep this toggle available, style and attribute a span like a button */} + setIsAdvancedOpen((isAdvancedOpen) => !isAdvancedOpen)} + onKeyUp={(e) => + (e.key === "Enter" || e.key === " ") && 
setIsAdvancedOpen((isAdvancedOpen) => !isAdvancedOpen) + } + > + + + + + {isCreating && ( + + + + )} + + {/* using styles.hidden to show/hide as residency field makes an http request for geographies */} + {/* which triggers a suspense boundary - none of the places for a suspense fallback are good UX */} + {/* so always render, making the geography request as part of the initial page load */} + + + {canEditDataGeographies && } + {!isCreating && ( + + )} + {!isCreating && } + + + {canBackfillNewColumns && } + + + + {!isCreating && ( + + + + )} + + + ); +}; diff --git a/airbyte-webapp/src/components/connection/CreateConnectionForm/SimplifiedConnectionCreation/SimplifiedDestinationNamespaceFormField.module.scss b/airbyte-webapp/src/components/connection/CreateConnectionForm/SimplifiedConnectionCreation/SimplifiedDestinationNamespaceFormField.module.scss new file mode 100644 index 00000000000..a9a255ac590 --- /dev/null +++ b/airbyte-webapp/src/components/connection/CreateConnectionForm/SimplifiedConnectionCreation/SimplifiedDestinationNamespaceFormField.module.scss @@ -0,0 +1,14 @@ +@use "scss/variables"; + +.originalCasing, +%originalCasing { + text-transform: unset; +} + +.sourceNamespace { + @extend %originalCasing; + + &:not(:first-child) { + margin-left: variables.$spacing-xs; + } +} diff --git a/airbyte-webapp/src/components/connection/CreateConnectionForm/SimplifiedConnectionCreation/SimplifiedDestinationNamespaceFormField.tsx b/airbyte-webapp/src/components/connection/CreateConnectionForm/SimplifiedConnectionCreation/SimplifiedDestinationNamespaceFormField.tsx index 0dc15eeacca..acd39a631f8 100644 --- a/airbyte-webapp/src/components/connection/CreateConnectionForm/SimplifiedConnectionCreation/SimplifiedDestinationNamespaceFormField.tsx +++ b/airbyte-webapp/src/components/connection/CreateConnectionForm/SimplifiedConnectionCreation/SimplifiedDestinationNamespaceFormField.tsx @@ -1,54 +1,168 @@ +import get from "lodash/get"; import { ComponentProps, useEffect } 
from "react"; -import { Controller, useFormContext, useWatch } from "react-hook-form"; -import { FormattedMessage } from "react-intl"; +import { Controller, useFormContext, useFormState, useWatch } from "react-hook-form"; +import { FormattedMessage, useIntl } from "react-intl"; import { FormConnectionFormValues } from "components/connection/ConnectionForm/formConfig"; import { FormFieldLayout } from "components/connection/ConnectionForm/FormFieldLayout"; import { RadioButtonTiles } from "components/connection/CreateConnection/RadioButtonTiles"; -import { FormControl } from "components/forms"; import { ControlLabels } from "components/LabeledControl"; +import { Badge } from "components/ui/Badge"; +import { Box } from "components/ui/Box"; import { FlexContainer } from "components/ui/Flex"; +import { Input } from "components/ui/Input"; +import { ExternalLink } from "components/ui/Link"; +import { ListBox } from "components/ui/ListBox"; import { Text } from "components/ui/Text"; -import { useGetSourceFromSearchParams } from "area/connector/utils"; -import { NamespaceDefinitionType } from "core/api/types/AirbyteClient"; +import { DestinationRead, NamespaceDefinitionType, SourceRead } from "core/api/types/AirbyteClient"; +import { links } from "core/utils/links"; +import { naturalComparator } from "core/utils/objects"; -export const SimplifiedDestinationNamespaceFormField = () => { - const source = useGetSourceFromSearchParams(); +import { DestinationNamespaceConfiguration, SourceNamespaceConfiguration } from "./ConnectorNamespaceConfiguration"; +import { InputContainer } from "./InputContainer"; +import styles from "./SimplifiedDestinationNamespaceFormField.module.scss"; + +// eslint-disable-next-line no-template-curly-in-string +const SOURCE_NAMESPACE_REPLACEMENT_STRING = "${SOURCE_NAMESPACE}"; + +export const SimplifiedDestinationNamespaceFormField: React.FC<{ + isCreating: boolean; + source: SourceRead; + destination: DestinationRead; + disabled?: boolean; +}> = ({ 
isCreating, source, destination, disabled }) => { const { trigger, setValue, control, watch } = useFormContext(); + const { defaultValues } = useFormState(); const namespaceDefinition = useWatch({ name: "namespaceDefinition", control }); + const streams = useWatch({ name: "syncCatalog.streams", control }); + const namespaceFormat = useWatch({ name: "namespaceFormat", control }); + const { formatMessage } = useIntl(); const watchedNamespaceDefinition = watch("namespaceDefinition"); useEffect(() => { trigger("namespaceFormat", { shouldFocus: true }); - }, [trigger, watchedNamespaceDefinition]); + }, [trigger, setValue, defaultValues?.namespaceFormat, watchedNamespaceDefinition]); + + const sourceNamespaceAbilities = SourceNamespaceConfiguration[source.sourceDefinitionId] ?? { + supportsNamespaces: true, + }; + const destinationNamespaceAbilities = DestinationNamespaceConfiguration[destination.destinationDefinitionId] ?? { + supportsNamespaces: true, + }; + + if (!destinationNamespaceAbilities.supportsNamespaces) { + return null; + } + + const destinationDefinedNamespace = + (destinationNamespaceAbilities.defaultNamespacePath && + get(destination.connectionConfiguration, destinationNamespaceAbilities.defaultNamespacePath)) ?? 
+ "no_value_provided"; + + const destinationDefinedDescriptionValues = { + destinationDefinedNamespace, + badge: (children: React.ReactNode[]) => ( + + {children} + + ), + }; + + const enabledStreamNamespaces = Array.from( + streams.reduce((acc, stream) => { + if (stream.config?.selected && stream.stream?.namespace) { + acc.add(stream.stream.namespace); + } + return acc; + }, new Set()) + ).sort(naturalComparator); + + const sourceDefinedDescriptionValues = { + sourceDefinedNamespaces: enabledStreamNamespaces, + badges: (children: React.ReactNode[]) => { + if (children.length === 0) { + return null; + } + + return ( + + {children.map((child) => ( + + {child} + + ))} + + ); + }, + }; + + const customFormatField = + namespaceDefinition === NamespaceDefinitionType.customformat ? ( + <> + ( + + + + + {fieldState.error && ( + + + + + + )} + + )} + /> + {namespaceFormat?.includes(SOURCE_NAMESPACE_REPLACEMENT_STRING) && + enabledStreamNamespaces.map((namespace) => { + return ( + + {namespaceFormat.replace(SOURCE_NAMESPACE_REPLACEMENT_STRING, namespace)} + + ); + })} + + ) : null; const destinationNamespaceOptions: ComponentProps>["options"] = [ { value: NamespaceDefinitionType.customformat, - label: "connectionForm.customFormat", - description: "connectionForm.customFormatDescriptionNext", - extra: - namespaceDefinition === NamespaceDefinitionType.customformat ? 
( - - ) : null, + label: formatMessage({ id: "connectionForm.customFormat" }), + description: formatMessage({ id: "connectionForm.customFormatDescriptionNext" }), + extra: customFormatField, }, { value: NamespaceDefinitionType.destination, - label: "connectionForm.destinationFormatNext", - description: "connectionForm.destinationFormatDescriptionNext", - }, - { - value: NamespaceDefinitionType.source, - label: "connectionForm.sourceFormatNext", - description: "connectionForm.sourceFormatDescriptionNext", + label: formatMessage({ id: "connectionForm.destinationFormatNext" }), + description: formatMessage( + { id: "connectionForm.destinationFormatDescriptionNext" }, + destinationDefinedDescriptionValues + ), }, + ...(sourceNamespaceAbilities.supportsNamespaces + ? [ + { + value: NamespaceDefinitionType.source, + label: formatMessage({ id: "connectionForm.sourceFormatNext" }), + description: formatMessage( + { id: "connectionForm.sourceFormatDescriptionNext" }, + sourceDefinedDescriptionValues + ), + }, + ] + : []), ]; return ( @@ -65,17 +179,56 @@ export const SimplifiedDestinationNamespaceFormField = () => { + + + + + } /> - setValue("namespaceDefinition", value, { shouldDirty: true })} - /> + {isCreating ? 
( + setValue("namespaceDefinition", value, { shouldDirty: true })} + /> + ) : ( + + + setValue("namespaceDefinition", value, { shouldDirty: true }) + } + selectedValue={field.value} + /> + {field.value === NamespaceDefinitionType.destination && ( + + + + + + )} + {field.value === NamespaceDefinitionType.source && ( + + + + + + )} + {customFormatField} + + )} )} /> diff --git a/airbyte-webapp/src/components/connection/CreateConnectionForm/SimplifiedConnectionCreation/SimplifiedDestinationStreamPrefixNameFormField.tsx b/airbyte-webapp/src/components/connection/CreateConnectionForm/SimplifiedConnectionCreation/SimplifiedDestinationStreamPrefixNameFormField.tsx index 7b66185a490..7034938fcb3 100644 --- a/airbyte-webapp/src/components/connection/CreateConnectionForm/SimplifiedConnectionCreation/SimplifiedDestinationStreamPrefixNameFormField.tsx +++ b/airbyte-webapp/src/components/connection/CreateConnectionForm/SimplifiedConnectionCreation/SimplifiedDestinationStreamPrefixNameFormField.tsx @@ -12,7 +12,7 @@ import { Text } from "components/ui/Text"; import { InputContainer } from "./InputContainer"; -export const SimplifiedDestinationStreamPrefixNameFormField = () => { +export const SimplifiedDestinationStreamPrefixNameFormField: React.FC<{ disabled?: boolean }> = ({ disabled }) => { const { formatMessage } = useIntl(); const { control } = useFormContext(); const [controlId] = useState(`input-control-${uniqueId()}`); @@ -30,6 +30,10 @@ export const SimplifiedDestinationStreamPrefixNameFormField = () => { +   + + + @@ -45,6 +49,7 @@ export const SimplifiedDestinationStreamPrefixNameFormField = () => { inline={false} value={field.value} onChange={field.onChange} + disabled={disabled} /> {prefix && ( diff --git a/airbyte-webapp/src/components/connection/CreateConnectionForm/SimplifiedConnectionCreation/SimplifiedSchemaChangeNotificationFormField.tsx 
b/airbyte-webapp/src/components/connection/CreateConnectionForm/SimplifiedConnectionCreation/SimplifiedSchemaChangeNotificationFormField.tsx index 2479b587253..a9a7b9603ea 100644 --- a/airbyte-webapp/src/components/connection/CreateConnectionForm/SimplifiedConnectionCreation/SimplifiedSchemaChangeNotificationFormField.tsx +++ b/airbyte-webapp/src/components/connection/CreateConnectionForm/SimplifiedConnectionCreation/SimplifiedSchemaChangeNotificationFormField.tsx @@ -1,5 +1,5 @@ import uniqueId from "lodash/uniqueId"; -import { useState } from "react"; +import React, { useState } from "react"; import { Controller, useFormContext } from "react-hook-form"; import { FormattedMessage } from "react-intl"; @@ -10,7 +10,7 @@ import { FlexContainer } from "components/ui/Flex"; import { Switch } from "components/ui/Switch"; import { Text } from "components/ui/Text"; -export const SimplifiedSchemaChangeNotificationFormField = () => { +export const SimplifiedSchemaChangeNotificationFormField: React.FC<{ disabled?: boolean }> = ({ disabled }) => { const { control } = useFormContext(); const [controlId] = useState(`input-control-${uniqueId()}`); @@ -33,7 +33,7 @@ export const SimplifiedSchemaChangeNotificationFormField = () => { } /> - + )} /> diff --git a/airbyte-webapp/src/components/connection/CreateConnectionForm/SimplifiedConnectionCreation/SimplifiedSchemaQuestionnaire.module.scss b/airbyte-webapp/src/components/connection/CreateConnectionForm/SimplifiedConnectionCreation/SimplifiedSchemaQuestionnaire.module.scss index 2aa1d198e42..7762211e8b3 100644 --- a/airbyte-webapp/src/components/connection/CreateConnectionForm/SimplifiedConnectionCreation/SimplifiedSchemaQuestionnaire.module.scss +++ b/airbyte-webapp/src/components/connection/CreateConnectionForm/SimplifiedConnectionCreation/SimplifiedSchemaQuestionnaire.module.scss @@ -8,10 +8,11 @@ .collapsedQuestion { overflow: hidden; height: 0; + transition: height variables.$transition-out; } .expandedQuestion { overflow: 
hidden; - height: 171px; + height: 131px; transition: height variables.$transition-out; } diff --git a/airbyte-webapp/src/components/connection/CreateConnectionForm/SimplifiedConnectionCreation/SimplifiedSchemaQuestionnaire.tsx b/airbyte-webapp/src/components/connection/CreateConnectionForm/SimplifiedConnectionCreation/SimplifiedSchemaQuestionnaire.tsx index 6eb30ca66df..5a84efc0c7f 100644 --- a/airbyte-webapp/src/components/connection/CreateConnectionForm/SimplifiedConnectionCreation/SimplifiedSchemaQuestionnaire.tsx +++ b/airbyte-webapp/src/components/connection/CreateConnectionForm/SimplifiedConnectionCreation/SimplifiedSchemaQuestionnaire.tsx @@ -3,17 +3,18 @@ import { useFormContext } from "react-hook-form"; import { FormattedMessage } from "react-intl"; import { FormConnectionFormValues } from "components/connection/ConnectionForm/formConfig"; -import { FormFieldLayout } from "components/connection/ConnectionForm/FormFieldLayout"; import { RadioButtonTiles } from "components/connection/CreateConnection/RadioButtonTiles"; import { updateStreamSyncMode } from "components/connection/syncCatalog/SyncCatalog/updateStreamSyncMode"; import { SyncModeValue } from "components/connection/syncCatalog/SyncModeSelect"; import { ControlLabels } from "components/LabeledControl"; import { Badge } from "components/ui/Badge"; +import { Box } from "components/ui/Box"; import { FlexContainer } from "components/ui/Flex"; import { Icon } from "components/ui/Icon"; import { Text } from "components/ui/Text"; import { DestinationSyncMode, SyncMode } from "core/api/types/AirbyteClient"; +import { Action, Namespace, useAnalyticsService } from "core/services/analytics"; import { useConnectionFormService } from "hooks/services/ConnectionForm/ConnectionFormService"; import styles from "./SimplifiedSchemaQuestionnaire.module.scss"; @@ -25,33 +26,37 @@ type QuestionnaireOutcomes = Record>["options"] = [ { value: "mirrorSource", - label: 
"connectionForm.questionnaire.delivery.mirrorSource.title", - labelValues: { - badge: ( - - Recommended - - ), - }, - description: "connectionForm.questionnaire.delivery.mirrorSource.subtitle", + label: ( + + Recommended + + ), + }} + /> + ), + description: , }, { value: "appendChanges", - label: "connectionForm.questionnaire.delivery.appendChanges.title", - description: "connectionForm.questionnaire.delivery.appendChanges.subtitle", + label: , + description: , }, ]; const deletionRecordsOptions: ComponentProps>["options"] = [ { value: SyncMode.incremental, - label: "connectionForm.questionnaire.incrementOrRefresh.increment.title", - description: "connectionForm.questionnaire.incrementOrRefresh.increment.subtitle", + label: , + description: , }, { value: SyncMode.full_refresh, - label: "connectionForm.questionnaire.incrementOrRefresh.refresh.title", - description: "connectionForm.questionnaire.incrementOrRefresh.refresh.subtitle", + label: , + description: , extra: ( @@ -98,6 +103,7 @@ export const getEnforcedIncrementOrRefresh = (supportedSyncModes: SyncMode[]) => }; export const SimplifiedSchemaQuestionnaire = () => { + const analyticsService = useAnalyticsService(); const { connection, destDefinitionSpecification: { supportedDestinationSyncModes }, @@ -138,20 +144,41 @@ export const SimplifiedSchemaQuestionnaire = () => { const enforcedSelectedDelivery = getEnforcedDelivery(questionnaireOutcomes); const enforcedIncrementOrRefresh = getEnforcedIncrementOrRefresh(supportedSyncModes); - const [selectedDelivery, _setSelectedDelivery] = useState(enforcedSelectedDelivery); - const [selectedIncrementOrRefresh, setSelectedIncrementOrRefresh] = useState( + const [selectedDelivery, _setSelectedDelivery] = useState(enforcedSelectedDelivery ?? 
"mirrorSource"); + const [selectedIncrementOrRefresh, _setSelectedIncrementOrRefresh] = useState( enforcedIncrementOrRefresh ); const setSelectedDelivery: typeof _setSelectedDelivery = (value) => { + analyticsService.track(Namespace.SYNC_QUESTIONNAIRE, Action.ANSWERED, { + actionDescription: "First question has been answered", + question: "delivery", + answer: value, + }); + _setSelectedDelivery(value); if (value === "mirrorSource") { // clear any user-provided answer for the second question when switching to mirrorSource // this is purely a UX decision - setSelectedIncrementOrRefresh(enforcedIncrementOrRefresh); + setSelectedIncrementOrRefresh(enforcedIncrementOrRefresh, { automatedAction: true }); } }; + const setSelectedIncrementOrRefresh = ( + value: SyncMode | undefined, + { automatedAction }: { automatedAction?: boolean } = { automatedAction: false } + ) => { + if (!automatedAction) { + analyticsService.track(Namespace.SYNC_QUESTIONNAIRE, Action.ANSWERED, { + actionDescription: "Second question has been answered", + question: "all_or_some", + answer: value, + }); + } + + _setSelectedIncrementOrRefresh(value); + }; + const selectedModes = useMemo(() => { if (selectedDelivery === "mirrorSource") { return questionnaireOutcomes.mirrorSource.map(([syncMode, destinationSyncMode]) => { @@ -171,12 +198,6 @@ export const SimplifiedSchemaQuestionnaire = () => { return []; }, [selectedDelivery, questionnaireOutcomes.mirrorSource, selectedIncrementOrRefresh]); - // if a source & destination sync mode selection has been made (by default or by the user), show the result - let selectionMessage; - if (selectedModes.length) { - selectionMessage = ; - } - // when a sync mode is selected, choose it for all streams const { trigger, getValues, setValue } = useFormContext(); useEffect(() => { @@ -208,17 +229,39 @@ export const SimplifiedSchemaQuestionnaire = () => { }); setValue("syncCatalog.streams", nextFields); trigger("syncCatalog.streams"); - }, [setValue, trigger, getValues, 
selectedDelivery, selectedIncrementOrRefresh, selectedModes]); + analyticsService.track(Namespace.SYNC_QUESTIONNAIRE, Action.APPLIED, { + actionDescription: "Questionnaire has applied a sync mode", + delivery: selectedDelivery, + all_or_some: selectedIncrementOrRefresh, + }); + }, [setValue, trigger, getValues, selectedDelivery, selectedIncrementOrRefresh, selectedModes, analyticsService]); + + const showFirstQuestion = enforcedSelectedDelivery == null; const showSecondQuestion = enforcedIncrementOrRefresh == null && selectedDelivery === "appendChanges"; + useEffect(() => { + if (showFirstQuestion) { + analyticsService.track(Namespace.SYNC_QUESTIONNAIRE, Action.DISPLAYED, { + actionDescription: "First question has been shown to the user", + question: "delivery", + }); + } + }, [showFirstQuestion, analyticsService]); + + useEffect(() => { + if (showSecondQuestion) { + analyticsService.track(Namespace.SYNC_QUESTIONNAIRE, Action.DISPLAYED, { + actionDescription: "Second question has been shown to the user", + question: "all_or_some", + }); + } + }, [showSecondQuestion, analyticsService]); + return ( - - {enforcedSelectedDelivery == null && ( - + + {showFirstQuestion && ( + @@ -229,42 +272,37 @@ export const SimplifiedSchemaQuestionnaire = () => { } /> - + )}
    - - - - - - - } - /> - - + + + + + + + + } + /> + + +
    - - {selectionMessage && ( - - - {selectionMessage} - - )}
    ); }; diff --git a/airbyte-webapp/src/components/connection/CreateConnectionForm/SimplifiedConnectionCreation/SimplifiedSyncModeCard.tsx b/airbyte-webapp/src/components/connection/CreateConnectionForm/SimplifiedConnectionCreation/SimplifiedSyncModeCard.tsx deleted file mode 100644 index 3fa64cf5cf8..00000000000 --- a/airbyte-webapp/src/components/connection/CreateConnectionForm/SimplifiedConnectionCreation/SimplifiedSyncModeCard.tsx +++ /dev/null @@ -1,80 +0,0 @@ -import { useMemo, useState } from "react"; -import { useIntl } from "react-intl"; - -import { SUPPORTED_MODES } from "components/connection/ConnectionForm/formConfig"; -import { FormFieldLayout } from "components/connection/ConnectionForm/FormFieldLayout"; -import { SyncModeValue } from "components/connection/syncCatalog/SyncModeSelect"; -import { ControlLabels } from "components/LabeledControl"; -import { FlexContainer } from "components/ui/Flex"; -import { ListBox, Option } from "components/ui/ListBox"; -import { Text } from "components/ui/Text"; - -import { DestinationSyncMode, SyncMode } from "core/api/types/AirbyteClient"; -import { useConnectionFormService } from "hooks/services/ConnectionForm/ConnectionFormService"; - -export const SimplifiedSyncModeCard = () => { - const { formatMessage } = useIntl(); - const { - connection, - destDefinitionSpecification: { supportedDestinationSyncModes }, - } = useConnectionFormService(); - - const streamSupportedSyncModes: SyncMode[] = useMemo(() => { - const foundModes = new Set(); - for (let i = 0; i < connection.syncCatalog.streams.length; i++) { - const stream = connection.syncCatalog.streams[i]; - stream.stream?.supportedSyncModes?.forEach((mode) => foundModes.add(mode)); - } - return Array.from(foundModes); - }, [connection.syncCatalog.streams]); - - const availableSyncModes: SyncModeValue[] = useMemo( - () => - SUPPORTED_MODES.filter( - ([syncMode, destinationSyncMode]) => - streamSupportedSyncModes.includes(syncMode) && 
supportedDestinationSyncModes?.includes(destinationSyncMode) - ).map(([syncMode, destinationSyncMode]) => ({ - syncMode, - destinationSyncMode, - })), - [streamSupportedSyncModes, supportedDestinationSyncModes] - ); - - const syncModeOptions: Array> = useMemo( - () => - availableSyncModes.map((option) => { - const syncModeId = option.syncMode === SyncMode.full_refresh ? "syncMode.fullRefresh" : "syncMode.incremental"; - const destinationSyncModeId = - option.destinationSyncMode === DestinationSyncMode.overwrite - ? "destinationSyncMode.overwrite" - : option.destinationSyncMode === DestinationSyncMode.append_dedup - ? "destinationSyncMode.appendDedup" - : "destinationSyncMode.append"; - return { - label: `${formatMessage({ id: syncModeId })} | ${formatMessage({ - id: destinationSyncModeId, - })}`, - value: option, - }; - }), - [formatMessage, availableSyncModes] - ); - - const [defaultSyncMode, setDefaultSyncMode] = useState(availableSyncModes[0]); - - return ( - - - {formatMessage({ id: "form.syncMode" })} - - {formatMessage({ id: "form.syncMode.subtitle" })} - - - } - /> - - - ); -}; diff --git a/airbyte-webapp/src/components/connection/CreateConnectionForm/__snapshots__/CreateConnectionForm.test.tsx.snap b/airbyte-webapp/src/components/connection/CreateConnectionForm/__snapshots__/CreateConnectionForm.test.tsx.snap index 9b87244e172..9e4e96c22f6 100644 --- a/airbyte-webapp/src/components/connection/CreateConnectionForm/__snapshots__/CreateConnectionForm.test.tsx.snap +++ b/airbyte-webapp/src/components/connection/CreateConnectionForm/__snapshots__/CreateConnectionForm.test.tsx.snap @@ -1025,69 +1025,13 @@ exports[`CreateConnectionForm should render 1`] = `
    -
    - -
    -
    -
    + />
    -
    -
    -

    - All -

    -
    -
    -
    + />
    @@ -1347,18 +1291,22 @@ exports[`CreateConnectionForm should render with an error 1`] = ` >
    -
    -
    - +
    +
    +
    - Test Error - + + Test Error + +
    diff --git a/airbyte-webapp/src/components/connection/CreateConnectionForm/useAnalyticsTrackFunctions.ts b/airbyte-webapp/src/components/connection/CreateConnectionForm/useAnalyticsTrackFunctions.ts index 767e54ddfd5..9dcfd9ee0e4 100644 --- a/airbyte-webapp/src/components/connection/CreateConnectionForm/useAnalyticsTrackFunctions.ts +++ b/airbyte-webapp/src/components/connection/CreateConnectionForm/useAnalyticsTrackFunctions.ts @@ -1,6 +1,6 @@ import { useCallback } from "react"; -import { SchemaError } from "core/api"; +import { ErrorWithJobInfo } from "core/api"; import { DestinationRead, SourceRead } from "core/api/types/AirbyteClient"; import { Action, Namespace, useAnalyticsService } from "core/services/analytics"; @@ -11,17 +11,19 @@ export const useAnalyticsTrackFunctions = () => { const analyticsService = useAnalyticsService(); const trackFailure = useCallback( - (source: SourceRead, destination: DestinationRead, schemaError: SchemaError) => + (source: SourceRead, destination: DestinationRead, schemaError: Error | ErrorWithJobInfo) => { + const jobInfo = ErrorWithJobInfo.getJobInfo(schemaError); analyticsService.track(Namespace.CONNECTION, Action.DISCOVER_SCHEMA, { actionDescription: "Discover schema failure", connector_source_definition: source.sourceName, connector_source_definition_id: source.sourceDefinitionId, connector_destination_definition: destination.destinationName, connector_destination_definition_id: destination.destinationDefinitionId, - failure_type: schemaError?.response?.failureReason?.failureType, - failure_external_message: schemaError?.response?.failureReason?.externalMessage, - failure_internal_message: schemaError?.response?.failureReason?.internalMessage, - }), + failure_type: jobInfo?.failureReason?.failureType, + failure_external_message: jobInfo?.failureReason?.externalMessage, + failure_internal_message: jobInfo?.failureReason?.internalMessage, + }); + }, [analyticsService] ); diff --git 
a/airbyte-webapp/src/components/connection/DestinationNamespaceModal/DestinationNamespaceModal.tsx b/airbyte-webapp/src/components/connection/DestinationNamespaceModal/DestinationNamespaceModal.tsx index 66395a20821..a80d67c7221 100644 --- a/airbyte-webapp/src/components/connection/DestinationNamespaceModal/DestinationNamespaceModal.tsx +++ b/airbyte-webapp/src/components/connection/DestinationNamespaceModal/DestinationNamespaceModal.tsx @@ -52,21 +52,17 @@ const destinationNamespaceValidationSchema = yup.object().shape({ interface DestinationNamespaceModalProps { initialValues: Pick; - onCloseModal: () => void; - onSubmit: (values: DestinationNamespaceFormValues) => void; + onCancel: () => void; + onSubmit: (values: DestinationNamespaceFormValues) => Promise; } export const DestinationNamespaceModal: React.FC = ({ initialValues, - onCloseModal, + onCancel, onSubmit, }) => { const { formatMessage } = useIntl(); - const onSubmitCallback = async (values: DestinationNamespaceFormValues) => { - onCloseModal(); - onSubmit(values); - }; return (
    namespaceFormat: initialValues.namespaceFormat ?? "${SOURCE_NAMESPACE}", }} schema={destinationNamespaceValidationSchema} - onSubmit={onSubmitCallback} + onSubmit={onSubmit} > <> @@ -112,7 +108,7 @@ export const DestinationNamespaceModal: React.FC diff --git a/airbyte-webapp/src/components/connection/DestinationStreamNamesModal/DestinationStreamNamesModal.tsx b/airbyte-webapp/src/components/connection/DestinationStreamNamesModal/DestinationStreamNamesModal.tsx index f0114699420..b9dcc551a47 100644 --- a/airbyte-webapp/src/components/connection/DestinationStreamNamesModal/DestinationStreamNamesModal.tsx +++ b/airbyte-webapp/src/components/connection/DestinationStreamNamesModal/DestinationStreamNamesModal.tsx @@ -70,22 +70,17 @@ const destinationStreamNamesValidationSchema = yup.object().shape({ interface DestinationStreamNamesModalProps { initialValues: Pick; - onCloseModal: () => void; - onSubmit: (value: DestinationStreamNamesFormValues) => void; + onCancel: () => void; + onSubmit: (value: DestinationStreamNamesFormValues) => Promise; } export const DestinationStreamNamesModal: React.FC = ({ initialValues, - onCloseModal, + onCancel, onSubmit, }) => { const { formatMessage } = useIntl(); - const onSubmitCallback = async (values: DestinationStreamNamesFormValues) => { - onCloseModal(); - onSubmit(values); - }; - return ( @@ -135,7 +130,7 @@ export const DestinationStreamNamesModal: React.FC diff --git a/airbyte-webapp/src/components/connection/JobProgress/JobProgress.module.scss b/airbyte-webapp/src/components/connection/JobProgress/JobProgress.module.scss deleted file mode 100644 index 21e0315ae22..00000000000 --- a/airbyte-webapp/src/components/connection/JobProgress/JobProgress.module.scss +++ /dev/null @@ -1,23 +0,0 @@ -@use "scss/variables"; -@use "scss/colors"; - -.estimationStats { - display: flex; - justify-content: space-between; -} - -.estimationDetails { - display: flex; - gap: variables.$spacing-lg; - margin: variables.$spacing-md 0; - - .icon { - 
color: colors.$grey-400; - margin-right: variables.$spacing-sm; - } -} - -.streams { - margin: variables.$spacing-md 0 0; - width: 100%; -} diff --git a/airbyte-webapp/src/components/connection/JobProgress/JobProgress.tsx b/airbyte-webapp/src/components/connection/JobProgress/JobProgress.tsx deleted file mode 100644 index ed2fbaf8ee6..00000000000 --- a/airbyte-webapp/src/components/connection/JobProgress/JobProgress.tsx +++ /dev/null @@ -1,125 +0,0 @@ -import classNames from "classnames"; -import { FormattedMessage, useIntl } from "react-intl"; - -import { Icon } from "components/ui/Icon"; -import { Text } from "components/ui/Text"; - -import { JobWithAttempts } from "area/connection/types/jobs"; -import { getJobStatus } from "area/connection/utils/jobs"; -import { AttemptRead, AttemptStatus, SynchronousJobRead } from "core/api/types/AirbyteClient"; -import { formatBytes } from "core/utils/numberHelper"; - -import styles from "./JobProgress.module.scss"; -import { ProgressLine } from "./JobProgressLine"; -import { StreamProgress } from "./StreamProgress"; -import { progressBarCalculations } from "./utils"; - -function isJobsWithJobs(job: JobWithAttempts | SynchronousJobRead): job is JobWithAttempts { - return "attempts" in job; -} - -interface ProgressBarProps { - job: JobWithAttempts | SynchronousJobRead; - expanded?: boolean; -} - -export const JobProgress: React.FC = ({ job, expanded }) => { - const { formatMessage, formatNumber } = useIntl(); - - let latestAttempt: AttemptRead | undefined; - if (isJobsWithJobs(job) && job.attempts) { - latestAttempt = job.attempts[job.attempts.length - 1]; - } - if (!latestAttempt) { - return null; - } - - const jobStatus = getJobStatus(job); - if (["failed", "succeeded", "cancelled"].includes(jobStatus)) { - return null; - } - - const { - displayProgressBar, - totalPercentRecords, - timeRemaining, - numeratorBytes, - numeratorRecords, - denominatorRecords, - denominatorBytes, - elapsedTimeMS, - } = 
progressBarCalculations(latestAttempt); - - let timeRemainingString = ""; - if (elapsedTimeMS && timeRemaining) { - const minutesRemaining = Math.ceil(timeRemaining / 1000 / 60); - const hoursRemaining = Math.ceil(minutesRemaining / 60); - if (minutesRemaining <= 60) { - timeRemainingString = formatMessage({ id: "connection.progress.minutesRemaining" }, { value: minutesRemaining }); - } else { - timeRemainingString = formatMessage({ id: "connection.progress.hoursRemaining" }, { value: hoursRemaining }); - } - } - - return ( - - {displayProgressBar && ( - - )} - {latestAttempt?.status === AttemptStatus.running && ( - <> - {displayProgressBar && ( -
    - {timeRemaining < Infinity && timeRemaining > 0 && timeRemainingString} - {formatNumber(totalPercentRecords, { style: "percent", maximumFractionDigits: 0 })} -
    - )} - {expanded && ( - <> - {denominatorRecords > 0 && denominatorBytes > 0 && ( -
    - - - - - - - - -
    - )} - {latestAttempt.streamStats && ( -
    - {latestAttempt.streamStats - ?.map((stats) => ({ - ...stats, - done: (stats.stats.recordsEmitted ?? 0) >= (stats.stats.estimatedRecords ?? Infinity), - })) - // Move finished streams to the end of the list - .sort((a, b) => Number(a.done) - Number(b.done)) - .map((stream) => { - return ; - })} -
    - )} - - )} - - )} -
    - ); -}; diff --git a/airbyte-webapp/src/components/connection/JobProgress/JobProgressLine.module.scss b/airbyte-webapp/src/components/connection/JobProgress/JobProgressLine.module.scss deleted file mode 100644 index d23bb032cb3..00000000000 --- a/airbyte-webapp/src/components/connection/JobProgress/JobProgressLine.module.scss +++ /dev/null @@ -1,24 +0,0 @@ -@use "scss/colors"; - -.lineOuter { - height: 5px; - width: 100%; - background-color: colors.$grey-100; - border-radius: 10px; - margin-top: 5px; - margin-bottom: 5px; -} - -.lineInner { - height: 100%; - border-radius: 10px; - transition: width 5s ease-in-out; - - &.warning { - background-color: colors.$yellow-400; - } - - &.default { - background-color: colors.$blue-200; - } -} diff --git a/airbyte-webapp/src/components/connection/JobProgress/JobProgressLine.tsx b/airbyte-webapp/src/components/connection/JobProgress/JobProgressLine.tsx deleted file mode 100644 index 028b730261e..00000000000 --- a/airbyte-webapp/src/components/connection/JobProgress/JobProgressLine.tsx +++ /dev/null @@ -1,29 +0,0 @@ -// Inspired by https://dev.to/ramonak/react-how-to-create-a-custom-progress-bar-component-in-5-minutes-2lcl - -import classNames from "classnames"; -import { useIntl } from "react-intl"; - -import styles from "./JobProgressLine.module.scss"; - -interface ProgressLineProps { - type?: "default" | "warning"; - percent: number; -} - -export const ProgressLine: React.FC = ({ type = "default", percent }) => { - const { formatMessage } = useIntl(); - return ( -
    -
    -
    - ); -}; diff --git a/airbyte-webapp/src/components/connection/JobProgress/StreamProgress.module.scss b/airbyte-webapp/src/components/connection/JobProgress/StreamProgress.module.scss deleted file mode 100644 index 04f4f053246..00000000000 --- a/airbyte-webapp/src/components/connection/JobProgress/StreamProgress.module.scss +++ /dev/null @@ -1,73 +0,0 @@ -@use "scss/variables"; -@use "scss/colors"; - -.stream { - display: inline-block; - padding: variables.$spacing-xs variables.$spacing-md; - background: colors.$blue-50; - border-radius: 16px; - margin-right: variables.$spacing-md; - margin-bottom: variables.$spacing-sm; - white-space: nowrap; - color: colors.$grey-700; - line-height: 16px; -} - -.wrapper { - display: flex; - align-items: center; - gap: variables.$spacing-sm; - min-height: 16px + 2 * variables.$spacing-xs; -} - -.progress { - justify-content: center; - align-items: center; - min-width: 16px; - margin: variables.$spacing-xs; - aspect-ratio: 1 / 1; - display: flex; - - .check { - fill: colors.$foreground; - display: none; - } - - .fg { - stroke: colors.$blue; - } - - .bg { - fill: colors.$foreground; - } - - &.done { - .bg { - fill: colors.$green; - } - - .fg { - display: none; - } - - .check { - display: block; - } - } -} - -.metrics { - margin: variables.$spacing-md 0 0; - display: grid; - grid-template-columns: max-content auto; - gap: variables.$spacing-md; - - dt { - grid-column-start: 1; - } - - dd { - margin: 0; - grid-column-start: 2; - } -} diff --git a/airbyte-webapp/src/components/connection/JobProgress/StreamProgress.tsx b/airbyte-webapp/src/components/connection/JobProgress/StreamProgress.tsx deleted file mode 100644 index 4a9bd328b9c..00000000000 --- a/airbyte-webapp/src/components/connection/JobProgress/StreamProgress.tsx +++ /dev/null @@ -1,96 +0,0 @@ -import classNames from "classnames"; -import { FormattedMessage, FormattedNumber, useIntl } from "react-intl"; - -import { Text } from "components/ui/Text"; -import { Tooltip } from 
"components/ui/Tooltip"; - -import { AttemptStreamStats } from "core/api/types/AirbyteClient"; - -import styles from "./StreamProgress.module.scss"; - -interface StreamProgressProps { - stream: AttemptStreamStats; -} - -const CircleProgress: React.FC<{ percent: number }> = ({ percent }) => { - const svgClassName = classNames(styles.progress, { - [styles.done]: percent >= 1, - }); - - return ( - - - - - - ); -}; - -export const StreamProgress: React.FC = ({ stream }) => { - const { formatNumber } = useIntl(); - const { recordsEmitted, estimatedRecords } = stream.stats; - - const progress = estimatedRecords ? (recordsEmitted ?? 0) / estimatedRecords : undefined; - - return ( - - - {stream.streamName} {progress !== undefined && } - - - } - > - - - -
    -
    - -
    -
    - {progress !== undefined ? ( - - ) : ( - - )} -
    - {(estimatedRecords || recordsEmitted) && ( - <> -
    - -
    -
    - {estimatedRecords ? ( - - ) : ( - - )} -
    - - )} -
    -
    - ); -}; diff --git a/airbyte-webapp/src/components/connection/JobProgress/index.tsx b/airbyte-webapp/src/components/connection/JobProgress/index.tsx deleted file mode 100644 index 5e1783a4557..00000000000 --- a/airbyte-webapp/src/components/connection/JobProgress/index.tsx +++ /dev/null @@ -1 +0,0 @@ -export * from "./JobProgress"; diff --git a/airbyte-webapp/src/components/connection/JobProgress/utils.test.ts b/airbyte-webapp/src/components/connection/JobProgress/utils.test.ts deleted file mode 100644 index 6339f209040..00000000000 --- a/airbyte-webapp/src/components/connection/JobProgress/utils.test.ts +++ /dev/null @@ -1,67 +0,0 @@ -import { AttemptRead, AttemptStats, AttemptStatus, AttemptStreamStats } from "core/api/types/AirbyteClient"; - -import { progressBarCalculations } from "./utils"; - -describe("#progressBarCalculations", () => { - beforeEach(() => { - jest.spyOn(Date, "now").mockImplementation(() => new Date("2023-01-01T00:00:00.000Z").getTime()); - }); - - afterEach(() => { - jest.restoreAllMocks(); - }); - - it("for an attempt with no throughput information", () => { - const attempt = makeAttempt(); - const { displayProgressBar, totalPercentRecords } = progressBarCalculations(attempt); - - expect(displayProgressBar).toEqual(false); - expect(totalPercentRecords).toEqual(0); - }); - - it("for an attempt with total stats", () => { - const totalStats: AttemptStats = { recordsEmitted: 1, estimatedRecords: 100, bytesEmitted: 1, estimatedBytes: 50 }; - const attempt = makeAttempt(totalStats); - const { displayProgressBar, totalPercentRecords, elapsedTimeMS, timeRemaining } = progressBarCalculations(attempt); - - expect(displayProgressBar).toEqual(true); - expect(totalPercentRecords).toEqual(0.01); - expect(elapsedTimeMS).toEqual(10 * 1000); - expect(timeRemaining).toEqual(990 * 1000); - }); - - it("for an attempt with per-stream stats", () => { - const totalStats: AttemptStats = { recordsEmitted: 3, estimatedRecords: 300, bytesEmitted: 3, 
estimatedBytes: 300 }; - const streamStatsA: AttemptStreamStats = { - streamName: "A", - stats: { recordsEmitted: 1, estimatedRecords: 100, bytesEmitted: 1, estimatedBytes: 100 }, - }; - const streamStatsB: AttemptStreamStats = { - streamName: "B", - stats: { recordsEmitted: 2, estimatedRecords: 100, bytesEmitted: 2, estimatedBytes: 100 }, - }; - const streamStatsC: AttemptStreamStats = { - streamName: "C", - stats: {}, - }; - - const attempt = makeAttempt(totalStats, [streamStatsA, streamStatsB, streamStatsC]); - const { displayProgressBar, totalPercentRecords, elapsedTimeMS, timeRemaining } = progressBarCalculations(attempt); - - expect(displayProgressBar).toEqual(true); - expect(totalPercentRecords).toEqual(0.01); - expect(elapsedTimeMS).toEqual(10 * 1000); - expect(timeRemaining).toEqual(990 * 1000); - }); -}); - -const makeAttempt = (totalStats: AttemptStats = {}, streamStats: AttemptStreamStats[] = []) => { - const now = Date.now(); - // API returns time in seconds - const createdAt = now / 1000 - 10; - const updatedAt = now / 1000; - const id = 123; - const status: AttemptStatus = "running"; - const attempt: AttemptRead = { id, status, createdAt, updatedAt, totalStats, streamStats }; - return attempt; -}; diff --git a/airbyte-webapp/src/components/connection/JobProgress/utils.ts b/airbyte-webapp/src/components/connection/JobProgress/utils.ts deleted file mode 100644 index ce6435307d2..00000000000 --- a/airbyte-webapp/src/components/connection/JobProgress/utils.ts +++ /dev/null @@ -1,52 +0,0 @@ -import { AttemptRead, AttemptStatus } from "core/api/types/AirbyteClient"; - -export const progressBarCalculations = (latestAttempt: AttemptRead) => { - let numeratorRecords = 0; - let denominatorRecords = 0; - let totalPercentRecords = 0; - let numeratorBytes = 0; - let denominatorBytes = 0; - let elapsedTimeMS = 0; - let timeRemaining = 0; - let displayProgressBar = true; - - if ( - latestAttempt.totalStats?.recordsEmitted !== undefined && - 
latestAttempt.totalStats?.estimatedRecords !== undefined && - latestAttempt.totalStats?.bytesEmitted !== undefined && - latestAttempt.totalStats?.estimatedBytes !== undefined - ) { - numeratorRecords = latestAttempt.totalStats.recordsEmitted; - denominatorRecords = latestAttempt.totalStats.estimatedRecords; - numeratorBytes = latestAttempt.totalStats.bytesEmitted; - denominatorBytes = latestAttempt.totalStats.estimatedBytes; - } else if (latestAttempt.streamStats) { - for (const stream of latestAttempt.streamStats) { - numeratorRecords += stream.stats.recordsEmitted ?? 0; - denominatorRecords += stream.stats.estimatedRecords ?? 0; - numeratorBytes += stream.stats.bytesEmitted ?? 0; - denominatorBytes += stream.stats.estimatedBytes ?? 0; - } - } - - totalPercentRecords = denominatorRecords > 0 ? numeratorRecords / denominatorRecords : 0; - - // chose to estimate time remaining based on records rather than bytes - if (latestAttempt.status === AttemptStatus.running && denominatorRecords > 0) { - elapsedTimeMS = Date.now() - latestAttempt.createdAt * 1000; - timeRemaining = Math.floor(elapsedTimeMS / totalPercentRecords) * (1 - totalPercentRecords); // in ms - } else { - displayProgressBar = false; - } - - return { - displayProgressBar, - totalPercentRecords, - timeRemaining, - numeratorBytes, - numeratorRecords, - denominatorRecords, - denominatorBytes, - elapsedTimeMS, - }; -}; diff --git a/airbyte-webapp/src/components/connection/syncCatalog/CellText/CellText.tsx b/airbyte-webapp/src/components/connection/syncCatalog/CellText/CellText.tsx index 67227f4ee45..ab53a87c02c 100644 --- a/airbyte-webapp/src/components/connection/syncCatalog/CellText/CellText.tsx +++ b/airbyte-webapp/src/components/connection/syncCatalog/CellText/CellText.tsx @@ -8,7 +8,6 @@ type Sizes = "xsmall" | "small" | "medium" | "large" | "fixed"; export interface CellTextProps { size?: Sizes; className?: string; - withOverflow?: boolean; } // This lets us avoid the eslint complaint about unused 
styles @@ -22,16 +21,12 @@ const STYLES_BY_SIZE: Readonly> = { export const CellText: React.FC> = ({ size = "medium", - withOverflow, className, children, ...props }) => { return ( -
    +
    {children}
    ); diff --git a/airbyte-webapp/src/components/connection/syncCatalog/StreamDetailsPanel/StreamPanelHeader.tsx b/airbyte-webapp/src/components/connection/syncCatalog/StreamDetailsPanel/StreamPanelHeader.tsx index 19ff2d0f770..dc26f956731 100644 --- a/airbyte-webapp/src/components/connection/syncCatalog/StreamDetailsPanel/StreamPanelHeader.tsx +++ b/airbyte-webapp/src/components/connection/syncCatalog/StreamDetailsPanel/StreamPanelHeader.tsx @@ -1,25 +1,23 @@ -import React, { ReactNode, useMemo } from "react"; +import React, { ReactNode } from "react"; import { FormattedMessage } from "react-intl"; import { Box } from "components/ui/Box"; import { Button } from "components/ui/Button"; import { FlexContainer, FlexItem } from "components/ui/Flex"; -import { Icon } from "components/ui/Icon"; import { Switch } from "components/ui/Switch"; import { Text } from "components/ui/Text"; import { AirbyteStream, AirbyteStreamConfiguration } from "core/api/types/AirbyteClient"; -import { useExperiment } from "hooks/services/Experiment"; import styles from "./StreamPanelHeader.module.scss"; import { SyncModeSelect, SyncModeValue } from "../SyncModeSelect"; interface StreamPanelHeaderProps { - config?: AirbyteStreamConfiguration; + stream: AirbyteStream; + config: AirbyteStreamConfiguration; disabled?: boolean; onClose: () => void; onSelectedChange: () => void; - stream?: AirbyteStream; onSelectSyncMode: (option: SyncModeValue) => void; availableSyncModes: SyncModeValue[]; } @@ -42,21 +40,9 @@ export const StreamProperty: React.FC = ({ messageId, value ); const NamespaceProperty: React.FC<{ namespace?: string }> = ({ namespace }) => { - const isSimplifiedCatalogRowEnabled = useExperiment("connection.syncCatalog.simplifiedCatalogRow", true); - - if (isSimplifiedCatalogRowEnabled) { - return namespace ? ( - - ) : null; - } - - return ( - } - data-testid="stream-details-namespace" - /> - ); + return namespace ? 
( + + ) : null; }; export const StreamPanelHeader: React.FC = ({ @@ -68,37 +54,19 @@ export const StreamPanelHeader: React.FC = ({ availableSyncModes, onSelectSyncMode, }) => { - const isSimplifiedCatalogRowEnabled = useExperiment("connection.syncCatalog.simplifiedCatalogRow", true); - - const syncSchema: SyncModeValue | undefined = useMemo(() => { - if (!config) { - return undefined; - } - const { syncMode, destinationSyncMode } = config; - return { syncMode, destinationSyncMode }; - }, [config]); - - const syncMode = ( - <> - {config?.syncMode && } - {` | `} - {config?.destinationSyncMode && } - - ); + const { syncMode, destinationSyncMode, selected: isStreamSelectedForSync } = config ?? {}; return ( -
    - -
    + @@ -106,26 +74,24 @@ export const StreamPanelHeader: React.FC = ({ - - {isSimplifiedCatalogRowEnabled ? ( + {isStreamSelectedForSync && ( + - ) : ( - - )} - + + )}
    - } - placement="top" - > + } placement="top"> ); diff --git a/airbyte-webapp/src/components/connectorBuilder/Builder/BuilderList.tsx b/airbyte-webapp/src/components/connectorBuilder/Builder/BuilderList.tsx index 75d73f0e0f1..41016b9fc94 100644 --- a/airbyte-webapp/src/components/connectorBuilder/Builder/BuilderList.tsx +++ b/airbyte-webapp/src/components/connectorBuilder/Builder/BuilderList.tsx @@ -2,7 +2,6 @@ import React, { ReactElement, useMemo } from "react"; import { useFieldArray } from "react-hook-form"; import { Button } from "components/ui/Button"; -import { Icon } from "components/ui/Icon"; import { RemoveButton } from "components/ui/RemoveButton/RemoveButton"; import styles from "./BuilderList.module.scss"; @@ -43,7 +42,7 @@ export const BuilderList: React.FC = ({ children, emptyItem, b - diff --git a/airbyte-webapp/src/components/connectorBuilder/Builder/InputsView.tsx b/airbyte-webapp/src/components/connectorBuilder/Builder/InputsView.tsx index 36d35653eff..b945646fb6d 100644 --- a/airbyte-webapp/src/components/connectorBuilder/Builder/InputsView.tsx +++ b/airbyte-webapp/src/components/connectorBuilder/Builder/InputsView.tsx @@ -1,10 +1,10 @@ -import { DndContext, closestCenter, useSensor, useSensors, DragEndEvent } from "@dnd-kit/core"; +import { closestCenter, DndContext, DragEndEvent, useSensor, useSensors } from "@dnd-kit/core"; import { arrayMove, SortableContext, sortableKeyboardCoordinates, - verticalListSortingStrategy, useSortable, + verticalListSortingStrategy, } from "@dnd-kit/sortable"; import { CSS } from "@dnd-kit/utilities"; import React, { useMemo, useState } from "react"; @@ -22,19 +22,16 @@ import { BuilderConfigView } from "./BuilderConfigView"; import { KeyboardSensor, PointerSensor } from "./dndSensors"; import { InputForm, InputInEditing, newInputInEditing } from "./InputsForm"; import styles from "./InputsView.module.scss"; -import { BuilderFormInput, orderInputs, useBuilderWatch } from "../types"; -import { 
useInferredInputs } from "../useInferredInputs"; +import { BuilderFormInput, useBuilderWatch } from "../types"; const supportedTypes = ["string", "integer", "number", "array", "boolean", "enum", "unknown"] as const; export const InputsView: React.FC = () => { const { formatMessage } = useIntl(); const inputs = useBuilderWatch("formValues.inputs"); - const storedInputOrder = useBuilderWatch("formValues.inputOrder"); const { setValue } = useFormContext(); const { permission } = useConnectorBuilderFormState(); const [inputInEditing, setInputInEditing] = useState(undefined); - const inferredInputs = useInferredInputs(); const sensors = useSensors( useSensor(PointerSensor), useSensor(KeyboardSensor, { @@ -42,19 +39,15 @@ export const InputsView: React.FC = () => { }) ); - const { orderedInputs, inputOrder } = useMemo(() => { - const orderedInputs = orderInputs(inputs, inferredInputs, storedInputOrder); - const inputOrder = orderedInputs.map((input) => input.id); - return { orderedInputs, inputOrder }; - }, [inferredInputs, storedInputOrder, inputs]); + const inputsWithIds = useMemo(() => inputs.map((input) => ({ input, id: input.key })), [inputs]); const handleDragEnd = (event: DragEndEvent) => { const { active, over } = event; if (over !== null && active.id !== over.id) { - const oldIndex = inputOrder.indexOf(active.id.toString()); - const newIndex = inputOrder.indexOf(over.id.toString()); - setValue("formValues.inputOrder", arrayMove(inputOrder, oldIndex, newIndex)); + const oldIndex = inputs.findIndex((input) => input.key === active.id.toString()); + const newIndex = inputs.findIndex((input) => input.key === over.id.toString()); + setValue("formValues.inputs", arrayMove(inputs, oldIndex, newIndex)); } }; @@ -66,9 +59,9 @@ export const InputsView: React.FC = () => { - - {orderedInputs.map((input) => ( - + + {inputsWithIds.map((inputWithId) => ( + ))} @@ -78,7 +71,7 @@ export const InputsView: React.FC = () => { onClick={() => { setInputInEditing(newInputInEditing()); 
}} - icon={} + icon="plus" iconPosition="left" variant="secondary" type="button" @@ -115,30 +108,26 @@ function getType(definition: BuilderFormInput["definition"]): InputInEditing["ty return supportedType; } -function formInputToInputInEditing( - { key, definition, required }: BuilderFormInput, - isInferredInputOverride: boolean -): InputInEditing { +function formInputToInputInEditing({ key, definition, required, isLocked }: BuilderFormInput): InputInEditing { return { key, previousKey: key, definition, required, + isLocked, isNew: false, showDefaultValueField: Boolean(definition.default), type: getType(definition), - isInferredInputOverride, }; } interface SortableInputProps { input: BuilderFormInput; - isInferred: boolean; id: string; setInputInEditing: (inputInEditing: InputInEditing) => void; } -const SortableInput: React.FC = ({ input, isInferred, id, setInputInEditing }) => { +const SortableInput: React.FC = ({ input, id, setInputInEditing }) => { const { attributes, listeners, setNodeRef, transform, transition, isDragging } = useSortable({ id }); const { permission } = useConnectorBuilderFormState(); const canEdit = permission !== "readOnly"; @@ -164,7 +153,7 @@ const SortableInput: React.FC = ({ input, isInferred, id, se aria-label="Edit" type="button" onClick={() => { - setInputInEditing(formInputToInputInEditing(input, isInferred)); + setInputInEditing(formInputToInputInEditing(input)); }} data-no-dnd="true" > diff --git a/airbyte-webapp/src/components/connectorBuilder/Builder/PaginationSection.tsx b/airbyte-webapp/src/components/connectorBuilder/Builder/PaginationSection.tsx index 22b3a8ba650..184559f0151 100644 --- a/airbyte-webapp/src/components/connectorBuilder/Builder/PaginationSection.tsx +++ b/airbyte-webapp/src/components/connectorBuilder/Builder/PaginationSection.tsx @@ -13,6 +13,7 @@ import { BuilderFieldWithInputs } from "./BuilderFieldWithInputs"; import { BuilderOneOf } from "./BuilderOneOf"; import { BuilderRequestInjection } from 
"./BuilderRequestInjection"; import { ToggleGroupField } from "./ToggleGroupField"; +import { manifestPaginatorToBuilder } from "../convertManifestToBuilderForm"; import { BuilderCursorPagination, BuilderPaginator, @@ -21,6 +22,7 @@ import { PAGE_INCREMENT, StreamPathFn, useBuilderWatch, + builderPaginatorToManifest, } from "../types"; interface PaginationSectionProps { @@ -39,7 +41,8 @@ export const PaginationSection: React.FC = ({ streamFiel docLink={links.connectorBuilderPagination} label={formatMessage({ id: "connectorBuilder.pagination.label" })} tooltip={formatMessage({ id: "connectorBuilder.pagination.tooltip" })} - toggleConfig={{ + inputsConfig={{ + toggleable: true, path: streamFieldPath("paginator"), defaultValue: { strategy: { @@ -56,6 +59,10 @@ export const PaginationSection: React.FC = ({ streamFiel field_name: "", }, }, + yamlConfig: { + builderToManifest: builderPaginatorToManifest, + manifestToBuilder: manifestPaginatorToBuilder, + }, }} copyConfig={{ path: "paginator", @@ -83,6 +90,8 @@ export const PaginationSection: React.FC = ({ streamFiel manifestPath="OffsetIncrement.properties.page_size" path={streamFieldPath("paginator.strategy.page_size")} optional + step={1} + min={1} /> {pageSize ? ( = ({ streamFiel path={streamFieldPath("paginator.strategy.page_size")} manifestPath="PageIncrement.properties.page_size" optional + step={1} + min={1} /> {pageSize ? ( = ({ streamFiel } }} optional + step={1} + min={1} /> {pageSize ? 
( = ({ stre docLink={links.connectorBuilderParentStream} label={formatMessage({ id: "connectorBuilder.parentStreams.label" })} tooltip={formatMessage({ id: "connectorBuilder.parentStreams.tooltip" })} - toggleConfig={{ + inputsConfig={{ + toggleable: true, path: streamFieldPath("parentStreams"), defaultValue: [EMPTY_PARENT_STREAM], }} diff --git a/airbyte-webapp/src/components/connectorBuilder/Builder/StreamConfigView.tsx b/airbyte-webapp/src/components/connectorBuilder/Builder/StreamConfigView.tsx index 28cfe01d672..18c310c9786 100644 --- a/airbyte-webapp/src/components/connectorBuilder/Builder/StreamConfigView.tsx +++ b/airbyte-webapp/src/components/connectorBuilder/Builder/StreamConfigView.tsx @@ -246,7 +246,7 @@ const StreamTab = ({ const SchemaEditor = ({ streamFieldPath }: { streamFieldPath: StreamPathFn }) => { const { formatMessage } = useIntl(); const analyticsService = useAnalyticsService(); - const { permission } = useConnectorBuilderFormState(); + const { permission, streamNames } = useConnectorBuilderFormState(); const autoImportSchemaFieldPath = streamFieldPath("autoImportSchema"); const autoImportSchema = useBuilderWatch(autoImportSchemaFieldPath); const schemaFieldPath = streamFieldPath("schema"); @@ -256,10 +256,7 @@ const SchemaEditor = ({ streamFieldPath }: { streamFieldPath: StreamPathFn }) => const path = streamFieldPath("schema"); const { errors } = useFormState({ name: path }); const error = get(errors, path); - const { - resolvedManifest: { streams }, - streamRead, - } = useConnectorBuilderTestRead(); + const { streamRead } = useConnectorBuilderTestRead(); const showImportButton = !autoImportSchema && isEmptyOrDefault(schema) && streamRead.data?.inferred_schema; const formattedSchema = useMemo(() => { @@ -294,7 +291,7 @@ const SchemaEditor = ({ streamFieldPath }: { streamFieldPath: StreamPathFn }) => setValue(path, formattedJson); analyticsService.track(Namespace.CONNECTOR_BUILDER, Action.OVERWRITE_SCHEMA, { actionDescription: "Declared 
schema overwritten by detected schema", - stream_name: streams[testStreamIndex]?.name, + stream_name: streamNames[testStreamIndex], }); }} > diff --git a/airbyte-webapp/src/components/connectorBuilder/Builder/TransformationSection.tsx b/airbyte-webapp/src/components/connectorBuilder/Builder/TransformationSection.tsx index 2c3d8dacfae..4710cdc0f6d 100644 --- a/airbyte-webapp/src/components/connectorBuilder/Builder/TransformationSection.tsx +++ b/airbyte-webapp/src/components/connectorBuilder/Builder/TransformationSection.tsx @@ -9,7 +9,8 @@ import { BuilderFieldWithInputs } from "./BuilderFieldWithInputs"; import { BuilderList } from "./BuilderList"; import { BuilderOneOf, OneOfOption } from "./BuilderOneOf"; import { getDescriptionByManifest, getLabelByManifest } from "./manifestHelpers"; -import { BuilderTransformation } from "../types"; +import { manifestTransformationsToBuilder } from "../convertManifestToBuilderForm"; +import { BuilderTransformation, builderTransformationsToManifest } from "../types"; interface TransformationSectionProps { streamFieldPath: (fieldPath: T) => `formValues.streams.${number}.${T}`; @@ -63,7 +64,8 @@ export const TransformationSection: React.FC = ({ docLink={links.connectorBuilderTransformations} label={getLabelByManifest("DeclarativeStream.properties.transformations")} tooltip={getDescriptionByManifest("DeclarativeStream.properties.transformations")} - toggleConfig={{ + inputsConfig={{ + toggleable: true, path: streamFieldPath("transformations"), defaultValue: [ { @@ -71,6 +73,10 @@ export const TransformationSection: React.FC = ({ path: [], }, ], + yamlConfig: { + builderToManifest: builderTransformationsToManifest, + manifestToBuilder: manifestTransformationsToBuilder, + }, }} copyConfig={{ path: "transformations", diff --git a/airbyte-webapp/src/components/connectorBuilder/DownloadYamlButton.tsx b/airbyte-webapp/src/components/connectorBuilder/DownloadYamlButton.tsx index a5b2b6eb9cc..05fd08bfed6 100644 --- 
a/airbyte-webapp/src/components/connectorBuilder/DownloadYamlButton.tsx +++ b/airbyte-webapp/src/components/connectorBuilder/DownloadYamlButton.tsx @@ -1,14 +1,15 @@ -import { dump } from "js-yaml"; import snakeCase from "lodash/snakeCase"; import { FormattedMessage } from "react-intl"; import { Button } from "components/ui/Button"; -import { Icon } from "components/ui/Icon"; import { Tooltip } from "components/ui/Tooltip"; import { Action, Namespace, useAnalyticsService } from "core/services/analytics"; -import { FILE_TYPE_DOWNLOAD, downloadFile } from "core/utils/file"; -import { useConnectorBuilderFormState } from "services/connectorBuilder/ConnectorBuilderStateService"; +import { downloadFile, FILE_TYPE_DOWNLOAD } from "core/utils/file"; +import { + convertJsonToYaml, + useConnectorBuilderFormState, +} from "services/connectorBuilder/ConnectorBuilderStateService"; import styles from "./DownloadYamlButton.module.scss"; import { useBuilderWatch } from "./types"; @@ -27,12 +28,7 @@ export const DownloadYamlButton: React.FC = ({ classNam const mode = useBuilderWatch("mode"); const downloadYaml = () => { - const yamlToDownload = - mode === "ui" - ? dump(jsonManifest, { - noRefs: true, - }) - : yaml; + const yamlToDownload = mode === "ui" ? convertJsonToYaml(jsonManifest) : yaml; const file = new Blob([yamlToDownload], { type: FILE_TYPE_DOWNLOAD }); downloadFile(file, connectorNameField ? `${snakeCase(connectorNameField)}.yaml` : "connector_builder.yaml"); analyticsService.track(Namespace.CONNECTOR_BUILDER, Action.DOWNLOAD_YAML, { @@ -71,7 +67,7 @@ export const DownloadYamlButton: React.FC = ({ classNam variant="secondary" onClick={handleClick} disabled={buttonDisabled} - icon={showWarningIcon ? 
: undefined} + {...(showWarningIcon && { icon: "warningOutline" })} data-testid="download-yaml-button" type="button" > diff --git a/airbyte-webapp/src/components/connectorBuilder/PublishButton.tsx b/airbyte-webapp/src/components/connectorBuilder/PublishButton.tsx index f685babc99c..d91596afd4d 100644 --- a/airbyte-webapp/src/components/connectorBuilder/PublishButton.tsx +++ b/airbyte-webapp/src/components/connectorBuilder/PublishButton.tsx @@ -2,13 +2,9 @@ import { useState } from "react"; import { FormattedMessage } from "react-intl"; import { Button } from "components/ui/Button"; -import { Icon } from "components/ui/Icon"; import { Tooltip } from "components/ui/Tooltip"; -import { - useConnectorBuilderFormState, - useConnectorBuilderTestRead, -} from "services/connectorBuilder/ConnectorBuilderStateService"; +import { useConnectorBuilderFormState } from "services/connectorBuilder/ConnectorBuilderStateService"; import styles from "./PublishButton.module.scss"; import { PublishModal } from "./PublishModal"; @@ -20,11 +16,10 @@ interface PublishButtonProps { export const PublishButton: React.FC = ({ className }) => { const [isModalOpen, setModalOpen] = useState(false); - const { currentProject, yamlIsValid, formValuesValid, permission } = useConnectorBuilderFormState(); + const { currentProject, yamlIsValid, formValuesValid, permission, resolveErrorMessage } = + useConnectorBuilderFormState(); const mode = useBuilderWatch("mode"); - const { resolveErrorMessage } = useConnectorBuilderTestRead(); - let buttonDisabled = permission === "readOnly"; let showWarningIcon = false; let tooltipContent = undefined; @@ -57,7 +52,7 @@ export const PublishButton: React.FC = ({ className }) => { }} disabled={buttonDisabled} data-testid="publish-button" - icon={showWarningIcon ? : undefined} + {...(showWarningIcon && { type: "warningOutline" })} type="button" > void }> = ({ onClose }) => id={currentProject.sourceDefinitionId ? 
"connectorBuilder.releaseNewVersion" : "connectorBuilder.publish"} /> } - onClose={onClose} + onCancel={onClose} > @@ -153,7 +153,7 @@ export const PublishModal: React.FC<{ onClose: () => void }> = ({ onClose }) => id={currentProject.sourceDefinitionId ? "connectorBuilder.releaseNewVersion" : "connectorBuilder.publish"} /> } - onClose={onClose} + onCancel={onClose} > diff --git a/airbyte-webapp/src/components/connectorBuilder/SavingIndicator.tsx b/airbyte-webapp/src/components/connectorBuilder/SavingIndicator.tsx index 4fc72e05029..354e7bcedf9 100644 --- a/airbyte-webapp/src/components/connectorBuilder/SavingIndicator.tsx +++ b/airbyte-webapp/src/components/connectorBuilder/SavingIndicator.tsx @@ -128,7 +128,7 @@ export const SavingIndicator: React.FC = () => { onClick={() => { setChangeInProgress(true); }} - icon={} + icon="chevronDown" iconPosition="right" > {message} diff --git a/airbyte-webapp/src/components/connectorBuilder/SchemaConflictIndicator.tsx b/airbyte-webapp/src/components/connectorBuilder/SchemaConflictIndicator.tsx index 54fa125e769..c2a48c4b11a 100644 --- a/airbyte-webapp/src/components/connectorBuilder/SchemaConflictIndicator.tsx +++ b/airbyte-webapp/src/components/connectorBuilder/SchemaConflictIndicator.tsx @@ -11,6 +11,7 @@ export const SchemaConflictIndicator: React.FC<{ errors?: string[] }> = ({ error control={ > = ({ className, yamlSelected, children }) => { - const { toggleUI } = useConnectorBuilderFormState(); + const analyticsService = useAnalyticsService(); + const { toggleUI, isResolving } = useConnectorBuilderFormState(); const formValues = useBuilderWatch("formValues"); const showSavingIndicator = yamlSelected || formValues.streams.length > 0; - const OnUiToggleClick = () => toggleUI(yamlSelected ? "ui" : "yaml"); + const OnUiToggleClick = () => { + toggleUI(yamlSelected ? 
"ui" : "yaml"); + analyticsService.track(Namespace.CONNECTOR_BUILDER, Action.TOGGLE_UI_YAML, { + actionDescription: "User clicked the UI | YAML toggle button", + current_view: yamlSelected ? "yaml" : "ui", + new_view: yamlSelected ? "ui" : "yaml", + }); + }; return ( - + : undefined + } + /> diff --git a/airbyte-webapp/src/components/connectorBuilder/StreamTestingPanel/PageDisplay.tsx b/airbyte-webapp/src/components/connectorBuilder/StreamTestingPanel/PageDisplay.tsx index 76b016085bb..09e29314580 100644 --- a/airbyte-webapp/src/components/connectorBuilder/StreamTestingPanel/PageDisplay.tsx +++ b/airbyte-webapp/src/components/connectorBuilder/StreamTestingPanel/PageDisplay.tsx @@ -88,7 +88,7 @@ export const PageDisplay: React.FC = ({ page, className, infer { key: "schema", title: ( - + {mode === "ui" && schemaDifferences && !autoImportSchema && ( diff --git a/airbyte-webapp/src/components/connectorBuilder/StreamTestingPanel/RecordTable.tsx b/airbyte-webapp/src/components/connectorBuilder/StreamTestingPanel/RecordTable.tsx index 1a9f9c5ccd0..4a9642a4dfc 100644 --- a/airbyte-webapp/src/components/connectorBuilder/StreamTestingPanel/RecordTable.tsx +++ b/airbyte-webapp/src/components/connectorBuilder/StreamTestingPanel/RecordTable.tsx @@ -52,7 +52,7 @@ export const RecordTable = React.memo(({ records }: { records: StreamReadSlicesI }} /> {modalValue !== undefined && ( - setModalValue(undefined)} title={modalValue.key}> + setModalValue(undefined)} title={modalValue.key}>
    {toString(modalValue.value, 2)}
    diff --git a/airbyte-webapp/src/components/connectorBuilder/StreamTestingPanel/SchemaDiffView.tsx b/airbyte-webapp/src/components/connectorBuilder/StreamTestingPanel/SchemaDiffView.tsx index 26a0d9733c7..e4a8f27385e 100644 --- a/airbyte-webapp/src/components/connectorBuilder/StreamTestingPanel/SchemaDiffView.tsx +++ b/airbyte-webapp/src/components/connectorBuilder/StreamTestingPanel/SchemaDiffView.tsx @@ -14,7 +14,7 @@ import { Tooltip } from "components/ui/Tooltip"; import { StreamReadInferredSchema } from "core/api/types/ConnectorBuilderClient"; import { Action, Namespace, useAnalyticsService } from "core/services/analytics"; -import { useConnectorBuilderTestRead } from "services/connectorBuilder/ConnectorBuilderStateService"; +import { useConnectorBuilderFormState } from "services/connectorBuilder/ConnectorBuilderStateService"; import styles from "./SchemaDiffView.module.scss"; import { SchemaConflictMessage } from "../SchemaConflictMessage"; @@ -63,9 +63,7 @@ function getDiff(existingSchema: string | undefined, detectedSchema: object): Di export const SchemaDiffView: React.FC = ({ inferredSchema, incompatibleErrors }) => { const analyticsService = useAnalyticsService(); - const { - resolvedManifest: { streams }, - } = useConnectorBuilderTestRead(); + const { streamNames } = useConnectorBuilderFormState(); const mode = useBuilderWatch("mode"); const testStreamIndex = useBuilderWatch("testStreamIndex"); const { setValue } = useFormContext(); @@ -104,7 +102,7 @@ export const SchemaDiffView: React.FC = ({ inferredSchema, setValue(path, formattedSchema); analyticsService.track(Namespace.CONNECTOR_BUILDER, Action.OVERWRITE_SCHEMA, { actionDescription: "Declared schema overwritten by detected schema", - stream_name: streams[testStreamIndex]?.name, + stream_name: streamNames[testStreamIndex], }); }} > @@ -129,7 +127,7 @@ export const SchemaDiffView: React.FC = ({ inferredSchema, setValue(path, schemaDiff.mergedSchema); 
analyticsService.track(Namespace.CONNECTOR_BUILDER, Action.MERGE_SCHEMA, { actionDescription: "Detected and Declared schemas merged to update declared schema", - stream_name: streams[testStreamIndex]?.name, + stream_name: streamNames[testStreamIndex], }); }} > @@ -155,7 +153,7 @@ export const SchemaDiffView: React.FC = ({ inferredSchema, setValue(path, formattedSchema); analyticsService.track(Namespace.CONNECTOR_BUILDER, Action.OVERWRITE_SCHEMA, { actionDescription: "Declared schema overwritten by detected schema", - stream_name: streams[testStreamIndex]?.name, + stream_name: streamNames[testStreamIndex], }); }} data-testid="accept-schema" diff --git a/airbyte-webapp/src/components/connectorBuilder/StreamTestingPanel/StreamSelector.tsx b/airbyte-webapp/src/components/connectorBuilder/StreamTestingPanel/StreamSelector.tsx index b1465bb1128..91001b9a3f2 100644 --- a/airbyte-webapp/src/components/connectorBuilder/StreamTestingPanel/StreamSelector.tsx +++ b/airbyte-webapp/src/components/connectorBuilder/StreamTestingPanel/StreamSelector.tsx @@ -9,7 +9,7 @@ import { Icon } from "components/ui/Icon"; import { ListBox, ListBoxControlButtonProps } from "components/ui/ListBox"; import { Action, Namespace, useAnalyticsService } from "core/services/analytics"; -import { useConnectorBuilderTestRead } from "services/connectorBuilder/ConnectorBuilderStateService"; +import { useConnectorBuilderFormState } from "services/connectorBuilder/ConnectorBuilderStateService"; import styles from "./StreamSelector.module.scss"; import { useBuilderWatch } from "../types"; @@ -38,11 +38,9 @@ export const StreamSelector: React.FC = ({ className }) => const view = useBuilderWatch("view"); const testStreamIndex = useBuilderWatch("testStreamIndex"); - const { - resolvedManifest: { streams }, - } = useConnectorBuilderTestRead(); + const { streamNames } = useConnectorBuilderFormState(); - if (streams.length === 0) { + if (streamNames.length === 0) { return ( @@ -52,14 +50,14 @@ export const 
StreamSelector: React.FC = ({ className }) => ); } - const options = streams.map((stream) => { + const options = streamNames.map((streamName) => { const label = - stream.name && stream.name.trim() ? capitalize(stream.name) : formatMessage({ id: "connectorBuilder.emptyName" }); - return { label, value: stream.name ?? "" }; + streamName && streamName.trim() ? capitalize(streamName) : formatMessage({ id: "connectorBuilder.emptyName" }); + return { label, value: streamName ?? "" }; }); const handleStreamSelect = (selectedStreamName: string) => { - const selectedStreamIndex = streams.findIndex((stream) => selectedStreamName === stream.name); + const selectedStreamIndex = streamNames.findIndex((streamName) => selectedStreamName === streamName); if (selectedStreamIndex >= 0) { setValue("testStreamIndex", selectedStreamIndex); @@ -77,7 +75,7 @@ export const StreamSelector: React.FC = ({ className }) => = ({ let showWarningIcon = false; let tooltipContent = undefined; - if (isResolving && mode === "yaml") { + if (isResolving) { buttonDisabled = true; tooltipContent = ; } @@ -76,15 +75,7 @@ export const StreamTestButton: React.FC = ({ disabled={buttonDisabled} type="button" data-testid="read-stream" - icon={ - showWarningIcon ? ( - - ) : ( -
    - -
    - ) - } + icon={showWarningIcon ? "warningOutline" : "rotate"} > diff --git a/airbyte-webapp/src/components/connectorBuilder/StreamTestingPanel/StreamTester.tsx b/airbyte-webapp/src/components/connectorBuilder/StreamTestingPanel/StreamTester.tsx index 78ca980845f..4f8bc7dcb13 100644 --- a/airbyte-webapp/src/components/connectorBuilder/StreamTestingPanel/StreamTester.tsx +++ b/airbyte-webapp/src/components/connectorBuilder/StreamTestingPanel/StreamTester.tsx @@ -12,10 +12,14 @@ import { ResizablePanels } from "components/ui/ResizablePanels"; import { Spinner } from "components/ui/Spinner"; import { Text } from "components/ui/Text"; +import { HttpError } from "core/api"; import { Action, Namespace, useAnalyticsService } from "core/services/analytics"; import { links } from "core/utils/links"; import { useLocalStorage } from "core/utils/useLocalStorage"; -import { useConnectorBuilderTestRead } from "services/connectorBuilder/ConnectorBuilderStateService"; +import { + useConnectorBuilderFormState, + useConnectorBuilderTestRead, +} from "services/connectorBuilder/ConnectorBuilderStateService"; import { GlobalRequestsDisplay } from "./GlobalRequestsDisplay"; import { LogsDisplay } from "./LogsDisplay"; @@ -32,11 +36,8 @@ export const StreamTester: React.FC<{ setTestingValuesInputOpen: (open: boolean) => void; }> = ({ hasTestingValuesErrors, setTestingValuesInputOpen }) => { const { formatMessage } = useIntl(); + const { streamNames, isResolving, resolveErrorMessage, resolveError } = useConnectorBuilderFormState(); const { - resolvedManifest, - isResolving, - resolveErrorMessage, - resolveError, streamRead: { data: streamReadData, refetch: readStream, @@ -56,8 +57,7 @@ export const StreamTester: React.FC<{ const auxiliaryRequests = streamReadData?.auxiliary_requests; const autoImportSchema = useAutoImportSchema(testStreamIndex); - const resolvedStreams = resolvedManifest.streams; - const streamName = resolvedStreams[testStreamIndex]?.name; + const streamName = 
streamNames[testStreamIndex]; const analyticsService = useAnalyticsService(); @@ -65,12 +65,12 @@ export const StreamTester: React.FC<{ const unknownErrorMessage = formatMessage({ id: "connectorBuilder.unknownError" }); const errorMessage = isError - ? error instanceof Error - ? error.message || unknownErrorMessage + ? error instanceof HttpError + ? error.response?.message || unknownErrorMessage : unknownErrorMessage : undefined; - const errorExceptionStack = resolveError?.payload?.exceptionStack; + const errorExceptionStack = resolveError?.response?.exceptionStack; const [errorLogs, nonErrorLogs] = useMemo( () => @@ -119,10 +119,9 @@ export const StreamTester: React.FC<{ const testDataWarnings = useTestWarnings(); - const currentStream = resolvedStreams[testStreamIndex]; return (
    - {!currentStream && isResolving && ( + {streamName === undefined && isResolving && ( @@ -187,7 +186,7 @@ export const StreamTester: React.FC<{ )} {!isFetching && streamReadData && streamReadData.test_read_limit_reached && showLimitWarning && ( = ({ isOpen, setIsOpe data-testid="test-read-settings" disabled={isFetching} onClick={() => setIsOpen(true)} - icon={} + icon="gear" /> } > @@ -102,7 +101,7 @@ const TestReadLimitsModal: React.FC & Tes return ( setIsOpen(false)} + onCancel={() => setIsOpen(false)} title={formatMessage({ id: "connectorBuilder.testReadSettings.modalTitle" })} > = ({ testingVal disabled={ isFetching || !spec || Object.keys(spec.connection_specification?.properties || {}).length === 0 } - icon={} + icon="user" + iconClassName={styles.icon} > @@ -89,7 +89,7 @@ export const TestingValuesMenu: React.FC = ({ testingVal {isOpen && spec && ( setIsOpen(false)} + onCancel={() => setIsOpen(false)} title={} > @@ -101,12 +101,11 @@ export const TestingValuesMenu: React.FC = ({ testingVal {showInputsWarning && ( { setShowInputsWarning(false); }} - text={} + text={} /> )} {permission === "adminReadOnly" && ( diff --git a/airbyte-webapp/src/components/connectorBuilder/UiYamlToggleButton.module.scss b/airbyte-webapp/src/components/connectorBuilder/UiYamlToggleButton.module.scss index 1d3169d70c7..60cca39bdf5 100644 --- a/airbyte-webapp/src/components/connectorBuilder/UiYamlToggleButton.module.scss +++ b/airbyte-webapp/src/components/connectorBuilder/UiYamlToggleButton.module.scss @@ -6,15 +6,28 @@ cursor: pointer; border: variables.$border-thin solid colors.$dark-blue; background-color: colors.$dark-blue; - border-radius: variables.$border-radius-sm; padding: 0; overflow: hidden; display: grid; grid-template: 1fr / 1fr 1fr; align-self: center; - width: builderVariables.$toggleButtonWidth; flex-basis: auto; flex-shrink: 0; + + &.xs { + width: auto; + border-radius: variables.$border-radius-xs; + } + + &.sm { + width: builderVariables.$toggleButtonWidth; + 
border-radius: variables.$border-radius-sm; + } + + &:disabled { + pointer-events: none; + opacity: 0.25; + } } .text { @@ -24,7 +37,17 @@ align-items: center; justify-content: center; font-weight: 700; - padding: 4px 8px; + font-size: 9px; + + &.xs { + font-size: variables.$font-size-xs; + padding: 3px 5px; + } + + &.sm { + font-size: 11px; + padding: 4px 8px; + } } .selected { @@ -36,3 +59,8 @@ background-color: colors.$foreground; color: colors.$dark-blue; } + +.tooltipContainer { + width: fit-content; + align-self: center; +} diff --git a/airbyte-webapp/src/components/connectorBuilder/UiYamlToggleButton.tsx b/airbyte-webapp/src/components/connectorBuilder/UiYamlToggleButton.tsx index ae7c70ba5c0..4308190968c 100644 --- a/airbyte-webapp/src/components/connectorBuilder/UiYamlToggleButton.tsx +++ b/airbyte-webapp/src/components/connectorBuilder/UiYamlToggleButton.tsx @@ -1,9 +1,9 @@ import classnames from "classnames"; +import { ReactNode } from "react"; import { FormattedMessage } from "react-intl"; import { Text } from "components/ui/Text"; - -import { Action, Namespace, useAnalyticsService } from "core/services/analytics"; +import { Tooltip } from "components/ui/Tooltip"; import styles from "./UiYamlToggleButton.module.scss"; @@ -11,44 +11,59 @@ interface UiYamlToggleButtonProps { className?: string; yamlSelected: boolean; onClick: () => void; + size: "xs" | "sm"; + disabled?: boolean; + tooltip?: ReactNode; } -export const UiYamlToggleButton: React.FC = ({ className, yamlSelected, onClick }) => { - const analyticsService = useAnalyticsService(); +export const UiYamlToggleButton: React.FC = ({ + className, + yamlSelected, + onClick, + size, + disabled, + tooltip, +}) => { + const sizeStyles = { + [styles.xs]: size === "xs", + [styles.sm]: size === "sm", + }; - return ( + const toggleButton = ( ); + + return tooltip ? 
( + + {tooltip} + + ) : ( + toggleButton + ); }; diff --git a/airbyte-webapp/src/components/connectorBuilder/VersionModal.tsx b/airbyte-webapp/src/components/connectorBuilder/VersionModal.tsx index 7bc9d0fb6fd..d4f26ec0d26 100644 --- a/airbyte-webapp/src/components/connectorBuilder/VersionModal.tsx +++ b/airbyte-webapp/src/components/connectorBuilder/VersionModal.tsx @@ -36,7 +36,7 @@ export const VersionModal: React.FC<{ }, [data, isLoading, onClose, selectedVersion, setDisplayedVersion]); return ( - } onClose={onClose}> + } onCancel={onClose}> {isLoadingVersionList ? ( diff --git a/airbyte-webapp/src/components/connectorBuilder/YamlEditor/YamlEditor.tsx b/airbyte-webapp/src/components/connectorBuilder/YamlEditor/YamlEditor.tsx index 406405a754d..c838c69a72f 100644 --- a/airbyte-webapp/src/components/connectorBuilder/YamlEditor/YamlEditor.tsx +++ b/airbyte-webapp/src/components/connectorBuilder/YamlEditor/YamlEditor.tsx @@ -1,42 +1,43 @@ import { useMonaco } from "@monaco-editor/react"; -import { load, YAMLException } from "js-yaml"; -import debounce from "lodash/debounce"; +import { load, Mark, YAMLException } from "js-yaml"; import { editor } from "monaco-editor/esm/vs/editor/editor.api"; -import React, { useMemo, useRef } from "react"; -import { useFormContext } from "react-hook-form"; +import React, { useRef, useCallback } from "react"; import { useUpdateEffect } from "react-use"; import { CodeEditor } from "components/ui/CodeEditor"; -import { ConnectorManifest } from "core/api/types/ConnectorManifest"; -import { useConnectorBuilderFormState } from "services/connectorBuilder/ConnectorBuilderStateService"; +interface YamlEditorProps { + value: string; + onChange: (value: string | undefined) => void; + onSuccessfulLoad?: (json: unknown, yaml: string) => void; + onYamlException?: (e: YAMLException) => void; + onMount?: (editor: editor.IStandaloneCodeEditor) => void; + lineNumberCharacterWidth?: number; + paddingTop?: boolean; +} -import styles from 
"./YamlEditor.module.scss"; -import { Sidebar } from "../Sidebar"; -import { useBuilderWatch } from "../types"; - -export const YamlEditor: React.FC = () => { +export const YamlEditor: React.FC = ({ + value, + onChange, + onSuccessfulLoad, + onYamlException, + onMount, + lineNumberCharacterWidth, + paddingTop, +}) => { const yamlEditorRef = useRef(); - const { setYamlEditorIsMounted, setYamlIsValid, updateJsonManifest } = useConnectorBuilderFormState(); - const { setValue } = useFormContext(); - const yamlValue = useBuilderWatch("yaml"); - - // debounce the setJsonManifest calls so that it doesnt result in a network call for every keystroke - const debouncedUpdateJsonManifest = useMemo(() => debounce(updateJsonManifest, 200), [updateJsonManifest]); - const monaco = useMonaco(); const monacoRef = useRef(monaco); monacoRef.current = monaco; - useUpdateEffect(() => { - if (monacoRef.current && yamlEditorRef.current && yamlValue) { + const validateAndSetMarkers = useCallback(() => { + if (monacoRef.current && yamlEditorRef.current && value) { const errOwner = "yaml"; const yamlEditorModel = yamlEditorRef.current.getModel(); try { - const json = load(yamlValue) as ConnectorManifest; - setYamlIsValid(true); - debouncedUpdateJsonManifest(json); + const json = load(value); + onSuccessfulLoad?.(json, value); // clear editor error markers if (yamlEditorModel) { @@ -44,11 +45,11 @@ export const YamlEditor: React.FC = () => { } } catch (err) { if (err instanceof YAMLException) { - setYamlIsValid(false); - const mark = err.mark; + onYamlException?.(err); + const mark: Mark | undefined = err.mark; // set editor error markers - if (yamlEditorModel) { + if (yamlEditorModel && mark) { monacoRef.current.editor.setModelMarkers(yamlEditorModel, errOwner, [ { startLineNumber: mark.line + 1, @@ -63,25 +64,25 @@ export const YamlEditor: React.FC = () => { } } } - }, [yamlValue, debouncedUpdateJsonManifest, setYamlIsValid]); + }, [onSuccessfulLoad, onYamlException, value]); + + 
useUpdateEffect(() => { + validateAndSetMarkers(); + }, [validateAndSetMarkers]); return ( -
    - -
    - setValue("yaml", value ?? "")} - lineNumberCharacterWidth={6} - onMount={(editor) => { - setYamlEditorIsMounted(true); - yamlEditorRef.current = editor; - }} - paddingTop - /> -
    -
    + { + yamlEditorRef.current = editor; + validateAndSetMarkers(); + onMount?.(editor); + }} + lineNumberCharacterWidth={lineNumberCharacterWidth} + paddingTop={paddingTop} + /> ); }; diff --git a/airbyte-webapp/src/components/connectorBuilder/YamlEditor/YamlEditor.module.scss b/airbyte-webapp/src/components/connectorBuilder/YamlEditor/YamlManifestEditor.module.scss similarity index 100% rename from airbyte-webapp/src/components/connectorBuilder/YamlEditor/YamlEditor.module.scss rename to airbyte-webapp/src/components/connectorBuilder/YamlEditor/YamlManifestEditor.module.scss diff --git a/airbyte-webapp/src/components/connectorBuilder/YamlEditor/YamlManifestEditor.tsx b/airbyte-webapp/src/components/connectorBuilder/YamlEditor/YamlManifestEditor.tsx new file mode 100644 index 00000000000..3a664021d17 --- /dev/null +++ b/airbyte-webapp/src/components/connectorBuilder/YamlEditor/YamlManifestEditor.tsx @@ -0,0 +1,41 @@ +import debounce from "lodash/debounce"; +import React, { useMemo } from "react"; +import { useFormContext } from "react-hook-form"; + +import { ConnectorManifest } from "core/api/types/ConnectorManifest"; +import { useConnectorBuilderFormState } from "services/connectorBuilder/ConnectorBuilderStateService"; + +import { YamlEditor } from "./YamlEditor"; +import styles from "./YamlManifestEditor.module.scss"; +import { Sidebar } from "../Sidebar"; +import { useBuilderWatch } from "../types"; + +export const YamlManifestEditor: React.FC = () => { + const { setYamlEditorIsMounted, setYamlIsValid, updateJsonManifest } = useConnectorBuilderFormState(); + const { setValue } = useFormContext(); + const yamlManifestValue = useBuilderWatch("yaml"); + // debounce the setJsonManifest calls so that it doesnt result in a network call for every keystroke + const debouncedUpdateJsonManifest = useMemo(() => debounce(updateJsonManifest, 200), [updateJsonManifest]); + + return ( +
    + +
    + setValue("yaml", value ?? "")} + onSuccessfulLoad={(json: unknown) => { + setYamlIsValid(true); + debouncedUpdateJsonManifest(json as ConnectorManifest); + }} + onYamlException={(_) => setYamlIsValid(false)} + onMount={(_) => { + setYamlEditorIsMounted(true); + }} + lineNumberCharacterWidth={6} + paddingTop + /> +
    +
    + ); +}; diff --git a/airbyte-webapp/src/components/connectorBuilder/YamlEditor/index.tsx b/airbyte-webapp/src/components/connectorBuilder/YamlEditor/index.tsx index 69762885758..978f91e7929 100644 --- a/airbyte-webapp/src/components/connectorBuilder/YamlEditor/index.tsx +++ b/airbyte-webapp/src/components/connectorBuilder/YamlEditor/index.tsx @@ -1 +1,2 @@ export * from "./YamlEditor"; +export * from "./YamlManifestEditor"; diff --git a/airbyte-webapp/src/components/connectorBuilder/convertManifestToBuilderForm.ts b/airbyte-webapp/src/components/connectorBuilder/convertManifestToBuilderForm.ts index 66eb9d9d1af..543a288704d 100644 --- a/airbyte-webapp/src/components/connectorBuilder/convertManifestToBuilderForm.ts +++ b/airbyte-webapp/src/components/connectorBuilder/convertManifestToBuilderForm.ts @@ -1,6 +1,11 @@ +import { dump } from "js-yaml"; import cloneDeep from "lodash/cloneDeep"; +import get from "lodash/get"; +import isArray from "lodash/isArray"; import isEqual from "lodash/isEqual"; +import isString from "lodash/isString"; import pick from "lodash/pick"; +import { match } from "ts-pattern"; import { ConnectorManifest, @@ -24,21 +29,21 @@ import { BearerAuthenticator, OAuthAuthenticator, DefaultPaginator, - CursorPagination, DeclarativeComponentSchemaMetadata, HttpRequesterErrorHandler, NoAuth, SessionTokenAuthenticator, + DatetimeBasedCursorType, } from "core/api/types/ConnectorManifest"; import { removeEmptyProperties } from "core/utils/form"; import { API_KEY_AUTHENTICATOR, - authTypeToKeyToInferredInput, BASIC_AUTHENTICATOR, BEARER_AUTHENTICATOR, + BuilderErrorHandler, BuilderFormAuthenticator, - BuilderFormValues, + BuilderFormInput, BuilderIncrementalSync, BuilderPaginator, BuilderRequestBody, @@ -47,18 +52,21 @@ import { DEFAULT_BUILDER_FORM_VALUES, DEFAULT_BUILDER_STREAM_VALUES, extractInterpolatedConfigKey, - getInferredAuthValue, - hasIncrementalSyncUserInput, INCREMENTAL_SYNC_USER_INPUT_DATE_FORMAT, - incrementalSyncInferredInputs, + 
interpolateConfigKey, isInterpolatedConfigKey, NO_AUTH, - OAUTH_ACCESS_TOKEN_INPUT, OAUTH_AUTHENTICATOR, - OAUTH_TOKEN_EXPIRY_DATE_INPUT, RequestOptionOrPathInject, SESSION_TOKEN_AUTHENTICATOR, + YamlString, + YamlSupportedComponentName, } from "./types"; +import { + getKeyToDesiredLockedInput, + LOCKED_INPUT_BY_FIELD_NAME_BY_AUTH_TYPE, + LOCKED_INPUT_BY_INCREMENTAL_FIELD_NAME, +} from "./useLockedInputs"; import { formatJson } from "./utils"; import { AirbyteJSONSchema } from "../../core/jsonSchema/types"; @@ -68,15 +76,7 @@ export const convertToBuilderFormValuesSync = (resolvedManifest: ConnectorManife const streams = resolvedManifest.streams; if (streams === undefined || streams.length === 0) { - const { inputs, inferredInputOverrides, inputOrder } = manifestSpecAndAuthToBuilder( - resolvedManifest.spec, - undefined, - undefined - ); - builderFormValues.inputs = inputs; - builderFormValues.inferredInputOverrides = inferredInputOverrides; - builderFormValues.inputOrder = inputOrder; - + builderFormValues.inputs = manifestSpecToBuilderInputs(resolvedManifest.spec, { type: NO_AUTH }, []); return removeEmptyProperties(builderFormValues); } @@ -92,19 +92,20 @@ export const convertToBuilderFormValuesSync = (resolvedManifest: ConnectorManife serializedStreamToIndex, streams[0].retriever.requester.url_base, streams[0].retriever.requester.authenticator, - resolvedManifest.metadata + resolvedManifest.metadata, + resolvedManifest.spec ) ); - const { inputs, inferredInputOverrides, auth, inputOrder } = manifestSpecAndAuthToBuilder( - resolvedManifest.spec, + builderFormValues.global.authenticator = manifestAuthenticatorToBuilder( streams[0].retriever.requester.authenticator, + resolvedManifest.spec + ); + builderFormValues.inputs = manifestSpecToBuilderInputs( + resolvedManifest.spec, + builderFormValues.global.authenticator, builderFormValues.streams ); - builderFormValues.inputs = inputs; - builderFormValues.inferredInputOverrides = inferredInputOverrides; - 
builderFormValues.global.authenticator = auth; - builderFormValues.inputOrder = inputOrder; return removeEmptyProperties(builderFormValues); }; @@ -147,7 +148,8 @@ const manifestStreamToBuilder = ( serializedStreamToIndex: Record, firstStreamUrlBase: string, firstStreamAuthenticator?: HttpRequesterAuthenticator, - metadata?: DeclarativeComponentSchemaMetadata + metadata?: DeclarativeComponentSchemaMetadata, + spec?: Spec ): BuilderStream => { assertType(stream.retriever, "SimpleRetriever", stream.name); const retriever = stream.retriever; @@ -185,7 +187,7 @@ const manifestStreamToBuilder = ( return { ...DEFAULT_BUILDER_STREAM_VALUES, id: index.toString(), - name: stream.name ?? "", + name: stream.name ?? `stream_${index}`, urlPath: requester.path, httpMethod: requester.http_method === "POST" ? "POST" : "GET", fieldPointer: retriever.record_selector.extractor.field_path as string[], @@ -195,13 +197,38 @@ const manifestStreamToBuilder = ( requestBody: requesterToRequestBody(requester), }, primaryKey: manifestPrimaryKeyToBuilder(stream), - paginator: manifestPaginatorToBuilder(retriever.paginator, stream.name), - incrementalSync: manifestIncrementalSyncToBuilder(stream.incremental_sync, stream.name), + paginator: convertOrDumpAsString( + retriever.paginator, + manifestPaginatorToBuilder, + "paginator", + stream.name, + metadata + ), + incrementalSync: convertOrDumpAsString( + stream.incremental_sync, + manifestIncrementalSyncToBuilder, + "incrementalSync", + stream.name, + metadata, + spec + ), parentStreams, parameterizedRequests, schema: manifestSchemaLoaderToBuilderSchema(stream.schema_loader), - errorHandler: manifestErrorHandlerToBuilder(stream.name, requester.error_handler), - transformations: manifestTransformationsToBuilder(stream.name, stream.transformations), + errorHandler: convertOrDumpAsString( + requester.error_handler, + manifestErrorHandlerToBuilder, + "errorHandler", + stream.name, + metadata + ), + transformations: convertOrDumpAsString( + 
stream.transformations, + manifestTransformationsToBuilder, + "transformations", + stream.name, + metadata + ), unsupportedFields: { retriever: { record_selector: { @@ -217,22 +244,21 @@ function requesterToRequestBody(requester: HttpRequester): BuilderRequestBody { if (requester.request_body_data && typeof requester.request_body_data === "object") { return { type: "form_list", values: Object.entries(requester.request_body_data) }; } - if (requester.request_body_data && typeof requester.request_body_data === "string") { + if (requester.request_body_data && isString(requester.request_body_data)) { return { type: "string_freeform", value: requester.request_body_data }; } if (!requester.request_body_json) { return { type: "json_list", values: [] }; } - const allStringValues = Object.values(requester.request_body_json).every((value) => typeof value === "string"); + const allStringValues = Object.values(requester.request_body_json).every((value) => isString(value)); if (allStringValues) { return { type: "json_list", values: Object.entries(requester.request_body_json) }; } return { type: "json_freeform", - value: - typeof requester.request_body_json === "string" - ? requester.request_body_json - : formatJson(requester.request_body_json), + value: isString(requester.request_body_json) + ? requester.request_body_json + : formatJson(requester.request_body_json), }; } @@ -275,16 +301,15 @@ function manifestPartitionRouterToBuilder( parameterizedRequests: [ { ...partitionRouter, - values: - typeof partitionRouter.values === "string" - ? { - value: partitionRouter.values, - type: "variable" as const, - } - : { - value: partitionRouter.values, - type: "list" as const, - }, + values: isString(partitionRouter.values) + ? 
{ + value: partitionRouter.values, + type: "variable" as const, + } + : { + value: partitionRouter.values, + type: "list" as const, + }, }, ], }; @@ -322,24 +347,37 @@ function manifestPartitionRouterToBuilder( throw new ManifestCompatibilityError(streamName, "partition_router type is unsupported"); } -function manifestErrorHandlerToBuilder( - streamName: string | undefined, - errorHandler: HttpRequesterErrorHandler | undefined -): BuilderStream["errorHandler"] { +export function manifestErrorHandlerToBuilder( + errorHandler: HttpRequesterErrorHandler | undefined, + streamName?: string +): BuilderErrorHandler[] | undefined { if (!errorHandler) { return undefined; } - const handlers = errorHandler.type === "CompositeErrorHandler" ? errorHandler.error_handlers : [errorHandler]; - if (handlers.some((handler) => handler.type === "CustomErrorHandler")) { - throw new ManifestCompatibilityError(streamName, "custom error handler used"); - } - if (handlers.some((handler) => handler.type === "CompositeErrorHandler")) { - throw new ManifestCompatibilityError(streamName, "nested composite error handler used"); - } + const handlers: HttpRequesterErrorHandler[] = + errorHandler.type === "CompositeErrorHandler" ? 
errorHandler.error_handlers : [errorHandler]; + + handlers.forEach((handler) => { + match(handler.type) + .with("DefaultErrorHandler", () => {}) + .with("CustomErrorHandler", () => { + throw new ManifestCompatibilityError(streamName, "custom error handler used"); + }) + .with("CompositeErrorHandler", () => { + throw new ManifestCompatibilityError(streamName, "nested composite error handler used"); + }) + .otherwise(() => { + throw new ManifestCompatibilityError( + streamName, + "error handler type is unsupported; only CompositeErrorHandler and DefaultErrorHandler are supported" + ); + }); + }); + const defaultHandlers = handlers as DefaultErrorHandler[]; return defaultHandlers.map((handler) => { if (handler.backoff_strategies && handler.backoff_strategies.length > 1) { - throw new ManifestCompatibilityError(streamName, "more than one backoff strategy"); + throw new ManifestCompatibilityError(streamName, "more than one backoff strategy per handler"); } const backoffStrategy = handler.backoff_strategies?.[0]; if (backoffStrategy?.type === "CustomBackoffStrategy") { @@ -375,9 +413,9 @@ function manifestPrimaryKeyToBuilder(manifestStream: DeclarativeStream): Builder } } -function manifestTransformationsToBuilder( - name: string | undefined, - transformations: DeclarativeStreamTransformationsItem[] | undefined +export function manifestTransformationsToBuilder( + transformations: DeclarativeStreamTransformationsItem[] | undefined, + streamName?: string ): BuilderTransformation[] | undefined { if (!transformations) { return undefined; @@ -386,7 +424,7 @@ function manifestTransformationsToBuilder( transformations.forEach((transformation) => { if (transformation.type === "CustomTransformation") { - throw new ManifestCompatibilityError(name, "custom transformation used"); + throw new ManifestCompatibilityError(streamName, "custom transformation used"); } if (transformation.type === "AddFields") { builderTransformations.push( @@ -409,25 +447,26 @@ function 
manifestTransformationsToBuilder( } function getFormat( - format: DatetimeBasedCursorStartDatetime | DatetimeBasedCursorEndDatetime, + manifestCursorDatetime: DatetimeBasedCursorStartDatetime | DatetimeBasedCursorEndDatetime, manifestIncrementalSync: DeclarativeStreamIncrementalSync ) { - if (typeof format === "string" || !format.datetime_format) { + if (isString(manifestCursorDatetime) || !manifestCursorDatetime.datetime_format) { return manifestIncrementalSync.datetime_format; } - return format.datetime_format; + return manifestCursorDatetime.datetime_format; } function isFormatSupported( - format: DatetimeBasedCursorStartDatetime | DatetimeBasedCursorEndDatetime, + manifestCursorDatetime: DatetimeBasedCursorStartDatetime | DatetimeBasedCursorEndDatetime, manifestIncrementalSync: DeclarativeStreamIncrementalSync ) { - return getFormat(format, manifestIncrementalSync) === INCREMENTAL_SYNC_USER_INPUT_DATE_FORMAT; + return getFormat(manifestCursorDatetime, manifestIncrementalSync) === INCREMENTAL_SYNC_USER_INPUT_DATE_FORMAT; } -function manifestIncrementalSyncToBuilder( +export function manifestIncrementalSyncToBuilder( manifestIncrementalSync: DeclarativeStreamIncrementalSync | undefined, - streamName?: string + streamName?: string, + spec?: Spec ): BuilderStream["incrementalSync"] | undefined { if (!manifestIncrementalSync) { return undefined; @@ -435,6 +474,7 @@ function manifestIncrementalSyncToBuilder( if (manifestIncrementalSync.type === "CustomIncrementalSync") { throw new ManifestCompatibilityError(streamName, "incremental sync uses a custom implementation"); } + assertType(manifestIncrementalSync, "DatetimeBasedCursor", streamName); if (manifestIncrementalSync.partition_field_start || manifestIncrementalSync.partition_field_end) { throw new ManifestCompatibilityError( @@ -458,30 +498,44 @@ function manifestIncrementalSyncToBuilder( ...regularFields } = manifestIncrementalSync; + if ( + (manifestStartDateTime && + typeof manifestStartDateTime !== "string" && 
+ (manifestStartDateTime.max_datetime || manifestStartDateTime.min_datetime)) || + (manifestEndDateTime && + typeof manifestEndDateTime !== "string" && + (manifestEndDateTime.max_datetime || manifestEndDateTime.min_datetime)) + ) { + throw new ManifestCompatibilityError( + streamName, + "DatetimeBasedCursor max_datetime and min_datetime are not supported" + ); + } + let start_datetime: BuilderIncrementalSync["start_datetime"] = { type: "custom", - value: typeof manifestStartDateTime === "string" ? manifestStartDateTime : manifestStartDateTime.datetime, + value: isString(manifestStartDateTime) ? manifestStartDateTime : manifestStartDateTime.datetime, format: getFormat(manifestStartDateTime, manifestIncrementalSync), }; let end_datetime: BuilderIncrementalSync["end_datetime"] = { type: "custom", - value: typeof manifestEndDateTime === "string" ? manifestEndDateTime : manifestEndDateTime?.datetime || "", + value: isString(manifestEndDateTime) ? manifestEndDateTime : manifestEndDateTime?.datetime || "", format: manifestEndDateTime ? 
getFormat(manifestEndDateTime, manifestIncrementalSync) : undefined, }; - if ( - start_datetime.value === "{{ config['start_date'] }}" && - isFormatSupported(manifestStartDateTime, manifestIncrementalSync) - ) { - start_datetime = { type: "user_input" }; + const startDateSpecKey = tryExtractAndValidateIncrementalKey( + ["start_datetime"], + start_datetime.value, + spec, + streamName + ); + if (startDateSpecKey && isFormatSupported(manifestStartDateTime, manifestIncrementalSync)) { + start_datetime = { type: "user_input", value: interpolateConfigKey(startDateSpecKey) }; } - if ( - end_datetime.value === "{{ config['end_date'] }}" && - manifestEndDateTime && - isFormatSupported(manifestEndDateTime, manifestIncrementalSync) - ) { - end_datetime = { type: "user_input" }; + const endDateSpecKey = tryExtractAndValidateIncrementalKey(["end_datetime"], end_datetime.value, spec, streamName); + if (manifestEndDateTime && endDateSpecKey && isFormatSupported(manifestEndDateTime, manifestIncrementalSync)) { + end_datetime = { type: "user_input", value: interpolateConfigKey(endDateSpecKey) }; } else if ( !manifestEndDateTime || end_datetime.value === `{{ now_utc().strftime('${INCREMENTAL_SYNC_USER_INPUT_DATE_FORMAT}') }}` @@ -533,7 +587,12 @@ function manifestPaginatorStrategyToBuilder( if (strategy.type === "OffsetIncrement" || strategy.type === "PageIncrement") { return strategy; } - const { cursor_value, stop_condition, ...rest } = strategy as CursorPagination; + + if (strategy.type !== "CursorPagination") { + throw new ManifestCompatibilityError(undefined, "paginator.pagination_strategy uses an unsupported type"); + } + + const { cursor_value, stop_condition, ...rest } = strategy; const path = safeJinjaAccessToPath(cursor_value, stop_condition || ""); @@ -549,13 +608,14 @@ function manifestPaginatorStrategyToBuilder( }; } -function manifestPaginatorToBuilder( +export function manifestPaginatorToBuilder( manifestPaginator: SimpleRetrieverPaginator | undefined, - streamName: 
string | undefined + streamName?: string ): BuilderPaginator | undefined { if (manifestPaginator === undefined || manifestPaginator.type === "NoPagination") { return undefined; } + assertType(manifestPaginator, "DefaultPaginator", streamName); if (manifestPaginator.pagination_strategy.type === "CustomPaginationStrategy") { throw new ManifestCompatibilityError(streamName, "paginator.pagination_strategy uses a CustomPaginationStrategy"); @@ -604,7 +664,7 @@ function removeTrailingSlashes(path: string) { return path.replace(/\/+$/, ""); } -type SupportedAuthenticators = +type SupportedAuthenticator = | ApiKeyAuthenticator | BasicHttpAuthenticator | BearerAuthenticator @@ -612,7 +672,7 @@ type SupportedAuthenticators = | NoAuth | SessionTokenAuthenticator; -function isSupportedAuthenticator(authenticator: HttpRequesterAuthenticator): authenticator is SupportedAuthenticators { +function isSupportedAuthenticator(authenticator: HttpRequesterAuthenticator): authenticator is SupportedAuthenticator { const supportedAuthTypes: string[] = [ NO_AUTH, API_KEY_AUTHENTICATOR, @@ -625,154 +685,190 @@ function isSupportedAuthenticator(authenticator: HttpRequesterAuthenticator): au } function manifestAuthenticatorToBuilder( - manifestAuthenticator: HttpRequesterAuthenticator | undefined, + authenticator: HttpRequesterAuthenticator | undefined, + spec: Spec | undefined, streamName?: string ): BuilderFormAuthenticator { - let builderAuthenticator: BuilderFormAuthenticator; - if (manifestAuthenticator === undefined) { - builderAuthenticator = { - type: "NoAuth", + if (authenticator === undefined) { + return { + type: NO_AUTH, }; - } else if (manifestAuthenticator.type === undefined) { + } else if (authenticator.type === undefined) { throw new ManifestCompatibilityError(streamName, "Authenticator has no type"); - } else if (!isSupportedAuthenticator(manifestAuthenticator)) { - throw new ManifestCompatibilityError(streamName, `Unsupported authenticator type: 
${manifestAuthenticator.type}`); - } else if (manifestAuthenticator.type === "ApiKeyAuthenticator") { - builderAuthenticator = { - ...manifestAuthenticator, - inject_into: manifestAuthenticator.inject_into ?? { - type: "RequestOption", - field_name: manifestAuthenticator.header || "", - inject_into: "header", - }, - }; - } else if (manifestAuthenticator.type === "OAuthAuthenticator") { - if ( - Object.values(manifestAuthenticator.refresh_request_body ?? {}).filter((value) => typeof value !== "string") - .length > 0 - ) { - throw new ManifestCompatibilityError( - streamName, - "OAuthAuthenticator contains a refresh_request_body with non-string values" - ); + } else if (!isSupportedAuthenticator(authenticator)) { + throw new ManifestCompatibilityError(streamName, `Unsupported authenticator type: ${authenticator.type}`); + } + + switch (authenticator.type) { + case NO_AUTH: { + return { + type: NO_AUTH, + }; + } + + case API_KEY_AUTHENTICATOR: { + return { + ...authenticator, + inject_into: authenticator.inject_into ?? 
{ + type: "RequestOption", + field_name: authenticator.header || "", + inject_into: "header", + }, + api_token: interpolateConfigKey(extractAndValidateAuthKey(["api_token"], authenticator, spec, streamName)), + }; + } + + case BEARER_AUTHENTICATOR: { + return { + ...authenticator, + api_token: interpolateConfigKey(extractAndValidateAuthKey(["api_token"], authenticator, spec, streamName)), + }; + } + + case BASIC_AUTHENTICATOR: { + return { + ...authenticator, + username: interpolateConfigKey(extractAndValidateAuthKey(["username"], authenticator, spec, streamName)), + password: interpolateConfigKey(extractAndValidateAuthKey(["password"], authenticator, spec, streamName)), + }; } - const refreshTokenUpdater = manifestAuthenticator.refresh_token_updater; - if (refreshTokenUpdater) { - if (!isEqual(refreshTokenUpdater?.access_token_config_path, [OAUTH_ACCESS_TOKEN_INPUT])) { + case OAUTH_AUTHENTICATOR: { + if ( + Object.values(authenticator.refresh_request_body ?? {}).filter((value) => typeof value !== "string").length > 0 + ) { throw new ManifestCompatibilityError( streamName, - `OAuthAuthenticator access token config path needs to be [${OAUTH_ACCESS_TOKEN_INPUT}]` + "OAuthAuthenticator contains a refresh_request_body with non-string values" ); } - if (!isEqual(refreshTokenUpdater?.token_expiry_date_config_path, [OAUTH_TOKEN_EXPIRY_DATE_INPUT])) { + if ( + authenticator.grant_type && + authenticator.grant_type !== "refresh_token" && + authenticator.grant_type !== "client_credentials" + ) { throw new ManifestCompatibilityError( streamName, - `OAuthAuthenticator token expiry date config path needs to be [${OAUTH_TOKEN_EXPIRY_DATE_INPUT}]` + "OAuthAuthenticator sets custom grant_type, but it must be one of 'refresh_token' or 'client_credentials'" ); } + + let builderAuthenticator: BuilderFormAuthenticator = { + ...authenticator, + refresh_request_body: Object.entries(authenticator.refresh_request_body ?? {}), + grant_type: authenticator.grant_type ?? 
"refresh_token", + refresh_token_updater: undefined, + client_id: interpolateConfigKey(extractAndValidateAuthKey(["client_id"], authenticator, spec, streamName)), + client_secret: interpolateConfigKey( + extractAndValidateAuthKey(["client_secret"], authenticator, spec, streamName) + ), + }; + + if (!authenticator.grant_type || authenticator.grant_type === "refresh_token") { + const refreshTokenSpecKey = extractAndValidateAuthKey(["refresh_token"], authenticator, spec, streamName); + builderAuthenticator = { + ...builderAuthenticator, + refresh_token: interpolateConfigKey(refreshTokenSpecKey), + }; + + if (authenticator.refresh_token_updater) { + if (!isEqual(authenticator.refresh_token_updater?.refresh_token_config_path, [refreshTokenSpecKey])) { + throw new ManifestCompatibilityError( + streamName, + "OAuthAuthenticator refresh_token_config_path needs to match the config path used for refresh_token" + ); + } + const { + access_token_config_path, + token_expiry_date_config_path, + refresh_token_config_path, + ...refresh_token_updater + } = authenticator.refresh_token_updater; + builderAuthenticator = { + ...builderAuthenticator, + refresh_token_updater: { + ...refresh_token_updater, + access_token: interpolateConfigKey( + extractAndValidateAuthKey( + ["refresh_token_updater", "access_token_config_path"], + authenticator, + spec, + streamName + ) + ), + token_expiry_date: interpolateConfigKey( + extractAndValidateAuthKey( + ["refresh_token_updater", "token_expiry_date_config_path"], + authenticator, + spec, + streamName + ) + ), + }, + }; + } + } + + return builderAuthenticator; + } + + case SESSION_TOKEN_AUTHENTICATOR: { + const manifestLoginRequester = authenticator.login_requester; if ( - !isEqual(refreshTokenUpdater?.refresh_token_config_path, [ - extractInterpolatedConfigKey(manifestAuthenticator.refresh_token), - ]) + manifestLoginRequester.authenticator && + manifestLoginRequester.authenticator?.type !== NO_AUTH && + manifestLoginRequester.authenticator?.type 
!== API_KEY_AUTHENTICATOR && + manifestLoginRequester.authenticator?.type !== BEARER_AUTHENTICATOR && + manifestLoginRequester.authenticator?.type !== BASIC_AUTHENTICATOR ) { throw new ManifestCompatibilityError( streamName, - "OAuthAuthenticator refresh_token_config_path needs to match the config value used for refresh_token" + `SessionTokenAuthenticator login_requester.authenticator must have one of the following types: ${NO_AUTH}, ${API_KEY_AUTHENTICATOR}, ${BEARER_AUTHENTICATOR}, ${BASIC_AUTHENTICATOR}` ); } - } - if ( - manifestAuthenticator.grant_type && - manifestAuthenticator.grant_type !== "refresh_token" && - manifestAuthenticator.grant_type !== "client_credentials" - ) { - throw new ManifestCompatibilityError(streamName, "OAuthAuthenticator sets custom grant_type"); - } - - builderAuthenticator = { - ...manifestAuthenticator, - refresh_request_body: Object.entries(manifestAuthenticator.refresh_request_body ?? {}), - grant_type: manifestAuthenticator.grant_type ?? "refresh_token", - }; - } else if (manifestAuthenticator.type === "SessionTokenAuthenticator") { - const manifestLoginRequester = manifestAuthenticator.login_requester; - if ( - manifestLoginRequester.authenticator && - manifestLoginRequester.authenticator?.type !== NO_AUTH && - manifestLoginRequester.authenticator?.type !== API_KEY_AUTHENTICATOR && - manifestLoginRequester.authenticator?.type !== BEARER_AUTHENTICATOR && - manifestLoginRequester.authenticator?.type !== BASIC_AUTHENTICATOR - ) { - throw new ManifestCompatibilityError( - streamName, - `SessionTokenAuthenticator login_requester.authenticator must have one of the following types: ${NO_AUTH}, ${API_KEY_AUTHENTICATOR}, ${BEARER_AUTHENTICATOR}, ${BASIC_AUTHENTICATOR}` + const builderLoginRequesterAuthenticator = manifestAuthenticatorToBuilder( + manifestLoginRequester.authenticator, + spec, + streamName ); - } - builderAuthenticator = { - ...manifestAuthenticator, - login_requester: { - url: 
`${removeTrailingSlashes(manifestLoginRequester.url_base)}/${removeLeadingSlashes( - manifestLoginRequester.path - )}`, - authenticator: manifestLoginRequester.authenticator ?? { type: NO_AUTH }, - httpMethod: manifestLoginRequester.http_method === "GET" ? "GET" : "POST", - requestOptions: { - requestParameters: Object.entries(manifestLoginRequester.request_parameters ?? {}), - requestHeaders: Object.entries(manifestLoginRequester.request_headers ?? {}), - requestBody: requesterToRequestBody(manifestLoginRequester), - }, - errorHandler: manifestErrorHandlerToBuilder(undefined, manifestLoginRequester.error_handler), - }, - }; - } else { - builderAuthenticator = manifestAuthenticator; - } - - // verify that all auth keys which require a user input have a {{config[]}} value - const inferredInputs = authTypeToKeyToInferredInput(builderAuthenticator); - const userInputAuthKeys = Object.keys(inferredInputs); - - for (const userInputAuthKey of userInputAuthKeys) { - if ( - !inferredInputs[userInputAuthKey].as_config_path && - !isInterpolatedConfigKey(getInferredAuthValue(builderAuthenticator, userInputAuthKey)) - ) { - throw new ManifestCompatibilityError( - undefined, - `Authenticator's ${userInputAuthKey} value must be of the form {{ config['key'] }}` - ); + return { + ...authenticator, + login_requester: { + url: `${removeTrailingSlashes(manifestLoginRequester.url_base)}/${removeLeadingSlashes( + manifestLoginRequester.path + )}`, + authenticator: builderLoginRequesterAuthenticator as + | ApiKeyAuthenticator + | BearerAuthenticator + | BasicHttpAuthenticator, + httpMethod: manifestLoginRequester.http_method === "GET" ? "GET" : "POST", + requestOptions: { + requestParameters: Object.entries(manifestLoginRequester.request_parameters ?? {}), + requestHeaders: Object.entries(manifestLoginRequester.request_headers ?? 
{}), + requestBody: requesterToRequestBody(manifestLoginRequester), + }, + errorHandler: manifestErrorHandlerToBuilder(manifestLoginRequester.error_handler), + }, + }; } } - - return builderAuthenticator; } -function manifestSpecAndAuthToBuilder( +function manifestSpecToBuilderInputs( manifestSpec: Spec | undefined, - manifestAuthenticator: HttpRequesterAuthenticator | undefined, - streams: BuilderStream[] | undefined + authenticator: BuilderFormAuthenticator, + streams: BuilderStream[] ) { - const result: { - inputs: BuilderFormValues["inputs"]; - inferredInputOverrides: BuilderFormValues["inferredInputOverrides"]; - auth: BuilderFormAuthenticator; - inputOrder: string[]; - } = { - inputs: [], - inferredInputOverrides: {}, - auth: manifestAuthenticatorToBuilder(manifestAuthenticator), - inputOrder: [], - }; - if (manifestSpec === undefined) { - return result; + return []; } + const lockedInputKeys = Object.keys(getKeyToDesiredLockedInput(authenticator, streams)); + const required = manifestSpec.connection_specification.required as string[] | undefined; - Object.entries(manifestSpec.connection_specification.properties as Record) + return Object.entries(manifestSpec.connection_specification.properties as Record) .sort(([_keyA, valueA], [_keyB, valueB]) => { if (valueA.order !== undefined && valueB.order !== undefined) { return valueA.order - valueB.order; @@ -785,37 +881,14 @@ function manifestSpecAndAuthToBuilder( } return 0; }) - .forEach(([specKey, specDefinition]) => { - const matchingInferredInput = getMatchingInferredInput(result.auth, streams, specKey); - if (matchingInferredInput) { - result.inferredInputOverrides[matchingInferredInput.key] = specDefinition; - } else { - result.inputs.push({ - key: specKey, - definition: specDefinition, - required: required?.includes(specKey) || false, - }); - } - if (specDefinition.order !== undefined) { - result.inputOrder.push(specKey); - } + .map(([specKey, specDefinition]) => { + return { + key: specKey, + definition: 
specDefinition, + required: required?.includes(specKey) || false, + isLocked: lockedInputKeys.includes(specKey), + }; }); - - return result; -} - -function getMatchingInferredInput( - auth: BuilderFormAuthenticator, - streams: BuilderStream[] | undefined, - specKey: string -) { - if (streams && specKey === "start_date" && hasIncrementalSyncUserInput(streams, "start_datetime")) { - return incrementalSyncInferredInputs.start_date; - } - if (streams && specKey === "end_date" && hasIncrementalSyncUserInput(streams, "end_datetime")) { - return incrementalSyncInferredInputs.end_date; - } - return Object.values(authTypeToKeyToInferredInput(auth)).find((input) => input.key === specKey); } function assertType( @@ -844,3 +917,136 @@ export class ManifestCompatibilityError extends Error { export function isManifestCompatibilityError(error: { __type?: string }): error is ManifestCompatibilityError { return error.__type === "connectorBuilder.manifestCompatibility"; } + +function convertOrDumpAsString( + manifestValue: ManifestInput, + convertFn: (manifestValue: ManifestInput, streamName?: string, spec?: Spec) => BuilderOutput | undefined, + componentName: YamlSupportedComponentName, + streamName?: string | undefined, + metadata?: DeclarativeComponentSchemaMetadata, + spec?: Spec +): BuilderOutput | YamlString | undefined { + if (streamName && metadata?.yamlComponents?.streams?.[streamName]?.includes(componentName)) { + return dump(manifestValue); + } + + try { + return convertFn(manifestValue, streamName, spec); + } catch (e) { + if (isManifestCompatibilityError(e)) { + return dump(manifestValue); + } + throw e; + } +} + +const extractAndValidateAuthKey = ( + path: string[], + authenticator: SupportedAuthenticator, + manifestSpec: Spec | undefined, + streamName?: string +) => { + return extractAndValidateSpecKey( + path, + get(authenticator, path), + get(LOCKED_INPUT_BY_FIELD_NAME_BY_AUTH_TYPE[authenticator.type], path), + authenticator.type, + manifestSpec, + streamName + ); 
+}; + +const tryExtractAndValidateIncrementalKey = ( + path: string[], + value: string, + manifestSpec: Spec | undefined, + streamName?: string +) => { + try { + return extractAndValidateSpecKey( + path, + value, + get(LOCKED_INPUT_BY_INCREMENTAL_FIELD_NAME, path), + DatetimeBasedCursorType.DatetimeBasedCursor, + manifestSpec, + streamName + ); + } catch (e) { + if (isManifestCompatibilityError(e)) { + // if the manifest value doesn't point to the expected input in the spec, just treat it as custom + return undefined; + } + throw e; + } +}; + +const extractAndValidateSpecKey = ( + path: string[], + value: string | string[] | undefined, + lockedInput: BuilderFormInput, + componentName: string, + spec: Spec | undefined, + streamName?: string +): string => { + const manifestPath = `${componentName}.${path.join(".")}`; + + let specKey: string | undefined = undefined; + if (isArray(value)) { + if (value.length < 1) { + throw new ManifestCompatibilityError( + streamName, + `${manifestPath} has an empty path, but a non-empty path is required.` + ); + } + if (value.length > 1) { + throw new ManifestCompatibilityError( + streamName, + `${manifestPath} points to a nested config path, but only top-level config fields are supported.` + ); + } + [specKey] = value; + } + if (isString(value)) { + if (!isInterpolatedConfigKey(value)) { + throw new ManifestCompatibilityError(streamName, `${manifestPath} must be of the form {{ config["key"] }}`); + } + specKey = extractInterpolatedConfigKey(value); + } + if (!specKey) { + throw new ManifestCompatibilityError(streamName, `${manifestPath} must point to a config field`); + } + + const specDefinition = specKey ? 
spec?.connection_specification?.properties?.[specKey] : undefined; + if (!specDefinition) { + throw new ManifestCompatibilityError( + streamName, + `${manifestPath} references spec key "${specKey}", which must appear in the spec` + ); + } + if (lockedInput.required && !spec?.connection_specification?.required?.includes(specKey)) { + throw new ManifestCompatibilityError( + streamName, + `${manifestPath} references spec key "${specKey}", which must be required in the spec` + ); + } + if (specDefinition.type !== "string") { + throw new ManifestCompatibilityError( + streamName, + `${manifestPath} references spec key "${specKey}", which must be of type string` + ); + } + if (lockedInput.definition.airbyte_secret && !specDefinition.airbyte_secret) { + throw new ManifestCompatibilityError( + streamName, + `${manifestPath} references spec key "${specKey}", which must have airbyte_secret set to true` + ); + } + if (lockedInput.definition.pattern && specDefinition.pattern !== lockedInput.definition.pattern) { + throw new ManifestCompatibilityError( + streamName, + `${manifestPath} references spec key "${specKey}", which must have pattern "${lockedInput.definition.pattern}"` + ); + } + + return specKey; +}; diff --git a/airbyte-webapp/src/components/connectorBuilder/types.ts b/airbyte-webapp/src/components/connectorBuilder/types.ts index ca90003eea9..26929412309 100644 --- a/airbyte-webapp/src/components/connectorBuilder/types.ts +++ b/airbyte-webapp/src/components/connectorBuilder/types.ts @@ -1,7 +1,10 @@ +import { load } from "js-yaml"; import { JSONSchema7 } from "json-schema"; +import isString from "lodash/isString"; import merge from "lodash/merge"; import { FieldPath, useWatch } from "react-hook-form"; import semver from "semver"; +import { match } from "ts-pattern"; import * as yup from "yup"; import { MixedSchema } from "yup/lib/mixed"; @@ -17,7 +20,6 @@ import { RequestOption, OAuthAuthenticator, HttpRequesterAuthenticator, - DeclarativeStreamSchemaLoader, 
PageIncrement, OffsetIncrement, CursorPagination, @@ -41,15 +43,15 @@ import { DeclarativeStreamTransformationsItem, HttpResponseFilter, DefaultPaginator, - DeclarativeComponentSchemaMetadata, SessionTokenAuthenticator, SessionTokenAuthenticatorType, SessionTokenRequestApiKeyAuthenticatorType, SessionTokenRequestBearerAuthenticatorType, RequestOptionInjectInto, NoAuthType, + HttpRequester, + OAuthAuthenticatorRefreshTokenUpdater, } from "core/api/types/ConnectorManifest"; -import { naturalComparator } from "core/utils/objects"; import { CDK_VERSION } from "./cdk"; import { formatJson } from "./utils"; @@ -70,7 +72,7 @@ export interface BuilderFormInput { key: string; required: boolean; definition: AirbyteJSONSchema; - as_config_path?: boolean; + isLocked?: boolean; } type BuilderHttpMethod = "GET" | "POST"; @@ -91,16 +93,27 @@ export type BuilderSessionTokenAuthenticator = Omit & { - refresh_request_body: Array<[string, string]>; - }) + | BuilderFormOAuthAuthenticator | ApiKeyAuthenticator | BearerAuthenticator | BasicHttpAuthenticator - | BuilderSessionTokenAuthenticator -) & { type: string }; + | BuilderSessionTokenAuthenticator; + +export type BuilderFormOAuthAuthenticator = Omit< + OAuthAuthenticator, + "refresh_request_body" | "refresh_token_updater" +> & { + refresh_request_body: Array<[string, string]>; + refresh_token_updater?: Omit< + OAuthAuthenticatorRefreshTokenUpdater, + "access_token_config_path" | "token_expiry_date_config_path" | "refresh_token_config_path" + > & { + access_token: string; + token_expiry_date: string; + }; +}; export interface BuilderFormValues { global: { @@ -108,8 +121,6 @@ export interface BuilderFormValues { authenticator: BuilderFormAuthenticator; }; inputs: BuilderFormInput[]; - inferredInputOverrides: Record>; - inputOrder: string[]; streams: BuilderStream[]; checkStreams: string[]; version: string; @@ -171,12 +182,14 @@ export interface BuilderIncrementalSync end_datetime: | { type: "user_input"; + value: string; } | { type: 
"now" } | { type: "custom"; value: string; format?: string }; start_datetime: | { type: "user_input"; + value: string; } | { type: "custom"; value: string; format?: string }; slicer?: { @@ -208,6 +221,9 @@ export type BuilderRequestBody = value: string; }; +export type YamlString = string; +export const isYamlString = (value: unknown): value is YamlString => isString(value); + export interface BuilderStream { id: string; name: string; @@ -216,17 +232,58 @@ export interface BuilderStream { primaryKey: string[]; httpMethod: BuilderHttpMethod; requestOptions: BuilderRequestOptions; - paginator?: BuilderPaginator; - transformations?: BuilderTransformation[]; - incrementalSync?: BuilderIncrementalSync; + paginator?: BuilderPaginator | YamlString; + transformations?: BuilderTransformation[] | YamlString; + incrementalSync?: BuilderIncrementalSync | YamlString; parentStreams?: BuilderParentStream[]; parameterizedRequests?: BuilderParameterizedRequests[]; - errorHandler?: BuilderErrorHandler[]; + errorHandler?: BuilderErrorHandler[] | YamlString; schema?: string; unsupportedFields?: Record; autoImportSchema: boolean; } +type StreamName = string; +// todo: add more component names to this type as more components support in YAML +export type YamlSupportedComponentName = "paginator" | "errorHandler" | "transformations" | "incrementalSync"; + +export interface BuilderMetadata { + autoImportSchema: Record; + yamlComponents?: { + streams: Record; + }; +} + +export type ManifestValuePerComponentPerStream = Record>; +export const getManifestValuePerComponentPerStream = ( + manifest: ConnectorManifest +): ManifestValuePerComponentPerStream => { + if (manifest.metadata === undefined) { + return {}; + } + const metadata = manifest.metadata as BuilderMetadata; + if (metadata?.yamlComponents?.streams === undefined) { + return {}; + } + return Object.fromEntries( + Object.entries(metadata?.yamlComponents?.streams).map(([streamName, yamlComponentNames]) => { + // this method is only 
called in UI mode, so we can assume full streams are found in definitions + const stream = manifest.definitions?.streams?.[streamName]; + const manifestValuePerComponent = Object.fromEntries( + yamlComponentNames.map((yamlComponentName) => + match(yamlComponentName) + .with("paginator", (name) => [name, stream?.retriever?.paginator]) + .with("errorHandler", (name) => [name, stream?.retriever?.requester?.error_handler]) + .with("transformations", (name) => [name, stream?.transformations]) + .with("incrementalSync", (name) => [name, stream?.incremental_sync]) + .otherwise(() => []) + ) + ); + return [streamName, manifestValuePerComponent]; + }) + ); +}; + // 0.29.0 is the version where breaking changes got introduced - older states can't be supported export const OLDEST_SUPPORTED_CDK_VERSION = "0.29.0"; @@ -256,8 +313,14 @@ export const SMALL_DURATION_OPTIONS = [ export const DATETIME_FORMAT_OPTIONS = [ { value: "%Y-%m-%d" }, { value: "%Y-%m-%d %H:%M:%S" }, - { value: "%Y-%m-%d %H:%M:%S.%f+00:00" }, + { value: "%Y-%m-%dT%H:%M:%S" }, + { value: "%Y-%m-%dT%H:%M:%SZ" }, + { value: "%Y-%m-%dT%H:%M:%S%z" }, + { value: "%Y-%m-%dT%H:%M:%S.%fZ" }, { value: "%Y-%m-%dT%H:%M:%S.%f%z" }, + { value: "%Y-%m-%d %H:%M:%S.%f+00:00" }, + { value: "%s" }, + { value: "%ms" }, ]; export const DEFAULT_BUILDER_FORM_VALUES: BuilderFormValues = { @@ -266,8 +329,6 @@ export const DEFAULT_BUILDER_FORM_VALUES: BuilderFormValues = { authenticator: { type: "NoAuth" }, }, inputs: [], - inferredInputOverrides: {}, - inputOrder: [], streams: [], checkStreams: [], version: CDK_VERSION, @@ -321,235 +382,39 @@ export const CURSOR_PAGINATION: CursorPaginationType = "CursorPagination"; export const OFFSET_INCREMENT: OffsetIncrementType = "OffsetIncrement"; export const PAGE_INCREMENT: PageIncrementType = "PageIncrement"; -export const incrementalSyncInferredInputs: Record<"start_date" | "end_date", BuilderFormInput> = { - start_date: { - key: "start_date", - required: true, - definition: { - type: 
"string", - title: "Start date", - format: "date-time", - pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$", - }, - }, - end_date: { - key: "end_date", - required: true, - definition: { - type: "string", - title: "End date", - format: "date-time", - pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$", - }, - }, -}; - -export const DEFAULT_INFERRED_INPUT_ORDER = [ - "api_key", - "username", - "password", - "client_id", - "client_secret", - "client_refresh_token", -]; - -export const authTypeToKeyToInferredInput = ( - authenticator: BuilderFormAuthenticator | { type: BuilderFormAuthenticator["type"] } -): Record => { - switch (authenticator.type) { - case "NoAuth": - return {}; - case API_KEY_AUTHENTICATOR: - return { - api_token: { - key: "api_key", - required: true, - definition: { - type: "string", - title: "API Key", - airbyte_secret: true, - }, - }, - }; - case BEARER_AUTHENTICATOR: - return { - api_token: { - key: "api_key", - required: true, - definition: { - type: "string", - title: "API Key", - airbyte_secret: true, - }, - }, - }; - case BASIC_AUTHENTICATOR: - return { - username: { - key: "username", - required: true, - definition: { - type: "string", - title: "Username", - }, - }, - password: { - key: "password", - required: false, - definition: { - type: "string", - title: "Password", - always_show: true, - airbyte_secret: true, - }, - }, - }; - case OAUTH_AUTHENTICATOR: - const baseInputs: Record = { - client_id: { - key: "client_id", - required: true, - definition: { - type: "string", - title: "Client ID", - airbyte_secret: true, - }, - }, - client_secret: { - key: "client_secret", - required: true, - definition: { - type: "string", - title: "Client secret", - airbyte_secret: true, - }, - }, - }; - if (!("grant_type" in authenticator) || authenticator.grant_type === "refresh_token") { - baseInputs.refresh_token = { - key: "client_refresh_token", - required: true, - definition: { - type: "string", - title: "Refresh 
token", - airbyte_secret: true, - }, - }; - if ("refresh_token_updater" in authenticator && authenticator.refresh_token_updater) { - baseInputs.oauth_access_token = { - key: "oauth_access_token", - required: false, - definition: { - type: "string", - title: "Access token", - airbyte_secret: true, - description: - "The current access token. This field might be overridden by the connector based on the token refresh endpoint response.", - }, - as_config_path: true, - }; - baseInputs.oauth_token_expiry_date = { - key: "oauth_token_expiry_date", - required: false, - definition: { - type: "string", - title: "Token expiry date", - format: "date-time", - description: - "The date the current access token expires in. This field might be overridden by the connector based on the token refresh endpoint response.", - }, - as_config_path: true, - }; - } - } - return baseInputs; - case SESSION_TOKEN_AUTHENTICATOR: - if ("login_requester" in authenticator && "type" in authenticator.login_requester.authenticator) { - return authTypeToKeyToInferredInput(authenticator.login_requester.authenticator); - } - return {}; - } -}; - export const OAUTH_ACCESS_TOKEN_INPUT = "oauth_access_token"; export const OAUTH_TOKEN_EXPIRY_DATE_INPUT = "oauth_token_expiry_date"; -export const inferredAuthValues = (type: BuilderFormAuthenticator["type"]): Record => { - return Object.fromEntries( - Object.entries(authTypeToKeyToInferredInput({ type })).map(([authKey, inferredInput]) => { - return [authKey, interpolateConfigKey(inferredInput.key)]; - }) - ); -}; - export function hasIncrementalSyncUserInput( streams: BuilderFormValues["streams"], key: "start_datetime" | "end_datetime" ) { return streams.some( (stream) => - stream.incrementalSync?.[key].type === "user_input" && + !isYamlString(stream.incrementalSync) && + stream.incrementalSync?.[key]?.type === "user_input" && (key === "start_datetime" || stream.incrementalSync?.filter_mode === "range") ); } -export const getInferredAuthValue = (authenticator: 
BuilderFormAuthenticator, authKey: string) => { - if (authenticator.type === "SessionTokenAuthenticator") { - return Reflect.get(authenticator.login_requester.authenticator, authKey); - } - return Reflect.get(authenticator, authKey); -}; - -export function getInferredInputList( - authenticator: BuilderFormAuthenticator, - inferredInputOverrides: BuilderFormValues["inferredInputOverrides"], - startDateInput: boolean, - endDateInput: boolean -): BuilderFormInput[] { - const authKeyToInferredInput = authTypeToKeyToInferredInput(authenticator); - const authKeys = Object.keys(authKeyToInferredInput); - const inputs = authKeys.flatMap((authKey) => { - if ( - authKeyToInferredInput[authKey].as_config_path || - extractInterpolatedConfigKey(getInferredAuthValue(authenticator, authKey)) === authKeyToInferredInput[authKey].key - ) { - return [authKeyToInferredInput[authKey]]; - } - return []; - }); - - if (startDateInput) { - inputs.push(incrementalSyncInferredInputs.start_date); - } - - if (endDateInput) { - inputs.push(incrementalSyncInferredInputs.end_date); - } - - return inputs.map((input) => - inferredInputOverrides[input.key] - ? { - ...input, - definition: { ...input.definition, ...inferredInputOverrides[input.key] }, - } - : input - ); +export function interpolateConfigKey(key: string): string; +export function interpolateConfigKey(key: string | undefined): string | undefined; +export function interpolateConfigKey(key: string | undefined): string | undefined { + return key ? 
`{{ config["${key}"] }}` : undefined; } -const interpolateConfigKey = (key: string): string => { - return `{{ config['${key}'] }}`; -}; - -const interpolatedConfigValueRegex = /^{{config(\.(.+)|\[('|"+)(.+)('|"+)\])}}$/; +const interpolatedConfigValueRegexBracket = /^\s*{{\s*config\[('|")+(\S+)('|")+\]\s*}}\s*$/; +const interpolatedConfigValueRegexDot = /^\s*{{\s*config\.(\S+)\s*}}\s*$/; export function isInterpolatedConfigKey(str: string | undefined): boolean { if (str === undefined) { return false; } - const noWhitespaceString = str.replace(/\s/g, ""); - return interpolatedConfigValueRegex.test(noWhitespaceString); + return interpolatedConfigValueRegexBracket.test(str) || interpolatedConfigValueRegexDot.test(str); } +export function extractInterpolatedConfigKey(str: string): string; +export function extractInterpolatedConfigKey(str: string | undefined): string | undefined; export function extractInterpolatedConfigKey(str: string | undefined): string | undefined { /** * This methods does not work for nested configs like `config["credentials"]["client_secret"]` as the interpolated config key would be @@ -558,14 +423,15 @@ export function extractInterpolatedConfigKey(str: string | undefined): string | if (str === undefined) { return undefined; } - const noWhitespaceString = str.replace(/\s/g, ""); - const regexResult = interpolatedConfigValueRegex.exec(noWhitespaceString); - if (regexResult === null) { - return undefined; - } else if (regexResult.length > 2) { - return regexResult[4]; + const regexBracketResult = interpolatedConfigValueRegexBracket.exec(str); + if (regexBracketResult === null) { + const regexDotResult = interpolatedConfigValueRegexDot.exec(str); + if (regexDotResult === null) { + return undefined; + } + return regexDotResult[1]; } - return regexResult[2]; + return regexBracketResult[2]; } const INTERPOLATION_PATTERN = /^\{\{.+\}\}$/; @@ -766,6 +632,25 @@ export const globalSchema = yup.object().shape({ authenticator: authenticatorSchema, }); +const 
maybeYamlSchema = (schema: yup.BaseSchema) => { + return yup.lazy((val) => + isYamlString(val) + ? // eslint-disable-next-line no-template-curly-in-string + yup.string().test("is-valid-yaml", "${path} is not valid YAML", (value) => { + if (!value) { + return true; + } + try { + load(value); + return true; + } catch { + return false; + } + }) + : schema + ); +}; + export const streamSchema = yup.object().shape({ name: yup.string().required(REQUIRED_ERROR), urlPath: yup.string().required(REQUIRED_ERROR), @@ -774,61 +659,63 @@ export const streamSchema = yup.object().shape({ httpMethod: httpMethodSchema, requestOptions: requestOptionsSchema, schema: jsonString, - paginator: yup - .object() - .shape({ - pageSizeOption: yup.mixed().when("strategy.page_size", { - is: (val: number) => Boolean(val), - then: nonPathRequestOptionSchema, - otherwise: strip, - }), - pageTokenOption: yup - .object() - .shape({ - inject_into: yup.mixed().oneOf(injectIntoOptions.map((option) => option.value)), - field_name: yup.mixed().when("inject_into", { - is: "path", - then: strip, - otherwise: yup.string().required(REQUIRED_ERROR), - }), - }) - .notRequired() - .default(undefined), - strategy: yup - .object({ - page_size: yupNumberOrEmptyString, - cursor: yup.mixed().when("type", { - is: CURSOR_PAGINATION, - then: yup.object().shape({ - cursor_value: yup.mixed().when("type", { - is: "custom", - then: yup.string().required(REQUIRED_ERROR), - otherwise: strip, - }), - stop_condition: yup.mixed().when("type", { - is: "custom", - then: yup.string(), - otherwise: strip, - }), - path: yup.mixed().when("type", { - is: (val: string) => val !== "custom", - then: yup.array().of(yup.string()).min(1, REQUIRED_ERROR), - otherwise: strip, + paginator: maybeYamlSchema( + yup + .object() + .shape({ + pageSizeOption: yup.mixed().when("strategy.page_size", { + is: (val: number) => Boolean(val), + then: nonPathRequestOptionSchema, + otherwise: strip, + }), + pageTokenOption: yup + .object() + .shape({ + 
inject_into: yup.mixed().oneOf(injectIntoOptions.map((option) => option.value)), + field_name: yup.mixed().when("inject_into", { + is: "path", + then: strip, + otherwise: yup.string().required(REQUIRED_ERROR), + }), + }) + .notRequired() + .default(undefined), + strategy: yup + .object({ + page_size: yupNumberOrEmptyString, + cursor: yup.mixed().when("type", { + is: CURSOR_PAGINATION, + then: yup.object().shape({ + cursor_value: yup.mixed().when("type", { + is: "custom", + then: yup.string().required(REQUIRED_ERROR), + otherwise: strip, + }), + stop_condition: yup.mixed().when("type", { + is: "custom", + then: yup.string(), + otherwise: strip, + }), + path: yup.mixed().when("type", { + is: (val: string) => val !== "custom", + then: yup.array().of(yup.string()).min(1, REQUIRED_ERROR), + otherwise: strip, + }), }), + otherwise: strip, }), - otherwise: strip, - }), - start_from_page: yup.mixed().when("type", { - is: PAGE_INCREMENT, - then: yupNumberOrEmptyString, - otherwise: strip, - }), - }) - .notRequired() - .default(undefined), - }) - .notRequired() - .default(undefined), + start_from_page: yup.mixed().when("type", { + is: PAGE_INCREMENT, + then: yupNumberOrEmptyString, + otherwise: strip, + }), + }) + .notRequired() + .default(undefined), + }) + .notRequired() + .default(undefined) + ), parentStreams: yup .array( yup.object().shape({ @@ -856,59 +743,63 @@ export const streamSchema = yup.object().shape({ ) .notRequired() .default(undefined), - transformations: yup - .array( - yup.object().shape({ - path: yup.array(yup.string()).min(1, REQUIRED_ERROR), - value: yup.mixed().when("type", { - is: (val: string) => val === "add", - then: yup.string().required(REQUIRED_ERROR), - otherwise: strip, - }), - }) - ) - .notRequired() - .default(undefined), - errorHandler: errorHandlerSchema, - incrementalSync: yup - .object() - .shape({ - cursor_field: yup.string().required(REQUIRED_ERROR), - slicer: schemaIfNotDataFeed( - yup - .object() - .shape({ - cursor_granularity: 
yup.string().required(REQUIRED_ERROR), - step: yup.string().required(REQUIRED_ERROR), - }) - .default(undefined) - ), - start_datetime: yup.object().shape({ - value: yup.mixed().when("type", { - is: (val: string) => val === "custom", - then: yup.string().required(REQUIRED_ERROR), - otherwise: strip, - }), - }), - end_datetime: schemaIfRangeFilter( + transformations: maybeYamlSchema( + yup + .array( yup.object().shape({ + path: yup.array(yup.string()).min(1, REQUIRED_ERROR), value: yup.mixed().when("type", { - is: (val: string) => val === "custom", + is: (val: string) => val === "add", then: yup.string().required(REQUIRED_ERROR), otherwise: strip, }), }) - ), - datetime_format: yup.string().notRequired().default(undefined), - cursor_datetime_formats: yup.array(yup.string()).min(1, REQUIRED_ERROR).required(REQUIRED_ERROR), - start_time_option: schemaIfNotDataFeed(nonPathRequestOptionSchema), - end_time_option: schemaIfRangeFilter(nonPathRequestOptionSchema), - stream_state_field_start: yup.string(), - stream_state_field_end: yup.string(), - lookback_window: yup.string(), - }) - .notRequired() - .default(undefined), + ) + .notRequired() + .default(undefined) + ), + errorHandler: maybeYamlSchema(errorHandlerSchema), + incrementalSync: maybeYamlSchema( + yup + .object() + .shape({ + cursor_field: yup.string().required(REQUIRED_ERROR), + slicer: schemaIfNotDataFeed( + yup + .object() + .shape({ + cursor_granularity: yup.string().required(REQUIRED_ERROR), + step: yup.string().required(REQUIRED_ERROR), + }) + .default(undefined) + ), + start_datetime: yup.object().shape({ + value: yup.mixed().when("type", { + is: (val: string) => val === "custom" || val === "user_input", + then: yup.string().required(REQUIRED_ERROR), + otherwise: strip, + }), + }), + end_datetime: schemaIfRangeFilter( + yup.object().shape({ + value: yup.mixed().when("type", { + is: (val: string) => val === "custom" || val === "user_input", + then: yup.string().required(REQUIRED_ERROR), + otherwise: strip, 
+ }), + }) + ), + datetime_format: yup.string().notRequired().default(undefined), + cursor_datetime_formats: yup.array(yup.string()).min(1, REQUIRED_ERROR).required(REQUIRED_ERROR), + start_time_option: schemaIfNotDataFeed(nonPathRequestOptionSchema), + end_time_option: schemaIfRangeFilter(nonPathRequestOptionSchema), + stream_state_field_start: yup.string(), + stream_state_field_end: yup.string(), + lookback_window: yup.string(), + }) + .notRequired() + .default(undefined) + ), }); export const builderFormValidationSchema = yup.object().shape({ @@ -945,8 +836,28 @@ function splitUrl(url: string): { base: string; path: string } { return { base: leftSide, path: rightSide || "/" }; } -function builderAuthenticatorToManifest(globalSettings: BuilderFormValues["global"]): HttpRequesterAuthenticator { +function convertOrLoadYamlString( + builderValue: BuilderInput | YamlString | undefined, + convertFn: (builderValue: BuilderInput | undefined) => ManifestOutput | undefined +) { + if (builderValue === undefined) { + return undefined; + } + if (isYamlString(builderValue)) { + return load(builderValue) as ManifestOutput; + } + return convertFn(builderValue); +} + +function builderAuthenticatorToManifest( + globalSettings: BuilderFormValues["global"] +): HttpRequesterAuthenticator | undefined { + if (globalSettings.authenticator.type === "NoAuth") { + return undefined; + } if (globalSettings.authenticator.type === "OAuthAuthenticator") { + const { access_token, token_expiry_date, ...refresh_token_updater } = + globalSettings.authenticator.refresh_token_updater ?? {}; return { ...globalSettings.authenticator, refresh_token: @@ -954,9 +865,19 @@ function builderAuthenticatorToManifest(globalSettings: BuilderFormValues["globa ? 
undefined : globalSettings.authenticator.refresh_token, refresh_token_updater: - globalSettings.authenticator.grant_type === "client_credentials" + globalSettings.authenticator.grant_type === "client_credentials" || + !globalSettings.authenticator.refresh_token_updater ? undefined - : globalSettings.authenticator.refresh_token_updater, + : { + ...refresh_token_updater, + access_token_config_path: [ + extractInterpolatedConfigKey(globalSettings.authenticator.refresh_token_updater.access_token), + ], + token_expiry_date_config_path: [ + extractInterpolatedConfigKey(globalSettings.authenticator.refresh_token_updater.token_expiry_date), + ], + refresh_token_config_path: [extractInterpolatedConfigKey(globalSettings.authenticator.refresh_token!)], + }, refresh_request_body: Object.fromEntries(globalSettings.authenticator.refresh_request_body), }; } @@ -964,6 +885,20 @@ function builderAuthenticatorToManifest(globalSettings: BuilderFormValues["globa return { ...globalSettings.authenticator, header: undefined, + api_token: globalSettings.authenticator.api_token, + }; + } + if (globalSettings.authenticator.type === "BearerAuthenticator") { + return { + ...globalSettings.authenticator, + api_token: globalSettings.authenticator.api_token, + }; + } + if (globalSettings.authenticator.type === "BasicHttpAuthenticator") { + return { + ...globalSettings.authenticator, + username: globalSettings.authenticator.username, + password: globalSettings.authenticator.password, }; } if (globalSettings.authenticator.type === "SessionTokenAuthenticator") { @@ -976,7 +911,7 @@ function builderAuthenticatorToManifest(globalSettings: BuilderFormValues["globa url_base: base, path, authenticator: builderLoginRequester.authenticator, - error_handler: buildCompositeErrorHandler(builderLoginRequester.errorHandler), + error_handler: builderErrorHandlersToManifest(builderLoginRequester.errorHandler), http_method: builderLoginRequester.httpMethod, request_parameters: 
Object.fromEntries(builderLoginRequester.requestOptions.requestParameters), request_headers: Object.fromEntries(builderLoginRequester.requestOptions.requestHeaders), @@ -1002,10 +937,17 @@ function pathToSafeJinjaAccess(path: string[]): string { function builderPaginationStrategyToManifest( strategy: BuilderPaginator["strategy"] ): DefaultPaginator["pagination_strategy"] { - if (strategy.type === "OffsetIncrement" || strategy.type === "PageIncrement") { - return strategy; + const correctedStrategy = { + ...strategy, + // must manually convert page_size to a number if it exists, because RHF watch() treats all numeric values as strings + page_size: strategy.page_size ? Number(strategy.page_size) : undefined, + }; + + if (correctedStrategy.type === "OffsetIncrement" || correctedStrategy.type === "PageIncrement") { + return correctedStrategy; } - const { cursor, ...rest } = strategy; + + const { cursor, ...rest } = correctedStrategy; return { ...rest, @@ -1018,9 +960,11 @@ function builderPaginationStrategyToManifest( }; } -function builderPaginatorToManifest(paginator: BuilderStream["paginator"]): SimpleRetrieverPaginator { +export function builderPaginatorToManifest( + paginator: BuilderPaginator | undefined +): SimpleRetrieverPaginator | undefined { if (!paginator) { - return { type: "NoPagination" }; + return undefined; } let pageTokenOption: DefaultPaginatorPageTokenOption | undefined; @@ -1043,7 +987,9 @@ function builderPaginatorToManifest(paginator: BuilderStream["paginator"]): Simp }; } -function builderIncrementalToManifest(formValues: BuilderStream["incrementalSync"]): DatetimeBasedCursor | undefined { +export function builderIncrementalSyncToManifest( + formValues: BuilderIncrementalSync | undefined +): DatetimeBasedCursor | undefined { if (!formValues) { return undefined; } @@ -1061,7 +1007,7 @@ function builderIncrementalToManifest(formValues: BuilderStream["incrementalSync } = formValues; const startDatetime = { type: "MinMaxDatetime" as const, - 
datetime: start_datetime.type === "custom" ? start_datetime.value : `{{ config['start_date'] }}`, + datetime: start_datetime.value, datetime_format: start_datetime.type === "custom" ? start_datetime.format : INCREMENTAL_SYNC_USER_INPUT_DATE_FORMAT, }; const manifestIncrementalSync = { @@ -1080,11 +1026,9 @@ function builderIncrementalToManifest(formValues: BuilderStream["incrementalSync end_datetime: { type: "MinMaxDatetime", datetime: - end_datetime.type === "custom" - ? end_datetime.value - : end_datetime.type === "now" + end_datetime.type === "now" ? `{{ now_utc().strftime('${INCREMENTAL_SYNC_USER_INPUT_DATE_FORMAT}') }}` - : `{{ config['end_date'] }}`, + : end_datetime.value, datetime_format: end_datetime.type === "custom" ? end_datetime.format : INCREMENTAL_SYNC_USER_INPUT_DATE_FORMAT, }, step: slicer?.step, @@ -1134,7 +1078,7 @@ function builderStreamPartitionRouterToManifest( parent_key: parentStreamConfiguration.parent_key, request_option: parentStreamConfiguration.request_option, partition_field: parentStreamConfiguration.partition_field, - stream: builderStreamToDeclarativeSteam(values, parentStream, visitedStreams), + stream: streamRef(parentStream.name), }, ], }; @@ -1151,10 +1095,13 @@ function builderStreamPartitionRouterToManifest( }); } - return [...(substreamPartitionRouters ?? []), ...(listPartitionRouters ?? [])]; + const combinedPartitionRouters = [...(substreamPartitionRouters ?? []), ...(listPartitionRouters ?? [])]; + return combinedPartitionRouters.length > 0 ? 
combinedPartitionRouters : undefined; } -function buildCompositeErrorHandler(errorHandlers: BuilderStream["errorHandler"]): CompositeErrorHandler | undefined { +export function builderErrorHandlersToManifest( + errorHandlers: BuilderErrorHandler[] | undefined +): CompositeErrorHandler | undefined { if (!errorHandlers || errorHandlers.length === 0) { return undefined; } @@ -1172,7 +1119,7 @@ function buildCompositeErrorHandler(errorHandlers: BuilderStream["errorHandler"] }; } -function builderTransformationsToManifest( +export function builderTransformationsToManifest( transformations: BuilderTransformation[] | undefined ): DeclarativeStreamTransformationsItem[] | undefined { if (!transformations) { @@ -1200,62 +1147,61 @@ function builderTransformationsToManifest( }); } -const EMPTY_SCHEMA = { type: "InlineSchemaLoader", schema: {} } as const; - -function parseSchemaString(schema?: string): DeclarativeStreamSchemaLoader { - if (!schema) { - return EMPTY_SCHEMA; - } - try { - return { type: "InlineSchemaLoader", schema: JSON.parse(schema) }; - } catch { - return EMPTY_SCHEMA; - } +function fromEntriesOrUndefined(...args: Parameters) { + const obj = Object.fromEntries(...args); + return Object.keys(obj).length > 0 ? obj : undefined; } function builderRequestBodyToStreamRequestBody(builderRequestBody: BuilderRequestBody) { try { - return { + const requestBody = { request_body_json: builderRequestBody.type === "json_list" - ? Object.fromEntries(builderRequestBody.values) + ? fromEntriesOrUndefined(builderRequestBody.values) : builderRequestBody.type === "json_freeform" - ? JSON.parse(builderRequestBody.value) + ? ((parsedJson) => (Object.keys(parsedJson).length > 0 ? parsedJson : undefined))( + JSON.parse(builderRequestBody.value) + ) : undefined, request_body_data: builderRequestBody.type === "form_list" - ? Object.fromEntries(builderRequestBody.values) + ? fromEntriesOrUndefined(builderRequestBody.values) : builderRequestBody.type === "string_freeform" ? 
builderRequestBody.value : undefined, }; + return Object.keys(requestBody).length > 0 ? requestBody : undefined; } catch { - return {}; + return undefined; } } +type BaseRequester = Pick; + function builderStreamToDeclarativeSteam( values: BuilderFormValues, stream: BuilderStream, visitedStreams: string[] ): DeclarativeStream { + // cast to tell typescript which properties will be present after resolving the ref + const requesterRef = { + $ref: "#/definitions/base_requester", + } as unknown as BaseRequester; + const declarativeStream: DeclarativeStream = { type: "DeclarativeStream", name: stream.name, - primary_key: stream.primaryKey, - schema_loader: parseSchemaString(stream.schema), + primary_key: stream.primaryKey.length > 0 ? stream.primaryKey : undefined, retriever: { type: "SimpleRetriever", requester: { - type: "HttpRequester", - url_base: values.global?.urlBase?.trim(), + ...requesterRef, path: stream.urlPath?.trim(), http_method: stream.httpMethod, - request_parameters: Object.fromEntries(stream.requestOptions.requestParameters), - request_headers: Object.fromEntries(stream.requestOptions.requestHeaders), - authenticator: builderAuthenticatorToManifest(values.global), - error_handler: buildCompositeErrorHandler(stream.errorHandler), + request_parameters: fromEntriesOrUndefined(stream.requestOptions.requestParameters), + request_headers: fromEntriesOrUndefined(stream.requestOptions.requestHeaders), ...builderRequestBodyToStreamRequestBody(stream.requestOptions.requestBody), + error_handler: convertOrLoadYamlString(stream.errorHandler, builderErrorHandlersToManifest), }, record_selector: { type: "RecordSelector", @@ -1264,7 +1210,7 @@ function builderStreamToDeclarativeSteam( field_path: stream.fieldPointer, }, }, - paginator: builderPaginatorToManifest(stream.paginator), + paginator: convertOrLoadYamlString(stream.paginator, builderPaginatorToManifest), partition_router: builderStreamPartitionRouterToManifest( values, stream.parentStreams, @@ -1272,110 
+1218,112 @@ function builderStreamToDeclarativeSteam( [...visitedStreams, stream.id] ), }, - transformations: builderTransformationsToManifest(stream.transformations), - incremental_sync: builderIncrementalToManifest(stream.incrementalSync), + incremental_sync: convertOrLoadYamlString(stream.incrementalSync, builderIncrementalSyncToManifest), + transformations: convertOrLoadYamlString(stream.transformations, builderTransformationsToManifest), + schema_loader: { type: "InlineSchemaLoader", schema: schemaRef(stream.name) }, }; return merge({}, declarativeStream, stream.unsupportedFields); } -export const orderInputs = ( - inputs: BuilderFormInput[], - inferredInputs: BuilderFormInput[], - storedInputOrder: string[] -) => { - const keyToStoredOrder = storedInputOrder.reduce((map, key, index) => map.set(key, index), new Map()); - - return inferredInputs - .map((input) => { - return { input, isInferred: true, id: input.key }; - }) - .concat( - inputs.map((input) => { - return { input, isInferred: false, id: input.key }; - }) - ) - .sort((inputA, inputB) => { - const storedIndexA = keyToStoredOrder.get(inputA.id); - const storedIndexB = keyToStoredOrder.get(inputB.id); +export const builderFormValuesToMetadata = (values: BuilderFormValues): BuilderMetadata => { + const componentNameIfString = (componentName: YamlSupportedComponentName, value: unknown) => + isYamlString(value) ? 
[componentName] : []; + + const yamlComponentsPerStream = {} as Record; + values.streams.forEach((stream) => { + const yamlComponents = [ + ...componentNameIfString("paginator", stream.paginator), + ...componentNameIfString("errorHandler", stream.errorHandler), + ...componentNameIfString("transformations", stream.transformations), + ...componentNameIfString("incrementalSync", stream.incrementalSync), + ]; + if (yamlComponents.length > 0) { + yamlComponentsPerStream[stream.name] = yamlComponents; + } + }); - if (storedIndexA !== undefined && storedIndexB !== undefined) { - return storedIndexA - storedIndexB; - } - if (storedIndexA !== undefined && storedIndexB === undefined) { - return inputB.isInferred ? 1 : -1; - } - if (storedIndexA === undefined && storedIndexB !== undefined) { - return inputA.isInferred ? -1 : 1; - } - // both indexes are undefined - if (inputA.isInferred && inputB.isInferred) { - return DEFAULT_INFERRED_INPUT_ORDER.indexOf(inputA.id) - DEFAULT_INFERRED_INPUT_ORDER.indexOf(inputB.id); - } - if (inputA.isInferred && !inputB.isInferred) { - return -1; - } - if (!inputA.isInferred && inputB.isInferred) { - return 1; - } - return naturalComparator(inputA.id, inputB.id); - }); -}; + const hasYamlComponents = Object.keys(yamlComponentsPerStream).length > 0; -export const builderFormValuesToMetadata = (values: BuilderFormValues): DeclarativeComponentSchemaMetadata => { return { autoImportSchema: Object.fromEntries(values.streams.map((stream) => [stream.name, stream.autoImportSchema])), + ...(hasYamlComponents && { + yamlComponents: { + streams: yamlComponentsPerStream, + }, + }), }; }; -export const convertToManifest = (values: BuilderFormValues): ConnectorManifest => { - const manifestStreams: DeclarativeStream[] = values.streams.map((stream) => - builderStreamToDeclarativeSteam(values, stream, []) - ); - - const orderedInputs = orderInputs( - values.inputs, - getInferredInputList( - values.global.authenticator, - values.inferredInputOverrides, - 
hasIncrementalSyncUserInput(values.streams, "start_datetime"), - hasIncrementalSyncUserInput(values.streams, "end_datetime") - ), - values.inputOrder - ); - const allInputs = orderedInputs.map((orderedInput) => orderedInput.input); - +export const builderInputsToSpec = (inputs: BuilderFormInput[]): Spec => { const specSchema: JSONSchema7 = { $schema: "http://json-schema.org/draft-07/schema#", type: "object", - required: allInputs.filter((input) => input.required).map((input) => input.key), - properties: Object.fromEntries(allInputs.map((input, index) => [input.key, { ...input.definition, order: index }])), + required: inputs.filter((input) => input.required).map((input) => input.key), + properties: Object.fromEntries(inputs.map((input, index) => [input.key, { ...input.definition, order: index }])), additionalProperties: true, }; - const spec: Spec = { + return { connection_specification: specSchema, type: "Spec", }; +}; + +export const convertToManifest = (values: BuilderFormValues): ConnectorManifest => { + const manifestStreams: DeclarativeStream[] = values.streams.map((stream) => + builderStreamToDeclarativeSteam(values, stream, []) + ); const streamNames = values.streams.map((s) => s.name); const validCheckStreamNames = (values.checkStreams ?? []).filter((checkStream) => streamNames.includes(checkStream)); const correctedCheckStreams = validCheckStreamNames.length > 0 ? validCheckStreamNames : streamNames.length > 0 ? [streamNames[0]] : []; - return merge({ + const streamNameToStream = Object.fromEntries(manifestStreams.map((stream) => [stream.name, stream])); + const streamRefs = manifestStreams.map((stream) => streamRef(stream.name!)); + + const streamNameToSchema = Object.fromEntries( + values.streams.map((stream) => { + const schema = stream.schema ? 
JSON.parse(stream.schema) : JSON.parse(DEFAULT_SCHEMA); + schema.additionalProperties = true; + return [stream.name, schema]; + }) + ); + + const baseRequester: BaseRequester = { + type: "HttpRequester", + url_base: values.global?.urlBase?.trim(), + authenticator: builderAuthenticatorToManifest(values.global), + }; + + return { version: CDK_VERSION, type: "DeclarativeSource", check: { type: "CheckStream", stream_names: correctedCheckStreams, }, - streams: manifestStreams, - spec, + definitions: { + base_requester: baseRequester, + streams: streamNameToStream, + }, + streams: streamRefs, + schemas: streamNameToSchema, + spec: builderInputsToSpec(values.inputs), metadata: builderFormValuesToMetadata(values), - }); + }; }; +function streamRef(streamName: string) { + // force cast to DeclarativeStream so that this still validates against the types + return { $ref: `#/definitions/streams/${streamName}` } as unknown as DeclarativeStream; +} + +function schemaRef(streamName: string) { + return { $ref: `#/schemas/${streamName}` }; +} + export const DEFAULT_JSON_MANIFEST_VALUES: ConnectorManifest = convertToManifest(DEFAULT_BUILDER_FORM_VALUES); export const useBuilderWatch = >(path: TPath, options?: { exact: boolean }) => diff --git a/airbyte-webapp/src/components/connectorBuilder/useInferredInputs.ts b/airbyte-webapp/src/components/connectorBuilder/useInferredInputs.ts deleted file mode 100644 index 6926596c6bd..00000000000 --- a/airbyte-webapp/src/components/connectorBuilder/useInferredInputs.ts +++ /dev/null @@ -1,18 +0,0 @@ -import { useContext } from "react"; - -import { ConnectorBuilderMainRHFContext } from "services/connectorBuilder/ConnectorBuilderStateService"; - -import { getInferredInputList, hasIncrementalSyncUserInput } from "./types"; - -export const useInferredInputs = () => { - const { watch } = useContext(ConnectorBuilderMainRHFContext) || {}; - if (!watch) { - throw new Error("rhf context not available"); - } - const authenticator = 
watch("formValues.global.authenticator"); - const inferredInputOverrides = watch("formValues.inferredInputOverrides"); - const streams = watch("formValues.streams"); - const startDateInput = hasIncrementalSyncUserInput(streams, "start_datetime"); - const endDateInput = hasIncrementalSyncUserInput(streams, "end_datetime"); - return getInferredInputList(authenticator, inferredInputOverrides, startDateInput, endDateInput); -}; diff --git a/airbyte-webapp/src/components/connectorBuilder/useLockedInputs.ts b/airbyte-webapp/src/components/connectorBuilder/useLockedInputs.ts new file mode 100644 index 00000000000..99c0f424ec9 --- /dev/null +++ b/airbyte-webapp/src/components/connectorBuilder/useLockedInputs.ts @@ -0,0 +1,299 @@ +import { useEffect } from "react"; +import { useFormContext } from "react-hook-form"; + +import { + API_KEY_AUTHENTICATOR, + BASIC_AUTHENTICATOR, + BEARER_AUTHENTICATOR, + BuilderFormAuthenticator, + BuilderFormInput, + BuilderStream, + NO_AUTH, + OAUTH_AUTHENTICATOR, + SESSION_TOKEN_AUTHENTICATOR, + extractInterpolatedConfigKey, + isYamlString, + useBuilderWatch, +} from "./types"; + +export const useUpdateLockedInputs = () => { + const formValues = useBuilderWatch("formValues"); + const { setValue } = useFormContext(); + + useEffect(() => { + const keyToDesiredLockedInput = getKeyToDesiredLockedInput(formValues.global.authenticator, formValues.streams); + + const existingLockedInputKeys = formValues.inputs.filter((input) => input.isLocked).map((input) => input.key); + const lockedInputKeysToCreate = Object.keys(keyToDesiredLockedInput).filter( + (key) => !existingLockedInputKeys.includes(key) + ); + const lockedInputKeysToDelete = existingLockedInputKeys.filter((key) => !keyToDesiredLockedInput[key]); + if (lockedInputKeysToCreate.length === 0 && lockedInputKeysToDelete.length === 0) { + return; + } + + const updatedInputs = formValues.inputs.filter((input) => !lockedInputKeysToDelete.includes(input.key)); + lockedInputKeysToCreate.forEach((key) 
=> { + updatedInputs.push({ + ...keyToDesiredLockedInput[key], + key, + isLocked: true, + }); + }); + setValue("formValues.inputs", updatedInputs); + }, [formValues.global.authenticator, formValues.inputs, formValues.streams, setValue]); +}; + +export const useGetUniqueKey = () => { + const builderInputs = useBuilderWatch("formValues.inputs"); + const builderStreams = useBuilderWatch("formValues.streams"); + + // If reuseIncrementalField is set, find the first stream which has the corresponding incremental field + // set to user input and return its key. Otherwise, return a unique version of the desired key. + return (desiredKey: string, reuseIncrementalField?: "start_datetime" | "end_datetime") => { + if (reuseIncrementalField) { + let existingKey: string | undefined = undefined; + builderStreams.some((stream) => { + if (stream.incrementalSync && !isYamlString(stream.incrementalSync)) { + const incrementalDatetime = stream.incrementalSync[reuseIncrementalField]; + if (incrementalDatetime.type === "user_input") { + existingKey = extractInterpolatedConfigKey(incrementalDatetime.value); + return true; + } + } + return false; + }); + if (existingKey) { + return existingKey; + } + } + + const existingKeys = builderInputs.map((input) => input.key); + let key = desiredKey; + let i = 2; + while (existingKeys.includes(key)) { + key = `${desiredKey}_${i}`; + i++; + } + return key; + }; +}; + +export function getKeyToDesiredLockedInput( + authenticator: BuilderFormAuthenticator, + streams: BuilderStream[] +): Record { + const authKeyToDesiredInput = getAuthKeyToDesiredLockedInput(authenticator); + + const incrementalStartDateKeys = new Set(); + const incrementalEndDateKeys = new Set(); + streams.forEach((stream) => { + if (stream.incrementalSync && !isYamlString(stream.incrementalSync)) { + const startDatetime = stream.incrementalSync.start_datetime; + if (startDatetime.type === "user_input") { + 
incrementalStartDateKeys.add(extractInterpolatedConfigKey(startDatetime.value)); + } + + const endDatetime = stream.incrementalSync.end_datetime; + if (endDatetime.type === "user_input") { + incrementalEndDateKeys.add(extractInterpolatedConfigKey(endDatetime.value)); + } + } + }); + + const incrementalKeyToDesiredInput: Record = { + ...Array.from(incrementalStartDateKeys).reduce( + (acc, key) => ({ + ...acc, + [key]: LOCKED_INPUT_BY_INCREMENTAL_FIELD_NAME.start_datetime, + }), + {} + ), + ...Array.from(incrementalEndDateKeys).reduce( + (acc, key) => ({ + ...acc, + [key]: LOCKED_INPUT_BY_INCREMENTAL_FIELD_NAME.end_datetime, + }), + {} + ), + }; + + return { + ...authKeyToDesiredInput, + ...incrementalKeyToDesiredInput, + }; +} + +function getAuthKeyToDesiredLockedInput(authenticator: BuilderFormAuthenticator): Record { + switch (authenticator.type) { + case API_KEY_AUTHENTICATOR: + case BEARER_AUTHENTICATOR: + const apiTokenKey = extractInterpolatedConfigKey(authenticator.api_token); + return { + ...(apiTokenKey && { [apiTokenKey]: LOCKED_INPUT_BY_FIELD_NAME_BY_AUTH_TYPE[authenticator.type].api_token }), + }; + + case BASIC_AUTHENTICATOR: + const usernameKey = extractInterpolatedConfigKey(authenticator.username); + const passwordKey = extractInterpolatedConfigKey(authenticator.password); + return { + [usernameKey]: LOCKED_INPUT_BY_FIELD_NAME_BY_AUTH_TYPE[BASIC_AUTHENTICATOR].username, + ...(passwordKey && { + [passwordKey]: LOCKED_INPUT_BY_FIELD_NAME_BY_AUTH_TYPE[BASIC_AUTHENTICATOR].password, + }), + }; + + case OAUTH_AUTHENTICATOR: + const clientIdKey = extractInterpolatedConfigKey(authenticator.client_id); + const clientSecretKey = extractInterpolatedConfigKey(authenticator.client_secret); + const refreshTokenKey = extractInterpolatedConfigKey(authenticator.refresh_token); + const accessTokenKey = extractInterpolatedConfigKey(authenticator.refresh_token_updater?.access_token); + const tokenExpiryDateKey = 
extractInterpolatedConfigKey(authenticator.refresh_token_updater?.token_expiry_date); + return { + [clientIdKey]: LOCKED_INPUT_BY_FIELD_NAME_BY_AUTH_TYPE[OAUTH_AUTHENTICATOR].client_id, + [clientSecretKey]: LOCKED_INPUT_BY_FIELD_NAME_BY_AUTH_TYPE[OAUTH_AUTHENTICATOR].client_secret, + ...(refreshTokenKey && { + [refreshTokenKey]: LOCKED_INPUT_BY_FIELD_NAME_BY_AUTH_TYPE[OAUTH_AUTHENTICATOR].refresh_token, + }), + ...(accessTokenKey && { + [accessTokenKey]: + LOCKED_INPUT_BY_FIELD_NAME_BY_AUTH_TYPE[OAUTH_AUTHENTICATOR].refresh_token_updater.access_token_config_path, + }), + ...(tokenExpiryDateKey && { + [tokenExpiryDateKey]: + LOCKED_INPUT_BY_FIELD_NAME_BY_AUTH_TYPE[OAUTH_AUTHENTICATOR].refresh_token_updater + .token_expiry_date_config_path, + }), + }; + + case SESSION_TOKEN_AUTHENTICATOR: + const loginRequesterAuthenticator = authenticator.login_requester.authenticator; + return loginRequesterAuthenticator ? getAuthKeyToDesiredLockedInput(loginRequesterAuthenticator) : {}; + + default: + return {}; + } +} + +export const LOCKED_INPUT_BY_FIELD_NAME_BY_AUTH_TYPE = { + [NO_AUTH]: {}, + [API_KEY_AUTHENTICATOR]: { + api_token: { + key: "api_key", + required: true, + definition: { + type: "string" as const, + title: "API Key", + airbyte_secret: true, + }, + }, + }, + [BEARER_AUTHENTICATOR]: { + api_token: { + key: "api_key", + required: true, + definition: { + type: "string" as const, + title: "API Key", + airbyte_secret: true, + }, + }, + }, + [BASIC_AUTHENTICATOR]: { + username: { + key: "username", + required: true, + definition: { + type: "string" as const, + title: "Username", + }, + }, + password: { + key: "password", + required: false, + definition: { + type: "string" as const, + title: "Password", + always_show: true, + airbyte_secret: true, + }, + }, + }, + [OAUTH_AUTHENTICATOR]: { + client_id: { + key: "client_id", + required: true, + definition: { + type: "string" as const, + title: "Client ID", + airbyte_secret: true, + }, + }, + client_secret: { + key: 
"client_secret", + required: true, + definition: { + type: "string" as const, + title: "Client secret", + airbyte_secret: true, + }, + }, + refresh_token: { + key: "client_refresh_token", + required: true, + definition: { + type: "string" as const, + title: "Refresh token", + airbyte_secret: true, + }, + }, + refresh_token_updater: { + access_token_config_path: { + key: "oauth_access_token", + required: false, + definition: { + type: "string" as const, + title: "Access token", + airbyte_secret: true, + description: + "The current access token. This field might be overridden by the connector based on the token refresh endpoint response.", + }, + }, + token_expiry_date_config_path: { + key: "oauth_token_expiry_date", + required: false, + definition: { + type: "string" as const, + title: "Token expiry date", + format: "date-time", + description: + "The date the current access token expires in. This field might be overridden by the connector based on the token refresh endpoint response.", + }, + }, + }, + }, + [SESSION_TOKEN_AUTHENTICATOR]: {}, +}; + +export const LOCKED_INPUT_BY_INCREMENTAL_FIELD_NAME: Record = { + start_datetime: { + key: "start_date", + required: true, + definition: { + type: "string", + title: "Start date", + format: "date-time", + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$", + }, + }, + end_datetime: { + key: "end_date", + required: true, + definition: { + type: "string", + title: "End date", + format: "date-time", + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$", + }, + }, +}; diff --git a/airbyte-webapp/src/components/connectorBuilder/useManifestToBuilderForm.test.ts b/airbyte-webapp/src/components/connectorBuilder/useManifestToBuilderForm.test.ts index c8919cfd843..a10a01f042f 100644 --- a/airbyte-webapp/src/components/connectorBuilder/useManifestToBuilderForm.test.ts +++ b/airbyte-webapp/src/components/connectorBuilder/useManifestToBuilderForm.test.ts @@ -1,8 +1,20 @@ import merge from 
"lodash/merge"; -import { ConnectorManifest, DeclarativeStream } from "core/api/types/ConnectorManifest"; +import { + ConnectorManifest, + DeclarativeStream, + DeclarativeStreamIncrementalSync, + HttpRequesterErrorHandler, + SimpleRetrieverPaginator, + Spec, +} from "core/api/types/ConnectorManifest"; import { removeEmptyProperties } from "core/utils/form"; +import { + manifestErrorHandlerToBuilder, + manifestIncrementalSyncToBuilder, + manifestPaginatorToBuilder, +} from "./convertManifestToBuilderForm"; import { DEFAULT_BUILDER_FORM_VALUES, DEFAULT_CONNECTOR_NAME, OLDEST_SUPPORTED_CDK_VERSION } from "./types"; import { convertToBuilderFormValues } from "./useManifestToBuilderForm"; import { formatJson } from "./utils"; @@ -17,6 +29,68 @@ const baseManifest: ConnectorManifest = { streams: [], }; +const apiAuthRetriever = { + retriever: { + type: "SimpleRetriever", + requester: { + authenticator: { + type: "ApiKeyAuthenticator", + api_token: "{{ config['api_token'] }}", + header: "API_KEY", + }, + }, + }, +}; + +const apiTokenSpec: Spec = { + type: "Spec", + connection_specification: { + type: "object", + required: ["api_token"], + properties: { + api_token: { + type: "string", + title: "API Token", + airbyte_secret: true, + }, + }, + }, +}; + +const oauthSpec: Spec = { + type: "Spec", + connection_specification: { + type: "object", + required: ["client_id", "client_secret", "client_refresh_token"], + properties: { + client_id: { + type: "string", + title: "Client ID", + airbyte_secret: true, + }, + client_secret: { + type: "string", + title: "Client Secret", + airbyte_secret: true, + }, + client_refresh_token: { + type: "string", + title: "Client Refresh Token", + airbyte_secret: true, + }, + oauth_access_token: { + type: "string", + title: "Access Token", + airbyte_secret: true, + }, + oauth_token_expiry_date: { + type: "string", + title: "Token Expiry Date", + }, + }, + }, +}; + const stream1: DeclarativeStream = { type: "DeclarativeStream", name: "stream1", @@ 
-124,7 +198,7 @@ describe("Conversion throws error when", () => { }; return convertToBuilderFormValues(noOpResolve, manifest, DEFAULT_CONNECTOR_NAME); }; - await expect(convert).rejects.toThrow("api_token value must be of the form {{ config["); + await expect(convert).rejects.toThrow('ApiKeyAuthenticator.api_token must be of the form {{ config["key"] }}'); }); it("manifest has an authenticator with a interpolated secret key of type config.", async () => { @@ -143,12 +217,13 @@ describe("Conversion throws error when", () => { }, }), ], + spec: apiTokenSpec, }; const formValues = await convertToBuilderFormValues(noOpResolve, manifest, DEFAULT_CONNECTOR_NAME); if (formValues.global.authenticator.type !== "ApiKeyAuthenticator") { throw new Error("Has to be ApiKeyAuthenticator"); } - expect(formValues.global.authenticator.api_token).toEqual("{{ config.api_token }}"); + expect(formValues.global.authenticator.api_token).toEqual('{{ config["api_token"] }}'); }); it("manifest has an authenticator with a interpolated secret key of type config['config key']", async () => { @@ -167,48 +242,108 @@ describe("Conversion throws error when", () => { }, }), ], + spec: apiTokenSpec, }; const formValues = await convertToBuilderFormValues(noOpResolve, manifest, DEFAULT_CONNECTOR_NAME); if (formValues.global.authenticator.type !== "ApiKeyAuthenticator") { throw new Error("Has to be ApiKeyAuthenticator"); } - expect(formValues.global.authenticator.api_token).toEqual("{{ config['api_token'] }}"); + expect(formValues.global.authenticator.api_token).toEqual('{{ config["api_token"] }}'); }); - it("manifest has an OAuthAuthenticator with a refresh_request_body containing non-string values", async () => { - const convert = () => { + it("manifest has an authenticator with an interpolated key that doesn't match any spec key", async () => { + const convert = async () => { const manifest: ConnectorManifest = { ...baseManifest, - streams: [ - merge({}, stream1, { - retriever: { - requester: { - 
authenticator: { - type: "OAuthAuthenticator", - client_id: "{{ config['client_id'] }}", - client_secret: "{{ config['client_secret'] }}", - refresh_token: "{{ config['client_refresh_token'] }}", - refresh_request_body: { - key1: "val1", - key2: { - a: 1, - b: 2, - }, - }, - token_refresh_endpoint: "https://api.com/refresh_token", - grant_type: "client_credentials", - }, + streams: [merge({}, stream1, apiAuthRetriever)], + }; + return convertToBuilderFormValues(noOpResolve, manifest, DEFAULT_CONNECTOR_NAME); + }; + await expect(convert).rejects.toThrow( + 'ApiKeyAuthenticator.api_token references spec key "api_token", which must appear in the spec' + ); + }); + + it("manifest has an authenticator with a required interpolated key that is not required in the spec", async () => { + const convert = async () => { + const manifest: ConnectorManifest = { + ...baseManifest, + streams: [merge({}, stream1, apiAuthRetriever)], + spec: { + type: "Spec", + connection_specification: { + type: "object", + properties: { + api_token: { + type: "string", + title: "API Token", + airbyte_secret: true, }, }, - }), - ], + }, + }, }; return convertToBuilderFormValues(noOpResolve, manifest, DEFAULT_CONNECTOR_NAME); }; - await expect(convert).rejects.toThrow("OAuthAuthenticator contains a refresh_request_body with non-string values"); + await expect(convert).rejects.toThrow( + 'ApiKeyAuthenticator.api_token references spec key "api_token", which must be required in the spec' + ); }); - it("manifest has an OAuthAuthenticator with non-standard access token or token expiry date config path", async () => { + it("manifest has an authenticator with an interpolated key that is not type string in the spec", async () => { + const convert = async () => { + const manifest: ConnectorManifest = { + ...baseManifest, + streams: [merge({}, stream1, apiAuthRetriever)], + spec: { + type: "Spec", + connection_specification: { + type: "object", + required: ["api_token"], + properties: { + api_token: { + type: 
"integer", + title: "API Token", + airbyte_secret: true, + }, + }, + }, + }, + }; + return convertToBuilderFormValues(noOpResolve, manifest, DEFAULT_CONNECTOR_NAME); + }; + await expect(convert).rejects.toThrow( + 'ApiKeyAuthenticator.api_token references spec key "api_token", which must be of type string' + ); + }); + + it("manifest has an authenticator with an interpolated secret key that is not secret in the spec", async () => { + const convert = async () => { + const manifest: ConnectorManifest = { + ...baseManifest, + streams: [merge({}, stream1, apiAuthRetriever)], + spec: { + type: "Spec", + connection_specification: { + type: "object", + required: ["api_token"], + properties: { + api_token: { + type: "string", + title: "API Token", + }, + }, + }, + }, + }; + return convertToBuilderFormValues(noOpResolve, manifest, DEFAULT_CONNECTOR_NAME); + }; + await expect(convert).rejects.toThrow( + 'ApiKeyAuthenticator.api_token references spec key "api_token", which must have airbyte_secret set to true' + ); + }); + + it("manifest has an OAuthAuthenticator with a refresh_request_body containing non-string values", async () => { const convert = () => { const manifest: ConnectorManifest = { ...baseManifest, @@ -221,13 +356,15 @@ describe("Conversion throws error when", () => { client_id: "{{ config['client_id'] }}", client_secret: "{{ config['client_secret'] }}", refresh_token: "{{ config['client_refresh_token'] }}", + refresh_request_body: { + key1: "val1", + key2: { + a: 1, + b: 2, + }, + }, token_refresh_endpoint: "https://api.com/refresh_token", grant_type: "client_credentials", - refresh_token_updater: { - access_token_config_path: ["credentials", "access_token"], - refresh_token_config_path: ["client_refresh_token"], - token_expiry_date_config_path: ["oauth_token_expiry_date"], - }, }, }, }, @@ -236,9 +373,7 @@ describe("Conversion throws error when", () => { }; return convertToBuilderFormValues(noOpResolve, manifest, DEFAULT_CONNECTOR_NAME); }; - await 
expect(convert).rejects.toThrow( - "OAuthAuthenticator access token config path needs to be [oauth_access_token]" - ); + await expect(convert).rejects.toThrow("OAuthAuthenticator contains a refresh_request_body with non-string values"); }); it("manifest has a SessionTokenAuthenticator with an unsupported login_requester authenticator type", async () => { @@ -285,6 +420,38 @@ describe("Conversion throws error when", () => { "SessionTokenAuthenticator login_requester.authenticator must have one of the following types" ); }); + + it("manifest has a paginator with an unsupported type", async () => { + const convert = () => { + const paginator = { + type: "UnsupportedPaginatorHandler", + }; + return manifestPaginatorToBuilder(paginator as SimpleRetrieverPaginator); + }; + expect(convert).toThrow("doesn't use a DefaultPaginato"); + }); + + it("manifest has an error handler with an unsupported type", async () => { + const convert = () => { + const errorHandler = { + type: "UnsupportedErrorHandler", + }; + return manifestErrorHandlerToBuilder(errorHandler as HttpRequesterErrorHandler); + }; + expect(convert).toThrow( + "error handler type is unsupported; only CompositeErrorHandler and DefaultErrorHandler are supported" + ); + }); + + it("manifest has an incremental sync with an unsupported type", async () => { + const convert = () => { + const incrementalSync = { + type: "UnsupportedIncrementalSync", + }; + return manifestIncrementalSyncToBuilder(incrementalSync as DeclarativeStreamIncrementalSync); + }; + expect(convert).toThrow("doesn't use a DatetimeBasedCursor"); + }); }); describe("Conversion successfully results in", () => { @@ -313,11 +480,11 @@ describe("Conversion successfully results in", () => { }, }; const formValues = await convertToBuilderFormValues(noOpResolve, manifest, DEFAULT_CONNECTOR_NAME); - expect(formValues.inferredInputOverrides).toEqual({}); expect(formValues.inputs).toEqual([ { key: "api_key", required: true, + isLocked: false, definition: 
manifest.spec?.connection_specification.properties.api_key, }, ]); @@ -346,12 +513,13 @@ describe("Conversion successfully results in", () => { { key: "api_key", required: false, + isLocked: false, definition: manifest.spec?.connection_specification.properties.api_key, }, ]); }); - it("spec properties converted to input overrides on matching auth keys", async () => { + it("spec properties converted to locked inputs on matching auth keys", async () => { const manifest: ConnectorManifest = { ...baseManifest, streams: [ @@ -389,15 +557,19 @@ describe("Conversion successfully results in", () => { }; const formValues = await convertToBuilderFormValues(noOpResolve, manifest, DEFAULT_CONNECTOR_NAME); expect(formValues.inputs).toEqual([ + { + key: "api_key", + required: true, + isLocked: true, + definition: manifest.spec?.connection_specification.properties.api_key, + }, { key: "numeric_key", required: false, + isLocked: false, definition: manifest.spec?.connection_specification.properties.numeric_key, }, ]); - expect(formValues.inferredInputOverrides).toEqual({ - api_key: manifest.spec?.connection_specification.properties.api_key, - }); }); it("request options converted to key-value list", async () => { @@ -674,13 +846,14 @@ describe("Conversion successfully results in", () => { }, }), ], + spec: oauthSpec, }; const formValues = await convertToBuilderFormValues(noOpResolve, manifest, DEFAULT_CONNECTOR_NAME); expect(formValues.global.authenticator).toEqual({ type: "OAuthAuthenticator", - client_id: "{{ config['client_id'] }}", - client_secret: "{{ config['client_secret'] }}", - refresh_token: "{{ config['client_refresh_token'] }}", + client_id: '{{ config["client_id"] }}', + client_secret: '{{ config["client_secret"] }}', + refresh_token: '{{ config["client_refresh_token"] }}', refresh_request_body: [ ["key1", "val1"], ["key2", "val2"], @@ -715,21 +888,21 @@ describe("Conversion successfully results in", () => { }, }), ], + spec: oauthSpec, }; const formValues = await 
convertToBuilderFormValues(noOpResolve, manifest, DEFAULT_CONNECTOR_NAME); expect(formValues.global.authenticator).toEqual({ type: "OAuthAuthenticator", - client_id: "{{ config['client_id'] }}", - client_secret: "{{ config['client_secret'] }}", - refresh_token: "{{ config['client_refresh_token'] }}", + client_id: '{{ config["client_id"] }}', + client_secret: '{{ config["client_secret"] }}', + refresh_token: '{{ config["client_refresh_token"] }}', refresh_request_body: [], token_refresh_endpoint: "https://api.com/refresh_token", grant_type: "refresh_token", refresh_token_updater: { refresh_token_name: "refresh_token", - access_token_config_path: ["oauth_access_token"], - refresh_token_config_path: ["client_refresh_token"], - token_expiry_date_config_path: ["oauth_token_expiry_date"], + access_token: '{{ config["oauth_access_token"] }}', + token_expiry_date: '{{ config["oauth_token_expiry_date"] }}', }, }); }); diff --git a/airbyte-webapp/src/components/connectorBuilder/useManifestToBuilderForm.ts b/airbyte-webapp/src/components/connectorBuilder/useManifestToBuilderForm.ts index 47ebf6c991e..ed1c34db933 100644 --- a/airbyte-webapp/src/components/connectorBuilder/useManifestToBuilderForm.ts +++ b/airbyte-webapp/src/components/connectorBuilder/useManifestToBuilderForm.ts @@ -1,4 +1,4 @@ -import { useBuilderResolveManifestQuery } from "core/api"; +import { HttpError, useBuilderResolveManifestQuery } from "core/api"; import { ResolveManifest } from "core/api/types/ConnectorBuilderClient"; import { ConnectorManifest } from "core/api/types/ConnectorManifest"; @@ -20,7 +20,7 @@ export const convertToBuilderFormValues = async ( try { resolveResult = await resolve(manifest, projectId); } catch (e) { - let errorMessage = e.message; + let errorMessage = e instanceof HttpError ? 
e.response.message : e.message; if (errorMessage[0] === '"') { errorMessage = errorMessage.substring(1, errorMessage.length); } diff --git a/airbyte-webapp/src/components/destination/DestinationForm/DestinationForm.tsx b/airbyte-webapp/src/components/destination/DestinationForm/DestinationForm.tsx index 6f59fe20b6c..342cdd9350c 100644 --- a/airbyte-webapp/src/components/destination/DestinationForm/DestinationForm.tsx +++ b/airbyte-webapp/src/components/destination/DestinationForm/DestinationForm.tsx @@ -7,10 +7,9 @@ import { FlexContainer } from "components/ui/Flex"; import { Heading } from "components/ui/Heading"; import { ConnectionConfiguration } from "area/connector/types"; -import { useGetDestinationDefinitionSpecificationAsync, LogsRequestError } from "core/api"; +import { useGetDestinationDefinitionSpecificationAsync } from "core/api"; import { DestinationDefinitionRead } from "core/api/types/AirbyteClient"; import { Connector } from "core/domain/connector"; -import { FormError } from "core/utils/errorStatusMessage"; import { ConnectorCard } from "views/Connector/ConnectorCard"; import { ConnectorCardValues } from "views/Connector/ConnectorForm"; @@ -24,7 +23,6 @@ export interface DestinationFormValues { interface DestinationFormProps { onSubmit: (values: DestinationFormValues) => Promise; destinationDefinitions: DestinationDefinitionRead[]; - error?: FormError | null; selectedDestinationDefinitionId?: string; } @@ -39,7 +37,6 @@ const hasDestinationDefinitionId = (state: unknown): state is { destinationDefin export const DestinationForm: React.FC = ({ onSubmit, destinationDefinitions, - error, selectedDestinationDefinitionId, }) => { const location = useLocation(); @@ -95,7 +92,6 @@ export const DestinationForm: React.FC = ({ selectedConnectorDefinitionSpecification={destinationDefinitionSpecification} selectedConnectorDefinitionId={destinationDefinitionId} onSubmit={onSubmitForm} - jobInfo={LogsRequestError.extractJobInfo(error)} 
supportLevel={selectedDestinationDefinition?.supportLevel} /> ); diff --git a/airbyte-webapp/src/components/forms/DataResidencyDropdown.tsx b/airbyte-webapp/src/components/forms/DataResidencyDropdown.tsx index b871e40280b..d31b518872f 100644 --- a/airbyte-webapp/src/components/forms/DataResidencyDropdown.tsx +++ b/airbyte-webapp/src/components/forms/DataResidencyDropdown.tsx @@ -60,7 +60,8 @@ export const DataResidencyDropdown = ({ export const StandaloneDataResidencyDropdown = ({ name, -}: Pick, "name">): JSX.Element => { + disabled, +}: Pick, "name" | "disabled">): JSX.Element => { const { formatMessage } = useIntl(); const { geographies } = useAvailableGeographies(); @@ -77,5 +78,5 @@ export const StandaloneDataResidencyDropdown = ({ }; }); - return ; + return ; }; diff --git a/airbyte-webapp/src/components/forms/FormSubmissionButtons.module.scss b/airbyte-webapp/src/components/forms/FormSubmissionButtons.module.scss new file mode 100644 index 00000000000..fe5b37d7f1a --- /dev/null +++ b/airbyte-webapp/src/components/forms/FormSubmissionButtons.module.scss @@ -0,0 +1,3 @@ +.reversed { + flex-direction: row-reverse; +} diff --git a/airbyte-webapp/src/components/forms/FormSubmissionButtons.tsx b/airbyte-webapp/src/components/forms/FormSubmissionButtons.tsx index a627bc3fc2b..02bb3f4913c 100644 --- a/airbyte-webapp/src/components/forms/FormSubmissionButtons.tsx +++ b/airbyte-webapp/src/components/forms/FormSubmissionButtons.tsx @@ -4,40 +4,50 @@ import { FormattedMessage } from "react-intl"; import { Button } from "components/ui/Button"; import { FlexContainer } from "components/ui/Flex/FlexContainer"; +import styles from "./FormSubmissionButtons.module.scss"; + interface FormSubmissionButtonsProps { submitKey?: string; cancelKey?: string; allowNonDirtyCancel?: boolean; + allowNonDirtySubmit?: boolean; onCancelClickCallback?: () => void; justify?: "flex-start" | "flex-end"; + reversed?: boolean; + noCancel?: boolean; } export const FormSubmissionButtons: React.FC = 
({ submitKey = "form.submit", cancelKey = "form.cancel", allowNonDirtyCancel = false, + allowNonDirtySubmit = false, onCancelClickCallback, justify = "flex-end", + noCancel, + reversed = false, }) => { // get isSubmitting from useFormState to avoid re-rendering of whole form if they change // reset is a stable function so it's fine to get it from useFormContext const { reset } = useFormContext(); - const { isDirty, isSubmitting } = useFormState(); + const { isValid, isDirty, isSubmitting } = useFormState(); return ( - - - + )} + diff --git a/airbyte-webapp/src/components/settings/SettingsNavigation/index.ts b/airbyte-webapp/src/components/settings/SettingsNavigation/index.ts deleted file mode 100644 index 87c0546437f..00000000000 --- a/airbyte-webapp/src/components/settings/SettingsNavigation/index.ts +++ /dev/null @@ -1 +0,0 @@ -export * from "./SettingsNavigation"; diff --git a/airbyte-webapp/src/views/Connector/RequestConnectorModal/RequestConnectorModal.tsx b/airbyte-webapp/src/components/source/SelectConnector/RequestConnectorModal.tsx similarity index 90% rename from airbyte-webapp/src/views/Connector/RequestConnectorModal/RequestConnectorModal.tsx rename to airbyte-webapp/src/components/source/SelectConnector/RequestConnectorModal.tsx index 7c5f532e995..21e1c22b79a 100644 --- a/airbyte-webapp/src/views/Connector/RequestConnectorModal/RequestConnectorModal.tsx +++ b/airbyte-webapp/src/components/source/SelectConnector/RequestConnectorModal.tsx @@ -10,8 +10,6 @@ import { ModalBody, ModalFooter } from "components/ui/Modal"; import { useNotificationService } from "hooks/services/Notification"; import useRequestConnector from "hooks/services/useRequestConnector"; -import { Values } from "./types"; - interface ConnectorRequest { connectorType: "source" | "destination"; name: string; @@ -20,7 +18,8 @@ interface ConnectorRequest { } interface RequestConnectorModalProps { - onClose: () => void; + onSubmit: () => void; + onCancel: () => void; connectorType: 
ConnectorRequest["connectorType"]; workspaceEmail?: string; searchedConnectorName?: string; @@ -35,8 +34,9 @@ const validationSchema = yup.object().shape({ const RequestControl = FormControl; -const RequestConnectorModal: React.FC = ({ - onClose, +export const RequestConnectorModal: React.FC = ({ + onSubmit, + onCancel, connectorType, searchedConnectorName, workspaceEmail, @@ -45,14 +45,14 @@ const RequestConnectorModal: React.FC = ({ const notificationService = useNotificationService(); const { requestConnector } = useRequestConnector(); - const onSubmit = (values: Values) => { + const onSubmitBtnClick = async (values: ConnectorRequest) => { requestConnector(values); notificationService.registerNotification({ id: "connector.requestConnector.success", text: formatMessage({ id: "connector.request.success" }), type: "success", }); - onClose(); + onSubmit(); }; return ( @@ -64,9 +64,7 @@ const RequestConnectorModal: React.FC = ({ email: workspaceEmail ?? "", }} schema={validationSchema} - onSubmit={async (values) => { - onSubmit(values); - }} + onSubmit={onSubmitBtnClick} trackDirtyChanges > @@ -96,7 +94,7 @@ const RequestConnectorModal: React.FC = ({ @@ -119,5 +117,3 @@ const NameControl = () => { /> ); }; - -export default RequestConnectorModal; diff --git a/airbyte-webapp/src/components/source/SelectConnector/SelectConnector.tsx b/airbyte-webapp/src/components/source/SelectConnector/SelectConnector.tsx index 92cbde0b343..ab49d1e1a9c 100644 --- a/airbyte-webapp/src/components/source/SelectConnector/SelectConnector.tsx +++ b/airbyte-webapp/src/components/source/SelectConnector/SelectConnector.tsx @@ -16,10 +16,10 @@ import { isSourceDefinition } from "core/domain/connector/source"; import { Action, Namespace, useAnalyticsService } from "core/services/analytics"; import { useLocalStorage } from "core/utils/useLocalStorage"; import { useModalService } from "hooks/services/Modal"; -import RequestConnectorModal from "views/Connector/RequestConnectorModal"; import { 
ConnectorGrid } from "./ConnectorGrid"; import { FilterSupportLevel } from "./FilterSupportLevel"; +import { RequestConnectorModal } from "./RequestConnectorModal"; import styles from "./SelectConnector.module.scss"; import { useTrackSelectConnector } from "./useTrackSelectConnector"; @@ -46,7 +46,7 @@ const SelectConnectorSupportLevel: React.FC = ({ }) => { const { formatMessage } = useIntl(); const { email } = useCurrentWorkspace(); - const { openModal, closeModal } = useModalService(); + const { openModal } = useModalService(); const trackSelectConnector = useTrackSelectConnector(connectorType); const [searchTerm, setSearchTerm] = useState(""); const [supportLevelsInLocalStorage, setSelectedSupportLevels] = useLocalStorage( @@ -81,14 +81,15 @@ const SelectConnectorSupportLevel: React.FC = ({ }; const onOpenRequestConnectorModal = () => - openModal({ + openModal({ title: formatMessage({ id: "connector.requestConnector" }), - content: () => ( + content: ({ onComplete, onCancel }) => ( ), size: "sm", diff --git a/airbyte-webapp/src/components/ui/Banner/AlertBanner.module.scss b/airbyte-webapp/src/components/ui/Banner/AlertBanner.module.scss index 7cc28db18ed..ed8d2ebe1a0 100644 --- a/airbyte-webapp/src/components/ui/Banner/AlertBanner.module.scss +++ b/airbyte-webapp/src/components/ui/Banner/AlertBanner.module.scss @@ -20,7 +20,7 @@ color: colors.$white; } -.red { - background-color: colors.$red-400; - color: colors.$white; +.yellow { + background-color: colors.$yellow-500; + color: colors.$black; } diff --git a/airbyte-webapp/src/components/ui/Banner/AlertBanner.tsx b/airbyte-webapp/src/components/ui/Banner/AlertBanner.tsx index fe815af509e..d59afff4568 100644 --- a/airbyte-webapp/src/components/ui/Banner/AlertBanner.tsx +++ b/airbyte-webapp/src/components/ui/Banner/AlertBanner.tsx @@ -11,7 +11,7 @@ interface AlertBannerProps { export const AlertBanner: React.FC = ({ color, message }) => { const bannerStyle = classnames(styles.alertBannerContainer, { 
[styles.default]: color === "default" || !color, - [styles.red]: color === "warning", + [styles.yellow]: color === "warning", }); return
    {message}
    ; diff --git a/airbyte-webapp/src/components/ui/Breadcrumbs/Breadcrumbs.tsx b/airbyte-webapp/src/components/ui/Breadcrumbs/Breadcrumbs.tsx index 9b039209b7e..9044ac468a2 100644 --- a/airbyte-webapp/src/components/ui/Breadcrumbs/Breadcrumbs.tsx +++ b/airbyte-webapp/src/components/ui/Breadcrumbs/Breadcrumbs.tsx @@ -18,9 +18,9 @@ export const Breadcrumbs: React.FC = ({ data }) => { return ( <> {data.length && ( - + {data.map((item, index) => ( - + {item.to ? ( diff --git a/airbyte-webapp/src/components/ui/Button/Button.module.scss b/airbyte-webapp/src/components/ui/Button/Button.module.scss index 3c33fe36721..03dc88f8e28 100644 --- a/airbyte-webapp/src/components/ui/Button/Button.module.scss +++ b/airbyte-webapp/src/components/ui/Button/Button.module.scss @@ -37,7 +37,7 @@ &:disabled:not(.isLoading), &.disabled:not(.isLoading) { - opacity: 0.25; + opacity: 0.5; } .buttonIcon { @@ -262,5 +262,6 @@ border: none; text-decoration: underline; box-shadow: none; + height: unset; } } diff --git a/airbyte-webapp/src/components/ui/Button/Button.stories.tsx b/airbyte-webapp/src/components/ui/Button/Button.stories.tsx index b24d0d4ff97..33c0e1fc807 100644 --- a/airbyte-webapp/src/components/ui/Button/Button.stories.tsx +++ b/airbyte-webapp/src/components/ui/Button/Button.stories.tsx @@ -1,6 +1,4 @@ -import { ComponentStory, ComponentMeta } from "@storybook/react"; - -import { Icon } from "components/ui/Icon"; +import { ComponentMeta, ComponentStory } from "@storybook/react"; import { Button } from "./Button"; @@ -18,7 +16,7 @@ export const Primary = Template.bind({}); Primary.args = { variant: "primary", children: "Primary", - icon: , + icon: "cross", iconPosition: "left", disabled: false, }; @@ -34,7 +32,7 @@ LoadingButton.args = { export const ButtonWithIcon = Template.bind({}); ButtonWithIcon.args = { variant: "primary", - icon: , + icon: "cross", iconPosition: "left", disabled: false, }; @@ -42,7 +40,7 @@ ButtonWithIcon.args = { export const ButtonWithTextAndIconLeft = 
Template.bind({}); ButtonWithTextAndIconLeft.args = { variant: "primary", - icon: , + icon: "cross", iconPosition: "left", children: "Icon Left", disabled: false, @@ -51,7 +49,7 @@ ButtonWithTextAndIconLeft.args = { export const ButtonWithTextAndIconRight = Template.bind({}); ButtonWithTextAndIconRight.args = { variant: "primary", - icon: , + icon: "cross", iconPosition: "right", children: "Icon Right", disabled: false, diff --git a/airbyte-webapp/src/components/ui/Button/Button.tsx b/airbyte-webapp/src/components/ui/Button/Button.tsx index 7802afbbfba..2a63936b383 100644 --- a/airbyte-webapp/src/components/ui/Button/Button.tsx +++ b/airbyte-webapp/src/components/ui/Button/Button.tsx @@ -10,14 +10,17 @@ export const Button = React.forwardRef((props, r const { full = false, size = "xs", - iconPosition = "left", variant = "primary", children, className, - icon, isLoading, width, disabled, + icon, + iconSize, + iconColor, + iconClassName, + iconPosition = "left", ...buttonProps } = props; @@ -50,7 +53,7 @@ export const Button = React.forwardRef((props, r {isLoading && } {icon && iconPosition === "left" && - React.cloneElement(icon, { + React.cloneElement(, { className: classNames(styles.buttonIcon, { [styles.positionLeft]: true, [styles.isRegularIcon]: true, @@ -60,8 +63,8 @@ export const Button = React.forwardRef((props, r {children} {icon && iconPosition === "right" && - React.cloneElement(icon, { - className: classNames(icon.props.className, styles.buttonIcon, { + React.cloneElement(, { + className: classNames(styles.buttonIcon, { [styles.positionRight]: true, [styles.isRegularIcon]: true, [styles.withLabel]: Boolean(children), diff --git a/airbyte-webapp/src/components/ui/Button/types.tsx b/airbyte-webapp/src/components/ui/Button/types.tsx index 8ef14ffef95..29a9a0aef89 100644 --- a/airbyte-webapp/src/components/ui/Button/types.tsx +++ b/airbyte-webapp/src/components/ui/Button/types.tsx @@ -1,5 +1,7 @@ import React from "react"; +import { IconProps } from 
"../Icon"; + type ButtonSize = "xs" | "sm" | "lg"; export type ButtonVariant = | "primary" @@ -14,11 +16,14 @@ export type ButtonVariant = export interface ButtonProps extends React.ButtonHTMLAttributes { full?: boolean; narrow?: boolean; - icon?: React.ReactElement; - iconPosition?: "left" | "right"; isLoading?: boolean; size?: ButtonSize; variant?: ButtonVariant; width?: number; + icon?: IconProps["type"]; + iconSize?: IconProps["size"]; + iconColor?: IconProps["color"]; + iconClassName?: IconProps["className"]; + iconPosition?: "left" | "right"; "data-testid"?: string; } diff --git a/airbyte-webapp/src/components/ui/Card/Card.module.scss b/airbyte-webapp/src/components/ui/Card/Card.module.scss index 6a356d37c0d..970cd679ce4 100644 --- a/airbyte-webapp/src/components/ui/Card/Card.module.scss +++ b/airbyte-webapp/src/components/ui/Card/Card.module.scss @@ -8,12 +8,15 @@ $default-padding: variables.$spacing-xl; .cardHeader { display: flex; justify-content: space-between; - align-items: center; color: colors.$dark-blue; padding: $default-padding $default-padding 0 $default-padding; border-top-left-radius: $default-border-radius; border-top-right-radius: $default-border-radius; + .helpText { + color: colors.$grey-400; + } + &.withBorderBottom { border-bottom: colors.$grey-100 variables.$border-thin solid; padding-bottom: $default-padding; @@ -37,10 +40,6 @@ $default-padding: variables.$spacing-xl; background: colors.$foreground; border-radius: variables.$border-radius-lg; - .infoTooltip { - color: colors.$foreground; - } - &:has(.cardBody:empty) .cardHeader { // apply the bottom border to the header only if it has a rendered sibling (the card content) // checking for `children` in tsx isn't enough as the passed child can return null @@ -75,3 +74,7 @@ $default-padding: variables.$spacing-xl; .noPadding { padding: 0; } + +.infoTooltip { + color: colors.$foreground; +} diff --git a/airbyte-webapp/src/components/ui/Card/Card.stories.tsx 
b/airbyte-webapp/src/components/ui/Card/Card.stories.tsx index aa72598afed..f2299570c65 100644 --- a/airbyte-webapp/src/components/ui/Card/Card.stories.tsx +++ b/airbyte-webapp/src/components/ui/Card/Card.stories.tsx @@ -65,3 +65,17 @@ CollapsibleWithPreviewInfo.args = {
    ), }; + +export const CardWithHelpText = Template.bind({}); +CardWithHelpText.args = { + title: "Title", + children: "Card content here", + helpText: "This is helpful text", +}; + +export const CardWithHelpDescription = Template.bind({}); +CardWithHelpDescription.args = { + title: "Title", + children: "Card content here", + description: "This is descriptive text", +}; diff --git a/airbyte-webapp/src/components/ui/Card/Card.tsx b/airbyte-webapp/src/components/ui/Card/Card.tsx index 2f9392188ac..5f7c137e185 100644 --- a/airbyte-webapp/src/components/ui/Card/Card.tsx +++ b/airbyte-webapp/src/components/ui/Card/Card.tsx @@ -16,6 +16,7 @@ interface CardProps { * The title of the card */ title?: string; + helpText?: string; description?: React.ReactNode; /** * override card container styles @@ -46,6 +47,7 @@ interface CardProps { export const Card: React.FC> = ({ children, title, + helpText, description, className, bodyClassName, @@ -61,7 +63,9 @@ export const Card: React.FC> = ({ const [isCollapsed, toggleIsCollapsed] = useToggle(defaultCollapsedState); const headerTitle = ( -
    > = ({ )} )} -
    + {helpText && ( + + {helpText} + + )} +
    ); return ( diff --git a/airbyte-webapp/src/components/ui/CodeEditor/CodeEditor.tsx b/airbyte-webapp/src/components/ui/CodeEditor/CodeEditor.tsx index 935eb89eda7..a95f6341c67 100644 --- a/airbyte-webapp/src/components/ui/CodeEditor/CodeEditor.tsx +++ b/airbyte-webapp/src/components/ui/CodeEditor/CodeEditor.tsx @@ -117,6 +117,7 @@ export const CodeEditor: React.FC = ({ top: paddingTopValue, } : {}, + fixedOverflowWidgets: true, }} /> ); diff --git a/airbyte-webapp/src/components/ui/Collapsible/Collapsible.module.scss b/airbyte-webapp/src/components/ui/Collapsible/Collapsible.module.scss index d1459bacd05..c82f709d1bd 100644 --- a/airbyte-webapp/src/components/ui/Collapsible/Collapsible.module.scss +++ b/airbyte-webapp/src/components/ui/Collapsible/Collapsible.module.scss @@ -75,7 +75,10 @@ $icon-width: 18px; .body { width: 100%; - padding-left: calc($icon-width + variables.$spacing-sm); + + &:not(&--noPadding) { + padding-left: calc($icon-width + variables.$spacing-sm); + } > div:last-child { margin-bottom: 0; diff --git a/airbyte-webapp/src/components/ui/Collapsible/Collapsible.tsx b/airbyte-webapp/src/components/ui/Collapsible/Collapsible.tsx index 62837845cc9..fca9d24ee00 100644 --- a/airbyte-webapp/src/components/ui/Collapsible/Collapsible.tsx +++ b/airbyte-webapp/src/components/ui/Collapsible/Collapsible.tsx @@ -18,6 +18,7 @@ interface CollapsibleProps { hideWhenEmpty?: boolean; "data-testid"?: string; initiallyOpen?: boolean; + noBodyPadding?: boolean; onClick?: (newOpenState: boolean) => void; } @@ -31,6 +32,7 @@ export const Collapsible: React.FC> = children, "data-testid": dataTestId, initiallyOpen = false, + noBodyPadding = false, onClick, }) => { const childrenCount = React.Children.count(children); @@ -70,7 +72,10 @@ export const Collapsible: React.FC> = {showErrorIndicator && }
    - + {children}
    diff --git a/airbyte-webapp/src/components/ui/CopyButton/CopyButton.module.scss b/airbyte-webapp/src/components/ui/CopyButton/CopyButton.module.scss index f6afc51aa0b..6393937c7d0 100644 --- a/airbyte-webapp/src/components/ui/CopyButton/CopyButton.module.scss +++ b/airbyte-webapp/src/components/ui/CopyButton/CopyButton.module.scss @@ -3,13 +3,15 @@ .button { padding: variables.$spacing-lg; + position: relative; } .success { position: absolute; width: 13px; height: 13px; - top: -13px; - right: -15px; + top: -6px; + right: -6px; background-color: colors.$foreground; + border-radius: 50%; } diff --git a/airbyte-webapp/src/components/ui/CopyButton/CopyButton.tsx b/airbyte-webapp/src/components/ui/CopyButton/CopyButton.tsx index 86af946854b..84251915974 100644 --- a/airbyte-webapp/src/components/ui/CopyButton/CopyButton.tsx +++ b/airbyte-webapp/src/components/ui/CopyButton/CopyButton.tsx @@ -8,7 +8,7 @@ import { Icon } from "../Icon"; interface CopyButtonProps { className?: string; - content: string; + content: string | (() => string); title?: string; } @@ -28,7 +28,9 @@ export const CopyButton: React.FC> = ({ clearTimeout(timeoutRef.current); } - navigator.clipboard.writeText(content).then(() => { + const text = typeof content === "string" ? content : content(); + + navigator.clipboard.writeText(text).then(() => { setCopied(true); timeoutRef.current = setTimeout(() => setCopied(false), 2500); }); @@ -40,14 +42,11 @@ export const CopyButton: React.FC> = ({ className={classNames(className, styles.button)} variant="secondary" title={title || formatMessage({ id: "copyButton.title" })} - icon={ -
    - - {copied && } -
    - } onClick={handleClick} + icon={children ? "copy" : undefined} > + {copied && } + {children ? undefined : } {children} ); diff --git a/airbyte-webapp/src/components/ui/DatePicker/CustomHeader.tsx b/airbyte-webapp/src/components/ui/DatePicker/CustomHeader.tsx index 79bd3ecb50a..25e856e7034 100644 --- a/airbyte-webapp/src/components/ui/DatePicker/CustomHeader.tsx +++ b/airbyte-webapp/src/components/ui/DatePicker/CustomHeader.tsx @@ -1,4 +1,4 @@ -import { getYear, getMonth } from "date-fns"; +import { getMonth, getYear } from "date-fns"; import range from "lodash/range"; import React from "react"; import { ReactDatePickerCustomHeaderProps, ReactDatePickerProps } from "react-datepicker"; @@ -6,7 +6,6 @@ import { ReactDatePickerCustomHeaderProps, ReactDatePickerProps } from "react-da import styles from "./CustomHeader.module.scss"; import { Button } from "../Button"; import { FlexContainer } from "../Flex"; -import { Icon } from "../Icon"; import { Text } from "../Text"; /** @@ -50,7 +49,8 @@ export const CustomHeader: React.FC<
    diff --git a/airbyte-webapp/src/components/ui/Link/Link.tsx b/airbyte-webapp/src/components/ui/Link/Link.tsx index acec03a48cb..ce077141f10 100644 --- a/airbyte-webapp/src/components/ui/Link/Link.tsx +++ b/airbyte-webapp/src/components/ui/Link/Link.tsx @@ -9,6 +9,7 @@ export interface LinkProps { opensInNewTab?: boolean; variant?: "default" | "primary"; onClick?: ComponentProps["onClick"]; + title?: string; } interface InternalLinkProps extends LinkProps { diff --git a/airbyte-webapp/src/components/ui/Message/Message.module.scss b/airbyte-webapp/src/components/ui/Message/Message.module.scss index f6d028a63c8..003b6c132a3 100644 --- a/airbyte-webapp/src/components/ui/Message/Message.module.scss +++ b/airbyte-webapp/src/components/ui/Message/Message.module.scss @@ -23,11 +23,6 @@ $message-icon-size: 22px; } .messageContainer { - display: flex; - flex-direction: row; - align-items: flex-start; - gap: variables.$spacing-xs; - box-sizing: border-box; padding: variables.$spacing-sm; border-radius: variables.$border-radius-md; @@ -37,21 +32,18 @@ $message-icon-size: 22px; @include type("error", colors.$red-300, colors.$red-50); } -.messageContainerWithChildren { - border-radius: variables.$border-radius-md variables.$border-radius-md 0 0; -} - @mixin children-type($name, $color, $background) { @include type($name, $color, $background); &.#{$name} { color: colors.$dark-blue-900; - border: 1px solid $background; + border: variables.$spacing-sm solid $background; + padding: variables.$spacing-sm; background: colors.$foreground; } } .childrenContainer { - border-radius: 0 0 variables.$border-radius-md variables.$border-radius-md; + border-radius: variables.$border-radius-md; font-size: variables.$font-size-lg; @include children-type("info", colors.$blue-400, colors.$blue-50); @@ -62,9 +54,6 @@ $message-icon-size: 22px; .iconContainer { padding: 4px; - display: flex; - align-items: center; - justify-content: center; } .messageIcon { @@ -94,8 +83,7 @@ $message-icon-size: 
22px; text-align: left; } -.closeButton { - svg { - color: colors.$dark-blue-900; - } +.alignRightColumn { + align-self: stretch; // flex equivalent of `height: 100%` + max-height: calc(32px + 9px); // 32px for the button's height, allow up to 9px "padding" on the top } diff --git a/airbyte-webapp/src/components/ui/Message/Message.stories.tsx b/airbyte-webapp/src/components/ui/Message/Message.stories.tsx index 621cdfbdbfd..d02ecfc04f6 100644 --- a/airbyte-webapp/src/components/ui/Message/Message.stories.tsx +++ b/airbyte-webapp/src/components/ui/Message/Message.stories.tsx @@ -98,3 +98,18 @@ WithChildren.args = {
    ), }; + +export const WithExpandableChildren = Template.bind({}); +WithExpandableChildren.args = { + text: "This is an error with more details, but you have to expand to see them.", + secondaryText: "This is a secondary text", + type: "error", + children: ( + + Learn More + Stacktrace + Logs + + ), + isExpandable: true, +}; diff --git a/airbyte-webapp/src/components/ui/Message/Message.tsx b/airbyte-webapp/src/components/ui/Message/Message.tsx index f70dafc46bf..1daef281998 100644 --- a/airbyte-webapp/src/components/ui/Message/Message.tsx +++ b/airbyte-webapp/src/components/ui/Message/Message.tsx @@ -1,11 +1,12 @@ import classNames from "classnames"; -import React from "react"; +import React, { useState } from "react"; import { Icon, IconType } from "components/ui/Icon"; import { Text } from "components/ui/Text"; import styles from "./Message.module.scss"; import { Button, ButtonProps } from "../Button"; +import { FlexContainer } from "../Flex"; export type MessageType = "warning" | "success" | "error" | "info"; @@ -23,6 +24,7 @@ export interface MessageProps { hideIcon?: boolean; iconOverride?: keyof typeof ICON_MAPPING; textClassName?: string; + isExpandable?: boolean; } const ICON_MAPPING: Readonly> = { @@ -65,14 +67,18 @@ export const Message: React.FC> = ({ children, iconOverride, textClassName, + isExpandable = false, }) => { + const [isExpanded, setIsExpanded] = useState(false); + + const handleToggleExpand = () => { + setIsExpanded((isExpanded) => !isExpanded); + }; + + const isRenderingChildren = children && (!isExpandable || isExpanded); + const mainMessage = ( -
    + <> {!hideIcon && (
    @@ -86,38 +92,44 @@ export const Message: React.FC> = ({ )}
    - {onAction && ( - + {(onAction || isExpandable || onClose) && ( + + {onAction && ( + + )} + {isExpandable && ( + + )} + {onClose && ( +
    + ); - if (!children) { - return mainMessage; - } - return ( -
    - {mainMessage} -
    {children}
    -
    + + + {mainMessage} + + {isRenderingChildren && ( +
    {children}
    + )} +
    ); }; diff --git a/airbyte-webapp/src/components/ui/Modal/Modal.tsx b/airbyte-webapp/src/components/ui/Modal/Modal.tsx index 85d80cf2174..d54f0b75542 100644 --- a/airbyte-webapp/src/components/ui/Modal/Modal.tsx +++ b/airbyte-webapp/src/components/ui/Modal/Modal.tsx @@ -13,11 +13,7 @@ import { Overlay } from "../Overlay"; export interface ModalProps { title?: string | React.ReactNode; /** - * Function to call when the user clicks on the close button (cross icon). - */ - onClose?: (reason: string) => void; - /** - * Function to call when the user clicks on overlay or press escape. + * Function to call when the user press Escape, clicks on Backdrop clicks or X-button clicks. * Note: if openModal function was called with "preventCancel: true" then this function will not be called. */ onCancel?: () => void; @@ -42,7 +38,6 @@ export const Modal: React.FC> = ({ children, title, size, - onClose, onCancel, cardless, testId, @@ -58,11 +53,6 @@ export const Modal: React.FC> = ({ } }; - const onModalClose = () => { - setIsOpen(false); - onClose?.("closeButtonClicked"); - }; - const Wrapper = wrapIn || "div"; return ( @@ -85,13 +75,15 @@ export const Modal: React.FC> = ({ {title} - + {onCancel && ( + + )} {children} diff --git a/airbyte-webapp/src/components/ui/Separator/Separator.module.scss b/airbyte-webapp/src/components/ui/Separator/Separator.module.scss new file mode 100644 index 00000000000..a0f26ca0b58 --- /dev/null +++ b/airbyte-webapp/src/components/ui/Separator/Separator.module.scss @@ -0,0 +1,9 @@ +@use "scss/colors"; +@use "scss/variables"; + +.separator { + margin: 0 auto; + border: none; + width: 100%; + border-bottom: variables.$border-thin solid colors.$grey-100; +} diff --git a/airbyte-webapp/src/components/ui/Separator/Separator.tsx b/airbyte-webapp/src/components/ui/Separator/Separator.tsx new file mode 100644 index 00000000000..4ef36511305 --- /dev/null +++ b/airbyte-webapp/src/components/ui/Separator/Separator.tsx @@ -0,0 +1,11 @@ +import classNames 
from "classnames"; + +import styles from "./Separator.module.scss"; + +interface SeparatorProps { + className?: string; +} + +export const Separator: React.FC = ({ className }) => { + return
    ; +}; diff --git a/airbyte-webapp/src/components/ui/Separator/index.ts b/airbyte-webapp/src/components/ui/Separator/index.ts new file mode 100644 index 00000000000..0c664312946 --- /dev/null +++ b/airbyte-webapp/src/components/ui/Separator/index.ts @@ -0,0 +1 @@ +export { Separator } from "./Separator"; diff --git a/airbyte-webapp/src/components/ui/SignInButton/SignInButton.module.scss b/airbyte-webapp/src/components/ui/SignInButton/SignInButton.module.scss new file mode 100644 index 00000000000..839cad92f6e --- /dev/null +++ b/airbyte-webapp/src/components/ui/SignInButton/SignInButton.module.scss @@ -0,0 +1,41 @@ +@use "scss/variables"; +@use "scss/colors"; + +.sso { + text-decoration: none; +} + +.signInButton { + background: colors.$foreground; + border: variables.$border-thin solid colors.$grey-300; + color: colors.$inverse; + height: 46px; + justify-content: center; + display: flex; + align-items: center; + font-size: 16px; + font-weight: 500; + width: 100%; + padding: variables.$spacing-md; + gap: variables.$spacing-md; + border-radius: variables.$border-radius-md; + transition: all variables.$transition; + cursor: pointer; + + &:hover, + &:focus { + box-shadow: + 0 1px 3px rgba(53, 53, 66, 20%), + 0 1px 2px rgba(53, 53, 66, 12%), + 0 1px 1px rgba(53, 53, 66, 14%); + } +} + +.sso:hover { + color: colors.$inverse; +} + +.error { + margin-top: variables.$spacing-lg; + color: colors.$red; +} diff --git a/airbyte-webapp/src/components/ui/SignInButton/SignInButton.tsx b/airbyte-webapp/src/components/ui/SignInButton/SignInButton.tsx new file mode 100644 index 00000000000..2347d5ef698 --- /dev/null +++ b/airbyte-webapp/src/components/ui/SignInButton/SignInButton.tsx @@ -0,0 +1,20 @@ +import { PropsWithChildren } from "react"; + +import styles from "./SignInButton.module.scss"; + +interface SignInButtonProps { + disabled?: boolean; + onClick: () => void; +} + +export const SignInButton: React.FC> = ({ + children, + disabled = false, + onClick, +}) => { + return ( 
+ + ); +}; diff --git a/airbyte-webapp/src/components/ui/SignInButton/index.ts b/airbyte-webapp/src/components/ui/SignInButton/index.ts new file mode 100644 index 00000000000..b89da15994e --- /dev/null +++ b/airbyte-webapp/src/components/ui/SignInButton/index.ts @@ -0,0 +1 @@ +export * from "./SignInButton"; diff --git a/airbyte-webapp/src/components/ui/SwitchNext/SwitchNext.module.scss b/airbyte-webapp/src/components/ui/SwitchNext/SwitchNext.module.scss new file mode 100644 index 00000000000..3b226d01ebf --- /dev/null +++ b/airbyte-webapp/src/components/ui/SwitchNext/SwitchNext.module.scss @@ -0,0 +1,90 @@ +@use "scss/colors"; +@use "scss/variables"; + +$border-radius: 999px; +$button-width: 100px; +$button-height: 24px; +$knob-width: calc($button-height - variables.$border-thin * 2); + +@keyframes candystripe { + to { + background-position: 60px 0; + } +} + +.button { + display: flex; + flex-direction: column; + align-items: flex-start; + padding: 0; + width: $button-width; + height: $button-height; + border-radius: $border-radius; + border: variables.$border-thin solid colors.$grey-200; + background-color: colors.$grey-100; + overflow: hidden; + position: relative; + cursor: pointer; + + &.checked { + background-color: colors.$blue; + align-items: flex-end; + } + + &:disabled { + opacity: 0.7; + cursor: not-allowed; + } + + .stripe { + height: 100%; + width: 100%; + position: absolute; + top: 0; + left: 0; + + &.loading { + background-image: linear-gradient(-65deg, + transparent 25%, + colors.$blue-200 25%, + colors.$blue-200 50%, + transparent 50%, + transparent 75%, + colors.$blue-200 75%, + colors.$blue-200 100% + ); + background-size: 60px 80px; + background-repeat: repeat-x; + animation: candystripe 1s linear infinite; + + &.reverse { + animation-direction: reverse; + } + } + } + + .text { + text-transform: uppercase; + position: absolute; + text-align: center; + top: 50%; + left: 50%; + transform: translate(-35%, -50%); + font-size: 
variables.$font-size-sm; + font-weight: 500; + color: colors.$grey-500; + + &.checkedText { + color: colors.$white; + transform: translate(-70%, -50%); + } + } + + .knob { + border-radius: $border-radius; + width: $knob-width; + background: colors.$white; + flex: 1; + z-index: 1; + } +} \ No newline at end of file diff --git a/airbyte-webapp/src/components/ui/SwitchNext/SwitchNext.stories.tsx b/airbyte-webapp/src/components/ui/SwitchNext/SwitchNext.stories.tsx new file mode 100644 index 00000000000..648f4900618 --- /dev/null +++ b/airbyte-webapp/src/components/ui/SwitchNext/SwitchNext.stories.tsx @@ -0,0 +1,32 @@ +import { action } from "@storybook/addon-actions"; +import { Meta, StoryFn } from "@storybook/react"; +import { useState } from "react"; + +import { SwitchNext, SwitchNextProps } from "./SwitchNext"; + +export default { + title: "Ui/SwitchNext", + component: SwitchNext, + argTypes: { + checked: { control: "boolean" }, + }, +} as Meta; + +const SwitchNextWithState: StoryFn = ({ checked: initial = false, ...props }: SwitchNextProps) => { + const [checked, setChecked] = useState(initial); + const [loading, setLoading] = useState(false); + + const handleChange = (checked: boolean) => { + action("Switch toggled")(checked); + setLoading(true); + setTimeout(() => { + setChecked(checked); + setLoading(false); + }, 1500); + }; + + return ; +}; + +export const Primary = SwitchNextWithState.bind({}); +Primary.args = {}; diff --git a/airbyte-webapp/src/components/ui/SwitchNext/SwitchNext.tsx b/airbyte-webapp/src/components/ui/SwitchNext/SwitchNext.tsx new file mode 100644 index 00000000000..9720eb4d2e1 --- /dev/null +++ b/airbyte-webapp/src/components/ui/SwitchNext/SwitchNext.tsx @@ -0,0 +1,65 @@ +import { Switch } from "@headlessui/react"; +import classNames from "classnames"; +import { motion } from "framer-motion"; +import React from "react"; +import { useIntl } from "react-intl"; + +import styles from "./SwitchNext.module.scss"; +import { Text } from "../Text"; + 
+export interface SwitchNextProps { + checked: boolean; + disabled?: boolean; + loading?: boolean; + onChange: (checked: boolean) => void; + name?: string; + checkedText?: string; + uncheckedText?: string; + className?: string; + testId?: string; +} + +export const SwitchNext: React.FC = (props) => { + const { formatMessage } = useIntl(); + + const { + name, + checked, + disabled, + loading, + onChange, + checkedText = formatMessage({ id: "ui.switch.enabled" }), + uncheckedText = formatMessage({ id: "ui.switch.disabled" }), + testId, + className, + } = props; + + return ( + + + + + {checked ? checkedText : uncheckedText} + + + ); +}; diff --git a/airbyte-webapp/src/components/ui/SwitchNext/index.tsx b/airbyte-webapp/src/components/ui/SwitchNext/index.tsx new file mode 100644 index 00000000000..091cf7825c4 --- /dev/null +++ b/airbyte-webapp/src/components/ui/SwitchNext/index.tsx @@ -0,0 +1 @@ +export { SwitchNext } from "./SwitchNext"; diff --git a/airbyte-webapp/src/components/ui/Table/Table.module.scss b/airbyte-webapp/src/components/ui/Table/Table.module.scss index a0523e31219..30b6dcbc994 100644 --- a/airbyte-webapp/src/components/ui/Table/Table.module.scss +++ b/airbyte-webapp/src/components/ui/Table/Table.module.scss @@ -16,7 +16,7 @@ $border-radius: variables.$border-radius-lg; } } -.thead { +.thead--sticky { position: sticky; top: 0; z-index: z-indices.$tableScroll; diff --git a/airbyte-webapp/src/components/ui/Table/Table.tsx b/airbyte-webapp/src/components/ui/Table/Table.tsx index 6ad70332a31..42441be2260 100644 --- a/airbyte-webapp/src/components/ui/Table/Table.tsx +++ b/airbyte-webapp/src/components/ui/Table/Table.tsx @@ -30,6 +30,7 @@ export interface TableProps { testId?: string; columnVisibility?: VisibilityState; sorting?: boolean; + stickyHeaders?: boolean; getRowClassName?: (data: T) => string | undefined; initialSortBy?: Array<{ id: string; desc: boolean }>; /** @@ -56,6 +57,7 @@ export const Table = ({ expandedRow, columnVisibility, 
getRowClassName, + stickyHeaders = true, sorting = true, initialSortBy, virtualized = false, @@ -89,7 +91,7 @@ export const Table = ({ ); const TableHead: TableComponents["TableHead"] = React.forwardRef((props, ref) => ( - + )); TableHead.displayName = "TableHead"; @@ -210,7 +212,7 @@ export const Table = ({ })} data-testid={testId} > - {headerContent()} + {headerContent()} {rows.map((row) => ( diff --git a/airbyte-webapp/src/components/ui/Text/MaskedText.tsx b/airbyte-webapp/src/components/ui/Text/MaskedText.tsx index dd27403defd..a4c15e8718a 100644 --- a/airbyte-webapp/src/components/ui/Text/MaskedText.tsx +++ b/airbyte-webapp/src/components/ui/Text/MaskedText.tsx @@ -2,10 +2,9 @@ import { PropsWithChildren } from "react"; import { FormattedMessage } from "react-intl"; import { useToggle } from "react-use"; -import { TextProps, Text } from "./Text"; +import { Text, TextProps } from "./Text"; import { Button } from "../Button"; import { FlexContainer } from "../Flex"; -import { Icon } from "../Icon"; export const MaskedText: React.FC> = ({ children, ...props }) => { const [maskText, toggleMaskText] = useToggle(true); @@ -15,7 +14,7 @@ export const MaskedText: React.FC> = ({ children, . {maskText ? 
: children} - + + + + + + + + ); +}; diff --git a/airbyte-webapp/src/core/errors/components/index.ts b/airbyte-webapp/src/core/errors/components/index.ts new file mode 100644 index 00000000000..c0b3e8e4c4b --- /dev/null +++ b/airbyte-webapp/src/core/errors/components/index.ts @@ -0,0 +1 @@ +export { DefaultErrorBoundary } from "./DefaultErrorBoundary"; diff --git a/airbyte-webapp/src/core/errors/components/pixel-octavia.png b/airbyte-webapp/src/core/errors/components/pixel-octavia.png new file mode 100644 index 00000000000..7b17f8509c3 Binary files /dev/null and b/airbyte-webapp/src/core/errors/components/pixel-octavia.png differ diff --git a/airbyte-webapp/src/core/errors/index.ts b/airbyte-webapp/src/core/errors/index.ts new file mode 100644 index 00000000000..b80d763f671 --- /dev/null +++ b/airbyte-webapp/src/core/errors/index.ts @@ -0,0 +1,2 @@ +export { DefaultErrorBoundary } from "./components"; +export { I18nError } from "./I18nError"; diff --git a/airbyte-webapp/src/core/form/FormBuildError.ts b/airbyte-webapp/src/core/form/FormBuildError.ts deleted file mode 100644 index 0816e3f0c5d..00000000000 --- a/airbyte-webapp/src/core/form/FormBuildError.ts +++ /dev/null @@ -1,14 +0,0 @@ -export class FormBuildError extends Error { - __type = "form.build"; - - constructor( - public message: string, - public connectorDefinitionId?: string - ) { - super(message); - } -} - -export function isFormBuildError(error: { __type?: string }): error is FormBuildError { - return error.__type === "form.build"; -} diff --git a/airbyte-webapp/src/core/form/FormBuildError.tsx b/airbyte-webapp/src/core/form/FormBuildError.tsx new file mode 100644 index 00000000000..55bed5c887d --- /dev/null +++ b/airbyte-webapp/src/core/form/FormBuildError.tsx @@ -0,0 +1,24 @@ +import { ExternalLink } from "components/ui/Link"; + +import { I18nError } from "core/errors"; +import { links } from "core/utils/links"; + +export class FormBuildError extends I18nError { + constructor( + public message: 
string, + public connectorDefinitionId?: string + ) { + super(message, { + docLink: (node: React.ReactNode) => ( + + {node} + + ), + }); + this.name = "FormBuildError"; + } +} + +export function isFormBuildError(error: unknown): error is FormBuildError { + return error instanceof FormBuildError; +} diff --git a/airbyte-webapp/src/core/services/analytics/pageTrackingCodes.tsx b/airbyte-webapp/src/core/services/analytics/pageTrackingCodes.tsx index 1120516a9c6..595b4d2d846 100644 --- a/airbyte-webapp/src/core/services/analytics/pageTrackingCodes.tsx +++ b/airbyte-webapp/src/core/services/analytics/pageTrackingCodes.tsx @@ -12,6 +12,10 @@ export enum PageTrackingCodes { DESTINATION_ITEM = "Destination.Item", DESTINATION_ITEM_SETTINGS = "Destination.Item.Settings", CONNECTIONS_NEW = "Connections.New", + CONNECTIONS_NEW_DEFINE_SOURCE = "Connections.New.DefineSource", + CONNECTIONS_NEW_DEFINE_DESTINATION = "Connections.New.DefineDestination", + CONNECTIONS_NEW_SELECT_STREAMS = "Connections.New.SelectStreams", + CONNECTIONS_NEW_CONFIGURE_CONNECTION = "Connections.New.ConfigureConnection", CONNECTIONS_LIST = "Connections.List", CONNECTIONS_ITEM = "Connections.Item", CONNECTIONS_ITEM_STATUS = "Connections.Item.Status", diff --git a/airbyte-webapp/src/core/services/analytics/types.ts b/airbyte-webapp/src/core/services/analytics/types.ts index fc56fe82fc9..f514ccf1d8c 100644 --- a/airbyte-webapp/src/core/services/analytics/types.ts +++ b/airbyte-webapp/src/core/services/analytics/types.ts @@ -10,6 +10,9 @@ export const enum Namespace { SCHEMA = "Schema", ERD = "ERD", SETTINGS = "Settings", + SYNC_QUESTIONNAIRE = "SyncQuestionnaire", + STREAM_SELECTION = "StreamSelection", + FORM = "Form", } export const enum Action { @@ -39,6 +42,11 @@ export const enum Action { DOWNLOAD_SCHEDULER_LOGS = "DownloadSchedulerLogs", UPGRADE_VERSION = "UpgradeVersion", DISCOVER_SCHEMA = "DiscoverSchema", + DISPLAYED = "Displayed", + ANSWERED = "Answered", + APPLIED = "Applied", + SET_SYNC_MODE = 
"SetSyncMode", + DISMISSED_CHANGES_MODAL = "DismissedChangesModal", // Connector Builder Actions CONNECTOR_BUILDER_START = "ConnectorBuilderStart", diff --git a/airbyte-webapp/src/core/services/auth/AuthContext.ts b/airbyte-webapp/src/core/services/auth/AuthContext.ts index 8077eaeb520..90d41141b4b 100644 --- a/airbyte-webapp/src/core/services/auth/AuthContext.ts +++ b/airbyte-webapp/src/core/services/auth/AuthContext.ts @@ -17,7 +17,9 @@ export type AuthSignUp = (form: SignupFormValues) => Promise; export type AuthChangeName = (name: string) => Promise; export type AuthSendEmailVerification = () => Promise; -export type AuthVerifyEmail = (code: string) => Promise; +export type AuthVerifyEmail = FirebaseVerifyEmail | KeycloakVerifyEmail; +type FirebaseVerifyEmail = (code: string) => Promise; +type KeycloakVerifyEmail = () => Promise; export type AuthLogout = () => Promise; export type OAuthLoginState = "waiting" | "loading" | "done"; @@ -38,7 +40,9 @@ export interface AuthContextApi { inited: boolean; emailVerified: boolean; loggedOut: boolean; + /** @deprecated use `provider` instead */ providers: string[] | null; + provider: string | null; getAccessToken?: () => Promise; hasPasswordLogin?: () => boolean; login?: AuthLogin; diff --git a/airbyte-webapp/src/core/services/auth/CommunityAuthService.tsx b/airbyte-webapp/src/core/services/auth/CommunityAuthService.tsx index 15fcc2bc02c..0a8a158c9a5 100644 --- a/airbyte-webapp/src/core/services/auth/CommunityAuthService.tsx +++ b/airbyte-webapp/src/core/services/auth/CommunityAuthService.tsx @@ -15,6 +15,7 @@ export const CommunityAuthService: React.FC> = ({ chi inited: true, emailVerified: false, providers: [], + provider: null, loggedOut: false, }} > diff --git a/airbyte-webapp/src/core/services/auth/EnterpriseAuthService.tsx b/airbyte-webapp/src/core/services/auth/EnterpriseAuthService.tsx index 631eb4889b8..87c7b9f7e76 100644 --- a/airbyte-webapp/src/core/services/auth/EnterpriseAuthService.tsx +++ 
b/airbyte-webapp/src/core/services/auth/EnterpriseAuthService.tsx @@ -128,6 +128,7 @@ const AuthServiceProvider: React.FC> = ({ children }) inited, emailVerified: false, providers: [], + provider: null, loggedOut: false, logout: keycloakAuth.signoutRedirect, getAccessToken, diff --git a/airbyte-webapp/src/core/services/features/constants.ts b/airbyte-webapp/src/core/services/features/constants.ts index f4917e0ed15..f261f7e148a 100644 --- a/airbyte-webapp/src/core/services/features/constants.ts +++ b/airbyte-webapp/src/core/services/features/constants.ts @@ -13,13 +13,16 @@ export const defaultEnterpriseFeatures = [ FeatureItem.AllowAllRBACRoles, FeatureItem.APITokenManagement, FeatureItem.ConnectionHistoryGraphs, + FeatureItem.DisplayOrganizationUsers, FeatureItem.EnterpriseBranding, + FeatureItem.IndicateGuestUsers, FeatureItem.MultiWorkspaceUI, FeatureItem.RBAC, ]; export const defaultCloudFeatures = [ FeatureItem.AllowAutoDetectSchema, + FeatureItem.AllowInAppSupportChat, FeatureItem.AllowOAuthConnector, FeatureItem.AllowChangeDataGeographies, FeatureItem.AllowDBTCloudIntegration, diff --git a/airbyte-webapp/src/core/services/features/types.tsx b/airbyte-webapp/src/core/services/features/types.tsx index 068f92b4c6f..13767cd039c 100644 --- a/airbyte-webapp/src/core/services/features/types.tsx +++ b/airbyte-webapp/src/core/services/features/types.tsx @@ -6,6 +6,7 @@ export enum FeatureItem { AllowAllRBACRoles = "ALLOW_ALL_RBAC_ROLES", AllowAutoDetectSchema = "ALLOW_AUTO_DETECT_SCHEMA", + AllowInAppSupportChat = "ALLOW_IN_APP_SUPPORT_CHAT", AllowUploadCustomImage = "ALLOW_UPLOAD_CUSTOM_IMAGE", AllowCustomDBT = "ALLOW_CUSTOM_DBT", AllowDBTCloudIntegration = "ALLOW_DBT_CLOUD_INTEGRATION", @@ -17,9 +18,11 @@ export enum FeatureItem { Billing = "BILLING", ConnectionHistoryGraphs = "CONNECTION_HISTORY_GRAPHS", ConnectorBreakingChangeDeadlines = "CONNECTOR_BREAKING_CHANGE_DEADLINES", + DisplayOrganizationUsers = "DISPLAY_ORGANIZATION_USERS", EmailNotifications = 
"EMAIL_NOTIFICATIONS", EnterpriseBranding = "ENTERPRISE_BRANDING", ExternalInvitations = "EXTERNAL_INVITATIONS", + IndicateGuestUsers = "INDICATE_GUEST_USERS", KeycloakAuthentication = "KEYCLOAK_AUTHENTICATION", MultiWorkspaceUI = "MULTI_WORKSPACE_UI", RBAC = "RBAC", diff --git a/airbyte-webapp/src/core/utils/dataPrivacy.ts b/airbyte-webapp/src/core/utils/dataPrivacy.ts index 53f7424bbab..d6642db28b1 100644 --- a/airbyte-webapp/src/core/utils/dataPrivacy.ts +++ b/airbyte-webapp/src/core/utils/dataPrivacy.ts @@ -67,15 +67,15 @@ export const loadOsano = (): void => { ); document.head.appendChild(style); - // Create and append the script tag to load osano + // Create and append the script tag to load osano const script = document.createElement("script"); script.src = `https://cmp.osano.com/${process.env.REACT_APP_OSANO}/osano.js`; script.addEventListener("load", () => { window.Osano?.cm.addEventListener("osano-cm-script-blocked", (item) => { - console.debug(`Script blocked by Osano: ${item}`); + console.debug(`🛡️ [Osano] Script blocked: ${item}`); }); window.Osano?.cm.addEventListener("osano-cm-cookie-blocked", (item) => { - console.debug(`Cookie blocked by Osano: ${item}`); + console.debug(`️🛡️ [Osano] Cookie blocked: ${item}`); }); }); document.head.appendChild(script); diff --git a/airbyte-webapp/src/core/utils/errorStatusMessage.tsx b/airbyte-webapp/src/core/utils/errorStatusMessage.tsx index bb6cc8682be..ddd850577d9 100644 --- a/airbyte-webapp/src/core/utils/errorStatusMessage.tsx +++ b/airbyte-webapp/src/core/utils/errorStatusMessage.tsx @@ -1,5 +1,9 @@ +import type { useIntl } from "react-intl"; + import { FormattedMessage } from "react-intl"; +import { FailureOrigin, FailureReason } from "core/api/types/AirbyteClient"; + export class FormError extends Error { status?: number; } @@ -19,3 +23,39 @@ export const generateMessageFromError = (error: FormError): JSX.Element | string ); }; + +interface FailureUiDetails { + type: "error" | "warning"; + typeLabel: 
string; + origin: FailureReason["failureOrigin"]; + message: string; + secondaryMessage?: string; +} +export const failureUiDetailsFromReason = < + T extends FailureReason | undefined | null, + RetVal = T extends FailureReason ? FailureUiDetails : null, +>( + reason: T, + formatMessage: ReturnType["formatMessage"] +): RetVal => { + if (!reason) { + return null as RetVal; + } + + const isConfigError = reason.failureType === "config_error"; + const isSourceError = reason.failureOrigin === FailureOrigin.source; + const isDestinationError = reason.failureOrigin === FailureOrigin.destination; + + const origin = reason.failureOrigin; + const type = isConfigError && (isSourceError || isDestinationError) ? "error" : "warning"; + const typeLabel = formatMessage( + { id: type === "error" ? "failureMessage.type.error" : "failureMessage.type.warning" }, + { origin } + ); + const message = reason.externalMessage ?? formatMessage({ id: "errorView.unknown" }); + const secondaryMessage = + type === "error" && reason.externalMessage !== reason.internalMessage ? 
undefined : reason.internalMessage; + + const result: FailureUiDetails = { type, typeLabel, origin, message, secondaryMessage }; + return result as RetVal; +}; diff --git a/airbyte-webapp/src/core/utils/links.ts b/airbyte-webapp/src/core/utils/links.ts index ffe31650ff8..a853ca36722 100644 --- a/airbyte-webapp/src/core/utils/links.ts +++ b/airbyte-webapp/src/core/utils/links.ts @@ -30,7 +30,7 @@ export const links = { webpageLink: "https://airbyte.com", webhookVideoGuideLink: "https://www.youtube.com/watch?v=NjYm8F-KiFc", cronReferenceLink: "http://www.quartz-scheduler.org/documentation/quartz-2.3.0/tutorials/crontrigger.html", - cloudAllowlistIPsLink: `${BASE_DOCS_LINK}/cloud/getting-started-with-airbyte-cloud/#allowlist-ip-addresses`, + cloudAllowlistIPsLink: `${BASE_DOCS_LINK}/operating-airbyte/security#network-security-1`, dataResidencySurvey: "https://forms.gle/Dr7MPTdt9k3xTinL8", connectionDataResidency: `${BASE_DOCS_LINK}/cloud/managing-airbyte-cloud/manage-data-residency#choose-the-data-residency-for-a-connection`, lowCodeYamlDescription: `${BASE_DOCS_LINK}/connector-development/config-based/understanding-the-yaml-file/yaml-overview`, @@ -49,9 +49,11 @@ export const links = { interpolationVariableDocs: `${BASE_DOCS_LINK}/connector-development/config-based/understanding-the-yaml-file/reference`, interpolationMacroDocs: `${BASE_DOCS_LINK}/connector-development/config-based/understanding-the-yaml-file/reference#macros`, creditDescription: "https://airbyte.com/pricing#what-is-a-credit", + pricingPage: "https://airbyte.com/pricing", usingCustomConnectors: `${BASE_DOCS_LINK}/operator-guides/using-custom-connectors/`, gettingSupport: `${BASE_DOCS_LINK}/community/getting-support`, autoRechargeEnrollment: `${BASE_DOCS_LINK}/cloud/managing-airbyte-cloud/manage-credits#automatic-reload-of-credits-beta`, + connectorSpecificationDocs: `${BASE_DOCS_LINK}/connector-development/connector-specification-reference/#airbyte-modifications-to-jsonschema`, } as const; export type 
OutboundLinks = typeof links; diff --git a/airbyte-webapp/src/core/utils/pollUntil.test.ts b/airbyte-webapp/src/core/utils/pollUntil.test.ts deleted file mode 100644 index 9ecdc97fdc2..00000000000 --- a/airbyte-webapp/src/core/utils/pollUntil.test.ts +++ /dev/null @@ -1,74 +0,0 @@ -import { pollUntil } from "./pollUntil"; - -// a toy promise that can be polled for a specific response -const fourZerosAndThenSeven = () => { - let _callCount = 0; - return () => Promise.resolve([0, 0, 0, 0, 7][_callCount++]); -}; - -const truthyResponse = (x: unknown) => !!x; - -describe("pollUntil", () => { - beforeAll(() => { - jest.useFakeTimers({ doNotFake: ["nextTick"] }); - }); - - afterAll(() => { - jest.useRealTimers(); - }); - - describe("when maxTimeoutMs is not provided", () => { - it("calls the provided apiFn until condition returns true and resolves to its final return value", () => { - const pollableFn = fourZerosAndThenSeven(); - const result = pollUntil(pollableFn, truthyResponse, { intervalMs: 1 }); - jest.advanceTimersByTime(10); - return expect(result).resolves.toBe(7); - }); - }); - - describe("when condition returns true before maxTimeoutMs is reached", () => { - it("calls the provided apiFn until condition returns true and resolves to its final return value", () => { - const pollableFn = fourZerosAndThenSeven(); - const result = pollUntil(pollableFn, truthyResponse, { intervalMs: 1, maxTimeoutMs: 100 }); - jest.advanceTimersByTime(10); - return expect(result).resolves.toBe(7); - }); - }); - - describe("when maxTimeoutMs is reached before condition returns true", () => { - it("resolves to false", () => { - const pollableFn = fourZerosAndThenSeven(); - const result = pollUntil(pollableFn, truthyResponse, { intervalMs: 100, maxTimeoutMs: 1 }); - jest.advanceTimersByTime(100); - return expect(result).resolves.toBe(false); - }); - - it("calls its apiFn arg no more than (maxTimeoutMs / intervalMs) times", async () => { - let _callCount = 0; - const pollableFn = 
jest.fn(() => { - return Promise.resolve([1, 2, 3, 4, 5][_callCount++]); - }); - - const polling = pollUntil(pollableFn, (_) => false, { intervalMs: 20, maxTimeoutMs: 78 }); - - // Advance the timer by 20ms each. Make sure to wait one more tick (which isn't using fake timers) - // so rxjs will actually call pollableFn again (and thus we get the right count on the that function). - // Without waiting a tick after each advance timer, we'd effectively just advance by 80ms and - // not call the pollableFn multiple times, because the maxTimeout logic would be triggered which - // would cause the subsequent pollableFn calls to not be properly processed. - jest.advanceTimersByTime(20); - await new Promise(process.nextTick); - jest.advanceTimersByTime(20); - await new Promise(process.nextTick); - jest.advanceTimersByTime(20); - await new Promise(process.nextTick); - jest.advanceTimersByTime(20); - await new Promise(process.nextTick); - - const result = await polling; - - expect(result).toBe(false); - expect(pollableFn).toHaveBeenCalledTimes(4); - }); - }); -}); diff --git a/airbyte-webapp/src/core/utils/pollUntil.ts b/airbyte-webapp/src/core/utils/pollUntil.ts deleted file mode 100644 index 7349cf0459e..00000000000 --- a/airbyte-webapp/src/core/utils/pollUntil.ts +++ /dev/null @@ -1,20 +0,0 @@ -import { timer, delay, from, concatMap, takeWhile, last, raceWith, lastValueFrom, NEVER } from "rxjs"; - -// Known issues: -// - the case where `apiFn` returns `false` and `condition(false) === true` is impossible to distinguish from a timeout -export function pollUntil( - apiFn: () => Promise, - condition: (res: ResponseType) => boolean, - options: { intervalMs: number; maxTimeoutMs?: number } -) { - const { intervalMs, maxTimeoutMs } = options; - const poll$ = timer(0, intervalMs).pipe( - concatMap(() => from(apiFn())), - takeWhile((result) => !condition(result), true), - last() - ); - - const timeout$ = maxTimeoutMs ? 
from([false]).pipe(delay(maxTimeoutMs)) : NEVER; - - return lastValueFrom(poll$.pipe(raceWith(timeout$))); -} diff --git a/airbyte-webapp/src/core/utils/rbac/rbac.docs.tsx b/airbyte-webapp/src/core/utils/rbac/rbac.docs.tsx index 398f03e7782..c63607f4807 100644 --- a/airbyte-webapp/src/core/utils/rbac/rbac.docs.tsx +++ b/airbyte-webapp/src/core/utils/rbac/rbac.docs.tsx @@ -2,7 +2,6 @@ import React, { Suspense, useState } from "react"; import { Button } from "components/ui/Button"; import { FlexContainer, FlexItem } from "components/ui/Flex"; -import { Icon } from "components/ui/Icon"; import { Input } from "components/ui/Input"; import { ListBox } from "components/ui/ListBox"; @@ -75,6 +74,7 @@ interface PermissionQueryResultProps { role: RbacRole; permissions: RbacQuery[]; } + const PermissionQueryResult: React.FC = ({ resourceType, resourceId, @@ -177,7 +177,7 @@ const PermisisonTestViewInner = () => { User permissions{" "} - - @@ -80,7 +80,7 @@ describe("ModalService", () => { await waitFor(() => userEvent.click(rendered.getByTestId("close-reason1"))); expect(rendered.queryByTestId("testModalContent")).toBeFalsy(); - expect(resultCallback).toHaveBeenCalledWith({ type: "closed", reason: "reason1" }); + expect(resultCallback).toHaveBeenCalledWith({ type: "completed", reason: "reason1" }); resultCallback.mockReset(); rendered = renderModal(resultCallback); @@ -88,6 +88,6 @@ describe("ModalService", () => { await waitFor(() => userEvent.click(rendered.getByTestId("close-reason2"))); expect(rendered.queryByTestId("testModalContent")).toBeFalsy(); - expect(resultCallback).toHaveBeenCalledWith({ type: "closed", reason: "reason2" }); + expect(resultCallback).toHaveBeenCalledWith({ type: "completed", reason: "reason2" }); }); }); diff --git a/airbyte-webapp/src/hooks/services/Modal/ModalService.tsx b/airbyte-webapp/src/hooks/services/Modal/ModalService.tsx index 48152bb4c2f..3fea588f224 100644 --- a/airbyte-webapp/src/hooks/services/Modal/ModalService.tsx +++ 
b/airbyte-webapp/src/hooks/services/Modal/ModalService.tsx @@ -1,6 +1,7 @@ import React, { useContext, useMemo, useRef, useState } from "react"; import { firstValueFrom, Subject } from "rxjs"; +import { LoadingPage } from "components"; import { Modal } from "components/ui/Modal"; import { ModalOptions, ModalResult, ModalServiceContext } from "./types"; @@ -19,18 +20,14 @@ export const ModalServiceProvider: React.FC> = const service: ModalServiceContext = useMemo( () => ({ - openModal: (options) => { + openModal: async (options) => { resultSubjectRef.current = new Subject(); setModalOptions(options); - return firstValueFrom(resultSubjectRef.current).then((reason) => { - setModalOptions(undefined); - resultSubjectRef.current = undefined; - return reason; - }); - }, - closeModal: () => { - resultSubjectRef.current?.next({ type: "canceled" }); + const reason = await firstValueFrom(resultSubjectRef.current); + setModalOptions(undefined); + resultSubjectRef.current = undefined; + return reason; }, }), [] @@ -40,18 +37,21 @@ export const ModalServiceProvider: React.FC> = {children} {modalOptions && ( - resultSubjectRef.current?.next({ type: "canceled" })} - onClose={(reason) => resultSubjectRef.current?.next({ type: "closed", reason })} - > - resultSubjectRef.current?.next({ type: "canceled" })} - onClose={(reason) => resultSubjectRef.current?.next({ type: "closed", reason })} - /> - + }> + resultSubjectRef.current?.next({ type: "canceled" }) + } + > + resultSubjectRef.current?.next({ type: "canceled" })} + onComplete={(result) => resultSubjectRef.current?.next({ type: "completed", reason: result })} + /> + + )} ); diff --git a/airbyte-webapp/src/hooks/services/Modal/types.ts b/airbyte-webapp/src/hooks/services/Modal/types.ts index ffabcfcc789..48de57a3f01 100644 --- a/airbyte-webapp/src/hooks/services/Modal/types.ts +++ b/airbyte-webapp/src/hooks/services/Modal/types.ts @@ -10,14 +10,13 @@ export interface ModalOptions { testId?: string; } -export type ModalResult = { 
type: "canceled" } | { type: "closed"; reason: T }; +export type ModalResult = { type: "canceled" } | { type: "completed"; reason: T }; export interface ModalContentProps { - onClose: (reason: T) => void; + onComplete: (result: T) => void; onCancel: () => void; } export interface ModalServiceContext { openModal: (options: ModalOptions) => Promise>; - closeModal: () => void; } diff --git a/airbyte-webapp/src/hooks/services/useConnectorAuth.tsx b/airbyte-webapp/src/hooks/services/useConnectorAuth.tsx index 6ff3d93e878..5bbcf78e4c0 100644 --- a/airbyte-webapp/src/hooks/services/useConnectorAuth.tsx +++ b/airbyte-webapp/src/hooks/services/useConnectorAuth.tsx @@ -4,7 +4,7 @@ import { FormattedMessage, useIntl } from "react-intl"; import { useAsyncFn, useEffectOnce, useEvent, useUnmount } from "react-use"; import { v4 as uuid } from "uuid"; -import { useCompleteOAuth, useConsentUrls, isCommonRequestError } from "core/api"; +import { HttpError, useCompleteOAuth, useConsentUrls } from "core/api"; import { CompleteOAuthResponse, CompleteOAuthResponseAuthPayload, @@ -131,7 +131,7 @@ export function useConnectorAuth(): { return { consentUrl: response.consentUrl, payload }; } catch (e) { // If this API returns a 404 the OAuth credentials have not been added to the database. 
- if (isCommonRequestError(e) && e.status === 404) { + if (e instanceof HttpError && e.status === 404) { if (process.env.NODE_ENV === "development") { notificationService.registerNotification({ id: "oauthConnector.credentialsMissing", diff --git a/airbyte-webapp/src/hooks/useDeleteModal.tsx b/airbyte-webapp/src/hooks/useDeleteModal.tsx index a04926ab92c..c54e915c7a4 100644 --- a/airbyte-webapp/src/hooks/useDeleteModal.tsx +++ b/airbyte-webapp/src/hooks/useDeleteModal.tsx @@ -17,7 +17,12 @@ const routes: Routes = { connection: `../../../${RoutePaths.Connections}`, }; -export function useDeleteModal(entity: Entity, onDelete: () => Promise, additionalContent?: React.ReactNode) { +export function useDeleteModal( + entity: Entity, + onDelete: () => Promise, + additionalContent?: React.ReactNode, + confirmationText?: string +) { const { openConfirmationModal, closeConfirmationModal } = useConfirmationModalService(); const navigate = useNavigate(); @@ -26,6 +31,7 @@ export function useDeleteModal(entity: Entity, onDelete: () => Promise, text: `tables.${entity}DeleteModalText`, additionalContent, title: `tables.${entity}DeleteConfirm`, + confirmationText, submitButtonText: "form.delete", onSubmit: async () => { await onDelete(); @@ -34,5 +40,5 @@ export function useDeleteModal(entity: Entity, onDelete: () => Promise, }, submitButtonDataId: "delete", }); - }, [openConfirmationModal, entity, additionalContent, onDelete, closeConfirmationModal, navigate]); + }, [openConfirmationModal, entity, additionalContent, confirmationText, onDelete, closeConfirmationModal, navigate]); } diff --git a/airbyte-webapp/src/locales/en.json b/airbyte-webapp/src/locales/en.json index de47ff5a4eb..a73301f4fa6 100644 --- a/airbyte-webapp/src/locales/en.json +++ b/airbyte-webapp/src/locales/en.json @@ -146,15 +146,18 @@ "form.noNeed": "No need!", "form.reset": "Reset", "form.resetData": "Reset your data", - "form.resetDataText": "Resetting your data will delete all the data for this connection in 
your destination and start syncs from scratch. Are you sure you want to do this?", + "form.resetData.description": "Resetting your data will delete all the data for this connection in your destination and the next sync will start from scratch.", + "form.resetData.successfulStart": "Your reset has started.", + "form.clearData.successfulStart": "Data clearing started.", + "form.resetDataText": "Resetting your data will delete all the data for this connection in your destination and the next sync will start from scratch. Are you sure you want to do this?", "form.dockerError": "Could not find docker image", "form.edit": "Edit", "form.done": "Done", "form.prefix": "Destination Stream Prefix", - "form.prefixNext": "Add a stream prefix", + "form.prefixNext": "Stream prefix", "form.prefix.message": "Add a prefix to stream names (ex. “airbyte_” causes “projects” => “airbyte_projects”)", "form.prefix.example": "example: ”projects” -> ”{prefix}projects”", - "form.prefix.subtitle": "Optional", + "form.prefix.subtitle": "Prefix text to each stream name in the destination", "form.prefix.placeholder": "prefix", "form.nameSearch": "Search stream name", "form.hideDisabledStreams": "Hide disabled streams", @@ -208,7 +211,6 @@ "connectionForm.questionnaire.incrementOrRefresh.refresh.title": "Append Full Snapshots", "connectionForm.questionnaire.incrementOrRefresh.refresh.subtitle": "Append a full copy of your source data every sync.", "connectionForm.questionnaire.incrementOrRefresh.refresh.warning": "This option potentially increases costs and sync times", - "connectionForm.questionnaire.result": "We've chosen a sync mode for your streams.", "connectionForm.namespaceDefinition.title": "Destination Namespace", "connectionForm.namespaceDefinition.subtitle": "Where data will be synced in the destination", "connectionForm.namespaceDefinition.subtitleNext": "The location where the replicated data will be stored in the destination", @@ -222,17 +224,20 @@ 
"connectionForm.backToSetupSchema": "Back to Set up schema", "connectionForm.nextButton": "Next", "connectionForm.configureConnection": "Configure connection", + "connectionForm.selectStreams": "Select streams", + "connectionForm.selectStreams.readonly": "Selected streams", "connectionForm.selectSyncMode": "Select sync mode", + "connectionForm.selectSyncModeDescription": "Tell us how you want your data moved and we'll select the right sync mode for your streams.", "connectionForm.destinationNew": "Set up a new destination", "connectionForm.destinationNewDescription": "Configure a new destination from Airbyte's catalog of available connectors", "connectionForm.sourceFormat": "Mirror source structure", "connectionForm.sourceFormatNext": "Source-defined", "connectionForm.sourceFormatDescription": "Match the schema the source is in", - "connectionForm.sourceFormatDescriptionNext": "Use the schema(s) defined by the source.", + "connectionForm.sourceFormatDescriptionNext": "Use the schema(s) defined by the source.{sourceDefinedNamespaces}", "connectionForm.destinationFormat": "Destination default", "connectionForm.destinationFormatNext": "Destination-defined", "connectionForm.destinationFormatDescription": "Sync all streams to the default schema defined in the destination", - "connectionForm.destinationFormatDescriptionNext": "Sync all streams to the schema defined in the destination's settings.", + "connectionForm.destinationFormatDescriptionNext": "Sync all streams to {destinationDefinedNamespace, select, no_value_provided {} other {{destinationDefinedNamespace},}} the schema defined in the destination's settings.", "connectionForm.customFormat": "Custom format", "connectionForm.customFormatDescription": "Sync all streams to a unique new schema", "connectionForm.customFormatDescriptionNext": "Sync all streams to a unique new schema. 
Useful when syncing from multiple sources.", @@ -264,7 +269,7 @@ "connectionForm.pillButtonLabel.notAvailable": "Not available", "connectionForm.nonBreakingChangesPreference.label": "Non-breaking schema updates detected", "connectionForm.nonBreakingChangesPreference.autopropagation.label": "Detect and propagate schema changes", - "connectionForm.nonBreakingChangesPreference.autopropagation.labelNext": "When the source schema changes, I want to:", + "connectionForm.nonBreakingChangesPreference.autopropagation.labelCreating": "When the source schema changes, I want to:", "connectionForm.nonBreakingChangesPreference.message": "Automatically reflect source schema changes in your destination. Propagating all changes will incur costs for new streams.", "connectionForm.nonBreakingChangesPreference.ignore": "Ignore", "connectionForm.nonBreakingChangesPreference.disable": "Disable connection", @@ -302,9 +307,9 @@ "connectionForm.modal.destinationNamespace.option.source.description": "Replicate the stream's source namespace in the destination.", "connectionForm.modal.destinationNamespace.option.destination.description": "Replicate and store in the default namespace defined in the destination settings.", "connectionForm.modal.destinationNamespace.option.customFormat.description": "Create a \"custom format\" to rename the namespace that your data will be replicated into.", - "connectionForm.modal.destinationNamespace.description": "Below is an example of how your data will be replicated with this namespace setting. The example assumes a default destination namespace of \"my_schema\".", + "connectionForm.modal.destinationNamespace.description": "Below is an example of how your data will be replicated with this namespace setting. 
The example assumes a default destination namespace of \"my_schema\".", "connectionForm.modal.destinationNamespace.description.emptySource": "Note that if the source does not have a namespace, we will use the destination's schema as a fallback.", - "connectionForm.modal.destinationNamespace.description.emptyCustom": "Note that if the source does not have a namespace, we will parse an empty string for the placeholder. If the entire custom string is empty, we will use the destination's schema as a fallback.", + "connectionForm.modal.destinationNamespace.description.emptyCustom": "Note that if the source does not have a namespace, we will parse an empty string for the placeholder. If the entire custom string is empty, we will use the destination's schema as a fallback.", "connectionForm.modal.destinationNamespace.table.header.sourceNamespace": "Source namespace", "connectionForm.modal.destinationNamespace.table.header.destinationNamespace": "Destination namespace", "connectionForm.modal.destinationNamespace.table.header.customFormat": "Custom format", @@ -337,9 +342,9 @@ "connectorForm.revocation.succeeded": "Your {connector} integration has been disconnected.", "connectorForm.reauthenticate": "Re-authenticate", "connectorForm.expandForm": "Expand this form to continue setting up your connector", - "connectorForm.error.oneOfWithNonObjects": "Spec uses oneOf without using object types for all conditions", - "connectorForm.error.oneOfWithoutConst": "Spec uses oneOf without a shared const property", - "connectorForm.error.topLevelNonObject": "Top level configuration has to be an object", + "connectorForm.error.oneOfWithNonObjects": "Spec uses oneOf without using object types for all conditions. Make sure your connectors are up to date. See connector docs for more information.", + "connectorForm.error.oneOfWithoutConst": "Spec uses oneOf without a shared const property. Make sure your connectors are up to date. 
See connector docs for more information.", + "connectorForm.error.topLevelNonObject": "Top level configuration has to be an object. Make sure your connectors are up to date. See connector docs for more information.", "connectorForm.allowlistIp.message": "Please allow inbound traffic from the following Airbyte IPs in your firewall whether connecting directly or via SSH Tunnel (more info):", "connectorForm.allowlistIp.addressesLabel": "Airbyte IP addresses", @@ -441,6 +446,11 @@ "jobs.jobStatus.reset_connection.succeeded": "Reset Succeeded ({count, plural, =0 {0 streams} one {# stream} other {# streams}})", "jobs.jobStatus.reset_connection.cancelled": "Reset Cancelled ({count, plural, =0 {0 streams} one {# stream} other {# streams}})", "jobs.jobStatus.reset_connection.partialSuccess": "Reset Partial Success ({count, plural, =0 {0 streams} one {# stream} other {# streams}})", + "jobs.jobStatus.clear_data.failed": "Clearing {count, plural, =0 {0 Streams} one {Stream} other {Streams}} Failed", + "jobs.jobStatus.clear_data.running": "Clearing {count, plural, =0 {0 Streams} one {Stream} other {Streams}} Running", + "jobs.jobStatus.clear_data.succeeded": "Clearing {count, plural, =0 {0 Streams} one {Stream} other {Streams}} Succeeded", + "jobs.jobStatus.clear_data.cancelled": "Clearing {count, plural, =0 {0 Streams} one {Stream} other {Streams}} Cancelled", + "jobs.jobStatus.clear_data.partialSuccess": "Clearing {count, plural, =0 {0 Streams} one {Stream} other {Streams}} Partially Succeeded", "jobs.jobStatus.sync.failed": "Sync Failed", "jobs.jobStatus.sync.running": "Sync Running", "jobs.jobStatus.sync.succeeded": "Sync Succeeded", @@ -460,6 +470,7 @@ "jobs.failure.originLabel": "Failure origin:", "jobs.failure.typeLabel": "Failure type:", "jobs.failure.seeMore": "See more", + "jobs.failure.seeLess": "See less", "jobs.failure.copyText": "Copy text", "jobs.failure.copyText.success": "Text copied to clipboard", "jobs.noMetadataAvailable": "No job metadata available", @@ 
-538,8 +549,12 @@ "connection.onboarding.demoInstance": "or play around in our demo instance.", "connection.resetModalTitle": "Stream configuration changed", "connection.streamResetHint": "Due to changes in the stream configuration, we recommend a data reset. A reset will delete data in the destination of the affected streams and then re-sync that data. Skipping the reset is discouraged and might lead to unexpected behavior.", + "connection.clearDataHint": "Due to changes in the stream configuration, we recommend clearing the data from your destination. Clearing data will delete data in the destination of the affected streams.", + "connection.clearDataHint.emphasized": "You will need to trigger a re-sync after this operation to bring your data up to date. Skipping these steps are discouraged and might lead to unexpected behavior.", "connection.saveWithReset": "Reset affected streams (recommended)", "connection.saveWithFullReset": "Reset all streams (recommended)", + "connection.saveWithDataClear": "Clear data from affected streams (recommended)", + "connection.saveWithFullDataClear": "Clear all data (recommended)", "connection.save": "Save connection", "connection.title": "Connection", "connection.fromTo": "{source} → {destination}", @@ -561,6 +576,7 @@ "connection.updateSchema.namespace": "Namespace", "connection.updateSchema.dataType": "Data type", "connection.updateSchema.noDiff": "No changes were detected in the source schema.", + "connection.updateSchema.updateAutomaticallyApplied": "Schema update automatically applied.", "connection.enable.creditsProblem": "You're out of credits! 
Add more to re-enable your sync.", "connection.enable.creditsProblem.cta": "Add credits", "connection.freeHistoricalSyncs.message.initial": "Free syncs for 7 days", @@ -601,6 +617,8 @@ "connection.pendingSync": "Sync is pending or running", "connection.refreshSchema": "Refresh schema", "connection.replication": "Replication", + "connection.schema": "Schema", + "connection.syncStatusCard.title": "Sync Status", "connection.streams": "Streams", "connection.transfer": "Transfer", "connection.linkCopied": "Link copied!", @@ -610,6 +628,7 @@ "connection.startSyncError": "Failed to start sync", "connection.cancelSync": "Cancel Sync", "connection.cancelReset": "Cancel Reset", + "connection.cancelDataClear": "Cancel Clearing Data", "connection.linkedJobNotFound": "Job not found", "connection.returnToJobHistory": "Return to Job History", "connection.updateFailed": "Failed to update connection", @@ -648,6 +667,9 @@ "connection.uptimeStatus.actionRequired": "Stopped (action required)", "connection.uptimeStatus.cancelled": "Cancelled", + "connection.actions.clearData": "Clearing your data will delete all data in your destination.", + "connection.actions.error": "There was an error starting this job. Please try again.", + "connection.stream.status.actionRequired": "Action required", "connection.stream.status.onTime": "On time", "connection.stream.status.onTrack": "On track", @@ -655,9 +677,10 @@ "connection.stream.status.error": "Error", "connection.stream.status.late": "Late", "connection.stream.status.pending": "Pending", - "connection.stream.status.title": "Enabled streams", + "connection.stream.status.title": "Active Streams", "connection.stream.status.seeLogs": "See logs", - "connection.stream.status.gotoSettings": "Go to settings", + "connection.stream.status.checkSourceSettings": "Check source", + "connection.stream.status.checkDestinationSettings": "Check destination", "connection.stream.status.genericError": "There was an error with your {syncType}. 
See logs for details.", "connection.stream.status.table.status": "Status", "connection.stream.status.table.streamName": "Stream name", @@ -666,8 +689,27 @@ "connection.stream.status.table.emptyTable.message": "Re-enable the connection to show stream sync progress", "connection.stream.status.table.emptyTable.callToAction": "Re-enable", "connection.stream.actions.resetThisStream": "Reset this stream", - "connection.stream.actions.showInReplicationTable": "Show in replication table", + "connection.stream.actions.refreshStream": "Refresh stream", + "connection.stream.actions.refreshStream.confirm.title": "Are you sure you want to refresh the {streamName} stream?", + "connection.stream.actions.refreshStream.confirm.submit": "Refresh my stream", + "connection.stream.actions.refreshStream.description": "A refresh pulls all historical data from the source. During the refresh, all existing sync schedules will be paused until the refresh is complete, which means your data may become stale. This can also incur additional costs to load the data into the destination.", + "connection.stream.actions.refreshStream.options": " Please consider the options below and choose which refresh you intend to perform.", + "connection.stream.actions.refreshStream.chatWithUs": "(Chat with us if you’re unsure!)", + "connection.stream.actions.refreshStream.merge.label": "Refresh stream and retain records", + "connection.stream.actions.refreshStream.merge.description": "By electing to refresh your stream and retain records, you are instructing Airbyte to keep any records in your destination that the source no longer contains. 
This option is recommended for analytics use cases or where downstream reporting depends on historical data.", + "connection.stream.actions.refreshStream.truncate.label": "Refresh stream and remove records", + "connection.stream.actions.refreshStream.truncate.description": "By electing to refresh your stream and remove records, you are instructing Airbyte to remove previously synced records that are no longer in the source. This option is recommended for database replication use cases where you expect a 1:1 mirror of the source. Use this option carefully as this is not reversible.", + "connection.stream.actions.refreshStream.learnMoreLink": "Learn more about refreshing streams.", + "connection.stream.actions.clearData": "Clear data", + "connection.stream.actions.clearData.confirm.title": "Are you sure you want to clear data from the {streamName} stream?", + "connection.actions.clearData.confirm.title": "Are you sure you want to clear data from this connection?", + "connection.actions.clearData.confirm.text": "Clearing data for this connection will delete all data in your destination for this connection. The next sync will sync all historical data.", + "connection.stream.actions.clearData.confirm.text": "Clearing data for this stream will delete all data in your destination for this stream. 
The next sync will sync all historical data.", + "connection.stream.actions.clearData.confirm.additionalText": "WARNING: This cannot be undone.", + "connection.stream.actions.clearData.confirm.submit": "Yes, clear data", + "connection.stream.actions.clearData.confirm.cancel": "No, cancel", "connection.stream.actions.openDetails": "Open details", + "connection.stream.actions.showInReplicationTable": "Show in replication table", "connection.stream.status.nextSync": "Next sync {sync}", "connection.stream.status.nextTry": "Next try {sync}", @@ -701,7 +743,6 @@ "connection.customTransformations.errorMessage": "There was an error during updating your custom transformation settings", "connection.state.title": "Connection state", - "connection.state.noIncremental": "This connection contains no incremental streams, so it cannot have state.", "connection.state.warning": "Updates to connection state should be handled with extreme care.", "connection.state.warning.secondary": "Updates may break your syncs, requiring a reset to fix.

    Make changes only as directed by the Airbyte team.", "connection.state.update": "Update state", @@ -817,6 +858,8 @@ "settings.workspaceSettings": "Workspace", "settings.organizationSettings": "Organization", "settings.instanceSettings": "Instance", + "settings.workspace.general.title": "General workspace settings", + "settings.organization.general.title": "General organization settings", "settings.workspaceSettings.update.success": "Workspace settings have been updated!", "settings.workspaceSettings.update.error": "Something went wrong while updating your workspace settings. Please try again.", "settings.workspaceSettings.updateWorkspaceNameSuccess": "Workspace name has been updated!", @@ -878,7 +921,7 @@ "settings.notificationGuide.link.configuration": "Configure Sync notifications", "settings.notificationGuide.link.slackConfiguration": "Configure a Slack Notifications Webhook", "settings.metrics": "Metrics", - "settings.notificationSettings": "Notification Settings", + "settings.notificationSettings": "Notifications", "settings.metricsSettings": "Metrics Settings", "settings.emailNotifications": "Email notifications", "settings.securityUpdates": "Security updates (recommended)", @@ -888,11 +931,11 @@ "settings.cookiePreferences": "Cookie Preferences", "settings.dataResidency": "Data Residency", "settings.defaultDataResidency": "Default Data Residency", - "settings.geographyDescription": "Depending on your network configuration, you may need to add IP addresses to your allowlist. Request a new data residency.", "settings.defaultGeography": "Geography", - "settings.defaultDataResidencyDescription": "Choose the default preferred data processing location for all of your connections. The default data residency setting only affects new connections. Existing connections will retain their data residency setting. Learn more.", + "settings.defaultDataResidencyDescription": "Choose the default preferred data processing location for all of your connections. 
The default data residency setting only affects new connections. Existing connections will retain their data residency setting. Depending on your network configuration, you may need to add IP addresses to your allowlist. Request new data residency", "settings.defaultDataResidencyUpdateError": "There was an error updating the default data residency for this workspace.", "settings.defaultDataResidencyUpdateSuccess": "Data residency preference has been updated!", + "settings.general": "General", "settings.members": "Members", "settings.accessManagement.noUsers": "No users have this level of permission", "settings.accessManagement.removeUser": "Remove user", @@ -903,11 +946,9 @@ "settings.accessManagement.removePermissions": "Are you sure you want to remove {user} from {resource}?", "settings.accessManagement.youHint": "You", "settings.accessManagement.members": "Members", - "settings.accessManagement.permissionCreate.success": "User added successfully!", - "settings.accessManagement.permissionCreate.error": "There was an error adding this user.", - "settings.accessManagement.permissionUpdate.success": "Permission updated successfully!", + "settings.accessManagement.permissionUpdate.success": "Permission updated successfully", "settings.accessManagement.permissionUpdate.error": "There was an error updating this permission.", - "settings.accessManagement.permissionDelete.success": "User removed successfully!", + "settings.accessManagement.permissionDelete.success": "User removed successfully", "settings.accessManagement.permissionDelete.error": "There was an error removing this user.", "settings.accessManagement.guestUser": "This user is not a member of the organization and is a guest of this workspace.", "settings.accessManagement.cannotDemoteOrgAdmin": "You cannot demote an organization admin within a workspace.", @@ -922,6 +963,8 @@ "settings.advancedSettings.workspaceInTitleDescription": "Prefix the tab name with the current workspace name. 
This will require a page reload to take effect.", "settings.advancedSettings.attemptStats": "Show extended attempt details", "settings.advancedSettings.attemptStatsDescription": "Show additional details on attempts in the job history page like job ID and attempt count.", + "settings.advancedSettings.connectionDetails": "Show extended connection information", + "settings.advancedSettings.connectionDetailsDescription": "Show more details about a connection (e.g. connector version) on the connection page.", "settings.applications.token.new": "New access token", "settings.applications.token.warning": "You can only view this key once. Make sure you save this key in a safe place.", @@ -940,7 +983,7 @@ "settings.applications.table.generateToken": "Generate access token", "settings.applications.table.delete": "Delete", "settings.applications.tooltip": "Your applications have the same permissions that you do.", - "settings.applications.create.disabledTooltip": "You may only have up to two active applications. Please delete an existing application before creating a new one.", + "settings.applications.create.disabledTooltip": "You may only have up to two active applications. Please delete an existing application before creating a new one.", "settings.application.create": "Create an application", "settings.application.name": "Name your application", "settings.application.name.placeholder": "Enter name", @@ -1007,6 +1050,10 @@ "connector.breakingChange.upgradeModal.moreConnections": "+ {count, plural, one {# more connection} other {# more connections}}", "connector.breakingChange.upgradeToast.success": "{type} upgraded successfully. See this guide for any remaining actions.", "connector.breakingChange.upgradeToast.failure": "Failed to upgrade {type}. 
Please reach out to support for assistance.", + "connector.check.failed": "Connection tests were not successful.", + "connector.check.jobFailed": "Failed to run connection tests.", + "connector.discoverSchema.jobFailed": "Failed to run schema discovery.", + "connector.discoverSchema.catalogMissing": "Source did not return a schema.", "credits.credits": "Credits", "credits.whatAreCredits": "What are credits?", @@ -1019,6 +1066,7 @@ "credits.maxCreditsError": "The maximum credit purchase is {maximum} credits.", "credits.checkoutModalTitle": "Select credit quantity", "credits.checkout": "Checkout", + "credits.checkout.creditExpiration": "Credits are valid for 12 months. Visit our pricing page to learn more.", "credits.aboveMaxCredits": "To buy large quantities of credits, talk to our Sales team.", "credits.numberOfCredits": "Number of credits", "credits.pricePerCredit": "Price per credit", @@ -1029,12 +1077,20 @@ "credits.noBillingAccount": "Your account is excluded from billing requirements and credits are not required.", "docs.notFoundError": "We were not able to receive docs. Please click the link above to open docs on our website", - "errorView.notFound": "Resource not found.", - "errorView.notAuthorized": "You don’t have permission to access this page.", + "errors.title": "Sorry, something went wrong.", + "errors.reload": "Reload", + "errors.copyDetails": "Copy details", + "errors.http.badRequest": "There was an error in the request sent to the API. If possible, change your inputs and try again. (HTTP 400)", + "errors.http.unauthorized": "It seems you're not authorized. Please try logging in again. (HTTP 401)", + "errors.http.forbidden": "You don't have the right permissions to take this action. (HTTP 403)", + "errors.http.notFound": "We can't seem to find what you're looking for. (HTTP 404)", + "errors.http.gone": "It seems what you're looking for no longer exists. 
(HTTP 410)", + "errors.http.teapot": "I'm a teapot 🫖 (HTTP 418)", + "errors.http.internalServerError": "An unexpected error occurred. Please report this if the issue persists. (HTTP 500)", + "errors.http.badGateway": "Airbyte is temporarily unavailable. Please try again. (HTTP 502)", + "errors.http.serviceUnavailable": "Airbyte is temporarily unavailable. Please try again. (HTTP 503)", + "errors.http.default": "An unknown error occurred. (HTTP {status})", "errorView.title": "Oops! Something went wrong…", - "errorView.docLink": "Check out the documentation", - "errorView.upgradeConnectors": "Make sure your connectors are up to date", - "errorView.retry": "Retry", "errorView.unknown": "Unknown", "errorView.unknownError": "Unknown error occurred", @@ -1058,6 +1114,8 @@ "ui.loading": "Loading …", "ui.markdown.copyCode": "Copy code", "ui.markdown.copied": "Copied", + "ui.switch.enabled": "Enabled", + "ui.switch.disabled": "Disabled", "airbyte.datatype.string": "String", "airbyte.datatype.binary_data": "Binary Data", @@ -1092,7 +1150,7 @@ "connectorBuilder.recordsTab": "Records", "connectorBuilder.requestTab": "Request", "connectorBuilder.responseTab": "Response", - "connectorBuilder.schemaTab": "Detected schema", + "connectorBuilder.schemaTab": "Detected Schema", "connectorBuilder.useSchemaButton": "Import detected schema", "connectorBuilder.differentSchemaDescription": "Detected schema and declared schema are different", "connectorBuilder.overwriteSchemaButton": "Overwrite declared schema", @@ -1165,12 +1223,12 @@ "connectorBuilder.inputModal.enum": "Allowed values", "connectorBuilder.inputModal.enumTooltip": "The user will only be able to choose from one of these values. 
If none are provided the user will be able to enter any value", "connectorBuilder.inputModal.unsupportedInput": "Detailed configuration for this property type is disabled, switch to YAML view to edit", - "connectorBuilder.inputModal.inferredInputMessage": "Detailed configuration for this user input is disabled as it is tied to the configuration", + "connectorBuilder.inputModal.lockedInput": "Some configuration options are not shown, since this input is auto-generated and linked to the current configuration of your connector.", "connectorBuilder.key": "Key", "connectorBuilder.value": "Value", "connectorBuilder.addKeyValue": "Add", "connectorBuilder.saveInputsForm": "Save", - "connectorBuilder.inputsFormWarning": "Testing values are not saved with the connector when publishing or releasing. They are required in order to test your streams, and will be asked to the end user in order to setup this connector", + "connectorBuilder.inputsFormMessage": "Testing values are not saved with the connector when publishing or releasing. They are required in order to test your streams, and will be asked to the end user in order to setup this connector", "connectorBuilder.inputsError": "User inputs form could not be rendered: {error}. 
Make sure the spec in the YAML conforms to the specified standard.", "connectorBuilder.inputsErrorDocumentation": "Check out the documentation", "connectorBuilder.goToYaml": "Switch to YAML view", @@ -1183,7 +1241,7 @@ "connectorBuilder.duplicateFieldID": "Make sure no field ID is used multiple times", "connectorBuilder.addNewParentStream": "Add new parent stream", "connectorBuilder.streamConfiguration": "Configuration", - "connectorBuilder.streamSchema": "Declared schema", + "connectorBuilder.streamSchema": "Declared Schema", "connectorBuilder.invalidJSON": "Invalid JSON - please fix syntax to have it applied", "connectorBuilder.copyToPaginationTitle": "Copy pagination settings to...", "connectorBuilder.copyFromPaginationTitle": "Import pagination settings from...", @@ -1261,7 +1319,7 @@ "connectorBuilder.warnings.noRecords": "No records could be extracted from responses, make sure the record selector is set correctly.", "connectorBuilder.warnings.primaryKeyMissing": "In at least one record, the configured primary key is not set. 
Make sure it is always available.", "connectorBuilder.warnings.primaryKeyDuplicate": "Primary key is not unique - found {duplicateKey} more than once.", - "connectorBuilder.resolvingStreamList": "Checking configuration", + "connectorBuilder.resolvingStreamList": "Checking configuration…", "connectorBuilder.documentationLink": "Open documentation in separate tab", "connectorBuilder.interpolationHeading": "Variables available for interpolation", "connectorBuilder.interpolationMacros": "See macros available for interpolation here", @@ -1438,6 +1496,11 @@ "connectorBuilder.transformation.remove.path.tooltip": "Path to the field to remove", "connectorBuilder.transformation.add": "Add Field", "connectorBuilder.adminTestingValuesWarning": "Admin: changes to testing values will be saved to the database, which the user will see if they return to this project.", + "connectorBuilder.yamlComponent.discardChanges.title": "Discard YAML Changes?", + "connectorBuilder.yamlComponent.discardChanges.unknownErrorIntro": "The YAML you entered is not supported in UI form.", + "connectorBuilder.yamlComponent.discardChanges.knownErrorIntro": "The YAML you entered is not supported in UI form due to the following error:", + "connectorBuilder.yamlComponent.discardChanges.errorOutro": "Click confirm to discard your YAML changes and revert back to the previous UI state.", + "connectorBuilder.yamlComponent.discardChanges.confirm": "Confirm", "jobs.noAttemptsFailure": "Failed to start job.", @@ -1453,7 +1516,6 @@ "jobHistory.logs.logDownloadPending": "Downloading logs for job {jobId}…", "jobHistory.logs.logDownloadFailed": "Failed to download logs for job {jobId}.", "jobHistory.logs.searchPlaceholder": "Search logs", - "jobHistory.logs.failureReason": "Failure reason: {reason}", "jobHistory.logs.logOrigin.all": "All logs", "jobHistory.logs.logOrigin.source": "source", "jobHistory.logs.logOrigin.destination": "destination", @@ -1476,6 +1538,7 @@ "auth.authError": "An error occurred during 
authentication: {errorMessage}", "modal.closeButtonLabel": "Close dialog", + "modal.confirmationTextDescription": "Type {confirmationText} below to confirm this action.", "copyButton.title": "Copy", @@ -1486,6 +1549,8 @@ "login.returnToLogin": "Return to login", "login.signup.submitButton": "Sign up", "login.loginTitle": "Log in to Airbyte", + "login.acceptInvite": "You've been invited to collaborate on Airbyte", + "login.acceptInvite.subtitle": "Log in or sign up to get started", "login.resendEmail": "Didn't receive the email? Send it again", "login.yourEmail": "Your work email*", "login.inviteEmail": "For security, re-enter your invite email*", @@ -1516,6 +1581,7 @@ "login.oauth.or": "or", "login.oauth.google": "Continue with Google", "login.oauth.github": "Continue with GitHub", + "login.email": "Continue with Email", "login.oauth.differentCredentialsError": "Use your email and password to sign in.", "login.oauth.unknownError": "An unknown error happened during sign in: {error}", "login.selfhosting": "Interested in self-hosting?", @@ -1559,6 +1625,7 @@ "confirmResetPassword.link.invalid": "The password reset link is invalid. Please double check the reset email.", "confirmResetPassword.password.weak": "Your password does not meet the minimum length", + "connection.header.frequency.tooltip": "Click to edit in Settings", "connection.dbtCloudJobs.cardTitle": "Transformations", "connection.dbtCloudJobs.addJob": "Add transformation", "connection.dbtCloudJobs.dbtError": "There was an error communicating with dbt Cloud: {displayMessage}", @@ -1609,13 +1676,13 @@ "settings.integrationSettings.dbtCloudSettings.actions.cancel": "Cancel", "settings.integrationSettings.dbtCloudSettings.actions.delete": "Delete service token", "settings.integrationSettings.dbtCloudSettings.actions.delete.confirm": "Confirm dbt Service Token deletion", - "settings.integrationSettings.dbtCloudSettings.action.delete.modal": "Are you sure you want to remove your dbt Service Token? 
\n \nThis will stop all dbt Cloud transformations from running and may cause sync failures. You will need to manually remove these transformations from your connections to keep syncing.", + "settings.integrationSettings.dbtCloudSettings.action.delete.modal": "Are you sure you want to remove your dbt Service Token? \n \nThis will stop all dbt Cloud transformations from running and may cause sync failures. You will need to manually remove these transformations from your connections to keep syncing.", "settings.integrationSettings.dbtCloudSettings.actions.delete.success": "Service token deleted successfully", "settings.integrationSettings.dbtCloudSettings.actions.submit": "Save changes", "settings.integrationSettings.dbtCloudSettings.actions.submit.success": "Service Token saved successfully", "settings.integrationSettings.dbtCloudSettings.form.serviceTokenLabel": "Service Token", "settings.integrationSettings.dbtCloudSettings.form.serviceTokenInputHidden": "Enter a service token", - "settings.integrationSettings.dbtCloudSettings.form.serviceTokenAlreadyExist": "This workspace has an existing dbt service token. To replace it, you must first remove the existing token.", + "settings.integrationSettings.dbtCloudSettings.form.serviceTokenAlreadyExist": "This workspace has an existing dbt service token. To replace it, you must first remove the existing token.", "settings.integrationSettings.dbtCloudSettings.form.description": "To use the dbt Cloud integration, enter your service token here. Learn more.", "userSettings.button.addNewUser": "New user", @@ -1649,7 +1716,8 @@ "credits.creditsProblem": "You’re out of credits! 
To set up connections and run syncs, add credits.", "credits.emailVerificationRequired": "You need to verify your email address before you can buy credits.", "credits.emailVerification.resendConfirmation": "We sent you a new verification link.", - "credits.emailVerification.resend": "Send verification link again", + "credits.emailVerification.resendConfirmationError": "There was an error sending the verification link. Please try again.", + "credits.emailVerification.resend": "Send verification link", "credits.lowBalance": "Your credit balance is low. Buy more credits to prevent your connections from being disabled or enroll in auto-recharge.", "credits.zeroBalance": "All your connections have been disabled because your credit balance is 0. Buy credits or enroll in auto-recharge to enable your data to sync.", @@ -1672,8 +1740,6 @@ "sidebar.credits": "Credits", "sidebar.billing": "Billing", - "experiment.speedyConnection": "Set up your first connection in the next and get 100 additional credits for your trial", - "workspace.adminWorkspaceWarning": "Admin", "workspace.adminWorkspaceWarningTooltip": "You are not a member of this workspace. Be careful when making changes!", @@ -1681,14 +1747,33 @@ "userInvitations.accept.success": "Invitation accepted successfully", "userInvitations.accept.error": "Something went wrong accepting the invitation. Please check your link.", + "userInvitations.accept.error.email": "Current user's email address does not match invitation.", + "userInvitations.accept.error.expired": "This invitation has expired. Please request a new one.", + "userInvitations.accept.error.cancelled": "This invitation has been cancelled.", + "userInvitations.accept.warning.alreadyAccepted": "You have already accepted this invitation.", "userInvitations.create.modal.title": "Add a member to {workspace}", "userInvitations.create.modal.addNew": "Add new member", - "userInvitations.create.modal.existingMember": "This member already exists. 
Go to workspace settings to edit this user’s permissions.", "userInvitations.create.modal.search": "Type to add a new member", "userInvitations.create.success": "Invitation created successfully", + "userInvitations.create.success.directlyAdded": "User added successfully", "userInvitations.create.error": "There was an error inviting this user. Please try again.", + "userInvitations.create.error.duplicate": "There is already a pending invitation for this email.", "userInvitations.create.modal.emptyList": "No matching users found", "userInvitations.create.modal.emptyList.canInvite": "No matching users found. Enter a valid email address to invite a new member.", "userInvitations.create.modal.emptyList.noOrganization": "Enter a valid email address to invite a new member.", - "userInvitations.newMember": "New member" + "userInvitations.newMember": "New member", + "userInvitations.create.modal.existingUserTooltip": "Go to workspace member settings to edit this user’s permissions.", + "userInvitations.create.modal.organizationAdminTooltip": "This user already has full access to this workspace.", + "userInvitations.pendingInvitation": "Pending...", + "userInvitations.pendingInvitation.tooltipMain": "This member has not yet accepted their invitation.", + "userInvitations.pendingInvitation.tooltipAdditionalInfo": "They do not yet have access to this workspace.", + "userInvitations.create.modal.asRole": "As {role}", + "userInvitations.cancel.success": "User invitation successfully cancelled", + "userInvitations.cancel.error": "There was an error cancelling this invitation. 
Please try again.", + "userInvitations.cancel.confirm.text": "Are you sure you want to cancel this invitation for {user} to {resource}?", + "userInvitations.cancel.confirm.title": "Cancel invitation", + + "failureMessage.type.error": "Failure in {origin}", + "failureMessage.type.warning": "Warning from {origin}", + "failureMessage.label": "{type} {message}" } diff --git a/airbyte-webapp/src/packages/cloud/App.tsx b/airbyte-webapp/src/packages/cloud/App.tsx index 85e9c727c13..d6be7011032 100644 --- a/airbyte-webapp/src/packages/cloud/App.tsx +++ b/airbyte-webapp/src/packages/cloud/App.tsx @@ -2,13 +2,12 @@ import React, { Suspense } from "react"; import { HelmetProvider } from "react-helmet-async"; import { createBrowserRouter, RouterProvider } from "react-router-dom"; -import { ApiErrorBoundary } from "components/common/ApiErrorBoundary"; import { DeployPreviewMessage } from "components/DeployPreviewMessage"; import { DevToolsToggle } from "components/DevToolsToggle"; import LoadingPage from "components/LoadingPage"; import { QueryProvider } from "core/api"; -import { ConfigServiceProvider, config } from "core/config"; +import { DefaultErrorBoundary } from "core/errors"; import { AnalyticsProvider } from "core/services/analytics"; import { defaultCloudFeatures, FeatureService } from "core/services/features"; import { I18nProvider } from "core/services/i18n"; @@ -52,18 +51,16 @@ const App: React.FC = () => { }> - - - - - - - - - - - - + + + + + + + + + + diff --git a/airbyte-webapp/src/packages/cloud/cloudRoutes.tsx b/airbyte-webapp/src/packages/cloud/cloudRoutes.tsx index 83645f637bf..6d049ec4561 100644 --- a/airbyte-webapp/src/packages/cloud/cloudRoutes.tsx +++ b/airbyte-webapp/src/packages/cloud/cloudRoutes.tsx @@ -2,12 +2,12 @@ import React, { PropsWithChildren, Suspense, useMemo } from "react"; import { createSearchParams, Navigate, Route, Routes, useLocation } from "react-router-dom"; import { useEffectOnce } from "react-use"; -import { ApiErrorBoundary } from 
"components/common/ApiErrorBoundary"; import LoadingPage from "components/LoadingPage"; import { useCurrentWorkspaceId } from "area/workspace/utils"; import { useCurrentOrganizationInfo, useCurrentWorkspace, useInvalidateAllWorkspaceScopeOnChange } from "core/api"; import { usePrefetchCloudWorkspaceData } from "core/api/cloud"; +import { DefaultErrorBoundary } from "core/errors"; import { useAnalyticsIdentifyUser, useAnalyticsRegisterValues } from "core/services/analytics/useAnalyticsService"; import { useAuthService } from "core/services/auth"; import { FeatureItem, useFeature } from "core/services/features"; @@ -87,7 +87,7 @@ const MainRoutes: React.FC = () => { const supportsDataResidency = useFeature(FeatureItem.AllowChangeDataGeographies); return ( - + } /> @@ -134,7 +134,7 @@ const MainRoutes: React.FC = () => { } /> } /> - + ); }; @@ -172,7 +172,7 @@ const CloudWorkspaceDataPrefetcher: React.FC> = ({ ch }; export const Routing: React.FC = () => { - const { user, inited, providers, loggedOut, requirePasswordReset } = useAuthService(); + const { user, inited, providers, provider, loggedOut, requirePasswordReset } = useAuthService(); const workspaceId = useCurrentWorkspaceId(); const { pathname: originalPathname, search, hash } = useLocation(); @@ -181,7 +181,7 @@ export const Routing: React.FC = () => { })}`; const loginRedirectTo = - loggedOut && (originalPathname === "/" || originalPathname.includes("/settings/account")) + loggedOut && originalPathname === "/" ? { pathname: CloudRoutes.Login } : { pathname: CloudRoutes.Login, search: loginRedirectSearchParam }; @@ -203,9 +203,15 @@ export const Routing: React.FC = () => { const userTraits = useMemo( () => user - ? { providers, email: user.email, isCorporate: isCorporateEmail(user.email), currentWorkspaceId: workspaceId } + ? 
{ + providers, + provider, + email: user.email, + isCorporate: isCorporateEmail(user.email), + currentWorkspaceId: workspaceId, + } : {}, - [providers, user, workspaceId] + [provider, providers, user, workspaceId] ); useEffectOnce(() => { diff --git a/airbyte-webapp/src/packages/cloud/components/experiments/SpeedyConnection/CountDownTimer/CountDownTimer.module.scss b/airbyte-webapp/src/packages/cloud/components/experiments/SpeedyConnection/CountDownTimer/CountDownTimer.module.scss deleted file mode 100644 index cc27d26f754..00000000000 --- a/airbyte-webapp/src/packages/cloud/components/experiments/SpeedyConnection/CountDownTimer/CountDownTimer.module.scss +++ /dev/null @@ -1,6 +0,0 @@ -@use "scss/variables"; -@use "scss/colors"; - -.countDownTimer { - color: colors.$blue-100; -} diff --git a/airbyte-webapp/src/packages/cloud/components/experiments/SpeedyConnection/CountDownTimer/CountDownTimer.tsx b/airbyte-webapp/src/packages/cloud/components/experiments/SpeedyConnection/CountDownTimer/CountDownTimer.tsx deleted file mode 100644 index 92a53e4154a..00000000000 --- a/airbyte-webapp/src/packages/cloud/components/experiments/SpeedyConnection/CountDownTimer/CountDownTimer.tsx +++ /dev/null @@ -1,13 +0,0 @@ -import { Text } from "components/ui/Text"; - -import styles from "./CountDownTimer.module.scss"; -import { useCountdown } from "./useCountdown"; -export const CountDownTimer: React.FC<{ expiredOfferDate: string }> = ({ expiredOfferDate }) => { - const [hours, minutes] = useCountdown(expiredOfferDate); - - return ( - - {hours.toString().padStart(2, "0")}h {minutes.toString().padStart(2, "0")}m - - ); -}; diff --git a/airbyte-webapp/src/packages/cloud/components/experiments/SpeedyConnection/CountDownTimer/index.ts b/airbyte-webapp/src/packages/cloud/components/experiments/SpeedyConnection/CountDownTimer/index.ts deleted file mode 100644 index 002ded4737c..00000000000 --- 
a/airbyte-webapp/src/packages/cloud/components/experiments/SpeedyConnection/CountDownTimer/index.ts +++ /dev/null @@ -1 +0,0 @@ -export * from "./CountDownTimer"; diff --git a/airbyte-webapp/src/packages/cloud/components/experiments/SpeedyConnection/CountDownTimer/useCountdown.ts b/airbyte-webapp/src/packages/cloud/components/experiments/SpeedyConnection/CountDownTimer/useCountdown.ts deleted file mode 100644 index 3bf202559bb..00000000000 --- a/airbyte-webapp/src/packages/cloud/components/experiments/SpeedyConnection/CountDownTimer/useCountdown.ts +++ /dev/null @@ -1,25 +0,0 @@ -import { useEffect, useState } from "react"; - -export const useCountdown = (targetDate: string) => { - const countDownDate = new Date(targetDate).getTime(); - - const [countDown, setCountDown] = useState(countDownDate - new Date().getTime()); - - useEffect(() => { - const interval = setInterval(() => { - setCountDown(countDownDate - new Date().getTime()); - }, 1000); - - return () => clearInterval(interval); - }, [countDownDate]); - - return getReturnValues(countDown); -}; - -const getReturnValues = (countDown: number): number[] => { - // calculate time left - const hours = Math.floor((countDown % (1000 * 60 * 60 * 24)) / (1000 * 60 * 60)); - const minutes = Math.floor((countDown % (1000 * 60 * 60)) / (1000 * 60)); - - return [hours, minutes]; -}; diff --git a/airbyte-webapp/src/packages/cloud/components/experiments/SpeedyConnection/SpeedyConnectionBanner/SpeedyConnectionBanner.module.scss b/airbyte-webapp/src/packages/cloud/components/experiments/SpeedyConnection/SpeedyConnectionBanner/SpeedyConnectionBanner.module.scss deleted file mode 100644 index 2429ac543d0..00000000000 --- a/airbyte-webapp/src/packages/cloud/components/experiments/SpeedyConnection/SpeedyConnectionBanner/SpeedyConnectionBanner.module.scss +++ /dev/null @@ -1,22 +0,0 @@ -@use "src/scss/variables"; -@use "src/scss/colors"; - -.speedyConnectionbanner { - &__container { - padding: variables.$spacing-md; - 
background-color: colors.$blue; - color: colors.$foreground; - } - - &__message { - margin: auto; - color: colors.$foreground; - text-align: center; - } - - &__cta { - display: inline; - color: colors.$foreground; - font-weight: 700; - } -} diff --git a/airbyte-webapp/src/packages/cloud/components/experiments/SpeedyConnection/SpeedyConnectionBanner/SpeedyConnectionBanner.tsx b/airbyte-webapp/src/packages/cloud/components/experiments/SpeedyConnection/SpeedyConnectionBanner/SpeedyConnectionBanner.tsx deleted file mode 100644 index 0c8e18c74b9..00000000000 --- a/airbyte-webapp/src/packages/cloud/components/experiments/SpeedyConnection/SpeedyConnectionBanner/SpeedyConnectionBanner.tsx +++ /dev/null @@ -1,36 +0,0 @@ -import { FormattedMessage } from "react-intl"; - -import { FlexContainer } from "components/ui/Flex"; -import { Link } from "components/ui/Link"; -import { Text } from "components/ui/Text"; - -import { ConnectionRoutePaths, RoutePaths } from "pages/routePaths"; - -import styles from "./SpeedyConnectionBanner.module.scss"; -import { CountDownTimer } from "../CountDownTimer"; -import { useExperimentSpeedyConnection } from "../hooks/useExperimentSpeedyConnection"; - -export const SpeedyConnectionBanner = () => { - const { expiredOfferDate } = useExperimentSpeedyConnection(); - - return ( - - - ( - - {link} - - ), - timer: () => , - }} - /> - - - ); -}; diff --git a/airbyte-webapp/src/packages/cloud/components/experiments/SpeedyConnection/SpeedyConnectionBanner/credits.svg b/airbyte-webapp/src/packages/cloud/components/experiments/SpeedyConnection/SpeedyConnectionBanner/credits.svg deleted file mode 100644 index ec9380be9b1..00000000000 --- a/airbyte-webapp/src/packages/cloud/components/experiments/SpeedyConnection/SpeedyConnectionBanner/credits.svg +++ /dev/null @@ -1,196 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - \ No newline at end of file diff --git a/airbyte-webapp/src/packages/cloud/components/experiments/SpeedyConnection/SpeedyConnectionBanner/index.ts b/airbyte-webapp/src/packages/cloud/components/experiments/SpeedyConnection/SpeedyConnectionBanner/index.ts deleted file mode 100644 index b3709edb7b2..00000000000 --- a/airbyte-webapp/src/packages/cloud/components/experiments/SpeedyConnection/SpeedyConnectionBanner/index.ts +++ /dev/null @@ -1 +0,0 @@ -export * from "./SpeedyConnectionBanner"; diff --git a/airbyte-webapp/src/packages/cloud/components/experiments/SpeedyConnection/hooks/useExperimentSpeedyConnection.ts b/airbyte-webapp/src/packages/cloud/components/experiments/SpeedyConnection/hooks/useExperimentSpeedyConnection.ts deleted file mode 100644 index 8458a7b9bb6..00000000000 --- a/airbyte-webapp/src/packages/cloud/components/experiments/SpeedyConnection/hooks/useExperimentSpeedyConnection.ts +++ /dev/null @@ -1,14 +0,0 @@ -import { useCurrentWorkspaceState } from "core/api"; -import { useLocalStorage } from "core/utils/useLocalStorage"; -import { useExperiment } from "hooks/services/Experiment"; - -export const useExperimentSpeedyConnection = () => { - const { hasConnections } = useCurrentWorkspaceState(); - const isVariantEnabled = useExperiment("onboarding.speedyConnection", false); - const [expiredOfferDate] = useLocalStorage("exp-speedy-connection-timestamp", ""); - - const now = new Date(); - const isExperimentVariant = - !hasConnections && expiredOfferDate && new Date(expiredOfferDate) >= now && isVariantEnabled; - return { isExperimentVariant, expiredOfferDate }; -}; diff --git a/airbyte-webapp/src/packages/cloud/services/FirebaseSdkProvider.tsx b/airbyte-webapp/src/packages/cloud/services/FirebaseSdkProvider.tsx index 6d3c54fa58c..6ab6e3a1467 
100644 --- a/airbyte-webapp/src/packages/cloud/services/FirebaseSdkProvider.tsx +++ b/airbyte-webapp/src/packages/cloud/services/FirebaseSdkProvider.tsx @@ -1,11 +1,10 @@ import { getAuth, connectAuthEmulator } from "firebase/auth"; import React from "react"; -import { useConfig } from "core/config"; +import { config } from "core/config"; import { FirebaseAppProvider, useFirebaseApp, AuthProvider } from "packages/firebaseReact"; const FirebaseAppSdksProvider: React.FC> = ({ children }) => { - const config = useConfig(); const firebaseApp = useFirebaseApp(); const auth = getAuth(firebaseApp); if (config.firebase.authEmulatorHost) { @@ -20,8 +19,6 @@ const FirebaseAppSdksProvider: React.FC> = ({ c * based on airbyte app config and also injecting all required firebase sdks */ const FirebaseSdkProvider: React.FC> = ({ children }) => { - const config = useConfig(); - return ( {children} diff --git a/airbyte-webapp/src/packages/cloud/services/auth/CloudAuthService.tsx b/airbyte-webapp/src/packages/cloud/services/auth/CloudAuthService.tsx index a87a77984fb..6668a01c878 100644 --- a/airbyte-webapp/src/packages/cloud/services/auth/CloudAuthService.tsx +++ b/airbyte-webapp/src/packages/cloud/services/auth/CloudAuthService.tsx @@ -30,7 +30,6 @@ import { useGetOrCreateUser } from "core/api"; import { useCreateKeycloakUser, useResendSigninLink, useRevokeUserSession, useUpdateUser } from "core/api/cloud"; import { AuthProvider, UserRead } from "core/api/types/AirbyteClient"; import { AuthContext, AuthContextApi, OAuthLoginState } from "core/services/auth"; -import { useLocalStorage } from "core/utils/useLocalStorage"; import { useNotificationService } from "hooks/services/Notification"; import { SignupFormValues } from "packages/cloud/views/auth/SignupPage/components/SignupForm"; import { useAuth } from "packages/firebaseReact"; @@ -53,7 +52,6 @@ export enum FirebaseAuthMessageId { // Checks for a valid auth session with either keycloak or firebase, and returns the user if 
found. export const CloudAuthService: React.FC = ({ children }) => { const passwordRef = useRef(undefined); - const [, setSpeedyConnectionTimestamp] = useLocalStorage("exp-speedy-connection-timestamp", ""); const [logoutInProgress, setLogoutInProgress] = useState(false); const queryClient = useQueryClient(); const { registerNotification } = useNotificationService(); @@ -183,43 +181,13 @@ export const CloudAuthService: React.FC = ({ children }) => { }, hasPasswordLogin: () => !!firebaseUser.providerData.filter(({ providerId }) => providerId === "password"), providers: firebaseUser.providerData.map(({ providerId }) => providerId), + provider: null, sendEmailVerification: async () => { if (!firebaseUser) { console.error("sendEmailVerifiedLink should be used within auth flow"); throw new Error("Cannot send verification email if firebaseUser is null."); } - return sendEmailVerification(firebaseUser) - .then(() => { - registerNotification({ - id: "workspace.emailVerificationResendSuccess", - text: , - type: "success", - }); - }) - .catch((error) => { - switch (error.code) { - case AuthErrorCodes.NETWORK_REQUEST_FAILED: - registerNotification({ - id: error.code, - text: , - type: "error", - }); - break; - case AuthErrorCodes.TOO_MANY_ATTEMPTS_TRY_LATER: - registerNotification({ - id: error.code, - text: , - type: "warning", - }); - break; - default: - registerNotification({ - id: error.code, - text: , - type: "error", - }); - } - }); + return sendEmailVerification(firebaseUser); }, verifyEmail: verifyFirebaseEmail, }; @@ -232,12 +200,27 @@ export const CloudAuthService: React.FC = ({ children }) => { user: keycloakAuth.airbyteUser, authProvider: AuthProvider.keycloak, displayName: keycloakAuth.keycloakUser?.profile.name ?? null, - emailVerified: true, + emailVerified: keycloakAuth.keycloakUser?.profile.email_verified ?? false, email: keycloakAuth.keycloakUser?.profile.email ?? 
null, getAccessToken: () => Promise.resolve(keycloakAuth.accessTokenRef?.current), + updateName: async (name: string) => { + const user = keycloakAuth.airbyteUser; + if (!user) { + throw new Error("Cannot change name, airbyteUser is null"); + } + await updateAirbyteUser({ + userUpdate: { userId: user.userId, name }, + getAccessToken: async () => keycloakAuth.accessTokenRef?.current ?? "", + }).then(() => { + keycloakAuth.updateAirbyteUser({ ...user, name }); + }); + }, logout, loggedOut: false, providers: null, + provider: keycloakAuth.isSso + ? "sso" + : (keycloakAuth.keycloakUser?.profile.identity_provider as string | undefined) ?? "none", }; } // The context value for an unauthenticated user @@ -249,6 +232,7 @@ export const CloudAuthService: React.FC = ({ children }) => { emailVerified: false, loggedOut: true, providers: null, + provider: null, login: async ({ email, password }: { email: string; password: string }) => { await signInWithEmailAndPassword(firebaseAuth, email, password) .then(() => { @@ -334,11 +318,6 @@ export const CloudAuthService: React.FC = ({ children }) => { // Send verification mail via firebase await sendEmailVerification(user); - - // exp-speedy-connection - if (firebaseAuth.currentUser) { - setSpeedyConnectionTimestamp(String(new Date(new Date().getTime() + 24 * 60 * 60 * 1000))); - } } catch (err) { // Clear the password ref if the user creation fails passwordRef.current = undefined; @@ -396,9 +375,7 @@ export const CloudAuthService: React.FC = ({ children }) => { getAirbyteUser, keycloakAuth, logout, - registerNotification, resendWithSignInLink, - setSpeedyConnectionTimestamp, updateAirbyteUser, verifyFirebaseEmail, ]); diff --git a/airbyte-webapp/src/packages/cloud/services/auth/KeycloakService/KeycloakService.tsx b/airbyte-webapp/src/packages/cloud/services/auth/KeycloakService/KeycloakService.tsx index 4c68dc95627..881ca836b72 100644 --- a/airbyte-webapp/src/packages/cloud/services/auth/KeycloakService/KeycloakService.tsx +++ 
b/airbyte-webapp/src/packages/cloud/services/auth/KeycloakService/KeycloakService.tsx @@ -1,3 +1,6 @@ +import { useQueryClient } from "@tanstack/react-query"; +import { BroadcastChannel } from "broadcast-channel"; +import Keycloak from "keycloak-js"; import isEqual from "lodash/isEqual"; import { User, WebStorageStateStore, UserManager } from "oidc-client-ts"; import { @@ -29,8 +32,11 @@ export type KeycloakServiceContext = { changeRealmAndRedirectToSignin: (realm: string) => Promise; // The access token is stored in a ref so we don't cause a re-render each time it changes. Instead, we can use the current ref value when we call the API. accessTokenRef: MutableRefObject; + updateAirbyteUser: (airbyteUser: UserRead) => void; redirectToSignInWithGoogle: () => Promise; redirectToSignInWithGithub: () => Promise; + redirectToSignInWithPassword: () => Promise; + redirectToRegistrationWithPassword: () => Promise; } & KeycloakAuthState; const keycloakServiceContext = createContext(undefined); @@ -51,6 +57,7 @@ interface KeycloakAuthState { error: Error | null; didInitialize: boolean; isAuthenticated: boolean; + isSso: boolean | null; } const keycloakAuthStateInitialState: KeycloakAuthState = { @@ -59,6 +66,7 @@ const keycloakAuthStateInitialState: KeycloakAuthState = { error: null, didInitialize: false, isAuthenticated: false, + isSso: null, }; type KeycloakAuthStateAction = @@ -73,8 +81,14 @@ type KeycloakAuthStateAction = | { type: "error"; error: Error; + } + | { + type: "userUpdated"; + airbyteUser: UserRead; }; +type BroadcastEvent = Extract; + const keycloakAuthStateReducer = (state: KeycloakAuthState, action: KeycloakAuthStateAction): KeycloakAuthState => { switch (action.type) { case "userLoaded": @@ -84,8 +98,15 @@ const keycloakAuthStateReducer = (state: KeycloakAuthState, action: KeycloakAuth airbyteUser: action.airbyteUser, isAuthenticated: true, didInitialize: true, + // We are using an SSO login if we're not in the AIRBYTE_CLOUD_REALM, which would be the end 
of the issuer + isSso: !action.keycloakUser.profile.iss.endsWith(AIRBYTE_CLOUD_REALM), error: null, }; + case "userUpdated": + return { + ...state, + airbyteUser: action.airbyteUser, + }; case "userUnloaded": return { ...state, @@ -93,6 +114,7 @@ const keycloakAuthStateReducer = (state: KeycloakAuthState, action: KeycloakAuth airbyteUser: null, isAuthenticated: false, didInitialize: true, + isSso: null, error: null, }; case "error": @@ -104,8 +126,11 @@ const keycloakAuthStateReducer = (state: KeycloakAuthState, action: KeycloakAuth } }; +const broadcastChannel = new BroadcastChannel("keycloak-state-sync"); + export const KeycloakService: React.FC = ({ children }) => { const userSigninInitialized = useRef(false); + const queryClient = useQueryClient(); const [userManager] = useState(initializeUserManager); const [authState, dispatch] = useReducer(keycloakAuthStateReducer, keycloakAuthStateInitialState); const { mutateAsync: getAirbyteUser } = useGetOrCreateUser(); @@ -113,6 +138,23 @@ export const KeycloakService: React.FC = ({ children }) => { // Allows us to get the access token as a callback, instead of re-rendering every time a new access token arrives const keycloakAccessTokenRef = useRef(null); + useEffect(() => { + broadcastChannel.onmessage = (event) => { + console.log("broadcastChannel.onmessage", event); + if (event.type === "userUnloaded") { + console.debug("🔑 Received userUnloaded event from other tab."); + dispatch({ type: "userUnloaded" }); + // Need to clear all queries from cache. 
In the tab that triggered the logout this is + // handled inside CloudAuthService.logout + queryClient.removeQueries(); + } else if (event.type === "userLoaded") { + console.debug("🔑 Received userLoaded event from other tab."); + keycloakAccessTokenRef.current = event.keycloakUser.access_token; + dispatch({ type: "userLoaded", keycloakUser: event.keycloakUser, airbyteUser: event.airbyteUser }); + } + }; + }, [queryClient]); + // Initialization of the current user useEffect(() => { if (!userManager || userSigninInitialized.current) { @@ -165,12 +207,16 @@ export const KeycloakService: React.FC = ({ children }) => { // Only if actual user values (not just access_token) have changed, do we need to update the state and cause a re-render if (!usersAreSame({ keycloakUser, airbyteUser }, authState)) { dispatch({ type: "userLoaded", keycloakUser, airbyteUser }); + // Notify other tabs that this tab got a new user loaded (usually meaning this tab signed in) + broadcastChannel.postMessage({ type: "userLoaded", keycloakUser, airbyteUser }); } }; userManager.events.addUserLoaded(handleUserLoaded); const handleUserUnloaded = () => { dispatch({ type: "userUnloaded" }); + // Notify other open tabs that the user got unloaded (i.e. this tab signed out) + broadcastChannel.postMessage({ type: "userUnloaded" }); }; userManager.events.addUserUnloaded(handleUserUnloaded); @@ -215,35 +261,72 @@ export const KeycloakService: React.FC = ({ children }) => { await newUserManager.signinRedirect({ extraQueryParams: { kc_idp_hint: "github" } }); }, []); + const redirectToSignInWithPassword = useCallback(async () => { + const newUserManager = createUserManager(AIRBYTE_CLOUD_REALM); + await newUserManager.signinRedirect(); + }, []); + + /** + * Using the keycloak-js library here instead of oidc-ts, because keycloak-js knows how to route us directly to Keycloak's registration page. 
+ * oidc-ts does not (because that's not part of the OIDC spec) and recreating the logic to set the correct state, code_challenge, etc. would be complicated to maintain. + */ + const redirectToRegistrationWithPassword = useCallback(async () => { + const keycloak = new Keycloak({ + url: `${config.keycloakBaseUrl}/auth`, + realm: AIRBYTE_CLOUD_REALM, + clientId: DEFAULT_KEYCLOAK_CLIENT_ID, + }); + await keycloak.init({}); + keycloak.register({ redirectUri: createRedirectUri(AIRBYTE_CLOUD_REALM) }); + }, []); + + const updateAirbyteUser = useCallback((airbyteUser: UserRead) => { + dispatch({ type: "userUpdated", airbyteUser }); + }, []); + const contextValue = useMemo(() => { const value = { ...authState, userManager, signin: () => userManager.signinRedirect(), signout: () => userManager.signoutRedirect({ post_logout_redirect_uri: window.location.origin }), + updateAirbyteUser, isAuthenticated: authState.isAuthenticated, changeRealmAndRedirectToSignin, accessTokenRef: keycloakAccessTokenRef, redirectToSignInWithGoogle, redirectToSignInWithGithub, + redirectToSignInWithPassword, + redirectToRegistrationWithPassword, }; return value; - }, [authState, userManager, changeRealmAndRedirectToSignin, redirectToSignInWithGoogle, redirectToSignInWithGithub]); + }, [ + authState, + userManager, + updateAirbyteUser, + changeRealmAndRedirectToSignin, + redirectToSignInWithGoogle, + redirectToSignInWithGithub, + redirectToSignInWithPassword, + redirectToRegistrationWithPassword, + ]); return {children}; }; -function createUserManager(realm: string) { +function createRedirectUri(realm: string) { const searchParams = new URLSearchParams(window.location.search); searchParams.set("realm", realm); - const redirect_uri = `${window.location.origin}${window.location.pathname}?${searchParams.toString()}`; - const userManager = new UserManager({ + return `${window.location.origin}${window.location.pathname}?${searchParams.toString()}`; +} + +function createUserManager(realm: string) { + 
return new UserManager({ userStore: new WebStorageStateStore({ store: window.localStorage }), authority: `${config.keycloakBaseUrl}/auth/realms/${realm}`, client_id: DEFAULT_KEYCLOAK_CLIENT_ID, - redirect_uri, + redirect_uri: createRedirectUri(realm), }); - return userManager; } export function initializeUserManager() { diff --git a/airbyte-webapp/src/packages/cloud/services/thirdParty/launchdarkly/LDExperimentService.tsx b/airbyte-webapp/src/packages/cloud/services/thirdParty/launchdarkly/LDExperimentService.tsx index c54fe935655..826cfe55cc9 100644 --- a/airbyte-webapp/src/packages/cloud/services/thirdParty/launchdarkly/LDExperimentService.tsx +++ b/airbyte-webapp/src/packages/cloud/services/thirdParty/launchdarkly/LDExperimentService.tsx @@ -7,7 +7,7 @@ import { finalize, Subject } from "rxjs"; import { LoadingPage } from "components"; import { useCurrentWorkspaceId } from "area/workspace/utils"; -import { useConfig } from "core/config"; +import { config } from "core/config"; import { useAnalyticsService } from "core/services/analytics"; import { useAuthService } from "core/services/auth"; import { FeatureSet, FeatureItem, useFeatureService } from "core/services/features"; @@ -215,6 +215,10 @@ const LDInitializationWrapper: React.FC { + // Return all feature flags from the LD client + return ldClient.current?.allFlags() ?? {}; + }, }), [addContext, removeContext] ); @@ -234,7 +238,7 @@ const LDInitializationWrapper: React.FC> = ({ children }) => { - const { launchDarkly: launchdarklyKey } = useConfig(); + const { launchDarkly: launchdarklyKey } = config; return !launchdarklyKey ? 
( <>{children} diff --git a/airbyte-webapp/src/packages/cloud/views/auth/LoginPage/LoginPage.tsx b/airbyte-webapp/src/packages/cloud/views/auth/LoginPage/LoginPage.tsx index 4335429c261..97d0beb16f0 100644 --- a/airbyte-webapp/src/packages/cloud/views/auth/LoginPage/LoginPage.tsx +++ b/airbyte-webapp/src/packages/cloud/views/auth/LoginPage/LoginPage.tsx @@ -9,14 +9,16 @@ import { HeadTitle } from "components/common/HeadTitle"; import { Form, FormControl } from "components/forms"; import { Box } from "components/ui/Box"; import { Button } from "components/ui/Button"; -import { FlexContainer } from "components/ui/Flex"; +import { FlexContainer, FlexItem } from "components/ui/Flex"; import { Heading } from "components/ui/Heading"; import { Link } from "components/ui/Link"; import { Text } from "components/ui/Text"; import { PageTrackingCodes, useTrackPage } from "core/services/analytics"; import { useAuthService } from "core/services/auth"; +import { useLocalStorage } from "core/utils/useLocalStorage"; import { useAppMonitoringService } from "hooks/services/AppMonitoringService"; +import { useExperiment } from "hooks/services/Experiment"; import { useNotificationService } from "hooks/services/Notification"; import { CloudRoutes } from "packages/cloud/cloudRoutePaths"; import { LoginFormErrorCodes } from "packages/cloud/services/auth/types"; @@ -53,7 +55,11 @@ export const LoginPage: React.FC = () => { const { registerNotification } = useNotificationService(); const { trackError } = useAppMonitoringService(); const [searchParams] = useSearchParams(); + const [keycloakAuthEnabledLocalStorage] = useLocalStorage("airbyte_keycloak-auth-ui", true); + const keycloakAuthEnabledExperiment = useExperiment("authPage.keycloak", true); + const keycloakAuthEnabled = keycloakAuthEnabledExperiment || keycloakAuthEnabledLocalStorage; const loginRedirectString = searchParams.get("loginRedirect"); + const isAcceptingInvitation = loginRedirectString?.includes("accept-invite"); const 
navigate = useNavigate(); @@ -96,56 +102,66 @@ export const LoginPage: React.FC = () => { return ( - - - + + + + + {isAcceptingInvitation && ( + + + + + + )} + {loginWithOAuth && ( <> - - + + {!keycloakAuthEnabled && } )} - - - defaultValues={{ - email: "", - password: "", - }} - schema={LoginPageValidationSchema} - onSubmit={onSubmit} - onError={onError} - > - - - - - - - - - - - - - - + {!keycloakAuthEnabled && ( + + defaultValues={{ + email: "", + password: "", + }} + schema={LoginPageValidationSchema} + onSubmit={onSubmit} + onError={onError} + > + + + + + + + + + + + + + + + )} diff --git a/airbyte-webapp/src/packages/cloud/views/auth/OAuthLogin/OAuthLogin.test.tsx b/airbyte-webapp/src/packages/cloud/views/auth/OAuthLogin/OAuthLogin.test.tsx index 0b02dd8ef60..b1059f82966 100644 --- a/airbyte-webapp/src/packages/cloud/views/auth/OAuthLogin/OAuthLogin.test.tsx +++ b/airbyte-webapp/src/packages/cloud/views/auth/OAuthLogin/OAuthLogin.test.tsx @@ -8,8 +8,14 @@ import { OAuthLogin } from "./OAuthLogin"; const mockLoginWithOAuth = jest.fn(); +const mockRedirectToSignInWithGithub = jest.fn().mockReturnValue(Promise.resolve()); +const mockRedirectToSignInWithGoogle = jest.fn().mockReturnValue(Promise.resolve()); + jest.mock("packages/cloud/services/auth/KeycloakService", () => ({ - useKeycloakService: () => ({ redirectToSignInWithGithub: jest.fn(), redirectToSignInWithGoogle: jest.fn() }), + useKeycloakService: () => ({ + redirectToSignInWithGithub: mockRedirectToSignInWithGithub, + redirectToSignInWithGoogle: mockRedirectToSignInWithGoogle, + }), })); describe("OAuthLogin", () => { @@ -18,14 +24,18 @@ describe("OAuthLogin", () => { }); it("should call auth service for Google", async () => { - const { getByTestId } = render(, { wrapper: TestWrapper }); + const { getByTestId } = render(, { + wrapper: TestWrapper, + }); await userEvents.click(getByTestId("googleOauthLogin")); - expect(mockLoginWithOAuth).toHaveBeenCalledWith("google"); + 
expect(mockRedirectToSignInWithGoogle).toHaveBeenCalled(); }); it("should call auth service for GitHub", async () => { - const { getByTestId } = render(, { wrapper: TestWrapper }); + const { getByTestId } = render(, { + wrapper: TestWrapper, + }); await userEvents.click(getByTestId("githubOauthLogin")); - expect(mockLoginWithOAuth).toHaveBeenCalledWith("github"); + expect(mockRedirectToSignInWithGithub).toHaveBeenCalled(); }); }); diff --git a/airbyte-webapp/src/packages/cloud/views/auth/OAuthLogin/OAuthLogin.tsx b/airbyte-webapp/src/packages/cloud/views/auth/OAuthLogin/OAuthLogin.tsx index d8f186c7161..4999c145bad 100644 --- a/airbyte-webapp/src/packages/cloud/views/auth/OAuthLogin/OAuthLogin.tsx +++ b/airbyte-webapp/src/packages/cloud/views/auth/OAuthLogin/OAuthLogin.tsx @@ -4,13 +4,17 @@ import { createSearchParams, useNavigate, useSearchParams } from "react-router-d import { useUnmount } from "react-use"; import { Subscription } from "rxjs"; +import { Button } from "components/ui/Button"; import { FlexContainer } from "components/ui/Flex"; import { Icon } from "components/ui/Icon"; import { Link } from "components/ui/Link"; +import { LoadingSpinner } from "components/ui/LoadingSpinner"; +import { SignInButton } from "components/ui/SignInButton"; import { Spinner } from "components/ui/Spinner"; -import { OAuthProviders, AuthOAuthLogin } from "core/services/auth"; +import { AuthOAuthLogin, OAuthProviders } from "core/services/auth"; import { useLocalStorage } from "core/utils/useLocalStorage"; +import { useExperiment } from "hooks/services/Experiment"; import { CloudRoutes } from "packages/cloud/cloudRoutePaths"; import { useKeycloakService } from "packages/cloud/services/auth/KeycloakService"; @@ -18,19 +22,26 @@ import githubLogo from "./assets/github-logo.svg"; import googleLogo from "./assets/google-logo.svg"; import styles from "./OAuthLogin.module.scss"; -const GitHubButton: React.FC<{ onClick: () => void }> = ({ onClick }) => { +type PendingRedirect = 
"github" | "google" | "password" | null; + +interface LoginButtonProps { + onClick: () => void; + pendingRedirect: PendingRedirect; +} + +const GitHubButton: React.FC = ({ pendingRedirect, onClick }) => { return ( - ); }; -const GoogleButton: React.FC<{ onClick: () => void }> = ({ onClick }) => { +const GoogleButton: React.FC = ({ onClick, pendingRedirect }) => { return ( - ); @@ -52,10 +63,12 @@ const SsoButton: React.FC = () => { }; interface OAuthLoginProps { + type: "login" | "signup"; loginWithOAuth: AuthOAuthLogin; } -export const OAuthLogin: React.FC = ({ loginWithOAuth }) => { +export const OAuthLogin: React.FC = ({ loginWithOAuth, type }) => { + const [pendingRedirect, setPendingRedirect] = useState<"google" | "github" | "password" | null>(null); const { formatMessage } = useIntl(); const stateSubscription = useRef(); const [errorCode, setErrorCode] = useState(); @@ -63,8 +76,15 @@ export const OAuthLogin: React.FC = ({ loginWithOAuth }) => { const [searchParams] = useSearchParams(); const loginRedirect = searchParams.get("loginRedirect"); const navigate = useNavigate(); - const [keycloakSocialLoginsEnabled] = useLocalStorage("airbyte_keycloak-social-logins", false); - const { redirectToSignInWithGithub, redirectToSignInWithGoogle } = useKeycloakService(); + const [keycloakAuthEnabledLocalStorage] = useLocalStorage("airbyte_keycloak-auth-ui", true); + const keycloakAuthEnabledExperiment = useExperiment("authPage.keycloak", true); + const keycloakAuthEnabled = keycloakAuthEnabledExperiment || keycloakAuthEnabledLocalStorage; + const { + redirectToSignInWithGithub, + redirectToSignInWithGoogle, + redirectToSignInWithPassword, + redirectToRegistrationWithPassword, + } = useKeycloakService(); useUnmount(() => { stateSubscription.current?.unsubscribe(); @@ -109,6 +129,25 @@ export const OAuthLogin: React.FC = ({ loginWithOAuth }) => { const errorMessage = errorCode ? 
getErrorMessage(errorCode) : undefined; + const doRedirectToSignInWithGithub = () => { + setPendingRedirect("github"); + redirectToSignInWithGithub().catch(() => setPendingRedirect(null)); + }; + + const doRedirectToSignInWithGoogle = () => { + setPendingRedirect("google"); + redirectToSignInWithGoogle().catch(() => setPendingRedirect(null)); + }; + + const handleEmailButtonClick = () => { + setPendingRedirect("password"); + if (type === "signup") { + redirectToRegistrationWithPassword().catch(() => setPendingRedirect(null)); + } else { + redirectToSignInWithPassword().catch(() => setPendingRedirect(null)); + } + }; + return ( <> {isLoading && ( @@ -119,12 +158,32 @@ export const OAuthLogin: React.FC = ({ loginWithOAuth }) => { {!isLoading && ( <> (keycloakSocialLoginsEnabled ? redirectToSignInWithGoogle() : login("google"))} + onClick={() => (keycloakAuthEnabled ? doRedirectToSignInWithGoogle() : login("google"))} + pendingRedirect={pendingRedirect} /> (keycloakSocialLoginsEnabled ? redirectToSignInWithGithub() : login("github"))} + onClick={() => (keycloakAuthEnabled ? doRedirectToSignInWithGithub() : login("github"))} + pendingRedirect={pendingRedirect} /> + + {keycloakAuthEnabled && + (type === "login" ? ( + + {pendingRedirect === "password" ? : } + + + ) : ( + + ))} )} {errorMessage &&
    {errorMessage}
    } diff --git a/airbyte-webapp/src/packages/cloud/views/auth/SignupPage/SignupPage.tsx b/airbyte-webapp/src/packages/cloud/views/auth/SignupPage/SignupPage.tsx index 1fd21ed73db..2300d49285c 100644 --- a/airbyte-webapp/src/packages/cloud/views/auth/SignupPage/SignupPage.tsx +++ b/airbyte-webapp/src/packages/cloud/views/auth/SignupPage/SignupPage.tsx @@ -1,6 +1,7 @@ import React from "react"; import { FormattedMessage } from "react-intl"; import { useSearchParams } from "react-router-dom"; +import useLocalStorage from "react-use/lib/useLocalStorage"; import { HeadTitle } from "components/common/HeadTitle"; import { Button } from "components/ui/Button"; @@ -10,6 +11,7 @@ import { Icon } from "components/ui/Icon"; import { PageTrackingCodes, useTrackPage } from "core/services/analytics"; import { useAuthService } from "core/services/auth"; +import { useExperiment } from "hooks/services/Experiment"; import { SignupForm } from "./components/SignupForm"; import styles from "./SignupPage.module.scss"; @@ -20,6 +22,7 @@ import { OAuthLogin } from "../OAuthLogin"; interface SignupPageProps { highlightStyle?: React.CSSProperties; } + const Detail: React.FC> = ({ children }) => { return ( @@ -30,6 +33,9 @@ const Detail: React.FC> = ({ children }) => { }; const SignupPage: React.FC = () => { + const [keycloakAuthEnabledLocalStorage] = useLocalStorage("airbyte_keycloak-auth-ui", true); + const keycloakAuthEnabledExperiment = useExperiment("authPage.keycloak", true); + const keycloakAuthEnabled = keycloakAuthEnabledExperiment || keycloakAuthEnabledLocalStorage; const { loginWithOAuth, signUp } = useAuthService(); useTrackPage(PageTrackingCodes.SIGNUP); @@ -65,16 +71,18 @@ const SignupPage: React.FC = () => { {searchParams.get("method") === "email" ? 
( <> {signUp && } - ) : ( <> - {loginWithOAuth && } - + {loginWithOAuth && } + {!keycloakAuthEnabled && ( + + )} )} diff --git a/airbyte-webapp/src/packages/cloud/views/billing/BillingPage/components/BillingBanners.tsx b/airbyte-webapp/src/packages/cloud/views/billing/BillingPage/components/BillingBanners.tsx index 9bdf2b57a6c..64dca5234c9 100644 --- a/airbyte-webapp/src/packages/cloud/views/billing/BillingPage/components/BillingBanners.tsx +++ b/airbyte-webapp/src/packages/cloud/views/billing/BillingPage/components/BillingBanners.tsx @@ -5,29 +5,24 @@ import { ExternalLink, Link } from "components/ui/Link"; import { Message } from "components/ui/Message"; import { useCurrentWorkspace } from "core/api"; -import { useGetCloudWorkspace } from "core/api/cloud"; +import { useGetCloudWorkspace, useResendEmailVerification } from "core/api/cloud"; import { CloudWorkspaceReadCreditStatus, CloudWorkspaceReadWorkspaceTrialStatus } from "core/api/types/CloudApi"; -import { AuthSendEmailVerification, useAuthService } from "core/services/auth"; +import { useAuthService } from "core/services/auth"; import { links } from "core/utils/links"; import { useExperiment } from "hooks/services/Experiment"; const LOW_BALANCE_CREDIT_THRESHOLD = 20; -interface EmailVerificationHintProps { - sendEmailVerification: AuthSendEmailVerification; -} - -export const EmailVerificationHint: React.FC = ({ sendEmailVerification }) => { - const onResendVerificationMail = async () => { - return sendEmailVerification(); - }; +export const EmailVerificationHint: React.FC = () => { + const { mutateAsync: resendEmailVerification, isLoading } = useResendEmailVerification(); return ( } actionBtnText={} - onAction={onResendVerificationMail} + actionBtnProps={{ isLoading }} + onAction={resendEmailVerification} /> ); }; @@ -99,15 +94,13 @@ const LowCreditBalanceHint: React.FC = () => { }; export const BillingBanners: React.FC = () => { - const { sendEmailVerification, emailVerified } = useAuthService(); + const { 
emailVerified } = useAuthService(); const isAutoRechargeEnabled = useExperiment("billing.autoRecharge", false); return ( - {!emailVerified && sendEmailVerification && ( - - )} + {!emailVerified && } {isAutoRechargeEnabled ? : } ); diff --git a/airbyte-webapp/src/packages/cloud/views/billing/BillingPage/components/CheckoutCreditsModal.tsx b/airbyte-webapp/src/packages/cloud/views/billing/BillingPage/components/CheckoutCreditsModal.tsx index 93158b8d74d..6ba4b7faad4 100644 --- a/airbyte-webapp/src/packages/cloud/views/billing/BillingPage/components/CheckoutCreditsModal.tsx +++ b/airbyte-webapp/src/packages/cloud/views/billing/BillingPage/components/CheckoutCreditsModal.tsx @@ -8,7 +8,7 @@ import { ModalFormSubmissionButtons } from "components/forms/ModalFormSubmission import { Box } from "components/ui/Box"; import { FlexContainer, FlexItem } from "components/ui/Flex"; import { Icon } from "components/ui/Icon"; -import { Link } from "components/ui/Link"; +import { ExternalLink, Link } from "components/ui/Link"; import { Message } from "components/ui/Message"; import { ModalBody, ModalFooter } from "components/ui/Modal"; import { Text } from "components/ui/Text"; @@ -195,6 +195,14 @@ export const CheckoutCreditsModal: React.FC> = ({ onCanc + + + {children} }} + /> + + diff --git a/airbyte-webapp/src/packages/cloud/views/billing/BillingPage/components/ConnectorOptionLabel.tsx b/airbyte-webapp/src/packages/cloud/views/billing/BillingPage/components/ConnectorOptionLabel.tsx index a12a32f2d77..3394be8ad92 100644 --- a/airbyte-webapp/src/packages/cloud/views/billing/BillingPage/components/ConnectorOptionLabel.tsx +++ b/airbyte-webapp/src/packages/cloud/views/billing/BillingPage/components/ConnectorOptionLabel.tsx @@ -1,5 +1,3 @@ -import classNames from "classnames"; - import { ConnectorIcon } from "components/common/ConnectorIcon"; import { FlexContainer, FlexItem } from "components/ui/Flex"; import { SupportLevelBadge } from "components/ui/SupportLevelBadge"; @@ -10,24 
+8,16 @@ import { AvailableDestination, AvailableSource } from "./CreditsUsageContext"; interface ConnectorOptionLabelProps { connector: AvailableSource | AvailableDestination; - disabled?: boolean; } -export const ConnectorOptionLabel: React.FC = ({ connector, disabled }) => { - return ( - - - - {connector.name} - - - - - - ); -}; +export const ConnectorOptionLabel: React.FC = ({ connector }) => ( + + + + {connector.name} + + + + + +); diff --git a/airbyte-webapp/src/packages/cloud/views/billing/BillingPage/components/CreditsUsageContext.tsx b/airbyte-webapp/src/packages/cloud/views/billing/BillingPage/components/CreditsUsageContext.tsx index f49b04b2207..a946b6a5ce4 100644 --- a/airbyte-webapp/src/packages/cloud/views/billing/BillingPage/components/CreditsUsageContext.tsx +++ b/airbyte-webapp/src/packages/cloud/views/billing/BillingPage/components/CreditsUsageContext.tsx @@ -1,9 +1,9 @@ import dayjs from "dayjs"; -import { Dispatch, SetStateAction, createContext, useContext, useMemo, useState } from "react"; +import { createContext, useContext, useMemo, useState } from "react"; import { Option } from "components/ui/ListBox"; -import { useCurrentWorkspace } from "core/api"; +import { useCurrentWorkspace, useFilters } from "core/api"; import { useGetCloudWorkspaceUsage } from "core/api/cloud"; import { DestinationId, SourceId, SupportLevel } from "core/api/types/AirbyteClient"; import { ConsumptionTimeWindow } from "core/api/types/CloudApi"; @@ -42,10 +42,10 @@ interface CreditsUsageContext { destinationOptions: Array>; selectedSource: SourceId | null; selectedDestination: DestinationId | null; - setSelectedSource: Dispatch>; - setSelectedDestination: Dispatch>; + setSelectedSource: (sourceId: SourceId | null) => void; + setSelectedDestination: (destinationId: DestinationId | null) => void; selectedTimeWindow: ConsumptionTimeWindow; - setSelectedTimeWindow: Dispatch>; + setSelectedTimeWindow: (timeWindow: ConsumptionTimeWindow) => void; hasFreeUsage: boolean; } @@ 
-59,12 +59,24 @@ export const useCreditsContext = (): CreditsUsageContext => { return creditsUsageHelpers; }; +interface FilterValues { + selectedTimeWindow: ConsumptionTimeWindow; + selectedSource: SourceId | null; + selectedDestination: DestinationId | null; +} + export const CreditsUsageContextProvider: React.FC> = ({ children }) => { - const [selectedTimeWindow, setSelectedTimeWindow] = useState(ConsumptionTimeWindow.lastMonth); + const [filters, setFilterValue] = useFilters({ + selectedTimeWindow: ConsumptionTimeWindow.lastMonth, + selectedSource: null, + selectedDestination: null, + }); + const { selectedTimeWindow, selectedSource, selectedDestination } = filters; + const [hasFreeUsage, setHasFreeUsage] = useState(false); const { workspaceId } = useCurrentWorkspace(); - const data = useGetCloudWorkspaceUsage(workspaceId, selectedTimeWindow); + const data = useGetCloudWorkspaceUsage(workspaceId, filters.selectedTimeWindow); const { consumptionPerConnectionPerTimeframe, timeWindow } = data; @@ -82,8 +94,6 @@ export const CreditsUsageContextProvider: React.FC(null); - const [selectedDestination, setSelectedDestination] = useState(null); const availableSourcesAndDestinations = useMemo( () => calculateAvailableSourcesAndDestinations(rawConsumptionData), [rawConsumptionData] @@ -135,11 +145,13 @@ export const CreditsUsageContextProvider: React.FC setFilterValue("selectedSource", selectedSource), selectedDestination, - setSelectedDestination, + setSelectedDestination: (selectedDestination: DestinationId | null) => + setFilterValue("selectedDestination", selectedDestination), selectedTimeWindow, - setSelectedTimeWindow, + setSelectedTimeWindow: (selectedTimeWindow: ConsumptionTimeWindow) => + setFilterValue("selectedTimeWindow", selectedTimeWindow), hasFreeUsage, }} > diff --git a/airbyte-webapp/src/packages/cloud/views/billing/BillingPage/components/RemainingCredits.tsx b/airbyte-webapp/src/packages/cloud/views/billing/BillingPage/components/RemainingCredits.tsx 
index c625dd0c186..90b5360b4c0 100644 --- a/airbyte-webapp/src/packages/cloud/views/billing/BillingPage/components/RemainingCredits.tsx +++ b/airbyte-webapp/src/packages/cloud/views/billing/BillingPage/components/RemainingCredits.tsx @@ -6,7 +6,6 @@ import { useEffectOnce } from "react-use"; import { Button } from "components/ui/Button"; import { Card } from "components/ui/Card"; import { FlexContainer, FlexItem } from "components/ui/Flex"; -import { Icon } from "components/ui/Icon"; import { ExternalLink } from "components/ui/Link"; import { Text } from "components/ui/Text"; @@ -117,7 +116,7 @@ export const RemainingCredits: React.FC = () => { size="xs" onClick={showCreditsModal} isLoading={isWaitingForCredits} - icon={} + icon="plus" > diff --git a/airbyte-webapp/src/packages/cloud/views/billing/BillingPage/components/calculateUsageDataObjects.tsx b/airbyte-webapp/src/packages/cloud/views/billing/BillingPage/components/calculateUsageDataObjects.tsx index 1a5e81a8b9c..7ba246ecc36 100644 --- a/airbyte-webapp/src/packages/cloud/views/billing/BillingPage/components/calculateUsageDataObjects.tsx +++ b/airbyte-webapp/src/packages/cloud/views/billing/BillingPage/components/calculateUsageDataObjects.tsx @@ -21,23 +21,22 @@ export const generateArrayForTimeWindow = (timeWindow?: ConsumptionTimeWindow) = const usagePerTimeChunk: UsagePerTimeChunk = []; // base case: lastMonth, which returns past 30 days of usage - let start = dayjs().subtract(29, "day"); + const end = dayjs(); + let start = end.subtract(29, "day"); let aggregation: ManipulateType = "day"; let formatterString = "MMM DD"; if (timeWindow === "lastSixMonths") { aggregation = "week"; formatterString = "MMM DD"; - start = dayjs().subtract(6, "month").startOf(aggregation); + start = end.subtract(6, "month").startOf(aggregation); } else if (timeWindow === "lastYear") { aggregation = "month"; formatterString = "MMM 'YY"; - start = dayjs().subtract(1, "year").startOf(aggregation); + start = end.subtract(1, 
"year").startOf(aggregation); } - const end = dayjs(); - - for (let current = start; !current.isAfter(end); current = current.add(1, aggregation)) { + for (let current = start; !current.isAfter(end.endOf(aggregation)); current = current.add(1, aggregation)) { usagePerTimeChunk.push({ timeChunkLabel: current.format(formatterString), billedCost: 0, diff --git a/airbyte-webapp/src/packages/cloud/views/layout/CloudMainView/CloudHelpDropdown.tsx b/airbyte-webapp/src/packages/cloud/views/layout/CloudMainView/CloudHelpDropdown.tsx index 5a44db7e4e4..4a056e20374 100644 --- a/airbyte-webapp/src/packages/cloud/views/layout/CloudMainView/CloudHelpDropdown.tsx +++ b/airbyte-webapp/src/packages/cloud/views/layout/CloudMainView/CloudHelpDropdown.tsx @@ -53,7 +53,7 @@ export const CloudHelpDropdown: React.FC = () => { ]} onChange={handleChatUs} label={} - icon={} + icon="question" /> ); }; diff --git a/airbyte-webapp/src/packages/cloud/views/layout/CloudMainView/CloudMainView.tsx b/airbyte-webapp/src/packages/cloud/views/layout/CloudMainView/CloudMainView.tsx index 38c27761cdf..7aa480f4205 100644 --- a/airbyte-webapp/src/packages/cloud/views/layout/CloudMainView/CloudMainView.tsx +++ b/airbyte-webapp/src/packages/cloud/views/layout/CloudMainView/CloudMainView.tsx @@ -7,58 +7,28 @@ import { FlexContainer } from "components/ui/Flex"; import { useCurrentWorkspace } from "core/api"; import { useGetCloudWorkspaceAsync, useListCloudWorkspacesInfinite } from "core/api/cloud"; -import { CloudWorkspaceReadWorkspaceTrialStatus as WorkspaceTrialStatus } from "core/api/types/CloudApi"; -import { useAuthService } from "core/services/auth"; -import { isCorporateEmail } from "core/utils/freeEmailProviders"; -import { useAppMonitoringService } from "hooks/services/AppMonitoringService"; -import { useExperimentSpeedyConnection } from "packages/cloud/components/experiments/SpeedyConnection/hooks/useExperimentSpeedyConnection"; -import { SpeedyConnectionBanner } from 
"packages/cloud/components/experiments/SpeedyConnection/SpeedyConnectionBanner"; -import { ResourceNotFoundErrorBoundary } from "views/common/ResourceNotFoundErrorBoundary"; -import { StartOverErrorView } from "views/common/StartOverErrorView"; +import { DefaultErrorBoundary } from "core/errors"; import { SideBar } from "views/layout/SideBar/SideBar"; import { CloudHelpDropdown } from "./CloudHelpDropdown"; import styles from "./CloudMainView.module.scss"; -import { InsufficientPermissionsErrorBoundary } from "./InsufficientPermissionsErrorBoundary"; import { WorkspaceStatusBanner } from "./WorkspaceStatusBanner"; const CloudMainView: React.FC = (props) => { const workspace = useCurrentWorkspace(); const cloudWorkspace = useGetCloudWorkspaceAsync(workspace.workspaceId); - const { trackError } = useAppMonitoringService(); - - // exp-speedy-connection - const { isExperimentVariant } = useExperimentSpeedyConnection(); - - const { user } = useAuthService(); - - const isTrial = - cloudWorkspace?.workspaceTrialStatus === WorkspaceTrialStatus.in_trial || - cloudWorkspace?.workspaceTrialStatus === WorkspaceTrialStatus.pre_trial; - - const showExperimentBanner = isExperimentVariant && isTrial && user && isCorporateEmail(user.email); - return ( - } trackError={trackError}> -
    - {cloudWorkspace && - (showExperimentBanner ? ( - - ) : ( - - ))} + {cloudWorkspace && } + + } /> +
    + + }>{props.children ?? } +
    - - } /> -
    - } trackError={trackError}> - }>{props.children ?? } - -
    -
    - +
    ); }; diff --git a/airbyte-webapp/src/packages/cloud/views/layout/CloudMainView/InsufficientPermissionsErrorBoundary.tsx b/airbyte-webapp/src/packages/cloud/views/layout/CloudMainView/InsufficientPermissionsErrorBoundary.tsx deleted file mode 100644 index b81c25c1577..00000000000 --- a/airbyte-webapp/src/packages/cloud/views/layout/CloudMainView/InsufficientPermissionsErrorBoundary.tsx +++ /dev/null @@ -1,50 +0,0 @@ -import React from "react"; - -import { CommonRequestError } from "core/api"; -import { TrackErrorFn } from "hooks/services/AppMonitoringService"; - -interface BoundaryState { - hasError: boolean; - message?: React.ReactNode | null; -} - -const initialState: BoundaryState = { - hasError: false, - message: null, -}; - -interface InsufficientPermissionsErrorBoundaryProps { - errorComponent: React.ReactElement; - trackError: TrackErrorFn; -} - -export class InsufficientPermissionsErrorBoundary extends React.Component< - React.PropsWithChildren, - BoundaryState -> { - static getDerivedStateFromError(error: CommonRequestError): BoundaryState { - if (error.message.startsWith("Insufficient permissions")) { - return { hasError: true, message: error.message }; - } - throw error; - } - - componentDidCatch(error: Error): void { - this.props.trackError(error, { errorBoundary: this.constructor.name }); - } - - state = initialState; - - reset = (): void => { - this.setState(initialState); - }; - - render(): React.ReactNode { - return this.state.hasError - ? 
React.cloneElement(this.props.errorComponent, { - message: this.state.message, - onReset: this.reset, - }) - : this.props.children; - } -} diff --git a/airbyte-webapp/src/packages/cloud/views/settings/CloudSettingsPage.tsx b/airbyte-webapp/src/packages/cloud/views/settings/CloudSettingsPage.tsx index 3322ade2616..a0f20c82f8c 100644 --- a/airbyte-webapp/src/packages/cloud/views/settings/CloudSettingsPage.tsx +++ b/airbyte-webapp/src/packages/cloud/views/settings/CloudSettingsPage.tsx @@ -1,19 +1,16 @@ import React, { Suspense } from "react"; -import { FormattedMessage, useIntl } from "react-intl"; +import { useIntl } from "react-intl"; import { Outlet } from "react-router-dom"; -import { LoadingPage, MainPageWithScroll } from "components"; -import { HeadTitle } from "components/common/HeadTitle"; +import { LoadingPage } from "components"; + +import { SettingsLayout, SettingsLayoutContent } from "area/settings/components/SettingsLayout"; import { SettingsButton, SettingsLink, SettingsNavigation, SettingsNavigationBlock, -} from "components/settings/SettingsNavigation"; -import { FlexContainer, FlexItem } from "components/ui/Flex"; -import { Heading } from "components/ui/Heading"; -import { PageHeader } from "components/ui/PageHeader"; - +} from "area/settings/components/SettingsNavigation"; import { useCurrentOrganizationInfo } from "core/api"; import { FeatureItem, useFeature } from "core/services/features"; import { isOsanoActive, showOsanoDrawer } from "core/utils/dataPrivacy"; @@ -32,101 +29,88 @@ export const CloudSettingsPage: React.FC = () => { const showAdvancedSettings = useExperiment("settings.showAdvancedSettings", false); return ( - } - pageTitle={ - - - - } - /> - } - > - - - + + + + + {isTokenManagementEnabled && ( - {isTokenManagementEnabled && ( - - )} - {isOsanoActive() && ( - showOsanoDrawer()} - name={formatMessage({ id: "settings.cookiePreferences" })} - /> - )} - {showAdvancedSettings && ( - - )} - - + )} + {isOsanoActive() && ( + 
showOsanoDrawer()} + name={formatMessage({ id: "settings.cookiePreferences" })} + /> + )} + {showAdvancedSettings && ( - {supportsDataResidency && ( - - )} + )} + + + + {supportsDataResidency && ( + )} + + + {supportsCloudDbtIntegration && ( - {supportsCloudDbtIntegration && ( - - )} + )} + + + {organization && canViewOrgSettings && ( + - {organization && canViewOrgSettings && ( - - - - )} - - - }> - - - - - + )} + + + }> + + + + ); }; diff --git a/airbyte-webapp/src/packages/cloud/views/settings/integrations/DbtCloudSettingsView.tsx b/airbyte-webapp/src/packages/cloud/views/settings/integrations/DbtCloudSettingsView.tsx index 0ebaa5bc011..891268aa45e 100644 --- a/airbyte-webapp/src/packages/cloud/views/settings/integrations/DbtCloudSettingsView.tsx +++ b/airbyte-webapp/src/packages/cloud/views/settings/integrations/DbtCloudSettingsView.tsx @@ -5,8 +5,8 @@ import * as yup from "yup"; import { Form, FormControl } from "components/forms"; import { FormSubmissionButtons } from "components/forms/FormSubmissionButtons"; import { Button } from "components/ui/Button"; -import { Card } from "components/ui/Card"; import { FlexContainer } from "components/ui/Flex"; +import { Heading } from "components/ui/Heading"; import { ExternalLink } from "components/ui/Link"; import { Text } from "components/ui/Text"; @@ -61,47 +61,46 @@ export const DbtCloudSettingsView: React.FC = () => { }; return ( - - - - {node}, - }} - /> - - - defaultValues={{ serviceToken: "" }} - onSubmit={onSubmit} - onSuccess={onSuccess} - onError={onError} - schema={ServiceTokenFormSchema} - disabled={!canUpdateWorkspace} - > - - {hasExistingToken ? ( - - - - ) : ( - - )} - - - + + {formatMessage({ id: "settings.integrationSettings.dbtCloudSettings" })} + + {node}, + }} + /> + + + defaultValues={{ serviceToken: "" }} + onSubmit={onSubmit} + onSuccess={onSuccess} + onError={onError} + schema={ServiceTokenFormSchema} + disabled={!canUpdateWorkspace} + > + + {hasExistingToken ? 
( + + + + ) : ( + + )} + + ); }; diff --git a/airbyte-webapp/src/packages/cloud/views/users/AccountSettingsView/AccountSettingsView.tsx b/airbyte-webapp/src/packages/cloud/views/users/AccountSettingsView/AccountSettingsView.tsx index cae4614a1b1..a28c88bac48 100644 --- a/airbyte-webapp/src/packages/cloud/views/users/AccountSettingsView/AccountSettingsView.tsx +++ b/airbyte-webapp/src/packages/cloud/views/users/AccountSettingsView/AccountSettingsView.tsx @@ -1,24 +1,28 @@ import React from "react"; import { FlexContainer } from "components/ui/Flex"; +import { Separator } from "components/ui/Separator"; import { PageTrackingCodes, useTrackPage } from "core/services/analytics"; import { useAuthService } from "core/services/auth"; import { EmailSection, NameSection, PasswordSection } from "./components"; -import { LogoutSection } from "./components/LogoutSection"; export const AccountSettingsView: React.FC = () => { - const { logout, updateName, hasPasswordLogin, updatePassword } = useAuthService(); + const { updateName, hasPasswordLogin, updatePassword } = useAuthService(); useTrackPage(PageTrackingCodes.SETTINGS_ACCOUNT); return ( - - {updateName && } + - {hasPasswordLogin?.() && updatePassword && } - {logout && } + {updateName && } + {hasPasswordLogin?.() && updatePassword && ( + <> + + + + )} ); }; diff --git a/airbyte-webapp/src/packages/cloud/views/users/AccountSettingsView/components/EmailSection.module.scss b/airbyte-webapp/src/packages/cloud/views/users/AccountSettingsView/components/EmailSection.module.scss new file mode 100644 index 00000000000..f12608b1b93 --- /dev/null +++ b/airbyte-webapp/src/packages/cloud/views/users/AccountSettingsView/components/EmailSection.module.scss @@ -0,0 +1,3 @@ +.emailControl { + padding-bottom: 0; +} diff --git a/airbyte-webapp/src/packages/cloud/views/users/AccountSettingsView/components/EmailSection.tsx b/airbyte-webapp/src/packages/cloud/views/users/AccountSettingsView/components/EmailSection.tsx index 
409d10ff94c..c3dd33a3e7c 100644 --- a/airbyte-webapp/src/packages/cloud/views/users/AccountSettingsView/components/EmailSection.tsx +++ b/airbyte-webapp/src/packages/cloud/views/users/AccountSettingsView/components/EmailSection.tsx @@ -3,10 +3,11 @@ import { useIntl } from "react-intl"; import * as yup from "yup"; import { Form, FormControl } from "components/forms"; -import { Card } from "components/ui/Card"; import { useCurrentUser } from "core/services/auth"; +import styles from "./EmailSection.module.scss"; + const emailFormSchema = yup.object({ email: yup.string().required("form.empty.error"), }); @@ -20,27 +21,26 @@ export const EmailSection: React.FC = () => { const user = useCurrentUser(); return ( - - - defaultValues={{ - email: user.email, - }} - schema={emailFormSchema} - > - - name="email" - fieldType="input" - type="text" - label={formatMessage({ id: "settings.accountSettings.email" })} - placeholder={formatMessage({ - id: "login.yourEmail.placeholder", - })} - /* + + defaultValues={{ + email: user.email, + }} + schema={emailFormSchema} + > + + containerControlClassName={styles.emailControl} + name="email" + fieldType="input" + type="text" + label={formatMessage({ id: "settings.accountSettings.email" })} + placeholder={formatMessage({ + id: "login.yourEmail.placeholder", + })} + /* show user's email in read-only mode, details: https://github.com/airbytehq/airbyte-platform-internal/issues/1269 */ - disabled - /> - - + disabled + /> + ); }; diff --git a/airbyte-webapp/src/packages/cloud/views/users/AccountSettingsView/components/LogoutSection.tsx b/airbyte-webapp/src/packages/cloud/views/users/AccountSettingsView/components/LogoutSection.tsx deleted file mode 100644 index a8ede60ab20..00000000000 --- a/airbyte-webapp/src/packages/cloud/views/users/AccountSettingsView/components/LogoutSection.tsx +++ /dev/null @@ -1,22 +0,0 @@ -import { useMutation } from "@tanstack/react-query"; -import { FormattedMessage } from "react-intl"; - -import { Box } from 
"components/ui/Box"; -import { Button } from "components/ui/Button"; -import { FlexContainer } from "components/ui/Flex"; - -import { AuthLogout } from "core/services/auth"; - -export const LogoutSection = ({ logout }: { logout: AuthLogout }) => { - const { mutateAsync: doLogout, isLoading: isLoggingOut } = useMutation(() => logout()); - - return ( - - - - - - ); -}; diff --git a/airbyte-webapp/src/packages/cloud/views/users/AccountSettingsView/components/NameSection.tsx b/airbyte-webapp/src/packages/cloud/views/users/AccountSettingsView/components/NameSection.tsx index 1ad9d220726..1deaef54d69 100644 --- a/airbyte-webapp/src/packages/cloud/views/users/AccountSettingsView/components/NameSection.tsx +++ b/airbyte-webapp/src/packages/cloud/views/users/AccountSettingsView/components/NameSection.tsx @@ -4,7 +4,6 @@ import * as yup from "yup"; import { Form, FormControl } from "components/forms"; import { FormSubmissionButtons } from "components/forms/FormSubmissionButtons"; -import { Card } from "components/ui/Card"; import { AuthChangeName, useCurrentUser } from "core/services/auth"; import { useAppMonitoringService } from "hooks/services/AppMonitoringService"; @@ -46,24 +45,22 @@ export const NameSection: React.FC = ({ updateName }) => { }; return ( - - - onSubmit={({ name }) => updateName(name)} - onError={onError} - onSuccess={onSuccess} - schema={nameFormSchema} - defaultValues={{ name: user.name }} - > - - label={formatMessage({ id: "settings.accountSettings.name" })} - fieldType="input" - name="name" - placeholder={formatMessage({ - id: "settings.accountSettings.name.placeholder", - })} - /> - - - + + onSubmit={({ name }) => updateName(name)} + onError={onError} + onSuccess={onSuccess} + schema={nameFormSchema} + defaultValues={{ name: user.name }} + > + + label={formatMessage({ id: "settings.accountSettings.name" })} + fieldType="input" + name="name" + placeholder={formatMessage({ + id: "settings.accountSettings.name.placeholder", + })} + /> + + ); }; diff 
--git a/airbyte-webapp/src/packages/cloud/views/users/AccountSettingsView/components/PasswordSection.tsx b/airbyte-webapp/src/packages/cloud/views/users/AccountSettingsView/components/PasswordSection.tsx index 46b34c08be1..37c60b2fb6a 100644 --- a/airbyte-webapp/src/packages/cloud/views/users/AccountSettingsView/components/PasswordSection.tsx +++ b/airbyte-webapp/src/packages/cloud/views/users/AccountSettingsView/components/PasswordSection.tsx @@ -5,7 +5,6 @@ import * as yup from "yup"; import { Form, FormControl } from "components/forms"; import { FormSubmissionButtons } from "components/forms/FormSubmissionButtons"; -import { Card } from "components/ui/Card"; import { AuthUpdatePassword, useCurrentUser } from "core/services/auth"; import { useAppMonitoringService } from "hooks/services/AppMonitoringService"; @@ -98,39 +97,37 @@ export const PasswordSection: React.FC = ({ updatePassword }; return ( - - - defaultValues={defaultFormValues} - onSubmit={onSubmit} - onSuccess={onSuccess} - onError={onError} - schema={passwordFormSchema} - > - - label={formatMessage({ id: "settings.accountSettings.currentPassword" })} - name="currentPassword" - type="password" - fieldType="input" - required - autoComplete="current-password" - /> - - label={formatMessage({ id: "settings.accountSettings.newPassword" })} - name="newPassword" - type="password" - fieldType="input" - required - autoComplete="new-password" - /> - - label={formatMessage({ id: "settings.accountSettings.newPasswordConfirmation" })} - name="passwordConfirmation" - type="password" - fieldType="input" - required - /> - - - + + defaultValues={defaultFormValues} + onSubmit={onSubmit} + onSuccess={onSuccess} + onError={onError} + schema={passwordFormSchema} + > + + label={formatMessage({ id: "settings.accountSettings.currentPassword" })} + name="currentPassword" + type="password" + fieldType="input" + required + autoComplete="current-password" + /> + + label={formatMessage({ id: "settings.accountSettings.newPassword" 
})} + name="newPassword" + type="password" + fieldType="input" + required + autoComplete="new-password" + /> + + label={formatMessage({ id: "settings.accountSettings.newPasswordConfirmation" })} + name="passwordConfirmation" + type="password" + fieldType="input" + required + /> + + ); }; diff --git a/airbyte-webapp/src/packages/cloud/views/users/ApplicationSettingsView/ApplicationSettingsView.tsx b/airbyte-webapp/src/packages/cloud/views/users/ApplicationSettingsView/ApplicationSettingsView.tsx index 62f7ad6bc81..69e5c5536f3 100644 --- a/airbyte-webapp/src/packages/cloud/views/users/ApplicationSettingsView/ApplicationSettingsView.tsx +++ b/airbyte-webapp/src/packages/cloud/views/users/ApplicationSettingsView/ApplicationSettingsView.tsx @@ -4,7 +4,6 @@ import { useMemo } from "react"; import { FormattedMessage } from "react-intl"; import { Box } from "components/ui/Box"; -import { Card } from "components/ui/Card"; import { FlexContainer } from "components/ui/Flex"; import { Heading } from "components/ui/Heading"; import { Table } from "components/ui/Table"; @@ -85,10 +84,10 @@ export const ApplicationSettingsView = () => { }, [columnHelper]); return ( - + <> - + @@ -101,7 +100,7 @@ export const ApplicationSettingsView = () => { {applications.length ? ( - +
    ) : ( @@ -110,6 +109,6 @@ export const ApplicationSettingsView = () => { )} - + ); }; diff --git a/airbyte-webapp/src/packages/cloud/views/users/ApplicationSettingsView/CreateApplicationControl.tsx b/airbyte-webapp/src/packages/cloud/views/users/ApplicationSettingsView/CreateApplicationControl.tsx index c568edf20bc..05d3e6ba029 100644 --- a/airbyte-webapp/src/packages/cloud/views/users/ApplicationSettingsView/CreateApplicationControl.tsx +++ b/airbyte-webapp/src/packages/cloud/views/users/ApplicationSettingsView/CreateApplicationControl.tsx @@ -5,7 +5,6 @@ import { Form, FormControl } from "components/forms"; import { FormSubmissionButtons } from "components/forms/FormSubmissionButtons"; import { Box } from "components/ui/Box"; import { Button } from "components/ui/Button"; -import { Icon } from "components/ui/Icon"; import { ModalFooter } from "components/ui/Modal"; import { Tooltip } from "components/ui/Tooltip"; @@ -17,22 +16,24 @@ export const CreateApplicationControl = () => { const { formatMessage } = useIntl(); const { mutateAsync: createApplication } = useCreateApplication(); const { applications } = useListApplications(); - const { openModal, closeModal } = useModalService(); + const { openModal } = useModalService(); const schema = yup.object().shape({ name: yup.string().required("form.empty.error"), }); - const onCreateApplicationSubmission = async (values: ApplicationCreate) => { - await createApplication(values); - closeModal(); - }; - - const onAddApplicationButtonClick = async () => { - openModal({ + const onAddApplicationButtonClick = () => + openModal({ title: formatMessage({ id: "settings.application.create" }), - content: () => ( - schema={schema} defaultValues={{ name: "" }} onSubmit={onCreateApplicationSubmission}> + content: ({ onComplete, onCancel }) => ( + + schema={schema} + defaultValues={{ name: "" }} + onSubmit={async (values: ApplicationCreate) => { + await createApplication(values); + onComplete(); + }} + > { /> - + ), size: "md", 
}); - }; return ( <> {applications.length === 2 ? ( } onClick={onAddApplicationButtonClick} variant="primary" disabled> + } @@ -63,12 +63,7 @@ export const CreateApplicationControl = () => { ) : ( - )} diff --git a/airbyte-webapp/src/packages/cloud/views/users/ApplicationSettingsView/DeleteApplicationControl.tsx b/airbyte-webapp/src/packages/cloud/views/users/ApplicationSettingsView/DeleteApplicationControl.tsx index d5bc5497279..dbe62276d04 100644 --- a/airbyte-webapp/src/packages/cloud/views/users/ApplicationSettingsView/DeleteApplicationControl.tsx +++ b/airbyte-webapp/src/packages/cloud/views/users/ApplicationSettingsView/DeleteApplicationControl.tsx @@ -2,12 +2,12 @@ import { useCallback } from "react"; import { FormattedMessage, useIntl } from "react-intl"; import { Button } from "components/ui/Button"; -import { Icon } from "components/ui/Icon"; import { useDeleteApplication } from "core/api"; import { useConfirmationModalService } from "hooks/services/ConfirmationModal"; import styles from "./ActionButton.module.scss"; + export const DeleteApplicationControl: React.FC<{ applicationId: string; applicationName: string }> = ({ applicationId, applicationName, @@ -30,12 +30,7 @@ export const DeleteApplicationControl: React.FC<{ applicationId: string; applica }, [openConfirmationModal, formatMessage, applicationName, deleteApplication, applicationId, closeConfirmationModal]); return ( - ); diff --git a/airbyte-webapp/src/packages/cloud/views/users/ApplicationSettingsView/TokenModal.tsx b/airbyte-webapp/src/packages/cloud/views/users/ApplicationSettingsView/TokenModal.tsx index 24645da8dea..e347ac0ccce 100644 --- a/airbyte-webapp/src/packages/cloud/views/users/ApplicationSettingsView/TokenModal.tsx +++ b/airbyte-webapp/src/packages/cloud/views/users/ApplicationSettingsView/TokenModal.tsx @@ -3,11 +3,10 @@ import { FormattedMessage } from "react-intl"; import { Box } from "components/ui/Box"; import { Button } from "components/ui/Button"; import { CopyButton } from 
"components/ui/CopyButton"; -import { Icon } from "components/ui/Icon"; import { Message } from "components/ui/Message"; import { ModalBody, ModalFooter } from "components/ui/Modal"; -import { FILE_TYPE_DOWNLOAD, downloadFile, fileizeString } from "core/utils/file"; +import { downloadFile, FILE_TYPE_DOWNLOAD, fileizeString } from "core/utils/file"; import styles from "./TokenModal.module.scss"; @@ -35,7 +34,7 @@ export const TokenModalBody: React.FC<{ token: string }> = ({ token }) => { - diff --git a/airbyte-webapp/src/packages/cloud/views/users/InviteUsersHint/InviteUsersHint.tsx b/airbyte-webapp/src/packages/cloud/views/users/InviteUsersHint/InviteUsersHint.tsx index da814435061..866d02673ae 100644 --- a/airbyte-webapp/src/packages/cloud/views/users/InviteUsersHint/InviteUsersHint.tsx +++ b/airbyte-webapp/src/packages/cloud/views/users/InviteUsersHint/InviteUsersHint.tsx @@ -11,13 +11,14 @@ import { useIntent } from "core/utils/rbac"; import { useModalService } from "hooks/services/Modal"; import styles from "./InviteUsersHint.module.scss"; -import { InviteUsersModal } from "../InviteUsersModal"; +import { AddUserModal } from "../../workspaces/WorkspaceSettingsView/components/AddUserModal"; export interface InviteUsersHintProps { connectorType: "source" | "destination"; } export const InviteUsersHint: React.FC = ({ connectorType }) => { + const workspace = useCurrentWorkspace(); const { formatMessage } = useIntl(); const inviteUsersHintVisible = useFeature(FeatureItem.ShowInviteUsersHint); const { workspaceId } = useCurrentWorkspace(); @@ -29,9 +30,9 @@ export const InviteUsersHint: React.FC = ({ connectorType } const onOpenInviteUsersModal = () => - openModal({ - title: formatMessage({ id: "modals.addUser.title" }), - content: () => , + openModal({ + title: formatMessage({ id: "userInvitations.create.modal.title" }, { workspace: workspace.name }), + content: ({ onComplete }) => , size: "md", }); diff --git 
a/airbyte-webapp/src/packages/cloud/views/users/InviteUsersModal/EmailFormControlList.tsx b/airbyte-webapp/src/packages/cloud/views/users/InviteUsersModal/EmailFormControlList.tsx deleted file mode 100644 index 6424842aff1..00000000000 --- a/airbyte-webapp/src/packages/cloud/views/users/InviteUsersModal/EmailFormControlList.tsx +++ /dev/null @@ -1,48 +0,0 @@ -import React from "react"; -import { useFieldArray, useFormState } from "react-hook-form"; -import { FormattedMessage } from "react-intl"; - -import { FormControl } from "components/forms"; -import { Button } from "components/ui/Button"; -import { FlexContainer, FlexItem } from "components/ui/Flex"; -import { Icon } from "components/ui/Icon"; - -import { InviteUsersFormValues } from "./InviteUsersModal"; - -export const EmailFormControlList: React.FC = () => { - const { isValid, isDirty } = useFormState(); - const { fields, remove, append } = useFieldArray({ - name: "users", - }); - - const appendNewRow = () => - append({ - email: "", - role: "admin", // the only role we currently have - }); - - return ( - <> - {fields.map((field, index) => ( - - - - - - - - ); -}; diff --git a/airbyte-webapp/src/packages/cloud/views/users/InviteUsersModal/InviteUsersModal.tsx b/airbyte-webapp/src/packages/cloud/views/users/InviteUsersModal/InviteUsersModal.tsx deleted file mode 100644 index ba65d36f16d..00000000000 --- a/airbyte-webapp/src/packages/cloud/views/users/InviteUsersModal/InviteUsersModal.tsx +++ /dev/null @@ -1,101 +0,0 @@ -import React from "react"; -import { useIntl } from "react-intl"; -import * as yup from "yup"; -import { SchemaOf } from "yup"; - -import { Form } from "components/forms"; -import { ModalFormSubmissionButtons } from "components/forms/ModalFormSubmissionButtons"; -import { FlexContainer } from "components/ui/Flex"; -import { ModalBody, ModalFooter } from "components/ui/Modal"; - -import { useUserHook } from "core/api/cloud"; -import { Action, Namespace, useAnalyticsService } from 
"core/services/analytics"; -import { trackError } from "core/utils/datadog"; -import { useModalService } from "hooks/services/Modal"; -import { useNotificationService } from "hooks/services/Notification"; -import { useCurrentWorkspace } from "hooks/services/useWorkspace"; - -import { EmailFormControlList } from "./EmailFormControlList"; - -export interface InviteUsersFormValues { - users: Array<{ - role: string; - email: string; - }>; -} - -const requestConnectorValidationSchema: SchemaOf = yup.object({ - users: yup.array().of( - yup.object().shape({ - role: yup.string().required("form.empty.error"), - email: yup.string().required("form.empty.error").email("form.email.error"), - }) - ), -}); - -export const InviteUsersModal: React.FC<{ - invitedFrom: "source" | "destination" | "user.settings"; -}> = ({ invitedFrom }) => { - const { formatMessage } = useIntl(); - const { workspaceId } = useCurrentWorkspace(); - const { inviteUserLogic } = useUserHook(); - const { mutateAsync: invite } = inviteUserLogic; - const { closeModal } = useModalService(); - - const { registerNotification } = useNotificationService(); - const analyticsService = useAnalyticsService(); - - const onSubmit = async (values: InviteUsersFormValues) => { - await invite({ users: values.users, workspaceId }); - - analyticsService.track(Namespace.USER, Action.INVITE, { - invited_from: invitedFrom, - }); - }; - - const onSuccess = () => { - registerNotification({ - id: "invite-users-success", - text: formatMessage({ id: "inviteUsers.invitationsSentSuccess" }), - type: "success", - }); - closeModal(); - }; - - const onError = (e: Error, { users }: InviteUsersFormValues) => { - trackError(e, { users }); - registerNotification({ - id: "invite-users-error", - text: formatMessage({ id: "inviteUsers.invitationsSentError" }), - type: "error", - }); - }; - - const formDefaultValues = { - users: [ - { - email: "", - role: "admin", // the only role we have for now - }, - ], - }; - - return ( - - 
schema={requestConnectorValidationSchema} - defaultValues={formDefaultValues} - onSubmit={onSubmit} - onSuccess={onSuccess} - onError={onError} - > - - - - - - - - - - ); -}; diff --git a/airbyte-webapp/src/packages/cloud/views/users/InviteUsersModal/index.tsx b/airbyte-webapp/src/packages/cloud/views/users/InviteUsersModal/index.tsx deleted file mode 100644 index 1e5b24a28a9..00000000000 --- a/airbyte-webapp/src/packages/cloud/views/users/InviteUsersModal/index.tsx +++ /dev/null @@ -1 +0,0 @@ -export { InviteUsersModal } from "./InviteUsersModal"; diff --git a/airbyte-webapp/src/packages/cloud/views/workspaces/DataResidencyView/DataResidencyView.tsx b/airbyte-webapp/src/packages/cloud/views/workspaces/DataResidencyView/DataResidencyView.tsx index be6cb52943a..49a0f1963ac 100644 --- a/airbyte-webapp/src/packages/cloud/views/workspaces/DataResidencyView/DataResidencyView.tsx +++ b/airbyte-webapp/src/packages/cloud/views/workspaces/DataResidencyView/DataResidencyView.tsx @@ -5,8 +5,8 @@ import * as yup from "yup"; import { Form } from "components/forms"; import { DataResidencyDropdown } from "components/forms/DataResidencyDropdown"; import { FormSubmissionButtons } from "components/forms/FormSubmissionButtons"; -import { Card } from "components/ui/Card"; import { FlexContainer } from "components/ui/Flex"; +import { Heading } from "components/ui/Heading"; import { ExternalLink } from "components/ui/Link"; import { Text } from "components/ui/Text"; @@ -27,16 +27,6 @@ interface DefaultDataResidencyFormValues { defaultGeography?: Geography; } -const fieldDescription = ( - {node}, - request: (node: React.ReactNode) => {node}, - }} - /> -); - export const DataResidencyView: React.FC = () => { const workspace = useCurrentWorkspace(); const { mutateAsync: updateWorkspace } = useUpdateWorkspace(); @@ -70,35 +60,33 @@ export const DataResidencyView: React.FC = () => { }; return ( - - - - {node}, - }} - /> - - - defaultValues={{ - defaultGeography: workspace.defaultGeography, + 
+ {formatMessage({ id: "settings.defaultDataResidency" })} + + {node}, + request: (node: React.ReactNode) => {node}, }} - schema={schema} - onSubmit={handleSubmit} - onSuccess={onSuccess} - onError={onError} - disabled={!canUpdateWorkspace} - > - - labelId="settings.defaultGeography" - description={fieldDescription} - name="defaultGeography" - inline - /> - - - - + /> + + + defaultValues={{ + defaultGeography: workspace.defaultGeography, + }} + schema={schema} + onSubmit={handleSubmit} + onSuccess={onSuccess} + onError={onError} + disabled={!canUpdateWorkspace} + > + + labelId="settings.defaultGeography" + name="defaultGeography" + /> + + + ); }; diff --git a/airbyte-webapp/src/packages/cloud/views/workspaces/WorkspaceSettingsView/WorkspaceSettingsView.tsx b/airbyte-webapp/src/packages/cloud/views/workspaces/WorkspaceSettingsView/WorkspaceSettingsView.tsx index ff55d4938d8..a8662cfa5d2 100644 --- a/airbyte-webapp/src/packages/cloud/views/workspaces/WorkspaceSettingsView/WorkspaceSettingsView.tsx +++ b/airbyte-webapp/src/packages/cloud/views/workspaces/WorkspaceSettingsView/WorkspaceSettingsView.tsx @@ -1,10 +1,9 @@ import React from "react"; import { FormattedMessage } from "react-intl"; -import { Box } from "components/ui/Box"; -import { Card } from "components/ui/Card"; import { FlexContainer } from "components/ui/Flex"; import { Heading } from "components/ui/Heading"; +import { Separator } from "components/ui/Separator"; import { useCurrentWorkspace } from "core/api"; import { useTrackPage, PageTrackingCodes } from "core/services/analytics"; @@ -22,32 +21,31 @@ export const WorkspaceSettingsView: React.FC = () => { const isAccessManagementEnabled = useFeature(FeatureItem.RBAC); return ( - - - - - - - - + + + + + {isAccessManagementEnabled && ( - + <> + - + )} {canDeleteWorkspace && ( - + <> + - + - + )} ); diff --git a/airbyte-webapp/src/packages/cloud/views/workspaces/WorkspaceSettingsView/components/AddUserModal.tsx 
b/airbyte-webapp/src/packages/cloud/views/workspaces/WorkspaceSettingsView/components/AddUserModal.tsx index 3f31f15f317..7ede4bfbeaa 100644 --- a/airbyte-webapp/src/packages/cloud/views/workspaces/WorkspaceSettingsView/components/AddUserModal.tsx +++ b/airbyte-webapp/src/packages/cloud/views/workspaces/WorkspaceSettingsView/components/AddUserModal.tsx @@ -1,8 +1,9 @@ import { useDeferredValue, useMemo, useState } from "react"; import { useFormState } from "react-hook-form"; -import { FormattedMessage } from "react-intl"; -import { SchemaOf } from "yup"; +import { FormattedMessage, useIntl } from "react-intl"; +import { useLocation } from "react-router-dom"; import * as yup from "yup"; +import { SchemaOf } from "yup"; import { Form } from "components/forms"; import { Box } from "components/ui/Box"; @@ -18,7 +19,9 @@ import { useListWorkspaceAccessUsers, } from "core/api"; import { PermissionType, WorkspaceUserAccessInfoRead } from "core/api/types/AirbyteClient"; +import { Action, Namespace, useAnalyticsService } from "core/services/analytics"; import { FeatureItem, useFeature } from "core/services/features"; +import { useIntent } from "core/utils/rbac"; import { AddUserModalBody } from "./AddUserModalBody"; @@ -42,16 +45,31 @@ const SubmissionButton: React.FC = () => { ); }; -export const AddUserModal: React.FC<{ closeModal: () => void }> = ({ closeModal }) => { +export const AddUserModal: React.FC<{ onSubmit: () => void }> = ({ onSubmit }) => { + const { formatMessage } = useIntl(); const workspaceId = useCurrentWorkspaceId(); const organizationInfo = useCurrentOrganizationInfo(); - const { users } = useListUsersInOrganization(organizationInfo?.organizationId); + const canListUsersInOrganization = useIntent("ListOrganizationMembers", { + organizationId: organizationInfo?.organizationId, + }); + const { users } = useListUsersInOrganization( + canListUsersInOrganization ? 
organizationInfo?.organizationId : undefined + ); const [searchValue, setSearchValue] = useState(""); const deferredSearchValue = useDeferredValue(searchValue); const [selectedRow, setSelectedRow] = useState(null); const { mutateAsync: createInvitation } = useCreateUserInvitation(); const { usersWithAccess } = useListWorkspaceAccessUsers(workspaceId); const canInviteExternalUsers = useFeature(FeatureItem.ExternalInvitations); + const analyticsService = useAnalyticsService(); + const location = useLocation(); + const invitedFrom = useMemo(() => { + return location.pathname.includes("source") + ? "source" + : location.pathname.includes("destination") + ? "destination" + : "user.settings"; + }, [location.pathname]); const isValidEmail = useMemo(() => { // yup considers an empty string a valid email address so we need to check both @@ -65,7 +83,12 @@ export const AddUserModal: React.FC<{ closeModal: () => void }> = ({ closeModal scopeType: "workspace", scopeId: workspaceId, }); - closeModal(); + + analyticsService.track(Namespace.USER, Action.INVITE, { + invited_from: invitedFrom, + }); + + onSubmit(); }; /* Before the user begins typing an email address, the list of users should only be users @@ -74,7 +97,7 @@ export const AddUserModal: React.FC<{ closeModal: () => void }> = ({ closeModal When they begin typing, we filter a list that is a superset of workspaceAccessUsers + organization users. We want to prefer the workspaceAccessUsers object for a given user (if present) because it contains all relevant permissions for the user. - Then, we enrich that from the list of organization_member who don't have a permission to this workspace. + Then, we enrich that from the list of organization_members who don't have a permission to this workspace. 
*/ const userMap = new Map(); @@ -95,9 +118,10 @@ export const AddUserModal: React.FC<{ closeModal: () => void }> = ({ closeModal }); users.forEach((user) => { - // the first check here is important only for the "empty search value" case, where we want to show all users who don't have a workspace permission - // for other cases, it is at worst slightly redundant - if (user.permissionType === "organization_member" && !userMap.has(user.userId)) { + if ( + user.permissionType === "organization_member" && // they are an organization_member + !usersWithAccess.some((u) => u.userId === user.userId) // they don't have a workspace permission (they may not be listed) + ) { userMap.set(user.userId, { userId: user.userId, userName: user.name, @@ -132,7 +156,11 @@ export const AddUserModal: React.FC<{ closeModal: () => void }> = ({ closeModal onSubmit={onInviteSubmit} > - setSearchValue(e.target.value)} /> + setSearchValue(e.target.value)} + placeholder={formatMessage({ id: "userInvitations.create.modal.search" })} + /> = ({ // handle when the selected option is no longer visible useEffect(() => { - // user had selected to invite a new user, then changed the search value so that option is no longer valid, clear form value - if (selectedRow === "inviteNewUser" && !showInviteNewUser) { - setSelectedRow(null); - setValue("email", "", { shouldValidate: true }); - } + const resetPredicates = [ + // user had selected to invite a new user, then changed the search value so that option is no longer valid + selectedRow === "inviteNewUser" && !showInviteNewUser, - // user had selected to invite a new user, then changed the search value to another valid option, clear form value and deselect - if (selectedRow === "inviteNewUser" && deferredSearchValue !== getValues("email")) { - setSelectedRow(null); - setValue("email", "", { shouldValidate: true }); - } + // user had selected to invite a new user, then changed the search value to another valid email + selectedRow === "inviteNewUser" && 
deferredSearchValue !== getValues("email"), + + // user had selected a user and that user is no longer visible + selectedRow && selectedRow !== "inviteNewUser" && !usersToList.find((user) => user.userId === selectedRow), + ]; - // user had selected a user and that user is no longer visible, clear it - if (selectedRow && selectedRow !== "inviteNewUser" && !usersToList.find((user) => user.userId === selectedRow)) { + if (resetPredicates.some(Boolean)) { setSelectedRow(null); setValue("email", "", { shouldValidate: true }); + setValue("permission", PermissionType.workspace_admin, { shouldValidate: true }); } }, [usersToList, showInviteNewUser, selectedRow, setSelectedRow, setValue, deferredSearchValue, getValues]); @@ -81,10 +80,7 @@ export const AddUserModalBody: React.FC = ({ email={user.userEmail} selectedRow={selectedRow} setSelectedRow={setSelectedRow} - permissions={{ - organizationPermission: user.organizationPermission, - workspacePermission: user.workspacePermission, - }} + user={user} /> ); diff --git a/airbyte-webapp/src/packages/cloud/views/workspaces/WorkspaceSettingsView/components/DeleteCloudWorkspace.tsx b/airbyte-webapp/src/packages/cloud/views/workspaces/WorkspaceSettingsView/components/DeleteCloudWorkspace.tsx index 5d633d1bc8e..9ed35fb1767 100644 --- a/airbyte-webapp/src/packages/cloud/views/workspaces/WorkspaceSettingsView/components/DeleteCloudWorkspace.tsx +++ b/airbyte-webapp/src/packages/cloud/views/workspaces/WorkspaceSettingsView/components/DeleteCloudWorkspace.tsx @@ -1,21 +1,50 @@ import React from "react"; -import { FormattedMessage } from "react-intl"; +import { FormattedMessage, useIntl } from "react-intl"; +import { useNavigate } from "react-router-dom"; import { Button } from "components/ui/Button"; -import { useConfirmWorkspaceDeletionModal } from "area/workspace/utils/useConfirmWorkspaceDeletionModal"; import { useCurrentWorkspace } from "core/api"; import { useRemoveCloudWorkspace } from "core/api/cloud"; +import { 
useConfirmationModalService } from "hooks/services/ConfirmationModal"; +import { useNotificationService } from "hooks/services/Notification"; +import { RoutePaths } from "pages/routePaths"; export const DeleteCloudWorkspace: React.FC = () => { const workspace = useCurrentWorkspace(); const { mutateAsync: removeCloudWorkspace, isLoading: isRemovingCloudWorkspace } = useRemoveCloudWorkspace(); + const { registerNotification } = useNotificationService(); + const navigate = useNavigate(); + const { formatMessage } = useIntl(); + const { openConfirmationModal, closeConfirmationModal } = useConfirmationModalService(); - const confirmWorkspaceDeletion = useConfirmWorkspaceDeletionModal(workspace, removeCloudWorkspace); + const onRemoveWorkspaceClick = () => + openConfirmationModal({ + text: `settings.workspaceSettings.deleteWorkspace.confirmation.text`, + title: ( + + ), + submitButtonText: "settings.workspaceSettings.delete.confirmation.submitButtonText", + confirmationText: workspace.name, + onSubmit: async () => { + await removeCloudWorkspace(workspace.workspaceId); + registerNotification({ + id: "settings.workspace.delete.success", + text: formatMessage({ id: "settings.workspaceSettings.delete.success" }), + type: "success", + }); + navigate(`/${RoutePaths.Workspaces}`); + closeConfirmationModal(); + }, + submitButtonDataId: "reset", + }); return ( - ); diff --git a/airbyte-webapp/src/packages/cloud/views/workspaces/WorkspaceSettingsView/components/ExistingUserIndicator.tsx b/airbyte-webapp/src/packages/cloud/views/workspaces/WorkspaceSettingsView/components/ExistingUserIndicator.tsx new file mode 100644 index 00000000000..9d142a82a96 --- /dev/null +++ b/airbyte-webapp/src/packages/cloud/views/workspaces/WorkspaceSettingsView/components/ExistingUserIndicator.tsx @@ -0,0 +1,39 @@ +import { FormattedMessage } from "react-intl"; + +import { FlexContainer } from "components/ui/Flex"; +import { Icon } from "components/ui/Icon"; +import { Text } from "components/ui/Text"; 
+import { Tooltip } from "components/ui/Tooltip"; + +import { RbacRole } from "core/utils/rbac/rbacPermissionsQuery"; + +interface ExistingUserIndicatorProps { + highestPermissionType: RbacRole; +} + +export const ExistingUserIndicator: React.FC = ({ highestPermissionType }) => { + const roleId = + highestPermissionType === "ADMIN" + ? "role.admin" + : highestPermissionType === "EDITOR" + ? "role.editor" + : highestPermissionType === "READER" + ? "role.reader" + : "role.member"; + + return ( + + + + + + + } + placement="top-start" + > + + + ); +}; diff --git a/airbyte-webapp/src/packages/cloud/views/workspaces/WorkspaceSettingsView/components/FirebaseInviteUserButton.tsx b/airbyte-webapp/src/packages/cloud/views/workspaces/WorkspaceSettingsView/components/FirebaseInviteUserButton.tsx deleted file mode 100644 index 036dee0d26c..00000000000 --- a/airbyte-webapp/src/packages/cloud/views/workspaces/WorkspaceSettingsView/components/FirebaseInviteUserButton.tsx +++ /dev/null @@ -1,34 +0,0 @@ -import { FormattedMessage, useIntl } from "react-intl"; - -import { Button } from "components/ui/Button"; -import { Icon } from "components/ui/Icon"; - -import { useCurrentWorkspace } from "core/api"; -import { useIntent } from "core/utils/rbac"; -import { useModalService } from "hooks/services/Modal"; -import { InviteUsersModal } from "packages/cloud/views/users/InviteUsersModal"; - -export const FirebaseInviteUserButton: React.FC = () => { - const { openModal } = useModalService(); - const { formatMessage } = useIntl(); - const { workspaceId } = useCurrentWorkspace(); - const canUpdateWorkspacePermissions = useIntent("UpdateWorkspacePermissions", { workspaceId }); - - const onOpenInviteUsersModal = () => - openModal({ - title: formatMessage({ id: "modals.addUser.title" }), - content: () => , - size: "md", - }); - - return ( - - ); -}; diff --git a/airbyte-webapp/src/packages/cloud/views/workspaces/WorkspaceSettingsView/components/InviteUserRow.module.scss 
b/airbyte-webapp/src/packages/cloud/views/workspaces/WorkspaceSettingsView/components/InviteUserRow.module.scss index 29da1fcf0aa..3365de164c9 100644 --- a/airbyte-webapp/src/packages/cloud/views/workspaces/WorkspaceSettingsView/components/InviteUserRow.module.scss +++ b/airbyte-webapp/src/packages/cloud/views/workspaces/WorkspaceSettingsView/components/InviteUserRow.module.scss @@ -6,28 +6,46 @@ border-bottom: variables.$border-thin solid colors.$grey-50; &__label { + padding: variables.$spacing-md 0; display: inline-block; width: 100%; cursor: pointer; + padding-left: variables.$spacing-md; + padding-right: variables.$spacing-md; + height: 60px; + + &:hover { + background-color: colors.$grey-50; + } + } + + &__labelContent { + height: 100%; } &__dot { flex: 0 0 auto; + line-height: 0; } &__hiddenInput { @include mixins.visually-hidden; - &:checked { - + .radioButtonTiles__toggle { - border-color: colors.$blue; - } - } - &:focus-visible { - + .radioButtonTiles__toggle { - outline: 2px solid colors.$blue-900; + + .inviteUserRow__label { + outline: variables.$border-thin solid colors.$blue-900; } } } + + &__listBoxButton { + border: none; + border-radius: variables.$border-radius-sm; + background-color: transparent; + cursor: pointer; + } + + &__listBoxButton:hover { + background-color: colors.$grey-100; + } } diff --git a/airbyte-webapp/src/packages/cloud/views/workspaces/WorkspaceSettingsView/components/InviteUserRow.tsx b/airbyte-webapp/src/packages/cloud/views/workspaces/WorkspaceSettingsView/components/InviteUserRow.tsx index 096cb119c4f..92292df2120 100644 --- a/airbyte-webapp/src/packages/cloud/views/workspaces/WorkspaceSettingsView/components/InviteUserRow.tsx +++ b/airbyte-webapp/src/packages/cloud/views/workspaces/WorkspaceSettingsView/components/InviteUserRow.tsx @@ -1,63 +1,79 @@ import React, { useMemo, useState } from "react"; import { useFormContext } from "react-hook-form"; -import { useIntl } from "react-intl"; +import { FormattedMessage, useIntl } 
from "react-intl"; import { SelectedIndicatorDot } from "components/connection/CreateConnection/SelectedIndicatorDot"; import { Box } from "components/ui/Box"; -import { FlexContainer } from "components/ui/Flex"; +import { FlexContainer, FlexItem } from "components/ui/Flex"; +import { Icon } from "components/ui/Icon"; +import { ListBox } from "components/ui/ListBox"; import { Text } from "components/ui/Text"; import { PermissionType, WorkspaceUserAccessInfoRead } from "core/api/types/AirbyteClient"; +import { useCurrentUser } from "core/services/auth"; +import { FeatureItem, useFeature } from "core/services/features"; +import { partitionPermissionType } from "core/utils/rbac/rbacPermissionsQuery"; +import { + getWorkspaceAccessLevel, + permissionsByResourceType, + unifyWorkspaceUserData, +} from "pages/SettingsPage/pages/AccessManagementPage/components/useGetAccessManagementData"; +import { UserRoleText } from "pages/SettingsPage/pages/AccessManagementPage/components/UserRoleText"; +import { disallowedRoles } from "pages/SettingsPage/pages/AccessManagementPage/next/ChangeRoleMenuItem"; +import { ChangeRoleMenuItemContent } from "pages/SettingsPage/pages/AccessManagementPage/next/ChangeRoleMenuItemContent"; import { AddUserFormValues } from "./AddUserModal"; import styles from "./InviteUserRow.module.scss"; +import { ViewOnlyUserRow } from "./ViewOnlyUserRow"; interface InviteUserRowProps { id: string; name?: string; email: string; - permissions?: Pick; selectedRow: string | null; setSelectedRow: (value: string | null) => void; + user?: WorkspaceUserAccessInfoRead; } -export const InviteUserRow: React.FC = ({ - id, - name, - email, - permissions, - selectedRow, - setSelectedRow, -}) => { - const [permissionType] = useState(PermissionType.workspace_admin); +export const InviteUserRow: React.FC = ({ id, name, email, selectedRow, setSelectedRow, user }) => { + const transformedUser = !!user ? 
unifyWorkspaceUserData([user], [])[0] : null; + const allowAllRBACRoles = useFeature(FeatureItem.AllowAllRBACRoles); + + const [selectedPermissionType, setPermissionType] = useState(PermissionType.workspace_admin); const { setValue } = useFormContext(); const { formatMessage } = useIntl(); + const { userId: currentUserId } = useCurrentUser(); + const isCurrentUser = user?.userId === currentUserId; + const isOrgAdmin = user?.organizationPermission?.permissionType === PermissionType.organization_admin; const onSelectRow = () => { setSelectedRow(id); - setValue("permission", permissionType, { shouldValidate: true }); + setValue("permission", selectedPermissionType, { shouldValidate: true }); setValue("email", email, { shouldValidate: true }); }; + const onSelectPermission = (selectedValue: PermissionType) => { + setPermissionType(selectedValue); + setValue("permission", selectedValue, { shouldValidate: true }); + }; + const shouldDisableRow = useMemo(() => { - return id === "inviteNewUser" - ? false - : permissions?.organizationPermission?.permissionType === PermissionType.organization_admin || - !!permissions?.workspacePermission?.permissionType; - }, [permissions, id]); + return id === "inviteNewUser" ? false : isOrgAdmin || !!user?.workspacePermission?.permissionType || isCurrentUser; + }, [id, isOrgAdmin, isCurrentUser, user]); + + const highestPermissionType = user ? getWorkspaceAccessLevel(user) : undefined; + + const selectedPermissionTypeString = partitionPermissionType(selectedPermissionType)[1]; if (shouldDisableRow) { return ( - - - - {id === "inviteNewUser" ? 
formatMessage({ id: "userInvitations.create.modal.addNew" }) : name} - - {email} - - - - + ); } @@ -76,21 +92,63 @@ export const InviteUserRow: React.FC = ({ {/* the linter cannot seem to keep track of the input + label here */} {/* eslint-disable-next-line jsx-a11y/label-has-associated-control */} ); diff --git a/airbyte-webapp/src/packages/cloud/views/workspaces/WorkspaceSettingsView/components/UpdateCloudWorkspaceName.tsx b/airbyte-webapp/src/packages/cloud/views/workspaces/WorkspaceSettingsView/components/UpdateCloudWorkspaceName.tsx index bea3b49fb93..6757a5828fb 100644 --- a/airbyte-webapp/src/packages/cloud/views/workspaces/WorkspaceSettingsView/components/UpdateCloudWorkspaceName.tsx +++ b/airbyte-webapp/src/packages/cloud/views/workspaces/WorkspaceSettingsView/components/UpdateCloudWorkspaceName.tsx @@ -75,7 +75,7 @@ export const UpdateCloudWorkspaceName: React.FC = () => { id: "settings.workspaceSettings.updateWorkspaceNameForm.name.placeholder", })} /> - {canUpdateWorkspace && } + {canUpdateWorkspace && } ); }; diff --git a/airbyte-webapp/src/packages/cloud/views/workspaces/WorkspaceSettingsView/components/ViewOnlyUserRow.module.scss b/airbyte-webapp/src/packages/cloud/views/workspaces/WorkspaceSettingsView/components/ViewOnlyUserRow.module.scss new file mode 100644 index 00000000000..ffb765db2d0 --- /dev/null +++ b/airbyte-webapp/src/packages/cloud/views/workspaces/WorkspaceSettingsView/components/ViewOnlyUserRow.module.scss @@ -0,0 +1,13 @@ +@use "scss/variables"; +@use "scss/colors"; + +.existingUserRow { + padding-left: variables.$spacing-md; + padding-right: variables.$spacing-md; + height: 60px; + border-bottom: variables.$border-thin solid colors.$grey-50; + + &__content { + height: 100%; + } +} diff --git a/airbyte-webapp/src/packages/cloud/views/workspaces/WorkspaceSettingsView/components/ViewOnlyUserRow.tsx b/airbyte-webapp/src/packages/cloud/views/workspaces/WorkspaceSettingsView/components/ViewOnlyUserRow.tsx new file mode 100644 index 
00000000000..8675ee7e84c --- /dev/null +++ b/airbyte-webapp/src/packages/cloud/views/workspaces/WorkspaceSettingsView/components/ViewOnlyUserRow.tsx @@ -0,0 +1,62 @@ +import { FormattedMessage } from "react-intl"; + +import { Badge } from "components/ui/Badge"; +import { Box } from "components/ui/Box"; +import { FlexContainer } from "components/ui/Flex"; +import { Text } from "components/ui/Text"; +import { Tooltip } from "components/ui/Tooltip"; + +import { ExistingUserIndicator } from "./ExistingUserIndicator"; +import styles from "./ViewOnlyUserRow.module.scss"; + +interface ViewOnlyUserRowProps { + name?: string; + email: string; + isCurrentUser: boolean; + isOrgAdmin: boolean; + highestPermissionType?: "ADMIN" | "EDITOR" | "READER" | "MEMBER"; +} +export const ViewOnlyUserRow: React.FC = ({ + name, + email, + isCurrentUser, + isOrgAdmin, + highestPermissionType, +}) => { + return ( + + + + + {name} + {isCurrentUser && ( + + + + + + )} + + + {email} + + + {isOrgAdmin && ( + + + + } + placement="top-start" + > + + + )} + {!isOrgAdmin && !!highestPermissionType && ( + + )} + + + ); +}; diff --git a/airbyte-webapp/src/packages/cloud/views/workspaces/WorkspacesPage/CloudWorkspacesCreateControl.tsx b/airbyte-webapp/src/packages/cloud/views/workspaces/WorkspacesPage/CloudWorkspacesCreateControl.tsx index 9af8a8c7b39..7a2d79707ec 100644 --- a/airbyte-webapp/src/packages/cloud/views/workspaces/WorkspacesPage/CloudWorkspacesCreateControl.tsx +++ b/airbyte-webapp/src/packages/cloud/views/workspaces/WorkspacesPage/CloudWorkspacesCreateControl.tsx @@ -9,7 +9,6 @@ import { FormSubmissionButtons } from "components/forms/FormSubmissionButtons"; import { Box } from "components/ui/Box"; import { Button } from "components/ui/Button"; import { Card } from "components/ui/Card"; -import { Icon } from "components/ui/Icon"; import { Text } from "components/ui/Text"; import { useListWorkspaces } from "core/api"; @@ -99,7 +98,7 @@ export const CloudWorkspacesCreateControl: React.FC = 
() => { variant="secondary" data-testid="workspaces.createNew" size="lg" - icon={} + icon="plus" className={styles.createButton} > diff --git a/airbyte-webapp/src/packages/cloud/views/workspaces/WorkspacesPage/CloudWorkspacesPage.test.tsx b/airbyte-webapp/src/packages/cloud/views/workspaces/WorkspacesPage/CloudWorkspacesPage.test.tsx index f6bbb920186..fbc63ff348c 100644 --- a/airbyte-webapp/src/packages/cloud/views/workspaces/WorkspacesPage/CloudWorkspacesPage.test.tsx +++ b/airbyte-webapp/src/packages/cloud/views/workspaces/WorkspacesPage/CloudWorkspacesPage.test.tsx @@ -5,7 +5,7 @@ import { useListPermissions } from "core/api"; import { useListCloudWorkspacesInfinite } from "core/api/cloud"; import { OrganizationRead } from "core/api/types/AirbyteClient"; -import { CloudWorkspacesPage } from "./CloudWorkspacesPage"; +import { CloudWorkspacesPageInner } from "./CloudWorkspacesPage"; jest.mock("core/services/auth", () => ({ useAuthService: () => ({}), @@ -47,7 +47,7 @@ describe("CloudWorkspacesPage", () => { { permissionType: "organization_member", userId: "123", permissionId: "123", organizationId: "321" }, ], }); - const wrapper = await render(); + const wrapper = await render(); expect(wrapper.queryByTestId("noWorkspacePermissionsBanner")).toBeInTheDocument(); expect(wrapper.getByTestId("noWorkspacePermissionsBanner")).toHaveTextContent("321@example.com"); }); @@ -58,7 +58,7 @@ describe("CloudWorkspacesPage", () => { { permissionType: "organization_member", userId: "123", permissionId: "123", organizationId: "456" }, ], }); - const wrapper = await render(); + const wrapper = await render(); expect(wrapper.queryByTestId("noWorkspacePermissionsBanner")).toBeInTheDocument(); expect(wrapper.getByTestId("noWorkspacePermissionsBanner")).toHaveTextContent("321@example.com"); }); @@ -76,7 +76,7 @@ describe("CloudWorkspacesPage", () => { { permissionType: "instance_admin", userId: "123", permissionId: "2" }, ], }); - const wrapper = await render(); + const wrapper = 
await render(); expect(wrapper.queryByTestId("noWorkspacePermissionsBanner")).not.toBeInTheDocument(); }); it("should not show if you see any workspaces (e.g. as an instance admin)", async () => { @@ -92,7 +92,7 @@ describe("CloudWorkspacesPage", () => { { permissionType: "organization_member", userId: "123", permissionId: "123", organizationId: "321" }, ], }); - const wrapper = await render(); + const wrapper = await render(); expect(wrapper.queryByTestId("noWorkspacePermissionsBanner")).not.toBeInTheDocument(); }); @@ -103,7 +103,7 @@ describe("CloudWorkspacesPage", () => { { permissionType: "organization_editor", userId: "123", permissionId: "2", organizationId: "456" }, ], }); - const wrapper = await render(); + const wrapper = await render(); expect(wrapper.queryByTestId("noWorkspacePermissionsBanner")).not.toBeInTheDocument(); }); }); diff --git a/airbyte-webapp/src/packages/cloud/views/workspaces/WorkspacesPage/CloudWorkspacesPage.tsx b/airbyte-webapp/src/packages/cloud/views/workspaces/WorkspacesPage/CloudWorkspacesPage.tsx index e6f4a1026bf..f2827d41f8b 100644 --- a/airbyte-webapp/src/packages/cloud/views/workspaces/WorkspacesPage/CloudWorkspacesPage.tsx +++ b/airbyte-webapp/src/packages/cloud/views/workspaces/WorkspacesPage/CloudWorkspacesPage.tsx @@ -2,6 +2,7 @@ import { useMutation } from "@tanstack/react-query"; import React, { useDeferredValue, useState } from "react"; import { FormattedMessage } from "react-intl"; +import { HeadTitle } from "components/common/HeadTitle"; import AirbyteLogo from "components/illustrations/airbyte-logo.svg?react"; import { Box } from "components/ui/Box"; import { Button } from "components/ui/Button"; @@ -22,7 +23,7 @@ import { WORKSPACE_LIST_LENGTH } from "pages/workspaces/WorkspacesPage"; import { CloudWorkspacesCreateControl } from "./CloudWorkspacesCreateControl"; import styles from "./CloudWorkspacesPage.module.scss"; -export const CloudWorkspacesPage: React.FC = () => { +export const CloudWorkspacesPageInner: 
React.FC = () => { const { isLoading: isLogoutLoading, mutateAsync: handleLogout } = useMutation(() => logout?.() ?? Promise.resolve()); useTrackPage(PageTrackingCodes.WORKSPACES); const [searchValue, setSearchValue] = useState(""); @@ -107,3 +108,12 @@ export const CloudWorkspacesPage: React.FC = () => { ); }; + +export const CloudWorkspacesPage = () => { + return ( + <> + + + + ); +}; diff --git a/airbyte-webapp/src/pages/SettingsPage/GeneralOrganizationSettingsPage.tsx b/airbyte-webapp/src/pages/SettingsPage/GeneralOrganizationSettingsPage.tsx index f41894dd430..43e0d3bfec8 100644 --- a/airbyte-webapp/src/pages/SettingsPage/GeneralOrganizationSettingsPage.tsx +++ b/airbyte-webapp/src/pages/SettingsPage/GeneralOrganizationSettingsPage.tsx @@ -1,9 +1,9 @@ import React from "react"; import { FormattedMessage } from "react-intl"; -import { Card } from "components/ui/Card"; import { FlexContainer } from "components/ui/Flex"; import { Heading } from "components/ui/Heading"; +import { Separator } from "components/ui/Separator"; import { PageTrackingCodes, useTrackPage } from "core/services/analytics"; import { FeatureItem, useFeature } from "core/services/features"; @@ -14,19 +14,20 @@ import { UpdateOrganizationSettingsForm } from "./UpdateOrganizationSettingsForm export const GeneralOrganizationSettingsPage: React.FC = () => { useTrackPage(PageTrackingCodes.SETTINGS_ORGANIZATION); const isAccessManagementEnabled = useFeature(FeatureItem.RBAC); + const displayOrganizationUsers = useFeature(FeatureItem.DisplayOrganizationUsers); return ( - - + + - - - - {isAccessManagementEnabled && ( - + + + {isAccessManagementEnabled && displayOrganizationUsers && ( + <> + - + )} ); diff --git a/airbyte-webapp/src/pages/SettingsPage/GeneralWorkspaceSettingsPage.tsx b/airbyte-webapp/src/pages/SettingsPage/GeneralWorkspaceSettingsPage.tsx index 25024a9cfa2..d4225fd78cb 100644 --- a/airbyte-webapp/src/pages/SettingsPage/GeneralWorkspaceSettingsPage.tsx +++ 
b/airbyte-webapp/src/pages/SettingsPage/GeneralWorkspaceSettingsPage.tsx @@ -1,7 +1,6 @@ import { FormattedMessage } from "react-intl"; import { Box } from "components/ui/Box"; -import { Card } from "components/ui/Card"; import { FlexContainer } from "components/ui/Flex"; import { Heading } from "components/ui/Heading"; @@ -21,32 +20,26 @@ export const GeneralWorkspaceSettingsPage = () => { return ( - + - - - + {isAccessManagementEnabled && ( - - - - - + + + )} {canDeleteWorkspace && ( - - - - - - - - + + + + + + - + )} ); diff --git a/airbyte-webapp/src/pages/SettingsPage/SettingsPage.tsx b/airbyte-webapp/src/pages/SettingsPage/SettingsPage.tsx index 520cf58c2a5..1268c2c0748 100644 --- a/airbyte-webapp/src/pages/SettingsPage/SettingsPage.tsx +++ b/airbyte-webapp/src/pages/SettingsPage/SettingsPage.tsx @@ -1,15 +1,13 @@ import React, { Suspense } from "react"; -import { FormattedMessage, useIntl } from "react-intl"; +import { useIntl } from "react-intl"; import { Outlet } from "react-router-dom"; -import { LoadingPage, MainPageWithScroll } from "components"; -import { HeadTitle } from "components/common/HeadTitle"; -import { SettingsLink, SettingsNavigation, SettingsNavigationBlock } from "components/settings/SettingsNavigation"; -import { FlexContainer, FlexItem } from "components/ui/Flex"; -import { Heading } from "components/ui/Heading"; -import { PageHeader } from "components/ui/PageHeader"; +import { LoadingPage } from "components"; -import { useCurrentWorkspace } from "core/api"; +import { SettingsLayout, SettingsLayoutContent } from "area/settings/components/SettingsLayout"; +import { SettingsLink, SettingsNavigation, SettingsNavigationBlock } from "area/settings/components/SettingsNavigation"; +import { useCurrentWorkspace, useGetInstanceConfiguration } from "core/api"; +import { InstanceConfigurationResponseTrackingStrategy } from "core/api/types/AirbyteClient"; import { FeatureItem, useFeature } from "core/services/features"; import { useIntent } from 
"core/utils/rbac"; import { useGetConnectorsOutOfDate } from "hooks/services/useConnector"; @@ -17,6 +15,7 @@ import { SettingsRoutePaths } from "pages/routePaths"; export const SettingsPage: React.FC = () => { const { organizationId, workspaceId } = useCurrentWorkspace(); + const { trackingStrategy } = useGetInstanceConfiguration(); const { countNewSourceVersion, countNewDestinationVersion } = useGetConnectorsOutOfDate(); const multiWorkspaceUI = useFeature(FeatureItem.MultiWorkspaceUI); const apiTokenManagement = useFeature(FeatureItem.APITokenManagement); @@ -25,108 +24,97 @@ export const SettingsPage: React.FC = () => { const { formatMessage } = useIntl(); return ( - } - pageTitle={ - - - - } - /> - } - > - - - + + + + + {apiTokenManagement && ( - {apiTokenManagement && ( + )} + + {canViewWorkspaceSettings && ( + + {multiWorkspaceUI && ( )} - - {canViewWorkspaceSettings && ( - - {multiWorkspaceUI && ( + {!multiWorkspaceUI && ( + <> - )} - {!multiWorkspaceUI && ( - <> - - - - )} - + + + )} + + {trackingStrategy === InstanceConfigurationResponseTrackingStrategy.segment && ( - - )} - {(canViewOrganizationSettings || canViewWorkspaceSettings) && ( - - {multiWorkspaceUI && canViewOrganizationSettings && ( + )} + + )} + {multiWorkspaceUI && (canViewOrganizationSettings || canViewWorkspaceSettings) && ( + + {multiWorkspaceUI && canViewOrganizationSettings && ( + + )} + {multiWorkspaceUI && canViewWorkspaceSettings && ( + <> - )} - {multiWorkspaceUI && canViewWorkspaceSettings && ( - <> - - - - )} - - )} - - - }> - - - - - + + + )} + + )} + + + }> + + + + ); }; diff --git a/airbyte-webapp/src/pages/SettingsPage/UpdateOrganizationSettingsForm.tsx b/airbyte-webapp/src/pages/SettingsPage/UpdateOrganizationSettingsForm.tsx index f360ee2a6bf..967d3258b9d 100644 --- a/airbyte-webapp/src/pages/SettingsPage/UpdateOrganizationSettingsForm.tsx +++ b/airbyte-webapp/src/pages/SettingsPage/UpdateOrganizationSettingsForm.tsx @@ -80,7 +80,7 @@ const OrganizationSettingsForm = ({ 
organizationId }: { organizationId: string } name="email" labelTooltip={formatMessage({ id: "settings.organizationSettings.email.description" })} /> - {canUpdateOrganization && } + {canUpdateOrganization && } ); }; diff --git a/airbyte-webapp/src/pages/SettingsPage/components/DeleteWorkspace.tsx b/airbyte-webapp/src/pages/SettingsPage/components/DeleteWorkspace.tsx index 94f426614a1..2d4dbfb8313 100644 --- a/airbyte-webapp/src/pages/SettingsPage/components/DeleteWorkspace.tsx +++ b/airbyte-webapp/src/pages/SettingsPage/components/DeleteWorkspace.tsx @@ -1,17 +1,47 @@ -import { FormattedMessage } from "react-intl"; +import { FormattedMessage, useIntl } from "react-intl"; +import { useNavigate } from "react-router-dom"; import { Button } from "components/ui/Button"; -import { useConfirmWorkspaceDeletionModal } from "area/workspace/utils/useConfirmWorkspaceDeletionModal"; import { useCurrentWorkspace, useDeleteWorkspace } from "core/api"; +import { useConfirmationModalService } from "hooks/services/ConfirmationModal"; +import { useNotificationService } from "hooks/services/Notification"; +import { RoutePaths } from "pages/routePaths"; export const DeleteWorkspace: React.FC = () => { const workspace = useCurrentWorkspace(); const { mutateAsync: deleteWorkspace, isLoading: isDeletingWorkspace } = useDeleteWorkspace(); - const confirmWorkspaceDeletion = useConfirmWorkspaceDeletionModal(workspace, deleteWorkspace); + const { registerNotification } = useNotificationService(); + const navigate = useNavigate(); + const { formatMessage } = useIntl(); + const { openConfirmationModal, closeConfirmationModal } = useConfirmationModalService(); + + const onRemoveWorkspaceClick = () => + openConfirmationModal({ + text: `settings.workspaceSettings.deleteWorkspace.confirmation.text`, + title: ( + + ), + submitButtonText: "settings.workspaceSettings.delete.confirmation.submitButtonText", + confirmationText: workspace.name, + onSubmit: async () => { + await 
deleteWorkspace(workspace.workspaceId); + registerNotification({ + id: "settings.workspace.delete.success", + text: formatMessage({ id: "settings.workspaceSettings.delete.success" }), + type: "success", + }); + navigate(`/${RoutePaths.Workspaces}`); + closeConfirmationModal(); + }, + submitButtonDataId: "reset", + }); return ( - ); diff --git a/airbyte-webapp/src/pages/SettingsPage/pages/AccessManagementPage/OrganizationAccessManagementSection.tsx b/airbyte-webapp/src/pages/SettingsPage/pages/AccessManagementPage/OrganizationAccessManagementSection.tsx index 6093071c655..867bf2a8469 100644 --- a/airbyte-webapp/src/pages/SettingsPage/pages/AccessManagementPage/OrganizationAccessManagementSection.tsx +++ b/airbyte-webapp/src/pages/SettingsPage/pages/AccessManagementPage/OrganizationAccessManagementSection.tsx @@ -5,6 +5,7 @@ import { useSearchParams } from "react-router-dom"; import { Badge } from "components/ui/Badge"; import { Box } from "components/ui/Box"; import { FlexContainer, FlexItem } from "components/ui/Flex"; +import { Heading } from "components/ui/Heading"; import { Icon } from "components/ui/Icon"; import { ExternalLink } from "components/ui/Link"; import { SearchInput } from "components/ui/SearchInput"; @@ -49,9 +50,9 @@ export const OrganizationAccessManagementSection: React.FC = () => { return ( - + - + diff --git a/airbyte-webapp/src/pages/SettingsPage/pages/AccessManagementPage/OrganizationUsersTable.tsx b/airbyte-webapp/src/pages/SettingsPage/pages/AccessManagementPage/OrganizationUsersTable.tsx index 6be3d25ea56..52a8a31c26a 100644 --- a/airbyte-webapp/src/pages/SettingsPage/pages/AccessManagementPage/OrganizationUsersTable.tsx +++ b/airbyte-webapp/src/pages/SettingsPage/pages/AccessManagementPage/OrganizationUsersTable.tsx @@ -4,19 +4,17 @@ import { FormattedMessage } from "react-intl"; import { Table } from "components/ui/Table"; -import { useCurrentWorkspace } from "core/api"; import { OrganizationUserRead } from 
"core/api/types/AirbyteClient"; import { useCurrentUser } from "core/services/auth"; import { RbacRoleHierarchy, partitionPermissionType } from "core/utils/rbac/rbacPermissionsQuery"; import { UserCell } from "./components/UserCell"; -import { RoleManagementMenu } from "./next/RoleManagementMenu"; +import { RoleManagementCell } from "./next/RoleManagementCell"; export const OrganizationUsersTable: React.FC<{ users: OrganizationUserRead[]; }> = ({ users }) => { const { userId: currentUserId } = useCurrentUser(); - const { workspaceId } = useCurrentWorkspace(); const columnHelper = createColumnHelper(); @@ -30,7 +28,7 @@ export const OrganizationUsersTable: React.FC<{ name={props.row.original.name} email={props.row.original.email} isCurrentUser={props.row.original.userId === currentUserId} - userId={props.row.original.userId} + uniqueId={props.row.original.userId} /> ); }, @@ -49,9 +47,8 @@ export const OrganizationUsersTable: React.FC<{ cell: (props) => { const user = { userName: props.row.original.name ?? "", - userId: props.row.original.userId, + id: props.row.original.userId, userEmail: props.row.original.email, - workspaceId, organizationPermission: { permissionType: props.row.original.permissionType, organizationId: props.row.original.organizationId, @@ -60,7 +57,7 @@ export const OrganizationUsersTable: React.FC<{ }, }; - return ; + return ; }, sortingFn: (a, b, order) => { const aRole = partitionPermissionType(a.original.permissionType)[1]; @@ -76,8 +73,8 @@ export const OrganizationUsersTable: React.FC<{ }, }), ], - [columnHelper, currentUserId, workspaceId] + [columnHelper, currentUserId] ); - return
    ; + return
    ; }; diff --git a/airbyte-webapp/src/pages/SettingsPage/pages/AccessManagementPage/WorkspaceAccessManagementSection.tsx b/airbyte-webapp/src/pages/SettingsPage/pages/AccessManagementPage/WorkspaceAccessManagementSection.tsx index 122ddc77bf7..83f1ac2242e 100644 --- a/airbyte-webapp/src/pages/SettingsPage/pages/AccessManagementPage/WorkspaceAccessManagementSection.tsx +++ b/airbyte-webapp/src/pages/SettingsPage/pages/AccessManagementPage/WorkspaceAccessManagementSection.tsx @@ -5,18 +5,16 @@ import { useSearchParams } from "react-router-dom"; import { Box } from "components/ui/Box"; import { Button } from "components/ui/Button"; import { FlexContainer, FlexItem } from "components/ui/Flex"; -import { Icon } from "components/ui/Icon"; +import { Heading } from "components/ui/Heading"; import { SearchInput } from "components/ui/SearchInput"; import { Text } from "components/ui/Text"; -import { useCurrentOrganizationInfo, useCurrentWorkspace, useListWorkspaceAccessUsers } from "core/api"; +import { useCurrentWorkspace, useListUserInvitations, useListWorkspaceAccessUsers } from "core/api"; import { useIntent } from "core/utils/rbac"; -import { useExperiment } from "hooks/services/Experiment"; import { useModalService } from "hooks/services/Modal"; import { AddUserModal } from "packages/cloud/views/workspaces/WorkspaceSettingsView/components/AddUserModal"; -import { FirebaseInviteUserButton } from "packages/cloud/views/workspaces/WorkspaceSettingsView/components/FirebaseInviteUserButton"; -import { AddUserControl } from "./components/AddUserControl"; +import { UnifiedWorkspaceUserModel, unifyWorkspaceUserData } from "./components/useGetAccessManagementData"; import styles from "./WorkspaceAccessManagementSection.module.scss"; import { WorkspaceUsersTable } from "./WorkspaceUsersTable"; @@ -24,27 +22,27 @@ const SEARCH_PARAM = "search"; const WorkspaceAccessManagementSection: React.FC = () => { const workspace = useCurrentWorkspace(); - const organization = 
useCurrentOrganizationInfo(); - const canViewOrgMembers = useIntent("ListOrganizationMembers", { organizationId: organization?.organizationId }); const canUpdateWorkspacePermissions = useIntent("UpdateWorkspacePermissions", { workspaceId: workspace.workspaceId }); - const { openModal, closeModal } = useModalService(); + const { openModal } = useModalService(); const usersWithAccess = useListWorkspaceAccessUsers(workspace.workspaceId).usersWithAccess; + const pendingInvitations = useListUserInvitations({ + scopeType: "workspace", + scopeId: workspace.workspaceId, + }); + const unifiedWorkspaceUsers = unifyWorkspaceUserData(usersWithAccess, pendingInvitations); + const [searchParams, setSearchParams] = useSearchParams(); const filterParam = searchParams.get("search"); const [userFilter, setUserFilter] = React.useState(filterParam ?? ""); const debouncedUserFilter = useDeferredValue(userFilter); const { formatMessage } = useIntl(); - const showAddUserButton = organization?.sso && canUpdateWorkspacePermissions && canViewOrgMembers; - const showFirebaseInviteButton = !organization?.sso && canUpdateWorkspacePermissions; - const invitationSystemv2 = useExperiment("settings.invitationSystemv2", false); - const onOpenInviteUsersModal = () => - openModal({ + openModal({ title: formatMessage({ id: "userInvitations.create.modal.title" }, { workspace: workspace.name }), - content: () => , + content: ({ onComplete }) => , size: "md", }); @@ -57,7 +55,7 @@ const WorkspaceAccessManagementSection: React.FC = () => { setSearchParams(searchParams); }, [debouncedUserFilter, searchParams, setSearchParams]); - const filteredUsersWithAccess = (usersWithAccess ?? []).filter((user) => { + const filteredWorkspaceUsers: UnifiedWorkspaceUserModel[] = (unifiedWorkspaceUsers ?? []).filter((user) => { return ( user.userName?.toLowerCase().includes(filterParam?.toLowerCase() ?? "") || user.userEmail?.toLowerCase().includes(filterParam?.toLowerCase() ?? 
"") @@ -67,31 +65,20 @@ const WorkspaceAccessManagementSection: React.FC = () => { return ( - + - + setUserFilter(e.target.value)} /> - {!invitationSystemv2 ? ( - <> - {showFirebaseInviteButton && } - {showAddUserButton && } - - ) : ( - - )} + - {filteredUsersWithAccess && filteredUsersWithAccess.length > 0 ? ( - + {filteredWorkspaceUsers && filteredWorkspaceUsers.length > 0 ? ( + ) : ( diff --git a/airbyte-webapp/src/pages/SettingsPage/pages/AccessManagementPage/WorkspaceUsersTable.tsx b/airbyte-webapp/src/pages/SettingsPage/pages/AccessManagementPage/WorkspaceUsersTable.tsx index 43eb8d2e98e..67067535bce 100644 --- a/airbyte-webapp/src/pages/SettingsPage/pages/AccessManagementPage/WorkspaceUsersTable.tsx +++ b/airbyte-webapp/src/pages/SettingsPage/pages/AccessManagementPage/WorkspaceUsersTable.tsx @@ -4,22 +4,21 @@ import { FormattedMessage } from "react-intl"; import { Table } from "components/ui/Table"; -import { WorkspaceUserAccessInfoRead } from "core/api/types/AirbyteClient"; import { useCurrentUser } from "core/services/auth"; import { FeatureItem, useFeature } from "core/services/features"; import { RbacRoleHierarchy } from "core/utils/rbac/rbacPermissionsQuery"; -import { getWorkspaceAccessLevel } from "./components/useGetAccessManagementData"; +import { getWorkspaceAccessLevel, UnifiedWorkspaceUserModel } from "./components/useGetAccessManagementData"; import { UserCell } from "./components/UserCell"; -import { RoleManagementMenu } from "./next/RoleManagementMenu"; +import { RoleManagementCell } from "./next/RoleManagementCell"; export const WorkspaceUsersTable: React.FC<{ - users: WorkspaceUserAccessInfoRead[]; + users: UnifiedWorkspaceUserModel[]; }> = ({ users }) => { const { userId: currentUserId } = useCurrentUser(); const areAllRbacRolesEnabled = useFeature(FeatureItem.AllowAllRBACRoles); - const columnHelper = createColumnHelper(); + const columnHelper = createColumnHelper(); const columns = useMemo( () => [ @@ -30,8 +29,8 @@ export const 
WorkspaceUsersTable: React.FC<{ ); }, @@ -52,7 +51,7 @@ export const WorkspaceUsersTable: React.FC<{ ), meta: { responsive: true }, cell: (props) => { - return ; + return ; }, enableSorting: !!areAllRbacRolesEnabled, sortingFn: (a, b, order) => { @@ -73,5 +72,7 @@ export const WorkspaceUsersTable: React.FC<{ [areAllRbacRolesEnabled, columnHelper, currentUserId] ); - return
    ; + return ( +
    + ); }; diff --git a/airbyte-webapp/src/pages/SettingsPage/pages/AccessManagementPage/components/AddUserControl.module.scss b/airbyte-webapp/src/pages/SettingsPage/pages/AccessManagementPage/components/AddUserControl.module.scss deleted file mode 100644 index e2871173b51..00000000000 --- a/airbyte-webapp/src/pages/SettingsPage/pages/AccessManagementPage/components/AddUserControl.module.scss +++ /dev/null @@ -1,20 +0,0 @@ -@use "scss/variables"; -@use "scss/colors"; - -.addUserControl__dropdown { - padding-bottom: 0; - width: variables.$width-wide-menu; - background-color: colors.$foreground; -} - -.addUserControl__dropdownMenu { - display: block; // default is `flex` which shrinks the options to fit the box, instead overflowing into scroll - overflow: auto; - max-height: variables.$height-long-listbox-options-list; -} - -.addUserControl__buttonName { - overflow: hidden; - text-overflow: ellipsis; - white-space: nowrap; -} diff --git a/airbyte-webapp/src/pages/SettingsPage/pages/AccessManagementPage/components/AddUserControl.tsx b/airbyte-webapp/src/pages/SettingsPage/pages/AccessManagementPage/components/AddUserControl.tsx deleted file mode 100644 index cdc421379be..00000000000 --- a/airbyte-webapp/src/pages/SettingsPage/pages/AccessManagementPage/components/AddUserControl.tsx +++ /dev/null @@ -1,134 +0,0 @@ -import { useState } from "react"; -import { FormattedMessage } from "react-intl"; -import * as yup from "yup"; - -import { Form, FormControl } from "components/forms"; -import { FormSubmissionButtons } from "components/forms/FormSubmissionButtons"; -import { Button } from "components/ui/Button"; -import { FlexContainer } from "components/ui/Flex"; -import { Icon } from "components/ui/Icon"; -import { ListBoxControlButtonProps } from "components/ui/ListBox"; -import { Text } from "components/ui/Text"; - -import { - useCreatePermission, - useCurrentWorkspace, - useListUsersInOrganization, - useListWorkspaceAccessUsers, -} from "core/api"; -import { 
OrganizationUserRead, PermissionCreate, PermissionType } from "core/api/types/AirbyteClient"; -import { useIntent } from "core/utils/rbac"; - -import styles from "./AddUserControl.module.scss"; - -/** - * The name of this component is based on what a user sees... not so much what it does. - * This button will NOT create a user, it will create a permission for an existing organization member to access a given workspace. - */ - -const createPermissionControlSchema = yup.object().shape({ - userId: yup.string().required(), - permissionType: yup.mixed().oneOf(Object.values(PermissionType)).required(), - workspaceId: yup.string(), - permissionId: yup.string().strip(), // this property is defined on the type solely for migration purposes - organizationId: yup.string().strip(), // we do not have a mechanism for creating an organization permission with this control as of yet -}); - -const AddUserForm: React.FC<{ - usersToAdd: OrganizationUserRead[]; - workspaceId: string; - setIsEditMode: (mode: boolean) => void; -}> = ({ usersToAdd, workspaceId, setIsEditMode }) => { - const { mutateAsync: createPermission } = useCreatePermission(); - const canUpdateWorkspacePermissions = useIntent("UpdateWorkspacePermissions", { workspaceId }); - - const onSubmitClick = async (values: PermissionCreate) => { - await createPermission(values).then(() => setIsEditMode(false)); - }; - - const AddUserListBoxControl = ({ selectedOption }: ListBoxControlButtonProps) => { - const value = selectedOption?.value; - const userToAdd = usersToAdd.find((user) => user.userId === value); - const nameToDisplay = userToAdd?.name ? 
userToAdd.name : userToAdd?.email; - - if (!userToAdd) { - return null; - } - - return ( - <> - - {nameToDisplay} - - - - ); - }; - - return ( - - schema={createPermissionControlSchema} - defaultValues={{ - userId: usersToAdd[0].userId, - permissionType: PermissionType.workspace_admin, - workspaceId, - }} - onSubmit={onSubmitClick} - disabled={!canUpdateWorkspacePermissions} - > - - - containerControlClassName={styles.addUserControl__dropdown} - optionsMenuClassName={styles.addUserControl__dropdownMenu} - controlButton={AddUserListBoxControl} - name="userId" - fieldType="dropdown" - options={usersToAdd.map((user) => { - return { - value: user.userId, - label: ( - - - {user.name ? user.name : user.email} - - - {user.email} - - - ), - }; - })} - /> - setIsEditMode(false)} - allowNonDirtyCancel - /> - - - ); -}; -export const AddUserControl: React.FC = () => { - const [isEditMode, setIsEditMode] = useState(false); - const workspace = useCurrentWorkspace(); - - const workspaceAccessUsers = useListWorkspaceAccessUsers(workspace.workspaceId); - const { users } = useListUsersInOrganization(workspace.organizationId); - - const usersToAdd = users.filter( - (organizationUser) => - !workspaceAccessUsers.usersWithAccess.find((workspaceUser) => workspaceUser.userId === organizationUser.userId) - ); - - if (!usersToAdd || usersToAdd.length === 0) { - return null; - } - - return !isEditMode ? 
( - - ) : ( - - ); -}; diff --git a/airbyte-webapp/src/pages/SettingsPage/pages/AccessManagementPage/components/UserCell.tsx b/airbyte-webapp/src/pages/SettingsPage/pages/AccessManagementPage/components/UserCell.tsx index 338ac1979d2..b4a865819ae 100644 --- a/airbyte-webapp/src/pages/SettingsPage/pages/AccessManagementPage/components/UserCell.tsx +++ b/airbyte-webapp/src/pages/SettingsPage/pages/AccessManagementPage/components/UserCell.tsx @@ -4,17 +4,17 @@ import { InitialBadge } from "components/InitialBadge/InitialBadge"; import { Badge } from "components/ui/Badge"; import { FlexContainer } from "components/ui/Flex"; import { Text } from "components/ui/Text"; -export const UserCell: React.FC<{ name?: string; email: string; isCurrentUser: boolean; userId: string }> = ({ +export const UserCell: React.FC<{ name?: string; email: string; isCurrentUser: boolean; uniqueId: string }> = ({ name, email, - userId, + uniqueId, isCurrentUser, }) => { const nameToDisplay = name || email; return ( - + {nameToDisplay} diff --git a/airbyte-webapp/src/pages/SettingsPage/pages/AccessManagementPage/components/UserRoleText.tsx b/airbyte-webapp/src/pages/SettingsPage/pages/AccessManagementPage/components/UserRoleText.tsx index bfbe662acb3..f45c150867a 100644 --- a/airbyte-webapp/src/pages/SettingsPage/pages/AccessManagementPage/components/UserRoleText.tsx +++ b/airbyte-webapp/src/pages/SettingsPage/pages/AccessManagementPage/components/UserRoleText.tsx @@ -19,7 +19,7 @@ export const UserRoleText: React.FC<{ highestPermissionType?: RbacRole }> = ({ h : "role.member"; return ( - + ); diff --git a/airbyte-webapp/src/pages/SettingsPage/pages/AccessManagementPage/components/useGetAccessManagementData.tsx b/airbyte-webapp/src/pages/SettingsPage/pages/AccessManagementPage/components/useGetAccessManagementData.tsx index 3498ed978c8..ecd88592477 100644 --- a/airbyte-webapp/src/pages/SettingsPage/pages/AccessManagementPage/components/useGetAccessManagementData.tsx +++ 
b/airbyte-webapp/src/pages/SettingsPage/pages/AccessManagementPage/components/useGetAccessManagementData.tsx @@ -1,11 +1,4 @@ -import { useCurrentWorkspace, useListUsersInOrganization } from "core/api"; -import { - OrganizationUserRead, - PermissionRead, - PermissionType, - WorkspaceUserAccessInfoRead, - WorkspaceUserRead, -} from "core/api/types/AirbyteClient"; +import { PermissionType, UserInvitationRead, WorkspaceUserAccessInfoRead } from "core/api/types/AirbyteClient"; import { RbacRole, RbacRoleHierarchy, partitionPermissionType } from "core/utils/rbac/rbacPermissionsQuery"; export type ResourceType = "workspace" | "organization" | "instance"; @@ -51,37 +44,69 @@ export const permissionsByResourceType: Record = instance: [PermissionType.instance_admin], }; -export interface NextAccessUserRead { - userId: string; - email: string; - name?: string; - workspacePermission?: PermissionRead; - organizationPermission?: PermissionRead; -} +/** + * a unified typing to allow listing invited and current users together in WorkspaceUsersTable + * using this custom union rather than a union of WorkspaceUserAccessInfoRead | UserInvitationRead + * allows us to handle intentionally missing properties more gracefully. 
+ */ +export type UnifiedWorkspaceUserModel = + | { + id: string; + userEmail: string; + userName?: string; + organizationPermission?: WorkspaceUserAccessInfoRead["organizationPermission"]; + workspacePermission?: WorkspaceUserAccessInfoRead["workspacePermission"]; + invitationStatus?: never; // Explicitly marking these as never when permissions are present + invitationPermissionType?: never; + } + | { + id: string; + userEmail: string; + userName?: string; + organizationPermission?: never; // Explicitly marking these as never when invitation fields are present + workspacePermission?: never; + invitationStatus: UserInvitationRead["status"]; + invitationPermissionType: UserInvitationRead["permissionType"]; + }; -export interface AccessUsers { - workspace?: { users: WorkspaceUserRead[]; usersToAdd: OrganizationUserRead[] }; - organization?: { users: OrganizationUserRead[]; usersToAdd: [] }; -} +export const unifyWorkspaceUserData = ( + workspaceAccessUsers: WorkspaceUserAccessInfoRead[], + workspaceInvitations: UserInvitationRead[] +): UnifiedWorkspaceUserModel[] => { + const normalizedUsers = workspaceAccessUsers.map((user) => { + return { + id: user.userId, + userEmail: user.userEmail, + userName: user.userName, + organizationPermission: user.organizationPermission, + workspacePermission: user.workspacePermission, + }; + }); -export interface NextAccessUsers { - workspace?: { users: NextAccessUserRead[]; usersToAdd: OrganizationUserRead[] }; -} + const normalizedInvitations = workspaceInvitations.map((invitation) => { + return { + id: invitation.inviteCode, + userEmail: invitation.invitedEmail, + invitationStatus: invitation.status, + invitationPermissionType: invitation.permissionType, + }; + }); -export const useGetOrganizationAccessUsers = (): AccessUsers => { - const workspace = useCurrentWorkspace(); - const { users: organizationUsers } = useListUsersInOrganization(workspace.organizationId); - - return { - organization: { users: organizationUsers, usersToAdd: 
[] }, - }; + return [...normalizedUsers, ...normalizedInvitations]; }; -export const getWorkspaceAccessLevel = (user: WorkspaceUserAccessInfoRead): RbacRole => { - const orgPermissionType = user.organizationPermission?.permissionType; - const workspacePermissionType = user.workspacePermission?.permissionType; +export const getWorkspaceAccessLevel = ( + unifiedWorkspaceUser: Pick< + UnifiedWorkspaceUserModel, + "workspacePermission" | "organizationPermission" | "invitationPermissionType" + > +): RbacRole => { + const workspacePermissionType = + unifiedWorkspaceUser.workspacePermission?.permissionType ?? unifiedWorkspaceUser.invitationPermissionType; + + const organizationPermissionType = unifiedWorkspaceUser.organizationPermission?.permissionType; - const orgRole = orgPermissionType ? partitionPermissionType(orgPermissionType)[1] : undefined; + const orgRole = organizationPermissionType ? partitionPermissionType(organizationPermissionType)[1] : undefined; const workspaceRole = workspacePermissionType ? partitionPermissionType(workspacePermissionType)[1] : undefined; // return whatever is the "highest" role ie the lowest index greater than -1. 
diff --git a/airbyte-webapp/src/pages/SettingsPage/pages/AccessManagementPage/next/CancelInvitationMenuItem.tsx b/airbyte-webapp/src/pages/SettingsPage/pages/AccessManagementPage/next/CancelInvitationMenuItem.tsx new file mode 100644 index 00000000000..89ed904d1ac --- /dev/null +++ b/airbyte-webapp/src/pages/SettingsPage/pages/AccessManagementPage/next/CancelInvitationMenuItem.tsx @@ -0,0 +1,60 @@ +import { FormattedMessage } from "react-intl"; + +import { Box } from "components/ui/Box"; +import { Text } from "components/ui/Text"; + +import { useCancelUserInvitation, useCurrentWorkspace } from "core/api"; +import { useConfirmationModalService } from "hooks/services/ConfirmationModal"; + +import styles from "./RemoveRoleMenuItem.module.scss"; +import { UnifiedWorkspaceUserModel } from "../components/useGetAccessManagementData"; + +interface CancelInvitationMenuItemProps { + user: UnifiedWorkspaceUserModel; +} + +export const CancelInvitationMenuItem: React.FC = ({ user }) => { + const { openConfirmationModal, closeConfirmationModal } = useConfirmationModalService(); + + const { name: workspaceName } = useCurrentWorkspace(); + + const { mutateAsync: cancelInvitation } = useCancelUserInvitation(); + + const onClick = () => + openConfirmationModal({ + text: ( + + {user.userEmail} + + ), + resource: ( + + {workspaceName} + + ), + }} + /> + ), + title: , + submitButtonText: "userInvitations.cancel.confirm.title", + onSubmit: async () => { + await cancelInvitation({ inviteCode: user.id }); + closeConfirmationModal(); + }, + submitButtonDataId: "cancel-invite", + }); + + return ( + + ); +}; diff --git a/airbyte-webapp/src/pages/SettingsPage/pages/AccessManagementPage/next/ChangeRoleMenuItem.tsx b/airbyte-webapp/src/pages/SettingsPage/pages/AccessManagementPage/next/ChangeRoleMenuItem.tsx index b93ea5b7a7a..92d109e4bbb 100644 --- a/airbyte-webapp/src/pages/SettingsPage/pages/AccessManagementPage/next/ChangeRoleMenuItem.tsx +++ 
b/airbyte-webapp/src/pages/SettingsPage/pages/AccessManagementPage/next/ChangeRoleMenuItem.tsx @@ -1,24 +1,15 @@ import classNames from "classnames"; -import { FormattedMessage } from "react-intl"; - -import { Box } from "components/ui/Box"; -import { FlexContainer, FlexItem } from "components/ui/Flex"; -import { Icon } from "components/ui/Icon"; -import { Text } from "components/ui/Text"; import { useCreatePermission, useCurrentOrganizationInfo, useCurrentWorkspace, useUpdatePermissions } from "core/api"; -import { PermissionType, WorkspaceUserAccessInfoRead } from "core/api/types/AirbyteClient"; +import { PermissionType } from "core/api/types/AirbyteClient"; import { useCurrentUser } from "core/services/auth"; import styles from "./ChangeRoleMenuItem.module.scss"; -import { - ResourceType, - permissionStringDictionary, - permissionDescriptionDictionary, -} from "../components/useGetAccessManagementData"; +import { ChangeRoleMenuItemContent } from "./ChangeRoleMenuItemContent"; +import { ResourceType, UnifiedWorkspaceUserModel } from "../components/useGetAccessManagementData"; const useCreateOrUpdateRole = ( - user: WorkspaceUserAccessInfoRead, + user: UnifiedWorkspaceUserModel, resourceType: ResourceType, permissionType: PermissionType ) => { @@ -41,12 +32,12 @@ const useCreateOrUpdateRole = ( throw new Error("Organization info not found"); } return createPermission({ - userId: user.userId, + userId: user.id, permissionType, organizationId: organizationInfo.organizationId, }); } - return createPermission({ userId: user.userId, permissionType, workspaceId }); + return createPermission({ userId: user.id, permissionType, workspaceId }); } return updatePermission({ permissionId: existingPermissionIdForResourceType, permissionType }); @@ -54,7 +45,7 @@ const useCreateOrUpdateRole = ( }; export const disallowedRoles = ( - user: WorkspaceUserAccessInfoRead, + user: UnifiedWorkspaceUserModel | null, targetResourceType: ResourceType, isCurrentUser: boolean ): 
PermissionType[] => { @@ -74,7 +65,7 @@ export const disallowedRoles = ( return []; } - const organizationRole = user.organizationPermission?.permissionType; + const organizationRole = user?.organizationPermission?.permissionType; if (organizationRole === "organization_editor") { return ["workspace_reader"]; @@ -87,7 +78,7 @@ export const disallowedRoles = ( }; interface RoleMenuItemProps { - user: WorkspaceUserAccessInfoRead; + user: UnifiedWorkspaceUserModel; permissionType: PermissionType; resourceType: ResourceType; onClose: () => void; @@ -96,7 +87,7 @@ interface RoleMenuItemProps { export const ChangeRoleMenuItem: React.FC = ({ user, permissionType, resourceType, onClose }) => { const createOrUpdateRole = useCreateOrUpdateRole(user, resourceType, permissionType); const currentUser = useCurrentUser(); - const isCurrentUser = currentUser.userId === user.userId; + const isCurrentUser = currentUser.userId === user.id; const roleIsActive = permissionType === user.workspacePermission?.permissionType || @@ -115,26 +106,11 @@ export const ChangeRoleMenuItem: React.FC = ({ user, permissi [styles["changeRoleMenuItem__button--active"]]: roleIsActive, })} > - - - - - - - - - - - {roleIsActive && ( - - - - )} - - + ); }; diff --git a/airbyte-webapp/src/pages/SettingsPage/pages/AccessManagementPage/next/ChangeRoleMenuItemContent.tsx b/airbyte-webapp/src/pages/SettingsPage/pages/AccessManagementPage/next/ChangeRoleMenuItemContent.tsx new file mode 100644 index 00000000000..8af24f943f2 --- /dev/null +++ b/airbyte-webapp/src/pages/SettingsPage/pages/AccessManagementPage/next/ChangeRoleMenuItemContent.tsx @@ -0,0 +1,45 @@ +import { FormattedMessage } from "react-intl"; + +import { Box } from "components/ui/Box"; +import { FlexContainer, FlexItem } from "components/ui/Flex"; +import { Icon } from "components/ui/Icon"; +import { Text } from "components/ui/Text"; + +import { PermissionType } from "core/api/types/AirbyteClient"; + +import { permissionDescriptionDictionary, 
permissionStringDictionary } from "../components/useGetAccessManagementData"; + +interface ChangeRoleMenuItemContentProps { + roleIsInvalid: boolean; + roleIsActive: boolean; + permissionType: PermissionType; +} + +export const ChangeRoleMenuItemContent: React.FC = ({ + roleIsActive, + permissionType, + roleIsInvalid, +}) => { + return ( + + + + + + + + + + + {roleIsActive && ( + + + + )} + + + ); +}; diff --git a/airbyte-webapp/src/pages/SettingsPage/pages/AccessManagementPage/next/RemoveRoleMenuItem.tsx b/airbyte-webapp/src/pages/SettingsPage/pages/AccessManagementPage/next/RemoveRoleMenuItem.tsx index 4b753ea1474..f37f3f9902c 100644 --- a/airbyte-webapp/src/pages/SettingsPage/pages/AccessManagementPage/next/RemoveRoleMenuItem.tsx +++ b/airbyte-webapp/src/pages/SettingsPage/pages/AccessManagementPage/next/RemoveRoleMenuItem.tsx @@ -4,15 +4,14 @@ import { Box } from "components/ui/Box"; import { Text } from "components/ui/Text"; import { useCurrentOrganizationInfo, useCurrentWorkspace, useDeletePermissions } from "core/api"; -import { WorkspaceUserAccessInfoRead } from "core/api/types/AirbyteClient"; import { useCurrentUser } from "core/services/auth"; import { useConfirmationModalService } from "hooks/services/ConfirmationModal"; import styles from "./RemoveRoleMenuItem.module.scss"; -import { ResourceType } from "../components/useGetAccessManagementData"; +import { ResourceType, UnifiedWorkspaceUserModel } from "../components/useGetAccessManagementData"; interface RemoveRoleMenuItemProps { - user: WorkspaceUserAccessInfoRead; + user: UnifiedWorkspaceUserModel; resourceType: ResourceType; } @@ -53,11 +52,11 @@ export const RemoveRoleMenuItem: React.FC = ({ user, re return ( + ); +}); + +RoleManagementButton.displayName = "RoleManagementButton"; diff --git a/airbyte-webapp/src/pages/SettingsPage/pages/AccessManagementPage/next/RoleManagementCell.tsx b/airbyte-webapp/src/pages/SettingsPage/pages/AccessManagementPage/next/RoleManagementCell.tsx new file mode 100644 
index 00000000000..28871057744 --- /dev/null +++ b/airbyte-webapp/src/pages/SettingsPage/pages/AccessManagementPage/next/RoleManagementCell.tsx @@ -0,0 +1,105 @@ +import { FormattedMessage } from "react-intl"; + +import { Badge } from "components/ui/Badge"; +import { Box } from "components/ui/Box"; +import { FlexContainer } from "components/ui/Flex"; +import { Text } from "components/ui/Text"; +import { Tooltip } from "components/ui/Tooltip"; + +import { useCurrentOrganizationInfo, useCurrentWorkspace } from "core/api"; +import { useCurrentUser } from "core/services/auth"; +import { FeatureItem, useFeature } from "core/services/features"; +import { useIntent } from "core/utils/rbac"; + +import { GuestBadge } from "./GuestBadge"; +import { RoleManagementMenu } from "./RoleManagementMenu"; +import { + ResourceType, + UnifiedWorkspaceUserModel, + getWorkspaceAccessLevel, +} from "../components/useGetAccessManagementData"; +import { UserRoleText } from "../components/UserRoleText"; + +const ViewOnlyRoleBox: React.FC<{ highestPermissionType: "MEMBER" | "ADMIN" | "READER" | "EDITOR" }> = ({ + highestPermissionType, +}) => { + return ( + + + + + + ); +}; + +interface RoleManagementCellProps { + user: UnifiedWorkspaceUserModel; + resourceType: ResourceType; +} + +export const RoleManagementCell: React.FC = ({ user, resourceType }) => { + const { workspaceId } = useCurrentWorkspace(); + const organizationInfo = useCurrentOrganizationInfo(); + const highestPermissionType = getWorkspaceAccessLevel(user); + const currentUser = useCurrentUser(); + const orgPermissionType = user.organizationPermission ? user.organizationPermission.permissionType : undefined; + const canEditPermissions = useIntent( + resourceType === "workspace" ? 
"UpdateWorkspacePermissions" : "UpdateOrganizationPermissions", + { workspaceId, organizationId: organizationInfo?.organizationId } + ); + const canListOrganizationUsers = useIntent("ListOrganizationMembers", { + organizationId: organizationInfo?.organizationId, + }); + const indicateGuestUsers = useFeature(FeatureItem.IndicateGuestUsers); + const cannotDemoteUser = resourceType === "workspace" && orgPermissionType === "organization_admin"; + const shouldHidePopover = cannotDemoteUser || !canEditPermissions || user.id === currentUser.userId; + + const tooltipContent = + cannotDemoteUser && canEditPermissions + ? "settings.accessManagement.cannotDemoteOrgAdmin" + : user.id === currentUser.userId && canEditPermissions + ? "settings.accessManagement.cannotEditOwnPermissions" + : undefined; + + return ( + + {shouldHidePopover ? ( + tooltipContent ? ( + }> + + + ) : ( + + ) + ) : ( + + )} + {user.organizationPermission?.permissionType === "organization_admin" && resourceType === "workspace" && ( + + + + )} + {canListOrganizationUsers && organizationInfo?.organizationId && indicateGuestUsers && ( + + )} + {user.invitationStatus === "pending" && ( + + + + + + } + > + + + + + + + + )} + + ); +}; diff --git a/airbyte-webapp/src/pages/SettingsPage/pages/AccessManagementPage/next/RoleManagementMenu.module.scss b/airbyte-webapp/src/pages/SettingsPage/pages/AccessManagementPage/next/RoleManagementMenu.module.scss index ca2ac525d75..9c5b213584a 100644 --- a/airbyte-webapp/src/pages/SettingsPage/pages/AccessManagementPage/next/RoleManagementMenu.module.scss +++ b/airbyte-webapp/src/pages/SettingsPage/pages/AccessManagementPage/next/RoleManagementMenu.module.scss @@ -2,17 +2,6 @@ @use "scss/variables"; @use "scss/z-indices"; -.roleManagementMenu__popoverButton { - border: none; - border-radius: variables.$border-radius-sm; - background-color: transparent; - cursor: pointer; -} - -.roleManagementMenu__popoverButton:hover { - background-color: colors.$grey-100; -} - 
.roleManagementMenu__popoverPanel { position: relative; z-index: z-indices.$dropdownMenu; diff --git a/airbyte-webapp/src/pages/SettingsPage/pages/AccessManagementPage/next/RoleManagementMenu.tsx b/airbyte-webapp/src/pages/SettingsPage/pages/AccessManagementPage/next/RoleManagementMenu.tsx index c603d052e85..4ecb67402bb 100644 --- a/airbyte-webapp/src/pages/SettingsPage/pages/AccessManagementPage/next/RoleManagementMenu.tsx +++ b/airbyte-webapp/src/pages/SettingsPage/pages/AccessManagementPage/next/RoleManagementMenu.tsx @@ -1,131 +1,65 @@ import { autoUpdate, flip, offset, useFloating } from "@floating-ui/react-dom"; import { Popover } from "@headlessui/react"; import React from "react"; -import { FormattedMessage } from "react-intl"; -import { Badge } from "components/ui/Badge"; import { Box } from "components/ui/Box"; import { FlexContainer, FlexItem } from "components/ui/Flex"; import { Icon } from "components/ui/Icon"; -import { Tooltip } from "components/ui/Tooltip"; -import { useCurrentOrganizationInfo, useCurrentWorkspace } from "core/api"; -import { WorkspaceUserAccessInfoRead } from "core/api/types/AirbyteClient"; -import { useCurrentUser } from "core/services/auth"; -import { useIntent } from "core/utils/rbac"; +import { RbacRole } from "core/utils/rbac/rbacPermissionsQuery"; -import { GuestBadge } from "./GuestBadge"; +import { RoleManagementButton } from "./RoleManagementButton"; import styles from "./RoleManagementMenu.module.scss"; import { RoleManagementMenuBody } from "./RoleManagementMenuBody"; -import { getWorkspaceAccessLevel } from "../components/useGetAccessManagementData"; +import { UnifiedWorkspaceUserModel } from "../components/useGetAccessManagementData"; import { UserRoleText } from "../components/UserRoleText"; type ResourceType = "workspace" | "organization" | "instance"; export interface RoleManagementMenuProps { - user: WorkspaceUserAccessInfoRead; + user: UnifiedWorkspaceUserModel; resourceType: ResourceType; + highestPermissionType: 
RbacRole; } -const RoleManagementButton = React.forwardRef>( - ({ children, ...props }, ref) => { - return ( - - ); - } -); - -RoleManagementButton.displayName = "RoleManagementButton"; - -const ViewOnlyRoleBox: React.FC<{ highestPermissionType: "MEMBER" | "ADMIN" | "READER" | "EDITOR" }> = ({ +export const RoleManagementMenu: React.FC = ({ + user, + resourceType, highestPermissionType, }) => { - return ( - - - - - - ); -}; - -export const RoleManagementMenu: React.FC = ({ user, resourceType }) => { const { x, y, reference, floating, strategy } = useFloating({ middleware: [offset(5), flip()], whileElementsMounted: autoUpdate, placement: "bottom-start", }); - const { workspaceId } = useCurrentWorkspace(); - const organizationInfo = useCurrentOrganizationInfo(); - const highestPermissionType = getWorkspaceAccessLevel(user); - const currentUser = useCurrentUser(); - const orgPermissionType = user.organizationPermission ? user.organizationPermission.permissionType : undefined; - const canEditPermissions = useIntent( - resourceType === "workspace" ? "UpdateWorkspacePermissions" : "UpdateOrganizationPermissions", - { workspaceId, organizationId: organizationInfo?.organizationId } - ); - const canListOrganizationUsers = useIntent("ListOrganizationMembers", { - organizationId: organizationInfo?.organizationId, - }); - const cannotDemoteUser = resourceType === "workspace" && orgPermissionType === "organization_admin"; - const shouldHidePopover = cannotDemoteUser || !canEditPermissions || user.userId === currentUser.userId; - - const tooltipContent = - cannotDemoteUser && canEditPermissions - ? "settings.accessManagement.cannotDemoteOrgAdmin" - : user.userId === currentUser.userId && canEditPermissions - ? "settings.accessManagement.cannotEditOwnPermissions" - : undefined; return ( - - {shouldHidePopover ? ( - tooltipContent ? 
( - }> - - - ) : ( - - ) - ) : ( - - {({ close }) => ( - <> - - - - - - - - - - - - - - - )} - - )} - {user.organizationPermission?.permissionType === "organization_admin" && resourceType === "workspace" && ( - - - - )} - {canListOrganizationUsers && organizationInfo?.organizationId && ( - + + {({ close }) => ( + <> + + + + + + + + + + + + + + )} - + ); }; diff --git a/airbyte-webapp/src/pages/SettingsPage/pages/AccessManagementPage/next/RoleManagementMenuBody.tsx b/airbyte-webapp/src/pages/SettingsPage/pages/AccessManagementPage/next/RoleManagementMenuBody.tsx index 70bc897e5dc..0773c716d22 100644 --- a/airbyte-webapp/src/pages/SettingsPage/pages/AccessManagementPage/next/RoleManagementMenuBody.tsx +++ b/airbyte-webapp/src/pages/SettingsPage/pages/AccessManagementPage/next/RoleManagementMenuBody.tsx @@ -3,19 +3,20 @@ import { FormattedMessage } from "react-intl"; import { Box } from "components/ui/Box"; import { Text } from "components/ui/Text"; -import { WorkspaceUserAccessInfoRead } from "core/api/types/AirbyteClient"; import { FeatureItem, useFeature } from "core/services/features"; +import { CancelInvitationMenuItem } from "./CancelInvitationMenuItem"; import { ChangeRoleMenuItem } from "./ChangeRoleMenuItem"; import { RemoveRoleMenuItem } from "./RemoveRoleMenuItem"; import styles from "./RoleManagementMenuBody.module.scss"; import { ResourceType, + UnifiedWorkspaceUserModel, permissionStringDictionary, permissionsByResourceType, } from "../components/useGetAccessManagementData"; interface RoleManagementMenuBodyProps { - user: WorkspaceUserAccessInfoRead; + user: UnifiedWorkspaceUserModel; resourceType: ResourceType; close: () => void; } @@ -27,13 +28,15 @@ export const RoleManagementMenuBody: React.FC = ({ */ const rolesToAllow = - areAllRbacRolesEnabled || resourceType === "organization" ? permissionsByResourceType[resourceType] : []; + !user.invitationStatus && (areAllRbacRolesEnabled || resourceType === "organization") + ? 
permissionsByResourceType[resourceType] + : []; return (
      {resourceType === "workspace" && - user.organizationPermission?.permissionType && - user.organizationPermission?.permissionType !== "organization_member" && ( + user?.organizationPermission?.permissionType && + user?.organizationPermission?.permissionType !== "organization_member" && (
    • = ({
    • ); })} - {resourceType === "workspace" && ( + {resourceType === "workspace" && !!user.invitationStatus && ( +
    • + +
    • + )} + {resourceType === "workspace" && !user.invitationStatus && (
    • diff --git a/airbyte-webapp/src/pages/SettingsPage/pages/AccountPage/AccountPage.tsx b/airbyte-webapp/src/pages/SettingsPage/pages/AccountPage/AccountPage.tsx index 7b92866f995..017d88567bc 100644 --- a/airbyte-webapp/src/pages/SettingsPage/pages/AccountPage/AccountPage.tsx +++ b/airbyte-webapp/src/pages/SettingsPage/pages/AccountPage/AccountPage.tsx @@ -1,12 +1,8 @@ -import React, { useState } from "react"; -import { FormattedMessage, useIntl } from "react-intl"; -import { useAuth } from "react-oidc-context"; +import React from "react"; +import { useIntl } from "react-intl"; -import { HeadTitle } from "components/common/HeadTitle"; -import { Box } from "components/ui/Box"; -import { Button } from "components/ui/Button"; -import { Card } from "components/ui/Card"; import { FlexContainer } from "components/ui/Flex"; +import { Heading } from "components/ui/Heading"; import { FeatureItem, useFeature } from "core/services/features"; @@ -18,36 +14,9 @@ export const AccountPage: React.FC = () => { const isKeycloakAuthenticationEnabled = useFeature(FeatureItem.KeycloakAuthentication); return ( - <> - - - {isKeycloakAuthenticationEnabled ? : } - - {isKeycloakAuthenticationEnabled && } - - ); -}; - -const SignoutButton: React.FC = () => { - const [signoutRedirectPending, setSignnoutRedirectPending] = useState(false); - const auth = useAuth(); - - const handleSignout = () => { - setSignnoutRedirectPending(true); - auth.signoutRedirect(); - }; - return ( - - - - + + {formatMessage({ id: "settings.accountSettings" })} + {isKeycloakAuthenticationEnabled ? 
: } ); }; diff --git a/airbyte-webapp/src/pages/SettingsPage/pages/AccountPage/components/AccountForm.tsx b/airbyte-webapp/src/pages/SettingsPage/pages/AccountPage/components/AccountForm.tsx index 4555fc34060..c4b91d608ea 100644 --- a/airbyte-webapp/src/pages/SettingsPage/pages/AccountPage/components/AccountForm.tsx +++ b/airbyte-webapp/src/pages/SettingsPage/pages/AccountPage/components/AccountForm.tsx @@ -56,7 +56,7 @@ export const AccountForm: React.FC = () => { defaultValues={{ email: workspace.email ?? "" }} > label={formatMessage({ id: "form.yourEmail" })} fieldType="input" name="email" /> - + ); }; diff --git a/airbyte-webapp/src/pages/SettingsPage/pages/AdvancedSettingsPage/AdvancedSettingsPage.tsx b/airbyte-webapp/src/pages/SettingsPage/pages/AdvancedSettingsPage/AdvancedSettingsPage.tsx index d96345cace3..443b4684261 100644 --- a/airbyte-webapp/src/pages/SettingsPage/pages/AdvancedSettingsPage/AdvancedSettingsPage.tsx +++ b/airbyte-webapp/src/pages/SettingsPage/pages/AdvancedSettingsPage/AdvancedSettingsPage.tsx @@ -1,7 +1,7 @@ import { useIntl } from "react-intl"; -import { Card } from "components/ui/Card"; import { FlexContainer } from "components/ui/Flex"; +import { Heading } from "components/ui/Heading"; import { Message } from "components/ui/Message"; import { Switch } from "components/ui/Switch"; import { Text } from "components/ui/Text"; @@ -36,26 +36,33 @@ export const AdvancedSettingsPage: React.FC = () => { const { formatMessage } = useIntl(); const [workspaceInTitle, setWorkspaceInTitle] = useLocalStorage("airbyte_workspace-in-title", false); const [attemptsStats, setAttemptsStats] = useLocalStorage("airbyte_extended-attempts-stats", false); + const [connectionDetails, setConnectionDetails] = useLocalStorage("airbyte_connection-additional-details", false); return ( - - - - setWorkspaceInTitle(checked)} - label={formatMessage({ id: "settings.advancedSettings.workspaceInTitle" })} - description={formatMessage({ id: 
"settings.advancedSettings.workspaceInTitleDescription" })} - /> - setAttemptsStats(checked)} - label={formatMessage({ id: "settings.advancedSettings.attemptStats" })} - description={formatMessage({ id: "settings.advancedSettings.attemptStatsDescription" })} - /> - - + + {formatMessage({ id: "settings.advancedSettings.title" })} + + setWorkspaceInTitle(checked)} + label={formatMessage({ id: "settings.advancedSettings.workspaceInTitle" })} + description={formatMessage({ id: "settings.advancedSettings.workspaceInTitleDescription" })} + /> + setAttemptsStats(checked)} + label={formatMessage({ id: "settings.advancedSettings.attemptStats" })} + description={formatMessage({ id: "settings.advancedSettings.attemptStatsDescription" })} + /> + setConnectionDetails(checked)} + label={formatMessage({ id: "settings.advancedSettings.connectionDetails" })} + description={formatMessage({ id: "settings.advancedSettings.connectionDetailsDescription" })} + /> + ); }; diff --git a/airbyte-webapp/src/pages/SettingsPage/pages/ConnectorsPage/components/AddCustomDockerImageConnectorModal.tsx b/airbyte-webapp/src/pages/SettingsPage/pages/ConnectorsPage/components/AddCustomDockerImageConnectorModal.tsx new file mode 100644 index 00000000000..017ee2634cd --- /dev/null +++ b/airbyte-webapp/src/pages/SettingsPage/pages/ConnectorsPage/components/AddCustomDockerImageConnectorModal.tsx @@ -0,0 +1,96 @@ +import React, { useState } from "react"; +import { FormattedMessage, useIntl } from "react-intl"; +import * as yup from "yup"; + +import { Form, FormControl } from "components/forms"; +import { ModalFormSubmissionButtons } from "components/forms/ModalFormSubmissionButtons"; +import { Box } from "components/ui/Box"; +import { ExternalLink } from "components/ui/Link"; +import { Message } from "components/ui/Message"; +import { ModalBody, ModalFooter } from "components/ui/Modal"; +import { Text } from "components/ui/Text"; + +import { isCloudApp } from "core/utils/app"; +import { links } from 
"core/utils/links"; + +interface ConnectorDefinition { + name: string; + documentationUrl: string; + dockerImageTag: string; + dockerRepository: string; +} + +export interface AddCustomDockerImageConnectorModalProps { + onCancel: () => void; + onSubmit: (sourceDefinition: ConnectorDefinition) => Promise; +} + +const validationSchema = yup.object().shape({ + name: yup.string().trim().required("form.empty.error"), + documentationUrl: yup.string().trim().url("form.url.error").notRequired().default(""), + dockerImageTag: yup.string().trim().required("form.empty.error"), + dockerRepository: yup.string().trim().required("form.empty.error"), +}); + +const ConnectorControl = FormControl; + +export const AddCustomDockerImageConnectorModal: React.FC = ({ + onCancel, + onSubmit, +}) => { + const { formatMessage } = useIntl(); + const [error, setError] = useState(); + + return ( + + defaultValues={{ + name: "", + documentationUrl: "", + dockerImageTag: "", + dockerRepository: "", + }} + schema={validationSchema} + onSubmit={async (values) => { + setError(undefined); + await onSubmit(values); + }} + onError={(e) => { + setError(e.message || formatMessage({ id: "form.dockerError" })); + }} + > + + + {lnk}, + }} + /> + + + + + + + {error && } + + + + + + + ); +}; diff --git a/airbyte-webapp/src/pages/SettingsPage/pages/ConnectorsPage/components/CreateConnector.tsx b/airbyte-webapp/src/pages/SettingsPage/pages/ConnectorsPage/components/AddNewConnectorButton.tsx similarity index 58% rename from airbyte-webapp/src/pages/SettingsPage/pages/ConnectorsPage/components/CreateConnector.tsx rename to airbyte-webapp/src/pages/SettingsPage/pages/ConnectorsPage/components/AddNewConnectorButton.tsx index aa7cfa0fbbb..0fee33ee727 100644 --- a/airbyte-webapp/src/pages/SettingsPage/pages/ConnectorsPage/components/CreateConnector.tsx +++ b/airbyte-webapp/src/pages/SettingsPage/pages/ConnectorsPage/components/AddNewConnectorButton.tsx @@ -1,4 +1,4 @@ -import React, { useState } from "react"; 
+import React from "react"; import { FormattedMessage, useIntl } from "react-intl"; import { useNavigate } from "react-router-dom"; @@ -9,38 +9,34 @@ import { Icon } from "components/ui/Icon"; import { useCurrentWorkspaceId } from "area/workspace/utils"; import { useCreateDestinationDefinition, useCreateSourceDefinition } from "core/api"; import { FeatureItem, useFeature } from "core/services/features"; +import { useModalService } from "hooks/services/Modal"; import { ConnectorBuilderRoutePaths } from "pages/connectorBuilder/ConnectorBuilderRoutes"; import { DestinationPaths, RoutePaths, SourcePaths } from "pages/routePaths"; -import CreateConnectorModal from "./CreateConnectorModal"; +import { AddCustomDockerImageConnectorModal } from "./AddCustomDockerImageConnectorModal"; -interface IProps { +interface AddNewConnectorButtonProps { type: "sources" | "destinations"; } -interface ICreateProps { +interface ConnectorDefinitionProps { name: string; documentationUrl: string; dockerImageTag: string; dockerRepository: string; } -const CreateConnector: React.FC = ({ type }) => { +export const AddNewConnectorButton: React.FC = ({ type }) => { + const { formatMessage } = useIntl(); + const allowUploadCustomDockerImage = useFeature(FeatureItem.AllowUploadCustomImage); const navigate = useNavigate(); const workspaceId = useCurrentWorkspaceId(); - const [isModalOpen, setIsModalOpen] = useState(false); - const onChangeModalState = () => { - setIsModalOpen(!isModalOpen); - }; - const allowUploadCustomImage = useFeature(FeatureItem.AllowUploadCustomImage); - - const { formatMessage } = useIntl(); + const { openModal } = useModalService(); const { mutateAsync: createSourceDefinition } = useCreateSourceDefinition(); - const { mutateAsync: createDestinationDefinition } = useCreateDestinationDefinition(); - const onSubmitSource = async (sourceDefinition: ICreateProps) => { + const onSubmitSource = async (sourceDefinition: ConnectorDefinitionProps) => { const result = await 
createSourceDefinition(sourceDefinition); navigate({ @@ -48,7 +44,7 @@ const CreateConnector: React.FC = ({ type }) => { }); }; - const onSubmitDestination = async (destinationDefinition: ICreateProps) => { + const onSubmitDestination = async (destinationDefinition: ConnectorDefinitionProps) => { const result = await createDestinationDefinition(destinationDefinition); navigate({ @@ -56,17 +52,33 @@ const CreateConnector: React.FC = ({ type }) => { }); }; - const onSubmit = (values: ICreateProps) => + const onSubmit = (values: ConnectorDefinitionProps) => type === "sources" ? onSubmitSource(values) : onSubmitDestination(values); - if (type === "destinations" && !allowUploadCustomImage) { + const openAddCustomDockerImageConnectorModal = () => + openModal({ + title: formatMessage({ id: "admin.addNewConnector" }), + content: ({ onComplete, onCancel }) => ( + { + await onSubmit(values); + onComplete(); + }} + /> + ), + }); + + if (type === "destinations" && !allowUploadCustomDockerImage) { return null; } return ( <> - {type === "destinations" && allowUploadCustomImage ? ( - + {type === "destinations" && allowUploadCustomDockerImage ? ( + ) : ( = ({ type }) => { displayName: formatMessage({ id: "admin.newConnector.build" }), internal: true, }, - ...(allowUploadCustomImage + ...(allowUploadCustomDockerImage ? 
[ { as: "button" as const, @@ -89,28 +101,17 @@ const CreateConnector: React.FC = ({ type }) => { ] : []), ]} - onChange={(data: DropdownMenuOptionType) => data.value === "docker" && onChangeModalState()} + onChange={(data: DropdownMenuOptionType) => + data.value === "docker" && openAddCustomDockerImageConnectorModal() + } > - {() => } + {() => ( + + )} )} - - {isModalOpen && } ); }; - -interface NewConnectorButtonProps { - onClick?: () => void; -} - -const NewConnectorButton = React.forwardRef(({ onClick }, ref) => { - return ( - - ); -}); -NewConnectorButton.displayName = "NewConnectorButton"; - -export default CreateConnector; diff --git a/airbyte-webapp/src/pages/SettingsPage/pages/ConnectorsPage/components/ConnectorsView.tsx b/airbyte-webapp/src/pages/SettingsPage/pages/ConnectorsPage/components/ConnectorsView.tsx index 051798ad904..9ae83f14058 100644 --- a/airbyte-webapp/src/pages/SettingsPage/pages/ConnectorsPage/components/ConnectorsView.tsx +++ b/airbyte-webapp/src/pages/SettingsPage/pages/ConnectorsPage/components/ConnectorsView.tsx @@ -2,7 +2,6 @@ import { createColumnHelper } from "@tanstack/react-table"; import React, { useCallback, useMemo, useState } from "react"; import { FormattedMessage } from "react-intl"; -import { HeadTitle } from "components/common/HeadTitle"; import { ConnectorBuilderProjectTable } from "components/ConnectorBuilderProjectTable"; import { FlexContainer, FlexItem } from "components/ui/Flex"; import { Heading } from "components/ui/Heading"; @@ -16,10 +15,10 @@ import { FeatureItem, useFeature } from "core/services/features"; import { useIntent } from "core/utils/rbac"; import { RoutePaths } from "pages/routePaths"; +import { AddNewConnectorButton } from "./AddNewConnectorButton"; import { ConnectorCell } from "./ConnectorCell"; import styles from "./ConnectorsView.module.scss"; import { ConnectorsViewContext } from "./ConnectorsViewContext"; -import CreateConnector from "./CreateConnector"; import ImageCell from "./ImageCell"; 
import { UpdateDestinationConnectorVersionCell } from "./UpdateDestinationConnectorVersionCell"; import { UpdateSourceConnectorVersionCell } from "./UpdateSourceConnectorVersionCell"; @@ -210,22 +209,31 @@ const ConnectorsView: React.FC = ({ sections.push({ title: type === "sources" ? "admin.manageSource" : "admin.manageDestination", content: ( - + ), }); } sections.push({ title: type === "sources" ? "admin.availableSource" : "admin.availableDestinations", - content: , + content: ( + + ), }); return (
      - {sections.map((section, index) => ( @@ -237,7 +245,7 @@ const ConnectorsView: React.FC = ({ {index === 0 && ( - + {allowUpdateConnectors && } )} diff --git a/airbyte-webapp/src/pages/SettingsPage/pages/ConnectorsPage/components/CreateConnectorModal.tsx b/airbyte-webapp/src/pages/SettingsPage/pages/ConnectorsPage/components/CreateConnectorModal.tsx deleted file mode 100644 index c3f37435c6b..00000000000 --- a/airbyte-webapp/src/pages/SettingsPage/pages/ConnectorsPage/components/CreateConnectorModal.tsx +++ /dev/null @@ -1,96 +0,0 @@ -import React, { useState } from "react"; -import { FormattedMessage, useIntl } from "react-intl"; -import * as yup from "yup"; - -import { Form, FormControl } from "components/forms"; -import { ModalFormSubmissionButtons } from "components/forms/ModalFormSubmissionButtons"; -import { Box } from "components/ui/Box"; -import { ExternalLink } from "components/ui/Link"; -import { Message } from "components/ui/Message"; -import { Modal, ModalBody, ModalFooter } from "components/ui/Modal"; -import { Text } from "components/ui/Text"; - -import { isCloudApp } from "core/utils/app"; -import { links } from "core/utils/links"; - -interface ConnectorDefinition { - name: string; - documentationUrl: string; - dockerImageTag: string; - dockerRepository: string; -} - -export interface CreateConnectorModalProps { - onClose: () => void; - onSubmit: (sourceDefinition: ConnectorDefinition) => Promise; -} -const validationSchema = yup.object().shape({ - name: yup.string().trim().required("form.empty.error"), - documentationUrl: yup.string().trim().url("form.url.error").notRequired().default(""), - dockerImageTag: yup.string().trim().required("form.empty.error"), - dockerRepository: yup.string().trim().required("form.empty.error"), -}); - -const ConnectorControl = FormControl; - -const CreateConnectorModal: React.FC = ({ onClose, onSubmit }) => { - const { formatMessage } = useIntl(); - const [error, setError] = useState(); - - return ( - }> - - 
defaultValues={{ - name: "", - documentationUrl: "", - dockerImageTag: "", - dockerRepository: "", - }} - schema={validationSchema} - onSubmit={async (values) => { - setError(undefined); - await onSubmit(values); - }} - onError={(e) => { - setError(e.message || formatMessage({ id: "form.dockerError" })); - }} - > - - - {lnk}, - }} - /> - - - - - - - {error && } - - - - - - - - ); -}; - -export default CreateConnectorModal; diff --git a/airbyte-webapp/src/pages/SettingsPage/pages/ConnectorsPage/components/UpgradeAllButton.tsx b/airbyte-webapp/src/pages/SettingsPage/pages/ConnectorsPage/components/UpgradeAllButton.tsx index d040508b6d4..6e82064cea3 100644 --- a/airbyte-webapp/src/pages/SettingsPage/pages/ConnectorsPage/components/UpgradeAllButton.tsx +++ b/airbyte-webapp/src/pages/SettingsPage/pages/ConnectorsPage/components/UpgradeAllButton.tsx @@ -2,7 +2,6 @@ import React from "react"; import { FormattedMessage } from "react-intl"; import { Button } from "components/ui/Button"; -import { Icon } from "components/ui/Icon"; import { useGetConnectorsOutOfDate, useUpdateAllConnectors } from "hooks/services/useConnector"; @@ -30,13 +29,7 @@ const UpgradeAllButton: React.FC = ({ connectorType }) => }; return ( - ); diff --git a/airbyte-webapp/src/pages/SettingsPage/pages/ConnectorsPage/components/VersionCell/VersionChangeResult.tsx b/airbyte-webapp/src/pages/SettingsPage/pages/ConnectorsPage/components/VersionCell/VersionChangeResult.tsx deleted file mode 100644 index 09e4a151549..00000000000 --- a/airbyte-webapp/src/pages/SettingsPage/pages/ConnectorsPage/components/VersionCell/VersionChangeResult.tsx +++ /dev/null @@ -1,22 +0,0 @@ -import React from "react"; -import { useFormState } from "react-hook-form"; -import { FormattedMessage } from "react-intl"; - -import { Text } from "components/ui/Text"; - -export const VersionChangeResult: React.FC<{ feedback?: string }> = ({ feedback }) => { - const { isDirty } = useFormState(); - - if (feedback === "success" && !isDirty) { 
- return ; - } - if (feedback && feedback !== "success") { - return ( - - {feedback} - - ); - } - - return null; -}; diff --git a/airbyte-webapp/src/pages/SettingsPage/pages/MetricsPage/MetricsPage.tsx b/airbyte-webapp/src/pages/SettingsPage/pages/MetricsPage/MetricsPage.tsx index b63e6b36543..45063f9cba7 100644 --- a/airbyte-webapp/src/pages/SettingsPage/pages/MetricsPage/MetricsPage.tsx +++ b/airbyte-webapp/src/pages/SettingsPage/pages/MetricsPage/MetricsPage.tsx @@ -1,8 +1,8 @@ import React from "react"; import { useIntl } from "react-intl"; -import { HeadTitle } from "components/common/HeadTitle"; -import { Card } from "components/ui/Card"; +import { FlexContainer } from "components/ui/Flex"; +import { Heading } from "components/ui/Heading"; import { useTrackPage, PageTrackingCodes } from "core/services/analytics"; @@ -13,11 +13,9 @@ export const MetricsPage: React.FC = () => { useTrackPage(PageTrackingCodes.SETTINGS_METRICS); return ( - <> - - - - - + + {formatMessage({ id: "settings.metricsSettings" })} + + ); }; diff --git a/airbyte-webapp/src/pages/SettingsPage/pages/MetricsPage/components/MetricsForm.tsx b/airbyte-webapp/src/pages/SettingsPage/pages/MetricsPage/components/MetricsForm.tsx index 0725590584b..886dc16077e 100644 --- a/airbyte-webapp/src/pages/SettingsPage/pages/MetricsPage/components/MetricsForm.tsx +++ b/airbyte-webapp/src/pages/SettingsPage/pages/MetricsPage/components/MetricsForm.tsx @@ -71,7 +71,7 @@ export const MetricsForm: React.FC = () => { description={formatMessage({ id: "preferences.collectData" })} /> - + ); }; diff --git a/airbyte-webapp/src/pages/SettingsPage/pages/NotificationPage/NotificationPage.tsx b/airbyte-webapp/src/pages/SettingsPage/pages/NotificationPage/NotificationPage.tsx index 0fca1055212..de2b1fba3d7 100644 --- a/airbyte-webapp/src/pages/SettingsPage/pages/NotificationPage/NotificationPage.tsx +++ b/airbyte-webapp/src/pages/SettingsPage/pages/NotificationPage/NotificationPage.tsx @@ -1,11 +1,10 @@ import React from 
"react"; import { useIntl } from "react-intl"; -import { HeadTitle } from "components/common/HeadTitle"; import { NotificationSettingsForm } from "components/NotificationSettingsForm"; -import { PageContainer } from "components/PageContainer"; -import { Card } from "components/ui/Card"; import { FlexContainer } from "components/ui/Flex"; +import { Heading } from "components/ui/Heading"; +import { Separator } from "components/ui/Separator"; import { WorkspaceEmailForm } from "components/WorkspaceEmailForm"; import { useTrackPage, PageTrackingCodes } from "core/services/analytics"; @@ -17,18 +16,15 @@ export const NotificationPage: React.FC = () => { const emailNotificationsFeatureEnabled = useFeature(FeatureItem.EmailNotifications); return ( - - - - {emailNotificationsFeatureEnabled && ( - - - - )} - - - - - + + {formatMessage({ id: "settings.notificationSettings" })} + {emailNotificationsFeatureEnabled && ( + <> + + + + )} + + ); }; diff --git a/airbyte-webapp/src/pages/connections/AllConnectionsPage/AllConnectionsPage.tsx b/airbyte-webapp/src/pages/connections/AllConnectionsPage/AllConnectionsPage.tsx index 7323d4e34f2..d1a8eb11449 100644 --- a/airbyte-webapp/src/pages/connections/AllConnectionsPage/AllConnectionsPage.tsx +++ b/airbyte-webapp/src/pages/connections/AllConnectionsPage/AllConnectionsPage.tsx @@ -1,4 +1,4 @@ -import React, { Suspense, useDeferredValue, useMemo, useState } from "react"; +import React, { Suspense, useDeferredValue, useMemo } from "react"; import { FormattedMessage } from "react-intl"; import { useNavigate } from "react-router-dom"; @@ -10,15 +10,13 @@ import { Button } from "components/ui/Button"; import { Card } from "components/ui/Card"; import { FlexContainer, FlexItem } from "components/ui/Flex"; import { Heading } from "components/ui/Heading"; -import { Icon } from "components/ui/Icon"; import { PageHeader } from "components/ui/PageHeader"; import { Text } from "components/ui/Text"; import { useConnectionList, useCurrentWorkspace, 
useFilters } from "core/api"; import { JobStatus, WebBackendConnectionListItem } from "core/api/types/AirbyteClient"; -import { useTrackPage, PageTrackingCodes } from "core/services/analytics"; +import { PageTrackingCodes, useTrackPage } from "core/services/analytics"; import { useIntent } from "core/utils/rbac"; -import { useExperiment } from "hooks/services/Experiment"; import styles from "./AllConnectionsPage.module.scss"; import { ConnectionsFilters, FilterValues } from "./ConnectionsFilters"; @@ -42,10 +40,8 @@ const isConnectionFailed = ( connection.latestSyncJobStatus === JobStatus.incomplete; export const AllConnectionsPage: React.FC = () => { - const navigate = useNavigate(); - useTrackPage(PageTrackingCodes.CONNECTIONS_LIST); - const isConnectionsSummaryEnabled = useExperiment("connections.summaryView", false); + const navigate = useNavigate(); const { workspaceId } = useCurrentWorkspace(); const canCreateConnection = useIntent("CreateConnection", { workspaceId }); @@ -53,13 +49,13 @@ export const AllConnectionsPage: React.FC = () => { const connectionList = useConnectionList(); const connections = useMemo(() => connectionList?.connections ?? 
[], [connectionList?.connections]); - const [searchFilter, setSearchFilter] = useState(""); - const debouncedSearchFilter = useDeferredValue(searchFilter); const [filterValues, setFilterValue, setFilters] = useFilters({ + search: "", status: null, source: null, destination: null, }); + const debouncedSearchFilter = useDeferredValue(filterValues.search); const filteredConnections = useMemo(() => { const statusFilter = filterValues.status; @@ -158,18 +154,16 @@ export const AllConnectionsPage: React.FC = () => { - {isConnectionsSummaryEnabled && ( - - - - )} + + + } endComponent={ - diff --git a/airbyte-webapp/src/pages/connections/ConnectionReplicationPage/__snapshots__/ConnectionReplicationPage.test.tsx.snap b/airbyte-webapp/src/pages/connections/ConnectionReplicationPage/__snapshots__/ConnectionReplicationPage.test.tsx.snap index 95ecc5b9b59..7556895cc7e 100644 --- a/airbyte-webapp/src/pages/connections/ConnectionReplicationPage/__snapshots__/ConnectionReplicationPage.test.tsx.snap +++ b/airbyte-webapp/src/pages/connections/ConnectionReplicationPage/__snapshots__/ConnectionReplicationPage.test.tsx.snap @@ -656,69 +656,13 @@ exports[`ConnectionReplicationPage should render 1`] = `
      -
      - -
      -
      -
      + />
      -
      -
      -

      - All -

      -
      -
      -
      + />
      @@ -821,18 +765,22 @@ exports[`ConnectionReplicationPage should show an error if there is a schemaErro >
      -
      -
      - +
      +
      +
      - Sorry. Something went wrong... - + + Sorry. Something went wrong... + +
      diff --git a/airbyte-webapp/src/pages/connections/ConnectionSettingsPage/ConnectionSettingsPage.module.scss b/airbyte-webapp/src/pages/connections/ConnectionSettingsPage/ConnectionSettingsPage.module.scss index 7ef70dd5420..84b0f39ddc2 100644 --- a/airbyte-webapp/src/pages/connections/ConnectionSettingsPage/ConnectionSettingsPage.module.scss +++ b/airbyte-webapp/src/pages/connections/ConnectionSettingsPage/ConnectionSettingsPage.module.scss @@ -12,6 +12,10 @@ .advancedButton { margin-top: variables.$spacing-md; + + &.alignStart { + align-self: flex-start; + } } .advancedPanel { diff --git a/airbyte-webapp/src/pages/connections/ConnectionSettingsPage/ConnectionSettingsPage.tsx b/airbyte-webapp/src/pages/connections/ConnectionSettingsPage/ConnectionSettingsPage.tsx index 92aaa11832c..3d8777b9dcd 100644 --- a/airbyte-webapp/src/pages/connections/ConnectionSettingsPage/ConnectionSettingsPage.tsx +++ b/airbyte-webapp/src/pages/connections/ConnectionSettingsPage/ConnectionSettingsPage.tsx @@ -1,9 +1,17 @@ import { Disclosure } from "@headlessui/react"; -import React from "react"; +import classnames from "classnames"; +import React, { useCallback } from "react"; import { FormattedMessage, useIntl } from "react-intl"; import * as yup from "yup"; -import { DeleteBlock } from "components/common/DeleteBlock"; +import { ConnectionDangerBlock } from "components/common/ConnectionDangerBlock"; +import { ConnectionDeleteBlock } from "components/common/ConnectionDeleteBlock"; +import { + FormConnectionFormValues, + useConnectionValidationSchema, + useInitialFormValues, +} from "components/connection/ConnectionForm/formConfig"; +import { SimplifiedConnectionsSettingsCard } from "components/connection/CreateConnectionForm/SimplifiedConnectionCreation/SimplifiedConnectionSettingsCard"; import { Form } from "components/forms"; import { DataResidencyDropdown } from "components/forms/DataResidencyDropdown"; import { FormSubmissionButtons } from 
"components/forms/FormSubmissionButtons"; @@ -11,11 +19,15 @@ import { Button } from "components/ui/Button"; import { Card } from "components/ui/Card"; import { FlexContainer } from "components/ui/Flex"; import { Heading } from "components/ui/Heading"; -import { Icon } from "components/ui/Icon"; import { ExternalLink } from "components/ui/Link"; import { Spinner } from "components/ui/Spinner"; -import { useCurrentWorkspace, useDeleteConnection } from "core/api"; +import { + useCurrentWorkspace, + useDeleteConnection, + useDestinationDefinitionVersion, + useResetConnection, +} from "core/api"; import { Geography, WebBackendConnectionUpdate } from "core/api/types/AirbyteClient"; import { PageTrackingCodes, useTrackPage } from "core/services/analytics"; import { FeatureItem, useFeature } from "core/services/features"; @@ -24,6 +36,7 @@ import { useIntent } from "core/utils/rbac"; import { useAppMonitoringService } from "hooks/services/AppMonitoringService"; import { useConnectionEditService } from "hooks/services/ConnectionEdit/ConnectionEditService"; import { useConnectionFormService } from "hooks/services/ConnectionForm/ConnectionFormService"; +import { useExperiment } from "hooks/services/Experiment"; import { useNotificationService } from "hooks/services/Notification"; import styles from "./ConnectionSettingsPage.module.scss"; @@ -60,14 +73,12 @@ const dataResidencyDropdownDescription = ( export const ConnectionSettingsPage: React.FC = () => { const { connection, updateConnection } = useConnectionEditService(); const { mode } = useConnectionFormService(); - const { mutateAsync: deleteConnection } = useDeleteConnection(); const canUpdateDataResidency = useFeature(FeatureItem.AllowChangeDataGeographies); const canSendSchemaUpdateNotifications = useFeature(FeatureItem.AllowAutoDetectSchema); const { registerNotification } = useNotificationService(); const { formatMessage } = useIntl(); const { trackError } = useAppMonitoringService(); 
useTrackPage(PageTrackingCodes.CONNECTIONS_ITEM_SETTINGS); - const onDelete = () => deleteConnection(connection); const { workspaceId } = useCurrentWorkspace(); const canEditConnection = useIntent("EditConnection", { workspaceId }); @@ -102,6 +113,12 @@ export const ConnectionSettingsPage: React.FC = () => { return defaultValues; }; + const isSimplifiedCreation = useExperiment("connection.simplifiedCreation", false); + + if (isSimplifiedCreation) { + return ; + } + return (
      @@ -146,7 +163,7 @@ export const ConnectionSettingsPage: React.FC = () => { - {connection.status !== "deprecated" && } + {connection.status !== "deprecated" && } {({ open }) => ( @@ -154,18 +171,14 @@ export const ConnectionSettingsPage: React.FC = () => { } + icon={open ? "chevronDown" : "chevronRight"} className={styles.advancedButton} > }> - + @@ -174,3 +187,106 @@ export const ConnectionSettingsPage: React.FC = () => {
      ); }; + +const SimplifiedConnectionSettingsPage = () => { + const { connection, updateConnection } = useConnectionEditService(); + const { formatMessage } = useIntl(); + const { registerNotification } = useNotificationService(); + const { trackError } = useAppMonitoringService(); + const sayClearInsteadOfReset = useExperiment("connection.clearNotReset", false); + + const { mode } = useConnectionFormService(); + const destDefinitionVersion = useDestinationDefinitionVersion(connection.destinationId); + const simplifiedInitialValues = useInitialFormValues(connection, destDefinitionVersion, mode === "edit"); + + const { workspaceId } = useCurrentWorkspace(); + const canEditConnection = useIntent("EditConnection", { workspaceId }); + + const validationSchema = useConnectionValidationSchema(); + + const { mutateAsync: deleteConnection } = useDeleteConnection(); + const onDelete = () => deleteConnection(connection); + + const { mutateAsync: doResetConnection } = useResetConnection(); + const onReset = useCallback(async () => { + await doResetConnection(connection.connectionId); + registerNotification({ + id: sayClearInsteadOfReset ? "clearData.successfulStart" : "connection_reset_start_success", + text: formatMessage({ + id: sayClearInsteadOfReset ? 
"form.clearData.successfulStart" : "form.resetData.successfulStart", + }), + type: "success", + }); + }, [doResetConnection, connection.connectionId, registerNotification, sayClearInsteadOfReset, formatMessage]); + + const onSuccess = () => { + registerNotification({ + id: "connection_settings_change_success", + text: formatMessage({ id: "form.changesSaved" }), + type: "success", + }); + }; + + const onError = (e: Error, { name }: FormConnectionFormValues) => { + trackError(e, { connectionName: name }); + registerNotification({ + id: "connection_settings_change_error", + text: formatMessage({ id: "connection.updateFailed" }), + type: "error", + }); + }; + + const isDeprecated = connection.status === "deprecated"; + + return ( + + + trackDirtyChanges + disabled={!canEditConnection} + onSubmit={(values: FormConnectionFormValues) => { + const connectionUpdates: WebBackendConnectionUpdate = { + connectionId: connection.connectionId, + ...values, + }; + + return updateConnection(connectionUpdates); + }} + onError={onError} + onSuccess={onSuccess} + schema={validationSchema} + defaultValues={simplifiedInitialValues} + > + + + + {connection.status !== "deprecated" && } + + + {({ open }) => ( + <> + + + + + }> + + + + + )} + + + ); +}; diff --git a/airbyte-webapp/src/pages/connections/ConnectionSettingsPage/StateBlock.tsx b/airbyte-webapp/src/pages/connections/ConnectionSettingsPage/StateBlock.tsx index 2cd09a65024..444c770b659 100644 --- a/airbyte-webapp/src/pages/connections/ConnectionSettingsPage/StateBlock.tsx +++ b/airbyte-webapp/src/pages/connections/ConnectionSettingsPage/StateBlock.tsx @@ -8,10 +8,9 @@ import { CopyButton } from "components/ui/CopyButton"; import { FlexContainer } from "components/ui/Flex"; import { Heading } from "components/ui/Heading"; import { Message } from "components/ui/Message"; -import { Text } from "components/ui/Text"; import { useCreateOrUpdateState, useGetConnectionState } from "core/api"; -import { AirbyteCatalog, ConnectionState, 
StreamState } from "core/api/types/AirbyteClient"; +import { ConnectionState, StreamState } from "core/api/types/AirbyteClient"; import { haveSameShape } from "core/utils/objects"; import { useConfirmationModalService } from "hooks/services/ConfirmationModal"; @@ -19,7 +18,6 @@ import styles from "./StateBlock.module.scss"; interface StateBlockProps { connectionId: string; - syncCatalog: AirbyteCatalog; disabled?: boolean; } @@ -37,16 +35,12 @@ function convertStateToString(state: ConnectionState): string { } } -export const StateBlock: React.FC = ({ connectionId, syncCatalog, disabled }) => { +export const StateBlock: React.FC = ({ connectionId, disabled }) => { const { formatMessage } = useIntl(); const existingState = useGetConnectionState(connectionId); const { mutateAsync: updateState, isLoading } = useCreateOrUpdateState(); const { openConfirmationModal, closeConfirmationModal } = useConfirmationModalService(); - const hasIncrementalStream = useMemo(() => { - return syncCatalog.streams.some((stream) => stream.config?.syncMode === "incremental"); - }, [syncCatalog.streams]); - const existingStateString = useMemo(() => convertStateToString(existingState), [existingState]); const [stateDraft, setStateDraft] = useState(existingStateString); @@ -85,6 +79,11 @@ export const StateBlock: React.FC = ({ connectionId, syncCatalo return { newState: { connectionId, stateType: "legacy", state: stateDraftJson } }; }, [connectionId, formatMessage, stateDraft]); + // show the error message when both of the following are true: + // 1. an error exists + // 2. 
the connection has an existing state OR the editor has content (as a proxy for user modifying the non-state) + const showErrorMessage = !!errorMessage && (existingState.stateType !== "not_set" || stateDraft.length > 0); + const handleStateUpdate = useCallback(() => { if (newState === undefined) { return; @@ -115,71 +114,55 @@ export const StateBlock: React.FC = ({ connectionId, syncCatalo return ( - {!hasIncrementalStream ? ( - <> - {title} - - - - - ) : ( - <> - - {title} - - - - { - setStateDraft(value ?? ""); - }} - readOnly={disabled} + + {title} + + + + { + setStateDraft(value ?? ""); + }} + readOnly={disabled} + /> + + + {showErrorMessage ? ( + + ) : ( + } + secondaryText={
      }} />} /> - - - {errorMessage ? ( - - ) : ( - } - secondaryText={ -
      }} /> - } - /> - )} - - - - -
      - - )} + )} + + + + +
      ); diff --git a/airbyte-webapp/src/pages/connections/ConnectionTransformationPage/ConnectionTransformationPage.tsx b/airbyte-webapp/src/pages/connections/ConnectionTransformationPage/ConnectionTransformationPage.tsx index 46209b17b91..097c0f89e6b 100644 --- a/airbyte-webapp/src/pages/connections/ConnectionTransformationPage/ConnectionTransformationPage.tsx +++ b/airbyte-webapp/src/pages/connections/ConnectionTransformationPage/ConnectionTransformationPage.tsx @@ -11,7 +11,6 @@ import { FeatureItem, useFeature } from "core/services/features"; import { useConnectionFormService } from "hooks/services/ConnectionForm/ConnectionFormService"; import styles from "./ConnectionTransformationPage.module.scss"; -import { CustomTransformationsForm } from "./CustomTransformationsForm"; import { DbtCloudTransformations } from "./DbtCloudTransformations"; import { NormalizationForm } from "./NormalizationForm"; @@ -20,16 +19,14 @@ export const ConnectionTransformationPage: React.FC = () => { useTrackPage(PageTrackingCodes.CONNECTIONS_ITEM_TRANSFORMATION); const supportsNormalization = destDefinitionVersion.normalizationConfig.supported; - const supportsDbt = useFeature(FeatureItem.AllowCustomDBT) && destDefinitionVersion.supportsDbt; const supportsCloudDbtIntegration = useFeature(FeatureItem.AllowDBTCloudIntegration) && destDefinitionVersion.supportsDbt; - const noSupportedTransformations = !supportsNormalization && !supportsDbt && !supportsCloudDbtIntegration; + const noSupportedTransformations = !supportsNormalization && !supportsCloudDbtIntegration; return ( {supportsNormalization && } - {supportsDbt && } {supportsCloudDbtIntegration && } {noSupportedTransformations && ( diff --git a/airbyte-webapp/src/pages/connections/ConnectionTransformationPage/DbtCloudTransformations/DbtCloudErrorBoundary/DbtCloudErrorBoundary.module.scss b/airbyte-webapp/src/pages/connections/ConnectionTransformationPage/DbtCloudTransformations/DbtCloudErrorBoundary/DbtCloudErrorBoundary.module.scss 
deleted file mode 100644 index 74776c9aeca..00000000000 --- a/airbyte-webapp/src/pages/connections/ConnectionTransformationPage/DbtCloudTransformations/DbtCloudErrorBoundary/DbtCloudErrorBoundary.module.scss +++ /dev/null @@ -1 +0,0 @@ -@forward "../DbtCloudTransformations.module"; diff --git a/airbyte-webapp/src/pages/connections/ConnectionTransformationPage/DbtCloudTransformations/DbtCloudErrorBoundary/DbtCloudErrorBoundary.tsx b/airbyte-webapp/src/pages/connections/ConnectionTransformationPage/DbtCloudTransformations/DbtCloudErrorBoundary/DbtCloudErrorBoundary.tsx index 91ac9e6079c..d8f55f8bd38 100644 --- a/airbyte-webapp/src/pages/connections/ConnectionTransformationPage/DbtCloudTransformations/DbtCloudErrorBoundary/DbtCloudErrorBoundary.tsx +++ b/airbyte-webapp/src/pages/connections/ConnectionTransformationPage/DbtCloudTransformations/DbtCloudErrorBoundary/DbtCloudErrorBoundary.tsx @@ -7,15 +7,13 @@ import { Text } from "components/ui/Text"; import { TrackErrorFn } from "hooks/services/AppMonitoringService"; -import styles from "./DbtCloudErrorBoundary.module.scss"; - const DbtCloudErrorCard: React.FC<{ displayMessage?: string | null }> = ({ displayMessage }) => { const { formatMessage } = useIntl(); return ( - + {displayMessage ? 
( ) : ( diff --git a/airbyte-webapp/src/pages/connections/ConnectionTransformationPage/DbtCloudTransformations/DbtCloudTransformations.module.scss b/airbyte-webapp/src/pages/connections/ConnectionTransformationPage/DbtCloudTransformations/DbtCloudTransformations.module.scss deleted file mode 100644 index 36dbd4daea9..00000000000 --- a/airbyte-webapp/src/pages/connections/ConnectionTransformationPage/DbtCloudTransformations/DbtCloudTransformations.module.scss +++ /dev/null @@ -1,6 +0,0 @@ -@use "scss/colors"; -@use "scss/variables"; - -.cardBodyContainer { - background-color: colors.$grey-50; -} diff --git a/airbyte-webapp/src/pages/connections/ConnectionTransformationPage/DbtCloudTransformations/DbtCloudTransformationsForm/DbtCloudTransformationsFormControls.module.scss b/airbyte-webapp/src/pages/connections/ConnectionTransformationPage/DbtCloudTransformations/DbtCloudTransformationsForm/DbtCloudTransformationsFormControls.module.scss index 74776c9aeca..36dbd4daea9 100644 --- a/airbyte-webapp/src/pages/connections/ConnectionTransformationPage/DbtCloudTransformations/DbtCloudTransformationsForm/DbtCloudTransformationsFormControls.module.scss +++ b/airbyte-webapp/src/pages/connections/ConnectionTransformationPage/DbtCloudTransformations/DbtCloudTransformationsForm/DbtCloudTransformationsFormControls.module.scss @@ -1 +1,6 @@ -@forward "../DbtCloudTransformations.module"; +@use "scss/colors"; +@use "scss/variables"; + +.cardBodyContainer { + background-color: colors.$grey-50; +} diff --git a/airbyte-webapp/src/pages/connections/ConnectionTransformationPage/DbtCloudTransformations/DbtCloudTransformationsForm/DbtCloudTransformationsFormControls.tsx b/airbyte-webapp/src/pages/connections/ConnectionTransformationPage/DbtCloudTransformations/DbtCloudTransformationsForm/DbtCloudTransformationsFormControls.tsx index d0b705bcd0b..084f1d3ff41 100644 --- 
a/airbyte-webapp/src/pages/connections/ConnectionTransformationPage/DbtCloudTransformations/DbtCloudTransformationsForm/DbtCloudTransformationsFormControls.tsx +++ b/airbyte-webapp/src/pages/connections/ConnectionTransformationPage/DbtCloudTransformations/DbtCloudTransformationsForm/DbtCloudTransformationsFormControls.tsx @@ -6,7 +6,6 @@ import { Box } from "components/ui/Box"; import { Button } from "components/ui/Button"; import { DropdownMenu } from "components/ui/DropdownMenu"; import { FlexContainer } from "components/ui/Flex"; -import { Icon } from "components/ui/Icon"; import { Text } from "components/ui/Text"; import { isSameJob } from "core/api/cloud"; @@ -58,11 +57,7 @@ export const DbtCloudTransformationsFormControls: React.FC {() => ( - )} diff --git a/airbyte-webapp/src/pages/connections/ConnectionTransformationPage/DbtCloudTransformations/JobListItem/JobListItem.tsx b/airbyte-webapp/src/pages/connections/ConnectionTransformationPage/DbtCloudTransformations/JobListItem/JobListItem.tsx index 79ef2b21404..1aa34fb5128 100644 --- a/airbyte-webapp/src/pages/connections/ConnectionTransformationPage/DbtCloudTransformations/JobListItem/JobListItem.tsx +++ b/airbyte-webapp/src/pages/connections/ConnectionTransformationPage/DbtCloudTransformations/JobListItem/JobListItem.tsx @@ -4,7 +4,6 @@ import { useIntl } from "react-intl"; import { Button } from "components/ui/Button"; import { Card } from "components/ui/Card"; import { FlexContainer, FlexItem } from "components/ui/Flex"; -import { Icon } from "components/ui/Icon"; import { Text } from "components/ui/Text"; import { DbtCloudJob } from "core/api/cloud"; @@ -68,7 +67,7 @@ export const JobListItem: React.FC = ({ variant="clear" onClick={removeJob} disabled={isLoading} - icon={} + icon="cross" aria-label={formatMessage({ id: "connection.dbtCloudJobs.job.deleteButton" })} /> diff --git a/airbyte-webapp/src/pages/connections/CreateConnectionPage/CreateConnectionTitleBlock.tsx 
b/airbyte-webapp/src/pages/connections/CreateConnectionPage/CreateConnectionTitleBlock.tsx index 0f5b5f2fec5..c7d3e44ac25 100644 --- a/airbyte-webapp/src/pages/connections/CreateConnectionPage/CreateConnectionTitleBlock.tsx +++ b/airbyte-webapp/src/pages/connections/CreateConnectionPage/CreateConnectionTitleBlock.tsx @@ -1,6 +1,6 @@ import { Fragment, Suspense } from "react"; import { FormattedMessage } from "react-intl"; -import { Navigate, useSearchParams } from "react-router-dom"; +import { Navigate, useLocation, useSearchParams } from "react-router-dom"; import { ConnectorIcon } from "components/common/ConnectorIcon"; import { Box } from "components/ui/Box"; @@ -20,6 +20,7 @@ import { useGetSource, } from "core/api"; import { SupportLevel } from "core/api/types/AirbyteClient"; +import { useExperiment } from "hooks/services/Experiment"; import { RoutePaths } from "pages/routePaths"; import styles from "./CreateConnectionTitleBlock.module.scss"; @@ -37,12 +38,21 @@ interface ConnectionSteps { configureConnection: StepStatus; } -const calculateStepStatuses = (source: string | null, destination: string | null): ConnectionSteps | undefined => { +const useCalculateStepStatuses = (source: string | null, destination: string | null): ConnectionSteps | undefined => { + const isSimplifiedCreation = useExperiment("connection.simplifiedCreation", false); + const location = useLocation(); + const isOnContinuedSimplifiedStep = location.pathname.endsWith("/continued"); + if (!source && !destination) { return { defineSource: ACTIVE, defineDestination: INCOMPLETE, - configureConnection: INCOMPLETE, + ...(!isSimplifiedCreation + ? { configureConnection: INCOMPLETE } + : { + selectStreams: INCOMPLETE, + configureConnection: INCOMPLETE, + }), }; } @@ -50,21 +60,36 @@ const calculateStepStatuses = (source: string | null, destination: string | null return { defineSource: COMPLETE, defineDestination: ACTIVE, - configureConnection: INCOMPLETE, + ...(!isSimplifiedCreation + ? 
{ configureConnection: INCOMPLETE } + : { + selectStreams: INCOMPLETE, + configureConnection: INCOMPLETE, + }), }; } if (destination && !source) { return { defineSource: ACTIVE, defineDestination: COMPLETE, - configureConnection: INCOMPLETE, + ...(!isSimplifiedCreation + ? { configureConnection: INCOMPLETE } + : { + selectStreams: INCOMPLETE, + configureConnection: INCOMPLETE, + }), }; } if (source && destination) { return { defineSource: COMPLETE, defineDestination: COMPLETE, - configureConnection: ACTIVE, + ...(!isSimplifiedCreation + ? { configureConnection: ACTIVE } + : { + selectStreams: isOnContinuedSimplifiedStep ? COMPLETE : ACTIVE, + configureConnection: isOnContinuedSimplifiedStep ? ACTIVE : INCOMPLETE, + }), }; } return undefined; @@ -81,7 +106,9 @@ const StepItem: React.FC<{ state: StepStatus; step: keyof ConnectionSteps; value ? "connectionForm.defineSource" : step === "defineDestination" ? "connectionForm.defineDestination" - : "connectionForm.configureConnection"; + : step === "configureConnection" + ? 
"connectionForm.configureConnection" + : "connectionForm.selectStreams"; return ( @@ -176,7 +203,7 @@ export const CreateConnectionTitleBlock: React.FC = () => { const sourceId = searchParams.get(SOURCEID_PARAM); const destinationId = searchParams.get(DESTINATIONID_PARAM); - const stepStatuses = calculateStepStatuses(sourceId, destinationId); + const stepStatuses = useCalculateStepStatuses(sourceId, destinationId); if (!stepStatuses) { // this should not be a possible state, but we'll handle it just in case return ; diff --git a/airbyte-webapp/src/pages/connections/StreamStatusPage/ConnectionRefreshStreamModal.tsx b/airbyte-webapp/src/pages/connections/StreamStatusPage/ConnectionRefreshStreamModal.tsx new file mode 100644 index 00000000000..c23c506f111 --- /dev/null +++ b/airbyte-webapp/src/pages/connections/StreamStatusPage/ConnectionRefreshStreamModal.tsx @@ -0,0 +1,212 @@ +import { Controller, useFormContext } from "react-hook-form"; +import { FormattedMessage } from "react-intl"; +import * as yup from "yup"; + +import { RadioButtonTiles } from "components/connection/CreateConnection/RadioButtonTiles"; +import { Form } from "components/forms"; +import { FormSubmissionButtons } from "components/forms/FormSubmissionButtons"; +import { Box } from "components/ui/Box"; +import { Button } from "components/ui/Button"; +import { Text } from "components/ui/Text"; + +import { FeatureItem, useFeature } from "core/services/features"; +import { useZendesk } from "packages/cloud/services/thirdParty/zendesk"; + +interface ConnectionRefreshStreamModalProps { + onComplete: () => void; + onCancel: () => void; + canMerge: boolean; + canTruncate: boolean; + streamNamespace?: string; + streamName: string; + refreshStreams: (streams: Array<{ streamName: string; streamNamespace?: string }>) => Promise; +} + +export interface ConnectionRefreshStreamFormValues { + refreshType: "merge" | "truncate"; + streamName: string; + streamNamespace?: string; +} + +const MergeTruncateRadioButtons: 
React.FC = () => { + const { setValue, control } = useFormContext(); + + return ( +
      + { + return ( + , + description: ( + + ( + + {children} + + ), + }} + /> + + ), + }, + { + value: "truncate", + label: , + description: ( + + ( + + {children} + + ), + }} + /> + + ), + }, + ]} + selectedValue={field.value ?? ""} + onSelectRadioButton={(value) => { + setValue("refreshType", value, { shouldDirty: true }); + }} + name="refreshType" + /> + ); + }} + /> +
      + ); +}; + +export const ConnectionRefreshStreamModal: React.FC = ({ + canTruncate, + canMerge, + refreshStreams, + streamName, + streamNamespace, + onComplete, + onCancel, +}) => { + const { openZendesk } = useZendesk(); + const allowSupportChat = useFeature(FeatureItem.AllowInAppSupportChat); + + const onSubmitRefreshStreamForm = async (values: ConnectionRefreshStreamFormValues) => { + await refreshStreams([{ streamName: values.streamName, streamNamespace: values.streamNamespace }]); + onComplete(); + }; + + const refreshConnectionFormSchema = yup.object().shape({ + refreshType: yup.mixed().oneOf(["merge", "truncate"]).required(), + streamNamespace: yup.string().trim(), + streamName: yup.string().trim().required(), + }); + + return ( + <> + + + ( + + {children} + + ), + }} + /> + {canMerge && canTruncate && ( + + + + )} + + {allowSupportChat && ( + + + ( + + ), + }} + /> + + + )} + + + schema={refreshConnectionFormSchema} + onSubmit={async (values) => { + await onSubmitRefreshStreamForm(values); + }} + defaultValues={{ + streamName, + streamNamespace, + refreshType: canMerge ? "merge" : "truncate", + }} + > + {canMerge ? ( + canTruncate ? ( + + ) : ( + + + ( + + {children} + + ), + }} + /> + + + ) + ) : ( + + + ( + + {children} + + ), + }} + /> + + + )} + + + + + + ); +}; diff --git a/airbyte-webapp/src/pages/connections/StreamStatusPage/ConnectionStatusCard.tsx b/airbyte-webapp/src/pages/connections/StreamStatusPage/ConnectionStatusCard.tsx index 00acff7e9c6..4e401521d6f 100644 --- a/airbyte-webapp/src/pages/connections/StreamStatusPage/ConnectionStatusCard.tsx +++ b/airbyte-webapp/src/pages/connections/StreamStatusPage/ConnectionStatusCard.tsx @@ -33,7 +33,9 @@ export const ConnectionStatusCard: React.FC = () => { } />
      - + + + {showHistoricalOverview && }
      ); diff --git a/airbyte-webapp/src/pages/connections/StreamStatusPage/ConnectionStatusMessages.module.scss b/airbyte-webapp/src/pages/connections/StreamStatusPage/ConnectionStatusMessages.module.scss index cf024d44608..4136eeb0669 100644 --- a/airbyte-webapp/src/pages/connections/StreamStatusPage/ConnectionStatusMessages.module.scss +++ b/airbyte-webapp/src/pages/connections/StreamStatusPage/ConnectionStatusMessages.module.scss @@ -1,3 +1,5 @@ +@use "scss/mixins"; + .error { flex: 1; } @@ -5,3 +7,16 @@ .breakingChangeButton { align-self: center; } + +.internalErrorMessage { + font-family: monospace; + max-height: calc( + 100vh - var(--message-children-top-distance, 336px) - var(--message-children-bottom-distance, 25px) + ); // 336 offset + 25 for bottom padding + + overflow-y: auto; +} + +.buttonLikeLink { + @include mixins.link-text; +} diff --git a/airbyte-webapp/src/pages/connections/StreamStatusPage/ConnectionStatusMessages.tsx b/airbyte-webapp/src/pages/connections/StreamStatusPage/ConnectionStatusMessages.tsx index 8c2615cd254..81ad599c2f9 100644 --- a/airbyte-webapp/src/pages/connections/StreamStatusPage/ConnectionStatusMessages.tsx +++ b/airbyte-webapp/src/pages/connections/StreamStatusPage/ConnectionStatusMessages.tsx @@ -3,15 +3,19 @@ import { useIntl } from "react-intl"; import { useNavigate } from "react-router-dom"; import { useConnectionStatus } from "components/connection/ConnectionStatus/useConnectionStatus"; -import { Box } from "components/ui/Box"; -import { FlexContainer } from "components/ui/Flex"; +import { CopyButton } from "components/ui/CopyButton"; +import { FlexContainer, FlexItem } from "components/ui/Flex"; +import { Icon } from "components/ui/Icon"; +import { Link } from "components/ui/Link"; import { Message, MessageProps, MessageType, isHigherSeverity, MESSAGE_SEVERITY_LEVELS } from "components/ui/Message"; +import { Text } from "components/ui/Text"; import { useCurrentWorkspaceId } from "area/workspace/utils"; import { 
useDestinationDefinitionVersion, useSourceDefinitionVersion } from "core/api"; -import { ActorDefinitionVersionRead, FailureOrigin, FailureType } from "core/api/types/AirbyteClient"; +import { ActorDefinitionVersionRead, FailureOrigin } from "core/api/types/AirbyteClient"; import { shouldDisplayBreakingChangeBanner, getHumanReadableUpgradeDeadline } from "core/domain/connector"; import { FeatureItem, useFeature } from "core/services/features"; +import { failureUiDetailsFromReason } from "core/utils/errorStatusMessage"; import { useSchemaChanges } from "hooks/connection/useSchemaChanges"; import { useConnectionEditService } from "hooks/services/ConnectionEdit/ConnectionEditService"; import { ConnectionRoutePaths, RoutePaths } from "pages/routePaths"; @@ -74,27 +78,70 @@ export const ConnectionStatusMessages: React.FC = () => { // If we have an error message and no breaking schema changes, show the error message if (failureReason && !hasBreakingSchemaChange) { - const isConfigError = failureReason.failureType === FailureType.config_error; - const isSourceError = failureReason.failureOrigin === FailureOrigin.source; - const isDestinationError = failureReason.failureOrigin === FailureOrigin.destination; + const failureUiDetails = failureUiDetailsFromReason(failureReason, formatMessage); + + const isError = failureUiDetails.type === "error"; + if (isError) { + const isSourceError = failureUiDetails.origin === FailureOrigin.source; - if (isConfigError && (isSourceError || isDestinationError)) { const targetRoute = isSourceError ? RoutePaths.Source : RoutePaths.Destination; const targetRouteId = isSourceError ? 
connection.sourceId : connection.destinationId; const configError = { - text: failureReason.externalMessage, + text: formatMessage( + { id: "failureMessage.label" }, + { + type: ( + + {failureUiDetails.typeLabel}: + + ), + message: failureUiDetails.message, + } + ), onAction: () => navigate(`/${RoutePaths.Workspaces}/${workspaceId}/${targetRoute}/${targetRouteId}`), - actionBtnText: formatMessage({ id: "connection.stream.status.gotoSettings" }), - type: "warning", + actionBtnText: formatMessage({ + id: isSourceError + ? "connection.stream.status.checkSourceSettings" + : "connection.stream.status.checkDestinationSettings", + }), + type: "error", } as const; errorMessages.push(configError); } else { + const hasInternalErrorMessage = !!failureUiDetails.secondaryMessage; const goToLogError = { - text: failureReason.externalMessage, - onAction: () => navigate(`../${ConnectionRoutePaths.JobHistory}#${lastSyncJobId}::${lastSyncAttemptNumber}`), - actionBtnText: formatMessage({ id: "connection.stream.status.seeLogs" }), + text: formatMessage( + { id: "failureMessage.label" }, + { + type: ( + + {failureUiDetails.typeLabel}: + + ), + message: failureUiDetails.message, + } + ), type: "warning", + children: hasInternalErrorMessage && ( + + + {failureUiDetails.secondaryMessage} + + + + + + + + + ), + childrenClassName: styles.internalErrorMessage, + isExpandable: hasInternalErrorMessage, } as const; errorMessages.push(goToLogError); } @@ -222,18 +269,16 @@ export const ConnectionStatusMessages: React.FC = () => { if (errorMessagesToDisplay.length > 0) { return ( - - - {errorMessagesToDisplay.map((message, index) => ( - - ))} - - + + {errorMessagesToDisplay.map((message, index) => ( + + ))} + ); } diff --git a/airbyte-webapp/src/pages/connections/StreamStatusPage/ConnectionSyncStatusCard.tsx b/airbyte-webapp/src/pages/connections/StreamStatusPage/ConnectionSyncStatusCard.tsx new file mode 100644 index 00000000000..dfed09ed298 --- /dev/null +++ 
b/airbyte-webapp/src/pages/connections/StreamStatusPage/ConnectionSyncStatusCard.tsx @@ -0,0 +1,24 @@ +import { useIntl } from "react-intl"; + +import { Card } from "components/ui/Card"; + +import { HistoricalOverview } from "area/connection/components"; +import { FeatureItem, useFeature } from "core/services/features"; +import { useExperiment } from "hooks/services/Experiment"; + +export const ConnectionSyncStatusCard: React.FC = () => { + const { formatMessage } = useIntl(); + const showHistoricalOverviewFeature = useFeature(FeatureItem.ConnectionHistoryGraphs); + const showHistoricalOverviewExperiment = useExperiment("connection.streamCentricUI.historicalOverview", false); + const showHistoricalOverview = showHistoricalOverviewFeature && showHistoricalOverviewExperiment; + + if (!showHistoricalOverview) { + return null; + } + + return ( + + + + ); +}; diff --git a/airbyte-webapp/src/pages/connections/StreamStatusPage/StreamActionsMenu.module.scss b/airbyte-webapp/src/pages/connections/StreamStatusPage/StreamActionsMenu.module.scss new file mode 100644 index 00000000000..b238e892235 --- /dev/null +++ b/airbyte-webapp/src/pages/connections/StreamStatusPage/StreamActionsMenu.module.scss @@ -0,0 +1,11 @@ +@use "scss/colors"; + +.streamActionsMenu { + &__clearDataModalStreamName { + font-style: italic; + } + + &__clearDataLabel > p { + color: colors.$red; + } +} diff --git a/airbyte-webapp/src/pages/connections/StreamStatusPage/StreamActionsMenu.tsx b/airbyte-webapp/src/pages/connections/StreamStatusPage/StreamActionsMenu.tsx index bf109bbf583..64ee38d1b52 100644 --- a/airbyte-webapp/src/pages/connections/StreamStatusPage/StreamActionsMenu.tsx +++ b/airbyte-webapp/src/pages/connections/StreamStatusPage/StreamActionsMenu.tsx @@ -1,33 +1,86 @@ -import React from "react"; -import { useIntl } from "react-intl"; +import classNames from "classnames"; +import React, { useMemo } from "react"; +import { FormattedMessage, useIntl } from "react-intl"; import { useNavigate } 
from "react-router-dom"; import { useConnectionSyncContext } from "components/connection/ConnectionSync/ConnectionSyncContext"; import { StreamWithStatus } from "components/connection/StreamStatus/streamStatusUtils"; +import { Box } from "components/ui/Box"; import { Button } from "components/ui/Button"; import { DropdownMenu, DropdownMenuOptionType } from "components/ui/DropdownMenu"; -import { Icon } from "components/ui/Icon"; +import { Text } from "components/ui/Text"; +import { DestinationSyncMode, SyncMode } from "core/api/types/AirbyteClient"; +import { useConfirmationModalService } from "hooks/services/ConfirmationModal"; import { useConnectionFormService } from "hooks/services/ConnectionForm/ConnectionFormService"; +import { useExperiment } from "hooks/services/Experiment"; +import { useModalService } from "hooks/services/Modal"; +import { ConnectionRefreshStreamModal } from "pages/connections/StreamStatusPage/ConnectionRefreshStreamModal"; import { ConnectionRoutePaths } from "pages/routePaths"; +import styles from "./StreamActionsMenu.module.scss"; + interface StreamActionsMenuProps { - streamState?: StreamWithStatus | undefined; + streamState: StreamWithStatus; } export const StreamActionsMenu: React.FC = ({ streamState }) => { const { formatMessage } = useIntl(); const navigate = useNavigate(); + const sayClearInsteadOfReset = useExperiment("connection.clearNotReset", false); + const newRefreshTypes = useExperiment("platform.activate-refreshes", false); + const destinationSupportsTruncateRefreshes = false; // for local testing. this will be flagged on _only_ for a dev destination starting later in q1b. + const destinationSupportsMergeRefreshes = false; // for local testing. this will be flagged on _only_ for a dev destination starting later in q1b. 
+ + const { syncStarting, jobSyncRunning, resetStarting, jobResetRunning, resetStreams, refreshStreams } = + useConnectionSyncContext(); + const { mode, connection } = useConnectionFormService(); + const { openConfirmationModal, closeConfirmationModal } = useConfirmationModalService(); + const { openModal } = useModalService(); + + const catalogStream = connection.syncCatalog.streams.find( + (catalogStream) => + catalogStream.stream?.name === streamState.streamName && + catalogStream.stream?.namespace === streamState.streamNamespace + ); - const { syncStarting, jobSyncRunning, resetStarting, jobResetRunning, resetStreams } = useConnectionSyncContext(); - const { mode } = useConnectionFormService(); + /** + * In order to refresh a stream, both the destination AND the sync mode must support one of the refresh modes + * Currently, destination support is simply hardcoded in lines 32-33. However, we will be moving to a feature flag + * and/or destination metadata support before release. + */ + const { canMerge, canTruncate } = useMemo(() => { + const hasIncremental = catalogStream?.config?.syncMode === SyncMode.incremental; + const hasAppendDedupe = catalogStream?.config?.destinationSyncMode === DestinationSyncMode.append_dedup; + + return { + canMerge: hasIncremental && destinationSupportsMergeRefreshes, + canTruncate: hasIncremental && hasAppendDedupe && destinationSupportsTruncateRefreshes, + }; + }, [ + catalogStream?.config?.destinationSyncMode, + catalogStream?.config?.syncMode, + destinationSupportsMergeRefreshes, + destinationSupportsTruncateRefreshes, + ]); + + // the platform must support refresh operations AND the stream must support at least one of the refresh types + const showRefreshOption = newRefreshTypes && (canMerge || canTruncate); + + if (!catalogStream) { + return null; + } const options: DropdownMenuOptionType[] = [ - { - displayName: formatMessage({ id: "connection.stream.actions.resetThisStream" }), - value: "resetThisStream", - disabled: 
syncStarting || jobSyncRunning || resetStarting || jobResetRunning || mode === "readonly", - }, + ...(sayClearInsteadOfReset + ? [] + : [ + { + displayName: formatMessage({ id: "connection.stream.actions.resetThisStream" }), + value: "resetThisStream", + disabled: syncStarting || jobSyncRunning || resetStarting || jobResetRunning || mode === "readonly", + }, + ]), { displayName: formatMessage({ id: "connection.stream.actions.showInReplicationTable" }), value: "showInReplicationTable", @@ -36,6 +89,27 @@ export const StreamActionsMenu: React.FC = ({ streamStat displayName: formatMessage({ id: "connection.stream.actions.openDetails" }), value: "openDetails", }, + ...(showRefreshOption + ? [ + { + displayName: formatMessage({ id: "connection.stream.actions.refreshStream" }), + value: "refreshStream", + disabled: syncStarting || jobSyncRunning || resetStarting || jobResetRunning || mode === "readonly", + }, + ] + : []), + ...(!sayClearInsteadOfReset + ? [] + : [ + { + displayName: formatMessage({ + id: "connection.stream.actions.clearData", + }), + value: "clearStreamData", + disabled: syncStarting || jobSyncRunning || resetStarting || jobResetRunning || mode === "readonly", + className: classNames(styles.streamActionsMenu__clearDataLabel), + }, + ]), ]; const onOptionClick = async ({ value }: DropdownMenuOptionType) => { @@ -45,6 +119,64 @@ export const StreamActionsMenu: React.FC = ({ streamStat }); } + if (value === "clearStreamData") { + openConfirmationModal({ + title: ( + {streamState.streamName} + ), + }} + /> + ), + text: "connection.stream.actions.clearData.confirm.text", + additionalContent: ( + + + + + + ), + submitButtonText: "connection.stream.actions.clearData.confirm.submit", + cancelButtonText: "connection.stream.actions.clearData.confirm.cancel", + onSubmit: async () => { + await resetStreams([{ streamNamespace: streamState.streamNamespace, streamName: streamState.streamName }]); + closeConfirmationModal(); + }, + }); + } + + if (value === 
"refreshStream") { + openModal({ + size: "md", + title: ( + {streamState.streamName} + ), + }} + /> + ), + content: ({ onComplete, onCancel }) => { + return ( + + ); + }, + }); + } + if (value === "resetThisStream" && streamState) { await resetStreams([{ streamNamespace: streamState.streamNamespace, streamName: streamState.streamName }]); } @@ -52,7 +184,7 @@ export const StreamActionsMenu: React.FC = ({ streamStat return ( - {() => diff --git a/airbyte-webapp/src/pages/destination/DestinationItemPage/DestinationItemPage.tsx b/airbyte-webapp/src/pages/destination/DestinationItemPage/DestinationItemPage.tsx index b3e99f5eda5..7d79d22bc0f 100644 --- a/airbyte-webapp/src/pages/destination/DestinationItemPage/DestinationItemPage.tsx +++ b/airbyte-webapp/src/pages/destination/DestinationItemPage/DestinationItemPage.tsx @@ -3,7 +3,6 @@ import { useIntl } from "react-intl"; import { Outlet, useParams } from "react-router-dom"; import { LoadingPage } from "components"; -import { ApiErrorBoundary } from "components/common/ApiErrorBoundary"; import { HeadTitle } from "components/common/HeadTitle"; import { ConnectorNavigationTabs } from "components/connector/ConnectorNavigationTabs"; import { ConnectorTitleBlock } from "components/connector/ConnectorTitleBlock"; @@ -12,11 +11,9 @@ import { PageHeaderWithNavigation } from "components/ui/PageHeader"; import { useGetDestinationFromParams } from "area/connector/utils"; import { useDestinationDefinitionVersion, useDestinationDefinition } from "core/api"; +import { DefaultErrorBoundary } from "core/errors"; import { useTrackPage, PageTrackingCodes } from "core/services/analytics"; -import { useAppMonitoringService } from "hooks/services/AppMonitoringService"; import { RoutePaths } from "pages/routePaths"; -import { ResourceNotFoundErrorBoundary } from "views/common/ResourceNotFoundErrorBoundary"; -import { StartOverErrorView } from "views/common/StartOverErrorView"; import { ConnectorDocumentationWrapper } from 
"views/Connector/ConnectorDocumentationLayout"; export const DestinationItemPage: React.FC = () => { @@ -27,8 +24,6 @@ export const DestinationItemPage: React.FC = () => { const actorDefinitionVersion = useDestinationDefinitionVersion(destination.destinationId); const { formatMessage } = useIntl(); - const { trackError } = useAppMonitoringService(); - const breadcrumbBasePath = `/${RoutePaths.Workspaces}/${params.workspaceId}/${RoutePaths.Destination}`; const breadcrumbsData = [ @@ -40,7 +35,7 @@ export const DestinationItemPage: React.FC = () => { ]; return ( - } trackError={trackError}> + @@ -52,11 +47,11 @@ export const DestinationItemPage: React.FC = () => { }> - + - + - + ); }; diff --git a/airbyte-webapp/src/pages/destination/DestinationSettingsPage/DestinationSettingsPage.tsx b/airbyte-webapp/src/pages/destination/DestinationSettingsPage/DestinationSettingsPage.tsx index 40af86fa289..0334454692f 100644 --- a/airbyte-webapp/src/pages/destination/DestinationSettingsPage/DestinationSettingsPage.tsx +++ b/airbyte-webapp/src/pages/destination/DestinationSettingsPage/DestinationSettingsPage.tsx @@ -77,7 +77,7 @@ export const DestinationSettingsPage: React.FC = () => { ); }, [connectionsWithDestination]); - const onDeleteClick = useDeleteModal("destination", onDelete, modalAdditionalContent); + const onDeleteClick = useDeleteModal("destination", onDelete, modalAdditionalContent, destination.name); return (
      diff --git a/airbyte-webapp/src/pages/routes.tsx b/airbyte-webapp/src/pages/routes.tsx index 39589d8d3cb..52c29aa0dd2 100644 --- a/airbyte-webapp/src/pages/routes.tsx +++ b/airbyte-webapp/src/pages/routes.tsx @@ -2,13 +2,12 @@ import React, { useMemo } from "react"; import { Navigate, Route, Routes, useLocation, useSearchParams } from "react-router-dom"; import { useEffectOnce } from "react-use"; -import { ApiErrorBoundary } from "components/common/ApiErrorBoundary"; - import { useGetInstanceConfiguration, useInvalidateAllWorkspaceScopeOnChange, useListWorkspacesInfinite, } from "core/api"; +import { DefaultErrorBoundary } from "core/errors"; import { useAnalyticsIdentifyUser, useAnalyticsRegisterValues } from "core/services/analytics"; import { useAuthService } from "core/services/auth"; import { FeatureItem, useFeature } from "core/services/features"; @@ -49,7 +48,7 @@ const SourceSettingsPage = React.lazy(() => import("./source/SourceSettingsPage" const SourceConnectionsPage = React.lazy(() => import("./source/SourceConnectionsPage")); const AdvancedSettingsPage = React.lazy(() => import("./SettingsPage/pages/AdvancedSettingsPage")); -const WorkspacesPage = React.lazy(() => import("./workspaces/WorkspacesPage")); +const WorkspacesPage = React.lazy(() => import("./workspaces")); const useAddAnalyticsContextForWorkspace = (workspace: WorkspaceRead): void => { const analyticsContext = useMemo( @@ -60,7 +59,10 @@ const useAddAnalyticsContextForWorkspace = (workspace: WorkspaceRead): void => { [workspace.workspaceId, workspace.customerId] ); useAnalyticsRegisterValues(analyticsContext); - useAnalyticsIdentifyUser(workspace.workspaceId); + useAnalyticsIdentifyUser(workspace.workspaceId, { + protocol: window.location.protocol, + isLocalhost: window.location.hostname === "localhost" || window.location.hostname === "127.0.0.1", + }); }; const MainViewRoutes: React.FC = () => { @@ -72,7 +74,7 @@ const MainViewRoutes: React.FC = () => { return ( - + } /> @@ -119,7 
+121,7 @@ const MainViewRoutes: React.FC = () => { } /> - + ); }; diff --git a/airbyte-webapp/src/pages/source/AllSourcesPage/AllSourcesPage.tsx b/airbyte-webapp/src/pages/source/AllSourcesPage/AllSourcesPage.tsx index dcac2a3181a..5e12e434e84 100644 --- a/airbyte-webapp/src/pages/source/AllSourcesPage/AllSourcesPage.tsx +++ b/airbyte-webapp/src/pages/source/AllSourcesPage/AllSourcesPage.tsx @@ -1,4 +1,4 @@ -import React, { useDeferredValue, useMemo, useState } from "react"; +import React, { useDeferredValue, useMemo } from "react"; import { FormattedMessage } from "react-intl"; import { Navigate, useNavigate } from "react-router-dom"; @@ -10,31 +10,30 @@ import { Box } from "components/ui/Box"; import { Button } from "components/ui/Button"; import { Card } from "components/ui/Card"; import { Heading } from "components/ui/Heading"; -import { Icon } from "components/ui/Icon"; import { PageHeader } from "components/ui/PageHeader"; import { SearchInput } from "components/ui/SearchInput"; import { Text } from "components/ui/Text"; -import { useConnectionList, useCurrentWorkspace, useSourceList } from "core/api"; -import { useTrackPage, PageTrackingCodes } from "core/services/analytics"; +import { useConnectionList, useCurrentWorkspace, useFilters, useSourceList } from "core/api"; +import { SourceRead } from "core/api/types/AirbyteClient"; +import { PageTrackingCodes, useTrackPage } from "core/services/analytics"; import { useIntent } from "core/utils/rbac"; import styles from "./AllSourcesPage.module.scss"; import { SourcePaths } from "../../routePaths"; -const AllSourcesPage: React.FC = () => { +const AllSourcesPageInner: React.FC<{ sources: SourceRead[] }> = ({ sources }) => { const navigate = useNavigate(); useTrackPage(PageTrackingCodes.SOURCE_LIST); const onCreateSource = () => navigate(`${SourcePaths.SelectSourceNew}`); const { workspaceId } = useCurrentWorkspace(); const canCreateSource = useIntent("CreateSource", { workspaceId }); - const { sources } = 
useSourceList(); const connectionList = useConnectionList({ sourceId: sources.map(({ sourceId }) => sourceId) }); const connections = connectionList?.connections ?? []; const data = getEntityTableData(sources, connections, "source"); - const [searchFilter, setSearchFilter] = useState(""); - const debouncedSearchFilter = useDeferredValue(searchFilter); + const [{ search }, setFilterValue] = useFilters<{ search: string }>({ search: "" }); + const debouncedSearchFilter = useDeferredValue(search); const filteredSources = useMemo( () => filterBySearchEntityTableData(debouncedSearchFilter, data), @@ -53,13 +52,7 @@ const AllSourcesPage: React.FC = () => { } endComponent={ - } @@ -68,7 +61,7 @@ const AllSourcesPage: React.FC = () => { > - setSearchFilter(value)} /> + setFilterValue("search", value)} /> {filteredSources.length === 0 && ( @@ -85,4 +78,9 @@ const AllSourcesPage: React.FC = () => { ); }; +const AllSourcesPage: React.FC = () => { + const { sources } = useSourceList(); + return sources.length ? 
: ; +}; + export default AllSourcesPage; diff --git a/airbyte-webapp/src/pages/source/CreateSourcePage/CreateSourcePage.tsx b/airbyte-webapp/src/pages/source/CreateSourcePage/CreateSourcePage.tsx index 4d48b072390..9864ea3c5ca 100644 --- a/airbyte-webapp/src/pages/source/CreateSourcePage/CreateSourcePage.tsx +++ b/airbyte-webapp/src/pages/source/CreateSourcePage/CreateSourcePage.tsx @@ -8,14 +8,13 @@ import { FormPageContent } from "components/ConnectorBlocks"; import { Box } from "components/ui/Box"; import { Button } from "components/ui/Button"; import { FlexContainer } from "components/ui/Flex"; -import { Icon } from "components/ui/Icon"; import { PageHeaderWithNavigation } from "components/ui/PageHeader"; import { ConnectionConfiguration } from "area/connector/types"; -import { useSourceDefinitionList, useCreateSource } from "core/api"; -import { useTrackPage, PageTrackingCodes } from "core/services/analytics"; +import { useCreateSource, useSourceDefinitionList } from "core/api"; +import { PageTrackingCodes, useTrackPage } from "core/services/analytics"; import { useFormChangeTrackerService } from "hooks/services/FormChangeTracker"; -import { SourcePaths, RoutePaths } from "pages/routePaths"; +import { RoutePaths, SourcePaths } from "pages/routePaths"; import { ConnectorDocumentationWrapper } from "views/Connector/ConnectorDocumentationLayout/ConnectorDocumentationWrapper"; import { SourceForm } from "./SourceForm"; @@ -69,7 +68,7 @@ export const CreateSourcePage: React.FC = () => { - diff --git a/airbyte-webapp/src/pages/source/CreateSourcePage/SourceForm.tsx b/airbyte-webapp/src/pages/source/CreateSourcePage/SourceForm.tsx index d06fbf7b829..c17bd6ad737 100644 --- a/airbyte-webapp/src/pages/source/CreateSourcePage/SourceForm.tsx +++ b/airbyte-webapp/src/pages/source/CreateSourcePage/SourceForm.tsx @@ -7,10 +7,9 @@ import { FlexContainer } from "components/ui/Flex"; import { Heading } from "components/ui/Heading"; import { ConnectionConfiguration } from 
"area/connector/types"; -import { useGetSourceDefinitionSpecificationAsync, LogsRequestError } from "core/api"; +import { useGetSourceDefinitionSpecificationAsync } from "core/api"; import { SourceDefinitionRead } from "core/api/types/AirbyteClient"; import { Connector } from "core/domain/connector"; -import { FormError } from "core/utils/errorStatusMessage"; import { ConnectorCard } from "views/Connector/ConnectorCard"; import { ConnectorCardValues } from "views/Connector/ConnectorForm/types"; @@ -24,7 +23,6 @@ export interface SourceFormValues { interface SourceFormProps { onSubmit: (values: SourceFormValues) => Promise; sourceDefinitions: SourceDefinitionRead[]; - error?: FormError | null; selectedSourceDefinitionId?: string; } @@ -36,12 +34,7 @@ const hasSourceDefinitionId = (state: unknown): state is { sourceDefinitionId: s ); }; -export const SourceForm: React.FC = ({ - onSubmit, - sourceDefinitions, - error, - selectedSourceDefinitionId, -}) => { +export const SourceForm: React.FC = ({ onSubmit, sourceDefinitions, selectedSourceDefinitionId }) => { const location = useLocation(); const [sourceDefinitionId, setSourceDefinitionId] = useState( @@ -92,7 +85,6 @@ export const SourceForm: React.FC = ({ selectedConnectorDefinitionSpecification={sourceDefinitionSpecification} selectedConnectorDefinitionId={sourceDefinitionId} onSubmit={onSubmitForm} - jobInfo={LogsRequestError.extractJobInfo(error)} supportLevel={selectedSourceDefinition?.supportLevel} /> ); diff --git a/airbyte-webapp/src/pages/source/SourceItemPage/SourceItemPage.tsx b/airbyte-webapp/src/pages/source/SourceItemPage/SourceItemPage.tsx index f920e443449..79c352b6bd3 100644 --- a/airbyte-webapp/src/pages/source/SourceItemPage/SourceItemPage.tsx +++ b/airbyte-webapp/src/pages/source/SourceItemPage/SourceItemPage.tsx @@ -2,7 +2,6 @@ import React, { Suspense } from "react"; import { useIntl } from "react-intl"; import { Outlet, useParams } from "react-router-dom"; -import { ApiErrorBoundary } from 
"components/common/ApiErrorBoundary"; import { HeadTitle } from "components/common/HeadTitle"; import { ConnectorNavigationTabs } from "components/connector/ConnectorNavigationTabs"; import { ConnectorTitleBlock } from "components/connector/ConnectorTitleBlock"; @@ -12,11 +11,9 @@ import { PageHeaderWithNavigation } from "components/ui/PageHeader"; import { useGetSourceFromParams } from "area/connector/utils"; import { useSourceDefinitionVersion, useSourceDefinition } from "core/api"; +import { DefaultErrorBoundary } from "core/errors"; import { useTrackPage, PageTrackingCodes } from "core/services/analytics"; -import { useAppMonitoringService } from "hooks/services/AppMonitoringService"; import { RoutePaths } from "pages/routePaths"; -import { ResourceNotFoundErrorBoundary } from "views/common/ResourceNotFoundErrorBoundary"; -import { StartOverErrorView } from "views/common/StartOverErrorView"; import { ConnectorDocumentationWrapper } from "views/Connector/ConnectorDocumentationLayout"; export const SourceItemPage: React.FC = () => { @@ -37,10 +34,8 @@ export const SourceItemPage: React.FC = () => { { label: source.name }, ]; - const { trackError } = useAppMonitoringService(); - return ( - } trackError={trackError}> + @@ -52,11 +47,11 @@ export const SourceItemPage: React.FC = () => { }> - + - + - + ); }; diff --git a/airbyte-webapp/src/pages/source/SourceSettingsPage/SourceSettingsPage.tsx b/airbyte-webapp/src/pages/source/SourceSettingsPage/SourceSettingsPage.tsx index d707d965f52..01acd978386 100644 --- a/airbyte-webapp/src/pages/source/SourceSettingsPage/SourceSettingsPage.tsx +++ b/airbyte-webapp/src/pages/source/SourceSettingsPage/SourceSettingsPage.tsx @@ -74,7 +74,7 @@ export const SourceSettingsPage: React.FC = () => { ); }, [connectionsWithSource]); - const onDeleteClick = useDeleteModal("source", onDelete, modalAdditionalContent); + const onDeleteClick = useDeleteModal("source", onDelete, modalAdditionalContent, source.name); return (
      diff --git a/airbyte-webapp/src/pages/workspaces/WorkspacesPage.tsx b/airbyte-webapp/src/pages/workspaces/WorkspacesPage.tsx index 13bf4a8a3e0..b91382704c6 100644 --- a/airbyte-webapp/src/pages/workspaces/WorkspacesPage.tsx +++ b/airbyte-webapp/src/pages/workspaces/WorkspacesPage.tsx @@ -26,7 +26,7 @@ import styles from "./WorkspacesPage.module.scss"; export const WORKSPACE_LIST_LENGTH = 50; -const WorkspacesPage: React.FC = () => { +export const WorkspacesPage: React.FC = () => { const { isLoading: isLogoutLoading, mutateAsync: handleLogout } = useMutation(() => logout?.() ?? Promise.resolve()); useTrackPage(PageTrackingCodes.WORKSPACES); const [searchValue, setSearchValue] = useState(""); @@ -120,5 +120,3 @@ const WorkspacesPage: React.FC = () => { ); }; - -export default WorkspacesPage; diff --git a/airbyte-webapp/src/pages/workspaces/components/WorkspacesCreateControl.tsx b/airbyte-webapp/src/pages/workspaces/components/WorkspacesCreateControl.tsx index c600d264858..df4cb62c404 100644 --- a/airbyte-webapp/src/pages/workspaces/components/WorkspacesCreateControl.tsx +++ b/airbyte-webapp/src/pages/workspaces/components/WorkspacesCreateControl.tsx @@ -18,7 +18,6 @@ import { FormSubmissionButtons } from "components/forms/FormSubmissionButtons"; import { Box } from "components/ui/Box"; import { Button } from "components/ui/Button"; import { Card } from "components/ui/Card"; -import { Icon } from "components/ui/Icon"; import { Text } from "components/ui/Text"; import { useListWorkspaces } from "core/api"; @@ -105,7 +104,7 @@ export const WorkspacesCreateControl: React.FC} + icon="plus" className={styles.createButton} > diff --git a/airbyte-webapp/src/pages/workspaces/index.tsx b/airbyte-webapp/src/pages/workspaces/index.tsx index 78fe36a1baf..979270bc9b0 100644 --- a/airbyte-webapp/src/pages/workspaces/index.tsx +++ b/airbyte-webapp/src/pages/workspaces/index.tsx @@ -1,3 +1 @@ -import WorkspacesPage from "./WorkspacesPage"; - -export default WorkspacesPage; 
+export { WorkspacesPage as default } from "./WorkspacesPage"; diff --git a/airbyte-webapp/src/scss/_mixins.scss b/airbyte-webapp/src/scss/_mixins.scss index 390a66c46a7..896eb6aac60 100644 --- a/airbyte-webapp/src/scss/_mixins.scss +++ b/airbyte-webapp/src/scss/_mixins.scss @@ -1,3 +1,4 @@ +@use "./colors"; @use "./variables"; @mixin overflow-ellipsis { @@ -67,3 +68,44 @@ $stripes-width: 83px; white-space: nowrap; width: 1px; } + +// need something to look like a button but don't want to @forward the button module, this is it! +@mixin base-button { + // base "button" + transition: 0.2s ease-in; + display: inline-flex; + align-items: center; + justify-content: center; + text-decoration: none; + border-radius: variables.$border-radius-sm; + font-weight: 600; + cursor: pointer; + + // sizeXS + height: variables.$button-height-xs; + font-size: variables.$font-size-sm; + line-height: 15px; + padding: 10px; +} + +// looks like a buttonp[variant=link], like if you need a to look like a button +@mixin link-text { + @include base-button; + + // secondary + color: colors.$grey-400; + border: 1px solid colors.$grey-300; + + &:hover { + border-color: colors.$grey-400; + color: colors.$grey-500; + } + + &:active { + border-color: colors.$grey-500; + color: colors.$grey-500; + } + + // custom background (secondary button has no background) + background-color: colors.$foreground; +} diff --git a/airbyte-webapp/src/scss/_variables.scss b/airbyte-webapp/src/scss/_variables.scss index 701a7482dbb..e23bfab098c 100644 --- a/airbyte-webapp/src/scss/_variables.scss +++ b/airbyte-webapp/src/scss/_variables.scss @@ -24,6 +24,7 @@ $box-shadow-popup: var(--box-shadow-popup); $box-shadow-sidebar: var(--box-shadow-sidebar); $box-shadow-inset: var(--box-shadow-inset); $box-shadow-menu: var(--box-shadow-menu); +$box-shadow-highlight: 0 0 47px -5px; $spacing-xs: 3px; $spacing-sm: 5px; diff --git a/airbyte-webapp/src/scss/connection/_stream-status-colors.scss 
b/airbyte-webapp/src/scss/connection/_stream-status-colors.scss index b9a005beeb7..506a62bae5d 100644 --- a/airbyte-webapp/src/scss/connection/_stream-status-colors.scss +++ b/airbyte-webapp/src/scss/connection/_stream-status-colors.scss @@ -9,12 +9,12 @@ $up-to-date: colors.$green; $cancelled: colors.$grey-400; $action-required-light: colors.$dark-blue-40; -$disabled-light: colors.$grey-40; +$disabled-light: colors.$grey-100; $error-light: colors.$red-40; $late-light: colors.$blue-40; -$pending-light: colors.$grey-40; +$pending-light: colors.$grey-100; $up-to-date-light: colors.$green-40; -$cancelled-light: colors.$grey-40; +$cancelled-light: colors.$grey-100; $by-stream-status: "actionRequired" $action-required $action-required-light, "disabled" $disabled $disabled-light, "error" $error $error-light, "late" $late $late-light, "pending" $pending $pending-light, diff --git a/airbyte-webapp/src/services/connectorBuilder/ConnectorBuilderStateService.tsx b/airbyte-webapp/src/services/connectorBuilder/ConnectorBuilderStateService.tsx index ff2d426a10a..c65c89f34d8 100644 --- a/airbyte-webapp/src/services/connectorBuilder/ConnectorBuilderStateService.tsx +++ b/airbyte-webapp/src/services/connectorBuilder/ConnectorBuilderStateService.tsx @@ -2,6 +2,7 @@ import { UseMutateAsyncFunction, UseQueryResult } from "@tanstack/react-query"; import { dump } from "js-yaml"; import cloneDeep from "lodash/cloneDeep"; import isEqual from "lodash/isEqual"; +import toPath from "lodash/toPath"; import React, { useCallback, useContext, useEffect, useMemo, useRef, useState } from "react"; import { useFormContext, UseFormReturn } from "react-hook-form"; import { useIntl } from "react-intl"; @@ -12,12 +13,13 @@ import { WaitForSavingModal } from "components/connectorBuilder/Builder/WaitForS import { convertToBuilderFormValuesSync } from "components/connectorBuilder/convertManifestToBuilderForm"; import { BuilderState, + convertToManifest, DEFAULT_BUILDER_FORM_VALUES, 
DEFAULT_JSON_MANIFEST_VALUES, - convertToManifest, + getManifestValuePerComponentPerStream, useBuilderWatch, } from "components/connectorBuilder/types"; -import { useManifestToBuilderForm } from "components/connectorBuilder/useManifestToBuilderForm"; +import { useUpdateLockedInputs } from "components/connectorBuilder/useLockedInputs"; import { formatJson } from "components/connectorBuilder/utils"; import { useCurrentWorkspaceId } from "area/workspace/utils"; @@ -25,17 +27,17 @@ import { BuilderProject, BuilderProjectPublishBody, BuilderProjectWithManifest, - CommonRequestError, + HttpError, NewVersionBody, useBuilderProject, - usePublishBuilderProject, - useReleaseNewBuilderProjectVersion, - useUpdateBuilderProject, + useBuilderProjectReadStream, + useBuilderProjectUpdateTestingValues, useBuilderResolvedManifest, useBuilderResolvedManifestSuspense, useCurrentWorkspace, - useBuilderProjectReadStream, - useBuilderProjectUpdateTestingValues, + usePublishBuilderProject, + useReleaseNewBuilderProjectVersion, + useUpdateBuilderProject, } from "core/api"; import { useIsForeignWorkspace } from "core/api/cloud"; import { @@ -67,6 +69,13 @@ export type SavingState = "loading" | "invalid" | "saved" | "error" | "readonly" export type ConnectorBuilderPermission = "write" | "readOnly" | "adminReadOnly"; +export type TestingValuesUpdate = UseMutateAsyncFunction< + ConnectorBuilderProjectTestingValues, + Error, + Omit, + unknown +>; + interface FormStateContext { jsonManifest: DeclarativeComponentSchema; yamlEditorIsMounted: boolean; @@ -79,6 +88,11 @@ interface FormStateContext { previousManifestDraft: DeclarativeComponentSchema | undefined; displayedVersion: number | undefined; formValuesValid: boolean; + resolvedManifest: ConnectorManifest; + resolveErrorMessage: string | undefined; + resolveError: HttpError | null; + isResolving: boolean; + streamNames: string[]; setDisplayedVersion: (value: number | undefined, manifest: DeclarativeComponentSchema) => void; updateJsonManifest: 
(jsonValue: ConnectorManifest) => void; setYamlIsValid: (value: boolean) => void; @@ -88,12 +102,7 @@ interface FormStateContext { releaseNewVersion: (options: NewVersionBody) => Promise; toggleUI: (newMode: BuilderState["mode"]) => Promise; setFormValuesValid: (value: boolean) => void; - updateTestingValues: UseMutateAsyncFunction< - ConnectorBuilderProjectTestingValues, - Error, - Omit, - unknown - >; + updateTestingValues: TestingValuesUpdate; } interface TestReadLimits { @@ -103,9 +112,6 @@ interface TestReadLimits { } export interface TestReadContext { - resolvedManifest: ConnectorManifest; - resolveErrorMessage: string | undefined; - resolveError: CommonRequestError | null; streamRead: UseQueryResult; testReadLimits: { recordLimit: number; @@ -116,7 +122,6 @@ export interface TestReadContext { setSliceLimit: (newSliceLimit: number) => void; defaultLimits: TestReadLimits; }; - isResolving: boolean; schemaWarnings: { schemaDifferences: boolean; incompatibleSchemaErrors: string[] | undefined; @@ -128,7 +133,7 @@ interface FormManagementStateContext { setTestingValuesInputOpen: (open: boolean) => void; isTestReadSettingsOpen: boolean; setTestReadSettingsOpen: (open: boolean) => void; - scrollToField: string | undefined; + handleScrollToField: (ref: React.RefObject, path: string) => void; setScrollToField: (field: string | undefined) => void; stateKey: number; setStateKey: React.Dispatch>; @@ -157,15 +162,45 @@ export const ConnectorBuilderFormStateProvider: React.FC = [ + "version", + "type", + "check", + "definitions", + "streams", + "spec", + "metadata", + "schemas", +]; +export function convertJsonToYaml(json: ConnectorManifest): string { + const yamlString = dump(json, { noRefs: true, + sortKeys: (a: keyof ConnectorManifest, b: keyof ConnectorManifest) => { + const orderA = MANIFEST_KEY_ORDER.indexOf(a); + const orderB = MANIFEST_KEY_ORDER.indexOf(b); + if (orderA === -1 && orderB === -1) { + return 0; + } + if (orderA === -1) { + return 1; + } + if (orderB 
=== -1) { + return -1; + } + return orderA - orderB; + }, + }); + + // add newlines between root-level fields + return yamlString.replace(/^\S+.*/gm, (match, offset) => { + return offset > 0 ? `\n${match}` : match; }); } export const InternalConnectorBuilderFormStateProvider: React.FC< React.PropsWithChildren<{ permission: ConnectorBuilderPermission }> > = ({ children, permission }) => { + const { formatMessage } = useIntl(); const { projectId, builderProject, updateProject, updateError } = useInitializedBuilderProject(); const currentProject: BuilderProject = useMemo( @@ -183,7 +218,6 @@ export const InternalConnectorBuilderFormStateProvider: React.FC< const { setStateKey } = useConnectorBuilderFormManagementState(); const { setStoredMode } = useConnectorBuilderLocalStorage(); - const { convertToBuilderFormValues } = useManifestToBuilderForm(); const { openConfirmationModal, closeConfirmationModal } = useConfirmationModalService(); const analyticsService = useAnalyticsService(); @@ -198,10 +232,51 @@ export const InternalConnectorBuilderFormStateProvider: React.FC< const [yamlEditorIsMounted, setYamlEditorIsMounted] = useState(true); const [formValuesValid, setFormValuesValid] = useState(true); + const workspaceId = useCurrentWorkspaceId(); + const { setValue, getValues } = useFormContext(); const mode = useBuilderWatch("mode"); const name = useBuilderWatch("name"); + const manifestValuePerComponentPerStream = useMemo( + () => (mode === "ui" ? 
getManifestValuePerComponentPerStream(jsonManifest) : undefined), + [jsonManifest, mode] + ); + + const { + data: resolveData, + isError: isResolveError, + error: resolveError, + isFetching: isResolving, + } = useBuilderResolvedManifest( + { + manifest: jsonManifest, + workspace_id: workspaceId, + project_id: projectId, + form_generated_manifest: mode === "ui", + }, + // In UI mode, we only need to call resolve if we have YAML components + mode === "yaml" || (mode === "ui" && !!jsonManifest.metadata?.yamlComponents), + manifestValuePerComponentPerStream + ); + const unknownErrorMessage = formatMessage({ id: "connectorBuilder.unknownError" }); + const resolveErrorMessage = isResolveError + ? resolveError instanceof HttpError + ? resolveError.response?.message || unknownErrorMessage + : unknownErrorMessage + : undefined; + + // In UI mode, we can treat the jsonManifest as resolved, since the resolve call is only used to check for invalid YAML + // components in that case. + // Using the resolve data manifest as the resolved manifest would introduce an unnecessary lag effect in UI mode, where + // test reads would use the old manifest until the resolve call completes. + const resolvedManifest = + mode === "ui" ? jsonManifest : ((resolveData?.manifest ?? DEFAULT_JSON_MANIFEST_VALUES) as ConnectorManifest); + + const streams = useBuilderWatch("formValues.streams"); + const streamNames = + mode === "ui" ? streams.map((stream) => stream.name) : resolvedManifest.streams.map((stream) => stream.name ?? 
""); + useEffect(() => { if (name !== currentProject.name) { setPreviousManifestDraft(undefined); @@ -229,31 +304,14 @@ export const InternalConnectorBuilderFormStateProvider: React.FC< const toggleUI = useCallback( async (newMode: BuilderState["mode"]) => { if (newMode === "yaml") { - setValue( - "yaml", - dump(jsonManifest, { - noRefs: true, - }) - ); + setValue("yaml", convertJsonToYaml(jsonManifest)); setYamlIsValid(true); setValue("mode", "yaml"); } else { - try { - if (jsonManifest === DEFAULT_JSON_MANIFEST_VALUES) { - setValue("mode", "ui"); - return; - } - const convertedFormValues = await convertToBuilderFormValues(jsonManifest, projectId); - const convertedManifest = removeEmptyProperties(convertToManifest(convertedFormValues)); - // set jsonManifest first so that a save isn't triggered - setJsonManifest(convertedManifest); - setPersistedState({ name: currentProject.name, manifest: convertedManifest }); - setValue("formValues", convertedFormValues, { shouldValidate: true }); - setValue("mode", "ui"); - } catch (e) { + const confirmDiscard = (errorMessage: string) => openConfirmationModal({ text: "connectorBuilder.toggleModal.text", - textValues: { error: e.message as string }, + textValues: { error: errorMessage }, title: "connectorBuilder.toggleModal.title", submitButtonText: "connectorBuilder.toggleModal.submitButton", onSubmit: () => { @@ -264,6 +322,25 @@ export const InternalConnectorBuilderFormStateProvider: React.FC< }); }, }); + + try { + if (jsonManifest === DEFAULT_JSON_MANIFEST_VALUES) { + setValue("mode", "ui"); + return; + } + if (isResolveError) { + confirmDiscard(resolveErrorMessage!); + return; + } + const convertedFormValues = convertToBuilderFormValuesSync(resolvedManifest); + const convertedManifest = removeEmptyProperties(convertToManifest(convertedFormValues)); + // set jsonManifest first so that a save isn't triggered + setJsonManifest(convertedManifest); + setPersistedState({ name: currentProject.name, manifest: convertedManifest 
}); + setValue("formValues", convertedFormValues, { shouldValidate: true }); + setValue("mode", "ui"); + } catch (e) { + confirmDiscard(e.message); analyticsService.track(Namespace.CONNECTOR_BUILDER, Action.YAML_TO_UI_CONVERSION_FAILURE, { actionDescription: "Failure occured when converting from YAML to UI", error_message: e.message, @@ -274,11 +351,12 @@ export const InternalConnectorBuilderFormStateProvider: React.FC< [ analyticsService, closeConfirmationModal, - convertToBuilderFormValues, currentProject.name, + isResolveError, jsonManifest, openConfirmationModal, - projectId, + resolveErrorMessage, + resolvedManifest, setValue, ] ); @@ -353,10 +431,12 @@ export const InternalConnectorBuilderFormStateProvider: React.FC< [sendNewVersionRequest] ); + const formAndResolveValid = useMemo(() => formValuesValid && resolveError === null, [formValuesValid, resolveError]); + const savingState = getSavingState( jsonManifest, yamlIsValid, - formValuesValid, + formAndResolveValid, mode, name, persistedState, @@ -377,13 +457,17 @@ export const InternalConnectorBuilderFormStateProvider: React.FC< return; } // do not save invalid ui-based manifest (e.g. 
no streams), but always save yaml-based manifest - if (modeRef.current === "ui" && !formValuesValid) { + if (modeRef.current === "ui" && !formAndResolveValid) { return; } - const newProject: BuilderProjectWithManifest = { name, manifest: jsonManifest }; + const newProject: BuilderProjectWithManifest = { + name, + manifest: jsonManifest, + yamlManifest: convertJsonToYaml(jsonManifest), + }; await updateProject(newProject); setPersistedState(newProject); - }, [permission, name, formValuesValid, jsonManifest, updateProject]); + }, [permission, name, formAndResolveValid, jsonManifest, updateProject]); useDebounce( () => { @@ -409,6 +493,8 @@ export const InternalConnectorBuilderFormStateProvider: React.FC< useUpdateTestingValuesOnSpecChange(jsonManifest.spec, updateTestingValues); + useUpdateLockedInputs(); + const ctx: FormStateContext = { jsonManifest, yamlEditorIsMounted, @@ -421,6 +507,11 @@ export const InternalConnectorBuilderFormStateProvider: React.FC< previousManifestDraft, displayedVersion, formValuesValid, + resolvedManifest, + resolveError, + resolveErrorMessage, + isResolving, + streamNames, setDisplayedVersion: setToVersion, updateJsonManifest, setYamlIsValid, @@ -492,23 +583,21 @@ export function useInitializedBuilderProject() { } const builderProject = useBuilderProject(projectId); const { mutateAsync: updateProject, error: updateError } = useUpdateBuilderProject(projectId); + const persistedManifest = + (builderProject.declarativeManifest?.manifest as ConnectorManifest) ?? DEFAULT_JSON_MANIFEST_VALUES; const resolvedManifest = useBuilderResolvedManifestSuspense(builderProject.declarativeManifest?.manifest, projectId); const [initialFormValues, failedInitialFormValueConversion, initialYaml] = useMemo(() => { if (!resolvedManifest) { // could not resolve manifest, use default form values - return [ - DEFAULT_BUILDER_FORM_VALUES, - true, - convertJsonToYaml(builderProject.declarativeManifest?.manifest ?? 
DEFAULT_JSON_MANIFEST_VALUES), - ]; + return [DEFAULT_BUILDER_FORM_VALUES, true, convertJsonToYaml(persistedManifest)]; } try { - return [convertToBuilderFormValuesSync(resolvedManifest), false, convertJsonToYaml(resolvedManifest)]; + return [convertToBuilderFormValuesSync(resolvedManifest), false, convertJsonToYaml(persistedManifest)]; } catch (e) { // could not convert to form values, use default form values - return [DEFAULT_BUILDER_FORM_VALUES, true, convertJsonToYaml(resolvedManifest)]; + return [DEFAULT_BUILDER_FORM_VALUES, true, convertJsonToYaml(persistedManifest)]; } - }, [builderProject.declarativeManifest?.manifest, resolvedManifest]); + }, [persistedManifest, resolvedManifest]); return { projectId, @@ -537,9 +626,7 @@ function useBlockOnSavingState(savingState: SavingState) { closeConfirmationModal(); blocker.proceed(); }, - onClose: () => { - setBlockedOnInvalidState(false); - }, + onCancel: () => setBlockedOnInvalidState(false), }); } else { setPendingBlocker(blocker); @@ -562,7 +649,7 @@ function useBlockOnSavingState(savingState: SavingState) { function getSavingState( currentJsonManifest: ConnectorManifest, yamlIsValid: boolean, - formValuesValid: boolean, + formAndResolveValid: boolean, mode: BuilderState["mode"], name: string | undefined, persistedState: { name: string; manifest?: DeclarativeComponentSchema }, @@ -576,7 +663,7 @@ function getSavingState( if (name === undefined) { return "invalid"; } - if (mode === "ui" && !formValuesValid) { + if (mode === "ui" && !formAndResolveValid) { return "invalid"; } if (mode === "yaml" && !yamlIsValid) { @@ -595,37 +682,13 @@ function getSavingState( } export const ConnectorBuilderTestReadProvider: React.FC> = ({ children }) => { - const { formatMessage } = useIntl(); const workspaceId = useCurrentWorkspaceId(); - const { jsonManifest, projectId } = useConnectorBuilderFormState(); + const { projectId, resolvedManifest } = useConnectorBuilderFormState(); const { setValue } = useFormContext(); const mode = 
useBuilderWatch("mode"); const view = useBuilderWatch("view"); const testStreamIndex = useBuilderWatch("testStreamIndex"); - - const manifest = jsonManifest ?? DEFAULT_JSON_MANIFEST_VALUES; - - const { - data, - isError: isResolveError, - error: resolveError, - isFetching: isResolving, - } = useBuilderResolvedManifest( - { - manifest, - workspace_id: workspaceId, - project_id: projectId, - form_generated_manifest: mode === "ui", - }, - // don't need to resolve manifest in UI mode since it doesn't use $refs or $parameters - mode === "yaml" - ); - const unknownErrorMessage = formatMessage({ id: "connectorBuilder.unknownError" }); - const resolveErrorMessage = isResolveError - ? resolveError instanceof Error - ? resolveError.message || unknownErrorMessage - : unknownErrorMessage - : undefined; + const streams = useBuilderWatch("formValues.streams"); useEffect(() => { if (typeof view === "number") { @@ -633,14 +696,12 @@ export const ConnectorBuilderTestReadProvider: React.FC { export const useSelectedPageAndSlice = () => { const { resolvedManifest: { streams }, - } = useConnectorBuilderTestRead(); + } = useConnectorBuilderFormState(); const testStreamIndex = useBuilderWatch("testStreamIndex"); const selectedStreamName = streams[testStreamIndex]?.name ?? ""; @@ -795,6 +852,11 @@ export const useSelectedPageAndSlice = () => { return { selectedSlice, selectedPage, setSelectedSlice, setSelectedPage }; }; +// check whether paths are equal, normalizing [] and . 
notation +function arePathsEqual(path1: string, path2: string) { + return isEqual(toPath(path1), toPath(path2)); +} + export const ConnectorBuilderFormManagementStateProvider: React.FC> = ({ children, }) => { @@ -803,18 +865,28 @@ export const ConnectorBuilderFormManagementStateProvider: React.FC(undefined); const [stateKey, setStateKey] = useState(0); + const handleScrollToField = useCallback( + (ref: React.RefObject, path: string) => { + if (ref.current && scrollToField && arePathsEqual(path, scrollToField)) { + ref.current.scrollIntoView({ block: "center" }); + setScrollToField(undefined); + } + }, + [scrollToField] + ); + const ctx = useMemo( () => ({ isTestingValuesInputOpen, setTestingValuesInputOpen, isTestReadSettingsOpen, setTestReadSettingsOpen, - scrollToField, + handleScrollToField, setScrollToField, stateKey, setStateKey, }), - [isTestingValuesInputOpen, isTestReadSettingsOpen, scrollToField, stateKey] + [isTestingValuesInputOpen, isTestReadSettingsOpen, handleScrollToField, stateKey] ); return ( diff --git a/airbyte-webapp/src/test-utils/mock-data/mockInstanceConfig.ts b/airbyte-webapp/src/test-utils/mock-data/mockInstanceConfig.ts index 65c537d8e7e..ecc66bc6522 100644 --- a/airbyte-webapp/src/test-utils/mock-data/mockInstanceConfig.ts +++ b/airbyte-webapp/src/test-utils/mock-data/mockInstanceConfig.ts @@ -7,6 +7,7 @@ export const mockProInstanceConfig: InstanceConfigurationResponse = { }, webappUrl: "http://test-airbyte-webapp-url.com", edition: "pro", + version: "0.50.1", licenseType: "pro", initialSetupComplete: true, defaultUserId: "00000000-0000-0000-0000-000000000000", diff --git a/airbyte-webapp/src/test-utils/setup-tests.ts b/airbyte-webapp/src/test-utils/setup-tests.ts index e846df72298..4f3ba839be1 100644 --- a/airbyte-webapp/src/test-utils/setup-tests.ts +++ b/airbyte-webapp/src/test-utils/setup-tests.ts @@ -22,3 +22,8 @@ global.ResizeObserver = jest.fn().mockImplementation(() => ({ unobserve: jest.fn(), disconnect: jest.fn(), })); + +// 
retry failed tests when configured to (e.g. `test:ci`) +if (process.env.JEST_RETRIES) { + jest.retryTimes(parseInt(process.env.JEST_RETRIES, 10)); +} diff --git a/airbyte-webapp/src/test-utils/testutils.tsx b/airbyte-webapp/src/test-utils/testutils.tsx index 9423523021f..d318f8e9b60 100644 --- a/airbyte-webapp/src/test-utils/testutils.tsx +++ b/airbyte-webapp/src/test-utils/testutils.tsx @@ -11,7 +11,6 @@ import { SourceRead, WebBackendConnectionRead, } from "core/api/types/AirbyteClient"; -import { ConfigContext, config } from "core/config"; import { defaultOssFeatures, FeatureItem, FeatureService } from "core/services/features"; import { ConfirmationModalService } from "hooks/services/ConfirmationModal"; import { ModalServiceProvider } from "hooks/services/Modal"; @@ -51,19 +50,17 @@ export const TestWrapper: React.FC> features = defaultOssFeatures, }) => ( null}> - - - - - - - {children} - - - - - - + + + + + + {children} + + + + + ); diff --git a/airbyte-webapp/src/types/react-widgets.d.ts b/airbyte-webapp/src/types/react-widgets.d.ts deleted file mode 100644 index f9c6a22bf4e..00000000000 --- a/airbyte-webapp/src/types/react-widgets.d.ts +++ /dev/null @@ -1,3 +0,0 @@ -// List typings are not exported from react-widgets - -declare module "react-widgets/lib/List"; diff --git a/airbyte-webapp/src/types/rehype-urls.d.ts b/airbyte-webapp/src/types/rehype-urls.d.ts deleted file mode 100644 index 94f2316f72e..00000000000 --- a/airbyte-webapp/src/types/rehype-urls.d.ts +++ /dev/null @@ -1 +0,0 @@ -declare module "rehype-urls"; diff --git a/airbyte-webapp/src/views/Connector/ConnectorCard/ConnectorCard.tsx b/airbyte-webapp/src/views/Connector/ConnectorCard/ConnectorCard.tsx index b90641ce1fd..9976e49ee2c 100644 --- a/airbyte-webapp/src/views/Connector/ConnectorCard/ConnectorCard.tsx +++ b/airbyte-webapp/src/views/Connector/ConnectorCard/ConnectorCard.tsx @@ -12,8 +12,8 @@ import { Pre } from "components/ui/Pre"; import { Spinner } from "components/ui/Spinner"; import { 
useAirbyteCloudIps } from "area/connector/utils/useAirbyteCloudIps"; -import { LogsRequestError } from "core/api"; -import { DestinationRead, SourceRead, SupportLevel, SynchronousJobRead } from "core/api/types/AirbyteClient"; +import { ErrorWithJobInfo } from "core/api"; +import { DestinationRead, SourceRead, SupportLevel } from "core/api/types/AirbyteClient"; import { Connector, ConnectorDefinition, @@ -42,7 +42,6 @@ interface ConnectorCardBaseProps { headerBlock?: React.ReactNode; description?: React.ReactNode; full?: boolean; - jobInfo?: SynchronousJobRead | null; onSubmit: (values: ConnectorCardValues) => Promise | void; reloadConfig?: () => void; onDeleteClick?: () => void; @@ -79,7 +78,6 @@ const getConnectorId = (connectorRead: DestinationRead | SourceRead) => { }; export const ConnectorCard: React.FC = ({ - jobInfo, onSubmit, onDeleteClick, selectedConnectorDefinitionId, @@ -142,7 +140,7 @@ export const ConnectorCard: React.FC { resetConnectorForm(); }} diff --git a/airbyte-webapp/src/views/Connector/ConnectorCard/components/TestCard.tsx b/airbyte-webapp/src/views/Connector/ConnectorCard/components/TestCard.tsx index d401ce56e44..c427c7ae043 100644 --- a/airbyte-webapp/src/views/Connector/ConnectorCard/components/TestCard.tsx +++ b/airbyte-webapp/src/views/Connector/ConnectorCard/components/TestCard.tsx @@ -5,7 +5,6 @@ import { JobFailure } from "components/JobFailure"; import { Button } from "components/ui/Button"; import { Card } from "components/ui/Card"; import { FlexContainer, FlexItem } from "components/ui/Flex"; -import { Icon } from "components/ui/Icon"; import { ProgressBar } from "components/ui/ProgressBar"; import { Text } from "components/ui/Text"; @@ -62,7 +61,7 @@ export const TestCard: React.FC = ({ {isTestConnectionInProgress || !isEditMode ? ( - )} -
      -); diff --git a/airbyte-webapp/src/views/Connector/ConnectorDocumentationLayout/DocumentationPanel.tsx b/airbyte-webapp/src/views/Connector/ConnectorDocumentationLayout/DocumentationPanel.tsx index 074f8ff8b07..92f38c86644 100644 --- a/airbyte-webapp/src/views/Connector/ConnectorDocumentationLayout/DocumentationPanel.tsx +++ b/airbyte-webapp/src/views/Connector/ConnectorDocumentationLayout/DocumentationPanel.tsx @@ -9,7 +9,6 @@ import { LoadingPage } from "components"; import { Button } from "components/ui/Button"; import { FlexContainer } from "components/ui/Flex"; import { Heading } from "components/ui/Heading"; -import { Icon } from "components/ui/Icon"; import { ExternalLink } from "components/ui/Link"; import { Markdown } from "components/ui/Markdown"; @@ -49,7 +48,7 @@ const ImgRelativePathReplacer: React.FC< if (src === undefined || actorType === undefined) { newSrc = src; - } else if (src.startsWith("../")) { + } else if (src.startsWith("../") || src.startsWith("./")) { if (isDev) { newSrc = actorType === "source" ? 
path.join(LOCAL_DOCS_SOURCES_PATH, src) : path.join(LOCAL_DOCS_DESTINATIONS_PATH, src); @@ -74,7 +73,7 @@ const LinkRelativePathReplacer: React.FC< {children} ); - } else if (href && href.startsWith("../")) { + } else if (href && (href.startsWith("../") || href.startsWith("./"))) { const docPath = href.replace(/\.md$/, ""); const url = actorType === "source" @@ -179,7 +178,7 @@ export const DocumentationPanel: React.FC = () => { - diff --git a/airbyte-webapp/src/views/Connector/ConnectorDocumentationLayout/ResourceNotAvailable/ResourceNotAvailable.module.scss b/airbyte-webapp/src/views/Connector/ConnectorDocumentationLayout/ResourceNotAvailable/ResourceNotAvailable.module.scss deleted file mode 100644 index 0fce061b21e..00000000000 --- a/airbyte-webapp/src/views/Connector/ConnectorDocumentationLayout/ResourceNotAvailable/ResourceNotAvailable.module.scss +++ /dev/null @@ -1,32 +0,0 @@ -@use "scss/variables"; -@use "scss/colors"; - -.requestContainer { - height: 91vh; - margin-top: variables.$spacing-xl; - width: 100%; - - &__erd--previewImage { - background-size: cover; - background-repeat: no-repeat; - background-image: linear-gradient(rgba(0, 0, 0, 50%), rgba(0, 0, 0, 50%)), url("./erd.png"); - } - - &__schema--previewImage { - background-size: cover; - background-repeat: no-repeat; - background-image: linear-gradient(rgba(0, 0, 0, 30%), rgba(0, 0, 0, 30%)), url("./schema.png"); - } - - &__messageBox { - padding: variables.$spacing-xl; - max-width: 70%; - background-color: colors.$blue-100; - border-radius: variables.$border-radius-md; - - p { - color: colors.$dark-blue-900; - text-align: center; - } - } -} diff --git a/airbyte-webapp/src/views/Connector/ConnectorDocumentationLayout/ResourceNotAvailable/ResourceNotAvailable.tsx b/airbyte-webapp/src/views/Connector/ConnectorDocumentationLayout/ResourceNotAvailable/ResourceNotAvailable.tsx deleted file mode 100644 index 48cc1b4b239..00000000000 --- 
a/airbyte-webapp/src/views/Connector/ConnectorDocumentationLayout/ResourceNotAvailable/ResourceNotAvailable.tsx +++ /dev/null @@ -1,74 +0,0 @@ -import classNames from "classnames"; -import { FormattedMessage } from "react-intl"; - -import { Button } from "components/ui/Button"; -import { FlexContainer } from "components/ui/Flex"; -import { Text } from "components/ui/Text"; - -import { isSourceDefinition } from "core/domain/connector/source"; -import { useDocumentationPanelContext } from "views/Connector/ConnectorDocumentationLayout/DocumentationPanelContext"; - -import styles from "./ResourceNotAvailable.module.scss"; -import { useAnalyticsTrackFunctions } from "./useAnalyticsTrackFunctions"; - -interface ResourceNotAvailableProps { - activeTab: "erd" | "schema"; - isRequested: boolean; - setRequested: (val: boolean) => void; -} -export const ResourceNotAvailable: React.FC> = ({ - activeTab, - setRequested, - isRequested, -}) => { - const { selectedConnectorDefinition } = useDocumentationPanelContext(); - const { trackRequest } = useAnalyticsTrackFunctions(); - - return ( - - {isRequested ? ( -
      - - - -
      - ) : ( - - - - - - - - )} -
      - ); -}; diff --git a/airbyte-webapp/src/views/Connector/ConnectorDocumentationLayout/ResourceNotAvailable/erd.png b/airbyte-webapp/src/views/Connector/ConnectorDocumentationLayout/ResourceNotAvailable/erd.png deleted file mode 100644 index 324000a9c54..00000000000 Binary files a/airbyte-webapp/src/views/Connector/ConnectorDocumentationLayout/ResourceNotAvailable/erd.png and /dev/null differ diff --git a/airbyte-webapp/src/views/Connector/ConnectorDocumentationLayout/ResourceNotAvailable/index.tsx b/airbyte-webapp/src/views/Connector/ConnectorDocumentationLayout/ResourceNotAvailable/index.tsx deleted file mode 100644 index 7b089711a4f..00000000000 --- a/airbyte-webapp/src/views/Connector/ConnectorDocumentationLayout/ResourceNotAvailable/index.tsx +++ /dev/null @@ -1 +0,0 @@ -export { ResourceNotAvailable } from "./ResourceNotAvailable"; diff --git a/airbyte-webapp/src/views/Connector/ConnectorDocumentationLayout/ResourceNotAvailable/schema.png b/airbyte-webapp/src/views/Connector/ConnectorDocumentationLayout/ResourceNotAvailable/schema.png deleted file mode 100644 index 0b23b279eec..00000000000 Binary files a/airbyte-webapp/src/views/Connector/ConnectorDocumentationLayout/ResourceNotAvailable/schema.png and /dev/null differ diff --git a/airbyte-webapp/src/views/Connector/ConnectorDocumentationLayout/ResourceNotAvailable/useAnalyticsTrackFunctions.tsx b/airbyte-webapp/src/views/Connector/ConnectorDocumentationLayout/ResourceNotAvailable/useAnalyticsTrackFunctions.tsx deleted file mode 100644 index 922df8f6344..00000000000 --- a/airbyte-webapp/src/views/Connector/ConnectorDocumentationLayout/ResourceNotAvailable/useAnalyticsTrackFunctions.tsx +++ /dev/null @@ -1,30 +0,0 @@ -import { useCallback } from "react"; - -import { Action, Namespace, useAnalyticsService } from "core/services/analytics"; - -export const useAnalyticsTrackFunctions = () => { - const analytics = useAnalyticsService(); - - const trackRequest = useCallback( - ({ - sourceDefinitionId, - 
connectorName, - requestType, - }: { - sourceDefinitionId: string; - connectorName: string; - requestType: "schema" | "erd"; - }) => { - const namespace = requestType === "schema" ? Namespace.SCHEMA : Namespace.ERD; - - analytics.track(namespace, Action.REQUEST, { - actionDescription: `Requested source ${requestType}`, - connector_source: connectorName, - connector_source_definition_id: sourceDefinitionId, - request_type: requestType, - }); - }, - [analytics] - ); - return { trackRequest }; -}; diff --git a/airbyte-webapp/src/views/Connector/ConnectorForm/components/Sections/ConditionSection.tsx b/airbyte-webapp/src/views/Connector/ConnectorForm/components/Sections/ConditionSection.tsx index 1e0d2a6c550..19d99710d1c 100644 --- a/airbyte-webapp/src/views/Connector/ConnectorForm/components/Sections/ConditionSection.tsx +++ b/airbyte-webapp/src/views/Connector/ConnectorForm/components/Sections/ConditionSection.tsx @@ -1,4 +1,5 @@ import classNames from "classnames"; +import { JSONSchema7Type } from "json-schema"; import pick from "lodash/pick"; import React, { useCallback, useMemo } from "react"; import { get, useFormContext, useFormState, useWatch } from "react-hook-form"; @@ -10,7 +11,7 @@ import { RadioButton } from "components/ui/RadioButton"; import { Text } from "components/ui/Text"; import { TextWithHTML } from "components/ui/TextWithHTML"; -import { FormConditionItem } from "core/form/types"; +import { FormConditionItem, FormGroupItem } from "core/form/types"; import { useOptionalDocumentationPanelContext } from "views/Connector/ConnectorDocumentationLayout/DocumentationPanelContext"; import styles from "./ConditionSection.module.scss"; @@ -33,7 +34,10 @@ export const ConditionSection: React.FC = ({ formField, p const setFocusedField = useOptionalDocumentationPanelContext()?.setFocusedField; const value = useWatch({ name: path }); - const { conditions, selectionConstValues } = formField; + const { conditions, selectionConstValues } = useMemo( + () => 
getVisibleConditionsAndConstValues(formField), + [formField] + ); const currentSelectionValue = useWatch({ name: `${path}.${formField.selectionKey}` }); let currentlySelectedCondition: number | undefined = selectionConstValues.indexOf(currentSelectionValue); if (currentlySelectedCondition === -1) { @@ -132,3 +136,22 @@ export const ConditionSection: React.FC = ({ formField, p ); }; + +const getVisibleConditionsAndConstValues = ( + formField: FormConditionItem +): { conditions: FormGroupItem[]; selectionConstValues: JSONSchema7Type[] } => { + const conditions: FormGroupItem[] = []; + const selectionConstValues: JSONSchema7Type[] = []; + + formField.conditions.forEach((condition, index) => { + if (!condition.airbyte_hidden) { + conditions.push(condition); + selectionConstValues.push(formField.selectionConstValues[index]); + } + }); + + return { + conditions, + selectionConstValues, + }; +}; diff --git a/airbyte-webapp/src/views/Connector/ConnectorForm/components/Sections/auth/RevokeButton.tsx b/airbyte-webapp/src/views/Connector/ConnectorForm/components/Sections/auth/RevokeButton.tsx index 3d272590e5c..7791791ecc8 100644 --- a/airbyte-webapp/src/views/Connector/ConnectorForm/components/Sections/auth/RevokeButton.tsx +++ b/airbyte-webapp/src/views/Connector/ConnectorForm/components/Sections/auth/RevokeButton.tsx @@ -2,7 +2,6 @@ import React from "react"; import { FormattedMessage } from "react-intl"; import { Button } from "components/ui/Button"; -import { Icon } from "components/ui/Icon"; import { ConnectorDefinitionSpecification } from "core/domain/connector"; @@ -37,7 +36,7 @@ export const RevokeButton: React.FC = ({ sourceId, selectedCo type="button" data-id="oauth-revoke-button" onClick={run} - icon={} + icon="disabled" size="sm" > {buttonLabel} diff --git a/airbyte-webapp/src/views/Connector/RequestConnectorModal/index.tsx b/airbyte-webapp/src/views/Connector/RequestConnectorModal/index.tsx deleted file mode 100644 index 8e5195cc62e..00000000000 --- 
a/airbyte-webapp/src/views/Connector/RequestConnectorModal/index.tsx +++ /dev/null @@ -1,3 +0,0 @@ -import RequestConnectorModal from "./RequestConnectorModal"; - -export default RequestConnectorModal; diff --git a/airbyte-webapp/src/views/Connector/RequestConnectorModal/types.ts b/airbyte-webapp/src/views/Connector/RequestConnectorModal/types.ts deleted file mode 100644 index ffefb968849..00000000000 --- a/airbyte-webapp/src/views/Connector/RequestConnectorModal/types.ts +++ /dev/null @@ -1,6 +0,0 @@ -export interface Values { - connectorType: string; - name: string; - additionalInfo?: string; - email?: string; -} diff --git a/airbyte-webapp/src/views/common/ErrorOccurredView/ErrorOccurredView.tsx b/airbyte-webapp/src/views/common/ErrorOccurredView/ErrorOccurredView.tsx index 3437cb0f43c..4d29629b8ea 100644 --- a/airbyte-webapp/src/views/common/ErrorOccurredView/ErrorOccurredView.tsx +++ b/airbyte-webapp/src/views/common/ErrorOccurredView/ErrorOccurredView.tsx @@ -9,20 +9,14 @@ import styles from "./ErrorOccurredView.module.scss"; interface ErrorOccurredViewProps { message: React.ReactNode; - /** - * URL to relevant documentation for the error if available - */ - docLink?: string; ctaButtonText?: React.ReactNode; onCtaButtonClick?: React.MouseEventHandler; } -export const ErrorOccurredView: React.FC = ({ - message, - onCtaButtonClick, - ctaButtonText, - docLink, -}) => { +/** + * @deprecated Replaced by `ErrorDetails` component. Will be removed once the speakeasy portal forward has been removed. + */ +export const ErrorOccurredView: React.FC = ({ message, onCtaButtonClick, ctaButtonText }) => { return (
      @@ -31,13 +25,6 @@ export const ErrorOccurredView: React.FC = ({

      {message}

      - {docLink && ( -

      - - - -

      - )} {onCtaButtonClick && ctaButtonText && (