diff --git a/.env b/.env index 2e58229a82c..9dc1b51c5f5 100644 --- a/.env +++ b/.env @@ -88,11 +88,6 @@ JOB_MAIN_CONTAINER_CPU_LIMIT= JOB_MAIN_CONTAINER_MEMORY_REQUEST= JOB_MAIN_CONTAINER_MEMORY_LIMIT= -NORMALIZATION_JOB_MAIN_CONTAINER_MEMORY_LIMIT= -NORMALIZATION_JOB_MAIN_CONTAINER_MEMORY_REQUEST= -NORMALIZATION_JOB_MAIN_CONTAINER_CPU_LIMIT= -NORMALIZATION_JOB_MAIN_CONTAINER_CPU_REQUEST= - ### LOGGING/MONITORING/TRACKING ### TRACKING_STRATEGY=segment SEGMENT_WRITE_KEY=7UDdp5K55CyiGgsauOr2pNNujGvmhaeu diff --git a/airbyte-api/build.gradle.kts b/airbyte-api/build.gradle.kts index 4537b2d2dd9..c00630ae44a 100644 --- a/airbyte-api/build.gradle.kts +++ b/airbyte-api/build.gradle.kts @@ -467,6 +467,7 @@ val genConnectorBuilderServerApiClient = tasks.register("genConnec "enumPropertyNaming" to "UPPERCASE", "generatePom" to "false", "interfaceOnly" to "true", + "serializationLibrary" to "jackson", ) doLast { diff --git a/airbyte-api/src/main/kotlin/config/ClientSupportFactory.kt b/airbyte-api/src/main/kotlin/config/ClientSupportFactory.kt index c21e4259916..dfe98688fee 100644 --- a/airbyte-api/src/main/kotlin/config/ClientSupportFactory.kt +++ b/airbyte-api/src/main/kotlin/config/ClientSupportFactory.kt @@ -129,20 +129,20 @@ class ClientSupportFactory { r.counter( "$metricPrefix.abort", *metricTags, - *arrayOf("retry-attempt", l.attemptCount.toString(), "method", l.result.request.method), - *getUrlTags(l.result.request.url), + *arrayOf("retry-attempt", l.attemptCount.toString(), "method", l.result?.request?.method ?: UNKNOWN), + *getUrlTags(l.result?.request?.url), ).increment() } } .onFailure { l -> - logger.error(l.exception) { "Failed to call ${l.result.request.url}. Last response: ${l.result}" } + logger.error(l.exception) { "Failed to call ${l.result?.request?.url ?: UNKNOWN}. 
Last response: ${l.result}" }
           meterRegistry.ifPresent { r ->
             r.counter(
               "$metricPrefix.failure",
               *metricTags,
-              *arrayOf("retry-attempt", l.attemptCount.toString(), "method", l.result.request.method),
-              *getUrlTags(l.result.request.url),
+              *arrayOf("retry-attempt", l.attemptCount.toString(), "method", l.result?.request?.method ?: UNKNOWN),
+              *getUrlTags(l.result?.request?.url),
             ).increment()
           }
         }
@@ -153,8 +153,8 @@ class ClientSupportFactory {
             r.counter(
               "$metricPrefix.retry",
               *metricTags,
-              *arrayOf("retry-attempt", l.attemptCount.toString(), "url", "method", l.lastResult.request.method),
-              *getUrlTags(l.lastResult.request.url),
+              *arrayOf("retry-attempt", l.attemptCount.toString(), "method", l.lastResult?.request?.method ?: UNKNOWN),
+              *getUrlTags(l.lastResult?.request?.url),
             ).increment()
           }
         }
@@ -165,8 +165,8 @@ class ClientSupportFactory {
             r.counter(
               "$metricPrefix.retries_exceeded",
               *metricTags,
-              *arrayOf("retry-attempt", l.attemptCount.toString(), "method", l.result.request.method),
-              *getUrlTags(l.result.request.url),
+              *arrayOf("retry-attempt", l.attemptCount.toString(), "method", l.result?.request?.method ?: UNKNOWN),
+              *getUrlTags(l.result?.request?.url),
             ).increment()
           }
         }
@@ -177,8 +177,8 @@ class ClientSupportFactory {
             r.counter(
               "$metricPrefix.success",
               *metricTags,
-              *arrayOf("retry-attempt", l.attemptCount.toString(), "method", l.result.request.method),
-              *getUrlTags(l.result.request.url),
+              *arrayOf("retry-attempt", l.attemptCount.toString(), "method", l.result?.request?.method ?: UNKNOWN),
+              *getUrlTags(l.result?.request?.url),
             ).increment()
           }
         }
@@ -188,12 +188,18 @@ class ClientSupportFactory {
       .build()
   }

-  private fun getUrlTags(httpUrl: HttpUrl): Array<String> {
-    val last = httpUrl.pathSegments.last()
-    if (last.contains("[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}".toRegex())) {
-      return arrayOf("url", httpUrl.toString().removeSuffix(last), "workload-id", last)
-    } else {
-      return arrayOf("url", httpUrl.toString())
-    }
+  private fun getUrlTags(httpUrl: HttpUrl?): Array<String> {
+    return httpUrl?.let {
+      val last = httpUrl.pathSegments.last()
+      if (last.contains("[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}".toRegex())) {
+        return arrayOf("url", httpUrl.toString().removeSuffix(last), "workload-id", last)
+      } else {
+        return arrayOf("url", httpUrl.toString())
+      }
+    } ?: emptyArray()
+  }
+
+  companion object {
+    private const val UNKNOWN = "unknown"
   }
 }
diff --git a/airbyte-api/src/main/kotlin/config/InternalApiAuthenticationFactory.kt b/airbyte-api/src/main/kotlin/config/InternalApiAuthenticationFactory.kt
index adb0507da31..dabbffd24c4 100644
--- a/airbyte-api/src/main/kotlin/config/InternalApiAuthenticationFactory.kt
+++ b/airbyte-api/src/main/kotlin/config/InternalApiAuthenticationFactory.kt
@@ -8,6 +8,7 @@ import com.auth0.jwt.JWT
 import com.auth0.jwt.JWTCreator
 import com.google.auth.oauth2.ServiceAccountCredentials
 import io.github.oshai.kotlinlogging.KotlinLogging
+import io.micrometer.core.instrument.MeterRegistry
 import io.micronaut.context.annotation.Factory
 import io.micronaut.context.annotation.Primary
 import io.micronaut.context.annotation.Prototype
@@ -18,6 +19,7 @@ import jakarta.inject.Singleton
 import java.io.FileInputStream
 import java.security.interfaces.RSAPrivateKey
 import java.util.Date
+import java.util.Optional
 import java.util.concurrent.TimeUnit

 private val logger = KotlinLogging.logger {}
@@ -61,6 +63,7 @@ class InternalApiAuthenticationFactory {
     @Value("\${airbyte.control.plane.auth-endpoint}") controlPlaneAuthEndpoint: String,
     @Value("\${airbyte.data.plane.service-account.email}") dataPlaneServiceAccountEmail: String,
     @Value("\${airbyte.data.plane.service-account.credentials-path}") dataPlaneServiceAccountCredentialsPath: String,
+    meterRegistry: Optional<MeterRegistry>,
   ): String {
     return try {
       val now = Date()
@@ -82,9 +85,12 @@ class InternalApiAuthenticationFactory {
       val cred = ServiceAccountCredentials.fromStream(stream)
       val key = cred.privateKey as RSAPrivateKey
       val algorithm: com.auth0.jwt.algorithms.Algorithm = com.auth0.jwt.algorithms.Algorithm.RSA256(null, key)
-      "Bearer " + token.sign(algorithm)
+      val signedToken = token.sign(algorithm)
+      meterRegistry.ifPresent { registry -> registry.counter("airbyte-api-client.auth-token.success").increment() }
+      return "Bearer $signedToken"
     } catch (e: Exception) {
-      logger.error(e) { "An issue occurred while generating a data plane auth token. Defaulting to empty string. Error Message: {}" }
+      meterRegistry.ifPresent { registry -> registry.counter("airbyte-api-client.auth-token.failure").increment() }
+      logger.error(e) { "An issue occurred while generating a data plane auth token. Defaulting to empty string." }
       ""
     }
   }
diff --git a/airbyte-api/src/main/openapi/cloud-config.yaml b/airbyte-api/src/main/openapi/cloud-config.yaml
index 20b50f6211d..bc62f634b3f 100644
--- a/airbyte-api/src/main/openapi/cloud-config.yaml
+++ b/airbyte-api/src/main/openapi/cloud-config.yaml
@@ -1622,9 +1622,11 @@ components:
       properties:
         accountId:
           type: integer
+          format: int64
           description: The account id associated with the job
         jobId:
           type: integer
+          format: int64
           description: The specific job id returned by the dbt Cloud API
         jobName:
           type: string
diff --git a/airbyte-api/src/main/openapi/config.yaml b/airbyte-api/src/main/openapi/config.yaml
index 7d6a1771230..14307d41f73 100644
--- a/airbyte-api/src/main/openapi/config.yaml
+++ b/airbyte-api/src/main/openapi/config.yaml
@@ -2195,6 +2195,53 @@ paths:
           $ref: "#/components/responses/NotFoundResponse"
         "422":
           $ref: "#/components/responses/InvalidInputResponse"
+  /v1/connections/events/get:
+    post:
+      tags:
+        - connection
+      summary: Get a single event (including details) in a connection by given event ID
+      operationId: getConnectionEvent
+      requestBody:
+        content:
+          application/json:
+            schema:
+              $ref: "#/components/schemas/ConnectionEventIdRequestBody"
+        required: true
+      responses:
+        "200":
+          description: Successful operation
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/ConnectionEventWithDetails"
+        "404":
+          $ref: "#/components/responses/NotFoundResponse"
+        "422":
+          $ref: "#/components/responses/InvalidInputResponse"
+
+  /v1/connections/events/list:
+    post:
+      tags:
+        - connection
+      summary: List most recent events in a connection (optional filters may apply)
+      operationId: listConnectionEvents
+      requestBody:
+        content:
+          application/json:
+            schema:
+              $ref: "#/components/schemas/ConnectionEventsRequestBody"
+        required: true
+      responses:
+        "200":
+          description: Successful operation
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/ConnectionEventList"
+        "404":
+          $ref: "#/components/responses/NotFoundResponse"
+        "422":
+          $ref: "#/components/responses/InvalidInputResponse"
   /v1/connections/last_job_per_stream:
     post:
       tags:
@@ -2310,6 +2357,29 @@ paths:
           $ref: "#/components/responses/NotFoundResponse"
         "422":
           $ref: "#/components/responses/InvalidInputResponse"
+  /v1/connections/diff_catalog:
+    post:
+      tags:
+        - connection
+      summary: Generate the diff between stored catalog for the connection and catalog 
provided and postprocess as necessary + operationId: diffCatalogForConnection + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/DiffCatalogRequestBody" + required: true + responses: + "200": + description: Successful operation + content: + application/json: + schema: + $ref: "#/components/schemas/SourceDiscoverSchemaRead" + "404": + $ref: "#/components/responses/NotFoundResponse" + "422": + $ref: "#/components/responses/InvalidInputResponse" /v1/state/get: post: tags: @@ -3381,25 +3451,6 @@ paths: $ref: "#/components/responses/NotFoundResponse" "422": $ref: "#/components/responses/InvalidInputResponse" - /v1/jobs/get_normalization_status: - post: - tags: - - jobs - - internal - summary: Get normalization status to determine if we can bypass normalization phase - operationId: getAttemptNormalizationStatusesForJob - requestBody: - content: - application/json: - schema: - $ref: "#/components/schemas/JobIdRequestBody" - responses: - "200": - description: Successful operation - content: - application/json: - schema: - $ref: "#/components/schemas/AttemptNormalizationStatusReadList" /v1/jobs/get_input: post: tags: @@ -4089,6 +4140,25 @@ paths: application/json: schema: $ref: "#/components/schemas/InternalOperationResult" + /v1/attempt/save_stream_metadata: + post: + tags: + - attempt + - internal + summary: Save stream level attempt information + operationId: saveStreamMetadata + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SaveStreamAttemptMetadataRequestBody" + responses: + "200": + description: Successful Operation + content: + application/json: + schema: + $ref: "#/components/schemas/InternalOperationResult" /v1/attempt/save_stats: post: tags: @@ -4582,7 +4652,6 @@ paths: operationId: applicationTokenRequest summary: Grant an Access Token for an Application. description: Takes the client_id and client_secret for an application and returns an Access Token. - # Scoped Configuration /v1/scoped_configuration/list: post: @@ -4868,6 +4937,176 @@ paths: security: [] x-sdk-alias: getHealthCheck x-sdk-group: Health + /public/v1/applications: + get: + tags: + - public_applications + - public + - Applications + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/ApplicationReadList" + examples: + Application List Response Example: + value: + applications: + [ + { + id: 780d5bd9-a8a0-43cf-8b35-cc2061ad8319, + name: test application, + clientId: b6b159ce-07f4-4699-94b3-3e85b318852e, + clientSecret: TG9yZW0gaXBzdW0gZG9sb3Igc2l0IGFtZXQsIGNvbnNlY3RldHVyIGFkaXBpc2NpbmcgZWxpdC4gU2VkIGNvbmRpbWVudHVtIG5lYyBsaWJlcm8gc2VkIGxvYm9ydGlzLg==, + createdAt: 2024-06-09T21:50:30Z, + }, + ] + description: List all Applications a User has permission to view. 
+ "403": + description: Not allowed + operationId: publicListApplications + summary: List Applications + x-sdk-alias: listApplications + x-sdk-group: Applications + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/ApplicationCreate" + examples: + Application Creation Request Example: + value: + name: test application + required: true + tags: + - public_applications + - public + - Applications + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/ApplicationRead" + examples: + Application Creation Response Example: + value: + id: 780d5bd9-a8a0-43cf-8b35-cc2061ad8319 + name: test application + clientId: b6b159ce-07f4-4699-94b3-3e85b318852e + clientSecret: TG9yZW0gaXBzdW0gZG9sb3Igc2l0IGFtZXQsIGNvbnNlY3RldHVyIGFkaXBpc2NpbmcgZWxpdC4gU2VkIGNvbmRpbWVudHVtIG5lYyBsaWJlcm8gc2VkIGxvYm9ydGlzLg== + createdAt: 2024-06-09T21:50:30Z + description: "Creates a new Application." + "400": + description: Invalid data + "403": + description: Not allowed + operationId: publicCreateApplication + summary: Create an Application + x-sdk-alias: createApplication + x-sdk-group: Applications + /public/v1/applications/{applicationId}: + get: + tags: + - public_applications + - public + - Applications + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/ApplicationRead" + examples: + Application Get Response Example: + value: + id: 780d5bd9-a8a0-43cf-8b35-cc2061ad8319 + name: test application + clientId: b6b159ce-07f4-4699-94b3-3e85b318852e + clientSecret: TG9yZW0gaXBzdW0gZG9sb3Igc2l0IGFtZXQsIGNvbnNlY3RldHVyIGFkaXBpc2NpbmcgZWxpdC4gU2VkIGNvbmRpbWVudHVtIG5lYyBsaWJlcm8gc2VkIGxvYm9ydGlzLg== + createdAt: 2024-06-09T21:50:30Z + description: Get an Application by the id in the path. + "403": + description: Not allowed + "404": + description: Not found + operationId: publicGetApplication + x-sdk-alias: getApplication + x-sdk-group: Applications + summary: Get an Application detail + delete: + tags: + - public_applications + - public + - Applications + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/ApplicationRead" + examples: + Application Delete Response Example: + value: + id: 780d5bd9-a8a0-43cf-8b35-cc2061ad8319 + name: test application + clientId: b6b159ce-07f4-4699-94b3-3e85b318852e + clientSecret: TG9yZW0gaXBzdW0gZG9sb3Igc2l0IGFtZXQsIGNvbnNlY3RldHVyIGFkaXBpc2NpbmcgZWxpdC4gU2VkIGNvbmRpbWVudHVtIG5lYyBsaWJlcm8gc2VkIGxvYm9ydGlzLg== + createdAt: 2024-06-09T21:50:30Z + description: Delete an Application. 
+ "403": + description: Not allowed + "404": + description: Not found + operationId: publicDeleteApplication + x-sdk-alias: deleteApplication + x-sdk-group: Applications + summary: Deletes an Application + parameters: + - name: applicationId + schema: + type: string + in: path + required: true + /public/v1/applications/token: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/ApplicationTokenRequest" + examples: + Application Token Request Example: + value: + clientId: 0da998a2-0d7b-49c7-bb6e-9f7eb9cc68a0 + clientSecret: TG9yZW0gaXBzdW0gZG9sb3Igc2l0IGFtZXQsIGNvbnNlY3RldHVyIGFkaXBpc2NpbmcgZWxpdC4gU2VkIGNvbmRpbWVudHVtIG5lYyBsaWJlcm8gc2VkIGxvYm9ydGlzLg== + required: true + tags: + - public_applications + - public + - Applications + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/AccessToken" + examples: + Application Creation Response Example: + value: + access_token: TG9yZW0gaXBzdW0gZG9sb3Igc2l0IGFtZXQsIGNvbnNlY3RldHVyIGFkaXBpc2NpbmcgZWxpdC4gU2VkIGNvbmRpbWVudHVtIG5lYyBsaWJlcm8gc2VkIGxvYm9ydGlzLg== + description: "Creates an Access Token." + "400": + description: Invalid data + "403": + description: Not allowed + operationId: publicGetAccessToken + summary: Get an Access Token + x-sdk-alias: createAccessToken + x-sdk-group: Applications + security: [] /public/v1/jobs: get: tags: @@ -6054,7 +6293,7 @@ paths: description: >- Create/update a set of OAuth credentials to override the Airbyte-provided OAuth credentials used for source/destination OAuth. - In order to determine what the credential configuration needs to be, please see the connector specification of the relevant + In order to determine what the credential configuration needs to be, please see the connector specification of the relevant source/destination. parameters: - name: workspaceId @@ -7354,6 +7593,15 @@ components: description: Number of seconds allowed between 2 airbyte protocol messages. The source will timeout if this delay is reach type: integer format: int64 + lastPublished: + description: The time the connector was modified in the codebase. + $ref: "#/components/schemas/ISO8601DateTime" + cdkVersion: + description: "The version of the CDK that the connector was built with. e.g. python:0.1.0, java:0.1.0" + type: string + metrics: + description: Public metrics for the connector + type: object SourceDefinitionReadList: type: object required: @@ -7604,6 +7852,18 @@ components: type: boolean priority: $ref: "#/components/schemas/WorkloadPriority" + DiffCatalogRequestBody: + type: object + required: + - catalogId + - connectionId + properties: + catalogId: + type: string + format: uuid + connectionId: + type: string + format: uuid WorkloadPriority: type: string enum: @@ -7827,8 +8087,6 @@ components: - dockerRepository - dockerImageTag - documentationUrl - - supportsDbt - - normalizationConfig properties: destinationDefinitionId: $ref: "#/components/schemas/DestinationDefinitionId" @@ -7860,11 +8118,15 @@ components: format: date resourceRequirements: $ref: "#/components/schemas/ActorDefinitionResourceRequirements" - supportsDbt: - type: boolean - description: an optional flag indicating whether DBT is used in the normalization. If the flag value is NULL - DBT is not used. - normalizationConfig: - $ref: "#/components/schemas/NormalizationDestinationDefinitionConfig" + lastPublished: + description: The time the connector was modified in the codebase. 
+ $ref: "#/components/schemas/ISO8601DateTime" + cdkVersion: + description: "The version of the CDK that the connector was built with. e.g. python:0.1.0, java:0.1.0" + type: string + metrics: + description: Public metrics for the connector + type: object DestinationDefinitionReadList: type: object required: @@ -8109,9 +8371,7 @@ components: required: - dockerRepository - dockerImageTag - - supportsDbt - supportsRefreshes - - normalizationConfig - supportState - isVersionOverrideApplied properties: @@ -8119,12 +8379,8 @@ components: type: string dockerImageTag: type: string - supportsDbt: - type: boolean supportsRefreshes: type: boolean - normalizationConfig: - $ref: "#/components/schemas/NormalizationDestinationDefinitionConfig" isVersionOverrideApplied: type: boolean supportLevel: @@ -8133,6 +8389,12 @@ components: $ref: "#/components/schemas/SupportState" breakingChanges: $ref: "#/components/schemas/ActorDefinitionVersionBreakingChanges" + lastPublished: + description: The time the connector was modified in the codebase. + $ref: "#/components/schemas/ISO8601DateTime" + cdkVersion: + description: "The version of the CDK that the connector was built with. e.g. python:0.1.0, java:0.1.0" + type: string ResolveActorDefinitionVersionResponse: type: object required: @@ -8802,6 +9064,136 @@ components: recordsCommitted: type: integer format: int64 + ConnectionEventId: + type: string + format: UUID + ConnectionEventIdRequestBody: + type: object + required: + - connectionEventId + properties: + connectionEventId: + $ref: "#/components/schemas/ConnectionEventId" + ConnectionEventType: + type: string + enum: + - SYNC_STARTED # only for manual sync jobs + - SYNC_SUCCEEDED + - SYNC_INCOMPLETE + - SYNC_FAILED + - SYNC_CANCELLED + - REFRESH_STARTED + - REFRESH_SUCCEEDED + - REFRESH_INCOMPLETE + - REFRESH_FAILED + - REFRESH_CANCELLED + - CLEAR_STARTED + - CLEAR_SUCCEEDED + - CLEAR_INCOMPLETE + - CLEAR_FAILED + - CLEAR_CANCELLED + - CONNECTION_SETTINGS_UPDATE + - CONNECTION_ENABLED + - CONNECTION_DISABLED + - SCHEMA_UPDATE + - CONNECTOR_UPDATE + UserReadInConnectionEvent: + type: object + required: + - id + - email + properties: + id: + type: string + format: uuid + email: + type: string + format: email + name: + type: string + ConnectionEvent: + type: object + required: + - id + - connectionId + - eventType + - summary + - created_at + properties: + id: + $ref: "#/components/schemas/ConnectionEventId" + connectionId: + $ref: "#/components/schemas/ConnectionId" + eventType: + $ref: "#/components/schemas/ConnectionEventType" + summary: + description: JSON object without event details + type: object + createdAt: + type: integer + format: int64 + user: + $ref: "#/components/schemas/UserReadInConnectionEvent" + ConnectionEventWithDetails: + type: object + required: + - id + - connectionId + - eventType + - summary + - details + - created_at + properties: + id: + $ref: "#/components/schemas/ConnectionEventId" + connectionId: + $ref: "#/components/schemas/ConnectionId" + eventType: + $ref: "#/components/schemas/ConnectionEventType" + summary: + description: JSON object without event details + type: object + details: + description: JSON object with event details + type: object + createdAt: + type: integer + format: int64 + user: + $ref: "#/components/schemas/UserReadInConnectionEvent" + + ConnectionEventList: + type: object + required: + - events + properties: + events: + type: array + items: + $ref: "#/components/schemas/ConnectionEvent" + ConnectionEventsRequestBody: + type: object + required: + - connectionId 
+ properties: + connectionId: + $ref: "#/components/schemas/ConnectionId" + eventTypes: + description: filter events by event types (optional) + type: array + items: + $ref: "#/components/schemas/ConnectionEventType" + pagination: + $ref: "#/components/schemas/Pagination" + createdAtStart: + description: The start datetime of a time range to filter by + type: string + format: date-time + createdAtEnd: + description: The end datetime of a time range to filter by + type: string + format: date-time + ConnectionLastJobPerStreamRead: type: array items: @@ -9075,41 +9467,14 @@ components: # the jsonschema2pojo does not seem to support it yet: https://github.com/joelittlejohn/jsonschema2pojo/issues/392 operatorType: $ref: "#/components/schemas/OperatorType" - normalization: - $ref: "#/components/schemas/OperatorNormalization" - dbt: - $ref: "#/components/schemas/OperatorDbt" webhook: $ref: "#/components/schemas/OperatorWebhook" OperatorType: type: string enum: - # - destination - normalization - dbt - webhook - # - docker - OperatorNormalization: - type: object - properties: - option: - type: string - enum: - - basic - #- unnesting - OperatorDbt: - type: object - required: - - gitRepoUrl - properties: - gitRepoUrl: - type: string - gitRepoBranch: - type: string - dockerImage: - type: string - dbtArguments: - type: string OperatorWebhook: type: object properties: @@ -10171,8 +10536,6 @@ components: - destination - replication - persistence - - normalization - - dbt - airbyte_platform - unknown FailureType: @@ -10522,27 +10885,6 @@ components: description: A unique identifier for an actor. type: string format: uuid - NormalizationDestinationDefinitionConfig: - description: describes a normalization config for destination definition version - type: object - required: - - supported - additionalProperties: false - properties: - supported: - type: boolean - description: whether the destination definition supports normalization. - default: false - normalizationRepository: - type: string - description: a field indicating the name of the repository to be used for normalization. If the value of the flag is NULL - normalization is not used. - normalizationTag: - type: string - description: a field indicating the tag of the docker repository to be used for normalization. - normalizationIntegrationType: - type: string - description: a field indicating the type of integration dialect to use for normalization. - JobTypeResourceLimit: description: sets resource requirements for a specific job type for an actor definition. these values override the default, if both are set. 
type: object @@ -10948,6 +11290,8 @@ components: - status - isSyncing - schemaChange + - sourceActorDefinitionVersion + - destinationActorDefinitionVersion properties: connectionId: $ref: "#/components/schemas/ConnectionId" @@ -10971,6 +11315,10 @@ components: type: boolean schemaChange: $ref: "#/components/schemas/SchemaChange" + sourceActorDefinitionVersion: + $ref: "#/components/schemas/ActorDefinitionVersionRead" + destinationActorDefinitionVersion: + $ref: "#/components/schemas/ActorDefinitionVersionRead" WebBackendConnectionRead: type: object required: @@ -10987,6 +11335,8 @@ components: - notifySchemaChanges - notifySchemaChangesByEmail - nonBreakingChangesPreference + - sourceActorDefinitionVersion + - destinationActorDefinitionVersion properties: connectionId: $ref: "#/components/schemas/ConnectionId" @@ -11056,6 +11406,10 @@ components: format: int64 backfillPreference: $ref: "#/components/schemas/SchemaChangeBackfillPreference" + sourceActorDefinitionVersion: + $ref: "#/components/schemas/ActorDefinitionVersionRead" + destinationActorDefinitionVersion: + $ref: "#/components/schemas/ActorDefinitionVersionRead" NonBreakingChangesPreference: enum: - ignore # do nothing if we detect a schema change @@ -11170,6 +11524,36 @@ components: $ref: "#/components/schemas/AttemptStreamStats" connectionId: $ref: "#/components/schemas/ConnectionId" + SaveStreamAttemptMetadataRequestBody: + type: object + required: + - jobId + - attemptNumber + - stats + properties: + jobId: + $ref: "#/components/schemas/JobId" + attemptNumber: + $ref: "#/components/schemas/AttemptNumber" + streamMetadata: + type: array + items: + $ref: "#/components/schemas/StreamAttemptMetadata" + StreamAttemptMetadata: + type: object + required: + - streamName + - wasBackfilled + - wasResumed + properties: + streamName: + type: string + streamNamespace: + type: string + wasBackfilled: + type: boolean + wasResumed: + type: boolean AttemptSyncConfig: type: object required: @@ -11220,25 +11604,6 @@ components: catalogId: type: string format: uuid - AttemptNormalizationStatusReadList: - type: object - properties: - attemptNormalizationStatuses: - type: array - items: - $ref: "#/components/schemas/AttemptNormalizationStatusRead" - AttemptNormalizationStatusRead: - type: object - properties: - attemptNumber: - $ref: "#/components/schemas/AttemptNumber" - hasRecordsCommitted: - type: boolean - recordsCommitted: - type: integer - format: int64 - hasNormalizationFailed: - type: boolean StreamStatusId: type: string format: uuid @@ -11841,6 +12206,7 @@ components: properties: name: type: string + x-sdk-component: true ApplicationId: type: string format: uuid @@ -11862,6 +12228,7 @@ components: type: array items: $ref: "#/components/schemas/ApplicationRead" + x-sdk-component: true ApplicationRead: required: - id @@ -11882,6 +12249,7 @@ components: createdAt: type: integer format: int64 + x-sdk-component: true ApplicationTokenRequest: required: - client_id @@ -11892,6 +12260,7 @@ components: type: string client_secret: type: string + x-sdk-component: true AccessToken: required: - access_token @@ -11899,6 +12268,7 @@ components: properties: access_token: type: string + x-sdk-component: true UserInvitationCreateRequestBody: type: object required: @@ -12871,6 +13241,7 @@ components: items: type: string selectedFields: + description: By default (if not provided in the request) all fields will be synced. Otherwise, only the fields in this list will be synced. 
$ref: "#/components/schemas/SelectedFields" x-sdk-component: true StreamConfigurations: @@ -12931,6 +13302,11 @@ components: - destination x-sdk-component: true + ISO8601DateTime: + type: string + format: date-time + x-field-extra-annotation: '@com.fasterxml.jackson.annotation.JsonFormat(pattern="yyyy-MM-dd''T''HH:mm:ss.SSS''Z''")' + responses: NotFoundResponse: description: Object with given id was not found. diff --git a/airbyte-bootloader/src/test/java/io/airbyte/bootloader/BootloaderTest.java b/airbyte-bootloader/src/test/java/io/airbyte/bootloader/BootloaderTest.java index 699ec7982b4..d7a3076079c 100644 --- a/airbyte-bootloader/src/test/java/io/airbyte/bootloader/BootloaderTest.java +++ b/airbyte-bootloader/src/test/java/io/airbyte/bootloader/BootloaderTest.java @@ -97,8 +97,8 @@ class BootloaderTest { // ⚠️ This line should change with every new migration to show that you meant to make a new // migration to the prod database - private static final String CURRENT_CONFIGS_MIGRATION_VERSION = "0.57.4.005"; - private static final String CURRENT_JOBS_MIGRATION_VERSION = "0.57.2.003"; + private static final String CURRENT_CONFIGS_MIGRATION_VERSION = "0.57.4.006"; + private static final String CURRENT_JOBS_MIGRATION_VERSION = "0.57.2.005"; private static final String CDK_VERSION = "1.2.3"; @BeforeEach diff --git a/airbyte-commons-converters/src/main/java/io/airbyte/commons/converters/ProtocolConverters.java b/airbyte-commons-converters/src/main/java/io/airbyte/commons/converters/ProtocolConverters.java index 414547d144a..a0a45b166d4 100644 --- a/airbyte-commons-converters/src/main/java/io/airbyte/commons/converters/ProtocolConverters.java +++ b/airbyte-commons-converters/src/main/java/io/airbyte/commons/converters/ProtocolConverters.java @@ -31,6 +31,13 @@ public static io.airbyte.protocol.models.StreamDescriptor streamDescriptorToProt .withNamespace(apiStreamDescriptor.getNamespace()); } + @SuppressWarnings("LineLength") + public static io.airbyte.config.StreamDescriptor streamDescriptorToDomain(final io.airbyte.protocol.models.StreamDescriptor protocolStreamDescriptor) { + return new io.airbyte.config.StreamDescriptor() + .withName(protocolStreamDescriptor.getName()) + .withNamespace(protocolStreamDescriptor.getNamespace()); + } + @SuppressWarnings("LineLength") public static io.airbyte.protocol.models.StreamDescriptor clientStreamDescriptorToProtocol(final io.airbyte.api.client.model.generated.StreamDescriptor clientStreamDescriptor) { return new io.airbyte.protocol.models.StreamDescriptor().withName(clientStreamDescriptor.getName()) diff --git a/airbyte-commons-converters/src/test/java/io/airbyte/commons/converters/ConnectorConfigUpdaterTest.java b/airbyte-commons-converters/src/test/java/io/airbyte/commons/converters/ConnectorConfigUpdaterTest.java index 72e91fd0ae5..36021811b12 100644 --- a/airbyte-commons-converters/src/test/java/io/airbyte/commons/converters/ConnectorConfigUpdaterTest.java +++ b/airbyte-commons-converters/src/test/java/io/airbyte/commons/converters/ConnectorConfigUpdaterTest.java @@ -59,7 +59,7 @@ void setUp() throws IOException { Jsons.jsonNode(Map.of()), DESTINATION_NAME, DESTINATION_NAME, - null, null, null, null, null));; + null, null, null, null, null)); when(mAirbyteApiClient.getDestinationApi()).thenReturn(mDestinationApi); when(mAirbyteApiClient.getSourceApi()).thenReturn(mSourceApi); diff --git a/airbyte-commons-protocol/src/main/java/io/airbyte/commons/protocol/migrations/v1/AirbyteMessageMigrationV1.java 
b/airbyte-commons-protocol/src/main/java/io/airbyte/commons/protocol/migrations/v1/AirbyteMessageMigrationV1.java
index 1d8856d34d0..20436710d27 100644
--- a/airbyte-commons-protocol/src/main/java/io/airbyte/commons/protocol/migrations/v1/AirbyteMessageMigrationV1.java
+++ b/airbyte-commons-protocol/src/main/java/io/airbyte/commons/protocol/migrations/v1/AirbyteMessageMigrationV1.java
@@ -4,30 +4,14 @@
 
 package io.airbyte.commons.protocol.migrations.v1;
 
-import static io.airbyte.protocol.models.JsonSchemaReferenceTypes.REF_KEY;
-
-import com.fasterxml.jackson.databind.JsonNode;
-import com.fasterxml.jackson.databind.node.ArrayNode;
-import com.fasterxml.jackson.databind.node.ObjectNode;
-import com.fasterxml.jackson.databind.node.TextNode;
 import com.google.common.annotations.VisibleForTesting;
-import io.airbyte.commons.json.Jsons;
 import io.airbyte.commons.protocol.migrations.AirbyteMessageMigration;
-import io.airbyte.commons.protocol.migrations.util.RecordMigrations;
-import io.airbyte.commons.protocol.migrations.util.RecordMigrations.MigratedNode;
-import io.airbyte.commons.version.AirbyteProtocolVersion;
 import io.airbyte.commons.version.Version;
 import io.airbyte.protocol.models.AirbyteMessage;
-import io.airbyte.protocol.models.AirbyteMessage.Type;
-import io.airbyte.protocol.models.AirbyteStream;
 import io.airbyte.protocol.models.ConfiguredAirbyteCatalog;
-import io.airbyte.protocol.models.ConfiguredAirbyteStream;
-import io.airbyte.protocol.models.JsonSchemaReferenceTypes;
 import io.airbyte.validation.json.JsonSchemaValidator;
-import java.util.Iterator;
-import java.util.Map.Entry;
-import java.util.Objects;
 import java.util.Optional;
+import org.apache.commons.lang3.NotImplementedException;
 
 /**
  * V1 Migration.
@@ -48,134 +32,23 @@ public AirbyteMessageMigrationV1(final JsonSchemaValidator validator) {
   }
 
   @Override
-  public io.airbyte.protocol.models.v0.AirbyteMessage downgrade(final AirbyteMessage oldMessage,
-                                                                final Optional<ConfiguredAirbyteCatalog> configuredAirbyteCatalog) {
-    final io.airbyte.protocol.models.v0.AirbyteMessage newMessage = Jsons.object(
-        Jsons.jsonNode(oldMessage),
-        io.airbyte.protocol.models.v0.AirbyteMessage.class);
-    if (oldMessage.getType() == Type.CATALOG && oldMessage.getCatalog() != null) {
-      for (final io.airbyte.protocol.models.v0.AirbyteStream stream : newMessage.getCatalog().getStreams()) {
-        final JsonNode schema = stream.getJsonSchema();
-        SchemaMigrationV1.downgradeSchema(schema);
-      }
-    } else if (oldMessage.getType() == Type.RECORD && oldMessage.getRecord() != null) {
-      if (configuredAirbyteCatalog.isPresent()) {
-        final ConfiguredAirbyteCatalog catalog = configuredAirbyteCatalog.get();
-        final io.airbyte.protocol.models.v0.AirbyteRecordMessage record = newMessage.getRecord();
-        final Optional<ConfiguredAirbyteStream> maybeStream = catalog.getStreams().stream()
-            .filter(stream -> Objects.equals(stream.getStream().getName(), record.getStream())
-                && Objects.equals(stream.getStream().getNamespace(), record.getNamespace()))
-            .findFirst();
-        // If this record doesn't belong to any configured stream, then there's no point downgrading it
-        // So only do the downgrade if we can find its stream
-        if (maybeStream.isPresent()) {
-          final JsonNode schema = maybeStream.get().getStream().getJsonSchema();
-          final JsonNode oldData = record.getData();
-          final MigratedNode downgradedNode = downgradeRecord(oldData, schema);
-          record.setData(downgradedNode.node());
-        }
-      }
-    }
-    return newMessage;
+  public io.airbyte.protocol.models.v0.AirbyteMessage downgrade(AirbyteMessage message, Optional<ConfiguredAirbyteCatalog> configuredAirbyteCatalog) {
+    throw new NotImplementedException("Migration not implemented.");
   }
 
   @Override
-  public AirbyteMessage upgrade(final io.airbyte.protocol.models.v0.AirbyteMessage oldMessage,
-                                final Optional<ConfiguredAirbyteCatalog> configuredAirbyteCatalog) {
-    // We're not introducing any changes to the structure of the record/catalog
-    // so just clone a new message object, which we can edit in-place
-    final AirbyteMessage newMessage = Jsons.object(
-        Jsons.jsonNode(oldMessage),
-        AirbyteMessage.class);
-    if (oldMessage.getType() == io.airbyte.protocol.models.v0.AirbyteMessage.Type.CATALOG && oldMessage.getCatalog() != null) {
-      for (final AirbyteStream stream : newMessage.getCatalog().getStreams()) {
-        final JsonNode schema = stream.getJsonSchema();
-        SchemaMigrationV1.upgradeSchema(schema);
-      }
-    } else if (oldMessage.getType() == io.airbyte.protocol.models.v0.AirbyteMessage.Type.RECORD && oldMessage.getRecord() != null) {
-      final JsonNode oldData = newMessage.getRecord().getData();
-      final JsonNode newData = upgradeRecord(oldData);
-      newMessage.getRecord().setData(newData);
-    }
-    return newMessage;
-  }
-
-  /**
-   * Returns a copy of oldData, with numeric values converted to strings. String and boolean values
-   * are returned as-is for convenience, i.e. this is not a true deep copy.
-   */
-  private static JsonNode upgradeRecord(final JsonNode oldData) {
-    if (oldData.isNumber()) {
-      // Base case: convert numbers to strings
-      return Jsons.convertValue(oldData.asText(), TextNode.class);
-    } else if (oldData.isObject()) {
-      // Recurse into each field of the object
-      final ObjectNode newData = (ObjectNode) Jsons.emptyObject();
-
-      final Iterator<Entry<String, JsonNode>> fieldsIterator = oldData.fields();
-      while (fieldsIterator.hasNext()) {
-        final Entry<String, JsonNode> next = fieldsIterator.next();
-        final String key = next.getKey();
-        final JsonNode value = next.getValue();
-
-        final JsonNode newValue = upgradeRecord(value);
-        newData.set(key, newValue);
-      }
-
-      return newData;
-    } else if (oldData.isArray()) {
-      // Recurse into each element of the array
-      final ArrayNode newData = Jsons.arrayNode();
-      for (final JsonNode element : oldData) {
-        newData.add(upgradeRecord(element));
-      }
-      return newData;
-    } else {
-      // Base case: this is a string or boolean, so we don't need to modify it
-      return oldData;
-    }
-  }
-
-  /**
-   * We need the schema to recognize which fields are integers, since it would be wrong to just assume
-   * any numerical string should be parsed out.
-   *
-   * Works on a best-effort basis. If the schema doesn't match the data, we'll do our best to
-   * downgrade anything that we can definitively say is a number. Should _not_ throw an exception if
-   * bad things happen (e.g. we try to parse a non-numerical string as a number).
-   */
-  private MigratedNode downgradeRecord(final JsonNode data, final JsonNode schema) {
-    return RecordMigrations.mutateDataNode(
-        validator,
-        s -> {
-          if (s.hasNonNull(REF_KEY)) {
-            final String type = s.get(REF_KEY).asText();
-            return JsonSchemaReferenceTypes.INTEGER_REFERENCE.equals(type)
-                || JsonSchemaReferenceTypes.NUMBER_REFERENCE.equals(type);
-          } else {
-            return false;
-          }
-        },
-        (s, d) -> {
-          if (d.asText().matches("-?\\d+(\\.\\d+)?")) {
-            // If this string is a numeric literal, convert it to a numeric node.
-            return new MigratedNode(Jsons.deserialize(d.asText()), true);
-          } else {
-            // Otherwise, just leave the node unchanged.
-            return new MigratedNode(d, false);
-          }
-        },
-        data, schema);
+  public AirbyteMessage upgrade(io.airbyte.protocol.models.v0.AirbyteMessage message, Optional<ConfiguredAirbyteCatalog> configuredAirbyteCatalog) {
+    throw new NotImplementedException("Migration not implemented.");
   }
 
   @Override
   public Version getPreviousVersion() {
-    return AirbyteProtocolVersion.V0;
+    return null;
   }
 
   @Override
   public Version getCurrentVersion() {
-    return AirbyteProtocolVersion.V1;
+    return null;
   }
 
 }
diff --git a/airbyte-commons-protocol/src/test/java/io/airbyte/commons/protocol/migrations/v1/AirbyteMessageMigrationV1Test.java b/airbyte-commons-protocol/src/test/java/io/airbyte/commons/protocol/migrations/v1/AirbyteMessageMigrationV1Test.java
deleted file mode 100644
index 67c6da513c5..00000000000
--- a/airbyte-commons-protocol/src/test/java/io/airbyte/commons/protocol/migrations/v1/AirbyteMessageMigrationV1Test.java
+++ /dev/null
@@ -1,1633 +0,0 @@
-/*
- * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved.
- */
-
-package io.airbyte.commons.protocol.migrations.v1;
-
-import static org.junit.jupiter.api.Assertions.assertEquals;
-
-import com.fasterxml.jackson.databind.JsonNode;
-import io.airbyte.commons.json.Jsons;
-import io.airbyte.commons.resources.MoreResources;
-import io.airbyte.protocol.models.AirbyteCatalog;
-import io.airbyte.protocol.models.AirbyteMessage;
-import io.airbyte.protocol.models.AirbyteMessage.Type;
-import io.airbyte.protocol.models.AirbyteRecordMessage;
-import io.airbyte.protocol.models.AirbyteStream;
-import io.airbyte.protocol.models.ConfiguredAirbyteCatalog;
-import io.airbyte.protocol.models.ConfiguredAirbyteStream;
-import io.airbyte.validation.json.JsonSchemaValidator;
-import java.net.URI;
-import java.net.URISyntaxException;
-import java.util.List;
-import java.util.Optional;
-import org.junit.jupiter.api.BeforeEach;
-import org.junit.jupiter.api.Nested;
-import org.junit.jupiter.api.Test;
-
-// most of these tests rely on a doTest utility method for brevity, which hides the assertion.
-@SuppressWarnings("PMD.JUnitTestsShouldIncludeAssert") -class AirbyteMessageMigrationV1Test { - - JsonSchemaValidator validator; - private AirbyteMessageMigrationV1 migration; - - @BeforeEach - void setup() throws URISyntaxException { - // TODO this should probably just get generated as part of the airbyte-protocol build, and - // airbyte-workers / airbyte-commons-protocol would reference it directly - final URI parentUri = MoreResources.readResourceAsFile("WellKnownTypes.json").getAbsoluteFile().toURI(); - validator = new JsonSchemaValidator(parentUri); - migration = new AirbyteMessageMigrationV1(validator); - } - - @Test - void testVersionMetadata() { - assertEquals("0.3.0", migration.getPreviousVersion().serialize()); - assertEquals("1.0.0", migration.getCurrentVersion().serialize()); - } - - @Nested - class CatalogUpgradeTest { - - @Test - void testBasicUpgrade() { - // This isn't actually a valid stream schema (since it's not an object) - // but this test case is mostly about preserving the message structure, so it's not super relevant - final JsonNode oldSchema = Jsons.deserialize( - """ - { - "type": "string" - } - """); - - final AirbyteMessage upgradedMessage = migration.upgrade(createCatalogMessage(oldSchema), Optional.empty()); - - final AirbyteMessage expectedMessage = Jsons.deserialize( - """ - { - "type": "CATALOG", - "catalog": { - "streams": [ - { - "json_schema": { - "$ref": "WellKnownTypes.json#/definitions/String" - } - } - ] - } - } - """, - AirbyteMessage.class); - assertEquals(expectedMessage, upgradedMessage); - } - - @Test - void testNullUpgrade() { - final io.airbyte.protocol.models.v0.AirbyteMessage oldMessage = new io.airbyte.protocol.models.v0.AirbyteMessage() - .withType(io.airbyte.protocol.models.v0.AirbyteMessage.Type.CATALOG); - final AirbyteMessage upgradedMessage = migration.upgrade(oldMessage, Optional.empty()); - final AirbyteMessage expectedMessage = new AirbyteMessage().withType(Type.CATALOG); - assertEquals(expectedMessage, upgradedMessage); - } - - /** - * Utility method to upgrade the oldSchema, and assert that the result is equal to expectedSchema. 
- * - * @param oldSchemaString The schema to be upgraded - * @param expectedSchemaString The expected schema after upgrading - */ - private void doTest(final String oldSchemaString, final String expectedSchemaString) { - final JsonNode oldSchema = Jsons.deserialize(oldSchemaString); - - final AirbyteMessage upgradedMessage = migration.upgrade(createCatalogMessage(oldSchema), Optional.empty()); - - final JsonNode expectedSchema = Jsons.deserialize(expectedSchemaString); - assertEquals(expectedSchema, upgradedMessage.getCatalog().getStreams().get(0).getJsonSchema()); - } - - @Test - void testUpgradeAllPrimitives() { - doTest( - """ - { - "type": "object", - "properties": { - "example_string": { - "type": "string" - }, - "example_number": { - "type": "number" - }, - "example_integer": { - "type": "integer" - }, - "example_airbyte_integer": { - "type": "number", - "airbyte_type": "integer" - }, - "example_boolean": { - "type": "boolean" - }, - "example_timestamptz": { - "type": "string", - "format": "date-time", - "airbyte_type": "timestamp_with_timezone" - }, - "example_timestamptz_implicit": { - "type": "string", - "format": "date-time" - }, - "example_timestamp_without_tz": { - "type": "string", - "format": "date-time", - "airbyte_type": "timestamp_without_timezone" - }, - "example_timez": { - "type": "string", - "format": "time", - "airbyte_type": "time_with_timezone" - }, - "example_timetz_implicit": { - "type": "string", - "format": "time" - }, - "example_time_without_tz": { - "type": "string", - "format": "time", - "airbyte_type": "time_without_timezone" - }, - "example_date": { - "type": "string", - "format": "date" - }, - "example_binary": { - "type": "string", - "contentEncoding": "base64" - } - } - } - """, - """ - { - "type": "object", - "properties": { - "example_string": { - "$ref": "WellKnownTypes.json#/definitions/String" - }, - "example_number": { - "$ref": "WellKnownTypes.json#/definitions/Number" - }, - "example_integer": { - "$ref": "WellKnownTypes.json#/definitions/Integer" - }, - "example_airbyte_integer": { - "$ref": "WellKnownTypes.json#/definitions/Integer" - }, - "example_boolean": { - "$ref": "WellKnownTypes.json#/definitions/Boolean" - }, - "example_timestamptz": { - "$ref": "WellKnownTypes.json#/definitions/TimestampWithTimezone" - }, - "example_timestamptz_implicit": { - "$ref": "WellKnownTypes.json#/definitions/TimestampWithTimezone" - }, - "example_timestamp_without_tz": { - "$ref": "WellKnownTypes.json#/definitions/TimestampWithoutTimezone" - }, - "example_timez": { - "$ref": "WellKnownTypes.json#/definitions/TimeWithTimezone" - }, - "example_timetz_implicit": { - "$ref": "WellKnownTypes.json#/definitions/TimeWithTimezone" - }, - "example_time_without_tz": { - "$ref": "WellKnownTypes.json#/definitions/TimeWithoutTimezone" - }, - "example_date": { - "$ref": "WellKnownTypes.json#/definitions/Date" - }, - "example_binary": { - "$ref": "WellKnownTypes.json#/definitions/BinaryData" - } - } - } - """); - } - - @Test - void testUpgradeNestedFields() { - doTest( - """ - { - "type": "object", - "properties": { - "basic_array": { - "items": {"type": "string"} - }, - "tuple_array": { - "items": [ - {"type": "string"}, - {"type": "integer"} - ], - "additionalItems": {"type": "string"}, - "contains": {"type": "integer"} - }, - "nested_object": { - "properties": { - "id": {"type": "integer"}, - "nested_oneof": { - "oneOf": [ - {"type": "string"}, - {"type": "integer"} - ] - }, - "nested_anyof": { - "anyOf": [ - {"type": "string"}, - {"type": "integer"} - ] - }, - 
"nested_allof": { - "allOf": [ - {"type": "string"}, - {"type": "integer"} - ] - }, - "nested_not": { - "not": [ - {"type": "string"}, - {"type": "integer"} - ] - } - }, - "patternProperties": { - "integer_.*": {"type": "integer"} - }, - "additionalProperties": {"type": "string"} - } - } - } - """, - """ - { - "type": "object", - "properties": { - "basic_array": { - "items": {"$ref": "WellKnownTypes.json#/definitions/String"} - }, - "tuple_array": { - "items": [ - {"$ref": "WellKnownTypes.json#/definitions/String"}, - {"$ref": "WellKnownTypes.json#/definitions/Integer"} - ], - "additionalItems": {"$ref": "WellKnownTypes.json#/definitions/String"}, - "contains": {"$ref": "WellKnownTypes.json#/definitions/Integer"} - }, - "nested_object": { - "properties": { - "id": {"$ref": "WellKnownTypes.json#/definitions/Integer"}, - "nested_oneof": { - "oneOf": [ - {"$ref": "WellKnownTypes.json#/definitions/String"}, - {"$ref": "WellKnownTypes.json#/definitions/Integer"} - ] - }, - "nested_anyof": { - "anyOf": [ - {"$ref": "WellKnownTypes.json#/definitions/String"}, - {"$ref": "WellKnownTypes.json#/definitions/Integer"} - ] - }, - "nested_allof": { - "allOf": [ - {"$ref": "WellKnownTypes.json#/definitions/String"}, - {"$ref": "WellKnownTypes.json#/definitions/Integer"} - ] - }, - "nested_not": { - "not": [ - {"$ref": "WellKnownTypes.json#/definitions/String"}, - {"$ref": "WellKnownTypes.json#/definitions/Integer"} - ] - } - }, - "patternProperties": { - "integer_.*": {"$ref": "WellKnownTypes.json#/definitions/Integer"} - }, - "additionalProperties": {"$ref": "WellKnownTypes.json#/definitions/String"} - } - } - } - """); - } - - @Test - void testUpgradeBooleanSchemas() { - // Most of these should never happen in reality, but let's handle them just in case - // The only ones that we're _really_ expecting are additionalItems and additionalProperties - final String schemaString = """ - { - "type": "object", - "properties": { - "basic_array": { - "items": true - }, - "tuple_array": { - "items": [true], - "additionalItems": true, - "contains": true - }, - "nested_object": { - "properties": { - "id": true, - "nested_oneof": { - "oneOf": [true] - }, - "nested_anyof": { - "anyOf": [true] - }, - "nested_allof": { - "allOf": [true] - }, - "nested_not": { - "not": [true] - } - }, - "patternProperties": { - "integer_.*": true - }, - "additionalProperties": true - } - } - } - """; - doTest(schemaString, schemaString); - } - - @Test - void testUpgradeEmptySchema() { - // Sources shouldn't do this, but we should have handling for it anyway, since it's not currently - // enforced by SATs - final String schemaString = """ - { - "type": "object", - "properties": { - "basic_array": { - "items": {} - }, - "tuple_array": { - "items": [{}], - "additionalItems": {}, - "contains": {} - }, - "nested_object": { - "properties": { - "id": {}, - "nested_oneof": { - "oneOf": [{}] - }, - "nested_anyof": { - "anyOf": [{}] - }, - "nested_allof": { - "allOf": [{}] - }, - "nested_not": { - "not": [{}] - } - }, - "patternProperties": { - "integer_.*": {} - }, - "additionalProperties": {} - } - } - } - """; - doTest(schemaString, schemaString); - } - - @Test - void testUpgradeLiteralSchema() { - // Verify that we do _not_ recurse into places we shouldn't - final String schemaString = """ - { - "type": "object", - "properties": { - "example_schema": { - "type": "object", - "default": {"type": "string"}, - "enum": [{"type": "string"}], - "const": {"type": "string"} - } - } - } - """; - doTest(schemaString, schemaString); - } - - @Test - void 
testUpgradeMalformedSchemas() { - // These schemas are "wrong" in some way. For example, normalization will currently treat - // bad_timestamptz as a string timestamp_with_timezone, - // i.e. it will disregard the option for a boolean. - // Generating this sort of schema is just wrong; sources shouldn't do this to begin with. But let's - // verify that we behave mostly correctly here. - doTest( - """ - { - "type": "object", - "properties": { - "bad_timestamptz": { - "type": ["boolean", "string"], - "format": "date-time", - "airbyte_type": "timestamp_with_timezone" - }, - "bad_integer": { - "type": "string", - "format": "date-time", - "airbyte_type": "integer" - } - } - } - """, - """ - { - "type": "object", - "properties": { - "bad_timestamptz": {"$ref": "WellKnownTypes.json#/definitions/TimestampWithTimezone"}, - "bad_integer": {"$ref": "WellKnownTypes.json#/definitions/Integer"} - } - } - """); - } - - @Test - void testUpgradeMultiTypeFields() { - doTest( - """ - { - "type": "object", - "properties": { - "multityped_field": { - "type": ["string", "object", "array"], - "properties": { - "id": {"type": "string"} - }, - "patternProperties": { - "integer_.*": {"type": "integer"} - }, - "additionalProperties": {"type": "string"}, - "items": {"type": "string"}, - "additionalItems": {"type": "string"}, - "contains": {"type": "string"} - }, - "nullable_multityped_field": { - "type": ["null", "string", "array", "object"], - "items": [{"type": "string"}, {"type": "integer"}], - "properties": { - "id": {"type": "integer"} - } - }, - "multityped_date_field": { - "type": ["string", "integer"], - "format": "date" - }, - "sneaky_singletype_field": { - "type": ["string", "null"], - "format": "date-time" - } - } - } - """, - """ - { - "type": "object", - "properties": { - "multityped_field": { - "oneOf": [ - {"$ref": "WellKnownTypes.json#/definitions/String"}, - { - "type": "object", - "properties": { - "id": {"$ref": "WellKnownTypes.json#/definitions/String"} - }, - "patternProperties": { - "integer_.*": {"$ref": "WellKnownTypes.json#/definitions/Integer"} - }, - "additionalProperties": {"$ref": "WellKnownTypes.json#/definitions/String"} - }, - { - "type": "array", - "items": {"$ref": "WellKnownTypes.json#/definitions/String"}, - "additionalItems": {"$ref": "WellKnownTypes.json#/definitions/String"}, - "contains": {"$ref": "WellKnownTypes.json#/definitions/String"} - } - ] - }, - "nullable_multityped_field": { - "oneOf": [ - {"$ref": "WellKnownTypes.json#/definitions/String"}, - { - "type": "array", - "items": [ - {"$ref": "WellKnownTypes.json#/definitions/String"}, - {"$ref": "WellKnownTypes.json#/definitions/Integer"} - ] - }, - { - "type": "object", - "properties": { - "id": {"$ref": "WellKnownTypes.json#/definitions/Integer"} - } - } - ] - }, - "multityped_date_field": { - "oneOf": [ - {"$ref": "WellKnownTypes.json#/definitions/Date"}, - {"$ref": "WellKnownTypes.json#/definitions/Integer"} - ] - }, - "sneaky_singletype_field": {"$ref": "WellKnownTypes.json#/definitions/TimestampWithTimezone"} - } - } - """); - } - - private io.airbyte.protocol.models.v0.AirbyteMessage createCatalogMessage(final JsonNode schema) { - return new io.airbyte.protocol.models.v0.AirbyteMessage().withType(io.airbyte.protocol.models.v0.AirbyteMessage.Type.CATALOG) - .withCatalog( - new io.airbyte.protocol.models.v0.AirbyteCatalog().withStreams(List.of(new io.airbyte.protocol.models.v0.AirbyteStream().withJsonSchema( - schema)))); - } - - } - - @Nested - class RecordUpgradeTest { - - @Test - void testBasicUpgrade() { - final 
JsonNode oldData = Jsons.deserialize( - """ - { - "id": 42 - } - """); - - final AirbyteMessage upgradedMessage = migration.upgrade(createRecordMessage(oldData), Optional.empty()); - - final AirbyteMessage expectedMessage = Jsons.deserialize( - """ - { - "type": "RECORD", - "record": { - "data": { - "id": "42" - } - } - } - """, - AirbyteMessage.class); - assertEquals(expectedMessage, upgradedMessage); - } - - @Test - void testNullUpgrade() { - final io.airbyte.protocol.models.v0.AirbyteMessage oldMessage = new io.airbyte.protocol.models.v0.AirbyteMessage() - .withType(io.airbyte.protocol.models.v0.AirbyteMessage.Type.RECORD); - final AirbyteMessage upgradedMessage = migration.upgrade(oldMessage, Optional.empty()); - final AirbyteMessage expectedMessage = new AirbyteMessage().withType(Type.RECORD); - assertEquals(expectedMessage, upgradedMessage); - } - - /** - * Utility method to upgrade the oldData, and assert that the result is equal to expectedData. - * - * @param oldDataString The data of the record to be upgraded - * @param expectedDataString The expected data after upgrading - */ - private void doTest(final String oldDataString, final String expectedDataString) { - final JsonNode oldData = Jsons.deserialize(oldDataString); - - final AirbyteMessage upgradedMessage = migration.upgrade(createRecordMessage(oldData), Optional.empty()); - - final JsonNode expectedData = Jsons.deserialize(expectedDataString); - assertEquals(expectedData, upgradedMessage.getRecord().getData()); - } - - @Test - void testNestedUpgrade() { - doTest( - """ - { - "int": 42, - "float": 42.0, - "float2": 42.2, - "sub_object": { - "sub_int": 42, - "sub_float": 42.0, - "sub_float2": 42.2 - }, - "sub_array": [42, 42.0, 42.2] - } - """, - """ - { - "int": "42", - "float": "42.0", - "float2": "42.2", - "sub_object": { - "sub_int": "42", - "sub_float": "42.0", - "sub_float2": "42.2" - }, - "sub_array": ["42", "42.0", "42.2"] - } - """); - } - - @Test - void testNonUpgradableValues() { - doTest( - """ - { - "boolean": true, - "string": "arst", - "sub_object": { - "boolean": true, - "string": "arst" - }, - "sub_array": [true, "arst"] - } - """, - """ - { - "boolean": true, - "string": "arst", - "sub_object": { - "boolean": true, - "string": "arst" - }, - "sub_array": [true, "arst"] - } - """); - } - - private io.airbyte.protocol.models.v0.AirbyteMessage createRecordMessage(final JsonNode data) { - return new io.airbyte.protocol.models.v0.AirbyteMessage().withType(io.airbyte.protocol.models.v0.AirbyteMessage.Type.RECORD) - .withRecord(new io.airbyte.protocol.models.v0.AirbyteRecordMessage().withData(data)); - } - - } - - @Nested - class CatalogDowngradeTest { - - @Test - void testBasicDowngrade() { - // This isn't actually a valid stream schema (since it's not an object) - // but this test case is mostly about preserving the message structure, so it's not super relevant - final JsonNode newSchema = Jsons.deserialize( - """ - { - "$ref": "WellKnownTypes.json#/definitions/String" - } - """); - - final io.airbyte.protocol.models.v0.AirbyteMessage downgradedMessage = migration.downgrade(createCatalogMessage(newSchema), Optional.empty()); - - final io.airbyte.protocol.models.v0.AirbyteMessage expectedMessage = Jsons.deserialize( - """ - { - "type": "CATALOG", - "catalog": { - "streams": [ - { - "json_schema": { - "type": "string" - } - } - ] - } - } - """, - io.airbyte.protocol.models.v0.AirbyteMessage.class); - assertEquals(expectedMessage, downgradedMessage); - } - - @Test - void testNullDowngrade() { - final AirbyteMessage 
oldMessage = new AirbyteMessage().withType(Type.CATALOG); - final io.airbyte.protocol.models.v0.AirbyteMessage upgradedMessage = migration.downgrade(oldMessage, Optional.empty()); - final io.airbyte.protocol.models.v0.AirbyteMessage expectedMessage = new io.airbyte.protocol.models.v0.AirbyteMessage() - .withType(io.airbyte.protocol.models.v0.AirbyteMessage.Type.CATALOG); - assertEquals(expectedMessage, upgradedMessage); - } - - /** - * Utility method to downgrade the oldSchema, and assert that the result is equal to expectedSchema. - * - * @param oldSchemaString The schema to be downgraded - * @param expectedSchemaString The expected schema after downgrading - */ - private void doTest(final String oldSchemaString, final String expectedSchemaString) { - final JsonNode oldSchema = Jsons.deserialize(oldSchemaString); - - final io.airbyte.protocol.models.v0.AirbyteMessage downgradedMessage = migration.downgrade(createCatalogMessage(oldSchema), Optional.empty()); - - final JsonNode expectedSchema = Jsons.deserialize(expectedSchemaString); - assertEquals(expectedSchema, downgradedMessage.getCatalog().getStreams().get(0).getJsonSchema()); - } - - @Test - void testDowngradeAllPrimitives() { - doTest( - """ - { - "type": "object", - "properties": { - "example_string": { - "$ref": "WellKnownTypes.json#/definitions/String" - }, - "example_number": { - "$ref": "WellKnownTypes.json#/definitions/Number" - }, - "example_integer": { - "$ref": "WellKnownTypes.json#/definitions/Integer" - }, - "example_boolean": { - "$ref": "WellKnownTypes.json#/definitions/Boolean" - }, - "example_timestamptz": { - "$ref": "WellKnownTypes.json#/definitions/TimestampWithTimezone" - }, - "example_timestamp_without_tz": { - "$ref": "WellKnownTypes.json#/definitions/TimestampWithoutTimezone" - }, - "example_timez": { - "$ref": "WellKnownTypes.json#/definitions/TimeWithTimezone" - }, - "example_time_without_tz": { - "$ref": "WellKnownTypes.json#/definitions/TimeWithoutTimezone" - }, - "example_date": { - "$ref": "WellKnownTypes.json#/definitions/Date" - }, - "example_binary": { - "$ref": "WellKnownTypes.json#/definitions/BinaryData" - } - } - } - """, - """ - { - "type": "object", - "properties": { - "example_string": { - "type": "string" - }, - "example_number": { - "type": "number" - }, - "example_integer": { - "type": "number", - "airbyte_type": "integer" - }, - "example_boolean": { - "type": "boolean" - }, - "example_timestamptz": { - "type": "string", - "airbyte_type": "timestamp_with_timezone", - "format": "date-time" - }, - "example_timestamp_without_tz": { - "type": "string", - "airbyte_type": "timestamp_without_timezone", - "format": "date-time" - }, - "example_timez": { - "type": "string", - "airbyte_type": "time_with_timezone", - "format": "time" - }, - "example_time_without_tz": { - "type": "string", - "airbyte_type": "time_without_timezone", - "format": "time" - }, - "example_date": { - "type": "string", - "format": "date" - }, - "example_binary": { - "type": "string", - "contentEncoding": "base64" - } - } - } - """); - } - - @Test - void testDowngradeNestedFields() { - doTest( - """ - { - "type": "object", - "properties": { - "basic_array": { - "items": {"$ref": "WellKnownTypes.json#/definitions/String"} - }, - "tuple_array": { - "items": [ - {"$ref": "WellKnownTypes.json#/definitions/String"}, - {"$ref": "WellKnownTypes.json#/definitions/Integer"} - ], - "additionalItems": {"$ref": "WellKnownTypes.json#/definitions/String"}, - "contains": {"$ref": "WellKnownTypes.json#/definitions/Integer"} - }, - 
"nested_object": { - "properties": { - "id": {"$ref": "WellKnownTypes.json#/definitions/Integer"}, - "nested_oneof": { - "oneOf": [ - {"$ref": "WellKnownTypes.json#/definitions/String"}, - {"$ref": "WellKnownTypes.json#/definitions/TimestampWithTimezone"} - ] - }, - "nested_anyof": { - "anyOf": [ - {"$ref": "WellKnownTypes.json#/definitions/String"}, - {"$ref": "WellKnownTypes.json#/definitions/Integer"} - ] - }, - "nested_allof": { - "allOf": [ - {"$ref": "WellKnownTypes.json#/definitions/String"}, - {"$ref": "WellKnownTypes.json#/definitions/Integer"} - ] - }, - "nested_not": { - "not": [ - {"$ref": "WellKnownTypes.json#/definitions/String"}, - {"$ref": "WellKnownTypes.json#/definitions/Integer"} - ] - } - }, - "patternProperties": { - "integer_.*": {"$ref": "WellKnownTypes.json#/definitions/Integer"} - }, - "additionalProperties": {"$ref": "WellKnownTypes.json#/definitions/String"} - } - } - } - """, - """ - { - "type": "object", - "properties": { - "basic_array": { - "items": {"type": "string"} - }, - "tuple_array": { - "items": [ - {"type": "string"}, - {"type": "number", "airbyte_type": "integer"} - ], - "additionalItems": {"type": "string"}, - "contains": {"type": "number", "airbyte_type": "integer"} - }, - "nested_object": { - "properties": { - "id": {"type": "number", "airbyte_type": "integer"}, - "nested_oneof": { - "oneOf": [ - {"type": "string"}, - {"type": "string", "format": "date-time", "airbyte_type": "timestamp_with_timezone"} - ] - }, - "nested_anyof": { - "anyOf": [ - {"type": "string"}, - {"type": "number", "airbyte_type": "integer"} - ] - }, - "nested_allof": { - "allOf": [ - {"type": "string"}, - {"type": "number", "airbyte_type": "integer"} - ] - }, - "nested_not": { - "not": [ - {"type": "string"}, - {"type": "number", "airbyte_type": "integer"} - ] - } - }, - "patternProperties": { - "integer_.*": {"type": "number", "airbyte_type": "integer"} - }, - "additionalProperties": {"type": "string"} - } - } - } - """); - } - - @Test - void testDowngradeBooleanSchemas() { - // Most of these should never happen in reality, but let's handle them just in case - // The only ones that we're _really_ expecting are additionalItems and additionalProperties - final String schemaString = """ - { - "type": "object", - "properties": { - "basic_array": { - "items": true - }, - "tuple_array": { - "items": [true], - "additionalItems": true, - "contains": true - }, - "nested_object": { - "properties": { - "id": true, - "nested_oneof": { - "oneOf": [true] - }, - "nested_anyof": { - "anyOf": [true] - }, - "nested_allof": { - "allOf": [true] - }, - "nested_not": { - "not": [true] - } - }, - "patternProperties": { - "integer_.*": true - }, - "additionalProperties": true - } - } - } - """; - doTest(schemaString, schemaString); - } - - @Test - void testDowngradeEmptySchema() { - // Sources shouldn't do this, but we should have handling for it anyway, since it's not currently - // enforced by SATs - final String schemaString = """ - { - "type": "object", - "properties": { - "basic_array": { - "items": {} - }, - "tuple_array": { - "items": [{}], - "additionalItems": {}, - "contains": {} - }, - "nested_object": { - "properties": { - "id": {}, - "nested_oneof": { - "oneOf": [{}] - }, - "nested_anyof": { - "anyOf": [{}] - }, - "nested_allof": { - "allOf": [{}] - }, - "nested_not": { - "not": [{}] - } - }, - "patternProperties": { - "integer_.*": {} - }, - "additionalProperties": {} - } - } - } - """; - doTest(schemaString, schemaString); - } - - @Test - void testDowngradeLiteralSchema() { - // Verify 
that we do _not_ recurse into places we shouldn't - final String schemaString = """ - { - "type": "object", - "properties": { - "example_schema": { - "type": "object", - "default": {"$ref": "WellKnownTypes.json#/definitions/String"}, - "enum": [{"$ref": "WellKnownTypes.json#/definitions/String"}], - "const": {"$ref": "WellKnownTypes.json#/definitions/String"} - } - } - } - """; - doTest(schemaString, schemaString); - } - - @Test - void testDowngradeMultiTypeFields() { - doTest( - """ - { - "type": "object", - "properties": { - "multityped_field": { - "oneOf": [ - {"$ref": "WellKnownTypes.json#/definitions/String"}, - { - "type": "object", - "properties": { - "id": {"$ref": "WellKnownTypes.json#/definitions/String"} - }, - "patternProperties": { - "integer_.*": {"$ref": "WellKnownTypes.json#/definitions/Integer"} - }, - "additionalProperties": {"$ref": "WellKnownTypes.json#/definitions/String"} - }, - { - "type": "array", - "items": {"$ref": "WellKnownTypes.json#/definitions/String"}, - "additionalItems": {"$ref": "WellKnownTypes.json#/definitions/String"}, - "contains": {"$ref": "WellKnownTypes.json#/definitions/String"} - } - ] - }, - "multityped_date_field": { - "oneOf": [ - {"$ref": "WellKnownTypes.json#/definitions/Date"}, - {"$ref": "WellKnownTypes.json#/definitions/Integer"} - ] - }, - "boolean_field": { - "oneOf": [ - true, - {"$ref": "WellKnownTypes.json#/definitions/String"}, - false - ] - }, - "conflicting_field": { - "oneOf": [ - {"type": "object", "properties": {"id": {"$ref": "WellKnownTypes.json#/definitions/String"}}}, - {"type": "object", "properties": {"name": {"$ref": "WellKnownTypes.json#/definitions/String"}}}, - {"$ref": "WellKnownTypes.json#/definitions/String"} - ] - }, - "conflicting_primitives": { - "oneOf": [ - {"$ref": "WellKnownTypes.json#/definitions/TimestampWithoutTimezone"}, - {"$ref": "WellKnownTypes.json#/definitions/TimestampWithTimezone"} - ] - } - } - } - """, - """ - { - "type": "object", - "properties": { - "multityped_field": { - "type": ["string", "object", "array"], - "properties": { - "id": {"type": "string"} - }, - "patternProperties": { - "integer_.*": {"type": "number", "airbyte_type": "integer"} - }, - "additionalProperties": {"type": "string"}, - "items": {"type": "string"}, - "additionalItems": {"type": "string"}, - "contains": {"type": "string"} - }, - "multityped_date_field": { - "type": ["string", "number"], - "format": "date", - "airbyte_type": "integer" - }, - "boolean_field": { - "oneOf": [ - true, - {"type": "string"}, - false - ] - }, - "conflicting_field": { - "oneOf": [ - {"type": "object", "properties": {"id": {"type": "string"}}}, - {"type": "object", "properties": {"name": {"type": "string"}}}, - {"type": "string"} - ] - }, - "conflicting_primitives": { - "oneOf": [ - {"type": "string", "format": "date-time", "airbyte_type": "timestamp_without_timezone"}, - {"type": "string", "format": "date-time", "airbyte_type": "timestamp_with_timezone"} - ] - } - } - } - """); - } - - @Test - void testDowngradeWeirdSchemas() { - // old_style_schema isn't actually valid (i.e. 
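A compact way to read the tests above: the downgrader walks only schema-bearing keywords and must treat value-bearing keywords as opaque literals. As a sketch (the constant name is illustrative, not from the implementation):

```java
import java.util.Set;

// Keywords whose values are themselves schemas (or lists of schemas) and are
// therefore recursed into, per testDowngradeNestedFields; "default", "enum",
// and "const" hold literal JSON and are left untouched (testDowngradeLiteralSchema).
static final Set<String> SCHEMA_BEARING_KEYWORDS = Set.of(
    "properties", "patternProperties", "additionalProperties",
    "items", "additionalItems", "contains",
    "oneOf", "anyOf", "allOf", "not");
```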
v1 schemas should always be using $ref) - // but we should check that it behaves well anyway - doTest( - """ - { - "type": "object", - "properties": { - "old_style_schema": {"type": "string"} - } - } - """, - """ - { - "type": "object", - "properties": { - "old_style_schema": {"type": "string"} - } - } - """); - } - - private AirbyteMessage createCatalogMessage(final JsonNode schema) { - return new AirbyteMessage().withType(AirbyteMessage.Type.CATALOG) - .withCatalog( - new AirbyteCatalog().withStreams(List.of(new AirbyteStream().withJsonSchema( - schema)))); - } - - } - - @Nested - class RecordDowngradeTest { - - private static final String STREAM_NAME = "foo_stream"; - private static final String NAMESPACE_NAME = "foo_namespace"; - - @Test - void testBasicDowngrade() { - final ConfiguredAirbyteCatalog catalog = createConfiguredAirbyteCatalog( - """ - {"$ref": "WellKnownTypes.json#/definitions/Integer"} - """); - final JsonNode oldData = Jsons.deserialize( - """ - "42" - """); - - final io.airbyte.protocol.models.v0.AirbyteMessage downgradedMessage = new AirbyteMessageMigrationV1(validator) - .downgrade(createRecordMessage(oldData), Optional.of(catalog)); - - final io.airbyte.protocol.models.v0.AirbyteMessage expectedMessage = Jsons.deserialize( - """ - { - "type": "RECORD", - "record": { - "stream": "foo_stream", - "namespace": "foo_namespace", - "data": 42 - } - } - """, - io.airbyte.protocol.models.v0.AirbyteMessage.class); - assertEquals(expectedMessage, downgradedMessage); - } - - @Test - void testNullDowngrade() { - final AirbyteMessage oldMessage = new AirbyteMessage().withType(Type.RECORD); - final io.airbyte.protocol.models.v0.AirbyteMessage upgradedMessage = migration.downgrade(oldMessage, Optional.empty()); - final io.airbyte.protocol.models.v0.AirbyteMessage expectedMessage = new io.airbyte.protocol.models.v0.AirbyteMessage() - .withType(io.airbyte.protocol.models.v0.AirbyteMessage.Type.RECORD); - assertEquals(expectedMessage, upgradedMessage); - } - - /** - * Utility method to use the given catalog to downgrade the oldData, and assert that the result is - * equal to expectedDataString. 
- * - * @param schemaString The JSON schema of the record - * @param oldDataString The data of the record to be downgraded - * @param expectedDataString The expected data after downgrading - */ - private void doTest(final String schemaString, final String oldDataString, final String expectedDataString) { - final ConfiguredAirbyteCatalog catalog = createConfiguredAirbyteCatalog(schemaString); - final JsonNode oldData = Jsons.deserialize(oldDataString); - - final io.airbyte.protocol.models.v0.AirbyteMessage downgradedMessage = new AirbyteMessageMigrationV1(validator) - .downgrade(createRecordMessage(oldData), Optional.of(catalog)); - - final JsonNode expectedDowngradedRecord = Jsons.deserialize(expectedDataString); - assertEquals(expectedDowngradedRecord, downgradedMessage.getRecord().getData()); - } - - @Test - void testNestedDowngrade() { - doTest( - """ - { - "type": "object", - "properties": { - "int": {"$ref": "WellKnownTypes.json#/definitions/Integer"}, - "num": {"$ref": "WellKnownTypes.json#/definitions/Number"}, - "binary": {"$ref": "WellKnownTypes.json#/definitions/BinaryData"}, - "bool": {"$ref": "WellKnownTypes.json#/definitions/Boolean"}, - "object": { - "type": "object", - "properties": { - "int": {"$ref": "WellKnownTypes.json#/definitions/Integer"}, - "arr": { - "type": "array", - "items": {"$ref": "WellKnownTypes.json#/definitions/Integer"} - } - } - }, - "array": { - "type": "array", - "items": {"$ref": "WellKnownTypes.json#/definitions/Integer"} - }, - "array_multitype": { - "type": "array", - "items": [{"$ref": "WellKnownTypes.json#/definitions/Integer"}, {"$ref": "WellKnownTypes.json#/definitions/String"}] - }, - "oneof": { - "type": "array", - "items": { - "oneOf": [ - {"$ref": "WellKnownTypes.json#/definitions/Integer"}, - {"$ref": "WellKnownTypes.json#/definitions/Boolean"} - ] - } - } - } - } - """, - """ - { - "int": "42", - "num": "43.2", - "string": "42", - "bool": true, - "object": { - "int": "42" - }, - "array": ["42"], - "array_multitype": ["42", "42"], - "oneof": ["42", true], - "additionalProperty": "42" - } - """, - """ - { - "int": 42, - "num": 43.2, - "string": "42", - "bool": true, - "object": { - "int": 42 - }, - "array": [42], - "array_multitype": [42, "42"], - "oneof": [42, true], - "additionalProperty": "42" - } - """); - } - - @Test - void testWeirdDowngrade() { - doTest( - """ - { - "type": "object", - "properties": { - "raw_int": {"$ref": "WellKnownTypes.json#/definitions/Integer"}, - "raw_num": {"$ref": "WellKnownTypes.json#/definitions/Number"}, - "bad_int": {"$ref": "WellKnownTypes.json#/definitions/Integer"}, - "typeless_object": { - "properties": { - "foo": {"$ref": "WellKnownTypes.json#/definitions/Integer"} - } - }, - "typeless_array": { - "items": {"$ref": "WellKnownTypes.json#/definitions/Integer"} - }, - "arr_obj_union1": { - "type": ["array", "object"], - "items": { - "type": "object", - "properties": { - "id": {"$ref": "WellKnownTypes.json#/definitions/Integer"}, - "name": {"$ref": "WellKnownTypes.json#/definitions/String"} - } - }, - "properties": { - "id": {"$ref": "WellKnownTypes.json#/definitions/Integer"}, - "name": {"$ref": "WellKnownTypes.json#/definitions/String"} - } - }, - "arr_obj_union2": { - "type": ["array", "object"], - "items": { - "type": "object", - "properties": { - "id": {"$ref": "WellKnownTypes.json#/definitions/Integer"}, - "name": {"$ref": "WellKnownTypes.json#/definitions/String"} - } - }, - "properties": { - "id": {"$ref": "WellKnownTypes.json#/definitions/Integer"}, - "name": {"$ref": 
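The record downgrade implied by these tests parses strings back into JSON numbers only where the schema expects a numeric well-known type, and keeps the original value whenever the text does not parse. A leaf-level sketch under those assumptions (not the actual implementation):

```java
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.JsonNodeFactory;

final class LeafDowngradeSketch {

  // A string becomes a JSON number only when the schema points at Integer/Number
  // and the text actually parses; otherwise the value is kept verbatim, which is
  // what testIncorrectSchema below asserts.
  static JsonNode downgradeLeaf(final JsonNode value, final String wellKnownTypeRef) {
    if (!value.isTextual()) {
      return value;
    }
    try {
      if (wellKnownTypeRef.endsWith("/Integer")) {
        return JsonNodeFactory.instance.numberNode(Long.parseLong(value.asText()));
      }
      if (wellKnownTypeRef.endsWith("/Number")) {
        return JsonNodeFactory.instance.numberNode(new java.math.BigDecimal(value.asText()));
      }
    } catch (final NumberFormatException e) {
      // fall through and keep the original string
    }
    return value;
  }

}
```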
"WellKnownTypes.json#/definitions/String"} - } - }, - "empty_oneof": { - "oneOf": [] - } - } - } - """, - """ - { - "raw_int": 42, - "raw_num": 43.2, - "bad_int": "foo", - "typeless_object": { - "foo": "42" - }, - "typeless_array": ["42"], - "arr_obj_union1": [{"id": "42", "name": "arst"}, {"id": "43", "name": "qwfp"}], - "arr_obj_union2": {"id": "42", "name": "arst"}, - "empty_oneof": "42" - } - """, - """ - { - "raw_int": 42, - "raw_num": 43.2, - "bad_int": "foo", - "typeless_object": { - "foo": 42 - }, - "typeless_array": [42], - "arr_obj_union1": [{"id": 42, "name": "arst"}, {"id": 43, "name": "qwfp"}], - "arr_obj_union2": {"id": 42, "name": "arst"}, - "empty_oneof": "42" - } - """); - } - - @Test - void testEmptySchema() { - doTest( - """ - { - "type": "object", - "properties": { - "empty_schema_primitive": {}, - "empty_schema_array": {}, - "empty_schema_object": {}, - "implicit_array": { - "items": {"$ref": "WellKnownTypes.json#/definitions/Integer"} - }, - "implicit_object": { - "properties": { - "foo": {"$ref": "WellKnownTypes.json#/definitions/Integer"} - } - } - } - } - """, - """ - { - "empty_schema_primitive": "42", - "empty_schema_array": ["42", false], - "empty_schema_object": {"foo": "42"}, - "implicit_array": ["42"], - "implicit_object": {"foo": "42"} - } - """, - """ - { - "empty_schema_primitive": "42", - "empty_schema_array": ["42", false], - "empty_schema_object": {"foo": "42"}, - "implicit_array": [42], - "implicit_object": {"foo": 42} - } - """); - } - - @Test - void testBacktracking() { - // These test cases verify that we correctly choose the most-correct oneOf option. - doTest( - """ - { - "type": "object", - "properties": { - "valid_option": { - "oneOf": [ - {"$ref": "WellKnownTypes.json#/definitions/Boolean"}, - {"$ref": "WellKnownTypes.json#/definitions/Integer"}, - {"$ref": "WellKnownTypes.json#/definitions/String"} - ] - }, - "all_invalid": { - "oneOf": [ - { - "type": "array", - "items": {"$ref": "WellKnownTypes.json#/definitions/Integer"} - }, - { - "type": "array", - "items": {"$ref": "WellKnownTypes.json#/definitions/Boolean"} - } - ] - }, - "nested_oneof": { - "oneOf": [ - { - "type": "array", - "items": {"$ref": "WellKnownTypes.json#/definitions/Integer"} - }, - { - "type": "array", - "items": { - "type": "object", - "properties": { - "foo": { - "oneOf": [ - {"$ref": "WellKnownTypes.json#/definitions/Boolean"}, - {"$ref": "WellKnownTypes.json#/definitions/Integer"} - ] - } - } - } - } - ] - }, - "mismatched_primitive": { - "oneOf": [ - { - "type": "object", - "properties": { - "foo": {"type": "object"}, - "bar": {"$ref": "WellKnownTypes.json#/definitions/String"} - } - }, - { - "type": "object", - "properties": { - "foo": {"$ref": "WellKnownTypes.json#/definitions/Boolean"}, - "bar": {"$ref": "WellKnownTypes.json#/definitions/Integer"} - } - } - ] - }, - "mismatched_text": { - "oneOf": [ - { - "type": "object", - "properties": { - "foo": {"type": "object"}, - "bar": {"$ref": "WellKnownTypes.json#/definitions/String"} - } - }, - { - "type": "object", - "properties": { - "foo": {"$ref": "WellKnownTypes.json#/definitions/String"}, - "bar": {"$ref": "WellKnownTypes.json#/definitions/Integer"} - } - } - ] - }, - "mismatch_array": { - "oneOf": [ - { - "type": "array", - "items": {"$ref": "WellKnownTypes.json#/definitions/Integer"} - }, - { - "type": "array", - "items": [ - {"$ref": "WellKnownTypes.json#/definitions/String"}, - {"$ref": "WellKnownTypes.json#/definitions/String"}, - {"$ref": "WellKnownTypes.json#/definitions/Integer"} - ] - } - ] - } - } - } - 
""", - """ - { - "valid_option": "42", - "all_invalid": ["42", "arst"], - "nested_oneof": [{"foo": "42"}], - "mismatched_primitive": { - "foo": true, - "bar": "42" - }, - "mismatched_text": { - "foo": "bar", - "bar": "42" - }, - "mismatch_array": ["arst", "41", "42"] - } - """, - """ - { - "valid_option": 42, - "all_invalid": [42, "arst"], - "nested_oneof": [{"foo": 42}], - "mismatched_primitive": { - "foo": true, - "bar": 42 - }, - "mismatched_text": { - "foo": "bar", - "bar": 42 - }, - "mismatch_array": ["arst", "41", 42] - } - """); - } - - @Test - void testIncorrectSchema() { - doTest( - """ - { - "type": "object", - "properties": { - "bad_int": {"$ref": "WellKnownTypes.json#/definitions/Integer"}, - "bad_int_array": { - "type": "array", - "items": {"$ref": "WellKnownTypes.json#/definitions/Integer"} - }, - "bad_int_obj": { - "type": "object", - "properties": { - "foo": {"$ref": "WellKnownTypes.json#/definitions/Integer"} - } - } - } - } - """, - """ - { - "bad_int": "arst", - "bad_int_array": ["arst"], - "bad_int_obj": {"foo": "arst"} - } - """, - """ - { - "bad_int": "arst", - "bad_int_array": ["arst"], - "bad_int_obj": {"foo": "arst"} - } - """); - } - - private ConfiguredAirbyteCatalog createConfiguredAirbyteCatalog(final String schema) { - return new ConfiguredAirbyteCatalog() - .withStreams(List.of(new ConfiguredAirbyteStream().withStream(new io.airbyte.protocol.models.AirbyteStream() - .withName(STREAM_NAME) - .withNamespace(NAMESPACE_NAME) - .withJsonSchema(Jsons.deserialize(schema))))); - } - - private AirbyteMessage createRecordMessage(final JsonNode data) { - return new AirbyteMessage().withType(AirbyteMessage.Type.RECORD) - .withRecord(new AirbyteRecordMessage().withStream(STREAM_NAME).withNamespace(NAMESPACE_NAME).withData(data)); - } - - } - -} diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/converters/OperationsConverter.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/converters/OperationsConverter.java index 22c67fc1792..aadeb2ccf1f 100644 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/converters/OperationsConverter.java +++ b/airbyte-commons-server/src/main/java/io/airbyte/commons/converters/OperationsConverter.java @@ -9,12 +9,8 @@ import com.google.common.base.Preconditions; import io.airbyte.api.model.generated.OperationRead; import io.airbyte.api.model.generated.OperatorConfiguration; -import io.airbyte.api.model.generated.OperatorNormalization.OptionEnum; import io.airbyte.api.model.generated.OperatorWebhookDbtCloud; import io.airbyte.commons.enums.Enums; -import io.airbyte.config.OperatorDbt; -import io.airbyte.config.OperatorNormalization; -import io.airbyte.config.OperatorNormalization.Option; import io.airbyte.config.OperatorWebhook; import io.airbyte.config.StandardSyncOperation; import io.airbyte.config.StandardSyncOperation.OperatorType; @@ -34,26 +30,6 @@ public static void populateOperatorConfigFromApi(final OperatorConfiguration ope final StandardWorkspace standardWorkspace) { standardSyncOperation.withOperatorType(Enums.convertTo(operatorConfig.getOperatorType(), OperatorType.class)); switch (operatorConfig.getOperatorType()) { - case NORMALIZATION -> { - Preconditions.checkArgument(operatorConfig.getNormalization() != null); - standardSyncOperation.withOperatorNormalization(new OperatorNormalization() - .withOption(Enums.convertTo(operatorConfig.getNormalization().getOption(), Option.class))); - // Null out the other configs, since it's mutually exclusive. We need to do this if it's an update. 
- standardSyncOperation.withOperatorDbt(null); - standardSyncOperation.withOperatorWebhook(null); - } - case DBT -> { - Preconditions.checkArgument(operatorConfig.getDbt() != null); - standardSyncOperation.withOperatorDbt(new OperatorDbt() - .withGitRepoUrl(operatorConfig.getDbt().getGitRepoUrl()) - .withGitRepoBranch(operatorConfig.getDbt().getGitRepoBranch()) - .withDockerImage(operatorConfig.getDbt().getDockerImage()) - .withDbtArguments(operatorConfig.getDbt().getDbtArguments())); - // Null out the other configs, since they're mutually exclusive. We need to do this if it's an - // update. - standardSyncOperation.withOperatorNormalization(null); - standardSyncOperation.withOperatorWebhook(null); - } case WEBHOOK -> { Preconditions.checkArgument(operatorConfig.getWebhook() != null); // TODO(mfsiega-airbyte): check that the webhook config id references a real webhook config. @@ -66,9 +42,6 @@ public static void populateOperatorConfigFromApi(final OperatorConfiguration ope }); standardSyncOperation.withOperatorWebhook(webhookOperatorFromConfig(operatorConfig.getWebhook(), customDbtHost)); - // Null out the other configs, since it's mutually exclusive. We need to do this if it's an update. - standardSyncOperation.withOperatorNormalization(null); - standardSyncOperation.withOperatorDbt(null); } } } @@ -85,19 +58,6 @@ public static OperationRead operationReadFromPersistedOperation(final StandardSy .name(standardSyncOperation.getName()); } switch (standardSyncOperation.getOperatorType()) { - case NORMALIZATION -> { - Preconditions.checkArgument(standardSyncOperation.getOperatorNormalization() != null); - operatorConfiguration.normalization(new io.airbyte.api.model.generated.OperatorNormalization() - .option(Enums.convertTo(standardSyncOperation.getOperatorNormalization().getOption(), OptionEnum.class))); - } - case DBT -> { - Preconditions.checkArgument(standardSyncOperation.getOperatorDbt() != null); - operatorConfiguration.dbt(new io.airbyte.api.model.generated.OperatorDbt() - .gitRepoUrl(standardSyncOperation.getOperatorDbt().getGitRepoUrl()) - .gitRepoBranch(standardSyncOperation.getOperatorDbt().getGitRepoBranch()) - .dockerImage(standardSyncOperation.getOperatorDbt().getDockerImage()) - .dbtArguments(standardSyncOperation.getOperatorDbt().getDbtArguments())); - } case WEBHOOK -> { Preconditions.checkArgument(standardSyncOperation.getOperatorWebhook() != null); operatorConfiguration.webhook(webhookOperatorFromPersistence(standardSyncOperation.getOperatorWebhook())); diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/converters/ApiPojoConverters.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/converters/ApiPojoConverters.java index 79f9057926a..0e7bdced167 100644 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/converters/ApiPojoConverters.java +++ b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/converters/ApiPojoConverters.java @@ -20,7 +20,6 @@ import io.airbyte.api.model.generated.JobType; import io.airbyte.api.model.generated.JobTypeResourceLimit; import io.airbyte.api.model.generated.NonBreakingChangesPreference; -import io.airbyte.api.model.generated.NormalizationDestinationDefinitionConfig; import io.airbyte.api.model.generated.ReleaseStage; import io.airbyte.api.model.generated.ResourceRequirements; import io.airbyte.api.model.generated.SchemaChangeBackfillPreference; @@ -36,6 +35,9 @@ import io.airbyte.config.StateWrapper; import io.airbyte.config.helpers.StateMessageHelper; import 
java.time.LocalDate; +import java.time.OffsetDateTime; +import java.time.ZoneOffset; +import java.util.Date; import java.util.Optional; import java.util.UUID; import java.util.stream.Collectors; @@ -137,17 +139,6 @@ public static ResourceRequirements resourceRequirementsToApi(final io.airbyte.co .memoryLimit(resourceReqs.getMemoryLimit()); } - public static NormalizationDestinationDefinitionConfig normalizationDestinationDefinitionConfigToApi(final io.airbyte.config.NormalizationDestinationDefinitionConfig normalizationDestinationDefinitionConfig) { - if (normalizationDestinationDefinitionConfig == null) { - return new NormalizationDestinationDefinitionConfig().supported(false); - } - return new NormalizationDestinationDefinitionConfig() - .supported(true) - .normalizationRepository(normalizationDestinationDefinitionConfig.getNormalizationRepository()) - .normalizationTag(normalizationDestinationDefinitionConfig.getNormalizationTag()) - .normalizationIntegrationType(normalizationDestinationDefinitionConfig.getNormalizationIntegrationType()); - } - public static ConnectionRead internalToConnectionRead(final StandardSync standardSync) { final ConnectionRead connectionRead = new ConnectionRead() .connectionId(standardSync.getConnectionId()) @@ -278,6 +269,13 @@ public static LocalDate toLocalDate(final String date) { return LocalDate.parse(date); } + public static OffsetDateTime toOffsetDateTime(Date date) { + if (date == null) { + return null; + } + return date.toInstant().atOffset(ZoneOffset.UTC); + } + public static ConnectionScheduleDataBasicSchedule.TimeUnitEnum toApiBasicScheduleTimeUnit(final BasicSchedule.TimeUnit timeUnit) { return Enums.convertTo(timeUnit, ConnectionScheduleDataBasicSchedule.TimeUnitEnum.class); } diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/converters/JobConverter.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/converters/JobConverter.java index a991d431567..15f56d2e473 100644 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/converters/JobConverter.java +++ b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/converters/JobConverter.java @@ -6,7 +6,6 @@ import io.airbyte.api.model.generated.AttemptFailureSummary; import io.airbyte.api.model.generated.AttemptInfoRead; -import io.airbyte.api.model.generated.AttemptNormalizationStatusRead; import io.airbyte.api.model.generated.AttemptRead; import io.airbyte.api.model.generated.AttemptStats; import io.airbyte.api.model.generated.AttemptStatus; @@ -47,7 +46,6 @@ import io.airbyte.config.helpers.LogConfigs; import io.airbyte.featureflag.FeatureFlagClient; import io.airbyte.persistence.job.models.Attempt; -import io.airbyte.persistence.job.models.AttemptNormalizationStatus; import io.airbyte.persistence.job.models.Job; import jakarta.annotation.Nullable; import jakarta.inject.Singleton; @@ -304,15 +302,6 @@ public SynchronousJobRead getSynchronousJobRead(final SynchronousJobMetadata met .failureReason(getFailureReason(metadata.getFailureReason(), TimeUnit.SECONDS.toMillis(metadata.getEndedAt()))); } - public static AttemptNormalizationStatusRead convertAttemptNormalizationStatus( - final AttemptNormalizationStatus databaseStatus) { - return new AttemptNormalizationStatusRead() - .attemptNumber(databaseStatus.attemptNumber()) - .hasRecordsCommitted(!databaseStatus.recordsCommitted().isEmpty()) - .recordsCommitted(databaseStatus.recordsCommitted().orElse(0L)) - .hasNormalizationFailed(databaseStatus.normalizationFailed()); - } - private 
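The new ApiPojoConverters.toOffsetDateTime above normalizes legacy java.util.Date values to UTC for API responses (used for lastPublished below). A hypothetical unit check of the expected behavior, not part of this diff:

```java
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNull;

import io.airbyte.commons.server.converters.ApiPojoConverters;
import java.time.Instant;
import java.time.OffsetDateTime;
import java.util.Date;
import org.junit.jupiter.api.Test;

class ApiPojoConvertersDateTest {

  // The conversion is pinned to UTC and is null-safe.
  @Test
  void toOffsetDateTimePinsUtcAndHandlesNull() {
    final Date date = Date.from(Instant.parse("2024-05-01T12:00:00Z"));
    assertEquals(OffsetDateTime.parse("2024-05-01T12:00:00Z"), ApiPojoConverters.toOffsetDateTime(date));
    assertNull(ApiPojoConverters.toOffsetDateTime(null));
  }

}
```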
static List extractEnabledStreams(final Job job) { final var configuredCatalog = new JobConfigProxy(job.getConfig()).getConfiguredCatalog(); return configuredCatalog != null diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/ActorDefinitionVersionHandler.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/ActorDefinitionVersionHandler.java index 8d90083c270..4b4224c7aa6 100644 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/ActorDefinitionVersionHandler.java +++ b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/ActorDefinitionVersionHandler.java @@ -34,7 +34,6 @@ import jakarta.inject.Inject; import jakarta.inject.Singleton; import java.io.IOException; -import java.util.Objects; import java.util.Optional; import java.util.UUID; @@ -129,11 +128,11 @@ ActorDefinitionVersionRead createActorDefinitionVersionRead(final ActorDefinitio final ActorDefinitionVersionRead advRead = new ActorDefinitionVersionRead() .dockerRepository(actorDefinitionVersion.getDockerRepository()) .dockerImageTag(actorDefinitionVersion.getDockerImageTag()) - .supportsDbt(Objects.requireNonNullElse(actorDefinitionVersion.getSupportsDbt(), false)) .supportsRefreshes(actorDefinitionVersion.getSupportsRefreshes()) - .normalizationConfig(ApiPojoConverters.normalizationDestinationDefinitionConfigToApi(actorDefinitionVersion.getNormalizationConfig())) .supportState(toApiSupportState(actorDefinitionVersion.getSupportState())) .supportLevel(toApiSupportLevel(actorDefinitionVersion.getSupportLevel())) + .cdkVersion(actorDefinitionVersion.getCdkVersion()) + .lastPublished(ApiPojoConverters.toOffsetDateTime(actorDefinitionVersion.getLastPublished())) .isVersionOverrideApplied(versionWithOverrideStatus.isOverrideApplied()); final Optional breakingChanges = diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/AttemptHandler.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/AttemptHandler.java index e48e472975a..776bef511f6 100644 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/AttemptHandler.java +++ b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/AttemptHandler.java @@ -11,6 +11,7 @@ import io.airbyte.api.model.generated.InternalOperationResult; import io.airbyte.api.model.generated.SaveAttemptSyncConfigRequestBody; import io.airbyte.api.model.generated.SaveStatsRequestBody; +import io.airbyte.api.model.generated.SaveStreamAttemptMetadataRequestBody; import io.airbyte.api.model.generated.SetWorkflowInAttemptRequestBody; import io.airbyte.commons.json.Jsons; import io.airbyte.commons.server.converters.ApiPojoConverters; @@ -38,6 +39,8 @@ import io.airbyte.data.exceptions.ConfigNotFoundException; import io.airbyte.data.services.ConnectionService; import io.airbyte.data.services.DestinationService; +import io.airbyte.data.services.StreamAttemptMetadata; +import io.airbyte.data.services.StreamAttemptMetadataService; import io.airbyte.featureflag.Connection; import io.airbyte.featureflag.EnableResumableFullRefresh; import io.airbyte.featureflag.FeatureFlagClient; @@ -78,6 +81,7 @@ public class AttemptHandler { private final ConnectionService connectionService; private final DestinationService destinationService; private final ActorDefinitionVersionHelper actorDefinitionVersionHelper; + private final StreamAttemptMetadataService streamAttemptMetadataService; public AttemptHandler(final JobPersistence 
jobPersistence, final StatePersistence statePersistence, @@ -88,7 +92,8 @@ public AttemptHandler(final JobPersistence jobPersistence, final GenerationBumper generationBumper, final ConnectionService connectionService, final DestinationService destinationService, - final ActorDefinitionVersionHelper actorDefinitionVersionHelper) { + final ActorDefinitionVersionHelper actorDefinitionVersionHelper, + final StreamAttemptMetadataService streamAttemptMetadataService) { this.jobPersistence = jobPersistence; this.statePersistence = statePersistence; this.jobConverter = jobConverter; @@ -99,6 +104,7 @@ public AttemptHandler(final JobPersistence jobPersistence, this.connectionService = connectionService; this.destinationService = destinationService; this.actorDefinitionVersionHelper = actorDefinitionVersionHelper; + this.streamAttemptMetadataService = streamAttemptMetadataService; } public CreateNewAttemptNumberResponse createNewAttemptNumber(final long jobId) @@ -292,6 +298,25 @@ public InternalOperationResult saveStats(final SaveStatsRequestBody requestBody) return new InternalOperationResult().succeeded(true); } + public InternalOperationResult saveStreamMetadata(final SaveStreamAttemptMetadataRequestBody requestBody) { + try { + streamAttemptMetadataService.upsertStreamAttemptMetadata( + requestBody.getJobId(), + requestBody.getAttemptNumber(), + requestBody.getStreamMetadata().stream().map( + (s) -> new StreamAttemptMetadata( + s.getStreamName(), + s.getStreamNamespace(), + s.getWasBackfilled(), + s.getWasResumed())) + .toList()); + return new InternalOperationResult().succeeded(true); + } catch (final Exception e) { + LOGGER.error("failed to save stream metadata for job:{} attempt:{}", requestBody.getJobId(), requestBody.getAttemptNumber(), e); + return new InternalOperationResult().succeeded(false); + } + } + public InternalOperationResult saveSyncConfig(final SaveAttemptSyncConfigRequestBody requestBody) { try { jobPersistence.writeAttemptSyncConfig( diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/ConnectionsHandler.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/ConnectionsHandler.java index ea99adeeb7f..ee14678e72e 100644 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/ConnectionsHandler.java +++ b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/ConnectionsHandler.java @@ -23,10 +23,16 @@ import io.airbyte.api.model.generated.ConnectionAutoPropagateSchemaChange; import io.airbyte.api.model.generated.ConnectionCreate; import io.airbyte.api.model.generated.ConnectionDataHistoryRequestBody; +import io.airbyte.api.model.generated.ConnectionEventIdRequestBody; +import io.airbyte.api.model.generated.ConnectionEventList; +import io.airbyte.api.model.generated.ConnectionEventType; +import io.airbyte.api.model.generated.ConnectionEventWithDetails; +import io.airbyte.api.model.generated.ConnectionEventsRequestBody; import io.airbyte.api.model.generated.ConnectionLastJobPerStreamReadItem; import io.airbyte.api.model.generated.ConnectionLastJobPerStreamRequestBody; import io.airbyte.api.model.generated.ConnectionRead; import io.airbyte.api.model.generated.ConnectionReadList; +import io.airbyte.api.model.generated.ConnectionStatus; import io.airbyte.api.model.generated.ConnectionStatusRead; import io.airbyte.api.model.generated.ConnectionStatusesRequestBody; import io.airbyte.api.model.generated.ConnectionStreamHistoryReadItem; @@ -44,10 +50,12 @@ import 
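An illustrative call into the new saveStreamMetadata handler above; the field values are invented, and the generated item type name (StreamAttemptMetadataItem) is an assumption that may differ from the real API model:

```java
// Hypothetical usage sketch, not part of this diff.
final SaveStreamAttemptMetadataRequestBody request = new SaveStreamAttemptMetadataRequestBody()
    .jobId(123L)
    .attemptNumber(0)
    .streamMetadata(List.of(
        new StreamAttemptMetadataItem() // assumed generated model class name
            .streamName("users")
            .streamNamespace("public")
            .wasBackfilled(false)
            .wasResumed(true)));

final InternalOperationResult result = attemptHandler.saveStreamMetadata(request);
// result.getSucceeded() is false only if the upsert threw; the handler logs and
// swallows the exception rather than propagating it.
```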
io.airbyte.api.model.generated.JobWithAttemptsRead; import io.airbyte.api.model.generated.ListConnectionsForWorkspacesRequestBody; import io.airbyte.api.model.generated.NonBreakingChangesPreference; +import io.airbyte.api.model.generated.SourceDiscoverSchemaRead; import io.airbyte.api.model.generated.StreamDescriptor; import io.airbyte.api.model.generated.StreamStats; import io.airbyte.api.model.generated.StreamTransform; import io.airbyte.api.model.generated.StreamTransform.TransformTypeEnum; +import io.airbyte.api.model.generated.UserReadInConnectionEvent; import io.airbyte.api.model.generated.WorkspaceIdRequestBody; import io.airbyte.api.problems.model.generated.ProblemMessageData; import io.airbyte.api.problems.throwable.generated.UnexpectedProblem; @@ -86,14 +94,19 @@ import io.airbyte.config.StandardSync.Status; import io.airbyte.config.StandardWorkspace; import io.airbyte.config.StreamSyncStats; +import io.airbyte.config.User; import io.airbyte.config.helpers.ScheduleHelpers; import io.airbyte.config.persistence.ActorDefinitionVersionHelper; import io.airbyte.config.persistence.ConfigNotFoundException; import io.airbyte.config.persistence.ConfigRepository; import io.airbyte.config.persistence.StreamGenerationRepository; +import io.airbyte.config.persistence.UserPersistence; import io.airbyte.config.persistence.domain.Generation; import io.airbyte.config.persistence.helper.CatalogGenerationSetter; +import io.airbyte.data.repositories.entities.ConnectionTimelineEvent; +import io.airbyte.data.services.ConnectionTimelineEventService; import io.airbyte.data.services.StreamStatusesService; +import io.airbyte.data.services.shared.ConnectionEvent; import io.airbyte.featureflag.CheckWithCatalog; import io.airbyte.featureflag.FeatureFlagClient; import io.airbyte.featureflag.Workspace; @@ -148,6 +161,8 @@ public class ConnectionsHandler { private static final Logger LOGGER = LoggerFactory.getLogger(ConnectionsHandler.class); + public static final int DEFAULT_PAGE_SIZE = 20; + public static final int DEFAULT_ROW_OFFSET = 0; private final JobPersistence jobPersistence; private final ConfigRepository configRepository; @@ -169,6 +184,8 @@ public class ConnectionsHandler { private final CatalogValidator catalogValidator; private final NotificationHelper notificationHelper; private final StreamStatusesService streamStatusesService; + private final ConnectionTimelineEventService connectionTimelineEventService; + private final UserPersistence userPersistence; @Inject public ConnectionsHandler(final StreamRefreshesHandler streamRefreshesHandler, @@ -189,7 +206,9 @@ public ConnectionsHandler(final StreamRefreshesHandler streamRefreshesHandler, final CatalogGenerationSetter catalogGenerationSetter, final CatalogValidator catalogValidator, final NotificationHelper notificationHelper, - final StreamStatusesService streamStatusesService) { + final StreamStatusesService streamStatusesService, + final ConnectionTimelineEventService connectionTimelineEventService, + final UserPersistence userPersistence) { this.jobPersistence = jobPersistence; this.configRepository = configRepository; this.uuidGenerator = uuidGenerator; @@ -209,6 +228,8 @@ public ConnectionsHandler(final StreamRefreshesHandler streamRefreshesHandler, this.catalogValidator = catalogValidator; this.notificationHelper = notificationHelper; this.streamStatusesService = streamStatusesService; + this.connectionTimelineEventService = connectionTimelineEventService; + this.userPersistence = userPersistence; } /** @@ -784,6 +805,17 @@ public 
CatalogDiff getDiff(final AirbyteCatalog oldCatalog, final AirbyteCatalog .toList()); } + public CatalogDiff getDiff(final ConnectionRead connectionRead, final AirbyteCatalog discoveredCatalog) + throws JsonValidationException, ConfigNotFoundException, IOException { + + final var catalogWithSelectedFieldsAnnotated = connectionRead.getSyncCatalog(); + final var configuredCatalog = CatalogConverter.toConfiguredProtocol(catalogWithSelectedFieldsAnnotated); + + final var rawCatalog = getConnectionAirbyteCatalog(connectionRead.getConnectionId()); + + return getDiff(rawCatalog.orElse(catalogWithSelectedFieldsAnnotated), discoveredCatalog, configuredCatalog); + } + /** * Returns the list of the streamDescriptor that have their config updated. * @@ -1001,6 +1033,86 @@ public List<ConnectionStatusRead> getConnectionStatuses( return result; } + private List<ConnectionEvent.Type> convertConnectionType(final List<ConnectionEventType> eventTypes) { + if (eventTypes == null) { + return null; + } + return eventTypes.stream().map(eventType -> ConnectionEvent.Type.valueOf(eventType.name())).collect(Collectors.toList()); + } + + private io.airbyte.api.model.generated.ConnectionEvent convertConnectionEvent(final ConnectionTimelineEvent event) { + final io.airbyte.api.model.generated.ConnectionEvent connectionEvent = new io.airbyte.api.model.generated.ConnectionEvent(); + connectionEvent.id(event.getId()); + connectionEvent.eventType(ConnectionEventType.fromString(event.getEventType())); + connectionEvent.createdAt(event.getCreatedAt().toEpochSecond()); + connectionEvent.connectionId(event.getConnectionId()); + connectionEvent.summary(event.getSummary()); + if (event.getUserId() != null) { + connectionEvent.user(getUserRead(event.getUserId())); + } + return connectionEvent; + } + + private UserReadInConnectionEvent getUserRead(final UUID userId) { + try { + final User user = userPersistence.getUser(userId).orElseThrow(); + return new UserReadInConnectionEvent() + .id(user.getUserId()) + .name(user.getName()) + .email(user.getEmail()); + } catch (final Exception e) { + LOGGER.error("Error while retrieving user information.", e); + return null; + } + } + + private ConnectionEventList convertConnectionEventList(final List<ConnectionTimelineEvent> events) { + final List<io.airbyte.api.model.generated.ConnectionEvent> eventsRead = + events.stream().map(event -> convertConnectionEvent(event)).collect(Collectors.toList()); + return new ConnectionEventList().events(eventsRead); + } + + public ConnectionEventList listConnectionEvents(final ConnectionEventsRequestBody connectionEventsRequestBody) { + // 1. set page size and offset + final int pageSize = (connectionEventsRequestBody.getPagination() != null && connectionEventsRequestBody.getPagination().getPageSize() != null) + ? connectionEventsRequestBody.getPagination().getPageSize() + : DEFAULT_PAGE_SIZE; + final int rowOffset = (connectionEventsRequestBody.getPagination() != null && connectionEventsRequestBody.getPagination().getRowOffset() != null) + ? connectionEventsRequestBody.getPagination().getRowOffset() + : DEFAULT_ROW_OFFSET; + // 2. 
get list of events + final List events = connectionTimelineEventService.listEvents( + connectionEventsRequestBody.getConnectionId(), + convertConnectionType(connectionEventsRequestBody.getEventTypes()), + connectionEventsRequestBody.getCreatedAtStart(), + connectionEventsRequestBody.getCreatedAtEnd(), + pageSize, + rowOffset); + return convertConnectionEventList(events); + } + + public ConnectionEventWithDetails getConnectionEvent(final ConnectionEventIdRequestBody connectionEventIdRequestBody) { + final ConnectionTimelineEvent eventData = connectionTimelineEventService.getEvent(connectionEventIdRequestBody.getConnectionEventId()); + return hydrateConnectionEvent(eventData); + } + + private ConnectionEventWithDetails hydrateConnectionEvent(final ConnectionTimelineEvent event) { + final ConnectionEventWithDetails connectionEventWithDetails = new ConnectionEventWithDetails(); + connectionEventWithDetails.id(event.getId()); + connectionEventWithDetails.connectionId(event.getConnectionId()); + // enforce event type consistency + connectionEventWithDetails.eventType(ConnectionEventType.fromString(event.getEventType())); + connectionEventWithDetails.summary(event.getSummary()); + // TODO: implement details generation. Note this could be a huge json if conn_settings changed or + // schema changed + connectionEventWithDetails.details(null); + connectionEventWithDetails.createdAt(event.getCreatedAt().toEpochSecond()); + if (event.getUserId() != null) { + connectionEventWithDetails.user(getUserRead(event.getUserId())); + } + return connectionEventWithDetails; + } + /** * Returns data history for the given connection for requested number of jobs. * @@ -1009,17 +1121,17 @@ public List getConnectionStatuses( */ public List getConnectionDataHistory(final ConnectionDataHistoryRequestBody connectionDataHistoryRequestBody) { - List jobs; + final List jobs; try { jobs = jobPersistence.listJobs( Set.of(ConfigType.SYNC), Set.of(JobStatus.SUCCEEDED, JobStatus.FAILED), connectionDataHistoryRequestBody.getConnectionId().toString(), connectionDataHistoryRequestBody.getNumberOfJobs()); - } catch (IOException e) { + } catch (final IOException e) { throw new RuntimeException(e); } - Map jobIdToJobRead = StatsAggregationHelper.getJobIdToJobWithAttemptsReadMap(jobs, jobPersistence); + final Map jobIdToJobRead = StatsAggregationHelper.getJobIdToJobWithAttemptsReadMap(jobs, jobPersistence); final List result = new ArrayList<>(); jobs.forEach((job) -> { @@ -1247,10 +1359,10 @@ public List getConnectionLastJobPerStream(fi streamStatusesService.getLastJobIdWithStatsByStream(req.getConnectionId()); // retrieve the full job information for each of those latest jobs - List jobs; + final List jobs; try { jobs = jobPersistence.listJobsLight(new HashSet<>(streamToLastJobIdWithStats.values())); - } catch (IOException e) { + } catch (final IOException e) { throw new UnexpectedProblem("Failed to retrieve the latest job per stream", new ProblemMessageData().message(e.getMessage())); } @@ -1270,6 +1382,60 @@ public List getConnectionLastJobPerStream(fi .collect(Collectors.toList()); } + /** + * For a given discovered catalog and connection, calculate a catalog diff, determine if there are + * breaking changes then disable the connection if necessary. 
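Note that the timeline event converters above surface createdAt as epoch seconds rather than an ISO timestamp; for illustration:

```java
import java.time.OffsetDateTime;

// createdAt is exposed to the API as epoch seconds:
final OffsetDateTime createdAt = OffsetDateTime.parse("2024-05-01T12:00:00Z");
final long apiCreatedAt = createdAt.toEpochSecond(); // 1714564800
```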
+ */ + public SourceDiscoverSchemaRead diffCatalogAndConditionallyDisable(final UUID connectionId, final UUID discoveredCatalogId) + throws JsonValidationException, ConfigNotFoundException, IOException { + final var connectionRead = getConnection(connectionId); + final var source = configRepository.getSourceConnection(connectionRead.getSourceId()); + final var sourceDef = configRepository.getStandardSourceDefinition(source.getSourceDefinitionId()); + final var sourceVersion = actorDefinitionVersionHelper.getSourceVersion(sourceDef, source.getWorkspaceId(), connectionRead.getSourceId()); + + final var discoveredCatalog = retrieveDiscoveredCatalog(discoveredCatalogId, sourceVersion); + + final var diff = getDiff(connectionRead, discoveredCatalog); + final boolean containsBreakingChange = AutoPropagateSchemaChangeHelper.containsBreakingChange(diff); + + if (containsBreakingChange) { + MetricClientFactory.getMetricClient().count(OssMetricsRegistry.BREAKING_SCHEMA_CHANGE_DETECTED, 1, + new MetricAttribute(MetricTags.CONNECTION_ID, connectionId.toString())); + } else { + MetricClientFactory.getMetricClient().count(OssMetricsRegistry.NON_BREAKING_SCHEMA_CHANGE_DETECTED, 1, + new MetricAttribute(MetricTags.CONNECTION_ID, connectionId.toString())); + } + + final var patch = new ConnectionUpdate() + .breakingChange(containsBreakingChange) + .connectionId(connectionId); + + final var disableForNonBreakingChange = (connectionRead.getNonBreakingChangesPreference() == NonBreakingChangesPreference.DISABLE); + + if (containsBreakingChange || (disableForNonBreakingChange && AutoPropagateSchemaChangeHelper.containsChanges(diff))) { + patch.status(ConnectionStatus.INACTIVE); + } + + final var updated = updateConnection(patch); + + return new SourceDiscoverSchemaRead() + .breakingChange(containsBreakingChange) + .catalogDiff(diff) + .catalog(discoveredCatalog) + .catalogId(discoveredCatalogId) + .connectionStatus(updated.getStatus()); + } + + private AirbyteCatalog retrieveDiscoveredCatalog(final UUID catalogId, final ActorDefinitionVersion sourceVersion) + throws ConfigNotFoundException, IOException { + + final ActorCatalog catalog = configRepository.getActorCatalogById(catalogId); + final io.airbyte.protocol.models.AirbyteCatalog persistenceCatalog = Jsons.object( + catalog.getCatalog(), + io.airbyte.protocol.models.AirbyteCatalog.class); + return CatalogConverter.toApi(persistenceCatalog, sourceVersion); + } + /** * Build a ConnectionLastJobPerStreamReadItem from a stream descriptor and a job read. 
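The gating logic in diffCatalogAndConditionallyDisable above boils down to a two-input decision; distilled as a sketch (the helper name is illustrative, not from the diff):

```java
// A connection is paused when the diff contains a breaking change, or when the
// connection prefers DISABLE on non-breaking changes and the diff is non-empty.
static boolean shouldDisable(final boolean containsBreakingChange,
                             final NonBreakingChangesPreference preference,
                             final boolean containsChanges) {
  return containsBreakingChange
      || (preference == NonBreakingChangesPreference.DISABLE && containsChanges);
}
```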
This method * memoizes the stat-by-stream map for each job to avoid redundant computation in the case where diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/DestinationDefinitionsHandler.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/DestinationDefinitionsHandler.java index 9ae60d856d2..3e7aaba574d 100644 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/DestinationDefinitionsHandler.java +++ b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/DestinationDefinitionsHandler.java @@ -55,7 +55,6 @@ import java.util.List; import java.util.Map; import java.util.Map.Entry; -import java.util.Objects; import java.util.UUID; import java.util.function.Supplier; import java.util.stream.Collectors; @@ -113,10 +112,10 @@ DestinationDefinitionRead buildDestinationDefinitionRead(final StandardDestinati .supportLevel(ApiPojoConverters.toApiSupportLevel(destinationVersion.getSupportLevel())) .releaseStage(ApiPojoConverters.toApiReleaseStage(destinationVersion.getReleaseStage())) .releaseDate(ApiPojoConverters.toLocalDate(destinationVersion.getReleaseDate())) + .lastPublished(ApiPojoConverters.toOffsetDateTime(destinationVersion.getLastPublished())) + .cdkVersion(destinationVersion.getCdkVersion()) + .metrics(standardDestinationDefinition.getMetrics()) .custom(standardDestinationDefinition.getCustom()) - .supportsDbt(Objects.requireNonNullElse(destinationVersion.getSupportsDbt(), false)) - .normalizationConfig( - ApiPojoConverters.normalizationDestinationDefinitionConfigToApi(destinationVersion.getNormalizationConfig())) .resourceRequirements(ApiPojoConverters.actorDefResourceReqsToApi(standardDestinationDefinition.getResourceRequirements())); } catch (final URISyntaxException | NullPointerException e) { throw new InternalServerKnownException("Unable to process retrieved latest destination definitions list", e); @@ -289,6 +288,7 @@ public DestinationDefinitionRead updateDestinationDefinition(final DestinationDe .withTombstone(currentDestination.getTombstone()) .withPublic(currentDestination.getPublic()) .withCustom(currentDestination.getCustom()) + .withMetrics(currentDestination.getMetrics()) .withResourceRequirements(updatedResourceReqs); final ActorDefinitionVersion newVersion = actorDefinitionHandlerHelper.defaultDefinitionVersionFromUpdate( diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/DestinationHandler.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/DestinationHandler.java index 5539cbb9821..c9cc5701e84 100644 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/DestinationHandler.java +++ b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/DestinationHandler.java @@ -41,6 +41,7 @@ import io.airbyte.config.secrets.JsonSecretsProcessor; import io.airbyte.data.helpers.ActorDefinitionVersionUpdater; import io.airbyte.data.services.DestinationService; +import io.airbyte.featureflag.DeleteSecretsWhenTombstoneActors; import io.airbyte.featureflag.FeatureFlagClient; import io.airbyte.featureflag.UseIconUrlInApiResponse; import io.airbyte.featureflag.Workspace; @@ -147,24 +148,35 @@ public void deleteDestination(final DestinationRead destination) connectionsHandler.deleteConnection(connectionRead.getConnectionId()); } - final JsonNode fullConfig; - try { - fullConfig = destinationService.getDestinationConnectionWithSecrets(destination.getDestinationId()).getConfiguration(); - } 
catch (final io.airbyte.data.exceptions.ConfigNotFoundException e) { - throw new ConfigNotFoundException(e.getType(), e.getConfigId()); - } final ConnectorSpecification spec = getSpecForDestinationId(destination.getDestinationDefinitionId(), destination.getWorkspaceId(), destination.getDestinationId()); - // persist - persistDestinationConnection( - destination.getName(), - destination.getDestinationDefinitionId(), - destination.getWorkspaceId(), - destination.getDestinationId(), - fullConfig, - true, - spec); + if (featureFlagClient.boolVariation(DeleteSecretsWhenTombstoneActors.INSTANCE, new Workspace(destination.getWorkspaceId().toString()))) { + try { + destinationService.tombstoneDestination( + destination.getName(), + destination.getWorkspaceId(), + destination.getDestinationId(), spec); + } catch (final io.airbyte.data.exceptions.ConfigNotFoundException e) { + throw new ConfigNotFoundException(e.getType(), e.getConfigId()); + } + } else { + final JsonNode fullConfig; + try { + fullConfig = destinationService.getDestinationConnectionWithSecrets(destination.getDestinationId()).getConfiguration(); + } catch (final io.airbyte.data.exceptions.ConfigNotFoundException e) { + throw new ConfigNotFoundException(e.getType(), e.getConfigId()); + } + // persist + persistDestinationConnection( + destination.getName(), + destination.getDestinationDefinitionId(), + destination.getWorkspaceId(), + destination.getDestinationId(), + fullConfig, + true, + spec); + } } public DestinationRead updateDestination(final DestinationUpdate destinationUpdate) diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/JobHistoryHandler.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/JobHistoryHandler.java index 440b904ddbf..91d2c19d26c 100644 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/JobHistoryHandler.java +++ b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/JobHistoryHandler.java @@ -11,7 +11,6 @@ import com.google.common.base.Preconditions; import datadog.trace.api.Trace; import io.airbyte.api.model.generated.AttemptInfoRead; -import io.airbyte.api.model.generated.AttemptNormalizationStatusReadList; import io.airbyte.api.model.generated.ConnectionIdRequestBody; import io.airbyte.api.model.generated.ConnectionRead; import io.airbyte.api.model.generated.ConnectionSyncProgressRead; @@ -101,9 +100,6 @@ public class JobHistoryHandler { private final TemporalClient temporalClient; private final FeatureFlagClient featureFlagClient; - private static final Set CONFIG_TYPE_SUPPORTING_PROGRESS = - Set.of(JobConfigType.SYNC, JobConfigType.REFRESH, JobConfigType.RESET_CONNECTION, JobConfigType.CLEAR); - public JobHistoryHandler(final JobPersistence jobPersistence, final WorkerEnvironment workerEnvironment, final LogConfigs logConfigs, @@ -377,7 +373,7 @@ public Optional getLatestRunningSyncJob(final UUID connectionId) throws } public ConnectionSyncProgressRead getConnectionSyncProgress(final ConnectionIdRequestBody connectionIdRequestBody) throws IOException { - final List jobs = jobPersistence.getRunningSyncJobForConnections(List.of(connectionIdRequestBody.getConnectionId())); + final List jobs = jobPersistence.getRunningJobForConnection(connectionIdRequestBody.getConnectionId()); final List jobReads = jobs.stream() .map(JobConverter::getJobWithAttemptsRead) @@ -385,8 +381,7 @@ public ConnectionSyncProgressRead getConnectionSyncProgress(final ConnectionIdRe hydrateWithStats(jobReads, jobs, 
featureFlagClient.boolVariation(HydrateAggregatedStats.INSTANCE, new Workspace(ANONYMOUS)), jobPersistence); - if (jobReads.isEmpty() || jobReads.getFirst() == null - || !CONFIG_TYPE_SUPPORTING_PROGRESS.contains(jobReads.getFirst().getJob().getConfigType())) { + if (jobReads.isEmpty() || jobReads.getFirst() == null) { return new ConnectionSyncProgressRead().connectionId(connectionIdRequestBody.getConnectionId()).streams(Collections.emptyList()); } final JobWithAttemptsRead runningJob = jobReads.getFirst(); @@ -409,7 +404,7 @@ public ConnectionSyncProgressRead getConnectionSyncProgress(final ConnectionIdRe streamToTrackPerConfigType.put(JobConfigType.REFRESH, streamsToRefresh); streamToTrackPerConfigType.put(JobConfigType.SYNC, enabledStreams.stream().filter(s -> !streamsToRefresh.contains(s)).toList()); } else if (runningJobConfigType.equals(JobConfigType.RESET_CONNECTION) || runningJobConfigType.equals(JobConfigType.CLEAR)) { - streamToTrackPerConfigType.put(JobConfigType.CLEAR, runningJob.getJob().getResetConfig().getStreamsToReset()); + streamToTrackPerConfigType.put(runningJobConfigType, runningJob.getJob().getResetConfig().getStreamsToReset()); } final List finalStreamsWithStats = streamToTrackPerConfigType.entrySet().stream() @@ -438,7 +433,7 @@ public ConnectionSyncProgressRead getConnectionSyncProgress(final ConnectionIdRe return new ConnectionSyncProgressRead() .connectionId(connectionIdRequestBody.getConnectionId()) .jobId(runningJob.getJob().getId()) - .syncStartedAt(runningJob.getJob().getStartedAt()) + .syncStartedAt(runningJob.getJob().getCreatedAt()) .bytesEmitted(aggregatedStats == null ? null : aggregatedStats.getBytesEmitted()) .bytesCommitted(aggregatedStats == null ? null : aggregatedStats.getBytesCommitted()) .recordsEmitted(aggregatedStats == null ? 
null : aggregatedStats.getRecordsEmitted()) @@ -455,12 +450,6 @@ public List getLatestSyncJobsForConnections(final List c return jobPersistence.getLastSyncJobForConnections(connectionIds); } - public AttemptNormalizationStatusReadList getAttemptNormalizationStatuses(final JobIdRequestBody jobIdRequestBody) throws IOException { - return new AttemptNormalizationStatusReadList() - .attemptNormalizationStatuses(jobPersistence.getAttemptNormalizationStatusesForJob(jobIdRequestBody.getId()).stream() - .map(JobConverter::convertAttemptNormalizationStatus).collect(Collectors.toList())); - } - public List getRunningSyncJobForConnections(final List connectionIds) throws IOException { return jobPersistence.getRunningSyncJobForConnections(connectionIds).stream() .map(JobConverter::getJobRead) diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/JobInputHandler.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/JobInputHandler.java index 2bceb84571f..0a1e1ff5184 100644 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/JobInputHandler.java +++ b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/JobInputHandler.java @@ -21,7 +21,6 @@ import io.airbyte.commons.converters.ConfigReplacer; import io.airbyte.commons.converters.StateConverter; import io.airbyte.commons.features.FeatureFlags; -import io.airbyte.commons.helper.NormalizationInDestinationHelper; import io.airbyte.commons.json.Jsons; import io.airbyte.commons.server.converters.ApiPojoConverters; import io.airbyte.commons.server.handlers.helpers.ContextBuilder; @@ -54,16 +53,9 @@ import io.airbyte.config.persistence.ConfigRepository; import io.airbyte.featureflag.Connection; import io.airbyte.featureflag.Context; -import io.airbyte.featureflag.DestinationDefinition; import io.airbyte.featureflag.FeatureFlagClient; -import io.airbyte.featureflag.Multi; -import io.airbyte.featureflag.NormalizationInDestination; import io.airbyte.featureflag.Workspace; import io.airbyte.metrics.lib.ApmTraceUtils; -import io.airbyte.metrics.lib.MetricAttribute; -import io.airbyte.metrics.lib.MetricClientFactory; -import io.airbyte.metrics.lib.MetricTags; -import io.airbyte.metrics.lib.OssMetricsRegistry; import io.airbyte.persistence.job.JobPersistence; import io.airbyte.persistence.job.factory.OAuthConfigSupplier; import io.airbyte.persistence.job.models.IntegrationLauncherConfig; @@ -181,19 +173,6 @@ public Object getJobInput(final SyncInput input) { destination.getConfiguration()); attemptSyncConfig.setDestinationConfiguration(configInjector.injectConfig(destinationConfiguration, destination.getDestinationDefinitionId())); - final List normalizationInDestinationContext = List.of( - new DestinationDefinition(destination.getDestinationDefinitionId()), - new Workspace(destination.getWorkspaceId())); - - final var normalizationInDestinationMinSupportedVersion = featureFlagClient.stringVariation( - NormalizationInDestination.INSTANCE, new Multi(normalizationInDestinationContext)); - final var shouldNormalizeInDestination = NormalizationInDestinationHelper - .shouldNormalizeInDestination(config.getOperationSequence(), - config.getDestinationDockerImage(), - normalizationInDestinationMinSupportedVersion); - - reportNormalizationInDestinationMetrics(shouldNormalizeInDestination, config, connectionId); - final IntegrationLauncherConfig sourceLauncherConfig = getSourceIntegrationLauncherConfig( jobId, attempt, @@ -209,7 +188,7 @@ public Object getJobInput(final 
SyncInput input) { config, destinationVersion, attemptSyncConfig.getDestinationConfiguration(), - NormalizationInDestinationHelper.getAdditionalEnvironmentVariables(shouldNormalizeInDestination)); + Map.of()); final List featureFlagContext = new ArrayList<>(); featureFlagContext.add(new Workspace(config.getWorkspaceId())); @@ -232,7 +211,6 @@ public Object getJobInput(final SyncInput input) { .withSyncResourceRequirements(config.getSyncResourceRequirements()) .withConnectionId(connectionId) .withWorkspaceId(config.getWorkspaceId()) - .withNormalizeInDestinationContainer(shouldNormalizeInDestination) .withIsReset(JobConfig.ConfigType.RESET_CONNECTION.equals(jobConfigType)) .withConnectionContext(connectionContext); @@ -396,18 +374,6 @@ private Optional getCurrentConnectionState(final UUID connectionId) throw return Optional.of(StateMessageHelper.getState(internalState)); } - private void reportNormalizationInDestinationMetrics(final boolean shouldNormalizeInDestination, - final JobSyncConfig config, - final UUID connectionId) { - if (shouldNormalizeInDestination) { - MetricClientFactory.getMetricClient().count(OssMetricsRegistry.NORMALIZATION_IN_DESTINATION_CONTAINER, 1, - new MetricAttribute(MetricTags.CONNECTION_ID, connectionId.toString())); - } else if (NormalizationInDestinationHelper.normalizationStepRequired(config.getOperationSequence())) { - MetricClientFactory.getMetricClient().count(OssMetricsRegistry.NORMALIZATION_IN_NORMALIZATION_CONTAINER, 1, - new MetricAttribute(MetricTags.CONNECTION_ID, connectionId.toString())); - } - } - private IntegrationLauncherConfig getSourceIntegrationLauncherConfig(final long jobId, final int attempt, final UUID connectionId, @@ -442,13 +408,6 @@ private IntegrationLauncherConfig getDestinationIntegrationLauncherConfig(final final Map additionalEnviornmentVariables) throws IOException { final ConfigReplacer configReplacer = new ConfigReplacer(LOGGER); - final String destinationNormalizationDockerImage = destinationVersion.getNormalizationConfig() != null - ? destinationVersion.getNormalizationConfig().getNormalizationRepository() + ":" - + destinationVersion.getNormalizationConfig().getNormalizationTag() - : null; - final String normalizationIntegrationType = - destinationVersion.getNormalizationConfig() != null ? 
destinationVersion.getNormalizationConfig().getNormalizationIntegrationType() - : null; return new IntegrationLauncherConfig() .withJobId(String.valueOf(jobId)) @@ -458,9 +417,6 @@ private IntegrationLauncherConfig getDestinationIntegrationLauncherConfig(final .withDockerImage(config.getDestinationDockerImage()) .withProtocolVersion(config.getDestinationProtocolVersion()) .withIsCustomConnector(config.getIsDestinationCustomConnector()) - .withNormalizationDockerImage(destinationNormalizationDockerImage) - .withSupportsDbt(destinationVersion.getSupportsDbt()) - .withNormalizationIntegrationType(normalizationIntegrationType) .withAllowedHosts(configReplacer.getAllowedHosts(destinationVersion.getAllowedHosts(), destinationConfiguration)) .withAdditionalEnvironmentVariables(additionalEnviornmentVariables); } diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/JobsHandler.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/JobsHandler.java index 8f230523996..52a7266b12a 100644 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/JobsHandler.java +++ b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/JobsHandler.java @@ -86,8 +86,8 @@ public InternalOperationResult jobFailure(final JobFailureRequest input) { jobPersistence.failJob(jobId); final Job job = jobPersistence.getJob(jobId); - List<JobPersistence.AttemptStats> attemptStats = new ArrayList<>(); - for (Attempt attempt : job.getAttempts()) { + final List<JobPersistence.AttemptStats> attemptStats = new ArrayList<>(); + for (final Attempt attempt : job.getAttempts()) { attemptStats.add(jobPersistence.getAttemptStats(jobId, attempt.getAttemptNumber())); } if (job.getConfigType().equals(JobConfig.ConfigType.SYNC)) { @@ -126,19 +126,19 @@ public InternalOperationResult jobFailure(final JobFailureRequest input) { } } - private void reportIfLastFailedAttempt(Job job, UUID connectionId, SyncJobReportingContext jobContext) { - Optional<Attempt> lastFailedAttempt = job.getLastFailedAttempt(); + private void reportIfLastFailedAttempt(final Job job, final UUID connectionId, final SyncJobReportingContext jobContext) { + final Optional<Attempt> lastFailedAttempt = job.getLastFailedAttempt(); if (lastFailedAttempt.isPresent()) { - Attempt attempt = lastFailedAttempt.get(); - Optional<AttemptFailureSummary> failureSummaryOpt = attempt.getFailureSummary(); + final Attempt attempt = lastFailedAttempt.get(); + final Optional<AttemptFailureSummary> failureSummaryOpt = attempt.getFailureSummary(); if (failureSummaryOpt.isPresent()) { - AttemptFailureSummary failureSummary = failureSummaryOpt.get(); + final AttemptFailureSummary failureSummary = failureSummaryOpt.get(); AttemptConfigReportingContext attemptConfig = null; - Optional<AttemptSyncConfig> syncConfigOpt = attempt.getSyncConfig(); + final Optional<AttemptSyncConfig> syncConfigOpt = attempt.getSyncConfig(); if (syncConfigOpt.isPresent()) { - AttemptSyncConfig syncConfig = syncConfigOpt.get(); + final AttemptSyncConfig syncConfig = syncConfigOpt.get(); attemptConfig = new AttemptConfigReportingContext( syncConfig.getSourceConfiguration(), syncConfig.getDestinationConfiguration(), @@ -173,8 +173,8 @@ public InternalOperationResult jobSuccessWithAttemptNumber(final JobSuccessWithA final Job job = jobPersistence.getJob(jobId); jobCreationAndStatusUpdateHelper.emitJobToReleaseStagesMetric(OssMetricsRegistry.ATTEMPT_SUCCEEDED_BY_RELEASE_STAGE, job); - List<JobPersistence.AttemptStats> attemptStats = new ArrayList<>(); - for (Attempt attempt : job.getAttempts()) { + final List<JobPersistence.AttemptStats> attemptStats = new ArrayList<>(); + for (final Attempt attempt : job.getAttempts()) {
attemptStats.add(jobPersistence.getAttemptStats(jobId, attempt.getAttemptNumber())); } if (job.getConfigType().equals(JobConfig.ConfigType.SYNC)) { @@ -198,7 +198,7 @@ private void storeSyncSuccess(final Job job, final UUID connectionId, final List final LoadedStats stats = buildLoadedStats(job, attemptStats); final SyncSucceededEvent event = new SyncSucceededEvent(jobId, job.getCreatedAtInSecond(), job.getUpdatedAtInSecond(), stats.bytes, stats.records, job.getAttemptsCount()); - connectionEventService.writeEvent(connectionId, event); + connectionEventService.writeEvent(connectionId, event, null); } catch (final Exception e) { log.warn("Failed to persist timeline event for job: {}", jobId, e); } @@ -215,7 +215,7 @@ private void storeSyncFailure(final Job job, final UUID connectionId, final List final SyncFailedEvent event = new SyncFailedEvent(jobId, job.getCreatedAtInSecond(), job.getUpdatedAtInSecond(), stats.bytes, stats.records, job.getAttemptsCount(), firstFailureReasonOfLastAttempt); - connectionEventService.writeEvent(connectionId, event); + connectionEventService.writeEvent(connectionId, event, null); } catch (final Exception e) { log.warn("Failed to persist timeline event for job: {}", jobId, e); } @@ -310,8 +310,8 @@ public void persistJobCancellation(final UUID connectionId, final long jobId, fi jobPersistence.cancelJob(jobId); // post process final var job = jobPersistence.getJob(jobId); - List attemptStats = new ArrayList<>(); - for (Attempt attempt : job.getAttempts()) { + final List attemptStats = new ArrayList<>(); + for (final Attempt attempt : job.getAttempts()) { attemptStats.add(jobPersistence.getAttemptStats(jobId, attempt.getAttemptNumber())); } jobCreationAndStatusUpdateHelper.emitJobToReleaseStagesMetric(OssMetricsRegistry.JOB_CANCELLED_BY_RELEASE_STAGE, job); diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/OperationsHandler.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/OperationsHandler.java index 303ea74aafe..5e7f0537aab 100644 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/OperationsHandler.java +++ b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/OperationsHandler.java @@ -87,12 +87,6 @@ private StandardSyncOperation toStandardSyncOperation(final OperationCreate oper } private void validateOperation(final OperatorConfiguration operatorConfiguration) { - if ((io.airbyte.api.model.generated.OperatorType.NORMALIZATION).equals(operatorConfiguration.getOperatorType())) { - Preconditions.checkArgument(operatorConfiguration.getNormalization() != null); - } - if ((io.airbyte.api.model.generated.OperatorType.DBT).equals(operatorConfiguration.getOperatorType())) { - Preconditions.checkArgument(operatorConfiguration.getDbt() != null); - } if (io.airbyte.api.model.generated.OperatorType.WEBHOOK.equals(operatorConfiguration.getOperatorType())) { Preconditions.checkArgument(operatorConfiguration.getWebhook() != null); } diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/SchedulerHandler.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/SchedulerHandler.java index aca6e0681c2..fbda72ab6ae 100644 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/SchedulerHandler.java +++ b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/SchedulerHandler.java @@ -11,7 +11,6 @@ import com.google.common.annotations.VisibleForTesting; import 
com.google.common.base.Charsets; import com.google.common.collect.ImmutableSet; -import com.google.common.collect.Lists; import com.google.common.hash.HashFunction; import com.google.common.hash.Hashing; import io.airbyte.api.model.generated.CatalogDiff; @@ -57,12 +56,14 @@ import io.airbyte.commons.server.scheduler.EventRunner; import io.airbyte.commons.server.scheduler.SynchronousResponse; import io.airbyte.commons.server.scheduler.SynchronousSchedulerClient; +import io.airbyte.commons.server.support.CurrentUserService; import io.airbyte.commons.temporal.ErrorCode; import io.airbyte.commons.temporal.TemporalClient.ManualOperationResult; import io.airbyte.commons.version.Version; import io.airbyte.config.ActorCatalog; import io.airbyte.config.ActorDefinitionVersion; import io.airbyte.config.DestinationConnection; +import io.airbyte.config.JobConfig; import io.airbyte.config.JobTypeResourceLimit.JobType; import io.airbyte.config.NotificationSettings; import io.airbyte.config.ResourceRequirements; @@ -73,7 +74,6 @@ import io.airbyte.config.StandardDestinationDefinition; import io.airbyte.config.StandardSourceDefinition; import io.airbyte.config.StandardSync; -import io.airbyte.config.StandardSyncOperation; import io.airbyte.config.StandardWorkspace; import io.airbyte.config.WorkloadPriority; import io.airbyte.config.helpers.ResourceRequirementsUtils; @@ -84,11 +84,16 @@ import io.airbyte.config.persistence.domain.StreamRefresh; import io.airbyte.config.secrets.SecretsRepositoryWriter; import io.airbyte.config.secrets.persistence.RuntimeSecretPersistence; +import io.airbyte.data.services.ConnectionTimelineEventService; import io.airbyte.data.services.SecretPersistenceConfigService; import io.airbyte.data.services.WorkspaceService; +import io.airbyte.data.services.shared.SyncCancelledEvent; +import io.airbyte.data.services.shared.SyncStartedEvent; +import io.airbyte.featureflag.DiscoverPostprocessInTemporal; import io.airbyte.featureflag.FeatureFlagClient; import io.airbyte.featureflag.Organization; import io.airbyte.featureflag.UseRuntimeSecretPersistence; +import io.airbyte.featureflag.Workspace; import io.airbyte.metrics.lib.MetricAttribute; import io.airbyte.metrics.lib.MetricClientFactory; import io.airbyte.metrics.lib.MetricTags; @@ -113,7 +118,6 @@ import java.util.List; import java.util.Optional; import java.util.UUID; -import java.util.stream.Collectors; import lombok.extern.slf4j.Slf4j; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -153,6 +157,8 @@ public class SchedulerHandler { private final ConnectorDefinitionSpecificationHandler connectorDefinitionSpecificationHandler; private final WorkspaceService workspaceService; private final SecretPersistenceConfigService secretPersistenceConfigService; + private final ConnectionTimelineEventService connectionEventService; + private final CurrentUserService currentUserService; private final StreamRefreshesHandler streamRefreshesHandler; private final NotificationHelper notificationHelper; @@ -179,6 +185,8 @@ public SchedulerHandler(final ConfigRepository configRepository, final ConnectorDefinitionSpecificationHandler connectorDefinitionSpecificationHandler, final WorkspaceService workspaceService, final SecretPersistenceConfigService secretPersistenceConfigService, + final ConnectionTimelineEventService connectionEventService, + final CurrentUserService currentUserService, final StreamRefreshesHandler streamRefreshesHandler, final NotificationHelper notificationHelper) { this.configRepository = configRepository; @@ -201,6 
+209,8 @@ public SchedulerHandler(final ConfigRepository configRepository, this.connectorDefinitionSpecificationHandler = connectorDefinitionSpecificationHandler; this.workspaceService = workspaceService; this.secretPersistenceConfigService = secretPersistenceConfigService; + this.connectionEventService = connectionEventService; + this.currentUserService = currentUserService; this.jobCreationAndStatusUpdateHelper = new JobCreationAndStatusUpdateHelper( jobPersistence, configRepository, @@ -340,10 +350,24 @@ public CheckConnectionRead checkDestinationConnectionFromDestinationIdForUpdate( return checkDestinationConnectionFromDestinationCreate(destinationCoreConfig); } - public SourceDiscoverSchemaRead discoverSchemaForSourceFromSourceId(final SourceDiscoverSchemaRequestBody discoverSchemaRequestBody) + public SourceDiscoverSchemaRead discoverSchemaForSourceFromSourceId(final SourceDiscoverSchemaRequestBody req) + throws ConfigNotFoundException, IOException, JsonValidationException { + final SourceConnection source = configRepository.getSourceConnection(req.getSourceId()); + + if (featureFlagClient.boolVariation(DiscoverPostprocessInTemporal.INSTANCE, new Workspace(source.getWorkspaceId()))) { + return discover(req, source); + } else { + return discoverAndGloballyDisable(req, source); + } + } + + /** + * Runs discover schema and performs conditional disabling of all connections. + */ + public SourceDiscoverSchemaRead discoverAndGloballyDisable(final SourceDiscoverSchemaRequestBody discoverSchemaRequestBody, + final SourceConnection source) throws ConfigNotFoundException, IOException, JsonValidationException { final UUID sourceId = discoverSchemaRequestBody.getSourceId(); - final SourceConnection source = configRepository.getSourceConnection(sourceId); final StandardSourceDefinition sourceDef = configRepository.getStandardSourceDefinition(source.getSourceDefinitionId()); final ActorDefinitionVersion sourceVersion = actorDefinitionVersionHelper.getSourceVersion(sourceDef, source.getWorkspaceId(), sourceId); final boolean isCustomConnector = sourceDef.getCustom(); @@ -392,6 +416,81 @@ public SourceDiscoverSchemaRead discoverSchemaForSourceFromSourceId(final Source .catalogId(currentCatalog.get().getId()); } + /** + * Runs discover schema and does not disable other connections. + */ + public SourceDiscoverSchemaRead discover(final SourceDiscoverSchemaRequestBody req, final SourceConnection source) + throws ConfigNotFoundException, IOException, JsonValidationException { + final UUID sourceId = req.getSourceId(); + final StandardSourceDefinition sourceDef = configRepository.getStandardSourceDefinition(source.getSourceDefinitionId()); + final ActorDefinitionVersion sourceVersion = actorDefinitionVersionHelper.getSourceVersion(sourceDef, source.getWorkspaceId(), sourceId); + + final boolean skipCacheCheck = req.getDisableCache() != null && req.getDisableCache(); + // Skip cache check and run discover. + if (skipCacheCheck) { + return runDiscoverJobDiffAndConditionallyDisable(source, sourceDef, sourceVersion, req.getPriority(), req.getConnectionId()); + } + + // Check cache. + final String configHash = HASH_FUNCTION.hashBytes(Jsons.serialize(source.getConfiguration()).getBytes( + Charsets.UTF_8)).toString(); + final String connectorVersion = sourceVersion.getDockerImageTag(); + + final Optional existingCatalog = + configRepository.getActorCatalog(req.getSourceId(), connectorVersion, configHash); + + // No catalog exists, run discover. 
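// [Editor's note] The discover() method above implements a read-through cache: the key is the triple of
// source id, connector docker tag, and a hash of the serialized source config, and a hit returns the
// stored catalog wrapped in a synthetic succeeded job instead of scheduling a connector run. A condensed
// sketch of the decision, using only names visible in this diff (cachedCatalogRead is a hypothetical
// stand-in for the SourceDiscoverSchemaRead construction that follows):
final Optional<ActorCatalog> cached = configRepository.getActorCatalog(sourceId, sourceVersion.getDockerImageTag(), configHash);
if (skipCacheCheck || cached.isEmpty()) {
  // explicit bypass or cache miss: run a real discover job, then diff the result against the connection
  return runDiscoverJobDiffAndConditionallyDisable(source, sourceDef, sourceVersion, req.getPriority(), req.getConnectionId());
}
return cachedCatalogRead(cached.get()); // hit: no job is scheduled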
+ if (existingCatalog.isEmpty()) { + return runDiscoverJobDiffAndConditionallyDisable(source, sourceDef, sourceVersion, req.getPriority(), req.getConnectionId()); + } + + // We have a catalog cached, no need to run discover. Return cached catalog. + final AirbyteCatalog airbyteCatalog = Jsons.object(existingCatalog.get().getCatalog(), AirbyteCatalog.class); + final SynchronousJobRead emptyJob = new SynchronousJobRead() + .configId("NoConfiguration") + .configType(JobConfigType.DISCOVER_SCHEMA) + .id(UUID.randomUUID()) + .createdAt(0L) + .endedAt(0L) + .logs(new LogRead().logLines(new ArrayList<>())) + .succeeded(true); + return new SourceDiscoverSchemaRead() + .catalog(CatalogConverter.toApi(airbyteCatalog, sourceVersion)) + .jobInfo(emptyJob) + .catalogId(existingCatalog.get().getId()); + } + + private SourceDiscoverSchemaRead runDiscoverJobDiffAndConditionallyDisable(final SourceConnection source, + final StandardSourceDefinition sourceDef, + final ActorDefinitionVersion sourceVersion, + final io.airbyte.api.model.generated.WorkloadPriority priority, + final UUID connectionId) + throws ConfigNotFoundException, IOException, JsonValidationException { + final boolean isCustomConnector = sourceDef.getCustom(); + // ResourceRequirements are read from the actor definition and can be null; when present, they take + // priority over the default settings in WorkerConfig. + final ResourceRequirements resourceRequirements = + getResourceRequirementsForJobType(sourceDef.getResourceRequirements(), JobType.DISCOVER_SCHEMA) + .orElse(null); + + final SynchronousResponse<UUID> persistedCatalogId = + synchronousSchedulerClient.createDiscoverSchemaJob( + source, + sourceVersion, + isCustomConnector, + resourceRequirements, + priority == null ? WorkloadPriority.HIGH : WorkloadPriority.fromValue(priority.toString())); + + final var schemaRead = retrieveDiscoveredSchema(persistedCatalogId, sourceVersion); + // no connection to diff + if (connectionId == null) { + return schemaRead; + } + + return connectionsHandler.diffCatalogAndConditionallyDisable(connectionId, schemaRead.getCatalogId()); + } + public void applySchemaChangeForSource(final SourceAutoPropagateChange sourceAutoPropagateChange) throws IOException, JsonValidationException, ConfigNotFoundException { LOGGER.info("Applying schema changes for source '{}' in workspace '{}'", @@ -523,7 +622,7 @@ public JobInfoRead resetConnection(final ConnectionIdRequestBody connectionIdReq } public JobInfoRead resetConnectionStream(final ConnectionStreamRequestBody connectionStreamRequestBody) - throws IOException { + throws IOException, ConfigNotFoundException { return submitResetConnectionStreamsToWorker(connectionStreamRequestBody.getConnectionId(), connectionStreamRequestBody.getStreams()); } @@ -552,18 +651,6 @@ public JobInfoRead createJob(final JobCreate jobCreate) throws JsonValidationExc actorDefinitionVersionHelper.getDestinationVersion(destinationDef, destination.getWorkspaceId(), destination.getDestinationId()); final String destinationImageName = destinationVersion.getDockerRepository() + ":" + destinationVersion.getDockerImageTag(); - final List<StandardSyncOperation> standardSyncOperations = Lists.newArrayList(); - for (final var operationId : standardSync.getOperationIds()) { - final StandardSyncOperation standardSyncOperation = configRepository.getStandardSyncOperation(operationId); - // NOTE: we must run normalization operations during resets, because we rely on them to clear the - // normalized tables.
However, we don't want to run other operations (dbt, webhook) because those - // are meant to transform the data after the sync but there's no data to transform. Webhook - // operations particularly will fail because we don't populate some required config during resets. - if (StandardSyncOperation.OperatorType.NORMALIZATION.equals(standardSyncOperation.getOperatorType())) { - standardSyncOperations.add(standardSyncOperation); - } - } - final Optional<Long> jobIdOptional = jobCreator.createResetConnectionJob( destination, @@ -573,7 +660,7 @@ public JobInfoRead createJob(final JobCreate jobCreate) throws JsonValidationExc destinationImageName, new Version(destinationVersion.getProtocolVersion()), destinationDef.getCustom(), - standardSyncOperations, + List.of(), streamsToReset, destination.getWorkspaceId()); @@ -695,6 +782,15 @@ private CheckConnectionRead reportConnectionStatus(final SynchronousResponse streams) - throws IOException, IllegalStateException { - return submitResetConnectionToWorker(connectionId, - streams.stream().map(s -> new StreamDescriptor().withName(s.getStreamName()).withNamespace(s.getStreamNamespace())).collect( - Collectors.toList())); + throws IOException, IllegalStateException, ConfigNotFoundException { + final List<StreamDescriptor> actualStreamsToReset = streams.isEmpty() + ? configRepository.getAllStreamsForConnection(connectionId) + : streams.stream().map(s -> new StreamDescriptor().withName(s.getStreamName()).withNamespace(s.getStreamNamespace())).toList(); + return submitResetConnectionToWorker(connectionId, actualStreamsToReset); } private JobInfoRead readJobFromResult(final ManualOperationResult manualOperationResult) throws IOException, IllegalStateException { diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/SourceDefinitionsHandler.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/SourceDefinitionsHandler.java index 3e45c3925f0..0adc555b08b 100644 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/SourceDefinitionsHandler.java +++ b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/SourceDefinitionsHandler.java @@ -115,6 +115,9 @@ SourceDefinitionRead buildSourceDefinitionRead(final StandardSourceDefinition st .supportLevel(ApiPojoConverters.toApiSupportLevel(sourceVersion.getSupportLevel())) .releaseStage(ApiPojoConverters.toApiReleaseStage(sourceVersion.getReleaseStage())) .releaseDate(ApiPojoConverters.toLocalDate(sourceVersion.getReleaseDate())) + .lastPublished(ApiPojoConverters.toOffsetDateTime(sourceVersion.getLastPublished())) + .cdkVersion(sourceVersion.getCdkVersion()) + .metrics(standardSourceDefinition.getMetrics()) .custom(standardSourceDefinition.getCustom()) .resourceRequirements(ApiPojoConverters.actorDefResourceReqsToApi(standardSourceDefinition.getResourceRequirements())) .maxSecondsBetweenMessages(standardSourceDefinition.getMaxSecondsBetweenMessages()); @@ -291,6 +294,7 @@ public SourceDefinitionRead updateSourceDefinition(final SourceDefinitionUpdate .withTombstone(currentSourceDefinition.getTombstone()) .withPublic(currentSourceDefinition.getPublic()) .withCustom(currentSourceDefinition.getCustom()) + .withMetrics(currentSourceDefinition.getMetrics()) .withMaxSecondsBetweenMessages(currentSourceDefinition.getMaxSecondsBetweenMessages()) .withResourceRequirements(updatedResourceReqs); diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/SourceHandler.java
b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/SourceHandler.java index 2c185e15646..bc53d2ce2c2 100644 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/SourceHandler.java +++ b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/SourceHandler.java @@ -53,6 +53,7 @@ import io.airbyte.data.services.SecretPersistenceConfigService; import io.airbyte.data.services.SourceService; import io.airbyte.data.services.WorkspaceService; +import io.airbyte.featureflag.DeleteSecretsWhenTombstoneActors; import io.airbyte.featureflag.FeatureFlagClient; import io.airbyte.featureflag.Organization; import io.airbyte.featureflag.UseIconUrlInApiResponse; @@ -376,22 +377,33 @@ public void deleteSource(final SourceRead source) } final var spec = getSpecFromSourceId(source.getSourceId()); - final JsonNode fullConfig; - try { - fullConfig = sourceService.getSourceConnectionWithSecrets(source.getSourceId()).getConfiguration(); - } catch (final io.airbyte.data.exceptions.ConfigNotFoundException e) { - throw new ConfigNotFoundException(e.getType(), e.getConfigId()); - } - // persist - persistSourceConnection( - source.getName(), - source.getSourceDefinitionId(), - source.getWorkspaceId(), - source.getSourceId(), - true, - fullConfig, - spec); + if (featureFlagClient.boolVariation(DeleteSecretsWhenTombstoneActors.INSTANCE, new Workspace(source.getWorkspaceId().toString()))) { + try { + sourceService.tombstoneSource( + source.getName(), + source.getWorkspaceId(), + source.getSourceId(), spec); + } catch (final io.airbyte.data.exceptions.ConfigNotFoundException e) { + throw new ConfigNotFoundException(e.getType(), e.getConfigId()); + } + } else { + final JsonNode fullConfig; + try { + fullConfig = sourceService.getSourceConnectionWithSecrets(source.getSourceId()).getConfiguration(); + } catch (final io.airbyte.data.exceptions.ConfigNotFoundException e) { + throw new ConfigNotFoundException(e.getType(), e.getConfigId()); + } + // persist + persistSourceConnection( + source.getName(), + source.getSourceDefinitionId(), + source.getWorkspaceId(), + source.getSourceId(), + true, + fullConfig, + spec); + } } public DiscoverCatalogResult writeDiscoverCatalogResult(final SourceDiscoverSchemaWriteRequestBody request) diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/WebBackendConnectionsHandler.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/WebBackendConnectionsHandler.java index 6a112f16042..dc335aedb1f 100644 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/WebBackendConnectionsHandler.java +++ b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/WebBackendConnectionsHandler.java @@ -9,6 +9,7 @@ import com.google.common.annotations.VisibleForTesting; import com.google.common.collect.Lists; import datadog.trace.api.Trace; +import io.airbyte.api.model.generated.ActorDefinitionVersionRead; import io.airbyte.api.model.generated.AirbyteCatalog; import io.airbyte.api.model.generated.AirbyteStream; import io.airbyte.api.model.generated.AirbyteStreamAndConfiguration; @@ -97,7 +98,6 @@ public class WebBackendConnectionsHandler { private final ConnectionsHandler connectionsHandler; private final StateHandler stateHandler; private final SourceHandler sourceHandler; - private final DestinationDefinitionsHandler destinationDefinitionsHandler; private final DestinationHandler destinationHandler; private final JobHistoryHandler jobHistoryHandler; 
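// [Editor's note] On the SourceHandler.deleteSource hunk above: the DeleteSecretsWhenTombstoneActors
// flag picks between the new path, which tombstones the source in a single service call (and, judging by
// the flag name, deletes its secrets), and the legacy path, which re-persists the full config, secrets
// included, with the tombstone bit set. Condensed recap, names as in the diff:
if (featureFlagClient.boolVariation(DeleteSecretsWhenTombstoneActors.INSTANCE, new Workspace(source.getWorkspaceId().toString()))) {
  sourceService.tombstoneSource(source.getName(), source.getWorkspaceId(), source.getSourceId(), spec); // new: secrets cleaned up
} else {
  final JsonNode fullConfig = sourceService.getSourceConnectionWithSecrets(source.getSourceId()).getConfiguration();
  persistSourceConnection(source.getName(), source.getSourceDefinitionId(), source.getWorkspaceId(), source.getSourceId(), true, fullConfig, spec); // legacy: tombstone only
}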
private final SchedulerHandler schedulerHandler; @@ -113,7 +113,6 @@ public WebBackendConnectionsHandler(final ActorDefinitionVersionHandler actorDef final ConnectionsHandler connectionsHandler, final StateHandler stateHandler, final SourceHandler sourceHandler, - final DestinationDefinitionsHandler destinationDefinitionsHandler, final DestinationHandler destinationHandler, final JobHistoryHandler jobHistoryHandler, final SchedulerHandler schedulerHandler, @@ -126,7 +125,6 @@ public WebBackendConnectionsHandler(final ActorDefinitionVersionHandler actorDef this.connectionsHandler = connectionsHandler; this.stateHandler = stateHandler; this.sourceHandler = sourceHandler; - this.destinationDefinitionsHandler = destinationDefinitionsHandler; this.destinationHandler = destinationHandler; this.jobHistoryHandler = jobHistoryHandler; this.schedulerHandler = schedulerHandler; @@ -155,7 +153,7 @@ public ConnectionStateType getStateType(final ConnectionIdRequestBody connection @SuppressWarnings("LineLength") public WebBackendConnectionReadList webBackendListConnectionsForWorkspace(final WebBackendConnectionListRequestBody webBackendConnectionListRequestBody) - throws IOException { + throws IOException, JsonValidationException, io.airbyte.data.exceptions.ConfigNotFoundException, ConfigNotFoundException { final StandardSyncQuery query = new StandardSyncQuery( webBackendConnectionListRequestBody.getWorkspaceId(), @@ -221,7 +219,7 @@ private Map getDestinationSnippetReadById(final Li } private WebBackendConnectionRead buildWebBackendConnectionRead(final ConnectionRead connectionRead, final Optional currentSourceCatalogId) - throws ConfigNotFoundException, IOException, JsonValidationException { + throws ConfigNotFoundException, IOException, JsonValidationException, io.airbyte.data.exceptions.ConfigNotFoundException { final SourceRead source = getSourceRead(connectionRead.getSourceId()); final DestinationRead destination = getDestinationRead(connectionRead.getDestinationId()); final OperationReadList operations = getOperationReadList(connectionRead); @@ -245,16 +243,25 @@ private WebBackendConnectionRead buildWebBackendConnectionRead(final ConnectionR webBackendConnectionRead.setSchemaChange(schemaChange); + // find any scheduled or past breaking changes to the connectors + final ActorDefinitionVersionRead sourceActorDefinitionVersionRead = actorDefinitionVersionHandler + .getActorDefinitionVersionForSourceId(new SourceIdRequestBody().sourceId(source.getSourceId())); + final ActorDefinitionVersionRead destinationActorDefinitionVersionRead = actorDefinitionVersionHandler + .getActorDefinitionVersionForDestinationId(new DestinationIdRequestBody().destinationId(destination.getDestinationId())); + webBackendConnectionRead.setSourceActorDefinitionVersion(sourceActorDefinitionVersionRead); + webBackendConnectionRead.setDestinationActorDefinitionVersion(destinationActorDefinitionVersionRead); + return webBackendConnectionRead; } - private static WebBackendConnectionListItem buildWebBackendConnectionListItem( - final StandardSync standardSync, - final Map sourceReadById, - final Map destinationReadById, - final Map latestJobByConnectionId, - final Map runningJobByConnectionId, - final Optional latestFetchEvent) { + private WebBackendConnectionListItem buildWebBackendConnectionListItem( + final StandardSync standardSync, + final Map sourceReadById, + final Map destinationReadById, + final Map latestJobByConnectionId, + final Map runningJobByConnectionId, + final Optional latestFetchEvent) + throws 
JsonValidationException, IOException, io.airbyte.data.exceptions.ConfigNotFoundException, ConfigNotFoundException { final SourceSnippetRead source = sourceReadById.get(standardSync.getSourceId()); final DestinationSnippetRead destination = destinationReadById.get(standardSync.getDestinationId()); @@ -265,6 +272,12 @@ private static WebBackendConnectionListItem buildWebBackendConnectionListItem( final SchemaChange schemaChange = getSchemaChange(connectionRead, currentCatalogId, latestFetchEvent); + // find any scheduled or past breaking changes to the connectors + final ActorDefinitionVersionRead sourceActorDefinitionVersionRead = actorDefinitionVersionHandler + .getActorDefinitionVersionForSourceId(new SourceIdRequestBody().sourceId(source.getSourceId())); + final ActorDefinitionVersionRead destinationActorDefinitionVersionRead = actorDefinitionVersionHandler + .getActorDefinitionVersionForDestinationId(new DestinationIdRequestBody().destinationId(destination.getDestinationId())); + final WebBackendConnectionListItem listItem = new WebBackendConnectionListItem() .connectionId(standardSync.getConnectionId()) .status(ApiPojoConverters.toApiStatus(standardSync.getStatus())) @@ -274,7 +287,9 @@ private static WebBackendConnectionListItem buildWebBackendConnectionListItem( .source(source) .destination(destination) .isSyncing(latestRunningSyncJob.isPresent()) - .schemaChange(schemaChange); + .schemaChange(schemaChange) + .sourceActorDefinitionVersion(sourceActorDefinitionVersionRead) + .destinationActorDefinitionVersion(destinationActorDefinitionVersionRead); latestSyncJob.ifPresent(job -> { listItem.setLatestSyncJobCreatedAt(job.createdAt()); @@ -363,7 +378,7 @@ private static WebBackendConnectionRead getWebBackendConnectionRead(final Connec // tracking selected streams in any reasonable way. We should update that. 
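// [Editor's note] Both buildWebBackendConnectionRead and buildWebBackendConnectionListItem above now
// attach the source and destination ActorDefinitionVersionRead to each payload, which is what lets the
// web app surface scheduled or past connector breaking changes. The list builder does this per item, so
// listing a workspace now appears to issue two extra version lookups per connection. The shared lookup,
// names as in the diff:
final ActorDefinitionVersionRead sourceActorDefinitionVersionRead = actorDefinitionVersionHandler.getActorDefinitionVersionForSourceId(new SourceIdRequestBody().sourceId(source.getSourceId()));
final ActorDefinitionVersionRead destinationActorDefinitionVersionRead = actorDefinitionVersionHandler.getActorDefinitionVersionForDestinationId(new DestinationIdRequestBody().destinationId(destination.getDestinationId()));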
@Trace public WebBackendConnectionRead webBackendGetConnection(final WebBackendConnectionRequestBody webBackendConnectionRequestBody) - throws ConfigNotFoundException, IOException, JsonValidationException { + throws ConfigNotFoundException, IOException, JsonValidationException, io.airbyte.data.exceptions.ConfigNotFoundException { ApmTraceUtils.addTagsToTrace(Map.of(MetricTags.CONNECTION_ID, webBackendConnectionRequestBody.getConnectionId().toString())); final ConnectionIdRequestBody connectionIdRequestBody = new ConnectionIdRequestBody() .connectionId(webBackendConnectionRequestBody.getConnectionId()); @@ -554,7 +569,7 @@ protected static AirbyteCatalog updateSchemaWithRefreshedDiscoveredCatalog(final } public WebBackendConnectionRead webBackendCreateConnection(final WebBackendConnectionCreate webBackendConnectionCreate) - throws ConfigNotFoundException, IOException, JsonValidationException { + throws ConfigNotFoundException, IOException, JsonValidationException, io.airbyte.data.exceptions.ConfigNotFoundException { final List operationIds = createOperations(webBackendConnectionCreate); final ConnectionCreate connectionCreate = toConnectionCreate(webBackendConnectionCreate, operationIds); @@ -661,7 +676,7 @@ private void resetStreamsIfNeeded(final WebBackendConnectionUpdate webBackendCon final Set allStreamToReset = new HashSet<>(); allStreamToReset.addAll(apiStreamsToReset); allStreamToReset.addAll(changedConfigStreamDescriptors); - List streamsToReset = + final List streamsToReset = allStreamToReset.stream().map(ProtocolConverters::streamDescriptorToProtocol).toList(); if (!streamsToReset.isEmpty()) { diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/ActorDefinitionHandlerHelper.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/ActorDefinitionHandlerHelper.java index ecb0ee7a9c6..3b693888584 100644 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/ActorDefinitionHandlerHelper.java +++ b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/ActorDefinitionHandlerHelper.java @@ -144,8 +144,8 @@ public ActorDefinitionVersion defaultDefinitionVersionFromUpdate(final ActorDefi .withReleaseStage(currentVersion.getReleaseStage()) .withReleaseDate(currentVersion.getReleaseDate()) .withSupportLevel(currentVersion.getSupportLevel()) - .withNormalizationConfig(currentVersion.getNormalizationConfig()) - .withSupportsDbt(currentVersion.getSupportsDbt()) + .withCdkVersion(currentVersion.getCdkVersion()) + .withLastPublished(currentVersion.getLastPublished()) .withAllowedHosts(currentVersion.getAllowedHosts()); } diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/AutoPropagateSchemaChangeHelper.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/AutoPropagateSchemaChangeHelper.java index 6d3eedf335a..df53ce77ea8 100644 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/AutoPropagateSchemaChangeHelper.java +++ b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/AutoPropagateSchemaChangeHelper.java @@ -181,7 +181,7 @@ static Map extractStreamAndConf */ public static boolean shouldAutoPropagate(final CatalogDiff diff, final ConnectionRead connectionRead) { - if (diff.getTransforms().isEmpty()) { + if (!containsChanges(diff)) { // If there's no diff we always propagate because it means there's a diff in a disabled stream, or // 
some other bit of metadata. // We want to acknowledge it and update to the latest source catalog id, but not bother the user @@ -196,6 +196,10 @@ public static boolean shouldAutoPropagate(final CatalogDiff diff, return nonBreakingChange && autoPropagationIsEnabledForConnection; } + public static boolean containsChanges(final CatalogDiff diff) { + return !diff.getTransforms().isEmpty(); + } + /** * Tests whether the provided catalog diff contains a breaking change. * diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/ContextBuilder.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/ContextBuilder.java index 567cbcc8a3f..89f46f8f69d 100644 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/ContextBuilder.java +++ b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/ContextBuilder.java @@ -14,6 +14,7 @@ import io.airbyte.data.exceptions.ConfigNotFoundException; import io.airbyte.data.services.ConnectionService; import io.airbyte.data.services.DestinationService; +import io.airbyte.data.services.SourceService; import io.airbyte.data.services.WorkspaceService; import io.airbyte.validation.json.JsonValidationException; import java.io.IOException; @@ -30,14 +31,17 @@ public class ContextBuilder { private final WorkspaceService workspaceService; private final DestinationService destinationService; private final ConnectionService connectionService; + private final SourceService sourceService; public ContextBuilder(final WorkspaceService workspaceService, final DestinationService destinationService, - final ConnectionService connectionService) { + final ConnectionService connectionService, + final SourceService sourceService) { this.workspaceService = workspaceService; this.destinationService = destinationService; this.connectionService = connectionService; + this.sourceService = sourceService; } /** @@ -50,9 +54,12 @@ public ContextBuilder(final WorkspaceService workspaceService, public ConnectionContext fromConnectionId(final UUID connectionId) { StandardSync connection = null; StandardWorkspace workspace = null; + DestinationConnection destination = null; + SourceConnection source = null; try { connection = connectionService.getStandardSync(connectionId); - final DestinationConnection destination = destinationService.getDestinationConnection(connection.getDestinationId()); + source = sourceService.getSourceConnection(connection.getSourceId()); + destination = destinationService.getDestinationConnection(connection.getDestinationId()); workspace = workspaceService.getStandardWorkspaceNoSecrets(destination.getWorkspaceId(), false); } catch (final JsonValidationException | IOException | ConfigNotFoundException e) { log.error("Failed to get connection information for connection id: {}", connectionId, e); @@ -70,6 +77,14 @@ public ConnectionContext fromConnectionId(final UUID connectionId) { .withOrganizationId(workspace.getOrganizationId()); } + if (destination != null) { + context.setDestinationDefinitionId(destination.getDestinationDefinitionId()); + } + + if (source != null) { + context.setSourceDefinitionId(source.getSourceDefinitionId()); + } + return context; } diff --git a/airbyte-commons-server/src/main/kotlin/io/airbyte/commons/server/errors/problems/ConnectionConfigurationProblem.kt b/airbyte-commons-server/src/main/kotlin/io/airbyte/commons/server/errors/problems/ConnectionConfigurationProblem.kt deleted file mode 100644 index 5d1cd0bdb7c..00000000000 
--- a/airbyte-commons-server/src/main/kotlin/io/airbyte/commons/server/errors/problems/ConnectionConfigurationProblem.kt +++ /dev/null @@ -1,178 +0,0 @@ -package io.airbyte.commons.server.errors.problems - -import io.airbyte.api.problems.model.generated.ProblemMessageData -import io.airbyte.api.problems.throwable.generated.BadRequestProblem -import io.airbyte.publicApi.server.generated.models.ConnectionSyncModeEnum -import jakarta.validation.Valid - -/** - * Thrown when a configuration for a connection is not valid. - * These were created before standardizing our approach of throwing Problems and should be turned into specific problem types. - */ -@Deprecated("Create a specific problem types for each case instead.") -class ConnectionConfigurationProblem private constructor() { - companion object { - fun handleSyncModeProblem( - connectionSyncMode: @Valid ConnectionSyncModeEnum?, - streamName: String, - validSyncModes: Set, - ): BadRequestProblem { - return BadRequestProblem( - ProblemMessageData().message( - "Cannot set sync mode to $connectionSyncMode for stream $streamName. Valid sync modes are: $validSyncModes", - ), - ) - } - - fun invalidStreamName(validStreamNames: Collection): BadRequestProblem { - return BadRequestProblem( - ProblemMessageData().message( - "Invalid stream found. The list of valid streams include: $validStreamNames.", - ), - ) - } - - fun invalidFieldName( - streamName: String, - validFieldNames: Collection, - ): BadRequestProblem { - return BadRequestProblem( - ProblemMessageData().message( - "Invalid field selected in configuration for stream $streamName. The list of valid field names includes: $validFieldNames.", - ), - ) - } - - fun duplicateStream(streamName: String): BadRequestProblem { - return BadRequestProblem( - ProblemMessageData().message( - "Duplicate stream found in configuration for: $streamName.", - ), - ) - } - - fun duplicateFieldsSelected(streamName: String): BadRequestProblem { - return BadRequestProblem( - ProblemMessageData().message( - "Duplicate fields selected in configuration for stream: $streamName.", - ), - ) - } - - fun sourceDefinedCursorFieldProblem( - streamName: String, - cursorField: List, - ): BadRequestProblem { - return BadRequestProblem( - ProblemMessageData().message( - "Cursor Field " + cursorField + " is already defined by source for stream: " + streamName + - ". Do not include a cursor field configuration for this stream.", - ), - ) - } - - fun missingCursorField(streamName: String): BadRequestProblem { - return BadRequestProblem( - ProblemMessageData().message( - "No default cursor field for stream: $streamName. Please include a cursor field configuration for this stream.", - ), - ) - } - - fun missingCursorFieldSelected(streamName: String): BadRequestProblem { - return BadRequestProblem( - ProblemMessageData().message( - "Cursor field is not selected properly for stream: $streamName. Please include the cursor field in selected fields for this stream.", - ), - ) - } - - fun invalidCursorField( - streamName: String, - validFields: List?>, - ): BadRequestProblem { - return BadRequestProblem( - ProblemMessageData().message( - "Invalid cursor field for stream: $streamName. The list of valid cursor fields include: $validFields.", - ), - ) - } - - fun missingPrimaryKey(streamName: String): BadRequestProblem { - return BadRequestProblem( - ProblemMessageData().message( - "No default primary key for stream: $streamName. 
Please include a primary key configuration for this stream.", - ), - ) - } - - fun missingPrimaryKeySelected(streamName: String): BadRequestProblem { - return BadRequestProblem( - ProblemMessageData().message( - "Primary key fields are not selected properly for stream: $streamName. " + - "Please include the primary key fields in selected fields for this stream.", - ), - ) - } - - fun primaryKeyAlreadyDefined( - streamName: String, - allowedPrimaryKey: List>, - ): BadRequestProblem { - return BadRequestProblem( - ProblemMessageData().message( - "Primary key for stream: $streamName is already pre-defined. Please remove the primaryKey or provide the value as $allowedPrimaryKey.", - ), - ) - } - - fun invalidPrimaryKey( - streamName: String, - validFields: List?>, - ): BadRequestProblem { - return BadRequestProblem( - ProblemMessageData().message( - "Invalid cursor field for stream: $streamName. The list of valid primary keys fields: $validFields.", - ), - ) - } - - fun duplicatePrimaryKey( - streamName: String, - key: List?>, - ): BadRequestProblem { - return BadRequestProblem( - ProblemMessageData().message( - "Duplicate primary key detected for stream: $streamName, please don't provide the same column more than once. Key: $key", - ), - ) - } - - fun invalidCronExpressionUnderOneHour(cronExpression: String): BadRequestProblem { - return BadRequestProblem( - ProblemMessageData().message( - "The cron expression " + cronExpression + - " is not valid or is less than the one hour minimum. The seconds and minutes values cannot be `*`.", - ), - ) - } - - fun invalidCronExpression( - cronExpression: String, - message: String?, - ): BadRequestProblem { - return BadRequestProblem( - ProblemMessageData().message( - "The cron expression $cronExpression is not valid. Error: $message" + - ". 
Please check the cron expression format at https://www.quartz-scheduler.org/documentation/quartz-2.3.0/tutorials/crontrigger.html", - ), - ) - } - - fun missingCronExpression(): BadRequestProblem { - return BadRequestProblem( - ProblemMessageData().message("Missing cron expression in the schedule."), - ) - } - } -} diff --git a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/ActorDefinitionVersionHandlerTest.java b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/ActorDefinitionVersionHandlerTest.java index 3d9898244a2..087a109c45c 100644 --- a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/ActorDefinitionVersionHandlerTest.java +++ b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/ActorDefinitionVersionHandlerTest.java @@ -19,14 +19,12 @@ import io.airbyte.api.model.generated.ResolveActorDefinitionVersionResponse; import io.airbyte.api.model.generated.SourceIdRequestBody; import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.server.converters.ApiPojoConverters; import io.airbyte.commons.server.errors.NotFoundException; import io.airbyte.commons.server.handlers.helpers.ActorDefinitionHandlerHelper; import io.airbyte.config.ActorDefinitionVersion; import io.airbyte.config.ActorDefinitionVersion.SupportState; import io.airbyte.config.ActorType; import io.airbyte.config.DestinationConnection; -import io.airbyte.config.NormalizationDestinationDefinitionConfig; import io.airbyte.config.ReleaseStage; import io.airbyte.config.SourceConnection; import io.airbyte.config.StandardDestinationDefinition; @@ -96,12 +94,7 @@ private ActorDefinitionVersion createActorDefinitionVersion() { } private ActorDefinitionVersion createActorDefinitionVersionWithNormalization() { - return createActorDefinitionVersion() - .withSupportsDbt(true) - .withNormalizationConfig(new NormalizationDestinationDefinitionConfig() - .withNormalizationRepository("repository") - .withNormalizationTag("dev") - .withNormalizationIntegrationType("integration-type")); + return createActorDefinitionVersion(); } @ParameterizedTest @@ -130,9 +123,7 @@ void testGetActorDefinitionVersionForSource(final boolean isVersionOverrideAppli .supportState(io.airbyte.api.model.generated.SupportState.SUPPORTED) .dockerRepository(actorDefinitionVersion.getDockerRepository()) .dockerImageTag(actorDefinitionVersion.getDockerImageTag()) - .supportsDbt(false) - .supportsRefreshes(false) - .normalizationConfig(ApiPojoConverters.normalizationDestinationDefinitionConfigToApi(null)); + .supportsRefreshes(false); assertEquals(expectedRead, actorDefinitionVersionRead); verify(mSourceService).getSourceConnection(sourceId); @@ -171,9 +162,7 @@ void testGetActorDefinitionVersionForDestination(final boolean isVersionOverride .supportState(io.airbyte.api.model.generated.SupportState.SUPPORTED) .dockerRepository(actorDefinitionVersion.getDockerRepository()) .dockerImageTag(actorDefinitionVersion.getDockerImageTag()) - .supportsDbt(false) - .supportsRefreshes(false) - .normalizationConfig(ApiPojoConverters.normalizationDestinationDefinitionConfigToApi(null)); + .supportsRefreshes(false); assertEquals(expectedRead, actorDefinitionVersionRead); verify(mDestinationService).getDestinationConnection(destinationId); @@ -211,9 +200,7 @@ void testGetActorDefinitionVersionForDestinationWithNormalization(final boolean .supportState(io.airbyte.api.model.generated.SupportState.SUPPORTED) .dockerRepository(actorDefinitionVersion.getDockerRepository()) 
.dockerImageTag(actorDefinitionVersion.getDockerImageTag()) - .supportsDbt(actorDefinitionVersion.getSupportsDbt()) - .supportsRefreshes(actorDefinitionVersion.getSupportsRefreshes()) - .normalizationConfig(ApiPojoConverters.normalizationDestinationDefinitionConfigToApi(actorDefinitionVersion.getNormalizationConfig())); + .supportsRefreshes(actorDefinitionVersion.getSupportsRefreshes()); assertEquals(expectedRead, actorDefinitionVersionRead); verify(mDestinationService).getDestinationConnection(destinationId); @@ -244,9 +231,7 @@ void testCreateActorDefinitionVersionReadWithBreakingChange() throws IOException .supportState(io.airbyte.api.model.generated.SupportState.DEPRECATED) .dockerRepository(actorDefinitionVersion.getDockerRepository()) .dockerImageTag(actorDefinitionVersion.getDockerImageTag()) - .supportsDbt(false) .supportsRefreshes(false) - .normalizationConfig(ApiPojoConverters.normalizationDestinationDefinitionConfigToApi(null)) .breakingChanges(breakingChanges); assertEquals(expectedRead, actorDefinitionVersionRead); diff --git a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/AttemptHandlerTest.java b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/AttemptHandlerTest.java index ac29fb8d324..00a87934104 100644 --- a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/AttemptHandlerTest.java +++ b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/AttemptHandlerTest.java @@ -28,9 +28,12 @@ import io.airbyte.api.model.generated.ConnectionStateType; import io.airbyte.api.model.generated.CreateNewAttemptNumberResponse; import io.airbyte.api.model.generated.GlobalState; +import io.airbyte.api.model.generated.InternalOperationResult; import io.airbyte.api.model.generated.LogRead; import io.airbyte.api.model.generated.SaveAttemptSyncConfigRequestBody; +import io.airbyte.api.model.generated.SaveStreamAttemptMetadataRequestBody; import io.airbyte.api.model.generated.SetWorkflowInAttemptRequestBody; +import io.airbyte.api.model.generated.StreamAttemptMetadata; import io.airbyte.commons.json.Jsons; import io.airbyte.commons.server.converters.ApiPojoConverters; import io.airbyte.commons.server.converters.JobConverter; @@ -49,7 +52,6 @@ import io.airbyte.config.JobOutput; import io.airbyte.config.JobResetConnectionConfig; import io.airbyte.config.JobSyncConfig; -import io.airbyte.config.NormalizationSummary; import io.airbyte.config.RefreshConfig; import io.airbyte.config.ResetSourceConfiguration; import io.airbyte.config.StandardSync; @@ -66,6 +68,7 @@ import io.airbyte.config.persistence.helper.GenerationBumper; import io.airbyte.data.services.ConnectionService; import io.airbyte.data.services.DestinationService; +import io.airbyte.data.services.StreamAttemptMetadataService; import io.airbyte.featureflag.EnableResumableFullRefresh; import io.airbyte.featureflag.FeatureFlagClient; import io.airbyte.featureflag.TestClient; @@ -111,6 +114,7 @@ class AttemptHandlerTest { private final ConnectionService connectionService = mock(ConnectionService.class); private final DestinationService destinationService = mock(DestinationService.class); private final ActorDefinitionVersionHelper actorDefinitionVersionHelper = mock(ActorDefinitionVersionHelper.class); + private final StreamAttemptMetadataService streamAttemptMetadataService = mock(StreamAttemptMetadataService.class); private final AttemptHandler handler = new AttemptHandler(jobPersistence, statePersistence, @@ -121,7 +125,8 @@ class AttemptHandlerTest { 
generationBumper, connectionService, destinationService, - actorDefinitionVersionHelper); + actorDefinitionVersionHelper, + streamAttemptMetadataService); private static final UUID CONNECTION_ID = UUID.randomUUID(); private static final UUID WORKSPACE_ID = UUID.randomUUID(); @@ -132,9 +137,7 @@ class AttemptHandlerTest { private static final StandardSyncOutput standardSyncOutput = new StandardSyncOutput() .withStandardSyncSummary( new StandardSyncSummary() - .withStatus(ReplicationStatus.COMPLETED)) - .withNormalizationSummary( - new NormalizationSummary()); + .withStatus(ReplicationStatus.COMPLETED)); private static final JobOutput jobOutput = new JobOutput().withSync(standardSyncOutput); @@ -621,6 +624,40 @@ void failAttemptValidatesSyncOutput(final Object thing) { assertThrows(BadRequestException.class, () -> handler.failAttempt(ATTEMPT_NUMBER, JOB_ID, failureSummary, thing)); } + @Test + void saveStreamMetadata() { + final long jobId = 123L; + final int attemptNumber = 1; + + final var result = handler.saveStreamMetadata(new SaveStreamAttemptMetadataRequestBody() + .jobId(jobId) + .attemptNumber(attemptNumber) + .streamMetadata(List.of( + new StreamAttemptMetadata().streamName("s1").wasBackfilled(false).wasResumed(true), + new StreamAttemptMetadata().streamName("s2").streamNamespace("ns").wasBackfilled(true).wasResumed(false)))); + verify(streamAttemptMetadataService).upsertStreamAttemptMetadata( + jobId, + attemptNumber, + List.of( + new io.airbyte.data.services.StreamAttemptMetadata("s1", null, false, true), + new io.airbyte.data.services.StreamAttemptMetadata("s2", "ns", true, false))); + assertEquals(new InternalOperationResult().succeeded(true), result); + } + + @Test + void saveStreamMetadataFailure() { + final long jobId = 123L; + final int attemptNumber = 1; + + doThrow(new RuntimeException("oops")).when(streamAttemptMetadataService).upsertStreamAttemptMetadata(anyLong(), anyLong(), any()); + + final var result = handler.saveStreamMetadata(new SaveStreamAttemptMetadataRequestBody() + .jobId(jobId) + .attemptNumber(attemptNumber) + .streamMetadata(List.of(new StreamAttemptMetadata().streamName("s").wasBackfilled(false).wasResumed(false)))); + assertEquals(new InternalOperationResult().succeeded(false), result); + } + private static Stream randomObjects() { return Stream.of( Arguments.of(123L), diff --git a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/ConnectionsHandlerTest.java b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/ConnectionsHandlerTest.java index 2ac40520017..27cb2a9070a 100644 --- a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/ConnectionsHandlerTest.java +++ b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/ConnectionsHandlerTest.java @@ -12,6 +12,7 @@ import static io.airbyte.persistence.job.models.Job.REPLICATION_TYPES; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotEquals; import static org.junit.jupiter.api.Assertions.assertNull; import static org.junit.jupiter.api.Assertions.assertThrows; import static org.junit.jupiter.api.Assertions.assertTrue; @@ -83,6 +84,7 @@ import io.airbyte.commons.server.converters.ConfigurationUpdate; import io.airbyte.commons.server.errors.BadRequestException; import io.airbyte.commons.server.handlers.helpers.ActorDefinitionHandlerHelper; +import 
io.airbyte.commons.server.handlers.helpers.AutoPropagateSchemaChangeHelper; import io.airbyte.commons.server.handlers.helpers.CatalogConverter; import io.airbyte.commons.server.handlers.helpers.NotificationHelper; import io.airbyte.commons.server.handlers.helpers.StatsAggregationHelper; @@ -90,6 +92,7 @@ import io.airbyte.commons.server.scheduler.EventRunner; import io.airbyte.commons.server.validation.CatalogValidator; import io.airbyte.commons.server.validation.ValidationError; +import io.airbyte.config.ActorCatalog; import io.airbyte.config.ActorDefinitionVersion; import io.airbyte.config.ActorType; import io.airbyte.config.AttemptFailureSummary; @@ -131,11 +134,13 @@ import io.airbyte.config.persistence.ConfigNotFoundException; import io.airbyte.config.persistence.ConfigRepository; import io.airbyte.config.persistence.StreamGenerationRepository; +import io.airbyte.config.persistence.UserPersistence; import io.airbyte.config.persistence.domain.Generation; import io.airbyte.config.persistence.helper.CatalogGenerationSetter; import io.airbyte.config.secrets.JsonSecretsProcessor; import io.airbyte.config.secrets.SecretsRepositoryReader; import io.airbyte.data.helpers.ActorDefinitionVersionUpdater; +import io.airbyte.data.services.ConnectionTimelineEventService; import io.airbyte.data.services.DestinationService; import io.airbyte.data.services.SecretPersistenceConfigService; import io.airbyte.data.services.SourceService; @@ -263,6 +268,8 @@ class ConnectionsHandlerTest { private CatalogValidator catalogValidator; private NotificationHelper notificationHelper; private StreamStatusesService streamStatusesService; + private ConnectionTimelineEventService connectionTimelineEventService; + private UserPersistence userPersistence; @SuppressWarnings("unchecked") @BeforeEach @@ -370,6 +377,8 @@ void setUp() throws IOException, JsonValidationException, ConfigNotFoundExceptio secretPersistenceConfigService = mock(SecretPersistenceConfigService.class); actorDefinitionHandlerHelper = mock(ActorDefinitionHandlerHelper.class); streamStatusesService = mock(StreamStatusesService.class); + connectionTimelineEventService = mock(ConnectionTimelineEventService.class); + userPersistence = mock(UserPersistence.class); featureFlagClient = mock(TestClient.class); @@ -436,7 +445,8 @@ void setUp() throws JsonValidationException, ConfigNotFoundException, IOExceptio catalogGenerationSetter, catalogValidator, notificationHelper, - streamStatusesService); + streamStatusesService, + connectionTimelineEventService, userPersistence); when(uuidGenerator.get()).thenReturn(standardSync.getConnectionId()); final StandardSourceDefinition sourceDefinition = new StandardSourceDefinition() @@ -1687,7 +1697,8 @@ void setUp() { catalogGenerationSetter, catalogValidator, notificationHelper, - streamStatusesService); + streamStatusesService, + connectionTimelineEventService, userPersistence); } private Attempt generateMockAttempt(final Instant attemptTime, final long recordsSynced) { @@ -1765,7 +1776,7 @@ void testGetConnectionDataHistory() throws IOException { final long jobTwoBytesEmmitted = 87654L; final long jobTwoRecordsCommitted = 50L; final long jobTwoRecordsEmittted = 60L; - try (MockedStatic mockStatsAggregationHelper = Mockito.mockStatic(StatsAggregationHelper.class)) { + try (final MockedStatic mockStatsAggregationHelper = Mockito.mockStatic(StatsAggregationHelper.class)) { mockStatsAggregationHelper.when(() -> StatsAggregationHelper.getJobIdToJobWithAttemptsReadMap(Mockito.any(), Mockito.any())) .thenReturn(Map.of( 
-    try (MockedStatic<StatsAggregationHelper> mockStatsAggregationHelper = Mockito.mockStatic(StatsAggregationHelper.class)) {
+    try (final MockedStatic<StatsAggregationHelper> mockStatsAggregationHelper = Mockito.mockStatic(StatsAggregationHelper.class)) {
       mockStatsAggregationHelper.when(() -> StatsAggregationHelper.getJobIdToJobWithAttemptsReadMap(Mockito.any(), Mockito.any()))
           .thenReturn(Map.of(
               jobOneId, new JobWithAttemptsRead().job(
@@ -1783,7 +1794,7 @@ jobTwoId, new JobWithAttemptsRead().job(
                   .recordsCommitted(jobTwoRecordsCommitted)
                   .recordsEmitted(jobTwoRecordsEmittted)))));

-    List<JobSyncResultRead> expected = List.of(
+    final List<JobSyncResultRead> expected = List.of(
         new JobSyncResultRead()
             .configType(JobConfigType.SYNC)
             .jobId(jobOneId)
@@ -1937,7 +1948,8 @@ void setUp() {
         catalogGenerationSetter,
         catalogValidator,
         notificationHelper,
-        streamStatusesService);
+        streamStatusesService,
+        connectionTimelineEventService, userPersistence);
   }

   @Test
@@ -2317,8 +2329,7 @@ void testDiffDifferentDestinationSyncMode() {
   }

   @Test
-  void testConnectionStatus()
-      throws JsonValidationException, ConfigNotFoundException, IOException {
+  void testConnectionStatus() throws IOException {
     final UUID connectionId = UUID.randomUUID();
     final AttemptFailureSummary failureSummary = new AttemptFailureSummary();
     failureSummary.setFailures(List.of(new FailureReason().withFailureOrigin(FailureReason.FailureOrigin.DESTINATION)));
@@ -2382,6 +2393,7 @@ class ApplySchemaChanges {
     private static final UUID DESTINATION_DEFINITION_ID = UUID.randomUUID();
     private static final UUID WORKSPACE_ID = UUID.randomUUID();
     private static final UUID DESTINATION_ID = UUID.randomUUID();
+    private static final UUID DISCOVERED_CATALOG_ID = UUID.randomUUID();
     private static final io.airbyte.protocol.models.AirbyteCatalog airbyteCatalog =
         CatalogHelpers.createAirbyteCatalog(SHOES, Field.of(SKU, JsonSchemaType.STRING));
     private static final ConfiguredAirbyteCatalog configuredAirbyteCatalog =
@@ -2395,6 +2407,10 @@ class ApplySchemaChanges {
         .withWorkspaceId(WORKSPACE_ID)
         .withEmail(EMAIL)
         .withNotificationSettings(NOTIFICATION_SETTINGS);
+    private static final ActorCatalog actorCatalog = new ActorCatalog()
+        .withCatalog(Jsons.jsonNode(airbyteCatalog))
+        .withCatalogHash("")
+        .withId(UUID.randomUUID());

     @BeforeEach
     void setup() throws IOException, JsonValidationException, ConfigNotFoundException {
@@ -2406,6 +2422,8 @@ void setup() throws IOException, JsonValidationException, ConfigNotFoundExceptio
           .withCatalog(configuredAirbyteCatalog)
           .withManual(true)
           .withNonBreakingChangesPreference(StandardSync.NonBreakingChangesPreference.PROPAGATE_FULLY);
+
+      when(configRepository.getActorCatalogById(SOURCE_CATALOG_ID)).thenReturn(actorCatalog);
       when(configRepository.getStandardSync(CONNECTION_ID)).thenReturn(standardSync);
       when(configRepository.getSourceConnection(SOURCE_ID)).thenReturn(source);
       when(configRepository.getStandardWorkspaceNoSecrets(WORKSPACE_ID, false)).thenReturn(WORKSPACE);
@@ -2436,7 +2454,8 @@ void setup() throws IOException, JsonValidationException, ConfigNotFoundExceptio
           catalogGenerationSetter,
           catalogValidator,
           notificationHelper,
-          streamStatusesService);
+          streamStatusesService,
+          connectionTimelineEventService, userPersistence);
     }

     @Test
@@ -2517,6 +2536,76 @@ void testAutoPropagateColumnsOnly() throws JsonValidationException, ConfigNotFou
           source,
           EMAIL);
     }

+    @Test
+    void diffCatalogGeneratesADiffAndUpdatesTheConnection() throws JsonValidationException, ConfigNotFoundException, IOException {
+      final Field newField = Field.of(A_DIFFERENT_COLUMN, JsonSchemaType.STRING);
+      final var catalogWithDiff = CatalogHelpers.createAirbyteCatalog(SHOES, Field.of(SKU, JsonSchemaType.STRING), newField);
+      final ActorCatalog discoveredCatalog = new ActorCatalog()
+          .withCatalog(Jsons.jsonNode(catalogWithDiff))
+          .withCatalogHash("")
+          .withId(UUID.randomUUID());
+      when(configRepository.getActorCatalogById(DISCOVERED_CATALOG_ID)).thenReturn(discoveredCatalog);
+
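+      // The added column should surface as a single non-breaking ADD_FIELD transform on the existing stream.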
+      final CatalogDiff expectedDiff =
+          new CatalogDiff().addTransformsItem(new StreamTransform()
+              .transformType(StreamTransform.TransformTypeEnum.UPDATE_STREAM)
+              .streamDescriptor(new StreamDescriptor().namespace(null).name(SHOES))
+              .updateStream(new StreamTransformUpdateStream().addFieldTransformsItem(new FieldTransform()
+                  .addField(new FieldAdd().schema(Jsons.deserialize("{\"type\": \"string\"}")))
+                  .fieldName(List.of(newField.getName()))
+                  .breaking(false)
+                  .transformType(FieldTransform.TransformTypeEnum.ADD_FIELD))));
+
+      final var result = connectionsHandler.diffCatalogAndConditionallyDisable(CONNECTION_ID, DISCOVERED_CATALOG_ID);
+
+      assertEquals(expectedDiff, result.getCatalogDiff());
+      assertEquals(false, result.getBreakingChange());
+
+      final ArgumentCaptor<StandardSync> syncCaptor = ArgumentCaptor.forClass(StandardSync.class);
+      verify(configRepository).writeStandardSync(syncCaptor.capture());
+      final StandardSync savedSync = syncCaptor.getValue();
+      assertNotEquals(Status.INACTIVE, savedSync.getStatus());
+    }
+
+    @Test
+    void diffCatalogDisablesForBreakingChange() throws JsonValidationException, ConfigNotFoundException, IOException {
+      try (MockedStatic<AutoPropagateSchemaChangeHelper> helper = Mockito.mockStatic(AutoPropagateSchemaChangeHelper.class)) {
+        helper.when(() -> AutoPropagateSchemaChangeHelper.containsBreakingChange(any())).thenReturn(true);
+
+        final var result = connectionsHandler.diffCatalogAndConditionallyDisable(CONNECTION_ID, SOURCE_CATALOG_ID);
+        assertEquals(true, result.getBreakingChange());
+      }
+
+      final ArgumentCaptor<StandardSync> syncCaptor = ArgumentCaptor.forClass(StandardSync.class);
+      verify(configRepository).writeStandardSync(syncCaptor.capture());
+      final StandardSync savedSync = syncCaptor.getValue();
+      assertEquals(Status.INACTIVE, savedSync.getStatus());
+    }
+
+    @Test
+    void diffCatalogDisablesForNonBreakingChangeIfConfiguredSo() throws IOException, JsonValidationException, ConfigNotFoundException {
+      // configure the sync to be disabled on non-breaking change
+      standardSync = standardSync.withNonBreakingChangesPreference(StandardSync.NonBreakingChangesPreference.DISABLE);
+      when(configRepository.getStandardSync(CONNECTION_ID)).thenReturn(standardSync);
+
+      final Field newField = Field.of(A_DIFFERENT_COLUMN, JsonSchemaType.STRING);
+      final var catalogWithDiff = CatalogHelpers.createAirbyteCatalog(SHOES, Field.of(SKU, JsonSchemaType.STRING), newField);
+      final ActorCatalog discoveredCatalog = new ActorCatalog()
+          .withCatalog(Jsons.jsonNode(catalogWithDiff))
+          .withCatalogHash("")
+          .withId(UUID.randomUUID());
+      when(configRepository.getActorCatalogById(DISCOVERED_CATALOG_ID)).thenReturn(discoveredCatalog);
+
+      final var result = connectionsHandler.diffCatalogAndConditionallyDisable(CONNECTION_ID, DISCOVERED_CATALOG_ID);
+
+      assertEquals(false, result.getBreakingChange());
+
+      final ArgumentCaptor<StandardSync> syncCaptor = ArgumentCaptor.forClass(StandardSync.class);
+      verify(configRepository).writeStandardSync(syncCaptor.capture());
+      final StandardSync savedSync = syncCaptor.getValue();
+      assertEquals(Status.INACTIVE, savedSync.getStatus());
+    }
+
   }

   @Nested
@@ -2543,7 +2632,8 @@ void setUp() {
         catalogGenerationSetter,
         catalogValidator,
         notificationHelper,
-        streamStatusesService);
+        streamStatusesService,
+        connectionTimelineEventService, userPersistence);
   }

   @Test
@@ -2644,7 +2734,7 @@ void testGetConnectionLastJobPerStream() throws IOException {
     when(jobPersistence.listJobsLight(Set.of(jobId))).thenReturn(jobList);

-    try (MockedStatic<StatsAggregationHelper> mockStatsAggregationHelper = Mockito.mockStatic(StatsAggregationHelper.class)) {
+    try (final MockedStatic<StatsAggregationHelper> mockStatsAggregationHelper = Mockito.mockStatic(StatsAggregationHelper.class)) {
       mockStatsAggregationHelper.when(() -> StatsAggregationHelper.getJobIdToJobWithAttemptsReadMap(eq(jobList), eq(jobPersistence)))
           .thenReturn(Map.of(jobId, jobWithAttemptsRead));

diff --git a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/ConnectorBuilderProjectsHandlerTest.java b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/ConnectorBuilderProjectsHandlerTest.java
index 05bdd2ff8be..20d89214f2b 100644
--- a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/ConnectorBuilderProjectsHandlerTest.java
+++ b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/ConnectorBuilderProjectsHandlerTest.java
@@ -655,7 +655,7 @@ private void testStreamReadForProject(final ConnectorBuilderProject project, fin
     final String responseBody = "[" + Jsons.serialize(record1) + "," + Jsons.serialize(record2) + "]";
     final String requestUrl = "https://api.com/users";
     final int responseStatus = 200;
-    final HttpRequest httpRequest = new HttpRequest(requestUrl, null, null, HttpMethod.GET);
+    final HttpRequest httpRequest = new HttpRequest(requestUrl, HttpMethod.GET, null, null);
     final HttpResponse httpResponse = new HttpResponse(responseStatus, responseBody, null);
     final StreamRead streamRead = new StreamRead(Collections.emptyList(), List.of(
         new StreamReadSlicesInner(List.of(new StreamReadSlicesInnerPagesInner(List.of(record1, record2), httpRequest, httpResponse)), null, null)),
diff --git a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/DestinationDefinitionsHandlerTest.java b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/DestinationDefinitionsHandlerTest.java
index 1539a06eccd..a3ef3f52096 100644
--- a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/DestinationDefinitionsHandlerTest.java
+++ b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/DestinationDefinitionsHandlerTest.java
@@ -18,8 +18,6 @@
 import static org.mockito.Mockito.verifyNoMoreInteractions;
 import static org.mockito.Mockito.when;

-import com.google.common.collect.ImmutableMap;
-import com.google.common.collect.Lists;
 import io.airbyte.api.model.generated.ActorDefinitionIdWithScope;
 import io.airbyte.api.model.generated.CustomDestinationDefinitionCreate;
 import io.airbyte.api.model.generated.DestinationDefinitionCreate;
@@ -36,6 +34,7 @@
 import io.airbyte.api.model.generated.SupportLevel;
 import io.airbyte.api.model.generated.WorkspaceIdRequestBody;
 import io.airbyte.commons.json.Jsons;
+import io.airbyte.commons.server.converters.ApiPojoConverters;
 import io.airbyte.commons.server.errors.IdNotFoundKnownException;
 import io.airbyte.commons.server.errors.UnsupportedProtocolVersionException;
 import io.airbyte.commons.server.handlers.helpers.ActorDefinitionHandlerHelper;
@@ -46,7 +45,7 @@
 import io.airbyte.config.ActorType;
 import io.airbyte.config.AllowedHosts;
 import io.airbyte.config.ConnectorRegistryDestinationDefinition;
-import io.airbyte.config.NormalizationDestinationDefinitionConfig;
+import io.airbyte.config.ConnectorRegistryEntryMetrics;
 import io.airbyte.config.ResourceRequirements;
 import io.airbyte.config.ScopeType;
 import io.airbyte.config.StandardDestinationDefinition;
@@ -70,10 +69,12 @@
 import java.net.URISyntaxException;
 import java.time.LocalDate;
 import java.util.Collections;
+import java.util.Date;
 import java.util.List;
 import java.util.Map;
 import java.util.UUID;
 import java.util.function.Supplier;
+import org.jooq.JSONB;
 import org.junit.jupiter.api.BeforeEach;
 import org.junit.jupiter.api.DisplayName;
 import org.junit.jupiter.api.Nested;
@@ -88,10 +89,12 @@ class DestinationDefinitionsHandlerTest {
   private static final String ICON_URL = "https://connectors.airbyte.com/files/metadata/airbyte/destination-presto/latest/icon.svg";

   private ConfigRepository configRepository;
+
   private StandardDestinationDefinition destinationDefinition;
-  private StandardDestinationDefinition destinationDefinitionWithNormalization;
+  private StandardDestinationDefinition destinationDefinitionWithOptionals;
+
   private ActorDefinitionVersion destinationDefinitionVersion;
-  private ActorDefinitionVersion destinationDefinitionVersionWithNormalization;
+  private ActorDefinitionVersion destinationDefinitionVersionWithOptionals;
+
   private DestinationDefinitionsHandler destinationDefinitionsHandler;
   private Supplier<UUID> uuidSupplier;
@@ -110,9 +113,9 @@ void setUp() {
     configRepository = mock(ConfigRepository.class);
     uuidSupplier = mock(Supplier.class);
     destinationDefinition = generateDestinationDefinition();
-    destinationDefinitionWithNormalization = generateDestinationDefinition();
+    destinationDefinitionWithOptionals = generateDestinationDefinitionWithOptionals();
     destinationDefinitionVersion = generateVersionFromDestinationDefinition(destinationDefinition);
-    destinationDefinitionVersionWithNormalization = generateDestinationDefinitionVersionWithNormalization(destinationDefinitionWithNormalization);
+    destinationDefinitionVersionWithOptionals = generateDestinationDefinitionVersionWithOptionals(destinationDefinitionWithOptionals);
     actorDefinitionHandlerHelper = mock(ActorDefinitionHandlerHelper.class);
     remoteDefinitionsProvider = mock(RemoteDefinitionsProvider.class);
     destinationHandler = mock(DestinationHandler.class);
@@ -145,7 +148,7 @@ private StandardDestinationDefinition generateDestinationDefinition() {

   private ActorDefinitionVersion generateVersionFromDestinationDefinition(final StandardDestinationDefinition destinationDefinition) {
     final ConnectorSpecification spec = new ConnectorSpecification()
-        .withConnectionSpecification(Jsons.jsonNode(ImmutableMap.of("foo", "bar")));
+        .withConnectionSpecification(Jsons.jsonNode(Map.of("foo", "bar")));

     return new ActorDefinitionVersion()
         .withActorDefinitionId(destinationDefinition.getDestinationDefinitionId())
@@ -180,24 +183,28 @@ private ActorDefinitionVersion generateCustomVersionFromDestinationDefinition(fi
         .withAllowedHosts(null);
   }

-  private ActorDefinitionVersion generateDestinationDefinitionVersionWithNormalization(final StandardDestinationDefinition destinationDefinition) {
+  private StandardDestinationDefinition generateDestinationDefinitionWithOptionals() {
+    final ConnectorRegistryEntryMetrics metrics =
+        new ConnectorRegistryEntryMetrics().withAdditionalProperty("all", JSONB.valueOf("{'all': {'usage': 'high'}}"));
+    return generateDestinationDefinition().withMetrics(metrics);
+  }
+
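+  // Counterpart to generateVersionFromDestinationDefinition that also fills the optional
+  // CDK-version and last-published fields, so the list/read paths exercise them.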
.withNormalizationRepository("repository") - .withNormalizationTag("dev") - .withNormalizationIntegrationType("integration-type")); + .withCdkVersion("python:1.2.3") + .withLastPublished(new Date()); } @Test @DisplayName("listDestinationDefinition should return the right list") void testListDestinations() throws IOException, URISyntaxException { - when(configRepository.listStandardDestinationDefinitions(false)) - .thenReturn(Lists.newArrayList(destinationDefinition, destinationDefinitionWithNormalization)); + .thenReturn(List.of(destinationDefinition, destinationDefinitionWithOptionals)); when(configRepository.getActorDefinitionVersions( - List.of(destinationDefinition.getDefaultVersionId(), destinationDefinitionWithNormalization.getDefaultVersionId()))) - .thenReturn(Lists.newArrayList(destinationDefinitionVersion, destinationDefinitionVersionWithNormalization)); + List.of(destinationDefinition.getDefaultVersionId(), + destinationDefinitionWithOptionals.getDefaultVersionId()))) + .thenReturn(List.of(destinationDefinitionVersion, + destinationDefinitionVersionWithOptionals)); final DestinationDefinitionRead expectedDestinationDefinitionRead1 = new DestinationDefinitionRead() .destinationDefinitionId(destinationDefinition.getDestinationDefinitionId()) @@ -210,38 +217,34 @@ void testListDestinations() throws IOException, URISyntaxException { .supportLevel(SupportLevel.fromValue(destinationDefinitionVersion.getSupportLevel().value())) .releaseStage(ReleaseStage.fromValue(destinationDefinitionVersion.getReleaseStage().value())) .releaseDate(LocalDate.parse(destinationDefinitionVersion.getReleaseDate())) - .supportsDbt(false) - .normalizationConfig(new io.airbyte.api.model.generated.NormalizationDestinationDefinitionConfig().supported(false)) .resourceRequirements(new io.airbyte.api.model.generated.ActorDefinitionResourceRequirements() ._default(new io.airbyte.api.model.generated.ResourceRequirements() .cpuRequest(destinationDefinition.getResourceRequirements().getDefault().getCpuRequest())) .jobSpecific(Collections.emptyList())); - final DestinationDefinitionRead expectedDestinationDefinitionRead2 = new DestinationDefinitionRead() - .destinationDefinitionId(destinationDefinitionWithNormalization.getDestinationDefinitionId()) - .name(destinationDefinitionWithNormalization.getName()) - .dockerRepository(destinationDefinitionVersionWithNormalization.getDockerRepository()) - .dockerImageTag(destinationDefinitionVersionWithNormalization.getDockerImageTag()) - .documentationUrl(new URI(destinationDefinitionVersionWithNormalization.getDocumentationUrl())) - .icon(DestinationDefinitionsHandler.loadIcon(destinationDefinitionWithNormalization.getIcon())) - .protocolVersion(destinationDefinitionVersionWithNormalization.getProtocolVersion()) - .supportLevel(SupportLevel.fromValue(destinationDefinitionVersionWithNormalization.getSupportLevel().value())) - .releaseStage(ReleaseStage.fromValue(destinationDefinitionVersionWithNormalization.getReleaseStage().value())) - .releaseDate(LocalDate.parse(destinationDefinitionVersionWithNormalization.getReleaseDate())) - .supportsDbt(destinationDefinitionVersionWithNormalization.getSupportsDbt()) - .normalizationConfig(new io.airbyte.api.model.generated.NormalizationDestinationDefinitionConfig().supported(true) - .normalizationRepository(destinationDefinitionVersionWithNormalization.getNormalizationConfig().getNormalizationRepository()) - .normalizationTag(destinationDefinitionVersionWithNormalization.getNormalizationConfig().getNormalizationTag()) - 
.normalizationIntegrationType(destinationDefinitionVersionWithNormalization.getNormalizationConfig().getNormalizationIntegrationType())) + final DestinationDefinitionRead expectedDestinationDefinitionReadWithOpts = new DestinationDefinitionRead() + .destinationDefinitionId(destinationDefinitionWithOptionals.getDestinationDefinitionId()) + .name(destinationDefinitionWithOptionals.getName()) + .dockerRepository(destinationDefinitionVersionWithOptionals.getDockerRepository()) + .dockerImageTag(destinationDefinitionVersionWithOptionals.getDockerImageTag()) + .documentationUrl(new URI(destinationDefinitionVersionWithOptionals.getDocumentationUrl())) + .icon(DestinationDefinitionsHandler.loadIcon(destinationDefinitionWithOptionals.getIcon())) + .protocolVersion(destinationDefinitionVersionWithOptionals.getProtocolVersion()) + .supportLevel(SupportLevel.fromValue(destinationDefinitionVersionWithOptionals.getSupportLevel().value())) + .releaseStage(ReleaseStage.fromValue(destinationDefinitionVersionWithOptionals.getReleaseStage().value())) + .releaseDate(LocalDate.parse(destinationDefinitionVersionWithOptionals.getReleaseDate())) + .cdkVersion(destinationDefinitionVersionWithOptionals.getCdkVersion()) + .lastPublished(ApiPojoConverters.toOffsetDateTime(destinationDefinitionVersionWithOptionals.getLastPublished())) + .metrics(destinationDefinitionWithOptionals.getMetrics()) .resourceRequirements(new io.airbyte.api.model.generated.ActorDefinitionResourceRequirements() ._default(new io.airbyte.api.model.generated.ResourceRequirements() - .cpuRequest(destinationDefinitionWithNormalization.getResourceRequirements().getDefault().getCpuRequest())) + .cpuRequest(destinationDefinitionWithOptionals.getResourceRequirements().getDefault().getCpuRequest())) .jobSpecific(Collections.emptyList())); final DestinationDefinitionReadList actualDestinationDefinitionReadList = destinationDefinitionsHandler.listDestinationDefinitions(); assertEquals( - Lists.newArrayList(expectedDestinationDefinitionRead1, expectedDestinationDefinitionRead2), + List.of(expectedDestinationDefinitionRead1, expectedDestinationDefinitionReadWithOpts), actualDestinationDefinitionReadList.getDestinationDefinitions()); } @@ -249,12 +252,8 @@ void testListDestinations() throws IOException, URISyntaxException { @DisplayName("listDestinationDefinitionsForWorkspace should return the right list") void testListDestinationDefinitionsForWorkspace() throws IOException, URISyntaxException, JsonValidationException, ConfigNotFoundException { when(featureFlagClient.boolVariation(eq(HideActorDefinitionFromList.INSTANCE), any())).thenReturn(false); - when(configRepository.listPublicDestinationDefinitions(false)).thenReturn(Lists.newArrayList(destinationDefinition)); - when(configRepository.listGrantedDestinationDefinitions(workspaceId, false)) - .thenReturn(Lists.newArrayList(destinationDefinitionWithNormalization)); + when(configRepository.listPublicDestinationDefinitions(false)).thenReturn(List.of(destinationDefinition)); when(actorDefinitionVersionHelper.getDestinationVersion(destinationDefinition, workspaceId)).thenReturn(destinationDefinitionVersion); - when(actorDefinitionVersionHelper.getDestinationVersion(destinationDefinitionWithNormalization, workspaceId)) - .thenReturn(destinationDefinitionVersionWithNormalization); final DestinationDefinitionRead expectedDestinationDefinitionRead1 = new DestinationDefinitionRead() .destinationDefinitionId(destinationDefinition.getDestinationDefinitionId()) @@ -267,39 +266,16 @@ void 
testListDestinationDefinitionsForWorkspace() throws IOException, URISyntaxE .supportLevel(SupportLevel.fromValue(destinationDefinitionVersion.getSupportLevel().value())) .releaseStage(ReleaseStage.fromValue(destinationDefinitionVersion.getReleaseStage().value())) .releaseDate(LocalDate.parse(destinationDefinitionVersion.getReleaseDate())) - .supportsDbt(false) - .normalizationConfig(new io.airbyte.api.model.generated.NormalizationDestinationDefinitionConfig().supported(false)) .resourceRequirements(new io.airbyte.api.model.generated.ActorDefinitionResourceRequirements() ._default(new io.airbyte.api.model.generated.ResourceRequirements() .cpuRequest(destinationDefinition.getResourceRequirements().getDefault().getCpuRequest())) .jobSpecific(Collections.emptyList())); - final DestinationDefinitionRead expectedDestinationDefinitionRead2 = new DestinationDefinitionRead() - .destinationDefinitionId(destinationDefinitionWithNormalization.getDestinationDefinitionId()) - .name(destinationDefinitionWithNormalization.getName()) - .dockerRepository(destinationDefinitionVersionWithNormalization.getDockerRepository()) - .dockerImageTag(destinationDefinitionVersionWithNormalization.getDockerImageTag()) - .documentationUrl(new URI(destinationDefinitionVersionWithNormalization.getDocumentationUrl())) - .icon(DestinationDefinitionsHandler.loadIcon(destinationDefinitionWithNormalization.getIcon())) - .protocolVersion(destinationDefinitionVersionWithNormalization.getProtocolVersion()) - .supportLevel(SupportLevel.fromValue(destinationDefinitionVersionWithNormalization.getSupportLevel().value())) - .releaseStage(ReleaseStage.fromValue(destinationDefinitionVersionWithNormalization.getReleaseStage().value())) - .releaseDate(LocalDate.parse(destinationDefinitionVersionWithNormalization.getReleaseDate())) - .supportsDbt(destinationDefinitionVersionWithNormalization.getSupportsDbt()) - .normalizationConfig(new io.airbyte.api.model.generated.NormalizationDestinationDefinitionConfig().supported(true) - .normalizationRepository(destinationDefinitionVersionWithNormalization.getNormalizationConfig().getNormalizationRepository()) - .normalizationTag(destinationDefinitionVersionWithNormalization.getNormalizationConfig().getNormalizationTag()) - .normalizationIntegrationType(destinationDefinitionVersionWithNormalization.getNormalizationConfig().getNormalizationIntegrationType())) - .resourceRequirements(new io.airbyte.api.model.generated.ActorDefinitionResourceRequirements() - ._default(new io.airbyte.api.model.generated.ResourceRequirements() - .cpuRequest(destinationDefinitionWithNormalization.getResourceRequirements().getDefault().getCpuRequest())) - .jobSpecific(Collections.emptyList())); - final DestinationDefinitionReadList actualDestinationDefinitionReadList = destinationDefinitionsHandler .listDestinationDefinitionsForWorkspace(new WorkspaceIdRequestBody().workspaceId(workspaceId)); assertEquals( - Lists.newArrayList(expectedDestinationDefinitionRead1, expectedDestinationDefinitionRead2), + List.of(expectedDestinationDefinitionRead1), actualDestinationDefinitionReadList.getDestinationDefinitions()); } @@ -313,18 +289,14 @@ void testListDestinationDefinitionsForWorkspaceWithHiddenConnectors() throws IOE new Multi(List.of(new DestinationDefinition(hiddenDestinationDefinition.getDestinationDefinitionId()), new Workspace(workspaceId))))) .thenReturn(true); - when(configRepository.listPublicDestinationDefinitions(false)).thenReturn(Lists.newArrayList(destinationDefinition, hiddenDestinationDefinition)); - 
when(configRepository.listGrantedDestinationDefinitions(workspaceId, false)) - .thenReturn(Lists.newArrayList(destinationDefinitionWithNormalization)); + when(configRepository.listPublicDestinationDefinitions(false)).thenReturn(List.of(destinationDefinition, hiddenDestinationDefinition)); when(actorDefinitionVersionHelper.getDestinationVersion(destinationDefinition, workspaceId)).thenReturn(destinationDefinitionVersion); - when(actorDefinitionVersionHelper.getDestinationVersion(destinationDefinitionWithNormalization, workspaceId)) - .thenReturn(destinationDefinitionVersionWithNormalization); final DestinationDefinitionReadList actualDestinationDefinitionReadList = destinationDefinitionsHandler .listDestinationDefinitionsForWorkspace(new WorkspaceIdRequestBody().workspaceId(workspaceId)); final List expectedIds = - Lists.newArrayList(destinationDefinition.getDestinationDefinitionId(), destinationDefinitionWithNormalization.getDestinationDefinitionId()); + List.of(destinationDefinition.getDestinationDefinitionId()); assertEquals(expectedIds.size(), actualDestinationDefinitionReadList.getDestinationDefinitions().size()); assertTrue(expectedIds.containsAll(actualDestinationDefinitionReadList.getDestinationDefinitions().stream() @@ -336,12 +308,10 @@ void testListDestinationDefinitionsForWorkspaceWithHiddenConnectors() throws IOE void testListPrivateDestinationDefinitions() throws IOException, URISyntaxException { when(configRepository.listGrantableDestinationDefinitions(workspaceId, false)).thenReturn( - Lists.newArrayList( - Map.entry(destinationDefinition, false), - Map.entry(destinationDefinitionWithNormalization, true))); + List.of(Map.entry(destinationDefinition, false))); when(configRepository.getActorDefinitionVersions( - List.of(destinationDefinition.getDefaultVersionId(), destinationDefinitionWithNormalization.getDefaultVersionId()))) - .thenReturn(Lists.newArrayList(destinationDefinitionVersion, destinationDefinitionVersionWithNormalization)); + List.of(destinationDefinition.getDefaultVersionId()))) + .thenReturn(List.of(destinationDefinitionVersion)); final DestinationDefinitionRead expectedDestinationDefinitionRead1 = new DestinationDefinitionRead() .destinationDefinitionId(destinationDefinition.getDestinationDefinitionId()) @@ -354,46 +324,20 @@ void testListPrivateDestinationDefinitions() throws IOException, URISyntaxExcept .supportLevel(SupportLevel.fromValue(destinationDefinitionVersion.getSupportLevel().value())) .releaseStage(ReleaseStage.fromValue(destinationDefinitionVersion.getReleaseStage().value())) .releaseDate(LocalDate.parse(destinationDefinitionVersion.getReleaseDate())) - .supportsDbt(false) - .normalizationConfig(new io.airbyte.api.model.generated.NormalizationDestinationDefinitionConfig().supported(false)) .resourceRequirements(new io.airbyte.api.model.generated.ActorDefinitionResourceRequirements() ._default(new io.airbyte.api.model.generated.ResourceRequirements() .cpuRequest(destinationDefinition.getResourceRequirements().getDefault().getCpuRequest())) .jobSpecific(Collections.emptyList())); - final DestinationDefinitionRead expectedDestinationDefinitionRead2 = new DestinationDefinitionRead() - .destinationDefinitionId(destinationDefinitionWithNormalization.getDestinationDefinitionId()) - .name(destinationDefinitionWithNormalization.getName()) - .dockerRepository(destinationDefinitionVersionWithNormalization.getDockerRepository()) - .dockerImageTag(destinationDefinitionVersionWithNormalization.getDockerImageTag()) - .documentationUrl(new 
URI(destinationDefinitionVersionWithNormalization.getDocumentationUrl())) - .icon(DestinationDefinitionsHandler.loadIcon(destinationDefinitionWithNormalization.getIcon())) - .protocolVersion(destinationDefinitionVersionWithNormalization.getProtocolVersion()) - .supportLevel(SupportLevel.fromValue(destinationDefinitionVersionWithNormalization.getSupportLevel().value())) - .releaseStage(ReleaseStage.fromValue(destinationDefinitionVersionWithNormalization.getReleaseStage().value())) - .releaseDate(LocalDate.parse(destinationDefinitionVersionWithNormalization.getReleaseDate())) - .supportsDbt(destinationDefinitionVersionWithNormalization.getSupportsDbt()) - .normalizationConfig(new io.airbyte.api.model.generated.NormalizationDestinationDefinitionConfig().supported(true) - .normalizationRepository(destinationDefinitionVersionWithNormalization.getNormalizationConfig().getNormalizationRepository()) - .normalizationTag(destinationDefinitionVersionWithNormalization.getNormalizationConfig().getNormalizationTag()) - .normalizationIntegrationType(destinationDefinitionVersionWithNormalization.getNormalizationConfig().getNormalizationIntegrationType())) - .resourceRequirements(new io.airbyte.api.model.generated.ActorDefinitionResourceRequirements() - ._default(new io.airbyte.api.model.generated.ResourceRequirements() - .cpuRequest(destinationDefinitionWithNormalization.getResourceRequirements().getDefault().getCpuRequest())) - .jobSpecific(Collections.emptyList())); - final PrivateDestinationDefinitionRead expectedDestinationDefinitionOptInRead1 = new PrivateDestinationDefinitionRead().destinationDefinition(expectedDestinationDefinitionRead1).granted(false); - final PrivateDestinationDefinitionRead expectedDestinationDefinitionOptInRead2 = - new PrivateDestinationDefinitionRead().destinationDefinition(expectedDestinationDefinitionRead2).granted(true); - final PrivateDestinationDefinitionReadList actualDestinationDefinitionOptInReadList = destinationDefinitionsHandler.listPrivateDestinationDefinitions( new WorkspaceIdRequestBody().workspaceId(workspaceId)); assertEquals( - Lists.newArrayList(expectedDestinationDefinitionOptInRead1, expectedDestinationDefinitionOptInRead2), + List.of(expectedDestinationDefinitionOptInRead1), actualDestinationDefinitionOptInReadList.getDestinationDefinitions()); } @@ -416,8 +360,6 @@ void testGetDestination() throws JsonValidationException, ConfigNotFoundExceptio .supportLevel(SupportLevel.fromValue(destinationDefinitionVersion.getSupportLevel().value())) .releaseStage(ReleaseStage.fromValue(destinationDefinitionVersion.getReleaseStage().value())) .releaseDate(LocalDate.parse(destinationDefinitionVersion.getReleaseDate())) - .supportsDbt(false) - .normalizationConfig(new io.airbyte.api.model.generated.NormalizationDestinationDefinitionConfig().supported(false)) .resourceRequirements(new io.airbyte.api.model.generated.ActorDefinitionResourceRequirements() ._default(new io.airbyte.api.model.generated.ResourceRequirements() .cpuRequest(destinationDefinition.getResourceRequirements().getDefault().getCpuRequest())) @@ -487,8 +429,6 @@ void testGetDefinitionWithGrantForWorkspace() throws JsonValidationException, Co .supportLevel(SupportLevel.fromValue(destinationDefinitionVersion.getSupportLevel().value())) .releaseStage(ReleaseStage.fromValue(destinationDefinitionVersion.getReleaseStage().value())) .releaseDate(LocalDate.parse(destinationDefinitionVersion.getReleaseDate())) - .supportsDbt(false) - .normalizationConfig(new 
io.airbyte.api.model.generated.NormalizationDestinationDefinitionConfig().supported(false)) .resourceRequirements(new io.airbyte.api.model.generated.ActorDefinitionResourceRequirements() ._default(new io.airbyte.api.model.generated.ResourceRequirements() .cpuRequest(destinationDefinition.getResourceRequirements().getDefault().getCpuRequest())) @@ -527,8 +467,6 @@ void testGetDefinitionWithGrantForScope() throws JsonValidationException, Config .supportLevel(SupportLevel.fromValue(destinationDefinitionVersion.getSupportLevel().value())) .releaseStage(ReleaseStage.fromValue(destinationDefinitionVersion.getReleaseStage().value())) .releaseDate(LocalDate.parse(destinationDefinitionVersion.getReleaseDate())) - .supportsDbt(false) - .normalizationConfig(new io.airbyte.api.model.generated.NormalizationDestinationDefinitionConfig().supported(false)) .resourceRequirements(new io.airbyte.api.model.generated.ActorDefinitionResourceRequirements() ._default(new io.airbyte.api.model.generated.ResourceRequirements() .cpuRequest(destinationDefinition.getResourceRequirements().getDefault().getCpuRequest())) @@ -592,8 +530,6 @@ void testCreateCustomDestinationDefinition() throws URISyntaxException, IOExcept .custom(true) .supportLevel(SupportLevel.fromValue(destinationDefinitionVersion.getSupportLevel().value())) .releaseStage(ReleaseStage.CUSTOM) - .supportsDbt(false) - .normalizationConfig(new io.airbyte.api.model.generated.NormalizationDestinationDefinitionConfig().supported(false)) .resourceRequirements(new io.airbyte.api.model.generated.ActorDefinitionResourceRequirements() ._default(new io.airbyte.api.model.generated.ResourceRequirements() .cpuRequest(newDestinationDefinition.getResourceRequirements().getDefault().getCpuRequest())) @@ -658,8 +594,6 @@ void testCreateCustomDestinationDefinitionUsingScopes() throws URISyntaxExceptio .custom(true) .supportLevel(SupportLevel.fromValue(destinationDefinitionVersion.getSupportLevel().value())) .releaseStage(ReleaseStage.CUSTOM) - .supportsDbt(false) - .normalizationConfig(new io.airbyte.api.model.generated.NormalizationDestinationDefinitionConfig().supported(false)) .resourceRequirements(new io.airbyte.api.model.generated.ActorDefinitionResourceRequirements() ._default(new io.airbyte.api.model.generated.ResourceRequirements() .cpuRequest(newDestinationDefinition.getResourceRequirements().getDefault().getCpuRequest())) @@ -786,8 +720,6 @@ void testUpdateDestination(final boolean useIconUrlInApiResponseFlagValue) .supportLevel(SupportLevel.fromValue(destinationDefinitionVersion.getSupportLevel().value())) .releaseStage(ReleaseStage.fromValue(destinationDefinitionVersion.getReleaseStage().value())) .releaseDate(LocalDate.parse(destinationDefinitionVersion.getReleaseDate())) - .supportsDbt(false) - .normalizationConfig(new io.airbyte.api.model.generated.NormalizationDestinationDefinitionConfig().supported(false)) .resourceRequirements(new io.airbyte.api.model.generated.ActorDefinitionResourceRequirements() ._default(new io.airbyte.api.model.generated.ResourceRequirements() .cpuRequest(destinationDefinition.getResourceRequirements().getDefault().getCpuRequest())) @@ -872,8 +804,6 @@ void testGrantDestinationDefinitionToWorkspace() throws JsonValidationException, .supportLevel(SupportLevel.fromValue(destinationDefinitionVersion.getSupportLevel().value())) .releaseStage(ReleaseStage.fromValue(destinationDefinitionVersion.getReleaseStage().value())) .releaseDate(LocalDate.parse(destinationDefinitionVersion.getReleaseDate())) - .supportsDbt(false) - 
.normalizationConfig(new io.airbyte.api.model.generated.NormalizationDestinationDefinitionConfig().supported(false)) .resourceRequirements(new io.airbyte.api.model.generated.ActorDefinitionResourceRequirements() ._default(new io.airbyte.api.model.generated.ResourceRequirements() .cpuRequest(destinationDefinition.getResourceRequirements().getDefault().getCpuRequest())) @@ -910,8 +840,6 @@ void testGrantDestinationDefinitionToOrganization() throws JsonValidationExcepti .supportLevel(SupportLevel.fromValue(destinationDefinitionVersion.getSupportLevel().value())) .releaseStage(ReleaseStage.fromValue(destinationDefinitionVersion.getReleaseStage().value())) .releaseDate(LocalDate.parse(destinationDefinitionVersion.getReleaseDate())) - .supportsDbt(false) - .normalizationConfig(new io.airbyte.api.model.generated.NormalizationDestinationDefinitionConfig().supported(false)) .resourceRequirements(new io.airbyte.api.model.generated.ActorDefinitionResourceRequirements() ._default(new io.airbyte.api.model.generated.ResourceRequirements() .cpuRequest(destinationDefinition.getResourceRequirements().getDefault().getCpuRequest())) @@ -962,7 +890,7 @@ void testCorrect() { .withDockerImageTag("1.2.4") .withIcon("dest.svg") .withSpec(new ConnectorSpecification().withConnectionSpecification( - Jsons.jsonNode(ImmutableMap.of("key", "val")))) + Jsons.jsonNode(Map.of("key", "val")))) .withTombstone(false) .withProtocolVersion("0.2.2") .withSupportLevel(io.airbyte.config.SupportLevel.COMMUNITY) diff --git a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/DestinationHandlerTest.java b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/DestinationHandlerTest.java index e93664238db..7f004e41e55 100644 --- a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/DestinationHandlerTest.java +++ b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/DestinationHandlerTest.java @@ -7,7 +7,10 @@ import static io.airbyte.featureflag.ContextKt.ANONYMOUS; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; @@ -15,6 +18,8 @@ import com.fasterxml.jackson.databind.node.ObjectNode; import com.google.common.collect.Lists; import io.airbyte.api.model.generated.ActorStatus; +import io.airbyte.api.model.generated.ConnectionRead; +import io.airbyte.api.model.generated.ConnectionReadList; import io.airbyte.api.model.generated.DestinationCloneConfiguration; import io.airbyte.api.model.generated.DestinationCloneRequestBody; import io.airbyte.api.model.generated.DestinationCreate; @@ -29,11 +34,13 @@ import io.airbyte.commons.json.Jsons; import io.airbyte.commons.server.converters.ConfigurationUpdate; import io.airbyte.commons.server.handlers.helpers.ActorDefinitionHandlerHelper; +import io.airbyte.commons.server.helpers.ConnectionHelpers; import io.airbyte.commons.server.helpers.ConnectorSpecificationHelpers; import io.airbyte.commons.server.helpers.DestinationHelpers; import io.airbyte.config.ActorDefinitionVersion; import io.airbyte.config.DestinationConnection; import io.airbyte.config.StandardDestinationDefinition; +import io.airbyte.config.StandardSync; import io.airbyte.config.persistence.ActorDefinitionVersionHelper; import 
io.airbyte.config.persistence.ActorDefinitionVersionHelper.ActorDefinitionVersionWithOverrideStatus; import io.airbyte.config.persistence.ConfigNotFoundException; @@ -41,6 +48,7 @@ import io.airbyte.config.secrets.JsonSecretsProcessor; import io.airbyte.data.helpers.ActorDefinitionVersionUpdater; import io.airbyte.data.services.DestinationService; +import io.airbyte.featureflag.DeleteSecretsWhenTombstoneActors; import io.airbyte.featureflag.TestClient; import io.airbyte.featureflag.UseIconUrlInApiResponse; import io.airbyte.featureflag.Workspace; @@ -49,6 +57,7 @@ import io.airbyte.validation.json.JsonSchemaValidator; import io.airbyte.validation.json.JsonValidationException; import java.io.IOException; +import java.util.Collections; import java.util.UUID; import java.util.function.Supplier; import org.junit.jupiter.api.BeforeEach; @@ -333,6 +342,110 @@ void testListDestinationForWorkspace() throws JsonValidationException, ConfigNot .prepareSecretsForOutput(destinationConnection.getConfiguration(), destinationDefinitionSpecificationRead.getConnectionSpecification()); } + @Test + void testDeleteDestination() + throws JsonValidationException, ConfigNotFoundException, IOException, io.airbyte.data.exceptions.ConfigNotFoundException { + final JsonNode newConfiguration = destinationConnection.getConfiguration(); + ((ObjectNode) newConfiguration).put("apiKey", "987-xyz"); + + final DestinationConnection expectedSourceConnection = Jsons.clone(destinationConnection).withTombstone(true); + + final DestinationIdRequestBody destinationIdRequestBody = new DestinationIdRequestBody().destinationId(destinationConnection.getDestinationId()); + final StandardSync standardSync = ConnectionHelpers.generateSyncWithDestinationId(destinationConnection.getDestinationId()); + standardSync.setBreakingChange(false); + final ConnectionRead connectionRead = ConnectionHelpers.generateExpectedConnectionRead(standardSync); + final ConnectionReadList connectionReadList = new ConnectionReadList().connections(Collections.singletonList(connectionRead)); + final WorkspaceIdRequestBody workspaceIdRequestBody = new WorkspaceIdRequestBody().workspaceId(destinationConnection.getWorkspaceId()); + + when(configRepository.getDestinationConnection(destinationConnection.getDestinationId())) + .thenReturn(destinationConnection) + .thenReturn(expectedSourceConnection); + when(destinationService.getDestinationConnectionWithSecrets(destinationConnection.getDestinationId())) + .thenReturn(destinationConnection) + .thenReturn(expectedSourceConnection); + when(oAuthConfigSupplier.maskSourceOAuthParameters(destinationDefinitionSpecificationRead.getDestinationDefinitionId(), + destinationConnection.getWorkspaceId(), + newConfiguration, destinationDefinitionVersion.getSpec())).thenReturn(newConfiguration); + when(configRepository.getStandardDestinationDefinition(destinationDefinitionSpecificationRead.getDestinationDefinitionId())) + .thenReturn(standardDestinationDefinition); + when(actorDefinitionVersionHelper.getDestinationVersion(standardDestinationDefinition, destinationConnection.getWorkspaceId(), + destinationConnection.getDestinationId())) + .thenReturn(destinationDefinitionVersion); + when(configRepository.getDestinationDefinitionFromDestination(destinationConnection.getDestinationId())) + .thenReturn(standardDestinationDefinition); + when(connectionsHandler.listConnectionsForWorkspace(workspaceIdRequestBody)).thenReturn(connectionReadList); + when( + secretsProcessor.prepareSecretsForOutput(destinationConnection.getConfiguration(), + 
destinationDefinitionSpecificationRead.getConnectionSpecification()))
+        .thenReturn(destinationConnection.getConfiguration());
+    when(actorDefinitionVersionHelper.getDestinationVersionWithOverrideStatus(standardDestinationDefinition, destinationConnection.getWorkspaceId(),
+        destinationConnection.getDestinationId())).thenReturn(destinationDefinitionVersionWithOverrideStatus);
+    // By default feature flag is false
+    when(featureFlagClient.boolVariation(
+        eq(DeleteSecretsWhenTombstoneActors.INSTANCE),
+        any(Workspace.class))).thenReturn(false);
+
+    destinationHandler.deleteDestination(destinationIdRequestBody);
+
+    verify(destinationService).getDestinationConnectionWithSecrets(any());
+    verify(destinationService).writeDestinationConnectionWithSecrets(any(), any());
+    verify(connectionsHandler).listConnectionsForWorkspace(workspaceIdRequestBody);
+    verify(connectionsHandler).deleteConnection(connectionRead.getConnectionId());
+  }
+
+  @Test
+  void testDeleteDestinationAndDeleteSecrets()
+      throws JsonValidationException, ConfigNotFoundException, IOException, io.airbyte.data.exceptions.ConfigNotFoundException {
+    final JsonNode newConfiguration = destinationConnection.getConfiguration();
+    ((ObjectNode) newConfiguration).put("apiKey", "987-xyz");
+
+    final DestinationConnection expectedSourceConnection = Jsons.clone(destinationConnection).withTombstone(true);
+
+    final DestinationIdRequestBody destinationIdRequestBody = new DestinationIdRequestBody().destinationId(destinationConnection.getDestinationId());
+    final StandardSync standardSync = ConnectionHelpers.generateSyncWithDestinationId(destinationConnection.getDestinationId());
+    standardSync.setBreakingChange(false);
+    final ConnectionRead connectionRead = ConnectionHelpers.generateExpectedConnectionRead(standardSync);
+    final ConnectionReadList connectionReadList = new ConnectionReadList().connections(Collections.singletonList(connectionRead));
+    final WorkspaceIdRequestBody workspaceIdRequestBody = new WorkspaceIdRequestBody().workspaceId(destinationConnection.getWorkspaceId());
+
+    when(configRepository.getDestinationConnection(destinationConnection.getDestinationId()))
+        .thenReturn(destinationConnection)
+        .thenReturn(expectedSourceConnection);
+    when(destinationService.getDestinationConnectionWithSecrets(destinationConnection.getDestinationId()))
+        .thenReturn(destinationConnection)
+        .thenReturn(expectedSourceConnection);
+    when(oAuthConfigSupplier.maskSourceOAuthParameters(destinationDefinitionSpecificationRead.getDestinationDefinitionId(),
+        destinationConnection.getWorkspaceId(),
+        newConfiguration, destinationDefinitionVersion.getSpec())).thenReturn(newConfiguration);
+    when(configRepository.getStandardDestinationDefinition(destinationDefinitionSpecificationRead.getDestinationDefinitionId()))
+        .thenReturn(standardDestinationDefinition);
+    when(actorDefinitionVersionHelper.getDestinationVersion(standardDestinationDefinition, destinationConnection.getWorkspaceId(),
+        destinationConnection.getDestinationId()))
+        .thenReturn(destinationDefinitionVersion);
+    when(configRepository.getDestinationDefinitionFromDestination(destinationConnection.getDestinationId()))
+        .thenReturn(standardDestinationDefinition);
+    when(connectionsHandler.listConnectionsForWorkspace(workspaceIdRequestBody)).thenReturn(connectionReadList);
+    when(
+        secretsProcessor.prepareSecretsForOutput(destinationConnection.getConfiguration(),
+            destinationDefinitionSpecificationRead.getConnectionSpecification()))
+        .thenReturn(destinationConnection.getConfiguration());
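+    // The stubbing above mirrors testDeleteDestination; only the feature-flag value below differs.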
+    when(actorDefinitionVersionHelper.getDestinationVersionWithOverrideStatus(standardDestinationDefinition, destinationConnection.getWorkspaceId(),
+        destinationConnection.getDestinationId())).thenReturn(destinationDefinitionVersionWithOverrideStatus);
+    // Turn on feature flag to delete secrets
+    when(featureFlagClient.boolVariation(
+        eq(DeleteSecretsWhenTombstoneActors.INSTANCE),
+        any(Workspace.class))).thenReturn(true);
+    destinationHandler.deleteDestination(destinationIdRequestBody);
+
+    // With the flag on, we should no longer read or write secrets, since the destination is being deleted.
+    verify(destinationService, times(0)).writeDestinationConnectionWithSecrets(expectedSourceConnection, connectorSpecification);
+    verify(destinationService, times(0)).getDestinationConnectionWithSecrets(any());
+    verify(destinationService).tombstoneDestination(any(), any(), any(), any());
+    verify(connectionsHandler).listConnectionsForWorkspace(workspaceIdRequestBody);
+    verify(connectionsHandler).deleteConnection(connectionRead.getConnectionId());
+  }
+
   @Test
   void testSearchDestinations() throws JsonValidationException, ConfigNotFoundException, IOException {
     final DestinationRead expectedDestinationRead = new DestinationRead()
diff --git a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/JobHistoryHandlerTest.java b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/JobHistoryHandlerTest.java
index f769622205e..aa67ae7cd25 100644
--- a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/JobHistoryHandlerTest.java
+++ b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/JobHistoryHandlerTest.java
@@ -17,8 +17,6 @@
 import com.google.common.collect.ImmutableList;
 import io.airbyte.api.model.generated.AttemptInfoRead;
-import io.airbyte.api.model.generated.AttemptNormalizationStatusRead;
-import io.airbyte.api.model.generated.AttemptNormalizationStatusReadList;
 import io.airbyte.api.model.generated.AttemptRead;
 import io.airbyte.api.model.generated.AttemptStreamStats;
 import io.airbyte.api.model.generated.ConnectionIdRequestBody;
@@ -77,7 +75,6 @@
 import io.airbyte.persistence.job.JobPersistence.AttemptStats;
 import io.airbyte.persistence.job.JobPersistence.JobAttemptPair;
 import io.airbyte.persistence.job.models.Attempt;
-import io.airbyte.persistence.job.models.AttemptNormalizationStatus;
 import io.airbyte.persistence.job.models.AttemptStatus;
 import io.airbyte.persistence.job.models.Job;
 import io.airbyte.persistence.job.models.JobStatus;
@@ -704,7 +701,7 @@ void testGetConnectionSyncProgressWithRunningJob() throws IOException {
         .job(jobRead)
         .attempts(ImmutableList.of(toAttemptRead(testJobAttempt)));

-    when(jobPersistence.getRunningSyncJobForConnections(List.of(connectionId))).thenReturn(List.of(firstJob));
+    when(jobPersistence.getRunningJobForConnection(connectionId)).thenReturn(List.of(firstJob));

     try (final MockedStatic<JobConverter> mockedConverter = Mockito.mockStatic(JobConverter.class)) {
       mockedConverter.when(() -> JobConverter.getJobWithAttemptsRead(firstJob)).thenReturn(firstJobWithAttemptRead);
@@ -716,6 +713,7 @@
           .bytesEmitted(jobAggregatedStats.getBytesEmitted())
           .recordsEmitted(jobAggregatedStats.getRecordsEmitted())
           .configType(JobConfigType.SYNC)
+          .syncStartedAt(CREATED_AT)
           .streams(List.of(
               new StreamSyncProgressReadItem()
                   .streamName("stream1")
@@ -743,7 +741,7 @@ void testGetConnectionSyncProgressWithRunningJob()
throws IOException { } @Test - @DisplayName("Should return data for a running refreshes") + @DisplayName("Should return data for a running refresh") void testGetConnectionSyncProgressWithRefresh() throws IOException { final UUID connectionId = UUID.randomUUID(); final ConnectionIdRequestBody request = new ConnectionIdRequestBody().connectionId(connectionId); @@ -770,7 +768,7 @@ void testGetConnectionSyncProgressWithRefresh() throws IOException { .job(jobRead) .attempts(ImmutableList.of(toAttemptRead(testJobAttempt))); - when(jobPersistence.getRunningSyncJobForConnections(List.of(connectionId))).thenReturn(List.of(firstJob)); + when(jobPersistence.getRunningJobForConnection(connectionId)).thenReturn(List.of(firstJob)); try (final MockedStatic mockedConverter = Mockito.mockStatic(JobConverter.class)) { mockedConverter.when(() -> JobConverter.getJobWithAttemptsRead(firstJob)).thenReturn(firstJobWithAttemptRead); @@ -782,6 +780,7 @@ void testGetConnectionSyncProgressWithRefresh() throws IOException { .bytesEmitted(jobAggregatedStats.getBytesEmitted()) .recordsEmitted(jobAggregatedStats.getRecordsEmitted()) .configType(JobConfigType.REFRESH) + .syncStartedAt(CREATED_AT) .streams(List.of( new StreamSyncProgressReadItem() .streamName("stream3") @@ -801,13 +800,14 @@ void testGetConnectionSyncProgressWithRefresh() throws IOException { } @Test - @DisplayName("Should not return data for a running clear") + @DisplayName("Should return data for a running clear") void testGetConnectionSyncProgressWithClear() throws IOException { final UUID connectionId = UUID.randomUUID(); final ConnectionIdRequestBody request = new ConnectionIdRequestBody().connectionId(connectionId); - final Job firstJob = new Job(JOB_ID, ConfigType.CLEAR, JOB_CONFIG_ID, JOB_CONFIG, ImmutableList.of(testJobAttempt), JobStatus.RUNNING, - CREATED_AT, CREATED_AT, CREATED_AT); + final Job firstJob = + new Job(JOB_ID, ConfigType.RESET_CONNECTION, JOB_CONFIG_ID, JOB_CONFIG, ImmutableList.of(testJobAttempt), JobStatus.RUNNING, + CREATED_AT, CREATED_AT, CREATED_AT); final JobRead jobRead = toJobInfo(firstJob); jobRead.setResetConfig(new ResetConfig().streamsToReset(List.of( @@ -819,19 +819,20 @@ void testGetConnectionSyncProgressWithClear() throws IOException { .job(jobRead) .attempts(ImmutableList.of(toAttemptRead(testJobAttempt))); - when(jobPersistence.getRunningSyncJobForConnections(List.of(connectionId))).thenReturn(List.of(firstJob)); + when(jobPersistence.getRunningJobForConnection(connectionId)).thenReturn(List.of(firstJob)); try (final MockedStatic mockedConverter = Mockito.mockStatic(JobConverter.class)) { mockedConverter.when(() -> JobConverter.getJobWithAttemptsRead(firstJob)).thenReturn(firstJobWithAttemptRead); final ConnectionSyncProgressRead expected = new ConnectionSyncProgressRead() .connectionId(connectionId) .jobId(JOB_ID) - .configType(JobConfigType.CLEAR) + .configType(JobConfigType.RESET_CONNECTION) + .syncStartedAt(CREATED_AT) .streams(List.of( new StreamSyncProgressReadItem() .streamName("stream1") .streamNamespace("ns1") - .configType(JobConfigType.CLEAR))); + .configType(JobConfigType.RESET_CONNECTION))); final ConnectionSyncProgressRead actual = jobHistoryHandler.getConnectionSyncProgress(request); @@ -868,21 +869,6 @@ void testEnumConversion() { assertTrue(Enums.isCompatible(JobConfig.ConfigType.class, JobConfigType.class)); } - @Test - @DisplayName("Should return attempt normalization info for the job") - void testGetAttemptNormalizationStatuses() throws IOException { - - final AttemptNormalizationStatus 
databaseReadResult = new AttemptNormalizationStatus(1, Optional.of(10L), /* hasNormalizationFailed= */ false); - - when(jobPersistence.getAttemptNormalizationStatusesForJob(JOB_ID)).thenReturn(List.of(databaseReadResult)); - - final AttemptNormalizationStatusReadList expectedStatus = new AttemptNormalizationStatusReadList().attemptNormalizationStatuses( - List.of(new AttemptNormalizationStatusRead().attemptNumber(1).hasRecordsCommitted(true).hasNormalizationFailed(false).recordsCommitted(10L))); - - assertEquals(expectedStatus, jobHistoryHandler.getAttemptNormalizationStatuses(new JobIdRequestBody().id(JOB_ID))); - - } - @Test @DisplayName("Should test to ensure that JobInfoReadWithoutLogs includes the bytes and records committed") void testGetJobInfoWithoutLogs() throws IOException { diff --git a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/JobsHandlerTest.java b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/JobsHandlerTest.java index 95bd1b1ec89..114b0f2abda 100644 --- a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/JobsHandlerTest.java +++ b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/JobsHandlerTest.java @@ -33,7 +33,6 @@ import io.airbyte.config.JobConfig; import io.airbyte.config.JobOutput; import io.airbyte.config.JobSyncConfig; -import io.airbyte.config.NormalizationSummary; import io.airbyte.config.StandardSyncOutput; import io.airbyte.config.StandardSyncSummary; import io.airbyte.config.StandardSyncSummary.ReplicationStatus; @@ -85,9 +84,7 @@ public class JobsHandlerTest { private static final StandardSyncOutput standardSyncOutput = new StandardSyncOutput() .withStandardSyncSummary( new StandardSyncSummary() - .withStatus(ReplicationStatus.COMPLETED)) - .withNormalizationSummary( - new NormalizationSummary()); + .withStatus(ReplicationStatus.COMPLETED)); private static final JobOutput jobOutput = new JobOutput().withSync(standardSyncOutput); private static final ConfiguredAirbyteCatalog catalog = new ConfiguredAirbyteCatalog() @@ -128,7 +125,7 @@ void testJobSuccessWithAttemptNumber() throws IOException { verify(jobPersistence).succeedAttempt(JOB_ID, ATTEMPT_NUMBER); verify(jobNotifier).successJob(any(), any()); verify(helper).trackCompletion(any(), eq(JobStatus.SUCCEEDED)); - verify(connectionTimelineEventService).writeEvent(eq(CONNECTION_ID), any()); + verify(connectionTimelineEventService).writeEvent(eq(CONNECTION_ID), any(), any()); } @Test @@ -197,7 +194,7 @@ void testResetJobNoNotification() throws IOException { .jobId(JOB_ID) .connectionId(UUID.randomUUID()) .standardSyncOutput(standardSyncOutput); - Job job = new Job(JOB_ID, RESET_CONNECTION, "", simpleConfig, List.of(), io.airbyte.persistence.job.models.JobStatus.SUCCEEDED, 0L, 0, 0); + final Job job = new Job(JOB_ID, RESET_CONNECTION, "", simpleConfig, List.of(), io.airbyte.persistence.job.models.JobStatus.SUCCEEDED, 0L, 0, 0); when(jobPersistence.getJob(JOB_ID)).thenReturn(job); jobsHandler.jobSuccessWithAttemptNumber(request); @@ -371,7 +368,7 @@ void setJobFailure() throws IOException { verify(jobPersistence).failJob(JOB_ID); verify(jobNotifier).failJob(Mockito.any(), any()); verify(jobErrorReporter).reportSyncJobFailure(CONNECTION_ID, failureSummary, expectedReportingContext, expectedAttemptConfig); - verify(connectionTimelineEventService).writeEvent(eq(CONNECTION_ID), any()); + verify(connectionTimelineEventService).writeEvent(eq(CONNECTION_ID), any(), any()); } @Test diff --git 
a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/OperationsHandlerTest.java b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/OperationsHandlerTest.java index c9c819d7966..22d3128dd31 100644 --- a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/OperationsHandlerTest.java +++ b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/OperationsHandlerTest.java @@ -13,6 +13,8 @@ import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.node.TextNode; import io.airbyte.api.model.generated.ConnectionIdRequestBody; import io.airbyte.api.model.generated.OperationCreate; import io.airbyte.api.model.generated.OperationIdRequestBody; @@ -20,15 +22,12 @@ import io.airbyte.api.model.generated.OperationReadList; import io.airbyte.api.model.generated.OperationUpdate; import io.airbyte.api.model.generated.OperatorConfiguration; -import io.airbyte.api.model.generated.OperatorDbt; -import io.airbyte.api.model.generated.OperatorNormalization; -import io.airbyte.api.model.generated.OperatorNormalization.OptionEnum; import io.airbyte.api.model.generated.OperatorType; import io.airbyte.api.model.generated.OperatorWebhook; import io.airbyte.api.model.generated.OperatorWebhook.WebhookTypeEnum; import io.airbyte.api.model.generated.OperatorWebhookDbtCloud; import io.airbyte.commons.enums.Enums; -import io.airbyte.config.OperatorNormalization.Option; +import io.airbyte.commons.json.Jsons; import io.airbyte.config.StandardSync; import io.airbyte.config.StandardSyncOperation; import io.airbyte.config.StandardWorkspace; @@ -59,6 +58,7 @@ class OperationsHandlerTest { private Supplier uuidGenerator; private OperationsHandler operationsHandler; private StandardSyncOperation standardSyncOperation; + private io.airbyte.config.OperatorWebhook operatorWebhook; @SuppressWarnings("unchecked") @BeforeEach @@ -67,42 +67,17 @@ void setUp() throws IOException { uuidGenerator = mock(Supplier.class); operationsHandler = new OperationsHandler(configRepository, uuidGenerator); + operatorWebhook = new io.airbyte.config.OperatorWebhook() + .withWebhookConfigId(WEBHOOK_CONFIG_ID) + .withExecutionBody(Jsons.serialize(new OperatorWebhookDbtCloud().accountId(DBT_CLOUD_WEBHOOK_ACCOUNT_ID).jobId(DBT_CLOUD_WEBHOOK_JOB_ID))) + .withExecutionUrl(String.format(EXECUTION_URL_TEMPLATE, DBT_CLOUD_WEBHOOK_ACCOUNT_ID, DBT_CLOUD_WEBHOOK_JOB_ID)); standardSyncOperation = new StandardSyncOperation() .withWorkspaceId(UUID.randomUUID()) .withOperationId(UUID.randomUUID()) .withName("presto to hudi") - .withOperatorType(io.airbyte.config.StandardSyncOperation.OperatorType.NORMALIZATION) - .withOperatorNormalization(new io.airbyte.config.OperatorNormalization().withOption(Option.BASIC)) - .withOperatorDbt(null) - .withTombstone(false); - } - - @Test - void testCreateOperation() throws JsonValidationException, ConfigNotFoundException, IOException { - when(uuidGenerator.get()).thenReturn(standardSyncOperation.getOperationId()); - - when(configRepository.getStandardSyncOperation(standardSyncOperation.getOperationId())).thenReturn(standardSyncOperation); - - final OperationCreate operationCreate = new OperationCreate() - .workspaceId(standardSyncOperation.getWorkspaceId()) - .name(standardSyncOperation.getName()) - .operatorConfiguration(new OperatorConfiguration() - .operatorType(OperatorType.NORMALIZATION) - .normalization(new 
OperatorNormalization().option(OptionEnum.BASIC))); - - final OperationRead actualOperationRead = operationsHandler.createOperation(operationCreate); - - final OperationRead expectedOperationRead = new OperationRead() - .workspaceId(standardSyncOperation.getWorkspaceId()) - .operationId(standardSyncOperation.getOperationId()) - .name(standardSyncOperation.getName()) - .operatorConfiguration(new OperatorConfiguration() - .operatorType(OperatorType.NORMALIZATION) - .normalization(new OperatorNormalization().option(OptionEnum.BASIC))); - - assertEquals(expectedOperationRead, actualOperationRead); - - verify(configRepository).writeStandardSyncOperation(standardSyncOperation); + .withTombstone(false) + .withOperatorType(StandardSyncOperation.OperatorType.WEBHOOK) + .withOperatorWebhook(operatorWebhook); } @Test @@ -120,6 +95,9 @@ void testCreateWebhookOperation() throws JsonValidationException, ConfigNotFound .operatorConfiguration(new OperatorConfiguration() .operatorType(OperatorType.WEBHOOK).webhook(webhookConfig)); + final JsonNode webhookOperationConfig = mock(JsonNode.class); + when(webhookOperationConfig.get("customDbtHost")).thenReturn(new TextNode("")); + final StandardSyncOperation expectedPersistedOperation = new StandardSyncOperation() .withWorkspaceId(standardSyncOperation.getWorkspaceId()) .withOperationId(WEBHOOK_OPERATION_ID) @@ -132,8 +110,8 @@ void testCreateWebhookOperation() throws JsonValidationException, ConfigNotFound .withExecutionBody(EXECUTION_BODY)) .withTombstone(false); - StandardWorkspace workspace = new StandardWorkspace(); - when(configRepository.getStandardWorkspaceNoSecrets(standardSyncOperation.getWorkspaceId(), false)).thenReturn(workspace); + final StandardWorkspace workspace = new StandardWorkspace().withWebhookOperationConfigs(webhookOperationConfig); + when(configRepository.getStandardWorkspaceNoSecrets(operationCreate.getWorkspaceId(), false)).thenReturn(workspace); when(configRepository.getStandardSyncOperation(WEBHOOK_OPERATION_ID)).thenReturn(expectedPersistedOperation); final OperationRead actualOperationRead = operationsHandler.createOperation(operationCreate); @@ -152,54 +130,6 @@ void testCreateWebhookOperation() throws JsonValidationException, ConfigNotFound verify(configRepository).writeStandardSyncOperation(eq(expectedPersistedOperation)); } - @Test - void testUpdateOperation() throws JsonValidationException, ConfigNotFoundException, IOException { - final OperationUpdate operationUpdate = new OperationUpdate() - .operationId(standardSyncOperation.getOperationId()) - .name(standardSyncOperation.getName()) - .operatorConfiguration(new OperatorConfiguration() - .operatorType(OperatorType.DBT) - .dbt(new OperatorDbt() - .gitRepoUrl("git_repo_url") - .gitRepoBranch("git_repo_branch") - .dockerImage("docker") - .dbtArguments("--full-refresh"))); - - final StandardSyncOperation updatedStandardSyncOperation = new StandardSyncOperation() - .withWorkspaceId(standardSyncOperation.getWorkspaceId()) - .withOperationId(standardSyncOperation.getOperationId()) - .withName(standardSyncOperation.getName()) - .withOperatorType(io.airbyte.config.StandardSyncOperation.OperatorType.DBT) - .withOperatorDbt(new io.airbyte.config.OperatorDbt() - .withGitRepoUrl("git_repo_url") - .withGitRepoBranch("git_repo_branch") - .withDockerImage("docker") - .withDbtArguments("--full-refresh")) - .withOperatorNormalization(null) - .withTombstone(false); - - when(configRepository.getStandardSyncOperation(standardSyncOperation.getOperationId())).thenReturn(standardSyncOperation) - 
.thenReturn(updatedStandardSyncOperation); - - final OperationRead actualOperationRead = operationsHandler.updateOperation(operationUpdate); - - final OperationRead expectedOperationRead = new OperationRead() - .workspaceId(standardSyncOperation.getWorkspaceId()) - .operationId(standardSyncOperation.getOperationId()) - .name(standardSyncOperation.getName()) - .operatorConfiguration(new OperatorConfiguration() - .operatorType(OperatorType.DBT) - .dbt(new OperatorDbt() - .gitRepoUrl("git_repo_url") - .gitRepoBranch("git_repo_branch") - .dockerImage("docker") - .dbtArguments("--full-refresh"))); - - assertEquals(expectedOperationRead, actualOperationRead); - - verify(configRepository).writeStandardSyncOperation(updatedStandardSyncOperation); - } - @Test void testUpdateWebhookOperation() throws JsonValidationException, ConfigNotFoundException, IOException { when(uuidGenerator.get()).thenReturn(WEBHOOK_OPERATION_ID); @@ -278,9 +208,17 @@ private OperationRead generateOperationRead() { .workspaceId(standardSyncOperation.getWorkspaceId()) .operationId(standardSyncOperation.getOperationId()) .name(standardSyncOperation.getName()) - .operatorConfiguration(new OperatorConfiguration() - .operatorType(OperatorType.NORMALIZATION) - .normalization(new OperatorNormalization().option(OptionEnum.BASIC))); + .operatorConfiguration( + new OperatorConfiguration() + .operatorType(OperatorType.WEBHOOK).webhook( + new OperatorWebhook() + .webhookConfigId(WEBHOOK_CONFIG_ID) + .webhookType(WebhookTypeEnum.DBTCLOUD) + .executionUrl(operatorWebhook.getExecutionUrl()) + .executionBody(operatorWebhook.getExecutionBody()) + .dbtCloud(new OperatorWebhookDbtCloud() + .accountId(DBT_CLOUD_WEBHOOK_ACCOUNT_ID) + .jobId(DBT_CLOUD_WEBHOOK_JOB_ID)))); } @Test @@ -349,8 +287,6 @@ void testDeleteOperationsForConnection() throws JsonValidationException, IOExcep @Test void testEnumConversion() { assertTrue(Enums.isCompatible(io.airbyte.api.model.generated.OperatorType.class, io.airbyte.config.StandardSyncOperation.OperatorType.class)); - assertTrue(Enums.isCompatible(io.airbyte.api.model.generated.OperatorNormalization.OptionEnum.class, - io.airbyte.config.OperatorNormalization.Option.class)); } @Test diff --git a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/SchedulerHandlerTest.java b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/SchedulerHandlerTest.java index 62ff70b4881..3ce48ddab0f 100644 --- a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/SchedulerHandlerTest.java +++ b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/SchedulerHandlerTest.java @@ -78,6 +78,7 @@ import io.airbyte.commons.server.scheduler.SynchronousJobMetadata; import io.airbyte.commons.server.scheduler.SynchronousResponse; import io.airbyte.commons.server.scheduler.SynchronousSchedulerClient; +import io.airbyte.commons.server.support.CurrentUserService; import io.airbyte.commons.temporal.ErrorCode; import io.airbyte.commons.temporal.JobMetadata; import io.airbyte.commons.temporal.TemporalClient.ManualOperationResult; @@ -93,7 +94,6 @@ import io.airbyte.config.JobTypeResourceLimit.JobType; import io.airbyte.config.NotificationItem; import io.airbyte.config.NotificationSettings; -import io.airbyte.config.OperatorNormalization; import io.airbyte.config.OperatorWebhook; import io.airbyte.config.ResourceRequirements; import io.airbyte.config.SourceConnection; @@ -111,9 +111,11 @@ import io.airbyte.config.persistence.StreamResetPersistence; import 
io.airbyte.config.persistence.domain.StreamRefresh; import io.airbyte.config.secrets.SecretsRepositoryWriter; +import io.airbyte.data.services.ConnectionTimelineEventService; import io.airbyte.data.services.SecretPersistenceConfigService; import io.airbyte.data.services.WorkspaceService; import io.airbyte.db.instance.configs.jooq.generated.enums.RefreshType; +import io.airbyte.featureflag.DiscoverPostprocessInTemporal; import io.airbyte.featureflag.FeatureFlagClient; import io.airbyte.featureflag.TestClient; import io.airbyte.persistence.job.JobCreator; @@ -150,6 +152,7 @@ import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.Arguments; import org.junit.jupiter.params.provider.MethodSource; +import org.junit.jupiter.params.provider.ValueSource; import org.mockito.ArgumentCaptor; import org.mockito.Mockito; @@ -230,12 +233,6 @@ class SchedulerHandlerTest { .withSupportedDestinationSyncModes(List.of(DestinationSyncMode.OVERWRITE, DestinationSyncMode.APPEND, DestinationSyncMode.APPEND_DEDUP)) .withDocumentationUrl(URI.create("unused"))); - private static final StandardSyncOperation NORMALIZATION_OPERATION = new StandardSyncOperation() - .withOperatorType(StandardSyncOperation.OperatorType.NORMALIZATION) - .withOperatorNormalization(new OperatorNormalization()); - - private static final UUID NORMALIZATION_OPERATION_ID = UUID.randomUUID(); - public static final StandardSyncOperation WEBHOOK_OPERATION = new StandardSyncOperation() .withOperatorType(StandardSyncOperation.OperatorType.WEBHOOK) .withOperatorWebhook(new OperatorWebhook()); @@ -266,6 +263,8 @@ class SchedulerHandlerTest { private ConnectorDefinitionSpecificationHandler connectorDefinitionSpecificationHandler; private WorkspaceService workspaceService; private SecretPersistenceConfigService secretPersistenceConfigService; + private ConnectionTimelineEventService connectionEventService; + private CurrentUserService currentUserService; private StreamRefreshesHandler streamRefreshesHandler; private NotificationHelper notificationHelper; @@ -311,6 +310,8 @@ void setup() throws JsonValidationException, ConfigNotFoundException, IOExceptio featureFlagClient = mock(TestClient.class); workspaceService = mock(WorkspaceService.class); secretPersistenceConfigService = mock(SecretPersistenceConfigService.class); + connectionEventService = mock(ConnectionTimelineEventService.class); + currentUserService = mock(CurrentUserService.class); when(connectorDefinitionSpecificationHandler.getDestinationSpecification(any())).thenReturn(new DestinationDefinitionSpecificationRead() .supportedDestinationSyncModes( @@ -343,6 +344,8 @@ void setup() throws JsonValidationException, ConfigNotFoundException, IOExceptio connectorDefinitionSpecificationHandler, workspaceService, secretPersistenceConfigService, + connectionEventService, + currentUserService, streamRefreshesHandler, notificationHelper); } @@ -388,10 +391,9 @@ void createRefreshJob() throws JsonValidationException, ConfigNotFoundException, @Test @DisplayName("Test reset job creation") void createResetJob() throws JsonValidationException, ConfigNotFoundException, IOException { - Mockito.when(configRepository.getStandardSyncOperation(NORMALIZATION_OPERATION_ID)).thenReturn(NORMALIZATION_OPERATION); Mockito.when(configRepository.getStandardSyncOperation(WEBHOOK_OPERATION_ID)).thenReturn(WEBHOOK_OPERATION); final StandardSync standardSync = - new StandardSync().withDestinationId(DESTINATION_ID).withOperationIds(List.of(NORMALIZATION_OPERATION_ID, WEBHOOK_OPERATION_ID)); 
+ new StandardSync().withDestinationId(DESTINATION_ID).withOperationIds(List.of(WEBHOOK_OPERATION_ID)); Mockito.when(configRepository.getStandardSync(CONNECTION_ID)).thenReturn(standardSync); final DestinationConnection destination = new DestinationConnection() .withDestinationId(DESTINATION_ID) @@ -413,7 +415,8 @@ void createResetJob() throws JsonValidationException, ConfigNotFoundException, I Mockito .when(jobCreator.createResetConnectionJob(destination, standardSync, destinationDefinition, actorDefinitionVersion, DOCKER_IMAGE_NAME, destinationVersion, - false, List.of(NORMALIZATION_OPERATION), + false, + List.of(), streamsToReset, WORKSPACE_ID)) .thenReturn(Optional.of(JOB_ID)); @@ -763,8 +766,10 @@ void testCheckConnectionReadFormat(final Optional standardCheckConnectio } - @Test - void testDiscoverSchemaForSourceFromSourceId() throws IOException, JsonValidationException, ConfigNotFoundException { + @ParameterizedTest + @ValueSource(booleans = {true, false}) + void testDiscoverSchemaForSourceFromSourceId(final boolean enabled) throws IOException, JsonValidationException, ConfigNotFoundException { + when(featureFlagClient.boolVariation(eq(DiscoverPostprocessInTemporal.INSTANCE), any())).thenReturn(enabled); final SourceConnection source = SourceHelpers.generateSource(UUID.randomUUID()); final SourceDiscoverSchemaRequestBody request = new SourceDiscoverSchemaRequestBody().sourceId(source.getSourceId()); @@ -813,8 +818,11 @@ void testDiscoverSchemaForSourceFromSourceId() throws IOException, JsonValidatio verify(synchronousSchedulerClient).createDiscoverSchemaJob(source, sourceVersion, false, RESOURCE_REQUIREMENT, WorkloadPriority.HIGH); } - @Test - void testDiscoverSchemaForSourceFromSourceIdCachedCatalog() throws IOException, JsonValidationException, ConfigNotFoundException { + @ParameterizedTest + @ValueSource(booleans = {true, false}) + void testDiscoverSchemaForSourceFromSourceIdCachedCatalog(final boolean enabled) + throws IOException, JsonValidationException, ConfigNotFoundException { + when(featureFlagClient.boolVariation(eq(DiscoverPostprocessInTemporal.INSTANCE), any())).thenReturn(enabled); final SourceConnection source = SourceHelpers.generateSource(UUID.randomUUID()); final SourceDiscoverSchemaRequestBody request = new SourceDiscoverSchemaRequestBody().sourceId(source.getSourceId()); @@ -852,8 +860,11 @@ void testDiscoverSchemaForSourceFromSourceIdCachedCatalog() throws IOException, verify(synchronousSchedulerClient, never()).createDiscoverSchemaJob(any(), any(), anyBoolean(), any(), any()); } - @Test - void testDiscoverSchemaForSourceFromSourceIdDisableCache() throws IOException, JsonValidationException, ConfigNotFoundException { + @ParameterizedTest + @ValueSource(booleans = {true, false}) + void testDiscoverSchemaForSourceFromSourceIdDisableCache(final boolean enabled) + throws IOException, JsonValidationException, ConfigNotFoundException { + when(featureFlagClient.boolVariation(eq(DiscoverPostprocessInTemporal.INSTANCE), any())).thenReturn(enabled); final SourceConnection source = SourceHelpers.generateSource(UUID.randomUUID()); final SourceDiscoverSchemaRequestBody request = new SourceDiscoverSchemaRequestBody().sourceId(source.getSourceId()).disableCache(true); @@ -890,13 +901,14 @@ void testDiscoverSchemaForSourceFromSourceIdDisableCache() throws IOException, J assertNotNull(actual.getJobInfo()); assertTrue(actual.getJobInfo().getSucceeded()); verify(configRepository).getSourceConnection(source.getSourceId()); - 
verify(configRepository).getActorCatalog(eq(request.getSourceId()), any(), any()); verify(actorDefinitionVersionHelper).getSourceVersion(sourceDefinition, source.getWorkspaceId(), source.getSourceId()); verify(synchronousSchedulerClient).createDiscoverSchemaJob(source, sourceVersion, false, null, WorkloadPriority.HIGH); } - @Test - void testDiscoverSchemaForSourceFromSourceIdFailed() throws IOException, JsonValidationException, ConfigNotFoundException { + @ParameterizedTest + @ValueSource(booleans = {true, false}) + void testDiscoverSchemaForSourceFromSourceIdFailed(final boolean enabled) throws IOException, JsonValidationException, ConfigNotFoundException { + when(featureFlagClient.boolVariation(eq(DiscoverPostprocessInTemporal.INSTANCE), any())).thenReturn(enabled); final SourceConnection source = SourceHelpers.generateSource(UUID.randomUUID()); final SourceDiscoverSchemaRequestBody request = new SourceDiscoverSchemaRequestBody().sourceId(source.getSourceId()); @@ -926,6 +938,82 @@ void testDiscoverSchemaForSourceFromSourceIdFailed() throws IOException, JsonVal verify(synchronousSchedulerClient).createDiscoverSchemaJob(source, sourceVersion, false, null, WorkloadPriority.HIGH); } + // TODO: to be removed once we swap to new discover flow + @ParameterizedTest + @ValueSource(booleans = {true, false}) + void whenDiscoverPostprocessInTemporalEnabledDiffAndDisablingIsNotPerformed(final boolean enabled) + throws IOException, JsonValidationException, ConfigNotFoundException { + when(featureFlagClient.boolVariation(eq(DiscoverPostprocessInTemporal.INSTANCE), any())).thenReturn(enabled); + when(envVariableFeatureFlags.autoDetectSchema()).thenReturn(false); + final SourceConnection source = SourceHelpers.generateSource(UUID.randomUUID()); + final UUID connectionId1 = UUID.randomUUID(); + final UUID connectionId2 = UUID.randomUUID(); + final UUID discoveredCatalogId = UUID.randomUUID(); + final SynchronousResponse discoverResponse = (SynchronousResponse) jobResponse; + final SourceDiscoverSchemaRequestBody request = + new SourceDiscoverSchemaRequestBody().sourceId(source.getSourceId()).connectionId(connectionId1).disableCache(true).notifySchemaChange(true); + final StreamTransform streamTransform = new StreamTransform().transformType(TransformTypeEnum.UPDATE_STREAM) + .streamDescriptor(new io.airbyte.api.model.generated.StreamDescriptor().name(DOGS)) + .updateStream(new StreamTransformUpdateStream().addFieldTransformsItem(new FieldTransform().transformType( + FieldTransform.TransformTypeEnum.REMOVE_FIELD).breaking(true))); + final CatalogDiff catalogDiff = new CatalogDiff().addTransformsItem(streamTransform); + final StandardSourceDefinition sourceDef = new StandardSourceDefinition() + .withSourceDefinitionId(source.getSourceDefinitionId()); + final ActorDefinitionVersion sourceVersion = new ActorDefinitionVersion() + .withDockerRepository(SOURCE_DOCKER_REPO) + .withProtocolVersion(SOURCE_PROTOCOL_VERSION) + .withDockerImageTag(SOURCE_DOCKER_TAG); + when(configRepository.getStandardSourceDefinition(source.getSourceDefinitionId())) + .thenReturn(sourceDef); + when(actorDefinitionVersionHelper.getSourceVersion(sourceDef, source.getWorkspaceId(), source.getSourceId())) + .thenReturn(sourceVersion); + when(configRepository.getSourceConnection(source.getSourceId())).thenReturn(source); + when(synchronousSchedulerClient.createDiscoverSchemaJob(source, sourceVersion, false, null, WorkloadPriority.HIGH)) + .thenReturn(discoverResponse); + 
when(webUrlHelper.getConnectionReplicationPageUrl(source.getWorkspaceId(), connectionId1)).thenReturn(CONNECTION_URL); + + when(discoverResponse.isSuccess()).thenReturn(true); + when(discoverResponse.getOutput()).thenReturn(discoveredCatalogId); + + final AirbyteCatalog airbyteCatalogCurrent = new AirbyteCatalog().withStreams(Lists.newArrayList( + CatalogHelpers.createAirbyteStream(SHOES, Field.of(SKU, JsonSchemaType.STRING)), + CatalogHelpers.createAirbyteStream(DOGS, Field.of(NAME, JsonSchemaType.STRING)))); + + final ConnectionRead connectionRead1 = + new ConnectionRead().syncCatalog(CatalogConverter.toApi(airbyteCatalogCurrent, sourceVersion)).status(ConnectionStatus.ACTIVE) + .connectionId(connectionId1) + .sourceId(source.getSourceId()) + .notifySchemaChanges(true); + final ConnectionRead connectionRead2 = + new ConnectionRead().syncCatalog(CatalogConverter.toApi(airbyteCatalogCurrent, sourceVersion)).status(ConnectionStatus.ACTIVE) + .connectionId(connectionId2) + .sourceId(source.getSourceId()) + .notifySchemaChanges(true); + when(connectionsHandler.getConnection(request.getConnectionId())).thenReturn(connectionRead1); + when(connectionsHandler.getDiff(any(), any(), any())).thenReturn(catalogDiff); + final ConnectionReadList connectionReadList = new ConnectionReadList().connections(List.of(connectionRead1, connectionRead2)); + when(connectionsHandler.listConnectionsForSource(source.getSourceId(), false)).thenReturn(connectionReadList); + + final ActorCatalog actorCatalog = new ActorCatalog() + .withCatalog(Jsons.jsonNode(airbyteCatalog)) + .withCatalogHash("") + .withId(discoveredCatalogId); + when(configRepository.getActorCatalogById(discoveredCatalogId)).thenReturn(actorCatalog); + + schedulerHandler.discoverSchemaForSourceFromSourceId(request); + + // if the FF is disabled, the diff and update should be called for _each_ connection using the + // source + final var expectedOldPathCalls = enabled ? 0 : 2; + verify(connectionsHandler, times(expectedOldPathCalls)).getDiff(any(), any(), any()); + verify(connectionsHandler, times(expectedOldPathCalls)).updateConnection(any()); + + // if the FF is on, we use the à la carte diff and disabling logic for just the specified connection + final var expectedNewPathCalls = enabled ?
1 : 0; + verify(connectionsHandler, times(expectedNewPathCalls)).diffCatalogAndConditionallyDisable(any(), any()); + } + + // TODO: to be removed once we swap to new discover flow @Test void testDiscoverSchemaFromSourceIdWithConnectionIdNonBreaking() throws IOException, JsonValidationException, ConfigNotFoundException { @@ -985,6 +1073,7 @@ void testDiscoverSchemaFromSourceIdWithConnectionIdNonBreaking() verify(actorDefinitionVersionHelper).getSourceVersion(sourceDef, source.getWorkspaceId(), source.getSourceId()); } + // TODO: to be removed once we swap to new discover flow @Test void testDiscoverSchemaFromSourceIdWithConnectionIdNonBreakingDisableConnectionPreferenceNoFeatureFlag() throws IOException, JsonValidationException, ConfigNotFoundException { @@ -1045,6 +1134,7 @@ void testDiscoverSchemaFromSourceIdWithConnectionIdNonBreakingDisableConnectionP assertEquals(actual.getConnectionStatus(), ConnectionStatus.ACTIVE); } + // TODO: to be removed once we swap to new discover flow @Test void testDiscoverSchemaFromSourceIdWithConnectionIdNonBreakingDisableConnectionPreferenceFeatureFlag() throws IOException, JsonValidationException, ConfigNotFoundException { @@ -1107,6 +1197,7 @@ void testDiscoverSchemaFromSourceIdWithConnectionIdNonBreakingDisableConnectionP verifyNoInteractions(eventRunner); } + // TODO: to be removed once we swap to new discover flow @Test void testDiscoverSchemaFromSourceIdWithConnectionIdBreaking() throws IOException, JsonValidationException, ConfigNotFoundException { @@ -1173,6 +1264,7 @@ void testDiscoverSchemaFromSourceIdWithConnectionIdBreaking() verify(connectionsHandler).updateConnection(expectedConnectionUpdate); } + // TODO: to be removed once we swap to new discover flow @Test void testDiscoverSchemaFromSourceIdWithConnectionIdBreakingFeatureFlagOn() throws IOException, JsonValidationException, ConfigNotFoundException, InterruptedException { @@ -1238,6 +1330,7 @@ void testDiscoverSchemaFromSourceIdWithConnectionIdBreakingFeatureFlagOn() verify(connectionsHandler).updateConnection(expectedConnectionUpdate); } + // TODO: to be removed once we swap to new discover flow @Test void testDiscoverSchemaFromSourceIdWithConnectionIdNonBreakingDisableConnectionPreferenceFeatureFlagNoDiff() throws IOException, JsonValidationException, ConfigNotFoundException { @@ -1297,6 +1390,7 @@ void testDiscoverSchemaFromSourceIdWithConnectionIdNonBreakingDisableConnectionP verifyNoInteractions(eventRunner); } + // TODO: to be removed once we swap to new discover flow @Test void testDiscoverSchemaForSourceMultipleConnectionsFeatureFlagOn() throws IOException, JsonValidationException, ConfigNotFoundException { @@ -1604,7 +1698,7 @@ void testResetConnection() throws IOException, JsonValidationException, ConfigNo } @Test - void testResetConnectionStream() throws IOException { + void testResetConnectionStream() throws IOException, ConfigNotFoundException { final UUID connectionId = UUID.randomUUID(); final String streamName = "name"; final String streamNamespace = "namespace"; @@ -1632,6 +1726,37 @@ void testResetConnectionStream() throws IOException { verify(eventRunner).resetConnection(connectionId, streamDescriptors); } + @Test + void testResetConnectionStreamWithEmptyList() throws IOException, ConfigNotFoundException { + final UUID connectionId = UUID.randomUUID(); + final String streamName = "name"; + final String streamNamespace = "namespace"; + + final long jobId = 123L; + final ManualOperationResult manualOperationResult = ManualOperationResult + .builder() + 
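Note: the discover tests above all follow the same pattern — parameterize the test over both values of a boolean feature flag, stub `featureFlagClient.boolVariation(...)` with that value, and then assert via `times(...)` that exactly one code path ran. The sketch below shows that pattern in isolation; it is an editorial illustration, not part of the diff, and every name in it (`Flags`, `Legacy`, `Modern`, the flag string) is invented.

```java
// Self-contained sketch (JUnit 5 + Mockito assumed on the classpath) of the
// "parameterize over a boolean flag, then count calls per branch" test pattern.
import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;

import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.ValueSource;

class FlagGatedPathTest {

  interface Flags { boolean enabled(String flagName); }

  interface Legacy { void diffEveryConnection(); }

  interface Modern { void diffOneConnection(); }

  // Hypothetical production-style logic under test: route on the flag.
  static void discover(final Flags flags, final Legacy legacy, final Modern modern) {
    if (flags.enabled("discover-postprocess-in-temporal")) {
      modern.diffOneConnection();
    } else {
      legacy.diffEveryConnection();
    }
  }

  @ParameterizedTest
  @ValueSource(booleans = {true, false})
  void routesToExactlyOnePath(final boolean enabled) {
    final Flags flags = mock(Flags.class);
    final Legacy legacy = mock(Legacy.class);
    final Modern modern = mock(Modern.class);
    when(flags.enabled(anyString())).thenReturn(enabled);

    discover(flags, legacy, modern);

    // Exactly one branch runs, depending on the flag value.
    verify(legacy, times(enabled ? 0 : 1)).diffEveryConnection();
    verify(modern, times(enabled ? 1 : 0)).diffOneConnection();
  }
}
```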
.failingReason(Optional.empty()) + .jobId(Optional.of(jobId)) + .build(); + final List streamDescriptors = List.of(new StreamDescriptor().withName(streamName).withNamespace(streamNamespace)); + final ConnectionStreamRequestBody connectionStreamRequestBody = new ConnectionStreamRequestBody() + .connectionId(connectionId) + .streams(List.of()); + + when(configRepository.getAllStreamsForConnection(connectionId)) + .thenReturn(streamDescriptors); + when(eventRunner.resetConnection(connectionId, streamDescriptors)) + .thenReturn(manualOperationResult); + + doReturn(new JobInfoRead()) + .when(jobConverter).getJobInfoRead(any()); + + schedulerHandler + .resetConnectionStream(connectionStreamRequestBody); + + verify(eventRunner).resetConnection(connectionId, streamDescriptors); + } + @Test void testCancelJob() throws IOException { final UUID connectionId = UUID.randomUUID(); diff --git a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/SourceDefinitionsHandlerTest.java b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/SourceDefinitionsHandlerTest.java index 554a5852fc7..faf3a0be08b 100644 --- a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/SourceDefinitionsHandlerTest.java +++ b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/SourceDefinitionsHandlerTest.java @@ -37,6 +37,7 @@ import io.airbyte.api.model.generated.SupportLevel; import io.airbyte.api.model.generated.WorkspaceIdRequestBody; import io.airbyte.commons.json.Jsons; +import io.airbyte.commons.server.converters.ApiPojoConverters; import io.airbyte.commons.server.errors.IdNotFoundKnownException; import io.airbyte.commons.server.errors.UnsupportedProtocolVersionException; import io.airbyte.commons.server.handlers.helpers.ActorDefinitionHandlerHelper; @@ -46,6 +47,7 @@ import io.airbyte.config.ActorDefinitionVersion; import io.airbyte.config.ActorType; import io.airbyte.config.AllowedHosts; +import io.airbyte.config.ConnectorRegistryEntryMetrics; import io.airbyte.config.ConnectorRegistrySourceDefinition; import io.airbyte.config.ResourceRequirements; import io.airbyte.config.ScopeType; @@ -71,10 +73,12 @@ import java.net.URISyntaxException; import java.time.LocalDate; import java.util.Collections; +import java.util.Date; import java.util.List; import java.util.Map; import java.util.UUID; import java.util.function.Supplier; +import org.jooq.JSONB; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.DisplayName; import org.junit.jupiter.api.Nested; @@ -90,7 +94,9 @@ class SourceDefinitionsHandlerTest { private ConfigRepository configRepository; private StandardSourceDefinition sourceDefinition; + private StandardSourceDefinition sourceDefinitionWithOptionals; private ActorDefinitionVersion sourceDefinitionVersion; + private ActorDefinitionVersion sourceDefinitionVersionWithOptionals; private SourceDefinitionsHandler sourceDefinitionsHandler; private Supplier uuidSupplier; private ActorDefinitionHandlerHelper actorDefinitionHandlerHelper; @@ -115,6 +121,8 @@ void setUp() { organizationId = UUID.randomUUID(); sourceDefinition = generateSourceDefinition(); sourceDefinitionVersion = generateVersionFromSourceDefinition(sourceDefinition); + sourceDefinitionWithOptionals = generateSourceDefinitionWithOptionals(); + sourceDefinitionVersionWithOptionals = generateSourceDefinitionVersionWithOptionals(sourceDefinitionWithOptionals); featureFlagClient = mock(TestClient.class); actorDefinitionVersionHelper = 
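Note: `testResetConnectionStreamWithEmptyList` above pins down a useful contract — an empty `streams` list on the request means "reset all streams", resolved through `getAllStreamsForConnection`. The fragment below is a sketch of handler logic consistent with that contract; the method name is hypothetical, since the handler implementation itself is not part of this diff.

```java
// Hypothetical helper consistent with the test above; not the actual
// SchedulerHandler source. An empty selection widens to every stream
// known for the connection instead of being passed downstream as-is.
private List<StreamDescriptor> resolveStreamsToReset(final UUID connectionId,
                                                     final List<StreamDescriptor> requested)
    throws IOException {
  return requested.isEmpty()
      ? configRepository.getAllStreamsForConnection(connectionId)
      : requested;
}
```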
mock(ActorDefinitionVersionHelper.class); @@ -172,15 +180,29 @@ private ActorDefinitionVersion generateCustomVersionFromSourceDefinition(final S .withAllowedHosts(null); } + private StandardSourceDefinition generateSourceDefinitionWithOptionals() { + final ConnectorRegistryEntryMetrics metrics = + new ConnectorRegistryEntryMetrics().withAdditionalProperty("all", JSONB.valueOf("{'all': {'usage': 'high'}}")); + return generateSourceDefinition().withMetrics(metrics); + } + + private ActorDefinitionVersion generateSourceDefinitionVersionWithOptionals(final StandardSourceDefinition sourceDefinition) { + return generateVersionFromSourceDefinition(sourceDefinition) + .withCdkVersion("python:1.2.3") + .withLastPublished(new Date()); + } + @Test @DisplayName("listSourceDefinition should return the right list") void testListSourceDefinitions() throws IOException, URISyntaxException { final StandardSourceDefinition sourceDefinition2 = generateSourceDefinition(); final ActorDefinitionVersion sourceDefinitionVersion2 = generateVersionFromSourceDefinition(sourceDefinition2); - when(configRepository.listStandardSourceDefinitions(false)).thenReturn(Lists.newArrayList(sourceDefinition, sourceDefinition2)); - when(configRepository.getActorDefinitionVersions(List.of(sourceDefinition.getDefaultVersionId(), sourceDefinition2.getDefaultVersionId()))) - .thenReturn(Lists.newArrayList(sourceDefinitionVersion, sourceDefinitionVersion2)); + when(configRepository.listStandardSourceDefinitions(false)) + .thenReturn(Lists.newArrayList(sourceDefinition, sourceDefinition2, sourceDefinitionWithOptionals)); + when(configRepository.getActorDefinitionVersions(List.of(sourceDefinition.getDefaultVersionId(), sourceDefinition2.getDefaultVersionId(), + sourceDefinitionWithOptionals.getDefaultVersionId()))) + .thenReturn(Lists.newArrayList(sourceDefinitionVersion, sourceDefinitionVersion2, sourceDefinitionVersionWithOptionals)); final SourceDefinitionRead expectedSourceDefinitionRead1 = new SourceDefinitionRead() .sourceDefinitionId(sourceDefinition.getSourceDefinitionId()) @@ -192,6 +214,9 @@ void testListSourceDefinitions() throws IOException, URISyntaxException { .supportLevel(SupportLevel.fromValue(sourceDefinitionVersion.getSupportLevel().value())) .releaseStage(ReleaseStage.fromValue(sourceDefinitionVersion.getReleaseStage().value())) .releaseDate(LocalDate.parse(sourceDefinitionVersion.getReleaseDate())) + .cdkVersion(null) + .lastPublished(null) + .metrics(null) .resourceRequirements(new io.airbyte.api.model.generated.ActorDefinitionResourceRequirements() ._default(new io.airbyte.api.model.generated.ResourceRequirements() .cpuRequest(sourceDefinition.getResourceRequirements().getDefault().getCpuRequest())) @@ -207,15 +232,36 @@ void testListSourceDefinitions() throws IOException, URISyntaxException { .supportLevel(SupportLevel.fromValue(sourceDefinitionVersion2.getSupportLevel().value())) .releaseStage(ReleaseStage.fromValue(sourceDefinitionVersion2.getReleaseStage().value())) .releaseDate(LocalDate.parse(sourceDefinitionVersion2.getReleaseDate())) + .cdkVersion(null) + .lastPublished(null) + .metrics(null) .resourceRequirements(new io.airbyte.api.model.generated.ActorDefinitionResourceRequirements() ._default(new io.airbyte.api.model.generated.ResourceRequirements() .cpuRequest(sourceDefinition2.getResourceRequirements().getDefault().getCpuRequest())) .jobSpecific(Collections.emptyList())); + final SourceDefinitionRead expectedSourceDefinitionReadWithOpts = new SourceDefinitionRead() + 
.sourceDefinitionId(sourceDefinitionWithOptionals.getSourceDefinitionId()) + .name(sourceDefinitionWithOptionals.getName()) + .dockerRepository(sourceDefinitionVersionWithOptionals.getDockerRepository()) + .dockerImageTag(sourceDefinitionVersionWithOptionals.getDockerImageTag()) + .documentationUrl(new URI(sourceDefinitionVersionWithOptionals.getDocumentationUrl())) + .icon(SourceDefinitionsHandler.loadIcon(sourceDefinitionWithOptionals.getIcon())) + .supportLevel(SupportLevel.fromValue(sourceDefinitionVersionWithOptionals.getSupportLevel().value())) + .releaseStage(ReleaseStage.fromValue(sourceDefinitionVersionWithOptionals.getReleaseStage().value())) + .releaseDate(LocalDate.parse(sourceDefinitionVersionWithOptionals.getReleaseDate())) + .cdkVersion(sourceDefinitionVersionWithOptionals.getCdkVersion()) + .lastPublished(ApiPojoConverters.toOffsetDateTime(sourceDefinitionVersionWithOptionals.getLastPublished())) + .metrics(sourceDefinitionWithOptionals.getMetrics()) + .resourceRequirements(new io.airbyte.api.model.generated.ActorDefinitionResourceRequirements() + ._default(new io.airbyte.api.model.generated.ResourceRequirements() + .cpuRequest(sourceDefinition.getResourceRequirements().getDefault().getCpuRequest())) + .jobSpecific(Collections.emptyList())); + final SourceDefinitionReadList actualSourceDefinitionReadList = sourceDefinitionsHandler.listSourceDefinitions(); assertEquals( - Lists.newArrayList(expectedSourceDefinitionRead1, expectedSourceDefinitionRead2), + Lists.newArrayList(expectedSourceDefinitionRead1, expectedSourceDefinitionRead2, expectedSourceDefinitionReadWithOpts), actualSourceDefinitionReadList.getSourceDefinitions()); } diff --git a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/SourceHandlerTest.java b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/SourceHandlerTest.java index c71928397ec..da26e6c96ab 100644 --- a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/SourceHandlerTest.java +++ b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/SourceHandlerTest.java @@ -8,6 +8,7 @@ import static io.airbyte.protocol.models.CatalogHelpers.createAirbyteStream; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.doReturn; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.never; @@ -59,6 +60,7 @@ import io.airbyte.data.services.SecretPersistenceConfigService; import io.airbyte.data.services.SourceService; import io.airbyte.data.services.WorkspaceService; +import io.airbyte.featureflag.DeleteSecretsWhenTombstoneActors; import io.airbyte.featureflag.TestClient; import io.airbyte.featureflag.UseIconUrlInApiResponse; import io.airbyte.featureflag.Workspace; @@ -497,6 +499,10 @@ void testDeleteSource() throws JsonValidationException, ConfigNotFoundException, .thenReturn(sourceConnection.getConfiguration()); when(actorDefinitionVersionHelper.getSourceVersionWithOverrideStatus(standardSourceDefinition, sourceConnection.getWorkspaceId(), sourceConnection.getSourceId())).thenReturn(sourceDefinitionVersionWithOverrideStatus); + // By default feature flag is false + when(featureFlagClient.boolVariation( + eq(DeleteSecretsWhenTombstoneActors.INSTANCE), + any(Workspace.class))).thenReturn(false); sourceHandler.deleteSource(sourceIdRequestBody); @@ -505,6 +511,56 @@ void testDeleteSource() throws JsonValidationException, 
ConfigNotFoundException, verify(connectionsHandler).deleteConnection(connectionRead.getConnectionId()); } + @Test + void testDeleteSourceAndDeleteSecrets() + throws JsonValidationException, ConfigNotFoundException, IOException, io.airbyte.data.exceptions.ConfigNotFoundException { + final JsonNode newConfiguration = sourceConnection.getConfiguration(); + ((ObjectNode) newConfiguration).put("apiKey", "987-xyz"); + + final SourceConnection expectedSourceConnection = Jsons.clone(sourceConnection).withTombstone(true); + + final SourceIdRequestBody sourceIdRequestBody = new SourceIdRequestBody().sourceId(sourceConnection.getSourceId()); + final StandardSync standardSync = ConnectionHelpers.generateSyncWithSourceId(sourceConnection.getSourceId()); + final ConnectionRead connectionRead = ConnectionHelpers.generateExpectedConnectionRead(standardSync); + final ConnectionReadList connectionReadList = new ConnectionReadList().connections(Collections.singletonList(connectionRead)); + final WorkspaceIdRequestBody workspaceIdRequestBody = new WorkspaceIdRequestBody().workspaceId(sourceConnection.getWorkspaceId()); + + when(configRepository.getSourceConnection(sourceConnection.getSourceId())) + .thenReturn(sourceConnection) + .thenReturn(expectedSourceConnection); + when(sourceService.getSourceConnectionWithSecrets(sourceConnection.getSourceId())) + .thenReturn(sourceConnection) + .thenReturn(expectedSourceConnection); + when(oAuthConfigSupplier.maskSourceOAuthParameters(sourceDefinitionSpecificationRead.getSourceDefinitionId(), + sourceConnection.getWorkspaceId(), + newConfiguration, sourceDefinitionVersion.getSpec())).thenReturn(newConfiguration); + when(configRepository.getStandardSourceDefinition(sourceDefinitionSpecificationRead.getSourceDefinitionId())) + .thenReturn(standardSourceDefinition); + when(actorDefinitionVersionHelper.getSourceVersion(standardSourceDefinition, sourceConnection.getWorkspaceId(), sourceConnection.getSourceId())) + .thenReturn(sourceDefinitionVersion); + when(configRepository.getSourceDefinitionFromSource(sourceConnection.getSourceId())).thenReturn(standardSourceDefinition); + when(connectionsHandler.listConnectionsForWorkspace(workspaceIdRequestBody)).thenReturn(connectionReadList); + when( + secretsProcessor.prepareSecretsForOutput(sourceConnection.getConfiguration(), sourceDefinitionSpecificationRead.getConnectionSpecification())) + .thenReturn(sourceConnection.getConfiguration()); + when(actorDefinitionVersionHelper.getSourceVersionWithOverrideStatus(standardSourceDefinition, sourceConnection.getWorkspaceId(), + sourceConnection.getSourceId())).thenReturn(sourceDefinitionVersionWithOverrideStatus); + // Turn on the feature flag to delete secrets + when(featureFlagClient.boolVariation( + eq(DeleteSecretsWhenTombstoneActors.INSTANCE), + any(Workspace.class))).thenReturn(true); + + sourceHandler.deleteSource(sourceIdRequestBody); + + // With the flag on, we should no longer get or write secrets (since we are + // deleting the source).
+ verify(sourceService, times(0)).writeSourceConnectionWithSecrets(expectedSourceConnection, connectorSpecification); + verify(sourceService, times(0)).getSourceConnectionWithSecrets(any()); + verify(sourceService).tombstoneSource(any(), any(), any(), any()); + verify(connectionsHandler).listConnectionsForWorkspace(workspaceIdRequestBody); + verify(connectionsHandler).deleteConnection(connectionRead.getConnectionId()); + } + @Test void testWriteDiscoverCatalogResult() throws JsonValidationException, IOException { final UUID actorId = UUID.randomUUID(); diff --git a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/WebBackendConnectionsHandlerTest.java b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/WebBackendConnectionsHandlerTest.java index 5455c1e53cb..6101368a2d0 100644 --- a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/WebBackendConnectionsHandlerTest.java +++ b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/WebBackendConnectionsHandlerTest.java @@ -236,7 +236,6 @@ void setup() throws IOException, JsonValidationException, ConfigNotFoundExceptio connectionsHandler, stateHandler, sourceHandler, - destinationDefinitionsHandler, destinationHandler, jobHistoryHandler, schedulerHandler, @@ -460,7 +459,8 @@ void testGetWorkspaceStateEmpty() throws IOException { } @Test - void testWebBackendListConnectionsForWorkspace() throws IOException { + void testWebBackendListConnectionsForWorkspace() + throws IOException, JsonValidationException, io.airbyte.data.exceptions.ConfigNotFoundException, ConfigNotFoundException { final WebBackendConnectionListRequestBody webBackendConnectionListRequestBody = new WebBackendConnectionListRequestBody(); webBackendConnectionListRequestBody.setWorkspaceId(sourceRead.getWorkspaceId()); @@ -475,7 +475,8 @@ void testWebBackendListConnectionsForWorkspace() throws IOException { } @Test - void testWebBackendGetConnection() throws ConfigNotFoundException, IOException, JsonValidationException { + void testWebBackendGetConnection() + throws ConfigNotFoundException, IOException, JsonValidationException, io.airbyte.data.exceptions.ConfigNotFoundException { final ConnectionIdRequestBody connectionIdRequestBody = new ConnectionIdRequestBody(); connectionIdRequestBody.setConnectionId(connectionRead.getConnectionId()); @@ -496,7 +497,7 @@ void testWebBackendGetConnection() throws ConfigNotFoundException, IOException, WebBackendConnectionRead testWebBackendGetConnection(final boolean withCatalogRefresh, final ConnectionRead connectionRead, final OperationReadList operationReadList) - throws JsonValidationException, ConfigNotFoundException, IOException { + throws JsonValidationException, ConfigNotFoundException, IOException, io.airbyte.data.exceptions.ConfigNotFoundException { final ConnectionIdRequestBody connectionIdRequestBody = new ConnectionIdRequestBody(); connectionIdRequestBody.setConnectionId(connectionRead.getConnectionId()); @@ -514,7 +515,7 @@ WebBackendConnectionRead testWebBackendGetConnection(final boolean withCatalogRe @Test void testWebBackendGetConnectionWithDiscoveryAndNewSchema() throws ConfigNotFoundException, - IOException, JsonValidationException { + IOException, JsonValidationException, io.airbyte.data.exceptions.ConfigNotFoundException { final UUID newCatalogId = UUID.randomUUID(); when(configRepository.getMostRecentActorCatalogFetchEventForSource(any())) .thenReturn(Optional.of(new ActorCatalogFetchEvent().withActorCatalogId(newCatalogId))); @@ -532,7 +533,7 
@@ void testWebBackendGetConnectionWithDiscoveryAndNewSchema() throws ConfigNotFoun @Test void testWebBackendGetConnectionWithDiscoveryAndNewSchemaBreakingChange() throws ConfigNotFoundException, - IOException, JsonValidationException { + IOException, JsonValidationException, io.airbyte.data.exceptions.ConfigNotFoundException { final UUID newCatalogId = UUID.randomUUID(); when(configRepository.getMostRecentActorCatalogFetchEventForSource(any())) .thenReturn(Optional.of(new ActorCatalogFetchEvent().withActorCatalogId(newCatalogId))); @@ -551,7 +552,7 @@ void testWebBackendGetConnectionWithDiscoveryAndNewSchemaBreakingChange() throws @Test void testWebBackendGetConnectionWithDiscoveryMissingCatalogUsedToMakeConfiguredCatalog() - throws IOException, ConfigNotFoundException, JsonValidationException { + throws IOException, ConfigNotFoundException, JsonValidationException, io.airbyte.data.exceptions.ConfigNotFoundException { final UUID newCatalogId = UUID.randomUUID(); when(configRepository.getMostRecentActorCatalogFetchEventForSource(any())) .thenReturn(Optional.of(new ActorCatalogFetchEvent().withActorCatalogId(newCatalogId))); @@ -569,7 +570,7 @@ void testWebBackendGetConnectionWithDiscoveryMissingCatalogUsedToMakeConfiguredC @Test void testWebBackendGetConnectionWithDiscoveryAndFieldSelectionAddField() throws ConfigNotFoundException, - IOException, JsonValidationException { + IOException, JsonValidationException, io.airbyte.data.exceptions.ConfigNotFoundException { // Mock this because the API uses it to determine whether there was a schema change. when(configRepository.getMostRecentActorCatalogFetchEventForSource(any())) .thenReturn(Optional.of(new ActorCatalogFetchEvent().withActorCatalogId(UUID.randomUUID()))); @@ -622,7 +623,7 @@ void testWebBackendGetConnectionWithDiscoveryAndFieldSelectionAddField() throws @Test void testWebBackendGetConnectionWithDiscoveryAndFieldSelectionRemoveField() throws ConfigNotFoundException, - IOException, JsonValidationException { + IOException, JsonValidationException, io.airbyte.data.exceptions.ConfigNotFoundException { // Mock this because the API uses it to determine whether there was a schema change. 
when(configRepository.getMostRecentActorCatalogFetchEventForSource(any())) .thenReturn(Optional.of(new ActorCatalogFetchEvent().withActorCatalogId(UUID.randomUUID()))); @@ -670,14 +671,15 @@ void testWebBackendGetConnectionWithDiscoveryAndFieldSelectionRemoveField() thro @Test void testWebBackendGetConnectionNoRefreshCatalog() - throws JsonValidationException, ConfigNotFoundException, IOException { + throws JsonValidationException, ConfigNotFoundException, IOException, io.airbyte.data.exceptions.ConfigNotFoundException { final WebBackendConnectionRead result = testWebBackendGetConnection(false, connectionRead, operationReadList); verify(schedulerHandler, never()).discoverSchemaForSourceFromSourceId(any()); assertEquals(expected, result); } @Test - void testWebBackendGetConnectionNoDiscoveryWithNewSchema() throws JsonValidationException, ConfigNotFoundException, IOException { + void testWebBackendGetConnectionNoDiscoveryWithNewSchema() + throws JsonValidationException, ConfigNotFoundException, IOException, io.airbyte.data.exceptions.ConfigNotFoundException { when(configRepository.getMostRecentActorCatalogFetchEventForSource(any())) .thenReturn(Optional.of(new ActorCatalogFetchEvent().withActorCatalogId(UUID.randomUUID()))); when(configRepository.getActorCatalogById(any())).thenReturn(new ActorCatalog().withId(UUID.randomUUID())); @@ -686,7 +688,8 @@ void testWebBackendGetConnectionNoDiscoveryWithNewSchema() throws JsonValidation } @Test - void testWebBackendGetConnectionNoDiscoveryWithNewSchemaBreaking() throws JsonValidationException, ConfigNotFoundException, IOException { + void testWebBackendGetConnectionNoDiscoveryWithNewSchemaBreaking() + throws JsonValidationException, ConfigNotFoundException, IOException, io.airbyte.data.exceptions.ConfigNotFoundException { when(connectionsHandler.getConnection(brokenConnectionRead.getConnectionId())).thenReturn(brokenConnectionRead); when(configRepository.getMostRecentActorCatalogFetchEventForSource(any())) .thenReturn(Optional.of(new ActorCatalogFetchEvent().withActorCatalogId(UUID.randomUUID()))); diff --git a/airbyte-commons-server/src/test/kotlin/io/airbyte/commons/server/handlers/helpers/ContextBuilderTest.kt b/airbyte-commons-server/src/test/kotlin/io/airbyte/commons/server/handlers/helpers/ContextBuilderTest.kt new file mode 100644 index 00000000000..b15c62f8bbb --- /dev/null +++ b/airbyte-commons-server/src/test/kotlin/io/airbyte/commons/server/handlers/helpers/ContextBuilderTest.kt @@ -0,0 +1,69 @@ +package io.airbyte.commons.server.handlers.helpers + +import io.airbyte.config.ConnectionContext +import io.airbyte.config.DestinationConnection +import io.airbyte.config.SourceConnection +import io.airbyte.config.StandardSync +import io.airbyte.config.StandardWorkspace +import io.airbyte.data.services.ConnectionService +import io.airbyte.data.services.DestinationService +import io.airbyte.data.services.SourceService +import io.airbyte.data.services.WorkspaceService +import io.mockk.every +import io.mockk.mockk +import org.junit.jupiter.api.Assertions.assertEquals +import org.junit.jupiter.api.Test +import java.util.UUID + +class ContextBuilderTest { + private val workspaceService = mockk() + private val destinationService = mockk() + private val connectionService = mockk() + private val sourceService = mockk() + private val contextBuilder = ContextBuilder(workspaceService, destinationService, connectionService, sourceService) + + private val connectionId = UUID.randomUUID() + private val sourceId = UUID.randomUUID() + private val destinationId 
= UUID.randomUUID() + private val workspaceId = UUID.randomUUID() + private val organizationId = UUID.randomUUID() + private val destinationDefinitionId = UUID.randomUUID() + private val sourceDefinitionId = UUID.randomUUID() + + @Test + fun `test the creation of the connection context`() { + every { connectionService.getStandardSync(connectionId) } returns + StandardSync() + .withConnectionId(connectionId) + .withSourceId(sourceId) + .withDestinationId(destinationId) + + every { workspaceService.getStandardWorkspaceNoSecrets(workspaceId, false) } returns + StandardWorkspace() + .withWorkspaceId(workspaceId) + .withOrganizationId(organizationId) + + every { sourceService.getSourceConnection(sourceId) } returns + SourceConnection() + .withSourceDefinitionId(sourceDefinitionId) + + every { destinationService.getDestinationConnection(destinationId) } returns + DestinationConnection() + .withWorkspaceId(workspaceId) + .withDestinationDefinitionId(destinationDefinitionId) + + val context = contextBuilder.fromConnectionId(connectionId) + + assertEquals( + ConnectionContext() + .withConnectionId(connectionId) + .withSourceId(sourceId) + .withDestinationId(destinationId) + .withWorkspaceId(workspaceId) + .withOrganizationId(organizationId) + .withSourceDefinitionId(sourceDefinitionId) + .withDestinationDefinitionId(destinationDefinitionId), + context, + ) + } +} diff --git a/airbyte-commons-temporal-core/src/main/java/io/airbyte/commons/temporal/factories/WorkflowClientFactory.kt b/airbyte-commons-temporal-core/src/main/kotlin/io/airbyte/commons/temporal/factories/WorkflowClientFactory.kt similarity index 100% rename from airbyte-commons-temporal-core/src/main/java/io/airbyte/commons/temporal/factories/WorkflowClientFactory.kt rename to airbyte-commons-temporal-core/src/main/kotlin/io/airbyte/commons/temporal/factories/WorkflowClientFactory.kt diff --git a/airbyte-commons-temporal-core/src/main/java/io/airbyte/commons/temporal/factories/WorkflowServiceStubsFactory.kt b/airbyte-commons-temporal-core/src/main/kotlin/io/airbyte/commons/temporal/factories/WorkflowServiceStubsFactory.kt similarity index 100% rename from airbyte-commons-temporal-core/src/main/java/io/airbyte/commons/temporal/factories/WorkflowServiceStubsFactory.kt rename to airbyte-commons-temporal-core/src/main/kotlin/io/airbyte/commons/temporal/factories/WorkflowServiceStubsFactory.kt diff --git a/airbyte-commons-temporal-core/src/main/java/io/airbyte/commons/temporal/queue/Internal.kt b/airbyte-commons-temporal-core/src/main/kotlin/io/airbyte/commons/temporal/queue/Internal.kt similarity index 100% rename from airbyte-commons-temporal-core/src/main/java/io/airbyte/commons/temporal/queue/Internal.kt rename to airbyte-commons-temporal-core/src/main/kotlin/io/airbyte/commons/temporal/queue/Internal.kt diff --git a/airbyte-commons-temporal-core/src/main/java/io/airbyte/commons/temporal/queue/MessageConsumer.kt b/airbyte-commons-temporal-core/src/main/kotlin/io/airbyte/commons/temporal/queue/MessageConsumer.kt similarity index 100% rename from airbyte-commons-temporal-core/src/main/java/io/airbyte/commons/temporal/queue/MessageConsumer.kt rename to airbyte-commons-temporal-core/src/main/kotlin/io/airbyte/commons/temporal/queue/MessageConsumer.kt diff --git a/airbyte-commons-temporal-core/src/main/java/io/airbyte/commons/temporal/queue/TemporalMessageProducer.kt b/airbyte-commons-temporal-core/src/main/kotlin/io/airbyte/commons/temporal/queue/TemporalMessageProducer.kt similarity index 100% rename from 
airbyte-commons-temporal-core/src/main/java/io/airbyte/commons/temporal/queue/TemporalMessageProducer.kt rename to airbyte-commons-temporal-core/src/main/kotlin/io/airbyte/commons/temporal/queue/TemporalMessageProducer.kt diff --git a/airbyte-commons-temporal-core/src/main/java/io/airbyte/commons/temporal/utils/ActivityFailureClassifier.kt b/airbyte-commons-temporal-core/src/main/kotlin/io/airbyte/commons/temporal/utils/ActivityFailureClassifier.kt similarity index 100% rename from airbyte-commons-temporal-core/src/main/java/io/airbyte/commons/temporal/utils/ActivityFailureClassifier.kt rename to airbyte-commons-temporal-core/src/main/kotlin/io/airbyte/commons/temporal/utils/ActivityFailureClassifier.kt diff --git a/airbyte-commons-temporal-core/src/test/java/io/airbyte/commons/temporal/queue/BasicQueueTest.kt b/airbyte-commons-temporal-core/src/test/kotlin/io/airbyte/commons/temporal/queue/BasicQueueTest.kt similarity index 100% rename from airbyte-commons-temporal-core/src/test/java/io/airbyte/commons/temporal/queue/BasicQueueTest.kt rename to airbyte-commons-temporal-core/src/test/kotlin/io/airbyte/commons/temporal/queue/BasicQueueTest.kt diff --git a/airbyte-commons-with-dependencies/src/main/java/io/airbyte/commons/helper/NormalizationInDestinationHelper.java b/airbyte-commons-with-dependencies/src/main/java/io/airbyte/commons/helper/NormalizationInDestinationHelper.java deleted file mode 100644 index b55a3331aa8..00000000000 --- a/airbyte-commons-with-dependencies/src/main/java/io/airbyte/commons/helper/NormalizationInDestinationHelper.java +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.helper; - -import io.airbyte.commons.version.Version; -import io.airbyte.config.StandardSyncOperation; -import io.airbyte.config.StandardSyncOperation.OperatorType; -import io.micronaut.core.util.CollectionUtils; -import java.util.Collections; -import java.util.List; -import java.util.Map; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * This should be a temporary class to assist during the transition from "normalization" containers - * running after a destination container sync, to moving "normalization" into the destination. - * container. "Normalization" will also be rebranded to "Typing" and "de-duping" - */ -public class NormalizationInDestinationHelper { - - private static final Logger LOGGER = LoggerFactory.getLogger(NormalizationInDestinationHelper.class); - - private static final Map IN_DESTINATION_NORMALIZATION_ENV_VAR = Map.of("NORMALIZATION_TECHNIQUE", "LEGACY"); - - public static boolean normalizationStepRequired(final List standardSyncOperations) { - return CollectionUtils.isNotEmpty(standardSyncOperations) - && standardSyncOperations.stream().anyMatch(op -> OperatorType.NORMALIZATION.equals(op.getOperatorType())); - } - - public static Map getAdditionalEnvironmentVariables(final boolean shouldNormalizeInDestination) { - return shouldNormalizeInDestination ? IN_DESTINATION_NORMALIZATION_ENV_VAR : Collections.emptyMap(); - } - - /** - * Whether this replication should normalize in the destination container. 
- * - * @param standardSyncOperations the sync operations for the replication job - * @param containerName the name of the destination container - * @param minSupportedVersion the minimum version that supports normalization for this destination, - * if this workspace has opted into the feature flag for normalization in destination - * containers (otherwise an empty string) - * @return a boolean value of whether normalization should be run in the destination container - */ - public static boolean shouldNormalizeInDestination(final List standardSyncOperations, - final String containerName, - final String minSupportedVersion) { - final var requiresNormalization = normalizationStepRequired(standardSyncOperations); - final var normalizationSupported = connectorSupportsNormalizationInDestination(containerName, minSupportedVersion); - LOGGER.info("Requires Normalization: {}, Normalization Supported: {}, Feature Flag Enabled: {}", - requiresNormalization, normalizationSupported, !minSupportedVersion.isBlank()); - return requiresNormalization && normalizationSupported; - } - - private static boolean connectorSupportsNormalizationInDestination(final String containerName, - final String minSupportedVersion) { - if (!minSupportedVersion.isBlank()) { - return DockerImageNameHelper.extractImageVersion(containerName) - .map(version -> version.greaterThanOrEqualTo(new Version(minSupportedVersion))) - .orElse(false); - } - return false; - } - -} diff --git a/airbyte-commons-with-dependencies/src/test/java/io/airbyte/commons/helper/NormalizeInDestinationHelperTest.java b/airbyte-commons-with-dependencies/src/test/java/io/airbyte/commons/helper/NormalizeInDestinationHelperTest.java deleted file mode 100644 index 66d370bb078..00000000000 --- a/airbyte-commons-with-dependencies/src/test/java/io/airbyte/commons/helper/NormalizeInDestinationHelperTest.java +++ /dev/null @@ -1,105 +0,0 @@ -/* - * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.helper; - -import io.airbyte.config.StandardSyncOperation; -import io.airbyte.config.StandardSyncOperation.OperatorType; -import java.util.List; -import java.util.stream.Stream; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.extension.ExtensionContext; -import org.junit.jupiter.params.ParameterizedTest; -import org.junit.jupiter.params.provider.Arguments; -import org.junit.jupiter.params.provider.ArgumentsProvider; -import org.junit.jupiter.params.provider.ArgumentsSource; - -/** - * Unit Test class for {@link NormalizationInDestinationHelper}. - */ -class NormalizeInDestinationHelperTest { - - private static final StandardSyncOperation NORMALIZATION_OPERATION = - new StandardSyncOperation().withOperatorType(OperatorType.NORMALIZATION); - - private static final StandardSyncOperation SOMETHING_ELSE = - new StandardSyncOperation().withOperatorType(OperatorType.WEBHOOK); - - /** - * Argument provider for - * {@link NormalizeInDestinationHelperTest#testNormalizationStepRequired(List, boolean)}. 
- */ - public static class NormalizeStepRequiredArgumentsProvider implements ArgumentsProvider { - - @Override - public Stream provideArguments(final ExtensionContext context) throws Exception { - - return Stream.of( - Arguments.of(List.of(SOMETHING_ELSE), false), - Arguments.of(List.of(SOMETHING_ELSE, NORMALIZATION_OPERATION), true)); - } - - } - - @ParameterizedTest - @ArgumentsSource(NormalizeStepRequiredArgumentsProvider.class) - void testNormalizationStepRequired(final List standardSyncOperations, final boolean expected) { - final var actual = NormalizationInDestinationHelper.normalizationStepRequired(standardSyncOperations); - Assertions.assertEquals(expected, actual); - } - - @Test - void testGetAdditionalEnvironmentVariables() { - final var shouldBeEmpty = NormalizationInDestinationHelper.getAdditionalEnvironmentVariables(false); - Assertions.assertTrue(shouldBeEmpty.isEmpty()); - final var shouldBePopulated = NormalizationInDestinationHelper.getAdditionalEnvironmentVariables(true); - Assertions.assertFalse(shouldBePopulated.isEmpty()); - } - - /** - * Argument provider for - * {@link NormalizeInDestinationHelperTest#testShouldNormalizeInDestination(List, String, String, boolean)}. - */ - public static class ShouldNormalizeInDestinationArgumentsProvider implements ArgumentsProvider { - - // for testing only - private static final String MIN_SUPPORTED_VERSION_OFF = ""; - private static final String MIN_SUPPORTED_VERSION_BIGQUERY = "1.3.1"; - private static final String MIN_SUPPORTED_VERSION_SNOWFLAKE = "1.0.0"; - - @Override - public Stream provideArguments(final ExtensionContext context) throws Exception { - return Stream.of( - // Normalization not required - Arguments.of(List.of(SOMETHING_ELSE), "destination-bigquery:1.3.2", MIN_SUPPORTED_VERSION_BIGQUERY, false), - // Container doesn't support it - Arguments.of(List.of(NORMALIZATION_OPERATION), "hello:dev", MIN_SUPPORTED_VERSION_OFF, false), - Arguments.of(List.of(NORMALIZATION_OPERATION), "hello:1.3.1", MIN_SUPPORTED_VERSION_OFF, false), - Arguments.of(List.of(NORMALIZATION_OPERATION), "destination-bigquery:1.3.0", MIN_SUPPORTED_VERSION_BIGQUERY, false), - Arguments.of(List.of(NORMALIZATION_OPERATION), "destination-snowflake:0.0.0", MIN_SUPPORTED_VERSION_SNOWFLAKE, false), - // Feature Flag off - Arguments.of(List.of(NORMALIZATION_OPERATION), "destination-bigquery:dev", MIN_SUPPORTED_VERSION_OFF, false), - Arguments.of(List.of(NORMALIZATION_OPERATION), "destination-bigquery:1.3.1", MIN_SUPPORTED_VERSION_OFF, false), - Arguments.of(List.of(NORMALIZATION_OPERATION), "destination-bigquery:2.0.0", MIN_SUPPORTED_VERSION_OFF, false), - // Supported - Arguments.of(List.of(NORMALIZATION_OPERATION), "destination-bigquery:dev", MIN_SUPPORTED_VERSION_BIGQUERY, true), - Arguments.of(List.of(NORMALIZATION_OPERATION), "destination-bigquery:1.3.1", MIN_SUPPORTED_VERSION_BIGQUERY, true), - Arguments.of(List.of(NORMALIZATION_OPERATION), "destination-bigquery:2.0.0", MIN_SUPPORTED_VERSION_BIGQUERY, true), - Arguments.of(List.of(NORMALIZATION_OPERATION), "destination-snowflake:2.0.0", MIN_SUPPORTED_VERSION_SNOWFLAKE, true)); - } - - } - - @ParameterizedTest - @ArgumentsSource(ShouldNormalizeInDestinationArgumentsProvider.class) - void testShouldNormalizeInDestination(final List syncOperations, - final String imageName, - final String minSupportedVersion, - final boolean expected) { - final var actual = NormalizationInDestinationHelper.shouldNormalizeInDestination(syncOperations, imageName, minSupportedVersion); - Assertions.assertEquals(expected, 
-  }
-
-}
diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/RecordSchemaValidator.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/RecordSchemaValidator.java
index 3428129d922..6d4ede3bc6b 100644
--- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/RecordSchemaValidator.java
+++ b/airbyte-commons-worker/src/main/java/io/airbyte/workers/RecordSchemaValidator.java
@@ -61,7 +61,12 @@ public RecordSchemaValidator(final Map
       // Rather than allowing connectors to use any version, we enforce validation using V7
       final var schema = streams.get(stream);
       ((ObjectNode) schema).put("$schema", "http://json-schema.org/draft-07/schema#");
-      validator.initializeSchemaValidator(stream.toString(), schema);
+      // Starting with draft-06 of JSON schema, "id" is a reserved keyword. To use "id" in
+      // a JSON schema, it must be escaped as "$id". Because this mistake exists in connectors,
+      // the platform will attempt to migrate "id" property names to the escaped equivalent of "$id".
+      // Copy the schema before modification to ensure that it doesn't mutate the actual catalog schema
+      // used elsewhere in the platform.
+      validator.initializeSchemaValidator(stream.toString(), updateIdNodePropertyName(schema.deepCopy()));
     }
   }
@@ -119,4 +124,31 @@ public void close() throws IOException {
     validationExecutor.shutdownNow();
   }
 
+  /**
+   * Migrates the reserved property name id in JSON Schema to its escaped equivalent $id. The id
+   * keyword has been reserved since draft-06 of JSON Schema. Connectors have been built that
+   * violate this, so this code corrects it without needing to force an update of the connector
+   * version.
+   *
+   * @param node A {@link JsonNode} in the JSON Schema for a connector's catalog.
+   * @return The possibly modified {@link JsonNode} with any references to id escaped.
+   */
+  private JsonNode updateIdNodePropertyName(final JsonNode node) {
+    if (node != null) {
+      if (node.has("id")) {
+        ((ObjectNode) node).set("$id", node.get("id"));
+        ((ObjectNode) node).remove("id");
+      }
+
+      for (final JsonNode child : node) {
+        if (child.isContainerNode()) {
+          updateIdNodePropertyName(child);
+        }
+      }
+    }
+
+    return node;
+  }
+
 }
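For context on the updateIdNodePropertyName hunk above, here is a minimal standalone sketch of the same rewrite. The migration logic is copied from the patch; the schema literal and main harness are illustrative only:

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;

public class IdEscapeExample {

  // Same logic as the updateIdNodePropertyName hunk above: rename any "id" key to "$id",
  // recursing into child containers (objects and arrays).
  static JsonNode updateIdNodePropertyName(final JsonNode node) {
    if (node != null) {
      if (node.has("id")) {
        ((ObjectNode) node).set("$id", node.get("id"));
        ((ObjectNode) node).remove("id");
      }
      for (final JsonNode child : node) {
        if (child.isContainerNode()) {
          updateIdNodePropertyName(child);
        }
      }
    }
    return node;
  }

  public static void main(final String[] args) throws Exception {
    final ObjectMapper mapper = new ObjectMapper();
    // A connector-style stream schema that (incorrectly) uses the reserved keyword "id".
    final JsonNode schema = mapper.readTree(
        "{\"properties\":{\"id\":{\"type\":\"integer\"},"
            + "\"user\":{\"properties\":{\"id\":{\"type\":\"string\"}}}}}");

    // Deep-copy first, as the patch does, so the catalog's schema is not mutated in place.
    System.out.println(updateIdNodePropertyName(schema.deepCopy())); // every "id" key is now "$id"
    System.out.println(schema); // the original still contains "id"
  }

}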
diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/ReplicationInputHydrator.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/ReplicationInputHydrator.java
index df2729e8113..31fdb167a38 100644
--- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/ReplicationInputHydrator.java
+++ b/airbyte-commons-worker/src/main/java/io/airbyte/workers/ReplicationInputHydrator.java
@@ -5,6 +5,7 @@
 package io.airbyte.workers;
 
 import com.fasterxml.jackson.databind.JsonNode;
+import com.google.common.annotations.VisibleForTesting;
 import io.airbyte.api.client.AirbyteApiClient;
 import io.airbyte.api.client.model.generated.ActorType;
 import io.airbyte.api.client.model.generated.ConnectionAndJobIdRequestBody;
@@ -16,10 +17,11 @@
 import io.airbyte.api.client.model.generated.DestinationIdRequestBody;
 import io.airbyte.api.client.model.generated.JobOptionalRead;
 import io.airbyte.api.client.model.generated.ResolveActorDefinitionVersionRequestBody;
+import io.airbyte.api.client.model.generated.SaveStreamAttemptMetadataRequestBody;
 import io.airbyte.api.client.model.generated.ScopeType;
 import io.airbyte.api.client.model.generated.SecretPersistenceConfig;
 import io.airbyte.api.client.model.generated.SecretPersistenceConfigGetRequestBody;
-import io.airbyte.api.client.model.generated.StreamDescriptor;
+import io.airbyte.api.client.model.generated.StreamAttemptMetadata;
 import io.airbyte.commons.converters.CatalogClientConverters;
 import io.airbyte.commons.converters.ProtocolConverters;
 import io.airbyte.commons.converters.StateConverter;
@@ -29,21 +31,30 @@
 import io.airbyte.commons.protocol.CatalogTransforms;
 import io.airbyte.config.State;
 import io.airbyte.config.StateWrapper;
+import io.airbyte.config.StreamDescriptor;
 import io.airbyte.config.helpers.StateMessageHelper;
 import io.airbyte.config.secrets.SecretsRepositoryReader;
 import io.airbyte.config.secrets.persistence.RuntimeSecretPersistence;
 import io.airbyte.featureflag.AutoBackfillOnNewColumns;
+import io.airbyte.featureflag.Connection;
 import io.airbyte.featureflag.FeatureFlagClient;
 import io.airbyte.featureflag.Organization;
 import io.airbyte.featureflag.UseRuntimeSecretPersistence;
+import io.airbyte.featureflag.UseStreamAttemptMetadata;
 import io.airbyte.featureflag.Workspace;
+import io.airbyte.metrics.lib.ApmTraceUtils;
 import io.airbyte.persistence.job.models.ReplicationInput;
 import io.airbyte.protocol.models.ConfiguredAirbyteCatalog;
 import io.airbyte.workers.helper.BackfillHelper;
+import io.airbyte.workers.helper.ResumableFullRefreshStatsHelper;
 import io.airbyte.workers.models.RefreshSchemaActivityOutput;
 import io.airbyte.workers.models.ReplicationActivityInput;
 import java.io.IOException;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
 import java.util.UUID;
+import java.util.stream.Collectors;
 import org.jetbrains.annotations.NotNull;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -53,13 +64,16 @@ public class ReplicationInputHydrator {
 
   private static final Logger LOGGER = LoggerFactory.getLogger(ReplicationInputHydrator.class);
 
   private final AirbyteApiClient airbyteApiClient;
+  private final ResumableFullRefreshStatsHelper resumableFullRefreshStatsHelper;
   private final SecretsRepositoryReader secretsRepositoryReader;
   private final FeatureFlagClient featureFlagClient;
 
   public ReplicationInputHydrator(final AirbyteApiClient airbyteApiClient,
+                                  final ResumableFullRefreshStatsHelper resumableFullRefreshStatsHelper,
                                   final SecretsRepositoryReader secretsRepositoryReader,
                                   final FeatureFlagClient featureFlagClient) {
     this.airbyteApiClient = airbyteApiClient;
+    this.resumableFullRefreshStatsHelper = resumableFullRefreshStatsHelper;
     this.secretsRepositoryReader = secretsRepositoryReader;
     this.featureFlagClient = featureFlagClient;
   }
@@ -75,7 +89,7 @@ public ReplicationInputHydrator(final AirbyteApiClient airbyteApiClient,
    * @throws Exception from the Airbyte API
    */
   public ReplicationInput getHydratedReplicationInput(final ReplicationActivityInput replicationActivityInput) throws Exception {
-
+    ApmTraceUtils.addTagsToTrace(Map.of("api_base_url", airbyteApiClient.getDestinationApi().getBaseUrl()));
     final var destination =
         airbyteApiClient.getDestinationApi().getDestination(new DestinationIdRequestBody(replicationActivityInput.getDestinationId()));
     final var tag = DockerImageName.INSTANCE.extractTag(replicationActivityInput.getDestinationLauncherConfig().getDockerImage());
@@ -83,9 +97,10 @@ public ReplicationInput getHydratedReplicationInput(final ReplicationActivityInp
         new ResolveActorDefinitionVersionRequestBody(destination.getDestinationDefinitionId(), ActorType.DESTINATION, tag));
 
     // Retrieve the connection, which we need in a few places.
+    final long jobId = Long.parseLong(replicationActivityInput.getJobRunConfig().getJobId());
     final ConnectionRead connectionInfo = resolvedDestinationVersion.getSupportRefreshes()
-        ? airbyteApiClient.getConnectionApi().getConnectionForJob(new ConnectionAndJobIdRequestBody(replicationActivityInput.getConnectionId(),
-            Long.parseLong(replicationActivityInput.getJobRunConfig().getJobId())))
+        ? airbyteApiClient.getConnectionApi()
+            .getConnectionForJob(new ConnectionAndJobIdRequestBody(replicationActivityInput.getConnectionId(), jobId))
         : airbyteApiClient.getConnectionApi().getConnection(new ConnectionIdRequestBody(replicationActivityInput.getConnectionId()));
 
     final ConfiguredAirbyteCatalog catalog = retrieveCatalog(connectionInfo);
@@ -95,13 +110,28 @@ public ReplicationInput getHydratedReplicationInput(final ReplicationActivityInp
     }
 
     // Retrieve the state.
     State state = retrieveState(replicationActivityInput);
+    List<StreamDescriptor> streamsToBackfill = null;
     final boolean backfillEnabledForWorkspace = featureFlagClient.boolVariation(AutoBackfillOnNewColumns.INSTANCE,
         new Workspace(replicationActivityInput.getWorkspaceId()));
     if (backfillEnabledForWorkspace && BackfillHelper.syncShouldBackfill(replicationActivityInput, connectionInfo)) {
+      streamsToBackfill = BackfillHelper.getStreamsToBackfill(replicationActivityInput.getSchemaRefreshOutput().getAppliedDiff(), catalog);
       state = getUpdatedStateForBackfill(state, replicationActivityInput.getSchemaRefreshOutput(), replicationActivityInput.getConnectionId(),
           catalog);
     }
 
+    if (featureFlagClient.boolVariation(UseStreamAttemptMetadata.INSTANCE, new Connection(replicationActivityInput.getConnectionId()))) {
+      try {
+        trackBackfillAndResume(
+            jobId,
+            replicationActivityInput.getJobRunConfig().getAttemptId(),
+            resumableFullRefreshStatsHelper.getStreamsWithStates(state).stream().toList(),
+            streamsToBackfill);
+      } catch (final Exception e) {
+        LOGGER.error("Failed to track stream metadata for connectionId:{} attempt:{}", replicationActivityInput.getConnectionId(),
+            replicationActivityInput.getJobRunConfig().getAttemptId(), e);
+      }
+    }
+
     // Hydrate the secrets.
     final JsonNode fullDestinationConfig;
     final JsonNode fullSourceConfig;
@@ -136,7 +166,6 @@ public ReplicationInput getHydratedReplicationInput(final ReplicationActivityInp
         .withSyncResourceRequirements(replicationActivityInput.getSyncResourceRequirements())
         .withWorkspaceId(replicationActivityInput.getWorkspaceId())
         .withConnectionId(replicationActivityInput.getConnectionId())
-        .withNormalizeInDestinationContainer(replicationActivityInput.getNormalizeInDestinationContainer())
         .withIsReset(replicationActivityInput.getIsReset())
         .withJobRunConfig(replicationActivityInput.getJobRunConfig())
         .withSourceLauncherConfig(replicationActivityInput.getSourceLauncherConfig())
@@ -145,6 +174,32 @@ public ReplicationInput getHydratedReplicationInput(final ReplicationActivityInp
         .withState(state);
   }
 
+  @VisibleForTesting
+  void trackBackfillAndResume(final Long jobId,
+                              final Long attemptNumber,
+                              final List<StreamDescriptor> streamsWithStates,
+                              final List<StreamDescriptor> streamsToBackfill)
+      throws IOException {
+    final Map<StreamDescriptor, StreamAttemptMetadata> metadataPerStream = streamsWithStates != null ? streamsWithStates
+        .stream()
+        .map(s -> Map.entry(s, new StreamAttemptMetadata(s.getName(), false, true, s.getNamespace())))
+        .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)) : new HashMap<>();
+
+    if (streamsToBackfill != null) {
+      for (final StreamDescriptor stream : streamsToBackfill) {
+        final StreamAttemptMetadata attemptMetadata = metadataPerStream.get(stream);
+        if (attemptMetadata == null) {
+          metadataPerStream.put(stream, new StreamAttemptMetadata(stream.getName(), true, false, stream.getNamespace()));
+        } else {
+          metadataPerStream.put(stream, new StreamAttemptMetadata(stream.getName(), true, true, stream.getNamespace()));
+        }
+      }
+    }
+
+    airbyteApiClient.getAttemptApi()
+        .saveStreamMetadata(new SaveStreamAttemptMetadataRequestBody(jobId, attemptNumber.intValue(), metadataPerStream.values().stream().toList()));
+  }
+
   private State getUpdatedStateForBackfill(final State state,
                                            final RefreshSchemaActivityOutput schemaRefreshOutput,
                                            final UUID connectionId,
@@ -152,7 +207,7 @@ private State getUpdatedStateForBackfill(final State state,
       throws Exception {
     if (schemaRefreshOutput != null && schemaRefreshOutput.getAppliedDiff() != null) {
       final var streamsToBackfill = BackfillHelper.getStreamsToBackfill(schemaRefreshOutput.getAppliedDiff(), catalog);
-      LOGGER.debug("Backfilling streams: {}", String.join(", ", streamsToBackfill.stream().map(StreamDescriptor::getName).toList()));
+      LOGGER.debug("Backfilling streams: {}", String.join(", ", streamsToBackfill.stream().map(sd -> sd.getName()).toList()));
       final State resetState = BackfillHelper.clearStateForStreamsToBackfill(state, streamsToBackfill);
       if (resetState != null) {
         // We persist the state here in case the attempt fails, the subsequent attempt will continue the
diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/general/BufferedReplicationWorker.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/general/BufferedReplicationWorker.java
index 1531ca8a6f0..a306d01d94c 100644
--- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/general/BufferedReplicationWorker.java
+++ b/airbyte-commons-worker/src/main/java/io/airbyte/workers/general/BufferedReplicationWorker.java
@@ -160,7 +160,7 @@ public ReplicationOutput run(final ReplicationInput replicationInput, final Path
     try {
       final ReplicationContext replicationContext = getReplicationContext(replicationInput);
       final ReplicationFeatureFlags flags = replicationFeatureFlagReader.readReplicationFeatureFlags();
-      replicationWorkerHelper.initialize(replicationContext, flags, jobRoot, replicationInput.getCatalog());
+      replicationWorkerHelper.initialize(replicationContext, flags, jobRoot, replicationInput.getCatalog(), replicationInput.getState());
       final CloseableWithTimeout destinationWithCloseTimeout = new CloseableWithTimeout(destination, mdc, flags);
 
       // note: resources are closed in the opposite order in which they are declared. thus source will be
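One behavioral note on the trackBackfillAndResume addition above: the per-stream flags are merged, so a stream that both resumes from state and is backfilled ends up marked as both. A minimal sketch of that merge under the same semantics, where SimpleMeta and the main harness are illustrative stand-ins for the generated StreamAttemptMetadata client model:

import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class StreamMetadataMergeExample {

  // Illustrative stand-in for the generated StreamAttemptMetadata model:
  // (name, wasBackfilled, wasResumed), namespace omitted for brevity.
  record SimpleMeta(String name, boolean wasBackfilled, boolean wasResumed) {}

  public static void main(final String[] args) {
    final List<String> streamsWithStates = List.of("users", "orders");
    final List<String> streamsToBackfill = List.of("orders", "products");

    // Streams that carry state resume from it: resumed=true, backfilled=false for now.
    final Map<String, SimpleMeta> metadataPerStream = new HashMap<>();
    for (final String s : streamsWithStates) {
      metadataPerStream.put(s, new SimpleMeta(s, false, true));
    }

    // Backfilled streams are flagged; a stream already present keeps its resumed flag.
    for (final String s : streamsToBackfill) {
      final SimpleMeta existing = metadataPerStream.get(s);
      metadataPerStream.put(s, new SimpleMeta(s, true, existing != null));
    }

    // users -> resumed only; orders -> backfilled and resumed; products -> backfilled only.
    metadataPerStream.values().forEach(System.out::println);
  }

}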
diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/general/DbtTransformationRunner.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/general/DbtTransformationRunner.java
deleted file mode 100644
index f54502257bb..00000000000
--- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/general/DbtTransformationRunner.java
+++ /dev/null
@@ -1,270 +0,0 @@
-/*
- * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved.
- */
-
-package io.airbyte.workers.general;
-
-import static io.airbyte.workers.process.Metadata.CUSTOM_STEP;
-import static io.airbyte.workers.process.Metadata.JOB_TYPE_KEY;
-import static io.airbyte.workers.process.Metadata.SYNC_JOB;
-import static io.airbyte.workers.process.Metadata.SYNC_STEP_KEY;
-
-import com.fasterxml.jackson.databind.JsonNode;
-import com.google.common.annotations.VisibleForTesting;
-import com.google.common.base.Strings;
-import com.google.common.collect.ImmutableMap;
-import io.airbyte.commons.constants.WorkerConstants;
-import io.airbyte.commons.helper.DockerImageNameHelper;
-import io.airbyte.commons.io.LineGobbler;
-import io.airbyte.commons.json.Jsons;
-import io.airbyte.commons.logging.LoggingHelper;
-import io.airbyte.commons.logging.LoggingHelper.Color;
-import io.airbyte.commons.logging.MdcScope;
-import io.airbyte.commons.logging.MdcScope.Builder;
-import io.airbyte.commons.resources.MoreResources;
-import io.airbyte.commons.workers.config.WorkerConfigsProvider.ResourceType;
-import io.airbyte.config.OperatorDbt;
-import io.airbyte.config.ResourceRequirements;
-import io.airbyte.workers.WorkerUtils;
-import io.airbyte.workers.exception.WorkerException;
-import io.airbyte.workers.process.AirbyteIntegrationLauncher;
-import io.airbyte.workers.process.ProcessFactory;
-import java.nio.file.Path;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.List;
-import java.util.Map;
-import java.util.UUID;
-import java.util.concurrent.TimeUnit;
-import org.apache.tools.ant.types.Commandline;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * DbtTransformationRunner. A large portion of this code is taken from the legacy DBT-based
- * Normalization Runner. Historically, this was done to reuse DBT set up. However, with the
- * deprecation of the DBT-based runner, and the unknown future of Custom Transformation support in
- * OSS, we are not investing in refactoring this code for now.
- */
-public class DbtTransformationRunner implements AutoCloseable {
-
-  private static final Logger LOGGER = LoggerFactory.getLogger(DbtTransformationRunner.class);
-  private static final String DBT_ENTRYPOINT_SH = "entrypoint.sh";
-  private static final MdcScope.Builder CONTAINER_LOG_MDC_BUILDER = new Builder()
-      .setLogPrefix(LoggingHelper.CUSTOM_TRANSFORMATION_LOGGER_PREFIX)
-      .setPrefixColor(Color.PURPLE_BACKGROUND);
-
-  private final ProcessFactory processFactory;
-  private final String destinationImage;
-  private Process process = null;
-
-  public DbtTransformationRunner(final ProcessFactory processFactory, final String destinationImage) {
-    this.processFactory = processFactory;
-    this.destinationImage = destinationImage;
-  }
-
-  /**
-   * The docker image used by the DbtTransformationRunner is provided by the User, so we can't ensure
-   * to have the right python, dbt, dependencies etc software installed to successfully run our
-   * transform-config scripts (to translate Airbyte Catalogs into Dbt profiles file). Thus, we depend
-   * on a pre-build prep image to configure the dbt project with the appropriate destination settings
-   * and pull the custom git repository into the workspace.
-   *

- * Once the workspace folder/files is setup to run, we invoke the custom transformation command as - * provided by the user to execute whatever extra transformation has been implemented. - */ - public boolean run(final String jobId, - final int attempt, - final UUID connectionId, - final UUID workspaceId, - final Path jobRoot, - final JsonNode config, - final ResourceRequirements resourceRequirements, - final OperatorDbt dbtConfig) - throws Exception { - if (!configureDbt(jobId, attempt, connectionId, workspaceId, jobRoot, config, resourceRequirements, dbtConfig)) { - return false; - } - return transform(jobId, attempt, jobRoot, resourceRequirements, dbtConfig); - } - - /** - * Transform data (i.e. run normalization). - * - * @param jobId job id - * @param attempt attempt number - * @param jobRoot job root - * @param resourceRequirements resource requirements - * @param dbtConfig dbt config - * @return true, if succeeded. otherwise, false. - * @throws Exception while executing - */ - public boolean transform(final String jobId, - final int attempt, - final Path jobRoot, - final ResourceRequirements resourceRequirements, - final OperatorDbt dbtConfig) - throws Exception { - try { - final Map files = ImmutableMap.of( - DBT_ENTRYPOINT_SH, MoreResources.readResource("dbt_transformation_entrypoint.sh"), - "sshtunneling.sh", MoreResources.readResource("sshtunneling.sh")); - final List dbtArguments = new ArrayList<>(); - dbtArguments.add(DBT_ENTRYPOINT_SH); - if (Strings.isNullOrEmpty(dbtConfig.getDbtArguments())) { - throw new WorkerException("Dbt Arguments are required"); - } - Collections.addAll(dbtArguments, Commandline.translateCommandline(dbtConfig.getDbtArguments())); - process = - processFactory.create( - ResourceType.DEFAULT, - CUSTOM_STEP, - jobId, - attempt, - null, // TODO: Provide connectionId - null, // TODO: Provide workspaceId - jobRoot, - dbtConfig.getDockerImage(), - false, - false, - files, - "/bin/bash", - // We should use the AirbyteIntegrationLauncher instead - AirbyteIntegrationLauncher.buildGenericConnectorResourceRequirements(resourceRequirements), - null, - Map.of(JOB_TYPE_KEY, SYNC_JOB, SYNC_STEP_KEY, CUSTOM_STEP), - Collections.emptyMap(), - Collections.emptyMap(), - Collections.emptyMap(), dbtArguments.toArray(new String[0])); - LineGobbler.gobble(process.getInputStream(), LOGGER::info, CONTAINER_LOG_MDC_BUILDER); - LineGobbler.gobble(process.getErrorStream(), LOGGER::error, CONTAINER_LOG_MDC_BUILDER); - - WorkerUtils.wait(process); - - return process.exitValue() == 0; - } catch (final Exception e) { - // make sure we kill the process on failure to avoid zombies. - if (process != null) { - WorkerUtils.cancelProcess(process); - } - throw e; - } - } - - @Override - public void close() throws Exception { - - if (process == null) { - return; - } - - LOGGER.debug("Closing dbt transformation process"); - WorkerUtils.gentleClose(process, 1, TimeUnit.MINUTES); - if (process.isAlive() || process.exitValue() != 0) { - throw new WorkerException("Dbt transformation process wasn't successful"); - } - } - - /* - * FROM HERE ON, CODE IS ADAPTED FROM THE LEGACY DBT-BASED NORMALIZATION RUNNER. - */ - - /** - * Prepare a configured folder to run dbt commands from (similar to what is required by - * normalization models) However, this does not run the normalization file generation process or dbt - * at all. This is pulling files from a distant git repository instead of the dbt-project-template. - * - * @return true if configuration succeeded. otherwise false. 
- * @throws Exception - any exception thrown from configuration will be handled gracefully by the - * caller. - */ - public boolean configureDbt(final String jobId, - final int attempt, - final UUID connectionId, - final UUID workspaceId, - final Path jobRoot, - final JsonNode config, - final ResourceRequirements resourceRequirements, - final OperatorDbt dbtConfig) - throws Exception { - final Map files = ImmutableMap.of( - WorkerConstants.DESTINATION_CONFIG_JSON_FILENAME, Jsons.serialize(config)); - final String gitRepoUrl = dbtConfig.getGitRepoUrl(); - final String type = getAirbyteDestinationName(destinationImage); - if (Strings.isNullOrEmpty(gitRepoUrl)) { - throw new WorkerException("Git Repo Url is required"); - } - final String gitRepoBranch = dbtConfig.getGitRepoBranch(); - if (Strings.isNullOrEmpty(gitRepoBranch)) { - return runConfigureProcess(jobId, attempt, connectionId, workspaceId, jobRoot, files, resourceRequirements, "configure-dbt", - "--integration-type", type, - "--config", WorkerConstants.DESTINATION_CONFIG_JSON_FILENAME, - "--git-repo", gitRepoUrl); - } else { - return runConfigureProcess(jobId, attempt, connectionId, workspaceId, jobRoot, files, resourceRequirements, "configure-dbt", - "--integration-type", type, - "--config", WorkerConstants.DESTINATION_CONFIG_JSON_FILENAME, - "--git-repo", gitRepoUrl, - "--git-branch", gitRepoBranch); - } - } - - /** - * Extract the destination name from the docker image name. This is an artifact of the old - * dbt-based-normalization set up process that needs to know what destination it is processing in - * order to correctly parse the destination config, connect to the destination, and run the - * transformation. - * - * @param image name with attached version prefixed with destination- e.g. - * airbyte/destination-snowflake:0.1.0. - */ - @VisibleForTesting - protected static String getAirbyteDestinationName(String image) { - return DockerImageNameHelper.extractImageNameWithoutVersion(image).split("/")[1].split("-")[1]; - } - - @VisibleForTesting - protected boolean runConfigureProcess(final String jobId, - final int attempt, - final UUID connectionId, - final UUID workspaceId, - final Path jobRoot, - final Map files, - final ResourceRequirements resourceRequirements, - final String... args) - throws Exception { - try { - process = processFactory.create( - ResourceType.DEFAULT, - CUSTOM_STEP, - jobId, - attempt, - connectionId, - workspaceId, - jobRoot, - "airbyte/custom-transformation-prep:1.0", - // custom connector does not use normalization - false, - false, files, - null, - AirbyteIntegrationLauncher.buildGenericConnectorResourceRequirements(resourceRequirements), - null, - Map.of(JOB_TYPE_KEY, SYNC_JOB, SYNC_STEP_KEY, CUSTOM_STEP), - Collections.emptyMap(), - Collections.emptyMap(), - Collections.emptyMap(), args); - LineGobbler.gobble(process.getInputStream(), LOGGER::info, CONTAINER_LOG_MDC_BUILDER); - LineGobbler.gobble(process.getErrorStream(), LOGGER::error, CONTAINER_LOG_MDC_BUILDER); - - WorkerUtils.wait(process); - return process.exitValue() == 0; - } catch (final Exception e) { - // make sure we kill the process on failure to avoid zombies. 
- if (process != null) { - WorkerUtils.cancelProcess(process); - } - throw e; - } - } - -} diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/general/DbtTransformationWorker.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/general/DbtTransformationWorker.java deleted file mode 100644 index 58b1924b908..00000000000 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/general/DbtTransformationWorker.java +++ /dev/null @@ -1,107 +0,0 @@ -/* - * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.workers.general; - -import static io.airbyte.metrics.lib.ApmTraceConstants.Tags.JOB_ID_KEY; -import static io.airbyte.metrics.lib.ApmTraceConstants.Tags.JOB_ROOT_KEY; -import static io.airbyte.metrics.lib.ApmTraceConstants.WORKER_OPERATION_NAME; - -import datadog.trace.api.Trace; -import io.airbyte.commons.concurrency.VoidCallable; -import io.airbyte.commons.io.LineGobbler; -import io.airbyte.config.OperatorDbtInput; -import io.airbyte.config.ResourceRequirements; -import io.airbyte.metrics.lib.ApmTraceUtils; -import io.airbyte.workers.Worker; -import io.airbyte.workers.exception.WorkerException; -import java.nio.file.Files; -import java.nio.file.Path; -import java.time.Duration; -import java.util.Map; -import java.util.concurrent.atomic.AtomicBoolean; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * Dbt Transformation Worker. Worker that executes a dbt transformation. - */ -@SuppressWarnings("PMD.AvoidPrintStackTrace") -public class DbtTransformationWorker implements Worker { - - private static final Logger LOGGER = LoggerFactory.getLogger(DbtTransformationWorker.class); - - private final String jobId; - private final int attempt; - private final DbtTransformationRunner dbtTransformationRunner; - private final ResourceRequirements resourceRequirements; - - private final AtomicBoolean cancelled; - private final VoidCallable onTransformationRunning; - - public DbtTransformationWorker(final String jobId, - final int attempt, - final ResourceRequirements resourceRequirements, - final DbtTransformationRunner dbtTransformationRunner, - final VoidCallable onTransformationRunning) { - this.jobId = jobId; - this.attempt = attempt; - this.dbtTransformationRunner = dbtTransformationRunner; - this.resourceRequirements = resourceRequirements; - this.onTransformationRunning = onTransformationRunning; - - this.cancelled = new AtomicBoolean(false); - } - - @Trace(operationName = WORKER_OPERATION_NAME) - @Override - public Void run(final OperatorDbtInput operatorDbtInput, final Path jobRoot) throws WorkerException { - final long startTime = System.currentTimeMillis(); - LineGobbler.startSection("DBT TRANSFORMATION"); - ApmTraceUtils.addTagsToTrace(Map.of(JOB_ID_KEY, jobId, JOB_ROOT_KEY, jobRoot)); - - try (dbtTransformationRunner) { - LOGGER.info("Running dbt transformation."); - onTransformationRunning.call(); - final Path transformRoot = Files.createDirectories(jobRoot.resolve("transform")); - if (!dbtTransformationRunner.run( - jobId, - attempt, - operatorDbtInput.getConnectionId(), - operatorDbtInput.getWorkspaceId(), - transformRoot, - operatorDbtInput.getDestinationConfiguration(), - resourceRequirements, - operatorDbtInput.getOperatorDbt())) { - throw new WorkerException("DBT Transformation Failed."); - } - } catch (final Exception e) { - ApmTraceUtils.addExceptionToTrace(e); - throw new WorkerException("Dbt Transformation Failed.", e); - } - if (cancelled.get()) { - LOGGER.info("Dbt Transformation was 
cancelled."); - } - - final Duration duration = Duration.ofMillis(System.currentTimeMillis() - startTime); - LOGGER.info("Dbt Transformation executed in {}.", duration.toMinutesPart()); - LineGobbler.endSection("DBT TRANSFORMATION"); - - return null; - } - - @Trace(operationName = WORKER_OPERATION_NAME) - @Override - public void cancel() { - LOGGER.info("Cancelling Dbt Transformation runner..."); - try { - cancelled.set(true); - dbtTransformationRunner.close(); - } catch (final Exception e) { - ApmTraceUtils.addExceptionToTrace(e); - LOGGER.error("Unable to cancel Dbt Transformation runner.", e); - } - } - -} diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/general/DefaultDiscoverCatalogWorker.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/general/DefaultDiscoverCatalogWorker.java index da5ca0a36f4..cc9d7798664 100644 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/general/DefaultDiscoverCatalogWorker.java +++ b/airbyte-commons-worker/src/main/java/io/airbyte/workers/general/DefaultDiscoverCatalogWorker.java @@ -52,7 +52,7 @@ public class DefaultDiscoverCatalogWorker implements DiscoverCatalogWorker { private static final Logger LOGGER = LoggerFactory.getLogger(DefaultDiscoverCatalogWorker.class); - private static final String WRITE_DISCOVER_CATALOG_LOGS_TAG = "call to write discover schema result"; + private static final String DISCOVER_SECTION_NAME = "DISCOVER SOURCE CATALOG"; private final IntegrationLauncher integrationLauncher; private final AirbyteStreamFactory streamFactory; @@ -73,6 +73,7 @@ public DefaultDiscoverCatalogWorker(final AirbyteApiClient airbyteApiClient, @Trace(operationName = WORKER_OPERATION_NAME) @Override public ConnectorJobOutput run(final StandardDiscoverCatalogInput discoverSchemaInput, final Path jobRoot) throws WorkerException { + LineGobbler.startSection(DISCOVER_SECTION_NAME); ApmTraceUtils.addTagsToTrace(generateTraceTags(discoverSchemaInput, jobRoot)); try { final JsonNode inputConfig = discoverSchemaInput.getConnectionConfiguration(); @@ -127,11 +128,14 @@ public ConnectorJobOutput run(final StandardDiscoverCatalogInput discoverSchemaI } else if (failureReasonOptional.isEmpty()) { WorkerUtils.throwWorkerException("Integration failed to output a catalog struct and did not output a failure reason", process); } + LineGobbler.endSection(DISCOVER_SECTION_NAME); return jobOutput; } catch (final WorkerException e) { + LineGobbler.endSection(DISCOVER_SECTION_NAME); ApmTraceUtils.addExceptionToTrace(e); throw e; } catch (final Exception e) { + LineGobbler.endSection(DISCOVER_SECTION_NAME); ApmTraceUtils.addExceptionToTrace(e); throw new WorkerException("Error while discovering schema", e); } diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/general/DefaultNormalizationWorker.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/general/DefaultNormalizationWorker.java deleted file mode 100644 index a2cdbd05122..00000000000 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/general/DefaultNormalizationWorker.java +++ /dev/null @@ -1,140 +0,0 @@ -/* - * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.workers.general; - -import static io.airbyte.metrics.lib.ApmTraceConstants.Tags.JOB_ID_KEY; -import static io.airbyte.metrics.lib.ApmTraceConstants.Tags.JOB_ROOT_KEY; -import static io.airbyte.metrics.lib.ApmTraceConstants.WORKER_OPERATION_NAME; - -import datadog.trace.api.Trace; -import io.airbyte.commons.concurrency.VoidCallable; -import io.airbyte.commons.io.LineGobbler; -import io.airbyte.config.Configs.WorkerEnvironment; -import io.airbyte.config.FailureReason; -import io.airbyte.config.NormalizationInput; -import io.airbyte.config.NormalizationSummary; -import io.airbyte.metrics.lib.ApmTraceUtils; -import io.airbyte.protocol.models.AirbyteTraceMessage; -import io.airbyte.workers.exception.WorkerException; -import io.airbyte.workers.helper.FailureHelper; -import io.airbyte.workers.normalization.NormalizationRunner; -import io.airbyte.workers.normalization.NormalizationWorker; -import java.nio.file.Files; -import java.nio.file.Path; -import java.time.Duration; -import java.util.ArrayList; -import java.util.List; -import java.util.Map; -import java.util.concurrent.atomic.AtomicBoolean; -import org.apache.commons.lang3.time.DurationFormatUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * Default Normalization Worker. - */ -@SuppressWarnings("PMD.AvoidPrintStackTrace") -public class DefaultNormalizationWorker implements NormalizationWorker { - - private static final Logger LOGGER = LoggerFactory.getLogger(DefaultNormalizationWorker.class); - - private final String jobId; - private final int attempt; - private final NormalizationRunner normalizationRunner; - private final WorkerEnvironment workerEnvironment; - private final List traceFailureReasons = new ArrayList<>(); - private final VoidCallable onNormalizationRunning; - private boolean failed = false; - - private final AtomicBoolean cancelled; - - public DefaultNormalizationWorker(final String jobId, - final int attempt, - final NormalizationRunner normalizationRunner, - final WorkerEnvironment workerEnvironment, - final VoidCallable onNormalizationRunning) { - this.jobId = jobId; - this.attempt = attempt; - this.normalizationRunner = normalizationRunner; - this.workerEnvironment = workerEnvironment; - this.onNormalizationRunning = onNormalizationRunning; - - this.cancelled = new AtomicBoolean(false); - } - - @Trace(operationName = WORKER_OPERATION_NAME) - @Override - public NormalizationSummary run(final NormalizationInput input, final Path jobRoot) throws WorkerException { - final long startTime = System.currentTimeMillis(); - - ApmTraceUtils.addTagsToTrace(Map.of(JOB_ID_KEY, jobId, JOB_ROOT_KEY, jobRoot)); - - try (normalizationRunner) { - LineGobbler.startSection("DEFAULT NORMALIZATION"); - normalizationRunner.start(); - onNormalizationRunning.call(); - Path normalizationRoot = null; - // There are no shared volumes on Kube; only create this for Docker. 
- if (workerEnvironment.equals(WorkerEnvironment.DOCKER)) { - normalizationRoot = Files.createDirectories(jobRoot.resolve("normalize")); - } - - if (!normalizationRunner.normalize(jobId, attempt, input.getConnectionId(), input.getWorkspaceId(), normalizationRoot, - input.getDestinationConfiguration(), input.getCatalog(), - input.getResourceRequirements())) { - buildFailureReasonsAndSetFailure(); - } - } catch (final Exception e) { - ApmTraceUtils.addExceptionToTrace(e); - LOGGER.error("Normalization failed for job {}.", jobId, e); - buildFailureReasonsAndSetFailure(); - } - - if (cancelled.get()) { - LOGGER.info("Normalization was cancelled for job {}.", jobId); - } - - final long endTime = System.currentTimeMillis(); - final Duration duration = Duration.ofMillis(endTime - startTime); - final String durationDescription = DurationFormatUtils.formatDurationWords(duration.toMillis(), true, true); - LOGGER.info("Normalization executed in {} for job {}.", durationDescription, jobId); - - final NormalizationSummary summary = new NormalizationSummary() - .withStartTime(startTime) - .withEndTime(endTime); - - if (!traceFailureReasons.isEmpty()) { - summary.setFailures(traceFailureReasons); - } else if (failed) { - throw new WorkerException("Normalization Failed."); - } - - LOGGER.info("Normalization summary: {}", summary); - LineGobbler.endSection("DEFAULT NORMALIZATION"); - - return summary; - } - - private void buildFailureReasonsAndSetFailure() { - normalizationRunner.getTraceMessages() - .filter(traceMessage -> traceMessage.getType() == AirbyteTraceMessage.Type.ERROR) - .forEach(traceMessage -> traceFailureReasons.add(FailureHelper.normalizationFailure(traceMessage, Long.valueOf(jobId), attempt))); - failed = true; - } - - @Trace(operationName = WORKER_OPERATION_NAME) - @Override - public void cancel() { - LOGGER.info("Cancelling normalization runner..."); - try { - cancelled.set(true); - normalizationRunner.close(); - } catch (final Exception e) { - ApmTraceUtils.addExceptionToTrace(e); - LOGGER.error("Unable to cancel normalization runner.", e); - } - } - -} diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/general/DefaultReplicationWorker.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/general/DefaultReplicationWorker.java index 6357a29666d..3346ad8d22f 100644 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/general/DefaultReplicationWorker.java +++ b/airbyte-commons-worker/src/main/java/io/airbyte/workers/general/DefaultReplicationWorker.java @@ -145,7 +145,7 @@ public final ReplicationOutput run(final ReplicationInput replicationInput, fina replicationWorkerHelper.getDestinationDefinitionIdForDestinationId(replicationInput.getDestinationId())); final ReplicationFeatureFlags flags = replicationFeatureFlagReader.readReplicationFeatureFlags(); - replicationWorkerHelper.initialize(replicationContext, flags, jobRoot, replicationInput.getCatalog()); + replicationWorkerHelper.initialize(replicationContext, flags, jobRoot, replicationInput.getCatalog(), replicationInput.getState()); replicate(jobRoot, replicationInput, flags); return replicationWorkerHelper.getReplicationOutput(); diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/helper/BackfillHelper.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/helper/BackfillHelper.java index e15f7f32fdc..fdc310e2772 100644 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/helper/BackfillHelper.java +++ 
b/airbyte-commons-worker/src/main/java/io/airbyte/workers/helper/BackfillHelper.java
@@ -5,19 +5,19 @@
 package io.airbyte.workers.helper;
 
 import com.fasterxml.jackson.databind.node.JsonNodeFactory;
-import io.airbyte.api.client.model.generated.CatalogDiff;
 import io.airbyte.api.client.model.generated.ConnectionRead;
-import io.airbyte.api.client.model.generated.FieldTransform;
 import io.airbyte.api.client.model.generated.SchemaChangeBackfillPreference;
-import io.airbyte.api.client.model.generated.StreamDescriptor;
-import io.airbyte.api.client.model.generated.StreamTransform;
 import io.airbyte.commons.converters.CatalogClientConverters;
 import io.airbyte.commons.converters.ProtocolConverters;
+import io.airbyte.config.CatalogDiff;
+import io.airbyte.config.FieldTransform;
 import io.airbyte.config.StandardSyncOutput;
 import io.airbyte.config.State;
 import io.airbyte.config.StateType;
 import io.airbyte.config.StateWrapper;
+import io.airbyte.config.StreamDescriptor;
 import io.airbyte.config.StreamSyncStats;
+import io.airbyte.config.StreamTransform;
 import io.airbyte.config.helpers.StateMessageHelper;
 import io.airbyte.protocol.models.AirbyteStateMessage;
 import io.airbyte.protocol.models.ConfiguredAirbyteCatalog;
@@ -78,7 +78,7 @@ public static State clearStateForStreamsToBackfill(final State inputState, final
         continue;
       }
       if (!streamsToBackfill.contains(
-          ProtocolConverters.streamDescriptorToClient(stateMessage.getStream().getStreamDescriptor()))) {
+          ProtocolConverters.streamDescriptorToDomain(stateMessage.getStream().getStreamDescriptor()))) {
         continue;
       }
       // It's listed in the streams to backfill, so we write the state to null.
@@ -125,7 +125,7 @@ public static void markBackfilledStreams(final List streamsToB
       return; // No streams to backfill, no backfill.
     }
     for (final StreamSyncStats streamStat : syncOutput.getStandardSyncSummary().getStreamStats()) {
-      if (streamsToBackfill.contains(new StreamDescriptor(streamStat.getStreamName(), streamStat.getStreamNamespace()))) {
+      if (streamsToBackfill.contains(new StreamDescriptor().withName(streamStat.getStreamName()).withNamespace(streamStat.getStreamNamespace()))) {
        streamStat.setWasBackfilled(true);
      }
    }
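The BackfillHelper hunks above swap the API-client StreamDescriptor for the config-domain io.airbyte.config.StreamDescriptor, rebuilt through withName/withNamespace. The contains checks only hold if the generated domain model implements value-based equals, which these generated config models are expected to do; a small sketch of that assumption:

import io.airbyte.config.StreamDescriptor;
import java.util.List;

public class StreamDescriptorEqualityExample {

  public static void main(final String[] args) {
    final List<StreamDescriptor> streamsToBackfill =
        List.of(new StreamDescriptor().withName("orders").withNamespace("public"));

    // A descriptor rebuilt from per-stream stats, as markBackfilledStreams does.
    final StreamDescriptor rebuilt = new StreamDescriptor().withName("orders").withNamespace("public");

    // Field-based equals makes contains() match the rebuilt instance.
    System.out.println(streamsToBackfill.contains(rebuilt)); // true
  }

}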
diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/helper/FailureHelper.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/helper/FailureHelper.java
index 1ffbb2c78f9..b49fd95ffcf 100644
--- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/helper/FailureHelper.java
+++ b/airbyte-commons-worker/src/main/java/io/airbyte/workers/helper/FailureHelper.java
@@ -14,8 +14,11 @@
 import io.airbyte.config.Metadata;
 import io.airbyte.config.StreamDescriptor;
 import io.airbyte.protocol.models.AirbyteTraceMessage;
+import io.airbyte.workers.exception.WorkloadLauncherException;
+import io.airbyte.workers.exception.WorkloadMonitorException;
 import java.util.Comparator;
 import java.util.List;
+import java.util.Objects;
 import java.util.Set;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.commons.lang3.exception.ExceptionUtils;
@@ -63,8 +66,6 @@ public String toString() {
   private static final String ACTIVITY_TYPE_REPLICATE = "Replicate";
   private static final String ACTIVITY_TYPE_REPLICATEV2 = "ReplicateV2";
   private static final String ACTIVITY_TYPE_PERSIST = "Persist";
-  private static final String ACTIVITY_TYPE_NORMALIZE = "Normalize";
-  private static final String ACTIVITY_TYPE_DBT_RUN = "Run";
 
   /**
    * Create generic failure.
@@ -291,9 +292,29 @@ public static FailureReason checkFailure(final Throwable t,
    * @return failure reason
    */
   public static FailureReason replicationFailure(final Throwable t, final Long jobId, final Integer attemptNumber) {
-    return genericFailure(t, jobId, attemptNumber)
-        .withFailureOrigin(FailureOrigin.REPLICATION)
-        .withExternalMessage("Something went wrong during replication");
+    final FailureReason failure = genericFailure(t, jobId, attemptNumber)
+        .withFailureOrigin(FailureOrigin.REPLICATION);
+    if (isInstanceOf(t, WorkloadLauncherException.class)) {
+      return failure.withFailureType(FailureType.TRANSIENT_ERROR)
+          .withExternalMessage("Airbyte could not start the sync process.");
+    } else if (isInstanceOf(t, WorkloadMonitorException.class)) {
+      return failure.withFailureType(FailureType.TRANSIENT_ERROR)
+          .withExternalMessage("Airbyte could not start the sync process or track the progress of the sync.");
+    } else {
+      return failure.withExternalMessage("Something went wrong during replication");
+    }
+  }
+
+  private static boolean isInstanceOf(final Throwable exception, final Class<? extends Throwable> exceptionType) {
+    Throwable current = exception;
+    while (current != null) {
+      if (exceptionType.isInstance(current)) {
+        return true;
+      }
+      current = current.getCause();
+    }
+
+    return Objects.nonNull(exception) && Objects.nonNull(exception.getMessage()) && exception.getMessage().contains(exceptionType.getName());
   }
 
   /**
@@ -310,47 +331,6 @@ public static FailureReason persistenceFailure(final Throwable t, final Long job
         .withExternalMessage("Something went wrong during state persistence");
   }
 
-  /**
-   * Create normalization failure.
-   *
-   * @param t throwable that caused the failure
-   * @param jobId job id
-   * @param attemptNumber attempt number
-   * @return failure reason
-   */
-  public static FailureReason normalizationFailure(final Throwable t, final Long jobId, final Integer attemptNumber) {
-    return genericFailure(t, jobId, attemptNumber)
-        .withFailureOrigin(FailureOrigin.NORMALIZATION)
-        .withExternalMessage("Something went wrong during normalization");
-  }
-
-  /**
-   * Create normalization failure.
-   *
-   * @param jobId job id
-   * @param attemptNumber attempt number
-   * @return failure reason
-   */
-  public static FailureReason normalizationFailure(final AirbyteTraceMessage m, final Long jobId, final Integer attemptNumber) {
-    return genericFailure(m, jobId, attemptNumber)
-        .withFailureOrigin(FailureOrigin.NORMALIZATION)
-        .withExternalMessage(m.getError().getMessage());
-  }
-
-  /**
-   * Create dbt failure.
-   *
-   * @param t throwable that caused the failure
-   * @param jobId job id
-   * @param attemptNumber attempt number
-   * @return failure reason
-   */
-  public static FailureReason dbtFailure(final Throwable t, final Long jobId, final Integer attemptNumber) {
-    return genericFailure(t, jobId, attemptNumber)
-        .withFailureOrigin(FailureOrigin.DBT)
-        .withExternalMessage("Something went wrong during dbt");
-  }
-
   /**
    * Create unknown origin failure.
    *
@@ -420,10 +400,6 @@ public static FailureReason failureReasonFromWorkflowAndActivity(final String wo
       return replicationFailure(t, jobId, attemptNumber);
     } else if (WORKFLOW_TYPE_SYNC.equals(workflowType) && ACTIVITY_TYPE_PERSIST.equals(activityType)) {
       return persistenceFailure(t, jobId, attemptNumber);
-    } else if (WORKFLOW_TYPE_SYNC.equals(workflowType) && ACTIVITY_TYPE_NORMALIZE.equals(activityType)) {
-      return normalizationFailure(t, jobId, attemptNumber);
-    } else if (WORKFLOW_TYPE_SYNC.equals(workflowType) && ACTIVITY_TYPE_DBT_RUN.equals(activityType)) {
-      return dbtFailure(t, jobId, attemptNumber);
     } else {
       return unknownOriginFailure(t, jobId, attemptNumber);
     }
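The new isInstanceOf helper above walks the cause chain rather than testing only the top-level throwable (the loop must test current, not the original exception, or the walk does nothing), with a message-based fallback that appears aimed at exceptions whose concrete type did not survive and only the class name remains in the message. A standalone sketch of that matching behavior, with illustrative exception types and harness:

public class CauseChainMatchExample {

  // Mirrors the isInstanceOf helper above: walk the cause chain looking for the target type,
  // then fall back to matching the type name inside the message.
  static boolean isInstanceOf(final Throwable exception, final Class<? extends Throwable> exceptionType) {
    Throwable current = exception;
    while (current != null) {
      if (exceptionType.isInstance(current)) {
        return true;
      }
      current = current.getCause();
    }
    return exception != null && exception.getMessage() != null
        && exception.getMessage().contains(exceptionType.getName());
  }

  public static void main(final String[] args) {
    final Throwable wrapped = new RuntimeException("activity failed",
        new IllegalStateException("launcher gave up"));

    // Matches the nested cause, not just the top-level exception.
    System.out.println(isInstanceOf(wrapped, IllegalStateException.class)); // true
    System.out.println(isInstanceOf(wrapped, InterruptedException.class)); // false
  }

}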
diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/FieldSelector.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/FieldSelector.java
index 5b843c834f5..ab7201cb2f6 100644
--- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/FieldSelector.java
+++ b/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/FieldSelector.java
@@ -12,6 +12,7 @@
 import io.airbyte.protocol.models.ConfiguredAirbyteCatalog;
 import io.airbyte.workers.RecordSchemaValidator;
 import io.airbyte.workers.WorkerMetricReporter;
+import io.micronaut.core.util.StringUtils;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.HashMap;
@@ -102,7 +103,7 @@ public void filterSelectedFields(final AirbyteMessage airbyteMessage) {
       if (data.isObject()) {
         ((ObjectNode) data).retain(selectedFields);
       } else {
-        throw new RuntimeException(String.format("Unexpected data in record: %s", data.toString()));
+        throw new RuntimeException(String.format("Unexpected data in record: %s", data));
       }
     }
 
@@ -145,7 +146,7 @@ private void populatedStreamToSelectedFields(final ConfiguredAirbyteCatalog cata
       final List<String> selectedFields = new ArrayList<>();
       final JsonNode propertiesNode = s.getStream().getJsonSchema().findPath("properties");
       if (propertiesNode.isObject()) {
-        propertiesNode.fieldNames().forEachRemaining((fieldName) -> selectedFields.add(fieldName));
+        propertiesNode.fieldNames().forEachRemaining((fieldName) -> selectedFields.add(replaceEscapeCharacter(fieldName)));
       } else {
         throw new RuntimeException("No properties node in stream schema");
       }
@@ -164,7 +165,7 @@ private void populateStreamToAllFields(final ConfiguredAirbyteCatalog catalog) {
       final Set<String> fields = new HashSet<>();
       final JsonNode propertiesNode = s.getStream().getJsonSchema().findPath("properties");
       if (propertiesNode.isObject()) {
-        propertiesNode.fieldNames().forEachRemaining((fieldName) -> fields.add(fieldName));
+        propertiesNode.fieldNames().forEachRemaining((fieldName) -> fields.add(replaceEscapeCharacter(fieldName)));
       } else {
         throw new RuntimeException("No properties node in stream schema");
       }
@@ -223,4 +224,15 @@ private static Set getUnexpectedFieldNames(final AirbyteRecordMessage re
     return unexpectedFieldNames;
   }
 
+  /**
+   * Removes the JSON Schema escape character ($) from field names in order to ensure that the
+   * field name will match the property name in a record.
+   *
+   * @param fieldName A field name in the JSON schema in a catalog.
+   * @return The unescaped field name.
+   */
+  private String replaceEscapeCharacter(final String fieldName) {
+    return StringUtils.isNotEmpty(fieldName) ? fieldName.replaceAll("\\$", "") : fieldName;
+  }
+
 }
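replaceEscapeCharacter is the read-side counterpart of the RecordSchemaValidator id-escaping earlier in this diff: a schema key stored as "$id" must select the record property "id". A quick sketch of the behavior, with a plain-Java emptiness check standing in for io.micronaut.core.util.StringUtils.isNotEmpty:

public class FieldNameUnescapeExample {

  // Same transformation as replaceEscapeCharacter above, with a plain-Java emptiness
  // check in place of the Micronaut StringUtils helper.
  static String replaceEscapeCharacter(final String fieldName) {
    return fieldName != null && !fieldName.isEmpty() ? fieldName.replaceAll("\\$", "") : fieldName;
  }

  public static void main(final String[] args) {
    // A schema key escaped as "$id" must select the record property "id".
    System.out.println(replaceEscapeCharacter("$id"));   // id
    System.out.println(replaceEscapeCharacter("plain")); // plain
    // Note that every "$" is stripped, so "$foo$bar" becomes "foobar".
    System.out.println(replaceEscapeCharacter("$foo$bar")); // foobar
  }

}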
diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/models/PostprocessCatalogInput.kt b/airbyte-commons-worker/src/main/java/io/airbyte/workers/models/PostprocessCatalogInput.kt
new file mode 100644
index 00000000000..5dd45bc070d
--- /dev/null
+++ b/airbyte-commons-worker/src/main/java/io/airbyte/workers/models/PostprocessCatalogInput.kt
@@ -0,0 +1,5 @@
+package io.airbyte.workers.models
+
+import java.util.UUID
+
+data class PostprocessCatalogInput(val catalogId: UUID?, val connectionId: UUID?, val workspaceId: UUID?)
diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/models/PostprocessCatalogOutput.kt b/airbyte-commons-worker/src/main/java/io/airbyte/workers/models/PostprocessCatalogOutput.kt
new file mode 100644
index 00000000000..27c4568e73d
--- /dev/null
+++ b/airbyte-commons-worker/src/main/java/io/airbyte/workers/models/PostprocessCatalogOutput.kt
@@ -0,0 +1,18 @@
+package io.airbyte.workers.models
+
+import io.airbyte.config.CatalogDiff
+
+/**
+ * A very basic discriminated union of a successful catalog postprocess and an error. Allows bypassing
+ * extraneous exception wrapping / propagation. Written naively to allow interop with Java.
+ */
+data class PostprocessCatalogOutput private constructor(val diff: CatalogDiff?, val error: Throwable?) {
+  val isSuccess = error == null
+  val isFailure = error != null
+
+  companion object {
+    fun success(diff: CatalogDiff?): PostprocessCatalogOutput = PostprocessCatalogOutput(diff, null)
+
+    fun failure(t: Throwable): PostprocessCatalogOutput = PostprocessCatalogOutput(null, t)
+  }
+}
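Because PostprocessCatalogOutput is written for Java interop, a consumer branches on the discriminator instead of catching. A hypothetical Java call site follows; without @JvmStatic the factories are reached through the Companion object, and Kotlin exposes the is-prefixed vals as same-named getters:

import io.airbyte.config.CatalogDiff;
import io.airbyte.workers.models.PostprocessCatalogOutput;

public class PostprocessCatalogOutputUsage {

  public static void main(final String[] args) {
    // Build both variants via the companion factories exposed to Java as Companion members.
    final PostprocessCatalogOutput ok = PostprocessCatalogOutput.Companion.success(new CatalogDiff());
    final PostprocessCatalogOutput failed =
        PostprocessCatalogOutput.Companion.failure(new IllegalStateException("postprocess failed"));

    for (final PostprocessCatalogOutput result : new PostprocessCatalogOutput[] {ok, failed}) {
      // Branch on the discriminator instead of wrapping or propagating exceptions.
      if (result.isSuccess()) {
        final CatalogDiff diff = result.getDiff(); // may still be null for a success with no diff
        System.out.println("applied diff: " + diff);
      } else {
        System.out.println("postprocess error: " + result.getError());
      }
    }
  }

}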
diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/normalization/DefaultNormalizationRunner.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/normalization/DefaultNormalizationRunner.java
deleted file mode 100644
index 85e93b3f205..00000000000
--- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/normalization/DefaultNormalizationRunner.java
+++ /dev/null
@@ -1,205 +0,0 @@
-/*
- * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved.
- */
-
-package io.airbyte.workers.normalization;
-
-import static io.airbyte.workers.process.Metadata.JOB_TYPE_KEY;
-import static io.airbyte.workers.process.Metadata.NORMALIZE_STEP;
-import static io.airbyte.workers.process.Metadata.SYNC_JOB;
-import static io.airbyte.workers.process.Metadata.SYNC_STEP_KEY;
-
-import com.fasterxml.jackson.databind.JsonNode;
-import com.google.common.collect.ImmutableMap;
-import io.airbyte.commons.constants.WorkerConstants;
-import io.airbyte.commons.io.IOs;
-import io.airbyte.commons.io.LineGobbler;
-import io.airbyte.commons.json.Jsons;
-import io.airbyte.commons.logging.LoggingHelper;
-import io.airbyte.commons.logging.LoggingHelper.Color;
-import io.airbyte.commons.logging.MdcScope;
-import io.airbyte.commons.logging.MdcScope.Builder;
-import io.airbyte.commons.workers.config.WorkerConfigsProvider.ResourceType;
-import io.airbyte.config.ResourceRequirements;
-import io.airbyte.persistence.job.errorreporter.SentryExceptionHelper;
-import io.airbyte.persistence.job.errorreporter.SentryExceptionHelper.ErrorMapKeys;
-import io.airbyte.protocol.models.AirbyteErrorTraceMessage;
-import io.airbyte.protocol.models.AirbyteErrorTraceMessage.FailureType;
-import io.airbyte.protocol.models.AirbyteMessage;
-import io.airbyte.protocol.models.AirbyteMessage.Type;
-import io.airbyte.protocol.models.AirbyteTraceMessage;
-import io.airbyte.protocol.models.ConfiguredAirbyteCatalog;
-import io.airbyte.workers.WorkerUtils;
-import io.airbyte.workers.exception.WorkerException;
-import io.airbyte.workers.process.AirbyteIntegrationLauncher;
-import io.airbyte.workers.process.ProcessFactory;
-import java.io.InputStream;
-import java.nio.file.Path;
-import java.util.Collections;
-import java.util.List;
-import java.util.Map;
-import java.util.UUID;
-import java.util.concurrent.TimeUnit;
-import java.util.stream.Collectors;
-import java.util.stream.Stream;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * Default Normalization Runner. Executes normalization.
- */ -public class DefaultNormalizationRunner implements NormalizationRunner { - - private static final Logger LOGGER = LoggerFactory.getLogger(DefaultNormalizationRunner.class); - private static final MdcScope.Builder CONTAINER_LOG_MDC_BUILDER = new Builder() - .setLogPrefix(LoggingHelper.NORMALIZATION_LOGGER_PREFIX) - .setPrefixColor(Color.GREEN_BACKGROUND); - - private final String normalizationIntegrationType; - private final ProcessFactory processFactory; - private final String normalizationImageName; - private final NormalizationAirbyteStreamFactory streamFactory = new NormalizationAirbyteStreamFactory(CONTAINER_LOG_MDC_BUILDER); - private Map> airbyteMessagesByType; - private String dbtErrorStack; - - private Process process = null; - - public DefaultNormalizationRunner(final ProcessFactory processFactory, - final String normalizationImage, - final String normalizationIntegrationType) { - this.processFactory = processFactory; - this.normalizationImageName = normalizationImage; - this.normalizationIntegrationType = normalizationIntegrationType; - } - - @Override - public boolean normalize(final String jobId, - final int attempt, - final UUID connectionId, - final UUID workspaceId, - final Path jobRoot, - final JsonNode config, - final ConfiguredAirbyteCatalog catalog, - final ResourceRequirements resourceRequirements) - throws Exception { - final Map files = ImmutableMap.of( - WorkerConstants.DESTINATION_CONFIG_JSON_FILENAME, Jsons.serialize(config), - WorkerConstants.DESTINATION_CATALOG_JSON_FILENAME, Jsons.serialize(catalog)); - - return runProcess(jobId, attempt, connectionId, workspaceId, jobRoot, files, resourceRequirements, "run", - "--integration-type", normalizationIntegrationType.toLowerCase(), - "--config", WorkerConstants.DESTINATION_CONFIG_JSON_FILENAME, - "--catalog", WorkerConstants.DESTINATION_CATALOG_JSON_FILENAME); - } - - @SuppressWarnings("PMD.AvoidLiteralsInIfCondition") - private boolean runProcess(final String jobId, - final int attempt, - final UUID connectionId, - final UUID workspaceId, - final Path jobRoot, - final Map files, - final ResourceRequirements resourceRequirements, - final String... args) - throws Exception { - try { - LOGGER.info("Running with normalization version: {}", normalizationImageName); - process = processFactory.create( - ResourceType.NORMALIZATION, - NORMALIZE_STEP, - jobId, - attempt, - connectionId, - workspaceId, - jobRoot, - normalizationImageName, - // custom connector does not use normalization - false, - false, files, - null, - // We should use the AirbyteIntegrationLauncher, but normalization is going away so wontfix. 
- AirbyteIntegrationLauncher.buildGenericConnectorResourceRequirements(resourceRequirements), - null, - Map.of(JOB_TYPE_KEY, SYNC_JOB, SYNC_STEP_KEY, NORMALIZE_STEP), - Collections.emptyMap(), - Collections.emptyMap(), - Collections.emptyMap(), args); - - try (final InputStream stdout = process.getInputStream()) { - // finds and collects any AirbyteMessages from stdout - // also builds a list of raw dbt errors and stores in streamFactory - airbyteMessagesByType = streamFactory.create(IOs.newBufferedReader(stdout)) - .collect(Collectors.groupingBy(AirbyteMessage::getType)); - - // picks up error logs from dbt - dbtErrorStack = String.join("\n", streamFactory.getDbtErrors()); - - if (!"".equals(dbtErrorStack)) { - final AirbyteMessage dbtTraceMessage = new AirbyteMessage() - .withType(Type.TRACE) - .withTrace(new AirbyteTraceMessage() - .withType(AirbyteTraceMessage.Type.ERROR) - .withEmittedAt((double) System.currentTimeMillis()) - .withError(new AirbyteErrorTraceMessage() - .withFailureType(FailureType.SYSTEM_ERROR) // TODO: decide on best FailureType for this - .withMessage("Normalization failed during the dbt run. This may indicate a problem with the data itself.") - .withInternalMessage(buildInternalErrorMessageFromDbtStackTrace()) - // due to the lack of consistent defining features in dbt errors we're injecting a breadcrumb to the - // stacktrace so we can confidently identify all dbt errors when parsing and sending to Sentry - // see dbt error examples: https://docs.getdbt.com/guides/legacy/debugging-errors for more context - .withStackTrace("AirbyteDbtError: \n".concat(dbtErrorStack)))); - - airbyteMessagesByType.putIfAbsent(Type.TRACE, List.of(dbtTraceMessage)); - } - } - LineGobbler.gobble(process.getErrorStream(), LOGGER::error, CONTAINER_LOG_MDC_BUILDER); - - WorkerUtils.wait(process); - - return process.exitValue() == 0; - } catch (final Exception e) { - // make sure we kill the process on failure to avoid zombies. - if (process != null) { - WorkerUtils.cancelProcess(process); - } - throw e; - } - } - - @Override - public void close() throws Exception { - if (process == null) { - return; - } - - LOGGER.info("Terminating normalization process..."); - WorkerUtils.gentleClose(process, 1, TimeUnit.MINUTES); - - /* - * After attempting to close the process check the following: - * - * Did the process actually terminate? If "yes", did it do so nominally? 
- */ - if (process.isAlive()) { - throw new WorkerException("Normalization process did not terminate after 1 minute."); - } else if (process.exitValue() != 0) { - throw new WorkerException("Normalization process did not terminate normally (exit code: " + process.exitValue() + ")"); - } else { - LOGGER.info("Normalization process successfully terminated."); - } - } - - @Override - public Stream getTraceMessages() { - if (airbyteMessagesByType != null && airbyteMessagesByType.get(Type.TRACE) != null) { - return airbyteMessagesByType.get(Type.TRACE).stream().map(AirbyteMessage::getTrace); - } - return Stream.empty(); - } - - private String buildInternalErrorMessageFromDbtStackTrace() { - final Map errorMap = SentryExceptionHelper.getUsefulErrorMessageAndTypeFromDbtError(dbtErrorStack); - return errorMap.get(ErrorMapKeys.ERROR_MAP_MESSAGE_KEY); - } - -} diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/normalization/NormalizationAirbyteStreamFactory.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/normalization/NormalizationAirbyteStreamFactory.java deleted file mode 100644 index 966de3376e3..00000000000 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/normalization/NormalizationAirbyteStreamFactory.java +++ /dev/null @@ -1,129 +0,0 @@ -/* - * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.workers.normalization; - -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.node.JsonNodeType; -import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.logging.MdcScope; -import io.airbyte.protocol.models.AirbyteLogMessage; -import io.airbyte.protocol.models.AirbyteMessage; -import io.airbyte.workers.internal.AirbyteStreamFactory; -import java.io.BufferedReader; -import java.util.ArrayList; -import java.util.List; -import java.util.Optional; -import java.util.stream.Stream; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * Creates a stream from an input stream. The produced stream attempts to parse each line of the - * InputStream into a AirbyteMessage. If the line cannot be parsed into a AirbyteMessage it is - * assumed to be from dbt. dbt [error] messages are also parsed - * - *
- * <p>
- * If a line starts with a AirbyteMessage and then has other characters after it, that - * AirbyteMessage will still be parsed. If there are multiple AirbyteMessage records on the same - * line, only the first will be parsed. - */ -@SuppressWarnings("PMD.MoreThanOneLogger") -public class NormalizationAirbyteStreamFactory implements AirbyteStreamFactory { - - private static final Logger LOGGER = LoggerFactory.getLogger(NormalizationAirbyteStreamFactory.class); - - private final MdcScope.Builder containerLogMdcBuilder; - private final Logger logger; - private final List dbtErrors = new ArrayList<>(); - - public NormalizationAirbyteStreamFactory(final MdcScope.Builder containerLogMdcBuilder) { - this(LOGGER, containerLogMdcBuilder); - } - - NormalizationAirbyteStreamFactory(final Logger logger, final MdcScope.Builder containerLogMdcBuilder) { - this.logger = logger; - this.containerLogMdcBuilder = containerLogMdcBuilder; - } - - @Override - public Stream create(final BufferedReader bufferedReader) { - return bufferedReader - .lines() - .flatMap(this::filterOutAndHandleNonJsonLines) - .flatMap(this::filterOutAndHandleNonAirbyteMessageLines) - // so now we are just left with AirbyteMessages - .filter(airbyteMessage -> { - final boolean isLog = airbyteMessage.getType() == AirbyteMessage.Type.LOG; - if (isLog) { - try (final var mdcScope = containerLogMdcBuilder.build()) { - internalLog(airbyteMessage.getLog()); - } - } - return !isLog; - }); - } - - private Stream filterOutAndHandleNonJsonLines(final String line) { - final Optional jsonLine = Jsons.tryDeserialize(line); - if (jsonLine.isEmpty()) { - // we log as info all the lines that are not valid json. - try (final var mdcScope = containerLogMdcBuilder.build()) { - logger.info(line); - // this is really hacky and vulnerable to picking up lines we don't want, - // however it is only for destinations that are using dbt version < 1.0. - // For v1 + we switch on JSON logging and parse those in the next block. - if (line.contains("[error]")) { - dbtErrors.add(line); - } - } - } - return jsonLine.stream(); - } - - private Stream filterOutAndHandleNonAirbyteMessageLines(final JsonNode jsonLine) { - final Optional m = Jsons.tryObject(jsonLine, AirbyteMessage.class); - if (m.isEmpty()) { - // valid JSON but not an AirbyteMessage, so we assume this is a dbt json log - try { - final String logLevel = (jsonLine.getNodeType() == JsonNodeType.NULL || jsonLine.get("level").isNull()) - ? "" - : jsonLine.get("level").asText(); - final String logMsg = jsonLine.get("msg").isNull() ? "" : jsonLine.get("msg").asText(); - try (final var mdcScope = containerLogMdcBuilder.build()) { - switch (logLevel) { - case "debug" -> logger.debug(logMsg); - case "info" -> logger.info(logMsg); - case "warn" -> logger.warn(logMsg); - case "error" -> logAndCollectErrorMessage(logMsg); - default -> logger.info(jsonLine.toPrettyString()); // this shouldn't happen but logging it to avoid hiding unexpected lines. 
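A note on the log-routing contract this deleted factory implemented: dbt 1.0+ emits structured JSON logs with `level` and `msg` fields, while older dbt versions emit plain text in which only an `[error]` marker identifies failures. Below is a minimal, self-contained sketch of that routing, not the removed implementation itself; the sample lines are invented, and Jackson (already used throughout this repo) is assumed on the classpath.

// Sketch: route dbt log lines the way the deleted factory did.
import com.fasterxml.jackson.databind.ObjectMapper

fun main() {
    val mapper = ObjectMapper()
    val dbtErrors = mutableListOf<String>()
    val lines = listOf(
        """{"level": "info", "msg": "Running with dbt=1.0.0"}""",          // dbt >= 1.0 structured log
        """{"level": "error", "msg": "Compilation Error in model foo"}""", // dbt >= 1.0 error
        "dbt encountered an [error] in model foo",                         // dbt < 1.0 plain-text log
    )
    for (line in lines) {
        val json = runCatching { mapper.readTree(line) }.getOrNull()
        if (json == null) {
            // Not JSON: log verbatim; keep legacy "[error]" lines, as filterOutAndHandleNonJsonLines did.
            if (line.contains("[error]")) dbtErrors.add(line)
            println("INFO  $line")
            continue
        }
        // Valid JSON but not an AirbyteMessage: treat it as a dbt structured log.
        val msg = json.path("msg").asText("")
        when (json.path("level").asText("")) {
            "error" -> { dbtErrors.add(msg); println("ERROR $msg") } // collected for the trace message
            "warn" -> println("WARN  $msg")
            "debug" -> println("DEBUG $msg")
            else -> println("INFO  $msg")
        }
    }
    println("collected dbt errors: $dbtErrors")
}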
- } - } - } catch (final Exception e) { - logger.info(jsonLine.toPrettyString()); - } - } - return m.stream(); - } - - private void logAndCollectErrorMessage(final String logMsg) { - logger.error(logMsg); - dbtErrors.add(logMsg); - } - - public List getDbtErrors() { - return dbtErrors; - } - - private void internalLog(final AirbyteLogMessage logMessage) { - switch (logMessage.getLevel()) { - case FATAL, ERROR -> logger.error(logMessage.getMessage()); - case WARN -> logger.warn(logMessage.getMessage()); - case DEBUG -> logger.debug(logMessage.getMessage()); - case TRACE -> logger.trace(logMessage.getMessage()); - default -> logger.info(logMessage.getMessage()); - } - } - -} diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/normalization/NormalizationRunner.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/normalization/NormalizationRunner.java deleted file mode 100644 index f7ecd0fddc7..00000000000 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/normalization/NormalizationRunner.java +++ /dev/null @@ -1,57 +0,0 @@ -/* - * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.workers.normalization; - -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.config.ResourceRequirements; -import io.airbyte.protocol.models.AirbyteTraceMessage; -import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; -import java.nio.file.Path; -import java.util.UUID; -import java.util.stream.Stream; - -/** - * Normalization Runner. Executes normalization. - */ -public interface NormalizationRunner extends AutoCloseable { - - /** - * After this method is called, the caller must call close. Previous to this method being called a - * NormalizationRunner can be instantiated and not worry about close being called. - * - * @throws Exception - any exception thrown from normalization will be handled gracefully by the - * caller. - */ - default void start() throws Exception { - // no-op. - } - - /** - * Executes normalization of the data in the destination. - * - * @param jobId - id of the job that launched normalization - * @param attempt - current attempt - * @param jobRoot - root dir available for the runner to use. - * @param config - configuration for connecting to the destination - * @param catalog - the schema of the json blob in the destination. it is used normalize the blob - * into typed columns. - * @param resourceRequirements - resource requirements - * @return true of normalization succeeded. otherwise false. - * @throws Exception - any exception thrown from normalization will be handled gracefully by the - * caller. - */ - boolean normalize(String jobId, - int attempt, - final UUID connectionId, - final UUID workspaceId, - Path jobRoot, - JsonNode config, - ConfiguredAirbyteCatalog catalog, - ResourceRequirements resourceRequirements) - throws Exception; - - Stream getTraceMessages(); - -} diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/normalization/NormalizationWorker.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/normalization/NormalizationWorker.java deleted file mode 100644 index 623db7a5bff..00000000000 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/normalization/NormalizationWorker.java +++ /dev/null @@ -1,14 +0,0 @@ -/* - * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.workers.normalization; - -import io.airbyte.config.NormalizationInput; -import io.airbyte.config.NormalizationSummary; -import io.airbyte.workers.Worker; - -/** - * Worker that runs normalization. - */ -public interface NormalizationWorker extends Worker {} diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/process/AirbyteIntegrationLauncher.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/process/AirbyteIntegrationLauncher.java index bc962b74240..6e7e212d36f 100644 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/process/AirbyteIntegrationLauncher.java +++ b/airbyte-commons-worker/src/main/java/io/airbyte/workers/process/AirbyteIntegrationLauncher.java @@ -9,8 +9,6 @@ import static io.airbyte.metrics.lib.ApmTraceConstants.Tags.JOB_ROOT_KEY; import static io.airbyte.metrics.lib.ApmTraceConstants.WORKER_OPERATION_NAME; import static io.airbyte.workers.process.Metadata.CHECK_JOB; -import static io.airbyte.workers.process.Metadata.CHECK_STEP_KEY; -import static io.airbyte.workers.process.Metadata.CONNECTOR_STEP; import static io.airbyte.workers.process.Metadata.DISCOVER_JOB; import static io.airbyte.workers.process.Metadata.JOB_TYPE_KEY; import static io.airbyte.workers.process.Metadata.READ_STEP; @@ -161,7 +159,7 @@ public Process check(final Path jobRoot, final String configFilename, final Stri null, buildGenericConnectorResourceRequirements(resourceRequirement), allowedHosts, - getLabels(Map.of(JOB_TYPE_KEY, CHECK_JOB, CHECK_STEP_KEY, CONNECTOR_STEP)), + getLabels(Map.of(JOB_TYPE_KEY, CHECK_JOB)), getWorkerMetadata(), Collections.emptyMap(), additionalEnvironmentVariables, diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/process/KubeProcessFactory.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/process/KubeProcessFactory.java index 1cbe6d1ebb6..d024f08f80e 100644 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/process/KubeProcessFactory.java +++ b/airbyte-commons-worker/src/main/java/io/airbyte/workers/process/KubeProcessFactory.java @@ -8,7 +8,6 @@ import static io.airbyte.workers.process.Metadata.AWS_ASSUME_ROLE_EXTERNAL_ID; import static io.airbyte.workers.process.Metadata.AWS_SECRET_ACCESS_KEY; -import autovalue.shaded.org.jetbrains.annotations.NotNull; import com.google.common.annotations.VisibleForTesting; import io.airbyte.commons.envvar.EnvVar; import io.airbyte.commons.helper.DockerImageNameHelper; @@ -32,6 +31,7 @@ import io.airbyte.workers.helper.ConnectorApmSupportHelper; import io.airbyte.workers.models.SecretMetadata; import io.fabric8.kubernetes.client.KubernetesClient; +import jakarta.validation.constraints.NotNull; import java.net.InetAddress; import java.nio.file.Path; import java.util.ArrayList; diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/process/Metadata.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/process/Metadata.java index 9a58da52567..6aabe2e7231 100644 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/process/Metadata.java +++ b/airbyte-commons-worker/src/main/java/io/airbyte/workers/process/Metadata.java @@ -49,7 +49,4 @@ public final class Metadata { public static final String AWS_ASSUME_ROLE_SECRET_NAME = "AWS_ASSUME_ROLE_SECRET_NAME"; public static final String AWS_ASSUME_ROLE_EXTERNAL_ID = "AWS_ASSUME_ROLE_EXTERNAL_ID"; - public static final String CHECK_STEP_KEY = "check_step"; - public static final String CONNECTOR_STEP = "connector"; - } diff --git 
a/airbyte-commons-worker/src/main/java/io/airbyte/workers/sync/DbtLauncherWorker.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/sync/DbtLauncherWorker.java deleted file mode 100644 index 1992538b40f..00000000000 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/sync/DbtLauncherWorker.java +++ /dev/null @@ -1,72 +0,0 @@ -/* - * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.workers.sync; - -import static io.airbyte.workers.process.Metadata.ORCHESTRATOR_DBT_NORMALIZATION_STEP; -import static io.airbyte.workers.process.Metadata.SYNC_STEP_KEY; - -import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.workers.config.WorkerConfigs; -import io.airbyte.config.OperatorDbtInput; -import io.airbyte.featureflag.FeatureFlagClient; -import io.airbyte.metrics.lib.MetricClient; -import io.airbyte.persistence.job.models.IntegrationLauncherConfig; -import io.airbyte.persistence.job.models.JobRunConfig; -import io.airbyte.workers.ContainerOrchestratorConfig; -import io.airbyte.workers.workload.WorkloadIdGenerator; -import java.util.Map; -import java.util.UUID; - -/** - * Dbt Launcher Worker. - */ -public class DbtLauncherWorker extends LauncherWorker { - - public static final String DBT = "dbt-orchestrator"; - private static final String POD_NAME_PREFIX = "orchestrator-dbt"; - public static final String INIT_FILE_DESTINATION_LAUNCHER_CONFIG = "destinationLauncherConfig.json"; - - public DbtLauncherWorker(final UUID connectionId, - final UUID workspaceId, - final IntegrationLauncherConfig destinationLauncherConfig, - final JobRunConfig jobRunConfig, - final WorkerConfigs workerConfigs, - final ContainerOrchestratorConfig containerOrchestratorConfig, - final Integer serverPort, - final FeatureFlagClient featureFlagClient, - final MetricClient metricClient, - final WorkloadIdGenerator workloadIdGenerator) { - super( - connectionId, - workspaceId, - DBT, - POD_NAME_PREFIX, - jobRunConfig, - Map.of( - INIT_FILE_DESTINATION_LAUNCHER_CONFIG, Jsons.serialize(destinationLauncherConfig)), - containerOrchestratorConfig, - workerConfigs.getResourceRequirements(), - Void.class, - serverPort, - workerConfigs, - featureFlagClient, - // Custom connector does not use Dbt at this moment, thus this flag for runnning job under - // isolated pool can be set to false. - false, - metricClient, - workloadIdGenerator); - } - - @Override - protected Map generateCustomMetadataLabels() { - return Map.of(SYNC_STEP_KEY, ORCHESTRATOR_DBT_NORMALIZATION_STEP); - } - - @Override - protected String getLauncherType() { - return "DBT"; - } - -} diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/sync/NormalizationLauncherWorker.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/sync/NormalizationLauncherWorker.java deleted file mode 100644 index af201c1571a..00000000000 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/sync/NormalizationLauncherWorker.java +++ /dev/null @@ -1,74 +0,0 @@ -/* - * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.workers.sync; - -import static io.airbyte.workers.process.Metadata.ORCHESTRATOR_NORMALIZATION_STEP; -import static io.airbyte.workers.process.Metadata.SYNC_STEP_KEY; - -import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.workers.config.WorkerConfigs; -import io.airbyte.config.NormalizationInput; -import io.airbyte.config.NormalizationSummary; -import io.airbyte.featureflag.FeatureFlagClient; -import io.airbyte.metrics.lib.MetricClient; -import io.airbyte.persistence.job.models.IntegrationLauncherConfig; -import io.airbyte.persistence.job.models.JobRunConfig; -import io.airbyte.workers.ContainerOrchestratorConfig; -import io.airbyte.workers.workload.WorkloadIdGenerator; -import java.util.Map; -import java.util.UUID; - -/** - * Normalization Launcher Worker. - */ -public class NormalizationLauncherWorker extends LauncherWorker { - - public static final String NORMALIZATION = "normalization-orchestrator"; - private static final String POD_NAME_PREFIX = "orchestrator-norm"; - public static final String INIT_FILE_DESTINATION_LAUNCHER_CONFIG = "destinationLauncherConfig.json"; - - public NormalizationLauncherWorker(final UUID connectionId, - final UUID workspaceId, - final IntegrationLauncherConfig destinationLauncherConfig, - final JobRunConfig jobRunConfig, - final WorkerConfigs workerConfigs, - final ContainerOrchestratorConfig containerOrchestratorConfig, - final Integer serverPort, - final FeatureFlagClient featureFlagClient, - final MetricClient metricClient, - final WorkloadIdGenerator workloadIdGenerator) { - super( - connectionId, - workspaceId, - NORMALIZATION, - POD_NAME_PREFIX, - jobRunConfig, - Map.of( - INIT_FILE_DESTINATION_LAUNCHER_CONFIG, Jsons.serialize(destinationLauncherConfig)), - containerOrchestratorConfig, - workerConfigs.getResourceRequirements(), - NormalizationSummary.class, - serverPort, - workerConfigs, - featureFlagClient, - // Normalization process will happen only on a fixed set of connectors, - // thus they are not going to be run under custom connectors. Setting this to false. 
- false, - metricClient, - workloadIdGenerator); - - } - - @Override - protected Map generateCustomMetadataLabels() { - return Map.of(SYNC_STEP_KEY, ORCHESTRATOR_NORMALIZATION_STEP); - } - - @Override - protected String getLauncherType() { - return "Normalization"; - } - -} diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/sync/WorkloadApiWorker.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/sync/WorkloadApiWorker.java index 2be31154113..f4ee93b3d8f 100644 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/sync/WorkloadApiWorker.java +++ b/airbyte-commons-worker/src/main/java/io/airbyte/workers/sync/WorkloadApiWorker.java @@ -5,6 +5,7 @@ package io.airbyte.workers.sync; import static io.airbyte.config.helpers.LogClientSingleton.fullLogPath; +import static io.airbyte.metrics.lib.MetricEmittingApps.WORKLOAD_LAUNCHER; import dev.failsafe.Failsafe; import dev.failsafe.RetryPolicy; @@ -27,6 +28,8 @@ import io.airbyte.persistence.job.models.ReplicationInput; import io.airbyte.workers.Worker; import io.airbyte.workers.exception.WorkerException; +import io.airbyte.workers.exception.WorkloadLauncherException; +import io.airbyte.workers.exception.WorkloadMonitorException; import io.airbyte.workers.internal.exception.DestinationException; import io.airbyte.workers.internal.exception.SourceException; import io.airbyte.workers.models.ReplicationActivityInput; @@ -64,6 +67,8 @@ public class WorkloadApiWorker implements Worker WORKLOAD_MONITOR = Set.of("workload-monitor-start", "workload-monitor-claim", "workload-monitor-heartbeat"); + private static final Logger log = LoggerFactory.getLogger(WorkloadApiWorker.class); private static final Set TERMINAL_STATUSES = Set.of(WorkloadStatus.CANCELLED, WorkloadStatus.FAILURE, WorkloadStatus.SUCCESS); private final JobOutputDocStore jobOutputDocStore; @@ -188,6 +193,10 @@ private void throwFallbackError(final Workload workload, final Exception e) thro throw new SourceException(workload.getTerminationReason(), e); } else if (DESTINATION.equals(workload.getTerminationSource())) { throw new DestinationException(workload.getTerminationReason(), e); + } else if (WORKLOAD_LAUNCHER.getApplicationName().equals(workload.getTerminationSource())) { + throw new WorkloadLauncherException(workload.getTerminationReason()); + } else if (workload.getTerminationSource() != null && WORKLOAD_MONITOR.contains(workload.getTerminationSource())) { + throw new WorkloadMonitorException(workload.getTerminationReason()); } else { throw new WorkerException(workload.getTerminationReason(), e); } diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/test_utils/TestConfigHelpers.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/test_utils/TestConfigHelpers.java index 9f1d8ccde55..05c7f37118a 100644 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/test_utils/TestConfigHelpers.java +++ b/airbyte-commons-worker/src/main/java/io/airbyte/workers/test_utils/TestConfigHelpers.java @@ -9,15 +9,11 @@ import io.airbyte.config.ConnectionContext; import io.airbyte.config.DestinationConnection; import io.airbyte.config.JobSyncConfig.NamespaceDefinitionType; -import io.airbyte.config.OperatorDbt; -import io.airbyte.config.OperatorNormalization; -import io.airbyte.config.OperatorNormalization.Option; import io.airbyte.config.SourceConnection; import io.airbyte.config.StandardSync; import io.airbyte.config.StandardSync.Status; import io.airbyte.config.StandardSyncInput; import io.airbyte.config.StandardSyncOperation; -import 
io.airbyte.config.StandardSyncOperation.OperatorType; import io.airbyte.config.State; import io.airbyte.persistence.job.models.IntegrationLauncherConfig; import io.airbyte.persistence.job.models.ReplicationInput; @@ -49,7 +45,8 @@ public class TestConfigHelpers { * * @return sync config and sync input. */ - public static ImmutablePair createSyncConfig(final UUID organizationId) { + public static ImmutablePair createSyncConfig(final UUID organizationId, + final UUID sourceDefinitionId) { final ImmutablePair replicationInputPair = createReplicationConfig(); final var replicationInput = replicationInputPair.getRight(); // For now, these are identical, so we delegate to createReplicationConfig and copy it over for @@ -65,7 +62,9 @@ public static ImmutablePair createSyncConfig(fi .withSourceConfiguration(replicationInput.getSourceConfiguration()) .withOperationSequence(replicationInput.getOperationSequence()) .withWorkspaceId(replicationInput.getWorkspaceId()) - .withConnectionContext(new ConnectionContext().withOrganizationId(organizationId))); + .withConnectionContext(new ConnectionContext() + .withOrganizationId(organizationId) + .withSourceDefinitionId(sourceDefinitionId))); } public static ImmutablePair createReplicationConfig() { @@ -117,19 +116,11 @@ public static ImmutablePair createReplicationCon final StandardSyncOperation normalizationOperation = new StandardSyncOperation() .withOperationId(normalizationOperationId) .withName("Normalization") - .withOperatorType(OperatorType.NORMALIZATION) - .withOperatorNormalization(new OperatorNormalization().withOption(Option.BASIC)) .withTombstone(false); final StandardSyncOperation customDbtOperation = new StandardSyncOperation() .withOperationId(dbtOperationId) .withName("Custom Transformation") - .withOperatorType(OperatorType.DBT) - .withOperatorDbt(new OperatorDbt() - .withDockerImage("docker") - .withDbtArguments("--help") - .withGitRepoUrl("git url") - .withGitRepoBranch("git url")) .withTombstone(false); final ConfiguredAirbyteCatalog catalog = new ConfiguredAirbyteCatalog(); diff --git a/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/exception/WorkloadLauncherException.kt b/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/exception/WorkloadLauncherException.kt new file mode 100644 index 00000000000..0788f8c8c0d --- /dev/null +++ b/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/exception/WorkloadLauncherException.kt @@ -0,0 +1,7 @@ +package io.airbyte.workers.exception + +class WorkloadLauncherException : RuntimeException { + constructor(message: String?) : super(message) + + constructor(message: String?, cause: Throwable?) : super(message, cause) +} diff --git a/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/exception/WorkloadMonitorException.kt b/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/exception/WorkloadMonitorException.kt new file mode 100644 index 00000000000..ccb175ae48a --- /dev/null +++ b/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/exception/WorkloadMonitorException.kt @@ -0,0 +1,7 @@ +package io.airbyte.workers.exception + +class WorkloadMonitorException : RuntimeException { + constructor(message: String?) : super(message) + + constructor(message: String?, cause: Throwable?) 
: super(message, cause) +} diff --git a/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/general/ReplicationWorkerHelper.kt b/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/general/ReplicationWorkerHelper.kt index 0b6cadc3cfb..d611b563680 100644 --- a/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/general/ReplicationWorkerHelper.kt +++ b/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/general/ReplicationWorkerHelper.kt @@ -22,6 +22,7 @@ import io.airbyte.config.PerformanceMetrics import io.airbyte.config.ReplicationAttemptSummary import io.airbyte.config.ReplicationOutput import io.airbyte.config.StandardSyncSummary.ReplicationStatus +import io.airbyte.config.State import io.airbyte.config.SyncStats import io.airbyte.config.WorkerDestinationConfig import io.airbyte.metrics.lib.ApmTraceUtils @@ -42,6 +43,7 @@ import io.airbyte.workers.context.ReplicationContext import io.airbyte.workers.context.ReplicationFeatureFlags import io.airbyte.workers.exception.WorkloadHeartbeatException import io.airbyte.workers.helper.FailureHelper +import io.airbyte.workers.helper.ResumableFullRefreshStatsHelper import io.airbyte.workers.helper.StreamStatusCompletionTracker import io.airbyte.workers.internal.AirbyteDestination import io.airbyte.workers.internal.AirbyteMapper @@ -180,6 +182,7 @@ class ReplicationWorkerHelper( replicationFeatureFlags: ReplicationFeatureFlags, jobRoot: Path, configuredAirbyteCatalog: ConfiguredAirbyteCatalog, + state: State?, ) { timeTracker.trackReplicationStartTime() @@ -204,6 +207,17 @@ class ReplicationWorkerHelper( dockerImageTag = DockerImageName.extractTag(ctx.destinationImage), ), ).supportRefreshes + + if (supportRefreshes) { + // if configured airbyte catalog has full refresh with state + val resumedFRStreams = ResumableFullRefreshStatsHelper().getResumedFullRefreshStreams(configuredAirbyteCatalog, state) + logger.info { "Number of Resumed Full Refresh Streams: {${resumedFRStreams.size}}" } + if (resumedFRStreams.isNotEmpty()) { + resumedFRStreams.forEach { streamDescriptor -> + logger.info { " Resumed stream name: ${streamDescriptor.name} namespace: ${streamDescriptor.namespace}" } + } + } + } streamStatusCompletionTracker.startTracking(configuredAirbyteCatalog, supportRefreshes) } @@ -390,7 +404,7 @@ class ReplicationWorkerHelper( if (destinationRawMessage.type == Type.STATE) { val airbyteStateMessage = destinationRawMessage.state recordStateStatsMetrics(metricClient, airbyteStateMessage, AirbyteMessageOrigin.DESTINATION, ctx!!) 
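For clarity on the resumed-stream logging added above: getResumedFullRefreshStreams treats a stream as a resumed full refresh only when it is configured as FULL_REFRESH in the catalog and already has state from a prior attempt, i.e. a set intersection. A small worked sketch under invented, simplified types (the real helper operates on ConfiguredAirbyteCatalog and State):

// A stream "resumes" a full refresh only if it is both configured as full refresh
// and left state behind on a previous attempt. Names here are made up for the example.
data class Stream(val namespace: String?, val name: String)

fun resumedFullRefreshStreams(
    fullRefreshStreams: Set<Stream>, // from the configured catalog
    streamsWithState: Set<Stream>,   // from the hydrated input state
): Set<Stream> = streamsWithState intersect fullRefreshStreams

fun main() {
    val catalogFullRefresh = setOf(Stream("public", "users"), Stream("public", "orders"))
    val withState = setOf(Stream("public", "orders"), Stream("public", "events")) // "events" is incremental
    // Only "orders" is both full refresh and mid-flight with state, so only it resumes.
    println(resumedFullRefreshStreams(catalogFullRefresh, withState))
}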
- syncPersistence.persist(context.connectionId, destinationRawMessage.state) + syncPersistence.accept(context.connectionId, destinationRawMessage.state) metricClient.count(OssMetricsRegistry.STATE_PROCESSED_FROM_DESTINATION, 1, *metricAttrs.toTypedArray()) } diff --git a/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/general/StateCheckSumCountEventHandler.kt b/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/general/StateCheckSumCountEventHandler.kt index 5dcaa06389d..5467341dcab 100644 --- a/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/general/StateCheckSumCountEventHandler.kt +++ b/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/general/StateCheckSumCountEventHandler.kt @@ -12,6 +12,7 @@ import io.airbyte.commons.json.Jsons import io.airbyte.featureflag.Connection import io.airbyte.featureflag.EmitStateStatsToSegment import io.airbyte.featureflag.FeatureFlagClient +import io.airbyte.featureflag.LogStreamNamesInSateMessage import io.airbyte.featureflag.Multi import io.airbyte.featureflag.Workspace import io.airbyte.protocol.models.AirbyteStateMessage @@ -58,6 +59,13 @@ class StateCheckSumCountEventHandler( val connectionContext = Multi(listOf(Connection(connectionId), Workspace(workspaceId))) featureFlagClient.boolVariation(EmitStateStatsToSegment, connectionContext) } + + // Temp piece of code for debug + val logIncomingStreamNames: Boolean by lazy { + val connectionContext = Multi(listOf(Connection(connectionId), Workspace(workspaceId))) + featureFlagClient.boolVariation(LogStreamNamesInSateMessage, connectionContext) + } + private val deployment: Deployment by lazy { retry { deploymentFetcher.get() } } private val trackingIdentity: TrackingIdentity by lazy { retry { trackingIdentityFetcher.apply(workspaceId) } } @@ -76,6 +84,9 @@ class StateCheckSumCountEventHandler( @Volatile private var sourceStateMessageSeen = false + @Volatile + private var isClosed = false + @Volatile private var destinationStateMessageSeen = false @@ -363,7 +374,7 @@ class StateCheckSumCountEventHandler( fun close(completedSuccessfully: Boolean) { logger.info { "Closing StateCheckSumCountEventHandler" } - if (completedSuccessfully && sourceStateMessageSeen && destinationStateMessageSeen && noCheckSumError) { + if (completedSuccessfully && !isClosed && sourceStateMessageSeen && destinationStateMessageSeen && noCheckSumError) { logger.info { "No checksum errors were reported in the entire sync." 
} val dummyState = DUMMY_STATE_MESSAGE trackStateCountMetrics( @@ -374,6 +385,7 @@ class StateCheckSumCountEventHandler( ), EventType.SUCCESS, ) + isClosed = true } pubSubWriter.ifPresent { it.close() } } diff --git a/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/helper/CatalogDiffConverter.kt b/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/helper/CatalogDiffConverter.kt new file mode 100644 index 00000000000..efa937f9fcd --- /dev/null +++ b/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/helper/CatalogDiffConverter.kt @@ -0,0 +1,92 @@ +package io.airbyte.workers.helper + +import io.airbyte.commons.enums.Enums +import io.airbyte.api.client.model.generated.CatalogDiff as ApiCatalogDiff +import io.airbyte.api.client.model.generated.FieldTransform as ApiFieldTransform +import io.airbyte.api.client.model.generated.StreamAttributeTransform as ApiStreamAttributeTransform +import io.airbyte.api.client.model.generated.StreamTransform as ApiStreamTransform +import io.airbyte.api.client.model.generated.StreamTransformUpdateStream as ApiStreamTransformUpdateStream +import io.airbyte.config.CatalogDiff as DomainCatalogDiff +import io.airbyte.config.FieldSchemaUpdate as DomainFieldSchemaUpdate +import io.airbyte.config.FieldTransform as DomainFieldTransform +import io.airbyte.config.StreamAttributePrimaryKeyUpdate as DomainStreamAttributePrimaryKeyUpdate +import io.airbyte.config.StreamAttributeTransform as DomainStreamAttributeTransform +import io.airbyte.config.StreamDescriptor as DomainStreamDescriptor +import io.airbyte.config.StreamTransform as DomainStreamTransform +import io.airbyte.config.UpdateStream as DomainUpdateStream + +object CatalogDiffConverter { + @JvmStatic + fun toDomain(domainCatalogDiff: ApiCatalogDiff): DomainCatalogDiff { + val streamTransforms = + domainCatalogDiff.transforms + .map { streamTransform -> toDomain(streamTransform) } + + return DomainCatalogDiff() + .withTransforms(streamTransforms) + } + + private fun toDomain(streamTransform: ApiStreamTransform): DomainStreamTransform { + return DomainStreamTransform() + .withTransformType(Enums.convertTo(streamTransform.transformType, DomainStreamTransform.TransformType::class.java)) + .withStreamDescriptor( + DomainStreamDescriptor() + .withName(streamTransform.streamDescriptor.name) + .withNamespace(streamTransform.streamDescriptor.namespace), + ) + .withUpdateStream( + toDomain(streamTransform.updateStream), + ) + } + + private fun toDomain(streamTransformUpdateStream: ApiStreamTransformUpdateStream?): DomainUpdateStream { + if (streamTransformUpdateStream == null) { + return DomainUpdateStream() + } + + return DomainUpdateStream() + .withFieldTransforms(streamTransformUpdateStream.fieldTransforms.map { fieldTransform -> toDomain(fieldTransform) }) + .withStreamAttributeTransforms( + streamTransformUpdateStream.streamAttributeTransforms.map { + streamAttributeTransform -> + toDomain(streamAttributeTransform) + }, + ) + } + + private fun toDomain(fieldTransform: ApiFieldTransform): DomainFieldTransform { + val result = + DomainFieldTransform() + .withTransformType(Enums.convertTo(fieldTransform.transformType, DomainFieldTransform.TransformType::class.java)) + .withFieldName(fieldTransform.fieldName) + .withBreaking(fieldTransform.breaking) + .withAddField(fieldTransform.addField?.schema) + .withRemoveField(fieldTransform.removeField?.schema) + .withUpdateFieldSchema( + DomainFieldSchemaUpdate() + .withOldSchema(fieldTransform.updateFieldSchema?.oldSchema) + 
.withNewSchema(fieldTransform.updateFieldSchema?.newSchema), + ) + + // if (fieldTransform.addField != null) { + // result.addField = fieldTransform.addField?.schema + // } + // + // if (fieldTransform.removeField != null) { + // + // } + + return result + } + + private fun toDomain(streamAttributeTransform: ApiStreamAttributeTransform): DomainStreamAttributeTransform { + return DomainStreamAttributeTransform() + .withTransformType(Enums.convertTo(streamAttributeTransform.transformType, DomainStreamAttributeTransform.TransformType::class.java)) + .withBreaking(streamAttributeTransform.breaking) + .withUpdatePrimaryKey( + DomainStreamAttributePrimaryKeyUpdate() + .withOldPrimaryKey(streamAttributeTransform.updatePrimaryKey?.oldPrimaryKey) + .withNewPrimaryKey(streamAttributeTransform.updatePrimaryKey?.newPrimaryKey), + ) + } +} diff --git a/airbyte-workers/src/main/kotlin/io/airbyte/workers/helper/ResumableFullRefreshStatsHelper.kt b/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/helper/ResumableFullRefreshStatsHelper.kt similarity index 66% rename from airbyte-workers/src/main/kotlin/io/airbyte/workers/helper/ResumableFullRefreshStatsHelper.kt rename to airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/helper/ResumableFullRefreshStatsHelper.kt index 9b16ebaf828..fe1f68e39b2 100644 --- a/airbyte-workers/src/main/kotlin/io/airbyte/workers/helper/ResumableFullRefreshStatsHelper.kt +++ b/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/helper/ResumableFullRefreshStatsHelper.kt @@ -1,12 +1,15 @@ package io.airbyte.workers.helper import io.airbyte.config.StandardSyncOutput +import io.airbyte.config.State import io.airbyte.config.StateType import io.airbyte.config.StateWrapper import io.airbyte.config.StreamDescriptor import io.airbyte.config.StreamSyncStats import io.airbyte.config.helpers.StateMessageHelper import io.airbyte.persistence.job.models.ReplicationInput +import io.airbyte.protocol.models.ConfiguredAirbyteCatalog +import io.airbyte.protocol.models.SyncMode import io.github.oshai.kotlinlogging.KotlinLogging import jakarta.inject.Singleton @@ -21,11 +24,7 @@ class ResumableFullRefreshStatsHelper { hydratedInput: ReplicationInput, standardSyncOutput: StandardSyncOutput, ) { - val streamsWithStates: Set = - StateMessageHelper - .getTypedState(hydratedInput.state?.state) - .map(this::getStreams).orElse(listOf()) - .toSet() + val streamsWithStates: Set = getStreamsWithStates(hydratedInput.state) standardSyncOutput.standardSyncSummary?.streamStats?.let { it @@ -34,6 +33,27 @@ class ResumableFullRefreshStatsHelper { } } + fun getResumedFullRefreshStreams( + catalog: ConfiguredAirbyteCatalog, + state: State?, + ): Set { + val streamsWithStates: Set = getStreamsWithStates(state) + + val fullRefreshStreams = + catalog.streams + .filter { s -> s.syncMode == SyncMode.FULL_REFRESH } + .map { s -> StreamDescriptor().withNamespace(s.stream.namespace).withName(s.stream.name) } + .toSet() + + return streamsWithStates intersect fullRefreshStreams + } + + fun getStreamsWithStates(state: State?): Set = + StateMessageHelper + .getTypedState(state?.state) + .map(this::getStreams).orElse(listOf()) + .toSet() + private fun getStreams(stateWrapper: StateWrapper): List { return when (stateWrapper.stateType) { StateType.STREAM -> stateWrapper.stateMessages.map { it.stream.streamDescriptor } diff --git a/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/helper/StreamStatusCompletionTracker.kt 
b/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/helper/StreamStatusCompletionTracker.kt index e262c59279e..b7c92897a3b 100644 --- a/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/helper/StreamStatusCompletionTracker.kt +++ b/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/helper/StreamStatusCompletionTracker.kt @@ -32,8 +32,11 @@ class StreamStatusCompletionTracker( open fun track(streamStatus: AirbyteStreamStatusTraceMessage) { if (shouldEmitStreamStatus && streamStatus.status == AirbyteStreamStatusTraceMessage.AirbyteStreamStatus.COMPLETE) { - hasCompletedStatus[streamStatus.streamDescriptor] ?: run { - throw WorkerException("A stream status has been detected for a stream not present in the catalog") + if (hasCompletedStatus[streamStatus.streamDescriptor] == null) { + throw WorkerException( + "A stream status (${streamStatus.streamDescriptor.namespace}.${streamStatus.streamDescriptor.name}) " + + "has been detected for a stream not present in the catalog", + ) } hasCompletedStatus[streamStatus.streamDescriptor] = true } diff --git a/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/internal/bookkeeping/ParallelStreamStatsTracker.kt b/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/internal/bookkeeping/ParallelStreamStatsTracker.kt index c378c3a9ed2..e14bf77adb5 100644 --- a/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/internal/bookkeeping/ParallelStreamStatsTracker.kt +++ b/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/internal/bookkeeping/ParallelStreamStatsTracker.kt @@ -8,6 +8,7 @@ import io.airbyte.protocol.models.AirbyteEstimateTraceMessage.Type import io.airbyte.protocol.models.AirbyteRecordMessage import io.airbyte.protocol.models.AirbyteStateMessage import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair +import io.airbyte.protocol.models.AirbyteStreamState import io.airbyte.protocol.models.StreamDescriptor import io.airbyte.workers.context.ReplicationFeatureFlags import io.airbyte.workers.general.StateCheckSumCountEventHandler @@ -77,6 +78,7 @@ class ParallelStreamStatsTracker( when (stateMessage.type) { AirbyteStateMessage.AirbyteStateType.GLOBAL -> { stateMessage.global.streamStates.forEach { it -> + logStreamNameIfEnabled(it) val statsTracker = getOrCreateStreamStatsTracker(getNameNamespacePair(it.streamDescriptor)) statsTracker.trackStateFromSource(stateMessage) updateChecksumValidationStatus( @@ -107,6 +109,17 @@ class ParallelStreamStatsTracker( } } + private fun logStreamNameIfEnabled(it: AirbyteStreamState) { + try { + if (stateCheckSumEventHandler.logIncomingStreamNames) { + val nameNamespacePair = getNameNamespacePair(it.streamDescriptor) + logger.info { "Stream in state message ${nameNamespacePair.namespace}.${nameNamespacePair.name} " } + } + } catch (e: Exception) { + logger.error(e) { "Exception while logging stream name" } + } + } + override fun updateDestinationStateStats(stateMessage: AirbyteStateMessage) { val failOnInvalidChecksum = replicationFeatureFlags?.failOnInvalidChecksum ?: false diff --git a/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/internal/syncpersistence/SyncPersistence.kt b/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/internal/syncpersistence/SyncPersistence.kt index 4b2c27df480..8fe57d3bd18 100644 --- a/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/internal/syncpersistence/SyncPersistence.kt +++ b/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/internal/syncpersistence/SyncPersistence.kt @@ -10,6 +10,7 @@ import 
io.airbyte.api.client.model.generated.SaveStatsRequestBody import io.airbyte.commons.converters.StateConverter import io.airbyte.config.SyncStats import io.airbyte.config.helpers.StateMessageHelper +import io.airbyte.featureflag.FeatureFlagClient import io.airbyte.metrics.lib.MetricAttribute import io.airbyte.metrics.lib.MetricClient import io.airbyte.metrics.lib.MetricClientFactory @@ -38,12 +39,12 @@ import kotlin.jvm.optionals.getOrNull interface SyncPersistence : SyncStatsTracker, AutoCloseable { /** - * Persist a state for a given connectionId. + * Buffers a state for a given connectionId for eventual persistence. * * @param connectionId the connection * @param stateMessage stateMessage to persist */ - fun persist( + fun accept( connectionId: UUID, stateMessage: AirbyteStateMessage, ) @@ -65,6 +66,7 @@ class SyncPersistenceImpl @Named("syncPersistenceExecutorService") private val stateFlushExecutorService: ScheduledExecutorService, @Value("\${airbyte.worker.replication.persistence-flush-period-sec}") private val stateFlushPeriodInSeconds: Long, private val metricClient: MetricClient, + private val featureFlagClient: FeatureFlagClient, @param:Parameter private val syncStatsTracker: SyncStatsTracker, @param:Parameter private val connectionId: UUID, @param:Parameter private val workspaceId: UUID, @@ -76,6 +78,7 @@ class SyncPersistenceImpl private var stateFlushFuture: ScheduledFuture<*>? = null private var isReceivingStats = false private var stateToFlush: StateAggregator? = null + private var persistedStats: SaveStatsRequestBody? = null private var statsToPersist: SaveStatsRequestBody? = null private var retryWithJitterConfig: RetryWithJitterConfig? = null @@ -91,6 +94,7 @@ class SyncPersistenceImpl jobId: Long, attemptNumber: Int, catalog: ConfiguredAirbyteCatalog, + featureFlagClient: FeatureFlagClient, ) : this( airbyteApiClient = airbyteApiClient, stateAggregatorFactory = stateAggregatorFactory, @@ -103,12 +107,17 @@ class SyncPersistenceImpl jobId = jobId, attemptNumber = attemptNumber, catalog = catalog, + featureFlagClient = featureFlagClient, ) { this.retryWithJitterConfig = retryWithJitterConfig } + init { + startBackgroundFlushStateTask(connectionId) + } + @Trace - override fun persist( + override fun accept( connectionId: UUID, stateMessage: AirbyteStateMessage, ) { @@ -118,14 +127,9 @@ class SyncPersistenceImpl metricClient.count(OssMetricsRegistry.STATE_BUFFERING, 1) stateBuffer.ingest(stateMessage) - startBackgroundFlushStateTask(connectionId) } private fun startBackgroundFlushStateTask(connectionId: UUID) { - if (stateFlushFuture != null) { - return - } - // Making sure we only start one of background flush task synchronized(this) { if (stateFlushFuture == null) { @@ -256,16 +260,11 @@ class SyncPersistenceImpl stateToFlush?.ingest(stateBufferToFlush) } - // We prepare stats to commit. We generate the payload here to keep track as close as possible to - // the states that are going to be persisted. - // We also only want to generate the stats payload when roll-over state buffers. This is to avoid - // updating the committed data counters ahead of the states because this counter is currently - // decoupled from the state persistence. - // This design favoring accuracy of committed data counters over freshness of emitted data counters. 
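The deleted comment above documents why stats snapshots were tied to state roll-overs (committed-data counters must never run ahead of persisted state); the new persistedStats field keeps writes bounded by instead skipping any flush whose payload equals the last successfully persisted one. A rough sketch of that pattern, with an invented stand-in type for the real SaveStatsRequestBody:

// Sketch: snapshot stats on each state roll-over, but only call the API when the
// snapshot differs from the last one that was persisted successfully.
data class StatsSnapshot(val recordsEmitted: Long, val recordsCommitted: Long)

class StatsFlusher(private val save: (StatsSnapshot) -> Unit) {
    private var statsToPersist: StatsSnapshot? = null
    private var persistedStats: StatsSnapshot? = null

    fun prepare(current: StatsSnapshot) { statsToPersist = current }

    fun flushIfNeeded() {
        val pending = statsToPersist ?: return
        if (pending == persistedStats) return // mirrors hasStatsToFlush(): skip redundant writes
        save(pending)                         // may throw; persistedStats only advances on success
        persistedStats = pending
        statsToPersist = null
    }
}

fun main() {
    val flusher = StatsFlusher { println("saving $it") }
    flusher.prepare(StatsSnapshot(10, 10))
    flusher.flushIfNeeded() // saves
    flusher.prepare(StatsSnapshot(10, 10))
    flusher.flushIfNeeded() // skipped: identical to the last persisted snapshot
}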
- if (isReceivingStats && stateToFlush?.isEmpty() == false) { - // TODO figure out a way to remove the double-bangs - statsToPersist = buildSaveStatsRequest(syncStatsTracker, jobId, attemptNumber, connectionId) + if (!isReceivingStats) { + return } + + statsToPersist = buildSaveStatsRequest(syncStatsTracker, jobId, attemptNumber, connectionId) } private fun doFlushState() { @@ -306,13 +305,14 @@ class SyncPersistenceImpl throw e } + persistedStats = statsToPersist statsToPersist = null metricClient.count(OssMetricsRegistry.STATS_COMMIT_ATTEMPT_SUCCESSFUL, 1) } private fun hasStatesToFlush(): Boolean = !stateBuffer.isEmpty() || stateToFlush != null - private fun hasStatsToFlush(): Boolean = isReceivingStats && statsToPersist != null + private fun hasStatsToFlush(): Boolean = isReceivingStats && statsToPersist != null && statsToPersist != persistedStats override fun updateStats(recordMessage: AirbyteRecordMessage) { isReceivingStats = true @@ -333,6 +333,10 @@ class SyncPersistenceImpl isReceivingStats = true syncStatsTracker.updateDestinationStateStats(stateMessage) } + + override fun endOfReplication(completedSuccessfully: Boolean) { + syncStatsTracker.endOfReplication(completedSuccessfully) + } } private fun isStateEmpty(connectionState: ConnectionState?) = connectionState?.state?.isEmpty ?: false diff --git a/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/workload/WorkloadIdGenerator.kt b/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/workload/WorkloadIdGenerator.kt index 8ce839c9100..df3ef927992 100644 --- a/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/workload/WorkloadIdGenerator.kt +++ b/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/workload/WorkloadIdGenerator.kt @@ -35,6 +35,23 @@ class WorkloadIdGenerator { return "${actorDefinitionId}_${jobId}_${attemptNumber}_discover" } + fun generateDiscoverWorkloadIdV2( + actorId: UUID, + timestampMs: Long, + ): String { + return "${actorId}_${timestampMs}_discover" + } + + fun generateDiscoverWorkloadIdV2WithSnap( + actorId: UUID, + timestampMs: Long, + windowWidthMs: Long, + ): String { + val snapped = timestampMs - (timestampMs % windowWidthMs) + + return generateDiscoverWorkloadIdV2(actorId, snapped) + } + fun generateSpecWorkloadId(differentiator: String): String { return "${differentiator}_spec" } diff --git a/airbyte-commons-worker/src/test/java/io/airbyte/workers/RecordSchemaValidatorTest.java b/airbyte-commons-worker/src/test/java/io/airbyte/workers/RecordSchemaValidatorTest.java index d723ee77e84..cd639c84abc 100644 --- a/airbyte-commons-worker/src/test/java/io/airbyte/workers/RecordSchemaValidatorTest.java +++ b/airbyte-commons-worker/src/test/java/io/airbyte/workers/RecordSchemaValidatorTest.java @@ -6,16 +6,20 @@ import static org.junit.Assert.assertEquals; -import com.google.common.collect.ImmutableMap; +import io.airbyte.commons.resources.MoreResources; import io.airbyte.config.StandardSync; import io.airbyte.persistence.job.models.ReplicationInput; import io.airbyte.protocol.models.AirbyteMessage; +import io.airbyte.protocol.models.AirbyteStream; import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair; +import io.airbyte.protocol.models.Jsons; import io.airbyte.workers.test_utils.AirbyteMessageUtils; import io.airbyte.workers.test_utils.TestConfigHelpers; +import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.List; +import java.util.Map; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import 
java.util.concurrent.Executors; @@ -33,7 +37,7 @@ class RecordSchemaValidatorTest { private static final String FIELD_NAME = "favorite_color"; private static final AirbyteMessage VALID_RECORD = AirbyteMessageUtils.createRecordMessage(STREAM_NAME, FIELD_NAME, "blue"); private static final AirbyteMessage INVALID_RECORD_1 = AirbyteMessageUtils.createRecordMessage(STREAM_NAME, FIELD_NAME, 3); - private static final AirbyteMessage INVALID_RECORD_2 = AirbyteMessageUtils.createRecordMessage(STREAM_NAME, ImmutableMap.of(FIELD_NAME, true)); + private static final AirbyteMessage INVALID_RECORD_2 = AirbyteMessageUtils.createRecordMessage(STREAM_NAME, Map.of(FIELD_NAME, true)); private ConcurrentHashMap, Integer>> validationErrors; private ConcurrentHashMap> uncountedValidationErrors; @@ -96,4 +100,22 @@ void testValidateInvalidSchemaWithoutCounting() throws InterruptedException { assertEquals(2, uncountedValidationErrors.get(AIRBYTE_STREAM_NAME_NAMESPACE_PAIR).size()); } + @Test + void testMigrationOfIdPropertyToEscapedVersion() throws InterruptedException, IOException { + final String jsonSchema = MoreResources.readResource("catalog-json-schema-with-id.json"); + final AirbyteStream airbyteStream = new AirbyteStream().withJsonSchema(Jsons.deserialize(jsonSchema)); + final var executorService = Executors.newFixedThreadPool(1); + final var recordSchemaValidator = + new RecordSchemaValidator(Map.of(AIRBYTE_STREAM_NAME_NAMESPACE_PAIR, airbyteStream.getJsonSchema()), executorService); + final List messagesToValidate = List.of(AirbyteMessageUtils.createRecordMessage(STREAM_NAME, "id", "5")); + + messagesToValidate.forEach(message -> recordSchemaValidator.validateSchemaWithoutCounting( + message.getRecord(), + AIRBYTE_STREAM_NAME_NAMESPACE_PAIR, + uncountedValidationErrors)); + + executorService.awaitTermination(3, TimeUnit.SECONDS); + assertEquals(0, uncountedValidationErrors.size()); + } + } diff --git a/airbyte-commons-worker/src/test/java/io/airbyte/workers/ReplicationInputHydratorTest.java b/airbyte-commons-worker/src/test/java/io/airbyte/workers/ReplicationInputHydratorTest.java index e10a8ab05ae..87d5350eb6c 100644 --- a/airbyte-commons-worker/src/test/java/io/airbyte/workers/ReplicationInputHydratorTest.java +++ b/airbyte-commons-worker/src/test/java/io/airbyte/workers/ReplicationInputHydratorTest.java @@ -7,12 +7,14 @@ import static org.junit.Assert.assertEquals; import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.node.JsonNodeFactory; import io.airbyte.api.client.AirbyteApiClient; import io.airbyte.api.client.generated.ActorDefinitionVersionApi; +import io.airbyte.api.client.generated.AttemptApi; import io.airbyte.api.client.generated.ConnectionApi; import io.airbyte.api.client.generated.DestinationApi; import io.airbyte.api.client.generated.JobsApi; @@ -37,7 +39,9 @@ import io.airbyte.api.client.model.generated.JobStatus; import io.airbyte.api.client.model.generated.ResetConfig; import io.airbyte.api.client.model.generated.ResolveActorDefinitionVersionResponse; +import io.airbyte.api.client.model.generated.SaveStreamAttemptMetadataRequestBody; import io.airbyte.api.client.model.generated.SchemaChangeBackfillPreference; +import io.airbyte.api.client.model.generated.StreamAttemptMetadata; import io.airbyte.api.client.model.generated.StreamDescriptor; import 
io.airbyte.api.client.model.generated.StreamTransform; import io.airbyte.api.client.model.generated.StreamTransformUpdateStream; @@ -56,14 +60,19 @@ import io.airbyte.featureflag.Workspace; import io.airbyte.persistence.job.models.IntegrationLauncherConfig; import io.airbyte.persistence.job.models.JobRunConfig; +import io.airbyte.workers.helper.CatalogDiffConverter; +import io.airbyte.workers.helper.ResumableFullRefreshStatsHelper; import io.airbyte.workers.models.RefreshSchemaActivityOutput; import io.airbyte.workers.models.ReplicationActivityInput; import java.io.IOException; import java.util.List; import java.util.UUID; +import org.assertj.core.api.CollectionAssert; import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.ValueSource; +import org.mockito.ArgumentCaptor; /** * Tests for the replication activity specifically. @@ -134,7 +143,8 @@ class ReplicationInputHydratorTest { }] """)); private static final Long JOB_ID = 123L; - private static final JobRunConfig JOB_RUN_CONFIG = new JobRunConfig().withJobId(JOB_ID.toString()); + private static final Long ATTEMPT_NUMBER = 2L; + private static final JobRunConfig JOB_RUN_CONFIG = new JobRunConfig().withJobId(JOB_ID.toString()).withAttemptId(ATTEMPT_NUMBER); private static final IntegrationLauncherConfig DESTINATION_LAUNCHER_CONFIG = new IntegrationLauncherConfig().withDockerImage("dockerimage:dockertag"); private static final IntegrationLauncherConfig SOURCE_LAUNCHER_CONFIG = new IntegrationLauncherConfig(); @@ -175,12 +185,15 @@ class ReplicationInputHydratorTest { private static FeatureFlagClient featureFlagClient; private SecretsPersistenceConfigApi secretsPersistenceConfigApi; private ActorDefinitionVersionApi actorDefinitionVersionApi; + private AttemptApi attemptApi; private DestinationApi destinationApi; + private ResumableFullRefreshStatsHelper resumableFullRefreshStatsHelper; @BeforeEach void setup() throws IOException { secretsRepositoryReader = mock(SecretsRepositoryReader.class); airbyteApiClient = mock(AirbyteApiClient.class); + attemptApi = mock(AttemptApi.class); connectionApi = mock(ConnectionApi.class); stateApi = mock(StateApi.class); jobsApi = mock(JobsApi.class); @@ -188,8 +201,12 @@ void setup() throws IOException { secretsPersistenceConfigApi = mock(SecretsPersistenceConfigApi.class); actorDefinitionVersionApi = mock(ActorDefinitionVersionApi.class); destinationApi = mock(DestinationApi.class); + resumableFullRefreshStatsHelper = mock(ResumableFullRefreshStatsHelper.class); + when(destinationApi.getBaseUrl()).thenReturn("http://localhost:8001/api"); when(destinationApi.getDestination(any())).thenReturn(DESTINATION_READ); + when(airbyteApiClient.getAttemptApi()).thenReturn(attemptApi); when(airbyteApiClient.getConnectionApi()).thenReturn(connectionApi); + when(airbyteApiClient.getDestinationApi()).thenReturn(destinationApi); when(airbyteApiClient.getStateApi()).thenReturn(stateApi); when(airbyteApiClient.getJobsApi()).thenReturn(jobsApi); when(airbyteApiClient.getSecretPersistenceConfigApi()).thenReturn(secretsPersistenceConfigApi); @@ -199,7 +216,7 @@ void setup() throws IOException { } private ReplicationInputHydrator getReplicationInputHydrator() { - return new ReplicationInputHydrator(airbyteApiClient, secretsRepositoryReader, featureFlagClient); + return new ReplicationInputHydrator(airbyteApiClient, resumableFullRefreshStatsHelper, secretsRepositoryReader, featureFlagClient); } private 
ReplicationActivityInput getDefaultReplicationActivityInputForTest() { @@ -214,7 +231,6 @@ private ReplicationActivityInput getDefaultReplicationActivityInputForTest() { SYNC_RESOURCE_REQUIREMENTS, WORKSPACE_ID, CONNECTION_ID, - false, "unused", false, JobSyncConfig.NamespaceDefinitionType.CUSTOMFORMAT, @@ -289,12 +305,99 @@ void testGenerateReplicationInputHandlesBackfills(final boolean withRefresh) thr mockEnableBackfillForConnection(withRefresh); final ReplicationInputHydrator replicationInputHydrator = getReplicationInputHydrator(); final ReplicationActivityInput input = getDefaultReplicationActivityInputForTest(); - input.setSchemaRefreshOutput(new RefreshSchemaActivityOutput(CATALOG_DIFF)); + input.setSchemaRefreshOutput(new RefreshSchemaActivityOutput(CatalogDiffConverter.toDomain(CATALOG_DIFF))); final var replicationInput = replicationInputHydrator.getHydratedReplicationInput(input); final var typedState = StateMessageHelper.getTypedState(replicationInput.getState().getState()); assertEquals(JsonNodeFactory.instance.nullNode(), typedState.get().getStateMessages().get(0).getStream().getStreamState()); } + @Test + void testTrackBackfillAndResume() throws IOException { + final ReplicationInputHydrator replicationInputHydrator = getReplicationInputHydrator(); + final io.airbyte.config.StreamDescriptor stream1 = new io.airbyte.config.StreamDescriptor().withName("s1").withNamespace("ns1"); + final io.airbyte.config.StreamDescriptor stream2 = new io.airbyte.config.StreamDescriptor().withName("s1"); + final io.airbyte.config.StreamDescriptor stream3 = new io.airbyte.config.StreamDescriptor().withName("s1").withNamespace("ns2"); + final io.airbyte.config.StreamDescriptor stream4 = new io.airbyte.config.StreamDescriptor().withName("s2"); + replicationInputHydrator.trackBackfillAndResume( + 1L, + 2L, + List.of(stream1, stream2, stream4), + List.of(stream1, stream3, stream4)); + + final SaveStreamAttemptMetadataRequestBody expectedRequest = new SaveStreamAttemptMetadataRequestBody( + 1, + 2, + List.of( + new StreamAttemptMetadata("s1", true, true, "ns1"), + new StreamAttemptMetadata("s1", false, true, null), + new StreamAttemptMetadata("s1", true, false, "ns2"), + new StreamAttemptMetadata("s2", true, true, null))); + + ArgumentCaptor captor = ArgumentCaptor.forClass(SaveStreamAttemptMetadataRequestBody.class); + verify(attemptApi).saveStreamMetadata(captor.capture()); + assertEquals(expectedRequest.getJobId(), captor.getValue().getJobId()); + assertEquals(expectedRequest.getAttemptNumber(), captor.getValue().getAttemptNumber()); + CollectionAssert.assertThatCollection(captor.getValue().getStreamMetadata()) + .containsExactlyInAnyOrderElementsOf(expectedRequest.getStreamMetadata()); + } + + @Test + void testTrackBackfillAndResumeWithoutBackfill() throws IOException { + final ReplicationInputHydrator replicationInputHydrator = getReplicationInputHydrator(); + final io.airbyte.config.StreamDescriptor stream1 = new io.airbyte.config.StreamDescriptor().withName("s1").withNamespace("ns1"); + final io.airbyte.config.StreamDescriptor stream2 = new io.airbyte.config.StreamDescriptor().withName("s1"); + final io.airbyte.config.StreamDescriptor stream3 = new io.airbyte.config.StreamDescriptor().withName("s1").withNamespace("ns2"); + final io.airbyte.config.StreamDescriptor stream4 = new io.airbyte.config.StreamDescriptor().withName("s2"); + replicationInputHydrator.trackBackfillAndResume( + 1L, + 2L, + List.of(stream1, stream2, stream4), + null); + + final SaveStreamAttemptMetadataRequestBody 
expectedRequest = new SaveStreamAttemptMetadataRequestBody( + 1, + 2, + List.of( + new StreamAttemptMetadata("s1", false, true, "ns1"), + new StreamAttemptMetadata("s1", false, true, null), + new StreamAttemptMetadata("s2", false, true, null))); + + ArgumentCaptor captor = ArgumentCaptor.forClass(SaveStreamAttemptMetadataRequestBody.class); + verify(attemptApi).saveStreamMetadata(captor.capture()); + assertEquals(expectedRequest.getJobId(), captor.getValue().getJobId()); + assertEquals(expectedRequest.getAttemptNumber(), captor.getValue().getAttemptNumber()); + CollectionAssert.assertThatCollection(captor.getValue().getStreamMetadata()) + .containsExactlyInAnyOrderElementsOf(expectedRequest.getStreamMetadata()); + } + + @Test + void testTrackBackfillAndResumeWithoutResume() throws IOException { + final ReplicationInputHydrator replicationInputHydrator = getReplicationInputHydrator(); + final io.airbyte.config.StreamDescriptor stream1 = new io.airbyte.config.StreamDescriptor().withName("s1").withNamespace("ns1"); + final io.airbyte.config.StreamDescriptor stream3 = new io.airbyte.config.StreamDescriptor().withName("s1").withNamespace("ns2"); + final io.airbyte.config.StreamDescriptor stream4 = new io.airbyte.config.StreamDescriptor().withName("s2"); + replicationInputHydrator.trackBackfillAndResume( + 1L, + 2L, + null, + List.of(stream1, stream3, stream4)); + + final SaveStreamAttemptMetadataRequestBody expectedRequest = new SaveStreamAttemptMetadataRequestBody( + 1, + 2, + List.of( + new StreamAttemptMetadata("s1", true, false, "ns1"), + new StreamAttemptMetadata("s1", true, false, "ns2"), + new StreamAttemptMetadata("s2", true, false, null))); + + ArgumentCaptor captor = ArgumentCaptor.forClass(SaveStreamAttemptMetadataRequestBody.class); + verify(attemptApi).saveStreamMetadata(captor.capture()); + assertEquals(expectedRequest.getJobId(), captor.getValue().getJobId()); + assertEquals(expectedRequest.getAttemptNumber(), captor.getValue().getAttemptNumber()); + CollectionAssert.assertThatCollection(captor.getValue().getStreamMetadata()) + .containsExactlyInAnyOrderElementsOf(expectedRequest.getStreamMetadata()); + } + private void mockEnableFeatureFlagForWorkspace(final Flag flag, final UUID workspaceId) { when(featureFlagClient.boolVariation(flag, new Workspace(workspaceId))).thenReturn(true); } diff --git a/airbyte-commons-worker/src/test/java/io/airbyte/workers/general/DbtTransformationRunnerTest.java b/airbyte-commons-worker/src/test/java/io/airbyte/workers/general/DbtTransformationRunnerTest.java deleted file mode 100644 index 862b19d67e2..00000000000 --- a/airbyte-commons-worker/src/test/java/io/airbyte/workers/general/DbtTransformationRunnerTest.java +++ /dev/null @@ -1,98 +0,0 @@ -/* - * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.workers.general; - -import static io.airbyte.workers.process.Metadata.CUSTOM_STEP; -import static io.airbyte.workers.process.Metadata.JOB_TYPE_KEY; -import static io.airbyte.workers.process.Metadata.SYNC_JOB; -import static io.airbyte.workers.process.Metadata.SYNC_STEP_KEY; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.spy; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; - -import com.google.common.collect.ImmutableMap; -import io.airbyte.commons.constants.WorkerConstants; -import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.workers.config.WorkerConfigsProvider; -import io.airbyte.config.OperatorDbt; -import io.airbyte.config.ResourceRequirements; -import io.airbyte.workers.process.AirbyteIntegrationLauncher; -import io.airbyte.workers.process.ProcessFactory; -import java.io.InputStream; -import java.nio.file.Path; -import java.util.Collections; -import java.util.Map; -import java.util.UUID; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.params.ParameterizedTest; -import org.junit.jupiter.params.provider.CsvSource; -import org.junit.runner.RunWith; -import org.mockito.junit.MockitoJUnitRunner; - -@SuppressWarnings("JavadocMethod") -@RunWith(MockitoJUnitRunner.class) -class DbtTransformationRunnerTest { - - /** - * It is simpler to assert the custom transformation prep image is called with the correct - * arguments. The alternative is to set up an E2E Custom Transformation test, that would, among - * other things require a separate DBT test repo just for this test. - */ - @Test - void configureDbtTest() throws Exception { - final var processFac = mock(ProcessFactory.class); - final var process = mock(Process.class); - - final var connId = UUID.randomUUID(); - final var workspaceId = UUID.randomUUID(); - final var path = Path.of("/"); - final var config = Jsons.emptyObject(); - final var resourceReq = new ResourceRequirements(); - - when(processFac.create( - WorkerConfigsProvider.ResourceType.DEFAULT, - CUSTOM_STEP, "1", 0, connId, workspaceId, path, "airbyte/custom-transformation-prep:1.0", false, false, - ImmutableMap.of(WorkerConstants.DESTINATION_CONFIG_JSON_FILENAME, Jsons.serialize(config)), null, - AirbyteIntegrationLauncher.buildGenericConnectorResourceRequirements(resourceReq), - null, Map.of(JOB_TYPE_KEY, SYNC_JOB, SYNC_STEP_KEY, CUSTOM_STEP), - Collections.emptyMap(), - Collections.emptyMap(), - Collections.emptyMap(), "configure-dbt", "--integration-type", "bigquery", "--config", "destination_config.json", "--git-repo", "test url")) - .thenReturn(process); - - final var inputStream = mock(InputStream.class); - when(process.getInputStream()).thenReturn(inputStream); - when(process.getErrorStream()).thenReturn(inputStream); - when(process.exitValue()).thenReturn(0); - - final var runner = new DbtTransformationRunner(processFac, "airbyte/destination-bigquery:0.1.0"); - final var runnerSpy = spy(runner); - - final var dbtConfig = new OperatorDbt() - .withGitRepoUrl("test url") - .withDockerImage("test image"); - - runnerSpy.configureDbt("1", 0, connId, workspaceId, path, config, resourceReq, dbtConfig); - - // The key pieces to verify: 1) the correct integration type is called 2) the correct repo is passed - // in. 
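An aside on the deleted coverage: the parameterized getAirbyteDestinationNameTest just below pins the image-to-integration-type mapping (airbyte/destination-bigquery:0.1.0 -> bigquery, airbyte/destination-snowflake:0.1.0 -> snowflake). A minimal sketch of a mapping consistent with those cases, assuming image names always follow the airbyte/destination-<name>:<tag> convention (the deleted DbtTransformationRunner.getAirbyteDestinationName may have been implemented differently):

// Hedged sketch, not the deleted implementation: derive the integration type
// from a destination image name such as "airbyte/destination-bigquery:0.1.0".
fun airbyteDestinationName(image: String): String =
    image.substringAfterLast('/') // "destination-bigquery:0.1.0"
        .substringBefore(':') // "destination-bigquery"
        .removePrefix("destination-") // "bigquery"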
- verify(runnerSpy).runConfigureProcess("1", 0, connId, workspaceId, path, Map.of(WorkerConstants.DESTINATION_CONFIG_JSON_FILENAME, "{}"), - resourceReq, "configure-dbt", "--integration-type", "bigquery", "--config", "destination_config.json", "--git-repo", "test url"); - - } - - @ParameterizedTest - @CsvSource({ - "airbyte/destination-bigquery:0.1.0, bigquery", - "airbyte/destination-snowflake:0.1.0, snowflake", - }) - void getAirbyteDestinationNameTest(String image, String expected) { - String name = DbtTransformationRunner.getAirbyteDestinationName(image); - assertEquals(name, expected); - } - -} diff --git a/airbyte-commons-worker/src/test/java/io/airbyte/workers/general/DefaultNormalizationWorkerTest.java b/airbyte-commons-worker/src/test/java/io/airbyte/workers/general/DefaultNormalizationWorkerTest.java deleted file mode 100644 index 32b29c8b315..00000000000 --- a/airbyte-commons-worker/src/test/java/io/airbyte/workers/general/DefaultNormalizationWorkerTest.java +++ /dev/null @@ -1,166 +0,0 @@ -/* - * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.workers.general; - -import static org.junit.jupiter.api.Assertions.assertFalse; -import static org.junit.jupiter.api.Assertions.assertNotNull; -import static org.junit.jupiter.api.Assertions.assertThrows; -import static org.junit.jupiter.api.Assertions.assertTrue; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; - -import io.airbyte.commons.concurrency.VoidCallable; -import io.airbyte.commons.workers.config.WorkerConfigs; -import io.airbyte.config.Configs.WorkerEnvironment; -import io.airbyte.config.EnvConfigs; -import io.airbyte.config.FailureReason.FailureOrigin; -import io.airbyte.config.NormalizationInput; -import io.airbyte.config.NormalizationSummary; -import io.airbyte.config.StandardSync; -import io.airbyte.persistence.job.models.ReplicationInput; -import io.airbyte.protocol.models.AirbyteTraceMessage; -import io.airbyte.workers.exception.WorkerException; -import io.airbyte.workers.normalization.NormalizationRunner; -import io.airbyte.workers.test_utils.AirbyteMessageUtils; -import io.airbyte.workers.test_utils.TestConfigHelpers; -import java.nio.file.Files; -import java.nio.file.Path; -import java.util.UUID; -import java.util.stream.Stream; -import org.apache.commons.lang3.tuple.ImmutablePair; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; - -class DefaultNormalizationWorkerTest { - - private static final String JOB_ID = "0"; - private static final int JOB_ATTEMPT = 0; - private static final UUID CONNECTION_ID = UUID.randomUUID(); - private static final UUID WORKSPACE_ID = UUID.randomUUID(); - private static final Path WORKSPACE_ROOT = Path.of("workspaces/10"); - private static final AirbyteTraceMessage ERROR_TRACE_MESSAGE = - AirbyteMessageUtils.createErrorTraceMessage("a normalization error occurred", 123.0); - - private WorkerConfigs workerConfigs; - private Path jobRoot; - private Path normalizationRoot; - private NormalizationInput normalizationInput; - private NormalizationRunner normalizationRunner; - private VoidCallable onNormalizationRunning; - - @BeforeEach - void setup() throws Exception { - workerConfigs = new WorkerConfigs(new EnvConfigs()); - jobRoot = Files.createDirectories(Files.createTempDirectory("test").resolve(WORKSPACE_ROOT)); - normalizationRoot = jobRoot.resolve("normalize"); - - final ImmutablePair<StandardSync, ReplicationInput> syncPair = TestConfigHelpers.createReplicationConfig(); 
normalizationInput = new NormalizationInput() - .withDestinationConfiguration(syncPair.getValue().getDestinationConfiguration()) - .withCatalog(syncPair.getValue().getCatalog()) - .withResourceRequirements(workerConfigs.getResourceRequirements()) - .withConnectionId(CONNECTION_ID) - .withWorkspaceId(WORKSPACE_ID); - - normalizationRunner = mock(NormalizationRunner.class); - - when(normalizationRunner.normalize( - JOB_ID, - JOB_ATTEMPT, - CONNECTION_ID, - WORKSPACE_ID, - normalizationRoot, - normalizationInput.getDestinationConfiguration(), - normalizationInput.getCatalog(), workerConfigs.getResourceRequirements())) - .thenReturn(true); - - onNormalizationRunning = mock(VoidCallable.class); - } - - @Test - void test() throws Exception { - final DefaultNormalizationWorker normalizationWorker = - new DefaultNormalizationWorker(JOB_ID, JOB_ATTEMPT, normalizationRunner, WorkerEnvironment.DOCKER, onNormalizationRunning); - - final NormalizationSummary normalizationOutput = normalizationWorker.run(normalizationInput, jobRoot); - - verify(normalizationRunner).start(); - verify(onNormalizationRunning).call(); - verify(normalizationRunner).normalize( - JOB_ID, - JOB_ATTEMPT, - CONNECTION_ID, - WORKSPACE_ID, - normalizationRoot, - normalizationInput.getDestinationConfiguration(), - normalizationInput.getCatalog(), workerConfigs.getResourceRequirements()); - verify(normalizationRunner).close(); - assertNotNull(normalizationOutput.getStartTime()); - assertNotNull(normalizationOutput.getEndTime()); - } - - // This test verifies the expected behaviour prior to adding TRACE message handling - // if no TRACE messages are emitted we should throw a WorkerException as before - @Test - void testFailure() throws Exception { - when(normalizationRunner.normalize(JOB_ID, - JOB_ATTEMPT, - CONNECTION_ID, - WORKSPACE_ID, - normalizationRoot, - normalizationInput.getDestinationConfiguration(), - normalizationInput.getCatalog(), workerConfigs.getResourceRequirements())) - .thenReturn(false); - - final DefaultNormalizationWorker normalizationWorker = - new DefaultNormalizationWorker(JOB_ID, JOB_ATTEMPT, normalizationRunner, WorkerEnvironment.DOCKER, () -> {}); - - assertThrows(WorkerException.class, () -> normalizationWorker.run(normalizationInput, jobRoot)); - - verify(normalizationRunner).start(); - } - - // This test verifies failure behaviour when we have TRACE messages emitted from normalization - // instead of throwing an exception, we should return the summary with a non-empty FailureReasons - // array - @Test - void testFailureWithTraceMessage() throws Exception { - when(normalizationRunner.normalize(JOB_ID, - JOB_ATTEMPT, - CONNECTION_ID, - WORKSPACE_ID, - normalizationRoot, - normalizationInput.getDestinationConfiguration(), - normalizationInput.getCatalog(), workerConfigs.getResourceRequirements())) - .thenReturn(false); - - when(normalizationRunner.getTraceMessages()).thenReturn(Stream.of(ERROR_TRACE_MESSAGE)); - - final DefaultNormalizationWorker normalizationWorker = - new DefaultNormalizationWorker(JOB_ID, JOB_ATTEMPT, normalizationRunner, WorkerEnvironment.DOCKER, onNormalizationRunning); - - final NormalizationSummary normalizationOutput = normalizationWorker.run(normalizationInput, jobRoot); - - verify(normalizationRunner).start(); - verify(onNormalizationRunning).call(); - verify(normalizationRunner).normalize( - JOB_ID, - JOB_ATTEMPT, - CONNECTION_ID, - WORKSPACE_ID, - normalizationRoot, - normalizationInput.getDestinationConfiguration(), - normalizationInput.getCatalog(), 
workerConfigs.getResourceRequirements()); - verify(normalizationRunner).close(); - assertNotNull(normalizationOutput.getStartTime()); - assertNotNull(normalizationOutput.getEndTime()); - assertFalse(normalizationOutput.getFailures().isEmpty()); - assertTrue(normalizationOutput.getFailures().stream() - .anyMatch(f -> f.getFailureOrigin().equals(FailureOrigin.NORMALIZATION) - && f.getExternalMessage().contains(ERROR_TRACE_MESSAGE.getError().getMessage()))); - } - -} diff --git a/airbyte-commons-worker/src/test/java/io/airbyte/workers/general/ReplicationWorkerHelperTest.java b/airbyte-commons-worker/src/test/java/io/airbyte/workers/general/ReplicationWorkerHelperTest.java index 4d2f9595ed1..6621772d58c 100644 --- a/airbyte-commons-worker/src/test/java/io/airbyte/workers/general/ReplicationWorkerHelperTest.java +++ b/airbyte-commons-worker/src/test/java/io/airbyte/workers/general/ReplicationWorkerHelperTest.java @@ -24,6 +24,7 @@ import io.airbyte.api.client.model.generated.ResolveActorDefinitionVersionResponse; import io.airbyte.commons.concurrency.VoidCallable; import io.airbyte.commons.converters.ThreadedTimeTracker; +import io.airbyte.config.State; import io.airbyte.persistence.job.models.ReplicationInput; import io.airbyte.protocol.models.AirbyteAnalyticsTraceMessage; import io.airbyte.protocol.models.AirbyteLogMessage; @@ -129,7 +130,8 @@ void testGetReplicationOutput(final boolean supportRefreshes) throws IOException replicationContext, mock(ReplicationFeatureFlags.class), mock(Path.class), - catalog); + catalog, + mock(State.class)); verify(streamStatusCompletionTracker).startTracking(catalog, supportRefreshes); // Need to have a configured catalog for getReplicationOutput replicationWorkerHelper.startDestination( @@ -155,7 +157,8 @@ void testAnalyticsMessageHandling() throws IOException { replicationContext, mock(ReplicationFeatureFlags.class), mock(Path.class), - mock(ConfiguredAirbyteCatalog.class)); + mock(ConfiguredAirbyteCatalog.class), + mock(State.class)); // Need to have a configured catalog for getReplicationOutput replicationWorkerHelper.startDestination( mock(AirbyteDestination.class), @@ -220,7 +223,8 @@ void callsStreamStatusTrackerOnSourceMessage() throws IOException { replicationContext, mock(ReplicationFeatureFlags.class), mock(Path.class), - mock(ConfiguredAirbyteCatalog.class)); + mock(ConfiguredAirbyteCatalog.class), + mock(State.class)); final AirbyteMessage message = mock(AirbyteMessage.class); @@ -237,7 +241,8 @@ void callsStreamStatusTrackerOnDestinationMessage() throws IOException { replicationContext, mock(ReplicationFeatureFlags.class), mock(Path.class), - mock(ConfiguredAirbyteCatalog.class)); + mock(ConfiguredAirbyteCatalog.class), + mock(State.class)); final AirbyteMessage message = mock(AirbyteMessage.class); when(mapper.revertMap(message)).thenReturn(message); diff --git a/airbyte-commons-worker/src/test/java/io/airbyte/workers/helper/BackfillHelperTest.java b/airbyte-commons-worker/src/test/java/io/airbyte/workers/helper/BackfillHelperTest.java index 75ab591ccde..421f2d93d41 100644 --- a/airbyte-commons-worker/src/test/java/io/airbyte/workers/helper/BackfillHelperTest.java +++ b/airbyte-commons-worker/src/test/java/io/airbyte/workers/helper/BackfillHelperTest.java @@ -30,6 +30,9 @@ class BackfillHelperTest { private static final String ANOTHER_STREAM_NAME = "another-stream-name"; private static final String ANOTHER_STREAM_NAMESPACE = "another-stream-namespace"; private static final StreamDescriptor STREAM_DESCRIPTOR = new StreamDescriptor(STREAM_NAME, 
STREAM_NAMESPACE); + private static final io.airbyte.config.StreamDescriptor DOMAIN_STREAM_DESCRIPTOR = new io.airbyte.config.StreamDescriptor() + .withName(STREAM_NAME) + .withNamespace(STREAM_NAMESPACE); private static final StreamDescriptor ANOTHER_STREAM_DESCRIPTOR = new StreamDescriptor(ANOTHER_STREAM_NAME, ANOTHER_STREAM_NAMESPACE); private static final ConfiguredAirbyteStream INCREMENTAL_STREAM = new ConfiguredAirbyteStream() @@ -76,8 +79,8 @@ private static StreamTransform addFieldForStream(final StreamDescriptor streamDe @Test void testGetStreamsToBackfillWithNewColumn() { assertEquals( - List.of(STREAM_DESCRIPTOR), - BackfillHelper.getStreamsToBackfill(SINGLE_STREAM_ADD_COLUMN_DIFF, INCREMENTAL_CATALOG)); + List.of(DOMAIN_STREAM_DESCRIPTOR), + BackfillHelper.getStreamsToBackfill(CatalogDiffConverter.toDomain(SINGLE_STREAM_ADD_COLUMN_DIFF), INCREMENTAL_CATALOG)); } @Test @@ -86,15 +89,15 @@ void testGetStreamsToBackfillExcludesFullRefresh() { // Verify that the second stream is ignored because it's Full Refresh. assertEquals( 1, - BackfillHelper.getStreamsToBackfill(TWO_STREAMS_ADD_COLUMN_DIFF, testCatalog).size()); + BackfillHelper.getStreamsToBackfill(CatalogDiffConverter.toDomain(TWO_STREAMS_ADD_COLUMN_DIFF), testCatalog).size()); assertEquals( - List.of(STREAM_DESCRIPTOR), - BackfillHelper.getStreamsToBackfill(TWO_STREAMS_ADD_COLUMN_DIFF, testCatalog)); + List.of(DOMAIN_STREAM_DESCRIPTOR), + BackfillHelper.getStreamsToBackfill(CatalogDiffConverter.toDomain(TWO_STREAMS_ADD_COLUMN_DIFF), testCatalog)); } @Test void testClearStateForStreamsToBackfill() { - final List<StreamDescriptor> streamsToBackfill = List.of(STREAM_DESCRIPTOR); + final List<io.airbyte.config.StreamDescriptor> streamsToBackfill = List.of(DOMAIN_STREAM_DESCRIPTOR); final State updatedState = BackfillHelper.clearStateForStreamsToBackfill(STATE, streamsToBackfill); assertNotNull(updatedState); final var typedState = StateMessageHelper.getTypedState(updatedState.getState()); diff --git a/airbyte-commons-worker/src/test/java/io/airbyte/workers/internal/syncpersistence/SyncPersistenceImplTest.java b/airbyte-commons-worker/src/test/java/io/airbyte/workers/internal/syncpersistence/SyncPersistenceImplTest.java index 779dac3f15e..246ce1b2ff5 100644 --- a/airbyte-commons-worker/src/test/java/io/airbyte/workers/internal/syncpersistence/SyncPersistenceImplTest.java +++ b/airbyte-commons-worker/src/test/java/io/airbyte/workers/internal/syncpersistence/SyncPersistenceImplTest.java @@ -4,7 +4,6 @@ package io.airbyte.workers.internal.syncpersistence; -import static io.airbyte.protocol.models.AirbyteStateMessage.AirbyteStateType.GLOBAL; import static io.airbyte.protocol.models.AirbyteStateMessage.AirbyteStateType.LEGACY; import static io.airbyte.protocol.models.AirbyteStateMessage.AirbyteStateType.STREAM; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -29,13 +28,15 @@ import io.airbyte.api.client.model.generated.ConnectionStateType; import io.airbyte.api.client.model.generated.StreamState; import io.airbyte.commons.json.Jsons; +import io.airbyte.featureflag.FeatureFlagClient; +import io.airbyte.featureflag.TestClient; import io.airbyte.protocol.models.AirbyteEstimateTraceMessage; -import io.airbyte.protocol.models.AirbyteGlobalState; import io.airbyte.protocol.models.AirbyteRecordMessage; import io.airbyte.protocol.models.AirbyteStateMessage; import io.airbyte.protocol.models.AirbyteStreamState; import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; import io.airbyte.protocol.models.StreamDescriptor; +import
io.airbyte.workers.internal.bookkeeping.ParallelStreamStatsTracker; import io.airbyte.workers.internal.bookkeeping.SyncStatsTracker; import io.airbyte.workers.internal.stateaggregator.StateAggregatorFactory; import java.io.IOException; @@ -52,7 +53,7 @@ class SyncPersistenceImplTest { - private final long flushPeriod = 60; + private final long flushPeriod = 10; private SyncPersistenceImpl syncPersistence; private SyncStatsTracker syncStatsTracker; @@ -67,6 +68,7 @@ class SyncPersistenceImplTest { private Integer attemptNumber; private ConfiguredAirbyteCatalog catalog; private AirbyteApiClient airbyteApiClient; + private FeatureFlagClient ffClient; @BeforeEach void beforeEach() { @@ -76,6 +78,7 @@ void beforeEach() { attemptNumber = (int) (Math.random() * Integer.MAX_VALUE); catalog = mock(ConfiguredAirbyteCatalog.class); airbyteApiClient = mock(AirbyteApiClient.class); + ffClient = mock(TestClient.class); // Setting up an ArgumentCaptor to be able to manually trigger the actual flush method rather than // relying on the ScheduledExecutorService and having to deal with Thread.sleep in the tests. @@ -86,16 +89,17 @@ void beforeEach() { when(executorService.scheduleAtFixedRate(actualFlushMethod.capture(), eq(0L), eq(flushPeriod), eq(TimeUnit.SECONDS))) .thenReturn(mock(ScheduledFuture.class)); - syncStatsTracker = mock(SyncStatsTracker.class); + syncStatsTracker = new ParallelStreamStatsTracker(mock(), mock()); // Setting syncPersistence stateApi = mock(StateApi.class); attemptApi = mock(AttemptApi.class); when(airbyteApiClient.getAttemptApi()).thenReturn(attemptApi); when(airbyteApiClient.getStateApi()).thenReturn(stateApi); + syncPersistence = new SyncPersistenceImpl(airbyteApiClient, new StateAggregatorFactory(), syncStatsTracker, executorService, flushPeriod, new RetryWithJitterConfig(1, 1, 4), - connectionId, workspaceId, jobId, attemptNumber, catalog); + connectionId, workspaceId, jobId, attemptNumber, catalog, ffClient); } @AfterEach @@ -105,10 +109,9 @@ void afterEach() throws Exception { } @Test - void testPersistHappyPath() throws IOException { + void testHappyPath() throws IOException { final AirbyteStateMessage stateA1 = getStreamState("A", 1); - syncPersistence.persist(connectionId, stateA1); - verify(executorService).scheduleAtFixedRate(any(Runnable.class), eq(0L), eq(flushPeriod), eq(TimeUnit.SECONDS)); + syncPersistence.accept(connectionId, stateA1); clearInvocations(executorService, stateApi); // Simulating the expected flush execution @@ -118,8 +121,8 @@ void testPersistHappyPath() throws IOException { final AirbyteStateMessage stateB1 = getStreamState("B", 1); final AirbyteStateMessage stateC2 = getStreamState("C", 2); - syncPersistence.persist(connectionId, stateB1); - syncPersistence.persist(connectionId, stateC2); + syncPersistence.accept(connectionId, stateB1); + syncPersistence.accept(connectionId, stateC2); // This should only happen the first time before we schedule the task verify(stateApi, never()).getState(any()); @@ -139,9 +142,9 @@ void testPersistHappyPath() throws IOException { } @Test - void testPersistWithApiFailures() throws IOException { + void testFlushWithApiFailures() throws IOException { final AirbyteStateMessage stateF1 = getStreamState("F", 1); - syncPersistence.persist(connectionId, stateF1); + syncPersistence.accept(connectionId, stateF1); // Set API call to fail when(stateApi.createOrUpdateState(any())).thenThrow(new IOException()); @@ -153,7 +156,7 @@ void testPersistWithApiFailures() throws IOException { // Adding more states final 
AirbyteStateMessage stateG1 = getStreamState("G", 1); - syncPersistence.persist(connectionId, stateG1); + syncPersistence.accept(connectionId, stateG1); // Flushing again actualFlushMethod.getValue().run(); @@ -162,7 +165,7 @@ void testPersistWithApiFailures() throws IOException { // Adding more states final AirbyteStateMessage stateF2 = getStreamState("F", 2); - syncPersistence.persist(connectionId, stateF2); + syncPersistence.accept(connectionId, stateF2); // Flushing again actualFlushMethod.getValue().run(); @@ -186,25 +189,25 @@ void testPersistWithApiFailures() throws IOException { @Test void testStatsFlushBasicEmissions() throws IOException { syncPersistence.updateStats(new AirbyteRecordMessage()); - syncPersistence.persist(connectionId, getStreamState("a", 1)); + syncPersistence.accept(connectionId, getStreamState("a", 1)); actualFlushMethod.getValue().run(); verify(stateApi).createOrUpdateState(any()); verify(attemptApi).saveStats(any()); clearInvocations(stateApi, attemptApi); - // We should not emit stats if there is no state to persist - syncPersistence.updateStats(new AirbyteRecordMessage()); + // We emit stats even if there is no state to persist + syncPersistence.updateStats(new AirbyteRecordMessage().withStream("stream1")); actualFlushMethod.getValue().run(); verify(stateApi, never()).createOrUpdateState(any()); - verify(attemptApi, never()).saveStats(any()); + verify(attemptApi).saveStats(any()); } @Test void testStatsAreNotPersistedWhenStateFails() throws IOException { // We should not save stats if persist state failed syncPersistence.updateStats(new AirbyteRecordMessage()); - syncPersistence.persist(connectionId, getStreamState("b", 2)); + syncPersistence.accept(connectionId, getStreamState("b", 2)); when(stateApi.createOrUpdateState(any())).thenThrow(new IOException()); actualFlushMethod.getValue().run(); verify(stateApi).createOrUpdateState(any()); @@ -222,7 +225,7 @@ void testStatsAreNotPersistedWhenStateFails() throws IOException { void testStatsFailuresAreRetriedOnFollowingRunsEvenWithoutNewStates() throws IOException { // If we failed to save stats, we should retry on the next schedule even if there were no new states syncPersistence.updateStats(new AirbyteRecordMessage()); - syncPersistence.persist(connectionId, getStreamState("a", 3)); + syncPersistence.accept(connectionId, getStreamState("a", 3)); when(attemptApi.saveStats(any())).thenThrow(new IOException()); actualFlushMethod.getValue().run(); verify(stateApi).createOrUpdateState(any()); @@ -236,12 +239,55 @@ void testStatsFailuresAreRetriedOnFollowingRunsEvenWithoutNewStates() throws IOE verify(attemptApi).saveStats(any()); } + @Test + void startsFlushThreadOnInit() { + // syncPersistence is created and init in the @BeforeEach block + verify(executorService).scheduleAtFixedRate(any(Runnable.class), eq(0L), eq(flushPeriod), eq(TimeUnit.SECONDS)); + } + + @Test + void statsDontPersistIfTheresBeenNoChanges() throws IOException { + // update stats + syncPersistence.updateStats(new AirbyteRecordMessage().withStream("stream1")); + + // stats have updated so we should save + actualFlushMethod.getValue().run(); + verify(attemptApi).saveStats(any()); + clearInvocations(stateApi, attemptApi); + + // stats have NOT updated so we should not save + actualFlushMethod.getValue().run(); + verify(attemptApi, never()).saveStats(any()); + clearInvocations(stateApi, attemptApi); + + // update stats for different stream + syncPersistence.updateStats(new AirbyteRecordMessage().withStream("stream2").withNamespace("other")); + + // 
stats have updated so we should save + actualFlushMethod.getValue().run(); + verify(attemptApi).saveStats(any()); + clearInvocations(stateApi, attemptApi); + + // stats have NOT updated so we should not save + actualFlushMethod.getValue().run(); + verify(attemptApi, never()).saveStats(any()); + clearInvocations(stateApi, attemptApi); + + // update stats for original stream + syncPersistence.updateStats(new AirbyteRecordMessage().withStream("stream1")); + + // stats have updated so we should save + actualFlushMethod.getValue().run(); + verify(attemptApi).saveStats(any()); + clearInvocations(stateApi, attemptApi); + } + @Test void testClose() throws Exception { // Adding a state to flush, this state should get flushed when we close syncPersistence final AirbyteStateMessage stateA2 = getStreamState("A", 2); syncPersistence.updateStats(new AirbyteRecordMessage()); - syncPersistence.persist(connectionId, stateA2); + syncPersistence.accept(connectionId, stateA2); // Shutdown, we expect the executor service to be stopped and the stateApi to be called when(executorService.awaitTermination(anyLong(), any())).thenReturn(true); @@ -255,14 +301,14 @@ void testCloseMergeStatesFromPreviousFailure() throws Exception { // Adding a state to flush, this state should get flushed when we close syncPersistence final AirbyteStateMessage stateA2 = getStreamState("closeA", 2); - syncPersistence.persist(connectionId, stateA2); + syncPersistence.accept(connectionId, stateA2); // Trigger a failure when(stateApi.createOrUpdateState(any())).thenThrow(new IOException()); actualFlushMethod.getValue().run(); final AirbyteStateMessage stateB1 = getStreamState("closeB", 1); - syncPersistence.persist(connectionId, stateB1); + syncPersistence.accept(connectionId, stateB1); // Final flush reset(stateApi); @@ -275,7 +321,7 @@ void testCloseShouldAttemptToRetryFinalFlush() throws Exception { final AirbyteStateMessage state = getStreamState("final retry", 2); syncPersistence.updateStats(new AirbyteRecordMessage()); - syncPersistence.persist(connectionId, state); + syncPersistence.accept(connectionId, state); when(stateApi.createOrUpdateState(any())) .thenReturn(mock(ConnectionState.class)); @@ -294,7 +340,7 @@ void testBadFinalStateFlushThrowsAnException() throws IOException, InterruptedEx final AirbyteStateMessage state = getStreamState("final retry", 2); syncPersistence.updateStats(new AirbyteRecordMessage()); - syncPersistence.persist(connectionId, state); + syncPersistence.accept(connectionId, state); // Final flush when(executorService.awaitTermination(anyLong(), any())).thenReturn(true); @@ -307,7 +353,7 @@ void testBadFinalStatsFlushThrowsAnException() throws IOException, InterruptedEx final AirbyteStateMessage state = getStreamState("final retry", 2); syncPersistence.updateStats(new AirbyteRecordMessage()); - syncPersistence.persist(connectionId, state); + syncPersistence.accept(connectionId, state); // Setup some API failures when(attemptApi.saveStats(any())).thenThrow(new IOException()); @@ -321,7 +367,7 @@ void testBadFinalStatsFlushThrowsAnException() throws IOException, InterruptedEx @Test void testCloseWhenFailBecauseFlushTookTooLong() throws Exception { - syncPersistence.persist(connectionId, getStreamState("oops", 42)); + syncPersistence.accept(connectionId, getStreamState("oops", 42)); // Simulates a flush taking too long to terminate when(executorService.awaitTermination(anyLong(), any())).thenReturn(false); @@ -334,7 +380,7 @@ @Test void testCloseWhenFailBecauseThreadInterrupted() throws Exception { - syncPersistence.persist(connectionId, getStreamState("oops", 42)); + syncPersistence.accept(connectionId, getStreamState("oops", 42)); // Simulates a flush taking too long to terminate when(executorService.awaitTermination(anyLong(), any())).thenThrow(new InterruptedException()); @@ -357,9 +403,9 @@ void testCloseWithPendingFlushShouldCallTheApi() throws Exception { @Test void testPreventMixingDataFromDifferentConnections() { final AirbyteStateMessage message = getStreamState("stream", 5); - syncPersistence.persist(connectionId, message); + syncPersistence.accept(connectionId, message); - assertThrows(IllegalArgumentException.class, () -> syncPersistence.persist(UUID.randomUUID(), message)); + assertThrows(IllegalArgumentException.class, () -> syncPersistence.accept(UUID.randomUUID(), message)); } @Test @@ -367,7 +413,7 @@ void testLegacyStatesAreGettingIntoTheScheduledFlushLogic() throws Exception { final ArgumentCaptor<ConnectionStateCreateOrUpdate> captor = ArgumentCaptor.forClass(ConnectionStateCreateOrUpdate.class); final AirbyteStateMessage message = getLegacyState("myFirstState"); - syncPersistence.persist(connectionId, message); + syncPersistence.accept(connectionId, message); verify(executorService).scheduleAtFixedRate(any(), anyLong(), anyLong(), any()); actualFlushMethod.getValue().run(); @@ -377,8 +423,8 @@ final AirbyteStateMessage otherMessage1 = getLegacyState("myOtherState1"); final AirbyteStateMessage otherMessage2 = getLegacyState("myOtherState2"); - syncPersistence.persist(connectionId, otherMessage1); - syncPersistence.persist(connectionId, otherMessage2); + syncPersistence.accept(connectionId, otherMessage1); + syncPersistence.accept(connectionId, otherMessage2); when(executorService.awaitTermination(anyLong(), any())).thenReturn(true); syncPersistence.close(); verify(stateApi).createOrUpdateState(captor.capture()); @@ -387,6 +433,11 @@ @Test void testSyncStatsTrackerWrapping() { + syncStatsTracker = mock(); + syncPersistence = new SyncPersistenceImpl(airbyteApiClient, new StateAggregatorFactory(), syncStatsTracker, executorService, + flushPeriod, new RetryWithJitterConfig(1, 1, 4), + connectionId, workspaceId, jobId, attemptNumber, catalog, ffClient); + syncPersistence.updateStats(new AirbyteRecordMessage()); verify(syncStatsTracker).updateStats(new AirbyteRecordMessage()); clearInvocations(syncStatsTracker); @@ -530,11 +581,6 @@ private AirbyteStateMessage getStreamState(final String streamName, final int st .withStreamState(Jsons.jsonNode(stateValue))); } - private AirbyteStateMessage getGlobalState(final int stateValue) { - return new AirbyteStateMessage().withType(GLOBAL) - .withGlobal(new AirbyteGlobalState().withSharedState(Jsons.deserialize("{\"globalState\":" + stateValue + "}"))); - } - private AirbyteStateMessage getLegacyState(final String stateValue) { return new AirbyteStateMessage().withType(LEGACY) .withData(Jsons.deserialize("{\"state\":\"" + stateValue + "\"}")); diff --git a/airbyte-commons-worker/src/test/java/io/airbyte/workers/normalization/DefaultNormalizationRunnerTest.java
b/airbyte-commons-worker/src/test/java/io/airbyte/workers/normalization/DefaultNormalizationRunnerTest.java deleted file mode 100644 index 02f622f7280..00000000000 --- a/airbyte-commons-worker/src/test/java/io/airbyte/workers/normalization/DefaultNormalizationRunnerTest.java +++ /dev/null @@ -1,260 +0,0 @@ -/* - * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.workers.normalization; - -import static io.airbyte.commons.logging.LoggingHelper.RESET; -import static io.airbyte.workers.process.Metadata.JOB_TYPE_KEY; -import static io.airbyte.workers.process.Metadata.NORMALIZE_STEP; -import static io.airbyte.workers.process.Metadata.SYNC_JOB; -import static io.airbyte.workers.process.Metadata.SYNC_STEP_KEY; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertFalse; -import static org.junit.jupiter.api.Assertions.assertThrows; -import static org.junit.jupiter.api.Assertions.assertTrue; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.collect.ImmutableMap; -import io.airbyte.commons.constants.WorkerConstants; -import io.airbyte.commons.io.IOs; -import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.logging.LoggingHelper.Color; -import io.airbyte.commons.workers.config.WorkerConfigs; -import io.airbyte.commons.workers.config.WorkerConfigsProvider.ResourceType; -import io.airbyte.config.Configs.WorkerEnvironment; -import io.airbyte.config.EnvConfigs; -import io.airbyte.config.helpers.LogClientSingleton; -import io.airbyte.config.helpers.LogConfigs; -import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; -import io.airbyte.workers.exception.WorkerException; -import io.airbyte.workers.process.AirbyteIntegrationLauncher; -import io.airbyte.workers.process.ConnectorResourceRequirements; -import io.airbyte.workers.process.ProcessFactory; -import java.io.ByteArrayInputStream; -import java.io.IOException; -import java.nio.charset.StandardCharsets; -import java.nio.file.Files; -import java.nio.file.Path; -import java.util.Collections; -import java.util.Map; -import java.util.UUID; -import java.util.stream.Stream; -import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -class DefaultNormalizationRunnerTest { - - private static final Logger LOGGER = LoggerFactory.getLogger(DefaultNormalizationRunnerTest.class); - - private static final String JOB_ID = "0"; - private static final int JOB_ATTEMPT = 0; - private static final UUID CONNECTION_ID = null; - private static final UUID WORKSPACE_ID = null; - - private static final String NORMALIZATION_IMAGE = "airbyte/normalization"; - private static final String NORMALIZATION_TAG = "42.42.42"; - private static final String INTEGRATION_TYPE = "postgres"; - - private static Path logJobRoot; - - static { - try { - logJobRoot = Files.createTempDirectory(Path.of("/tmp"), "mdc_test"); - LogClientSingleton.getInstance().setJobMdc(WorkerEnvironment.DOCKER, LogConfigs.EMPTY, logJobRoot); - } catch (final IOException e) { - LOGGER.error(e.getMessage()); - } - } - - private WorkerConfigs workerConfigs; - private Path jobRoot; - private ProcessFactory processFactory; - private Process process; - private JsonNode config; - private ConfiguredAirbyteCatalog catalog; - - @BeforeEach - void 
setup() throws IOException, WorkerException { - workerConfigs = new WorkerConfigs(new EnvConfigs()); - jobRoot = Files.createDirectories(Files.createTempDirectory("test")); - processFactory = mock(ProcessFactory.class); - process = mock(Process.class); - - config = mock(JsonNode.class); - catalog = mock(ConfiguredAirbyteCatalog.class); - - final Map<String, String> files = ImmutableMap.of( - WorkerConstants.DESTINATION_CONFIG_JSON_FILENAME, Jsons.serialize(config), - WorkerConstants.DESTINATION_CATALOG_JSON_FILENAME, Jsons.serialize(catalog)); - - final ConnectorResourceRequirements expectedResourceRequirements = - AirbyteIntegrationLauncher.buildGenericConnectorResourceRequirements(new WorkerConfigs(new EnvConfigs()).getResourceRequirements()); - when(processFactory.create(ResourceType.NORMALIZATION, NORMALIZE_STEP, JOB_ID, JOB_ATTEMPT, CONNECTION_ID, WORKSPACE_ID, jobRoot, - getTaggedImageName(NORMALIZATION_IMAGE, NORMALIZATION_TAG), false, false, files, null, - expectedResourceRequirements, - null, - Map.of(JOB_TYPE_KEY, SYNC_JOB, SYNC_STEP_KEY, NORMALIZE_STEP), - Map.of(), - Map.of(), - Collections.emptyMap(), "run", - "--integration-type", INTEGRATION_TYPE, - "--config", WorkerConstants.DESTINATION_CONFIG_JSON_FILENAME, - "--catalog", WorkerConstants.DESTINATION_CATALOG_JSON_FILENAME)) - .thenReturn(process); - when(process.getInputStream()).thenReturn(new ByteArrayInputStream("hello".getBytes(StandardCharsets.UTF_8))); - when(process.getErrorStream()).thenReturn(new ByteArrayInputStream("hello".getBytes(StandardCharsets.UTF_8))); - } - - @AfterEach - public void tearDown() throws IOException { - // The log file needs to be present and empty - final Path logFile = logJobRoot.resolve(LogClientSingleton.LOG_FILENAME); - if (Files.exists(logFile)) { - Files.delete(logFile); - } - Files.createFile(logFile); - } - - @Test - void test() throws Exception { - final NormalizationRunner runner = - new DefaultNormalizationRunner(processFactory, getTaggedImageName(NORMALIZATION_IMAGE, NORMALIZATION_TAG), INTEGRATION_TYPE); - - when(process.exitValue()).thenReturn(0); - - assertTrue(runner.normalize(JOB_ID, JOB_ATTEMPT, CONNECTION_ID, WORKSPACE_ID, jobRoot, config, catalog, workerConfigs.getResourceRequirements())); - } - - @Test - void testLog() throws Exception { - - final NormalizationRunner runner = - new DefaultNormalizationRunner(processFactory, getTaggedImageName(NORMALIZATION_IMAGE, NORMALIZATION_TAG), INTEGRATION_TYPE); - - when(process.exitValue()).thenReturn(0); - - assertTrue(runner.normalize(JOB_ID, JOB_ATTEMPT, CONNECTION_ID, WORKSPACE_ID, jobRoot, config, catalog, workerConfigs.getResourceRequirements())); - - final Path logPath = logJobRoot.resolve(LogClientSingleton.LOG_FILENAME); - final Stream<String> logs = IOs.readFile(logPath).lines(); - - logs - .filter(line -> !line.contains("EnvConfigs(getEnvOrDefault)")) - .forEach(line -> { - org.assertj.core.api.Assertions.assertThat(line) - .startsWith(Color.GREEN_BACKGROUND.getCode() + "normalization" + RESET); - }); - } - - @Test - void testClose() throws Exception { - when(process.isAlive()).thenReturn(true).thenReturn(false); - - final NormalizationRunner runner = - new DefaultNormalizationRunner(processFactory, getTaggedImageName(NORMALIZATION_IMAGE, NORMALIZATION_TAG), INTEGRATION_TYPE); - runner.normalize(JOB_ID, JOB_ATTEMPT, CONNECTION_ID, WORKSPACE_ID, jobRoot, config, catalog, workerConfigs.getResourceRequirements()); - runner.close(); - - verify(process).waitFor(); - } - - @Test - void testFailure() throws Exception { -
when(process.exitValue()).thenReturn(1); - - final NormalizationRunner runner = - new DefaultNormalizationRunner(processFactory, getTaggedImageName(NORMALIZATION_IMAGE, NORMALIZATION_TAG), INTEGRATION_TYPE); - assertFalse( - runner.normalize(JOB_ID, JOB_ATTEMPT, CONNECTION_ID, WORKSPACE_ID, jobRoot, config, catalog, workerConfigs.getResourceRequirements())); - - verify(process).waitFor(); - - assertThrows(WorkerException.class, runner::close); - } - - @Test - void testFailureWithTraceMessage() throws Exception { - when(process.exitValue()).thenReturn(1); - - final String errorTraceString = """ - {"type": "TRACE", "trace": { - "type": "ERROR", "emitted_at": 123.0, "error": { - "message": "Something went wrong in normalization.", "internal_message": "internal msg", - "stack_trace": "abc.xyz", "failure_type": "system_error"}}} - """.replace("\n", ""); - when(process.getInputStream()).thenReturn(new ByteArrayInputStream(errorTraceString.getBytes(StandardCharsets.UTF_8))); - - final NormalizationRunner runner = new DefaultNormalizationRunner(processFactory, getTaggedImageName(NORMALIZATION_IMAGE, NORMALIZATION_TAG), - INTEGRATION_TYPE); - assertFalse( - runner.normalize(JOB_ID, JOB_ATTEMPT, CONNECTION_ID, WORKSPACE_ID, jobRoot, config, catalog, workerConfigs.getResourceRequirements())); - - assertEquals(1, runner.getTraceMessages().count()); - - verify(process).waitFor(); - - assertThrows(WorkerException.class, runner::close); - } - - @Test - void testFailureWithDbtError() throws Exception { - when(process.exitValue()).thenReturn(1); - - final String dbtErrorString = """ - [info ] [MainThread]: Completed with 1 error and 0 warnings: - [info ] [MainThread]: - [error] [MainThread]: Database Error in model xyz (models/generated/airbyte_incremental/abc/xyz.sql) - [error] [MainThread]: 1292 (22007): Truncated incorrect DOUBLE value: 'ABC' - [error] [MainThread]: compiled SQL at ../build/run/airbyte_utils/models/generated/airbyte_incremental/abc/xyz.sql - [info ] [MainThread]: - [info ] [MainThread]: Done. PASS=1 WARN=0 ERROR=1 SKIP=0 TOTAL=2 - """; - when(process.getInputStream()).thenReturn(new ByteArrayInputStream(dbtErrorString.getBytes(StandardCharsets.UTF_8))); - - final NormalizationRunner runner = - new DefaultNormalizationRunner(processFactory, getTaggedImageName(NORMALIZATION_IMAGE, NORMALIZATION_TAG), INTEGRATION_TYPE); - assertFalse( - runner.normalize(JOB_ID, JOB_ATTEMPT, CONNECTION_ID, WORKSPACE_ID, jobRoot, config, catalog, workerConfigs.getResourceRequirements())); - - assertEquals(1, runner.getTraceMessages().count()); - - verify(process).waitFor(); - - assertThrows(WorkerException.class, runner::close); - } - - @SuppressWarnings("LineLength") - @Test - void testFailureWithDbtErrorJsonFormat() throws Exception { - when(process.exitValue()).thenReturn(1); - - final String dbtErrorString = - """ - {"code": "Q035", "data": {"description": "table model public.start_products", "execution_time": 0.1729569435119629, "index": 1, "status": "error", "total": 2}, "invocation_id": "6ada8ee5-11c1-4239-8bd0-7e45178217c5", "level": "error", "log_version": 1, "msg": "1 of 2 ERROR creating table model public.start_products................................................................. 
[\\u001b[31mERROR\\u001b[0m in 0.17s]", "node_info": {"materialized": "table", "node_finished_at": null, "node_name": "start_products", "node_path": "generated/airbyte_incremental/public/start_products.sql", "node_started_at": "2022-07-18T15:04:27.036328", "node_status": "compiling", "resource_type": "model", "type": "node_status", "unique_id": "model.airbyte_utils.start_products"}, "pid": 14, "thread_name": "Thread-1", "ts": "2022-07-18T15:04:27.215077Z", "type": "log_line"} - """; - when(process.getInputStream()).thenReturn(new ByteArrayInputStream(dbtErrorString.getBytes(StandardCharsets.UTF_8))); - - final NormalizationRunner runner = - new DefaultNormalizationRunner(processFactory, getTaggedImageName(NORMALIZATION_IMAGE, NORMALIZATION_TAG), INTEGRATION_TYPE); - assertFalse( - runner.normalize(JOB_ID, JOB_ATTEMPT, CONNECTION_ID, WORKSPACE_ID, jobRoot, config, catalog, workerConfigs.getResourceRequirements())); - - assertEquals(1, runner.getTraceMessages().count()); - - verify(process).waitFor(); - - assertThrows(WorkerException.class, runner::close); - } - - static String getTaggedImageName(final String repository, final String tag) { - return repository + ":" + tag; - } - -} diff --git a/airbyte-commons-worker/src/test/java/io/airbyte/workers/process/AirbyteIntegrationLauncherTest.java b/airbyte-commons-worker/src/test/java/io/airbyte/workers/process/AirbyteIntegrationLauncherTest.java index 118e9c1d6b5..c7ece637ca4 100644 --- a/airbyte-commons-worker/src/test/java/io/airbyte/workers/process/AirbyteIntegrationLauncherTest.java +++ b/airbyte-commons-worker/src/test/java/io/airbyte/workers/process/AirbyteIntegrationLauncherTest.java @@ -5,8 +5,6 @@ package io.airbyte.workers.process; import static io.airbyte.workers.process.Metadata.CHECK_JOB; -import static io.airbyte.workers.process.Metadata.CHECK_STEP_KEY; -import static io.airbyte.workers.process.Metadata.CONNECTOR_STEP; import static io.airbyte.workers.process.Metadata.DISCOVER_JOB; import static io.airbyte.workers.process.Metadata.JOB_TYPE_KEY; import static io.airbyte.workers.process.Metadata.READ_STEP; @@ -153,7 +151,7 @@ void check() throws WorkerException { false, false, CONFIG_FILES, null, expectedResourceRequirements, null, - Map.of(JOB_TYPE_KEY, CHECK_JOB, CHECK_STEP_KEY, CONNECTOR_STEP), + Map.of(JOB_TYPE_KEY, CHECK_JOB), JOB_METADATA, Map.of(), Collections.emptyMap(), "check", diff --git a/airbyte-workers/src/test/kotlin/io/airbyte/workers/helper/ResumableFullRefreshStatsHelperTest.kt b/airbyte-commons-worker/src/test/kotlin/io/airbyte/workers/helper/ResumableFullRefreshStatsHelperTest.kt similarity index 67% rename from airbyte-workers/src/test/kotlin/io/airbyte/workers/helper/ResumableFullRefreshStatsHelperTest.kt rename to airbyte-commons-worker/src/test/kotlin/io/airbyte/workers/helper/ResumableFullRefreshStatsHelperTest.kt index e72083ebedb..870a4d74782 100644 --- a/airbyte-workers/src/test/kotlin/io/airbyte/workers/helper/ResumableFullRefreshStatsHelperTest.kt +++ b/airbyte-commons-worker/src/test/kotlin/io/airbyte/workers/helper/ResumableFullRefreshStatsHelperTest.kt @@ -10,8 +10,12 @@ import io.airbyte.config.SyncStats import io.airbyte.persistence.job.models.ReplicationInput import io.airbyte.protocol.models.AirbyteGlobalState import io.airbyte.protocol.models.AirbyteStateMessage +import io.airbyte.protocol.models.AirbyteStream import io.airbyte.protocol.models.AirbyteStreamState +import io.airbyte.protocol.models.ConfiguredAirbyteCatalog +import io.airbyte.protocol.models.ConfiguredAirbyteStream import 
io.airbyte.protocol.models.StreamDescriptor +import io.airbyte.protocol.models.SyncMode import org.junit.jupiter.api.Assertions.assertEquals import org.junit.jupiter.api.Test import org.junit.jupiter.params.ParameterizedTest @@ -47,6 +51,65 @@ class ResumableFullRefreshStatsHelperTest { assertEquals(expected, output) } + @ParameterizedTest + @ValueSource(strings = ["STREAM", "GLOBAL"]) + fun `test get streams with states`(stateType: String) { + val input = + replicationInputWithStates( + StateType.valueOf(stateType), + Stream(namespace = null, name = "s0"), + Stream(namespace = "ns", name = "s1"), + ) + + val expected = + setOf( + io.airbyte.config.StreamDescriptor().withName("s0"), + io.airbyte.config.StreamDescriptor().withName("s1").withNamespace("ns"), + ) + + val streamsWithStates = ResumableFullRefreshStatsHelper().getStreamsWithStates(input.state) + assertEquals(expected, streamsWithStates) + } + + @ParameterizedTest + @ValueSource(strings = ["STREAM", "GLOBAL"]) + fun `test we correctly return only full refresh streams with states`(stateType: String) { + val input = + replicationInputWithStates( + StateType.valueOf(stateType), + Stream(namespace = null, name = "s0"), + Stream(namespace = "ns", name = "s1"), + ) + + val catalog = + ConfiguredAirbyteCatalog().withStreams( + listOf( + ConfiguredAirbyteStream().withSyncMode(SyncMode.FULL_REFRESH).withStream(AirbyteStream().withNamespace(null).withName("s0")), + ConfiguredAirbyteStream().withSyncMode(SyncMode.INCREMENTAL).withStream(AirbyteStream().withNamespace("ns").withName("s1")), + ), + ) + + assertEquals( + setOf(io.airbyte.config.StreamDescriptor().withName("s0")), + ResumableFullRefreshStatsHelper().getResumedFullRefreshStreams(catalog, input.state), + ) + } + + @Test + fun `test empty state is handled correctly when getting full refresh streams`() { + val input = ReplicationInput() + + val catalog = + ConfiguredAirbyteCatalog().withStreams( + listOf( + ConfiguredAirbyteStream().withSyncMode(SyncMode.FULL_REFRESH).withStream(AirbyteStream().withNamespace(null).withName("s0")), + ConfiguredAirbyteStream().withSyncMode(SyncMode.INCREMENTAL).withStream(AirbyteStream().withNamespace("ns").withName("s1")), + ), + ) + + assertEquals(emptySet<io.airbyte.config.StreamDescriptor>(), ResumableFullRefreshStatsHelper().getResumedFullRefreshStreams(catalog, input.state)) + } + @Test fun `test we do not fail if there are no states`() { val input = ReplicationInput() diff --git a/airbyte-commons-worker/src/test/kotlin/io/airbyte/workers/helper/StreamStatusCompletionTrackerTest.kt b/airbyte-commons-worker/src/test/kotlin/io/airbyte/workers/helper/StreamStatusCompletionTrackerTest.kt index 4f1495aed12..55f8fd33ef4 100644 --- a/airbyte-commons-worker/src/test/kotlin/io/airbyte/workers/helper/StreamStatusCompletionTrackerTest.kt +++ b/airbyte-commons-worker/src/test/kotlin/io/airbyte/workers/helper/StreamStatusCompletionTrackerTest.kt @@ -87,6 +87,22 @@ internal class StreamStatusCompletionTrackerTest { ) } + @Test + fun `test that we support multiple completed status`() { + streamStatusCompletionTracker.startTracking(catalog, true) + streamStatusCompletionTracker.track(getStreamStatusCompletedMessage("name1").trace.streamStatus) + streamStatusCompletionTracker.track(getStreamStatusCompletedMessage("name1").trace.streamStatus) + val result = streamStatusCompletionTracker.finalize(0, mapper) + + assertEquals( + listOf( + getStreamStatusCompletedMessage("name1"), + getStreamStatusCompletedMessage("name2", "namespace2"), + ), + result, + ) + } + @Test fun `test that we get no streams
if the exit code is 1 and no stream status is send`() { streamStatusCompletionTracker.startTracking(catalog, true) diff --git a/airbyte-commons-worker/src/test/kotlin/io/airbyte/workers/internal/FieldSelectorTest.kt b/airbyte-commons-worker/src/test/kotlin/io/airbyte/workers/internal/FieldSelectorTest.kt new file mode 100644 index 00000000000..c92df0011c2 --- /dev/null +++ b/airbyte-commons-worker/src/test/kotlin/io/airbyte/workers/internal/FieldSelectorTest.kt @@ -0,0 +1,146 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.workers.internal + +import io.airbyte.commons.json.Jsons +import io.airbyte.protocol.models.AirbyteMessage +import io.airbyte.protocol.models.AirbyteRecordMessage +import io.airbyte.protocol.models.AirbyteStream +import io.airbyte.protocol.models.ConfiguredAirbyteCatalog +import io.airbyte.protocol.models.ConfiguredAirbyteStream +import io.airbyte.workers.RecordSchemaValidator +import io.airbyte.workers.WorkerUtils +import io.mockk.mockk +import org.junit.jupiter.api.Assertions.assertEquals +import org.junit.jupiter.api.Test +import org.junit.jupiter.params.ParameterizedTest +import org.junit.jupiter.params.provider.ValueSource + +internal class FieldSelectorTest { + companion object { + private const val STREAM_NAME = "name" + + private val SCHEMA = + """ + { + "type": ["null", "object"], + "properties": { + "key": {"type": ["null", "string"]}, + "value": {"type": ["null", "string"]} + } + } + """.trimIndent() + + private const val ESCAPED_ID = "\$id" + private val SCHEMA_WITH_ESCAPE = + """ + { + "type": ["null", "object"], + "properties": { + "$ESCAPED_ID": {"type": ["null", "string"]}, + "key": {"type": ["null", "string"]}, + "value": {"type": ["null", "string"]} + } + } + """.trimIndent() + + private val RECORD_WITH_EXTRA = + """ + { + "id": "myId", + "key": "myKey", + "value": "myValue", + "unexpected": "strip me" + } + """.trimIndent() + + private val RECORD_WITHOUT_EXTRA = + """ + { + "key": "myKey", + "value": "myValue" + } + """.trimIndent() + + private val RECORD_WITH_ID_WITHOUT_EXTRA = + """ + { + "id": "myId", + "key": "myKey", + "value": "myValue" + } + """.trimIndent() + } + + @ParameterizedTest + @ValueSource(booleans = [true, false]) + internal fun `test that we filter columns`(fieldSelectionEnabled: Boolean) { + val configuredCatalog = + ConfiguredAirbyteCatalog() + .withStreams( + listOf( + ConfiguredAirbyteStream() + .withStream( + AirbyteStream().withName(STREAM_NAME).withJsonSchema(Jsons.deserialize(SCHEMA)), + ), + ), + ) + + val fieldSelector = createFieldSelector(configuredCatalog, fieldSelectionEnabled = fieldSelectionEnabled) + + val message = createRecord(RECORD_WITH_EXTRA) + fieldSelector.filterSelectedFields(message) + + val expectedMessage = if (fieldSelectionEnabled) createRecord(RECORD_WITHOUT_EXTRA) else createRecord(RECORD_WITH_EXTRA) + assertEquals(expectedMessage, message) + } + + @Test + internal fun `test that escaped properties in schema are still filtered`() { + val configuredCatalog = + ConfiguredAirbyteCatalog() + .withStreams( + listOf( + ConfiguredAirbyteStream() + .withStream( + AirbyteStream().withName(STREAM_NAME).withJsonSchema(Jsons.deserialize(SCHEMA_WITH_ESCAPE)), + ), + ), + ) + + val fieldSelector = createFieldSelector(configuredCatalog, fieldSelectionEnabled = true) + + val message = createRecord(RECORD_WITH_EXTRA) + fieldSelector.filterSelectedFields(message) + + val expectedMessage = createRecord(RECORD_WITH_ID_WITHOUT_EXTRA) + assertEquals(expectedMessage, message) 
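The two tests above capture the contract of field selection: when it is enabled, top-level keys missing from the configured stream's JSON-schema "properties" are stripped from the record, and escaped property names such as "$id" still match their unescaped record fields. A minimal sketch of that filtering step, assuming the record data is a Jackson ObjectNode and the selected field names were precomputed from the schema (the production FieldSelector also wires in schema validation, so this is illustrative only):

import com.fasterxml.jackson.databind.node.ObjectNode

// Hedged sketch: keep only the top-level fields named in selectedFields.
// Assumes selectedFields was built from the stream's JSON-schema "properties",
// with escaped names such as "\$id" already unescaped.
fun filterTopLevelFields(data: ObjectNode, selectedFields: Set<String>) {
    data.retain(selectedFields)
}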
+ } + + private fun createFieldSelector( + configuredCatalog: ConfiguredAirbyteCatalog, + fieldSelectionEnabled: Boolean, + ): FieldSelector { + val schemaValidator = RecordSchemaValidator(WorkerUtils.mapStreamNamesToSchemas(configuredCatalog)) + val fieldSelector = + FieldSelector( + schemaValidator, + mockk(), + fieldSelectionEnabled, + false, + ) + fieldSelector.populateFields(configuredCatalog) + return fieldSelector + } + + private fun createRecord(jsonData: String): AirbyteMessage = + AirbyteMessage() + .withType(AirbyteMessage.Type.RECORD) + .withRecord( + AirbyteRecordMessage() + .withStream(STREAM_NAME) + .withData(Jsons.deserialize(jsonData)), + ) +} diff --git a/airbyte-commons-worker/src/test/kotlin/io/airbyte/workers/workload/WorkloadIdGeneratorTest.kt b/airbyte-commons-worker/src/test/kotlin/io/airbyte/workers/workload/WorkloadIdGeneratorTest.kt index ca4dbd5fe8f..14c16b7cee8 100644 --- a/airbyte-commons-worker/src/test/kotlin/io/airbyte/workers/workload/WorkloadIdGeneratorTest.kt +++ b/airbyte-commons-worker/src/test/kotlin/io/airbyte/workers/workload/WorkloadIdGeneratorTest.kt @@ -9,6 +9,7 @@ import org.junit.jupiter.api.Test import org.junit.jupiter.params.ParameterizedTest import org.junit.jupiter.params.provider.Arguments import org.junit.jupiter.params.provider.MethodSource +import java.time.OffsetDateTime import java.util.UUID import java.util.stream.Stream @@ -43,6 +44,34 @@ class WorkloadIdGeneratorTest { ) } + @ParameterizedTest + @MethodSource("actorTimestampMatrix") + internal fun `the correct v2 workload ID is generated for discover`( + actorId: UUID, + timestampMs: Long, + ) { + val generatedWorkloadId = generator.generateDiscoverWorkloadIdV2(actorId, timestampMs) + assertEquals( + "${actorId}_${timestampMs}_discover", + generatedWorkloadId, + ) + } + + @ParameterizedTest + @MethodSource("windowSnapMatrix") + internal fun `the correct v2 workload ID is generated for discover with snapping behavior`( + timestampMs: Long, + expectedSnappedTimestampMs: Long, + windowWidthMs: Long, + ) { + val actorId = UUID.randomUUID() + val generatedWorkloadId = generator.generateDiscoverWorkloadIdV2WithSnap(actorId, timestampMs, windowWidthMs) + assertEquals( + "${actorId}_${expectedSnappedTimestampMs}_discover", + generatedWorkloadId, + ) + } + @Test internal fun `test that the correct workload ID is generated for specs`() { val jobId = UUID.randomUUID() @@ -77,5 +106,65 @@ class WorkloadIdGeneratorTest { Arguments.of(UUID.randomUUID(), "any string really", 0), ) } + + @JvmStatic + private fun actorTimestampMatrix(): Stream<Arguments> { + return Stream.of( + Arguments.of(UUID.randomUUID(), System.currentTimeMillis() + 12412431L), + Arguments.of(UUID.randomUUID(), 89127421L), + Arguments.of(UUID.randomUUID(), 0), + Arguments.of(UUID.randomUUID(), System.currentTimeMillis()), + Arguments.of(UUID.randomUUID(), System.currentTimeMillis() - 12412431L), + ) + } + + @JvmStatic + private fun windowSnapMatrix(): Stream<Arguments> { + val oneMinMs = 60000 + val tenMinMs = 600000 + val fifteenMinMs = 900000 + val thirtyMinMs = 1800000 + + return Stream.of( + Arguments.of(timestampMs(16, 0, 40), timestampMs(16, 0, 0), oneMinMs), + Arguments.of(timestampMs(15, 59, 59), timestampMs(15, 59, 0), oneMinMs), + Arguments.of(timestampMs(0, 0, 0), timestampMs(0, 0, 0), oneMinMs), + Arguments.of(timestampMs(0, 0, 1), timestampMs(0, 0, 0), oneMinMs), + Arguments.of(timestampMs(16, 0, 40), timestampMs(16, 0, 0), tenMinMs), + Arguments.of(timestampMs(15, 59, 59), timestampMs(15, 50, 0), tenMinMs),
Arguments.of(timestampMs(0, 0, 0), timestampMs(0, 0, 0), tenMinMs), + Arguments.of(timestampMs(0, 0, 1), timestampMs(0, 0, 0), tenMinMs), + Arguments.of(timestampMs(3, 16, 52), timestampMs(3, 10, 0), tenMinMs), + Arguments.of(timestampMs(6, 9, 11), timestampMs(6, 0, 0), tenMinMs), + Arguments.of(timestampMs(16, 0, 40), timestampMs(16, 0, 0), fifteenMinMs), + Arguments.of(timestampMs(15, 59, 59), timestampMs(15, 45, 0), fifteenMinMs), + Arguments.of(timestampMs(0, 0, 0), timestampMs(0, 0, 0), fifteenMinMs), + Arguments.of(timestampMs(0, 0, 1), timestampMs(0, 0, 0), fifteenMinMs), + Arguments.of(timestampMs(3, 16, 52), timestampMs(3, 15, 0), fifteenMinMs), + Arguments.of(timestampMs(6, 9, 11), timestampMs(6, 0, 0), fifteenMinMs), + Arguments.of(timestampMs(6, 39, 11), timestampMs(6, 30, 0), fifteenMinMs), + Arguments.of(timestampMs(16, 0, 40), timestampMs(16, 0, 0), thirtyMinMs), + Arguments.of(timestampMs(15, 59, 59), timestampMs(15, 30, 0), thirtyMinMs), + Arguments.of(timestampMs(0, 0, 0), timestampMs(0, 0, 0), thirtyMinMs), + Arguments.of(timestampMs(0, 0, 1), timestampMs(0, 0, 0), thirtyMinMs), + Arguments.of(timestampMs(3, 16, 52), timestampMs(3, 0, 0), thirtyMinMs), + Arguments.of(timestampMs(6, 9, 11), timestampMs(6, 0, 0), thirtyMinMs), + Arguments.of(timestampMs(6, 39, 11), timestampMs(6, 30, 0), thirtyMinMs), + ) + } + + private fun timestampMs( + hr: Int, + min: Int, + sec: Int, + ): Long { + return OffsetDateTime.now() + .withHour(hr) + .withMinute(min) + .withSecond(sec) + .withNano(0) // zero this out so we don't get remainders + .toInstant() + .toEpochMilli() + } } } diff --git a/airbyte-commons-worker/src/test/resources/catalog-json-schema-with-id.json b/airbyte-commons-worker/src/test/resources/catalog-json-schema-with-id.json new file mode 100644 index 00000000000..d1f1d6d4600 --- /dev/null +++ b/airbyte-commons-worker/src/test/resources/catalog-json-schema-with-id.json @@ -0,0 +1,75 @@ +{ + "type": "object", + "$schema": "http://json-schema.org/draft-07/schema#", + "properties": { + "id": { + "type": "string" + }, + "bot": { + "type": ["null", "object"], + "properties": { + "owner": { + "type": "object", + "properties": { + "info": { + "id": { + "type": ["null", "string"] + }, + "name": { + "type": ["null", "string"] + }, + "object": { + "type": ["null", "string"] + }, + "person": { + "type": ["null", "object"], + "properties": { + "type": { + "type": ["null", "string"] + }, + "email": { + "type": ["null", "string"] + } + } + }, + "avatar_url": { + "type": ["null", "string"] + } + }, + "type": { + "type": "string" + }, + "workspace": { + "type": ["null", "boolean"] + } + } + }, + "workspace_name": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "name": { + "type": ["null", "string"] + }, + "type": { + "enum": ["person", "bot"] + }, + "object": { + "enum": ["user"] + }, + "person": { + "type": ["null", "object"], + "properties": { + "email": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "avatar_url": { + "type": ["null", "string"] + } + } +} diff --git a/airbyte-commons/src/main/java/io/airbyte/commons/logging/LoggingHelper.java b/airbyte-commons/src/main/java/io/airbyte/commons/logging/LoggingHelper.java index d2d7a29cce7..2e212eac81c 100644 --- a/airbyte-commons/src/main/java/io/airbyte/commons/logging/LoggingHelper.java +++ b/airbyte-commons/src/main/java/io/airbyte/commons/logging/LoggingHelper.java @@ -11,9 +11,7 @@ */ public class LoggingHelper { - public static final String 
-  public static final String CUSTOM_TRANSFORMATION_LOGGER_PREFIX = "dbt";
   public static final String DESTINATION_LOGGER_PREFIX = "destination";
-  public static final String NORMALIZATION_LOGGER_PREFIX = "normalization";
   public static final String SOURCE_LOGGER_PREFIX = "source";
   public static final String PLATFORM_LOGGER_PREFIX = "platform";
diff --git a/airbyte-config/config-models/src/main/java/io/airbyte/config/ConfigSchema.java b/airbyte-config/config-models/src/main/java/io/airbyte/config/ConfigSchema.java
index 67ef60855c4..94499ab6c72 100644
--- a/airbyte-config/config-models/src/main/java/io/airbyte/config/ConfigSchema.java
+++ b/airbyte-config/config-models/src/main/java/io/airbyte/config/ConfigSchema.java
@@ -109,8 +109,6 @@ public enum ConfigSchema implements AirbyteConfig {
   // worker
   STANDARD_SYNC_INPUT("StandardSyncInput.yaml", StandardSyncInput.class),
-  NORMALIZATION_INPUT("NormalizationInput.yaml", NormalizationInput.class),
-  OPERATOR_DBT_INPUT("OperatorDbtInput.yaml", OperatorDbtInput.class),
   STANDARD_SYNC_OUTPUT("StandardSyncOutput.yaml", StandardSyncOutput.class),
   REPLICATION_OUTPUT("ReplicationOutput.yaml", ReplicationOutput.class),
   STATE("State.yaml", State.class),
diff --git a/airbyte-config/config-models/src/main/java/io/airbyte/config/helpers/ConnectorRegistryConverters.java b/airbyte-config/config-models/src/main/java/io/airbyte/config/helpers/ConnectorRegistryConverters.java
index 5d18fc8338d..3022c09f518 100644
--- a/airbyte-config/config-models/src/main/java/io/airbyte/config/helpers/ConnectorRegistryConverters.java
+++ b/airbyte-config/config-models/src/main/java/io/airbyte/config/helpers/ConnectorRegistryConverters.java
@@ -9,8 +9,12 @@
 import io.airbyte.config.ActorDefinitionBreakingChange;
 import io.airbyte.config.ActorDefinitionVersion;
 import io.airbyte.config.BreakingChangeScope;
+import io.airbyte.config.ConnectorPackageInfo;
 import io.airbyte.config.ConnectorRegistryDestinationDefinition;
+import io.airbyte.config.ConnectorRegistryEntryGeneratedFields;
+import io.airbyte.config.ConnectorRegistryEntryMetrics;
 import io.airbyte.config.ConnectorRegistrySourceDefinition;
+import io.airbyte.config.SourceFileInfo;
 import io.airbyte.config.StandardDestinationDefinition;
 import io.airbyte.config.StandardSourceDefinition;
 import io.airbyte.config.StandardSourceDefinition.SourceType;
@@ -19,8 +23,10 @@
 import io.airbyte.protocol.models.ConnectorSpecification;
 import jakarta.annotation.Nullable;
 import java.util.Collections;
+import java.util.Date;
 import java.util.List;
 import java.util.Map;
+import java.util.Optional;
 import java.util.UUID;
 import java.util.stream.Collectors;
 
@@ -37,6 +43,11 @@ public static StandardSourceDefinition toStandardSourceDefinition(@Nullable fina
       return null;
     }
 
+    ConnectorRegistryEntryMetrics metrics = Optional.of(def)
+        .map(ConnectorRegistrySourceDefinition::getGenerated)
+        .map(ConnectorRegistryEntryGeneratedFields::getMetrics)
+        .orElse(null);
+
     return new StandardSourceDefinition()
         .withSourceDefinitionId(def.getSourceDefinitionId())
         .withName(def.getName())
@@ -46,6 +57,7 @@ public static StandardSourceDefinition toStandardSourceDefinition(@Nullable fina
         .withTombstone(def.getTombstone())
         .withPublic(def.getPublic())
         .withCustom(def.getCustom())
+        .withMetrics(metrics)
         .withResourceRequirements(def.getResourceRequirements())
         .withMaxSecondsBetweenMessages(def.getMaxSecondsBetweenMessages());
   }
@@ -58,6 +70,11 @@ public static StandardDestinationDefinition toStandardDestinationDefinition(@Nul
       return null;
     }
 
+    ConnectorRegistryEntryMetrics metrics = Optional.of(def)
+        .map(ConnectorRegistryDestinationDefinition::getGenerated)
+        .map(ConnectorRegistryEntryGeneratedFields::getMetrics)
+        .orElse(null);
+
     return new StandardDestinationDefinition()
         .withDestinationDefinitionId(def.getDestinationDefinitionId())
         .withName(def.getName())
@@ -66,6 +83,7 @@ public static StandardDestinationDefinition toStandardDestinationDefinition(@Nul
         .withTombstone(def.getTombstone())
         .withPublic(def.getPublic())
         .withCustom(def.getCustom())
+        .withMetrics(metrics)
         .withResourceRequirements(def.getResourceRequirements());
   }
 
@@ -78,6 +96,17 @@ public static ActorDefinitionVersion toActorDefinitionVersion(@Nullable final Co
       return null;
     }
 
+    Date lastModified = Optional.of(def)
+        .map(ConnectorRegistrySourceDefinition::getGenerated)
+        .map(ConnectorRegistryEntryGeneratedFields::getSourceFileInfo)
+        .map(SourceFileInfo::getMetadataLastModified)
+        .orElse(null);
+
+    String cdkVersion = Optional.of(def)
+        .map(ConnectorRegistrySourceDefinition::getPackageInfo)
+        .map(ConnectorPackageInfo::getCdkVersion)
+        .orElse(null);
+
     validateDockerImageTag(def.getDockerImageTag());
     return new ActorDefinitionVersion()
         .withActorDefinitionId(def.getSourceDefinitionId())
@@ -90,6 +119,8 @@ public static ActorDefinitionVersion toActorDefinitionVersion(@Nullable final Co
         .withReleaseDate(def.getReleaseDate())
         .withSupportLevel(def.getSupportLevel() == null ? SupportLevel.NONE : def.getSupportLevel())
         .withReleaseStage(def.getReleaseStage())
+        .withLastPublished(lastModified)
+        .withCdkVersion(cdkVersion)
         .withSuggestedStreams(def.getSuggestedStreams());
   }
 
@@ -102,6 +133,17 @@ public static ActorDefinitionVersion toActorDefinitionVersion(@Nullable final Co
       return null;
     }
 
+    Date lastModified = Optional.of(def)
+        .map(ConnectorRegistryDestinationDefinition::getGenerated)
+        .map(ConnectorRegistryEntryGeneratedFields::getSourceFileInfo)
+        .map(SourceFileInfo::getMetadataLastModified)
+        .orElse(null);
+
+    String cdkVersion = Optional.of(def)
+        .map(ConnectorRegistryDestinationDefinition::getPackageInfo)
+        .map(ConnectorPackageInfo::getCdkVersion)
+        .orElse(null);
+
     validateDockerImageTag(def.getDockerImageTag());
     return new ActorDefinitionVersion()
         .withActorDefinitionId(def.getDestinationDefinitionId())
@@ -114,8 +156,8 @@ public static ActorDefinitionVersion toActorDefinitionVersion(@Nullable final Co
         .withReleaseDate(def.getReleaseDate())
         .withReleaseStage(def.getReleaseStage())
         .withSupportLevel(def.getSupportLevel() == null ? SupportLevel.NONE : def.getSupportLevel())
-        .withNormalizationConfig(def.getNormalizationConfig())
-        .withSupportsDbt(def.getSupportsDbt())
+        .withLastPublished(lastModified)
+        .withCdkVersion(cdkVersion)
         .withSupportsRefreshes(def.getSupportsRefreshes() != null && def.getSupportsRefreshes());
   }
 
diff --git a/airbyte-config/config-models/src/main/resources/types/ActorDefinitionVersion.yaml b/airbyte-config/config-models/src/main/resources/types/ActorDefinitionVersion.yaml
index e9cce400233..2a6ae08062d 100644
--- a/airbyte-config/config-models/src/main/resources/types/ActorDefinitionVersion.yaml
+++ b/airbyte-config/config-models/src/main/resources/types/ActorDefinitionVersion.yaml
@@ -44,14 +44,16 @@ properties:
     "$ref": AllowedHosts.yaml
   suggestedStreams:
     "$ref": SuggestedStreams.yaml
-  normalizationConfig:
-    "$ref": NormalizationDestinationDefinitionConfig.yaml
-  supportsDbt:
-    type: boolean
-    description: an optional flag indicating whether DBT is used in the normalization. If the flag value is NULL - DBT is not used.
   supportsRefreshes:
     type: boolean
     description: an optional flag indicating whether a destination connector supports refreshes.
     default: false
   supportState:
     "$ref": SupportState.yaml
+  lastPublished:
+    description: The time the connector was modified in the codebase.
+    type: string
+    format: date-time
+  cdkVersion:
+    description: "The version of the CDK that the connector was built with. e.g. python:0.1.0, java:0.1.0"
+    type: string
diff --git a/airbyte-config/config-models/src/main/resources/types/CatalogDiff.yaml b/airbyte-config/config-models/src/main/resources/types/CatalogDiff.yaml
new file mode 100644
index 00000000000..abf68413d23
--- /dev/null
+++ b/airbyte-config/config-models/src/main/resources/types/CatalogDiff.yaml
@@ -0,0 +1,15 @@
+---
+"$schema": http://json-schema.org/draft-07/schema#
+"$id": https://github.com/airbytehq/airbyte/blob/master/airbyte-config/models/src/main/resources/types/CatalogDiff.yaml
+title: CatalogDiff
+type: object
+description: Describes the difference between two Airbyte catalogs.
+additionalProperties: true
+required:
+  - transforms
+properties:
+  transforms:
+    description: list of stream transformations. order does not matter.
+    type: array
+    items:
+      $ref: StreamTransform.yaml
diff --git a/airbyte-config/config-models/src/main/resources/types/ConnectorPackageInfo.yaml b/airbyte-config/config-models/src/main/resources/types/ConnectorPackageInfo.yaml
new file mode 100644
index 00000000000..7d2e3d04caa
--- /dev/null
+++ b/airbyte-config/config-models/src/main/resources/types/ConnectorPackageInfo.yaml
@@ -0,0 +1,9 @@
+---
+"$schema": http://json-schema.org/draft-07/schema#
+"$id": https://github.com/airbytehq/airbyte-platform/blob/main/airbyte-config/config-models/src/main/resources/types/ConnectorPackageInfo.yaml
+title: ConnectorPackageInfo
+description: Information about the contents of the connector image
+type: object
+properties:
+  cdk_version:
+    type: string
diff --git a/airbyte-config/config-models/src/main/resources/types/ConnectorRegistryDestinationDefinition.yaml b/airbyte-config/config-models/src/main/resources/types/ConnectorRegistryDestinationDefinition.yaml
index 9f5db0a8e03..20337ac1090 100644
--- a/airbyte-config/config-models/src/main/resources/types/ConnectorRegistryDestinationDefinition.yaml
+++ b/airbyte-config/config-models/src/main/resources/types/ConnectorRegistryDestinationDefinition.yaml
@@ -61,11 +61,6 @@ properties:
   protocolVersion:
     type: string
     description: the Airbyte Protocol version supported by the connector
-  normalizationConfig:
-    "$ref": NormalizationDestinationDefinitionConfig.yaml
-  supportsDbt:
-    type: boolean
-    description: an optional flag indicating whether DBT is used in the normalization. If the flag value is NULL - DBT is not used.
   allowedHosts:
     "$ref": AllowedHosts.yaml
   releases:
@@ -73,3 +68,7 @@
   supportsRefreshes:
     type: boolean
     description: an optional flag indicating whether the refresh operation is available for this destination.
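+  # generated carries registry-produced metadata (git commit info, source file info,
+  # metrics); packageInfo describes the connector image contents such as the CDK version.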
+  generated:
+    "$ref": ConnectorRegistryEntryGeneratedFields.yaml
+  packageInfo:
+    "$ref": ConnectorPackageInfo.yaml
diff --git a/airbyte-config/config-models/src/main/resources/types/ConnectorRegistryEntryGeneratedFields.yaml b/airbyte-config/config-models/src/main/resources/types/ConnectorRegistryEntryGeneratedFields.yaml
new file mode 100644
index 00000000000..7049903bfbe
--- /dev/null
+++ b/airbyte-config/config-models/src/main/resources/types/ConnectorRegistryEntryGeneratedFields.yaml
@@ -0,0 +1,40 @@
+---
+"$schema": http://json-schema.org/draft-07/schema#
+"$id": https://github.com/airbytehq/airbyte-platform/blob/main/airbyte-config/config-models/src/main/resources/types/ConnectorRegistryEntryGeneratedFields.yaml
+title: ConnectorRegistryEntryGeneratedFields
+description: Optional schema for fields generated as metadata is processed
+type: object
+properties:
+  git:
+    type: object
+    additionalProperties: true
+    properties:
+      commit_sha:
+        type: string
+        description: The git commit sha of the last commit that modified this file.
+      commit_timestamp:
+        type: string
+        format: date-time
+        description: The git commit timestamp of the last commit that modified this file.
+      commit_author:
+        type: string
+        description: The git commit author of the last commit that modified this file.
+      commit_author_email:
+        type: string
+        description: The git commit author email of the last commit that modified this file.
+  source_file_info:
+    type: object
+    additionalProperties: true
+    properties:
+      metadata_etag:
+        type: string
+      metadata_file_path:
+        type: string
+      metadata_bucket_name:
+        type: string
+      metadata_last_modified:
+        format: date-time
+      registry_entry_generated_at:
+        format: date-time
+  metrics:
+    "$ref": ConnectorRegistryEntryMetrics.yaml
diff --git a/airbyte-config/config-models/src/main/resources/types/ConnectorRegistryEntryMetrics.yaml b/airbyte-config/config-models/src/main/resources/types/ConnectorRegistryEntryMetrics.yaml
new file mode 100644
index 00000000000..9fa98da003d
--- /dev/null
+++ b/airbyte-config/config-models/src/main/resources/types/ConnectorRegistryEntryMetrics.yaml
@@ -0,0 +1,7 @@
+---
+"$schema": http://json-schema.org/draft-07/schema#
+"$id": https://github.com/airbytehq/airbyte-platform/blob/main/airbyte-config/config-models/src/main/resources/types/ConnectorRegistryEntryMetrics.yaml
+title: ConnectorRegistryEntryMetrics
+description: Public metrics for a given Connector from the registry (unstable)
+type: object
+additionalProperties: true
diff --git a/airbyte-config/config-models/src/main/resources/types/ConnectorRegistrySourceDefinition.yaml b/airbyte-config/config-models/src/main/resources/types/ConnectorRegistrySourceDefinition.yaml
index b4381407a62..c93e08c2892 100644
--- a/airbyte-config/config-models/src/main/resources/types/ConnectorRegistrySourceDefinition.yaml
+++ b/airbyte-config/config-models/src/main/resources/types/ConnectorRegistrySourceDefinition.yaml
@@ -77,3 +77,7 @@ properties:
     type: integer
   releases:
     "$ref": ConnectorReleases.yaml
+  generated:
+    "$ref": ConnectorRegistryEntryGeneratedFields.yaml
+  packageInfo:
+    "$ref": ConnectorPackageInfo.yaml
diff --git a/airbyte-config/config-models/src/main/resources/types/FailureReason.yaml b/airbyte-config/config-models/src/main/resources/types/FailureReason.yaml
index cd5c5fbfe45..bb15b508a37 100644
--- a/airbyte-config/config-models/src/main/resources/types/FailureReason.yaml
+++ b/airbyte-config/config-models/src/main/resources/types/FailureReason.yaml
@@ -15,8 +15,6 @@ properties:
       - destination
       - replication
       - persistence
-      - normalization
-      - dbt
       - airbyte_platform
       - unknown
   failureType:
diff --git a/airbyte-config/config-models/src/main/resources/types/FieldName.yaml b/airbyte-config/config-models/src/main/resources/types/FieldName.yaml
new file mode 100644
index 00000000000..2fa1ef5c1e9
--- /dev/null
+++ b/airbyte-config/config-models/src/main/resources/types/FieldName.yaml
@@ -0,0 +1,9 @@
+---
+"$schema": http://json-schema.org/draft-07/schema#
+"$id": https://github.com/airbytehq/airbyte/blob/master/airbyte-config/models/src/main/resources/types/FieldName.yaml
+title: FieldName
+type: array
+additionalProperties: true
+description: A field name is a list of strings that form the path to the field.
+items:
+  type: string
diff --git a/airbyte-config/config-models/src/main/resources/types/FieldSchemaUpdate.yaml b/airbyte-config/config-models/src/main/resources/types/FieldSchemaUpdate.yaml
new file mode 100644
index 00000000000..1eaf02b73ad
--- /dev/null
+++ b/airbyte-config/config-models/src/main/resources/types/FieldSchemaUpdate.yaml
@@ -0,0 +1,16 @@
+---
+"$schema": http://json-schema.org/draft-07/schema#
+"$id": https://github.com/airbytehq/airbyte/blob/master/airbyte-config/models/src/main/resources/types/FieldSchemaUpdate.yaml
+title: FieldSchemaUpdate
+type: object
+additionalProperties: true
+required:
+  - oldSchema
+  - newSchema
+properties:
+  oldSchema:
+    type: object
+    existingJavaType: com.fasterxml.jackson.databind.JsonNode
+  newSchema:
+    type: object
+    existingJavaType: com.fasterxml.jackson.databind.JsonNode
diff --git a/airbyte-config/config-models/src/main/resources/types/FieldTransform.yaml b/airbyte-config/config-models/src/main/resources/types/FieldTransform.yaml
new file mode 100644
index 00000000000..7d0b19d87b2
--- /dev/null
+++ b/airbyte-config/config-models/src/main/resources/types/FieldTransform.yaml
@@ -0,0 +1,33 @@
+---
+"$schema": http://json-schema.org/draft-07/schema#
+"$id": https://github.com/airbytehq/airbyte/blob/master/airbyte-config/models/src/main/resources/types/FieldTransform.yaml
+title: FieldTransform
+type: object
+additionalProperties: true
+description: "Describes the difference in a field between two Streams."
+required:
+  - transformType
+  - fieldName
+  - breaking
+properties:
+  transformType:
+    type: string
+    enum:
+      - add_field
+      - remove_field
+      - update_field_schema
+  fieldName:
+    description: A field name is a list of strings that form the path to the field.
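+    # e.g. ["address", "city"] points at the nested field address.city (illustrative example)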
+    type: array
+    items:
+      type: string
+  breaking:
+    type: boolean
+  addField:
+    type: object
+    existingJavaType: com.fasterxml.jackson.databind.JsonNode
+  removeField:
+    type: object
+    existingJavaType: com.fasterxml.jackson.databind.JsonNode
+  updateFieldSchema:
+    $ref: FieldSchemaUpdate.yaml
diff --git a/airbyte-config/config-models/src/main/resources/types/NormalizationDestinationDefinitionConfig.yaml b/airbyte-config/config-models/src/main/resources/types/NormalizationDestinationDefinitionConfig.yaml
deleted file mode 100644
index 69aacfa2dea..00000000000
--- a/airbyte-config/config-models/src/main/resources/types/NormalizationDestinationDefinitionConfig.yaml
+++ /dev/null
@@ -1,21 +0,0 @@
----
-"$schema": http://json-schema.org/draft-07/schema#
-"$id": https://github.com/airbytehq/airbyte/blob/master/airbyte-config/models/src/main/resources/types/NormalizationDestinationDefinitionConfig.yaml
-title: NormalizationDestinationDefinitionConfig
-description: describes a normalization config for destination definition
-type: object
-required:
-  - normalizationRepository
-  - normalizationTag
-  - normalizationIntegrationType
-additionalProperties: true
-properties:
-  normalizationRepository:
-    type: string
-    description: a field indicating the name of the repository to be used for normalization. If the value of the flag is NULL - normalization is not used.
-  normalizationTag:
-    type: string
-    description: a field indicating the tag of the docker repository to be used for normalization.
-  normalizationIntegrationType:
-    type: string
-    description: a field indicating the type of integration dialect to use for normalization.
diff --git a/airbyte-config/config-models/src/main/resources/types/NormalizationInput.yaml b/airbyte-config/config-models/src/main/resources/types/NormalizationInput.yaml
deleted file mode 100644
index bf04e318640..00000000000
--- a/airbyte-config/config-models/src/main/resources/types/NormalizationInput.yaml
+++ /dev/null
@@ -1,35 +0,0 @@
----
-"$schema": http://json-schema.org/draft-07/schema#
-"$id": https://github.com/airbytehq/airbyte/blob/master/airbyte-config/models/src/main/resources/types/NormalizationInput.yaml
-title: NormalizationInput
-description: job normalization config
-type: object
-additionalProperties: true
-required:
-  - destinationConfiguration
-properties:
-  destinationConfiguration:
-    description: Integration specific blob. Must be a valid JSON string.
-    type: object
-    existingJavaType: com.fasterxml.jackson.databind.JsonNode
-  catalog:
-    # NOTE: we may leave the catalog null when we invoke the normalization workflow activity, and only hydrate it after
-    description: the configured airbyte catalog. this version of the catalog represents the schema of the data in json blobs in the raw tables.
-    type: object
-    existingJavaType: io.airbyte.protocol.models.ConfiguredAirbyteCatalog
-  connectionId:
-    description: The id of the connection associated with the normalization
-    type: string
-    format: uuid
-  resourceRequirements:
-    type: object
-    description: optional resource requirements to run sync workers
-    existingJavaType: io.airbyte.config.ResourceRequirements
-  workspaceId:
-    description: The id of the workspace associated with this sync
-    type: string
-    format: uuid
-  connectionContext:
-    description: Context object with IDs of the relevant connection, source, destination, etc.
-    type: object
-    "$ref": ConnectionContext.yaml
diff --git a/airbyte-config/config-models/src/main/resources/types/NormalizationSummary.yaml b/airbyte-config/config-models/src/main/resources/types/NormalizationSummary.yaml
deleted file mode 100644
index 3e529d8055f..00000000000
--- a/airbyte-config/config-models/src/main/resources/types/NormalizationSummary.yaml
+++ /dev/null
@@ -1,19 +0,0 @@
----
-"$schema": http://json-schema.org/draft-07/schema#
-"$id": https://github.com/airbytehq/airbyte/blob/master/airbyte-config/models/src/main/resources/types/NormalizationSummary.yaml
-title: NormalizationSummary
-description: information output by syncs for which a normalization step was performed
-type: object
-required:
-  - startTime
-  - endTime
-additionalProperties: true
-properties:
-  startTime:
-    type: integer
-  endTime:
-    type: integer
-  failures:
-    type: array
-    items:
-      "$ref": FailureReason.yaml
diff --git a/airbyte-config/config-models/src/main/resources/types/OperatorDbt.yaml b/airbyte-config/config-models/src/main/resources/types/OperatorDbt.yaml
deleted file mode 100644
index d2986e03095..00000000000
--- a/airbyte-config/config-models/src/main/resources/types/OperatorDbt.yaml
+++ /dev/null
@@ -1,18 +0,0 @@
----
-"$schema": http://json-schema.org/draft-07/schema#
-"$id": https://github.com/airbytehq/airbyte/blob/master/airbyte-config/models/src/main/resources/types/OperatorDbt.yaml
-title: OperatorDbt
-description: Settings for a DBT operator
-type: object
-required:
-  - gitRepoUrl
-additionalProperties: true
-properties:
-  gitRepoUrl:
-    type: string
-  gitRepoBranch:
-    type: string
-  dockerImage:
-    type: string
-  dbtArguments:
-    type: string
diff --git a/airbyte-config/config-models/src/main/resources/types/OperatorDbtInput.yaml b/airbyte-config/config-models/src/main/resources/types/OperatorDbtInput.yaml
deleted file mode 100644
index a76d0a78747..00000000000
--- a/airbyte-config/config-models/src/main/resources/types/OperatorDbtInput.yaml
+++ /dev/null
@@ -1,29 +0,0 @@
----
-"$schema": http://json-schema.org/draft-07/schema#
-"$id": https://github.com/airbytehq/airbyte/blob/master/airbyte-config/models/src/main/resources/types/OperatorDbtInput.yaml
-title: Operator Dbt Input
-description: Input configuration for DBT Transformation operator
-type: object
-additionalProperties: true
-required:
-  - destinationConfiguration
-  - operatorDbt
-properties:
-  connectionId:
-    description: The id of the connection associated with the dbt transformation.
-    type: string
-    format: uuid
-  workspaceId:
-    description: The id of the workspace associated with the dbt transformation.
-    type: string
-    format: uuid
-  destinationConfiguration:
-    description: Integration specific blob. Must be a valid JSON string.
-    type: object
-    existingJavaType: com.fasterxml.jackson.databind.JsonNode
-  operatorDbt:
-    "$ref": OperatorDbt.yaml
-  connectionContext:
-    description: Context object with IDs of the relevant connection, source, destination, etc.
-    type: object
-    "$ref": ConnectionContext.yaml
diff --git a/airbyte-config/config-models/src/main/resources/types/OperatorNormalization.yaml b/airbyte-config/config-models/src/main/resources/types/OperatorNormalization.yaml
deleted file mode 100644
index 6b972138181..00000000000
--- a/airbyte-config/config-models/src/main/resources/types/OperatorNormalization.yaml
+++ /dev/null
@@ -1,13 +0,0 @@
----
-"$schema": http://json-schema.org/draft-07/schema#
-"$id": https://github.com/airbytehq/airbyte/blob/master/airbyte-config/models/src/main/resources/types/OperatorNormalization.yaml
-title: OperatorNormalization
-description: Settings for a normalization operator
-type: object
-additionalProperties: true
-properties:
-  option:
-    type: string
-    enum:
-      - basic
-      #- unnesting
diff --git a/airbyte-config/config-models/src/main/resources/types/OperatorType.yaml b/airbyte-config/config-models/src/main/resources/types/OperatorType.yaml
index b31bf194121..c8997d3ef7a 100644
--- a/airbyte-config/config-models/src/main/resources/types/OperatorType.yaml
+++ b/airbyte-config/config-models/src/main/resources/types/OperatorType.yaml
@@ -5,7 +5,6 @@ title: OperatorType
 description: Type of Operator
 type: string
 enum:
-  # - destination
   - normalization
   - dbt
   - webhook
diff --git a/airbyte-config/config-models/src/main/resources/types/StandardDestinationDefinition.yaml b/airbyte-config/config-models/src/main/resources/types/StandardDestinationDefinition.yaml
index 5a141eb6887..dc426697108 100644
--- a/airbyte-config/config-models/src/main/resources/types/StandardDestinationDefinition.yaml
+++ b/airbyte-config/config-models/src/main/resources/types/StandardDestinationDefinition.yaml
@@ -36,3 +36,5 @@ properties:
     default: false
   resourceRequirements:
     "$ref": ActorDefinitionResourceRequirements.yaml
+  metrics:
+    "$ref": ConnectorRegistryEntryMetrics.yaml
diff --git a/airbyte-config/config-models/src/main/resources/types/StandardSourceDefinition.yaml b/airbyte-config/config-models/src/main/resources/types/StandardSourceDefinition.yaml
index aebb7bec74a..6821fe0a8b5 100644
--- a/airbyte-config/config-models/src/main/resources/types/StandardSourceDefinition.yaml
+++ b/airbyte-config/config-models/src/main/resources/types/StandardSourceDefinition.yaml
@@ -46,3 +46,5 @@ properties:
   maxSecondsBetweenMessages:
     description: Number of seconds allowed between 2 airbyte protocol messages. The source will timeout if this delay is reached
     type: integer
+  metrics:
+    "$ref": ConnectorRegistryEntryMetrics.yaml
diff --git a/airbyte-config/config-models/src/main/resources/types/StandardSyncInput.yaml b/airbyte-config/config-models/src/main/resources/types/StandardSyncInput.yaml
index 33527acf726..8d50c48507d 100644
--- a/airbyte-config/config-models/src/main/resources/types/StandardSyncInput.yaml
+++ b/airbyte-config/config-models/src/main/resources/types/StandardSyncInput.yaml
@@ -56,10 +56,6 @@ properties:
     description: The id of the connection associated with this sync
     type: string
     format: uuid
-  normalizeInDestinationContainer:
-    description: whether normalization should be run in the destination container
-    type: boolean
-    default: false
   isReset:
     description: whether this 'sync' is performing a logical reset
     type: boolean
diff --git a/airbyte-config/config-models/src/main/resources/types/StandardSyncOperation.yaml b/airbyte-config/config-models/src/main/resources/types/StandardSyncOperation.yaml
index 935daeff1a1..c2e337c2eff 100644
--- a/airbyte-config/config-models/src/main/resources/types/StandardSyncOperation.yaml
+++ b/airbyte-config/config-models/src/main/resources/types/StandardSyncOperation.yaml
@@ -20,10 +20,6 @@ properties:
   # the jsonschema2pojo does not seem to support it yet: https://github.com/joelittlejohn/jsonschema2pojo/issues/392
   operatorType:
     "$ref": OperatorType.yaml
-  operatorNormalization:
-    "$ref": OperatorNormalization.yaml
-  operatorDbt:
-    "$ref": OperatorDbt.yaml
   operatorWebhook:
     "$ref": OperatorWebhook.yaml
   tombstone:
diff --git a/airbyte-config/config-models/src/main/resources/types/StandardSyncOutput.yaml b/airbyte-config/config-models/src/main/resources/types/StandardSyncOutput.yaml
index 16516c32756..c176810ef41 100644
--- a/airbyte-config/config-models/src/main/resources/types/StandardSyncOutput.yaml
+++ b/airbyte-config/config-models/src/main/resources/types/StandardSyncOutput.yaml
@@ -10,8 +10,6 @@ required:
 properties:
   standardSyncSummary:
     "$ref": StandardSyncSummary.yaml
-  normalizationSummary:
-    "$ref": NormalizationSummary.yaml
   webhookOperationSummary:
     "$ref": WebhookOperationSummary.yaml
   failures:
diff --git a/airbyte-config/config-models/src/main/resources/types/StreamAttributePrimaryKeyUpdate.yaml b/airbyte-config/config-models/src/main/resources/types/StreamAttributePrimaryKeyUpdate.yaml
new file mode 100644
index 00000000000..d30e4c4ada0
--- /dev/null
+++ b/airbyte-config/config-models/src/main/resources/types/StreamAttributePrimaryKeyUpdate.yaml
@@ -0,0 +1,15 @@
+---
+"$schema": http://json-schema.org/draft-07/schema#
+"$id": https://github.com/airbytehq/airbyte/blob/master/airbyte-config/models/src/main/resources/types/StreamAttributePrimaryKeyUpdate.yaml
+title: StreamAttributePrimaryKeyUpdate
+type: object
+additionalProperties: true
+properties:
+  oldPrimaryKey:
+    type: array
+    items:
+      $ref: FieldName.yaml
+  newPrimaryKey:
+    type: array
+    items:
+      $ref: FieldName.yaml
diff --git a/airbyte-config/config-models/src/main/resources/types/StreamAttributeTransform.yaml b/airbyte-config/config-models/src/main/resources/types/StreamAttributeTransform.yaml
new file mode 100644
index 00000000000..7f7f6068f8b
--- /dev/null
+++ b/airbyte-config/config-models/src/main/resources/types/StreamAttributeTransform.yaml
@@ -0,0 +1,19 @@
+---
+"$schema": http://json-schema.org/draft-07/schema#
+"$id": https://github.com/airbytehq/airbyte/blob/master/airbyte-config/models/src/main/resources/types/StreamAttributeTransform.yaml
+title: StreamAttributeTransform
+type: object
+additionalProperties: true
+description: "Describes the difference in an attribute between two Streams."
+required:
+  - transformType
+  - breaking
+properties:
+  transformType:
+    type: string
+    enum:
+      - update_primary_key
+  breaking:
+    type: boolean
+  updatePrimaryKey:
+    $ref: StreamAttributePrimaryKeyUpdate.yaml
diff --git a/airbyte-config/config-models/src/main/resources/types/StreamTransform.yaml b/airbyte-config/config-models/src/main/resources/types/StreamTransform.yaml
new file mode 100644
index 00000000000..5b84359eef4
--- /dev/null
+++ b/airbyte-config/config-models/src/main/resources/types/StreamTransform.yaml
@@ -0,0 +1,34 @@
+---
+"$schema": http://json-schema.org/draft-07/schema#
+"$id": https://github.com/airbytehq/airbyte/blob/master/airbyte-config/models/src/main/resources/types/StreamTransform.yaml
+title: StreamTransform
+type: object
+additionalProperties: true
+required:
+  - transformType
+  - streamDescriptor
+properties:
+  transformType:
+    type: string
+    enum:
+      - add_stream
+      - remove_stream
+      - update_stream
+  streamDescriptor:
+    $ref: StreamDescriptor.yaml
+  updateStream:
+    type: object
+    required:
+      - fieldTransforms
+      - streamAttributeTransforms
+    properties:
+      fieldTransforms:
+        type: array
+        description: list of field transformations. order does not matter.
+        items:
+          $ref: FieldTransform.yaml
+      streamAttributeTransforms:
+        type: array
+        description: list of stream attribute transformations. order does not matter.
+        items:
+          $ref: StreamAttributeTransform.yaml
diff --git a/airbyte-config/config-models/src/main/resources/types/SyncStats.yaml b/airbyte-config/config-models/src/main/resources/types/SyncStats.yaml
index bff76de8147..ddc78728169 100644
--- a/airbyte-config/config-models/src/main/resources/types/SyncStats.yaml
+++ b/airbyte-config/config-models/src/main/resources/types/SyncStats.yaml
@@ -55,3 +55,9 @@ properties:
   sourceStateMessagesEmitted:
     description: Number of State messages emitted by the Source Connector
     type: integer
+  discoverSchemaEndTime:
+    type: integer
+    description: The time at which the schema refresh ended
+  discoverSchemaStartTime:
+    type: integer
+    description: The time at which the schema refresh started
diff --git a/airbyte-config/config-models/src/test/java/io/airbyte/config/helpers/ConnectorRegistryConvertersTest.java b/airbyte-config/config-models/src/test/java/io/airbyte/config/helpers/ConnectorRegistryConvertersTest.java
index 71aeefe64d1..238eb0342da 100644
--- a/airbyte-config/config-models/src/test/java/io/airbyte/config/helpers/ConnectorRegistryConvertersTest.java
+++ b/airbyte-config/config-models/src/test/java/io/airbyte/config/helpers/ConnectorRegistryConvertersTest.java
@@ -21,7 +21,6 @@
 import io.airbyte.config.ConnectorRegistryDestinationDefinition;
 import io.airbyte.config.ConnectorRegistrySourceDefinition;
 import io.airbyte.config.ConnectorReleases;
-import io.airbyte.config.NormalizationDestinationDefinitionConfig;
 import io.airbyte.config.ReleaseStage;
 import io.airbyte.config.ResourceRequirements;
 import io.airbyte.config.StandardDestinationDefinition;
@@ -150,11 +149,6 @@ void testConvertRegistrySourceDefaults() {
 
   @Test
   void testConvertRegistryDestinationToInternalTypes() {
-    final NormalizationDestinationDefinitionConfig normalizationConfig = new NormalizationDestinationDefinitionConfig()
-        .withNormalizationRepository("normalization")
-        .withNormalizationTag("0.1.0")
-        .withNormalizationIntegrationType("bigquery");
-
     final ConnectorRegistryDestinationDefinition registryDestinationDef = new ConnectorRegistryDestinationDefinition()
         .withDestinationDefinitionId(DEF_ID)
         .withName(CONNECTOR_NAME)
@@ -171,8 +165,6 @@ void testConvertRegistryDestinationToInternalTypes() {
         .withProtocolVersion(PROTOCOL_VERSION)
         .withAllowedHosts(ALLOWED_HOSTS)
         .withResourceRequirements(RESOURCE_REQUIREMENTS)
-        .withNormalizationConfig(normalizationConfig)
-        .withSupportsDbt(true)
         .withReleases(new ConnectorReleases().withBreakingChanges(registryBreakingChanges));
 
     final StandardDestinationDefinition stdDestinationDef = new StandardDestinationDefinition()
@@ -193,9 +185,7 @@ void testConvertRegistryDestinationToInternalTypes() {
         .withReleaseStage(ReleaseStage.GENERALLY_AVAILABLE)
         .withReleaseDate(RELEASE_DATE)
         .withProtocolVersion(PROTOCOL_VERSION)
-        .withAllowedHosts(ALLOWED_HOSTS)
-        .withNormalizationConfig(normalizationConfig)
-        .withSupportsDbt(true);
+        .withAllowedHosts(ALLOWED_HOSTS);
 
     assertEquals(stdDestinationDef, ConnectorRegistryConverters.toStandardDestinationDefinition(registryDestinationDef));
     assertEquals(actorDefinitionVersion, ConnectorRegistryConverters.toActorDefinitionVersion(registryDestinationDef));
@@ -219,7 +209,6 @@ void testConvertRegistryDestinationDefaults() {
         .withProtocolVersion(PROTOCOL_VERSION)
         .withAllowedHosts(ALLOWED_HOSTS)
         .withResourceRequirements(RESOURCE_REQUIREMENTS)
-        .withSupportsDbt(true)
         .withReleases(new ConnectorReleases().withBreakingChanges(registryBreakingChanges));
 
     final ActorDefinitionVersion convertedAdv = ConnectorRegistryConverters.toActorDefinitionVersion(registryDestinationDef);
@@ -243,7 +232,6 @@ void testConvertRegistryDestinationWithoutScopedImpact() {
         .withProtocolVersion(PROTOCOL_VERSION)
         .withAllowedHosts(ALLOWED_HOSTS)
         .withResourceRequirements(RESOURCE_REQUIREMENTS)
-        .withSupportsDbt(true)
         .withReleases(new ConnectorReleases().withBreakingChanges(registryBreakingChangesWithoutScopedImpact));
 
     final List<ActorDefinitionBreakingChange> actorDefinitionBreakingChanges =
diff --git a/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/ConfigPersistence.java b/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/ConfigPersistence.java
deleted file mode 100644
index 11e5cbdf7ac..00000000000
--- a/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/ConfigPersistence.java
+++ /dev/null
@@ -1,35 +0,0 @@
-/*
- * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved.
- */
-
-package io.airbyte.config.persistence;
-
-import io.airbyte.config.AirbyteConfig;
-import io.airbyte.config.ConfigWithMetadata;
-import io.airbyte.validation.json.JsonValidationException;
-import java.io.IOException;
-import java.util.List;
-import java.util.Map;
-
-/**
- * We are moving migrating away from this interface entirely. Use ConfigRepository instead.
- */
-@Deprecated(forRemoval = true)
-public interface ConfigPersistence {
-
-  <T> T getConfig(AirbyteConfig configType, String configId, Class<T> clazz) throws ConfigNotFoundException, JsonValidationException, IOException;
-
-  <T> List<T> listConfigs(AirbyteConfig configType, Class<T> clazz) throws JsonValidationException, IOException;
-
-  <T> ConfigWithMetadata<T> getConfigWithMetadata(AirbyteConfig configType, String configId, Class<T> clazz)
-      throws ConfigNotFoundException, JsonValidationException, IOException;
-
-  <T> List<ConfigWithMetadata<T>> listConfigsWithMetadata(AirbyteConfig configType, Class<T> clazz) throws JsonValidationException, IOException;
-
-  <T> void writeConfig(AirbyteConfig configType, String configId, T config) throws JsonValidationException, IOException;
-
-  <T> void writeConfigs(AirbyteConfig configType, Map<String, T> configs) throws IOException, JsonValidationException;
-
-  void deleteConfig(AirbyteConfig configType, String configId) throws ConfigNotFoundException, IOException;
-
-}
diff --git a/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/StatePersistence.java b/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/StatePersistence.java
index fdfb1ac6908..2f183418fec 100644
--- a/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/StatePersistence.java
+++ b/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/StatePersistence.java
@@ -129,16 +129,32 @@ public void bulkDelete(final UUID connectionId, final Set<StreamDescriptor> streamsToDelete) {
       return;
     }
 
-    final var conditions = streamsToDelete.stream().map(stream -> {
-      var nameCondition = DSL.field(DSL.name(STATE.STREAM_NAME.getName())).eq(stream.getName());
-      var connCondition = DSL.field(DSL.name(STATE.CONNECTION_ID.getName())).eq(connectionId);
-      var namespaceCondition = stream.getNamespace() == null
-          ? DSL.field(DSL.name(STATE.NAMESPACE.getName())).isNull()
-          : DSL.field(DSL.name(STATE.NAMESPACE.getName())).eq(stream.getNamespace());
-
-      return DSL.and(namespaceCondition, nameCondition, connCondition);
-    }).reduce(DSL.noCondition(), DSL::or);
-    this.database.transaction(ctx -> ctx.deleteFrom(STATE).where(conditions).execute());
+    final Optional<StateWrapper> maybeCurrentState = getCurrentState(connectionId);
+    if (maybeCurrentState.isEmpty()) {
+      return;
+    }
+
+    final Set<StreamDescriptor> streamsInState = maybeCurrentState.get().getStateType() == StateType.GLOBAL
+        ? maybeCurrentState.get().getGlobal().getGlobal().getStreamStates().stream().map(AirbyteStreamState::getStreamDescriptor)
+            .collect(Collectors.toSet())
+        : maybeCurrentState.get().getStateMessages().stream().map(airbyteStateMessage -> airbyteStateMessage.getStream().getStreamDescriptor())
+            .collect(Collectors.toSet());
+
+    if (streamsInState.equals(streamsToDelete)) {
+      eraseState(connectionId);
+    } else {
+
+      final var conditions = streamsToDelete.stream().map(stream -> {
+        var nameCondition = DSL.field(DSL.name(STATE.STREAM_NAME.getName())).eq(stream.getName());
+        var connCondition = DSL.field(DSL.name(STATE.CONNECTION_ID.getName())).eq(connectionId);
+        var namespaceCondition = stream.getNamespace() == null
+            ? DSL.field(DSL.name(STATE.NAMESPACE.getName())).isNull()
+            : DSL.field(DSL.name(STATE.NAMESPACE.getName())).eq(stream.getNamespace());
+
+        return DSL.and(namespaceCondition, nameCondition, connCondition);
+      }).reduce(DSL.noCondition(), DSL::or);
+      this.database.transaction(ctx -> ctx.deleteFrom(STATE).where(conditions).execute());
+    }
   }
 
   private static void clearLegacyState(final DSLContext ctx, final UUID connectionId) {
diff --git a/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/ValidatingConfigPersistence.java b/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/ValidatingConfigPersistence.java
deleted file mode 100644
index d7208e7d1fb..00000000000
--- a/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/ValidatingConfigPersistence.java
+++ /dev/null
@@ -1,100 +0,0 @@
-/*
- * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved.
- */
-
-package io.airbyte.config.persistence;
-
-import com.fasterxml.jackson.databind.JsonNode;
-import io.airbyte.commons.json.Jsons;
-import io.airbyte.config.AirbyteConfig;
-import io.airbyte.config.ConfigWithMetadata;
-import io.airbyte.validation.json.JsonSchemaValidator;
-import io.airbyte.validation.json.JsonValidationException;
-import java.io.IOException;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-/**
- * Validates that json input and outputs for the ConfigPersistence against their schemas.
- */
-@SuppressWarnings("PMD.AvoidThrowingRawExceptionTypes")
-@Deprecated(forRemoval = true)
-public class ValidatingConfigPersistence implements ConfigPersistence {
-
-  private final JsonSchemaValidator schemaValidator;
-  private final ConfigPersistence decoratedPersistence;
-
-  public ValidatingConfigPersistence(final ConfigPersistence decoratedPersistence) {
-    this(decoratedPersistence, new JsonSchemaValidator());
-  }
-
-  public ValidatingConfigPersistence(final ConfigPersistence decoratedPersistence, final JsonSchemaValidator schemaValidator) {
-    this.decoratedPersistence = decoratedPersistence;
-    this.schemaValidator = schemaValidator;
-  }
-
-  @Override
-  public <T> T getConfig(final AirbyteConfig configType, final String configId, final Class<T> clazz)
-      throws ConfigNotFoundException, JsonValidationException, IOException {
-    final T config = decoratedPersistence.getConfig(configType, configId, clazz);
-    validateJson(config, configType);
-    return config;
-  }
-
-  @Override
-  public <T> List<T> listConfigs(final AirbyteConfig configType, final Class<T> clazz) throws JsonValidationException, IOException {
-    final List<T> configs = decoratedPersistence.listConfigs(configType, clazz);
-    for (final T config : configs) {
-      validateJson(config, configType);
-    }
-    return configs;
-  }
-
-  @Override
-  public <T> ConfigWithMetadata<T> getConfigWithMetadata(final AirbyteConfig configType, final String configId, final Class<T> clazz)
-      throws ConfigNotFoundException, JsonValidationException, IOException {
-    final ConfigWithMetadata<T> config = decoratedPersistence.getConfigWithMetadata(configType, configId, clazz);
-    validateJson(config.getConfig(), configType);
-    return config;
-  }
-
-  @Override
-  public <T> List<ConfigWithMetadata<T>> listConfigsWithMetadata(final AirbyteConfig configType, final Class<T> clazz)
-      throws JsonValidationException, IOException {
-    final List<ConfigWithMetadata<T>> configs = decoratedPersistence.listConfigsWithMetadata(configType, clazz);
-    for (final ConfigWithMetadata<T> config : configs) {
-      validateJson(config.getConfig(), configType);
-    }
-    return configs;
-  }
-
-  @Override
-  public <T> void writeConfig(final AirbyteConfig configType, final String configId, final T config) throws JsonValidationException, IOException {
-
-    final Map<String, T> configIdToConfig = new HashMap<>();
-    configIdToConfig.put(configId, config);
-
-    writeConfigs(configType, configIdToConfig);
-  }
-
-  @Override
-  public <T> void writeConfigs(final AirbyteConfig configType, final Map<String, T> configs)
-      throws IOException, JsonValidationException {
-    for (final Map.Entry<String, T> config : configs.entrySet()) {
-      validateJson(Jsons.jsonNode(config.getValue()), configType);
-    }
-    decoratedPersistence.writeConfigs(configType, configs);
-  }
-
-  @Override
-  public void deleteConfig(final AirbyteConfig configType, final String configId) throws ConfigNotFoundException, IOException {
-    decoratedPersistence.deleteConfig(configType, configId);
-  }
-
-  private <T> void validateJson(final T config, final AirbyteConfig configType) throws JsonValidationException {
-    final JsonNode schema = JsonSchemaValidator.getSchema(configType.getConfigSchemaFile());
-    schemaValidator.ensure(schema, Jsons.jsonNode(config));
-  }
-
-}
diff --git a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/ActorDefinitionVersionPersistenceTest.java b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/ActorDefinitionVersionPersistenceTest.java
index e1b5239b3a3..7d36301a6b1 100644
--- a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/ActorDefinitionVersionPersistenceTest.java
+++ b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/ActorDefinitionVersionPersistenceTest.java
@@ -21,7 +21,6 @@
 import io.airbyte.config.ActorDefinitionVersion;
 import io.airbyte.config.ActorDefinitionVersion.SupportState;
 import io.airbyte.config.AllowedHosts;
-import io.airbyte.config.NormalizationDestinationDefinitionConfig;
 import io.airbyte.config.ReleaseStage;
 import io.airbyte.config.StandardSourceDefinition;
 import io.airbyte.config.SuggestedStreams;
@@ -103,12 +102,7 @@ private static ActorDefinitionVersion baseActorDefinitionVersion(final UUID acto
         .withReleaseDate("2021-01-21")
         .withSuggestedStreams(new SuggestedStreams().withStreams(List.of("users")))
         .withProtocolVersion("0.1.0")
-        .withAllowedHosts(new AllowedHosts().withHosts(List.of("https://airbyte.com")))
-        .withSupportsDbt(true)
-        .withNormalizationConfig(new NormalizationDestinationDefinitionConfig()
-            .withNormalizationRepository("airbyte/normalization")
-            .withNormalizationTag("tag")
-            .withNormalizationIntegrationType("bigquery"));
+        .withAllowedHosts(new AllowedHosts().withHosts(List.of("https://airbyte.com")));
   }
 
   private ConfigRepository configRepository;
diff --git a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/StandardSyncPersistenceTest.java b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/StandardSyncPersistenceTest.java
index 2ad059bf663..d9acc2c80ff 100644
--- a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/StandardSyncPersistenceTest.java
+++ b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/StandardSyncPersistenceTest.java
@@ -576,7 +576,7 @@ private UUID writeOperationForConnection(final UUID connectionId) throws SQLExce
         .withOperationId(standardSyncOperationId)
         .withName("name")
         .withWorkspaceId(workspaceId)
-        .withOperatorType(StandardSyncOperation.OperatorType.DBT));
+        .withOperatorType(StandardSyncOperation.OperatorType.WEBHOOK));
 
     database.transaction(ctx -> ctx.insertInto(CONNECTION_OPERATION)
         .set(CONNECTION_OPERATION.ID, operationId)
diff --git a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/StatePersistenceTest.java b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/StatePersistenceTest.java
index ca85b3ea224..f746d7757a0 100644
--- a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/StatePersistenceTest.java
+++ b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/StatePersistenceTest.java
@@ -4,6 +4,7 @@
 
 package io.airbyte.config.persistence;
 
+import static org.junit.Assert.assertTrue;
 import static org.mockito.Mockito.mock;
 
 import com.fasterxml.jackson.databind.JsonNode;
@@ -714,6 +715,38 @@ void testBulkDeleteGlobal() throws IOException {
     assertEquals(exp, curr.get());
   }
 
+  @Test
+  void testBulkDeleteGlobalAllStreams() throws IOException {
+    final StateWrapper globalToModify = new StateWrapper()
+        .withStateType(StateType.GLOBAL)
+        .withGlobal(new AirbyteStateMessage()
+            .withType(AirbyteStateType.GLOBAL)
+            .withGlobal(new AirbyteGlobalState()
+                .withSharedState(Jsons.deserialize("\"woot\""))
+                .withStreamStates(Arrays.asList(
+                    new AirbyteStreamState()
+                        .withStreamDescriptor(new StreamDescriptor().withName("del-1").withNamespace("del-n1"))
+                        .withStreamState(Jsons.deserialize("")),
+                    new AirbyteStreamState()
+                        .withStreamDescriptor(new StreamDescriptor().withName("del-2"))
+                        .withStreamState(Jsons.deserialize("")),
+                    new AirbyteStreamState()
+                        .withStreamDescriptor(new StreamDescriptor().withName("del-1").withNamespace("del-n2"))
+                        .withStreamState(Jsons.deserialize(""))))));
+
+    statePersistence.updateOrCreateState(connectionId, clone(globalToModify));
+
+    final var toDelete = Set.of(
+        new StreamDescriptor().withName("del-1").withNamespace("del-n1"),
+        new StreamDescriptor().withName("del-2"),
+        new StreamDescriptor().withName("del-1").withNamespace("del-n2"));
+    statePersistence.bulkDelete(connectionId, toDelete);
+
+    var curr = statePersistence.getCurrentState(connectionId);
+
+    assertTrue(curr.isEmpty());
+  }
+
   @Test
   void testBulkDeleteCorrectConnection() throws IOException, JsonValidationException {
     final StateWrapper globalToModify = new StateWrapper()
diff --git a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/SyncOperationPersistenceTest.java b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/SyncOperationPersistenceTest.java
index 003669cc4ce..37700f7f630 100644
--- a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/SyncOperationPersistenceTest.java
+++ b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/SyncOperationPersistenceTest.java
@@ -10,9 +10,6 @@
 import static org.mockito.Mockito.mock;
 
 import io.airbyte.config.Geography;
-import io.airbyte.config.OperatorDbt;
-import io.airbyte.config.OperatorNormalization;
-import io.airbyte.config.OperatorNormalization.Option;
 import io.airbyte.config.OperatorWebhook;
 import io.airbyte.config.StandardSyncOperation;
 import io.airbyte.config.StandardSyncOperation.OperatorType;
@@ -51,40 +48,18 @@ class SyncOperationPersistenceTest extends BaseConfigDatabaseTest {
 
   private ConfigRepository configRepository;
 
-  private static final StandardSyncOperation DBT_OP = new StandardSyncOperation()
-      .withName("operation-1")
-      .withTombstone(false)
-      .withOperationId(UUID.randomUUID())
-      .withWorkspaceId(WORKSPACE_ID)
-      .withOperatorDbt(new OperatorDbt()
.withDbtArguments("dbt-arguments") - .withDockerImage("image-tag") - .withGitRepoBranch("git-repo-branch") - .withGitRepoUrl("git-repo-url")) - .withOperatorNormalization(null) - .withOperatorType(OperatorType.DBT); - private static final StandardSyncOperation NORMALIZATION_OP = new StandardSyncOperation() - .withName("operation-1") - .withTombstone(false) - .withOperationId(UUID.randomUUID()) - .withWorkspaceId(WORKSPACE_ID) - .withOperatorDbt(null) - .withOperatorNormalization(new OperatorNormalization().withOption(Option.BASIC)) - .withOperatorType(OperatorType.NORMALIZATION); private static final StandardSyncOperation WEBHOOK_OP = new StandardSyncOperation() .withName("webhook-operation") .withTombstone(false) .withOperationId(UUID.randomUUID()) .withWorkspaceId(WORKSPACE_ID) .withOperatorType(OperatorType.WEBHOOK) - .withOperatorDbt(null) - .withOperatorNormalization(null) .withOperatorWebhook( new OperatorWebhook() .withWebhookConfigId(WEBHOOK_CONFIG_ID) .withExecutionUrl(WEBHOOK_OPERATION_EXECUTION_URL) .withExecutionBody(WEBHOOK_OPERATION_EXECUTION_BODY)); - private static final List OPS = List.of(DBT_OP, NORMALIZATION_OP, WEBHOOK_OP); + private static final List OPS = List.of(WEBHOOK_OP); @BeforeEach void beforeEach() throws Exception { diff --git a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/ValidatingConfigPersistenceTest.java b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/ValidatingConfigPersistenceTest.java deleted file mode 100644 index 500efc3ff4a..00000000000 --- a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/ValidatingConfigPersistenceTest.java +++ /dev/null @@ -1,200 +0,0 @@ -/* - * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. 
- */
-
-package io.airbyte.config.persistence;
-
-import static org.junit.jupiter.api.Assertions.assertEquals;
-import static org.junit.jupiter.api.Assertions.assertThrows;
-import static org.mockito.ArgumentMatchers.any;
-import static org.mockito.Mockito.doThrow;
-import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.verify;
-import static org.mockito.Mockito.verifyNoInteractions;
-import static org.mockito.Mockito.when;
-
-import com.google.common.collect.Sets;
-import io.airbyte.config.ConfigSchema;
-import io.airbyte.config.ConfigWithMetadata;
-import io.airbyte.config.StandardSourceDefinition;
-import io.airbyte.validation.json.JsonSchemaValidator;
-import io.airbyte.validation.json.JsonValidationException;
-import java.io.IOException;
-import java.time.Instant;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.UUID;
-import org.junit.jupiter.api.BeforeEach;
-import org.junit.jupiter.api.Test;
-
-class ValidatingConfigPersistenceTest {
-
-  public static final UUID UUID_1 = new UUID(0, 1);
-  public static final Instant INSTANT = Instant.now();
-  public static final StandardSourceDefinition SOURCE_1 = new StandardSourceDefinition();
-
-  static {
-    SOURCE_1.withSourceDefinitionId(UUID_1).withName("apache storm");
-  }
-
-  public static final UUID UUID_2 = new UUID(0, 2);
-  public static final StandardSourceDefinition SOURCE_2 = new StandardSourceDefinition();
-
-  static {
-    SOURCE_2.withSourceDefinitionId(UUID_2).withName("apache storm");
-  }
-
-  private JsonSchemaValidator schemaValidator;
-
-  private ValidatingConfigPersistence configPersistence;
-  private ConfigPersistence decoratedConfigPersistence;
-  private static final String ERROR_MESSAGE = "error";
-
-  @BeforeEach
-  void setUp() {
-    schemaValidator = mock(JsonSchemaValidator.class);
-
-    decoratedConfigPersistence = mock(ConfigPersistence.class);
-    configPersistence = new ValidatingConfigPersistence(decoratedConfigPersistence, schemaValidator);
-  }
-
-  @Test
-  void testWriteConfigSuccess() throws IOException, JsonValidationException {
-    configPersistence.writeConfig(ConfigSchema.STANDARD_SOURCE_DEFINITION, UUID_1.toString(), SOURCE_1);
-    final Map<String, StandardSourceDefinition> aggregatedSource = new HashMap<>();
-    aggregatedSource.put(UUID_1.toString(), SOURCE_1);
-    verify(decoratedConfigPersistence).writeConfigs(ConfigSchema.STANDARD_SOURCE_DEFINITION, aggregatedSource);
-  }
-
-  @Test
-  void testWriteConfigsSuccess() throws IOException, JsonValidationException {
-    final Map<String, StandardSourceDefinition> sourceDefinitionById = new HashMap<>();
-    sourceDefinitionById.put(UUID_1.toString(), SOURCE_1);
-    sourceDefinitionById.put(UUID_2.toString(), SOURCE_2);
-
-    configPersistence.writeConfigs(ConfigSchema.STANDARD_SOURCE_DEFINITION, sourceDefinitionById);
-    verify(decoratedConfigPersistence).writeConfigs(ConfigSchema.STANDARD_SOURCE_DEFINITION, sourceDefinitionById);
-  }
-
-  @Test
-  void testWriteConfigFailure() throws JsonValidationException {
-    doThrow(new JsonValidationException(ERROR_MESSAGE)).when(schemaValidator).ensure(any(), any());
-    assertThrows(JsonValidationException.class,
-        () -> configPersistence.writeConfig(ConfigSchema.STANDARD_SOURCE_DEFINITION, UUID_1.toString(), SOURCE_1));
-
-    verifyNoInteractions(decoratedConfigPersistence);
-  }
-
-  @Test
-  void testWriteConfigsFailure() throws JsonValidationException {
-    doThrow(new JsonValidationException(ERROR_MESSAGE)).when(schemaValidator).ensure(any(), any());
-
-    final Map<String, StandardSourceDefinition> sourceDefinitionById = new HashMap<>();
-    sourceDefinitionById.put(UUID_1.toString(), SOURCE_1);
-    sourceDefinitionById.put(UUID_2.toString(), SOURCE_2);
-
-    assertThrows(JsonValidationException.class,
-        () -> configPersistence.writeConfigs(ConfigSchema.STANDARD_SOURCE_DEFINITION, sourceDefinitionById));
-
-    verifyNoInteractions(decoratedConfigPersistence);
-  }
-
-  @Test
-  void testGetConfigSuccess() throws IOException, JsonValidationException, ConfigNotFoundException {
-    when(decoratedConfigPersistence.getConfig(ConfigSchema.STANDARD_SOURCE_DEFINITION, UUID_1.toString(), StandardSourceDefinition.class))
-        .thenReturn(SOURCE_1);
-    final StandardSourceDefinition actualConfig = configPersistence
-        .getConfig(ConfigSchema.STANDARD_SOURCE_DEFINITION, UUID_1.toString(), StandardSourceDefinition.class);
-
-    assertEquals(SOURCE_1, actualConfig);
-  }
-
-  @Test
-  void testGetConfigFailure() throws IOException, JsonValidationException, ConfigNotFoundException {
-    doThrow(new JsonValidationException(ERROR_MESSAGE)).when(schemaValidator).ensure(any(), any());
-    when(decoratedConfigPersistence.getConfig(ConfigSchema.STANDARD_SOURCE_DEFINITION, UUID_1.toString(), StandardSourceDefinition.class))
-        .thenReturn(SOURCE_1);
-
-    assertThrows(
-        JsonValidationException.class,
-        () -> configPersistence.getConfig(ConfigSchema.STANDARD_SOURCE_DEFINITION, UUID_1.toString(), StandardSourceDefinition.class));
-  }
-
-  @Test
-  void testListConfigsSuccess() throws JsonValidationException, IOException {
-    when(decoratedConfigPersistence.listConfigs(ConfigSchema.STANDARD_SOURCE_DEFINITION, StandardSourceDefinition.class))
-        .thenReturn(List.of(SOURCE_1, SOURCE_2));
-
-    final List<StandardSourceDefinition> actualConfigs = configPersistence
-        .listConfigs(ConfigSchema.STANDARD_SOURCE_DEFINITION, StandardSourceDefinition.class);
-
-    assertEquals(
-        Sets.newHashSet(SOURCE_1, SOURCE_2),
-        Sets.newHashSet(actualConfigs));
-  }
-
-  @Test
-  void testListConfigsFailure() throws JsonValidationException, IOException {
-    doThrow(new JsonValidationException(ERROR_MESSAGE)).when(schemaValidator).ensure(any(), any());
-    when(decoratedConfigPersistence.listConfigs(ConfigSchema.STANDARD_SOURCE_DEFINITION, StandardSourceDefinition.class))
-        .thenReturn(List.of(SOURCE_1, SOURCE_2));
-
-    assertThrows(JsonValidationException.class, () -> configPersistence
-        .listConfigs(ConfigSchema.STANDARD_SOURCE_DEFINITION, StandardSourceDefinition.class));
-  }
-
-  @Test
-  void testGetConfigWithMetadataSuccess() throws IOException, JsonValidationException, ConfigNotFoundException {
-    when(decoratedConfigPersistence.getConfigWithMetadata(ConfigSchema.STANDARD_SOURCE_DEFINITION, UUID_1.toString(), StandardSourceDefinition.class))
-        .thenReturn(withMetadata(SOURCE_1));
-    final ConfigWithMetadata<StandardSourceDefinition> actualConfig = configPersistence
-        .getConfigWithMetadata(ConfigSchema.STANDARD_SOURCE_DEFINITION, UUID_1.toString(), StandardSourceDefinition.class);
-
-    assertEquals(withMetadata(SOURCE_1), actualConfig);
-  }
-
-  @Test
-  void testGetConfigWithMetadataFailure() throws IOException, JsonValidationException, ConfigNotFoundException {
-    doThrow(new JsonValidationException(ERROR_MESSAGE)).when(schemaValidator).ensure(any(), any());
-    when(decoratedConfigPersistence.getConfigWithMetadata(ConfigSchema.STANDARD_SOURCE_DEFINITION, UUID_1.toString(), StandardSourceDefinition.class))
-        .thenReturn(withMetadata(SOURCE_1));
-
-    assertThrows(
-        JsonValidationException.class,
-        () -> configPersistence.getConfigWithMetadata(ConfigSchema.STANDARD_SOURCE_DEFINITION, UUID_1.toString(), StandardSourceDefinition.class));
-  }
-
-  @Test
-  void testListConfigsWithMetadataSuccess() throws JsonValidationException, IOException {
JsonValidationException, IOException { - when(decoratedConfigPersistence.listConfigsWithMetadata(ConfigSchema.STANDARD_SOURCE_DEFINITION, StandardSourceDefinition.class)) - .thenReturn(List.of(withMetadata(SOURCE_1), withMetadata(SOURCE_2))); - - final List> actualConfigs = configPersistence - .listConfigsWithMetadata(ConfigSchema.STANDARD_SOURCE_DEFINITION, StandardSourceDefinition.class); - - // noinspection unchecked - assertEquals( - Sets.newHashSet(withMetadata(SOURCE_1), withMetadata(SOURCE_2)), - Sets.newHashSet(actualConfigs)); - } - - @Test - void testListConfigsWithMetadataFailure() throws JsonValidationException, IOException { - doThrow(new JsonValidationException(ERROR_MESSAGE)).when(schemaValidator).ensure(any(), any()); - when(decoratedConfigPersistence.listConfigsWithMetadata(ConfigSchema.STANDARD_SOURCE_DEFINITION, StandardSourceDefinition.class)) - .thenReturn(List.of(withMetadata(SOURCE_1), withMetadata(SOURCE_2))); - - assertThrows(JsonValidationException.class, () -> configPersistence - .listConfigsWithMetadata(ConfigSchema.STANDARD_SOURCE_DEFINITION, StandardSourceDefinition.class)); - } - - private static ConfigWithMetadata withMetadata(final StandardSourceDefinition sourceDef) { - return new ConfigWithMetadata<>(sourceDef.getSourceDefinitionId().toString(), - ConfigSchema.STANDARD_SOURCE_DEFINITION.name(), - INSTANT, - INSTANT, - sourceDef); - } - -} diff --git a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/version_overrides/ActorDefinitionVersionResolverTest.java b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/version_overrides/ActorDefinitionVersionResolverTest.java index ea8da035d25..0bccb1dbf90 100644 --- a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/version_overrides/ActorDefinitionVersionResolverTest.java +++ b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/version_overrides/ActorDefinitionVersionResolverTest.java @@ -17,7 +17,6 @@ import io.airbyte.config.ActorType; import io.airbyte.config.AllowedHosts; import io.airbyte.config.ConnectorRegistrySourceDefinition; -import io.airbyte.config.NormalizationDestinationDefinitionConfig; import io.airbyte.config.ReleaseStage; import io.airbyte.config.SuggestedStreams; import io.airbyte.config.helpers.ConnectorRegistryConverters; @@ -47,10 +46,6 @@ class ActorDefinitionVersionResolverTest { .withConnectionSpecification(Jsons.jsonNode(Map.of( "key", "value"))); private static final String DOCS_URL = "https://airbyte.io/docs/"; - private static final NormalizationDestinationDefinitionConfig NORMALIZATION_CONFIG = new NormalizationDestinationDefinitionConfig() - .withNormalizationRepository("airbyte/normalization") - .withNormalizationTag("tag") - .withNormalizationIntegrationType("bigquery"); private static final AllowedHosts ALLOWED_HOSTS = new AllowedHosts().withHosts(List.of("https://airbyte.io")); private static final SuggestedStreams SUGGESTED_STREAMS = new SuggestedStreams().withStreams(List.of("users")); private static final ActorDefinitionVersion ACTOR_DEFINITION_VERSION = new ActorDefinitionVersion() @@ -62,9 +57,7 @@ class ActorDefinitionVersionResolverTest { .withDocumentationUrl(DOCS_URL) .withReleaseStage(ReleaseStage.BETA) .withSuggestedStreams(SUGGESTED_STREAMS) - .withAllowedHosts(ALLOWED_HOSTS) - .withSupportsDbt(true) - .withNormalizationConfig(NORMALIZATION_CONFIG); + .withAllowedHosts(ALLOWED_HOSTS); private static final ConnectorRegistrySourceDefinition REGISTRY_DEF = new 
ConnectorRegistrySourceDefinition() .withSourceDefinitionId(ACTOR_DEFINITION_ID) diff --git a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/version_overrides/ConfigurationDefinitionVersionOverrideProviderTest.java b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/version_overrides/ConfigurationDefinitionVersionOverrideProviderTest.java index f5cf0097690..c8d35da0cb7 100644 --- a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/version_overrides/ConfigurationDefinitionVersionOverrideProviderTest.java +++ b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/version_overrides/ConfigurationDefinitionVersionOverrideProviderTest.java @@ -21,7 +21,6 @@ import io.airbyte.config.ConfigResourceType; import io.airbyte.config.ConfigSchema; import io.airbyte.config.ConfigScopeType; -import io.airbyte.config.NormalizationDestinationDefinitionConfig; import io.airbyte.config.ReleaseStage; import io.airbyte.config.ScopedConfiguration; import io.airbyte.config.StandardWorkspace; @@ -67,10 +66,6 @@ class ConfigurationDefinitionVersionOverrideProviderTest { .withProtocolVersion("0.2.0") .withConnectionSpecification(Jsons.jsonNode(Map.of( "theSpec", "goesHere"))); - private static final NormalizationDestinationDefinitionConfig NORMALIZATION_CONFIG = new NormalizationDestinationDefinitionConfig() - .withNormalizationRepository("airbyte/normalization") - .withNormalizationTag("tag") - .withNormalizationIntegrationType("bigquery"); private static final ActorDefinitionVersion DEFAULT_VERSION = new ActorDefinitionVersion() .withVersionId(UUID.randomUUID()) .withDockerRepository(DOCKER_REPOSITORY) @@ -81,9 +76,7 @@ class ConfigurationDefinitionVersionOverrideProviderTest { .withDocumentationUrl(DOCS_URL) .withReleaseStage(ReleaseStage.BETA) .withSuggestedStreams(SUGGESTED_STREAMS) - .withAllowedHosts(ALLOWED_HOSTS) - .withSupportsDbt(true) - .withNormalizationConfig(NORMALIZATION_CONFIG); + .withAllowedHosts(ALLOWED_HOSTS); private static final ActorDefinitionVersion OVERRIDE_VERSION = new ActorDefinitionVersion() .withVersionId(UUID.randomUUID()) .withDockerRepository(DOCKER_REPOSITORY) @@ -94,9 +87,7 @@ class ConfigurationDefinitionVersionOverrideProviderTest { .withDocumentationUrl(DOCS_URL) .withReleaseStage(ReleaseStage.BETA) .withSuggestedStreams(SUGGESTED_STREAMS) - .withAllowedHosts(ALLOWED_HOSTS) - .withSupportsDbt(true) - .withNormalizationConfig(NORMALIZATION_CONFIG); + .withAllowedHosts(ALLOWED_HOSTS); private WorkspaceService mWorkspaceService; private ActorDefinitionService mActorDefinitionService; diff --git a/airbyte-config/config-persistence/src/testFixtures/java/io/airbyte/config/persistence/MockData.java b/airbyte-config/config-persistence/src/testFixtures/java/io/airbyte/config/persistence/MockData.java index 343beb15723..d5018aaaa11 100644 --- a/airbyte-config/config-persistence/src/testFixtures/java/io/airbyte/config/persistence/MockData.java +++ b/airbyte-config/config-persistence/src/testFixtures/java/io/airbyte/config/persistence/MockData.java @@ -28,9 +28,6 @@ import io.airbyte.config.JobSyncConfig.NamespaceDefinitionType; import io.airbyte.config.Notification; import io.airbyte.config.Notification.NotificationType; -import io.airbyte.config.OperatorDbt; -import io.airbyte.config.OperatorNormalization; -import io.airbyte.config.OperatorNormalization.Option; import io.airbyte.config.OperatorWebhook; import io.airbyte.config.Organization; import io.airbyte.config.Permission; 
@@ -636,43 +633,45 @@ public static List<DestinationOAuthParameter> destinationOauthParameters() { } public static List<StandardSyncOperation> standardSyncOperations() { - final OperatorDbt operatorDbt = new OperatorDbt() - .withDbtArguments("dbt-arguments") - .withDockerImage("image-tag") - .withGitRepoBranch("git-repo-branch") - .withGitRepoUrl("git-repo-url"); final StandardSyncOperation standardSyncOperation1 = new StandardSyncOperation() .withName("operation-1") .withTombstone(false) .withOperationId(OPERATION_ID_1) .withWorkspaceId(WORKSPACE_ID_1) - .withOperatorDbt(operatorDbt) - .withOperatorNormalization(null) - .withOperatorType(OperatorType.DBT); + .withOperatorType(OperatorType.WEBHOOK) + .withOperatorWebhook( + new OperatorWebhook() + .withWebhookConfigId(WEBHOOK_CONFIG_ID) + .withExecutionUrl(WEBHOOK_OPERATION_EXECUTION_URL) + .withExecutionBody(WEBHOOK_OPERATION_EXECUTION_BODY)); final StandardSyncOperation standardSyncOperation2 = new StandardSyncOperation() .withName("operation-1") .withTombstone(false) .withOperationId(OPERATION_ID_2) .withWorkspaceId(WORKSPACE_ID_1) - .withOperatorDbt(null) - .withOperatorNormalization(new OperatorNormalization().withOption(Option.BASIC)) - .withOperatorType(OperatorType.NORMALIZATION); + .withOperatorType(OperatorType.WEBHOOK) + .withOperatorWebhook( + new OperatorWebhook() + .withWebhookConfigId(WEBHOOK_CONFIG_ID) + .withExecutionUrl(WEBHOOK_OPERATION_EXECUTION_URL) + .withExecutionBody(WEBHOOK_OPERATION_EXECUTION_BODY)); final StandardSyncOperation standardSyncOperation3 = new StandardSyncOperation() .withName("operation-3") .withTombstone(false) .withOperationId(OPERATION_ID_3) .withWorkspaceId(WORKSPACE_ID_2) - .withOperatorDbt(null) - .withOperatorNormalization(new OperatorNormalization().withOption(Option.BASIC)) - .withOperatorType(OperatorType.NORMALIZATION); + .withOperatorType(OperatorType.WEBHOOK) + .withOperatorWebhook( + new OperatorWebhook() + .withWebhookConfigId(WEBHOOK_CONFIG_ID) + .withExecutionUrl(WEBHOOK_OPERATION_EXECUTION_URL) + .withExecutionBody(WEBHOOK_OPERATION_EXECUTION_BODY)); final StandardSyncOperation standardSyncOperation4 = new StandardSyncOperation() .withName("webhook-operation") .withTombstone(false) .withOperationId(OPERATION_ID_4) .withWorkspaceId(WORKSPACE_ID_1) .withOperatorType(OperatorType.WEBHOOK) - .withOperatorDbt(null) - .withOperatorNormalization(null) .withOperatorWebhook( new OperatorWebhook() .withWebhookConfigId(WEBHOOK_CONFIG_ID) diff --git a/airbyte-config/config-secrets/src/main/kotlin/secrets/SecretsRepositoryWriter.kt b/airbyte-config/config-secrets/src/main/kotlin/secrets/SecretsRepositoryWriter.kt index f018909cce6..52bdc27cb92 100644 --- a/airbyte-config/config-secrets/src/main/kotlin/secrets/SecretsRepositoryWriter.kt +++ b/airbyte-config/config-secrets/src/main/kotlin/secrets/SecretsRepositoryWriter.kt @@ -65,6 +65,42 @@ open class SecretsRepositoryWriter( return splitSecretConfig(workspaceId, fullConfig, connSpec, activePersistence) } + /** + * Standalone helper that deletes every secret referenced by a config from persistence. + * + * @param config secret config to be deleted + * @param spec connector specification + * @param runtimeSecretPersistence to use as an override + */ + @Throws(JsonValidationException::class) + fun deleteFromConfig( + config: JsonNode, + spec: JsonNode, + runtimeSecretPersistence: RuntimeSecretPersistence?
= null, + ) { + val pathToSecrets = SecretsHelpers.getSortedSecretPaths(spec) + pathToSecrets.forEach { path -> + JsonPaths.getValues(config, path).forEach { jsonWithCoordinate -> + SecretsHelpers.getExistingCoordinateIfExists(jsonWithCoordinate)?.let { coordinate -> + val secretCoord = SecretCoordinate.fromFullCoordinate(coordinate) + logger.info { "Deleting: ${secretCoord.fullCoordinate}" } + try { + (runtimeSecretPersistence ?: secretPersistence).delete(secretCoord) + metricClient.count(OssMetricsRegistry.DELETE_SECRET_DEFAULT_STORE, 1, MetricAttribute(MetricTags.SUCCESS, "true")) + } catch (e: Exception) { + // Multiple versions within one secret are a legacy concern. This is no longer + // possible moving forward. Catch the exception to make a best-effort attempt at disabling other secret versions. + // The other reason to catch this is that propagating the exception prevents the database + // from being updated with the new coordinates. + metricClient.count(OssMetricsRegistry.DELETE_SECRET_DEFAULT_STORE, 1, MetricAttribute(MetricTags.SUCCESS, "false")) + logger.error(e) { "Error deleting secret: ${secretCoord.fullCoordinate}" } + } + } + } + } + logger.info { "Deleting secrets done!" } + } + /** * This method merges an existing partial config with a new full config. It writes the secrets to the * secrets store and returns the partial config with the secrets removed and replaced with secret coordinates. @@ -100,29 +136,10 @@ open class SecretsRepositoryWriter( runtimeSecretPersistence?.write(coordinate, payload) ?: secretPersistence.write(coordinate, payload) metricClient.count(OssMetricsRegistry.UPDATE_SECRET_DEFAULT_STORE, 1) } - - val pathToSecrets = SecretsHelpers.getSortedSecretPaths(spec) - pathToSecrets.forEach { path -> - JsonPaths.getValues(oldPartialConfig, path).forEach { jsonWithCoordinate -> - SecretsHelpers.getExistingCoordinateIfExists(jsonWithCoordinate)?.let { coordinate -> - - if (featureFlagClient.boolVariation(DeleteDanglingSecrets, Workspace(workspaceId))) { - val secretCoord = SecretCoordinate.fromFullCoordinate(coordinate) - logger.info { "Deleting: ${secretCoord.fullCoordinate}" } - try { - (runtimeSecretPersistence ?: secretPersistence).delete(secretCoord) - metricClient.count(OssMetricsRegistry.DELETE_SECRET_DEFAULT_STORE, 1, MetricAttribute(MetricTags.SUCCESS, "true")) - } catch (e: Exception) { - // Multiple versions within one secret is a legacy concern. This is no longer - // possible moving forward. Catch the exception to best-effort disable other secret versions. - // The other reason to catch this is propagating the exception prevents the database - // from being updated with the new coordinates. - metricClient.count(OssMetricsRegistry.DELETE_SECRET_DEFAULT_STORE, 1, MetricAttribute(MetricTags.SUCCESS, "false")) - logger.error(e) { "Error deleting secret: ${secretCoord.fullCoordinate}" } - } - } - } - } + // Delete old secrets (controlled by a workspace level feature flag).
+ // TODO: remove this flag after testing so that by default we are always cleaning up old secrets + if (featureFlagClient.boolVariation(DeleteDanglingSecrets, Workspace(workspaceId))) { + deleteFromConfig(oldPartialConfig, spec, runtimeSecretPersistence) } return updatedSplitConfig.partialConfig } diff --git a/airbyte-config/config-secrets/src/main/kotlin/secrets/persistence/LocalTestingSecretPersistence.kt b/airbyte-config/config-secrets/src/main/kotlin/secrets/persistence/LocalTestingSecretPersistence.kt index b694da88faa..c0847d01b34 100644 --- a/airbyte-config/config-secrets/src/main/kotlin/secrets/persistence/LocalTestingSecretPersistence.kt +++ b/airbyte-config/config-secrets/src/main/kotlin/secrets/persistence/LocalTestingSecretPersistence.kt @@ -62,6 +62,7 @@ open class LocalTestingSecretPersistence( } override fun delete(coordinate: SecretCoordinate) { - return + initialize() + dslContext.execute("DELETE FROM secrets WHERE coordinate = ?;", coordinate.fullCoordinate) } } diff --git a/airbyte-config/config-secrets/src/test/kotlin/secrets/SecretsRepositoryWriterTest.kt b/airbyte-config/config-secrets/src/test/kotlin/secrets/SecretsRepositoryWriterTest.kt index 2c75424723b..7c94e6264cd 100644 --- a/airbyte-config/config-secrets/src/test/kotlin/secrets/SecretsRepositoryWriterTest.kt +++ b/airbyte-config/config-secrets/src/test/kotlin/secrets/SecretsRepositoryWriterTest.kt @@ -23,6 +23,7 @@ import io.mockk.spyk import io.mockk.verify import org.junit.jupiter.api.Assertions.assertEquals import org.junit.jupiter.api.BeforeEach +import org.junit.jupiter.api.Nested import org.junit.jupiter.api.Test import org.junit.jupiter.api.assertDoesNotThrow import java.util.UUID @@ -55,248 +56,260 @@ internal class SecretsRepositoryWriterTest { } @Test - fun testUpdateSecretSameValueShouldWriteNewCoordinateAndDelete() { - val secret = "secret-1" - val oldCoordinate = "existing_coordinate_v1" - secretPersistence.write(SecretCoordinate.fromFullCoordinate(oldCoordinate), secret) - + fun testDeleteSecrets() { every { metricClient.count(any(), any()) } returns Unit every { metricClient.count(any(), any(), any()) } returns Unit - every { featureFlagClient.boolVariation(any(), any()) } returns true - - val updatedFullConfigNoSecretChange = - Jsons.deserialize( - """ - { "username": "airbyte1", "password": "$secret"} - """.trimIndent(), - ) + val secret = "test-secret" + val coordinate = "existing_coordinate_v1" + secretPersistence.write(SecretCoordinate.fromFullCoordinate(coordinate), secret) + val config = injectCoordinate(coordinate) + secretsRepositoryWriter.deleteFromConfig( + config, + SPEC.connectionSpecification, + null, + ) + verify(exactly = 1) { secretPersistence.delete(SecretCoordinate.fromFullCoordinate(coordinate)) } + assertEquals("", secretPersistence.read(SecretCoordinate.fromFullCoordinate(coordinate))) + verify { metricClient.count(OssMetricsRegistry.DELETE_SECRET_DEFAULT_STORE, 1, MetricAttribute(MetricTags.SUCCESS, "true")) } + } - val oldPartialConfig = injectCoordinate(oldCoordinate) - val updatedPartialConfig = - secretsRepositoryWriter.updateFromConfig( - WORKSPACE_ID, - oldPartialConfig, - updatedFullConfigNoSecretChange, - SPEC.connectionSpecification, - null, - ) + @Nested + inner class TestUpdateSecrets { + @BeforeEach + fun setup() { + every { metricClient.count(any(), any()) } returns Unit + every { metricClient.count(any(), any(), any()) } returns Unit + every { featureFlagClient.boolVariation(any(), any()) } returns true + } - val newCoordinate = "existing_coordinate_v2" - 
val expPartialConfig = - Jsons.deserialize( - """ - {"username":"airbyte1","password":{"_secret":"$newCoordinate"}} - """.trimIndent(), - ) - assertEquals(expPartialConfig, updatedPartialConfig) + @Test + fun testUpdateSecretSameValueShouldWriteNewCoordinateAndDelete() { + val secret = "secret-1" + val oldCoordinate = "existing_coordinate_v1" + secretPersistence.write(SecretCoordinate.fromFullCoordinate(oldCoordinate), secret) + + val updatedFullConfigNoSecretChange = + Jsons.deserialize( + """ + { "username": "airbyte1", "password": "$secret"} + """.trimIndent(), + ) - verify(exactly = 1) { secretPersistence.write(SecretCoordinate.fromFullCoordinate(newCoordinate), secret) } - assertEquals(secret, secretPersistence.read(SecretCoordinate.fromFullCoordinate(newCoordinate))) - verify { metricClient.count(OssMetricsRegistry.UPDATE_SECRET_DEFAULT_STORE, 1) } + val oldPartialConfig = injectCoordinate(oldCoordinate) + val updatedPartialConfig = + secretsRepositoryWriter.updateFromConfig( + WORKSPACE_ID, + oldPartialConfig, + updatedFullConfigNoSecretChange, + SPEC.connectionSpecification, + null, + ) - verify(exactly = 1) { secretPersistence.delete(SecretCoordinate.fromFullCoordinate(oldCoordinate)) } - assertEquals("", secretPersistence.read(SecretCoordinate.fromFullCoordinate(oldCoordinate))) - verify { metricClient.count(OssMetricsRegistry.DELETE_SECRET_DEFAULT_STORE, 1, MetricAttribute(MetricTags.SUCCESS, "true")) } - } + val newCoordinate = "existing_coordinate_v2" + val expPartialConfig = + Jsons.deserialize( + """ + {"username":"airbyte1","password":{"_secret":"$newCoordinate"}} + """.trimIndent(), + ) + assertEquals(expPartialConfig, updatedPartialConfig) - @Test - fun testUpdateSecretNewValueShouldWriteNewCoordinateAndDelete() { - val oldCoordinate = "existing_coordinate_v1" - secretPersistence.write(SecretCoordinate.fromFullCoordinate(oldCoordinate), "secret-1") + verify(exactly = 1) { secretPersistence.write(SecretCoordinate.fromFullCoordinate(newCoordinate), secret) } + assertEquals(secret, secretPersistence.read(SecretCoordinate.fromFullCoordinate(newCoordinate))) + verify { metricClient.count(OssMetricsRegistry.UPDATE_SECRET_DEFAULT_STORE, 1) } - every { metricClient.count(any(), any()) } returns Unit - every { metricClient.count(any(), any(), any()) } returns Unit - every { featureFlagClient.boolVariation(any(), any()) } returns true + verify(exactly = 1) { secretPersistence.delete(SecretCoordinate.fromFullCoordinate(oldCoordinate)) } + assertEquals("", secretPersistence.read(SecretCoordinate.fromFullCoordinate(oldCoordinate))) + verify { metricClient.count(OssMetricsRegistry.DELETE_SECRET_DEFAULT_STORE, 1, MetricAttribute(MetricTags.SUCCESS, "true")) } + } - val newSecret = "secret-2" - val updatedFullConfigSecretChange = - Jsons.deserialize( - """ - { "username": "airbyte", "password": "$newSecret"} - """.trimIndent(), - ) + @Test + fun testUpdateSecretNewValueShouldWriteNewCoordinateAndDelete() { + val oldCoordinate = "existing_coordinate_v1" + secretPersistence.write(SecretCoordinate.fromFullCoordinate(oldCoordinate), "secret-1") + + val newSecret = "secret-2" + val updatedFullConfigSecretChange = + Jsons.deserialize( + """ + { "username": "airbyte", "password": "$newSecret"} + """.trimIndent(), + ) - val oldPartialConfig = injectCoordinate(oldCoordinate) - val updatedPartialConfig = - secretsRepositoryWriter.updateFromConfig( - WORKSPACE_ID, - oldPartialConfig, - updatedFullConfigSecretChange, - SPEC.connectionSpecification, - null, - ) + val oldPartialConfig = 
injectCoordinate(oldCoordinate) + val updatedPartialConfig = + secretsRepositoryWriter.updateFromConfig( + WORKSPACE_ID, + oldPartialConfig, + updatedFullConfigSecretChange, + SPEC.connectionSpecification, + null, + ) - val newCoordinate = "existing_coordinate_v2" - val expPartialConfig = - Jsons.deserialize( - """ - {"username":"airbyte","password":{"_secret":"$newCoordinate"}} - """.trimIndent(), - ) - assertEquals(expPartialConfig, updatedPartialConfig) + val newCoordinate = "existing_coordinate_v2" + val expPartialConfig = + Jsons.deserialize( + """ + {"username":"airbyte","password":{"_secret":"$newCoordinate"}} + """.trimIndent(), + ) + assertEquals(expPartialConfig, updatedPartialConfig) - verify(exactly = 1) { secretPersistence.write(SecretCoordinate.fromFullCoordinate(newCoordinate), newSecret) } - assertEquals(newSecret, secretPersistence.read(SecretCoordinate.fromFullCoordinate(newCoordinate))) - verify { metricClient.count(OssMetricsRegistry.UPDATE_SECRET_DEFAULT_STORE, 1) } + verify(exactly = 1) { secretPersistence.write(SecretCoordinate.fromFullCoordinate(newCoordinate), newSecret) } + assertEquals(newSecret, secretPersistence.read(SecretCoordinate.fromFullCoordinate(newCoordinate))) + verify { metricClient.count(OssMetricsRegistry.UPDATE_SECRET_DEFAULT_STORE, 1) } - verify(exactly = 1) { secretPersistence.delete(SecretCoordinate.fromFullCoordinate(oldCoordinate)) } - assertEquals("", secretPersistence.read(SecretCoordinate.fromFullCoordinate(oldCoordinate))) - verify { metricClient.count(OssMetricsRegistry.DELETE_SECRET_DEFAULT_STORE, 1, MetricAttribute(MetricTags.SUCCESS, "true")) } - } + verify(exactly = 1) { secretPersistence.delete(SecretCoordinate.fromFullCoordinate(oldCoordinate)) } + assertEquals("", secretPersistence.read(SecretCoordinate.fromFullCoordinate(oldCoordinate))) + verify { metricClient.count(OssMetricsRegistry.DELETE_SECRET_DEFAULT_STORE, 1, MetricAttribute(MetricTags.SUCCESS, "true")) } + } - @Test - fun testUpdateSecretsComplexShouldWriteNewCoordinateAndDelete() { - val spec = - Jsons.deserialize( - """ - { "properties": { "username": { "type": "string" }, "credentials": { "type" : "object", "properties" : { "client_id": { "type": "string", "airbyte_secret": true }, "password": { "type": "string", "airbyte_secret": true } } } } } - """.trimIndent(), - ) - every { metricClient.count(any(), any()) } returns Unit - every { metricClient.count(any(), any(), any()) } returns Unit - every { featureFlagClient.boolVariation(any(), any()) } returns true - - val oldCoordinate1 = "existing-coordinate-0_v1" - val oldSecret1 = "abc" - secretPersistence.write(SecretCoordinate.fromFullCoordinate(oldCoordinate1), oldSecret1) - val oldCoordinate2 = "existing-coordinate-1_v1" - val oldSecret2 = "def" - secretPersistence.write(SecretCoordinate.fromFullCoordinate(oldCoordinate2), oldSecret2) - val oldPartialConfig = - Jsons.deserialize( - """ - { "username": "airbyte", "credentials": { "client_id": { "_secret": "$oldCoordinate1" }, "password": { "_secret": "$oldCoordinate2" } } } - """.trimIndent(), - ) + @Test + fun testUpdateSecretsComplexShouldWriteNewCoordinateAndDelete() { + val spec = + Jsons.deserialize( + """ + { "properties": { "username": { "type": "string" }, "credentials": { "type" : "object", "properties" : { "client_id": { "type": "string", "airbyte_secret": true }, "password": { "type": "string", "airbyte_secret": true } } } } } + """.trimIndent(), + ) + val oldCoordinate1 = "existing-coordinate-0_v1" + val oldSecret1 = "abc" + 
secretPersistence.write(SecretCoordinate.fromFullCoordinate(oldCoordinate1), oldSecret1) + val oldCoordinate2 = "existing-coordinate-1_v1" + val oldSecret2 = "def" + secretPersistence.write(SecretCoordinate.fromFullCoordinate(oldCoordinate2), oldSecret2) + val oldPartialConfig = + Jsons.deserialize( + """ + { "username": "airbyte", "credentials": { "client_id": { "_secret": "$oldCoordinate1" }, "password": { "_secret": "$oldCoordinate2" } } } + """.trimIndent(), + ) - val newSecret = "ghi" - val newFullConfig = - Jsons.deserialize( - """ - { "username": "airbyte", "credentials": { "client_id": "$oldSecret1", "password": "$newSecret" } } - """.trimIndent(), - ) + val newSecret = "ghi" + val newFullConfig = + Jsons.deserialize( + """ + { "username": "airbyte", "credentials": { "client_id": "$oldSecret1", "password": "$newSecret" } } + """.trimIndent(), + ) - val updatedPartialConfig = - secretsRepositoryWriter.updateFromConfig( - WORKSPACE_ID, - oldPartialConfig, - newFullConfig, - spec, - null, - ) + val updatedPartialConfig = + secretsRepositoryWriter.updateFromConfig( + WORKSPACE_ID, + oldPartialConfig, + newFullConfig, + spec, + null, + ) - val newCoordinate1 = "existing-coordinate-0_v2" - val newCoordinate2 = "existing-coordinate-1_v2" - val expPartialConfig = - Jsons.deserialize( - """ - { "username": "airbyte", "credentials": { "client_id": { "_secret": "$newCoordinate1" }, "password": { "_secret": "$newCoordinate2" } } } - """.trimIndent(), - ) - assertEquals(expPartialConfig, updatedPartialConfig) + val newCoordinate1 = "existing-coordinate-0_v2" + val newCoordinate2 = "existing-coordinate-1_v2" + val expPartialConfig = + Jsons.deserialize( + """ + { "username": "airbyte", "credentials": { "client_id": { "_secret": "$newCoordinate1" }, "password": { "_secret": "$newCoordinate2" } } } + """.trimIndent(), + ) + assertEquals(expPartialConfig, updatedPartialConfig) - verify(exactly = 1) { secretPersistence.write(SecretCoordinate.fromFullCoordinate(newCoordinate1), oldSecret1) } - verify(exactly = 1) { secretPersistence.write(SecretCoordinate.fromFullCoordinate(newCoordinate2), newSecret) } - verify { metricClient.count(OssMetricsRegistry.UPDATE_SECRET_DEFAULT_STORE, 1) } - verify { metricClient.count(OssMetricsRegistry.UPDATE_SECRET_DEFAULT_STORE, 1) } + verify(exactly = 1) { secretPersistence.write(SecretCoordinate.fromFullCoordinate(newCoordinate1), oldSecret1) } + verify(exactly = 1) { secretPersistence.write(SecretCoordinate.fromFullCoordinate(newCoordinate2), newSecret) } + verify { metricClient.count(OssMetricsRegistry.UPDATE_SECRET_DEFAULT_STORE, 1) } + verify { metricClient.count(OssMetricsRegistry.UPDATE_SECRET_DEFAULT_STORE, 1) } - verify(exactly = 1) { secretPersistence.delete(SecretCoordinate.fromFullCoordinate(oldCoordinate1)) } - verify(exactly = 1) { secretPersistence.delete(SecretCoordinate.fromFullCoordinate(oldCoordinate2)) } - verify { metricClient.count(OssMetricsRegistry.DELETE_SECRET_DEFAULT_STORE, 1, MetricAttribute(MetricTags.SUCCESS, "true")) } - verify { metricClient.count(OssMetricsRegistry.DELETE_SECRET_DEFAULT_STORE, 1, MetricAttribute(MetricTags.SUCCESS, "true")) } - } + verify(exactly = 1) { secretPersistence.delete(SecretCoordinate.fromFullCoordinate(oldCoordinate1)) } + verify(exactly = 1) { secretPersistence.delete(SecretCoordinate.fromFullCoordinate(oldCoordinate2)) } + verify { metricClient.count(OssMetricsRegistry.DELETE_SECRET_DEFAULT_STORE, 1, MetricAttribute(MetricTags.SUCCESS, "true")) } + verify { 
metricClient.count(OssMetricsRegistry.DELETE_SECRET_DEFAULT_STORE, 1, MetricAttribute(MetricTags.SUCCESS, "true")) } + } - @Test - fun testUpdateSecretFeatureFlagFalseShouldNotDelete() { - val secret = "secret-1" - val oldCoordinate = "existing_coordinate_v1" - secretPersistence.write(SecretCoordinate.fromFullCoordinate(oldCoordinate), secret) + @Test + fun testUpdateSecretFeatureFlagFalseShouldNotDelete() { + val secret = "secret-1" + val oldCoordinate = "existing_coordinate_v1" + secretPersistence.write(SecretCoordinate.fromFullCoordinate(oldCoordinate), secret) - every { metricClient.count(any(), any()) } returns Unit - every { featureFlagClient.boolVariation(any(), any()) } returns false + every { featureFlagClient.boolVariation(any(), any()) } returns false - val updatedFullConfigNoSecretChange = - Jsons.deserialize( - """ - { "username": "airbyte1", "password": "$secret"} - """.trimIndent(), - ) + val updatedFullConfigNoSecretChange = + Jsons.deserialize( + """ + { "username": "airbyte1", "password": "$secret"} + """.trimIndent(), + ) - val oldPartialConfig = injectCoordinate(oldCoordinate) - val updatedPartialConfig = - secretsRepositoryWriter.updateFromConfig( - WORKSPACE_ID, - oldPartialConfig, - updatedFullConfigNoSecretChange, - SPEC.connectionSpecification, - null, - ) + val oldPartialConfig = injectCoordinate(oldCoordinate) + val updatedPartialConfig = + secretsRepositoryWriter.updateFromConfig( + WORKSPACE_ID, + oldPartialConfig, + updatedFullConfigNoSecretChange, + SPEC.connectionSpecification, + null, + ) - val newCoordinate = "existing_coordinate_v2" - val expPartialConfig = - Jsons.deserialize( - """ - {"username":"airbyte1","password":{"_secret":"$newCoordinate"}} - """.trimIndent(), - ) - assertEquals(expPartialConfig, updatedPartialConfig) + val newCoordinate = "existing_coordinate_v2" + val expPartialConfig = + Jsons.deserialize( + """ + {"username":"airbyte1","password":{"_secret":"$newCoordinate"}} + """.trimIndent(), + ) + assertEquals(expPartialConfig, updatedPartialConfig) - verify(exactly = 1) { secretPersistence.write(SecretCoordinate.fromFullCoordinate(newCoordinate), secret) } - assertEquals(secret, secretPersistence.read(SecretCoordinate.fromFullCoordinate(newCoordinate))) - verify { metricClient.count(OssMetricsRegistry.UPDATE_SECRET_DEFAULT_STORE, 1) } + verify(exactly = 1) { secretPersistence.write(SecretCoordinate.fromFullCoordinate(newCoordinate), secret) } + assertEquals(secret, secretPersistence.read(SecretCoordinate.fromFullCoordinate(newCoordinate))) + verify { metricClient.count(OssMetricsRegistry.UPDATE_SECRET_DEFAULT_STORE, 1) } - verify(exactly = 0) { secretPersistence.delete(SecretCoordinate.fromFullCoordinate(oldCoordinate)) } - assertEquals(secret, secretPersistence.read(SecretCoordinate.fromFullCoordinate(oldCoordinate))) - verify(exactly = 0) { metricClient.count(OssMetricsRegistry.DELETE_SECRET_DEFAULT_STORE, 1) } - } + verify(exactly = 0) { secretPersistence.delete(SecretCoordinate.fromFullCoordinate(oldCoordinate)) } + assertEquals(secret, secretPersistence.read(SecretCoordinate.fromFullCoordinate(oldCoordinate))) + verify(exactly = 0) { metricClient.count(OssMetricsRegistry.DELETE_SECRET_DEFAULT_STORE, 1) } + } - @Test - fun testUpdateSecretDeleteErrorShouldNotPropagate() { - secretPersistence = mockk() - secretsRepositoryWriter = - SecretsRepositoryWriter( - secretPersistence, - metricClient, - featureFlagClient, - ) + @Test + fun testUpdateSecretDeleteErrorShouldNotPropagate() { + secretPersistence = mockk() + secretsRepositoryWriter 
= + SecretsRepositoryWriter( + secretPersistence, + metricClient, + featureFlagClient, + ) - every { secretPersistence.write(any(), any()) } returns Unit - every { secretPersistence.read(any()) } returns "something" - every { metricClient.count(any(), any()) } returns Unit - every { metricClient.count(any(), any(), any()) } returns Unit - every { featureFlagClient.boolVariation(any(), any()) } returns true - every { secretPersistence.delete(any()) } throws RuntimeException("disable error") + every { secretPersistence.write(any(), any()) } returns Unit + every { secretPersistence.read(any()) } returns "something" + every { secretPersistence.delete(any()) } throws RuntimeException("disable error") - val oldCoordinate = "existing_coordinate_v1" - val oldPartialConfig = injectCoordinate(oldCoordinate) + val oldCoordinate = "existing_coordinate_v1" + val oldPartialConfig = injectCoordinate(oldCoordinate) - val newSecret = "secret-2" - val updatedFullConfigSecretChange = - Jsons.deserialize( - """ - { "username": "airbyte", "password": "$newSecret"} - """.trimIndent(), - ) + val newSecret = "secret-2" + val updatedFullConfigSecretChange = + Jsons.deserialize( + """ + { "username": "airbyte", "password": "$newSecret"} + """.trimIndent(), + ) - assertDoesNotThrow { - secretsRepositoryWriter.updateFromConfig( - WORKSPACE_ID, - oldPartialConfig, - updatedFullConfigSecretChange, - SPEC.connectionSpecification, - null, - ) - } + assertDoesNotThrow { + secretsRepositoryWriter.updateFromConfig( + WORKSPACE_ID, + oldPartialConfig, + updatedFullConfigSecretChange, + SPEC.connectionSpecification, + null, + ) + } - // The new secret should still be written, despite the disable error. - val newCoordinate = "existing_coordinate_v2" - verify(exactly = 1) { secretPersistence.write(SecretCoordinate.fromFullCoordinate(newCoordinate), newSecret) } - verify(exactly = 1) { metricClient.count(OssMetricsRegistry.UPDATE_SECRET_DEFAULT_STORE, 1) } + // The new secret should still be written, despite the disable error. + val newCoordinate = "existing_coordinate_v2" + verify(exactly = 1) { secretPersistence.write(SecretCoordinate.fromFullCoordinate(newCoordinate), newSecret) } + verify(exactly = 1) { metricClient.count(OssMetricsRegistry.UPDATE_SECRET_DEFAULT_STORE, 1) } - verify(exactly = 1) { secretPersistence.delete(SecretCoordinate.fromFullCoordinate(oldCoordinate)) } - // No metric is emitted because we were not successful. - verify(exactly = 1) { metricClient.count(OssMetricsRegistry.DELETE_SECRET_DEFAULT_STORE, 1, MetricAttribute(MetricTags.SUCCESS, "false")) } + verify(exactly = 1) { secretPersistence.delete(SecretCoordinate.fromFullCoordinate(oldCoordinate)) } + // No metric is emitted because we were not successful. 
+ verify(exactly = 1) { metricClient.count(OssMetricsRegistry.DELETE_SECRET_DEFAULT_STORE, 1, MetricAttribute(MetricTags.SUCCESS, "false")) } + } } // TODO - port this to source service test diff --git a/airbyte-config/init/src/main/kotlin/io/airbyte/config/init/LocalDeclarativeManifestImageVersionsProvider.kt b/airbyte-config/init/src/main/kotlin/io/airbyte/config/init/LocalDeclarativeManifestImageVersionsProvider.kt index 4278efd734d..9497cce073f 100644 --- a/airbyte-config/init/src/main/kotlin/io/airbyte/config/init/LocalDeclarativeManifestImageVersionsProvider.kt +++ b/airbyte-config/init/src/main/kotlin/io/airbyte/config/init/LocalDeclarativeManifestImageVersionsProvider.kt @@ -10,6 +10,7 @@ class LocalDeclarativeManifestImageVersionsProvider : DeclarativeManifestImageVe return mapOf( 0 to "0.90.0", 1 to "1.0.1", + 2 to "2.0.0", ) } } diff --git a/airbyte-connector-builder-resources/CDK_VERSION b/airbyte-connector-builder-resources/CDK_VERSION index 4cda8f19edc..276cbf9e285 100644 --- a/airbyte-connector-builder-resources/CDK_VERSION +++ b/airbyte-connector-builder-resources/CDK_VERSION @@ -1 +1 @@ -1.5.2 +2.3.0 diff --git a/airbyte-connector-builder-server/Dockerfile b/airbyte-connector-builder-server/Dockerfile index 0845127a34c..6412ac16642 100644 --- a/airbyte-connector-builder-server/Dockerfile +++ b/airbyte-connector-builder-server/Dockerfile @@ -2,7 +2,7 @@ ARG JAVA_PYTHON_BASE_IMAGE_VERSION=2.1.3 FROM airbyte/airbyte-base-java-python-image:${JAVA_PYTHON_BASE_IMAGE_VERSION} AS connector-builder-server # Set up CDK requirements -ARG CDK_VERSION=1.5.2 +ARG CDK_VERSION=2.3.0 ENV CDK_PYTHON=${PYENV_ROOT}/versions/${PYTHON_VERSION}/bin/python ENV CDK_ENTRYPOINT ${PYENV_ROOT}/versions/${PYTHON_VERSION}/lib/python3.9/site-packages/airbyte_cdk/connector_builder/main.py # Set up CDK diff --git a/airbyte-connector-builder-server/README.md b/airbyte-connector-builder-server/README.md index 714cfa73d38..b603ec1d499 100644 --- a/airbyte-connector-builder-server/README.md +++ b/airbyte-connector-builder-server/README.md @@ -6,17 +6,38 @@ we deprecated the old server and all image versions from `0.45.20` onward are ba ## Getting started -Install dependencies, compile, and build the server +### Install CDK dependencies -The Connector Builder API server sends commands to an entrypoint of the Airbyte CDK. Therefore, to build the Connector Builder server, an installation of the CDK needs to be available and the `CDK_VERSION` environment needs to be set to a compatible version of the CDK. +The Connector Builder API server sends commands to an entrypoint of the Airbyte CDK. Therefore, to build the Connector Builder server, an installation of the CDK needs to be available and the `CDK_VERSION` environment needs to be set to a compatible version of the CDK. The earliest compatible version of the CDK is 0.31.1. -The earliest compatible version of the CDK is 0.31.1. +To set up a local CDK environment, navigate to the airbyte-cdk/python folder in your local airbyte repo and create your CDK environment: + +```bash +poetry install +``` + +You will need to add the path to this environment in the next steps. You can run `poetry show -v` to verify it. + +#### Setting your Python version + +The Python CDK is not currently compatible with the latest versions of Python. 
If you are using an incompatible version of Python, we recommend using `pyenv` to manage the Python version locally: + +```bash +pyenv local 3.10 +poetry env use $(pyenv which python) +poetry install +``` + +### Compile, build and run the server + +From the root folder of your local airbyte-platform-internal repo, run the build command: ```bash ./gradlew -p oss airbyte-connector-builder-server:build ``` To develop the server locally (without Docker), the `CDK_PYTHON` and `CDK_ENTRYPOINT` environment variables need to be set, where `CDK_PYTHON` is the path to the python interpreter you want to use, i.e. the one in which the CDK is installed, and `CDK_ENTRYPOINT` is the path to the Connector Builder entrypoint, located at . + ```bash export CDK_PYTHON= export CDK_ENTRYPOINT= @@ -29,12 +50,21 @@ export CDK_ENTRYPOINT=~/code/airbyte/airbyte-cdk/python/airbyte_cdk/connector_bu ``` Then run the server (you can also do this without the build step) + ```bash ./gradlew -p oss airbyte-connector-builder-server:run ``` +If you experience any issues, try running the full command as: + +```bash +sudo CDK_PYTHON= CDK_ENTRYPOINT= ./gradlew -p oss airbyte-connector-builder-server:run +``` + The server is now reachable on localhost:8080 +### Run the full platform locally + If you want to run the full platform with this local instance, you must edit the `.env` file as follows: ``` bash @@ -45,6 +75,12 @@ CONNECTOR_BUILDER_SERVER_API_HOST=http://airbyte-connector-builder-server:8080 CONNECTOR_BUILDER_SERVER_API_HOST=http://host.docker.internal:8080 ``` +To run the platform, use the following command. Replace the PATH_TO_CONNECTORS placeholder with the actual path to your connectors folder in the airbyte repo (at airbyte-integrations/connectors): + +```bash +BASIC_AUTH_USERNAME="" BASIC_AUTH_PASSWORD="" PATH_TO_CONNECTORS=~/airbyte/airbyte-integrations/connectors VERSION=dev docker compose -f docker-compose.yaml -f docker-compose.builder.yaml up +``` + Note: there are two different, but very similarly-named, environment variables; you must edit `CONNECTOR_BUILDER_SERVER_API_HOST`, not `CONNECTOR_BUILDER_API_HOST`.
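Before starting the platform against a locally-running builder server, it can be worth sanity-checking that the two CDK variables from the section above point at a working installation. A minimal sketch, assuming the CDK was installed with `poetry install` as described earlier and the airbyte repo lives at `~/code/airbyte` (`poetry env info --path` prints the virtualenv root):

```bash
# Derive CDK_PYTHON from the Poetry environment created during `poetry install`
# (the repo location is an assumption; adjust to your checkout).
export CDK_PYTHON="$(cd ~/code/airbyte/airbyte-cdk/python && poetry env info --path)/bin/python"
export CDK_ENTRYPOINT=~/code/airbyte/airbyte-cdk/python/airbyte_cdk/connector_builder/main.py

# Both commands should succeed if the CDK environment is wired up correctly.
"$CDK_PYTHON" --version
"$CDK_PYTHON" -c 'import airbyte_cdk; print(airbyte_cdk.__file__)'
```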
### Running the platform with support for custom components (docker-compose only) diff --git a/airbyte-connector-builder-server/requirements.in b/airbyte-connector-builder-server/requirements.in index 2b39eb1f679..2e4e58cb555 100644 --- a/airbyte-connector-builder-server/requirements.in +++ b/airbyte-connector-builder-server/requirements.in @@ -1 +1 @@ -airbyte-cdk==1.5.2 +airbyte-cdk==2.3.0 diff --git a/airbyte-connector-builder-server/requirements.txt b/airbyte-connector-builder-server/requirements.txt index d302b7c5a3b..78a1d4df13f 100644 --- a/airbyte-connector-builder-server/requirements.txt +++ b/airbyte-connector-builder-server/requirements.txt @@ -4,10 +4,12 @@ # # pip-compile # -airbyte-cdk==1.5.2 +airbyte-cdk==2.3.0 # via -r requirements.in -airbyte-protocol-models==0.12.2 +airbyte-protocol-models-pdv2==0.12.2 # via airbyte-cdk +annotated-types==0.7.0 + # via pydantic attrs==23.2.0 # via # cattrs @@ -21,7 +23,7 @@ cachetools==5.3.3 # via airbyte-cdk cattrs==23.2.3 # via requests-cache -certifi==2024.6.2 +certifi==2024.7.4 # via requests cffi==1.16.0 # via cryptography @@ -53,11 +55,11 @@ jsonschema==3.2.0 # via airbyte-cdk langchain-core==0.1.42 # via airbyte-cdk -langsmith==0.1.79 +langsmith==0.1.83 # via langchain-core markupsafe==2.1.5 # via jinja2 -orjson==3.10.5 +orjson==3.10.6 # via langsmith packaging==23.2 # via langchain-core @@ -67,12 +69,14 @@ platformdirs==4.2.2 # via requests-cache pycparser==2.22 # via cffi -pydantic==1.10.16 +pydantic==2.8.2 # via # airbyte-cdk - # airbyte-protocol-models + # airbyte-protocol-models-pdv2 # langchain-core # langsmith +pydantic-core==2.20.1 + # via pydantic pyjwt==2.8.0 # via airbyte-cdk pyrate-limiter==3.1.1 @@ -96,7 +100,7 @@ requests==2.32.3 # airbyte-cdk # langsmith # requests-cache -requests-cache==1.2.0 +requests-cache==1.2.1 # via airbyte-cdk six==1.16.0 # via @@ -104,12 +108,13 @@ six==1.16.0 # jsonschema # python-dateutil # url-normalize -tenacity==8.4.1 +tenacity==8.5.0 # via langchain-core typing-extensions==4.12.2 # via # cattrs # pydantic + # pydantic-core url-normalize==1.4.3 # via requests-cache urllib3==2.2.2 diff --git a/airbyte-connector-builder-server/src/main/openapi/openapi.yaml b/airbyte-connector-builder-server/src/main/openapi/openapi.yaml index 1c38f154fa0..80ca2ed308a 100644 --- a/airbyte-connector-builder-server/src/main/openapi/openapi.yaml +++ b/airbyte-connector-builder-server/src/main/openapi/openapi.yaml @@ -213,6 +213,7 @@ components: type: object required: - url + - http_method properties: url: type: string @@ -227,7 +228,6 @@ components: type: string enum: ["GET", "POST", "PUT", "PATCH"] description: The http method of the request ("GET", "POST", "PUT", or "PATCH") - default: "GET" HttpResponse: type: object required: diff --git a/airbyte-container-orchestrator/src/main/java/io/airbyte/container_orchestrator/config/ContainerOrchestratorFactory.java b/airbyte-container-orchestrator/src/main/java/io/airbyte/container_orchestrator/config/ContainerOrchestratorFactory.java index 9cab558d6da..2381af358f6 100644 --- a/airbyte-container-orchestrator/src/main/java/io/airbyte/container_orchestrator/config/ContainerOrchestratorFactory.java +++ b/airbyte-container-orchestrator/src/main/java/io/airbyte/container_orchestrator/config/ContainerOrchestratorFactory.java @@ -11,10 +11,8 @@ import io.airbyte.commons.workers.config.WorkerConfigsProvider; import io.airbyte.config.EnvConfigs; import io.airbyte.container_orchestrator.AsyncStateManager; -import 
io.airbyte.container_orchestrator.orchestrator.DbtJobOrchestrator; import io.airbyte.container_orchestrator.orchestrator.JobOrchestrator; import io.airbyte.container_orchestrator.orchestrator.NoOpOrchestrator; -import io.airbyte.container_orchestrator.orchestrator.NormalizationJobOrchestrator; import io.airbyte.container_orchestrator.orchestrator.ReplicationJobOrchestrator; import io.airbyte.featureflag.FeatureFlagClient; import io.airbyte.metrics.lib.MetricClient; @@ -31,8 +29,6 @@ import io.airbyte.workers.storage.DocumentType; import io.airbyte.workers.storage.StorageClient; import io.airbyte.workers.storage.StorageClientFactory; -import io.airbyte.workers.sync.DbtLauncherWorker; -import io.airbyte.workers.sync.NormalizationLauncherWorker; import io.airbyte.workers.sync.OrchestratorConstants; import io.airbyte.workers.sync.ReplicationLauncherWorker; import io.airbyte.workers.workload.JobOutputDocStore; @@ -90,7 +86,6 @@ ProcessFactory dockerProcessFactory(final WorkerConfigsProvider workerConfigsPro ProcessFactory kubeProcessFactory( final WorkerConfigsProvider workerConfigsProvider, final FeatureFlagClient featureFlagClient, - final EnvConfigs configs, @Value("${micronaut.server.port}") final int serverPort, @Value("${airbyte.worker.job.kube.serviceAccount}") final String serviceAccount) throws UnknownHostException { @@ -115,8 +110,6 @@ JobOrchestrator jobOrchestrator( @Named("application") final String application, @Named("configDir") final String configDir, final EnvConfigs envConfigs, - final ProcessFactory processFactory, - final WorkerConfigsProvider workerConfigsProvider, final JobRunConfig jobRunConfig, final ReplicationWorkerFactory replicationWorkerFactory, final AsyncStateManager asyncStateManager, @@ -127,8 +120,6 @@ JobOrchestrator jobOrchestrator( return switch (application) { case ReplicationLauncherWorker.REPLICATION -> new ReplicationJobOrchestrator(configDir, envConfigs, jobRunConfig, replicationWorkerFactory, asyncStateManager, workloadApiClient, workloadIdGenerator, workloadEnabled, jobOutputDocStore); - case NormalizationLauncherWorker.NORMALIZATION -> new NormalizationJobOrchestrator(envConfigs, processFactory, jobRunConfig, asyncStateManager); - case DbtLauncherWorker.DBT -> new DbtJobOrchestrator(envConfigs, workerConfigsProvider, processFactory, jobRunConfig, asyncStateManager); case AsyncOrchestratorPodProcess.NO_OP -> new NoOpOrchestrator(); default -> throw new IllegalStateException("Could not find job orchestrator for application: " + application); }; diff --git a/airbyte-container-orchestrator/src/main/java/io/airbyte/container_orchestrator/orchestrator/DbtJobOrchestrator.java b/airbyte-container-orchestrator/src/main/java/io/airbyte/container_orchestrator/orchestrator/DbtJobOrchestrator.java deleted file mode 100644 index 7028546d918..00000000000 --- a/airbyte-container-orchestrator/src/main/java/io/airbyte/container_orchestrator/orchestrator/DbtJobOrchestrator.java +++ /dev/null @@ -1,105 +0,0 @@ -/* - * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.container_orchestrator.orchestrator; - -import static io.airbyte.metrics.lib.ApmTraceConstants.JOB_ORCHESTRATOR_OPERATION_NAME; -import static io.airbyte.metrics.lib.ApmTraceConstants.Tags.DESTINATION_DOCKER_IMAGE_KEY; -import static io.airbyte.metrics.lib.ApmTraceConstants.Tags.JOB_ID_KEY; - -import datadog.trace.api.Trace; -import io.airbyte.commons.temporal.TemporalUtils; -import io.airbyte.commons.workers.config.WorkerConfigs; -import io.airbyte.commons.workers.config.WorkerConfigsProvider; -import io.airbyte.commons.workers.config.WorkerConfigsProvider.ResourceType; -import io.airbyte.config.Configs; -import io.airbyte.config.OperatorDbtInput; -import io.airbyte.container_orchestrator.AsyncStateManager; -import io.airbyte.metrics.lib.ApmTraceUtils; -import io.airbyte.persistence.job.models.IntegrationLauncherConfig; -import io.airbyte.persistence.job.models.JobRunConfig; -import io.airbyte.workers.general.DbtTransformationRunner; -import io.airbyte.workers.general.DbtTransformationWorker; -import io.airbyte.workers.process.AsyncKubePodStatus; -import io.airbyte.workers.process.KubePodProcess; -import io.airbyte.workers.process.ProcessFactory; -import io.airbyte.workers.sync.ReplicationLauncherWorker; -import java.lang.invoke.MethodHandles; -import java.nio.file.Path; -import java.util.Map; -import java.util.Optional; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * Run the dbt normalization container. - */ -public class DbtJobOrchestrator implements JobOrchestrator<OperatorDbtInput> { - - private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); - private final Configs configs; - private final WorkerConfigsProvider workerConfigsProvider; - private final ProcessFactory processFactory; - private final JobRunConfig jobRunConfig; - // Used by the orchestrator to mark the job RUNNING once the relevant pods are spun up.
- private final AsyncStateManager asyncStateManager; - - public DbtJobOrchestrator(final Configs configs, - final WorkerConfigsProvider workerConfigsProvider, - final ProcessFactory processFactory, - final JobRunConfig jobRunConfig, - final AsyncStateManager asyncStateManager) { - this.configs = configs; - this.workerConfigsProvider = workerConfigsProvider; - this.processFactory = processFactory; - this.jobRunConfig = jobRunConfig; - this.asyncStateManager = asyncStateManager; - } - - @Override - public String getOrchestratorName() { - return "DBT Transformation"; - } - - @Override - public Class<OperatorDbtInput> getInputClass() { - return OperatorDbtInput.class; - } - - @Trace(operationName = JOB_ORCHESTRATOR_OPERATION_NAME) - @Override - public Optional<String> runJob() throws Exception { - final OperatorDbtInput dbtInput = readInput(); - - final IntegrationLauncherConfig destinationLauncherConfig = JobOrchestrator.readAndDeserializeFile( - Path.of(KubePodProcess.CONFIG_DIR, - ReplicationLauncherWorker.INIT_FILE_DESTINATION_LAUNCHER_CONFIG), - IntegrationLauncherConfig.class); - - ApmTraceUtils - .addTagsToTrace(Map.of(JOB_ID_KEY, jobRunConfig.getJobId(), DESTINATION_DOCKER_IMAGE_KEY, - destinationLauncherConfig.getDockerImage())); - - log.info("Setting up dbt worker..."); - final WorkerConfigs workerConfigs = workerConfigsProvider.getConfig(ResourceType.DEFAULT); - final DbtTransformationWorker worker = new DbtTransformationWorker( - jobRunConfig.getJobId(), - Math.toIntExact(jobRunConfig.getAttemptId()), - workerConfigs.getResourceRequirements(), - new DbtTransformationRunner(processFactory, destinationLauncherConfig.getDockerImage()), - this::markJobRunning); - - log.info("Running dbt worker..."); - final Path jobRoot = TemporalUtils.getJobRoot(configs.getWorkspaceRoot(), - jobRunConfig.getJobId(), jobRunConfig.getAttemptId()); - worker.run(dbtInput, jobRoot); - - return Optional.empty(); - } - - private void markJobRunning() { - asyncStateManager.write(AsyncKubePodStatus.RUNNING); - } - -} diff --git a/airbyte-container-orchestrator/src/main/java/io/airbyte/container_orchestrator/orchestrator/NormalizationJobOrchestrator.java b/airbyte-container-orchestrator/src/main/java/io/airbyte/container_orchestrator/orchestrator/NormalizationJobOrchestrator.java deleted file mode 100644 index 872108f0e13..00000000000 --- a/airbyte-container-orchestrator/src/main/java/io/airbyte/container_orchestrator/orchestrator/NormalizationJobOrchestrator.java +++ /dev/null @@ -1,104 +0,0 @@ -/* - * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved.
- */ - -package io.airbyte.container_orchestrator.orchestrator; - -import static io.airbyte.metrics.lib.ApmTraceConstants.JOB_ORCHESTRATOR_OPERATION_NAME; -import static io.airbyte.metrics.lib.ApmTraceConstants.Tags.DESTINATION_DOCKER_IMAGE_KEY; -import static io.airbyte.metrics.lib.ApmTraceConstants.Tags.JOB_ID_KEY; - -import datadog.trace.api.Trace; -import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.temporal.TemporalUtils; -import io.airbyte.config.Configs; -import io.airbyte.config.NormalizationInput; -import io.airbyte.config.NormalizationSummary; -import io.airbyte.container_orchestrator.AsyncStateManager; -import io.airbyte.metrics.lib.ApmTraceUtils; -import io.airbyte.persistence.job.models.IntegrationLauncherConfig; -import io.airbyte.persistence.job.models.JobRunConfig; -import io.airbyte.workers.general.DefaultNormalizationWorker; -import io.airbyte.workers.normalization.DefaultNormalizationRunner; -import io.airbyte.workers.normalization.NormalizationWorker; -import io.airbyte.workers.process.AsyncKubePodStatus; -import io.airbyte.workers.process.KubePodProcess; -import io.airbyte.workers.process.ProcessFactory; -import io.airbyte.workers.sync.ReplicationLauncherWorker; -import java.nio.file.Path; -import java.util.Map; -import java.util.Optional; -import lombok.extern.slf4j.Slf4j; - -/** - * Run normalization worker. - */ -@Slf4j -public class NormalizationJobOrchestrator implements JobOrchestrator<NormalizationInput> { - - private final Configs configs; - private final ProcessFactory processFactory; - private final JobRunConfig jobRunConfig; - // Used by the orchestrator to mark the job RUNNING once the relevant pods are spun up. - private final AsyncStateManager asyncStateManager; - - public NormalizationJobOrchestrator(final Configs configs, - final ProcessFactory processFactory, - final JobRunConfig jobRunConfig, - final AsyncStateManager asyncStateManager) { - this.configs = configs; - this.processFactory = processFactory; - this.jobRunConfig = jobRunConfig; - this.asyncStateManager = asyncStateManager; - } - - @Override - public String getOrchestratorName() { - return "Normalization"; - } - - @Override - public Class<NormalizationInput> getInputClass() { - return NormalizationInput.class; - } - - @Trace(operationName = JOB_ORCHESTRATOR_OPERATION_NAME) - @Override - public Optional<String> runJob() throws Exception { - // final JobRunConfig jobRunConfig = readJobRunConfig(); - final NormalizationInput normalizationInput = readInput(); - - final IntegrationLauncherConfig destinationLauncherConfig = JobOrchestrator.readAndDeserializeFile( - Path.of(KubePodProcess.CONFIG_DIR, - ReplicationLauncherWorker.INIT_FILE_DESTINATION_LAUNCHER_CONFIG), - IntegrationLauncherConfig.class); - - ApmTraceUtils - .addTagsToTrace(Map.of(JOB_ID_KEY, jobRunConfig.getJobId(), DESTINATION_DOCKER_IMAGE_KEY, - destinationLauncherConfig.getDockerImage())); - - log.info("Setting up normalization worker..."); - final NormalizationWorker normalizationWorker = new DefaultNormalizationWorker( - jobRunConfig.getJobId(), - Math.toIntExact(jobRunConfig.getAttemptId()), - new DefaultNormalizationRunner( - processFactory, - destinationLauncherConfig.getNormalizationDockerImage(), - destinationLauncherConfig.getNormalizationIntegrationType()), - configs.getWorkerEnvironment(), - this::markJobRunning); - - log.info("Running normalization worker..."); - final Path jobRoot = TemporalUtils.getJobRoot(configs.getWorkspaceRoot(), - jobRunConfig.getJobId(), jobRunConfig.getAttemptId()); - final NormalizationSummary normalizationSummary =
normalizationWorker.run(normalizationInput, - jobRoot); - - return Optional.of(Jsons.serialize(normalizationSummary)); - } - - private void markJobRunning() { - asyncStateManager.write(AsyncKubePodStatus.RUNNING); - } - -} diff --git a/airbyte-container-orchestrator/src/main/resources/application.yml b/airbyte-container-orchestrator/src/main/resources/application.yml index fd8af6b481b..4be6c991151 100644 --- a/airbyte-container-orchestrator/src/main/resources/application.yml +++ b/airbyte-container-orchestrator/src/main/resources/application.yml @@ -133,7 +133,7 @@ airbyte: memory-limit: ${REPLICATION_ORCHESTRATOR_MEMORY_LIMIT:} memory-request: ${REPLICATION_ORCHESTRATOR_MEMORY_REQUEST:} replication: - persistence-flush-period-sec: ${REPLICATION_FLUSH_PERIOD_SECONDS:60} + persistence-flush-period-sec: ${REPLICATION_FLUSH_PERIOD_SECONDS:10} workload-api: base-path: ${WORKLOAD_API_HOST:} bearer-token: ${WORKLOAD_API_BEARER_TOKEN:} diff --git a/airbyte-container-orchestrator/src/test/java/io/airbyte/container_orchestrator/config/ContainerOrchestratorFactoryTest.java b/airbyte-container-orchestrator/src/test/java/io/airbyte/container_orchestrator/config/ContainerOrchestratorFactoryTest.java index a2533dce87e..8735276a533 100644 --- a/airbyte-container-orchestrator/src/test/java/io/airbyte/container_orchestrator/config/ContainerOrchestratorFactoryTest.java +++ b/airbyte-container-orchestrator/src/test/java/io/airbyte/container_orchestrator/config/ContainerOrchestratorFactoryTest.java @@ -25,8 +25,6 @@ import io.airbyte.workers.process.AsyncOrchestratorPodProcess; import io.airbyte.workers.process.DockerProcessFactory; import io.airbyte.workers.process.ProcessFactory; -import io.airbyte.workers.sync.DbtLauncherWorker; -import io.airbyte.workers.sync.NormalizationLauncherWorker; import io.airbyte.workers.sync.ReplicationLauncherWorker; import io.airbyte.workers.workload.JobOutputDocStore; import io.airbyte.workers.workload.WorkloadIdGenerator; @@ -126,29 +124,18 @@ void jobOrchestrator() { final var factory = new ContainerOrchestratorFactory(); final var repl = factory.jobOrchestrator( - ReplicationLauncherWorker.REPLICATION, configDir, envConfigs, processFactory, workerConfigsProvider, jobRunConfig, replicationWorkerFactory, + ReplicationLauncherWorker.REPLICATION, configDir, envConfigs, jobRunConfig, replicationWorkerFactory, asyncStateManager, workloadApiClient, new WorkloadIdGenerator(), false, jobOutputDocStore); assertEquals("Replication", repl.getOrchestratorName()); - final var norm = factory.jobOrchestrator( - NormalizationLauncherWorker.NORMALIZATION, configDir, envConfigs, processFactory, workerConfigsProvider, jobRunConfig, - replicationWorkerFactory, - asyncStateManager, workloadApiClient, new WorkloadIdGenerator(), false, jobOutputDocStore); - assertEquals("Normalization", norm.getOrchestratorName()); - - final var dbt = factory.jobOrchestrator( - DbtLauncherWorker.DBT, configDir, envConfigs, processFactory, workerConfigsProvider, jobRunConfig, - replicationWorkerFactory, asyncStateManager, workloadApiClient, new WorkloadIdGenerator(), false, jobOutputDocStore); - assertEquals("DBT Transformation", dbt.getOrchestratorName()); - final var noop = factory.jobOrchestrator( - AsyncOrchestratorPodProcess.NO_OP, configDir, envConfigs, processFactory, workerConfigsProvider, jobRunConfig, replicationWorkerFactory, + AsyncOrchestratorPodProcess.NO_OP, configDir, envConfigs, jobRunConfig, replicationWorkerFactory, asyncStateManager, workloadApiClient, new WorkloadIdGenerator(), false, 
jobOutputDocStore);
assertEquals("NO_OP", noop.getOrchestratorName());
var caught = false;
try {
- factory.jobOrchestrator("does not exist", configDir, envConfigs, processFactory, workerConfigsProvider, jobRunConfig, replicationWorkerFactory,
+ factory.jobOrchestrator("does not exist", configDir, envConfigs, jobRunConfig, replicationWorkerFactory,
asyncStateManager, workloadApiClient, new WorkloadIdGenerator(), false, jobOutputDocStore);
} catch (final Exception e) {
caught = true;
diff --git a/airbyte-cron/src/main/kotlin/io/airbyte/cron/jobs/WorkloadMonitor.kt b/airbyte-cron/src/main/kotlin/io/airbyte/cron/jobs/WorkloadMonitor.kt
index 26c4c430eed..b205bf2c698 100644
--- a/airbyte-cron/src/main/kotlin/io/airbyte/cron/jobs/WorkloadMonitor.kt
+++ b/airbyte-cron/src/main/kotlin/io/airbyte/cron/jobs/WorkloadMonitor.kt
@@ -63,7 +63,11 @@ open class WorkloadMonitor(
status = listOf(WorkloadStatus.CLAIMED),
),
)
- failWorkloads(notStartedWorkloads.workloads, "Not started within time limit", CHECK_START)
+ failWorkloads(
+ notStartedWorkloads.workloads,
+ "Airbyte could not start the process within the time limit. The workload was claimed but never started.",
+ CHECK_START,
+ )
}
@Trace
@@ -85,7 +89,11 @@ open class WorkloadMonitor(
),
)
- failWorkloads(notClaimedWorkloads.workloads, "Not claimed within time limit", CHECK_CLAIMS)
+ failWorkloads(
+ notClaimedWorkloads.workloads,
+ "Airbyte could not start the process within the time limit. The workload was never claimed.",
+ CHECK_CLAIMS,
+ )
}
@Trace
@@ -107,7 +115,12 @@ open class WorkloadMonitor(
),
)
- failWorkloads(nonHeartbeatingWorkloads.workloads, "No heartbeat within time limit", CHECK_HEARTBEAT)
+ failWorkloads(
+ nonHeartbeatingWorkloads.workloads,
+ "Airbyte could not track the sync progress. " +
+ "No heartbeat within the time limit indicates the process might have died.",
+ CHECK_HEARTBEAT,
+ )
}
@Trace
diff --git a/airbyte-data/src/main/java/io/airbyte/data/services/DestinationService.java b/airbyte-data/src/main/java/io/airbyte/data/services/DestinationService.java
index a38b17ce8aa..f3bffde9e72 100644
--- a/airbyte-data/src/main/java/io/airbyte/data/services/DestinationService.java
+++ b/airbyte-data/src/main/java/io/airbyte/data/services/DestinationService.java
@@ -78,4 +78,11 @@ void writeDestinationConnectionWithSecrets(
ConnectorSpecification connectorSpecification)
throws JsonValidationException, IOException, ConfigNotFoundException;
+ void tombstoneDestination(
+ final String name,
+ final UUID workspaceId,
+ final UUID destinationId,
+ final ConnectorSpecification spec)
+ throws ConfigNotFoundException, JsonValidationException, IOException;
+
}
diff --git a/airbyte-data/src/main/java/io/airbyte/data/services/SourceService.java b/airbyte-data/src/main/java/io/airbyte/data/services/SourceService.java
index cf4fa440826..ce5dd5172f8 100644
--- a/airbyte-data/src/main/java/io/airbyte/data/services/SourceService.java
+++ b/airbyte-data/src/main/java/io/airbyte/data/services/SourceService.java
@@ -74,4 +74,11 @@ void writeSourceConnectionWithSecrets(final SourceConnection source,
final ConnectorSpecification connectorSpecification)
throws JsonValidationException, IOException, ConfigNotFoundException;
+ void tombstoneSource(
+ final String name,
+ final UUID workspaceId,
+ final UUID sourceId,
+ final ConnectorSpecification spec)
+ throws ConfigNotFoundException, JsonValidationException, IOException;
+
}
diff --git a/airbyte-data/src/main/java/io/airbyte/data/services/impls/jooq/ConnectorMetadataJooqHelper.java 
b/airbyte-data/src/main/java/io/airbyte/data/services/impls/jooq/ConnectorMetadataJooqHelper.java index 4a08e52b642..75ea25c81fc 100644 --- a/airbyte-data/src/main/java/io/airbyte/data/services/impls/jooq/ConnectorMetadataJooqHelper.java +++ b/airbyte-data/src/main/java/io/airbyte/data/services/impls/jooq/ConnectorMetadataJooqHelper.java @@ -16,8 +16,8 @@ import io.airbyte.db.instance.configs.jooq.generated.enums.SupportLevel; import java.time.LocalDate; import java.time.OffsetDateTime; +import java.time.ZoneOffset; import java.util.List; -import java.util.Objects; import java.util.Optional; import java.util.UUID; import java.util.stream.Collectors; @@ -72,19 +72,6 @@ public static ActorDefinitionVersion writeActorDefinitionVersion(final ActorDefi ReleaseStage.class).orElseThrow()) .set(Tables.ACTOR_DEFINITION_VERSION.RELEASE_DATE, actorDefinitionVersion.getReleaseDate() == null ? null : LocalDate.parse(actorDefinitionVersion.getReleaseDate())) - .set(Tables.ACTOR_DEFINITION_VERSION.NORMALIZATION_REPOSITORY, - Objects.nonNull(actorDefinitionVersion.getNormalizationConfig()) - ? actorDefinitionVersion.getNormalizationConfig().getNormalizationRepository() - : null) - .set(Tables.ACTOR_DEFINITION_VERSION.NORMALIZATION_TAG, - Objects.nonNull(actorDefinitionVersion.getNormalizationConfig()) - ? actorDefinitionVersion.getNormalizationConfig().getNormalizationTag() - : null) - .set(Tables.ACTOR_DEFINITION_VERSION.SUPPORTS_DBT, actorDefinitionVersion.getSupportsDbt()) - .set(Tables.ACTOR_DEFINITION_VERSION.NORMALIZATION_INTEGRATION_TYPE, - Objects.nonNull(actorDefinitionVersion.getNormalizationConfig()) - ? actorDefinitionVersion.getNormalizationConfig().getNormalizationIntegrationType() - : null) .set(Tables.ACTOR_DEFINITION_VERSION.ALLOWED_HOSTS, actorDefinitionVersion.getAllowedHosts() == null ? null : JSONB.valueOf(Jsons.serialize(actorDefinitionVersion.getAllowedHosts()))) .set(Tables.ACTOR_DEFINITION_VERSION.SUGGESTED_STREAMS, @@ -95,6 +82,9 @@ public static ActorDefinitionVersion writeActorDefinitionVersion(final ActorDefi .set(Tables.ACTOR_DEFINITION_VERSION.SUPPORT_STATE, Enums.toEnum(actorDefinitionVersion.getSupportState().value(), io.airbyte.db.instance.configs.jooq.generated.enums.SupportState.class) .orElseThrow()) + .set(ACTOR_DEFINITION_VERSION.LAST_PUBLISHED, actorDefinitionVersion.getLastPublished() == null ? null + : actorDefinitionVersion.getLastPublished().toInstant().atOffset(ZoneOffset.UTC)) + .set(ACTOR_DEFINITION_VERSION.CDK_VERSION, actorDefinitionVersion.getCdkVersion()) .where(ACTOR_DEFINITION_VERSION.ID.eq(versionId)) .execute(); } else { @@ -120,19 +110,6 @@ public static ActorDefinitionVersion writeActorDefinitionVersion(final ActorDefi ReleaseStage.class).orElseThrow()) .set(Tables.ACTOR_DEFINITION_VERSION.RELEASE_DATE, actorDefinitionVersion.getReleaseDate() == null ? null : LocalDate.parse(actorDefinitionVersion.getReleaseDate())) - .set(Tables.ACTOR_DEFINITION_VERSION.NORMALIZATION_REPOSITORY, - Objects.nonNull(actorDefinitionVersion.getNormalizationConfig()) - ? actorDefinitionVersion.getNormalizationConfig().getNormalizationRepository() - : null) - .set(Tables.ACTOR_DEFINITION_VERSION.NORMALIZATION_TAG, - Objects.nonNull(actorDefinitionVersion.getNormalizationConfig()) - ? 
actorDefinitionVersion.getNormalizationConfig().getNormalizationTag() - : null) - .set(Tables.ACTOR_DEFINITION_VERSION.SUPPORTS_DBT, actorDefinitionVersion.getSupportsDbt()) - .set(Tables.ACTOR_DEFINITION_VERSION.NORMALIZATION_INTEGRATION_TYPE, - Objects.nonNull(actorDefinitionVersion.getNormalizationConfig()) - ? actorDefinitionVersion.getNormalizationConfig().getNormalizationIntegrationType() - : null) .set(Tables.ACTOR_DEFINITION_VERSION.ALLOWED_HOSTS, actorDefinitionVersion.getAllowedHosts() == null ? null : JSONB.valueOf(Jsons.serialize(actorDefinitionVersion.getAllowedHosts()))) .set(Tables.ACTOR_DEFINITION_VERSION.SUGGESTED_STREAMS, @@ -140,6 +117,9 @@ public static ActorDefinitionVersion writeActorDefinitionVersion(final ActorDefi : JSONB.valueOf(Jsons.serialize(actorDefinitionVersion.getSuggestedStreams()))) .set(ACTOR_DEFINITION_VERSION.SUPPORTS_REFRESHES, actorDefinitionVersion.getSupportsRefreshes() != null && actorDefinitionVersion.getSupportsRefreshes()) + .set(ACTOR_DEFINITION_VERSION.LAST_PUBLISHED, actorDefinitionVersion.getLastPublished() == null ? null + : actorDefinitionVersion.getLastPublished().toInstant().atOffset(ZoneOffset.UTC)) + .set(ACTOR_DEFINITION_VERSION.CDK_VERSION, actorDefinitionVersion.getCdkVersion()) .set(Tables.ACTOR_DEFINITION_VERSION.SUPPORT_STATE, Enums.toEnum(actorDefinitionVersion.getSupportState().value(), io.airbyte.db.instance.configs.jooq.generated.enums.SupportState.class) .orElseThrow()) diff --git a/airbyte-data/src/main/java/io/airbyte/data/services/impls/jooq/DbConverter.java b/airbyte-data/src/main/java/io/airbyte/data/services/impls/jooq/DbConverter.java index 9ba81b755ab..68e00b3cff4 100644 --- a/airbyte-data/src/main/java/io/airbyte/data/services/impls/jooq/DbConverter.java +++ b/airbyte-data/src/main/java/io/airbyte/data/services/impls/jooq/DbConverter.java @@ -38,13 +38,13 @@ import io.airbyte.config.AllowedHosts; import io.airbyte.config.BreakingChangeScope; import io.airbyte.config.ConnectorBuilderProject; +import io.airbyte.config.ConnectorRegistryEntryMetrics; import io.airbyte.config.DeclarativeManifest; import io.airbyte.config.DestinationConnection; import io.airbyte.config.DestinationOAuthParameter; import io.airbyte.config.FieldSelectionData; import io.airbyte.config.Geography; import io.airbyte.config.JobSyncConfig.NamespaceDefinitionType; -import io.airbyte.config.NormalizationDestinationDefinitionConfig; import io.airbyte.config.Notification; import io.airbyte.config.NotificationSettings; import io.airbyte.config.Organization; @@ -79,8 +79,8 @@ import java.time.OffsetDateTime; import java.time.ZoneOffset; import java.util.ArrayList; +import java.util.Date; import java.util.List; -import java.util.Objects; import java.util.Optional; import java.util.UUID; import org.jooq.Record; @@ -276,6 +276,9 @@ public static StandardSourceDefinition buildStandardSourceDefinition(final Recor .withResourceRequirements(record.get(ACTOR_DEFINITION.RESOURCE_REQUIREMENTS) == null ? null : Jsons.deserialize(record.get(ACTOR_DEFINITION.RESOURCE_REQUIREMENTS).data(), ActorDefinitionResourceRequirements.class)) + .withMetrics(record.get(ACTOR_DEFINITION.METRICS) == null + ? 
null + : Jsons.deserialize(record.get(ACTOR_DEFINITION.METRICS).data(), ConnectorRegistryEntryMetrics.class)) .withMaxSecondsBetweenMessages(maxSecondsBetweenMessage); } @@ -295,6 +298,9 @@ public static StandardDestinationDefinition buildStandardDestinationDefinition(f .withTombstone(record.get(ACTOR_DEFINITION.TOMBSTONE)) .withPublic(record.get(ACTOR_DEFINITION.PUBLIC)) .withCustom(record.get(ACTOR_DEFINITION.CUSTOM)) + .withMetrics(record.get(ACTOR_DEFINITION.METRICS) == null + ? null + : Jsons.deserialize(record.get(ACTOR_DEFINITION.METRICS).data(), ConnectorRegistryEntryMetrics.class)) .withResourceRequirements(record.get(ACTOR_DEFINITION.RESOURCE_REQUIREMENTS) == null ? null : Jsons.deserialize(record.get(ACTOR_DEFINITION.RESOURCE_REQUIREMENTS).data(), ActorDefinitionResourceRequirements.class)); @@ -510,6 +516,9 @@ public static ActorDefinitionVersion buildActorDefinitionVersion(final Record re : Enums.toEnum(record.get(ACTOR_DEFINITION_VERSION.RELEASE_STAGE, String.class), ReleaseStage.class).orElseThrow()) .withReleaseDate(record.get(ACTOR_DEFINITION_VERSION.RELEASE_DATE) == null ? null : record.get(ACTOR_DEFINITION_VERSION.RELEASE_DATE).toString()) + .withLastPublished(record.get(ACTOR_DEFINITION_VERSION.LAST_PUBLISHED) == null ? null + : Date.from(record.get(ACTOR_DEFINITION_VERSION.LAST_PUBLISHED).toInstant())) + .withCdkVersion(record.get(ACTOR_DEFINITION_VERSION.CDK_VERSION)) .withAllowedHosts(record.get(ACTOR_DEFINITION_VERSION.ALLOWED_HOSTS) == null ? null : Jsons.deserialize(record.get(ACTOR_DEFINITION_VERSION.ALLOWED_HOSTS).data(), AllowedHosts.class)) @@ -517,16 +526,6 @@ public static ActorDefinitionVersion buildActorDefinitionVersion(final Record re ? null : Jsons.deserialize(record.get(ACTOR_DEFINITION_VERSION.SUGGESTED_STREAMS).data(), SuggestedStreams.class)) - .withSupportsDbt(record.get(ACTOR_DEFINITION_VERSION.SUPPORTS_DBT)) - .withNormalizationConfig( - Objects.nonNull(record.get(ACTOR_DEFINITION_VERSION.NORMALIZATION_REPOSITORY)) - && Objects.nonNull(record.get(ACTOR_DEFINITION_VERSION.NORMALIZATION_TAG)) - && Objects.nonNull(record.get(ACTOR_DEFINITION_VERSION.NORMALIZATION_INTEGRATION_TYPE)) - ? 
new NormalizationDestinationDefinitionConfig() - .withNormalizationRepository(record.get(ACTOR_DEFINITION_VERSION.NORMALIZATION_REPOSITORY)) - .withNormalizationTag(record.get(ACTOR_DEFINITION_VERSION.NORMALIZATION_TAG)) - .withNormalizationIntegrationType(record.get(ACTOR_DEFINITION_VERSION.NORMALIZATION_INTEGRATION_TYPE)) - : null) .withSupportsRefreshes(record.get(ACTOR_DEFINITION_VERSION.SUPPORTS_REFRESHES)) .withSupportState(Enums.toEnum(record.get(ACTOR_DEFINITION_VERSION.SUPPORT_STATE, String.class), SupportState.class).orElseThrow()); } diff --git a/airbyte-data/src/main/java/io/airbyte/data/services/impls/jooq/DestinationServiceJooqImpl.java b/airbyte-data/src/main/java/io/airbyte/data/services/impls/jooq/DestinationServiceJooqImpl.java index 357b71a12bd..30d093b2c2d 100644 --- a/airbyte-data/src/main/java/io/airbyte/data/services/impls/jooq/DestinationServiceJooqImpl.java +++ b/airbyte-data/src/main/java/io/airbyte/data/services/impls/jooq/DestinationServiceJooqImpl.java @@ -651,6 +651,9 @@ static void writeStandardDestinationDefinition(final List organizationId = getOrganizationIdFromWorkspaceId(workspaceId); + RuntimeSecretPersistence secretPersistence = null; + if (organizationId.isPresent() && featureFlagClient.boolVariation(UseRuntimeSecretPersistence.INSTANCE, new Organization(organizationId.get()))) { + final SecretPersistenceConfig secretPersistenceConfig = secretPersistenceConfigService.get(ScopeType.ORGANIZATION, organizationId.get()); + secretPersistence = new RuntimeSecretPersistence(secretPersistenceConfig); + } + secretsRepositoryWriter.deleteFromConfig( + config, + spec.getConnectionSpecification(), + secretPersistence); + + // 2. Tombstone destination and void config + final DestinationConnection newDestinationConnection = new DestinationConnection() + .withName(name) + .withDestinationDefinitionId(destinationConnection.getDestinationDefinitionId()) + .withWorkspaceId(workspaceId) + .withDestinationId(destinationId) + .withConfiguration(null) + .withTombstone(true); + writeDestinationConnectionNoSecrets(newDestinationConnection); + } + /** * Write a destination with its secrets to the appropriate persistence. Secrets go to secrets store * and the rest of the object (with pointers to the secrets store) get saved in the db. 
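
The tombstoneDestination change above is a two-step teardown: it first deletes every secret the stored config points at (routing through a per-organization RuntimeSecretPersistence when the UseRuntimeSecretPersistence flag is set for the workspace's organization), then rewrites the actor row with a null configuration and tombstone=true, so the id stays resolvable without retaining credentials. Below is a minimal, self-contained sketch of that idea; the Destination record, the map-backed secret store, and the {"_secret": coordinate} pointer shape are illustrative assumptions, not Airbyte's actual SecretsRepositoryWriter API.

import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class TombstoneSketch {

  // Map-backed stand-in for a secret persistence: coordinate -> payload.
  private static final Map<String, String> SECRET_STORE = new HashMap<>();

  // Illustrative stand-in for the persisted actor row.
  record Destination(String name, String destinationId, Map<String, Object> config, boolean tombstone) {}

  // Step 1: delete every secret coordinate referenced by the stored config.
  static void deleteSecrets(final Map<String, Object> config, final List<String> secretFields) {
    for (final String field : secretFields) {
      if (config.get(field) instanceof Map<?, ?> pointer) { // assumed pointer shape: {"_secret": "coordinate_v1"}
        SECRET_STORE.remove(pointer.get("_secret"));
      }
    }
  }

  // Step 2: rewrite the row with a null config and tombstone=true, so the id
  // stays resolvable but neither the config nor its secrets survive.
  static Destination tombstone(final Destination d, final List<String> secretFields) {
    deleteSecrets(d.config(), secretFields);
    return new Destination(d.name(), d.destinationId(), null, true);
  }

  public static void main(final String[] args) {
    SECRET_STORE.put("coordinate_v1", "hunter2");
    final Destination d = new Destination(
        "warehouse",
        "dest-1",
        Map.of("host", "db.internal", "password", Map.of("_secret", "coordinate_v1")),
        false);
    final Destination tombstoned = tombstone(d, List.of("password"));
    System.out.println(SECRET_STORE.isEmpty() + " " + tombstoned.tombstone()); // prints: true true
  }
}

The same two-step pattern appears again for sources in SourceServiceJooqImpl below.
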
diff --git a/airbyte-data/src/main/java/io/airbyte/data/services/impls/jooq/OperationServiceJooqImpl.java b/airbyte-data/src/main/java/io/airbyte/data/services/impls/jooq/OperationServiceJooqImpl.java index 4e6c37bd79f..5377ec07127 100644 --- a/airbyte-data/src/main/java/io/airbyte/data/services/impls/jooq/OperationServiceJooqImpl.java +++ b/airbyte-data/src/main/java/io/airbyte/data/services/impls/jooq/OperationServiceJooqImpl.java @@ -14,8 +14,6 @@ import io.airbyte.commons.enums.Enums; import io.airbyte.commons.json.Jsons; import io.airbyte.config.ConfigSchema; -import io.airbyte.config.OperatorDbt; -import io.airbyte.config.OperatorNormalization; import io.airbyte.config.OperatorWebhook; import io.airbyte.config.StandardSyncOperation; import io.airbyte.config.StandardSyncOperation.OperatorType; @@ -178,8 +176,6 @@ private void writeStandardSyncOperation(final List config .set(OPERATION.NAME, standardSyncOperation.getName()) .set(OPERATION.OPERATOR_TYPE, Enums.toEnum(standardSyncOperation.getOperatorType().value(), io.airbyte.db.instance.configs.jooq.generated.enums.OperatorType.class).orElseThrow()) - .set(OPERATION.OPERATOR_NORMALIZATION, JSONB.valueOf(Jsons.serialize(standardSyncOperation.getOperatorNormalization()))) - .set(OPERATION.OPERATOR_DBT, JSONB.valueOf(Jsons.serialize(standardSyncOperation.getOperatorDbt()))) .set(OPERATION.OPERATOR_WEBHOOK, JSONB.valueOf(Jsons.serialize(standardSyncOperation.getOperatorWebhook()))) .set(OPERATION.TOMBSTONE, standardSyncOperation.getTombstone() != null && standardSyncOperation.getTombstone()) .set(OPERATION.UPDATED_AT, timestamp) @@ -193,8 +189,6 @@ private void writeStandardSyncOperation(final List config .set(OPERATION.NAME, standardSyncOperation.getName()) .set(OPERATION.OPERATOR_TYPE, Enums.toEnum(standardSyncOperation.getOperatorType().value(), io.airbyte.db.instance.configs.jooq.generated.enums.OperatorType.class).orElseThrow()) - .set(OPERATION.OPERATOR_NORMALIZATION, JSONB.valueOf(Jsons.serialize(standardSyncOperation.getOperatorNormalization()))) - .set(OPERATION.OPERATOR_DBT, JSONB.valueOf(Jsons.serialize(standardSyncOperation.getOperatorDbt()))) .set(OPERATION.OPERATOR_WEBHOOK, JSONB.valueOf(Jsons.serialize(standardSyncOperation.getOperatorWebhook()))) .set(OPERATION.TOMBSTONE, standardSyncOperation.getTombstone() != null && standardSyncOperation.getTombstone()) .set(OPERATION.CREATED_AT, timestamp) @@ -210,8 +204,6 @@ private static StandardSyncOperation buildStandardSyncOperation(final Record rec .withName(record.get(OPERATION.NAME)) .withWorkspaceId(record.get(OPERATION.WORKSPACE_ID)) .withOperatorType(Enums.toEnum(record.get(OPERATION.OPERATOR_TYPE, String.class), OperatorType.class).orElseThrow()) - .withOperatorNormalization(Jsons.deserialize(record.get(OPERATION.OPERATOR_NORMALIZATION).data(), OperatorNormalization.class)) - .withOperatorDbt(Jsons.deserialize(record.get(OPERATION.OPERATOR_DBT).data(), OperatorDbt.class)) .withOperatorWebhook(record.get(OPERATION.OPERATOR_WEBHOOK) == null ? 
null : Jsons.deserialize(record.get(OPERATION.OPERATOR_WEBHOOK).data(), OperatorWebhook.class)) .withTombstone(record.get(OPERATION.TOMBSTONE)); diff --git a/airbyte-data/src/main/java/io/airbyte/data/services/impls/jooq/SourceServiceJooqImpl.java b/airbyte-data/src/main/java/io/airbyte/data/services/impls/jooq/SourceServiceJooqImpl.java index 5191dca6430..758cf03f047 100644 --- a/airbyte-data/src/main/java/io/airbyte/data/services/impls/jooq/SourceServiceJooqImpl.java +++ b/airbyte-data/src/main/java/io/airbyte/data/services/impls/jooq/SourceServiceJooqImpl.java @@ -597,6 +597,9 @@ private static void writeStandardSourceDefinition(final List organizationId = getOrganizationIdFromWorkspaceId(workspaceId); + RuntimeSecretPersistence secretPersistence = null; + if (organizationId.isPresent() && featureFlagClient.boolVariation(UseRuntimeSecretPersistence.INSTANCE, new Organization(organizationId.get()))) { + final SecretPersistenceConfig secretPersistenceConfig = secretPersistenceConfigService.get(ScopeType.ORGANIZATION, organizationId.get()); + secretPersistence = new RuntimeSecretPersistence(secretPersistenceConfig); + } + secretsRepositoryWriter.deleteFromConfig( + config, + spec.getConnectionSpecification(), + secretPersistence); + + // 2. Tombstone source and void config + final SourceConnection newSourceConnection = new SourceConnection() + .withName(name) + .withSourceDefinitionId(sourceConnection.getSourceDefinitionId()) + .withWorkspaceId(workspaceId) + .withSourceId(sourceId) + .withConfiguration(null) + .withTombstone(true); + writeSourceConnectionNoSecrets(newSourceConnection); + } + /** * Write a source with its secrets to the appropriate persistence. Secrets go to secrets store and * the rest of the object (with pointers to the secrets store) get saved in the db. 
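
tombstoneSource mirrors the destination path, and both use the same store-selection rule: only when the workspace maps to an organization and UseRuntimeSecretPersistence evaluates true for it does the code build a RuntimeSecretPersistence from the organization's SecretPersistenceConfig; otherwise it passes a null persistence and the platform default applies. A hedged sketch of that selection under those assumptions, with a hypothetical Predicate standing in for the feature-flag client:

import java.util.Optional;
import java.util.UUID;
import java.util.function.Predicate;

public class StoreSelectionSketch {

  // Illustrative stand-ins; the real code keeps secretPersistence null and
  // lets downstream callers fall back to the platform-wide store.
  interface SecretPersistence {}

  record PlatformStore() implements SecretPersistence {}

  record OrgStore(UUID organizationId) implements SecretPersistence {}

  // flagEnabled stands in for featureFlagClient.boolVariation(
  //   UseRuntimeSecretPersistence.INSTANCE, new Organization(orgId)).
  static SecretPersistence resolve(final Optional<UUID> organizationId, final Predicate<UUID> flagEnabled) {
    return organizationId
        .filter(flagEnabled)
        .<SecretPersistence>map(OrgStore::new)
        .orElseGet(PlatformStore::new);
  }

  public static void main(final String[] args) {
    final UUID flaggedOrg = UUID.randomUUID();
    final Predicate<UUID> flag = flaggedOrg::equals;

    System.out.println(resolve(Optional.of(flaggedOrg), flag));        // OrgStore[organizationId=...]
    System.out.println(resolve(Optional.of(UUID.randomUUID()), flag)); // PlatformStore[]
    System.out.println(resolve(Optional.empty(), flag));               // PlatformStore[]
  }
}
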
@@ -728,7 +776,6 @@ public void writeSourceConnectionWithSecrets( final SecretPersistenceConfig secretPersistenceConfig = secretPersistenceConfigService.get(ScopeType.ORGANIZATION, organizationId.get()); secretPersistence = new RuntimeSecretPersistence(secretPersistenceConfig); } - final JsonNode partialConfig; if (previousSourceConnection.isPresent()) { partialConfig = secretsRepositoryWriter.updateFromConfig( diff --git a/airbyte-data/src/main/java/io/airbyte/data/services/impls/jooq/WorkspaceServiceJooqImpl.java b/airbyte-data/src/main/java/io/airbyte/data/services/impls/jooq/WorkspaceServiceJooqImpl.java index 379b94c05d9..860280f13f7 100644 --- a/airbyte-data/src/main/java/io/airbyte/data/services/impls/jooq/WorkspaceServiceJooqImpl.java +++ b/airbyte-data/src/main/java/io/airbyte/data/services/impls/jooq/WorkspaceServiceJooqImpl.java @@ -735,7 +735,7 @@ public void writeWorkspaceWithSecrets(final StandardWorkspace workspace) throws secretPersistence = new RuntimeSecretPersistence(secretPersistenceConfig); } - JsonNode partialConfig; + final JsonNode partialConfig; if (previousWebhookConfigs.isPresent()) { partialConfig = secretsRepositoryWriter.updateFromConfig( workspace.getWorkspaceId(), diff --git a/airbyte-data/src/main/java/io/airbyte/data/services/impls/keycloak/ApplicationServiceKeycloakImpl.java b/airbyte-data/src/main/java/io/airbyte/data/services/impls/keycloak/ApplicationServiceKeycloakImpl.java index 473d605343d..eb94cd09d3a 100644 --- a/airbyte-data/src/main/java/io/airbyte/data/services/impls/keycloak/ApplicationServiceKeycloakImpl.java +++ b/airbyte-data/src/main/java/io/airbyte/data/services/impls/keycloak/ApplicationServiceKeycloakImpl.java @@ -8,7 +8,6 @@ import io.airbyte.commons.auth.config.AirbyteKeycloakConfiguration; import io.airbyte.commons.auth.config.AuthMode; import io.airbyte.commons.auth.keycloak.ClientScopeConfigurator; -import io.airbyte.commons.auth.support.UserAuthenticationResolver; import io.airbyte.config.Application; import io.airbyte.config.User; import io.airbyte.data.services.ApplicationService; @@ -28,6 +27,7 @@ import java.util.Map; import java.util.Optional; import java.util.UUID; +import lombok.extern.slf4j.Slf4j; import org.keycloak.admin.client.Keycloak; import org.keycloak.admin.client.KeycloakBuilder; import org.keycloak.admin.client.resource.ClientsResource; @@ -41,6 +41,7 @@ * An Application for a user or non-user entity i.e. an organization. 
*/ @Singleton +@Slf4j @RequiresAuthMode(AuthMode.OIDC) public class ApplicationServiceKeycloakImpl implements ApplicationService { @@ -50,19 +51,16 @@ public class ApplicationServiceKeycloakImpl implements ApplicationService { public static final String CLIENT_ID = "client_id"; private final AirbyteKeycloakConfiguration keycloakConfiguration; private final Keycloak keycloakAdminClient; - private final UserAuthenticationResolver userAuthenticationResolver; private final ClientScopeConfigurator clientScopeConfigurator; private final Duration accessTokenExpirationTime; public ApplicationServiceKeycloakImpl( final Keycloak keycloakAdminClient, final AirbyteKeycloakConfiguration keycloakConfiguration, - final UserAuthenticationResolver userAuthenticationResolver, final ClientScopeConfigurator clientScopeConfigurator, @Named("access-token-expiration-time") final Duration accessTokenExpirationTime) { this.keycloakAdminClient = keycloakAdminClient; this.keycloakConfiguration = keycloakConfiguration; - this.userAuthenticationResolver = userAuthenticationResolver; this.clientScopeConfigurator = clientScopeConfigurator; this.accessTokenExpirationTime = accessTokenExpirationTime; } @@ -78,8 +76,7 @@ public ApplicationServiceKeycloakImpl( @SuppressWarnings("PMD.PreserveStackTrace") public Application createApplication(final User user, final String name) { try { - final String userRealmName = getCurrentUserRealmName(); - final RealmResource realmResource = keycloakAdminClient.realm(userRealmName); + final RealmResource realmResource = keycloakAdminClient.realm(keycloakConfiguration.getClientRealm()); final ClientsResource clientsResource = realmResource.clients(); final UsersResource usersResource = realmResource.users(); @@ -97,7 +94,7 @@ public Application createApplication(final User user, final String name) { .anyMatch(clientRepresentation -> clientRepresentation.getName().equals(name))) { throw new BadRequestException("User already has a key with this name"); } - final var clientRepresentation = buildClientRepresentation(user, name, existingClients.size()); + final var clientRepresentation = buildClientRepresentation(name); try (var response = realmResource.clients().create(clientRepresentation)) { if (response.getStatus() != Response.Status.CREATED.getStatusCode()) { @@ -137,15 +134,16 @@ public Application createApplication(final User user, final String name) { */ @Override public List listApplicationsByUser(final User user) { + final var clientRealm = keycloakConfiguration.getClientRealm(); final var clientUsers = keycloakAdminClient - .realm(getCurrentUserRealmName()) + .realm(clientRealm) .users() .searchByAttributes(USER_ID + ":" + user.getAuthUserId()); final var existingClient = new ArrayList(); for (final var clientUser : clientUsers) { final var client = keycloakAdminClient - .realm(getCurrentUserRealmName()) + .realm(clientRealm) .clients() .findByClientId(clientUser .getAttributes() @@ -171,9 +169,9 @@ public List listApplicationsByUser(final User user) { */ @Override public Optional deleteApplication(final User user, final String applicationId) { - final var userRealm = getCurrentUserRealmName(); + final var clientRealm = keycloakConfiguration.getClientRealm(); final var client = keycloakAdminClient - .realm(getCurrentUserRealmName()) + .realm(clientRealm) .clients() .findByClientId(applicationId) .stream() @@ -191,7 +189,7 @@ public Optional deleteApplication(final User user, final String app } keycloakAdminClient - .realm(userRealm) + .realm(clientRealm) .clients() 
.get(client.get().getId()) .remove(); @@ -208,11 +206,10 @@ public Optional deleteApplication(final User user, final String app */ @Override public String getToken(final String clientId, final String clientSecret) { - final var userRealm = getCurrentUserRealmName(); final var keycloakClient = KeycloakBuilder .builder() .serverUrl(keycloakConfiguration.getServerUrl()) - .realm(userRealm) + .realm(keycloakConfiguration.getClientRealm()) .grantType("client_credentials") .clientId(clientId) .clientSecret(clientSecret) @@ -230,13 +227,11 @@ public String getToken(final String clientId, final String clientSecret) { /** * Build a client representation for a user. * - * @param user The user to build the client representation for. * @param name The name of the client. - * @param index The index of the client. * @return The built client representation. */ @Nonnull - private ClientRepresentation buildClientRepresentation(final User user, final String name, final int index) { + private ClientRepresentation buildClientRepresentation(final String name) { final var client = new ClientRepresentation(); client.setClientId(String.valueOf(UUID.randomUUID())); client.setServiceAccountsEnabled(true); @@ -282,9 +277,4 @@ private static Application toApplication(final ClientRepresentation client) { ZoneOffset.UTC).format(DateTimeFormatter.ISO_DATE_TIME)); } - private String getCurrentUserRealmName() { - return userAuthenticationResolver.resolveSsoRealm().orElseThrow( - () -> new BadRequestException("Could not determine realm for current user")); - } - } diff --git a/airbyte-data/src/main/java/io/airbyte/data/services/impls/micronaut/ApplicationServiceMicronautImpl.java b/airbyte-data/src/main/java/io/airbyte/data/services/impls/micronaut/ApplicationServiceMicronautImpl.java index eaf3aa2d0fe..2b5d1d1f835 100644 --- a/airbyte-data/src/main/java/io/airbyte/data/services/impls/micronaut/ApplicationServiceMicronautImpl.java +++ b/airbyte-data/src/main/java/io/airbyte/data/services/impls/micronaut/ApplicationServiceMicronautImpl.java @@ -58,13 +58,15 @@ public String getToken(final String clientId, final String clientSecret) { roles.addAll(AuthRole.buildAuthRolesSet(AuthRole.ADMIN)); roles.addAll(WorkspaceAuthRole.buildWorkspaceAuthRolesSet(WorkspaceAuthRole.WORKSPACE_ADMIN)); roles.addAll(OrganizationAuthRole.buildOrganizationAuthRolesSet(OrganizationAuthRole.ORGANIZATION_ADMIN)); - final var token = jwtTokenGenerator.generateToken( + return jwtTokenGenerator.generateToken( Map.of( "iss", "airbyte-server", "sub", DEFAULT_AUTH_USER_ID, "roles", roles, - "exp", Instant.now().plus(24, ChronoUnit.HOURS).getEpochSecond())); - return token.orElseThrow(() -> new BadRequestException("Token generation failed")); + "exp", Instant.now().plus(24, ChronoUnit.HOURS).getEpochSecond())) + // Necessary now that this is no longer optional, but I don't know under what conditions we could + // end up here. 
+ .orElseThrow(() -> new BadRequestException("Could not generate token")); } throw new BadRequestException("Invalid client id or token"); } diff --git a/airbyte-data/src/main/kotlin/io/airbyte/data/repositories/AttemptsRepository.kt b/airbyte-data/src/main/kotlin/io/airbyte/data/repositories/AttemptsRepository.kt index 87b0297b719..a89dc27ded4 100644 --- a/airbyte-data/src/main/kotlin/io/airbyte/data/repositories/AttemptsRepository.kt +++ b/airbyte-data/src/main/kotlin/io/airbyte/data/repositories/AttemptsRepository.kt @@ -6,4 +6,9 @@ import io.micronaut.data.model.query.builder.sql.Dialect import io.micronaut.data.repository.PageableRepository @JdbcRepository(dialect = Dialect.POSTGRES, dataSource = "config") -interface AttemptsRepository : PageableRepository +interface AttemptsRepository : PageableRepository { + fun findByJobIdAndAttemptNumber( + jobId: Long, + attemptNumber: Long, + ): Attempt +} diff --git a/airbyte-data/src/main/kotlin/io/airbyte/data/repositories/ConnectionTimelineEventRepository.kt b/airbyte-data/src/main/kotlin/io/airbyte/data/repositories/ConnectionTimelineEventRepository.kt index 26c20f01a09..a10bbe359ac 100644 --- a/airbyte-data/src/main/kotlin/io/airbyte/data/repositories/ConnectionTimelineEventRepository.kt +++ b/airbyte-data/src/main/kotlin/io/airbyte/data/repositories/ConnectionTimelineEventRepository.kt @@ -1,12 +1,37 @@ package io.airbyte.data.repositories import io.airbyte.data.repositories.entities.ConnectionTimelineEvent +import io.airbyte.data.services.shared.ConnectionEvent +import io.micronaut.data.annotation.Expandable +import io.micronaut.data.annotation.Query import io.micronaut.data.jdbc.annotation.JdbcRepository import io.micronaut.data.model.query.builder.sql.Dialect import io.micronaut.data.repository.PageableRepository +import java.time.OffsetDateTime import java.util.UUID @JdbcRepository(dialect = Dialect.POSTGRES, dataSource = "config") interface ConnectionTimelineEventRepository : PageableRepository { fun findByConnectionId(connectionId: UUID): List + + @Query( + """ + SELECT * FROM connection_timeline_event + WHERE connection_id = :connectionId + AND ((:eventTypes) IS NULL OR event_type = ANY(CAST(ARRAY[:eventTypes] AS text[])) ) + AND (CAST(:createdAtStart AS timestamptz) IS NULL OR created_at >= CAST(:createdAtStart AS timestamptz)) + AND (CAST(:createdAtEnd AS timestamptz) IS NULL OR created_at <= CAST(:createdAtEnd AS timestamptz)) + ORDER BY created_at DESC + LIMIT :pageSize + OFFSET :rowOffset + """, + ) + fun findByConnectionIdWithFilters( + connectionId: UUID, + @Expandable eventTypes: List?, + createdAtStart: OffsetDateTime?, + createdAtEnd: OffsetDateTime?, + pageSize: Int, + rowOffset: Int, + ): List } diff --git a/airbyte-data/src/main/kotlin/io/airbyte/data/repositories/StreamAttemptMetadataRepository.kt b/airbyte-data/src/main/kotlin/io/airbyte/data/repositories/StreamAttemptMetadataRepository.kt new file mode 100644 index 00000000000..7cb3fbe3fde --- /dev/null +++ b/airbyte-data/src/main/kotlin/io/airbyte/data/repositories/StreamAttemptMetadataRepository.kt @@ -0,0 +1,12 @@ +package io.airbyte.data.repositories + +import io.airbyte.data.repositories.entities.StreamAttemptMetadata +import io.micronaut.data.jdbc.annotation.JdbcRepository +import io.micronaut.data.model.query.builder.sql.Dialect +import io.micronaut.data.repository.PageableRepository +import java.util.UUID + +@JdbcRepository(dialect = Dialect.POSTGRES, dataSource = "config") +interface StreamAttemptMetadataRepository : PageableRepository { + fun 
findAllByAttemptId(attemptId: Long): List +} diff --git a/airbyte-data/src/main/kotlin/io/airbyte/data/repositories/entities/StreamAttemptMetadata.kt b/airbyte-data/src/main/kotlin/io/airbyte/data/repositories/entities/StreamAttemptMetadata.kt new file mode 100644 index 00000000000..85755bb2b06 --- /dev/null +++ b/airbyte-data/src/main/kotlin/io/airbyte/data/repositories/entities/StreamAttemptMetadata.kt @@ -0,0 +1,18 @@ +package io.airbyte.data.repositories.entities + +import io.micronaut.data.annotation.AutoPopulated +import io.micronaut.data.annotation.Id +import io.micronaut.data.annotation.MappedEntity +import java.util.UUID + +@MappedEntity("stream_attempt_metadata") +data class StreamAttemptMetadata( + @field:Id + @AutoPopulated + val id: UUID? = null, + val attemptId: Long, + val streamNamespace: String?, + val streamName: String, + val wasBackfilled: Boolean, + val wasResumed: Boolean, +) diff --git a/airbyte-data/src/main/kotlin/io/airbyte/data/services/ConnectionTimelineEventService.kt b/airbyte-data/src/main/kotlin/io/airbyte/data/services/ConnectionTimelineEventService.kt index 8d6f8f0c112..9b7617c5f4b 100644 --- a/airbyte-data/src/main/kotlin/io/airbyte/data/services/ConnectionTimelineEventService.kt +++ b/airbyte-data/src/main/kotlin/io/airbyte/data/services/ConnectionTimelineEventService.kt @@ -2,11 +2,24 @@ package io.airbyte.data.services import io.airbyte.data.repositories.entities.ConnectionTimelineEvent import io.airbyte.data.services.shared.ConnectionEvent +import java.time.OffsetDateTime import java.util.UUID interface ConnectionTimelineEventService { fun writeEvent( connectionId: UUID, event: ConnectionEvent, + userId: UUID? = null, ): ConnectionTimelineEvent + + fun getEvent(eventId: UUID): ConnectionTimelineEvent + + fun listEvents( + connectionId: UUID, + eventTypes: List? = null, + createdAtStart: OffsetDateTime? = null, + createdAtEnd: OffsetDateTime? = null, + pageSize: Int, + rowOffset: Int, + ): List } diff --git a/airbyte-data/src/main/kotlin/io/airbyte/data/services/StreamAttemptMetadataService.kt b/airbyte-data/src/main/kotlin/io/airbyte/data/services/StreamAttemptMetadataService.kt new file mode 100644 index 00000000000..e81bfbc4638 --- /dev/null +++ b/airbyte-data/src/main/kotlin/io/airbyte/data/services/StreamAttemptMetadataService.kt @@ -0,0 +1,74 @@ +package io.airbyte.data.services + +import io.airbyte.data.repositories.AttemptsRepository +import io.airbyte.data.repositories.StreamAttemptMetadataRepository +import jakarta.inject.Singleton + +data class StreamAttemptMetadata( + val streamName: String, + val streamNamespace: String? = null, + val wasBackfilled: Boolean, + val wasResumed: Boolean, +) + +@Singleton +class StreamAttemptMetadataService( + private val attemptsRepository: AttemptsRepository, + private val streamAttemptMetadataRepository: StreamAttemptMetadataRepository, +) { + fun upsertStreamAttemptMetadata( + jobId: Long, + attemptNumber: Long, + streamMetadata: List, + ) { + val attemptId: Long = getAttemptId(jobId, attemptNumber) + val entitiesToSave = + streamMetadata.map { + io.airbyte.data.repositories.entities.StreamAttemptMetadata( + attemptId = attemptId, + streamName = it.streamName, + streamNamespace = it.streamNamespace, + wasBackfilled = it.wasBackfilled, + wasResumed = it.wasResumed, + ) + } + try { + // Optimistic insertion here + // We expect the default case to be always inserting new metadata and never to be updated, + // but this can happen in case of retries. 
+ // The goal here is to simulate a `ON CONFLICT UPDATE` if we were to write SQL directly. + streamAttemptMetadataRepository.saveAll(entitiesToSave) + } catch (e: Exception) { + val existingStreams = + streamAttemptMetadataRepository.findAllByAttemptId(attemptId) + .associate { Pair(it.streamName, it.streamNamespace) to it.id } + val partitionedEntities = + entitiesToSave + .map { it.copy(id = existingStreams[Pair(it.streamName, it.streamNamespace)]) } + .partition { it.id != null } + streamAttemptMetadataRepository.saveAll(partitionedEntities.second) + streamAttemptMetadataRepository.updateAll(partitionedEntities.first) + } + } + + fun getStreamAttemptMetadata( + jobId: Long, + attemptNumber: Long, + ): List { + val attemptId: Long = getAttemptId(jobId, attemptNumber) + val entities = streamAttemptMetadataRepository.findAllByAttemptId(attemptId) + return entities.map { + StreamAttemptMetadata( + streamName = it.streamName, + streamNamespace = it.streamNamespace, + wasBackfilled = it.wasBackfilled, + wasResumed = it.wasResumed, + ) + } + } + + private fun getAttemptId( + jobId: Long, + attemptNumber: Long, + ): Long = attemptsRepository.findByJobIdAndAttemptNumber(jobId, attemptNumber).id ?: throw NoSuchElementException() +} diff --git a/airbyte-data/src/main/kotlin/io/airbyte/data/services/impls/data/ConnectionTimelineEventServiceDataImpl.kt b/airbyte-data/src/main/kotlin/io/airbyte/data/services/impls/data/ConnectionTimelineEventServiceDataImpl.kt index 9b8f30ccf54..22d0d8e24bf 100644 --- a/airbyte-data/src/main/kotlin/io/airbyte/data/services/impls/data/ConnectionTimelineEventServiceDataImpl.kt +++ b/airbyte-data/src/main/kotlin/io/airbyte/data/services/impls/data/ConnectionTimelineEventServiceDataImpl.kt @@ -10,6 +10,7 @@ import io.airbyte.data.repositories.entities.ConnectionTimelineEvent import io.airbyte.data.services.ConnectionTimelineEventService import io.airbyte.data.services.shared.ConnectionEvent import jakarta.inject.Singleton +import java.time.OffsetDateTime import java.util.UUID @Singleton @@ -20,10 +21,26 @@ class ConnectionTimelineEventServiceDataImpl( override fun writeEvent( connectionId: UUID, event: ConnectionEvent, + userId: UUID?, ): ConnectionTimelineEvent { val serializedEvent = mapper.writeValueAsString(event) val timelineEvent = - ConnectionTimelineEvent(null, connectionId, event.getUserId(), event.getEventType().toString(), serializedEvent, null) + ConnectionTimelineEvent(null, connectionId, userId, event.getEventType().toString(), serializedEvent, null) return repository.save(timelineEvent) } + + override fun getEvent(eventId: UUID): ConnectionTimelineEvent { + return repository.findById(eventId).get() + } + + override fun listEvents( + connectionId: UUID, + eventTypes: List?, + createdAtStart: OffsetDateTime?, + createdAtEnd: OffsetDateTime?, + pageSize: Int, + rowOffset: Int, + ): List { + return repository.findByConnectionIdWithFilters(connectionId, eventTypes, createdAtStart, createdAtEnd, pageSize, rowOffset) + } } diff --git a/airbyte-data/src/main/kotlin/io/airbyte/data/services/shared/ConnectionEvent.kt b/airbyte-data/src/main/kotlin/io/airbyte/data/services/shared/ConnectionEvent.kt index b907d99ac90..318ba876475 100644 --- a/airbyte-data/src/main/kotlin/io/airbyte/data/services/shared/ConnectionEvent.kt +++ b/airbyte-data/src/main/kotlin/io/airbyte/data/services/shared/ConnectionEvent.kt @@ -1,15 +1,33 @@ package io.airbyte.data.services.shared -import java.util.UUID +import io.micronaut.data.annotation.TypeDef +import 
io.micronaut.data.model.DataType
interface ConnectionEvent {
+ // These enums are also defined in openapi config.yaml; please maintain consistency between them.
+ // If any change is made in one place, please make the same change in the other place.
+ @TypeDef(type = DataType.STRING)
enum class Type {
+ SYNC_STARTED, // only for manual sync jobs
SYNC_SUCCEEDED,
+ SYNC_INCOMPLETE,
SYNC_FAILED,
- }
-
- fun getUserId(): UUID? {
- return null
+ SYNC_CANCELLED,
+ REFRESH_STARTED,
+ REFRESH_SUCCEEDED,
+ REFRESH_INCOMPLETE,
+ REFRESH_FAILED,
+ REFRESH_CANCELLED,
+ CLEAR_STARTED,
+ CLEAR_SUCCEEDED,
+ CLEAR_INCOMPLETE,
+ CLEAR_FAILED,
+ CLEAR_CANCELLED,
+ CONNECTION_SETTINGS_UPDATE,
+ CONNECTION_ENABLED,
+ CONNECTION_DISABLED,
+ SCHEMA_UPDATE,
+ CONNECTOR_UPDATE,
}
fun getEventType(): Type
diff --git a/airbyte-data/src/main/kotlin/io/airbyte/data/services/shared/SyncCancelledEvent.kt b/airbyte-data/src/main/kotlin/io/airbyte/data/services/shared/SyncCancelledEvent.kt
new file mode 100644
index 00000000000..5d050f073bc
--- /dev/null
+++ b/airbyte-data/src/main/kotlin/io/airbyte/data/services/shared/SyncCancelledEvent.kt
@@ -0,0 +1,18 @@
+package io.airbyte.data.services.shared
+
+data class SyncCancelledEvent(
+ private val jobId: Long,
+ private val cancelTimeEpochSeconds: Long,
+) : ConnectionEvent {
+ fun getJobId(): Long {
+ return jobId
+ }
+
+ fun getCancelTimeEpochSeconds(): Long {
+ return cancelTimeEpochSeconds
+ }
+
+ override fun getEventType(): ConnectionEvent.Type {
+ return ConnectionEvent.Type.SYNC_CANCELLED
+ }
+}
diff --git a/airbyte-data/src/main/kotlin/io/airbyte/data/services/shared/SyncFailedEvent.kt b/airbyte-data/src/main/kotlin/io/airbyte/data/services/shared/SyncFailedEvent.kt
index e2a9c952e9f..cc0f63aead0 100644
--- a/airbyte-data/src/main/kotlin/io/airbyte/data/services/shared/SyncFailedEvent.kt
+++ b/airbyte-data/src/main/kotlin/io/airbyte/data/services/shared/SyncFailedEvent.kt
@@ -12,11 +12,11 @@ class SyncFailedEvent(
private val attemptsCount: Int,
private val failureReason: Optional,
) : ConnectionEvent {
- fun startTimeEpochSeconds(): Long {
- return this.startTimeEpochSeconds
+ fun getStartTimeEpochSeconds(): Long {
+ return startTimeEpochSeconds
}
- fun endTimeEpochSeconds(): Long {
+ fun getEndTimeEpochSeconds(): Long {
return endTimeEpochSeconds
}
diff --git a/airbyte-data/src/main/kotlin/io/airbyte/data/services/shared/SyncStartedEvent.kt b/airbyte-data/src/main/kotlin/io/airbyte/data/services/shared/SyncStartedEvent.kt
new file mode 100644
index 00000000000..57795a01053
--- /dev/null
+++ b/airbyte-data/src/main/kotlin/io/airbyte/data/services/shared/SyncStartedEvent.kt
@@ -0,0 +1,18 @@
+package io.airbyte.data.services.shared
+
+class SyncStartedEvent(
+ private val jobId: Long,
+ private val startTimeEpochSeconds: Long,
+) : ConnectionEvent {
+ fun getJobId(): Long {
+ return jobId
+ }
+
+ fun getStartTimeEpochSeconds(): Long {
+ return startTimeEpochSeconds
+ }
+
+ override fun getEventType(): ConnectionEvent.Type {
+ return ConnectionEvent.Type.SYNC_STARTED
+ }
+}
diff --git a/airbyte-data/src/main/kotlin/io/airbyte/data/services/shared/SyncSucceededEvent.kt b/airbyte-data/src/main/kotlin/io/airbyte/data/services/shared/SyncSucceededEvent.kt
index a65d1674ba4..94dceffc39b 100644
--- a/airbyte-data/src/main/kotlin/io/airbyte/data/services/shared/SyncSucceededEvent.kt
+++ b/airbyte-data/src/main/kotlin/io/airbyte/data/services/shared/SyncSucceededEvent.kt
@@ -8,11 +8,11 @@ class SyncSucceededEvent(
private val recordsLoaded: Long,
private val attemptsCount: Int,
) : 
ConnectionEvent { - fun startTimeEpochSeconds(): Long { - return this.startTimeEpochSeconds + fun getStartTimeEpochSeconds(): Long { + return startTimeEpochSeconds } - fun endTimeEpochSeconds(): Long { + fun getEndTimeEpochSeconds(): Long { return endTimeEpochSeconds } diff --git a/airbyte-data/src/test/java/io/airbyte/data/services/impls/keycloak/ApplicationServiceKeycloakImplTests.java b/airbyte-data/src/test/java/io/airbyte/data/services/impls/keycloak/ApplicationServiceKeycloakImplTests.java index 63cf634d944..79d7e100d75 100644 --- a/airbyte-data/src/test/java/io/airbyte/data/services/impls/keycloak/ApplicationServiceKeycloakImplTests.java +++ b/airbyte-data/src/test/java/io/airbyte/data/services/impls/keycloak/ApplicationServiceKeycloakImplTests.java @@ -14,7 +14,6 @@ import io.airbyte.commons.auth.config.AirbyteKeycloakConfiguration; import io.airbyte.commons.auth.keycloak.ClientScopeConfigurator; -import io.airbyte.commons.auth.support.UserAuthenticationResolver; import io.airbyte.config.Application; import io.airbyte.config.User; import jakarta.ws.rs.BadRequestException; @@ -52,7 +51,6 @@ class ApplicationServiceKeycloakImplTests { private final ClientResource clientResource = mock(ClientResource.class); private final UsersResource usersResource = mock(UsersResource.class); private final UserResource userResource = mock(UserResource.class); - private final UserAuthenticationResolver userAuthenticationResolver = mock(UserAuthenticationResolver.class); private final ClientScopeConfigurator clientScopeConfigurator = mock(ClientScopeConfigurator.class); private ApplicationServiceKeycloakImpl apiKeyServiceKeycloakImpl; @@ -66,7 +64,6 @@ void setUp() { when(keycloakClient.realm(REALM_NAME)).thenReturn(realmResource); when(realmResource.clients()).thenReturn(clientsResource); when(realmResource.users()).thenReturn(usersResource); - when(userAuthenticationResolver.resolveSsoRealm()).thenReturn(Optional.of(REALM_NAME)); when(clientsResource.create(any(ClientRepresentation.class))) .thenReturn(Response.created(URI.create("https://company.example")).build()); @@ -74,7 +71,6 @@ void setUp() { apiKeyServiceKeycloakImpl = spy(new ApplicationServiceKeycloakImpl( keycloakClient, keycloakConfiguration, - userAuthenticationResolver, clientScopeConfigurator, Duration.ofMinutes(30))); } diff --git a/airbyte-data/src/test/kotlin/io/airbyte/data/repositories/ConnectionTimelineEventRepositoryTest.kt b/airbyte-data/src/test/kotlin/io/airbyte/data/repositories/ConnectionTimelineEventRepositoryTest.kt index a7adc64609e..e5d7cfd65f5 100644 --- a/airbyte-data/src/test/kotlin/io/airbyte/data/repositories/ConnectionTimelineEventRepositoryTest.kt +++ b/airbyte-data/src/test/kotlin/io/airbyte/data/repositories/ConnectionTimelineEventRepositoryTest.kt @@ -1,10 +1,14 @@ package io.airbyte.data.repositories import io.airbyte.data.repositories.entities.ConnectionTimelineEvent +import io.airbyte.data.services.shared.ConnectionEvent import io.airbyte.db.instance.configs.jooq.generated.Keys import io.airbyte.db.instance.configs.jooq.generated.Tables import io.micronaut.test.extensions.junit5.annotation.MicronautTest +import org.junit.jupiter.api.AfterEach import org.junit.jupiter.api.BeforeAll +import org.junit.jupiter.api.BeforeEach +import org.junit.jupiter.api.Nested import org.junit.jupiter.api.Test import java.util.UUID @@ -23,7 +27,6 @@ internal class ConnectionTimelineEventRepositoryTest : AbstractConfigRepositoryT @Test fun `test db insertion`() { - val eventId = java.util.UUID.randomUUID() val event = 
ConnectionTimelineEvent( connectionId = UUID.randomUUID(), @@ -36,4 +39,150 @@ internal class ConnectionTimelineEventRepositoryTest : AbstractConfigRepositoryT val persistedEvent = connectionTimelineEventRepository.findById(saved.id!!).get() assert(persistedEvent.connectionId == event.connectionId) } + + @Nested + inner class ListEventsTest { + private val connectionId: UUID = UUID.randomUUID() + private val event1 = + ConnectionTimelineEvent( + connectionId = connectionId, + eventType = ConnectionEvent.Type.SYNC_STARTED.name, + ) + private val event2 = + ConnectionTimelineEvent( + connectionId = connectionId, + eventType = ConnectionEvent.Type.SYNC_CANCELLED.name, + ) + private val event3 = + ConnectionTimelineEvent( + connectionId = connectionId, + eventType = ConnectionEvent.Type.REFRESH_STARTED.name, + ) + private val event4 = + ConnectionTimelineEvent( + connectionId = connectionId, + eventType = ConnectionEvent.Type.REFRESH_SUCCEEDED.name, + ) + + @BeforeEach + fun setup() { + // save some events + val allEvents = listOf(event1, event2, event3, event4) + allEvents.forEach { event -> connectionTimelineEventRepository.save(event) } + } + + @AfterEach + fun reset() { + connectionTimelineEventRepository.deleteAll() + } + + @Test + fun `should list ALL events order by timestamp`() { + val res = + connectionTimelineEventRepository.findByConnectionIdWithFilters( + connectionId = connectionId, + eventTypes = null, + createdAtStart = null, + createdAtEnd = null, + pageSize = 200, + rowOffset = 0, + ) + assert(connectionTimelineEventRepository.count() == 4L) + assert(res.size == 4) + assert(res[0].id == event4.id) + } + + @Test + fun `should list STARTED events only`() { + val res = + connectionTimelineEventRepository.findByConnectionIdWithFilters( + connectionId = connectionId, + eventTypes = + listOf( + ConnectionEvent.Type.SYNC_STARTED, + ConnectionEvent.Type.REFRESH_STARTED, + ConnectionEvent.Type.CLEAR_STARTED, + ), + createdAtStart = null, + createdAtEnd = null, + pageSize = 200, + rowOffset = 0, + ) + assert(res.size == 2) + } + + @Test + fun `should list events after given time range`() { + val allEvents = + connectionTimelineEventRepository.findByConnectionIdWithFilters( + connectionId = connectionId, + eventTypes = null, + createdAtStart = null, + createdAtEnd = null, + pageSize = 200, + rowOffset = 0, + ) + val res = + connectionTimelineEventRepository.findByConnectionIdWithFilters( + connectionId = connectionId, + eventTypes = null, + createdAtStart = allEvents[2].createdAt, + createdAtEnd = null, + pageSize = 200, + rowOffset = 0, + ) + assert(res.size == 3) + } + + @Test + fun `should list events between a given time range`() { + val allEvents = + connectionTimelineEventRepository.findByConnectionIdWithFilters( + connectionId = connectionId, + eventTypes = null, + createdAtStart = null, + createdAtEnd = null, + pageSize = 200, + rowOffset = 0, + ) + val res = + connectionTimelineEventRepository.findByConnectionIdWithFilters( + connectionId = connectionId, + eventTypes = null, + createdAtStart = allEvents[2].createdAt, + createdAtEnd = allEvents[1].createdAt, + pageSize = 200, + rowOffset = 0, + ) + assert(res.size == 2) + } + + @Test + fun `should list events with limit`() { + val res = + connectionTimelineEventRepository.findByConnectionIdWithFilters( + connectionId = connectionId, + eventTypes = null, + createdAtStart = null, + createdAtEnd = null, + pageSize = 1, + rowOffset = 0, + ) + assert(res.size == 1) + } + + @Test + fun `should list events with row offset`() { + val res 
= + connectionTimelineEventRepository.findByConnectionIdWithFilters( + connectionId = connectionId, + eventTypes = null, + createdAtStart = null, + createdAtEnd = null, + pageSize = 200, + rowOffset = 2, + ) + assert(res.size == 2) + } + } } diff --git a/airbyte-data/src/test/kotlin/io/airbyte/data/services/impls/data/ConnectionTimelineEventServiceDataImplTest.kt b/airbyte-data/src/test/kotlin/io/airbyte/data/services/impls/data/ConnectionTimelineEventServiceDataImplTest.kt index 18f403fe66f..3718475e53f 100644 --- a/airbyte-data/src/test/kotlin/io/airbyte/data/services/impls/data/ConnectionTimelineEventServiceDataImplTest.kt +++ b/airbyte-data/src/test/kotlin/io/airbyte/data/services/impls/data/ConnectionTimelineEventServiceDataImplTest.kt @@ -48,7 +48,7 @@ internal class ConnectionTimelineEventServiceDataImplTest { attemptsCount = 5, failureReason = Optional.of(FailureReason()), ) - service.writeEvent(connectionId = connectionId, event = syncFailedEvent) + service.writeEvent(connectionId = connectionId, event = syncFailedEvent, userId = null) verify { repository.save(any()) } diff --git a/airbyte-data/src/test/kotlin/io/airbyte/data/services/impls/data/StreamAttemptMetadataServiceTest.kt b/airbyte-data/src/test/kotlin/io/airbyte/data/services/impls/data/StreamAttemptMetadataServiceTest.kt new file mode 100644 index 00000000000..4accdf524c2 --- /dev/null +++ b/airbyte-data/src/test/kotlin/io/airbyte/data/services/impls/data/StreamAttemptMetadataServiceTest.kt @@ -0,0 +1,96 @@ +package io.airbyte.data.services.impls.data + +import io.airbyte.data.repositories.AbstractConfigRepositoryTest +import io.airbyte.data.repositories.AttemptsRepository +import io.airbyte.data.repositories.StreamAttemptMetadataRepository +import io.airbyte.data.repositories.entities.Attempt +import io.airbyte.data.services.StreamAttemptMetadata +import io.airbyte.data.services.StreamAttemptMetadataService +import io.micronaut.test.extensions.junit5.annotation.MicronautTest +import org.assertj.core.api.Assertions.assertThat +import org.junit.jupiter.api.BeforeEach +import org.junit.jupiter.api.Test + +@MicronautTest +internal class StreamAttemptMetadataServiceTest : AbstractConfigRepositoryTest() { + val streamAttemptMetadataService = context.getBean(StreamAttemptMetadataService::class.java)!! 
+ + @BeforeEach + fun setupEach() { + context.getBean(AttemptsRepository::class.java).deleteAll() + context.getBean(StreamAttemptMetadataRepository::class.java).deleteAll() + } + + @Test + fun `test insertion and find by attempt id`() { + val jobId = 12L + createAttempt(jobId, 0, 12) + createAttempt(jobId, 1, 13) + + val streamAttemptMetadata0 = + listOf( + StreamAttemptMetadata(streamName = "stream1", wasBackfilled = false, wasResumed = false), + StreamAttemptMetadata(streamName = "stream2", streamNamespace = "ns1", wasBackfilled = false, wasResumed = true), + ) + val streamAttemptMetadata1 = + listOf( + StreamAttemptMetadata(streamName = "s1", streamNamespace = "ns", wasBackfilled = true, wasResumed = true), + StreamAttemptMetadata(streamName = "stream2", streamNamespace = "ns1", wasBackfilled = true, wasResumed = false), + ) + streamAttemptMetadataService.upsertStreamAttemptMetadata(jobId, 0, streamAttemptMetadata0) + streamAttemptMetadataService.upsertStreamAttemptMetadata(jobId, 1, streamAttemptMetadata1) + + val actualMetadata0 = streamAttemptMetadataService.getStreamAttemptMetadata(jobId, 0) + assertThat(actualMetadata0).isEqualTo(streamAttemptMetadata0) + + val actualMetadata1 = streamAttemptMetadataService.getStreamAttemptMetadata(jobId, 1) + assertThat(actualMetadata1).isEqualTo(streamAttemptMetadata1) + } + + @Test + fun `test upsert updates existing rows while adding new ones`() { + val jobId = 13L + createAttempt(jobId, 0, 10) + createAttempt(jobId, 1, 11) + + val sanityCheck = + listOf( + StreamAttemptMetadata(streamName = "s1", wasBackfilled = true, wasResumed = true), + StreamAttemptMetadata(streamName = "s1", streamNamespace = "ns1", wasBackfilled = true, wasResumed = true), + ) + streamAttemptMetadataService.upsertStreamAttemptMetadata(jobId, 1, sanityCheck) + + val metadata0 = + listOf( + StreamAttemptMetadata(streamName = "s1", wasBackfilled = false, wasResumed = false), + StreamAttemptMetadata(streamName = "s2", streamNamespace = "ns1", wasBackfilled = false, wasResumed = true), + ) + streamAttemptMetadataService.upsertStreamAttemptMetadata(jobId, 0, metadata0) + + val actualMetadata0 = streamAttemptMetadataService.getStreamAttemptMetadata(jobId, 0) + assertThat(actualMetadata0).isEqualTo(metadata0) + + val metadata1 = + listOf( + StreamAttemptMetadata(streamName = "s1", wasBackfilled = true, wasResumed = false), + StreamAttemptMetadata(streamName = "s2", streamNamespace = "ns1", wasBackfilled = true, wasResumed = true), + StreamAttemptMetadata(streamName = "s3", wasBackfilled = false, wasResumed = false), + ) + streamAttemptMetadataService.upsertStreamAttemptMetadata(jobId, 0, metadata1) + val actualMetadata1 = streamAttemptMetadataService.getStreamAttemptMetadata(jobId, 0) + assertThat(actualMetadata1).isEqualTo(metadata1) + + // This is verifying that the upsert didn't modify extra rows from other attempts + val actualSanityCheck = streamAttemptMetadataService.getStreamAttemptMetadata(jobId, 1) + assertThat(actualSanityCheck).isEqualTo(sanityCheck) + } + + private fun createAttempt( + jobId: Long, + attemptNumber: Long, + attemptId: Long, + ): Long = + attemptsRepository.save( + Attempt(id = attemptId, jobId = jobId, attemptNumber = attemptNumber), + ).id ?: throw Exception("failed to create attempt for jobId:$jobId with attemptNumber:$attemptNumber") +} diff --git a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_32_8_001__AirbyteConfigDatabaseDenormalization.java 
b/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_32_8_001__AirbyteConfigDatabaseDenormalization.java index 457c298f964..5056500c825 100644 --- a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_32_8_001__AirbyteConfigDatabaseDenormalization.java +++ b/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_32_8_001__AirbyteConfigDatabaseDenormalization.java @@ -523,9 +523,7 @@ private static void createAndPopulateOperation(final DSLContext ctx) { .set(workspaceId, standardSyncOperation.getWorkspaceId()) .set(name, standardSyncOperation.getName()) .set(operatorType, standardSyncOperation.getOperatorType() == null ? null - : Enums.toEnum(standardSyncOperation.getOperatorType().value(), OperatorType.class).orElseThrow()) - .set(operatorNormalization, JSONB.valueOf(Jsons.serialize(standardSyncOperation.getOperatorNormalization()))) - .set(operatorDbt, JSONB.valueOf(Jsons.serialize(standardSyncOperation.getOperatorDbt()))) + : Enums.toEnum(standardSyncOperation.getOperatorType().value(), OperatorType.class).orElse(OperatorType.normalization)) .set(tombstone, standardSyncOperation.getTombstone() != null && standardSyncOperation.getTombstone()) .set(createdAt, OffsetDateTime.ofInstant(configWithMetadata.getCreatedAt(), ZoneOffset.UTC)) .set(updatedAt, OffsetDateTime.ofInstant(configWithMetadata.getUpdatedAt(), ZoneOffset.UTC)) diff --git a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_57_4_006__AddCdkVersionLastModifiedToActorDefVersion.java b/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_57_4_006__AddCdkVersionLastModifiedToActorDefVersion.java new file mode 100644 index 00000000000..d9d98ed61b9 --- /dev/null +++ b/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_57_4_006__AddCdkVersionLastModifiedToActorDefVersion.java @@ -0,0 +1,51 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.db.instance.configs.migrations; + +import org.flywaydb.core.api.migration.BaseJavaMigration; +import org.flywaydb.core.api.migration.Context; +import org.jooq.DSLContext; +import org.jooq.impl.DSL; +import org.jooq.impl.SQLDataType; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class V0_57_4_006__AddCdkVersionLastModifiedToActorDefVersion extends BaseJavaMigration { + + private static final Logger LOGGER = LoggerFactory.getLogger(V0_57_4_006__AddCdkVersionLastModifiedToActorDefVersion.class); + + static void addCdkVersionToActorDefinitionVersion(final DSLContext ctx) { + ctx.alterTable("actor_definition_version") + .addColumnIfNotExists(DSL.field("cdk_version", SQLDataType.VARCHAR(256).nullable(true))) + .execute(); + } + + static void addLastPublishedToActorDefinitionVersion(final DSLContext ctx) { + ctx.alterTable("actor_definition_version") + .addColumnIfNotExists(DSL.field("last_published", SQLDataType.TIMESTAMPWITHTIMEZONE.nullable(true))) + .execute(); + } + + static void addMetricsToActorDefinition(final DSLContext ctx) { + ctx.alterTable("actor_definition") + .addColumnIfNotExists(DSL.field("metrics", SQLDataType.JSONB.nullable(true))) + .execute(); + } + + @Override + public void migrate(final Context context) throws Exception { + LOGGER.info("Running migration: {}", this.getClass().getSimpleName()); + + // Warning: please do not use any jOOQ generated code to write a migration. + // As database schema changes, the generated jOOQ code can be deprecated. 
So + // old migration may not compile if there is any generated code. + final DSLContext ctx = DSL.using(context.getConnection()); + + addCdkVersionToActorDefinitionVersion(ctx); + addLastPublishedToActorDefinitionVersion(ctx); + addMetricsToActorDefinition(ctx); + } + +} diff --git a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/jobs/migrations/V0_57_2_004__AddStreamAttemptMetadata.java b/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/jobs/migrations/V0_57_2_004__AddStreamAttemptMetadata.java new file mode 100644 index 00000000000..77d0ae5e0cc --- /dev/null +++ b/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/jobs/migrations/V0_57_2_004__AddStreamAttemptMetadata.java @@ -0,0 +1,66 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.db.instance.jobs.migrations; + +import static org.jooq.impl.DSL.foreignKey; +import static org.jooq.impl.DSL.primaryKey; + +import java.util.UUID; +import org.flywaydb.core.api.migration.BaseJavaMigration; +import org.flywaydb.core.api.migration.Context; +import org.jooq.DSLContext; +import org.jooq.Field; +import org.jooq.impl.DSL; +import org.jooq.impl.SQLDataType; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class V0_57_2_004__AddStreamAttemptMetadata extends BaseJavaMigration { + + private static final Logger LOGGER = LoggerFactory.getLogger(V0_57_2_004__AddStreamAttemptMetadata.class); + private static final String STREAM_ATTEMPT_METADATA_TABLE_NAME = "stream_attempt_metadata"; + + @Override + public void migrate(final Context context) throws Exception { + LOGGER.info("Running migration: {}", this.getClass().getSimpleName()); + + // Warning: please do not use any jOOQ generated code to write a migration. + // As database schema changes, the generated jOOQ code can be deprecated. So + // old migration may not compile if there is any generated code. 
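+    // What follows creates the stream_attempt_metadata table plus three indexes: a plain index for
+    // attemptId lookups and two partial unique indexes over the stream identifier. Two unique
+    // indexes are needed because Postgres drops null values from unique indexes, so rows with a
+    // null stream_namespace must be deduplicated by their own index. The resulting DDL is roughly:
+    //   CREATE UNIQUE INDEX ... ON stream_attempt_metadata (attempt_id, stream_namespace, stream_name) WHERE stream_namespace IS NOT NULL;
+    //   CREATE UNIQUE INDEX ... ON stream_attempt_metadata (attempt_id, stream_name) WHERE stream_namespace IS NULL;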
+    final DSLContext ctx = DSL.using(context.getConnection());
+
+    final Field<UUID> id = DSL.field("id", SQLDataType.UUID.notNull());
+    final Field<Integer> attemptId = DSL.field("attempt_id", SQLDataType.INTEGER.nullable(false));
+    final Field<String> streamNamespace = DSL.field("stream_namespace", SQLDataType.VARCHAR);
+    final Field<String> streamName = DSL.field("stream_name", SQLDataType.VARCHAR.notNull());
+    final Field<Boolean> wasBackfilled = DSL.field("was_backfilled", SQLDataType.BOOLEAN.nullable(false).defaultValue(false));
+    final Field<Boolean> wasResumed = DSL.field("was_resumed", SQLDataType.BOOLEAN.nullable(false).defaultValue(false));
+
+    ctx.createTableIfNotExists(STREAM_ATTEMPT_METADATA_TABLE_NAME)
+        .columns(id, attemptId, streamNamespace, streamName, wasBackfilled, wasResumed)
+        .constraints(
+            primaryKey(id),
+            foreignKey(attemptId).references("attempts", "id").onDeleteCascade())
+        .execute();
+
+    // We expect attemptId-based lookups
+    ctx.createIndexIfNotExists("stream_attempt_metadata__attempt_id_idx")
+        .on(STREAM_ATTEMPT_METADATA_TABLE_NAME, attemptId.getName())
+        .execute();
+
+    // Uniqueness constraint on name, namespace per attempt to avoid duplicates
+    ctx.createUniqueIndexIfNotExists("stream_attempt_metadata__attempt_id_name_namespace_idx")
+        .on(STREAM_ATTEMPT_METADATA_TABLE_NAME, attemptId.getName(), streamNamespace.getName(), streamName.getName())
+        .where(streamNamespace.isNotNull())
+        .execute();
+
+    // Workaround for namespace being null and pg dropping null values from indexes
+    ctx.createUniqueIndexIfNotExists("stream_attempt_metadata__attempt_id_name_idx")
+        .on(STREAM_ATTEMPT_METADATA_TABLE_NAME, attemptId.getName(), streamName.getName())
+        .where(streamNamespace.isNull())
+        .execute();
+  }
+
+}
diff --git a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/jobs/migrations/V0_57_2_005__FixStreamAttemptMetadataAttemptIdDataType.java b/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/jobs/migrations/V0_57_2_005__FixStreamAttemptMetadataAttemptIdDataType.java
new file mode 100644
index 00000000000..08663e534fa
--- /dev/null
+++ b/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/jobs/migrations/V0_57_2_005__FixStreamAttemptMetadataAttemptIdDataType.java
@@ -0,0 +1,33 @@
+/*
+ * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved.
+ */
+
+package io.airbyte.db.instance.jobs.migrations;
+
+import org.flywaydb.core.api.migration.BaseJavaMigration;
+import org.flywaydb.core.api.migration.Context;
+import org.jooq.DSLContext;
+import org.jooq.impl.DSL;
+import org.jooq.impl.SQLDataType;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+// TODO: update migration description in the class name
+public class V0_57_2_005__FixStreamAttemptMetadataAttemptIdDataType extends BaseJavaMigration {
+
+  private static final Logger LOGGER = LoggerFactory.getLogger(V0_57_2_005__FixStreamAttemptMetadataAttemptIdDataType.class);
+
+  @Override
+  public void migrate(final Context context) throws Exception {
+    LOGGER.info("Running migration: {}", this.getClass().getSimpleName());
+
+    // Warning: please do not use any jOOQ generated code to write a migration.
+    // As database schema changes, the generated jOOQ code can be deprecated. So
+    // old migration may not compile if there is any generated code.
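+    // V0_57_2_004 above declared attempt_id as INTEGER, but the attempts.id column it references
+    // is a BIGINT (see jobs_database/schema_dump.txt); widen the column so both sides of the
+    // foreign key share the same type.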
+ final DSLContext ctx = DSL.using(context.getConnection()); + ctx.alterTable("stream_attempt_metadata") + .alterColumn("attempt_id").set(SQLDataType.BIGINT) + .execute(); + } + +} diff --git a/airbyte-db/db-lib/src/main/resources/configs_database/schema_dump.txt b/airbyte-db/db-lib/src/main/resources/configs_database/schema_dump.txt index 65c3a750890..667d3c8bafa 100644 --- a/airbyte-db/db-lib/src/main/resources/configs_database/schema_dump.txt +++ b/airbyte-db/db-lib/src/main/resources/configs_database/schema_dump.txt @@ -54,6 +54,7 @@ create table "public"."actor_definition" ( "max_seconds_between_messages" int, "default_version_id" uuid, "icon_url" varchar(256), + "metrics" jsonb, constraint "actor_definition_pkey" primary key ("id") ); create table "public"."actor_definition_breaking_change" ( @@ -96,6 +97,8 @@ create table "public"."actor_definition_version" ( "support_state" "public"."support_state" not null default cast('supported' as support_state), "support_level" "public"."support_level" not null, "supports_refreshes" boolean not null default false, + "cdk_version" varchar(256), + "last_published" timestamp(6) with time zone, constraint "actor_definition_version_pkey" primary key ("id"), constraint "actor_definition_version_actor_definition_id_version_key" unique ("actor_definition_id", "docker_image_tag") ); diff --git a/airbyte-db/db-lib/src/main/resources/jobs_database/schema_dump.txt b/airbyte-db/db-lib/src/main/resources/jobs_database/schema_dump.txt index ffd11189dd0..a91be4f799f 100644 --- a/airbyte-db/db-lib/src/main/resources/jobs_database/schema_dump.txt +++ b/airbyte-db/db-lib/src/main/resources/jobs_database/schema_dump.txt @@ -70,6 +70,15 @@ create table "public"."retry_states" ( constraint "retry_states_pkey" primary key ("id"), constraint "uniq_job_id" unique ("job_id") ); +create table "public"."stream_attempt_metadata" ( + "id" uuid not null, + "attempt_id" bigint not null, + "stream_namespace" varchar(2147483647), + "stream_name" varchar(2147483647) not null, + "was_backfilled" boolean not null default false, + "was_resumed" boolean not null default false, + constraint "stream_attempt_metadata_pkey" primary key ("id") +); create table "public"."stream_stats" ( "id" uuid not null, "attempt_id" bigint not null, @@ -135,12 +144,18 @@ where ((status <> ALL (ARRAY['failed'::job_status, 'succeeded'::job_status, 'can create index "normalization_summary_attempt_id_idx" on "public"."normalization_summaries"("attempt_id" asc); create index "retry_state_connection_id_idx" on "public"."retry_states"("connection_id" asc); create index "retry_state_job_id_idx" on "public"."retry_states"("job_id" asc); +create index "stream_attempt_metadata__attempt_id_idx" on "public"."stream_attempt_metadata"("attempt_id" asc); +create unique index "stream_attempt_metadata__attempt_id_name_idx" on "public"."stream_attempt_metadata"("attempt_id" asc, "stream_name" asc) +where ((stream_namespace IS NULL)); +create unique index "stream_attempt_metadata__attempt_id_name_namespace_idx" on "public"."stream_attempt_metadata"("attempt_id" asc, "stream_namespace" asc, "stream_name" asc) +where ((stream_namespace IS NOT NULL)); create index "index" on "public"."stream_stats"("attempt_id" asc); create index "stream_status_connection_id_idx" on "public"."stream_statuses"("connection_id" asc); create index "stream_status_job_id_idx" on "public"."stream_statuses"("job_id" asc); create index "attempt_id_idx" on "public"."sync_stats"("attempt_id" asc); alter table "public"."normalization_summaries" add 
constraint "normalization_summaries_attempt_id_fkey" foreign key ("attempt_id") references "public"."attempts" ("id"); alter table "public"."retry_states" add constraint "retry_states_job_id_fkey" foreign key ("job_id") references "public"."jobs" ("id"); +alter table "public"."stream_attempt_metadata" add constraint "stream_attempt_metadata_attempt_id_fkey" foreign key ("attempt_id") references "public"."attempts" ("id"); alter table "public"."stream_stats" add constraint "stream_stats_attempt_id_fkey" foreign key ("attempt_id") references "public"."attempts" ("id"); alter table "public"."stream_statuses" add constraint "stream_statuses_job_id_fkey" foreign key ("job_id") references "public"."jobs" ("id"); alter table "public"."sync_stats" add constraint "sync_stats_attempt_id_fkey" foreign key ("attempt_id") references "public"."attempts" ("id"); diff --git a/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/configs/migrations/SetupForNormalizedTablesTest.java b/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/configs/migrations/SetupForNormalizedTablesTest.java index b73632cdc68..59a63d5d89b 100644 --- a/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/configs/migrations/SetupForNormalizedTablesTest.java +++ b/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/configs/migrations/SetupForNormalizedTablesTest.java @@ -17,9 +17,7 @@ import io.airbyte.config.JobSyncConfig.NamespaceDefinitionType; import io.airbyte.config.Notification; import io.airbyte.config.Notification.NotificationType; -import io.airbyte.config.OperatorDbt; -import io.airbyte.config.OperatorNormalization; -import io.airbyte.config.OperatorNormalization.Option; +import io.airbyte.config.OperatorWebhook; import io.airbyte.config.ResourceRequirements; import io.airbyte.config.Schedule; import io.airbyte.config.Schedule.TimeUnit; @@ -29,7 +27,6 @@ import io.airbyte.config.StandardSync; import io.airbyte.config.StandardSync.Status; import io.airbyte.config.StandardSyncOperation; -import io.airbyte.config.StandardSyncOperation.OperatorType; import io.airbyte.config.StandardSyncState; import io.airbyte.config.StandardWorkspace; import io.airbyte.config.State; @@ -299,27 +296,20 @@ public static List destinationOauthParameters() { } public static List standardSyncOperations() { - final OperatorDbt operatorDbt = new OperatorDbt() - .withDbtArguments("dbt-arguments") - .withDockerImage("image-tag") - .withGitRepoBranch("git-repo-branch") - .withGitRepoUrl("git-repo-url"); final StandardSyncOperation standardSyncOperation1 = new StandardSyncOperation() .withName("operation-1") .withTombstone(false) .withOperationId(OPERATION_ID_1) .withWorkspaceId(WORKSPACE_ID) - .withOperatorDbt(operatorDbt) - .withOperatorNormalization(null) - .withOperatorType(OperatorType.DBT); + .withOperatorType(StandardSyncOperation.OperatorType.NORMALIZATION) + .withOperatorWebhook(new OperatorWebhook()); final StandardSyncOperation standardSyncOperation2 = new StandardSyncOperation() .withName("operation-1") .withTombstone(false) .withOperationId(OPERATION_ID_2) .withWorkspaceId(WORKSPACE_ID) - .withOperatorDbt(null) - .withOperatorNormalization(new OperatorNormalization().withOption(Option.BASIC)) - .withOperatorType(OperatorType.NORMALIZATION); + .withOperatorType(StandardSyncOperation.OperatorType.NORMALIZATION) + .withOperatorWebhook(new OperatorWebhook()); return Arrays.asList(standardSyncOperation1, standardSyncOperation2); } diff --git 
a/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/configs/migrations/V0_32_8_001__AirbyteConfigDatabaseDenormalization_Test.java b/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/configs/migrations/V0_32_8_001__AirbyteConfigDatabaseDenormalization_Test.java index e2686e688f3..16f358e8024 100644 --- a/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/configs/migrations/V0_32_8_001__AirbyteConfigDatabaseDenormalization_Test.java +++ b/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/configs/migrations/V0_32_8_001__AirbyteConfigDatabaseDenormalization_Test.java @@ -23,8 +23,7 @@ import io.airbyte.config.DestinationConnection; import io.airbyte.config.DestinationOAuthParameter; import io.airbyte.config.Notification; -import io.airbyte.config.OperatorDbt; -import io.airbyte.config.OperatorNormalization; +import io.airbyte.config.OperatorWebhook; import io.airbyte.config.ResourceRequirements; import io.airbyte.config.Schedule; import io.airbyte.config.SourceConnection; @@ -340,8 +339,6 @@ private void assertDataForOperations(final DSLContext context) { final Field workspaceId = DSL.field("workspace_id", SQLDataType.UUID.nullable(false)); final Field name = DSL.field("name", SQLDataType.VARCHAR(256).nullable(false)); final Field operatorType = DSL.field("operator_type", SQLDataType.VARCHAR.asEnumDataType(OperatorType.class).nullable(false)); - final Field operatorNormalization = DSL.field("operator_normalization", SQLDataType.JSONB.nullable(true)); - final Field operatorDbt = DSL.field("operator_dbt", SQLDataType.JSONB.nullable(true)); final Field tombstone = DSL.field("tombstone", SQLDataType.BOOLEAN.nullable(true)); final Field createdAt = DSL.field("created_at", SQLDataType.TIMESTAMPWITHTIMEZONE.nullable(false)); final Field updatedAt = DSL.field("updated_at", SQLDataType.TIMESTAMPWITHTIMEZONE.nullable(false)); @@ -357,9 +354,9 @@ private void assertDataForOperations(final DSLContext context) { .withOperationId(record.get(id)) .withName(record.get(name)) .withWorkspaceId(record.get(workspaceId)) - .withOperatorType(Enums.toEnum(record.get(operatorType, String.class), StandardSyncOperation.OperatorType.class).orElseThrow()) - .withOperatorNormalization(Jsons.deserialize(record.get(operatorNormalization).data(), OperatorNormalization.class)) - .withOperatorDbt(Jsons.deserialize(record.get(operatorDbt).data(), OperatorDbt.class)) + .withOperatorType(Enums.toEnum(record.get(operatorType, String.class), StandardSyncOperation.OperatorType.class) + .orElse(StandardSyncOperation.OperatorType.WEBHOOK)) + .withOperatorWebhook(new OperatorWebhook()) .withTombstone(record.get(tombstone)); Assertions.assertTrue(expectedDefinitions.contains(standardSyncOperation)); diff --git a/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/configs/migrations/V0_57_4_006__AddCdkVersionLastModifiedToActorDefVersionTest.java b/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/configs/migrations/V0_57_4_006__AddCdkVersionLastModifiedToActorDefVersionTest.java new file mode 100644 index 00000000000..92a46ee3b7a --- /dev/null +++ b/airbyte-db/db-lib/src/test/java/io/airbyte/db/instance/configs/migrations/V0_57_4_006__AddCdkVersionLastModifiedToActorDefVersionTest.java @@ -0,0 +1,162 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.db.instance.configs.migrations; + +import static org.junit.jupiter.api.Assertions.*; + +import io.airbyte.db.factory.FlywayFactory; +import io.airbyte.db.instance.configs.AbstractConfigsDatabaseTest; +import io.airbyte.db.instance.configs.ConfigsDatabaseMigrator; +import io.airbyte.db.instance.configs.jooq.generated.enums.ActorType; +import io.airbyte.db.instance.configs.jooq.generated.enums.ReleaseStage; +import io.airbyte.db.instance.configs.jooq.generated.enums.SupportLevel; +import io.airbyte.db.instance.development.DevDatabaseMigrator; +import java.time.OffsetDateTime; +import java.util.UUID; +import org.flywaydb.core.Flyway; +import org.flywaydb.core.api.migration.BaseJavaMigration; +import org.jooq.DSLContext; +import org.jooq.JSONB; +import org.jooq.impl.DSL; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +class V0_57_4_006__AddCdkVersionLastModifiedToActorDefVersionTest extends AbstractConfigsDatabaseTest { + + @BeforeEach + void beforeEach() { + final Flyway flyway = + FlywayFactory.create(dataSource, "V0_57_4_006__AddCdkVersionLastModifiedToActorDefVersionTest", ConfigsDatabaseMigrator.DB_IDENTIFIER, + ConfigsDatabaseMigrator.MIGRATION_FILE_LOCATION); + final ConfigsDatabaseMigrator configsDbMigrator = new ConfigsDatabaseMigrator(database, flyway); + + final BaseJavaMigration previousMigration = new V0_57_4_006__AddCdkVersionLastModifiedToActorDefVersion(); + final DevDatabaseMigrator devConfigsDbMigrator = new DevDatabaseMigrator(configsDbMigrator, previousMigration.getVersion()); + devConfigsDbMigrator.createBaseline(); + } + + private static boolean columnExists(final DSLContext ctx, final String tableName, final String columnName) { + return ctx.fetchExists(DSL.select() + .from("information_schema.columns") + .where(DSL.field("table_name").eq(tableName) + .and(DSL.field("column_name").eq(columnName)))); + } + + private static void insertADVWithCdk( + final DSLContext ctx, + final String cdkVersion) { + ctx.insertInto(DSL.table("actor_definition_version")) + .columns( + DSL.field("id"), + DSL.field("actor_definition_id"), + DSL.field("docker_repository"), + DSL.field("docker_image_tag"), + DSL.field("spec"), + DSL.field("release_stage"), + DSL.field("support_level"), + DSL.field("cdk_version")) + .values( + UUID.randomUUID(), + UUID.randomUUID(), + "repo", + "1.0.0", + JSONB.valueOf("{}"), + ReleaseStage.alpha, + SupportLevel.community, + cdkVersion) + .execute(); + } + + private static void insertADVWithLastPublished( + final DSLContext ctx, + final String lastPublished) { + ctx.insertInto(DSL.table("actor_definition_version")) + .columns( + DSL.field("id"), + DSL.field("actor_definition_id"), + DSL.field("docker_repository"), + DSL.field("docker_image_tag"), + DSL.field("spec"), + DSL.field("release_stage"), + DSL.field("support_level"), + DSL.field("last_published")) + .values( + UUID.randomUUID(), + UUID.randomUUID(), + "repo", + "1.0.0", + JSONB.valueOf("{}"), + ReleaseStage.alpha, + SupportLevel.community, + lastPublished != null ? 
OffsetDateTime.parse(lastPublished) : null) + .execute(); + } + + private static void insertADWithMetrics( + final DSLContext ctx, + final String metrics) { + ctx.insertInto(DSL.table("actor_definition")) + .columns( + DSL.field("id"), + DSL.field("name"), + DSL.field("actor_type"), + DSL.field("metrics")) + .values( + UUID.randomUUID(), + "name", + ActorType.source, + JSONB.valueOf(metrics)) + .execute(); + } + + @Test + void addCdkVersionToActorDefinitionVersion() { + final DSLContext ctx = getDslContext(); + + // ignore all foreign key constraints + ctx.execute("SET session_replication_role = replica;"); + + // tests that the column was added + Assertions.assertTrue(columnExists(ctx, "actor_definition_version", "cdk_version")); + + // tests that we can insert a row with a cdk_version + insertADVWithCdk(ctx, "python:0.1.0"); + insertADVWithCdk(ctx, null); + } + + @Test + void addLastPublishedToActorDefinitionVersion() { + final DSLContext ctx = getDslContext(); + + // ignore all foreign key constraints + ctx.execute("SET session_replication_role = replica;"); + + // tests that the column was added + Assertions.assertTrue(columnExists(ctx, "actor_definition_version", "last_published")); + + // tests that we can insert a row with a last_published + insertADVWithLastPublished(ctx, "2021-01-01T00:00:00Z"); + insertADVWithLastPublished(ctx, "2024-05-30T15:02:26.841000+00:00"); + insertADVWithLastPublished(ctx, null); + } + + @Test + void addMetricsToActorDefinition() { + final DSLContext ctx = getDslContext(); + + // ignore all foreign key constraints + ctx.execute("SET session_replication_role = replica;"); + + // tests that the column was added + Assertions.assertTrue(columnExists(ctx, "actor_definition", "metrics")); + + // tests that we can insert a row with metrics + insertADWithMetrics(ctx, "{}"); + insertADWithMetrics(ctx, "{\"foo\": \"bar\"}"); + insertADWithMetrics(ctx, null); + } + +} diff --git a/airbyte-featureflag/src/main/kotlin/Context.kt b/airbyte-featureflag/src/main/kotlin/Context.kt index 5d48267a09a..ba5bb5f1427 100644 --- a/airbyte-featureflag/src/main/kotlin/Context.kt +++ b/airbyte-featureflag/src/main/kotlin/Context.kt @@ -232,3 +232,9 @@ data class Priority(override val key: String) : Context { val HIGH_PRIORITY = "high" } } + +// This is aimed to be used with the EnvFeatureFlag +data object Empty : Context { + override val kind: String = "empty" + override val key: String = "" +} diff --git a/airbyte-featureflag/src/main/kotlin/FlagDefinitions.kt b/airbyte-featureflag/src/main/kotlin/FlagDefinitions.kt index bd73857a13c..e13abd0496a 100644 --- a/airbyte-featureflag/src/main/kotlin/FlagDefinitions.kt +++ b/airbyte-featureflag/src/main/kotlin/FlagDefinitions.kt @@ -19,8 +19,6 @@ object AutoDetectSchema : EnvVar(envVar = "AUTO_DETECT_SCHEMA") object RemoveValidationLimit : Temporary(key = "validation.removeValidationLimit", default = false) -object NormalizationInDestination : Temporary(key = "connectors.normalizationInDestination", default = "") - object FieldSelectionEnabled : Temporary(key = "connection.columnSelection", default = false) object CheckWithCatalog : Temporary(key = "check-with-catalog", default = false) @@ -156,6 +154,8 @@ object UseWorkloadApi : Temporary(key = "platform.use-workload-api", de object EmitStateStatsToSegment : Temporary(key = "platform.emit-state-stats-segment", default = false) +object LogStreamNamesInSateMessage : Temporary(key = "platform.logs-stream-names-state", default = false) + object ProcessRateLimitedMessage : Temporary(key = 
"platform.process-rate-limited-message", default = false) object AddInitialCreditsForWorkspace : Temporary(key = "add-credits-at-workspace-creation-for-org", default = 0) @@ -180,6 +180,16 @@ object ConnectionFieldLimitOverride : Permanent(key = "connection-field-lim object DeleteDanglingSecrets : Temporary(key = "platform.delete-dangling-secrets", default = false) +object DeleteSecretsWhenTombstoneActors : Temporary(key = "platform.delete-secrets-when-tombstone-actors", default = false) + object EnableResumableFullRefresh : Temporary(key = "platform.enable-resumable-full-refresh", default = false) object AlwaysRunCheckBeforeSync : Permanent(key = "platform.always-run-check-before-sync", default = false) + +object WorkloadLauncherEnabled : EnvVar(envVar = "WORKLOAD_LAUNCHER_ENABLED", default = false) + +object WorkloadApiServerEnabled : EnvVar(envVar = "WORKLOAD_API_SERVER_ENABLED", default = false) + +object DiscoverPostprocessInTemporal : Permanent(key = "platform.discover-postprocess-in-temporal", default = false) + +object UseStreamAttemptMetadata : Temporary(key = "platform.use-stream-attempt-metadata", default = false) diff --git a/airbyte-json-validation/build.gradle.kts b/airbyte-json-validation/build.gradle.kts index 9dc6ad460ec..46edfd0e112 100644 --- a/airbyte-json-validation/build.gradle.kts +++ b/airbyte-json-validation/build.gradle.kts @@ -6,7 +6,7 @@ plugins { dependencies { implementation(project(":airbyte-commons")) implementation(libs.guava) - implementation("com.networknt:json-schema-validator:1.0.72") + implementation(libs.json.schema.validator) // needed so that we can follow $ref when parsing json. jackson does not support this natively. implementation("me.andrz.jackson:jackson-json-reference-core:0.3.2") diff --git a/airbyte-json-validation/src/main/java/io/airbyte/validation/json/JsonSchemaValidator.java b/airbyte-json-validation/src/main/java/io/airbyte/validation/json/JsonSchemaValidator.java index a2cf1c3d733..68d6beb4d9c 100644 --- a/airbyte-json-validation/src/main/java/io/airbyte/validation/json/JsonSchemaValidator.java +++ b/airbyte-json-validation/src/main/java/io/airbyte/validation/json/JsonSchemaValidator.java @@ -8,8 +8,12 @@ import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Preconditions; import com.networknt.schema.JsonMetaSchema; +import com.networknt.schema.JsonNodePath; import com.networknt.schema.JsonSchema; import com.networknt.schema.JsonSchemaFactory; +import com.networknt.schema.PathType; +import com.networknt.schema.SchemaLocation; +import com.networknt.schema.SchemaValidatorsConfig; import com.networknt.schema.SpecVersion; import com.networknt.schema.ValidationContext; import com.networknt.schema.ValidationMessage; @@ -210,15 +214,15 @@ private JsonSchema getSchemaValidator(JsonNode schemaJson) { } final ValidationContext context = new ValidationContext( - jsonSchemaFactory.getUriFactory(), - null, metaschema, jsonSchemaFactory, - null); - final JsonSchema schema = new JsonSchema( + new SchemaValidatorsConfig()); + final JsonSchema schema = jsonSchemaFactory.create( context, - baseUri, - schemaJson); + SchemaLocation.of(baseUri.toString()), + new JsonNodePath(PathType.LEGACY), + schemaJson, + null); return schema; } diff --git a/airbyte-metrics/metrics-lib/src/main/java/io/airbyte/metrics/lib/MetricTags.java b/airbyte-metrics/metrics-lib/src/main/java/io/airbyte/metrics/lib/MetricTags.java index 98e2928c874..af6ca69e8f7 100644 --- 
a/airbyte-metrics/metrics-lib/src/main/java/io/airbyte/metrics/lib/MetricTags.java
+++ b/airbyte-metrics/metrics-lib/src/main/java/io/airbyte/metrics/lib/MetricTags.java
@@ -54,6 +54,7 @@ public class MetricTags {
   public static final String RECORD_COUNT_TYPE = "record_count_type";
   public static final String RELEASE_STAGE = "release_stage";
   public static final String SOURCE_ID = "source_id";
+  public static final String SOURCE_DEFINITION_ID = "source_definition_id";
   public static final String SOURCE_IMAGE = "source_image";
   public static final String SOURCE_IMAGE_IS_DEFAULT = "source_image_is_default";
   public static final String STATUS = "status";
diff --git a/airbyte-metrics/metrics-lib/src/main/java/io/airbyte/metrics/lib/OssMetricsRegistry.java b/airbyte-metrics/metrics-lib/src/main/java/io/airbyte/metrics/lib/OssMetricsRegistry.java
index ee4a1971149..4a87ac3e30e 100644
--- a/airbyte-metrics/metrics-lib/src/main/java/io/airbyte/metrics/lib/OssMetricsRegistry.java
+++ b/airbyte-metrics/metrics-lib/src/main/java/io/airbyte/metrics/lib/OssMetricsRegistry.java
@@ -465,7 +465,19 @@ public enum OssMetricsRegistry implements MetricsRegistry {
   REPLICATION_CONTEXT_NOT_INITIALIZED_ERROR(MetricEmittingApps.ORCHESTRATOR,
       "replication_context_not_initialized_error",
-      "The replication context was not initialized when it was expected to be.");
+      "The replication context was not initialized when it was expected to be."),
+
+  DISCOVER_CATALOG_RUN_TIME(MetricEmittingApps.WORKER,
+      "discover_catalog_run_time",
+      "Time to run a discover catalog before a replication."),
+
+  REPLICATION_RUN_TIME(MetricEmittingApps.ORCHESTRATOR,
+      "replication_run_time",
+      "Time to run a replication within a sync."),
+
+  SYNC_TOTAL_TIME(MetricEmittingApps.ORCHESTRATOR,
+      "sync_total_time",
+      "Time to run a sync workflow.");
 
   private final MetricEmittingApp application;
   private final String metricName;
diff --git a/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/DefaultJobPersistence.java b/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/DefaultJobPersistence.java
index 034733b2651..e3ed53badb4 100644
--- a/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/DefaultJobPersistence.java
+++ b/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/DefaultJobPersistence.java
@@ -6,12 +6,11 @@
 import static io.airbyte.db.instance.jobs.jooq.generated.Tables.ATTEMPTS;
 import static io.airbyte.db.instance.jobs.jooq.generated.Tables.JOBS;
-import static io.airbyte.db.instance.jobs.jooq.generated.Tables.NORMALIZATION_SUMMARIES;
+import static io.airbyte.db.instance.jobs.jooq.generated.Tables.STREAM_ATTEMPT_METADATA;
 import static io.airbyte.db.instance.jobs.jooq.generated.Tables.STREAM_STATS;
 import static io.airbyte.db.instance.jobs.jooq.generated.Tables.SYNC_STATS;
 import static io.airbyte.persistence.job.models.JobStatus.TERMINAL_STATUSES;
 
-import com.fasterxml.jackson.core.JsonProcessingException;
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Sets;
@@ -28,12 +27,10 @@
 import io.airbyte.commons.version.Version;
 import io.airbyte.config.AttemptFailureSummary;
 import io.airbyte.config.AttemptSyncConfig;
-import io.airbyte.config.FailureReason;
 import io.airbyte.config.JobConfig;
 import io.airbyte.config.JobConfig.ConfigType;
 import io.airbyte.config.JobOutput;
 import io.airbyte.config.JobOutput.OutputType;
-import
io.airbyte.config.NormalizationSummary; import io.airbyte.config.StreamSyncStats; import io.airbyte.config.SyncStats; import io.airbyte.config.persistence.PersistenceHelpers; @@ -43,7 +40,6 @@ import io.airbyte.db.instance.jobs.jooq.generated.tables.records.JobsRecord; import io.airbyte.metrics.lib.ApmTraceUtils; import io.airbyte.persistence.job.models.Attempt; -import io.airbyte.persistence.job.models.AttemptNormalizationStatus; import io.airbyte.persistence.job.models.AttemptStatus; import io.airbyte.persistence.job.models.AttemptWithJobInfo; import io.airbyte.persistence.job.models.Job; @@ -79,6 +75,7 @@ import org.apache.commons.collections4.CollectionUtils; import org.apache.commons.lang3.StringUtils; import org.jooq.DSLContext; +import org.jooq.Field; import org.jooq.JSONB; import org.jooq.Query; import org.jooq.Record; @@ -330,10 +327,15 @@ private static void hydrateStreamStats(final String jobIdsStr, final DSLContext final var streamResults = ctx.fetch( "SELECT atmpt.id, atmpt.attempt_number, atmpt.job_id, " + "stats.stream_name, stats.stream_namespace, stats.estimated_bytes, stats.estimated_records, stats.bytes_emitted, stats.records_emitted," - + "stats.bytes_committed, stats.records_committed " + + "stats.bytes_committed, stats.records_committed, sam.was_backfilled, sam.was_resumed " + "FROM stream_stats stats " + "INNER JOIN attempts atmpt ON atmpt.id = stats.attempt_id " - + "WHERE attempt_id IN " + + "LEFT JOIN stream_attempt_metadata sam ON (" + + "sam.attempt_id = stats.attempt_id and " + + "sam.stream_name = stats.stream_name and " + + "((sam.stream_namespace is null and stats.stream_namespace is null) or (sam.stream_namespace = stats.stream_namespace))" + + ") " + + "WHERE stats.attempt_id IN " + "( SELECT id FROM attempts WHERE job_id IN ( " + jobIdsStr + "));"); streamResults.forEach(r -> { @@ -341,8 +343,13 @@ private static void hydrateStreamStats(final String jobIdsStr, final DSLContext final String streamName = r.get(STREAM_STATS.STREAM_NAME); final long attemptId = r.get(ATTEMPTS.ID); final var streamDescriptor = new StreamDescriptor().withName(streamName).withNamespace(streamNamespace); - final boolean wasBackfilled = backFilledStreamsPerAttemptId.getOrDefault(attemptId, new HashSet<>()).contains(streamDescriptor); - final boolean wasResumed = resumedStreamsPerAttemptId.getOrDefault(attemptId, new HashSet<>()).contains(streamDescriptor); + + // We merge the information from the database and what is retrieved from the attemptOutput because + // the historical data is only present in the attemptOutput + final boolean wasBackfilled = getOrDefaultFalse(r, STREAM_ATTEMPT_METADATA.WAS_BACKFILLED) + || backFilledStreamsPerAttemptId.getOrDefault(attemptId, new HashSet<>()).contains(streamDescriptor); + final boolean wasResumed = getOrDefaultFalse(r, STREAM_ATTEMPT_METADATA.WAS_RESUMED) + || resumedStreamsPerAttemptId.getOrDefault(attemptId, new HashSet<>()).contains(streamDescriptor); final var streamSyncStats = new StreamSyncStats() .withStreamNamespace(streamNamespace) @@ -366,6 +373,10 @@ private static void hydrateStreamStats(final String jobIdsStr, final DSLContext }); } + private static boolean getOrDefaultFalse(final Record r, final Field field) { + return r.get(field) == null ? 
false : r.get(field); + } + @VisibleForTesting static Long getAttemptId(final long jobId, final int attemptNumber, final DSLContext ctx) { final Optional record = @@ -403,22 +414,6 @@ private static RecordMapper getStreamStatsRecordsMapper }; } - private static RecordMapper getNormalizationSummaryRecordMapper() { - return record -> { - try { - return new NormalizationSummary().withStartTime(record.get(NORMALIZATION_SUMMARIES.START_TIME).toInstant().toEpochMilli()) - .withEndTime(record.get(NORMALIZATION_SUMMARIES.END_TIME).toInstant().toEpochMilli()) - .withFailures(record.get(NORMALIZATION_SUMMARIES.FAILURES, String.class) == null ? null : deserializeFailureReasons(record)); - } catch (final JsonProcessingException e) { - throw new RuntimeException(e); - } - }; - } - - private static List deserializeFailureReasons(final Record record) throws JsonProcessingException { - return List.of(Jsons.deserialize(String.valueOf(record.get(NORMALIZATION_SUMMARIES.FAILURES)), FailureReason[].class)); - } - // Retrieves only Job information from the record, without any attempt info private static Job getJobFromRecord(final Record record) { return new Job(record.get(JOB_ID, Long.class), @@ -822,20 +817,6 @@ public void writeOutput(final long jobId, final int attemptNumber, final JobOutp if (CollectionUtils.isNotEmpty(streamSyncStats)) { saveToStreamStatsTableBatch(now, output.getSync().getStandardSyncSummary().getStreamStats(), attemptId, connectionId, ctx); } - - final NormalizationSummary normalizationSummary = output.getSync().getNormalizationSummary(); - if (normalizationSummary != null) { - ctx.insertInto(NORMALIZATION_SUMMARIES) - .set(NORMALIZATION_SUMMARIES.ID, UUID.randomUUID()) - .set(NORMALIZATION_SUMMARIES.UPDATED_AT, now) - .set(NORMALIZATION_SUMMARIES.CREATED_AT, now) - .set(NORMALIZATION_SUMMARIES.ATTEMPT_ID, attemptId) - .set(NORMALIZATION_SUMMARIES.START_TIME, - OffsetDateTime.ofInstant(Instant.ofEpochMilli(normalizationSummary.getStartTime()), ZoneOffset.UTC)) - .set(NORMALIZATION_SUMMARIES.END_TIME, OffsetDateTime.ofInstant(Instant.ofEpochMilli(normalizationSummary.getEndTime()), ZoneOffset.UTC)) - .set(NORMALIZATION_SUMMARIES.FAILURES, JSONB.valueOf(Jsons.serialize(normalizationSummary.getFailures()))) - .execute(); - } return null; }); @@ -938,18 +919,6 @@ public SyncStats getAttemptCombinedStats(final long jobId, final int attemptNumb }); } - @Override - public List getNormalizationSummary(final long jobId, final int attemptNumber) throws IOException { - return jobDatabase - .query(ctx -> { - final Long attemptId = getAttemptId(jobId, attemptNumber, ctx); - return ctx.select(DSL.asterisk()).from(NORMALIZATION_SUMMARIES).where(NORMALIZATION_SUMMARIES.ATTEMPT_ID.eq(attemptId)) - .fetch(getNormalizationSummaryRecordMapper()) - .stream() - .toList(); - }); - } - @Override public Job getJob(final long jobId) throws IOException { return jobDatabase.query(ctx -> getJob(ctx, jobId)); @@ -1436,6 +1405,24 @@ public List getRunningSyncJobForConnections(final List connectionIds) .collect(Collectors.toList())); } + /** + * For the connection ID in the input, find that connection's most recent non-terminal + * clear/reset/sync/refresh job and return it if one exists. 
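+   * The implementation relies on Postgres' {@code SELECT DISTINCT ON (scope) ... ORDER BY scope, created_at DESC LIMIT 1},
+   * so the returned list contains at most one job.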
+ */ + @Override + public List getRunningJobForConnection(final UUID connectionId) throws IOException { + + return jobDatabase.query(ctx -> ctx + .fetch("SELECT DISTINCT ON (scope) * FROM jobs " + + WHERE + "CAST(jobs.config_type AS VARCHAR) in " + toSqlInFragment(Job.REPLICATION_TYPES) + + AND + "jobs.scope = '" + connectionId + "'" + + AND + JOB_STATUS_IS_NON_TERMINAL + + "ORDER BY scope, created_at DESC LIMIT 1") + .stream() + .flatMap(r -> getJobOptional(ctx, r.get("id", Long.class)).stream()) + .collect(Collectors.toList())); + } + private String scopeInList(final Collection connectionIds) { return String.format("scope IN (%s) ", connectionIds.stream() @@ -1488,18 +1475,6 @@ public List listAttemptsWithJobInfo(final ConfigType configT limit))); } - @Override - public List getAttemptNormalizationStatusesForJob(final Long jobId) throws IOException { - return jobDatabase - .query(ctx -> ctx.select(ATTEMPTS.ATTEMPT_NUMBER, SYNC_STATS.RECORDS_COMMITTED, NORMALIZATION_SUMMARIES.FAILURES) - .from(ATTEMPTS) - .join(SYNC_STATS).on(SYNC_STATS.ATTEMPT_ID.eq(ATTEMPTS.ID)) - .leftJoin(NORMALIZATION_SUMMARIES).on(NORMALIZATION_SUMMARIES.ATTEMPT_ID.eq(ATTEMPTS.ID)) - .where(ATTEMPTS.JOB_ID.eq(jobId)) - .fetch(record -> new AttemptNormalizationStatus(record.get(ATTEMPTS.ATTEMPT_NUMBER), - Optional.ofNullable(record.get(SYNC_STATS.RECORDS_COMMITTED)), record.get(NORMALIZATION_SUMMARIES.FAILURES) != null))); - } - @Override public void updateJobConfig(Long jobId, JobConfig config) throws IOException { jobDatabase.query(ctx -> { diff --git a/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/JobPersistence.java b/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/JobPersistence.java index ba553039be3..7d2424733cd 100644 --- a/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/JobPersistence.java +++ b/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/JobPersistence.java @@ -11,11 +11,9 @@ import io.airbyte.config.JobConfig; import io.airbyte.config.JobConfig.ConfigType; import io.airbyte.config.JobOutput; -import io.airbyte.config.NormalizationSummary; import io.airbyte.config.StreamSyncStats; import io.airbyte.config.SyncStats; import io.airbyte.persistence.job.models.Attempt; -import io.airbyte.persistence.job.models.AttemptNormalizationStatus; import io.airbyte.persistence.job.models.AttemptWithJobInfo; import io.airbyte.persistence.job.models.Job; import io.airbyte.persistence.job.models.JobStatus; @@ -70,8 +68,6 @@ public interface JobPersistence { */ SyncStats getAttemptCombinedStats(long jobId, int attemptNumber) throws IOException; - List getNormalizationSummary(long jobId, int attemptNumber) throws IOException; - Job getJob(long jobId) throws IOException; /** @@ -387,6 +383,8 @@ List listJobStatusAndTimestampWithConnection(UUID con List getRunningSyncJobForConnections(final List connectionIds) throws IOException; + List getRunningJobForConnection(final UUID connectionId) throws IOException; + Optional getFirstReplicationJob(UUID connectionId) throws IOException; Optional getNextJob() throws IOException; @@ -455,8 +453,6 @@ List listJobStatusAndTimestampWithConnection(UUID con // a deployment references a setup of airbyte. it is created the first time the docker compose or // K8s is ready. 
- List getAttemptNormalizationStatusesForJob(final Long jobId) throws IOException; - void updateJobConfig(Long jobId, JobConfig config) throws IOException; /** diff --git a/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/errorreporter/JobErrorReporter.java b/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/errorreporter/JobErrorReporter.java index 737ca30161e..8affb86292e 100644 --- a/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/errorreporter/JobErrorReporter.java +++ b/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/errorreporter/JobErrorReporter.java @@ -52,7 +52,6 @@ public class JobErrorReporter { private static final String CONNECTOR_DEFINITION_ID_META_KEY = "connector_definition_id"; private static final String CONNECTOR_RELEASE_STAGE_META_KEY = "connector_release_stage"; private static final String CONNECTOR_COMMAND_META_KEY = "connector_command"; - private static final String NORMALIZATION_REPOSITORY_META_KEY = "normalization_repository"; private static final String JOB_ID_KEY = "job_id"; private static final ImmutableSet UNSUPPORTED_FAILURETYPES = @@ -122,41 +121,6 @@ public void reportSyncJobFailure(final UUID connectionId, MoreMaps.merge(commonMetadata, getDestinationMetadata(destinationDefinition, dockerImage, destinationVersion.getReleaseStage())); reportJobFailureReason(workspace, failureReason, dockerImage, metadata, attemptConfig); - } else if (failureOrigin == FailureOrigin.NORMALIZATION) { - final StandardSourceDefinition sourceDefinition = configRepository.getSourceDefinitionFromConnection(connectionId); - final StandardDestinationDefinition destinationDefinition = configRepository.getDestinationDefinitionFromConnection(connectionId); - final ActorDefinitionVersion destinationVersion = configRepository.getActorDefinitionVersion(jobContext.destinationVersionId()); - // null check because resets don't have sources - final @Nullable ActorDefinitionVersion sourceVersion = - jobContext.sourceVersionId() != null ? configRepository.getActorDefinitionVersion(jobContext.sourceVersionId()) : null; - - final Map destinationMetadata = getDestinationMetadata( - destinationDefinition, - ActorDefinitionVersionHelper.getDockerImageName(destinationVersion), - destinationVersion.getReleaseStage()); - - // prefixing source keys, so we don't overlap (destination as 'true' keys since normalization runs - // on the destination) - final Map sourceMetadata = sourceVersion != null - ? prefixConnectorMetadataKeys(getSourceMetadata( - sourceDefinition, - ActorDefinitionVersionHelper.getDockerImageName(sourceVersion), - sourceVersion.getReleaseStage()), "source") - : Map.of(); - - // since error could be arising from source or destination or normalization itself, we want all the - // metadata - final Map metadata = MoreMaps.merge( - commonMetadata, - getNormalizationMetadata(destinationVersion.getNormalizationConfig().getNormalizationRepository()), - sourceMetadata, - destinationMetadata); - - final String normalizationDockerImage = - destinationVersion.getNormalizationConfig().getNormalizationRepository() + ":" - + destinationVersion.getNormalizationConfig().getNormalizationTag(); - - reportJobFailureReason(workspace, failureReason, normalizationDockerImage, metadata, attemptConfig); } else { // We only care about the above failure origins, i.e. those that come from connectors. // The rest are ignored. 
@@ -293,19 +257,6 @@ private Map getSourceMetadata(final StandardSourceDefinition sou return metadata; } - private Map getNormalizationMetadata(final String normalizationImage) { - return Map.ofEntries( - Map.entry(NORMALIZATION_REPOSITORY_META_KEY, normalizationImage)); - } - - private Map prefixConnectorMetadataKeys(final Map connectorMetadata, final String prefix) { - final Map prefixedMetadata = new HashMap<>(); - for (final Map.Entry entry : connectorMetadata.entrySet()) { - prefixedMetadata.put(String.format("%s_%s", prefix, entry.getKey()), entry.getValue()); - } - return prefixedMetadata; - } - private Map getFailureReasonMetadata(final FailureReason failureReason) { final Map failureReasonAdditionalProps = failureReason.getMetadata() != null ? failureReason.getMetadata().getAdditionalProperties() : Map.of(); diff --git a/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/errorreporter/SentryExceptionHelper.java b/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/errorreporter/SentryExceptionHelper.java index ac44f0fb8a6..3a87bdda096 100644 --- a/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/errorreporter/SentryExceptionHelper.java +++ b/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/errorreporter/SentryExceptionHelper.java @@ -84,7 +84,7 @@ public Optional buildSentryExceptions(final String stackt if (stacktrace.startsWith("Traceback (most recent call last):")) { return buildPythonSentryExceptions(stacktrace); } - if (stacktrace.contains("\tat ") && stacktrace.contains(".java")) { + if (stacktrace.contains("\tat ") && (stacktrace.contains(".java") || stacktrace.contains(".kt"))) { return buildJavaSentryExceptions(stacktrace); } if (stacktrace.startsWith("AirbyteDbtError: ")) { @@ -173,7 +173,7 @@ private static Optional buildJavaSentryExceptions(final S @SuppressWarnings("LineLength") // Use a regex to grab stack trace frame information final Pattern framePattern = Pattern.compile( - "\n\tat (?:[\\w.$/]+/)?(?[\\w$.]+)\\.(?[\\w<>$]+)\\((?:(?[\\w]+\\.java):(?\\d+)\\)|(?[\\w\\s]*))"); + "\n\tat (?:[\\w.$/]+/)?(?[\\w$.]+)\\.(?[\\w<>$]+)\\((?:(?[\\w]+\\.(?java|kt)):(?\\d+)\\)|(?[\\w\\s]*))"); final Matcher matcher = framePattern.matcher(exceptionStr); while (matcher.find()) { diff --git a/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/models/AttemptNormalizationStatus.java b/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/models/AttemptNormalizationStatus.java deleted file mode 100644 index 20eb9b9aeb2..00000000000 --- a/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/models/AttemptNormalizationStatus.java +++ /dev/null @@ -1,16 +0,0 @@ -/* - * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.persistence.job.models; - -import java.util.Optional; - -/** - * Status of a normalization run. 
- * - * @param attemptNumber attempt number - * @param recordsCommitted num records committed - * @param normalizationFailed whether normalization failed - */ -public record AttemptNormalizationStatus(int attemptNumber, Optional recordsCommitted, boolean normalizationFailed) {} diff --git a/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/tracker/JobTracker.java b/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/tracker/JobTracker.java index 5d9450a3df0..22d93cf6674 100644 --- a/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/tracker/JobTracker.java +++ b/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/tracker/JobTracker.java @@ -36,8 +36,11 @@ import io.airbyte.config.persistence.ActorDefinitionVersionHelper; import io.airbyte.config.persistence.ConfigNotFoundException; import io.airbyte.config.persistence.ConfigRepository; +import io.airbyte.featureflag.Empty; import io.airbyte.featureflag.FeatureFlagClient; import io.airbyte.featureflag.UseWorkloadApi; +import io.airbyte.featureflag.WorkloadApiServerEnabled; +import io.airbyte.featureflag.WorkloadLauncherEnabled; import io.airbyte.featureflag.Workspace; import io.airbyte.persistence.job.JobPersistence; import io.airbyte.persistence.job.WorkspaceHelper; @@ -200,8 +203,11 @@ public void trackDiscover(final UUID jobId, final Map failureReasonMetadata = generateFailureReasonMetadata(failureReason); final Map sourceDefMetadata = generateSourceDefinitionMetadata(sourceDefinitionId, workspaceId, actorId); final Map stateMetadata = generateStateMetadata(jobState); - final Map workloadMetadata = - Map.of("workload_enabled", featureFlagClient.boolVariation(UseWorkloadApi.INSTANCE, new Workspace(workspaceId))); + + var ffCheck = featureFlagClient.boolVariation(UseWorkloadApi.INSTANCE, new Workspace(workspaceId)); + var envCheck = featureFlagClient.boolVariation(WorkloadLauncherEnabled.INSTANCE, Empty.INSTANCE) + && featureFlagClient.boolVariation(WorkloadApiServerEnabled.INSTANCE, Empty.INSTANCE); + final Map workloadMetadata = Map.of("workload_enabled", ffCheck || envCheck); track(workspaceId, DISCOVER_EVENT, MoreMaps.merge(jobMetadata, failureReasonMetadata, sourceDefMetadata, stateMetadata, workloadMetadata)); }); @@ -490,7 +496,10 @@ private static Map generateStateMetadata(final JobState jobState */ private Map generateCheckConnectionMetadata(final @Nullable StandardCheckConnectionOutput output, final UUID workspaceId) { final Map metadata = new HashMap<>(); - metadata.put("workload_enabled", featureFlagClient.boolVariation(UseWorkloadApi.INSTANCE, new Workspace(workspaceId))); + var ffCheck = featureFlagClient.boolVariation(UseWorkloadApi.INSTANCE, new Workspace(workspaceId)); + var envCheck = featureFlagClient.boolVariation(WorkloadLauncherEnabled.INSTANCE, Empty.INSTANCE) + && featureFlagClient.boolVariation(WorkloadApiServerEnabled.INSTANCE, Empty.INSTANCE); + metadata.put("workload_enabled", ffCheck || envCheck); if (output == null) { return metadata; diff --git a/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/tracker/TrackingMetadata.java b/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/tracker/TrackingMetadata.java index b00c1ad48ff..61aa2ef028b 100644 --- a/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/tracker/TrackingMetadata.java +++ 
b/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/tracker/TrackingMetadata.java @@ -13,7 +13,6 @@ import io.airbyte.config.AttemptFailureSummary; import io.airbyte.config.FailureReason; import io.airbyte.config.JobOutput; -import io.airbyte.config.NormalizationSummary; import io.airbyte.config.ResourceRequirements; import io.airbyte.config.ScheduleData; import io.airbyte.config.StandardDestinationDefinition; @@ -160,8 +159,6 @@ public static Map generateJobAttemptMetadata(final Job job) { } final StandardSyncSummary syncSummary = jobOutput.getSync().getStandardSyncSummary(); final SyncStats totalStats = syncSummary.getTotalStats(); - final NormalizationSummary normalizationSummary = jobOutput.getSync().getNormalizationSummary(); - if (syncSummary.getStartTime() != null) { metadata.put("sync_start_time", syncSummary.getStartTime()); } @@ -216,16 +213,6 @@ public static Map generateJobAttemptMetadata(final Job job) { metadata.put("destination_write_end_time", totalStats.getDestinationWriteEndTime()); } - if (normalizationSummary != null) { - if (normalizationSummary.getStartTime() != null) { - metadata.put("normalization_start_time", normalizationSummary.getStartTime()); - - } - if (normalizationSummary.getEndTime() != null) { - metadata.put("normalization_end_time", normalizationSummary.getEndTime()); - } - } - final List failureReasons = failureReasonsList(attempts); if (!failureReasons.isEmpty()) { metadata.put("failure_reasons", failureReasonsListAsJson(failureReasons).toString()); diff --git a/airbyte-persistence/job-persistence/src/test/java/io/airbyte/persistence/job/DefaultJobCreatorTest.java b/airbyte-persistence/job-persistence/src/test/java/io/airbyte/persistence/job/DefaultJobCreatorTest.java index 39d97073773..3d99e052732 100644 --- a/airbyte-persistence/job-persistence/src/test/java/io/airbyte/persistence/job/DefaultJobCreatorTest.java +++ b/airbyte-persistence/job-persistence/src/test/java/io/airbyte/persistence/job/DefaultJobCreatorTest.java @@ -28,8 +28,6 @@ import io.airbyte.config.JobSyncConfig.NamespaceDefinitionType; import io.airbyte.config.JobTypeResourceLimit; import io.airbyte.config.JobTypeResourceLimit.JobType; -import io.airbyte.config.OperatorNormalization; -import io.airbyte.config.OperatorNormalization.Option; import io.airbyte.config.RefreshConfig; import io.airbyte.config.RefreshStream; import io.airbyte.config.ResetSourceConfiguration; @@ -41,7 +39,6 @@ import io.airbyte.config.StandardSourceDefinition.SourceType; import io.airbyte.config.StandardSync; import io.airbyte.config.StandardSyncOperation; -import io.airbyte.config.StandardSyncOperation.OperatorType; import io.airbyte.config.StateType; import io.airbyte.config.StateWrapper; import io.airbyte.config.SyncResourceRequirements; @@ -190,9 +187,7 @@ class DefaultJobCreatorTest { STANDARD_SYNC_OPERATION = new StandardSyncOperation() .withOperationId(operationId) .withName("normalize") - .withTombstone(false) - .withOperatorType(OperatorType.NORMALIZATION) - .withOperatorNormalization(new OperatorNormalization().withOption(Option.BASIC)); + .withTombstone(false); PERSISTED_WEBHOOK_CONFIGS = Jsons.deserialize( String.format("{\"webhookConfigs\": [{\"id\": \"%s\", \"name\": \"%s\", \"authToken\": {\"_secret\": \"a-secret_v1\"}}]}", diff --git a/airbyte-persistence/job-persistence/src/test/java/io/airbyte/persistence/job/DefaultJobPersistenceTest.java b/airbyte-persistence/job-persistence/src/test/java/io/airbyte/persistence/job/DefaultJobPersistenceTest.java index 
dc1f253bbf8..942fa1535a6 100644 --- a/airbyte-persistence/job-persistence/src/test/java/io/airbyte/persistence/job/DefaultJobPersistenceTest.java +++ b/airbyte-persistence/job-persistence/src/test/java/io/airbyte/persistence/job/DefaultJobPersistenceTest.java @@ -7,6 +7,7 @@ import static io.airbyte.db.instance.jobs.jooq.generated.Tables.AIRBYTE_METADATA; import static io.airbyte.db.instance.jobs.jooq.generated.Tables.ATTEMPTS; import static io.airbyte.db.instance.jobs.jooq.generated.Tables.JOBS; +import static io.airbyte.db.instance.jobs.jooq.generated.Tables.STREAM_ATTEMPT_METADATA; import static io.airbyte.db.instance.jobs.jooq.generated.Tables.STREAM_STATS; import static io.airbyte.db.instance.jobs.jooq.generated.Tables.SYNC_STATS; import static io.airbyte.persistence.job.DefaultJobPersistence.toSqlName; @@ -38,7 +39,6 @@ import io.airbyte.config.JobGetSpecConfig; import io.airbyte.config.JobOutput; import io.airbyte.config.JobSyncConfig; -import io.airbyte.config.NormalizationSummary; import io.airbyte.config.StandardSyncOutput; import io.airbyte.config.StandardSyncSummary; import io.airbyte.config.State; @@ -52,7 +52,6 @@ import io.airbyte.persistence.job.JobPersistence.AttemptStats; import io.airbyte.persistence.job.JobPersistence.JobAttemptPair; import io.airbyte.persistence.job.models.Attempt; -import io.airbyte.persistence.job.models.AttemptNormalizationStatus; import io.airbyte.persistence.job.models.AttemptStatus; import io.airbyte.persistence.job.models.AttemptWithJobInfo; import io.airbyte.persistence.job.models.Job; @@ -235,6 +234,7 @@ private void resetDb() throws SQLException { jobDatabase.query(ctx -> ctx.truncateTable(ATTEMPTS).cascade().execute()); jobDatabase.query(ctx -> ctx.truncateTable(AIRBYTE_METADATA).cascade().execute()); jobDatabase.query(ctx -> ctx.truncateTable(SYNC_STATS)); + jobDatabase.query(ctx -> ctx.truncateTable(STREAM_ATTEMPT_METADATA)); } private Result getJobRecord(final long jobId) throws SQLException { @@ -324,17 +324,14 @@ void testWriteOutput() throws IOException { new SyncStats().withBytesEmitted(100L).withRecordsEmitted(9L).withEstimatedBytes(200L).withEstimatedRecords(10L)) .withStreamNamespace(streamNamespace).withStreamName(streamName); final FailureReason failureReason1 = new FailureReason().withFailureOrigin(FailureOrigin.DESTINATION).withFailureType(FailureType.SYSTEM_ERROR) - .withExternalMessage("There was a normalization error"); + .withExternalMessage("There was an error"); final FailureReason failureReason2 = new FailureReason().withFailureOrigin(FailureOrigin.SOURCE).withFailureType(FailureType.CONFIG_ERROR) - .withExternalMessage("There was another normalization error"); + .withExternalMessage("There was another error"); - final NormalizationSummary normalizationSummary = - new NormalizationSummary().withStartTime(10L).withEndTime(500L).withFailures(List.of(failureReason1, failureReason2)); final StandardSyncOutput standardSyncOutput = new StandardSyncOutput().withStandardSyncSummary(new StandardSyncSummary() .withTotalStats(syncStats) - .withStreamStats(List.of(streamSyncStats))) - .withNormalizationSummary(normalizationSummary); + .withStreamStats(List.of(streamSyncStats))); final JobOutput jobOutput = new JobOutput().withOutputType(JobOutput.OutputType.DISCOVER_CATALOG).withSync(standardSyncOutput); when(timeSupplier.get()).thenReturn(Instant.ofEpochMilli(4242)); @@ -366,11 +363,6 @@ void testWriteOutput() throws IOException { assertEquals(streamSyncStats.getStats().getRecordsEmitted(), 
storedStreamSyncStats.get(0).getStats().getRecordsEmitted()); assertEquals(streamSyncStats.getStats().getEstimatedRecords(), storedStreamSyncStats.get(0).getStats().getEstimatedRecords()); assertEquals(streamSyncStats.getStats().getEstimatedBytes(), storedStreamSyncStats.get(0).getStats().getEstimatedBytes()); - - final NormalizationSummary storedNormalizationSummary = jobPersistence.getNormalizationSummary(jobId, attemptNumber).stream().findFirst().get(); - assertEquals(10L, storedNormalizationSummary.getStartTime()); - assertEquals(500L, storedNormalizationSummary.getEndTime()); - assertEquals(List.of(failureReason1, failureReason2), storedNormalizationSummary.getFailures()); } @Test @@ -795,7 +787,7 @@ void testGetStatsNoResult() throws IOException { @Test @DisplayName("Retrieving all attempts stats for a job should return the right information") - void testGetMultipleStats() throws IOException { + void testGetMultipleStats() throws IOException, SQLException { final long jobOneId = jobPersistence.enqueueJob(SCOPE, SPEC_JOB_CONFIG).orElseThrow(); final int jobOneAttemptNumberOne = jobPersistence.createAttempt(jobOneId, LOG_PATH); @@ -803,8 +795,12 @@ void testGetMultipleStats() throws IOException { var streamStats = List.of( new StreamSyncStats().withStreamName("name1") .withStats(new SyncStats() - .withBytesEmitted(500L).withRecordsEmitted(500L) - .withEstimatedBytes(10000L).withEstimatedRecords(2000L))); + .withBytesEmitted(1L).withRecordsEmitted(1L) + .withEstimatedBytes(2L).withEstimatedRecords(2L)), + new StreamSyncStats().withStreamName("name2").withStreamNamespace("ns") + .withStats(new SyncStats() + .withBytesEmitted(1L).withRecordsEmitted(1L) + .withEstimatedBytes(2L).withEstimatedRecords(2L))); jobPersistence.writeStats(jobOneId, jobOneAttemptNumberOne, 1000L, 1000L, 1000L, 1000L, 1000L, 1000L, CONNECTION_ID, streamStats); // Second write for first attempt. This is the record that should be returned. @@ -812,13 +808,29 @@ void testGetMultipleStats() throws IOException { streamStats = List.of( new StreamSyncStats().withStreamName("name1") .withStats(new SyncStats() - .withBytesEmitted(1000L).withRecordsEmitted(1000L) - .withEstimatedBytes(10000L).withEstimatedRecords(2000L) - .withBytesCommitted(1000L).withRecordsCommitted(1000L))); - jobPersistence.writeStats(jobOneId, jobOneAttemptNumberOne, 2000L, 2000L, 2000L, 2000L, 2000L, 2000L, CONNECTION_ID, streamStats); + .withBytesEmitted(100L).withRecordsEmitted(10L) + .withEstimatedBytes(200L).withEstimatedRecords(20L) + .withBytesCommitted(100L).withRecordsCommitted(10L)), + new StreamSyncStats().withStreamName("name2").withStreamNamespace("ns") + .withStats(new SyncStats() + .withBytesEmitted(1000L).withRecordsEmitted(100L) + .withEstimatedBytes(2000L).withEstimatedRecords(200L) + .withBytesCommitted(888L).withRecordsCommitted(88L))); + jobPersistence.writeStats(jobOneId, jobOneAttemptNumberOne, 220L, 2200L, 110L, 1100L, 98L, 988L, CONNECTION_ID, streamStats); jobPersistence.failAttempt(jobOneId, jobOneAttemptNumberOne); // Second attempt for first job. 
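+    // The second attempt writes fresh stats for the same two streams; the expectations below
+    // merge these with the stream_attempt_metadata rows inserted directly into the table further down.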
+ streamStats = List.of( + new StreamSyncStats().withStreamName("name1") + .withStats(new SyncStats() + .withBytesEmitted(1000L).withRecordsEmitted(100L) + .withEstimatedBytes(2000L).withEstimatedRecords(200L) + .withBytesCommitted(1000L).withRecordsCommitted(100L)), + new StreamSyncStats().withStreamName("name2").withStreamNamespace("ns") + .withStats(new SyncStats() + .withBytesEmitted(10000L).withRecordsEmitted(1000L) + .withEstimatedBytes(20000L).withEstimatedRecords(2000L) + .withBytesCommitted(8880L).withRecordsCommitted(880L))); final int jobOneAttemptNumberTwo = jobPersistence.createAttempt(jobOneId, LOG_PATH); jobPersistence.writeStats(jobOneId, jobOneAttemptNumberTwo, 1000L, 1000L, 1000L, 1000L, 1000L, 1000L, CONNECTION_ID, streamStats); @@ -829,24 +841,55 @@ void testGetMultipleStats() throws IOException { new StreamSyncStats().withStreamName("name1") .withStats(new SyncStats() .withBytesEmitted(1000L).withRecordsEmitted(1000L) - .withEstimatedBytes(10000L).withEstimatedRecords(2000L))); + .withEstimatedBytes(10000L).withEstimatedRecords(2000L)), + new StreamSyncStats().withStreamName("name2").withStreamNamespace("ns") + .withStats(new SyncStats() + .withBytesEmitted(5000L).withRecordsEmitted(5000L) + .withEstimatedBytes(100000L).withEstimatedRecords(20000L))); jobPersistence.writeStats(jobTwoId, jobTwoAttemptNumberOne, 1000L, 1000L, 1000L, 1000L, 1000L, 1000L, CONNECTION_ID, streamStats); + final List<Long> jobOneAttemptIds = jobDatabase.query( + ctx -> ctx.select(ATTEMPTS.ID).from(ATTEMPTS).where(ATTEMPTS.JOB_ID.eq(jobOneId)).orderBy(ATTEMPTS.ID).fetch() + .map(r -> r.get(ATTEMPTS.ID))); + final List<Long> jobTwoAttemptIds = jobDatabase.query( + ctx -> ctx.select(ATTEMPTS.ID).from(ATTEMPTS).where(ATTEMPTS.JOB_ID.eq(jobTwoId)).orderBy(ATTEMPTS.ID).fetch() + .map(r -> r.get(ATTEMPTS.ID))); + jobDatabase.query( + ctx -> ctx.insertInto( + STREAM_ATTEMPT_METADATA, + STREAM_ATTEMPT_METADATA.ID, + STREAM_ATTEMPT_METADATA.ATTEMPT_ID, + STREAM_ATTEMPT_METADATA.STREAM_NAME, + STREAM_ATTEMPT_METADATA.STREAM_NAMESPACE, + STREAM_ATTEMPT_METADATA.WAS_BACKFILLED, + STREAM_ATTEMPT_METADATA.WAS_RESUMED) + .values(UUID.randomUUID(), jobOneAttemptIds.get(0), "name1", null, true, false) + .values(UUID.randomUUID(), jobOneAttemptIds.get(1), "name1", null, false, true) + .values(UUID.randomUUID(), jobTwoAttemptIds.get(0), "name2", "ns", true, false) + .execute()); + final var stats = jobPersistence.getAttemptStats(List.of(jobOneId, jobTwoId)); final var exp = Map.of( new JobAttemptPair(jobOneId, jobOneAttemptNumberOne), new AttemptStats( new SyncStats() - .withRecordsEmitted(2000L).withBytesEmitted(2000L) - .withEstimatedBytes(2000L).withEstimatedRecords(2000L) - .withBytesCommitted(2000L).withRecordsCommitted(2000L), + .withBytesEmitted(1100L).withRecordsEmitted(110L) + .withEstimatedBytes(2200L).withEstimatedRecords(220L) + .withBytesCommitted(988L).withRecordsCommitted(98L), List.of(new StreamSyncStats().withStreamName("name1").withStats( new SyncStats() - .withEstimatedBytes(10000L).withEstimatedRecords(2000L) - .withBytesEmitted(1000L).withRecordsEmitted(1000L) - .withBytesCommitted(1000L).withRecordsCommitted(1000L)) - .withWasBackfilled(false) - .withWasResumed(false))), + .withBytesEmitted(100L).withRecordsEmitted(10L) + .withEstimatedBytes(200L).withEstimatedRecords(20L) + .withBytesCommitted(100L).withRecordsCommitted(10L)) + .withWasBackfilled(true) + .withWasResumed(false), + new StreamSyncStats().withStreamName("name2").withStreamNamespace("ns") + .withStats(new SyncStats() +
.withBytesEmitted(1000L).withRecordsEmitted(100L) + .withEstimatedBytes(2000L).withEstimatedRecords(200L) + .withBytesCommitted(888L).withRecordsCommitted(88L)) + .withWasBackfilled(false) + .withWasResumed(false))), new JobAttemptPair(jobOneId, jobOneAttemptNumberTwo), new AttemptStats( new SyncStats() @@ -855,11 +898,18 @@ void testGetMultipleStats() throws IOException { .withBytesCommitted(1000L).withRecordsCommitted(1000L), List.of(new StreamSyncStats().withStreamName("name1").withStats( new SyncStats() - .withEstimatedBytes(10000L).withEstimatedRecords(2000L) - .withBytesEmitted(1000L).withRecordsEmitted(1000L) - .withBytesCommitted(1000L).withRecordsCommitted(1000L)) + .withBytesEmitted(1000L).withRecordsEmitted(100L) + .withEstimatedBytes(2000L).withEstimatedRecords(200L) + .withBytesCommitted(1000L).withRecordsCommitted(100L)) .withWasBackfilled(false) - .withWasResumed(false))), + .withWasResumed(true), + new StreamSyncStats().withStreamName("name2").withStreamNamespace("ns") + .withStats(new SyncStats() + .withBytesEmitted(10000L).withRecordsEmitted(1000L) + .withEstimatedBytes(20000L).withEstimatedRecords(2000L) + .withBytesCommitted(8880L).withRecordsCommitted(880L)) + .withWasBackfilled(false) + .withWasResumed(false))), new JobAttemptPair(jobTwoId, jobTwoAttemptNumberOne), new AttemptStats( new SyncStats() @@ -868,12 +918,18 @@ void testGetMultipleStats() throws IOException { .withBytesCommitted(1000L).withRecordsCommitted(1000L), List.of(new StreamSyncStats().withStreamName("name1").withStats( new SyncStats() - .withEstimatedBytes(10000L).withEstimatedRecords(2000L) - .withBytesEmitted(1000L).withRecordsEmitted(1000L)) + .withBytesEmitted(1000L).withRecordsEmitted(1000L) + .withEstimatedBytes(10000L).withEstimatedRecords(2000L)) .withWasBackfilled(false) - .withWasResumed(false)))); + .withWasResumed(false), + new StreamSyncStats().withStreamName("name2").withStreamNamespace("ns") + .withStats(new SyncStats() + .withEstimatedBytes(100000L).withEstimatedRecords(20000L) + .withBytesEmitted(5000L).withRecordsEmitted(5000L)) + .withWasBackfilled(true) + .withWasResumed(false)))); - assertEquals(exp, stats); + assertEquals(Jsons.canonicalJsonSerialize(exp), Jsons.canonicalJsonSerialize(stats)); } @@ -1597,6 +1653,59 @@ void testGetRunningSyncJobsForConnectionsEmptyBecauseOnlyReset() throws IOExcept } + @Nested + @DisplayName("When getting a running job for a single connection") + class GetRunningJobForConnection { + + private static final UUID CONNECTION_ID_1 = UUID.randomUUID(); + + private static final String SCOPE_1 = CONNECTION_ID_1.toString(); + + @Test + @DisplayName("Should return nothing if no sync job exists") + void testGetRunningSyncJobsForConnectionsEmpty() throws IOException { + final List<Job> actual = jobPersistence.getRunningJobForConnection(CONNECTION_ID_1); + + assertTrue(actual.isEmpty()); + } + + @Test + @DisplayName("Should return a running sync job for the connection") + void testGetRunningJobForConnection() throws IOException { + final long scope1Job1 = jobPersistence.enqueueJob(SCOPE_1, SYNC_JOB_CONFIG).orElseThrow(); + jobPersistence.createAttempt(scope1Job1, LOG_PATH); + final Attempt scope1Job1Attempt = jobPersistence.getJob(scope1Job1).getAttempts().stream().findFirst().orElseThrow(); + + final Instant afterNow = NOW; + when(timeSupplier.get()).thenReturn(afterNow); + + final List<Job> expected = new ArrayList<>(); + expected.add(createJob(scope1Job1, SYNC_JOB_CONFIG, JobStatus.RUNNING, List.of(scope1Job1Attempt), afterNow.getEpochSecond(), SCOPE_1)); + + final List<Job> actual = jobPersistence.getRunningJobForConnection(CONNECTION_ID_1); + assertTrue(expected.size() == actual.size() && expected.containsAll(actual) && actual.containsAll(expected)); + } + + @Test + @DisplayName("Should return job if only a running reset job exists") + void testGetRunningJobForConnectionOnlyReset() throws IOException { + final long jobId = jobPersistence.enqueueJob(SCOPE_1, RESET_JOB_CONFIG).orElseThrow(); + jobPersistence.createAttempt(jobId, LOG_PATH); + final Attempt scope1Job1Attempt = jobPersistence.getJob(jobId).getAttempts().stream().findFirst().orElseThrow(); + + final Instant afterNow = NOW; + when(timeSupplier.get()).thenReturn(afterNow); + + final List<Job> expected = new ArrayList<>(); + expected.add(createJob(jobId, RESET_JOB_CONFIG, JobStatus.RUNNING, List.of(scope1Job1Attempt), afterNow.getEpochSecond(), SCOPE_1)); + + final List<Job> actual = jobPersistence.getRunningJobForConnection(CONNECTION_ID_1); + + assertTrue(expected.size() == actual.size() && expected.containsAll(actual) && actual.containsAll(expected)); + } + + } + @Nested @DisplayName("When getting first replication job") class GetFirstReplicationJob { @@ -2751,30 +2860,6 @@ void testMultipleConfigTypes() throws IOException { assertEquals(JobStatus.FAILED, allJobs.get(1).getStatus()); } - @Test - @DisplayName("Should be able to get attempt normalization status") - void testGetAttemptNormalizationStatusesForJob() throws IOException { - final Supplier<Instant> timeSupplier = incrementingSecondSupplier(NOW); - jobPersistence = new DefaultJobPersistence(jobDatabase, timeSupplier, DEFAULT_MINIMUM_AGE_IN_DAYS, DEFAULT_EXCESSIVE_NUMBER_OF_JOBS, - DEFAULT_MINIMUM_RECENCY_COUNT); - - // Create and fail initial job - final long syncJobId1 = jobPersistence.enqueueJob(SCOPE, SYNC_JOB_CONFIG).orElseThrow(); - final int syncJobAttemptNumber1 = jobPersistence.createAttempt(syncJobId1, LOG_PATH); - jobPersistence.writeStats(syncJobId1, syncJobAttemptNumber1, 10L, 100L, 5L, 50L, null, null, CONNECTION_ID, List.of()); - jobPersistence.failAttempt(syncJobId1, syncJobAttemptNumber1); - - final int syncJobAttemptNumber2 = jobPersistence.createAttempt(syncJobId1, LOG_PATH); - jobPersistence.writeStats(syncJobId1, syncJobAttemptNumber2, 10L, 100L, 10L, 100L, 10L, 100L, CONNECTION_ID, List.of()); - jobPersistence.succeedAttempt(syncJobId1, syncJobAttemptNumber2); - - // Check to see current status of all jobs from beginning of time, expecting all jobs in createAt - // descending order (most recent first) - final List<AttemptNormalizationStatus> allAttempts = - jobPersistence.getAttemptNormalizationStatusesForJob(syncJobId1); - assertEquals(2, allAttempts.size()); - } - } } diff --git a/airbyte-persistence/job-persistence/src/test/java/io/airbyte/persistence/job/WorkspaceHelperTest.java b/airbyte-persistence/job-persistence/src/test/java/io/airbyte/persistence/job/WorkspaceHelperTest.java index 2ca53167fea..4134fe38247 100644 --- a/airbyte-persistence/job-persistence/src/test/java/io/airbyte/persistence/job/WorkspaceHelperTest.java +++ b/airbyte-persistence/job-persistence/src/test/java/io/airbyte/persistence/job/WorkspaceHelperTest.java @@ -17,12 +17,9 @@ import io.airbyte.config.DestinationConnection; import io.airbyte.config.JobConfig; import io.airbyte.config.JobSyncConfig; -import io.airbyte.config.OperatorNormalization; -import io.airbyte.config.OperatorNormalization.Option; import io.airbyte.config.SourceConnection; import io.airbyte.config.StandardSync; import io.airbyte.config.StandardSyncOperation; -import
io.airbyte.config.StandardSyncOperation.OperatorType; import io.airbyte.config.persistence.ConfigNotFoundException; import io.airbyte.config.persistence.ConfigRepository; import io.airbyte.persistence.job.models.Job; @@ -68,9 +65,7 @@ class WorkspaceHelperTest { private static final StandardSyncOperation OPERATION = new StandardSyncOperation() .withOperationId(OPERATION_ID) .withWorkspaceId(WORKSPACE_ID) - .withOperatorType(OperatorType.DBT) .withName("the new normal") - .withOperatorNormalization(new OperatorNormalization().withOption(Option.BASIC)) .withTombstone(false); ConfigRepository configRepository; diff --git a/airbyte-persistence/job-persistence/src/test/java/io/airbyte/persistence/job/errorreporter/JobErrorReporterTest.java b/airbyte-persistence/job-persistence/src/test/java/io/airbyte/persistence/job/errorreporter/JobErrorReporterTest.java index f4b105ba135..92ca4e56419 100644 --- a/airbyte-persistence/job-persistence/src/test/java/io/airbyte/persistence/job/errorreporter/JobErrorReporterTest.java +++ b/airbyte-persistence/job-persistence/src/test/java/io/airbyte/persistence/job/errorreporter/JobErrorReporterTest.java @@ -14,7 +14,6 @@ import io.airbyte.config.FailureReason.FailureOrigin; import io.airbyte.config.FailureReason.FailureType; import io.airbyte.config.Metadata; -import io.airbyte.config.NormalizationDestinationDefinitionConfig; import io.airbyte.config.ReleaseStage; import io.airbyte.config.StandardDestinationDefinition; import io.airbyte.config.StandardSourceDefinition; @@ -42,9 +41,6 @@ class JobErrorReporterTest { private static final String WORKSPACE_URL = "http://localhost:8000/workspace/my_workspace"; private static final DeploymentMode DEPLOYMENT_MODE = DeploymentMode.OSS; private static final String AIRBYTE_VERSION = "0.1.40"; - private static final String NORMALIZATION_IMAGE = "airbyte/normalization"; - private static final String NORMALIZATION_VERSION = "0.2.24"; - private static final String NORMALIZATION_INTEGRATION_TYPE = "snowflake"; private static final String DOCKER_IMAGE_TAG = "1.2.3"; private static final UUID SOURCE_DEFINITION_ID = UUID.randomUUID(); @@ -77,7 +73,6 @@ class JobErrorReporterTest { private static final String CONNECTOR_NAME_KEY = "connector_name"; private static final String CONNECTOR_RELEASE_STAGE_KEY = "connector_release_stage"; private static final String CONNECTOR_COMMAND_KEY = "connector_command"; - private static final String NORMALIZATION_REPOSITORY_KEY = "normalization_repository"; private static final String CHECK_COMMAND = "check"; private static final String DISCOVER_COMMAND = "discover"; private static final String SPEC_COMMAND = "spec"; @@ -119,16 +114,11 @@ void testReportSyncJobFailure() throws ConfigNotFoundException, IOException { .withFailureOrigin(FailureOrigin.DESTINATION) .withFailureType(FailureType.SYSTEM_ERROR); - final FailureReason normalizationFailureReason = new FailureReason() - .withMetadata(new Metadata().withAdditionalProperty(FROM_TRACE_MESSAGE, true)) - .withFailureOrigin(FailureOrigin.NORMALIZATION) - .withFailureType(FailureType.SYSTEM_ERROR); - final FailureReason nonTraceMessageFailureReason = new FailureReason().withFailureOrigin(FailureOrigin.SOURCE); final FailureReason replicationFailureReason = new FailureReason().withFailureOrigin(FailureOrigin.REPLICATION); Mockito.when(mFailureSummary.getFailures()).thenReturn(List.of( - sourceFailureReason, destinationFailureReason, normalizationFailureReason, nonTraceMessageFailureReason, replicationFailureReason)); + sourceFailureReason, 
destinationFailureReason, nonTraceMessageFailureReason, replicationFailureReason)); final long syncJobId = 1L; final SyncJobReportingContext jobReportingContext = new SyncJobReportingContext( @@ -160,11 +150,7 @@ void testReportSyncJobFailure() throws ConfigNotFoundException, IOException { .thenReturn(new ActorDefinitionVersion() .withDockerRepository(DESTINATION_DOCKER_REPOSITORY) .withDockerImageTag(DOCKER_IMAGE_TAG) - .withReleaseStage(DESTINATION_RELEASE_STAGE) - .withNormalizationConfig(new NormalizationDestinationDefinitionConfig() - .withNormalizationTag(NORMALIZATION_VERSION) - .withNormalizationRepository(NORMALIZATION_IMAGE) - .withNormalizationIntegrationType(NORMALIZATION_INTEGRATION_TYPE))); + .withReleaseStage(DESTINATION_RELEASE_STAGE)); final StandardWorkspace mWorkspace = Mockito.mock(StandardWorkspace.class); Mockito.when(mWorkspace.getWorkspaceId()).thenReturn(WORKSPACE_ID); @@ -204,33 +190,10 @@ void testReportSyncJobFailure() throws ConfigNotFoundException, IOException { Map.entry(CONNECTOR_NAME_KEY, DESTINATION_DEFINITION_NAME), Map.entry(CONNECTOR_RELEASE_STAGE_KEY, DESTINATION_RELEASE_STAGE.toString())); - final Map<String, String> expectedNormalizationMetadata = Map.ofEntries( - Map.entry(JOB_ID_KEY, String.valueOf(syncJobId)), - Map.entry(WORKSPACE_ID_KEY, WORKSPACE_ID.toString()), - Map.entry(WORKSPACE_URL_KEY, WORKSPACE_URL), - Map.entry(CONNECTION_ID_KEY, CONNECTION_ID.toString()), - Map.entry(CONNECTION_URL_KEY, CONNECTION_URL), - Map.entry(DEPLOYMENT_MODE_KEY, DEPLOYMENT_MODE.name()), - Map.entry(AIRBYTE_VERSION_KEY, AIRBYTE_VERSION), - Map.entry(FAILURE_ORIGIN_KEY, "normalization"), - Map.entry(FAILURE_TYPE_KEY, SYSTEM_ERROR), - Map.entry(NORMALIZATION_REPOSITORY_KEY, NORMALIZATION_IMAGE), - Map.entry(String.format(PREFIX_FORMAT_STRING, SOURCE, CONNECTOR_DEFINITION_ID_KEY), SOURCE_DEFINITION_ID.toString()), - Map.entry(String.format(PREFIX_FORMAT_STRING, SOURCE, CONNECTOR_REPOSITORY_KEY), SOURCE_DOCKER_REPOSITORY), - Map.entry(String.format(PREFIX_FORMAT_STRING, SOURCE, CONNECTOR_NAME_KEY), SOURCE_DEFINITION_NAME), - Map.entry(String.format(PREFIX_FORMAT_STRING, SOURCE, CONNECTOR_RELEASE_STAGE_KEY), SOURCE_RELEASE_STAGE.toString()), - Map.entry(CONNECTOR_DEFINITION_ID_KEY, DESTINATION_DEFINITION_ID.toString()), - Map.entry(CONNECTOR_REPOSITORY_KEY, DESTINATION_DOCKER_REPOSITORY), - Map.entry(CONNECTOR_NAME_KEY, DESTINATION_DEFINITION_NAME), - Map.entry(CONNECTOR_RELEASE_STAGE_KEY, DESTINATION_RELEASE_STAGE.toString())); - Mockito.verify(jobErrorReportingClient).reportJobFailureReason(mWorkspace, sourceFailureReason, SOURCE_DOCKER_IMAGE, expectedSourceMetadata, attemptConfig); Mockito.verify(jobErrorReportingClient).reportJobFailureReason(mWorkspace, destinationFailureReason, DESTINATION_DOCKER_IMAGE, expectedDestinationMetadata, attemptConfig); - Mockito.verify(jobErrorReportingClient).reportJobFailureReason( - mWorkspace, normalizationFailureReason, String.format("%s:%s", NORMALIZATION_IMAGE, NORMALIZATION_VERSION), expectedNormalizationMetadata, - attemptConfig); Mockito.verifyNoMoreInteractions(jobErrorReportingClient); } diff --git a/airbyte-persistence/job-persistence/src/test/java/io/airbyte/persistence/job/errorreporter/SentryExceptionHelperTest.java b/airbyte-persistence/job-persistence/src/test/java/io/airbyte/persistence/job/errorreporter/SentryExceptionHelperTest.java index ecae3fa75b8..8d9c4e39b8f 100644 --- a/airbyte-persistence/job-persistence/src/test/java/io/airbyte/persistence/job/errorreporter/SentryExceptionHelperTest.java +++ b/airbyte-persistence/job-persistence/src/test/java/io/airbyte/persistence/job/errorreporter/SentryExceptionHelperTest.java @@ -307,6 +307,68 @@ void testBuildSentryExceptionsJavaChained() { FUNCTION, "lambda$getDestinationOutputRunnable$7"))); } + @Test + void testBuildSentryExceptionsKotlin() { + final String stacktrace = + """ + io.airbyte.commons.exceptions.ConfigErrorException: Some error message + at io.airbyte.cdk.integrations.destination.staging.StagingConsumerFactory$Companion.streamDescToWriteConfig(StagingConsumerFactory.kt:226) + at io.airbyte.cdk.integrations.destination.staging.StagingConsumerFactory$Companion.access$streamDescToWriteConfig(StagingConsumerFactory.kt:159) + at io.airbyte.cdk.integrations.destination.staging.StagingConsumerFactory.createAsync(StagingConsumerFactory.kt:124) + at io.airbyte.integrations.destination.snowflake.SnowflakeDestination.getSerializedMessageConsumer(SnowflakeDestination.kt:194) + at io.airbyte.cdk.integrations.base.IntegrationRunner.run(IntegrationRunner.kt:116) + at io.airbyte.cdk.integrations.base.adaptive.AdaptiveDestinationRunner$Runner.run(AdaptiveDestinationRunner.kt:68) + at io.airbyte.integrations.destination.snowflake.SnowflakeDestinationKt.main(SnowflakeDestination.kt:279) + """; + + final Optional<SentryParsedException> optionalSentryExceptions = exceptionHelper.buildSentryExceptions(stacktrace); + Assertions.assertTrue(optionalSentryExceptions.isPresent()); + + final SentryParsedException parsedException = optionalSentryExceptions.get(); + final List<SentryException> exceptionList = parsedException.exceptions(); + Assertions.assertEquals(SentryExceptionPlatform.JAVA, parsedException.platform()); + Assertions.assertEquals(1, exceptionList.size()); + + assertExceptionContent(exceptionList.get(0), "io.airbyte.commons.exceptions.ConfigErrorException", "Some error message", + List.of( + Map.of( + FILENAME, "SnowflakeDestination.kt", + LINE_NO, 279, + MODULE, "io.airbyte.integrations.destination.snowflake.SnowflakeDestinationKt", + FUNCTION, "main"), + Map.of( + FILENAME, "AdaptiveDestinationRunner.kt", + LINE_NO, 68, + MODULE, "io.airbyte.cdk.integrations.base.adaptive.AdaptiveDestinationRunner$Runner", + FUNCTION, "run"), + Map.of( + FILENAME, "IntegrationRunner.kt", + LINE_NO, 116, + MODULE, "io.airbyte.cdk.integrations.base.IntegrationRunner", + FUNCTION, "run"), + Map.of( + FILENAME, "SnowflakeDestination.kt", + LINE_NO, 194, + MODULE, "io.airbyte.integrations.destination.snowflake.SnowflakeDestination", + FUNCTION, "getSerializedMessageConsumer"), + Map.of( + FILENAME, "StagingConsumerFactory.kt", + LINE_NO, 124, + MODULE, "io.airbyte.cdk.integrations.destination.staging.StagingConsumerFactory", + FUNCTION, "createAsync"), + Map.of( + FILENAME, "StagingConsumerFactory.kt", + LINE_NO, 159, + MODULE, "io.airbyte.cdk.integrations.destination.staging.StagingConsumerFactory$Companion", + FUNCTION, "access$streamDescToWriteConfig"), + Map.of( + FILENAME, "StagingConsumerFactory.kt", + LINE_NO, 226, + MODULE, "io.airbyte.cdk.integrations.destination.staging.StagingConsumerFactory$Companion", + FUNCTION, "streamDescToWriteConfig"))); + + } + @Test void testBuildSentryExceptionsJavaMultilineValue() { final String stacktrace = diff --git a/airbyte-persistence/job-persistence/src/test/java/io/airbyte/persistence/job/tracker/JobTrackerTest.java b/airbyte-persistence/job-persistence/src/test/java/io/airbyte/persistence/job/tracker/JobTrackerTest.java index 826f8798bbf..8b74446ff6a 100644 --- a/airbyte-persistence/job-persistence/src/test/java/io/airbyte/persistence/job/tracker/JobTrackerTest.java +++ b/airbyte-persistence/job-persistence/src/test/java/io/airbyte/persistence/job/tracker/JobTrackerTest.java @@ -31,7 +31,6 @@ import io.airbyte.config.JobSyncConfig; import io.airbyte.config.JobSyncConfig.NamespaceDefinitionType; import io.airbyte.config.Metadata; -import io.airbyte.config.NormalizationSummary; import io.airbyte.config.RefreshConfig; import io.airbyte.config.RefreshStream; import io.airbyte.config.Schedule; @@ -145,8 +144,6 @@ class JobTrackerTest { .put("source_read_end_time", 10L) .put("destination_write_start_time", 11L) .put("destination_write_end_time", 12L) - .put("normalization_start_time", 13L) - .put("normalization_end_time", 14L) .build(); private static final ImmutableMap<String, Object> SYNC_CONFIG_METADATA = ImmutableMap.<String, Object>builder() .put(JobTracker.CONFIG + ".source", "{\"key\":\"set\"}") @@ -686,7 +683,6 @@ private Attempt getAttemptMock() { final JobOutput jobOutput = mock(JobOutput.class); final StandardSyncOutput syncOutput = mock(StandardSyncOutput.class); final StandardSyncSummary syncSummary = mock(StandardSyncSummary.class); - final NormalizationSummary normalizationSummary = mock(NormalizationSummary.class); final SyncStats syncStats = mock(SyncStats.class); when(syncSummary.getStartTime()).thenReturn(SYNC_START_TIME); @@ -694,7 +690,6 @@ private Attempt getAttemptMock() { when(syncSummary.getBytesSynced()).thenReturn(SYNC_BYTES_SYNC); when(syncSummary.getRecordsSynced()).thenReturn(SYNC_RECORDS_SYNC); when(syncOutput.getStandardSyncSummary()).thenReturn(syncSummary); - when(syncOutput.getNormalizationSummary()).thenReturn(normalizationSummary); when(syncSummary.getTotalStats()).thenReturn(syncStats); when(jobOutput.getSync()).thenReturn(syncOutput); when(attempt.getOutput()).thenReturn(java.util.Optional.of(jobOutput)); @@ -710,8 +705,6 @@ private Attempt getAttemptMock() { when(syncStats.getSourceReadEndTime()).thenReturn(10L); when(syncStats.getDestinationWriteStartTime()).thenReturn(11L); when(syncStats.getDestinationWriteEndTime()).thenReturn(12L); - when(normalizationSummary.getStartTime()).thenReturn(13L); - when(normalizationSummary.getEndTime()).thenReturn(14L); return attempt; } diff --git a/airbyte-server/build.gradle.kts b/airbyte-server/build.gradle.kts index 925749f5f52..22b66877f9b 100644 --- a/airbyte-server/build.gradle.kts +++ b/airbyte-server/build.gradle.kts @@ -4,6 +4,7 @@ plugins { id("io.airbyte.gradle.jvm.app") id("io.airbyte.gradle.docker") id("io.airbyte.gradle.publish") + id("io.airbyte.gradle.kube-reload") } dependencies { @@ -150,6 +151,11 @@ airbyte { imageName = "server" } + kubeReload { + deployment = "ab-server" + container = "airbyte-server-container" + } + spotbugs { excludes = listOf(" \n" + " \n" + diff --git a/airbyte-server/src/main/java/io/airbyte/server/apis/AttemptApiController.java b/airbyte-server/src/main/java/io/airbyte/server/apis/AttemptApiController.java index 08ebd8beb6a..19d91038fe6 100644 --- a/airbyte-server/src/main/java/io/airbyte/server/apis/AttemptApiController.java +++ b/airbyte-server/src/main/java/io/airbyte/server/apis/AttemptApiController.java @@ -18,6 +18,7 @@ import io.airbyte.api.model.generated.InternalOperationResult; import io.airbyte.api.model.generated.SaveAttemptSyncConfigRequestBody; import io.airbyte.api.model.generated.SaveStatsRequestBody; +import io.airbyte.api.model.generated.SaveStreamAttemptMetadataRequestBody; import io.airbyte.api.model.generated.SetWorkflowInAttemptRequestBody;
import io.airbyte.commons.server.handlers.AttemptHandler; import io.airbyte.commons.server.scheduling.AirbyteTaskExecutors; @@ -77,6 +78,14 @@ public InternalOperationResult saveStats(@Body final SaveStatsRequestBody reques return ApiHelper.execute(() -> attemptHandler.saveStats(requestBody)); } + @Override + @Post(uri = "/save_stream_metadata", + processes = MediaType.APPLICATION_JSON) + @ExecuteOn(AirbyteTaskExecutors.IO) + public InternalOperationResult saveStreamMetadata(@Body final SaveStreamAttemptMetadataRequestBody requestBody) { + return ApiHelper.execute(() -> attemptHandler.saveStreamMetadata(requestBody)); + } + @Override @Post(uri = "/set_workflow_in_attempt", processes = MediaType.APPLICATION_JSON) diff --git a/airbyte-server/src/main/java/io/airbyte/server/apis/ConnectionApiController.java b/airbyte-server/src/main/java/io/airbyte/server/apis/ConnectionApiController.java index 7c61a225fa8..c4807fd531e 100644 --- a/airbyte-server/src/main/java/io/airbyte/server/apis/ConnectionApiController.java +++ b/airbyte-server/src/main/java/io/airbyte/server/apis/ConnectionApiController.java @@ -18,6 +18,10 @@ import io.airbyte.api.model.generated.ConnectionAutoPropagateSchemaChange; import io.airbyte.api.model.generated.ConnectionCreate; import io.airbyte.api.model.generated.ConnectionDataHistoryRequestBody; +import io.airbyte.api.model.generated.ConnectionEventIdRequestBody; +import io.airbyte.api.model.generated.ConnectionEventList; +import io.airbyte.api.model.generated.ConnectionEventWithDetails; +import io.airbyte.api.model.generated.ConnectionEventsRequestBody; import io.airbyte.api.model.generated.ConnectionIdRequestBody; import io.airbyte.api.model.generated.ConnectionLastJobPerStreamReadItem; import io.airbyte.api.model.generated.ConnectionLastJobPerStreamRequestBody; @@ -33,11 +37,13 @@ import io.airbyte.api.model.generated.ConnectionSyncProgressRead; import io.airbyte.api.model.generated.ConnectionUpdate; import io.airbyte.api.model.generated.ConnectionUptimeHistoryRequestBody; +import io.airbyte.api.model.generated.DiffCatalogRequestBody; import io.airbyte.api.model.generated.GetTaskQueueNameRequest; import io.airbyte.api.model.generated.InternalOperationResult; import io.airbyte.api.model.generated.JobInfoRead; import io.airbyte.api.model.generated.JobSyncResultRead; import io.airbyte.api.model.generated.ListConnectionsForWorkspacesRequestBody; +import io.airbyte.api.model.generated.SourceDiscoverSchemaRead; import io.airbyte.api.model.generated.TaskQueueNameRead; import io.airbyte.api.model.generated.WorkspaceIdRequestBody; import io.airbyte.commons.server.errors.BadRequestException; @@ -60,6 +66,8 @@ import io.micronaut.scheduling.annotation.ExecuteOn; import io.micronaut.security.annotation.Secured; import io.micronaut.security.rules.SecurityRule; +import jakarta.validation.Valid; +import jakarta.validation.constraints.NotNull; import java.util.ArrayList; import java.util.List; import lombok.extern.slf4j.Slf4j; @@ -188,6 +196,22 @@ public List<JobSyncResultRead> getConnectionDataHistory(@Body final ConnectionDa return ApiHelper.execute(() -> connectionsHandler.getConnectionDataHistory(connectionDataHistoryRequestBody)); } + @Override + @Post(uri = "/events/get") + @Secured({WORKSPACE_READER, ORGANIZATION_READER}) + @ExecuteOn(AirbyteTaskExecutors.IO) + public ConnectionEventWithDetails getConnectionEvent(final ConnectionEventIdRequestBody connectionEventIdRequestBody) { + return ApiHelper.execute(() -> connectionsHandler.getConnectionEvent(connectionEventIdRequestBody)); + } + + @Override
+ @Post(uri = "/events/list") + @Secured({WORKSPACE_READER, ORGANIZATION_READER}) + @ExecuteOn(AirbyteTaskExecutors.IO) + public ConnectionEventList listConnectionEvents(@Valid @NotNull final ConnectionEventsRequestBody connectionEventsRequestBody) { + return ApiHelper.execute(() -> connectionsHandler.listConnectionEvents(connectionEventsRequestBody)); + } + @Override @Post(uri = "/getForJob") @Secured({WORKSPACE_READER, ORGANIZATION_READER}) @@ -281,7 +305,7 @@ public JobInfoRead resetConnectionStream(@Body final ConnectionStreamRequestBody @Post(uri = "/clear") @Secured({WORKSPACE_EDITOR, ORGANIZATION_EDITOR}) @ExecuteOn(AirbyteTaskExecutors.SCHEDULER) - public JobInfoRead clearConnection(@Body ConnectionIdRequestBody connectionIdRequestBody) { + public JobInfoRead clearConnection(@Body final ConnectionIdRequestBody connectionIdRequestBody) { return ApiHelper.execute(() -> schedulerHandler.resetConnection(connectionIdRequestBody)); } @@ -289,7 +313,7 @@ public JobInfoRead clearConnection(@Body ConnectionIdRequestBody connectionIdReq @Post(uri = "/clear/stream") @Secured({WORKSPACE_EDITOR, ORGANIZATION_EDITOR}) @ExecuteOn(AirbyteTaskExecutors.SCHEDULER) - public JobInfoRead clearConnectionStream(@Body ConnectionStreamRequestBody connectionStreamRequestBody) { + public JobInfoRead clearConnectionStream(@Body final ConnectionStreamRequestBody connectionStreamRequestBody) { return ApiHelper.execute(() -> schedulerHandler.resetConnectionStream(connectionStreamRequestBody)); } @@ -301,6 +325,13 @@ public ConnectionAutoPropagateResult applySchemaChangeForConnection(@Body final return ApiHelper.execute(() -> connectionsHandler.applySchemaChange(request)); } + @Post("/diff_catalog") + @Secured({WORKSPACE_EDITOR, ORGANIZATION_EDITOR}) + @ExecuteOn(AirbyteTaskExecutors.SCHEDULER) + public SourceDiscoverSchemaRead diffCatalogForConnection(final DiffCatalogRequestBody req) { + return ApiHelper.execute(() -> connectionsHandler.diffCatalogAndConditionallyDisable(req.getConnectionId(), req.getCatalogId())); + } + @Override @Post(uri = "/get_task_queue_name") @Secured({ADMIN}) diff --git a/airbyte-server/src/main/java/io/airbyte/server/apis/JobsApiController.java b/airbyte-server/src/main/java/io/airbyte/server/apis/JobsApiController.java index cc409e49b95..e1fc3e003a1 100644 --- a/airbyte-server/src/main/java/io/airbyte/server/apis/JobsApiController.java +++ b/airbyte-server/src/main/java/io/airbyte/server/apis/JobsApiController.java @@ -12,7 +12,6 @@ import static io.airbyte.commons.auth.AuthRoleConstants.WORKSPACE_READER; import io.airbyte.api.generated.JobsApi; -import io.airbyte.api.model.generated.AttemptNormalizationStatusReadList; import io.airbyte.api.model.generated.BooleanRead; import io.airbyte.api.model.generated.CheckInput; import io.airbyte.api.model.generated.ConnectionIdRequestBody; @@ -99,14 +98,6 @@ public void failNonTerminalJobs(@Body final ConnectionIdRequestBody connectionId }); } - @Post("/get_normalization_status") - @Secured({ADMIN}) - @ExecuteOn(AirbyteTaskExecutors.IO) - @Override - public AttemptNormalizationStatusReadList getAttemptNormalizationStatusesForJob(@Body final JobIdRequestBody jobIdRequestBody) { - return ApiHelper.execute(() -> jobHistoryHandler.getAttemptNormalizationStatuses(jobIdRequestBody)); - } - @Post("/get_check_input") @Secured({WORKSPACE_READER, ORGANIZATION_READER}) @ExecuteOn(AirbyteTaskExecutors.IO) diff --git a/airbyte-server/src/main/java/io/airbyte/server/config/TemporalBeanFactory.java 
b/airbyte-server/src/main/java/io/airbyte/server/config/TemporalBeanFactory.java index b685cbff6bb..bb3d9a7accc 100644 --- a/airbyte-server/src/main/java/io/airbyte/server/config/TemporalBeanFactory.java +++ b/airbyte-server/src/main/java/io/airbyte/server/config/TemporalBeanFactory.java @@ -15,6 +15,7 @@ import io.airbyte.config.persistence.ConfigRepository; import io.airbyte.data.services.ConnectionService; import io.airbyte.data.services.DestinationService; +import io.airbyte.data.services.SourceService; import io.airbyte.data.services.WorkspaceService; import io.airbyte.persistence.job.errorreporter.JobErrorReporter; import io.airbyte.persistence.job.factory.OAuthConfigSupplier; @@ -51,8 +52,9 @@ public SynchronousSchedulerClient synchronousSchedulerClient(final TemporalClien @Singleton public ContextBuilder contextBuilder(final WorkspaceService workspaceService, final DestinationService destinationService, - final ConnectionService connectionService) { - return new ContextBuilder(workspaceService, destinationService, connectionService); + final ConnectionService connectionService, + final SourceService sourceService) { + return new ContextBuilder(workspaceService, destinationService, connectionService, sourceService); } } diff --git a/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/constants/ServerConstants.kt b/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/constants/ServerConstants.kt index 0e7461111c3..92a334fe234 100644 --- a/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/constants/ServerConstants.kt +++ b/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/constants/ServerConstants.kt @@ -9,6 +9,8 @@ const val DESTINATION_TYPE = "destinationType" const val API_PATH = "/api" const val ROOT_PATH = "/public" +const val APPLICATIONS_PATH = "$ROOT_PATH/v1/applications" +const val APPLICATIONS_PATH_WITH_ID = "$ROOT_PATH/v1/applications/{applicationId}" const val CONNECTIONS_PATH = "$ROOT_PATH/v1/connections" const val CONNECTIONS_WITH_ID_PATH = "$CONNECTIONS_PATH/{connectionId}" const val STREAMS_PATH = "$ROOT_PATH/v1/streams" diff --git a/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/controllers/ApplicationsController.kt b/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/controllers/ApplicationsController.kt new file mode 100644 index 00000000000..230f1f15212 --- /dev/null +++ b/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/controllers/ApplicationsController.kt @@ -0,0 +1,171 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.server.apis.publicapi.controllers + +import io.airbyte.api.problems.throwable.generated.ResourceNotFoundProblem +import io.airbyte.commons.server.scheduling.AirbyteTaskExecutors +import io.airbyte.commons.server.support.CurrentUserService +import io.airbyte.config.Application +import io.airbyte.config.User +import io.airbyte.data.services.ApplicationService +import io.airbyte.publicApi.server.generated.apis.PublicApplicationsApi +import io.airbyte.publicApi.server.generated.models.AccessToken +import io.airbyte.publicApi.server.generated.models.ApplicationCreate +import io.airbyte.publicApi.server.generated.models.ApplicationRead +import io.airbyte.publicApi.server.generated.models.ApplicationReadList +import io.airbyte.publicApi.server.generated.models.ApplicationTokenRequest +import io.airbyte.server.apis.publicapi.apiTracking.TrackingHelper +import io.airbyte.server.apis.publicapi.constants.API_PATH +import io.airbyte.server.apis.publicapi.constants.APPLICATIONS_PATH +import io.airbyte.server.apis.publicapi.constants.APPLICATIONS_PATH_WITH_ID +import io.airbyte.server.apis.publicapi.constants.DELETE +import io.airbyte.server.apis.publicapi.constants.GET +import io.airbyte.server.apis.publicapi.constants.POST +import io.airbyte.server.apis.publicapi.mappers.toApplicationRead +import io.micronaut.context.annotation.Requires +import io.micronaut.http.annotation.Controller +import io.micronaut.scheduling.annotation.ExecuteOn +import io.micronaut.security.annotation.Secured +import io.micronaut.security.rules.SecurityRule +import jakarta.ws.rs.core.Response +import java.util.Optional + +@Controller(API_PATH) +@Secured(SecurityRule.IS_AUTHENTICATED) +@Requires(bean = ApplicationService::class) +open class ApplicationsController( + private val applicationService: ApplicationService, + private val currentUserService: CurrentUserService, + private val trackingHelper: TrackingHelper, +) : PublicApplicationsApi { + @ExecuteOn(AirbyteTaskExecutors.PUBLIC_API) + override fun publicCreateApplication(applicationCreate: ApplicationCreate): Response { + val user: User = currentUserService.currentUser + + // process and monitor the request + val applicationRead = + toApplicationRead( + trackingHelper.callWithTracker( + { + applicationService.createApplication(user, applicationCreate.name) + }, + APPLICATIONS_PATH, + POST, + user.userId, + ), + ) + + return Response + .status(Response.Status.OK.statusCode) + .entity(applicationRead) + .build() + } + + @ExecuteOn(AirbyteTaskExecutors.PUBLIC_API) + override fun publicDeleteApplication(applicationId: String): Response { + val user: User = currentUserService.currentUser + + // process and monitor the request + val application: Optional<Application> = + trackingHelper.callWithTracker( + { + applicationService.deleteApplication(user, applicationId) + }, + APPLICATIONS_PATH_WITH_ID, + DELETE, + user.userId, + ) + + if (application.isEmpty) { + throw ResourceNotFoundProblem( + detail = "The application with the provided id was not found.", + data = null, + ) + } + + return Response + .status(Response.Status.OK.statusCode) + .entity( + toApplicationRead(application.get()), + ) + .build() + } + + @ExecuteOn(AirbyteTaskExecutors.PUBLIC_API) + @Secured(SecurityRule.IS_ANONYMOUS) + override fun publicGetAccessToken(applicationTokenRequest: ApplicationTokenRequest): Response { + return Response + .status(Response.Status.OK.statusCode) + .entity( + AccessToken( + applicationService + .getToken(applicationTokenRequest.clientId, applicationTokenRequest.clientSecret), + ), + ) + .build() + } + + @ExecuteOn(AirbyteTaskExecutors.PUBLIC_API) + override fun publicGetApplication(applicationId: String): Response { + val user: User = currentUserService.currentUser + + // process and monitor the request + val application: Optional<ApplicationRead> = + trackingHelper.callWithTracker( + { + applicationService + .listApplicationsByUser(user) + .stream() + .filter { app -> app.id.equals(applicationId) } + .map { app -> toApplicationRead(app) } + .findFirst() + }, + APPLICATIONS_PATH_WITH_ID, + GET, + user.userId, + ) + + if (application.isEmpty) { + throw ResourceNotFoundProblem( + detail = "The application with the provided id was not found.", + data = null, + ) + } + + return Response + .status(Response.Status.OK) + .entity(application.get()) + .build() + } + + @ExecuteOn(AirbyteTaskExecutors.PUBLIC_API) + override fun publicListApplications(): Response { + val user: User = currentUserService.currentUser + + // process and monitor the request + val applications = + trackingHelper.callWithTracker( + { + applicationService + .listApplicationsByUser(user) + .stream() + .map { app -> toApplicationRead(app) } + .toList() + }, + APPLICATIONS_PATH, + GET, + user.userId, + ) + + return Response + .status(Response.Status.OK) + .entity( + ApplicationReadList( + applications = applications, + ), + ) + .build() + } +} diff --git a/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/controllers/ConnectionsController.kt b/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/controllers/ConnectionsController.kt index 91bbf3ec708..df0d24216d7 100644 --- a/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/controllers/ConnectionsController.kt +++ b/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/controllers/ConnectionsController.kt @@ -350,8 +350,8 @@ open class ConnectionsController( val schemaResponse = trackingHelper.callWithTracker( { sourceService.getSourceSchema(UUID.fromString(currentConnection.sourceId), false) }, - CONNECTIONS_PATH, - POST, + CONNECTIONS_WITH_ID_PATH, + PUT, userId, ) val catalogId = schemaResponse.catalogId @@ -379,8 +379,8 @@ open class ConnectionsController( ) } }, - CONNECTIONS_PATH, - POST, + CONNECTIONS_WITH_ID_PATH, + PUT, userId, ) @@ -388,24 +388,15 @@ open class ConnectionsController( for (streamConfiguration in validConnectionPatchRequest.configurations!!.streams!!) { val validStreamAndConfig = validStreams[streamConfiguration.name] val schemaStream = validStreamAndConfig!!.stream - val updatedValidStreamAndConfig = AirbyteStreamAndConfiguration() - updatedValidStreamAndConfig.stream = schemaStream - updatedValidStreamAndConfig.config = - AirbyteCatalogHelper.updateAirbyteStreamConfiguration( - validStreamAndConfig.config, - schemaStream, - streamConfiguration, - ) - + // validate config for each stream val validDestinationSyncModes = trackingHelper.callWithTracker( { destinationService.getDestinationSyncModes(destinationRead) }, - CONNECTIONS_PATH, - POST, + CONNECTIONS_WITH_ID_PATH, + PUT, userId, ) as List<DestinationSyncMode> - // set user configs trackingHelper.callWithTracker( { AirbyteCatalogHelper.validateStreamConfig( @@ -414,10 +405,21 @@ open class ConnectionsController( airbyteStream = schemaStream, ) }, - CONNECTIONS_PATH, - POST, + CONNECTIONS_WITH_ID_PATH, + PUT, userId, ) + + // set user inputs + val updatedValidStreamAndConfig = AirbyteStreamAndConfiguration() + updatedValidStreamAndConfig.stream = schemaStream + updatedValidStreamAndConfig.config = + AirbyteCatalogHelper.updateAirbyteStreamConfiguration( + validStreamAndConfig.config, + schemaStream, + streamConfiguration, + ) + // set user configs configuredCatalog!!.addStreamsItem(updatedValidStreamAndConfig) } } else { @@ -437,8 +439,8 @@ open class ConnectionsController( destinationRead.workspaceId, ) }, - CONNECTIONS_PATH, - POST, + CONNECTIONS_WITH_ID_PATH, + PUT, userId, )!! diff --git a/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/helpers/AirbyteCatalogHelper.kt b/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/helpers/AirbyteCatalogHelper.kt index a00857b459f..86dcdfac38c 100644 --- a/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/helpers/AirbyteCatalogHelper.kt +++ b/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/helpers/AirbyteCatalogHelper.kt @@ -19,10 +19,9 @@ import io.airbyte.api.model.generated.AirbyteStreamAndConfiguration import io.airbyte.api.model.generated.AirbyteStreamConfiguration import io.airbyte.api.model.generated.DestinationSyncMode import io.airbyte.api.model.generated.SyncMode +import io.airbyte.api.problems.model.generated.ProblemMessageData +import io.airbyte.api.problems.throwable.generated.BadRequestProblem import io.airbyte.api.problems.throwable.generated.UnexpectedProblem -import io.airbyte.commons.server.errors.problems.ConnectionConfigurationProblem -import io.airbyte.commons.server.errors.problems.ConnectionConfigurationProblem.Companion.duplicateStream -import io.airbyte.commons.server.errors.problems.ConnectionConfigurationProblem.Companion.invalidStreamName import io.airbyte.publicApi.server.generated.models.AirbyteApiConnectionSchedule import io.airbyte.publicApi.server.generated.models.ConnectionSyncModeEnum import io.airbyte.publicApi.server.generated.models.ScheduleTypeEnum @@ -107,9 +106,17 @@ object AirbyteCatalogHelper { val alreadyConfiguredStreams: MutableSet<String> = HashSet() for (streamConfiguration in streamConfigurations.streams!!) { if (!validStreams.containsKey(streamConfiguration.name)) { - throw invalidStreamName(validStreams.keys) + throw BadRequestProblem( + ProblemMessageData().message( + "Invalid stream found.
The list of valid streams includes: ${validStreams.keys}.", + ), + ) } else if (alreadyConfiguredStreams.contains(streamConfiguration.name)) { - throw duplicateStream(streamConfiguration.name) + throw BadRequestProblem( + ProblemMessageData().message( + "Duplicate stream found in configuration for: ${streamConfiguration.name}.", + ), + ) } alreadyConfiguredStreams.add(streamConfiguration.name) } @@ -127,33 +134,88 @@ object AirbyteCatalogHelper { streamConfiguration: StreamConfiguration, sourceStream: AirbyteStream, ) { - if (streamConfiguration.selectedFields.isNullOrEmpty()) { - log.debug("No fields selected specifically. Bypass validation.") + if (streamConfiguration.selectedFields == null) { + log.debug("Selected fields not provided. Bypass validation.") return } - val allSelectedFields = streamConfiguration.selectedFields!!.mapNotNull { it.fieldPath?.firstOrNull() } + val allTopLevelStreamFields = getStreamTopLevelFields(sourceStream.jsonSchema).toSet() + if (streamConfiguration.selectedFields!!.isEmpty()) { + // User puts an empty list of selected fields to sync, which is a bad request. + throw BadRequestProblem( + ProblemMessageData().message( + "No fields selected for stream ${sourceStream.name}. The list of valid field names includes: $allTopLevelStreamFields.", + ), + ) + } + // Validate input selected fields. + val allSelectedFields = + streamConfiguration.selectedFields!!.map { it -> + if (it.fieldPath.isNullOrEmpty()) { + throw BadRequestProblem( + ProblemMessageData().message( + "Selected field path cannot be empty for stream: ${sourceStream.name}.", + ), + ) + } + if (it.fieldPath!!.size > 1) { + // We do not support nested field selection. Only top level properties can be selected. + throw BadRequestProblem( + ProblemMessageData().message( + "Nested field selection not supported for stream ${sourceStream.name}.", + ), + ) + } + it.fieldPath!!.first() + } // 1. Avoid duplicate fields selection. val allSelectedFieldsSet = allSelectedFields.toSet() if (allSelectedFields.size != allSelectedFieldsSet.size) { - throw ConnectionConfigurationProblem.duplicateFieldsSelected(sourceStream.name) + throw BadRequestProblem( + ProblemMessageData().message( + "Duplicate fields selected in configuration for stream: ${sourceStream.name}.", + ), + ) } // 2. Avoid non-existing fields selection. - val allTopLevelStreamFields = getStreamTopLevelFields(sourceStream.jsonSchema).toSet() require(allSelectedFields.all { it in allTopLevelStreamFields }) { - throw ConnectionConfigurationProblem.invalidFieldName(sourceStream.name, allTopLevelStreamFields) + throw BadRequestProblem( + ProblemMessageData().message( + "Invalid fields selected for stream ${sourceStream.name}. The list of valid field names includes: $allTopLevelStreamFields.", + ), + ) } - // 3. Selected fields must contain primary key(s). - val primaryKeys = selectPrimaryKey(sourceStream, streamConfiguration) - val primaryKeyFields = primaryKeys?.mapNotNull { it.firstOrNull() } ?: emptyList() - require(primaryKeyFields.all { it in allSelectedFieldsSet }) { - throw ConnectionConfigurationProblem.missingPrimaryKeySelected(sourceStream.name) + // 3. Selected fields must contain primary key(s) in dedup mode. + if (streamConfiguration.syncMode == ConnectionSyncModeEnum.INCREMENTAL_DEDUPED_HISTORY) { + val primaryKeys = selectPrimaryKey(sourceStream, streamConfiguration) + val primaryKeyFields = primaryKeys?.mapNotNull { it.firstOrNull() } ?: emptyList() + require(primaryKeyFields.all { it in allSelectedFieldsSet }) { + throw BadRequestProblem( + ProblemMessageData().message( + "Primary key fields are not selected properly for stream: ${sourceStream.name}. " + + "Please include primary key(s) in the configuration for this stream.", + ), + ) + } } - // 4. Selected fields must contain the cursor field. - val cursorField = selectCursorField(sourceStream, streamConfiguration) - if (!cursorField.isNullOrEmpty() && !allSelectedFieldsSet.contains(cursorField.first())) { - // first element is the top level field, and it has to be present in selected fields - throw ConnectionConfigurationProblem.missingCursorFieldSelected(sourceStream.name) + + // 4. Selected fields must contain the cursor field in incremental modes. + val incrementalSyncModes: Set<ConnectionSyncModeEnum> = + setOf( + ConnectionSyncModeEnum.INCREMENTAL_DEDUPED_HISTORY, + ConnectionSyncModeEnum.INCREMENTAL_APPEND, + ) + if (streamConfiguration.syncMode in incrementalSyncModes) { + val cursorField = selectCursorField(sourceStream, streamConfiguration) + if (!cursorField.isNullOrEmpty() && !allSelectedFieldsSet.contains(cursorField.first())) { + // first element is the top level field, and it has to be present in selected fields + throw BadRequestProblem( + ProblemMessageData().message( + "Cursor field is not selected properly for stream: ${sourceStream.name}. " + + "Please include the cursor field in selected fields for this stream.", + ), + ) + } } } @@ -182,7 +244,9 @@ object AirbyteCatalogHelper { if (connectionSchedule != null) { if (connectionSchedule.scheduleType != null && connectionSchedule.scheduleType === ScheduleTypeEnum.CRON) { if (connectionSchedule.cronExpression == null) { - throw ConnectionConfigurationProblem.missingCronExpression() + throw BadRequestProblem( + ProblemMessageData().message("Missing cron expression in the schedule."), + ) } val cronExpression = normalizeCronExpression(connectionSchedule)?.cronExpression try { @@ -198,11 +262,21 @@ object AirbyteCatalogHelper { } catch (e: NumberFormatException) { log.debug("Invalid cron expression: $cronExpression") log.debug("NumberFormatException: $e") - throw ConnectionConfigurationProblem.invalidCronExpressionUnderOneHour(cronExpression.toString()) + throw BadRequestProblem( + ProblemMessageData().message( + "The cron expression ${connectionSchedule.cronExpression}" + + " is not valid or is less than the one hour minimum. The seconds and minutes values cannot be `*`.", + ), + ) } catch (e: IllegalArgumentException) { log.debug("Invalid cron expression: $cronExpression") log.debug("IllegalArgumentException: $e") - throw ConnectionConfigurationProblem.invalidCronExpression(cronExpression.toString(), e.message) + throw BadRequestProblem( + ProblemMessageData().message( + "The cron expression ${connectionSchedule.cronExpression} is not valid. Error: ${e.message}" + + ". Please check the cron expression format at https://www.quartz-scheduler.org/documentation/quartz-2.3.0/tutorials/crontrigger.html", + ), + ) } } } @@ -245,9 +319,10 @@ object AirbyteCatalogHelper { updatedStreamConfiguration.selected = true updatedStreamConfiguration.aliasName = config.aliasName updatedStreamConfiguration.fieldSelectionEnabled = config.fieldSelectionEnabled - if (!streamConfiguration.selectedFields.isNullOrEmpty()) { - // We will ignore the null or empty input and sync all fields by default, - // which is consistent with Cloud UI where all fields are selected by default. + updatedStreamConfiguration.selectedFields = config.selectedFields + if (streamConfiguration.selectedFields != null) { + // Override and update + updatedStreamConfiguration.fieldSelectionEnabled = true updatedStreamConfiguration.selectedFields = streamConfiguration.selectedFields!!.map { selectedFieldInfoConverter(it) } } updatedStreamConfiguration.suggested = config.suggested @@ -338,10 +413,11 @@ object AirbyteCatalogHelper { } val validCombinedSyncModes: Set<ConnectionSyncModeEnum> = validCombinedSyncModes(airbyteStream.supportedSyncModes, validDestinationSyncModes) if (!validCombinedSyncModes.contains(streamConfiguration.syncMode)) { - throw ConnectionConfigurationProblem.handleSyncModeProblem( - streamConfiguration.syncMode, - streamConfiguration.name, - validCombinedSyncModes, + throw BadRequestProblem( + ProblemMessageData().message( + "Cannot set sync mode to ${streamConfiguration.syncMode} for stream ${streamConfiguration.name}. " + + "Valid sync modes are: $validCombinedSyncModes", + ), ) } @@ -368,7 +444,12 @@ object AirbyteCatalogHelper { if (!cursorField.isNullOrEmpty()) { // if cursor given is not empty and is NOT the same as the default, throw error if (java.util.Set.copyOf(cursorField) != java.util.Set.copyOf(airbyteStream.defaultCursorField)) { - throw ConnectionConfigurationProblem.sourceDefinedCursorFieldProblem(airbyteStream.name, airbyteStream.defaultCursorField!!) + throw BadRequestProblem( + ProblemMessageData().message( + "Cursor field " + cursorField + " is already defined by source for stream: " + airbyteStream.name + + ". Do not include a cursor field configuration for this stream.", + ), + ) } } } else { @@ -376,12 +457,20 @@ object AirbyteCatalogHelper { // validate cursor field val validCursorFields: List<List<String>> = getStreamFields(airbyteStream.jsonSchema!!) if (!validCursorFields.contains(cursorField)) { - throw ConnectionConfigurationProblem.invalidCursorField(airbyteStream.name, validCursorFields) + throw BadRequestProblem( + ProblemMessageData().message( + "Invalid cursor field for stream: ${airbyteStream.name}. The list of valid cursor fields includes: $validCursorFields.", + ), + ) } } else { // no default or given cursor field if (airbyteStream.defaultCursorField == null || airbyteStream.defaultCursorField!!.isEmpty()) { - throw ConnectionConfigurationProblem.missingCursorField(airbyteStream.name) + throw BadRequestProblem( + ProblemMessageData().message( + "No default cursor field for stream: ${airbyteStream.name}. Please include a cursor field configuration for this stream.", + ), + ) } } } @@ -398,14 +487,23 @@ object AirbyteCatalogHelper { if (sourceDefinedPrimaryKeyExists && configuredPrimaryKeyExists) { if (airbyteStream.sourceDefinedPrimaryKey != primaryKey) { - throw ConnectionConfigurationProblem.primaryKeyAlreadyDefined(airbyteStream.name, airbyteStream.sourceDefinedPrimaryKey) + throw BadRequestProblem( + ProblemMessageData().message( + "Primary key for stream: ${airbyteStream.name} is already pre-defined. " + + "Please remove the primaryKey or provide the value as ${airbyteStream.sourceDefinedPrimaryKey}.", + ), + ) } } // Ensure that we've passed at least some kind of primary key val noPrimaryKey = !configuredPrimaryKeyExists && !sourceDefinedPrimaryKeyExists if (noPrimaryKey) { - throw ConnectionConfigurationProblem.missingPrimaryKey(airbyteStream.name) + throw BadRequestProblem( + ProblemMessageData().message( + "No default primary key for stream: ${airbyteStream.name}. Please include a primary key configuration for this stream.", + ), + ) } // Validate the actual key passed in @@ -414,11 +512,19 @@ object AirbyteCatalogHelper { primaryKey?.let { for (singlePrimaryKey in primaryKey) { if (!validPrimaryKey.contains(singlePrimaryKey)) { // todo double check if the .contains() for list of strings works as intended - throw ConnectionConfigurationProblem.invalidPrimaryKey(airbyteStream.name, validPrimaryKey) + throw BadRequestProblem( + ProblemMessageData().message( + "Invalid primary key for stream: ${airbyteStream.name}. The list of valid primary key fields: $validPrimaryKey.", + ), + ) } - if (singlePrimaryKey.distinct() != singlePrimaryKey) { - throw ConnectionConfigurationProblem.duplicatePrimaryKey(airbyteStream.name, primaryKey) + throw BadRequestProblem( + ProblemMessageData().message( + "Duplicate primary key detected for stream: ${airbyteStream.name}, " + + "please don't provide the same column more than once. Key: $primaryKey", + ), + ) } } } diff --git a/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/mappers/ApplicationReadMapper.kt b/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/mappers/ApplicationReadMapper.kt new file mode 100644 index 00000000000..3700d8717b0 --- /dev/null +++ b/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/mappers/ApplicationReadMapper.kt @@ -0,0 +1,19 @@ +package io.airbyte.server.apis.publicapi.mappers + +import io.airbyte.config.Application +import io.airbyte.publicApi.server.generated.models.ApplicationRead +import java.time.OffsetDateTime +import java.time.format.DateTimeFormatter + +fun toApplicationRead(application: Application): ApplicationRead { + return ApplicationRead( + id = application.id, + name = application.name, + clientId = application.clientId, + clientSecret = application.clientSecret, + createdAt = + OffsetDateTime + .parse(application.createdOn, DateTimeFormatter.ISO_OFFSET_DATE_TIME) + .toEpochSecond(), + ) +} diff --git a/airbyte-server/src/main/kotlin/io/airbyte/server/repositories/RetryStatesRepository.kt b/airbyte-server/src/main/kotlin/io/airbyte/server/repositories/RetryStatesRepository.kt index b9bb4a55cb4..e587c1e9be3 100644 --- a/airbyte-server/src/main/kotlin/io/airbyte/server/repositories/RetryStatesRepository.kt +++ b/airbyte-server/src/main/kotlin/io/airbyte/server/repositories/RetryStatesRepository.kt @@ -5,6 +5,7 @@ package io.airbyte.server.repositories import io.airbyte.server.repositories.domain.RetryState +import io.micronaut.data.annotation.Query import io.micronaut.data.jdbc.annotation.JdbcRepository import io.micronaut.data.model.query.builder.sql.Dialect import io.micronaut.data.repository.PageableRepository @@ -15,10 +16,15 @@ import java.util.UUID abstract class RetryStatesRepository : PageableRepository<RetryState, UUID> { abstract fun findByJobId(jobId: Long?): Optional<RetryState>?
- abstract fun updateByJobId( - jobId: Long?, - update: RetryState, + @Query( + "UPDATE retry_states SET " + + "successive_complete_failures = :successiveCompleteFailures, " + + "total_complete_failures = :totalCompleteFailures, " + + "successive_partial_failures = :successivePartialFailures, " + + "total_partial_failures = :totalPartialFailures " + + "WHERE job_id = :jobId", ) + abstract fun updateByJobId(retryState: RetryState) abstract fun existsByJobId(jobId: Long): Boolean @@ -29,7 +35,7 @@ class RetryStatesRepository : PageableRepository<RetryState, UUID> { val exists = existsByJobId(jobId) if (exists) { - updateByJobId(jobId, payload) + updateByJobId(retryState = payload) } else { save(payload) } diff --git a/airbyte-server/src/main/kotlin/io/airbyte/server/repositories/domain/RetryState.kt b/airbyte-server/src/main/kotlin/io/airbyte/server/repositories/domain/RetryState.kt index de93061fa8d..45897258e6b 100644 --- a/airbyte-server/src/main/kotlin/io/airbyte/server/repositories/domain/RetryState.kt +++ b/airbyte-server/src/main/kotlin/io/airbyte/server/repositories/domain/RetryState.kt @@ -147,8 +147,11 @@ class RetryState( fun build(): RetryState { return RetryState( - this.id, this.connectionId, this.jobId, this.createdAt, this.updatedAt, this.successiveCompleteFailures, - this.totalCompleteFailures, this.successivePartialFailures, this.totalPartialFailures, + id = this.id, connectionId = this.connectionId, jobId = this.jobId, createdAt = this.createdAt, updatedAt = this.updatedAt, + successiveCompleteFailures = this.successiveCompleteFailures, + totalCompleteFailures = this.totalCompleteFailures, + successivePartialFailures = this.successivePartialFailures, + totalPartialFailures = this.totalPartialFailures, ) } diff --git a/airbyte-server/src/test/java/io/airbyte/server/apis/ConnectionApiControllerTest.java b/airbyte-server/src/test/java/io/airbyte/server/apis/ConnectionApiControllerTest.java index 0df16a6818c..f23458b0a87 100644 --- a/airbyte-server/src/test/java/io/airbyte/server/apis/ConnectionApiControllerTest.java +++ b/airbyte-server/src/test/java/io/airbyte/server/apis/ConnectionApiControllerTest.java @@ -50,7 +50,7 @@ class ConnectionApiControllerTest { RouterService routerService; @Test - void testConnectionStreamReset() throws IOException { + void testConnectionStreamReset() throws IOException, ConfigNotFoundException { final UUID connectionId = UUID.randomUUID(); final String streamName = "tableA"; final String streamNamespace = "schemaA"; diff --git a/airbyte-server/src/test/java/io/airbyte/server/apis/JobsApiTest.java b/airbyte-server/src/test/java/io/airbyte/server/apis/JobsApiTest.java index 1e22eae43c1..a5a14f89556 100644 --- a/airbyte-server/src/test/java/io/airbyte/server/apis/JobsApiTest.java +++ b/airbyte-server/src/test/java/io/airbyte/server/apis/JobsApiTest.java @@ -4,7 +4,6 @@ package io.airbyte.server.apis; -import io.airbyte.api.model.generated.AttemptNormalizationStatusReadList; import io.airbyte.api.model.generated.JobCreate; import io.airbyte.api.model.generated.JobDebugInfoRead; import io.airbyte.api.model.generated.JobIdRequestBody; @@ -49,16 +48,6 @@ void testCancelJob() throws IOException { HttpStatus.OK); } - @Test - void testGetAttemptNormalizationStatusesForJob() throws IOException { - Mockito.when(jobHistoryHandler.getAttemptNormalizationStatuses(Mockito.any())) - .thenReturn(new AttemptNormalizationStatusReadList()); - final String path = "/api/v1/jobs/get_normalization_status"; - testEndpointStatus( - HttpRequest.POST(path, new JobIdRequestBody()),
- HttpStatus.OK); - } - @Test void testGetJobDebugInfo() throws IOException, JsonValidationException, ConfigNotFoundException { Mockito.when(jobHistoryHandler.getJobDebugInfo(Mockito.any())) diff --git a/airbyte-server/src/test/java/io/airbyte/server/apis/WebBackendApiTest.java b/airbyte-server/src/test/java/io/airbyte/server/apis/WebBackendApiTest.java index 3fba2de7fc8..3856b6135e6 100644 --- a/airbyte-server/src/test/java/io/airbyte/server/apis/WebBackendApiTest.java +++ b/airbyte-server/src/test/java/io/airbyte/server/apis/WebBackendApiTest.java @@ -73,7 +73,8 @@ void testWebBackendCheckUpdates() { } @Test - void testWebBackendCreateConnection() throws JsonValidationException, ConfigNotFoundException, IOException { + void testWebBackendCreateConnection() + throws JsonValidationException, ConfigNotFoundException, IOException, io.airbyte.data.exceptions.ConfigNotFoundException { Mockito.when(webBackendConnectionsHandler.webBackendCreateConnection(Mockito.any())) .thenReturn(new WebBackendConnectionRead()) .thenThrow(new ConfigNotFoundException("", "")); @@ -87,7 +88,8 @@ void testWebBackendCreateConnection() throws JsonValidationException, ConfigNotF } @Test - void testWebBackendGetConnection() throws JsonValidationException, ConfigNotFoundException, IOException { + void testWebBackendGetConnection() + throws JsonValidationException, ConfigNotFoundException, IOException, io.airbyte.data.exceptions.ConfigNotFoundException { final String path = "/api/v1/web_backend/connections/get"; Mockito.when(webBackendConnectionsHandler.webBackendGetConnection(Mockito.any())) @@ -137,7 +139,8 @@ void testWebBackendGetWorkspaceState() throws IOException { } @Test - void testWebBackendListConnectionsForWorkspace() throws IOException { + void testWebBackendListConnectionsForWorkspace() + throws IOException, JsonValidationException, io.airbyte.data.exceptions.ConfigNotFoundException, ConfigNotFoundException { Mockito.when(webBackendConnectionsHandler.webBackendListConnectionsForWorkspace(Mockito.any())) .thenReturn(new WebBackendConnectionReadList()); final String path = "/api/v1/web_backend/connections/list"; diff --git a/airbyte-server/src/test/java/io/airbyte/server/repositories/RetryStatesRepositoryTest.java b/airbyte-server/src/test/java/io/airbyte/server/repositories/RetryStatesRepositoryTest.java index eeebdf1afc8..0e0049de981 100644 --- a/airbyte-server/src/test/java/io/airbyte/server/repositories/RetryStatesRepositoryTest.java +++ b/airbyte-server/src/test/java/io/airbyte/server/repositories/RetryStatesRepositoryTest.java @@ -4,6 +4,9 @@ package io.airbyte.server.repositories; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; + import io.airbyte.db.factory.DSLContextFactory; import io.airbyte.db.init.DatabaseInitializationException; import io.airbyte.db.instance.DatabaseConstants; @@ -95,8 +98,8 @@ void testInsert() { final var found = repo.findById(inserted.getId()); - Assertions.assertTrue(found.isPresent()); - Assertions.assertEquals(inserted, found.get()); + assertTrue(found.isPresent()); + assertEquals(inserted, found.get()); } @Test @@ -115,15 +118,15 @@ void testUpdateByJobId() { .successivePartialFailures(0) .build(); - repo.updateByJobId(Fixtures.jobId2, updated); + repo.updateByJobId(updated); final var found2 = repo.findById(id); - Assertions.assertTrue(found1.isPresent()); - Assertions.assertEquals(s, found1.get()); + assertTrue(found1.isPresent()); + assertEquals(s, found1.get()); - 
Assertions.assertTrue(found2.isPresent()); - Assertions.assertEquals(updated, found2.get()); + assertTrue(found2.isPresent()); + assertEquals(updated, found2.get()); } @Test @@ -150,14 +153,14 @@ void findByJobId() { final var found2 = repo.findByJobId(Fixtures.jobId3); final var found3 = repo.findByJobId(Fixtures.jobId1); - Assertions.assertTrue(found1.isPresent()); - Assertions.assertEquals(s1, found1.get()); + assertTrue(found1.isPresent()); + assertEquals(s1, found1.get()); - Assertions.assertTrue(found2.isPresent()); - Assertions.assertEquals(s2, found2.get()); + assertTrue(found2.isPresent()); + assertEquals(s2, found2.get()); - Assertions.assertTrue(found3.isPresent()); - Assertions.assertEquals(s3, found3.get()); + assertTrue(found3.isPresent()); + assertEquals(s3, found3.get()); } @Test @@ -171,7 +174,7 @@ void testExistsByJobId() { final var exists1 = repo.existsByJobId(Fixtures.jobId3); final var exists2 = repo.existsByJobId(Fixtures.jobId2); - Assertions.assertTrue(exists1); + assertTrue(exists1); Assertions.assertFalse(exists2); } @@ -195,11 +198,11 @@ void testCreateOrUpdateByJobIdUpdate() { final var found2 = repo.findById(id); - Assertions.assertTrue(found1.isPresent()); - Assertions.assertEquals(s, found1.get()); + assertTrue(found1.isPresent()); + assertEquals(s, found1.get()); - Assertions.assertTrue(found2.isPresent()); - Assertions.assertEquals(updated, found2.get()); + assertTrue(found2.isPresent()); + assertEquals(updated, found2.get()); } @Test @@ -212,8 +215,8 @@ void testCreateOrUpdateByJobIdCreate() { final var found1 = repo.findByJobId(Fixtures.jobId4); - Assertions.assertTrue(found1.isPresent()); - Assertions.assertEquals(s, found1.get()); + assertTrue(found1.isPresent()); + assertEquals(s, found1.get()); } private static class Fixtures { diff --git a/airbyte-server/src/test/kotlin/io/airbyte/server/apis/publicapi/helpers/AirbyteCatalogHelperTest.kt b/airbyte-server/src/test/kotlin/io/airbyte/server/apis/publicapi/helpers/AirbyteCatalogHelperTest.kt index 37c02ca22fe..1c5a822c482 100644 --- a/airbyte-server/src/test/kotlin/io/airbyte/server/apis/publicapi/helpers/AirbyteCatalogHelperTest.kt +++ b/airbyte-server/src/test/kotlin/io/airbyte/server/apis/publicapi/helpers/AirbyteCatalogHelperTest.kt @@ -28,6 +28,7 @@ import org.junit.jupiter.api.Assertions.assertTrue import org.junit.jupiter.api.BeforeEach import org.junit.jupiter.api.Nested import org.junit.jupiter.api.Test +import org.junit.jupiter.api.assertDoesNotThrow import org.junit.jupiter.params.ParameterizedTest import org.junit.jupiter.params.provider.EnumSource @@ -612,12 +613,7 @@ internal class AirbyteCatalogHelperTest { @Nested inner class ValidateFieldSelection { - private val selectedFields = - listOf( - io.airbyte.publicApi.server.generated.models.SelectedFieldInfo(fieldPath = (listOf("f1", "f2", "f3"))), - io.airbyte.publicApi.server.generated.models.SelectedFieldInfo(fieldPath = (listOf("m1", "m2"))), - io.airbyte.publicApi.server.generated.models.SelectedFieldInfo(fieldPath = (listOf("y1"))), - ) + private val streamConfiguration = StreamConfiguration(name = "testStream") private val jsonSchemaString = """ { @@ -661,48 +657,105 @@ internal class AirbyteCatalogHelperTest { @BeforeEach fun setUp() { - schemaConfiguration.syncMode = SyncMode.FULL_REFRESH schemaConfiguration.destinationSyncMode = DestinationSyncMode.OVERWRITE - sourceStream.name = "testStream" + schemaConfiguration.fieldSelectionEnabled = null + schemaConfiguration.selectedFields = null sourceStream.jsonSchema = 
Jsons.deserialize(jsonSchemaString) sourceStream.defaultCursorField = listOf("b1") sourceStream.sourceDefinedPrimaryKey = listOf(listOf("f1")) } @Test - fun `Null selected fields should be excluded in the updated config`() { + fun `Selected fields data is provided in the request, should be included in the updated config`() { val streamConfiguration = StreamConfiguration( name = "testStream", - selectedFields = null, + selectedFields = + listOf( + io.airbyte.publicApi.server.generated.models.SelectedFieldInfo(fieldPath = listOf("f1")), + io.airbyte.publicApi.server.generated.models.SelectedFieldInfo(fieldPath = listOf("m1")), + ), ) val updatedConfig = AirbyteCatalogHelper.updateAirbyteStreamConfiguration(schemaConfiguration, sourceStream, streamConfiguration) - assertEquals(listOf(), updatedConfig.selectedFields) + assertEquals(true, updatedConfig.fieldSelectionEnabled) + assertEquals(2, updatedConfig.selectedFields.size) } @Test - fun `Empty selected fields should be excluded in the updated config`() { + fun `Selected fields data is not provided in the request, should use the original config`() { val streamConfiguration = StreamConfiguration( name = "testStream", - selectedFields = emptyList(), + selectedFields = null, ) val updatedConfig = AirbyteCatalogHelper.updateAirbyteStreamConfiguration(schemaConfiguration, sourceStream, streamConfiguration) - assertEquals(listOf(), updatedConfig.selectedFields) + assertEquals(null, updatedConfig.fieldSelectionEnabled) + assertEquals(null, updatedConfig.selectedFields) } @Test - fun `Non-empty selected fields should be included in the updated config`() { + fun `Should bypass validation if selected fields are not being set specifically`() { val streamConfiguration = StreamConfiguration( name = "testStream", - selectedFields = selectedFields, + selectedFields = null, ) - val updatedConfig = AirbyteCatalogHelper.updateAirbyteStreamConfiguration(schemaConfiguration, sourceStream, streamConfiguration) - // test size - assertEquals(selectedFields.size, updatedConfig.selectedFields.size) - // test type converter - assertEquals(selectedFields[0].fieldPath?.get(0), updatedConfig.selectedFields[0].fieldPath[0]) + assertDoesNotThrow { AirbyteCatalogHelper.validateFieldSelection(streamConfiguration, sourceStream) } + } + + @Test + fun `Should throw error if input selected fields is set to an empty list`() { + val streamConfiguration = + StreamConfiguration( + name = "testStream", + selectedFields = listOf(), + ) + val throwable = + assertThrows(BadRequestProblem::class.java) { + AirbyteCatalogHelper.validateFieldSelection(streamConfiguration, sourceStream) + } + val problemData: ProblemMessageData = throwable.problem.data as ProblemMessageData + assertEquals(true, problemData.message.contains("No fields selected")) + } + + @Test + fun `Should throw error if any selected field contains empty field path`() { + val streamConfiguration = + StreamConfiguration( + name = "testStream", + selectedFields = + listOf( + io.airbyte.publicApi.server.generated.models.SelectedFieldInfo(fieldPath = listOf("m1")), + io.airbyte.publicApi.server.generated.models.SelectedFieldInfo(fieldPath = listOf()), + io.airbyte.publicApi.server.generated.models.SelectedFieldInfo(fieldPath = listOf("b1")), + ), + ) + val throwable = + assertThrows(BadRequestProblem::class.java) { + AirbyteCatalogHelper.validateFieldSelection(streamConfiguration, sourceStream) + } + val problemData: ProblemMessageData = throwable.problem.data as ProblemMessageData + assertEquals(true, 
problemData.message.contains("Selected field path cannot be empty")) + } + + @Test + fun `Should throw error if any selected field contains nested field path`() { + val streamConfiguration = + StreamConfiguration( + name = "testStream", + selectedFields = + listOf( + // f1 -> f2 -> f3 is a nested field path + io.airbyte.publicApi.server.generated.models.SelectedFieldInfo(fieldPath = listOf("f1", "f2", "f3")), + io.airbyte.publicApi.server.generated.models.SelectedFieldInfo(fieldPath = listOf("b1")), + ), + ) + val throwable = + assertThrows(BadRequestProblem::class.java) { + AirbyteCatalogHelper.validateFieldSelection(streamConfiguration, sourceStream) + } + val problemData: ProblemMessageData = throwable.problem.data as ProblemMessageData + assertEquals(true, problemData.message.contains("Nested field selection not supported")) } @Test @@ -712,11 +765,11 @@ internal class AirbyteCatalogHelperTest { name = "testStream", selectedFields = listOf( - io.airbyte.publicApi.server.generated.models.SelectedFieldInfo(fieldPath = (listOf("f1", "f2", "f3"))), - io.airbyte.publicApi.server.generated.models.SelectedFieldInfo(fieldPath = (listOf("m1", "m2"))), + io.airbyte.publicApi.server.generated.models.SelectedFieldInfo(fieldPath = listOf("f1")), + io.airbyte.publicApi.server.generated.models.SelectedFieldInfo(fieldPath = listOf("m1")), // `m1` is a dup field - io.airbyte.publicApi.server.generated.models.SelectedFieldInfo(fieldPath = (listOf("m1", "m3"))), - io.airbyte.publicApi.server.generated.models.SelectedFieldInfo(fieldPath = (listOf("b1"))), + io.airbyte.publicApi.server.generated.models.SelectedFieldInfo(fieldPath = listOf("m1")), + io.airbyte.publicApi.server.generated.models.SelectedFieldInfo(fieldPath = listOf("b1")), ), ) val throwable = @@ -734,11 +787,11 @@ internal class AirbyteCatalogHelperTest { name = "testStream", selectedFields = listOf( - io.airbyte.publicApi.server.generated.models.SelectedFieldInfo(fieldPath = (listOf("f1"))), - io.airbyte.publicApi.server.generated.models.SelectedFieldInfo(fieldPath = (listOf("m1"))), + io.airbyte.publicApi.server.generated.models.SelectedFieldInfo(fieldPath = listOf("f1")), + io.airbyte.publicApi.server.generated.models.SelectedFieldInfo(fieldPath = listOf("m1")), // `x1` is not existed in source schema - io.airbyte.publicApi.server.generated.models.SelectedFieldInfo(fieldPath = (listOf("x1"))), - io.airbyte.publicApi.server.generated.models.SelectedFieldInfo(fieldPath = (listOf("b1"))), + io.airbyte.publicApi.server.generated.models.SelectedFieldInfo(fieldPath = listOf("x1")), + io.airbyte.publicApi.server.generated.models.SelectedFieldInfo(fieldPath = listOf("b1")), ), ) var throwable = @@ -746,19 +799,20 @@ internal class AirbyteCatalogHelperTest { AirbyteCatalogHelper.validateFieldSelection(streamConfiguration, sourceStream) } val problemData: ProblemMessageData = throwable.problem.data as ProblemMessageData - assertEquals(true, problemData.message.contains("Invalid field selected")) + assertEquals(true, problemData.message.contains("Invalid fields selected")) } @Test - fun `Should throw error if primary key(s) are not selected`() { + fun `Should throw error if primary key(s) are not selected in dedup mode`() { val streamConfiguration = StreamConfiguration( name = "testStream", + syncMode = ConnectionSyncModeEnum.INCREMENTAL_DEDUPED_HISTORY, selectedFields = listOf( // "f1" as primary key is missing - io.airbyte.publicApi.server.generated.models.SelectedFieldInfo(fieldPath = (listOf("m1"))), - 
io.airbyte.publicApi.server.generated.models.SelectedFieldInfo(fieldPath = (listOf("b1"))), + io.airbyte.publicApi.server.generated.models.SelectedFieldInfo(fieldPath = listOf("m1")), + io.airbyte.publicApi.server.generated.models.SelectedFieldInfo(fieldPath = listOf("b1")), ), ) var throwable = @@ -770,16 +824,39 @@ internal class AirbyteCatalogHelperTest { } @Test - fun `Should throw error if cursor field is not selected`() { + fun `Should throw error if cursor field is not selected in incremental_dedup mode`() { + val streamConfiguration = + StreamConfiguration( + name = "testStream", + syncMode = ConnectionSyncModeEnum.INCREMENTAL_DEDUPED_HISTORY, + selectedFields = + listOf( + // "b1" as cursor field is missing + io.airbyte.publicApi.server.generated.models.SelectedFieldInfo(fieldPath = listOf("f1")), + io.airbyte.publicApi.server.generated.models.SelectedFieldInfo(fieldPath = listOf("m1")), + io.airbyte.publicApi.server.generated.models.SelectedFieldInfo(fieldPath = listOf("y1")), + ), + ) + var throwable = + assertThrows(BadRequestProblem::class.java) { + AirbyteCatalogHelper.validateFieldSelection(streamConfiguration, sourceStream) + } + val problemData: ProblemMessageData = throwable.problem.data as ProblemMessageData + assertEquals(true, problemData.message.contains("Cursor field is not selected properly")) + } + + @Test + fun `Should throw error if cursor field is not selected in incremental_append mode`() { val streamConfiguration = StreamConfiguration( name = "testStream", + syncMode = ConnectionSyncModeEnum.INCREMENTAL_APPEND, selectedFields = listOf( // "b1" as cursor field is missing - io.airbyte.publicApi.server.generated.models.SelectedFieldInfo(fieldPath = (listOf("f1"))), - io.airbyte.publicApi.server.generated.models.SelectedFieldInfo(fieldPath = (listOf("m1"))), - io.airbyte.publicApi.server.generated.models.SelectedFieldInfo(fieldPath = (listOf("y1"))), + io.airbyte.publicApi.server.generated.models.SelectedFieldInfo(fieldPath = listOf("f1")), + io.airbyte.publicApi.server.generated.models.SelectedFieldInfo(fieldPath = listOf("m1")), + io.airbyte.publicApi.server.generated.models.SelectedFieldInfo(fieldPath = listOf("y1")), ), ) var throwable = diff --git a/airbyte-server/src/test/kotlin/io/airbyte/server/apis/publicapi/mappers/ApplicationReadMapperTest.kt b/airbyte-server/src/test/kotlin/io/airbyte/server/apis/publicapi/mappers/ApplicationReadMapperTest.kt new file mode 100644 index 00000000000..b9fca74d782 --- /dev/null +++ b/airbyte-server/src/test/kotlin/io/airbyte/server/apis/publicapi/mappers/ApplicationReadMapperTest.kt @@ -0,0 +1,38 @@ +package io.airbyte.server.apis.publicapi.mappers + +import io.airbyte.config.Application +import io.airbyte.publicApi.server.generated.models.ApplicationRead +import org.junit.jupiter.api.Assertions.assertEquals +import org.junit.jupiter.api.Test +import java.time.OffsetDateTime +import java.util.UUID + +class ApplicationReadMapperTest { + @Test + fun `from should convert an Application object from keycloak to an ApplicationRead to be returned to the API`() { + // Given + val now = OffsetDateTime.now() + val application = + Application().apply { + this.id = UUID.randomUUID().toString() + this.name = "applicationName" + this.clientId = UUID.randomUUID().toString() + this.clientSecret = "shhhhh" + this.createdOn = now.toString() + } + + // When + val applicationRead = toApplicationRead(application) + + // Then + val expected = + ApplicationRead( + id = application.id, + clientId = application.clientId, + clientSecret = 
application.clientSecret, + createdAt = now.toEpochSecond(), + name = application.name, + ) + assertEquals(expected, applicationRead) + } +} diff --git a/airbyte-test-utils/src/main/java/io/airbyte/test/utils/AcceptanceTestHarness.java b/airbyte-test-utils/src/main/java/io/airbyte/test/utils/AcceptanceTestHarness.java index 3a2665691a6..8192aa4496c 100644 --- a/airbyte-test-utils/src/main/java/io/airbyte/test/utils/AcceptanceTestHarness.java +++ b/airbyte-test-utils/src/main/java/io/airbyte/test/utils/AcceptanceTestHarness.java @@ -60,7 +60,6 @@ import io.airbyte.api.client.model.generated.OperationIdRequestBody; import io.airbyte.api.client.model.generated.OperationRead; import io.airbyte.api.client.model.generated.OperatorConfiguration; -import io.airbyte.api.client.model.generated.OperatorNormalization; import io.airbyte.api.client.model.generated.OperatorType; import io.airbyte.api.client.model.generated.OperatorWebhook; import io.airbyte.api.client.model.generated.OperatorWebhookDbtCloud; @@ -167,6 +166,8 @@ public class AcceptanceTestHarness { private static final DockerImageName SOURCE_POSTGRES_IMAGE_NAME = DockerImageName.parse("debezium/postgres:15-alpine") .asCompatibleSubstituteFor("postgres"); + private static final String TEMPORAL_HOST = "temporal.airbyte.dev:80"; + private static final String SOURCE_E2E_TEST_CONNECTOR_VERSION = "0.1.2"; private static final String DESTINATION_E2E_TEST_CONNECTOR_VERSION = "0.1.1"; @@ -516,7 +517,7 @@ private void assignEnvVars() { private WorkflowClient getWorkflowClient() { final TemporalUtils temporalUtils = new TemporalUtils(null, null, null, null, null, null, null); final WorkflowServiceStubs temporalService = temporalUtils.createTemporalService( - TemporalWorkflowUtils.getAirbyteTemporalOptions("localhost:7233", new TemporalSdkTimeouts()), + TemporalWorkflowUtils.getAirbyteTemporalOptions(TEMPORAL_HOST, new TemporalSdkTimeouts()), TemporalUtils.DEFAULT_NAMESPACE); return WorkflowClient.newInstance(temporalService); } @@ -848,25 +849,6 @@ public CheckConnectionRead.Status checkDestination(final UUID destinationId) thr .checkConnectionToDestination(new DestinationIdRequestBody(destinationId)).getStatus(); } - public OperationRead createNormalizationOperation() throws IOException { - return createNormalizationOperation(defaultWorkspaceId); - } - - public OperationRead createNormalizationOperation(final UUID workspaceId) throws IOException { - final OperatorConfiguration normalizationConfig = new OperatorConfiguration( - OperatorType.NORMALIZATION, - new OperatorNormalization(OperatorNormalization.Option.BASIC), - null, - null); - final OperationCreate operationCreate = new OperationCreate( - workspaceId, - "AccTestDestination-" + UUID.randomUUID(), - normalizationConfig); - final OperationRead operation = apiClient.getOperationApi().createOperation(operationCreate); - operationIds.add(operation.getOperationId()); - return operation; - } - public OperationRead createDbtCloudWebhookOperation(final UUID workspaceId, final UUID webhookConfigId) throws Exception { return apiClient.getOperationApi().createOperation( new OperationCreate( @@ -874,8 +856,6 @@ public OperationRead createDbtCloudWebhookOperation(final UUID workspaceId, fina "reqres test", new OperatorConfiguration( OperatorType.WEBHOOK, - null, - null, new OperatorWebhook( webhookConfigId, OperatorWebhook.WebhookType.DBT_CLOUD, diff --git a/airbyte-test-utils/src/main/java/io/airbyte/test/utils/Asserts.java b/airbyte-test-utils/src/main/java/io/airbyte/test/utils/Asserts.java index 
899e5d9aa73..464cecb011e 100644 --- a/airbyte-test-utils/src/main/java/io/airbyte/test/utils/Asserts.java +++ b/airbyte-test-utils/src/main/java/io/airbyte/test/utils/Asserts.java @@ -4,13 +4,10 @@ package io.airbyte.test.utils; -import static io.airbyte.test.utils.AcceptanceTestHarness.COLUMN_ID; -import static io.airbyte.test.utils.AcceptanceTestHarness.COLUMN_NAME; import static io.airbyte.test.utils.AcceptanceTestHarness.FINAL_INTERVAL_SECS; import static io.airbyte.test.utils.AcceptanceTestHarness.JITTER_MAX_INTERVAL_SECS; import static io.airbyte.test.utils.AcceptanceTestHarness.MAX_TRIES; import static io.airbyte.test.utils.AcceptanceTestHarness.OUTPUT_STREAM_PREFIX; -import static io.airbyte.test.utils.AcceptanceTestHarness.STREAM_NAME; import static java.lang.Thread.sleep; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertFalse; @@ -131,36 +128,6 @@ public static void assertRawDestinationContains(final Database dst, } } - public static void assertNormalizedDestinationContains(final Database dst, final String outputSchema, final List<JsonNode> sourceRecords) - throws Exception { - assertNormalizedDestinationContains(dst, outputSchema, sourceRecords, STREAM_NAME); - } - - private static void assertNormalizedDestinationContains(final Database dst, - final String outputSchema, - final List<JsonNode> sourceRecords, - final String streamName) - throws Exception { - final String finalDestinationTable = String.format("%s.%s%s", outputSchema, OUTPUT_STREAM_PREFIX, streamName.replace(".", "_")); - final List<JsonNode> destinationRecords = Databases.retrieveRecordsFromDatabase(dst, finalDestinationTable); - for (final var record : destinationRecords) { - LOGGER.info("destination record: {}", record.toPrettyString()); - } - dropAirbyteSystemColumns(destinationRecords); - - assertEquals(sourceRecords.size(), destinationRecords.size(), - String.format("destination contains: %s record. source contains: %s", sourceRecords.size(), destinationRecords.size())); - for (final JsonNode sourceStreamRecord : sourceRecords) { - LOGGER.info("sourceStreamRecord: {}", sourceStreamRecord.toPrettyString()); - assertTrue(recordIsContainedIn(sourceStreamRecord, destinationRecords)); - assertTrue( - destinationRecords.stream() - .anyMatch(r -> r.get(COLUMN_NAME).asText().equals(sourceStreamRecord.get(COLUMN_NAME).asText()) - && r.get(COLUMN_ID).asInt() == sourceStreamRecord.get(COLUMN_ID).asInt()), - String.format("destination does not contain record:\n %s \n destination contains:\n %s\n", sourceStreamRecord, destinationRecords)); - } - } - @SuppressWarnings("PMD.ForLoopCanBeForeach") private static boolean recordIsContainedIn(JsonNode sourceStreamRecord, final List<JsonNode> destinationRecords) { // NOTE: I would expect the simple `equals` method to do this deep comparison, but it didn't seem to diff --git a/airbyte-tests/build.gradle.kts b/airbyte-tests/build.gradle.kts index 8a7f1facf09..4d47b32081f 100644 --- a/airbyte-tests/build.gradle.kts +++ b/airbyte-tests/build.gradle.kts @@ -3,57 +3,25 @@ import org.gradle.api.tasks.testing.logging.TestLogEvent plugins { id("io.airbyte.gradle.jvm.lib") } - -@Suppress("UnstableApiUsage") -testing { - registerTestSuite(name = "acceptanceTest", type = "acceptance-test", dirName = "test-acceptance") { - implementation.add(project()) - - implementation(project(":airbyte-api")) - implementation(project(":airbyte-commons")) - implementation(project(":airbyte-commons-auth")) - implementation(project(":airbyte-commons-temporal")) - implementation(project(":airbyte-config:config-models")) - implementation(project(":airbyte-config:config-persistence")) - implementation(project(":airbyte-db:db-lib")) - implementation(project(":airbyte-tests")) - implementation(project(":airbyte-test-utils")) - implementation(project(":airbyte-commons-worker")) - - implementation(libs.failsafe) - implementation(libs.jackson.databind) - implementation(libs.okhttp) - implementation(libs.temporal.sdk) - implementation(libs.platform.testcontainers.postgresql) - implementation(libs.postgresql) - - // needed for fabric to connect to k8s. - runtimeOnly(libs.bouncycastle.bcpkix) - runtimeOnly(libs.bouncycastle.bcprov) - } -} - /** * Registers a test-suite with Gradle's JvmTestSuite * @param name name the name of the test suite, must be unique, will match the name of the created task - * @param type name the name of this test suite, passed directly to the testType property - * @param dirName directory name which corresponds to this test-suite, assumes that this directory is located in `src` - * @param deps lambda for registering dependencies specific to this test-suite with this test-suite + * @param includeTags tags of the tests to be included in this test-suite */ @Suppress("UnstableApiUsage") -fun registerTestSuite(name: String, type: String, dirName: String, deps: JvmComponentDependencies.() -> Unit) { +fun registerTestSuite(name: String, includeTags: Array<String> = emptyArray()) { testing { suites.register<JvmTestSuite>(name) { - testType.set(type) - - deps(dependencies) + dependencies { + implementation(project()) + } sources { java { - setSrcDirs(listOf("src/$dirName/java")) + setSrcDirs(listOf("src/test-acceptance/java")) } resources { - setSrcDirs(listOf("src/$dirName/resources")) + setSrcDirs(listOf("src/test-acceptance/resources")) } } @@ -68,6 +36,9 @@ fun registerTestSuite(name: String, type: String, dirName: String, deps: JvmComp // we use this property for our log4j2 configuration.
Gradle creates a new JVM to run tests, so we need to explicitly pass this property "ciMode" to ciMode) + useJUnitPlatform { + includeTags(*includeTags) + } testLogging { events = setOf(TestLogEvent.PASSED, TestLogEvent.FAILED, TestLogEvent.STARTED, TestLogEvent.SKIPPED) } @@ -84,6 +55,12 @@ fun registerTestSuite(name: String, type: String, dirName: String, deps: JvmComp } } +registerTestSuite(name = "syncAcceptanceTest", includeTags = arrayOf("sync")) +registerTestSuite(name = "apiAcceptanceTest", includeTags = arrayOf("api")) +registerTestSuite(name = "builderAcceptanceTest", includeTags = arrayOf("builder")) +registerTestSuite(name = "enterpriseAcceptanceTest", includeTags = arrayOf("enterprise")) +registerTestSuite(name = "acceptanceTest") + configurations.configureEach { // Temporary hack to avoid dependency conflicts exclude(group = "io.micronaut.email") @@ -91,12 +68,29 @@ configurations.configureEach { dependencies { implementation(project(":airbyte-api")) + implementation(project(":airbyte-commons")) + implementation(project(":airbyte-commons-auth")) + implementation(project(":airbyte-commons-temporal")) + implementation(project(":airbyte-config:config-models")) + implementation(project(":airbyte-config:config-persistence")) + implementation(project(":airbyte-db:db-lib")) + implementation(project(":airbyte-test-utils")) + implementation(project(":airbyte-commons-worker")) implementation(project(":airbyte-container-orchestrator")) - testImplementation("com.airbyte:api:0.39.2") - implementation(libs.bundles.kubernetes.client) implementation(libs.platform.testcontainers) + implementation(libs.failsafe) + implementation(libs.jackson.databind) + implementation(libs.okhttp) + implementation(libs.temporal.sdk) + implementation(libs.platform.testcontainers.postgresql) + implementation(libs.postgresql) + + runtimeOnly(libs.bouncycastle.bcpkix) + runtimeOnly(libs.bouncycastle.bcprov) + + testImplementation("com.airbyte:api:0.39.2") testImplementation(libs.bundles.junit) testImplementation(libs.assertj.core) diff --git a/airbyte-tests/src/test-acceptance/java/io/airbyte/test/acceptance/AdvancedAcceptanceTests.java b/airbyte-tests/src/test-acceptance/java/io/airbyte/test/acceptance/AdvancedAcceptanceTests.java index dca2306ee87..2b115990f5f 100644 --- a/airbyte-tests/src/test-acceptance/java/io/airbyte/test/acceptance/AdvancedAcceptanceTests.java +++ b/airbyte-tests/src/test-acceptance/java/io/airbyte/test/acceptance/AdvancedAcceptanceTests.java @@ -28,6 +28,8 @@ import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.MethodOrderer; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Tags; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.TestInstance; import org.junit.jupiter.api.TestInstance.Lifecycle; @@ -56,6 +58,7 @@ @SuppressWarnings({"ConstantConditions"}) @TestMethodOrder(MethodOrderer.OrderAnnotation.class) @TestInstance(Lifecycle.PER_CLASS) +@Tags({@Tag("sync"), @Tag("enterprise")}) class AdvancedAcceptanceTests { private static final Logger LOGGER = LoggerFactory.getLogger(AdvancedAcceptanceTests.class); diff --git a/airbyte-tests/src/test-acceptance/java/io/airbyte/test/acceptance/ApiAcceptanceTests.java b/airbyte-tests/src/test-acceptance/java/io/airbyte/test/acceptance/ApiAcceptanceTests.java index 7f7cc129671..39db14ae640 100644 --- a/airbyte-tests/src/test-acceptance/java/io/airbyte/test/acceptance/ApiAcceptanceTests.java +++ 
b/airbyte-tests/src/test-acceptance/java/io/airbyte/test/acceptance/ApiAcceptanceTests.java @@ -5,10 +5,8 @@ package io.airbyte.test.acceptance; import static io.airbyte.test.acceptance.AcceptanceTestsResources.TRUE; -import static io.airbyte.test.acceptance.AcceptanceTestsResources.WITHOUT_SCD_TABLE; import static io.airbyte.test.utils.AcceptanceTestHarness.COLUMN_ID; import static io.airbyte.test.utils.AcceptanceTestHarness.COLUMN_NAME; -import static io.airbyte.test.utils.AcceptanceTestHarness.PUBLIC_SCHEMA_NAME; import static io.airbyte.test.utils.AcceptanceTestUtils.IS_GKE; import static io.airbyte.test.utils.AcceptanceTestUtils.modifyCatalog; import static org.junit.jupiter.api.Assertions.assertDoesNotThrow; @@ -25,17 +23,13 @@ import io.airbyte.api.client.model.generated.DestinationSyncMode; import io.airbyte.api.client.model.generated.JobInfoRead; import io.airbyte.api.client.model.generated.JobStatus; -import io.airbyte.api.client.model.generated.OperationRead; import io.airbyte.api.client.model.generated.SourceDefinitionSpecificationRead; import io.airbyte.api.client.model.generated.SourceDiscoverSchemaRead; import io.airbyte.api.client.model.generated.SourceRead; import io.airbyte.api.client.model.generated.SyncMode; -import io.airbyte.api.client.model.generated.WebhookConfigWrite; -import io.airbyte.api.client.model.generated.WorkspaceRead; import io.airbyte.commons.json.Jsons; import io.airbyte.db.jdbc.JdbcUtils; import io.airbyte.test.utils.AcceptanceTestHarness; -import io.airbyte.test.utils.Asserts; import io.airbyte.test.utils.TestConnectionCreate; import io.micronaut.http.HttpStatus; import java.io.IOException; @@ -45,6 +39,7 @@ import java.util.UUID; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.condition.DisabledIfEnvironmentVariable; import org.openapitools.client.infrastructure.ClientException; @@ -68,8 +63,7 @@ */ @SuppressWarnings({"PMD.JUnitTestsShouldIncludeAssert", "DataFlowIssue", "SqlDialectInspection", "SqlNoDataSourceInspection", "PMD.AvoidDuplicateLiterals"}) -@DisabledIfEnvironmentVariable(named = "SKIP_BASIC_ACCEPTANCE_TESTS", - matches = "true") +@Tag("api") class ApiAcceptanceTests { private static final Logger LOGGER = LoggerFactory.getLogger(ApiAcceptanceTests.class); @@ -261,59 +255,4 @@ void testDeleteConnection() throws Exception { } } - @Test - @DisabledIfEnvironmentVariable(named = IS_GKE, - matches = TRUE, - disabledReason = "GKE deployment applies extra validation") - void testWebhookOperationExecutesSuccessfully() throws Exception { - // create workspace webhook config - final WorkspaceRead workspaceRead = - testHarness.updateWorkspaceWebhookConfigs(workspaceId, List.of(new WebhookConfigWrite("reqres test", null, null, null))); - // create a webhook operation - final OperationRead operationRead = testHarness.createDbtCloudWebhookOperation(workspaceId, workspaceRead.getWebhookConfigs().get(0).getId()); - // create a connection with the new operation. - final UUID sourceId = testHarness.createPostgresSource().getSourceId(); - final UUID destinationId = testHarness.createPostgresDestination().getDestinationId(); - // NOTE: this is a normalization operation. 
- final UUID normalizationOpId = testHarness.createNormalizationOperation().getOperationId(); - final SourceDiscoverSchemaRead discoverResult = testHarness.discoverSourceSchemaWithId(sourceId); - final SyncMode srcSyncMode = SyncMode.FULL_REFRESH; - final DestinationSyncMode dstSyncMode = DestinationSyncMode.OVERWRITE; - final AirbyteCatalog catalog = modifyCatalog( - discoverResult.getCatalog(), - Optional.of(srcSyncMode), - Optional.of(dstSyncMode), - Optional.empty(), - Optional.empty(), - Optional.of(true), - Optional.empty(), - Optional.empty(), - Optional.empty(), - Optional.empty(), - Optional.empty(), - Optional.empty()); - final var conn = - testHarness.createConnection(new TestConnectionCreate.Builder( - sourceId, - destinationId, - catalog, - discoverResult.getCatalogId()) - .setNormalizationOperationId(normalizationOpId) - .setAdditionalOperationIds(List.of(operationRead.getOperationId())) - .build()); - final var connectionId = conn.getConnectionId(); - - // run the sync - final var jobInfoRead = testHarness.syncConnection(connectionId); - testResources.waitForSuccessfulJobWithRetries(jobInfoRead.getJob()); - Asserts.assertSourceAndDestinationDbRawRecordsInSync( - testHarness.getSourceDatabase(), testHarness.getDestinationDatabase(), PUBLIC_SCHEMA_NAME, - conn.getNamespaceFormat(), true, - WITHOUT_SCD_TABLE); - testHarness.deleteConnection(connectionId); - // remove connection to avoid exception during tear down - testHarness.removeConnection(connectionId); - // TODO(mfsiega-airbyte): add webhook info to the jobs api to verify the webhook execution status. - } - } diff --git a/airbyte-tests/src/test-acceptance/java/io/airbyte/test/acceptance/ConnectorBuilderTests.java b/airbyte-tests/src/test-acceptance/java/io/airbyte/test/acceptance/ConnectorBuilderTests.java index eaab6dafff4..91e9ce701bb 100644 --- a/airbyte-tests/src/test-acceptance/java/io/airbyte/test/acceptance/ConnectorBuilderTests.java +++ b/airbyte-tests/src/test-acceptance/java/io/airbyte/test/acceptance/ConnectorBuilderTests.java @@ -43,6 +43,8 @@ import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.MethodOrderer; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Tags; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.TestInstance; import org.junit.jupiter.api.TestMethodOrder; @@ -53,8 +55,6 @@ /** * Connector Builder-only acceptance tests. */ -@DisabledIfEnvironmentVariable(named = "SKIP_BASIC_ACCEPTANCE_TESTS", - matches = "true") @DisabledIfEnvironmentVariable(named = "IS_GKE", matches = "TRUE", disabledReason = "Cloud GKE environment is preventing unsecured http requests") @@ -63,6 +63,7 @@ // The tests methods already have the right visibility, but PMD complains. // Silence it as it's a bug. 
@SuppressWarnings("PMD.JUnit5TestShouldBePackagePrivate") +@Tags({@Tag("builder"), @Tag("enterprise")}) public class ConnectorBuilderTests { private static final String ECHO_SERVER_IMAGE = "mendhak/http-https-echo:29"; diff --git a/airbyte-tests/src/test-acceptance/java/io/airbyte/test/acceptance/SchemaManagementTests.java b/airbyte-tests/src/test-acceptance/java/io/airbyte/test/acceptance/SchemaManagementTests.java index b3906622f42..446619c5011 100644 --- a/airbyte-tests/src/test-acceptance/java/io/airbyte/test/acceptance/SchemaManagementTests.java +++ b/airbyte-tests/src/test-acceptance/java/io/airbyte/test/acceptance/SchemaManagementTests.java @@ -32,7 +32,6 @@ import io.airbyte.api.client.model.generated.WorkspaceCreate; import io.airbyte.commons.json.Jsons; import io.airbyte.test.utils.AcceptanceTestHarness; -import io.airbyte.test.utils.Asserts; import io.airbyte.test.utils.TestConnectionCreate; import java.io.IOException; import java.net.URISyntaxException; @@ -46,9 +45,9 @@ import java.util.concurrent.TimeUnit; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.Timeout; -import org.junit.jupiter.api.condition.DisabledIfEnvironmentVariable; import org.junit.jupiter.api.parallel.Execution; import org.junit.jupiter.api.parallel.ExecutionMode; import org.slf4j.Logger; @@ -57,11 +56,10 @@ /** * Tests for the various schema management functionalities e.g., auto-detect, auto-propagate. */ -@DisabledIfEnvironmentVariable(named = "SKIP_BASIC_ACCEPTANCE_TESTS", - matches = "true") @Timeout(value = 2, unit = TimeUnit.MINUTES) // Default timeout of 2 minutes; individual tests should override if they need longer. @Execution(ExecutionMode.CONCURRENT) +@Tag("api") class SchemaManagementTests { private static final Logger LOGGER = LoggerFactory.getLogger(SchemaManagementTests.class); @@ -85,7 +83,6 @@ private void createTestConnections() throws Exception { final UUID sourceId = testHarness.createPostgresSource().getSourceId(); final SourceDiscoverSchemaRead discoverResult = testHarness.discoverSourceSchemaWithId(sourceId); final UUID destinationId = testHarness.createPostgresDestination().getDestinationId(); - final UUID normalizationOpId = testHarness.createNormalizationOperation().getOperationId(); // Use incremental append-dedup with a primary key column, so we can simulate a breaking change by // removing that column. final SyncMode syncMode = SyncMode.INCREMENTAL; @@ -108,9 +105,7 @@ private void createTestConnections() throws Exception { sourceId, destinationId, catalog, - discoverResult.getCatalogId()) - .setNormalizationOperationId(normalizationOpId) - .build()); + discoverResult.getCatalogId()).build()); LOGGER.info("Created connection: {}", createdConnection); // Create a connection that shares the source, to verify that the schema management actions are // applied to all connections with the same source. 
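Note: the @Tag/@Tags annotations added across these acceptance-test classes pair with the registerTestSuite(name, includeTags) helper introduced in airbyte-tests/build.gradle.kts above. Each named suite filters on its tags via useJUnitPlatform { includeTags(...) }, while the plain acceptanceTest suite passes no tags and so keeps running everything. A minimal sketch of how a class opts into one of the new suites (the class name below is hypothetical, not a file in this PR):

    import org.junit.jupiter.api.Tag;
    import org.junit.jupiter.api.Test;

    // Illustrative only: the "api" tag routes this class into the apiAcceptanceTest
    // suite, which was registered with includeTags = arrayOf("api"). The untagged
    // acceptanceTest suite applies no tag filter and still picks it up as well.
    @Tag("api")
    class ExampleApiAcceptanceTest {

      @Test
      void coveredByTheApiSuite() {
        // Selected when the apiAcceptanceTest task runs its JUnit Platform tag filter.
      }
    }

Classes that belong to several suites, such as the @Tags({@Tag("sync"), @Tag("enterprise")}) combinations used elsewhere in this patch, are simply picked up by each matching suite.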
@@ -205,8 +200,6 @@ void testPropagateAllChangesViaSyncRefresh() throws Exception { final AirbyteCatalog catalogWithPropagatedChanges = getExpectedCatalogWithExtraColumnAndTable(); assertEquals(catalogWithPropagatedChanges, currentConnection.getSyncCatalog()); assertEquals(ConnectionStatus.ACTIVE, currentConnection.getStatus()); - Asserts.assertNormalizedDestinationContains(testHarness.getDestinationDatabase(), currentConnection.getNamespaceFormat(), - getExpectedRecordsForIdAndNameWithUpdatedCatalog()); // This connection does not have auto propagation, so it should have stayed the same. final ConnectionRead currentConnectionWithSameSource = testHarness.getConnection(createdConnectionWithSameSource.getConnectionId()); @@ -224,8 +217,6 @@ void testBackfillDisabled() throws Exception { // Run a sync with the initial data. final var jobRead = testHarness.syncConnection(createdConnection.getConnectionId()).getJob(); testHarness.waitForSuccessfulSyncNoTimeout(jobRead); - Asserts.assertNormalizedDestinationContains(testHarness.getDestinationDatabase(), createdConnection.getNamespaceFormat(), - getExpectedRecordsForIdAndName()); // Modify the source to add a new column and populate it with default values. testHarness.runSqlScriptInSource("postgres_add_column_with_default_value.sql"); @@ -236,8 +227,6 @@ testHarness.waitForSuccessfulSyncNoTimeout(jobReadWithBackfills); final var currentConnection = testHarness.getConnection(createdConnection.getConnectionId()); assertEquals(3, currentConnection.getSyncCatalog().getStreams().getFirst().getStream().getJsonSchema().get("properties").size()); - Asserts.assertNormalizedDestinationContains(testHarness.getDestinationDatabase(), createdConnection.getNamespaceFormat(), - getExpectedRecordsForIdAndNameWithUpdatedCatalog()); } @Test @@ -249,9 +238,7 @@ void testBackfillOnNewColumn() throws Exception { SchemaChangeBackfillPreference.ENABLED); // Run a sync with the initial data. final var jobRead = testHarness.syncConnection(createdConnection.getConnectionId()).getJob(); - testHarness.waitForSuccessfulSyncNoTimeout(jobRead); - Asserts.assertNormalizedDestinationContains(testHarness.getDestinationDatabase(), createdConnection.getNamespaceFormat(), - getExpectedRecordsForIdAndName()); + testHarness.waitForSuccessfulSyncNoTimeout(jobRead); // Modify the source to add a new column, which will be populated with a default value. testHarness.runSqlScriptInSource("postgres_add_column_with_default_value.sql"); @@ -262,8 +249,6 @@ final var currentConnection = testHarness.getConnection(createdConnection.getConnectionId()); // Expect that we have the two original fields, plus the new one.
assertEquals(3, currentConnection.getSyncCatalog().getStreams().getFirst().getStream().getJsonSchema().get("properties").size()); - Asserts.assertNormalizedDestinationContains(testHarness.getDestinationDatabase(), createdConnection.getNamespaceFormat(), - getExpectedRecordsForIdAndNameWithBackfilledColumn()); } @Test diff --git a/airbyte-tests/src/test-acceptance/java/io/airbyte/test/acceptance/SyncAcceptanceTests.java b/airbyte-tests/src/test-acceptance/java/io/airbyte/test/acceptance/SyncAcceptanceTests.java index 22bf1439750..3f905f14457 100644 --- a/airbyte-tests/src/test-acceptance/java/io/airbyte/test/acceptance/SyncAcceptanceTests.java +++ b/airbyte-tests/src/test-acceptance/java/io/airbyte/test/acceptance/SyncAcceptanceTests.java @@ -6,27 +6,21 @@ import static io.airbyte.test.acceptance.AcceptanceTestsResources.FINAL_INTERVAL_SECS; import static io.airbyte.test.acceptance.AcceptanceTestsResources.JITTER_MAX_INTERVAL_SECS; -import static io.airbyte.test.acceptance.AcceptanceTestsResources.KUBE; import static io.airbyte.test.acceptance.AcceptanceTestsResources.MAX_TRIES; import static io.airbyte.test.acceptance.AcceptanceTestsResources.TRUE; import static io.airbyte.test.acceptance.AcceptanceTestsResources.WITHOUT_SCD_TABLE; -import static io.airbyte.test.acceptance.AcceptanceTestsResources.WITH_SCD_TABLE; -import static io.airbyte.test.utils.AcceptanceTestHarness.COLUMN_ID; import static io.airbyte.test.utils.AcceptanceTestHarness.PUBLIC; import static io.airbyte.test.utils.AcceptanceTestHarness.PUBLIC_SCHEMA_NAME; -import static io.airbyte.test.utils.AcceptanceTestHarness.STREAM_NAME; import static io.airbyte.test.utils.AcceptanceTestUtils.IS_GKE; import static io.airbyte.test.utils.AcceptanceTestUtils.modifyCatalog; import static org.junit.jupiter.api.Assertions.assertEquals; import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.node.ObjectNode; import com.google.common.collect.ImmutableMap; import dev.failsafe.Failsafe; import dev.failsafe.RetryPolicy; import io.airbyte.api.client.model.generated.AirbyteCatalog; import io.airbyte.api.client.model.generated.CheckConnectionRead; -import io.airbyte.api.client.model.generated.ConnectionRead; import io.airbyte.api.client.model.generated.ConnectionScheduleData; import io.airbyte.api.client.model.generated.ConnectionScheduleDataCron; import io.airbyte.api.client.model.generated.ConnectionScheduleType; @@ -34,16 +28,12 @@ import io.airbyte.api.client.model.generated.JobInfoRead; import io.airbyte.api.client.model.generated.JobRead; import io.airbyte.api.client.model.generated.JobStatus; -import io.airbyte.api.client.model.generated.OperationRead; -import io.airbyte.api.client.model.generated.SelectedFieldInfo; import io.airbyte.api.client.model.generated.SourceDefinitionRead; import io.airbyte.api.client.model.generated.SourceDiscoverSchemaRead; import io.airbyte.api.client.model.generated.SourceRead; -import io.airbyte.api.client.model.generated.StreamDescriptor; import io.airbyte.api.client.model.generated.StreamStatusJobType; import io.airbyte.api.client.model.generated.StreamStatusRunState; import io.airbyte.api.client.model.generated.SyncMode; -import io.airbyte.api.client.model.generated.WebBackendConnectionUpdate; import io.airbyte.commons.json.Jsons; import io.airbyte.db.Database; import io.airbyte.test.utils.AcceptanceTestHarness; @@ -57,16 +47,12 @@ import java.util.Optional; import java.util.Set; import java.util.UUID; -import java.util.stream.Collectors; -import org.jooq.impl.DSL; -import 
org.jooq.impl.SQLDataType; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.TestInfo; import org.junit.jupiter.api.condition.DisabledIfEnvironmentVariable; -import org.junit.jupiter.api.condition.EnabledIfEnvironmentVariable; import org.junit.jupiter.api.parallel.Execution; import org.junit.jupiter.api.parallel.ExecutionMode; import org.slf4j.Logger; @@ -90,9 +76,8 @@ @SuppressWarnings({"PMD.JUnitTestsShouldIncludeAssert", "DataFlowIssue", "SqlDialectInspection", "SqlNoDataSourceInspection", "PMD.AvoidDuplicateLiterals"}) -@DisabledIfEnvironmentVariable(named = "SKIP_BASIC_ACCEPTANCE_TESTS", - matches = "true") @Execution(ExecutionMode.CONCURRENT) +@Tag("sync") class SyncAcceptanceTests { private static final Logger LOGGER = LoggerFactory.getLogger(SyncAcceptanceTests.class); @@ -356,279 +341,6 @@ void testMultipleSchemasAndTablesSyncAndReset() throws Exception { assertDestinationDbEmpty(testHarness.getDestinationDatabase()); } - // TODO (Angel): Enable once we fix the docker compose tests - @Test - @EnabledIfEnvironmentVariable(named = KUBE, - matches = TRUE) - @DisabledIfEnvironmentVariable(named = IS_GKE, - matches = TRUE, - disabledReason = SLOW_TEST_IN_GKE) - void testPartialResetResetAllWhenSchemaIsModified(final TestInfo testInfo) throws Exception { - LOGGER.info("Running: " + testInfo.getDisplayName()); - - // Add Table - final String additionalTable = "additional_table"; - final Database sourceDb = testHarness.getSourceDatabase(); - sourceDb.query(ctx -> { - ctx.createTableIfNotExists(additionalTable) - .columns(DSL.field("id", SQLDataType.INTEGER), DSL.field(FIELD, SQLDataType.VARCHAR)).execute(); - ctx.truncate(additionalTable).execute(); - ctx.insertInto(DSL.table(additionalTable)).columns(DSL.field("id"), DSL.field(FIELD)).values(1, - "1").execute(); - ctx.insertInto(DSL.table(additionalTable)).columns(DSL.field("id"), DSL.field(FIELD)).values(2, - "2").execute(); - return null; - }); - UUID sourceId = testHarness.createPostgresSource().getSourceId(); - final SourceDiscoverSchemaRead discoverResult = testHarness.discoverSourceSchemaWithId(sourceId); - final UUID destinationId = testHarness.createPostgresDestination().getDestinationId(); - final OperationRead operation = testHarness.createNormalizationOperation(); - final AirbyteCatalog catalog = modifyCatalog( - discoverResult.getCatalog(), - Optional.empty(), - Optional.empty(), - Optional.empty(), - Optional.empty(), - Optional.of(true), - Optional.empty(), - Optional.empty(), - Optional.empty(), - Optional.empty(), - Optional.empty(), - Optional.empty()); - testHarness.setIncrementalAppendSyncMode(catalog, List.of(COLUMN_ID)); - - final ConnectionRead connection = - testHarness.createConnection(new TestConnectionCreate.Builder( - sourceId, - destinationId, - catalog, - discoverResult.getCatalogId()) - .build()); - - // Run initial sync - final JobInfoRead syncRead = testHarness.syncConnection(connection.getConnectionId()); - testHarness.waitForSuccessfulJob(syncRead.getJob()); - - Asserts.assertSourceAndDestinationDbRawRecordsInSync( - testHarness.getSourceDatabase(), testHarness.getDestinationDatabase(), PUBLIC_SCHEMA_NAME, - connection.getNamespaceFormat(), false, WITHOUT_SCD_TABLE); - Asserts.assertStreamStateContainsStream(testHarness, connection.getConnectionId(), List.of( - new StreamDescriptor(ID_AND_NAME, PUBLIC), - new 
StreamDescriptor(additionalTable, PUBLIC))); - - LOGGER.info("Initial sync ran, now running an update with a stream being removed."); - - /* - * Remove stream - */ - sourceDb.query(ctx -> ctx.dropTableIfExists(additionalTable).execute()); - - // Update with refreshed catalog - AirbyteCatalog refreshedCatalog = modifyCatalog( - testHarness.discoverSourceSchemaWithoutCache(sourceId), - Optional.empty(), - Optional.empty(), - Optional.empty(), - Optional.empty(), - Optional.of(true), - Optional.empty(), - Optional.empty(), - Optional.empty(), - Optional.empty(), - Optional.empty(), - Optional.empty()); - WebBackendConnectionUpdate update = testHarness.getUpdateInput(connection, refreshedCatalog, - operation); - testHarness.webBackendUpdateConnection(update); - - // Wait until the sync from the UpdateConnection is finished - final JobRead syncFromTheUpdate1 = - testHarness.waitUntilTheNextJobIsStarted(connection.getConnectionId(), - syncRead.getJob().getId()); - testHarness.waitForSuccessfulJob(syncFromTheUpdate1); - - // We do not check that the source and the dest are in sync here because removing a stream - // doesn't remove that - Asserts.assertStreamStateContainsStream(testHarness, connection.getConnectionId(), List.of( - new StreamDescriptor(ID_AND_NAME, PUBLIC))); - - LOGGER.info("Remove done, now running an update with a stream being added."); - - /* - * Add a stream -- the value of in the table are different than the initial import to ensure that it - * is properly reset. - */ - sourceDb.query(ctx -> { - ctx.createTableIfNotExists(additionalTable) - .columns(DSL.field("id", SQLDataType.INTEGER), DSL.field(FIELD, SQLDataType.VARCHAR)).execute(); - ctx.truncate(additionalTable).execute(); - ctx.insertInto(DSL.table(additionalTable)).columns(DSL.field("id"), DSL.field(FIELD)).values(3, - "3").execute(); - ctx.insertInto(DSL.table(additionalTable)).columns(DSL.field("id"), DSL.field(FIELD)).values(4, - "4").execute(); - return null; - }); - - sourceId = testHarness.createPostgresSource().getSourceId(); - refreshedCatalog = refreshedCatalog = modifyCatalog( - testHarness.discoverSourceSchema(sourceId), - Optional.empty(), - Optional.empty(), - Optional.empty(), - Optional.empty(), - Optional.of(true), - Optional.empty(), - Optional.empty(), - Optional.empty(), - Optional.empty(), - Optional.empty(), - Optional.empty()); - update = testHarness.getUpdateInput(connection, refreshedCatalog, operation); - testHarness.webBackendUpdateConnection(update); - - final JobRead syncFromTheUpdate2 = - testHarness.waitUntilTheNextJobIsStarted(connection.getConnectionId(), - syncFromTheUpdate1.getId()); - testHarness.waitForSuccessfulJob(syncFromTheUpdate2); - - // We do not check that the source and the dest are in sync here because removing a stream - // doesn't remove that - Asserts.assertSourceAndDestinationDbRawRecordsInSync( - testHarness.getSourceDatabase(), testHarness.getDestinationDatabase(), PUBLIC_SCHEMA_NAME, - connection.getNamespaceFormat(), true, WITHOUT_SCD_TABLE); - Asserts.assertStreamStateContainsStream(testHarness, connection.getConnectionId(), List.of( - new StreamDescriptor(ID_AND_NAME, PUBLIC), - new StreamDescriptor(additionalTable, PUBLIC))); - - LOGGER.info("Addition done, now running an update with a stream being updated."); - - // Update - sourceDb.query(ctx -> { - ctx.dropTableIfExists(additionalTable).execute(); - ctx.createTableIfNotExists(additionalTable) - .columns(DSL.field("id", SQLDataType.INTEGER), DSL.field(FIELD, SQLDataType.VARCHAR), - DSL.field("another_field", 
SQLDataType.VARCHAR)) - .execute(); - ctx.truncate(additionalTable).execute(); - ctx.insertInto(DSL.table(additionalTable)).columns(DSL.field("id"), DSL.field(FIELD), - DSL.field("another_field")).values(3, "3", "three") - .execute(); - ctx.insertInto(DSL.table(additionalTable)).columns(DSL.field("id"), DSL.field(FIELD), - DSL.field("another_field")).values(4, "4", "four") - .execute(); - return null; - }); - - sourceId = testHarness.createPostgresSource().getSourceId(); - refreshedCatalog = modifyCatalog( - testHarness.discoverSourceSchema(sourceId), - Optional.empty(), - Optional.empty(), - Optional.empty(), - Optional.empty(), - Optional.of(true), - Optional.empty(), - Optional.empty(), - Optional.empty(), - Optional.empty(), - Optional.empty(), - Optional.empty()); - update = testHarness.getUpdateInput(connection, refreshedCatalog, operation); - testHarness.webBackendUpdateConnection(update); - - final JobRead syncFromTheUpdate3 = - testHarness.waitUntilTheNextJobIsStarted(connection.getConnectionId(), - syncFromTheUpdate2.getId()); - testHarness.waitForSuccessfulJob(syncFromTheUpdate3); - - // We do not check that the source and the dest are in sync here because removing a stream - // doesn't remove that - Asserts.assertSourceAndDestinationDbRawRecordsInSync( - testHarness.getSourceDatabase(), testHarness.getDestinationDatabase(), PUBLIC_SCHEMA_NAME, - connection.getNamespaceFormat(), true, WITHOUT_SCD_TABLE); - Asserts.assertStreamStateContainsStream(testHarness, connection.getConnectionId(), List.of( - new StreamDescriptor(ID_AND_NAME, PUBLIC), - new StreamDescriptor(additionalTable, PUBLIC))); - } - - @Test - void testIncrementalDedupeSyncRemoveOneColumn() throws Exception { - final UUID sourceId = testHarness.createPostgresSource().getSourceId(); - final UUID destinationId = testHarness.createPostgresDestination().getDestinationId(); - final UUID normalizationOpId = testHarness.createNormalizationOperation().getOperationId(); - final SourceDiscoverSchemaRead discoverResult = testHarness.discoverSourceSchemaWithId(sourceId); - final SyncMode srcSyncMode = SyncMode.INCREMENTAL; - final DestinationSyncMode dstSyncMode = DestinationSyncMode.APPEND_DEDUP; - final AirbyteCatalog catalog = modifyCatalog( - discoverResult.getCatalog(), - Optional.of(srcSyncMode), - Optional.of(dstSyncMode), - Optional.of(List.of(COLUMN_ID)), - Optional.of(List.of(List.of(COLUMN_ID))), - Optional.of(true), - Optional.empty(), - Optional.empty(), - Optional.empty(), - Optional.empty(), - Optional.empty(), - Optional.empty()); - final var conn = - testHarness.createConnection(new TestConnectionCreate.Builder( - sourceId, - destinationId, - catalog, - discoverResult.getCatalogId()) - .setNormalizationOperationId(normalizationOpId) - .build()); - final var connectionId = conn.getConnectionId(); - // sync from start - LOGGER.info("First incremental sync"); - final JobInfoRead connectionSyncRead1 = testHarness.syncConnection(connectionId); - testHarness.waitForSuccessfulJob(connectionSyncRead1.getJob()); - LOGGER.info("state after sync: {}", testHarness.getConnectionState(connectionId)); - - final var dst = testHarness.getDestinationDatabase(); - Asserts.assertSourceAndDestinationDbRawRecordsInSync(testHarness.getSourceDatabase(), dst, PUBLIC_SCHEMA_NAME, conn.getNamespaceFormat(), true, - WITH_SCD_TABLE); - - // Update the catalog, so we only select the id column. 
- final AirbyteCatalog updatedCatalog = modifyCatalog( - catalog, - Optional.empty(), - Optional.empty(), - Optional.empty(), - Optional.empty(), - Optional.empty(), - Optional.of(true), - Optional.of(List.of(new SelectedFieldInfo(List.of("id")))), - Optional.empty(), - Optional.empty(), - Optional.empty(), - Optional.empty()); - testHarness.updateConnectionCatalog(connectionId, updatedCatalog); - - // add new records and run again. - LOGGER.info("Adding new records to source database"); - final Database source = testHarness.getSourceDatabase(); - final List<JsonNode> expectedRawRecords = testHarness.retrieveRecordsFromDatabase(source, STREAM_NAME); - source.query(ctx -> ctx.execute("INSERT INTO id_and_name(id, name) VALUES(6, 'mike')")); - source.query(ctx -> ctx.execute("INSERT INTO id_and_name(id, name) VALUES(7, 'chris')")); - // The expected new raw records should only have the ID column. - expectedRawRecords.add(Jsons.jsonNode(ImmutableMap.builder().put(COLUMN_ID, 6).build())); - expectedRawRecords.add(Jsons.jsonNode(ImmutableMap.builder().put(COLUMN_ID, 7).build())); - final JobInfoRead connectionSyncRead2 = testHarness.syncConnection(connectionId); - LOGGER.info("Running second sync: job {} with status {}", connectionSyncRead2.getJob().getId(), connectionSyncRead2.getJob().getStatus()); - testHarness.waitForSuccessfulJob(connectionSyncRead2.getJob()); - LOGGER.info("state after sync: {}", testHarness.getConnectionState(connectionId)); - - // For the normalized records, they should all only have the ID column. - final List<JsonNode> expectedNormalizedRecords = testHarness.retrieveRecordsFromDatabase(source, STREAM_NAME).stream() - .map((record) -> ((ObjectNode) record).retain(COLUMN_ID)).collect(Collectors.toList()); - Asserts.assertRawDestinationContains(dst, expectedRawRecords, conn.getNamespaceFormat(), STREAM_NAME); - testHarness.assertNormalizedDestinationContainsIdColumn(conn.getNamespaceFormat(), expectedNormalizedRecords); - } - static void assertDestinationDbEmpty(final Database dst) throws Exception { final Set<SchemaTableNamePair> destinationTables = Databases.listAllTables(dst); diff --git a/airbyte-tests/src/test-acceptance/java/io/airbyte/test/acceptance/VersioningAcceptanceTests.java b/airbyte-tests/src/test-acceptance/java/io/airbyte/test/acceptance/VersioningAcceptanceTests.java index 3bbcfaaf1c4..ebda0eac38d 100644 --- a/airbyte-tests/src/test-acceptance/java/io/airbyte/test/acceptance/VersioningAcceptanceTests.java +++ b/airbyte-tests/src/test-acceptance/java/io/airbyte/test/acceptance/VersioningAcceptanceTests.java @@ -23,12 +23,15 @@ import java.util.Optional; import java.util.UUID; import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Tags; import org.junit.jupiter.api.TestInstance; import org.junit.jupiter.api.TestInstance.Lifecycle; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.CsvSource; @TestInstance(Lifecycle.PER_CLASS) +@Tags({@Tag("sync"), @Tag("enterprise")}) class VersioningAcceptanceTests { private static AirbyteApiClient apiClient2; diff --git a/airbyte-tests/src/test-acceptance/java/io/airbyte/test/acceptance/WorkloadBasicAcceptanceTests.java b/airbyte-tests/src/test-acceptance/java/io/airbyte/test/acceptance/WorkloadBasicAcceptanceTests.java index ba16472e77c..f7246113b6f 100644 --- a/airbyte-tests/src/test-acceptance/java/io/airbyte/test/acceptance/WorkloadBasicAcceptanceTests.java +++ b/airbyte-tests/src/test-acceptance/java/io/airbyte/test/acceptance/WorkloadBasicAcceptanceTests.java @@
-17,6 +17,8 @@ import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Tags; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.condition.DisabledIfEnvironmentVariable; import org.junit.jupiter.api.condition.EnabledIfEnvironmentVariable; @@ -26,6 +28,7 @@ * components, and we migrate more operations to them, we will run these tests in CI to catch * regressions. */ +@Tags({@Tag("sync"), @Tag("enterprise")}) public class WorkloadBasicAcceptanceTests { AcceptanceTestsResources testResources = new AcceptanceTestsResources(); diff --git a/airbyte-webapp/.eslintrc.js b/airbyte-webapp/.eslintrc.js index 8d93ac41af2..5214bd8e6fb 100644 --- a/airbyte-webapp/.eslintrc.js +++ b/airbyte-webapp/.eslintrc.js @@ -85,6 +85,7 @@ module.exports = { "check-file", "react", "react-hooks", + "no-only-tests", ], parser: "@typescript-eslint/parser", parserOptions: { @@ -474,6 +475,7 @@ module.exports = { extends: ["plugin:jest/recommended"], rules: { "jest/consistent-test-it": ["warn", { fn: "it", withinDescribe: "it" }], + "no-only-tests/no-only-tests": "error", }, }, { @@ -488,6 +490,7 @@ module.exports = { "cypress/no-unnecessary-waiting": "warn", "no-template-curly-in-string": "off", "@typescript-eslint/no-unused-expressions": "off", + "no-only-tests/no-only-tests": "error", }, }, { diff --git a/airbyte-webapp/cypress/cloud-e2e/connector-builder.cy.ts b/airbyte-webapp/cypress/cloud-e2e/connector-builder.cy.ts deleted file mode 100644 index 2f43b6a7ca7..00000000000 --- a/airbyte-webapp/cypress/cloud-e2e/connector-builder.cy.ts +++ /dev/null @@ -1,47 +0,0 @@ -import { appendRandomString } from "@cy/commands/common"; - -const builderLink = "a[data-testid='builderLink']"; -const nameLabel = "[data-testid='connector-name-label']"; -const nameInput = "[data-testid='connector-name-input']"; - -describe("running the connector builder on cloud", () => { - let connectorName = ""; - beforeEach(() => { - cy.on("uncaught:exception", () => false); - connectorName = appendRandomString("localhost"); - cy.login(); - cy.selectWorkspace(); - cy.get(builderLink, { timeout: Cypress.config("pageLoadTimeout") }).click(); - - // handle case where the workspace already has a builder project - cy.url().then((url) => { - if (url.endsWith("/connector-builder")) { - cy.visit(`${url}/create`); - } - }); - }); - - afterEach(() => { - cy.get(`button[data-testid='exit-builder']`, { timeout: Cypress.config("pageLoadTimeout") }).click(); - cy.get(`button[data-testid='delete-project-button_${connectorName}']`).click(); - cy.contains("Delete").click(); - cy.logout(); - }); - - // try to run a test read against localhost to verify that the builder can execute the CDK - // without needing to rely on an actual API or set up a mock server - it("returns an error when trying to run against localhost", () => { - cy.get("button[data-testid='start-from-scratch']", { timeout: Cypress.config("pageLoadTimeout") }).click(); - cy.get(nameLabel).first().click(); - cy.get(nameInput, { timeout: Cypress.config("pageLoadTimeout") }).clear(); - cy.get(nameInput).type(connectorName); - cy.get("input[name='formValues.global.urlBase']").type("https://localhost:8000"); - cy.get("button[data-testid='add-stream']").click(); - cy.get("input[name='streamName']").type("test_stream"); - cy.get("input[name='urlPath']").type(`test_path{enter}`); - cy.get("button[data-testid='read-stream']").click(); - cy.get("pre", { timeout: 30000 
}).contains( - "Invalid URL endpoint: The endpoint that data is being requested from belongs to a private network." - ); - }); -}); diff --git a/airbyte-webapp/cypress/commands/cloud.ts b/airbyte-webapp/cypress/commands/cloud.ts index 12e45b068ee..a23c68ba07f 100644 --- a/airbyte-webapp/cypress/commands/cloud.ts +++ b/airbyte-webapp/cypress/commands/cloud.ts @@ -38,7 +38,7 @@ Cypress.Commands.add("login", (user: TestUserCredentials = testUser) => { // TODO rewrite to logout programmatically, instead of by clicking through the UI. This // will be faster and less brittle. Cypress.Commands.add("logout", () => { - cy.get("[data-testid='sidebar.userDropdown']").click(); + cy.get("[data-testid='sidebar.userDropdown']").click({ force: true }); cy.get("[data-testid='sidebar.signout']").click({ force: true }); cy.hasNavigatedTo("/login"); }); diff --git a/airbyte-webapp/cypress/commands/connector.ts b/airbyte-webapp/cypress/commands/connector.ts index 1d59cd3b65e..b5186b5298f 100644 --- a/airbyte-webapp/cypress/commands/connector.ts +++ b/airbyte-webapp/cypress/commands/connector.ts @@ -26,7 +26,7 @@ export const fillPostgresForm = ( ) => { cy.intercept("/api/v1/source_definition_specifications/get").as("getSourceSpecifications"); - selectServiceType("Postgres"); + selectServiceType("Postgres", "certified"); if (openOptional) { openOptionalFields(); @@ -44,7 +44,7 @@ export const fillPostgresForm = ( export const fillPokeAPIForm = (name: string, pokeName: string) => { cy.intercept("/api/v1/source_definition_specifications/get").as("getSourceSpecifications"); - selectServiceType("PokeAPI"); + selectServiceType("PokeAPI", "marketplace"); enterName(name); enterPokemonName(pokeName); @@ -53,7 +53,7 @@ export const fillPokeAPIForm = (name: string, pokeName: string) => { export const fillDummyApiForm = (name: string, apiKey: string) => { cy.intercept("/api/v1/source_definition_specifications/get").as("getSourceSpecifications"); - selectServiceType(name); + selectServiceType(name, "custom"); enterName(name); enterApiKey(apiKey); @@ -62,7 +62,7 @@ export const fillDummyApiForm = (name: string, apiKey: string) => { export const fillLocalJsonForm = (name: string, destinationPath: string) => { cy.intercept("/api/v1/destination_definition_specifications/get").as("getDestinationSpecifications"); - selectServiceType("Local JSON"); + selectServiceType("Local JSON", "marketplace"); cy.wait("@getDestinationSpecifications"); diff --git a/airbyte-webapp/cypress/e2e/connection/autoDetectSchema.cy.ts b/airbyte-webapp/cypress/e2e/connection/autoDetectSchema.cy.ts index bd2c8e61f09..4a32aaae386 100644 --- a/airbyte-webapp/cypress/e2e/connection/autoDetectSchema.cy.ts +++ b/airbyte-webapp/cypress/e2e/connection/autoDetectSchema.cy.ts @@ -80,7 +80,7 @@ describe("Connection - Auto-detect schema changes", () => { it("does not show non-breaking change on list page", () => { connectionListPage.visit(); - connectionListPage.getSchemaChangeIcon(connection, "non_breaking").should("not.exist"); + connectionListPage.getSchemaChangeIcon(connection, "warning").should("not.exist"); connectionListPage.getConnectionStateSwitch(connection).should("be.checked").and("be.enabled"); }); @@ -149,7 +149,7 @@ describe("Connection - Auto-detect schema changes", () => { it("shows breaking change on list page", () => { connectionListPage.visit(); - connectionListPage.getSchemaChangeIcon(connection, "breaking").should("exist"); + connectionListPage.getSchemaChangeIcon(connection, "error").should("exist"); 
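// (The "warning" and "error" icon types used here mirror the renamed testid, entitywarnings-<type>, introduced in connectionListPageObject.ts further below; "warning" covers non-breaking and "error" covers breaking schema changes.)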
     connectionListPage.getConnectionStateSwitch(connection).should("not.be.checked").and("not.be.enabled");
   });
diff --git a/airbyte-webapp/cypress/e2e/connection/configuration.cy.ts b/airbyte-webapp/cypress/e2e/connection/configuration.cy.ts
index 54f5fec0996..560166db759 100644
--- a/airbyte-webapp/cypress/e2e/connection/configuration.cy.ts
+++ b/airbyte-webapp/cypress/e2e/connection/configuration.cy.ts
@@ -471,16 +471,14 @@ describe("Connection Configuration", () => {
         row.checkSyncModeDropdownDisabled();
       });
     });
-    it("Stream filters are disabled and not applied", () => {
+    it("Stream filters are still enabled", () => {
       cy.get("@postgresConnection").then((connection) => {
         cy.visit(`/${RoutePaths.Connections}/${connection.connectionId}/${ConnectionRoutePaths.Replication}`);
         // input for filtering streams by name
-        cy.get('input[placeholder*="Search stream name"]').should("be.disabled");
-        cy.get('input[placeholder*="Search stream name"]').should("be.empty");
+        cy.get('input[placeholder*="Search stream name"]').should("be.enabled");
         // "hide disabled streams" switch
-        cy.get('[data-testid="hideDisableStreams-switch"]').should("be.disabled");
-        cy.get('[data-testid="hideDisableStreams-switch"]').should("be.not.checked");
+        cy.get('[data-testid="hideDisableStreams-switch"]').should("be.enabled");
       });
     });
   });
@@ -547,7 +545,7 @@ describe("Connection Configuration", () => {
     it("should show empty streams table", () => {
       cy.get("@postgresConnection").then((connection) => {
         cy.visit(`/${RoutePaths.Connections}/${connection.connectionId}/`);
-        cy.contains("Re-enable the connection to show stream sync progress");
+        cy.contains("users").should("exist");
       });
     });
diff --git a/airbyte-webapp/cypress/e2e/connection/createConnection.cy.ts b/airbyte-webapp/cypress/e2e/connection/createConnection.cy.ts
index 173d2eeaa6e..a4ba915e0c1 100644
--- a/airbyte-webapp/cypress/e2e/connection/createConnection.cy.ts
+++ b/airbyte-webapp/cypress/e2e/connection/createConnection.cy.ts
@@ -1,3 +1,4 @@
+import { getWorkspaceId } from "@cy/commands/api/workspace";
 import {
   createNewConnectionViaApi,
   createPostgresDestinationViaApi,
@@ -32,6 +33,7 @@ import * as connectionConfigurationForm from "pages/connection/connectionFormPage";
 import * as connectionListPage from "pages/connection/connectionListPageObject";
 import * as replicationPage from "pages/connection/connectionReplicationPageObject";
 import * as newConnectionPage from "pages/connection/createConnectionPageObject";
+import { nextButton } from "pages/connection/createConnectionPageObject";
 import { streamDetails } from "pages/connection/StreamDetailsPageObject";
 import { StreamRowPageObject } from "pages/connection/StreamRowPageObject";
 import { streamsTable } from "pages/connection/StreamsTablePageObject";
@@ -72,7 +74,9 @@ describe("Connection - Create new connection", { testIsolation: false }, () => {
   describe("Set up connection", () => {
     describe("From connection page", () => {
       it("should open 'New connection' page", () => {
-        connectionListPage.visit();
+        // connectionListPage.visit() waits on an intercepted connections/list request, which is never made when this is the first connection being created, so navigate directly
+        cy.visit(`/workspaces/${getWorkspaceId()}/connections`);
+
         interceptGetSourcesListRequest();
         interceptGetSourceDefinitionsRequest();
@@ -170,7 +174,7 @@ describe("Connection - Create new connection", { testIsolation: false }, () => {
       cy.location("search").should("eq", `?destinationId=${destination.destinationId}&sourceType=new`);
       const testPokeSourceName =
appendRandomString("Cypress Test Poke"); - fillPokeAPIForm(testPokeSourceName, "ditto"); + fillPokeAPIForm(testPokeSourceName, "bulbasaur"); cy.get("button").contains("Set up source").click(); cy.wait("@createSource", { timeout: 30000 }).then((interception) => { const createdSourceId = interception.response?.body.sourceId; @@ -194,6 +198,14 @@ describe("Connection - Create new connection", { testIsolation: false }, () => { `/${RoutePaths.Connections}/${ConnectionRoutePaths.ConnectionNew}/${ConnectionRoutePaths.Configure}?sourceId=${source.sourceId}&destinationId=${destination.destinationId}` ); waitForDiscoverSchemaRequest(); + const dummyStreamRow = new StreamRowPageObject("public", "dummy_table_1"); + + dummyStreamRow.toggleStreamSync(); + dummyStreamRow.isStreamSyncEnabled(true); + dummyStreamRow.selectSyncMode("full_refresh", "overwrite"); + + cy.get(nextButton).scrollIntoView(); + cy.get(nextButton).click(); connectionConfigurationForm.selectScheduleType("Manual"); }); }); @@ -211,30 +223,13 @@ describe("Connection - Create new connection", { testIsolation: false }, () => { newConnectionPage.checkColumnNames(); }); - it("should check total amount of table streams", () => { - // dummy tables amount + users table - newConnectionPage.checkAmountOfStreamTableRows(21); - }); - - it("should allow to scroll table to desired stream table row and it should be visible", () => { - const desiredStreamTableRow = "dummy_table_18"; - - newConnectionPage.scrollTableToStream(desiredStreamTableRow); - newConnectionPage.isStreamTableRowVisible(desiredStreamTableRow); - }); - it("should filter table by stream name", () => { streamsTable.searchStream("dummy_table_10"); newConnectionPage.checkAmountOfStreamTableRows(1); }); - - it("should clear stream search input field and show all available streams", () => { - streamsTable.clearStreamSearch(); - newConnectionPage.checkAmountOfStreamTableRows(21); - }); }); - describe.only("Stream", () => { + describe("Stream", () => { before(() => { interceptDiscoverSchemaRequest(); @@ -301,6 +296,7 @@ describe("Connection - Create new connection", { testIsolation: false }, () => { describe("Submit form", () => { it("should set up a connection", () => { interceptCreateConnectionRequest(); + cy.get(nextButton).click(); submitButtonClick(true); waitForCreateConnectionRequest().then((interception) => { @@ -309,7 +305,7 @@ describe("Connection - Create new connection", { testIsolation: false }, () => { const connection: Partial = { name: `${source.name} → ${destination.name}`, - scheduleType: "manual", + scheduleType: "basic", }; expect(interception.request.body).to.contain(connection); expect(interception.response?.body).to.contain(connection); diff --git a/airbyte-webapp/cypress/e2e/source.cy.ts b/airbyte-webapp/cypress/e2e/source.cy.ts index d9f5e81b8f4..381c4ea56a7 100644 --- a/airbyte-webapp/cypress/e2e/source.cy.ts +++ b/airbyte-webapp/cypress/e2e/source.cy.ts @@ -76,7 +76,7 @@ describe("Unsaved changes modal on create source page", () => { it("Check leaving Source page without any changes after selection type", () => { goToSourcePage(); openNewSourcePage(); - selectServiceType("PokeAPI"); + selectServiceType("PokeAPI", "marketplace"); openHomepage(); diff --git a/airbyte-webapp/cypress/pages/connection/StreamRowPageObject.ts b/airbyte-webapp/cypress/pages/connection/StreamRowPageObject.ts index 5e803a84ab9..5f590748318 100644 --- a/airbyte-webapp/cypress/pages/connection/StreamRowPageObject.ts +++ b/airbyte-webapp/cypress/pages/connection/StreamRowPageObject.ts @@ 
-115,7 +115,7 @@ export class StreamRowPageObject {
         // instead of using .contains(), e.g. "Incremental | Append" and "Incremental | Append + Dedupe"
         .filter((_, element) => Cypress.$(element).text().trim() === syncMode)
         .should("have.length", 1)
-        .click();
+        .click({ force: true });
     });
   }
diff --git a/airbyte-webapp/cypress/pages/connection/connectionListPageObject.ts b/airbyte-webapp/cypress/pages/connection/connectionListPageObject.ts
index 928d4c86995..e064244d2a5 100644
--- a/airbyte-webapp/cypress/pages/connection/connectionListPageObject.ts
+++ b/airbyte-webapp/cypress/pages/connection/connectionListPageObject.ts
@@ -3,7 +3,7 @@ import { getWorkspaceId } from "commands/api/workspace";
 const schemaChangeCell = (connectionId: string) => `[data-testid='link-replication-${connectionId}']`;
-const changesStatusIcon = (type: string) => `[data-testId='changesStatusIcon-${type}']`;
+const changesStatusIcon = (type: string) => `[data-testId='entitywarnings-${type}']`;
 const connectionStateSwitch = (connectionId: string) => `[data-testId='connection-state-switch-${connectionId}']`;
 const newConnectionButton = "[data-testid='new-connection-button']";
diff --git a/airbyte-webapp/cypress/pages/connection/createConnectionPageObject.ts b/airbyte-webapp/cypress/pages/connection/createConnectionPageObject.ts
index 010b9c34cfd..f217bb02f60 100644
--- a/airbyte-webapp/cypress/pages/connection/createConnectionPageObject.ts
+++ b/airbyte-webapp/cypress/pages/connection/createConnectionPageObject.ts
@@ -9,7 +9,7 @@ const getNewConnectorTypeOption = (connectorType: ConnectorType) =>
 const catalogTreeTableHeader = `div[data-testid='catalog-tree-table-header']`;
 const catalogTreeTableBody = `div[data-testid='catalog-tree-table-body']`;
-
+export const nextButton = `a[data-testid='next-creation-page']`;
 export const selectExistingConnectorFromList = (connectorType: ConnectorType, connectorName: string) => {
   cy.get(getExistingConnectorItemButton(connectorType, connectorName)).click();
 };
@@ -40,7 +40,7 @@ export const isAtConnectionOverviewPage = (connectionId: string) =>
  */
 export const checkColumnNames = () => {
-  const columnNames = ["Sync", "Data destination", "Stream", "Sync mode"];
+  const columnNames = ["Sync", "Namespace", "Stream", "Sync mode"];
   cy.get(catalogTreeTableHeader).within(($header) => {
     columnNames.forEach((columnName) => {
       cy.contains(columnName);
diff --git a/airbyte-webapp/cypress/pages/createConnectorPage.ts b/airbyte-webapp/cypress/pages/createConnectorPage.ts
index e4a068a5e40..47ee633cc19 100644
--- a/airbyte-webapp/cypress/pages/createConnectorPage.ts
+++ b/airbyte-webapp/cypress/pages/createConnectorPage.ts
@@ -1,4 +1,5 @@
 import { updateField } from "@cy/commands/common";
+import { ConnectorTab } from "@src/components/source/SelectConnector";
 const nameInput = "input[name=name]";
 const apiKeyInput = "input[name='connectionConfiguration.api_key']";
@@ -12,9 +13,9 @@ const destinationPathInput = "input[name='connectionConfiguration.destination_pa
 const optionalFieldsButton = "button[data-testid='optional-fields']";
 const xminOption = "label[data-testid='radio-option.1']";
-export const selectServiceType = (type: string) => {
-  // Make sure community connectors are visible in the grid, since they are hidden by default
-  cy.get("#filter-support-level-community").check({ force: true });
+export const selectServiceType = (type: string, tab: ConnectorTab) => {
+  // Click on the corresponding tab to see the desired connector
+  cy.get(`button[data-id='${tab}-step']`).click();
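// (The connector grid is now organized into tabs rather than a support-level checkbox; callers such as fillPostgresForm, fillPokeAPIForm, and fillLocalJsonForm above pass "certified", "marketplace", or "custom" as the ConnectorTab argument.)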
cy.contains("button", type).click(); }; diff --git a/airbyte-webapp/package.json b/airbyte-webapp/package.json index 03185355898..d40098604d0 100644 --- a/airbyte-webapp/package.json +++ b/airbyte-webapp/package.json @@ -70,6 +70,7 @@ "@tanstack/react-query-devtools": "^4.29.6", "@tanstack/react-table": "^8.7.0", "@types/diff": "^5.0.7", + "@types/mdast": "^4.0.4", "@types/path-browserify": "^1.0.1", "@types/segment-analytics": "^0.0.36", "@types/semver": "^7.3.13", @@ -88,10 +89,11 @@ "framer-motion": "^6.3.11", "js-yaml": "^4.1.0", "json-schema": "^0.4.0", + "jwt-decode": "^4.0.0", "keycloak-js": "^23.0.7", "launchdarkly-js-client-sdk": "^3.1.0", "lodash": "^4.17.21", - "markdown-to-jsx": "^7.3.2", + "markdown-to-jsx": "^7.4.7", "monaco-editor": "^0.34.1", "normalize.css": "^8.0.1", "oidc-client-ts": "^2.4.0", @@ -125,6 +127,7 @@ "semver": "^7.5.4", "ts-pattern": "^4.2.1", "typesafe-actions": "^5.1.0", + "unified": "^10.1.2", "uuid": "^9.0.1", "victory-vendor": "36.6.8", "yup": "^0.32.11" @@ -188,6 +191,7 @@ "eslint-plugin-import": "^2.29.1", "eslint-plugin-jest": "^27.6.3", "eslint-plugin-jsx-a11y": "^6.8.0", + "eslint-plugin-no-only-tests": "^3.1.0", "eslint-plugin-prettier": "^5.1.3", "eslint-plugin-react": "^7.33.2", "eslint-plugin-react-hooks": "^4.6.0", diff --git a/airbyte-webapp/pnpm-lock.yaml b/airbyte-webapp/pnpm-lock.yaml index d7993403d67..4e122673af6 100644 --- a/airbyte-webapp/pnpm-lock.yaml +++ b/airbyte-webapp/pnpm-lock.yaml @@ -58,6 +58,9 @@ dependencies: '@types/diff': specifier: ^5.0.7 version: 5.0.7 + '@types/mdast': + specifier: ^4.0.4 + version: 4.0.4 '@types/path-browserify': specifier: ^1.0.1 version: 1.0.1 @@ -112,6 +115,9 @@ dependencies: json-schema: specifier: ^0.4.0 version: 0.4.0 + jwt-decode: + specifier: ^4.0.0 + version: 4.0.0 keycloak-js: specifier: ^23.0.7 version: 23.0.7 @@ -122,8 +128,8 @@ dependencies: specifier: ^4.17.21 version: 4.17.21 markdown-to-jsx: - specifier: ^7.3.2 - version: 7.3.2(react@18.2.0) + specifier: ^7.4.7 + version: 7.4.7(react@18.2.0) monaco-editor: specifier: ^0.34.1 version: 0.34.1 @@ -223,6 +229,9 @@ dependencies: typesafe-actions: specifier: ^5.1.0 version: 5.1.0 + unified: + specifier: ^10.1.2 + version: 10.1.2 uuid: specifier: ^9.0.1 version: 9.0.1 @@ -408,6 +417,9 @@ devDependencies: eslint-plugin-jsx-a11y: specifier: ^6.8.0 version: 6.8.0(eslint@8.57.0) + eslint-plugin-no-only-tests: + specifier: ^3.1.0 + version: 3.1.0 eslint-plugin-prettier: specifier: ^5.1.3 version: 5.1.3(eslint-config-prettier@9.1.0)(eslint@8.57.0)(prettier@3.0.3) @@ -5188,7 +5200,7 @@ packages: color-convert: 2.0.1 dequal: 2.0.3 lodash: 4.17.21 - markdown-to-jsx: 7.3.2(react@18.2.0) + markdown-to-jsx: 7.4.7(react@18.2.0) memoizerific: 1.11.3 polished: 4.3.1 react: 18.2.0 @@ -6335,6 +6347,12 @@ packages: '@types/unist': 2.0.6 dev: false + /@types/mdast@4.0.4: + resolution: {integrity: sha512-kGaNbPh1k7AFzgpud/gMdvIm5xuECykRR+JnWKQno9TAXVa6WIVCGTPvYGekIDL4uwCZQSYbUxNBSb1aUo79oA==} + dependencies: + '@types/unist': 2.0.6 + dev: false + /@types/mdurl@1.0.2: resolution: {integrity: sha512-eC4U9MlIcu2q0KQmXszyn5Akca/0jrQmwDRgpAMJai7qBWq4amIQhZyNau4VYGtCeALvW1/NtjzJJ567aZxfKA==} dev: false @@ -9658,6 +9676,11 @@ packages: object.fromentries: 2.0.7 dev: true + /eslint-plugin-no-only-tests@3.1.0: + resolution: {integrity: sha512-Lf4YW/bL6Un1R6A76pRZyE1dl1vr31G/ev8UzIc/geCgFWyrKil8hVjYqWVKGB/UIGmb6Slzs9T0wNezdSVegw==} + engines: {node: '>=5.0.0'} + dev: true + /eslint-plugin-prettier@5.1.3(eslint-config-prettier@9.1.0)(eslint@8.57.0)(prettier@3.0.3): 
resolution: {integrity: sha512-C9GCVAs4Eq7ZC/XFQHITLiHJxQngdtraXaM+LoUFoFp/lHNl2Zn8f3WQbe9HvTBBQ9YnKFB0/2Ajdqwo5D1EAw==} engines: {node: ^14.18.0 || >=16.0.0} @@ -12810,8 +12833,8 @@ packages: resolution: {integrity: sha512-Z1NL3Tb1M9wH4XESsCDEksWoKTdlUafKc4pt0GRwjUyXaCFZ+dc3g2erqB6zm3szA2IUSi7VnPI+o/9jnxh9hw==} dev: false - /markdown-to-jsx@7.3.2(react@18.2.0): - resolution: {integrity: sha512-B+28F5ucp83aQm+OxNrPkS8z0tMKaeHiy0lHJs3LqCyDQFtWuenaIrkaVTgAm1pf1AU85LXltva86hlaT17i8Q==} + /markdown-to-jsx@7.4.7(react@18.2.0): + resolution: {integrity: sha512-0+ls1IQZdU6cwM1yu0ZjjiVWYtkbExSyUIFU2ZeDIFuZM1W42Mh4OlJ4nb4apX4H8smxDHRdFaoIVJGwfv5hkg==} engines: {node: '>= 10'} peerDependencies: react: '>= 0.14.0' diff --git a/airbyte-webapp/src/App.tsx b/airbyte-webapp/src/App.tsx index f5f33df8b65..1ffd042d3fd 100644 --- a/airbyte-webapp/src/App.tsx +++ b/airbyte-webapp/src/App.tsx @@ -60,7 +60,7 @@ const App: React.FC = () => { return ( - + }> diff --git a/airbyte-webapp/src/area/connection/components/AttemptDetails/AttemptDetails.tsx b/airbyte-webapp/src/area/connection/components/AttemptDetails/AttemptDetails.tsx index 8831383d1c3..6441fa4fde8 100644 --- a/airbyte-webapp/src/area/connection/components/AttemptDetails/AttemptDetails.tsx +++ b/airbyte-webapp/src/area/connection/components/AttemptDetails/AttemptDetails.tsx @@ -1,5 +1,5 @@ import React from "react"; -import { FormattedDate, FormattedMessage, FormattedTimeParts, useIntl } from "react-intl"; +import { FormattedDate, FormattedMessage, useIntl } from "react-intl"; import { FlexContainer } from "components/ui/Flex"; import { Text } from "components/ui/Text"; @@ -74,10 +74,7 @@ export const AttemptDetails: React.FC = ({ {showEndedAt && attempt.endedAt && ( <> - - {(parts) => {`${parts[0].value}:${parts[2].value}${parts[4].value} `}} - - + | diff --git a/airbyte-webapp/src/area/connection/components/HistoricalOverview/ChartConfig.tsx b/airbyte-webapp/src/area/connection/components/HistoricalOverview/ChartConfig.tsx index d737a66a245..c7a85c8af15 100644 --- a/airbyte-webapp/src/area/connection/components/HistoricalOverview/ChartConfig.tsx +++ b/airbyte-webapp/src/area/connection/components/HistoricalOverview/ChartConfig.tsx @@ -122,7 +122,14 @@ export const ClickToJob = (chartState: CategoricalChartState & { height: number return ( - + ); }; diff --git a/airbyte-webapp/src/area/connection/components/HistoricalOverview/NoDataMessage.module.scss b/airbyte-webapp/src/area/connection/components/HistoricalOverview/NoDataMessage.module.scss deleted file mode 100644 index 1dfd4fab352..00000000000 --- a/airbyte-webapp/src/area/connection/components/HistoricalOverview/NoDataMessage.module.scss +++ /dev/null @@ -1,3 +0,0 @@ -.minHeight { - min-height: 50px; -} diff --git a/airbyte-webapp/src/area/connection/components/HistoricalOverview/NoDataMessage.tsx b/airbyte-webapp/src/area/connection/components/HistoricalOverview/NoDataMessage.tsx index d37d5a49076..10f576f5184 100644 --- a/airbyte-webapp/src/area/connection/components/HistoricalOverview/NoDataMessage.tsx +++ b/airbyte-webapp/src/area/connection/components/HistoricalOverview/NoDataMessage.tsx @@ -1,14 +1,29 @@ import { FormattedMessage } from "react-intl"; -import { FlexContainer } from "components/ui/Flex"; -import { Text } from "components/ui/Text"; - -import styles from "./NoDataMessage.module.scss"; - -export const NoDataMessage: React.FC = () => ( - - - - - -); +import { EmptyState } from "components/common/EmptyState"; +import { useConnectionSyncContext } from 
"components/connection/ConnectionSync/ConnectionSyncContext"; +import { Button } from "components/ui/Button"; + +import { useConnectionFormService } from "hooks/services/ConnectionForm/ConnectionFormService"; + +export const NoDataMessage: React.FC = () => { + const { mode } = useConnectionFormService(); + const { syncConnection, isSyncConnectionAvailable } = useConnectionSyncContext(); + + return ( + } + button={ + + } + /> + ); +}; diff --git a/airbyte-webapp/src/area/connection/components/JobHistoryItem/JobHistoryItem.tsx b/airbyte-webapp/src/area/connection/components/JobHistoryItem/JobHistoryItem.tsx index a354ef537d1..9741d771dc6 100644 --- a/airbyte-webapp/src/area/connection/components/JobHistoryItem/JobHistoryItem.tsx +++ b/airbyte-webapp/src/area/connection/components/JobHistoryItem/JobHistoryItem.tsx @@ -1,6 +1,6 @@ import classNames from "classnames"; import { Suspense, useCallback, useRef } from "react"; -import { FormattedDate, FormattedMessage, FormattedTimeParts, useIntl } from "react-intl"; +import { FormattedDate, FormattedMessage, useIntl } from "react-intl"; import { useEffectOnce } from "react-use"; import { Box } from "components/ui/Box"; @@ -185,15 +185,7 @@ export const JobHistoryItem: React.FC = ({ jobWithAttempts - - {(parts) => {`${parts[0].value}:${parts[2].value}${parts[4].value} `}} - - + diff --git a/airbyte-webapp/src/area/connection/components/UptimeStatusGraph/UptimeStatusGraph.module.scss b/airbyte-webapp/src/area/connection/components/UptimeStatusGraph/UptimeStatusGraph.module.scss index 4660f30789c..738bfd3775c 100644 --- a/airbyte-webapp/src/area/connection/components/UptimeStatusGraph/UptimeStatusGraph.module.scss +++ b/airbyte-webapp/src/area/connection/components/UptimeStatusGraph/UptimeStatusGraph.module.scss @@ -6,6 +6,7 @@ greenVar: colors.$green; darkBlueVar: colors.$dark-blue-300; redVar: colors.$red; + blueVar: colors.$blue-100; blackVar: colors.$inverse; emptyVar: colors.$foreground; } diff --git a/airbyte-webapp/src/area/connection/components/UptimeStatusGraph/UptimeStatusGraph.tsx b/airbyte-webapp/src/area/connection/components/UptimeStatusGraph/UptimeStatusGraph.tsx index 57593f62308..1a65989d124 100644 --- a/airbyte-webapp/src/area/connection/components/UptimeStatusGraph/UptimeStatusGraph.tsx +++ b/airbyte-webapp/src/area/connection/components/UptimeStatusGraph/UptimeStatusGraph.tsx @@ -1,3 +1,4 @@ +import dayjs from "dayjs"; import React, { ComponentPropsWithoutRef, useEffect, useMemo, useState } from "react"; import { ResponsiveContainer, Tooltip, XAxis } from "recharts"; // these are not worth typing @@ -8,11 +9,12 @@ import { generateCategoricalChart } from "recharts/es6/chart/generateCategorical // @ts-ignore-next-line import { formatAxisMap } from "recharts/es6/util/CartesianUtils"; +import { useConnectionStatus } from "components/connection/ConnectionStatus/useConnectionStatus"; import { ConnectionStatusIndicatorStatus } from "components/connection/ConnectionStatusIndicator"; import { getStreamKey } from "area/connection/utils"; -import { useGetConnectionUptimeHistory } from "core/api"; -import { JobStatus } from "core/api/types/AirbyteClient"; +import { useGetConnectionSyncProgress, useGetConnectionUptimeHistory } from "core/api"; +import { ConnectionSyncProgressRead, ConnectionUptimeHistoryRead, JobStatus } from "core/api/types/AirbyteClient"; import { assertNever } from "core/utils/asserts"; import { useConnectionEditService } from "hooks/services/ConnectionEdit/ConnectionEditService"; import { useAirbyteTheme } from 
"hooks/theme/useAirbyteTheme"; @@ -37,6 +39,38 @@ const StreamChart = generateCategoricalChart({ formatAxisMap, }); +const generatePlaceholderHistory = ( + connectionSyncProgress?: ConnectionSyncProgressRead +): ConnectionUptimeHistoryRead => { + if ( + !connectionSyncProgress || + connectionSyncProgress.configType === "clear" || + connectionSyncProgress.configType === "reset_connection" + ) { + return []; + } + + return [ + { + bytesCommitted: connectionSyncProgress.bytesCommitted ?? 0, + bytesEmitted: connectionSyncProgress.bytesCommitted ?? 0, + configType: connectionSyncProgress.configType, + jobId: connectionSyncProgress.jobId ?? 0, + jobCreatedAt: connectionSyncProgress.syncStartedAt ?? dayjs().unix(), + jobUpdatedAt: dayjs().unix(), + recordsCommitted: connectionSyncProgress.recordsCommitted ?? 0, + recordsEmitted: connectionSyncProgress.recordsEmitted ?? 0, + streamStatuses: connectionSyncProgress.streams.map((syncProgressItem) => { + return { + status: "running", + streamName: syncProgressItem.streamName, + streamNamespace: syncProgressItem.streamNamespace ?? "", + }; + }), + }, + ]; +}; + type SortableStream = Pick & Partial>; const statusOrder: ConnectionStatusIndicatorStatus[] = [ @@ -183,20 +217,31 @@ export const UptimeStatusGraph: React.FC = React.memo(() => { darkBlue: colorValues[styles.darkBlueVar], red: colorValues[styles.redVar], black: colorValues[styles.blackVar], + blue: colorValues[styles.blueVar], empty: colorValues[styles.emptyVar], }; setColorMap(colorMap); }, [colorValues]); const { connection } = useConnectionEditService(); - const data = useGetConnectionUptimeHistory(connection.connectionId); - const hasData = data.length > 0; + const uptimeHistoryData = useGetConnectionUptimeHistory(connection.connectionId); + const { isRunning } = useConnectionStatus(connection.connectionId); + const { data: syncProgressData } = useGetConnectionSyncProgress(connection.connectionId, isRunning); + + const placeholderHistory = useMemo( + () => generatePlaceholderHistory(isRunning ? 
syncProgressData : undefined), + [syncProgressData, isRunning] + ); + const hasHistoryData = uptimeHistoryData.length > 0 || placeholderHistory.length > 0; - const { uptimeData, streamIdentities } = useMemo(() => formatDataForChart(data), [data]); + const { uptimeData, streamIdentities } = useMemo( + () => formatDataForChart([...uptimeHistoryData, ...placeholderHistory]), + [placeholderHistory, uptimeHistoryData] + ); const maxStreamsCount = Math.max(...uptimeData.map(({ streams: { length } }) => length)); - if (!hasData) { + if (!hasHistoryData) { return ; } diff --git a/airbyte-webapp/src/area/connection/components/UptimeStatusGraph/UptimeStatusGraphTooltip.tsx b/airbyte-webapp/src/area/connection/components/UptimeStatusGraph/UptimeStatusGraphTooltip.tsx index 3ab3e2885b4..1a01e340653 100644 --- a/airbyte-webapp/src/area/connection/components/UptimeStatusGraph/UptimeStatusGraphTooltip.tsx +++ b/airbyte-webapp/src/area/connection/components/UptimeStatusGraph/UptimeStatusGraphTooltip.tsx @@ -73,6 +73,9 @@ export const UptimeStatusGraphTooltip: ContentType = ({ active, } ); + const showStreamStatusesSection = + statusesByCount && Object.values(statusesByCount).some((streams) => streams.length > 0); + return ( @@ -97,7 +100,7 @@ export const UptimeStatusGraphTooltip: ContentType = ({ active, - {!!streams?.length && ( + {!!streams?.length && showStreamStatusesSection && ( diff --git a/airbyte-webapp/src/area/connection/components/UptimeStatusGraph/WaffleChart.tsx b/airbyte-webapp/src/area/connection/components/UptimeStatusGraph/WaffleChart.tsx index eaabf149d0e..45dee4b5330 100644 --- a/airbyte-webapp/src/area/connection/components/UptimeStatusGraph/WaffleChart.tsx +++ b/airbyte-webapp/src/area/connection/components/UptimeStatusGraph/WaffleChart.tsx @@ -40,7 +40,7 @@ interface InjectedStreamWaffleChartProps extends StreamWaffleChartProps { isTooltipActive: boolean; } -type WaffleColor = "green" | "darkBlue" | "red" | "black" | "empty"; +type WaffleColor = "green" | "darkBlue" | "red" | "black" | "blue" | "empty"; const getCellColor = (streamStatus: ConnectionStatusIndicatorStatus): WaffleColor => { switch (streamStatus) { case ConnectionStatusIndicatorStatus.OnTime: @@ -56,14 +56,16 @@ const getCellColor = (streamStatus: ConnectionStatusIndicatorStatus): WaffleColo case ConnectionStatusIndicatorStatus.ActionRequired: return "black"; + case ConnectionStatusIndicatorStatus.Queued: + case ConnectionStatusIndicatorStatus.Syncing: + case ConnectionStatusIndicatorStatus.Refreshing: + return "blue"; + case ConnectionStatusIndicatorStatus.Disabled: case ConnectionStatusIndicatorStatus.Pending: case ConnectionStatusIndicatorStatus.Paused: - case ConnectionStatusIndicatorStatus.Queued: case ConnectionStatusIndicatorStatus.QueuedForNextSync: - case ConnectionStatusIndicatorStatus.Syncing: case ConnectionStatusIndicatorStatus.Clearing: - case ConnectionStatusIndicatorStatus.Refreshing: return "empty"; } }; @@ -106,8 +108,13 @@ export const Waffle: React.FC = (props) => { rowIndex: number, status: ConnectionStatusIndicatorStatus, skipRecurse = false - ): CellOperation => { + ): CellOperation | null => { const cellOffset = rowIndex * cellHeight; + + if (columnIndex >= orderedTooltipTicks.length) { + return null; + } + const xCoordinate = orderedTooltipTicks[columnIndex].coordinate; const myOperation = { @@ -127,6 +134,10 @@ export const Waffle: React.FC = (props) => { // vertical correction if (CELL_VERTICAL_GAP > 0 && rowIndex < streamsCount - 1) { const siblingOperationY = 
computeCellOperation(columnIndex, rowIndex + 1, status, true); + if (!siblingOperationY) { + return null; + } + const gapY = siblingOperationY.y - (myOperation.y + myOperation.height); const extraGapY = CELL_VERTICAL_GAP - gapY; myOperation.height -= extraGapY; @@ -159,6 +170,11 @@ export const Waffle: React.FC = (props) => { const { status } = streams[j]; const operation = computeCellOperation(i, rowOffset + j, status); + + if (!operation) { + continue; + } + if (status === ConnectionStatusIndicatorStatus.OnTime) { ontimeOperations.push(operation); } else { @@ -177,9 +193,11 @@ export const Waffle: React.FC = (props) => { // tooltip highlight if (isTooltipActive && activeTooltipIndex >= 0) { const coordinates = computeCellOperation(activeTooltipIndex, 0, ConnectionStatusIndicatorStatus.OnTime); - ctx.fillStyle = "rgba(255, 255, 255, 0.4)"; - ctx.fillRect(coordinates.x, coordinates.y, coordinates.width, availableHeight); - ctx.restore(); + if (coordinates) { + ctx.fillStyle = "rgba(255, 255, 255, 0.4)"; + ctx.fillRect(coordinates.x, coordinates.y, coordinates.width, availableHeight); + ctx.restore(); + } } } } diff --git a/airbyte-webapp/src/area/connection/types/index.ts b/airbyte-webapp/src/area/connection/types/index.ts deleted file mode 100644 index 1e212050ff1..00000000000 --- a/airbyte-webapp/src/area/connection/types/index.ts +++ /dev/null @@ -1 +0,0 @@ -export * from "./normalization"; diff --git a/airbyte-webapp/src/area/connection/types/normalization.ts b/airbyte-webapp/src/area/connection/types/normalization.ts deleted file mode 100644 index 0f927ff5954..00000000000 --- a/airbyte-webapp/src/area/connection/types/normalization.ts +++ /dev/null @@ -1,4 +0,0 @@ -export enum NormalizationType { - basic = "basic", - raw = "raw", -} diff --git a/airbyte-webapp/src/area/connection/utils/computeStreamStatus.test.ts b/airbyte-webapp/src/area/connection/utils/computeStreamStatus.test.ts index 8c48573fa71..8169e437816 100644 --- a/airbyte-webapp/src/area/connection/utils/computeStreamStatus.test.ts +++ b/airbyte-webapp/src/area/connection/utils/computeStreamStatus.test.ts @@ -66,95 +66,6 @@ describe("getStreamKey", () => { }); describe("computeStreamStatus", () => { - describe("undefined", () => { - it('returns "undefined" when there are no statuses', () => { - const result = computeStreamStatus({ - statuses: [], - recordsExtracted: 0, - scheduleType: undefined, - scheduleData: undefined, - hasBreakingSchemaChange: false, - lateMultiplier: 2, - errorMultiplier: 2, - showSyncProgress: false, - isSyncing: false, - }); - expect(result).toEqual({ - status: undefined, - isRunning: false, - lastSuccessfulSync: undefined, - }); - }); - }); - - describe("pending", () => { - it('returns "Pending" when the most recent run state is pending', () => { - const status = buildStreamStatusRead({ runState: StreamStatusRunState.PENDING }); - const result = computeStreamStatus({ - statuses: [status], - recordsExtracted: 0, - scheduleType: undefined, - scheduleData: undefined, - hasBreakingSchemaChange: false, - lateMultiplier: 2, - errorMultiplier: 2, - showSyncProgress: false, - isSyncing: false, - }); - expect(result).toEqual({ - status: ConnectionStatusIndicatorStatus.Pending, - isRunning: false, - lastSuccessfulSync: undefined, - }); - }); - - it('returns "Pending" when the most recent job is reset and is complete', () => { - const status = buildStreamStatusRead({ - jobType: StreamStatusJobType.RESET, - runState: StreamStatusRunState.COMPLETE, - }); - const result = computeStreamStatus({ - statuses: 
[status], - recordsExtracted: 0, - scheduleType: undefined, - scheduleData: undefined, - hasBreakingSchemaChange: false, - lateMultiplier: 2, - errorMultiplier: 2, - showSyncProgress: false, - isSyncing: false, - }); - expect(result).toEqual({ - status: ConnectionStatusIndicatorStatus.Pending, - isRunning: false, - lastSuccessfulSync: undefined, - }); - }); - - it('returns "Pending" when the most recent job is reset even if it is incomplete', () => { - const status = buildStreamStatusRead({ - jobType: StreamStatusJobType.RESET, - runState: StreamStatusRunState.INCOMPLETE, - }); - const result = computeStreamStatus({ - statuses: [status], - recordsExtracted: 0, - scheduleType: undefined, - scheduleData: undefined, - hasBreakingSchemaChange: false, - lateMultiplier: 2, - errorMultiplier: 2, - showSyncProgress: false, - isSyncing: false, - }); - expect(result).toEqual({ - status: ConnectionStatusIndicatorStatus.Pending, - isRunning: false, - lastSuccessfulSync: undefined, - }); - }); - }); - describe("on time", () => { it('returns "OnTime" when the most recent sync was successful, unscheduled', () => { const status = buildStreamStatusRead({ runState: StreamStatusRunState.COMPLETE }); @@ -166,7 +77,6 @@ describe("computeStreamStatus", () => { hasBreakingSchemaChange: false, lateMultiplier: 2, errorMultiplier: 2, - showSyncProgress: false, isSyncing: false, }); expect(result).toEqual({ @@ -186,7 +96,6 @@ describe("computeStreamStatus", () => { hasBreakingSchemaChange: false, lateMultiplier: 2, errorMultiplier: 2, - showSyncProgress: false, isSyncing: false, }); expect(result).toEqual({ @@ -206,7 +115,6 @@ describe("computeStreamStatus", () => { hasBreakingSchemaChange: false, lateMultiplier: 2, errorMultiplier: 2, - showSyncProgress: false, isSyncing: false, }); expect(result).toEqual({ @@ -234,7 +142,6 @@ describe("computeStreamStatus", () => { hasBreakingSchemaChange: false, lateMultiplier: 2, errorMultiplier: 2, - showSyncProgress: false, isSyncing: false, }); expect(result).toEqual({ @@ -256,7 +163,6 @@ describe("computeStreamStatus", () => { hasBreakingSchemaChange: false, lateMultiplier: 2, errorMultiplier: 2, - showSyncProgress: false, isSyncing: false, }); expect(result).toEqual({ @@ -284,7 +190,6 @@ describe("computeStreamStatus", () => { hasBreakingSchemaChange: false, lateMultiplier: 2, errorMultiplier: 2, - showSyncProgress: false, isSyncing: false, }); expect(result).toEqual({ @@ -304,7 +209,6 @@ describe("computeStreamStatus", () => { hasBreakingSchemaChange: false, lateMultiplier: 2, errorMultiplier: 2, - showSyncProgress: false, isSyncing: false, }); expect(result).toEqual({ @@ -325,7 +229,6 @@ describe("computeStreamStatus", () => { scheduleData: basicScheduleData, hasBreakingSchemaChange: true, lateMultiplier: 2, - showSyncProgress: false, errorMultiplier: 2, isSyncing: false, }); @@ -350,7 +253,6 @@ describe("computeStreamStatus", () => { hasBreakingSchemaChange: true, lateMultiplier: 2, errorMultiplier: 2, - showSyncProgress: false, isSyncing: false, }); expect(result).toEqual({ @@ -375,7 +277,6 @@ describe("computeStreamStatus", () => { scheduleData: basicScheduleData, hasBreakingSchemaChange: false, lateMultiplier: 2, - showSyncProgress: false, errorMultiplier: 2, isSyncing: false, }); @@ -398,7 +299,6 @@ describe("computeStreamStatus", () => { scheduleType: ConnectionScheduleType.manual, hasBreakingSchemaChange: false, lateMultiplier: 2, - showSyncProgress: false, errorMultiplier: 2, isSyncing: false, }); @@ -426,7 +326,6 @@ describe("computeStreamStatus", () => { 
hasBreakingSchemaChange: false, lateMultiplier: 2, errorMultiplier: 2, - showSyncProgress: false, isSyncing: false, }); expect(result).toEqual({ @@ -437,94 +336,27 @@ describe("computeStreamStatus", () => { }); }); - describe("without sync progress shown", () => { - describe("queued", () => { - it("returns undefined with only a currently running sync (no history)", () => { - const runningStatus = buildStreamStatusRead({ - runState: StreamStatusRunState.RUNNING, - transitionedAt: oneHourAgo, - }); - const cancelStatus = buildStreamStatusRead({ - runState: StreamStatusRunState.INCOMPLETE, - incompleteRunCause: StreamStatusIncompleteRunCause.CANCELED, - transitionedAt: fiveHoursAgo, - }); - + describe("with sync progress shown", () => { + describe("queued for next sync", () => { + it('returns "Queued for next sync" when the most recent run state is pending', () => { + const status = buildStreamStatusRead({ runState: StreamStatusRunState.PENDING }); const result = computeStreamStatus({ - statuses: [runningStatus, cancelStatus], + statuses: [status], recordsExtracted: 0, - scheduleType: ConnectionScheduleType.basic, - scheduleData: basicScheduleData, + scheduleType: undefined, + scheduleData: undefined, hasBreakingSchemaChange: false, lateMultiplier: 2, errorMultiplier: 2, - showSyncProgress: false, isSyncing: false, }); expect(result).toEqual({ - status: undefined, - isRunning: true, + status: ConnectionStatusIndicatorStatus.QueuedForNextSync, + isRunning: false, lastSuccessfulSync: undefined, }); }); - it("returns late with a currently running sync that is behind schedule", () => { - const status = buildStreamStatusRead({ runState: StreamStatusRunState.RUNNING, transitionedAt: oneHourAgo }); - const prevStatus = buildStreamStatusRead({ - runState: StreamStatusRunState.COMPLETE, - transitionedAt: fiveHoursAgo, - }); - const result = computeStreamStatus({ - statuses: [status, prevStatus], - recordsExtracted: 0, - scheduleType: ConnectionScheduleType.basic, - scheduleData: basicScheduleData, - hasBreakingSchemaChange: false, - lateMultiplier: 2, - errorMultiplier: 2, - showSyncProgress: false, - isSyncing: false, - }); - expect(result).toEqual({ - status: ConnectionStatusIndicatorStatus.Late, - isRunning: true, - lastSuccessfulSync: prevStatus, - }); - }); - }); - describe("syncing", () => { - it('returns "undefined" if records were extracted - with only a currently running sync (no history)', () => { - const runningStatus = buildStreamStatusRead({ - runState: StreamStatusRunState.RUNNING, - transitionedAt: oneHourAgo, - }); - const cancelStatus = buildStreamStatusRead({ - runState: StreamStatusRunState.INCOMPLETE, - incompleteRunCause: StreamStatusIncompleteRunCause.CANCELED, - transitionedAt: fiveHoursAgo, - }); - - const result = computeStreamStatus({ - statuses: [runningStatus, cancelStatus], - recordsExtracted: 1, - scheduleType: ConnectionScheduleType.basic, - scheduleData: basicScheduleData, - hasBreakingSchemaChange: false, - lateMultiplier: 2, - errorMultiplier: 2, - showSyncProgress: false, - isSyncing: false, - }); - expect(result).toEqual({ - status: undefined, - isRunning: true, - lastSuccessfulSync: undefined, - }); - }); - }); - }); - describe("with sync progress shown", () => { - describe("queued for next sync", () => { it('returns "queued for next sync" when there are no statuses', () => { const result = computeStreamStatus({ statuses: [], @@ -534,7 +366,6 @@ describe("computeStreamStatus", () => { hasBreakingSchemaChange: false, lateMultiplier: 2, errorMultiplier: 2, - 
showSyncProgress: true, isSyncing: false, }); expect(result).toEqual({ @@ -563,7 +394,6 @@ describe("computeStreamStatus", () => { hasBreakingSchemaChange: false, lateMultiplier: 2, errorMultiplier: 2, - showSyncProgress: true, isSyncing: false, }); expect(result).toEqual({ @@ -574,6 +404,29 @@ describe("computeStreamStatus", () => { }); }); describe("queued", () => { + it("returns 'Queued' with a currently running sync that is behind schedule", () => { + const status = buildStreamStatusRead({ runState: StreamStatusRunState.RUNNING, transitionedAt: oneHourAgo }); + const prevStatus = buildStreamStatusRead({ + runState: StreamStatusRunState.COMPLETE, + transitionedAt: fiveHoursAgo, + }); + + const result = computeStreamStatus({ + statuses: [status, prevStatus], + recordsExtracted: 0, + scheduleType: ConnectionScheduleType.basic, + scheduleData: basicScheduleData, + hasBreakingSchemaChange: false, + lateMultiplier: 2, + errorMultiplier: 2, + isSyncing: false, + }); + expect(result).toEqual({ + status: ConnectionStatusIndicatorStatus.Queued, + isRunning: true, + lastSuccessfulSync: prevStatus, + }); + }); it('returns "queued" with only a currently running sync (no history)', () => { const runningStatus = buildStreamStatusRead({ runState: StreamStatusRunState.RUNNING, @@ -587,7 +440,6 @@ describe("computeStreamStatus", () => { hasBreakingSchemaChange: false, lateMultiplier: 2, errorMultiplier: 2, - showSyncProgress: true, isSyncing: true, runningJobConfigType: JobConfigType.sync, recordsExtracted: 0, @@ -613,7 +465,6 @@ describe("computeStreamStatus", () => { hasBreakingSchemaChange: false, lateMultiplier: 2, errorMultiplier: 2, - showSyncProgress: true, isSyncing: true, runningJobConfigType: JobConfigType.sync, }); @@ -638,7 +489,6 @@ describe("computeStreamStatus", () => { hasBreakingSchemaChange: false, lateMultiplier: 2, errorMultiplier: 2, - showSyncProgress: true, isSyncing: true, runningJobConfigType: JobConfigType.refresh, }); @@ -664,7 +514,6 @@ describe("computeStreamStatus", () => { hasBreakingSchemaChange: false, lateMultiplier: 2, errorMultiplier: 2, - showSyncProgress: true, isSyncing: true, runningJobConfigType: JobConfigType.sync, }); @@ -689,7 +538,6 @@ describe("computeStreamStatus", () => { hasBreakingSchemaChange: false, lateMultiplier: 2, errorMultiplier: 2, - showSyncProgress: true, isSyncing: true, runningJobConfigType: JobConfigType.sync, }); @@ -715,7 +563,6 @@ describe("computeStreamStatus", () => { hasBreakingSchemaChange: false, lateMultiplier: 2, errorMultiplier: 2, - showSyncProgress: true, isSyncing: true, runningJobConfigType: JobConfigType.reset_connection, }); @@ -740,7 +587,6 @@ describe("computeStreamStatus", () => { hasBreakingSchemaChange: false, lateMultiplier: 2, errorMultiplier: 2, - showSyncProgress: true, isSyncing: true, runningJobConfigType: JobConfigType.clear, }); @@ -767,7 +613,6 @@ describe("computeStreamStatus", () => { hasBreakingSchemaChange: false, lateMultiplier: 2, errorMultiplier: 2, - showSyncProgress: true, isSyncing: true, runningJobConfigType: JobConfigType.refresh, }); diff --git a/airbyte-webapp/src/area/connection/utils/computeStreamStatus.ts b/airbyte-webapp/src/area/connection/utils/computeStreamStatus.ts index 5f27fc5f887..5639a88d643 100644 --- a/airbyte-webapp/src/area/connection/utils/computeStreamStatus.ts +++ b/airbyte-webapp/src/area/connection/utils/computeStreamStatus.ts @@ -102,7 +102,6 @@ export const computeStreamStatus = ({ hasBreakingSchemaChange, lateMultiplier, errorMultiplier, - showSyncProgress, isSyncing, 
recordsExtracted, runningJobConfigType, @@ -113,7 +112,6 @@ export const computeStreamStatus = ({ hasBreakingSchemaChange: boolean; lateMultiplier: number; errorMultiplier: number; - showSyncProgress: boolean; isSyncing: boolean; recordsExtracted?: number; runningJobConfigType?: string; @@ -121,7 +119,7 @@ export const computeStreamStatus = ({ // no statuses if (statuses == null || statuses.length === 0) { return { - status: showSyncProgress ? ConnectionStatusIndicatorStatus.QueuedForNextSync : undefined, + status: ConnectionStatusIndicatorStatus.QueuedForNextSync, isRunning: false, lastSuccessfulSync: undefined, }; @@ -136,53 +134,51 @@ export const computeStreamStatus = ({ ({ jobType, runState }) => jobType === StreamStatusJobType.SYNC && runState === StreamStatusRunState.COMPLETE ); - if (showSyncProgress) { - // queued for next sync - if ( - !isRunning && - (statuses[0].runState === StreamStatusRunState.PENDING || statuses[0].jobType === StreamStatusJobType.RESET) - ) { + // queued for next sync + if ( + !isRunning && + (statuses[0].runState === StreamStatusRunState.PENDING || statuses[0].jobType === StreamStatusJobType.RESET) + ) { + return { + status: ConnectionStatusIndicatorStatus.QueuedForNextSync, + isRunning, + + lastSuccessfulSync, + }; + } + + // queued + if (isRunning) { + if (runningJobConfigType === JobConfigType.reset_connection || runningJobConfigType === JobConfigType.clear) { return { - status: ConnectionStatusIndicatorStatus.QueuedForNextSync, + status: ConnectionStatusIndicatorStatus.Clearing, isRunning, - lastSuccessfulSync, }; } - // queued - if (isRunning) { - if (runningJobConfigType === JobConfigType.reset_connection || runningJobConfigType === JobConfigType.clear) { + if (!recordsExtracted || recordsExtracted === 0) { + return { + status: ConnectionStatusIndicatorStatus.Queued, + isRunning, + lastSuccessfulSync, + }; + } + if (recordsExtracted && recordsExtracted > 0) { + // syncing or refreshing + if (runningJobConfigType === "sync") { return { - status: ConnectionStatusIndicatorStatus.Clearing, + status: ConnectionStatusIndicatorStatus.Syncing, isRunning, lastSuccessfulSync, }; - } - - if (!recordsExtracted || recordsExtracted === 0) { + } else if (runningJobConfigType === "refresh") { return { - status: ConnectionStatusIndicatorStatus.Queued, + status: ConnectionStatusIndicatorStatus.Refreshing, isRunning, lastSuccessfulSync, }; } - if (recordsExtracted && recordsExtracted > 0) { - // syncing or refreshing - if (runningJobConfigType === "sync") { - return { - status: ConnectionStatusIndicatorStatus.Syncing, - isRunning, - lastSuccessfulSync, - }; - } else if (runningJobConfigType === "refresh") { - return { - status: ConnectionStatusIndicatorStatus.Refreshing, - isRunning, - lastSuccessfulSync, - }; - } - } } } diff --git a/airbyte-webapp/src/area/connection/utils/operation.ts b/airbyte-webapp/src/area/connection/utils/operation.ts index 12e42a5bd4c..5330f9a926d 100644 --- a/airbyte-webapp/src/area/connection/utils/operation.ts +++ b/airbyte-webapp/src/area/connection/utils/operation.ts @@ -1,15 +1,4 @@ -import { DbtOperationRead } from "components/connection/TransformationForm"; - import { OperationCreate, OperationRead, OperatorType } from "core/api/types/AirbyteClient"; - -export const isDbtTransformation = (op: OperationRead): op is DbtOperationRead => { - return op.operatorConfiguration.operatorType === OperatorType.dbt; -}; - -export const isNormalizationTransformation = (op: OperationCreate): op is OperationRead => { - return 
op.operatorConfiguration.operatorType === OperatorType.normalization; -}; - export const isWebhookTransformation = (op: OperationCreate): op is OperationRead => { return op.operatorConfiguration.operatorType === OperatorType.webhook; }; diff --git a/airbyte-webapp/src/area/connection/utils/useStreamsStatuses.ts b/airbyte-webapp/src/area/connection/utils/useStreamsStatuses.ts index 91da777e6bb..66b0649ac44 100644 --- a/airbyte-webapp/src/area/connection/utils/useStreamsStatuses.ts +++ b/airbyte-webapp/src/area/connection/utils/useStreamsStatuses.ts @@ -51,19 +51,18 @@ export const useStreamsStatuses = ( const connection = useGetConnection(connectionId); const { hasBreakingSchemaChange } = useSchemaChanges(connection.schemaChange); - const showSyncProgress = useExperiment("connection.syncProgress", false); const lateMultiplier = useLateMultiplierExperiment(); const errorMultiplier = useErrorMultiplierExperiment(); const connectionStatus = useConnectionStatus(connectionId); - const isConnectionDisabled = connectionStatus.status === ConnectionStatusIndicatorStatus.Paused; + const isConnectionDisabled = connection.status !== "active"; // TODO: Ideally we can pull this from the stream status endpoint directly once the "pending" status has been updated to reflect the correct status // for now, we'll use this - const syncProgressMap = useStreamsSyncProgress(connectionId, connectionStatus.isRunning, showSyncProgress); + const syncProgressMap = useStreamsSyncProgress(connectionId, connectionStatus.isRunning); const enabledStreams: AirbyteStreamAndConfigurationWithEnforcedStream[] = connection.syncCatalog.streams.filter( (stream) => - (showSyncProgress && !!stream.stream && syncProgressMap.has(getStreamKey(stream.stream))) || + (!!stream.stream && syncProgressMap.has(getStreamKey(stream.stream))) || (stream.config?.selected && stream.stream) ) as AirbyteStreamAndConfigurationWithEnforcedStream[]; const streamStatuses = new Map(); @@ -94,8 +93,7 @@ export const useStreamsStatuses = ( if (!hasPerStreamStatuses) { streamStatus.status = connectionStatus.status; - streamStatus.isRunning = showSyncProgress ? !!syncProgressItem : connectionStatus.isRunning; - streamStatus.isRunning = showSyncProgress ? !!syncProgressItem : connectionStatus.isRunning; + streamStatus.isRunning = !!syncProgressItem; streamStatus.lastSuccessfulSyncAt = connectionStatus.lastSuccessfulSync ? connectionStatus.lastSuccessfulSync * 1000 // unix timestamp in seconds -> milliseconds : undefined; @@ -129,8 +127,7 @@ export const useStreamsStatuses = ( hasBreakingSchemaChange, lateMultiplier, errorMultiplier, - showSyncProgress, - isSyncing: showSyncProgress && !!syncProgressItem ? true : false, + isSyncing: !!syncProgressItem ? 
true : false, recordsExtracted: syncProgressMap.get(streamKey)?.recordsEmitted, runningJobConfigType: syncProgressItem?.configType, }); diff --git a/airbyte-webapp/src/area/connection/utils/useStreamsSyncProgress.ts b/airbyte-webapp/src/area/connection/utils/useStreamsSyncProgress.ts index 5966f3d5c58..ae68d62aca9 100644 --- a/airbyte-webapp/src/area/connection/utils/useStreamsSyncProgress.ts +++ b/airbyte-webapp/src/area/connection/utils/useStreamsSyncProgress.ts @@ -7,10 +7,9 @@ import { getStreamKey } from "./computeStreamStatus"; export const useStreamsSyncProgress = ( connectionId: string, - isRunning: boolean, - showSyncProgress: boolean + isRunning: boolean ): Map => { - const { data: connectionSyncProgress } = useGetConnectionSyncProgress(connectionId, showSyncProgress && isRunning); + const { data: connectionSyncProgress } = useGetConnectionSyncProgress(connectionId, isRunning); const syncProgressMap = useMemo(() => { if (isRunning !== true) { diff --git a/airbyte-webapp/src/area/connection/utils/useStreamsTableAnalytics.ts b/airbyte-webapp/src/area/connection/utils/useStreamsTableAnalytics.ts new file mode 100644 index 00000000000..cc6d8cd2763 --- /dev/null +++ b/airbyte-webapp/src/area/connection/utils/useStreamsTableAnalytics.ts @@ -0,0 +1,43 @@ +import isEqual from "lodash/isEqual"; +import { useEffect, useRef } from "react"; + +import { useConnectionStatus } from "components/connection/ConnectionStatus/useConnectionStatus"; + +import { useGetConnectionSyncProgress } from "core/api"; +import { ConnectionSyncProgressRead } from "core/api/types/AirbyteClient"; +import { Action, Namespace, useAnalyticsService } from "core/services/analytics"; +import { useConnectionFormService } from "hooks/services/ConnectionForm/ConnectionFormService"; + +export const useTrackSyncProgress = (connectionId: string, trackCountRef: React.MutableRefObject) => { + const { connection } = useConnectionFormService(); + const { isRunning } = useConnectionStatus(connectionId); + const { data: connectionSyncProgress } = useGetConnectionSyncProgress(connectionId, isRunning); + const analyticsService = useAnalyticsService(); + + const prevSyncProgressRef = useRef(null); + + useEffect(() => { + if (!connectionSyncProgress || trackCountRef.current > 1) { + return; + } + + const hasProgressChanged = !isEqual(prevSyncProgressRef.current, connectionSyncProgress); + + if (hasProgressChanged) { + analyticsService.track(Namespace.CONNECTION, Action.SYNC_PROGRESS, { + connector_source_definition: connection.source.sourceName, + connector_source_definition_id: connection.source.sourceDefinitionId, + connector_destination_definition: connection.destination.destinationName, + connector_destination_definition_id: connection.destination.destinationDefinitionId, + job_id: connectionSyncProgress.jobId, + records_emitted: connectionSyncProgress.recordsEmitted, + records_committed: connectionSyncProgress.recordsCommitted, + }); + + trackCountRef.current++; + prevSyncProgressRef.current = connectionSyncProgress; + } + }, [connectionSyncProgress, connection, analyticsService, trackCountRef]); + + return null; +}; diff --git a/airbyte-webapp/src/area/connection/utils/useUiStreamsStates.ts b/airbyte-webapp/src/area/connection/utils/useUiStreamsStates.ts index a2bbc57cfa9..afe1782aba9 100644 --- a/airbyte-webapp/src/area/connection/utils/useUiStreamsStates.ts +++ b/airbyte-webapp/src/area/connection/utils/useUiStreamsStates.ts @@ -11,9 +11,9 @@ import { useEffect, useState } from "react"; import { useConnectionStatus } from 
"components/connection/ConnectionStatus/useConnectionStatus"; import { ConnectionStatusIndicatorStatus } from "components/connection/ConnectionStatusIndicator"; -import { connectionsKeys } from "core/api"; +import { connectionsKeys, useGetConnectionSyncProgress } from "core/api"; import { JobConfigType, StreamStatusJobType, StreamStatusRunState } from "core/api/types/AirbyteClient"; -import { useExperiment } from "hooks/services/Experiment"; +import { useStreamsListContext } from "pages/connections/StreamStatusPage/StreamsListContext"; import { getStreamKey } from "./computeStreamStatus"; import { useHistoricalStreamData } from "./useStreamsHistoricalData"; @@ -24,8 +24,8 @@ interface UIStreamState { streamName: string; streamNamespace?: string; activeJobConfigType?: JobConfigType; - activeJobStartedAt?: number; // date? - dataFreshAsOf?: number; // date? + activeJobStartedAt?: number; + dataFreshAsOf?: number; recordsExtracted?: number; recordsLoaded?: number; bytesLoaded?: number; @@ -35,24 +35,21 @@ interface UIStreamState { export const useUiStreamStates = (connectionId: string): UIStreamState[] => { const connectionStatus = useConnectionStatus(connectionId); + const { filteredStreamsByName } = useStreamsListContext(); const [wasRunning, setWasRunning] = useState(connectionStatus.isRunning); const [isFetchingPostJob, setIsFetchingPostJob] = useState(false); - const isSyncProgressEnabled = useExperiment("connection.syncProgress", false); + const { data: connectionSyncProgress } = useGetConnectionSyncProgress(connectionId, connectionStatus.isRunning); + const currentJobId = connectionSyncProgress?.jobId; const queryClient = useQueryClient(); - const { streamStatuses, enabledStreams } = useStreamsStatuses(connectionId); - const syncProgress = useStreamsSyncProgress(connectionId, connectionStatus.isRunning, isSyncProgressEnabled); + const { streamStatuses } = useStreamsStatuses(connectionId); + const syncProgress = useStreamsSyncProgress(connectionId, connectionStatus.isRunning); const isClearOrResetJob = (configType?: JobConfigType) => configType === JobConfigType.clear || configType === JobConfigType.reset_connection; - const streamsToList = enabledStreams - .map((stream) => { - return { streamName: stream.stream?.name ?? 
"", streamNamespace: stream.stream?.namespace }; - }) - .sort((a, b) => a.streamName.localeCompare(b.streamName)); - const { historicalStreamsData, isFetching: isLoadingHistoricalData } = useHistoricalStreamData(connectionId); + // if we just finished a job, re-fetch the historical data and set wasRunning to false useEffect(() => { if (wasRunning && !connectionStatus.isRunning) { @@ -72,7 +69,7 @@ export const useUiStreamStates = (connectionId: string): UIStreamState[] => { } }, [wasRunning, connectionStatus.isRunning, queryClient, connectionId, isFetchingPostJob, isLoadingHistoricalData]); - const uiStreamStates = streamsToList.map((streamItem) => { + const uiStreamStates = filteredStreamsByName.map((streamItem) => { // initialize the state as undefined const uiState: UIStreamState = { streamName: streamItem.streamName, @@ -102,7 +99,10 @@ export const useUiStreamStates = (connectionId: string): UIStreamState[] => { // also, for clear jobs, we should not show anything in this column uiState.recordsExtracted = syncProgressItem.recordsEmitted; uiState.recordsLoaded = syncProgressItem.recordsCommitted; - uiState.activeJobStartedAt = streamStatus?.relevantHistory[0]?.transitionedAt; + uiState.activeJobStartedAt = + currentJobId === streamStatus?.relevantHistory[0]?.jobId + ? streamStatus?.relevantHistory[0]?.transitionedAt + : undefined; } else if (historicalItem && !isClearOrResetJob(historicalItem.configType)) { uiState.recordsLoaded = historicalItem.recordsCommitted; uiState.bytesLoaded = historicalItem.bytesCommitted; diff --git a/airbyte-webapp/src/area/connector/components/ArrayOfObjectsSection/ArrayOfObjectsSection.module.scss b/airbyte-webapp/src/area/connector/components/ArrayOfObjectsSection/ArrayOfObjectsSection.module.scss index b8a48151dda..0079412653e 100644 --- a/airbyte-webapp/src/area/connector/components/ArrayOfObjectsSection/ArrayOfObjectsSection.module.scss +++ b/airbyte-webapp/src/area/connector/components/ArrayOfObjectsSection/ArrayOfObjectsSection.module.scss @@ -1,4 +1,10 @@ +@use "scss/variables"; + .container { max-width: 100%; min-width: 0; } + +.removeButton { + padding-top: variables.$spacing-sm; +} diff --git a/airbyte-webapp/src/area/connector/components/ArrayOfObjectsSection/ArrayOfObjectsSection.tsx b/airbyte-webapp/src/area/connector/components/ArrayOfObjectsSection/ArrayOfObjectsSection.tsx index 5755c1a97db..e9577067640 100644 --- a/airbyte-webapp/src/area/connector/components/ArrayOfObjectsSection/ArrayOfObjectsSection.tsx +++ b/airbyte-webapp/src/area/connector/components/ArrayOfObjectsSection/ArrayOfObjectsSection.tsx @@ -54,11 +54,11 @@ export const ArrayOfObjectsSection: React.FC = ({ fo } > {items.map((item, index) => ( - + - remove(index)} /> + remove(index)} /> ))} diff --git a/airbyte-webapp/src/area/connector/components/SuggestedConnectors/index.ts b/airbyte-webapp/src/area/connector/components/SuggestedConnectors/index.ts deleted file mode 100644 index 0fb6a034d5b..00000000000 --- a/airbyte-webapp/src/area/connector/components/SuggestedConnectors/index.ts +++ /dev/null @@ -1 +0,0 @@ -export * from "./SuggestedConnectors"; diff --git a/airbyte-webapp/src/area/connector/components/index.ts b/airbyte-webapp/src/area/connector/components/index.ts deleted file mode 100644 index 0fb6a034d5b..00000000000 --- a/airbyte-webapp/src/area/connector/components/index.ts +++ /dev/null @@ -1 +0,0 @@ -export * from "./SuggestedConnectors"; diff --git a/airbyte-webapp/src/area/connector/utils/SvgIcon.module.scss 
b/airbyte-webapp/src/area/connector/utils/SvgIcon.module.scss index 167428df968..b4ef7dfeaab 100644 --- a/airbyte-webapp/src/area/connector/utils/SvgIcon.module.scss +++ b/airbyte-webapp/src/area/connector/utils/SvgIcon.module.scss @@ -5,6 +5,7 @@ height: 100%; width: 100%; object-fit: contain; + display: block; } .background { diff --git a/airbyte-webapp/src/area/connector/utils/useSuggestedSources.ts b/airbyte-webapp/src/area/connector/utils/useSuggestedSources.ts index dfd65e33836..aa6d4a65b56 100644 --- a/airbyte-webapp/src/area/connector/utils/useSuggestedSources.ts +++ b/airbyte-webapp/src/area/connector/utils/useSuggestedSources.ts @@ -4,5 +4,12 @@ import { useExperiment } from "hooks/services/Experiment/ExperimentService"; export const useSuggestedSources = () => { const suggestedSourceConnectors = useExperiment("connector.suggestedSourceConnectors", ""); - return useMemo(() => suggestedSourceConnectors.split(",").map((id) => id.trim()), [suggestedSourceConnectors]); + return useMemo( + () => + suggestedSourceConnectors + .split(",") + .filter(Boolean) + .map((id) => id.trim()), + [suggestedSourceConnectors] + ); }; diff --git a/airbyte-webapp/src/components/ConnectorBlocks/FormPageContent.module.scss b/airbyte-webapp/src/components/ConnectorBlocks/FormPageContent.module.scss index 7234ffe3b8a..be6e8cb6f7e 100644 --- a/airbyte-webapp/src/components/ConnectorBlocks/FormPageContent.module.scss +++ b/airbyte-webapp/src/components/ConnectorBlocks/FormPageContent.module.scss @@ -4,8 +4,4 @@ padding: 0 variables.$spacing-md variables.$spacing-xl; margin: variables.$spacing-lg auto 0; max-width: variables.$page-width; - - &.cloud { - padding-bottom: variables.$spacing-page-bottom-cloud; - } } diff --git a/airbyte-webapp/src/components/ConnectorBlocks/FormPageContent.tsx b/airbyte-webapp/src/components/ConnectorBlocks/FormPageContent.tsx index e46db741090..b4c31cd8ef5 100644 --- a/airbyte-webapp/src/components/ConnectorBlocks/FormPageContent.tsx +++ b/airbyte-webapp/src/components/ConnectorBlocks/FormPageContent.tsx @@ -1,20 +1,9 @@ -import classNames from "classnames"; import { PropsWithChildren } from "react"; -import { isCloudApp } from "core/utils/app"; - import styles from "./FormPageContent.module.scss"; const FormPageContent: React.FC> = ({ children }) => { - return ( -

-    <div className={classNames(styles.container, { [styles.cloud]: isCloudApp() })}>
-      {children}
-    </div>
-  );
+  return <div className={styles.container}>{children}</div>
; }; export default FormPageContent; diff --git a/airbyte-webapp/src/components/EntityTable/ConnectionTable.tsx b/airbyte-webapp/src/components/EntityTable/ConnectionTable.tsx index 83d52f00250..e6855566371 100644 --- a/airbyte-webapp/src/components/EntityTable/ConnectionTable.tsx +++ b/airbyte-webapp/src/components/EntityTable/ConnectionTable.tsx @@ -10,9 +10,9 @@ import { RoutePaths } from "pages/routePaths"; import { ConnectionStatusCell } from "./components/ConnectionStatusCell"; import { ConnectorNameCell } from "./components/ConnectorNameCell"; +import { EntityWarningsCell } from "./components/EntityWarningsCell"; import { FrequencyCell } from "./components/FrequencyCell"; import { LastSyncCell } from "./components/LastSyncCell"; -import { SchemaChangeCell } from "./components/SchemaChangeCell"; import { StateSwitchCell } from "./components/StateSwitchCell"; import { StreamsStatusCell } from "./components/StreamStatusCell"; import styles from "./ConnectionTable.module.scss"; @@ -163,17 +163,12 @@ const ConnectionTable: React.FC = ({ data, entity, variant ), enableSorting: false, }), - columnHelper.accessor("schemaChange", { + columnHelper.accessor("connection", { header: "", meta: { thClassName: styles.thConnectionSettings, }, - cell: (props) => ( - - ), + cell: (props) => , enableSorting: false, }), ], diff --git a/airbyte-webapp/src/components/EntityTable/ImplementationTable.tsx b/airbyte-webapp/src/components/EntityTable/ImplementationTable.tsx index 8dc608b3784..6f7d1977bc3 100644 --- a/airbyte-webapp/src/components/EntityTable/ImplementationTable.tsx +++ b/airbyte-webapp/src/components/EntityTable/ImplementationTable.tsx @@ -2,8 +2,14 @@ import { createColumnHelper } from "@tanstack/react-table"; import React from "react"; import { FormattedMessage } from "react-intl"; +import { Icon } from "components/ui/Icon"; import { Link } from "components/ui/Link"; import { Table } from "components/ui/Table"; +import { Tooltip } from "components/ui/Tooltip"; + +import { getHumanReadableUpgradeDeadline, shouldDisplayBreakingChangeBanner } from "core/domain/connector"; +import { FeatureItem, useFeature } from "core/services/features"; +import { getBreakingChangeErrorMessage } from "pages/connections/StreamStatusPage/ConnectionStatusMessages"; import AllConnectionsStatusCell from "./components/AllConnectionsStatusCell"; import ConnectEntitiesCell from "./components/ConnectEntitiesCell"; @@ -20,6 +26,7 @@ interface ImplementationTableProps { const ImplementationTable: React.FC = ({ data, entity }) => { const columnHelper = createColumnHelper(); + const connectorBreakingChangeDeadlinesEnabled = useFeature(FeatureItem.ConnectorBreakingChangeDeadlines); const columns = React.useMemo( () => [ @@ -95,8 +102,49 @@ const ImplementationTable: React.FC = ({ data, entity ), enableSorting: false, }), + columnHelper.accessor("breakingChanges", { + header: () => null, + id: "breakingChanges", + meta: { + noPadding: true, + }, + cell: (props) => { + if (props.row.original.supportState != null && shouldDisplayBreakingChangeBanner(props.row.original)) { + const { errorMessageId, errorType } = getBreakingChangeErrorMessage( + props.row.original as Parameters[0], + connectorBreakingChangeDeadlinesEnabled + ); + return ( + + + + } + > + + + ); + } + return null; + }, + enableSorting: false, + }), ], - [columnHelper, entity] + [columnHelper, entity, connectorBreakingChangeDeadlinesEnabled] ); return ( diff --git a/airbyte-webapp/src/components/EntityTable/components/EntityWarningsCell.module.scss 
b/airbyte-webapp/src/components/EntityTable/components/EntityWarningsCell.module.scss new file mode 100644 index 00000000000..3334446c959 --- /dev/null +++ b/airbyte-webapp/src/components/EntityTable/components/EntityWarningsCell.module.scss @@ -0,0 +1,7 @@ +@use "scss/colors"; +@use "scss/variables"; + +.tooltipContainer { + display: flex !important; + align-items: center !important; +} diff --git a/airbyte-webapp/src/components/EntityTable/components/EntityWarningsCell.tsx b/airbyte-webapp/src/components/EntityTable/components/EntityWarningsCell.tsx new file mode 100644 index 00000000000..76bafc84831 --- /dev/null +++ b/airbyte-webapp/src/components/EntityTable/components/EntityWarningsCell.tsx @@ -0,0 +1,163 @@ +import { PropsOf } from "@headlessui/react/dist/types"; +import React, { ReactNode } from "react"; +import { FormattedMessage } from "react-intl"; + +import { FlexContainer, FlexItem } from "components/ui/Flex"; +import { Icon } from "components/ui/Icon"; +import { Link } from "components/ui/Link"; +import { MessageType } from "components/ui/Message"; +import { NumberBadge } from "components/ui/NumberBadge"; +import { Tooltip } from "components/ui/Tooltip"; + +import { useCurrentWorkspaceLink } from "area/workspace/utils"; +import { SchemaChange, WebBackendConnectionListItem } from "core/api/types/AirbyteClient"; +import { getHumanReadableUpgradeDeadline, shouldDisplayBreakingChangeBanner } from "core/domain/connector"; +import { FeatureItem, useFeature } from "core/services/features"; +import { convertSnakeToCamel } from "core/utils/strings"; +import { getBreakingChangeErrorMessage } from "pages/connections/StreamStatusPage/ConnectionStatusMessages"; +import { ConnectionRoutePaths, RoutePaths } from "pages/routePaths"; + +import styles from "./EntityWarningsCell.module.scss"; + +interface EntityWarningCellProps { + connection: WebBackendConnectionListItem; +} + +const schemaChangeToMessageType: Record = { + breaking: "error", + non_breaking: "warning", + no_change: "success", +}; + +const typetoIcon: Record = { + warning: , + success: null, + error: , + info: null, +}; + +export const EntityWarningsCell: React.FC = ({ connection }) => { + const allowAutoDetectSchema = useFeature(FeatureItem.AllowAutoDetectSchema); + const connectorBreakingChangeDeadlinesEnabled = useFeature(FeatureItem.ConnectorBreakingChangeDeadlines); + const createLink = useCurrentWorkspaceLink(); + + const { + connectionId, + schemaChange, + sourceActorDefinitionVersion: source, + destinationActorDefinitionVersion: destination, + } = connection; + + const warningsToShow: Array<[MessageType, ReactNode, PropsOf & Record<"data-testid", string>]> = []; + + if (allowAutoDetectSchema && schemaChange === SchemaChange.breaking) { + warningsToShow.push([ + schemaChangeToMessageType[schemaChange], + , + { + to: `${connectionId}/${ConnectionRoutePaths.Replication}`, + "data-testid": `link-replication-${connectionId}`, + }, + ]); + } + + if (shouldDisplayBreakingChangeBanner(source)) { + const { errorMessageId, errorType } = getBreakingChangeErrorMessage( + source, + connectorBreakingChangeDeadlinesEnabled + ); + + warningsToShow.push([ + errorType, + , + { + to: createLink(`/${RoutePaths.Source}/${connection.source.sourceId}`), + "data-testid": `link-source-${connectionId}`, + }, + ]); + } + + if (shouldDisplayBreakingChangeBanner(destination)) { + const { errorMessageId, errorType } = getBreakingChangeErrorMessage( + destination, + connectorBreakingChangeDeadlinesEnabled + ); + warningsToShow.push([ + errorType, + , 
+ { + to: createLink(`/${RoutePaths.Destination}/${connection.destination.destinationId}`), + "data-testid": `link-source-${connectionId}`, + }, + ]); + } + + if (warningsToShow.length === 0) { + return null; + } + + if (warningsToShow.length === 1) { + const [messageType, messageNode, linkProps] = warningsToShow[0]; + return ( + {typetoIcon[messageType]}} + > + {messageNode} + + ); + } + + warningsToShow.sort(([a], [b]) => { + if (a === "error" && b !== "error") { + return -1; + } + if (b === "error" && a !== "error") { + return 1; + } + return 0; + }); + + const highestMessageType = warningsToShow[0][0]; + + return ( + + } + > + + {warningsToShow.map(([messageType, messageNode], idx) => ( + + {typetoIcon[messageType]} + {messageNode} + + ))} + + + ); +}; diff --git a/airbyte-webapp/src/components/EntityTable/components/SchemaChangeCell.tsx b/airbyte-webapp/src/components/EntityTable/components/SchemaChangeCell.tsx deleted file mode 100644 index fd2f9b41d26..00000000000 --- a/airbyte-webapp/src/components/EntityTable/components/SchemaChangeCell.tsx +++ /dev/null @@ -1,28 +0,0 @@ -import React from "react"; - -import { Link } from "components/ui/Link"; - -import { ConnectionId, SchemaChange } from "core/api/types/AirbyteClient"; -import { FeatureItem, useFeature } from "core/services/features"; -import { ConnectionRoutePaths } from "pages/routePaths"; - -import { ChangesStatusIcon } from "./ChangesStatusIcon"; - -interface SchemaChangeCellProps { - connectionId: ConnectionId; - schemaChange: SchemaChange; -} - -export const SchemaChangeCell: React.FC = ({ connectionId, schemaChange }) => { - const allowAutoDetectSchema = useFeature(FeatureItem.AllowAutoDetectSchema); - - if (!allowAutoDetectSchema || schemaChange !== SchemaChange.breaking) { - return null; - } - - return ( - - - - ); -}; diff --git a/airbyte-webapp/src/components/EntityTable/components/StreamStatusCell.tsx b/airbyte-webapp/src/components/EntityTable/components/StreamStatusCell.tsx index ec1b75fea56..befdd1bfecf 100644 --- a/airbyte-webapp/src/components/EntityTable/components/StreamStatusCell.tsx +++ b/airbyte-webapp/src/components/EntityTable/components/StreamStatusCell.tsx @@ -9,12 +9,11 @@ import { ConnectionStatusIndicatorStatus, } from "components/connection/ConnectionStatusIndicator"; import { StreamWithStatus, sortStreamsByStatus } from "components/connection/StreamStatus/streamStatusUtils"; -import { StreamStatusIndicator, StreamStatusLoadingSpinner } from "components/connection/StreamStatusIndicator"; +import { StreamStatusIndicator } from "components/connection/StreamStatusIndicator"; import { LoadingSpinner } from "components/ui/LoadingSpinner"; import { Tooltip } from "components/ui/Tooltip"; import { AirbyteStreamAndConfigurationWithEnforcedStream, useStreamsStatuses } from "area/connection/utils"; -import { useExperiment } from "hooks/services/Experiment"; import styles from "./StreamStatusCell.module.scss"; import { ConnectionTableDataItem } from "../types"; @@ -59,16 +58,11 @@ const StreamsBar: React.FC<{ const SyncingStreams: React.FC<{ streams: StreamWithStatus[] }> = ({ streams }) => { const syncingStreamsCount = streams.filter((stream) => stream.isRunning).length; - const showSyncProgress = useExperiment("connection.syncProgress", false); if (syncingStreamsCount) { return (
-        {!showSyncProgress ? (
-          <StreamStatusLoadingSpinner />
-        ) : (
-          <LoadingSpinner />
-        )}
+        <LoadingSpinner />
+        {syncingStreamsCount} running
); diff --git a/airbyte-webapp/src/components/EntityTable/types.ts b/airbyte-webapp/src/components/EntityTable/types.ts index e73d6b3ecd6..68e834454dd 100644 --- a/airbyte-webapp/src/components/EntityTable/types.ts +++ b/airbyte-webapp/src/components/EntityTable/types.ts @@ -1,7 +1,10 @@ import { + ActorDefinitionVersionBreakingChanges, + ActorDefinitionVersionRead, ConnectionScheduleData, ConnectionScheduleType, SchemaChange, + SupportState, WebBackendConnectionListItem, } from "../../core/api/types/AirbyteClient"; @@ -19,6 +22,9 @@ interface EntityTableDataItem { lastSync?: number | null; connectorIcon?: string; isActive: boolean; + breakingChanges?: ActorDefinitionVersionBreakingChanges; + isVersionOverrideApplied: boolean; + supportState?: SupportState; } interface ConnectionTableDataItem { @@ -33,6 +39,8 @@ interface ConnectionTableDataItem { scheduleData?: ConnectionScheduleData; scheduleType?: ConnectionScheduleType; schemaChange: SchemaChange; + source: ActorDefinitionVersionRead; + destination: ActorDefinitionVersionRead; lastSyncStatus: string | null; connectorIcon?: string; entityIcon?: string; diff --git a/airbyte-webapp/src/components/EntityTable/utils.tsx b/airbyte-webapp/src/components/EntityTable/utils.tsx index 54e9c8694ba..4f23484566b 100644 --- a/airbyte-webapp/src/components/EntityTable/utils.tsx +++ b/airbyte-webapp/src/components/EntityTable/utils.tsx @@ -47,6 +47,9 @@ export function getEntityTableData< lastSync: null, connectEntities: [], isActive: entityItem.status === ActorStatus.active, + breakingChanges: entityItem.breakingChanges, + isVersionOverrideApplied: entityItem.isVersionOverrideApplied ?? false, + supportState: entityItem.supportState, }; } @@ -72,6 +75,9 @@ export function getEntityTableData< connectEntities, connectorIcon: entityItem.icon, isActive: entityItem.status === ActorStatus.active, + breakingChanges: entityItem.breakingChanges, + isVersionOverrideApplied: entityItem.isVersionOverrideApplied ?? 
false, + supportState: entityItem.supportState, }; }); @@ -92,6 +98,8 @@ export const getConnectionTableData = ( lastSync: connection.latestSyncJobCreatedAt, enabled: connection.status === ConnectionStatus.active, schemaChange: connection.schemaChange, + source: connection.sourceActorDefinitionVersion, + destination: connection.destinationActorDefinitionVersion, scheduleData: connection.scheduleData, scheduleType: connection.scheduleType, status: connection.status, diff --git a/airbyte-webapp/src/components/InitialBadge/InitialBadge.module.scss b/airbyte-webapp/src/components/InitialBadge/InitialBadge.module.scss index 1fc97d5e36c..372b21741b6 100644 --- a/airbyte-webapp/src/components/InitialBadge/InitialBadge.module.scss +++ b/airbyte-webapp/src/components/InitialBadge/InitialBadge.module.scss @@ -49,4 +49,5 @@ height: 24px; border-radius: variables.$border-radius-xs; aspect-ratio: 1 / 1; + flex-shrink: 0; } diff --git a/airbyte-webapp/src/components/common/ConnectionActionsBlock/ConnectionActionsBlock.tsx b/airbyte-webapp/src/components/common/ConnectionActionsBlock/ConnectionActionsBlock.tsx index 82cf941bf60..5ae81e4c169 100644 --- a/airbyte-webapp/src/components/common/ConnectionActionsBlock/ConnectionActionsBlock.tsx +++ b/airbyte-webapp/src/components/common/ConnectionActionsBlock/ConnectionActionsBlock.tsx @@ -47,7 +47,12 @@ export const ConnectionActionsBlock: React.FC = () => { }); }, [clearStreams, registerNotification, formatMessage]); - const onDeleteButtonClick = useDeleteModal("connection", onDelete, undefined, connection?.name); + const onDeleteButtonClick = useDeleteModal( + "connection", + onDelete, + undefined, + formatMessage({ id: "tables.connectionDeleteConfirmationText" }) + ); const connectionStatus = useConnectionStatus(connection.connectionId ?? 
""); const { openConfirmationModal, closeConfirmationModal } = useConfirmationModalService(); diff --git a/airbyte-webapp/src/components/common/ConnectionDeleteBlock/ConnectionDeleteBlock.tsx b/airbyte-webapp/src/components/common/ConnectionDeleteBlock/ConnectionDeleteBlock.tsx index 5413997f105..01c259e8e8f 100644 --- a/airbyte-webapp/src/components/common/ConnectionDeleteBlock/ConnectionDeleteBlock.tsx +++ b/airbyte-webapp/src/components/common/ConnectionDeleteBlock/ConnectionDeleteBlock.tsx @@ -1,5 +1,5 @@ import React from "react"; -import { FormattedMessage } from "react-intl"; +import { FormattedMessage, useIntl } from "react-intl"; import { Button } from "components/ui/Button"; import { Card } from "components/ui/Card"; @@ -18,8 +18,14 @@ export const ConnectionDeleteBlock: React.FC = () => { const { connection } = useConnectionEditService(); const { mutateAsync: deleteConnection } = useDeleteConnection(); const onDelete = () => deleteConnection(connection); + const { formatMessage } = useIntl(); - const onDeleteButtonClick = useDeleteModal("connection", onDelete, undefined, connection.name); + const onDeleteButtonClick = useDeleteModal( + "connection", + onDelete, + undefined, + formatMessage({ id: "tables.connectionDeleteConfirmationText" }) + ); return ( diff --git a/airbyte-webapp/src/components/common/ConnectorIcon/ConnectorIcon.tsx b/airbyte-webapp/src/components/common/ConnectorIcon/ConnectorIcon.tsx index 1950b719519..42c82fa149e 100644 --- a/airbyte-webapp/src/components/common/ConnectorIcon/ConnectorIcon.tsx +++ b/airbyte-webapp/src/components/common/ConnectorIcon/ConnectorIcon.tsx @@ -1,6 +1,8 @@ import classNames from "classnames"; import React from "react"; +import { FlexContainer } from "components/ui/Flex"; + import { SvgIcon } from "area/connector/utils"; import styles from "./ConnectorIcon.module.scss"; @@ -11,7 +13,7 @@ interface ConnectorIconProps { } export const ConnectorIcon: React.FC = ({ className, icon }) => ( - + ); diff --git a/airbyte-webapp/src/components/common/EmptyResourceBlock/EmptyResourceBlock.module.scss b/airbyte-webapp/src/components/common/EmptyResourceBlock/EmptyResourceBlock.module.scss deleted file mode 100644 index 55e4d773511..00000000000 --- a/airbyte-webapp/src/components/common/EmptyResourceBlock/EmptyResourceBlock.module.scss +++ /dev/null @@ -1,28 +0,0 @@ -@use "scss/colors"; -@use "scss/variables"; - -.content { - padding: 74px 0 111px; - text-align: center; - font-size: 20px; - line-height: 27px; - color: colors.$dark-blue-900; -} - -.imgBlock { - height: 80px; - width: 80px; - border-radius: 50%; - background: colors.$grey-100; - margin: 0 auto 10px; - text-align: center; - padding: 20px 0; -} - -.description { - font-weight: normal; - font-size: variables.$font-size-lg; - line-height: 1.3; - color: colors.$grey-500; - margin-top: 5px; -} diff --git a/airbyte-webapp/src/components/common/EmptyResourceBlock/EmptyResourceBlock.tsx b/airbyte-webapp/src/components/common/EmptyResourceBlock/EmptyResourceBlock.tsx deleted file mode 100644 index 62f0be6e0ed..00000000000 --- a/airbyte-webapp/src/components/common/EmptyResourceBlock/EmptyResourceBlock.tsx +++ /dev/null @@ -1,19 +0,0 @@ -import React from "react"; - -import cactus from "./cactus.png"; -import styles from "./EmptyResourceBlock.module.scss"; - -interface EmptyResourceBlockProps { - text: React.ReactNode; - description?: React.ReactNode; -} - -export const EmptyResourceBlock: React.FC = ({ text, description }) => ( -
-  <div className={styles.content}>
-    <div className={styles.imgBlock}>
-      <img src={cactus} alt="" />
-    </div>
-    {text}
-    <div className={styles.description}>{description}</div>
-  </div>
-); diff --git a/airbyte-webapp/src/components/common/EmptyResourceBlock/cactus.png b/airbyte-webapp/src/components/common/EmptyResourceBlock/cactus.png deleted file mode 100644 index ca452bd25e3..00000000000 Binary files a/airbyte-webapp/src/components/common/EmptyResourceBlock/cactus.png and /dev/null differ diff --git a/airbyte-webapp/src/components/common/EmptyResourceBlock/index.ts b/airbyte-webapp/src/components/common/EmptyResourceBlock/index.ts deleted file mode 100644 index 42693319a3d..00000000000 --- a/airbyte-webapp/src/components/common/EmptyResourceBlock/index.ts +++ /dev/null @@ -1 +0,0 @@ -export * from "./EmptyResourceBlock"; diff --git a/airbyte-webapp/src/components/common/EmptyState/EmptyState.module.scss b/airbyte-webapp/src/components/common/EmptyState/EmptyState.module.scss new file mode 100644 index 00000000000..e7b286f883c --- /dev/null +++ b/airbyte-webapp/src/components/common/EmptyState/EmptyState.module.scss @@ -0,0 +1,11 @@ +@use "scss/colors"; +@use "scss/variables"; + +$circleSize: 44px; + +.circle { + width: $circleSize; + height: $circleSize; + border-radius: 50%; + background-color: colors.$blue-40; +} diff --git a/airbyte-webapp/src/components/common/EmptyState/EmptyState.tsx b/airbyte-webapp/src/components/common/EmptyState/EmptyState.tsx new file mode 100644 index 00000000000..12582a8de5b --- /dev/null +++ b/airbyte-webapp/src/components/common/EmptyState/EmptyState.tsx @@ -0,0 +1,29 @@ +import React from "react"; + +import { FlexContainer } from "components/ui/Flex"; +import { Icon } from "components/ui/Icon"; +import { Text } from "components/ui/Text"; + +import styles from "./EmptyState.module.scss"; + +interface EmptyStateProps { + icon?: "cactus" | "chart"; + text: React.ReactNode; + description?: React.ReactNode; + button?: React.ReactNode; +} + +export const EmptyState: React.FC = ({ icon = "cactus", text, description, button }) => ( + + + + + + + {text} + + {description && {description}} + {button && button} + + +); diff --git a/airbyte-webapp/src/components/common/EmptyState/index.ts b/airbyte-webapp/src/components/common/EmptyState/index.ts new file mode 100644 index 00000000000..aac2d4d6760 --- /dev/null +++ b/airbyte-webapp/src/components/common/EmptyState/index.ts @@ -0,0 +1 @@ +export { EmptyState } from "./EmptyState"; diff --git a/airbyte-webapp/src/components/common/MainPageWithScroll/MainPageWithScroll.module.scss b/airbyte-webapp/src/components/common/MainPageWithScroll/MainPageWithScroll.module.scss index 92ca6936442..6790ac7f12f 100644 --- a/airbyte-webapp/src/components/common/MainPageWithScroll/MainPageWithScroll.module.scss +++ b/airbyte-webapp/src/components/common/MainPageWithScroll/MainPageWithScroll.module.scss @@ -34,12 +34,9 @@ $spacing: variables.$spacing-xl; } .content { + height: 95%; padding: 0 $spacing $spacing; - &.cloud { - padding-bottom: variables.$spacing-page-bottom-cloud; - } - &.noBottomPadding { padding-bottom: 0; } diff --git a/airbyte-webapp/src/components/common/MainPageWithScroll/MainPageWithScroll.tsx b/airbyte-webapp/src/components/common/MainPageWithScroll/MainPageWithScroll.tsx index a03d1adab37..c1528f5ef7f 100644 --- a/airbyte-webapp/src/components/common/MainPageWithScroll/MainPageWithScroll.tsx +++ b/airbyte-webapp/src/components/common/MainPageWithScroll/MainPageWithScroll.tsx @@ -1,8 +1,6 @@ import classNames from "classnames"; import React from "react"; -import { isCloudApp } from "core/utils/app"; - import styles from "./MainPageWithScroll.module.scss"; /** @@ -39,7 +37,6 @@ export const 
MainPageWithScroll: React.FC = ({
{children} diff --git a/airbyte-webapp/src/components/connection/ConnectionForm/CustomTransformationsFormField.tsx b/airbyte-webapp/src/components/connection/ConnectionForm/CustomTransformationsFormField.tsx deleted file mode 100644 index bcdc1845136..00000000000 --- a/airbyte-webapp/src/components/connection/ConnectionForm/CustomTransformationsFormField.tsx +++ /dev/null @@ -1,72 +0,0 @@ -import React, { useMemo } from "react"; -import { useFieldArray } from "react-hook-form"; -import { FormattedMessage } from "react-intl"; - -import { ArrayOfObjectsEditor } from "components/ArrayOfObjectsEditor"; - -import { useCurrentWorkspace } from "core/api"; -import { OperationCreate, OperatorType } from "core/api/types/AirbyteClient"; -import { isDefined } from "core/utils/common"; -import { useModalService } from "hooks/services/Modal"; -import { CustomTransformationsFormValues } from "pages/connections/ConnectionTransformationPage/CustomTransformationsForm"; - -import { DbtOperationReadOrCreate, TransformationForm } from "../TransformationForm"; - -export const CustomTransformationsFormField: React.FC = () => { - const { workspaceId } = useCurrentWorkspace(); - const { fields, append, remove, update, move } = useFieldArray({ - name: "transformations", - }); - const { openModal } = useModalService(); - - const defaultTransformation: OperationCreate = useMemo( - () => ({ - name: "My dbt transformations", - workspaceId, - operatorConfiguration: { - operatorType: OperatorType.dbt, - dbt: { - gitRepoUrl: "", - dockerImage: "fishtownanalytics/dbt:1.0.0", - dbtArguments: "run", - }, - }, - }), - [workspaceId] - ); - - const openEditModal = (transformationItemIndex?: number) => - openModal({ - size: "xl", - title: , - content: ({ onComplete, onCancel }) => ( - { - isDefined(transformationItemIndex) - ? 
update(transformationItemIndex, transformation) - : append(transformation); - onComplete(); - }} - onCancel={onCancel} - /> - ), - }); - - return ( - } - addButtonText={} - renderItemName={(item) => item.name} - onAddItem={() => openEditModal()} - onStartEdit={openEditModal} - onRemove={remove} - onMove={move} - /> - ); -}; diff --git a/airbyte-webapp/src/components/connection/ConnectionForm/NormalizationFormField.tsx b/airbyte-webapp/src/components/connection/ConnectionForm/NormalizationFormField.tsx deleted file mode 100644 index ddec6cda1db..00000000000 --- a/airbyte-webapp/src/components/connection/ConnectionForm/NormalizationFormField.tsx +++ /dev/null @@ -1,48 +0,0 @@ -import React from "react"; -import { FormattedMessage, useIntl } from "react-intl"; - -import { Box } from "components/ui/Box"; -import { ExternalLink } from "components/ui/Link"; - -import { NormalizationType } from "area/connection/types"; -import { links } from "core/utils/links"; -import { useConnectionFormService } from "hooks/services/ConnectionForm/ConnectionFormService"; - -import { LabeledRadioButtonFormControl } from "./LabeledRadioButtonFormControl"; - -/** - * react-hook-form field for normalization operation - */ -export const NormalizationFormField: React.FC = () => { - const { formatMessage } = useIntl(); - const { mode } = useConnectionFormService(); - - return ( - - - {lnk}, - }} - /> - ) - } - /> - - ); -}; diff --git a/airbyte-webapp/src/components/connection/ConnectionForm/OperationsSectionCard.tsx b/airbyte-webapp/src/components/connection/ConnectionForm/OperationsSectionCard.tsx deleted file mode 100644 index e4df091865d..00000000000 --- a/airbyte-webapp/src/components/connection/ConnectionForm/OperationsSectionCard.tsx +++ /dev/null @@ -1,49 +0,0 @@ -import React, { useMemo } from "react"; -import { useIntl } from "react-intl"; - -import { Card } from "components/ui/Card"; -import { FlexContainer } from "components/ui/Flex"; -import { Heading } from "components/ui/Heading"; - -import { FeatureItem, useFeature } from "core/services/features"; -import { useConnectionFormService } from "hooks/services/ConnectionForm/ConnectionFormService"; - -import { CustomTransformationsFormField } from "./CustomTransformationsFormField"; -import { NormalizationFormField } from "./NormalizationFormField"; - -export const OperationsSectionCard: React.FC = () => { - const { formatMessage } = useIntl(); - const { - destDefinitionVersion: { normalizationConfig, supportsDbt }, - } = useConnectionFormService(); - - const supportsNormalization = normalizationConfig.supported; - const supportsTransformations = useFeature(FeatureItem.AllowCustomDBT) && supportsDbt; - - const titleKey = useMemo(() => { - if (supportsNormalization && supportsTransformations) { - return "connectionForm.normalizationAndTransformation.title"; - } else if (supportsNormalization) { - return "connectionForm.normalization.title"; - } - return "connectionForm.transformation.title"; - }, [supportsNormalization, supportsTransformations]); - - if (!supportsNormalization && !supportsTransformations) { - return null; - } - - return ( - - - {supportsNormalization || supportsTransformations ? 
( - - {formatMessage({ id: titleKey })} - - ) : null} - {supportsNormalization && } - {supportsTransformations && } - - - ); -}; diff --git a/airbyte-webapp/src/components/connection/ConnectionForm/SchemaChangesBackdrop.test.tsx b/airbyte-webapp/src/components/connection/ConnectionForm/SchemaChangesBackdrop.test.tsx index ef8a912a5f2..57874b83720 100644 --- a/airbyte-webapp/src/components/connection/ConnectionForm/SchemaChangesBackdrop.test.tsx +++ b/airbyte-webapp/src/components/connection/ConnectionForm/SchemaChangesBackdrop.test.tsx @@ -1,7 +1,8 @@ import { render } from "@testing-library/react"; import userEvent from "@testing-library/user-event"; -import { mockConnection, TestWrapper } from "test-utils/testutils"; +import { mockConnection } from "test-utils/mock-data/mockConnection"; +import { TestWrapper } from "test-utils/testutils"; import { SchemaChange } from "core/api/types/AirbyteClient"; import { FeatureItem } from "core/services/features"; diff --git a/airbyte-webapp/src/components/connection/ConnectionForm/__snapshots__/formConfig.test.ts.snap b/airbyte-webapp/src/components/connection/ConnectionForm/__snapshots__/formConfig.test.ts.snap index 949d2f72eed..0ef3346ae42 100644 --- a/airbyte-webapp/src/components/connection/ConnectionForm/__snapshots__/formConfig.test.ts.snap +++ b/airbyte-webapp/src/components/connection/ConnectionForm/__snapshots__/formConfig.test.ts.snap @@ -8,7 +8,6 @@ exports[`#useInitialFormValues should generate initial values w/ mode: create 1` "namespaceDefinition": "source", "namespaceFormat": "\${SOURCE_NAMESPACE}", "nonBreakingChangesPreference": "ignore", - "normalization": "basic", "notifySchemaChanges": true, "prefix": "", "scheduleData": undefined, @@ -1559,7 +1558,6 @@ exports[`#useInitialFormValues should generate initial values w/ mode: create 1` }, ], }, - "transformations": [], } `; @@ -1571,7 +1569,6 @@ exports[`#useInitialFormValues should generate initial values w/ mode: edit 1`] "namespaceDefinition": "source", "namespaceFormat": "\${SOURCE_NAMESPACE}", "nonBreakingChangesPreference": "ignore", - "normalization": "basic", "notifySchemaChanges": true, "prefix": "", "scheduleData": undefined, @@ -3122,7 +3119,6 @@ exports[`#useInitialFormValues should generate initial values w/ mode: edit 1`] }, ], }, - "transformations": [], } `; @@ -3134,7 +3130,6 @@ exports[`#useInitialFormValues should generate initial values w/ mode: readonly "namespaceDefinition": "source", "namespaceFormat": "\${SOURCE_NAMESPACE}", "nonBreakingChangesPreference": "ignore", - "normalization": "basic", "notifySchemaChanges": true, "prefix": "", "scheduleData": undefined, @@ -4685,6 +4680,5 @@ exports[`#useInitialFormValues should generate initial values w/ mode: readonly }, ], }, - "transformations": [], } `; diff --git a/airbyte-webapp/src/components/connection/ConnectionForm/formConfig.test.ts b/airbyte-webapp/src/components/connection/ConnectionForm/formConfig.test.ts index 451eca5eae6..747c046a754 100644 --- a/airbyte-webapp/src/components/connection/ConnectionForm/formConfig.test.ts +++ b/airbyte-webapp/src/components/connection/ConnectionForm/formConfig.test.ts @@ -2,10 +2,7 @@ import { renderHook } from "@testing-library/react"; import cloneDeep from "lodash/cloneDeep"; import { mockConnection } from "test-utils/mock-data/mockConnection"; -import { - mockDestinationDefinitionSpecification, - mockDestinationDefinitionVersion, -} from "test-utils/mock-data/mockDestination"; +import { mockDestinationDefinitionSpecification } from 
"test-utils/mock-data/mockDestination"; import { mockWorkspace } from "test-utils/mock-data/mockWorkspace"; import { useInitialFormValues } from "./formConfig"; @@ -17,12 +14,7 @@ jest.mock("core/api", () => ({ describe("#useInitialFormValues", () => { it("should generate initial values w/ mode: readonly", () => { const { result } = renderHook(() => - useInitialFormValues( - mockConnection, - mockDestinationDefinitionVersion, - mockDestinationDefinitionSpecification, - "readonly" - ) + useInitialFormValues(mockConnection, mockDestinationDefinitionSpecification, "readonly") ); expect(result.current).toMatchSnapshot(); expect(result.current.name).toBeDefined(); @@ -30,12 +22,7 @@ describe("#useInitialFormValues", () => { it("should generate initial values w/ mode: create", () => { const { result } = renderHook(() => - useInitialFormValues( - mockConnection, - mockDestinationDefinitionVersion, - mockDestinationDefinitionSpecification, - "create" - ) + useInitialFormValues(mockConnection, mockDestinationDefinitionSpecification, "create") ); expect(result.current).toMatchSnapshot(); expect(result.current.name).toBeDefined(); @@ -43,12 +30,7 @@ describe("#useInitialFormValues", () => { it("should generate initial values w/ mode: edit", () => { const { result } = renderHook(() => - useInitialFormValues( - mockConnection, - mockDestinationDefinitionVersion, - mockDestinationDefinitionSpecification, - "edit" - ) + useInitialFormValues(mockConnection, mockDestinationDefinitionSpecification, "edit") ); expect(result.current).toMatchSnapshot(); expect(result.current.name).toBeDefined(); @@ -58,12 +40,7 @@ describe("#useInitialFormValues", () => { const connection = cloneDeep(mockConnection); connection.syncCatalog.streams[0].stream!.supportedSyncModes = ["full_refresh", "incremental"]; const { result } = renderHook(() => - useInitialFormValues( - connection, - mockDestinationDefinitionVersion, - mockDestinationDefinitionSpecification, - "create" - ) + useInitialFormValues(connection, mockDestinationDefinitionSpecification, "create") ); expect(result.current.syncCatalog.streams[0].config?.syncMode).toBe("incremental"); expect(result.current.syncCatalog.streams[0].config?.destinationSyncMode).toBe("append_dedup"); @@ -74,12 +51,7 @@ describe("#useInitialFormValues", () => { connection.syncCatalog.streams[0].stream!.supportedSyncModes = ["full_refresh", "incremental"]; connection.syncCatalog.streams[0].config!.destinationSyncMode = "append"; const { result } = renderHook(() => - useInitialFormValues( - connection, - mockDestinationDefinitionVersion, - mockDestinationDefinitionSpecification, - "readonly" - ) + useInitialFormValues(connection, mockDestinationDefinitionSpecification, "readonly") ); expect(result.current.syncCatalog.streams[0].config?.syncMode).toBe("full_refresh"); expect(result.current.syncCatalog.streams[0].config?.destinationSyncMode).toBe("append"); @@ -91,14 +63,9 @@ describe("#useInitialFormValues", () => { connection.syncCatalog.streams[0].config!.destinationSyncMode = "append"; const { result } = renderHook(() => - useInitialFormValues(connection, mockDestinationDefinitionVersion, mockDestinationDefinitionSpecification, "edit") + useInitialFormValues(connection, mockDestinationDefinitionSpecification, "edit") ); expect(result.current.syncCatalog.streams[0].config?.syncMode).toBe("full_refresh"); expect(result.current.syncCatalog.streams[0].config?.destinationSyncMode).toBe("append"); }); - - // This is a low-priority test - it.todo( - "should test for 
supportsDbt+initialValues.transformations and supportsNormalization+initialValues.normalization" - ); }); diff --git a/airbyte-webapp/src/components/connection/ConnectionForm/formConfig.tsx b/airbyte-webapp/src/components/connection/ConnectionForm/formConfig.tsx index 59eaa508300..2a575df8c1c 100644 --- a/airbyte-webapp/src/components/connection/ConnectionForm/formConfig.tsx +++ b/airbyte-webapp/src/components/connection/ConnectionForm/formConfig.tsx @@ -1,27 +1,21 @@ import { useMemo } from "react"; import { FieldArrayWithId } from "react-hook-form"; -import { NormalizationType } from "area/connection/types"; -import { isDbtTransformation, isNormalizationTransformation } from "area/connection/utils"; import { useCurrentWorkspace } from "core/api"; import { AirbyteCatalog, DestinationSyncMode, - OperationCreate, SyncMode, - ActorDefinitionVersionRead, ConnectionScheduleData, ConnectionScheduleType, Geography, NamespaceDefinitionType, NonBreakingChangesPreference, - OperationRead, SchemaChangeBackfillPreference, DestinationDefinitionSpecificationRead, } from "core/api/types/AirbyteClient"; import { FeatureItem, useFeature } from "core/services/features"; import { ConnectionFormMode, ConnectionOrPartialConnection } from "hooks/services/ConnectionForm/ConnectionFormService"; -import { useExperiment } from "hooks/services/Experiment"; import { analyzeSyncCatalogBreakingChanges } from "./calculateInitialCatalog"; import { pruneUnsupportedModes, replicateSourceModes } from "./preferredSyncModes"; @@ -31,7 +25,6 @@ import { } from "./ScheduleFormField/useBasicFrequencyDropdownData"; import { createConnectionValidationSchema } from "./schema"; import { updateStreamSyncMode } from "../syncCatalog/SyncCatalog/updateStreamSyncMode"; -import { DbtOperationRead } from "../TransformationForm"; /** * react-hook-form form values type for the connection form @@ -45,8 +38,6 @@ export interface FormConnectionFormValues { prefix: string; nonBreakingChangesPreference?: NonBreakingChangesPreference; geography?: Geography; - normalization?: NormalizationType; - transformations?: OperationRead[]; syncCatalog: AirbyteCatalog; notifySchemaChanges?: boolean; backfillPreference?: SchemaChangeBackfillPreference; @@ -81,43 +72,15 @@ export const useConnectionValidationSchema = () => { ); }; -/** - * get transformation operations only - * @param operations - */ -export const getInitialTransformations = (operations: OperationRead[]): DbtOperationRead[] => - operations?.filter(isDbtTransformation) ?? []; - -/** - * get normalization initial normalization type - * @param operations - * @param isEditMode - */ -export const getInitialNormalization = ( - operations: Array, - mode: ConnectionFormMode -): NormalizationType => { - const initialNormalization = - operations?.find(isNormalizationTransformation)?.operatorConfiguration?.normalization?.option; - - return initialNormalization - ? NormalizationType[initialNormalization] - : mode !== "create" - ? NormalizationType.raw - : NormalizationType.basic; -}; - // react-hook-form form values type for the connection form. 
export const useInitialFormValues = ( connection: ConnectionOrPartialConnection, - destDefinitionVersion: ActorDefinitionVersionRead, destDefinitionSpecification: DestinationDefinitionSpecificationRead, mode: ConnectionFormMode ): FormConnectionFormValues => { const workspace = useCurrentWorkspace(); const { catalogDiff, syncCatalog, schemaChange } = connection; - const useSimpliedCreation = useExperiment("connection.simplifiedCreation", true); - + const { notificationSettings } = useCurrentWorkspace(); const supportedSyncModes: SyncMode[] = useMemo(() => { const foundModes = new Set(); for (let i = 0; i < connection.syncCatalog.streams.length; i++) { @@ -180,18 +143,11 @@ export const useInitialFormValues = ( }, nonBreakingChangesPreference: connection.nonBreakingChangesPreference ?? defaultNonBreakingChangesPreference, geography: connection.geography || workspace.defaultGeography || "auto", - ...{ - ...(destDefinitionVersion.supportsDbt && { - normalization: getInitialNormalization(connection.operations ?? [], mode), - }), - }, - ...{ - ...(destDefinitionVersion.supportsDbt && { - transformations: getInitialTransformations(connection.operations ?? []), - }), - }, syncCatalog: analyzeSyncCatalogBreakingChanges(syncCatalog, catalogDiff, schemaChange), - notifySchemaChanges: connection.notifySchemaChanges ?? useSimpliedCreation, + notifySchemaChanges: + connection.notifySchemaChanges ?? + (notificationSettings?.sendOnConnectionUpdate?.notificationType && + notificationSettings.sendOnConnectionUpdate.notificationType.length > 0), backfillPreference: connection.backfillPreference ?? SchemaChangeBackfillPreference.disabled, }; @@ -208,16 +164,13 @@ export const useInitialFormValues = ( connection.prefix, connection.nonBreakingChangesPreference, connection.geography, - connection.operations, connection.notifySchemaChanges, connection.backfillPreference, defaultNonBreakingChangesPreference, workspace.defaultGeography, - destDefinitionVersion.supportsDbt, - mode, syncCatalog, catalogDiff, schemaChange, - useSimpliedCreation, + notificationSettings?.sendOnConnectionUpdate, ]); }; diff --git a/airbyte-webapp/src/components/connection/ConnectionForm/schema.ts b/airbyte-webapp/src/components/connection/ConnectionForm/schema.ts index 184451c37c9..fa1f00d9be9 100644 --- a/airbyte-webapp/src/components/connection/ConnectionForm/schema.ts +++ b/airbyte-webapp/src/components/connection/ConnectionForm/schema.ts @@ -1,7 +1,6 @@ import * as yup from "yup"; import { SchemaOf } from "yup"; -import { NormalizationType } from "area/connection/types"; import { validateCronExpression, validateCronFrequencyOneHourOrMore } from "area/connection/utils"; import { AirbyteStreamAndConfiguration, @@ -17,8 +16,6 @@ import { SchemaChangeBackfillPreference, } from "core/api/types/AirbyteClient"; -import { dbtOperationReadOrCreateSchema } from "../TransformationForm"; - /** * yup schema for the schedule data */ @@ -203,8 +200,6 @@ export const createConnectionValidationSchema = ( ? 
yup.mixed().oneOf(Object.values(NonBreakingChangesPreference)).required("form.empty.error") : yup.mixed().notRequired(), geography: yup.mixed().oneOf(Object.values(Geography)).optional(), - normalization: yup.mixed().oneOf(Object.values(NormalizationType)).optional(), - transformations: yup.array().of(dbtOperationReadOrCreateSchema).optional(), syncCatalog: syncCatalogSchema, notifySchemaChanges: yup.boolean().optional(), backfillPreference: yup.mixed().oneOf(Object.values(SchemaChangeBackfillPreference)).optional(), diff --git a/airbyte-webapp/src/components/connection/ConnectionForm/utils.ts b/airbyte-webapp/src/components/connection/ConnectionForm/utils.ts index 872187290a9..77522567f36 100644 --- a/airbyte-webapp/src/components/connection/ConnectionForm/utils.ts +++ b/airbyte-webapp/src/components/connection/ConnectionForm/utils.ts @@ -1,7 +1,4 @@ -import { NormalizationType } from "area/connection/types"; -import { AirbyteStreamAndConfiguration, OperationCreate, OperatorType } from "core/api/types/AirbyteClient"; - -import { FormConnectionFormValues } from "./formConfig"; +import { AirbyteStreamAndConfiguration } from "core/api/types/AirbyteClient"; /** * since AirbyteStreamAndConfiguration don't have a unique identifier @@ -15,44 +12,3 @@ export const isSameSyncStream = ( streamName: string | undefined, streamNamespace: string | undefined ) => streamNode.stream?.name === streamName && streamNode.stream?.namespace === streamNamespace; -/** - * map the normalization option to the operation - */ -const mapNormalizationOptionToOperation = ( - workspaceId: string, - normalization?: NormalizationType -): OperationCreate[] => { - // if normalization is not supported OR normalization is supported but the default value is selected need to return empty array - if (!normalization || normalization === NormalizationType.raw) { - return []; - } - - // otherwise return the normalization operation selected value - "basic" - return [ - { - name: "Normalization", - workspaceId, - operatorConfiguration: { - operatorType: OperatorType.normalization, - normalization: { - option: normalization, - }, - }, - }, - ]; -}; - -/** - * we need to combine the normalizations, custom transformations and dbt transformations in one operations array - * this function will take the form values return the operations array - * used in create connection case - */ -export const mapFormValuesToOperations = ( - workspaceId: string, - normalization: FormConnectionFormValues["normalization"], - transformations: FormConnectionFormValues["transformations"] -): OperationCreate[] => { - const normalizationOperation = mapNormalizationOptionToOperation(workspaceId, normalization); - - return [...normalizationOperation, ...(transformations?.length ? transformations : [])]; -}; diff --git a/airbyte-webapp/src/components/connection/ConnectionHeaderControls/ConnectionHeaderControls.module.scss b/airbyte-webapp/src/components/connection/ConnectionHeaderControls/ConnectionHeaderControls.module.scss index 120a6977742..64eccc8a194 100644 --- a/airbyte-webapp/src/components/connection/ConnectionHeaderControls/ConnectionHeaderControls.module.scss +++ b/airbyte-webapp/src/components/connection/ConnectionHeaderControls/ConnectionHeaderControls.module.scss @@ -1,3 +1,9 @@ .switch { width: 100px; } + +.scheduleButton { + // Max width on the schedule button to prevent it from becoming too wide in case the + // schedule is a complex cron expression. 
+ max-width: 250px; +} diff --git a/airbyte-webapp/src/components/connection/ConnectionHeaderControls/ConnectionHeaderControls.tsx b/airbyte-webapp/src/components/connection/ConnectionHeaderControls/ConnectionHeaderControls.tsx index a4832c4b189..e5dc3e58b07 100644 --- a/airbyte-webapp/src/components/connection/ConnectionHeaderControls/ConnectionHeaderControls.tsx +++ b/airbyte-webapp/src/components/connection/ConnectionHeaderControls/ConnectionHeaderControls.tsx @@ -60,7 +60,7 @@ export const ConnectionHeaderControls: React.FC = () => { + - ); - } - - return ( - - {content} - - - ); -}; diff --git a/airbyte-webapp/src/components/connectorBuilder/Sidebar.tsx b/airbyte-webapp/src/components/connectorBuilder/Sidebar.tsx index 49bf9900586..9d23b5bd35f 100644 --- a/airbyte-webapp/src/components/connectorBuilder/Sidebar.tsx +++ b/airbyte-webapp/src/components/connectorBuilder/Sidebar.tsx @@ -2,9 +2,11 @@ import classnames from "classnames"; import React from "react"; import { FormattedMessage } from "react-intl"; +import { AdminWorkspaceWarning } from "components/ui/AdminWorkspaceWarning"; import { FlexContainer } from "components/ui/Flex"; import { Action, Namespace, useAnalyticsService } from "core/services/analytics"; +import { FeatureItem, IfFeatureEnabled } from "core/services/features"; import { useConnectorBuilderFormState } from "services/connectorBuilder/ConnectorBuilderStateService"; import { NameInput } from "./NameInput"; @@ -35,6 +37,9 @@ export const Sidebar: React.FC> = ({ class return ( + + + { + const [loginError, setLoginError] = useState(false); + const { login } = useAuthService(); + + if (!login) { + throw new Error("Login function not available"); + } + + return ( + + defaultValues={{ username: "", password: "" }} + schema={simpleAuthLoginFormSchema} + onSubmit={login} + onError={(error) => { + // Indicates incorrect credentials + if (error instanceof HttpError && error.status === 401) { + setLoginError(true); + } else { + // Otherwise throw in a setState here so that the error is thrown in a render cycle and handled by our error boundary + setLoginError(() => { + throw error; + }); + } + }} + reValidateMode="onChange" + > + + + + {loginError && ( + + + + + + + + )} + + ); +}; + +const SubmitButton = () => { + const { isValid, isSubmitting } = useFormState(); + + return ( + + + + + + ); +}; diff --git a/airbyte-webapp/src/components/login/SimpleAuthLoginForm/index.ts b/airbyte-webapp/src/components/login/SimpleAuthLoginForm/index.ts new file mode 100644 index 00000000000..b1fe2d7f636 --- /dev/null +++ b/airbyte-webapp/src/components/login/SimpleAuthLoginForm/index.ts @@ -0,0 +1 @@ +export * from "./SimpleAuthLoginForm"; diff --git a/airbyte-webapp/src/components/source/SelectConnector/ConnectorButton.module.scss b/airbyte-webapp/src/components/source/SelectConnector/ConnectorButton.module.scss index 9c3d2e86931..5769541ac34 100644 --- a/airbyte-webapp/src/components/source/SelectConnector/ConnectorButton.module.scss +++ b/airbyte-webapp/src/components/source/SelectConnector/ConnectorButton.module.scss @@ -1,11 +1,14 @@ @use "scss/colors"; @use "scss/variables"; +$iconWidth: 38px; + .button { border: 1px solid transparent; display: flex; gap: variables.$spacing-lg; align-items: center; + justify-content: space-between; padding: variables.$spacing-lg; box-shadow: variables.$box-shadow; background-color: colors.$foreground; @@ -21,20 +24,58 @@ } } +.iconAndName { + min-width: $iconWidth; +} + .text { - display: flex; - flex-direction: column; - justify-content: center; - 
align-items: flex-start; + min-width: 0; + display: -webkit-box; + -webkit-box-orient: vertical; + text-overflow: ellipsis; + overflow: hidden; + + &.twoMaxLines { + // supported on all modern browsers, official support is being worked on + -webkit-line-clamp: 2; + line-clamp: 2; + } + + &.threeMaxLines { + -webkit-line-clamp: 3; + line-clamp: 3; + } } -.supportLevel { - margin-left: auto; - display: flex; - align-items: center; +.metrics { + padding-right: variables.$spacing-xl; + gap: calc(2 * variables.$spacing-lg); } .icon { - width: 38px; - height: 38px; + width: $iconWidth; + height: $iconWidth; +} + +.builderPrompt { + min-width: 0; +} + +.builderPromptIcon { + flex-shrink: 0; +} + +.builderPromptText { + max-width: 100%; + white-space: nowrap; + overflow: hidden; + text-overflow: ellipsis; + + &.builderPromptPrimary { + flex-shrink: 0; + } + + &.builderPromptSecondary { + flex-shrink: 1; + } } diff --git a/airbyte-webapp/src/components/source/SelectConnector/ConnectorButton.tsx b/airbyte-webapp/src/components/source/SelectConnector/ConnectorButton.tsx index b92c66956fe..1b454b51397 100644 --- a/airbyte-webapp/src/components/source/SelectConnector/ConnectorButton.tsx +++ b/airbyte-webapp/src/components/source/SelectConnector/ConnectorButton.tsx @@ -1,41 +1,88 @@ +import classNames from "classnames"; +import { FormattedMessage } from "react-intl"; + import { ConnectorIcon } from "components/common/ConnectorIcon"; -import { BuilderPrompt } from "components/connectorBuilder/BuilderPrompt"; -import { SupportLevelBadge } from "components/ui/SupportLevelBadge"; +import { MetricIcon } from "components/connector/ConnectorQualityMetrics"; +import { FlexContainer } from "components/ui/Flex"; +import { Icon } from "components/ui/Icon"; +import { Link } from "components/ui/Link"; import { Text } from "components/ui/Text"; +import { useCurrentWorkspaceLink } from "area/workspace/utils"; import { ConnectorDefinition } from "core/domain/connector"; import { RoutePaths } from "pages/routePaths"; import styles from "./ConnectorButton.module.scss"; interface ConnectorButtonProps { + className?: string; onClick: (definition: T) => void; definition: T; + showMetrics?: boolean; + maxLines: 2 | 3; } -export const ConnectorButton = ({ definition, onClick }: ConnectorButtonProps) => { +export const ConnectorButton = ({ + className, + definition, + onClick, + showMetrics, + maxLines, +}: ConnectorButtonProps) => { return ( - ); }; -export const BuilderConnectorButton: React.FC = () => { +interface BuilderConnectorButtonProps { + className?: string; + layout: "horizontal" | "vertical"; +} +export const BuilderConnectorButton: React.FC = ({ className, layout }) => { + const createLink = useCurrentWorkspaceLink(); + return ( - + + + + + + + + + + + + + ); }; diff --git a/airbyte-webapp/src/components/source/SelectConnector/ConnectorGrid.module.scss b/airbyte-webapp/src/components/source/SelectConnector/ConnectorGrid.module.scss deleted file mode 100644 index 7f2ed325833..00000000000 --- a/airbyte-webapp/src/components/source/SelectConnector/ConnectorGrid.module.scss +++ /dev/null @@ -1,32 +0,0 @@ -@use "scss/variables"; - -.connectorGrid { - --grid-columns: 3; - - grid-column: 2 / 3; - max-width: variables.$page-width; - display: grid; - grid-template-columns: repeat(var(--grid-columns), 1fr); - grid-auto-rows: 68px; - gap: variables.$spacing-xl; - - &__noMatches { - grid-column: span var(--grid-columns); - display: flex; - flex-direction: column; - gap: variables.$spacing-lg; - padding: variables.$spacing-xl 
0 variables.$spacing-2xl; - } - - &__hiddenSearchResults { - width: variables.$width-max-notification; - margin-left: auto; - margin-right: auto; - } -} - -@container (max-width: 800px) { - .connectorGrid { - --grid-columns: 2; - } -} diff --git a/airbyte-webapp/src/components/source/SelectConnector/ConnectorGrid.tsx b/airbyte-webapp/src/components/source/SelectConnector/ConnectorGrid.tsx deleted file mode 100644 index acce7b2abb0..00000000000 --- a/airbyte-webapp/src/components/source/SelectConnector/ConnectorGrid.tsx +++ /dev/null @@ -1,82 +0,0 @@ -import { FormattedMessage } from "react-intl"; - -import { Box } from "components/ui/Box"; -import { Message } from "components/ui/Message"; -import { Text } from "components/ui/Text"; - -import { ConnectorDefinition } from "core/domain/connector"; -import { isSourceDefinition } from "core/domain/connector/source"; - -import { BuilderConnectorButton, ConnectorButton } from "./ConnectorButton"; -import styles from "./ConnectorGrid.module.scss"; -import { RequestNewConnectorButton } from "./RequestNewConnectorButton"; - -interface ConnectorGridProps { - connectorDefinitions: T[]; - onConnectorButtonClick: (definition: T) => void; - onShowAllResultsClick: () => void; - onOpenRequestConnectorModal: () => void; - showConnectorBuilderButton?: boolean; - searchResultsHiddenByFilters: number; -} - -export const ConnectorGrid = ({ - connectorDefinitions, - onConnectorButtonClick, - onShowAllResultsClick, - onOpenRequestConnectorModal, - showConnectorBuilderButton = false, - searchResultsHiddenByFilters, -}: ConnectorGridProps) => { - return ( - <> - {connectorDefinitions.length === 0 && ( -
- - - - {searchResultsHiddenByFilters > 0 && ( - - } - actionBtnText={} - onAction={onShowAllResultsClick} - className={styles.connectorGrid__hiddenSearchResults} - /> - )} -
- )} - -
- {connectorDefinitions.map((definition) => { - const key = isSourceDefinition(definition) - ? definition.sourceDefinitionId - : definition.destinationDefinitionId; - return ; - })} - - {showConnectorBuilderButton && } - -
- {connectorDefinitions.length > 0 && searchResultsHiddenByFilters > 0 && ( - - - } - actionBtnText={} - onAction={onShowAllResultsClick} - className={styles.connectorGrid__noMatches__message} - /> - - )} - - ); -}; diff --git a/airbyte-webapp/src/components/source/SelectConnector/ConnectorList.module.scss b/airbyte-webapp/src/components/source/SelectConnector/ConnectorList.module.scss new file mode 100644 index 00000000000..1ece9ff7c69 --- /dev/null +++ b/airbyte-webapp/src/components/source/SelectConnector/ConnectorList.module.scss @@ -0,0 +1,58 @@ +@use "scss/colors"; +@use "scss/variables"; +@use "./gridColumns"; + +.connectorGrid { + grid-column: 2 / 3; + display: grid; + grid-template-columns: repeat(var(--grid-columns), minmax(0, 1fr)); + grid-auto-rows: 68px; + gap: variables.$spacing-md; + + @include gridColumns.responsive-grid; + + &__noMatches { + display: flex; + flex-direction: column; + gap: variables.$spacing-lg; + padding: variables.$spacing-xl 0 variables.$spacing-2xl; + } + + &__suggestedConnectors { + grid-column: 2 / 3; + margin-inline: calc(-1 * variables.$spacing-md); + } +} + +.connectorList { + & .connectorListButton { + padding-left: variables.$spacing-lg; + padding-right: variables.$spacing-md; + height: 60px; + } +} + +.countAndSort { + position: sticky; + top: 0; + z-index: 2; + grid-column: 2 / 3; + display: flex; + justify-content: space-between; + align-items: center; + padding-top: variables.$spacing-xl; +} + +.sortHeader { + font-size: variables.$font-size-sm; + color: colors.$grey-400; +} + +.sortButton { + padding: 0; + height: 16px; +} + +.activeSortColumn { + color: colors.$blue; +} diff --git a/airbyte-webapp/src/components/source/SelectConnector/ConnectorList.tsx b/airbyte-webapp/src/components/source/SelectConnector/ConnectorList.tsx new file mode 100644 index 00000000000..9d0e09ede3c --- /dev/null +++ b/airbyte-webapp/src/components/source/SelectConnector/ConnectorList.tsx @@ -0,0 +1,147 @@ +import isString from "lodash/isString"; +import { useMemo } from "react"; + +import { FlexContainer } from "components/ui/Flex"; + +import { ConnectorDefinition } from "core/domain/connector"; +import { isSourceDefinition } from "core/domain/connector/source"; + +import { BuilderConnectorButton, ConnectorButton } from "./ConnectorButton"; +import styles from "./ConnectorList.module.scss"; +import { RequestNewConnectorButton } from "./RequestNewConnectorButton"; +import { ConnectorSorting } from "./SelectConnector"; +import { SuggestedConnectors } from "./SuggestedConnectors"; + +interface ConnectorListProps { + sorting: ConnectorSorting; + displayType?: "grid" | "list"; + connectorDefinitions: T[]; + noSearchResultsContent: React.ReactNode; + suggestedConnectorDefinitionIds?: string[]; + onConnectorButtonClick: (definition: ConnectorDefinition) => void; + onOpenRequestConnectorModal: () => void; + showConnectorBuilderButton?: boolean; +} + +export const ConnectorList = ({ + sorting, + displayType, + connectorDefinitions, + noSearchResultsContent, + suggestedConnectorDefinitionIds, + onConnectorButtonClick, + onOpenRequestConnectorModal, + showConnectorBuilderButton = false, +}: ConnectorListProps) => { + const sortedConnectorDefinitions = useMemo( + () => + connectorDefinitions.sort((a, b) => { + switch (sorting.column) { + case "name": + const localeCompare = a.name.localeCompare(b.name); + return sorting.isAscending ? 
localeCompare : -localeCompare; + default: + return sortNumericMetric( + getNumericMetric(a, sorting.column), + getNumericMetric(b, sorting.column), + sorting.isAscending + ); + } + }), + [connectorDefinitions, sorting.column, sorting.isAscending] + ); + + return ( + + {suggestedConnectorDefinitionIds && suggestedConnectorDefinitionIds.length > 0 && ( +
+ +
+ )} + + {connectorDefinitions.length === 0 && noSearchResultsContent} + + {displayType === "grid" ? ( +
+ {sortedConnectorDefinitions.map((definition) => { + const key = isSourceDefinition(definition) + ? definition.sourceDefinitionId + : definition.destinationDefinitionId; + return ; + })} + + {showConnectorBuilderButton && } + +
+ ) : ( + + {sortedConnectorDefinitions.map((definition) => { + const key = isSourceDefinition(definition) + ? definition.sourceDefinitionId + : definition.destinationDefinitionId; + return ( + + ); + })} + + {showConnectorBuilderButton && ( + + )} + + + )} +
+ ); +}; + +type NumericMetric = 1 | 2 | 3 | undefined; + +const getNumericMetric = (connectorDefinition: ConnectorDefinition, metric: "successRate" | "usage"): NumericMetric => { + const rawMetricValue = + metric === "successRate" + ? connectorDefinition.metrics?.all?.sync_success_rate + : connectorDefinition.metrics?.all?.usage; + if (!isString(rawMetricValue)) { + return undefined; + } + + const lowercaseMetricValue = rawMetricValue.toLowerCase(); + if (lowercaseMetricValue !== "low" && lowercaseMetricValue !== "medium" && lowercaseMetricValue !== "high") { + return undefined; + } + + switch (lowercaseMetricValue) { + case "low": + return 1; + case "medium": + return 2; + case "high": + return 3; + } +}; + +const sortNumericMetric = (a: NumericMetric, b: NumericMetric, isAscending: boolean) => { + if (a && b) { + if (isAscending) { + return a - b; + } + return b - a; + } + if (a && !b) { + return -1; + } + if (!a && b) { + return 1; + } + return 0; +}; diff --git a/airbyte-webapp/src/components/source/SelectConnector/FilterSupportLevel.module.scss b/airbyte-webapp/src/components/source/SelectConnector/FilterSupportLevel.module.scss deleted file mode 100644 index 29d84965e3c..00000000000 --- a/airbyte-webapp/src/components/source/SelectConnector/FilterSupportLevel.module.scss +++ /dev/null @@ -1,8 +0,0 @@ -.checkboxLabel { - cursor: pointer; - user-select: none; - - &:has(:disabled) { - cursor: not-allowed; - } -} diff --git a/airbyte-webapp/src/components/source/SelectConnector/FilterSupportLevel.tsx b/airbyte-webapp/src/components/source/SelectConnector/FilterSupportLevel.tsx deleted file mode 100644 index 72bf961ffe4..00000000000 --- a/airbyte-webapp/src/components/source/SelectConnector/FilterSupportLevel.tsx +++ /dev/null @@ -1,74 +0,0 @@ -import { useMemo } from "react"; -import { FormattedMessage } from "react-intl"; - -import { CheckBox } from "components/ui/CheckBox"; -import { FlexContainer } from "components/ui/Flex"; -import { SupportLevelBadge } from "components/ui/SupportLevelBadge"; -import { Text } from "components/ui/Text"; - -import { SupportLevel } from "core/api/types/AirbyteClient"; - -import styles from "./FilterSupportLevel.module.scss"; - -interface FilterSupportLevelProps { - selectedSupportLevels: SupportLevel[]; - onUpdateSelectedSupportLevels: (newSupportLevels: SupportLevel[]) => void; - availableSupportLevels: SupportLevel[]; -} - -export const FilterSupportLevel: React.FC = ({ - selectedSupportLevels, - onUpdateSelectedSupportLevels, - availableSupportLevels, -}) => { - const handleChange = (stage: SupportLevel, isSelected: boolean) => { - if (isSelected) { - onUpdateSelectedSupportLevels([...selectedSupportLevels, stage]); - } else { - onUpdateSelectedSupportLevels(selectedSupportLevels.filter((s) => s !== stage)); - } - }; - - // It's possible that there are no custom connectors, so that filter is hidden. But that filter might - // still be technically selected, because we cache the user's selection in local storage. - // In that case we want to know how many of the filters that _are_ visible have been selected. 
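The `ConnectorList` sorting helpers above reduce the marketplace metric strings to ordinals: `sync_success_rate`/`usage` values of "low"/"medium"/"high" map to 1/2/3, anything else becomes `undefined`, and `undefined` always sorts last regardless of direction. A minimal self-contained sketch of the same comparison (the sample connector values are illustrative):

```ts
type NumericMetric = 1 | 2 | 3 | undefined;

// Mirrors getNumericMetric above: any value other than "low"/"medium"/"high" yields undefined
const toNumericMetric = (raw: unknown): NumericMetric => {
  if (typeof raw !== "string") {
    return undefined;
  }
  switch (raw.toLowerCase()) {
    case "low":
      return 1;
    case "medium":
      return 2;
    case "high":
      return 3;
    default:
      return undefined;
  }
};

// Mirrors sortNumericMetric above: connectors without a metric always sink to the end
const compareMetrics = (a: NumericMetric, b: NumericMetric, isAscending: boolean): number => {
  if (a && b) {
    return isAscending ? a - b : b - a;
  }
  if (a && !b) {
    return -1;
  }
  if (!a && b) {
    return 1;
  }
  return 0;
};

// Example: sort by success rate, descending; "gamma" has no metric and stays last
const connectors = [
  { name: "alpha", successRate: toNumericMetric("medium") },
  { name: "beta", successRate: toNumericMetric("high") },
  { name: "gamma", successRate: toNumericMetric(undefined) },
  { name: "delta", successRate: toNumericMetric("low") },
];
connectors.sort((a, b) => compareMetrics(a.successRate, b.successRate, false));
// -> beta (3), alpha (2), delta (1), gamma (undefined)
```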
- const numberOfVisiblySelectedSupportLevels = useMemo(() => { - return selectedSupportLevels.filter((stage) => availableSupportLevels.includes(stage)).length; - }, [selectedSupportLevels, availableSupportLevels]); - - return ( - - - - - {availableSupportLevels.flatMap((level, index) => { - const id = `filter-support-level-${level}`; - const isChecked = selectedSupportLevels.includes(level); - return [ - // separator inbetween each filter - ...(index !== 0 - ? [ - - | - , - ] - : []), - // rule doesn't understand SupportLevelBadge renders text - // eslint-disable-next-line jsx-a11y/label-has-associated-control - , - ]; - })} - - ); -}; diff --git a/airbyte-webapp/src/components/source/SelectConnector/RequestNewConnectorButton.module.scss b/airbyte-webapp/src/components/source/SelectConnector/RequestNewConnectorButton.module.scss index 3a233077f2d..938008e4946 100644 --- a/airbyte-webapp/src/components/source/SelectConnector/RequestNewConnectorButton.module.scss +++ b/airbyte-webapp/src/components/source/SelectConnector/RequestNewConnectorButton.module.scss @@ -1,17 +1,15 @@ @use "scss/variables"; @use "scss/colors"; -$iconHeight: 38px; - .button { + padding: variables.$spacing-xl; display: flex; justify-content: center; align-items: center; - gap: variables.$spacing-lg; - color: colors.$grey-500; - border: variables.$border-thin solid colors.$grey-100; + gap: variables.$spacing-sm; + border: variables.$border-thin dashed colors.$grey-400; box-shadow: none; - background-color: colors.$grey-30; + background-color: transparent; border-radius: variables.$border-radius-md; transition: color variables.$transition, @@ -23,7 +21,9 @@ $iconHeight: 38px; &__text { transition: color variables.$transition; - color: colors.$grey-500; + white-space: nowrap; + overflow: hidden; + text-overflow: ellipsis; } &:hover, @@ -37,7 +37,7 @@ $iconHeight: 38px; } } - svg { - min-height: $iconHeight; + & .icon { + flex-shrink: 0; } } diff --git a/airbyte-webapp/src/components/source/SelectConnector/RequestNewConnectorButton.tsx b/airbyte-webapp/src/components/source/SelectConnector/RequestNewConnectorButton.tsx index c3ba7f79bb8..76499dbab44 100644 --- a/airbyte-webapp/src/components/source/SelectConnector/RequestNewConnectorButton.tsx +++ b/airbyte-webapp/src/components/source/SelectConnector/RequestNewConnectorButton.tsx @@ -1,3 +1,4 @@ +import classNames from "classnames"; import { FormattedMessage } from "react-intl"; import { Icon } from "components/ui/Icon"; @@ -6,13 +7,14 @@ import { Text } from "components/ui/Text"; import styles from "./RequestNewConnectorButton.module.scss"; interface RequestNewConnectorButtonProps { + className?: string; onClick: () => void; } -export const RequestNewConnectorButton: React.FC = ({ onClick }) => { +export const RequestNewConnectorButton: React.FC = ({ className, onClick }) => { return ( - + ); + })} +
+ ); + return (
-
-
- setSearchTerm(e.target.value)} /> - - - - updateSelectedSupportLevels(supportLevels)} +
+ + setSearchTerm(e.target.value)} + placeholder={formatMessage( + { id: "connector.searchPlaceholder" }, + { tabName: getTabDisplayName(selectedTab) } + )} + /> + + { + setSelectedTab("certified"); + }} + /> + setSelectedTab("marketplace")} + /> + {hasCustomConnectors && ( + setSelectedTab("custom")} + /> + )} + + + + - - - + + + handleSortClick("name")} + isActive={sortingByTab[selectedTab].column === "name"} + isAscending={sortingByTab[selectedTab].isAscending} + iconSize="sm" + > + + + {selectedTab === "marketplace" && ( + <> + handleSortClick("successRate")} + isActive={sortingByTab[selectedTab].column === "successRate"} + isAscending={sortingByTab[selectedTab].isAscending} + iconSize="sm" + > + + + handleSortClick("usage")} + isActive={sortingByTab[selectedTab].column === "usage"} + isAscending={sortingByTab[selectedTab].isAscending} + iconSize="sm" + > + + + + )} - -
- -
- - {suggestedConnectorDefinitionIds.length > 0 && ( -
- -
- )} + + +
- 0 ? allSearchResults.length - filteredSearchResults.length : 0 + { - updateSelectedSupportLevels(SUPPORT_LEVELS); - }} - connectorDefinitions={filteredSearchResults} onConnectorButtonClick={handleConnectorButtonClick} onOpenRequestConnectorModal={onOpenRequestConnectorModal} showConnectorBuilderButton={connectorType === "source"} + noSearchResultsContent={ + + + + + {seeMoreButtons} + + } />
+ + {searchTerm.length > 0 && searchResultsByTab[selectedTab].length > 0 && seeMoreButtons}
); }; diff --git a/airbyte-webapp/src/area/connector/components/SuggestedConnectors/SuggestedConnectors.module.scss b/airbyte-webapp/src/components/source/SelectConnector/SuggestedConnectors.module.scss similarity index 53% rename from airbyte-webapp/src/area/connector/components/SuggestedConnectors/SuggestedConnectors.module.scss rename to airbyte-webapp/src/components/source/SelectConnector/SuggestedConnectors.module.scss index cce50d9c4bb..2229d13d7b8 100644 --- a/airbyte-webapp/src/area/connector/components/SuggestedConnectors/SuggestedConnectors.module.scss +++ b/airbyte-webapp/src/components/source/SelectConnector/SuggestedConnectors.module.scss @@ -1,21 +1,24 @@ @use "scss/colors"; @use "scss/variables"; +@use "./gridColumns"; .suggestedConnectors { position: relative; background: colors.$blue-50; - padding: variables.$spacing-xl; + padding: variables.$spacing-md; border-radius: variables.$border-radius-md; + @include gridColumns.responsive-grid; + &__dismiss { position: absolute; - top: variables.$spacing-md; - right: variables.$spacing-md; + top: 0; + right: 0; } &__grid { display: grid; - grid-template-columns: repeat(3, 1fr); - gap: variables.$spacing-xl; + grid-template-columns: repeat(var(--grid-columns), 1fr); + gap: variables.$spacing-md; } } diff --git a/airbyte-webapp/src/area/connector/components/SuggestedConnectors/SuggestedConnectors.tsx b/airbyte-webapp/src/components/source/SelectConnector/SuggestedConnectors.tsx similarity index 84% rename from airbyte-webapp/src/area/connector/components/SuggestedConnectors/SuggestedConnectors.tsx rename to airbyte-webapp/src/components/source/SelectConnector/SuggestedConnectors.tsx index d05918d5dbe..35f8df30d77 100644 --- a/airbyte-webapp/src/area/connector/components/SuggestedConnectors/SuggestedConnectors.tsx +++ b/airbyte-webapp/src/components/source/SelectConnector/SuggestedConnectors.tsx @@ -64,32 +64,29 @@ export const SuggestedConnectorsUnmemoized: React.FC = : "destinations.suggestedDestinations"; return ( -
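The `SelectConnector` changes above keep an independent `ConnectorSorting` per tab, and the sort headers toggle direction when the active column is clicked again. The state updater itself is not visible in this diff, so the following is only a plausible sketch of its shape; the literal types mirror the `ConnectorTab` and `ConnectorSorting` types exported from `SelectConnector`:

```tsx
import { useState } from "react";

type ConnectorTab = "certified" | "marketplace" | "custom";
interface ConnectorSorting {
  column: "name" | "successRate" | "usage";
  isAscending: boolean;
}

const useSortingByTab = () => {
  const [sortingByTab, setSortingByTab] = useState<Record<ConnectorTab, ConnectorSorting>>({
    certified: { column: "name", isAscending: true },
    marketplace: { column: "name", isAscending: true },
    custom: { column: "name", isAscending: true },
  });

  // Clicking the active column flips the direction; clicking a new column sorts ascending by it
  const handleSortClick = (tab: ConnectorTab, column: ConnectorSorting["column"]) =>
    setSortingByTab((prev) => ({
      ...prev,
      [tab]: {
        column,
        isAscending: prev[tab].column === column ? !prev[tab].isAscending : true,
      },
    }));

  return { sortingByTab, handleSortClick };
};
```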
- -
- - - - -
-
+ + + + +
{definitions.map((definition) => ( onConnectorButtonClick(definition)} key={isSourceDefinition(definition) ? definition.sourceDefinitionId : definition.destinationDefinitionId} + maxLines={3} /> ))}
-
+ ); }; diff --git a/airbyte-webapp/src/components/source/SelectConnector/_gridColumns.scss b/airbyte-webapp/src/components/source/SelectConnector/_gridColumns.scss new file mode 100644 index 00000000000..089c5bfe48d --- /dev/null +++ b/airbyte-webapp/src/components/source/SelectConnector/_gridColumns.scss @@ -0,0 +1,11 @@ +@mixin responsive-grid() { + --grid-columns: 3; + + @container (max-width: 800px) { + --grid-columns: 2; + } + + @container (max-width: 500px) { + --grid-columns: 1; + } +} diff --git a/airbyte-webapp/src/components/source/SelectConnector/index.ts b/airbyte-webapp/src/components/source/SelectConnector/index.ts index 0a6fec5e976..52b6f760027 100644 --- a/airbyte-webapp/src/components/source/SelectConnector/index.ts +++ b/airbyte-webapp/src/components/source/SelectConnector/index.ts @@ -1 +1,2 @@ export { SelectConnector } from "./SelectConnector"; +export type { ConnectorTab } from "./SelectConnector"; diff --git a/airbyte-webapp/src/components/ui/Button/Button.module.scss b/airbyte-webapp/src/components/ui/Button/Button.module.scss index 4e3e33e2de2..e91fd0bbdff 100644 --- a/airbyte-webapp/src/components/ui/Button/Button.module.scss +++ b/airbyte-webapp/src/components/ui/Button/Button.module.scss @@ -40,11 +40,18 @@ opacity: 0.5; } + .childrenContainer { + overflow: hidden; + text-overflow: ellipsis; + white-space: nowrap; + } + .buttonIcon { display: flex; justify-content: center; align-content: center; position: relative; + flex: 0 0 auto; &.positionLeft { &.withLabel { diff --git a/airbyte-webapp/src/components/ui/Collapsible/Collapsible.module.scss b/airbyte-webapp/src/components/ui/Collapsible/Collapsible.module.scss index c82f709d1bd..e45b9d0a1e5 100644 --- a/airbyte-webapp/src/components/ui/Collapsible/Collapsible.module.scss +++ b/airbyte-webapp/src/components/ui/Collapsible/Collapsible.module.scss @@ -52,6 +52,10 @@ $icon-width: 18px; } } +.labelContainer { + min-width: 0; +} + .label { color: inherit; font-size: inherit; diff --git a/airbyte-webapp/src/components/ui/Collapsible/Collapsible.tsx b/airbyte-webapp/src/components/ui/Collapsible/Collapsible.tsx index 60759178682..63966319ca0 100644 --- a/airbyte-webapp/src/components/ui/Collapsible/Collapsible.tsx +++ b/airbyte-webapp/src/components/ui/Collapsible/Collapsible.tsx @@ -71,7 +71,7 @@ export const Collapsible: React.FC> = >
- + {label} {infoTooltipContent && {infoTooltipContent}} diff --git a/airbyte-webapp/src/components/ui/Heading/Heading.module.scss b/airbyte-webapp/src/components/ui/Heading/Heading.module.scss index e12c39ece75..b9c383e3afc 100644 --- a/airbyte-webapp/src/components/ui/Heading/Heading.module.scss +++ b/airbyte-webapp/src/components/ui/Heading/Heading.module.scss @@ -11,6 +11,12 @@ } // sizes +.xs { + font-size: 14px; + font-weight: 600; + line-height: 1.2em; +} + .sm { font-size: 16px; line-height: 1.2em; diff --git a/airbyte-webapp/src/components/ui/Heading/Heading.tsx b/airbyte-webapp/src/components/ui/Heading/Heading.tsx index 66418528802..59ec2e01261 100644 --- a/airbyte-webapp/src/components/ui/Heading/Heading.tsx +++ b/airbyte-webapp/src/components/ui/Heading/Heading.tsx @@ -3,7 +3,7 @@ import React, { HTMLAttributes } from "react"; import styles from "./Heading.module.scss"; -type HeadingSize = "sm" | "md" | "lg" | "xl"; +type HeadingSize = "xs" | "sm" | "md" | "lg" | "xl"; type HeadingColor = "darkBlue" | "blue"; type HeadingElementType = "h1" | "h2" | "h3" | "h4" | "h5" | "h6"; @@ -17,6 +17,7 @@ type HeadingProps = HTMLAttributes & { }; const sizes: Record = { + xs: styles.xs, sm: styles.sm, md: styles.md, lg: styles.lg, diff --git a/airbyte-webapp/src/components/ui/Icon/Icon.tsx b/airbyte-webapp/src/components/ui/Icon/Icon.tsx index a045f44a003..4234ec1bc59 100644 --- a/airbyte-webapp/src/components/ui/Icon/Icon.tsx +++ b/airbyte-webapp/src/components/ui/Icon/Icon.tsx @@ -8,6 +8,7 @@ import ArrowLeftIcon from "./icons/arrowLeftIcon.svg?react"; import ArrowRightIcon from "./icons/arrowRightIcon.svg?react"; import ArticleIcon from "./icons/articleIcon.svg?react"; import BellIcon from "./icons/bellIcon.svg?react"; +import CactusIcon from "./icons/cactusIcon.svg?react"; import CalendarCheckIcon from "./icons/calendarCheckIcon.svg?react"; import CalendarIcon from "./icons/calendarIcon.svg?react"; import CaretDownIcon from "./icons/caretDownIcon.svg?react"; @@ -76,6 +77,14 @@ import LoadingIcon from "./icons/loadingIcon.svg?react"; import LocationIcon from "./icons/locationIcon.svg?react"; import LockIcon from "./icons/lockIcon.svg?react"; import MenuIcon from "./icons/menuIcon.svg?react"; +import MetricSuccessHighIcon from "./icons/metricSuccessHighIcon.svg?react"; +import MetricSuccessLowIcon from "./icons/metricSuccessLowIcon.svg?react"; +import MetricSuccessMedIcon from "./icons/metricSuccessMedIcon.svg?react"; +import MetricSuccessNoneIcon from "./icons/metricSuccessNoneIcon.svg?react"; +import MetricUsageHighIcon from "./icons/metricUsageHighIcon.svg?react"; +import MetricUsageLowIcon from "./icons/metricUsageLowIcon.svg?react"; +import MetricUsageMedIcon from "./icons/metricUsageMedIcon.svg?react"; +import MetricUsageNoneIcon from "./icons/metricUsageNoneIcon.svg?react"; import MinusCircleIcon from "./icons/minusCircleIcon.svg?react"; import MinusIcon from "./icons/minusIcon.svg?react"; import ModificationIcon from "./icons/modificationIcon.svg?react"; @@ -161,6 +170,7 @@ export const Icons: Record>> = arrowRight: ArrowRightIcon, article: ArticleIcon, bell: BellIcon, + cactus: CactusIcon, calendar: CalendarIcon, calendarCheck: CalendarCheckIcon, caretDown: CaretDownIcon, @@ -229,6 +239,14 @@ export const Icons: Record>> = location: LocationIcon, lock: LockIcon, menu: MenuIcon, + metricSuccessHigh: MetricSuccessHighIcon, + metricSuccessLow: MetricSuccessLowIcon, + metricSuccessMed: MetricSuccessMedIcon, + metricSuccessNone: MetricSuccessNoneIcon, + metricUsageHigh: 
MetricUsageHighIcon, + metricUsageLow: MetricUsageLowIcon, + metricUsageMed: MetricUsageMedIcon, + metricUsageNone: MetricUsageNoneIcon, minus: MinusIcon, minusCircle: MinusCircleIcon, modification: ModificationIcon, diff --git a/airbyte-webapp/src/components/ui/Icon/icons/cactusIcon.svg b/airbyte-webapp/src/components/ui/Icon/icons/cactusIcon.svg new file mode 100644 index 00000000000..a386cdabc11 --- /dev/null +++ b/airbyte-webapp/src/components/ui/Icon/icons/cactusIcon.svg @@ -0,0 +1,3 @@ + + + \ No newline at end of file diff --git a/airbyte-webapp/src/components/ui/Icon/icons/metricSuccessHighIcon.svg b/airbyte-webapp/src/components/ui/Icon/icons/metricSuccessHighIcon.svg new file mode 100644 index 00000000000..a841cc628b6 --- /dev/null +++ b/airbyte-webapp/src/components/ui/Icon/icons/metricSuccessHighIcon.svg @@ -0,0 +1,22 @@ + + + + + + + \ No newline at end of file diff --git a/airbyte-webapp/src/components/ui/Icon/icons/metricSuccessLowIcon.svg b/airbyte-webapp/src/components/ui/Icon/icons/metricSuccessLowIcon.svg new file mode 100644 index 00000000000..762897046be --- /dev/null +++ b/airbyte-webapp/src/components/ui/Icon/icons/metricSuccessLowIcon.svg @@ -0,0 +1,26 @@ + + + + + + + \ No newline at end of file diff --git a/airbyte-webapp/src/components/ui/Icon/icons/metricSuccessMedIcon.svg b/airbyte-webapp/src/components/ui/Icon/icons/metricSuccessMedIcon.svg new file mode 100644 index 00000000000..e16b5747b10 --- /dev/null +++ b/airbyte-webapp/src/components/ui/Icon/icons/metricSuccessMedIcon.svg @@ -0,0 +1,24 @@ + + + + + + + \ No newline at end of file diff --git a/airbyte-webapp/src/components/ui/Icon/icons/metricSuccessNoneIcon.svg b/airbyte-webapp/src/components/ui/Icon/icons/metricSuccessNoneIcon.svg new file mode 100644 index 00000000000..6aaf56be5db --- /dev/null +++ b/airbyte-webapp/src/components/ui/Icon/icons/metricSuccessNoneIcon.svg @@ -0,0 +1,27 @@ + + + + + + + \ No newline at end of file diff --git a/airbyte-webapp/src/components/ui/Icon/icons/metricUsageHighIcon.svg b/airbyte-webapp/src/components/ui/Icon/icons/metricUsageHighIcon.svg new file mode 100644 index 00000000000..d615e781ba0 --- /dev/null +++ b/airbyte-webapp/src/components/ui/Icon/icons/metricUsageHighIcon.svg @@ -0,0 +1,14 @@ + + + + + \ No newline at end of file diff --git a/airbyte-webapp/src/components/ui/Icon/icons/metricUsageLowIcon.svg b/airbyte-webapp/src/components/ui/Icon/icons/metricUsageLowIcon.svg new file mode 100644 index 00000000000..8af88f3d387 --- /dev/null +++ b/airbyte-webapp/src/components/ui/Icon/icons/metricUsageLowIcon.svg @@ -0,0 +1,16 @@ + + + + + \ No newline at end of file diff --git a/airbyte-webapp/src/components/ui/Icon/icons/metricUsageMedIcon.svg b/airbyte-webapp/src/components/ui/Icon/icons/metricUsageMedIcon.svg new file mode 100644 index 00000000000..3211e0390d4 --- /dev/null +++ b/airbyte-webapp/src/components/ui/Icon/icons/metricUsageMedIcon.svg @@ -0,0 +1,15 @@ + + + + + \ No newline at end of file diff --git a/airbyte-webapp/src/components/ui/Icon/icons/metricUsageNoneIcon.svg b/airbyte-webapp/src/components/ui/Icon/icons/metricUsageNoneIcon.svg new file mode 100644 index 00000000000..8c6694f212e --- /dev/null +++ b/airbyte-webapp/src/components/ui/Icon/icons/metricUsageNoneIcon.svg @@ -0,0 +1,17 @@ + + + + + \ No newline at end of file diff --git a/airbyte-webapp/src/components/ui/Icon/types.ts b/airbyte-webapp/src/components/ui/Icon/types.ts index e1e817bd5a8..5cd4caa36f7 100644 --- a/airbyte-webapp/src/components/ui/Icon/types.ts +++ 
b/airbyte-webapp/src/components/ui/Icon/types.ts @@ -4,6 +4,7 @@ export type IconType = | "arrowRight" | "article" | "bell" + | "cactus" | "calendar" | "calendarCheck" | "caretDown" @@ -72,6 +73,14 @@ export type IconType = | "location" | "lock" | "menu" + | "metricSuccessHigh" + | "metricSuccessLow" + | "metricSuccessMed" + | "metricSuccessNone" + | "metricUsageHigh" + | "metricUsageLow" + | "metricUsageMed" + | "metricUsageNone" | "minus" | "minusCircle" | "modification" diff --git a/airbyte-webapp/src/components/ui/Markdown/Markdown.tsx b/airbyte-webapp/src/components/ui/Markdown/Markdown.tsx index abad936b187..1f80159aca2 100644 --- a/airbyte-webapp/src/components/ui/Markdown/Markdown.tsx +++ b/airbyte-webapp/src/components/ui/Markdown/Markdown.tsx @@ -1,3 +1,5 @@ +import type { PluggableList } from "unified"; + import classNames from "classnames"; import MarkdownToJsx from "markdown-to-jsx"; import React, { useMemo } from "react"; @@ -13,6 +15,7 @@ interface MarkdownToJsxProps { className?: string; content: string; options?: Options; + remarkPlugins?: PluggableList; } function surroundTagWithNewlines(tag: string, markdown: string): string { @@ -23,7 +26,7 @@ function surroundTagWithNewlines(tag: string, markdown: string): string { return processed; } -function preprocessMarkdown(markdown: string): string { +function preprocessMarkdown(markdown: string, additionalPlugins: PluggableList = []): string { // Note: there is also some preprocessing happening in DocumentationPanel.tsx's // prepareMarkdown function that is specific to the connector documentation pages. @@ -64,7 +67,6 @@ function preprocessMarkdown(markdown: string): string { preprocessed = surroundTagWithNewlines("details", preprocessed); // And likewise for preprocessed = surroundTagWithNewlines("Tabs", preprocessed); - // Add an empty line before any code block that isn't already preceded by one, since // without it the code block is just rendered as plain text. preprocessed = preprocessed.replace( @@ -72,44 +74,44 @@ function preprocessMarkdown(markdown: string): string { "$\n\n$```" ); - // Apply remark plugins to the markdown. - // This should be ran last so that remarkGfm doesn't interfere with the above. - preprocessed = remark().use(remarkGfm).processSync(preprocessed).toString(); + const pluginsToApply = [remarkGfm, ...additionalPlugins]; - return preprocessed; + return remark().use(pluginsToApply).processSync(preprocessed).toString(); } -export const Markdown: React.FC = React.memo(({ className, content, options }) => { - const processedMarkdown = useMemo(() => preprocessMarkdown(content), [content]); - return ( -
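The Icon registry above gains eight metric glyphs, one per metric (success rate vs. usage) and level (none/low/med/high). The real lookup lives in `MetricIcon` (`components/connector/ConnectorQualityMetrics`), which is not part of this diff, so this mapping is only a hypothetical illustration of how a level string can be narrowed to the registered `IconType` names:

```tsx
import React from "react";

import { Icon } from "components/ui/Icon";
import { IconType } from "components/ui/Icon/types";

type MetricLevel = "none" | "low" | "medium" | "high";

// "med" is the spelling registered in the Icons map above
const successRateIcon: Record<MetricLevel, IconType> = {
  none: "metricSuccessNone",
  low: "metricSuccessLow",
  medium: "metricSuccessMed",
  high: "metricSuccessHigh",
};

export const SuccessRateIcon: React.FC<{ level: MetricLevel }> = ({ level }) => (
  <Icon type={successRateIcon[level]} />
);
```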
- = React.memo( + ({ className, content, options, remarkPlugins = [] }) => { + const processedMarkdown = useMemo(() => preprocessMarkdown(content, remarkPlugins), [content, remarkPlugins]); + return ( +
+ - {processedMarkdown} - -
- ); -}); + }} + > + {processedMarkdown} +
+
+ ); + } +); Markdown.displayName = "Markdown"; diff --git a/airbyte-webapp/src/components/ui/NumberBadge/NumberBadge.module.scss b/airbyte-webapp/src/components/ui/NumberBadge/NumberBadge.module.scss index 3c16816ee44..8edfe5ef9fd 100644 --- a/airbyte-webapp/src/components/ui/NumberBadge/NumberBadge.module.scss +++ b/airbyte-webapp/src/components/ui/NumberBadge/NumberBadge.module.scss @@ -13,6 +13,13 @@ justify-content: center; align-items: center; + &.small { + height: 14px; + min-width: 14px; + border-radius: variables.$border-radius-pill; + padding: 0; + } + &.default { background: colors.$dark-blue-100; } @@ -29,6 +36,10 @@ background: colors.$blue; } + &.yellow { + background: colors.$yellow; + } + &.blue--outline { border: variables.$border-thin solid colors.$blue; background: colors.$foreground; diff --git a/airbyte-webapp/src/components/ui/NumberBadge/NumberBadge.tsx b/airbyte-webapp/src/components/ui/NumberBadge/NumberBadge.tsx index eaed829a137..d3144d0f766 100644 --- a/airbyte-webapp/src/components/ui/NumberBadge/NumberBadge.tsx +++ b/airbyte-webapp/src/components/ui/NumberBadge/NumberBadge.tsx @@ -7,10 +7,12 @@ import styles from "./NumberBadge.module.scss"; interface NumberBadgeProps { value: number; - color?: "green" | "red" | "blue" | "default" | "grey"; + color?: "green" | "red" | "blue" | "default" | "grey" | "yellow"; outline?: boolean; className?: string; "aria-label"?: string; + small?: boolean; + inverse?: boolean; } export const NumberBadge: React.FC = ({ @@ -19,22 +21,29 @@ export const NumberBadge: React.FC = ({ className, outline = false, "aria-label": ariaLabel, + small = false, + inverse = false, }) => { const numberBadgeClassnames = classnames(styles.circle, className, { + [styles.small]: small, + [styles.inverse]: inverse, [styles.default]: !color || color === "default", [styles["grey--outline"]]: outline === true && color === "grey", [styles["blue--outline"]]: outline === true && color === "blue", [styles.green]: color === "green", [styles.red]: color === "red", [styles.blue]: color === "blue", + [styles.yellow]: color === "yellow", }); + const inverseColor = inverse || (color === "blue" && !outline); + return (
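The `Markdown` component above now accepts a `remarkPlugins` prop that is appended after `remarkGfm`, instead of baking the plugin list into `preprocessMarkdown`. A hedged usage sketch (`remark-directive` is just an example plugin, not a dependency this diff introduces, and the import path assumes the usual barrel export):

```tsx
import React from "react";
import remarkDirective from "remark-directive";

import { Markdown } from "components/ui/Markdown";

// remarkGfm still runs first; plugins passed here are applied after it
export const ConnectorDoc: React.FC<{ content: string }> = ({ content }) => (
  <Markdown content={content} remarkPlugins={[remarkDirective]} />
);
```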
{value} diff --git a/airbyte-webapp/src/components/ui/RemoveButton/RemoveButton.tsx b/airbyte-webapp/src/components/ui/RemoveButton/RemoveButton.tsx index 976b123b630..eed4080115c 100644 --- a/airbyte-webapp/src/components/ui/RemoveButton/RemoveButton.tsx +++ b/airbyte-webapp/src/components/ui/RemoveButton/RemoveButton.tsx @@ -1,9 +1,11 @@ +import classNames from "classnames"; + import styles from "./RemoveButton.module.scss"; import { Icon } from "../Icon"; -export const RemoveButton = ({ onClick }: { onClick: () => void }) => { +export const RemoveButton = ({ onClick, className }: { onClick: () => void; className?: string }) => { return ( - ); diff --git a/airbyte-webapp/src/components/ui/SupportLevelBadge/SupportLevelBadge.tsx b/airbyte-webapp/src/components/ui/SupportLevelBadge/SupportLevelBadge.tsx index f988d898b3f..450d7de7ade 100644 --- a/airbyte-webapp/src/components/ui/SupportLevelBadge/SupportLevelBadge.tsx +++ b/airbyte-webapp/src/components/ui/SupportLevelBadge/SupportLevelBadge.tsx @@ -9,19 +9,27 @@ interface SupportLevelBadgeProps { supportLevel?: SupportLevel; custom?: boolean; tooltip?: boolean; + className?: string; + hideCertified?: boolean; } export const SupportLevelBadge: React.FC = ({ supportLevel, + className, custom = false, tooltip = true, + hideCertified = true, }) => { - if (!supportLevel || (!custom && supportLevel === SupportLevel.none)) { + if ( + !supportLevel || + (!custom && supportLevel === SupportLevel.none) || + (hideCertified && supportLevel === SupportLevel.certified) + ) { return null; } const badgeComponent = ( - + void; isActive: boolean; isAscending: boolean; + className?: string; + activeClassName?: string; + iconSize?: IconSize; } export const SortableTableHeader: React.FC> = ({ @@ -15,9 +19,22 @@ export const SortableTableHeader: React.FC ( - ); diff --git a/airbyte-webapp/src/components/ui/Table/Table.module.scss b/airbyte-webapp/src/components/ui/Table/Table.module.scss index 30b6dcbc994..348db29ae88 100644 --- a/airbyte-webapp/src/components/ui/Table/Table.module.scss +++ b/airbyte-webapp/src/components/ui/Table/Table.module.scss @@ -14,6 +14,10 @@ $border-radius: variables.$border-radius-lg; &--default { box-shadow: variables.$box-shadow; } + + &--empty { + height: 100%; + } } .thead--sticky { @@ -85,6 +89,12 @@ $border-radius: variables.$border-radius-lg; &:hover { background-color: colors.$grey-50; } + + &.emptyPlaceholder { + &:hover { + background-color: colors.$foreground; + } + } } // --------- --------- diff --git a/airbyte-webapp/src/components/ui/Table/Table.stories.tsx b/airbyte-webapp/src/components/ui/Table/Table.stories.tsx index 422c0466774..fba29d8a1c9 100644 --- a/airbyte-webapp/src/components/ui/Table/Table.stories.tsx +++ b/airbyte-webapp/src/components/ui/Table/Table.stories.tsx @@ -1,4 +1,4 @@ -import { ComponentMeta, Story } from "@storybook/react"; +import { Story, StoryObj } from "@storybook/react"; import { createColumnHelper } from "@tanstack/react-table"; import { Table, TableProps } from "./Table"; @@ -12,11 +12,15 @@ export default { title: "UI/Table", component: Table, argTypes: {}, -} as ComponentMeta; +} as StoryObj; const Template = (): Story> => - (args) => {...args} />; + (args) => ( +
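`NumberBadge` above gains `small`, `inverse`, and a `yellow` color, and filled blue badges now imply inverse text. A usage sketch with hypothetical values:

```tsx
import React from "react";

import { NumberBadge } from "components/ui/NumberBadge";

// A compact yellow counter of the kind used for tab badges; the label text is illustrative
export const HiddenResultsBadge: React.FC<{ count: number }> = ({ count }) => (
  <NumberBadge value={count} color="yellow" small aria-label={`${count} hidden results`} />
);
```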
+ {...args} /> +
+ ); const data: Item[] = [ { name: "2017", value: 100 }, @@ -44,3 +48,31 @@ Primary.args = { data, columns, }; + +export const PrimaryEmpty = Template().bind({}); +PrimaryEmpty.args = { + data: [], + columns, +}; + +export const Virtualized = Template().bind({}); +Virtualized.args = { + data, + columns, + virtualized: true, +}; + +export const VirtualizedEmpty = Template().bind({}); +VirtualizedEmpty.args = { + data: [], + columns, + virtualized: true, +}; + +export const VirtualizedCustomEmptyPlaceholder = Template().bind({}); +VirtualizedCustomEmptyPlaceholder.args = { + data: [], + columns, + virtualized: true, + customEmptyPlaceholder:
Custom empty placeholder
, +}; diff --git a/airbyte-webapp/src/components/ui/Table/Table.tsx b/airbyte-webapp/src/components/ui/Table/Table.tsx index 42441be2260..4a221e4e2e6 100644 --- a/airbyte-webapp/src/components/ui/Table/Table.tsx +++ b/airbyte-webapp/src/components/ui/Table/Table.tsx @@ -9,8 +9,11 @@ import { } from "@tanstack/react-table"; import classNames from "classnames"; import React, { PropsWithChildren } from "react"; +import { FormattedMessage } from "react-intl"; import { TableVirtuoso, TableComponents, ItemProps } from "react-virtuoso"; +import { Text } from "components/ui/Text"; + import { SortableTableHeader } from "./SortableTableHeader"; import styles from "./Table.module.scss"; import { ColumnMeta } from "./types"; @@ -44,6 +47,10 @@ export interface TableProps { React.ComponentProps, "data" | "components" | "totalCount" | "fixedHeaderContent" >; + /** + * Custom placeholder to be shown when the table is empty. Defaults to a simple "No data" message. + */ + customEmptyPlaceholder?: React.ReactElement; } export const Table = ({ @@ -62,6 +69,7 @@ export const Table = ({ initialSortBy, virtualized = false, virtualizedProps, + customEmptyPlaceholder, }: PropsWithChildren>) => { const table = useReactTable({ columns, @@ -83,6 +91,7 @@ export const Table = ({ ({ )); + const EmptyPlaceholder: TableComponents["EmptyPlaceholder"] = () => ( + + + + + + ); + return virtualized ? ( // the parent container should have exact height to make "AutoSizer" work properly @@ -202,6 +223,7 @@ export const Table = ({ Table, TableHead, TableRow: TableRowVirtualized, + EmptyPlaceholder, }} fixedHeaderContent={headerContent} /> @@ -213,11 +235,15 @@ export const Table = ({ data-testid={testId} > {headerContent()} - - {rows.map((row) => ( - - ))} - + {rows.length === 0 ? ( + + ) : ( + + {rows.map((row) => ( + + ))} + + )}
+ + {customEmptyPlaceholder ? customEmptyPlaceholder : } + +
); }; diff --git a/airbyte-webapp/src/components/ui/Tabs/Tabs.tsx b/airbyte-webapp/src/components/ui/Tabs/Tabs.tsx index 5a76a5b82da..39e06756b1e 100644 --- a/airbyte-webapp/src/components/ui/Tabs/Tabs.tsx +++ b/airbyte-webapp/src/components/ui/Tabs/Tabs.tsx @@ -2,9 +2,13 @@ import React, { PropsWithChildren } from "react"; import { FlexContainer } from "../Flex"; -export const Tabs: React.FC> = ({ children }) => { +interface TabsProps { + className?: string; +} + +export const Tabs: React.FC> = ({ children, className }) => { return ( - + {children} ); diff --git a/airbyte-webapp/src/core/api/hooks/auth.ts b/airbyte-webapp/src/core/api/hooks/auth.ts new file mode 100644 index 00000000000..603a40fbe6a --- /dev/null +++ b/airbyte-webapp/src/core/api/hooks/auth.ts @@ -0,0 +1,40 @@ +import { useMutation } from "@tanstack/react-query"; + +import { apiCall } from "../apis"; + +interface LoginRequestBody { + username: string; + password: string; +} + +interface LoginResponseBody { + username: string; + roles: string[]; + access_token: string; + token_type: "Bearer"; + expires_in: number; +} + +// Defined in code here because this endpoint is not currently part of the open api spec +export const login = (loginRequestBody: LoginRequestBody, options: Parameters[1]) => { + return apiCall( + { + url: `/login`, + method: "post", + headers: { "Content-Type": "application/json" }, + data: loginRequestBody, + }, + options + ); +}; + +export const simpleAuthLogin = async (email: string, password: string): Promise => { + return login({ username: email, password }, { getAccessToken: () => Promise.resolve(null) }); +}; + +export const useSimpleAuthLogin = () => { + return useMutation( + async (loginRequestBody: LoginRequestBody) => + await simpleAuthLogin(loginRequestBody.username, loginRequestBody.password) + ); +}; diff --git a/airbyte-webapp/src/core/api/hooks/connections.tsx b/airbyte-webapp/src/core/api/hooks/connections.tsx index 8d2e26286d2..58185a2edaf 100644 --- a/airbyte-webapp/src/core/api/hooks/connections.tsx +++ b/airbyte-webapp/src/core/api/hooks/connections.tsx @@ -113,7 +113,7 @@ export const useGetConnectionSyncProgress = (connectionId: string, enabled: bool async () => await getConnectionSyncProgress({ connectionId }, requestOptions), { enabled, - refetchInterval: (data) => (data?.jobId ? 
60000 : 5000), + refetchInterval: 10000, } ); }; diff --git a/airbyte-webapp/src/core/api/hooks/filters.ts b/airbyte-webapp/src/core/api/hooks/filters.ts index 1d747ab3d2c..cdb34ce0f1f 100644 --- a/airbyte-webapp/src/core/api/hooks/filters.ts +++ b/airbyte-webapp/src/core/api/hooks/filters.ts @@ -8,8 +8,9 @@ type SetFilterValue = = (filters: T) => void; -export const useFilters = (defaultValues: T): [T, SetFilterValue, SetFilters] => { +export const useFilters = (defaultValues: T): [T, SetFilterValue, SetFilters, boolean] => { const [searchParams, setSearchParams] = useSearchParams(); + const [isInitialState, setIsInitialState] = useState(true); const [filterValues, setFilterValues] = useState(() => { const valuesFromSearchParams = Object.keys(defaultValues).reduce>((acc, filterName) => { @@ -28,11 +29,14 @@ export const useFilters = (defaultValues: T): [T, SetFilterVal const setFilterValue = useCallback>( (filterName, filterValue) => { + if (isInitialState) { + setIsInitialState(false); + } setFilterValues((prevValues) => { return { ...prevValues, [filterName]: filterValue }; }); }, - [setFilterValues] + [isInitialState] ); useEffect(() => { @@ -54,7 +58,7 @@ export const useFilters = (defaultValues: T): [T, SetFilterVal } }, [searchParams, filterValues, setSearchParams, defaultValues]); - return [filterValues, setFilterValue, setFilterValues]; + return [filterValues, setFilterValue, setFilterValues, isInitialState]; }; function filtersAreEqual(newFilters: Record, existingParams: URLSearchParams): boolean { diff --git a/airbyte-webapp/src/core/api/hooks/index.ts b/airbyte-webapp/src/core/api/hooks/index.ts index 240f065e3c4..758f212a12d 100644 --- a/airbyte-webapp/src/core/api/hooks/index.ts +++ b/airbyte-webapp/src/core/api/hooks/index.ts @@ -1,5 +1,6 @@ export * from "./actorDefinitionVersions"; export * from "./applications"; +export * from "./auth"; export * from "./connections"; export * from "./connectorBuilderApi"; export * from "./connectorBuilderProject"; @@ -19,6 +20,7 @@ export * from "./notifications"; export * from "./operations"; export * from "./organizations"; export * from "./permissions"; +export * from "./pypi"; export * from "./security"; export * from "./sources"; export * from "./sourceDefinitions"; diff --git a/airbyte-webapp/src/core/api/hooks/pypi.ts b/airbyte-webapp/src/core/api/hooks/pypi.ts new file mode 100644 index 00000000000..99af9cad27d --- /dev/null +++ b/airbyte-webapp/src/core/api/hooks/pypi.ts @@ -0,0 +1,33 @@ +import { useAppMonitoringService } from "hooks/services/AppMonitoringService"; + +import { useSuspenseQuery } from "../useSuspenseQuery"; + +const fetchLatestVersionOfPyPackage = async (packageName: string): Promise => { + const json = await fetch(`https://pypi.org/pypi/${packageName}/json`).then((resp) => resp.json()); + return json?.info?.version ?? 
undefined;
+};
+
+/**
+ * Safely fetches the latest version of the Python CDK
+ *
+ * If the request fails, it will return undefined
+ * @returns the latest version of the Python CDK
+ */
+export const usePythonCDKVersion = () => {
+  const { trackError } = useAppMonitoringService();
+
+  return useSuspenseQuery(
+    ["pypi.cdkVersion"],
+    async () => {
+      try {
+        // Await here so a rejected fetch is caught below instead of escaping the try/catch
+        return await fetchLatestVersionOfPyPackage("airbyte-cdk");
+      } catch (e) {
+        trackError(e);
+        return undefined;
+      }
+    },
+    {
+      staleTime: Infinity,
+    }
+  );
+};
diff --git a/airbyte-webapp/src/core/domain/connector/connector.ts b/airbyte-webapp/src/core/domain/connector/connector.ts
index 9d70fc4c9a2..f7dba5bebe8 100644
--- a/airbyte-webapp/src/core/domain/connector/connector.ts
+++ b/airbyte-webapp/src/core/domain/connector/connector.ts
@@ -29,14 +29,16 @@ export class ConnectorSpecification {
   }
 }
 
-export const shouldDisplayBreakingChangeBanner = (actorDefinitionVersion: ActorDefinitionVersionRead): boolean => {
+export const shouldDisplayBreakingChangeBanner = (
+  actorDefinitionVersion: Pick<ActorDefinitionVersionRead, "breakingChanges" | "isVersionOverrideApplied">
+): boolean => {
   const hasUpcomingBreakingChanges =
     !!actorDefinitionVersion?.breakingChanges &&
     actorDefinitionVersion.breakingChanges.upcomingBreakingChanges.length > 0;
 
   // This is important as it catches the case where a user has been explicitly pinned to a previous version
   // e.g. Prereleases, PbA Users etc..
-  const actorNotOverriden = !actorDefinitionVersion.isVersionOverrideApplied;
+  const actorNotOverriden = !actorDefinitionVersion?.isVersionOverrideApplied;
 
   return hasUpcomingBreakingChanges && actorNotOverriden;
 };
@@ -46,7 +48,9 @@ export const shouldDisplayBreakingChangeBanner = (actorDefinitionVersion: ActorD
  * @param actorDefinitionVersion The actor definition version to format the upgrade deadline for
  * @returns The formatted upgrade deadline or null if there is no deadline
  */
-export const getHumanReadableUpgradeDeadline = (actorDefinitionVersion: ActorDefinitionVersionRead): string | null => {
+export const getHumanReadableUpgradeDeadline = (
+  actorDefinitionVersion: Pick<ActorDefinitionVersionRead, "breakingChanges">
+): string | null => {
   const deadline = actorDefinitionVersion.breakingChanges?.minUpgradeDeadline;
   if (deadline) {
     return dayjs(deadline).format("MMMM D, YYYY");
diff --git a/airbyte-webapp/src/core/services/analytics/pageTrackingCodes.tsx b/airbyte-webapp/src/core/services/analytics/pageTrackingCodes.tsx
index 0ed4baae071..e6a6de0dd7d 100644
--- a/airbyte-webapp/src/core/services/analytics/pageTrackingCodes.tsx
+++ b/airbyte-webapp/src/core/services/analytics/pageTrackingCodes.tsx
@@ -20,6 +20,7 @@ export enum PageTrackingCodes {
   CONNECTIONS_ITEM_TRANSFORMATION = "Connections.Item.TransformationView",
   CONNECTIONS_ITEM_REPLICATION = "Connections.Item.ReplicationView",
   CONNECTIONS_ITEM_SETTINGS = "Connections.Item.Settings",
+  CONNECTIONS_ITEM_TIMELINE = "Connections.Item.Timeline",
   SETTINGS_ACCOUNT = "Settings.Account",
   SETTINGS_WORKSPACE = "Settings.Workspace",
   SETTINGS_ORGANIZATION = "Settings.Organization",
diff --git a/airbyte-webapp/src/core/services/analytics/types.ts b/airbyte-webapp/src/core/services/analytics/types.ts
index f514ccf1d8c..a6c3f42ac77 100644
--- a/airbyte-webapp/src/core/services/analytics/types.ts
+++ b/airbyte-webapp/src/core/services/analytics/types.ts
@@ -34,6 +34,7 @@ export const enum Action {
   SELECTION_OPENED = "SelectionOpened",
   CHECKOUT_START = "CheckoutStart",
   LOAD_MORE_JOBS = "LoadMoreJobs",
+  LOAD_MORE_EVENTS = "LoadMoreEvents",
   INVITE = "Invite",
   OAUTH_ATTEMPT = "OAuthAttempt",
   OAUTH_SUCCESS = "OAuthSuccess",
@@ -47,6 +48,7 @@ export const enum Action {
   APPLIED = "Applied",
   SET_SYNC_MODE = "SetSyncMode",
   DISMISSED_CHANGES_MODAL = "DismissedChangesModal",
+  SYNC_PROGRESS = "SyncProgress",
 
   // Connector Builder Actions
   CONNECTOR_BUILDER_START = "ConnectorBuilderStart",
diff --git a/airbyte-webapp/src/core/services/auth/AuthContext.ts b/airbyte-webapp/src/core/services/auth/AuthContext.ts
index eab919effe2..e2344c84c09 100644
--- a/airbyte-webapp/src/core/services/auth/AuthContext.ts
+++ b/airbyte-webapp/src/core/services/auth/AuthContext.ts
@@ -5,9 +5,10 @@ import { UserRead } from "core/api/types/AirbyteClient";
 
 export type AuthChangeName = (name: string) => Promise<void>;
 export type AuthGetAccessToken = () => Promise<string | null>;
 export type AuthLogout = () => Promise<void>;
+export type AuthLogin = ({ username, password }: { username: string; password: string }) => Promise<void>;
 
 export interface AuthContextApi {
-  authType: "none" | "oidc" | "cloud";
+  authType: "none" | "simple" | "oidc" | "cloud";
   user: UserRead | null;
   inited: boolean;
   emailVerified: boolean;
   provider: string | null;
   getAccessToken?: AuthGetAccessToken;
   updateName?: AuthChangeName;
+  login?: AuthLogin;
   logout?: AuthLogout;
   changeRealmAndRedirectToSignin?: (realm: string) => Promise<void>;
   redirectToSignInWithGoogle?: () => Promise<void>;
diff --git a/airbyte-webapp/src/core/services/auth/EnterpriseAuthService.tsx b/airbyte-webapp/src/core/services/auth/EnterpriseAuthService.tsx
index 37d6d20ad77..c0df53aa60a 100644
--- a/airbyte-webapp/src/core/services/auth/EnterpriseAuthService.tsx
+++ b/airbyte-webapp/src/core/services/auth/EnterpriseAuthService.tsx
@@ -15,7 +15,6 @@ import { createUriWithoutSsoParams } from "packages/cloud/services/auth/CloudAuthService";
 
 import { AuthContext, AuthContextApi } from "./AuthContext";
 
-// This wrapper is conditionally present if the KeycloakAuthentication feature is enabled
 export const EnterpriseAuthService: React.FC<PropsWithChildren> = ({ children }) => {
   const { auth, airbyteUrl } = useGetInstanceConfiguration();
 
diff --git a/airbyte-webapp/src/core/services/auth/CommunityAuthService.tsx b/airbyte-webapp/src/core/services/auth/NoAuthService.tsx
similarity index 65%
rename from airbyte-webapp/src/core/services/auth/CommunityAuthService.tsx
rename to airbyte-webapp/src/core/services/auth/NoAuthService.tsx
index ce0238a3dd6..9b1f98dd403 100644
--- a/airbyte-webapp/src/core/services/auth/CommunityAuthService.tsx
+++ b/airbyte-webapp/src/core/services/auth/NoAuthService.tsx
@@ -4,8 +4,9 @@ import { useGetDefaultUser } from "core/api";
 
 import { AuthContext } from "./AuthContext";
 
-export const CommunityAuthService: React.FC<PropsWithChildren> = ({ children }) => {
-  // In Community, the getUser endpoint does not require an access token
+// This is a static auth service in case the auth mode of the Airbyte instance is set to "none"
+export const NoAuthService: React.FC<PropsWithChildren> = ({ children }) => {
+  // When auth is set to "none", the getUser endpoint does not require an access token
   const defaultUser = useGetDefaultUser({ getAccessToken: () => Promise.resolve(null) });
 
   return (
diff --git a/airbyte-webapp/src/core/services/auth/OSSAuthService.tsx b/airbyte-webapp/src/core/services/auth/OSSAuthService.tsx
index 05583097cfb..020ed2800ed 100644
--- a/airbyte-webapp/src/core/services/auth/OSSAuthService.tsx
+++ b/airbyte-webapp/src/core/services/auth/OSSAuthService.tsx
@@ -1,16 +1,19 @@
 import { PropsWithChildren } from "react";
 
-import { FeatureItem, useFeature } from "core/services/features";
+import { useGetInstanceConfiguration } from "core/api";
 
-import { CommunityAuthService } from "./CommunityAuthService";
 import { EnterpriseAuthService } from "./EnterpriseAuthService";
+import { NoAuthService } from "./NoAuthService";
+import { SimpleAuthService } from "./SimpleAuthService";
 
 export const OSSAuthService: React.FC<PropsWithChildren> = ({ children }) => {
-  const isKeycloakAuthenticationEnabled = useFeature(FeatureItem.KeycloakAuthentication);
+  const { auth } = useGetInstanceConfiguration();
 
-  return isKeycloakAuthenticationEnabled ? (
-    <EnterpriseAuthService>{children}</EnterpriseAuthService>
-  ) : (
-    <CommunityAuthService>{children}</CommunityAuthService>
-  );
+  if (auth.mode === "oidc") {
+    return <EnterpriseAuthService>{children}</EnterpriseAuthService>;
+  }
+  if (auth.mode === "simple") {
+    return <SimpleAuthService>{children}</SimpleAuthService>;
+  }
+  return <NoAuthService>{children}</NoAuthService>;
 };
diff --git a/airbyte-webapp/src/core/services/auth/SimpleAuthService.tsx b/airbyte-webapp/src/core/services/auth/SimpleAuthService.tsx
new file mode 100644
index 00000000000..524b136ae15
--- /dev/null
+++ b/airbyte-webapp/src/core/services/auth/SimpleAuthService.tsx
@@ -0,0 +1,151 @@
+// Disabling @airbyte/no-local-storage because this is a rare case where we do not want to load the local storage value into react state, so the hook is not helpful.
+/* eslint-disable @airbyte/no-local-storage */
+import { jwtDecode } from "jwt-decode";
+import React, { PropsWithChildren, useCallback, useEffect, useMemo, useReducer, useRef } from "react";
+import { useNavigate } from "react-router-dom";
+
+import { SimpleAuthLoginFormValues } from "components/login/SimpleAuthLoginForm";
+
+import { useGetInstanceConfiguration, useGetOrCreateUser, useSimpleAuthLogin } from "core/api";
+import { UserRead } from "core/api/types/AirbyteClient";
+
+import { AuthContext, AuthContextApi } from "./AuthContext";
+
+const SIMPLE_AUTH_LOCAL_STORAGE_KEY = "airbyte_simple-auth-token";
+
+const isJwtExpired = (jwt: string) => {
+  if (jwt.length === 0) {
+    return false;
+  }
+  const decoded = jwtDecode(jwt);
+  return !!decoded.exp && decoded.exp < Date.now() / 1000;
+};
+
+type AuthState = Pick<AuthContextApi, "user" | "inited" | "loggedOut">;
+
+interface InitializingState extends AuthState {
+  user: null;
+  inited: false;
+  loggedOut: true;
+}
+
+interface LoggedInState extends AuthState {
+  user: UserRead;
+  inited: true;
+  loggedOut: false;
+}
+
+interface LoggedOutState extends AuthState {
+  user: null;
+  inited: true;
+  loggedOut: true;
+}
+
+type SimpleAuthServiceAuthState = InitializingState | LoggedInState | LoggedOutState;
+
+type AuthAction = { type: "login"; user: UserRead; accessToken: string } | { type: "logout" };
+
+const simpleAuthStateReducer = (state: SimpleAuthServiceAuthState, action: AuthAction): SimpleAuthServiceAuthState => {
+  switch (action.type) {
+    case "login":
+      return {
+        ...state,
+        inited: true,
+        user: action.user,
+        loggedOut: false,
+      };
+    case "logout":
+      return {
+        ...state,
+        inited: true,
+        user: null,
+        loggedOut: true,
+      };
+    default:
+      return state;
+  }
+};
+
+const initialAuthState: InitializingState = {
+  user: null,
+  inited: false,
+  loggedOut: true,
+};
+
+// This auth service is used in case the auth mode of the Airbyte instance is set to "simple"
+export const SimpleAuthService: React.FC<PropsWithChildren> = ({ children }) => {
+  const [authState, dispatch] = useReducer(simpleAuthStateReducer, initialAuthState);
+  // Stored in a ref so we can update the access token without re-rendering the whole context
+  const accessTokenRef = useRef<string | null>(null);
+  const { mutateAsync: login } = useSimpleAuthLogin();
+  const { mutateAsync: getAirbyteUser } = useGetOrCreateUser();
+  const { defaultUserId } = useGetInstanceConfiguration();
+  const initializingRef = useRef(false);
+  const navigate = useNavigate();
+
+  // This effect is explicitly run once to initialize the auth state
+  useEffect(() => {
+    if (initializingRef.current) {
+      return;
+    }
+    async function initializeSimpleAuthService() {
+      initializingRef.current = true;
+      const token = localStorage.getItem(SIMPLE_AUTH_LOCAL_STORAGE_KEY);
+      if (!token) {
+        dispatch({ type: "logout" });
+        return;
+      }
+      if (isJwtExpired(token)) {
+        localStorage.removeItem(SIMPLE_AUTH_LOCAL_STORAGE_KEY);
+        dispatch({ type: "logout" });
+        return;
+      }
+      try {
+        accessTokenRef.current = token;
+        const user = await getAirbyteUser({
+          authUserId: defaultUserId,
+          getAccessToken: () => Promise.resolve(token),
+        });
+        dispatch({ type: "login", user, accessToken: token });
+      } catch {
+        dispatch({ type: "logout" });
+      }
+    }
+    initializeSimpleAuthService();
+  }, [defaultUserId, getAirbyteUser]);
+
+  const loginCallback = useCallback(
+    async (values: SimpleAuthLoginFormValues) => {
+      const loginResponse = await login({ username: values.username, password: values.password });
+      accessTokenRef.current = loginResponse.access_token;
+      localStorage.setItem(SIMPLE_AUTH_LOCAL_STORAGE_KEY, loginResponse.access_token);
+      const user = await getAirbyteUser({
+        authUserId: defaultUserId,
+        getAccessToken: () => Promise.resolve(loginResponse.access_token),
+      });
+      dispatch({ type: "login", user, accessToken: loginResponse.access_token });
+    },
+    [defaultUserId, getAirbyteUser, login]
+  );
+
+  const contextValue = useMemo(() => {
+    return {
+      authType: "simple",
+      provider: null,
+      emailVerified: false,
+      ...authState,
+      getAccessToken: () => Promise.resolve(accessTokenRef.current),
+      login: authState.loggedOut ? loginCallback : undefined,
+      logout: authState.loggedOut
+        ? undefined
+        : async () => {
+            localStorage.removeItem(SIMPLE_AUTH_LOCAL_STORAGE_KEY);
+            accessTokenRef.current = null;
+            navigate("/");
+            dispatch({ type: "logout" });
+          },
+    } as const;
+  }, [loginCallback, authState, navigate]);
+
+  return <AuthContext.Provider value={contextValue}>{children}</AuthContext.Provider>;
+};
diff --git a/airbyte-webapp/src/core/services/features/FeatureService.test.tsx b/airbyte-webapp/src/core/services/features/FeatureService.test.tsx
index 28d495f21d0..e2e1189dd25 100644
--- a/airbyte-webapp/src/core/services/features/FeatureService.test.tsx
+++ b/airbyte-webapp/src/core/services/features/FeatureService.test.tsx
@@ -38,7 +38,6 @@ describe("Feature Service", () => {
   it("should allow setting default features", () => {
     const getFeature = (feature: FeatureItem) => renderHook(() => useFeature(feature), { wrapper }).result.current;
     expect(getFeature(FeatureItem.AllowDBTCloudIntegration)).toBe(true);
-    expect(getFeature(FeatureItem.AllowCustomDBT)).toBe(false);
     expect(getFeature(FeatureItem.AllowUpdateConnectors)).toBe(false);
   });
 
@@ -51,7 +50,7 @@ describe("Feature Service", () => {
     const getFeature = (feature: FeatureItem) =>
       renderHook(() => useFeature(feature), { wrapper: wrapperWithInstanceConfig }).result.current;
 
-    expect(getFeature(FeatureItem.KeycloakAuthentication)).toBe(true);
+    expect(getFeature(FeatureItem.APITokenManagement)).toBe(true);
   });
 
   it("overwrite features can overwrite default features", () => {
@@ -64,9 +63,9 @@ it("overwritten features can be cleared again", () => {
     const { result, rerender } = getFeatures({
-      overwrite: { [FeatureItem.AllowCustomDBT]: true } as FeatureSet,
+      overwrite: {} as FeatureSet,
     });
-    expect(result.current.sort()).toEqual([FeatureItem.AllowCustomDBT, FeatureItem.AllowDBTCloudIntegration]);
+    expect(result.current.sort()).toEqual([FeatureItem.AllowDBTCloudIntegration]);
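The reducer above drives `SimpleAuthService` through three shapes: initializing (`inited: false`), logged in, and logged out. A short walkthrough of the transitions, assuming the reducer, state types, and `initialAuthState` from `SimpleAuthService.tsx` are in scope (`someUser` is a stand-in for a real user):

```ts
import { UserRead } from "core/api/types/AirbyteClient";

declare const someUser: UserRead; // stand-in for illustration

let state: SimpleAuthServiceAuthState = initialAuthState;
// -> { user: null, inited: false, loggedOut: true }  (still initializing)

// A valid stored token, or a successful login, moves to the logged-in shape
state = simpleAuthStateReducer(state, { type: "login", user: someUser, accessToken: "jwt..." });
// -> { user: someUser, inited: true, loggedOut: false }

// A missing/expired token and an explicit logout both end in the same terminal shape
state = simpleAuthStateReducer(state, { type: "logout" });
// -> { user: null, inited: true, loggedOut: true }
```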
rerender({ overwrite: undefined }); expect(result.current.sort()).toEqual([FeatureItem.AllowDBTCloudIntegration]); }); diff --git a/airbyte-webapp/src/core/services/features/FeatureService.tsx b/airbyte-webapp/src/core/services/features/FeatureService.tsx index 7d6534839cd..3645f577af8 100644 --- a/airbyte-webapp/src/core/services/features/FeatureService.tsx +++ b/airbyte-webapp/src/core/services/features/FeatureService.tsx @@ -17,7 +17,6 @@ const featureSetFromList = (featureList: FeatureItem[]): FeatureSet => { const featureSetFromInstanceConfig = (instanceConfig: InstanceConfigurationResponse): FeatureSet => { return { - [FeatureItem.KeycloakAuthentication]: instanceConfig.auth.mode === AuthConfigurationMode.oidc, [FeatureItem.APITokenManagement]: instanceConfig.auth.mode !== AuthConfigurationMode.none, }; }; diff --git a/airbyte-webapp/src/core/services/features/constants.ts b/airbyte-webapp/src/core/services/features/constants.ts index 18617802c9e..36dc990d8e4 100644 --- a/airbyte-webapp/src/core/services/features/constants.ts +++ b/airbyte-webapp/src/core/services/features/constants.ts @@ -2,7 +2,6 @@ import { FeatureItem } from "./types"; export const defaultOssFeatures = [ FeatureItem.AllowAutoDetectSchema, - FeatureItem.AllowCustomDBT, FeatureItem.AllowUpdateConnectors, FeatureItem.AllowUploadCustomImage, FeatureItem.AllowSyncSubOneHourCronExpressions, diff --git a/airbyte-webapp/src/core/services/features/types.tsx b/airbyte-webapp/src/core/services/features/types.tsx index cf29c3415f2..86cf3ac1cbd 100644 --- a/airbyte-webapp/src/core/services/features/types.tsx +++ b/airbyte-webapp/src/core/services/features/types.tsx @@ -7,7 +7,6 @@ export enum FeatureItem { AllowAllRBACRoles = "ALLOW_ALL_RBAC_ROLES", AllowAutoDetectSchema = "ALLOW_AUTO_DETECT_SCHEMA", AllowUploadCustomImage = "ALLOW_UPLOAD_CUSTOM_IMAGE", - AllowCustomDBT = "ALLOW_CUSTOM_DBT", AllowDBTCloudIntegration = "ALLOW_DBT_CLOUD_INTEGRATION", AllowUpdateConnectors = "ALLOW_UPDATE_CONNECTORS", AllowOAuthConnector = "ALLOW_OAUTH_CONNECTOR", @@ -23,7 +22,6 @@ export enum FeatureItem { EnterpriseBranding = "ENTERPRISE_BRANDING", ExternalInvitations = "EXTERNAL_INVITATIONS", IndicateGuestUsers = "INDICATE_GUEST_USERS", - KeycloakAuthentication = "KEYCLOAK_AUTHENTICATION", MultiWorkspaceUI = "MULTI_WORKSPACE_UI", RBAC = "RBAC", RestrictAdminInForeignWorkspace = "RESTRICT_ADMIN_IN_FOREIGN_WORKSPACE", diff --git a/airbyte-webapp/src/core/services/i18n/I18nProvider.test.tsx b/airbyte-webapp/src/core/services/i18n/I18nProvider.test.tsx index f6ac52ab6c0..2bc751cf477 100644 --- a/airbyte-webapp/src/core/services/i18n/I18nProvider.test.tsx +++ b/airbyte-webapp/src/core/services/i18n/I18nProvider.test.tsx @@ -38,6 +38,24 @@ describe("I18nProvider", () => { expect(wrapper.getByTestId("msg").textContent).toBe("Hello world!"); }); + it("should pick the browser locale if no locale is specified", () => { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + jest.spyOn(Intl.DateTimeFormat.prototype, "resolvedOptions").mockReturnValue({ locale: "de-DE" } as any); + const { result } = renderHook(() => useIntl(), { + wrapper: ({ children }) => {children}, + }); + expect(result.current.locale).toBe("de-DE"); + }); + + it("should use the browser locale for formatting if no locale is specified", () => { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + jest.spyOn(Intl.DateTimeFormat.prototype, "resolvedOptions").mockReturnValue({ locale: "de-DE" } as any); + const { result } = renderHook(() => useIntl(), { + 
wrapper: ({ children }) => {children}, + }); + expect(result.current.formatNumber(1_000_000.42)).toBe("1.000.000,42"); + }); + it("should allow render tags for every message", () => { const wrapper = render( diff --git a/airbyte-webapp/src/core/services/i18n/I18nProvider.tsx b/airbyte-webapp/src/core/services/i18n/I18nProvider.tsx index 71a574fada6..6a3aa549463 100644 --- a/airbyte-webapp/src/core/services/i18n/I18nProvider.tsx +++ b/airbyte-webapp/src/core/services/i18n/I18nProvider.tsx @@ -20,9 +20,14 @@ export const useI18nContext = () => { }; interface I18nProviderProps { - locale: string; + /** + * The locale to use for internationalization. If not provided, the browser locale will be used. + */ + locale?: string; } +const getBrowserLocale = () => new Intl.DateTimeFormat().resolvedOptions().locale ?? "en"; + export const I18nProvider: React.FC> = ({ children, locale }) => { const [overwrittenMessages, setOvewrittenMessages] = useState({}); @@ -50,7 +55,7 @@ export const I18nProvider: React.FC> return ( {chunk}, diff --git a/airbyte-webapp/src/core/utils/errorStatusMessage.tsx b/airbyte-webapp/src/core/utils/errorStatusMessage.tsx index 36b0e2452d6..09299f54344 100644 --- a/airbyte-webapp/src/core/utils/errorStatusMessage.tsx +++ b/airbyte-webapp/src/core/utils/errorStatusMessage.tsx @@ -35,7 +35,7 @@ export const generateMessageFromError = ( ); }; -interface FailureUiDetails { +export interface FailureUiDetails { type: "error" | "warning"; typeLabel: string; origin: FailureReason["failureOrigin"]; diff --git a/airbyte-webapp/src/core/utils/time.ts b/airbyte-webapp/src/core/utils/time.ts index a47d80a420c..ca4374dd3fb 100644 --- a/airbyte-webapp/src/core/utils/time.ts +++ b/airbyte-webapp/src/core/utils/time.ts @@ -36,3 +36,17 @@ export const useFormatLengthOfTime = (lengthOfTimeMs: number) => { const strSeconds = formatMessage({ id: "sources.second" }, { second: seconds }); return `${strHours}${strMinutes}${strSeconds}`; }; + +/** + * + * @param start (milliseconds) + * @param end (milliseconds) + * @returns formatted length of time in milliseconds + */ +export const useFormatDuration = (start: number, end: number) => { + const startTime = dayjs(start); + const endTime = dayjs(end); + const duration = endTime.diff(startTime, "milliseconds"); + + return useFormatLengthOfTime(duration); +}; diff --git a/airbyte-webapp/src/core/utils/useLocalStorage.ts b/airbyte-webapp/src/core/utils/useLocalStorage.ts index 391c7c4162c..b710f9732d8 100644 --- a/airbyte-webapp/src/core/utils/useLocalStorage.ts +++ b/airbyte-webapp/src/core/utils/useLocalStorage.ts @@ -4,7 +4,6 @@ import { useLocalStorage as useLocalStorageWithUndefinedBug } from "react-use"; import { BuilderState } from "components/connectorBuilder/types"; -import { SupportLevel } from "core/api/types/AirbyteClient"; import { Theme } from "hooks/theme/useAirbyteTheme"; // Represents all the data we store in localStorage across the airbyte app @@ -15,7 +14,6 @@ interface AirbyteLocalStorage { connectorBuilderLimitWarning: boolean; allowlistIpsOpen: boolean; airbyteTheme: Theme; - "airbyte_connector-grid-support-level-filter": SupportLevel[]; "airbyte_connector-grid-show-suggested-connectors": boolean; "airbyte_show-dev-tools": boolean; "airbyte_workspace-in-title": boolean; diff --git a/airbyte-webapp/src/hooks/services/ConnectionForm/ConnectionFormService.tsx b/airbyte-webapp/src/hooks/services/ConnectionForm/ConnectionFormService.tsx index c3214e55f53..253a8320b22 100644 --- 
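Aside: the useFormatDuration hook added to core/utils/time.ts above returns a human-readable string (for example "1h 4m 10s", assembled from the sources.hour/minute/second messages), not a millisecond count as its @returns line suggests. A hypothetical call site, with the component and prop names invented purely for illustration:

// Hypothetical usage of useFormatDuration; this component is not part of the PR.
import React from "react";

import { useFormatDuration } from "core/utils/time";

export const JobDuration: React.FC<{ startedAt: number; endedAt: number }> = ({ startedAt, endedAt }) => {
  // Both timestamps are epoch milliseconds; the hook diffs them and formats the result.
  const formatted = useFormatDuration(startedAt, endedAt);
  return <span>{formatted}</span>;
};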
a/airbyte-webapp/src/hooks/services/ConnectionForm/ConnectionFormService.tsx +++ b/airbyte-webapp/src/hooks/services/ConnectionForm/ConnectionFormService.tsx @@ -78,7 +78,7 @@ const useConnectionForm = ({ const destDefinitionVersion = useDestinationDefinitionVersion(destinationId); const destDefinitionSpecification = useGetDestinationDefinitionSpecification(connection.destination.destinationId); - const initialValues = useInitialFormValues(connection, destDefinitionVersion, destDefinitionSpecification, mode); + const initialValues = useInitialFormValues(connection, destDefinitionSpecification, mode); const { formatMessage } = useIntl(); const [submitError, setSubmitError] = useState(null); const isSimplifiedCreation = useExperiment("connection.simplifiedCreation", true); diff --git a/airbyte-webapp/src/hooks/services/Experiment/experiments.ts b/airbyte-webapp/src/hooks/services/Experiment/experiments.ts index 9a1e22d5c1d..83688462552 100644 --- a/airbyte-webapp/src/hooks/services/Experiment/experiments.ts +++ b/airbyte-webapp/src/hooks/services/Experiment/experiments.ts @@ -18,7 +18,7 @@ export interface Experiments { "connection.streamCentricUI.lateMultiplier": number; "connection.streamCentricUI.v2": boolean; "connection.streamCentricUI.historicalOverview": boolean; - "connection.syncProgress": boolean; + "connection.timeline": boolean; "connector.airbyteCloudIpAddresses": string; "connector.suggestedSourceConnectors": string; "connector.suggestedDestinationConnectors": string; diff --git a/airbyte-webapp/src/hooks/theme/useAirbyteTheme.module.scss b/airbyte-webapp/src/hooks/theme/useAirbyteTheme.module.scss index 1d422c3f156..ecb589af1c5 100644 --- a/airbyte-webapp/src/hooks/theme/useAirbyteTheme.module.scss +++ b/airbyte-webapp/src/hooks/theme/useAirbyteTheme.module.scss @@ -8,7 +8,8 @@ colors.$inverse colors.$white colors.$black - + + colors.$blue-100 colors.$blue-500 colors.$red-300 diff --git a/airbyte-webapp/src/views/layout/SideBar/airbyteLogo.svg b/airbyte-webapp/src/images/airbyteLogo.svg similarity index 100% rename from airbyte-webapp/src/views/layout/SideBar/airbyteLogo.svg rename to airbyte-webapp/src/images/airbyteLogo.svg diff --git a/airbyte-webapp/src/locales/en.errors.json b/airbyte-webapp/src/locales/en.errors.json index 734e2a27952..47124fca664 100644 --- a/airbyte-webapp/src/locales/en.errors.json +++ b/airbyte-webapp/src/locales/en.errors.json @@ -1,3 +1,4 @@ { - "cron-validation/invalid-timezone": "The timezone {cronTimezone} is currently not supported. Please chose another timezone." + "cron-validation/invalid-timezone": "The timezone {cronTimezone} is currently not supported. Please chose another timezone.", + "dbtcloud/access-denied": "dbt Cloud denied access. Please verify your service token and access URL." } diff --git a/airbyte-webapp/src/locales/en.json b/airbyte-webapp/src/locales/en.json index 21bc894cff3..d53c0f1d69b 100644 --- a/airbyte-webapp/src/locales/en.json +++ b/airbyte-webapp/src/locales/en.json @@ -352,26 +352,6 @@ "connectorForm.allowlistIp.message": "Please allow inbound traffic from the following Airbyte IPs in your firewall whether connecting directly or via SSH Tunnel (more info):", "connectorForm.allowlistIp.addressesLabel": "Airbyte IP addresses", - "form.rawData": "Raw data (JSON)", - "form.basicNormalization": "Normalized tabular data", - "form.basicNormalization.message": "Map the JSON object to the types and format native to the destination. 
Learn more", - "form.customTransformation": "Custom transformation", - "form.transformationCount": "{count, plural, =0 {No custom transformation} one {{count} transformation} other {{count} transformations}}", - "form.addTransformation": "+ Add transformation", - "form.saveTransformation": "Save transformation", - "form.transformationName": "Transformation name *", - "form.transformationType": "Transformation type *", - "form.dockerUrl": "Docker image URL with dbt installed *", - "form.entrypoint": "Entrypoint arguments for dbt cli to run the project *", - "form.entrypoint.docs": "Learn more", - "form.entrypoint.linked.old": "Entrypoint arguments for dbt cli to run the project. Learn more *", - "form.entrypoint.linked": "Entrypoint arguments for dbt cli to run the project. *", - "form.gitBranch": "Git branch name (leave blank for default branch)", - "form.selectType": "Select a type", - "form.repositoryUrl": "Git repository URL of the custom transformation project *", - "form.repositoryUrl.placeholder": "https://github.com/organisation/git_repo.git", - "form.repositoryUrl.invalidUrl": "Please enter a valid Git repository URL", - "form.sourceNamespace": "Source namespace", "form.sourceStreamName": "Source stream name", "form.destinationNamespace": "Dest. namespace", @@ -423,7 +403,7 @@ "sources.incremental": "Incremental - based on...", "sources.newSource": "New source", "sources.newSourceTitle": "New Source", - "sources.selectSourceTitle": "Select the type of source you want to connect", + "sources.selectSourceTitle": "Set up a new source", "sources.suggestedSources": "Suggested sources", "sources.status": "Status", "sources.schema": "Schema", @@ -443,6 +423,9 @@ "sources.request.prioritize.erd": "We're still building this feature for you. Let us know if you're interested in a Entity Relationship Diagram (ERD) for this source.", "sources.request.prioritize.schema": "We're still building this feature for you. 
Let us know if you're interested in a schema for this source.", "sources.request.thankYou": "Thank you for your request.", + "jobs.noJobs": "No jobs", + "jobs.noJobsDescription": "Start a sync to see jobs here", + "jobs.noJobsFilterDescription": "No jobs match your filters", "jobs.jobId": "Job id: {id}", "jobs.attemptCount": "{count, plural, one {# attempt} other {# attempts}}", "jobs.jobStatus.refresh.failed": "Refresh Failed ({count, plural, =0 {0 streams} one {# stream} other {# streams}})", @@ -450,7 +433,6 @@ "jobs.jobStatus.refresh.succeeded": "Refresh Succeeded ({count, plural, =0 {0 streams} one {# stream} other {# streams}})", "jobs.jobStatus.refresh.cancelled": "Refresh Cancelled ({count, plural, =0 {0 streams} one {# stream} other {# streams}})", "jobs.jobStatus.refresh.partialSuccess": "Refresh Partial Success ({count, plural, =0 {0 streams} one {# stream} other {# streams}})", - "jobs.jobStatus.clear_data.failed": "Clear Data Failed ({count, plural, =0 {0 streams} one {# stream} other {# streams}})", "jobs.jobStatus.clear_data.running": "Clear Data Running ({count, plural, =0 {0 streams} one {# stream} other {# streams}})", "jobs.jobStatus.clear_data.succeeded": "Clear Data Succeeded ({count, plural, =0 {0 streams} one {# stream} other {# streams}})", @@ -484,7 +466,8 @@ "sources.hour": "{hour}h ", "sources.minute": "{minute}m ", "sources.second": "{second}s", - "sources.elapsed": "{time} elapsed", + "sources.fewSecondsElapsed": "a few seconds elapsed", + "sources.elapsed": "{hours, plural, =0 {} other {#h }}{minutes, plural, =0 {} other {#m }}elapsed", "sources.noDestinations": "No destinations yet", "sources.addDestinationReplicateData": "Add destinations where to replicate data to.", "sources.attemptNum": "Attempt {number}", @@ -492,14 +475,13 @@ "sources.countRecords": "{count, plural, =0 {no records} one {# record} other {# records}}", "sources.countRecordsExtracted": "{count, plural, =0 {no records extracted} one {# record extracted} other {# records extracted}}", "sources.countRecordsLoaded": "{count, plural, =0 {no records loaded} one {# record loaded} other {# records loaded}}", - "sources.countLoaded": "{count} loaded", - "sources.countExtracted": "{count} loaded", + "sources.countLoaded": "{count, number} loaded", + "sources.countExtracted": "{count, number} extracted", "sources.countBytes": "{count, plural, =0 {0 Bytes} one {# Byte} other {# Bytes}}", "sources.countKB": "{count} KB", "sources.countMB": "{count} MB", "sources.countGB": "{count} GB", "sources.countTB": "{count} TB", - "sources.queued": "Queued", "sources.syncing": "Syncing", "sources.starting": "Starting...", @@ -521,7 +503,7 @@ "destination.destinationSettings": "Destination Settings", "destinations.newDestination": "New destination", - "destinations.selectDestinationTitle": "Select the type of destination you want to connect", + "destinations.selectDestinationTitle": "Set up a new destination", "destinations.suggestedDestinations": "Suggested destinations", "destinations.description": "Destinations are where you send or push your data to.", "destinations.noDestinations": "Destination list is empty", @@ -606,6 +588,7 @@ "connection.schemaUpdateNotifications.title": "Schema update notifications", "connection.schemaUpdateNotifications.titleNext": "Be notified when schema changes occur", + "connection.schemaUpdateNotifications.workspaceWarning": "Connection update notifications are not enabled in this workspace. 
Configure them here.", "connection.schemaUpdateNotifications.subtitle": "Receive notifications when schema changes occur", "connection.schemaUpdateNotifications.info": "Send webhook notifications when your source schema has changed", "connection.schemaUpdateNotifications.error": "Unable to save the schema update notifications setting at this time.", @@ -695,6 +678,33 @@ "connection.actions.clearDataDescription": "Clearing your data will delete all data in your destination.", "connection.actions.clearData.confirm.text": "Clearing data for this connection will delete all data in your destination for this connection. The next sync will sync all historical data.", "connection.actions.clearData.confirm.title": "Are you sure you want to clear data from this connection?", + "connection.timeline": "Timeline", + "connection.timeline.error": "Error: ", + "connection.timeline.warning": "Warning: ", + "connection.timeline.clear_cancelled": "Clearing data cancelled ({value, plural, one {# stream} other {# streams}})", + "connection.timeline.clear_failed": "Clearing data failed ({value, plural, one {# stream} other {# streams}})", + "connection.timeline.clear_incomplete": "Clearing data did not complete ({value, plural, one {# stream} other {# streams}})", + "connection.timeline.clear_succeeded": "Clearing data succeeded ({value, plural, one {# stream} other {# streams}})", + "connection.timeline.refresh_cancelled": "Refresh cancelled ({value, plural, one {# stream} other {# streams}})", + "connection.timeline.refresh_failed": "Refresh failed ({value, plural, one {# stream} other {# streams}})", + "connection.timeline.refresh_incomplete": "Refresh did not complete ({value, plural, one {# stream} other {# streams}})", + "connection.timeline.refresh_succeeded": "Refresh succeeded ({value, plural, one {# stream} other {# streams}})", + "connection.timeline.sync_cancelled": "Sync cancelled", + "connection.timeline.sync_failed": "Sync failed", + "connection.timeline.sync_incomplete": "Sync did not complete", + "connection.timeline.sync_succeeded": "Sync succeeded", + "connection.timeline.filters.success": "Success", + "connection.timeline.filters.failure": "Failure", + "connection.timeline.filters.incomplete": "Incomplete", + "connection.timeline.filters.cancelled": "Cancelled", + "connection.timeline.filters.running": "Running", + "connection.timeline.filters.sync": "Sync", + "connection.timeline.filters.refresh": "Refresh", + "connection.timeline.filters.clear": "Clear", + "connection.timeline.filters.allEventTypes": "All event types", + "connection.timeline.filters.allStatuses": "All statuses", + "connection.timeline.filters.eventId": "id: {eventId}", + "connection.actions.error": "There was an error starting this job. 
Please try again.", "connection.actions.refreshData": "Refresh your data", "connection.actions.refreshConnection": "Refresh connection", @@ -759,7 +769,8 @@ "connection.stream.status.nextSync": "Next sync {sync}", "connection.stream.status.nextTry": "Next try {sync}", - "connection.overview.graph.noData": "No syncs to show data from", + "connection.overview.graph.noData": "No data yet", + "connection.overview.graph.noData.button": "Sync now to see data", "connection.overview.graph.volume": "Volume", "connection.overview.graph.recordsEmitted": "{value, plural, one {# record} other {# records}} extracted", "connection.overview.graph.recordsLoaded": "{value, plural, one {# record} other {# records}} loaded", @@ -795,12 +806,6 @@ "form.frequency.message": "Set how frequently the sync should execute", "connection.replicationFrequency": "Replication frequency*", - "connection.normalization": "Normalization", - "connection.normalization.successMessage": "Normalization settings were updated successfully!", - "connection.normalization.errorMessage": "There was an error during updating your normalization settings", - "connection.customTransformations": "Custom Transformations", - "connection.customTransformations.successMessage": "Custom transformation settings were updated successfully!", - "connection.customTransformations.errorMessage": "There was an error during updating your custom transformation settings", "connection.state.title": "Connection state", "connection.state.warning": "Updates to connection state should be handled with extreme care.", @@ -817,6 +822,7 @@ "connection.state.revert": "Revert changes", "connection.state.copyTitle": "Copy connection state", + "tables.empty": "No data", "tables.name": "Name", "tables.connector": "Connector", "tables.sourceConnectWith": "Destination", @@ -851,6 +857,7 @@ "tables.sourceDeleteConfirm": "Confirm source deletion", "tables.destinationDeleteConfirm": "Confirm destination deletion", "tables.connectionDeleteConfirm": "Confirm connection deletion", + "tables.connectionDeleteConfirmationText": "delete", "tables.sourceDeleteModalText": "Deleting a source cannot be undone without a full re-sync. No existing data in the destination will be altered.", "tables.destinationDeleteModalText": "Deleting a destination cannot be undone. 
No existing data in the destination will be altered.", "tables.affectedConnectionsOnDeletion": "The following {count, plural, one {connection} other {connections}} will be deleted:\n", @@ -946,7 +953,7 @@ "settings.organizationSettings.orgId": "ID: {id}", "settings.organizationSettings.copyOrgId": "Copy organization id", "settings.organizationSettings.organizationName": "Organization name", - "settings.organizationSettings.email": "Administrator email", + "settings.organizationSettings.email": "Contact email", "settings.organizationSettings.email.description": "Airbyte will display this email to new users in your organization and use this email for communications about your organization.", "settings.accountSettings": "Account Settings", "settings.accountSettings.logoutText": "Sign out", @@ -1060,7 +1067,9 @@ "settings.application.create.error": "There was an error creating the application.", "connector.connectorCount": "{count, plural, one {# connector} other {# connectors}}", - "connector.noSearchResults": "No connectors match your search.", + "connector.searchPlaceholder": "Search {tabName}…", + "connector.noSearchResults": "No results in {tabName}.", + "connector.noMarketplaceSearchResults": "No results in Marketplace.", "connector.searchResultsHiddenByFilters": "{count, plural, one {# matching connector is hidden by filters.} other {# matching connectors are hidden by filters.}}", "connector.showAllResults": "Show hidden results", "connector.requestConnectorBlock": "Request a new connector", @@ -1080,13 +1089,13 @@ "connector.hideSuggestedConnectors": "Hide suggested connectors", "connector.source": "Source", "connector.destination": "Destination", - "connector.supportLevel.certified": "Certified", - "connector.supportLevel.community": "Community", + "connector.supportLevel.certified": "Airbyte Connector", + "connector.supportLevel.community": "Marketplace", "connector.supportLevel.archived": "Archived", "connector.supportLevel.custom": "Custom", "connector.connectorNameAndVersion": "{connectorName} v{version}", - "connector.supportLevel.certified.description": "Certified connectors are actively maintained and supported by the Airbyte team and maintain a high quality bar. They are production ready.", - "connector.supportLevel.community.description": "Community connectors are maintained by the Airbyte community until they become Certified. Airbyte does not offer support SLAs around them, and encourages caution when using them in production.", + "connector.supportLevel.certified.description": "Airbyte Connectors are actively maintained and supported by the Airbyte team and maintain a high quality bar. They are production ready.", + "connector.supportLevel.community.description": "Marketplace connectors are maintained by the Airbyte community until they become Airbyte Connectors. 
Airbyte does not offer support SLAs around them, and encourages caution when using them in production.", "connector.supportLevel.archived.description": "Archived connectors have been removed from the Airbyte Registry due to low quality or low usage.", "connector.supportLevel.custom.description": "Custom connectors are added to the workspace manually by the user.", "connector.connectorsInDevelopment.docLink": "See our documentation for more details.", @@ -1124,6 +1133,13 @@ "connector.check.jobFailed": "Failed to run connection tests.", "connector.discoverSchema.jobFailed": "Failed to run schema discovery.", "connector.discoverSchema.catalogMissing": "Source did not return a schema.", + "connector.tab.certified": "Airbyte Connectors", + "connector.tab.marketplace": "Marketplace", + "connector.tab.custom": "Custom", + "connector.seeMore": "See {count, plural, one {# more result} other {# more results}} in {tabName}", + "connector.sort.name": "Name", + "connector.sort.success": "Sync success rate", + "connector.sort.usage": "Usage", "credits.credits": "Credits", "credits.whatAreCredits": "What are credits?", @@ -1147,6 +1163,21 @@ "credits.noBillingAccount": "Your account is excluded from billing requirements and credits are not required.", "docs.notFoundError": "We were not able to receive docs. Please click the link above to open docs on our website", + "docs.metrics.supportLevel.label": "Support Level", + "docs.metrics.connectorVersion.label": "Connector Version", + "docs.metrics.lastPublished.label": "Last updated", + "docs.metrics.cdkVersion.label": "CDK Version", + "docs.metrics.isLatestCDK.label": "Latest", + "docs.metrics.usageRate.label": "Usage Rate", + "docs.metrics.usageRate.tooltip.high": "Usage: High", + "docs.metrics.usageRate.tooltip.med": "Usage: Medium", + "docs.metrics.usageRate.tooltip.low": "Usage: Low", + "docs.metrics.usageRate.tooltip.none": "This connector has not been recently used", + "docs.metrics.syncSuccessRate.label": "Sync Success Rate", + "docs.metrics.syncSuccessRate.tooltip.high": "Sync Success: High", + "docs.metrics.syncSuccessRate.tooltip.med": "Sync Success: Medium", + "docs.metrics.syncSuccessRate.tooltip.low": "Sync Success: Low", + "docs.metrics.syncSuccessRate.tooltip.none": "This connector has not been recently used", "errors.messageOnly": "{message}", "errors.title": "Sorry, something went wrong.", "errors.reload": "Reload", @@ -1356,12 +1387,8 @@ "connectorBuilder.loadingStreamList": "Loading", "connectorBuilder.noStreamSelected": "No stream selected", "connectorBuilder.streamTestLimitReached": "Stream testing limit reached. 
During testing a maximum of {recordLimit} records, or {sliceLimit} stream partitions with {pageLimit} pages each will be returned.", - "connectorBuilder.builderPrompt.button": "Build your connector", - "connectorBuilder.builderPrompt.title": "Need to build your own source?", - "connectorBuilder.builderPrompt.description": "Build your source with our {adjective} {noun}", - "connectorBuilder.builderPrompt.shortDescription": "Use our {adjective} {noun}", - "connectorBuilder.builderPrompt.adjective": "low-code", - "connectorBuilder.builderPrompt.noun": "connector builder", + "connectorBuilder.builderPrompt.primary": "Need to build your own source?", + "connectorBuilder.builderPrompt.secondary": "Use our low-code connector builder", "connectorBuilder.listPage.name": "Name", "connectorBuilder.listPage.version": "Active version", "connectorBuilder.listPage.title": "Custom connectors", @@ -1644,6 +1671,7 @@ "login.haveAccount": "Already have an account?", "login.noAccount": "Don’t have an account?", "login.login": "Log in", + "login.failed": "Invalid username or password", "login.signup": "Sign up", "login.returnToLogin": "Return to login", "login.loginTitle": "Log in to Airbyte", diff --git a/airbyte-webapp/src/packages/cloud/App.tsx b/airbyte-webapp/src/packages/cloud/App.tsx index 14e8f061774..9e693f2d3df 100644 --- a/airbyte-webapp/src/packages/cloud/App.tsx +++ b/airbyte-webapp/src/packages/cloud/App.tsx @@ -46,7 +46,7 @@ const App: React.FC = () => { return ( - + }> diff --git a/airbyte-webapp/src/packages/cloud/views/settings/integrations/DbtCloudSettingsView.tsx b/airbyte-webapp/src/packages/cloud/views/settings/integrations/DbtCloudSettingsView.tsx index 155edad2bb3..a8c27dfc5fc 100644 --- a/airbyte-webapp/src/packages/cloud/views/settings/integrations/DbtCloudSettingsView.tsx +++ b/airbyte-webapp/src/packages/cloud/views/settings/integrations/DbtCloudSettingsView.tsx @@ -11,8 +11,9 @@ import { ExternalLink } from "components/ui/Link"; import { Message } from "components/ui/Message"; import { Text } from "components/ui/Text"; -import { HttpError, useCurrentWorkspace } from "core/api"; +import { useCurrentWorkspace } from "core/api"; import { useDbtCloudServiceToken } from "core/api/cloud"; +import { useFormatError } from "core/errors"; import { trackError } from "core/utils/datadog"; import { links } from "core/utils/links"; import { useIntent } from "core/utils/rbac"; @@ -30,12 +31,8 @@ interface DbtConfigurationFormValues { accessUrl?: string; } -// TODO(Tim): Needs to be moved to the proper new error system -export const cleanedErrorMessage = (e: Error): string => - e instanceof HttpError ? 
e.response?.message?.replace("Internal Server Error: ", "") : e.message; -// a centrally-defined key for accessing the token value within formik objects - export const DbtCloudSettingsView: React.FC = () => { + const formatError = useFormatError(); const { formatMessage } = useIntl(); const { hasExistingToken, saveToken } = useDbtCloudServiceToken(); const { registerNotification } = useNotificationService(); @@ -63,7 +60,7 @@ export const DbtCloudSettingsView: React.FC = () => { trackError(e); registerNotification({ id: "dbtCloud/save-token-failure", - text: cleanedErrorMessage(e), + text: formatError(e), type: "error", }); }; diff --git a/airbyte-webapp/src/packages/cloud/views/settings/integrations/useDbtTokenRemovalModal.tsx b/airbyte-webapp/src/packages/cloud/views/settings/integrations/useDbtTokenRemovalModal.tsx index e7295650610..2885ec31bd0 100644 --- a/airbyte-webapp/src/packages/cloud/views/settings/integrations/useDbtTokenRemovalModal.tsx +++ b/airbyte-webapp/src/packages/cloud/views/settings/integrations/useDbtTokenRemovalModal.tsx @@ -2,16 +2,16 @@ import { useCallback } from "react"; import { useIntl } from "react-intl"; import { useDbtCloudServiceToken } from "core/api/cloud"; +import { useFormatError } from "core/errors"; import { useConfirmationModalService } from "hooks/services/ConfirmationModal"; import { useNotificationService } from "hooks/services/Notification"; -import { cleanedErrorMessage } from "./DbtCloudSettingsView"; - export const useDbtTokenRemovalModal = () => { const { openConfirmationModal, closeConfirmationModal } = useConfirmationModalService(); const { deleteToken } = useDbtCloudServiceToken(); const { registerNotification } = useNotificationService(); const { formatMessage } = useIntl(); + const formatError = useFormatError(); return useCallback(() => { openConfirmationModal({ @@ -23,7 +23,7 @@ export const useDbtTokenRemovalModal = () => { onError: (e) => { registerNotification({ id: "dbtCloud/delete-token-failure", - text: cleanedErrorMessage(e), + text: formatError(e), type: "error", }); }, @@ -39,5 +39,5 @@ export const useDbtTokenRemovalModal = () => { }, submitButtonDataId: "delete", }); - }, [openConfirmationModal, deleteToken, closeConfirmationModal, registerNotification, formatMessage]); + }, [openConfirmationModal, deleteToken, closeConfirmationModal, registerNotification, formatMessage, formatError]); }; diff --git a/airbyte-webapp/src/packages/cloud/views/users/ApplicationSettingsView/ApplicationSettingsView.tsx b/airbyte-webapp/src/packages/cloud/views/users/ApplicationSettingsView/ApplicationSettingsView.tsx index 69e5c5536f3..c227d01bcc1 100644 --- a/airbyte-webapp/src/packages/cloud/views/users/ApplicationSettingsView/ApplicationSettingsView.tsx +++ b/airbyte-webapp/src/packages/cloud/views/users/ApplicationSettingsView/ApplicationSettingsView.tsx @@ -1,7 +1,6 @@ import { createColumnHelper } from "@tanstack/react-table"; -import dayjs from "dayjs"; import { useMemo } from "react"; -import { FormattedMessage } from "react-intl"; +import { FormattedDate, FormattedMessage } from "react-intl"; import { Box } from "components/ui/Box"; import { FlexContainer } from "components/ui/Flex"; @@ -57,7 +56,9 @@ export const ApplicationSettingsView = () => { columnHelper.accessor("createdAt", { header: () => , cell: (props) => ( - {dayjs.unix(props.row.original.createdAt).format("MMM DD, YYYY h:mmA")} + + + ), sortingFn: "basic", }), diff --git a/airbyte-webapp/src/pages/SettingsPage/pages/AccessManagementPage/OrganizationUsersTable.tsx 
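Aside: the useFormatError hook that replaces cleanedErrorMessage in DbtCloudSettingsView above lives in core/errors, and its implementation is not part of this diff. Presumably it resolves registered error keys such as the new dbtcloud/access-denied entry in en.errors.json; a loose sketch of that idea follows, where the i18nKey field and the fallback behaviour are both assumptions:

// Rough sketch of keyed error formatting; the real useFormatError in
// core/errors is not shown in this PR, so treat every detail here as assumed.
const formatErrorSketch = (
  e: Error & { i18nKey?: string },
  formatMessage: (descriptor: { id: string }) => string
): string => (e.i18nKey ? formatMessage({ id: e.i18nKey }) : e.message);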
b/airbyte-webapp/src/pages/SettingsPage/pages/AccessManagementPage/OrganizationUsersTable.tsx index a955fb7c38e..ca756f9dc68 100644 --- a/airbyte-webapp/src/pages/SettingsPage/pages/AccessManagementPage/OrganizationUsersTable.tsx +++ b/airbyte-webapp/src/pages/SettingsPage/pages/AccessManagementPage/OrganizationUsersTable.tsx @@ -33,7 +33,6 @@ export const OrganizationUsersTable: React.FC<{ ); }, sortingFn: "alphanumeric", - meta: { responsive: true }, }), columnHelper.accessor("permissionType", { id: "permissionType", @@ -43,7 +42,6 @@ export const OrganizationUsersTable: React.FC<{ ), - meta: { responsive: true }, cell: (props) => { const user = { userName: props.row.original.name ?? "", diff --git a/airbyte-webapp/src/pages/SettingsPage/pages/AccessManagementPage/WorkspaceUsersTable.tsx b/airbyte-webapp/src/pages/SettingsPage/pages/AccessManagementPage/WorkspaceUsersTable.tsx index 5884a844972..0a740318544 100644 --- a/airbyte-webapp/src/pages/SettingsPage/pages/AccessManagementPage/WorkspaceUsersTable.tsx +++ b/airbyte-webapp/src/pages/SettingsPage/pages/AccessManagementPage/WorkspaceUsersTable.tsx @@ -35,7 +35,6 @@ export const WorkspaceUsersTable: React.FC<{ ); }, sortingFn: "alphanumeric", - meta: { responsive: true }, }), columnHelper.accessor( (row) => { @@ -49,7 +48,6 @@ export const WorkspaceUsersTable: React.FC<{ ), - meta: { responsive: true }, cell: (props) => { return ; }, diff --git a/airbyte-webapp/src/pages/SettingsPage/pages/AccessManagementPage/components/UserCell.tsx b/airbyte-webapp/src/pages/SettingsPage/pages/AccessManagementPage/components/UserCell.tsx index b4a865819ae..c0f82658e3a 100644 --- a/airbyte-webapp/src/pages/SettingsPage/pages/AccessManagementPage/components/UserCell.tsx +++ b/airbyte-webapp/src/pages/SettingsPage/pages/AccessManagementPage/components/UserCell.tsx @@ -16,7 +16,7 @@ export const UserCell: React.FC<{ name?: string; email: string; isCurrentUser: b - + {nameToDisplay} {isCurrentUser && ( diff --git a/airbyte-webapp/src/pages/SettingsPage/pages/AccountPage/AccountPage.tsx b/airbyte-webapp/src/pages/SettingsPage/pages/AccountPage/AccountPage.tsx index 017d88567bc..b8a6a8094b8 100644 --- a/airbyte-webapp/src/pages/SettingsPage/pages/AccountPage/AccountPage.tsx +++ b/airbyte-webapp/src/pages/SettingsPage/pages/AccountPage/AccountPage.tsx @@ -4,19 +4,19 @@ import { useIntl } from "react-intl"; import { FlexContainer } from "components/ui/Flex"; import { Heading } from "components/ui/Heading"; -import { FeatureItem, useFeature } from "core/services/features"; +import { useAuthService } from "core/services/auth"; import { AccountForm } from "./components/AccountForm"; import { KeycloakAccountForm } from "./components/KeycloakAccountForm"; export const AccountPage: React.FC = () => { const { formatMessage } = useIntl(); - const isKeycloakAuthenticationEnabled = useFeature(FeatureItem.KeycloakAuthentication); + const { authType } = useAuthService(); return ( {formatMessage({ id: "settings.accountSettings" })} - {isKeycloakAuthenticationEnabled ? : } + {authType === "oidc" ? 
: } ); }; diff --git a/airbyte-webapp/src/pages/connections/ConnectionJobHistoryPage/ConnectionJobHistoryPage.tsx b/airbyte-webapp/src/pages/connections/ConnectionJobHistoryPage/ConnectionJobHistoryPage.tsx index b806e52a73b..24ade960e33 100644 --- a/airbyte-webapp/src/pages/connections/ConnectionJobHistoryPage/ConnectionJobHistoryPage.tsx +++ b/airbyte-webapp/src/pages/connections/ConnectionJobHistoryPage/ConnectionJobHistoryPage.tsx @@ -3,7 +3,7 @@ import React from "react"; import { FormattedMessage } from "react-intl"; import { useLocation, useNavigate } from "react-router-dom"; -import { EmptyResourceBlock } from "components/common/EmptyResourceBlock"; +import { EmptyState } from "components/common/EmptyState"; import { ConnectionSyncButtons } from "components/connection/ConnectionSync/ConnectionSyncButtons"; import { ConnectionSyncContextProvider } from "components/connection/ConnectionSync/ConnectionSyncContext"; import { PageContainer } from "components/PageContainer"; @@ -53,7 +53,7 @@ export const ConnectionJobHistoryPage: React.FC = () => { const isSimplifiedCreation = useExperiment("connection.simplifiedCreation", true); const { connection } = useConnectionEditService(); useTrackPage(PageTrackingCodes.CONNECTIONS_ITEM_STATUS); - const [filterValues, setFilterValue, setFilters] = useFilters({ + const [filterValues, setFilterValue, setFilters, isInitialState] = useFilters({ jobStatus: "all", startDate: "", endDate: "", @@ -167,16 +167,25 @@ export const ConnectionJobHistoryPage: React.FC = () => { ) : jobs?.length ? ( ) : linkedJobNotFound ? ( - } - description={ - - - - } - /> + + } + description={ + + + + } + /> + ) : ( - } /> + + } + description={ + + } + /> + )} {hasNextPage && ( diff --git a/airbyte-webapp/src/pages/connections/ConnectionPage/ConnectionPageHeader.tsx b/airbyte-webapp/src/pages/connections/ConnectionPage/ConnectionPageHeader.tsx index 562a11cd10b..af82816896f 100644 --- a/airbyte-webapp/src/pages/connections/ConnectionPage/ConnectionPageHeader.tsx +++ b/airbyte-webapp/src/pages/connections/ConnectionPage/ConnectionPageHeader.tsx @@ -8,6 +8,7 @@ import { FlexContainer } from "components/ui/Flex"; import { PageHeaderWithNavigation } from "components/ui/PageHeader"; import { Tabs, LinkTab } from "components/ui/Tabs"; +import { FeatureItem, useFeature } from "core/services/features"; import { useConnectionEditService } from "hooks/services/ConnectionEdit/ConnectionEditService"; import { useExperiment } from "hooks/services/Experiment"; import { RoutePaths, ConnectionRoutePaths } from "pages/routePaths"; @@ -21,6 +22,8 @@ export const ConnectionPageHeader = () => { const { formatMessage } = useIntl(); const currentTab = params["*"] || ConnectionRoutePaths.Status; const isSimplifiedCreation = useExperiment("connection.simplifiedCreation", true); + const supportsDbtCloud = useFeature(FeatureItem.AllowDBTCloudIntegration); + const connectionTimeline = useExperiment("connection.timeline", false); const { connection, schemaRefreshing } = useConnectionEditService(); const breadcrumbsData = [ @@ -39,12 +42,23 @@ export const ConnectionPageHeader = () => { to: basePath, disabled: schemaRefreshing, }, - { - id: ConnectionRoutePaths.JobHistory, - name: , - to: `${basePath}/${ConnectionRoutePaths.JobHistory}`, - disabled: schemaRefreshing, - }, + ...(connectionTimeline + ? 
[ + { + id: ConnectionRoutePaths.Timeline, + name: , + to: `${basePath}/${ConnectionRoutePaths.Timeline}`, + disabled: schemaRefreshing, + }, + ] + : [ + { + id: ConnectionRoutePaths.JobHistory, + name: , + to: `${basePath}/${ConnectionRoutePaths.JobHistory}`, + disabled: schemaRefreshing, + }, + ]), { id: ConnectionRoutePaths.Replication, name: ( @@ -56,12 +70,16 @@ export const ConnectionPageHeader = () => { to: `${basePath}/${ConnectionRoutePaths.Replication}`, disabled: schemaRefreshing, }, - { - id: ConnectionRoutePaths.Transformation, - name: , - to: `${basePath}/${ConnectionRoutePaths.Transformation}`, - disabled: schemaRefreshing, - }, + ...(supportsDbtCloud + ? [ + { + id: ConnectionRoutePaths.Transformation, + name: , + to: `${basePath}/${ConnectionRoutePaths.Transformation}`, + disabled: schemaRefreshing, + }, + ] + : []), { id: ConnectionRoutePaths.Settings, name: , @@ -71,7 +89,7 @@ export const ConnectionPageHeader = () => { ]; return tabs; - }, [basePath, connection.schemaChange, schemaRefreshing, isSimplifiedCreation]); + }, [basePath, schemaRefreshing, connectionTimeline, isSimplifiedCreation, connection.schemaChange, supportsDbtCloud]); return ( diff --git a/airbyte-webapp/src/pages/connections/ConnectionPage/ConnectionTitleBlockNext.tsx b/airbyte-webapp/src/pages/connections/ConnectionPage/ConnectionTitleBlockNext.tsx index e7d49c02c93..51d4eedcd7a 100644 --- a/airbyte-webapp/src/pages/connections/ConnectionPage/ConnectionTitleBlockNext.tsx +++ b/airbyte-webapp/src/pages/connections/ConnectionPage/ConnectionTitleBlockNext.tsx @@ -63,7 +63,7 @@ export const ConnectionTitleBlockNext = () => { {connectionStatus === ConnectionStatus.deprecated && ( } /> )} - + diff --git a/airbyte-webapp/src/pages/connections/ConnectionReplicationPage/ConnectionReplicationPage.tsx b/airbyte-webapp/src/pages/connections/ConnectionReplicationPage/ConnectionReplicationPage.tsx index 5ef6735859b..345b8f4742e 100644 --- a/airbyte-webapp/src/pages/connections/ConnectionReplicationPage/ConnectionReplicationPage.tsx +++ b/airbyte-webapp/src/pages/connections/ConnectionReplicationPage/ConnectionReplicationPage.tsx @@ -249,7 +249,6 @@ export const ConnectionReplicationPage: React.FC = () => { schema={validationSchema} onSubmit={onFormSubmit} trackDirtyChanges - disabled={mode === "readonly"} > diff --git a/airbyte-webapp/src/pages/connections/ConnectionSettingsPage/ConnectionSettingsPage.tsx b/airbyte-webapp/src/pages/connections/ConnectionSettingsPage/ConnectionSettingsPage.tsx index 858e657a78c..62e64fa247e 100644 --- a/airbyte-webapp/src/pages/connections/ConnectionSettingsPage/ConnectionSettingsPage.tsx +++ b/airbyte-webapp/src/pages/connections/ConnectionSettingsPage/ConnectionSettingsPage.tsx @@ -23,7 +23,7 @@ import { Heading } from "components/ui/Heading"; import { ExternalLink } from "components/ui/Link"; import { Spinner } from "components/ui/Spinner"; -import { useCurrentWorkspace, useDestinationDefinitionVersion } from "core/api"; +import { useCurrentWorkspace } from "core/api"; import { Geography, WebBackendConnectionUpdate } from "core/api/types/AirbyteClient"; import { PageTrackingCodes, useTrackPage } from "core/services/analytics"; import { FeatureItem, useFeature } from "core/services/features"; @@ -191,14 +191,8 @@ const SimplifiedConnectionSettingsPage = () => { const { trackError } = useAppMonitoringService(); const { mode } = useConnectionFormService(); - const destDefinitionVersion = useDestinationDefinitionVersion(connection.destinationId); const { destDefinitionSpecification } 
= useConnectionFormService(); - const simplifiedInitialValues = useInitialFormValues( - connection, - destDefinitionVersion, - destDefinitionSpecification, - mode - ); + const simplifiedInitialValues = useInitialFormValues(connection, destDefinitionSpecification, mode); const { workspaceId } = useCurrentWorkspace(); const canEditConnection = useIntent("EditConnection", { workspaceId }); diff --git a/airbyte-webapp/src/pages/connections/ConnectionSettingsPage/SchemaUpdateNotifications.tsx b/airbyte-webapp/src/pages/connections/ConnectionSettingsPage/SchemaUpdateNotifications.tsx index 322ef342525..1695c19cddd 100644 --- a/airbyte-webapp/src/pages/connections/ConnectionSettingsPage/SchemaUpdateNotifications.tsx +++ b/airbyte-webapp/src/pages/connections/ConnectionSettingsPage/SchemaUpdateNotifications.tsx @@ -2,6 +2,14 @@ import React from "react"; import { FormattedMessage, useIntl } from "react-intl"; import { FormControl } from "components/forms"; +import { FlexContainer } from "components/ui/Flex"; +import { Icon } from "components/ui/Icon"; +import { Link } from "components/ui/Link"; +import { Text } from "components/ui/Text"; + +import { useCurrentWorkspaceLink } from "area/workspace/utils"; +import { useCurrentWorkspace } from "core/api"; +import { RoutePaths, SettingsRoutePaths } from "pages/routePaths"; import { ConnectionSettingsFormValues } from "./ConnectionSettingsPage"; @@ -11,15 +19,39 @@ interface SchemaUpdateNotificationsProps { export const SchemaUpdateNotifications: React.FC = ({ disabled }) => { const { formatMessage } = useIntl(); + const { notificationSettings } = useCurrentWorkspace(); + const hasWorkspaceConnectionUpdateNotifications = + notificationSettings?.sendOnConnectionUpdate?.notificationType && + notificationSettings.sendOnConnectionUpdate.notificationType.length > 0; + + const createLink = useCurrentWorkspaceLink(); return ( - - disabled={disabled} - label={formatMessage({ id: "connection.schemaUpdateNotifications.title" })} - description={} - fieldType="switch" - name="notifySchemaChanges" - inline - /> + + + disabled={disabled} + label={formatMessage({ id: "connection.schemaUpdateNotifications.title" })} + description={} + fieldType="switch" + name="notifySchemaChanges" + inline + /> + {!hasWorkspaceConnectionUpdateNotifications && ( + <> + {" "} + + + ( + {children} + ), + }} + /> + + + )} + ); }; diff --git a/airbyte-webapp/src/pages/connections/ConnectionTimelinePage/ConnectionTimelineEventIcon.module.scss b/airbyte-webapp/src/pages/connections/ConnectionTimelinePage/ConnectionTimelineEventIcon.module.scss new file mode 100644 index 00000000000..4eb858c783c --- /dev/null +++ b/airbyte-webapp/src/pages/connections/ConnectionTimelinePage/ConnectionTimelineEventIcon.module.scss @@ -0,0 +1,35 @@ +@use "scss/colors"; + +.connectionTimelineEventIcon__icon { + color: colors.$grey-400; +} + +.connectionTimelineEventIcon__container { + flex-shrink: 0; + display: flex; + justify-content: center; + align-items: center; + position: relative; + width: 30px; + height: 30px; + border-radius: 50%; + background-color: colors.$grey-100; +} + +.connectionTimelineEventIcon__statusIndicator { + width: 12px; + height: 12px; + border-radius: 50%; + position: absolute; + top: -11%; + right: -10%; +} + +.connectionTimelineEventIcon__statusIcon { + width: 16px; + height: 16px; + position: absolute; + top: 50%; + left: 50%; + transform: translate(-50%, -50%); +} diff --git a/airbyte-webapp/src/pages/connections/ConnectionTimelinePage/ConnectionTimelineEventIcon.tsx 
b/airbyte-webapp/src/pages/connections/ConnectionTimelinePage/ConnectionTimelineEventIcon.tsx new file mode 100644 index 00000000000..b88e4e5797a --- /dev/null +++ b/airbyte-webapp/src/pages/connections/ConnectionTimelinePage/ConnectionTimelineEventIcon.tsx @@ -0,0 +1,40 @@ +import classNames from "classnames"; + +import { Icon, IconProps } from "components/ui/Icon"; + +import styles from "./ConnectionTimelineEventIcon.module.scss"; +export const ConnectionTimelineEventIcon: React.FC<{ + isLast: boolean; + icon: IconProps["type"]; + statusIcon?: IconProps["type"]; +}> = ({ isLast, icon, statusIcon }) => { + return ( +
+ {statusIcon && ( +
+ +
+ )} + +
+ ); +}; diff --git a/airbyte-webapp/src/pages/connections/ConnectionTimelinePage/ConnectionTimelineEventItem.module.scss b/airbyte-webapp/src/pages/connections/ConnectionTimelinePage/ConnectionTimelineEventItem.module.scss new file mode 100644 index 00000000000..8d5abc4c4df --- /dev/null +++ b/airbyte-webapp/src/pages/connections/ConnectionTimelinePage/ConnectionTimelineEventItem.module.scss @@ -0,0 +1,5 @@ +@use "scss/variables"; + +.connectionTimelineEventItem__container { + padding: 0 variables.$spacing-lg; +} diff --git a/airbyte-webapp/src/pages/connections/ConnectionTimelinePage/ConnectionTimelineEventItem.tsx b/airbyte-webapp/src/pages/connections/ConnectionTimelinePage/ConnectionTimelineEventItem.tsx new file mode 100644 index 00000000000..af8ecd20d0a --- /dev/null +++ b/airbyte-webapp/src/pages/connections/ConnectionTimelinePage/ConnectionTimelineEventItem.tsx @@ -0,0 +1,12 @@ +import { PropsWithChildren } from "react"; + +import { FlexContainer } from "components/ui/Flex"; + +import styles from "./ConnectionTimelineEventItem.module.scss"; +export const ConnectionTimelineEventItem: React.FC> = ({ children }) => { + return ( + + {children} + + ); +}; diff --git a/airbyte-webapp/src/pages/connections/ConnectionTimelinePage/ConnectionTimelineFilters.module.scss b/airbyte-webapp/src/pages/connections/ConnectionTimelinePage/ConnectionTimelineFilters.module.scss new file mode 100644 index 00000000000..a2af9e787aa --- /dev/null +++ b/airbyte-webapp/src/pages/connections/ConnectionTimelinePage/ConnectionTimelineFilters.module.scss @@ -0,0 +1,21 @@ +@use "scss/colors"; +@use "scss/variables"; + +.filterButton { + background-color: colors.$foreground; + border-radius: variables.$border-radius-sm; + color: colors.$grey-400; + border: variables.$border-thin solid colors.$grey-300; + min-height: auto; + height: variables.$button-height-xs; +} + +.filterOptionsMenu { + display: block; // default is `flex` which shrinks the options to fit the box, instead overflowing into scroll + overflow: auto; + max-height: variables.$height-long-listbox-options-list; +} + +.filterOption { + white-space: nowrap; +} diff --git a/airbyte-webapp/src/pages/connections/ConnectionTimelinePage/ConnectionTimelineFilters.tsx b/airbyte-webapp/src/pages/connections/ConnectionTimelinePage/ConnectionTimelineFilters.tsx new file mode 100644 index 00000000000..e3c25123832 --- /dev/null +++ b/airbyte-webapp/src/pages/connections/ConnectionTimelinePage/ConnectionTimelineFilters.tsx @@ -0,0 +1,78 @@ +import { FormattedMessage } from "react-intl"; + +import { Box } from "components/ui/Box"; +import { ClearFiltersButton } from "components/ui/ClearFiltersButton"; +import { FlexContainer, FlexItem } from "components/ui/Flex"; +import { ListBox } from "components/ui/ListBox"; +import { Text } from "components/ui/Text"; + +import styles from "./ConnectionTimelineFilters.module.scss"; +import { eventTypeFilterOptions, statusFilterOptions, TimelineFilterValues } from "./utils"; + +interface ConnectionTimelineFiltersProps { + filterValues: TimelineFilterValues; + setFilterValue: (key: keyof TimelineFilterValues, value: string | null) => void; + setFilters: (filters: TimelineFilterValues) => void; +} + +export const ConnectionTimelineFilters: React.FC = ({ + filterValues, + setFilterValue, + setFilters, +}) => { + const hasAnyFilterSelected = !!filterValues.status || !!filterValues.eventType || !!filterValues.eventId; + + return ( + + {!!filterValues.eventId ? 
( + + + + + + + + + + ) : ( + <> + + setFilterValue("status", value)} + /> + + + setFilterValue("eventType", value)} + /> + + + )} + + {hasAnyFilterSelected && ( + + { + setFilters({ + status: null, + eventType: null, + eventId: null, + openLogs: null, + }); + }} + /> + + )} + + ); +}; diff --git a/airbyte-webapp/src/pages/connections/ConnectionTimelinePage/ConnectionTimelinePage.module.scss b/airbyte-webapp/src/pages/connections/ConnectionTimelinePage/ConnectionTimelinePage.module.scss new file mode 100644 index 00000000000..cfdb85e599b --- /dev/null +++ b/airbyte-webapp/src/pages/connections/ConnectionTimelinePage/ConnectionTimelinePage.module.scss @@ -0,0 +1,19 @@ +@use "scss/colors"; +@use "scss/variables"; + +.eventList { + > * { + position: relative; + + &:not(:last-child)::before { + content: ""; + position: absolute; + top: variables.$spacing-lg; + left: 30px; + height: 100%; + width: variables.$border-thick; + background-color: colors.$grey-100; + transform: translateX(-50%); + } + } +} diff --git a/airbyte-webapp/src/pages/connections/ConnectionTimelinePage/ConnectionTimelinePage.tsx b/airbyte-webapp/src/pages/connections/ConnectionTimelinePage/ConnectionTimelinePage.tsx new file mode 100644 index 00000000000..bbd2c85b1cd --- /dev/null +++ b/airbyte-webapp/src/pages/connections/ConnectionTimelinePage/ConnectionTimelinePage.tsx @@ -0,0 +1,143 @@ +import { FormattedMessage, useIntl } from "react-intl"; + +import { ConnectionSyncContextProvider } from "components/connection/ConnectionSync/ConnectionSyncContext"; +import { PageContainer } from "components/PageContainer"; +import { Box } from "components/ui/Box"; +import { Card } from "components/ui/Card"; +import { FlexContainer } from "components/ui/Flex"; +import { Heading } from "components/ui/Heading"; + +import { useFilters } from "core/api"; +import { PageTrackingCodes, useTrackPage } from "core/services/analytics"; +import { useConnectionFormService } from "hooks/services/ConnectionForm/ConnectionFormService"; +import { useModalService } from "hooks/services/Modal"; + +import { ClearEvent } from "./components/ClearEvent"; +import { RefreshEvent } from "./components/RefreshEvent"; +import { SyncEvent } from "./components/SyncEvent"; +import { ConnectionTimelineFilters } from "./ConnectionTimelineFilters"; +import styles from "./ConnectionTimelinePage.module.scss"; +import { openJobLogsModalFromTimeline } from "./JobEventMenu"; +import { mockConnectionTimelineEventList } from "./mocks"; +import { + castEventSummaryToConnectionTimelineJobStatsProps, + eventTypeByFilterValue, + eventTypeByStatusFilterValue, + extractStreamsFromTimelineEvent, + TimelineFilterValues, +} from "./utils"; + +export const ConnectionTimelinePage: React.FC = () => { + useTrackPage(PageTrackingCodes.CONNECTIONS_ITEM_TIMELINE); + const { events: connectionEvents } = mockConnectionTimelineEventList; + const { openModal } = useModalService(); + const { formatMessage } = useIntl(); + const { connection } = useConnectionFormService(); + + const [filterValues, setFilterValue, setFilters] = useFilters({ + status: null, + eventType: null, + eventId: null, + openLogs: null, + }); + + const connectionEventsToShow = connectionEvents.filter((connectionEvent) => { + if (filterValues.eventId) { + return connectionEvent.id === filterValues.eventId; + } + + if (filterValues.status && filterValues.eventType) { + return ( + eventTypeByStatusFilterValue[filterValues.status].includes(connectionEvent.eventType) && + 
eventTypeByFilterValue[filterValues.eventType].includes(connectionEvent.eventType) + ); + } + if (filterValues.status) { + return eventTypeByStatusFilterValue[filterValues.status].includes(connectionEvent.eventType); + } + if (filterValues.eventType) { + return eventTypeByFilterValue[filterValues.eventType].includes(connectionEvent.eventType); + } + + return true; + }); + + if (filterValues.openLogs && filterValues.eventId) { + const jobId = 55874; // todo: calculate this by fetching single connection timeline event from API based on filterValues.eventId once get endpoint is merged! + openJobLogsModalFromTimeline(openModal, jobId, formatMessage, connection.name ?? ""); + } + + return ( + + + + + + + + + + + + + + +
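Aside: the eventTypeByFilterValue and eventTypeByStatusFilterValue lookups used in the filtering above are imported from ./utils, which this diff does not include. Judging from the visible filter options and the connection.timeline.* message keys, their shape is presumably along these lines (keys and event-type lists inferred, not authoritative):

// Assumed shape of the ./utils lookup tables; inferred from the visible
// filter options and message ids, not taken from the PR itself.
const eventTypeByFilterValue: Record<string, string[]> = {
  sync: ["sync_succeeded", "sync_failed", "sync_incomplete", "sync_cancelled"],
  refresh: ["refresh_succeeded", "refresh_failed", "refresh_incomplete", "refresh_cancelled"],
  clear: ["clear_succeeded", "clear_failed", "clear_incomplete", "clear_cancelled"],
};

const eventTypeByStatusFilterValue: Record<string, string[]> = {
  success: ["sync_succeeded", "refresh_succeeded", "clear_succeeded"],
  failure: ["sync_failed", "refresh_failed", "clear_failed"],
  incomplete: ["sync_incomplete", "refresh_incomplete", "clear_incomplete"],
  cancelled: ["sync_cancelled", "refresh_cancelled", "clear_cancelled"],
};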
+ {connectionEventsToShow.map((event) => { + const stats = castEventSummaryToConnectionTimelineJobStatsProps(event.eventSummary); + const streamsToList = extractStreamsFromTimelineEvent(event.eventSummary); + + if (!stats || stats.jobStatus === "pending" || stats.jobStatus === "running") { + return null; + } + + return ( + + {event.eventType.includes("sync") && ( + + )} + {event.eventType.includes("clear") && ( + + )} + {event.eventType.includes("refresh") && ( + + )} + + ); + })} +
+
+
+
+
+ ); +}; diff --git a/airbyte-webapp/src/pages/connections/ConnectionTimelinePage/JobEventMenu.module.scss b/airbyte-webapp/src/pages/connections/ConnectionTimelinePage/JobEventMenu.module.scss new file mode 100644 index 00000000000..5ac070d5610 --- /dev/null +++ b/airbyte-webapp/src/pages/connections/ConnectionTimelinePage/JobEventMenu.module.scss @@ -0,0 +1,7 @@ +.modalLoading { + position: relative; + flex-grow: 1; + display: flex; + justify-content: center; + align-items: center; +} diff --git a/airbyte-webapp/src/pages/connections/ConnectionTimelinePage/JobEventMenu.tsx b/airbyte-webapp/src/pages/connections/ConnectionTimelinePage/JobEventMenu.tsx new file mode 100644 index 00000000000..dbb930fd8e9 --- /dev/null +++ b/airbyte-webapp/src/pages/connections/ConnectionTimelinePage/JobEventMenu.tsx @@ -0,0 +1,107 @@ +import { Suspense } from "react"; +import { useIntl } from "react-intl"; + +import { Button } from "components/ui/Button"; +import { DropdownMenu, DropdownMenuOptionType } from "components/ui/DropdownMenu"; +import { Spinner } from "components/ui/Spinner"; + +import { JobLogsModal } from "area/connection/components/JobLogsModal/JobLogsModal"; +import { copyToClipboard } from "core/utils/clipboard"; +import { useConnectionFormService } from "hooks/services/ConnectionForm/ConnectionFormService"; +import { ModalOptions, ModalResult, useModalService } from "hooks/services/Modal"; +import { Notification, useNotificationService } from "hooks/services/Notification"; + +import styles from "./JobEventMenu.module.scss"; + +enum JobMenuOptions { + OpenLogsModal = "OpenLogsModal", + CopyLinkToJob = "CopyLinkToJob", + DownloadLogs = "DownloadLogs", +} + +export const openJobLogsModalFromTimeline = ( + openModal: (options: ModalOptions) => Promise>, + jobId: number, + formatMessage: (arg0: { id: string }, arg1?: { connectionName: string } | undefined) => string, + connectionName: string, + initialAttemptId?: number +) => { + openModal({ + size: "full", + title: formatMessage({ id: "jobHistory.logs.title" }, { connectionName }), + content: () => ( + + +
+ } + > + + + ), + }); +}; + +const handleClick = ( + optionClicked: DropdownMenuOptionType, + connectionName: string, + formatMessage: (arg0: { id: string }, arg1?: { connectionName: string } | undefined) => string, + eventId: string, + jobId: number, + openModal: (options: ModalOptions) => Promise>, + registerNotification: (notification: Notification) => void +) => { + switch (optionClicked.value) { + case JobMenuOptions.OpenLogsModal: + openJobLogsModalFromTimeline(openModal, jobId, formatMessage, connectionName); + break; + + case JobMenuOptions.CopyLinkToJob: + const url = new URL(window.location.href); + url.searchParams.set("eventId", eventId); + url.searchParams.set("openLogs", "true"); + + copyToClipboard(url.href); + registerNotification({ + type: "success", + text: formatMessage({ id: "jobHistory.copyLinkToJob.success" }), + id: "jobHistory.copyLinkToJob.success", + }); + break; + } +}; + +export const JobEventMenu: React.FC<{ eventId: string; jobId: number }> = ({ eventId, jobId }) => { + const { formatMessage } = useIntl(); + const { connection } = useConnectionFormService(); + const { openModal } = useModalService(); + const { registerNotification } = useNotificationService(); + + if (!jobId) { + return null; + } + + const onChangeHandler = (optionClicked: DropdownMenuOptionType) => { + handleClick(optionClicked, connection.name ?? "", formatMessage, eventId, jobId, openModal, registerNotification); + }; + + return ( + + {() => - ), - cell: (props) => ( - - ), - }), - columnHelper.accessor("dataFreshAsOf", { - header: () => null, - id: "actions", - cell: (props) => ( - - ), - meta: { - thClassName: styles.actionsHeader, - }, - }), - ], - [columnHelper, setShowRelativeTime, showRelativeTime] - ); - - const { status, nextSync, recordsExtracted, recordsLoaded } = useConnectionStatus(connection.connectionId); - - return ( - - - - - - - - - - - - - - -
- - classNames(styles.row, { - [styles["syncing--next"]]: - activeStatuses.includes(data.status) && data.status !== ConnectionStatusIndicatorStatus.Queued, - }) - } - sorting={false} - /> - - - - ); -}; diff --git a/airbyte-webapp/src/pages/connections/StreamStatusPage/StreamStatusPage.module.scss b/airbyte-webapp/src/pages/connections/StreamStatusPage/StreamStatusPage.module.scss index bc346a65312..b96aa13fcb1 100644 --- a/airbyte-webapp/src/pages/connections/StreamStatusPage/StreamStatusPage.module.scss +++ b/airbyte-webapp/src/pages/connections/StreamStatusPage/StreamStatusPage.module.scss @@ -1,13 +1,3 @@ -.clickableHeader { - background-color: inherit; - border: none; - color: inherit; - cursor: pointer; - font-size: inherit; - font-weight: inherit; - text-transform: inherit; - white-space: nowrap; - display: flex; - align-items: center; - padding: 0; +.container { + height: 100%; } diff --git a/airbyte-webapp/src/pages/connections/StreamStatusPage/StreamStatusPage.tsx b/airbyte-webapp/src/pages/connections/StreamStatusPage/StreamStatusPage.tsx index 526843b903a..6cf35d33a94 100644 --- a/airbyte-webapp/src/pages/connections/StreamStatusPage/StreamStatusPage.tsx +++ b/airbyte-webapp/src/pages/connections/StreamStatusPage/StreamStatusPage.tsx @@ -6,18 +6,17 @@ import { useExperiment } from "hooks/services/Experiment"; import { ConnectionStatusCard } from "./ConnectionStatusCard"; import { ConnectionStatusMessages } from "./ConnectionStatusMessages"; import { ConnectionSyncStatusCard } from "./ConnectionSyncStatusCard"; -import { NextStreamsList } from "./NextStreamsList"; import { StreamsList } from "./StreamsList"; import { StreamsListContextProvider } from "./StreamsListContext"; +import styles from "./StreamStatusPage.module.scss"; export const StreamStatusPage = () => { const isSimplifiedCreation = useExperiment("connection.simplifiedCreation", true); - const showSyncProgress = useExperiment("connection.syncProgress", false); return ( - + {isSimplifiedCreation ? ( <> @@ -26,7 +25,7 @@ export const StreamStatusPage = () => { ) : ( )} - {showSyncProgress ? 
: } + diff --git a/airbyte-webapp/src/pages/connections/StreamStatusPage/StreamsList.module.scss b/airbyte-webapp/src/pages/connections/StreamStatusPage/StreamsList.module.scss index 0fcf987ab74..5105be4eb11 100644 --- a/airbyte-webapp/src/pages/connections/StreamStatusPage/StreamsList.module.scss +++ b/airbyte-webapp/src/pages/connections/StreamStatusPage/StreamsList.module.scss @@ -3,10 +3,19 @@ @use "scss/variables"; @use "scss/z-indices"; +.card { + height: 100%; + overflow: hidden; +} + .cardHeader { border-bottom: variables.$border-thin solid colors.$grey-100; } +.cardBody { + height: calc(100% - 75px); // cardHeader height +} + .clickableHeader { background-color: inherit; border: none; @@ -30,6 +39,8 @@ } .tableContainer { + height: 100%; + .syncing { background: none; @include mixins.striped-background(colors.$dark-blue-30, 30px); diff --git a/airbyte-webapp/src/pages/connections/StreamStatusPage/StreamsList.tsx b/airbyte-webapp/src/pages/connections/StreamStatusPage/StreamsList.tsx index 1494bb3675f..f573df93a91 100644 --- a/airbyte-webapp/src/pages/connections/StreamStatusPage/StreamsList.tsx +++ b/airbyte-webapp/src/pages/connections/StreamStatusPage/StreamsList.tsx @@ -1,10 +1,10 @@ import { createColumnHelper } from "@tanstack/react-table"; import classNames from "classnames"; -import dayjs from "dayjs"; -import React, { useMemo } from "react"; +import { useMemo, useRef } from "react"; import { FormattedMessage } from "react-intl"; import { useToggle } from "react-use"; +import { useConnectionStatus } from "components/connection/ConnectionStatus/useConnectionStatus"; import { ConnectionStatusIndicatorStatus } from "components/connection/ConnectionStatusIndicator"; import { StreamStatusIndicator } from "components/connection/StreamStatusIndicator"; import { Box } from "components/ui/Box"; @@ -13,102 +13,81 @@ import { FlexContainer } from "components/ui/Flex"; import { Heading } from "components/ui/Heading"; import { Icon } from "components/ui/Icon"; import { Table } from "components/ui/Table"; -import { Text } from "components/ui/Text"; -import { ConnectionStatus } from "core/api/types/AirbyteClient"; +import { activeStatuses } from "area/connection/utils"; +import { useTrackSyncProgress } from "area/connection/utils/useStreamsTableAnalytics"; +import { useUiStreamStates } from "area/connection/utils/useUiStreamsStates"; import { useConnectionEditService } from "hooks/services/ConnectionEdit/ConnectionEditService"; -import { useExperiment } from "hooks/services/Experiment"; +import { DataFreshnessCell } from "./DataFreshnessCell"; +import { LatestSyncCell } from "./LatestSyncCell"; import { StreamActionsMenu } from "./StreamActionsMenu"; import { StreamSearchFiltering } from "./StreamSearchFiltering"; import styles from "./StreamsList.module.scss"; -import { useStreamsListContext } from "./StreamsListContext"; - -const LastSync: React.FC<{ transitionedAt: number | undefined; showRelativeTime: boolean }> = ({ - transitionedAt, - showRelativeTime, -}) => { - const lastSyncDisplayText = useMemo(() => { - if (transitionedAt) { - const lastSync = dayjs(transitionedAt); - - if (showRelativeTime) { - return lastSync.fromNow(); - } - return lastSync.format("MM.DD.YY HH:mm:ss"); - } - return null; - }, [transitionedAt, showRelativeTime]); - - if (lastSyncDisplayText) { - return ( - - {lastSyncDisplayText} - - ); - } - return null; -}; +import { StreamsListSubtitle } from "./StreamsListSubtitle"; export const StreamsList = () => { - const useSimplifiedCreation = 
useExperiment("connection.simplifiedCreation", true); - const [showRelativeTime, setShowRelativeTime] = useToggle(true); const { connection } = useConnectionEditService(); - const { filteredStreamsByStatus } = useStreamsListContext(); - - const streamEntries = useMemo( - () => - filteredStreamsByStatus.map((stream) => { - return { - name: stream.streamName, - state: { - ...stream, - lastSuccessfulSyncAt: stream.lastSuccessfulSyncAt, - }, - }; - }), - [filteredStreamsByStatus] - ); + const streamEntries = useUiStreamStates(connection.connectionId); + const trackCountRef = useRef(0); + useTrackSyncProgress(connection.connectionId, trackCountRef); const columnHelper = useMemo(() => createColumnHelper<(typeof streamEntries)[number]>(), []); + const columns = useMemo( () => [ - columnHelper.accessor("state", { + columnHelper.accessor("status", { id: "statusIcon", header: () => , cell: (props) => ( - - + + ), meta: { thClassName: styles.statusHeader }, }), - columnHelper.accessor("name", { + columnHelper.accessor("streamName", { header: () => , cell: (props) => <>{props.cell.getValue()}, + meta: { responsive: true }, }), - - columnHelper.accessor("state", { - id: "lastSync", + columnHelper.accessor("recordsLoaded", { + id: "latestSync", + header: () => , + cell: (props) => { + return ( + + ); + }, + meta: { responsive: true }, + }), + columnHelper.accessor("dataFreshAsOf", { header: () => ( ), cell: (props) => ( - + ), + meta: { responsive: true }, }), - columnHelper.accessor("state", { + columnHelper.accessor("dataFreshAsOf", { header: () => null, id: "actions", cell: (props) => ( ), meta: { @@ -119,49 +98,42 @@ export const StreamsList = () => { [columnHelper, setShowRelativeTime, showRelativeTime] ); - const showTable = connection.status !== ConnectionStatus.inactive; + const { status, nextSync, recordsExtracted, recordsLoaded } = useConnectionStatus(connection.connectionId); return ( - + - {useSimplifiedCreation ? ( - - - - - - ) : ( + - )} + + + - +
-      {showTable && (
-          classNames(styles.row, {
-            [styles.syncing]: data.state?.status === ConnectionStatusIndicatorStatus.Syncing,
-          })
-        }
-        sorting={false}
-      />
-      )}
-
-      {!showTable && (
-
-
-
-
-      )}
+
+ classNames(styles.row, { + [styles["syncing--next"]]: + activeStatuses.includes(data.status) && data.status !== ConnectionStatusIndicatorStatus.Queued, + }) + } + sorting={false} + virtualized + /> diff --git a/airbyte-webapp/src/pages/connections/StreamStatusPage/StreamsListContext.tsx b/airbyte-webapp/src/pages/connections/StreamStatusPage/StreamsListContext.tsx index e00969fa46b..8e7dc4d3d39 100644 --- a/airbyte-webapp/src/pages/connections/StreamStatusPage/StreamsListContext.tsx +++ b/airbyte-webapp/src/pages/connections/StreamStatusPage/StreamsListContext.tsx @@ -21,7 +21,6 @@ const useStreamsContextInit = (connectionId: string) => { .filter(([status]) => status !== ConnectionStatusIndicatorStatus.Paused) .flatMap(([_, stream]) => stream); - /** deprecated... will remove with sync progress project */ const filteredStreamsByStatus = useMemo( () => enabledStreamsByStatus.filter((stream) => stream.streamName.includes(searchTerm)), [searchTerm, enabledStreamsByStatus] diff --git a/airbyte-webapp/src/pages/connections/StreamStatusPage/StreamsListSubtitle.tsx b/airbyte-webapp/src/pages/connections/StreamStatusPage/StreamsListSubtitle.tsx index e25ea45ad60..fcd3cbd6e7e 100644 --- a/airbyte-webapp/src/pages/connections/StreamStatusPage/StreamsListSubtitle.tsx +++ b/airbyte-webapp/src/pages/connections/StreamStatusPage/StreamsListSubtitle.tsx @@ -4,8 +4,6 @@ import { FormattedMessage } from "react-intl"; import { ConnectionStatusIndicatorStatus } from "components/connection/ConnectionStatusIndicator"; import { Text } from "components/ui/Text"; -import { useExperiment } from "hooks/services/Experiment"; - interface StreamsListSubtitleProps { connectionStatus: ConnectionStatusIndicatorStatus; nextSync?: number; @@ -19,9 +17,8 @@ export const StreamsListSubtitle: React.FC = ({ recordsExtracted, recordsLoaded, }) => { - const showSyncProgress = useExperiment("connection.syncProgress", false); return ( - + {connectionStatus === ConnectionStatusIndicatorStatus.OnTime && nextSync && ( )} @@ -30,7 +27,7 @@ export const StreamsListSubtitle: React.FC = ({ nextSync && ( )} - {((showSyncProgress && connectionStatus === ConnectionStatusIndicatorStatus.Syncing) || + {(connectionStatus === ConnectionStatusIndicatorStatus.Syncing || connectionStatus === ConnectionStatusIndicatorStatus.Queued) && (recordsLoaded ? 
( diff --git a/airbyte-webapp/src/pages/destination/SelectDestinationPage/SelectDestinationPage.tsx b/airbyte-webapp/src/pages/destination/SelectDestinationPage/SelectDestinationPage.tsx index 978be7c7afa..244fffede28 100644 --- a/airbyte-webapp/src/pages/destination/SelectDestinationPage/SelectDestinationPage.tsx +++ b/airbyte-webapp/src/pages/destination/SelectDestinationPage/SelectDestinationPage.tsx @@ -18,7 +18,7 @@ export const SelectDestinationPage: React.FC = () => { return ( <> - + diff --git a/airbyte-webapp/src/pages/login/LoginPage.module.scss b/airbyte-webapp/src/pages/login/LoginPage.module.scss new file mode 100644 index 00000000000..909e7aa0469 --- /dev/null +++ b/airbyte-webapp/src/pages/login/LoginPage.module.scss @@ -0,0 +1,19 @@ +@use "scss/variables"; + +.loginPage { + display: flex; + justify-content: center; + align-items: center; + height: 100vh; + + &__logo { + display: block; + width: 200px; + } + + &__form { + width: 350px; + max-width: 100%; + padding-bottom: variables.$spacing-2xl; + } +} diff --git a/airbyte-webapp/src/pages/login/LoginPage.tsx b/airbyte-webapp/src/pages/login/LoginPage.tsx new file mode 100644 index 00000000000..0b0e02abcd3 --- /dev/null +++ b/airbyte-webapp/src/pages/login/LoginPage.tsx @@ -0,0 +1,20 @@ +import { SimpleAuthLoginForm } from "components/login/SimpleAuthLoginForm"; +import { FlexContainer } from "components/ui/Flex"; +import AirbyteLogo from "images/airbyteLogo.svg?react"; + +import styles from "./LoginPage.module.scss"; + +export const LoginPage = () => { + return ( +
+      {/* login page JSX lost in extraction: per the imports and class names above,
+          this renders AirbyteLogo and SimpleAuthLoginForm inside centered FlexContainers */}
+ ); +}; diff --git a/airbyte-webapp/src/pages/routePaths.tsx b/airbyte-webapp/src/pages/routePaths.tsx index f58bd276594..d617c358fef 100644 --- a/airbyte-webapp/src/pages/routePaths.tsx +++ b/airbyte-webapp/src/pages/routePaths.tsx @@ -1,5 +1,6 @@ export enum RoutePaths { Root = "/", + Login = "login", SpeakeasyRedirect = "speakeasy-redirect", Workspaces = "workspaces", Setup = "setup", @@ -33,6 +34,7 @@ export const enum ConnectionRoutePaths { ConnectionNew = "new-connection", Configure = "configure", ConfigureContinued = "continued", + Timeline = "timeline", } export enum SettingsRoutePaths { diff --git a/airbyte-webapp/src/pages/routes.tsx b/airbyte-webapp/src/pages/routes.tsx index 7370aba8d2b..9649e329c2c 100644 --- a/airbyte-webapp/src/pages/routes.tsx +++ b/airbyte-webapp/src/pages/routes.tsx @@ -1,5 +1,5 @@ import React, { useMemo } from "react"; -import { Navigate, Route, Routes, useLocation, useSearchParams } from "react-router-dom"; +import { createSearchParams, Navigate, Route, Routes, useLocation, useSearchParams } from "react-router-dom"; import { useEffectOnce } from "react-use"; import { @@ -16,7 +16,9 @@ import { storeUtmFromQuery } from "core/utils/utmStorage"; import { useApiHealthPoll } from "hooks/services/Health"; import { useBuildUpdateCheck } from "hooks/services/useBuildUpdateCheck"; import { useCurrentWorkspace } from "hooks/services/useWorkspace"; +import { useQuery } from "hooks/useQuery"; import { ApplicationSettingsView } from "packages/cloud/views/users/ApplicationSettingsView/ApplicationSettingsView"; +import { LoginPage } from "pages/login/LoginPage"; import MainView from "views/layout/MainView"; import { RoutePaths, DestinationPaths, SourcePaths, SettingsRoutePaths } from "./routePaths"; @@ -167,25 +169,50 @@ const RoutingWithWorkspace: React.FC<{ element?: JSX.Element }> = ({ element }) }; export const Routing: React.FC = () => { - const { inited, user } = useAuthService(); - + const { pathname: originalPathname, search, hash } = useLocation(); + const { inited, loggedOut } = useAuthService(); useBuildUpdateCheck(); - const { search } = useLocation(); useEffectOnce(() => { storeUtmFromQuery(search); }); + if (!inited) { + return null; + } + + if (loggedOut) { + const loginRedirectSearchParam = `${createSearchParams({ + loginRedirect: `${originalPathname}${search}${hash}`, + })}`; + const loginRedirectTo = + loggedOut && originalPathname === "/" + ? { pathname: RoutePaths.Login } + : { pathname: RoutePaths.Login, search: loginRedirectSearchParam }; + + return ( + + } /> + } /> + + ); + } + + return ; +}; + +const AuthenticatedRoutes = () => { + const { loginRedirect } = useQuery<{ loginRedirect: string }>(); const multiWorkspaceUI = useFeature(FeatureItem.MultiWorkspaceUI); const { initialSetupComplete } = useGetInstanceConfiguration(); - if (!inited) { - return null; + if (loginRedirect) { + return ; } return ( - {user && !initialSetupComplete ? ( + {!initialSetupComplete ? 
( } /> ) : ( <> diff --git a/airbyte-webapp/src/scss/_variables.scss b/airbyte-webapp/src/scss/_variables.scss index e23bfab098c..cf8433b7130 100644 --- a/airbyte-webapp/src/scss/_variables.scss +++ b/airbyte-webapp/src/scss/_variables.scss @@ -32,7 +32,6 @@ $spacing-md: 10px; $spacing-lg: 15px; $spacing-xl: 20px; $spacing-2xl: 40px; -$spacing-page-bottom-cloud: 88px; $width-side-menu: 200px; $width-wide-menu: 200px; diff --git a/airbyte-webapp/src/services/connectorBuilder/ConnectorBuilderStateService.tsx b/airbyte-webapp/src/services/connectorBuilder/ConnectorBuilderStateService.tsx index a18faf512c4..957e04329be 100644 --- a/airbyte-webapp/src/services/connectorBuilder/ConnectorBuilderStateService.tsx +++ b/airbyte-webapp/src/services/connectorBuilder/ConnectorBuilderStateService.tsx @@ -315,7 +315,7 @@ export const InternalConnectorBuilderFormStateProvider: React.FC< setValue("mode", "yaml"); } else { const confirmDiscard = (errorMessage: string) => { - if (isEqual(formValues, DEFAULT_BUILDER_FORM_VALUES)) { + if (isEqual(formValues, DEFAULT_BUILDER_FORM_VALUES) && jsonManifest.streams.length > 0) { openNoUiValueModal(errorMessage); } else { openConfirmationModal({ @@ -334,7 +334,7 @@ export const InternalConnectorBuilderFormStateProvider: React.FC< } }; try { - if (jsonManifest === DEFAULT_JSON_MANIFEST_VALUES) { + if (isEqual(jsonManifest, removeEmptyProperties(DEFAULT_JSON_MANIFEST_VALUES))) { setValue("mode", "ui"); return; } diff --git a/airbyte-webapp/src/test-utils/mock-data/mockConnection.ts b/airbyte-webapp/src/test-utils/mock-data/mockConnection.ts index 681476f2af0..7bb8c7cd81d 100644 --- a/airbyte-webapp/src/test-utils/mock-data/mockConnection.ts +++ b/airbyte-webapp/src/test-utils/mock-data/mockConnection.ts @@ -2,6 +2,9 @@ import { ConnectorIds } from "area/connector/utils"; import { WebBackendConnectionRead } from "core/api/types/AirbyteClient"; +import { mockDestinationDefinitionVersion } from "./mockDestination"; +import { mockSourceDefinitionVersion } from "./mockSource"; + export const mockConnection: WebBackendConnectionRead = { connectionId: "a9c8e4b5-349d-4a17-bdff-5ad2f6fbd611", name: "Scrafty <> Heroku Postgres", @@ -908,19 +911,7 @@ export const mockConnection: WebBackendConnectionRead = { name: "Heroku Postgres", destinationName: "Postgres", }, - operations: [ - { - workspaceId: "47c74b9b-9b89-4af1-8331-4865af6c4e4d", - operationId: "8af8ef4d-01b1-49c8-b145-23775f34a74b", - name: "Normalization", - operatorConfiguration: { - operatorType: "normalization", - normalization: { - option: "basic", - }, - }, - }, - ], + operations: [], latestSyncJobCreatedAt: 1660227512, latestSyncJobStatus: "succeeded", isSyncing: false, @@ -929,4 +920,6 @@ export const mockConnection: WebBackendConnectionRead = { notifySchemaChanges: true, notifySchemaChangesByEmail: false, nonBreakingChangesPreference: "ignore", + sourceActorDefinitionVersion: mockSourceDefinitionVersion, + destinationActorDefinitionVersion: mockDestinationDefinitionVersion, }; diff --git a/airbyte-webapp/src/test-utils/mock-data/mockDestination.ts b/airbyte-webapp/src/test-utils/mock-data/mockDestination.ts index b1c98faae46..7961981646a 100644 --- a/airbyte-webapp/src/test-utils/mock-data/mockDestination.ts +++ b/airbyte-webapp/src/test-utils/mock-data/mockDestination.ts @@ -15,26 +15,12 @@ export const mockDestinationDefinition: DestinationDefinitionRead = { icon: '', supportLevel: "certified", custom: false, - supportsDbt: true, - normalizationConfig: { - supported: true, - normalizationRepository: 
"airbyte/normalization", - normalizationTag: "0.2.25", - normalizationIntegrationType: "postgres", - }, }; export const mockDestinationDefinitionVersion: ActorDefinitionVersionRead = { dockerRepository: "airbyte/destination-postgres", dockerImageTag: "0.3.26", - supportsDbt: true, supportsRefreshes: false, - normalizationConfig: { - supported: true, - normalizationRepository: "airbyte/normalization", - normalizationTag: "0.2.25", - normalizationIntegrationType: "postgres", - }, isVersionOverrideApplied: false, supportState: SupportState.supported, supportLevel: "certified", diff --git a/airbyte-webapp/src/test-utils/mock-data/mockSource.ts b/airbyte-webapp/src/test-utils/mock-data/mockSource.ts index e44f55683fc..aed8ca7ee52 100644 --- a/airbyte-webapp/src/test-utils/mock-data/mockSource.ts +++ b/airbyte-webapp/src/test-utils/mock-data/mockSource.ts @@ -23,11 +23,7 @@ export const mockSourceDefinition: SourceDefinitionRead = { export const mockSourceDefinitionVersion: ActorDefinitionVersionRead = { dockerRepository: "airbyte/source-postgres", dockerImageTag: "1.0.39", - supportsDbt: false, supportsRefreshes: false, - normalizationConfig: { - supported: false, - }, isVersionOverrideApplied: false, supportState: SupportState.supported, supportLevel: "certified", diff --git a/airbyte-webapp/src/test-utils/testutils.tsx b/airbyte-webapp/src/test-utils/testutils.tsx index d6b193ee93a..dc26d06fabf 100644 --- a/airbyte-webapp/src/test-utils/testutils.tsx +++ b/airbyte-webapp/src/test-utils/testutils.tsx @@ -3,13 +3,7 @@ import { act, Queries, queries, render as rtlRender, RenderOptions, RenderResult import React, { Suspense } from "react"; import { MemoryRouter } from "react-router-dom"; -import { - ConnectionStatus, - DestinationRead, - NamespaceDefinitionType, - SourceRead, - WebBackendConnectionRead, -} from "core/api/types/AirbyteClient"; +import { DestinationRead, SourceRead } from "core/api/types/AirbyteClient"; import { defaultOssFeatures, FeatureItem, FeatureService } from "core/services/features"; import { I18nProvider } from "core/services/i18n"; import { ConfirmationModalService } from "hooks/services/ConfirmationModal"; @@ -103,28 +97,3 @@ export const mockDestination: DestinationRead = { destinationDefinitionId: "test-destination-definition-id", connectionConfiguration: undefined, }; - -export const mockConnection: WebBackendConnectionRead = { - connectionId: "test-connection", - name: "test connection", - prefix: "test", - sourceId: "test-source", - destinationId: "test-destination", - status: ConnectionStatus.active, - schedule: undefined, - syncCatalog: { - streams: [], - }, - namespaceDefinition: NamespaceDefinitionType.source, - namespaceFormat: "", - operationIds: [], - source: mockSource, - destination: mockDestination, - operations: [], - catalogId: "", - isSyncing: false, - schemaChange: "no_change", - notifySchemaChanges: true, - notifySchemaChangesByEmail: false, - nonBreakingChangesPreference: "ignore", -}; diff --git a/airbyte-webapp/src/views/Connector/ConnectorDocumentationLayout/ConnectorDocumentationLayout.module.scss b/airbyte-webapp/src/views/Connector/ConnectorDocumentationLayout/ConnectorDocumentationLayout.module.scss index 2e622630abc..1f6bdfe4d8a 100644 --- a/airbyte-webapp/src/views/Connector/ConnectorDocumentationLayout/ConnectorDocumentationLayout.module.scss +++ b/airbyte-webapp/src/views/Connector/ConnectorDocumentationLayout/ConnectorDocumentationLayout.module.scss @@ -2,12 +2,6 @@ @use "scss/mixins"; @use "scss/colors"; -.leftPanel { - > *:last-child { 
- padding-bottom: variables.$spacing-page-bottom-cloud; - } -} - .rightPanel { @include mixins.left-shadow; diff --git a/airbyte-webapp/src/views/Connector/ConnectorDocumentationLayout/ConnectorDocumentationLayout.tsx b/airbyte-webapp/src/views/Connector/ConnectorDocumentationLayout/ConnectorDocumentationLayout.tsx index d7b921768f2..bba9293157e 100644 --- a/airbyte-webapp/src/views/Connector/ConnectorDocumentationLayout/ConnectorDocumentationLayout.tsx +++ b/airbyte-webapp/src/views/Connector/ConnectorDocumentationLayout/ConnectorDocumentationLayout.tsx @@ -2,7 +2,7 @@ import React, { lazy, Suspense } from "react"; import { useIntl } from "react-intl"; import { useWindowSize } from "react-use"; -import { LoadingPage } from "components/LoadingPage"; +import { LoadingPage } from "components"; import { ResizablePanels } from "components/ui/ResizablePanels"; import { EXCLUDED_DOC_URLS } from "core/api"; @@ -10,9 +10,7 @@ import { EXCLUDED_DOC_URLS } from "core/api"; import styles from "./ConnectorDocumentationLayout.module.scss"; import { useDocumentationPanelContext } from "./DocumentationPanelContext"; -const LazyDocumentationPanel = lazy(() => - import("./DocumentationPanel").then(({ DocumentationPanel }) => ({ default: DocumentationPanel })) -); +const LazyDocumentationPanel = lazy(() => import("./DocumentationPanel")); export const ConnectorDocumentationLayout: React.FC> = ({ children }) => { const { formatMessage } = useIntl(); @@ -36,7 +34,6 @@ export const ConnectorDocumentationLayout: React.FC([\s\S]*?)/gm; const CLOUD_ENV_MARKERS = /([\s\S]*?)/gm; +const removeFirstHeading: Pluggable = () => { + // Remove the first heading from the markdown content, as it is already displayed in the header + return (tree: MdastRoot) => { + let headingRemoved = false; + tree.children = tree.children.filter((node: MdastNode) => { + if (node.type === "heading" && !headingRemoved) { + headingRemoved = true; + return false; + } + return true; + }); + }; +}; + +const remarkPlugins = [removeFirstHeading]; + export const prepareMarkdown = (markdown: string, env: "oss" | "cloud"): string => { // Remove any empty lines between tags and their content, as this causes // the content to be rendered as a raw string unless it contains a list, for reasons @@ -112,6 +134,20 @@ const FieldAnchor: React.FC> = ({ fie ); }; +const ConnectorDocumentationHeader: React.FC<{ selectedConnectorDefinition: ConnectorDefinition }> = ({ + selectedConnectorDefinition, +}) => { + const { name } = selectedConnectorDefinition; + return ( + +
+        {name} {/* surrounding header JSX (heading/flex wrappers) lost in extraction */}
+
+ ); +}; + export const DocumentationPanel: React.FC = () => { const { formatMessage } = useIntl(); const { setDocumentationPanelOpen, selectedConnectorDefinition } = useDocumentationPanelContext(); @@ -180,7 +216,15 @@ export const DocumentationPanel: React.FC = () => { - + + ); }; + +export default DocumentationPanel; diff --git a/airbyte-webapp/src/views/Connector/ConnectorForm/components/WarningMessage.tsx b/airbyte-webapp/src/views/Connector/ConnectorForm/components/WarningMessage.tsx index e71c56b6a03..caa3707ba89 100644 --- a/airbyte-webapp/src/views/Connector/ConnectorForm/components/WarningMessage.tsx +++ b/airbyte-webapp/src/views/Connector/ConnectorForm/components/WarningMessage.tsx @@ -18,7 +18,7 @@ export const WarningMessage: React.FC<{ supportLevel?: SupportLevel }> = ({ supp - + {" "} { const { formatMessage } = useIntl(); diff --git a/airbyte-worker-models/build.gradle.kts b/airbyte-worker-models/build.gradle.kts index 1fa38b1dca9..eb42cf5aa39 100644 --- a/airbyte-worker-models/build.gradle.kts +++ b/airbyte-worker-models/build.gradle.kts @@ -15,7 +15,6 @@ dependencies { implementation(project(":airbyte-commons")) implementation(project(":airbyte-config:config-models")) implementation(libs.airbyte.protocol) - implementation(project(":airbyte-api")) } jsonSchema2Pojo { diff --git a/airbyte-worker-models/src/main/java/io/airbyte/workers/models/RefreshSchemaActivityOutput.java b/airbyte-worker-models/src/main/java/io/airbyte/workers/models/RefreshSchemaActivityOutput.java index 4171ec91bc1..569639d9772 100644 --- a/airbyte-worker-models/src/main/java/io/airbyte/workers/models/RefreshSchemaActivityOutput.java +++ b/airbyte-worker-models/src/main/java/io/airbyte/workers/models/RefreshSchemaActivityOutput.java @@ -4,7 +4,7 @@ package io.airbyte.workers.models; -import io.airbyte.api.client.model.generated.CatalogDiff; +import io.airbyte.config.CatalogDiff; import lombok.AllArgsConstructor; import lombok.Data; import lombok.NoArgsConstructor; diff --git a/airbyte-worker-models/src/main/java/io/airbyte/workers/models/ReplicationActivityInput.java b/airbyte-worker-models/src/main/java/io/airbyte/workers/models/ReplicationActivityInput.java index 662b33a558c..d4791f76f47 100644 --- a/airbyte-worker-models/src/main/java/io/airbyte/workers/models/ReplicationActivityInput.java +++ b/airbyte-worker-models/src/main/java/io/airbyte/workers/models/ReplicationActivityInput.java @@ -50,8 +50,6 @@ public class ReplicationActivityInput { private UUID workspaceId; // The id of the connection associated with this sync. private UUID connectionId; - // Whether normalization should be run in the destination container. - private Boolean normalizeInDestinationContainer; // The task queue that replication will use. private String taskQueue; // Whether this 'sync' is performing a logical reset. 
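
The routing change in airbyte-webapp/src/pages/routes.tsx above serializes the full location (pathname, search, hash) into a `loginRedirect` search param on the login route, then replays it after authentication. A minimal sketch of that round trip, assuming react-router-dom v6; the literal "/login" path and both component names are stand-ins, since the real code goes through `RoutePaths.Login` and the app's `hooks/useQuery` helper:

```tsx
import React from "react";
import { createSearchParams, Navigate, useLocation } from "react-router-dom";

// Logged out: remember where the user was headed before sending them to the login page.
const RedirectToLogin: React.FC = () => {
  const { pathname, search, hash } = useLocation();
  const loginRedirect = `${pathname}${search}${hash}`;
  return <Navigate to={{ pathname: "/login", search: `${createSearchParams({ loginRedirect })}` }} />;
};

// Logged in: replay the saved location once, so a shared deep link survives the login wall.
const ReplayLoginRedirect: React.FC<{ loginRedirect?: string }> = ({ loginRedirect }) =>
  loginRedirect ? <Navigate to={loginRedirect} replace /> : null;
```

Using `replace` for the replay (an assumption; the corresponding JSX was stripped from the hunk above) keeps the intermediate `/login?loginRedirect=...` entry out of the browser history.
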
diff --git a/airbyte-worker-models/src/main/resources/workers_models/IntegrationLauncherConfig.yaml b/airbyte-worker-models/src/main/resources/workers_models/IntegrationLauncherConfig.yaml index 8e291392f33..0005eaaadcf 100644 --- a/airbyte-worker-models/src/main/resources/workers_models/IntegrationLauncherConfig.yaml +++ b/airbyte-worker-models/src/main/resources/workers_models/IntegrationLauncherConfig.yaml @@ -22,13 +22,6 @@ properties: format: uuid dockerImage: type: string - normalizationDockerImage: - type: string - supportsDbt: - type: boolean - default: false - normalizationIntegrationType: - type: string protocolVersion: type: object existingJavaType: io.airbyte.commons.version.Version diff --git a/airbyte-worker-models/src/main/resources/workers_models/ReplicationInput.yaml b/airbyte-worker-models/src/main/resources/workers_models/ReplicationInput.yaml index 658271d0392..b8bb5b52976 100644 --- a/airbyte-worker-models/src/main/resources/workers_models/ReplicationInput.yaml +++ b/airbyte-worker-models/src/main/resources/workers_models/ReplicationInput.yaml @@ -65,10 +65,6 @@ properties: description: The id of the connection associated with this sync type: string format: uuid - normalizeInDestinationContainer: - description: whether normalization should be run in the destination container - type: boolean - default: false isReset: description: whether this 'sync' is performing a logical reset type: boolean diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/config/ActivityBeanFactory.java b/airbyte-workers/src/main/java/io/airbyte/workers/config/ActivityBeanFactory.java index af09098745c..655e7964c25 100644 --- a/airbyte-workers/src/main/java/io/airbyte/workers/config/ActivityBeanFactory.java +++ b/airbyte-workers/src/main/java/io/airbyte/workers/config/ActivityBeanFactory.java @@ -24,11 +24,9 @@ import io.airbyte.workers.temporal.scheduling.activities.StreamResetActivity; import io.airbyte.workers.temporal.scheduling.activities.WorkflowConfigActivity; import io.airbyte.workers.temporal.spec.SpecActivity; -import io.airbyte.workers.temporal.sync.DbtTransformationActivity; -import io.airbyte.workers.temporal.sync.NormalizationActivity; -import io.airbyte.workers.temporal.sync.NormalizationSummaryCheckActivity; import io.airbyte.workers.temporal.sync.RefreshSchemaActivity; import io.airbyte.workers.temporal.sync.ReplicationActivity; +import io.airbyte.workers.temporal.sync.ReportRunTimeActivity; import io.airbyte.workers.temporal.sync.WebhookOperationActivity; import io.airbyte.workers.temporal.sync.WorkloadFeatureFlagActivity; import io.micronaut.context.annotation.Factory; @@ -113,15 +111,13 @@ public List specActivities( @Named("syncActivities") public List syncActivities( final ReplicationActivity replicationActivity, - final NormalizationActivity normalizationActivity, - final DbtTransformationActivity dbtTransformationActivity, - final NormalizationSummaryCheckActivity normalizationSummaryCheckActivity, final WebhookOperationActivity webhookOperationActivity, final ConfigFetchActivity configFetchActivity, final RefreshSchemaActivity refreshSchemaActivity, - final WorkloadFeatureFlagActivity workloadFeatureFlagActivity) { - return List.of(replicationActivity, normalizationActivity, dbtTransformationActivity, normalizationSummaryCheckActivity, - webhookOperationActivity, configFetchActivity, refreshSchemaActivity, workloadFeatureFlagActivity); + final WorkloadFeatureFlagActivity workloadFeatureFlagActivity, + final ReportRunTimeActivity reportRunTimeActivity) { + return 
List.of(replicationActivity, webhookOperationActivity, configFetchActivity, + refreshSchemaActivity, workloadFeatureFlagActivity, reportRunTimeActivity); } @Singleton diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/check/connection/CheckConnectionActivityImpl.java b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/check/connection/CheckConnectionActivityImpl.java index 042ed54fce9..52608acd822 100644 --- a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/check/connection/CheckConnectionActivityImpl.java +++ b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/check/connection/CheckConnectionActivityImpl.java @@ -38,9 +38,12 @@ import io.airbyte.config.helpers.LogConfigs; import io.airbyte.config.helpers.ResourceRequirementsUtils; import io.airbyte.config.secrets.SecretsRepositoryReader; +import io.airbyte.featureflag.Empty; import io.airbyte.featureflag.FeatureFlagClient; import io.airbyte.featureflag.UseWorkloadApi; +import io.airbyte.featureflag.WorkloadApiServerEnabled; import io.airbyte.featureflag.WorkloadCheckFrequencyInSeconds; +import io.airbyte.featureflag.WorkloadLauncherEnabled; import io.airbyte.featureflag.Workspace; import io.airbyte.metrics.lib.ApmTraceUtils; import io.airbyte.metrics.lib.MetricAttribute; @@ -282,7 +285,11 @@ public ConnectorJobOutput runWithWorkload(final CheckConnectionInput input) thro @Override public boolean shouldUseWorkload(final UUID workspaceId) { - return featureFlagClient.boolVariation(UseWorkloadApi.INSTANCE, new Workspace(workspaceId)); + var ffCheck = featureFlagClient.boolVariation(UseWorkloadApi.INSTANCE, new Workspace(workspaceId)); + var envCheck = featureFlagClient.boolVariation(WorkloadLauncherEnabled.INSTANCE, Empty.INSTANCE) + && featureFlagClient.boolVariation(WorkloadApiServerEnabled.INSTANCE, Empty.INSTANCE); + + return ffCheck || envCheck; } @VisibleForTesting diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/discover/catalog/DiscoverCatalogActivity.java b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/discover/catalog/DiscoverCatalogActivity.java index 0ec1645f2b6..e44c625c644 100644 --- a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/discover/catalog/DiscoverCatalogActivity.java +++ b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/discover/catalog/DiscoverCatalogActivity.java @@ -10,6 +10,8 @@ import io.airbyte.persistence.job.models.JobRunConfig; import io.airbyte.workers.exception.WorkerException; import io.airbyte.workers.models.DiscoverCatalogInput; +import io.airbyte.workers.models.PostprocessCatalogInput; +import io.airbyte.workers.models.PostprocessCatalogOutput; import io.temporal.activity.ActivityInterface; import io.temporal.activity.ActivityMethod; import java.util.UUID; @@ -37,4 +39,11 @@ ConnectorJobOutput run(JobRunConfig jobRunConfig, @ActivityMethod void reportFailure(final Boolean workloadEnabled); + /** + * Perform catalog diffing, subsequent disabling of the connection and any other necessary + * operations after performing the discover. 
+ */ + @ActivityMethod + PostprocessCatalogOutput postprocess(final PostprocessCatalogInput input); + } diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/discover/catalog/DiscoverCatalogActivityImpl.java b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/discover/catalog/DiscoverCatalogActivityImpl.java index 1ed30c124b8..39f0c5c88cf 100644 --- a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/discover/catalog/DiscoverCatalogActivityImpl.java +++ b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/discover/catalog/DiscoverCatalogActivityImpl.java @@ -14,11 +14,14 @@ import com.google.common.annotations.VisibleForTesting; import datadog.trace.api.Trace; import io.airbyte.api.client.AirbyteApiClient; +import io.airbyte.api.client.model.generated.ConnectionAutoPropagateSchemaChange; import io.airbyte.api.client.model.generated.ConnectionIdRequestBody; +import io.airbyte.api.client.model.generated.DiffCatalogRequestBody; import io.airbyte.api.client.model.generated.Geography; import io.airbyte.api.client.model.generated.ScopeType; import io.airbyte.api.client.model.generated.SecretPersistenceConfig; import io.airbyte.api.client.model.generated.SecretPersistenceConfigGetRequestBody; +import io.airbyte.api.client.model.generated.SourceDiscoverSchemaRead; import io.airbyte.api.client.model.generated.WorkspaceIdRequestBody; import io.airbyte.commons.converters.ConnectorConfigUpdater; import io.airbyte.commons.features.FeatureFlags; @@ -41,11 +44,14 @@ import io.airbyte.config.helpers.ResourceRequirementsUtils; import io.airbyte.config.secrets.SecretsRepositoryReader; import io.airbyte.config.secrets.persistence.RuntimeSecretPersistence; +import io.airbyte.featureflag.Empty; import io.airbyte.featureflag.FeatureFlagClient; import io.airbyte.featureflag.Organization; import io.airbyte.featureflag.UseRuntimeSecretPersistence; import io.airbyte.featureflag.UseWorkloadApi; +import io.airbyte.featureflag.WorkloadApiServerEnabled; import io.airbyte.featureflag.WorkloadCheckFrequencyInSeconds; +import io.airbyte.featureflag.WorkloadLauncherEnabled; import io.airbyte.featureflag.Workspace; import io.airbyte.metrics.lib.ApmTraceUtils; import io.airbyte.metrics.lib.MetricAttribute; @@ -57,11 +63,14 @@ import io.airbyte.workers.Worker; import io.airbyte.workers.exception.WorkerException; import io.airbyte.workers.general.DefaultDiscoverCatalogWorker; +import io.airbyte.workers.helper.CatalogDiffConverter; import io.airbyte.workers.helper.GsonPksExtractor; import io.airbyte.workers.helper.SecretPersistenceConfigHelper; import io.airbyte.workers.internal.AirbyteStreamFactory; import io.airbyte.workers.internal.VersionedAirbyteStreamFactory; import io.airbyte.workers.models.DiscoverCatalogInput; +import io.airbyte.workers.models.PostprocessCatalogInput; +import io.airbyte.workers.models.PostprocessCatalogOutput; import io.airbyte.workers.process.AirbyteIntegrationLauncher; import io.airbyte.workers.process.IntegrationLauncher; import io.airbyte.workers.process.Metadata; @@ -83,6 +92,7 @@ import java.util.Collections; import java.util.List; import java.util.Map; +import java.util.Objects; import java.util.Optional; import java.util.UUID; import java.util.concurrent.atomic.AtomicReference; @@ -244,7 +254,11 @@ public ConnectorJobOutput runWithWorkload(final DiscoverCatalogInput input) thro @Override public boolean shouldUseWorkload(final UUID workspaceId) { - return featureFlagClient.boolVariation(UseWorkloadApi.INSTANCE, new Workspace(workspaceId)); + var ffCheck = 
featureFlagClient.boolVariation(UseWorkloadApi.INSTANCE, new Workspace(workspaceId)); + var envCheck = featureFlagClient.boolVariation(WorkloadLauncherEnabled.INSTANCE, Empty.INSTANCE) + && featureFlagClient.boolVariation(WorkloadApiServerEnabled.INSTANCE, Empty.INSTANCE); + + return ffCheck || envCheck; } @Override @@ -263,6 +277,35 @@ public void reportFailure(final Boolean workloadEnabled) { new MetricAttribute("workload_enabled", workloadEnabledStr)); } + @Override + public PostprocessCatalogOutput postprocess(final PostprocessCatalogInput input) { + try { + Objects.requireNonNull(input.getConnectionId()); + Objects.requireNonNull(input.getCatalogId()); + Objects.requireNonNull(input.getWorkspaceId()); + + final var reqBody = new DiffCatalogRequestBody( + input.getCatalogId(), + input.getConnectionId()); + + final SourceDiscoverSchemaRead resp = airbyteApiClient.getConnectionApi().diffCatalogForConnection(reqBody); + Objects.requireNonNull(resp.getCatalog()); + + final var request = new ConnectionAutoPropagateSchemaChange( + resp.getCatalog(), + input.getCatalogId(), + input.getConnectionId(), + input.getWorkspaceId()); + + final var propagatedDiff = airbyteApiClient.getConnectionApi().applySchemaChangeForConnection(request).getPropagatedDiff(); + final var domainDiff = propagatedDiff != null ? CatalogDiffConverter.toDomain(propagatedDiff) : null; + + return PostprocessCatalogOutput.Companion.success(domainDiff); + } catch (final Exception e) { + return PostprocessCatalogOutput.Companion.failure(e); + } + } + @VisibleForTesting Geography getGeography(final Optional maybeConnectionId, final Optional maybeWorkspaceId) throws WorkerException { try { diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/ConnectionManagerWorkflowImpl.java b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/ConnectionManagerWorkflowImpl.java index 260ff71b943..b6f823ad816 100644 --- a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/ConnectionManagerWorkflowImpl.java +++ b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/ConnectionManagerWorkflowImpl.java @@ -26,7 +26,6 @@ import io.airbyte.config.ConnectorJobOutput; import io.airbyte.config.FailureReason; import io.airbyte.config.FailureReason.FailureType; -import io.airbyte.config.NormalizationSummary; import io.airbyte.config.StandardCheckConnectionInput; import io.airbyte.config.StandardSyncInput; import io.airbyte.config.StandardSyncOutput; @@ -338,9 +337,7 @@ private CancellationScope generateSyncWorkflowRunnable(final ConnectionUpdaterIn log.debug("Ignoring canceled failure as it is handled by the cancellation scope."); // do nothing, cancellation handled by cancellationScope } else if (childWorkflowFailure.getCause()instanceof final ActivityFailure af) { - // Allows us to classify unhandled failures from the sync workflow. e.g. If the normalization - // activity throws an exception, for - // example, this lets us set the failureOrigin to normalization. + // Allows us to classify unhandled failures from the sync workflow. 
workflowInternalState.getFailures().add(FailureHelper.failureReasonFromWorkflowAndActivity( childWorkflowFailure.getWorkflowType(), af.getActivityType(), @@ -1048,14 +1045,6 @@ private boolean getFailStatus(final StandardSyncOutput standardSyncOutput) { return true; } - // catch normalization failure reasons - final NormalizationSummary normalizationSummary = standardSyncOutput.getNormalizationSummary(); - if (normalizationSummary != null && normalizationSummary.getFailures() != null - && !normalizationSummary.getFailures().isEmpty()) { - workflowInternalState.getFailures().addAll(normalizationSummary.getFailures()); - return true; - } - return false; } diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/spec/SpecActivityImpl.java b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/spec/SpecActivityImpl.java index c572a723d89..b7130cfb4be 100644 --- a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/spec/SpecActivityImpl.java +++ b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/spec/SpecActivityImpl.java @@ -30,9 +30,12 @@ import io.airbyte.config.ConnectorJobOutput.OutputType; import io.airbyte.config.JobGetSpecConfig; import io.airbyte.config.helpers.LogConfigs; +import io.airbyte.featureflag.Empty; import io.airbyte.featureflag.FeatureFlagClient; import io.airbyte.featureflag.UseWorkloadApi; +import io.airbyte.featureflag.WorkloadApiServerEnabled; import io.airbyte.featureflag.WorkloadCheckFrequencyInSeconds; +import io.airbyte.featureflag.WorkloadLauncherEnabled; import io.airbyte.featureflag.Workspace; import io.airbyte.metrics.lib.ApmTraceUtils; import io.airbyte.metrics.lib.MetricAttribute; @@ -202,7 +205,11 @@ public ConnectorJobOutput runWithWorkload(SpecInput input) throws WorkerExceptio @Override public boolean shouldUseWorkload(UUID workspaceId) { - return featureFlagClient.boolVariation(UseWorkloadApi.INSTANCE, new Workspace(workspaceId)); + var ffCheck = featureFlagClient.boolVariation(UseWorkloadApi.INSTANCE, new Workspace(workspaceId)); + var envCheck = featureFlagClient.boolVariation(WorkloadLauncherEnabled.INSTANCE, Empty.INSTANCE) + && featureFlagClient.boolVariation(WorkloadApiServerEnabled.INSTANCE, Empty.INSTANCE); + + return ffCheck || envCheck; } @Override diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/sync/DbtTransformationActivity.java b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/sync/DbtTransformationActivity.java deleted file mode 100644 index 1a278c7e5a3..00000000000 --- a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/sync/DbtTransformationActivity.java +++ /dev/null @@ -1,26 +0,0 @@ -/* - * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.workers.temporal.sync; - -import io.airbyte.config.OperatorDbtInput; -import io.airbyte.config.ResourceRequirements; -import io.airbyte.persistence.job.models.IntegrationLauncherConfig; -import io.airbyte.persistence.job.models.JobRunConfig; -import io.temporal.activity.ActivityInterface; -import io.temporal.activity.ActivityMethod; - -/** - * DbtTransformationActivity. 
- */ -@ActivityInterface -public interface DbtTransformationActivity { - - @ActivityMethod - Void run(JobRunConfig jobRunConfig, - IntegrationLauncherConfig destinationLauncherConfig, - ResourceRequirements resourceRequirements, - OperatorDbtInput input); - -} diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/sync/DbtTransformationActivityImpl.java b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/sync/DbtTransformationActivityImpl.java deleted file mode 100644 index 1be442c8949..00000000000 --- a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/sync/DbtTransformationActivityImpl.java +++ /dev/null @@ -1,213 +0,0 @@ -/* - * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.workers.temporal.sync; - -import static io.airbyte.metrics.lib.ApmTraceConstants.ACTIVITY_TRACE_OPERATION_NAME; -import static io.airbyte.metrics.lib.ApmTraceConstants.Tags.ATTEMPT_NUMBER_KEY; -import static io.airbyte.metrics.lib.ApmTraceConstants.Tags.DESTINATION_DOCKER_IMAGE_KEY; -import static io.airbyte.metrics.lib.ApmTraceConstants.Tags.JOB_ID_KEY; - -import com.fasterxml.jackson.databind.JsonNode; -import datadog.trace.api.Trace; -import io.airbyte.api.client.AirbyteApiClient; -import io.airbyte.api.client.model.generated.ScopeType; -import io.airbyte.api.client.model.generated.SecretPersistenceConfig; -import io.airbyte.api.client.model.generated.SecretPersistenceConfigGetRequestBody; -import io.airbyte.commons.functional.CheckedSupplier; -import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.temporal.HeartbeatUtils; -import io.airbyte.commons.workers.config.WorkerConfigs; -import io.airbyte.commons.workers.config.WorkerConfigsProvider; -import io.airbyte.commons.workers.config.WorkerConfigsProvider.ResourceType; -import io.airbyte.config.AirbyteConfigValidator; -import io.airbyte.config.ConfigSchema; -import io.airbyte.config.Configs.WorkerEnvironment; -import io.airbyte.config.OperatorDbtInput; -import io.airbyte.config.ResourceRequirements; -import io.airbyte.config.helpers.LogConfigs; -import io.airbyte.config.secrets.SecretsRepositoryReader; -import io.airbyte.config.secrets.persistence.RuntimeSecretPersistence; -import io.airbyte.featureflag.FeatureFlagClient; -import io.airbyte.featureflag.Organization; -import io.airbyte.featureflag.UseRuntimeSecretPersistence; -import io.airbyte.metrics.lib.ApmTraceUtils; -import io.airbyte.metrics.lib.MetricClient; -import io.airbyte.metrics.lib.MetricClientFactory; -import io.airbyte.metrics.lib.OssMetricsRegistry; -import io.airbyte.persistence.job.models.IntegrationLauncherConfig; -import io.airbyte.persistence.job.models.JobRunConfig; -import io.airbyte.workers.ContainerOrchestratorConfig; -import io.airbyte.workers.Worker; -import io.airbyte.workers.general.DbtTransformationRunner; -import io.airbyte.workers.general.DbtTransformationWorker; -import io.airbyte.workers.helper.SecretPersistenceConfigHelper; -import io.airbyte.workers.process.ProcessFactory; -import io.airbyte.workers.sync.DbtLauncherWorker; -import io.airbyte.workers.temporal.TemporalAttemptExecution; -import io.airbyte.workers.workload.WorkloadIdGenerator; -import io.micronaut.context.annotation.Value; -import io.temporal.activity.Activity; -import io.temporal.activity.ActivityExecutionContext; -import jakarta.inject.Named; -import jakarta.inject.Singleton; -import java.io.IOException; -import java.nio.file.Path; -import java.util.Map; -import java.util.Optional; -import java.util.UUID; -import 
java.util.concurrent.atomic.AtomicReference; -import java.util.function.Supplier; - -/** - * DbtTransformationActivityImpl. - */ -@Singleton -public class DbtTransformationActivityImpl implements DbtTransformationActivity { - - private final Optional containerOrchestratorConfig; - private final WorkerConfigsProvider workerConfigsProvider; - private final ProcessFactory processFactory; - private final SecretsRepositoryReader secretsRepositoryReader; - private final Path workspaceRoot; - private final WorkerEnvironment workerEnvironment; - private final LogConfigs logConfigs; - private final String airbyteVersion; - private final Integer serverPort; - private final AirbyteConfigValidator airbyteConfigValidator; - private final AirbyteApiClient airbyteApiClient; - private final FeatureFlagClient featureFlagClient; - private final MetricClient metricClient; - private final WorkloadIdGenerator workloadIdGenerator; - - public DbtTransformationActivityImpl(@Named("containerOrchestratorConfig") final Optional containerOrchestratorConfig, - final WorkerConfigsProvider workerConfigsProvider, - final ProcessFactory processFactory, - final SecretsRepositoryReader secretsRepositoryReader, - @Named("workspaceRoot") final Path workspaceRoot, - final WorkerEnvironment workerEnvironment, - final LogConfigs logConfigs, - @Value("${airbyte.version}") final String airbyteVersion, - @Value("${micronaut.server.port}") final Integer serverPort, - final AirbyteConfigValidator airbyteConfigValidator, - final AirbyteApiClient airbyteApiClient, - final FeatureFlagClient featureFlagClient, - final MetricClient metricClient, - final WorkloadIdGenerator workloadIdGenerator) { - this.containerOrchestratorConfig = containerOrchestratorConfig; - this.workerConfigsProvider = workerConfigsProvider; - this.processFactory = processFactory; - this.secretsRepositoryReader = secretsRepositoryReader; - this.workspaceRoot = workspaceRoot; - this.workerEnvironment = workerEnvironment; - this.logConfigs = logConfigs; - this.airbyteVersion = airbyteVersion; - this.serverPort = serverPort; - this.airbyteConfigValidator = airbyteConfigValidator; - this.airbyteApiClient = airbyteApiClient; - this.featureFlagClient = featureFlagClient; - this.metricClient = metricClient; - this.workloadIdGenerator = workloadIdGenerator; - } - - @Trace(operationName = ACTIVITY_TRACE_OPERATION_NAME) - @Override - public Void run(final JobRunConfig jobRunConfig, - final IntegrationLauncherConfig destinationLauncherConfig, - final ResourceRequirements resourceRequirements, - final OperatorDbtInput input) { - MetricClientFactory.getMetricClient().count(OssMetricsRegistry.ACTIVITY_DBT_TRANSFORMATION, 1); - - ApmTraceUtils.addTagsToTrace( - Map.of(ATTEMPT_NUMBER_KEY, jobRunConfig.getAttemptId(), JOB_ID_KEY, jobRunConfig.getJobId(), DESTINATION_DOCKER_IMAGE_KEY, - destinationLauncherConfig.getDockerImage())); - final ActivityExecutionContext context = Activity.getExecutionContext(); - final AtomicReference cancellationCallback = new AtomicReference<>(null); - return HeartbeatUtils.withBackgroundHeartbeat( - cancellationCallback, - () -> { - final JsonNode fullDestinationConfig; - final UUID organizationId = input.getConnectionContext().getOrganizationId(); - if (organizationId != null && featureFlagClient.boolVariation(UseRuntimeSecretPersistence.INSTANCE, new Organization(organizationId))) { - try { - final SecretPersistenceConfig secretPersistenceConfig = airbyteApiClient.getSecretPersistenceConfigApi().getSecretsPersistenceConfig( - new 
SecretPersistenceConfigGetRequestBody(ScopeType.ORGANIZATION, organizationId)); - final RuntimeSecretPersistence runtimeSecretPersistence = - SecretPersistenceConfigHelper.fromApiSecretPersistenceConfig(secretPersistenceConfig); - fullDestinationConfig = - secretsRepositoryReader.hydrateConfigFromRuntimeSecretPersistence(input.getDestinationConfiguration(), runtimeSecretPersistence); - } catch (final IOException e) { - throw new RuntimeException(e); - } - } else { - fullDestinationConfig = secretsRepositoryReader.hydrateConfigFromDefaultSecretPersistence(input.getDestinationConfiguration()); - } - final var fullInput = Jsons.clone(input).withDestinationConfiguration(fullDestinationConfig); - - final Supplier inputSupplier = () -> { - airbyteConfigValidator.ensureAsRuntime(ConfigSchema.OPERATOR_DBT_INPUT, Jsons.jsonNode(fullInput)); - return fullInput; - }; - - final CheckedSupplier, Exception> workerFactory; - - if (containerOrchestratorConfig.isPresent()) { - final WorkerConfigs workerConfigs = workerConfigsProvider.getConfig(ResourceType.DEFAULT); - workerFactory = - getContainerLauncherWorkerFactory(workerConfigs, destinationLauncherConfig, jobRunConfig, - input.getConnectionId(), input.getWorkspaceId()); - } else { - workerFactory = getLegacyWorkerFactory(destinationLauncherConfig, jobRunConfig, resourceRequirements); - } - final var worker = workerFactory.get(); - cancellationCallback.set(worker::cancel); - - final TemporalAttemptExecution temporalAttemptExecution = - new TemporalAttemptExecution<>( - workspaceRoot, workerEnvironment, logConfigs, - jobRunConfig, - worker, - inputSupplier.get(), - airbyteApiClient, - airbyteVersion, - () -> context); - - return temporalAttemptExecution.get(); - }, - context); - } - - private CheckedSupplier, Exception> getLegacyWorkerFactory(final IntegrationLauncherConfig destinationLauncherConfig, - final JobRunConfig jobRunConfig, - final ResourceRequirements resourceRequirements) { - return () -> new DbtTransformationWorker( - jobRunConfig.getJobId(), - Math.toIntExact(jobRunConfig.getAttemptId()), - resourceRequirements, - new DbtTransformationRunner(processFactory, destinationLauncherConfig.getDockerImage()), - () -> {}); - } - - @SuppressWarnings("LineLength") - private CheckedSupplier, Exception> getContainerLauncherWorkerFactory( - final WorkerConfigs workerConfigs, - final IntegrationLauncherConfig destinationLauncherConfig, - final JobRunConfig jobRunConfig, - final UUID connectionId, - final UUID workspaceId) { - - return () -> new DbtLauncherWorker( - connectionId, - workspaceId, - destinationLauncherConfig, - jobRunConfig, - workerConfigs, - containerOrchestratorConfig.get(), - serverPort, - featureFlagClient, - metricClient, - workloadIdGenerator); - } - -} diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/sync/NormalizationActivity.java b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/sync/NormalizationActivity.java deleted file mode 100644 index 530137a35ba..00000000000 --- a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/sync/NormalizationActivity.java +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.workers.temporal.sync; - -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.config.NormalizationInput; -import io.airbyte.config.NormalizationSummary; -import io.airbyte.persistence.job.models.IntegrationLauncherConfig; -import io.airbyte.persistence.job.models.JobRunConfig; -import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; -import io.temporal.activity.ActivityInterface; -import io.temporal.activity.ActivityMethod; -import jakarta.annotation.Nullable; -import java.util.UUID; - -/** - * Normalization activity temporal interface. - */ -@ActivityInterface -public interface NormalizationActivity { - - @ActivityMethod - NormalizationSummary normalize(JobRunConfig jobRunConfig, - IntegrationLauncherConfig destinationLauncherConfig, - NormalizationInput input); - - @ActivityMethod - NormalizationInput generateNormalizationInputWithMinimumPayloadWithConnectionId(final JsonNode destinationConfiguration, - @Deprecated @Nullable final ConfiguredAirbyteCatalog airbyteCatalog, - final UUID workspaceId, - final UUID connectionId, - final UUID organizationId); - -} diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/sync/NormalizationActivityImpl.java b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/sync/NormalizationActivityImpl.java deleted file mode 100644 index f35300e75e8..00000000000 --- a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/sync/NormalizationActivityImpl.java +++ /dev/null @@ -1,311 +0,0 @@ -/* - * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.workers.temporal.sync; - -import static io.airbyte.metrics.lib.ApmTraceConstants.ACTIVITY_TRACE_OPERATION_NAME; -import static io.airbyte.metrics.lib.ApmTraceConstants.Tags.ATTEMPT_NUMBER_KEY; -import static io.airbyte.metrics.lib.ApmTraceConstants.Tags.DESTINATION_DOCKER_IMAGE_KEY; -import static io.airbyte.metrics.lib.ApmTraceConstants.Tags.JOB_ID_KEY; - -import com.fasterxml.jackson.databind.JsonNode; -import com.google.common.annotations.VisibleForTesting; -import datadog.trace.api.Trace; -import io.airbyte.api.client.AirbyteApiClient; -import io.airbyte.api.client.model.generated.ConnectionIdRequestBody; -import io.airbyte.api.client.model.generated.ConnectionRead; -import io.airbyte.api.client.model.generated.ScopeType; -import io.airbyte.api.client.model.generated.SecretPersistenceConfig; -import io.airbyte.api.client.model.generated.SecretPersistenceConfigGetRequestBody; -import io.airbyte.commons.converters.CatalogClientConverters; -import io.airbyte.commons.enums.Enums; -import io.airbyte.commons.functional.CheckedSupplier; -import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.protocol.migrations.v1.CatalogMigrationV1Helper; -import io.airbyte.commons.temporal.HeartbeatUtils; -import io.airbyte.commons.version.Version; -import io.airbyte.commons.workers.config.WorkerConfigs; -import io.airbyte.commons.workers.config.WorkerConfigsProvider; -import io.airbyte.commons.workers.config.WorkerConfigsProvider.ResourceType; -import io.airbyte.config.AirbyteConfigValidator; -import io.airbyte.config.ConfigSchema; -import io.airbyte.config.Configs.WorkerEnvironment; -import io.airbyte.config.ConnectionContext; -import io.airbyte.config.JobSyncConfig; -import io.airbyte.config.NormalizationInput; -import io.airbyte.config.NormalizationSummary; -import io.airbyte.config.ResourceRequirements; -import io.airbyte.config.helpers.LogConfigs; -import io.airbyte.config.secrets.SecretsRepositoryReader; -import 
io.airbyte.config.secrets.persistence.RuntimeSecretPersistence; -import io.airbyte.featureflag.FeatureFlagClient; -import io.airbyte.featureflag.Organization; -import io.airbyte.featureflag.UseRuntimeSecretPersistence; -import io.airbyte.metrics.lib.ApmTraceUtils; -import io.airbyte.metrics.lib.MetricClient; -import io.airbyte.metrics.lib.MetricClientFactory; -import io.airbyte.metrics.lib.OssMetricsRegistry; -import io.airbyte.persistence.job.models.IntegrationLauncherConfig; -import io.airbyte.persistence.job.models.JobRunConfig; -import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; -import io.airbyte.workers.ContainerOrchestratorConfig; -import io.airbyte.workers.Worker; -import io.airbyte.workers.general.DefaultNormalizationWorker; -import io.airbyte.workers.helper.SecretPersistenceConfigHelper; -import io.airbyte.workers.internal.NamespacingMapper; -import io.airbyte.workers.normalization.DefaultNormalizationRunner; -import io.airbyte.workers.process.ProcessFactory; -import io.airbyte.workers.sync.NormalizationLauncherWorker; -import io.airbyte.workers.temporal.TemporalAttemptExecution; -import io.airbyte.workers.workload.WorkloadIdGenerator; -import io.micronaut.context.annotation.Value; -import io.temporal.activity.Activity; -import io.temporal.activity.ActivityExecutionContext; -import jakarta.annotation.Nullable; -import jakarta.inject.Named; -import jakarta.inject.Singleton; -import java.io.IOException; -import java.nio.file.Path; -import java.util.Map; -import java.util.Optional; -import java.util.UUID; -import java.util.concurrent.atomic.AtomicReference; -import java.util.function.Supplier; -import lombok.extern.slf4j.Slf4j; - -/** - * Normalization temporal activity impl. - */ -@Singleton -@Slf4j -public class NormalizationActivityImpl implements NormalizationActivity { - - private final Optional containerOrchestratorConfig; - private final WorkerConfigsProvider workerConfigsProvider; - private final ProcessFactory processFactory; - private final SecretsRepositoryReader secretsRepositoryReader; - private final Path workspaceRoot; - private final WorkerEnvironment workerEnvironment; - private final LogConfigs logConfigs; - private final String airbyteVersion; - private final Integer serverPort; - private final AirbyteConfigValidator airbyteConfigValidator; - private final AirbyteApiClient airbyteApiClient; - private final FeatureFlagClient featureFlagClient; - private final MetricClient metricClient; - private final WorkloadIdGenerator workloadIdGenerator; - - private static final String V1_NORMALIZATION_MINOR_VERSION = "3"; - - public NormalizationActivityImpl(@Named("containerOrchestratorConfig") final Optional containerOrchestratorConfig, - final WorkerConfigsProvider workerConfigsProvider, - final ProcessFactory processFactory, - final SecretsRepositoryReader secretsRepositoryReader, - @Named("workspaceRoot") final Path workspaceRoot, - final WorkerEnvironment workerEnvironment, - final LogConfigs logConfigs, - @Value("${airbyte.version}") final String airbyteVersion, - @Value("${micronaut.server.port}") final Integer serverPort, - final AirbyteConfigValidator airbyteConfigValidator, - final AirbyteApiClient airbyteApiClient, - final FeatureFlagClient featureFlagClient, - final MetricClient metricClient, - final WorkloadIdGenerator workloadIdGenerator) { - this.containerOrchestratorConfig = containerOrchestratorConfig; - this.workerConfigsProvider = workerConfigsProvider; - this.processFactory = processFactory; - this.secretsRepositoryReader = 
secretsRepositoryReader; - this.workspaceRoot = workspaceRoot; - this.workerEnvironment = workerEnvironment; - this.logConfigs = logConfigs; - this.airbyteVersion = airbyteVersion; - this.serverPort = serverPort; - this.airbyteConfigValidator = airbyteConfigValidator; - this.airbyteApiClient = airbyteApiClient; - this.featureFlagClient = featureFlagClient; - this.metricClient = metricClient; - this.workloadIdGenerator = workloadIdGenerator; - } - - @Trace(operationName = ACTIVITY_TRACE_OPERATION_NAME) - @Override - public NormalizationSummary normalize(final JobRunConfig jobRunConfig, - final IntegrationLauncherConfig destinationLauncherConfig, - final NormalizationInput input) { - MetricClientFactory.getMetricClient().count(OssMetricsRegistry.ACTIVITY_NORMALIZATION, 1); - - ApmTraceUtils.addTagsToTrace( - Map.of(ATTEMPT_NUMBER_KEY, jobRunConfig.getAttemptId(), JOB_ID_KEY, jobRunConfig.getJobId(), DESTINATION_DOCKER_IMAGE_KEY, - destinationLauncherConfig.getDockerImage())); - final ActivityExecutionContext context = Activity.getExecutionContext(); - final AtomicReference cancellationCallback = new AtomicReference<>(null); - return HeartbeatUtils.withBackgroundHeartbeat( - cancellationCallback, - () -> { - final NormalizationInput fullInput = hydrateNormalizationInput(input); - - // Check the version of normalization - // We require at least version 0.3.0 to support data types v1. Using an older version would lead to - // all columns being typed as JSONB. If normalization is using an older version, fallback to using - // v0 data types. - if (!normalizationSupportsV1DataTypes(destinationLauncherConfig)) { - log.info("Using protocol v0"); - CatalogMigrationV1Helper.downgradeSchemaIfNeeded(fullInput.getCatalog()); - } else { - - // This should only be useful for syncs that started before the release that contained v1 migration. - // However, we lack the effective way to detect those syncs so this code should remain until we - // phase v0 out. - // Performance impact should be low considering the nature of the check compared to the time to run - // normalization. - log.info("Using protocol v1"); - CatalogMigrationV1Helper.upgradeSchemaIfNeeded(fullInput.getCatalog()); - } - - final Supplier inputSupplier = () -> { - airbyteConfigValidator.ensureAsRuntime(ConfigSchema.NORMALIZATION_INPUT, Jsons.jsonNode(fullInput)); - return fullInput; - }; - - final CheckedSupplier, Exception> workerFactory; - - log.info("Using normalization: " + destinationLauncherConfig.getNormalizationDockerImage()); - if (containerOrchestratorConfig.isPresent()) { - final WorkerConfigs workerConfigs = workerConfigsProvider.getConfig(ResourceType.DEFAULT); - workerFactory = getContainerLauncherWorkerFactory(workerConfigs, destinationLauncherConfig, jobRunConfig, - input.getConnectionId(), input.getWorkspaceId()); - } else { - workerFactory = getLegacyWorkerFactory(destinationLauncherConfig, jobRunConfig); - } - final var worker = workerFactory.get(); - cancellationCallback.set(worker::cancel); - - final TemporalAttemptExecution temporalAttemptExecution = new TemporalAttemptExecution<>( - workspaceRoot, workerEnvironment, logConfigs, - jobRunConfig, - worker, - inputSupplier.get(), - airbyteApiClient, - airbyteVersion, - () -> context); - - return temporalAttemptExecution.get(); - }, - context); - } - - private NormalizationInput hydrateNormalizationInput(final NormalizationInput input) throws Exception { - // Hydrate the destination config. 
-    final JsonNode fullDestinationConfig;
-    final UUID organizationId = input.getConnectionContext().getOrganizationId();
-    if (organizationId != null && featureFlagClient.boolVariation(UseRuntimeSecretPersistence.INSTANCE, new Organization(organizationId))) {
-      try {
-        final SecretPersistenceConfig secretPersistenceConfig = airbyteApiClient.getSecretPersistenceConfigApi().getSecretsPersistenceConfig(
-            new SecretPersistenceConfigGetRequestBody(ScopeType.ORGANIZATION, organizationId));
-        final RuntimeSecretPersistence runtimeSecretPersistence =
-            SecretPersistenceConfigHelper.fromApiSecretPersistenceConfig(secretPersistenceConfig);
-        fullDestinationConfig =
-            secretsRepositoryReader.hydrateConfigFromRuntimeSecretPersistence(input.getDestinationConfiguration(), runtimeSecretPersistence);
-      } catch (final IOException e) {
-        throw new RuntimeException(e);
-      }
-    } else {
-      fullDestinationConfig = secretsRepositoryReader.hydrateConfigFromDefaultSecretPersistence(input.getDestinationConfiguration());
-    }
-    // Retrieve the catalog.
-    final ConfiguredAirbyteCatalog catalog = retrieveCatalog(input.getConnectionId());
-    return input.withDestinationConfiguration(fullDestinationConfig).withCatalog(catalog);
-  }
-
-  @Trace(operationName = ACTIVITY_TRACE_OPERATION_NAME)
-  @Override
-  public NormalizationInput generateNormalizationInputWithMinimumPayloadWithConnectionId(final JsonNode destinationConfiguration,
-                                                                                         @Deprecated @Nullable final ConfiguredAirbyteCatalog unused,
-                                                                                         final UUID workspaceId,
-                                                                                         final UUID connectionId,
-                                                                                         final UUID organizationId) {
-    return new NormalizationInput()
-        .withConnectionId(connectionId)
-        .withDestinationConfiguration(destinationConfiguration)
-        .withCatalog(null) // this is null as we will hydrate downstream in the NormalizationActivity
-        .withResourceRequirements(getNormalizationResourceRequirements())
-        .withWorkspaceId(workspaceId)
-        // As much info as we can give.
-        .withConnectionContext(
-            new ConnectionContext()
-                .withOrganizationId(organizationId)
-                .withConnectionId(connectionId)
-                .withWorkspaceId(workspaceId));
-  }
-
-  private ResourceRequirements getNormalizationResourceRequirements() {
-    return workerConfigsProvider.getConfig(ResourceType.NORMALIZATION).getResourceRequirements();
-  }
-
-  @VisibleForTesting
-  static boolean normalizationSupportsV1DataTypes(final IntegrationLauncherConfig destinationLauncherConfig) {
-    try {
-      final Version normalizationVersion = new Version(getNormalizationImageTag(destinationLauncherConfig));
-      return V1_NORMALIZATION_MINOR_VERSION.equals(normalizationVersion.getMinorVersion());
-    } catch (final IllegalArgumentException e) {
-      // IllegalArgument here means that the version isn't in a semver format.
-      // The current behavior is to assume it supports v0 data types for dev purposes.
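For reference, stripped of the Version helper, the gate above reduces to a comparison on the semver minor component of the image tag. A minimal sketch, assuming the image string always carries a tag of the form "airbyte/normalization:0.3.1" (the helper name and simplified parsing are illustrative, not Airbyte's Version class):

// Illustrative sketch, not part of the diff: v1 data types were gated on minor version "3".
static boolean supportsV1DataTypes(final String normalizationDockerImage) {
  // e.g. "airbyte/normalization:0.3.1" -> tag "0.3.1"
  final String tag = normalizationDockerImage.split(":", 2)[1];
  final String[] parts = tag.split("\\.");
  // Tags without a minor component (e.g. "dev") are treated as v0.
  return parts.length >= 2 && "3".equals(parts[1]);
}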
- return false; - } - } - - private static String getNormalizationImageTag(final IntegrationLauncherConfig destinationLauncherConfig) { - return destinationLauncherConfig.getNormalizationDockerImage().split(":", 2)[1]; - } - - @SuppressWarnings("LineLength") - private CheckedSupplier, Exception> getLegacyWorkerFactory( - final IntegrationLauncherConfig destinationLauncherConfig, - final JobRunConfig jobRunConfig) { - return () -> new DefaultNormalizationWorker( - jobRunConfig.getJobId(), - Math.toIntExact(jobRunConfig.getAttemptId()), - new DefaultNormalizationRunner( - processFactory, - destinationLauncherConfig.getNormalizationDockerImage(), - destinationLauncherConfig.getNormalizationIntegrationType()), - workerEnvironment, () -> {}); - } - - @SuppressWarnings("LineLength") - private CheckedSupplier, Exception> getContainerLauncherWorkerFactory( - final WorkerConfigs workerConfigs, - final IntegrationLauncherConfig destinationLauncherConfig, - final JobRunConfig jobRunConfig, - final UUID connectionId, - final UUID workspaceId) { - return () -> new NormalizationLauncherWorker( - connectionId, - workspaceId, - destinationLauncherConfig, - jobRunConfig, - workerConfigs, - containerOrchestratorConfig.get(), - serverPort, - featureFlagClient, - metricClient, - workloadIdGenerator); - } - - private ConfiguredAirbyteCatalog retrieveCatalog(final UUID connectionId) throws Exception { - final ConnectionRead connectionInfo = airbyteApiClient.getConnectionApi().getConnection(new ConnectionIdRequestBody(connectionId)); - if (connectionInfo.getSyncCatalog() == null) { - throw new IllegalArgumentException("Connection is missing catalog, which is required"); - } - final ConfiguredAirbyteCatalog catalog = CatalogClientConverters.toConfiguredAirbyteProtocol(connectionInfo.getSyncCatalog()); - - // NOTE: when we passed the catalog through the activity input, this mapping was previously done - // during replication. - return new NamespacingMapper( - Enums.convertTo(connectionInfo.getNamespaceDefinition(), JobSyncConfig.NamespaceDefinitionType.class), - connectionInfo.getNamespaceFormat(), - connectionInfo.getPrefix()).mapCatalog(catalog); - } - -} diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/sync/NormalizationSummaryCheckActivity.java b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/sync/NormalizationSummaryCheckActivity.java deleted file mode 100644 index 67d086e4893..00000000000 --- a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/sync/NormalizationSummaryCheckActivity.java +++ /dev/null @@ -1,20 +0,0 @@ -/* - * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.workers.temporal.sync; - -import io.temporal.activity.ActivityInterface; -import io.temporal.activity.ActivityMethod; -import java.util.Optional; - -/** - * Normalization summary check temporal activity interface. 
- */ -@ActivityInterface -public interface NormalizationSummaryCheckActivity { - - @ActivityMethod - boolean shouldRunNormalization(Long jobId, Long attemptId, Optional numCommittedRecords); - -} diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/sync/NormalizationSummaryCheckActivityImpl.java b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/sync/NormalizationSummaryCheckActivityImpl.java deleted file mode 100644 index 2940f3e1507..00000000000 --- a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/sync/NormalizationSummaryCheckActivityImpl.java +++ /dev/null @@ -1,107 +0,0 @@ -/* - * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.workers.temporal.sync; - -import static io.airbyte.metrics.lib.ApmTraceConstants.ACTIVITY_TRACE_OPERATION_NAME; -import static io.airbyte.metrics.lib.ApmTraceConstants.Tags.ATTEMPT_NUMBER_KEY; -import static io.airbyte.metrics.lib.ApmTraceConstants.Tags.JOB_ID_KEY; - -import datadog.trace.api.Trace; -import io.airbyte.api.client.AirbyteApiClient; -import io.airbyte.api.client.model.generated.AttemptNormalizationStatusRead; -import io.airbyte.api.client.model.generated.AttemptNormalizationStatusReadList; -import io.airbyte.api.client.model.generated.JobIdRequestBody; -import io.airbyte.commons.temporal.exception.RetryableException; -import io.airbyte.metrics.lib.ApmTraceUtils; -import io.airbyte.metrics.lib.MetricClientFactory; -import io.airbyte.metrics.lib.OssMetricsRegistry; -import io.micronaut.http.HttpStatus; -import io.temporal.activity.Activity; -import jakarta.inject.Singleton; -import java.io.IOException; -import java.util.Comparator; -import java.util.Map; -import java.util.Optional; -import java.util.concurrent.atomic.AtomicBoolean; -import java.util.concurrent.atomic.AtomicLong; -import lombok.extern.slf4j.Slf4j; -import org.openapitools.client.infrastructure.ClientException; - -/** - * NormalizationSummaryCheckActivityImpl. 
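The removed implementation below scans attempts newest-first to decide whether normalization still has work to do. A compact sketch of that decision, assuming per-attempt statuses with the current attempt already filtered out (AttemptStatus is a hypothetical stand-in for AttemptNormalizationStatusRead); note that the original's "return" inside forEach skips only that element rather than stopping the scan, so the "break" below follows the comments' stated intent:

// Illustrative sketch, not part of the diff.
import java.util.List;
import java.util.Optional;

final class NormalizationDecision {

  // Stand-in for AttemptNormalizationStatusRead; attempts are assumed sorted newest-first.
  record AttemptStatus(boolean normalizationFailed, Long recordsCommitted) {}

  static boolean shouldRunNormalization(final Optional<Long> committedThisAttempt,
                                        final List<AttemptStatus> previousAttempts) {
    // An unknown or non-zero commit count on the current attempt always triggers normalization.
    if (committedThisAttempt.isEmpty() || committedThisAttempt.get() > 0) {
      return true;
    }
    long committedButUnnormalized = 0L;
    for (final AttemptStatus attempt : previousAttempts) {
      if (!attempt.normalizationFailed()) {
        break; // a prior successful normalization already covers older attempts
      }
      if (attempt.recordsCommitted() == null) {
        return true; // no commit data recorded: assume records exist and normalize
      }
      committedButUnnormalized += attempt.recordsCommitted();
    }
    return committedButUnnormalized > 0L;
  }
}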
- */
-@Slf4j
-@Singleton
-public class NormalizationSummaryCheckActivityImpl implements NormalizationSummaryCheckActivity {
-
-  private final AirbyteApiClient airbyteApiClient;
-
-  public NormalizationSummaryCheckActivityImpl(final AirbyteApiClient airbyteApiClient) {
-    this.airbyteApiClient = airbyteApiClient;
-  }
-
-  @Trace(operationName = ACTIVITY_TRACE_OPERATION_NAME)
-  @Override
-  @SuppressWarnings("PMD.AvoidLiteralsInIfCondition")
-  public boolean shouldRunNormalization(final Long jobId, final Long attemptNumber, final Optional<Long> numCommittedRecords) {
-    MetricClientFactory.getMetricClient().count(OssMetricsRegistry.ACTIVITY_NORMALIZATION_SUMMARY_CHECK, 1);
-
-    ApmTraceUtils.addTagsToTrace(Map.of(ATTEMPT_NUMBER_KEY, attemptNumber, JOB_ID_KEY, jobId));
-
-    // if the count of committed records for this attempt is > 0 OR if it is null,
-    // then we should run normalization
-    if (numCommittedRecords.isEmpty() || numCommittedRecords.get() > 0) {
-      return true;
-    }
-
-    final AttemptNormalizationStatusReadList AttemptNormalizationStatusReadList;
-    try {
-      AttemptNormalizationStatusReadList = airbyteApiClient.getJobsApi().getAttemptNormalizationStatusesForJob(new JobIdRequestBody(jobId));
-    } catch (final ClientException e) {
-      if (e.getStatusCode() == HttpStatus.NOT_FOUND.getCode()) {
-        throw e;
-      }
-      throw new RetryableException(e);
-    } catch (final IOException e) {
-      throw Activity.wrap(e);
-    }
-    final AtomicLong totalRecordsCommitted = new AtomicLong(0L);
-    final AtomicBoolean shouldReturnTrue = new AtomicBoolean(false);
-
-    AttemptNormalizationStatusReadList.getAttemptNormalizationStatuses().stream().sorted(Comparator.comparing(
-        AttemptNormalizationStatusRead::getAttemptNumber).reversed()).toList()
-        .forEach(n -> {
-          // Have to cast it because attemptNumber is read from JobRunConfig.
-          if (n.getAttemptNumber().intValue() == attemptNumber) {
-            return;
-          }
-
-          // if normalization succeeded on a previous attempt,
-          // we can stop looking at earlier attempts
-          if (!n.getHasNormalizationFailed()) {
-            return;
-          }
-
-          // if normalization failed on a past attempt, add the number of records committed on that
-          // attempt to the total committed count
-          // if there is no data recorded for the number of committed records, we should assume that there
-          // were committed records and run normalization
-          if (!n.getHasRecordsCommitted()) {
-            shouldReturnTrue.set(true);
-            return;
-          } else if (n.getRecordsCommitted().longValue() != 0L) {
-            totalRecordsCommitted.addAndGet(n.getRecordsCommitted());
-          }
-        });
-
-    if (shouldReturnTrue.get() || totalRecordsCommitted.get() > 0L) {
-      return true;
-    }
-
-    return false;
-
-  }
-
-}
diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/sync/RefreshSchemaActivity.java b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/sync/RefreshSchemaActivity.java
index 221a9543977..a3ccb82f294 100644
--- a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/sync/RefreshSchemaActivity.java
+++ b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/sync/RefreshSchemaActivity.java
@@ -19,8 +19,6 @@ public interface RefreshSchemaActivity {
   @ActivityMethod
   boolean shouldRefreshSchema(UUID sourceCatalogId);
 
-  void refreshSchema(UUID sourceCatalogId, UUID connectionId) throws Exception;
-
  /**
   * Refresh the schema. This will eventually replace the one above.
* diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/sync/RefreshSchemaActivityImpl.java b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/sync/RefreshSchemaActivityImpl.java index 1b69800515c..d32de00cfcf 100644 --- a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/sync/RefreshSchemaActivityImpl.java +++ b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/sync/RefreshSchemaActivityImpl.java @@ -34,6 +34,7 @@ import io.airbyte.metrics.lib.MetricClientFactory; import io.airbyte.metrics.lib.MetricTags; import io.airbyte.metrics.lib.OssMetricsRegistry; +import io.airbyte.workers.helper.CatalogDiffConverter; import io.airbyte.workers.models.RefreshSchemaActivityInput; import io.airbyte.workers.models.RefreshSchemaActivityOutput; import jakarta.inject.Singleton; @@ -103,7 +104,6 @@ private SourceDiscoverSchemaRead discoverSchemaForRefresh(final UUID sourceId, f return airbyteApiClient.getSourceApi().discoverSchemaForSource(requestBody); } - @Override @Trace(operationName = ACTIVITY_TRACE_OPERATION_NAME) public void refreshSchema(final UUID sourceId, final UUID connectionId) throws IOException { final var sourceDiscoverSchemaRead = discoverSchemaForRefresh(sourceId, connectionId); @@ -152,8 +152,10 @@ public RefreshSchemaActivityOutput refreshSchemaV2(final RefreshSchemaActivityIn connectionId, workspaceId); - final var output = new RefreshSchemaActivityOutput( - airbyteApiClient.getConnectionApi().applySchemaChangeForConnection(request).getPropagatedDiff()); + final var propagatedDiff = airbyteApiClient.getConnectionApi().applySchemaChangeForConnection(request).getPropagatedDiff(); + final var domainDiff = propagatedDiff != null ? CatalogDiffConverter.toDomain(propagatedDiff) : null; + + final var output = new RefreshSchemaActivityOutput(domainDiff); final var attrs = new MetricAttribute[] { new MetricAttribute(MetricTags.CONNECTION_ID, String.valueOf(connectionId)) diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/sync/ReplicationActivityImpl.java b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/sync/ReplicationActivityImpl.java index a1382148c46..38ffc35a8d8 100644 --- a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/sync/ReplicationActivityImpl.java +++ b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/sync/ReplicationActivityImpl.java @@ -17,7 +17,6 @@ import datadog.trace.api.Trace; import io.airbyte.api.client.AirbyteApiClient; import io.airbyte.api.client.WorkloadApiClient; -import io.airbyte.api.client.model.generated.StreamDescriptor; import io.airbyte.commons.functional.CheckedSupplier; import io.airbyte.commons.temporal.HeartbeatUtils; import io.airbyte.commons.temporal.utils.PayloadChecker; @@ -27,6 +26,7 @@ import io.airbyte.config.StandardSyncOutput; import io.airbyte.config.StandardSyncSummary; import io.airbyte.config.State; +import io.airbyte.config.StreamDescriptor; import io.airbyte.config.helpers.LogConfigs; import io.airbyte.config.secrets.SecretsRepositoryReader; import io.airbyte.featureflag.Connection; @@ -110,7 +110,7 @@ public ReplicationActivityImpl(final SecretsRepositoryReader secretsRepositoryRe @Named("outputStateClient") final OutputStorageClient stateStorageClient, @Named("outputCatalogClient") final OutputStorageClient catalogStorageClient, final ResumableFullRefreshStatsHelper resumableFullRefreshStatsHelper) { - this.replicationInputHydrator = new ReplicationInputHydrator(airbyteApiClient, secretsRepositoryReader, + this.replicationInputHydrator = new 
ReplicationInputHydrator(airbyteApiClient, resumableFullRefreshStatsHelper, secretsRepositoryReader, featureFlagClient); this.workspaceRoot = workspaceRoot; this.workerEnvironment = workerEnvironment; diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/sync/SyncWorkflowImpl.java b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/sync/SyncWorkflowImpl.java index 84626e17df4..59f98f405cd 100644 --- a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/sync/SyncWorkflowImpl.java +++ b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/sync/SyncWorkflowImpl.java @@ -15,8 +15,6 @@ import io.airbyte.api.client.model.generated.ConnectionStatus; import io.airbyte.commons.temporal.annotations.TemporalActivityStub; import io.airbyte.commons.temporal.scheduling.SyncWorkflow; -import io.airbyte.config.NormalizationInput; -import io.airbyte.config.NormalizationSummary; import io.airbyte.config.OperatorWebhookInput; import io.airbyte.config.StandardSyncInput; import io.airbyte.config.StandardSyncOperation; @@ -27,15 +25,12 @@ import io.airbyte.config.SyncStats; import io.airbyte.config.WebhookOperationSummary; import io.airbyte.metrics.lib.ApmTraceUtils; -import io.airbyte.metrics.lib.MetricAttribute; -import io.airbyte.metrics.lib.MetricClientFactory; -import io.airbyte.metrics.lib.MetricTags; -import io.airbyte.metrics.lib.OssMetricsRegistry; import io.airbyte.persistence.job.models.IntegrationLauncherConfig; import io.airbyte.persistence.job.models.JobRunConfig; import io.airbyte.workers.models.RefreshSchemaActivityInput; import io.airbyte.workers.models.RefreshSchemaActivityOutput; import io.airbyte.workers.models.ReplicationActivityInput; +import io.airbyte.workers.temporal.activities.ReportRunTimeActivityInput; import io.airbyte.workers.temporal.scheduling.activities.ConfigFetchActivity; import io.temporal.workflow.Workflow; import java.util.Map; @@ -54,12 +49,6 @@ public class SyncWorkflowImpl implements SyncWorkflow { @TemporalActivityStub(activityOptionsBeanName = "longRunActivityOptions") private ReplicationActivity replicationActivity; - @TemporalActivityStub(activityOptionsBeanName = "longRunActivityOptions") - private NormalizationActivity normalizationActivity; - @TemporalActivityStub(activityOptionsBeanName = "longRunActivityOptions") - private DbtTransformationActivity dbtTransformationActivity; - @TemporalActivityStub(activityOptionsBeanName = "shortActivityOptions") - private NormalizationSummaryCheckActivity normalizationSummaryCheckActivity; @TemporalActivityStub(activityOptionsBeanName = "shortActivityOptions") private WebhookOperationActivity webhookOperationActivity; @TemporalActivityStub(activityOptionsBeanName = "refreshSchemaActivityOptions") @@ -68,6 +57,8 @@ public class SyncWorkflowImpl implements SyncWorkflow { private ConfigFetchActivity configFetchActivity; @TemporalActivityStub(activityOptionsBeanName = "shortActivityOptions") private WorkloadFeatureFlagActivity workloadFeatureFlagActivity; + @TemporalActivityStub(activityOptionsBeanName = "shortActivityOptions") + private ReportRunTimeActivity reportRunTimeActivity; @Trace(operationName = WORKFLOW_TRACE_OPERATION_NAME) @Override @@ -77,9 +68,11 @@ public StandardSyncOutput run(final JobRunConfig jobRunConfig, final StandardSyncInput syncInput, final UUID connectionId) { + final long startTime = Workflow.currentTimeMillis(); // TODO: Remove this once Workload API rolled out final var useWorkloadApi = checkUseWorkloadApiFlag(syncInput); final var useWorkloadOutputDocStore = 
checkUseWorkloadOutputFlag(syncInput);
+    final var sendRunTimeMetrics = shouldReportRuntime();
 
     ApmTraceUtils
         .addTagsToTrace(Map.of(
@@ -94,22 +87,20 @@ public StandardSyncOutput run(final JobRunConfig jobRunConfig,
     final Optional<UUID> sourceId = configFetchActivity.getSourceId(connectionId);
     RefreshSchemaActivityOutput refreshSchemaOutput = null;
-    if (!sourceId.isEmpty() && refreshSchemaActivity.shouldRefreshSchema(sourceId.get())) {
+    final boolean shouldRefreshSchema = sourceId.isPresent() && refreshSchemaActivity.shouldRefreshSchema(sourceId.get());
+    if (shouldRefreshSchema) {
       LOGGER.info("Refreshing source schema...");
       try {
-        final var version = Workflow.getVersion("AUTO_BACKFILL_ON_NEW_COLUMNS", Workflow.DEFAULT_VERSION, 1);
-        if (version == Workflow.DEFAULT_VERSION) {
-          refreshSchemaActivity.refreshSchema(sourceId.get(), connectionId);
-        } else {
-          refreshSchemaOutput =
-              refreshSchemaActivity.refreshSchemaV2(new RefreshSchemaActivityInput(sourceId.get(), connectionId, syncInput.getWorkspaceId()));
-        }
+        refreshSchemaOutput =
+            refreshSchemaActivity.refreshSchemaV2(new RefreshSchemaActivityInput(sourceId.get(), connectionId, syncInput.getWorkspaceId()));
       } catch (final Exception e) {
         ApmTraceUtils.addExceptionToTrace(e);
         return SyncOutputProvider.getRefreshSchemaFailure(e);
       }
     }
 
+    final long discoverSchemaEndTime = Workflow.currentTimeMillis();
+
     final Optional<ConnectionStatus> status = configFetchActivity.getStatus(connectionId);
     if (!status.isEmpty() && ConnectionStatus.INACTIVE == status.get()) {
       LOGGER.info("Connection {} is disabled. Cancelling run.", connectionId);
@@ -125,27 +116,7 @@ public StandardSyncOutput run(final JobRunConfig jobRunConfig,
     if (syncInput.getOperationSequence() != null && !syncInput.getOperationSequence().isEmpty()) {
       for (final StandardSyncOperation standardSyncOperation : syncInput.getOperationSequence()) {
-        if (standardSyncOperation.getOperatorType() == OperatorType.NORMALIZATION) {
-          if (destinationLauncherConfig.getNormalizationDockerImage() == null
-              || destinationLauncherConfig.getNormalizationIntegrationType() == null) {
-            // In the case that this connection used to run normalization but the destination no longer supports
-            // it (destinations v1 -> v2)
-            LOGGER.info("Not Running Normalization Container for connection {}, attempt id {}, because destination no longer supports normalization",
-                connectionId, jobRunConfig.getAttemptId());
-          } else if (syncInput.getNormalizeInDestinationContainer()) {
-            LOGGER.info("Not Running Normalization Container for connection {}, attempt id {}, because it ran in destination",
-                connectionId, jobRunConfig.getAttemptId());
-          } else {
-            final NormalizationInput normalizationInput = generateNormalizationInput(syncInput);
-            final NormalizationSummary normalizationSummary =
-                normalizationActivity.normalize(jobRunConfig, destinationLauncherConfig, normalizationInput);
-            syncOutput = syncOutput.withNormalizationSummary(normalizationSummary);
-            MetricClientFactory.getMetricClient().count(OssMetricsRegistry.NORMALIZATION_IN_NORMALIZATION_CONTAINER, 1,
-                new MetricAttribute(MetricTags.CONNECTION_ID, connectionId.toString()));
-          }
-        } else if (standardSyncOperation.getOperatorType() == OperatorType.DBT) {
-          LOGGER.info("skipping custom dbt. 
deprecated."); - } else if (standardSyncOperation.getOperatorType() == OperatorType.WEBHOOK) { + if (standardSyncOperation.getOperatorType() == OperatorType.WEBHOOK) { LOGGER.info("running webhook operation"); LOGGER.debug("webhook operation input: {}", standardSyncOperation); final boolean success = webhookOperationActivity @@ -167,22 +138,37 @@ public StandardSyncOutput run(final JobRunConfig jobRunConfig, syncOutput.getWebhookOperationSummary().getFailures().add(standardSyncOperation.getOperatorWebhook().getWebhookConfigId()); } } else { - final String message = String.format("Unsupported operation type: %s", standardSyncOperation.getOperatorType()); - LOGGER.error(message); - throw new IllegalArgumentException(message); + LOGGER.warn("Unsupported operation type '{}' found. Skipping operation...", standardSyncOperation.getOperatorType()); } } } + final long replicationEndTime = Workflow.currentTimeMillis(); + + if (sendRunTimeMetrics) { + reportRunTimeActivity.reportRunTime(new ReportRunTimeActivityInput( + connectionId, + syncInput.getConnectionContext() == null || syncInput.getConnectionContext().getSourceDefinitionId() == null + ? UUID.fromString("00000000-0000-0000-0000-000000000000") + : syncInput.getConnectionContext().getSourceDefinitionId(), + startTime, + discoverSchemaEndTime, + replicationEndTime, + shouldRefreshSchema)); + } + + if (shouldRefreshSchema && syncOutput.getStandardSyncSummary() != null && syncOutput.getStandardSyncSummary().getTotalStats() != null) { + syncOutput.getStandardSyncSummary().getTotalStats().setDiscoverSchemaEndTime(discoverSchemaEndTime); + syncOutput.getStandardSyncSummary().getTotalStats().setDiscoverSchemaStartTime(startTime); + } + return syncOutput; } - private NormalizationInput generateNormalizationInput(final StandardSyncInput syncInput) { - return normalizationActivity.generateNormalizationInputWithMinimumPayloadWithConnectionId(syncInput.getDestinationConfiguration(), - null, - syncInput.getWorkspaceId(), - syncInput.getConnectionId(), - syncInput.getConnectionContext().getOrganizationId()); + private boolean shouldReportRuntime() { + final int shouldReportRuntimeVersion = Workflow.getVersion("SHOULD_REPORT_RUNTIME", Workflow.DEFAULT_VERSION, 1); + + return shouldReportRuntimeVersion != Workflow.DEFAULT_VERSION; } private ReplicationActivityInput generateReplicationActivityInput(final StandardSyncInput syncInput, @@ -204,7 +190,6 @@ private ReplicationActivityInput generateReplicationActivityInput(final Standard syncInput.getSyncResourceRequirements(), syncInput.getWorkspaceId(), syncInput.getConnectionId(), - syncInput.getNormalizeInDestinationContainer(), taskQueue, syncInput.getIsReset(), syncInput.getNamespaceDefinition(), diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/sync/WorkloadFeatureFlagActivityImpl.java b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/sync/WorkloadFeatureFlagActivityImpl.java index 2624f3a90d5..db1559bca83 100644 --- a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/sync/WorkloadFeatureFlagActivityImpl.java +++ b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/sync/WorkloadFeatureFlagActivityImpl.java @@ -4,8 +4,11 @@ package io.airbyte.workers.temporal.sync; +import io.airbyte.featureflag.Empty; import io.airbyte.featureflag.FeatureFlagClient; import io.airbyte.featureflag.UseWorkloadApi; +import io.airbyte.featureflag.WorkloadApiServerEnabled; +import io.airbyte.featureflag.WorkloadLauncherEnabled; import io.airbyte.featureflag.Workspace; import 
jakarta.inject.Singleton; @@ -21,9 +24,11 @@ public WorkloadFeatureFlagActivityImpl(final FeatureFlagClient featureFlagClient @Override public Boolean useWorkloadApi(final WorkloadFeatureFlagActivity.Input input) { - final var context = new Workspace(input.getWorkspaceId()); + var ffCheck = featureFlagClient.boolVariation(UseWorkloadApi.INSTANCE, new Workspace(input.getWorkspaceId())); + var envCheck = featureFlagClient.boolVariation(WorkloadLauncherEnabled.INSTANCE, Empty.INSTANCE) + && featureFlagClient.boolVariation(WorkloadApiServerEnabled.INSTANCE, Empty.INSTANCE); - return featureFlagClient.boolVariation(UseWorkloadApi.INSTANCE, context); + return ffCheck || envCheck; } @Override diff --git a/airbyte-workers/src/main/kotlin/io/airbyte/workers/temporal/activities/ReportRunTimeActivityInput.kt b/airbyte-workers/src/main/kotlin/io/airbyte/workers/temporal/activities/ReportRunTimeActivityInput.kt new file mode 100644 index 00000000000..88f4fb88ca2 --- /dev/null +++ b/airbyte-workers/src/main/kotlin/io/airbyte/workers/temporal/activities/ReportRunTimeActivityInput.kt @@ -0,0 +1,36 @@ +package io.airbyte.workers.temporal.activities + +import com.fasterxml.jackson.databind.annotation.JsonDeserialize +import java.util.UUID + +@JsonDeserialize(builder = ReportRunTimeActivityInput.Builder::class) +data class ReportRunTimeActivityInput( + val connectionId: UUID, + val sourceDefinitionId: UUID, + val startTime: Long, + val refreshSchemaEndTime: Long, + val replicationEndTime: Long, + val shouldRefreshSchema: Boolean, +) { + class Builder + @JvmOverloads + constructor( + val connectionId: UUID? = null, + val sourceDefinitionId: UUID? = null, + val startTime: Long? = null, + val refreshSchemaEndTime: Long? = null, + val replicationEndTime: Long? = null, + val shouldRefreshSchema: Boolean? = null, + ) { + fun build(): ReportRunTimeActivityInput { + return ReportRunTimeActivityInput( + connectionId!!, + sourceDefinitionId!!, + startTime!!, + refreshSchemaEndTime!!, + replicationEndTime!!, + shouldRefreshSchema!!, + ) + } + } +} diff --git a/airbyte-workers/src/main/kotlin/io/airbyte/workers/temporal/sync/ReportRunTimeActivity.kt b/airbyte-workers/src/main/kotlin/io/airbyte/workers/temporal/sync/ReportRunTimeActivity.kt new file mode 100644 index 00000000000..49b0a094dea --- /dev/null +++ b/airbyte-workers/src/main/kotlin/io/airbyte/workers/temporal/sync/ReportRunTimeActivity.kt @@ -0,0 +1,14 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.workers.temporal.sync + +import io.airbyte.workers.temporal.activities.ReportRunTimeActivityInput +import io.temporal.activity.ActivityInterface +import io.temporal.activity.ActivityMethod + +@ActivityInterface +interface ReportRunTimeActivity { + @ActivityMethod + fun reportRunTime(input: ReportRunTimeActivityInput) +} diff --git a/airbyte-workers/src/main/kotlin/io/airbyte/workers/temporal/sync/ReportRunTimeActivityImpl.kt b/airbyte-workers/src/main/kotlin/io/airbyte/workers/temporal/sync/ReportRunTimeActivityImpl.kt new file mode 100644 index 00000000000..1cf9a36f96b --- /dev/null +++ b/airbyte-workers/src/main/kotlin/io/airbyte/workers/temporal/sync/ReportRunTimeActivityImpl.kt @@ -0,0 +1,34 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. 
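The Jackson builder on ReportRunTimeActivityInput above exists because Temporal's default payload converter round-trips activity inputs through Jackson, which needs a construction path for the immutable Kotlin data class. From the Java workflow side, building the input would look roughly like this (variable names are illustrative; the timing values are whatever Workflow.currentTimeMillis() captured at each phase boundary):

// Illustrative sketch, not part of the diff.
final ReportRunTimeActivityInput runtimeInput =
    new ReportRunTimeActivityInput.Builder(
        connectionId,           // UUID of the connection that just ran
        sourceDefinitionId,     // UUID of the source definition (zero UUID when unknown)
        startTime,              // workflow start, epoch millis
        discoverSchemaEndTime,  // end of the schema-refresh phase
        replicationEndTime,     // end of replication
        shouldRefreshSchema)    // whether the refresh phase actually ran
        .build();
reportRunTimeActivity.reportRunTime(runtimeInput);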
+ */ +package io.airbyte.workers.temporal.sync + +import io.airbyte.metrics.lib.MetricAttribute +import io.airbyte.metrics.lib.MetricClient +import io.airbyte.metrics.lib.MetricTags +import io.airbyte.metrics.lib.OssMetricsRegistry +import io.airbyte.workers.temporal.activities.ReportRunTimeActivityInput +import jakarta.inject.Singleton + +@Singleton +class ReportRunTimeActivityImpl(private val metricClient: MetricClient) : ReportRunTimeActivity { + override fun reportRunTime(input: ReportRunTimeActivityInput) { + val runTimeRefresh = input.refreshSchemaEndTime - input.startTime + val runTimeReplication = input.replicationEndTime - input.refreshSchemaEndTime + val totalWorkflowRunTime = input.replicationEndTime - input.startTime + + val connectionTag = MetricAttribute(MetricTags.CONNECTION_ID, input.connectionId.toString()) + val sourceDefinitionTag = MetricAttribute(MetricTags.SOURCE_DEFINITION_ID, input.sourceDefinitionId.toString()) + + if (input.shouldRefreshSchema) { + metricClient.count( + OssMetricsRegistry.DISCOVER_CATALOG_RUN_TIME, + runTimeRefresh, + connectionTag, + sourceDefinitionTag, + ) + } + metricClient.count(OssMetricsRegistry.REPLICATION_RUN_TIME, runTimeReplication, connectionTag, sourceDefinitionTag) + metricClient.count(OssMetricsRegistry.SYNC_TOTAL_TIME, totalWorkflowRunTime, connectionTag, sourceDefinitionTag) + } +} diff --git a/airbyte-workers/src/main/resources/application.yml b/airbyte-workers/src/main/resources/application.yml index 0cdc0a6e29f..9e5143889bb 100644 --- a/airbyte-workers/src/main/resources/application.yml +++ b/airbyte-workers/src/main/resources/application.yml @@ -218,7 +218,7 @@ airbyte: image-pull-policy: ${JOB_KUBE_SIDECAR_CONTAINER_IMAGE_PULL_POLICY:IfNotPresent} tolerations: ${JOB_KUBE_TOLERATIONS:} replication: - persistence-flush-period-sec: ${REPLICATION_FLUSH_PERIOD_SECONDS:60} + persistence-flush-period-sec: ${REPLICATION_FLUSH_PERIOD_SECONDS:10} spec: enabled: ${SHOULD_RUN_GET_SPEC_WORKFLOWS:true} max-workers: ${MAX_SPEC_WORKERS:5} diff --git a/airbyte-workers/src/test/java/io/airbyte/workers/config/DataPlaneActivityInitializationMicronautTest.java b/airbyte-workers/src/test/java/io/airbyte/workers/config/DataPlaneActivityInitializationMicronautTest.java index 06b203bd619..659fe08ef2f 100644 --- a/airbyte-workers/src/test/java/io/airbyte/workers/config/DataPlaneActivityInitializationMicronautTest.java +++ b/airbyte-workers/src/test/java/io/airbyte/workers/config/DataPlaneActivityInitializationMicronautTest.java @@ -11,12 +11,6 @@ import io.airbyte.config.secrets.persistence.SecretPersistence; import io.airbyte.workers.temporal.scheduling.activities.ConfigFetchActivity; import io.airbyte.workers.temporal.scheduling.activities.ConfigFetchActivityImpl; -import io.airbyte.workers.temporal.sync.DbtTransformationActivity; -import io.airbyte.workers.temporal.sync.DbtTransformationActivityImpl; -import io.airbyte.workers.temporal.sync.NormalizationActivity; -import io.airbyte.workers.temporal.sync.NormalizationActivityImpl; -import io.airbyte.workers.temporal.sync.NormalizationSummaryCheckActivity; -import io.airbyte.workers.temporal.sync.NormalizationSummaryCheckActivityImpl; import io.airbyte.workers.temporal.sync.RefreshSchemaActivity; import io.airbyte.workers.temporal.sync.RefreshSchemaActivityImpl; import io.airbyte.workers.temporal.sync.ReplicationActivity; @@ -63,15 +57,6 @@ class DataPlaneActivityInitializationMicronautTest { @Inject ConfigFetchActivity configFetchActivity; - @Inject - DbtTransformationActivity 
dbtTransformationActivity; - - @Inject - NormalizationActivity normalizationActivity; - - @Inject - NormalizationSummaryCheckActivity normalizationSummaryCheckActivity; - @Inject RefreshSchemaActivity refreshSchemaActivity; @@ -86,21 +71,6 @@ void testConfigFetchActivity() { assertEquals(ConfigFetchActivityImpl.class, configFetchActivity.getClass()); } - @Test - void testDbtTransformationActivity() { - assertEquals(DbtTransformationActivityImpl.class, dbtTransformationActivity.getClass()); - } - - @Test - void testNormalizationActivity() { - assertEquals(NormalizationActivityImpl.class, normalizationActivity.getClass()); - } - - @Test - void testNormalizationSummaryCheckActivity() { - assertEquals(NormalizationSummaryCheckActivityImpl.class, normalizationSummaryCheckActivity.getClass()); - } - @Test void testRefreshSchemaActivity() { assertEquals(RefreshSchemaActivityImpl.class, refreshSchemaActivity.getClass()); diff --git a/airbyte-workers/src/test/java/io/airbyte/workers/temporal/discover/catalog/DiscoverCatalogActivityTest.kt b/airbyte-workers/src/test/java/io/airbyte/workers/temporal/discover/catalog/DiscoverCatalogActivityTest.kt index a25d47db2f1..296d3812b78 100644 --- a/airbyte-workers/src/test/java/io/airbyte/workers/temporal/discover/catalog/DiscoverCatalogActivityTest.kt +++ b/airbyte-workers/src/test/java/io/airbyte/workers/temporal/discover/catalog/DiscoverCatalogActivityTest.kt @@ -5,7 +5,14 @@ package io.airbyte.workers.temporal.discover.catalog import io.airbyte.api.client.AirbyteApiClient import io.airbyte.api.client.WorkloadApiClient +import io.airbyte.api.client.generated.ConnectionApi +import io.airbyte.api.client.model.generated.AirbyteCatalog +import io.airbyte.api.client.model.generated.CatalogDiff +import io.airbyte.api.client.model.generated.ConnectionAutoPropagateResult +import io.airbyte.api.client.model.generated.ConnectionAutoPropagateSchemaChange +import io.airbyte.api.client.model.generated.DiffCatalogRequestBody import io.airbyte.api.client.model.generated.Geography +import io.airbyte.api.client.model.generated.SourceDiscoverSchemaRead import io.airbyte.commons.features.FeatureFlags import io.airbyte.commons.protocol.AirbyteMessageSerDeProvider import io.airbyte.commons.protocol.AirbyteProtocolVersionedMigratorFactory @@ -22,8 +29,11 @@ import io.airbyte.featureflag.TestClient import io.airbyte.metrics.lib.MetricClient import io.airbyte.persistence.job.models.IntegrationLauncherConfig import io.airbyte.persistence.job.models.JobRunConfig +import io.airbyte.workers.helper.CatalogDiffConverter import io.airbyte.workers.helper.GsonPksExtractor import io.airbyte.workers.models.DiscoverCatalogInput +import io.airbyte.workers.models.PostprocessCatalogInput +import io.airbyte.workers.models.PostprocessCatalogOutput import io.airbyte.workers.process.ProcessFactory import io.airbyte.workers.sync.WorkloadClient import io.airbyte.workers.workload.JobOutputDocStore @@ -35,9 +45,11 @@ import io.airbyte.workload.api.client.model.generated.WorkloadType import io.mockk.every import io.mockk.mockk import io.mockk.spyk +import io.mockk.verify import org.junit.jupiter.api.Assertions import org.junit.jupiter.api.BeforeEach import org.junit.jupiter.api.Test +import java.io.IOException import java.nio.file.Path import java.util.Optional import java.util.UUID @@ -58,6 +70,7 @@ class DiscoverCatalogActivityTest { private val featureFlagClient: FeatureFlagClient = TestClient() private val gsonPksExtractor: GsonPksExtractor = mockk() private val workloadApi: WorkloadApi = mockk() + 
private val connectionApi: ConnectionApi = mockk() private val workloadApiClient: WorkloadApiClient = mockk() private val workloadIdGenerator: WorkloadIdGenerator = mockk() private val jobOutputDocStore: JobOutputDocStore = mockk() @@ -66,6 +79,7 @@ class DiscoverCatalogActivityTest { @BeforeEach fun init() { every { workloadApiClient.workloadApi }.returns(workloadApi) + every { airbyteApiClient.connectionApi }.returns(connectionApi) discoverCatalogActivity = spyk( DiscoverCatalogActivityImpl( @@ -123,4 +137,90 @@ class DiscoverCatalogActivityTest { val actualOutput = discoverCatalogActivity.runWithWorkload(input) Assertions.assertEquals(output, actualOutput) } + + @Test + fun postprocessHappyPath() { + val catalog1: AirbyteCatalog = mockk() + val read: SourceDiscoverSchemaRead = + mockk { + every { catalog } returns catalog1 + } + val diff1: CatalogDiff = + mockk { + every { transforms } returns listOf() + } + val propagation: ConnectionAutoPropagateResult = + mockk { + every { propagatedDiff } returns diff1 + } + every { connectionApi.diffCatalogForConnection(any()) } returns read + every { connectionApi.applySchemaChangeForConnection(any()) } returns propagation + + val input = PostprocessCatalogInput(UUID.randomUUID(), UUID.randomUUID(), UUID.randomUUID()) + val result = discoverCatalogActivity.postprocess(input) + + val expectedDiffReqBody = DiffCatalogRequestBody(input.catalogId!!, input.connectionId!!) + val expectedSchemaChangeReqBody = + ConnectionAutoPropagateSchemaChange( + read.catalog!!, + input.catalogId!!, + input.connectionId!!, + input.workspaceId!!, + ) + verify { connectionApi.diffCatalogForConnection(eq(expectedDiffReqBody)) } + verify { connectionApi.applySchemaChangeForConnection(eq(expectedSchemaChangeReqBody)) } + + val expected = PostprocessCatalogOutput.success(CatalogDiffConverter.toDomain(diff1)) + Assertions.assertEquals(expected, result) + Assertions.assertTrue(result.isSuccess) + Assertions.assertFalse(result.isFailure) + } + + @Test + fun postprocessDiffExceptionalPath() { + val exception = IOException("not happy") + val catalog1: AirbyteCatalog = mockk() + val read: SourceDiscoverSchemaRead = + mockk { + every { catalog } returns catalog1 + } + every { connectionApi.diffCatalogForConnection(any()) } returns read + every { connectionApi.applySchemaChangeForConnection(any()) } throws exception + + val input = PostprocessCatalogInput(UUID.randomUUID(), UUID.randomUUID(), UUID.randomUUID()) + val result = discoverCatalogActivity.postprocess(input) + + val expectedDiffReqBody = DiffCatalogRequestBody(input.catalogId!!, input.connectionId!!) 
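These postprocess tests assert through PostprocessCatalogOutput.success(...)/failure(...) plus the isSuccess/isFailure flags; for readers without the workers models module at hand, the shape is roughly a minimal result carrier like the following sketch (an assumption, not the actual class):

// Illustrative sketch, not part of the diff: a minimal success-or-failure carrier.
final class PostprocessCatalogOutput {

  private final Object diff;     // domain CatalogDiff on success (may be null if nothing propagated)
  private final Throwable error; // cause on failure
  private final boolean success;

  private PostprocessCatalogOutput(final Object diff, final Throwable error, final boolean success) {
    this.diff = diff;
    this.error = error;
    this.success = success;
  }

  static PostprocessCatalogOutput success(final Object diff) {
    return new PostprocessCatalogOutput(diff, null, true);
  }

  static PostprocessCatalogOutput failure(final Throwable t) {
    return new PostprocessCatalogOutput(null, t, false);
  }

  boolean isSuccess() { return success; }

  boolean isFailure() { return !success; }
}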
+ val expectedSchemaChangeReqBody = + ConnectionAutoPropagateSchemaChange( + read.catalog!!, + input.catalogId!!, + input.connectionId!!, + input.workspaceId!!, + ) + verify { connectionApi.diffCatalogForConnection(eq(expectedDiffReqBody)) } + verify { connectionApi.applySchemaChangeForConnection(eq(expectedSchemaChangeReqBody)) } + + val expected = PostprocessCatalogOutput.failure(exception) + Assertions.assertEquals(expected, result) + Assertions.assertFalse(result.isSuccess) + Assertions.assertTrue(result.isFailure) + } + + @Test + fun postprocessSchemaChangeExceptionalPath() { + val exception = IOException("not happy") + every { connectionApi.diffCatalogForConnection(any()) } throws exception + + val input = PostprocessCatalogInput(UUID.randomUUID(), UUID.randomUUID(), UUID.randomUUID()) + val result = discoverCatalogActivity.postprocess(input) + + val expectedReqBody = DiffCatalogRequestBody(input.catalogId!!, input.connectionId!!) + verify { connectionApi.diffCatalogForConnection(eq(expectedReqBody)) } + + val expected = PostprocessCatalogOutput.failure(exception) + Assertions.assertEquals(expected, result) + Assertions.assertFalse(result.isSuccess) + Assertions.assertTrue(result.isFailure) + } } diff --git a/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/ConnectionManagerWorkflowTest.java b/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/ConnectionManagerWorkflowTest.java index c2c6d5ae2d5..2f080b2f6aa 100644 --- a/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/ConnectionManagerWorkflowTest.java +++ b/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/ConnectionManagerWorkflowTest.java @@ -71,10 +71,7 @@ import io.airbyte.workers.temporal.scheduling.testcheckworkflow.CheckConnectionSuccessWorkflow; import io.airbyte.workers.temporal.scheduling.testcheckworkflow.CheckConnectionSystemErrorWorkflow; import io.airbyte.workers.temporal.scheduling.testsyncworkflow.CancelledSyncWorkflow; -import io.airbyte.workers.temporal.scheduling.testsyncworkflow.DbtFailureSyncWorkflow; import io.airbyte.workers.temporal.scheduling.testsyncworkflow.EmptySyncWorkflow; -import io.airbyte.workers.temporal.scheduling.testsyncworkflow.NormalizationFailureSyncWorkflow; -import io.airbyte.workers.temporal.scheduling.testsyncworkflow.NormalizationTraceFailureSyncWorkflow; import io.airbyte.workers.temporal.scheduling.testsyncworkflow.PersistFailureSyncWorkflow; import io.airbyte.workers.temporal.scheduling.testsyncworkflow.ReplicateFailureSyncWorkflow; import io.airbyte.workers.temporal.scheduling.testsyncworkflow.SleepingSyncWorkflow; @@ -1284,89 +1281,6 @@ void testSourceAndDestinationFailuresRecorded() throws Exception { .attemptFailureWithAttemptNumber(Mockito.argThat(new HasFailureFromOrigin(FailureOrigin.DESTINATION))); } - @Test - @Timeout(value = 10, - unit = TimeUnit.SECONDS) - @DisplayName("Test that normalization failure is recorded") - void testNormalizationFailure() throws Exception { - setupNormalizationFailure(); - - Mockito.verify(mJobCreationAndStatusUpdateActivity) - .attemptFailureWithAttemptNumber(Mockito.argThat(new HasFailureFromOrigin(FailureOrigin.NORMALIZATION))); - } - - @Test - @Timeout(value = 10, - unit = TimeUnit.SECONDS) - @DisplayName("Test that normalization trace failure is recorded") - void testNormalizationTraceFailure() throws Exception { - returnTrueForLastJobOrAttemptFailure(); - final Worker syncWorker = testEnv.newWorker(TemporalJobType.SYNC.name()); - 
syncWorker.registerWorkflowImplementationTypes(NormalizationTraceFailureSyncWorkflow.class); - final Worker checkWorker = testEnv.newWorker(TemporalJobType.CHECK_CONNECTION.name()); - checkWorker.registerWorkflowImplementationTypes(CheckConnectionSuccessWorkflow.class); - - testEnv.start(); - - final UUID testId = UUID.randomUUID(); - final TestStateListener testStateListener = new TestStateListener(); - final WorkflowState workflowState = new WorkflowState(testId, testStateListener); - final ConnectionUpdaterInput input = ConnectionUpdaterInput.builder() - .connectionId(UUID.randomUUID()) - .jobId(JOB_ID) - .attemptId(ATTEMPT_ID) - .fromFailure(false) - .attemptNumber(1) - .workflowState(workflowState) - .build(); - - startWorkflowAndWaitUntilReady(workflow, input); - - // wait for workflow to initialize - testEnv.sleep(Duration.ofMinutes(1)); - - workflow.submitManualSync(); - - Mockito.verify(mJobCreationAndStatusUpdateActivity, VERIFY_TIMEOUT) - .attemptFailureWithAttemptNumber(Mockito.argThat(new HasFailureFromOrigin(FailureOrigin.NORMALIZATION))); - } - - @Test - @Timeout(value = 10, - unit = TimeUnit.SECONDS) - @DisplayName("Test that dbt failure is recorded") - void testDbtFailureRecorded() throws Exception { - returnTrueForLastJobOrAttemptFailure(); - final Worker syncWorker = testEnv.newWorker(TemporalJobType.SYNC.name()); - syncWorker.registerWorkflowImplementationTypes(DbtFailureSyncWorkflow.class); - final Worker checkWorker = testEnv.newWorker(TemporalJobType.CHECK_CONNECTION.name()); - checkWorker.registerWorkflowImplementationTypes(CheckConnectionSuccessWorkflow.class); - - testEnv.start(); - - final UUID testId = UUID.randomUUID(); - final TestStateListener testStateListener = new TestStateListener(); - final WorkflowState workflowState = new WorkflowState(testId, testStateListener); - final ConnectionUpdaterInput input = ConnectionUpdaterInput.builder() - .connectionId(UUID.randomUUID()) - .jobId(JOB_ID) - .attemptId(ATTEMPT_ID) - .fromFailure(false) - .attemptNumber(1) - .workflowState(workflowState) - .build(); - - startWorkflowAndWaitUntilReady(workflow, input); - - // wait for workflow to initialize - testEnv.sleep(Duration.ofMinutes(1)); - - workflow.submitManualSync(); - - Mockito.verify(mJobCreationAndStatusUpdateActivity, VERIFY_TIMEOUT) - .attemptFailureWithAttemptNumber(Mockito.argThat(new HasFailureFromOrigin(FailureOrigin.DBT))); - } - @Test @Timeout(value = 10, unit = TimeUnit.SECONDS) @@ -1641,7 +1555,6 @@ void usesAttemptBasedRetriesIfRetryManagerUnset(final Class coreFailureTypesMatrix() { return Stream.of( - Arguments.of(NormalizationFailureSyncWorkflow.class), Arguments.of(SourceAndDestinationFailureSyncWorkflow.class), Arguments.of(ReplicateFailureSyncWorkflow.class), Arguments.of(PersistFailureSyncWorkflow.class), @@ -2078,10 +1991,6 @@ private void setupReplicationFailure() throws Exception { setupFailureCase(ReplicateFailureSyncWorkflow.class); } - private void setupNormalizationFailure() throws Exception { - setupFailureCase(NormalizationFailureSyncWorkflow.class); - } - /** * Does all the legwork for setting up a workflow for simple runs. NOTE: Don't forget to add your * mock activity below. 
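The next removal deletes the replay-compatibility test outright. Since workflow versioning (for example the SHOULD_REPORT_RUNTIME gate added above) exists precisely to keep previously recorded histories replayable, it is worth noting what such a test boils down to; a minimal sketch, assuming a history JSON exported from the Temporal UI and a SyncWorkflowImpl on the classpath:

// Illustrative sketch, not part of the diff: the essence of a Temporal replay check.
import io.temporal.testing.WorkflowReplayer;
import java.io.File;

final class ReplaySmokeCheck {

  public static void main(final String[] args) throws Exception {
    // History JSON exported from the Temporal UI for a past execution (path is hypothetical).
    final File history = new File("src/test/resources/syncWorkflowHistory.json");
    // Throws if the current workflow code can no longer deterministically replay the
    // recorded event sequence, i.e. if a change landed without a Workflow.getVersion gate.
    WorkflowReplayer.replayWorkflowExecution(history, SyncWorkflowImpl.class);
  }
}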
diff --git a/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/WorkflowReplayingTest.java b/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/WorkflowReplayingTest.java deleted file mode 100644 index 260ffc9417f..00000000000 --- a/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/WorkflowReplayingTest.java +++ /dev/null @@ -1,193 +0,0 @@ -/* - * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.workers.temporal.scheduling; - -import static java.nio.charset.StandardCharsets.UTF_8; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - -import com.fasterxml.jackson.databind.MapperFeature; -import com.google.common.base.Defaults; -import io.airbyte.commons.temporal.converter.AirbyteTemporalDataConverter; -import io.airbyte.micronaut.temporal.TemporalProxyHelper; -import io.airbyte.workers.temporal.sync.SyncWorkflowImpl; -import io.micronaut.context.BeanRegistration; -import io.micronaut.inject.BeanIdentifier; -import io.temporal.activity.ActivityOptions; -import io.temporal.api.common.v1.Payload; -import io.temporal.api.common.v1.Payloads; -import io.temporal.common.RetryOptions; -import io.temporal.common.converter.ByteArrayPayloadConverter; -import io.temporal.common.converter.DataConverter; -import io.temporal.common.converter.DataConverterException; -import io.temporal.common.converter.EncodingKeys; -import io.temporal.common.converter.GlobalDataConverter; -import io.temporal.common.converter.JacksonJsonPayloadConverter; -import io.temporal.common.converter.NullPayloadConverter; -import io.temporal.common.converter.PayloadConverter; -import io.temporal.common.converter.ProtobufJsonPayloadConverter; -import io.temporal.common.converter.ProtobufPayloadConverter; -import io.temporal.testing.WorkflowReplayer; -import java.io.File; -import java.lang.reflect.Type; -import java.net.URL; -import java.time.Duration; -import java.util.List; -import java.util.Optional; -import java.util.stream.Stream; -import org.junit.jupiter.api.BeforeAll; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; - -// TODO: Auto generation of the input and more scenario coverage -@SuppressWarnings("PMD.JUnitTestsShouldIncludeAssert") -class WorkflowReplayingTest { - - private TemporalProxyHelper temporalProxyHelper; - - @BeforeAll - static void beforeAll() { - // Register the custom data converter configured to work with Airbyte JSON - GlobalDataConverter.register(new AirbyteTemporalDataConverter()); - } - - @BeforeEach - void setUp() { - ActivityOptions activityOptions = ActivityOptions.newBuilder() - .setHeartbeatTimeout(Duration.ofSeconds(30)) - .setStartToCloseTimeout(Duration.ofSeconds(120)) - .setRetryOptions(RetryOptions.newBuilder() - .setMaximumAttempts(5) - .setInitialInterval(Duration.ofSeconds(30)) - .setMaximumInterval(Duration.ofSeconds(600)) - .build()) - .build(); - - final BeanRegistration shortActivityOptionsBeanRegistration = getActivityOptionBeanRegistration("shortActivityOptions", activityOptions); - final BeanRegistration longActivityOptionsBeanRegistration = getActivityOptionBeanRegistration("longRunActivityOptions", activityOptions); - final BeanRegistration discoveryActivityOptionsBeanRegistration = getActivityOptionBeanRegistration("discoveryActivityOptions", activityOptions); - final BeanRegistration refreshSchemaActivityOptionsBeanRegistration = - getActivityOptionBeanRegistration("refreshSchemaActivityOptions", activityOptions); - - 
temporalProxyHelper = new TemporalProxyHelper( - List.of(shortActivityOptionsBeanRegistration, longActivityOptionsBeanRegistration, discoveryActivityOptionsBeanRegistration, - refreshSchemaActivityOptionsBeanRegistration)); - } - - @Test - void replaySimpleSuccessfulConnectionManagerWorkflow() throws Exception { - // This test ensures that a new version of the workflow doesn't break an in-progress execution - // This JSON file is exported from Temporal directly (e.g. - // `http://${temporal-ui}/namespaces/default/workflows/connection_manager_-${uuid}/${uuid}/history`) - // and export - final URL historyPath = getClass().getClassLoader().getResource("connectionManagerWorkflowHistory.json"); - - final File historyFile = new File(historyPath.toURI()); - - WorkflowReplayer.replayWorkflowExecution(historyFile, temporalProxyHelper.proxyWorkflowClass(ConnectionManagerWorkflowImpl.class)); - } - - @Test - void replaySyncWorkflowWithNormalization() throws Exception { - // This test ensures that a new version of the workflow doesn't break an in-progress execution - // This JSON file is exported from Temporal directly (e.g. - // `http://${temporal-ui}/namespaces/default/workflows/connection_manager_-${uuid}/${uuid}/history`) - // and export - GlobalDataConverter.register(new TestPayloadConverter()); - final URL historyPath = getClass().getClassLoader().getResource("syncWorkflowHistory.json"); - final File historyFile = new File(historyPath.toURI()); - WorkflowReplayer.replayWorkflowExecution(historyFile, temporalProxyHelper.proxyWorkflowClass(SyncWorkflowImpl.class)); - } - - private BeanRegistration getActivityOptionBeanRegistration(String name, ActivityOptions activityOptions) { - final BeanIdentifier activitiesBeanIdentifier = mock(BeanIdentifier.class); - final BeanRegistration activityOptionsBeanRegistration = mock(BeanRegistration.class); - when(activitiesBeanIdentifier.getName()).thenReturn(name); - when(activityOptionsBeanRegistration.getIdentifier()).thenReturn(activitiesBeanIdentifier); - when(activityOptionsBeanRegistration.getBean()).thenReturn(activityOptions); - - return activityOptionsBeanRegistration; - } - - /** - * Custom Temporal {@link DataConverter} that modifies the Jackson-based converter to enable case - * insensitive enum value parsing by Jackson when loading a job history as part of the test. 
- */ - private class TestPayloadConverter implements DataConverter { - - private static final PayloadConverter[] STANDARD_PAYLOAD_CONVERTERS = { - new NullPayloadConverter(), - new ByteArrayPayloadConverter(), - new ProtobufJsonPayloadConverter(), - new ProtobufPayloadConverter(), - new JacksonJsonPayloadConverter(JacksonJsonPayloadConverter.newDefaultObjectMapper() - .enable(MapperFeature.ACCEPT_CASE_INSENSITIVE_ENUMS)) - }; - - @Override - public Optional toPayload(T value) throws DataConverterException { - for (PayloadConverter converter : STANDARD_PAYLOAD_CONVERTERS) { - Optional result = converter.toData(value); - if (result.isPresent()) { - return result; - } - } - return Optional.empty(); - } - - @Override - public T fromPayload(Payload payload, Class valueClass, Type valueType) throws DataConverterException { - try { - String encoding = - payload.getMetadataOrThrow(EncodingKeys.METADATA_ENCODING_KEY).toString(UTF_8); - Optional converter = - Stream.of(STANDARD_PAYLOAD_CONVERTERS).filter(c -> encoding.equalsIgnoreCase(c.getEncodingType())).findFirst(); - if (converter.isEmpty()) { - throw new DataConverterException( - "No PayloadConverter is registered for an encoding: " + encoding); - } - return converter.get().fromData(payload, valueClass, valueType); - } catch (DataConverterException e) { - throw e; - } catch (Exception e) { - throw new DataConverterException(payload, valueClass, e); - } - } - - @Override - public Optional toPayloads(Object... values) throws DataConverterException { - if (values == null || values.length == 0) { - return Optional.empty(); - } - try { - Payloads.Builder result = Payloads.newBuilder(); - for (Object value : values) { - result.addPayloads(toPayload(value).get()); - } - return Optional.of(result.build()); - } catch (DataConverterException e) { - throw e; - } catch (Throwable e) { - throw new DataConverterException(e); - } - } - - @Override - public T fromPayloads(int index, Optional content, Class parameterType, Type genericParameterType) - throws DataConverterException { - if (!content.isPresent()) { - return Defaults.defaultValue(parameterType); - } - int count = content.get().getPayloadsCount(); - // To make adding arguments a backwards compatible change - if (index >= count) { - return Defaults.defaultValue(parameterType); - } - return fromPayload(content.get().getPayloads(index), parameterType, genericParameterType); - } - - } - -} diff --git a/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/activities/JobCreationAndStatusUpdateActivityTest.java b/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/activities/JobCreationAndStatusUpdateActivityTest.java index 491acac4e8b..a46b9340b44 100644 --- a/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/activities/JobCreationAndStatusUpdateActivityTest.java +++ b/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/activities/JobCreationAndStatusUpdateActivityTest.java @@ -30,7 +30,6 @@ import io.airbyte.config.AttemptFailureSummary; import io.airbyte.config.FailureReason; import io.airbyte.config.FailureReason.FailureOrigin; -import io.airbyte.config.NormalizationSummary; import io.airbyte.config.StandardSyncOutput; import io.airbyte.config.StandardSyncSummary; import io.airbyte.config.StandardSyncSummary.ReplicationStatus; @@ -98,9 +97,7 @@ class JobCreationAndStatusUpdateActivityTest { private static final StandardSyncOutput standardSyncOutput = new StandardSyncOutput() .withStandardSyncSummary( new StandardSyncSummary() - 
.withStatus(ReplicationStatus.COMPLETED)) - .withNormalizationSummary( - new NormalizationSummary()); + .withStatus(ReplicationStatus.COMPLETED)); private static final AttemptFailureSummary failureSummary = new AttemptFailureSummary() .withFailures(Collections.singletonList( diff --git a/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/activities/NormalizationSummaryCheckActivityTest.java b/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/activities/NormalizationSummaryCheckActivityTest.java deleted file mode 100644 index 042768cfa50..00000000000 --- a/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/activities/NormalizationSummaryCheckActivityTest.java +++ /dev/null @@ -1,91 +0,0 @@ -/* - * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.workers.temporal.scheduling.activities; - -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - -import io.airbyte.api.client.AirbyteApiClient; -import io.airbyte.api.client.generated.JobsApi; -import io.airbyte.api.client.model.generated.AttemptNormalizationStatusRead; -import io.airbyte.api.client.model.generated.AttemptNormalizationStatusReadList; -import io.airbyte.api.client.model.generated.JobIdRequestBody; -import io.airbyte.workers.temporal.sync.NormalizationSummaryCheckActivityImpl; -import java.io.IOException; -import java.util.List; -import java.util.Optional; -import lombok.extern.slf4j.Slf4j; -import org.assertj.core.api.Assertions; -import org.junit.jupiter.api.BeforeAll; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.extension.ExtendWith; -import org.mockito.junit.jupiter.MockitoExtension; - -@Slf4j -@ExtendWith(MockitoExtension.class) -class NormalizationSummaryCheckActivityTest { - - private static final Long JOB_ID = 10L; - private static NormalizationSummaryCheckActivityImpl normalizationSummaryCheckActivity; - private static AirbyteApiClient airbyteApiClient; - private static JobsApi jobsApi; - - @BeforeAll - static void setUp() { - airbyteApiClient = mock(AirbyteApiClient.class); - jobsApi = mock(JobsApi.class); - when(airbyteApiClient.getJobsApi()).thenReturn(jobsApi); - normalizationSummaryCheckActivity = new NormalizationSummaryCheckActivityImpl(airbyteApiClient); - } - - @Test - void testShouldRunNormalizationRecordsCommittedOnFirstAttemptButNotCurrentAttempt() throws IOException { - // Attempt 1 committed records, but normalization failed - // Attempt 2 did not commit records, normalization failed (or did not run) - final AttemptNormalizationStatusRead attempt1 = - new AttemptNormalizationStatusRead(1, true, 10L, true); - final AttemptNormalizationStatusRead attempt2 = - new AttemptNormalizationStatusRead(2, true, 0L, true); - - when(jobsApi.getAttemptNormalizationStatusesForJob(new JobIdRequestBody(JOB_ID))) - .thenReturn(new AttemptNormalizationStatusReadList(List.of(attempt1, attempt2))); - - Assertions.assertThat(true).isEqualTo(normalizationSummaryCheckActivity.shouldRunNormalization(JOB_ID, 3L, Optional.of(0L))); - } - - @Test - void testShouldRunNormalizationRecordsCommittedOnCurrentAttempt() throws IOException { - Assertions.assertThat(true).isEqualTo(normalizationSummaryCheckActivity.shouldRunNormalization(JOB_ID, 3L, Optional.of(30L))); - } - - @Test - void testShouldRunNormalizationNoRecordsCommittedOnCurrentAttemptOrPreviousAttempts() throws IOException { - // No attempts committed any records - // Normalization did not run on any attempts - final 
AttemptNormalizationStatusRead attempt1 = - new AttemptNormalizationStatusRead(1, true, 0L, true); - final AttemptNormalizationStatusRead attempt2 = - new AttemptNormalizationStatusRead(2, true, 0L, true); - - when(jobsApi.getAttemptNormalizationStatusesForJob(new JobIdRequestBody(JOB_ID))) - .thenReturn(new AttemptNormalizationStatusReadList(List.of(attempt1, attempt2))); - Assertions.assertThat(false).isEqualTo(normalizationSummaryCheckActivity.shouldRunNormalization(JOB_ID, 3L, Optional.of(0L))); - } - - @Test - void testShouldRunNormalizationNoRecordsCommittedOnCurrentAttemptPreviousAttemptsSucceeded() throws IOException { - // Records committed on first two attempts and normalization succeeded - // No records committed on current attempt and normalization has not yet run - final AttemptNormalizationStatusRead attempt1 = - new AttemptNormalizationStatusRead(1, true, 10L, false); - final AttemptNormalizationStatusRead attempt2 = - new AttemptNormalizationStatusRead(2, true, 20L, false); - - when(jobsApi.getAttemptNormalizationStatusesForJob(new JobIdRequestBody(JOB_ID))) - .thenReturn(new AttemptNormalizationStatusReadList(List.of(attempt1, attempt2))); - Assertions.assertThat(false).isEqualTo(normalizationSummaryCheckActivity.shouldRunNormalization(JOB_ID, 3L, Optional.of(0L))); - } - -} diff --git a/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/activities/RefreshSchemaActivityTest.java b/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/activities/RefreshSchemaActivityTest.java index 049ffb3b8fc..86b7eb85685 100644 --- a/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/activities/RefreshSchemaActivityTest.java +++ b/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/activities/RefreshSchemaActivityTest.java @@ -5,6 +5,7 @@ package io.airbyte.workers.temporal.scheduling.activities; import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.mock; @@ -48,6 +49,7 @@ import io.airbyte.featureflag.SourceDefinition; import io.airbyte.featureflag.TestClient; import io.airbyte.featureflag.Workspace; +import io.airbyte.workers.helper.CatalogDiffConverter; import io.airbyte.workers.models.RefreshSchemaActivityInput; import io.airbyte.workers.models.RefreshSchemaActivityOutput; import io.airbyte.workers.temporal.sync.RefreshSchemaActivityImpl; @@ -240,7 +242,25 @@ void testRefreshSchemaForAutoBackfillOnNewColumns() throws IOException { verify(mSourceApi, times(0)).applySchemaChangeForSource(any()); verify(mConnectionApi, times(1)) .applySchemaChangeForConnection(new ConnectionAutoPropagateSchemaChange(CATALOG, CATALOG_ID, CONNECTION_ID, WORKSPACE_ID)); - assertEquals(CATALOG_DIFF, result.getAppliedDiff()); + assertEquals(CatalogDiffConverter.toDomain(CATALOG_DIFF), result.getAppliedDiff()); + } + + @Test + void refreshSchemaHandlesNullDiff() throws IOException { + when(mAirbyteApiClient.getConnectionApi()).thenReturn(mConnectionApi); + when(mFeatureFlagClient.boolVariation(eq(ShouldRunRefreshSchema.INSTANCE), any())).thenReturn(true); + when(mFeatureFlagClient.boolVariation(eq(AutoBackfillOnNewColumns.INSTANCE), any())).thenReturn(true); + + final CatalogDiff catalogDiff = null; + when(mConnectionApi.applySchemaChangeForConnection(new ConnectionAutoPropagateSchemaChange(CATALOG, CATALOG_ID, CONNECTION_ID, WORKSPACE_ID))) + 
.thenReturn(new ConnectionAutoPropagateResult(catalogDiff)); + + final var result = refreshSchemaActivity.refreshSchemaV2(new RefreshSchemaActivityInput(SOURCE_ID, CONNECTION_ID, WORKSPACE_ID)); + + verify(mSourceApi, times(0)).applySchemaChangeForSource(any()); + verify(mConnectionApi, times(1)) + .applySchemaChangeForConnection(new ConnectionAutoPropagateSchemaChange(CATALOG, CATALOG_ID, CONNECTION_ID, WORKSPACE_ID)); + assertNull(result.getAppliedDiff()); } @Test @@ -253,7 +273,7 @@ void refreshV2ValidatesPayloadSize() throws IOException { refreshSchemaActivity.refreshSchemaV2(new RefreshSchemaActivityInput(SOURCE_ID, CONNECTION_ID, WORKSPACE_ID)); - verify(mPayloadChecker, times(1)).validatePayloadSize(eq(new RefreshSchemaActivityOutput(CATALOG_DIFF)), any()); + verify(mPayloadChecker, times(1)).validatePayloadSize(eq(new RefreshSchemaActivityOutput(CatalogDiffConverter.toDomain(CATALOG_DIFF))), any()); } } diff --git a/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/testsyncworkflow/DbtFailureSyncWorkflow.java b/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/testsyncworkflow/DbtFailureSyncWorkflow.java deleted file mode 100644 index aafd12dd38a..00000000000 --- a/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/testsyncworkflow/DbtFailureSyncWorkflow.java +++ /dev/null @@ -1,33 +0,0 @@ -/* - * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.workers.temporal.scheduling.testsyncworkflow; - -import io.airbyte.commons.temporal.scheduling.SyncWorkflow; -import io.airbyte.config.StandardSyncInput; -import io.airbyte.config.StandardSyncOutput; -import io.airbyte.persistence.job.models.IntegrationLauncherConfig; -import io.airbyte.persistence.job.models.JobRunConfig; -import io.temporal.api.enums.v1.RetryState; -import io.temporal.failure.ActivityFailure; -import java.util.UUID; - -public class DbtFailureSyncWorkflow implements SyncWorkflow { - - // Should match activity types from FailureHelper.java - private static final String ACTIVITY_TYPE_DBT_RUN = "Run"; - - public static final Throwable CAUSE = new Exception("dbt failed"); - - @Override - public StandardSyncOutput run(final JobRunConfig jobRunConfig, - final IntegrationLauncherConfig sourceLauncherConfig, - final IntegrationLauncherConfig destinationLauncherConfig, - final StandardSyncInput syncInput, - final UUID connectionId) { - - throw new ActivityFailure("dbt failed", 1L, 1L, ACTIVITY_TYPE_DBT_RUN, "someId", RetryState.RETRY_STATE_UNSPECIFIED, "someIdentity", CAUSE); - } - -} diff --git a/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/testsyncworkflow/NormalizationFailureSyncWorkflow.java b/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/testsyncworkflow/NormalizationFailureSyncWorkflow.java deleted file mode 100644 index 977abb41eb9..00000000000 --- a/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/testsyncworkflow/NormalizationFailureSyncWorkflow.java +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.workers.temporal.scheduling.testsyncworkflow; - -import io.airbyte.commons.temporal.scheduling.SyncWorkflow; -import io.airbyte.config.StandardSyncInput; -import io.airbyte.config.StandardSyncOutput; -import io.airbyte.persistence.job.models.IntegrationLauncherConfig; -import io.airbyte.persistence.job.models.JobRunConfig; -import io.temporal.api.enums.v1.RetryState; -import io.temporal.failure.ActivityFailure; -import java.util.UUID; - -public class NormalizationFailureSyncWorkflow implements SyncWorkflow { - - // Should match activity types from FailureHelper.java - private static final String ACTIVITY_TYPE_NORMALIZE = "Normalize"; - - public static final Throwable CAUSE = new Exception("normalization failed"); - - @Override - public StandardSyncOutput run(final JobRunConfig jobRunConfig, - final IntegrationLauncherConfig sourceLauncherConfig, - final IntegrationLauncherConfig destinationLauncherConfig, - final StandardSyncInput syncInput, - final UUID connectionId) { - - throw new ActivityFailure("normalization failed", 1L, 1L, ACTIVITY_TYPE_NORMALIZE, "someId", RetryState.RETRY_STATE_UNSPECIFIED, "someIdentity", - CAUSE); - } - -} diff --git a/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/testsyncworkflow/NormalizationTraceFailureSyncWorkflow.java b/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/testsyncworkflow/NormalizationTraceFailureSyncWorkflow.java deleted file mode 100644 index 641939016ea..00000000000 --- a/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/testsyncworkflow/NormalizationTraceFailureSyncWorkflow.java +++ /dev/null @@ -1,42 +0,0 @@ -/* - * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.workers.temporal.scheduling.testsyncworkflow; - -import com.google.common.annotations.VisibleForTesting; -import io.airbyte.commons.temporal.scheduling.SyncWorkflow; -import io.airbyte.config.FailureReason; -import io.airbyte.config.FailureReason.FailureOrigin; -import io.airbyte.config.NormalizationSummary; -import io.airbyte.config.StandardSyncInput; -import io.airbyte.config.StandardSyncOutput; -import io.airbyte.persistence.job.models.IntegrationLauncherConfig; -import io.airbyte.persistence.job.models.JobRunConfig; -import java.util.List; -import java.util.UUID; - -public class NormalizationTraceFailureSyncWorkflow implements SyncWorkflow { - - // Should match activity types from FailureHelper.java - - @VisibleForTesting - public static final FailureReason FAILURE_REASON = new FailureReason() - .withFailureOrigin(FailureOrigin.NORMALIZATION) - .withTimestamp(System.currentTimeMillis()); - - @Override - public StandardSyncOutput run(final JobRunConfig jobRunConfig, - final IntegrationLauncherConfig sourceLauncherConfig, - final IntegrationLauncherConfig destinationLauncherConfig, - final StandardSyncInput syncInput, - final UUID connectionId) { - - return new StandardSyncOutput() - .withNormalizationSummary(new NormalizationSummary() - .withFailures(List.of(FAILURE_REASON)) - .withStartTime(System.currentTimeMillis() - 1000) - .withEndTime(System.currentTimeMillis())); - } - -} diff --git a/airbyte-workers/src/test/java/io/airbyte/workers/temporal/sync/NormalizationActivityImplTest.java b/airbyte-workers/src/test/java/io/airbyte/workers/temporal/sync/NormalizationActivityImplTest.java deleted file mode 100644 index 42f2fe53723..00000000000 --- a/airbyte-workers/src/test/java/io/airbyte/workers/temporal/sync/NormalizationActivityImplTest.java +++ 
/dev/null @@ -1,168 +0,0 @@ -/* - * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.workers.temporal.sync; - -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.argThat; -import static org.mockito.ArgumentMatchers.eq; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; - -import io.airbyte.api.client.AirbyteApiClient; -import io.airbyte.api.client.generated.ConnectionApi; -import io.airbyte.api.client.model.generated.AirbyteCatalog; -import io.airbyte.api.client.model.generated.ConnectionIdRequestBody; -import io.airbyte.api.client.model.generated.ConnectionRead; -import io.airbyte.api.client.model.generated.ConnectionStatus; -import io.airbyte.commons.workers.config.WorkerConfigs; -import io.airbyte.commons.workers.config.WorkerConfigsProvider; -import io.airbyte.config.AirbyteConfigValidator; -import io.airbyte.config.Configs; -import io.airbyte.config.ConnectionContext; -import io.airbyte.config.NormalizationInput; -import io.airbyte.config.helpers.LogConfigs; -import io.airbyte.config.secrets.SecretsRepositoryReader; -import io.airbyte.config.storage.GcsStorageConfig; -import io.airbyte.config.storage.StorageBucketConfig; -import io.airbyte.featureflag.FeatureFlagClient; -import io.airbyte.featureflag.TestClient; -import io.airbyte.featureflag.UseCustomK8sScheduler; -import io.airbyte.metrics.lib.MetricClient; -import io.airbyte.persistence.job.models.IntegrationLauncherConfig; -import io.airbyte.persistence.job.models.JobRunConfig; -import io.airbyte.workers.ContainerOrchestratorConfig; -import io.airbyte.workers.process.AsyncKubePodStatus; -import io.airbyte.workers.process.ProcessFactory; -import io.airbyte.workers.storage.StorageClient; -import io.airbyte.workers.workload.JobOutputDocStore; -import io.airbyte.workers.workload.WorkloadIdGenerator; -import io.fabric8.kubernetes.client.KubernetesClient; -import io.temporal.testing.TestActivityEnvironment; -import java.io.IOException; -import java.nio.file.Path; -import java.util.List; -import java.util.Optional; -import java.util.UUID; -import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; - -@SuppressWarnings("PMD.AvoidDuplicateLiterals") -class NormalizationActivityImplTest { - - private static TestActivityEnvironment testEnv; - - private static final UUID CONNECTION_ID = UUID.randomUUID(); - private static ContainerOrchestratorConfig mContainerOrchestratorConfig; - private static WorkerConfigsProvider mWorkerConfigsProvider; - private static ProcessFactory mProcessFactory; - private static SecretsRepositoryReader mSecretsRepositoryReader; - private static final Path WORKSPACE_ROOT = Path.of("/unused/path"); - private static final Configs.WorkerEnvironment WORKER_ENVIRONMENT = Configs.WorkerEnvironment.KUBERNETES; - private static GcsStorageConfig mStorageConfigs; - private static LogConfigs LOG_CONFIGS; - private static final String AIRBYTE_VERSION = "1.0"; - private static final Integer SERVER_PORT = 8888; - private static AirbyteConfigValidator mAirbyteConfigValidator; - private static AirbyteApiClient mAirbyteApiClient; - private static ConnectionApi mConnectionApi; - private static FeatureFlagClient mFeatureFlagClient; - private static JobOutputDocStore mJobOutputDocStore; - private static NormalizationActivityImpl normalizationActivityImpl; - private static 
NormalizationActivity normalizationActivity; - private static final JobRunConfig JOB_RUN_CONFIG = new JobRunConfig().withJobId("1").withAttemptId(0L); - private static final IntegrationLauncherConfig DESTINATION_CONFIG = new IntegrationLauncherConfig() - .withDockerImage("unused") - .withNormalizationDockerImage("unused:unused"); - private static WorkerConfigs mWorkerConfigs; - private StorageClient mStorageClient; - private KubernetesClient mKubernetesClient; - - @BeforeEach - void beforeEach() throws Exception { - testEnv = TestActivityEnvironment.newInstance(); - mContainerOrchestratorConfig = mock(ContainerOrchestratorConfig.class); - mStorageClient = mock(StorageClient.class); - mKubernetesClient = mock(KubernetesClient.class); - mJobOutputDocStore = mock(JobOutputDocStore.class); - when(mContainerOrchestratorConfig.workerEnvironment()).thenReturn(Configs.WorkerEnvironment.KUBERNETES); - when(mContainerOrchestratorConfig.containerOrchestratorImage()).thenReturn("gcr.io/my-project/image-name:v2"); - when(mContainerOrchestratorConfig.containerOrchestratorImagePullPolicy()).thenReturn("Always"); - when(mContainerOrchestratorConfig.storageClient()).thenReturn(mStorageClient); - when(mStorageClient.read(argThat(key -> key.contains(AsyncKubePodStatus.SUCCEEDED.name())))).thenReturn(""); - when(mContainerOrchestratorConfig.jobOutputDocStore()).thenReturn(mJobOutputDocStore); - when(mJobOutputDocStore.readSyncOutput(any())).thenReturn(Optional.empty()); - when(mContainerOrchestratorConfig.kubernetesClient()).thenReturn(mKubernetesClient); - mWorkerConfigsProvider = mock(WorkerConfigsProvider.class); - mWorkerConfigs = mock(WorkerConfigs.class); - - mStorageConfigs = mock(GcsStorageConfig.class); - when(mStorageConfigs.getBuckets()).thenReturn(new StorageBucketConfig("unused", "unused", "unused", "unused")); - when(mStorageConfigs.getApplicationCredentials()).thenReturn("unused"); - - LOG_CONFIGS = new LogConfigs(mStorageConfigs); - when(mWorkerConfigsProvider.getConfig(any())).thenReturn(mWorkerConfigs); - mProcessFactory = mock(ProcessFactory.class); - mSecretsRepositoryReader = mock(SecretsRepositoryReader.class); - mAirbyteConfigValidator = mock(AirbyteConfigValidator.class); - mAirbyteApiClient = mock(AirbyteApiClient.class); - mConnectionApi = mock(ConnectionApi.class); - when(mAirbyteApiClient.getConnectionApi()).thenReturn(mConnectionApi); - mFeatureFlagClient = mock(TestClient.class); - when(mFeatureFlagClient.stringVariation(eq(UseCustomK8sScheduler.INSTANCE), any())).thenReturn(""); - normalizationActivityImpl = new NormalizationActivityImpl( - Optional.of(mContainerOrchestratorConfig), - mWorkerConfigsProvider, - mProcessFactory, - mSecretsRepositoryReader, - WORKSPACE_ROOT, - WORKER_ENVIRONMENT, - LOG_CONFIGS, - AIRBYTE_VERSION, - SERVER_PORT, - mAirbyteConfigValidator, - mAirbyteApiClient, - mFeatureFlagClient, - mock(MetricClient.class), - new WorkloadIdGenerator()); - testEnv.registerActivitiesImplementations(normalizationActivityImpl); - normalizationActivity = testEnv.newActivityStub(NormalizationActivity.class); - } - - @AfterEach - void afterEach() { - testEnv.close(); - } - - @Test - void checkNormalizationDataTypesSupportFromVersionString() { - Assertions.assertFalse(NormalizationActivityImpl.normalizationSupportsV1DataTypes(withNormalizationVersion("0.2.5"))); - Assertions.assertFalse(NormalizationActivityImpl.normalizationSupportsV1DataTypes(withNormalizationVersion("0.1.1"))); - 
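- // Per the assertions in this test, only the 0.3.x normalization image line supports V1 data types; older lines (0.1.x, 0.2.x), newer lines (0.4.x), and non-semver tags ("dev", "protocolv1") do not.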
Assertions.assertTrue(NormalizationActivityImpl.normalizationSupportsV1DataTypes(withNormalizationVersion("0.3.0"))); - Assertions.assertFalse(NormalizationActivityImpl.normalizationSupportsV1DataTypes(withNormalizationVersion("0.4.1"))); - Assertions.assertFalse(NormalizationActivityImpl.normalizationSupportsV1DataTypes(withNormalizationVersion("dev"))); - Assertions.assertFalse(NormalizationActivityImpl.normalizationSupportsV1DataTypes(withNormalizationVersion("protocolv1"))); - } - - private IntegrationLauncherConfig withNormalizationVersion(final String version) { - return new IntegrationLauncherConfig() - .withNormalizationDockerImage("normalization:" + version); - } - - @Test - void retrievesCatalog() throws IOException { - when(mConnectionApi.getConnection(new ConnectionIdRequestBody(CONNECTION_ID))).thenReturn( - new ConnectionRead(CONNECTION_ID, "name", UUID.randomUUID(), UUID.randomUUID(), new AirbyteCatalog(List.of()), ConnectionStatus.ACTIVE, false, - null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, UUID.randomUUID())); - normalizationActivity.normalize(JOB_RUN_CONFIG, DESTINATION_CONFIG, new NormalizationInput() - .withConnectionId(CONNECTION_ID) - .withWorkspaceId(UUID.randomUUID()) - .withConnectionContext(new ConnectionContext().withOrganizationId(UUID.randomUUID()))); - verify(mConnectionApi).getConnection(new ConnectionIdRequestBody(CONNECTION_ID)); - } - -} diff --git a/airbyte-workers/src/test/java/io/airbyte/workers/temporal/sync/SyncWorkflowTest.java b/airbyte-workers/src/test/java/io/airbyte/workers/temporal/sync/SyncWorkflowTest.java index f91a6d71d3a..6918b0de2cf 100644 --- a/airbyte-workers/src/test/java/io/airbyte/workers/temporal/sync/SyncWorkflowTest.java +++ b/airbyte-workers/src/test/java/io/airbyte/workers/temporal/sync/SyncWorkflowTest.java @@ -23,11 +23,8 @@ import io.airbyte.config.ConnectionContext; import io.airbyte.config.FailureReason.FailureOrigin; import io.airbyte.config.FailureReason.FailureType; -import io.airbyte.config.NormalizationInput; -import io.airbyte.config.NormalizationSummary; import io.airbyte.config.OperatorWebhook; import io.airbyte.config.OperatorWebhookInput; -import io.airbyte.config.ResourceRequirements; import io.airbyte.config.StandardSync; import io.airbyte.config.StandardSyncInput; import io.airbyte.config.StandardSyncOperation; @@ -63,7 +60,6 @@ import org.apache.commons.lang3.tuple.ImmutablePair; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Disabled; import org.junit.jupiter.api.Test; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.ValueSource; @@ -81,12 +77,11 @@ class SyncWorkflowTest { private Worker syncWorker; private WorkflowClient client; private ReplicationActivityImpl replicationActivity; - private NormalizationActivityImpl normalizationActivity; - private NormalizationSummaryCheckActivityImpl normalizationSummaryCheckActivity; private WebhookOperationActivityImpl webhookOperationActivity; private RefreshSchemaActivityImpl refreshSchemaActivity; private ConfigFetchActivityImpl configFetchActivity; private WorkloadFeatureFlagActivity workloadFeatureFlagActivity; + private ReportRunTimeActivity reportRunTimeActivity; // AIRBYTE CONFIGURATION private static final long JOB_ID = 11L; @@ -97,8 +92,6 @@ class SyncWorkflowTest { .withAttemptId((long) ATTEMPT_ID); private static final String IMAGE_NAME1 = "hms invincible"; private static final String IMAGE_NAME2 = "hms 
defiant"; - private static final String NORMALIZATION_IMAGE1 = "hms normalize"; - private static final String NORMALIZATION_TYPE = "postgres"; private static final IntegrationLauncherConfig SOURCE_LAUNCHER_CONFIG = new IntegrationLauncherConfig() .withJobId(String.valueOf(JOB_ID)) .withAttemptId((long) ATTEMPT_ID) @@ -106,23 +99,20 @@ class SyncWorkflowTest { private static final IntegrationLauncherConfig DESTINATION_LAUNCHER_CONFIG = new IntegrationLauncherConfig() .withJobId(String.valueOf(JOB_ID)) .withAttemptId((long) ATTEMPT_ID) - .withDockerImage(IMAGE_NAME2) - .withNormalizationDockerImage(NORMALIZATION_IMAGE1) - .withNormalizationIntegrationType(NORMALIZATION_TYPE); + .withDockerImage(IMAGE_NAME2); private static final String SYNC_QUEUE = "SYNC"; private static final UUID ORGANIZATION_ID = UUID.randomUUID(); + private static final UUID SOURCE_DEFINITION_ID = UUID.randomUUID(); private StandardSync sync; private StandardSyncInput syncInput; - private NormalizationInput normalizationInput; private StandardSyncOutput replicationSuccessOutput; private StandardSyncOutput replicationFailOutput; private StandardSyncSummary standardSyncSummary; private StandardSyncSummary failedSyncSummary; private SyncStats syncStats; - private NormalizationSummary normalizationSummary; private ActivityOptions longActivityOptions; private ActivityOptions shortActivityOptions; private ActivityOptions discoveryActivityOptions; @@ -132,10 +122,11 @@ class SyncWorkflowTest { @BeforeEach void setUp() { testEnv = TestWorkflowEnvironment.newInstance(); + syncWorker = testEnv.newWorker(SYNC_QUEUE); client = testEnv.getWorkflowClient(); - final ImmutablePair syncPair = TestConfigHelpers.createSyncConfig(ORGANIZATION_ID); + final ImmutablePair syncPair = TestConfigHelpers.createSyncConfig(ORGANIZATION_ID, SOURCE_DEFINITION_ID); sync = syncPair.getKey(); syncInput = syncPair.getValue(); @@ -144,27 +135,12 @@ void setUp() { failedSyncSummary = new StandardSyncSummary().withStatus(ReplicationStatus.FAILED).withTotalStats(new SyncStats().withRecordsEmitted(0L)); replicationSuccessOutput = new StandardSyncOutput().withStandardSyncSummary(standardSyncSummary); replicationFailOutput = new StandardSyncOutput().withStandardSyncSummary(failedSyncSummary); - - normalizationSummary = new NormalizationSummary(); - - normalizationInput = new NormalizationInput() - .withDestinationConfiguration(syncInput.getDestinationConfiguration()) - .withResourceRequirements(new ResourceRequirements()) - .withConnectionId(syncInput.getConnectionId()) - .withWorkspaceId(syncInput.getWorkspaceId()) - .withConnectionContext(new ConnectionContext().withOrganizationId(ORGANIZATION_ID)); - replicationActivity = mock(ReplicationActivityImpl.class); - normalizationActivity = mock(NormalizationActivityImpl.class); - normalizationSummaryCheckActivity = mock(NormalizationSummaryCheckActivityImpl.class); webhookOperationActivity = mock(WebhookOperationActivityImpl.class); refreshSchemaActivity = mock(RefreshSchemaActivityImpl.class); configFetchActivity = mock(ConfigFetchActivityImpl.class); workloadFeatureFlagActivity = mock(WorkloadFeatureFlagActivityImpl.class); - - when(normalizationActivity.generateNormalizationInputWithMinimumPayloadWithConnectionId(any(), any(), any(), any(), any())) - .thenReturn(normalizationInput); - when(normalizationSummaryCheckActivity.shouldRunNormalization(any(), any(), any())).thenReturn(true); + reportRunTimeActivity = mock(ReportRunTimeActivityImpl.class); 
when(configFetchActivity.getSourceId(sync.getConnectionId())).thenReturn(Optional.of(SOURCE_ID)); when(refreshSchemaActivity.shouldRefreshSchema(SOURCE_ID)).thenReturn(true); @@ -231,12 +207,11 @@ public void tearDown() { // bundle up all the temporal worker setup / execution into one method. private StandardSyncOutput execute() { syncWorker.registerActivitiesImplementations(replicationActivity, - normalizationActivity, - normalizationSummaryCheckActivity, webhookOperationActivity, refreshSchemaActivity, configFetchActivity, - workloadFeatureFlagActivity); + workloadFeatureFlagActivity, + reportRunTimeActivity); testEnv.start(); final SyncWorkflow workflow = client.newWorkflowStub(SyncWorkflow.class, WorkflowOptions.newBuilder().setTaskQueue(SYNC_QUEUE).build()); @@ -248,20 +223,15 @@ private StandardSyncOutput execute() { void testSuccess() throws Exception { doReturn(replicationSuccessOutput).when(replicationActivity).replicateV2(any()); - doReturn(normalizationSummary).when(normalizationActivity).normalize( - JOB_RUN_CONFIG, - DESTINATION_LAUNCHER_CONFIG, - normalizationInput); - final StandardSyncOutput actualOutput = execute(); verifyReplication(replicationActivity, syncInput); - verifyNormalize(normalizationActivity, normalizationInput); verifyShouldRefreshSchema(refreshSchemaActivity); verifyRefreshSchema(refreshSchemaActivity, sync, syncInput); + verify(reportRunTimeActivity).reportRunTime(any()); assertEquals( - replicationSuccessOutput.withNormalizationSummary(normalizationSummary).getStandardSyncSummary(), - actualOutput.getStandardSyncSummary()); + replicationSuccessOutput.getStandardSyncSummary(), + removeRefreshTime(actualOutput.getStandardSyncSummary())); } @ParameterizedTest @@ -271,20 +241,14 @@ void passesThroughFFCall(final boolean useWorkloadApi) throws Exception { doReturn(replicationSuccessOutput).when(replicationActivity).replicateV2(any()); - doReturn(normalizationSummary).when(normalizationActivity).normalize( - JOB_RUN_CONFIG, - DESTINATION_LAUNCHER_CONFIG, - normalizationInput); - final StandardSyncOutput actualOutput = execute(); verifyReplication(replicationActivity, syncInput, useWorkloadApi, false); - verifyNormalize(normalizationActivity, normalizationInput); verifyShouldRefreshSchema(refreshSchemaActivity); verifyRefreshSchema(refreshSchemaActivity, sync, syncInput); assertEquals( - replicationSuccessOutput.withNormalizationSummary(normalizationSummary).getStandardSyncSummary(), - actualOutput.getStandardSyncSummary()); + replicationSuccessOutput.getStandardSyncSummary(), + removeRefreshTime(actualOutput.getStandardSyncSummary())); } @Test @@ -296,44 +260,27 @@ void testReplicationFailure() throws Exception { verifyShouldRefreshSchema(refreshSchemaActivity); verifyRefreshSchema(refreshSchemaActivity, sync, syncInput); verifyReplication(replicationActivity, syncInput); - verifyNoInteractions(normalizationActivity); } @Test void testReplicationFailedGracefully() throws Exception { doReturn(replicationFailOutput).when(replicationActivity).replicateV2(any()); - doReturn(normalizationSummary).when(normalizationActivity).normalize( - JOB_RUN_CONFIG, - DESTINATION_LAUNCHER_CONFIG, - normalizationInput); - final StandardSyncOutput actualOutput = execute(); verifyShouldRefreshSchema(refreshSchemaActivity); verifyRefreshSchema(refreshSchemaActivity, sync, syncInput); verifyReplication(replicationActivity, syncInput); - verifyNormalize(normalizationActivity, normalizationInput); assertEquals( - 
replicationFailOutput.withNormalizationSummary(normalizationSummary).getStandardSyncSummary(), - actualOutput.getStandardSyncSummary()); + replicationFailOutput.getStandardSyncSummary(), + removeRefreshTime(actualOutput.getStandardSyncSummary())); } - @Test - void testNormalizationFailure() throws Exception { - doReturn(replicationSuccessOutput).when(replicationActivity).replicateV2(any()); + private StandardSyncSummary removeRefreshTime(final StandardSyncSummary in) { + in.getTotalStats().setDiscoverSchemaEndTime(null); + in.getTotalStats().setDiscoverSchemaStartTime(null); - doThrow(new IllegalArgumentException("induced exception")).when(normalizationActivity).normalize( - JOB_RUN_CONFIG, - DESTINATION_LAUNCHER_CONFIG, - normalizationInput); - - assertThrows(WorkflowFailedException.class, this::execute); - - verifyShouldRefreshSchema(refreshSchemaActivity); - verifyRefreshSchema(refreshSchemaActivity, sync, syncInput); - verifyReplication(replicationActivity, syncInput); - verifyNormalize(normalizationActivity, normalizationInput); + return in; } @Test @@ -348,40 +295,6 @@ void testCancelDuringReplication() throws Exception { verifyShouldRefreshSchema(refreshSchemaActivity); verifyRefreshSchema(refreshSchemaActivity, sync, syncInput); verifyReplication(replicationActivity, syncInput); - verifyNoInteractions(normalizationActivity); - } - - @Test - void testCancelDuringNormalization() throws Exception { - doReturn(replicationSuccessOutput).when(replicationActivity).replicateV2(any()); - - doAnswer(ignored -> { - cancelWorkflow(); - return replicationSuccessOutput; - }).when(normalizationActivity).normalize( - JOB_RUN_CONFIG, - DESTINATION_LAUNCHER_CONFIG, - normalizationInput); - - assertThrows(WorkflowFailedException.class, this::execute); - - verifyShouldRefreshSchema(refreshSchemaActivity); - verifyRefreshSchema(refreshSchemaActivity, sync, syncInput); - verifyReplication(replicationActivity, syncInput); - verifyNormalize(normalizationActivity, normalizationInput); - } - - @Test - @Disabled("This behavior has been disabled temporarily (OC Issue #741)") - void testSkipNormalization() throws Exception { - when(normalizationSummaryCheckActivity.shouldRunNormalization(any(), any(), any())).thenReturn(false); - - execute(); - - verifyShouldRefreshSchema(refreshSchemaActivity); - verifyRefreshSchema(refreshSchemaActivity, sync, syncInput); - verifyReplication(replicationActivity, syncInput); - verifyNoInteractions(normalizationActivity); } @Test @@ -399,7 +312,7 @@ void testWebhookOperation() { when(webhookOperationActivity.invokeWebhook( new OperatorWebhookInput().withWebhookConfigId(WEBHOOK_CONFIG_ID).withExecutionUrl(WEBHOOK_URL).withExecutionBody(WEBHOOK_BODY) .withWorkspaceWebhookConfigs(workspaceWebhookConfigs) - .withConnectionContext(new ConnectionContext().withOrganizationId(ORGANIZATION_ID)))) + .withConnectionContext(new ConnectionContext().withOrganizationId(ORGANIZATION_ID).withSourceDefinitionId(SOURCE_DEFINITION_ID)))) .thenReturn(true); final StandardSyncOutput actualOutput = execute(); assertEquals(actualOutput.getWebhookOperationSummary().getSuccesses().get(0), WEBHOOK_CONFIG_ID); @@ -412,7 +325,6 @@ void testSkipReplicationAfterRefreshSchema() throws Exception { verifyShouldRefreshSchema(refreshSchemaActivity); verifyRefreshSchema(refreshSchemaActivity, sync, syncInput); verifyNoInteractions(replicationActivity); - verifyNoInteractions(normalizationActivity); assertEquals(output.getStandardSyncSummary().getStatus(), ReplicationStatus.CANCELLED); } @@ -464,25 +376,17 @@ 
private static void verifyReplication(final ReplicationActivity replicationActiv syncInput.getSyncResourceRequirements(), syncInput.getWorkspaceId(), syncInput.getConnectionId(), - syncInput.getNormalizeInDestinationContainer(), SYNC_QUEUE, syncInput.getIsReset(), syncInput.getNamespaceDefinition(), syncInput.getNamespaceFormat(), syncInput.getPrefix(), null, - new ConnectionContext().withOrganizationId(ORGANIZATION_ID), + new ConnectionContext().withOrganizationId(ORGANIZATION_ID).withSourceDefinitionId(SOURCE_DEFINITION_ID), useWorkloadApi, useOutputDocStore)); } - private void verifyNormalize(final NormalizationActivity normalizationActivity, final NormalizationInput normalizationInput) { - verify(normalizationActivity).normalize( - JOB_RUN_CONFIG, - DESTINATION_LAUNCHER_CONFIG, - normalizationInput); - } - private static void verifyShouldRefreshSchema(final RefreshSchemaActivity refreshSchemaActivity) { verify(refreshSchemaActivity).shouldRefreshSchema(SOURCE_ID); } diff --git a/airbyte-workers/src/test/resources/connectionManagerWorkflowHistory.json b/airbyte-workers/src/test/resources/connectionManagerWorkflowHistory.json deleted file mode 100644 index 32fd9a1ebb3..00000000000 --- a/airbyte-workers/src/test/resources/connectionManagerWorkflowHistory.json +++ /dev/null @@ -1,2827 +0,0 @@ -{ - "events": [ - { - "eventId": "1", - "eventTime": "2023-09-20T23:45:02.991104669Z", - "eventType": "WorkflowExecutionStarted", - "version": "1067", - "taskId": "212146739", - "workerMayIgnore": false, - "workflowExecutionStartedEventAttributes": { - "workflowType": { - "name": "ConnectionManagerWorkflow" - }, - "parentWorkflowNamespace": "", - "parentWorkflowNamespaceId": "", - "parentWorkflowExecution": null, - "parentInitiatedEventId": "0", - "taskQueue": { - "name": "CONNECTION_UPDATER", - "kind": "Normal", - "normalName": "" - }, - "input": { - "payloads": [ - { - "metadata": { - "encoding": "anNvbi9wbGFpbg==" - }, - "data": "eyJjb25uZWN0aW9uSWQiOiJiYmZiODEyZS00MmNmLTQzZTItOGJjZS0xNzUwMmFlN2M0ZjAiLCJqb2JJZCI6bnVsbCwiYXR0ZW1wdElkIjpudWxsLCJmcm9tRmFpbHVyZSI6ZmFsc2UsImF0dGVtcHROdW1iZXIiOjEsIndvcmtmbG93U3RhdGUiOm51bGwsInJlc2V0Q29ubmVjdGlvbiI6ZmFsc2UsImZyb21Kb2JSZXNldEZhaWx1cmUiOmZhbHNlLCJza2lwU2NoZWR1bGluZyI6ZmFsc2V9" - } - ] - }, - "workflowExecutionTimeout": "0s", - "workflowRunTimeout": "0s", - "workflowTaskTimeout": "10s", - "continuedExecutionRunId": "937d49f6-296c-4788-b138-2c1ea5735518", - "initiator": "Workflow", - "continuedFailure": null, - "lastCompletionResult": null, - "originalExecutionRunId": "cbc173a4-cc96-4155-afb8-12422f1a5f48", - "identity": "", - "firstExecutionRunId": "35996e87-c60d-4210-bd9f-0e8b1701db37", - "retryPolicy": null, - "attempt": 1, - "workflowExecutionExpirationTime": null, - "cronSchedule": "", - "firstWorkflowTaskBackoff": null, - "memo": null, - "searchAttributes": null, - "prevAutoResetPoints": { - "points": [] - }, - "header": { - "fields": {} - }, - "parentInitiatedEventVersion": "0", - "workflowId": "connection_manager_bbfb812e-42cf-43e2-8bce-17502ae7c4f0", - "sourceVersionStamp": null - } - }, - { - "eventId": "2", - "eventTime": "2023-09-20T23:45:02.991148520Z", - "eventType": "WorkflowTaskScheduled", - "version": "1067", - "taskId": "212146740", - "workerMayIgnore": false, - "workflowTaskScheduledEventAttributes": { - "taskQueue": { - "name": "CONNECTION_UPDATER", - "kind": "Normal", - "normalName": "" - }, - "startToCloseTimeout": "10s", - "attempt": 1 - } - }, - { - "eventId": "3", - "eventTime": "2023-09-20T23:45:03.023435066Z", - "eventType": 
"WorkflowTaskStarted", - "version": "1067", - "taskId": "212146747", - "workerMayIgnore": false, - "workflowTaskStartedEventAttributes": { - "scheduledEventId": "2", - "identity": "1@prod-airbyte-worker-85df7858dd-nkgkv", - "requestId": "e7b0c930-6122-4b23-b7f0-cdc5bd9b1be3", - "suggestContinueAsNew": false, - "historySizeBytes": "589" - } - }, - { - "eventId": "4", - "eventTime": "2023-09-20T23:45:03.079347800Z", - "eventType": "WorkflowTaskCompleted", - "version": "1067", - "taskId": "212146752", - "workerMayIgnore": false, - "workflowTaskCompletedEventAttributes": { - "scheduledEventId": "2", - "startedEventId": "3", - "identity": "1@prod-airbyte-worker-85df7858dd-nkgkv", - "binaryChecksum": "", - "workerVersion": null, - "sdkMetadata": null, - "meteringMetadata": null - } - }, - { - "eventId": "5", - "eventTime": "2023-09-20T23:45:03.079374900Z", - "eventType": "ActivityTaskScheduled", - "version": "1067", - "taskId": "212146753", - "workerMayIgnore": false, - "activityTaskScheduledEventAttributes": { - "activityId": "3ae37633-6aaf-3df5-8668-2ebbdeb81763", - "activityType": { - "name": "GetWorkflowRestartDelaySeconds" - }, - "taskQueue": { - "name": "CONNECTION_UPDATER", - "kind": "Normal", - "normalName": "" - }, - "header": { - "fields": {} - }, - "input": null, - "scheduleToCloseTimeout": "0s", - "scheduleToStartTimeout": "0s", - "startToCloseTimeout": "120s", - "heartbeatTimeout": "30s", - "workflowTaskCompletedEventId": "4", - "retryPolicy": { - "initialInterval": "30s", - "backoffCoefficient": 2, - "maximumInterval": "600s", - "maximumAttempts": 5, - "nonRetryableErrorTypes": [] - }, - "useCompatibleVersion": false - } - }, - { - "eventId": "6", - "eventTime": "2023-09-20T23:45:03.079387820Z", - "eventType": "ActivityTaskStarted", - "version": "1067", - "taskId": "212146757", - "workerMayIgnore": false, - "activityTaskStartedEventAttributes": { - "scheduledEventId": "5", - "identity": "1@prod-airbyte-worker-85df7858dd-nkgkv", - "requestId": "08352aa0-0b9a-42e3-9e04-b24dfeeb41c5", - "attempt": 1, - "lastFailure": null - } - }, - { - "eventId": "7", - "eventTime": "2023-09-20T23:45:03.131337891Z", - "eventType": "ActivityTaskCompleted", - "version": "1067", - "taskId": "212146758", - "workerMayIgnore": false, - "activityTaskCompletedEventAttributes": { - "result": { - "payloads": [ - { - "metadata": { - "encoding": "anNvbi9wbGFpbg==" - }, - "data": "NjAwLjAwMDAwMDAwMA==" - } - ] - }, - "scheduledEventId": "5", - "startedEventId": "6", - "identity": "1@prod-airbyte-worker-85df7858dd-nkgkv", - "workerVersion": null - } - }, - { - "eventId": "8", - "eventTime": "2023-09-20T23:45:03.131341641Z", - "eventType": "WorkflowTaskScheduled", - "version": "1067", - "taskId": "212146759", - "workerMayIgnore": false, - "workflowTaskScheduledEventAttributes": { - "taskQueue": { - "name": "1@prod-airbyte-worker-85df7858dd-nkgkv:19f7357e-6457-4548-883f-d15688bfb20a", - "kind": "Sticky", - "normalName": "CONNECTION_UPDATER" - }, - "startToCloseTimeout": "10s", - "attempt": 1 - } - }, - { - "eventId": "9", - "eventTime": "2023-09-20T23:45:03.139715386Z", - "eventType": "WorkflowTaskStarted", - "version": "1067", - "taskId": "212146763", - "workerMayIgnore": false, - "workflowTaskStartedEventAttributes": { - "scheduledEventId": "8", - "identity": "1@prod-airbyte-worker-85df7858dd-nkgkv", - "requestId": "f54ff921-6f86-4a74-ae8e-c3aa84f8e012", - "suggestContinueAsNew": false, - "historySizeBytes": "1305" - } - }, - { - "eventId": "10", - "eventTime": "2023-09-20T23:45:03.206535430Z", - "eventType": 
"WorkflowTaskCompleted", - "version": "1067", - "taskId": "212146768", - "workerMayIgnore": false, - "workflowTaskCompletedEventAttributes": { - "scheduledEventId": "8", - "startedEventId": "9", - "identity": "1@prod-airbyte-worker-85df7858dd-nkgkv", - "binaryChecksum": "", - "workerVersion": null, - "sdkMetadata": null, - "meteringMetadata": null - } - }, - { - "eventId": "11", - "eventTime": "2023-09-20T23:45:03.206560941Z", - "eventType": "ActivityTaskScheduled", - "version": "1067", - "taskId": "212146769", - "workerMayIgnore": false, - "activityTaskScheduledEventAttributes": { - "activityId": "c61d5e9b-ba66-3950-911d-da3fd1c5863c", - "activityType": { - "name": "RecordWorkflowCountMetric" - }, - "taskQueue": { - "name": "CONNECTION_UPDATER", - "kind": "Normal", - "normalName": "" - }, - "header": { - "fields": {} - }, - "input": { - "payloads": [ - { - "metadata": { - "encoding": "anNvbi9wbGFpbg==" - }, - "data": "eyJjb25uZWN0aW9uVXBkYXRlcklucHV0Ijp7ImNvbm5lY3Rpb25JZCI6ImJiZmI4MTJlLTQyY2YtNDNlMi04YmNlLTE3NTAyYWU3YzRmMCIsImpvYklkIjpudWxsLCJhdHRlbXB0SWQiOm51bGwsImZyb21GYWlsdXJlIjpmYWxzZSwiYXR0ZW1wdE51bWJlciI6MSwid29ya2Zsb3dTdGF0ZSI6bnVsbCwicmVzZXRDb25uZWN0aW9uIjpmYWxzZSwiZnJvbUpvYlJlc2V0RmFpbHVyZSI6ZmFsc2UsInNraXBTY2hlZHVsaW5nIjpmYWxzZX0sImZhaWx1cmVDYXVzZSI6bnVsbCwibWV0cmljTmFtZSI6IlRFTVBPUkFMX1dPUktGTE9XX0FUVEVNUFQiLCJtZXRyaWNBdHRyaWJ1dGVzIjpudWxsfQ==" - } - ] - }, - "scheduleToCloseTimeout": "0s", - "scheduleToStartTimeout": "0s", - "startToCloseTimeout": "120s", - "heartbeatTimeout": "30s", - "workflowTaskCompletedEventId": "10", - "retryPolicy": { - "initialInterval": "30s", - "backoffCoefficient": 2, - "maximumInterval": "600s", - "maximumAttempts": 5, - "nonRetryableErrorTypes": [] - }, - "useCompatibleVersion": false - } - }, - { - "eventId": "12", - "eventTime": "2023-09-20T23:45:03.206573561Z", - "eventType": "ActivityTaskStarted", - "version": "1067", - "taskId": "212146782", - "workerMayIgnore": false, - "activityTaskStartedEventAttributes": { - "scheduledEventId": "11", - "identity": "1@prod-airbyte-worker-85df7858dd-nkgkv", - "requestId": "b4e49b81-4f91-4bbc-b14b-6d80b527263f", - "attempt": 1, - "lastFailure": null - } - }, - { - "eventId": "13", - "eventTime": "2023-09-20T23:45:03.271718285Z", - "eventType": "ActivityTaskCompleted", - "version": "1067", - "taskId": "212146783", - "workerMayIgnore": false, - "activityTaskCompletedEventAttributes": { - "result": null, - "scheduledEventId": "11", - "startedEventId": "12", - "identity": "1@prod-airbyte-worker-85df7858dd-nkgkv", - "workerVersion": null - } - }, - { - "eventId": "14", - "eventTime": "2023-09-20T23:45:03.271722065Z", - "eventType": "WorkflowTaskScheduled", - "version": "1067", - "taskId": "212146784", - "workerMayIgnore": false, - "workflowTaskScheduledEventAttributes": { - "taskQueue": { - "name": "1@prod-airbyte-worker-85df7858dd-nkgkv:19f7357e-6457-4548-883f-d15688bfb20a", - "kind": "Sticky", - "normalName": "CONNECTION_UPDATER" - }, - "startToCloseTimeout": "10s", - "attempt": 1 - } - }, - { - "eventId": "15", - "eventTime": "2023-09-20T23:45:03.284608653Z", - "eventType": "WorkflowTaskStarted", - "version": "1067", - "taskId": "212146794", - "workerMayIgnore": false, - "workflowTaskStartedEventAttributes": { - "scheduledEventId": "14", - "identity": "1@prod-airbyte-worker-85df7858dd-nkgkv", - "requestId": "e26935ef-2829-42da-9719-1f4126633ab1", - "suggestContinueAsNew": false, - "historySizeBytes": "2339" - } - }, - { - "eventId": "16", - "eventTime": "2023-09-20T23:45:03.346264032Z", - "eventType": 
"WorkflowTaskCompleted", - "version": "1067", - "taskId": "212146803", - "workerMayIgnore": false, - "workflowTaskCompletedEventAttributes": { - "scheduledEventId": "14", - "startedEventId": "15", - "identity": "1@prod-airbyte-worker-85df7858dd-nkgkv", - "binaryChecksum": "", - "workerVersion": null, - "sdkMetadata": null, - "meteringMetadata": null - } - }, - { - "eventId": "17", - "eventTime": "2023-09-20T23:45:03.346299153Z", - "eventType": "ActivityTaskScheduled", - "version": "1067", - "taskId": "212146804", - "workerMayIgnore": false, - "activityTaskScheduledEventAttributes": { - "activityId": "7c5385b2-8da5-311f-8403-c4f7f8f72d96", - "activityType": { - "name": "EnsureCleanJobState" - }, - "taskQueue": { - "name": "CONNECTION_UPDATER", - "kind": "Normal", - "normalName": "" - }, - "header": { - "fields": {} - }, - "input": { - "payloads": [ - { - "metadata": { - "encoding": "anNvbi9wbGFpbg==" - }, - "data": "eyJjb25uZWN0aW9uSWQiOiJiYmZiODEyZS00MmNmLTQzZTItOGJjZS0xNzUwMmFlN2M0ZjAifQ==" - } - ] - }, - "scheduleToCloseTimeout": "0s", - "scheduleToStartTimeout": "0s", - "startToCloseTimeout": "120s", - "heartbeatTimeout": "30s", - "workflowTaskCompletedEventId": "16", - "retryPolicy": { - "initialInterval": "30s", - "backoffCoefficient": 2, - "maximumInterval": "600s", - "maximumAttempts": 5, - "nonRetryableErrorTypes": [] - }, - "useCompatibleVersion": false - } - }, - { - "eventId": "18", - "eventTime": "2023-09-20T23:45:03.346313213Z", - "eventType": "ActivityTaskStarted", - "version": "1067", - "taskId": "212146807", - "workerMayIgnore": false, - "activityTaskStartedEventAttributes": { - "scheduledEventId": "17", - "identity": "1@prod-airbyte-worker-85df7858dd-nkgkv", - "requestId": "77116974-4d5c-4fe7-a98e-613fa4f0e468", - "attempt": 1, - "lastFailure": null - } - }, - { - "eventId": "19", - "eventTime": "2023-09-20T23:45:03.403387878Z", - "eventType": "ActivityTaskCompleted", - "version": "1067", - "taskId": "212146808", - "workerMayIgnore": false, - "activityTaskCompletedEventAttributes": { - "result": null, - "scheduledEventId": "17", - "startedEventId": "18", - "identity": "1@prod-airbyte-worker-85df7858dd-nkgkv", - "workerVersion": null - } - }, - { - "eventId": "20", - "eventTime": "2023-09-20T23:45:03.403392228Z", - "eventType": "WorkflowTaskScheduled", - "version": "1067", - "taskId": "212146809", - "workerMayIgnore": false, - "workflowTaskScheduledEventAttributes": { - "taskQueue": { - "name": "1@prod-airbyte-worker-85df7858dd-nkgkv:19f7357e-6457-4548-883f-d15688bfb20a", - "kind": "Sticky", - "normalName": "CONNECTION_UPDATER" - }, - "startToCloseTimeout": "10s", - "attempt": 1 - } - }, - { - "eventId": "21", - "eventTime": "2023-09-20T23:45:03.414209448Z", - "eventType": "WorkflowTaskStarted", - "version": "1067", - "taskId": "212146813", - "workerMayIgnore": false, - "workflowTaskStartedEventAttributes": { - "scheduledEventId": "20", - "identity": "1@prod-airbyte-worker-85df7858dd-nkgkv", - "requestId": "2efb4ef6-08f6-4ebd-b912-f57a65754fac", - "suggestContinueAsNew": false, - "historySizeBytes": "3092" - } - }, - { - "eventId": "22", - "eventTime": "2023-09-20T23:45:03.472765121Z", - "eventType": "WorkflowTaskCompleted", - "version": "1067", - "taskId": "212146823", - "workerMayIgnore": false, - "workflowTaskCompletedEventAttributes": { - "scheduledEventId": "20", - "startedEventId": "21", - "identity": "1@prod-airbyte-worker-85df7858dd-nkgkv", - "binaryChecksum": "", - "workerVersion": null, - "sdkMetadata": null, - "meteringMetadata": null - } - }, - { - "eventId": 
"23", - "eventTime": "2023-09-20T23:45:03.472787491Z", - "eventType": "MarkerRecorded", - "version": "1067", - "taskId": "212146824", - "workerMayIgnore": false, - "markerRecordedEventAttributes": { - "markerName": "Version", - "details": { - "changeId": { - "payloads": [ - { - "metadata": { - "encoding": "anNvbi9wbGFpbg==" - }, - "data": "Im5ld19yZXRyaWVzIg==" - } - ] - }, - "version": { - "payloads": [ - { - "metadata": { - "encoding": "anNvbi9wbGFpbg==" - }, - "data": "MQ==" - } - ] - } - }, - "workflowTaskCompletedEventId": "22", - "header": null, - "failure": null - } - }, - { - "eventId": "24", - "eventTime": "2023-09-20T23:45:03.472800491Z", - "eventType": "ActivityTaskScheduled", - "version": "1067", - "taskId": "212146825", - "workerMayIgnore": false, - "activityTaskScheduledEventAttributes": { - "activityId": "38a0e379-23f3-30aa-9010-01aa4a3077f0", - "activityType": { - "name": "HydrateRetryState" - }, - "taskQueue": { - "name": "CONNECTION_UPDATER", - "kind": "Normal", - "normalName": "" - }, - "header": { - "fields": {} - }, - "input": { - "payloads": [ - { - "metadata": { - "encoding": "anNvbi9wbGFpbg==" - }, - "data": "eyJqb2JJZCI6bnVsbCwiY29ubmVjdGlvbklkIjoiYmJmYjgxMmUtNDJjZi00M2UyLThiY2UtMTc1MDJhZTdjNGYwIn0=" - } - ] - }, - "scheduleToCloseTimeout": "0s", - "scheduleToStartTimeout": "0s", - "startToCloseTimeout": "120s", - "heartbeatTimeout": "30s", - "workflowTaskCompletedEventId": "22", - "retryPolicy": { - "initialInterval": "30s", - "backoffCoefficient": 2, - "maximumInterval": "600s", - "maximumAttempts": 5, - "nonRetryableErrorTypes": [] - }, - "useCompatibleVersion": false - } - }, - { - "eventId": "25", - "eventTime": "2023-09-20T23:45:03.472811422Z", - "eventType": "ActivityTaskStarted", - "version": "1067", - "taskId": "212146828", - "workerMayIgnore": false, - "activityTaskStartedEventAttributes": { - "scheduledEventId": "24", - "identity": "1@prod-airbyte-worker-85df7858dd-nkgkv", - "requestId": "bc788459-fba9-44e1-bd2a-cb0f9fd52932", - "attempt": 1, - "lastFailure": null - } - }, - { - "eventId": "26", - "eventTime": "2023-09-20T23:45:03.536738003Z", - "eventType": "ActivityTaskCompleted", - "version": "1067", - "taskId": "212146829", - "workerMayIgnore": false, - "activityTaskCompletedEventAttributes": { - "result": { - "payloads": [ - { - "metadata": { - "encoding": "anNvbi9wbGFpbg==" - }, - "data": "eyJtYW5hZ2VyIjp7ImNvbXBsZXRlRmFpbHVyZUJhY2tvZmZQb2xpY3kiOnsibWluSW50ZXJ2YWwiOjEwLjAwMDAwMDAwMCwibWF4SW50ZXJ2YWwiOjE4MDAuMDAwMDAwMDAwLCJiYXNlIjozfSwicGFydGlhbEZhaWx1cmVCYWNrb2ZmUG9saWN5IjpudWxsLCJzdWNjZXNzaXZlQ29tcGxldGVGYWlsdXJlTGltaXQiOjUsInRvdGFsQ29tcGxldGVGYWlsdXJlTGltaXQiOjUsInN1Y2Nlc3NpdmVQYXJ0aWFsRmFpbHVyZUxpbWl0IjoxMDAwLCJ0b3RhbFBhcnRpYWxGYWlsdXJlTGltaXQiOjEwLCJzdWNjZXNzaXZlQ29tcGxldGVGYWlsdXJlcyI6MCwidG90YWxDb21wbGV0ZUZhaWx1cmVzIjowLCJzdWNjZXNzaXZlUGFydGlhbEZhaWx1cmVzIjowLCJ0b3RhbFBhcnRpYWxGYWlsdXJlcyI6MCwiYmFja29mZiI6MC4wLCJiYWNrb2ZmU3RyaW5nIjoiMCBzZWNvbmRzIn19" - } - ] - }, - "scheduledEventId": "24", - "startedEventId": "25", - "identity": "1@prod-airbyte-worker-85df7858dd-nkgkv", - "workerVersion": null - } - }, - { - "eventId": "27", - "eventTime": "2023-09-20T23:45:03.536741113Z", - "eventType": "WorkflowTaskScheduled", - "version": "1067", - "taskId": "212146830", - "workerMayIgnore": false, - "workflowTaskScheduledEventAttributes": { - "taskQueue": { - "name": "1@prod-airbyte-worker-85df7858dd-nkgkv:19f7357e-6457-4548-883f-d15688bfb20a", - "kind": "Sticky", - "normalName": "CONNECTION_UPDATER" - }, - "startToCloseTimeout": "10s", - "attempt": 1 - 
} - }, - { - "eventId": "28", - "eventTime": "2023-09-20T23:45:03.548309997Z", - "eventType": "WorkflowTaskStarted", - "version": "1067", - "taskId": "212146834", - "workerMayIgnore": false, - "workflowTaskStartedEventAttributes": { - "scheduledEventId": "27", - "identity": "1@prod-airbyte-worker-85df7858dd-nkgkv", - "requestId": "f65e13c6-bb1b-475c-8d0d-5b7dfb405aeb", - "suggestContinueAsNew": false, - "historySizeBytes": "4463" - } - }, - { - "eventId": "29", - "eventTime": "2023-09-20T23:45:03.603510237Z", - "eventType": "WorkflowTaskCompleted", - "version": "1067", - "taskId": "212146839", - "workerMayIgnore": false, - "workflowTaskCompletedEventAttributes": { - "scheduledEventId": "27", - "startedEventId": "28", - "identity": "1@prod-airbyte-worker-85df7858dd-nkgkv", - "binaryChecksum": "", - "workerVersion": null, - "sdkMetadata": null, - "meteringMetadata": null - } - }, - { - "eventId": "30", - "eventTime": "2023-09-20T23:45:03.603534958Z", - "eventType": "MarkerRecorded", - "version": "1067", - "taskId": "212146840", - "workerMayIgnore": false, - "markerRecordedEventAttributes": { - "markerName": "Version", - "details": { - "changeId": { - "payloads": [ - { - "metadata": { - "encoding": "anNvbi9wbGFpbg==" - }, - "data": "ImFwcGVuZF9hdHRlbXB0X2xvZyI=" - } - ] - }, - "version": { - "payloads": [ - { - "metadata": { - "encoding": "anNvbi9wbGFpbg==" - }, - "data": "MQ==" - } - ] - } - }, - "workflowTaskCompletedEventId": "29", - "header": null, - "failure": null - } - }, - { - "eventId": "31", - "eventTime": "2023-09-20T23:45:03.603549618Z", - "eventType": "ActivityTaskScheduled", - "version": "1067", - "taskId": "212146841", - "workerMayIgnore": false, - "activityTaskScheduledEventAttributes": { - "activityId": "b6e9f9f7-e9a4-31db-bbce-4b0200328267", - "activityType": { - "name": "Log" - }, - "taskQueue": { - "name": "CONNECTION_UPDATER", - "kind": "Normal", - "normalName": "" - }, - "header": { - "fields": {} - }, - "input": { - "payloads": [ - { - "metadata": { - "encoding": "anNvbi9wbGFpbg==" - }, - "data": "eyJqb2JJZCI6bnVsbCwiYXR0ZW1wdE51bWJlciI6MCwibWVzc2FnZSI6IlJldHJ5IFN0YXRlOiBSZXRyeU1hbmFnZXIoY29tcGxldGVGYWlsdXJlQmFja29mZlBvbGljeT1CYWNrb2ZmUG9saWN5KG1pbkludGVydmFsPVBUMTBTLCBtYXhJbnRlcnZhbD1QVDMwTSwgYmFzZT0zKSwgcGFydGlhbEZhaWx1cmVCYWNrb2ZmUG9saWN5PW51bGwsIHN1Y2Nlc3NpdmVDb21wbGV0ZUZhaWx1cmVMaW1pdD01LCB0b3RhbENvbXBsZXRlRmFpbHVyZUxpbWl0PTUsIHN1Y2Nlc3NpdmVQYXJ0aWFsRmFpbHVyZUxpbWl0PTEwMDAsIHRvdGFsUGFydGlhbEZhaWx1cmVMaW1pdD0xMCwgc3VjY2Vzc2l2ZUNvbXBsZXRlRmFpbHVyZXM9MCwgdG90YWxDb21wbGV0ZUZhaWx1cmVzPTAsIHN1Y2Nlc3NpdmVQYXJ0aWFsRmFpbHVyZXM9MCwgdG90YWxQYXJ0aWFsRmFpbHVyZXM9MCkiLCJsZXZlbCI6IklORk8ifQ==" - } - ] - }, - "scheduleToCloseTimeout": "0s", - "scheduleToStartTimeout": "0s", - "startToCloseTimeout": "120s", - "heartbeatTimeout": "30s", - "workflowTaskCompletedEventId": "29", - "retryPolicy": { - "initialInterval": "30s", - "backoffCoefficient": 2, - "maximumInterval": "600s", - "maximumAttempts": 5, - "nonRetryableErrorTypes": [] - }, - "useCompatibleVersion": false - } - }, - { - "eventId": "32", - "eventTime": "2023-09-20T23:45:03.603564578Z", - "eventType": "ActivityTaskStarted", - "version": "1067", - "taskId": "212146846", - "workerMayIgnore": false, - "activityTaskStartedEventAttributes": { - "scheduledEventId": "31", - "identity": "1@prod-airbyte-worker-85df7858dd-nkgkv", - "requestId": "47b7c381-2329-4cb1-b602-5810f43db9cd", - "attempt": 1, - "lastFailure": null - } - }, - { - "eventId": "33", - "eventTime": "2023-09-20T23:45:03.658521514Z", - "eventType": 
"ActivityTaskCompleted", - "version": "1067", - "taskId": "212146847", - "workerMayIgnore": false, - "activityTaskCompletedEventAttributes": { - "result": { - "payloads": [ - { - "metadata": { - "encoding": "anNvbi9wbGFpbg==" - }, - "data": "eyJzdWNjZXNzIjpmYWxzZX0=" - } - ] - }, - "scheduledEventId": "31", - "startedEventId": "32", - "identity": "1@prod-airbyte-worker-85df7858dd-nkgkv", - "workerVersion": null - } - }, - { - "eventId": "34", - "eventTime": "2023-09-20T23:45:03.658526164Z", - "eventType": "WorkflowTaskScheduled", - "version": "1067", - "taskId": "212146848", - "workerMayIgnore": false, - "workflowTaskScheduledEventAttributes": { - "taskQueue": { - "name": "1@prod-airbyte-worker-85df7858dd-nkgkv:19f7357e-6457-4548-883f-d15688bfb20a", - "kind": "Sticky", - "normalName": "CONNECTION_UPDATER" - }, - "startToCloseTimeout": "10s", - "attempt": 1 - } - }, - { - "eventId": "35", - "eventTime": "2023-09-20T23:45:03.674954898Z", - "eventType": "WorkflowTaskStarted", - "version": "1067", - "taskId": "212146858", - "workerMayIgnore": false, - "workflowTaskStartedEventAttributes": { - "scheduledEventId": "34", - "identity": "1@prod-airbyte-worker-85df7858dd-nkgkv", - "requestId": "e1af513e-0281-452d-b6ae-ee956af5ba96", - "suggestContinueAsNew": false, - "historySizeBytes": "5787" - } - }, - { - "eventId": "36", - "eventTime": "2023-09-20T23:45:03.735858893Z", - "eventType": "WorkflowTaskCompleted", - "version": "1067", - "taskId": "212146867", - "workerMayIgnore": false, - "workflowTaskCompletedEventAttributes": { - "scheduledEventId": "34", - "startedEventId": "35", - "identity": "1@prod-airbyte-worker-85df7858dd-nkgkv", - "binaryChecksum": "", - "workerVersion": null, - "sdkMetadata": null, - "meteringMetadata": null - } - }, - { - "eventId": "37", - "eventTime": "2023-09-20T23:45:03.735891804Z", - "eventType": "ActivityTaskScheduled", - "version": "1067", - "taskId": "212146868", - "workerMayIgnore": false, - "activityTaskScheduledEventAttributes": { - "activityId": "a6a117ec-b803-3950-9db3-31e77dcda872", - "activityType": { - "name": "GetTimeToWait" - }, - "taskQueue": { - "name": "CONNECTION_UPDATER", - "kind": "Normal", - "normalName": "" - }, - "header": { - "fields": {} - }, - "input": { - "payloads": [ - { - "metadata": { - "encoding": "anNvbi9wbGFpbg==" - }, - "data": "eyJjb25uZWN0aW9uSWQiOiJiYmZiODEyZS00MmNmLTQzZTItOGJjZS0xNzUwMmFlN2M0ZjAifQ==" - } - ] - }, - "scheduleToCloseTimeout": "0s", - "scheduleToStartTimeout": "0s", - "startToCloseTimeout": "120s", - "heartbeatTimeout": "30s", - "workflowTaskCompletedEventId": "36", - "retryPolicy": { - "initialInterval": "30s", - "backoffCoefficient": 2, - "maximumInterval": "600s", - "maximumAttempts": 5, - "nonRetryableErrorTypes": [] - }, - "useCompatibleVersion": false - } - }, - { - "eventId": "38", - "eventTime": "2023-09-20T23:45:03.735906724Z", - "eventType": "ActivityTaskStarted", - "version": "1067", - "taskId": "212146906", - "workerMayIgnore": false, - "activityTaskStartedEventAttributes": { - "scheduledEventId": "37", - "identity": "1@prod-airbyte-worker-85df7858dd-nkgkv", - "requestId": "a91d6255-d81b-450e-9958-c54ce9259df9", - "attempt": 1, - "lastFailure": null - } - }, - { - "eventId": "39", - "eventTime": "2023-09-20T23:45:04.561099025Z", - "eventType": "ActivityTaskCompleted", - "version": "1067", - "taskId": "212146907", - "workerMayIgnore": false, - "activityTaskCompletedEventAttributes": { - "result": { - "payloads": [ - { - "metadata": { - "encoding": "anNvbi9wbGFpbg==" - }, - "data": 
"eyJ0aW1lVG9XYWl0IjozNDM0LjAwMDAwMDAwMH0=" - } - ] - }, - "scheduledEventId": "37", - "startedEventId": "38", - "identity": "1@prod-airbyte-worker-85df7858dd-nkgkv", - "workerVersion": null - } - }, - { - "eventId": "40", - "eventTime": "2023-09-20T23:45:04.561103345Z", - "eventType": "WorkflowTaskScheduled", - "version": "1067", - "taskId": "212146908", - "workerMayIgnore": false, - "workflowTaskScheduledEventAttributes": { - "taskQueue": { - "name": "1@prod-airbyte-worker-85df7858dd-nkgkv:19f7357e-6457-4548-883f-d15688bfb20a", - "kind": "Sticky", - "normalName": "CONNECTION_UPDATER" - }, - "startToCloseTimeout": "10s", - "attempt": 1 - } - }, - { - "eventId": "41", - "eventTime": "2023-09-20T23:45:04.569973919Z", - "eventType": "WorkflowTaskStarted", - "version": "1067", - "taskId": "212146912", - "workerMayIgnore": false, - "workflowTaskStartedEventAttributes": { - "scheduledEventId": "40", - "identity": "1@prod-airbyte-worker-85df7858dd-nkgkv", - "requestId": "64f9d3c3-3e5c-47da-830d-796df7846ddb", - "suggestContinueAsNew": false, - "historySizeBytes": "6594" - } - }, - { - "eventId": "42", - "eventTime": "2023-09-20T23:45:04.627238118Z", - "eventType": "WorkflowTaskCompleted", - "version": "1067", - "taskId": "212146927", - "workerMayIgnore": false, - "workflowTaskCompletedEventAttributes": { - "scheduledEventId": "40", - "startedEventId": "41", - "identity": "1@prod-airbyte-worker-85df7858dd-nkgkv", - "binaryChecksum": "", - "workerVersion": null, - "sdkMetadata": null, - "meteringMetadata": null - } - }, - { - "eventId": "43", - "eventTime": "2023-09-20T23:45:04.627258088Z", - "eventType": "TimerStarted", - "version": "1067", - "taskId": "212146928", - "workerMayIgnore": false, - "timerStartedEventAttributes": { - "timerId": "99bbfc14-0966-37ba-a93c-706b5443bd04", - "startToFireTimeout": "3434s", - "workflowTaskCompletedEventId": "42" - } - }, - { - "eventId": "44", - "eventTime": "2023-09-21T00:42:18.628425846Z", - "eventType": "TimerFired", - "version": "1067", - "taskId": "212228748", - "workerMayIgnore": false, - "timerFiredEventAttributes": { - "timerId": "99bbfc14-0966-37ba-a93c-706b5443bd04", - "startedEventId": "43" - } - }, - { - "eventId": "45", - "eventTime": "2023-09-21T00:42:18.628428736Z", - "eventType": "WorkflowTaskScheduled", - "version": "1067", - "taskId": "212228749", - "workerMayIgnore": false, - "workflowTaskScheduledEventAttributes": { - "taskQueue": { - "name": "1@prod-airbyte-worker-85df7858dd-nkgkv:19f7357e-6457-4548-883f-d15688bfb20a", - "kind": "Sticky", - "normalName": "CONNECTION_UPDATER" - }, - "startToCloseTimeout": "10s", - "attempt": 1 - } - }, - { - "eventId": "46", - "eventTime": "2023-09-21T00:42:18.637975938Z", - "eventType": "WorkflowTaskStarted", - "version": "1067", - "taskId": "212228753", - "workerMayIgnore": false, - "workflowTaskStartedEventAttributes": { - "scheduledEventId": "45", - "identity": "1@prod-airbyte-worker-85df7858dd-nkgkv", - "requestId": "68456e10-a11f-4482-b646-26a49f95b6dd", - "suggestContinueAsNew": false, - "historySizeBytes": "7066" - } - }, - { - "eventId": "47", - "eventTime": "2023-09-21T00:42:18.698188428Z", - "eventType": "WorkflowTaskCompleted", - "version": "1067", - "taskId": "212228764", - "workerMayIgnore": false, - "workflowTaskCompletedEventAttributes": { - "scheduledEventId": "45", - "startedEventId": "46", - "identity": "1@prod-airbyte-worker-85df7858dd-nkgkv", - "binaryChecksum": "", - "workerVersion": null, - "sdkMetadata": null, - "meteringMetadata": null - } - }, - { - "eventId": "48", - "eventTime": 
"2023-09-21T00:42:18.698227559Z", - "eventType": "ActivityTaskScheduled", - "version": "1067", - "taskId": "212228765", - "workerMayIgnore": false, - "activityTaskScheduledEventAttributes": { - "activityId": "2a8f9871-a067-383b-a55c-4ce6a8003bb3", - "activityType": { - "name": "HydrateRetryState" - }, - "taskQueue": { - "name": "CONNECTION_UPDATER", - "kind": "Normal", - "normalName": "" - }, - "header": { - "fields": {} - }, - "input": { - "payloads": [ - { - "metadata": { - "encoding": "anNvbi9wbGFpbg==" - }, - "data": "eyJqb2JJZCI6bnVsbCwiY29ubmVjdGlvbklkIjoiYmJmYjgxMmUtNDJjZi00M2UyLThiY2UtMTc1MDJhZTdjNGYwIn0=" - } - ] - }, - "scheduleToCloseTimeout": "0s", - "scheduleToStartTimeout": "0s", - "startToCloseTimeout": "120s", - "heartbeatTimeout": "30s", - "workflowTaskCompletedEventId": "47", - "retryPolicy": { - "initialInterval": "30s", - "backoffCoefficient": 2, - "maximumInterval": "600s", - "maximumAttempts": 5, - "nonRetryableErrorTypes": [] - }, - "useCompatibleVersion": false - } - }, - { - "eventId": "49", - "eventTime": "2023-09-21T00:42:18.698241629Z", - "eventType": "ActivityTaskStarted", - "version": "1067", - "taskId": "212228768", - "workerMayIgnore": false, - "activityTaskStartedEventAttributes": { - "scheduledEventId": "48", - "identity": "1@prod-airbyte-worker-85df7858dd-nkgkv", - "requestId": "4cc651e5-98ed-4155-a41a-2ade1b6a7812", - "attempt": 1, - "lastFailure": null - } - }, - { - "eventId": "50", - "eventTime": "2023-09-21T00:42:18.763398028Z", - "eventType": "ActivityTaskCompleted", - "version": "1067", - "taskId": "212228769", - "workerMayIgnore": false, - "activityTaskCompletedEventAttributes": { - "result": { - "payloads": [ - { - "metadata": { - "encoding": "anNvbi9wbGFpbg==" - }, - "data": "eyJtYW5hZ2VyIjp7ImNvbXBsZXRlRmFpbHVyZUJhY2tvZmZQb2xpY3kiOnsibWluSW50ZXJ2YWwiOjEwLjAwMDAwMDAwMCwibWF4SW50ZXJ2YWwiOjE4MDAuMDAwMDAwMDAwLCJiYXNlIjozfSwicGFydGlhbEZhaWx1cmVCYWNrb2ZmUG9saWN5IjpudWxsLCJzdWNjZXNzaXZlQ29tcGxldGVGYWlsdXJlTGltaXQiOjUsInRvdGFsQ29tcGxldGVGYWlsdXJlTGltaXQiOjUsInN1Y2Nlc3NpdmVQYXJ0aWFsRmFpbHVyZUxpbWl0IjoxMDAwLCJ0b3RhbFBhcnRpYWxGYWlsdXJlTGltaXQiOjEwLCJzdWNjZXNzaXZlQ29tcGxldGVGYWlsdXJlcyI6MCwidG90YWxDb21wbGV0ZUZhaWx1cmVzIjowLCJzdWNjZXNzaXZlUGFydGlhbEZhaWx1cmVzIjowLCJ0b3RhbFBhcnRpYWxGYWlsdXJlcyI6MCwiYmFja29mZiI6MC4wLCJiYWNrb2ZmU3RyaW5nIjoiMCBzZWNvbmRzIn19" - } - ] - }, - "scheduledEventId": "48", - "startedEventId": "49", - "identity": "1@prod-airbyte-worker-85df7858dd-nkgkv", - "workerVersion": null - } - }, - { - "eventId": "51", - "eventTime": "2023-09-21T00:42:18.763401938Z", - "eventType": "WorkflowTaskScheduled", - "version": "1067", - "taskId": "212228770", - "workerMayIgnore": false, - "workflowTaskScheduledEventAttributes": { - "taskQueue": { - "name": "1@prod-airbyte-worker-85df7858dd-nkgkv:19f7357e-6457-4548-883f-d15688bfb20a", - "kind": "Sticky", - "normalName": "CONNECTION_UPDATER" - }, - "startToCloseTimeout": "10s", - "attempt": 1 - } - }, - { - "eventId": "52", - "eventTime": "2023-09-21T00:42:18.771674098Z", - "eventType": "WorkflowTaskStarted", - "version": "1067", - "taskId": "212228774", - "workerMayIgnore": false, - "workflowTaskStartedEventAttributes": { - "scheduledEventId": "51", - "identity": "1@prod-airbyte-worker-85df7858dd-nkgkv", - "requestId": "46eeba16-6d78-46bb-b8ee-7418a891f206", - "suggestContinueAsNew": false, - "historySizeBytes": "8297" - } - }, - { - "eventId": "53", - "eventTime": "2023-09-21T00:42:18.824932182Z", - "eventType": "WorkflowTaskCompleted", - "version": "1067", - "taskId": "212228781", - "workerMayIgnore": 
false, - "workflowTaskCompletedEventAttributes": { - "scheduledEventId": "51", - "startedEventId": "52", - "identity": "1@prod-airbyte-worker-85df7858dd-nkgkv", - "binaryChecksum": "", - "workerVersion": null, - "sdkMetadata": null, - "meteringMetadata": null - } - }, - { - "eventId": "54", - "eventTime": "2023-09-21T00:42:18.824952312Z", - "eventType": "MarkerRecorded", - "version": "1067", - "taskId": "212228782", - "workerMayIgnore": false, - "markerRecordedEventAttributes": { - "markerName": "Version", - "details": { - "changeId": { - "payloads": [ - { - "metadata": { - "encoding": "anNvbi9wbGFpbg==" - }, - "data": "ImdldF9mZWF0dXJlX2ZsYWdzIg==" - } - ] - }, - "version": { - "payloads": [ - { - "metadata": { - "encoding": "anNvbi9wbGFpbg==" - }, - "data": "MQ==" - } - ] - } - }, - "workflowTaskCompletedEventId": "53", - "header": null, - "failure": null - } - }, - { - "eventId": "55", - "eventTime": "2023-09-21T00:42:18.824964602Z", - "eventType": "ActivityTaskScheduled", - "version": "1067", - "taskId": "212228783", - "workerMayIgnore": false, - "activityTaskScheduledEventAttributes": { - "activityId": "f65003ac-cc04-3bc0-9971-429b21c179d2", - "activityType": { - "name": "GetFeatureFlags" - }, - "taskQueue": { - "name": "CONNECTION_UPDATER", - "kind": "Normal", - "normalName": "" - }, - "header": { - "fields": {} - }, - "input": { - "payloads": [ - { - "metadata": { - "encoding": "anNvbi9wbGFpbg==" - }, - "data": "eyJjb25uZWN0aW9uSWQiOiJiYmZiODEyZS00MmNmLTQzZTItOGJjZS0xNzUwMmFlN2M0ZjAifQ==" - } - ] - }, - "scheduleToCloseTimeout": "0s", - "scheduleToStartTimeout": "0s", - "startToCloseTimeout": "120s", - "heartbeatTimeout": "30s", - "workflowTaskCompletedEventId": "53", - "retryPolicy": { - "initialInterval": "30s", - "backoffCoefficient": 2, - "maximumInterval": "600s", - "maximumAttempts": 5, - "nonRetryableErrorTypes": [] - }, - "useCompatibleVersion": false - } - }, - { - "eventId": "56", - "eventTime": "2023-09-21T00:42:18.824985203Z", - "eventType": "ActivityTaskStarted", - "version": "1067", - "taskId": "212228786", - "workerMayIgnore": false, - "activityTaskStartedEventAttributes": { - "scheduledEventId": "55", - "identity": "1@prod-airbyte-worker-85df7858dd-nkgkv", - "requestId": "06680c83-c00c-4445-942f-de7adc4ca491", - "attempt": 1, - "lastFailure": null - } - }, - { - "eventId": "57", - "eventTime": "2023-09-21T00:42:18.874148582Z", - "eventType": "ActivityTaskCompleted", - "version": "1067", - "taskId": "212228787", - "workerMayIgnore": false, - "activityTaskCompletedEventAttributes": { - "result": { - "payloads": [ - { - "metadata": { - "encoding": "anNvbi9wbGFpbg==" - }, - "data": "eyJmZWF0dXJlRmxhZ3MiOnt9fQ==" - } - ] - }, - "scheduledEventId": "55", - "startedEventId": "56", - "identity": "1@prod-airbyte-worker-85df7858dd-nkgkv", - "workerVersion": null - } - }, - { - "eventId": "58", - "eventTime": "2023-09-21T00:42:18.874152492Z", - "eventType": "WorkflowTaskScheduled", - "version": "1067", - "taskId": "212228788", - "workerMayIgnore": false, - "workflowTaskScheduledEventAttributes": { - "taskQueue": { - "name": "1@prod-airbyte-worker-85df7858dd-nkgkv:19f7357e-6457-4548-883f-d15688bfb20a", - "kind": "Sticky", - "normalName": "CONNECTION_UPDATER" - }, - "startToCloseTimeout": "10s", - "attempt": 1 - } - }, - { - "eventId": "59", - "eventTime": "2023-09-21T00:42:18.883367639Z", - "eventType": "WorkflowTaskStarted", - "version": "1067", - "taskId": "212228792", - "workerMayIgnore": false, - "workflowTaskStartedEventAttributes": { - "scheduledEventId": "58", - 
"identity": "1@prod-airbyte-worker-85df7858dd-nkgkv", - "requestId": "d1b3dfbc-8928-4cfa-9338-b5da0521bda6", - "suggestContinueAsNew": false, - "historySizeBytes": "9241" - } - }, - { - "eventId": "60", - "eventTime": "2023-09-21T00:42:18.936105993Z", - "eventType": "WorkflowTaskCompleted", - "version": "1067", - "taskId": "212228797", - "workerMayIgnore": false, - "workflowTaskCompletedEventAttributes": { - "scheduledEventId": "58", - "startedEventId": "59", - "identity": "1@prod-airbyte-worker-85df7858dd-nkgkv", - "binaryChecksum": "", - "workerVersion": null, - "sdkMetadata": null, - "meteringMetadata": null - } - }, - { - "eventId": "61", - "eventTime": "2023-09-21T00:42:18.936131914Z", - "eventType": "ActivityTaskScheduled", - "version": "1067", - "taskId": "212228798", - "workerMayIgnore": false, - "activityTaskScheduledEventAttributes": { - "activityId": "3f51ba69-145e-3100-932d-77f526178fda", - "activityType": { - "name": "CreateNewJob" - }, - "taskQueue": { - "name": "CONNECTION_UPDATER", - "kind": "Normal", - "normalName": "" - }, - "header": { - "fields": {} - }, - "input": { - "payloads": [ - { - "metadata": { - "encoding": "anNvbi9wbGFpbg==" - }, - "data": "eyJjb25uZWN0aW9uSWQiOiJiYmZiODEyZS00MmNmLTQzZTItOGJjZS0xNzUwMmFlN2M0ZjAifQ==" - } - ] - }, - "scheduleToCloseTimeout": "0s", - "scheduleToStartTimeout": "0s", - "startToCloseTimeout": "120s", - "heartbeatTimeout": "30s", - "workflowTaskCompletedEventId": "60", - "retryPolicy": { - "initialInterval": "30s", - "backoffCoefficient": 2, - "maximumInterval": "600s", - "maximumAttempts": 5, - "nonRetryableErrorTypes": [] - }, - "useCompatibleVersion": false - } - }, - { - "eventId": "62", - "eventTime": "2023-09-21T00:42:18.936144044Z", - "eventType": "ActivityTaskStarted", - "version": "1067", - "taskId": "212228812", - "workerMayIgnore": false, - "activityTaskStartedEventAttributes": { - "scheduledEventId": "61", - "identity": "1@prod-airbyte-worker-85df7858dd-nkgkv", - "requestId": "c1711dc3-ddcf-4d4a-9e89-25c365d7f482", - "attempt": 1, - "lastFailure": null - } - }, - { - "eventId": "63", - "eventTime": "2023-09-21T00:42:19.187681926Z", - "eventType": "ActivityTaskCompleted", - "version": "1067", - "taskId": "212228813", - "workerMayIgnore": false, - "activityTaskCompletedEventAttributes": { - "result": { - "payloads": [ - { - "metadata": { - "encoding": "anNvbi9wbGFpbg==" - }, - "data": "eyJqb2JJZCI6NDc3NzcwOX0=" - } - ] - }, - "scheduledEventId": "61", - "startedEventId": "62", - "identity": "1@prod-airbyte-worker-85df7858dd-nkgkv", - "workerVersion": null - } - }, - { - "eventId": "64", - "eventTime": "2023-09-21T00:42:19.187686976Z", - "eventType": "WorkflowTaskScheduled", - "version": "1067", - "taskId": "212228814", - "workerMayIgnore": false, - "workflowTaskScheduledEventAttributes": { - "taskQueue": { - "name": "1@prod-airbyte-worker-85df7858dd-nkgkv:19f7357e-6457-4548-883f-d15688bfb20a", - "kind": "Sticky", - "normalName": "CONNECTION_UPDATER" - }, - "startToCloseTimeout": "10s", - "attempt": 1 - } - }, - { - "eventId": "65", - "eventTime": "2023-09-21T00:42:19.198853318Z", - "eventType": "WorkflowTaskStarted", - "version": "1067", - "taskId": "212228818", - "workerMayIgnore": false, - "workflowTaskStartedEventAttributes": { - "scheduledEventId": "64", - "identity": "1@prod-airbyte-worker-85df7858dd-nkgkv", - "requestId": "bcb1c2d2-28fc-49e8-8720-dc9c76b6ae15", - "suggestContinueAsNew": false, - "historySizeBytes": "10032" - } - }, - { - "eventId": "66", - "eventTime": "2023-09-21T00:42:19.253040549Z", - "eventType": 
"WorkflowTaskCompleted", - "version": "1067", - "taskId": "212228823", - "workerMayIgnore": false, - "workflowTaskCompletedEventAttributes": { - "scheduledEventId": "64", - "startedEventId": "65", - "identity": "1@prod-airbyte-worker-85df7858dd-nkgkv", - "binaryChecksum": "", - "workerVersion": null, - "sdkMetadata": null, - "meteringMetadata": null - } - }, - { - "eventId": "67", - "eventTime": "2023-09-21T00:42:19.253070939Z", - "eventType": "ActivityTaskScheduled", - "version": "1067", - "taskId": "212228824", - "workerMayIgnore": false, - "activityTaskScheduledEventAttributes": { - "activityId": "e8c985a7-f092-333d-ae4e-227bb3043882", - "activityType": { - "name": "CreateNewAttemptNumber" - }, - "taskQueue": { - "name": "CONNECTION_UPDATER", - "kind": "Normal", - "normalName": "" - }, - "header": { - "fields": {} - }, - "input": { - "payloads": [ - { - "metadata": { - "encoding": "anNvbi9wbGFpbg==" - }, - "data": "eyJqb2JJZCI6NDc3NzcwOX0=" - } - ] - }, - "scheduleToCloseTimeout": "0s", - "scheduleToStartTimeout": "0s", - "startToCloseTimeout": "120s", - "heartbeatTimeout": "30s", - "workflowTaskCompletedEventId": "66", - "retryPolicy": { - "initialInterval": "30s", - "backoffCoefficient": 2, - "maximumInterval": "600s", - "maximumAttempts": 5, - "nonRetryableErrorTypes": [] - }, - "useCompatibleVersion": false - } - }, - { - "eventId": "68", - "eventTime": "2023-09-21T00:42:19.253085529Z", - "eventType": "ActivityTaskStarted", - "version": "1067", - "taskId": "212228837", - "workerMayIgnore": false, - "activityTaskStartedEventAttributes": { - "scheduledEventId": "67", - "identity": "1@prod-airbyte-worker-85df7858dd-nkgkv", - "requestId": "6f534f02-4ec2-4c8b-beb7-fa5645c28ac0", - "attempt": 1, - "lastFailure": null - } - }, - { - "eventId": "69", - "eventTime": "2023-09-21T00:42:19.324418580Z", - "eventType": "ActivityTaskCompleted", - "version": "1067", - "taskId": "212228838", - "workerMayIgnore": false, - "activityTaskCompletedEventAttributes": { - "result": { - "payloads": [ - { - "metadata": { - "encoding": "anNvbi9wbGFpbg==" - }, - "data": "eyJhdHRlbXB0TnVtYmVyIjowfQ==" - } - ] - }, - "scheduledEventId": "67", - "startedEventId": "68", - "identity": "1@prod-airbyte-worker-85df7858dd-nkgkv", - "workerVersion": null - } - }, - { - "eventId": "70", - "eventTime": "2023-09-21T00:42:19.324422630Z", - "eventType": "WorkflowTaskScheduled", - "version": "1067", - "taskId": "212228839", - "workerMayIgnore": false, - "workflowTaskScheduledEventAttributes": { - "taskQueue": { - "name": "1@prod-airbyte-worker-85df7858dd-nkgkv:19f7357e-6457-4548-883f-d15688bfb20a", - "kind": "Sticky", - "normalName": "CONNECTION_UPDATER" - }, - "startToCloseTimeout": "10s", - "attempt": 1 - } - }, - { - "eventId": "71", - "eventTime": "2023-09-21T00:42:19.333047946Z", - "eventType": "WorkflowTaskStarted", - "version": "1067", - "taskId": "212228843", - "workerMayIgnore": false, - "workflowTaskStartedEventAttributes": { - "scheduledEventId": "70", - "identity": "1@prod-airbyte-worker-85df7858dd-nkgkv", - "requestId": "d646e55f-2d70-4bc8-8c40-97be63bf2613", - "suggestContinueAsNew": false, - "historySizeBytes": "10795" - } - }, - { - "eventId": "72", - "eventTime": "2023-09-21T00:42:19.389519018Z", - "eventType": "WorkflowTaskCompleted", - "version": "1067", - "taskId": "212228850", - "workerMayIgnore": false, - "workflowTaskCompletedEventAttributes": { - "scheduledEventId": "70", - "startedEventId": "71", - "identity": "1@prod-airbyte-worker-85df7858dd-nkgkv", - "binaryChecksum": "", - "workerVersion": null, - 
"sdkMetadata": null, - "meteringMetadata": null - } - }, - { - "eventId": "73", - "eventTime": "2023-09-21T00:42:19.389540388Z", - "eventType": "MarkerRecorded", - "version": "1067", - "taskId": "212228851", - "workerMayIgnore": false, - "markerRecordedEventAttributes": { - "markerName": "Version", - "details": { - "changeId": { - "payloads": [ - { - "metadata": { - "encoding": "anNvbi9wbGFpbg==" - }, - "data": "ImdlbmVyYXRlX2NoZWNrX2lucHV0Ig==" - } - ] - }, - "version": { - "payloads": [ - { - "metadata": { - "encoding": "anNvbi9wbGFpbg==" - }, - "data": "MQ==" - } - ] - } - }, - "workflowTaskCompletedEventId": "72", - "header": null, - "failure": null - } - }, - { - "eventId": "74", - "eventTime": "2023-09-21T00:42:19.389553299Z", - "eventType": "ActivityTaskScheduled", - "version": "1067", - "taskId": "212228852", - "workerMayIgnore": false, - "activityTaskScheduledEventAttributes": { - "activityId": "cff0bb34-e416-380f-870f-3e22d13143f1", - "activityType": { - "name": "ReportJobStart" - }, - "taskQueue": { - "name": "CONNECTION_UPDATER", - "kind": "Normal", - "normalName": "" - }, - "header": { - "fields": {} - }, - "input": { - "payloads": [ - { - "metadata": { - "encoding": "anNvbi9wbGFpbg==" - }, - "data": "eyJqb2JJZCI6NDc3NzcwOSwiY29ubmVjdGlvbklkIjoiYmJmYjgxMmUtNDJjZi00M2UyLThiY2UtMTc1MDJhZTdjNGYwIn0=" - } - ] - }, - "scheduleToCloseTimeout": "0s", - "scheduleToStartTimeout": "0s", - "startToCloseTimeout": "120s", - "heartbeatTimeout": "30s", - "workflowTaskCompletedEventId": "72", - "retryPolicy": { - "initialInterval": "30s", - "backoffCoefficient": 2, - "maximumInterval": "600s", - "maximumAttempts": 5, - "nonRetryableErrorTypes": [] - }, - "useCompatibleVersion": false - } - }, - { - "eventId": "75", - "eventTime": "2023-09-21T00:42:19.389567669Z", - "eventType": "ActivityTaskStarted", - "version": "1067", - "taskId": "212228855", - "workerMayIgnore": false, - "activityTaskStartedEventAttributes": { - "scheduledEventId": "74", - "identity": "1@prod-airbyte-worker-85df7858dd-nkgkv", - "requestId": "7b64b5a2-085e-434d-9299-5547d551f890", - "attempt": 1, - "lastFailure": null - } - }, - { - "eventId": "76", - "eventTime": "2023-09-21T00:42:19.480894292Z", - "eventType": "ActivityTaskCompleted", - "version": "1067", - "taskId": "212228856", - "workerMayIgnore": false, - "activityTaskCompletedEventAttributes": { - "result": null, - "scheduledEventId": "74", - "startedEventId": "75", - "identity": "1@prod-airbyte-worker-85df7858dd-nkgkv", - "workerVersion": null - } - }, - { - "eventId": "77", - "eventTime": "2023-09-21T00:42:19.480898912Z", - "eventType": "WorkflowTaskScheduled", - "version": "1067", - "taskId": "212228857", - "workerMayIgnore": false, - "workflowTaskScheduledEventAttributes": { - "taskQueue": { - "name": "1@prod-airbyte-worker-85df7858dd-nkgkv:19f7357e-6457-4548-883f-d15688bfb20a", - "kind": "Sticky", - "normalName": "CONNECTION_UPDATER" - }, - "startToCloseTimeout": "10s", - "attempt": 1 - } - }, - { - "eventId": "78", - "eventTime": "2023-09-21T00:42:19.489521958Z", - "eventType": "WorkflowTaskStarted", - "version": "1067", - "taskId": "212228861", - "workerMayIgnore": false, - "workflowTaskStartedEventAttributes": { - "scheduledEventId": "77", - "identity": "1@prod-airbyte-worker-85df7858dd-nkgkv", - "requestId": "2e02d398-0317-4c75-b83e-466d143c6af7", - "suggestContinueAsNew": false, - "historySizeBytes": "11708" - } - }, - { - "eventId": "79", - "eventTime": "2023-09-21T00:42:19.542200882Z", - "eventType": "WorkflowTaskCompleted", - "version": "1067", - 
"taskId": "212228866", - "workerMayIgnore": false, - "workflowTaskCompletedEventAttributes": { - "scheduledEventId": "77", - "startedEventId": "78", - "identity": "1@prod-airbyte-worker-85df7858dd-nkgkv", - "binaryChecksum": "", - "workerVersion": null, - "sdkMetadata": null, - "meteringMetadata": null - } - }, - { - "eventId": "80", - "eventTime": "2023-09-21T00:42:19.542229492Z", - "eventType": "ActivityTaskScheduled", - "version": "1067", - "taskId": "212228867", - "workerMayIgnore": false, - "activityTaskScheduledEventAttributes": { - "activityId": "050c8380-8824-3a98-b1e7-19d0c526670e", - "activityType": { - "name": "IsLastJobOrAttemptFailure" - }, - "taskQueue": { - "name": "CONNECTION_UPDATER", - "kind": "Normal", - "normalName": "" - }, - "header": { - "fields": {} - }, - "input": { - "payloads": [ - { - "metadata": { - "encoding": "anNvbi9wbGFpbg==" - }, - "data": "eyJqb2JJZCI6NDc3NzcwOSwiYXR0ZW1wdElkIjowLCJjb25uZWN0aW9uSWQiOiJiYmZiODEyZS00MmNmLTQzZTItOGJjZS0xNzUwMmFlN2M0ZjAifQ==" - } - ] - }, - "scheduleToCloseTimeout": "0s", - "scheduleToStartTimeout": "0s", - "startToCloseTimeout": "120s", - "heartbeatTimeout": "30s", - "workflowTaskCompletedEventId": "79", - "retryPolicy": { - "initialInterval": "30s", - "backoffCoefficient": 2, - "maximumInterval": "600s", - "maximumAttempts": 5, - "nonRetryableErrorTypes": [] - }, - "useCompatibleVersion": false - } - }, - { - "eventId": "81", - "eventTime": "2023-09-21T00:42:19.542243072Z", - "eventType": "ActivityTaskStarted", - "version": "1067", - "taskId": "212228870", - "workerMayIgnore": false, - "activityTaskStartedEventAttributes": { - "scheduledEventId": "80", - "identity": "1@prod-airbyte-worker-85df7858dd-nkgkv", - "requestId": "ec7d0ce9-6c0c-4286-b971-7ced09923a99", - "attempt": 1, - "lastFailure": null - } - }, - { - "eventId": "82", - "eventTime": "2023-09-21T00:42:19.616413515Z", - "eventType": "ActivityTaskCompleted", - "version": "1067", - "taskId": "212228871", - "workerMayIgnore": false, - "activityTaskCompletedEventAttributes": { - "result": { - "payloads": [ - { - "metadata": { - "encoding": "anNvbi9wbGFpbg==" - }, - "data": "ZmFsc2U=" - } - ] - }, - "scheduledEventId": "80", - "startedEventId": "81", - "identity": "1@prod-airbyte-worker-85df7858dd-nkgkv", - "workerVersion": null - } - }, - { - "eventId": "83", - "eventTime": "2023-09-21T00:42:19.616417125Z", - "eventType": "WorkflowTaskScheduled", - "version": "1067", - "taskId": "212228872", - "workerMayIgnore": false, - "workflowTaskScheduledEventAttributes": { - "taskQueue": { - "name": "1@prod-airbyte-worker-85df7858dd-nkgkv:19f7357e-6457-4548-883f-d15688bfb20a", - "kind": "Sticky", - "normalName": "CONNECTION_UPDATER" - }, - "startToCloseTimeout": "10s", - "attempt": 1 - } - }, - { - "eventId": "84", - "eventTime": "2023-09-21T00:42:19.623475093Z", - "eventType": "WorkflowTaskStarted", - "version": "1067", - "taskId": "212228876", - "workerMayIgnore": false, - "workflowTaskStartedEventAttributes": { - "scheduledEventId": "83", - "identity": "1@prod-airbyte-worker-85df7858dd-nkgkv", - "requestId": "80f47462-ef76-4956-ab8c-075f3e21e0ed", - "suggestContinueAsNew": false, - "historySizeBytes": "12532" - } - }, - { - "eventId": "85", - "eventTime": "2023-09-21T00:42:19.683783005Z", - "eventType": "WorkflowTaskCompleted", - "version": "1067", - "taskId": "212228890", - "workerMayIgnore": false, - "workflowTaskCompletedEventAttributes": { - "scheduledEventId": "83", - "startedEventId": "84", - "identity": "1@prod-airbyte-worker-85df7858dd-nkgkv", - "binaryChecksum": "", 
- "workerVersion": null, - "sdkMetadata": null, - "meteringMetadata": null - } - }, - { - "eventId": "86", - "eventTime": "2023-09-21T00:42:19.683811265Z", - "eventType": "ActivityTaskScheduled", - "version": "1067", - "taskId": "212228891", - "workerMayIgnore": false, - "activityTaskScheduledEventAttributes": { - "activityId": "db5c17c4-4041-34db-a002-8cd8ce14c2a2", - "activityType": { - "name": "GetSyncWorkflowInputWithAttemptNumber" - }, - "taskQueue": { - "name": "CONNECTION_UPDATER", - "kind": "Normal", - "normalName": "" - }, - "header": { - "fields": {} - }, - "input": { - "payloads": [ - { - "metadata": { - "encoding": "anNvbi9wbGFpbg==" - }, - "data": "eyJhdHRlbXB0TnVtYmVyIjowLCJqb2JJZCI6NDc3NzcwOX0=" - } - ] - }, - "scheduleToCloseTimeout": "0s", - "scheduleToStartTimeout": "0s", - "startToCloseTimeout": "120s", - "heartbeatTimeout": "30s", - "workflowTaskCompletedEventId": "85", - "retryPolicy": { - "initialInterval": "30s", - "backoffCoefficient": 2, - "maximumInterval": "600s", - "maximumAttempts": 5, - "nonRetryableErrorTypes": [] - }, - "useCompatibleVersion": false - } - }, - { - "eventId": "87", - "eventTime": "2023-09-21T00:42:19.683823415Z", - "eventType": "ActivityTaskStarted", - "version": "1067", - "taskId": "212228896", - "workerMayIgnore": false, - "activityTaskStartedEventAttributes": { - "scheduledEventId": "86", - "identity": "1@prod-airbyte-worker-85df7858dd-nkgkv", - "requestId": "fac1a283-7f96-4b53-b10a-bec726393bd7", - "attempt": 1, - "lastFailure": null - } - }, - { - "eventId": "88", - "eventTime": "2023-09-21T00:42:19.814080633Z", - "eventType": "ActivityTaskCompleted", - "version": "1067", - "taskId": "212228897", - "workerMayIgnore": false, - "activityTaskCompletedEventAttributes": { - "result": { - "payloads": [ - { - "metadata": { - "encoding": "anNvbi9wbGFpbg==" - }, - "data": 
"{"jobRunConfig":{"jobId":"4777709","attemptId":0},"sourceLauncherConfig":{"jobId":"4777709","attemptId":0,"connectionId":"bbfb812e-42cf-43e2-8bce-17502ae7c4f0","workspaceId":"db9a8026-2d04-44c6-8371-915fcad1e2ef","dockerImage":"airbyte/source-google-sheets:0.3.7","supportsDbt":false,"protocolVersion":{"version":"0.2.0"},"isCustomConnector":false,"allowedHosts":{"hosts":["*.googleapis.com","*.datadoghq.com","*.datadoghq.eu","*.sentry.io"]}},"destinationLauncherConfig":{"jobId":"4777709","attemptId":0,"connectionId":"bbfb812e-42cf-43e2-8bce-17502ae7c4f0","workspaceId":"db9a8026-2d04-44c6-8371-915fcad1e2ef","dockerImage":"airbyte/destination-bigquery:1.10.2","normalizationDockerImage":"airbyte/normalization:0.4.3","supportsDbt":true,"normalizationIntegrationType":"bigquery","protocolVersion":{"version":"0.2.0"},"isCustomConnector":false,"additionalEnvironmentVariables":{"NORMALIZATION_TECHNIQUE":"LEGACY"}},"syncInput":{"namespaceDefinition":"customformat","namespaceFormat":"db_spyne","prefix":"","sourceId":"090d74c5-3900-4197-ad1a-278d67daa631","destinationId":"af09bfe2-4cd3-4df6-a58f-011a9633b8f7","sourceConfiguration":{"credentials":{"auth_type":"Client","client_id":"332657581931-0g7mb0kb09cc2g5kkckjne46g45rtoc8.apps.googleusercontent.com","client_secret":"kyKNGpvQekmkML_CniA-Cbd8","refresh_token":{"_secret":"airbyte_workspace_db9a8026-2d04-44c6-8371-915fcad1e2ef_secret_16aac82c-1942-4e53-b87c-c02a3859a96b_v1"}},"row_batch_size":200,"spreadsheet_id":"https://docs.google.com/spreadsheets/d/13Z-A8wPoWWm8mWHKTPCQ9feaVTdBYP86YCU-GS0fUkg/edit?usp=sharing"},"destinationConfiguration":{"dataset_id":"airbyte","project_id":"spynedatastaging-v2-383509","loading_method":{"method":"Standard"},"credentials_json":{"_secret":"airbyte_workspace_db9a8026-2d04-44c6-8371-915fcad1e2ef_secret_30b5f38a-9cc2-4148-a173-bde0a8fe8b41_v1"},"dataset_location":"EU","transformation_priority":"interactive","big_query_client_buffer_size_mb":15},"operationSequence":[{"operationId":"88263471-a30c-475b-91f6-33437ea3d0c0","name":"Normalization","operatorType":"normalization","operatorNormalization":{"option":"basic"},"tombstone":false,"workspaceId":"db9a8026-2d04-44c6-8371-915fcad1e2ef"}],"webhookOperationConfigs":{},"catalog":{"streams":[{"stream":{"name":"additional_integrations","json_schema":{"$schema":"http://json-schema.org/draft-07/schema#","type":"object","properties":{"workspace_id":{"type":"string"},"connector_id":{"type":"string"},"id":{"type":"string"},"status":{"type":"string"}}},"supported_sync_modes":["full_refresh"],"default_cursor_field":[],"source_defined_primary_key":[]},"sync_mode":"full_refresh","cursor_field":[],"destination_sync_mode":"overwrite","primary_key":[]},{"stream":{"name":"campaign_matching_conf","json_schema":{"$schema":"http://json-schema.org/draft-07/schema#","type":"object","properties":{"workspace_id":{"type":"string"},"customer":{"type":"string"}}},"supported_sync_modes":["full_refresh"],"default_cursor_field":[],"source_defined_primary_key":[]},"sync_mode":"full_refresh","cursor_field":[],"destination_sync_mode":"overwrite","primary_key":[]},{"stream":{"name":"ad_matching_conf","json_schema":{"$schema":"http://json-schema.org/draft-07/schema#","type":"object","properties":{"workspace_id":{"type":"string"},"customer":{"type":"string"}}},"supported_sync_modes":["full_refresh"],"default_cursor_field":[],"source_defined_primary_key":[]},"sync_mode":"full_refresh","cursor_field":[],"destination_sync_mode":"overwrite","primary_key":[]},{"stream":{"name":"filtered_in_accounts","json_schema":{"
$schema":"http://json-schema.org/draft-07/schema#","type":"object","properties":{"workspace_id":{"type":"string"},"integration_id":{"type":"string"},"included_account_id":{"type":"string"},"client":{"type":"string"},"source_name":{"type":"string"}}},"supported_sync_modes":["full_refresh"],"default_cursor_field":[],"source_defined_primary_key":[]},"sync_mode":"full_refresh","cursor_field":[],"destination_sync_mode":"overwrite","primary_key":[]},{"stream":{"name":"filtered_out_accounts","json_schema":{"$schema":"http://json-schema.org/draft-07/schema#","type":"object","properties":{"workspace_id":{"type":"string"},"belongs_to":{"type":"string"},"integration_id":{"type":"string"},"excluded_account_id":{"type":"string"},"client":{"type":"string"},"source_name":{"type":"string"}}},"supported_sync_modes":["full_refresh"],"default_cursor_field":[],"source_defined_primary_key":[]},"sync_mode":"full_refresh","cursor_field":[],"destination_sync_mode":"overwrite","primary_key":[]},{"stream":{"name":"filtered_out_integrations","json_schema":{"$schema":"http://json-schema.org/draft-07/schema#","type":"object","properties":{"integration_id":{"type":"string"},"status":{"type":"string"}}},"supported_sync_modes":["full_refresh"],"default_cursor_field":[],"source_defined_primary_key":[]},"sync_mode":"full_refresh","cursor_field":[],"destination_sync_mode":"overwrite","primary_key":[]},{"stream":{"name":"mockup_workspace_id","json_schema":{"$schema":"http://json-schema.org/draft-07/schema#","type":"object","properties":{"workspace_id":{"type":"string"},"status":{"type":"string"}}},"supported_sync_modes":["full_refresh"],"default_cursor_field":[],"source_defined_primary_key":[]},"sync_mode":"full_refresh","cursor_field":[],"destination_sync_mode":"overwrite","primary_key":[]}]},"state":{"state":{}},"syncResourceRequirements":{"configKey":{"variant":"default","subType":"file"},"destination":{"cpu_request":"0.5","cpu_limit":"1","memory_request":"1Gi","memory_limit":"1Gi"},"destinationStdErr":{"cpu_request":"0.01","cpu_limit":"0.5","memory_request":"25Mi","memory_limit":"50Mi"},"destinationStdIn":{"cpu_request":"0.5","cpu_limit":"1","memory_request":"25Mi","memory_limit":"50Mi"},"destinationStdOut":{"cpu_request":"0.01","cpu_limit":"0.5","memory_request":"25Mi","memory_limit":"50Mi"},"orchestrator":{"cpu_request":"0.5","cpu_limit":"1","memory_request":"2Gi","memory_limit":"2Gi"},"source":{"cpu_request":"0.5","cpu_limit":"1","memory_request":"1Gi","memory_limit":"2Gi"},"sourceStdErr":{"cpu_request":"0.01","cpu_limit":"0.5","memory_request":"25Mi","memory_limit":"50Mi"},"sourceStdOut":{"cpu_request":"0.5","cpu_limit":"1","memory_request":"25Mi","memory_limit":"50Mi"},"heartbeat":{"cpu_request":"0.05","cpu_limit":"0.2","memory_request":"25Mi","memory_limit":"50Mi"}},"workspaceId":"db9a8026-2d04-44c6-8371-915fcad1e2ef","connectionId":"bbfb812e-42cf-43e2-8bce-17502ae7c4f0","normalizeInDestinationContainer":true,"isReset":false}}" - } - ] - }, - "scheduledEventId": "86", - "startedEventId": "87", - "identity": "1@prod-airbyte-worker-85df7858dd-nkgkv", - "workerVersion": null - } - }, - { - "eventId": "89", - "eventTime": "2023-09-21T00:42:19.814086833Z", - "eventType": "WorkflowTaskScheduled", - "version": "1067", - "taskId": "212228898", - "workerMayIgnore": false, - "workflowTaskScheduledEventAttributes": { - "taskQueue": { - "name": "1@prod-airbyte-worker-85df7858dd-nkgkv:19f7357e-6457-4548-883f-d15688bfb20a", - "kind": "Sticky", - "normalName": "CONNECTION_UPDATER" - }, - "startToCloseTimeout": "10s", - "attempt": 
1 - } - }, - { - "eventId": "90", - "eventTime": "2023-09-21T00:42:19.824909829Z", - "eventType": "WorkflowTaskStarted", - "version": "1067", - "taskId": "212228902", - "workerMayIgnore": false, - "workflowTaskStartedEventAttributes": { - "scheduledEventId": "89", - "identity": "1@prod-airbyte-worker-85df7858dd-nkgkv", - "requestId": "d6ce7e2a-dca6-478d-8d6b-6c703defe66c", - "suggestContinueAsNew": false, - "historySizeBytes": "19826" - } - }, - { - "eventId": "91", - "eventTime": "2023-09-21T00:42:19.878631991Z", - "eventType": "WorkflowTaskCompleted", - "version": "1067", - "taskId": "212228907", - "workerMayIgnore": false, - "workflowTaskCompletedEventAttributes": { - "scheduledEventId": "89", - "startedEventId": "90", - "identity": "1@prod-airbyte-worker-85df7858dd-nkgkv", - "binaryChecksum": "", - "workerVersion": null, - "sdkMetadata": null, - "meteringMetadata": null - } - }, - { - "eventId": "92", - "eventTime": "2023-09-21T00:42:19.878667962Z", - "eventType": "MarkerRecorded", - "version": "1067", - "taskId": "212228908", - "workerMayIgnore": false, - "markerRecordedEventAttributes": { - "markerName": "Version", - "details": { - "changeId": { - "payloads": [ - { - "metadata": { - "encoding": "anNvbi9wbGFpbg==" - }, - "data": "InN5bmNfdGFza19xdWV1ZV9yb3V0ZV9yZW5hbWUi" - } - ] - }, - "version": { - "payloads": [ - { - "metadata": { - "encoding": "anNvbi9wbGFpbg==" - }, - "data": "MQ==" - } - ] - } - }, - "workflowTaskCompletedEventId": "91", - "header": null, - "failure": null - } - }, - { - "eventId": "93", - "eventTime": "2023-09-21T00:42:19.878686742Z", - "eventType": "ActivityTaskScheduled", - "version": "1067", - "taskId": "212228909", - "workerMayIgnore": false, - "activityTaskScheduledEventAttributes": { - "activityId": "10b54525-cabc-3418-903d-d7b5079f4011", - "activityType": { - "name": "RouteToSync" - }, - "taskQueue": { - "name": "CONNECTION_UPDATER", - "kind": "Normal", - "normalName": "" - }, - "header": { - "fields": {} - }, - "input": { - "payloads": [ - { - "metadata": { - "encoding": "anNvbi9wbGFpbg==" - }, - "data": "eyJjb25uZWN0aW9uSWQiOiJiYmZiODEyZS00MmNmLTQzZTItOGJjZS0xNzUwMmFlN2M0ZjAifQ==" - } - ] - }, - "scheduleToCloseTimeout": "0s", - "scheduleToStartTimeout": "0s", - "startToCloseTimeout": "120s", - "heartbeatTimeout": "30s", - "workflowTaskCompletedEventId": "91", - "retryPolicy": { - "initialInterval": "30s", - "backoffCoefficient": 2, - "maximumInterval": "600s", - "maximumAttempts": 5, - "nonRetryableErrorTypes": [] - }, - "useCompatibleVersion": false - } - }, - { - "eventId": "94", - "eventTime": "2023-09-21T00:42:19.878707413Z", - "eventType": "ActivityTaskStarted", - "version": "1067", - "taskId": "212228912", - "workerMayIgnore": false, - "activityTaskStartedEventAttributes": { - "scheduledEventId": "93", - "identity": "1@prod-airbyte-worker-85df7858dd-nkgkv", - "requestId": "191a2a37-3528-43d6-acd1-c6b4e0ff4197", - "attempt": 1, - "lastFailure": null - } - }, - { - "eventId": "95", - "eventTime": "2023-09-21T00:42:19.931929745Z", - "eventType": "ActivityTaskCompleted", - "version": "1067", - "taskId": "212228913", - "workerMayIgnore": false, - "activityTaskCompletedEventAttributes": { - "result": { - "payloads": [ - { - "metadata": { - "encoding": "anNvbi9wbGFpbg==" - }, - "data": "eyJ0YXNrUXVldWUiOiJHQ1BfVVNfRVhQQU5EX1NZTkMifQ==" - } - ] - }, - "scheduledEventId": "93", - "startedEventId": "94", - "identity": "1@prod-airbyte-worker-85df7858dd-nkgkv", - "workerVersion": null - } - }, - { - "eventId": "96", - "eventTime": 
"2023-09-21T00:42:19.931933986Z", - "eventType": "WorkflowTaskScheduled", - "version": "1067", - "taskId": "212228914", - "workerMayIgnore": false, - "workflowTaskScheduledEventAttributes": { - "taskQueue": { - "name": "1@prod-airbyte-worker-85df7858dd-nkgkv:19f7357e-6457-4548-883f-d15688bfb20a", - "kind": "Sticky", - "normalName": "CONNECTION_UPDATER" - }, - "startToCloseTimeout": "10s", - "attempt": 1 - } - }, - { - "eventId": "97", - "eventTime": "2023-09-21T00:42:19.940540041Z", - "eventType": "WorkflowTaskStarted", - "version": "1067", - "taskId": "212228918", - "workerMayIgnore": false, - "workflowTaskStartedEventAttributes": { - "scheduledEventId": "96", - "identity": "1@prod-airbyte-worker-85df7858dd-nkgkv", - "requestId": "53bab760-6c27-4c09-b777-a4d2163ec976", - "suggestContinueAsNew": false, - "historySizeBytes": "20794" - } - }, - { - "eventId": "98", - "eventTime": "2023-09-21T00:42:19.995085008Z", - "eventType": "WorkflowTaskCompleted", - "version": "1067", - "taskId": "212228922", - "workerMayIgnore": false, - "workflowTaskCompletedEventAttributes": { - "scheduledEventId": "96", - "startedEventId": "97", - "identity": "1@prod-airbyte-worker-85df7858dd-nkgkv", - "binaryChecksum": "", - "workerVersion": null, - "sdkMetadata": null, - "meteringMetadata": null - } - }, - { - "eventId": "99", - "eventTime": "2023-09-21T00:42:19.995124749Z", - "eventType": "StartChildWorkflowExecutionInitiated", - "version": "1067", - "taskId": "212228923", - "workerMayIgnore": false, - "startChildWorkflowExecutionInitiatedEventAttributes": { - "namespace": "prod.ebc2e", - "namespaceId": "8741d553-ca6f-4c06-b896-f260688e5dc1", - "workflowId": "sync_4777709", - "workflowType": { - "name": "SyncWorkflow" - }, - "taskQueue": { - "name": "GCP_US_EXPAND_SYNC", - "kind": "Unspecified", - "normalName": "" - }, - "input": { - "payloads": [ - { - "metadata": { - "encoding": "anNvbi9wbGFpbg==" - }, - "data": "eyJqb2JJZCI6IjQ3Nzc3MDkiLCJhdHRlbXB0SWQiOjB9" - }, - { - "metadata": { - "encoding": "anNvbi9wbGFpbg==" - }, - "data": "eyJqb2JJZCI6IjQ3Nzc3MDkiLCJhdHRlbXB0SWQiOjAsImNvbm5lY3Rpb25JZCI6ImJiZmI4MTJlLTQyY2YtNDNlMi04YmNlLTE3NTAyYWU3YzRmMCIsIndvcmtzcGFjZUlkIjoiZGI5YTgwMjYtMmQwNC00NGM2LTgzNzEtOTE1ZmNhZDFlMmVmIiwiZG9ja2VySW1hZ2UiOiJhaXJieXRlL3NvdXJjZS1nb29nbGUtc2hlZXRzOjAuMy43Iiwic3VwcG9ydHNEYnQiOmZhbHNlLCJwcm90b2NvbFZlcnNpb24iOnsidmVyc2lvbiI6IjAuMi4wIn0sImlzQ3VzdG9tQ29ubmVjdG9yIjpmYWxzZSwiYWxsb3dlZEhvc3RzIjp7Imhvc3RzIjpbIiouZ29vZ2xlYXBpcy5jb20iLCIqLmRhdGFkb2docS5jb20iLCIqLmRhdGFkb2docS5ldSIsIiouc2VudHJ5LmlvIl19fQ==" - }, - { - "metadata": { - "encoding": "anNvbi9wbGFpbg==" - }, - "data": "eyJqb2JJZCI6IjQ3Nzc3MDkiLCJhdHRlbXB0SWQiOjAsImNvbm5lY3Rpb25JZCI6ImJiZmI4MTJlLTQyY2YtNDNlMi04YmNlLTE3NTAyYWU3YzRmMCIsIndvcmtzcGFjZUlkIjoiZGI5YTgwMjYtMmQwNC00NGM2LTgzNzEtOTE1ZmNhZDFlMmVmIiwiZG9ja2VySW1hZ2UiOiJhaXJieXRlL2Rlc3RpbmF0aW9uLWJpZ3F1ZXJ5OjEuMTAuMiIsIm5vcm1hbGl6YXRpb25Eb2NrZXJJbWFnZSI6ImFpcmJ5dGUvbm9ybWFsaXphdGlvbjowLjQuMyIsInN1cHBvcnRzRGJ0Ijp0cnVlLCJub3JtYWxpemF0aW9uSW50ZWdyYXRpb25UeXBlIjoiYmlncXVlcnkiLCJwcm90b2NvbFZlcnNpb24iOnsidmVyc2lvbiI6IjAuMi4wIn0sImlzQ3VzdG9tQ29ubmVjdG9yIjpmYWxzZSwiYWRkaXRpb25hbEVudmlyb25tZW50VmFyaWFibGVzIjp7Ik5PUk1BTElaQVRJT05fVEVDSE5JUVVFIjoiTEVHQUNZIn19" - }, - { - "metadata": { - "encoding": "anNvbi9wbGFpbg==" - }, - "data": 
"eyJuYW1lc3BhY2VEZWZpbml0aW9uIjoiY3VzdG9tZm9ybWF0IiwibmFtZXNwYWNlRm9ybWF0IjoiZGJfc3B5bmUiLCJwcmVmaXgiOiIiLCJzb3VyY2VJZCI6IjA5MGQ3NGM1LTM5MDAtNDE5Ny1hZDFhLTI3OGQ2N2RhYTYzMSIsImRlc3RpbmF0aW9uSWQiOiJhZjA5YmZlMi00Y2QzLTRkZjYtYTU4Zi0wMTFhOTYzM2I4ZjciLCJzb3VyY2VDb25maWd1cmF0aW9uIjp7ImNyZWRlbnRpYWxzIjp7ImF1dGhfdHlwZSI6IkNsaWVudCIsImNsaWVudF9pZCI6IjMzMjY1NzU4MTkzMS0wZzdtYjBrYjA5Y2MyZzVra2Nram5lNDZnNDVydG9jOC5hcHBzLmdvb2dsZXVzZXJjb250ZW50LmNvbSIsImNsaWVudF9zZWNyZXQiOiJreUtOR3B2UWVrbWtNTF9DbmlBLUNiZDgiLCJyZWZyZXNoX3Rva2VuIjp7Il9zZWNyZXQiOiJhaXJieXRlX3dvcmtzcGFjZV9kYjlhODAyNi0yZDA0LTQ0YzYtODM3MS05MTVmY2FkMWUyZWZfc2VjcmV0XzE2YWFjODJjLTE5NDItNGU1My1iODdjLWMwMmEzODU5YTk2Yl92MSJ9fSwicm93X2JhdGNoX3NpemUiOjIwMCwic3ByZWFkc2hlZXRfaWQiOiJodHRwczovL2RvY3MuZ29vZ2xlLmNvbS9zcHJlYWRzaGVldHMvZC8xM1otQTh3UG9XV204bVdIS1RQQ1E5ZmVhVlRkQllQODZZQ1UtR1MwZlVrZy9lZGl0P3VzcD1zaGFyaW5nIn0sImRlc3RpbmF0aW9uQ29uZmlndXJhdGlvbiI6eyJkYXRhc2V0X2lkIjoiYWlyYnl0ZSIsInByb2plY3RfaWQiOiJzcHluZWRhdGFzdGFnaW5nLXYyLTM4MzUwOSIsImxvYWRpbmdfbWV0aG9kIjp7Im1ldGhvZCI6IlN0YW5kYXJkIn0sImNyZWRlbnRpYWxzX2pzb24iOnsiX3NlY3JldCI6ImFpcmJ5dGVfd29ya3NwYWNlX2RiOWE4MDI2LTJkMDQtNDRjNi04MzcxLTkxNWZjYWQxZTJlZl9zZWNyZXRfMzBiNWYzOGEtOWNjMi00MTQ4LWExNzMtYmRlMGE4ZmU4YjQxX3YxIn0sImRhdGFzZXRfbG9jYXRpb24iOiJFVSIsInRyYW5zZm9ybWF0aW9uX3ByaW9yaXR5IjoiaW50ZXJhY3RpdmUiLCJiaWdfcXVlcnlfY2xpZW50X2J1ZmZlcl9zaXplX21iIjoxNX0sIm9wZXJhdGlvblNlcXVlbmNlIjpbeyJvcGVyYXRpb25JZCI6Ijg4MjYzNDcxLWEzMGMtNDc1Yi05MWY2LTMzNDM3ZWEzZDBjMCIsIm5hbWUiOiJOb3JtYWxpemF0aW9uIiwib3BlcmF0b3JUeXBlIjoibm9ybWFsaXphdGlvbiIsIm9wZXJhdG9yTm9ybWFsaXphdGlvbiI6eyJvcHRpb24iOiJiYXNpYyJ9LCJ0b21ic3RvbmUiOmZhbHNlLCJ3b3Jrc3BhY2VJZCI6ImRiOWE4MDI2LTJkMDQtNDRjNi04MzcxLTkxNWZjYWQxZTJlZiJ9XSwid2ViaG9va09wZXJhdGlvbkNvbmZpZ3MiOnt9LCJjYXRhbG9nIjp7InN0cmVhbXMiOlt7InN0cmVhbSI6eyJuYW1lIjoiYWRkaXRpb25hbF9pbnRlZ3JhdGlvbnMiLCJqc29uX3NjaGVtYSI6eyIkc2NoZW1hIjoiaHR0cDovL2pzb24tc2NoZW1hLm9yZy9kcmFmdC0wNy9zY2hlbWEjIiwidHlwZSI6Im9iamVjdCIsInByb3BlcnRpZXMiOnsid29ya3NwYWNlX2lkIjp7InR5cGUiOiJzdHJpbmcifSwiY29ubmVjdG9yX2lkIjp7InR5cGUiOiJzdHJpbmcifSwiaWQiOnsidHlwZSI6InN0cmluZyJ9LCJzdGF0dXMiOnsidHlwZSI6InN0cmluZyJ9fX0sInN1cHBvcnRlZF9zeW5jX21vZGVzIjpbImZ1bGxfcmVmcmVzaCJdLCJkZWZhdWx0X2N1cnNvcl9maWVsZCI6W10sInNvdXJjZV9kZWZpbmVkX3ByaW1hcnlfa2V5IjpbXX0sInN5bmNfbW9kZSI6ImZ1bGxfcmVmcmVzaCIsImN1cnNvcl9maWVsZCI6W10sImRlc3RpbmF0aW9uX3N5bmNfbW9kZSI6Im92ZXJ3cml0ZSIsInByaW1hcnlfa2V5IjpbXX0seyJzdHJlYW0iOnsibmFtZSI6ImNhbXBhaWduX21hdGNoaW5nX2NvbmYiLCJqc29uX3NjaGVtYSI6eyIkc2NoZW1hIjoiaHR0cDovL2pzb24tc2NoZW1hLm9yZy9kcmFmdC0wNy9zY2hlbWEjIiwidHlwZSI6Im9iamVjdCIsInByb3BlcnRpZXMiOnsid29ya3NwYWNlX2lkIjp7InR5cGUiOiJzdHJpbmcifSwiY3VzdG9tZXIiOnsidHlwZSI6InN0cmluZyJ9fX0sInN1cHBvcnRlZF9zeW5jX21vZGVzIjpbImZ1bGxfcmVmcmVzaCJdLCJkZWZhdWx0X2N1cnNvcl9maWVsZCI6W10sInNvdXJjZV9kZWZpbmVkX3ByaW1hcnlfa2V5IjpbXX0sInN5bmNfbW9kZSI6ImZ1bGxfcmVmcmVzaCIsImN1cnNvcl9maWVsZCI6W10sImRlc3RpbmF0aW9uX3N5bmNfbW9kZSI6Im92ZXJ3cml0ZSIsInByaW1hcnlfa2V5IjpbXX0seyJzdHJlYW0iOnsibmFtZSI6ImFkX21hdGNoaW5nX2NvbmYiLCJqc29uX3NjaGVtYSI6eyIkc2NoZW1hIjoiaHR0cDovL2pzb24tc2NoZW1hLm9yZy9kcmFmdC0wNy9zY2hlbWEjIiwidHlwZSI6Im9iamVjdCIsInByb3BlcnRpZXMiOnsid29ya3NwYWNlX2lkIjp7InR5cGUiOiJzdHJpbmcifSwiY3VzdG9tZXIiOnsidHlwZSI6InN0cmluZyJ9fX0sInN1cHBvcnRlZF9zeW5jX21vZGVzIjpbImZ1bGxfcmVmcmVzaCJdLCJkZWZhdWx0X2N1cnNvcl9maWVsZCI6W10sInNvdXJjZV9kZWZpbmVkX3ByaW1hcnlfa2V5IjpbXX0sInN5bmNfbW9kZSI6ImZ1bGxfcmVmcmVzaCIsImN1cnNvcl9maWVsZCI6W10sImRlc3RpbmF0aW9uX3N5bmNfbW9kZSI6Im92ZXJ3cml0ZSIsInByaW1hcnlfa2V5IjpbXX0seyJzdHJlYW0iOnsibmFtZSI6ImZpbHRlcmVkX2luX2FjY291bnRzIiwianNvbl9zY2hlbWEiOnsiJHNjaGVtYSI6Imh0dHA6Ly9qc29uLXNjaGVtYS5vcmcvZHJhZnQtMD
cvc2NoZW1hIyIsInR5cGUiOiJvYmplY3QiLCJwcm9wZXJ0aWVzIjp7IndvcmtzcGFjZV9pZCI6eyJ0eXBlIjoic3RyaW5nIn0sImludGVncmF0aW9uX2lkIjp7InR5cGUiOiJzdHJpbmcifSwiaW5jbHVkZWRfYWNjb3VudF9pZCI6eyJ0eXBlIjoic3RyaW5nIn0sImNsaWVudCI6eyJ0eXBlIjoic3RyaW5nIn0sInNvdXJjZV9uYW1lIjp7InR5cGUiOiJzdHJpbmcifX19LCJzdXBwb3J0ZWRfc3luY19tb2RlcyI6WyJmdWxsX3JlZnJlc2giXSwiZGVmYXVsdF9jdXJzb3JfZmllbGQiOltdLCJzb3VyY2VfZGVmaW5lZF9wcmltYXJ5X2tleSI6W119LCJzeW5jX21vZGUiOiJmdWxsX3JlZnJlc2giLCJjdXJzb3JfZmllbGQiOltdLCJkZXN0aW5hdGlvbl9zeW5jX21vZGUiOiJvdmVyd3JpdGUiLCJwcmltYXJ5X2tleSI6W119LHsic3RyZWFtIjp7Im5hbWUiOiJmaWx0ZXJlZF9vdXRfYWNjb3VudHMiLCJqc29uX3NjaGVtYSI6eyIkc2NoZW1hIjoiaHR0cDovL2pzb24tc2NoZW1hLm9yZy9kcmFmdC0wNy9zY2hlbWEjIiwidHlwZSI6Im9iamVjdCIsInByb3BlcnRpZXMiOnsid29ya3NwYWNlX2lkIjp7InR5cGUiOiJzdHJpbmcifSwiYmVsb25nc190byI6eyJ0eXBlIjoic3RyaW5nIn0sImludGVncmF0aW9uX2lkIjp7InR5cGUiOiJzdHJpbmcifSwiZXhjbHVkZWRfYWNjb3VudF9pZCI6eyJ0eXBlIjoic3RyaW5nIn0sImNsaWVudCI6eyJ0eXBlIjoic3RyaW5nIn0sInNvdXJjZV9uYW1lIjp7InR5cGUiOiJzdHJpbmcifX19LCJzdXBwb3J0ZWRfc3luY19tb2RlcyI6WyJmdWxsX3JlZnJlc2giXSwiZGVmYXVsdF9jdXJzb3JfZmllbGQiOltdLCJzb3VyY2VfZGVmaW5lZF9wcmltYXJ5X2tleSI6W119LCJzeW5jX21vZGUiOiJmdWxsX3JlZnJlc2giLCJjdXJzb3JfZmllbGQiOltdLCJkZXN0aW5hdGlvbl9zeW5jX21vZGUiOiJvdmVyd3JpdGUiLCJwcmltYXJ5X2tleSI6W119LHsic3RyZWFtIjp7Im5hbWUiOiJmaWx0ZXJlZF9vdXRfaW50ZWdyYXRpb25zIiwianNvbl9zY2hlbWEiOnsiJHNjaGVtYSI6Imh0dHA6Ly9qc29uLXNjaGVtYS5vcmcvZHJhZnQtMDcvc2NoZW1hIyIsInR5cGUiOiJvYmplY3QiLCJwcm9wZXJ0aWVzIjp7ImludGVncmF0aW9uX2lkIjp7InR5cGUiOiJzdHJpbmcifSwic3RhdHVzIjp7InR5cGUiOiJzdHJpbmcifX19LCJzdXBwb3J0ZWRfc3luY19tb2RlcyI6WyJmdWxsX3JlZnJlc2giXSwiZGVmYXVsdF9jdXJzb3JfZmllbGQiOltdLCJzb3VyY2VfZGVmaW5lZF9wcmltYXJ5X2tleSI6W119LCJzeW5jX21vZGUiOiJmdWxsX3JlZnJlc2giLCJjdXJzb3JfZmllbGQiOltdLCJkZXN0aW5hdGlvbl9zeW5jX21vZGUiOiJvdmVyd3JpdGUiLCJwcmltYXJ5X2tleSI6W119LHsic3RyZWFtIjp7Im5hbWUiOiJtb2NrdXBfd29ya3NwYWNlX2lkIiwianNvbl9zY2hlbWEiOnsiJHNjaGVtYSI6Imh0dHA6Ly9qc29uLXNjaGVtYS5vcmcvZHJhZnQtMDcvc2NoZW1hIyIsInR5cGUiOiJvYmplY3QiLCJwcm9wZXJ0aWVzIjp7IndvcmtzcGFjZV9pZCI6eyJ0eXBlIjoic3RyaW5nIn0sInN0YXR1cyI6eyJ0eXBlIjoic3RyaW5nIn19fSwic3VwcG9ydGVkX3N5bmNfbW9kZXMiOlsiZnVsbF9yZWZyZXNoIl0sImRlZmF1bHRfY3Vyc29yX2ZpZWxkIjpbXSwic291cmNlX2RlZmluZWRfcHJpbWFyeV9rZXkiOltdfSwic3luY19tb2RlIjoiZnVsbF9yZWZyZXNoIiwiY3Vyc29yX2ZpZWxkIjpbXSwiZGVzdGluYXRpb25fc3luY19tb2RlIjoib3ZlcndyaXRlIiwicHJpbWFyeV9rZXkiOltdfV19LCJzdGF0ZSI6eyJzdGF0ZSI6e319LCJzeW5jUmVzb3VyY2VSZXF1aXJlbWVudHMiOnsiY29uZmlnS2V5Ijp7InZhcmlhbnQiOiJkZWZhdWx0Iiwic3ViVHlwZSI6ImZpbGUifSwiZGVzdGluYXRpb24iOnsiY3B1X3JlcXVlc3QiOiIwLjUiLCJjcHVfbGltaXQiOiIxIiwibWVtb3J5X3JlcXVlc3QiOiIxR2kiLCJtZW1vcnlfbGltaXQiOiIxR2kifSwiZGVzdGluYXRpb25TdGRFcnIiOnsiY3B1X3JlcXVlc3QiOiIwLjAxIiwiY3B1X2xpbWl0IjoiMC41IiwibWVtb3J5X3JlcXVlc3QiOiIyNU1pIiwibWVtb3J5X2xpbWl0IjoiNTBNaSJ9LCJkZXN0aW5hdGlvblN0ZEluIjp7ImNwdV9yZXF1ZXN0IjoiMC41IiwiY3B1X2xpbWl0IjoiMSIsIm1lbW9yeV9yZXF1ZXN0IjoiMjVNaSIsIm1lbW9yeV9saW1pdCI6IjUwTWkifSwiZGVzdGluYXRpb25TdGRPdXQiOnsiY3B1X3JlcXVlc3QiOiIwLjAxIiwiY3B1X2xpbWl0IjoiMC41IiwibWVtb3J5X3JlcXVlc3QiOiIyNU1pIiwibWVtb3J5X2xpbWl0IjoiNTBNaSJ9LCJvcmNoZXN0cmF0b3IiOnsiY3B1X3JlcXVlc3QiOiIwLjUiLCJjcHVfbGltaXQiOiIxIiwibWVtb3J5X3JlcXVlc3QiOiIyR2kiLCJtZW1vcnlfbGltaXQiOiIyR2kifSwic291cmNlIjp7ImNwdV9yZXF1ZXN0IjoiMC41IiwiY3B1X2xpbWl0IjoiMSIsIm1lbW9yeV9yZXF1ZXN0IjoiMUdpIiwibWVtb3J5X2xpbWl0IjoiMkdpIn0sInNvdXJjZVN0ZEVyciI6eyJjcHVfcmVxdWVzdCI6IjAuMDEiLCJjcHVfbGltaXQiOiIwLjUiLCJtZW1vcnlfcmVxdWVzdCI6IjI1TWkiLCJtZW1vcnlfbGltaXQiOiI1ME1pIn0sInNvdXJjZVN0ZE91dCI6eyJjcHVfcmVxdWVzdCI6IjAuNSIsImNwdV9saW1pdCI6IjEiLCJtZW1vcnlfcmVxdWVzdCI6IjI1TWkiLCJtZW1vcnlfbGltaXQiOiI1ME1pIn0sImhlYXJ0YmVhdCI6eyJjc
HVfcmVxdWVzdCI6IjAuMDUiLCJjcHVfbGltaXQiOiIwLjIiLCJtZW1vcnlfcmVxdWVzdCI6IjI1TWkiLCJtZW1vcnlfbGltaXQiOiI1ME1pIn19LCJ3b3Jrc3BhY2VJZCI6ImRiOWE4MDI2LTJkMDQtNDRjNi04MzcxLTkxNWZjYWQxZTJlZiIsImNvbm5lY3Rpb25JZCI6ImJiZmI4MTJlLTQyY2YtNDNlMi04YmNlLTE3NTAyYWU3YzRmMCIsIm5vcm1hbGl6ZUluRGVzdGluYXRpb25Db250YWluZXIiOnRydWUsImlzUmVzZXQiOmZhbHNlfQ==" - }, - { - "metadata": { - "encoding": "anNvbi9wbGFpbg==" - }, - "data": "ImJiZmI4MTJlLTQyY2YtNDNlMi04YmNlLTE3NTAyYWU3YzRmMCI=" - } - ] - }, - "workflowExecutionTimeout": "0s", - "workflowRunTimeout": "0s", - "workflowTaskTimeout": "10s", - "parentClosePolicy": "RequestCancel", - "control": "", - "workflowTaskCompletedEventId": "98", - "workflowIdReusePolicy": "AllowDuplicate", - "retryPolicy": null, - "cronSchedule": "", - "header": { - "fields": {} - }, - "memo": null, - "searchAttributes": null, - "useCompatibleVersion": false - } - }, - { - "eventId": "100", - "eventTime": "2023-09-21T00:42:20.015126831Z", - "eventType": "ChildWorkflowExecutionStarted", - "version": "1067", - "taskId": "212228926", - "workerMayIgnore": false, - "childWorkflowExecutionStartedEventAttributes": { - "namespace": "prod.ebc2e", - "namespaceId": "8741d553-ca6f-4c06-b896-f260688e5dc1", - "initiatedEventId": "99", - "workflowExecution": { - "workflowId": "sync_4777709", - "runId": "da7964bc-b252-4b17-880f-0934617d6f7c" - }, - "workflowType": { - "name": "SyncWorkflow" - }, - "header": { - "fields": {} - } - } - }, - { - "eventId": "101", - "eventTime": "2023-09-21T00:42:20.015130621Z", - "eventType": "WorkflowTaskScheduled", - "version": "1067", - "taskId": "212228927", - "workerMayIgnore": false, - "workflowTaskScheduledEventAttributes": { - "taskQueue": { - "name": "1@prod-airbyte-worker-85df7858dd-nkgkv:19f7357e-6457-4548-883f-d15688bfb20a", - "kind": "Sticky", - "normalName": "CONNECTION_UPDATER" - }, - "startToCloseTimeout": "10s", - "attempt": 1 - } - }, - { - "eventId": "102", - "eventTime": "2023-09-21T00:42:20.023648905Z", - "eventType": "WorkflowTaskStarted", - "version": "1067", - "taskId": "212228932", - "workerMayIgnore": false, - "workflowTaskStartedEventAttributes": { - "scheduledEventId": "101", - "identity": "1@prod-airbyte-worker-85df7858dd-nkgkv", - "requestId": "23fbdbe1-b163-4e72-b281-6da435bd70b1", - "suggestContinueAsNew": false, - "historySizeBytes": "28038" - } - }, - { - "eventId": "103", - "eventTime": "2023-09-21T00:42:20.075781749Z", - "eventType": "WorkflowTaskCompleted", - "version": "1067", - "taskId": "212228936", - "workerMayIgnore": false, - "workflowTaskCompletedEventAttributes": { - "scheduledEventId": "101", - "startedEventId": "102", - "identity": "1@prod-airbyte-worker-85df7858dd-nkgkv", - "binaryChecksum": "", - "workerVersion": null, - "sdkMetadata": null, - "meteringMetadata": null - } - }, - { - "eventId": "104", - "eventTime": "2023-09-21T00:44:27.369794647Z", - "eventType": "ChildWorkflowExecutionCompleted", - "version": "1067", - "taskId": "212236245", - "workerMayIgnore": false, - "childWorkflowExecutionCompletedEventAttributes": { - "result": { - "payloads": [ - { - "metadata": { - "encoding": "anNvbi9wbGFpbg==" - }, - "data": 
"eyJzdGFuZGFyZFN5bmNTdW1tYXJ5Ijp7InN0YXR1cyI6ImNvbXBsZXRlZCIsInJlY29yZHNTeW5jZWQiOjU0LCJieXRlc1N5bmNlZCI6NjY4MCwic3RhcnRUaW1lIjoxNjk1MjU2OTY3MzQ4LCJlbmRUaW1lIjoxNjk1MjU3MDY0ODEzLCJ0b3RhbFN0YXRzIjp7ImJ5dGVzQ29tbWl0dGVkIjo2NjgwLCJieXRlc0VtaXR0ZWQiOjY2ODAsImRlc3RpbmF0aW9uU3RhdGVNZXNzYWdlc0VtaXR0ZWQiOjAsImRlc3RpbmF0aW9uV3JpdGVFbmRUaW1lIjoxNjk1MjU3MDY0NjQ0LCJkZXN0aW5hdGlvbldyaXRlU3RhcnRUaW1lIjoxNjk1MjU2OTY3NDE2LCJtZWFuU2Vjb25kc0JlZm9yZVNvdXJjZVN0YXRlTWVzc2FnZUVtaXR0ZWQiOjAsIm1heFNlY29uZHNCZWZvcmVTb3VyY2VTdGF0ZU1lc3NhZ2VFbWl0dGVkIjowLCJtYXhTZWNvbmRzQmV0d2VlblN0YXRlTWVzc2FnZUVtaXR0ZWRhbmRDb21taXR0ZWQiOjAsIm1lYW5TZWNvbmRzQmV0d2VlblN0YXRlTWVzc2FnZUVtaXR0ZWRhbmRDb21taXR0ZWQiOjAsInJlY29yZHNFbWl0dGVkIjo1NCwicmVjb3Jkc0NvbW1pdHRlZCI6NTQsInJlcGxpY2F0aW9uRW5kVGltZSI6MTY5NTI1NzA2NDgwNSwicmVwbGljYXRpb25TdGFydFRpbWUiOjE2OTUyNTY5NjczNDgsInNvdXJjZVJlYWRFbmRUaW1lIjoxNjk1MjU2OTk2OTAyLCJzb3VyY2VSZWFkU3RhcnRUaW1lIjoxNjk1MjU2OTY3NDA3LCJzb3VyY2VTdGF0ZU1lc3NhZ2VzRW1pdHRlZCI6MH0sInN0cmVhbVN0YXRzIjpbeyJzdHJlYW1OYW1lIjoiYWRfbWF0Y2hpbmdfY29uZiIsInN0YXRzIjp7ImJ5dGVzQ29tbWl0dGVkIjo0MCwiYnl0ZXNFbWl0dGVkIjo0MCwicmVjb3Jkc0VtaXR0ZWQiOjEsInJlY29yZHNDb21taXR0ZWQiOjF9fSx7InN0cmVhbU5hbWUiOiJtb2NrdXBfd29ya3NwYWNlX2lkIiwic3RhdHMiOnsiYnl0ZXNDb21taXR0ZWQiOjM4LCJieXRlc0VtaXR0ZWQiOjM4LCJyZWNvcmRzRW1pdHRlZCI6MSwicmVjb3Jkc0NvbW1pdHRlZCI6MX19LHsic3RyZWFtTmFtZSI6ImFkZGl0aW9uYWxfaW50ZWdyYXRpb25zIiwic3RhdHMiOnsiYnl0ZXNDb21taXR0ZWQiOjIxNiwiYnl0ZXNFbWl0dGVkIjoyMTYsInJlY29yZHNFbWl0dGVkIjozLCJyZWNvcmRzQ29tbWl0dGVkIjozfX0seyJzdHJlYW1OYW1lIjoiY2FtcGFpZ25fbWF0Y2hpbmdfY29uZiIsInN0YXRzIjp7ImJ5dGVzQ29tbWl0dGVkIjoyNjMsImJ5dGVzRW1pdHRlZCI6MjYzLCJyZWNvcmRzRW1pdHRlZCI6NiwicmVjb3Jkc0NvbW1pdHRlZCI6Nn19LHsic3RyZWFtTmFtZSI6ImZpbHRlcmVkX2luX2FjY291bnRzIiwic3RhdHMiOnsiYnl0ZXNDb21taXR0ZWQiOjYxMjMsImJ5dGVzRW1pdHRlZCI6NjEyMywicmVjb3Jkc0VtaXR0ZWQiOjQzLCJyZWNvcmRzQ29tbWl0dGVkIjo0M319XSwicGVyZm9ybWFuY2VNZXRyaWNzIjp7InByb2Nlc3NGcm9tU291cmNlIjp7ImVsYXBzZWRUaW1lSW5OYW5vcyI6NjYzNDg0NTMsImV4ZWN1dGlvbkNvdW50Ijo1NCwiYXZnRXhlY1RpbWVJbk5hbm9zIjoxMjI4Njc1LjA1NTU1NTU1NTV9LCJyZWFkRnJvbVNvdXJjZSI6eyJlbGFwc2VkVGltZUluTmFub3MiOjE2NTM0MjQxNzk5LCJleGVjdXRpb25Db3VudCI6MzY3NzksImF2Z0V4ZWNUaW1lSW5OYW5vcyI6NDQ5NTU2LjU4OTMzMDg2ODE2fSwicHJvY2Vzc0Zyb21EZXN0Ijp7ImVsYXBzZWRUaW1lSW5OYW5vcyI6MCwiZXhlY3V0aW9uQ291bnQiOjAsImF2Z0V4ZWNUaW1lSW5OYW5vcyI6Ik5hTiJ9LCJ3cml0ZVRvRGVzdCI6eyJlbGFwc2VkVGltZUluTmFub3MiOjYwNzgzNDM1LCJleGVjdXRpb25Db3VudCI6NTQsImF2Z0V4ZWNUaW1lSW5OYW5vcyI6MTEyNTYxOS4xNjY2NjY2NjY3fSwicmVhZEZyb21EZXN0Ijp7ImVsYXBzZWRUaW1lSW5OYW5vcyI6ODQ4MDgxODAwNjYsImV4ZWN1dGlvbkNvdW50IjoyNDEzNzUsImF2Z0V4ZWNUaW1lSW5OYW5vcyI6MzUxMzU0LjQ0ODc0NTcyNzZ9fX0sIm91dHB1dF9jYXRhbG9nIjp7InN0cmVhbXMiOlt7InN0cmVhbSI6eyJuYW1lIjoiYWRkaXRpb25hbF9pbnRlZ3JhdGlvbnMiLCJqc29uX3NjaGVtYSI6eyIkc2NoZW1hIjoiaHR0cDovL2pzb24tc2NoZW1hLm9yZy9kcmFmdC0wNy9zY2hlbWEjIiwidHlwZSI6Im9iamVjdCIsInByb3BlcnRpZXMiOnsid29ya3NwYWNlX2lkIjp7InR5cGUiOiJzdHJpbmcifSwiY29ubmVjdG9yX2lkIjp7InR5cGUiOiJzdHJpbmcifSwiaWQiOnsidHlwZSI6InN0cmluZyJ9LCJzdGF0dXMiOnsidHlwZSI6InN0cmluZyJ9fX0sInN1cHBvcnRlZF9zeW5jX21vZGVzIjpbImZ1bGxfcmVmcmVzaCJdLCJkZWZhdWx0X2N1cnNvcl9maWVsZCI6W10sInNvdXJjZV9kZWZpbmVkX3ByaW1hcnlfa2V5IjpbXSwibmFtZXNwYWNlIjoiZGJfc3B5bmUifSwic3luY19tb2RlIjoiZnVsbF9yZWZyZXNoIiwiY3Vyc29yX2ZpZWxkIjpbXSwiZGVzdGluYXRpb25fc3luY19tb2RlIjoib3ZlcndyaXRlIiwicHJpbWFyeV9rZXkiOltdfSx7InN0cmVhbSI6eyJuYW1lIjoiY2FtcGFpZ25fbWF0Y2hpbmdfY29uZiIsImpzb25fc2NoZW1hIjp7IiRzY2hlbWEiOiJodHRwOi8vanNvbi1zY2hlbWEub3JnL2RyYWZ0LTA3L3NjaGVtYSMiLCJ0eXBlIjoib2JqZWN0IiwicHJvcGVydGllcyI6eyJ3b3Jrc3BhY2VfaWQiOnsidHlwZSI6InN0cmluZyJ9LCJjdXN0b21lciI6eyJ0eXBlIjoic3RyaW5nIn19fSwic3VwcG9ydGVkX3N5bm
NfbW9kZXMiOlsiZnVsbF9yZWZyZXNoIl0sImRlZmF1bHRfY3Vyc29yX2ZpZWxkIjpbXSwic291cmNlX2RlZmluZWRfcHJpbWFyeV9rZXkiOltdLCJuYW1lc3BhY2UiOiJkYl9zcHluZSJ9LCJzeW5jX21vZGUiOiJmdWxsX3JlZnJlc2giLCJjdXJzb3JfZmllbGQiOltdLCJkZXN0aW5hdGlvbl9zeW5jX21vZGUiOiJvdmVyd3JpdGUiLCJwcmltYXJ5X2tleSI6W119LHsic3RyZWFtIjp7Im5hbWUiOiJhZF9tYXRjaGluZ19jb25mIiwianNvbl9zY2hlbWEiOnsiJHNjaGVtYSI6Imh0dHA6Ly9qc29uLXNjaGVtYS5vcmcvZHJhZnQtMDcvc2NoZW1hIyIsInR5cGUiOiJvYmplY3QiLCJwcm9wZXJ0aWVzIjp7IndvcmtzcGFjZV9pZCI6eyJ0eXBlIjoic3RyaW5nIn0sImN1c3RvbWVyIjp7InR5cGUiOiJzdHJpbmcifX19LCJzdXBwb3J0ZWRfc3luY19tb2RlcyI6WyJmdWxsX3JlZnJlc2giXSwiZGVmYXVsdF9jdXJzb3JfZmllbGQiOltdLCJzb3VyY2VfZGVmaW5lZF9wcmltYXJ5X2tleSI6W10sIm5hbWVzcGFjZSI6ImRiX3NweW5lIn0sInN5bmNfbW9kZSI6ImZ1bGxfcmVmcmVzaCIsImN1cnNvcl9maWVsZCI6W10sImRlc3RpbmF0aW9uX3N5bmNfbW9kZSI6Im92ZXJ3cml0ZSIsInByaW1hcnlfa2V5IjpbXX0seyJzdHJlYW0iOnsibmFtZSI6ImZpbHRlcmVkX2luX2FjY291bnRzIiwianNvbl9zY2hlbWEiOnsiJHNjaGVtYSI6Imh0dHA6Ly9qc29uLXNjaGVtYS5vcmcvZHJhZnQtMDcvc2NoZW1hIyIsInR5cGUiOiJvYmplY3QiLCJwcm9wZXJ0aWVzIjp7IndvcmtzcGFjZV9pZCI6eyJ0eXBlIjoic3RyaW5nIn0sImludGVncmF0aW9uX2lkIjp7InR5cGUiOiJzdHJpbmcifSwiaW5jbHVkZWRfYWNjb3VudF9pZCI6eyJ0eXBlIjoic3RyaW5nIn0sImNsaWVudCI6eyJ0eXBlIjoic3RyaW5nIn0sInNvdXJjZV9uYW1lIjp7InR5cGUiOiJzdHJpbmcifX19LCJzdXBwb3J0ZWRfc3luY19tb2RlcyI6WyJmdWxsX3JlZnJlc2giXSwiZGVmYXVsdF9jdXJzb3JfZmllbGQiOltdLCJzb3VyY2VfZGVmaW5lZF9wcmltYXJ5X2tleSI6W10sIm5hbWVzcGFjZSI6ImRiX3NweW5lIn0sInN5bmNfbW9kZSI6ImZ1bGxfcmVmcmVzaCIsImN1cnNvcl9maWVsZCI6W10sImRlc3RpbmF0aW9uX3N5bmNfbW9kZSI6Im92ZXJ3cml0ZSIsInByaW1hcnlfa2V5IjpbXX0seyJzdHJlYW0iOnsibmFtZSI6ImZpbHRlcmVkX291dF9hY2NvdW50cyIsImpzb25fc2NoZW1hIjp7IiRzY2hlbWEiOiJodHRwOi8vanNvbi1zY2hlbWEub3JnL2RyYWZ0LTA3L3NjaGVtYSMiLCJ0eXBlIjoib2JqZWN0IiwicHJvcGVydGllcyI6eyJ3b3Jrc3BhY2VfaWQiOnsidHlwZSI6InN0cmluZyJ9LCJiZWxvbmdzX3RvIjp7InR5cGUiOiJzdHJpbmcifSwiaW50ZWdyYXRpb25faWQiOnsidHlwZSI6InN0cmluZyJ9LCJleGNsdWRlZF9hY2NvdW50X2lkIjp7InR5cGUiOiJzdHJpbmcifSwiY2xpZW50Ijp7InR5cGUiOiJzdHJpbmcifSwic291cmNlX25hbWUiOnsidHlwZSI6InN0cmluZyJ9fX0sInN1cHBvcnRlZF9zeW5jX21vZGVzIjpbImZ1bGxfcmVmcmVzaCJdLCJkZWZhdWx0X2N1cnNvcl9maWVsZCI6W10sInNvdXJjZV9kZWZpbmVkX3ByaW1hcnlfa2V5IjpbXSwibmFtZXNwYWNlIjoiZGJfc3B5bmUifSwic3luY19tb2RlIjoiZnVsbF9yZWZyZXNoIiwiY3Vyc29yX2ZpZWxkIjpbXSwiZGVzdGluYXRpb25fc3luY19tb2RlIjoib3ZlcndyaXRlIiwicHJpbWFyeV9rZXkiOltdfSx7InN0cmVhbSI6eyJuYW1lIjoiZmlsdGVyZWRfb3V0X2ludGVncmF0aW9ucyIsImpzb25fc2NoZW1hIjp7IiRzY2hlbWEiOiJodHRwOi8vanNvbi1zY2hlbWEub3JnL2RyYWZ0LTA3L3NjaGVtYSMiLCJ0eXBlIjoib2JqZWN0IiwicHJvcGVydGllcyI6eyJpbnRlZ3JhdGlvbl9pZCI6eyJ0eXBlIjoic3RyaW5nIn0sInN0YXR1cyI6eyJ0eXBlIjoic3RyaW5nIn19fSwic3VwcG9ydGVkX3N5bmNfbW9kZXMiOlsiZnVsbF9yZWZyZXNoIl0sImRlZmF1bHRfY3Vyc29yX2ZpZWxkIjpbXSwic291cmNlX2RlZmluZWRfcHJpbWFyeV9rZXkiOltdLCJuYW1lc3BhY2UiOiJkYl9zcHluZSJ9LCJzeW5jX21vZGUiOiJmdWxsX3JlZnJlc2giLCJjdXJzb3JfZmllbGQiOltdLCJkZXN0aW5hdGlvbl9zeW5jX21vZGUiOiJvdmVyd3JpdGUiLCJwcmltYXJ5X2tleSI6W119LHsic3RyZWFtIjp7Im5hbWUiOiJtb2NrdXBfd29ya3NwYWNlX2lkIiwianNvbl9zY2hlbWEiOnsiJHNjaGVtYSI6Imh0dHA6Ly9qc29uLXNjaGVtYS5vcmcvZHJhZnQtMDcvc2NoZW1hIyIsInR5cGUiOiJvYmplY3QiLCJwcm9wZXJ0aWVzIjp7IndvcmtzcGFjZV9pZCI6eyJ0eXBlIjoic3RyaW5nIn0sInN0YXR1cyI6eyJ0eXBlIjoic3RyaW5nIn19fSwic3VwcG9ydGVkX3N5bmNfbW9kZXMiOlsiZnVsbF9yZWZyZXNoIl0sImRlZmF1bHRfY3Vyc29yX2ZpZWxkIjpbXSwic291cmNlX2RlZmluZWRfcHJpbWFyeV9rZXkiOltdLCJuYW1lc3BhY2UiOiJkYl9zcHluZSJ9LCJzeW5jX21vZGUiOiJmdWxsX3JlZnJlc2giLCJjdXJzb3JfZmllbGQiOltdLCJkZXN0aW5hdGlvbl9zeW5jX21vZGUiOiJvdmVyd3JpdGUiLCJwcmltYXJ5X2tleSI6W119XX0sImZhaWx1cmVzIjpbXX0=" - } - ] - }, - "namespace": "prod.ebc2e", - "namespaceId": "8741d553-ca6f-4c06-b896-f260688e5dc1", - 
"workflowExecution": { - "workflowId": "sync_4777709", - "runId": "da7964bc-b252-4b17-880f-0934617d6f7c" - }, - "workflowType": { - "name": "SyncWorkflow" - }, - "initiatedEventId": "99", - "startedEventId": "100" - } - }, - { - "eventId": "105", - "eventTime": "2023-09-21T00:44:27.369797887Z", - "eventType": "WorkflowTaskScheduled", - "version": "1067", - "taskId": "212236246", - "workerMayIgnore": false, - "workflowTaskScheduledEventAttributes": { - "taskQueue": { - "name": "1@prod-airbyte-worker-85df7858dd-nkgkv:19f7357e-6457-4548-883f-d15688bfb20a", - "kind": "Sticky", - "normalName": "CONNECTION_UPDATER" - }, - "startToCloseTimeout": "10s", - "attempt": 1 - } - }, - { - "eventId": "106", - "eventTime": "2023-09-21T00:44:27.376154232Z", - "eventType": "WorkflowTaskStarted", - "version": "1067", - "taskId": "212236250", - "workerMayIgnore": false, - "workflowTaskStartedEventAttributes": { - "scheduledEventId": "105", - "identity": "1@prod-airbyte-worker-85df7858dd-nkgkv", - "requestId": "c778a6b8-e645-443c-95ea-48f65d10ed8f", - "suggestContinueAsNew": false, - "historySizeBytes": "33795" - } - }, - { - "eventId": "107", - "eventTime": "2023-09-21T00:44:27.444797504Z", - "eventType": "WorkflowTaskCompleted", - "version": "1067", - "taskId": "212236255", - "workerMayIgnore": false, - "workflowTaskCompletedEventAttributes": { - "scheduledEventId": "105", - "startedEventId": "106", - "identity": "1@prod-airbyte-worker-85df7858dd-nkgkv", - "binaryChecksum": "", - "workerVersion": null, - "sdkMetadata": null, - "meteringMetadata": null - } - }, - { - "eventId": "108", - "eventTime": "2023-09-21T00:44:27.444825694Z", - "eventType": "ActivityTaskScheduled", - "version": "1067", - "taskId": "212236256", - "workerMayIgnore": false, - "activityTaskScheduledEventAttributes": { - "activityId": "fe2c2a02-fbfe-3671-b595-328806cd0f44", - "activityType": { - "name": "JobSuccessWithAttemptNumber" - }, - "taskQueue": { - "name": "CONNECTION_UPDATER", - "kind": "Normal", - "normalName": "" - }, - "header": { - "fields": {} - }, - "input": { - "payloads": [ - { - "metadata": { - "encoding": "anNvbi9wbGFpbg==" - }, - "data": 
"eyJqb2JJZCI6NDc3NzcwOSwiYXR0ZW1wdE51bWJlciI6MCwiY29ubmVjdGlvbklkIjoiYmJmYjgxMmUtNDJjZi00M2UyLThiY2UtMTc1MDJhZTdjNGYwIiwic3RhbmRhcmRTeW5jT3V0cHV0Ijp7InN0YW5kYXJkU3luY1N1bW1hcnkiOnsic3RhdHVzIjoiY29tcGxldGVkIiwicmVjb3Jkc1N5bmNlZCI6NTQsImJ5dGVzU3luY2VkIjo2NjgwLCJzdGFydFRpbWUiOjE2OTUyNTY5NjczNDgsImVuZFRpbWUiOjE2OTUyNTcwNjQ4MTMsInRvdGFsU3RhdHMiOnsiYnl0ZXNDb21taXR0ZWQiOjY2ODAsImJ5dGVzRW1pdHRlZCI6NjY4MCwiZGVzdGluYXRpb25TdGF0ZU1lc3NhZ2VzRW1pdHRlZCI6MCwiZGVzdGluYXRpb25Xcml0ZUVuZFRpbWUiOjE2OTUyNTcwNjQ2NDQsImRlc3RpbmF0aW9uV3JpdGVTdGFydFRpbWUiOjE2OTUyNTY5Njc0MTYsIm1lYW5TZWNvbmRzQmVmb3JlU291cmNlU3RhdGVNZXNzYWdlRW1pdHRlZCI6MCwibWF4U2Vjb25kc0JlZm9yZVNvdXJjZVN0YXRlTWVzc2FnZUVtaXR0ZWQiOjAsIm1heFNlY29uZHNCZXR3ZWVuU3RhdGVNZXNzYWdlRW1pdHRlZGFuZENvbW1pdHRlZCI6MCwibWVhblNlY29uZHNCZXR3ZWVuU3RhdGVNZXNzYWdlRW1pdHRlZGFuZENvbW1pdHRlZCI6MCwicmVjb3Jkc0VtaXR0ZWQiOjU0LCJyZWNvcmRzQ29tbWl0dGVkIjo1NCwicmVwbGljYXRpb25FbmRUaW1lIjoxNjk1MjU3MDY0ODA1LCJyZXBsaWNhdGlvblN0YXJ0VGltZSI6MTY5NTI1Njk2NzM0OCwic291cmNlUmVhZEVuZFRpbWUiOjE2OTUyNTY5OTY5MDIsInNvdXJjZVJlYWRTdGFydFRpbWUiOjE2OTUyNTY5Njc0MDcsInNvdXJjZVN0YXRlTWVzc2FnZXNFbWl0dGVkIjowfSwic3RyZWFtU3RhdHMiOlt7InN0cmVhbU5hbWUiOiJhZF9tYXRjaGluZ19jb25mIiwic3RhdHMiOnsiYnl0ZXNDb21taXR0ZWQiOjQwLCJieXRlc0VtaXR0ZWQiOjQwLCJyZWNvcmRzRW1pdHRlZCI6MSwicmVjb3Jkc0NvbW1pdHRlZCI6MX19LHsic3RyZWFtTmFtZSI6Im1vY2t1cF93b3Jrc3BhY2VfaWQiLCJzdGF0cyI6eyJieXRlc0NvbW1pdHRlZCI6MzgsImJ5dGVzRW1pdHRlZCI6MzgsInJlY29yZHNFbWl0dGVkIjoxLCJyZWNvcmRzQ29tbWl0dGVkIjoxfX0seyJzdHJlYW1OYW1lIjoiYWRkaXRpb25hbF9pbnRlZ3JhdGlvbnMiLCJzdGF0cyI6eyJieXRlc0NvbW1pdHRlZCI6MjE2LCJieXRlc0VtaXR0ZWQiOjIxNiwicmVjb3Jkc0VtaXR0ZWQiOjMsInJlY29yZHNDb21taXR0ZWQiOjN9fSx7InN0cmVhbU5hbWUiOiJjYW1wYWlnbl9tYXRjaGluZ19jb25mIiwic3RhdHMiOnsiYnl0ZXNDb21taXR0ZWQiOjI2MywiYnl0ZXNFbWl0dGVkIjoyNjMsInJlY29yZHNFbWl0dGVkIjo2LCJyZWNvcmRzQ29tbWl0dGVkIjo2fX0seyJzdHJlYW1OYW1lIjoiZmlsdGVyZWRfaW5fYWNjb3VudHMiLCJzdGF0cyI6eyJieXRlc0NvbW1pdHRlZCI6NjEyMywiYnl0ZXNFbWl0dGVkIjo2MTIzLCJyZWNvcmRzRW1pdHRlZCI6NDMsInJlY29yZHNDb21taXR0ZWQiOjQzfX1dLCJwZXJmb3JtYW5jZU1ldHJpY3MiOnsicHJvY2Vzc0Zyb21Tb3VyY2UiOnsiZWxhcHNlZFRpbWVJbk5hbm9zIjo2NjM0ODQ1MywiZXhlY3V0aW9uQ291bnQiOjU0LCJhdmdFeGVjVGltZUluTmFub3MiOjEyMjg2NzUuMDU1NTU1NTU1NX0sInJlYWRGcm9tU291cmNlIjp7ImVsYXBzZWRUaW1lSW5OYW5vcyI6MTY1MzQyNDE3OTksImV4ZWN1dGlvbkNvdW50IjozNjc3OSwiYXZnRXhlY1RpbWVJbk5hbm9zIjo0NDk1NTYuNTg5MzMwODY4MTZ9LCJwcm9jZXNzRnJvbURlc3QiOnsiZWxhcHNlZFRpbWVJbk5hbm9zIjowLCJleGVjdXRpb25Db3VudCI6MCwiYXZnRXhlY1RpbWVJbk5hbm9zIjoiTmFOIn0sIndyaXRlVG9EZXN0Ijp7ImVsYXBzZWRUaW1lSW5OYW5vcyI6NjA3ODM0MzUsImV4ZWN1dGlvbkNvdW50Ijo1NCwiYXZnRXhlY1RpbWVJbk5hbm9zIjoxMTI1NjE5LjE2NjY2NjY2Njd9LCJyZWFkRnJvbURlc3QiOnsiZWxhcHNlZFRpbWVJbk5hbm9zIjo4NDgwODE4MDA2NiwiZXhlY3V0aW9uQ291bnQiOjI0MTM3NSwiYXZnRXhlY1RpbWVJbk5hbm9zIjozNTEzNTQuNDQ4NzQ1NzI3Nn19fSwib3V0cHV0X2NhdGFsb2ciOnsic3RyZWFtcyI6W3sic3RyZWFtIjp7Im5hbWUiOiJhZGRpdGlvbmFsX2ludGVncmF0aW9ucyIsImpzb25fc2NoZW1hIjp7IiRzY2hlbWEiOiJodHRwOi8vanNvbi1zY2hlbWEub3JnL2RyYWZ0LTA3L3NjaGVtYSMiLCJ0eXBlIjoib2JqZWN0IiwicHJvcGVydGllcyI6eyJ3b3Jrc3BhY2VfaWQiOnsidHlwZSI6InN0cmluZyJ9LCJjb25uZWN0b3JfaWQiOnsidHlwZSI6InN0cmluZyJ9LCJpZCI6eyJ0eXBlIjoic3RyaW5nIn0sInN0YXR1cyI6eyJ0eXBlIjoic3RyaW5nIn19fSwic3VwcG9ydGVkX3N5bmNfbW9kZXMiOlsiZnVsbF9yZWZyZXNoIl0sImRlZmF1bHRfY3Vyc29yX2ZpZWxkIjpbXSwic291cmNlX2RlZmluZWRfcHJpbWFyeV9rZXkiOltdLCJuYW1lc3BhY2UiOiJkYl9zcHluZSJ9LCJzeW5jX21vZGUiOiJmdWxsX3JlZnJlc2giLCJjdXJzb3JfZmllbGQiOltdLCJkZXN0aW5hdGlvbl9zeW5jX21vZGUiOiJvdmVyd3JpdGUiLCJwcmltYXJ5X2tleSI6W119LHsic3RyZWFtIjp7Im5hbWUiOiJjYW1wYWlnbl9tYXRjaGluZ19jb25mIiwianNvbl9zY2hlbWEiOnsiJHNjaGVtYSI6Imh0dHA6Ly9qc29uLXNjaGVtYS5vcmcvZHJhZnQtMDcvc2NoZW1hIy
IsInR5cGUiOiJvYmplY3QiLCJwcm9wZXJ0aWVzIjp7IndvcmtzcGFjZV9pZCI6eyJ0eXBlIjoic3RyaW5nIn0sImN1c3RvbWVyIjp7InR5cGUiOiJzdHJpbmcifX19LCJzdXBwb3J0ZWRfc3luY19tb2RlcyI6WyJmdWxsX3JlZnJlc2giXSwiZGVmYXVsdF9jdXJzb3JfZmllbGQiOltdLCJzb3VyY2VfZGVmaW5lZF9wcmltYXJ5X2tleSI6W10sIm5hbWVzcGFjZSI6ImRiX3NweW5lIn0sInN5bmNfbW9kZSI6ImZ1bGxfcmVmcmVzaCIsImN1cnNvcl9maWVsZCI6W10sImRlc3RpbmF0aW9uX3N5bmNfbW9kZSI6Im92ZXJ3cml0ZSIsInByaW1hcnlfa2V5IjpbXX0seyJzdHJlYW0iOnsibmFtZSI6ImFkX21hdGNoaW5nX2NvbmYiLCJqc29uX3NjaGVtYSI6eyIkc2NoZW1hIjoiaHR0cDovL2pzb24tc2NoZW1hLm9yZy9kcmFmdC0wNy9zY2hlbWEjIiwidHlwZSI6Im9iamVjdCIsInByb3BlcnRpZXMiOnsid29ya3NwYWNlX2lkIjp7InR5cGUiOiJzdHJpbmcifSwiY3VzdG9tZXIiOnsidHlwZSI6InN0cmluZyJ9fX0sInN1cHBvcnRlZF9zeW5jX21vZGVzIjpbImZ1bGxfcmVmcmVzaCJdLCJkZWZhdWx0X2N1cnNvcl9maWVsZCI6W10sInNvdXJjZV9kZWZpbmVkX3ByaW1hcnlfa2V5IjpbXSwibmFtZXNwYWNlIjoiZGJfc3B5bmUifSwic3luY19tb2RlIjoiZnVsbF9yZWZyZXNoIiwiY3Vyc29yX2ZpZWxkIjpbXSwiZGVzdGluYXRpb25fc3luY19tb2RlIjoib3ZlcndyaXRlIiwicHJpbWFyeV9rZXkiOltdfSx7InN0cmVhbSI6eyJuYW1lIjoiZmlsdGVyZWRfaW5fYWNjb3VudHMiLCJqc29uX3NjaGVtYSI6eyIkc2NoZW1hIjoiaHR0cDovL2pzb24tc2NoZW1hLm9yZy9kcmFmdC0wNy9zY2hlbWEjIiwidHlwZSI6Im9iamVjdCIsInByb3BlcnRpZXMiOnsid29ya3NwYWNlX2lkIjp7InR5cGUiOiJzdHJpbmcifSwiaW50ZWdyYXRpb25faWQiOnsidHlwZSI6InN0cmluZyJ9LCJpbmNsdWRlZF9hY2NvdW50X2lkIjp7InR5cGUiOiJzdHJpbmcifSwiY2xpZW50Ijp7InR5cGUiOiJzdHJpbmcifSwic291cmNlX25hbWUiOnsidHlwZSI6InN0cmluZyJ9fX0sInN1cHBvcnRlZF9zeW5jX21vZGVzIjpbImZ1bGxfcmVmcmVzaCJdLCJkZWZhdWx0X2N1cnNvcl9maWVsZCI6W10sInNvdXJjZV9kZWZpbmVkX3ByaW1hcnlfa2V5IjpbXSwibmFtZXNwYWNlIjoiZGJfc3B5bmUifSwic3luY19tb2RlIjoiZnVsbF9yZWZyZXNoIiwiY3Vyc29yX2ZpZWxkIjpbXSwiZGVzdGluYXRpb25fc3luY19tb2RlIjoib3ZlcndyaXRlIiwicHJpbWFyeV9rZXkiOltdfSx7InN0cmVhbSI6eyJuYW1lIjoiZmlsdGVyZWRfb3V0X2FjY291bnRzIiwianNvbl9zY2hlbWEiOnsiJHNjaGVtYSI6Imh0dHA6Ly9qc29uLXNjaGVtYS5vcmcvZHJhZnQtMDcvc2NoZW1hIyIsInR5cGUiOiJvYmplY3QiLCJwcm9wZXJ0aWVzIjp7IndvcmtzcGFjZV9pZCI6eyJ0eXBlIjoic3RyaW5nIn0sImJlbG9uZ3NfdG8iOnsidHlwZSI6InN0cmluZyJ9LCJpbnRlZ3JhdGlvbl9pZCI6eyJ0eXBlIjoic3RyaW5nIn0sImV4Y2x1ZGVkX2FjY291bnRfaWQiOnsidHlwZSI6InN0cmluZyJ9LCJjbGllbnQiOnsidHlwZSI6InN0cmluZyJ9LCJzb3VyY2VfbmFtZSI6eyJ0eXBlIjoic3RyaW5nIn19fSwic3VwcG9ydGVkX3N5bmNfbW9kZXMiOlsiZnVsbF9yZWZyZXNoIl0sImRlZmF1bHRfY3Vyc29yX2ZpZWxkIjpbXSwic291cmNlX2RlZmluZWRfcHJpbWFyeV9rZXkiOltdLCJuYW1lc3BhY2UiOiJkYl9zcHluZSJ9LCJzeW5jX21vZGUiOiJmdWxsX3JlZnJlc2giLCJjdXJzb3JfZmllbGQiOltdLCJkZXN0aW5hdGlvbl9zeW5jX21vZGUiOiJvdmVyd3JpdGUiLCJwcmltYXJ5X2tleSI6W119LHsic3RyZWFtIjp7Im5hbWUiOiJmaWx0ZXJlZF9vdXRfaW50ZWdyYXRpb25zIiwianNvbl9zY2hlbWEiOnsiJHNjaGVtYSI6Imh0dHA6Ly9qc29uLXNjaGVtYS5vcmcvZHJhZnQtMDcvc2NoZW1hIyIsInR5cGUiOiJvYmplY3QiLCJwcm9wZXJ0aWVzIjp7ImludGVncmF0aW9uX2lkIjp7InR5cGUiOiJzdHJpbmcifSwic3RhdHVzIjp7InR5cGUiOiJzdHJpbmcifX19LCJzdXBwb3J0ZWRfc3luY19tb2RlcyI6WyJmdWxsX3JlZnJlc2giXSwiZGVmYXVsdF9jdXJzb3JfZmllbGQiOltdLCJzb3VyY2VfZGVmaW5lZF9wcmltYXJ5X2tleSI6W10sIm5hbWVzcGFjZSI6ImRiX3NweW5lIn0sInN5bmNfbW9kZSI6ImZ1bGxfcmVmcmVzaCIsImN1cnNvcl9maWVsZCI6W10sImRlc3RpbmF0aW9uX3N5bmNfbW9kZSI6Im92ZXJ3cml0ZSIsInByaW1hcnlfa2V5IjpbXX0seyJzdHJlYW0iOnsibmFtZSI6Im1vY2t1cF93b3Jrc3BhY2VfaWQiLCJqc29uX3NjaGVtYSI6eyIkc2NoZW1hIjoiaHR0cDovL2pzb24tc2NoZW1hLm9yZy9kcmFmdC0wNy9zY2hlbWEjIiwidHlwZSI6Im9iamVjdCIsInByb3BlcnRpZXMiOnsid29ya3NwYWNlX2lkIjp7InR5cGUiOiJzdHJpbmcifSwic3RhdHVzIjp7InR5cGUiOiJzdHJpbmcifX19LCJzdXBwb3J0ZWRfc3luY19tb2RlcyI6WyJmdWxsX3JlZnJlc2giXSwiZGVmYXVsdF9jdXJzb3JfZmllbGQiOltdLCJzb3VyY2VfZGVmaW5lZF9wcmltYXJ5X2tleSI6W10sIm5hbWVzcGFjZSI6ImRiX3NweW5lIn0sInN5bmNfbW9kZSI6ImZ1bGxfcmVmcmVzaCIsImN1cnNvcl9maWVsZCI6W10sImRlc3RpbmF0aW9uX3N5bmNfbW9kZSI6Im92ZXJ3cml0ZSIsInByaW1hcnlfa2V5I
jpbXX1dfSwiZmFpbHVyZXMiOltdfX0=" - } - ] - }, - "scheduleToCloseTimeout": "0s", - "scheduleToStartTimeout": "0s", - "startToCloseTimeout": "120s", - "heartbeatTimeout": "30s", - "workflowTaskCompletedEventId": "107", - "retryPolicy": { - "initialInterval": "30s", - "backoffCoefficient": 2, - "maximumInterval": "600s", - "maximumAttempts": 5, - "nonRetryableErrorTypes": [] - }, - "useCompatibleVersion": false - } - }, - { - "eventId": "109", - "eventTime": "2023-09-21T00:44:27.444841485Z", - "eventType": "ActivityTaskStarted", - "version": "1067", - "taskId": "212236259", - "workerMayIgnore": false, - "activityTaskStartedEventAttributes": { - "scheduledEventId": "108", - "identity": "1@prod-airbyte-worker-85df7858dd-nkgkv", - "requestId": "c572d559-cf9c-4b0e-859b-cb121e440490", - "attempt": 1, - "lastFailure": null - } - }, - { - "eventId": "110", - "eventTime": "2023-09-21T00:44:27.627304535Z", - "eventType": "ActivityTaskCompleted", - "version": "1067", - "taskId": "212236260", - "workerMayIgnore": false, - "activityTaskCompletedEventAttributes": { - "result": null, - "scheduledEventId": "108", - "startedEventId": "109", - "identity": "1@prod-airbyte-worker-85df7858dd-nkgkv", - "workerVersion": null - } - }, - { - "eventId": "111", - "eventTime": "2023-09-21T00:44:27.627308575Z", - "eventType": "WorkflowTaskScheduled", - "version": "1067", - "taskId": "212236261", - "workerMayIgnore": false, - "workflowTaskScheduledEventAttributes": { - "taskQueue": { - "name": "1@prod-airbyte-worker-85df7858dd-nkgkv:19f7357e-6457-4548-883f-d15688bfb20a", - "kind": "Sticky", - "normalName": "CONNECTION_UPDATER" - }, - "startToCloseTimeout": "10s", - "attempt": 1 - } - }, - { - "eventId": "112", - "eventTime": "2023-09-21T00:44:27.636215367Z", - "eventType": "WorkflowTaskStarted", - "version": "1067", - "taskId": "212236265", - "workerMayIgnore": false, - "workflowTaskStartedEventAttributes": { - "scheduledEventId": "111", - "identity": "1@prod-airbyte-worker-85df7858dd-nkgkv", - "requestId": "e0784138-7a2c-456a-8f7c-2db3933aefe8", - "suggestContinueAsNew": false, - "historySizeBytes": "39859" - } - }, - { - "eventId": "113", - "eventTime": "2023-09-21T00:44:27.692577796Z", - "eventType": "WorkflowTaskCompleted", - "version": "1067", - "taskId": "212236270", - "workerMayIgnore": false, - "workflowTaskCompletedEventAttributes": { - "scheduledEventId": "111", - "startedEventId": "112", - "identity": "1@prod-airbyte-worker-85df7858dd-nkgkv", - "binaryChecksum": "", - "workerVersion": null, - "sdkMetadata": null, - "meteringMetadata": null - } - }, - { - "eventId": "114", - "eventTime": "2023-09-21T00:44:27.692606457Z", - "eventType": "ActivityTaskScheduled", - "version": "1067", - "taskId": "212236271", - "workerMayIgnore": false, - "activityTaskScheduledEventAttributes": { - "activityId": "7548fffe-08de-3fa5-ab51-9ab2c08cc844", - "activityType": { - "name": "DeleteStreamResetRecordsForJob" - }, - "taskQueue": { - "name": "CONNECTION_UPDATER", - "kind": "Normal", - "normalName": "" - }, - "header": { - "fields": {} - }, - "input": { - "payloads": [ - { - "metadata": { - "encoding": "anNvbi9wbGFpbg==" - }, - "data": "eyJjb25uZWN0aW9uSWQiOiJiYmZiODEyZS00MmNmLTQzZTItOGJjZS0xNzUwMmFlN2M0ZjAiLCJqb2JJZCI6NDc3NzcwOX0=" - } - ] - }, - "scheduleToCloseTimeout": "0s", - "scheduleToStartTimeout": "0s", - "startToCloseTimeout": "120s", - "heartbeatTimeout": "30s", - "workflowTaskCompletedEventId": "113", - "retryPolicy": { - "initialInterval": "30s", - "backoffCoefficient": 2, - "maximumInterval": "600s", - 
"maximumAttempts": 5, - "nonRetryableErrorTypes": [] - }, - "useCompatibleVersion": false - } - }, - { - "eventId": "115", - "eventTime": "2023-09-21T00:44:27.692618887Z", - "eventType": "ActivityTaskStarted", - "version": "1067", - "taskId": "212236274", - "workerMayIgnore": false, - "activityTaskStartedEventAttributes": { - "scheduledEventId": "114", - "identity": "1@prod-airbyte-worker-85df7858dd-nkgkv", - "requestId": "f78b6122-4cbb-4c56-a897-2cbe927cb7b0", - "attempt": 1, - "lastFailure": null - } - }, - { - "eventId": "116", - "eventTime": "2023-09-21T00:44:27.757111754Z", - "eventType": "ActivityTaskCompleted", - "version": "1067", - "taskId": "212236275", - "workerMayIgnore": false, - "activityTaskCompletedEventAttributes": { - "result": null, - "scheduledEventId": "114", - "startedEventId": "115", - "identity": "1@prod-airbyte-worker-85df7858dd-nkgkv", - "workerVersion": null - } - }, - { - "eventId": "117", - "eventTime": "2023-09-21T00:44:27.757115104Z", - "eventType": "WorkflowTaskScheduled", - "version": "1067", - "taskId": "212236276", - "workerMayIgnore": false, - "workflowTaskScheduledEventAttributes": { - "taskQueue": { - "name": "1@prod-airbyte-worker-85df7858dd-nkgkv:19f7357e-6457-4548-883f-d15688bfb20a", - "kind": "Sticky", - "normalName": "CONNECTION_UPDATER" - }, - "startToCloseTimeout": "10s", - "attempt": 1 - } - }, - { - "eventId": "118", - "eventTime": "2023-09-21T00:44:27.768902327Z", - "eventType": "WorkflowTaskStarted", - "version": "1067", - "taskId": "212236280", - "workerMayIgnore": false, - "workflowTaskStartedEventAttributes": { - "scheduledEventId": "117", - "identity": "1@prod-airbyte-worker-85df7858dd-nkgkv", - "requestId": "86fe0d59-3fd5-4d52-a56c-192dc31a9eb4", - "suggestContinueAsNew": false, - "historySizeBytes": "40640" - } - }, - { - "eventId": "119", - "eventTime": "2023-09-21T00:44:27.822326043Z", - "eventType": "WorkflowTaskCompleted", - "version": "1067", - "taskId": "212236285", - "workerMayIgnore": false, - "workflowTaskCompletedEventAttributes": { - "scheduledEventId": "117", - "startedEventId": "118", - "identity": "1@prod-airbyte-worker-85df7858dd-nkgkv", - "binaryChecksum": "", - "workerVersion": null, - "sdkMetadata": null, - "meteringMetadata": null - } - }, - { - "eventId": "120", - "eventTime": "2023-09-21T00:44:27.822354914Z", - "eventType": "ActivityTaskScheduled", - "version": "1067", - "taskId": "212236286", - "workerMayIgnore": false, - "activityTaskScheduledEventAttributes": { - "activityId": "9ca0e530-6050-330a-921c-38bc99389edd", - "activityType": { - "name": "RecordWorkflowCountMetric" - }, - "taskQueue": { - "name": "CONNECTION_UPDATER", - "kind": "Normal", - "normalName": "" - }, - "header": { - "fields": {} - }, - "input": { - "payloads": [ - { - "metadata": { - "encoding": "anNvbi9wbGFpbg==" - }, - "data": "eyJjb25uZWN0aW9uVXBkYXRlcklucHV0Ijp7ImNvbm5lY3Rpb25JZCI6ImJiZmI4MTJlLTQyY2YtNDNlMi04YmNlLTE3NTAyYWU3YzRmMCIsImpvYklkIjo0Nzc3NzA5LCJhdHRlbXB0SWQiOm51bGwsImZyb21GYWlsdXJlIjpmYWxzZSwiYXR0ZW1wdE51bWJlciI6MSwid29ya2Zsb3dTdGF0ZSI6bnVsbCwicmVzZXRDb25uZWN0aW9uIjpmYWxzZSwiZnJvbUpvYlJlc2V0RmFpbHVyZSI6ZmFsc2UsInNraXBTY2hlZHVsaW5nIjpmYWxzZX0sImZhaWx1cmVDYXVzZSI6bnVsbCwibWV0cmljTmFtZSI6IlRFTVBPUkFMX1dPUktGTE9XX1NVQ0NFU1MiLCJtZXRyaWNBdHRyaWJ1dGVzIjpudWxsfQ==" - } - ] - }, - "scheduleToCloseTimeout": "0s", - "scheduleToStartTimeout": "0s", - "startToCloseTimeout": "120s", - "heartbeatTimeout": "30s", - "workflowTaskCompletedEventId": "119", - "retryPolicy": { - "initialInterval": "30s", - "backoffCoefficient": 2, - 
"maximumInterval": "600s", - "maximumAttempts": 5, - "nonRetryableErrorTypes": [] - }, - "useCompatibleVersion": false - } - }, - { - "eventId": "121", - "eventTime": "2023-09-21T00:44:27.822368894Z", - "eventType": "ActivityTaskStarted", - "version": "1067", - "taskId": "212236289", - "workerMayIgnore": false, - "activityTaskStartedEventAttributes": { - "scheduledEventId": "120", - "identity": "1@prod-airbyte-worker-85df7858dd-nkgkv", - "requestId": "62672b01-2ce5-4764-9024-2c2ceb1b0a7b", - "attempt": 1, - "lastFailure": null - } - }, - { - "eventId": "122", - "eventTime": "2023-09-21T00:44:27.878864806Z", - "eventType": "ActivityTaskCompleted", - "version": "1067", - "taskId": "212236290", - "workerMayIgnore": false, - "activityTaskCompletedEventAttributes": { - "result": null, - "scheduledEventId": "120", - "startedEventId": "121", - "identity": "1@prod-airbyte-worker-85df7858dd-nkgkv", - "workerVersion": null - } - }, - { - "eventId": "123", - "eventTime": "2023-09-21T00:44:27.878870267Z", - "eventType": "WorkflowTaskScheduled", - "version": "1067", - "taskId": "212236291", - "workerMayIgnore": false, - "workflowTaskScheduledEventAttributes": { - "taskQueue": { - "name": "1@prod-airbyte-worker-85df7858dd-nkgkv:19f7357e-6457-4548-883f-d15688bfb20a", - "kind": "Sticky", - "normalName": "CONNECTION_UPDATER" - }, - "startToCloseTimeout": "10s", - "attempt": 1 - } - }, - { - "eventId": "124", - "eventTime": "2023-09-21T00:44:27.887291529Z", - "eventType": "WorkflowTaskStarted", - "version": "1067", - "taskId": "212236295", - "workerMayIgnore": false, - "workflowTaskStartedEventAttributes": { - "scheduledEventId": "123", - "identity": "1@prod-airbyte-worker-85df7858dd-nkgkv", - "requestId": "c36b5f1c-14be-48b8-91cd-d1b91eacf405", - "suggestContinueAsNew": false, - "historySizeBytes": "41682" - } - }, - { - "eventId": "125", - "eventTime": "2023-09-21T00:44:27.945167096Z", - "eventType": "WorkflowTaskCompleted", - "version": "1067", - "taskId": "212236299", - "workerMayIgnore": false, - "workflowTaskCompletedEventAttributes": { - "scheduledEventId": "123", - "startedEventId": "124", - "identity": "1@prod-airbyte-worker-85df7858dd-nkgkv", - "binaryChecksum": "", - "workerVersion": null, - "sdkMetadata": null, - "meteringMetadata": null - } - }, - { - "eventId": "126", - "eventTime": "2023-09-21T00:44:27.945200866Z", - "eventType": "WorkflowExecutionContinuedAsNew", - "version": "1067", - "taskId": "212236300", - "workerMayIgnore": false, - "workflowExecutionContinuedAsNewEventAttributes": { - "newExecutionRunId": "f0c6c023-af83-4e1c-b08d-f783ababf599", - "workflowType": { - "name": "ConnectionManagerWorkflow" - }, - "taskQueue": { - "name": "CONNECTION_UPDATER", - "kind": "Normal", - "normalName": "" - }, - "input": { - "payloads": [ - { - "metadata": { - "encoding": "anNvbi9wbGFpbg==" - }, - "data": "eyJjb25uZWN0aW9uSWQiOiJiYmZiODEyZS00MmNmLTQzZTItOGJjZS0xNzUwMmFlN2M0ZjAiLCJqb2JJZCI6bnVsbCwiYXR0ZW1wdElkIjpudWxsLCJmcm9tRmFpbHVyZSI6ZmFsc2UsImF0dGVtcHROdW1iZXIiOjEsIndvcmtmbG93U3RhdGUiOm51bGwsInJlc2V0Q29ubmVjdGlvbiI6ZmFsc2UsImZyb21Kb2JSZXNldEZhaWx1cmUiOmZhbHNlLCJza2lwU2NoZWR1bGluZyI6ZmFsc2V9" - } - ] - }, - "workflowRunTimeout": "0s", - "workflowTaskTimeout": "10s", - "workflowTaskCompletedEventId": "125", - "backoffStartInterval": null, - "initiator": "Unspecified", - "failure": null, - "lastCompletionResult": null, - "header": { - "fields": {} - }, - "memo": null, - "searchAttributes": null, - "useCompatibleVersion": false - } - } - ] -} diff --git 
a/airbyte-workers/src/test/resources/syncWorkflowHistory.json b/airbyte-workers/src/test/resources/syncWorkflowHistory.json deleted file mode 100644 index 430f8db61b0..00000000000 --- a/airbyte-workers/src/test/resources/syncWorkflowHistory.json +++ /dev/null @@ -1,1178 +0,0 @@ -{ - "events": [ - { - "eventId": "1", - "eventTime": "2024-01-19T17:54:01.365656420Z", - "eventType": "WorkflowExecutionStarted", - "version": "0", - "taskId": "13631639", - "workerMayIgnore": false, - "workflowExecutionStartedEventAttributes": { - "workflowType": { - "name": "SyncWorkflow" - }, - "parentWorkflowNamespace": "default", - "parentWorkflowNamespaceId": "d7789e69-81fc-4542-abf2-33fff5f65636", - "parentWorkflowExecution": { - "workflowId": "connection_manager_999ef05a-dbd9-445a-903b-5a1f82adc26d", - "runId": "315b08b5-36cc-4d70-b8f7-d4ecff35db90" - }, - "parentInitiatedEventId": "99", - "taskQueue": { - "name": "SYNC", - "kind": "Unspecified", - "normalName": "" - }, - "input": { - "payloads": [ - { - "metadata": { - "encoding": "anNvbi9wbGFpbg==" - }, - "data": "eyJqb2JJZCI6IjgiLCJhdHRlbXB0SWQiOjB9" - }, - { - "metadata": { - "encoding": "anNvbi9wbGFpbg==" - }, - "data": "eyJqb2JJZCI6IjgiLCJhdHRlbXB0SWQiOjAsImNvbm5lY3Rpb25JZCI6Ijk5OWVmMDVhLWRiZDktNDQ1YS05MDNiLTVhMWY4MmFkYzI2ZCIsIndvcmtzcGFjZUlkIjoiMWJlYzkyMTUtNGFlOS00ZDAwLWIwNmUtMzYzMGY2ZmQxYTc4IiwiZG9ja2VySW1hZ2UiOiJhaXJieXRlL3NvdXJjZS1wb2tlYXBpOjAuMi4wIiwic3VwcG9ydHNEYnQiOmZhbHNlLCJwcm90b2NvbFZlcnNpb24iOnsidmVyc2lvbiI6IjAuMi4wIn0sImlzQ3VzdG9tQ29ubmVjdG9yIjpmYWxzZSwiYWxsb3dlZEhvc3RzIjp7Imhvc3RzIjpbIioiLCIqLmRhdGFkb2docS5jb20iLCIqLmRhdGFkb2docS5ldSIsIiouc2VudHJ5LmlvIl19fQ==" - }, - { - "metadata": { - "encoding": "anNvbi9wbGFpbg==" - }, - "data": "eyJqb2JJZCI6IjgiLCJhdHRlbXB0SWQiOjAsImNvbm5lY3Rpb25JZCI6Ijk5OWVmMDVhLWRiZDktNDQ1YS05MDNiLTVhMWY4MmFkYzI2ZCIsIndvcmtzcGFjZUlkIjoiMWJlYzkyMTUtNGFlOS00ZDAwLWIwNmUtMzYzMGY2ZmQxYTc4IiwiZG9ja2VySW1hZ2UiOiJhaXJieXRlL2Rlc3RpbmF0aW9uLWUyZS10ZXN0OjAuMy4wIiwic3VwcG9ydHNEYnQiOmZhbHNlLCJwcm90b2NvbFZlcnNpb24iOnsidmVyc2lvbiI6IjAuMi4xIn0sImlzQ3VzdG9tQ29ubmVjdG9yIjpmYWxzZSwiYWRkaXRpb25hbEVudmlyb25tZW50VmFyaWFibGVzIjp7fX0=" - }, - { - "metadata": { - "encoding": "anNvbi9wbGFpbg==" - }, - "data": 
"{"namespaceDefinition":"destination","sourceId":"1b66de36-4041-43a4-a34d-3f539786fcf7","destinationId":"894225b5-b534-4daa-a1fe-192bf89f6db0","sourceConfiguration":{"pokemon_name":"venusaur"},"destinationConfiguration":{"test_destination":{"logging_config":{"logging_type":"FirstN","max_entry_count":100},"test_destination_type":"LOGGING"}},"operationSequence":[],"catalog":{"streams":[{"stream":{"name":"pokemon","json_schema":{"$schema":"http://json-schema.org/draft-07/schema#","type":"object","properties":{"location_area_encounters":{"type":["null","string"]},"types":{"type":["null","array"],"items":{"additionalProperties":true,"type":["null","object"],"properties":{"slot":{"type":["null","integer"]},"type":{"type":["null","object"],"properties":{"name":{"type":["null","string"]},"url":{"type":["null","string"]}}}}}},"base_experience":{"type":["null","integer"]},"held_items":{"type":["null","array"],"items":{"additionalProperties":true,"type":["null","object"],"properties":{"item":{"additionalProperties":true,"type":["null","object"],"properties":{"name":{"type":["null","string"]},"url":{"type":["null","string"]}}},"version_details":{"type":["null","array"],"items":{"additionalProperties":true,"type":["null","object"],"properties":{"version":{"additionalProperties":true,"type":["null","object"],"properties":{"name":{"type":["null","string"]},"url":{"type":["null","string"]}}},"rarity":{"type":["null","integer"]}}}}}}},"weight":{"type":["null","integer"]},"is_default":{"type":["null","boolean"]},"sprites":{"additionalProperties":true,"type":["null","object"],"properties":{"back_shiny_female":{"type":["null","string"]},"back_female":{"type":["null","string"]},"back_default":{"type":["null","string"]},"front_shiny_female":{"type":["null","string"]},"front_default":{"type":["null","string"]},"front_female":{"type":["null","string"]},"back_shiny":{"type":["null","string"]},"front_shiny":{"type":["null","string"]}}},"past_types":{"type":["null","array"],"items":{"additionalProperties":true,"type":["null","object"],"properties":{"generation":{"additionalProperties":true,"type":["null","object"],"properties":{"name":{"type":["null","string"]},"url":{"type":["null","string"]}}},"types":{"type":["null","array"],"items":{"additionalProperties":true,"type":["null","object"],"properties":{"slot":{"type":["null","integer"]},"type":{"additionalProperties":true,"type":["null","object"],"properties":{"name":{"type":["null","string"]},"url":{"type":["null","string"]}}}}}}}}},"abilities":{"type":["null","array"],"items":{"additionalProperties":true,"type":["null","object"],"properties":{"is_hidden":{"type":["null","boolean"]},"slot":{"type":["null","integer"]},"ability":{"additionalProperties":true,"type":["null","object"],"properties":{"name":{"type":["null","string"]},"url":{"type":["null","string"]}}}}}},"game_indices":{"type":["null","array"],"items":{"additionalProperties":true,"type":["null","object"],"properties":{"game_index":{"type":["null","integer"]},"version":{"additionalProperties":true,"type":["null","object"],"properties":{"name":{"type":["null","string"]},"url":{"type":["null","string"]}}}}}},"stats":{"type":["null","array"],"items":{"additionalProperties":true,"type":["null","object"],"properties":{"stat":{"additionalProperties":true,"type":["null","object"],"properties":{"name":{"type":["null","string"]},"url":{"type":["null","string"]}}},"base_stat":{"type":["null","integer"]},"effort":{"type":["null","integer"]}}}},"species":{"additionalProperties":true,"type":["null","object"],"properties
":{"name":{"type":["null","string"]},"url":{"type":["null","string"]}}},"moves":{"type":["null","array"],"items":{"additionalProperties":true,"type":["null","object"],"properties":{"version_group_details":{"type":["null","array"],"items":{"additionalProperties":true,"type":["null","object"],"properties":{"level_learned_at":{"type":["null","integer"]},"version_group":{"additionalProperties":true,"type":["null","object"],"properties":{"name":{"type":["null","string"]},"url":{"type":["null","string"]}}},"move_learn_method":{"additionalProperties":true,"type":["null","object"],"properties":{"name":{"type":["null","string"]},"url":{"type":["null","string"]}}}}}},"move":{"additionalProperties":true,"type":["null","object"],"properties":{"name":{"type":["null","string"]},"url":{"type":["null","string"]}}}}}},"name":{"type":["null","string"]},"id":{"type":["null","integer"]},"forms":{"type":["null","array"],"items":{"additionalProperties":true,"type":["null","object"],"properties":{"name":{"type":["null","string"]},"url":{"type":["null","string"]}}}},"order":{"type":["null","integer"]},"height":{"type":["null","integer"]}}},"supported_sync_modes":["full_refresh"],"default_cursor_field":[],"source_defined_primary_key":[["id"]]},"sync_mode":"full_refresh","cursor_field":[],"destination_sync_mode":"overwrite","primary_key":[["id"]]}]},"syncResourceRequirements":{"configKey":{"variant":"default","subType":"api"},"destination":{"cpu_request":"0.2","cpu_limit":"1","memory_request":"1Gi","memory_limit":"2Gi"},"destinationStdErr":{"cpu_request":"0.01","cpu_limit":"0.5","memory_request":"25Mi","memory_limit":"50Mi"},"destinationStdIn":{"cpu_request":"0.1","cpu_limit":"1","memory_request":"25Mi","memory_limit":"50Mi"},"destinationStdOut":{"cpu_request":"0.01","cpu_limit":"0.5","memory_request":"25Mi","memory_limit":"50Mi"},"orchestrator":{"cpu_request":"0.3","cpu_limit":"1","memory_request":"2Gi","memory_limit":"2Gi"},"source":{"cpu_request":"0.2","cpu_limit":"1","memory_request":"1Gi","memory_limit":"2Gi"},"sourceStdErr":{"cpu_request":"0.01","cpu_limit":"0.5","memory_request":"25Mi","memory_limit":"50Mi"},"sourceStdOut":{"cpu_request":"0.2","cpu_limit":"1","memory_request":"25Mi","memory_limit":"50Mi"},"heartbeat":{"cpu_request":"0.05","cpu_limit":"0.2","memory_request":"25Mi","memory_limit":"50Mi"}},"workspaceId":"1bec9215-4ae9-4d00-b06e-3630f6fd1a78","connectionId":"999ef05a-dbd9-445a-903b-5a1f82adc26d","normalizeInDestinationContainer":false,"isReset":false,"connectionContext":{"connectionId":"999ef05a-dbd9-445a-903b-5a1f82adc26d","sourceId":"1b66de36-4041-43a4-a34d-3f539786fcf7","destinationId":"894225b5-b534-4daa-a1fe-192bf89f6db0","workspaceId":"1bec9215-4ae9-4d00-b06e-3630f6fd1a78","organizationId":"00000000-0000-0000-0000-000000000000"}}" - }, - { - "metadata": { - "encoding": "anNvbi9wbGFpbg==" - }, - "data": "Ijk5OWVmMDVhLWRiZDktNDQ1YS05MDNiLTVhMWY4MmFkYzI2ZCI=" - } - ] - }, - "workflowExecutionTimeout": "0s", - "workflowRunTimeout": "0s", - "workflowTaskTimeout": "10s", - "continuedExecutionRunId": "", - "initiator": "Unspecified", - "continuedFailure": null, - "lastCompletionResult": null, - "originalExecutionRunId": "fcd046e0-056d-4468-a45a-337efa654948", - "identity": "", - "firstExecutionRunId": "fcd046e0-056d-4468-a45a-337efa654948", - "retryPolicy": null, - "attempt": 1, - "workflowExecutionExpirationTime": null, - "cronSchedule": "", - "firstWorkflowTaskBackoff": "0s", - "memo": null, - "searchAttributes": null, - "prevAutoResetPoints": null, - "header": { - "fields": {} - }, - 
"parentInitiatedEventVersion": "0", - "workflowId": "sync_8", - "sourceVersionStamp": null - } - }, - { - "eventId": "2", - "eventTime": "2024-01-19T17:54:01.392695545Z", - "eventType": "WorkflowTaskScheduled", - "version": "0", - "taskId": "13631649", - "workerMayIgnore": false, - "workflowTaskScheduledEventAttributes": { - "taskQueue": { - "name": "SYNC", - "kind": "Normal", - "normalName": "" - }, - "startToCloseTimeout": "10s", - "attempt": 1 - } - }, - { - "eventId": "3", - "eventTime": "2024-01-19T17:54:01.418664087Z", - "eventType": "WorkflowTaskStarted", - "version": "0", - "taskId": "13631656", - "workerMayIgnore": false, - "workflowTaskStartedEventAttributes": { - "scheduledEventId": "2", - "identity": "1@a13622a48bbf", - "requestId": "7a50b6a6-ca97-4e13-96c6-88d8a189a117", - "suggestContinueAsNew": false, - "historySizeBytes": "7541" - } - }, - { - "eventId": "4", - "eventTime": "2024-01-19T17:54:01.450265920Z", - "eventType": "WorkflowTaskCompleted", - "version": "0", - "taskId": "13631662", - "workerMayIgnore": false, - "workflowTaskCompletedEventAttributes": { - "scheduledEventId": "2", - "startedEventId": "3", - "identity": "1@a13622a48bbf", - "binaryChecksum": "", - "workerVersion": { - "buildId": "", - "bundleId": "", - "useVersioning": false - }, - "sdkMetadata": { - "coreUsedFlags": [], - "langUsedFlags": [1], - "sdkName": "", - "sdkVersion": "" - }, - "meteringMetadata": { - "nonfirstLocalActivityExecutionAttempts": 0 - } - } - }, - { - "eventId": "5", - "eventTime": "2024-01-19T17:54:01.450375587Z", - "eventType": "ActivityTaskScheduled", - "version": "0", - "taskId": "13631663", - "workerMayIgnore": false, - "activityTaskScheduledEventAttributes": { - "activityId": "35b400c1-76ff-3e03-9fb6-91f28d995e20", - "activityType": { - "name": "UseWorkloadApi" - }, - "taskQueue": { - "name": "SYNC", - "kind": "Normal", - "normalName": "" - }, - "header": { - "fields": {} - }, - "input": { - "payloads": [ - { - "metadata": { - "encoding": "anNvbi9wbGFpbg==" - }, - "data": "eyJ3b3Jrc3BhY2VJZCI6IjFiZWM5MjE1LTRhZTktNGQwMC1iMDZlLTM2MzBmNmZkMWE3OCIsImNvbm5lY3Rpb25JZCI6Ijk5OWVmMDVhLWRiZDktNDQ1YS05MDNiLTVhMWY4MmFkYzI2ZCIsIm9yZ2FuaXphdGlvbklkIjoiMDAwMDAwMDAtMDAwMC0wMDAwLTAwMDAtMDAwMDAwMDAwMDAwIn0=" - } - ] - }, - "scheduleToCloseTimeout": "0s", - "scheduleToStartTimeout": "0s", - "startToCloseTimeout": "120s", - "heartbeatTimeout": "30s", - "workflowTaskCompletedEventId": "4", - "retryPolicy": { - "initialInterval": "30s", - "backoffCoefficient": 2, - "maximumInterval": "600s", - "maximumAttempts": 5, - "nonRetryableErrorTypes": [] - }, - "useCompatibleVersion": false - } - }, - { - "eventId": "6", - "eventTime": "2024-01-19T17:54:01.463666712Z", - "eventType": "ActivityTaskStarted", - "version": "0", - "taskId": "13631669", - "workerMayIgnore": false, - "activityTaskStartedEventAttributes": { - "scheduledEventId": "5", - "identity": "1@a13622a48bbf", - "requestId": "44daf745-59ac-4153-a6b7-d645523ba731", - "attempt": 1, - "lastFailure": null - } - }, - { - "eventId": "7", - "eventTime": "2024-01-19T17:54:01.483123628Z", - "eventType": "ActivityTaskCompleted", - "version": "0", - "taskId": "13631670", - "workerMayIgnore": false, - "activityTaskCompletedEventAttributes": { - "result": { - "payloads": [ - { - "metadata": { - "encoding": "anNvbi9wbGFpbg==" - }, - "data": "ZmFsc2U=" - } - ] - }, - "scheduledEventId": "5", - "startedEventId": "6", - "identity": "1@a13622a48bbf", - "workerVersion": null - } - }, - { - "eventId": "8", - "eventTime": "2024-01-19T17:54:01.483130462Z", - 
"eventType": "WorkflowTaskScheduled", - "version": "0", - "taskId": "13631671", - "workerMayIgnore": false, - "workflowTaskScheduledEventAttributes": { - "taskQueue": { - "name": "1@a13622a48bbf:24cc445d-3319-4d09-8cb5-d95c55af6a1d", - "kind": "Sticky", - "normalName": "SYNC" - }, - "startToCloseTimeout": "10s", - "attempt": 1 - } - }, - { - "eventId": "9", - "eventTime": "2024-01-19T17:54:01.548659420Z", - "eventType": "WorkflowTaskStarted", - "version": "0", - "taskId": "13631675", - "workerMayIgnore": false, - "workflowTaskStartedEventAttributes": { - "scheduledEventId": "8", - "identity": "1@a13622a48bbf", - "requestId": "83571557-fba4-46bf-8526-06059a49e21f", - "suggestContinueAsNew": false, - "historySizeBytes": "8278" - } - }, - { - "eventId": "10", - "eventTime": "2024-01-19T17:54:01.617768337Z", - "eventType": "WorkflowTaskCompleted", - "version": "0", - "taskId": "13631679", - "workerMayIgnore": false, - "workflowTaskCompletedEventAttributes": { - "scheduledEventId": "8", - "startedEventId": "9", - "identity": "1@a13622a48bbf", - "binaryChecksum": "", - "workerVersion": { - "buildId": "", - "bundleId": "", - "useVersioning": false - }, - "sdkMetadata": null, - "meteringMetadata": { - "nonfirstLocalActivityExecutionAttempts": 0 - } - } - }, - { - "eventId": "11", - "eventTime": "2024-01-19T17:54:01.617903670Z", - "eventType": "ActivityTaskScheduled", - "version": "0", - "taskId": "13631680", - "workerMayIgnore": false, - "activityTaskScheduledEventAttributes": { - "activityId": "23981b87-2a58-38f4-a601-c91e10167866", - "activityType": { - "name": "UseOutputDocStore" - }, - "taskQueue": { - "name": "SYNC", - "kind": "Normal", - "normalName": "" - }, - "header": { - "fields": {} - }, - "input": { - "payloads": [ - { - "metadata": { - "encoding": "anNvbi9wbGFpbg==" - }, - "data": "eyJ3b3Jrc3BhY2VJZCI6IjFiZWM5MjE1LTRhZTktNGQwMC1iMDZlLTM2MzBmNmZkMWE3OCIsImNvbm5lY3Rpb25JZCI6Ijk5OWVmMDVhLWRiZDktNDQ1YS05MDNiLTVhMWY4MmFkYzI2ZCIsIm9yZ2FuaXphdGlvbklkIjoiMDAwMDAwMDAtMDAwMC0wMDAwLTAwMDAtMDAwMDAwMDAwMDAwIn0=" - } - ] - }, - "scheduleToCloseTimeout": "0s", - "scheduleToStartTimeout": "0s", - "startToCloseTimeout": "120s", - "heartbeatTimeout": "30s", - "workflowTaskCompletedEventId": "10", - "retryPolicy": { - "initialInterval": "30s", - "backoffCoefficient": 2, - "maximumInterval": "600s", - "maximumAttempts": 5, - "nonRetryableErrorTypes": [] - }, - "useCompatibleVersion": false - } - }, - { - "eventId": "12", - "eventTime": "2024-01-19T17:54:01.684730545Z", - "eventType": "ActivityTaskStarted", - "version": "0", - "taskId": "13631685", - "workerMayIgnore": false, - "activityTaskStartedEventAttributes": { - "scheduledEventId": "11", - "identity": "1@a13622a48bbf", - "requestId": "430d6b57-f0e0-43ac-913c-df24df7420ed", - "attempt": 1, - "lastFailure": null - } - }, - { - "eventId": "13", - "eventTime": "2024-01-19T17:54:01.721181504Z", - "eventType": "ActivityTaskCompleted", - "version": "0", - "taskId": "13631686", - "workerMayIgnore": false, - "activityTaskCompletedEventAttributes": { - "result": { - "payloads": [ - { - "metadata": { - "encoding": "anNvbi9wbGFpbg==" - }, - "data": "ZmFsc2U=" - } - ] - }, - "scheduledEventId": "11", - "startedEventId": "12", - "identity": "1@a13622a48bbf", - "workerVersion": null - } - }, - { - "eventId": "14", - "eventTime": "2024-01-19T17:54:01.721192462Z", - "eventType": "WorkflowTaskScheduled", - "version": "0", - "taskId": "13631687", - "workerMayIgnore": false, - "workflowTaskScheduledEventAttributes": { - "taskQueue": { - "name": 
"1@a13622a48bbf:24cc445d-3319-4d09-8cb5-d95c55af6a1d", - "kind": "Sticky", - "normalName": "SYNC" - }, - "startToCloseTimeout": "10s", - "attempt": 1 - } - }, - { - "eventId": "15", - "eventTime": "2024-01-19T17:54:01.734748629Z", - "eventType": "WorkflowTaskStarted", - "version": "0", - "taskId": "13631691", - "workerMayIgnore": false, - "workflowTaskStartedEventAttributes": { - "scheduledEventId": "14", - "identity": "1@a13622a48bbf", - "requestId": "d2604e55-adf3-448d-bca5-4254affa1ff3", - "suggestContinueAsNew": false, - "historySizeBytes": "9013" - } - }, - { - "eventId": "16", - "eventTime": "2024-01-19T17:54:01.748411545Z", - "eventType": "WorkflowTaskCompleted", - "version": "0", - "taskId": "13631695", - "workerMayIgnore": false, - "workflowTaskCompletedEventAttributes": { - "scheduledEventId": "14", - "startedEventId": "15", - "identity": "1@a13622a48bbf", - "binaryChecksum": "", - "workerVersion": { - "buildId": "", - "bundleId": "", - "useVersioning": false - }, - "sdkMetadata": null, - "meteringMetadata": { - "nonfirstLocalActivityExecutionAttempts": 0 - } - } - }, - { - "eventId": "17", - "eventTime": "2024-01-19T17:54:01.748449920Z", - "eventType": "ActivityTaskScheduled", - "version": "0", - "taskId": "13631696", - "workerMayIgnore": false, - "activityTaskScheduledEventAttributes": { - "activityId": "e6c99c77-5d34-3424-a4d0-64f300decf52", - "activityType": { - "name": "GetSourceId" - }, - "taskQueue": { - "name": "SYNC", - "kind": "Normal", - "normalName": "" - }, - "header": { - "fields": {} - }, - "input": { - "payloads": [ - { - "metadata": { - "encoding": "anNvbi9wbGFpbg==" - }, - "data": "Ijk5OWVmMDVhLWRiZDktNDQ1YS05MDNiLTVhMWY4MmFkYzI2ZCI=" - } - ] - }, - "scheduleToCloseTimeout": "0s", - "scheduleToStartTimeout": "0s", - "startToCloseTimeout": "120s", - "heartbeatTimeout": "30s", - "workflowTaskCompletedEventId": "16", - "retryPolicy": { - "initialInterval": "30s", - "backoffCoefficient": 2, - "maximumInterval": "600s", - "maximumAttempts": 5, - "nonRetryableErrorTypes": [] - }, - "useCompatibleVersion": false - } - }, - { - "eventId": "18", - "eventTime": "2024-01-19T17:54:01.755496712Z", - "eventType": "ActivityTaskStarted", - "version": "0", - "taskId": "13631701", - "workerMayIgnore": false, - "activityTaskStartedEventAttributes": { - "scheduledEventId": "17", - "identity": "1@a13622a48bbf", - "requestId": "d10dbdb0-99e3-4e3d-a486-cadea65bd00d", - "attempt": 1, - "lastFailure": null - } - }, - { - "eventId": "19", - "eventTime": "2024-01-19T17:54:01.860476879Z", - "eventType": "ActivityTaskCompleted", - "version": "0", - "taskId": "13631702", - "workerMayIgnore": false, - "activityTaskCompletedEventAttributes": { - "result": { - "payloads": [ - { - "metadata": { - "encoding": "anNvbi9wbGFpbg==" - }, - "data": "IjFiNjZkZTM2LTQwNDEtNDNhNC1hMzRkLTNmNTM5Nzg2ZmNmNyI=" - } - ] - }, - "scheduledEventId": "17", - "startedEventId": "18", - "identity": "1@a13622a48bbf", - "workerVersion": null - } - }, - { - "eventId": "20", - "eventTime": "2024-01-19T17:54:01.860483337Z", - "eventType": "WorkflowTaskScheduled", - "version": "0", - "taskId": "13631703", - "workerMayIgnore": false, - "workflowTaskScheduledEventAttributes": { - "taskQueue": { - "name": "1@a13622a48bbf:24cc445d-3319-4d09-8cb5-d95c55af6a1d", - "kind": "Sticky", - "normalName": "SYNC" - }, - "startToCloseTimeout": "10s", - "attempt": 1 - } - }, - { - "eventId": "21", - "eventTime": "2024-01-19T17:54:01.867421462Z", - "eventType": "WorkflowTaskStarted", - "version": "0", - "taskId": "13631707", - 
"workerMayIgnore": false, - "workflowTaskStartedEventAttributes": { - "scheduledEventId": "20", - "identity": "1@a13622a48bbf", - "requestId": "14f868bb-b1eb-49bb-bad5-3ad4d644b20d", - "suggestContinueAsNew": false, - "historySizeBytes": "9646" - } - }, - { - "eventId": "22", - "eventTime": "2024-01-19T17:54:01.879152379Z", - "eventType": "WorkflowTaskCompleted", - "version": "0", - "taskId": "13631711", - "workerMayIgnore": false, - "workflowTaskCompletedEventAttributes": { - "scheduledEventId": "20", - "startedEventId": "21", - "identity": "1@a13622a48bbf", - "binaryChecksum": "", - "workerVersion": { - "buildId": "", - "bundleId": "", - "useVersioning": false - }, - "sdkMetadata": null, - "meteringMetadata": { - "nonfirstLocalActivityExecutionAttempts": 0 - } - } - }, - { - "eventId": "23", - "eventTime": "2024-01-19T17:54:01.879216504Z", - "eventType": "ActivityTaskScheduled", - "version": "0", - "taskId": "13631712", - "workerMayIgnore": false, - "activityTaskScheduledEventAttributes": { - "activityId": "54354418-20bc-3da4-8836-95d0c6943103", - "activityType": { - "name": "ShouldRefreshSchema" - }, - "taskQueue": { - "name": "SYNC", - "kind": "Normal", - "normalName": "" - }, - "header": { - "fields": {} - }, - "input": { - "payloads": [ - { - "metadata": { - "encoding": "anNvbi9wbGFpbg==" - }, - "data": "IjFiNjZkZTM2LTQwNDEtNDNhNC1hMzRkLTNmNTM5Nzg2ZmNmNyI=" - } - ] - }, - "scheduleToCloseTimeout": "1800s", - "scheduleToStartTimeout": "1800s", - "startToCloseTimeout": "1800s", - "heartbeatTimeout": "0s", - "workflowTaskCompletedEventId": "22", - "retryPolicy": { - "initialInterval": "1s", - "backoffCoefficient": 2, - "maximumInterval": "100s", - "maximumAttempts": 1, - "nonRetryableErrorTypes": [] - }, - "useCompatibleVersion": false - } - }, - { - "eventId": "24", - "eventTime": "2024-01-19T17:54:01.887260879Z", - "eventType": "ActivityTaskStarted", - "version": "0", - "taskId": "13631718", - "workerMayIgnore": false, - "activityTaskStartedEventAttributes": { - "scheduledEventId": "23", - "identity": "1@a13622a48bbf", - "requestId": "938ce08a-8d00-4c02-8438-4e4e83ca2a4c", - "attempt": 1, - "lastFailure": null - } - }, - { - "eventId": "25", - "eventTime": "2024-01-19T17:54:01.941878337Z", - "eventType": "ActivityTaskCompleted", - "version": "0", - "taskId": "13631719", - "workerMayIgnore": false, - "activityTaskCompletedEventAttributes": { - "result": { - "payloads": [ - { - "metadata": { - "encoding": "anNvbi9wbGFpbg==" - }, - "data": "dHJ1ZQ==" - } - ] - }, - "scheduledEventId": "23", - "startedEventId": "24", - "identity": "1@a13622a48bbf", - "workerVersion": null - } - }, - { - "eventId": "26", - "eventTime": "2024-01-19T17:54:01.941886170Z", - "eventType": "WorkflowTaskScheduled", - "version": "0", - "taskId": "13631720", - "workerMayIgnore": false, - "workflowTaskScheduledEventAttributes": { - "taskQueue": { - "name": "1@a13622a48bbf:24cc445d-3319-4d09-8cb5-d95c55af6a1d", - "kind": "Sticky", - "normalName": "SYNC" - }, - "startToCloseTimeout": "10s", - "attempt": 1 - } - }, - { - "eventId": "27", - "eventTime": "2024-01-19T17:54:01.949086379Z", - "eventType": "WorkflowTaskStarted", - "version": "0", - "taskId": "13631724", - "workerMayIgnore": false, - "workflowTaskStartedEventAttributes": { - "scheduledEventId": "26", - "identity": "1@a13622a48bbf", - "requestId": "f90b30b1-545c-475b-bcb1-3da4770e9f1b", - "suggestContinueAsNew": false, - "historySizeBytes": "10257" - } - }, - { - "eventId": "28", - "eventTime": "2024-01-19T17:54:01.965300212Z", - "eventType": 
"WorkflowTaskCompleted", - "version": "0", - "taskId": "13631728", - "workerMayIgnore": false, - "workflowTaskCompletedEventAttributes": { - "scheduledEventId": "26", - "startedEventId": "27", - "identity": "1@a13622a48bbf", - "binaryChecksum": "", - "workerVersion": { - "buildId": "", - "bundleId": "", - "useVersioning": false - }, - "sdkMetadata": null, - "meteringMetadata": { - "nonfirstLocalActivityExecutionAttempts": 0 - } - } - }, - { - "eventId": "29", - "eventTime": "2024-01-19T17:54:01.965325087Z", - "eventType": "MarkerRecorded", - "version": "0", - "taskId": "13631729", - "workerMayIgnore": false, - "markerRecordedEventAttributes": { - "markerName": "Version", - "details": { - "changeId": { - "payloads": [ - { - "metadata": { - "encoding": "anNvbi9wbGFpbg==" - }, - "data": "IkFVVE9fQkFDS0ZJTExfT05fTkVXX0NPTFVNTlMi" - } - ] - }, - "version": { - "payloads": [ - { - "metadata": { - "encoding": "anNvbi9wbGFpbg==" - }, - "data": "MQ==" - } - ] - } - }, - "workflowTaskCompletedEventId": "28", - "header": null, - "failure": null - } - }, - { - "eventId": "30", - "eventTime": "2024-01-19T17:54:01.965345545Z", - "eventType": "ActivityTaskScheduled", - "version": "0", - "taskId": "13631730", - "workerMayIgnore": false, - "activityTaskScheduledEventAttributes": { - "activityId": "6ff7c02f-4af1-3200-bcf6-133d6e67f956", - "activityType": { - "name": "RefreshSchemaV2" - }, - "taskQueue": { - "name": "SYNC", - "kind": "Normal", - "normalName": "" - }, - "header": { - "fields": {} - }, - "input": { - "payloads": [ - { - "metadata": { - "encoding": "anNvbi9wbGFpbg==" - }, - "data": "eyJzb3VyY2VDYXRhbG9nSWQiOiIxYjY2ZGUzNi00MDQxLTQzYTQtYTM0ZC0zZjUzOTc4NmZjZjciLCJjb25uZWN0aW9uSWQiOiI5OTllZjA1YS1kYmQ5LTQ0NWEtOTAzYi01YTFmODJhZGMyNmQiLCJ3b3Jrc3BhY2VJZCI6IjFiZWM5MjE1LTRhZTktNGQwMC1iMDZlLTM2MzBmNmZkMWE3OCJ9" - } - ] - }, - "scheduleToCloseTimeout": "1800s", - "scheduleToStartTimeout": "1800s", - "startToCloseTimeout": "1800s", - "heartbeatTimeout": "0s", - "workflowTaskCompletedEventId": "28", - "retryPolicy": { - "initialInterval": "1s", - "backoffCoefficient": 2, - "maximumInterval": "100s", - "maximumAttempts": 1, - "nonRetryableErrorTypes": [] - }, - "useCompatibleVersion": false - } - }, - { - "eventId": "31", - "eventTime": "2024-01-19T17:54:01.972715004Z", - "eventType": "ActivityTaskStarted", - "version": "0", - "taskId": "13631747", - "workerMayIgnore": false, - "activityTaskStartedEventAttributes": { - "scheduledEventId": "30", - "identity": "1@a13622a48bbf", - "requestId": "5cdcc75c-29d9-41fb-b469-b7e8d8afc1f8", - "attempt": 1, - "lastFailure": null - } - }, - { - "eventId": "32", - "eventTime": "2024-01-19T17:54:05.757867214Z", - "eventType": "ActivityTaskCompleted", - "version": "0", - "taskId": "13631748", - "workerMayIgnore": false, - "activityTaskCompletedEventAttributes": { - "result": { - "payloads": [ - { - "metadata": { - "encoding": "anNvbi9wbGFpbg==" - }, - "data": "eyJhcHBsaWVkRGlmZiI6bnVsbH0=" - } - ] - }, - "scheduledEventId": "30", - "startedEventId": "31", - "identity": "1@a13622a48bbf", - "workerVersion": null - } - }, - { - "eventId": "33", - "eventTime": "2024-01-19T17:54:05.757873755Z", - "eventType": "WorkflowTaskScheduled", - "version": "0", - "taskId": "13631749", - "workerMayIgnore": false, - "workflowTaskScheduledEventAttributes": { - "taskQueue": { - "name": "1@a13622a48bbf:24cc445d-3319-4d09-8cb5-d95c55af6a1d", - "kind": "Sticky", - "normalName": "SYNC" - }, - "startToCloseTimeout": "10s", - "attempt": 1 - } - }, - { - "eventId": "34", - "eventTime": 
"2024-01-19T17:54:05.766371089Z", - "eventType": "WorkflowTaskStarted", - "version": "0", - "taskId": "13631753", - "workerMayIgnore": false, - "workflowTaskStartedEventAttributes": { - "scheduledEventId": "33", - "identity": "1@a13622a48bbf", - "requestId": "6e4909d6-de25-47fa-9eb7-7ba4741069ca", - "suggestContinueAsNew": false, - "historySizeBytes": "11164" - } - }, - { - "eventId": "35", - "eventTime": "2024-01-19T17:54:05.855501172Z", - "eventType": "WorkflowTaskCompleted", - "version": "0", - "taskId": "13631757", - "workerMayIgnore": false, - "workflowTaskCompletedEventAttributes": { - "scheduledEventId": "33", - "startedEventId": "34", - "identity": "1@a13622a48bbf", - "binaryChecksum": "", - "workerVersion": { - "buildId": "", - "bundleId": "", - "useVersioning": false - }, - "sdkMetadata": null, - "meteringMetadata": { - "nonfirstLocalActivityExecutionAttempts": 0 - } - } - }, - { - "eventId": "36", - "eventTime": "2024-01-19T17:54:05.855556547Z", - "eventType": "ActivityTaskScheduled", - "version": "0", - "taskId": "13631758", - "workerMayIgnore": false, - "activityTaskScheduledEventAttributes": { - "activityId": "4b8e1e51-0e0c-33be-854b-df39e725f431", - "activityType": { - "name": "GetStatus" - }, - "taskQueue": { - "name": "SYNC", - "kind": "Normal", - "normalName": "" - }, - "header": { - "fields": {} - }, - "input": { - "payloads": [ - { - "metadata": { - "encoding": "anNvbi9wbGFpbg==" - }, - "data": "Ijk5OWVmMDVhLWRiZDktNDQ1YS05MDNiLTVhMWY4MmFkYzI2ZCI=" - } - ] - }, - "scheduleToCloseTimeout": "0s", - "scheduleToStartTimeout": "0s", - "startToCloseTimeout": "120s", - "heartbeatTimeout": "30s", - "workflowTaskCompletedEventId": "35", - "retryPolicy": { - "initialInterval": "30s", - "backoffCoefficient": 2, - "maximumInterval": "600s", - "maximumAttempts": 5, - "nonRetryableErrorTypes": [] - }, - "useCompatibleVersion": false - } - }, - { - "eventId": "37", - "eventTime": "2024-01-19T17:54:05.916894589Z", - "eventType": "ActivityTaskStarted", - "version": "0", - "taskId": "13631763", - "workerMayIgnore": false, - "activityTaskStartedEventAttributes": { - "scheduledEventId": "36", - "identity": "1@a13622a48bbf", - "requestId": "be0a336d-c30e-4510-9858-b76daba6ee63", - "attempt": 1, - "lastFailure": null - } - }, - { - "eventId": "38", - "eventTime": "2024-01-19T17:54:06.825562923Z", - "eventType": "ActivityTaskCompleted", - "version": "0", - "taskId": "13631764", - "workerMayIgnore": false, - "activityTaskCompletedEventAttributes": { - "result": { - "payloads": [ - { - "metadata": { - "encoding": "anNvbi9wbGFpbg==" - }, - "data": "ImFjdGl2ZSI=" - } - ] - }, - "scheduledEventId": "36", - "startedEventId": "37", - "identity": "1@a13622a48bbf", - "workerVersion": null - } - }, - { - "eventId": "39", - "eventTime": "2024-01-19T17:54:06.825595881Z", - "eventType": "WorkflowTaskScheduled", - "version": "0", - "taskId": "13631765", - "workerMayIgnore": false, - "workflowTaskScheduledEventAttributes": { - "taskQueue": { - "name": "1@a13622a48bbf:24cc445d-3319-4d09-8cb5-d95c55af6a1d", - "kind": "Sticky", - "normalName": "SYNC" - }, - "startToCloseTimeout": "10s", - "attempt": 1 - } - }, - { - "eventId": "40", - "eventTime": "2024-01-19T17:54:06.847367506Z", - "eventType": "WorkflowTaskStarted", - "version": "0", - "taskId": "13631769", - "workerMayIgnore": false, - "workflowTaskStartedEventAttributes": { - "scheduledEventId": "39", - "identity": "1@a13622a48bbf", - "requestId": "28c4333a-b746-49e0-8f55-fa0aa70b4c76", - "suggestContinueAsNew": false, - "historySizeBytes": "11765" - } - 
}, - { - "eventId": "41", - "eventTime": "2024-01-19T17:54:06.888239506Z", - "eventType": "WorkflowTaskCompleted", - "version": "0", - "taskId": "13631773", - "workerMayIgnore": false, - "workflowTaskCompletedEventAttributes": { - "scheduledEventId": "39", - "startedEventId": "40", - "identity": "1@a13622a48bbf", - "binaryChecksum": "", - "workerVersion": { - "buildId": "", - "bundleId": "", - "useVersioning": false - }, - "sdkMetadata": null, - "meteringMetadata": { - "nonfirstLocalActivityExecutionAttempts": 0 - } - } - }, - { - "eventId": "42", - "eventTime": "2024-01-19T17:54:06.888366381Z", - "eventType": "ActivityTaskScheduled", - "version": "0", - "taskId": "13631774", - "workerMayIgnore": false, - "activityTaskScheduledEventAttributes": { - "activityId": "ddf315f4-504f-3dee-a91d-590fd3724c93", - "activityType": { - "name": "ReplicateV2" - }, - "taskQueue": { - "name": "SYNC", - "kind": "Normal", - "normalName": "" - }, - "header": { - "fields": {} - }, - "input": { - "payloads": [ - { - "metadata": { - "encoding": "anNvbi9wbGFpbg==" - }, - "data": "eyJzb3VyY2VJZCI6IjFiNjZkZTM2LTQwNDEtNDNhNC1hMzRkLTNmNTM5Nzg2ZmNmNyIsImRlc3RpbmF0aW9uSWQiOiI4OTQyMjViNS1iNTM0LTRkYWEtYTFmZS0xOTJiZjg5ZjZkYjAiLCJzb3VyY2VDb25maWd1cmF0aW9uIjp7InBva2Vtb25fbmFtZSI6InZlbnVzYXVyIn0sImRlc3RpbmF0aW9uQ29uZmlndXJhdGlvbiI6eyJ0ZXN0X2Rlc3RpbmF0aW9uIjp7ImxvZ2dpbmdfY29uZmlnIjp7ImxvZ2dpbmdfdHlwZSI6IkZpcnN0TiIsIm1heF9lbnRyeV9jb3VudCI6MTAwfSwidGVzdF9kZXN0aW5hdGlvbl90eXBlIjoiTE9HR0lORyJ9fSwiam9iUnVuQ29uZmlnIjp7ImpvYklkIjoiOCIsImF0dGVtcHRJZCI6MH0sInNvdXJjZUxhdW5jaGVyQ29uZmlnIjp7ImpvYklkIjoiOCIsImF0dGVtcHRJZCI6MCwiY29ubmVjdGlvbklkIjoiOTk5ZWYwNWEtZGJkOS00NDVhLTkwM2ItNWExZjgyYWRjMjZkIiwid29ya3NwYWNlSWQiOiIxYmVjOTIxNS00YWU5LTRkMDAtYjA2ZS0zNjMwZjZmZDFhNzgiLCJkb2NrZXJJbWFnZSI6ImFpcmJ5dGUvc291cmNlLXBva2VhcGk6MC4yLjAiLCJzdXBwb3J0c0RidCI6ZmFsc2UsInByb3RvY29sVmVyc2lvbiI6eyJ2ZXJzaW9uIjoiMC4yLjAifSwiaXNDdXN0b21Db25uZWN0b3IiOmZhbHNlLCJhbGxvd2VkSG9zdHMiOnsiaG9zdHMiOlsiKiIsIiouZGF0YWRvZ2hxLmNvbSIsIiouZGF0YWRvZ2hxLmV1IiwiKi5zZW50cnkuaW8iXX19LCJkZXN0aW5hdGlvbkxhdW5jaGVyQ29uZmlnIjp7ImpvYklkIjoiOCIsImF0dGVtcHRJZCI6MCwiY29ubmVjdGlvbklkIjoiOTk5ZWYwNWEtZGJkOS00NDVhLTkwM2ItNWExZjgyYWRjMjZkIiwid29ya3NwYWNlSWQiOiIxYmVjOTIxNS00YWU5LTRkMDAtYjA2ZS0zNjMwZjZmZDFhNzgiLCJkb2NrZXJJbWFnZSI6ImFpcmJ5dGUvZGVzdGluYXRpb24tZTJlLXRlc3Q6MC4zLjAiLCJzdXBwb3J0c0RidCI6ZmFsc2UsInByb3RvY29sVmVyc2lvbiI6eyJ2ZXJzaW9uIjoiMC4yLjEifSwiaXNDdXN0b21Db25uZWN0b3IiOmZhbHNlLCJhZGRpdGlvbmFsRW52aXJvbm1lbnRWYXJpYWJsZXMiOnt9fSwic3luY1Jlc291cmNlUmVxdWlyZW1lbnRzIjp7ImNvbmZpZ0tleSI6eyJ2YXJpYW50IjoiZGVmYXVsdCIsInN1YlR5cGUiOiJhcGkifSwiZGVzdGluYXRpb24iOnsiY3B1X3JlcXVlc3QiOiIwLjIiLCJjcHVfbGltaXQiOiIxIiwibWVtb3J5X3JlcXVlc3QiOiIxR2kiLCJtZW1vcnlfbGltaXQiOiIyR2kifSwiZGVzdGluYXRpb25TdGRFcnIiOnsiY3B1X3JlcXVlc3QiOiIwLjAxIiwiY3B1X2xpbWl0IjoiMC41IiwibWVtb3J5X3JlcXVlc3QiOiIyNU1pIiwibWVtb3J5X2xpbWl0IjoiNTBNaSJ9LCJkZXN0aW5hdGlvblN0ZEluIjp7ImNwdV9yZXF1ZXN0IjoiMC4xIiwiY3B1X2xpbWl0IjoiMSIsIm1lbW9yeV9yZXF1ZXN0IjoiMjVNaSIsIm1lbW9yeV9saW1pdCI6IjUwTWkifSwiZGVzdGluYXRpb25TdGRPdXQiOnsiY3B1X3JlcXVlc3QiOiIwLjAxIiwiY3B1X2xpbWl0IjoiMC41IiwibWVtb3J5X3JlcXVlc3QiOiIyNU1pIiwibWVtb3J5X2xpbWl0IjoiNTBNaSJ9LCJvcmNoZXN0cmF0b3IiOnsiY3B1X3JlcXVlc3QiOiIwLjMiLCJjcHVfbGltaXQiOiIxIiwibWVtb3J5X3JlcXVlc3QiOiIyR2kiLCJtZW1vcnlfbGltaXQiOiIyR2kifSwic291cmNlIjp7ImNwdV9yZXF1ZXN0IjoiMC4yIiwiY3B1X2xpbWl0IjoiMSIsIm1lbW9yeV9yZXF1ZXN0IjoiMUdpIiwibWVtb3J5X2xpbWl0IjoiMkdpIn0sInNvdXJjZVN0ZEVyciI6eyJjcHVfcmVxdWVzdCI6IjAuMDEiLCJjcHVfbGltaXQiOiIwLjUiLCJtZW1vcnlfcmVxdWVzdCI6IjI1TWkiLCJtZW1vcnlfbGltaXQiOiI1ME1pIn0sInNvdXJjZVN0ZE91dCI6eyJjcHVfcmVxdWVzdCI6Ij
AuMiIsImNwdV9saW1pdCI6IjEiLCJtZW1vcnlfcmVxdWVzdCI6IjI1TWkiLCJtZW1vcnlfbGltaXQiOiI1ME1pIn0sImhlYXJ0YmVhdCI6eyJjcHVfcmVxdWVzdCI6IjAuMDUiLCJjcHVfbGltaXQiOiIwLjIiLCJtZW1vcnlfcmVxdWVzdCI6IjI1TWkiLCJtZW1vcnlfbGltaXQiOiI1ME1pIn19LCJ3b3Jrc3BhY2VJZCI6IjFiZWM5MjE1LTRhZTktNGQwMC1iMDZlLTM2MzBmNmZkMWE3OCIsImNvbm5lY3Rpb25JZCI6Ijk5OWVmMDVhLWRiZDktNDQ1YS05MDNiLTVhMWY4MmFkYzI2ZCIsIm5vcm1hbGl6ZUluRGVzdGluYXRpb25Db250YWluZXIiOmZhbHNlLCJ0YXNrUXVldWUiOiJTWU5DIiwiaXNSZXNldCI6ZmFsc2UsIm5hbWVzcGFjZURlZmluaXRpb24iOiJkZXN0aW5hdGlvbiIsIm5hbWVzcGFjZUZvcm1hdCI6bnVsbCwicHJlZml4IjpudWxsLCJzY2hlbWFSZWZyZXNoT3V0cHV0Ijp7ImFwcGxpZWREaWZmIjpudWxsfSwiY29ubmVjdGlvbkNvbnRleHQiOnsiY29ubmVjdGlvbklkIjoiOTk5ZWYwNWEtZGJkOS00NDVhLTkwM2ItNWExZjgyYWRjMjZkIiwic291cmNlSWQiOiIxYjY2ZGUzNi00MDQxLTQzYTQtYTM0ZC0zZjUzOTc4NmZjZjciLCJkZXN0aW5hdGlvbklkIjoiODk0MjI1YjUtYjUzNC00ZGFhLWExZmUtMTkyYmY4OWY2ZGIwIiwid29ya3NwYWNlSWQiOiIxYmVjOTIxNS00YWU5LTRkMDAtYjA2ZS0zNjMwZjZmZDFhNzgiLCJvcmdhbml6YXRpb25JZCI6IjAwMDAwMDAwLTAwMDAtMDAwMC0wMDAwLTAwMDAwMDAwMDAwMCJ9LCJ1c2VXb3JrbG9hZEFwaSI6ZmFsc2UsInVzZU5ld0RvY1N0b3JlQXBpIjpmYWxzZX0=" - } - ] - }, - "scheduleToCloseTimeout": "259200s", - "scheduleToStartTimeout": "259200s", - "startToCloseTimeout": "259200s", - "heartbeatTimeout": "30s", - "workflowTaskCompletedEventId": "41", - "retryPolicy": { - "initialInterval": "1s", - "backoffCoefficient": 2, - "maximumInterval": "100s", - "maximumAttempts": 1, - "nonRetryableErrorTypes": [] - }, - "useCompatibleVersion": false - } - }, - { - "eventId": "43", - "eventTime": "2024-01-19T17:54:06.903610631Z", - "eventType": "ActivityTaskStarted", - "version": "0", - "taskId": "13631780", - "workerMayIgnore": false, - "activityTaskStartedEventAttributes": { - "scheduledEventId": "42", - "identity": "1@a13622a48bbf", - "requestId": "1b649df3-ae66-433b-adac-21382dbf6cf2", - "attempt": 1, - "lastFailure": null - } - }, - { - "eventId": "44", - "eventTime": "2024-01-19T17:54:14.470114051Z", - "eventType": "ActivityTaskCompleted", - "version": "0", - "taskId": "13631781", - "workerMayIgnore": false, - "activityTaskCompletedEventAttributes": { - "result": { - "payloads": [ - { - "metadata": { - "encoding": "anNvbi9wbGFpbg==" - }, - "data": 
"eyJzdGFuZGFyZFN5bmNTdW1tYXJ5Ijp7InN0YXR1cyI6ImNvbXBsZXRlZCIsInJlY29yZHNTeW5jZWQiOjAsImJ5dGVzU3luY2VkIjowLCJzdGFydFRpbWUiOjE3MDU2ODY4NDc1OTMsImVuZFRpbWUiOjE3MDU2ODY4NTQ0MjUsInRvdGFsU3RhdHMiOnsiYnl0ZXNDb21taXR0ZWQiOjI1NDAyMiwiYnl0ZXNFbWl0dGVkIjoyNTQwMjIsImRlc3RpbmF0aW9uU3RhdGVNZXNzYWdlc0VtaXR0ZWQiOjAsImRlc3RpbmF0aW9uV3JpdGVFbmRUaW1lIjoxNzA1Njg2ODU0Mzg3LCJkZXN0aW5hdGlvbldyaXRlU3RhcnRUaW1lIjoxNzA1Njg2ODQ3NjMxLCJtZWFuU2Vjb25kc0JlZm9yZVNvdXJjZVN0YXRlTWVzc2FnZUVtaXR0ZWQiOjAsIm1heFNlY29uZHNCZWZvcmVTb3VyY2VTdGF0ZU1lc3NhZ2VFbWl0dGVkIjowLCJtYXhTZWNvbmRzQmV0d2VlblN0YXRlTWVzc2FnZUVtaXR0ZWRhbmRDb21taXR0ZWQiOjAsIm1lYW5TZWNvbmRzQmV0d2VlblN0YXRlTWVzc2FnZUVtaXR0ZWRhbmRDb21taXR0ZWQiOjAsInJlY29yZHNFbWl0dGVkIjoxLCJyZWNvcmRzQ29tbWl0dGVkIjoxLCJyZXBsaWNhdGlvbkVuZFRpbWUiOjE3MDU2ODY4NTQ0MjAsInJlcGxpY2F0aW9uU3RhcnRUaW1lIjoxNzA1Njg2ODQ3NTkzLCJzb3VyY2VSZWFkRW5kVGltZSI6MTcwNTY4Njg1MjUyMiwic291cmNlUmVhZFN0YXJ0VGltZSI6MTcwNTY4Njg0NzYzMywic291cmNlU3RhdGVNZXNzYWdlc0VtaXR0ZWQiOjB9LCJzdHJlYW1TdGF0cyI6W3sic3RyZWFtTmFtZSI6InBva2Vtb24iLCJzdGF0cyI6eyJieXRlc0NvbW1pdHRlZCI6MjU0MDIyLCJieXRlc0VtaXR0ZWQiOjI1NDAyMiwicmVjb3Jkc0VtaXR0ZWQiOjEsInJlY29yZHNDb21taXR0ZWQiOjF9fV0sInBlcmZvcm1hbmNlTWV0cmljcyI6eyJwcm9jZXNzRnJvbVNvdXJjZSI6eyJlbGFwc2VkVGltZUluTmFub3MiOjEyNTA0MjExMjcsImV4ZWN1dGlvbkNvdW50Ijo0LCJhdmdFeGVjVGltZUluTmFub3MiOjMuMTI2MDUyODE3NUU4fSwicmVhZEZyb21Tb3VyY2UiOnsiZWxhcHNlZFRpbWVJbk5hbm9zIjo0MTY0NDE4MDg1LCJleGVjdXRpb25Db3VudCI6NywiYXZnRXhlY1RpbWVJbk5hbm9zIjo1Ljk0OTE2ODY5Mjg1NzE0M0U4fSwicHJvY2Vzc0Zyb21EZXN0Ijp7ImVsYXBzZWRUaW1lSW5OYW5vcyI6MCwiZXhlY3V0aW9uQ291bnQiOjAsImF2Z0V4ZWNUaW1lSW5OYW5vcyI6Ik5hTiJ9LCJ3cml0ZVRvRGVzdCI6eyJlbGFwc2VkVGltZUluTmFub3MiOjE1MzI4NDU4LCJleGVjdXRpb25Db3VudCI6MSwiYXZnRXhlY1RpbWVJbk5hbm9zIjoxLjUzMjg0NThFN30sInJlYWRGcm9tRGVzdCI6eyJlbGFwc2VkVGltZUluTmFub3MiOjYwODMyMzAzODMsImV4ZWN1dGlvbkNvdW50Ijo0MSwiYXZnRXhlY1RpbWVJbk5hbm9zIjoxLjQ4MzcxNDcyNzU2MDk3NTZFOH19fSwib3V0cHV0X2NhdGFsb2ciOnsic3RyZWFtcyI6W3sic3RyZWFtIjp7Im5hbWUiOiJwb2tlbW9uIiwianNvbl9zY2hlbWEiOnsidHlwZSI6Im9iamVjdCIsIiRzY2hlbWEiOiJodHRwOi8vanNvbi1zY2hlbWEub3JnL2RyYWZ0LTA3L3NjaGVtYSMiLCJwcm9wZXJ0aWVzIjp7ImlkIjp7InR5cGUiOlsibnVsbCIsImludGVnZXIiXX0sIm5hbWUiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19LCJmb3JtcyI6eyJ0eXBlIjpbIm51bGwiLCJhcnJheSJdLCJpdGVtcyI6eyJ0eXBlIjpbIm51bGwiLCJvYmplY3QiXSwicHJvcGVydGllcyI6eyJ1cmwiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19LCJuYW1lIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfX0sImFkZGl0aW9uYWxQcm9wZXJ0aWVzIjp0cnVlfX0sIm1vdmVzIjp7InR5cGUiOlsibnVsbCIsImFycmF5Il0sIml0ZW1zIjp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7Im1vdmUiOnsidHlwZSI6WyJudWxsIiwib2JqZWN0Il0sInByb3BlcnRpZXMiOnsidXJsIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfSwibmFtZSI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX19LCJhZGRpdGlvbmFsUHJvcGVydGllcyI6dHJ1ZX0sInZlcnNpb25fZ3JvdXBfZGV0YWlscyI6eyJ0eXBlIjpbIm51bGwiLCJhcnJheSJdLCJpdGVtcyI6eyJ0eXBlIjpbIm51bGwiLCJvYmplY3QiXSwicHJvcGVydGllcyI6eyJ2ZXJzaW9uX2dyb3VwIjp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7InVybCI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX0sIm5hbWUiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19fSwiYWRkaXRpb25hbFByb3BlcnRpZXMiOnRydWV9LCJsZXZlbF9sZWFybmVkX2F0Ijp7InR5cGUiOlsibnVsbCIsImludGVnZXIiXX0sIm1vdmVfbGVhcm5fbWV0aG9kIjp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7InVybCI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX0sIm5hbWUiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19fSwiYWRkaXRpb25hbFByb3BlcnRpZXMiOnRydWV9fSwiYWRkaXRpb25hbFByb3BlcnRpZXMiOnRydWV9fX0sImFkZGl0aW9uYWxQcm9wZXJ0aWVzIjp0cnVlfX0sIm9yZGVyIjp7InR5cGUiOlsibnVsbCIsImludGVnZXIiXX0sInN0YXRzIjp7InR5cGUiOlsibnVsbCIsImFycmF5Il0sIml0ZW1zIjp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7InN0YXQiOn
sidHlwZSI6WyJudWxsIiwib2JqZWN0Il0sInByb3BlcnRpZXMiOnsidXJsIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfSwibmFtZSI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX19LCJhZGRpdGlvbmFsUHJvcGVydGllcyI6dHJ1ZX0sImVmZm9ydCI6eyJ0eXBlIjpbIm51bGwiLCJpbnRlZ2VyIl19LCJiYXNlX3N0YXQiOnsidHlwZSI6WyJudWxsIiwiaW50ZWdlciJdfX0sImFkZGl0aW9uYWxQcm9wZXJ0aWVzIjp0cnVlfX0sInR5cGVzIjp7InR5cGUiOlsibnVsbCIsImFycmF5Il0sIml0ZW1zIjp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7InNsb3QiOnsidHlwZSI6WyJudWxsIiwiaW50ZWdlciJdfSwidHlwZSI6eyJ0eXBlIjpbIm51bGwiLCJvYmplY3QiXSwicHJvcGVydGllcyI6eyJ1cmwiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19LCJuYW1lIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfX19fSwiYWRkaXRpb25hbFByb3BlcnRpZXMiOnRydWV9fSwiaGVpZ2h0Ijp7InR5cGUiOlsibnVsbCIsImludGVnZXIiXX0sIndlaWdodCI6eyJ0eXBlIjpbIm51bGwiLCJpbnRlZ2VyIl19LCJzcGVjaWVzIjp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7InVybCI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX0sIm5hbWUiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19fSwiYWRkaXRpb25hbFByb3BlcnRpZXMiOnRydWV9LCJzcHJpdGVzIjp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7ImJhY2tfc2hpbnkiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19LCJiYWNrX2ZlbWFsZSI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX0sImZyb250X3NoaW55Ijp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfSwiYmFja19kZWZhdWx0Ijp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfSwiZnJvbnRfZmVtYWxlIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfSwiZnJvbnRfZGVmYXVsdCI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX0sImJhY2tfc2hpbnlfZmVtYWxlIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfSwiZnJvbnRfc2hpbnlfZmVtYWxlIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfX0sImFkZGl0aW9uYWxQcm9wZXJ0aWVzIjp0cnVlfSwiYWJpbGl0aWVzIjp7InR5cGUiOlsibnVsbCIsImFycmF5Il0sIml0ZW1zIjp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7InNsb3QiOnsidHlwZSI6WyJudWxsIiwiaW50ZWdlciJdfSwiYWJpbGl0eSI6eyJ0eXBlIjpbIm51bGwiLCJvYmplY3QiXSwicHJvcGVydGllcyI6eyJ1cmwiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19LCJuYW1lIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfX0sImFkZGl0aW9uYWxQcm9wZXJ0aWVzIjp0cnVlfSwiaXNfaGlkZGVuIjp7InR5cGUiOlsibnVsbCIsImJvb2xlYW4iXX19LCJhZGRpdGlvbmFsUHJvcGVydGllcyI6dHJ1ZX19LCJoZWxkX2l0ZW1zIjp7InR5cGUiOlsibnVsbCIsImFycmF5Il0sIml0ZW1zIjp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7Iml0ZW0iOnsidHlwZSI6WyJudWxsIiwib2JqZWN0Il0sInByb3BlcnRpZXMiOnsidXJsIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfSwibmFtZSI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX19LCJhZGRpdGlvbmFsUHJvcGVydGllcyI6dHJ1ZX0sInZlcnNpb25fZGV0YWlscyI6eyJ0eXBlIjpbIm51bGwiLCJhcnJheSJdLCJpdGVtcyI6eyJ0eXBlIjpbIm51bGwiLCJvYmplY3QiXSwicHJvcGVydGllcyI6eyJyYXJpdHkiOnsidHlwZSI6WyJudWxsIiwiaW50ZWdlciJdfSwidmVyc2lvbiI6eyJ0eXBlIjpbIm51bGwiLCJvYmplY3QiXSwicHJvcGVydGllcyI6eyJ1cmwiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19LCJuYW1lIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfX0sImFkZGl0aW9uYWxQcm9wZXJ0aWVzIjp0cnVlfX0sImFkZGl0aW9uYWxQcm9wZXJ0aWVzIjp0cnVlfX19LCJhZGRpdGlvbmFsUHJvcGVydGllcyI6dHJ1ZX19LCJpc19kZWZhdWx0Ijp7InR5cGUiOlsibnVsbCIsImJvb2xlYW4iXX0sInBhc3RfdHlwZXMiOnsidHlwZSI6WyJudWxsIiwiYXJyYXkiXSwiaXRlbXMiOnsidHlwZSI6WyJudWxsIiwib2JqZWN0Il0sInByb3BlcnRpZXMiOnsidHlwZXMiOnsidHlwZSI6WyJudWxsIiwiYXJyYXkiXSwiaXRlbXMiOnsidHlwZSI6WyJudWxsIiwib2JqZWN0Il0sInByb3BlcnRpZXMiOnsic2xvdCI6eyJ0eXBlIjpbIm51bGwiLCJpbnRlZ2VyIl19LCJ0eXBlIjp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7InVybCI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX0sIm5hbWUiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19fSwiYWRkaXRpb25hbFByb3BlcnRpZXMiOnRydWV9fSwiYWRkaXRpb25hbFByb3BlcnRpZXMiOnRydWV9fSwiZ2VuZXJhdGlvbiI6eyJ0eXBlIjpbIm51bGwiLCJvYmplY3QiXSwicHJvcGVydGllcyI6eyJ1cmwiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19LCJuYW1lIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfX0sImFkZGl0aW9uYWxQcm9wZXJ0aWVzIjp0cnVlfX0sImFkZGl0aW9uYWxQcm9wZXJ0aWVzIjp0cnVlfX0sImdhbWVfaW5ka
WNlcyI6eyJ0eXBlIjpbIm51bGwiLCJhcnJheSJdLCJpdGVtcyI6eyJ0eXBlIjpbIm51bGwiLCJvYmplY3QiXSwicHJvcGVydGllcyI6eyJ2ZXJzaW9uIjp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7InVybCI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX0sIm5hbWUiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19fSwiYWRkaXRpb25hbFByb3BlcnRpZXMiOnRydWV9LCJnYW1lX2luZGV4Ijp7InR5cGUiOlsibnVsbCIsImludGVnZXIiXX19LCJhZGRpdGlvbmFsUHJvcGVydGllcyI6dHJ1ZX19LCJiYXNlX2V4cGVyaWVuY2UiOnsidHlwZSI6WyJudWxsIiwiaW50ZWdlciJdfSwibG9jYXRpb25fYXJlYV9lbmNvdW50ZXJzIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfX19LCJzdXBwb3J0ZWRfc3luY19tb2RlcyI6WyJmdWxsX3JlZnJlc2giXSwiZGVmYXVsdF9jdXJzb3JfZmllbGQiOltdLCJzb3VyY2VfZGVmaW5lZF9wcmltYXJ5X2tleSI6W1siaWQiXV19LCJzeW5jX21vZGUiOiJmdWxsX3JlZnJlc2giLCJjdXJzb3JfZmllbGQiOltdLCJkZXN0aW5hdGlvbl9zeW5jX21vZGUiOiJvdmVyd3JpdGUiLCJwcmltYXJ5X2tleSI6W1siaWQiXV19XX0sImZhaWx1cmVzIjpbXX0=" - } - ] - }, - "scheduledEventId": "42", - "startedEventId": "43", - "identity": "1@a13622a48bbf", - "workerVersion": null - } - }, - { - "eventId": "45", - "eventTime": "2024-01-19T17:54:14.470131926Z", - "eventType": "WorkflowTaskScheduled", - "version": "0", - "taskId": "13631782", - "workerMayIgnore": false, - "workflowTaskScheduledEventAttributes": { - "taskQueue": { - "name": "1@a13622a48bbf:24cc445d-3319-4d09-8cb5-d95c55af6a1d", - "kind": "Sticky", - "normalName": "SYNC" - }, - "startToCloseTimeout": "10s", - "attempt": 1 - } - }, - { - "eventId": "46", - "eventTime": "2024-01-19T17:54:14.481113385Z", - "eventType": "WorkflowTaskStarted", - "version": "0", - "taskId": "13631786", - "workerMayIgnore": false, - "workflowTaskStartedEventAttributes": { - "scheduledEventId": "45", - "identity": "1@a13622a48bbf", - "requestId": "73366269-4f70-439a-b9c4-2056b7765efb", - "suggestContinueAsNew": false, - "historySizeBytes": "20962" - } - }, - { - "eventId": "47", - "eventTime": "2024-01-19T17:54:14.501098718Z", - "eventType": "WorkflowTaskCompleted", - "version": "0", - "taskId": "13631790", - "workerMayIgnore": false, - "workflowTaskCompletedEventAttributes": { - "scheduledEventId": "45", - "startedEventId": "46", - "identity": "1@a13622a48bbf", - "binaryChecksum": "", - "workerVersion": { - "buildId": "", - "bundleId": "", - "useVersioning": false - }, - "sdkMetadata": null, - "meteringMetadata": { - "nonfirstLocalActivityExecutionAttempts": 0 - } - } - }, - { - "eventId": "48", - "eventTime": "2024-01-19T17:54:14.501187843Z", - "eventType": "WorkflowExecutionCompleted", - "version": "0", - "taskId": "13631791", - "workerMayIgnore": false, - "workflowExecutionCompletedEventAttributes": { - "result": { - "payloads": [ - { - "metadata": { - "encoding": "anNvbi9wbGFpbg==" - }, - "data": 
"eyJzdGFuZGFyZFN5bmNTdW1tYXJ5Ijp7InN0YXR1cyI6ImNvbXBsZXRlZCIsInJlY29yZHNTeW5jZWQiOjAsImJ5dGVzU3luY2VkIjowLCJzdGFydFRpbWUiOjE3MDU2ODY4NDc1OTMsImVuZFRpbWUiOjE3MDU2ODY4NTQ0MjUsInRvdGFsU3RhdHMiOnsiYnl0ZXNDb21taXR0ZWQiOjI1NDAyMiwiYnl0ZXNFbWl0dGVkIjoyNTQwMjIsImRlc3RpbmF0aW9uU3RhdGVNZXNzYWdlc0VtaXR0ZWQiOjAsImRlc3RpbmF0aW9uV3JpdGVFbmRUaW1lIjoxNzA1Njg2ODU0Mzg3LCJkZXN0aW5hdGlvbldyaXRlU3RhcnRUaW1lIjoxNzA1Njg2ODQ3NjMxLCJtZWFuU2Vjb25kc0JlZm9yZVNvdXJjZVN0YXRlTWVzc2FnZUVtaXR0ZWQiOjAsIm1heFNlY29uZHNCZWZvcmVTb3VyY2VTdGF0ZU1lc3NhZ2VFbWl0dGVkIjowLCJtYXhTZWNvbmRzQmV0d2VlblN0YXRlTWVzc2FnZUVtaXR0ZWRhbmRDb21taXR0ZWQiOjAsIm1lYW5TZWNvbmRzQmV0d2VlblN0YXRlTWVzc2FnZUVtaXR0ZWRhbmRDb21taXR0ZWQiOjAsInJlY29yZHNFbWl0dGVkIjoxLCJyZWNvcmRzQ29tbWl0dGVkIjoxLCJyZXBsaWNhdGlvbkVuZFRpbWUiOjE3MDU2ODY4NTQ0MjAsInJlcGxpY2F0aW9uU3RhcnRUaW1lIjoxNzA1Njg2ODQ3NTkzLCJzb3VyY2VSZWFkRW5kVGltZSI6MTcwNTY4Njg1MjUyMiwic291cmNlUmVhZFN0YXJ0VGltZSI6MTcwNTY4Njg0NzYzMywic291cmNlU3RhdGVNZXNzYWdlc0VtaXR0ZWQiOjB9LCJzdHJlYW1TdGF0cyI6W3sic3RyZWFtTmFtZSI6InBva2Vtb24iLCJzdGF0cyI6eyJieXRlc0NvbW1pdHRlZCI6MjU0MDIyLCJieXRlc0VtaXR0ZWQiOjI1NDAyMiwicmVjb3Jkc0VtaXR0ZWQiOjEsInJlY29yZHNDb21taXR0ZWQiOjF9fV0sInBlcmZvcm1hbmNlTWV0cmljcyI6eyJwcm9jZXNzRnJvbVNvdXJjZSI6eyJlbGFwc2VkVGltZUluTmFub3MiOjEyNTA0MjExMjcsImV4ZWN1dGlvbkNvdW50Ijo0LCJhdmdFeGVjVGltZUluTmFub3MiOjMuMTI2MDUyODE3NUU4fSwicmVhZEZyb21Tb3VyY2UiOnsiZWxhcHNlZFRpbWVJbk5hbm9zIjo0MTY0NDE4MDg1LCJleGVjdXRpb25Db3VudCI6NywiYXZnRXhlY1RpbWVJbk5hbm9zIjo1Ljk0OTE2ODY5Mjg1NzE0M0U4fSwicHJvY2Vzc0Zyb21EZXN0Ijp7ImVsYXBzZWRUaW1lSW5OYW5vcyI6MCwiZXhlY3V0aW9uQ291bnQiOjAsImF2Z0V4ZWNUaW1lSW5OYW5vcyI6Ik5hTiJ9LCJ3cml0ZVRvRGVzdCI6eyJlbGFwc2VkVGltZUluTmFub3MiOjE1MzI4NDU4LCJleGVjdXRpb25Db3VudCI6MSwiYXZnRXhlY1RpbWVJbk5hbm9zIjoxLjUzMjg0NThFN30sInJlYWRGcm9tRGVzdCI6eyJlbGFwc2VkVGltZUluTmFub3MiOjYwODMyMzAzODMsImV4ZWN1dGlvbkNvdW50Ijo0MSwiYXZnRXhlY1RpbWVJbk5hbm9zIjoxLjQ4MzcxNDcyNzU2MDk3NTZFOH19fSwib3V0cHV0X2NhdGFsb2ciOnsic3RyZWFtcyI6W3sic3RyZWFtIjp7Im5hbWUiOiJwb2tlbW9uIiwianNvbl9zY2hlbWEiOnsidHlwZSI6Im9iamVjdCIsIiRzY2hlbWEiOiJodHRwOi8vanNvbi1zY2hlbWEub3JnL2RyYWZ0LTA3L3NjaGVtYSMiLCJwcm9wZXJ0aWVzIjp7ImlkIjp7InR5cGUiOlsibnVsbCIsImludGVnZXIiXX0sIm5hbWUiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19LCJmb3JtcyI6eyJ0eXBlIjpbIm51bGwiLCJhcnJheSJdLCJpdGVtcyI6eyJ0eXBlIjpbIm51bGwiLCJvYmplY3QiXSwicHJvcGVydGllcyI6eyJ1cmwiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19LCJuYW1lIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfX0sImFkZGl0aW9uYWxQcm9wZXJ0aWVzIjp0cnVlfX0sIm1vdmVzIjp7InR5cGUiOlsibnVsbCIsImFycmF5Il0sIml0ZW1zIjp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7Im1vdmUiOnsidHlwZSI6WyJudWxsIiwib2JqZWN0Il0sInByb3BlcnRpZXMiOnsidXJsIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfSwibmFtZSI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX19LCJhZGRpdGlvbmFsUHJvcGVydGllcyI6dHJ1ZX0sInZlcnNpb25fZ3JvdXBfZGV0YWlscyI6eyJ0eXBlIjpbIm51bGwiLCJhcnJheSJdLCJpdGVtcyI6eyJ0eXBlIjpbIm51bGwiLCJvYmplY3QiXSwicHJvcGVydGllcyI6eyJ2ZXJzaW9uX2dyb3VwIjp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7InVybCI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX0sIm5hbWUiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19fSwiYWRkaXRpb25hbFByb3BlcnRpZXMiOnRydWV9LCJsZXZlbF9sZWFybmVkX2F0Ijp7InR5cGUiOlsibnVsbCIsImludGVnZXIiXX0sIm1vdmVfbGVhcm5fbWV0aG9kIjp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7InVybCI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX0sIm5hbWUiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19fSwiYWRkaXRpb25hbFByb3BlcnRpZXMiOnRydWV9fSwiYWRkaXRpb25hbFByb3BlcnRpZXMiOnRydWV9fX0sImFkZGl0aW9uYWxQcm9wZXJ0aWVzIjp0cnVlfX0sIm9yZGVyIjp7InR5cGUiOlsibnVsbCIsImludGVnZXIiXX0sInN0YXRzIjp7InR5cGUiOlsibnVsbCIsImFycmF5Il0sIml0ZW1zIjp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7InN0YXQiOn
sidHlwZSI6WyJudWxsIiwib2JqZWN0Il0sInByb3BlcnRpZXMiOnsidXJsIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfSwibmFtZSI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX19LCJhZGRpdGlvbmFsUHJvcGVydGllcyI6dHJ1ZX0sImVmZm9ydCI6eyJ0eXBlIjpbIm51bGwiLCJpbnRlZ2VyIl19LCJiYXNlX3N0YXQiOnsidHlwZSI6WyJudWxsIiwiaW50ZWdlciJdfX0sImFkZGl0aW9uYWxQcm9wZXJ0aWVzIjp0cnVlfX0sInR5cGVzIjp7InR5cGUiOlsibnVsbCIsImFycmF5Il0sIml0ZW1zIjp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7InNsb3QiOnsidHlwZSI6WyJudWxsIiwiaW50ZWdlciJdfSwidHlwZSI6eyJ0eXBlIjpbIm51bGwiLCJvYmplY3QiXSwicHJvcGVydGllcyI6eyJ1cmwiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19LCJuYW1lIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfX19fSwiYWRkaXRpb25hbFByb3BlcnRpZXMiOnRydWV9fSwiaGVpZ2h0Ijp7InR5cGUiOlsibnVsbCIsImludGVnZXIiXX0sIndlaWdodCI6eyJ0eXBlIjpbIm51bGwiLCJpbnRlZ2VyIl19LCJzcGVjaWVzIjp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7InVybCI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX0sIm5hbWUiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19fSwiYWRkaXRpb25hbFByb3BlcnRpZXMiOnRydWV9LCJzcHJpdGVzIjp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7ImJhY2tfc2hpbnkiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19LCJiYWNrX2ZlbWFsZSI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX0sImZyb250X3NoaW55Ijp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfSwiYmFja19kZWZhdWx0Ijp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfSwiZnJvbnRfZmVtYWxlIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfSwiZnJvbnRfZGVmYXVsdCI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX0sImJhY2tfc2hpbnlfZmVtYWxlIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfSwiZnJvbnRfc2hpbnlfZmVtYWxlIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfX0sImFkZGl0aW9uYWxQcm9wZXJ0aWVzIjp0cnVlfSwiYWJpbGl0aWVzIjp7InR5cGUiOlsibnVsbCIsImFycmF5Il0sIml0ZW1zIjp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7InNsb3QiOnsidHlwZSI6WyJudWxsIiwiaW50ZWdlciJdfSwiYWJpbGl0eSI6eyJ0eXBlIjpbIm51bGwiLCJvYmplY3QiXSwicHJvcGVydGllcyI6eyJ1cmwiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19LCJuYW1lIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfX0sImFkZGl0aW9uYWxQcm9wZXJ0aWVzIjp0cnVlfSwiaXNfaGlkZGVuIjp7InR5cGUiOlsibnVsbCIsImJvb2xlYW4iXX19LCJhZGRpdGlvbmFsUHJvcGVydGllcyI6dHJ1ZX19LCJoZWxkX2l0ZW1zIjp7InR5cGUiOlsibnVsbCIsImFycmF5Il0sIml0ZW1zIjp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7Iml0ZW0iOnsidHlwZSI6WyJudWxsIiwib2JqZWN0Il0sInByb3BlcnRpZXMiOnsidXJsIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfSwibmFtZSI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX19LCJhZGRpdGlvbmFsUHJvcGVydGllcyI6dHJ1ZX0sInZlcnNpb25fZGV0YWlscyI6eyJ0eXBlIjpbIm51bGwiLCJhcnJheSJdLCJpdGVtcyI6eyJ0eXBlIjpbIm51bGwiLCJvYmplY3QiXSwicHJvcGVydGllcyI6eyJyYXJpdHkiOnsidHlwZSI6WyJudWxsIiwiaW50ZWdlciJdfSwidmVyc2lvbiI6eyJ0eXBlIjpbIm51bGwiLCJvYmplY3QiXSwicHJvcGVydGllcyI6eyJ1cmwiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19LCJuYW1lIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfX0sImFkZGl0aW9uYWxQcm9wZXJ0aWVzIjp0cnVlfX0sImFkZGl0aW9uYWxQcm9wZXJ0aWVzIjp0cnVlfX19LCJhZGRpdGlvbmFsUHJvcGVydGllcyI6dHJ1ZX19LCJpc19kZWZhdWx0Ijp7InR5cGUiOlsibnVsbCIsImJvb2xlYW4iXX0sInBhc3RfdHlwZXMiOnsidHlwZSI6WyJudWxsIiwiYXJyYXkiXSwiaXRlbXMiOnsidHlwZSI6WyJudWxsIiwib2JqZWN0Il0sInByb3BlcnRpZXMiOnsidHlwZXMiOnsidHlwZSI6WyJudWxsIiwiYXJyYXkiXSwiaXRlbXMiOnsidHlwZSI6WyJudWxsIiwib2JqZWN0Il0sInByb3BlcnRpZXMiOnsic2xvdCI6eyJ0eXBlIjpbIm51bGwiLCJpbnRlZ2VyIl19LCJ0eXBlIjp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7InVybCI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX0sIm5hbWUiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19fSwiYWRkaXRpb25hbFByb3BlcnRpZXMiOnRydWV9fSwiYWRkaXRpb25hbFByb3BlcnRpZXMiOnRydWV9fSwiZ2VuZXJhdGlvbiI6eyJ0eXBlIjpbIm51bGwiLCJvYmplY3QiXSwicHJvcGVydGllcyI6eyJ1cmwiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19LCJuYW1lIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfX0sImFkZGl0aW9uYWxQcm9wZXJ0aWVzIjp0cnVlfX0sImFkZGl0aW9uYWxQcm9wZXJ0aWVzIjp0cnVlfX0sImdhbWVfaW5ka
WNlcyI6eyJ0eXBlIjpbIm51bGwiLCJhcnJheSJdLCJpdGVtcyI6eyJ0eXBlIjpbIm51bGwiLCJvYmplY3QiXSwicHJvcGVydGllcyI6eyJ2ZXJzaW9uIjp7InR5cGUiOlsibnVsbCIsIm9iamVjdCJdLCJwcm9wZXJ0aWVzIjp7InVybCI6eyJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX0sIm5hbWUiOnsidHlwZSI6WyJudWxsIiwic3RyaW5nIl19fSwiYWRkaXRpb25hbFByb3BlcnRpZXMiOnRydWV9LCJnYW1lX2luZGV4Ijp7InR5cGUiOlsibnVsbCIsImludGVnZXIiXX19LCJhZGRpdGlvbmFsUHJvcGVydGllcyI6dHJ1ZX19LCJiYXNlX2V4cGVyaWVuY2UiOnsidHlwZSI6WyJudWxsIiwiaW50ZWdlciJdfSwibG9jYXRpb25fYXJlYV9lbmNvdW50ZXJzIjp7InR5cGUiOlsibnVsbCIsInN0cmluZyJdfX19LCJzdXBwb3J0ZWRfc3luY19tb2RlcyI6WyJmdWxsX3JlZnJlc2giXSwiZGVmYXVsdF9jdXJzb3JfZmllbGQiOltdLCJzb3VyY2VfZGVmaW5lZF9wcmltYXJ5X2tleSI6W1siaWQiXV19LCJzeW5jX21vZGUiOiJmdWxsX3JlZnJlc2giLCJjdXJzb3JfZmllbGQiOltdLCJkZXN0aW5hdGlvbl9zeW5jX21vZGUiOiJvdmVyd3JpdGUiLCJwcmltYXJ5X2tleSI6W1siaWQiXV19XX0sImZhaWx1cmVzIjpbXX0=" - } - ] - }, - "workflowTaskCompletedEventId": "47", - "newExecutionRunId": "" - } - } - ] -} diff --git a/airbyte-workload-launcher/src/main/kotlin/client/WorkloadApiClient.kt b/airbyte-workload-launcher/src/main/kotlin/client/WorkloadApiClient.kt index 72eecce422d..664bbde71b2 100644 --- a/airbyte-workload-launcher/src/main/kotlin/client/WorkloadApiClient.kt +++ b/airbyte-workload-launcher/src/main/kotlin/client/WorkloadApiClient.kt @@ -4,7 +4,9 @@ package io.airbyte.workload.launcher.client +import com.amazonaws.internal.ExceptionUtils import io.airbyte.api.client.WorkloadApiClient +import io.airbyte.metrics.lib.MetricEmittingApps import io.airbyte.workload.api.client.model.generated.ClaimResponse import io.airbyte.workload.api.client.model.generated.WorkloadClaimRequest import io.airbyte.workload.api.client.model.generated.WorkloadFailureRequest @@ -31,7 +33,7 @@ class WorkloadApiClient( } try { - updateStatusToFailed(failure.io.msg.workloadId) + updateStatusToFailed(failure) } catch (e: Exception) { logger.warn(e) { "Could not set the status for workload ${failure.io.msg.workloadId} to failed." } } @@ -43,9 +45,14 @@ class WorkloadApiClient( workloadApiClient.workloadApi.workloadRunning(request) } - fun updateStatusToFailed(workloadId: String) { - val request = WorkloadFailureRequest(workloadId) - logger.info { "Attempting to update workload: $workloadId to FAILED." } + fun updateStatusToFailed(failure: StageError) { + val request = + WorkloadFailureRequest( + failure.io.msg.workloadId, + MetricEmittingApps.WORKLOAD_LAUNCHER.applicationName, + ExceptionUtils.exceptionStackTrace(failure), + ) + logger.info { "Attempting to update workload: ${failure.io.msg.workloadId} to FAILED." 
}
    workloadApiClient.workloadApi.workloadFailure(request)
  }
diff --git a/airbyte-workload-launcher/src/main/kotlin/config/ApplicationBeanFactory.kt b/airbyte-workload-launcher/src/main/kotlin/config/ApplicationBeanFactory.kt
index bca72580e29..fff354c2779 100644
--- a/airbyte-workload-launcher/src/main/kotlin/config/ApplicationBeanFactory.kt
+++ b/airbyte-workload-launcher/src/main/kotlin/config/ApplicationBeanFactory.kt
@@ -17,6 +17,7 @@ import io.airbyte.workers.CheckConnectionInputHydrator
 import io.airbyte.workers.ConnectorSecretsHydrator
 import io.airbyte.workers.DiscoverCatalogInputHydrator
 import io.airbyte.workers.ReplicationInputHydrator
+import io.airbyte.workers.helper.ResumableFullRefreshStatsHelper
 import io.micrometer.core.instrument.MeterRegistry
 import io.micronaut.context.annotation.Factory
 import io.micronaut.context.annotation.Value
@@ -46,10 +47,11 @@ class ApplicationBeanFactory {
   @Singleton
   fun replicationInputHydrator(
     airbyteApiClient: AirbyteApiClient,
+    resumableFullRefreshStatsHelper: ResumableFullRefreshStatsHelper,
     secretsRepositoryReader: SecretsRepositoryReader,
     featureFlagClient: FeatureFlagClient,
   ): ReplicationInputHydrator {
-    return ReplicationInputHydrator(airbyteApiClient, secretsRepositoryReader, featureFlagClient)
+    return ReplicationInputHydrator(airbyteApiClient, resumableFullRefreshStatsHelper, secretsRepositoryReader, featureFlagClient)
   }
   @Singleton
diff --git a/airbyte-workload-launcher/src/main/kotlin/config/ContainerConfigBeanFactory.kt b/airbyte-workload-launcher/src/main/kotlin/config/ContainerConfigBeanFactory.kt
index c1f2a6fce48..0919d4a59da 100644
--- a/airbyte-workload-launcher/src/main/kotlin/config/ContainerConfigBeanFactory.kt
+++ b/airbyte-workload-launcher/src/main/kotlin/config/ContainerConfigBeanFactory.kt
@@ -57,10 +57,10 @@ class ContainerConfigBeanFactory {
       return injectedImage
     }
-    if (airbyteVersion.endsWith("-cloud")) {
-      return "airbyte/connector-sidecar:${airbyteVersion.dropLast(6)}"
+    return if (airbyteVersion.endsWith("-cloud")) {
+      "airbyte/connector-sidecar:${airbyteVersion.dropLast(6)}"
     } else {
-      return "airbyte/connector-sidecar:$airbyteVersion"
+      "airbyte/connector-sidecar:$airbyteVersion"
     }
   }
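The WorkloadApiClient change above threads the whole StageError into updateStatusToFailed, so the failure request can carry the reporting application and a stack trace instead of only a workload id. A minimal sketch of that call shape, assuming stand-in types (LauncherInput, StageIO, StageError, WorkloadFailureRequest here are simplified models, not the real Airbyte classes, and stackTraceToString() is a stdlib substitute for ExceptionUtils):

```kotlin
// Minimal sketch: how a richer WorkloadFailureRequest can be built from a StageError.
data class LauncherInput(val workloadId: String)
data class StageIO(val msg: LauncherInput)
class StageError(val io: StageIO, cause: Throwable) : RuntimeException(cause)

data class WorkloadFailureRequest(
  val workloadId: String,
  val source: String, // which application reported the failure
  val reason: String, // stack trace of the stage error
)

fun buildFailureRequest(failure: StageError): WorkloadFailureRequest =
  WorkloadFailureRequest(
    workloadId = failure.io.msg.workloadId,
    source = "workload-launcher",
    reason = failure.stackTraceToString(), // Kotlin stdlib alternative to ExceptionUtils
  )

fun main() {
  val error = StageError(StageIO(LauncherInput("wl-123")), RuntimeException("boom"))
  println(buildFailureRequest(error).workloadId) // wl-123
}
```

The payoff is observability: the workload API can now attribute a failure to the launcher and show the underlying exception, instead of a bare status flip.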
@Value("\${airbyte.kubernetes.client.connection-pool.keep-alive-sec}") private val keepAliveDuration: Long, + @Value("\${airbyte.kubernetes.client.connection-pool.max-idle-connections}") private val maxIdleConnections: Int, + @Value("\${airbyte.kubernetes.client.read-timeout-sec}") private val readTimeout: Long, + @Value("\${airbyte.kubernetes.client.write-timeout-sec}") private val writeTimeout: Long, ) : OkHttpClientFactory() { override fun additionalConfig(builder: OkHttpClient.Builder?) { builder?.apply { - callTimeout(callTimeout.toLong(), TimeUnit.SECONDS) - connectionPool(ConnectionPool(maxIdleConnections.toInt(), keepAliveDuration.toLong(), TimeUnit.SECONDS)) - connectTimeout(connectTimeout.toLong(), TimeUnit.SECONDS) - readTimeout(readTimeout.toLong(), TimeUnit.SECONDS) + callTimeout(callTimeout, TimeUnit.SECONDS) + connectionPool(ConnectionPool(maxIdleConnections, keepAliveDuration, TimeUnit.SECONDS)) + connectTimeout(connectTimeout, TimeUnit.SECONDS) + readTimeout(readTimeout, TimeUnit.SECONDS) // Retry on Connectivity issues (Unreachable IP/Proxy, Stale Pool Connection) retryOnConnectionFailure(true) - writeTimeout(writeTimeout.toLong(), TimeUnit.SECONDS) + writeTimeout(writeTimeout, TimeUnit.SECONDS) } } } diff --git a/airbyte-workload-launcher/src/main/kotlin/model/CheckConnectionInputExtensions.kt b/airbyte-workload-launcher/src/main/kotlin/model/CheckConnectionInputExtensions.kt index dd76a7f7e74..3f75f5f2dfb 100644 --- a/airbyte-workload-launcher/src/main/kotlin/model/CheckConnectionInputExtensions.kt +++ b/airbyte-workload-launcher/src/main/kotlin/model/CheckConnectionInputExtensions.kt @@ -14,12 +14,3 @@ fun CheckConnectionInput.getAttemptId(): Long { fun CheckConnectionInput.getActorType(): ActorType { return this.checkConnectionInput.actorType } - -fun CheckConnectionInput.setConnectorLabels(labels: Map): CheckConnectionInput { - return this.apply { - launcherConfig = - launcherConfig.apply { - additionalLabels = labels - } - } -} diff --git a/airbyte-workload-launcher/src/main/kotlin/pipeline/LaunchPipeline.kt b/airbyte-workload-launcher/src/main/kotlin/pipeline/LaunchPipeline.kt index 01bb66bbd99..df89c6a2475 100644 --- a/airbyte-workload-launcher/src/main/kotlin/pipeline/LaunchPipeline.kt +++ b/airbyte-workload-launcher/src/main/kotlin/pipeline/LaunchPipeline.kt @@ -15,7 +15,6 @@ import io.airbyte.workload.launcher.pipeline.handlers.FailureHandler import io.airbyte.workload.launcher.pipeline.handlers.SuccessHandler import io.airbyte.workload.launcher.pipeline.stages.model.LaunchStage import io.airbyte.workload.launcher.pipeline.stages.model.LaunchStageIO -import io.github.oshai.kotlinlogging.KotlinLogging import io.micronaut.context.annotation.Value import jakarta.inject.Named import jakarta.inject.Singleton @@ -25,8 +24,6 @@ import reactor.kotlin.core.publisher.toMono import kotlin.time.TimeSource import kotlin.time.toJavaDuration -private val logger = KotlinLogging.logger {} - @Singleton class LaunchPipeline( @Value("\${airbyte.data-plane-id}") private val dataplaneId: String, diff --git a/airbyte-workload-launcher/src/main/kotlin/pipeline/stages/LaunchPodStage.kt b/airbyte-workload-launcher/src/main/kotlin/pipeline/stages/LaunchPodStage.kt index 0c4d9103c18..fe6c6e19551 100644 --- a/airbyte-workload-launcher/src/main/kotlin/pipeline/stages/LaunchPodStage.kt +++ b/airbyte-workload-launcher/src/main/kotlin/pipeline/stages/LaunchPodStage.kt @@ -42,9 +42,7 @@ open class LaunchPodStage( } override fun applyStage(input: LaunchStageIO): LaunchStageIO { - val payload = 
diff --git a/airbyte-workload-launcher/src/main/kotlin/pipeline/stages/LaunchPodStage.kt b/airbyte-workload-launcher/src/main/kotlin/pipeline/stages/LaunchPodStage.kt
index 0c4d9103c18..fe6c6e19551 100644
--- a/airbyte-workload-launcher/src/main/kotlin/pipeline/stages/LaunchPodStage.kt
+++ b/airbyte-workload-launcher/src/main/kotlin/pipeline/stages/LaunchPodStage.kt
@@ -42,9 +42,7 @@ open class LaunchPodStage(
   }
   override fun applyStage(input: LaunchStageIO): LaunchStageIO {
-    val payload = input.payload!!
-
-    when (payload) {
+    when (val payload = input.payload!!) {
       is SyncPayload -> launcher.launchReplication(payload.input, input.msg)
       is CheckPayload -> launcher.launchCheck(payload.input, input.msg)
       is DiscoverCatalogPayload -> launcher.launchDiscover(payload.input, input.msg)
diff --git a/airbyte-workload-launcher/src/main/kotlin/pods/DockerPodLauncher.kt b/airbyte-workload-launcher/src/main/kotlin/pods/DockerPodLauncher.kt
index bfce72d325a..c56ed58e32e 100644
--- a/airbyte-workload-launcher/src/main/kotlin/pods/DockerPodLauncher.kt
+++ b/airbyte-workload-launcher/src/main/kotlin/pods/DockerPodLauncher.kt
@@ -45,7 +45,7 @@ class DockerPodLauncher(private val dockerConfig: DockerConfig) {
    *
    * Returns a list of containerIds.
    */
-  fun find(filters: Map<String, String>): List<String> {
+  private fun find(filters: Map<String, String>): List<String> {
     val cmd = mutableListOf("docker", "ps", "--quiet")
     filters.forEach { (k, v) ->
       cmd.add("--filter")
diff --git a/airbyte-workload-launcher/src/main/kotlin/pods/KubeCopyClient.kt b/airbyte-workload-launcher/src/main/kotlin/pods/KubeCopyClient.kt
index d84ceed75d6..539d060b1c6 100644
--- a/airbyte-workload-launcher/src/main/kotlin/pods/KubeCopyClient.kt
+++ b/airbyte-workload-launcher/src/main/kotlin/pods/KubeCopyClient.kt
@@ -58,9 +58,15 @@ class KubeCopyClient(private val metricClient: MetricClient) {
     // several issues with copying files. See https://github.com/airbytehq/airbyte/issues/8643 for
     // details.
     val command =
-      """
-      kubectl cp $localPath ${pod.metadata.namespace}/${pod.metadata.name}:$containerPath -c ${KubePodProcess.INIT_CONTAINER_NAME} --retries=3
-      """.trimMargin()
+      arrayOf(
+        "kubectl",
+        "cp",
+        localPath.toString(),
+        "${pod.metadata.namespace}/${pod.metadata.name}:$containerPath",
+        "-c",
+        KubePodProcess.INIT_CONTAINER_NAME,
+        "--retries=3",
+      )
     return Runtime.getRuntime().exec(command)
   }
diff --git a/airbyte-workload-launcher/src/main/kotlin/pods/KubePodLauncher.kt b/airbyte-workload-launcher/src/main/kotlin/pods/KubePodLauncher.kt
index 720a2f0c76e..0bc522ded8a 100644
--- a/airbyte-workload-launcher/src/main/kotlin/pods/KubePodLauncher.kt
+++ b/airbyte-workload-launcher/src/main/kotlin/pods/KubePodLauncher.kt
@@ -62,10 +62,10 @@ class KubePodLauncher(
     pod: Pod,
     waitDuration: Duration,
   ) {
-    if (shouldUseCustomK8sInitCheck()) {
-      return waitForPodInitCustomCheck(pod, waitDuration)
+    return if (shouldUseCustomK8sInitCheck()) {
+      waitForPodInitCustomCheck(pod, waitDuration)
     } else {
-      return waitForPodInitDefaultCheck(pod, waitDuration)
+      waitForPodInitDefaultCheck(pod, waitDuration)
     }
   }
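The KubeCopyClient hunk above is the most behavior-relevant change in this group: the kubectl cp invocation moves from a single command string to an argv array. A string handed to Runtime.exec is tokenized on whitespace, so a local path or pod name containing a space would be split into multiple arguments; an array keeps each element intact. A generic illustration of the same idea using ProcessBuilder (this is not the KubeCopyClient code, and Runtime.exec(Array<String>) handles arguments the same way):

```kotlin
// Generic sketch: argv-array process launch keeps whitespace-containing
// arguments as single tokens, unlike a naive command-string launch.
import java.nio.file.Path

fun copyIntoPod(localPath: Path, namespace: String, pod: String, containerPath: String): Process {
  val command = listOf(
    "kubectl",
    "cp",
    localPath.toString(),               // may contain spaces; still one argument
    "$namespace/$pod:$containerPath",
    "--retries=3",
  )
  return ProcessBuilder(command)
    .redirectErrorStream(true) // merge stderr into stdout for simpler log capture
    .start()
}
```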
diff --git a/airbyte-workload-launcher/src/main/kotlin/pods/PayloadKubeInputMapper.kt b/airbyte-workload-launcher/src/main/kotlin/pods/PayloadKubeInputMapper.kt
index 20b13945114..c4e11f70e67 100644
--- a/airbyte-workload-launcher/src/main/kotlin/pods/PayloadKubeInputMapper.kt
+++ b/airbyte-workload-launcher/src/main/kotlin/pods/PayloadKubeInputMapper.kt
@@ -121,7 +121,7 @@ class PayloadKubeInputMapper(
     )
     val nodeSelectors =
-      if (WorkloadPriority.DEFAULT.equals(input.launcherConfig.priority)) {
+      if (WorkloadPriority.DEFAULT == input.launcherConfig.priority) {
         getNodeSelectors(input.launcherConfig.isCustomConnector, replicationWorkerConfigs)
       } else {
         getNodeSelectors(input.launcherConfig.isCustomConnector, checkWorkerConfigs)
@@ -132,7 +132,7 @@
     val extraEnv = resolveAwsAssumedRoleEnvVars(input.launcherConfig)
     return ConnectorKubeInput(
-      labeler.getCheckConnectorLabels() + sharedLabels,
+      labeler.getCheckLabels() + sharedLabels,
       nodeSelectors,
       connectorPodInfo,
       fileMap,
@@ -174,7 +174,7 @@
     val extraEnv = resolveAwsAssumedRoleEnvVars(input.launcherConfig)
     return ConnectorKubeInput(
-      labeler.getCheckConnectorLabels() + sharedLabels,
+      labeler.getDiscoverLabels() + sharedLabels,
       nodeSelectors,
       connectorPodInfo,
       fileMap,
@@ -209,7 +209,7 @@
     val fileMap = buildSpecFileMap(workloadId, input, input.jobRunConfig, logPath)
     return ConnectorKubeInput(
-      labeler.getCheckConnectorLabels() + sharedLabels,
+      labeler.getSpecLabels() + sharedLabels,
       nodeSelectors,
       connectorPodInfo,
       fileMap,
diff --git a/airbyte-workload-launcher/src/main/kotlin/pods/PodLabeler.kt b/airbyte-workload-launcher/src/main/kotlin/pods/PodLabeler.kt
index 78425ee4e81..22d02ea93d9 100644
--- a/airbyte-workload-launcher/src/main/kotlin/pods/PodLabeler.kt
+++ b/airbyte-workload-launcher/src/main/kotlin/pods/PodLabeler.kt
@@ -2,11 +2,11 @@ package io.airbyte.workload.launcher.pods
 import io.airbyte.workers.process.Metadata
 import io.airbyte.workers.process.Metadata.CHECK_JOB
-import io.airbyte.workers.process.Metadata.CHECK_STEP_KEY
-import io.airbyte.workers.process.Metadata.CONNECTOR_STEP
+import io.airbyte.workers.process.Metadata.DISCOVER_JOB
 import io.airbyte.workers.process.Metadata.JOB_TYPE_KEY
 import io.airbyte.workers.process.Metadata.ORCHESTRATOR_REPLICATION_STEP
 import io.airbyte.workers.process.Metadata.READ_STEP
+import io.airbyte.workers.process.Metadata.SPEC_JOB
 import io.airbyte.workers.process.Metadata.SYNC_JOB
 import io.airbyte.workers.process.Metadata.SYNC_STEP_KEY
 import io.airbyte.workers.process.Metadata.WRITE_STEP
@@ -44,10 +44,21 @@ class PodLabeler(
     )
   }
-  fun getCheckConnectorLabels(): Map<String, String> {
+  fun getCheckLabels(): Map<String, String> {
     return mapOf(
       JOB_TYPE_KEY to CHECK_JOB,
-      CHECK_STEP_KEY to CONNECTOR_STEP,
+    )
+  }
+
+  fun getDiscoverLabels(): Map<String, String> {
+    return mapOf(
+      JOB_TYPE_KEY to DISCOVER_JOB,
+    )
+  }
+
+  fun getSpecLabels(): Map<String, String> {
+    return mapOf(
+      JOB_TYPE_KEY to SPEC_JOB,
     )
   }
diff --git a/airbyte-workload-launcher/src/main/kotlin/pods/factories/ConnectorPodFactory.kt b/airbyte-workload-launcher/src/main/kotlin/pods/factories/ConnectorPodFactory.kt
index aee40675568..f41780a7708 100644
--- a/airbyte-workload-launcher/src/main/kotlin/pods/factories/ConnectorPodFactory.kt
+++ b/airbyte-workload-launcher/src/main/kotlin/pods/factories/ConnectorPodFactory.kt
@@ -153,9 +153,9 @@ class ConnectorPodFactory(
   }
   companion object {
-    val CHECK_OPERATION_NAME = "check"
-    val DISCOVER_OPERATION_NAME = "discover"
-    val SPEC_OPERATION_NAME = "spec"
+    const val CHECK_OPERATION_NAME = "check"
+    const val DISCOVER_OPERATION_NAME = "discover"
+    const val SPEC_OPERATION_NAME = "spec"
   }
 }
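The PayloadKubeInputMapper and PodLabeler hunks above fix a real labeling bug: discover and spec pods previously reused getCheckConnectorLabels(), so every connector pod was labeled as a check job. Each operation now gets its own job-type label. A simplified model of the new shape, where the enum, key, and values are illustrative stand-ins for the Metadata constants:

```kotlin
// Simplified model of the PodLabeler split: each operation stamps its own
// job-type label instead of every pod reusing the check labels.
enum class Operation(val jobType: String) {
  CHECK("check"),
  DISCOVER("discover"),
  SPEC("spec"),
}

const val JOB_TYPE_KEY = "job_type" // stand-in for Metadata.JOB_TYPE_KEY

fun labelsFor(op: Operation): Map<String, String> = mapOf(JOB_TYPE_KEY to op.jobType)

fun main() {
  // Discover pods are now labeled as discover jobs, so label-based metrics,
  // selectors, and pod sweepers can tell the operations apart.
  println(labelsFor(Operation.DISCOVER)) // {job_type=discover}
}
```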
diff --git a/airbyte-workload-launcher/src/main/kotlin/pods/factories/OrchestratorPodFactory.kt b/airbyte-workload-launcher/src/main/kotlin/pods/factories/OrchestratorPodFactory.kt
index a7cd526bdef..1220814facf 100644
--- a/airbyte-workload-launcher/src/main/kotlin/pods/factories/OrchestratorPodFactory.kt
+++ b/airbyte-workload-launcher/src/main/kotlin/pods/factories/OrchestratorPodFactory.kt
@@ -12,6 +12,7 @@ import io.fabric8.kubernetes.api.model.CapabilitiesBuilder
 import io.fabric8.kubernetes.api.model.ContainerBuilder
 import io.fabric8.kubernetes.api.model.ContainerPort
 import io.fabric8.kubernetes.api.model.EnvVar
+import io.fabric8.kubernetes.api.model.LocalObjectReference
 import io.fabric8.kubernetes.api.model.Pod
 import io.fabric8.kubernetes.api.model.PodBuilder
 import io.fabric8.kubernetes.api.model.PodSecurityContext
@@ -33,6 +34,7 @@ class OrchestratorPodFactory(
   private val orchestratorEnvSingleton: OrchestratorEnvSingleton,
   @Value("\${airbyte.worker.job.kube.serviceAccount}") private val serviceAccount: String?,
   @Named("orchestratorContainerPorts") private val containerPorts: List<ContainerPort>,
+  @Named("discoverImagePullSecrets") private val imagePullSecrets: List<LocalObjectReference>,
   private val volumeFactory: VolumeFactory,
   private val initContainerFactory: InitContainerFactory,
 ) {
@@ -100,6 +102,7 @@
       .withRestartPolicy("Never")
       .withContainers(mainContainer)
       .withInitContainers(initContainer)
+      .withImagePullSecrets(imagePullSecrets)
       .withVolumes(volumes)
       .withNodeSelector(nodeSelectors)
       .withSecurityContext(podSecurityContext())
diff --git a/airbyte-workload-launcher/src/test/kotlin/client/WorkloadApiClientTest.kt b/airbyte-workload-launcher/src/test/kotlin/client/WorkloadApiClientTest.kt
index ec8d5aa7f48..d48cee5dcf2 100644
--- a/airbyte-workload-launcher/src/test/kotlin/client/WorkloadApiClientTest.kt
+++ b/airbyte-workload-launcher/src/test/kotlin/client/WorkloadApiClientTest.kt
@@ -52,13 +52,11 @@ internal class WorkloadApiClientTest {
         autoId = UUID.randomUUID(),
       )
     val stageIo: StageIO = mockk()
-    val failure: StageError = mockk()
     val requestCapture = slot<WorkloadFailureRequest>()
     every { workloadApi.workloadFailure(any()) } returns Unit
     every { stageIo.msg } returns launcherInput
-    every { failure.stageName } returns StageName.LAUNCH
-    every { failure.io } returns stageIo
+    val failure = StageError(stageIo, StageName.LAUNCH, RuntimeException("Cause"))
     workloadApiClient.reportFailure(failure)
@@ -110,9 +108,13 @@ internal class WorkloadApiClientTest {
     val workloadId = "workload-id"
     val requestCapture = slot<WorkloadFailureRequest>()
+    val launcherInput = mockk<LauncherInput>()
+    every { launcherInput.workloadId } returns workloadId
+    val io = mockk<StageIO>()
+    every { io.msg } returns launcherInput
     every { workloadApi.workloadFailure(any()) } returns Unit
-    workloadApiClient.updateStatusToFailed(workloadId)
+    workloadApiClient.updateStatusToFailed(StageError(io, StageName.CLAIM, RuntimeException("Cause")))
     verify(exactly = 1) { workloadApi.workloadFailure(capture(requestCapture)) }
     assertEquals(workloadId, requestCapture.captured.workloadId)
diff --git a/airbyte-workload-launcher/src/test/kotlin/pods/KubePodClientTest.kt b/airbyte-workload-launcher/src/test/kotlin/pods/KubePodClientTest.kt
index ccdc81e0953..b0d35503148 100644
--- a/airbyte-workload-launcher/src/test/kotlin/pods/KubePodClientTest.kt
+++ b/airbyte-workload-launcher/src/test/kotlin/pods/KubePodClientTest.kt
@@ -16,6 +16,7 @@ import io.airbyte.workload.launcher.model.setSourceLabels
 import io.airbyte.workload.launcher.pods.KubePodClient.Companion.ORCHESTRATOR_STARTUP_TIMEOUT_VALUE
 import io.airbyte.workload.launcher.pods.KubePodClient.Companion.POD_INIT_TIMEOUT_VALUE
 import io.airbyte.workload.launcher.pods.KubePodClient.Companion.REPL_CONNECTOR_STARTUP_TIMEOUT_VALUE
+import io.airbyte.workload.launcher.pods.KubePodClientTest.Fixtures.WORKLOAD_ID
 import io.airbyte.workload.launcher.pods.KubePodClientTest.Fixtures.checkLauncherInput
 import io.airbyte.workload.launcher.pods.KubePodClientTest.Fixtures.connectorKubeInput
 import io.airbyte.workload.launcher.pods.KubePodClientTest.Fixtures.discoverLauncherInput
@@ -23,7 +24,6 @@ import io.airbyte.workload.launcher.pods.KubePodClientTest.Fixtures.replKubeInput
 import io.airbyte.workload.launcher.pods.KubePodClientTest.Fixtures.replLauncherInput
 import io.airbyte.workload.launcher.pods.KubePodClientTest.Fixtures.sharedLabels
 import
io.airbyte.workload.launcher.pods.KubePodClientTest.Fixtures.specLauncherInput -import io.airbyte.workload.launcher.pods.KubePodClientTest.Fixtures.workloadId import io.airbyte.workload.launcher.pods.factories.ConnectorPodFactory import io.airbyte.workload.launcher.pods.factories.OrchestratorPodFactory import io.fabric8.kubernetes.api.model.EnvVar @@ -129,11 +129,11 @@ class KubePodClientTest { every { labeler.getSharedLabels(any(), any(), any(), any()) } returns sharedLabels - every { mapper.toKubeInput(workloadId, replInput, sharedLabels) } returns replKubeInput - every { mapper.toKubeInput(workloadId, resetInput, sharedLabels) } returns replKubeInput - every { mapper.toKubeInput(workloadId, checkInput, sharedLabels, "/log/path") } returns connectorKubeInput - every { mapper.toKubeInput(workloadId, discoverInput, sharedLabels, "/log/path") } returns connectorKubeInput - every { mapper.toKubeInput(workloadId, specInput, sharedLabels, "/log/path") } returns connectorKubeInput + every { mapper.toKubeInput(WORKLOAD_ID, replInput, sharedLabels) } returns replKubeInput + every { mapper.toKubeInput(WORKLOAD_ID, resetInput, sharedLabels) } returns replKubeInput + every { mapper.toKubeInput(WORKLOAD_ID, checkInput, sharedLabels, "/log/path") } returns connectorKubeInput + every { mapper.toKubeInput(WORKLOAD_ID, discoverInput, sharedLabels, "/log/path") } returns connectorKubeInput + every { mapper.toKubeInput(WORKLOAD_ID, specInput, sharedLabels, "/log/path") } returns connectorKubeInput every { orchestratorPodFactory.create( @@ -236,13 +236,13 @@ class KubePodClientTest { @Test fun `launchReplication sets pass-through labels for propagation to source and destination`() { every { labeler.getSharedLabels(any(), any(), any(), any()) } returns sharedLabels - every { mapper.toKubeInput(workloadId, replInput, sharedLabels) } returns replKubeInput + every { mapper.toKubeInput(WORKLOAD_ID, replInput, sharedLabels) } returns replKubeInput client.launchReplication(replInput, replLauncherInput) val inputWithLabels = replInput.setDestinationLabels(sharedLabels).setSourceLabels(sharedLabels) - verify { mapper.toKubeInput(workloadId, inputWithLabels, sharedLabels) } + verify { mapper.toKubeInput(WORKLOAD_ID, inputWithLabels, sharedLabels) } } @Test @@ -439,26 +439,26 @@ class KubePodClientTest { listOf(EnvVar("extra-env", "val6", null)), ) - val workloadId = "workload-id" - val passThroughLabels = mapOf("labels" to "we get", "from" to "the activity") + const val WORKLOAD_ID = "workload-id" + private val passThroughLabels = mapOf("labels" to "we get", "from" to "the activity") val sharedLabels = mapOf("arbitrary" to "label", "literally" to "anything") - val replLauncherInput = RecordFixtures.launcherInput(workloadId = workloadId, labels = passThroughLabels) + val replLauncherInput = RecordFixtures.launcherInput(workloadId = WORKLOAD_ID, labels = passThroughLabels) val checkLauncherInput = RecordFixtures.launcherInput( - workloadId = workloadId, + workloadId = WORKLOAD_ID, labels = passThroughLabels, workloadType = WorkloadType.CHECK, ) val discoverLauncherInput = RecordFixtures.launcherInput( - workloadId = workloadId, + workloadId = WORKLOAD_ID, labels = passThroughLabels, workloadType = WorkloadType.DISCOVER, ) val specLauncherInput = RecordFixtures.launcherInput( - workloadId = workloadId, + workloadId = WORKLOAD_ID, labels = passThroughLabels, workloadType = WorkloadType.SPEC, ) diff --git a/airbyte-workload-launcher/src/test/kotlin/pods/PayloadKubeInputMapperTest.kt 
b/airbyte-workload-launcher/src/test/kotlin/pods/PayloadKubeInputMapperTest.kt index 21249c26234..9547950a229 100644 --- a/airbyte-workload-launcher/src/test/kotlin/pods/PayloadKubeInputMapperTest.kt +++ b/airbyte-workload-launcher/src/test/kotlin/pods/PayloadKubeInputMapperTest.kt @@ -224,7 +224,7 @@ class PayloadKubeInputMapperTest { val connectorLabels = mapOf("connector" to "labels") val sharedLabels = mapOf("pass through" to "labels") - every { labeler.getCheckConnectorLabels() } returns connectorLabels + every { labeler.getCheckLabels() } returns connectorLabels val result = mapper.toKubeInput(workloadId, input, sharedLabels, logPath) Assertions.assertEquals(connectorLabels + sharedLabels, result.connectorLabels) @@ -359,7 +359,7 @@ class PayloadKubeInputMapperTest { val connectorLabels = mapOf("connector" to "labels") val sharedLabels = mapOf("pass through" to "labels") - every { labeler.getCheckConnectorLabels() } returns connectorLabels + every { labeler.getDiscoverLabels() } returns connectorLabels val result = mapper.toKubeInput(workloadId, input, sharedLabels, logPath) Assertions.assertEquals(connectorLabels + sharedLabels, result.connectorLabels) @@ -478,7 +478,7 @@ class PayloadKubeInputMapperTest { val connectorLabels = mapOf("connector" to "labels") val sharedLabels = mapOf("pass through" to "labels") - every { labeler.getCheckConnectorLabels() } returns connectorLabels + every { labeler.getSpecLabels() } returns connectorLabels val result = mapper.toKubeInput(workloadId, input, sharedLabels, logPath) Assertions.assertEquals(connectorLabels + sharedLabels, result.connectorLabels) diff --git a/airbyte-workload-launcher/src/test/kotlin/pods/PodLabelerTest.kt b/airbyte-workload-launcher/src/test/kotlin/pods/PodLabelerTest.kt index d043ebb9e12..56fc9bbf039 100644 --- a/airbyte-workload-launcher/src/test/kotlin/pods/PodLabelerTest.kt +++ b/airbyte-workload-launcher/src/test/kotlin/pods/PodLabelerTest.kt @@ -1,13 +1,13 @@ package io.airbyte.workload.launcher.pods import io.airbyte.workers.process.Metadata.CHECK_JOB -import io.airbyte.workers.process.Metadata.CHECK_STEP_KEY -import io.airbyte.workers.process.Metadata.CONNECTOR_STEP +import io.airbyte.workers.process.Metadata.DISCOVER_JOB import io.airbyte.workers.process.Metadata.IMAGE_NAME import io.airbyte.workers.process.Metadata.IMAGE_VERSION import io.airbyte.workers.process.Metadata.JOB_TYPE_KEY import io.airbyte.workers.process.Metadata.ORCHESTRATOR_REPLICATION_STEP import io.airbyte.workers.process.Metadata.READ_STEP +import io.airbyte.workers.process.Metadata.SPEC_JOB import io.airbyte.workers.process.Metadata.SYNC_JOB import io.airbyte.workers.process.Metadata.SYNC_STEP_KEY import io.airbyte.workers.process.Metadata.WRITE_STEP @@ -68,15 +68,40 @@ class PodLabelerTest { } @Test - fun getCheckConnectorLabels() { + fun getCheckLabels() { val labeler = PodLabeler(ORCHESTRATOR_IMAGE_NAME) - val result = labeler.getCheckConnectorLabels() + val result = labeler.getCheckLabels() assert( result == mapOf( JOB_TYPE_KEY to CHECK_JOB, - CHECK_STEP_KEY to CONNECTOR_STEP, + ), + ) + } + + @Test + fun getDiscoverLabels() { + val labeler = PodLabeler(ORCHESTRATOR_IMAGE_NAME) + val result = labeler.getDiscoverLabels() + + assert( + result == + mapOf( + JOB_TYPE_KEY to DISCOVER_JOB, + ), + ) + } + + @Test + fun getSpecLabels() { + val labeler = PodLabeler(ORCHESTRATOR_IMAGE_NAME) + val result = labeler.getSpecLabels() + + assert( + result == + mapOf( + JOB_TYPE_KEY to SPEC_JOB, ), ) } diff --git a/build.gradle b/build.gradle index 
57c7603a592..d068031f4dc 100644 --- a/build.gradle +++ b/build.gradle @@ -25,11 +25,12 @@ buildscript { plugins { id "base" id "com.dorongold.task-tree" version "2.1.1" - id "io.airbyte.gradle.jvm" version "0.35.0" apply false - id "io.airbyte.gradle.jvm.app" version "0.35.0" apply false - id "io.airbyte.gradle.jvm.lib" version "0.35.0" apply false - id "io.airbyte.gradle.docker" version "0.35.0" apply false - id "io.airbyte.gradle.publish" version "0.35.0" apply false + id "io.airbyte.gradle.jvm" version "0.36.1" apply false + id "io.airbyte.gradle.jvm.app" version "0.36.1" apply false + id "io.airbyte.gradle.jvm.lib" version "0.36.1" apply false + id "io.airbyte.gradle.docker" version "0.36.1" apply false + id "io.airbyte.gradle.publish" version "0.36.1" apply false + id "io.airbyte.gradle.kube-reload" version "0.36.1" apply false // uncomment for testing plugin locally // id "io.airbyte.gradle.jvm" version "local-test" apply false // id "io.airbyte.gradle.jvm.app" version "local-test" apply false diff --git a/charts/airbyte-api-server/Chart.yaml b/charts/airbyte-api-server/Chart.yaml index de3d2060bde..c0067267802 100644 --- a/charts/airbyte-api-server/Chart.yaml +++ b/charts/airbyte-api-server/Chart.yaml @@ -15,7 +15,7 @@ type: application # This is the chart version. This version number should be incremented each time you make changes # to the chart and its templates, including the app version. # Versions are expected to follow Semantic Versioning (https://semver.org/) -version: 0.199.0 +version: 0.282.0 # This is the version number of the application being deployed. This version number should be # incremented each time you make changes to the application. Versions are not expected to diff --git a/charts/airbyte-bootloader/Chart.yaml b/charts/airbyte-bootloader/Chart.yaml index 2e2761b6c2f..25c2e17f951 100644 --- a/charts/airbyte-bootloader/Chart.yaml +++ b/charts/airbyte-bootloader/Chart.yaml @@ -15,7 +15,7 @@ type: application # This is the chart version. This version number should be incremented each time you make changes # to the chart and its templates, including the app version. # Versions are expected to follow Semantic Versioning (https://semver.org/) -version: 0.199.0 +version: 0.282.0 # This is the version number of the application being deployed. This version number should be diff --git a/charts/airbyte-connector-builder-server/Chart.yaml b/charts/airbyte-connector-builder-server/Chart.yaml index d88abde95d4..68afa2a58f3 100644 --- a/charts/airbyte-connector-builder-server/Chart.yaml +++ b/charts/airbyte-connector-builder-server/Chart.yaml @@ -15,7 +15,7 @@ type: application # This is the chart version. This version number should be incremented each time you make changes # to the chart and its templates, including the app version. # Versions are expected to follow Semantic Versioning (https://semver.org/) -version: 0.199.0 +version: 0.282.0 # This is the version number of the application being deployed. This version number should be # incremented each time you make changes to the application. Versions are not expected to diff --git a/charts/airbyte-cron/Chart.yaml b/charts/airbyte-cron/Chart.yaml index 1bb2a2ca967..d923c60ccdf 100644 --- a/charts/airbyte-cron/Chart.yaml +++ b/charts/airbyte-cron/Chart.yaml @@ -15,7 +15,7 @@ type: application # This is the chart version. This version number should be incremented each time you make changes # to the chart and its templates, including the app version. 
# Versions are expected to follow Semantic Versioning (https://semver.org/) -version: 0.199.0 +version: 0.282.0 # This is the version number of the application being deployed. This version number should be # incremented each time you make changes to the application. Versions are not expected to diff --git a/charts/airbyte-keycloak-setup/Chart.yaml b/charts/airbyte-keycloak-setup/Chart.yaml index 97a48176360..9c7b9a35543 100644 --- a/charts/airbyte-keycloak-setup/Chart.yaml +++ b/charts/airbyte-keycloak-setup/Chart.yaml @@ -15,7 +15,7 @@ type: application # This is the chart version. This version number should be incremented each time you make changes # to the chart and its templates, including the app version. # Versions are expected to follow Semantic Versioning (https://semver.org/) -version: 0.199.0 +version: 0.282.0 # This is the version number of the application being deployed. This version number should be diff --git a/charts/airbyte-keycloak/Chart.yaml b/charts/airbyte-keycloak/Chart.yaml index a4568d91758..09014038c1f 100644 --- a/charts/airbyte-keycloak/Chart.yaml +++ b/charts/airbyte-keycloak/Chart.yaml @@ -15,7 +15,7 @@ type: application # This is the chart version. This version number should be incremented each time you make changes # to the chart and its templates, including the app version. # Versions are expected to follow Semantic Versioning (https://semver.org/) -version: 0.199.0 +version: 0.282.0 # This is the version number of the application being deployed. This version number should be diff --git a/charts/airbyte-metrics/Chart.yaml b/charts/airbyte-metrics/Chart.yaml index b1057051f22..4083c2f9052 100644 --- a/charts/airbyte-metrics/Chart.yaml +++ b/charts/airbyte-metrics/Chart.yaml @@ -15,7 +15,7 @@ type: application # This is the chart version. This version number should be incremented each time you make changes # to the chart and its templates, including the app version. # Versions are expected to follow Semantic Versioning (https://semver.org/) -version: 0.199.0 +version: 0.282.0 # This is the version number of the application being deployed. This version number should be diff --git a/charts/airbyte-pod-sweeper/Chart.yaml b/charts/airbyte-pod-sweeper/Chart.yaml index ff560af307e..44defb81be3 100644 --- a/charts/airbyte-pod-sweeper/Chart.yaml +++ b/charts/airbyte-pod-sweeper/Chart.yaml @@ -15,7 +15,7 @@ type: application # This is the chart version. This version number should be incremented each time you make changes # to the chart and its templates, including the app version. # Versions are expected to follow Semantic Versioning (https://semver.org/) -version: 0.199.0 +version: 0.282.0 # This is the version number of the application being deployed. This version number should be diff --git a/charts/airbyte-server/Chart.yaml b/charts/airbyte-server/Chart.yaml index c2a97e4b3fe..b9d80219346 100644 --- a/charts/airbyte-server/Chart.yaml +++ b/charts/airbyte-server/Chart.yaml @@ -15,7 +15,7 @@ type: application # This is the chart version. This version number should be incremented each time you make changes # to the chart and its templates, including the app version. # Versions are expected to follow Semantic Versioning (https://semver.org/) -version: 0.199.0 +version: 0.282.0 # This is the version number of the application being deployed. This version number should be # incremented each time you make changes to the application. 
Versions are not expected to diff --git a/charts/airbyte-server/templates/deployment.yaml b/charts/airbyte-server/templates/deployment.yaml index 212240a8339..32846ec7013 100644 --- a/charts/airbyte-server/templates/deployment.yaml +++ b/charts/airbyte-server/templates/deployment.yaml @@ -215,6 +215,16 @@ spec: value: "X-Airbyte-Auth" - name: AIRBYTE_API_AUTH_HEADER_VALUE value: "Internal server" + - name: WORKLOAD_LAUNCHER_ENABLED + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: WORKLOAD_LAUNCHER_ENABLED + - name: WORKLOAD_API_SERVER_ENABLED + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: WORKLOAD_API_SERVER_ENABLED # SECRETS MANAGER - name: SECRET_PERSISTENCE diff --git a/charts/airbyte-temporal/Chart.yaml b/charts/airbyte-temporal/Chart.yaml index a0a79a3696c..7a4869104e7 100644 --- a/charts/airbyte-temporal/Chart.yaml +++ b/charts/airbyte-temporal/Chart.yaml @@ -15,7 +15,7 @@ type: application # This is the chart version. This version number should be incremented each time you make changes # to the chart and its templates, including the app version. # Versions are expected to follow Semantic Versioning (https://semver.org/) -version: 0.199.0 +version: 0.282.0 # This is the version number of the application being deployed. This version number should be diff --git a/charts/airbyte-webapp/Chart.yaml b/charts/airbyte-webapp/Chart.yaml index a66b8775046..920236b3b2e 100644 --- a/charts/airbyte-webapp/Chart.yaml +++ b/charts/airbyte-webapp/Chart.yaml @@ -15,7 +15,7 @@ type: application # This is the chart version. This version number should be incremented each time you make changes # to the chart and its templates, including the app version. # Versions are expected to follow Semantic Versioning (https://semver.org/) -version: 0.199.0 +version: 0.282.0 # This is the version number of the application being deployed. This version number should be diff --git a/charts/airbyte-worker/Chart.yaml b/charts/airbyte-worker/Chart.yaml index ad7317a89c0..95fc37d1dbe 100644 --- a/charts/airbyte-worker/Chart.yaml +++ b/charts/airbyte-worker/Chart.yaml @@ -15,7 +15,7 @@ type: application # This is the chart version. This version number should be incremented each time you make changes # to the chart and its templates, including the app version. # Versions are expected to follow Semantic Versioning (https://semver.org/) -version: 0.199.0 +version: 0.282.0 # This is the version number of the application being deployed. This version number should be diff --git a/charts/airbyte-worker/templates/deployment.yaml b/charts/airbyte-worker/templates/deployment.yaml index 5dad338accc..b54dd1fe0c1 100644 --- a/charts/airbyte-worker/templates/deployment.yaml +++ b/charts/airbyte-worker/templates/deployment.yaml @@ -293,13 +293,22 @@ spec: configMapKeyRef: name: {{ .Release.Name }}-airbyte-env key: WORKERS_MICRONAUT_ENVIRONMENTS + - name: WORKLOAD_LAUNCHER_ENABLED + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: WORKLOAD_LAUNCHER_ENABLED + - name: WORKLOAD_API_SERVER_ENABLED + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: WORKLOAD_API_SERVER_ENABLED {{- if or (eq .Values.global.edition "pro") (eq .Values.global.edition "enterprise") }} - name: AIRBYTE_API_AUTH_HEADER_NAME value: "X-Airbyte-Auth" - name: AIRBYTE_API_AUTH_HEADER_VALUE value: "Internal worker" {{- end }} - # SECRETS MANAGER - name: SECRET_PERSISTENCE value: {{ include "airbyte.secretPersistence" . 
}} diff --git a/charts/airbyte-workload-api-server/Chart.yaml b/charts/airbyte-workload-api-server/Chart.yaml index 5c6570b539c..d017822ee8c 100644 --- a/charts/airbyte-workload-api-server/Chart.yaml +++ b/charts/airbyte-workload-api-server/Chart.yaml @@ -15,7 +15,7 @@ type: application # This is the chart version. This version number should be incremented each time you make changes # to the chart and its templates, including the app version. # Versions are expected to follow Semantic Versioning (https://semver.org/) -version: 0.199.0 +version: 0.282.0 # This is the version number of the application being deployed. This version number should be # incremented each time you make changes to the application. Versions are not expected to diff --git a/charts/airbyte-workload-launcher/Chart.yaml b/charts/airbyte-workload-launcher/Chart.yaml index cbbcfc22694..bd0c94e0038 100644 --- a/charts/airbyte-workload-launcher/Chart.yaml +++ b/charts/airbyte-workload-launcher/Chart.yaml @@ -15,7 +15,7 @@ type: application # This is the chart version. This version number should be incremented each time you make changes # to the chart and its templates, including the app version. # Versions are expected to follow Semantic Versioning (https://semver.org/) -version: 0.199.0 +version: 0.282.0 # This is the version number of the application being deployed. This version number should be # incremented each time you make changes to the application. Versions are not expected to diff --git a/charts/airbyte/Chart.lock b/charts/airbyte/Chart.lock index bd7c771887f..360362d7631 100644 --- a/charts/airbyte/Chart.lock +++ b/charts/airbyte/Chart.lock @@ -4,45 +4,45 @@ dependencies: version: 1.17.1 - name: airbyte-bootloader repository: https://airbytehq.github.io/helm-charts/ - version: 0.199.0 + version: 0.282.0 - name: temporal repository: https://airbytehq.github.io/helm-charts/ - version: 0.199.0 + version: 0.282.0 - name: webapp repository: https://airbytehq.github.io/helm-charts/ - version: 0.199.0 + version: 0.282.0 - name: server repository: https://airbytehq.github.io/helm-charts/ - version: 0.199.0 + version: 0.282.0 - name: airbyte-api-server repository: https://airbytehq.github.io/helm-charts/ - version: 0.199.0 + version: 0.282.0 - name: worker repository: https://airbytehq.github.io/helm-charts/ - version: 0.199.0 + version: 0.282.0 - name: workload-api-server repository: https://airbytehq.github.io/helm-charts/ - version: 0.199.0 + version: 0.282.0 - name: workload-launcher repository: https://airbytehq.github.io/helm-charts/ - version: 0.199.0 + version: 0.282.0 - name: pod-sweeper repository: https://airbytehq.github.io/helm-charts/ - version: 0.199.0 + version: 0.282.0 - name: metrics repository: https://airbytehq.github.io/helm-charts/ - version: 0.199.0 + version: 0.282.0 - name: cron repository: https://airbytehq.github.io/helm-charts/ - version: 0.199.0 + version: 0.282.0 - name: connector-builder-server repository: https://airbytehq.github.io/helm-charts/ - version: 0.199.0 + version: 0.282.0 - name: keycloak repository: https://airbytehq.github.io/helm-charts/ - version: 0.199.0 + version: 0.282.0 - name: keycloak-setup repository: https://airbytehq.github.io/helm-charts/ - version: 0.199.0 -digest: sha256:9af38136c3c9053998d533ae49b0fa949bc1247e31c5d7a27bdff76ac7e2689a -generated: "2024-06-18T00:33:16.326558067Z" + version: 0.282.0 +digest: sha256:548ae0c89d19d502fad4219578b294c549995555a2cebf123c26de5075cc1d9d +generated: "2024-07-10T21:46:01.08033481Z" diff --git a/charts/airbyte/Chart.yaml 
b/charts/airbyte/Chart.yaml index 3851327143f..07372e6fc2c 100644 --- a/charts/airbyte/Chart.yaml +++ b/charts/airbyte/Chart.yaml @@ -15,7 +15,7 @@ type: application # This is the chart version. This version number should be incremented each time you make changes # to the chart and its templates, including the app version. # Versions are expected to follow Semantic Versioning (https://semver.org/) -version: 0.199.0 +version: 0.282.0 # This is the version number of the application being deployed. This version number should be # incremented each time you make changes to the application. Versions are not expected to @@ -32,56 +32,56 @@ dependencies: - condition: airbyte-bootloader.enabled name: airbyte-bootloader repository: https://airbytehq.github.io/helm-charts/ - version: 0.199.0 + version: 0.282.0 - condition: temporal.enabled name: temporal repository: https://airbytehq.github.io/helm-charts/ - version: 0.199.0 + version: 0.282.0 - condition: webapp.enabled name: webapp repository: https://airbytehq.github.io/helm-charts/ - version: 0.199.0 + version: 0.282.0 - condition: server.enabled name: server repository: https://airbytehq.github.io/helm-charts/ - version: 0.199.0 + version: 0.282.0 - condition: airbyte-api-server.enabled name: airbyte-api-server repository: https://airbytehq.github.io/helm-charts/ - version: 0.199.0 + version: 0.282.0 - condition: worker.enabled name: worker repository: https://airbytehq.github.io/helm-charts/ - version: 0.199.0 + version: 0.282.0 - condition: workload-api-server.enabled name: workload-api-server repository: https://airbytehq.github.io/helm-charts/ - version: 0.199.0 + version: 0.282.0 - condition: workload-launcher.enabled name: workload-launcher repository: https://airbytehq.github.io/helm-charts/ - version: 0.199.0 + version: 0.282.0 - condition: pod-sweeper.enabled name: pod-sweeper repository: https://airbytehq.github.io/helm-charts/ - version: 0.199.0 + version: 0.282.0 - condition: metrics.enabled name: metrics repository: https://airbytehq.github.io/helm-charts/ - version: 0.199.0 + version: 0.282.0 - condition: cron.enabled name: cron repository: https://airbytehq.github.io/helm-charts/ - version: 0.199.0 + version: 0.282.0 - condition: connector-builder-server.enabled name: connector-builder-server repository: https://airbytehq.github.io/helm-charts/ - version: 0.199.0 + version: 0.282.0 - condition: keycloak.enabled name: keycloak repository: https://airbytehq.github.io/helm-charts/ - version: 0.199.0 + version: 0.282.0 - condition: keycloak-setup.enabled name: keycloak-setup repository: https://airbytehq.github.io/helm-charts/ - version: 0.199.0 + version: 0.282.0 diff --git a/charts/airbyte/templates/_database.tpl b/charts/airbyte/templates/_database.tpl index 9d3d3263666..fd0e5ef8736 100644 --- a/charts/airbyte/templates/_database.tpl +++ b/charts/airbyte/templates/_database.tpl @@ -96,6 +96,7 @@ Renders the name of the secret where the database user will be referenced */}} {{- define "airbyte.database.userSecretKey" }} {{- if .Values.global.database.userSecretKey }} + {{ $secretName := .Values.global.database.secretName | required "You must set `global.database.secretName` when using an external database" }} {{- .Values.global.database.userSecretKey }} {{- else }} {{- printf "%s" "DATABASE_USER" }} @@ -139,6 +140,7 @@ Renders the name of the secret where the database password will be referenced */}} {{- define "airbyte.database.passwordSecretKey" }} {{- if .Values.global.database.passwordSecretKey }} + {{ $secretName := 
diff --git a/charts/airbyte/templates/_database.tpl b/charts/airbyte/templates/_database.tpl
index 9d3d3263666..fd0e5ef8736 100644
--- a/charts/airbyte/templates/_database.tpl
+++ b/charts/airbyte/templates/_database.tpl
@@ -96,6 +96,7 @@ Renders the name of the secret where the database user will be referenced
 */}}
 {{- define "airbyte.database.userSecretKey" }}
 {{- if .Values.global.database.userSecretKey }}
+  {{ $secretName := .Values.global.database.secretName | required "You must set `global.database.secretName` when using an external database" }}
 {{- .Values.global.database.userSecretKey }}
 {{- else }}
 {{- printf "%s" "DATABASE_USER" }}
@@ -139,6 +140,7 @@ Renders the name of the secret where the database password will be referenced
 */}}
 {{- define "airbyte.database.passwordSecretKey" }}
 {{- if .Values.global.database.passwordSecretKey }}
+  {{ $secretName := .Values.global.database.secretName | required "You must set `global.database.secretName` when using an external database" }}
 {{- .Values.global.database.passwordSecretKey }}
 {{- else }}
 {{- printf "%s" "DATABASE_PASSWORD" }}
@@ -205,10 +207,14 @@ Renders all of the common environment variables which provide database credentials
 Renders a set of database secrets to be included in the shared Airbyte secret
 */}}
 {{- define "airbyte.database.secrets" }}
-{{- if and (not .Values.global.database.secretName) (not .Values.externalDatabase.existingSecret) }}
-DATABASE_USER: {{ include "airbyte.database.user" . }}
-DATABASE_PASSWORD: {{ include "airbyte.database.password" . }}
+{{ $user := (include "airbyte.database.user" .)}}
+{{- if not (empty $user) }}
+DATABASE_USER: {{ $user }}
 {{- end }}
+{{ $password := (include "airbyte.database.password" .)}}
+{{- if not (empty $password) }}
+DATABASE_PASSWORD: {{ $password }}
+{{- end}}
 {{- end }}
 
 {{/*
@@ -219,4 +225,10 @@ DATABASE_HOST: {{ include "airbyte.database.host" . }}
 DATABASE_PORT: {{ include "airbyte.database.port" . | quote }}
 DATABASE_DB: {{ include "airbyte.database.name" . }}
 DATABASE_URL: {{ include "airbyte.database.url" . }}
+{{- if .Values.global.database.user }}
+DATABASE_USER: {{ include "airbyte.database.user" . }}
+{{- end}}
+{{- if .Values.global.database.password }}
+DATABASE_PASSWORD: {{ include "airbyte.database.password" . }}
+{{- end}}
 {{- end }}
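The `required` guards added to `airbyte.database.userSecretKey` and `airbyte.database.passwordSecretKey` above make a chart render fail fast when a secret key is configured without `global.database.secretName`. A minimal sketch of a helm-test for that failure mode, reusing the `baseHelmOptionsForEnterpriseWithValues`, `chartPath`, and `helm.RenderTemplateE` helpers that appear in `charts/helm-tests/tests/database_config_test.go` below (the test name and exact assertion are illustrative, not part of this change):

```go
// Hypothetical test: configuring a secret key while omitting
// global.database.secretName should make template rendering fail
// with the `required` message introduced in _database.tpl above.
func TestExternalDatabaseRequiresSecretName(t *testing.T) {
	helmOpts := baseHelmOptionsForEnterpriseWithValues()
	helmOpts.SetValues["postgresql.enabled"] = "false"
	helmOpts.SetValues["global.database.host"] = "localhost"
	helmOpts.SetValues["global.database.port"] = "5432"
	helmOpts.SetValues["global.database.database"] = "airbyte"
	// Deliberately omit global.database.secretName.
	helmOpts.SetValues["global.database.userSecretKey"] = "DATABASE_USER"

	_, err := helm.RenderTemplateE(t, helmOpts, chartPath, "airbyte", nil)
	require.Error(t, err)
	assert.Contains(t, err.Error(), "You must set `global.database.secretName`")
}
```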
diff --git a/charts/airbyte/templates/env-configmap.yaml b/charts/airbyte/templates/env-configmap.yaml
index cc9bcdddb3d..071be595bde 100644
--- a/charts/airbyte/templates/env-configmap.yaml
+++ b/charts/airbyte/templates/env-configmap.yaml
@@ -159,6 +159,8 @@ data:
   MAX_NOTIFY_WORKERS: {{ .Values.worker.maxNotifyWorkers | default "5" | quote }}
   KUBERNETES_CLIENT_MAX_IDLE_CONNECTIONS: ""
   WORKLOAD_LAUNCHER_PARALLELISM: "10"
+  WORKLOAD_LAUNCHER_ENABLED: {{ (index .Values "workload-launcher" "enabled") | default "false" | quote }}
+  WORKLOAD_API_SERVER_ENABLED: {{ (index .Values "workload-api-server" "enabled") | default "false" | quote }}
   CONNECTOR_BUILDER_SERVER_API_HOST: http://{{ .Release.Name }}-airbyte-connector-builder-server-svc:{{ index .Values "connector-builder-server" "service" "port" }}
   PUB_SUB_ENABLED: "false"
   PUB_SUB_TOPIC_NAME: ""
diff --git a/charts/airbyte/templates/minio.yaml b/charts/airbyte/templates/minio.yaml
index 408ccb2e0f7..1c554c2e397 100644
--- a/charts/airbyte/templates/minio.yaml
+++ b/charts/airbyte/templates/minio.yaml
@@ -54,10 +54,10 @@ spec:
         - containerPort: 9000
         resources:
           requests:
-            memory: "256Mi"
+            memory: "1024Mi"
             cpu: "200m"
           limits:
-            memory: "256Mi"
+            memory: "1024Mi"
             cpu: "200m"
         # Mount the volume into the pod
         securityContext:
diff --git a/charts/helm-tests/tests/basic_template_test.go b/charts/helm-tests/tests/basic_template_test.go
index d19acb73224..ff396d613bd 100644
--- a/charts/helm-tests/tests/basic_template_test.go
+++ b/charts/helm-tests/tests/basic_template_test.go
@@ -77,6 +77,8 @@ var commonConfigMapKeys = toStringSet(
 	"WORKER_ENVIRONMENT",
 	"WORKFLOW_FAILURE_RESTART_DELAY_SECONDS",
 	"WORKLOAD_API_HOST",
+	"WORKLOAD_API_SERVER_ENABLED",
+	"WORKLOAD_LAUNCHER_ENABLED",
 	"WORKLOAD_LAUNCHER_PARALLELISM",
 	"WORKSPACE_DOCKER_MOUNT",
 	"WORKSPACE_ROOT",
diff --git a/charts/helm-tests/tests/database_config_test.go b/charts/helm-tests/tests/database_config_test.go
index 462991aa4c6..d9b576fe6bd 100644
--- a/charts/helm-tests/tests/database_config_test.go
+++ b/charts/helm-tests/tests/database_config_test.go
@@ -287,4 +287,48 @@ func TestExternalDatabaseConfiguration(t *testing.T) {
 			})
 		}
 	})
+
+	t.Run("should set the DATABASE_USER in the config map when plaintext value is provided", func(t *testing.T) {
+		helmOpts := baseHelmOptionsForEnterpriseWithValues()
+		helmOpts.SetValues["postgresql.enabled"] = "false"
+		helmOpts.SetValues["global.database.secretName"] = "database-secret"
+		helmOpts.SetValues["global.database.host"] = "localhost"
+		helmOpts.SetValues["global.database.port"] = "5432"
+		helmOpts.SetValues["global.database.database"] = "airbyte"
+		helmOpts.SetValues["global.database.user"] = "octavia"
+		helmOpts.SetValues["global.database.passwordSecretKey"] = "DATABASE_PASSWORD"
+
+		chartYaml, err := helm.RenderTemplateE(t, helmOpts, chartPath, "airbyte", nil)
+		require.NoError(t, err)
+
+		configMap, err := getConfigMap(chartYaml, "airbyte-airbyte-env")
+		require.NotNil(t, configMap)
+		require.NoError(t, err)
+
+		assert.Equal(t, "octavia", configMap.Data["DATABASE_USER"])
+		_, ok := configMap.Data["DATABASE_PASSWORD"]
+		assert.False(t, ok)
+	})
+
+	t.Run("should set the DATABASE_PASSWORD in the config map when plaintext value is provided", func(t *testing.T) {
+		helmOpts := baseHelmOptionsForEnterpriseWithValues()
+		helmOpts.SetValues["postgresql.enabled"] = "false"
+		helmOpts.SetValues["global.database.secretName"] = "database-secret"
+		helmOpts.SetValues["global.database.host"] = "localhost"
+		helmOpts.SetValues["global.database.port"] = "5432"
+		helmOpts.SetValues["global.database.database"] = "airbyte"
+		helmOpts.SetValues["global.database.userSecretKey"] = "DATABASE_USER"
+		helmOpts.SetValues["global.database.password"] = "squidward"
+
+		chartYaml, err := helm.RenderTemplateE(t, helmOpts, chartPath, "airbyte", nil)
+		require.NoError(t, err)
+
+		configMap, err := getConfigMap(chartYaml, "airbyte-airbyte-env")
+		require.NotNil(t, configMap)
+		require.NoError(t, err)
+
+		assert.Equal(t, "squidward", configMap.Data["DATABASE_PASSWORD"])
+		_, ok := configMap.Data["DATABASE_USER"]
+		assert.False(t, ok)
+	})
 }
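The new `WORKLOAD_LAUNCHER_ENABLED` / `WORKLOAD_API_SERVER_ENABLED` entries in `env-configmap.yaml` mirror the sub-chart toggles, but `basic_template_test.go` only registers the keys in the expected-key set. A behavioral check could look roughly like the sketch below, using the same assumed helpers as the tests above; whether `baseHelmOptionsForEnterpriseWithValues` renders cleanly without further overrides is an assumption:

```go
// Hypothetical test: the config map entries should track the
// workload-launcher / workload-api-server enabled toggles.
func TestWorkloadEnabledFlagsRendered(t *testing.T) {
	helmOpts := baseHelmOptionsForEnterpriseWithValues()
	helmOpts.SetValues["workload-launcher.enabled"] = "true"
	helmOpts.SetValues["workload-api-server.enabled"] = "false"

	chartYaml, err := helm.RenderTemplateE(t, helmOpts, chartPath, "airbyte", nil)
	require.NoError(t, err)

	configMap, err := getConfigMap(chartYaml, "airbyte-airbyte-env")
	require.NoError(t, err)
	require.NotNil(t, configMap)

	assert.Equal(t, "true", configMap.Data["WORKLOAD_LAUNCHER_ENABLED"])
	assert.Equal(t, "false", configMap.Data["WORKLOAD_API_SERVER_ENABLED"])
}
```

Note that the `| default "false"` pipeline in the template also maps an explicit boolean `false` to `"false"`, since Helm's `default` treats `false` as empty, so both branches of this sketch should render a value.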
diff --git a/deps.toml b/deps.toml
index 5507bdfe243..ceedb6d8054 100644
--- a/deps.toml
+++ b/deps.toml
@@ -21,16 +21,16 @@ kotlin-logging = "5.1.0"
 kubernetes-client = "6.12.1"
 log4j = "2.23.1"
 lombok = "1.18.30"
-micronaut = "4.4.3"
+micronaut = "4.5.0"
 micronaut-cache = "4.3.0"
-micronaut-data = "4.7.1"
+micronaut-data = "4.8.1"
 micronaut-email = "2.5.0"
-micronaut-jaxrs = "4.4.0"
+micronaut-jaxrs = "4.5.0"
 micronaut-jdbc = "5.6.0"
 micronaut-kotlin = "4.3.0"
-micronaut-micrometer = "5.5.0"
-micronaut-openapi = "6.8.0"
-micronaut-security = "4.7.0"
+micronaut-micrometer = "5.7.0"
+micronaut-openapi = "6.11.0"
+micronaut-security = "4.9.0"
 micronaut-test = "4.3.0"
 moshi = "1.15.0"
 mockito = "5.8.0"
@@ -85,7 +85,7 @@ flyway-core = { module = "org.flywaydb:flyway-core", version.ref = "flyway" }
 flyway-postgresql = { module = "org.flywaydb:flyway-database-postgresql", version.ref = "flyway" }
 glassfish = { module = "org.glassfish.jersey:jackson-bom", version.ref = "glassfish_version" }
 google-auth-library-oauth2-http = { module = "com.google.auth:google-auth-library-oauth2-http", version = "1.20.0" }
-google-cloud-storage = { module = "com.google.cloud:google-cloud-storage", version = "2.17.2" }
+google-cloud-storage = { module = "com.google.cloud:google-cloud-storage", version = "2.40.0" }
 google-cloud-storage-secretmanager = { module = "com.google.cloud:google-cloud-secretmanager", version = "2.0.5" }
 google-cloud-pubsub = { module = "com.google.cloud:google-cloud-pubsub", version = "1.130.0" }
 google-cloud-sqladmin = { module = "com.google.apis:google-api-services-sqladmin", version = "v1-rev20240317-2.0.0" }
@@ -121,6 +121,7 @@ jooq-codegen = { module = "org.jooq:jooq-codegen", version.ref = "jooq" }
 jooq-meta = { module = "org.jooq:jooq-meta", version.ref = "jooq" }
 json-assert = { module = "org.skyscreamer:jsonassert", version = "1.5.1" }
 json-path = { module = "com.jayway.jsonpath:json-path", version = "2.9.0" }
+json-schema-validator = { module = "com.networknt:json-schema-validator", version = "1.4.0" }
 json-simple = { module = "com.googlecode.json-simple:json-simple", version = "1.1.1" }
 jsoup = { module = "org.jsoup:jsoup", version = "1.17.2" }
 jul-to-slf4j = { module = "org.slf4j:jul-to-slf4j", version.ref = "slf4j" }
@@ -203,7 +204,7 @@ micronaut-data-model = { module = "io.micronaut.data:micronaut-data-model", version.ref = "micronaut-data" }
 micronaut-data-tx = { module = "io.micronaut.data:micronaut-data-tx", version.ref = "micronaut-data" }
 micronaut-email = { module = "io.micronaut.email:micronaut-email", version.ref = "micronaut-email" }
 micronaut-email-sendgrid = { module = "io.micronaut.email:micronaut-email-sendgrid", version.ref = "micronaut-email" }
-micronaut-flyway = { module = "io.micronaut.flyway:micronaut-flyway", version = "7.2.0" }
+micronaut-flyway = { module = "io.micronaut.flyway:micronaut-flyway", version = "7.3.0" }
 micronaut-inject = { module = "io.micronaut:micronaut-inject", version.ref = "micronaut" }
 micronaut-http = { module = "io.micronaut:micronaut-http", version.ref = "micronaut" }
 micronaut-http-client = { module = "io.micronaut:micronaut-http-client", version.ref = "micronaut" }
@@ -227,14 +228,14 @@ micronaut-micrometer-registry-statsd = { module = "io.micronaut.micrometer:micronaut-micrometer-registry-statsd", version.ref = "micronaut-micrometer" }
 micronaut-openapi = { module = "io.micronaut.openapi:micronaut-openapi", version.ref = "micronaut-openapi" }
 micronaut-openapi-annotations = { module = "io.micronaut.openapi:micronaut-openapi-annotations", version.ref = "micronaut-openapi" }
 micronaut-platform = { module = "io.micronaut.platform:micronaut-platform", version.ref = "micronaut" }
-micronaut-problem-json = { module = "io.micronaut.problem:micronaut-problem-json", version = "3.3.0" }
+micronaut-problem-json = { module = "io.micronaut.problem:micronaut-problem-json", version = "3.4.0" }
 micronaut-redis-lettuce = { module = "io.micronaut.redis:micronaut-redis-lettuce", version = "6.4.0" }
 micronaut-runtime = { module = "io.micronaut:micronaut-runtime", version.ref = "micronaut" }
 micronaut-security = { module = "io.micronaut.security:micronaut-security", version.ref = "micronaut-security" }
 micronaut-security-jwt = { module = "io.micronaut.security:micronaut-security-jwt", version.ref = "micronaut-security" }
 micronaut-test-core = { module = "io.micronaut.test:micronaut-test-core", version.ref = "micronaut-test" }
 micronaut-test-junit5 = { module = "io.micronaut.test:micronaut-test-junit5", version.ref = "micronaut-test" }
-micronaut-validation = { module = "io.micronaut.validation:micronaut-validation", version = "4.5.0" }
+micronaut-validation = { module = "io.micronaut.validation:micronaut-validation", version = "4.6.0" }
 
 [bundles]
 apache = ["apache-commons", "apache-commons-lang"]
diff --git a/docker-compose.yaml b/docker-compose.yaml
index 2a0aaf29d24..2bf443d6a73 100644
--- a/docker-compose.yaml
+++ b/docker-compose.yaml
@@ -128,10 +128,6 @@ services:
       - MICROMETER_METRICS_ENABLED=${MICROMETER_METRICS_ENABLED}
       - MICROMETER_METRICS_STATSD_FLAVOR=${MICROMETER_METRICS_STATSD_FLAVOR}
       - MICRONAUT_ENVIRONMENTS=${WORKERS_MICRONAUT_ENVIRONMENTS}
-      - NORMALIZATION_JOB_MAIN_CONTAINER_CPU_LIMIT=${NORMALIZATION_JOB_MAIN_CONTAINER_CPU_LIMIT}
-      - NORMALIZATION_JOB_MAIN_CONTAINER_CPU_REQUEST=${NORMALIZATION_JOB_MAIN_CONTAINER_CPU_REQUEST}
-      - NORMALIZATION_JOB_MAIN_CONTAINER_MEMORY_LIMIT=${NORMALIZATION_JOB_MAIN_CONTAINER_MEMORY_LIMIT}
-      - NORMALIZATION_JOB_MAIN_CONTAINER_MEMORY_REQUEST=${NORMALIZATION_JOB_MAIN_CONTAINER_MEMORY_REQUEST}
      - OTEL_COLLECTOR_ENDPOINT=${OTEL_COLLECTOR_ENDPOINT}
      - PUBLISH_METRICS=${PUBLISH_METRICS}
      - SECRET_PERSISTENCE=${SECRET_PERSISTENCE}
diff --git a/flags.yml b/flags.yml
index d711b4a82b8..9208d02b837 100644
--- a/flags.yml
+++ b/flags.yml
@@ -14,7 +14,7 @@ flags:
 - name: connection.columnSelection
   serve: true
 - name: refreshSchema.period.hours
-  serve: 24
+  serve: 0
 - name: concurrent.source.stream.read
   serve: false
 - name: platform.add-scheduling-jitter
diff --git a/settings.gradle.kts b/settings.gradle.kts
index 7088c2849a8..a2923064b31 100644
--- a/settings.gradle.kts
+++ b/settings.gradle.kts
@@ -15,7 +15,7 @@ pluginManagement {
 // as much information as possible.
 plugins {
   id("com.gradle.enterprise") version "3.15.1"
-  id("com.github.burrunan.s3-build-cache") version "1.5"
+  id("com.github.burrunan.s3-build-cache") version "1.8.1"
 }
 
 gradleEnterprise {