-
Notifications
You must be signed in to change notification settings - Fork 66
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Merge pull request #254 from Multiwoven/cherry-pick-ce-commit-d0c9f23…
…85c5989803f0db2c93ca8c8ac578aeaf8 feat(CE): Add databricks lakehouse destination
- Loading branch information
Showing
8 changed files
with
451 additions
and
2 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
147 changes: 147 additions & 0 deletions
147
integrations/lib/multiwoven/integrations/destination/databricks_lakehouse/client.rb
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,147 @@ | ||
# frozen_string_literal: true | ||
|
||
module Multiwoven
  module Integrations
    module Destination
      module DatabricksLakehouse
        include Multiwoven::Integrations::Core

        # Destination connector for Databricks Lakehouse.
        #
        # Talks to the Databricks REST API over Faraday using a bearer API
        # token: the clusters endpoint for connection checks, and the SQL
        # Statement Execution API (/api/2.0/sql/statements) for discovery
        # and record writes.
        class Client < DestinationConnector
          MAX_CHUNK_SIZE = 10
          # Statement-execution endpoint shared by discover/write.
          SQL_STATEMENTS_PATH = "/api/2.0/sql/statements"

          # Verifies credentials by listing clusters.
          #
          # @param connection_config [Hash] host, api_token, warehouse_id, catalog, schema
          # @return a success/failure connection-status multiwoven message
          def check_connection(connection_config)
            connection_config = connection_config.with_indifferent_access
            db = create_connection(connection_config)
            response = db.get("/api/2.0/clusters/list")
            if response.status == 200
              success_status
            else
              failure_status(nil)
            end
          rescue StandardError => e
            handle_exception(e, {
                               context: "DATABRICKS:LAKEHOUSE:CHECK_CONNECTION:EXCEPTION",
                               type: "error"
                             })
            failure_status(e)
          end

          # Discovers tables and their columns in the configured
          # catalog/schema by running SHOW TABLES followed by a
          # DESCRIBE TABLE per table through the SQL statements API.
          #
          # @param connection_config [Hash]
          # @return a Catalog multiwoven message
          def discover(connection_config)
            connection_config = connection_config.with_indifferent_access
            table_query = "SHOW TABLES IN #{connection_config[:catalog]}.#{connection_config[:schema]};"
            db = create_connection(connection_config)
            records = []
            table_response = db.post(SQL_STATEMENTS_PATH, generate_body(connection_config[:warehouse_id], table_query).to_json)
            table_response_body = JSON.parse(table_response.body)
            table_response_body["result"]["data_array"].each do |table|
              # SHOW TABLES rows are [database, tableName, isTemporary]; the
              # table name is the second column.
              table_name = table[1]
              query = "DESCRIBE TABLE #{connection_config[:catalog]}.#{connection_config[:schema]}.#{table_name};"
              column_response = db.post(SQL_STATEMENTS_PATH, generate_body(connection_config[:warehouse_id], query).to_json)
              column_response_body = JSON.parse(column_response.body)
              records << [table_name, column_response_body["result"]["data_array"]]
            end
            catalog = Catalog.new(streams: create_streams(records))
            catalog.to_multiwoven_message
          rescue StandardError => e
            # Fixed: use the (exception, context-hash) signature, consistent
            # with check_connection and write; the previous positional call
            # passed the context string where the exception belongs.
            handle_exception(e, {
                               context: "DATABRICKS:LAKEHOUSE:DISCOVER:EXCEPTION",
                               type: "error"
                             })
          end

          # Writes records one statement at a time through the SQL
          # statements API, counting per-record successes/failures.
          #
          # @param sync_config [SyncConfig]
          # @param records [Array<Hash>]
          # @param action [String] "destination_insert" (default) or another QueryBuilder action
          # @return a TrackingMessage multiwoven message
          def write(sync_config, records, action = "destination_insert")
            connection_config = sync_config.destination.connection_specification.with_indifferent_access
            table_name = "#{connection_config[:catalog]}.#{connection_config[:schema]}.#{sync_config.stream.name}"
            primary_key = sync_config.model.primary_key
            db = create_connection(connection_config)
            write_success = 0
            write_failure = 0
            log_message_array = []

            records.each do |record|
              query = Multiwoven::Integrations::Core::QueryBuilder.perform(action, table_name, record, primary_key)
              logger.debug("DATABRICKS:LAKEHOUSE:WRITE:QUERY query = #{query} sync_id = #{sync_config.sync_id} sync_run_id = #{sync_config.sync_run_id}")
              begin
                # Build the request body once and reuse it for both the POST
                # and the request log (previously generated twice).
                body = generate_body(connection_config[:warehouse_id], query)
                arg = [SQL_STATEMENTS_PATH, body]
                response = db.post(SQL_STATEMENTS_PATH, body.to_json)
                if response.status == 200
                  write_success += 1
                else
                  write_failure += 1
                end
                log_message_array << log_request_response("info", arg, response)
              rescue StandardError => e
                handle_exception(e, {
                                   context: "DATABRICKS:LAKEHOUSE:RECORD:WRITE:EXCEPTION",
                                   type: "error",
                                   sync_id: sync_config.sync_id,
                                   sync_run_id: sync_config.sync_run_id
                                 })
                write_failure += 1
              end
            end
            tracking_message(write_success, write_failure)
          rescue StandardError => e
            handle_exception(e, {
                               context: "DATABRICKS:LAKEHOUSE:RECORD:WRITE:EXCEPTION",
                               type: "error",
                               sync_id: sync_config.sync_id,
                               sync_run_id: sync_config.sync_run_id
                             })
          end

          private

          # Builds a Faraday connection pointed at the workspace host with
          # bearer-token auth and JSON content type.
          def create_connection(connection_config)
            Faraday.new(url: connection_config[:host]) do |conn|
              conn.headers["Authorization"] = "Bearer #{connection_config[:api_token]}"
              conn.headers["Content-Type"] = "application/json"
              conn.adapter Faraday.default_adapter
            end
          end

          # Request body for the SQL Statement Execution API; waits up to
          # 15s for the statement to finish synchronously.
          def generate_body(warehouse_id, query)
            {
              warehouse_id: warehouse_id,
              statement: query,
              wait_timeout: "15s"
            }
          end

          # Maps [table_name, columns] pairs into protocol Stream objects.
          def create_streams(records)
            message = []
            group_by_table(records).each_value do |r|
              message << Multiwoven::Integrations::Protocol::Stream.new(name: r[:tablename], action: StreamAction["fetch"], json_schema: convert_to_json_schema(r[:columns]))
            end
            message
          end

          # Reshapes DESCRIBE TABLE output into
          # { index => { tablename:, columns: [{column_name:, data_type:, is_nullable:}] } }.
          def group_by_table(records)
            result = {}
            records.each_with_index do |(table_name, entry_data), index|
              # DESCRIBE TABLE rows are [col_name, data_type, comment];
              # nullability is not reported, so assume nullable.
              columns = entry_data.map do |entry|
                {
                  column_name: entry[0],
                  data_type: entry[1],
                  is_nullable: true
                }
              end
              result[index] = { tablename: table_name, columns: columns }
            end
            result
          end

          # Wraps success/failure counts in a TrackingMessage.
          def tracking_message(success, failure)
            Multiwoven::Integrations::Protocol::TrackingMessage.new(
              success: success, failed: failure
            ).to_multiwoven_message
          end
        end
      end
    end
  end
end
15 changes: 15 additions & 0 deletions
15
integrations/lib/multiwoven/integrations/destination/databricks_lakehouse/config/meta.json
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,15 @@ | ||
{ | ||
"data": { | ||
"name": "DatabricksLakehouse", | ||
"title": "Databricks Lakehouse", | ||
"connector_type": "destination", | ||
"category": "Database",
"documentation_url": "https://docs.multiwoven.com/destinations/databricks_lakehouse", | ||
"github_issue_label": "destination-databricks-lakehouse", | ||
"icon": "icon.svg", | ||
"license": "MIT", | ||
"release_stage": "alpha", | ||
"support_level": "community", | ||
"tags": ["language:ruby", "multiwoven"] | ||
} | ||
} |
44 changes: 44 additions & 0 deletions
44
integrations/lib/multiwoven/integrations/destination/databricks_lakehouse/config/spec.json
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,44 @@ | ||
{ | ||
"documentation_url": "https://docs.multiwoven.com/integrations/destination/databricks_lakehouse",
"stream_type": "static", | ||
"connection_specification": { | ||
"$schema": "http://json-schema.org/draft-07/schema#", | ||
"title": "Databricks Lakehouse", | ||
"type": "object", | ||
"required": ["host", "api_token", "warehouse_id", "catalog", "schema"], | ||
"properties": { | ||
"host": { | ||
"description": "The Databricks Lakehouse host domain.",
"type": "string", | ||
"title": "Host", | ||
"order": 0 | ||
}, | ||
"api_token": { | ||
"description": "The Databricks Lakehouse API token.",
"type": "string", | ||
"multiwoven_secret": true, | ||
"title": "API Token", | ||
"order": 1 | ||
},
"warehouse_id": {
"description": "The Databricks Lakehouse warehouse ID.",
"type": "string", | ||
"title": "Warehouse ID", | ||
"order": 2 | ||
}, | ||
"catalog": { | ||
"description": "The name of the catalog", | ||
"default": "hive_metastore", | ||
"type": "string", | ||
"title": "Databricks catalog", | ||
"order": 3 | ||
}, | ||
"schema": { | ||
"description": "The default schema where tables are written.",
"default": "default", | ||
"type": "string", | ||
"title": "Database schema", | ||
"order": 4 | ||
} | ||
} | ||
} | ||
} |
65 changes: 65 additions & 0 deletions
65
integrations/lib/multiwoven/integrations/destination/databricks_lakehouse/icon.svg
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Oops, something went wrong.