From df35db868c3f27b3078d3cd6c6d5383d2446b625 Mon Sep 17 00:00:00 2001
From: TivonB-AI2 <124182151+TivonB-AI2@users.noreply.github.com>
Date: Tue, 16 Jul 2024 14:25:58 -0400
Subject: [PATCH 1/4] fix(CE): refactor discover stream (#290)

---
 .../multiwoven/integrations/destination/maria_db/client.rb | 4 +++-
 .../integrations/destination/maria_db/client_spec.rb       | 2 +-
 2 files changed, 4 insertions(+), 2 deletions(-)

diff --git a/integrations/lib/multiwoven/integrations/destination/maria_db/client.rb b/integrations/lib/multiwoven/integrations/destination/maria_db/client.rb
index 75931b8d..ccb74107 100644
--- a/integrations/lib/multiwoven/integrations/destination/maria_db/client.rb
+++ b/integrations/lib/multiwoven/integrations/destination/maria_db/client.rb
@@ -102,7 +102,9 @@ def group_by_table(records)
         result[index][:tablename] = table_name
         result[index][:columns] = [column_data]
       end
-      result
+      result.values.group_by { |entry| entry[:tablename] }.transform_values do |entries|
+        { tablename: entries.first[:tablename], columns: entries.flat_map { |entry| entry[:columns] } }
+      end
     end

     def tracking_message(success, failure)
diff --git a/integrations/spec/multiwoven/integrations/destination/maria_db/client_spec.rb b/integrations/spec/multiwoven/integrations/destination/maria_db/client_spec.rb
index 270ce34d..fa8e4b0e 100644
--- a/integrations/spec/multiwoven/integrations/destination/maria_db/client_spec.rb
+++ b/integrations/spec/multiwoven/integrations/destination/maria_db/client_spec.rb
@@ -90,7 +90,7 @@
       expect(first_stream.name).to eq("test_table")
       expect(first_stream.json_schema).to be_an(Hash)
       expect(first_stream.json_schema["type"]).to eq("object")
-      expect(first_stream.json_schema["properties"]).to eq({ "col1" => { "type" => "string" } })
+      expect(first_stream.json_schema["properties"]).to eq({ "col1" => { "type" => "string" }, "col2" => { "type" => "string" }, "col3" => { "type" => "string" } })
     end
   end

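Note: a minimal, standalone sketch of the group_by_table change in PATCH 1/4, run against made-up entries rather than the connector's real discover payload. Before the patch, each column produced its own indexed entry, so discover emitted one stream per column; the refactor merges entries that share a :tablename so each table surfaces once with all of its columns.

    # Hypothetical input shaped like group_by_table's intermediate result.
    entries = {
      0 => { tablename: "test_table", columns: [{ column_name: "col1", type: "string" }] },
      1 => { tablename: "test_table", columns: [{ column_name: "col2", type: "string" }] },
      2 => { tablename: "other_table", columns: [{ column_name: "id", type: "string" }] }
    }

    # The patched return expression: group the indexed entries by table name,
    # then concatenate the column lists within each group.
    merged = entries.values.group_by { |entry| entry[:tablename] }.transform_values do |group|
      { tablename: group.first[:tablename], columns: group.flat_map { |entry| entry[:columns] } }
    end

    merged.each_value { |r| puts "#{r[:tablename]}: #{r[:columns].map { |c| c[:column_name] }.join(', ')}" }
    # => test_table: col1, col2
    # => other_table: id
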
From 02c1d62bd491520aa95e0384961ce506bc445138 Mon Sep 17 00:00:00 2001
From: TivonB-AI2 <124182151+TivonB-AI2@users.noreply.github.com>
Date: Tue, 16 Jul 2024 06:28:36 -0400
Subject: [PATCH 2/4] feat(CE): Add databricks lakehouse destination

---
 integrations/Gemfile.lock                      |   2 +-
 integrations/lib/multiwoven/integrations.rb    |   1 +
 .../databricks_lakehouse/client.rb             | 147 +++++++++++++++
 .../databricks_lakehouse/config/meta.json      |  15 ++
 .../databricks_lakehouse/config/spec.json      |  44 +++++
 .../destination/databricks_lakehouse/icon.svg  |  65 +++++++
 .../lib/multiwoven/integrations/rollout.rb     |   3 +-
 .../databricks_lakehouse/client_spec.rb        | 176 ++++++++++++++++++
 8 files changed, 451 insertions(+), 2 deletions(-)
 create mode 100644 integrations/lib/multiwoven/integrations/destination/databricks_lakehouse/client.rb
 create mode 100644 integrations/lib/multiwoven/integrations/destination/databricks_lakehouse/config/meta.json
 create mode 100644 integrations/lib/multiwoven/integrations/destination/databricks_lakehouse/config/spec.json
 create mode 100644 integrations/lib/multiwoven/integrations/destination/databricks_lakehouse/icon.svg
 create mode 100644 integrations/spec/multiwoven/integrations/destination/databricks_lakehouse/client_spec.rb

diff --git a/integrations/Gemfile.lock b/integrations/Gemfile.lock
index 35f8e9bd..98331163 100644
--- a/integrations/Gemfile.lock
+++ b/integrations/Gemfile.lock
@@ -7,7 +7,7 @@ GIT

 PATH
   remote: .
   specs:
-    multiwoven-integrations (0.4.1)
+    multiwoven-integrations (0.5.0)
       activesupport
       async-websocket
       aws-sdk-athena
diff --git a/integrations/lib/multiwoven/integrations.rb b/integrations/lib/multiwoven/integrations.rb
index 35bb8848..d305120f 100644
--- a/integrations/lib/multiwoven/integrations.rb
+++ b/integrations/lib/multiwoven/integrations.rb
@@ -77,6 +77,7 @@
 require_relative "integrations/destination/http/client"
 require_relative "integrations/destination/iterable/client"
 require_relative "integrations/destination/maria_db/client"
+require_relative "integrations/destination/databricks_lakehouse/client"

 module Multiwoven
   module Integrations
diff --git a/integrations/lib/multiwoven/integrations/destination/databricks_lakehouse/client.rb b/integrations/lib/multiwoven/integrations/destination/databricks_lakehouse/client.rb
new file mode 100644
index 00000000..341d02d7
--- /dev/null
+++ b/integrations/lib/multiwoven/integrations/destination/databricks_lakehouse/client.rb
@@ -0,0 +1,147 @@
+# frozen_string_literal: true
+
+module Multiwoven
+  module Integrations
+    module Destination
+      module DatabricksLakehouse
+        include Multiwoven::Integrations::Core
+        class Client < DestinationConnector
+          MAX_CHUNK_SIZE = 10
+          def check_connection(connection_config)
+            connection_config = connection_config.with_indifferent_access
+            db = create_connection(connection_config)
+            response = db.get("/api/2.0/clusters/list")
+            if response.status == 200
+              success_status
+            else
+              failure_status(nil)
+            end
+          rescue StandardError => e
+            handle_exception(e, {
+                               context: "DATABRICKS:LAKEHOUSE:CHECK_CONNECTION:EXCEPTION",
+                               type: "error"
+                             })
+            failure_status(e)
+          end
+
+          def discover(connection_config)
+            connection_config = connection_config.with_indifferent_access
+            table_query = "SHOW TABLES IN #{connection_config[:catalog]}.#{connection_config[:schema]};"
+            db = create_connection(connection_config)
+            records = []
+            table_response = db.post("/api/2.0/sql/statements", generate_body(connection_config[:warehouse_id], table_query).to_json)
+            table_response_body = JSON.parse(table_response.body)
+            table_response_body["result"]["data_array"].each do |table|
+              table_name = table[1]
+              query = "DESCRIBE TABLE #{connection_config[:catalog]}.#{connection_config[:schema]}.#{table_name};"
+              column_response = db.post("/api/2.0/sql/statements", generate_body(connection_config[:warehouse_id], query).to_json)
+              column_response_body = JSON.parse(column_response.body)
+              records << [table_name, column_response_body["result"]["data_array"]]
+            end
+            catalog = Catalog.new(streams: create_streams(records))
+            catalog.to_multiwoven_message
+          rescue StandardError => e
+            handle_exception(
+              "DATABRICKS:LAKEHOUSE:DISCOVER:EXCEPTION",
+              "error",
+              e
+            )
+          end
+
+          def write(sync_config, records, action = "destination_insert")
+            connection_config = sync_config.destination.connection_specification.with_indifferent_access
+            table_name = "#{connection_config[:catalog]}.#{connection_config[:schema]}.#{sync_config.stream.name}"
+            primary_key = sync_config.model.primary_key
+            db = create_connection(connection_config)
+            write_success = 0
+            write_failure = 0
+            log_message_array = []
+
+            records.each do |record|
+              query = Multiwoven::Integrations::Core::QueryBuilder.perform(action, table_name, record, primary_key)
+              logger.debug("DATABRICKS:LAKEHOUSE:WRITE:QUERY query = #{query} sync_id = #{sync_config.sync_id} sync_run_id = #{sync_config.sync_run_id}")
+              begin
+                arg = ["/api/2.0/sql/statements", generate_body(connection_config[:warehouse_id], query)]
+                response = db.post("/api/2.0/sql/statements", generate_body(connection_config[:warehouse_id], query).to_json)
+                if response.status == 200
+                  write_success += 1
+                else
+                  write_failure += 1
+                end
+                log_message_array << log_request_response("info", arg, response)
+              rescue StandardError => e
+                handle_exception(e, {
+                                   context: "DATABRICKS:LAKEHOUSE:RECORD:WRITE:EXCEPTION",
+                                   type: "error",
+                                   sync_id: sync_config.sync_id,
+                                   sync_run_id: sync_config.sync_run_id
+                                 })
+                write_failure += 1
+              end
+            end
+            tracking_message(write_success, write_failure)
+          rescue StandardError => e
+            handle_exception(e, {
+                               context: "DATABRICKS:LAKEHOUSE:RECORD:WRITE:EXCEPTION",
+                               type: "error",
+                               sync_id: sync_config.sync_id,
+                               sync_run_id: sync_config.sync_run_id
+                             })
+          end
+
+          private
+
+          def create_connection(connection_config)
+            Faraday.new(url: connection_config[:host]) do |conn|
+              conn.headers["Authorization"] = "Bearer #{connection_config[:api_token]}"
+              conn.headers["Content-Type"] = "application/json"
+              conn.adapter Faraday.default_adapter
+            end
+          end
+
+          def generate_body(warehouse_id, query)
+            {
+              warehouse_id: warehouse_id,
+              statement: query,
+              wait_timeout: "15s"
+            }
+          end
+
+          def create_streams(records)
+            message = []
+            group_by_table(records).each_value do |r|
+              message << Multiwoven::Integrations::Protocol::Stream.new(name: r[:tablename], action: StreamAction["fetch"], json_schema: convert_to_json_schema(r[:columns]))
+            end
+            message
+          end
+
+          def group_by_table(records)
+            result = {}
+            records.each_with_index do |entries, index|
+              table_name = records[index][0]
+              column = []
+              entry_data = entries[1]
+              entry_data.each do |entry|
+                column << {
+                  column_name: entry[0],
+                  data_type: entry[1],
+                  is_nullable: true
+                }
+              end
+              result[index] ||= {}
+              result[index][:tablename] = table_name
+              result[index][:columns] = column
+            end
+            result
+          end
+
+          def tracking_message(success, failure)
+            Multiwoven::Integrations::Protocol::TrackingMessage.new(
+              success: success, failed: failure
+            ).to_multiwoven_message
+          end
+        end
+      end
+    end
+  end
+end
diff --git a/integrations/lib/multiwoven/integrations/destination/databricks_lakehouse/config/meta.json b/integrations/lib/multiwoven/integrations/destination/databricks_lakehouse/config/meta.json
new file mode 100644
index 00000000..6849247c
--- /dev/null
+++ b/integrations/lib/multiwoven/integrations/destination/databricks_lakehouse/config/meta.json
@@ -0,0 +1,15 @@
+{
+  "data": {
+    "name": "DatabricksLakehouse",
+    "title": "Databricks Lakehouse",
+    "connector_type": "destination",
+    "category": "Marketing Automation",
+    "documentation_url": "https://docs.multiwoven.com/destinations/databricks_lakehouse",
+    "github_issue_label": "destination-databricks-lakehouse",
+    "icon": "icon.svg",
+    "license": "MIT",
+    "release_stage": "alpha",
+    "support_level": "community",
+    "tags": ["language:ruby", "multiwoven"]
+  }
+}
diff --git a/integrations/lib/multiwoven/integrations/destination/databricks_lakehouse/config/spec.json b/integrations/lib/multiwoven/integrations/destination/databricks_lakehouse/config/spec.json
new file mode 100644
index 00000000..bbd4f497
--- /dev/null
+++ b/integrations/lib/multiwoven/integrations/destination/databricks_lakehouse/config/spec.json
@@ -0,0 +1,44 @@
+{
+  "documentation_url": "https://docs.multiwoven.com/integrations/destination/databrick_lakehouse",
+  "stream_type": "static",
+  "connection_specification": {
+    "$schema": "http://json-schema.org/draft-07/schema#",
+    "title": "Databricks Lakehouse",
+    "type": "object",
+    "required": ["host", "api_token", "warehouse_id", "catalog", "schema"],
+    "properties": {
+      "host": {
+        "description": "The Databricks Lakehouse host domain.",
+        "type": "string",
+        "title": "Host",
+        "order": 0
+      },
+      "api_token": {
+        "description": "The Databricks Lakehouse API token.",
+        "type": "string",
+        "multiwoven_secret": true,
+        "title": "API Token",
+        "order": 1
+      },
+      "warehouse_id": {
+        "description": "The Databricks Lakehouse warehouse ID.",
+        "type": "string",
+        "title": "Warehouse ID",
+        "order": 2
+      },
+      "catalog": {
+        "description": "The name of the catalog.",
+        "default": "hive_metastore",
+        "type": "string",
+        "title": "Databricks catalog",
+        "order": 3
+      },
+      "schema": {
+        "description": "The default schema where tables are written.",
+        "default": "default",
+        "type": "string",
+        "title": "Database schema",
+        "order": 4
+      }
+    }
+  }
+}
diff --git a/integrations/lib/multiwoven/integrations/destination/databricks_lakehouse/icon.svg b/integrations/lib/multiwoven/integrations/destination/databricks_lakehouse/icon.svg
new file mode 100644
index 00000000..4a18a58b
--- /dev/null
+++ b/integrations/lib/multiwoven/integrations/destination/databricks_lakehouse/icon.svg
@@ -0,0 +1,65 @@
+[65-line Databricks icon; the SVG markup was stripped during extraction and is not recoverable.]
\ No newline at end of file
diff --git a/integrations/lib/multiwoven/integrations/rollout.rb b/integrations/lib/multiwoven/integrations/rollout.rb
index d7cd5fd9..7dc8c564 100644
--- a/integrations/lib/multiwoven/integrations/rollout.rb
+++ b/integrations/lib/multiwoven/integrations/rollout.rb
@@ -2,7 +2,7 @@

 module Multiwoven
   module Integrations
-    VERSION = "0.4.1"
+    VERSION = "0.5.0"

     ENABLED_SOURCES = %w[
       Snowflake
@@ -33,6 +33,7 @@ module Integrations
       Http
       Iterable
       MariaDB
+      DatabricksLakehouse
     ].freeze
   end
 end
diff --git a/integrations/spec/multiwoven/integrations/destination/databricks_lakehouse/client_spec.rb b/integrations/spec/multiwoven/integrations/destination/databricks_lakehouse/client_spec.rb
new file mode 100644
index 00000000..9cc10028
--- /dev/null
+++ b/integrations/spec/multiwoven/integrations/destination/databricks_lakehouse/client_spec.rb
@@ -0,0 +1,176 @@
+# frozen_string_literal: true
+
+RSpec.describe Multiwoven::Integrations::Destination::DatabricksLakehouse::Client do
+  include WebMock::API
+
+  before(:each) do
+    WebMock.disable_net_connect!(allow_localhost: true)
+  end
+
+  let(:client) { described_class.new }
+  let(:connection_config) do
+    {
+      host: "https://adb-7377493381576663.3.azuredatabricks.net",
+      api_token: ENV["DATABRICKS_API_TOKEN"],
+      warehouse_id: ENV["DATABRICKS_WAREHOUSE_ID"],
+      catalog: "hive_metastore",
+      schema: "default",
+      endpoint: "table sync"
+    }
+  end
+  let(:sync_config_json) do
+    {
+      source: {
+        name: "Sample Source Connector",
+        type: "source",
+        connection_specification: {
+          private_api_key: "test_api_key"
+        }
+      },
+      destination: {
+        name: "Databricks",
+        type: "destination",
+        connection_specification: connection_config
+      },
+      model: {
+        name: "ExampleModel",
+        query: "SELECT col1, col2, col3 FROM test_table_1",
+        query_type: "raw_sql",
+        primary_key: "col1"
+      },
+      sync_mode: "incremental",
+      destination_sync_mode: "insert",
+      stream: {
+        name: "table",
+        action: "create",
+        json_schema: {},
+        supported_sync_modes: %w[incremental]
+      }
+    }
+  end
+
+  let(:faraday_connection) { instance_double(Faraday::Connection) }
+  let(:response) { instance_double(Faraday::Response, status: 200, success?: true) }
+  let(:faraday_connection2) { instance_double(Faraday::Connection) }
+  let(:response2) { instance_double(Faraday::Response, status: 200, success?: true) }
+
+  let(:table_response_body) { { "result" => { "data_array" => [%w[table_name test_table]] } }.to_json }
+  let(:column_response_body) { { "result" => { "data_array" => [%w[col1 int YES], %w[col2 varchar YES], %w[col3 float YES]] } }.to_json }
+
+  describe "#check_connection" do
+    context "when the connection is successful" do
+      it "returns a succeeded connection status" do
+        allow(Faraday).to receive(:new).with(url: sync_config_json[:destination][:connection_specification][:host]).and_return(faraday_connection)
+        allow(faraday_connection).to receive(:get).with("/api/2.0/clusters/list").and_return(response)
+        allow(response).to receive(:status).and_return(200)
+        message = client.check_connection(sync_config_json[:destination][:connection_specification])
+        result = message.connection_status
+        expect(result.status).to eq("succeeded")
+        expect(result.message).to be_nil
+      end
+    end
+
+    context "when the connection fails" do
+      it "returns a failed connection status with an error message" do
+        allow(client).to receive(:create_connection).and_raise(StandardError, "Connection failed")
+        message = client.check_connection(sync_config_json[:destination][:connection_specification])
+        result = message.connection_status
+        expect(result.status).to eq("failed")
+        expect(result.message).to include("Connection failed")
+      end
+    end
+  end
+
+  describe "#discover" do
+    it "discovers schema successfully" do
+      body1 = {
+        warehouse_id: connection_config[:warehouse_id],
+        statement: "SHOW TABLES IN #{connection_config[:catalog]}.#{connection_config[:schema]};",
+        wait_timeout: "15s"
+      }
+      body2 = {
+        warehouse_id: connection_config[:warehouse_id],
+        statement: "DESCRIBE TABLE #{connection_config[:catalog]}.#{connection_config[:schema]}.test_table;",
+        wait_timeout: "15s"
+      }
+      call_count = 0
+      allow(Faraday).to receive(:new).with(url: sync_config_json[:destination][:connection_specification][:host]).and_return(faraday_connection, faraday_connection2)
+
+      allow(faraday_connection).to receive(:post) do |*args|
+        call_count += 1
+        if call_count == 1
+          expect(args).to eq(["/api/2.0/sql/statements", body1.to_json])
+        else
+          expect(args).to eq(["/api/2.0/sql/statements", body2.to_json])
+        end
+        response
+      end
+
+      allow(response).to receive(:body) do
+        call_count == 1 ? table_response_body : column_response_body
+      end
+
+      message = client.discover(sync_config_json[:destination][:connection_specification])
+      expect(message.catalog).to be_an(Multiwoven::Integrations::Protocol::Catalog)
+      first_stream = message.catalog.streams.first
+      expect(first_stream).to be_a(Multiwoven::Integrations::Protocol::Stream)
+      expect(first_stream.name).to eq("test_table")
+      expect(first_stream.json_schema).to be_an(Hash)
+      expect(first_stream.json_schema["type"]).to eq("object")
+      expect(first_stream.json_schema["properties"]).to eq({ "col1" => { "type" => "string" }, "col2" => { "type" => "string" }, "col3" => { "type" => "string" } })
+    end
+  end
+
+  describe "#write" do
+    context "when the write operation is successful" do
+      it "increments the success count" do
+        sync_config = Multiwoven::Integrations::Protocol::SyncConfig.from_json(
+          sync_config_json.to_json
+        )
+        record = [
+          { "col1" => 1, "col2" => "first", "col3" => 1.1 }
+        ]
+        body = {
+          warehouse_id: connection_config[:warehouse_id],
+          statement: "INSERT INTO hive_metastore.default.table (col1, col2, col3) VALUES ('1', 'first', '1.1');",
+          wait_timeout: "15s"
+        }
+        allow(Faraday).to receive(:new).with(url: sync_config_json[:destination][:connection_specification][:host]).and_return(faraday_connection)
+        allow(faraday_connection).to receive(:post).with("/api/2.0/sql/statements", body.to_json).and_return(response)
+        allow(response).to receive(:status).and_return(200)
+        response = client.write(sync_config, record)
+        expect(response.tracking.success).to eq(record.size)
+        expect(response.tracking.failed).to eq(0)
+      end
+    end
+
+    context "when the write operation fails" do
+      it "increments the failure count" do
+        sync_config = Multiwoven::Integrations::Protocol::SyncConfig.from_json(
+          sync_config_json.to_json
+        )
+        record = [
+          { "col1" => 1, "col2" => "first", "col3" => 1.1 }
+        ]
+        body = {
+          warehouse_id: connection_config[:warehouse_id],
+          statement: "INSERT INTO hive_metastore.default.table (col1, col2, col3) VALUES ('1', 'first', '1.1');",
+          wait_timeout: "15s"
+        }
+        allow(Faraday).to receive(:new).with(url: sync_config_json[:destination][:connection_specification][:host]).and_return(faraday_connection)
+        allow(faraday_connection).to receive(:post).with("/api/2.0/sql/statements", body.to_json).and_return(response)
+        allow(response).to receive(:status).and_return(400)
+        response = client.write(sync_config, record)
+        expect(response.tracking.failed).to eq(record.size)
+        expect(response.tracking.success).to eq(0)
+      end
+    end
+  end
+
+  describe "#meta_data" do
+    # change this to rollout validation for all connectors rolling out
+    it "client class_name and meta name are the same" do
+      meta_name = client.class.to_s.split("::")[-2]
+      expect(client.send(:meta_data)[:data][:name]).to eq(meta_name)
+    end
+  end
+end

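Note: the new client drives everything — discover, describe, and writes — through Databricks' SQL Statement Execution API. A minimal standalone sketch of that call pattern, mirroring create_connection and generate_body above; the host, token, and warehouse ID are placeholders, not working credentials:

    require "faraday"
    require "json"

    # Same Faraday setup as create_connection.
    conn = Faraday.new(url: "https://adb-0000000000000000.0.azuredatabricks.net") do |f|
      f.headers["Authorization"] = "Bearer <api_token>"
      f.headers["Content-Type"] = "application/json"
      f.adapter Faraday.default_adapter
    end

    # Same shape as generate_body.
    body = {
      warehouse_id: "<warehouse_id>",
      statement: "SHOW TABLES IN hive_metastore.default;",
      wait_timeout: "15s" # block up to 15s for a synchronous result
    }

    response = conn.post("/api/2.0/sql/statements", body.to_json)
    rows = JSON.parse(response.body).dig("result", "data_array") || []
    rows.each { |row| puts row[1] } # discover assumes the table name is the second field
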
From e57410b5094b15ea0bc79586227963f24d3a66fa Mon Sep 17 00:00:00 2001
From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com>
Date: Thu, 18 Jul 2024 18:05:35 +0530
Subject: [PATCH 3/4] chore(CE): template changes and reset token validation (#255)

---
 .../app/controllers/api/v1/auth_controller.rb |   5 +-
 .../mailer/invitation_instructions.html.erb   | 192 ++++++++++++++++--
 .../devise/mailer/password_change.html.erb    |  34 +++-
 .../reset_password_instructions.html.erb      |   5 +-
 .../api/v1/auth_controller_spec.rb            |  17 ++
 server/spec/mailers/device_mailer_spec.rb     |   6 +-
 6 files changed, 226 insertions(+), 33 deletions(-)

diff --git a/server/app/controllers/api/v1/auth_controller.rb b/server/app/controllers/api/v1/auth_controller.rb
index 39ca0b4e..c305f5d5 100644
--- a/server/app/controllers/api/v1/auth_controller.rb
+++ b/server/app/controllers/api/v1/auth_controller.rb
@@ -69,8 +69,9 @@ def forgot_password

   def reset_password
     user = User.with_reset_password_token(params[:reset_password_token])
-
-    if user&.reset_password(params[:password], params[:password_confirmation])
+    if user && !user.reset_password_period_valid?
+      render_error(message: "Token has expired.", status: :unprocessable_entity)
+    elsif user&.reset_password(params[:password], params[:password_confirmation])
       render json: { data: { type: "message", id: user.id, attributes: { message: "Password successfully reset." } } },
diff --git a/server/app/views/devise/mailer/invitation_instructions.html.erb b/server/app/views/devise/mailer/invitation_instructions.html.erb
index 598b72e2..68660e71 100644
--- a/server/app/views/devise/mailer/invitation_instructions.html.erb
+++ b/server/app/views/devise/mailer/invitation_instructions.html.erb
@@ -1,17 +1,175 @@
-<% query_params = { invited: true, invited_user:@resource.email, invitation_token: @token, workspace_id: @workspace.id, workspace_name: @workspace.name, invited_by: @resource.invited_by.email } %>
-
-<% custom_url = "#{ENV["UI_HOST"]}/sign-up?#{query_params.to_query}" %>
-
-<%= "Join #{@workspace.name} on AI Squared" %>
-
-<%= "#{@resource.invited_by.email} invited you to join the workspace #{@workspace.name} on AI Squared" %>
-
-<%= link_to t("devise.mailer.invitation_instructions.accept"), custom_url %>
-
-<% if @resource.invitation_due_at %>
-  <%= t("devise.mailer.invitation_instructions.accept_until", due_date: l(@resource.invitation_due_at, format: :'devise.mailer.invitation_instructions.accept_until_format')) %>
-<% end %>
+[The HTML wrapper tags on both sides of this hunk were stripped during extraction. The 175 added lines are a styled HTML table-layout email rendering the same invitation copy as the removed lines above, signed off "Cheers, Team AI Squared".]
diff --git a/server/app/views/devise/mailer/password_change.html.erb b/server/app/views/devise/mailer/password_change.html.erb
[34-line styled HTML email; the markup was stripped during extraction. Surviving new copy:]
+ Your password has been changed successfully. If you did not make this change, please reset your password immediately.

[The hunks for reset_password_instructions.html.erb, auth_controller_spec.rb, and device_mailer_spec.rb did not survive extraction.]
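Note: the reset_password change leans on Devise's Recoverable module, where reset_password_period_valid? compares the token's reset_password_sent_at timestamp against the configurable reset_password_within window. A rough, self-contained sketch of that check — the 6-hour window is illustrative, not this app's confirmed setting:

    # config/initializers/devise.rb (illustrative)
    # config.reset_password_within = 6.hours

    # Approximation of what Devise evaluates for the user's token:
    def reset_password_period_valid?(reset_password_sent_at, window_seconds)
      !reset_password_sent_at.nil? && reset_password_sent_at.utc >= Time.now.utc - window_seconds
    end

    six_hours = 6 * 60 * 60
    puts reset_password_period_valid?(Time.now - 2 * 60 * 60, six_hours)  # => true  (fresh token)
    puts reset_password_period_valid?(Time.now - 12 * 60 * 60, six_hours) # => false (controller renders "Token has expired.")
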
[Patch 4's commit header ("From <sha>" and author lines) was lost during extraction.]
Date: Thu, 18 Jul 2024 17:46:39 -0400
Subject: [PATCH 4/4] Pablo/be 244 duckdb error on nationwide (#293) (#247)

* fix(CE): install and load httpfs in duckdb before usage

Co-authored-by: pabss-ai2 <125898021+pabss-ai2@users.noreply.github.com>
---
 server/Dockerfile      | 19 +++++++++++++++++--
 server/Dockerfile.dev  | 16 ++++++++++++++--
 server/getduckdbcli.sh | 10 ++++++++++
 3 files changed, 41 insertions(+), 4 deletions(-)
 create mode 100644 server/getduckdbcli.sh

diff --git a/server/Dockerfile b/server/Dockerfile
index a7c96480..c1c292f7 100644
--- a/server/Dockerfile
+++ b/server/Dockerfile
@@ -21,15 +21,30 @@
 RUN apt-get update -qq && \
     apt-get install --no-install-recommends -y build-essential autoconf automake libtool git libpq-dev libvips pkg-config m4 perl libltdl-dev curl git wget unzip default-libmysqlclient-dev

 COPY getduckdb.sh .
-# Make the script executable
-RUN chmod +x getduckdb.sh
+COPY getduckdbcli.sh .
+
+# Make the scripts executable
+RUN chmod +x getduckdb.sh getduckdbcli.sh
+
+# Run the scripts
 RUN ./getduckdb.sh
+RUN ./getduckdbcli.sh

+# Unzip and move DuckDB files
 RUN unzip duckdb.zip -d libduckdb
 RUN mv libduckdb/duckdb.* /usr/local/include
 RUN mv libduckdb/libduckdb.so /usr/local/lib
 RUN ldconfig /usr/local/lib

+# Download and install the DuckDB CLI
+RUN unzip duckdb_cli.zip -d duckdb_cli
+RUN mv duckdb_cli/duckdb /usr/local/bin/
+RUN chmod +x /usr/local/bin/duckdb
+
+RUN ldconfig /usr/local/lib
+# Load the httpfs extension into DuckDB
+RUN echo "INSTALL httpfs; LOAD httpfs;" | duckdb
+
 # # Navigate to the directory of the component you want to build
 WORKDIR /
diff --git a/server/Dockerfile.dev b/server/Dockerfile.dev
index 394bcc03..e16a9127 100644
--- a/server/Dockerfile.dev
+++ b/server/Dockerfile.dev
@@ -10,15 +10,27 @@
 RUN apt-get update -qq && \
     apt-get install --no-install-recommends -y build-essential autoconf automake libtool git libpq-dev libvips pkg-config m4 perl libltdl-dev curl git wget unzip default-libmysqlclient-dev

 COPY getduckdb.sh .
+COPY getduckdbcli.sh .
+
 # Make the script executable
-RUN chmod +x getduckdb.sh
+RUN chmod +x getduckdb.sh getduckdbcli.sh
+
 RUN ./getduckdb.sh
+RUN ./getduckdbcli.sh

+# Unzip and move DuckDB files
 RUN unzip duckdb.zip -d libduckdb
 RUN mv libduckdb/duckdb.* /usr/local/include
 RUN mv libduckdb/libduckdb.so /usr/local/lib
-RUN ldconfig /usr/local/lib

+# Download and install the DuckDB CLI
+RUN unzip duckdb_cli.zip -d duckdb_cli
+RUN mv duckdb_cli/duckdb /usr/local/bin/
+RUN chmod +x /usr/local/bin/duckdb
+
+RUN ldconfig /usr/local/lib
+# Load the httpfs extension into DuckDB
+RUN echo "INSTALL httpfs; LOAD httpfs;" | duckdb

 WORKDIR /rails
diff --git a/server/getduckdbcli.sh b/server/getduckdbcli.sh
new file mode 100644
index 00000000..f5231efc
--- /dev/null
+++ b/server/getduckdbcli.sh
@@ -0,0 +1,10 @@
+#!/bin/sh
+
+MACHINE=`uname -m`
+
+case "$MACHINE" in
+  "x86_64" ) ARC=amd64 ;;
+  "aarch64" ) ARC=aarch64 ;;
+esac
+
+wget -O duckdb_cli.zip "https://github.com/duckdb/duckdb/releases/download/v1.0.0/duckdb_cli-linux-$ARC.zip"
\ No newline at end of file
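Note: by piping "INSTALL httpfs; LOAD httpfs;" into the CLI at build time, the extension binary is baked into the image before any query needs it, so runtime loads don't reach out to the network. A hedged sketch of the Ruby side this unblocks, using the duckdb gem (assumed to be in the server's bundle; the S3 path is a placeholder):

    require "duckdb"

    db = DuckDB::Database.open # in-memory database
    con = db.connect
    con.query("INSTALL httpfs;") # satisfied from the build-time install; no download needed
    con.query("LOAD httpfs;")
    result = con.query("SELECT count(*) FROM read_parquet('s3://example-bucket/data.parquet');")
    result.each { |row| puts row.first }
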