Skip to content
This repository was archived by the owner on Jun 9, 2025. It is now read-only.

Commit bf8d6ff

Browse files
author
Henrique Schumann Costa
committed
fix log4j issue
1 parent 3c43183 commit bf8d6ff

File tree

4 files changed

+43
-49
lines changed

4 files changed

+43
-49
lines changed

README.md

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,9 @@
11
# Metabase Driver: Databricks SQL Warehouse
22

3+
This project is based on:
4+
1. [Community Databricks Driver](https://github.com/relferreira/metabase-sparksql-databricks-driver)
5+
2. [Databricks SparkSQL Driver](https://github.com/metabase/metabase/blob/master/modules/drivers/sparksql)
6+
37
## Installation
48

59
Beginning with Metabase 0.32, drivers must be stored in a `plugins` directory in the same directory where `metabase.jar` is, or you can specify the directory by setting the environment variable `MB_PLUGINS_DIR`. There are a few options to get up and running with a custom driver.

bin/cli.sh

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
#!/bin/bash
2-
cp /driver/bin/deps.edn ~/.clojure/deps.edn
3-
cd /metabase
2+
#cp ./bin/deps.edn ~/.clojure/deps.edn
3+
cd ../metabase
44

55
function nrepl() {
66
clojure -M:user/databricks-sql:nrepl

deps.edn

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -2,8 +2,7 @@
22
["src" "resources"]
33

44
:deps
5-
{org.clojure.clr/core.logic {:mvn/version "1.0.1"}
6-
com.databricks/databricks-jdbc {:mvn/version "2.6.34"}}
5+
{com.databricks/databricks-jdbc {:mvn/version "2.6.34" :exclusions [log4j/log4j]}}
76

87
;; the stuff below is only for hacking on the driver locally and is not needed if you follow the instructions in the
98
;; README and create a `:local/root` dep for the driver and launch the REPL from the Metabase project rather than

src/metabase/driver/databricks_sql.clj

Lines changed: 36 additions & 45 deletions
Original file line numberDiff line numberDiff line change
@@ -10,7 +10,6 @@
1010
[metabase.driver.sql.util :as sql.u]
1111
[metabase.driver.sql.util.unprepare :as unprepare]
1212
[metabase.mbql.util :as mbql.u]
13-
[clojure.string :as str]
1413
[metabase.query-processor.util :as qp.util])
1514
(:import [java.sql Connection ResultSet]))
1615

@@ -64,36 +63,20 @@
6463
#".*" :type/*))
6564

6665

67-
;; 1. databricks-sql doesn't support `.supportsTransactionIsolationLevel`
68-
;; 2. databricks-sql doesn't support session timezones (at least our driver doesn't support it)
69-
;; 3. databricks-sql doesn't support making connections read-only
70-
;; 4. databricks-sql doesn't support setting the default result set holdability
71-
(defmethod sql-jdbc.execute/do-with-connection-with-options :databricks-sql
72-
[driver db-or-id-or-spec options f]
73-
(sql-jdbc.execute/do-with-resolved-connection
74-
driver
75-
db-or-id-or-spec
76-
options
77-
(fn [^Connection conn]
78-
(when-not (sql-jdbc.execute/recursive-connection?)
79-
(.setTransactionIsolation conn Connection/TRANSACTION_READ_UNCOMMITTED))
80-
(f conn))))
66+
(defmethod sql.qp/honey-sql-version :databricks-sql
67+
[_driver]
68+
2)
8169

8270
;; workaround for SPARK-9686 Spark Thrift server doesn't return correct JDBC metadata
8371
(defmethod driver/describe-database :databricks-sql
84-
[driver database]
72+
[_ database]
8573
{:tables
86-
(sql-jdbc.execute/do-with-connection-with-options
87-
driver
88-
database
89-
nil
90-
(fn [^Connection conn]
91-
(set
92-
(for [{:keys [database tablename tab_name], table-namespace :namespace} (jdbc/query {:connection conn} ["show tables"])]
93-
{:name (or tablename tab_name) ; column name differs depending on server (databricks-sql, hive, Impala)
94-
:schema (or (not-empty database)
95-
(not-empty table-namespace))}))))})
96-
74+
(with-open [conn (jdbc/get-connection (sql-jdbc.conn/db->pooled-connection-spec database))]
75+
(set
76+
(for [{:keys [database tablename], table-namespace :namespace} (jdbc/query {:connection conn} ["show tables"])]
77+
{:name tablename
78+
:schema (or (not-empty database)
79+
(not-empty table-namespace))})))})
9780

9881
;; Hive describe table result has commented rows to distinguish partitions
9982
(defn- valid-describe-table-row? [{:keys [col_name data_type]}]
@@ -105,30 +88,25 @@
10588
(when s
10689
(str/replace s #"-" "_")))
10790

108-
10991
;; workaround for SPARK-9686 Spark Thrift server doesn't return correct JDBC metadata
11092
(defmethod driver/describe-table :databricks-sql
11193
[driver database {table-name :name, schema :schema}]
11294
{:name table-name
11395
:schema schema
11496
:fields
115-
(sql-jdbc.execute/do-with-connection-with-options
116-
driver
117-
database
118-
nil
119-
(fn [^Connection conn]
120-
(let [results (jdbc/query {:connection conn} [(format
121-
"describe %s"
122-
(sql.u/quote-name driver :table
123-
(dash-to-underscore schema)
124-
(dash-to-underscore table-name)))])]
125-
(set
126-
(for [[idx {col-name :col_name, data-type :data_type, :as result}] (m/indexed results)
127-
:when (valid-describe-table-row? result)]
128-
{:name col-name
129-
:database-type data-type
130-
:base-type (sql-jdbc.sync/database-type->base-type :databricks-sql (keyword data-type))
131-
:database-position idx})))))})
97+
(with-open [conn (jdbc/get-connection (sql-jdbc.conn/db->pooled-connection-spec database))]
98+
(let [results (jdbc/query {:connection conn} [(format
99+
"describe %s"
100+
(sql.u/quote-name driver :table
101+
(dash-to-underscore schema)
102+
(dash-to-underscore table-name)))])]
103+
(set
104+
(for [[idx {col-name :col_name, data-type :data_type, :as result}] (m/indexed results)
105+
:when (valid-describe-table-row? result)
106+
{:name col-name
107+
:database-type data-type
108+
:base-type (sql-jdbc.sync/database-type->base-type :databricks-sql (keyword data-type))
109+
:database-position idx}))))})
132110

133111
(def ^:dynamic *param-splice-style*
134112
"How we should splice params into SQL (i.e. 'unprepare' the SQL). Either `:friendly` (the default) or `:paranoid`.
@@ -151,6 +129,19 @@
151129
query (assoc outer-query :native inner-query)]
152130
((get-method driver/execute-reducible-query :sql-jdbc) driver query context respond)))
153131

132+
;; 1. SparkSQL doesn't support `.supportsTransactionIsolationLevel`
133+
;; 2. SparkSQL doesn't support session timezones (at least our driver doesn't support it)
134+
;; 3. SparkSQL doesn't support making connections read-only
135+
;; 4. SparkSQL doesn't support setting the default result set holdability
136+
(defmethod sql-jdbc.execute/connection-with-timezone :databricks-sql
137+
[driver database _timezone-id]
138+
(let [conn (.getConnection (sql-jdbc.execute/datasource-with-diagnostic-info! driver database))]
139+
(try
140+
(.setTransactionIsolation conn Connection/TRANSACTION_READ_UNCOMMITTED)
141+
conn
142+
(catch Throwable e
143+
(.close conn)
144+
(throw e)))))
154145

155146
;; 1. SparkSQL doesn't support setting holdability type to `CLOSE_CURSORS_AT_COMMIT`
156147
(defmethod sql-jdbc.execute/prepared-statement :databricks-sql

0 commit comments

Comments
 (0)