Skip to content
This repository was archived by the owner on Jun 9, 2025. It is now read-only.

Commit cb639aa

Browse files
author
Henrique Schumann Costa
committed
upgrade metabase to v0.50.3
1 parent 5b72c42 commit cb639aa

File tree

4 files changed

+47
-37
lines changed

4 files changed

+47
-37
lines changed

Dockerfile

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -5,7 +5,7 @@ COPY . /driver
55
RUN apt-get update && apt-get install -y --no-install-recommends curl && \
66
rm -rf /var/lib/apt/lists/*
77

8-
ARG METABASE_VERSION="v0.49.15"
8+
ARG METABASE_VERSION="v0.50.3"
99

1010
RUN curl -Lo - https://github.com/metabase/metabase/archive/refs/tags/${METABASE_VERSION}.tar.gz | tar -xz && mv metabase-* /metabase
1111

Makefile

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -1,6 +1,6 @@
11

22
ROOT_DIR:=$(shell dirname $(realpath $(firstword $(MAKEFILE_LIST))))
3-
METABASE_VERSION=v0.49.15
3+
METABASE_VERSION=v0.50.3
44

55
build:
66
@echo "build"

deps.edn

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -11,4 +11,4 @@
1111
:aliases
1212
{:dev
1313
{:extra-deps
14-
{com.github.metabase/metabase {:git/tag "v0.49.15", :git/sha "77a08d7710d79f82c52c1d1e3cd3d4edc10bd8406d93eec9988d12158334ecc3"}}}}}
14+
{com.github.metabase/metabase {:git/tag "v0.50.3", :git/sha "def3903f275276e339f2ed99bb37fd2f85652b62f8f3be50cb0e708f1eeaffbc"}}}}}

src/metabase/driver/databricks_sql.clj

Lines changed: 44 additions & 34 deletions
Original file line number | Diff line number | Diff line change
@@ -11,7 +11,7 @@
1111
[metabase.driver.sql.query-processor :as sql.qp]
1212
[metabase.driver.sql.util :as sql.u]
1313
[metabase.driver.sql.util.unprepare :as unprepare]
14-
[metabase.mbql.util :as mbql.u]
14+
[metabase.legacy-mbql.util :as mbql.u]
1515
[metabase.query-processor.util :as qp.util])
1616
(:import
1717
(java.sql Connection ResultSet)))
@@ -80,46 +80,54 @@
8080
[_driver]
8181
2)
8282

83+
(defn- dash-to-underscore [s]
84+
(when s
85+
(str/replace s #"-" "_")))
86+
8387
;; workaround for SPARK-9686 Spark Thrift server doesn't return correct JDBC metadata
8488
(defmethod driver/describe-database :databricks-sql
85-
[_ database]
89+
[driver database]
8690
{:tables
87-
(with-open [conn (jdbc/get-connection (sql-jdbc.conn/db->pooled-connection-spec database))]
88-
(set
89-
(for [{:keys [database tablename], table-namespace :namespace} (jdbc/query {:connection conn} ["show tables"])]
90-
{:name tablename
91-
:schema (or (not-empty database)
92-
(not-empty table-namespace))})))})
91+
(sql-jdbc.execute/do-with-connection-with-options
92+
driver
93+
database
94+
nil
95+
(fn [^Connection conn]
96+
(set
97+
(for [{:keys [database tablename tab_name], table-namespace :namespace} (jdbc/query {:connection conn} ["show tables"])]
98+
{:name (or tablename tab_name) ; column name differs depending on server (SparkSQL, hive, Impala)
99+
:schema (or (not-empty database)
100+
(not-empty table-namespace))}))))})
93101

94102
;; Hive describe table result has commented rows to distinguish partitions
95103
(defn- valid-describe-table-row? [{:keys [col_name data_type]}]
96104
(every? (every-pred (complement str/blank?)
97105
(complement #(str/starts-with? % "#")))
98106
[col_name data_type]))
99107

100-
(defn- dash-to-underscore [s]
101-
(when s
102-
(str/replace s #"-" "_")))
103-
104108
;; workaround for SPARK-9686 Spark Thrift server doesn't return correct JDBC metadata
105109
(defmethod driver/describe-table :databricks-sql
106110
[driver database {table-name :name, schema :schema}]
107111
{:name table-name
108112
:schema schema
109113
:fields
110-
(with-open [conn (jdbc/get-connection (sql-jdbc.conn/db->pooled-connection-spec database))]
111-
(let [results (jdbc/query {:connection conn} [(format
112-
"describe %s"
113-
(sql.u/quote-name driver :table
114-
(dash-to-underscore schema)
115-
(dash-to-underscore table-name)))])]
116-
(set
117-
(for [[idx {col-name :col_name, data-type :data_type, :as result}] (m/indexed results)
118-
:while (valid-describe-table-row? result)]
119-
{:name col-name
120-
:database-type data-type
121-
:base-type (sql-jdbc.sync/database-type->base-type :databricks-sql (keyword data-type))
122-
:database-position idx}))))})
114+
(sql-jdbc.execute/do-with-connection-with-options
115+
driver
116+
database
117+
nil
118+
(fn [^Connection conn]
119+
(let [results (jdbc/query {:connection conn} [(format
120+
"describe %s"
121+
(sql.u/quote-name driver :table
122+
(dash-to-underscore schema)
123+
(dash-to-underscore table-name)))])]
124+
(set
125+
(for [[idx {col-name :col_name, data-type :data_type, :as result}] (m/indexed results)
126+
:when (valid-describe-table-row? result)]
127+
{:name col-name
128+
:database-type data-type
129+
:base-type (sql-jdbc.sync/database-type->base-type :hive-like (keyword data-type))
130+
:database-position idx})))))})
123131

124132
(def ^:dynamic *param-splice-style*
125133
"How we should splice params into SQL (i.e. 'unprepare' the SQL). Either `:friendly` (the default) or `:paranoid`.
@@ -146,15 +154,16 @@
146154
;; 2. SparkSQL doesn't support session timezones (at least our driver doesn't support it)
147155
;; 3. SparkSQL doesn't support making connections read-only
148156
;; 4. SparkSQL doesn't support setting the default result set holdability
149-
(defmethod sql-jdbc.execute/connection-with-timezone :databricks-sql
150-
[driver database _timezone-id]
151-
(let [conn (.getConnection (sql-jdbc.execute/datasource-with-diagnostic-info! driver database))]
152-
(try
153-
(.setTransactionIsolation conn Connection/TRANSACTION_READ_UNCOMMITTED)
154-
conn
155-
(catch Throwable e
156-
(.close conn)
157-
(throw e)))))
157+
(defmethod sql-jdbc.execute/do-with-connection-with-options :databricks-sql
158+
[driver db-or-id-or-spec options f]
159+
(sql-jdbc.execute/do-with-resolved-connection
160+
driver
161+
db-or-id-or-spec
162+
options
163+
(fn [^Connection conn]
164+
(when-not (sql-jdbc.execute/recursive-connection?)
165+
(.setTransactionIsolation conn Connection/TRANSACTION_READ_UNCOMMITTED))
166+
(f conn))))
158167

159168
;; 1. SparkSQL doesn't support setting holdability type to `CLOSE_CURSORS_AT_COMMIT`
160169
(defmethod sql-jdbc.execute/prepared-statement :databricks-sql
@@ -180,6 +189,7 @@
180189
:native-parameters true
181190
:nested-queries true
182191
:standard-deviation-aggregations true
192+
:metadata/key-constraints false
183193
:test/jvm-timezone-setting false}]
184194
(defmethod driver/database-supports? [:databricks-sql feature] [_driver _feature _db] supported?))
185195

0 commit comments

Comments (0)