Skip to content
This repository was archived by the owner on Jun 9, 2025. It is now read-only.

Commit b80c05f

Browse files
authored
Merge pull request #4 from buserbrasil/avelino/fixed-document-references
style: fixed document references
2 parents a532aff + f99d05d commit b80c05f

File tree

1 file changed

+19
-15
lines changed

1 file changed

+19
-15
lines changed

src/metabase/driver/databricks_sql.clj

Lines changed: 19 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,7 @@
22
(:require [clojure.java.jdbc :as jdbc]
33
[clojure.string :as str]
44
[medley.core :as m]
5-
[metabase.driver :as driver]
5+
[metabase.driver :as driver]
66
[metabase.driver.sql-jdbc.connection :as sql-jdbc.conn]
77
[metabase.driver.sql-jdbc.execute :as sql-jdbc.execute]
88
[metabase.driver.sql-jdbc.sync :as sql-jdbc.sync]
@@ -28,9 +28,10 @@
2828
:pwd token})
2929

3030
;; The Hive JDBC driver doesn't support `Connection.isValid()`, so we need to
;; supply a test query for c3p0 to use to validate connections upon checkout.
;;
;; NOTE(review): this must remain a comment, not a docstring -- `defmethod`
;; does not accept a docstring. A string placed before the argument vector
;; falls into the `fn` tail and fails to compile with
;; "Parameter declaration ... should be a vector".
(defmethod sql-jdbc.conn/data-warehouse-connection-pool-properties :databricks-sql
  [driver database]
  (merge
   ;; start from the generic :sql-jdbc pool properties, then add the
   ;; checkout-validation query override
   ((get-method sql-jdbc.conn/data-warehouse-connection-pool-properties :sql-jdbc) driver database)
   {"preferredTestQuery" "SELECT 1"}))
@@ -59,8 +60,8 @@
5960
#"map" :type/Dictionary
6061
#".*" :type/*))
6162

62-
;; workaround for SPARK-9686 Spark Thrift server doesn't return correct JDBC metadata
6363
(defmethod driver/describe-database :databricks-sql
64+
"workaround for SPARK-9686 Spark Thrift server doesn't return correct JDBC metadata"
6465
[_ database]
6566
{:tables
6667
(with-open [conn (jdbc/get-connection (sql-jdbc.conn/db->pooled-connection-spec database))]
@@ -70,8 +71,9 @@
7071
:schema (or (not-empty database)
7172
(not-empty table-namespace))})))})
7273

(defn- valid-describe-table-row?
  "Hive describe table result has commented rows to distinguish partitions"
  [{:keys [col_name data_type]}]
  ;; a row is only a real column when neither field is blank and neither is a
  ;; `#`-prefixed partition-section marker
  (let [real-value? (fn [s]
                      (and (not (str/blank? s))
                           (not (str/starts-with? s "#"))))]
    (every? real-value? [col_name data_type])))
@@ -80,8 +82,8 @@
8082
(when s
8183
(str/replace s #"-" "_")))
8284

83-
;; workaround for SPARK-9686 Spark Thrift server doesn't return correct JDBC metadata
8485
(defmethod driver/describe-table :databricks-sql
86+
"workaround for SPARK-9686 Spark Thrift server doesn't return correct JDBC metadata"
8587
[driver database {table-name :name, schema :schema}]
8688
{:name table-name
8789
:schema schema
@@ -107,8 +109,8 @@
107109
strings so SQL injection is impossible; this isn't nice to look at, so use this for actually running a query."
108110
:friendly)
109111

110-
;; bound variables are not supported in Spark SQL (maybe not Hive either, haven't checked)
111112
(defmethod driver/execute-reducible-query :databricks-sql
113+
"bound variables are not supported in Spark SQL (maybe not Hive either, haven't checked)"
112114
[driver {{sql :query, :keys [params], :as inner-query} :native, :as outer-query} context respond]
113115
(let [inner-query (-> (assoc inner-query
114116
:remark (qp.util/query->remark :databricks-sql outer-query)
@@ -121,11 +123,11 @@
121123
query (assoc outer-query :native inner-query)]
122124
((get-method driver/execute-reducible-query :sql-jdbc) driver query context respond)))
123125

124-
;; 1. SparkSQL doesn't support `.supportsTransactionIsolationLevel`
125-
;; 2. SparkSQL doesn't support session timezones (at least our driver doesn't support it)
126-
;; 3. SparkSQL doesn't support making connections read-only
127-
;; 4. SparkSQL doesn't support setting the default result set holdability
128126
(defmethod sql-jdbc.execute/connection-with-timezone :databricks-sql
127+
"1. SparkSQL doesn't support `.supportsTransactionIsolationLevel`
128+
2. SparkSQL doesn't support session timezones (at least our driver doesn't support it)
129+
3. SparkSQL doesn't support making connections read-only
130+
4. SparkSQL doesn't support setting the default result set holdability"
129131
[driver database _timezone-id]
130132
(let [conn (.getConnection (sql-jdbc.execute/datasource-with-diagnostic-info! driver database))]
131133
(try
@@ -135,8 +137,8 @@
135137
(.close conn)
136138
(throw e)))))
137139

138-
;; 1. SparkSQL doesn't support setting holdability type to `CLOSE_CURSORS_AT_COMMIT`
139140
(defmethod sql-jdbc.execute/prepared-statement :databricks-sql
141+
"1. SparkSQL doesn't support setting holdability type to `CLOSE_CURSORS_AT_COMMIT`"
140142
[driver ^Connection conn ^String sql params]
141143
(let [stmt (.prepareStatement conn sql
142144
ResultSet/TYPE_FORWARD_ONLY
@@ -149,8 +151,10 @@
149151
(.close stmt)
150152
(throw e)))))
151153

;; the current HiveConnection doesn't support .createStatement
;;
;; NOTE(review): keep this as a comment -- `defmethod` has no docstring
;; support; a string before the argument vector is parsed as part of the
;; `fn` tail and raises "Parameter declaration ... should be a vector"
;; at compile time.
(defmethod sql-jdbc.execute/statement-supported? :databricks-sql
  [_]
  false)
154158

155159
(doseq [feature [:basic-aggregations
156160
:binning

0 commit comments

Comments
 (0)