|
2 | 2 | (:require [clojure.java.jdbc :as jdbc] |
3 | 3 | [clojure.string :as str] |
4 | 4 | [medley.core :as m] |
5 | | - [metabase.driver :as driver] |
| 5 | + [metabase.driver :as driver] |
6 | 6 | [metabase.driver.sql-jdbc.connection :as sql-jdbc.conn] |
7 | 7 | [metabase.driver.sql-jdbc.execute :as sql-jdbc.execute] |
8 | 8 | [metabase.driver.sql-jdbc.sync :as sql-jdbc.sync] |
|
28 | 28 | :pwd token}) |
29 | 29 |
|
(defmethod sql-jdbc.conn/data-warehouse-connection-pool-properties :databricks-sql
  ;; NOTE: `clojure.core/defmethod` does not accept a docstring -- a string in this
  ;; position lands inside the generated `fn` form and fails to compile, so this
  ;; explanation must stay a comment.
  ;;
  ;; The Hive JDBC driver doesn't support `Connection.isValid()`, so we need to
  ;; supply a test query for c3p0 to use to validate connections upon checkout.
  [driver database]
  (merge
   ;; start from the default :sql-jdbc pool properties and layer our override on top
   ((get-method sql-jdbc.conn/data-warehouse-connection-pool-properties :sql-jdbc) driver database)
   {"preferredTestQuery" "SELECT 1"}))
|
59 | 60 | #"map" :type/Dictionary |
60 | 61 | #".*" :type/*)) |
61 | 62 |
|
62 | | -;; workaround for SPARK-9686 Spark Thrift server doesn't return correct JDBC metadata |
63 | 63 | (defmethod driver/describe-database :databricks-sql |
| 64 | + "workaround for SPARK-9686 Spark Thrift server doesn't return correct JDBC metadata" |
64 | 65 | [_ database] |
65 | 66 | {:tables |
66 | 67 | (with-open [conn (jdbc/get-connection (sql-jdbc.conn/db->pooled-connection-spec database))] |
|
70 | 71 | :schema (or (not-empty database) |
71 | 72 | (not-empty table-namespace))})))}) |
72 | 73 |
|
(defn- valid-describe-table-row?
  "True if a row from Hive's `DESCRIBE TABLE` output describes a real column.
  Hive interleaves section/partition marker rows with the actual columns; those
  markers have a blank `col_name`/`data_type` or one starting with `#`."
  [{:keys [col_name data_type]}]
  (letfn [(real-value? [s]
            (and (not (str/blank? s))
                 (not (str/starts-with? s "#"))))]
    (and (real-value? col_name)
         (real-value? data_type))))
|
80 | 82 | (when s |
81 | 83 | (str/replace s #"-" "_"))) |
82 | 84 |
|
83 | | -;; workaround for SPARK-9686 Spark Thrift server doesn't return correct JDBC metadata |
84 | 85 | (defmethod driver/describe-table :databricks-sql |
| 86 | + "workaround for SPARK-9686 Spark Thrift server doesn't return correct JDBC metadata" |
85 | 87 | [driver database {table-name :name, schema :schema}] |
86 | 88 | {:name table-name |
87 | 89 | :schema schema |
|
107 | 109 | strings so SQL injection is impossible; this isn't nice to look at, so use this for actually running a query." |
108 | 110 | :friendly) |
109 | 111 |
|
110 | | -;; bound variables are not supported in Spark SQL (maybe not Hive either, haven't checked) |
111 | 112 | (defmethod driver/execute-reducible-query :databricks-sql |
| 113 | + "bound variables are not supported in Spark SQL (maybe not Hive either, haven't checked)" |
112 | 114 | [driver {{sql :query, :keys [params], :as inner-query} :native, :as outer-query} context respond] |
113 | 115 | (let [inner-query (-> (assoc inner-query |
114 | 116 | :remark (qp.util/query->remark :databricks-sql outer-query) |
|
121 | 123 | query (assoc outer-query :native inner-query)] |
122 | 124 | ((get-method driver/execute-reducible-query :sql-jdbc) driver query context respond))) |
123 | 125 |
|
124 | | -;; 1. SparkSQL doesn't support `.supportsTransactionIsolationLevel` |
125 | | -;; 2. SparkSQL doesn't support session timezones (at least our driver doesn't support it) |
126 | | -;; 3. SparkSQL doesn't support making connections read-only |
127 | | -;; 4. SparkSQL doesn't support setting the default result set holdability |
128 | 126 | (defmethod sql-jdbc.execute/connection-with-timezone :databricks-sql |
| 127 | + "1. SparkSQL doesn't support `.supportsTransactionIsolationLevel` |
| 128 | + 2. SparkSQL doesn't support session timezones (at least our driver doesn't support it) |
| 129 | + 3. SparkSQL doesn't support making connections read-only |
| 130 | + 4. SparkSQL doesn't support setting the default result set holdability" |
129 | 131 | [driver database _timezone-id] |
130 | 132 | (let [conn (.getConnection (sql-jdbc.execute/datasource-with-diagnostic-info! driver database))] |
131 | 133 | (try |
|
135 | 137 | (.close conn) |
136 | 138 | (throw e))))) |
137 | 139 |
|
138 | | -;; 1. SparkSQL doesn't support setting holdability type to `CLOSE_CURSORS_AT_COMMIT` |
139 | 140 | (defmethod sql-jdbc.execute/prepared-statement :databricks-sql |
| 141 | + "1. SparkSQL doesn't support setting holdability type to `CLOSE_CURSORS_AT_COMMIT`" |
140 | 142 | [driver ^Connection conn ^String sql params] |
141 | 143 | (let [stmt (.prepareStatement conn sql |
142 | 144 | ResultSet/TYPE_FORWARD_ONLY |
|
149 | 151 | (.close stmt) |
150 | 152 | (throw e))))) |
151 | 153 |
|
(defmethod sql-jdbc.execute/statement-supported? :databricks-sql
  ;; NOTE: `defmethod` does not support docstrings (the string would end up inside
  ;; the generated `fn` and break compilation), so keep this as a comment.
  ;;
  ;; The current HiveConnection doesn't support `.createStatement`, so force
  ;; prepared statements everywhere.
  [_]
  false)
154 | 158 |
|
155 | 159 | (doseq [feature [:basic-aggregations |
156 | 160 | :binning |
|
0 commit comments