|
10 | 10 | [metabase.driver.sql.util :as sql.u] |
11 | 11 | [metabase.driver.sql.util.unprepare :as unprepare] |
12 | 12 | [metabase.mbql.util :as mbql.u] |
13 | | - [clojure.string :as str] |
14 | 13 | [metabase.query-processor.util :as qp.util]) |
15 | 14 | (:import [java.sql Connection ResultSet])) |
16 | 15 |
|
|
64 | 63 | #".*" :type/*)) |
65 | 64 |
|
66 | 65 |
|
;; Why we override the default implementation:
;; 1. databricks-sql doesn't support `.supportsTransactionIsolationLevel`
;; 2. databricks-sql doesn't support session timezones (at least our driver doesn't support it)
;; 3. databricks-sql doesn't support making connections read-only
;; 4. databricks-sql doesn't support setting the default result set holdability
(defmethod sql-jdbc.execute/do-with-connection-with-options :databricks-sql
  [driver db-or-id-or-spec options f]
  (letfn [(configure-then-call [^Connection connection]
            ;; only set the isolation level when this is the outermost (non-recursive)
            ;; connection acquisition; nested calls reuse an already-configured connection.
            (when-not (sql-jdbc.execute/recursive-connection?)
              (.setTransactionIsolation connection Connection/TRANSACTION_READ_UNCOMMITTED))
            (f connection))]
    (sql-jdbc.execute/do-with-resolved-connection driver db-or-id-or-spec options configure-then-call)))
;; Compile queries for this driver with Honey SQL version 2.
(defmethod sql.qp/honey-sql-version :databricks-sql
  [_driver]
  2)
81 | 69 |
|
;; workaround for SPARK-9686 Spark Thrift server doesn't return correct JDBC metadata
(defmethod driver/describe-database :databricks-sql
  [_ database]
  {:tables
   (with-open [conn (jdbc/get-connection (sql-jdbc.conn/db->pooled-connection-spec database))]
     (set
      (for [{:keys [database tablename tab_name], table-namespace :namespace} (jdbc/query {:connection conn} ["show tables"])]
        ;; the table-name column differs depending on the server (databricks-sql, hive,
        ;; Impala return `tablename`; others return `tab_name`), so fall back between them —
        ;; without the fallback, `:name` comes back nil on Hive-style servers.
        {:name   (or tablename tab_name)
         :schema (or (not-empty database)
                     (not-empty table-namespace))})))})
97 | 80 |
|
98 | 81 | ;; Hive describe table result has commented rows to distinguish partitions |
99 | 82 | (defn- valid-describe-table-row? [{:keys [col_name data_type]}] |
|
105 | 88 | (when s |
106 | 89 | (str/replace s #"-" "_"))) |
107 | 90 |
|
108 | | - |
;; workaround for SPARK-9686 Spark Thrift server doesn't return correct JDBC metadata
(defmethod driver/describe-table :databricks-sql
  [driver database {table-name :name, schema :schema}]
  {:name   table-name
   :schema schema
   :fields
   (with-open [conn (jdbc/get-connection (sql-jdbc.conn/db->pooled-connection-spec database))]
     (let [results (jdbc/query {:connection conn} [(format
                                                    "describe %s"
                                                    (sql.u/quote-name driver :table
                                                      (dash-to-underscore schema)
                                                      (dash-to-underscore table-name)))])]
       (set
        (for [[idx {col-name :col_name, data-type :data_type, :as result}] (m/indexed results)
              ;; must be `:when`, not `:while`: Hive's `describe` output interleaves
              ;; commented partition rows, and `:while` would stop syncing columns at the
              ;; first such row instead of filtering it out.
              :when (valid-describe-table-row? result)]
          {:name              col-name
           :database-type     data-type
           :base-type         (sql-jdbc.sync/database-type->base-type :databricks-sql (keyword data-type))
           :database-position idx}))))})
132 | 110 |
|
133 | 111 | (def ^:dynamic *param-splice-style* |
134 | 112 | "How we should splice params into SQL (i.e. 'unprepare' the SQL). Either `:friendly` (the default) or `:paranoid`. |
|
151 | 129 | query (assoc outer-query :native inner-query)] |
152 | 130 | ((get-method driver/execute-reducible-query :sql-jdbc) driver query context respond))) |
153 | 131 |
|
;; Why we override the default implementation:
;; 1. SparkSQL doesn't support `.supportsTransactionIsolationLevel`
;; 2. SparkSQL doesn't support session timezones (at least our driver doesn't support it)
;; 3. SparkSQL doesn't support making connections read-only
;; 4. SparkSQL doesn't support setting the default result set holdability
(defmethod sql-jdbc.execute/connection-with-timezone :databricks-sql
  [driver database _timezone-id]
  (let [^Connection connection (.getConnection (sql-jdbc.execute/datasource-with-diagnostic-info! driver database))]
    (try
      (.setTransactionIsolation connection Connection/TRANSACTION_READ_UNCOMMITTED)
      connection
      ;; close the freshly-opened connection rather than leak it if configuration fails
      (catch Throwable t
        (.close connection)
        (throw t)))))
154 | 145 |
|
155 | 146 | ;; 1. SparkSQL doesn't support setting holdability type to `CLOSE_CURSORS_AT_COMMIT` |
156 | 147 | (defmethod sql-jdbc.execute/prepared-statement :databricks-sql |
|
0 commit comments