|
11 | 11 | [metabase.driver.sql.query-processor :as sql.qp] |
12 | 12 | [metabase.driver.sql.util :as sql.u] |
13 | 13 | [metabase.driver.sql.util.unprepare :as unprepare] |
14 | | - [metabase.mbql.util :as mbql.u] |
| 14 | + [metabase.legacy-mbql.util :as mbql.u] |
15 | 15 | [metabase.query-processor.util :as qp.util]) |
16 | 16 | (:import |
17 | 17 | (java.sql Connection ResultSet))) |
|
80 | 80 | [_driver] |
81 | 81 | 2) |
82 | 82 |
|
(defn- dash-to-underscore
  "Replace every dash in `s` with an underscore. Nil-safe: returns nil when
  `s` is nil (schema/table names may be absent)."
  [s]
  (some-> s (str/replace "-" "_")))
;; workaround for SPARK-9686: the Spark Thrift server doesn't return correct
;; JDBC metadata, so enumerate tables with a raw `SHOW TABLES` query instead.
(defmethod driver/describe-database :databricks-sql
  [driver database]
  {:tables
   (sql-jdbc.execute/do-with-connection-with-options
    driver
    database
    nil
    (fn [^Connection conn]
      (let [rows (jdbc/query {:connection conn} ["show tables"])]
        (into #{}
              ;; the table-name column differs depending on the server
              ;; (SparkSQL, Hive, Impala), so try both
              (map (fn [{:keys [database tablename tab_name], table-namespace :namespace}]
                     {:name   (or tablename tab_name)
                      :schema (or (not-empty database)
                                  (not-empty table-namespace))}))
              rows))))})
93 | 101 |
|
;; Hive's `DESCRIBE <table>` output interleaves "#"-prefixed comment rows that
;; mark partition sections; those rows are not real columns.
(defn- valid-describe-table-row?
  "True when a `describe <table>` result row describes an actual column:
  both `:col_name` and `:data_type` must be non-blank and not start with \"#\"."
  [{:keys [col_name data_type]}]
  (let [real-value? (fn [v]
                      (and (not (str/blank? v))
                           (not (str/starts-with? v "#"))))]
    (and (real-value? col_name)
         (real-value? data_type))))
99 | 107 |
|
100 | | -(defn- dash-to-underscore [s] |
101 | | - (when s |
102 | | - (str/replace s #"-" "_"))) |
103 | | - |
;; workaround for SPARK-9686: the Spark Thrift server doesn't return correct
;; JDBC metadata, so run `DESCRIBE <table>` and parse the result rows ourselves.
(defmethod driver/describe-table :databricks-sql
  [driver database {table-name :name, schema :schema}]
  {:name   table-name
   :schema schema
   :fields
   (sql-jdbc.execute/do-with-connection-with-options
    driver
    database
    nil
    (fn [^Connection conn]
      (let [sql     (format "describe %s"
                            (sql.u/quote-name driver :table
                                              (dash-to-underscore schema)
                                              (dash-to-underscore table-name)))
            results (jdbc/query {:connection conn} [sql])]
        (->> (m/indexed results)
             ;; drop the "#"-prefixed comment/partition rows and blank rows;
             ;; the surviving index is the column's ordinal position
             (filter (fn [[_ row]] (valid-describe-table-row? row)))
             (map (fn [[idx {col-name :col_name, data-type :data_type}]]
                    {:name              col-name
                     :database-type     data-type
                     :base-type         (sql-jdbc.sync/database-type->base-type :hive-like (keyword data-type))
                     :database-position idx}))
             set))))})
123 | 131 |
|
124 | 132 | (def ^:dynamic *param-splice-style* |
125 | 133 | "How we should splice params into SQL (i.e. 'unprepare' the SQL). Either `:friendly` (the default) or `:paranoid`. |
|
146 | 154 | ;; 2. SparkSQL doesn't support session timezones (at least our driver doesn't support it) |
147 | 155 | ;; 3. SparkSQL doesn't support making connections read-only |
148 | 156 | ;; 4. SparkSQL doesn't support setting the default result set holdability |
(defmethod sql-jdbc.execute/do-with-connection-with-options :databricks-sql
  [driver db-or-id-or-spec options f]
  ;; Resolve a (pooled) Connection and hand it to `f`. SparkSQL's JDBC driver
  ;; can't take the usual Metabase connection setup (session timezone,
  ;; read-only mode, result-set holdability -- see the limitations listed
  ;; above), so the only per-connection tweak applied here is the
  ;; transaction-isolation level.
  (sql-jdbc.execute/do-with-resolved-connection
   driver
   db-or-id-or-spec
   options
   (fn [^Connection conn]
     ;; only configure the connection when it was freshly acquired; a
     ;; recursive call reuses an outer connection that is already set up
     (when-not (sql-jdbc.execute/recursive-connection?)
       (.setTransactionIsolation conn Connection/TRANSACTION_READ_UNCOMMITTED))
     (f conn))))
158 | 167 |
|
159 | 168 | ;; 1. SparkSQL doesn't support setting holdability type to `CLOSE_CURSORS_AT_COMMIT` |
160 | 169 | (defmethod sql-jdbc.execute/prepared-statement :databricks-sql |
|
180 | 189 | :native-parameters true |
181 | 190 | :nested-queries true |
182 | 191 | :standard-deviation-aggregations true |
| 192 | + :metadata/key-constraints false |
183 | 193 | :test/jvm-timezone-setting false}] |
184 | 194 | (defmethod driver/database-supports? [:databricks-sql feature] [_driver _feature _db] supported?)) |
185 | 195 |
|
|
0 commit comments