This repository has been archived by the owner on Apr 17, 2021. It is now read-only.

Commit

Merge pull request #26 from shopsmart/db_cursor_batching_002
Add function for processing query resultsets as a cursor (in batches)…
levi-andrew-dixon committed Aug 7, 2017
2 parents dedfddc + af80652 commit 448348a
Showing 2 changed files with 73 additions and 4 deletions.
2 changes: 1 addition & 1 deletion project.clj
@@ -1,4 +1,4 @@
-(defproject com.github.shopsmart/clj-infrastructure "0.1.19"
+(defproject com.github.shopsmart/clj-infrastructure "0.1.20"
:description "Infrastructure helpers for AWS, database, etc."
:url "https://github.com/shopsmart/clj-infrastructure"

75 changes: 72 additions & 3 deletions src/clj_infrastructure/db.clj
@@ -29,8 +29,9 @@
(:import [java.sql PreparedStatement SQLException])
(:gen-class))

-(def DB_EXEC_MODE_QUERY "query")
-(def DB_EXEC_MODE_EXEC "execute")
+(def DB_EXEC_MODE_QUERY "query")
+(def DB_EXEC_MODE_QUERY_CURSOR "query_cursor")
+(def DB_EXEC_MODE_EXEC "execute")

;; Error handling ----------------------------------------------------------------------------

@@ -669,6 +670,61 @@
detail-map))


(defn sql-cursor->result-set-fn-results
  "Runs the provided SQL and passes the result-set cursor to the provided
  result-set fn. Using a cursor allows rows to be fetched in batches
  (of size (:fetch-size opt-map)), but requires a function to process the
  results, since the transaction and cursor must be managed properly
  (i.e. auto-commit is turned off, the cursor is closed when done, etc.).
  The interface accepts parameters in the same form as other
  clojure.java.jdbc functions.
  TODO: Add handling of the :multi? opt key"
  [conn
   [sql & params :as sql-params]
   result-set-fn &
   [{:keys [transaction? as-arrays? fetch-size] :as opt-map}]]

  ; Body of the function, factored out so it can run with or without a wrapping transaction:
(let [
body-fn
(fn fn-body-sql->cursor [
conn
[sql & params :as sql-params] &
[{:keys [transaction? as-arrays? fetch-size] :as opt-map}]]

(let [fetch-size (or fetch-size 1000)]

(with-open [
cursor
(let [stmt (.prepareStatement (:connection conn) sql)]
(doseq [[index value] (map vector (iterate inc 1) params)]
(.setObject stmt index value))
(.setFetchSize stmt fetch-size)
(.executeQuery stmt))]

(result-set-fn
(if (true? as-arrays?)
(do (map #(into [] (vals %)) (resultset-seq cursor)))
(do (resultset-seq cursor)))))))]

; Run the body optionally wrapped in a transaction
(let [auto-commit-val (.getAutoCommit (:connection conn))]
(if (or transaction? auto-commit-val)
(do
(.setAutoCommit (:connection conn) false)
(clojure.java.jdbc/with-db-transaction [conn (dbconfig {} "connection")]
(body-fn conn sql-params opt-map))
(when (true? auto-commit-val)
(.commit (:connection conn))
(.setAutoCommit (:connection conn) auto-commit-val)))
(do (body-fn conn sql-params opt-map))))))
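
For illustration, a minimal usage sketch of the new function (the db-spec, table, and column names are hypothetical; the connection map must carry an open java.sql.Connection under :connection, e.g. via clojure.java.jdbc/with-db-connection):

(require '[clojure.java.jdbc :as jdbc])

;; Hypothetical connection spec and schema -- illustrative only.
(def db-spec {:dbtype "postgresql" :dbname "example" :user "app" :password "secret"})

(jdbc/with-db-connection [conn db-spec]
  ;; Cursor-based fetching needs auto-commit off; with it already off (and no
  ;; :transaction? flag) the function runs the query directly on this connection.
  (.setAutoCommit (:connection conn) false)
  (sql-cursor->result-set-fn-results
    conn
    ["SELECT id, total FROM orders WHERE status = ?" "shipped"]
    ;; The result-set fn must realize whatever it needs while the cursor is still open.
    (fn [rows] (reduce + 0 (map :total rows)))
    {:fetch-size 500}))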


(defn run-statement
[conn {:keys [stmt-text exec-mode op-comment binds opt-map commit?] :as stmt-detail-map}]

@@ -686,7 +742,19 @@
(let [updated-map
(assoc-in stmt-detail-map [:result]
(cond
-      (= exec-mode DB_EXEC_MODE_QUERY)
+      (and (:result-set-fn opt-map) (= exec-mode DB_EXEC_MODE_QUERY_CURSOR))
+      ; Execute the query using a cursor to fetch results in batches (requires a
+      ; result-set fn that processes the rows)
+      (do
+        (log/debug
+          "Issuing statement as query with cursor and batch"
+          "fetching (results expected) ...")
+        (sql-cursor->result-set-fn-results conn sql-params (:result-set-fn opt-map) opt-map))
+      (or
+        (= exec-mode DB_EXEC_MODE_QUERY)
+        (and (= exec-mode DB_EXEC_MODE_QUERY_CURSOR) (not (:result-set-fn opt-map))))
+      ; Execute a query using clojure.java.jdbc/query; all results
+      ; are materialized during the fetch
(do
(log/debug "Issuing statement as query (results expected) ...")
(clojure.java.jdbc/query conn sql-params opt-map))
@@ -729,3 +797,4 @@
(for [stmt-detail-map stmt-detail-vec]
(run-statement conn stmt-detail-map))))))
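
The cursor branch above is taken only when exec-mode is DB_EXEC_MODE_QUERY_CURSOR and opt-map carries a :result-set-fn; without that fn, the mode falls back to the fully materialized clojure.java.jdbc/query path. A hedged sketch of a statement map that would take the cursor path (the SQL and values are hypothetical, and it assumes :binds supplies the positional parameters in the elided portion of run-statement above):

(run-statement
  conn
  {:stmt-text "SELECT id, email FROM subscribers WHERE active = ?"
   :binds     [true]
   :exec-mode DB_EXEC_MODE_QUERY_CURSOR
   :opt-map   {:fetch-size 1000
               :result-set-fn (fn [rows]
                                (doseq [row rows]
                                  (println (:email row))))}})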

