Allows a background change for bulk ledger

This commit is contained in:
2022-10-01 07:36:42 -07:00
parent 55e3df7e94
commit 70391b6b56
4 changed files with 118 additions and 13 deletions

View File

@@ -91,7 +91,7 @@
:network-configuration {:aws-vpc-configuration {:subnets ["subnet-5e675761" "subnet-8519fde2" "subnet-89bab8d4"]
:security-groups ["sg-004e5855310c453a3" "sg-02d167406b1082698"]
:assign-public-ip AssignPublicIp/ENABLED}}}
args (assoc-in [:overrides :container-overrides ] [{:name "integreat-app" :environment [{:name "args" :value (pr-str task)}]}]))))
args (assoc-in [:overrides :container-overrides ] [{:name "integreat-app" :environment [{:name "args" :value (pr-str args)}]}]))))
(defn request-job [context value _]
(assert-admin (:id context))

View File

@@ -1,13 +1,79 @@
(ns auto-ap.jobs.bulk-journal-import
(:gen-class)
(:require
[amazonica.aws.s3 :as s3]
[auto-ap.graphql.ledger :refer [import-ledger]]
[auto-ap.jobs.core :refer [execute]]
[clojure.data.csv :as csv]
[clojure.java.io :as io]
[clojure.string :as str]
[clojure.tools.logging :as log]
[config.core :refer [env]]))
(defn bulk-journal-import
  "Log the ledger source taken from the :args env setting.
  (Earlier zero-argument version of the job entry point.)"
  []
  (let [ledger-source (:args env)]
    (log/info "importing ledger from" ledger-source)))
(defn line->id
  "Build an entry identifier of the form \"<client-code>-<source>-<id>\"
  from a parsed CSV line map. Used both as the grouping key for
  journal entries and as the :external_id of the resulting entry."
  [line]
  (let [{:keys [client-code source id]} line]
    (str client-code "-" source "-" id)))
(defn csv->graphql-rows
  "Transform parsed CSV line maps into GraphQL-shaped journal entries.

  Drops the header row, groups *consecutive* lines that share the same
  line->id, and emits one entry map per group. Header fields (:source,
  :date, :note, ...) are taken from the group's first line; :amount is
  the sum of the group's debit cells (blank cells count as 0.0); each
  line becomes one :line_items element."
  [lines]
  ;; Blank numeric cells mean zero; everything else is parsed as a double.
  (letfn [(parse-amount [cell]
            (if (str/blank? cell) 0.0 (Double/parseDouble cell)))]
    (for [entry-lines (partition-by line->id (rest lines))
          :let [{:keys [source client-code date vendor-name note cleared-against]
                 :as first-line} (first entry-lines)]]
      {:source source
       :external_id (line->id first-line)
       :client_code client-code
       :date date
       :note note
       :cleared_against cleared-against
       :vendor_name vendor-name
       ;; Entry total = sum of debits across the grouped lines.
       :amount (transduce (map (comp parse-amount :debit)) + 0 entry-lines)
       :line_items (map (fn [{:keys [debit credit account-identifier location]}]
                          {:account_identifier account-identifier
                           :location (some-> location str/trim)
                           :debit (parse-amount debit)
                           :credit (parse-amount credit)})
                        entry-lines)})))
(def bucket (:data-bucket env))
(defn s3->csv
  "Fetch \"bulk-import/<ledger-url>\" from the data bucket and parse it
  as CSV, returning a lazy seq of line maps. Rows whose cells are all
  blank are dropped; the remaining cells are mapped positionally onto
  the keys expected by csv->graphql-rows."
  [ledger-url]
  (let [object        (s3/get-object {:bucket-name bucket
                                      :key (str "bulk-import/" ledger-url)})
        rows          (-> object :input-stream io/reader csv/read-csv)
        ;; Keep a row only if at least one of its cells is non-blank.
        non-blank-row (fn [row]
                        (seq (filter (complement str/blank?) row)))
        row->map      (fn [[id client-code source vendor-name date
                            account-identifier location debit credit note
                            cleared-against]]
                        {:id id
                         :client-code client-code
                         :source source
                         :vendor-name vendor-name
                         :date date
                         :account-identifier account-identifier
                         :location location
                         :debit debit
                         :credit credit
                         :note note
                         :cleared-against cleared-against})]
    (sequence (comp (filter non-blank-row) (map row->map)) rows)))
(defn bulk-journal-import
  "Run the bulk journal import job: download the CSV named by
  (:ledger-url args) from S3, convert it to GraphQL entry maps, and
  hand them to import-ledger under a synthetic admin identity."
  [args]
  (let [ledger-url (:ledger-url args)]
    (log/info "importing ledger from" ledger-url)
    (let [import-rows (-> ledger-url s3->csv csv->graphql-rows)]
      ;; The job has no real user context, so impersonate an admin
      ;; identity that assert-admin-style checks will accept.
      (import-ledger {:id {:user/name "Bulk-import" :user/role "admin"}}
                     {:entries import-rows}
                     nil))))
;; CLI entry point; command-line arguments are ignored.
;; NOTE(review): the two execute lines below are the old/new pair from
;; the rendered diff — the post-commit version is the second one, which
;; wraps bulk-journal-import so the job input comes from (:args env).
(defn -main [& _]
(execute "bulk-journal-import" bulk-journal-import))
(execute "bulk-journal-import" #(bulk-journal-import (:args env))))