fix(sales): resolve namespace collision and add missing clojure.string import

- Remove sales_orders_new.clj (unreferenced, duplicate ns)
- Add [clojure.string :as str] to sales_orders.clj ns
This commit is contained in:
2026-04-27 07:48:36 -07:00
parent 26c9563a03
commit 4597611655
4 changed files with 5 additions and 226 deletions

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@@ -3,8 +3,9 @@
[auto-ap.datomic :refer [conn]]
[auto-ap.storage.parquet :as pq]
[clj-time.coerce :as c]
[clojure.set :as set]
[com.brunobonacci.mulog :as mu]))
[clojure.set :as set]
[clojure.string :as str]
[com.brunobonacci.mulog :as mu]))
(defn <-row
"Convert a flat parquet row into the shape consumers expect.

View File

@@ -1,224 +0,0 @@
(ns auto-ap.datomic.sales-orders
(:require [auto-ap.datomic :refer [conn]]
[auto-ap.storage.parquet :as pq]
[clojure.data.json :as json]
[clojure.java.io :as io]))
(defn <-row
  "Convert a flat parquet row (string keys) into the
   shape consumers expect. Parquet produces maps of the form
     {\"external-id\" \"square/order/123\",
      \"location\" \"DT\",
      \"total\" 50.0}
   which we transform to:
     {:sales-order/external-id \"square/order/123\",
      :sales-order/location \"DT\",
      :sales-order/total 50.0}
   Keys with no mapping are passed through unchanged.
   Note: client, charges and other nested structures are not
   available in the flat parquet rows. When denormalisation
   adds those columns we can restore the full consumer shape."
  [row]
  ;; Hand-rolled rename instead of clojure.set/rename-keys:
  ;; this namespace does not require clojure.set, so the `set`
  ;; alias was unresolved. reduce-kv keeps the block self-contained.
  (let [renames {"external-id" :sales-order/external-id
                 "location"    :sales-order/location
                 "total"       :sales-order/total
                 "tax"         :sales-order/tax
                 "tip"         :sales-order/tip}]
    (reduce-kv (fn [m k v]
                 (assoc m (get renames k k) v))
               {}
               row)))
(defn build-where-clause
  "Build a SQL WHERE predicate fragment from the fields that
   parquet can filter on: external_id.client, vendor, location.
   Returns either a predicate string e.g.
     \"external_id.client = 'acme' AND external_id.vendor = 'square'\"
   or nil when no applicable filters exist.
   NOTE(security): values are concatenated directly into SQL.
   They are assumed to come from trusted internal args — if they can
   ever carry user input, switch to parameterised queries."
  [args]
  ;; Fixes from the original: removed a stray `}` that unbalanced the
  ;; form, dropped the unused `clauses` local, and replaced a
  ;; two-arg fn passed to single-collection mapv (arity error) with
  ;; destructuring. interpose/apply-str avoids the unrequired `str` alias.
  (let [{:keys [client-code vendor location]} args]
    (when (or client-code vendor location)
      (->> [["external_id.client" client-code]
            ["external_id.vendor" vendor]
            ["location" location]]
           (keep (fn [[col v]]
                   (when v (str col " = '" v "'"))))
           (interpose " AND ")
           (apply str)))))
(defn build-sort-clause
  "Map sort-key field names from args into a SQL ORDER-BY fragment.
   Supported fields map to parquet column names:
     \"date\"     -> DATE
     \"total\"    -> TOTAL
     \"tax\"      -> TAX
     \"tip\"      -> TIP
     \"client\"   -> EXTERNAL_ID_CLIENT (for flat client codes)
     \"location\" -> LOCATION
   Returns e.g. \"DATE ASC, TOTAL DESC\", or nil when args carry no
   usable sorts (get-sales-orders then defaults to DATE DESC).
   Unknown sort-keys are skipped."
  [args]
  ;; The original interleaved syntax-quoted vectors with commas and
  ;; `apply str`-ed them, yielding strings like `[\"DATE\" \"DESC\"],`
  ;; with a trailing comma. Build plain \"COL DIR\" fragments instead
  ;; and join them with \", \".
  (when-let [sorts (:sort args)]
    (->> sorts
         (keep (fn [{:keys [sort-key asc]}]
                 (let [dir (if asc "ASC" "DESC")
                       col (case sort-key
                             "date"       "DATE"
                             "total"      "TOTAL"
                             "tax"        "TAX"
                             "tip"        "TIP"
                             ;; direction is already baked into the column
                             "total-desc" "TOTAL DESC"
                             "source"     "SALE_SOURCE"
                             "client"     "EXTERNAL_ID_CLIENT"
                             "location"   "LOCATION"
                             nil)] ; unknown -> skip
                   (when col
                     (if (= sort-key "total-desc")
                       col
                       (str col " " dir))))))
         (interpose ", ")
         (apply str)
         not-empty)))
(defn build-pagination-clause
  "Translate a Datomic-side pagination request into SQL
   LIMIT/OFFSET numbers.
   Supports:
     :count / :per-page -> :limit  (nil when neither present)
     :start             -> :offset (defaults to 0)"
  [args]
  (let [{:keys [per-page start]} args
        requested (:count args)]
    {:limit  (or requested per-page)
     :offset (or start 0)}))
(defn- apply-pagination
  "Safely re-implements the old datomic-side pagination logic.
   Returns {:limit int-or-nil :offset n}.
   In the new architecture pagination is applied server-side by
   get-sales-orders via LIMIT/OFFSET, so this function mainly
   exists as a thin wrapper for any remaining in-memory re-paging
   concerns; it coerces :limit to an int when present."
  [args]
  ;; Fixes from the original: `(Integer. limit)` throws on a Long
  ;; (Clojure numeric literals are Longs and java.lang.Integer has no
  ;; (long) constructor); the unused `client` binding is gone.
  (let [{:keys [limit] :as page} (build-pagination-clause args)]
    (if limit
      (assoc page :limit (if (string? limit)
                           (Integer/parseInt limit)
                           (int limit)))
      page)))
(defn- build-options
  "Assemble the opts map passed to pq/get-sales-orders:
     {:client ..., :vendor ..., :limit ..., :offset ...,
      :order \"DESC\", :sort-key ...,
      plus :location and :sort when present in args}"
  [args]
  (let [{:keys [limit offset]} (apply-pagination args)
        base {:client   (:client-code args)
              :vendor   (:vendor args)
              :limit    limit
              :offset   offset
              :order    "DESC"
              :sort-key (:sort-key args)}
        with-loc (if (:location args)
                   (assoc base :location (:location args))
                   base)]
    (if (:sort args)
      ;; let get-sales-orders handle the actual ordering
      (assoc with-loc :sort "date")
      with-loc)))
(defn raw-graphql-ids
  "Query sales-orders from parquet files via DuckDB instead of Datomic.
   Filters applied at the parquet level:
     - date-range -> selects which parquet files to read
     - client-code / vendor / location where clauses
     - sort & pagination are delegated to get-sales-orders
   category, processor, type-name filters require nested joins that
   parquet does not support — those fields are ignored.
   Returns
     {:ids   [string-key-for-each-matched-row]
      :rows  [raw rows]
      :count int (total matches BEFORE pagination)}
   or nil when no WHERE filters applied or nothing matched."
  [args]
  ;; Fixes from the original: `(-> e .substring 0 10)` mis-threaded
  ;; `e` into `.substring` with the numbers applied as operators;
  ;; `(some-> where #(...))` threaded into a fn *literal* form instead
  ;; of calling it; and `result` was never bound at all.
  (let [start     (when-let [s (:start (:date-range args))]
                    ;; widen the window by one day at the start
                    (.toString (.plusDays (java.time.LocalDate/parse s) -1)))
        end       (when-let [e (:end (:date-range args))]
                    (subs e 0 10))
        where     (build-where-clause args)
        options   (build-options args)
        where-str (when where (str " WHERE " where))
        result    (when where-str
                    ;; Query rows from parquet with our filters and sort.
                    (pq/get-sales-orders start end
                                         (assoc options :sort-key where-str)))]
    ;; For each row returned we need an ID string; we use the
    ;; external-id column as the lookup key.
    (when-let [rows (:rows result)]
      {:ids   (mapv #(str (:external_id %)) rows)
       :rows  rows
       :count (:count result)})))
(defn graphql-results
  "Return the full payment-row data for the selected IDs.
   Since we now read from parquet, we receive the raw row vector
   and transform it.
   The old signature [ids db args] is replaced by [rows id-keys _].
   We ignore the database argument (Datomic pull is no longer
   called)."
  [rows _id-keys _args]
  (into [] (map <-row) rows))
(defn summarize-orders
  "Sum totals and discounts for the given ID-set.
   This function still queries Datomic because the parquet-side
   aggregation query would duplicate the WHERE logic.
   If we want a pure parquet path here, add an SQL-based
   aggregation in a follow-up."
  [ids]
  ;; Fix from the original: the binding vector carried a dangling
  ;; `first` symbol (odd number of binding forms — compile error).
  (let [[total tax] (#'auto-ap.datomic/aggregate-sum ids)] ; uses dc/q internally
    {:total total
     :tax   tax}))
(defn get-graphql
  "Entry-point: return [results count summary].
   The data flow is:
     1. raw-graphql-ids  -> parquet query    -> {:ids :rows :count}
     2. graphql-results  -> transform rows   -> consumer-shaped maps
     3. summarize-orders -> Datomic agg      -> {:total :tax}"
  [args]
  ;; Fixes from the original: unbalanced parens, unbound `id-keys`
  ;; and `matching-count` symbols, and destructuring of :count'
  ;; (a key raw-graphql-ids never returns).
  (let [{ids :ids rows :rows matching-count :count}
        (mu/trace ::get-sales-order-ids []
          (raw-graphql-ids args))
        results (mu/trace ::get-results []
                  (graphql-results rows ids args))]
    [results
     matching-count
     (summarize-orders ids)]))
(defn summarize-graphql
  "Entry-point: return just the summary {:total :tax}.
   Like get-graphql, this delegates to raw-graphql-ids and
   then to summarize-orders."
  [args]
  (-> (mu/trace ::get-sales-order-ids []
        (raw-graphql-ids args))
      :ids
      summarize-orders))