♻️ Refactor storage and assets related modules

- improve internal error handling
- add more specs and more asserts
Andrey Antukh 2023-02-06 12:27:53 +01:00
parent 4b4f78b4cc
commit ab3b9cba45
20 changed files with 547 additions and 511 deletions
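
The recurring pattern in this refactor is to move module configuration onto namespace-qualified keys, describe the resulting maps with specs, and assert those specs at function entry points. A minimal standalone sketch of that pattern (hypothetical example namespace and keys, not taken from the diff; the real code uses app.common.spec/assert! rather than a plain assert):

(ns example.storage-pattern
  (:require [clojure.spec.alpha :as s]))

;; Namespace-qualified keys for a composite "storage" system map.
(s/def ::pool some?)
(s/def ::backend keyword?)
(s/def ::backends (s/map-of keyword? map?))
(s/def ::storage (s/keys :req [::pool ::backends] :opt [::backend]))

(defn get-object
  "Fails fast with an explanation when the system map is malformed."
  [storage id]
  (assert (s/valid? ::storage storage) (s/explain-str ::storage storage))
  ;; ... look the object up through (::pool storage) ...
  {:id id})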


@ -146,9 +146,9 @@
(instance? javax.sql.DataSource v)) (instance? javax.sql.DataSource v))
(s/def ::conn some?) (s/def ::conn some?)
(s/def ::pool pool?)
(s/def ::nilable-pool (s/nilable ::pool)) (s/def ::nilable-pool (s/nilable ::pool))
(s/def ::conn-or-pool some?) (s/def ::pool pool?)
(s/def ::pool-or-conn some?)
(defn closed? (defn closed?
[pool] [pool]


@ -7,18 +7,17 @@
(ns app.http.assets (ns app.http.assets
"Assets related handlers." "Assets related handlers."
(:require (:require
[app.common.data :as d]
[app.common.exceptions :as ex] [app.common.exceptions :as ex]
[app.common.spec :as us] [app.common.spec :as us]
[app.common.uri :as u] [app.common.uri :as u]
[app.db :as db] [app.db :as db]
[app.metrics :as mtx]
[app.storage :as sto] [app.storage :as sto]
[app.util.time :as dt] [app.util.time :as dt]
[app.worker :as wrk] [app.worker :as wrk]
[clojure.spec.alpha :as s] [clojure.spec.alpha :as s]
[integrant.core :as ig] [integrant.core :as ig]
[promesa.core :as p] [promesa.core :as p]
[promesa.exec :as px]
[yetti.response :as yrs])) [yetti.response :as yrs]))
(def ^:private cache-max-age (def ^:private cache-max-age
@ -27,104 +26,96 @@
(def ^:private signature-max-age (def ^:private signature-max-age
(dt/duration {:hours 24 :minutes 15})) (dt/duration {:hours 24 :minutes 15}))
(defn coerce-id (defn get-id
[id] [{:keys [path-params]}]
(let [res (parse-uuid id)] (if-let [id (some-> path-params :id d/parse-uuid)]
(when-not (uuid? res) (p/resolved id)
(ex/raise :type :not-found (p/rejected (ex/error :type :not-found
:hint "object not found")) :hunt "object not found"))))
res))
(defn- get-file-media-object (defn- get-file-media-object
[{:keys [pool executor] :as storage} id] [pool id]
(px/with-dispatch executor (db/get pool :file-media-object {:id id}))
(let [id (coerce-id id)
mobj (db/exec-one! pool ["select * from file_media_object where id=?" id])] (defn- serve-object-from-s3
(when-not mobj [{:keys [::sto/storage] :as cfg} obj]
(ex/raise :type :not-found (let [mdata (meta obj)]
:hint "object does not found")) (->> (sto/get-object-url storage obj {:max-age signature-max-age})
mobj))) (p/fmap (fn [{:keys [host port] :as url}]
(let [headers {"location" (str url)
"x-host" (cond-> host port (str ":" port))
"x-mtype" (:content-type mdata)
"cache-control" (str "max-age=" (inst-ms cache-max-age))}]
(yrs/response
:status 307
:headers headers)))))))
(defn- serve-object-from-fs
[{:keys [::path]} obj]
(let [purl (u/join (u/uri path)
(sto/object->relative-path obj))
mdata (meta obj)
headers {"x-accel-redirect" (:path purl)
"content-type" (:content-type mdata)
"cache-control" (str "max-age=" (inst-ms cache-max-age))}]
(p/resolved
(yrs/response :status 204 :headers headers))))
(defn- serve-object (defn- serve-object
"Helper function that returns the appropriate response depending on "Helper function that returns the appropriate response depending on
the storage object backend type." the storage object backend type."
[{:keys [storage] :as cfg} obj] [{:keys [::sto/storage] :as cfg} {:keys [backend] :as obj}]
(let [mdata (meta obj) (let [backend (sto/resolve-backend storage backend)]
backend (sto/resolve-backend storage (:backend obj))] (case (::sto/type backend)
(case (:type backend) :s3 (serve-object-from-s3 cfg obj)
:s3 :fs (serve-object-from-fs cfg obj))))
(p/let [{:keys [host port] :as url} (sto/get-object-url storage obj {:max-age signature-max-age})]
(yrs/response :status 307
:headers {"location" (str url)
"x-host" (cond-> host port (str ":" port))
"x-mtype" (:content-type mdata)
"cache-control" (str "max-age=" (inst-ms cache-max-age))}))
:fs
(p/let [purl (u/uri (:assets-path cfg))
purl (u/join purl (sto/object->relative-path obj))]
(yrs/response :status 204
:headers {"x-accel-redirect" (:path purl)
"content-type" (:content-type mdata)
"cache-control" (str "max-age=" (inst-ms cache-max-age))})))))
(defn objects-handler (defn objects-handler
"Handler that servers storage objects by id." "Handler that servers storage objects by id."
[{:keys [storage executor] :as cfg} request respond raise] [{:keys [::sto/storage ::wrk/executor] :as cfg} request respond raise]
(-> (px/with-dispatch executor (->> (get-id request)
(p/let [id (get-in request [:path-params :id]) (p/mcat executor (fn [id] (sto/get-object storage id)))
id (coerce-id id) (p/mcat executor (fn [obj]
obj (sto/get-object storage id)] (if (some? obj)
(if obj
(serve-object cfg obj) (serve-object cfg obj)
(yrs/response 404)))) (p/resolved (yrs/response 404)))))
(p/fnly executor (fn [result cause]
(p/bind p/wrap) (if cause (raise cause) (respond result))))))
(p/then' respond)
(p/catch raise)))
(defn- generic-handler (defn- generic-handler
"A generic handler helper/common code for file-media based handlers." "A generic handler helper/common code for file-media based handlers."
[{:keys [storage] :as cfg} request kf] [{:keys [::sto/storage ::wrk/executor] :as cfg} request kf]
(p/let [id (get-in request [:path-params :id]) (let [pool (::db/pool storage)]
mobj (get-file-media-object storage id) (->> (get-id request)
obj (sto/get-object storage (kf mobj))] (p/fmap executor (fn [id] (get-file-media-object pool id)))
(if obj (p/mcat executor (fn [mobj] (sto/get-object storage (kf mobj))))
(serve-object cfg obj) (p/mcat executor (fn [sobj]
(yrs/response 404)))) (if sobj
(serve-object cfg sobj)
(p/resolved (yrs/response 404))))))))
(defn file-objects-handler (defn file-objects-handler
"Handler that serves storage objects by file media id." "Handler that serves storage objects by file media id."
[cfg request respond raise] [cfg request respond raise]
(-> (generic-handler cfg request :media-id) (->> (generic-handler cfg request :media-id)
(p/then respond) (p/fnly (fn [result cause]
(p/catch raise))) (if cause (raise cause) (respond result))))))
(defn file-thumbnails-handler (defn file-thumbnails-handler
"Handler that serves storage objects by thumbnail-id and quick "Handler that serves storage objects by thumbnail-id and quick
fallback to file-media-id if no thumbnail is available." fallback to file-media-id if no thumbnail is available."
[cfg request respond raise] [cfg request respond raise]
(-> (generic-handler cfg request #(or (:thumbnail-id %) (:media-id %))) (->> (generic-handler cfg request #(or (:thumbnail-id %) (:media-id %)))
(p/then respond) (p/fnly (fn [result cause]
(p/catch raise))) (if cause (raise cause) (respond result))))))
;; --- Initialization ;; --- Initialization
(s/def ::storage some?) (s/def ::path ::us/string)
(s/def ::assets-path ::us/string)
(s/def ::cache-max-age ::dt/duration)
(s/def ::signature-max-age ::dt/duration)
(s/def ::routes vector?) (s/def ::routes vector?)
;; FIXME: namespace qualified params
(defmethod ig/pre-init-spec ::routes [_] (defmethod ig/pre-init-spec ::routes [_]
(s/keys :req-un [::storage (s/keys :req [::sto/storage ::wrk/executor ::path]))
::wrk/executor
::mtx/metrics
::assets-path
::cache-max-age
::signature-max-age]))
(defmethod ig/init-key ::routes (defmethod ig/init-key ::routes
[_ cfg] [_ cfg]
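
The new handlers above compose each step as a promesa promise on an explicit executor and hand either the value or the error to the ring async callbacks through p/fnly. A stripped-down sketch of that shape (lookup-object and the 200 response are illustrative only, not part of the diff):

(require '[promesa.core :as p])

;; Hypothetical lookup that would normally hit the storage subsystem.
(defn lookup-object [id]
  (p/resolved {:id id :backend :fs}))

(defn example-handler
  [request respond raise]
  (->> (p/resolved (get-in request [:path-params :id]))
       (p/mcat lookup-object)                       ;; chain a promise-returning step
       (p/fmap (fn [obj] {:status 200 :body obj}))  ;; transform the resolved value
       (p/fnly (fn [result cause]                   ;; runs on success or failure
                 (if cause (raise cause) (respond result))))))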


@ -33,6 +33,8 @@
[app.rpc.doc :as-alias rpc.doc] [app.rpc.doc :as-alias rpc.doc]
[app.srepl :as-alias srepl] [app.srepl :as-alias srepl]
[app.storage :as-alias sto] [app.storage :as-alias sto]
[app.storage.fs :as-alias sto.fs]
[app.storage.s3 :as-alias sto.s3]
[app.util.time :as dt] [app.util.time :as dt]
[app.worker :as-alias wrk] [app.worker :as-alias wrk]
[cuerdas.core :as str] [cuerdas.core :as str]
@ -206,12 +208,11 @@
::wrk/scheduled-executor (ig/ref ::wrk/scheduled-executor)} ::wrk/scheduled-executor (ig/ref ::wrk/scheduled-executor)}
::sto/gc-deleted-task ::sto/gc-deleted-task
{:pool (ig/ref ::db/pool) {::db/pool (ig/ref ::db/pool)
:storage (ig/ref ::sto/storage) ::sto/storage (ig/ref ::sto/storage)}
:executor (ig/ref ::wrk/executor)}
::sto/gc-touched-task ::sto/gc-touched-task
{:pool (ig/ref ::db/pool)} {::db/pool (ig/ref ::db/pool)}
::http.client/client ::http.client/client
{::wrk/executor (ig/ref ::wrk/executor)} {::wrk/executor (ig/ref ::wrk/executor)}
@ -310,12 +311,11 @@
::session/manager (ig/ref ::session/manager)} ::session/manager (ig/ref ::session/manager)}
:app.http.assets/routes :app.http.assets/routes
{:metrics (ig/ref ::mtx/metrics) {::http.assets/path (cf/get :assets-path)
:assets-path (cf/get :assets-path) ::http.assets/cache-max-age (dt/duration {:hours 24})
:storage (ig/ref ::sto/storage) ::http.assets/signature-max-age (dt/duration {:hours 24 :minutes 5})
:executor (ig/ref ::wrk/executor) ::sto/storage (ig/ref ::sto/storage)
:cache-max-age (dt/duration {:hours 24}) ::wrk/executor (ig/ref ::wrk/executor)}
:signature-max-age (dt/duration {:hours 24 :minutes 5})}
:app.rpc/climit :app.rpc/climit
{::mtx/metrics (ig/ref ::mtx/metrics) {::mtx/metrics (ig/ref ::mtx/metrics)
@ -358,9 +358,9 @@
::props (ig/ref :app.setup/props)} ::props (ig/ref :app.setup/props)}
::wrk/registry ::wrk/registry
{:metrics (ig/ref ::mtx/metrics) {::mtx/metrics (ig/ref ::mtx/metrics)
:tasks ::wrk/tasks
{:sendmail (ig/ref :app.emails/handler) {:sendmail (ig/ref ::email/handler)
:objects-gc (ig/ref :app.tasks.objects-gc/handler) :objects-gc (ig/ref :app.tasks.objects-gc/handler)
:file-gc (ig/ref :app.tasks.file-gc/handler) :file-gc (ig/ref :app.tasks.file-gc/handler)
:file-xlog-gc (ig/ref :app.tasks.file-xlog-gc/handler) :file-xlog-gc (ig/ref :app.tasks.file-xlog-gc/handler)
@ -392,18 +392,17 @@
::mtx/metrics (ig/ref ::mtx/metrics)} ::mtx/metrics (ig/ref ::mtx/metrics)}
:app.tasks.tasks-gc/handler :app.tasks.tasks-gc/handler
{:pool (ig/ref ::db/pool) {::db/pool (ig/ref ::db/pool)}
:max-age cf/deletion-delay}
:app.tasks.objects-gc/handler :app.tasks.objects-gc/handler
{::db/pool (ig/ref ::db/pool) {::db/pool (ig/ref ::db/pool)
::sto/storage (ig/ref ::sto/storage)} ::sto/storage (ig/ref ::sto/storage)}
:app.tasks.file-gc/handler :app.tasks.file-gc/handler
{:pool (ig/ref ::db/pool)} {::db/pool (ig/ref ::db/pool)}
:app.tasks.file-xlog-gc/handler :app.tasks.file-xlog-gc/handler
{:pool (ig/ref ::db/pool)} {::db/pool (ig/ref ::db/pool)}
:app.tasks.telemetry/handler :app.tasks.telemetry/handler
{::db/pool (ig/ref ::db/pool) {::db/pool (ig/ref ::db/pool)
@ -457,25 +456,20 @@
{::db/pool (ig/ref ::db/pool)} {::db/pool (ig/ref ::db/pool)}
::sto/storage ::sto/storage
{:pool (ig/ref ::db/pool) {::db/pool (ig/ref ::db/pool)
:executor (ig/ref ::wrk/executor) ::wrk/executor (ig/ref ::wrk/executor)
::sto/backends
:backends
{:assets-s3 (ig/ref [::assets :app.storage.s3/backend]) {:assets-s3 (ig/ref [::assets :app.storage.s3/backend])
:assets-fs (ig/ref [::assets :app.storage.fs/backend]) :assets-fs (ig/ref [::assets :app.storage.fs/backend])}}
;; keep this for backward compatibility
:s3 (ig/ref [::assets :app.storage.s3/backend])
:fs (ig/ref [::assets :app.storage.fs/backend])}}
[::assets :app.storage.s3/backend] [::assets :app.storage.s3/backend]
{:region (cf/get :storage-assets-s3-region) {::sto.s3/region (cf/get :storage-assets-s3-region)
:endpoint (cf/get :storage-assets-s3-endpoint) ::sto.s3/endpoint (cf/get :storage-assets-s3-endpoint)
:bucket (cf/get :storage-assets-s3-bucket) ::sto.s3/bucket (cf/get :storage-assets-s3-bucket)
:executor (ig/ref ::wrk/executor)} ::wrk/executor (ig/ref ::wrk/executor)}
[::assets :app.storage.fs/backend] [::assets :app.storage.fs/backend]
{:directory (cf/get :storage-assets-fs-directory)} {::sto.fs/directory (cf/get :storage-assets-fs-directory)}
}) })


@ -12,6 +12,8 @@
[app.common.media :as cm] [app.common.media :as cm]
[app.common.spec :as us] [app.common.spec :as us]
[app.config :as cf] [app.config :as cf]
[app.db :as-alias db]
[app.storage :as-alias sto]
[app.storage.tmp :as tmp] [app.storage.tmp :as tmp]
[app.util.svg :as svg] [app.util.svg :as svg]
[buddy.core.bytes :as bb] [buddy.core.bytes :as bb]
@ -297,8 +299,7 @@
"Given storage map, returns a storage configured with the appropriate "Given storage map, returns a storage configured with the appropriate
backend for assets and optional connection attached." backend for assets and optional connection attached."
([storage] ([storage]
(assoc storage :backend (cf/get :assets-storage-backend :assets-fs))) (assoc storage ::sto/backend (cf/get :assets-storage-backend :assets-fs)))
([storage conn] ([storage pool-or-conn]
(-> storage (-> (configure-assets-storage storage)
(assoc :conn conn) (assoc ::db/pool-or-conn pool-or-conn))))
(assoc :backend (cf/get :assets-storage-backend :assets-fs)))))


@ -317,12 +317,11 @@
]) ])
(defn- apply-migrations! (defn apply-migrations!
[pool migrations] [pool name migrations]
;; (app.common.pprint/pprint migrations)
(dm/with-open [conn (db/open pool)] (dm/with-open [conn (db/open pool)]
(mg/setup! conn) (mg/setup! conn)
(mg/migrate! conn {:name "main" :steps migrations}))) (mg/migrate! conn {:name name :steps migrations})))
(defmethod ig/pre-init-spec ::migrations (defmethod ig/pre-init-spec ::migrations
[_] [_]
@ -332,4 +331,4 @@
[module {:keys [::db/pool]}] [module {:keys [::db/pool]}]
(when-not (db/read-only? pool) (when-not (db/read-only? pool)
(l/info :hint "running migrations" :module module) (l/info :hint "running migrations" :module module)
(some->> (seq migrations) (apply-migrations! pool)))) (some->> (seq migrations) (apply-migrations! pool "main"))))


@ -365,9 +365,10 @@
(defmethod ig/init-key ::methods (defmethod ig/init-key ::methods
[_ cfg] [_ cfg]
(let [cfg (d/without-nils cfg)]
{:mutations (resolve-mutation-methods cfg) {:mutations (resolve-mutation-methods cfg)
:queries (resolve-query-methods cfg) :queries (resolve-query-methods cfg)
:commands (resolve-command-methods cfg)}) :commands (resolve-command-methods cfg)}))
(s/def ::mutations (s/def ::mutations
(s/map-of keyword? fn?)) (s/map-of keyword? fn?))


@ -516,7 +516,7 @@
(write-obj! output rels))) (write-obj! output rels)))
(defmethod write-section :v1/sobjects (defmethod write-section :v1/sobjects
[{:keys [storage ::output]}] [{:keys [::sto/storage ::output]}]
(let [sids (-> *state* deref :sids) (let [sids (-> *state* deref :sids)
storage (media/configure-assets-storage storage)] storage (media/configure-assets-storage storage)]
(l/debug :hint "found sobjects" (l/debug :hint "found sobjects"


@ -23,7 +23,7 @@
;; PUBLIC API ;; PUBLIC API
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(s/def ::conn ::db/conn-or-pool) (s/def ::conn ::db/pool-or-conn)
(s/def ::file-id ::us/uuid) (s/def ::file-id ::us/uuid)
(s/def ::team-id ::us/uuid) (s/def ::team-id ::us/uuid)
(s/def ::project-id ::us/uuid) (s/def ::project-id ::us/uuid)
@ -53,7 +53,7 @@
(defn check-quote! (defn check-quote!
[conn quote] [conn quote]
(us/assert! ::db/conn-or-pool conn) (us/assert! ::db/pool-or-conn conn)
(us/assert! ::quote quote) (us/assert! ::quote quote)
(when (contains? cf/flags :quotes) (when (contains? cf/flags :quotes)
(when @enabled (when @enabled


@ -29,8 +29,10 @@
;; Storage Module State ;; Storage Module State
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(s/def ::id #{:assets-fs :assets-s3})
(s/def ::s3 ::ss3/backend) (s/def ::s3 ::ss3/backend)
(s/def ::fs ::sfs/backend) (s/def ::fs ::sfs/backend)
(s/def ::type #{:fs :s3})
(s/def ::backends (s/def ::backends
(s/map-of ::us/keyword (s/map-of ::us/keyword
@ -39,34 +41,26 @@
:fs ::sfs/backend)))) :fs ::sfs/backend))))
(defmethod ig/pre-init-spec ::storage [_] (defmethod ig/pre-init-spec ::storage [_]
(s/keys :req-un [::db/pool ::wrk/executor ::backends])) (s/keys :req [::db/pool ::wrk/executor ::backends]))
(defmethod ig/prep-key ::storage
[_ {:keys [backends] :as cfg}]
(-> (d/without-nils cfg)
(assoc :backends (d/without-nils backends))))
(defmethod ig/init-key ::storage (defmethod ig/init-key ::storage
[_ {:keys [backends] :as cfg}] [_ {:keys [::backends ::db/pool] :as cfg}]
(-> (d/without-nils cfg) (-> (d/without-nils cfg)
(assoc :backends (d/without-nils backends)))) (assoc ::backends (d/without-nils backends))
(assoc ::db/pool-or-conn pool)))
(s/def ::backend keyword?)
(s/def ::storage (s/def ::storage
(s/keys :req-un [::backends ::db/pool])) (s/keys :req [::backends ::db/pool ::db/pool-or-conn]
:opt [::backend]))
(s/def ::storage-with-backend
(s/and ::storage #(contains? % ::backend)))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; Database Objects ;; Database Objects
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defrecord StorageObject [id size created-at expired-at touched-at backend])
(defn storage-object?
[v]
(instance? StorageObject v))
(s/def ::storage-object storage-object?)
(s/def ::storage-content impl/content?)
(defn get-metadata (defn get-metadata
[params] [params]
(into {} (into {}
@ -74,19 +68,18 @@
params)) params))
(defn- get-database-object-by-hash (defn- get-database-object-by-hash
[conn backend bucket hash] [pool-or-conn backend bucket hash]
(let [sql (str "select * from storage_object " (let [sql (str "select * from storage_object "
" where (metadata->>'~:hash') = ? " " where (metadata->>'~:hash') = ? "
" and (metadata->>'~:bucket') = ? " " and (metadata->>'~:bucket') = ? "
" and backend = ?" " and backend = ?"
" and deleted_at is null" " and deleted_at is null"
" limit 1")] " limit 1")]
(some-> (db/exec-one! conn [sql hash bucket (name backend)]) (some-> (db/exec-one! pool-or-conn [sql hash bucket (name backend)])
(update :metadata db/decode-transit-pgobject)))) (update :metadata db/decode-transit-pgobject))))
(defn- create-database-object (defn- create-database-object
[{:keys [conn backend executor]} {:keys [::content ::expired-at ::touched-at] :as params}] [{:keys [::backend ::wrk/executor ::db/pool-or-conn]} {:keys [::content ::expired-at ::touched-at] :as params}]
(us/assert ::storage-content content)
(px/with-dispatch executor (px/with-dispatch executor
(let [id (uuid/random) (let [id (uuid/random)
@ -101,10 +94,10 @@
result (when (and (::deduplicate? params) result (when (and (::deduplicate? params)
(:hash mdata) (:hash mdata)
(:bucket mdata)) (:bucket mdata))
(get-database-object-by-hash conn backend (:bucket mdata) (:hash mdata))) (get-database-object-by-hash pool-or-conn backend (:bucket mdata) (:hash mdata)))
result (or result result (or result
(-> (db/insert! conn :storage-object (-> (db/insert! pool-or-conn :storage-object
{:id id {:id id
:size (impl/get-size content) :size (impl/get-size content)
:backend (name backend) :backend (name backend)
@ -114,33 +107,33 @@
(update :metadata db/decode-transit-pgobject) (update :metadata db/decode-transit-pgobject)
(update :metadata assoc ::created? true)))] (update :metadata assoc ::created? true)))]
(StorageObject. (:id result) (impl/storage-object
(:id result)
(:size result) (:size result)
(:created-at result) (:created-at result)
(:deleted-at result) (:deleted-at result)
(:touched-at result) (:touched-at result)
backend backend
(:metadata result) (:metadata result)))))
nil))))
(def ^:private sql:retrieve-storage-object (def ^:private sql:retrieve-storage-object
"select * from storage_object where id = ? and (deleted_at is null or deleted_at > now())") "select * from storage_object where id = ? and (deleted_at is null or deleted_at > now())")
(defn row->storage-object [res] (defn row->storage-object [res]
(let [mdata (or (some-> (:metadata res) (db/decode-transit-pgobject)) {})] (let [mdata (or (some-> (:metadata res) (db/decode-transit-pgobject)) {})]
(StorageObject. (:id res) (impl/storage-object
(:id res)
(:size res) (:size res)
(:created-at res) (:created-at res)
(:deleted-at res) (:deleted-at res)
(:touched-at res) (:touched-at res)
(keyword (:backend res)) (keyword (:backend res))
mdata mdata)))
nil)))
(defn- retrieve-database-object (defn- retrieve-database-object
[{:keys [conn] :as storage} id] [conn id]
(when-let [res (db/exec-one! conn [sql:retrieve-storage-object id])] (some-> (db/exec-one! conn [sql:retrieve-storage-object id])
(row->storage-object res))) (row->storage-object)))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; API ;; API
@ -152,103 +145,99 @@
(defn file-url->path (defn file-url->path
[url] [url]
(fs/path (java.net.URI. (str url)))) (when url
(fs/path (java.net.URI. (str url)))))
(dm/export impl/content) (dm/export impl/content)
(dm/export impl/wrap-with-hash) (dm/export impl/wrap-with-hash)
(dm/export impl/object?)
(defn get-object (defn get-object
[{:keys [conn pool] :as storage} id] [{:keys [::db/pool-or-conn ::wrk/executor] :as storage} id]
(us/assert ::storage storage) (us/assert! ::storage storage)
(p/do (px/with-dispatch executor
(-> (assoc storage :conn (or conn pool)) (retrieve-database-object pool-or-conn id)))
(retrieve-database-object id))))
(defn put-object! (defn put-object!
"Creates a new object with the provided content." "Creates a new object with the provided content."
[{:keys [pool conn backend] :as storage} {:keys [::content] :as params}] [{:keys [::backend] :as storage} {:keys [::content] :as params}]
(us/assert ::storage storage) (us/assert! ::storage-with-backend storage)
(us/assert ::storage-content content) (us/assert! ::impl/content content)
(us/assert ::us/keyword backend) (->> (create-database-object storage params)
(p/let [storage (assoc storage :conn (or conn pool)) (p/mcat (fn [object]
object (create-database-object storage params)] (if (::created? (meta object))
(when (::created? (meta object))
;; Store the data finally on the underlying storage subsystem. ;; Store the data finally on the underlying storage subsystem.
(-> (impl/resolve-backend storage backend) (-> (impl/resolve-backend storage backend)
(impl/put-object object content))) (impl/put-object object content))
(p/resolved object))))))
object))
(defn touch-object! (defn touch-object!
"Mark object as touched." "Mark object as touched."
[{:keys [pool conn] :as storage} object-or-id] [{:keys [::db/pool-or-conn ::wrk/executor] :as storage} object-or-id]
(p/do (us/assert! ::storage storage)
(let [id (if (storage-object? object-or-id) (:id object-or-id) object-or-id) (px/with-dispatch executor
res (db/update! (or conn pool) :storage-object (let [id (if (impl/object? object-or-id) (:id object-or-id) object-or-id)
rs (db/update! pool-or-conn :storage-object
{:touched-at (dt/now)} {:touched-at (dt/now)}
{:id id} {:id id}
{::db/return-keys? false})] {::db/return-keys? false})]
(pos? (:next.jdbc/update-count res))))) (pos? (db/get-update-count rs)))))
(defn get-object-data (defn get-object-data
"Return an input stream instance of the object content." "Return an input stream instance of the object content."
[{:keys [pool conn] :as storage} object] [storage object]
(us/assert ::storage storage) (us/assert! ::storage storage)
(p/do (if (or (nil? (:expired-at object))
(when (or (nil? (:expired-at object))
(dt/is-after? (:expired-at object) (dt/now))) (dt/is-after? (:expired-at object) (dt/now)))
(-> (assoc storage :conn (or conn pool)) (-> (impl/resolve-backend storage (:backend object))
(impl/resolve-backend (:backend object)) (impl/get-object-data object))
(impl/get-object-data object))))) (p/resolved nil)))
(defn get-object-bytes (defn get-object-bytes
"Returns a byte array of object content." "Returns a byte array of object content."
[{:keys [pool conn] :as storage} object] [storage object]
(us/assert ::storage storage) (us/assert! ::storage storage)
(p/do (if (or (nil? (:expired-at object))
(when (or (nil? (:expired-at object))
(dt/is-after? (:expired-at object) (dt/now))) (dt/is-after? (:expired-at object) (dt/now)))
(-> (assoc storage :conn (or conn pool)) (-> (impl/resolve-backend storage (:backend object))
(impl/resolve-backend (:backend object)) (impl/get-object-bytes object))
(impl/get-object-bytes object))))) (p/resolved nil)))
(defn get-object-url (defn get-object-url
([storage object] ([storage object]
(get-object-url storage object nil)) (get-object-url storage object nil))
([{:keys [conn pool] :as storage} object options] ([storage object options]
(us/assert ::storage storage) (us/assert! ::storage storage)
(p/do (if (or (nil? (:expired-at object))
(when (or (nil? (:expired-at object))
(dt/is-after? (:expired-at object) (dt/now))) (dt/is-after? (:expired-at object) (dt/now)))
(-> (assoc storage :conn (or conn pool)) (-> (impl/resolve-backend storage (:backend object))
(impl/resolve-backend (:backend object)) (impl/get-object-url object options))
(impl/get-object-url object options)))))) (p/resolved nil))))
(defn get-object-path (defn get-object-path
"Get the Path to the object. Only works with `:fs` type of "Get the Path to the object. Only works with `:fs` type of
storages." storages."
[storage object] [storage object]
(p/do (us/assert! ::storage storage)
(let [backend (impl/resolve-backend storage (:backend object))] (let [backend (impl/resolve-backend storage (:backend object))]
(when (not= :fs (:type backend)) (if (not= :fs (::type backend))
(ex/raise :type :internal (p/resolved nil)
:code :operation-not-allowed (if (or (nil? (:expired-at object))
:hint "get-object-path only works with fs type backends"))
(when (or (nil? (:expired-at object))
(dt/is-after? (:expired-at object) (dt/now))) (dt/is-after? (:expired-at object) (dt/now)))
(p/-> (impl/get-object-url backend object nil) file-url->path))))) (->> (impl/get-object-url backend object nil)
(p/fmap file-url->path))
(p/resolved nil)))))
(defn del-object! (defn del-object!
[{:keys [conn pool] :as storage} object-or-id] [{:keys [::db/pool-or-conn ::wrk/executor] :as storage} object-or-id]
(us/assert ::storage storage) (us/assert! ::storage storage)
(p/do (px/with-dispatch executor
(let [id (if (storage-object? object-or-id) (:id object-or-id) object-or-id) (let [id (if (impl/object? object-or-id) (:id object-or-id) object-or-id)
res (db/update! (or conn pool) :storage-object res (db/update! pool-or-conn :storage-object
{:deleted-at (dt/now)} {:deleted-at (dt/now)}
{:id id} {:id id}
{::db/return-keys? false})] {::db/return-keys? false})]
(pos? (:next.jdbc/update-count res))))) (pos? (db/get-update-count res)))))
(dm/export impl/resolve-backend) (dm/export impl/resolve-backend)
(dm/export impl/calculate-hash) (dm/export impl/calculate-hash)
@ -265,18 +254,15 @@
(declare sql:retrieve-deleted-objects-chunk) (declare sql:retrieve-deleted-objects-chunk)
(s/def ::min-age ::dt/duration)
(defmethod ig/pre-init-spec ::gc-deleted-task [_] (defmethod ig/pre-init-spec ::gc-deleted-task [_]
(s/keys :req-un [::storage ::db/pool ::min-age ::wrk/executor])) (s/keys :req [::storage ::db/pool]))
(defmethod ig/prep-key ::gc-deleted-task (defmethod ig/prep-key ::gc-deleted-task
[_ cfg] [_ cfg]
(merge {:min-age (dt/duration {:hours 2})} (assoc cfg ::min-age (dt/duration {:hours 2})))
(d/without-nils cfg)))
(defmethod ig/init-key ::gc-deleted-task (defmethod ig/init-key ::gc-deleted-task
[_ {:keys [pool storage] :as cfg}] [_ {:keys [::db/pool ::storage ::min-age]}]
(letfn [(retrieve-deleted-objects-chunk [conn min-age cursor] (letfn [(retrieve-deleted-objects-chunk [conn min-age cursor]
(let [min-age (db/interval min-age) (let [min-age (db/interval min-age)
rows (db/exec! conn [sql:retrieve-deleted-objects-chunk min-age cursor])] rows (db/exec! conn [sql:retrieve-deleted-objects-chunk min-age cursor])]
@ -289,27 +275,26 @@
:vf second :vf second
:kf first)) :kf first))
(delete-in-bulk [conn backend-name ids] (delete-in-bulk [backend-id ids]
(let [backend (impl/resolve-backend storage backend-name) (let [backend (impl/resolve-backend storage backend-id)]
backend (assoc backend :conn conn)]
(doseq [id ids] (doseq [id ids]
(l/debug :hint "permanently delete storage object" :task "gc-deleted" :backend backend-name :id id)) (l/debug :hint "gc-deleted: permanently delete storage object" :backend backend-id :id id))
@(impl/del-objects-in-bulk backend ids)))] @(impl/del-objects-in-bulk backend ids)))]
(fn [params] (fn [params]
(let [min-age (or (:min-age params) (:min-age cfg))] (let [min-age (or (:min-age params) min-age)]
(db/with-atomic [conn pool] (db/with-atomic [conn pool]
(loop [total 0 (loop [total 0
groups (retrieve-deleted-objects conn min-age)] groups (retrieve-deleted-objects conn min-age)]
(if-let [[backend ids] (first groups)] (if-let [[backend-id ids] (first groups)]
(do (do
(delete-in-bulk conn backend ids) (delete-in-bulk backend-id ids)
(recur (+ total (count ids)) (recur (+ total (count ids))
(rest groups))) (rest groups)))
(do (do
(l/info :hint "task finished" :min-age (dt/format-duration min-age) :task "gc-deleted" :total total) (l/info :hint "gc-deleted: task finished" :min-age (dt/format-duration min-age) :total total)
{:deleted total})))))))) {:deleted total}))))))))
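
The GC tasks in this file page through candidate rows with d/iteration, which follows the same contract as clojure.core/iteration (Clojure 1.11+): the step function fetches one chunk, :kf derives the cursor for the next call, :vf selects what to emit, and the scan stops once the step returns nil. A self-contained sketch of that pagination pattern, using the core version and an in-memory stand-in for the SQL chunk query (illustrative only):

;; Stand-in for the chunked SQL query: rows strictly below `cursor`,
;; largest first, at most `limit` of them.
(defn fetch-chunk [cursor limit]
  (->> (range 100) reverse (filter #(< % cursor)) (take limit) vec))

(def all-rows
  (into []
        cat                                                  ;; concatenate the chunks
        (iteration (fn [cursor] (not-empty (fetch-chunk cursor 10)))
                   :initk Long/MAX_VALUE                     ;; start from the top
                   :kf peek                                  ;; next cursor = last row of this chunk
                   :vf identity)))                           ;; emit the chunk itself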
(def sql:retrieve-deleted-objects-chunk (def sql:retrieve-deleted-objects-chunk
@ -349,10 +334,10 @@
(declare sql:retrieve-profile-nrefs) (declare sql:retrieve-profile-nrefs)
(defmethod ig/pre-init-spec ::gc-touched-task [_] (defmethod ig/pre-init-spec ::gc-touched-task [_]
(s/keys :req-un [::db/pool])) (s/keys :req [::db/pool]))
(defmethod ig/init-key ::gc-touched-task (defmethod ig/init-key ::gc-touched-task
[_ {:keys [pool] :as cfg}] [_ {:keys [::db/pool]}]
(letfn [(get-team-font-variant-nrefs [conn id] (letfn [(get-team-font-variant-nrefs [conn id]
(-> (db/exec-one! conn [sql:retrieve-team-font-variant-nrefs id id id id]) :nrefs)) (-> (db/exec-one! conn [sql:retrieve-team-font-variant-nrefs id id id id]) :nrefs))
@ -409,13 +394,13 @@
(let [nrefs (get-fn conn id)] (let [nrefs (get-fn conn id)]
(if (pos? nrefs) (if (pos? nrefs)
(do (do
(l/debug :hint "processing storage object" (l/debug :hint "gc-touched: processing storage object"
:task "gc-touched" :id id :status "freeze" :id id :status "freeze"
:bucket bucket :refs nrefs) :bucket bucket :refs nrefs)
(recur (conj to-freeze id) to-delete (rest ids))) (recur (conj to-freeze id) to-delete (rest ids)))
(do (do
(l/debug :hint "processing storage object" (l/debug :hint "gc-touched: processing storage object"
:task "gc-touched" :id id :status "delete" :id id :status "delete"
:bucket bucket :refs nrefs) :bucket bucket :refs nrefs)
(recur to-freeze (conj to-delete id) (rest ids))))) (recur to-freeze (conj to-delete id) (rest ids)))))
(do (do
@ -441,7 +426,7 @@
(+ to-delete d) (+ to-delete d)
(rest groups))) (rest groups)))
(do (do
(l/info :hint "task finished" :task "gc-touched" :to-freeze to-freeze :to-delete to-delete) (l/info :hint "gc-touched: task finished" :to-freeze to-freeze :to-delete to-delete)
{:freeze to-freeze :delete to-delete}))))))) {:freeze to-freeze :delete to-delete})))))))
(def sql:retrieve-touched-objects-chunk (def sql:retrieve-touched-objects-chunk


@ -9,7 +9,9 @@
[app.common.exceptions :as ex] [app.common.exceptions :as ex]
[app.common.spec :as us] [app.common.spec :as us]
[app.common.uri :as u] [app.common.uri :as u]
[app.storage :as-alias sto]
[app.storage.impl :as impl] [app.storage.impl :as impl]
[app.worker :as-alias wrk]
[clojure.spec.alpha :as s] [clojure.spec.alpha :as s]
[cuerdas.core :as str] [cuerdas.core :as str]
[datoteka.fs :as fs] [datoteka.fs :as fs]
@ -28,42 +30,49 @@
(s/def ::directory ::us/string) (s/def ::directory ::us/string)
(defmethod ig/pre-init-spec ::backend [_] (defmethod ig/pre-init-spec ::backend [_]
(s/keys :opt-un [::directory])) (s/keys :opt [::directory]))
(defmethod ig/init-key ::backend (defmethod ig/init-key ::backend
[_ cfg] [_ cfg]
;; Return a valid backend data structure only if all optional ;; Return a valid backend data structure only if all optional
;; parameters are provided. ;; parameters are provided.
(when (string? (:directory cfg)) (when (string? (::directory cfg))
(let [dir (fs/normalize (:directory cfg))] (let [dir (fs/normalize (::directory cfg))]
(assoc cfg (assoc cfg
:type :fs ::sto/type :fs
:directory (str dir) ::directory (str dir)
:uri (u/uri (str "file://" dir)))))) ::uri (u/uri (str "file://" dir))))))
(s/def ::type ::us/keyword)
(s/def ::uri u/uri?) (s/def ::uri u/uri?)
(s/def ::backend (s/def ::backend
(s/keys :req-un [::type ::directory ::uri])) (s/keys :req [::directory
::uri]
:opt [::sto/type
::sto/id
::wrk/executor]))
;; --- API IMPL ;; --- API IMPL
(defmethod impl/put-object :fs (defmethod impl/put-object :fs
[{:keys [executor] :as backend} {:keys [id] :as object} content] [{:keys [::wrk/executor] :as backend} {:keys [id] :as object} content]
(us/assert! ::backend backend)
(px/with-dispatch executor (px/with-dispatch executor
(let [base (fs/path (:directory backend)) (let [base (fs/path (::directory backend))
path (fs/path (impl/id->path id)) path (fs/path (impl/id->path id))
full (fs/normalize (fs/join base path))] full (fs/normalize (fs/join base path))]
(when-not (fs/exists? (fs/parent full)) (when-not (fs/exists? (fs/parent full))
(fs/create-dir (fs/parent full))) (fs/create-dir (fs/parent full)))
(with-open [^InputStream src (io/input-stream content) (with-open [^InputStream src (io/input-stream content)
^OutputStream dst (io/output-stream full)] ^OutputStream dst (io/output-stream full)]
(io/copy! src dst))))) (io/copy! src dst))
object)))
(defmethod impl/get-object-data :fs (defmethod impl/get-object-data :fs
[{:keys [executor] :as backend} {:keys [id] :as object}] [{:keys [::wrk/executor] :as backend} {:keys [id] :as object}]
(us/assert! ::backend backend)
(px/with-dispatch executor (px/with-dispatch executor
(let [^Path base (fs/path (:directory backend)) (let [^Path base (fs/path (::directory backend))
^Path path (fs/path (impl/id->path id)) ^Path path (fs/path (impl/id->path id))
^Path full (fs/normalize (fs/join base path))] ^Path full (fs/normalize (fs/join base path))]
(when-not (fs/exists? full) (when-not (fs/exists? full)
@ -74,15 +83,17 @@
(defmethod impl/get-object-bytes :fs (defmethod impl/get-object-bytes :fs
[backend object] [backend object]
(p/let [input (impl/get-object-data backend object)] (->> (impl/get-object-data backend object)
(p/fmap (fn [input]
(try (try
(io/read-as-bytes input) (io/read-as-bytes input)
(finally (finally
(io/close! input))))) (io/close! input)))))))
(defmethod impl/get-object-url :fs (defmethod impl/get-object-url :fs
[{:keys [uri executor] :as backend} {:keys [id] :as object} _] [{:keys [::uri] :as backend} {:keys [id] :as object} _]
(px/with-dispatch executor (us/assert! ::backend backend)
(p/resolved
(update uri :path (update uri :path
(fn [existing] (fn [existing]
(if (str/ends-with? existing "/") (if (str/ends-with? existing "/")
@ -90,17 +101,19 @@
(str existing "/" (impl/id->path id))))))) (str existing "/" (impl/id->path id)))))))
(defmethod impl/del-object :fs (defmethod impl/del-object :fs
[{:keys [executor] :as backend} {:keys [id] :as object}] [{:keys [::wrk/executor] :as backend} {:keys [id] :as object}]
(us/assert! ::backend backend)
(px/with-dispatch executor (px/with-dispatch executor
(let [base (fs/path (:directory backend)) (let [base (fs/path (::directory backend))
path (fs/path (impl/id->path id)) path (fs/path (impl/id->path id))
path (fs/join base path)] path (fs/join base path)]
(Files/deleteIfExists ^Path path)))) (Files/deleteIfExists ^Path path))))
(defmethod impl/del-objects-in-bulk :fs (defmethod impl/del-objects-in-bulk :fs
[{:keys [executor] :as backend} ids] [{:keys [::wrk/executor] :as backend} ids]
(us/assert! ::backend backend)
(px/with-dispatch executor (px/with-dispatch executor
(let [base (fs/path (:directory backend))] (let [base (fs/path (::directory backend))]
(doseq [id ids] (doseq [id ids]
(let [path (fs/path (impl/id->path id)) (let [path (fs/path (impl/id->path id))
path (fs/join base path)] path (fs/join base path)]


@ -9,9 +9,13 @@
(:require (:require
[app.common.data.macros :as dm] [app.common.data.macros :as dm]
[app.common.exceptions :as ex] [app.common.exceptions :as ex]
[app.db :as-alias db]
[app.storage :as-alias sto]
[app.worker :as-alias wrk]
[buddy.core.codecs :as bc] [buddy.core.codecs :as bc]
[buddy.core.hash :as bh] [buddy.core.hash :as bh]
[clojure.java.io :as jio] [clojure.java.io :as jio]
[clojure.spec.alpha :as s]
[datoteka.io :as io]) [datoteka.io :as io])
(:import (:import
java.nio.ByteBuffer java.nio.ByteBuffer
@ -21,7 +25,7 @@
;; --- API Definition ;; --- API Definition
(defmulti put-object (fn [cfg _ _] (:type cfg))) (defmulti put-object (fn [cfg _ _] (::sto/type cfg)))
(defmethod put-object :default (defmethod put-object :default
[cfg _ _] [cfg _ _]
@ -29,7 +33,7 @@
:code :invalid-storage-backend :code :invalid-storage-backend
:context cfg)) :context cfg))
(defmulti get-object-data (fn [cfg _] (:type cfg))) (defmulti get-object-data (fn [cfg _] (::sto/type cfg)))
(defmethod get-object-data :default (defmethod get-object-data :default
[cfg _] [cfg _]
@ -37,7 +41,7 @@
:code :invalid-storage-backend :code :invalid-storage-backend
:context cfg)) :context cfg))
(defmulti get-object-bytes (fn [cfg _] (:type cfg))) (defmulti get-object-bytes (fn [cfg _] (::sto/type cfg)))
(defmethod get-object-bytes :default (defmethod get-object-bytes :default
[cfg _] [cfg _]
@ -45,7 +49,7 @@
:code :invalid-storage-backend :code :invalid-storage-backend
:context cfg)) :context cfg))
(defmulti get-object-url (fn [cfg _ _] (:type cfg))) (defmulti get-object-url (fn [cfg _ _] (::sto/type cfg)))
(defmethod get-object-url :default (defmethod get-object-url :default
[cfg _ _] [cfg _ _]
@ -54,7 +58,7 @@
:context cfg)) :context cfg))
(defmulti del-object (fn [cfg _] (:type cfg))) (defmulti del-object (fn [cfg _] (::sto/type cfg)))
(defmethod del-object :default (defmethod del-object :default
[cfg _] [cfg _]
@ -62,7 +66,7 @@
:code :invalid-storage-backend :code :invalid-storage-backend
:context cfg)) :context cfg))
(defmulti del-objects-in-bulk (fn [cfg _] (:type cfg))) (defmulti del-objects-in-bulk (fn [cfg _] (::sto/type cfg)))
(defmethod del-objects-in-bulk :default (defmethod del-objects-in-bulk :default
[cfg _] [cfg _]
@ -189,10 +193,6 @@
(make-output-stream [_ opts] (make-output-stream [_ opts]
(jio/make-output-stream content opts)))) (jio/make-output-stream content opts))))
(defn content?
[v]
(satisfies? IContentObject v))
(defn calculate-hash (defn calculate-hash
[resource] [resource]
(let [result (with-open [input (io/input-stream resource)] (let [result (with-open [input (io/input-stream resource)]
@ -201,13 +201,37 @@
(str "blake2b:" result))) (str "blake2b:" result)))
(defn resolve-backend (defn resolve-backend
[{:keys [conn pool executor] :as storage} backend-id] [{:keys [::db/pool ::wrk/executor] :as storage} backend-id]
(let [backend (get-in storage [:backends backend-id])] (let [backend (get-in storage [::sto/backends backend-id])]
(when-not backend (when-not backend
(ex/raise :type :internal (ex/raise :type :internal
:code :backend-not-configured :code :backend-not-configured
:hint (dm/fmt "backend '%' not configured" backend-id))) :hint (dm/fmt "backend '%' not configured" backend-id)))
(assoc backend (-> backend
:executor executor (assoc ::sto/id backend-id)
:conn (or conn pool) (assoc ::wrk/executor executor)
:id backend-id))) (assoc ::db/pool pool))))
(defrecord StorageObject [id size created-at expired-at touched-at backend])
(ns-unmap *ns* '->StorageObject)
(ns-unmap *ns* 'map->StorageObject)
(defn storage-object
([id size created-at expired-at touched-at backend]
(StorageObject. id size created-at expired-at touched-at backend))
([id size created-at expired-at touched-at backend mdata]
(StorageObject. id size created-at expired-at touched-at backend mdata nil)))
(defn object?
[v]
(instance? StorageObject v))
(defn content?
[v]
(satisfies? IContentObject v))
(s/def ::object object?)
(s/def ::content content?)


@ -8,9 +8,12 @@
"S3 Storage backend implementation." "S3 Storage backend implementation."
(:require (:require
[app.common.data :as d] [app.common.data :as d]
[app.common.data.macros :as dm]
[app.common.exceptions :as ex] [app.common.exceptions :as ex]
[app.common.logging :as l]
[app.common.spec :as us] [app.common.spec :as us]
[app.common.uri :as u] [app.common.uri :as u]
[app.storage :as-alias sto]
[app.storage.impl :as impl] [app.storage.impl :as impl]
[app.storage.tmp :as tmp] [app.storage.tmp :as tmp]
[app.util.time :as dt] [app.util.time :as dt]
@ -64,6 +67,9 @@
(declare build-s3-client) (declare build-s3-client)
(declare build-s3-presigner) (declare build-s3-presigner)
;; (set! *warn-on-reflection* true)
;; (set! *unchecked-math* :warn-on-boxed)
;; --- BACKEND INIT ;; --- BACKEND INIT
(s/def ::region ::us/keyword) (s/def ::region ::us/keyword)
@ -72,26 +78,26 @@
(s/def ::endpoint ::us/string) (s/def ::endpoint ::us/string)
(defmethod ig/pre-init-spec ::backend [_] (defmethod ig/pre-init-spec ::backend [_]
(s/keys :opt-un [::region ::bucket ::prefix ::endpoint ::wrk/executor])) (s/keys :opt [::region ::bucket ::prefix ::endpoint ::wrk/executor]))
(defmethod ig/prep-key ::backend (defmethod ig/prep-key ::backend
[_ {:keys [prefix region] :as cfg}] [_ {:keys [::prefix ::region] :as cfg}]
(cond-> (d/without-nils cfg) (cond-> (d/without-nils cfg)
(some? prefix) (assoc :prefix prefix) (some? prefix) (assoc ::prefix prefix)
(nil? region) (assoc :region :eu-central-1))) (nil? region) (assoc ::region :eu-central-1)))
(defmethod ig/init-key ::backend (defmethod ig/init-key ::backend
[_ cfg] [_ cfg]
;; Return a valid backend data structure only if all optional ;; Return a valid backend data structure only if all optional
;; parameters are provided. ;; parameters are provided.
(when (and (contains? cfg :region) (when (and (contains? cfg ::region)
(string? (:bucket cfg))) (string? (::bucket cfg)))
(let [client (build-s3-client cfg) (let [client (build-s3-client cfg)
presigner (build-s3-presigner cfg)] presigner (build-s3-presigner cfg)]
(assoc cfg (assoc cfg
:client @client ::sto/type :s3
:presigner presigner ::client @client
:type :s3 ::presigner presigner
::close-fn #(.close ^java.lang.AutoCloseable client))))) ::close-fn #(.close ^java.lang.AutoCloseable client)))))
(defmethod ig/halt-key! ::backend (defmethod ig/halt-key! ::backend
@ -99,21 +105,27 @@
(when (fn? close-fn) (when (fn? close-fn)
(px/run! close-fn))) (px/run! close-fn)))
(s/def ::type ::us/keyword)
(s/def ::client #(instance? S3AsyncClient %)) (s/def ::client #(instance? S3AsyncClient %))
(s/def ::presigner #(instance? S3Presigner %)) (s/def ::presigner #(instance? S3Presigner %))
(s/def ::backend (s/def ::backend
(s/keys :req-un [::region ::bucket ::client ::type ::presigner] (s/keys :req [::region
:opt-un [::prefix])) ::bucket
::client
::presigner]
:opt [::prefix
::sto/id
::wrk/executor]))
;; --- API IMPL ;; --- API IMPL
(defmethod impl/put-object :s3 (defmethod impl/put-object :s3
[backend object content] [backend object content]
(us/assert! ::backend backend)
(put-object backend object content)) (put-object backend object content))
(defmethod impl/get-object-data :s3 (defmethod impl/get-object-data :s3
[backend object] [backend object]
(us/assert! ::backend backend)
(letfn [(no-such-key? [cause] (letfn [(no-such-key? [cause]
(instance? software.amazon.awssdk.services.s3.model.NoSuchKeyException cause)) (instance? software.amazon.awssdk.services.s3.model.NoSuchKeyException cause))
(handle-not-found [cause] (handle-not-found [cause]
@ -127,18 +139,22 @@
(defmethod impl/get-object-bytes :s3 (defmethod impl/get-object-bytes :s3
[backend object] [backend object]
(us/assert! ::backend backend)
(get-object-bytes backend object)) (get-object-bytes backend object))
(defmethod impl/get-object-url :s3 (defmethod impl/get-object-url :s3
[backend object options] [backend object options]
(us/assert! ::backend backend)
(get-object-url backend object options)) (get-object-url backend object options))
(defmethod impl/del-object :s3 (defmethod impl/del-object :s3
[backend object] [backend object]
(us/assert! ::backend backend)
(del-object backend object)) (del-object backend object))
(defmethod impl/del-objects-in-bulk :s3 (defmethod impl/del-objects-in-bulk :s3
[backend ids] [backend ids]
(us/assert! ::backend backend)
(del-object-in-bulk backend ids)) (del-object-in-bulk backend ids))
;; --- HELPERS ;; --- HELPERS
@ -152,8 +168,8 @@
[region] [region]
(Region/of (name region))) (Region/of (name region)))
(defn build-s3-client (defn- build-s3-client
[{:keys [region endpoint executor]}] [{:keys [::region ::endpoint ::wrk/executor]}]
(let [aconfig (-> (ClientAsyncConfiguration/builder) (let [aconfig (-> (ClientAsyncConfiguration/builder)
(.advancedOption SdkAdvancedAsyncClientOption/FUTURE_COMPLETION_EXECUTOR executor) (.advancedOption SdkAdvancedAsyncClientOption/FUTURE_COMPLETION_EXECUTOR executor)
(.build)) (.build))
@ -188,8 +204,8 @@
(.close ^NettyNioAsyncHttpClient hclient) (.close ^NettyNioAsyncHttpClient hclient)
(.close ^S3AsyncClient client))))) (.close ^S3AsyncClient client)))))
(defn build-s3-presigner (defn- build-s3-presigner
[{:keys [region endpoint]}] [{:keys [::region ::endpoint]}]
(let [config (-> (S3Configuration/builder) (let [config (-> (S3Configuration/builder)
(cond-> (some? endpoint) (.pathStyleAccessEnabled true)) (cond-> (some? endpoint) (.pathStyleAccessEnabled true))
(.build))] (.build))]
@ -200,65 +216,87 @@
(.serviceConfiguration ^S3Configuration config) (.serviceConfiguration ^S3Configuration config)
(.build)))) (.build))))
(defn- make-request-body (defn- upload-thread
[content] [id subscriber sem content]
(let [is (io/input-stream content) (px/thread
buff-size (* 1024 64) {:name "penpot/s3/uploader"
sem (Semaphore. 0) :daemon true}
(l/trace :hint "start upload thread"
writer-fn (fn [^Subscriber s] :object-id (str id)
:size (impl/get-size content)
::l/sync? true)
(let [stream (io/input-stream content)
bsize (* 1024 64)
tpoint (dt/tpoint)]
(try (try
(loop [] (loop []
(.acquire sem 1) (.acquire ^Semaphore sem 1)
(let [buffer (byte-array buff-size) (let [buffer (byte-array bsize)
readed (.read is buffer)] readed (.read ^InputStream stream buffer)]
(when (pos? readed) (when (pos? readed)
(.onNext ^Subscriber s (ByteBuffer/wrap buffer 0 readed)) (let [data (ByteBuffer/wrap ^bytes buffer 0 readed)]
(when (= readed buff-size) (.onNext ^Subscriber subscriber ^ByteBuffer data)
(recur))))) (when (= readed bsize)
(.onComplete s) (recur))))))
(.onComplete ^Subscriber subscriber)
(catch InterruptedException _
(l/trace :hint "interrupted upload thread"
:object-id (str id)
::l/sync? true)
nil)
(catch Throwable cause (catch Throwable cause
(.onError s cause)) (.onError ^Subscriber subscriber cause))
(finally (finally
(.close ^InputStream is))))] (l/trace :hint "end upload thread"
:object-id (str id)
:elapsed (dt/format-duration (tpoint))
::l/sync? true)
(.close ^InputStream stream))))))
(defn- make-request-body
[id content]
(reify (reify
AsyncRequestBody AsyncRequestBody
(contentLength [_] (contentLength [_]
(Optional/of (long (impl/get-size content)))) (Optional/of (long (impl/get-size content))))
(^void subscribe [_ ^Subscriber s] (^void subscribe [_ ^Subscriber subscriber]
(let [thread (Thread. #(writer-fn s))] (let [sem (Semaphore. 0)
(.setDaemon thread true) thr (upload-thread id subscriber sem content)]
(.setName thread "penpot/storage:s3") (.onSubscribe subscriber
(.start thread) (reify Subscription
(.onSubscribe s (reify Subscription
(cancel [_] (cancel [_]
(.interrupt thread) (px/interrupt! thr)
(.release sem 1)) (.release sem 1))
(request [_ n] (request [_ n]
(.release sem (int n)))))))))) (.release sem (int n)))))))))
(defn put-object (defn- put-object
[{:keys [client bucket prefix]} {:keys [id] :as object} content] [{:keys [::client ::bucket ::prefix]} {:keys [id] :as object} content]
(p/let [path (str prefix (impl/id->path id)) (let [path (dm/str prefix (impl/id->path id))
mdata (meta object) mdata (meta object)
mtype (:content-type mdata "application/octet-stream") mtype (:content-type mdata "application/octet-stream")
rbody (make-request-body id content)
request (.. (PutObjectRequest/builder) request (.. (PutObjectRequest/builder)
(bucket bucket) (bucket bucket)
(contentType mtype) (contentType mtype)
(key path) (key path)
(build))] (build))]
(->> (.putObject ^S3AsyncClient client
(let [content (make-request-body content)]
(.putObject ^S3AsyncClient client
^PutObjectRequest request ^PutObjectRequest request
^AsyncRequestBody content)))) ^AsyncRequestBody rbody)
(p/fmap (constantly object)))))
(defn get-object-data (defn- path->stream
[{:keys [client bucket prefix]} {:keys [id size]}] [path]
(proxy [FilterInputStream] [(io/input-stream path)]
(close []
(fs/delete path)
(proxy-super close))))
(defn- get-object-data
[{:keys [::client ::bucket ::prefix]} {:keys [id size]}]
(let [gor (.. (GetObjectRequest/builder) (let [gor (.. (GetObjectRequest/builder)
(bucket bucket) (bucket bucket)
(key (str prefix (impl/id->path id))) (key (str prefix (impl/id->path id)))
@ -267,65 +305,64 @@
;; If the file size is greater than 2MiB then stream the content ;; If the file size is greater than 2MiB then stream the content
;; to the filesystem and then read with buffered inputstream; if ;; to the filesystem and then read with buffered inputstream; if
;; not, read the content into memory using byte arrays. ;; not, read the content into memory using byte arrays.
(if (> size (* 1024 1024 2)) (if (> ^long size (* 1024 1024 2))
(p/let [path (tmp/tempfile :prefix "penpot.storage.s3.") (let [path (tmp/tempfile :prefix "penpot.storage.s3.")
rxf (AsyncResponseTransformer/toFile ^Path path) rxf (AsyncResponseTransformer/toFile ^Path path)]
_ (.getObject ^S3AsyncClient client (->> (.getObject ^S3AsyncClient client
^GetObjectRequest gor ^GetObjectRequest gor
^AsyncResponseTransformer rxf)] ^AsyncResponseTransformer rxf)
(proxy [FilterInputStream] [(io/input-stream path)] (p/fmap (constantly path))
(close [] (p/fmap path->stream)))
(fs/delete path)
(proxy-super close))))
(p/let [rxf (AsyncResponseTransformer/toBytes) (let [rxf (AsyncResponseTransformer/toBytes)]
obj (.getObject ^S3AsyncClient client (->> (.getObject ^S3AsyncClient client
^GetObjectRequest gor ^GetObjectRequest gor
^AsyncResponseTransformer rxf)] ^AsyncResponseTransformer rxf)
(.asInputStream ^ResponseBytes obj))))) (p/fmap #(.asInputStream ^ResponseBytes %)))))))
(defn get-object-bytes (defn- get-object-bytes
[{:keys [client bucket prefix]} {:keys [id]}] [{:keys [::client ::bucket ::prefix]} {:keys [id]}]
(p/let [gor (.. (GetObjectRequest/builder) (let [gor (.. (GetObjectRequest/builder)
(bucket bucket) (bucket bucket)
(key (str prefix (impl/id->path id))) (key (str prefix (impl/id->path id)))
(build)) (build))
rxf (AsyncResponseTransformer/toBytes) rxf (AsyncResponseTransformer/toBytes)]
obj (.getObject ^S3AsyncClient client (->> (.getObject ^S3AsyncClient client
^GetObjectRequest gor ^GetObjectRequest gor
^AsyncResponseTransformer rxf)] ^AsyncResponseTransformer rxf)
(.asByteArray ^ResponseBytes obj))) (p/fmap #(.asByteArray ^ResponseBytes %)))))
(def default-max-age (def default-max-age
(dt/duration {:minutes 10})) (dt/duration {:minutes 10}))
(defn get-object-url (defn- get-object-url
[{:keys [presigner bucket prefix]} {:keys [id]} {:keys [max-age] :or {max-age default-max-age}}] [{:keys [::presigner ::bucket ::prefix]} {:keys [id]} {:keys [max-age] :or {max-age default-max-age}}]
(us/assert dt/duration? max-age) (us/assert dt/duration? max-age)
(p/do
(let [gor (.. (GetObjectRequest/builder) (let [gor (.. (GetObjectRequest/builder)
(bucket bucket) (bucket bucket)
(key (str prefix (impl/id->path id))) (key (dm/str prefix (impl/id->path id)))
(build)) (build))
gopr (.. (GetObjectPresignRequest/builder) gopr (.. (GetObjectPresignRequest/builder)
(signatureDuration ^Duration max-age) (signatureDuration ^Duration max-age)
(getObjectRequest ^GetObjectRequest gor) (getObjectRequest ^GetObjectRequest gor)
(build)) (build))
pgor (.presignGetObject ^S3Presigner presigner ^GetObjectPresignRequest gopr)] pgor (.presignGetObject ^S3Presigner presigner ^GetObjectPresignRequest gopr)]
(p/resolved
(u/uri (str (.url ^PresignedGetObjectRequest pgor)))))) (u/uri (str (.url ^PresignedGetObjectRequest pgor))))))
(defn del-object (defn- del-object
[{:keys [bucket client prefix]} {:keys [id] :as obj}] [{:keys [::bucket ::client ::prefix]} {:keys [id] :as obj}]
(p/let [dor (.. (DeleteObjectRequest/builder) (let [dor (.. (DeleteObjectRequest/builder)
(bucket bucket) (bucket bucket)
(key (str prefix (impl/id->path id))) (key (dm/str prefix (impl/id->path id)))
(build))] (build))]
(.deleteObject ^S3AsyncClient client (->> (.deleteObject ^S3AsyncClient client ^DeleteObjectRequest dor)
^DeleteObjectRequest dor))) (p/fmap (constantly nil)))))
(defn del-object-in-bulk (defn- del-object-in-bulk
[{:keys [bucket client prefix]} ids] [{:keys [::bucket ::client ::prefix]} ids]
(p/let [oids (map (fn [id]
(let [oids (map (fn [id]
(.. (ObjectIdentifier/builder) (.. (ObjectIdentifier/builder)
(key (str prefix (impl/id->path id))) (key (str prefix (impl/id->path id)))
(build))) (build)))
@ -336,9 +373,10 @@
dor (.. (DeleteObjectsRequest/builder) dor (.. (DeleteObjectsRequest/builder)
(bucket bucket) (bucket bucket)
(delete ^Delete delc) (delete ^Delete delc)
(build)) (build))]
dres (.deleteObjects ^S3AsyncClient client
^DeleteObjectsRequest dor)] (->> (.deleteObjects ^S3AsyncClient client ^DeleteObjectsRequest dor)
(p/fmap (fn [dres]
(when (.hasErrors ^DeleteObjectsResponse dres) (when (.hasErrors ^DeleteObjectsResponse dres)
(let [errors (seq (.errors ^DeleteObjectsResponse dres))] (let [errors (seq (.errors ^DeleteObjectsResponse dres))]
(ex/raise :type :internal (ex/raise :type :internal
@ -346,4 +384,4 @@
:s3-errors (mapv (fn [^S3Error error] :s3-errors (mapv (fn [^S3Error error]
{:key (.key error) {:key (.key error)
:msg (.message error)}) :msg (.message error)})
errors)))))) errors)))))))))


@ -32,27 +32,24 @@
;; HANDLER ;; HANDLER
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(s/def ::min-age ::dt/duration)
(defmethod ig/pre-init-spec ::handler [_] (defmethod ig/pre-init-spec ::handler [_]
(s/keys :req-un [::db/pool ::min-age])) (s/keys :req [::db/pool]))
(defmethod ig/prep-key ::handler (defmethod ig/prep-key ::handler
[_ cfg] [_ cfg]
(merge {:min-age cf/deletion-delay} (assoc cfg ::min-age cf/deletion-delay))
(d/without-nils cfg)))
(defmethod ig/init-key ::handler (defmethod ig/init-key ::handler
[_ {:keys [pool] :as cfg}] [_ {:keys [::db/pool] :as cfg}]
(fn [{:keys [file-id] :as params}] (fn [{:keys [file-id] :as params}]
(db/with-atomic [conn pool] (db/with-atomic [conn pool]
(let [min-age (or (:min-age params) (:min-age cfg)) (let [min-age (or (:min-age params) (::min-age cfg))
cfg (assoc cfg :min-age min-age :conn conn :file-id file-id)] cfg (assoc cfg ::min-age min-age ::conn conn ::file-id file-id)]
(loop [total 0 (loop [total 0
files (retrieve-candidates cfg)] files (retrieve-candidates cfg)]
(if-let [file (first files)] (if-let [file (first files)]
(do (do
(process-file cfg file) (process-file conn file)
(recur (inc total) (recur (inc total)
(rest files))) (rest files)))
(do (do
@ -84,7 +81,7 @@
for update skip locked") for update skip locked")
(defn- retrieve-candidates (defn- retrieve-candidates
[{:keys [conn min-age file-id] :as cfg}] [{:keys [::conn ::min-age ::file-id]}]
(if (uuid? file-id) (if (uuid? file-id)
(do (do
(l/warn :hint "explicit file id passed on params" :file-id file-id) (l/warn :hint "explicit file id passed on params" :file-id file-id)
@ -256,7 +253,7 @@
(db/delete! conn :file-data-fragment {:id fragment-id :file-id file-id})))) (db/delete! conn :file-data-fragment {:id fragment-id :file-id file-id}))))
(defn- process-file (defn- process-file
[{:keys [conn] :as cfg} {:keys [id data revn modified-at features] :as file}] [conn {:keys [id data revn modified-at features] :as file}]
(l/debug :hint "processing file" :id id :modified-at modified-at) (l/debug :hint "processing file" :id id :modified-at modified-at)
(binding [pmap/*load-fn* (partial files/load-pointer conn id)] (binding [pmap/*load-fn* (partial files/load-pointer conn id)]


@ -8,42 +8,36 @@
"A maintenance task that performs a garbage collection of the file "A maintenance task that performs a garbage collection of the file
change (transaction) log." change (transaction) log."
(:require (:require
[app.common.data :as d]
[app.common.logging :as l] [app.common.logging :as l]
[app.db :as db] [app.db :as db]
[app.util.time :as dt] [app.util.time :as dt]
[clojure.spec.alpha :as s] [clojure.spec.alpha :as s]
[integrant.core :as ig])) [integrant.core :as ig]))
(declare sql:delete-files-xlog) (def ^:private
sql:delete-files-xlog
(s/def ::min-age ::dt/duration) "delete from file_change
where created_at < now() - ?::interval")
(defmethod ig/pre-init-spec ::handler [_] (defmethod ig/pre-init-spec ::handler [_]
(s/keys :req-un [::db/pool] (s/keys :req [::db/pool]))
:opt-un [::min-age]))
(defmethod ig/prep-key ::handler (defmethod ig/prep-key ::handler
[_ cfg] [_ cfg]
(merge {:min-age (dt/duration {:hours 72})} (assoc cfg ::min-age (dt/duration {:hours 72})))
(d/without-nils cfg)))
(defmethod ig/init-key ::handler (defmethod ig/init-key ::handler
[_ {:keys [pool] :as cfg}] [_ {:keys [::db/pool] :as cfg}]
(fn [params] (fn [params]
(let [min-age (or (:min-age params) (:min-age cfg))] (let [min-age (or (:min-age params) (::min-age cfg))]
(db/with-atomic [conn pool] (db/with-atomic [conn pool]
(let [interval (db/interval min-age) (let [interval (db/interval min-age)
result (db/exec-one! conn [sql:delete-files-xlog interval]) result (db/exec-one! conn [sql:delete-files-xlog interval])
result (:next.jdbc/update-count result)] result (db/get-update-count result)]
(l/info :hint "task finished" :min-age (dt/format-duration min-age) :total result) (l/info :hint "task finished" :min-age (dt/format-duration min-age) :total result)
(when (:rollback? params) (when (:rollback? params)
(db/rollback! conn)) (db/rollback! conn))
result))))) result)))))
(def ^:private
sql:delete-files-xlog
"delete from file_change
where created_at < now() - ?::interval")
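The handler still accepts per-run params, so the min-age default set in prep-key can be overridden and a whole run can be rolled back. A hypothetical dry-run invocation (handler stands for the fn returned by ig/init-key; it is not named in this diff):

(comment
  ;; count the file_change rows older than 24h that would be deleted, then roll back
  (handler {:min-age (dt/duration {:hours 24}) :rollback? true}))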

View file

@@ -25,16 +25,12 @@
(declare ^:private delete-files!) (declare ^:private delete-files!)
(declare ^:private delete-orphan-teams!) (declare ^:private delete-orphan-teams!)
(s/def ::min-age ::dt/duration)
(defmethod ig/pre-init-spec ::handler [_] (defmethod ig/pre-init-spec ::handler [_]
(s/keys :req [::db/pool ::sto/storage] (s/keys :req [::db/pool ::sto/storage]))
:opt [::min-age]))
(defmethod ig/prep-key ::handler (defmethod ig/prep-key ::handler
[_ cfg] [_ cfg]
(merge {::min-age cf/deletion-delay} (assoc cfg ::min-age cf/deletion-delay))
(d/without-nils cfg)))
(defmethod ig/init-key ::handler (defmethod ig/init-key ::handler
[_ {:keys [::db/pool ::sto/storage] :as cfg}] [_ {:keys [::db/pool ::sto/storage] :as cfg}]
@@ -133,7 +129,6 @@
:kf first :kf first
:initk (dt/now))))) :initk (dt/now)))))
(def ^:private sql:get-orphan-teams-chunk (def ^:private sql:get-orphan-teams-chunk
"select t.id, t.created_at "select t.id, t.created_at
from team as t from team as t
@@ -154,14 +149,15 @@
[(some->> rows peek :created-at) rows]))] [(some->> rows peek :created-at) rows]))]
(reduce (reduce
(fn [total {:keys [id]}] (fn [total {:keys [id]}]
(l/debug :hint "mark team for deletion" :id (str id)) (let [result (db/update! conn :team
;; And finally, permanently delete the team.
(db/update! conn :team
{:deleted-at (dt/now)} {:deleted-at (dt/now)}
{:id id}) {:id id :deleted-at nil}
{::db/return-keys? false})
count (db/get-update-count result)]
(when (pos? count)
(l/debug :hint "mark team for deletion" :id (str id) ))
(inc total)) (+ total count)))
0 0
(d/iteration get-chunk (d/iteration get-chunk
:vf second :vf second
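db/get-update-count replaces the direct :next.jdbc/update-count lookups used before this refactor, and here it lets the loop count only teams that were actually updated (the tightened where clause skips teams already marked as deleted). Its presumed shape, inferred from that substitution rather than taken from app.db:

(defn get-update-count
  "Number of rows affected by the last statement, as reported by next.jdbc."
  [result]
  (:next.jdbc/update-count result))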

View file

@@ -8,35 +8,33 @@
"A maintenance task that performs a cleanup of already executed tasks "A maintenance task that performs a cleanup of already executed tasks
from the database table." from the database table."
(:require (:require
[app.common.data :as d]
[app.common.logging :as l] [app.common.logging :as l]
[app.config :as cf] [app.config :as cf]
[app.db :as db] [app.db :as db]
[app.util.time :as dt]
[clojure.spec.alpha :as s] [clojure.spec.alpha :as s]
[integrant.core :as ig])) [integrant.core :as ig]))
(declare sql:delete-completed-tasks) (def ^:private
sql:delete-completed-tasks
(s/def ::min-age ::dt/duration) "delete from task_completed
where scheduled_at < now() - ?::interval")
(defmethod ig/pre-init-spec ::handler [_] (defmethod ig/pre-init-spec ::handler [_]
(s/keys :req-un [::db/pool] (s/keys :req [::db/pool]))
:opt-un [::min-age]))
(defmethod ig/prep-key ::handler (defmethod ig/prep-key ::handler
[_ cfg] [_ cfg]
(merge {:min-age cf/deletion-delay} (assoc cfg ::min-age cf/deletion-delay))
(d/without-nils cfg)))
(defmethod ig/init-key ::handler (defmethod ig/init-key ::handler
[_ {:keys [pool] :as cfg}] [_ {:keys [::db/pool ::min-age] :as cfg}]
(fn [params] (fn [params]
(let [min-age (or (:min-age params) (:min-age cfg))] (let [min-age (or (:min-age params) min-age)]
(db/with-atomic [conn pool] (db/with-atomic [conn pool]
(let [interval (db/interval min-age) (let [interval (db/interval min-age)
result (db/exec-one! conn [sql:delete-completed-tasks interval]) result (db/exec-one! conn [sql:delete-completed-tasks interval])
result (:next.jdbc/update-count result)] result (db/get-update-count result)]
(l/debug :hint "task finished" :total result) (l/debug :hint "task finished" :total result)
(when (:rollback? params) (when (:rollback? params)
@@ -44,7 +42,3 @@
result))))) result)))))
(def ^:private
sql:delete-completed-tasks
"delete from task_completed
where scheduled_at < now() - ?::interval")
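These handler specs also move from :req-un to :req, so the integrant configuration must now supply fully qualified keys. A minimal spec illustration of the difference; the some? stand-in replaces the real pool? predicate only for the example:

(require '[clojure.spec.alpha :as s])

(s/def :app.db/pool some?) ;; stand-in predicate for the sketch only

(s/valid? (s/keys :req-un [:app.db/pool]) {:pool ::a-pool})       ;; => true  (unqualified key)
(s/valid? (s/keys :req [:app.db/pool]) {:app.db/pool ::a-pool})   ;; => true  (qualified key)
(s/valid? (s/keys :req [:app.db/pool]) {:pool ::a-pool})          ;; => false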

View file

@@ -90,10 +90,10 @@
(s/def ::registry (s/map-of ::us/string fn?)) (s/def ::registry (s/map-of ::us/string fn?))
(defmethod ig/pre-init-spec ::registry [_] (defmethod ig/pre-init-spec ::registry [_]
(s/keys :req-un [::mtx/metrics ::tasks])) (s/keys :req [::mtx/metrics ::tasks]))
(defmethod ig/init-key ::registry (defmethod ig/init-key ::registry
[_ {:keys [metrics tasks]}] [_ {:keys [::mtx/metrics ::tasks]}]
(l/info :hint "registry initialized" :tasks (count tasks)) (l/info :hint "registry initialized" :tasks (count tasks))
(reduce-kv (fn [registry k v] (reduce-kv (fn [registry k v]
(let [tname (name k)] (let [tname (name k)]
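The registry body is cut off by the hunk, but the visible reduce-kv keys each handler by its task name; a reduced sketch of that transformation with the metrics wrapping omitted (task names and fns are illustrative):

(reduce-kv (fn [registry k f]
             (assoc registry (name k) f))
           {}
           {:file-gc  (fn [_] :file-gc-done)
            :tasks-gc (fn [_] :tasks-gc-done)})
;; => a string-keyed map: {"file-gc" <fn>, "tasks-gc" <fn>}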

View file

@@ -44,8 +44,8 @@
(let [storage (:app.storage/storage th/*system*) (let [storage (:app.storage/storage th/*system*)
mobj1 @(sto/get-object storage media-id) mobj1 @(sto/get-object storage media-id)
mobj2 @(sto/get-object storage thumbnail-id)] mobj2 @(sto/get-object storage thumbnail-id)]
(t/is (sto/storage-object? mobj1)) (t/is (sto/object? mobj1))
(t/is (sto/storage-object? mobj2)) (t/is (sto/object? mobj2))
(t/is (= 122785 (:size mobj1))) (t/is (= 122785 (:size mobj1)))
;; This is because Ubuntu 21.04 generates a different ;; This is because Ubuntu 21.04 generates a different
;; thumbnail than Ubuntu 22.04. This hack should be removed ;; thumbnail than Ubuntu 22.04. This hack should be removed
@@ -85,8 +85,8 @@
(let [storage (:app.storage/storage th/*system*) (let [storage (:app.storage/storage th/*system*)
mobj1 @(sto/get-object storage media-id) mobj1 @(sto/get-object storage media-id)
mobj2 @(sto/get-object storage thumbnail-id)] mobj2 @(sto/get-object storage thumbnail-id)]
(t/is (sto/storage-object? mobj1)) (t/is (sto/object? mobj1))
(t/is (sto/storage-object? mobj2)) (t/is (sto/object? mobj2))
(t/is (= 312043 (:size mobj1))) (t/is (= 312043 (:size mobj1)))
(t/is (= 3887 (:size mobj2))))) (t/is (= 3887 (:size mobj2)))))
)) ))
@@ -164,8 +164,8 @@
(let [storage (:app.storage/storage th/*system*) (let [storage (:app.storage/storage th/*system*)
mobj1 @(sto/get-object storage media-id) mobj1 @(sto/get-object storage media-id)
mobj2 @(sto/get-object storage thumbnail-id)] mobj2 @(sto/get-object storage thumbnail-id)]
(t/is (sto/storage-object? mobj1)) (t/is (sto/object? mobj1))
(t/is (sto/storage-object? mobj2)) (t/is (sto/object? mobj2))
(t/is (= 122785 (:size mobj1))) (t/is (= 122785 (:size mobj1)))
;; This is because Ubuntu 21.04 generates a different ;; This is because Ubuntu 21.04 generates a different
;; thumbnail than Ubuntu 22.04. This hack should be removed ;; thumbnail than Ubuntu 22.04. This hack should be removed
@@ -205,8 +205,8 @@
(let [storage (:app.storage/storage th/*system*) (let [storage (:app.storage/storage th/*system*)
mobj1 @(sto/get-object storage media-id) mobj1 @(sto/get-object storage media-id)
mobj2 @(sto/get-object storage thumbnail-id)] mobj2 @(sto/get-object storage thumbnail-id)]
(t/is (sto/storage-object? mobj1)) (t/is (sto/object? mobj1))
(t/is (sto/storage-object? mobj2)) (t/is (sto/object? mobj2))
(t/is (= 312043 (:size mobj1))) (t/is (= 312043 (:size mobj1)))
(t/is (= 3887 (:size mobj2))))) (t/is (= 3887 (:size mobj2)))))
)) ))

View file

@@ -27,11 +27,11 @@
"Given storage map, returns a storage configured with the appropriate "Given storage map, returns a storage configured with the appropriate
backend for assets." backend for assets."
([storage] ([storage]
(assoc storage :backend :assets-fs)) (assoc storage ::sto/backend :assets-fs))
([storage conn] ([storage conn]
(-> storage (-> storage
(assoc :conn conn) (assoc ::db/pool-or-conn conn)
(assoc :backend :assets-fs)))) (assoc ::sto/backend :assets-fs))))
(t/deftest put-and-retrieve-object (t/deftest put-and-retrieve-object
(let [storage (-> (:app.storage/storage th/*system*) (let [storage (-> (:app.storage/storage th/*system*)
@@ -40,8 +40,10 @@
object @(sto/put-object! storage {::sto/content content object @(sto/put-object! storage {::sto/content content
:content-type "text/plain" :content-type "text/plain"
:other "data"})] :other "data"})]
(t/is (sto/storage-object? object))
(t/is (sto/object? object))
(t/is (fs/path? @(sto/get-object-path storage object))) (t/is (fs/path? @(sto/get-object-path storage object)))
(t/is (nil? (:expired-at object))) (t/is (nil? (:expired-at object)))
(t/is (= :assets-fs (:backend object))) (t/is (= :assets-fs (:backend object)))
(t/is (= "data" (:other (meta object)))) (t/is (= "data" (:other (meta object))))
@@ -58,7 +60,8 @@
::sto/expired-at (dt/in-future {:seconds 1}) ::sto/expired-at (dt/in-future {:seconds 1})
:content-type "text/plain" :content-type "text/plain"
})] })]
(t/is (sto/storage-object? object))
(t/is (sto/object? object))
(t/is (dt/instant? (:expired-at object))) (t/is (dt/instant? (:expired-at object)))
(t/is (dt/is-after? (:expired-at object) (dt/now))) (t/is (dt/is-after? (:expired-at object) (dt/now)))
(t/is (= object @(sto/get-object storage (:id object)))) (t/is (= object @(sto/get-object storage (:id object))))
@@ -77,7 +80,7 @@
object @(sto/put-object! storage {::sto/content content object @(sto/put-object! storage {::sto/content content
:content-type "text/plain" :content-type "text/plain"
:expired-at (dt/in-future {:seconds 1})})] :expired-at (dt/in-future {:seconds 1})})]
(t/is (sto/storage-object? object)) (t/is (sto/object? object))
(t/is (true? @(sto/del-object! storage object))) (t/is (true? @(sto/del-object! storage object)))
;; retrieving the same object should not be nil because the ;; retrieving the same object should not be nil because the

View file

@@ -8,7 +8,8 @@
"A collection if helpers for working with data structures and other "A collection if helpers for working with data structures and other
data resources." data resources."
(:refer-clojure :exclude [read-string hash-map merge name update-vals (:refer-clojure :exclude [read-string hash-map merge name update-vals
parse-double group-by iteration concat mapcat]) parse-double group-by iteration concat mapcat
parse-uuid])
#?(:cljs #?(:cljs
(:require-macros [app.common.data])) (:require-macros [app.common.data]))
@@ -17,6 +18,7 @@
:clj [clojure.edn :as r]) :clj [clojure.edn :as r])
#?(:cljs [cljs.core :as c] #?(:cljs [cljs.core :as c]
:clj [clojure.core :as c]) :clj [clojure.core :as c])
[app.common.exceptions :as ex]
[app.common.math :as mth] [app.common.math :as mth]
[clojure.set :as set] [clojure.set :as set]
[cuerdas.core :as str] [cuerdas.core :as str]
@@ -516,6 +518,10 @@
default default
v)))) v))))
(defn parse-uuid
[v]
(ex/ignoring (c/parse-uuid v)))
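The new d/parse-uuid differs from clojure.core/parse-uuid in that it is assumed never to throw: ex/ignoring presumably swallows the exception raised for non-string input and returns nil. A usage sketch (REPL, not part of the commit):

(d/parse-uuid "c72d1df0-1116-4c7e-ae64-b8c14f3bbf05")
;; => #uuid "c72d1df0-1116-4c7e-ae64-b8c14f3bbf05"

(d/parse-uuid "not-a-uuid") ;; => nil
(d/parse-uuid nil)          ;; => nil instead of throwing (core's parse-uuid throws on non-string input)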
(defn num-string? [v] (defn num-string? [v]
;; https://stackoverflow.com/questions/175739/built-in-way-in-javascript-to-check-if-a-string-is-a-valid-number ;; https://stackoverflow.com/questions/175739/built-in-way-in-javascript-to-check-if-a-string-is-a-valid-number
#?(:cljs (and (string? v) #?(:cljs (and (string? v)