♻️ Refactor backend to be more async friendly

This commit is contained in:
Andrey Antukh 2022-02-28 17:15:58 +01:00 committed by Alonso Torres
parent 087d896569
commit 9e4a50fb15
49 changed files with 1503 additions and 1378 deletions

View file

@@ -19,8 +19,8 @@
io.lettuce/lettuce-core {:mvn/version "6.1.6.RELEASE"} io.lettuce/lettuce-core {:mvn/version "6.1.6.RELEASE"}
java-http-clj/java-http-clj {:mvn/version "0.4.3"} java-http-clj/java-http-clj {:mvn/version "0.4.3"}
funcool/yetti {:git/tag "v4.0" :git/sha "59ed2a7" funcool/yetti {:git/tag "v5.0" :git/sha "f7d61e2"
:git/url "https://github.com/funcool/yetti.git" :git/url "https://github.com/funcool/yetti"
:exclusions [org.slf4j/slf4j-api]} :exclusions [org.slf4j/slf4j-api]}
com.github.seancorfield/next.jdbc {:mvn/version "1.2.772"} com.github.seancorfield/next.jdbc {:mvn/version "1.2.772"}

View file

@@ -1,129 +0,0 @@
;; This Source Code Form is subject to the terms of the Mozilla Public
;; License, v. 2.0. If a copy of the MPL was not distributed with this
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
;;
;; Copyright (c) UXBOX Labs SL
(ns app.cli.migrate-media
(:require
[app.common.logging :as l]
[app.common.media :as cm]
[app.config :as cf]
[app.db :as db]
[app.main :as main]
[app.storage :as sto]
[cuerdas.core :as str]
[datoteka.core :as fs]
[integrant.core :as ig]))
(declare migrate-profiles)
(declare migrate-teams)
(declare migrate-file-media)
(defn run-in-system
  "Run all media migrations inside a single transaction taken from the
  system's db pool; either every migration commits or all roll back.
  Returns the (unmodified) system map."
  [system]
  (db/with-atomic [conn (:app.db/pool system)]
    ;; Expose the transactional connection to the migration steps
    ;; under the ::conn key; run the steps in declaration order.
    (let [system' (assoc system ::conn conn)]
      (doseq [step [migrate-profiles migrate-teams migrate-file-media]]
        (step system')))
    system))
(defn run
  "CLI entry point: boot a minimal integrant system (db pool, migrations,
  metrics and every storage backend), execute the media migration inside
  it and halt the system afterwards. Errors are logged, not rethrown."
  []
  (let [cfg (select-keys main/system-config
                         [:app.db/pool
                          :app.migrations/migrations
                          :app.metrics/metrics
                          :app.storage.s3/backend
                          :app.storage.db/backend
                          :app.storage.fs/backend
                          :app.storage/storage])]
    ;; Load the namespaces backing each selected integrant key before init.
    (ig/load-namespaces cfg)
    (try
      (-> cfg
          (ig/prep)
          (ig/init)
          (run-in-system)
          (ig/halt!))
      (catch Exception cause
        (l/error :hint "unhandled exception" :cause cause)))))
;; --- IMPL
(defn migrate-profiles
  "Move every profile photo from the legacy fs storage directory into the
  configured storage backend, linking it via the profile's `photo-id`."
  [{:keys [::conn] :as system}]
  (let [base    (fs/path (cf/get :storage-fs-old-directory))
        ;; Bind the transactional connection to the storage instance so
        ;; object rows are written inside the same transaction.
        storage (-> (:app.storage/storage system)
                    (assoc :conn conn))
        rows    (->> (db/exec! conn ["select * from profile"])
                     (remove #(str/empty? (:photo %)))
                     (seq))]
    (doseq [{:keys [id photo]} rows]
      (let [path  (fs/path photo)
            full  (fs/normalize (fs/join base path))
            mtype (cm/format->mtype (keyword (fs/ext path)))
            sobj  (sto/put-object storage {:content (sto/content full)
                                           :content-type mtype})]
        (db/update! conn :profile
                    {:photo-id (:id sobj)}
                    {:id id})))))
(defn migrate-teams
  "Move every team photo from the legacy fs storage directory into the
  configured storage backend, linking it via the team's `photo-id`."
  [{:keys [::conn] :as system}]
  (let [base    (fs/path (cf/get :storage-fs-old-directory))
        ;; Bind the transactional connection to the storage instance so
        ;; object rows are written inside the same transaction.
        storage (-> (:app.storage/storage system)
                    (assoc :conn conn))
        rows    (->> (db/exec! conn ["select * from team"])
                     (remove #(str/empty? (:photo %)))
                     (seq))]
    (doseq [{:keys [id photo]} rows]
      (let [path  (fs/path photo)
            full  (fs/normalize (fs/join base path))
            mtype (cm/format->mtype (keyword (fs/ext path)))
            sobj  (sto/put-object storage {:content (sto/content full)
                                           :content-type mtype})]
        (db/update! conn :team
                    {:photo-id (:id sobj)}
                    {:id id})))))
(defn migrate-file-media
"Move every file-media object and its thumbnail from the legacy fs
storage into the configured storage backend, linking them through the
`media-id` and `thumbnail-id` columns."
[{:keys [::conn] :as system}]
;; Fetch each media object joined with its thumbnail path; the column
;; alias thumbnail_path surfaces as :thumbnail-path on the row map.
(letfn [(retrieve-media-objects [conn]
(->> (db/exec! conn ["select fmo.id, fmo.path, fth.path as thumbnail_path
from file_media_object as fmo
join file_media_thumbnail as fth on (fth.media_object_id = fmo.id)"])
(seq)))]
;; Bind the transactional connection to the storage instance so object
;; rows are written inside the same transaction.
(let [base (fs/path (cf/get :storage-fs-old-directory))
storage (-> (:app.storage/storage system)
(assoc :conn conn))]
(doseq [mobj (retrieve-media-objects conn)]
;; Resolve both legacy paths under the old storage root, derive the
;; mime type from each file extension, upload both objects, then
;; point the row at the freshly-created storage object ids.
(let [img-path (fs/path (:path mobj))
thm-path (fs/path (:thumbnail-path mobj))
img-path (-> (fs/join base img-path)
(fs/normalize))
thm-path (-> (fs/join base thm-path)
(fs/normalize))
img-ext (fs/ext img-path)
thm-ext (fs/ext thm-path)
img-mtype (cm/format->mtype (keyword img-ext))
thm-mtype (cm/format->mtype (keyword thm-ext))
img-obj (sto/put-object storage {:content (sto/content img-path)
:content-type img-mtype})
thm-obj (sto/put-object storage {:content (sto/content thm-path)
:content-type thm-mtype})]
(db/update! conn :file-media-object
{:media-id (:id img-obj)
:thumbnail-id (:id thm-obj)}
{:id (:id mobj)}))))))

View file

@ -47,7 +47,7 @@
:database-username "penpot" :database-username "penpot"
:database-password "penpot" :database-password "penpot"
:default-blob-version 3 :default-blob-version 4
:loggers-zmq-uri "tcp://localhost:45556" :loggers-zmq-uri "tcp://localhost:45556"
:file-change-snapshot-every 5 :file-change-snapshot-every 5
@ -65,8 +65,8 @@
:assets-path "/internal/assets/" :assets-path "/internal/assets/"
:rlimit-password 10 :rlimit-password 10
:rlimit-image 2 :rlimit-image 10
:rlimit-font 5 :rlimit-font 10
:smtp-default-reply-to "Penpot <no-reply@example.com>" :smtp-default-reply-to "Penpot <no-reply@example.com>"
:smtp-default-from "Penpot <no-reply@example.com>" :smtp-default-from "Penpot <no-reply@example.com>"

View file

@ -247,7 +247,7 @@
([ds table params where opts] ([ds table params where opts]
(exec-one! ds (exec-one! ds
(sql/update table params where opts) (sql/update table params where opts)
(assoc opts :return-keys true)))) (merge {:return-keys true} opts))))
(defn delete! (defn delete!
([ds table params] (delete! ds table params nil)) ([ds table params] (delete! ds table params nil))

View file

@ -29,6 +29,7 @@
;; HTTP SERVER ;; HTTP SERVER
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(s/def ::session map?)
(s/def ::handler fn?) (s/def ::handler fn?)
(s/def ::router some?) (s/def ::router some?)
(s/def ::port ::us/integer) (s/def ::port ::us/integer)
@ -47,7 +48,7 @@
(d/without-nils cfg))) (d/without-nils cfg)))
(defmethod ig/pre-init-spec ::server [_] (defmethod ig/pre-init-spec ::server [_]
(s/keys :req-un [::port ::host ::name ::min-threads ::max-threads] (s/keys :req-un [::port ::host ::name ::min-threads ::max-threads ::session]
:opt-un [::mtx/metrics ::router ::handler])) :opt-un [::mtx/metrics ::router ::handler]))
(defn- instrument-metrics (defn- instrument-metrics
@ -59,37 +60,39 @@
server)) server))
(defmethod ig/init-key ::server (defmethod ig/init-key ::server
[_ {:keys [handler router port name metrics host] :as opts}] [_ {:keys [handler router port name metrics host] :as cfg}]
(l/info :hint "starting http server" (l/info :hint "starting http server"
:port port :host host :name name :port port :host host :name name
:min-threads (:min-threads opts) :min-threads (:min-threads cfg)
:max-threads (:max-threads opts)) :max-threads (:max-threads cfg))
(let [options {:http/port port (let [options {:http/port port
:http/host host :http/host host
:thread-pool/max-threads (:max-threads opts) :thread-pool/max-threads (:max-threads cfg)
:thread-pool/min-threads (:min-threads opts) :thread-pool/min-threads (:min-threads cfg)
:ring/async true} :ring/async true}
handler (cond handler (cond
(fn? handler) handler (fn? handler) handler
(some? router) (wrap-router router) (some? router) (wrap-router cfg router)
:else (ex/raise :type :internal :else (ex/raise :type :internal
:code :invalid-argument :code :invalid-argument
:hint "Missing `handler` or `router` option.")) :hint "Missing `handler` or `router` option."))
server (-> (yt/server handler (d/without-nils options)) server (-> (yt/server handler (d/without-nils options))
(cond-> metrics (instrument-metrics metrics)))] (cond-> metrics (instrument-metrics metrics)))]
(assoc opts :server (yt/start! server)))) (assoc cfg :server (yt/start! server))))
(defmethod ig/halt-key! ::server (defmethod ig/halt-key! ::server
[_ {:keys [server name port] :as opts}] [_ {:keys [server name port] :as cfg}]
(l/info :msg "stoping http server" :name name :port port) (l/info :msg "stoping http server" :name name :port port)
(yt/stop! server)) (yt/stop! server))
(defn- wrap-router (defn- wrap-router
[router] [{:keys [session] :as cfg} router]
(let [default (rr/routes (let [default (rr/routes
(rr/create-resource-handler {:path "/"}) (rr/create-resource-handler {:path "/"})
(rr/create-default-handler)) (rr/create-default-handler))
options {:middleware [middleware/wrap-server-timing] options {:middleware [[middleware/wrap-server-timing]
[middleware/cookies]
[(:middleware session)]]
:inject-match? false :inject-match? false
:inject-router? false} :inject-router? false}
handler (rr/ring-handler router default options)] handler (rr/ring-handler router default options)]
@ -106,28 +109,25 @@
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(s/def ::rpc map?) (s/def ::rpc map?)
(s/def ::session map?)
(s/def ::oauth map?) (s/def ::oauth map?)
(s/def ::storage map?) (s/def ::storage map?)
(s/def ::assets map?) (s/def ::assets map?)
(s/def ::feedback fn?) (s/def ::feedback fn?)
(s/def ::ws fn?) (s/def ::ws fn?)
(s/def ::audit-http-handler fn?) (s/def ::audit-handler fn?)
(s/def ::debug map?) (s/def ::debug map?)
(s/def ::awsns-handler fn?)
(defmethod ig/pre-init-spec ::router [_] (defmethod ig/pre-init-spec ::router [_]
(s/keys :req-un [::rpc ::session ::mtx/metrics ::ws (s/keys :req-un [::rpc ::mtx/metrics ::ws ::oauth ::storage ::assets
::oauth ::storage ::assets ::feedback ::feedback ::awsns-handler ::debug ::audit-handler]))
::debug ::audit-http-handler]))
(defmethod ig/init-key ::router (defmethod ig/init-key ::router
[_ {:keys [ws session rpc oauth metrics assets feedback debug] :as cfg}] [_ {:keys [ws session rpc oauth metrics assets feedback debug] :as cfg}]
(rr/router (rr/router
[["/metrics" {:get (:handler metrics)}] [["/metrics" {:get (:handler metrics)}]
["/assets" {:middleware [[middleware/format-response-body] ["/assets" {:middleware [[middleware/format-response-body]
[middleware/errors errors/handle] [middleware/errors errors/handle]]}
[middleware/cookies]
(:middleware session)]}
["/by-id/:id" {:get (:objects-handler assets)}] ["/by-id/:id" {:get (:objects-handler assets)}]
["/by-file-media-id/:id" {:get (:file-objects-handler assets)}] ["/by-file-media-id/:id" {:get (:file-objects-handler assets)}]
["/by-file-media-id/:id/thumbnail" {:get (:file-thumbnails-handler assets)}]] ["/by-file-media-id/:id/thumbnail" {:get (:file-thumbnails-handler assets)}]]
@ -136,9 +136,7 @@
[middleware/params] [middleware/params]
[middleware/keyword-params] [middleware/keyword-params]
[middleware/format-response-body] [middleware/format-response-body]
[middleware/errors errors/handle] [middleware/errors errors/handle]]}
[middleware/cookies]
[(:middleware session)]]}
["" {:get (:index debug)}] ["" {:get (:index debug)}]
["/error-by-id/:id" {:get (:retrieve-error debug)}] ["/error-by-id/:id" {:get (:retrieve-error debug)}]
["/error/:id" {:get (:retrieve-error debug)}] ["/error/:id" {:get (:retrieve-error debug)}]
@ -148,15 +146,13 @@
["/file/changes" {:get (:retrieve-file-changes debug)}]] ["/file/changes" {:get (:retrieve-file-changes debug)}]]
["/webhooks" ["/webhooks"
["/sns" {:post (:sns-webhook cfg)}]] ["/sns" {:post (:awsns-handler cfg)}]]
["/ws/notifications" ["/ws/notifications"
{:middleware [[middleware/params] {:middleware [[middleware/params]
[middleware/keyword-params] [middleware/keyword-params]
[middleware/format-response-body] [middleware/format-response-body]
[middleware/errors errors/handle] [middleware/errors errors/handle]]
[middleware/cookies]
[(:middleware session)]]
:get ws}] :get ws}]
["/api" {:middleware [[middleware/cors] ["/api" {:middleware [[middleware/cors]
@ -165,8 +161,7 @@
[middleware/keyword-params] [middleware/keyword-params]
[middleware/format-response-body] [middleware/format-response-body]
[middleware/parse-request-body] [middleware/parse-request-body]
[middleware/errors errors/handle] [middleware/errors errors/handle]]}
[middleware/cookies]]}
["/health" {:get (:health-check debug)}] ["/health" {:get (:health-check debug)}]
["/_doc" {:get (doc/handler rpc)}] ["/_doc" {:get (doc/handler rpc)}]
@ -175,10 +170,9 @@
["/auth/oauth/:provider" {:post (:handler oauth)}] ["/auth/oauth/:provider" {:post (:handler oauth)}]
["/auth/oauth/:provider/callback" {:get (:callback-handler oauth)}] ["/auth/oauth/:provider/callback" {:get (:callback-handler oauth)}]
["/audit/events" {:middleware [(:middleware session)] ["/audit/events" {:post (:audit-handler cfg)}]
:post (:audit-http-handler cfg)}]
["/rpc" {:middleware [(:middleware session)]} ["/rpc"
["/query/:type" {:get (:query-handler rpc) ["/query/:type" {:get (:query-handler rpc)
:post (:query-handler rpc)}] :post (:query-handler rpc)}]
["/mutation/:type" {:post (:mutation-handler rpc)}]]]])) ["/mutation/:type" {:post (:mutation-handler rpc)}]]]]))

View file

@ -13,12 +13,12 @@
[app.db :as db] [app.db :as db]
[app.metrics :as mtx] [app.metrics :as mtx]
[app.storage :as sto] [app.storage :as sto]
[app.util.async :as async]
[app.util.time :as dt] [app.util.time :as dt]
[app.worker :as wrk] [app.worker :as wrk]
[clojure.spec.alpha :as s] [clojure.spec.alpha :as s]
[integrant.core :as ig] [integrant.core :as ig]
[promesa.core :as p])) [promesa.core :as p]
[promesa.exec :as px]))
(def ^:private cache-max-age (def ^:private cache-max-age
(dt/duration {:hours 24})) (dt/duration {:hours 24}))
@ -35,27 +35,31 @@
res)) res))
(defn- get-file-media-object (defn- get-file-media-object
[{:keys [pool] :as storage} id] [{:keys [pool executor] :as storage} id]
(px/with-dispatch executor
(let [id (coerce-id id) (let [id (coerce-id id)
mobj (db/exec-one! pool ["select * from file_media_object where id=?" id])] mobj (db/exec-one! pool ["select * from file_media_object where id=?" id])]
(when-not mobj (when-not mobj
(ex/raise :type :not-found (ex/raise :type :not-found
:hint "object does not found")) :hint "object does not found"))
mobj)) mobj)))
(defn- serve-object (defn- serve-object
"Helper function that returns the appropriate responde depending on
the storage object backend type."
[{:keys [storage] :as cfg} obj] [{:keys [storage] :as cfg} obj]
(let [mdata (meta obj) (let [mdata (meta obj)
backend (sto/resolve-backend storage (:backend obj))] backend (sto/resolve-backend storage (:backend obj))]
(case (:type backend) (case (:type backend)
:db :db
(p/let [body (sto/get-object-bytes storage obj)]
{:status 200 {:status 200
:headers {"content-type" (:content-type mdata) :headers {"content-type" (:content-type mdata)
"cache-control" (str "max-age=" (inst-ms cache-max-age))} "cache-control" (str "max-age=" (inst-ms cache-max-age))}
:body (sto/get-object-bytes storage obj)} :body body})
:s3 :s3
(let [{:keys [host port] :as url} (sto/get-object-url storage obj {:max-age signature-max-age})] (p/let [{:keys [host port] :as url} (sto/get-object-url storage obj {:max-age signature-max-age})]
{:status 307 {:status 307
:headers {"location" (str url) :headers {"location" (str url)
"x-host" (cond-> host port (str ":" port)) "x-host" (cond-> host port (str ":" port))
@ -63,7 +67,7 @@
:body ""}) :body ""})
:fs :fs
(let [purl (u/uri (:assets-path cfg)) (p/let [purl (u/uri (:assets-path cfg))
purl (u/join purl (sto/object->relative-path obj))] purl (u/join purl (sto/object->relative-path obj))]
{:status 204 {:status 204
:headers {"x-accel-redirect" (:path purl) :headers {"x-accel-redirect" (:path purl)
@ -71,35 +75,41 @@
"cache-control" (str "max-age=" (inst-ms cache-max-age))} "cache-control" (str "max-age=" (inst-ms cache-max-age))}
:body ""})))) :body ""}))))
(defn- generic-handler
[{:keys [storage executor] :as cfg} request kf]
(async/with-dispatch executor
(let [id (get-in request [:path-params :id])
mobj (get-file-media-object storage id)
obj (sto/get-object storage (kf mobj))]
(if obj
(serve-object cfg obj)
{:status 404 :body ""}))))
(defn objects-handler (defn objects-handler
"Handler that servers storage objects by id."
[{:keys [storage executor] :as cfg} request respond raise] [{:keys [storage executor] :as cfg} request respond raise]
(-> (async/with-dispatch executor (-> (px/with-dispatch executor
(let [id (get-in request [:path-params :id]) (p/let [id (get-in request [:path-params :id])
id (coerce-id id) id (coerce-id id)
obj (sto/get-object storage id)] obj (sto/get-object storage id)]
(if obj (if obj
(serve-object cfg obj) (serve-object cfg obj)
{:status 404 :body ""}))) {:status 404 :body ""})))
(p/then respond)
(p/bind p/wrap)
(p/then' respond)
(p/catch raise))) (p/catch raise)))
(defn- generic-handler
"A generic handler helper/common code for file-media based handlers."
[{:keys [storage] :as cfg} request kf]
(p/let [id (get-in request [:path-params :id])
mobj (get-file-media-object storage id)
obj (sto/get-object storage (kf mobj))]
(if obj
(serve-object cfg obj)
{:status 404 :body ""})))
(defn file-objects-handler (defn file-objects-handler
"Handler that serves storage objects by file media id."
[cfg request respond raise] [cfg request respond raise]
(-> (generic-handler cfg request :media-id) (-> (generic-handler cfg request :media-id)
(p/then respond) (p/then respond)
(p/catch raise))) (p/catch raise)))
(defn file-thumbnails-handler (defn file-thumbnails-handler
"Handler that serves storage objects by thumbnail-id and quick
fallback to file-media-id if no thumbnail is available."
[cfg request respond raise] [cfg request respond raise]
(-> (generic-handler cfg request #(or (:thumbnail-id %) (:media-id %))) (-> (generic-handler cfg request #(or (:thumbnail-id %) (:media-id %)))
(p/then respond) (p/then respond)

View file

@ -11,31 +11,40 @@
[app.common.logging :as l] [app.common.logging :as l]
[app.db :as db] [app.db :as db]
[app.db.sql :as sql] [app.db.sql :as sql]
[app.util.http :as http]
[clojure.spec.alpha :as s] [clojure.spec.alpha :as s]
[cuerdas.core :as str] [cuerdas.core :as str]
[integrant.core :as ig] [integrant.core :as ig]
[jsonista.core :as j])) [jsonista.core :as j]
[promesa.exec :as px]))
(declare parse-json) (declare parse-json)
(declare handle-request)
(declare parse-notification) (declare parse-notification)
(declare process-report) (declare process-report)
(s/def ::http-client fn?)
(defmethod ig/pre-init-spec ::handler [_] (defmethod ig/pre-init-spec ::handler [_]
(s/keys :req-un [::db/pool])) (s/keys :req-un [::db/pool ::http-client]))
(defmethod ig/init-key ::handler (defmethod ig/init-key ::handler
[_ cfg] [_ {:keys [executor] :as cfg}]
(fn [request respond _] (fn [request respond _]
(let [data (slurp (:body request))]
(px/run! executor #(handle-request cfg data))
(respond {:status 200 :body ""}))))
(defn handle-request
[{:keys [http-client] :as cfg} data]
(try (try
(let [body (parse-json (slurp (:body request))) (let [body (parse-json data)
mtype (get body "Type")] mtype (get body "Type")]
(cond (cond
(= mtype "SubscriptionConfirmation") (= mtype "SubscriptionConfirmation")
(let [surl (get body "SubscribeURL") (let [surl (get body "SubscribeURL")
stopic (get body "TopicArn")] stopic (get body "TopicArn")]
(l/info :action "subscription received" :topic stopic :url surl) (l/info :action "subscription received" :topic stopic :url surl)
(http/send! {:uri surl :method :post :timeout 10000})) (http-client {:uri surl :method :post :timeout 10000} {:sync? true}))
(= mtype "Notification") (= mtype "Notification")
(when-let [message (parse-json (get body "Message"))] (when-let [message (parse-json (get body "Message"))]
@ -45,11 +54,10 @@
:else :else
(l/warn :hint "unexpected data received" (l/warn :hint "unexpected data received"
:report (pr-str body)))) :report (pr-str body))))
(catch Throwable cause
(l/error :hint "unexpected exception on awsns handler"
:cause cause)))
(respond {:status 200 :body ""}))) (catch Throwable cause
(l/error :hint "unexpected exception on awsns"
:cause cause))))
(defn- parse-bounce (defn- parse-bounce
[data] [data]

View file

@ -0,0 +1,30 @@
;; This Source Code Form is subject to the terms of the Mozilla Public
;; License, v. 2.0. If a copy of the MPL was not distributed with this
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
;;
;; Copyright (c) UXBOX Labs SL
(ns app.http.client
"Http client abstraction layer."
(:require
[app.worker :as wrk]
[clojure.spec.alpha :as s]
[integrant.core :as ig]
[java-http-clj.core :as http]))
(defmethod ig/pre-init-spec :app.http/client [_]
(s/keys :req-un [::wrk/executor]))
(defmethod ig/init-key :app.http/client
[_ {:keys [executor] :as cfg}]
(let [client (http/build-client {:executor executor
:connect-timeout 30000 ;; 10s
:follow-redirects :always})]
(with-meta
(fn send
([req] (send req {}))
([req {:keys [response-type sync?] :or {response-type :string sync? false}}]
(if sync?
(http/send req {:client client :as response-type})
(http/send-async req {:client client :as response-type}))))
{::client client})))

View file

@ -14,7 +14,6 @@
[app.db :as db] [app.db :as db]
[app.rpc.mutations.files :as m.files] [app.rpc.mutations.files :as m.files]
[app.rpc.queries.profile :as profile] [app.rpc.queries.profile :as profile]
[app.util.async :as async]
[app.util.blob :as blob] [app.util.blob :as blob]
[app.util.template :as tmpl] [app.util.template :as tmpl]
[app.util.time :as dt] [app.util.time :as dt]
@ -25,7 +24,8 @@
[datoteka.core :as fs] [datoteka.core :as fs]
[fipp.edn :as fpp] [fipp.edn :as fpp]
[integrant.core :as ig] [integrant.core :as ig]
[promesa.core :as p])) [promesa.core :as p]
[promesa.exec :as px]))
;; (selmer.parser/cache-off!) ;; (selmer.parser/cache-off!)
@ -208,8 +208,7 @@
(defn- wrap-async (defn- wrap-async
[{:keys [executor] :as cfg} f] [{:keys [executor] :as cfg} f]
(fn [request respond raise] (fn [request respond raise]
(-> (async/with-dispatch executor (-> (px/submit! executor #(f cfg request))
(f cfg request))
(p/then respond) (p/then respond)
(p/catch raise)))) (p/catch raise))))

View file

@ -15,17 +15,15 @@
[app.db :as db] [app.db :as db]
[app.loggers.audit :as audit] [app.loggers.audit :as audit]
[app.rpc.queries.profile :as profile] [app.rpc.queries.profile :as profile]
[app.util.http :as http] [app.util.json :as json]
[app.util.time :as dt] [app.util.time :as dt]
[clojure.data.json :as json]
[clojure.set :as set] [clojure.set :as set]
[clojure.spec.alpha :as s] [clojure.spec.alpha :as s]
[cuerdas.core :as str] [cuerdas.core :as str]
[integrant.core :as ig] [integrant.core :as ig]
[promesa.core :as p]
[promesa.exec :as px])) [promesa.exec :as px]))
;; TODO: make it fully async (?)
(defn- build-redirect-uri (defn- build-redirect-uri
[{:keys [provider] :as cfg}] [{:keys [provider] :as cfg}]
(let [public (u/uri (:public-uri cfg))] (let [public (u/uri (:public-uri cfg))]
@ -43,9 +41,15 @@
(assoc :query query) (assoc :query query)
(str)))) (str))))
(defn- qualify-props
[provider props]
(reduce-kv (fn [result k v]
(assoc result (keyword (:name provider) (name k)) v))
{}
props))
(defn retrieve-access-token (defn retrieve-access-token
[{:keys [provider] :as cfg} code] [{:keys [provider http-client] :as cfg} code]
(try
(let [params {:client_id (:client-id provider) (let [params {:client_id (:client-id provider)
:client_secret (:client-secret provider) :client_secret (:client-secret provider)
:code code :code code
@ -54,42 +58,46 @@
req {:method :post req {:method :post
:headers {"content-type" "application/x-www-form-urlencoded"} :headers {"content-type" "application/x-www-form-urlencoded"}
:uri (:token-uri provider) :uri (:token-uri provider)
:body (u/map->query-string params)} :body (u/map->query-string params)}]
res (http/send! req)] (p/then
(when (= 200 (:status res)) (http-client req)
(let [data (json/read-str (:body res))] (fn [{:keys [status body] :as res}]
{:token (get data "access_token") (if (= status 200)
:type (get data "token_type")}))) (let [data (json/read body)]
(catch Exception e {:token (get data :access_token)
(l/warn :hint "unexpected error on retrieve-access-token" :cause e) :type (get data :token_type)})
nil))) (ex/raise :type :internal
:code :unable-to-retrieve-token
(defn- qualify-props ::http-status status
[provider props] ::http-body body))))))
(reduce-kv (fn [result k v]
(assoc result (keyword (:name provider) (name k)) v))
{}
props))
(defn- retrieve-user-info (defn- retrieve-user-info
[{:keys [provider] :as cfg} tdata] [{:keys [provider http-client] :as cfg} tdata]
(try (p/then
(let [req {:uri (:user-uri provider) (http-client {:uri (:user-uri provider)
:headers {"Authorization" (str (:type tdata) " " (:token tdata))} :headers {"Authorization" (str (:type tdata) " " (:token tdata))}
:timeout 6000 :timeout 6000
:method :get} :method :get})
res (http/send! req)] (fn [{:keys [status body] :as res}]
(if (= 200 status)
(when (= 200 (:status res)) (let [info (json/read body)
(let [info (json/read-str (:body res) :key-fn keyword)] info {:backend (:name provider)
{:backend (:name provider) :email (get info :email)
:email (:email info) :fullname (get info :name)
:fullname (:name info)
:props (->> (dissoc info :name :email) :props (->> (dissoc info :name :email)
(qualify-props provider))}))) (qualify-props provider))}]
(catch Exception e
(l/warn :hint "unexpected exception on retrieve-user-info" :cause e) (when-not (s/valid? ::info info)
nil))) (l/warn :hint "received incomplete profile info object (please set correct scopes)"
:info (pr-str info))
(ex/raise :type :internal
:code :unable-to-auth
:hint "no user info"))
info)
(ex/raise :type :internal
:code :unable-to-retrieve-user-info
::http-status status
::http-body body)))))
(s/def ::backend ::us/not-empty-string) (s/def ::backend ::us/not-empty-string)
(s/def ::email ::us/not-empty-string) (s/def ::email ::us/not-empty-string)
@ -104,19 +112,7 @@
(defn retrieve-info (defn retrieve-info
[{:keys [tokens provider] :as cfg} request] [{:keys [tokens provider] :as cfg} request]
(let [state (get-in request [:params :state]) (letfn [(validate-oidc [info]
state (tokens :verify {:token state :iss :oauth})
info (some->> (get-in request [:params :code])
(retrieve-access-token cfg)
(retrieve-user-info cfg))]
(when-not (s/valid? ::info info)
(l/warn :hint "received incomplete profile info object (please set correct scopes)"
:info (pr-str info))
(ex/raise :type :internal
:code :unable-to-auth
:hint "no user info"))
;; If the provider is OIDC, we can proceed to check ;; If the provider is OIDC, we can proceed to check
;; roles if they are defined. ;; roles if they are defined.
(when (and (= "oidc" (:name provider)) (when (and (= "oidc" (:name provider))
@ -134,7 +130,9 @@
(ex/raise :type :internal (ex/raise :type :internal
:code :unable-to-auth :code :unable-to-auth
:hint "not enough permissions")))) :hint "not enough permissions"))))
info)
(post-process [state info]
(cond-> info (cond-> info
(some? (:invitation-token state)) (some? (:invitation-token state))
(assoc :invitation-token (:invitation-token state)) (assoc :invitation-token (:invitation-token state))
@ -142,7 +140,16 @@
;; If state token comes with props, merge them. The state token ;; If state token comes with props, merge them. The state token
;; props can contain pm_ and utm_ prefixed query params. ;; props can contain pm_ and utm_ prefixed query params.
(map? (:props state)) (map? (:props state))
(update :props merge (:props state))))) (update :props merge (:props state))))]
(let [state (get-in request [:params :state])
state (tokens :verify {:token state :iss :oauth})
code (get-in request [:params :code])]
(-> (p/resolved code)
(p/then #(retrieve-access-token cfg %))
(p/then #(retrieve-user-info cfg %))
(p/then' validate-oidc)
(p/then' (partial post-process state))))))
;; --- HTTP HANDLERS ;; --- HTTP HANDLERS
@ -158,12 +165,13 @@
params)) params))
(defn- retrieve-profile (defn- retrieve-profile
[{:keys [pool] :as cfg} info] [{:keys [pool executor] :as cfg} info]
(px/with-dispatch executor
(with-open [conn (db/open pool)] (with-open [conn (db/open pool)]
(some->> (:email info) (some->> (:email info)
(profile/retrieve-profile-data-by-email conn) (profile/retrieve-profile-data-by-email conn)
(profile/populate-additional-data conn) (profile/populate-additional-data conn)
(profile/decode-profile-row)))) (profile/decode-profile-row)))))
(defn- redirect-response (defn- redirect-response
[uri] [uri]
@ -202,6 +210,7 @@
(->> (redirect-response uri) (->> (redirect-response uri)
(sxf request))) (sxf request)))
(let [info (assoc info (let [info (assoc info
:iss :prepared-register :iss :prepared-register
:is-active true :is-active true
@ -216,35 +225,30 @@
(redirect-response uri)))) (redirect-response uri))))
(defn- auth-handler (defn- auth-handler
[{:keys [tokens executor] :as cfg} {:keys [params] :as request} respond _] [{:keys [tokens] :as cfg} {:keys [params] :as request} respond _]
(px/run! (let [props (extract-utm-props params)
executor
(fn []
(let [invitation (:invitation-token params)
props (extract-utm-props params)
state (tokens :generate state (tokens :generate
{:iss :oauth {:iss :oauth
:invitation-token invitation :invitation-token (:invitation-token params)
:props props :props props
:exp (dt/in-future "15m")}) :exp (dt/in-future "15m")})
uri (build-auth-uri cfg state)] uri (build-auth-uri cfg state)]
(respond {:status 200 :body {:redirect-uri uri}})))
(respond
{:status 200
:body {:redirect-uri uri}})))))
(defn- callback-handler (defn- callback-handler
[{:keys [executor] :as cfg} request respond _] [cfg request respond _]
(px/run! (letfn [(process-request []
executor (p/let [info (retrieve-info cfg request)
(fn []
(try
(let [info (retrieve-info cfg request)
profile (retrieve-profile cfg info)] profile (retrieve-profile cfg info)]
(respond (generate-redirect cfg request info profile))) (generate-redirect cfg request info profile)))
(catch Exception cause
(handle-error [cause]
(l/warn :hint "error on oauth process" :cause cause) (l/warn :hint "error on oauth process" :cause cause)
(respond (generate-error-redirect cfg cause))))))) (respond (generate-error-redirect cfg cause)))]
(-> (process-request)
(p/then respond)
(p/catch handle-error))))
;; --- INIT ;; --- INIT
@ -281,10 +285,10 @@
:callback-handler (wrap-handler cfg callback-handler)})) :callback-handler (wrap-handler cfg callback-handler)}))
(defn- discover-oidc-config (defn- discover-oidc-config
[{:keys [base-uri] :as opts}] [{:keys [http-client]} {:keys [base-uri] :as opts}]
(let [discovery-uri (u/join base-uri ".well-known/openid-configuration") (let [discovery-uri (u/join base-uri ".well-known/openid-configuration")
response (ex/try (http/send! {:method :get :uri (str discovery-uri)}))] response (ex/try (http-client {:method :get :uri (str discovery-uri)} {:sync? true}))]
(cond (cond
(ex/exception? response) (ex/exception? response)
(do (do
@ -294,10 +298,10 @@
nil) nil)
(= 200 (:status response)) (= 200 (:status response))
(let [data (json/read-str (:body response))] (let [data (json/read (:body response))]
{:token-uri (get data "token_endpoint") {:token-uri (get data :token_endpoint)
:auth-uri (get data "authorization_endpoint") :auth-uri (get data :authorization_endpoint)
:user-uri (get data "userinfo_endpoint")}) :user-uri (get data :userinfo_endpoint)})
:else :else
(do (do
@ -325,6 +329,7 @@
:roles-attr (cf/get :oidc-roles-attr) :roles-attr (cf/get :oidc-roles-attr)
:roles (cf/get :oidc-roles) :roles (cf/get :oidc-roles)
:name "oidc"}] :name "oidc"}]
(if (and (string? (:base-uri opts)) (if (and (string? (:base-uri opts))
(string? (:client-id opts)) (string? (:client-id opts))
(string? (:client-secret opts))) (string? (:client-secret opts)))
@ -339,7 +344,7 @@
(assoc-in cfg [:providers "oidc"] opts)) (assoc-in cfg [:providers "oidc"] opts))
(do (do
(l/debug :hint "trying to discover oidc provider configuration using BASE_URI") (l/debug :hint "trying to discover oidc provider configuration using BASE_URI")
(if-let [opts' (discover-oidc-config opts)] (if-let [opts' (discover-oidc-config cfg opts)]
(do (do
(l/debug :hint "discovered opts" :additional-opts opts') (l/debug :hint "discovered opts" :additional-opts opts')
(assoc-in cfg [:providers "oidc"] (merge opts opts'))) (assoc-in cfg [:providers "oidc"] (merge opts opts')))

View file

@ -89,16 +89,6 @@
(when-let [token (get-in cookies [token-cookie-name :value])] (when-let [token (get-in cookies [token-cookie-name :value])]
(rss/delete-session store token))) (rss/delete-session store token)))
(defn- retrieve-session
[store token]
(when token
(rss/read-session store token)))
(defn- retrieve-from-request
[store {:keys [cookies] :as request}]
(->> (get-in cookies [token-cookie-name :value])
(retrieve-session store)))
(defn- add-cookies (defn- add-cookies
[response token] [response token]
(let [cors? (contains? cfg/flags :cors) (let [cors? (contains? cfg/flags :cors)
@ -132,40 +122,55 @@
:value "" :value ""
:max-age -1}}))) :max-age -1}})))
;; NOTE: for now the session middleware is synchronous and is
;; processed on jetty threads. This is because of probably a bug on
;; jetty that causes NPE on upgrading connection to websocket from
;; thread not managed by jetty. We probably can fix it running
;; websocket server in different port as standalone service.
(defn- middleware (defn- middleware
[events-ch store handler] [{:keys [::events-ch ::store] :as cfg} handler]
(letfn [(get-session [{:keys [cookies] :as request}]
(if-let [token (get-in cookies [token-cookie-name :value])]
(if-let [{:keys [id profile-id]} (rss/read-session store token)]
(assoc request :session-id id :profile-id profile-id)
request)
request))]
(fn [request respond raise] (fn [request respond raise]
(if-let [{:keys [id profile-id] :as session} (retrieve-from-request store request)] (try
(do (let [{:keys [session-id profile-id] :as request} (get-session request)]
(a/>!! events-ch id) (when (and session-id profile-id)
(l/set-context! {:profile-id profile-id}) (a/offer! events-ch session-id))
(handler (assoc request :profile-id profile-id :session-id id) respond raise)) (handler request respond raise))
(handler request respond raise)))) (catch Throwable cause
(raise cause))))))
;; --- STATE INIT: SESSION ;; --- STATE INIT: SESSION
(s/def ::tokens fn?) (s/def ::tokens fn?)
(defmethod ig/pre-init-spec ::session [_] (defmethod ig/pre-init-spec :app.http/session [_]
(s/keys :req-un [::db/pool ::tokens])) (s/keys :req-un [::db/pool ::tokens ::wrk/executor]))
(defmethod ig/prep-key ::session (defmethod ig/prep-key :app.http/session
[_ cfg] [_ cfg]
(d/merge {:buffer-size 128} (d/merge {:buffer-size 128}
(d/without-nils cfg))) (d/without-nils cfg)))
(defmethod ig/init-key ::session (defmethod ig/init-key :app.http/session
[_ {:keys [pool tokens] :as cfg}] [_ {:keys [pool tokens] :as cfg}]
(let [events-ch (a/chan (a/dropping-buffer (:buffer-size cfg))) (let [events-ch (a/chan (a/dropping-buffer (:buffer-size cfg)))
store (if (db/read-only? pool) store (if (db/read-only? pool)
(->MemoryStore (atom {}) tokens) (->MemoryStore (atom {}) tokens)
(->DatabaseStore pool tokens))] (->DatabaseStore pool tokens))
cfg (assoc cfg ::store store ::events-ch events-ch)]
(when (db/read-only? pool) (when (db/read-only? pool)
(l/warn :hint "sessions module initialized with in-memory store")) (l/warn :hint "sessions module initialized with in-memory store"))
(-> cfg (-> cfg
(assoc ::events-ch events-ch) (assoc :middleware (partial middleware cfg))
(assoc :middleware (partial middleware events-ch store))
(assoc :create (fn [profile-id] (assoc :create (fn [profile-id]
(fn [request response] (fn [request response]
(let [token (create-session store request profile-id)] (let [token (create-session store request profile-id)]
@ -177,11 +182,10 @@
(assoc :body "") (assoc :body "")
(clear-cookies))))))) (clear-cookies)))))))
(defmethod ig/halt-key! ::session (defmethod ig/halt-key! :app.http/session
[_ data] [_ data]
(a/close! (::events-ch data))) (a/close! (::events-ch data)))
;; --- STATE INIT: SESSION UPDATER ;; --- STATE INIT: SESSION UPDATER
(declare update-sessions) (declare update-sessions)
@ -192,8 +196,7 @@
(defmethod ig/pre-init-spec ::updater [_] (defmethod ig/pre-init-spec ::updater [_]
(s/keys :req-un [::db/pool ::wrk/executor ::mtx/metrics ::session] (s/keys :req-un [::db/pool ::wrk/executor ::mtx/metrics ::session]
:opt-un [::max-batch-age :opt-un [::max-batch-age ::max-batch-size]))
::max-batch-size]))
(defmethod ig/prep-key ::updater (defmethod ig/prep-key ::updater
[_ cfg] [_ cfg]

View file

@ -16,7 +16,6 @@
[app.config :as cf] [app.config :as cf]
[app.db :as db] [app.db :as db]
[app.util.async :as aa] [app.util.async :as aa]
[app.util.http :as http]
[app.util.time :as dt] [app.util.time :as dt]
[app.worker :as wrk] [app.worker :as wrk]
[clojure.core.async :as a] [clojure.core.async :as a]
@ -221,11 +220,12 @@
(declare archive-events) (declare archive-events)
(s/def ::http-client fn?)
(s/def ::uri ::us/string) (s/def ::uri ::us/string)
(s/def ::tokens fn?) (s/def ::tokens fn?)
(defmethod ig/pre-init-spec ::archive-task [_] (defmethod ig/pre-init-spec ::archive-task [_]
(s/keys :req-un [::db/pool ::tokens] (s/keys :req-un [::db/pool ::tokens ::http-client]
:opt-un [::uri])) :opt-un [::uri]))
(defmethod ig/init-key ::archive-task (defmethod ig/init-key ::archive-task
@ -257,7 +257,7 @@
for update skip locked;") for update skip locked;")
(defn archive-events (defn archive-events
[{:keys [pool uri tokens] :as cfg}] [{:keys [pool uri tokens http-client] :as cfg}]
(letfn [(decode-row [{:keys [props ip-addr context] :as row}] (letfn [(decode-row [{:keys [props ip-addr context] :as row}]
(cond-> row (cond-> row
(db/pgobject? props) (db/pgobject? props)
@ -293,7 +293,7 @@
:method :post :method :post
:headers headers :headers headers
:body body} :body body}
resp (http/send! params)] resp (http-client params {:sync? true})]
(if (= (:status resp) 204) (if (= (:status resp) 204)
true true
(do (do

View file

@ -11,7 +11,6 @@
[app.common.spec :as us] [app.common.spec :as us]
[app.config :as cfg] [app.config :as cfg]
[app.util.async :as aa] [app.util.async :as aa]
[app.util.http :as http]
[app.util.json :as json] [app.util.json :as json]
[app.worker :as wrk] [app.worker :as wrk]
[clojure.core.async :as a] [clojure.core.async :as a]
@ -62,13 +61,14 @@
(str "\n" (:trace error))))]]}]})) (str "\n" (:trace error))))]]}]}))
(defn- send-log (defn- send-log
[uri payload i] [{:keys [http-client uri]} payload i]
(try (try
(let [response (http/send! {:uri uri (let [response (http-client {:uri uri
:timeout 6000 :timeout 6000
:method :post :method :post
:headers {"content-type" "application/json"} :headers {"content-type" "application/json"}
:body (json/write payload)})] :body (json/write payload)}
{:sync? true})]
(cond (cond
(= (:status response) 204) (= (:status response) 204)
true true
@ -89,11 +89,11 @@
false))) false)))
(defn- handle-event (defn- handle-event
[{:keys [executor uri]} event] [{:keys [executor] :as cfg} event]
(aa/with-thread executor (aa/with-thread executor
(let [payload (prepare-payload event)] (let [payload (prepare-payload event)]
(loop [i 1] (loop [i 1]
(when (and (not (send-log uri payload i)) (< i 20)) (when (and (not (send-log cfg payload i)) (< i 20))
(Thread/sleep (* i 2000)) (Thread/sleep (* i 2000))
(recur (inc i))))))) (recur (inc i)))))))

View file

@ -9,52 +9,47 @@
(:require (:require
[app.common.logging :as l] [app.common.logging :as l]
[app.config :as cf] [app.config :as cf]
[app.db :as db]
[app.loggers.database :as ldb] [app.loggers.database :as ldb]
[app.util.async :as aa]
[app.util.http :as http]
[app.util.json :as json] [app.util.json :as json]
[app.worker :as wrk]
[clojure.core.async :as a] [clojure.core.async :as a]
[clojure.spec.alpha :as s] [clojure.spec.alpha :as s]
[integrant.core :as ig])) [integrant.core :as ig]
[promesa.core :as p]))
(defonce enabled (atom true)) (defonce enabled (atom true))
(defn- send-mattermost-notification! (defn- send-mattermost-notification!
[cfg {:keys [host id public-uri] :as event}] [{:keys [http-client] :as cfg} {:keys [host id public-uri] :as event}]
(try
(let [uri (:uri cfg) (let [uri (:uri cfg)
text (str "Exception on (host: " host ", url: " public-uri "/dbg/error/" id ")\n" text (str "Exception on (host: " host ", url: " public-uri "/dbg/error/" id ")\n"
(when-let [pid (:profile-id event)] (when-let [pid (:profile-id event)]
(str "- profile-id: #uuid-" pid "\n"))) (str "- profile-id: #uuid-" pid "\n")))]
rsp (http/send! {:uri uri (p/then
(http-client {:uri uri
:method :post :method :post
:headers {"content-type" "application/json"} :headers {"content-type" "application/json"}
:body (json/write-str {:text text})})] :body (json/write-str {:text text})})
(when (not= (:status rsp) 200) (fn [{:keys [status] :as rsp}]
(l/error :hint "error on sending data to mattermost" (when (not= status 200)
:response (pr-str rsp)))) (l/warn :hint "error on sending data to mattermost"
:response (pr-str rsp)))))))
(catch Exception e
(l/error :hint "unexpected exception on error reporter"
:cause e))))
(defn handle-event (defn handle-event
[{:keys [executor] :as cfg} event] [cfg event]
(aa/with-thread executor (let [ch (a/chan)]
(try (-> (p/let [event (ldb/parse-event event)]
(let [event (ldb/parse-event event)] (send-mattermost-notification! cfg event))
(when @enabled (p/finally (fn [_ cause]
(send-mattermost-notification! cfg event))) (when cause
(catch Exception e (l/warn :hint "unexpected exception on error reporter" :cause cause))
(l/warn :hint "unexpected exception on error reporter" :cause e))))) (a/close! ch))))
ch))
(s/def ::http-client fn?)
(s/def ::uri ::cf/error-report-webhook) (s/def ::uri ::cf/error-report-webhook)
(defmethod ig/pre-init-spec ::reporter [_] (defmethod ig/pre-init-spec ::reporter [_]
(s/keys :req-un [::wrk/executor ::db/pool ::receiver] (s/keys :req-un [::http-client ::receiver]
:opt-un [::uri])) :opt-un [::uri]))
(defmethod ig/init-key ::reporter (defmethod ig/init-key ::reporter

View file

@ -37,7 +37,11 @@
(keep prepare))) (keep prepare)))
mult (a/mult output)] mult (a/mult output)]
(when endpoint (when endpoint
(a/thread (start-rcv-loop {:out buffer :endpoint endpoint}))) (let [thread (Thread. #(start-rcv-loop {:out buffer :endpoint endpoint}))]
(.setDaemon thread false)
(.setName thread "penpot/zmq-logger-receiver")
(.start thread)))
(a/pipe buffer output) (a/pipe buffer output)
(with-meta (with-meta
(fn [cmd ch] (fn [cmd ch]
@ -62,7 +66,7 @@
([] (start-rcv-loop nil)) ([] (start-rcv-loop nil))
([{:keys [out endpoint] :or {endpoint "tcp://localhost:5556"}}] ([{:keys [out endpoint] :or {endpoint "tcp://localhost:5556"}}]
(let [out (or out (a/chan 1)) (let [out (or out (a/chan 1))
zctx (ZContext.) zctx (ZContext. 1)
socket (.. zctx (createSocket SocketType/SUB))] socket (.. zctx (createSocket SocketType/SUB))]
(.. socket (connect ^String endpoint)) (.. socket (connect ^String endpoint))
(.. socket (subscribe "")) (.. socket (subscribe ""))
@ -75,7 +79,7 @@
(recur) (recur)
(do (do
(.close ^java.lang.AutoCloseable socket) (.close ^java.lang.AutoCloseable socket)
(.close ^java.lang.AutoCloseable zctx)))))))) (.destroy ^ZContext zctx))))))))
(s/def ::logger-name string?) (s/def ::logger-name string?)
(s/def ::level string?) (s/def ::level string?)

View file

@ -29,10 +29,10 @@
{:parallelism (cf/get :default-executor-parallelism 60) {:parallelism (cf/get :default-executor-parallelism 60)
:prefix :default} :prefix :default}
;; Constrained thread pool. Should only be used from high demand ;; Constrained thread pool. Should only be used from high resources
;; RPC methods. ;; demanding operations.
[::blocking :app.worker/executor] [::blocking :app.worker/executor]
{:parallelism (cf/get :blocking-executor-parallelism 20) {:parallelism (cf/get :blocking-executor-parallelism 10)
:prefix :blocking} :prefix :blocking}
;; Dedicated thread pool for backround tasks execution. ;; Dedicated thread pool for backround tasks execution.
@ -40,6 +40,10 @@
{:parallelism (cf/get :worker-executor-parallelism 10) {:parallelism (cf/get :worker-executor-parallelism 10)
:prefix :worker} :prefix :worker}
:app.worker/scheduler
{:parallelism 1
:prefix :scheduler}
:app.worker/executors :app.worker/executors
{:default (ig/ref [::default :app.worker/executor]) {:default (ig/ref [::default :app.worker/executor])
:worker (ig/ref [::worker :app.worker/executor]) :worker (ig/ref [::worker :app.worker/executor])
@ -47,6 +51,7 @@
:app.worker/executors-monitor :app.worker/executors-monitor
{:metrics (ig/ref :app.metrics/metrics) {:metrics (ig/ref :app.metrics/metrics)
:scheduler (ig/ref :app.worker/scheduler)
:executors (ig/ref :app.worker/executors)} :executors (ig/ref :app.worker/executors)}
:app.migrations/migrations :app.migrations/migrations
@ -68,14 +73,19 @@
:app.storage/gc-deleted-task :app.storage/gc-deleted-task
{:pool (ig/ref :app.db/pool) {:pool (ig/ref :app.db/pool)
:storage (ig/ref :app.storage/storage) :storage (ig/ref :app.storage/storage)
:executor (ig/ref [::worker :app.worker/executor])
:min-age (dt/duration {:hours 2})} :min-age (dt/duration {:hours 2})}
:app.storage/gc-touched-task :app.storage/gc-touched-task
{:pool (ig/ref :app.db/pool)} {:pool (ig/ref :app.db/pool)}
:app.http.session/session :app.http/client
{:executor (ig/ref [::default :app.worker/executor])}
:app.http/session
{:pool (ig/ref :app.db/pool) {:pool (ig/ref :app.db/pool)
:tokens (ig/ref :app.tokens/tokens)} :tokens (ig/ref :app.tokens/tokens)
:executor (ig/ref [::default :app.worker/executor])}
:app.http.session/gc-task :app.http.session/gc-task
{:pool (ig/ref :app.db/pool) {:pool (ig/ref :app.db/pool)
@ -85,28 +95,30 @@
{:pool (ig/ref :app.db/pool) {:pool (ig/ref :app.db/pool)
:metrics (ig/ref :app.metrics/metrics) :metrics (ig/ref :app.metrics/metrics)
:executor (ig/ref [::worker :app.worker/executor]) :executor (ig/ref [::worker :app.worker/executor])
:session (ig/ref :app.http.session/session) :session (ig/ref :app.http/session)
:max-batch-age (cf/get :http-session-updater-batch-max-age) :max-batch-age (cf/get :http-session-updater-batch-max-age)
:max-batch-size (cf/get :http-session-updater-batch-max-size)} :max-batch-size (cf/get :http-session-updater-batch-max-size)}
:app.http.awsns/handler :app.http.awsns/handler
{:tokens (ig/ref :app.tokens/tokens) {:tokens (ig/ref :app.tokens/tokens)
:pool (ig/ref :app.db/pool)} :pool (ig/ref :app.db/pool)
:http-client (ig/ref :app.http/client)
:executor (ig/ref [::worker :app.worker/executor])}
:app.http/server :app.http/server
{:port (cf/get :http-server-port) {:port (cf/get :http-server-port)
:host (cf/get :http-server-host) :host (cf/get :http-server-host)
:router (ig/ref :app.http/router) :router (ig/ref :app.http/router)
:metrics (ig/ref :app.metrics/metrics) :metrics (ig/ref :app.metrics/metrics)
:executor (ig/ref [::default :app.worker/executor])
:session (ig/ref :app.http/session)
:max-threads (cf/get :http-server-max-threads) :max-threads (cf/get :http-server-max-threads)
:min-threads (cf/get :http-server-min-threads)} :min-threads (cf/get :http-server-min-threads)}
:app.http/router :app.http/router
{:assets (ig/ref :app.http.assets/handlers) {:assets (ig/ref :app.http.assets/handlers)
:feedback (ig/ref :app.http.feedback/handler) :feedback (ig/ref :app.http.feedback/handler)
:session (ig/ref :app.http.session/session) :awsns-handler (ig/ref :app.http.awsns/handler)
:sns-webhook (ig/ref :app.http.awsns/handler)
:oauth (ig/ref :app.http.oauth/handler) :oauth (ig/ref :app.http.oauth/handler)
:debug (ig/ref :app.http.debug/handlers) :debug (ig/ref :app.http.debug/handlers)
:ws (ig/ref :app.http.websocket/handler) :ws (ig/ref :app.http.websocket/handler)
@ -114,12 +126,13 @@
:public-uri (cf/get :public-uri) :public-uri (cf/get :public-uri)
:storage (ig/ref :app.storage/storage) :storage (ig/ref :app.storage/storage)
:tokens (ig/ref :app.tokens/tokens) :tokens (ig/ref :app.tokens/tokens)
:audit-http-handler (ig/ref :app.loggers.audit/http-handler) :audit-handler (ig/ref :app.loggers.audit/http-handler)
:rpc (ig/ref :app.rpc/rpc)} :rpc (ig/ref :app.rpc/rpc)
:executor (ig/ref [::default :app.worker/executor])}
:app.http.debug/handlers :app.http.debug/handlers
{:pool (ig/ref :app.db/pool) {:pool (ig/ref :app.db/pool)
:executor (ig/ref [::default :app.worker/executor])} :executor (ig/ref [::worker :app.worker/executor])}
:app.http.websocket/handler :app.http.websocket/handler
{:pool (ig/ref :app.db/pool) {:pool (ig/ref :app.db/pool)
@ -140,22 +153,24 @@
:app.http.oauth/handler :app.http.oauth/handler
{:rpc (ig/ref :app.rpc/rpc) {:rpc (ig/ref :app.rpc/rpc)
:session (ig/ref :app.http.session/session) :session (ig/ref :app.http/session)
:pool (ig/ref :app.db/pool) :pool (ig/ref :app.db/pool)
:tokens (ig/ref :app.tokens/tokens) :tokens (ig/ref :app.tokens/tokens)
:audit (ig/ref :app.loggers.audit/collector) :audit (ig/ref :app.loggers.audit/collector)
:executor (ig/ref [::default :app.worker/executor]) :executor (ig/ref [::default :app.worker/executor])
:http-client (ig/ref :app.http/client)
:public-uri (cf/get :public-uri)} :public-uri (cf/get :public-uri)}
:app.rpc/rpc :app.rpc/rpc
{:pool (ig/ref :app.db/pool) {:pool (ig/ref :app.db/pool)
:session (ig/ref :app.http.session/session) :session (ig/ref :app.http/session)
:tokens (ig/ref :app.tokens/tokens) :tokens (ig/ref :app.tokens/tokens)
:metrics (ig/ref :app.metrics/metrics) :metrics (ig/ref :app.metrics/metrics)
:storage (ig/ref :app.storage/storage) :storage (ig/ref :app.storage/storage)
:msgbus (ig/ref :app.msgbus/msgbus) :msgbus (ig/ref :app.msgbus/msgbus)
:public-uri (cf/get :public-uri) :public-uri (cf/get :public-uri)
:audit (ig/ref :app.loggers.audit/collector) :audit (ig/ref :app.loggers.audit/collector)
:http-client (ig/ref :app.http/client)
:executors (ig/ref :app.worker/executors)} :executors (ig/ref :app.worker/executors)}
:app.worker/worker :app.worker/worker
@ -164,11 +179,12 @@
:metrics (ig/ref :app.metrics/metrics) :metrics (ig/ref :app.metrics/metrics)
:pool (ig/ref :app.db/pool)} :pool (ig/ref :app.db/pool)}
:app.worker/scheduler :app.worker/cron
{:executor (ig/ref [::worker :app.worker/executor]) {:executor (ig/ref [::worker :app.worker/executor])
:scheduler (ig/ref :app.worker/scheduler)
:tasks (ig/ref :app.worker/registry) :tasks (ig/ref :app.worker/registry)
:pool (ig/ref :app.db/pool) :pool (ig/ref :app.db/pool)
:schedule :entries
[{:cron #app/cron "0 0 0 * * ?" ;; daily [{:cron #app/cron "0 0 0 * * ?" ;; daily
:task :file-media-gc} :task :file-media-gc}
@ -261,7 +277,8 @@
{:pool (ig/ref :app.db/pool) {:pool (ig/ref :app.db/pool)
:version (:full cf/version) :version (:full cf/version)
:uri (cf/get :telemetry-uri) :uri (cf/get :telemetry-uri)
:sprops (ig/ref :app.setup/props)} :sprops (ig/ref :app.setup/props)
:http-client (ig/ref :app.http/client)}
:app.srepl/server :app.srepl/server
{:port (cf/get :srepl-port) {:port (cf/get :srepl-port)
@ -279,7 +296,7 @@
:app.loggers.audit/http-handler :app.loggers.audit/http-handler
{:pool (ig/ref :app.db/pool) {:pool (ig/ref :app.db/pool)
:executor (ig/ref [::worker :app.worker/executor])} :executor (ig/ref [::default :app.worker/executor])}
:app.loggers.audit/collector :app.loggers.audit/collector
{:pool (ig/ref :app.db/pool) {:pool (ig/ref :app.db/pool)
@ -288,7 +305,8 @@
:app.loggers.audit/archive-task :app.loggers.audit/archive-task
{:uri (cf/get :audit-log-archive-uri) {:uri (cf/get :audit-log-archive-uri)
:tokens (ig/ref :app.tokens/tokens) :tokens (ig/ref :app.tokens/tokens)
:pool (ig/ref :app.db/pool)} :pool (ig/ref :app.db/pool)
:http-client (ig/ref :app.http/client)}
:app.loggers.audit/gc-task :app.loggers.audit/gc-task
{:max-age (cf/get :audit-log-gc-max-age cf/deletion-delay) {:max-age (cf/get :audit-log-gc-max-age cf/deletion-delay)
@ -302,8 +320,7 @@
:app.loggers.mattermost/reporter :app.loggers.mattermost/reporter
{:uri (cf/get :error-report-webhook) {:uri (cf/get :error-report-webhook)
:receiver (ig/ref :app.loggers.zmq/receiver) :receiver (ig/ref :app.loggers.zmq/receiver)
:pool (ig/ref :app.db/pool) :http-client (ig/ref :app.http/client)}
:executor (ig/ref [::worker :app.worker/executor])}
:app.loggers.database/reporter :app.loggers.database/reporter
{:receiver (ig/ref :app.loggers.zmq/receiver) {:receiver (ig/ref :app.loggers.zmq/receiver)
@ -312,6 +329,8 @@
:app.storage/storage :app.storage/storage
{:pool (ig/ref :app.db/pool) {:pool (ig/ref :app.db/pool)
:executor (ig/ref [::default :app.worker/executor])
:backends :backends
{:assets-s3 (ig/ref [::assets :app.storage.s3/backend]) {:assets-s3 (ig/ref [::assets :app.storage.s3/backend])
:assets-db (ig/ref [::assets :app.storage.db/backend]) :assets-db (ig/ref [::assets :app.storage.db/backend])
@ -328,12 +347,14 @@
{:region (cf/get :storage-fdata-s3-region) {:region (cf/get :storage-fdata-s3-region)
:bucket (cf/get :storage-fdata-s3-bucket) :bucket (cf/get :storage-fdata-s3-bucket)
:endpoint (cf/get :storage-fdata-s3-endpoint) :endpoint (cf/get :storage-fdata-s3-endpoint)
:prefix (cf/get :storage-fdata-s3-prefix)} :prefix (cf/get :storage-fdata-s3-prefix)
:executor (ig/ref [::default :app.worker/executor])}
[::assets :app.storage.s3/backend] [::assets :app.storage.s3/backend]
{:region (cf/get :storage-assets-s3-region) {:region (cf/get :storage-assets-s3-region)
:endpoint (cf/get :storage-assets-s3-endpoint) :endpoint (cf/get :storage-assets-s3-endpoint)
:bucket (cf/get :storage-assets-s3-bucket)} :bucket (cf/get :storage-assets-s3-bucket)
:executor (ig/ref [::default :app.worker/executor])}
[::assets :app.storage.fs/backend] [::assets :app.storage.fs/backend]
{:directory (cf/get :storage-assets-fs-directory)} {:directory (cf/get :storage-assets-fs-directory)}

View file

@ -42,13 +42,14 @@
:internal.http.upload/tempfile :internal.http.upload/tempfile
:internal.http.upload/content-type])) :internal.http.upload/content-type]))
(defn validate-media-type (defn validate-media-type!
([mtype] (validate-media-type mtype cm/valid-image-types)) ([mtype] (validate-media-type! mtype cm/valid-image-types))
([mtype allowed] ([mtype allowed]
(when-not (contains? allowed mtype) (when-not (contains? allowed mtype)
(ex/raise :type :validation (ex/raise :type :validation
:code :media-type-not-allowed :code :media-type-not-allowed
:hint "Seems like you are uploading an invalid media object")))) :hint "Seems like you are uploading an invalid media object"))
mtype))
(defmulti process :cmd) (defmulti process :cmd)
(defmulti process-error class) (defmulti process-error class)

View file

@ -211,6 +211,9 @@
{:name "0067-add-team-invitation-table" {:name "0067-add-team-invitation-table"
:fn (mg/resource "app/migrations/sql/0067-add-team-invitation-table.sql")} :fn (mg/resource "app/migrations/sql/0067-add-team-invitation-table.sql")}
{:name "0068-mod-storage-object-table"
:fn (mg/resource "app/migrations/sql/0068-mod-storage-object-table.sql")}
]) ])

View file

@ -9,7 +9,6 @@ CREATE TABLE team_invitation (
PRIMARY KEY(team_id, email_to) PRIMARY KEY(team_id, email_to)
); );
ALTER TABLE team_invitation ALTER TABLE team_invitation
ALTER COLUMN email_to SET STORAGE external, ALTER COLUMN email_to SET STORAGE external,
ALTER COLUMN role SET STORAGE external; ALTER COLUMN role SET STORAGE external;

View file

@ -0,0 +1,3 @@
CREATE INDEX storage_object__hash_backend_bucket__idx
ON storage_object ((metadata->>'~:hash'), (metadata->>'~:bucket'), backend)
WHERE deleted_at IS NULL;

View file

@ -110,11 +110,11 @@
"Wraps service method into async flow, with the ability to dispatching "Wraps service method into async flow, with the ability to dispatching
it to a preconfigured executor service." it to a preconfigured executor service."
[{:keys [executors] :as cfg} f mdata] [{:keys [executors] :as cfg} f mdata]
(let [dname (::async/dispatch mdata :none)] (let [dname (::async/dispatch mdata :default)]
(if (= :none dname) (if (= :none dname)
(with-meta (with-meta
(fn [cfg params] (fn [cfg params]
(p/do! (f cfg params))) (p/do (f cfg params)))
mdata) mdata)
(let [executor (get executors dname)] (let [executor (get executors dname)]

View file

@ -17,12 +17,13 @@
[app.rpc.permissions :as perms] [app.rpc.permissions :as perms]
[app.rpc.queries.files :as files] [app.rpc.queries.files :as files]
[app.rpc.queries.projects :as proj] [app.rpc.queries.projects :as proj]
[app.rpc.rlimit :as rlimit]
[app.storage.impl :as simpl] [app.storage.impl :as simpl]
[app.util.async :as async]
[app.util.blob :as blob] [app.util.blob :as blob]
[app.util.services :as sv] [app.util.services :as sv]
[app.util.time :as dt] [app.util.time :as dt]
[clojure.spec.alpha :as s])) [clojure.spec.alpha :as s]
[promesa.core :as p]))
(declare create-file) (declare create-file)
@ -126,7 +127,6 @@
[{:keys [pool] :as cfg} {:keys [id profile-id] :as params}] [{:keys [pool] :as cfg} {:keys [id profile-id] :as params}]
(db/with-atomic [conn pool] (db/with-atomic [conn pool]
(files/check-edition-permissions! conn profile-id id) (files/check-edition-permissions! conn profile-id id)
(mark-file-deleted conn params))) (mark-file-deleted conn params)))
(defn mark-file-deleted (defn mark-file-deleted
@ -273,7 +273,7 @@
(contains? o :changes-with-metadata))))) (contains? o :changes-with-metadata)))))
(sv/defmethod ::update-file (sv/defmethod ::update-file
{::async/dispatch :blocking} {::rlimit/permits 20}
[{:keys [pool] :as cfg} {:keys [id profile-id] :as params}] [{:keys [pool] :as cfg} {:keys [id profile-id] :as params}]
(db/with-atomic [conn pool] (db/with-atomic [conn pool]
(db/xact-lock! conn id) (db/xact-lock! conn id)
@ -295,8 +295,9 @@
(defn- delete-from-storage (defn- delete-from-storage
[{:keys [storage] :as cfg} file] [{:keys [storage] :as cfg} file]
(p/do
(when-let [backend (simpl/resolve-backend storage (:data-backend file))] (when-let [backend (simpl/resolve-backend storage (:data-backend file))]
(simpl/del-object backend file))) (simpl/del-object backend file))))
(defn- update-file (defn- update-file
[{:keys [conn metrics] :as cfg} {:keys [file changes changes-with-metadata session-id profile-id] :as params}] [{:keys [conn metrics] :as cfg} {:keys [file changes changes-with-metadata session-id profile-id] :as params}]
@ -353,7 +354,7 @@
;; We need to delete the data from external storage backend ;; We need to delete the data from external storage backend
(when-not (nil? (:data-backend file)) (when-not (nil? (:data-backend file))
(delete-from-storage cfg file)) @(delete-from-storage cfg file))
(db/update! conn :project (db/update! conn :project
{:modified-at ts} {:modified-at ts}

View file

@ -6,6 +6,7 @@
(ns app.rpc.mutations.fonts (ns app.rpc.mutations.fonts
(:require (:require
[app.common.data :as d]
[app.common.exceptions :as ex] [app.common.exceptions :as ex]
[app.common.spec :as us] [app.common.spec :as us]
[app.common.uuid :as uuid] [app.common.uuid :as uuid]
@ -15,7 +16,9 @@
[app.storage :as sto] [app.storage :as sto]
[app.util.services :as sv] [app.util.services :as sv]
[app.util.time :as dt] [app.util.time :as dt]
[clojure.spec.alpha :as s])) [clojure.spec.alpha :as s]
[promesa.core :as p]
[promesa.exec :as px]))
(declare create-font-variant) (declare create-font-variant)
@ -38,45 +41,57 @@
(sv/defmethod ::create-font-variant (sv/defmethod ::create-font-variant
[{:keys [pool] :as cfg} {:keys [team-id profile-id] :as params}] [{:keys [pool] :as cfg} {:keys [team-id profile-id] :as params}]
(let [cfg (update cfg :storage media/configure-assets-storage)]
(teams/check-edition-permissions! pool profile-id team-id) (teams/check-edition-permissions! pool profile-id team-id)
(create-font-variant cfg params)) (create-font-variant cfg params)))
(defn create-font-variant (defn create-font-variant
[{:keys [storage pool] :as cfg} {:keys [data] :as params}] [{:keys [storage pool executors] :as cfg} {:keys [data] :as params}]
(let [data (media/run {:cmd :generate-fonts :input data}) (letfn [(generate-fonts [data]
storage (media/configure-assets-storage storage)] (px/with-dispatch (:blocking executors)
(media/run {:cmd :generate-fonts :input data})))
;; Function responsible of calculating cryptographyc hash of
;; the provided data. Even though it uses the hight
;; performance BLAKE2b algorithm, we prefer to schedule it
;; to be executed on the blocking executor.
(calculate-hash [data]
(px/with-dispatch (:blocking executors)
(sto/calculate-hash data)))
(validate-data [data]
(when (and (not (contains? data "font/otf")) (when (and (not (contains? data "font/otf"))
(not (contains? data "font/ttf")) (not (contains? data "font/ttf"))
(not (contains? data "font/woff")) (not (contains? data "font/woff"))
(not (contains? data "font/woff2"))) (not (contains? data "font/woff2")))
(ex/raise :type :validation (ex/raise :type :validation
:code :invalid-font-upload)) :code :invalid-font-upload))
data)
(let [otf (when-let [fdata (get data "font/otf")] (persist-font-object [data mtype]
(sto/put-object storage {:content (sto/content fdata) (when-let [fdata (get data mtype)]
:content-type "font/otf" (p/let [hash (calculate-hash fdata)
:reference :team-font-variant content (-> (sto/content fdata)
:touched-at (dt/now)})) (sto/wrap-with-hash hash))]
(sto/put-object! storage {::sto/content content
::sto/touched-at (dt/now)
::sto/deduplicate? true
:content-type mtype
:bucket "team-font-variant"}))))
ttf (when-let [fdata (get data "font/ttf")] (persist-fonts [data]
(sto/put-object storage {:content (sto/content fdata) (p/let [otf (persist-font-object data "font/otf")
:content-type "font/ttf" ttf (persist-font-object data "font/ttf")
:touched-at (dt/now) woff1 (persist-font-object data "font/woff")
:reference :team-font-variant})) woff2 (persist-font-object data "font/woff2")]
woff1 (when-let [fdata (get data "font/woff")] (d/without-nils
(sto/put-object storage {:content (sto/content fdata) {:otf otf
:content-type "font/woff" :ttf ttf
:touched-at (dt/now) :woff1 woff1
:reference :team-font-variant})) :woff2 woff2})))
woff2 (when-let [fdata (get data "font/woff2")]
(sto/put-object storage {:content (sto/content fdata)
:content-type "font/woff2"
:touched-at (dt/now)
:reference :team-font-variant}))]
(insert-into-db [{:keys [woff1 woff2 otf ttf]}]
(db/insert! pool :team-font-variant (db/insert! pool :team-font-variant
{:id (uuid/next) {:id (uuid/next)
:team-id (:team-id params) :team-id (:team-id params)
@ -87,7 +102,13 @@
:woff1-file-id (:id woff1) :woff1-file-id (:id woff1)
:woff2-file-id (:id woff2) :woff2-file-id (:id woff2)
:otf-file-id (:id otf) :otf-file-id (:id otf)
:ttf-file-id (:id ttf)})))) :ttf-file-id (:id ttf)}))
]
(-> (generate-fonts data)
(p/then validate-data)
(p/then persist-fonts (:default executors))
(p/then insert-into-db (:default executors)))))
;; --- UPDATE FONT FAMILY ;; --- UPDATE FONT FAMILY

View file

@ -6,6 +6,7 @@
(ns app.rpc.mutations.media (ns app.rpc.mutations.media
(:require (:require
[app.common.data :as d]
[app.common.exceptions :as ex] [app.common.exceptions :as ex]
[app.common.media :as cm] [app.common.media :as cm]
[app.common.spec :as us] [app.common.spec :as us]
@ -16,12 +17,12 @@
[app.rpc.queries.teams :as teams] [app.rpc.queries.teams :as teams]
[app.rpc.rlimit :as rlimit] [app.rpc.rlimit :as rlimit]
[app.storage :as sto] [app.storage :as sto]
[app.util.async :as async]
[app.util.http :as http]
[app.util.services :as sv] [app.util.services :as sv]
[app.util.time :as dt] [app.util.time :as dt]
[clojure.spec.alpha :as s] [clojure.spec.alpha :as s]
[datoteka.core :as fs])) [datoteka.core :as fs]
[promesa.core :as p]
[promesa.exec :as px]))
(def thumbnail-options (def thumbnail-options
{:width 100 {:width 100
@ -50,10 +51,10 @@
:opt-un [::id])) :opt-un [::id]))
(sv/defmethod ::upload-file-media-object (sv/defmethod ::upload-file-media-object
{::rlimit/permits (cf/get :rlimit-image) {::rlimit/permits (cf/get :rlimit-image)}
::async/dispatch :default}
[{:keys [pool] :as cfg} {:keys [profile-id file-id] :as params}] [{:keys [pool] :as cfg} {:keys [profile-id file-id] :as params}]
(let [file (select-file pool file-id)] (let [file (select-file pool file-id)
cfg (update cfg :storage media/configure-assets-storage)]
(teams/check-edition-permissions! pool profile-id (:team-id file)) (teams/check-edition-permissions! pool profile-id (:team-id file))
(create-file-media-object cfg params))) (create-file-media-object cfg params)))
@ -68,34 +69,6 @@
[info] [info]
(= (:mtype info) "image/svg+xml")) (= (:mtype info) "image/svg+xml"))
(defn- fetch-url
[url]
(try
(http/get! url {:as :byte-array})
(catch Exception e
(ex/raise :type :validation
:code :unable-to-access-to-url
:cause e))))
;; TODO: we need to check the size before fetch resource, if not we
;; can start downloading very big object and cause OOM errors.
(defn- download-media
[{:keys [storage] :as cfg} url]
(let [result (fetch-url url)
data (:body result)
mtype (get (:headers result) "content-type")
format (cm/mtype->format mtype)]
(when (nil? format)
(ex/raise :type :validation
:code :media-type-not-allowed
:hint "Seems like the url points to an invalid media object."))
(-> (assoc storage :backend :tmp)
(sto/put-object {:content (sto/content data)
:content-type mtype
:reference :file-media-object
:expired-at (dt/in-future {:minutes 30})}))))
;; NOTE: we use the `on conflict do update` instead of `do nothing` ;; NOTE: we use the `on conflict do update` instead of `do nothing`
;; because postgresql does not returns anything if no update is ;; because postgresql does not returns anything if no update is
;; performed, the `do update` does the trick. ;; performed, the `do update` does the trick.
@ -121,67 +94,137 @@
;; inverse, soft referential integrity). ;; inverse, soft referential integrity).
(defn create-file-media-object (defn create-file-media-object
[{:keys [storage pool] :as cfg} {:keys [id file-id is-local name content] :as params}] [{:keys [storage pool executors] :as cfg} {:keys [id file-id is-local name content] :as params}]
(media/validate-media-type (:content-type content)) (media/validate-media-type! (:content-type content))
(let [source-path (fs/path (:tempfile content))
source-mtype (:content-type content)
source-info (media/run {:cmd :info :input {:path source-path :mtype source-mtype}})
storage (media/configure-assets-storage storage)
thumb (when (and (not (svg-image? source-info)) (letfn [;; Function responsible to retrieve the file information, as
(big-enough-for-thumbnail? source-info)) ;; it is synchronous operation it should be wrapped into
;; with-dispatch macro.
(get-info [path mtype]
(px/with-dispatch (:blocking executors)
(media/run {:cmd :info :input {:path path :mtype mtype}})))
;; Function responsible of calculating cryptographyc hash of
;; the provided data. Even though it uses the hight
;; performance BLAKE2b algorithm, we prefer to schedule it
;; to be executed on the blocking executor.
(calculate-hash [data]
(px/with-dispatch (:blocking executors)
(sto/calculate-hash data)))
;; Function responsible of generating thumnail. As it is synchronous
;; opetation, it should be wrapped into with-dispatch macro
(generate-thumbnail [info path]
(px/with-dispatch (:blocking executors)
(media/run (assoc thumbnail-options (media/run (assoc thumbnail-options
:cmd :generic-thumbnail :cmd :generic-thumbnail
:input {:mtype (:mtype source-info) :input {:mtype (:mtype info) :path path}))))
:path source-path})))
image (if (= (:mtype source-info) "image/svg+xml") (create-thumbnail [info path]
(let [data (slurp source-path)] (when (and (not (svg-image? info))
(sto/put-object storage (big-enough-for-thumbnail? info))
{:content (sto/content data) (p/let [thumb (generate-thumbnail info path)
:content-type (:mtype source-info) hash (calculate-hash (:data thumb))
:reference :file-media-object content (-> (sto/content (:data thumb) (:size thumb))
:touched-at (dt/now)})) (sto/wrap-with-hash hash))]
(sto/put-object storage (sto/put-object! storage
{:content (sto/content source-path) {::sto/content content
:content-type (:mtype source-info) ::sto/deduplicate? true
:reference :file-media-object ::sto/touched-at (dt/now)
:touched-at (dt/now)}))
thumb (when thumb
(sto/put-object storage
{:content (sto/content (:data thumb) (:size thumb))
:content-type (:mtype thumb) :content-type (:mtype thumb)
:reference :file-media-object :bucket "file-media-object"}))))
:touched-at (dt/now)}))]
(create-image [info path]
(p/let [data (cond-> path (= (:mtype info) "image/svg+xml") slurp)
hash (calculate-hash data)
content (-> (sto/content data)
(sto/wrap-with-hash hash))]
(sto/put-object! storage
{::sto/content content
::sto/deduplicate? true
::sto/touched-at (dt/now)
:content-type (:mtype info)
:bucket "file-media-object"})))
(insert-into-database [info image thumb]
(px/with-dispatch (:default executors)
(db/exec-one! pool [sql:create-file-media-object (db/exec-one! pool [sql:create-file-media-object
(or id (uuid/next)) (or id (uuid/next))
file-id is-local name file-id is-local name
(:id image) (:id image)
(:id thumb) (:id thumb)
(:width source-info) (:width info)
(:height source-info) (:height info)
source-mtype]))) (:mtype info)])))]
(p/let [path (fs/path (:tempfile content))
info (get-info path (:content-type content))
thumb (create-thumbnail info path)
image (create-image info path)]
(insert-into-database info image thumb))))
;; --- Create File Media Object (from URL) ;; --- Create File Media Object (from URL)
(declare ^:private create-file-media-object-from-url)
(s/def ::create-file-media-object-from-url (s/def ::create-file-media-object-from-url
(s/keys :req-un [::profile-id ::file-id ::is-local ::url] (s/keys :req-un [::profile-id ::file-id ::is-local ::url]
:opt-un [::id ::name])) :opt-un [::id ::name]))
(sv/defmethod ::create-file-media-object-from-url (sv/defmethod ::create-file-media-object-from-url
{::rlimit/permits (cf/get :rlimit-image) {::rlimit/permits (cf/get :rlimit-image)}
::async/dispatch :default} [{:keys [pool] :as cfg} {:keys [profile-id file-id] :as params}]
[{:keys [pool storage] :as cfg} {:keys [profile-id file-id url name] :as params}] (let [file (select-file pool file-id)
(let [file (select-file pool file-id)] cfg (update cfg :storage media/configure-assets-storage)]
(teams/check-edition-permissions! pool profile-id (:team-id file)) (teams/check-edition-permissions! pool profile-id (:team-id file))
(let [mobj (download-media cfg url) (create-file-media-object-from-url cfg params)))
content {:filename "tempfile"
:size (:size mobj)
:tempfile (sto/get-object-path storage mobj)
:content-type (:content-type (meta mobj))}]
(def max-download-file-size
(* 1024 1024 100)) ; 100MiB
(defn- create-file-media-object-from-url
[{:keys [storage http-client] :as cfg} {:keys [url name] :as params}]
(letfn [(parse-and-validate-size [headers]
(let [size (some-> (get headers "content-length") d/parse-integer)
mtype (get headers "content-type")
format (cm/mtype->format mtype)]
(when-not size
(ex/raise :type :validation
:code :unknown-size
:hint "Seems like the url points to resource with unknown size"))
(when (> size max-download-file-size)
(ex/raise :type :validation
:code :file-too-large
:hint "Seems like the url points to resource with size greater than 100MiB"))
(when (nil? format)
(ex/raise :type :validation
:code :media-type-not-allowed
:hint "Seems like the url points to an invalid media object"))
{:size size
:mtype mtype
:format format}))
(download-media [uri]
(p/let [{:keys [body headers]} (http-client {:method :get :uri uri} {:response-type :input-stream})
{:keys [size mtype]} (parse-and-validate-size headers)]
(-> (assoc storage :backend :tmp)
(sto/put-object! {::sto/content (sto/content body size)
::sto/expired-at (dt/in-future {:minutes 30})
:content-type mtype
:bucket "file-media-object"})
(p/then (fn [sobj]
(p/let [path (sto/get-object-path storage sobj)]
{:filename "tempfile"
:size (:size sobj)
:tempfile path
:content-type (:content-type (meta sobj))}))))))]
(p/let [content (download-media url)]
(->> (merge params {:content content :name (or name (:filename content))}) (->> (merge params {:content content :name (or name (:filename content))})
(create-file-media-object cfg))))) (create-file-media-object cfg)))))

View file

@ -24,7 +24,8 @@
[app.util.time :as dt] [app.util.time :as dt]
[buddy.hashers :as hashers] [buddy.hashers :as hashers]
[clojure.spec.alpha :as s] [clojure.spec.alpha :as s]
[cuerdas.core :as str])) [cuerdas.core :as str]
[promesa.exec :as px]))
;; --- Helpers & Specs ;; --- Helpers & Specs
@ -345,6 +346,7 @@
(profile/decode-profile-row) (profile/decode-profile-row)
(profile/strip-private-attrs)))) (profile/strip-private-attrs))))
(s/def ::update-profile (s/def ::update-profile
(s/keys :req-un [::id ::fullname] (s/keys :req-un [::id ::fullname]
:opt-un [::lang ::theme])) :opt-un [::lang ::theme]))
@ -410,32 +412,33 @@
(s/def ::update-profile-photo (s/def ::update-profile-photo
(s/keys :req-un [::profile-id ::file])) (s/keys :req-un [::profile-id ::file]))
;; TODO: properly handle resource usage, transactions and storage
(sv/defmethod ::update-profile-photo (sv/defmethod ::update-profile-photo
{::rlimit/permits (cf/get :rlimit-image)} {::rlimit/permits (cf/get :rlimit-image)}
[{:keys [pool storage] :as cfg} {:keys [profile-id file] :as params}] [{:keys [pool storage executors] :as cfg} {:keys [profile-id file] :as params}]
(db/with-atomic [conn pool]
(media/validate-media-type (:content-type file) #{"image/jpeg" "image/png" "image/webp"})
(media/run {:cmd :info :input {:path (:tempfile file)
:mtype (:content-type file)}})
;; Validate incoming mime type
(media/validate-media-type! (:content-type file) #{"image/jpeg" "image/png" "image/webp"})
;; Perform file validation
@(px/with-dispatch (:blocking executors)
(media/run {:cmd :info :input {:path (:tempfile file) :mtype (:content-type file)}}))
(db/with-atomic [conn pool]
(let [profile (db/get-by-id conn :profile profile-id) (let [profile (db/get-by-id conn :profile profile-id)
storage (media/configure-assets-storage storage conn) cfg (update cfg :storage media/configure-assets-storage conn)
cfg (assoc cfg :storage storage) photo @(teams/upload-photo cfg params)]
photo (teams/upload-photo cfg params)]
;; Schedule deletion of old photo ;; Schedule deletion of old photo
(when-let [id (:photo-id profile)] (when-let [id (:photo-id profile)]
(sto/del-object storage id)) @(sto/touch-object! storage id))
;; Save new photo ;; Save new photo
(update-profile-photo conn profile-id photo))))
(defn- update-profile-photo
[conn profile-id sobj]
(db/update! conn :profile (db/update! conn :profile
{:photo-id (:id sobj)} {:photo-id (:id photo)}
{:id profile-id}) {:id profile-id})
nil) nil)))
;; --- MUTATION: Request Email Change ;; --- MUTATION: Request Email Change

View file

@ -24,7 +24,9 @@
[app.util.time :as dt] [app.util.time :as dt]
[clojure.spec.alpha :as s] [clojure.spec.alpha :as s]
[cuerdas.core :as str] [cuerdas.core :as str]
[datoteka.core :as fs])) [datoteka.core :as fs]
[promesa.core :as p]
[promesa.exec :as px]))
;; --- Helpers & Specs ;; --- Helpers & Specs
@ -276,7 +278,6 @@
nil))) nil)))
;; --- Mutation: Update Team Photo ;; --- Mutation: Update Team Photo
(declare upload-photo) (declare upload-photo)
@ -289,21 +290,25 @@
(sv/defmethod ::update-team-photo (sv/defmethod ::update-team-photo
{::rlimit/permits (cf/get :rlimit-image)} {::rlimit/permits (cf/get :rlimit-image)}
[{:keys [pool storage] :as cfg} {:keys [profile-id file team-id] :as params}] [{:keys [pool storage executors] :as cfg} {:keys [profile-id file team-id] :as params}]
;; Validate incoming mime type
(media/validate-media-type! (:content-type file) #{"image/jpeg" "image/png" "image/webp"})
;; Perform file validation
@(px/with-dispatch (:blocking executors)
(media/run {:cmd :info :input {:path (:tempfile file) :mtype (:content-type file)}}))
(db/with-atomic [conn pool] (db/with-atomic [conn pool]
(teams/check-edition-permissions! conn profile-id team-id) (teams/check-edition-permissions! conn profile-id team-id)
(media/validate-media-type (:content-type file) #{"image/jpeg" "image/png" "image/webp"})
(media/run {:cmd :info :input {:path (:tempfile file)
:mtype (:content-type file)}})
(let [team (teams/retrieve-team conn profile-id team-id) (let [team (teams/retrieve-team conn profile-id team-id)
storage (media/configure-assets-storage storage conn) cfg (update cfg :storage media/configure-assets-storage conn)
cfg (assoc cfg :storage storage) photo @(upload-photo cfg params)]
photo (upload-photo cfg params)]
;; Schedule deletion of old photo ;; Mark object as touched for make it ellegible for tentative
;; garbage collection.
(when-let [id (:photo-id team)] (when-let [id (:photo-id team)]
(sto/del-object storage id)) @(sto/touch-object! storage id))
;; Save new photo ;; Save new photo
(db/update! conn :team (db/update! conn :team
@ -313,17 +318,33 @@
(assoc team :photo-id (:id photo))))) (assoc team :photo-id (:id photo)))))
(defn upload-photo (defn upload-photo
[{:keys [storage] :as cfg} {:keys [file]}] [{:keys [storage executors] :as cfg} {:keys [file]}]
(let [thumb (media/run {:cmd :profile-thumbnail (letfn [(generate-thumbnail [path mtype]
(px/with-dispatch (:blocking executors)
(media/run {:cmd :profile-thumbnail
:format :jpeg :format :jpeg
:quality 85 :quality 85
:width 256 :width 256
:height 256 :height 256
:input {:path (fs/path (:tempfile file)) :input {:path path :mtype mtype}})))
:mtype (:content-type file)}})]
(sto/put-object storage ;; Function responsible of calculating cryptographyc hash of
{:content (sto/content (:data thumb) (:size thumb)) ;; the provided data. Even though it uses the hight
:content-type (:mtype thumb)}))) ;; performance BLAKE2b algorithm, we prefer to schedule it
;; to be executed on the blocking executor.
(calculate-hash [data]
(px/with-dispatch (:blocking executors)
(sto/calculate-hash data)))]
(p/let [thumb (generate-thumbnail (fs/path (:tempfile file))
(:content-type file))
hash (calculate-hash (:data thumb))
content (-> (sto/content (:data thumb) (:size thumb))
(sto/wrap-with-hash hash))]
(sto/put-object! storage {::sto/content content
::sto/deduplicate? true
:bucket "profile"
:content-type (:mtype thumb)}))))
;; --- Mutation: Invite Member ;; --- Mutation: Invite Member

View file

@ -19,7 +19,8 @@
[app.storage.impl :as simpl] [app.storage.impl :as simpl]
[app.util.blob :as blob] [app.util.blob :as blob]
[app.util.services :as sv] [app.util.services :as sv]
[clojure.spec.alpha :as s])) [clojure.spec.alpha :as s]
[promesa.core :as p]))
(declare decode-row) (declare decode-row)
(declare decode-row-xf) (declare decode-row-xf)
@ -35,7 +36,6 @@
(s/def ::team-id ::us/uuid) (s/def ::team-id ::us/uuid)
(s/def ::search-term ::us/string) (s/def ::search-term ::us/string)
;; --- Query: File Permissions ;; --- Query: File Permissions
(def ^:private sql:file-permissions (def ^:private sql:file-permissions
@ -188,18 +188,20 @@
(defn- retrieve-data* (defn- retrieve-data*
[{:keys [storage] :as cfg} file] [{:keys [storage] :as cfg} file]
(p/do
(when-let [backend (simpl/resolve-backend storage (:data-backend file))] (when-let [backend (simpl/resolve-backend storage (:data-backend file))]
(simpl/get-object-bytes backend file))) (simpl/get-object-bytes backend file))))
(defn retrieve-data (defn retrieve-data
[cfg file] [cfg file]
(if (bytes? (:data file)) (if (bytes? (:data file))
file file
(assoc file :data (retrieve-data* cfg file)))) (p/->> (retrieve-data* cfg file)
(assoc file :data))))
(defn retrieve-file (defn retrieve-file
[{:keys [conn] :as cfg} id] [{:keys [pool] :as cfg} id]
(->> (db/get-by-id conn :file id) (p/->> (db/get-by-id pool :file id)
(retrieve-data cfg) (retrieve-data cfg)
(decode-row) (decode-row)
(pmg/migrate-file))) (pmg/migrate-file)))
@ -210,13 +212,10 @@
(sv/defmethod ::file (sv/defmethod ::file
"Retrieve a file by its ID. Only authenticated users." "Retrieve a file by its ID. Only authenticated users."
[{:keys [pool] :as cfg} {:keys [profile-id id] :as params}] [{:keys [pool] :as cfg} {:keys [profile-id id] :as params}]
(db/with-atomic [conn pool] (let [perms (get-permissions pool profile-id id)]
(let [cfg (assoc cfg :conn conn)
perms (get-permissions conn profile-id id)]
(check-read-permissions! perms) (check-read-permissions! perms)
(some-> (retrieve-file cfg id) (p/-> (retrieve-file cfg id)
(assoc :permissions perms))))) (assoc :permissions perms))))
(declare trim-file-data) (declare trim-file-data)
@ -232,13 +231,11 @@
need force download all shapes when only a small subset is need force download all shapes when only a small subset is
necesseary." necesseary."
[{:keys [pool] :as cfg} {:keys [profile-id id] :as params}] [{:keys [pool] :as cfg} {:keys [profile-id id] :as params}]
(db/with-atomic [conn pool] (let [perms (get-permissions pool profile-id id)]
(let [cfg (assoc cfg :conn conn)
perms (get-permissions conn profile-id id)]
(check-read-permissions! perms) (check-read-permissions! perms)
(some-> (retrieve-file cfg id) (p/-> (retrieve-file cfg id)
(trim-file-data params) (trim-file-data params)
(assoc :permissions perms))))) (assoc :permissions perms))))
(defn- trim-file-data (defn- trim-file-data
[file {:keys [page-id object-id]}] [file {:keys [page-id object-id]}]
@ -263,15 +260,12 @@
"Retrieves the first page of the file. Used mainly for render "Retrieves the first page of the file. Used mainly for render
thumbnails on dashboard." thumbnails on dashboard."
[{:keys [pool] :as cfg} {:keys [profile-id file-id] :as props}] [{:keys [pool] :as cfg} {:keys [profile-id file-id] :as props}]
(db/with-atomic [conn pool] (check-read-permissions! pool profile-id file-id)
(check-read-permissions! conn profile-id file-id) (p/let [file (retrieve-file cfg file-id)
(let [cfg (assoc cfg :conn conn)
file (retrieve-file cfg file-id)
page-id (get-in file [:data :pages 0])] page-id (get-in file [:data :pages 0])]
(cond-> (get-in file [:data :pages-index page-id]) (cond-> (get-in file [:data :pages-index page-id])
(true? (:strip-frames-with-thumbnails props)) (true? (:strip-frames-with-thumbnails props))
(strip-frames-with-thumbnails))))) (strip-frames-with-thumbnails))))
(defn strip-frames-with-thumbnails (defn strip-frames-with-thumbnails
"Remove unnecesary shapes from frames that have thumbnail." "Remove unnecesary shapes from frames that have thumbnail."
@ -354,22 +348,20 @@
WHERE l.deleted_at IS NULL OR l.deleted_at > now();") WHERE l.deleted_at IS NULL OR l.deleted_at > now();")
(defn retrieve-file-libraries (defn retrieve-file-libraries
[{:keys [conn] :as cfg} is-indirect file-id] [{:keys [pool] :as cfg} is-indirect file-id]
(let [xform (comp (let [xform (comp
(map #(assoc % :is-indirect is-indirect)) (map #(assoc % :is-indirect is-indirect))
(map #(retrieve-data cfg %)) (map #(retrieve-data cfg %))
(map decode-row))] (map decode-row))]
(into #{} xform (db/exec! conn [sql:file-libraries file-id])))) (into #{} xform (db/exec! pool [sql:file-libraries file-id]))))
(s/def ::file-libraries (s/def ::file-libraries
(s/keys :req-un [::profile-id ::file-id])) (s/keys :req-un [::profile-id ::file-id]))
(sv/defmethod ::file-libraries (sv/defmethod ::file-libraries
[{:keys [pool] :as cfg} {:keys [profile-id file-id] :as params}] [{:keys [pool] :as cfg} {:keys [profile-id file-id] :as params}]
(db/with-atomic [conn pool] (check-read-permissions! pool profile-id file-id)
(let [cfg (assoc cfg :conn conn)] (retrieve-file-libraries cfg false file-id))
(check-read-permissions! conn profile-id file-id)
(retrieve-file-libraries cfg false file-id))))
;; --- QUERY: team-recent-files ;; --- QUERY: team-recent-files
@ -399,9 +391,8 @@
(sv/defmethod ::team-recent-files (sv/defmethod ::team-recent-files
[{:keys [pool] :as cfg} {:keys [profile-id team-id]}] [{:keys [pool] :as cfg} {:keys [profile-id team-id]}]
(with-open [conn (db/open pool)] (teams/check-read-permissions! pool profile-id team-id)
(teams/check-read-permissions! conn profile-id team-id) (db/exec! pool [sql:team-recent-files team-id]))
(db/exec! conn [sql:team-recent-files team-id])))
;; --- QUERY: get the thumbnail for an frame ;; --- QUERY: get the thumbnail for an frame
@ -417,10 +408,8 @@
(sv/defmethod ::file-frame-thumbnail (sv/defmethod ::file-frame-thumbnail
[{:keys [pool]} {:keys [profile-id file-id frame-id]}] [{:keys [pool]} {:keys [profile-id file-id frame-id]}]
(with-open [conn (db/open pool)] (check-read-permissions! pool profile-id file-id)
(check-read-permissions! conn profile-id file-id) (db/exec-one! pool [sql:file-frame-thumbnail file-id frame-id]))
(db/exec-one! conn [sql:file-frame-thumbnail file-id frame-id])))
;; --- Helpers ;; --- Helpers

View file

@ -13,25 +13,26 @@
[app.rpc.queries.share-link :as slnk] [app.rpc.queries.share-link :as slnk]
[app.rpc.queries.teams :as teams] [app.rpc.queries.teams :as teams]
[app.util.services :as sv] [app.util.services :as sv]
[clojure.spec.alpha :as s])) [clojure.spec.alpha :as s]
[promesa.core :as p]))
;; --- Query: View Only Bundle ;; --- Query: View Only Bundle
(defn- retrieve-project (defn- retrieve-project
[conn id] [pool id]
(db/get-by-id conn :project id {:columns [:id :name :team-id]})) (db/get-by-id pool :project id {:columns [:id :name :team-id]}))
(defn- retrieve-bundle (defn- retrieve-bundle
[{:keys [conn] :as cfg} file-id] [{:keys [pool] :as cfg} file-id]
(let [file (files/retrieve-file cfg file-id) (p/let [file (files/retrieve-file cfg file-id)
project (retrieve-project conn (:project-id file)) project (retrieve-project pool (:project-id file))
libs (files/retrieve-file-libraries cfg false file-id) libs (files/retrieve-file-libraries cfg false file-id)
users (teams/retrieve-users conn (:team-id project)) users (teams/retrieve-users pool (:team-id project))
links (->> (db/query conn :share-link {:file-id file-id}) links (->> (db/query pool :share-link {:file-id file-id})
(mapv slnk/decode-share-link-row)) (mapv slnk/decode-share-link-row))
fonts (db/query conn :team-font-variant fonts (db/query pool :team-font-variant
{:team-id (:team-id project) {:team-id (:team-id project)
:deleted-at nil})] :deleted-at nil})]
{:file file {:file file
@ -50,12 +51,9 @@
(sv/defmethod ::view-only-bundle {:auth false} (sv/defmethod ::view-only-bundle {:auth false}
[{:keys [pool] :as cfg} {:keys [profile-id file-id share-id] :as params}] [{:keys [pool] :as cfg} {:keys [profile-id file-id share-id] :as params}]
(db/with-atomic [conn pool] (p/let [slink (slnk/retrieve-share-link pool file-id share-id)
(let [cfg (assoc cfg :conn conn) perms (files/get-permissions pool profile-id file-id share-id)
slink (slnk/retrieve-share-link conn file-id share-id) bundle (p/-> (retrieve-bundle cfg file-id)
perms (files/get-permissions conn profile-id file-id share-id)
bundle (some-> (retrieve-bundle cfg file-id)
(assoc :permissions perms))] (assoc :permissions perms))]
;; When we have neither profile nor share, we just return a not ;; When we have neither profile nor share, we just return a not
@ -68,7 +66,7 @@
;; When we have only profile, we need to check read permissions ;; When we have only profile, we need to check read permissions
;; on file. ;; on file.
(when (and profile-id (not slink)) (when (and profile-id (not slink))
(files/check-read-permissions! conn profile-id file-id)) (files/check-read-permissions! pool profile-id file-id))
(cond-> bundle (cond-> bundle
(some? slink) (some? slink)
@ -80,4 +78,4 @@
(let [allowed-pages (:pages slink)] (let [allowed-pages (:pages slink)]
(-> data (-> data
(update :pages (fn [pages] (filterv #(contains? allowed-pages %) pages))) (update :pages (fn [pages] (filterv #(contains? allowed-pages %) pages)))
(update :pages-index (fn [index] (select-keys index allowed-pages))))))))))) (update :pages-index (fn [index] (select-keys index allowed-pages))))))))))

View file

@ -52,7 +52,7 @@
)))) ))))
(defn wrap-rlimit (defn wrap-rlimit
[{:keys [metrics] :as cfg} f mdata] [{:keys [metrics executors] :as cfg} f mdata]
(if-let [permits (::permits mdata)] (if-let [permits (::permits mdata)]
(let [sem (semaphore {:permits permits (let [sem (semaphore {:permits permits
:metrics metrics :metrics metrics
@ -60,7 +60,7 @@
(l/debug :hint "wrapping rlimit" :handler (::sv/name mdata) :permits permits) (l/debug :hint "wrapping rlimit" :handler (::sv/name mdata) :permits permits)
(fn [cfg params] (fn [cfg params]
(-> (acquire! sem) (-> (acquire! sem)
(p/then (fn [_] (f cfg params))) (p/then (fn [_] (f cfg params)) (:default executors))
(p/finally (fn [_ _] (release! sem)))))) (p/finally (fn [_ _] (release! sem))))))
f)) f))

View file

@ -19,9 +19,12 @@
[app.storage.impl :as impl] [app.storage.impl :as impl]
[app.storage.s3 :as ss3] [app.storage.s3 :as ss3]
[app.util.time :as dt] [app.util.time :as dt]
[app.worker :as wrk]
[clojure.spec.alpha :as s] [clojure.spec.alpha :as s]
[datoteka.core :as fs] [datoteka.core :as fs]
[integrant.core :as ig])) [integrant.core :as ig]
[promesa.core :as p]
[promesa.exec :as px]))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; Storage Module State ;; Storage Module State
@ -39,7 +42,7 @@
:db ::sdb/backend)))) :db ::sdb/backend))))
(defmethod ig/pre-init-spec ::storage [_] (defmethod ig/pre-init-spec ::storage [_]
(s/keys :req-un [::db/pool ::backends])) (s/keys :req-un [::db/pool ::wrk/executor ::backends]))
(defmethod ig/prep-key ::storage (defmethod ig/prep-key ::storage
[_ {:keys [backends] :as cfg}] [_ {:keys [backends] :as cfg}]
@ -67,39 +70,49 @@
(s/def ::storage-object storage-object?) (s/def ::storage-object storage-object?)
(s/def ::storage-content impl/content?) (s/def ::storage-content impl/content?)
(defn get-metadata
[params]
(into {}
(remove (fn [[k _]] (qualified-keyword? k)))
params))
(defn- clone-database-object (defn- get-database-object-by-hash
;; If we in this condition branch, this means we come from the [conn backend bucket hash]
;; clone-object, so we just need to clone it with a new backend. (let [sql (str "select * from storage_object "
[{:keys [conn backend]} object] " where (metadata->>'~:hash') = ? "
(let [id (uuid/random) " and (metadata->>'~:bucket') = ? "
mdata (meta object) " and backend = ?"
result (db/insert! conn :storage-object " and deleted_at is null"
{:id id " limit 1")]
:size (:size object) (db/exec-one! conn [sql hash bucket (name backend)])))
:backend (name backend)
:metadata (db/tjson mdata)
:deleted-at (:expired-at object)
:touched-at (:touched-at object)})]
(assoc object
:id (:id result)
:backend backend
:created-at (:created-at result)
:touched-at (:touched-at result))))
(defn- create-database-object (defn- create-database-object
[{:keys [conn backend]} {:keys [content] :as object}] [{:keys [conn backend executor]} {:keys [::content ::expired-at ::touched-at] :as params}]
(us/assert ::storage-content content) (us/assert ::storage-content content)
(px/with-dispatch executor
(let [id (uuid/random) (let [id (uuid/random)
mdata (dissoc object :content :expired-at :touched-at)
result (db/insert! conn :storage-object mdata (cond-> (get-metadata params)
(satisfies? impl/IContentHash content)
(assoc :hash (impl/get-hash content)))
;; NOTE: for now we don't reuse the deleted objects, but in
;; futute we can consider reusing deleted objects if we
;; found a duplicated one and is marked for deletion but
;; still not deleted.
result (when (and (::deduplicate? params)
(:hash mdata)
(:bucket mdata))
(get-database-object-by-hash conn backend (:bucket mdata) (:hash mdata)))
result (or result
(db/insert! conn :storage-object
{:id id {:id id
:size (count content) :size (count content)
:backend (name backend) :backend (name backend)
:metadata (db/tjson mdata) :metadata (db/tjson mdata)
:deleted-at (:expired-at object) :deleted-at expired-at
:touched-at (:touched-at object)})] :touched-at touched-at}))]
(StorageObject. (:id result) (StorageObject. (:id result)
(:size result) (:size result)
@ -108,7 +121,7 @@
(:touched-at result) (:touched-at result)
backend backend
mdata mdata
nil))) nil))))
(def ^:private sql:retrieve-storage-object (def ^:private sql:retrieve-storage-object
"select * from storage_object where id = ? and (deleted_at is null or deleted_at > now())") "select * from storage_object where id = ? and (deleted_at is null or deleted_at > now())")
@ -129,14 +142,6 @@
(when-let [res (db/exec-one! conn [sql:retrieve-storage-object id])] (when-let [res (db/exec-one! conn [sql:retrieve-storage-object id])]
(row->storage-object res))) (row->storage-object res)))
(def sql:delete-storage-object
"update storage_object set deleted_at=now() where id=?")
(defn- delete-database-object
[{:keys [conn] :as storage} id]
(let [result (db/exec-one! conn [sql:delete-storage-object id])]
(pos? (:next.jdbc/update-count result))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; API ;; API
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
@ -149,23 +154,23 @@
[url] [url]
(fs/path (java.net.URI. (str url)))) (fs/path (java.net.URI. (str url))))
(defn content (dm/export impl/content)
([data] (impl/content data nil)) (dm/export impl/wrap-with-hash)
([data size] (impl/content data size)))
(defn get-object (defn get-object
[{:keys [conn pool] :as storage} id] [{:keys [conn pool] :as storage} id]
(us/assert ::storage storage) (us/assert ::storage storage)
(p/do
(-> (assoc storage :conn (or conn pool)) (-> (assoc storage :conn (or conn pool))
(retrieve-database-object id))) (retrieve-database-object id))))
(defn put-object (defn put-object!
"Creates a new object with the provided content." "Creates a new object with the provided content."
[{:keys [pool conn backend] :as storage} {:keys [content] :as params}] [{:keys [pool conn backend] :as storage} {:keys [::content] :as params}]
(us/assert ::storage storage) (us/assert ::storage storage)
(us/assert ::storage-content content) (us/assert ::storage-content content)
(us/assert ::us/keyword backend) (us/assert ::us/keyword backend)
(let [storage (assoc storage :conn (or conn pool)) (p/let [storage (assoc storage :conn (or conn pool))
object (create-database-object storage params)] object (create-database-object storage params)]
;; Store the data finally on the underlying storage subsystem. ;; Store the data finally on the underlying storage subsystem.
@ -174,65 +179,56 @@
object)) object))
(defn clone-object (defn touch-object!
"Creates a clone of the provided object using backend based efficient "Mark object as touched."
method. Always clones objects to the configured default." [{:keys [pool conn] :as storage} object-or-id]
[{:keys [pool conn backend] :as storage} object] (p/do
(us/assert ::storage storage) (let [id (if (storage-object? object-or-id) (:id object-or-id) object-or-id)
(us/assert ::storage-object object) res (db/update! (or conn pool) :storage-object
(us/assert ::us/keyword backend) {:touched-at (dt/now)}
(let [storage (assoc storage :conn (or conn pool)) {:id id}
object* (clone-database-object storage object)] {:return-keys false})]
(if (= (:backend object) (:backend storage)) (pos? (:next.jdbc/update-count res)))))
;; if the source and destination backends are the same, we
;; proceed to use the fast path with specific copy
;; implementation on backend.
(-> (impl/resolve-backend storage (:backend storage))
(impl/copy-object object object*))
;; if the source and destination backends are different, we just
;; need to obtain the streams and proceed full copy of the data
(with-open [is (-> (impl/resolve-backend storage (:backend object))
(impl/get-object-data object))]
(-> (impl/resolve-backend storage (:backend storage))
(impl/put-object object* (impl/content is (:size object))))))
object*))
(defn get-object-data (defn get-object-data
"Return an input stream instance of the object content." "Return an input stream instance of the object content."
[{:keys [pool conn] :as storage} object] [{:keys [pool conn] :as storage} object]
(us/assert ::storage storage) (us/assert ::storage storage)
(p/do
(when (or (nil? (:expired-at object)) (when (or (nil? (:expired-at object))
(dt/is-after? (:expired-at object) (dt/now))) (dt/is-after? (:expired-at object) (dt/now)))
(-> (assoc storage :conn (or conn pool)) (-> (assoc storage :conn (or conn pool))
(impl/resolve-backend (:backend object)) (impl/resolve-backend (:backend object))
(impl/get-object-data object)))) (impl/get-object-data object)))))
(defn get-object-bytes (defn get-object-bytes
"Returns a byte array of object content." "Returns a byte array of object content."
[{:keys [pool conn] :as storage} object] [{:keys [pool conn] :as storage} object]
(us/assert ::storage storage) (us/assert ::storage storage)
(p/do
(when (or (nil? (:expired-at object)) (when (or (nil? (:expired-at object))
(dt/is-after? (:expired-at object) (dt/now))) (dt/is-after? (:expired-at object) (dt/now)))
(-> (assoc storage :conn (or conn pool)) (-> (assoc storage :conn (or conn pool))
(impl/resolve-backend (:backend object)) (impl/resolve-backend (:backend object))
(impl/get-object-bytes object)))) (impl/get-object-bytes object)))))
(defn get-object-url (defn get-object-url
([storage object] ([storage object]
(get-object-url storage object nil)) (get-object-url storage object nil))
([{:keys [conn pool] :as storage} object options] ([{:keys [conn pool] :as storage} object options]
(us/assert ::storage storage) (us/assert ::storage storage)
(p/do
(when (or (nil? (:expired-at object)) (when (or (nil? (:expired-at object))
(dt/is-after? (:expired-at object) (dt/now))) (dt/is-after? (:expired-at object) (dt/now)))
(-> (assoc storage :conn (or conn pool)) (-> (assoc storage :conn (or conn pool))
(impl/resolve-backend (:backend object)) (impl/resolve-backend (:backend object))
(impl/get-object-url object options))))) (impl/get-object-url object options))))))
(defn get-object-path (defn get-object-path
"Get the Path to the object. Only works with `:fs` type of "Get the Path to the object. Only works with `:fs` type of
storages." storages."
[storage object] [storage object]
(p/do
(let [backend (impl/resolve-backend storage (:backend object))] (let [backend (impl/resolve-backend storage (:backend object))]
(when (not= :fs (:type backend)) (when (not= :fs (:type backend))
(ex/raise :type :internal (ex/raise :type :internal
@ -240,16 +236,21 @@
:hint "get-object-path only works with fs type backends")) :hint "get-object-path only works with fs type backends"))
(when (or (nil? (:expired-at object)) (when (or (nil? (:expired-at object))
(dt/is-after? (:expired-at object) (dt/now))) (dt/is-after? (:expired-at object) (dt/now)))
(-> (impl/get-object-url backend object nil) (p/-> (impl/get-object-url backend object nil) file-url->path)))))
(file-url->path)))))
(defn del-object (defn del-object!
[{:keys [conn pool] :as storage} id-or-obj] [{:keys [conn pool] :as storage} object-or-id]
(us/assert ::storage storage) (us/assert ::storage storage)
(-> (assoc storage :conn (or conn pool)) (p/do
(delete-database-object (if (uuid? id-or-obj) id-or-obj (:id id-or-obj))))) (let [id (if (storage-object? object-or-id) (:id object-or-id) object-or-id)
res (db/update! (or conn pool) :storage-object
{:deleted-at (dt/now)}
{:id id}
{:return-keys false})]
(pos? (:next.jdbc/update-count res)))))
(dm/export impl/resolve-backend) (dm/export impl/resolve-backend)
(dm/export impl/calculate-hash)
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; Garbage Collection: Permanently delete objects ;; Garbage Collection: Permanently delete objects
@ -263,7 +264,7 @@
(s/def ::min-age ::dt/duration) (s/def ::min-age ::dt/duration)
(defmethod ig/pre-init-spec ::gc-deleted-task [_] (defmethod ig/pre-init-spec ::gc-deleted-task [_]
(s/keys :req-un [::storage ::db/pool ::min-age])) (s/keys :req-un [::storage ::db/pool ::min-age ::wrk/executor]))
(defmethod ig/init-key ::gc-deleted-task (defmethod ig/init-key ::gc-deleted-task
[_ {:keys [pool storage min-age] :as cfg}] [_ {:keys [pool storage min-age] :as cfg}]
@ -284,7 +285,7 @@
(delete-in-bulk [conn backend ids] (delete-in-bulk [conn backend ids]
(let [backend (impl/resolve-backend storage backend) (let [backend (impl/resolve-backend storage backend)
backend (assoc backend :conn conn)] backend (assoc backend :conn conn)]
(impl/del-objects-in-bulk backend ids)))] @(impl/del-objects-in-bulk backend ids)))]
(fn [_] (fn [_]
(db/with-atomic [conn pool] (db/with-atomic [conn pool]
@ -317,18 +318,23 @@
;; Garbage Collection: Analyze touched objects ;; Garbage Collection: Analyze touched objects
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; This task is part of the garbage collection of storage objects and is responsible on analyzing the touched ;; This task is part of the garbage collection of storage objects and
;; objects and mark them for deletion if corresponds. ;; is responsible on analyzing the touched objects and mark them for
;; deletion if corresponds.
;; ;;
;; For example: when file_media_object is deleted, the depending storage_object are marked as touched. This ;; For example: when file_media_object is deleted, the depending
;; means that some files that depend on a concrete storage_object are no longer exists and maybe this ;; storage_object are marked as touched. This means that some files
;; storage_object is no longer necessary and can be eligible for elimination. This task periodically analyzes ;; that depend on a concrete storage_object are no longer exists and
;; touched objects and mark them as freeze (means that has other references and the object is still valid) or ;; maybe this storage_object is no longer necessary and can be
;; deleted (no more references to this object so is ready to be deleted). ;; eligible for elimination. This task periodically analyzes touched
;; objects and mark them as freeze (means that has other references
;; and the object is still valid) or deleted (no more references to
;; this object so is ready to be deleted).
(declare sql:retrieve-touched-objects-chunk) (declare sql:retrieve-touched-objects-chunk)
(declare sql:retrieve-file-media-object-nrefs) (declare sql:retrieve-file-media-object-nrefs)
(declare sql:retrieve-team-font-variant-nrefs) (declare sql:retrieve-team-font-variant-nrefs)
(declare sql:retrieve-profile-nrefs)
(defmethod ig/pre-init-spec ::gc-touched-task [_] (defmethod ig/pre-init-spec ::gc-touched-task [_]
(s/keys :req-un [::db/pool])) (s/keys :req-un [::db/pool]))
@ -341,6 +347,9 @@
(has-file-media-object-nrefs? [conn id] (has-file-media-object-nrefs? [conn id]
(-> (db/exec-one! conn [sql:retrieve-file-media-object-nrefs id id]) :nrefs pos?)) (-> (db/exec-one! conn [sql:retrieve-file-media-object-nrefs id id]) :nrefs pos?))
(has-profile-nrefs? [conn id]
(-> (db/exec-one! conn [sql:retrieve-profile-nrefs id id]) :nrefs pos?))
(mark-freeze-in-bulk [conn ids] (mark-freeze-in-bulk [conn ids]
(db/exec-one! conn ["update storage_object set touched_at=null where id = ANY(?)" (db/exec-one! conn ["update storage_object set touched_at=null where id = ANY(?)"
(db/create-array conn "uuid" ids)])) (db/create-array conn "uuid" ids)]))
@ -349,17 +358,30 @@
(db/exec-one! conn ["update storage_object set deleted_at=now(), touched_at=null where id = ANY(?)" (db/exec-one! conn ["update storage_object set deleted_at=now(), touched_at=null where id = ANY(?)"
(db/create-array conn "uuid" ids)])) (db/create-array conn "uuid" ids)]))
;; NOTE: A getter that retrieves the key witch will be used
;; for group ids; previoulsy we have no value, then we
;; introduced the `:reference` prop, and then it is renamed
;; to `:bucket` and now is string instead. This is
;; implemented in this way for backward comaptibilty.
;; NOTE: we use the "file-media-object" as default value for
;; backward compatibility because when we deploy it we can
;; have old backend instances running in the same time as
;; the new one and we can still have storage-objects created
;; without bucket value. And we know that if it does not
;; have value, it means :file-media-object.
(get-bucket [{:keys [metadata]}]
(or (some-> metadata :bucket)
(some-> metadata :reference d/name)
"file-media-object"))
(retrieve-touched-chunk [conn cursor] (retrieve-touched-chunk [conn cursor]
(let [rows (->> (db/exec! conn [sql:retrieve-touched-objects-chunk cursor]) (let [rows (->> (db/exec! conn [sql:retrieve-touched-objects-chunk cursor])
(mapv #(d/update-when % :metadata db/decode-transit-pgobject))) (mapv #(d/update-when % :metadata db/decode-transit-pgobject)))]
kw (fn [o] (if (keyword? o) o (keyword o)))]
(when (seq rows) (when (seq rows)
[(-> rows peek :created-at) [(-> rows peek :created-at)
;; NOTE: we use the :file-media-object as default value for backward compatibility because when we (d/group-by' get-bucket :id rows)])))
;; deploy it we can have old backend instances running in the same time as the new one and we can
;; still have storage-objects created without reference value. And we know that if it does not
;; have value, it means :file-media-object.
(d/group-by' #(or (some-> % :metadata :reference kw) :file-media-object) :id rows)])))
(retrieve-touched [conn] (retrieve-touched [conn]
(->> (d/iteration (fn [cursor] (->> (d/iteration (fn [cursor]
@ -389,13 +411,14 @@
(loop [to-freeze 0 (loop [to-freeze 0
to-delete 0 to-delete 0
groups (retrieve-touched conn)] groups (retrieve-touched conn)]
(if-let [[reference ids] (first groups)] (if-let [[bucket ids] (first groups)]
(let [[f d] (case reference (let [[f d] (case bucket
:file-media-object (process-objects! conn has-file-media-object-nrefs? ids) "file-media-object" (process-objects! conn has-file-media-object-nrefs? ids)
:team-font-variant (process-objects! conn has-team-font-variant-nrefs? ids) "team-font-variant" (process-objects! conn has-team-font-variant-nrefs? ids)
"profile" (process-objects! conn has-profile-nrefs? ids)
(ex/raise :type :internal (ex/raise :type :internal
:code :unexpected-unknown-reference :code :unexpected-unknown-reference
:hint (format "unknown reference %s" (pr-str reference))))] :hint (dm/fmt "unknown reference %" bucket)))]
(recur (+ to-freeze f) (recur (+ to-freeze f)
(+ to-delete d) (+ to-delete d)
(rest groups))) (rest groups)))
@ -419,3 +442,7 @@
(select count(*) from team_font_variant where woff2_file_id = ?) + (select count(*) from team_font_variant where woff2_file_id = ?) +
(select count(*) from team_font_variant where otf_file_id = ?) + (select count(*) from team_font_variant where otf_file_id = ?) +
(select count(*) from team_font_variant where ttf_file_id = ?)) as nrefs") (select count(*) from team_font_variant where ttf_file_id = ?)) as nrefs")
(def sql:retrieve-profile-nrefs
"select ((select count(*) from profile where photo_id = ?) +
(select count(*) from team where photo_id = ?)) as nrefs")

View file

@ -10,7 +10,8 @@
[app.db :as db] [app.db :as db]
[app.storage.impl :as impl] [app.storage.impl :as impl]
[clojure.spec.alpha :as s] [clojure.spec.alpha :as s]
[integrant.core :as ig]) [integrant.core :as ig]
[promesa.exec :as px])
(:import (:import
java.io.ByteArrayInputStream)) java.io.ByteArrayInputStream))
@ -30,26 +31,23 @@
;; --- API IMPL ;; --- API IMPL
(defmethod impl/put-object :db (defmethod impl/put-object :db
[{:keys [conn] :as storage} {:keys [id] :as object} content] [{:keys [conn executor] :as storage} {:keys [id] :as object} content]
(px/with-dispatch executor
(let [data (impl/slurp-bytes content)] (let [data (impl/slurp-bytes content)]
(db/insert! conn :storage-data {:id id :data data}) (db/insert! conn :storage-data {:id id :data data})
object)) object)))
(defmethod impl/copy-object :db
[{:keys [conn] :as storage} src-object dst-object]
(db/exec-one! conn ["insert into storage_data (id, data) select ? as id, data from storage_data where id=?"
(:id dst-object)
(:id src-object)]))
(defmethod impl/get-object-data :db (defmethod impl/get-object-data :db
[{:keys [conn] :as backend} {:keys [id] :as object}] [{:keys [conn executor] :as backend} {:keys [id] :as object}]
(px/with-dispatch executor
(let [result (db/exec-one! conn ["select data from storage_data where id=?" id])] (let [result (db/exec-one! conn ["select data from storage_data where id=?" id])]
(ByteArrayInputStream. (:data result)))) (ByteArrayInputStream. (:data result)))))
(defmethod impl/get-object-bytes :db (defmethod impl/get-object-bytes :db
[{:keys [conn] :as backend} {:keys [id] :as object}] [{:keys [conn executor] :as backend} {:keys [id] :as object}]
(px/with-dispatch executor
(let [result (db/exec-one! conn ["select data from storage_data where id=?" id])] (let [result (db/exec-one! conn ["select data from storage_data where id=?" id])]
(:data result))) (:data result))))
(defmethod impl/get-object-url :db (defmethod impl/get-object-url :db
[_ _] [_ _]

View file

@ -14,7 +14,8 @@
[clojure.spec.alpha :as s] [clojure.spec.alpha :as s]
[cuerdas.core :as str] [cuerdas.core :as str]
[datoteka.core :as fs] [datoteka.core :as fs]
[integrant.core :as ig]) [integrant.core :as ig]
[promesa.exec :as px])
(:import (:import
java.io.InputStream java.io.InputStream
java.io.OutputStream java.io.OutputStream
@ -47,7 +48,8 @@
;; --- API IMPL ;; --- API IMPL
(defmethod impl/put-object :fs (defmethod impl/put-object :fs
[backend {:keys [id] :as object} content] [{:keys [executor] :as backend} {:keys [id] :as object} content]
(px/with-dispatch executor
(let [base (fs/path (:directory backend)) (let [base (fs/path (:directory backend))
path (fs/path (impl/id->path id)) path (fs/path (impl/id->path id))
full (fs/normalize (fs/join base path))] full (fs/normalize (fs/join base path))]
@ -55,21 +57,11 @@
(fs/create-dir (fs/parent full))) (fs/create-dir (fs/parent full)))
(with-open [^InputStream src (io/input-stream content) (with-open [^InputStream src (io/input-stream content)
^OutputStream dst (io/output-stream full)] ^OutputStream dst (io/output-stream full)]
(io/copy src dst)))) (io/copy src dst)))))
(defmethod impl/copy-object :fs
[backend src-object dst-object]
(let [base (fs/path (:directory backend))
path (fs/path (impl/id->path (:id dst-object)))
full (fs/normalize (fs/join base path))]
(when-not (fs/exists? (fs/parent full))
(fs/create-dir (fs/parent full)))
(with-open [^InputStream src (impl/get-object-data backend src-object)
^OutputStream dst (io/output-stream full)]
(io/copy src dst))))
(defmethod impl/get-object-data :fs (defmethod impl/get-object-data :fs
[backend {:keys [id] :as object}] [{:keys [executor] :as backend} {:keys [id] :as object}]
(px/with-dispatch executor
(let [^Path base (fs/path (:directory backend)) (let [^Path base (fs/path (:directory backend))
^Path path (fs/path (impl/id->path id)) ^Path path (fs/path (impl/id->path id))
^Path full (fs/normalize (fs/join base path))] ^Path full (fs/normalize (fs/join base path))]
@ -77,32 +69,36 @@
(ex/raise :type :internal (ex/raise :type :internal
:code :filesystem-object-does-not-exists :code :filesystem-object-does-not-exists
:path (str full))) :path (str full)))
(io/input-stream full))) (io/input-stream full))))
(defmethod impl/get-object-bytes :fs (defmethod impl/get-object-bytes :fs
[backend object] [{:keys [executor] :as backend} object]
(fs/slurp-bytes (impl/get-object-data backend object))) (px/with-dispatch executor
(fs/slurp-bytes (impl/get-object-data backend object))))
(defmethod impl/get-object-url :fs (defmethod impl/get-object-url :fs
[{:keys [uri] :as backend} {:keys [id] :as object} _] [{:keys [uri executor] :as backend} {:keys [id] :as object} _]
(px/with-dispatch executor
(update uri :path (update uri :path
(fn [existing] (fn [existing]
(if (str/ends-with? existing "/") (if (str/ends-with? existing "/")
(str existing (impl/id->path id)) (str existing (impl/id->path id))
(str existing "/" (impl/id->path id)))))) (str existing "/" (impl/id->path id)))))))
(defmethod impl/del-object :fs (defmethod impl/del-object :fs
[backend {:keys [id] :as object}] [{:keys [executor] :as backend} {:keys [id] :as object}]
(px/with-dispatch executor
(let [base (fs/path (:directory backend)) (let [base (fs/path (:directory backend))
path (fs/path (impl/id->path id)) path (fs/path (impl/id->path id))
path (fs/join base path)] path (fs/join base path)]
(Files/deleteIfExists ^Path path))) (Files/deleteIfExists ^Path path))))
(defmethod impl/del-objects-in-bulk :fs (defmethod impl/del-objects-in-bulk :fs
[backend ids] [{:keys [executor] :as backend} ids]
(px/with-dispatch executor
(let [base (fs/path (:directory backend))] (let [base (fs/path (:directory backend))]
(doseq [id ids] (doseq [id ids]
(let [path (fs/path (impl/id->path id)) (let [path (fs/path (impl/id->path id))
path (fs/join base path)] path (fs/join base path)]
(Files/deleteIfExists ^Path path))))) (Files/deleteIfExists ^Path path))))))

View file

@ -7,17 +7,20 @@
(ns app.storage.impl (ns app.storage.impl
"Storage backends abstraction layer." "Storage backends abstraction layer."
(:require (:require
[app.common.data.macros :as dm]
[app.common.exceptions :as ex] [app.common.exceptions :as ex]
[app.common.uuid :as uuid] [app.common.uuid :as uuid]
[buddy.core.codecs :as bc] [buddy.core.codecs :as bc]
[clojure.java.io :as io] [buddy.core.hash :as bh]
[cuerdas.core :as str]) [clojure.java.io :as io])
(:import (:import
java.nio.ByteBuffer java.nio.ByteBuffer
java.util.UUID java.util.UUID
java.io.ByteArrayInputStream java.io.ByteArrayInputStream
java.io.InputStream java.io.InputStream
java.nio.file.Files)) java.nio.file.Files
org.apache.commons.io.input.BoundedInputStream
))
;; --- API Definition ;; --- API Definition
@ -29,14 +32,6 @@
:code :invalid-storage-backend :code :invalid-storage-backend
:context cfg)) :context cfg))
(defmulti copy-object (fn [cfg _ _] (:type cfg)))
(defmethod copy-object :default
[cfg _ _]
(ex/raise :type :internal
:code :invalid-storage-backend
:context cfg))
(defmulti get-object-data (fn [cfg _] (:type cfg))) (defmulti get-object-data (fn [cfg _] (:type cfg)))
(defmethod get-object-data :default (defmethod get-object-data :default
@ -106,63 +101,26 @@
:code :invalid-id-type :code :invalid-id-type
:hint "id should be string or uuid"))) :hint "id should be string or uuid")))
(defprotocol IContentObject
(size [_] "get object size"))
(defprotocol IContentObject) (defprotocol IContentHash
(get-hash [_] "get precalculated hash"))
(defn- path->content (defn- make-content
[path] [^InputStream is ^long size]
(let [size (Files/size path)]
(reify (reify
IContentObject IContentObject
(size [_] size)
io/IOFactory io/IOFactory
(make-reader [_ opts] (make-reader [this opts]
(io/make-reader path opts)) (io/make-reader this opts))
(make-writer [_ _] (make-writer [_ _]
(throw (UnsupportedOperationException. "not implemented"))) (throw (UnsupportedOperationException. "not implemented")))
(make-input-stream [_ opts] (make-input-stream [_ _]
(io/make-input-stream path opts)) (doto (BoundedInputStream. is size)
(make-output-stream [_ _] (.setPropagateClose false)))
(throw (UnsupportedOperationException. "not implemented")))
clojure.lang.Counted
(count [_] size)
java.lang.AutoCloseable
(close [_]))))
(defn string->content
[^String v]
(let [data (.getBytes v "UTF-8")
bais (ByteArrayInputStream. ^bytes data)]
(reify
IContentObject
io/IOFactory
(make-reader [_ opts]
(io/make-reader bais opts))
(make-writer [_ _]
(throw (UnsupportedOperationException. "not implemented")))
(make-input-stream [_ opts]
(io/make-input-stream bais opts))
(make-output-stream [_ _]
(throw (UnsupportedOperationException. "not implemented")))
clojure.lang.Counted
(count [_]
(alength data))
java.lang.AutoCloseable
(close [_]))))
(defn- input-stream->content
[^InputStream is size]
(reify
IContentObject
io/IOFactory
(make-reader [_ opts]
(io/make-reader is opts))
(make-writer [_ _]
(throw (UnsupportedOperationException. "not implemented")))
(make-input-stream [_ opts]
(io/make-input-stream is opts))
(make-output-stream [_ _] (make-output-stream [_ _]
(throw (UnsupportedOperationException. "not implemented"))) (throw (UnsupportedOperationException. "not implemented")))
@ -178,26 +136,63 @@
([data size] ([data size]
(cond (cond
(instance? java.nio.file.Path data) (instance? java.nio.file.Path data)
(path->content data) (make-content (io/input-stream data)
(Files/size data))
(instance? java.io.File data) (instance? java.io.File data)
(path->content (.toPath ^java.io.File data)) (content (.toPath ^java.io.File data) nil)
(instance? String data) (instance? String data)
(string->content data) (let [data (.getBytes data "UTF-8")
bais (ByteArrayInputStream. ^bytes data)]
(make-content bais (alength data)))
(bytes? data) (bytes? data)
(input-stream->content (ByteArrayInputStream. ^bytes data) (alength ^bytes data)) (let [size (alength ^bytes data)
bais (ByteArrayInputStream. ^bytes data)]
(make-content bais size))
(instance? InputStream data) (instance? InputStream data)
(do (do
(when-not size (when-not size
(throw (UnsupportedOperationException. "size should be provided on InputStream"))) (throw (UnsupportedOperationException. "size should be provided on InputStream")))
(input-stream->content data size)) (make-content data size))
:else :else
(throw (UnsupportedOperationException. "type not supported"))))) (throw (UnsupportedOperationException. "type not supported")))))
(defn wrap-with-hash
[content ^String hash]
(when-not (satisfies? IContentObject content)
(throw (UnsupportedOperationException. "`content` should be an instance of IContentObject")))
(when-not (satisfies? io/IOFactory content)
(throw (UnsupportedOperationException. "`content` should be an instance of IOFactory")))
(reify
IContentObject
(size [_] (size content))
IContentHash
(get-hash [_] hash)
io/IOFactory
(make-reader [_ opts]
(io/make-reader content opts))
(make-writer [_ opts]
(io/make-writer content opts))
(make-input-stream [_ opts]
(io/make-input-stream content opts))
(make-output-stream [_ opts]
(io/make-output-stream content opts))
clojure.lang.Counted
(count [_] (count content))
java.lang.AutoCloseable
(close [_]
(.close ^java.lang.AutoCloseable content))))
(defn content? (defn content?
[v] [v]
(satisfies? IContentObject v)) (satisfies? IContentObject v))
@ -209,15 +204,29 @@
(io/copy input output) (io/copy input output)
(.toByteArray output))) (.toByteArray output)))
(defn calculate-hash
[path-or-stream]
(let [result (cond
(instance? InputStream path-or-stream)
(let [result (-> (bh/blake2b-256 path-or-stream)
(bc/bytes->hex))]
(.reset path-or-stream)
result)
:else
(with-open [is (io/input-stream path-or-stream)]
(-> (bh/blake2b-256 is)
(bc/bytes->hex))))]
(str "blake2b:" result)))
(defn resolve-backend (defn resolve-backend
[{:keys [conn pool] :as storage} backend-id] [{:keys [conn pool executor] :as storage} backend-id]
(when backend-id
(let [backend (get-in storage [:backends backend-id])] (let [backend (get-in storage [:backends backend-id])]
(when-not backend (when-not backend
(ex/raise :type :internal (ex/raise :type :internal
:code :backend-not-configured :code :backend-not-configured
:hint (str/fmt "backend '%s' not configured" backend-id))) :hint (dm/fmt "backend '%' not configured" backend-id)))
(assoc backend (assoc backend
:executor executor
:conn (or conn pool) :conn (or conn pool)
:id backend-id)))) :id backend-id)))

View file

@ -13,36 +13,42 @@
[app.common.uri :as u] [app.common.uri :as u]
[app.storage.impl :as impl] [app.storage.impl :as impl]
[app.util.time :as dt] [app.util.time :as dt]
[app.worker :as wrk]
[clojure.java.io :as io] [clojure.java.io :as io]
[clojure.spec.alpha :as s] [clojure.spec.alpha :as s]
[integrant.core :as ig]) [integrant.core :as ig]
[promesa.core :as p]
[promesa.exec :as px])
(:import (:import
java.time.Duration
java.io.InputStream java.io.InputStream
java.nio.ByteBuffer
java.time.Duration
java.util.Collection java.util.Collection
software.amazon.awssdk.core.sync.RequestBody java.util.Optional
java.util.concurrent.Semaphore
org.reactivestreams.Subscriber
org.reactivestreams.Subscription
software.amazon.awssdk.core.ResponseBytes software.amazon.awssdk.core.ResponseBytes
;; software.amazon.awssdk.core.ResponseInputStream software.amazon.awssdk.core.async.AsyncRequestBody
software.amazon.awssdk.core.client.config.ClientAsyncConfiguration
software.amazon.awssdk.core.client.config.SdkAdvancedAsyncClientOption
software.amazon.awssdk.http.nio.netty.NettyNioAsyncHttpClient
software.amazon.awssdk.http.nio.netty.SdkEventLoopGroup
software.amazon.awssdk.regions.Region software.amazon.awssdk.regions.Region
software.amazon.awssdk.services.s3.S3Client software.amazon.awssdk.services.s3.S3AsyncClient
software.amazon.awssdk.services.s3.model.Delete software.amazon.awssdk.services.s3.model.Delete
software.amazon.awssdk.services.s3.model.CopyObjectRequest software.amazon.awssdk.services.s3.model.DeleteObjectRequest
software.amazon.awssdk.services.s3.model.DeleteObjectsRequest software.amazon.awssdk.services.s3.model.DeleteObjectsRequest
software.amazon.awssdk.services.s3.model.DeleteObjectsResponse software.amazon.awssdk.services.s3.model.DeleteObjectsResponse
software.amazon.awssdk.services.s3.model.DeleteObjectRequest
software.amazon.awssdk.services.s3.model.GetObjectRequest software.amazon.awssdk.services.s3.model.GetObjectRequest
software.amazon.awssdk.services.s3.model.ObjectIdentifier software.amazon.awssdk.services.s3.model.ObjectIdentifier
software.amazon.awssdk.services.s3.model.PutObjectRequest software.amazon.awssdk.services.s3.model.PutObjectRequest
software.amazon.awssdk.services.s3.model.S3Error software.amazon.awssdk.services.s3.model.S3Error
;; software.amazon.awssdk.services.s3.model.GetObjectResponse
software.amazon.awssdk.services.s3.presigner.S3Presigner software.amazon.awssdk.services.s3.presigner.S3Presigner
software.amazon.awssdk.services.s3.presigner.model.GetObjectPresignRequest software.amazon.awssdk.services.s3.presigner.model.GetObjectPresignRequest
software.amazon.awssdk.services.s3.presigner.model.PresignedGetObjectRequest software.amazon.awssdk.services.s3.presigner.model.PresignedGetObjectRequest))
))
(declare put-object) (declare put-object)
(declare copy-object)
(declare get-object-bytes) (declare get-object-bytes)
(declare get-object-data) (declare get-object-data)
(declare get-object-url) (declare get-object-url)
@ -59,7 +65,7 @@
(s/def ::endpoint ::us/string) (s/def ::endpoint ::us/string)
(defmethod ig/pre-init-spec ::backend [_] (defmethod ig/pre-init-spec ::backend [_]
(s/keys :opt-un [::region ::bucket ::prefix ::endpoint])) (s/keys :opt-un [::region ::bucket ::prefix ::endpoint ::wrk/executor]))
(defmethod ig/prep-key ::backend (defmethod ig/prep-key ::backend
[_ {:keys [prefix] :as cfg}] [_ {:keys [prefix] :as cfg}]
@ -75,12 +81,18 @@
(let [client (build-s3-client cfg) (let [client (build-s3-client cfg)
presigner (build-s3-presigner cfg)] presigner (build-s3-presigner cfg)]
(assoc cfg (assoc cfg
:client client :client @client
:presigner presigner :presigner presigner
:type :s3)))) :type :s3
::close-fn #(.close ^java.lang.AutoCloseable client)))))
(defmethod ig/halt-key! ::backend
[_ {:keys [::close-fn]}]
(when (fn? close-fn)
(px/run! close-fn)))
(s/def ::type ::us/keyword) (s/def ::type ::us/keyword)
(s/def ::client #(instance? S3Client %)) (s/def ::client #(instance? S3AsyncClient %))
(s/def ::presigner #(instance? S3Presigner %)) (s/def ::presigner #(instance? S3Presigner %))
(s/def ::backend (s/def ::backend
(s/keys :req-un [::region ::bucket ::client ::type ::presigner] (s/keys :req-un [::region ::bucket ::client ::type ::presigner]
@ -92,10 +104,6 @@
[backend object content] [backend object content]
(put-object backend object content)) (put-object backend object content))
(defmethod impl/copy-object :s3
[backend src-object dst-object]
(copy-object backend src-object dst-object))
(defmethod impl/get-object-data :s3 (defmethod impl/get-object-data :s3
[backend object] [backend object]
(get-object-data backend object)) (get-object-data backend object))
@ -118,21 +126,44 @@
;; --- HELPERS ;; --- HELPERS
(def default-eventloop-threads 4)
(def default-timeout
(dt/duration {:seconds 30}))
(defn- ^Region lookup-region (defn- ^Region lookup-region
[region] [region]
(Region/of (name region))) (Region/of (name region)))
(defn build-s3-client (defn build-s3-client
[{:keys [region endpoint]}] [{:keys [region endpoint executor]}]
(if (string? endpoint) (let [hclient (.. (NettyNioAsyncHttpClient/builder)
(let [uri (java.net.URI. endpoint)] (eventLoopGroupBuilder (.. (SdkEventLoopGroup/builder)
(.. (S3Client/builder) (numberOfThreads (int default-eventloop-threads))))
(endpointOverride uri) (connectionAcquisitionTimeout default-timeout)
(region (lookup-region region)) (connectionTimeout default-timeout)
(readTimeout default-timeout)
(writeTimeout default-timeout)
(build))
client (.. (S3AsyncClient/builder)
(asyncConfiguration (.. (ClientAsyncConfiguration/builder)
(advancedOption SdkAdvancedAsyncClientOption/FUTURE_COMPLETION_EXECUTOR
executor)
(build))) (build)))
(.. (S3Client/builder) (httpClient hclient)
(region (lookup-region region)) (region (lookup-region region)))]
(build))))
(when-let [uri (some-> endpoint (java.net.URI.))]
(.endpointOverride client uri))
(let [client (.build client)]
(reify
clojure.lang.IDeref
(deref [_] client)
java.lang.AutoCloseable
(close [_]
(.close hclient)
(.close client))))))
(defn build-s3-presigner (defn build-s3-presigner
[{:keys [region endpoint]}] [{:keys [region endpoint]}]
@ -146,9 +177,51 @@
(region (lookup-region region)) (region (lookup-region region))
(build)))) (build))))
(defn- make-request-body
[content]
(let [is (io/input-stream content)
buff-size (* 1024 64)
sem (Semaphore. 0)
writer-fn (fn [s]
(try
(loop []
(.acquire sem 1)
(let [buffer (byte-array buff-size)
readed (.read is buffer)]
(when (pos? readed)
(.onNext ^Subscriber s (ByteBuffer/wrap buffer 0 readed))
(when (= readed buff-size)
(recur)))))
(.onComplete s)
(catch Throwable cause
(.onError s cause))
(finally
(.close ^InputStream is))))]
(reify
AsyncRequestBody
(contentLength [_]
(Optional/of (long (count content))))
(^void subscribe [_ ^Subscriber s]
(let [thread (Thread. #(writer-fn s))]
(.setDaemon thread true)
(.setName thread "penpot/storage:s3")
(.start thread)
(.onSubscribe s (reify Subscription
(cancel [_]
(.interrupt thread)
(.release sem 1))
(request [_ n]
(.release sem (int n))))))))))
(defn put-object (defn put-object
[{:keys [client bucket prefix]} {:keys [id] :as object} content] [{:keys [client bucket prefix]} {:keys [id] :as object} content]
(let [path (str prefix (impl/id->path id)) (p/let [path (str prefix (impl/id->path id))
mdata (meta object) mdata (meta object)
mtype (:content-type mdata "application/octet-stream") mtype (:content-type mdata "application/octet-stream")
request (.. (PutObjectRequest/builder) request (.. (PutObjectRequest/builder)
@ -157,35 +230,18 @@
(key path) (key path)
(build))] (build))]
(with-open [^InputStream is (io/input-stream content)] (let [content (make-request-body content)]
(let [content (RequestBody/fromInputStream is (count content))] (.putObject ^S3AsyncClient client
(.putObject ^S3Client client
^PutObjectRequest request ^PutObjectRequest request
^RequestBody content))))) ^AsyncRequestBody content))))
(defn copy-object
[{:keys [client bucket prefix]} src-object dst-object]
(let [source-path (str prefix (impl/id->path (:id src-object)))
source-mdata (meta src-object)
source-mtype (:content-type source-mdata "application/octet-stream")
dest-path (str prefix (impl/id->path (:id dst-object)))
request (.. (CopyObjectRequest/builder)
(copySource (u/query-encode (str bucket "/" source-path)))
(destinationBucket bucket)
(destinationKey dest-path)
(contentType source-mtype)
(build))]
(.copyObject ^S3Client client ^CopyObjectRequest request)))
(defn get-object-data (defn get-object-data
[{:keys [client bucket prefix]} {:keys [id]}] [{:keys [client bucket prefix]} {:keys [id]}]
(let [gor (.. (GetObjectRequest/builder) (p/let [gor (.. (GetObjectRequest/builder)
(bucket bucket) (bucket bucket)
(key (str prefix (impl/id->path id))) (key (str prefix (impl/id->path id)))
(build)) (build))
obj (.getObject ^S3Client client ^GetObjectRequest gor) obj (.getObject ^S3AsyncClient client ^GetObjectRequest gor)
;; rsp (.response ^ResponseInputStream obj) ;; rsp (.response ^ResponseInputStream obj)
;; len (.contentLength ^GetObjectResponse rsp) ;; len (.contentLength ^GetObjectResponse rsp)
] ]
@ -193,11 +249,11 @@
(defn get-object-bytes (defn get-object-bytes
[{:keys [client bucket prefix]} {:keys [id]}] [{:keys [client bucket prefix]} {:keys [id]}]
(let [gor (.. (GetObjectRequest/builder) (p/let [gor (.. (GetObjectRequest/builder)
(bucket bucket) (bucket bucket)
(key (str prefix (impl/id->path id))) (key (str prefix (impl/id->path id)))
(build)) (build))
obj (.getObjectAsBytes ^S3Client client ^GetObjectRequest gor)] obj (.getObjectAsBytes ^S3AsyncClient client ^GetObjectRequest gor)]
(.asByteArray ^ResponseBytes obj))) (.asByteArray ^ResponseBytes obj)))
(def default-max-age (def default-max-age
@ -206,6 +262,7 @@
(defn get-object-url (defn get-object-url
[{:keys [presigner bucket prefix]} {:keys [id]} {:keys [max-age] :or {max-age default-max-age}}] [{:keys [presigner bucket prefix]} {:keys [id]} {:keys [max-age] :or {max-age default-max-age}}]
(us/assert dt/duration? max-age) (us/assert dt/duration? max-age)
(p/do
(let [gor (.. (GetObjectRequest/builder) (let [gor (.. (GetObjectRequest/builder)
(bucket bucket) (bucket bucket)
(key (str prefix (impl/id->path id))) (key (str prefix (impl/id->path id)))
@ -215,20 +272,20 @@
(getObjectRequest ^GetObjectRequest gor) (getObjectRequest ^GetObjectRequest gor)
(build)) (build))
pgor (.presignGetObject ^S3Presigner presigner ^GetObjectPresignRequest gopr)] pgor (.presignGetObject ^S3Presigner presigner ^GetObjectPresignRequest gopr)]
(u/uri (str (.url ^PresignedGetObjectRequest pgor))))) (u/uri (str (.url ^PresignedGetObjectRequest pgor))))))
(defn del-object (defn del-object
[{:keys [bucket client prefix]} {:keys [id] :as obj}] [{:keys [bucket client prefix]} {:keys [id] :as obj}]
(let [dor (.. (DeleteObjectRequest/builder) (p/let [dor (.. (DeleteObjectRequest/builder)
(bucket bucket) (bucket bucket)
(key (str prefix (impl/id->path id))) (key (str prefix (impl/id->path id)))
(build))] (build))]
(.deleteObject ^S3Client client (.deleteObject ^S3AsyncClient client
^DeleteObjectRequest dor))) ^DeleteObjectRequest dor)))
(defn del-object-in-bulk (defn del-object-in-bulk
[{:keys [bucket client prefix]} ids] [{:keys [bucket client prefix]} ids]
(let [oids (map (fn [id] (p/let [oids (map (fn [id]
(.. (ObjectIdentifier/builder) (.. (ObjectIdentifier/builder)
(key (str prefix (impl/id->path id))) (key (str prefix (impl/id->path id)))
(build))) (build)))
@ -240,7 +297,7 @@
(bucket bucket) (bucket bucket)
(delete ^Delete delc) (delete ^Delete delc)
(build)) (build))
dres (.deleteObjects ^S3Client client dres (.deleteObjects ^S3AsyncClient client
^DeleteObjectsRequest dor)] ^DeleteObjectsRequest dor)]
(when (.hasErrors ^DeleteObjectsResponse dres) (when (.hasErrors ^DeleteObjectsResponse dres)
(let [errors (seq (.errors ^DeleteObjectsResponse dres))] (let [errors (seq (.errors ^DeleteObjectsResponse dres))]

View file

@ -9,10 +9,9 @@
of deleted objects." of deleted objects."
(:require (:require
[app.common.logging :as l] [app.common.logging :as l]
[app.config :as cf]
[app.db :as db] [app.db :as db]
[app.media :as media]
[app.storage :as sto] [app.storage :as sto]
[app.storage.impl :as simpl]
[app.util.time :as dt] [app.util.time :as dt]
[clojure.spec.alpha :as s] [clojure.spec.alpha :as s]
[cuerdas.core :as str] [cuerdas.core :as str]
@ -56,16 +55,12 @@
;; --- IMPL: file deletion ;; --- IMPL: file deletion
(defmethod delete-objects "file" (defmethod delete-objects "file"
[{:keys [conn max-age table storage] :as cfg}] [{:keys [conn max-age table] :as cfg}]
(let [sql (str/fmt sql:delete-objects (let [sql (str/fmt sql:delete-objects {:table table :limit 50})
{:table table :limit 50}) result (db/exec! conn [sql max-age])]
result (db/exec! conn [sql max-age])
backend (simpl/resolve-backend storage (cf/get :fdata-storage-backend))]
(doseq [{:keys [id] :as item} result] (doseq [{:keys [id] :as item} result]
(l/trace :hint "delete object" :table table :id id) (l/trace :hint "delete object" :table table :id id))
(when backend
(simpl/del-object backend item)))
(count result))) (count result)))
@ -76,13 +71,13 @@
(let [sql (str/fmt sql:delete-objects (let [sql (str/fmt sql:delete-objects
{:table table :limit 50}) {:table table :limit 50})
fonts (db/exec! conn [sql max-age]) fonts (db/exec! conn [sql max-age])
storage (assoc storage :conn conn)] storage (media/configure-assets-storage storage conn)]
(doseq [{:keys [id] :as font} fonts] (doseq [{:keys [id] :as font} fonts]
(l/trace :hint "delete object" :table table :id id) (l/trace :hint "delete object" :table table :id id)
(some->> (:woff1-file-id font) (sto/del-object storage)) (some->> (:woff1-file-id font) (sto/touch-object! storage) deref)
(some->> (:woff2-file-id font) (sto/del-object storage)) (some->> (:woff2-file-id font) (sto/touch-object! storage) deref)
(some->> (:otf-file-id font) (sto/del-object storage)) (some->> (:otf-file-id font) (sto/touch-object! storage) deref)
(some->> (:ttf-file-id font) (sto/del-object storage))) (some->> (:ttf-file-id font) (sto/touch-object! storage) deref))
(count fonts))) (count fonts)))
;; --- IMPL: team deletion ;; --- IMPL: team deletion
@ -96,7 +91,7 @@
(doseq [{:keys [id] :as team} teams] (doseq [{:keys [id] :as team} teams]
(l/trace :hint "delete object" :table table :id id) (l/trace :hint "delete object" :table table :id id)
(some->> (:photo-id team) (sto/del-object storage))) (some->> (:photo-id team) (sto/touch-object! storage) deref))
(count teams))) (count teams)))
@ -135,7 +130,7 @@
;; Mark as deleted the storage object related with the photo-id ;; Mark as deleted the storage object related with the photo-id
;; field. ;; field.
(some->> (:photo-id profile) (sto/del-object storage)) (some->> (:photo-id profile) (sto/touch-object! storage) deref)
;; And finally, permanently delete the profile. ;; And finally, permanently delete the profile.
(db/delete! conn :profile {:id id})) (db/delete! conn :profile {:id id}))

View file

@ -15,7 +15,6 @@
[app.config :as cfg] [app.config :as cfg]
[app.db :as db] [app.db :as db]
[app.util.async :refer [thread-sleep]] [app.util.async :refer [thread-sleep]]
[app.util.http :as http]
[app.util.json :as json] [app.util.json :as json]
[clojure.spec.alpha :as s] [clojure.spec.alpha :as s]
[integrant.core :as ig])) [integrant.core :as ig]))
@ -27,6 +26,7 @@
(declare get-stats) (declare get-stats)
(declare send!) (declare send!)
(s/def ::http-client fn?)
(s/def ::version ::us/string) (s/def ::version ::us/string)
(s/def ::uri ::us/string) (s/def ::uri ::us/string)
(s/def ::instance-id ::us/uuid) (s/def ::instance-id ::us/uuid)
@ -34,7 +34,7 @@
(s/keys :req-un [::instance-id])) (s/keys :req-un [::instance-id]))
(defmethod ig/pre-init-spec ::handler [_] (defmethod ig/pre-init-spec ::handler [_]
(s/keys :req-un [::db/pool ::version ::uri ::sprops])) (s/keys :req-un [::db/pool ::http-client ::version ::uri ::sprops]))
(defmethod ig/init-key ::handler (defmethod ig/init-key ::handler
[_ {:keys [pool sprops version] :as cfg}] [_ {:keys [pool sprops version] :as cfg}]
@ -47,7 +47,8 @@
stats (-> (get-stats pool version) stats (-> (get-stats pool version)
(assoc :instance-id instance-id))] (assoc :instance-id instance-id))]
(when send? (when send?
(send! stats cfg)) (send! cfg stats))
stats))) stats)))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
@ -55,11 +56,12 @@
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defn- send! (defn- send!
[data cfg] [{:keys [http-client uri] :as cfg} data]
(let [response (http/send! {:method :post (let [response (http-client {:method :post
:uri (:uri cfg) :uri uri
:headers {"content-type" "application/json"} :headers {"content-type" "application/json"}
:body (json/write-str data)})] :body (json/write-str data)}
{:sync? true})]
(when (> (:status response) 206) (when (> (:status response) 206)
(ex/raise :type :internal (ex/raise :type :internal
:code :invalid-response :code :invalid-response

View file

@ -7,8 +7,7 @@
(ns app.util.async (ns app.util.async
(:require (:require
[clojure.core.async :as a] [clojure.core.async :as a]
[clojure.spec.alpha :as s] [clojure.spec.alpha :as s])
[promesa.exec :as px])
(:import (:import
java.util.concurrent.Executor)) java.util.concurrent.Executor))
@ -61,10 +60,6 @@
`(a/thread-call (^:once fn* [] (try ~@body (catch Exception e# e#)))) `(a/thread-call (^:once fn* [] (try ~@body (catch Exception e# e#))))
`(thread-call ~executor (^:once fn* [] ~@body)))) `(thread-call ~executor (^:once fn* [] ~@body))))
(defmacro with-dispatch
[executor & body]
`(px/submit! ~executor (^:once fn* [] ~@body)))
(defn batch (defn batch
[in {:keys [max-batch-size [in {:keys [max-batch-size
max-batch-age max-batch-age

View file

@ -1,27 +0,0 @@
;; This Source Code Form is subject to the terms of the Mozilla Public
;; License, v. 2.0. If a copy of the MPL was not distributed with this
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
;;
;; Copyright (c) UXBOX Labs SL
(ns app.util.http
"Http client abstraction layer."
(:require
[java-http-clj.core :as http]
[promesa.exec :as px]))
(def default-client
(delay (http/build-client {:executor @px/default-executor
:connect-timeout 10000 ;; 10s
:follow-redirects :always})))
(defn get!
[url opts]
(let [opts' (merge {:client @default-client :as :string} opts)]
(http/get url nil opts')))
(defn send!
([req]
(http/send req {:client @default-client :as :string}))
([req opts]
(http/send req (merge {:client @default-client :as :string} opts))))

View file

@ -18,7 +18,6 @@
java.nio.ByteBuffer java.nio.ByteBuffer
org.eclipse.jetty.io.EofException)) org.eclipse.jetty.io.EofException))
(declare decode-beat) (declare decode-beat)
(declare encode-beat) (declare encode-beat)
(declare process-heartbeat) (declare process-heartbeat)

View file

@ -23,47 +23,77 @@
[promesa.exec :as px]) [promesa.exec :as px])
(:import (:import
java.util.concurrent.ExecutorService java.util.concurrent.ExecutorService
java.util.concurrent.Executors
java.util.concurrent.ForkJoinPool java.util.concurrent.ForkJoinPool
java.util.concurrent.ForkJoinWorkerThread java.util.concurrent.Future
java.util.concurrent.ForkJoinPool$ForkJoinWorkerThreadFactory java.util.concurrent.ForkJoinPool$ForkJoinWorkerThreadFactory
java.util.concurrent.atomic.AtomicLong java.util.concurrent.ForkJoinWorkerThread
java.util.concurrent.Executors)) java.util.concurrent.ScheduledExecutorService
java.util.concurrent.ThreadFactory
java.util.concurrent.atomic.AtomicLong))
(set! *warn-on-reflection* true) (set! *warn-on-reflection* true)
(s/def ::executor #(instance? ExecutorService %)) (s/def ::executor #(instance? ExecutorService %))
(s/def ::scheduler #(instance? ScheduledExecutorService %))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; Executor ;; Executor
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(declare ^:private get-fj-thread-factory)
(declare ^:private get-thread-factory)
(s/def ::prefix keyword?) (s/def ::prefix keyword?)
(s/def ::parallelism ::us/integer) (s/def ::parallelism ::us/integer)
(s/def ::min-threads ::us/integer)
(s/def ::max-threads ::us/integer)
(s/def ::idle-timeout ::us/integer) (s/def ::idle-timeout ::us/integer)
(defmethod ig/pre-init-spec ::executor [_] (defmethod ig/pre-init-spec ::executor [_]
(s/keys :req-un [::prefix ::parallelism])) (s/keys :req-un [::prefix]
:opt-un [::parallelism]))
(defn- get-thread-factory (defmethod ig/init-key ::executor
[_ {:keys [parallelism prefix]}]
(let [counter (AtomicLong. 0)]
(if parallelism
(ForkJoinPool. (int parallelism) (get-fj-thread-factory prefix counter) nil false)
(Executors/newCachedThreadPool (get-thread-factory prefix counter)))))
(defmethod ig/halt-key! ::executor
[_ instance]
(.shutdown ^ExecutorService instance))
(defmethod ig/pre-init-spec ::scheduler [_]
(s/keys :req-un [::prefix]
:opt-un [::parallelism]))
(defmethod ig/init-key ::scheduler
[_ {:keys [parallelism prefix] :or {parallelism 1}}]
(let [counter (AtomicLong. 0)]
(px/scheduled-pool parallelism (get-thread-factory prefix counter))))
(defmethod ig/halt-key! ::scheduler
[_ instance]
(.shutdown ^ExecutorService instance))
(defn- get-fj-thread-factory
^ForkJoinPool$ForkJoinWorkerThreadFactory ^ForkJoinPool$ForkJoinWorkerThreadFactory
[prefix counter] [prefix counter]
(reify ForkJoinPool$ForkJoinWorkerThreadFactory (reify ForkJoinPool$ForkJoinWorkerThreadFactory
(newThread [_ pool] (newThread [_ pool]
(let [^ForkJoinWorkerThread thread (.newThread ForkJoinPool/defaultForkJoinWorkerThreadFactory pool) (let [^ForkJoinWorkerThread thread (.newThread ForkJoinPool/defaultForkJoinWorkerThreadFactory pool)
^String thread-name (str (name prefix) "-" (.getAndIncrement ^AtomicLong counter))] ^String thread-name (str "penpot/" (name prefix) "-" (.getAndIncrement ^AtomicLong counter))]
(.setName thread thread-name) (.setName thread thread-name)
thread)))) thread))))
(defmethod ig/init-key ::executor (defn- get-thread-factory
[_ {:keys [parallelism prefix]}] ^ThreadFactory
(let [counter (AtomicLong. 0)] [prefix counter]
(ForkJoinPool. (int parallelism) (get-thread-factory prefix counter) nil false))) (reify ThreadFactory
(newThread [_ runnable]
(defmethod ig/halt-key! ::executor (doto (Thread. runnable)
[_ instance] (.setDaemon true)
(.shutdown ^ForkJoinPool instance)) (.setName (str "penpot/" (name prefix) "-" (.getAndIncrement ^AtomicLong counter)))))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; Executor Monitor ;; Executor Monitor
@ -72,11 +102,11 @@
(s/def ::executors (s/map-of keyword? ::executor)) (s/def ::executors (s/map-of keyword? ::executor))
(defmethod ig/pre-init-spec ::executors-monitor [_] (defmethod ig/pre-init-spec ::executors-monitor [_]
(s/keys :req-un [::executors ::mtx/metrics])) (s/keys :req-un [::executors ::scheduler ::mtx/metrics]))
(defmethod ig/init-key ::executors-monitor (defmethod ig/init-key ::executors-monitor
[_ {:keys [executors metrics interval] :or {interval 3000}}] [_ {:keys [executors metrics interval scheduler] :or {interval 3000}}]
(letfn [(log-stats [scheduler state] (letfn [(log-stats [state]
(doseq [[key ^ForkJoinPool executor] executors] (doseq [[key ^ForkJoinPool executor] executors]
(let [labels (into-array String [(name key)]) (let [labels (into-array String [(name key)])
running (.getRunningThreadCount executor) running (.getRunningThreadCount executor)
@ -97,18 +127,17 @@
:queued queued :queued queued
:steals steals))) :steals steals)))
(when-not (.isShutdown scheduler) (when (and (not (.isShutdown scheduler))
(px/schedule! scheduler interval (partial log-stats scheduler state))))] (not (:shutdown @state)))
(px/schedule! scheduler interval (partial log-stats state))))]
(let [scheduler (px/scheduled-pool 1) (let [state (atom {})]
state (atom {})] (px/schedule! scheduler interval (partial log-stats state))
(px/schedule! scheduler interval (partial log-stats scheduler state)) {:state state})))
{::scheduler scheduler
::state state})))
(defmethod ig/halt-key! ::executors-monitor (defmethod ig/halt-key! ::executors-monitor
[_ {:keys [::scheduler]}] [_ {:keys [state]}]
(.shutdown ^ExecutorService scheduler)) (swap! state assoc :shutdown true))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; Worker ;; Worker
@ -252,7 +281,6 @@
(db/exec-one! conn [sql:insert-new-task id (d/name task) props (d/name queue) priority max-retries interval]) (db/exec-one! conn [sql:insert-new-task id (d/name task) props (d/name queue) priority max-retries interval])
id)) id))
;; --- RUNNER ;; --- RUNNER
(def ^:private (def ^:private
@ -392,13 +420,12 @@
[{:keys [executor] :as cfg}] [{:keys [executor] :as cfg}]
(aa/thread-call executor #(event-loop-fn* cfg))) (aa/thread-call executor #(event-loop-fn* cfg)))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; Scheduler ;; Scheduler
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(declare schedule-task) (declare schedule-cron-task)
(declare synchronize-schedule) (declare synchronize-cron-entries)
(s/def ::fn (s/or :var var? :fn fn?)) (s/def ::fn (s/or :var var? :fn fn?))
(s/def ::id keyword?) (s/def ::id keyword?)
@ -406,21 +433,21 @@
(s/def ::props (s/nilable map?)) (s/def ::props (s/nilable map?))
(s/def ::task keyword?) (s/def ::task keyword?)
(s/def ::scheduled-task (s/def ::cron-task
(s/keys :req-un [::cron ::task] (s/keys :req-un [::cron ::task]
:opt-un [::props ::id])) :opt-un [::props ::id]))
(s/def ::schedule (s/coll-of (s/nilable ::scheduled-task))) (s/def ::entries (s/coll-of (s/nilable ::cron-task)))
(defmethod ig/pre-init-spec ::scheduler [_] (defmethod ig/pre-init-spec ::cron [_]
(s/keys :req-un [::executor ::db/pool ::schedule ::tasks])) (s/keys :req-un [::executor ::scheduler ::db/pool ::entries ::tasks]))
(defmethod ig/init-key ::scheduler (defmethod ig/init-key ::cron
[_ {:keys [schedule tasks pool] :as cfg}] [_ {:keys [entries tasks pool] :as cfg}]
(let [scheduler (Executors/newScheduledThreadPool (int 1))]
(if (db/read-only? pool) (if (db/read-only? pool)
(l/warn :hint "scheduler not started, db is read-only") (l/warn :hint "scheduler not started, db is read-only")
(let [schedule (->> schedule (let [running (atom #{})
entries (->> entries
(filter some?) (filter some?)
;; If id is not defined, use the task as id. ;; If id is not defined, use the task as id.
(map (fn [{:keys [id task] :as item}] (map (fn [{:keys [id task] :as item}]
@ -436,49 +463,55 @@
(-> item (-> item
(dissoc :task) (dissoc :task)
(assoc :fn f)))))) (assoc :fn f))))))
cfg (assoc cfg
:scheduler scheduler
:schedule schedule)]
(l/info :hint "scheduler started"
:registred-tasks (count schedule))
(synchronize-schedule cfg) cfg (assoc cfg :entries entries :running running)]
(run! (partial schedule-task cfg)
(filter some? schedule)))) (l/info :hint "cron started" :registred-tasks (count entries))
(synchronize-cron-entries cfg)
(->> (filter some? entries)
(run! (partial schedule-cron-task cfg)))
(reify (reify
clojure.lang.IDeref
(deref [_] @running)
java.lang.AutoCloseable java.lang.AutoCloseable
(close [_] (close [_]
(.shutdownNow ^ExecutorService scheduler))))) (doseq [item @running]
(when-not (.isDone ^Future item)
(.cancel ^Future item true))))))))
(defmethod ig/halt-key! ::scheduler
(defmethod ig/halt-key! ::cron
[_ instance] [_ instance]
(.close ^java.lang.AutoCloseable instance)) (when instance
(.close ^java.lang.AutoCloseable instance)))
(def sql:upsert-scheduled-task (def sql:upsert-cron-task
"insert into scheduled_task (id, cron_expr) "insert into scheduled_task (id, cron_expr)
values (?, ?) values (?, ?)
on conflict (id) on conflict (id)
do update set cron_expr=?") do update set cron_expr=?")
(defn- synchronize-schedule-item (defn- synchronize-cron-item
[conn {:keys [id cron]}] [conn {:keys [id cron]}]
(let [cron (str cron)] (let [cron (str cron)]
(l/debug :action "initialize scheduled task" :id id :cron cron) (l/debug :action "initialize scheduled task" :id id :cron cron)
(db/exec-one! conn [sql:upsert-scheduled-task id cron cron]))) (db/exec-one! conn [sql:upsert-cron-task id cron cron])))
(defn- synchronize-schedule (defn- synchronize-cron-entries
[{:keys [pool schedule]}] [{:keys [pool schedule]}]
(db/with-atomic [conn pool] (db/with-atomic [conn pool]
(run! (partial synchronize-schedule-item conn) schedule))) (run! (partial synchronize-cron-item conn) schedule)))
(def sql:lock-scheduled-task (def sql:lock-cron-task
"select id from scheduled_task where id=? for update skip locked") "select id from scheduled_task where id=? for update skip locked")
(defn- execute-scheduled-task (defn- execute-cron-task
[{:keys [executor pool] :as cfg} {:keys [id] :as task}] [{:keys [executor pool] :as cfg} {:keys [id] :as task}]
(letfn [(run-task [conn] (letfn [(run-task [conn]
(when (db/exec-one! conn [sql:lock-scheduled-task (d/name id)]) (when (db/exec-one! conn [sql:lock-cron-task (d/name id)])
(l/debug :action "execute scheduled task" :id id) (l/debug :action "execute scheduled task" :id id)
((:fn task) task))) ((:fn task) task)))
@ -491,10 +524,10 @@
::l/context (get-error-context cause task) ::l/context (get-error-context cause task)
:task-id id :task-id id
:cause cause))))] :cause cause))))]
(try
(px/run! executor handle-task) (px/run! executor handle-task)
(finally (px/run! executor #(schedule-cron-task cfg task))
(schedule-task cfg task))))) nil))
(defn- ms-until-valid (defn- ms-until-valid
[cron] [cron]
@ -503,10 +536,16 @@
next (dt/next-valid-instant-from cron now)] next (dt/next-valid-instant-from cron now)]
(inst-ms (dt/diff now next)))) (inst-ms (dt/diff now next))))
(defn- schedule-task (def ^:private
[{:keys [scheduler] :as cfg} {:keys [cron] :as task}] xf-without-done
(let [ms (ms-until-valid cron)] (remove #(.isDone ^Future %)))
(px/schedule! scheduler ms (partial execute-scheduled-task cfg task))))
(defn- schedule-cron-task
[{:keys [scheduler running] :as cfg} {:keys [cron] :as task}]
(let [ft (px/schedule! scheduler
(ms-until-valid cron)
(partial execute-cron-task cfg task))]
(swap! running #(into #{ft} xf-without-done %))))
;; --- INSTRUMENTATION ;; --- INSTRUMENTATION

View file

@ -174,12 +174,18 @@
:type :image :type :image
:metadata {:id (:id fmo1)}}}]})] :metadata {:id (:id fmo1)}}}]})]
;; Check that reference storage objets on filemediaobjects
;; are the same because of deduplication feature.
(t/is (= (:media-id fmo1) (:media-id fmo2)))
(t/is (= (:thumbnail-id fmo1) (:thumbnail-id fmo2)))
;; If we launch gc-touched-task, we should have 2 items to
;; If we launch gc-touched-task, we should have 4 items to freeze. ;; freeze because of the deduplication (we have uploaded 2 times
;; 2 two same files).
(let [task (:app.storage/gc-touched-task th/*system*) (let [task (:app.storage/gc-touched-task th/*system*)
res (task {})] res (task {})]
(t/is (= 4 (:freeze res)))
(t/is (= 2 (:freeze res)))
(t/is (= 0 (:delete res)))) (t/is (= 0 (:delete res))))
;; run the task immediately ;; run the task immediately
@ -205,27 +211,26 @@
(t/is (= 1 (count rows)))) (t/is (= 1 (count rows))))
;; The underlying storage objects are still available. ;; The underlying storage objects are still available.
(t/is (some? (sto/get-object storage (:media-id fmo2)))) (t/is (some? @(sto/get-object storage (:media-id fmo2))))
(t/is (some? (sto/get-object storage (:thumbnail-id fmo2)))) (t/is (some? @(sto/get-object storage (:thumbnail-id fmo2))))
(t/is (some? (sto/get-object storage (:media-id fmo1)))) (t/is (some? @(sto/get-object storage (:media-id fmo1))))
(t/is (some? (sto/get-object storage (:thumbnail-id fmo1)))) (t/is (some? @(sto/get-object storage (:thumbnail-id fmo1))))
;; now, we have deleted the unused file-media-object, if we ;; now, we have deleted the unused file-media-object, if we
;; execute the touched-gc task, we should see that two of them ;; execute the touched-gc task, we should see that two of them
;; are marked to be deleted. ;; are marked to be deleted.
(let [task (:app.storage/gc-touched-task th/*system*) (let [task (:app.storage/gc-touched-task th/*system*)
res (task {})] res (task {})]
(t/is (= 0 (:freeze res))) (t/is (= 2 (:freeze res)))
(t/is (= 2 (:delete res)))) (t/is (= 0 (:delete res))))
;; Finally, check that some of the objects that are marked as ;; Finally, check that some of the objects that are marked as
;; deleted we are unable to retrieve them using standard storage ;; deleted we are unable to retrieve them using standard storage
;; public api. ;; public api.
(t/is (nil? (sto/get-object storage (:media-id fmo2)))) (t/is (some? @(sto/get-object storage (:media-id fmo2))))
(t/is (nil? (sto/get-object storage (:thumbnail-id fmo2)))) (t/is (some? @(sto/get-object storage (:thumbnail-id fmo2))))
(t/is (some? (sto/get-object storage (:media-id fmo1)))) (t/is (some? @(sto/get-object storage (:media-id fmo1))))
(t/is (some? (sto/get-object storage (:thumbnail-id fmo1)))) (t/is (some? @(sto/get-object storage (:thumbnail-id fmo1))))
))) )))

View file

@ -23,7 +23,7 @@
(let [storage (-> (:app.storage/storage th/*system*) (let [storage (-> (:app.storage/storage th/*system*)
(configure-storage-backend)) (configure-storage-backend))
sobject (sto/put-object storage {:content (sto/content "content") sobject @(sto/put-object! storage {::sto/content (sto/content "content")
:content-type "text/plain" :content-type "text/plain"
:other "data"}) :other "data"})
profile (th/create-profile* 1 {:is-active true}) profile (th/create-profile* 1 {:is-active true})
@ -92,15 +92,17 @@
)))) ))))
(t/deftest duplicate-file-with-deleted-rels (t/deftest duplicate-file-with-deleted-relations
(let [storage (-> (:app.storage/storage th/*system*) (let [storage (-> (:app.storage/storage th/*system*)
(configure-storage-backend)) (configure-storage-backend))
sobject (sto/put-object storage {:content (sto/content "content") sobject @(sto/put-object! storage {::sto/content (sto/content "content")
:content-type "text/plain" :content-type "text/plain"
:other "data"}) :other "data"})
profile (th/create-profile* 1 {:is-active true}) profile (th/create-profile* 1 {:is-active true})
project (th/create-project* 1 {:team-id (:default-team-id profile) project (th/create-project* 1 {:team-id (:default-team-id profile)
:profile-id (:id profile)}) :profile-id (:id profile)})
file1 (th/create-file* 1 {:profile-id (:id profile) file1 (th/create-file* 1 {:profile-id (:id profile)
:project-id (:id project)}) :project-id (:id project)})
file2 (th/create-file* 2 {:profile-id (:id profile) file2 (th/create-file* 2 {:profile-id (:id profile)
@ -112,16 +114,10 @@
mobj (th/create-file-media-object* {:file-id (:id file1) mobj (th/create-file-media-object* {:file-id (:id file1)
:is-local false :is-local false
:media-id (:id sobject)}) :media-id (:id sobject)})]
_ (th/mark-file-deleted* {:id (:id file2)}) (th/mark-file-deleted* {:id (:id file2)})
_ (sto/del-object storage (:id sobject))] @(sto/del-object! storage sobject)
(th/update-file*
{:file-id (:id file1)
:profile-id (:id profile)
:changes [{:type :add-media
:object (select-keys mobj [:id :width :height :mtype :name])}]})
(let [data {::th/type :duplicate-file (let [data {::th/type :duplicate-file
:profile-id (:id profile) :profile-id (:id profile)
@ -140,7 +136,7 @@
(t/is (= "file 1 (copy)" (:name result))) (t/is (= "file 1 (copy)" (:name result)))
(t/is (not= (:id file1) (:id result))) (t/is (not= (:id file1) (:id result)))
;; Check that the deleted library is not duplicated ;; Check that there are no relation to a deleted library
(let [[item :as rows] (db/query th/*pool* :file-library-rel {:file-id (:id result)})] (let [[item :as rows] (db/query th/*pool* :file-library-rel {:file-id (:id result)})]
(t/is (= 0 (count rows)))) (t/is (= 0 (count rows))))
@ -158,9 +154,10 @@
(let [storage (-> (:app.storage/storage th/*system*) (let [storage (-> (:app.storage/storage th/*system*)
(configure-storage-backend)) (configure-storage-backend))
sobject (sto/put-object storage {:content (sto/content "content") sobject @(sto/put-object! storage {::sto/content (sto/content "content")
:content-type "text/plain" :content-type "text/plain"
:other "data"}) :other "data"})
profile (th/create-profile* 1 {:is-active true}) profile (th/create-profile* 1 {:is-active true})
project (th/create-project* 1 {:team-id (:default-team-id profile) project (th/create-project* 1 {:team-id (:default-team-id profile)
:profile-id (:id profile)}) :profile-id (:id profile)})
@ -176,6 +173,7 @@
:is-local false :is-local false
:media-id (:id sobject)})] :media-id (:id sobject)})]
(th/update-file* (th/update-file*
{:file-id (:id file1) {:file-id (:id file1)
:profile-id (:id profile) :profile-id (:id profile)
@ -229,7 +227,7 @@
(t/deftest duplicate-project-with-deleted-files (t/deftest duplicate-project-with-deleted-files
(let [storage (-> (:app.storage/storage th/*system*) (let [storage (-> (:app.storage/storage th/*system*)
(configure-storage-backend)) (configure-storage-backend))
sobject (sto/put-object storage {:content (sto/content "content") sobject @(sto/put-object! storage {::sto/content (sto/content "content")
:content-type "text/plain" :content-type "text/plain"
:other "data"}) :other "data"})
profile (th/create-profile* 1 {:is-active true}) profile (th/create-profile* 1 {:is-active true})
@ -247,12 +245,6 @@
:is-local false :is-local false
:media-id (:id sobject)})] :media-id (:id sobject)})]
(th/update-file*
{:file-id (:id file1)
:profile-id (:id profile)
:changes [{:type :add-media
:object (select-keys mobj [:id :width :height :mtype :name])}]})
(th/mark-file-deleted* {:id (:id file1)}) (th/mark-file-deleted* {:id (:id file1)})
(let [data {::th/type :duplicate-project (let [data {::th/type :duplicate-project
@ -610,6 +602,3 @@
(t/is (= (:library-file-id item1) (:id file2)))) (t/is (= (:library-file-id item1) (:id file2))))
))) )))

View file

@ -41,8 +41,8 @@
(t/is (uuid? media-id)) (t/is (uuid? media-id))
(t/is (uuid? thumbnail-id)) (t/is (uuid? thumbnail-id))
(let [storage (:app.storage/storage th/*system*) (let [storage (:app.storage/storage th/*system*)
mobj1 (sto/get-object storage media-id) mobj1 @(sto/get-object storage media-id)
mobj2 (sto/get-object storage thumbnail-id)] mobj2 @(sto/get-object storage thumbnail-id)]
(t/is (sto/storage-object? mobj1)) (t/is (sto/storage-object? mobj1))
(t/is (sto/storage-object? mobj2)) (t/is (sto/storage-object? mobj2))
(t/is (= 122785 (:size mobj1))) (t/is (= 122785 (:size mobj1)))
@ -79,8 +79,8 @@
(t/is (uuid? media-id)) (t/is (uuid? media-id))
(t/is (uuid? thumbnail-id)) (t/is (uuid? thumbnail-id))
(let [storage (:app.storage/storage th/*system*) (let [storage (:app.storage/storage th/*system*)
mobj1 (sto/get-object storage media-id) mobj1 @(sto/get-object storage media-id)
mobj2 (sto/get-object storage thumbnail-id)] mobj2 @(sto/get-object storage thumbnail-id)]
(t/is (sto/storage-object? mobj1)) (t/is (sto/storage-object? mobj1))
(t/is (sto/storage-object? mobj2)) (t/is (sto/storage-object? mobj2))
(t/is (= 312043 (:size mobj1))) (t/is (= 312043 (:size mobj1)))

View file

@ -37,69 +37,74 @@
(let [storage (-> (:app.storage/storage th/*system*) (let [storage (-> (:app.storage/storage th/*system*)
(configure-storage-backend)) (configure-storage-backend))
content (sto/content "content") content (sto/content "content")
object (sto/put-object storage {:content content object @(sto/put-object! storage {::sto/content content
:content-type "text/plain" :content-type "text/plain"
:other "data"})] :other "data"})]
(t/is (sto/storage-object? object)) (t/is (sto/storage-object? object))
(t/is (fs/path? (sto/get-object-path storage object))) (t/is (fs/path? @(sto/get-object-path storage object)))
(t/is (nil? (:expired-at object))) (t/is (nil? (:expired-at object)))
(t/is (= :tmp (:backend object))) (t/is (= :tmp (:backend object)))
(t/is (= "data" (:other (meta object)))) (t/is (= "data" (:other (meta object))))
(t/is (= "text/plain" (:content-type (meta object)))) (t/is (= "text/plain" (:content-type (meta object))))
(t/is (= "content" (slurp (sto/get-object-data storage object)))) (t/is (= "content" (slurp @(sto/get-object-data storage object))))
(t/is (= "content" (slurp (sto/get-object-path storage object)))) (t/is (= "content" (slurp @(sto/get-object-path storage object))))
)) ))
(t/deftest put-and-retrieve-expired-object (t/deftest put-and-retrieve-expired-object
(let [storage (-> (:app.storage/storage th/*system*) (let [storage (-> (:app.storage/storage th/*system*)
(configure-storage-backend)) (configure-storage-backend))
content (sto/content "content") content (sto/content "content")
object (sto/put-object storage {:content content object @(sto/put-object! storage {::sto/content content
::sto/expired-at (dt/in-future {:seconds 1})
:content-type "text/plain" :content-type "text/plain"
:expired-at (dt/in-future {:seconds 1})})] })]
(t/is (sto/storage-object? object)) (t/is (sto/storage-object? object))
(t/is (dt/instant? (:expired-at object))) (t/is (dt/instant? (:expired-at object)))
(t/is (dt/is-after? (:expired-at object) (dt/now))) (t/is (dt/is-after? (:expired-at object) (dt/now)))
(t/is (= object (sto/get-object storage (:id object)))) (t/is (= object @(sto/get-object storage (:id object))))
(th/sleep 1000) (th/sleep 1000)
(t/is (nil? (sto/get-object storage (:id object)))) (t/is (nil? @(sto/get-object storage (:id object))))
(t/is (nil? (sto/get-object-data storage object))) (t/is (nil? @(sto/get-object-data storage object)))
(t/is (nil? (sto/get-object-url storage object))) (t/is (nil? @(sto/get-object-url storage object)))
(t/is (nil? (sto/get-object-path storage object))) (t/is (nil? @(sto/get-object-path storage object)))
)) ))
(t/deftest put-and-delete-object (t/deftest put-and-delete-object
(let [storage (-> (:app.storage/storage th/*system*) (let [storage (-> (:app.storage/storage th/*system*)
(configure-storage-backend)) (configure-storage-backend))
content (sto/content "content") content (sto/content "content")
object (sto/put-object storage {:content content object @(sto/put-object! storage {::sto/content content
:content-type "text/plain" :content-type "text/plain"
:expired-at (dt/in-future {:seconds 1})})] :expired-at (dt/in-future {:seconds 1})})]
(t/is (sto/storage-object? object)) (t/is (sto/storage-object? object))
(t/is (true? (sto/del-object storage object))) (t/is (true? @(sto/del-object! storage object)))
;; retrieving the same object should be not nil because the ;; retrieving the same object should be not nil because the
;; deletion is not immediate ;; deletion is not immediate
(t/is (some? (sto/get-object-data storage object))) (t/is (some? @(sto/get-object-data storage object)))
(t/is (some? (sto/get-object-url storage object))) (t/is (some? @(sto/get-object-url storage object)))
(t/is (some? (sto/get-object-path storage object))) (t/is (some? @(sto/get-object-path storage object)))
;; But you can't retrieve the object again because in database is ;; But you can't retrieve the object again because in database is
;; marked as deleted/expired. ;; marked as deleted/expired.
(t/is (nil? (sto/get-object storage (:id object)))) (t/is (nil? @(sto/get-object storage (:id object))))
)) ))
(t/deftest test-deleted-gc-task (t/deftest test-deleted-gc-task
(let [storage (-> (:app.storage/storage th/*system*) (let [storage (-> (:app.storage/storage th/*system*)
(configure-storage-backend)) (configure-storage-backend))
content (sto/content "content") content1 (sto/content "content1")
object1 (sto/put-object storage {:content content content2 (sto/content "content2")
object1 @(sto/put-object! storage {::sto/content content1
::sto/expired-at (dt/now)
:content-type "text/plain" :content-type "text/plain"
:expired-at (dt/now)}) })
object2 (sto/put-object storage {:content content object2 @(sto/put-object! storage {::sto/content content2
::sto/expired-at (dt/in-past {:hours 2})
:content-type "text/plain" :content-type "text/plain"
:expired-at (dt/in-past {:hours 2})})] })]
(th/sleep 200) (th/sleep 200)
(let [task (:app.storage/gc-deleted-task th/*system*) (let [task (:app.storage/gc-deleted-task th/*system*)
@ -147,22 +152,24 @@
(t/is (uuid? (:media-id result-1))) (t/is (uuid? (:media-id result-1)))
(t/is (uuid? (:media-id result-2))) (t/is (uuid? (:media-id result-2)))
(t/is (= (:media-id result-1) (:media-id result-2)))
;; now we proceed to manually delete one file-media-object ;; now we proceed to manually delete one file-media-object
(db/exec-one! th/*pool* ["delete from file_media_object where id = ?" (:id result-1)]) (db/exec-one! th/*pool* ["delete from file_media_object where id = ?" (:id result-1)])
;; check that we still have all the storage objects ;; check that we still have all the storage objects
(let [res (db/exec-one! th/*pool* ["select count(*) from storage_object"])] (let [res (db/exec-one! th/*pool* ["select count(*) from storage_object"])]
(t/is (= 4 (:count res)))) (t/is (= 2 (:count res))))
;; now check if the storage objects are touched ;; now check if the storage objects are touched
(let [res (db/exec-one! th/*pool* ["select count(*) from storage_object where touched_at is not null"])] (let [res (db/exec-one! th/*pool* ["select count(*) from storage_object where touched_at is not null"])]
(t/is (= 4 (:count res)))) (t/is (= 2 (:count res))))
;; run the touched gc task ;; run the touched gc task
(let [task (:app.storage/gc-touched-task th/*system*) (let [task (:app.storage/gc-touched-task th/*system*)
res (task {})] res (task {})]
(t/is (= 2 (:freeze res))) (t/is (= 2 (:freeze res)))
(t/is (= 2 (:delete res)))) (t/is (= 0 (:delete res))))
;; now check that there are no touched objects ;; now check that there are no touched objects
(let [res (db/exec-one! th/*pool* ["select count(*) from storage_object where touched_at is not null"])] (let [res (db/exec-one! th/*pool* ["select count(*) from storage_object where touched_at is not null"])]
@ -170,7 +177,7 @@
;; now check that all objects are marked to be deleted ;; now check that all objects are marked to be deleted
(let [res (db/exec-one! th/*pool* ["select count(*) from storage_object where deleted_at is not null"])] (let [res (db/exec-one! th/*pool* ["select count(*) from storage_object where deleted_at is not null"])]
(t/is (= 2 (:count res)))) (t/is (= 0 (:count res))))
))) )))
@ -249,7 +256,7 @@
(let [res (db/exec-one! th/*pool* ["select count(*) from storage_object where deleted_at is not null"])] (let [res (db/exec-one! th/*pool* ["select count(*) from storage_object where deleted_at is not null"])]
(t/is (= 4 (:count res)))))))) (t/is (= 4 (:count res))))))))
(t/deftest test-touched-gc-task-without-delete (t/deftest test-touched-gc-task-3
(let [storage (-> (:app.storage/storage th/*system*) (let [storage (-> (:app.storage/storage th/*system*)
(configure-storage-backend)) (configure-storage-backend))
prof (th/create-profile* 1) prof (th/create-profile* 1)
@ -285,9 +292,23 @@
;; run the touched gc task ;; run the touched gc task
(let [task (:app.storage/gc-touched-task th/*system*) (let [task (:app.storage/gc-touched-task th/*system*)
res (task {})] res (task {})]
(t/is (= 4 (:freeze res))) (t/is (= 2 (:freeze res)))
(t/is (= 0 (:delete res)))) (t/is (= 0 (:delete res))))
;; check that we have all object in the db ;; check that we have all object in the db
(let [res (db/exec-one! th/*pool* ["select count(*) from storage_object where deleted_at is null"])] (let [res (db/exec-one! th/*pool* ["select count(*) from storage_object where deleted_at is null"])]
(t/is (= 4 (:count res))))))) (t/is (= 2 (:count res)))))
;; now we proceed to manually delete all team_font_variant
(db/exec-one! th/*pool* ["delete from file_media_object"])
;; run the touched gc task
(let [task (:app.storage/gc-touched-task th/*system*)
res (task {})]
(t/is (= 0 (:freeze res)))
(t/is (= 2 (:delete res))))
;; check that we have all no objects
(let [res (db/exec-one! th/*pool* ["select count(*) from storage_object where deleted_at is null"])]
(t/is (= 0 (:count res))))))

View file

@ -27,7 +27,7 @@
(task-fn nil) (task-fn nil)
(t/is (:called? @mock)) (t/is (:called? @mock))
(let [[data] (-> @mock :call-args)] (let [[_ data] (-> @mock :call-args)]
(t/is (contains? data :total-fonts)) (t/is (contains? data :total-fonts))
(t/is (contains? data :total-users)) (t/is (contains? data :total-users))
(t/is (contains? data :total-projects)) (t/is (contains? data :total-projects))

View file

@ -61,7 +61,7 @@
:app.http.oauth/gitlab :app.http.oauth/gitlab
:app.http.oauth/github :app.http.oauth/github
:app.http.oauth/all :app.http.oauth/all
:app.worker/scheduler :app.worker/cron
:app.worker/worker) :app.worker/worker)
(d/deep-merge (d/deep-merge
{:app.tasks.file-media-gc/handler {:max-age (dt/duration 300)}})) {:app.tasks.file-media-gc/handler {:max-age (dt/duration 300)}}))

View file

@ -79,12 +79,6 @@
(stringify-data val)]))) (stringify-data val)])))
data))) data)))
#?(:clj
(defn set-context!
[data]
(ThreadContext/putAll (data->context-map data))
nil))
#?(:clj #?(:clj
(defmacro with-context (defmacro with-context
[data & body] [data & body]
@ -173,12 +167,11 @@
~level-sym (get-level ~level)] ~level-sym (get-level ~level)]
(when (enabled? ~logger-sym ~level-sym) (when (enabled? ~logger-sym ~level-sym)
~(if async ~(if async
`(->> (ThreadContext/getImmutableContext) `(send-off logging-agent
(send-off logging-agent (fn [_#]
(fn [_# cdata#] (with-context (into {:id (uuid/next)} ~context)
(with-context (-> {:id (uuid/next)} (into cdata#) (into ~context))
(->> (or ~raw (build-map-message ~props)) (->> (or ~raw (build-map-message ~props))
(write-log! ~logger-sym ~level-sym ~cause)))))) (write-log! ~logger-sym ~level-sym ~cause)))))
`(let [message# (or ~raw (build-map-message ~props))] `(let [message# (or ~raw (build-map-message ~props))]
(write-log! ~logger-sym ~level-sym ~cause message#)))))))) (write-log! ~logger-sym ~level-sym ~cause message#))))))))

View file

@ -131,8 +131,7 @@
(defn resolve-file-media (defn resolve-file-media
([media] ([media]
(resolve-file-media media false)) (resolve-file-media media false))
([{:keys [id] :as media} thumbnail?]
([{:keys [id]} thumbnail?]
(str (cond-> (u/join public-uri "assets/by-file-media-id/") (str (cond-> (u/join public-uri "assets/by-file-media-id/")
(true? thumbnail?) (u/join (str id "/thumbnail")) (true? thumbnail?) (u/join (str id "/thumbnail"))
(false? thumbnail?) (u/join (str id)))))) (false? thumbnail?) (u/join (str id))))))

View file

@ -7,18 +7,18 @@
(ns app.main.errors (ns app.main.errors
"Generic error handling" "Generic error handling"
(:require (:require
[app.common.data.macros :as dm]
[app.common.exceptions :as ex] [app.common.exceptions :as ex]
[app.config :as cf] [app.config :as cf]
[app.main.data.messages :as dm] [app.main.data.messages :as msg]
[app.main.data.users :as du] [app.main.data.users :as du]
[app.main.sentry :as sentry] [app.main.sentry :as sentry]
[app.main.store :as st] [app.main.store :as st]
[app.util.i18n :refer [tr]] [app.util.i18n :refer [tr]]
[app.util.router :as rt] [app.util.router :as rt]
[app.util.timers :as ts] [app.util.timers :as ts]
[cljs.pprint :refer [pprint]]
[cuerdas.core :as str]
[expound.alpha :as expound] [expound.alpha :as expound]
[fipp.edn :as fpp]
[potok.core :as ptk])) [potok.core :as ptk]))
(defn on-error (defn on-error
@ -33,7 +33,7 @@
:else :else
(let [hint (ex-message error) (let [hint (ex-message error)
msg (str "Internal Error: " hint)] msg (dm/str "Internal Error: " hint)]
(sentry/capture-exception error) (sentry/capture-exception error)
(ts/schedule (st/emitf (rt/assign-exception error))) (ts/schedule (st/emitf (rt/assign-exception error)))
@ -51,7 +51,7 @@
[_] [_]
(let [msg (tr "errors.auth.unable-to-login")] (let [msg (tr "errors.auth.unable-to-login")]
(st/emit! (du/logout {:capture-redirect true})) (st/emit! (du/logout {:capture-redirect true}))
(ts/schedule 500 (st/emitf (dm/warn msg))))) (ts/schedule 500 (st/emitf (msg/warn msg)))))
;; That are special case server-errors that should be treated ;; That are special case server-errors that should be treated
@ -73,7 +73,7 @@
[error] [error]
(ts/schedule (ts/schedule
(st/emitf (st/emitf
(dm/show {:content "Unexpected validation error." (msg/show {:content "Unexpected validation error."
:type :error :type :error
:timeout 3000}))) :timeout 3000})))
@ -81,7 +81,7 @@
(js/console.group "Validation Error:") (js/console.group "Validation Error:")
(ex/ignoring (ex/ignoring
(js/console.info (js/console.info
(with-out-str (pprint (dissoc error :explain))))) (with-out-str (fpp/pprint (dissoc error :explain)))))
(when-let [explain (:explain error)] (when-let [explain (:explain error)]
(js/console.group "Spec explain:") (js/console.group "Spec explain:")
@ -96,7 +96,7 @@
[_] [_]
(ts/schedule (ts/schedule
(st/emitf (st/emitf
(dm/show {:content "SVG is invalid or malformed" (msg/show {:content "SVG is invalid or malformed"
:type :error :type :error
:timeout 3000})))) :timeout 3000}))))
@ -104,7 +104,7 @@
[_] [_]
(ts/schedule (ts/schedule
(st/emitf (st/emitf
(dm/show {:content "There was an error with the comment" (msg/show {:content "There was an error with the comment"
:type :error :type :error
:timeout 3000})))) :timeout 3000}))))
@ -114,15 +114,15 @@
(defmethod ptk/handle-error :assertion (defmethod ptk/handle-error :assertion
[{:keys [message hint] :as error}] [{:keys [message hint] :as error}]
(let [message (or message hint) (let [message (or message hint)
message (str "Internal Assertion Error: " message) message (dm/str "Internal Assertion Error: " message)
context (str/fmt "ns: '%s'\nname: '%s'\nfile: '%s:%s'" context (dm/fmt "ns: '%'\nname: '%'\nfile: '%:%'"
(:ns error) (:ns error)
(:name error) (:name error)
(str cf/public-uri "js/cljs-runtime/" (:file error)) (dm/str cf/public-uri "js/cljs-runtime/" (:file error))
(:line error))] (:line error))]
(ts/schedule (ts/schedule
(st/emitf (st/emitf
(dm/show {:content "Internal error: assertion." (msg/show {:content "Internal error: assertion."
:type :error :type :error
:timeout 3000}))) :timeout 3000})))
@ -138,17 +138,23 @@
(defmethod ptk/handle-error :server-error (defmethod ptk/handle-error :server-error
[{:keys [data hint] :as error}] [{:keys [data hint] :as error}]
(let [hint (or hint (:hint data) (:message data)) (let [hint (or hint (:hint data) (:message data))
info (with-out-str (pprint data)) info (with-out-str (fpp/pprint (dissoc data :explain)))
msg (str "Internal Server Error: " hint)] msg (dm/str "Internal Server Error: " hint)]
(ts/schedule (ts/schedule
(st/emitf #(st/emit!
(dm/show {:content "Something wrong has happened (on backend)." (msg/show {:content "Something wrong has happened (on backend)."
:type :error :type :error
:timeout 3000}))) :timeout 3000})))
(js/console.group msg) (js/console.group msg)
(js/console.info info) (js/console.info info)
(when-let [explain (:explain data)]
(js/console.group "Spec explain:")
(js/console.log explain)
(js/console.groupEnd "Spec explain:"))
(js/console.groupEnd msg))) (js/console.groupEnd msg)))
(defn on-unhandled-error (defn on-unhandled-error
@ -156,7 +162,7 @@
(if (instance? ExceptionInfo error) (if (instance? ExceptionInfo error)
(-> error sentry/capture-exception ex-data ptk/handle-error) (-> error sentry/capture-exception ex-data ptk/handle-error)
(let [hint (ex-message error) (let [hint (ex-message error)
msg (str "Unhandled Internal Error: " hint)] msg (dm/str "Unhandled Internal Error: " hint)]
(sentry/capture-exception error) (sentry/capture-exception error)
(ts/schedule (st/emitf (rt/assign-exception error))) (ts/schedule (st/emitf (rt/assign-exception error)))
(js/console.group msg) (js/console.group msg)