From 7f60946204d81788e6e6f3a19a0f5f256a55b4e1 Mon Sep 17 00:00:00 2001
From: Andrey Antukh
Date: Mon, 22 Jan 2024 15:19:03 +0100
Subject: [PATCH] :recycle: Refactor export and duplicate mechanisms

Previously, the file processing was implemented three times using
similar approaches, but each with its own bugs. This PR unifies the
logic into a single implementation shared by the three operations.
(A brief editorial usage sketch of the new entry points follows the
template changes below.)
---
 backend/resources/app/templates/debug.tmpl  |   11 -
 backend/src/app/binfile/common.clj          |  491 ++++++++
 backend/src/app/binfile/v1.clj              |  762 +++++++++++++
 backend/src/app/binfile/v2.clj              |  429 ++-----
 backend/src/app/db/sql.clj                  |    5 +-
 backend/src/app/http/debug.clj              |   64 +-
 backend/src/app/rpc/commands/binfile.clj    | 1110 +------------------
 backend/src/app/rpc/commands/management.clj |  517 +++------
 backend/src/app/rpc/commands/teams.clj      |   16 +-
 backend/src/app/srepl/main.clj              |    4 +-
 backend/src/app/tasks/file_gc.clj           |   32 +-
 frontend/src/app/worker/export.cljs         |    4 +-
 12 files changed, 1587 insertions(+), 1858 deletions(-)
 create mode 100644 backend/src/app/binfile/common.clj
 create mode 100644 backend/src/app/binfile/v1.clj

diff --git a/backend/resources/app/templates/debug.tmpl b/backend/resources/app/templates/debug.tmpl
index 0416a045d..caede1af8 100644
--- a/backend/resources/app/templates/debug.tmpl
+++ b/backend/resources/app/templates/debug.tmpl
@@ -145,17 +145,6 @@ Debug Main Page
-            <input type="checkbox" name="ignore-index-errors" />
-            <small>
-              Do not break on index lookup errors (remap operation).
-              Useful when importing a broken file that has broken
-              relations or missing pieces.
-            </small>
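
[Editor's note: the patch replaces the three separate implementations
with the new `app.binfile.common` and `app.binfile.v1` namespaces. The
following is a minimal editorial usage sketch of the new public entry
points, mirroring the calls in `app.http.debug` below; `cfg` (a system
map with ::db/pool and ::sto/storage) and the various ids are assumed
to be available:]

    (require '[app.binfile.v1 :as bf.v1]
             '[datoteka.io :as io])

    ;; Export a set of files (plus their linked libraries) to a local path.
    (with-open [output (io/output-stream "backup.penpot")]
      (-> cfg
          (assoc ::bf.v1/ids #{file-id})
          (assoc ::bf.v1/include-libraries true)
          (bf.v1/export-files! output)))

    ;; Import them back into a project, remapping all ids on the way.
    (-> cfg
        (assoc ::bf.v1/profile-id profile-id)
        (assoc ::bf.v1/project-id project-id)
        (bf.v1/import-files! "backup.penpot"))
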
diff --git a/backend/src/app/binfile/common.clj b/backend/src/app/binfile/common.clj new file mode 100644 index 000000000..aceb4ef7b --- /dev/null +++ b/backend/src/app/binfile/common.clj @@ -0,0 +1,491 @@ +;; This Source Code Form is subject to the terms of the Mozilla Public +;; License, v. 2.0. If a copy of the MPL was not distributed with this +;; file, You can obtain one at http://mozilla.org/MPL/2.0/. +;; +;; Copyright (c) KALEIDOS INC + +(ns app.binfile.common + "A binfile related file processing common code, used for different + binfile format implementations and management rpc methods." + (:require + [app.common.data :as d] + [app.common.data.macros :as dm] + [app.common.exceptions :as ex] + [app.common.features :as cfeat] + [app.common.files.defaults :as cfd] + [app.common.files.migrations :as fmg] + [app.common.files.validate :as fval] + [app.common.logging :as l] + [app.common.uuid :as uuid] + [app.config :as cf] + [app.db :as db] + [app.db.sql :as sql] + [app.features.components-v2 :as feat.compv2] + [app.features.fdata :as feat.fdata] + [app.loggers.audit :as-alias audit] + [app.loggers.webhooks :as-alias webhooks] + [app.util.blob :as blob] + [app.util.pointer-map :as pmap] + [app.util.time :as dt] + [app.worker :as-alias wrk] + [clojure.set :as set] + [clojure.walk :as walk] + [cuerdas.core :as str])) + +(set! *warn-on-reflection* true) + +(def ^:dynamic *state* nil) +(def ^:dynamic *options* nil) + +(def xf-map-id + (map :id)) + +(def xf-map-media-id + (comp + (mapcat (juxt :media-id + :thumbnail-id + :woff1-file-id + :woff2-file-id + :ttf-file-id + :otf-file-id)) + (filter uuid?))) + +(def into-vec + (fnil into [])) + +(def conj-vec + (fnil conj [])) + +(defn collect-storage-objects + [state items] + (update state :storage-objects into xf-map-media-id items)) + +(defn collect-summary + [state key items] + (update state key into xf-map-media-id items)) + +(defn lookup-index + [id] + (when id + (let [val (get-in @*state* [:index id])] + (l/trc :fn "lookup-index" :id (str id) :result (some-> val str) ::l/sync? true) + (or val id)))) + +(defn remap-id + [item key] + (cond-> item + (contains? item key) + (update key lookup-index))) + +(defn- index-object + [index obj & attrs] + (reduce (fn [index attr-fn] + (let [old-id (attr-fn obj) + new-id (if (::overwrite *options*) old-id (uuid/next))] + (assoc index old-id new-id))) + index + attrs)) + +(defn update-index + ([index coll] + (update-index index coll identity)) + ([index coll attr] + (reduce #(index-object %1 %2 attr) index coll))) + +(defn decode-row + "A generic decode row helper" + [{:keys [data features] :as row}] + (cond-> row + features (assoc :features (db/decode-pgarray features #{})) + data (assoc :data (blob/decode data)))) + +(defn get-file + [cfg file-id] + (db/run! cfg (fn [{:keys [::db/conn] :as cfg}] + (binding [pmap/*load-fn* (partial feat.fdata/load-pointer cfg file-id)] + (when-let [file (db/get* conn :file {:id file-id} + {::db/remove-deleted false})] + (-> file + (decode-row) + (update :data feat.fdata/process-pointers deref) + (update :data feat.fdata/process-objects (partial into {})))))))) + +(defn get-project + [cfg project-id] + (db/get cfg :project {:id project-id})) + +(defn get-team + [cfg team-id] + (-> (db/get cfg :team {:id team-id}) + (decode-row))) + +(defn get-fonts + [cfg team-id] + (db/query cfg :team-font-variant + {:team-id team-id + :deleted-at nil})) + +(defn get-files-rels + "Given a set of file-id's, return all matching relations with the libraries" + [cfg ids] + + (dm/assert! 
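;; Editorial aside (not part of the patch): `xf-map-media-id` above
;; extracts every media-related uuid from a row, dropping nil columns.
;; A small worked example with hypothetical uuids `a` and `b`:
;;
;;   (into #{} xf-map-media-id
;;         [{:media-id a :thumbnail-id nil}
;;          {:woff1-file-id b}])
;;   ;; => #{a b}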
+ "expected a set of uuids" + (and (set? ids) + (every? uuid? ids))) + + (db/run! cfg (fn [{:keys [::db/conn]}] + (let [ids (db/create-array conn "uuid" ids) + sql (str "SELECT flr.* FROM file_library_rel AS flr " + " JOIN file AS l ON (flr.library_file_id = l.id) " + " WHERE flr.file_id = ANY(?) AND l.deleted_at IS NULL")] + (db/exec! conn [sql ids]))))) + + +;; NOTE: Will be used in future, commented for satisfy linter +(def ^:private sql:get-libraries + "WITH RECURSIVE libs AS ( + SELECT fl.id + FROM file AS fl + JOIN file_library_rel AS flr ON (flr.library_file_id = fl.id) + WHERE flr.file_id = ANY(?) + UNION + SELECT fl.id + FROM file AS fl + JOIN file_library_rel AS flr ON (flr.library_file_id = fl.id) + JOIN libs AS l ON (flr.file_id = l.id) + ) + SELECT DISTINCT l.id + FROM libs AS l") + +(defn get-libraries + "Get all libraries ids related to provided file ids" + [cfg ids] + (db/run! cfg (fn [{:keys [::db/conn]}] + (let [ids' (db/create-array conn "uuid" ids)] + (->> (db/exec! conn [sql:get-libraries ids']) + (into #{} xf-map-id)))))) + +(defn get-file-object-thumbnails + "Return all file object thumbnails for a given file." + [cfg file-id] + (db/query cfg :file-tagged-object-thumbnail + {:file-id file-id + :deleted-at nil})) + +(defn get-file-thumbnail + "Return the thumbnail for the specified file-id" + [cfg {:keys [id revn]}] + (db/get* cfg :file-thumbnail + {:file-id id + :revn revn + :data nil} + {::sql/columns [:media-id :file-id :revn]})) + + +(def ^:private + xform:collect-media-id + (comp + (map :objects) + (mapcat vals) + (mapcat (fn [obj] + ;; NOTE: because of some bug, we ended with + ;; many shape types having the ability to + ;; have fill-image attribute (which initially + ;; designed for :path shapes). + (sequence + (keep :id) + (concat [(:fill-image obj) + (:metadata obj)] + (map :fill-image (:fills obj)) + (map :stroke-image (:strokes obj)) + (->> (:content obj) + (tree-seq map? :children) + (mapcat :fills) + (map :fill-image)))))))) + +(defn collect-used-media + "Given a fdata (file data), returns all media references." + [data] + (-> #{} + (into xform:collect-media-id (vals (:pages-index data))) + (into xform:collect-media-id (vals (:components data))) + (into (keys (:media data))))) + +(defn get-file-media + [cfg {:keys [data id] :as file}] + (db/run! cfg (fn [{:keys [::db/conn]}] + (let [ids (collect-used-media data) + ids (db/create-array conn "uuid" ids) + sql (str "SELECT * FROM file_media_object WHERE id = ANY(?)")] + + ;; We assoc the file-id again to the file-media-object row + ;; because there are cases that used objects refer to other + ;; files and we need to ensure in the exportation process that + ;; all ids matches + (->> (db/exec! conn [sql ids]) + (mapv #(assoc % :file-id id))))))) + +(def ^:private sql:get-team-files + "SELECT f.id FROM file AS f + JOIN project AS p ON (p.id = f.project_id) + WHERE p.team_id = ?") + +(defn get-team-files + "Get a set of file ids for the specified team-id" + [{:keys [::db/conn]} team-id] + (->> (db/exec! conn [sql:get-team-files team-id]) + (into #{} xf-map-id))) + +(def ^:private sql:get-team-projects + "SELECT p.id FROM project AS p + WHERE p.team_id = ? + AND p.deleted_at IS NULL") + +(defn get-team-projects + "Get a set of project ids for the team" + [{:keys [::db/conn]} team-id] + (->> (db/exec! conn [sql:get-team-projects team-id]) + (into #{} xf-map-id))) + +(def ^:private sql:get-project-files + "SELECT f.id FROM file AS f + WHERE f.project_id = ? 
+     AND f.deleted_at IS NULL")

+(defn get-project-files
+  "Get a set of file ids for the project"
+  [{:keys [::db/conn]} project-id]
+  (->> (db/exec! conn [sql:get-project-files project-id])
+       (into #{} xf-map-id)))
+
+(defn- relink-shapes
+  "A function responsible for analyzing all file data and replacing
+  the old :component-file references with the new ones, using the
+  provided file index."
+  [data]
+  (letfn [(process-map-form [form]
+            (cond-> form
+              ;; Relink image shapes
+              (and (map? (:metadata form))
+                   (= :image (:type form)))
+              (update-in [:metadata :id] lookup-index)
+
+              ;; Relink paths with fill image
+              (map? (:fill-image form))
+              (update-in [:fill-image :id] lookup-index)
+
+              ;; This covers old shapes and the new :fills.
+              (uuid? (:fill-color-ref-file form))
+              (update :fill-color-ref-file lookup-index)
+
+              ;; This covers the old shapes and the new :strokes
+              (uuid? (:stroke-color-ref-file form))
+              (update :stroke-color-ref-file lookup-index)
+
+              ;; This covers all text shapes that have typography referenced
+              (uuid? (:typography-ref-file form))
+              (update :typography-ref-file lookup-index)
+
+              ;; This covers the component instance links
+              (uuid? (:component-file form))
+              (update :component-file lookup-index)
+
+              ;; This covers the shadows and grids (they have the
+              ;; :file-id prop directly)
+              (uuid? (:file-id form))
+              (update :file-id lookup-index)))
+
+          (process-form [form]
+            (if (map? form)
+              (try
+                (process-map-form form)
+                (catch Throwable cause
+                  (l/warn :hint "failed form" :form (pr-str form) ::l/sync? true)
+                  (throw cause)))
+              form))]
+
+    (walk/postwalk process-form data)))
+
+(defn- relink-media
+  "A function responsible for processing the :media attr of file data
+  and remapping the old ids to the new ones."
+  [media]
+  (reduce-kv (fn [res k v]
+               (let [id (lookup-index k)]
+                 (if (uuid? id)
+                   (-> res
+                       (assoc id (assoc v :id id))
+                       (dissoc k))
+                   res)))
+             media
+             media))
+
+(defn- relink-colors
+  "A function responsible for processing the :colors attr of file data
+  and remapping the old ids to the new ones."
+  [colors]
+  (reduce-kv (fn [res k v]
+               (if (:image v)
+                 (update-in res [k :image :id] lookup-index)
+                 res))
+             colors
+             colors))
+
+(defn embed-assets
+  [cfg data file-id]
+  (letfn [(walk-map-form [form state]
+            (cond
+              (uuid? (:fill-color-ref-file form))
+              (do
+                (vswap! state conj [(:fill-color-ref-file form) :colors (:fill-color-ref-id form)])
+                (assoc form :fill-color-ref-file file-id))
+
+              (uuid? (:stroke-color-ref-file form))
+              (do
+                (vswap! state conj [(:stroke-color-ref-file form) :colors (:stroke-color-ref-id form)])
+                (assoc form :stroke-color-ref-file file-id))
+
+              (uuid? (:typography-ref-file form))
+              (do
+                (vswap! state conj [(:typography-ref-file form) :typographies (:typography-ref-id form)])
+                (assoc form :typography-ref-file file-id))
+
+              (uuid? (:component-file form))
+              (do
+                (vswap! state conj [(:component-file form) :components (:component-id form)])
+                (assoc form :component-file file-id))
+
+              :else
+              form))
+
+          (process-group-of-assets [data [lib-id items]]
+            ;; NOTE: there is a possibility that a shape refers to a
+            ;; non-existent file because the file was removed. In this
+            ;; case we just ignore the asset.
+            (if-let [lib (get-file cfg lib-id)]
+              (reduce (partial process-asset lib) data items)
+              data))
+
+          (process-asset [lib data [bucket asset-id]]
+            (let [asset (get-in lib [:data bucket asset-id])
+                  ;; Add a special case for colors that need to have
+                  ;; the :file-id prop correctly set (pending the
+                  ;; refactor that will remove it).
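;; Editorial aside (not part of the patch): a worked example of the
;; embedding pass above, with hypothetical ids. A shape form such as
;;
;;   {:fill-color-ref-file lib-id, :fill-color-ref-id color-id}
;;
;; gets recorded as [lib-id :colors color-id] and rewritten to
;;
;;   {:fill-color-ref-file file-id, :fill-color-ref-id color-id}
;;
;; so the referenced color is then copied from the library into this
;; file's own :colors bucket.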
+                  asset (cond-> asset
+                          (= bucket :colors) (assoc :file-id file-id))]
+              (update data bucket assoc asset-id asset)))]
+
+    (let [assets (volatile! [])]
+      (walk/postwalk #(cond-> % (map? %) (walk-map-form assets)) data)
+      (->> (deref assets)
+           (filter #(as-> (first %) $ (and (uuid? $) (not= $ file-id))))
+           (d/group-by first rest)
+           (reduce (partial process-group-of-assets) data)))))
+
+(defn process-file
+  [{:keys [id] :as file}]
+  (-> file
+      (update :data (fn [fdata]
+                      (-> fdata
+                          (assoc :id id)
+                          (dissoc :recent-colors)
+                          (cond-> (> (:version fdata) cfd/version)
+                            (assoc :version cfd/version))
+                          ;; FIXME: We're temporarily activating all
+                          ;; migrations because a problem in the
+                          ;; environments messed up the version
+                          ;; numbers. When this problem is fixed,
+                          ;; delete the following line.
+                          (cond-> (> (:version fdata) 22)
+                            (assoc :version 22)))))
+      (fmg/migrate-file)
+      (update :data (fn [fdata]
+                      (-> fdata
+                          (update :pages-index relink-shapes)
+                          (update :components relink-shapes)
+                          (update :media relink-media)
+                          (update :colors relink-colors)
+                          (d/without-nils))))))
+
+(defn- upsert-file!
+  [conn file]
+  (let [sql (str "INSERT INTO file (id, project_id, name, revn, is_shared, data, created_at, modified_at) "
+                 "VALUES (?, ?, ?, ?, ?, ?, ?, ?) "
+                 "ON CONFLICT (id) DO UPDATE SET data=?")]
+    (db/exec-one! conn [sql
+                        (:id file)
+                        (:project-id file)
+                        (:name file)
+                        (:revn file)
+                        (:is-shared file)
+                        (:data file)
+                        (:created-at file)
+                        (:modified-at file)
+                        (:data file)])))
+
+(defn persist-file!
+  "Apply all the final validations and persist the file."
+  [{:keys [::db/conn ::timestamp] :as cfg} {:keys [id] :as file}]
+
+  (dm/assert!
+   "expected valid timestamp"
+   (dt/instant? timestamp))
+
+  (let [file (-> file
+                 (assoc :created-at timestamp)
+                 (assoc :modified-at timestamp)
+                 (assoc :ignore-sync-until (dt/plus timestamp (dt/duration {:seconds 5})))
+                 (update :features
+                         (fn [features]
+                           (let [features (cfeat/check-supported-features! features)]
+                             (-> (::features cfg #{})
+                                 (set/difference cfeat/frontend-only-features)
+                                 (set/union features))))))
+
+        _ (when (contains? cf/flags :file-schema-validation)
+            (fval/validate-file-schema! file))
+
+        _ (when (contains? cf/flags :soft-file-schema-validation)
+            (let [result (ex/try! (fval/validate-file-schema! file))]
+              (when (ex/exception? result)
+                (l/error :hint "file schema validation error" :cause result))))
+
+        file (if (contains? (:features file) "fdata/objects-map")
+               (feat.fdata/enable-objects-map file)
+               file)
+
+        file (if (contains? (:features file) "fdata/pointer-map")
+               (binding [pmap/*tracked* (pmap/create-tracked)]
+                 (let [file (feat.fdata/enable-pointer-map file)]
+                   (feat.fdata/persist-pointers! cfg id)
+                   file))
+               file)
+
+        params (-> file
+                   (update :features db/encode-pgarray conn "text")
+                   (update :data blob/encode))]
+
+    (if (::overwrite cfg)
+      (upsert-file! conn params)
+      (db/insert! conn :file params ::db/return-keys false))
+
+    file))
+
+(defn apply-pending-migrations!
+  "Apply already registered pending migrations to files"
+  [cfg]
+  (doseq [[feature file-id] (-> *state* deref :pending-to-migrate)]
+    (case feature
+      "components/v2"
+      (feat.compv2/migrate-file! cfg file-id :validate? (::validate cfg true))
+
+      "fdata/shape-data-type"
+      nil
+
+      (ex/raise :type :internal
+                :code :no-migration-defined
+                :hint (str/ffmt "no migration for feature '%' on file importation" feature)
+                :feature feature))))
diff --git a/backend/src/app/binfile/v1.clj b/backend/src/app/binfile/v1.clj
new file mode 100644
index 000000000..183c3ac69
--- /dev/null
+++ b/backend/src/app/binfile/v1.clj
@@ -0,0 +1,762 @@
+;; This Source Code Form is subject to the terms of the Mozilla Public
+;; License, v. 2.0. If a copy of the MPL was not distributed with this
+;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
+;;
+;; Copyright (c) KALEIDOS INC
+
+(ns app.binfile.v1
+  "A custom binfile format implementation, focused on performance and
+  efficiency."
+  (:refer-clojure :exclude [assert])
+  (:require
+   [app.binfile.common :as bfc]
+   [app.common.data :as d]
+   [app.common.data.macros :as dm]
+   [app.common.exceptions :as ex]
+   [app.common.features :as cfeat]
+   [app.common.fressian :as fres]
+   [app.common.logging :as l]
+   [app.common.spec :as us]
+   [app.common.types.file :as ctf]
+   [app.common.uuid :as uuid]
+   [app.config :as cf]
+   [app.db :as db]
+   [app.http.sse :as sse]
+   [app.loggers.audit :as-alias audit]
+   [app.loggers.webhooks :as-alias webhooks]
+   [app.media :as media]
+   [app.rpc :as-alias rpc]
+   [app.rpc.commands.teams :as teams]
+   [app.rpc.doc :as-alias doc]
+   [app.storage :as sto]
+   [app.storage.tmp :as tmp]
+   [app.tasks.file-gc]
+   [app.util.time :as dt]
+   [app.worker :as-alias wrk]
+   [clojure.java.io :as jio]
+   [clojure.set :as set]
+   [clojure.spec.alpha :as s]
+   [cuerdas.core :as str]
+   [datoteka.io :as io]
+   [promesa.util :as pu]
+   [yetti.adapter :as yt])
+  (:import
+   com.github.luben.zstd.ZstdInputStream
+   com.github.luben.zstd.ZstdOutputStream
+   java.io.DataInputStream
+   java.io.DataOutputStream
+   java.io.InputStream
+   java.io.OutputStream))
+
+(set! *warn-on-reflection* true)
+
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+;; DEFAULTS
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+
+;; Threshold in bytes (2 MiB): above this size we stop using
+;; in-memory byte arrays and switch to temporary files.
+(def temp-file-threshold
+  (* 1024 1024 2))
+
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+;; LOW LEVEL STREAM IO API
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+
+(def ^:const buffer-size (:xnio/buffer-size yt/defaults))
+(def ^:const penpot-magic-number 800099563638710213)
+(def ^:const max-object-size (* 1024 1024 100)) ; Only allow objects up to 100 MiB.
+
+(def ^:dynamic *position* nil)
+
+(defn get-mark
+  [id]
+  (case id
+    :header 1
+    :stream 2
+    :uuid 3
+    :label 4
+    :obj 5
+    (ex/raise :type :validation
+              :code :invalid-mark-id
+              :hint (format "invalid mark id %s" id))))
+
+(defmacro assert
+  [expr hint]
+  `(when-not ~expr
+     (ex/raise :type :validation
+               :code :unexpected-condition
+               :hint ~hint)))
+
+(defmacro assert-mark
+  [v type]
+  `(let [expected# (get-mark ~type)
+         val# (long ~v)]
+     (when (not= val# expected#)
+       (ex/raise :type :validation
+                 :code :unexpected-mark
+                 :hint (format "received mark %s, expected %s" val# expected#)))))
+
+(defmacro assert-label
+  [expr label]
+  `(let [v# ~expr]
+     (when (not= v# ~label)
+       (ex/raise :type :assertion
+                 :code :unexpected-label
+                 :hint (format "received label %s, expected %s" v# ~label)))))
+
+;; --- PRIMITIVE IO
+
+(defn write-byte!
+  [^DataOutputStream output data]
+  (l/trace :fn "write-byte!" :data data :position @*position* ::l/sync?
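;; Editorial sketch (not part of the patch) of the framing primitives,
;; assuming a local `path`; the high-level API below establishes the
;; same `*position*` binding:
;;
;;   (binding [*position* (atom 0)]
;;     (with-open [output (io/output-stream path)]
;;       (write-header! output :v1))   ; mark byte + magic number + version
;;     (with-open [input (io/input-stream path)]
;;       (read-header! input)))        ; => :v1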
true) + (.writeByte output (byte data)) + (swap! *position* inc)) + +(defn read-byte! + [^DataInputStream input] + (let [v (.readByte input)] + (l/trace :fn "read-byte!" :val v :position @*position* ::l/sync? true) + (swap! *position* inc) + v)) + +(defn write-long! + [^DataOutputStream output data] + (l/trace :fn "write-long!" :data data :position @*position* ::l/sync? true) + (.writeLong output (long data)) + (swap! *position* + 8)) + + +(defn read-long! + [^DataInputStream input] + (let [v (.readLong input)] + (l/trace :fn "read-long!" :val v :position @*position* ::l/sync? true) + (swap! *position* + 8) + v)) + +(defn write-bytes! + [^DataOutputStream output ^bytes data] + (let [size (alength data)] + (l/trace :fn "write-bytes!" :size size :position @*position* ::l/sync? true) + (.write output data 0 size) + (swap! *position* + size))) + +(defn read-bytes! + [^InputStream input ^bytes buff] + (let [size (alength buff) + readed (.readNBytes input buff 0 size)] + (l/trace :fn "read-bytes!" :expected (alength buff) :readed readed :position @*position* ::l/sync? true) + (swap! *position* + readed) + readed)) + +;; --- COMPOSITE IO + +(defn write-uuid! + [^DataOutputStream output id] + (l/trace :fn "write-uuid!" :position @*position* :WRITTEN? (.size output) ::l/sync? true) + + (doto output + (write-byte! (get-mark :uuid)) + (write-long! (uuid/get-word-high id)) + (write-long! (uuid/get-word-low id)))) + +(defn read-uuid! + [^DataInputStream input] + (l/trace :fn "read-uuid!" :position @*position* ::l/sync? true) + (let [m (read-byte! input)] + (assert-mark m :uuid) + (let [a (read-long! input) + b (read-long! input)] + (uuid/custom a b)))) + +(defn write-obj! + [^DataOutputStream output data] + (l/trace :fn "write-obj!" :position @*position* ::l/sync? true) + (let [^bytes data (fres/encode data)] + (doto output + (write-byte! (get-mark :obj)) + (write-long! (alength data)) + (write-bytes! data)))) + +(defn read-obj! + [^DataInputStream input] + (l/trace :fn "read-obj!" :position @*position* ::l/sync? true) + (let [m (read-byte! input)] + (assert-mark m :obj) + (let [size (read-long! input)] + (assert (pos? size) "incorrect header size found on reading header") + (let [buff (byte-array size)] + (read-bytes! input buff) + (fres/decode buff))))) + +(defn write-label! + [^DataOutputStream output label] + (l/trace :fn "write-label!" :label label :position @*position* ::l/sync? true) + (doto output + (write-byte! (get-mark :label)) + (write-obj! label))) + +(defn read-label! + [^DataInputStream input] + (l/trace :fn "read-label!" :position @*position* ::l/sync? true) + (let [m (read-byte! input)] + (assert-mark m :label) + (read-obj! input))) + +(defn write-header! + [^OutputStream output version] + (l/trace :fn "write-header!" + :version version + :position @*position* + ::l/sync? true) + (let [vers (-> version name (subs 1) parse-long) + output (io/data-output-stream output)] + (doto output + (write-byte! (get-mark :header)) + (write-long! penpot-magic-number) + (write-long! vers)))) + +(defn read-header! + [^InputStream input] + (l/trace :fn "read-header!" :position @*position* ::l/sync? true) + (let [input (io/data-input-stream input) + mark (read-byte! input) + mnum (read-long! input) + vers (read-long! input)] + + (when (or (not= mark (get-mark :header)) + (not= mnum penpot-magic-number)) + (ex/raise :type :validation + :code :invalid-penpot-file + :hint "invalid penpot file")) + + (keyword (str "v" vers)))) + +(defn copy-stream! 
+ [^OutputStream output ^InputStream input ^long size] + (let [written (io/copy! input output :size size)] + (l/trace :fn "copy-stream!" :position @*position* :size size :written written ::l/sync? true) + (swap! *position* + written) + written)) + +(defn write-stream! + [^DataOutputStream output stream size] + (l/trace :fn "write-stream!" :position @*position* ::l/sync? true :size size) + (doto output + (write-byte! (get-mark :stream)) + (write-long! size)) + + (copy-stream! output stream size)) + +(defn read-stream! + [^DataInputStream input] + (l/trace :fn "read-stream!" :position @*position* ::l/sync? true) + (let [m (read-byte! input) + s (read-long! input) + p (tmp/tempfile :prefix "penpot.binfile.")] + (assert-mark m :stream) + + (when (> s max-object-size) + (ex/raise :type :validation + :code :max-file-size-reached + :hint (str/ffmt "unable to import storage object with size % bytes" s))) + + (if (> s temp-file-threshold) + (with-open [^OutputStream output (io/output-stream p)] + (let [readed (io/copy! input output :offset 0 :size s)] + (l/trace :fn "read-stream*!" :expected s :readed readed :position @*position* ::l/sync? true) + (swap! *position* + readed) + [s p])) + [s (io/read-as-bytes input :size s)]))) + +(defmacro assert-read-label! + [input expected-label] + `(let [readed# (read-label! ~input) + expected# ~expected-label] + (when (not= readed# expected#) + (ex/raise :type :validation + :code :unexpected-label + :hint (format "unexpected label found: %s, expected: %s" readed# expected#))))) + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; API +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; + +;; --- HELPERS + +(defn zstd-input-stream + ^InputStream + [input] + (ZstdInputStream. ^InputStream input)) + +(defn zstd-output-stream + ^OutputStream + [output & {:keys [level] :or {level 0}}] + (ZstdOutputStream. ^OutputStream output (int level))) + +(defn- get-files + [cfg ids] + (db/run! cfg (fn [{:keys [::db/conn]}] + (let [sql (str "SELECT id FROM file " + " WHERE id = ANY(?) ") + ids (db/create-array conn "uuid" ids)] + (->> (db/exec! conn [sql ids]) + (into [] (map :id)) + (not-empty)))))) + +;; --- EXPORT WRITER + +(defmulti write-export ::version) +(defmulti write-section ::section) + +(defn write-export! + [{:keys [::include-libraries ::embed-assets] :as cfg}] + (when (and include-libraries embed-assets) + (throw (IllegalArgumentException. + "the `include-libraries` and `embed-assets` are mutally excluding options"))) + + (write-export cfg)) + +(defmethod write-export :default + [{:keys [::output] :as options}] + (write-header! output :v1) + (pu/with-open [output (zstd-output-stream output :level 12) + output (io/data-output-stream output)] + (binding [bfc/*state* (volatile! {})] + (run! (fn [section] + (l/dbg :hint "write section" :section section ::l/sync? true) + (write-label! output section) + (let [options (-> options + (assoc ::output output) + (assoc ::section section))] + (binding [bfc/*options* options] + (write-section options)))) + + [:v1/metadata :v1/files :v1/rels :v1/sobjects])))) + +(defmethod write-section :v1/metadata + [{:keys [::output ::ids ::include-libraries] :as cfg}] + (if-let [fids (get-files cfg ids)] + (let [lids (when include-libraries + (bfc/get-libraries cfg ids)) + ids (into fids lids)] + (write-obj! output {:version cf/version :files ids}) + (vswap! 
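;; Editorial note (not part of the patch): a v1 export is a single
;; zstd-compressed stream of four labeled sections, written and read in
;; the same fixed order; `write-label!` frames each one and
;; `assert-read-label!` verifies it on import:
;;
;;   header -> :v1/metadata -> :v1/files -> :v1/rels -> :v1/sobjects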
bfc/*state* assoc :files ids)) + (ex/raise :type :not-found + :code :files-not-found + :hint "unable to retrieve files for export"))) + +(defmethod write-section :v1/files + [{:keys [::output ::embed-assets ::include-libraries] :as cfg}] + + ;; Initialize SIDS with empty vector + (vswap! bfc/*state* assoc :sids []) + + (doseq [file-id (-> bfc/*state* deref :files)] + (let [detach? (and (not embed-assets) (not include-libraries)) + thumbnails (->> (bfc/get-file-object-thumbnails cfg file-id) + (mapv #(dissoc % :file-id))) + + file (cond-> (bfc/get-file cfg file-id) + detach? + (-> (ctf/detach-external-references file-id) + (dissoc :libraries)) + + embed-assets + (update :data #(bfc/embed-assets cfg % file-id)) + + :always + (assoc :thumbnails thumbnails)) + + media (bfc/get-file-media cfg file)] + + (l/dbg :hint "write penpot file" + :id (str file-id) + :name (:name file) + :thumbnails (count thumbnails) + :features (:features file) + :media (count media) + ::l/sync? true) + + (doseq [item media] + (l/dbg :hint "write penpot file media object" :id (:id item) ::l/sync? true)) + + (doseq [item thumbnails] + (l/dbg :hint "write penpot file object thumbnail" :media-id (str (:media-id item)) ::l/sync? true)) + + (doto output + (write-obj! file) + (write-obj! media)) + + (vswap! bfc/*state* update :sids into bfc/xf-map-media-id media) + (vswap! bfc/*state* update :sids into bfc/xf-map-media-id thumbnails)))) + +(defmethod write-section :v1/rels + [{:keys [::output ::include-libraries] :as cfg}] + (let [ids (-> bfc/*state* deref :files set) + rels (when include-libraries + (bfc/get-files-rels cfg ids))] + (l/dbg :hint "found rels" :total (count rels) ::l/sync? true) + (write-obj! output rels))) + +(defmethod write-section :v1/sobjects + [{:keys [::sto/storage ::output]}] + (let [sids (-> bfc/*state* deref :sids) + storage (media/configure-assets-storage storage)] + + (l/dbg :hint "found sobjects" + :items (count sids) + ::l/sync? true) + + ;; Write all collected storage objects + (write-obj! output sids) + + (doseq [id sids] + (let [{:keys [size] :as obj} (sto/get-object storage id)] + (l/dbg :hint "write sobject" :id (str id) ::l/sync? true) + + (doto output + (write-uuid! id) + (write-obj! (meta obj))) + + (pu/with-open [stream (sto/get-object-data storage obj)] + (let [written (write-stream! output stream size)] + (when (not= written size) + (ex/raise :type :validation + :code :mismatch-readed-size + :hint (str/ffmt "found unexpected object size; size=% written=%" size written))))))))) + +;; --- EXPORT READER + +(defmulti read-import ::version) +(defmulti read-section ::section) + +(s/def ::profile-id ::us/uuid) +(s/def ::project-id ::us/uuid) +(s/def ::input io/input-stream?) +(s/def ::overwrite? (s/nilable ::us/boolean)) +(s/def ::ignore-index-errors? (s/nilable ::us/boolean)) + +;; FIXME: replace with schema +(s/def ::read-import-options + (s/keys :req [::db/pool ::sto/storage ::project-id ::profile-id ::input] + :opt [::overwrite? ::ignore-index-errors?])) + +(defn read-import! + "Do the importation of the specified resource in penpot custom binary + format. There are some options for customize the importation + behavior: + + `::bfc/overwrite`: if true, instead of creating new files and remapping id references, + it reuses all ids and updates existing objects; defaults to `false`." + [{:keys [::input ::bfc/timestamp] :or {timestamp (dt/now)} :as options}] + + (dm/assert! + "expected input stream" + (io/input-stream? input)) + + (dm/assert! + "expected valid instant" + (dt/instant? 
timestamp)) + + (let [version (read-header! input)] + (read-import (assoc options ::version version ::bfc/timestamp timestamp)))) + +(defn- read-import-v1 + [{:keys [::db/conn ::project-id ::profile-id ::input] :as cfg}] + (db/exec-one! conn ["SET idle_in_transaction_session_timeout = 0"]) + (db/exec-one! conn ["SET CONSTRAINTS ALL DEFERRED"]) + + (pu/with-open [input (zstd-input-stream input) + input (io/data-input-stream input)] + (binding [bfc/*state* (volatile! {:media [] :index {}})] + (let [team (teams/get-team conn + :profile-id profile-id + :project-id project-id) + + features (cfeat/get-team-enabled-features cf/flags team)] + + (sse/tap {:type :import-progress + :section :read-import}) + + ;; Process all sections + (run! (fn [section] + (l/dbg :hint "reading section" :section section ::l/sync? true) + (assert-read-label! input section) + (let [options (-> cfg + (assoc ::bfc/features features) + (assoc ::section section) + (assoc ::input input))] + (binding [bfc/*options* options] + (sse/tap {:type :import-progress + :section section}) + (read-section options)))) + [:v1/metadata :v1/files :v1/rels :v1/sobjects]) + + (bfc/apply-pending-migrations! cfg) + + ;; Knowing that the ids of the created files are in index, + ;; just lookup them and return it as a set + (let [files (-> bfc/*state* deref :files)] + (into #{} (keep #(get-in @bfc/*state* [:index %])) files)))))) + +(defmethod read-import :v1 + [options] + (db/tx-run! options read-import-v1)) + +(defmethod read-section :v1/metadata + [{:keys [::input]}] + (let [{:keys [version files]} (read-obj! input)] + (l/dbg :hint "metadata readed" + :version (:full version) + :files (mapv str files) + ::l/sync? true) + (vswap! bfc/*state* update :index bfc/update-index files) + (vswap! bfc/*state* assoc :version version :files files))) + +(defn- remap-thumbnails + [thumbnails file-id] + (mapv (fn [thumbnail] + (-> thumbnail + (assoc :file-id file-id) + (update :object-id #(str/replace-first % #"^(.*?)/" (str file-id "/"))))) + thumbnails)) + +(defmethod read-section :v1/files + [{:keys [::db/conn ::input ::project-id ::bfc/overwrite ::name] :as system}] + + (doseq [[idx expected-file-id] (d/enumerate (-> bfc/*state* deref :files))] + (let [file (read-obj! input) + media (read-obj! input) + + file-id (:id file) + file-id' (bfc/lookup-index file-id) + + thumbnails (:thumbnails file)] + + (when (not= file-id expected-file-id) + (ex/raise :type :validation + :code :inconsistent-penpot-file + :found-id file-id + :expected-id expected-file-id + :hint "the penpot file seems corrupt, found unexpected uuid (file-id)")) + + (l/dbg :hint "processing file" + :id (str file-id) + :features (:features file) + :version (-> file :data :version) + :media (count media) + :thumbnails (count thumbnails) + ::l/sync? true) + + (when (seq thumbnails) + (let [thumbnails (remap-thumbnails thumbnails file-id')] + (l/dbg :hint "updated index with thumbnails" :total (count thumbnails) ::l/sync? true) + (vswap! bfc/*state* update :thumbnails bfc/into-vec thumbnails))) + + (when (seq media) + ;; Update index with media + (l/dbg :hint "update index with media" :total (count media) ::l/sync? true) + (vswap! bfc/*state* update :index bfc/update-index (map :id media)) + + ;; Store file media for later insertion + (l/dbg :hint "update media references" ::l/sync? true) + (vswap! bfc/*state* update :media into (map #(update % :id bfc/lookup-index)) media)) + + (let [file (-> file + (assoc :id file-id') + (cond-> (and (= idx 0) (some? 
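;; Editorial sketch (not part of the patch) of the id remapping used
;; here, with a hypothetical `old-id`: `bfc/update-index` assigns a
;; fresh uuid for every incoming id (or keeps the old one when
;; ::overwrite is set), and `bfc/lookup-index` resolves old ids to new
;; ones while relinking:
;;
;;   (vswap! bfc/*state* update :index bfc/update-index [old-id])
;;   (bfc/lookup-index old-id)   ; => the freshly assigned uuid
;;                               ;    (or old-id when not indexed)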
name)) + (assoc :name name)) + (assoc :project-id project-id) + (dissoc :thumbnails) + (bfc/process-file))] + + ;; All features that are enabled and requires explicit migration are + ;; added to the state for a posterior migration step. + (doseq [feature (-> (::bfc/features system) + (set/difference cfeat/no-migration-features) + (set/difference (:features file)))] + (vswap! bfc/*state* update :pending-to-migrate (fnil conj []) [feature file-id'])) + + (l/dbg :hint "create file" :id (str file-id') ::l/sync? true) + (bfc/persist-file! system file) + + (when overwrite + (db/delete! conn :file-thumbnail {:file-id file-id'})) + + file-id')))) + +(defmethod read-section :v1/rels + [{:keys [::db/conn ::input ::bfc/timestamp]}] + (let [rels (read-obj! input) + ids (into #{} (-> bfc/*state* deref :files))] + ;; Insert all file relations + (doseq [{:keys [library-file-id] :as rel} rels] + (let [rel (-> rel + (assoc :synced-at timestamp) + (update :file-id bfc/lookup-index) + (update :library-file-id bfc/lookup-index))] + + (if (contains? ids library-file-id) + (do + (l/dbg :hint "create file library link" + :file-id (:file-id rel) + :lib-id (:library-file-id rel) + ::l/sync? true) + (db/insert! conn :file-library-rel rel)) + + (l/warn :hint "ignoring file library link" + :file-id (:file-id rel) + :lib-id (:library-file-id rel) + ::l/sync? true)))))) + +(defmethod read-section :v1/sobjects + [{:keys [::sto/storage ::db/conn ::input ::bfc/overwrite ::bfc/timestamp]}] + (let [storage (media/configure-assets-storage storage) + ids (read-obj! input) + thumb? (into #{} (map :media-id) (:thumbnails @bfc/*state*))] + + (doseq [expected-storage-id ids] + (let [id (read-uuid! input) + mdata (read-obj! input)] + + (when (not= id expected-storage-id) + (ex/raise :type :validation + :code :inconsistent-penpot-file + :hint "the penpot file seems corrupt, found unexpected uuid (storage-object-id)")) + + (l/dbg :hint "readed storage object" :id (str id) ::l/sync? true) + + (let [[size resource] (read-stream! input) + hash (sto/calculate-hash resource) + content (-> (sto/content resource size) + (sto/wrap-with-hash hash)) + + params (-> mdata + (assoc ::sto/content content) + (assoc ::sto/deduplicate? true) + (assoc ::sto/touched-at timestamp)) + + params (if (thumb? id) + (assoc params :bucket "file-object-thumbnail") + (assoc params :bucket "file-media-object")) + + sobject (sto/put-object! storage params)] + + (l/dbg :hint "persisted storage object" + :old-id (str id) + :new-id (str (:id sobject)) + :is-thumbnail (boolean (thumb? id)) + ::l/sync? true) + + (vswap! bfc/*state* update :index assoc id (:id sobject))))) + + (doseq [item (:media @bfc/*state*)] + (l/dbg :hint "inserting file media object" + :id (str (:id item)) + :file-id (str (:file-id item)) + ::l/sync? true) + + (let [file-id (bfc/lookup-index (:file-id item))] + (if (= file-id (:file-id item)) + (l/warn :hint "ignoring file media object" :file-id (str file-id) ::l/sync? true) + (db/insert! conn :file-media-object + (-> item + (assoc :file-id file-id) + (d/update-when :media-id bfc/lookup-index) + (d/update-when :thumbnail-id bfc/lookup-index)) + {::db/on-conflict-do-nothing? overwrite})))) + + (doseq [item (:thumbnails @bfc/*state*)] + (let [item (update item :media-id bfc/lookup-index)] + (l/dbg :hint "inserting file object thumbnail" + :file-id (str (:file-id item)) + :media-id (str (:media-id item)) + :object-id (:object-id item) + ::l/sync? true) + (db/insert! conn :file-tagged-object-thumbnail item + {::db/on-conflict-do-nothing? 
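;; Editorial note (not part of the patch): `remap-thumbnails` above
;; rewrites the leading segment of each :object-id, which is the owning
;; file id, so with hypothetical ids a thumbnail moves from
;;
;;   "<old-file-id>/..."  to  "<new-file-id>/..."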
overwrite})))))

+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+;; HIGH LEVEL API
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+
+(defn export-files!
+  "Export the specified files in the custom penpot binary format.
+  There are some options available to customize the output:
+
+  `::include-libraries`: in addition to the specified files, all the
+  linked libraries will also be included (including transitive
+  dependencies).
+
+  `::embed-assets`: instead of including the libraries, embed all the
+  assets used from external libraries into the file itself."
+
+  [{:keys [::ids] :as cfg} output]
+
+  (dm/assert!
+   "expected a set of uuid's for `::ids` parameter"
+   (and (set? ids)
+        (every? uuid? ids)))
+
+  (dm/assert!
+   "expected instance of jio/IOFactory for `output`"
+   (satisfies? jio/IOFactory output))
+
+  (let [id (uuid/next)
+        tp (dt/tpoint)
+        ab (volatile! false)
+        cs (volatile! nil)]
+    (try
+      (l/info :hint "start exportation" :export-id (str id))
+      (pu/with-open [output (io/output-stream output)]
+        (binding [*position* (atom 0)]
+          (write-export! (assoc cfg ::output output))))
+
+      (catch java.io.IOException _cause
+        ;; Do nothing; EOF means the client closed the connection abruptly
+        (vreset! ab true)
+        nil)
+
+      (catch Throwable cause
+        (vreset! cs cause)
+        (vreset! ab true)
+        (throw cause))
+
+      (finally
+        (l/info :hint "exportation finished" :export-id (str id)
+                :elapsed (str (inst-ms (tp)) "ms")
+                :aborted @ab
+                :cause @cs)))))
+
+(defn import-files!
+  [cfg input]
+
+  (dm/assert!
+   "expected valid profile-id and project-id on `cfg`"
+   (and (uuid? (::profile-id cfg))
+        (uuid? (::project-id cfg))))
+
+  (dm/assert!
+   "expected instance of jio/IOFactory for `input`"
+   (satisfies? jio/IOFactory input))
+
+  (let [id (uuid/next)
+        tp (dt/tpoint)
+        cs (volatile! nil)]
+
+    (l/info :hint "import: started" :id (str id))
+    (try
+      (binding [*position* (atom 0)]
+        (pu/with-open [input (io/input-stream input)]
+          (read-import! (assoc cfg ::input input))))
+
+      (catch Throwable cause
+        (vreset! cs cause)
+        (throw cause))
+
+      (finally
+        (l/info :hint "import: terminated"
+                :id (str id)
+                :elapsed (dt/format-duration (tp))
+                :error? (some? @cs))))))
+
diff --git a/backend/src/app/binfile/v2.clj b/backend/src/app/binfile/v2.clj
index 2b3639b39..33a92a03b 100644
--- a/backend/src/app/binfile/v2.clj
+++ b/backend/src/app/binfile/v2.clj
@@ -9,31 +9,24 @@
    of entire team (or multiple teams) at once."
   (:refer-clojure :exclude [read])
   (:require
+   [app.binfile.common :as bfc]
    [app.common.data :as d]
-   [app.common.exceptions :as ex]
    [app.common.features :as cfeat]
-   [app.common.files.defaults :as cfd]
-   [app.common.files.migrations :as fmg]
-   [app.common.files.validate :as fval]
    [app.common.logging :as l]
    [app.common.transit :as t]
    [app.common.uuid :as uuid]
    [app.config :as cf]
    [app.db :as db]
    [app.db.sql :as sql]
-   [app.features.fdata :as feat.fdata]
    [app.http.sse :as sse]
    [app.loggers.audit :as-alias audit]
    [app.loggers.webhooks :as-alias webhooks]
    [app.media :as media]
    [app.storage :as sto]
    [app.storage.tmp :as tmp]
-   [app.util.blob :as blob]
-   [app.util.pointer-map :as pmap]
    [app.util.time :as dt]
    [app.worker :as-alias wrk]
    [clojure.set :as set]
-   [clojure.walk :as walk]
    [cuerdas.core :as str]
    [datoteka.io :as io]
    [promesa.util :as pu])
@@ -42,34 +35,10 @@
 (set!
*warn-on-reflection* true) -(def ^:dynamic *state* nil) -(def ^:dynamic *options* nil) - ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; LOW LEVEL API ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; -(defn- lookup-index - [id] - (when-let [val (get-in @*state* [:index id])] - (l/trc :fn "lookup-index" :id (some-> id str) :result (some-> val str) ::l/sync? true) - (or val id))) - -(defn- index-object - [index obj & attrs] - (reduce (fn [index attr-fn] - (let [old-id (attr-fn obj) - new-id (uuid/next)] - (assoc index old-id new-id))) - index - attrs)) - -(defn- update-index - ([index coll] - (update-index index coll identity)) - ([index coll attr] - (reduce #(index-object %1 %2 attr) index coll))) - (defn- create-database ([cfg] (let [path (tmp/tempfile :prefix "penpot.binfile." :suffix ".sqlite")] @@ -92,12 +61,6 @@ "CREATE INDEX kvdata__tag_key__idx ON kvdata (tag, key)") -(defn- decode-row - [{:keys [data features] :as row}] - (cond-> row - features (assoc :features (db/decode-pgarray features #{})) - data (assoc :data (blob/decode data)))) - (defn- setup-schema! [{:keys [::db]}] (db/exec-one! db [sql:create-kvdata-table]) @@ -147,156 +110,62 @@ ;; IMPORT/EXPORT IMPL ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; -(def ^:private xf-map-id - (map :id)) - -(def ^:private xf-map-media-id - (comp - (mapcat (juxt :media-id - :thumbnail-id - :woff1-file-id - :woff2-file-id - :ttf-file-id - :otf-file-id)) - (filter uuid?))) - -;; NOTE: Will be used in future, commented for satisfy linter -;; (def ^:private sql:get-libraries -;; "WITH RECURSIVE libs AS ( -;; SELECT fl.id -;; FROM file AS fl -;; JOIN file_library_rel AS flr ON (flr.library_file_id = fl.id) -;; WHERE flr.file_id = ANY(?) -;; UNION -;; SELECT fl.id -;; FROM file AS fl -;; JOIN file_library_rel AS flr ON (flr.library_file_id = fl.id) -;; JOIN libs AS l ON (flr.file_id = l.id) -;; ) -;; SELECT DISTINCT l.id -;; FROM libs AS l") -;; -;; (defn- get-libraries -;; "Get all libraries ids related to provided file ids" -;; [{:keys [::db/conn]} ids] -;; (let [ids' (db/create-array conn "uuid" ids)] -;; (->> (db/exec! conn [sql:get-libraries ids]) -;; (into #{} xf-map-id)))) -;; -;; (def ^:private sql:get-project-files -;; "SELECT f.id FROM file AS f -;; WHERE f.project_id = ?") - -;; (defn- get-project-files -;; "Get a set of file ids for the project" -;; [{:keys [::db/conn]} project-id] -;; (->> (db/exec! conn [sql:get-project-files project-id]) -;; (into #{} xf-map-id))) - -(def ^:private sql:get-team-files - "SELECT f.id FROM file AS f - JOIN project AS p ON (p.id = f.project_id) - WHERE p.team_id = ?") - -(defn- get-team-files - "Get a set of file ids for the specified team-id" - [{:keys [::db/conn]} team-id] - (->> (db/exec! conn [sql:get-team-files team-id]) - (into #{} xf-map-id))) - -(def ^:private sql:get-team-projects - "SELECT p.id FROM project AS p - WHERE p.team_id = ?") - -(defn- get-team-projects - "Get a set of project ids for the team" - [{:keys [::db/conn]} team-id] - (->> (db/exec! conn [sql:get-team-projects team-id]) - (into #{} xf-map-id))) - (declare ^:private write-project!) (declare ^:private write-file!) (defn- write-team! 
- [{:keys [::db/conn] :as cfg} team-id] + [cfg team-id] (sse/tap {:type :export-progress :section :write-team :id team-id}) - (let [team (db/get conn :team {:id team-id} - ::db/remove-deleted false - ::db/check-deleted false) - team (decode-row team) - fonts (db/query conn :team-font-variant - {:team-id team-id - :deleted-at nil} - {::sql/for-share true})] + (let [team (bfc/get-team cfg team-id) + fonts (bfc/get-fonts cfg team-id)] (l/trc :hint "write" :obj "team" :id (str team-id) :fonts (count fonts)) - (vswap! *state* update :teams conj team-id) - (vswap! *state* update :storage-objects into xf-map-media-id fonts) + (vswap! bfc/*state* update :teams conj team-id) + (vswap! bfc/*state* bfc/collect-storage-objects fonts) (write! cfg :team team-id team) (doseq [{:keys [id] :as font} fonts] - (vswap! *state* update :team-font-variants conj id) + (vswap! bfc/*state* update :team-font-variants conj id) (write! cfg :team-font-variant id font)))) (defn- write-project! - [{:keys [::db/conn] :as cfg} project-id] + [cfg project-id] (sse/tap {:type :export-progress :section :write-project :id project-id}) - (let [project (db/get conn :project {:id project-id} - ::db/remove-deleted false - ::db/check-deleted false)] - + (let [project (bfc/get-project cfg project-id)] (l/trc :hint "write" :obj "project" :id (str project-id)) (write! cfg :project (str project-id) project) - - (vswap! *state* update :projects conj project-id))) + (vswap! bfc/*state* update :projects conj project-id))) (defn- write-file! - [{:keys [::db/conn] :as cfg} file-id] + [cfg file-id] (sse/tap {:type :export-progress :section :write-file :id file-id}) - (let [file (binding [pmap/*load-fn* (partial feat.fdata/load-pointer cfg file-id)] - (-> (db/get conn :file {:id file-id} - ::sql/for-share true - ::db/remove-deleted false - ::db/check-deleted false) - (decode-row) - (update :data feat.fdata/process-pointers deref) - (update :data feat.fdata/process-objects (partial into {})))) + (let [file (bfc/get-file cfg file-id) + thumbs (bfc/get-file-object-thumbnails cfg file-id) + media (bfc/get-file-media cfg file) + rels (bfc/get-files-rels cfg #{file-id})] - thumbs (db/query conn :file-tagged-object-thumbnail - {:file-id file-id - :deleted-at nil} - {::sql/for-share true}) - - media (db/query conn :file-media-object - {:file-id file-id - :deleted-at nil} - {::sql/for-share true}) - - rels (db/query conn :file-library-rel - {:file-id file-id})] - - (vswap! *state* (fn [state] - (-> state - (update :files conj file-id) - (update :file-media-objects into (map :id) media) - (update :storage-objects into xf-map-media-id thumbs) - (update :storage-objects into xf-map-media-id media)))) + (vswap! bfc/*state* (fn [state] + (-> state + (update :files conj file-id) + (update :file-media-objects into bfc/xf-map-id media) + (bfc/collect-storage-objects thumbs) + (bfc/collect-storage-objects media)))) (write! cfg :file file-id file) (write! cfg :file-rels file-id rels) @@ -304,13 +173,8 @@ (run! (partial write! cfg :file-media-object file-id) media) (run! (partial write! cfg :file-object-thumbnail file-id) thumbs) - (when-let [thumb (db/get* conn :file-thumbnail - {:file-id file-id - :revn (:revn file) - :data nil} - {::sql/for-share true - ::sql/columns [:media-id :file-id :revn]})] - (vswap! *state* update :storage-objects into xf-map-media-id [thumb]) + (when-let [thumb (bfc/get-file-thumbnail cfg file)] + (vswap! bfc/*state* bfc/collect-storage-objects [thumb]) (write! 
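;; Editorial usage sketch (not part of the patch) for the v2
;; SQLite-based flow, assuming a configured `cfg`, a known team id, and
;; that the surrounding export entry point keeps the `export-team!`
;; name; it returns the path of the generated snapshot, which
;; `import-team!` consumes:
;;
;;   (let [path (export-team! cfg team-id)]
;;     (import-team! cfg path))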
cfg :file-thumbnail file-id thumb)) (l/trc :hint "write" :obj "file" @@ -328,7 +192,7 @@ (write! cfg :storage-object id (meta sobj) data))) (defn- read-storage-object! - [{:keys [::sto/storage ::timestamp] :as cfg} id] + [{:keys [::sto/storage ::bfc/timestamp] :as cfg} id] (let [mdata (read-obj cfg :storage-object id) data (read-blob cfg :storage-object id) hash (sto/calculate-hash data) @@ -343,7 +207,7 @@ sobject (sto/put-object! storage params)] - (vswap! *state* update :index assoc id (:id sobject)) + (vswap! bfc/*state* update :index assoc id (:id sobject)) (l/trc :hint "read" :obj "storage-object" :id (str id) @@ -351,7 +215,7 @@ :size (:size sobject)))) (defn read-team! - [{:keys [::db/conn ::timestamp] :as cfg} team-id] + [{:keys [::db/conn ::bfc/timestamp] :as cfg} team-id] (l/trc :hint "read" :obj "team" :id (str team-id)) (sse/tap {:type :import-progress @@ -360,8 +224,8 @@ (let [team (read-obj cfg :team team-id) team (-> team - (update :id lookup-index) - (update :photo-id lookup-index) + (update :id bfc/lookup-index) + (update :photo-id bfc/lookup-index) (assoc :created-at timestamp) (assoc :modified-at timestamp))] @@ -372,12 +236,12 @@ (doseq [font (->> (read-seq cfg :team-font-variant) (filter #(= team-id (:team-id %))))] (let [font (-> font - (update :id lookup-index) - (update :team-id lookup-index) - (update :woff1-file-id lookup-index) - (update :woff2-file-id lookup-index) - (update :ttf-file-id lookup-index) - (update :otf-file-id lookup-index) + (update :id bfc/lookup-index) + (update :team-id bfc/lookup-index) + (update :woff1-file-id bfc/lookup-index) + (update :woff2-file-id bfc/lookup-index) + (update :ttf-file-id bfc/lookup-index) + (update :otf-file-id bfc/lookup-index) (assoc :created-at timestamp) (assoc :modified-at timestamp))] (db/insert! conn :team-font-variant font @@ -386,7 +250,7 @@ team)) (defn read-project! - [{:keys [::db/conn ::timestamp] :as cfg} project-id] + [{:keys [::db/conn ::bfc/timestamp] :as cfg} project-id] (l/trc :hint "read" :obj "project" :id (str project-id)) (sse/tap {:type :import-progress @@ -395,175 +259,40 @@ (let [project (read-obj cfg :project project-id) project (-> project - (update :id lookup-index) - (update :team-id lookup-index) + (update :id bfc/lookup-index) + (update :team-id bfc/lookup-index) (assoc :created-at timestamp) (assoc :modified-at timestamp))] (db/insert! conn :project project ::db/return-keys false))) -(defn- relink-shapes - "A function responsible to analyze all file data and - replace the old :component-file reference with the new - ones, using the provided file-index." - [data] - (letfn [(process-map-form [form] - (cond-> form - ;; Relink image shapes - (and (map? (:metadata form)) - (= :image (:type form))) - (update-in [:metadata :id] lookup-index) - - ;; Relink paths with fill image - (map? (:fill-image form)) - (update-in [:fill-image :id] lookup-index) - - ;; This covers old shapes and the new :fills. - (uuid? (:fill-color-ref-file form)) - (update :fill-color-ref-file lookup-index) - - ;; This covers the old shapes and the new :strokes - (uuid? (:storage-color-ref-file form)) - (update :stroke-color-ref-file lookup-index) - - ;; This covers all text shapes that have typography referenced - (uuid? (:typography-ref-file form)) - (update :typography-ref-file lookup-index) - - ;; This covers the component instance links - (uuid? (:component-file form)) - (update :component-file lookup-index) - - ;; This covers the shadows and grids (they have directly - ;; the :file-id prop) - (uuid? 
(:file-id form)) - (update :file-id lookup-index)))] - - (walk/postwalk (fn [form] - (if (map? form) - (try - (process-map-form form) - (catch Throwable cause - (l/warn :hint "failed form" :form (pr-str form) ::l/sync? true) - (throw cause))) - form)) - data))) - -(defn- relink-media - "A function responsible of process the :media attr of file data and - remap the old ids with the new ones." - [media] - (reduce-kv (fn [res k v] - (let [id (lookup-index k)] - (if (uuid? id) - (-> res - (assoc id (assoc v :id id)) - (dissoc k)) - res))) - media - media)) - -(defn- relink-colors - "A function responsible of process the :colors attr of file data and - remap the old ids with the new ones." - [colors] - (reduce-kv (fn [res k v] - (if (:image v) - (update-in res [k :image :id] lookup-index) - res)) - colors - colors)) - -(defn- process-file - [{:keys [id] :as file}] - (-> file - (update :data (fn [fdata] - (-> fdata - (assoc :id id) - (dissoc :recent-colors) - (cond-> (> (:version fdata) cfd/version) - (assoc :version cfd/version)) - ;; FIXME: We're temporarily activating all - ;; migrations because a problem in the - ;; environments messed up with the version - ;; numbers When this problem is fixed delete - ;; the following line - (cond-> (> (:version fdata) 22) - (assoc :version 22))))) - (fmg/migrate-file) - (update :data (fn [fdata] - (-> fdata - (update :pages-index relink-shapes) - (update :components relink-shapes) - (update :media relink-media) - (update :colors relink-colors) - (d/without-nils)))))) - (defn read-file! - [{:keys [::db/conn ::timestamp] :as cfg} file-id] + [{:keys [::db/conn ::bfc/timestamp] :as cfg} file-id] (l/trc :hint "read" :obj "file" :id (str file-id)) (sse/tap {:type :import-progress :section :read-file :id file-id}) - (let [file (read-obj cfg :file file-id) + (let [file (-> (read-obj cfg :file file-id) + (update :id bfc/lookup-index) + (update :project-id bfc/lookup-index) + (bfc/process-file))] - file (-> file - (update :id lookup-index) - (process-file)) + ;; All features that are enabled and requires explicit migration are + ;; added to the state for a posterior migration step. + (doseq [feature (-> (::bfc/features cfg) + (set/difference cfeat/no-migration-features) + (set/difference (:features file)))] + (vswap! bfc/*state* update :pending-to-migrate (fnil conj []) [feature (:id file)])) - ;; All features that are enabled and requires explicit migration are - ;; added to the state for a posterior migration step. - _ (doseq [feature (-> (::features cfg) - (set/difference cfeat/no-migration-features) - (set/difference (:features file)))] - (vswap! *state* update :pending-to-migrate (fnil conj []) [feature (:id file)])) - - - file (-> file - (update :project-id lookup-index)) - - file (-> file - (assoc :created-at timestamp) - (assoc :modified-at timestamp) - (update :features - (fn [features] - (let [features (cfeat/check-supported-features! features)] - (-> (::features cfg) - (set/difference cfeat/frontend-only-features) - (set/union features)))))) - - _ (when (contains? cf/flags :file-schema-validation) - (fval/validate-file-schema! file)) - - _ (when (contains? cf/flags :soft-file-schema-validation) - (let [result (ex/try! (fval/validate-file-schema! file))] - (when (ex/exception? result) - (l/error :hint "file schema validation error" :cause result)))) - - file (if (contains? (:features file) "fdata/objects-map") - (feat.fdata/enable-objects-map file) - file) - - file (if (contains? 
(:features file) "fdata/pointer-map") - (binding [pmap/*tracked* (pmap/create-tracked)] - (let [file (feat.fdata/enable-pointer-map file)] - (feat.fdata/persist-pointers! cfg (:id file)) - file)) - file)] - - (db/insert! conn :file - (-> file - (update :features db/encode-pgarray conn "text") - (update :data blob/encode)) - {::db/return-keys false})) + (bfc/persist-file! cfg file)) (doseq [thumbnail (read-seq cfg :file-object-thumbnail file-id)] (let [thumbnail (-> thumbnail - (update :file-id lookup-index) - (update :media-id lookup-index)) + (update :file-id bfc/lookup-index) + (update :media-id bfc/lookup-index)) file-id (:file-id thumbnail) thumbnail (update thumbnail :object-id @@ -574,20 +303,21 @@ (doseq [rel (read-obj cfg :file-rels file-id)] (let [rel (-> rel - (update :file-id lookup-index) - (update :library-file-id lookup-index) + (update :file-id bfc/lookup-index) + (update :library-file-id bfc/lookup-index) (assoc :synced-at timestamp))] (db/insert! conn :file-library-rel rel ::db/return-keys false))) (doseq [media (read-seq cfg :file-media-object file-id)] (let [media (-> media - (update :id lookup-index) - (update :file-id lookup-index) - (update :media-id lookup-index) - (update :thumbnail-id lookup-index))] + (update :id bfc/lookup-index) + (update :file-id bfc/lookup-index) + (update :media-id bfc/lookup-index) + (update :thumbnail-id bfc/lookup-index))] (db/insert! conn :file-media-object media - ::db/return-keys false)))) + ::db/return-keys false + ::sql/on-conflict-do-nothing true)))) (def ^:private empty-summary {:teams #{} @@ -617,20 +347,20 @@ (try (db/tx-run! cfg (fn [cfg] (setup-schema! cfg) - (binding [*state* (volatile! empty-summary)] + (binding [bfc/*state* (volatile! empty-summary)] (write-team! cfg team-id) (run! (partial write-project! cfg) - (get-team-projects cfg team-id)) + (bfc/get-team-projects cfg team-id)) (run! (partial write-file! cfg) - (get-team-files cfg team-id)) + (bfc/get-team-files cfg team-id)) (run! (partial write-storage-object! cfg) - (-> *state* deref :storage-objects)) + (-> bfc/*state* deref :storage-objects)) (write! cfg :manifest "team-id" team-id) - (write! cfg :manifest "objects" (deref *state*)) + (write! cfg :manifest "objects" (deref bfc/*state*)) (::path cfg)))) (finally @@ -642,19 +372,6 @@ :id (str id) :elapsed (dt/format-duration elapsed))))))) -;; NOTE: will be used in future, commented for satisfy linter -;; (defn- run-pending-migrations! -;; [cfg] -;; ;; Run all pending migrations -;; (doseq [[feature file-id] (-> *state* deref :pending-to-migrate)] -;; (case feature -;; "components/v2" -;; (feat.compv2/migrate-file! cfg file-id :validate? (::validate cfg true)) -;; (ex/raise :type :internal -;; :code :no-migration-defined -;; :hint (str/ffmt "no migation for feature '%' on file importation" feature) -;; :feature feature)))) - (defn import-team! [cfg path] (let [id (uuid/next) @@ -662,7 +379,7 @@ cfg (-> (create-database cfg path) (update ::sto/storage media/configure-assets-storage) - (assoc ::timestamp (dt/now)))] + (assoc ::bfc/timestamp (dt/now)))] (l/inf :hint "start" :operation "import" @@ -674,7 +391,7 @@ (db/exec-one! conn ["SET idle_in_transaction_session_timeout = 0"]) (db/exec-one! conn ["SET CONSTRAINTS ALL DEFERRED"]) - (binding [*state* (volatile! {:index {}})] + (binding [bfc/*state* (volatile! {:index {}})] (let [objects (read-obj cfg :manifest "objects")] ;; We first process all storage objects, they have @@ -683,19 +400,19 @@ (run! (partial read-storage-object! 
cfg) (:storage-objects objects)) ;; Populate index with all the incoming objects - (vswap! *state* update :index + (vswap! bfc/*state* update :index (fn [index] (-> index - (update-index (:teams objects)) - (update-index (:projects objects)) - (update-index (:files objects)) - (update-index (:file-media-objects objects)) - (update-index (:team-font-variants objects))))) + (bfc/update-index (:teams objects)) + (bfc/update-index (:projects objects)) + (bfc/update-index (:files objects)) + (bfc/update-index (:file-media-objects objects)) + (bfc/update-index (:team-font-variants objects))))) (let [team-id (read-obj cfg :manifest "team-id") team (read-team! cfg team-id) features (cfeat/get-team-enabled-features cf/flags team) - cfg (assoc cfg ::features features)] + cfg (assoc cfg ::bfc/features features)] (run! (partial read-project! cfg) (:projects objects)) (run! (partial read-file! cfg) (:files objects)) diff --git a/backend/src/app/db/sql.clj b/backend/src/app/db/sql.clj index 37814733d..81ce636c5 100644 --- a/backend/src/app/db/sql.clj +++ b/backend/src/app/db/sql.clj @@ -30,6 +30,9 @@ (let [opts (merge default-opts opts) opts (cond-> opts (::db/on-conflict-do-nothing? opts) + (assoc :suffix "ON CONFLICT DO NOTHING") + + (::on-conflict-do-nothing opts) (assoc :suffix "ON CONFLICT DO NOTHING"))] (sql/for-insert table key-map opts)))) @@ -46,7 +49,7 @@ opts (cond-> opts (::columns opts) (assoc :columns (::columns opts)) (::for-update opts) (assoc :suffix "FOR UPDATE") - (::for-share opts) (assoc :suffix "FOR KEY SHARE"))] + (::for-share opts) (assoc :suffix "FOR SHARE"))] (sql/for-query table where-params opts)))) (defn update diff --git a/backend/src/app/http/debug.clj b/backend/src/app/http/debug.clj index 163028904..3f5d6a7b7 100644 --- a/backend/src/app/http/debug.clj +++ b/backend/src/app/http/debug.clj @@ -7,6 +7,7 @@ (ns app.http.debug (:refer-clojure :exclude [error-handler]) (:require + [app.binfile.v1 :as bf.v1] [app.common.data :as d] [app.common.exceptions :as ex] [app.common.logging :as l] @@ -17,11 +18,11 @@ [app.http.session :as session] [app.main :as-alias main] [app.rpc.commands.auth :as auth] - [app.rpc.commands.binfile :as binf] [app.rpc.commands.files-create :refer [create-file]] [app.rpc.commands.profile :as profile] [app.srepl.helpers :as srepl] [app.storage :as-alias sto] + [app.storage.tmp :as tmp] [app.util.blob :as blob] [app.util.template :as tmpl] [app.util.time :as dt] @@ -268,9 +269,10 @@ (defn export-handler [{:keys [::db/pool] :as cfg} {:keys [params ::session/profile-id] :as request}] - (let [file-ids (->> (:file-ids params) - (remove empty?) - (mapv parse-uuid)) + (let [file-ids (into #{} + (comp (remove empty?) + (map parse-uuid)) + (:file-ids params)) libs? (contains? params :includelibs) clone? (contains? params :clone) embed? (contains? params :embedassets)] @@ -279,22 +281,22 @@ (ex/raise :type :validation :code :missing-arguments)) - (let [path (-> cfg - (assoc ::binf/file-ids file-ids) - (assoc ::binf/embed-assets? embed?) - (assoc ::binf/include-libraries? libs?) - (binf/export-to-tmpfile!))] + (let [path (tmp/tempfile :prefix "penpot.export.")] + (with-open [output (io/output-stream path)] + (-> cfg + (assoc ::bf.v1/ids file-ids) + (assoc ::bf.v1/embed-assets embed?) + (assoc ::bf.v1/include-libraries libs?) + (bf.v1/export-files! output))) + (if clone? (let [profile (profile/get-profile pool profile-id) - project-id (:default-project-id profile)] - (binf/import! - (assoc cfg - ::binf/input path - ::binf/overwrite? 
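;; Illustrative note on the app.db.sql change above: both option spellings
;; now append the same suffix, so an insert such as the one used earlier in
;; this patch
;;
;;   (db/insert! conn :file-media-object media
;;               ::db/return-keys false
;;               ::sql/on-conflict-do-nothing true)
;;
;; generates "INSERT ... ON CONFLICT DO NOTHING" and silently skips
;; duplicate rows.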
false
-              ::binf/ignore-index-errors? true
-              ::binf/profile-id profile-id
-              ::binf/project-id project-id))
-
+              project-id (:default-project-id profile)
+              cfg        (assoc cfg
+                                ::bf.v1/overwrite false
+                                ::bf.v1/profile-id profile-id
+                                ::bf.v1/project-id project-id)]
+          (bf.v1/import-files! cfg path)
           {::rres/status 200
            ::rres/headers {"content-type" "text/plain"}
            ::rres/body "OK CLONED"})
 
@@ -305,7 +307,6 @@
                              "content-disposition" (str "attachment; filename=" (first file-ids) ".penpot")}}))))
 
-
 (defn import-handler
   [{:keys [::db/pool] :as cfg} {:keys [params ::session/profile-id] :as request}]
   (when-not (contains? params :file)
@@ -316,26 +317,23 @@
   (let [profile    (profile/get-profile pool profile-id)
         project-id (:default-project-id profile)
         overwrite? (contains? params :overwrite)
-        migrate?   (contains? params :migrate)
-        ignore-index-errors? (contains? params :ignore-index-errors)]
+        migrate?   (contains? params :migrate)]
 
     (when-not project-id
      (ex/raise :type :validation
                :code :missing-project
                :hint "project not found"))
 
-    (binf/import!
-     (assoc cfg
-            ::binf/input (-> params :file :path)
-            ::binf/overwrite? overwrite?
-            ::binf/migrate? migrate?
-            ::binf/ignore-index-errors? ignore-index-errors?
-            ::binf/profile-id profile-id
-            ::binf/project-id project-id))
-
-    {::rres/status 200
-     ::rres/headers {"content-type" "text/plain"}
-     ::rres/body "OK"}))
+    (let [path (-> params :file :path)
+          cfg  (assoc cfg
+                      ::bf.v1/overwrite overwrite?
+                      ::bf.v1/migrate migrate?
+                      ::bf.v1/profile-id profile-id
+                      ::bf.v1/project-id project-id)]
+      (bf.v1/import-files! cfg path)
+      {::rres/status 200
+       ::rres/headers {"content-type" "text/plain"}
+       ::rres/body "OK"})))
 
 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
 ;; ACTIONS
diff --git a/backend/src/app/rpc/commands/binfile.clj b/backend/src/app/rpc/commands/binfile.clj
index 206b8931a..2621cce6a 100644
--- a/backend/src/app/rpc/commands/binfile.clj
+++ b/backend/src/app/rpc/commands/binfile.clj
@@ -7,22 +7,9 @@
 (ns app.rpc.commands.binfile
   (:refer-clojure :exclude [assert])
   (:require
-   [app.common.data :as d]
-   [app.common.exceptions :as ex]
-   [app.common.features :as cfeat]
-   [app.common.files.defaults :as cfd]
-   [app.common.files.migrations :as fmg]
-   [app.common.files.validate :as fval]
-   [app.common.fressian :as fres]
-   [app.common.logging :as l]
+   [app.binfile.v1 :as bf.v1]
    [app.common.schema :as sm]
-   [app.common.spec :as us]
-   [app.common.types.file :as ctf]
-   [app.common.uuid :as uuid]
-   [app.config :as cf]
    [app.db :as db]
-   [app.features.components-v2 :as feat.compv2]
-   [app.features.fdata :as feat.fdata]
    [app.http.sse :as sse]
    [app.loggers.audit :as-alias audit]
    [app.loggers.webhooks :as-alias webhooks]
@@ -30,1065 +17,33 @@
    [app.rpc :as-alias rpc]
    [app.rpc.commands.files :as files]
    [app.rpc.commands.projects :as projects]
-   [app.rpc.commands.teams :as teams]
    [app.rpc.doc :as-alias doc]
-   [app.storage :as sto]
-   [app.storage.tmp :as tmp]
    [app.tasks.file-gc]
-   [app.util.blob :as blob]
-   [app.util.pointer-map :as pmap]
    [app.util.services :as sv]
    [app.util.time :as dt]
    [app.worker :as-alias wrk]
-   [clojure.set :as set]
-   [clojure.spec.alpha :as s]
-   [clojure.walk :as walk]
-   [cuerdas.core :as str]
-   [datoteka.io :as io]
-   [promesa.core :as p]
-   [promesa.util :as pu]
-   [ring.response :as rres]
-   [yetti.adapter :as yt])
-  (:import
-   com.github.luben.zstd.ZstdInputStream
-   com.github.luben.zstd.ZstdOutputStream
-   java.io.DataInputStream
-   java.io.DataOutputStream
-   java.io.InputStream
-   java.io.OutputStream))
+   [promesa.exec :as px]
+   [ring.response
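;; For reference, the bf.v1 option keys threaded through these handlers in
;; this patch (a summary, not an exhaustive list; see app.binfile.v1):
;;
;;   ::bf.v1/ids                ;; set of file uuids to export
;;   ::bf.v1/include-libraries  ;; also export linked libraries
;;   ::bf.v1/embed-assets       ;; embed used library assets instead
;;   ::bf.v1/overwrite          ;; reuse ids and update existing rows on import
;;   ::bf.v1/migrate            ;; run pending migrations on import
;;   ::bf.v1/profile-id         ;; acting profile
;;   ::bf.v1/project-id         ;; destination project for imported files
;;   ::bf.v1/name               ;; optional name for the first imported file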
:as rres])) (set! *warn-on-reflection* true) -;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; -;; DEFAULTS -;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; - -;; Threshold in MiB when we pass from using -;; in-memory byte-array's to use temporal files. -(def temp-file-threshold - (* 1024 1024 2)) - -;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; -;; LOW LEVEL STREAM IO API -;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; - -(def ^:const buffer-size (:xnio/buffer-size yt/defaults)) -(def ^:const penpot-magic-number 800099563638710213) -(def ^:const max-object-size (* 1024 1024 100)) ; Only allow 100MiB max file size. - -(def ^:dynamic *position* nil) - -(defn get-mark - [id] - (case id - :header 1 - :stream 2 - :uuid 3 - :label 4 - :obj 5 - (ex/raise :type :validation - :code :invalid-mark-id - :hint (format "invalid mark id %s" id)))) - -(defmacro assert - [expr hint] - `(when-not ~expr - (ex/raise :type :validation - :code :unexpected-condition - :hint ~hint))) - -(defmacro assert-mark - [v type] - `(let [expected# (get-mark ~type) - val# (long ~v)] - (when (not= val# expected#) - (ex/raise :type :validation - :code :unexpected-mark - :hint (format "received mark %s, expected %s" val# expected#))))) - -(defmacro assert-label - [expr label] - `(let [v# ~expr] - (when (not= v# ~label) - (ex/raise :type :assertion - :code :unexpected-label - :hint (format "received label %s, expected %s" v# ~label))))) - -;; --- PRIMITIVE IO - -(defn write-byte! - [^DataOutputStream output data] - (l/trace :fn "write-byte!" :data data :position @*position* ::l/sync? true) - (.writeByte output (byte data)) - (swap! *position* inc)) - -(defn read-byte! - [^DataInputStream input] - (let [v (.readByte input)] - (l/trace :fn "read-byte!" :val v :position @*position* ::l/sync? true) - (swap! *position* inc) - v)) - -(defn write-long! - [^DataOutputStream output data] - (l/trace :fn "write-long!" :data data :position @*position* ::l/sync? true) - (.writeLong output (long data)) - (swap! *position* + 8)) - - -(defn read-long! - [^DataInputStream input] - (let [v (.readLong input)] - (l/trace :fn "read-long!" :val v :position @*position* ::l/sync? true) - (swap! *position* + 8) - v)) - -(defn write-bytes! - [^DataOutputStream output ^bytes data] - (let [size (alength data)] - (l/trace :fn "write-bytes!" :size size :position @*position* ::l/sync? true) - (.write output data 0 size) - (swap! *position* + size))) - -(defn read-bytes! - [^InputStream input ^bytes buff] - (let [size (alength buff) - readed (.readNBytes input buff 0 size)] - (l/trace :fn "read-bytes!" :expected (alength buff) :readed readed :position @*position* ::l/sync? true) - (swap! *position* + readed) - readed)) - -;; --- COMPOSITE IO - -(defn write-uuid! - [^DataOutputStream output id] - (l/trace :fn "write-uuid!" :position @*position* :WRITTEN? (.size output) ::l/sync? true) - - (doto output - (write-byte! (get-mark :uuid)) - (write-long! (uuid/get-word-high id)) - (write-long! (uuid/get-word-low id)))) - -(defn read-uuid! - [^DataInputStream input] - (l/trace :fn "read-uuid!" :position @*position* ::l/sync? true) - (let [m (read-byte! input)] - (assert-mark m :uuid) - (let [a (read-long! input) - b (read-long! input)] - (uuid/custom a b)))) - -(defn write-obj! - [^DataOutputStream output data] - (l/trace :fn "write-obj!" :position @*position* ::l/sync? true) - (let [^bytes data (fres/encode data)] - (doto output - (write-byte! 
(get-mark :obj)) - (write-long! (alength data)) - (write-bytes! data)))) - -(defn read-obj! - [^DataInputStream input] - (l/trace :fn "read-obj!" :position @*position* ::l/sync? true) - (let [m (read-byte! input)] - (assert-mark m :obj) - (let [size (read-long! input)] - (assert (pos? size) "incorrect header size found on reading header") - (let [buff (byte-array size)] - (read-bytes! input buff) - (fres/decode buff))))) - -(defn write-label! - [^DataOutputStream output label] - (l/trace :fn "write-label!" :label label :position @*position* ::l/sync? true) - (doto output - (write-byte! (get-mark :label)) - (write-obj! label))) - -(defn read-label! - [^DataInputStream input] - (l/trace :fn "read-label!" :position @*position* ::l/sync? true) - (let [m (read-byte! input)] - (assert-mark m :label) - (read-obj! input))) - -(defn write-header! - [^OutputStream output version] - (l/trace :fn "write-header!" - :version version - :position @*position* - ::l/sync? true) - (let [vers (-> version name (subs 1) parse-long) - output (io/data-output-stream output)] - (doto output - (write-byte! (get-mark :header)) - (write-long! penpot-magic-number) - (write-long! vers)))) - -(defn read-header! - [^InputStream input] - (l/trace :fn "read-header!" :position @*position* ::l/sync? true) - (let [input (io/data-input-stream input) - mark (read-byte! input) - mnum (read-long! input) - vers (read-long! input)] - - (when (or (not= mark (get-mark :header)) - (not= mnum penpot-magic-number)) - (ex/raise :type :validation - :code :invalid-penpot-file - :hint "invalid penpot file")) - - (keyword (str "v" vers)))) - -(defn copy-stream! - [^OutputStream output ^InputStream input ^long size] - (let [written (io/copy! input output :size size)] - (l/trace :fn "copy-stream!" :position @*position* :size size :written written ::l/sync? true) - (swap! *position* + written) - written)) - -(defn write-stream! - [^DataOutputStream output stream size] - (l/trace :fn "write-stream!" :position @*position* ::l/sync? true :size size) - (doto output - (write-byte! (get-mark :stream)) - (write-long! size)) - - (copy-stream! output stream size)) - -(defn read-stream! - [^DataInputStream input] - (l/trace :fn "read-stream!" :position @*position* ::l/sync? true) - (let [m (read-byte! input) - s (read-long! input) - p (tmp/tempfile :prefix "penpot.binfile.")] - (assert-mark m :stream) - - (when (> s max-object-size) - (ex/raise :type :validation - :code :max-file-size-reached - :hint (str/ffmt "unable to import storage object with size % bytes" s))) - - (if (> s temp-file-threshold) - (with-open [^OutputStream output (io/output-stream p)] - (let [readed (io/copy! input output :offset 0 :size s)] - (l/trace :fn "read-stream*!" :expected s :readed readed :position @*position* ::l/sync? true) - (swap! *position* + readed) - [s p])) - [s (io/read-as-bytes input :size s)]))) - -(defmacro assert-read-label! - [input expected-label] - `(let [readed# (read-label! ~input) - expected# ~expected-label] - (when (not= readed# expected#) - (ex/raise :type :validation - :code :unexpected-label - :hint (format "unexpected label found: %s, expected: %s" readed# expected#))))) - -;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; -;; API -;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; - -;; --- HELPERS - -(defn zstd-input-stream - ^InputStream - [input] - (ZstdInputStream. ^InputStream input)) - -(defn zstd-output-stream - ^OutputStream - [output & {:keys [level] :or {level 0}}] - (ZstdOutputStream. 
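;; Schematically, the v1 container these removed helpers implemented (the
;; format itself now lives in app.binfile.v1): a raw header followed by a
;; zstd-compressed body of labeled sections,
;;
;;   [:header penpot-magic-number version]
;;   [:label :v1/metadata] [:obj {:version ... :files [...]}]
;;   [:label :v1/files]    per file: [:obj file] [:obj media]
;;   [:label :v1/rels]     [:obj rels]
;;   [:label :v1/sobjects] [:obj ids], then per id: [:uuid id] [:obj meta] [:stream data]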
^OutputStream output (int level))) - -(defn- get-files - [cfg ids] - (db/run! cfg (fn [{:keys [::db/conn]}] - (let [sql (str "SELECT id FROM file " - " WHERE id = ANY(?) ") - ids (db/create-array conn "uuid" ids)] - (->> (db/exec! conn [sql ids]) - (into [] (map :id)) - (not-empty)))))) - -(defn- get-file - [cfg file-id] - (db/run! cfg (fn [{:keys [::db/conn] :as cfg}] - (binding [pmap/*load-fn* (partial feat.fdata/load-pointer cfg file-id)] - (some-> (db/get* conn :file {:id file-id} {::db/remove-deleted false}) - (files/decode-row) - (update :data feat.fdata/process-pointers deref)))))) - -(defn- get-file-media - [{:keys [::db/pool]} {:keys [data id] :as file}] - (pu/with-open [conn (db/open pool)] - (let [ids (app.tasks.file-gc/collect-used-media data) - ids (db/create-array conn "uuid" ids) - sql (str "SELECT * FROM file_media_object WHERE id = ANY(?)")] - - ;; We assoc the file-id again to the file-media-object row - ;; because there are cases that used objects refer to other - ;; files and we need to ensure in the exportation process that - ;; all ids matches - (->> (db/exec! conn [sql ids]) - (mapv #(assoc % :file-id id)))))) - -(defn- get-file-thumbnails - "Return all file thumbnails for a given file." - [{:keys [::db/pool]} id] - (pu/with-open [conn (db/open pool)] - (let [sql "SELECT * FROM file_tagged_object_thumbnail WHERE file_id = ?"] - (->> (db/exec! conn [sql id]) - (mapv #(dissoc % :file-id)))))) - -(def ^:private storage-object-id-xf - (comp - (mapcat (juxt :media-id :thumbnail-id)) - (filter uuid?))) - -(def ^:private sql:file-libraries - "WITH RECURSIVE libs AS ( - SELECT fl.id - FROM file AS fl - JOIN file_library_rel AS flr ON (flr.library_file_id = fl.id) - WHERE flr.file_id = ANY(?) - UNION - SELECT fl.id - FROM file AS fl - JOIN file_library_rel AS flr ON (flr.library_file_id = fl.id) - JOIN libs AS l ON (flr.file_id = l.id) - ) - SELECT DISTINCT l.id - FROM libs AS l") - -(defn- get-libraries - [{:keys [::db/pool]} ids] - (pu/with-open [conn (db/open pool)] - (let [ids (db/create-array conn "uuid" ids)] - (map :id (db/exec! pool [sql:file-libraries ids]))))) - -(defn- get-library-relations - [cfg ids] - (db/run! cfg (fn [{:keys [::db/conn]}] - (let [ids (db/create-array conn "uuid" ids) - sql (str "SELECT flr.* FROM file_library_rel AS flr " - " WHERE flr.file_id = ANY(?)")] - (db/exec! conn [sql ids]))))) - -(defn- create-or-update-file! - [conn params] - (let [sql (str "INSERT INTO file (id, project_id, name, revn, is_shared, data, created_at, modified_at) " - "VALUES (?, ?, ?, ?, ?, ?, ?, ?) " - "ON CONFLICT (id) DO UPDATE SET data=?")] - (db/exec-one! conn [sql - (:id params) - (:project-id params) - (:name params) - (:revn params) - (:is-shared params) - (:data params) - (:created-at params) - (:modified-at params) - (:data params)]))) - -;; --- GENERAL PURPOSE DYNAMIC VARS - -(def ^:dynamic *state* nil) -(def ^:dynamic *options* nil) - -;; --- EXPORT WRITER - -(defn- embed-file-assets - [data cfg file-id] - (letfn [(walk-map-form [form state] - (cond - (uuid? (:fill-color-ref-file form)) - (do - (vswap! state conj [(:fill-color-ref-file form) :colors (:fill-color-ref-id form)]) - (assoc form :fill-color-ref-file file-id)) - - (uuid? (:stroke-color-ref-file form)) - (do - (vswap! state conj [(:stroke-color-ref-file form) :colors (:stroke-color-ref-id form)]) - (assoc form :stroke-color-ref-file file-id)) - - (uuid? (:typography-ref-file form)) - (do - (vswap! 
state conj [(:typography-ref-file form) :typographies (:typography-ref-id form)]) - (assoc form :typography-ref-file file-id)) - - (uuid? (:component-file form)) - (do - (vswap! state conj [(:component-file form) :components (:component-id form)]) - (assoc form :component-file file-id)) - - :else - form)) - - (process-group-of-assets [data [lib-id items]] - ;; NOTE: there is a possibility that shape refers to an - ;; non-existant file because the file was removed. In this - ;; case we just ignore the asset. - (if-let [lib (get-file cfg lib-id)] - (reduce (partial process-asset lib) data items) - data)) - - (process-asset [lib data [bucket asset-id]] - (let [asset (get-in lib [:data bucket asset-id]) - ;; Add a special case for colors that need to have - ;; correctly set the :file-id prop (pending of the - ;; refactor that will remove it). - asset (cond-> asset - (= bucket :colors) (assoc :file-id file-id))] - (update data bucket assoc asset-id asset)))] - - (let [assets (volatile! [])] - (walk/postwalk #(cond-> % (map? %) (walk-map-form assets)) data) - (->> (deref assets) - (filter #(as-> (first %) $ (and (uuid? $) (not= $ file-id)))) - (d/group-by first rest) - (reduce (partial process-group-of-assets) data))))) - -(defmulti write-export ::version) -(defmulti write-section ::section) - -(s/def ::output io/output-stream?) -(s/def ::file-ids (s/every ::us/uuid :kind vector? :min-count 1)) -(s/def ::include-libraries? (s/nilable ::us/boolean)) -(s/def ::embed-assets? (s/nilable ::us/boolean)) - -(s/def ::write-export-options - (s/keys :req [::db/pool ::sto/storage ::output ::file-ids] - :opt [::include-libraries? ::embed-assets?])) - -(defn write-export! - "Do the exportation of a specified file in custom penpot binary - format. There are some options available for customize the output: - - `::include-libraries?`: additionally to the specified file, all the - linked libraries also will be included (including transitive - dependencies). - - `::embed-assets?`: instead of including the libraries, embed in the - same file library all assets used from external libraries." - [{:keys [::include-libraries? ::embed-assets?] :as options}] - - (us/assert! ::write-export-options options) - (us/verify! - :expr (not (and include-libraries? embed-assets?)) - :hint "the `include-libraries?` and `embed-assets?` are mutally excluding options") - (write-export options)) - -(defmethod write-export :default - [{:keys [::output] :as options}] - (write-header! output :v1) - (pu/with-open [output (zstd-output-stream output :level 12) - output (io/data-output-stream output)] - (binding [*state* (volatile! {})] - (run! (fn [section] - (l/dbg :hint "write section" :section section ::l/sync? true) - (write-label! output section) - (let [options (-> options - (assoc ::output output) - (assoc ::section section))] - (binding [*options* options] - (write-section options)))) - - [:v1/metadata :v1/files :v1/rels :v1/sobjects])))) - -(defmethod write-section :v1/metadata - [{:keys [::output ::file-ids ::include-libraries?] :as cfg}] - (if-let [fids (get-files cfg file-ids)] - (let [lids (when include-libraries? - (get-libraries cfg file-ids)) - ids (into fids lids)] - (write-obj! output {:version cf/version :files ids}) - (vswap! *state* assoc :files ids)) - (ex/raise :type :not-found - :code :files-not-found - :hint "unable to retrieve files for export"))) - -(defmethod write-section :v1/files - [{:keys [::output ::embed-assets? ::include-libraries?] :as cfg}] - - ;; Initialize SIDS with empty vector - (vswap! 
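;; Sketch of what the embed-file-assets walk collects (hypothetical uuids):
;; each matched form yields a [library-id bucket asset-id] tuple, e.g.
;;
;;   [#uuid "aaaa..." :colors       #uuid "bbbb..."]
;;   [#uuid "aaaa..." :typographies #uuid "cccc..."]
;;
;; the tuples are grouped by library and the referenced assets are copied
;; into the exported file's own data.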
*state* assoc :sids []) - - (doseq [file-id (-> *state* deref :files)] - (let [detach? (and (not embed-assets?) (not include-libraries?)) - thumbnails (get-file-thumbnails cfg file-id) - file (cond-> (get-file cfg file-id) - detach? - (-> (ctf/detach-external-references file-id) - (dissoc :libraries)) - - embed-assets? - (update :data embed-file-assets cfg file-id) - - :always - (assoc :thumbnails thumbnails)) - - media (get-file-media cfg file)] - - (l/dbg :hint "write penpot file" - :id (str file-id) - :name (:name file) - :thumbnails (count thumbnails) - :features (:features file) - :media (count media) - ::l/sync? true) - - (doseq [item media] - (l/dbg :hint "write penpot file media object" :id (:id item) ::l/sync? true)) - - (doseq [item thumbnails] - (l/dbg :hint "write penpot file object thumbnail" :media-id (str (:media-id item)) ::l/sync? true)) - - (doto output - (write-obj! file) - (write-obj! media)) - - (vswap! *state* update :sids into storage-object-id-xf media) - (vswap! *state* update :sids into storage-object-id-xf thumbnails)))) - -(defmethod write-section :v1/rels - [{:keys [::output ::include-libraries?] :as cfg}] - (let [ids (-> *state* deref :files) - rels (when include-libraries? - (get-library-relations cfg ids))] - (l/dbg :hint "found rels" :total (count rels) ::l/sync? true) - (write-obj! output rels))) - -(defmethod write-section :v1/sobjects - [{:keys [::sto/storage ::output]}] - (let [sids (-> *state* deref :sids) - storage (media/configure-assets-storage storage)] - - (l/dbg :hint "found sobjects" - :items (count sids) - ::l/sync? true) - - ;; Write all collected storage objects - (write-obj! output sids) - - (doseq [id sids] - (let [{:keys [size] :as obj} (sto/get-object storage id)] - (l/dbg :hint "write sobject" :id (str id) ::l/sync? true) - - (doto output - (write-uuid! id) - (write-obj! (meta obj))) - - (pu/with-open [stream (sto/get-object-data storage obj)] - (let [written (write-stream! output stream size)] - (when (not= written size) - (ex/raise :type :validation - :code :mismatch-readed-size - :hint (str/ffmt "found unexpected object size; size=% written=%" size written))))))))) - -;; --- EXPORT READER - -(declare lookup-index) -(declare update-index) -(declare relink-media) -(declare relink-colors) -(declare relink-shapes) - -(defmulti read-import ::version) -(defmulti read-section ::section) - -(s/def ::profile-id ::us/uuid) -(s/def ::project-id ::us/uuid) -(s/def ::input io/input-stream?) -(s/def ::overwrite? (s/nilable ::us/boolean)) -(s/def ::ignore-index-errors? (s/nilable ::us/boolean)) - -;; FIXME: replace with schema -(s/def ::read-import-options - (s/keys :req [::db/pool ::sto/storage ::project-id ::profile-id ::input] - :opt [::overwrite? ::ignore-index-errors?])) - -(defn read-import! - "Do the importation of the specified resource in penpot custom binary - format. There are some options for customize the importation - behavior: - - `::overwrite?`: if true, instead of creating new files and remapping id references, - it reuses all ids and updates existing objects; defaults to `false`. - - `::ignore-index-errors?`: if true, do not fail on index lookup errors, can - happen with broken files; defaults to: `false`. - " - - [{:keys [::input ::timestamp] :or {timestamp (dt/now)} :as options}] - (us/verify! ::read-import-options options) - (let [version (read-header! 
input)] - (read-import (assoc options ::version version ::timestamp timestamp)))) - -(defn- read-import-v1 - [{:keys [::db/conn ::project-id ::profile-id ::input] :as options}] - (db/exec-one! conn ["SET idle_in_transaction_session_timeout = 0"]) - (db/exec-one! conn ["SET CONSTRAINTS ALL DEFERRED"]) - - (pu/with-open [input (zstd-input-stream input) - input (io/data-input-stream input)] - (binding [*state* (volatile! {:media [] :index {}})] - (let [team (teams/get-team conn - :profile-id profile-id - :project-id project-id) - - validate? (contains? cf/flags :file-validation) - features (cfeat/get-team-enabled-features cf/flags team)] - - (sse/tap {:type :import-progress - :section :read-import}) - - ;; Process all sections - (run! (fn [section] - (l/dbg :hint "reading section" :section section ::l/sync? true) - (assert-read-label! input section) - (let [options (-> options - (assoc ::enabled-features features) - (assoc ::section section) - (assoc ::input input))] - (binding [*options* options] - (sse/tap {:type :import-progress - :section section}) - (read-section options)))) - [:v1/metadata :v1/files :v1/rels :v1/sobjects]) - - ;; Run all pending migrations - (doseq [[feature file-id] (-> *state* deref :pending-to-migrate)] - (case feature - "components/v2" - (feat.compv2/migrate-file! options file-id - :validate? validate?) - - "fdata/shape-data-type" - nil - - (ex/raise :type :internal - :code :no-migration-defined - :hint (str/ffmt "no migation for feature '%' on file importation" feature) - :feature feature))) - - ;; Knowing that the ids of the created files are in index, - ;; just lookup them and return it as a set - (let [files (-> *state* deref :files)] - (into #{} (keep #(get-in @*state* [:index %])) files)))))) - -(defmethod read-import :v1 - [options] - (db/tx-run! options read-import-v1)) - -(defmethod read-section :v1/metadata - [{:keys [::input]}] - (let [{:keys [version files]} (read-obj! input)] - (l/dbg :hint "metadata readed" - :version (:full version) - :files (mapv str files) - ::l/sync? true) - (vswap! *state* update :index update-index files) - (vswap! *state* assoc :version version :files files))) - -(defn- get-remaped-thumbnails - [thumbnails file-id] - (mapv (fn [thumbnail] - (-> thumbnail - (assoc :file-id file-id) - (update :object-id #(str/replace-first % #"^(.*?)/" (str file-id "/"))))) - thumbnails)) - -(defn- process-file - [{:keys [id] :as file}] - (-> file - (update :data (fn [fdata] - (-> fdata - (assoc :id id) - (dissoc :recent-colors) - (cond-> (> (:version fdata) cfd/version) - (assoc :version cfd/version)) - ;; FIXME: We're temporarily activating all - ;; migrations because a problem in the - ;; environments messed up with the version - ;; numbers When this problem is fixed delete - ;; the following line - (cond-> (> (:version fdata) 22) - (assoc :version 22))))) - (fmg/migrate-file) - (update :data (fn [fdata] - (-> fdata - (update :pages-index relink-shapes) - (update :components relink-shapes) - (update :media relink-media) - (update :colors relink-colors) - (d/without-nils)))))) - - -(defmethod read-section :v1/files - [{:keys [::db/conn ::input ::project-id ::enabled-features ::timestamp ::overwrite? ::name] :as system}] - - (doseq [[idx expected-file-id] (d/enumerate (-> *state* deref :files))] - (let [file (read-obj! input) - - media (read-obj! 
input) - - file-id (:id file) - file-id' (lookup-index file-id) - - thumbnails (:thumbnails file)] - - (when (not= file-id expected-file-id) - (ex/raise :type :validation - :code :inconsistent-penpot-file - :found-id file-id - :expected-id expected-file-id - :hint "the penpot file seems corrupt, found unexpected uuid (file-id)")) - - (l/dbg :hint "processing file" - :id (str file-id) - :features (:features file) - :version (-> file :data :version) - :media (count media) - :thumbnails (count thumbnails) - ::l/sync? true) - - (when (seq thumbnails) - (let [thumbnails (get-remaped-thumbnails thumbnails file-id')] - (l/dbg :hint "updated index with thumbnails" :total (count thumbnails) ::l/sync? true) - (vswap! *state* update :thumbnails (fnil into []) thumbnails))) - - (when (seq media) - ;; Update index with media - (l/dbg :hint "update index with media" :total (count media) ::l/sync? true) - (vswap! *state* update :index update-index (map :id media)) - - ;; Store file media for later insertion - (l/dbg :hint "update media references" ::l/sync? true) - (vswap! *state* update :media into (map #(update % :id lookup-index)) media)) - - (let [file (-> file - (assoc :id file-id') - (cond-> (and (= idx 0) (some? name)) - (assoc :name name)) - (process-file)) - - ;; All features that are enabled and requires explicit migration are - ;; added to the state for a posterior migration step. - _ (doseq [feature (-> enabled-features - (set/difference cfeat/no-migration-features) - (set/difference (:features file)))] - (vswap! *state* update :pending-to-migrate (fnil conj []) [feature file-id'])) - - file (-> file - (assoc :project-id project-id) - (assoc :created-at timestamp) - (assoc :modified-at timestamp) - (dissoc :thumbnails) - (update :features - (fn [features] - (let [features (cfeat/check-supported-features! features)] - (-> enabled-features - (set/difference cfeat/frontend-only-features) - (set/union features)))))) - - _ (when (contains? cf/flags :file-schema-validation) - (fval/validate-file-schema! file)) - - _ (when (contains? cf/flags :soft-file-schema-validation) - (let [result (ex/try! (fval/validate-file-schema! file))] - (when (ex/exception? result) - (l/error :hint "file schema validation error" :cause result)))) - - file (if (contains? (:features file) "fdata/objects-map") - (feat.fdata/enable-objects-map file) - file) - - file (if (contains? (:features file) "fdata/pointer-map") - (binding [pmap/*tracked* (pmap/create-tracked)] - (let [file (feat.fdata/enable-pointer-map file)] - (feat.fdata/persist-pointers! system file-id') - file)) - file) - - file (-> file - (update :features #(db/create-array conn "text" %)) - (update :data blob/encode))] - - (l/dbg :hint "create file" :id (str file-id') ::l/sync? true) - - (if overwrite? - (create-or-update-file! conn file) - (db/insert! conn :file file)) - - (when overwrite? - (db/delete! conn :file-thumbnail {:file-id file-id'})) - - file-id')))) - -(defmethod read-section :v1/rels - [{:keys [::db/conn ::input ::timestamp]}] - (let [rels (read-obj! input) - ids (into #{} (-> *state* deref :files))] - ;; Insert all file relations - (doseq [{:keys [library-file-id] :as rel} rels] - (let [rel (-> rel - (assoc :synced-at timestamp) - (update :file-id lookup-index) - (update :library-file-id lookup-index))] - - (if (contains? ids library-file-id) - (do - (l/dbg :hint "create file library link" - :file-id (:file-id rel) - :lib-id (:library-file-id rel) - ::l/sync? true) - (db/insert! 
conn :file-library-rel rel)) - - (l/warn :hint "ignoring file library link" - :file-id (:file-id rel) - :lib-id (:library-file-id rel) - ::l/sync? true)))))) - -(defmethod read-section :v1/sobjects - [{:keys [::sto/storage ::db/conn ::input ::overwrite? ::timestamp]}] - (let [storage (media/configure-assets-storage storage) - ids (read-obj! input) - thumb? (into #{} (map :media-id) (:thumbnails @*state*))] - - (doseq [expected-storage-id ids] - (let [id (read-uuid! input) - mdata (read-obj! input)] - - (when (not= id expected-storage-id) - (ex/raise :type :validation - :code :inconsistent-penpot-file - :hint "the penpot file seems corrupt, found unexpected uuid (storage-object-id)")) - - (l/dbg :hint "readed storage object" :id (str id) ::l/sync? true) - - (let [[size resource] (read-stream! input) - hash (sto/calculate-hash resource) - content (-> (sto/content resource size) - (sto/wrap-with-hash hash)) - - params (-> mdata - (assoc ::sto/content content) - (assoc ::sto/deduplicate? true) - (assoc ::sto/touched-at timestamp)) - - params (if (thumb? id) - (assoc params :bucket "file-object-thumbnail") - (assoc params :bucket "file-media-object")) - - sobject (sto/put-object! storage params)] - - (l/dbg :hint "persisted storage object" - :old-id (str id) - :new-id (str (:id sobject)) - :is-thumbnail (boolean (thumb? id)) - ::l/sync? true) - - (vswap! *state* update :index assoc id (:id sobject))))) - - (doseq [item (:media @*state*)] - (l/dbg :hint "inserting file media object" - :id (str (:id item)) - :file-id (str (:file-id item)) - ::l/sync? true) - - (let [file-id (lookup-index (:file-id item))] - (if (= file-id (:file-id item)) - (l/warn :hint "ignoring file media object" :file-id (str file-id) ::l/sync? true) - (db/insert! conn :file-media-object - (-> item - (assoc :file-id file-id) - (d/update-when :media-id lookup-index) - (d/update-when :thumbnail-id lookup-index)) - {::db/on-conflict-do-nothing? overwrite?})))) - - (doseq [item (:thumbnails @*state*)] - (let [item (update item :media-id lookup-index)] - (l/dbg :hint "inserting file object thumbnail" - :file-id (str (:file-id item)) - :media-id (str (:media-id item)) - :object-id (:object-id item) - ::l/sync? true) - (db/insert! conn :file-tagged-object-thumbnail item - {::db/on-conflict-do-nothing? overwrite?}))))) - -(defn- lookup-index - [id] - (let [val (get-in @*state* [:index id])] - (l/trc :fn "lookup-index" :id id :val val ::l/sync? true) - (when (and (not (::ignore-index-errors? *options*)) (not val)) - (ex/raise :type :validation - :code :incomplete-index - :hint "looks like index has missing data")) - (or val id))) - -(defn- update-index - [index coll] - (loop [items (seq coll) - index index] - (if-let [id (first items)] - (let [new-id (if (::overwrite? *options*) id (uuid/next))] - (l/trc :fn "update-index" :id id :new-id new-id ::l/sync? true) - (recur (rest items) - (assoc index id new-id))) - index))) - -(defn- relink-shapes - "A function responsible to analyze all file data and - replace the old :component-file reference with the new - ones, using the provided file-index." - [data] - (letfn [(process-map-form [form] - (cond-> form - ;; Relink image shapes - (and (map? (:metadata form)) - (= :image (:type form))) - (update-in [:metadata :id] lookup-index) - - ;; Relink paths with fill image - (map? (:fill-image form)) - (update-in [:fill-image :id] lookup-index) - - ;; This covers old shapes and the new :fills. - (uuid? 
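;; Example of the remap performed by the relink-* helpers (hypothetical
;; uuids): given an index of {#uuid "old-lib" #uuid "new-lib"}, a shape like
;;
;;   {:type :rect, :fill-color-ref-file #uuid "old-lib", ...}
;;
;; is rewritten to
;;
;;   {:type :rect, :fill-color-ref-file #uuid "new-lib", ...}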
(:fill-color-ref-file form)) - (update :fill-color-ref-file lookup-index) - - ;; This covers the old shapes and the new :strokes - (uuid? (:storage-color-ref-file form)) - (update :stroke-color-ref-file lookup-index) - - ;; This covers all text shapes that have typography referenced - (uuid? (:typography-ref-file form)) - (update :typography-ref-file lookup-index) - - ;; This covers the component instance links - (uuid? (:component-file form)) - (update :component-file lookup-index) - - ;; This covers the shadows and grids (they have directly - ;; the :file-id prop) - (uuid? (:file-id form)) - (update :file-id lookup-index)))] - - (walk/postwalk (fn [form] - (if (map? form) - (try - (process-map-form form) - (catch Throwable cause - (l/warn :hint "failed form" :form (pr-str form) ::l/sync? true) - (throw cause))) - form)) - data))) - -(defn- relink-media - "A function responsible of process the :media attr of file data and - remap the old ids with the new ones." - [media] - (reduce-kv (fn [res k v] - (let [id (lookup-index k)] - (if (uuid? id) - (-> res - (assoc id (assoc v :id id)) - (dissoc k)) - res))) - media - media)) - -(defn- relink-colors - "A function responsible of process the :colors attr of file data and - remap the old ids with the new ones." - [colors] - (reduce-kv (fn [res k v] - (if (:image v) - (update-in res [k :image :id] lookup-index) - res)) - colors - colors)) - -;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; -;; HIGH LEVEL API -;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; - -(defn export! - [cfg output] - (let [id (uuid/next) - tp (dt/tpoint) - ab (volatile! false) - cs (volatile! nil)] - (try - (l/info :hint "start exportation" :export-id (str id)) - (pu/with-open [output (io/output-stream output)] - (binding [*position* (atom 0)] - (write-export! (assoc cfg ::output output)))) - - (catch java.io.IOException _cause - ;; Do nothing, EOF means client closes connection abruptly - (vreset! ab true) - nil) - - (catch Throwable cause - (vreset! cs cause) - (vreset! ab true) - (throw cause)) - - (finally - (l/info :hint "exportation finished" :export-id (str id) - :elapsed (str (inst-ms (tp)) "ms") - :aborted @ab - :cause @cs))))) - -(defn export-to-tmpfile! - [cfg] - (let [path (tmp/tempfile :prefix "penpot.export.")] - (pu/with-open [output (io/output-stream path)] - (export! cfg output) - path))) - -(defn import! - [{:keys [::input] :as cfg}] - (let [id (uuid/next) - tp (dt/tpoint) - cs (volatile! nil)] - (l/info :hint "import: started" :id (str id)) - (try - (binding [*position* (atom 0)] - (pu/with-open [input (io/input-stream input)] - (read-import! (assoc cfg ::input input)))) - - (catch Throwable cause - (vreset! cs cause) - (throw cause)) - - (finally - (l/info :hint "import: terminated" - :id (str id) - :elapsed (dt/format-duration (tp)) - :error? (some? @cs)))))) - - ;; --- Command: export-binfile (def ^:private schema:export-binfile (sm/define [:map {:title "export-binfile"} + [:name :string] [:file-id ::sm/uuid] - [:include-libraries? :boolean] - [:embed-assets? :boolean]])) + [:include-libraries :boolean] + [:embed-assets :boolean]])) (sv/defmethod ::export-binfile "Export a penpot file in a binary format." {::doc/added "1.15" ::webhooks/event? true ::sm/result schema:export-binfile} - [{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id file-id include-libraries? embed-assets?] 
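;; With the renamed flags below, a client call becomes (cf. the frontend
;; change in app.worker.export at the end of this patch):
;;
;;   (rp/cmd! :export-binfile {:file-id file-id
;;                             :include-libraries true
;;                             :embed-assets false})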
:as params}]
+  [{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id file-id include-libraries embed-assets] :as params}]
   (files/check-read-permissions! pool profile-id file-id)
   (fn [_]
     {::rres/status 200
@@ -1096,14 +51,27 @@
      ::rres/body (reify rres/StreamableResponseBody
                    (-write-body-to-stream [_ _ output-stream]
                      (-> cfg
-                        (assoc ::file-ids [file-id])
-                        (assoc ::embed-assets? embed-assets?)
-                        (assoc ::include-libraries? include-libraries?)
-                        (export! output-stream))))}))
-
+                        (assoc ::bf.v1/ids #{file-id})
+                        (assoc ::bf.v1/embed-assets embed-assets)
+                        (assoc ::bf.v1/include-libraries include-libraries)
+                        (bf.v1/export-files! output-stream))))}))
 
 ;; --- Command: import-binfile
 
+(defn- import-binfile
+  [{:keys [::wrk/executor ::bf.v1/project-id] :as cfg} input]
+  (db/tx-run! cfg
+              (fn [{:keys [::db/conn] :as cfg}]
+                ;; NOTE: the importation process performs some operations that
+                ;; are not very friendly with virtual threads, and, to avoid
+                ;; unexpected blocking of other concurrent operations, we
+                ;; dispatch that operation to a dedicated executor.
+                (let [result (px/submit! executor (partial bf.v1/import-files! cfg input))]
+                  (db/update! conn :project
+                              {:modified-at (dt/now)}
+                              {:id project-id})
+                  (deref result)))))
+
 (def ^:private schema:import-binfile
   (sm/define
@@ -1112,8 +80,6 @@
     [:project-id ::sm/uuid]
     [:file ::media/upload]]))
 
-(declare ^:private import-binfile)
-
 (sv/defmethod ::import-binfile
   "Import a penpot file in a binary format."
   {::doc/added "1.15"
@@ -1122,26 +88,10 @@
   ::sm/params schema:import-binfile}
  [{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id name project-id file] :as params}]
  (projects/check-read-permissions! pool profile-id project-id)
-  (let [params (-> cfg
-                   (assoc ::input (:path file))
-                   (assoc ::project-id project-id)
-                   (assoc ::profile-id profile-id)
-                   (assoc ::name name)
-                   (assoc ::ignore-index-errors? true))]
+  (let [cfg (-> cfg
+                (assoc ::bf.v1/project-id project-id)
+                (assoc ::bf.v1/profile-id profile-id)
+                (assoc ::bf.v1/name name))]
    (with-meta
-      (sse/response #(import-binfile params))
+      (sse/response #(import-binfile cfg (:path file)))
      {::audit/props {:file nil}})))
-
-(defn- import-binfile
-  [{:keys [::wrk/executor ::project-id] :as params}]
-  (db/tx-run! params
-              (fn [{:keys [::db/conn] :as params}]
-                ;; NOTE: the importation process performs some operations that
-                ;; are not very friendly with virtual threads, and for avoid
-                ;; unexpected blocking of other concurrent operations we
-                ;; dispatch that operation to a dedicated executor.
-                (let [result (p/thread-call executor (partial import! params))]
-                  (db/update! conn :project
-                              {:modified-at (dt/now)}
-                              {:id project-id})
-                  (deref result)))))
diff --git a/backend/src/app/rpc/commands/management.clj b/backend/src/app/rpc/commands/management.clj
index 416056a91..5d01d9ec6 100644
--- a/backend/src/app/rpc/commands/management.clj
+++ b/backend/src/app/rpc/commands/management.clj
@@ -7,51 +7,83 @@
 (ns app.rpc.commands.management
   "A collection of RPC methods for managing files, projects and team organization."
(:require
-   [app.common.data :as d]
+   [app.binfile.common :as bfc]
+   [app.binfile.v1 :as bf.v1]
    [app.common.exceptions :as ex]
    [app.common.features :as cfeat]
-   [app.common.files.migrations :as pmg]
    [app.common.schema :as sm]
    [app.common.uuid :as uuid]
    [app.config :as cf]
    [app.db :as db]
-   [app.features.fdata :as feat.fdata]
    [app.http.sse :as sse]
    [app.loggers.webhooks :as-alias webhooks]
    [app.rpc :as-alias rpc]
-   [app.rpc.commands.binfile :as binfile]
    [app.rpc.commands.files :as files]
    [app.rpc.commands.projects :as proj]
    [app.rpc.commands.teams :as teams]
    [app.rpc.doc :as-alias doc]
    [app.setup :as-alias setup]
    [app.setup.templates :as tmpl]
-   [app.util.blob :as blob]
-   [app.util.pointer-map :as pmap]
    [app.util.services :as sv]
    [app.util.time :as dt]
    [app.worker :as-alias wrk]
-   [clojure.walk :as walk]
-   [promesa.core :as p]
    [promesa.exec :as px]))
 
-(defn- index-row
-  [index obj]
-  (assoc index (:id obj) (uuid/next)))
-
-(defn- lookup-index
-  [id index]
-  (get index id id))
-
-(defn- remap-id
-  [item index key]
-  (cond-> item
-    (contains? item key)
-    (update key lookup-index index)))
-
 ;; --- COMMAND: Duplicate File
 
-(declare duplicate-file)
+(defn duplicate-file
+  [{:keys [::db/conn ::bfc/timestamp] :as cfg} {:keys [profile-id file-id name reset-shared-flag] :as params}]
+  (let [;; We don't touch the original file on duplication
+        file (bfc/get-file cfg file-id)
+        project-id (:project-id file)
+        file (-> file
+                 (update :id bfc/lookup-index)
+                 (update :project-id bfc/lookup-index)
+                 (cond-> (string? name)
+                   (assoc :name name))
+                 (cond-> (true? reset-shared-flag)
+                   (assoc :is-shared false)))
+
+        flibs (bfc/get-files-rels cfg #{file-id})
+        fmeds (bfc/get-file-media cfg file)]
+
+    (when (uuid? profile-id)
+      (proj/check-edition-permissions! conn profile-id project-id))
+
+    (vswap! bfc/*state* update :index bfc/update-index fmeds :id)
+
+    ;; Process and persist file
+    (let [file (->> (bfc/process-file file)
+                    (bfc/persist-file! cfg))]
+
+      ;; The file profile creation is optional, so when no profile is
+      ;; present (when this function is called from a profile-less
+      ;; environment: SREPL) we just omit the creation of the relation
+      (when (uuid? profile-id)
+        (db/insert! conn :file-profile-rel
+                    {:file-id (:id file)
+                     :profile-id profile-id
+                     :is-owner true
+                     :is-admin true
+                     :can-edit true}
+                    {::db/return-keys? false}))
+
+      (doseq [params (sequence (comp
+                                (map #(bfc/remap-id % :file-id))
+                                (map #(bfc/remap-id % :library-file-id))
+                                (map #(assoc % :synced-at timestamp))
+                                (map #(assoc % :created-at timestamp)))
+                               flibs)]
+        (db/insert! conn :file-library-rel params ::db/return-keys false))
+
+      (doseq [params (sequence (comp
+                                (map #(bfc/remap-id % :id))
+                                (map #(assoc % :created-at timestamp))
+                                (map #(bfc/remap-id % :file-id)))
+                               fmeds)]
+        (db/insert! conn :file-media-object params ::db/return-keys false))
+
+      file)))
 
 (def ^:private schema:duplicate-file
@@ -69,178 +101,51 @@
   (db/tx-run! cfg
               (fn [{:keys [::db/conn] :as cfg}]
                 (db/exec-one! conn ["SET CONSTRAINTS ALL DEFERRED"])
-                (let [params (-> params
-                                 (assoc :index {file-id (uuid/next)})
-                                 (assoc :profile-id profile-id)
-                                 (assoc ::reset-shared-flag? true))]
-                  (duplicate-file cfg params)))))
-
-(defn- process-file
-  [cfg index {:keys [id] :as file}]
-  (letfn [(process-form [form]
-            (cond-> form
-              ;; Relink library items
-              (and (map? form)
-                   (uuid? (:component-file form)))
-              (update :component-file #(get index % %))
-
-              (and (map? form)
-                   (uuid? (:fill-color-ref-file form)))
-              (update :fill-color-ref-file #(get index % %))
-
-              (and (map?
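;; duplicate-file relies on a caller-provided bfc/*state* binding; a minimal
;; sketch (commented out) mirroring the rpc wrapper further below:
;;
;;   (binding [bfc/*state* (volatile! {:index {file-id (uuid/next)}})]
;;     (duplicate-file (assoc cfg ::bfc/timestamp (dt/now))
;;                     {:profile-id profile-id
;;                      :file-id file-id
;;                      :reset-shared-flag true}))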
form) - (uuid? (:stroke-color-ref-file form))) - (update :stroke-color-ref-file #(get index % %)) - - (and (map? form) - (uuid? (:typography-ref-file form))) - (update :typography-ref-file #(get index % %)) - - ;; Relink Image Shapes - (and (map? form) - (map? (:metadata form)) - (= :image (:type form))) - (update-in [:metadata :id] #(get index % %)))) - - ;; A function responsible to analyze all file data and - ;; replace the old :component-file reference with the new - ;; ones, using the provided file-index - (relink-shapes [data] - (walk/postwalk process-form data)) - - ;; A function responsible of process the :media attr of file - ;; data and remap the old ids with the new ones. - (relink-media [media] - (reduce-kv (fn [res k v] - (let [id (get index k)] - (if (uuid? id) - (-> res - (assoc id (assoc v :id id)) - (dissoc k)) - res))) - media - media)) - - (process-file [{:keys [id] :as file}] - (-> file - (update :data assoc :id id) - (pmg/migrate-file) - (update :data (fn [data] - (-> data - (update :pages-index relink-shapes) - (update :components relink-shapes) - (update :media relink-media) - (d/without-nils))))))] - - (let [file (binding [pmap/*load-fn* (partial feat.fdata/load-pointer cfg id)] - (-> file - (update :id lookup-index index) - (update :project-id lookup-index index) - (update :data feat.fdata/process-pointers deref) - (process-file))) - - file (if (contains? (:features file) "fdata/objects-map") - (feat.fdata/enable-objects-map file) - file) - - file (if (contains? (:features file) "fdata/pointer-map") - (binding [pmap/*tracked* (pmap/create-tracked)] - (let [file (feat.fdata/enable-pointer-map file)] - (feat.fdata/persist-pointers! cfg (:id file)) - file)) - file)] - file))) - -(defn duplicate-file - [{:keys [::db/conn] :as cfg} {:keys [profile-id index file-id name ::reset-shared-flag?]}] - (let [;; We don't touch the original file on duplication - file (files/get-file cfg file-id :migrate? false) - - ;; We only check permissions if profile-id is present; it can - ;; be omited when this function is called from SREPL helpers - _ (when (uuid? profile-id) - (proj/check-edition-permissions! conn profile-id (:project-id file))) - - flibs (let [sql (str "SELECT flr.* " - " FROM file_library_rel AS flr " - " JOIN file AS l ON (flr.library_file_id = l.id) " - " WHERE flr.file_id = ? AND l.deleted_at is null")] - (db/exec! conn [sql file-id])) - - fmeds (let [sql (str "SELECT fmo.* " - " FROM file_media_object AS fmo " - " JOIN storage_object AS so ON (fmo.media_id = so.id) " - " WHERE fmo.file_id = ? AND so.deleted_at is null")] - (db/exec! conn [sql file-id])) - - ;; memo uniform creation/modification date - now (dt/now) - ignore (dt/plus now (dt/duration {:seconds 5})) - - ;; add to the index all file media objects. - index (reduce index-row index fmeds) - - flibs-xf (comp - (map #(remap-id % index :file-id)) - (map #(remap-id % index :library-file-id)) - (map #(assoc % :synced-at now)) - (map #(assoc % :created-at now))) - - ;; remap all file-library-rel row - flibs (sequence flibs-xf flibs) - - fmeds-xf (comp - (map #(assoc % :id (get index (:id %)))) - (map #(assoc % :created-at now)) - (map #(remap-id % index :file-id))) - - ;; remap all file-media-object rows - fmeds (sequence fmeds-xf fmeds) - - file (cond-> file - (string? name) - (assoc :name name) - - (true? reset-shared-flag?) 
-                 (assoc :is-shared false))
-
-        file (-> file
-                 (assoc :created-at now)
-                 (assoc :modified-at now)
-                 (assoc :ignore-sync-until ignore))
-
-        file (process-file cfg index file)]
-
-    (db/insert! conn :file
-                (-> file
-                    (update :features #(db/create-array conn "text" %))
-                    (update :data blob/encode))
-                {::db/return-keys false})
-
-    ;; The file profile creation is optional, so when no profile is
-    ;; present (when this function is called from profile less
-    ;; environment: SREPL) we just omit the creation of the relation
-
-    (when (uuid? profile-id)
-      (db/insert! conn :file-profile-rel
-                  {:file-id (:id file)
-                   :profile-id profile-id
-                   :is-owner true
-                   :is-admin true
-                   :can-edit true}
-                  {::db/return-keys? false}))
-
-    (doseq [params flibs]
-      (db/insert! conn :file-library-rel params ::db/return-keys false))
-
-    (doseq [params fmeds]
-      (db/insert! conn :file-media-object params ::db/return-keys false))
-
-    file))
+                (binding [bfc/*state* (volatile! {:index {file-id (uuid/next)}})]
+                  (duplicate-file (assoc cfg ::bfc/timestamp (dt/now))
+                                  (-> params
+                                      (assoc :profile-id profile-id)
+                                      (assoc :reset-shared-flag true)))))))
 
 ;; --- COMMAND: Duplicate Project
 
-(declare duplicate-project)
+(defn duplicate-project
+  [{:keys [::db/conn ::bfc/timestamp] :as cfg} {:keys [profile-id project-id name] :as params}]
+  (binding [bfc/*state* (volatile! {:index {project-id (uuid/next)}})]
+    (let [project (-> (db/get-by-id conn :project project-id)
+                      (assoc :created-at timestamp)
+                      (assoc :modified-at timestamp)
+                      (assoc :is-pinned false)
+                      (update :id bfc/lookup-index)
+                      (cond-> (string? name)
+                        (assoc :name name)))
+
+          files (bfc/get-project-files cfg project-id)]
+
+      ;; Update index with the project files and the project-id
+      (vswap! bfc/*state* update :index bfc/update-index files)
+
+      ;; Check if the source team allows creating a new project for the current user
+      (teams/check-edition-permissions!
conn profile-id (:team-id project))
+
+      ;; create the duplicated project and assign the current profile as
+      ;; a project owner
+      (let [project (teams/create-project conn project)]
+        ;; The project profile creation is optional, so when no profile is
+        ;; present (when this function is called from a profile-less
+        ;; environment: SREPL) we just omit the creation of the relation
+        (when (uuid? profile-id)
+          (teams/create-project-role conn profile-id (:id project) :owner))
+
+        (doseq [file-id files]
+          (let [params (-> params
+                           (dissoc :name)
+                           (assoc :file-id file-id)
+                           (assoc :reset-shared-flag false))]
+            (duplicate-file cfg params)))
+
+        project))))
 
 (def ^:private schema:duplicate-project
@@ -256,54 +161,13 @@
   ::sm/params schema:duplicate-project}
  [cfg {:keys [::rpc/profile-id] :as params}]
  (db/tx-run! cfg (fn [cfg]
-                   ;; Defer all constraints
+                    ;; Defer all constraints
                    (db/exec-one! cfg ["SET CONSTRAINTS ALL DEFERRED"])
-                   (duplicate-project cfg (assoc params :profile-id profile-id)))))
-
-(defn duplicate-project
-  [{:keys [::db/conn] :as cfg} {:keys [profile-id project-id name] :as params}]
-  (let [project (-> (db/get-by-id conn :project project-id)
-                    (assoc :is-pinned false))
-
-        files (db/query conn :file
-                        {:project-id project-id
-                         :deleted-at nil}
-                        {:columns [:id]})
-
-        index (reduce index-row {project-id (uuid/next)} files)
-
-        project (cond-> project
-                  (string? name)
-                  (assoc :name name)
-
-                  :always
-                  (update :id lookup-index index))]
-
-    ;; Check if the source team-id allow creating new project for current user
-    (teams/check-edition-permissions! conn profile-id (:team-id project))
-
-    ;; create the duplicated project and assign the current profile as
-    ;; a project owner
-    (teams/create-project conn project)
-
-    ;; The project profile creation is optional, so when no profile is
-    ;; present (when this function is called from profile less
-    ;; environment: SREPL) we just omit the creation of the relation
-    (when (uuid? profile-id)
-      (teams/create-project-role conn profile-id (:id project) :owner))
-
-    (doseq [{:keys [id] :as file} files]
-      (let [params (-> params
-                       (dissoc :name)
-                       (assoc :file-id id)
-                       (assoc :index index)
-                       (assoc ::reset-shared-flag? false))]
-        (duplicate-file cfg params)))
-
-    project))
+                   (-> (assoc cfg ::bfc/timestamp (dt/now))
+                       (duplicate-project (assoc params :profile-id profile-id))))))
 
 (defn duplicate-team
-  [{:keys [::db/conn] :as cfg} & {:keys [profile-id team-id name] :as params}]
+  [{:keys [::db/conn ::bfc/timestamp] :as cfg} & {:keys [profile-id team-id name] :as params}]
 
   ;; Check if the source team is allowed to be read by the user if
   ;; profile-id is present; it can be omitted if this function is
@@ -311,92 +175,79 @@
   (when (uuid? profile-id)
     (teams/check-read-permissions! conn profile-id team-id))
 
-  (let [projs (db/query conn :project
-                        {:team-id team-id})
+  (binding [bfc/*state* (volatile! {:index {team-id (uuid/next)}})]
+    (let [projs (bfc/get-team-projects cfg team-id)
+          files (bfc/get-team-files cfg team-id)
+          frels (bfc/get-files-rels cfg files)
 
-        files (let [sql (str "SELECT f.id "
-                             "  FROM file AS f "
-                             "  JOIN project AS p ON (p.id = f.project_id) "
-                             " WHERE p.team_id = ? "
-                             "   AND p.deleted_at IS NULL "
-                             "   AND f.deleted_at IS NULL")]
-                (db/exec! conn [sql team-id]))
+          team (-> (db/get-by-id conn :team team-id)
+                   (assoc :created-at timestamp)
+                   (assoc :modified-at timestamp)
+                   (update :id bfc/lookup-index)
+                   (cond-> (string? name)
+                     (assoc :name name)))
 
-        trels (db/query conn :team-profile-rel
-                        {:team-id team-id})
+          fonts (db/query conn :team-font-variant
+                          {:team-id team-id})]
 
-        prels (let [sql (str "SELECT r.* FROM project_profile_rel AS r "
-                             "  JOIN project AS p ON (r.project_id = p.id) "
-                             " WHERE p.team_id = ?")]
-                (db/exec! conn [sql team-id]))
+      (vswap! bfc/*state* update :index
+              (fn [index]
+                (-> index
+                    (bfc/update-index projs)
+                    (bfc/update-index files)
+                    (bfc/update-index fonts :id))))
+      ;; FIXME: disallow clone default team
+      ;; Create the new team in the database
+      (db/insert! conn :team team)
 
-        fonts (db/query conn :team-font-variant
-                        {:team-id team-id})
+      ;; Duplicate team <-> profile relations
+      (doseq [params frels]
+        (let [params (-> params
+                         (assoc :id (uuid/next))
+                         (update :team-id bfc/lookup-index)
+                         (assoc :created-at timestamp)
+                         (assoc :modified-at timestamp))]
+          (db/insert! conn :team-profile-rel params
+                      {::db/return-keys false})))
 
-        index (as-> {team-id (uuid/next)} index
-                (reduce index-row index projs)
-                (reduce index-row index files)
-                (reduce index-row index fonts))
+      ;; Duplicate team fonts
+      (doseq [font fonts]
+        (let [params (-> font
+                         (update :id bfc/lookup-index)
+                         (update :team-id bfc/lookup-index)
+                         (assoc :created-at timestamp)
+                         (assoc :modified-at timestamp))]
+          (db/insert! conn :team-font-variant params
+                      {::db/return-keys false})))
 
-        team (db/get-by-id conn :team team-id)
-        team (cond-> team
-               (string?
name)
-               (assoc :name name)
 
+      ;; Duplicate projects; we don't reuse `duplicate-project` here because
+      ;; we duplicate the files of the whole team at once rather than per
+      ;; project, and we want to preserve some project props which are reset
+      ;; by the `duplicate-project` impl
+      (doseq [project-id projs]
+        (let [project (db/get conn :project {:id project-id})
+              project (-> project
+                          (assoc :created-at timestamp)
+                          (assoc :modified-at timestamp)
+                          (update :id bfc/lookup-index)
+                          (update :team-id bfc/lookup-index))]
+          (teams/create-project conn project)
 
-               :always
-               (update :id lookup-index index))]
 
+          ;; The project profile creation is optional, so when no profile is
+          ;; present (when this function is called from a profile-less
+          ;; environment: SREPL) we just omit the creation of the relation
+          (when (uuid? profile-id)
+            (teams/create-project-role conn profile-id (:id project) :owner))))
 
-    ;; FIXME: disallow clone default team
+      (doseq [file-id files]
+        (let [params (-> params
+                         (dissoc :name)
+                         (assoc :file-id file-id)
+                         (assoc :reset-shared-flag false))]
+          (duplicate-file cfg params)))
 
-    ;; Create the new team in the database
-    (db/insert! conn :team team)
-
-    ;; Duplicate team <-> profile relations
-    (doseq [params trels]
-      (let [params (-> params
-                       (assoc :id (uuid/next))
-                       (update :team-id lookup-index index))]
-        (db/insert! conn :team-profile-rel params
-                    {::db/return-keys? false})))
-
-    ;; Duplucate team fonts
-    (doseq [font fonts]
-      (let [params (-> font
-                       (update :id lookup-index index)
-                       (update :team-id lookup-index index))]
-        (db/insert! conn :team-font-variant params
-                    {::db/return-keys? false})))
-
-    ;; Create all the projects in the database
-    (doseq [project projs]
-      (let [project (-> project
-                        (update :id lookup-index index)
-                        (update :team-id lookup-index index))]
-        (teams/create-project conn project)
-
-        ;; The project profile creation is optional, so when no profile is
-        ;; present (when this function is called from profile less
-        ;; environment: SREPL) we just omit the creation of the relation
-        (when (uuid? profile-id)
-          (teams/create-project-role conn profile-id (:id project) :owner))))
-
-    ;; Duplicate project <-> profile relations
-    (doseq [params prels]
-      (let [params (-> params
-                       (assoc :id (uuid/next))
-                       (update :project-id lookup-index index))]
-        (db/insert! conn :project-profile-rel params)))
-
-    (doseq [file-id (map :id files)]
-      (let [params (-> params
-                       (dissoc :name)
-                       (assoc :index index)
-                       (assoc :file-id file-id)
-                       (assoc ::reset-shared-flag? false))]
-        (duplicate-file cfg params)))
-
-    team))
+      team)))
 
 ;; --- COMMAND: Move file
 
@@ -545,6 +396,19 @@
 
 ;; --- COMMAND: Clone Template
 
+(defn- clone-template
+  [{:keys [::wrk/executor ::bf.v1/project-id] :as cfg} template]
+  (db/tx-run! cfg (fn [{:keys [::db/conn] :as cfg}]
+                    ;; NOTE: the importation process performs some operations that
+                    ;; are not very friendly with virtual threads, and, to avoid
+                    ;; unexpected blocking of other concurrent operations, we
+                    ;; dispatch that operation to a dedicated executor.
+                    (let [result (px/submit! executor (partial bf.v1/import-files! cfg template))]
+                      (db/update! conn :project
+                                  {:modified-at (dt/now)}
+                                  {:id project-id})
+                      (deref result)))))
+
 (def ^:private schema:clone-template
   (sm/define
@@ -552,8 +416,6 @@
     [:project-id ::sm/uuid]
     [:template-id ::sm/word-string]]))
 
-(declare ^:private clone-template)
-
 (sv/defmethod ::clone-template
   "Clone into the specified project the template by its id."
   {::doc/added "1.16"
@@ -565,33 +427,14 @@
        _          (teams/check-edition-permissions!
pool profile-id (:team-id project)) template (tmpl/get-template-stream cfg template-id) params (-> cfg - (assoc ::binfile/input template) - (assoc ::binfile/project-id (:id project)) - (assoc ::binfile/profile-id profile-id) - (assoc ::binfile/ignore-index-errors? true) - (assoc ::binfile/migrate? true))] - + (assoc ::bf.v1/project-id (:id project)) + (assoc ::bf.v1/profile-id profile-id))] (when-not template (ex/raise :type :not-found :code :template-not-found :hint "template not found")) - (sse/response #(clone-template params)))) - -(defn- clone-template - [{:keys [::wrk/executor ::binfile/project-id] :as params}] - (db/tx-run! params - (fn [{:keys [::db/conn] :as params}] - ;; NOTE: the importation process performs some operations that - ;; are not very friendly with virtual threads, and for avoid - ;; unexpected blocking of other concurrent operations we - ;; dispatch that operation to a dedicated executor. - (let [result (p/thread-call executor (partial binfile/import! params))] - (db/update! conn :project - {:modified-at (dt/now)} - {:id project-id}) - - (deref result))))) + (sse/response #(clone-template params template)))) ;; --- COMMAND: Get list of builtin templates diff --git a/backend/src/app/rpc/commands/teams.clj b/backend/src/app/rpc/commands/teams.clj index 264fca2a1..381611f81 100644 --- a/backend/src/app/rpc/commands/teams.clj +++ b/backend/src/app/rpc/commands/teams.clj @@ -416,14 +416,16 @@ ;; namespace too. (defn create-project - [conn {:keys [id team-id name is-default] :as params}] + [conn {:keys [id team-id name is-default created-at modified-at]}] (let [id (or id (uuid/next)) - is-default (if (boolean? is-default) is-default false)] - (db/insert! conn :project - {:id id - :name name - :team-id team-id - :is-default is-default}))) + is-default (if (boolean? is-default) is-default false) + params {:id id + :name name + :team-id team-id + :is-default is-default + :created-at created-at + :modified-at modified-at}] + (db/insert! conn :project (d/without-nils params)))) (defn create-project-role [conn profile-id project-id role] diff --git a/backend/src/app/srepl/main.clj b/backend/src/app/srepl/main.clj index d186bfe11..599afa646 100644 --- a/backend/src/app/srepl/main.clj +++ b/backend/src/app/srepl/main.clj @@ -9,6 +9,7 @@ #_:clj-kondo/ignore (:require [app.auth :refer [derive-password]] + [app.binfile.common :as bfc] [app.common.data :as d] [app.common.data.macros :as dm] [app.common.features :as cfeat] @@ -336,4 +337,5 @@ (db/tx-run! main/system (fn [cfg] (db/exec-one! cfg ["SET CONSTRAINTS ALL DEFERRED"]) - (mgmt/duplicate-team cfg :team-id team-id :name name))))) + (-> (assoc cfg ::bfc/timestamp (dt/now)) + (mgmt/duplicate-team :team-id team-id :name name)))))) diff --git a/backend/src/app/tasks/file_gc.clj b/backend/src/app/tasks/file_gc.clj index dcb92a457..b5b9e4dd9 100644 --- a/backend/src/app/tasks/file_gc.clj +++ b/backend/src/app/tasks/file_gc.clj @@ -10,6 +10,7 @@ file is eligible to be garbage collected after some period of inactivity (the default threshold is 72h)." (:require + [app.binfile.common :as bfc] [app.common.files.migrations :as pmg] [app.common.logging :as l] [app.common.thumbnails :as thc] @@ -99,35 +100,6 @@ (->> (db/cursor conn [sql:get-candidates min-age] {:chunk-size 1}) (map #(update % :features db/decode-pgarray #{})))))) -(defn collect-used-media - "Given a fdata (file data), returns all media references." 
- [data] - (let [xform (comp - (map :objects) - (mapcat vals) - (mapcat (fn [obj] - ;; NOTE: because of some bug, we ended with - ;; many shape types having the ability to - ;; have fill-image attribute (which initially - ;; designed for :path shapes). - (sequence - (keep :id) - (concat [(:fill-image obj) - (:metadata obj)] - (map :fill-image (:fills obj)) - (map :stroke-image (:strokes obj)) - (->> (:content obj) - (tree-seq map? :children) - (mapcat :fills) - (map :fill-image))))))) - pages (concat - (vals (:pages-index data)) - (vals (:components data)))] - (-> #{} - (into xform pages) - (into (keys (:media data)))))) - - (def ^:private sql:mark-file-media-object-deleted "UPDATE file_media_object SET deleted_at = now() @@ -137,7 +109,7 @@ (defn- clean-file-media! "Performs the garbage collection of file media objects." [conn file-id data] - (let [used (collect-used-media data) + (let [used (bfc/collect-used-media data) ids (db/create-array conn "uuid" used) unused (->> (db/exec! conn [sql:mark-file-media-object-deleted file-id ids]) (into #{} (map :id)))] diff --git a/frontend/src/app/worker/export.cljs b/frontend/src/app/worker/export.cljs index 24de022fd..604845c73 100644 --- a/frontend/src/app/worker/export.cljs +++ b/frontend/src/app/worker/export.cljs @@ -414,8 +414,8 @@ (rx/mapcat (fn [file] (->> (rp/cmd! :export-binfile {:file-id (:id file) - :include-libraries? (= export-type :all) - :embed-assets? (= export-type :merge)}) + :include-libraries (= export-type :all) + :embed-assets (= export-type :merge)}) (rx/map #(hash-map :type :finish :file-id (:id file) :filename (:name file)
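
The duplication paths above all lean on the same remapping scheme from app.binfile.common: every row that is about to be duplicated first gets a fresh uuid registered in an old-id -> new-id index (bfc/update-index), and the keys of the inserted copies are then rewritten through that index (bfc/lookup-index), falling back to the original id so references that point outside the duplicated set are preserved. A minimal, self-contained sketch of the idea in plain Clojure, with no app dependencies; remap-row is a hypothetical helper, and the real implementation additionally keeps the index inside the bfc/*state* volatile:

(ns sketch.remap
  (:import (java.util UUID)))

(defn update-index
  "Register a fresh uuid for every row, keyed by its old id."
  [index rows]
  (reduce (fn [index row]
            (assoc index (:id row) (UUID/randomUUID)))
          index
          rows))

(defn lookup-index
  "Resolve an old id to its duplicate; fall back to the original id so
  references to rows outside the duplicated set are left untouched."
  [index id]
  (get index id id))

(defn remap-row
  "Rewrite the given key attributes of a row through the index."
  [index row & attrs]
  (reduce (fn [row attr]
            (update row attr (partial lookup-index index)))
          row
          attrs))

;; Usage: duplicate one project row under a new team id; both its own
;; :id and its :team-id foreign key are replaced with fresh uuids.
(let [team-id (UUID/randomUUID)
      project {:id (UUID/randomUUID) :team-id team-id :name "demo"}
      index   (update-index {team-id (UUID/randomUUID)} [project])]
  (remap-row index project :id :team-id))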
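The new clone-template wrapper submits bf.v1/import-files! to the dedicated ::wrk/executor and only derefs the result after touching the project row, so the virtual-thread-unfriendly import work never pins the calling thread. A rough standalone sketch of that dispatch pattern using plain java.util.concurrent; the real code uses promesa's px/submit! inside a db/tx-run! transaction, and run-dispatched is a hypothetical name:

(ns sketch.dispatch
  (:import (java.util.concurrent Callable Executors ExecutorService Future)))

(defn run-dispatched
  "Submit a potentially blocking task to a dedicated executor, do some
  lightweight work on the calling thread, then block for the result."
  [^ExecutorService executor ^Callable task lightweight-fn]
  (let [^Future fut (.submit executor task)]
    (lightweight-fn)  ;; e.g. bump the project's modified-at
    (.get fut)))      ;; block only when the result is actually needed

;; Usage:
(let [executor (Executors/newSingleThreadExecutor)]
  (try
    (run-dispatched executor
                    (fn [] (Thread/sleep 50) :imported)
                    (fn [] (println "updating project row...")))
    (finally
      (.shutdown executor))))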
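In the teams.clj hunk, create-project now funnels its row through d/without-nils, so the optional created-at/modified-at keys (supplied only by the duplication code, which pins them to a shared ::bfc/timestamp) are dropped from the INSERT when absent and the database defaults apply. A sketch of the behavior assumed of d/without-nils (the actual helper lives in app.common.data):

(defn without-nils
  "Drop map entries whose value is nil."
  [m]
  (into {} (remove (comp nil? val)) m))

;; Usage: a plain create-project call leaves the timestamps to the db.
(without-nils {:id 1 :name "p" :is-default false
               :created-at nil :modified-at nil})
;; => {:id 1, :name "p", :is-default false}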
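Finally, the media-collection walk that file_gc.clj previously owned moves to bfc/collect-used-media, so export, duplication and garbage collection agree on what counts as a referenced media object. A simplified, runnable approximation of that walk, based on the removed code above; it omits the tree-seq descent into text :content that the full implementation performs:

(defn used-media
  "Collect the media ids referenced by a file's data: fill/stroke images
  and shape metadata across all pages and components, plus the ids of
  directly attached media."
  [data]
  (let [xform (comp
               (map :objects)
               (mapcat vals)
               (mapcat (fn [obj]
                         (keep :id
                               (concat [(:fill-image obj)
                                        (:metadata obj)]
                                       (map :fill-image (:fills obj))
                                       (map :stroke-image (:strokes obj)))))))]
    (-> #{}
        (into xform (concat (vals (:pages-index data))
                            (vals (:components data))))
        (into (keys (:media data))))))

;; Usage:
(used-media
 {:pages-index {:page-1 {:objects {:shape-1 {:fill-image {:id "m1"}
                                             :fills [{:fill-image {:id "m2"}}]}}}}
  :media {"m3" {}}})
;; => #{"m1" "m2" "m3"}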