♻️ Refactor exportation and duplicate mechanism

Previously the file processing was implemented 3 times using similar
approaches, but each with its own bugs. This PR unifies the
logic into a single implementation used by the 3 operations.
This commit is contained in:
Andrey Antukh 2024-01-22 15:19:03 +01:00
parent a85a7c74c3
commit 7f60946204
12 changed files with 1587 additions and 1858 deletions

View file

@ -145,17 +145,6 @@ Debug Main Page
</small> </small>
</div> </div>
<div class="row">
<label>Ignore index errors?</label>
<input type="checkbox" name="ignore-index-errors" checked/>
<br />
<small>
Do not break on index lookup errors (remap operation).
Useful when importing a broken file that has broken
relations or missing pieces.
</small>
</div>
<div class="row"> <div class="row">
<input type="submit" name="upload" value="Upload" /> <input type="submit" name="upload" value="Upload" />
</div> </div>

View file

@ -0,0 +1,491 @@
;; This Source Code Form is subject to the terms of the Mozilla Public
;; License, v. 2.0. If a copy of the MPL was not distributed with this
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
;;
;; Copyright (c) KALEIDOS INC
(ns app.binfile.common
"A binfile related file processing common code, used for different
binfile format implementations and management rpc methods."
(:require
[app.common.data :as d]
[app.common.data.macros :as dm]
[app.common.exceptions :as ex]
[app.common.features :as cfeat]
[app.common.files.defaults :as cfd]
[app.common.files.migrations :as fmg]
[app.common.files.validate :as fval]
[app.common.logging :as l]
[app.common.uuid :as uuid]
[app.config :as cf]
[app.db :as db]
[app.db.sql :as sql]
[app.features.components-v2 :as feat.compv2]
[app.features.fdata :as feat.fdata]
[app.loggers.audit :as-alias audit]
[app.loggers.webhooks :as-alias webhooks]
[app.util.blob :as blob]
[app.util.pointer-map :as pmap]
[app.util.time :as dt]
[app.worker :as-alias wrk]
[clojure.set :as set]
[clojure.walk :as walk]
[cuerdas.core :as str]))
(set! *warn-on-reflection* true)
(def ^:dynamic *state* nil)
(def ^:dynamic *options* nil)
(def xf-map-id
(map :id))
(def xf-map-media-id
(comp
(mapcat (juxt :media-id
:thumbnail-id
:woff1-file-id
:woff2-file-id
:ttf-file-id
:otf-file-id))
(filter uuid?)))
(def into-vec
(fnil into []))
(def conj-vec
(fnil conj []))
(defn collect-storage-objects
  "Accumulate into the :storage-objects key of `state` all storage
  object ids referenced from the media related attrs of `items`."
  [state items]
  (let [ids (sequence xf-map-media-id items)]
    (update state :storage-objects into ids)))
(defn collect-summary
  "Accumulate under `key` of `state` all storage object ids referenced
  from the media related attrs of `items`."
  [state key items]
  (let [ids (sequence xf-map-media-id items)]
    (update state key into ids)))
(defn lookup-index
  "Resolve `id` through the remap index stored on the dynamic *state*.
  Returns the remapped id when an index entry exists, the original id
  otherwise; returns nil for nil input."
  [id]
  (when id
    (let [val (get-in @*state* [:index id])]
      (l/trc :fn "lookup-index" :id (str id) :result (some-> val str) ::l/sync? true)
      ;; Fall back to the original id so unknown references pass through.
      (or val id))))
(defn remap-id
  "Remap the value stored under `key` on `item` using the state index;
  returns the item unchanged when the key is absent."
  [item key]
  (if (contains? item key)
    (update item key lookup-index)
    item))
(defn- index-object
  "Add a remap entry to `index` for each id extracted from `obj` by the
  `attrs` accessor fns. When the ::overwrite option is active on
  *options* each id maps to itself; otherwise a fresh uuid is assigned."
  [index obj & attrs]
  (reduce (fn [index attr-fn]
            (let [old-id (attr-fn obj)
                  new-id (if (::overwrite *options*) old-id (uuid/next))]
              (assoc index old-id new-id)))
          index
          attrs))
(defn update-index
  "Populate the remap `index` with entries for every object in `coll`.
  The `attr` fn extracts the id from each object; the 2-arity variant
  assumes `coll` already contains plain ids."
  ([index coll]
   (update-index index coll identity))
  ([index coll attr]
   (reduce (fn [index item]
             (index-object index item attr))
           index
           coll)))
(defn decode-row
  "Generic row decoding helper: decodes the :features pgarray into a
  set and the :data blob into a value, when those columns are present."
  [{:keys [data features] :as row}]
  (let [row (if features
              (assoc row :features (db/decode-pgarray features #{}))
              row)]
    (if data
      (assoc row :data (blob/decode data))
      row)))
(defn get-file
  "Fetch the file identified by `file-id` (including soft-deleted rows),
  decode its data blob and fully materialize any fdata pointers and
  objects-map entries. Returns nil when the file does not exist."
  [cfg file-id]
  (db/run! cfg (fn [{:keys [::db/conn] :as cfg}]
                 ;; Bind the pointer load fn so deref on pointer-map
                 ;; entries can fetch their content from the database.
                 (binding [pmap/*load-fn* (partial feat.fdata/load-pointer cfg file-id)]
                   (when-let [file (db/get* conn :file {:id file-id}
                                            {::db/remove-deleted false})]
                     (-> file
                         (decode-row)
                         (update :data feat.fdata/process-pointers deref)
                         (update :data feat.fdata/process-objects (partial into {}))))))))
(defn get-project
[cfg project-id]
(db/get cfg :project {:id project-id}))
(defn get-team
[cfg team-id]
(-> (db/get cfg :team {:id team-id})
(decode-row)))
(defn get-fonts
[cfg team-id]
(db/query cfg :team-font-variant
{:team-id team-id
:deleted-at nil}))
(defn get-files-rels
"Given a set of file-id's, return all matching relations with the libraries"
[cfg ids]
(dm/assert!
"expected a set of uuids"
(and (set? ids)
(every? uuid? ids)))
(db/run! cfg (fn [{:keys [::db/conn]}]
(let [ids (db/create-array conn "uuid" ids)
sql (str "SELECT flr.* FROM file_library_rel AS flr "
" JOIN file AS l ON (flr.library_file_id = l.id) "
" WHERE flr.file_id = ANY(?) AND l.deleted_at IS NULL")]
(db/exec! conn [sql ids])))))
;; NOTE: Will be used in the future; kept here (referenced by get-libraries) to satisfy the linter
(def ^:private sql:get-libraries
"WITH RECURSIVE libs AS (
SELECT fl.id
FROM file AS fl
JOIN file_library_rel AS flr ON (flr.library_file_id = fl.id)
WHERE flr.file_id = ANY(?)
UNION
SELECT fl.id
FROM file AS fl
JOIN file_library_rel AS flr ON (flr.library_file_id = fl.id)
JOIN libs AS l ON (flr.file_id = l.id)
)
SELECT DISTINCT l.id
FROM libs AS l")
(defn get-libraries
"Get all libraries ids related to provided file ids"
[cfg ids]
(db/run! cfg (fn [{:keys [::db/conn]}]
(let [ids' (db/create-array conn "uuid" ids)]
(->> (db/exec! conn [sql:get-libraries ids'])
(into #{} xf-map-id))))))
(defn get-file-object-thumbnails
"Return all file object thumbnails for a given file."
[cfg file-id]
(db/query cfg :file-tagged-object-thumbnail
{:file-id file-id
:deleted-at nil}))
(defn get-file-thumbnail
"Return the thumbnail for the specified file-id"
[cfg {:keys [id revn]}]
(db/get* cfg :file-thumbnail
{:file-id id
:revn revn
:data nil}
{::sql/columns [:media-id :file-id :revn]}))
(def ^:private
xform:collect-media-id
(comp
(map :objects)
(mapcat vals)
(mapcat (fn [obj]
;; NOTE: because of some bug, we ended with
;; many shape types having the ability to
;; have fill-image attribute (which initially
;; designed for :path shapes).
(sequence
(keep :id)
(concat [(:fill-image obj)
(:metadata obj)]
(map :fill-image (:fills obj))
(map :stroke-image (:strokes obj))
(->> (:content obj)
(tree-seq map? :children)
(mapcat :fills)
(map :fill-image))))))))
(defn collect-used-media
  "Given a fdata (file data), return the set of all media object ids
  referenced from pages, components and the media bucket itself."
  [data]
  (let [pages      (vals (:pages-index data))
        components (vals (:components data))]
    (-> (into #{} xform:collect-media-id pages)
        (into xform:collect-media-id components)
        (into (keys (:media data))))))
(defn get-file-media
[cfg {:keys [data id] :as file}]
(db/run! cfg (fn [{:keys [::db/conn]}]
(let [ids (collect-used-media data)
ids (db/create-array conn "uuid" ids)
sql (str "SELECT * FROM file_media_object WHERE id = ANY(?)")]
;; We assoc the file-id again to the file-media-object row
;; because there are cases that used objects refer to other
;; files and we need to ensure in the exportation process that
;; all ids matches
(->> (db/exec! conn [sql ids])
(mapv #(assoc % :file-id id)))))))
(def ^:private sql:get-team-files
"SELECT f.id FROM file AS f
JOIN project AS p ON (p.id = f.project_id)
WHERE p.team_id = ?")
(defn get-team-files
  "Return the set of file ids belonging to the specified team."
  [{:keys [::db/conn]} team-id]
  (let [rows (db/exec! conn [sql:get-team-files team-id])]
    (into #{} xf-map-id rows)))
(def ^:private sql:get-team-projects
"SELECT p.id FROM project AS p
WHERE p.team_id = ?
AND p.deleted_at IS NULL")
(defn get-team-projects
"Get a set of project ids for the team"
[{:keys [::db/conn]} team-id]
(->> (db/exec! conn [sql:get-team-projects team-id])
(into #{} xf-map-id)))
(def ^:private sql:get-project-files
"SELECT f.id FROM file AS f
WHERE f.project_id = ?
AND f.deleted_at IS NULL")
(defn get-project-files
"Get a set of file ids for the project"
[{:keys [::db/conn]} project-id]
(->> (db/exec! conn [sql:get-project-files project-id])
(into #{} xf-map-id)))
(defn- relink-shapes
  "Analyze all file data and replace the old external file references
  (:component-file, :fill-color-ref-file, :stroke-color-ref-file,
  :typography-ref-file, :file-id, media ids) with the new ones, using
  the remap index stored on *state* (via lookup-index)."
  [data]
  (letfn [(process-map-form [form]
            (cond-> form
              ;; Relink image shapes
              (and (map? (:metadata form))
                   (= :image (:type form)))
              (update-in [:metadata :id] lookup-index)

              ;; Relink paths with fill image
              (map? (:fill-image form))
              (update-in [:fill-image :id] lookup-index)

              ;; This covers old shapes and the new :fills.
              (uuid? (:fill-color-ref-file form))
              (update :fill-color-ref-file lookup-index)

              ;; This covers the old shapes and the new :strokes.
              ;; FIX: the guard previously checked the misspelled
              ;; :storage-color-ref-file key, so stroke color refs
              ;; were never remapped.
              (uuid? (:stroke-color-ref-file form))
              (update :stroke-color-ref-file lookup-index)

              ;; This covers all text shapes that have typography referenced
              (uuid? (:typography-ref-file form))
              (update :typography-ref-file lookup-index)

              ;; This covers the component instance links
              (uuid? (:component-file form))
              (update :component-file lookup-index)

              ;; This covers the shadows and grids (they have directly
              ;; the :file-id prop)
              (uuid? (:file-id form))
              (update :file-id lookup-index)))

          (process-form [form]
            (if (map? form)
              (try
                (process-map-form form)
                (catch Throwable cause
                  ;; Log the offending form before rethrowing to ease debugging.
                  (l/warn :hint "failed form" :form (pr-str form) ::l/sync? true)
                  (throw cause)))
              form))]
    (walk/postwalk process-form data)))
(defn- relink-media
"A function responsible of process the :media attr of file data and
remap the old ids with the new ones."
[media]
(reduce-kv (fn [res k v]
(let [id (lookup-index k)]
(if (uuid? id)
(-> res
(assoc id (assoc v :id id))
(dissoc k))
res)))
media
media))
(defn- relink-colors
"A function responsible of process the :colors attr of file data and
remap the old ids with the new ones."
[colors]
(reduce-kv (fn [res k v]
(if (:image v)
(update-in res [k :image :id] lookup-index)
res))
colors
colors))
(defn embed-assets
  "Walk the file data and embed into it all assets referenced from
  external library files, rewriting every external reference so it
  points to `file-id` itself. Used on exportation when libraries are
  not included in the bundle."
  [cfg data file-id]
  (letfn [;; For map forms referencing an external file: record the
          ;; [library-id bucket asset-id] triple on the volatile state
          ;; and repoint the reference to the current file.
          (walk-map-form [form state]
            (cond
              (uuid? (:fill-color-ref-file form))
              (do
                (vswap! state conj [(:fill-color-ref-file form) :colors (:fill-color-ref-id form)])
                (assoc form :fill-color-ref-file file-id))

              (uuid? (:stroke-color-ref-file form))
              (do
                (vswap! state conj [(:stroke-color-ref-file form) :colors (:stroke-color-ref-id form)])
                (assoc form :stroke-color-ref-file file-id))

              (uuid? (:typography-ref-file form))
              (do
                (vswap! state conj [(:typography-ref-file form) :typographies (:typography-ref-id form)])
                (assoc form :typography-ref-file file-id))

              (uuid? (:component-file form))
              (do
                (vswap! state conj [(:component-file form) :components (:component-id form)])
                (assoc form :component-file file-id))

              :else
              form))

          (process-group-of-assets [data [lib-id items]]
            ;; NOTE: there is a possibility that a shape refers to a
            ;; non-existent file because the file was removed. In this
            ;; case we just ignore the asset.
            (if-let [lib (get-file cfg lib-id)]
              (reduce (partial process-asset lib) data items)
              data))

          ;; Copy a single asset from the library into the corresponding
          ;; bucket of the current file data.
          (process-asset [lib data [bucket asset-id]]
            (let [asset (get-in lib [:data bucket asset-id])
                  ;; Add a special case for colors that need to have
                  ;; correctly set the :file-id prop (pending of the
                  ;; refactor that will remove it).
                  asset (cond-> asset
                          (= bucket :colors) (assoc :file-id file-id))]
              (update data bucket assoc asset-id asset)))]

    (let [assets (volatile! [])]
      ;; First pass: rewrite references and collect used external assets.
      (walk/postwalk #(cond-> % (map? %) (walk-map-form assets)) data)
      (->> (deref assets)
           ;; Keep only triples pointing to a real external file.
           (filter #(as-> (first %) $ (and (uuid? $) (not= $ file-id))))
           (d/group-by first rest)
           (reduce (partial process-group-of-assets) data)))))
(defn process-file
  "Prepare an imported file for persistence: normalize the fdata
  version, run the registered file migrations and remap all external
  id references using the current import index."
  [{:keys [id] :as file}]
  (-> file
      (update :data (fn [fdata]
                      (-> fdata
                          (assoc :id id)
                          (dissoc :recent-colors)
                          ;; Clamp the version so it never exceeds the
                          ;; currently supported one.
                          (cond-> (> (:version fdata) cfd/version)
                            (assoc :version cfd/version))
                          ;; FIXME: We're temporarily activating all
                          ;; migrations because a problem in the
                          ;; environments messed up with the version
                          ;; numbers. When this problem is fixed delete
                          ;; the following line
                          (cond-> (> (:version fdata) 22)
                            (assoc :version 22)))))
      (fmg/migrate-file)
      ;; Remap references only after migration, so the data is on the
      ;; shape the relink helpers expect.
      (update :data (fn [fdata]
                      (-> fdata
                          (update :pages-index relink-shapes)
                          (update :components relink-shapes)
                          (update :media relink-media)
                          (update :colors relink-colors)
                          (d/without-nils))))))
(defn- upsert-file!
  "Insert the file row; when a row with the same id already exists
  (overwrite mode) only its data column is replaced."
  [conn file]
  (let [sql (str "INSERT INTO file (id, project_id, name, revn, is_shared, data, created_at, modified_at) "
                 "VALUES (?, ?, ?, ?, ?, ?, ?, ?) "
                 "ON CONFLICT (id) DO UPDATE SET data=?")]
    ;; :data appears twice: once for the INSERT values and once for the
    ;; ON CONFLICT update clause.
    (db/exec-one! conn [sql
                        (:id file)
                        (:project-id file)
                        (:name file)
                        (:revn file)
                        (:is-shared file)
                        (:data file)
                        (:created-at file)
                        (:modified-at file)
                        (:data file)])))
(defn persist-file!
  "Apply all the final validations and persist the file.
  Requires a valid ::timestamp on cfg. Honors the ::overwrite option
  (upsert instead of insert), the ::features option and the
  schema-validation feature flags."
  [{:keys [::db/conn ::timestamp] :as cfg} {:keys [id] :as file}]
  (dm/assert!
   "expected valid timestamp"
   (dt/instant? timestamp))

  (let [file (-> file
                 (assoc :created-at timestamp)
                 (assoc :modified-at timestamp)
                 ;; Postpone library sync shortly after import.
                 (assoc :ignore-sync-until (dt/plus timestamp (dt/duration {:seconds 5})))
                 ;; Merge the file features with the team/import features,
                 ;; excluding frontend-only ones.
                 (update :features
                         (fn [features]
                           (let [features (cfeat/check-supported-features! features)]
                             (-> (::features cfg #{})
                                 (set/difference cfeat/frontend-only-features)
                                 (set/union features))))))

        ;; Hard validation: raises on schema errors.
        _ (when (contains? cf/flags :file-schema-validation)
            (fval/validate-file-schema! file))

        ;; Soft validation: only logs schema errors.
        _ (when (contains? cf/flags :soft-file-schema-validation)
            (let [result (ex/try! (fval/validate-file-schema! file))]
              (when (ex/exception? result)
                (l/error :hint "file schema validation error" :cause result))))

        file (if (contains? (:features file) "fdata/objects-map")
               (feat.fdata/enable-objects-map file)
               file)

        file (if (contains? (:features file) "fdata/pointer-map")
               ;; Track created pointers so they can be persisted along
               ;; with the file row.
               (binding [pmap/*tracked* (pmap/create-tracked)]
                 (let [file (feat.fdata/enable-pointer-map file)]
                   (feat.fdata/persist-pointers! cfg id)
                   file))
               file)

        params (-> file
                   (update :features db/encode-pgarray conn "text")
                   (update :data blob/encode))]

    (if (::overwrite cfg)
      (upsert-file! conn params)
      (db/insert! conn :file params ::db/return-keys false))

    ;; Return the (non-encoded) file map for further processing.
    file))
(defn apply-pending-migrations!
  "Apply already registered pending migrations to files.
  Reads the [feature file-id] pairs accumulated on *state* under
  :pending-to-migrate and dispatches the corresponding migration;
  raises an :internal error for features without a defined migration."
  [cfg]
  (doseq [[feature file-id] (-> *state* deref :pending-to-migrate)]
    (case feature
      "components/v2"
      (feat.compv2/migrate-file! cfg file-id :validate? (::validate cfg true))

      ;; Implicit migration, no explicit step required.
      "fdata/shape-data-type"
      nil

      ;; FIX: error message previously read "migation".
      (ex/raise :type :internal
                :code :no-migration-defined
                :hint (str/ffmt "no migration for feature '%' on file importation" feature)
                :feature feature))))

View file

@ -0,0 +1,762 @@
;; This Source Code Form is subject to the terms of the Mozilla Public
;; License, v. 2.0. If a copy of the MPL was not distributed with this
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
;;
;; Copyright (c) KALEIDOS INC
(ns app.binfile.v1
"A custom, perfromance and efficiency focused binfile format impl"
(:refer-clojure :exclude [assert])
(:require
[app.binfile.common :as bfc]
[app.common.data :as d]
[app.common.data.macros :as dm]
[app.common.exceptions :as ex]
[app.common.features :as cfeat]
[app.common.fressian :as fres]
[app.common.logging :as l]
[app.common.spec :as us]
[app.common.types.file :as ctf]
[app.common.uuid :as uuid]
[app.config :as cf]
[app.db :as db]
[app.http.sse :as sse]
[app.loggers.audit :as-alias audit]
[app.loggers.webhooks :as-alias webhooks]
[app.media :as media]
[app.rpc :as-alias rpc]
[app.rpc.commands.teams :as teams]
[app.rpc.doc :as-alias doc]
[app.storage :as sto]
[app.storage.tmp :as tmp]
[app.tasks.file-gc]
[app.util.time :as dt]
[app.worker :as-alias wrk]
[clojure.java.io :as jio]
[clojure.set :as set]
[clojure.spec.alpha :as s]
[cuerdas.core :as str]
[datoteka.io :as io]
[promesa.util :as pu]
[yetti.adapter :as yt])
(:import
com.github.luben.zstd.ZstdInputStream
com.github.luben.zstd.ZstdOutputStream
java.io.DataInputStream
java.io.DataOutputStream
java.io.InputStream
java.io.OutputStream))
(set! *warn-on-reflection* true)
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; DEFAULTS
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; Threshold in MiB when we pass from using
;; in-memory byte-array's to use temporal files.
(def temp-file-threshold
(* 1024 1024 2))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; LOW LEVEL STREAM IO API
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(def ^:const buffer-size (:xnio/buffer-size yt/defaults))
(def ^:const penpot-magic-number 800099563638710213)
(def ^:const max-object-size (* 1024 1024 100)) ; Only allow 100MiB max file size.
(def ^:dynamic *position* nil)
(defn get-mark
  "Return the numeric wire code for the given mark keyword; raises a
  validation error for unknown mark ids."
  [id]
  (or (get {:header 1
            :stream 2
            :uuid   3
            :label  4
            :obj    5}
           id)
      (ex/raise :type :validation
                :code :invalid-mark-id
                :hint (format "invalid mark id %s" id))))
(defmacro assert
[expr hint]
`(when-not ~expr
(ex/raise :type :validation
:code :unexpected-condition
:hint ~hint)))
(defmacro assert-mark
[v type]
`(let [expected# (get-mark ~type)
val# (long ~v)]
(when (not= val# expected#)
(ex/raise :type :validation
:code :unexpected-mark
:hint (format "received mark %s, expected %s" val# expected#)))))
(defmacro assert-label
[expr label]
`(let [v# ~expr]
(when (not= v# ~label)
(ex/raise :type :assertion
:code :unexpected-label
:hint (format "received label %s, expected %s" v# ~label)))))
;; --- PRIMITIVE IO
(defn write-byte!
[^DataOutputStream output data]
(l/trace :fn "write-byte!" :data data :position @*position* ::l/sync? true)
(.writeByte output (byte data))
(swap! *position* inc))
(defn read-byte!
[^DataInputStream input]
(let [v (.readByte input)]
(l/trace :fn "read-byte!" :val v :position @*position* ::l/sync? true)
(swap! *position* inc)
v))
(defn write-long!
[^DataOutputStream output data]
(l/trace :fn "write-long!" :data data :position @*position* ::l/sync? true)
(.writeLong output (long data))
(swap! *position* + 8))
(defn read-long!
[^DataInputStream input]
(let [v (.readLong input)]
(l/trace :fn "read-long!" :val v :position @*position* ::l/sync? true)
(swap! *position* + 8)
v))
(defn write-bytes!
[^DataOutputStream output ^bytes data]
(let [size (alength data)]
(l/trace :fn "write-bytes!" :size size :position @*position* ::l/sync? true)
(.write output data 0 size)
(swap! *position* + size)))
(defn read-bytes!
[^InputStream input ^bytes buff]
(let [size (alength buff)
readed (.readNBytes input buff 0 size)]
(l/trace :fn "read-bytes!" :expected (alength buff) :readed readed :position @*position* ::l/sync? true)
(swap! *position* + readed)
readed))
;; --- COMPOSITE IO
(defn write-uuid!
[^DataOutputStream output id]
(l/trace :fn "write-uuid!" :position @*position* :WRITTEN? (.size output) ::l/sync? true)
(doto output
(write-byte! (get-mark :uuid))
(write-long! (uuid/get-word-high id))
(write-long! (uuid/get-word-low id))))
(defn read-uuid!
[^DataInputStream input]
(l/trace :fn "read-uuid!" :position @*position* ::l/sync? true)
(let [m (read-byte! input)]
(assert-mark m :uuid)
(let [a (read-long! input)
b (read-long! input)]
(uuid/custom a b))))
(defn write-obj!
[^DataOutputStream output data]
(l/trace :fn "write-obj!" :position @*position* ::l/sync? true)
(let [^bytes data (fres/encode data)]
(doto output
(write-byte! (get-mark :obj))
(write-long! (alength data))
(write-bytes! data))))
(defn read-obj!
[^DataInputStream input]
(l/trace :fn "read-obj!" :position @*position* ::l/sync? true)
(let [m (read-byte! input)]
(assert-mark m :obj)
(let [size (read-long! input)]
(assert (pos? size) "incorrect header size found on reading header")
(let [buff (byte-array size)]
(read-bytes! input buff)
(fres/decode buff)))))
(defn write-label!
[^DataOutputStream output label]
(l/trace :fn "write-label!" :label label :position @*position* ::l/sync? true)
(doto output
(write-byte! (get-mark :label))
(write-obj! label)))
(defn read-label!
[^DataInputStream input]
(l/trace :fn "read-label!" :position @*position* ::l/sync? true)
(let [m (read-byte! input)]
(assert-mark m :label)
(read-obj! input)))
(defn write-header!
[^OutputStream output version]
(l/trace :fn "write-header!"
:version version
:position @*position*
::l/sync? true)
(let [vers (-> version name (subs 1) parse-long)
output (io/data-output-stream output)]
(doto output
(write-byte! (get-mark :header))
(write-long! penpot-magic-number)
(write-long! vers))))
(defn read-header!
  "Read and validate the binfile header from the input stream.
  Raises a validation error when the header mark or the penpot magic
  number do not match; returns the format version as a keyword
  (e.g. :v1)."
  [^InputStream input]
  (l/trace :fn "read-header!" :position @*position* ::l/sync? true)
  (let [input (io/data-input-stream input)
        mark  (read-byte! input)
        mnum  (read-long! input)
        vers  (read-long! input)]
    (when (or (not= mark (get-mark :header))
              (not= mnum penpot-magic-number))
      (ex/raise :type :validation
                :code :invalid-penpot-file
                :hint "invalid penpot file"))
    (keyword (str "v" vers))))
(defn copy-stream!
[^OutputStream output ^InputStream input ^long size]
(let [written (io/copy! input output :size size)]
(l/trace :fn "copy-stream!" :position @*position* :size size :written written ::l/sync? true)
(swap! *position* + written)
written))
(defn write-stream!
[^DataOutputStream output stream size]
(l/trace :fn "write-stream!" :position @*position* ::l/sync? true :size size)
(doto output
(write-byte! (get-mark :stream))
(write-long! size))
(copy-stream! output stream size))
(defn read-stream!
  "Read a stream section from the input. Returns a [size resource]
  pair where resource is a byte array for small payloads, or a temp
  file path when the size exceeds `temp-file-threshold`.
  Raises a validation error when the declared size is bigger than
  `max-object-size`."
  [^DataInputStream input]
  (l/trace :fn "read-stream!" :position @*position* ::l/sync? true)
  (let [m (read-byte! input)
        s (read-long! input)
        p (tmp/tempfile :prefix "penpot.binfile.")]
    (assert-mark m :stream)

    (when (> s max-object-size)
      (ex/raise :type :validation
                :code :max-file-size-reached
                :hint (str/ffmt "unable to import storage object with size % bytes" s)))

    (if (> s temp-file-threshold)
      ;; Large payload: spill to a temporal file instead of memory.
      (with-open [^OutputStream output (io/output-stream p)]
        (let [readed (io/copy! input output :offset 0 :size s)]
          (l/trace :fn "read-stream*!" :expected s :readed readed :position @*position* ::l/sync? true)
          (swap! *position* + readed)
          [s p]))
      [s (io/read-as-bytes input :size s)])))
(defmacro assert-read-label!
[input expected-label]
`(let [readed# (read-label! ~input)
expected# ~expected-label]
(when (not= readed# expected#)
(ex/raise :type :validation
:code :unexpected-label
:hint (format "unexpected label found: %s, expected: %s" readed# expected#)))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; API
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; --- HELPERS
(defn zstd-input-stream
^InputStream
[input]
(ZstdInputStream. ^InputStream input))
(defn zstd-output-stream
^OutputStream
[output & {:keys [level] :or {level 0}}]
(ZstdOutputStream. ^OutputStream output (int level)))
(defn- get-files
[cfg ids]
(db/run! cfg (fn [{:keys [::db/conn]}]
(let [sql (str "SELECT id FROM file "
" WHERE id = ANY(?) ")
ids (db/create-array conn "uuid" ids)]
(->> (db/exec! conn [sql ids])
(into [] (map :id))
(not-empty))))))
;; --- EXPORT WRITER
(defmulti write-export ::version)
(defmulti write-section ::section)
(defn write-export!
[{:keys [::include-libraries ::embed-assets] :as cfg}]
(when (and include-libraries embed-assets)
(throw (IllegalArgumentException.
"the `include-libraries` and `embed-assets` are mutally excluding options")))
(write-export cfg))
(defmethod write-export :default
[{:keys [::output] :as options}]
(write-header! output :v1)
(pu/with-open [output (zstd-output-stream output :level 12)
output (io/data-output-stream output)]
(binding [bfc/*state* (volatile! {})]
(run! (fn [section]
(l/dbg :hint "write section" :section section ::l/sync? true)
(write-label! output section)
(let [options (-> options
(assoc ::output output)
(assoc ::section section))]
(binding [bfc/*options* options]
(write-section options))))
[:v1/metadata :v1/files :v1/rels :v1/sobjects]))))
(defmethod write-section :v1/metadata
[{:keys [::output ::ids ::include-libraries] :as cfg}]
(if-let [fids (get-files cfg ids)]
(let [lids (when include-libraries
(bfc/get-libraries cfg ids))
ids (into fids lids)]
(write-obj! output {:version cf/version :files ids})
(vswap! bfc/*state* assoc :files ids))
(ex/raise :type :not-found
:code :files-not-found
:hint "unable to retrieve files for export")))
(defmethod write-section :v1/files
[{:keys [::output ::embed-assets ::include-libraries] :as cfg}]
;; Initialize SIDS with empty vector
(vswap! bfc/*state* assoc :sids [])
(doseq [file-id (-> bfc/*state* deref :files)]
(let [detach? (and (not embed-assets) (not include-libraries))
thumbnails (->> (bfc/get-file-object-thumbnails cfg file-id)
(mapv #(dissoc % :file-id)))
file (cond-> (bfc/get-file cfg file-id)
detach?
(-> (ctf/detach-external-references file-id)
(dissoc :libraries))
embed-assets
(update :data #(bfc/embed-assets cfg % file-id))
:always
(assoc :thumbnails thumbnails))
media (bfc/get-file-media cfg file)]
(l/dbg :hint "write penpot file"
:id (str file-id)
:name (:name file)
:thumbnails (count thumbnails)
:features (:features file)
:media (count media)
::l/sync? true)
(doseq [item media]
(l/dbg :hint "write penpot file media object" :id (:id item) ::l/sync? true))
(doseq [item thumbnails]
(l/dbg :hint "write penpot file object thumbnail" :media-id (str (:media-id item)) ::l/sync? true))
(doto output
(write-obj! file)
(write-obj! media))
(vswap! bfc/*state* update :sids into bfc/xf-map-media-id media)
(vswap! bfc/*state* update :sids into bfc/xf-map-media-id thumbnails))))
(defmethod write-section :v1/rels
[{:keys [::output ::include-libraries] :as cfg}]
(let [ids (-> bfc/*state* deref :files set)
rels (when include-libraries
(bfc/get-files-rels cfg ids))]
(l/dbg :hint "found rels" :total (count rels) ::l/sync? true)
(write-obj! output rels)))
(defmethod write-section :v1/sobjects
[{:keys [::sto/storage ::output]}]
(let [sids (-> bfc/*state* deref :sids)
storage (media/configure-assets-storage storage)]
(l/dbg :hint "found sobjects"
:items (count sids)
::l/sync? true)
;; Write all collected storage objects
(write-obj! output sids)
(doseq [id sids]
(let [{:keys [size] :as obj} (sto/get-object storage id)]
(l/dbg :hint "write sobject" :id (str id) ::l/sync? true)
(doto output
(write-uuid! id)
(write-obj! (meta obj)))
(pu/with-open [stream (sto/get-object-data storage obj)]
(let [written (write-stream! output stream size)]
(when (not= written size)
(ex/raise :type :validation
:code :mismatch-readed-size
:hint (str/ffmt "found unexpected object size; size=% written=%" size written)))))))))
;; --- EXPORT READER
(defmulti read-import ::version)
(defmulti read-section ::section)
(s/def ::profile-id ::us/uuid)
(s/def ::project-id ::us/uuid)
(s/def ::input io/input-stream?)
(s/def ::overwrite? (s/nilable ::us/boolean))
(s/def ::ignore-index-errors? (s/nilable ::us/boolean))
;; FIXME: replace with schema
(s/def ::read-import-options
(s/keys :req [::db/pool ::sto/storage ::project-id ::profile-id ::input]
:opt [::overwrite? ::ignore-index-errors?]))
(defn read-import!
"Do the importation of the specified resource in penpot custom binary
format. There are some options for customize the importation
behavior:
`::bfc/overwrite`: if true, instead of creating new files and remapping id references,
it reuses all ids and updates existing objects; defaults to `false`."
[{:keys [::input ::bfc/timestamp] :or {timestamp (dt/now)} :as options}]
(dm/assert!
"expected input stream"
(io/input-stream? input))
(dm/assert!
"expected valid instant"
(dt/instant? timestamp))
(let [version (read-header! input)]
(read-import (assoc options ::version version ::bfc/timestamp timestamp))))
(defn- read-import-v1
[{:keys [::db/conn ::project-id ::profile-id ::input] :as cfg}]
(db/exec-one! conn ["SET idle_in_transaction_session_timeout = 0"])
(db/exec-one! conn ["SET CONSTRAINTS ALL DEFERRED"])
(pu/with-open [input (zstd-input-stream input)
input (io/data-input-stream input)]
(binding [bfc/*state* (volatile! {:media [] :index {}})]
(let [team (teams/get-team conn
:profile-id profile-id
:project-id project-id)
features (cfeat/get-team-enabled-features cf/flags team)]
(sse/tap {:type :import-progress
:section :read-import})
;; Process all sections
(run! (fn [section]
(l/dbg :hint "reading section" :section section ::l/sync? true)
(assert-read-label! input section)
(let [options (-> cfg
(assoc ::bfc/features features)
(assoc ::section section)
(assoc ::input input))]
(binding [bfc/*options* options]
(sse/tap {:type :import-progress
:section section})
(read-section options))))
[:v1/metadata :v1/files :v1/rels :v1/sobjects])
(bfc/apply-pending-migrations! cfg)
;; Knowing that the ids of the created files are in index,
;; just lookup them and return it as a set
(let [files (-> bfc/*state* deref :files)]
(into #{} (keep #(get-in @bfc/*state* [:index %])) files))))))
(defmethod read-import :v1
[options]
(db/tx-run! options read-import-v1))
(defmethod read-section :v1/metadata
[{:keys [::input]}]
(let [{:keys [version files]} (read-obj! input)]
(l/dbg :hint "metadata readed"
:version (:full version)
:files (mapv str files)
::l/sync? true)
(vswap! bfc/*state* update :index bfc/update-index files)
(vswap! bfc/*state* assoc :version version :files files)))
(defn- remap-thumbnails
  "Reassign every thumbnail to `file-id`, rewriting both the :file-id
  attr and the file-id prefix embedded in the :object-id string."
  [thumbnails file-id]
  (let [prefix   (str file-id "/")
        remap-fn (fn [thumbnail]
                   (-> thumbnail
                       (assoc :file-id file-id)
                       (update :object-id
                               (fn [object-id]
                                 (str/replace-first object-id #"^(.*?)/" prefix)))))]
    (mapv remap-fn thumbnails)))
(defmethod read-section :v1/files
[{:keys [::db/conn ::input ::project-id ::bfc/overwrite ::name] :as system}]
(doseq [[idx expected-file-id] (d/enumerate (-> bfc/*state* deref :files))]
(let [file (read-obj! input)
media (read-obj! input)
file-id (:id file)
file-id' (bfc/lookup-index file-id)
thumbnails (:thumbnails file)]
(when (not= file-id expected-file-id)
(ex/raise :type :validation
:code :inconsistent-penpot-file
:found-id file-id
:expected-id expected-file-id
:hint "the penpot file seems corrupt, found unexpected uuid (file-id)"))
(l/dbg :hint "processing file"
:id (str file-id)
:features (:features file)
:version (-> file :data :version)
:media (count media)
:thumbnails (count thumbnails)
::l/sync? true)
(when (seq thumbnails)
(let [thumbnails (remap-thumbnails thumbnails file-id')]
(l/dbg :hint "updated index with thumbnails" :total (count thumbnails) ::l/sync? true)
(vswap! bfc/*state* update :thumbnails bfc/into-vec thumbnails)))
(when (seq media)
;; Update index with media
(l/dbg :hint "update index with media" :total (count media) ::l/sync? true)
(vswap! bfc/*state* update :index bfc/update-index (map :id media))
;; Store file media for later insertion
(l/dbg :hint "update media references" ::l/sync? true)
(vswap! bfc/*state* update :media into (map #(update % :id bfc/lookup-index)) media))
(let [file (-> file
(assoc :id file-id')
(cond-> (and (= idx 0) (some? name))
(assoc :name name))
(assoc :project-id project-id)
(dissoc :thumbnails)
(bfc/process-file))]
;; All features that are enabled and requires explicit migration are
;; added to the state for a posterior migration step.
(doseq [feature (-> (::bfc/features system)
(set/difference cfeat/no-migration-features)
(set/difference (:features file)))]
(vswap! bfc/*state* update :pending-to-migrate (fnil conj []) [feature file-id']))
(l/dbg :hint "create file" :id (str file-id') ::l/sync? true)
(bfc/persist-file! system file)
(when overwrite
(db/delete! conn :file-thumbnail {:file-id file-id'}))
file-id'))))
(defmethod read-section :v1/rels
[{:keys [::db/conn ::input ::bfc/timestamp]}]
(let [rels (read-obj! input)
ids (into #{} (-> bfc/*state* deref :files))]
;; Insert all file relations
(doseq [{:keys [library-file-id] :as rel} rels]
(let [rel (-> rel
(assoc :synced-at timestamp)
(update :file-id bfc/lookup-index)
(update :library-file-id bfc/lookup-index))]
(if (contains? ids library-file-id)
(do
(l/dbg :hint "create file library link"
:file-id (:file-id rel)
:lib-id (:library-file-id rel)
::l/sync? true)
(db/insert! conn :file-library-rel rel))
(l/warn :hint "ignoring file library link"
:file-id (:file-id rel)
:lib-id (:library-file-id rel)
::l/sync? true))))))
(defmethod read-section :v1/sobjects
[{:keys [::sto/storage ::db/conn ::input ::bfc/overwrite ::bfc/timestamp]}]
(let [storage (media/configure-assets-storage storage)
ids (read-obj! input)
thumb? (into #{} (map :media-id) (:thumbnails @bfc/*state*))]
(doseq [expected-storage-id ids]
(let [id (read-uuid! input)
mdata (read-obj! input)]
(when (not= id expected-storage-id)
(ex/raise :type :validation
:code :inconsistent-penpot-file
:hint "the penpot file seems corrupt, found unexpected uuid (storage-object-id)"))
(l/dbg :hint "readed storage object" :id (str id) ::l/sync? true)
(let [[size resource] (read-stream! input)
hash (sto/calculate-hash resource)
content (-> (sto/content resource size)
(sto/wrap-with-hash hash))
params (-> mdata
(assoc ::sto/content content)
(assoc ::sto/deduplicate? true)
(assoc ::sto/touched-at timestamp))
params (if (thumb? id)
(assoc params :bucket "file-object-thumbnail")
(assoc params :bucket "file-media-object"))
sobject (sto/put-object! storage params)]
(l/dbg :hint "persisted storage object"
:old-id (str id)
:new-id (str (:id sobject))
:is-thumbnail (boolean (thumb? id))
::l/sync? true)
(vswap! bfc/*state* update :index assoc id (:id sobject)))))
(doseq [item (:media @bfc/*state*)]
(l/dbg :hint "inserting file media object"
:id (str (:id item))
:file-id (str (:file-id item))
::l/sync? true)
(let [file-id (bfc/lookup-index (:file-id item))]
(if (= file-id (:file-id item))
(l/warn :hint "ignoring file media object" :file-id (str file-id) ::l/sync? true)
(db/insert! conn :file-media-object
(-> item
(assoc :file-id file-id)
(d/update-when :media-id bfc/lookup-index)
(d/update-when :thumbnail-id bfc/lookup-index))
{::db/on-conflict-do-nothing? overwrite}))))
(doseq [item (:thumbnails @bfc/*state*)]
(let [item (update item :media-id bfc/lookup-index)]
(l/dbg :hint "inserting file object thumbnail"
:file-id (str (:file-id item))
:media-id (str (:media-id item))
:object-id (:object-id item)
::l/sync? true)
(db/insert! conn :file-tagged-object-thumbnail item
{::db/on-conflict-do-nothing? overwrite})))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; HIGH LEVEL API
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defn export-files!
"Do the exportation of a specified file in custom penpot binary
format. There are some options available for customize the output:
`::include-libraries`: additionally to the specified file, all the
linked libraries also will be included (including transitive
dependencies).
`::embed-assets`: instead of including the libraries, embed in the
same file library all assets used from external libraries."
[{:keys [::ids] :as cfg} output]
(dm/assert!
"expected a set of uuid's for `::ids` parameter"
(and (set? ids)
(every? uuid? ids)))
(dm/assert!
"expected instance of jio/IOFactory for `input`"
(satisfies? jio/IOFactory output))
(let [id (uuid/next)
tp (dt/tpoint)
ab (volatile! false)
cs (volatile! nil)]
(try
(l/info :hint "start exportation" :export-id (str id))
(pu/with-open [output (io/output-stream output)]
(binding [*position* (atom 0)]
(write-export! (assoc cfg ::output output))))
(catch java.io.IOException _cause
;; Do nothing, EOF means client closes connection abruptly
(vreset! ab true)
nil)
(catch Throwable cause
(vreset! cs cause)
(vreset! ab true)
(throw cause))
(finally
(l/info :hint "exportation finished" :export-id (str id)
:elapsed (str (inst-ms (tp)) "ms")
:aborted @ab
:cause @cs)))))
(defn import-files!
[cfg input]
(dm/assert!
"expected valid profile-id and project-id on `cfg`"
(and (uuid? (::profile-id cfg))
(uuid? (::project-id cfg))))
(dm/assert!
"expected instance of jio/IOFactory for `input`"
(satisfies? jio/IOFactory input))
(let [id (uuid/next)
tp (dt/tpoint)
cs (volatile! nil)]
(l/info :hint "import: started" :id (str id))
(try
(binding [*position* (atom 0)]
(pu/with-open [input (io/input-stream input)]
(read-import! (assoc cfg ::input input))))
(catch Throwable cause
(vreset! cs cause)
(throw cause))
(finally
(l/info :hint "import: terminated"
:id (str id)
:elapsed (dt/format-duration (tp))
:error? (some? @cs))))))

View file

@ -9,31 +9,24 @@
of entire team (or multiple teams) at once." of entire team (or multiple teams) at once."
(:refer-clojure :exclude [read]) (:refer-clojure :exclude [read])
(:require (:require
[app.binfile.common :as bfc]
[app.common.data :as d] [app.common.data :as d]
[app.common.exceptions :as ex]
[app.common.features :as cfeat] [app.common.features :as cfeat]
[app.common.files.defaults :as cfd]
[app.common.files.migrations :as fmg]
[app.common.files.validate :as fval]
[app.common.logging :as l] [app.common.logging :as l]
[app.common.transit :as t] [app.common.transit :as t]
[app.common.uuid :as uuid] [app.common.uuid :as uuid]
[app.config :as cf] [app.config :as cf]
[app.db :as db] [app.db :as db]
[app.db.sql :as sql] [app.db.sql :as sql]
[app.features.fdata :as feat.fdata]
[app.http.sse :as sse] [app.http.sse :as sse]
[app.loggers.audit :as-alias audit] [app.loggers.audit :as-alias audit]
[app.loggers.webhooks :as-alias webhooks] [app.loggers.webhooks :as-alias webhooks]
[app.media :as media] [app.media :as media]
[app.storage :as sto] [app.storage :as sto]
[app.storage.tmp :as tmp] [app.storage.tmp :as tmp]
[app.util.blob :as blob]
[app.util.pointer-map :as pmap]
[app.util.time :as dt] [app.util.time :as dt]
[app.worker :as-alias wrk] [app.worker :as-alias wrk]
[clojure.set :as set] [clojure.set :as set]
[clojure.walk :as walk]
[cuerdas.core :as str] [cuerdas.core :as str]
[datoteka.io :as io] [datoteka.io :as io]
[promesa.util :as pu]) [promesa.util :as pu])
@ -42,34 +35,10 @@
(set! *warn-on-reflection* true) (set! *warn-on-reflection* true)
(def ^:dynamic *state* nil)
(def ^:dynamic *options* nil)
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; LOW LEVEL API ;; LOW LEVEL API
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defn- lookup-index
[id]
(when-let [val (get-in @*state* [:index id])]
(l/trc :fn "lookup-index" :id (some-> id str) :result (some-> val str) ::l/sync? true)
(or val id)))
(defn- index-object
[index obj & attrs]
(reduce (fn [index attr-fn]
(let [old-id (attr-fn obj)
new-id (uuid/next)]
(assoc index old-id new-id)))
index
attrs))
(defn- update-index
([index coll]
(update-index index coll identity))
([index coll attr]
(reduce #(index-object %1 %2 attr) index coll)))
(defn- create-database (defn- create-database
([cfg] ([cfg]
(let [path (tmp/tempfile :prefix "penpot.binfile." :suffix ".sqlite")] (let [path (tmp/tempfile :prefix "penpot.binfile." :suffix ".sqlite")]
@ -92,12 +61,6 @@
"CREATE INDEX kvdata__tag_key__idx "CREATE INDEX kvdata__tag_key__idx
ON kvdata (tag, key)") ON kvdata (tag, key)")
(defn- decode-row
[{:keys [data features] :as row}]
(cond-> row
features (assoc :features (db/decode-pgarray features #{}))
data (assoc :data (blob/decode data))))
(defn- setup-schema! (defn- setup-schema!
[{:keys [::db]}] [{:keys [::db]}]
(db/exec-one! db [sql:create-kvdata-table]) (db/exec-one! db [sql:create-kvdata-table])
@ -147,156 +110,62 @@
;; IMPORT/EXPORT IMPL ;; IMPORT/EXPORT IMPL
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(def ^:private xf-map-id
(map :id))
(def ^:private xf-map-media-id
(comp
(mapcat (juxt :media-id
:thumbnail-id
:woff1-file-id
:woff2-file-id
:ttf-file-id
:otf-file-id))
(filter uuid?)))
;; NOTE: Will be used in future, commented for satisfy linter
;; (def ^:private sql:get-libraries
;; "WITH RECURSIVE libs AS (
;; SELECT fl.id
;; FROM file AS fl
;; JOIN file_library_rel AS flr ON (flr.library_file_id = fl.id)
;; WHERE flr.file_id = ANY(?)
;; UNION
;; SELECT fl.id
;; FROM file AS fl
;; JOIN file_library_rel AS flr ON (flr.library_file_id = fl.id)
;; JOIN libs AS l ON (flr.file_id = l.id)
;; )
;; SELECT DISTINCT l.id
;; FROM libs AS l")
;;
;; (defn- get-libraries
;; "Get all libraries ids related to provided file ids"
;; [{:keys [::db/conn]} ids]
;; (let [ids' (db/create-array conn "uuid" ids)]
;; (->> (db/exec! conn [sql:get-libraries ids])
;; (into #{} xf-map-id))))
;;
;; (def ^:private sql:get-project-files
;; "SELECT f.id FROM file AS f
;; WHERE f.project_id = ?")
;; (defn- get-project-files
;; "Get a set of file ids for the project"
;; [{:keys [::db/conn]} project-id]
;; (->> (db/exec! conn [sql:get-project-files project-id])
;; (into #{} xf-map-id)))
(def ^:private sql:get-team-files
"SELECT f.id FROM file AS f
JOIN project AS p ON (p.id = f.project_id)
WHERE p.team_id = ?")
(defn- get-team-files
"Get a set of file ids for the specified team-id"
[{:keys [::db/conn]} team-id]
(->> (db/exec! conn [sql:get-team-files team-id])
(into #{} xf-map-id)))
(def ^:private sql:get-team-projects
"SELECT p.id FROM project AS p
WHERE p.team_id = ?")
(defn- get-team-projects
"Get a set of project ids for the team"
[{:keys [::db/conn]} team-id]
(->> (db/exec! conn [sql:get-team-projects team-id])
(into #{} xf-map-id)))
(declare ^:private write-project!) (declare ^:private write-project!)
(declare ^:private write-file!) (declare ^:private write-file!)
(defn- write-team! (defn- write-team!
[{:keys [::db/conn] :as cfg} team-id] [cfg team-id]
(sse/tap {:type :export-progress (sse/tap {:type :export-progress
:section :write-team :section :write-team
:id team-id}) :id team-id})
(let [team (db/get conn :team {:id team-id} (let [team (bfc/get-team cfg team-id)
::db/remove-deleted false fonts (bfc/get-fonts cfg team-id)]
::db/check-deleted false)
team (decode-row team)
fonts (db/query conn :team-font-variant
{:team-id team-id
:deleted-at nil}
{::sql/for-share true})]
(l/trc :hint "write" :obj "team" (l/trc :hint "write" :obj "team"
:id (str team-id) :id (str team-id)
:fonts (count fonts)) :fonts (count fonts))
(vswap! *state* update :teams conj team-id) (vswap! bfc/*state* update :teams conj team-id)
(vswap! *state* update :storage-objects into xf-map-media-id fonts) (vswap! bfc/*state* bfc/collect-storage-objects fonts)
(write! cfg :team team-id team) (write! cfg :team team-id team)
(doseq [{:keys [id] :as font} fonts] (doseq [{:keys [id] :as font} fonts]
(vswap! *state* update :team-font-variants conj id) (vswap! bfc/*state* update :team-font-variants conj id)
(write! cfg :team-font-variant id font)))) (write! cfg :team-font-variant id font))))
(defn- write-project! (defn- write-project!
[{:keys [::db/conn] :as cfg} project-id] [cfg project-id]
(sse/tap {:type :export-progress (sse/tap {:type :export-progress
:section :write-project :section :write-project
:id project-id}) :id project-id})
(let [project (db/get conn :project {:id project-id} (let [project (bfc/get-project cfg project-id)]
::db/remove-deleted false
::db/check-deleted false)]
(l/trc :hint "write" :obj "project" :id (str project-id)) (l/trc :hint "write" :obj "project" :id (str project-id))
(write! cfg :project (str project-id) project) (write! cfg :project (str project-id) project)
(vswap! bfc/*state* update :projects conj project-id)))
(vswap! *state* update :projects conj project-id)))
(defn- write-file! (defn- write-file!
[{:keys [::db/conn] :as cfg} file-id] [cfg file-id]
(sse/tap {:type :export-progress (sse/tap {:type :export-progress
:section :write-file :section :write-file
:id file-id}) :id file-id})
(let [file (binding [pmap/*load-fn* (partial feat.fdata/load-pointer cfg file-id)] (let [file (bfc/get-file cfg file-id)
(-> (db/get conn :file {:id file-id} thumbs (bfc/get-file-object-thumbnails cfg file-id)
::sql/for-share true media (bfc/get-file-media cfg file)
::db/remove-deleted false rels (bfc/get-files-rels cfg #{file-id})]
::db/check-deleted false)
(decode-row)
(update :data feat.fdata/process-pointers deref)
(update :data feat.fdata/process-objects (partial into {}))))
thumbs (db/query conn :file-tagged-object-thumbnail (vswap! bfc/*state* (fn [state]
{:file-id file-id (-> state
:deleted-at nil} (update :files conj file-id)
{::sql/for-share true}) (update :file-media-objects into bfc/xf-map-id media)
(bfc/collect-storage-objects thumbs)
media (db/query conn :file-media-object (bfc/collect-storage-objects media))))
{:file-id file-id
:deleted-at nil}
{::sql/for-share true})
rels (db/query conn :file-library-rel
{:file-id file-id})]
(vswap! *state* (fn [state]
(-> state
(update :files conj file-id)
(update :file-media-objects into (map :id) media)
(update :storage-objects into xf-map-media-id thumbs)
(update :storage-objects into xf-map-media-id media))))
(write! cfg :file file-id file) (write! cfg :file file-id file)
(write! cfg :file-rels file-id rels) (write! cfg :file-rels file-id rels)
@ -304,13 +173,8 @@
(run! (partial write! cfg :file-media-object file-id) media) (run! (partial write! cfg :file-media-object file-id) media)
(run! (partial write! cfg :file-object-thumbnail file-id) thumbs) (run! (partial write! cfg :file-object-thumbnail file-id) thumbs)
(when-let [thumb (db/get* conn :file-thumbnail (when-let [thumb (bfc/get-file-thumbnail cfg file)]
{:file-id file-id (vswap! bfc/*state* bfc/collect-storage-objects [thumb])
:revn (:revn file)
:data nil}
{::sql/for-share true
::sql/columns [:media-id :file-id :revn]})]
(vswap! *state* update :storage-objects into xf-map-media-id [thumb])
(write! cfg :file-thumbnail file-id thumb)) (write! cfg :file-thumbnail file-id thumb))
(l/trc :hint "write" :obj "file" (l/trc :hint "write" :obj "file"
@ -328,7 +192,7 @@
(write! cfg :storage-object id (meta sobj) data))) (write! cfg :storage-object id (meta sobj) data)))
(defn- read-storage-object! (defn- read-storage-object!
[{:keys [::sto/storage ::timestamp] :as cfg} id] [{:keys [::sto/storage ::bfc/timestamp] :as cfg} id]
(let [mdata (read-obj cfg :storage-object id) (let [mdata (read-obj cfg :storage-object id)
data (read-blob cfg :storage-object id) data (read-blob cfg :storage-object id)
hash (sto/calculate-hash data) hash (sto/calculate-hash data)
@ -343,7 +207,7 @@
sobject (sto/put-object! storage params)] sobject (sto/put-object! storage params)]
(vswap! *state* update :index assoc id (:id sobject)) (vswap! bfc/*state* update :index assoc id (:id sobject))
(l/trc :hint "read" :obj "storage-object" (l/trc :hint "read" :obj "storage-object"
:id (str id) :id (str id)
@ -351,7 +215,7 @@
:size (:size sobject)))) :size (:size sobject))))
(defn read-team! (defn read-team!
[{:keys [::db/conn ::timestamp] :as cfg} team-id] [{:keys [::db/conn ::bfc/timestamp] :as cfg} team-id]
(l/trc :hint "read" :obj "team" :id (str team-id)) (l/trc :hint "read" :obj "team" :id (str team-id))
(sse/tap {:type :import-progress (sse/tap {:type :import-progress
@ -360,8 +224,8 @@
(let [team (read-obj cfg :team team-id) (let [team (read-obj cfg :team team-id)
team (-> team team (-> team
(update :id lookup-index) (update :id bfc/lookup-index)
(update :photo-id lookup-index) (update :photo-id bfc/lookup-index)
(assoc :created-at timestamp) (assoc :created-at timestamp)
(assoc :modified-at timestamp))] (assoc :modified-at timestamp))]
@ -372,12 +236,12 @@
(doseq [font (->> (read-seq cfg :team-font-variant) (doseq [font (->> (read-seq cfg :team-font-variant)
(filter #(= team-id (:team-id %))))] (filter #(= team-id (:team-id %))))]
(let [font (-> font (let [font (-> font
(update :id lookup-index) (update :id bfc/lookup-index)
(update :team-id lookup-index) (update :team-id bfc/lookup-index)
(update :woff1-file-id lookup-index) (update :woff1-file-id bfc/lookup-index)
(update :woff2-file-id lookup-index) (update :woff2-file-id bfc/lookup-index)
(update :ttf-file-id lookup-index) (update :ttf-file-id bfc/lookup-index)
(update :otf-file-id lookup-index) (update :otf-file-id bfc/lookup-index)
(assoc :created-at timestamp) (assoc :created-at timestamp)
(assoc :modified-at timestamp))] (assoc :modified-at timestamp))]
(db/insert! conn :team-font-variant font (db/insert! conn :team-font-variant font
@ -386,7 +250,7 @@
team)) team))
(defn read-project! (defn read-project!
[{:keys [::db/conn ::timestamp] :as cfg} project-id] [{:keys [::db/conn ::bfc/timestamp] :as cfg} project-id]
(l/trc :hint "read" :obj "project" :id (str project-id)) (l/trc :hint "read" :obj "project" :id (str project-id))
(sse/tap {:type :import-progress (sse/tap {:type :import-progress
@ -395,175 +259,40 @@
(let [project (read-obj cfg :project project-id) (let [project (read-obj cfg :project project-id)
project (-> project project (-> project
(update :id lookup-index) (update :id bfc/lookup-index)
(update :team-id lookup-index) (update :team-id bfc/lookup-index)
(assoc :created-at timestamp) (assoc :created-at timestamp)
(assoc :modified-at timestamp))] (assoc :modified-at timestamp))]
(db/insert! conn :project project (db/insert! conn :project project
::db/return-keys false))) ::db/return-keys false)))
(defn- relink-shapes
"A function responsible to analyze all file data and
replace the old :component-file reference with the new
ones, using the provided file-index."
[data]
(letfn [(process-map-form [form]
(cond-> form
;; Relink image shapes
(and (map? (:metadata form))
(= :image (:type form)))
(update-in [:metadata :id] lookup-index)
;; Relink paths with fill image
(map? (:fill-image form))
(update-in [:fill-image :id] lookup-index)
;; This covers old shapes and the new :fills.
(uuid? (:fill-color-ref-file form))
(update :fill-color-ref-file lookup-index)
;; This covers the old shapes and the new :strokes
(uuid? (:storage-color-ref-file form))
(update :stroke-color-ref-file lookup-index)
;; This covers all text shapes that have typography referenced
(uuid? (:typography-ref-file form))
(update :typography-ref-file lookup-index)
;; This covers the component instance links
(uuid? (:component-file form))
(update :component-file lookup-index)
;; This covers the shadows and grids (they have directly
;; the :file-id prop)
(uuid? (:file-id form))
(update :file-id lookup-index)))]
(walk/postwalk (fn [form]
(if (map? form)
(try
(process-map-form form)
(catch Throwable cause
(l/warn :hint "failed form" :form (pr-str form) ::l/sync? true)
(throw cause)))
form))
data)))
(defn- relink-media
"A function responsible of process the :media attr of file data and
remap the old ids with the new ones."
[media]
(reduce-kv (fn [res k v]
(let [id (lookup-index k)]
(if (uuid? id)
(-> res
(assoc id (assoc v :id id))
(dissoc k))
res)))
media
media))
(defn- relink-colors
"A function responsible of process the :colors attr of file data and
remap the old ids with the new ones."
[colors]
(reduce-kv (fn [res k v]
(if (:image v)
(update-in res [k :image :id] lookup-index)
res))
colors
colors))
(defn- process-file
[{:keys [id] :as file}]
(-> file
(update :data (fn [fdata]
(-> fdata
(assoc :id id)
(dissoc :recent-colors)
(cond-> (> (:version fdata) cfd/version)
(assoc :version cfd/version))
;; FIXME: We're temporarily activating all
;; migrations because a problem in the
;; environments messed up with the version
;; numbers When this problem is fixed delete
;; the following line
(cond-> (> (:version fdata) 22)
(assoc :version 22)))))
(fmg/migrate-file)
(update :data (fn [fdata]
(-> fdata
(update :pages-index relink-shapes)
(update :components relink-shapes)
(update :media relink-media)
(update :colors relink-colors)
(d/without-nils))))))
(defn read-file! (defn read-file!
[{:keys [::db/conn ::timestamp] :as cfg} file-id] [{:keys [::db/conn ::bfc/timestamp] :as cfg} file-id]
(l/trc :hint "read" :obj "file" :id (str file-id)) (l/trc :hint "read" :obj "file" :id (str file-id))
(sse/tap {:type :import-progress (sse/tap {:type :import-progress
:section :read-file :section :read-file
:id file-id}) :id file-id})
(let [file (read-obj cfg :file file-id) (let [file (-> (read-obj cfg :file file-id)
(update :id bfc/lookup-index)
(update :project-id bfc/lookup-index)
(bfc/process-file))]
file (-> file ;; All features that are enabled and requires explicit migration are
(update :id lookup-index) ;; added to the state for a posterior migration step.
(process-file)) (doseq [feature (-> (::bfc/features cfg)
(set/difference cfeat/no-migration-features)
(set/difference (:features file)))]
(vswap! bfc/*state* update :pending-to-migrate (fnil conj []) [feature (:id file)]))
;; All features that are enabled and requires explicit migration are (bfc/persist-file! cfg file))
;; added to the state for a posterior migration step.
_ (doseq [feature (-> (::features cfg)
(set/difference cfeat/no-migration-features)
(set/difference (:features file)))]
(vswap! *state* update :pending-to-migrate (fnil conj []) [feature (:id file)]))
file (-> file
(update :project-id lookup-index))
file (-> file
(assoc :created-at timestamp)
(assoc :modified-at timestamp)
(update :features
(fn [features]
(let [features (cfeat/check-supported-features! features)]
(-> (::features cfg)
(set/difference cfeat/frontend-only-features)
(set/union features))))))
_ (when (contains? cf/flags :file-schema-validation)
(fval/validate-file-schema! file))
_ (when (contains? cf/flags :soft-file-schema-validation)
(let [result (ex/try! (fval/validate-file-schema! file))]
(when (ex/exception? result)
(l/error :hint "file schema validation error" :cause result))))
file (if (contains? (:features file) "fdata/objects-map")
(feat.fdata/enable-objects-map file)
file)
file (if (contains? (:features file) "fdata/pointer-map")
(binding [pmap/*tracked* (pmap/create-tracked)]
(let [file (feat.fdata/enable-pointer-map file)]
(feat.fdata/persist-pointers! cfg (:id file))
file))
file)]
(db/insert! conn :file
(-> file
(update :features db/encode-pgarray conn "text")
(update :data blob/encode))
{::db/return-keys false}))
(doseq [thumbnail (read-seq cfg :file-object-thumbnail file-id)] (doseq [thumbnail (read-seq cfg :file-object-thumbnail file-id)]
(let [thumbnail (-> thumbnail (let [thumbnail (-> thumbnail
(update :file-id lookup-index) (update :file-id bfc/lookup-index)
(update :media-id lookup-index)) (update :media-id bfc/lookup-index))
file-id (:file-id thumbnail) file-id (:file-id thumbnail)
thumbnail (update thumbnail :object-id thumbnail (update thumbnail :object-id
@ -574,20 +303,21 @@
(doseq [rel (read-obj cfg :file-rels file-id)] (doseq [rel (read-obj cfg :file-rels file-id)]
(let [rel (-> rel (let [rel (-> rel
(update :file-id lookup-index) (update :file-id bfc/lookup-index)
(update :library-file-id lookup-index) (update :library-file-id bfc/lookup-index)
(assoc :synced-at timestamp))] (assoc :synced-at timestamp))]
(db/insert! conn :file-library-rel rel (db/insert! conn :file-library-rel rel
::db/return-keys false))) ::db/return-keys false)))
(doseq [media (read-seq cfg :file-media-object file-id)] (doseq [media (read-seq cfg :file-media-object file-id)]
(let [media (-> media (let [media (-> media
(update :id lookup-index) (update :id bfc/lookup-index)
(update :file-id lookup-index) (update :file-id bfc/lookup-index)
(update :media-id lookup-index) (update :media-id bfc/lookup-index)
(update :thumbnail-id lookup-index))] (update :thumbnail-id bfc/lookup-index))]
(db/insert! conn :file-media-object media (db/insert! conn :file-media-object media
::db/return-keys false)))) ::db/return-keys false
::sql/on-conflict-do-nothing true))))
(def ^:private empty-summary (def ^:private empty-summary
{:teams #{} {:teams #{}
@ -617,20 +347,20 @@
(try (try
(db/tx-run! cfg (fn [cfg] (db/tx-run! cfg (fn [cfg]
(setup-schema! cfg) (setup-schema! cfg)
(binding [*state* (volatile! empty-summary)] (binding [bfc/*state* (volatile! empty-summary)]
(write-team! cfg team-id) (write-team! cfg team-id)
(run! (partial write-project! cfg) (run! (partial write-project! cfg)
(get-team-projects cfg team-id)) (bfc/get-team-projects cfg team-id))
(run! (partial write-file! cfg) (run! (partial write-file! cfg)
(get-team-files cfg team-id)) (bfc/get-team-files cfg team-id))
(run! (partial write-storage-object! cfg) (run! (partial write-storage-object! cfg)
(-> *state* deref :storage-objects)) (-> bfc/*state* deref :storage-objects))
(write! cfg :manifest "team-id" team-id) (write! cfg :manifest "team-id" team-id)
(write! cfg :manifest "objects" (deref *state*)) (write! cfg :manifest "objects" (deref bfc/*state*))
(::path cfg)))) (::path cfg))))
(finally (finally
@ -642,19 +372,6 @@
:id (str id) :id (str id)
:elapsed (dt/format-duration elapsed))))))) :elapsed (dt/format-duration elapsed)))))))
;; NOTE: will be used in future, commented for satisfy linter
;; (defn- run-pending-migrations!
;; [cfg]
;; ;; Run all pending migrations
;; (doseq [[feature file-id] (-> *state* deref :pending-to-migrate)]
;; (case feature
;; "components/v2"
;; (feat.compv2/migrate-file! cfg file-id :validate? (::validate cfg true))
;; (ex/raise :type :internal
;; :code :no-migration-defined
;; :hint (str/ffmt "no migation for feature '%' on file importation" feature)
;; :feature feature))))
(defn import-team! (defn import-team!
[cfg path] [cfg path]
(let [id (uuid/next) (let [id (uuid/next)
@ -662,7 +379,7 @@
cfg (-> (create-database cfg path) cfg (-> (create-database cfg path)
(update ::sto/storage media/configure-assets-storage) (update ::sto/storage media/configure-assets-storage)
(assoc ::timestamp (dt/now)))] (assoc ::bfc/timestamp (dt/now)))]
(l/inf :hint "start" (l/inf :hint "start"
:operation "import" :operation "import"
@ -674,7 +391,7 @@
(db/exec-one! conn ["SET idle_in_transaction_session_timeout = 0"]) (db/exec-one! conn ["SET idle_in_transaction_session_timeout = 0"])
(db/exec-one! conn ["SET CONSTRAINTS ALL DEFERRED"]) (db/exec-one! conn ["SET CONSTRAINTS ALL DEFERRED"])
(binding [*state* (volatile! {:index {}})] (binding [bfc/*state* (volatile! {:index {}})]
(let [objects (read-obj cfg :manifest "objects")] (let [objects (read-obj cfg :manifest "objects")]
;; We first process all storage objects, they have ;; We first process all storage objects, they have
@ -683,19 +400,19 @@
(run! (partial read-storage-object! cfg) (:storage-objects objects)) (run! (partial read-storage-object! cfg) (:storage-objects objects))
;; Populate index with all the incoming objects ;; Populate index with all the incoming objects
(vswap! *state* update :index (vswap! bfc/*state* update :index
(fn [index] (fn [index]
(-> index (-> index
(update-index (:teams objects)) (bfc/update-index (:teams objects))
(update-index (:projects objects)) (bfc/update-index (:projects objects))
(update-index (:files objects)) (bfc/update-index (:files objects))
(update-index (:file-media-objects objects)) (bfc/update-index (:file-media-objects objects))
(update-index (:team-font-variants objects))))) (bfc/update-index (:team-font-variants objects)))))
(let [team-id (read-obj cfg :manifest "team-id") (let [team-id (read-obj cfg :manifest "team-id")
team (read-team! cfg team-id) team (read-team! cfg team-id)
features (cfeat/get-team-enabled-features cf/flags team) features (cfeat/get-team-enabled-features cf/flags team)
cfg (assoc cfg ::features features)] cfg (assoc cfg ::bfc/features features)]
(run! (partial read-project! cfg) (:projects objects)) (run! (partial read-project! cfg) (:projects objects))
(run! (partial read-file! cfg) (:files objects)) (run! (partial read-file! cfg) (:files objects))

View file

@ -30,6 +30,9 @@
(let [opts (merge default-opts opts) (let [opts (merge default-opts opts)
opts (cond-> opts opts (cond-> opts
(::db/on-conflict-do-nothing? opts) (::db/on-conflict-do-nothing? opts)
(assoc :suffix "ON CONFLICT DO NOTHING")
(::on-conflict-do-nothing opts)
(assoc :suffix "ON CONFLICT DO NOTHING"))] (assoc :suffix "ON CONFLICT DO NOTHING"))]
(sql/for-insert table key-map opts)))) (sql/for-insert table key-map opts))))
@ -46,7 +49,7 @@
opts (cond-> opts opts (cond-> opts
(::columns opts) (assoc :columns (::columns opts)) (::columns opts) (assoc :columns (::columns opts))
(::for-update opts) (assoc :suffix "FOR UPDATE") (::for-update opts) (assoc :suffix "FOR UPDATE")
(::for-share opts) (assoc :suffix "FOR KEY SHARE"))] (::for-share opts) (assoc :suffix "FOR SHARE"))]
(sql/for-query table where-params opts)))) (sql/for-query table where-params opts))))
(defn update (defn update

View file

@ -7,6 +7,7 @@
(ns app.http.debug (ns app.http.debug
(:refer-clojure :exclude [error-handler]) (:refer-clojure :exclude [error-handler])
(:require (:require
[app.binfile.v1 :as bf.v1]
[app.common.data :as d] [app.common.data :as d]
[app.common.exceptions :as ex] [app.common.exceptions :as ex]
[app.common.logging :as l] [app.common.logging :as l]
@ -17,11 +18,11 @@
[app.http.session :as session] [app.http.session :as session]
[app.main :as-alias main] [app.main :as-alias main]
[app.rpc.commands.auth :as auth] [app.rpc.commands.auth :as auth]
[app.rpc.commands.binfile :as binf]
[app.rpc.commands.files-create :refer [create-file]] [app.rpc.commands.files-create :refer [create-file]]
[app.rpc.commands.profile :as profile] [app.rpc.commands.profile :as profile]
[app.srepl.helpers :as srepl] [app.srepl.helpers :as srepl]
[app.storage :as-alias sto] [app.storage :as-alias sto]
[app.storage.tmp :as tmp]
[app.util.blob :as blob] [app.util.blob :as blob]
[app.util.template :as tmpl] [app.util.template :as tmpl]
[app.util.time :as dt] [app.util.time :as dt]
@ -268,9 +269,10 @@
(defn export-handler (defn export-handler
[{:keys [::db/pool] :as cfg} {:keys [params ::session/profile-id] :as request}] [{:keys [::db/pool] :as cfg} {:keys [params ::session/profile-id] :as request}]
(let [file-ids (->> (:file-ids params) (let [file-ids (into #{}
(remove empty?) (comp (remove empty?)
(mapv parse-uuid)) (map parse-uuid))
(:file-ids params))
libs? (contains? params :includelibs) libs? (contains? params :includelibs)
clone? (contains? params :clone) clone? (contains? params :clone)
embed? (contains? params :embedassets)] embed? (contains? params :embedassets)]
@ -279,22 +281,22 @@
(ex/raise :type :validation (ex/raise :type :validation
:code :missing-arguments)) :code :missing-arguments))
(let [path (-> cfg (let [path (tmp/tempfile :prefix "penpot.export.")]
(assoc ::binf/file-ids file-ids) (with-open [output (io/output-stream path)]
(assoc ::binf/embed-assets? embed?) (-> cfg
(assoc ::binf/include-libraries? libs?) (assoc ::bf.v1/ids file-ids)
(binf/export-to-tmpfile!))] (assoc ::bf.v1/embed-assets embed?)
(assoc ::bf.v1/include-libraries libs?)
(bf.v1/export-files! output)))
(if clone? (if clone?
(let [profile (profile/get-profile pool profile-id) (let [profile (profile/get-profile pool profile-id)
project-id (:default-project-id profile)] project-id (:default-project-id profile)
(binf/import! cfg (assoc cfg
(assoc cfg ::bf.v1/overwrite false
::binf/input path ::bf.v1/profile-id profile-id
::binf/overwrite? false ::bf.v1/project-id project-id)]
::binf/ignore-index-errors? true (bf.v1/import-files! cfg path)
::binf/profile-id profile-id
::binf/project-id project-id))
{::rres/status 200 {::rres/status 200
::rres/headers {"content-type" "text/plain"} ::rres/headers {"content-type" "text/plain"}
::rres/body "OK CLONED"}) ::rres/body "OK CLONED"})
@ -305,7 +307,6 @@
"content-disposition" (str "attachmen; filename=" (first file-ids) ".penpot")}})))) "content-disposition" (str "attachmen; filename=" (first file-ids) ".penpot")}}))))
(defn import-handler (defn import-handler
[{:keys [::db/pool] :as cfg} {:keys [params ::session/profile-id] :as request}] [{:keys [::db/pool] :as cfg} {:keys [params ::session/profile-id] :as request}]
(when-not (contains? params :file) (when-not (contains? params :file)
@ -316,26 +317,23 @@
(let [profile (profile/get-profile pool profile-id) (let [profile (profile/get-profile pool profile-id)
project-id (:default-project-id profile) project-id (:default-project-id profile)
overwrite? (contains? params :overwrite) overwrite? (contains? params :overwrite)
migrate? (contains? params :migrate) migrate? (contains? params :migrate)]
ignore-index-errors? (contains? params :ignore-index-errors)]
(when-not project-id (when-not project-id
(ex/raise :type :validation (ex/raise :type :validation
:code :missing-project :code :missing-project
:hint "project not found")) :hint "project not found"))
(binf/import! (let [path (-> params :file :path)
(assoc cfg cfg (assoc cfg
::binf/input (-> params :file :path) ::bf.v1/overwrite overwrite?
::binf/overwrite? overwrite? ::bf.v1/migrate migrate?
::binf/migrate? migrate? ::bf.v1/profile-id profile-id
::binf/ignore-index-errors? ignore-index-errors? ::bf.v1/project-id project-id)]
::binf/profile-id profile-id (bf.v1/import-files! cfg path)
::binf/project-id project-id)) {::rres/status 200
::rres/headers {"content-type" "text/plain"}
{::rres/status 200 ::rres/body "OK"})))
::rres/headers {"content-type" "text/plain"}
::rres/body "OK"}))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; ACTIONS ;; ACTIONS

File diff suppressed because it is too large Load diff

View file

@ -7,51 +7,83 @@
(ns app.rpc.commands.management (ns app.rpc.commands.management
"A collection of RPC methods for manage the files, projects and team organization." "A collection of RPC methods for manage the files, projects and team organization."
(:require (:require
[app.common.data :as d] [app.binfile.common :as bfc]
[app.binfile.v1 :as bf.v1]
[app.common.exceptions :as ex] [app.common.exceptions :as ex]
[app.common.features :as cfeat] [app.common.features :as cfeat]
[app.common.files.migrations :as pmg]
[app.common.schema :as sm] [app.common.schema :as sm]
[app.common.uuid :as uuid] [app.common.uuid :as uuid]
[app.config :as cf] [app.config :as cf]
[app.db :as db] [app.db :as db]
[app.features.fdata :as feat.fdata]
[app.http.sse :as sse] [app.http.sse :as sse]
[app.loggers.webhooks :as-alias webhooks] [app.loggers.webhooks :as-alias webhooks]
[app.rpc :as-alias rpc] [app.rpc :as-alias rpc]
[app.rpc.commands.binfile :as binfile]
[app.rpc.commands.files :as files] [app.rpc.commands.files :as files]
[app.rpc.commands.projects :as proj] [app.rpc.commands.projects :as proj]
[app.rpc.commands.teams :as teams] [app.rpc.commands.teams :as teams]
[app.rpc.doc :as-alias doc] [app.rpc.doc :as-alias doc]
[app.setup :as-alias setup] [app.setup :as-alias setup]
[app.setup.templates :as tmpl] [app.setup.templates :as tmpl]
[app.util.blob :as blob]
[app.util.pointer-map :as pmap]
[app.util.services :as sv] [app.util.services :as sv]
[app.util.time :as dt] [app.util.time :as dt]
[app.worker :as-alias wrk] [app.worker :as-alias wrk]
[clojure.walk :as walk]
[promesa.core :as p]
[promesa.exec :as px])) [promesa.exec :as px]))
(defn- index-row
[index obj]
(assoc index (:id obj) (uuid/next)))
(defn- lookup-index
[id index]
(get index id id))
(defn- remap-id
[item index key]
(cond-> item
(contains? item key)
(update key lookup-index index)))
;; --- COMMAND: Duplicate File ;; --- COMMAND: Duplicate File
(declare duplicate-file) (defn duplicate-file
[{:keys [::db/conn ::bfc/timestamp] :as cfg} {:keys [profile-id file-id name reset-shared-flag] :as params}]
(let [;; We don't touch the original file on duplication
file (bfc/get-file cfg file-id)
project-id (:project-id file)
file (-> file
(update :id bfc/lookup-index)
(update :project-id bfc/lookup-index)
(cond-> (string? name)
(assoc :name name))
(cond-> (true? reset-shared-flag)
(assoc :is-shared false)))
flibs (bfc/get-files-rels cfg #{file-id})
fmeds (bfc/get-file-media cfg file)]
(when (uuid? profile-id)
(proj/check-edition-permissions! conn profile-id project-id))
(vswap! bfc/*state* update :index bfc/update-index fmeds :id)
;; Process and persist file
(let [file (->> (bfc/process-file file)
(bfc/persist-file! cfg))]
;; The file profile creation is optional, so when no profile is
;; present (when this function is called from profile less
;; environment: SREPL) we just omit the creation of the relation
(when (uuid? profile-id)
(db/insert! conn :file-profile-rel
{:file-id (:id file)
:profile-id profile-id
:is-owner true
:is-admin true
:can-edit true}
{::db/return-keys? false}))
(doseq [params (sequence (comp
(map #(bfc/remap-id % :file-id))
(map #(bfc/remap-id % :library-file-id))
(map #(assoc % :synced-at timestamp))
(map #(assoc % :created-at timestamp)))
flibs)]
(db/insert! conn :file-library-rel params ::db/return-keys false))
(doseq [params (sequence (comp
(map #(bfc/remap-id % :id))
(map #(assoc % :created-at timestamp))
(map #(bfc/remap-id % :file-id)))
fmeds)]
(db/insert! conn :file-media-object params ::db/return-keys false))
file)))
(def ^:private (def ^:private
schema:duplicate-file schema:duplicate-file
@ -69,178 +101,51 @@
(db/tx-run! cfg (fn [{:keys [::db/conn] :as cfg}] (db/tx-run! cfg (fn [{:keys [::db/conn] :as cfg}]
(db/exec-one! conn ["SET CONSTRAINTS ALL DEFERRED"]) (db/exec-one! conn ["SET CONSTRAINTS ALL DEFERRED"])
(let [params (-> params (binding [bfc/*state* (volatile! {:index {file-id (uuid/next)}})]
(assoc :index {file-id (uuid/next)}) (duplicate-file (assoc cfg ::bfc/timestamp (dt/now))
(assoc :profile-id profile-id) (-> params
(assoc ::reset-shared-flag? true))] (assoc :profile-id profile-id)
(duplicate-file cfg params))))) (assoc :reset-shared-flag true)))))))
(defn- process-file
[cfg index {:keys [id] :as file}]
(letfn [(process-form [form]
(cond-> form
;; Relink library items
(and (map? form)
(uuid? (:component-file form)))
(update :component-file #(get index % %))
(and (map? form)
(uuid? (:fill-color-ref-file form)))
(update :fill-color-ref-file #(get index % %))
(and (map? form)
(uuid? (:stroke-color-ref-file form)))
(update :stroke-color-ref-file #(get index % %))
(and (map? form)
(uuid? (:typography-ref-file form)))
(update :typography-ref-file #(get index % %))
;; Relink Image Shapes
(and (map? form)
(map? (:metadata form))
(= :image (:type form)))
(update-in [:metadata :id] #(get index % %))))
;; A function responsible to analyze all file data and
;; replace the old :component-file reference with the new
;; ones, using the provided file-index
(relink-shapes [data]
(walk/postwalk process-form data))
;; A function responsible of process the :media attr of file
;; data and remap the old ids with the new ones.
(relink-media [media]
(reduce-kv (fn [res k v]
(let [id (get index k)]
(if (uuid? id)
(-> res
(assoc id (assoc v :id id))
(dissoc k))
res)))
media
media))
(process-file [{:keys [id] :as file}]
(-> file
(update :data assoc :id id)
(pmg/migrate-file)
(update :data (fn [data]
(-> data
(update :pages-index relink-shapes)
(update :components relink-shapes)
(update :media relink-media)
(d/without-nils))))))]
(let [file (binding [pmap/*load-fn* (partial feat.fdata/load-pointer cfg id)]
(-> file
(update :id lookup-index index)
(update :project-id lookup-index index)
(update :data feat.fdata/process-pointers deref)
(process-file)))
file (if (contains? (:features file) "fdata/objects-map")
(feat.fdata/enable-objects-map file)
file)
file (if (contains? (:features file) "fdata/pointer-map")
(binding [pmap/*tracked* (pmap/create-tracked)]
(let [file (feat.fdata/enable-pointer-map file)]
(feat.fdata/persist-pointers! cfg (:id file))
file))
file)]
file)))
(defn duplicate-file
[{:keys [::db/conn] :as cfg} {:keys [profile-id index file-id name ::reset-shared-flag?]}]
(let [;; We don't touch the original file on duplication
file (files/get-file cfg file-id :migrate? false)
;; We only check permissions if profile-id is present; it can
;; be omited when this function is called from SREPL helpers
_ (when (uuid? profile-id)
(proj/check-edition-permissions! conn profile-id (:project-id file)))
flibs (let [sql (str "SELECT flr.* "
" FROM file_library_rel AS flr "
" JOIN file AS l ON (flr.library_file_id = l.id) "
" WHERE flr.file_id = ? AND l.deleted_at is null")]
(db/exec! conn [sql file-id]))
fmeds (let [sql (str "SELECT fmo.* "
" FROM file_media_object AS fmo "
" JOIN storage_object AS so ON (fmo.media_id = so.id) "
" WHERE fmo.file_id = ? AND so.deleted_at is null")]
(db/exec! conn [sql file-id]))
;; memo uniform creation/modification date
now (dt/now)
ignore (dt/plus now (dt/duration {:seconds 5}))
;; add to the index all file media objects.
index (reduce index-row index fmeds)
flibs-xf (comp
(map #(remap-id % index :file-id))
(map #(remap-id % index :library-file-id))
(map #(assoc % :synced-at now))
(map #(assoc % :created-at now)))
;; remap all file-library-rel row
flibs (sequence flibs-xf flibs)
fmeds-xf (comp
(map #(assoc % :id (get index (:id %))))
(map #(assoc % :created-at now))
(map #(remap-id % index :file-id)))
;; remap all file-media-object rows
fmeds (sequence fmeds-xf fmeds)
file (cond-> file
(string? name)
(assoc :name name)
(true? reset-shared-flag?)
(assoc :is-shared false))
file (-> file
(assoc :created-at now)
(assoc :modified-at now)
(assoc :ignore-sync-until ignore))
file (process-file cfg index file)]
(db/insert! conn :file
(-> file
(update :features #(db/create-array conn "text" %))
(update :data blob/encode))
{::db/return-keys false})
;; The file profile creation is optional, so when no profile is
;; present (when this function is called from profile less
;; environment: SREPL) we just omit the creation of the relation
(when (uuid? profile-id)
(db/insert! conn :file-profile-rel
{:file-id (:id file)
:profile-id profile-id
:is-owner true
:is-admin true
:can-edit true}
{::db/return-keys? false}))
(doseq [params flibs]
(db/insert! conn :file-library-rel params ::db/return-keys false))
(doseq [params fmeds]
(db/insert! conn :file-media-object params ::db/return-keys false))
file))
;; --- COMMAND: Duplicate Project ;; --- COMMAND: Duplicate Project
(declare duplicate-project) (defn duplicate-project
[{:keys [::db/conn ::bfc/timestamp] :as cfg} {:keys [profile-id project-id name] :as params}]
(binding [bfc/*state* (volatile! {:index {project-id (uuid/next)}})]
(let [project (-> (db/get-by-id conn :project project-id)
(assoc :created-at timestamp)
(assoc :modified-at timestamp)
(assoc :is-pinned false)
(update :id bfc/lookup-index)
(cond-> (string? name)
(assoc :name name)))
files (bfc/get-project-files cfg project-id)]
;; Update index with the project files and the project-id
(vswap! bfc/*state* update :index bfc/update-index files)
;; Check if the source team-id allow creating new project for current user
(teams/check-edition-permissions! conn profile-id (:team-id project))
;; create the duplicated project and assign the current profile as
;; a project owner
(let [project (teams/create-project conn project)]
;; The project profile creation is optional, so when no profile is
;; present (when this function is called from profile less
;; environment: SREPL) we just omit the creation of the relation
(when (uuid? profile-id)
(teams/create-project-role conn profile-id (:id project) :owner))
(doseq [file-id files]
(let [params (-> params
(dissoc :name)
(assoc :file-id file-id)
(assoc :reset-shared-flag false))]
(duplicate-file cfg params)))
project))))
(def ^:private (def ^:private
schema:duplicate-project schema:duplicate-project
@ -256,54 +161,13 @@
::sm/params schema:duplicate-project} ::sm/params schema:duplicate-project}
[cfg {:keys [::rpc/profile-id] :as params}] [cfg {:keys [::rpc/profile-id] :as params}]
(db/tx-run! cfg (fn [cfg] (db/tx-run! cfg (fn [cfg]
;; Defer all constraints ;; Defer all constraints
(db/exec-one! cfg ["SET CONSTRAINTS ALL DEFERRED"]) (db/exec-one! cfg ["SET CONSTRAINTS ALL DEFERRED"])
(duplicate-project cfg (assoc params :profile-id profile-id))))) (-> (assoc cfg ::bfc/timestamp (dt/now))
(duplicate-project (assoc params :profile-id profile-id))))))
(defn duplicate-project
[{:keys [::db/conn] :as cfg} {:keys [profile-id project-id name] :as params}]
(let [project (-> (db/get-by-id conn :project project-id)
(assoc :is-pinned false))
files (db/query conn :file
{:project-id project-id
:deleted-at nil}
{:columns [:id]})
index (reduce index-row {project-id (uuid/next)} files)
project (cond-> project
(string? name)
(assoc :name name)
:always
(update :id lookup-index index))]
;; Check if the source team-id allow creating new project for current user
(teams/check-edition-permissions! conn profile-id (:team-id project))
;; create the duplicated project and assign the current profile as
;; a project owner
(teams/create-project conn project)
;; The project profile creation is optional, so when no profile is
;; present (when this function is called from profile less
;; environment: SREPL) we just omit the creation of the relation
(when (uuid? profile-id)
(teams/create-project-role conn profile-id (:id project) :owner))
(doseq [{:keys [id] :as file} files]
(let [params (-> params
(dissoc :name)
(assoc :file-id id)
(assoc :index index)
(assoc ::reset-shared-flag? false))]
(duplicate-file cfg params)))
project))
(defn duplicate-team (defn duplicate-team
[{:keys [::db/conn] :as cfg} & {:keys [profile-id team-id name] :as params}] [{:keys [::db/conn ::bfc/timestamp] :as cfg} & {:keys [profile-id team-id name] :as params}]
;; Check if the source team-id allowed to be read by the user if ;; Check if the source team-id allowed to be read by the user if
;; profile-id is present; it can be ommited if this function is ;; profile-id is present; it can be ommited if this function is
@ -311,92 +175,79 @@
(when (uuid? profile-id) (when (uuid? profile-id)
(teams/check-read-permissions! conn profile-id team-id)) (teams/check-read-permissions! conn profile-id team-id))
(let [projs (db/query conn :project (binding [bfc/*state* (volatile! {:index {team-id (uuid/next)}})]
{:team-id team-id}) (let [projs (bfc/get-team-projects cfg team-id)
files (bfc/get-team-files cfg team-id)
frels (bfc/get-files-rels cfg files)
files (let [sql (str "SELECT f.id " team (-> (db/get-by-id conn :team team-id)
" FROM file AS f " (assoc :created-at timestamp)
" JOIN project AS p ON (p.id = f.project_id) " (assoc :modified-at timestamp)
" WHERE p.team_id = ? " (update :id bfc/lookup-index)
" AND p.deleted_at IS NULL " (cond-> (string? name)
" AND f.deleted_at IS NULL")] (assoc :name name)))
(db/exec! conn [sql team-id]))
trels (db/query conn :team-profile-rel fonts (db/query conn :team-font-variant
{:team-id team-id}) {:team-id team-id})]
prels (let [sql (str "SELECT r.* FROM project_profile_rel AS r " (vswap! bfc/*state* update :index
" JOIN project AS p ON (r.project_id = p.id) " (fn [index]
" WHERE p.team_id = ?")] (-> index
(db/exec! conn [sql team-id])) (bfc/update-index projs)
(bfc/update-index files)
(bfc/update-index fonts :id))))
;; FIXME: disallow clone default team
;; Create the new team in the database
(db/insert! conn :team team)
fonts (db/query conn :team-font-variant ;; Duplicate team <-> profile relations
{:team-id team-id}) (doseq [params frels]
(let [params (-> params
(assoc :id (uuid/next))
(update :team-id bfc/lookup-index)
(assoc :created-at timestamp)
(assoc :modified-at timestamp))]
(db/insert! conn :team-profile-rel params
{::db/return-keys false})))
index (as-> {team-id (uuid/next)} index ;; Duplicate team fonts
(reduce index-row index projs) (doseq [font fonts]
(reduce index-row index files) (let [params (-> font
(reduce index-row index fonts)) (update :id bfc/lookup-index)
(update :team-id bfc/lookup-index)
(assoc :created-at timestamp)
(assoc :modified-at timestamp))]
(db/insert! conn :team-font-variant params
{::db/return-keys false})))
team (db/get-by-id conn :team team-id) ;; Duplicate projects; We don't reuse the `duplicate-project`
team (cond-> team ;; here because we handle files duplication by whole team
(string? name) ;; instead of by project and we want to preserve some project
(assoc :name name) ;; props which are reset on the `duplicate-project` impl
(doseq [project-id projs]
(let [project (db/get conn :project {:id project-id})
project (-> project
(assoc :created-at timestamp)
(assoc :modified-at timestamp)
(update :id bfc/lookup-index)
(update :team-id bfc/lookup-index))]
(teams/create-project conn project)
:always ;; The project profile creation is optional, so when no profile is
(update :id lookup-index index))] ;; present (when this function is called from profile less
;; environment: SREPL) we just omit the creation of the relation
(when (uuid? profile-id)
(teams/create-project-role conn profile-id (:id project) :owner))))
;; FIXME: disallow clone default team (doseq [file-id files]
(let [params (-> params
(dissoc :name)
(assoc :file-id file-id)
(assoc :reset-shared-flag false))]
(duplicate-file cfg params)))
;; Create the new team in the database team)))
(db/insert! conn :team team)
;; Duplicate team <-> profile relations
(doseq [params trels]
(let [params (-> params
(assoc :id (uuid/next))
(update :team-id lookup-index index))]
(db/insert! conn :team-profile-rel params
{::db/return-keys? false})))
;; Duplucate team fonts
(doseq [font fonts]
(let [params (-> font
(update :id lookup-index index)
(update :team-id lookup-index index))]
(db/insert! conn :team-font-variant params
{::db/return-keys? false})))
;; Create all the projects in the database
(doseq [project projs]
(let [project (-> project
(update :id lookup-index index)
(update :team-id lookup-index index))]
(teams/create-project conn project)
;; The project profile creation is optional, so when no profile is
;; present (when this function is called from profile less
;; environment: SREPL) we just omit the creation of the relation
(when (uuid? profile-id)
(teams/create-project-role conn profile-id (:id project) :owner))))
;; Duplicate project <-> profile relations
(doseq [params prels]
(let [params (-> params
(assoc :id (uuid/next))
(update :project-id lookup-index index))]
(db/insert! conn :project-profile-rel params)))
(doseq [file-id (map :id files)]
(let [params (-> params
(dissoc :name)
(assoc :index index)
(assoc :file-id file-id)
(assoc ::reset-shared-flag? false))]
(duplicate-file cfg params)))
team))
;; --- COMMAND: Move file ;; --- COMMAND: Move file
@ -545,6 +396,19 @@
;; --- COMMAND: Clone Template ;; --- COMMAND: Clone Template
(defn- clone-template
[{:keys [::wrk/executor ::bf.v1/project-id] :as cfg} template]
(db/tx-run! cfg (fn [{:keys [::db/conn] :as cfg}]
;; NOTE: the importation process performs some operations that
;; are not very friendly with virtual threads, and for avoid
;; unexpected blocking of other concurrent operations we
;; dispatch that operation to a dedicated executor.
(let [result (px/submit! executor (partial bf.v1/import-files! cfg template))]
(db/update! conn :project
{:modified-at (dt/now)}
{:id project-id})
(deref result)))))
(def ^:private (def ^:private
schema:clone-template schema:clone-template
(sm/define (sm/define
@ -552,8 +416,6 @@
[:project-id ::sm/uuid] [:project-id ::sm/uuid]
[:template-id ::sm/word-string]])) [:template-id ::sm/word-string]]))
(declare ^:private clone-template)
(sv/defmethod ::clone-template (sv/defmethod ::clone-template
"Clone into the specified project the template by its id." "Clone into the specified project the template by its id."
{::doc/added "1.16" {::doc/added "1.16"
@ -565,33 +427,14 @@
_ (teams/check-edition-permissions! pool profile-id (:team-id project)) _ (teams/check-edition-permissions! pool profile-id (:team-id project))
template (tmpl/get-template-stream cfg template-id) template (tmpl/get-template-stream cfg template-id)
params (-> cfg params (-> cfg
(assoc ::binfile/input template) (assoc ::bf.v1/project-id (:id project))
(assoc ::binfile/project-id (:id project)) (assoc ::bf.v1/profile-id profile-id))]
(assoc ::binfile/profile-id profile-id)
(assoc ::binfile/ignore-index-errors? true)
(assoc ::binfile/migrate? true))]
(when-not template (when-not template
(ex/raise :type :not-found (ex/raise :type :not-found
:code :template-not-found :code :template-not-found
:hint "template not found")) :hint "template not found"))
(sse/response #(clone-template params)))) (sse/response #(clone-template params template))))
(defn- clone-template
[{:keys [::wrk/executor ::binfile/project-id] :as params}]
(db/tx-run! params
(fn [{:keys [::db/conn] :as params}]
;; NOTE: the importation process performs some operations that
;; are not very friendly with virtual threads, and for avoid
;; unexpected blocking of other concurrent operations we
;; dispatch that operation to a dedicated executor.
(let [result (p/thread-call executor (partial binfile/import! params))]
(db/update! conn :project
{:modified-at (dt/now)}
{:id project-id})
(deref result)))))
;; --- COMMAND: Get list of builtin templates ;; --- COMMAND: Get list of builtin templates

View file

@ -416,14 +416,16 @@
;; namespace too. ;; namespace too.
(defn create-project (defn create-project
[conn {:keys [id team-id name is-default] :as params}] [conn {:keys [id team-id name is-default created-at modified-at]}]
(let [id (or id (uuid/next)) (let [id (or id (uuid/next))
is-default (if (boolean? is-default) is-default false)] is-default (if (boolean? is-default) is-default false)
(db/insert! conn :project params {:id id
{:id id :name name
:name name :team-id team-id
:team-id team-id :is-default is-default
:is-default is-default}))) :created-at created-at
:modified-at modified-at}]
(db/insert! conn :project (d/without-nils params))))
(defn create-project-role (defn create-project-role
[conn profile-id project-id role] [conn profile-id project-id role]

View file

@ -9,6 +9,7 @@
#_:clj-kondo/ignore #_:clj-kondo/ignore
(:require (:require
[app.auth :refer [derive-password]] [app.auth :refer [derive-password]]
[app.binfile.common :as bfc]
[app.common.data :as d] [app.common.data :as d]
[app.common.data.macros :as dm] [app.common.data.macros :as dm]
[app.common.features :as cfeat] [app.common.features :as cfeat]
@ -336,4 +337,5 @@
(db/tx-run! main/system (db/tx-run! main/system
(fn [cfg] (fn [cfg]
(db/exec-one! cfg ["SET CONSTRAINTS ALL DEFERRED"]) (db/exec-one! cfg ["SET CONSTRAINTS ALL DEFERRED"])
(mgmt/duplicate-team cfg :team-id team-id :name name))))) (-> (assoc cfg ::bfc/timestamp (dt/now))
(mgmt/duplicate-team :team-id team-id :name name))))))

View file

@ -10,6 +10,7 @@
file is eligible to be garbage collected after some period of file is eligible to be garbage collected after some period of
inactivity (the default threshold is 72h)." inactivity (the default threshold is 72h)."
(:require (:require
[app.binfile.common :as bfc]
[app.common.files.migrations :as pmg] [app.common.files.migrations :as pmg]
[app.common.logging :as l] [app.common.logging :as l]
[app.common.thumbnails :as thc] [app.common.thumbnails :as thc]
@ -99,35 +100,6 @@
(->> (db/cursor conn [sql:get-candidates min-age] {:chunk-size 1}) (->> (db/cursor conn [sql:get-candidates min-age] {:chunk-size 1})
(map #(update % :features db/decode-pgarray #{})))))) (map #(update % :features db/decode-pgarray #{}))))))
(defn collect-used-media
"Given a fdata (file data), returns all media references."
[data]
(let [xform (comp
(map :objects)
(mapcat vals)
(mapcat (fn [obj]
;; NOTE: because of some bug, we ended with
;; many shape types having the ability to
;; have fill-image attribute (which initially
;; designed for :path shapes).
(sequence
(keep :id)
(concat [(:fill-image obj)
(:metadata obj)]
(map :fill-image (:fills obj))
(map :stroke-image (:strokes obj))
(->> (:content obj)
(tree-seq map? :children)
(mapcat :fills)
(map :fill-image)))))))
pages (concat
(vals (:pages-index data))
(vals (:components data)))]
(-> #{}
(into xform pages)
(into (keys (:media data))))))
(def ^:private sql:mark-file-media-object-deleted (def ^:private sql:mark-file-media-object-deleted
"UPDATE file_media_object "UPDATE file_media_object
SET deleted_at = now() SET deleted_at = now()
@ -137,7 +109,7 @@
(defn- clean-file-media! (defn- clean-file-media!
"Performs the garbage collection of file media objects." "Performs the garbage collection of file media objects."
[conn file-id data] [conn file-id data]
(let [used (collect-used-media data) (let [used (bfc/collect-used-media data)
ids (db/create-array conn "uuid" used) ids (db/create-array conn "uuid" used)
unused (->> (db/exec! conn [sql:mark-file-media-object-deleted file-id ids]) unused (->> (db/exec! conn [sql:mark-file-media-object-deleted file-id ids])
(into #{} (map :id)))] (into #{} (map :id)))]

View file

@ -414,8 +414,8 @@
(rx/mapcat (rx/mapcat
(fn [file] (fn [file]
(->> (rp/cmd! :export-binfile {:file-id (:id file) (->> (rp/cmd! :export-binfile {:file-id (:id file)
:include-libraries? (= export-type :all) :include-libraries (= export-type :all)
:embed-assets? (= export-type :merge)}) :embed-assets (= export-type :merge)})
(rx/map #(hash-map :type :finish (rx/map #(hash-map :type :finish
:file-id (:id file) :file-id (:id file)
:filename (:name file) :filename (:name file)