Merge pull request #5665 from penpot/niwinz-instantiate-component-bug-5

🎉 Add srepl fix function for process media refs on a file
This commit is contained in:
Alejandro 2025-01-31 11:56:22 +01:00 committed by GitHub
commit d7477cd448
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
21 changed files with 366 additions and 407 deletions

View file

@ -6,6 +6,33 @@
### :boom: Breaking changes & Deprecations
Although this is not a breaking change, we believe it's important to highlight it in this
section:
This release includes a fix for an internal bug in Penpot that caused incorrect handling
of media assets (e.g., fill images). The issue has been resolved since version 2.4.3, so
no new incorrect references will be generated. However, existing files may still contain
incorrect references.
To address this, we've provided a script to correct these references in existing files.
While having incorrect references generally doesn't result in visible issues, there are
rare cases where it can cause problems. For example, if a component library (containing
images) is deleted, and that library is being used in other files, running the FileGC task
(responsible for freeing up space and performing logical deletions) could leave those
files with broken references to the images.
To execute the script:
```bash
docker exec -ti <container-name-or-id> ./run.sh app.migrations.media-refs '{:max-jobs 1}'
```
If you have a big database and many cores available, you can reduce the total processing
time by increasing parallelization: change the `max-jobs` value from 1 to N (where N is
the number of available cores).
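For illustration only (the exact option set is defined by the script itself, and only
`:max-jobs` is documented here), the EDN options for a host with 8 available cores could
look like this:

```clojure
;; Hypothetical EDN options map for a host with 8 available cores; it
;; is passed to the script as the quoted string '{:max-jobs 8}'.
{:max-jobs 8}
```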
### :heart: Community contributions (Thank you!)
### :sparkles: New features

View file

@ -25,6 +25,7 @@
[app.features.fdata :as feat.fdata] [app.features.fdata :as feat.fdata]
[app.loggers.audit :as-alias audit] [app.loggers.audit :as-alias audit]
[app.loggers.webhooks :as-alias webhooks] [app.loggers.webhooks :as-alias webhooks]
[app.storage :as sto]
[app.util.blob :as blob] [app.util.blob :as blob]
[app.util.pointer-map :as pmap] [app.util.pointer-map :as pmap]
[app.util.time :as dt] [app.util.time :as dt]
@ -148,17 +149,30 @@
features (assoc :features (db/decode-pgarray features #{})) features (assoc :features (db/decode-pgarray features #{}))
data (assoc :data (blob/decode data)))) data (assoc :data (blob/decode data))))
(defn get-file (defn decode-file
[cfg file-id] "A general purpose file decoding function that resolves all external
(db/run! cfg (fn [{:keys [::db/conn] :as cfg}] pointers, runs migrations and returns a plain vanilla file map"
(binding [pmap/*load-fn* (partial feat.fdata/load-pointer cfg file-id)] [cfg {:keys [id] :as file}]
(when-let [file (db/get* conn :file {:id file-id} (binding [pmap/*load-fn* (partial feat.fdata/load-pointer cfg id)]
{::db/remove-deleted false})] (-> (feat.fdata/resolve-file-data cfg file)
(let [file (feat.fdata/resolve-file-data cfg file)] (update :features db/decode-pgarray #{})
(-> file (update :data blob/decode)
(decode-row)
(update :data feat.fdata/process-pointers deref) (update :data feat.fdata/process-pointers deref)
(update :data feat.fdata/process-objects (partial into {}))))))))) (update :data feat.fdata/process-objects (partial into {}))
(update :data assoc :id id)
(fmg/migrate-file))))
(defn get-file
"Get file, resolve all features and apply migrations.
Useful when you plan to apply massive or non-surgical
operations on a file, because it removes the overhead of lazy fetching
and decoding."
[cfg file-id & {:as opts}]
(db/run! cfg (fn [{:keys [::db/conn] :as cfg}]
(some->> (db/get* conn :file {:id file-id}
(assoc opts ::db/remove-deleted false))
(decode-file cfg)))))
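As an illustrative sketch (not part of this patch), the two new helpers compose like
this; it assumes `cfg` is a started system map containing `::db/pool`, and `file-id` is a
placeholder:

```clojure
(require '[app.binfile.common :as bfc])

;; Fetch the file with external pointers resolved and migrations
;; applied, then inspect the decoded page index.
(let [file (bfc/get-file cfg file-id)]
  (-> file :data :pages-index keys))
```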
(defn clean-file-features (defn clean-file-features
[file] [file]
@ -306,19 +320,15 @@
file)) file))
(defn get-file-media (def sql:get-file-media
[cfg {:keys [data id] :as file}] "SELECT * FROM file_media_object WHERE id = ANY(?)")
(db/run! cfg (fn [{:keys [::db/conn]}]
(let [ids (cfh/collect-used-media data)
ids (db/create-array conn "uuid" ids)
sql (str "SELECT * FROM file_media_object WHERE id = ANY(?)")]
;; We assoc the file-id again to the file-media-object row (defn get-file-media
;; because there are cases that used objects refer to other [cfg {:keys [data] :as file}]
;; files and we need to ensure in the exportation process that (db/run! cfg (fn [{:keys [::db/conn]}]
;; all ids matches (let [used (cfh/collect-used-media data)
(->> (db/exec! conn [sql ids]) used (db/create-array conn "uuid" used)]
(mapv #(assoc % :file-id id))))))) (db/exec! conn [sql:get-file-media used])))))
(def ^:private sql:get-team-files-ids (def ^:private sql:get-team-files-ids
"SELECT f.id FROM file AS f "SELECT f.id FROM file AS f
@ -431,27 +441,69 @@
(update :colors relink-colors) (update :colors relink-colors)
(d/without-nils)))))) (d/without-nils))))))
(defn- upsert-file! (defn- encode-file
[conn file] [{:keys [::db/conn] :as cfg} {:keys [id] :as file}]
(let [sql (str "INSERT INTO file (id, project_id, name, revn, version, is_shared, data, created_at, modified_at) " (let [file (if (contains? (:features file) "fdata/objects-map")
"VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?) " (feat.fdata/enable-objects-map file)
"ON CONFLICT (id) DO UPDATE SET data=?, version=?")] file)
(db/exec-one! conn [sql
(:id file)
(:project-id file)
(:name file)
(:revn file)
(:version file)
(:is-shared file)
(:data file)
(:created-at file)
(:modified-at file)
(:data file)
(:version file)])))
(defn persist-file! file (if (contains? (:features file) "fdata/pointer-map")
"Applies all the final validations and perist the file." (binding [pmap/*tracked* (pmap/create-tracked)]
[{:keys [::db/conn ::timestamp] :as cfg} {:keys [id] :as file}] (let [file (feat.fdata/enable-pointer-map file)]
(feat.fdata/persist-pointers! cfg id)
file))
file)]
(-> file
(update :features db/encode-pgarray conn "text")
(update :data blob/encode))))
(defn- file->params
[file]
(let [params {:has-media-trimmed (:has-media-trimmed file)
:ignore-sync-until (:ignore-sync-until file)
:project-id (:project-id file)
:features (:features file)
:name (:name file)
:is-shared (:is-shared file)
:version (:version file)
:data (:data file)
:id (:id file)
:deleted-at (:deleted-at file)
:created-at (:created-at file)
:modified-at (:modified-at file)
:revn (:revn file)
:vern (:vern file)}]
(-> (d/without-nils params)
(assoc :data-backend nil)
(assoc :data-ref-id nil))))
(defn insert-file!
"Insert a new file into the database table"
[{:keys [::db/conn] :as cfg} file]
(let [params (-> (encode-file cfg file)
(file->params))]
(db/insert! conn :file params {::db/return-keys true})))
(defn update-file!
"Update an existing file on the database."
[{:keys [::db/conn ::sto/storage] :as cfg} {:keys [id] :as file}]
(let [file (encode-file cfg file)
params (-> (file->params file)
(dissoc :id))]
;; If the file was already offloaded, we touch the underlying storage
;; object to properly trigger the storage-gc-touched task
(when (feat.fdata/offloaded? file)
(some->> (:data-ref-id file) (sto/touch-object! storage)))
(db/update! conn :file params {:id id} {::db/return-keys true})))
(defn save-file!
"Applies all the final validations and perist the file, binfile
specific, should not be used outside of binfile domain"
[{:keys [::timestamp] :as cfg} file]
(dm/assert! (dm/assert!
"expected valid timestamp" "expected valid timestamp"
@ -468,38 +520,17 @@
(set/union features) (set/union features)
;; We never want to store ;; We never want to store
;; frontend-only features on file ;; frontend-only features on file
(set/difference cfeat/frontend-only-features)))))) (set/difference cfeat/frontend-only-features))))))]
(when (contains? cf/flags :file-schema-validation)
_ (when (contains? cf/flags :file-schema-validation)
(fval/validate-file-schema! file)) (fval/validate-file-schema! file))
_ (when (contains? cf/flags :soft-file-schema-validation) (when (contains? cf/flags :soft-file-schema-validation)
(let [result (ex/try! (fval/validate-file-schema! file))] (let [result (ex/try! (fval/validate-file-schema! file))]
(when (ex/exception? result) (when (ex/exception? result)
(l/error :hint "file schema validation error" :cause result)))) (l/error :hint "file schema validation error" :cause result))))
file (if (contains? (:features file) "fdata/objects-map") (insert-file! cfg file)))
(feat.fdata/enable-objects-map file)
file)
file (if (contains? (:features file) "fdata/pointer-map")
(binding [pmap/*tracked* (pmap/create-tracked)]
(let [file (feat.fdata/enable-pointer-map file)]
(feat.fdata/persist-pointers! cfg id)
file))
file)
params (-> file
(update :features db/encode-pgarray conn "text")
(update :data blob/encode))]
(if (::overwrite cfg)
(upsert-file! conn params)
(db/insert! conn :file params ::db/return-keys false))
file))
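A minimal sketch of how the new `insert-file!`/`update-file!` split might be used from a
transaction; the `rename-file!` helper is hypothetical and assumes `cfg` carries
`::db/pool` and `::sto/storage`, mirroring the `db/tx-run!` pattern used elsewhere in
this patch:

```clojure
(require '[app.binfile.common :as bfc]
         '[app.db :as db])

;; Hypothetical helper, not part of the patch: decode, modify and
;; persist a file; db/tx-run! provides the ::db/conn that update-file!
;; destructures from its cfg argument.
(defn rename-file!
  [cfg file-id new-name]
  (db/tx-run! cfg (fn [cfg]
                    (let [file (bfc/get-file cfg file-id)]
                      (bfc/update-file! cfg (assoc file :name new-name))))))
```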
(defn register-pending-migrations (defn register-pending-migrations
"All features that are enabled and requires explicit migration are "All features that are enabled and requires explicit migration are

View file

@ -424,21 +424,15 @@
(s/def ::bfc/profile-id ::us/uuid) (s/def ::bfc/profile-id ::us/uuid)
(s/def ::bfc/project-id ::us/uuid) (s/def ::bfc/project-id ::us/uuid)
(s/def ::bfc/input io/input-stream?) (s/def ::bfc/input io/input-stream?)
(s/def ::overwrite? (s/nilable ::us/boolean))
(s/def ::ignore-index-errors? (s/nilable ::us/boolean)) (s/def ::ignore-index-errors? (s/nilable ::us/boolean))
;; FIXME: replace with schema
(s/def ::read-import-options (s/def ::read-import-options
(s/keys :req [::db/pool ::sto/storage ::bfc/project-id ::bfc/profile-id ::bfc/input] (s/keys :req [::db/pool ::sto/storage ::bfc/project-id ::bfc/profile-id ::bfc/input]
:opt [::overwrite? ::ignore-index-errors?])) :opt [::ignore-index-errors?]))
(defn read-import! (defn read-import!
"Do the importation of the specified resource in penpot custom binary "Do the importation of the specified resource in penpot custom binary
format. There are some options for customize the importation format."
behavior:
`::bfc/overwrite`: if true, instead of creating new files and remapping id references,
it reuses all ids and updates existing objects; defaults to `false`."
[{:keys [::bfc/input ::bfc/timestamp] :or {timestamp (dt/now)} :as options}] [{:keys [::bfc/input ::bfc/timestamp] :or {timestamp (dt/now)} :as options}]
(dm/assert! (dm/assert!
@ -509,8 +503,7 @@
thumbnails)) thumbnails))
(defmethod read-section :v1/files (defmethod read-section :v1/files
[{:keys [::db/conn ::bfc/input ::bfc/project-id ::bfc/overwrite ::bfc/name] :as system}] [{:keys [::bfc/input ::bfc/project-id ::bfc/name] :as system}]
(doseq [[idx expected-file-id] (d/enumerate (-> bfc/*state* deref :files))] (doseq [[idx expected-file-id] (d/enumerate (-> bfc/*state* deref :files))]
(let [file (read-obj! input) (let [file (read-obj! input)
media (read-obj! input) media (read-obj! input)
@ -568,10 +561,7 @@
(vswap! bfc/*state* update :pending-to-migrate (fnil conj []) [feature file-id'])) (vswap! bfc/*state* update :pending-to-migrate (fnil conj []) [feature file-id']))
(l/dbg :hint "create file" :id (str file-id') ::l/sync? true) (l/dbg :hint "create file" :id (str file-id') ::l/sync? true)
(bfc/persist-file! system file) (bfc/save-file! system file)
(when overwrite
(db/delete! conn :file-thumbnail {:file-id file-id'}))
file-id')))) file-id'))))
@ -600,7 +590,7 @@
::l/sync? true)))))) ::l/sync? true))))))
(defmethod read-section :v1/sobjects (defmethod read-section :v1/sobjects
[{:keys [::db/conn ::bfc/input ::bfc/overwrite ::bfc/timestamp] :as cfg}] [{:keys [::db/conn ::bfc/input ::bfc/timestamp] :as cfg}]
(let [storage (sto/resolve cfg) (let [storage (sto/resolve cfg)
ids (read-obj! input) ids (read-obj! input)
thumb? (into #{} (map :media-id) (:thumbnails @bfc/*state*))] thumb? (into #{} (map :media-id) (:thumbnails @bfc/*state*))]
@ -653,8 +643,7 @@
(-> item (-> item
(assoc :file-id file-id) (assoc :file-id file-id)
(d/update-when :media-id bfc/lookup-index) (d/update-when :media-id bfc/lookup-index)
(d/update-when :thumbnail-id bfc/lookup-index)) (d/update-when :thumbnail-id bfc/lookup-index))))))
{::db/on-conflict-do-nothing? overwrite}))))
(doseq [item (:thumbnails @bfc/*state*)] (doseq [item (:thumbnails @bfc/*state*)]
(let [item (update item :media-id bfc/lookup-index)] (let [item (update item :media-id bfc/lookup-index)]
@ -663,8 +652,7 @@
:media-id (str (:media-id item)) :media-id (str (:media-id item))
:object-id (:object-id item) :object-id (:object-id item)
::l/sync? true) ::l/sync? true)
(db/insert! conn :file-tagged-object-thumbnail item (db/insert! conn :file-tagged-object-thumbnail item)))))
{::db/on-conflict-do-nothing? overwrite})))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; HIGH LEVEL API ;; HIGH LEVEL API

View file

@ -297,7 +297,7 @@
(set/difference (:features file)))] (set/difference (:features file)))]
(vswap! bfc/*state* update :pending-to-migrate (fnil conj []) [feature (:id file)])) (vswap! bfc/*state* update :pending-to-migrate (fnil conj []) [feature (:id file)]))
(bfc/persist-file! cfg file)) (bfc/save-file! cfg file))
(doseq [thumbnail (read-seq cfg :file-object-thumbnail file-id)] (doseq [thumbnail (read-seq cfg :file-object-thumbnail file-id)]
(let [thumbnail (-> thumbnail (let [thumbnail (-> thumbnail

View file

@ -28,6 +28,7 @@
[app.common.uuid :as uuid] [app.common.uuid :as uuid]
[app.config :as cf] [app.config :as cf]
[app.db :as db] [app.db :as db]
[app.db.sql :as-alias sql]
[app.storage :as sto] [app.storage :as sto]
[app.storage.impl :as sto.impl] [app.storage.impl :as sto.impl]
[app.util.events :as events] [app.util.events :as events]
@ -212,9 +213,9 @@
(throw (IllegalArgumentException. (throw (IllegalArgumentException.
"the `include-libraries` and `embed-assets` are mutally excluding options"))) "the `include-libraries` and `embed-assets` are mutally excluding options")))
(let [detach? (and (not embed-assets) (not include-libraries)) (let [detach? (and (not embed-assets) (not include-libraries))]
file (bfc/get-file cfg file-id)] (db/tx-run! cfg (fn [cfg]
(cond-> file (cond-> (bfc/get-file cfg file-id {::sql/for-update true})
detach? detach?
(-> (ctf/detach-external-references file-id) (-> (ctf/detach-external-references file-id)
(dissoc :libraries)) (dissoc :libraries))
@ -223,7 +224,7 @@
(update :data #(bfc/embed-assets cfg % file-id)) (update :data #(bfc/embed-assets cfg % file-id))
:always :always
(bfc/clean-file-features)))) (bfc/clean-file-features))))))
(defn- resolve-extension (defn- resolve-extension
[mtype] [mtype]
@ -262,6 +263,7 @@
(defn- export-file (defn- export-file
[{:keys [::file-id ::output] :as cfg}] [{:keys [::file-id ::output] :as cfg}]
(let [file (get-file cfg file-id) (let [file (get-file cfg file-id)
media (->> (bfc/get-file-media cfg file) media (->> (bfc/get-file-media cfg file)
(map (fn [media] (map (fn [media]
(dissoc media :file-id)))) (dissoc media :file-id))))
@ -684,7 +686,7 @@
:plugin-data plugin-data})) :plugin-data plugin-data}))
(defn- import-file (defn- import-file
[{:keys [::db/conn ::bfc/project-id ::file-id ::file-name] :as cfg}] [{:keys [::bfc/project-id ::file-id ::file-name] :as cfg}]
(let [file-id' (bfc/lookup-index file-id) (let [file-id' (bfc/lookup-index file-id)
file (read-file cfg) file (read-file cfg)
media (read-file-media cfg) media (read-file-media cfg)
@ -734,10 +736,7 @@
(->> file (->> file
(bfc/register-pending-migrations cfg) (bfc/register-pending-migrations cfg)
(bfc/persist-file! cfg)) (bfc/save-file! cfg))
(when (::bfc/overwrite cfg)
(db/delete! conn :file-thumbnail {:file-id file-id'}))
file-id'))) file-id')))
@ -832,8 +831,7 @@
:file-id (str (:file-id params)) :file-id (str (:file-id params))
::l/sync? true) ::l/sync? true)
(db/insert! conn :file-media-object params (db/insert! conn :file-media-object params))))
{::db/on-conflict-do-nothing? (::bfc/overwrite cfg)}))))
(defn- import-file-thumbnails (defn- import-file-thumbnails
[{:keys [::db/conn] :as cfg}] [{:keys [::db/conn] :as cfg}]
@ -853,8 +851,7 @@
:media-id (str media-id) :media-id (str media-id)
::l/sync? true) ::l/sync? true)
(db/insert! conn :file-tagged-object-thumbnail params (db/insert! conn :file-tagged-object-thumbnail params))))
{::db/on-conflict-do-nothing? (::bfc/overwrite cfg)}))))
(defn- import-files (defn- import-files
[{:keys [::bfc/timestamp ::bfc/input ::bfc/name] :or {timestamp (dt/now)} :as cfg}] [{:keys [::bfc/timestamp ::bfc/input ::bfc/name] :or {timestamp (dt/now)} :as cfg}]

View file

@ -413,7 +413,7 @@
(def ^:private default-plan-opts (def ^:private default-plan-opts
(-> default-opts (-> default-opts
(assoc :fetch-size 1) (assoc :fetch-size 1000)
(assoc :concurrency :read-only) (assoc :concurrency :read-only)
(assoc :cursors :close) (assoc :cursors :close)
(assoc :result-type :forward-only))) (assoc :result-type :forward-only)))
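The default plan fetch size is raised from 1 to 1000 rows, so call sites that stream rows
carrying large data blobs now pass an explicit override, as the file-gc hunks later in
this diff do. A hedged sketch of that pattern (`conn`, `sql:get-snapshots` and `file-id`
are assumed to be in scope):

```clojure
(require '[app.db :as db])

;; Override the new 1000-row default when every row carries a large
;; file data blob, so only one row is held in memory at a time.
(->> (db/plan conn [sql:get-snapshots file-id] {:fetch-size 1})
     (transduce (map :id) conj #{}))
```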

View file

@ -25,7 +25,6 @@
[app.loggers.webhooks :as-alias webhooks] [app.loggers.webhooks :as-alias webhooks]
[app.metrics :as-alias mtx] [app.metrics :as-alias mtx]
[app.metrics.definition :as-alias mdef] [app.metrics.definition :as-alias mdef]
[app.migrations.v2 :as migrations.v2]
[app.msgbus :as-alias mbus] [app.msgbus :as-alias mbus]
[app.redis :as-alias rds] [app.redis :as-alias rds]
[app.rpc :as-alias rpc] [app.rpc :as-alias rpc]
@ -609,11 +608,6 @@
(nrepl/start-server :bind "0.0.0.0" :port 6064 :handler cider-nrepl-handler)) (nrepl/start-server :bind "0.0.0.0" :port 6064 :handler cider-nrepl-handler))
(start) (start)
(when (contains? cf/flags :v2-migration)
(px/sleep 5000)
(migrations.v2/migrate app.main/system))
(deref p)) (deref p))
(catch Throwable cause (catch Throwable cause
(ex/print-throwable cause) (ex/print-throwable cause)

View file

@ -0,0 +1,49 @@
;; This Source Code Form is subject to the terms of the Mozilla Public
;; License, v. 2.0. If a copy of the MPL was not distributed with this
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
;;
;; Copyright (c) KALEIDOS INC
(ns app.migrations.media-refs
"A media refs migration fixer script"
(:require
[app.common.exceptions :as ex]
[app.common.logging :as l]
[app.common.pprint]
[app.srepl.fixes.media-refs :refer [process-file]]
[app.srepl.main :as srepl]
[clojure.edn :as edn]))
(def ^:private required-services
[:app.storage.s3/backend
:app.storage.fs/backend
:app.storage/storage
:app.metrics/metrics
:app.db/pool
:app.worker/executor])
(defn -main
[& [options]]
(try
(let [config-var (requiring-resolve 'app.main/system-config)
start-var (requiring-resolve 'app.main/start-custom)
stop-var (requiring-resolve 'app.main/stop)
config (select-keys @config-var required-services)]
(start-var config)
(let [options (if (string? options)
(ex/ignoring (edn/read-string options))
{})]
(l/inf :hint "executing media-refs migration" :options options)
(srepl/process-files! process-file options))
(stop-var)
(System/exit 0))
(catch Throwable cause
(ex/print-throwable cause)
(flush)
(System/exit -1))))
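As a hedged alternative to the CLI entry point above, the same fix could plausibly be run
from an attached backend REPL, assuming the system is already started and
`process-files!` accepts an options map as its second argument (as `-main` suggests):

```clojure
(require '[app.srepl.fixes.media-refs :refer [process-file]]
         '[app.srepl.main :as srepl])

;; REPL sketch, not part of the patch: apply the media-refs fix to all
;; files with a single worker.
(srepl/process-files! process-file {:max-jobs 1})
```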

View file

@ -1,103 +0,0 @@
;; This Source Code Form is subject to the terms of the Mozilla Public
;; License, v. 2.0. If a copy of the MPL was not distributed with this
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
;;
;; Copyright (c) KALEIDOS INC
(ns app.migrations.v2
(:require
[app.common.exceptions :as ex]
[app.common.logging :as l]
[app.db :as db]
[app.features.components-v2 :as feat]
[app.setup :as setup]
[app.util.time :as dt]))
(def ^:private sql:get-teams
"SELECT id, features,
row_number() OVER (ORDER BY created_at DESC) AS rown
FROM team
WHERE deleted_at IS NULL
AND (not (features @> '{components/v2}') OR features IS NULL)
ORDER BY created_at DESC")
(defn- get-teams
[conn]
(->> (db/cursor conn [sql:get-teams] {:chunk-size 1})
(map feat/decode-row)))
(defn- migrate-teams
[{:keys [::db/conn] :as system}]
;; Allow long running transaction for this connection
(db/exec-one! conn ["SET LOCAL idle_in_transaction_session_timeout = 0"])
;; Do not allow other migration running in the same time
(db/xact-lock! conn 0)
;; Run teams migration
(run! (fn [{:keys [id rown]}]
(try
(-> (assoc system ::db/rollback false)
(feat/migrate-team! id
:rown rown
:label "v2-migration"
:validate? false
:skip-on-graphics-error? true))
(catch Throwable _
(swap! feat/*stats* update :errors (fnil inc 0))
(l/wrn :hint "error on migrating team (skiping)"))))
(get-teams conn))
(setup/set-prop! system :v2-migrated true))
(defn migrate
[system]
(let [tpoint (dt/tpoint)
stats (atom {})
migrated? (setup/get-prop system :v2-migrated false)]
(when-not migrated?
(l/inf :hint "v2 migration started")
(try
(binding [feat/*stats* stats]
(db/tx-run! system migrate-teams))
(let [stats (deref stats)
elapsed (dt/format-duration (tpoint))]
(l/inf :hint "v2 migration finished"
:files (:processed-files stats)
:teams (:processed-teams stats)
:errors (:errors stats)
:elapsed elapsed))
(catch Throwable cause
(l/err :hint "error on aplying v2 migration" :cause cause))))))
(def ^:private required-services
[[:app.main/assets :app.storage.s3/backend]
[:app.main/assets :app.storage.fs/backend]
:app.storage/storage
:app.db/pool
:app.setup/props
:app.svgo/optimizer
:app.metrics/metrics
:app.migrations/migrations
:app.http.client/client])
(defn -main
[& _args]
(try
(let [config-var (requiring-resolve 'app.main/system-config)
start-var (requiring-resolve 'app.main/start-custom)
stop-var (requiring-resolve 'app.main/stop)
system-var (requiring-resolve 'app.main/system)
config (select-keys @config-var required-services)]
(start-var config)
(migrate @system-var)
(stop-var)
(System/exit 0))
(catch Throwable cause
(ex/print-throwable cause)
(flush)
(System/exit -1))))

View file

@ -119,6 +119,7 @@
(sv/defmethod ::update-file (sv/defmethod ::update-file
{::climit/id [[:update-file/by-profile ::rpc/profile-id] {::climit/id [[:update-file/by-profile ::rpc/profile-id]
[:update-file/global]] [:update-file/global]]
::webhooks/event? true ::webhooks/event? true
::webhooks/batch-timeout (dt/duration "2m") ::webhooks/batch-timeout (dt/duration "2m")
::webhooks/batch-key (webhooks/key-fn ::rpc/profile-id :id) ::webhooks/batch-key (webhooks/key-fn ::rpc/profile-id :id)

View file

@ -57,7 +57,7 @@
;; Process and persist file ;; Process and persist file
(let [file (->> (bfc/process-file file) (let [file (->> (bfc/process-file file)
(bfc/persist-file! cfg))] (bfc/save-file! cfg))]
;; The file profile creation is optional, so when no profile is ;; The file profile creation is optional, so when no profile is
;; present (when this function is called from profile less ;; present (when this function is called from profile less
@ -86,7 +86,7 @@
fmeds)] fmeds)]
(db/insert! conn :file-media-object params ::db/return-keys false)) (db/insert! conn :file-media-object params ::db/return-keys false))
file))) (bfc/decode-file cfg file))))
(def ^:private (def ^:private
schema:duplicate-file schema:duplicate-file

View file

@ -272,6 +272,7 @@
(reduce +))) (reduce +)))
num-missing-slots (count-slots-data (:data file))] num-missing-slots (count-slots-data (:data file))]
(when (pos? num-missing-slots) (when (pos? num-missing-slots)
(l/trc :info (str "Shapes with children with the same swap slot: " num-missing-slots) :file-id (str (:id file)))) (l/trc :info (str "Shapes with children with the same swap slot: " num-missing-slots) :file-id (str (:id file))))
file)) file))

View file

@ -0,0 +1,43 @@
;; This Source Code Form is subject to the terms of the Mozilla Public
;; License, v. 2.0. If a copy of the MPL was not distributed with this
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
;;
;; Copyright (c) KALEIDOS INC
(ns app.srepl.fixes.media-refs
(:require
[app.binfile.common :as bfc]
[app.common.files.helpers :as cfh]
[app.srepl.helpers :as h]))
(defn- collect-media-refs
"Given a file data map, returns all media references used on pages of
the file data; components and other parts of the file data are not analyzed"
[data]
(let [xform-collect
(comp
(map val)
(mapcat (fn [object]
(->> (cfh/collect-shape-media-refs object)
(map (fn [id]
{:object-id (:id object)
:id id}))))))
process-page
(fn [result page-id container]
(let [xform (comp xform-collect (map #(assoc % :page-id page-id)))]
(into result xform (:objects container))))]
(reduce-kv process-page [] (:pages-index data))))
(defn update-all-media-references
"Check if all file media object references are in plance and create
new ones if some of them are missing; this prevents strange bugs on
having the same media object associated with two different files;"
[cfg file]
(let [media-refs (collect-media-refs (:data file))]
(bfc/update-media-references! cfg file media-refs)))
(defn process-file
[file _opts]
(let [system (h/get-current-system)]
(update-all-media-references system file)))
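To make the data flow concrete, here is a sketch (placeholder ids, not from the patch) of
the shape `collect-media-refs` produces for a page with one shape carrying an image fill;
this is the vector that `update-all-media-references` hands to
`bfc/update-media-references!`:

```clojure
;; One descriptor per media reference found in page objects.
(let [page-id  (java.util.UUID/randomUUID)   ;; page containing the shape
      shape-id (java.util.UUID/randomUUID)   ;; shape with the image fill
      media-id (java.util.UUID/randomUUID)]  ;; referenced file-media-object
  [{:page-id   page-id
    :object-id shape-id
    :id        media-id}])
```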

View file

@ -8,17 +8,14 @@
"A main namespace for server repl." "A main namespace for server repl."
(:refer-clojure :exclude [parse-uuid]) (:refer-clojure :exclude [parse-uuid])
(:require (:require
[app.binfile.common :as bfc]
[app.common.data :as d] [app.common.data :as d]
[app.common.files.migrations :as fmg]
[app.common.files.validate :as cfv] [app.common.files.validate :as cfv]
[app.db :as db] [app.db :as db]
[app.features.components-v2 :as feat.comp-v2] [app.features.components-v2 :as feat.comp-v2]
[app.features.fdata :as feat.fdata]
[app.main :as main] [app.main :as main]
[app.rpc.commands.files :as files] [app.rpc.commands.files :as files]
[app.rpc.commands.files-snapshot :as fsnap] [app.rpc.commands.files-snapshot :as fsnap]))
[app.util.blob :as blob]
[app.util.pointer-map :as pmap]))
(def ^:dynamic *system* nil) (def ^:dynamic *system* nil)
@ -27,6 +24,10 @@
(locking println (locking println
(apply println params))) (apply println params)))
(defn get-current-system
[]
*system*)
(defn parse-uuid (defn parse-uuid
[v] [v]
(if (string? v) (if (string? v)
@ -35,49 +36,21 @@
(defn get-file (defn get-file
"Get the migrated data of one file." "Get the migrated data of one file."
([id] (get-file (or *system* main/system) id nil)) ([id]
([system id & {:keys [raw?] :as opts}] (get-file (or *system* main/system) id))
([system id]
(db/run! system (db/run! system
(fn [system] (fn [system]
(let [file (files/get-file system id :migrate? false)] (->> (bfc/get-file system id ::db/for-update true)
(if raw? (bfc/decode-file system))))))
file
(binding [pmap/*load-fn* (partial feat.fdata/load-pointer system id)]
(-> file
(update :data feat.fdata/process-pointers deref)
(update :data feat.fdata/process-objects (partial into {}))
(fmg/migrate-file)))))))))
(defn update-file! (defn get-raw-file
[system {:keys [id] :as file}] "Get the migrated data of one file."
(let [conn (db/get-connection system) ([id] (get-raw-file (or *system* main/system) id))
file (if (contains? (:features file) "fdata/objects-map") ([system id]
(feat.fdata/enable-objects-map file) (db/run! system
file) (fn [system]
(files/get-file system id :migrate? false)))))
file (if (contains? (:features file) "fdata/pointer-map")
(binding [pmap/*tracked* (pmap/create-tracked)]
(let [file (feat.fdata/enable-pointer-map file)]
(feat.fdata/persist-pointers! system id)
file))
file)
file (-> file
(update :features db/encode-pgarray conn "text")
(update :data blob/encode))]
(db/update! conn :file
{:revn (:revn file)
:data (:data file)
:version (:version file)
:features (:features file)
:deleted-at (:deleted-at file)
:created-at (:created-at file)
:modified-at (:modified-at file)
:data-backend nil
:data-ref-id nil
:has-media-trimmed false}
{:id (:id file)})))
(defn update-team! (defn update-team!
[system {:keys [id] :as team}] [system {:keys [id] :as team}]
@ -90,14 +63,6 @@
{:id id}) {:id id})
team)) team))
(defn get-raw-file
"Get the migrated data of one file."
([id] (get-raw-file (or *system* main/system) id))
([system id]
(db/run! system
(fn [system]
(files/get-file system id :migrate? false)))))
(defn reset-file-data! (defn reset-file-data!
"Hardcode replace of the data of one file." "Hardcode replace of the data of one file."
[system id data] [system id data]
@ -162,16 +127,12 @@
(defn process-file! (defn process-file!
[system file-id update-fn & {:keys [label validate? with-libraries?] :or {validate? true} :as opts}] [system file-id update-fn & {:keys [label validate? with-libraries?] :or {validate? true} :as opts}]
(when (string? label)
(fsnap/create-file-snapshot! system nil file-id label))
(let [conn (db/get-connection system) (let [conn (db/get-connection system)
file (get-file system file-id opts) file (bfc/get-file system file-id ::db/for-update true)
libs (when with-libraries? libs (when with-libraries?
(->> (files/get-file-libraries conn file-id) (->> (files/get-file-libraries conn file-id)
(into [file] (map (fn [{:keys [id]}] (into [file] (map (fn [{:keys [id]}]
(get-file system id)))) (bfc/get-file system id))))
(d/index-by :id))) (d/index-by :id)))
file' (if with-libraries? file' (if with-libraries?
@ -180,7 +141,12 @@
(when (and (some? file') (when (and (some? file')
(not (identical? file file'))) (not (identical? file file')))
(when validate? (cfv/validate-file-schema! file')) (when validate?
(cfv/validate-file-schema! file'))
(when (string? label)
(fsnap/create-file-snapshot! system nil file-id label))
(let [file' (update file' :revn inc)] (let [file' (update file' :revn inc)]
(update-file! system file') (bfc/update-file! system file')
true)))) true))))

View file

@ -10,8 +10,8 @@
file is eligible to be garbage collected after some period of file is eligible to be garbage collected after some period of
inactivity (the default threshold is 72h)." inactivity (the default threshold is 72h)."
(:require (:require
[app.binfile.common :as bfc]
[app.common.files.helpers :as cfh] [app.common.files.helpers :as cfh]
[app.common.files.migrations :as fmg]
[app.common.files.validate :as cfv] [app.common.files.validate :as cfv]
[app.common.logging :as l] [app.common.logging :as l]
[app.common.thumbnails :as thc] [app.common.thumbnails :as thc]
@ -22,28 +22,25 @@
[app.db :as db] [app.db :as db]
[app.features.fdata :as feat.fdata] [app.features.fdata :as feat.fdata]
[app.storage :as sto] [app.storage :as sto]
[app.util.blob :as blob]
[app.util.pointer-map :as pmap]
[app.util.time :as dt] [app.util.time :as dt]
[app.worker :as wrk] [app.worker :as wrk]
[integrant.core :as ig])) [integrant.core :as ig]))
(declare ^:private get-file) (declare get-file)
(declare ^:private decode-file)
(declare ^:private persist-file!)
(def ^:private sql:get-snapshots (def sql:get-snapshots
"SELECT f.file_id AS id, "SELECT fc.file_id AS id,
f.data, fc.id AS snapshot_id,
f.revn, fc.data,
f.version, fc.revn,
f.features, fc.version,
f.data_backend, fc.features,
f.data_ref_id fc.data_backend,
FROM file_change AS f fc.data_ref_id
WHERE f.file_id = ? FROM file_change AS fc
AND f.data IS NOT NULL WHERE fc.file_id = ?
ORDER BY f.created_at ASC") AND fc.data IS NOT NULL
ORDER BY fc.created_at ASC")
(def ^:private sql:mark-file-media-object-deleted (def ^:private sql:mark-file-media-object-deleted
"UPDATE file_media_object "UPDATE file_media_object
@ -51,7 +48,7 @@
WHERE file_id = ? AND id != ALL(?::uuid[]) WHERE file_id = ? AND id != ALL(?::uuid[])
RETURNING id") RETURNING id")
(def ^:private xf:collect-used-media (def xf:collect-used-media
(comp (comp
(map :data) (map :data)
(mapcat cfh/collect-used-media))) (mapcat cfh/collect-used-media)))
@ -60,10 +57,10 @@
"Performs the garbage collection of file media objects." "Performs the garbage collection of file media objects."
[{:keys [::db/conn] :as cfg} {:keys [id] :as file}] [{:keys [::db/conn] :as cfg} {:keys [id] :as file}]
(let [xform (comp (let [xform (comp
(map (partial decode-file cfg)) (map (partial bfc/decode-file cfg))
xf:collect-used-media) xf:collect-used-media)
used (->> (db/plan conn [sql:get-snapshots id]) used (->> (db/plan conn [sql:get-snapshots id] {:fetch-size 1})
(transduce xform conj #{})) (transduce xform conj #{}))
used (into used xf:collect-used-media [file]) used (into used xf:collect-used-media [file])
@ -149,8 +146,6 @@
AND f.deleted_at IS null AND f.deleted_at IS null
ORDER BY f.modified_at ASC") ORDER BY f.modified_at ASC")
(def ^:private xf:map-id (map :id))
(defn- get-used-components (defn- get-used-components
"Given a file and a set of components marked for deletion, return a "Given a file and a set of components marked for deletion, return a
filtered set of component ids that are still un use" filtered set of component ids that are still un use"
@ -169,15 +164,15 @@
(mapcat (partial get-used-components deleted-components file-id)) (mapcat (partial get-used-components deleted-components file-id))
used-remote used-remote
(->> (db/plan conn [sql:get-files-for-library file-id]) (->> (db/plan conn [sql:get-files-for-library file-id] {:fetch-size 1})
(transduce (comp (map (partial decode-file cfg)) xform) conj #{})) (transduce (comp (map (partial bfc/decode-file cfg)) xform) conj #{}))
used-local used-local
(into #{} xform [file]) (into #{} xform [file])
unused unused
(transduce xf:map-id disj (transduce bfc/xf-map-id disj
(into #{} xf:map-id deleted-components) (into #{} bfc/xf-map-id deleted-components)
(concat used-remote used-local)) (concat used-remote used-local))
file file
@ -204,31 +199,32 @@
(def ^:private xf:collect-pointers (def ^:private xf:collect-pointers
(comp (map :data) (comp (map :data)
(map blob/decode)
(mapcat feat.fdata/get-used-pointer-ids))) (mapcat feat.fdata/get-used-pointer-ids)))
(defn- clean-data-fragments! (defn- clean-fragments!
[{:keys [::db/conn]} {:keys [id] :as file}] [{:keys [::db/conn]} {:keys [id] :as file}]
(let [used (into #{} xf:collect-pointers [file]) (let [used (into #{} xf:collect-pointers [file])
unused (let [ids (db/create-array conn "uuid" used)] unused (->> (db/exec! conn [sql:mark-deleted-data-fragments id
(->> (db/exec! conn [sql:mark-deleted-data-fragments id ids]) (db/create-array conn "uuid" used)])
(into #{} (map :id))))] (into #{} bfc/xf-map-id))]
(l/dbg :hint "clean" :rel "file-data-fragment" :file-id (str id) :total (count unused)) (l/dbg :hint "clean" :rel "file-data-fragment" :file-id (str id) :total (count unused))
(doseq [id unused] (doseq [id unused]
(l/trc :hint "mark deleted" (l/trc :hint "mark deleted"
:rel "file-data-fragment" :rel "file-data-fragment"
:id (str id) :id (str id)
:file-id (str id))))) :file-id (str id)))
file))
(defn- clean-media! (defn- clean-media!
[cfg file] [cfg file]
(let [file (->> file (let [file (->> file
(clean-deleted-components! cfg)
(clean-file-media! cfg) (clean-file-media! cfg)
(clean-file-thumbnails! cfg) (clean-file-thumbnails! cfg)
(clean-file-object-thumbnails! cfg) (clean-file-object-thumbnails! cfg))]
(clean-deleted-components! cfg))]
(cfv/validate-file-schema! file) (cfv/validate-file-schema! file)
file)) file))
@ -249,65 +245,27 @@
FOR UPDATE FOR UPDATE
SKIP LOCKED") SKIP LOCKED")
(defn- get-file (defn get-file
[{:keys [::db/conn ::min-age ::file-id]}] [{:keys [::db/conn ::min-age]} file-id]
(let [min-age (if min-age
(db/interval min-age)
(db/interval 0))]
(->> (db/exec! conn [sql:get-file min-age file-id]) (->> (db/exec! conn [sql:get-file min-age file-id])
(first))) (first))))
(defn- decode-file
[cfg {:keys [id] :as file}]
(binding [pmap/*load-fn* (partial feat.fdata/load-pointer cfg id)]
(-> (feat.fdata/resolve-file-data cfg file)
(update :features db/decode-pgarray #{})
(update :data blob/decode)
(update :data feat.fdata/process-pointers deref)
(update :data feat.fdata/process-objects (partial into {}))
(update :data assoc :id id)
(fmg/migrate-file))))
(defn- persist-file!
[{:keys [::db/conn ::sto/storage] :as cfg} {:keys [id] :as file}]
(let [file (if (contains? (:features file) "fdata/objects-map")
(feat.fdata/enable-objects-map file)
file)
file (if (contains? (:features file) "fdata/pointer-map")
(binding [pmap/*tracked* (pmap/create-tracked)]
(let [file (feat.fdata/enable-pointer-map file)]
(feat.fdata/persist-pointers! cfg id)
file))
file)
file (-> file
(update :features db/encode-pgarray conn "text")
(update :data blob/encode))]
;; If file was already offloaded, we touch the underlying storage
;; object for properly trigger storage-gc-touched task
(when (feat.fdata/offloaded? file)
(some->> (:data-ref-id file) (sto/touch-object! storage)))
(db/update! conn :file
{:has-media-trimmed true
:features (:features file)
:version (:version file)
:data (:data file)
:data-backend nil
:data-ref-id nil}
{:id id}
{::db/return-keys true})))
(defn- process-file! (defn- process-file!
[cfg] [cfg file-id]
(if-let [file (get-file cfg)] (if-let [file (get-file cfg file-id)]
(let [file (decode-file cfg file) (let [file (->> file
file (clean-media! cfg file) (bfc/decode-file cfg)
file (persist-file! cfg file)] (clean-media! cfg)
(clean-data-fragments! cfg file) (clean-fragments! cfg))
file (assoc file :has-media-trimmed true)]
(bfc/update-file! cfg file)
true) true)
(do (do
(l/dbg :hint "skip" :file-id (str (::file-id cfg))) (l/dbg :hint "skip" :file-id (str file-id))
false))) false)))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
@ -324,15 +282,15 @@
(fn [{:keys [props] :as task}] (fn [{:keys [props] :as task}]
(let [min-age (dt/duration (or (:min-age props) (let [min-age (dt/duration (or (:min-age props)
(cf/get-deletion-delay))) (cf/get-deletion-delay)))
file-id (get props :file-id)
cfg (-> cfg cfg (-> cfg
(assoc ::db/rollback (:rollback? props)) (assoc ::db/rollback (:rollback? props))
(assoc ::file-id (:file-id props)) (assoc ::min-age min-age))]
(assoc ::min-age (db/interval min-age)))]
(try (try
(db/tx-run! cfg (fn [{:keys [::db/conn] :as cfg}] (db/tx-run! cfg (fn [{:keys [::db/conn] :as cfg}]
(let [cfg (update cfg ::sto/storage sto/configure conn) (let [cfg (update cfg ::sto/storage sto/configure conn)
processed? (process-file! cfg)] processed? (process-file! cfg file-id)]
(when (and processed? (contains? cf/flags :tiered-file-data-storage)) (when (and processed? (contains? cf/flags :tiered-file-data-storage))
(wrk/submit! (-> cfg (wrk/submit! (-> cfg
(assoc ::wrk/task :offload-file-data) (assoc ::wrk/task :offload-file-data)

View file

@ -16,6 +16,7 @@
[app.db.sql :as sql] [app.db.sql :as sql]
[app.http :as http] [app.http :as http]
[app.rpc :as-alias rpc] [app.rpc :as-alias rpc]
[app.rpc.commands.files :as files]
[app.storage :as sto] [app.storage :as sto]
[app.util.time :as dt] [app.util.time :as dt]
[backend-tests.helpers :as th] [backend-tests.helpers :as th]
@ -1827,5 +1828,3 @@
(t/is (= (:id file-2) (:file-id (get rows 0)))) (t/is (= (:id file-2) (:file-id (get rows 0))))
(t/is (nil? (:deleted-at (get rows 0))))))) (t/is (nil? (:deleted-at (get rows 0)))))))

View file

@ -6,6 +6,7 @@
(ns backend-tests.rpc-management-test (ns backend-tests.rpc-management-test
(:require (:require
[app.binfile.common :as bfc]
[app.common.features :as cfeat] [app.common.features :as cfeat]
[app.common.pprint :as pp] [app.common.pprint :as pp]
[app.common.types.shape :as cts] [app.common.types.shape :as cts]
@ -82,7 +83,6 @@
;; Check that result is correct ;; Check that result is correct
(t/is (nil? (:error out))) (t/is (nil? (:error out)))
(let [result (:result out)] (let [result (:result out)]
;; Check that the returned result is a file but has different id ;; Check that the returned result is a file but has different id
;; and different name. ;; and different name.
(t/is (= "file 1 (copy)" (:name result))) (t/is (= "file 1 (copy)" (:name result)))

View file

@ -571,7 +571,8 @@
:code :schema-validation :code :schema-validation
:hint (str/ffmt "invalid file data structure found on file '%'" id) :hint (str/ffmt "invalid file data structure found on file '%'" id)
:file-id id :file-id id
::sm/explain (get-fdata-explain data)))) ::sm/explain (get-fdata-explain data)))
file)
(defn validate-file! (defn validate-file!
"Validate full referential integrity and semantic coherence on file data. "Validate full referential integrity and semantic coherence on file data.

View file

@ -202,7 +202,8 @@
position position
components-v2 components-v2
(cond-> {} (cond-> {}
force-frame? (assoc :force-frame-id frame-id))) force-frame?
(assoc :force-frame-id frame-id)))
first-shape first-shape
(cond-> (first new-shapes) (cond-> (first new-shapes)

View file

@ -347,11 +347,14 @@
Clone the shapes of the component, generating new names and ids, and Clone the shapes of the component, generating new names and ids, and
linking each new shape to the corresponding one of the linking each new shape to the corresponding one of the
component. Place the new instance coordinates in the given component. Place the new instance coordinates in the given
position." position.
([container component library-data position components-v2]
(make-component-instance container component library-data position components-v2 {}))
([container component library-data position components-v2 WARNING: This process does not remap media references (on fills, strokes, ...); that is
delegated to an async process on the backend side that checks unreferenced shapes and
automatically creates correct references."
([page component library-data position components-v2]
(make-component-instance page component library-data position components-v2 {}))
([page component library-data position components-v2
{:keys [main-instance? force-id force-frame-id keep-ids?] {:keys [main-instance? force-id force-frame-id keep-ids?]
:or {main-instance? false force-id nil force-frame-id nil keep-ids? false}}] :or {main-instance? false force-id nil force-frame-id nil keep-ids? false}}]
(let [component-page (when components-v2 (let [component-page (when components-v2
@ -367,7 +370,7 @@
orig-pos (gpt/point (:x component-shape) (:y component-shape)) orig-pos (gpt/point (:x component-shape) (:y component-shape))
delta (gpt/subtract position orig-pos) delta (gpt/subtract position orig-pos)
objects (:objects container) objects (:objects page)
unames (volatile! (cfh/get-used-names objects)) unames (volatile! (cfh/get-used-names objects))
component-children component-children
@ -384,7 +387,7 @@
(nil? (get component-children (:id %))) (nil? (get component-children (:id %)))
;; We must avoid that destiny frame is inside a copy ;; We must avoid that destiny frame is inside a copy
(not (ctk/in-component-copy? %)))})) (not (ctk/in-component-copy? %)))}))
frame (get-shape container frame-id) frame (get-shape page frame-id)
component-frame (get-component-shape objects frame {:allow-main? true}) component-frame (get-component-shape objects frame {:allow-main? true})
ids-map (volatile! {}) ids-map (volatile! {})
@ -437,7 +440,7 @@
:force-id force-id :force-id force-id
:keep-ids? keep-ids? :keep-ids? keep-ids?
:frame-id frame-id :frame-id frame-id
:dest-objects (:objects container)) :dest-objects (:objects page))
;; Fix empty parent-id and remap all grid cells to the new ids. ;; Fix empty parent-id and remap all grid cells to the new ids.
remap-ids remap-ids

View file

@ -233,23 +233,26 @@ This will fetch the latest images. When you do <code class="language-bash">docke
**Important: Upgrade from version 1.x to 2.0**
The migration to version 2.0, due to the incorporation of the new v2 components, includes
an additional process that runs automatically as soon as the application starts. If your
on-premises Penpot instance contains a significant amount of data (such as hundreds of
penpot files, especially those utilizing SVG components and assets extensively), this
process may take a few minutes.

In some cases, such as when the script encounters an error, it may be convenient to run
the process manually. To do this, you can disable the automatic migration process using
the <code class="language-bash">disable-v2-migration</code> flag in the <code
class="language-bash">PENPOT_FLAGS</code> environment variable. You can then execute the
migration process manually with the following command:
```bash
docker exec -ti <container-name-or-id> ./run.sh app.migrations.v2
```
**IMPORTANT:** this script should only be executed when upgrading from 1.19.x to 2.0.x.
Running it on Penpot versions 2.1 or later will not work correctly, and the script itself
has been removed as of version 2.4.3.
### Backup Penpot