Merge remote-tracking branch 'upstream/develop' into develop

Andrés Moya 2024-11-07 13:49:11 +01:00
commit e9c32841a9
1048 changed files with 33672 additions and 54976 deletions

View file

@@ -1,6 +1,124 @@
-version: 2
+version: 2.1
 jobs:
-  build:
+  test-common:
docker:
- image: penpotapp/devenv:latest
working_directory: ~/repo
resource_class: medium+
environment:
JAVA_OPTS: -Xmx4g -Xms100m -XX:+UseSerialGC
NODE_OPTIONS: --max-old-space-size=4096
steps:
- checkout
# Download and cache dependencies
- restore_cache:
keys:
- v1-dependencies-{{ checksum "common/deps.edn"}}
- run:
name: "fmt check & linter"
working_directory: "./common"
command: |
yarn install
yarn run fmt:clj:check
yarn run lint:clj
- run:
name: "JVM tests"
working_directory: "./common"
command: |
clojure -M:dev:test
- save_cache:
paths:
- ~/.m2
key: v1-dependencies-{{ checksum "common/deps.edn"}}
test-frontend:
docker:
- image: penpotapp/devenv:latest
working_directory: ~/repo
resource_class: medium+
environment:
JAVA_OPTS: -Xmx4g -Xms100m -XX:+UseSerialGC
NODE_OPTIONS: --max-old-space-size=4096
steps:
- checkout
# Download and cache dependencies
- restore_cache:
keys:
- v1-dependencies-{{ checksum "frontend/deps.edn"}}
- run:
name: "prepopulate linter cache"
working_directory: "./common"
command: |
yarn install
yarn run lint:clj
- run:
name: "fmt check & linter"
working_directory: "./frontend"
command: |
yarn install
yarn run fmt:clj:check
yarn run fmt:js:check
yarn run lint:scss
yarn run lint:clj
- run:
name: "unit tests"
working_directory: "./frontend"
command: |
yarn install
yarn run test
- save_cache:
paths:
- ~/.m2
key: v1-dependencies-{{ checksum "frontend/deps.edn"}}
test-integration:
docker:
- image: penpotapp/devenv:latest
working_directory: ~/repo
resource_class: large
environment:
JAVA_OPTS: -Xmx6g -Xms2g
NODE_OPTIONS: --max-old-space-size=4096
steps:
- checkout
# Download and cache dependencies
- restore_cache:
keys:
- v1-dependencies-{{ checksum "frontend/deps.edn"}}
- run:
name: "integration tests"
working_directory: "./frontend"
command: |
yarn install
yarn run build:app:assets
yarn run build:app
yarn run build:app:libs
yarn run playwright install --with-deps chromium
yarn run test:e2e -x --workers=4
test-backend:
docker:
- image: penpotapp/devenv:latest
- image: cimg/postgres:14.5
@@ -20,101 +138,27 @@ jobs:
     steps:
       - checkout
       # Download and cache dependencies
       - restore_cache:
           keys:
-            - v1-dependencies-{{ checksum "backend/deps.edn" }}-{{ checksum "frontend/deps.edn"}}-{{ checksum "common/deps.edn"}}
+            - v1-dependencies-{{ checksum "backend/deps.edn" }}
-            # fallback to using the latest cache if no exact match is found
-            - v1-dependencies-
-      - run: cd .clj-kondo && cat config.edn
-      - run: cat .cljfmt.edn
-      - run: clj-kondo --version
       - run:
-          name: "backend fmt check"
+          name: "prepopulate linter cache"
+          working_directory: "./common"
+          command: |
+            yarn install
+            yarn run lint:clj
+      - run:
+          name: "fmt check & linter"
           working_directory: "./backend"
           command: |
             yarn install
             yarn run fmt:clj:check
-      - run:
-          name: "exporter fmt check"
-          working_directory: "./exporter"
-          command: |
-            yarn install
-            yarn run fmt:clj:check
-      - run:
-          name: "common fmt check"
-          working_directory: "./common"
-          command: |
-            yarn install
-            yarn run fmt:clj:check
-      - run:
-          name: "frontend fmt check"
-          working_directory: "./frontend"
-          command: |
-            yarn install
-            yarn run fmt:clj:check
-            yarn run fmt:js:check
-      - run:
-          name: "common linter check"
-          working_directory: "./common"
-          command: |
-            yarn install
             yarn run lint:clj
       - run:
-          name: "frontend linter check"
+          name: "tests"
-          working_directory: "./frontend"
-          command: |
-            yarn install
-            yarn run lint:scss
-            yarn run lint:clj
-      - run:
-          name: "backend linter check"
-          working_directory: "./backend"
-          command: |
-            yarn install
-            yarn run lint:clj
-      - run:
-          name: "exporter linter check"
-          working_directory: "./exporter"
-          command: |
-            yarn install
-            yarn run lint:clj
-      - run:
-          name: "common tests"
-          working_directory: "./common"
-          command: |
-            yarn test
-            clojure -M:dev:test
-      - run:
-          name: "frontend tests"
-          working_directory: "./frontend"
-          command: |
-            yarn install
-            yarn test
-      - run:
-          name: "frontend integration tests"
-          working_directory: "./frontend"
-          command: |
-            yarn install
-            yarn run build:app:assets
-            clojure -M:dev:shadow-cljs release main
-            yarn playwright install --with-deps chromium
-            yarn test:e2e
-      - run:
-          name: "backend tests"
           working_directory: "./backend"
           command: |
             clojure -M:dev:test
@@ -128,4 +172,43 @@ jobs:
       - save_cache:
           paths:
             - ~/.m2
-          key: v1-dependencies-{{ checksum "backend/deps.edn" }}-{{ checksum "frontend/deps.edn"}}-{{ checksum "common/deps.edn"}}
+          key: v1-dependencies-{{ checksum "backend/deps.edn" }}
test-exporter:
docker:
- image: penpotapp/devenv:latest
working_directory: ~/repo
resource_class: medium+
environment:
JAVA_OPTS: -Xmx4g -Xms100m -XX:+UseSerialGC
NODE_OPTIONS: --max-old-space-size=4096
steps:
- checkout
- run:
name: "prepopulate linter cache"
working_directory: "./common"
command: |
yarn install
yarn run lint:clj
- run:
name: "fmt check & linter"
working_directory: "./exporter"
command: |
yarn install
yarn run fmt:clj:check
yarn run lint:clj
workflows:
penpot:
jobs:
- test-frontend
- test-integration
- test-backend
- test-common
- test-exporter

.gitignore (vendored, 2 lines changed)
View file

@@ -74,3 +74,5 @@ node_modules
/playwright-report/
/blob-report/
/playwright/.cache/
/frontend/vendor/draft-js/.yarn/
/frontend/vendor/hljs/.yarn

View file

@@ -6,14 +6,30 @@
### :boom: Breaking changes & Deprecations
- Use [nginx-unprivileged](https://hub.docker.com/r/nginxinc/nginx-unprivileged) as base image for Penpot's frontend docker image. Now all the docker images runs with the same unprivileged user (penpot). Because of that, the default NGINX listen port now is 8080, instead of 80, so you will have to modify your infrastructure to apply this change.
### :heart: Community contributions (Thank you!)
### :sparkles: New features
- Viewer role for team members [Taiga #1056 & #6590](https://tree.taiga.io/project/penpot/us/1056 & https://tree.taiga.io/project/penpot/us/6590)
- File history versions management [Taiga](https://tree.taiga.io/project/penpot/us/187?milestone=411120)
- Rename selected layer via keyboard shortcut and context menu option [Taiga #8882](https://tree.taiga.io/project/penpot/us/8882)
### :bug: Bugs fixed
## 2.3.1
### :bug: Bugs fixed
- Fix unexpected issue on interaction between plugins sandbox and
internal impl of promise
## 2.3.0
### :rocket: Epics and highlights
@@ -60,6 +76,9 @@
- Fix percent calculation on grid layout tracks [Github #4688](https://github.com/penpot/penpot/issues/4688)
- Fix problem with caps and inner shadows [Github #4517](https://github.com/penpot/penpot/issues/4517)
- Fix problem with horizontal/vertical lines and shadows [Github #4516](https://github.com/penpot/penpot/issues/4516)
- Fix problem with layers overflowing panel [Taiga #9021](https://tree.taiga.io/project/penpot/issue/9021)
- Fix in workspace you can manage rulers on view mode [Taiga #8966](https://tree.taiga.io/project/penpot/issue/8966)
- Fix problem with swap components in grid layout [Taiga #9066](https://tree.taiga.io/project/penpot/issue/9066)
## 2.2.1
@@ -159,6 +178,7 @@ time being.
- Fix problem with comments max length [Taiga #8778](https://tree.taiga.io/project/penpot/issue/8778)
- Fix copy/paste images in Safari [Taiga #8771](https://tree.taiga.io/project/penpot/issue/8771)
- Fix swap when the copy is the only child of a group [#5075](https://github.com/penpot/penpot/issues/5075)
- Fix file builder hangs when exporting [#5099](https://github.com/penpot/penpot/issues/5099)
## 2.1.5

View file

@@ -8,10 +8,12 @@
 <img alt="penpot header image" src="https://penpot.app/images/readme/github-light-mode.png">
 </picture>
-<p align="center"><a href="https://www.mozilla.org/en-US/MPL/2.0" rel="nofollow"><img src="https://camo.githubusercontent.com/3fcf3d6b678ea15fde3cf7d6af0e242160366282d62a7c182d83a50bfee3f45e/68747470733a2f2f696d672e736869656c64732e696f2f62616467652f4d504c2d322e302d626c75652e737667" alt="License: MPL-2.0" data-canonical-src="https://img.shields.io/badge/MPL-2.0-blue.svg" style="max-width:100%;"></a>
-<a href="https://gitter.im/penpot/community" rel="nofollow"><img src="https://camo.githubusercontent.com/5b0aecb33434f82a7b158eab7247544235ada0cf7eeb9ce8e52562dd67f614b7/68747470733a2f2f6261646765732e6769747465722e696d2f736572656e6f2d78797a2f636f6d6d756e6974792e737667" alt="Gitter" data-canonical-src="https://badges.gitter.im/sereno-xyz/community.svg" style="max-width:100%;"></a>
-<a href="https://tree.taiga.io/project/penpot/" title="Managed with Taiga.io" rel="nofollow"><img src="https://camo.githubusercontent.com/4a1d1112f0272e3393b1e8da312ff4435418e9e2eb4c0964881e3680f90a653c/68747470733a2f2f696d672e736869656c64732e696f2f62616467652f6d616e61676564253230776974682d54414947412e696f2d3730396631342e737667" alt="Managed with Taiga.io" data-canonical-src="https://img.shields.io/badge/managed%20with-TAIGA.io-709f14.svg" style="max-width:100%;"></a>
-<a href="https://gitpod.io/#https://github.com/penpot/penpot" rel="nofollow"><img src="https://camo.githubusercontent.com/daadb4894128d1e19b72d80236f5959f1f2b47f9fe081373f3246131f0189f6c/68747470733a2f2f696d672e736869656c64732e696f2f62616467652f476974706f642d72656164792d2d746f2d2d636f64652d626c75653f6c6f676f3d676974706f64" alt="Gitpod ready-to-code" data-canonical-src="https://img.shields.io/badge/Gitpod-ready--to--code-blue?logo=gitpod" style="max-width:100%;"></a></p>
+<p align="center">
+<a href="https://www.mozilla.org/en-US/MPL/2.0" rel="nofollow"><img alt="License: MPL-2.0" src="https://img.shields.io/badge/MPL-2.0-blue.svg" style="max-width:100%;"></a>
+<a href="https://community.penpot.app" rel="nofollow"><img alt="Penpot Community" src="https://img.shields.io/discourse/posts?server=https%3A%2F%2Fcommunity.penpot.app" style="max-width:100%;"></a>
+<a href="https://tree.taiga.io/project/penpot/" title="Managed with Taiga.io" rel="nofollow"><img alt="Managed with Taiga.io" src="https://img.shields.io/badge/managed%20with-TAIGA.io-709f14.svg" style="max-width:100%;"></a>
+<a href="https://gitpod.io/#https://github.com/penpot/penpot" rel="nofollow"><img alt="Gitpod ready-to-code" src="https://img.shields.io/badge/Gitpod-ready--to--code-blue?logo=gitpod" style="max-width:100%;"></a>
+</p>
 <p align="center">
 <a href="https://penpot.app/"><b>Website</b></a>
@@ -58,6 +60,9 @@ Penpot's latest [huge release 2.0](https://penpot.app/dev-diaries), takes the
Penpot expresses designs as code. Designers can do their best work and see it will be beautifully implemented by developers in a two-way collaboration.
### Plugin system ###
[Penpot plugins](https://penpot.app/penpothub/plugins) let you expand the platform's capabilities, give you the flexibility to integrate it with other apps, and design custom solutions.
### Designed for developers ###
Penpot was built to serve both designers and developers and create a fluid design-code process. You have the choice to enjoy real-time collaboration or play "solo".

View file

@@ -3,10 +3,10 @@
 :deps
 {penpot/common {:local/root "../common"}
-  org.clojure/clojure {:mvn/version "1.12.0-alpha12"}
+  org.clojure/clojure {:mvn/version "1.12.0"}
  org.clojure/tools.namespace {:mvn/version "1.5.0"}
-  com.github.luben/zstd-jni {:mvn/version "1.5.6-3"}
+  com.github.luben/zstd-jni {:mvn/version "1.5.6-6"}
  io.prometheus/simpleclient {:mvn/version "0.16.0"}
  io.prometheus/simpleclient_hotspot {:mvn/version "0.16.0"}
@@ -17,33 +17,33 @@
  io.prometheus/simpleclient_httpserver {:mvn/version "0.16.0"}
-  io.lettuce/lettuce-core {:mvn/version "6.3.2.RELEASE"}
+  io.lettuce/lettuce-core {:mvn/version "6.4.0.RELEASE"}
  java-http-clj/java-http-clj {:mvn/version "0.4.3"}
  funcool/yetti
-  {:git/tag "v10.0"
-   :git/sha "520613f"
+  {:git/tag "v11.4"
+   :git/sha "ce50d42"
   :git/url "https://github.com/funcool/yetti.git"
   :exclusions [org.slf4j/slf4j-api]}
-  com.github.seancorfield/next.jdbc {:mvn/version "1.3.939"}
+  com.github.seancorfield/next.jdbc {:mvn/version "1.3.955"}
-  metosin/reitit-core {:mvn/version "0.7.0"}
+  metosin/reitit-core {:mvn/version "0.7.2"}
-  nrepl/nrepl {:mvn/version "1.1.2"}
+  nrepl/nrepl {:mvn/version "1.3.0"}
-  cider/cider-nrepl {:mvn/version "0.48.0"}
+  cider/cider-nrepl {:mvn/version "0.50.2"}
-  org.postgresql/postgresql {:mvn/version "42.7.3"}
+  org.postgresql/postgresql {:mvn/version "42.7.4"}
-  org.xerial/sqlite-jdbc {:mvn/version "3.46.0.0"}
+  org.xerial/sqlite-jdbc {:mvn/version "3.46.1.3"}
-  com.zaxxer/HikariCP {:mvn/version "5.1.0"}
+  com.zaxxer/HikariCP {:mvn/version "6.0.0"}
  io.whitfin/siphash {:mvn/version "2.0.0"}
  buddy/buddy-hashers {:mvn/version "2.0.167"}
-  buddy/buddy-sign {:mvn/version "3.5.351"}
+  buddy/buddy-sign {:mvn/version "3.6.1-359"}
  com.github.ben-manes.caffeine/caffeine {:mvn/version "3.1.8"}
-  org.jsoup/jsoup {:mvn/version "1.17.2"}
+  org.jsoup/jsoup {:mvn/version "1.18.1"}
  org.im4java/im4java
  {:git/tag "1.4.0-penpot-2"
   :git/sha "e2b3e16"
@@ -58,7 +58,7 @@
  ;; Pretty Print specs
  pretty-spec/pretty-spec {:mvn/version "0.1.4"}
-  software.amazon.awssdk/s3 {:mvn/version "2.25.63"}
+  software.amazon.awssdk/s3 {:mvn/version "2.28.26"}
 }
 :paths ["src" "resources" "target/classes"]
@@ -74,7 +74,7 @@
 :build
 {:extra-deps
-  {io.github.clojure/tools.build {:git/tag "v0.10.3" :git/sha "15ead66"}}
+  {io.github.clojure/tools.build {:git/tag "v0.10.5" :git/sha "2a21b7a"}}
 :ns-default build}
 :test

View file

@@ -1,15 +1,15 @@
[{:id "wireframing-kit"
  :name "Wireframe library"
-  :file-uri "https://github.com/penpot/penpot-files/raw/main/wireframing-kit.penpot"}
+  :file-uri "https://github.com/penpot/penpot-files/raw/refs/heads/main/Wireframing%20kit%20v1.1.penpot"}
 {:id "prototype-examples"
  :name "Prototype template"
-  :file-uri "https://github.com/penpot/penpot-files/raw/main/prototype-examples.penpot"}
+  :file-uri "https://github.com/penpot/penpot-files/raw/refs/heads/main/Prototype%20examples%20v1.1.penpot"}
 {:id "plants-app"
  :name "UI mockup example"
  :file-uri "https://github.com/penpot/penpot-files/raw/main/Plants-app.penpot"}
 {:id "penpot-design-system"
  :name "Design system example"
-  :file-uri "https://github.com/penpot/penpot-files/raw/main/Penpot-Design-system.penpot"}
+  :file-uri "https://github.com/penpot/penpot-files/raw/refs/heads/main/Penpot%20-%20Design%20System%20v2.1.penpot"}
 {:id "tutorial-for-beginners"
  :name "Tutorial for beginners"
  :file-uri "https://github.com/penpot/penpot-files/raw/main/tutorial-for-beginners.penpot"}
@@ -36,7 +36,7 @@
  :file-uri "https://github.com/penpot/penpot-files/raw/main/Open%20Color%20Scheme%20(v1.9.1).penpot"}
 {:id "flex-layout-playground"
  :name "Flex Layout Playground"
-  :file-uri "https://github.com/penpot/penpot-files/raw/main/Flex%20Layout%20Playground.penpot"}
+  :file-uri "https://github.com/penpot/penpot-files/raw/refs/heads/main/Flex%20Layout%20Playground%20v2.0.penpot"}
 {:id "welcome"
  :name "Welcome"
  :file-uri "https://github.com/penpot/penpot-files/raw/main/welcome.penpot"}]

View file

@@ -7,7 +7,7 @@ Debug Main Page
{% block content %}
<nav>
  <div class="title">
-    <h1>ADMIN DEBUG INTERFACE</h1>
+    <h1>ADMIN DEBUG INTERFACE (VERSION: {{version}})</h1>
  </div>
</nav>
<main class="dashboard">

View file

@@ -1,5 +1,5 @@
<?xml version="1.0" encoding="UTF-8"?>
-<Configuration status="info" monitorInterval="30">
+<Configuration status="fatal" monitorInterval="30">
  <Appenders>
    <Console name="console" target="SYSTEM_OUT">
      <PatternLayout pattern="[%d{YYYY-MM-dd HH:mm:ss.SSS}] %level{length=1} %logger{36} - %msg%n"

View file

@@ -1,5 +1,5 @@
<?xml version="1.0" encoding="UTF-8"?>
-<Configuration status="info" monitorInterval="30">
+<Configuration status="fatal" monitorInterval="30">
  <Appenders>
    <Console name="console" target="SYSTEM_OUT">
      <PatternLayout pattern="[%d{YYYY-MM-dd HH:mm:ss.SSS}] %level{length=1} %logger{36} - %msg%n"

View file

@@ -1,5 +1,5 @@
<?xml version="1.0" encoding="UTF-8"?>
-<Configuration status="info" monitorInterval="30">
+<Configuration status="fatal" monitorInterval="30">
  <Appenders>
    <Console name="console" target="SYSTEM_OUT">
      <PatternLayout pattern="[%d{YYYY-MM-dd HH:mm:ss.SSS}] %level{length=1} %logger{36} - %msg%n"

View file

@@ -1,5 +1,5 @@
<?xml version="1.0" encoding="UTF-8"?>
-<Configuration status="info" monitorInterval="60">
+<Configuration status="fatal" monitorInterval="60">
  <Appenders>
    <Console name="console" target="SYSTEM_OUT">
      <PatternLayout pattern="[%d{YYYY-MM-dd HH:mm:ss.SSS}] %level{length=1} %logger{36} - %msg%n"

View file

@@ -7,6 +7,8 @@ set -ex
rm -rf target;
mkdir -p target/classes;
mkdir -p target/dist;
mkdir -p target/dist/scripts;
echo "$CURRENT_VERSION" > target/classes/version.txt;
cp ../CHANGES.md target/classes/changelog.md;
@@ -15,6 +17,7 @@ mv target/penpot.jar target/dist/penpot.jar
cp resources/log4j2.xml target/dist/log4j2.xml
cp scripts/run.template.sh target/dist/run.sh;
cp scripts/manage.py target/dist/manage.py
cp scripts/svgo-cli.js target/dist/scripts/;
chmod +x target/dist/run.sh;
chmod +x target/dist/manage.py

View file

@@ -68,6 +68,7 @@ export AWS_SECRET_ACCESS_KEY=penpot-devenv
export PENPOT_OBJECTS_STORAGE_BACKEND=s3
export PENPOT_OBJECTS_STORAGE_S3_ENDPOINT=http://minio:9000
export PENPOT_OBJECTS_STORAGE_S3_BUCKET=penpot
export PENPOT_OBJECTS_STORAGE_FS_DIRECTORY="assets"
export OPTIONS="
       -A:jmx-remote -A:dev \

backend/scripts/svgo-cli.js (new file, 214 lines added)

File diff suppressed because one or more lines are too long

View file

@@ -35,8 +35,8 @@
  [clojure.spec.alpha :as s]
  [cuerdas.core :as str]
  [integrant.core :as ig]
-  [ring.request :as rreq]
-  [ring.response :as-alias rres]))
+  [yetti.request :as yreq]
+  [yetti.response :as-alias yres]))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; HELPERS
@@ -492,8 +492,8 @@
(defn- redirect-response
  [uri]
-  {::rres/status 302
-   ::rres/headers {"location" (str uri)}})
+  {::yres/status 302
+   ::yres/headers {"location" (str uri)}})
(defn- redirect-with-error
  ([error] (redirect-with-error error nil))
@@ -598,7 +598,7 @@
(defn- get-external-session-id
  [request]
-  (let [session-id (rreq/get-header request "x-external-session-id")]
+  (let [session-id (yreq/get-header request "x-external-session-id")]
    (when (string? session-id)
      (if (or (> (count session-id) 256)
              (= session-id "null")
@@ -618,8 +618,8 @@
        state (tokens/generate (::setup/props cfg)
                               (d/without-nils params))
        uri (build-auth-uri cfg state)]
-    {::rres/status 200
-     ::rres/body {:redirect-uri uri}}))
+    {::yres/status 200
+     ::yres/body {:redirect-uri uri}}))
(defn- callback-handler
  [{:keys [::provider] :as cfg} request]

View file

@@ -37,6 +37,21 @@
(def ^:dynamic *state* nil)
(def ^:dynamic *options* nil)
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; DEFAULTS
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; Threshold in MiB when we pass from using
;; in-memory byte-array's to use temporal files.
(def temp-file-threshold
(* 1024 1024 2))
;; A maximum (storage) object size allowed: 100MiB
(def ^:const max-object-size
(* 1024 1024 100))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(def xf-map-id
  (map :id))
@@ -56,6 +71,13 @@
(def conj-vec
  (fnil conj []))
(defn initial-state
[]
{:storage-objects #{}
:files #{}
:teams #{}
:projects #{}})
(defn collect-storage-objects
  [state items]
  (update state :storage-objects into xf-map-media-id items))
@@ -87,6 +109,8 @@
  attrs))
(defn update-index
([coll]
(update-index {} coll identity))
  ([index coll]
   (update-index index coll identity))
  ([index coll attr]
@@ -114,6 +138,16 @@
  [cfg project-id]
  (db/get cfg :project {:id project-id}))
(def ^:private sql:get-teams
"SELECT t.* FROM team WHERE id = ANY(?)")
(defn get-teams
[cfg ids]
(let [conn (db/get-connection cfg)
ids (db/create-array conn "uuid" ids)]
(->> (db/exec! conn [sql:get-teams ids])
(map decode-row))))
(defn get-team
  [cfg team-id]
  (-> (db/get cfg :team {:id team-id})
@@ -167,9 +201,10 @@
(defn get-file-object-thumbnails
  "Return all file object thumbnails for a given file."
  [cfg file-id]
-  (db/query cfg :file-tagged-object-thumbnail
-            {:file-id file-id
-             :deleted-at nil}))
+  (->> (db/query cfg :file-tagged-object-thumbnail
+                 {:file-id file-id
+                  :deleted-at nil})
+       (not-empty)))
(defn get-file-thumbnail
  "Return the thumbnail for the specified file-id"
@@ -224,26 +259,26 @@
  (->> (db/exec! conn [sql ids])
       (mapv #(assoc % :file-id id)))))))
-(def ^:private sql:get-team-files
+(def ^:private sql:get-team-files-ids
  "SELECT f.id FROM file AS f
    JOIN project AS p ON (p.id = f.project_id)
   WHERE p.team_id = ?")
-(defn get-team-files
+(defn get-team-files-ids
  "Get a set of file ids for the specified team-id"
  [{:keys [::db/conn]} team-id]
-  (->> (db/exec! conn [sql:get-team-files team-id])
+  (->> (db/exec! conn [sql:get-team-files-ids team-id])
       (into #{} xf-map-id)))
(def ^:private sql:get-team-projects
-  "SELECT p.id FROM project AS p
+  "SELECT p.* FROM project AS p
   WHERE p.team_id = ?
     AND p.deleted_at IS NULL")
(defn get-team-projects
  "Get a set of project ids for the team"
-  [{:keys [::db/conn]} team-id]
-  (->> (db/exec! conn [sql:get-team-projects team-id])
+  [cfg team-id]
+  (->> (db/exec! cfg [sql:get-team-projects team-id])
       (into #{} xf-map-id)))
(def ^:private sql:get-project-files
@@ -257,6 +292,10 @@
  (->> (db/exec! conn [sql:get-project-files project-id])
       (into #{} xf-map-id)))
(defn remap-thumbnail-object-id
[object-id file-id]
(str/replace-first object-id #"^(.*?)/" (str file-id "/")))
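;; Illustrative usage sketch (not part of this commit): the object-id prefix
;; (the old file id) is swapped for the new file id; the ids and path
;; segments below are made up.
(comment
  (remap-thumbnail-object-id
   "11111111-1111-1111-1111-111111111111/page-1/frame-1"
   #uuid "22222222-2222-2222-2222-222222222222")
  ;; => "22222222-2222-2222-2222-222222222222/page-1/frame-1"
  )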
(defn- relink-shapes
  "A function responsible to analyze all file data and
  replace the old :component-file reference with the new
@@ -339,6 +378,12 @@
  data
  library-ids)))
(defn disable-database-timeouts!
[cfg]
(let [conn (db/get-connection cfg)]
(db/exec-one! conn ["SET LOCAL idle_in_transaction_session_timeout = 0"])
(db/exec-one! conn ["SET CONSTRAINTS ALL DEFERRED"])))
(defn- fix-version
  [file]
  (let [file (fmg/fix-version file)]
@@ -432,6 +477,20 @@
  file))
(defn register-pending-migrations
"All features that are enabled and requires explicit migration are
added to the state for a posterior migration step."
[cfg {:keys [id features] :as file}]
(doseq [feature (-> (::features cfg)
(set/difference cfeat/no-migration-features)
(set/difference cfeat/backend-only-features)
(set/difference features))]
(vswap! *state* update :pending-to-migrate (fnil conj []) [feature id]))
file)
(defn apply-pending-migrations!
  "Apply alredy registered pending migrations to files"
  [cfg]

View file

@@ -49,15 +49,6 @@
(set! *warn-on-reflection* true)
-;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
-;; DEFAULTS
-;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
-;; Threshold in MiB when we pass from using
-;; in-memory byte-array's to use temporal files.
-(def temp-file-threshold
-  (* 1024 1024 2))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; LOW LEVEL STREAM IO API
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
@@ -65,11 +56,6 @@
(def ^:const buffer-size (:xnio/buffer-size yt/defaults))
(def ^:const penpot-magic-number 800099563638710213)
-;; A maximum (storage) object size allowed: 100MiB
-(def ^:const max-object-size
-  (* 1024 1024 100))
(def ^:dynamic *position* nil)
(defn get-mark
@@ -236,7 +222,7 @@
(defn copy-stream!
  [^OutputStream output ^InputStream input ^long size]
-  (let [written (io/copy! input output :size size)]
+  (let [written (io/copy input output :size size)]
    (l/trace :fn "copy-stream!" :position @*position* :size size :written written ::l/sync? true)
    (swap! *position* + written)
    written))
@@ -258,18 +244,18 @@
        p (tmp/tempfile :prefix "penpot.binfile.")]
    (assert-mark m :stream)
-    (when (> s max-object-size)
+    (when (> s bfc/max-object-size)
      (ex/raise :type :validation
                :code :max-file-size-reached
                :hint (str/ffmt "unable to import storage object with size % bytes" s)))
-    (if (> s temp-file-threshold)
+    (if (> s bfc/temp-file-threshold)
      (with-open [^OutputStream output (io/output-stream p)]
-        (let [readed (io/copy! input output :offset 0 :size s)]
+        (let [readed (io/copy input output :offset 0 :size s)]
          (l/trace :fn "read-stream*!" :expected s :readed readed :position @*position* ::l/sync? true)
          (swap! *position* + readed)
          [s p]))
-      [s (io/read-as-bytes input :size s)])))
+      [s (io/read input :size s)])))
(defmacro assert-read-label!
  [input expected-label]
@@ -381,10 +367,12 @@
            ::l/sync? true)
    (doseq [item media]
-      (l/dbg :hint "write penpot file media object" :id (:id item) ::l/sync? true))
+      (l/dbg :hint "write penpot file media object"
+             :id (:id item) ::l/sync? true))
    (doseq [item thumbnails]
-      (l/dbg :hint "write penpot file object thumbnail" :media-id (str (:media-id item)) ::l/sync? true))
+      (l/dbg :hint "write penpot file object thumbnail"
+             :media-id (str (:media-id item)) ::l/sync? true))
    (doto output
      (write-obj! file)
@@ -466,8 +454,8 @@
(defn- read-import-v1
  [{:keys [::db/conn ::project-id ::profile-id ::input] :as cfg}]
-  (db/exec-one! conn ["SET LOCAL idle_in_transaction_session_timeout = 0"])
-  (db/exec-one! conn ["SET CONSTRAINTS ALL DEFERRED"])
+  (bfc/disable-database-timeouts! cfg)
  (pu/with-open [input (zstd-input-stream input)
                 input (io/data-input-stream input)]
@@ -559,7 +547,9 @@
    (when (seq thumbnails)
      (let [thumbnails (remap-thumbnails thumbnails file-id')]
-        (l/dbg :hint "updated index with thumbnails" :total (count thumbnails) ::l/sync? true)
+        (l/dbg :hint "updated index with thumbnails"
+               :total (count thumbnails)
+               ::l/sync? true)
        (vswap! bfc/*state* update :thumbnails bfc/into-vec thumbnails)))
    (when (seq media)
@@ -709,7 +699,7 @@
  (dm/assert!
   "expected instance of jio/IOFactory for `input`"
-   (satisfies? jio/IOFactory output))
+   (io/coercible? output))
  (let [id (uuid/next)
        tp (dt/tpoint)
@@ -738,7 +728,7 @@
        :cause @cs)))))
(defn import-files!
-  [cfg input]
+  [{:keys [::input] :as cfg}]
  (dm/assert!
   "expected valid profile-id and project-id on `cfg`"

View file

@@ -141,16 +141,15 @@
      (write! cfg :team-font-variant id font))))
(defn- write-project!
-  [cfg project-id]
-  (let [project (bfc/get-project cfg project-id)]
-    (events/tap :progress
-                {:op :export
-                 :section :write-project
-                 :id project-id
-                 :name (:name project)})
-    (l/trc :hint "write" :obj "project" :id (str project-id))
-    (write! cfg :project (str project-id) project)
-    (vswap! bfc/*state* update :projects conj project-id)))
+  [cfg project]
+  (events/tap :progress
+              {:op :export
+               :section :write-project
+               :id (:id project)
+               :name (:name project)})
+  (l/trc :hint "write" :obj "project" :id (str (:id project)))
+  (write! cfg :project (str (:id project)) project)
+  (vswap! bfc/*state* update :projects conj (:id project)))
(defn- write-file!
  [cfg file-id]
@@ -191,7 +190,7 @@
  [{:keys [::sto/storage] :as cfg} id]
  (let [sobj (sto/get-object storage id)
        data (with-open [input (sto/get-object-data storage sobj)]
-               (io/read-as-bytes input))]
+               (io/read input))]
    (l/trc :hint "write" :obj "storage-object" :id (str id) :size (:size sobj))
    (write! cfg :storage-object id (meta sobj) data)))
@@ -363,7 +362,7 @@
          (bfc/get-team-projects cfg team-id))
    (run! (partial write-file! cfg)
-          (bfc/get-team-files cfg team-id))
+          (bfc/get-team-files-ids cfg team-id))
    (run! (partial write-storage-object! cfg)
          (-> bfc/*state* deref :storage-objects))

View file

@@ -0,0 +1,951 @@
;; This Source Code Form is subject to the terms of the Mozilla Public
;; License, v. 2.0. If a copy of the MPL was not distributed with this
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
;;
;; Copyright (c) KALEIDOS INC
(ns app.binfile.v3
"A ZIP based binary file exportation"
(:refer-clojure :exclude [read])
(:require
[app.binfile.common :as bfc]
[app.common.data :as d]
[app.common.data.macros :as dm]
[app.common.exceptions :as ex]
[app.common.json :as json]
[app.common.logging :as l]
[app.common.schema :as sm]
[app.common.thumbnails :as cth]
[app.common.types.color :as ctcl]
[app.common.types.component :as ctc]
[app.common.types.file :as ctf]
[app.common.types.page :as ctp]
[app.common.types.plugins :as ctpg]
[app.common.types.shape :as cts]
[app.common.types.typography :as cty]
[app.common.uuid :as uuid]
[app.config :as cf]
[app.db :as db]
[app.storage :as sto]
[app.storage.impl :as sto.impl]
[app.util.events :as events]
[app.util.time :as dt]
[clojure.java.io :as jio]
[cuerdas.core :as str]
[datoteka.fs :as fs]
[datoteka.io :as io])
(:import
java.io.InputStream
java.io.OutputStreamWriter
java.util.zip.ZipEntry
java.util.zip.ZipFile
java.util.zip.ZipOutputStream))
;; --- SCHEMA
(def ^:private schema:manifest
[:map {:title "Manifest"}
[:version ::sm/int]
[:type :string]
[:generated-by {:optional true} :string]
[:files
[:vector
[:map
[:id ::sm/uuid]
[:name :string]
[:project-id ::sm/uuid]]]]
[:relations {:optional true}
[:vector
[:tuple ::sm/uuid ::sm/uuid]]]])
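;; Illustrative sketch (not part of this commit): a manifest map of the shape
;; that schema:manifest describes; the ids and names below are made up.
(comment
  {:version 1
   :type "penpot/export-files"
   :generated-by "penpot/2.3.0"
   :files [{:id #uuid "00000000-0000-0000-0000-000000000001"
            :name "Example file"
            :project-id #uuid "00000000-0000-0000-0000-000000000002"}]
   :relations []})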
(def ^:private schema:storage-object
[:map {:title "StorageObject"}
[:id ::sm/uuid]
[:size ::sm/int]
[:content-type :string]
[:bucket [::sm/one-of {:format :string} sto/valid-buckets]]
[:hash :string]])
(def ^:private schema:file-thumbnail
[:map {:title "FileThumbnail"}
[:file-id ::sm/uuid]
[:page-id ::sm/uuid]
[:frame-id ::sm/uuid]
[:tag :string]
[:media-id ::sm/uuid]])
;; --- ENCODERS
(def encode-file
(sm/encoder ::ctf/file sm/json-transformer))
(def encode-page
(sm/encoder ::ctp/page sm/json-transformer))
(def encode-shape
(sm/encoder ::cts/shape sm/json-transformer))
(def encode-media
(sm/encoder ::ctf/media sm/json-transformer))
(def encode-component
(sm/encoder ::ctc/component sm/json-transformer))
(def encode-color
(sm/encoder ::ctcl/color sm/json-transformer))
(def encode-typography
(sm/encoder ::cty/typography sm/json-transformer))
(def encode-plugin-data
(sm/encoder ::ctpg/plugin-data sm/json-transformer))
(def encode-storage-object
(sm/encoder schema:storage-object sm/json-transformer))
(def encode-file-thumbnail
(sm/encoder schema:file-thumbnail sm/json-transformer))
;; --- DECODERS
(def decode-manifest
(sm/decoder schema:manifest sm/json-transformer))
(def decode-media
(sm/decoder ::ctf/media sm/json-transformer))
(def decode-component
(sm/decoder ::ctc/component sm/json-transformer))
(def decode-color
(sm/decoder ::ctcl/color sm/json-transformer))
(def decode-file
(sm/decoder ::ctf/file sm/json-transformer))
(def decode-page
(sm/decoder ::ctp/page sm/json-transformer))
(def decode-shape
(sm/decoder ::cts/shape sm/json-transformer))
(def decode-typography
(sm/decoder ::cty/typography sm/json-transformer))
(def decode-plugin-data
(sm/decoder ::ctpg/plugin-data sm/json-transformer))
(def decode-storage-object
(sm/decoder schema:storage-object sm/json-transformer))
(def decode-file-thumbnail
(sm/decoder schema:file-thumbnail sm/json-transformer))
;; --- VALIDATORS
(def validate-manifest
(sm/check-fn schema:manifest))
(def validate-file
(sm/check-fn ::ctf/file))
(def validate-page
(sm/check-fn ::ctp/page))
(def validate-shape
(sm/check-fn ::cts/shape))
(def validate-media
(sm/check-fn ::ctf/media))
(def validate-color
(sm/check-fn ::ctcl/color))
(def validate-component
(sm/check-fn ::ctc/component))
(def validate-typography
(sm/check-fn ::cty/typography))
(def validate-plugin-data
(sm/check-fn ::ctpg/plugin-data))
(def validate-storage-object
(sm/check-fn schema:storage-object))
(def validate-file-thumbnail
(sm/check-fn schema:file-thumbnail))
;; --- EXPORT IMPL
(defn- write-entry!
[^ZipOutputStream output ^String path data]
(.putNextEntry output (ZipEntry. path))
(let [writer (OutputStreamWriter. output "UTF-8")]
(json/write writer data :indent true :key-fn json/write-camel-key)
(.flush writer))
(.closeEntry output))
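;; Illustrative usage sketch (not part of this commit): write-entry! serializes
;; a value as camel-cased JSON under the given path of an open zip stream; the
;; output target and data below are hypothetical.
(comment
  (with-open [output (ZipOutputStream. (io/output-stream (fs/file "/tmp/example.zip")))]
    ;; entries written this way are later read back with read-entry
    (write-entry! output "files/example.json" {:id "example" :name "Example"})))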
(defn- get-file
[{:keys [::embed-assets ::include-libraries] :as cfg} file-id]
(when (and include-libraries embed-assets)
(throw (IllegalArgumentException.
"the `include-libraries` and `embed-assets` are mutally excluding options")))
(let [detach? (and (not embed-assets) (not include-libraries))]
(cond-> (bfc/get-file cfg file-id)
detach?
(-> (ctf/detach-external-references file-id)
(dissoc :libraries))
embed-assets
(update :data #(bfc/embed-assets cfg % file-id)))))
(defn- resolve-extension
[mtype]
(case mtype
"image/png" ".png"
"image/jpeg" ".jpg"
"image/gif" ".gif"
"image/svg+xml" ".svg"
"image/webp" ".webp"
"font/woff" ".woff"
"font/woff2" ".woff2"
"font/ttf" ".ttf"
"font/otf" ".otf"
"application/octet-stream" ".bin"))
(defn- export-storage-objects
[{:keys [::output] :as cfg}]
(let [storage (sto/resolve cfg)]
(doseq [id (-> bfc/*state* deref :storage-objects not-empty)]
(let [sobject (sto/get-object storage id)
smeta (meta sobject)
ext (resolve-extension (:content-type smeta))
path (str "objects/" id ".json")
params (-> (meta sobject)
(assoc :id (:id sobject))
(assoc :size (:size sobject))
(encode-storage-object))]
(write-entry! output path params)
(with-open [input (sto/get-object-data storage sobject)]
(.putNextEntry output (ZipEntry. (str "objects/" id ext)))
(io/copy input output :size (:size sobject))
(.closeEntry output))))))
(defn- export-file
[{:keys [::file-id ::output] :as cfg}]
(let [file (get-file cfg file-id)
media (->> (bfc/get-file-media cfg file)
(map (fn [media]
(dissoc media :file-id))))
data (:data file)
typographies (:typographies data)
components (:components data)
colors (:colors data)
pages (:pages data)
pages-index (:pages-index data)
thumbnails (bfc/get-file-object-thumbnails cfg file-id)]
(vswap! bfc/*state* update :files assoc file-id
{:id file-id
:project-id (:project-id file)
:name (:name file)})
(let [file (cond-> (dissoc file :data)
(:options data)
(assoc :options (:options data))
:always
(encode-file))
path (str "files/" file-id ".json")]
(write-entry! output path file))
(doseq [[index page-id] (d/enumerate pages)]
(let [path (str "files/" file-id "/pages/" page-id ".json")
page (get pages-index page-id)
objects (:objects page)
page (-> page
(dissoc :objects)
(assoc :index index))
page (encode-page page)]
(write-entry! output path page)
(doseq [[shape-id shape] objects]
(let [path (str "files/" file-id "/pages/" page-id "/" shape-id ".json")
shape (assoc shape :page-id page-id)
shape (encode-shape shape)]
(write-entry! output path shape)))))
(vswap! bfc/*state* bfc/collect-storage-objects media)
(vswap! bfc/*state* bfc/collect-storage-objects thumbnails)
(doseq [{:keys [id] :as media} media]
(let [path (str "files/" file-id "/media/" id ".json")
media (encode-media media)]
(write-entry! output path media)))
(doseq [thumbnail thumbnails]
(let [data (cth/parse-object-id (:object-id thumbnail))
path (str "files/" file-id "/thumbnails/" (:page-id data)
"/" (:frame-id data) ".json")
data (-> data
(assoc :media-id (:media-id thumbnail))
(encode-file-thumbnail))]
(write-entry! output path data)))
(doseq [[id component] components]
(let [path (str "files/" file-id "/components/" id ".json")
component (encode-component component)]
(write-entry! output path component)))
(doseq [[id color] colors]
(let [path (str "files/" file-id "/colors/" id ".json")
color (-> (encode-color color)
(dissoc :file-id))
color (cond-> color
(and (contains? color :path)
(str/empty? (:path color)))
(dissoc :path))]
(write-entry! output path color)))
(doseq [[id object] typographies]
(let [path (str "files/" file-id "/typographies/" id ".json")
color (encode-typography object)]
(write-entry! output path color)))))
(defn- export-files
[{:keys [::ids ::include-libraries ::output] :as cfg}]
(let [ids (into ids (when include-libraries (bfc/get-libraries cfg ids)))
rels (if include-libraries
(->> (bfc/get-files-rels cfg ids)
(mapv (juxt :file-id :library-file-id)))
[])]
(vswap! bfc/*state* assoc :files (d/ordered-map))
;; Write all the exporting files
(doseq [[index file-id] (d/enumerate ids)]
(-> cfg
(assoc ::file-id file-id)
(assoc ::file-seqn index)
(export-file)))
;; Write manifest file
(let [files (:files @bfc/*state*)
params {:type "penpot/export-files"
:version 1
:generated-by (str "penpot/" (:full cf/version))
:files (vec (vals files))
:relations rels}]
(write-entry! output "manifest.json" params))))
;; --- IMPORT IMPL
(defn- read-zip-entries
[^ZipFile input]
(into #{} (iterator-seq (.entries input))))
(defn- get-zip-entry*
[^ZipFile input ^String path]
(.getEntry input path))
(defn- get-zip-entry
[input path]
(let [entry (get-zip-entry* input path)]
(when-not entry
(ex/raise :type :validation
:code :inconsistent-penpot-file
:hint "the penpot file seems corrupt, missing underlying zip entry"
:path path))
entry))
(defn- get-zip-entry-size
[^ZipEntry entry]
(.getSize entry))
(defn- zip-entry-name
[^ZipEntry entry]
(.getName entry))
(defn- zip-entry-stream
^InputStream
[^ZipFile input ^ZipEntry entry]
(.getInputStream input entry))
(defn- zip-entry-reader
[^ZipFile input ^ZipEntry entry]
(-> (zip-entry-stream input entry)
(io/reader :encoding "UTF-8")))
(defn- zip-entry-storage-content
"Wraps a ZipFile and ZipEntry into a penpot storage compatible
object and avoid creating temporal objects"
[input entry]
(let [hash (delay (->> entry
(zip-entry-stream input)
(sto.impl/calculate-hash)))]
(reify
sto.impl/IContentObject
(get-size [_]
(get-zip-entry-size entry))
sto.impl/IContentHash
(get-hash [_]
(deref hash))
jio/IOFactory
(make-reader [this opts]
(jio/make-reader this opts))
(make-writer [_ _]
(throw (UnsupportedOperationException. "not implemented")))
(make-input-stream [_ _]
(zip-entry-stream input entry))
(make-output-stream [_ _]
(throw (UnsupportedOperationException. "not implemented"))))))
(defn- read-manifest
[^ZipFile input]
(let [entry (get-zip-entry input "manifest.json")]
(with-open [reader (zip-entry-reader input entry)]
(let [manifest (json/read reader :key-fn json/read-kebab-key)]
(decode-manifest manifest)))))
(defn- match-media-entry-fn
[file-id]
(let [pattern (str "^files/" file-id "/media/([^/]+).json$")
pattern (re-pattern pattern)]
(fn [entry]
(when-let [[_ id] (re-matches pattern (zip-entry-name entry))]
{:entry entry
:id (parse-uuid id)}))))
(defn- match-color-entry-fn
[file-id]
(let [pattern (str "^files/" file-id "/colors/([^/]+).json$")
pattern (re-pattern pattern)]
(fn [entry]
(when-let [[_ id] (re-matches pattern (zip-entry-name entry))]
{:entry entry
:id (parse-uuid id)}))))
(defn- match-component-entry-fn
[file-id]
(let [pattern (str "^files/" file-id "/components/([^/]+).json$")
pattern (re-pattern pattern)]
(fn [entry]
(when-let [[_ id] (re-matches pattern (zip-entry-name entry))]
{:entry entry
:id (parse-uuid id)}))))
(defn- match-typography-entry-fn
[file-id]
(let [pattern (str "^files/" file-id "/typographies/([^/]+).json$")
pattern (re-pattern pattern)]
(fn [entry]
(when-let [[_ id] (re-matches pattern (zip-entry-name entry))]
{:entry entry
:id (parse-uuid id)}))))
(defn- match-thumbnail-entry-fn
[file-id]
(let [pattern (str "^files/" file-id "/thumbnails/([^/]+)/([^/]+).json$")
pattern (re-pattern pattern)]
(fn [entry]
(when-let [[_ page-id frame-id] (re-matches pattern (zip-entry-name entry))]
{:entry entry
:page-id (parse-uuid page-id)
:frame-id (parse-uuid frame-id)
:file-id file-id}))))
(defn- match-page-entry-fn
[file-id]
(let [pattern (str "^files/" file-id "/pages/([^/]+).json$")
pattern (re-pattern pattern)]
(fn [entry]
(when-let [[_ id] (re-matches pattern (zip-entry-name entry))]
{:entry entry
:id (parse-uuid id)}))))
(defn- match-shape-entry-fn
[file-id page-id]
(let [pattern (str "^files/" file-id "/pages/" page-id "/([^/]+).json$")
pattern (re-pattern pattern)]
(fn [entry]
(when-let [[_ id] (re-matches pattern (zip-entry-name entry))]
{:entry entry
:page-id page-id
:id (parse-uuid id)}))))
(defn- match-storage-entry-fn
[]
(let [pattern (str "^objects/([^/]+).json$")
pattern (re-pattern pattern)]
(fn [entry]
(when-let [[_ id] (re-matches pattern (zip-entry-name entry))]
{:entry entry
:id (parse-uuid id)}))))
(defn- read-entry
[^ZipFile input entry]
(with-open [reader (zip-entry-reader input entry)]
(json/read reader :key-fn json/read-kebab-key)))
(defn- read-file
[{:keys [::input ::file-id]}]
(let [path (str "files/" file-id ".json")
entry (get-zip-entry input path)]
(-> (read-entry input entry)
(decode-file)
(validate-file))))
(defn- read-file-plugin-data
[{:keys [::input ::file-id]}]
(let [path (str "files/" file-id "/plugin-data.json")
entry (get-zip-entry* input path)]
(some->> entry
(read-entry input)
(decode-plugin-data)
(validate-plugin-data))))
(defn- read-file-media
[{:keys [::input ::file-id ::entries]}]
(->> (keep (match-media-entry-fn file-id) entries)
(reduce (fn [result {:keys [id entry]}]
(let [object (->> (read-entry input entry)
(decode-media)
(validate-media))
object (assoc object :file-id file-id)]
(if (= id (:id object))
(conj result object)
result)))
[])
(not-empty)))
(defn- read-file-colors
[{:keys [::input ::file-id ::entries]}]
(->> (keep (match-color-entry-fn file-id) entries)
(reduce (fn [result {:keys [id entry]}]
(let [object (->> (read-entry input entry)
(decode-color)
(validate-color))]
(if (= id (:id object))
(assoc result id object)
result)))
{})
(not-empty)))
(defn- read-file-components
[{:keys [::input ::file-id ::entries]}]
(->> (keep (match-component-entry-fn file-id) entries)
(reduce (fn [result {:keys [id entry]}]
(let [object (->> (read-entry input entry)
(decode-component)
(validate-component))]
(if (= id (:id object))
(assoc result id object)
result)))
{})
(not-empty)))
(defn- read-file-typographies
[{:keys [::input ::file-id ::entries]}]
(->> (keep (match-typography-entry-fn file-id) entries)
(reduce (fn [result {:keys [id entry]}]
(let [object (->> (read-entry input entry)
(decode-typography)
(validate-typography))]
(if (= id (:id object))
(assoc result id object)
result)))
{})
(not-empty)))
(defn- read-file-shapes
[{:keys [::input ::file-id ::page-id ::entries] :as cfg}]
(->> (keep (match-shape-entry-fn file-id page-id) entries)
(reduce (fn [result {:keys [id entry]}]
(let [object (->> (read-entry input entry)
(decode-shape)
(validate-shape))]
(if (= id (:id object))
(assoc result id object)
result)))
{})
(not-empty)))
(defn- read-file-pages
[{:keys [::input ::file-id ::entries] :as cfg}]
(->> (keep (match-page-entry-fn file-id) entries)
(keep (fn [{:keys [id entry]}]
(let [page (->> (read-entry input entry)
(decode-page))
page (dissoc page :options)]
(when (= id (:id page))
(let [objects (-> (assoc cfg ::page-id id)
(read-file-shapes))]
(assoc page :objects objects))))))
(sort-by :index)
(reduce (fn [result {:keys [id] :as page}]
(assoc result id (dissoc page :index)))
(d/ordered-map))))
(defn- read-file-thumbnails
[{:keys [::input ::file-id ::entries] :as cfg}]
(->> (keep (match-thumbnail-entry-fn file-id) entries)
(reduce (fn [result {:keys [page-id frame-id entry]}]
(let [object (->> (read-entry input entry)
(decode-file-thumbnail)
(validate-file-thumbnail))]
(if (and (= frame-id (:frame-id object))
(= page-id (:page-id object)))
(conj result object)
result)))
[])
(not-empty)))
(defn- read-file-data
[{:keys [] :as cfg}]
(let [colors (read-file-colors cfg)
typographies (read-file-typographies cfg)
components (read-file-components cfg)
plugin-data (read-file-plugin-data cfg)
pages (read-file-pages cfg)]
{:pages (-> pages keys vec)
:pages-index (into {} pages)
:colors colors
:typographies typographies
:components components
:plugin-data plugin-data}))
(defn- import-file
[{:keys [::db/conn ::project-id ::file-id ::file-name] :as cfg}]
(let [file-id' (bfc/lookup-index file-id)
file (read-file cfg)
media (read-file-media cfg)
thumbnails (read-file-thumbnails cfg)]
(l/dbg :hint "processing file"
:id (str file-id')
:prev-id (str file-id)
:features (str/join "," (:features file))
:version (:version file)
::l/sync? true)
(events/tap :progress {:section :file :name file-name})
(when media
;; Update index with media
(l/dbg :hint "update media index"
:file-id (str file-id')
:total (count media)
::l/sync? true)
(vswap! bfc/*state* update :index bfc/update-index (map :id media))
(vswap! bfc/*state* update :media into media))
(when thumbnails
(l/dbg :hint "update thumbnails index"
:file-id (str file-id')
:total (count thumbnails)
::l/sync? true)
(vswap! bfc/*state* update :index bfc/update-index (map :media-id thumbnails))
(vswap! bfc/*state* update :thumbnails into thumbnails))
(let [data (-> (read-file-data cfg)
(d/without-nils)
(assoc :id file-id')
(cond-> (:options file)
(assoc :options (:options file))))
file (-> file
(assoc :id file-id')
(assoc :data data)
(assoc :name file-name)
(assoc :project-id project-id)
(dissoc :options)
(bfc/process-file))]
(->> file
(bfc/register-pending-migrations cfg)
(bfc/persist-file! cfg))
(when (::bfc/overwrite cfg)
(db/delete! conn :file-thumbnail {:file-id file-id'}))
file-id')))
(defn- import-file-relations
[{:keys [::db/conn ::manifest ::bfc/timestamp] :as cfg}]
(events/tap :progress {:section :relations})
(doseq [[file-id libr-id] (:relations manifest)]
(let [file-id (bfc/lookup-index file-id)
libr-id (bfc/lookup-index libr-id)]
(when (and file-id libr-id)
(l/dbg :hint "create file library link"
:file-id (str file-id)
:lib-id (str libr-id)
::l/sync? true)
(db/insert! conn :file-library-rel
{:synced-at timestamp
:file-id file-id
:library-file-id libr-id})))))
(defn- import-storage-objects
[{:keys [::input ::entries ::bfc/timestamp] :as cfg}]
(events/tap :progress {:section :storage-objects})
(let [storage (sto/resolve cfg)
entries (keep (match-storage-entry-fn) entries)]
(doseq [{:keys [id entry]} entries]
(let [object (->> (read-entry input entry)
(decode-storage-object)
(validate-storage-object))]
(when (not= id (:id object))
(ex/raise :type :validation
:code :inconsistent-penpot-file
:hint "the penpot file seems corrupt, found unexpected uuid (storage-object-id)"
:expected-id (str id)
:found-id (str (:id object))))
(let [ext (resolve-extension (:content-type object))
path (str "objects/" id ext)
content (->> path
(get-zip-entry input)
(zip-entry-storage-content input))]
(when (not= (:size object) (sto/get-size content))
(ex/raise :type :validation
:code :inconsistent-penpot-file
:hint "found corrupted storage object: size does not match"
:path path
:expected-size (:size object)
:found-size (sto/get-size content)))
(when (not= (:hash object) (sto/get-hash content))
(ex/raise :type :validation
:code :inconsistent-penpot-file
:hint "found corrupted storage object: hash does not match"
:path path
:expected-hash (:hash object)
:found-hash (sto/get-hash content)))
(let [params (-> object
(dissoc :id :size)
(assoc ::sto/content content)
(assoc ::sto/deduplicate? true)
(assoc ::sto/touched-at timestamp))
sobject (sto/put-object! storage params)]
(l/dbg :hint "persisted storage object"
:id (str (:id sobject))
:prev-id (str id)
:bucket (:bucket params)
::l/sync? true)
(vswap! bfc/*state* update :index assoc id (:id sobject))))))))
(defn- import-file-media
[{:keys [::db/conn] :as cfg}]
(events/tap :progress {:section :media})
(doseq [item (:media @bfc/*state*)]
(let [params (-> item
(update :id bfc/lookup-index)
(update :file-id bfc/lookup-index)
(d/update-when :media-id bfc/lookup-index)
(d/update-when :thumbnail-id bfc/lookup-index))]
(l/dbg :hint "inserting file media object"
:id (str (:id params))
:file-id (str (:file-id params))
::l/sync? true)
(db/insert! conn :file-media-object params
{::db/on-conflict-do-nothing? (::bfc/overwrite cfg)}))))
(defn- import-file-thumbnails
[{:keys [::db/conn] :as cfg}]
(events/tap :progress {:section :thumbnails})
(doseq [item (:thumbnails @bfc/*state*)]
(let [file-id (bfc/lookup-index (:file-id item))
media-id (bfc/lookup-index (:media-id item))
object-id (-> (assoc item :file-id file-id)
(cth/fmt-object-id))
params {:file-id file-id
:object-id object-id
:tag (:tag item)
:media-id media-id}]
(l/dbg :hint "inserting file object thumbnail"
:file-id (str file-id)
:media-id (str media-id)
::l/sync? true)
(db/insert! conn :file-tagged-object-thumbnail params
{::db/on-conflict-do-nothing? (::bfc/overwrite cfg)}))))
(defn- import-files
[{:keys [::bfc/timestamp ::input ::name] :or {timestamp (dt/now)} :as cfg}]
(dm/assert!
"expected zip file"
(instance? ZipFile input))
(dm/assert!
"expected valid instant"
(dt/instant? timestamp))
(let [manifest (-> (read-manifest input)
(validate-manifest))
entries (read-zip-entries input)]
(when-not (= "penpot/export-files" (:type manifest))
(ex/raise :type :validation
:code :invalid-binfile-v3-manifest
:hint "unexpected type on manifest"
:manifest manifest))
;; Check that all files referenced in the manifest are present
(doseq [{file-id :id} (:files manifest)]
(let [path (str "files/" file-id ".json")]
(when-not (get-zip-entry input path)
(ex/raise :type :validation
:code :invalid-binfile-v3
:hint "some files referenced on manifest not found"
:path path
:file-id file-id))))
(events/tap :progress {:section :manifest})
(let [index (bfc/update-index (map :id (:files manifest)))
state {:media [] :index index}
cfg (-> cfg
(assoc ::entries entries)
(assoc ::manifest manifest)
(assoc ::bfc/timestamp timestamp))]
(binding [bfc/*state* (volatile! state)]
(db/tx-run! cfg (fn [cfg]
(bfc/disable-database-timeouts! cfg)
(let [ids (->> (:files manifest)
(reduce (fn [result {:keys [id] :as file}]
(let [name' (get file :name)
name' (if (map? name)
(get name id)
name')]
(conj result (-> cfg
(assoc ::file-id id)
(assoc ::file-name name')
(import-file)))))
[]))]
(import-file-relations cfg)
(import-storage-objects cfg)
(import-file-media cfg)
(import-file-thumbnails cfg)
(bfc/apply-pending-migrations! cfg)
ids)))))))

;; --- PUBLIC API

(defn export-files!
"Do the exportation of a specified file in custom penpot binary
format. There are some options available for customize the output:
`::include-libraries`: additionally to the specified file, all the
linked libraries also will be included (including transitive
dependencies).
`::embed-assets`: instead of including the libraries, embed in the
same file library all assets used from external libraries."
[{:keys [::ids] :as cfg} output]
(dm/assert!
"expected a set of uuid's for `::ids` parameter"
(and (set? ids)
(every? uuid? ids)))
(dm/assert!
"expected instance of jio/IOFactory for `input`"
(satisfies? jio/IOFactory output))
(let [id (uuid/next)
tp (dt/tpoint)
ab (volatile! false)
cs (volatile! nil)]
(try
(l/info :hint "start exportation" :export-id (str id))
(binding [bfc/*state* (volatile! (bfc/initial-state))]
(with-open [output (io/output-stream output)]
(with-open [output (ZipOutputStream. output)]
(let [cfg (assoc cfg ::output output)]
(export-files cfg)
(export-storage-objects cfg)))))
(catch java.io.IOException _cause
;; Do nothing, EOF means the client closed the connection abruptly
(vreset! ab true)
nil)
(catch Throwable cause
(vreset! cs cause)
(vreset! ab true)
(throw cause))
(finally
(l/info :hint "exportation finished" :export-id (str id)
:elapsed (str (inst-ms (tp)) "ms")
:aborted @ab
:cause @cs)))))
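
;; A minimal usage sketch, assuming `cfg` is an already initialized
;; system map and `file-id` is the uuid of an existing file (both names
;; are hypothetical); any value satisfying jio/IOFactory, such as a
;; plain file, works as the output.
(comment
  (export-files! (assoc cfg
                        ::ids #{file-id}
                        ::include-libraries true)
                 (fs/file "/tmp/export.penpot")))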
(defn import-files!
[{:keys [::input] :as cfg}]
(dm/assert!
"expected valid profile-id and project-id on `cfg`"
(and (uuid? (::profile-id cfg))
(uuid? (::project-id cfg))))
(dm/assert!
"expected instance of jio/IOFactory for `input`"
(io/coercible? input))
(let [id (uuid/next)
tp (dt/tpoint)
cs (volatile! nil)]
(l/info :hint "import: started" :id (str id))
(try
(with-open [input (ZipFile. (fs/file input))]
(import-files (assoc cfg ::input input)))
(catch Throwable cause
(vreset! cs cause)
(throw cause))
(finally
(l/info :hint "import: terminated"
:id (str id)
:elapsed (dt/format-duration (tp))
:error? (some? @cs))))))
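
;; A minimal usage sketch, assuming `cfg`, `profile-id` and
;; `project-id` come from a running system (hypothetical names) and
;; that the input is an archive previously produced by `export-files!`.
(comment
  (import-files! (assoc cfg
                        ::profile-id profile-id
                        ::project-id project-id
                        ::input (fs/file "/tmp/export.penpot"))))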

View file

@ -142,6 +142,8 @@
[:quotes-font-variants-per-team {:optional true} ::sm/int] [:quotes-font-variants-per-team {:optional true} ::sm/int]
[:quotes-comment-threads-per-file {:optional true} ::sm/int] [:quotes-comment-threads-per-file {:optional true} ::sm/int]
[:quotes-comments-per-file {:optional true} ::sm/int] [:quotes-comments-per-file {:optional true} ::sm/int]
[:quotes-snapshots-per-file {:optional true} ::sm/int]
[:quotes-snapshots-per-team {:optional true} ::sm/int]
[:auth-data-cookie-domain {:optional true} :string] [:auth-data-cookie-domain {:optional true} :string]
[:auth-token-cookie-name {:optional true} :string] [:auth-token-cookie-name {:optional true} :string]

View file

@ -41,6 +41,7 @@
[app.common.types.shape.path :as ctsp] [app.common.types.shape.path :as ctsp]
[app.common.types.shape.text :as ctsx] [app.common.types.shape.text :as ctsx]
[app.common.uuid :as uuid] [app.common.uuid :as uuid]
[app.config :as cf]
[app.db :as db] [app.db :as db]
[app.db.sql :as sql] [app.db.sql :as sql]
[app.features.fdata :as fdata] [app.features.fdata :as fdata]
@ -1298,7 +1299,7 @@
(let [[mtype data] (parse-datauri href) (let [[mtype data] (parse-datauri href)
size (alength ^bytes data) size (alength ^bytes data)
path (tmp/tempfile :prefix "penpot.media.download.") path (tmp/tempfile :prefix "penpot.media.download.")
written (io/write-to-file! data path :size size)] written (io/write* path data :size size)]
(when (not= written size) (when (not= written size)
(ex/raise :type :internal (ex/raise :type :internal
@ -1381,7 +1382,9 @@
(defn get-optimized-svg (defn get-optimized-svg
[sid] [sid]
(let [svg-text (get-sobject-content sid) (let [svg-text (get-sobject-content sid)
svg-text (svgo/optimize *system* svg-text)] svg-text (if (contains? cf/flags :backend-svgo)
(svgo/optimize *system* svg-text)
svg-text)]
(csvg/parse svg-text))) (csvg/parse svg-text)))
(def base-path "/data/cache") (def base-path "/data/cache")
@ -1484,11 +1487,6 @@
:file-id (str (:id fdata)) :file-id (str (:id fdata))
:id (str (:id mobj))) :id (str (:id mobj)))
(instance? org.graalvm.polyglot.PolyglotException cause)
(l/inf :hint "skip processing media object: invalid svg found"
:file-id (str (:id fdata))
:id (str (:id mobj)))
(= (:type edata) :not-found) (= (:type edata) :not-found)
(l/inf :hint "skip processing media object: underlying object does not exist" (l/inf :hint "skip processing media object: underlying object does not exist"
:file-id (str (:id fdata)) :file-id (str (:id fdata))
@ -1747,8 +1745,8 @@
(fn [system] (fn [system]
(binding [*system* system] (binding [*system* system]
(when (string? label) (when (string? label)
(fsnap/take-file-snapshot! system {:file-id file-id (fsnap/create-file-snapshot! system nil file-id (str "migration/" label)))
:label (str "migration/" label)}))
(let [file (get-file system file-id) (let [file (get-file system file-id)
file (process-file! system file :validate? validate?)] file (process-file! system file :validate? validate?)]

View file

@ -29,9 +29,9 @@
[promesa.exec :as px] [promesa.exec :as px]
[reitit.core :as r] [reitit.core :as r]
[reitit.middleware :as rr] [reitit.middleware :as rr]
[ring.request :as rreq] [yetti.adapter :as yt]
[ring.response :as-alias rres] [yetti.request :as yreq]
[yetti.adapter :as yt])) [yetti.response :as-alias yres]))
(declare router-handler) (declare router-handler)
@ -100,12 +100,12 @@
(defn- not-found-handler (defn- not-found-handler
[_] [_]
{::rres/status 404}) {::yres/status 404})
(defn- router-handler (defn- router-handler
[router] [router]
(letfn [(resolve-handler [request] (letfn [(resolve-handler [request]
(if-let [match (r/match-by-path router (rreq/path request))] (if-let [match (r/match-by-path router (yreq/path request))]
(let [params (:path-params match) (let [params (:path-params match)
result (:result match) result (:result match)
handler (or (:handler result) not-found-handler) handler (or (:handler result) not-found-handler)
@ -114,11 +114,11 @@
(partial not-found-handler request))) (partial not-found-handler request)))
(on-error [cause request] (on-error [cause request]
(let [{:keys [::rres/body] :as response} (errors/handle cause request)] (let [{:keys [::yres/body] :as response} (errors/handle cause request)]
(cond-> response (cond-> response
(map? body) (map? body)
(-> (update ::rres/headers assoc "content-type" "application/transit+json") (-> (update ::yres/headers assoc "content-type" "application/transit+json")
(assoc ::rres/body (t/encode-str body {:type :json-verbose}))))))] (assoc ::yres/body (t/encode-str body {:type :json-verbose}))))))]
(fn [request] (fn [request]
(let [handler (resolve-handler request)] (let [handler (resolve-handler request)]

View file

@ -12,13 +12,13 @@
[app.main :as-alias main] [app.main :as-alias main]
[app.setup :as-alias setup] [app.setup :as-alias setup]
[app.tokens :as tokens] [app.tokens :as tokens]
[ring.request :as rreq])) [yetti.request :as yreq]))
(def header-re #"^Token\s+(.*)") (def header-re #"^Token\s+(.*)")
(defn- get-token (defn- get-token
[request] [request]
(some->> (rreq/get-header request "authorization") (some->> (yreq/get-header request "authorization")
(re-matches header-re) (re-matches header-re)
(second))) (second)))

View file

@ -16,7 +16,7 @@
[app.util.time :as dt] [app.util.time :as dt]
[clojure.spec.alpha :as s] [clojure.spec.alpha :as s]
[integrant.core :as ig] [integrant.core :as ig]
[ring.response :as-alias rres])) [yetti.response :as-alias yres]))
(def ^:private cache-max-age (def ^:private cache-max-age
(dt/duration {:hours 24})) (dt/duration {:hours 24}))
@ -37,8 +37,8 @@
(defn- serve-object-from-s3 (defn- serve-object-from-s3
[{:keys [::sto/storage] :as cfg} obj] [{:keys [::sto/storage] :as cfg} obj]
(let [{:keys [host port] :as url} (sto/get-object-url storage obj {:max-age signature-max-age})] (let [{:keys [host port] :as url} (sto/get-object-url storage obj {:max-age signature-max-age})]
{::rres/status 307 {::yres/status 307
::rres/headers {"location" (str url) ::yres/headers {"location" (str url)
"x-host" (cond-> host port (str ":" port)) "x-host" (cond-> host port (str ":" port))
"x-mtype" (-> obj meta :content-type) "x-mtype" (-> obj meta :content-type)
"cache-control" (str "max-age=" (inst-ms cache-max-age))}})) "cache-control" (str "max-age=" (inst-ms cache-max-age))}}))
@ -51,8 +51,8 @@
headers {"x-accel-redirect" (:path purl) headers {"x-accel-redirect" (:path purl)
"content-type" (:content-type mdata) "content-type" (:content-type mdata)
"cache-control" (str "max-age=" (inst-ms cache-max-age))}] "cache-control" (str "max-age=" (inst-ms cache-max-age))}]
{::rres/status 204 {::yres/status 204
::rres/headers headers})) ::yres/headers headers}))
(defn- serve-object (defn- serve-object
"Helper function that returns the appropriate response depending on "Helper function that returns the appropriate response depending on
@ -69,7 +69,7 @@
obj (sto/get-object storage id)] obj (sto/get-object storage id)]
(if obj (if obj
(serve-object cfg obj) (serve-object cfg obj)
{::rres/status 404}))) {::yres/status 404})))
(defn- generic-handler (defn- generic-handler
"A generic handler helper/common code for file-media based handlers." "A generic handler helper/common code for file-media based handlers."
@ -80,7 +80,7 @@
sobj (sto/get-object storage (kf mobj))] sobj (sto/get-object storage (kf mobj))]
(if sobj (if sobj
(serve-object cfg sobj) (serve-object cfg sobj)
{::rres/status 404}))) {::yres/status 404})))
(defn file-objects-handler (defn file-objects-handler
"Handler that serves storage objects by file media id." "Handler that serves storage objects by file media id."

View file

@ -22,8 +22,8 @@
[cuerdas.core :as str] [cuerdas.core :as str]
[integrant.core :as ig] [integrant.core :as ig]
[promesa.exec :as px] [promesa.exec :as px]
[ring.request :as rreq] [yetti.request :as yreq]
[ring.response :as-alias rres])) [yetti.response :as-alias yres]))
(declare parse-json) (declare parse-json)
(declare handle-request) (declare handle-request)
@ -38,9 +38,9 @@
(defmethod ig/init-key ::routes (defmethod ig/init-key ::routes
[_ cfg] [_ cfg]
(letfn [(handler [request] (letfn [(handler [request]
(let [data (-> request rreq/body slurp)] (let [data (-> request yreq/body slurp)]
(px/run! :vthread (partial handle-request cfg data))) (px/run! :vthread (partial handle-request cfg data)))
{::rres/status 200})] {::yres/status 200})]
["/sns" {:handler handler ["/sns" {:handler handler
:allowed-methods #{:post}}])) :allowed-methods #{:post}}]))

View file

@ -33,8 +33,8 @@
[integrant.core :as ig] [integrant.core :as ig]
[markdown.core :as md] [markdown.core :as md]
[markdown.transformers :as mdt] [markdown.transformers :as mdt]
[ring.request :as rreq] [yetti.request :as yreq]
[ring.response :as rres])) [yetti.response :as yres]))
;; (selmer.parser/cache-off!) ;; (selmer.parser/cache-off!)
@ -44,10 +44,10 @@
(defn index-handler (defn index-handler
[_cfg _request] [_cfg _request]
{::rres/status 200 {::yres/status 200
::rres/headers {"content-type" "text/html"} ::yres/headers {"content-type" "text/html"}
::rres/body (-> (io/resource "app/templates/debug.tmpl") ::yres/body (-> (io/resource "app/templates/debug.tmpl")
(tmpl/render {}))}) (tmpl/render {:version (:full cf/version)}))})
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; FILE CHANGES ;; FILE CHANGES
@ -56,17 +56,17 @@
(defn prepare-response (defn prepare-response
[body] [body]
(let [headers {"content-type" "application/transit+json"}] (let [headers {"content-type" "application/transit+json"}]
{::rres/status 200 {::yres/status 200
::rres/body body ::yres/body body
::rres/headers headers})) ::yres/headers headers}))
(defn prepare-download-response (defn prepare-download-response
[body filename] [body filename]
(let [headers {"content-disposition" (str "attachment; filename=" filename) (let [headers {"content-disposition" (str "attachment; filename=" filename)
"content-type" "application/octet-stream"}] "content-type" "application/octet-stream"}]
{::rres/status 200 {::yres/status 200
::rres/body body ::yres/body body
::rres/headers headers})) ::yres/headers headers}))
(def sql:retrieve-range-of-changes (def sql:retrieve-range-of-changes
"select revn, changes from file_change where file_id=? and revn >= ? and revn <= ? order by revn") "select revn, changes from file_change where file_id=? and revn >= ? and revn <= ? order by revn")
@ -108,8 +108,8 @@
(db/update! conn :file (db/update! conn :file
{:data data} {:data data}
{:id file-id}) {:id file-id})
{::rres/status 201 {::yres/status 201
::rres/body "OK CREATED"}))) ::yres/body "OK CREATED"})))
:else :else
(prepare-response (blob/decode data)))))) (prepare-response (blob/decode data))))))
@ -123,7 +123,7 @@
[{:keys [::db/pool]} {:keys [::session/profile-id params] :as request}] [{:keys [::db/pool]} {:keys [::session/profile-id params] :as request}]
(let [profile (profile/get-profile pool profile-id) (let [profile (profile/get-profile pool profile-id)
project-id (:default-project-id profile) project-id (:default-project-id profile)
data (some-> params :file :path io/read-as-bytes)] data (some-> params :file :path io/read*)]
(if (and data project-id) (if (and data project-id)
(let [fname (str "Imported file *: " (dt/now)) (let [fname (str "Imported file *: " (dt/now))
@ -138,8 +138,8 @@
{:data data {:data data
:deleted-at nil} :deleted-at nil}
{:id file-id}) {:id file-id})
{::rres/status 200 {::yres/status 200
::rres/body "OK UPDATED"}) ::yres/body "OK UPDATED"})
(db/run! pool (fn [{:keys [::db/conn] :as cfg}] (db/run! pool (fn [{:keys [::db/conn] :as cfg}]
(create-file cfg {:id file-id (create-file cfg {:id file-id
@ -149,15 +149,15 @@
(db/update! conn :file (db/update! conn :file
{:data data} {:data data}
{:id file-id}) {:id file-id})
{::rres/status 201 {::yres/status 201
::rres/body "OK CREATED"})))) ::yres/body "OK CREATED"}))))
{::rres/status 500 {::yres/status 500
::rres/body "ERROR"}))) ::yres/body "ERROR"})))
(defn file-data-handler (defn file-data-handler
[cfg request] [cfg request]
(case (rreq/method request) (case (yreq/method request)
:get (retrieve-file-data cfg request) :get (retrieve-file-data cfg request)
:post (upload-file-data cfg request) :post (upload-file-data cfg request)
(ex/raise :type :http (ex/raise :type :http
@ -238,12 +238,12 @@
1 (render-template-v1 report) 1 (render-template-v1 report)
2 (render-template-v2 report) 2 (render-template-v2 report)
3 (render-template-v3 report))] 3 (render-template-v3 report))]
{::rres/status 200 {::yres/status 200
::rres/body result ::yres/body result
::rres/headers {"content-type" "text/html; charset=utf-8" ::yres/headers {"content-type" "text/html; charset=utf-8"
"x-robots-tag" "noindex"}}) "x-robots-tag" "noindex"}})
{::rres/status 404 {::yres/status 404
::rres/body "not found"}))) ::yres/body "not found"})))
(def sql:error-reports (def sql:error-reports
"SELECT id, created_at, "SELECT id, created_at,
@ -256,10 +256,10 @@
[{:keys [::db/pool]} _request] [{:keys [::db/pool]} _request]
(let [items (->> (db/exec! pool [sql:error-reports]) (let [items (->> (db/exec! pool [sql:error-reports])
(map #(update % :created-at dt/format-instant :rfc1123)))] (map #(update % :created-at dt/format-instant :rfc1123)))]
{::rres/status 200 {::yres/status 200
::rres/body (-> (io/resource "app/templates/error-list.tmpl") ::yres/body (-> (io/resource "app/templates/error-list.tmpl")
(tmpl/render {:items items})) (tmpl/render {:items items}))
::rres/headers {"content-type" "text/html; charset=utf-8" ::yres/headers {"content-type" "text/html; charset=utf-8"
"x-robots-tag" "noindex"}})) "x-robots-tag" "noindex"}}))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
@ -295,15 +295,16 @@
cfg (assoc cfg cfg (assoc cfg
::bf.v1/overwrite false ::bf.v1/overwrite false
::bf.v1/profile-id profile-id ::bf.v1/profile-id profile-id
::bf.v1/project-id project-id)] ::bf.v1/project-id project-id
(bf.v1/import-files! cfg path) ::bf.v1/input path)]
{::rres/status 200 (bf.v1/import-files! cfg)
::rres/headers {"content-type" "text/plain"} {::yres/status 200
::rres/body "OK CLONED"}) ::yres/headers {"content-type" "text/plain"}
::yres/body "OK CLONED"})
{::rres/status 200 {::yres/status 200
::rres/body (io/input-stream path) ::yres/body (io/input-stream path)
::rres/headers {"content-type" "application/octet-stream" ::yres/headers {"content-type" "application/octet-stream"
"content-disposition" (str "attachmen; filename=" (first file-ids) ".penpot")}})))) "content-disposition" (str "attachmen; filename=" (first file-ids) ".penpot")}}))))
@ -329,11 +330,12 @@
::bf.v1/overwrite overwrite? ::bf.v1/overwrite overwrite?
::bf.v1/migrate migrate? ::bf.v1/migrate migrate?
::bf.v1/profile-id profile-id ::bf.v1/profile-id profile-id
::bf.v1/project-id project-id)] ::bf.v1/project-id project-id
(bf.v1/import-files! cfg path) ::bf.v1/input path)]
{::rres/status 200 (bf.v1/import-files! cfg)
::rres/headers {"content-type" "text/plain"} {::yres/status 200
::rres/body "OK"}))) ::yres/headers {"content-type" "text/plain"}
::yres/body "OK"})))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; ACTIONS ;; ACTIONS
@ -363,34 +365,34 @@
(db/update! conn :profile {:is-blocked true} {:id (:id profile)}) (db/update! conn :profile {:is-blocked true} {:id (:id profile)})
(db/delete! conn :http-session {:profile-id (:id profile)}) (db/delete! conn :http-session {:profile-id (:id profile)})
{::rres/status 200 {::yres/status 200
::rres/headers {"content-type" "text/plain"} ::yres/headers {"content-type" "text/plain"}
::rres/body (str/ffmt "PROFILE '%' BLOCKED" (:email profile))}) ::yres/body (str/ffmt "PROFILE '%' BLOCKED" (:email profile))})
(contains? params :unblock) (contains? params :unblock)
(do (do
(db/update! conn :profile {:is-blocked false} {:id (:id profile)}) (db/update! conn :profile {:is-blocked false} {:id (:id profile)})
{::rres/status 200 {::yres/status 200
::rres/headers {"content-type" "text/plain"} ::yres/headers {"content-type" "text/plain"}
::rres/body (str/ffmt "PROFILE '%' UNBLOCKED" (:email profile))}) ::yres/body (str/ffmt "PROFILE '%' UNBLOCKED" (:email profile))})
(contains? params :resend) (contains? params :resend)
(if (:is-blocked profile) (if (:is-blocked profile)
{::rres/status 200 {::yres/status 200
::rres/headers {"content-type" "text/plain"} ::yres/headers {"content-type" "text/plain"}
::rres/body "PROFILE ALREADY BLOCKED"} ::yres/body "PROFILE ALREADY BLOCKED"}
(do (do
(#'auth/send-email-verification! cfg profile) (#'auth/send-email-verification! cfg profile)
{::rres/status 200 {::yres/status 200
::rres/headers {"content-type" "text/plain"} ::yres/headers {"content-type" "text/plain"}
::rres/body (str/ffmt "RESENDED FOR '%'" (:email profile))})) ::yres/body (str/ffmt "RESENDED FOR '%'" (:email profile))}))
:else :else
(do (do
(db/update! conn :profile {:is-active true} {:id (:id profile)}) (db/update! conn :profile {:is-active true} {:id (:id profile)})
{::rres/status 200 {::yres/status 200
::rres/headers {"content-type" "text/plain"} ::yres/headers {"content-type" "text/plain"}
::rres/body (str/ffmt "PROFILE '%' ACTIVATED" (:email profile))})))))) ::yres/body (str/ffmt "PROFILE '%' ACTIVATED" (:email profile))}))))))
(defn- reset-file-version (defn- reset-file-version
@ -415,9 +417,9 @@
(db/tx-run! cfg srepl/process-file! file-id #(assoc % :version version)) (db/tx-run! cfg srepl/process-file! file-id #(assoc % :version version))
{::rres/status 200 {::yres/status 200
::rres/headers {"content-type" "text/plain"} ::yres/headers {"content-type" "text/plain"}
::rres/body "OK"})) ::yres/body "OK"}))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
@ -429,13 +431,13 @@
[{:keys [::db/pool]} _] [{:keys [::db/pool]} _]
(try (try
(db/exec-one! pool ["select count(*) as count from server_prop;"]) (db/exec-one! pool ["select count(*) as count from server_prop;"])
{::rres/status 200 {::yres/status 200
::rres/body "OK"} ::yres/body "OK"}
(catch Throwable cause (catch Throwable cause
(l/warn :hint "unable to execute query on health handler" (l/warn :hint "unable to execute query on health handler"
:cause cause) :cause cause)
{::rres/status 503 {::yres/status 503
::rres/body "KO"}))) ::yres/body "KO"})))
(defn changelog-handler (defn changelog-handler
[_ _] [_ _]
@ -444,11 +446,11 @@
(md->html [text] (md->html [text]
(md/md-to-html-string text :replacement-transformers (into [transform-emoji] mdt/transformer-vector)))] (md/md-to-html-string text :replacement-transformers (into [transform-emoji] mdt/transformer-vector)))]
(if-let [clog (io/resource "changelog.md")] (if-let [clog (io/resource "changelog.md")]
{::rres/status 200 {::yres/status 200
::rres/headers {"content-type" "text/html; charset=utf-8"} ::yres/headers {"content-type" "text/html; charset=utf-8"}
::rres/body (-> clog slurp md->html)} ::yres/body (-> clog slurp md->html)}
{::rres/status 404 {::yres/status 404
::rres/body "NOT FOUND"}))) ::yres/body "NOT FOUND"})))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; INIT ;; INIT

View file

@ -16,8 +16,8 @@
[app.http.session :as-alias session] [app.http.session :as-alias session]
[app.util.inet :as inet] [app.util.inet :as inet]
[clojure.spec.alpha :as s] [clojure.spec.alpha :as s]
[ring.request :as rreq] [yetti.request :as yreq]
[ring.response :as rres])) [yetti.response :as yres]))
(defn request->context (defn request->context
"Extracts error report relevant context data from request." "Extracts error report relevant context data from request."
@ -29,10 +29,10 @@
{:request/path (:path request) {:request/path (:path request)
:request/method (:method request) :request/method (:method request)
:request/params (:params request) :request/params (:params request)
:request/user-agent (rreq/get-header request "user-agent") :request/user-agent (yreq/get-header request "user-agent")
:request/ip-addr (inet/parse-request request) :request/ip-addr (inet/parse-request request)
:request/profile-id (:uid claims) :request/profile-id (:uid claims)
:version/frontend (or (rreq/get-header request "x-frontend-version") "unknown") :version/frontend (or (yreq/get-header request "x-frontend-version") "unknown")
:version/backend (:full cf/version)})) :version/backend (:full cf/version)}))
@ -46,34 +46,34 @@
(defmethod handle-error :authentication (defmethod handle-error :authentication
[err _ _] [err _ _]
{::rres/status 401 {::yres/status 401
::rres/body (ex-data err)}) ::yres/body (ex-data err)})
(defmethod handle-error :authorization (defmethod handle-error :authorization
[err _ _] [err _ _]
{::rres/status 403 {::yres/status 403
::rres/body (ex-data err)}) ::yres/body (ex-data err)})
(defmethod handle-error :restriction (defmethod handle-error :restriction
[err _ _] [err _ _]
(let [{:keys [code] :as data} (ex-data err)] (let [{:keys [code] :as data} (ex-data err)]
(if (= code :method-not-allowed) (if (= code :method-not-allowed)
{::rres/status 405 {::yres/status 405
::rres/body data} ::yres/body data}
{::rres/status 400 {::yres/status 400
::rres/body data}))) ::yres/body data})))
(defmethod handle-error :rate-limit (defmethod handle-error :rate-limit
[err _ _] [err _ _]
(let [headers (-> err ex-data ::http/headers)] (let [headers (-> err ex-data ::http/headers)]
{::rres/status 429 {::yres/status 429
::rres/headers headers})) ::yres/headers headers}))
(defmethod handle-error :concurrency-limit (defmethod handle-error :concurrency-limit
[err _ _] [err _ _]
(let [headers (-> err ex-data ::http/headers)] (let [headers (-> err ex-data ::http/headers)]
{::rres/status 429 {::yres/status 429
::rres/headers headers})) ::yres/headers headers}))
(defmethod handle-error :validation (defmethod handle-error :validation
[err request parent-cause] [err request parent-cause]
@ -84,22 +84,26 @@
(= code :schema-validation) (= code :schema-validation)
(= code :data-validation)) (= code :data-validation))
(let [explain (ex/explain data)] (let [explain (ex/explain data)]
{::rres/status 400 {::yres/status 400
::rres/body (-> data ::yres/body (-> data
(dissoc ::s/problems ::s/value ::s/spec ::sm/explain) (dissoc ::s/problems ::s/value ::s/spec ::sm/explain)
(cond-> explain (assoc :explain explain)))}) (cond-> explain (assoc :explain explain)))})
(= code :vern-conflict)
{::yres/status 409 ;; 409 - Conflict
::yres/body data}
(= code :request-body-too-large) (= code :request-body-too-large)
{::rres/status 413 ::rres/body data} {::yres/status 413 ::yres/body data}
(= code :invalid-image) (= code :invalid-image)
(binding [l/*context* (request->context request)] (binding [l/*context* (request->context request)]
(let [cause (or parent-cause err)] (let [cause (or parent-cause err)]
(l/warn :hint "unexpected error on processing image" :cause cause) (l/warn :hint "unexpected error on processing image" :cause cause)
{::rres/status 400 ::rres/body data})) {::yres/status 400 ::yres/body data}))
:else :else
{::rres/status 400 ::rres/body data}))) {::yres/status 400 ::yres/body data})))
(defmethod handle-error :assertion (defmethod handle-error :assertion
[error request parent-cause] [error request parent-cause]
@ -110,46 +114,47 @@
(= code :data-validation) (= code :data-validation)
(let [explain (ex/explain data)] (let [explain (ex/explain data)]
(l/error :hint "data assertion error" :cause cause) (l/error :hint "data assertion error" :cause cause)
{::rres/status 500 {::yres/status 500
::rres/body {:type :server-error ::yres/body (-> data
:code :assertion (dissoc ::sm/explain)
:data (-> data (cond-> explain (assoc :explain explain))
(dissoc ::sm/explain) (assoc :type :server-error)
(cond-> explain (assoc :explain explain)))}}) (assoc :code :assertion))})
(= code :spec-validation) (= code :spec-validation)
(let [explain (ex/explain data)] (let [explain (ex/explain data)]
(l/error :hint "spec assertion error" :cause cause) (l/error :hint "spec assertion error" :cause cause)
{::rres/status 500 {::yres/status 500
::rres/body {:type :server-error ::yres/body (-> data
:code :assertion (dissoc ::s/problems ::s/value ::s/spec)
:data (-> data (cond-> explain (assoc :explain explain))
(dissoc ::s/problems ::s/value ::s/spec) (assoc :type :server-error)
(cond-> explain (assoc :explain explain)))}}) (assoc :code :assertion))})
:else :else
(do (do
(l/error :hint "assertion error" :cause cause) (l/error :hint "assertion error" :cause cause)
{::rres/status 500 {::yres/status 500
::rres/body {:type :server-error ::yres/body (-> data
:code :assertion (assoc :type :server-error)
:data data}}))))) (assoc :code :assertion))})))))
(defmethod handle-error :not-found (defmethod handle-error :not-found
[err _ _] [err _ _]
{::rres/status 404 {::yres/status 404
::rres/body (ex-data err)}) ::yres/body (ex-data err)})
(defmethod handle-error :internal (defmethod handle-error :internal
[error request parent-cause] [error request parent-cause]
(binding [l/*context* (request->context request)] (binding [l/*context* (request->context request)]
(let [cause (or parent-cause error)] (let [cause (or parent-cause error)
data (ex-data error)]
(l/error :hint "internal error" :cause cause) (l/error :hint "internal error" :cause cause)
{::rres/status 500 {::yres/status 500
::rres/body {:type :server-error ::yres/body (-> data
:code :unhandled (assoc :type :server-error)
:hint (ex-message error) (update :code #(or % :unhandled))
:data (ex-data error)}}))) (assoc :hint (ex-message error)))})))
(defmethod handle-error :default (defmethod handle-error :default
[error request parent-cause] [error request parent-cause]
@ -173,20 +178,20 @@
:cause cause) :cause cause)
(cond (cond
(= state "57014") (= state "57014")
{::rres/status 504 {::yres/status 504
::rres/body {:type :server-error ::yres/body {:type :server-error
:code :statement-timeout :code :statement-timeout
:hint (ex-message error)}} :hint (ex-message error)}}
(= state "25P03") (= state "25P03")
{::rres/status 504 {::yres/status 504
::rres/body {:type :server-error ::yres/body {:type :server-error
:code :idle-in-transaction-timeout :code :idle-in-transaction-timeout
:hint (ex-message error)}} :hint (ex-message error)}}
:else :else
{::rres/status 500 {::yres/status 500
::rres/body {:type :server-error ::yres/body {:type :server-error
:code :unexpected :code :unexpected
:hint (ex-message error) :hint (ex-message error)
:state state}})))) :state state}}))))
@ -200,25 +205,25 @@
(nil? edata) (nil? edata)
(binding [l/*context* (request->context request)] (binding [l/*context* (request->context request)]
(l/error :hint "unexpected error" :cause cause) (l/error :hint "unexpected error" :cause cause)
{::rres/status 500 {::yres/status 500
::rres/body {:type :server-error ::yres/body {:type :server-error
:code :unexpected :code :unexpected
:hint (ex-message error)}}) :hint (ex-message error)}})
:else :else
(binding [l/*context* (request->context request)] (binding [l/*context* (request->context request)]
(l/error :hint "unhandled error" :cause cause) (l/error :hint "unhandled error" :cause cause)
{::rres/status 500 {::yres/status 500
::rres/body {:type :server-error ::yres/body (-> edata
:code :unhandled (assoc :type :server-error)
:hint (ex-message error) (update :code #(or % :unhandled))
:data edata}})))) (assoc :hint (ex-message error)))}))))
(defmethod handle-exception java.io.IOException (defmethod handle-exception java.io.IOException
[cause _ _] [cause _ _]
(l/wrn :hint "io exception" :cause cause) (l/wrn :hint "io exception" :cause cause)
{::rres/status 500 {::yres/status 500
::rres/body {:type :server-error ::yres/body {:type :server-error
:code :io-exception :code :io-exception
:hint (ex-message cause)}}) :hint (ex-message cause)}})
@ -244,4 +249,4 @@
(defn handle' (defn handle'
[cause request] [cause request]
(::rres/body (handle cause request))) (::yres/body (handle cause request)))

View file

@ -15,10 +15,10 @@
[app.http.errors :as errors] [app.http.errors :as errors]
[app.util.pointer-map :as pmap] [app.util.pointer-map :as pmap]
[cuerdas.core :as str] [cuerdas.core :as str]
[ring.request :as rreq]
[ring.response :as rres]
[yetti.adapter :as yt] [yetti.adapter :as yt]
[yetti.middleware :as ymw]) [yetti.middleware :as ymw]
[yetti.request :as yreq]
[yetti.response :as yres])
(:import (:import
io.undertow.server.RequestTooBigException io.undertow.server.RequestTooBigException
java.io.InputStream java.io.InputStream
@ -37,17 +37,17 @@
(defn- get-reader (defn- get-reader
^java.io.BufferedReader ^java.io.BufferedReader
[request] [request]
(let [^InputStream body (rreq/body request)] (let [^InputStream body (yreq/body request)]
(java.io.BufferedReader. (java.io.BufferedReader.
(java.io.InputStreamReader. body)))) (java.io.InputStreamReader. body))))
(defn wrap-parse-request (defn wrap-parse-request
[handler] [handler]
(letfn [(process-request [request] (letfn [(process-request [request]
(let [header (rreq/get-header request "content-type")] (let [header (yreq/get-header request "content-type")]
(cond (cond
(str/starts-with? header "application/transit+json") (str/starts-with? header "application/transit+json")
(with-open [^InputStream is (rreq/body request)] (with-open [^InputStream is (yreq/body request)]
(let [params (t/read! (t/reader is))] (let [params (t/read! (t/reader is))]
(-> request (-> request
(assoc :body-params params) (assoc :body-params params)
@ -85,7 +85,7 @@
(errors/handle cause request)))] (errors/handle cause request)))]
(fn [request] (fn [request]
(if (= (rreq/method request) :post) (if (= (yreq/method request) :post)
(try (try
(-> request process-request handler) (-> request process-request handler)
(catch Throwable cause (catch Throwable cause
@ -113,57 +113,53 @@
(defn wrap-format-response (defn wrap-format-response
[handler] [handler]
(letfn [(transit-streamable-body [data opts] (letfn [(transit-streamable-body [data opts _ output-stream]
(reify rres/StreamableResponseBody (try
(-write-body-to-stream [_ _ output-stream] (with-open [^OutputStream bos (buffered-output-stream output-stream buffer-size)]
(try (let [tw (t/writer bos opts)]
(with-open [^OutputStream bos (buffered-output-stream output-stream buffer-size)] (t/write! tw data)))
(let [tw (t/writer bos opts)] (catch java.io.IOException _)
(t/write! tw data))) (catch Throwable cause
(catch java.io.IOException _) (binding [l/*context* {:value data}]
(catch Throwable cause (l/error :hint "unexpected error on encoding response"
(binding [l/*context* {:value data}] :cause cause)))
(l/error :hint "unexpected error on encoding response" (finally
:cause cause))) (.close ^OutputStream output-stream))))
(finally
(.close ^OutputStream output-stream))))))
(json-streamable-body [data] (json-streamable-body [data _ output-stream]
(reify rres/StreamableResponseBody (try
(-write-body-to-stream [_ _ output-stream] (let [encode (or (-> data meta :encode/json) identity)
(try data (encode data)]
(let [encode (or (-> data meta :encode/json) identity) (with-open [^OutputStream bos (buffered-output-stream output-stream buffer-size)]
data (encode data)] (with-open [^java.io.OutputStreamWriter writer (java.io.OutputStreamWriter. bos)]
(with-open [^OutputStream bos (buffered-output-stream output-stream buffer-size)] (json/write writer data :key-fn json/write-camel-key :value-fn write-json-value))))
(with-open [^java.io.OutputStreamWriter writer (java.io.OutputStreamWriter. bos)] (catch java.io.IOException _)
(json/write writer data :key-fn json/write-camel-key :value-fn write-json-value)))) (catch Throwable cause
(catch java.io.IOException _) (binding [l/*context* {:value data}]
(catch Throwable cause (l/error :hint "unexpected error on encoding response"
(binding [l/*context* {:value data}] :cause cause)))
(l/error :hint "unexpected error on encoding response" (finally
:cause cause))) (.close ^OutputStream output-stream))))
(finally
(.close ^OutputStream output-stream))))))
(format-response-with-json [response _] (format-response-with-json [response _]
(let [body (::rres/body response)] (let [body (::yres/body response)]
(if (or (boolean? body) (coll? body)) (if (or (boolean? body) (coll? body))
(-> response (-> response
(update ::rres/headers assoc "content-type" "application/json") (update ::yres/headers assoc "content-type" "application/json")
(assoc ::rres/body (json-streamable-body body))) (assoc ::yres/body (yres/stream-body (partial json-streamable-body body))))
response))) response)))
(format-response-with-transit [response request] (format-response-with-transit [response request]
(let [body (::rres/body response)] (let [body (::yres/body response)]
(if (or (boolean? body) (coll? body)) (if (or (boolean? body) (coll? body))
(let [qs (rreq/query request) (let [qs (yreq/query request)
opts (if (or (contains? cf/flags :transit-readable-response) opts (if (or (contains? cf/flags :transit-readable-response)
(str/includes? qs "transit_verbose")) (str/includes? qs "transit_verbose"))
{:type :json-verbose} {:type :json-verbose}
{:type :json})] {:type :json})]
(-> response (-> response
(update ::rres/headers assoc "content-type" "application/transit+json") (update ::yres/headers assoc "content-type" "application/transit+json")
(assoc ::rres/body (transit-streamable-body body opts)))) (assoc ::yres/body (yres/stream-body (partial transit-streamable-body body opts)))))
response))) response)))
(format-from-params [{:keys [query-params] :as request}] (format-from-params [{:keys [query-params] :as request}]
@ -172,7 +168,7 @@
(format-response [response request] (format-response [response request]
(let [accept (or (format-from-params request) (let [accept (or (format-from-params request)
(rreq/get-header request "accept"))] (yreq/get-header request "accept"))]
(cond (cond
(or (= accept "application/transit+json") (or (= accept "application/transit+json")
(str/includes? accept "application/transit+json")) (str/includes? accept "application/transit+json"))
@ -221,11 +217,11 @@
(defn wrap-cors (defn wrap-cors
[handler] [handler]
(fn [request] (fn [request]
(let [response (if (= (rreq/method request) :options) (let [response (if (= (yreq/method request) :options)
{::rres/status 200} {::yres/status 200}
(handler request)) (handler request))
origin (rreq/get-header request "origin")] origin (yreq/get-header request "origin")]
(update response ::rres/headers with-cors-headers origin)))) (update response ::yres/headers with-cors-headers origin))))
(def cors (def cors
{:name ::cors {:name ::cors
@ -240,7 +236,7 @@
(when-let [allowed (:allowed-methods data)] (when-let [allowed (:allowed-methods data)]
(fn [handler] (fn [handler]
(fn [request] (fn [request]
(let [method (rreq/method request)] (let [method (yreq/method request)]
(if (contains? allowed method) (if (contains? allowed method)
(handler request) (handler request)
{::rres/status 405}))))))}) {::yres/status 405}))))))})

View file

@ -22,8 +22,7 @@
[clojure.spec.alpha :as s] [clojure.spec.alpha :as s]
[cuerdas.core :as str] [cuerdas.core :as str]
[integrant.core :as ig] [integrant.core :as ig]
[ring.request :as rreq] [yetti.request :as yreq]))
[yetti.request :as yrq]))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; DEFAULTS ;; DEFAULTS
@ -145,7 +144,7 @@
(us/assert! ::us/uuid profile-id) (us/assert! ::us/uuid profile-id)
(fn [request response] (fn [request response]
(let [uagent (rreq/get-header request "user-agent") (let [uagent (yreq/get-header request "user-agent")
params {:profile-id profile-id params {:profile-id profile-id
:user-agent uagent :user-agent uagent
:created-at (dt/now)} :created-at (dt/now)}
@ -161,7 +160,7 @@
(us/assert! ::manager manager) (us/assert! ::manager manager)
(fn [request response] (fn [request response]
(let [cname (cf/get :auth-token-cookie-name default-auth-token-cookie-name) (let [cname (cf/get :auth-token-cookie-name default-auth-token-cookie-name)
cookie (yrq/get-cookie request cname)] cookie (yreq/get-cookie request cname)]
(l/trace :hint "delete" :profile-id (:profile-id request)) (l/trace :hint "delete" :profile-id (:profile-id request))
(some->> (:value cookie) (delete! manager)) (some->> (:value cookie) (delete! manager))
(-> response (-> response
@ -183,7 +182,7 @@
(defn- get-token (defn- get-token
[request] [request]
(let [cname (cf/get :auth-token-cookie-name default-auth-token-cookie-name) (let [cname (cf/get :auth-token-cookie-name default-auth-token-cookie-name)
cookie (some-> (yrq/get-cookie request cname) :value)] cookie (some-> (yreq/get-cookie request cname) :value)]
(when-not (str/empty? cookie) (when-not (str/empty? cookie)
cookie))) cookie)))

View file

@ -16,7 +16,7 @@
[promesa.exec :as px] [promesa.exec :as px]
[promesa.exec.csp :as sp] [promesa.exec.csp :as sp]
[promesa.util :as pu] [promesa.util :as pu]
[ring.response :as rres]) [yetti.response :as yres])
(:import (:import
java.io.OutputStream)) java.io.OutputStream))
@ -49,24 +49,24 @@
(defn response (defn response
[handler & {:keys [buf] :or {buf 32} :as opts}] [handler & {:keys [buf] :or {buf 32} :as opts}]
(fn [request] (fn [request]
{::rres/headers default-headers {::yres/headers default-headers
::rres/status 200 ::yres/status 200
::rres/body (reify rres/StreamableResponseBody ::yres/body (yres/stream-body
(-write-body-to-stream [_ _ output] (fn [_ output]
(binding [events/*channel* (sp/chan :buf buf :xf (keep encode))] (binding [events/*channel* (sp/chan :buf buf :xf (keep encode))]
(let [listener (events/start-listener (let [listener (events/start-listener
(partial write! output) (partial write! output)
(partial pu/close! output))] (partial pu/close! output))]
(try (try
(let [result (handler)] (let [result (handler)]
(events/tap :end result)) (events/tap :end result))
(catch java.io.EOFException cause (catch java.io.EOFException cause
(events/tap :error (errors/handle' cause request))) (events/tap :error (errors/handle' cause request)))
(catch Throwable cause (catch Throwable cause
(l/err :hint "unexpected error on processing sse response" (l/err :hint "unexpected error on processing sse response"
:cause cause) :cause cause)
(events/tap :error (errors/handle' cause request))) (events/tap :error (errors/handle' cause request)))
(finally (finally
(sp/close! events/*channel*) (sp/close! events/*channel*)
(px/await! listener)))))))})) (px/await! listener)))))))}))

View file

@ -21,7 +21,6 @@
[clojure.spec.alpha :as s] [clojure.spec.alpha :as s]
[integrant.core :as ig] [integrant.core :as ig]
[promesa.exec.csp :as sp] [promesa.exec.csp :as sp]
[ring.websocket :as rws]
[yetti.websocket :as yws])) [yetti.websocket :as yws]))
(def recv-labels (def recv-labels
@ -113,7 +112,6 @@
fsub (::file-subscription @state) fsub (::file-subscription @state)
tsub (::team-subscription @state) tsub (::team-subscription @state)
msg {:type :disconnect msg {:type :disconnect
:subs-id profile-id
:profile-id profile-id :profile-id profile-id
:session-id session-id}] :session-id session-id}]
@ -138,9 +136,7 @@
(l/trace :fn "handle-message" :event "subscribe-team" :team-id team-id :conn-id id) (l/trace :fn "handle-message" :event "subscribe-team" :team-id team-id :conn-id id)
(let [prev-subs (get @state ::team-subscription) (let [prev-subs (get @state ::team-subscription)
channel (sp/chan :buf (sp/dropping-buffer 64) channel (sp/chan :buf (sp/dropping-buffer 64)
:xf (comp :xf (remove #(= (:session-id %) session-id)))]
(remove #(= (:session-id %) session-id))
(map #(assoc % :subs-id team-id))))]
(sp/pipe channel output-ch false) (sp/pipe channel output-ch false)
(mbus/sub! msgbus :topic team-id :chan channel) (mbus/sub! msgbus :topic team-id :chan channel)
@ -159,8 +155,7 @@
(l/trace :fn "handle-message" :event "subscribe-file" :file-id file-id :conn-id id) (l/trace :fn "handle-message" :event "subscribe-file" :file-id file-id :conn-id id)
(let [psub (::file-subscription @state) (let [psub (::file-subscription @state)
fch (sp/chan :buf (sp/dropping-buffer 64) fch (sp/chan :buf (sp/dropping-buffer 64)
:xf (comp (remove #(= (:session-id %) session-id)) :xf (remove #(= (:session-id %) session-id)))]
(map #(assoc % :subs-id file-id))))]
(let [subs {:file-id file-id :channel fch :topic file-id}] (let [subs {:file-id file-id :channel fch :topic file-id}]
(swap! state assoc ::file-subscription subs)) (swap! state assoc ::file-subscription subs))
@ -191,7 +186,6 @@
;; Notify the rest of the participants about the new connection. ;; Notify the rest of the participants about the new connection.
(let [message {:type :join-file (let [message {:type :join-file
:file-id file-id :file-id file-id
:subs-id file-id
:session-id session-id :session-id session-id
:profile-id profile-id}] :profile-id profile-id}]
(mbus/pub! msgbus :topic file-id :message message)))) (mbus/pub! msgbus :topic file-id :message message))))
@ -303,7 +297,7 @@
:else :else
(do (do
(l/trace :hint "websocket request" :profile-id profile-id :session-id session-id) (l/trace :hint "websocket request" :profile-id profile-id :session-id session-id)
{::rws/listener (ws/listener request {::yws/listener (ws/listener request
::ws/on-rcv-message (partial on-rcv-message cfg) ::ws/on-rcv-message (partial on-rcv-message cfg)
::ws/on-snd-message (partial on-snd-message cfg) ::ws/on-snd-message (partial on-snd-message cfg)
::ws/on-connect (partial on-connect cfg) ::ws/on-connect (partial on-connect cfg)

View file

@ -63,7 +63,7 @@
(ex/format-throwable cause :data? false :explain? false :header? false :summary? false))} (ex/format-throwable cause :data? false :explain? false :header? false :summary? false))}
(when-let [params (or (:request/params context) (:params context))] (when-let [params (or (:request/params context) (:params context))]
{:params (pp/pprint-str params :length 30 :level 12)}) {:params (pp/pprint-str params :length 30 :level 13)})
(when-let [value (:value context)] (when-let [value (:value context)]
{:value (pp/pprint-str value :length 30 :level 12)}) {:value (pp/pprint-str value :length 30 :level 12)})

View file

@ -319,7 +319,6 @@
::mtx/metrics (ig/ref ::mtx/metrics) ::mtx/metrics (ig/ref ::mtx/metrics)
::mbus/msgbus (ig/ref ::mbus/msgbus) ::mbus/msgbus (ig/ref ::mbus/msgbus)
::rds/redis (ig/ref ::rds/redis) ::rds/redis (ig/ref ::rds/redis)
::svgo/optimizer (ig/ref ::svgo/optimizer)
::rpc/climit (ig/ref ::rpc/climit) ::rpc/climit (ig/ref ::rpc/climit)
::rpc/rlimit (ig/ref ::rpc/rlimit) ::rpc/rlimit (ig/ref ::rpc/rlimit)
@ -430,9 +429,6 @@
;; module requires the migrations to run before initialize. ;; module requires the migrations to run before initialize.
::migrations (ig/ref :app.migrations/migrations)} ::migrations (ig/ref :app.migrations/migrations)}
::svgo/optimizer
{}
:app.loggers.audit.archive-task/handler :app.loggers.audit.archive-task/handler
{::setup/props (ig/ref ::setup/props) {::setup/props (ig/ref ::setup/props)
::db/pool (ig/ref ::db/pool) ::db/pool (ig/ref ::db/pool)
@ -475,7 +471,8 @@
::sto.s3/bucket (or (cf/get :storage-assets-s3-bucket) ::sto.s3/bucket (or (cf/get :storage-assets-s3-bucket)
(cf/get :objects-storage-s3-bucket)) (cf/get :objects-storage-s3-bucket))
::sto.s3/io-threads (or (cf/get :storage-assets-s3-io-threads) ::sto.s3/io-threads (or (cf/get :storage-assets-s3-io-threads)
(cf/get :objects-storage-s3-io-threads))} (cf/get :objects-storage-s3-io-threads))
::wrk/executor (ig/ref ::wrk/executor)}
:app.storage.fs/backend :app.storage.fs/backend
{::sto.fs/directory (or (cf/get :storage-assets-fs-directory) {::sto.fs/directory (or (cf/get :storage-assets-fs-directory)
@ -487,10 +484,7 @@
{::wrk/registry (ig/ref ::wrk/registry) {::wrk/registry (ig/ref ::wrk/registry)
::db/pool (ig/ref ::db/pool) ::db/pool (ig/ref ::db/pool)
::wrk/entries ::wrk/entries
[{:cron #app/cron "0 0 * * * ?" ;; hourly [{:cron #app/cron "0 0 0 * * ?" ;; daily
:task :file-xlog-gc}
{:cron #app/cron "0 0 0 * * ?" ;; daily
:task :session-gc} :task :session-gc}
{:cron #app/cron "0 0 0 * * ?" ;; daily {:cron #app/cron "0 0 0 * * ?" ;; daily

View file

@ -225,7 +225,7 @@
(letfn [(ttf->otf [data] (letfn [(ttf->otf [data]
(let [finput (tmp/tempfile :prefix "penpot.font." :suffix "") (let [finput (tmp/tempfile :prefix "penpot.font." :suffix "")
foutput (fs/path (str finput ".otf")) foutput (fs/path (str finput ".otf"))
_ (io/write-to-file! data finput) _ (io/write* finput data)
res (sh/sh "fontforge" "-lang=ff" "-c" res (sh/sh "fontforge" "-lang=ff" "-c"
(str/fmt "Open('%s'); Generate('%s')" (str/fmt "Open('%s'); Generate('%s')"
(str finput) (str finput)
@ -236,7 +236,7 @@
(otf->ttf [data] (otf->ttf [data]
(let [finput (tmp/tempfile :prefix "penpot.font." :suffix "") (let [finput (tmp/tempfile :prefix "penpot.font." :suffix "")
foutput (fs/path (str finput ".ttf")) foutput (fs/path (str finput ".ttf"))
_ (io/write-to-file! data finput) _ (io/write* finput data)
res (sh/sh "fontforge" "-lang=ff" "-c" res (sh/sh "fontforge" "-lang=ff" "-c"
(str/fmt "Open('%s'); Generate('%s')" (str/fmt "Open('%s'); Generate('%s')"
(str finput) (str finput)
@ -250,14 +250,14 @@
;; command. ;; command.
(let [finput (tmp/tempfile :prefix "penpot.font." :suffix "") (let [finput (tmp/tempfile :prefix "penpot.font." :suffix "")
foutput (fs/path (str finput ".woff")) foutput (fs/path (str finput ".woff"))
_ (io/write-to-file! data finput) _ (io/write* finput data)
res (sh/sh "sfnt2woff" (str finput))] res (sh/sh "sfnt2woff" (str finput))]
(when (zero? (:exit res)) (when (zero? (:exit res))
foutput))) foutput)))
(woff->sfnt [data] (woff->sfnt [data]
(let [finput (tmp/tempfile :prefix "penpot" :suffix "") (let [finput (tmp/tempfile :prefix "penpot" :suffix "")
_ (io/write-to-file! data finput) _ (io/write* finput data)
res (sh/sh "woff2sfnt" (str finput) res (sh/sh "woff2sfnt" (str finput)
:out-enc :bytes)] :out-enc :bytes)]
(when (zero? (:exit res)) (when (zero? (:exit res))

View file

@ -412,7 +412,19 @@
:fn (mg/resource "app/migrations/sql/0129-mod-file-change-table.sql")} :fn (mg/resource "app/migrations/sql/0129-mod-file-change-table.sql")}
{:name "0130-mod-file-change-table" {:name "0130-mod-file-change-table"
:fn (mg/resource "app/migrations/sql/0130-mod-file-change-table.sql")}]) :fn (mg/resource "app/migrations/sql/0130-mod-file-change-table.sql")}
{:name "0131-mod-webhook-table"
:fn (mg/resource "app/migrations/sql/0131-mod-webhook-table.sql")}
{:name "0132-mod-file-change-table"
:fn (mg/resource "app/migrations/sql/0132-mod-file-change-table.sql")}
{:name "0133-mod-file-table"
:fn (mg/resource "app/migrations/sql/0133-mod-file-table.sql")}
{:name "0134-mod-file-change-table"
:fn (mg/resource "app/migrations/sql/0134-mod-file-change-table.sql")}])
(defn apply-migrations! (defn apply-migrations!
[pool name migrations] [pool name migrations]

View file

@ -0,0 +1,6 @@
ALTER TABLE webhook
ADD COLUMN profile_id uuid NULL REFERENCES profile (id) ON DELETE SET NULL;
CREATE INDEX webhook__profile_id__idx
ON webhook (profile_id)
WHERE profile_id IS NOT NULL;

View file

@ -0,0 +1,2 @@
ALTER TABLE file_change
ADD COLUMN created_by text NOT NULL DEFAULT 'system';

View file

@ -0,0 +1,2 @@
ALTER TABLE file
ADD COLUMN vern int NOT NULL DEFAULT 0;

View file

@ -0,0 +1,18 @@
ALTER TABLE file_change
ADD COLUMN updated_at timestamptz DEFAULT now(),
ADD COLUMN deleted_at timestamptz DEFAULT NULL,
ALTER COLUMN created_at SET DEFAULT now();
DROP INDEX file_change__created_at__idx;
DROP INDEX file_change__created_at__label__idx;
DROP INDEX file_change__label__idx;
CREATE INDEX file_change__deleted_at__idx
ON file_change (deleted_at, id)
WHERE deleted_at IS NOT NULL;
CREATE INDEX file_change__system_snapshots__idx
ON file_change (file_id, created_at)
WHERE data IS NOT NULL
AND created_by = 'system'
AND deleted_at IS NULL;

View file

@ -99,8 +99,9 @@
nil)) nil))
(defn pub! (defn pub!
[{::keys [pub-ch]} & {:as params}] [{::keys [pub-ch]} & {:keys [topic] :as params}]
(sp/put! pub-ch params)) (let [params (update params :message assoc :topic topic)]
(sp/put! pub-ch params)))
(defn purge! (defn purge!
[{:keys [::state ::wrk/executor] :as msgbus} chans] [{:keys [::state ::wrk/executor] :as msgbus} chans]
@ -230,7 +231,6 @@
(l/debug :hint "io-loop thread terminated"))))) (l/debug :hint "io-loop thread terminated")))))
(defn- redis-pub! (defn- redis-pub!
"Publish a message to the redis server. Asynchronous operation, "Publish a message to the redis server. Asynchronous operation,
intended to be used in core.async go blocks." intended to be used in core.async go blocks."

View file

@ -36,8 +36,8 @@
[cuerdas.core :as str] [cuerdas.core :as str]
[integrant.core :as ig] [integrant.core :as ig]
[promesa.core :as p] [promesa.core :as p]
[ring.request :as rreq] [yetti.request :as yreq]
[ring.response :as rres])) [yetti.response :as yres]))
(s/def ::profile-id ::us/uuid) (s/def ::profile-id ::us/uuid)
@ -64,16 +64,16 @@
response (if (fn? result) response (if (fn? result)
(result request) (result request)
(let [result (rph/unwrap result)] (let [result (rph/unwrap result)]
{::rres/status (::http/status mdata 200) {::yres/status (::http/status mdata 200)
::rres/headers (::http/headers mdata {}) ::yres/headers (::http/headers mdata {})
::rres/body result}))] ::yres/body result}))]
(-> response (-> response
(handle-response-transformation request mdata) (handle-response-transformation request mdata)
(handle-before-comple-hook mdata)))) (handle-before-comple-hook mdata))))
(defn get-external-session-id (defn get-external-session-id
[request] [request]
(when-let [session-id (rreq/get-header request "x-external-session-id")] (when-let [session-id (yreq/get-header request "x-external-session-id")]
(when-not (or (> (count session-id) 256) (when-not (or (> (count session-id) 256)
(= session-id "null") (= session-id "null")
(str/blank? session-id)) (str/blank? session-id))
@ -81,7 +81,7 @@
(defn- get-external-event-origin (defn- get-external-event-origin
[request] [request]
(when-let [origin (rreq/get-header request "x-event-origin")] (when-let [origin (yreq/get-header request "x-event-origin")]
(when-not (or (> (count origin) 256) (when-not (or (> (count origin) 256)
(= origin "null") (= origin "null")
(str/blank? origin)) (str/blank? origin))
@ -92,7 +92,7 @@
internal async flow into ring async flow." internal async flow into ring async flow."
[methods {:keys [params path-params method] :as request}] [methods {:keys [params path-params method] :as request}]
(let [handler-name (:type path-params) (let [handler-name (:type path-params)
etag (rreq/get-header request "if-none-match") etag (yreq/get-header request "if-none-match")
profile-id (or (::session/profile-id request) profile-id (or (::session/profile-id request)
(::actoken/profile-id request)) (::actoken/profile-id request))

View file

@ -8,6 +8,7 @@
(:refer-clojure :exclude [assert]) (:refer-clojure :exclude [assert])
(:require (:require
[app.binfile.v1 :as bf.v1] [app.binfile.v1 :as bf.v1]
[app.binfile.v3 :as bf.v3]
[app.common.logging :as l] [app.common.logging :as l]
[app.common.schema :as sm] [app.common.schema :as sm]
[app.db :as db] [app.db :as db]
@ -24,7 +25,7 @@
[app.util.time :as dt] [app.util.time :as dt]
[app.worker :as-alias wrk] [app.worker :as-alias wrk]
[promesa.exec :as px] [promesa.exec :as px]
[ring.response :as rres])) [yetti.response :as yres]))
(set! *warn-on-reflection* true) (set! *warn-on-reflection* true)
@ -35,51 +36,103 @@
[:map {:title "export-binfile"} [:map {:title "export-binfile"}
[:name [:string {:max 250}]] [:name [:string {:max 250}]]
[:file-id ::sm/uuid] [:file-id ::sm/uuid]
[:include-libraries :boolean] [:version {:optional true} ::sm/int]
[:embed-assets :boolean]]) [:include-libraries ::sm/boolean]
[:embed-assets ::sm/boolean]])
(defn stream-export-v1
[cfg {:keys [file-id include-libraries embed-assets] :as params}]
(yres/stream-body
(fn [_ output-stream]
(try
(-> cfg
(assoc ::bf.v1/ids #{file-id})
(assoc ::bf.v1/embed-assets embed-assets)
(assoc ::bf.v1/include-libraries include-libraries)
(bf.v1/export-files! output-stream))
(catch Throwable cause
(l/err :hint "exception on exporting file"
:file-id (str file-id)
:cause cause))))))
(defn stream-export-v3
[cfg {:keys [file-id include-libraries embed-assets] :as params}]
(yres/stream-body
(fn [_ output-stream]
(try
(-> cfg
(assoc ::bf.v3/ids #{file-id})
(assoc ::bf.v3/embed-assets embed-assets)
(assoc ::bf.v3/include-libraries include-libraries)
(bf.v3/export-files! output-stream))
(catch Throwable cause
(l/err :hint "exception on exporting file"
:file-id (str file-id)
:cause cause))))))
(sv/defmethod ::export-binfile (sv/defmethod ::export-binfile
"Export a penpot file in a binary format." "Export a penpot file in a binary format."
{::doc/added "1.15" {::doc/added "1.15"
::webhooks/event? true ::webhooks/event? true
::sm/result schema:export-binfile} ::sm/result schema:export-binfile}
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id file-id include-libraries embed-assets] :as params}] [{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id version file-id] :as params}]
(files/check-read-permissions! pool profile-id file-id) (files/check-read-permissions! pool profile-id file-id)
(fn [_] (fn [_]
{::rres/status 200 (let [version (or version 1)
::rres/headers {"content-type" "application/octet-stream"} body (case (int version)
::rres/body (reify rres/StreamableResponseBody 1 (stream-export-v1 cfg params)
(-write-body-to-stream [_ _ output-stream] 2 (throw (ex-info "not-implemented" {}))
(try 3 (stream-export-v3 cfg params))]
(-> cfg
(assoc ::bf.v1/ids #{file-id}) {::yres/status 200
(assoc ::bf.v1/embed-assets embed-assets) ::yres/headers {"content-type" "application/octet-stream"}
(assoc ::bf.v1/include-libraries include-libraries) ::yres/body body})))
(bf.v1/export-files! output-stream))
(catch Throwable cause
(l/err :hint "exception on exporting file"
:file-id (str file-id)
:cause cause)))))}))
;; --- Command: import-binfile ;; --- Command: import-binfile
(defn- import-binfile-v1
[{:keys [::wrk/executor] :as cfg} {:keys [project-id profile-id name file]}]
(let [cfg (-> cfg
(assoc ::bf.v1/project-id project-id)
(assoc ::bf.v1/profile-id profile-id)
(assoc ::bf.v1/name name)
(assoc ::bf.v1/input (:path file)))]
;; NOTE: the import process performs some operations that are
;; not very friendly with virtual threads, so to avoid
;; unexpected blocking of other concurrent operations we dispatch
;; that operation to a dedicated executor.
(px/invoke! executor (partial bf.v1/import-files! cfg))))
(defn- import-binfile-v3
[{:keys [::wrk/executor] :as cfg} {:keys [project-id profile-id name file]}]
(let [cfg (-> cfg
(assoc ::bf.v3/project-id project-id)
(assoc ::bf.v3/profile-id profile-id)
(assoc ::bf.v3/name name)
(assoc ::bf.v3/input (:path file)))]
;; NOTE: the import process performs some operations that are
;; not very friendly with virtual threads, so to avoid
;; unexpected blocking of other concurrent operations we dispatch
;; that operation to a dedicated executor.
(px/invoke! executor (partial bf.v3/import-files! cfg))))
(defn- import-binfile (defn- import-binfile
[{:keys [::wrk/executor ::bf.v1/project-id ::db/pool] :as cfg} input] [{:keys [::db/pool] :as cfg} {:keys [project-id version] :as params}]
;; NOTE: the import process performs some operations that (let [result (case (int version)
;; are not very friendly with virtual threads, so to avoid 1 (import-binfile-v1 cfg params)
;; unexpected blocking of other concurrent operations we 3 (import-binfile-v3 cfg params))]
;; dispatch that operation to a dedicated executor.
(let [result (px/invoke! executor (partial bf.v1/import-files! cfg input))]
(db/update! pool :project (db/update! pool :project
{:modified-at (dt/now)} {:modified-at (dt/now)}
{:id project-id}) {:id project-id})
result)) result))
(def ^:private (def ^:private schema:import-binfile
schema:import-binfile
[:map {:title "import-binfile"} [:map {:title "import-binfile"}
[:name [:string {:max 250}]] [:name [:or [:string {:max 250}]
[:map-of ::sm/uuid [:string {:max 250}]]]]
[:project-id ::sm/uuid] [:project-id ::sm/uuid]
[:version {:optional true} ::sm/int]
[:file ::media/upload]]) [:file ::media/upload]])
(sv/defmethod ::import-binfile (sv/defmethod ::import-binfile
@ -88,12 +141,11 @@
::webhooks/event? true ::webhooks/event? true
::sse/stream? true ::sse/stream? true
::sm/params schema:import-binfile} ::sm/params schema:import-binfile}
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id name project-id file] :as params}] [{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id project-id version] :as params}]
(projects/check-read-permissions! pool profile-id project-id) (projects/check-edition-permissions! pool profile-id project-id)
(let [cfg (-> cfg (let [params (-> params
(assoc ::bf.v1/project-id project-id) (assoc :profile-id profile-id)
(assoc ::bf.v1/profile-id profile-id) (assoc :version (or version 1)))]
(assoc ::bf.v1/name name))]
(with-meta (with-meta
(sse/response #(import-binfile cfg (:path file))) (sse/response (partial import-binfile cfg params))
{::audit/props {:file nil}}))) {::audit/props {:file nil}})))
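
;; A minimal, standalone sketch (assumed names, not the real Penpot API) of the
;; version dispatch used above for both export-binfile and import-binfile:
;; versions 1 and 3 are handled, version 2 is explicitly not implemented, and a
;; missing version defaults to 1.
(defn select-binfile-handler
  [version]
  (case (int (or version 1))
    1 ::binfile-v1
    2 (throw (ex-info "not-implemented" {:version 2}))
    3 ::binfile-v3))

(select-binfile-handler nil) ;; => ::binfile-v1
(select-binfile-handler 3)   ;; => ::binfile-v3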

View file

@ -182,6 +182,7 @@
[:comment-thread-seqn [::sm/int {:min 0}]] [:comment-thread-seqn [::sm/int {:min 0}]]
[:name [:string {:max 250}]] [:name [:string {:max 250}]]
[:revn [::sm/int {:min 0}]] [:revn [::sm/int {:min 0}]]
[:vern [::sm/int {:min 0}]]
[:modified-at ::dt/instant] [:modified-at ::dt/instant]
[:is-shared ::sm/boolean] [:is-shared ::sm/boolean]
[:project-id ::sm/uuid] [:project-id ::sm/uuid]
@ -270,7 +271,7 @@
(defn get-minimal-file (defn get-minimal-file
[cfg id & {:as opts}] [cfg id & {:as opts}]
(let [opts (assoc opts ::sql/columns [:id :modified-at :deleted-at :revn :data-ref-id :data-backend])] (let [opts (assoc opts ::sql/columns [:id :modified-at :deleted-at :revn :vern :data-ref-id :data-backend])]
(db/get cfg :file {:id id} opts))) (db/get cfg :file {:id id} opts)))
(defn- get-minimal-file-with-perms (defn- get-minimal-file-with-perms
@ -280,8 +281,8 @@
(assoc mfile :permissions perms))) (assoc mfile :permissions perms)))
(defn get-file-etag (defn get-file-etag
[{:keys [::rpc/profile-id]} {:keys [modified-at revn permissions]}] [{:keys [::rpc/profile-id]} {:keys [modified-at revn vern permissions]}]
(str profile-id "/" revn "/" (str profile-id "/" revn "/" vern "/"
(dt/format-instant modified-at :iso) (dt/format-instant modified-at :iso)
"/" "/"
(uri/map->query-string permissions))) (uri/map->query-string permissions)))
@ -371,8 +372,9 @@
f.modified_at, f.modified_at,
f.name, f.name,
f.revn, f.revn,
f.vern,
f.is_shared, f.is_shared,
ft.media_id ft.media_id AS thumbnail_id
from file as f from file as f
left join file_thumbnail as ft on (ft.file_id = f.id left join file_thumbnail as ft on (ft.file_id = f.id
and ft.revn = f.revn and ft.revn = f.revn
@ -383,13 +385,7 @@
(defn get-project-files (defn get-project-files
[conn project-id] [conn project-id]
(->> (db/exec! conn [sql:project-files project-id]) (db/exec! conn [sql:project-files project-id]))
(mapv (fn [row]
(if-let [media-id (:media-id row)]
(-> row
(dissoc :media-id)
(assoc :thumbnail-uri (resolve-public-uri media-id)))
(dissoc row :media-id))))))
(def schema:get-project-files (def schema:get-project-files
[:map {:title "get-project-files"} [:map {:title "get-project-files"}
@ -526,6 +522,7 @@
(def ^:private sql:team-shared-files (def ^:private sql:team-shared-files
"select f.id, "select f.id,
f.revn, f.revn,
f.vern,
f.data, f.data,
f.project_id, f.project_id,
f.created_at, f.created_at,
@ -609,6 +606,7 @@
l.deleted_at, l.deleted_at,
l.name, l.name,
l.revn, l.revn,
l.vern,
l.synced_at l.synced_at
FROM libs AS l FROM libs AS l
WHERE l.deleted_at IS NULL OR l.deleted_at > now();") WHERE l.deleted_at IS NULL OR l.deleted_at > now();")
@ -670,6 +668,7 @@
"with recent_files as ( "with recent_files as (
select f.id, select f.id,
f.revn, f.revn,
f.vern,
f.project_id, f.project_id,
f.created_at, f.created_at,
f.modified_at, f.modified_at,
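
;; A standalone illustration (plain strings instead of dt/format-instant and
;; uri/map->query-string, which are assumptions here) of the etag layout used
;; above once `vern` is included: profile-id/revn/vern/modified-at/permissions.
(require '[clojure.string :as str])

(defn file-etag
  [profile-id {:keys [revn vern modified-at permissions]}]
  (str profile-id "/" revn "/" vern "/" modified-at "/"
       (str/join "&" (for [[k v] permissions] (str (name k) "=" v)))))

(file-etag "u1" {:revn 7 :vern 123 :modified-at "2024-11-07T12:00:00Z"
                 :permissions {:can-edit true}})
;; => "u1/7/123/2024-11-07T12:00:00Z/can-edit=true"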

View file

@ -15,10 +15,11 @@
[app.db.sql :as-alias sql] [app.db.sql :as-alias sql]
[app.features.fdata :as feat.fdata] [app.features.fdata :as feat.fdata]
[app.main :as-alias main] [app.main :as-alias main]
[app.msgbus :as mbus]
[app.rpc :as-alias rpc] [app.rpc :as-alias rpc]
[app.rpc.commands.files :as files] [app.rpc.commands.files :as files]
[app.rpc.commands.profile :as profile]
[app.rpc.doc :as-alias doc] [app.rpc.doc :as-alias doc]
[app.rpc.quotes :as quotes]
[app.storage :as sto] [app.storage :as sto]
[app.util.blob :as blob] [app.util.blob :as blob]
[app.util.pointer-map :as pmap] [app.util.pointer-map :as pmap]
@ -26,173 +27,52 @@
[app.util.time :as dt] [app.util.time :as dt]
[cuerdas.core :as str])) [cuerdas.core :as str]))
(defn check-authorized!
[{:keys [::db/pool]} profile-id]
(when-not (or (= "devenv" (cf/get :host))
(let [profile (ex/ignoring (profile/get-profile pool profile-id))
admins (or (cf/get :admins) #{})]
(contains? admins (:email profile))))
(ex/raise :type :authentication
:code :authentication-required
:hint "only admins allowed")))
(def sql:get-file-snapshots (def sql:get-file-snapshots
"SELECT id, label, revn, created_at "SELECT id, label, revn, created_at, created_by, profile_id
FROM file_change FROM file_change
WHERE file_id = ? WHERE file_id = ?
AND created_at < ? AND data IS NOT NULL
AND label IS NOT NULL AND (deleted_at IS NULL OR deleted_at > now())
ORDER BY created_at DESC ORDER BY created_at DESC
LIMIT ?") LIMIT 20")
(defn get-file-snapshots (defn get-file-snapshots
[{:keys [::db/conn]} {:keys [file-id limit start-at] [conn file-id]
:or {limit Long/MAX_VALUE}}] (db/exec! conn [sql:get-file-snapshots file-id]))
(let [start-at (or start-at (dt/now))
limit (min limit 20)]
(->> (db/exec! conn [sql:get-file-snapshots file-id start-at limit])
(mapv (fn [row]
(update row :created-at dt/format-instant :rfc1123))))))
(def ^:private schema:get-file-snapshots (def ^:private schema:get-file-snapshots
[:map [:file-id ::sm/uuid]]) [:map {:title "get-file-snapshots"}
[:file-id ::sm/uuid]])
(sv/defmethod ::get-file-snapshots (sv/defmethod ::get-file-snapshots
{::doc/added "1.20" {::doc/added "1.20"
::doc/skip true
::sm/params schema:get-file-snapshots} ::sm/params schema:get-file-snapshots}
[cfg {:keys [::rpc/profile-id] :as params}] [cfg {:keys [::rpc/profile-id file-id] :as params}]
(check-authorized! cfg profile-id) (db/run! cfg (fn [{:keys [::db/conn]}]
(db/run! cfg get-file-snapshots params)) (files/check-read-permissions! conn profile-id file-id)
(get-file-snapshots conn file-id))))
(defn restore-file-snapshot! (def ^:private sql:get-file
[{:keys [::db/conn] :as cfg} {:keys [file-id id]}] "SELECT f.*,
(let [storage (sto/resolve cfg {::db/reuse-conn true}) p.id AS project_id,
file (files/get-minimal-file conn file-id {::db/for-update true}) p.team_id AS team_id
snapshot (db/get* conn :file-change FROM file AS f
{:file-id file-id INNER JOIN project AS p ON (p.id = f.project_id)
:id id} WHERE f.id = ?")
{::db/for-share true})]
(when-not snapshot
(ex/raise :type :not-found
:code :snapshot-not-found
:hint "unable to find snapshot with the provided label"
:id id
:file-id file-id))
(let [snapshot (feat.fdata/resolve-file-data cfg snapshot)]
(when-not (:data snapshot)
(ex/raise :type :precondition
:code :snapshot-without-data
:hint "snapshot has no data"
:label (:label snapshot)
:file-id file-id))
(l/dbg :hint "restoring snapshot"
:file-id (str file-id)
:label (:label snapshot)
:snapshot-id (str (:id snapshot)))
;; If the file was already offloaded, on restoring the snapshot
;; we are going to replace the file data, so we need to touch
;; the old referenced storage object to avoid possible leaks
(when (feat.fdata/offloaded? file)
(sto/touch-object! storage (:data-ref-id file)))
(db/update! conn :file
{:data (:data snapshot)
:revn (inc (:revn file))
:version (:version snapshot)
:data-backend nil
:data-ref-id nil
:has-media-trimmed false
:features (:features snapshot)}
{:id file-id})
;; clean object thumbnails
(let [sql (str "update file_tagged_object_thumbnail "
" set deleted_at = now() "
" where file_id=? returning media_id")
res (db/exec! conn [sql file-id])]
(doseq [media-id (into #{} (keep :media-id) res)]
(sto/touch-object! storage media-id)))
;; clean file thumbnails
(let [sql (str "update file_thumbnail "
" set deleted_at = now() "
" where file_id=? returning media_id")
res (db/exec! conn [sql file-id])]
(doseq [media-id (into #{} (keep :media-id) res)]
(sto/touch-object! storage media-id)))
{:id (:id snapshot)
:label (:label snapshot)})))
(defn- resolve-snapshot-by-label
[conn file-id label]
(->> (db/query conn :file-change
{:file-id file-id
:label label}
{::sql/order-by [[:created-at :desc]]
::sql/columns [:file-id :id :label]})
(first)))
(def ^:private
schema:restore-file-snapshot
[:and
[:map
[:file-id ::sm/uuid]
[:id {:optional true} ::sm/uuid]
[:label {:optional true} :string]]
[::sm/contains-any #{:id :label}]])
(sv/defmethod ::restore-file-snapshot
{::doc/added "1.20"
::doc/skip true
::sm/params schema:restore-file-snapshot}
[cfg {:keys [::rpc/profile-id file-id id label] :as params}]
(check-authorized! cfg profile-id)
(db/tx-run! cfg (fn [{:keys [::db/conn] :as cfg}]
(let [params (cond-> params
(and (not id) (string? label))
(merge (resolve-snapshot-by-label conn file-id label)))]
(restore-file-snapshot! cfg params)))))
(defn- get-file (defn- get-file
[cfg file-id] [cfg file-id]
(let [file (->> (db/get cfg :file {:id file-id}) (let [file (->> (db/exec-one! cfg [sql:get-file file-id])
(feat.fdata/resolve-file-data cfg))] (feat.fdata/resolve-file-data cfg))]
(binding [pmap/*load-fn* (partial feat.fdata/load-pointer cfg file-id)] (binding [pmap/*load-fn* (partial feat.fdata/load-pointer cfg file-id)]
(-> file (-> file
(update :data blob/decode) (update :data blob/decode)
(update :data feat.fdata/process-pointers deref) (update :data feat.fdata/process-pointers deref)
(update :data feat.fdata/process-objects (partial into {})) (update :data feat.fdata/process-objects (partial into {}))
(update :data assoc ::id file-id)
(update :data blob/encode))))) (update :data blob/encode)))))
(defn take-file-snapshot! (defn- generate-snapshot-label
[cfg {:keys [file-id label ::rpc/profile-id]}]
(let [file (get-file cfg file-id)
id (uuid/next)]
(l/debug :hint "creating file snapshot"
:file-id (str file-id)
:label label)
(db/insert! cfg :file-change
{:id id
:revn (:revn file)
:data (:data file)
:version (:version file)
:features (:features file)
:profile-id profile-id
:file-id (:id file)
:label label}
{::db/return-keys false})
{:id id :label label}))
(defn generate-snapshot-label
[] []
(let [ts (-> (dt/now) (let [ts (-> (dt/now)
(dt/format-instant) (dt/format-instant)
@ -200,17 +80,218 @@
(str/rtrim "Z"))] (str/rtrim "Z"))]
(str "snapshot-" ts))) (str "snapshot-" ts)))
(def ^:private schema:take-file-snapshot (defn create-file-snapshot!
[:map [:file-id ::sm/uuid]]) [cfg profile-id file-id label]
(let [file (get-file cfg file-id)
(sv/defmethod ::take-file-snapshot ;; NOTE: the end user can never provide the label as the `:system`
;; keyword, because the validator constrains label to always be a
;; string; the keyword is used to signal a special case
created-by
(if (= label :system)
"system"
"user")
deleted-at
(if (= label :system)
(dt/plus (dt/now) (cf/get-deletion-delay))
nil)
label
(if (= label :system)
(str "internal/snapshot/" (:revn file))
(or label (generate-snapshot-label)))
snapshot-id
(uuid/next)]
(-> cfg
(assoc ::quotes/profile-id profile-id)
(assoc ::quotes/project-id (:project-id file))
(assoc ::quotes/team-id (:team-id file))
(assoc ::quotes/file-id (:id file))
(quotes/check! {::quotes/id ::quotes/snapshots-per-file}
{::quotes/id ::quotes/snapshots-per-team}))
(l/debug :hint "creating file snapshot"
:file-id (str file-id)
:id (str snapshot-id)
:label label)
(db/insert! cfg :file-change
{:id snapshot-id
:revn (:revn file)
:data (:data file)
:version (:version file)
:features (:features file)
:profile-id profile-id
:file-id (:id file)
:label label
:deleted-at deleted-at
:created-by created-by}
{::db/return-keys false})
{:id snapshot-id :label label}))
(def ^:private schema:create-file-snapshot
[:map
[:file-id ::sm/uuid]
[:label {:optional true} :string]])
(sv/defmethod ::create-file-snapshot
{::doc/added "1.20" {::doc/added "1.20"
::doc/skip true ::sm/params schema:create-file-snapshot}
::sm/params schema:take-file-snapshot} [cfg {:keys [::rpc/profile-id file-id label]}]
[cfg {:keys [::rpc/profile-id] :as params}] (db/tx-run! cfg
(check-authorized! cfg profile-id) (fn [{:keys [::db/conn] :as cfg}]
(db/tx-run! cfg (fn [cfg] (files/check-edition-permissions! conn profile-id file-id)
(let [params (update params :label (fn [label] (create-file-snapshot! cfg profile-id file-id label))))
(or label (generate-snapshot-label))))]
(take-file-snapshot! cfg params)))))
(defn restore-file-snapshot!
[{:keys [::db/conn ::mbus/msgbus] :as cfg} file-id snapshot-id]
(let [storage (sto/resolve cfg {::db/reuse-conn true})
file (files/get-minimal-file conn file-id {::db/for-update true})
vern (rand-int Integer/MAX_VALUE)
snapshot (some->> (db/get* conn :file-change
{:file-id file-id
:id snapshot-id}
{::db/for-share true})
(feat.fdata/resolve-file-data cfg))]
(when-not snapshot
(ex/raise :type :not-found
:code :snapshot-not-found
:hint "unable to find snapshot with the provided label"
:snapshot-id snapshot-id
:file-id file-id))
(when-not (:data snapshot)
(ex/raise :type :validation
:code :snapshot-without-data
:hint "snapshot has no data"
:label (:label snapshot)
:file-id file-id))
(l/dbg :hint "restoring snapshot"
:file-id (str file-id)
:label (:label snapshot)
:snapshot-id (str (:id snapshot)))
;; If the file was already offloaded, on restoring the snapshot
;; we are going to replace the file data, so we need to touch
;; the old referenced storage object to avoid possible leaks
(when (feat.fdata/offloaded? file)
(sto/touch-object! storage (:data-ref-id file)))
(db/update! conn :file
{:data (:data snapshot)
:revn (inc (:revn file))
:vern vern
:version (:version snapshot)
:data-backend nil
:data-ref-id nil
:has-media-trimmed false
:features (:features snapshot)}
{:id file-id})
;; clean object thumbnails
(let [sql (str "update file_tagged_object_thumbnail "
" set deleted_at = now() "
" where file_id=? returning media_id")
res (db/exec! conn [sql file-id])]
(doseq [media-id (into #{} (keep :media-id) res)]
(sto/touch-object! storage media-id)))
;; clean file thumbnails
(let [sql (str "update file_thumbnail "
" set deleted_at = now() "
" where file_id=? returning media_id")
res (db/exec! conn [sql file-id])]
(doseq [media-id (into #{} (keep :media-id) res)]
(sto/touch-object! storage media-id)))
;; Send the clients a notification to reload the file
(mbus/pub! msgbus
:topic (:id file)
:message {:type :file-restore
:file-id (:id file)
:vern vern})
{:id (:id snapshot)
:label (:label snapshot)}))
(def ^:private schema:restore-file-snapshot
[:map {:title "restore-file-snapshot"}
[:file-id ::sm/uuid]
[:id ::sm/uuid]])
(sv/defmethod ::restore-file-snapshot
{::doc/added "1.20"
::sm/params schema:restore-file-snapshot}
[cfg {:keys [::rpc/profile-id file-id id] :as params}]
(db/tx-run! cfg
(fn [{:keys [::db/conn] :as cfg}]
(files/check-edition-permissions! conn profile-id file-id)
(create-file-snapshot! cfg profile-id file-id :system)
(restore-file-snapshot! cfg file-id id))))
(def ^:private schema:update-file-snapshot
[:map {:title "update-file-snapshot"}
[:id ::sm/uuid]
[:label ::sm/text]])
(defn- update-file-snapshot!
[conn snapshot-id label]
(-> (db/update! conn :file-change
{:label label
:created-by "user"
:deleted-at nil}
{:id snapshot-id}
{::db/return-keys true})
(dissoc :data :features)))
(defn- get-snapshot
"Get a minimal snapshot from database and lock for update"
[conn id]
(db/get conn :file-change
{:id id}
{::sql/columns [:id :file-id :created-by :deleted-at]
::db/for-update true}))
(sv/defmethod ::update-file-snapshot
{::doc/added "1.20"
::sm/params schema:update-file-snapshot}
[cfg {:keys [::rpc/profile-id id label]}]
(db/tx-run! cfg
(fn [{:keys [::db/conn]}]
(let [snapshot (get-snapshot conn id)]
(files/check-edition-permissions! conn profile-id (:file-id snapshot))
(update-file-snapshot! conn id label)))))
(def ^:private schema:remove-file-snapshot
[:map {:title "remove-file-snapshot"}
[:id ::sm/uuid]])
(defn- delete-file-snapshot!
[conn snapshot-id]
(db/update! conn :file-change
{:deleted-at (dt/now)}
{:id snapshot-id}
{::db/return-keys false})
nil)
(sv/defmethod ::delete-file-snapshot
{::doc/added "1.20"
::sm/params schema:remove-file-snapshot}
[cfg {:keys [::rpc/profile-id id]}]
(db/tx-run! cfg
(fn [{:keys [::db/conn]}]
(let [snapshot (get-snapshot conn id)]
(files/check-edition-permissions! conn profile-id (:file-id snapshot))
(when (not= (:created-by snapshot) "user")
(ex/raise :type :validation
:code :system-snapshots-cant-be-deleted
:snapshot-id id
:profile-id profile-id))
(delete-file-snapshot! conn id)))))
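
;; Sketch of how create-file-snapshot! above derives created-by, deleted-at and
;; label from the special :system marker versus a user-provided label; the
;; 7-day delay is a placeholder for (cf/get-deletion-delay), an assumption here.
(import '(java.time Instant Duration))

(defn snapshot-attrs
  [label revn]
  (if (= label :system)
    {:created-by "system"
     :deleted-at (.plus (Instant/now) (Duration/ofDays 7))
     :label      (str "internal/snapshot/" revn)}
    {:created-by "user"
     :deleted-at nil
     :label      (or label (str "snapshot-" (Instant/now)))}))

(snapshot-attrs :system 42)  ;; => expiring, system-created, internal label
(snapshot-attrs nil 42)      ;; => permanent, user-created, generated label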

View file

@ -34,7 +34,7 @@
[app.util.pointer-map :as pmap] [app.util.pointer-map :as pmap]
[app.util.services :as sv] [app.util.services :as sv]
[app.util.time :as dt] [app.util.time :as dt]
[app.worker :as-alias wrk] [app.worker :as wrk]
[clojure.set :as set] [clojure.set :as set]
[promesa.exec :as px])) [promesa.exec :as px]))
@ -44,7 +44,6 @@
(declare ^:private update-file*) (declare ^:private update-file*)
(declare ^:private process-changes-and-validate) (declare ^:private process-changes-and-validate)
(declare ^:private take-snapshot?) (declare ^:private take-snapshot?)
(declare ^:private delete-old-snapshots!)
;; PUBLIC API; intended to be used outside of this module ;; PUBLIC API; intended to be used outside of this module
(declare update-file!) (declare update-file!)
@ -60,6 +59,7 @@
[:id ::sm/uuid] [:id ::sm/uuid]
[:session-id ::sm/uuid] [:session-id ::sm/uuid]
[:revn {:min 0} ::sm/int] [:revn {:min 0} ::sm/int]
[:vern {:min 0} ::sm/int]
[:features {:optional true} ::cfeat/features] [:features {:optional true} ::cfeat/features]
[:changes {:optional true} [:vector ::cpc/change]] [:changes {:optional true} [:vector ::cpc/change]]
[:changes-with-metadata {:optional true} [:changes-with-metadata {:optional true}
@ -157,6 +157,14 @@
tpoint (dt/tpoint)] tpoint (dt/tpoint)]
(when (not= (:vern params)
(:vern file))
(ex/raise :type :validation
:code :vern-conflict
:hint "A different version has been restored for the file."
:context {:incoming-revn (:revn params)
:stored-revn (:revn file)}))
(when (> (:revn params) (when (> (:revn params)
(:revn file)) (:revn file))
(ex/raise :type :validation (ex/raise :type :validation
@ -215,23 +223,34 @@
(let [storage (sto/resolve cfg ::db/reuse-conn true)] (let [storage (sto/resolve cfg ::db/reuse-conn true)]
(some->> (:data-ref-id file) (sto/touch-object! storage)))) (some->> (:data-ref-id file) (sto/touch-object! storage))))
;; TODO: move this to asynchronous task (-> cfg
(when (::snapshot-data file) (assoc ::wrk/task :file-xlog-gc)
(delete-old-snapshots! cfg file)) (assoc ::wrk/label (str "xlog:" (:id file)))
(assoc ::wrk/params {:file-id (:id file)})
(assoc ::wrk/delay (dt/duration "5m"))
(assoc ::wrk/dedupe true)
(assoc ::wrk/priority 1)
(wrk/submit!))
(persist-file! cfg file) (persist-file! cfg file)
(let [params (assoc params :file file) (let [params (assoc params :file file)
response {:revn (:revn file) response {:revn (:revn file)
:lagged (get-lagged-changes conn params)} :lagged (get-lagged-changes conn params)}
features (db/create-array conn "text" (:features file))] features (db/create-array conn "text" (:features file))
deleted-at (if (::snapshot-data file)
(dt/plus timestamp (cf/get-deletion-delay))
(dt/plus timestamp (dt/duration {:hours 1})))]
;; Insert change (xlog) ;; Insert change (xlog) with deleted_at set to a future date to
;; make them automatically eligible for GC once they expire
(db/insert! conn :file-change (db/insert! conn :file-change
{:id (uuid/next) {:id (uuid/next)
:session-id session-id :session-id session-id
:profile-id profile-id :profile-id profile-id
:created-at timestamp :created-at timestamp
:updated-at timestamp
:deleted-at deleted-at
:file-id (:id file) :file-id (:id file)
:revn (:revn file) :revn (:revn file)
:version (:version file) :version (:version file)
@ -449,33 +468,6 @@
(> (inst-ms (dt/diff modified-at (dt/now))) (> (inst-ms (dt/diff modified-at (dt/now)))
(inst-ms timeout)))))) (inst-ms timeout))))))
;; Get the latest available snapshots without exceeding the total
;; snapshot limit.
(def ^:private sql:get-latest-snapshots
"SELECT fch.id, fch.created_at
FROM file_change AS fch
WHERE fch.file_id = ?
AND fch.label LIKE 'internal/%'
ORDER BY fch.created_at DESC
LIMIT ?")
;; Mark all snapshots that are outside the allowed total threshold
;; available for the GC.
(def ^:private sql:delete-snapshots
"UPDATE file_change
SET label = NULL
WHERE file_id = ?
AND label LIKE 'internal/%'
AND created_at < ?")
(defn- delete-old-snapshots!
[{:keys [::db/conn] :as cfg} {:keys [id] :as file}]
(when-let [snapshots (not-empty (db/exec! conn [sql:get-latest-snapshots id
(cf/get :auto-file-snapshot-total 10)]))]
(let [last-date (-> snapshots peek :created-at)
result (db/exec-one! conn [sql:delete-snapshots id last-date])]
(l/trc :hint "delete old snapshots" :file-id (str id) :total (db/get-update-count result)))))
(def ^:private sql:lagged-changes (def ^:private sql:lagged-changes
"select s.id, s.revn, s.file_id, "select s.id, s.revn, s.file_id,
s.session_id, s.changes s.session_id, s.changes
@ -502,6 +494,7 @@
:file-id (:id file) :file-id (:id file)
:session-id (:session-id params) :session-id (:session-id params)
:revn (:revn file) :revn (:revn file)
:vern (:vern file)
:changes changes}) :changes changes})
(when (and (:is-shared file) (seq lchanges)) (when (and (:is-shared file) (seq lchanges))
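
;; Minimal sketch of the new vern guard in update-file: when the client's vern
;; does not match the stored one, a snapshot restore happened concurrently and
;; the update is rejected (ex-info stands in for app.common.exceptions/raise).
(defn check-vern!
  [file params]
  (when (not= (:vern params) (:vern file))
    (throw (ex-info "vern-conflict: a different version has been restored"
                    {:incoming-revn (:revn params)
                     :stored-revn   (:revn file)}))))

(check-vern! {:revn 10 :vern 555} {:revn 11 :vern 555}) ;; => nil, update proceeds
;; (check-vern! {:revn 10 :vern 555} {:revn 11 :vern 1}) ;; => throws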

View file

@ -176,7 +176,7 @@
(binding [bfc/*state* (volatile! {:index {team-id (uuid/next)}})] (binding [bfc/*state* (volatile! {:index {team-id (uuid/next)}})]
(let [projs (bfc/get-team-projects cfg team-id) (let [projs (bfc/get-team-projects cfg team-id)
files (bfc/get-team-files cfg team-id) files (bfc/get-team-files-ids cfg team-id)
frels (bfc/get-files-rels cfg files) frels (bfc/get-files-rels cfg files)
team (-> (db/get-by-id conn :team team-id) team (-> (db/get-by-id conn :team team-id)
@ -396,14 +396,15 @@
(defn clone-template (defn clone-template
[cfg {:keys [project-id profile-id] :as params} template] [cfg {:keys [project-id profile-id] :as params} template]
(db/tx-run! cfg (fn [{:keys [::db/conn ::wrk/executor] :as cfg}] (db/tx-run! cfg (fn [{:keys [::db/conn ::wrk/executor] :as cfg}]
;; NOTE: the import process performs some operations that ;; NOTE: the import process performs some operations
;; are not very friendly with virtual threads, so to avoid ;; that are not very friendly with virtual threads, so to
;; unexpected blocking of other concurrent operations we ;; avoid unexpected blocking of other concurrent operations
;; dispatch that operation to a dedicated executor. ;; we dispatch that operation to a dedicated executor.
(let [cfg (-> cfg (let [cfg (-> cfg
(assoc ::bf.v1/project-id project-id) (assoc ::bf.v1/project-id project-id)
(assoc ::bf.v1/profile-id profile-id)) (assoc ::bf.v1/profile-id profile-id)
result (px/invoke! executor (partial bf.v1/import-files! cfg template))] (assoc ::bf.v1/input template))
result (px/invoke! executor (partial bf.v1/import-files! cfg))]
(db/update! conn :project (db/update! conn :project
{:modified-at (dt/now)} {:modified-at (dt/now)}

View file

@ -216,7 +216,7 @@
{:response-type :input-stream :sync? true}) {:response-type :input-stream :sync? true})
{:keys [size mtype]} (parse-and-validate response) {:keys [size mtype]} (parse-and-validate response)
path (tmp/tempfile :prefix "penpot.media.download.") path (tmp/tempfile :prefix "penpot.media.download.")
written (io/write-to-file! body path :size size)] written (io/write* path body :size size)]
(when (not= written size) (when (not= written size)
(ex/raise :type :internal (ex/raise :type :internal

View file

@ -222,7 +222,7 @@
::webhooks/event? true} ::webhooks/event? true}
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id id team-id is-pinned] :as params}] [{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id id team-id is-pinned] :as params}]
(db/with-atomic [conn pool] (db/with-atomic [conn pool]
(check-edition-permissions! conn profile-id id) (check-read-permissions! conn profile-id id)
(db/exec-one! conn [sql:update-project-pin team-id id profile-id is-pinned is-pinned]) (db/exec-one! conn [sql:update-project-pin team-id id profile-id is-pinned is-pinned])
nil)) nil))

View file

@ -12,6 +12,7 @@
[app.common.features :as cfeat] [app.common.features :as cfeat]
[app.common.logging :as l] [app.common.logging :as l]
[app.common.schema :as sm] [app.common.schema :as sm]
[app.common.types.team :as tt]
[app.common.uuid :as uuid] [app.common.uuid :as uuid]
[app.config :as cf] [app.config :as cf]
[app.db :as db] [app.db :as db]
@ -20,6 +21,7 @@
[app.loggers.audit :as audit] [app.loggers.audit :as audit]
[app.main :as-alias main] [app.main :as-alias main]
[app.media :as media] [app.media :as media]
[app.msgbus :as mbus]
[app.rpc :as-alias rpc] [app.rpc :as-alias rpc]
[app.rpc.commands.profile :as profile] [app.rpc.commands.profile :as profile]
[app.rpc.doc :as-alias doc] [app.rpc.doc :as-alias doc]
@ -605,14 +607,8 @@
nil))) nil)))
;; --- Mutation: Team Update Role ;; --- Mutation: Team Update Role
;; Temporarily disabled viewer role
;; https://tree.taiga.io/project/penpot/issue/1083
(def valid-roles
#{:owner :admin :editor #_:viewer})
(def schema:role (def schema:role
[::sm/one-of valid-roles]) [::sm/one-of tt/valid-roles])
(defn role->params (defn role->params
[role] [role]
@ -623,7 +619,7 @@
:viewer {:is-owner false :is-admin false :can-edit false})) :viewer {:is-owner false :is-admin false :can-edit false}))
(defn update-team-member-role (defn update-team-member-role
[conn {:keys [profile-id team-id member-id role] :as params}] [{:keys [::db/conn ::mbus/msgbus]} {:keys [profile-id team-id member-id role] :as params}]
;; We retrieve all team members instead of querying the ;; We retrieve all team members instead of querying the
;; database for a single member. This is just for ;; database for a single member. This is just for
;; convenience; if this becomes a bottleneck or problematic, ;; convenience; if this becomes a bottleneck or problematic,
@ -631,7 +627,6 @@
(let [perms (get-permissions conn profile-id team-id) (let [perms (get-permissions conn profile-id team-id)
members (get-team-members conn team-id) members (get-team-members conn team-id)
member (d/seek #(= member-id (:id %)) members) member (d/seek #(= member-id (:id %)) members)
is-owner? (:is-owner perms) is-owner? (:is-owner perms)
is-admin? (:is-admin perms)] is-admin? (:is-admin perms)]
@ -655,6 +650,13 @@
(ex/raise :type :validation (ex/raise :type :validation
:code :cant-promote-to-owner)) :code :cant-promote-to-owner))
(mbus/pub! msgbus
:topic member-id
:message {:type :team-role-change
:topic member-id
:team-id team-id
:role role})
(let [params (role->params role)] (let [params (role->params role)]
;; Only allow single owner on team ;; Only allow single owner on team
(when (= role :owner) (when (= role :owner)
@ -678,9 +680,8 @@
(sv/defmethod ::update-team-member-role (sv/defmethod ::update-team-member-role
{::doc/added "1.17" {::doc/added "1.17"
::sm/params schema:update-team-member-role} ::sm/params schema:update-team-member-role}
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id] :as params}] [cfg {:keys [::rpc/profile-id] :as params}]
(db/with-atomic [conn pool] (db/tx-run! cfg update-team-member-role (assoc params :profile-id profile-id)))
(update-team-member-role conn (assoc params :profile-id profile-id))))
;; --- Mutation: Delete Team Member ;; --- Mutation: Delete Team Member
@ -692,9 +693,10 @@
(sv/defmethod ::delete-team-member (sv/defmethod ::delete-team-member
{::doc/added "1.17" {::doc/added "1.17"
::sm/params schema:delete-team-member} ::sm/params schema:delete-team-member}
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id team-id member-id] :as params}] [{:keys [::db/pool ::mbus/msgbus] :as cfg} {:keys [::rpc/profile-id team-id member-id] :as params}]
(db/with-atomic [conn pool] (db/with-atomic [conn pool]
(let [perms (get-permissions conn profile-id team-id)] (let [team (get-team pool :profile-id profile-id :team-id team-id)
perms (get-permissions conn profile-id team-id)]
(when-not (or (:is-owner perms) (when-not (or (:is-owner perms)
(:is-admin perms)) (:is-admin perms))
(ex/raise :type :validation (ex/raise :type :validation
@ -707,6 +709,13 @@
(db/delete! conn :team-profile-rel {:profile-id member-id (db/delete! conn :team-profile-rel {:profile-id member-id
:team-id team-id}) :team-id team-id})
(mbus/pub! msgbus
:topic member-id
:message {:type :team-membership-change
:change :removed
:team-id team-id
:team-name (:name team)})
nil))) nil)))
;; --- Mutation: Update Team Photo ;; --- Mutation: Update Team Photo
@ -724,6 +733,7 @@
::sm/params schema:update-team-photo} ::sm/params schema:update-team-photo}
[cfg {:keys [::rpc/profile-id file] :as params}] [cfg {:keys [::rpc/profile-id file] :as params}]
;; Validate incoming mime type ;; Validate incoming mime type
(media/validate-media-type! file #{"image/jpeg" "image/png" "image/webp"}) (media/validate-media-type! file #{"image/jpeg" "image/png" "image/webp"})
(update-team-photo cfg (assoc params :profile-id profile-id))) (update-team-photo cfg (assoc params :profile-id profile-id)))
@ -789,7 +799,7 @@
[:map [:map
[:id ::sm/uuid] [:id ::sm/uuid]
[:fullname :string]]] [:fullname :string]]]
[:role [::sm/one-of valid-roles]] [:role [::sm/one-of tt/valid-roles]]
[:email ::sm/email]]) [:email ::sm/email]])
(def ^:private check-create-invitation-params! (def ^:private check-create-invitation-params!
@ -1115,7 +1125,7 @@
::sm/params schema:update-team-invitation-role} ::sm/params schema:update-team-invitation-role}
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id team-id email role] :as params}] [{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id team-id email role] :as params}]
(db/with-atomic [conn pool] (db/with-atomic [conn pool]
(let [perms (get-permissions conn profile-id team-id)] (let [perms (get-permissions conn profile-id team-id)]
(when-not (:is-admin perms) (when-not (:is-admin perms)
(ex/raise :type :validation (ex/raise :type :validation
@ -1124,6 +1134,7 @@
(db/update! conn :team-invitation (db/update! conn :team-invitation
{:role (name role) :updated-at (dt/now)} {:role (name role) :updated-at (dt/now)}
{:team-id team-id :email-to (profile/clean-email email)}) {:team-id team-id :email-to (profile/clean-email email)})
nil))) nil)))
;; --- Mutation: Delete invitation ;; --- Mutation: Delete invitation

View file

@ -15,12 +15,27 @@
[app.http.client :as http] [app.http.client :as http]
[app.loggers.webhooks :as webhooks] [app.loggers.webhooks :as webhooks]
[app.rpc :as-alias rpc] [app.rpc :as-alias rpc]
[app.rpc.commands.teams :refer [check-edition-permissions! check-read-permissions!]] [app.rpc.commands.teams :refer [check-read-permissions!] :as t]
[app.rpc.doc :as-alias doc] [app.rpc.doc :as-alias doc]
[app.rpc.permissions :as perms]
[app.util.services :as sv] [app.util.services :as sv]
[app.util.time :as dt] [app.util.time :as dt]
[cuerdas.core :as str])) [cuerdas.core :as str]))
(defn get-webhooks-permissions
[conn profile-id team-id creator-id]
(let [permissions (t/get-permissions conn profile-id team-id)
can-edit (boolean (or (:can-edit permissions)
(= profile-id creator-id)))]
(assoc permissions :can-edit can-edit)))
(def has-webhook-edit-permissions?
(perms/make-edition-predicate-fn get-webhooks-permissions))
(def check-webhook-edition-permissions!
(perms/make-check-fn has-webhook-edit-permissions?))
(defn decode-row (defn decode-row
[{:keys [uri] :as row}] [{:keys [uri] :as row}]
(cond-> row (cond-> row
@ -65,11 +80,12 @@
max-hooks-for-team))))) max-hooks-for-team)))))
(defn- insert-webhook! (defn- insert-webhook!
[{:keys [::db/pool]} {:keys [team-id uri mtype is-active] :as params}] [{:keys [::db/pool]} {:keys [team-id uri mtype is-active ::rpc/profile-id] :as params}]
(-> (db/insert! pool :webhook (-> (db/insert! pool :webhook
{:id (uuid/next) {:id (uuid/next)
:team-id team-id :team-id team-id
:uri (str uri) :uri (str uri)
:profile-id profile-id
:is-active is-active :is-active is-active
:mtype mtype}) :mtype mtype})
(decode-row))) (decode-row)))
@ -101,7 +117,7 @@
{::doc/added "1.17" {::doc/added "1.17"
::sm/params schema:create-webhook} ::sm/params schema:create-webhook}
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id team-id] :as params}] [{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id team-id] :as params}]
(check-edition-permissions! pool profile-id team-id) (check-webhook-edition-permissions! pool profile-id team-id profile-id)
(validate-quotes! cfg params) (validate-quotes! cfg params)
(validate-webhook! cfg nil params) (validate-webhook! cfg nil params)
(insert-webhook! cfg params)) (insert-webhook! cfg params))
@ -118,7 +134,7 @@
::sm/params schema:update-webhook} ::sm/params schema:update-webhook}
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id id] :as params}] [{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id id] :as params}]
(let [whook (-> (db/get pool :webhook {:id id}) (decode-row))] (let [whook (-> (db/get pool :webhook {:id id}) (decode-row))]
(check-edition-permissions! pool profile-id (:team-id whook)) (check-webhook-edition-permissions! pool profile-id (:team-id whook) (:profile-id whook))
(validate-webhook! cfg whook params) (validate-webhook! cfg whook params)
(update-webhook! cfg whook params))) (update-webhook! cfg whook params)))
@ -132,15 +148,17 @@
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id id]}] [{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id id]}]
(db/with-atomic [conn pool] (db/with-atomic [conn pool]
(let [whook (-> (db/get conn :webhook {:id id}) decode-row)] (let [whook (-> (db/get conn :webhook {:id id}) decode-row)]
(check-edition-permissions! conn profile-id (:team-id whook)) (check-webhook-edition-permissions! conn profile-id (:team-id whook) (:profile-id whook))
(db/delete! conn :webhook {:id id}) (db/delete! conn :webhook {:id id})
nil))) nil)))
;; --- Query: Webhooks ;; --- Query: Webhooks
(def sql:get-webhooks (def sql:get-webhooks
"select id, uri, mtype, is_active, error_code, error_count "SELECT id, uri, mtype, is_active, error_code, error_count, profile_id
from webhook where team_id = ? order by uri") FROM webhook
WHERE team_id = ?
ORDER BY uri")
(def ^:private schema:get-webhooks (def ^:private schema:get-webhooks
[:map {:title "get-webhooks"} [:map {:title "get-webhooks"}

View file

@ -29,7 +29,7 @@
[app.util.services :as-alias sv] [app.util.services :as-alias sv]
[buddy.core.codecs :as bc] [buddy.core.codecs :as bc]
[buddy.core.hash :as bh] [buddy.core.hash :as bh]
[ring.response :as-alias rres])) [yetti.response :as-alias yres]))
(def (def
^{:dynamic true ^{:dynamic true
@ -59,7 +59,7 @@
key' (when (some? object) key' (when (some? object)
(->> object (key-fn params) (fmt-key)))] (->> object (key-fn params) (fmt-key)))]
(if (and (some? key) (= key key')) (if (and (some? key) (= key key'))
(fn [_] {::rres/status 304}) (fn [_] {::yres/status 304})
(let [params (if (some? object) (let [params (if (some? object)
(assoc params ::object object) (assoc params ::object object)
params) params)

View file

@ -27,7 +27,7 @@
[cuerdas.core :as str] [cuerdas.core :as str]
[integrant.core :as ig] [integrant.core :as ig]
[pretty-spec.core :as ps] [pretty-spec.core :as ps]
[ring.response :as-alias rres])) [yetti.response :as-alias yres]))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; DOC (human readable) ;; DOC (human readable)
@ -87,11 +87,11 @@
(let [params (:query-params request) (let [params (:query-params request)
pstyle (:type params "js") pstyle (:type params "js")
context (assoc context :param-style pstyle)] context (assoc context :param-style pstyle)]
{::rres/status 200 {::yres/status 200
::rres/body (-> (io/resource "app/templates/api-doc.tmpl") ::yres/body (-> (io/resource "app/templates/api-doc.tmpl")
(tmpl/render context))})) (tmpl/render context))}))
(fn [_] (fn [_]
{::rres/status 404}))) {::yres/status 404})))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; OPENAPI / SWAGGER (v3.1) ;; OPENAPI / SWAGGER (v3.1)
@ -175,12 +175,12 @@
[context] [context]
(if (contains? cf/flags :backend-openapi-doc) (if (contains? cf/flags :backend-openapi-doc)
(fn [_] (fn [_]
{::rres/status 200 {::yres/status 200
::rres/headers {"content-type" "application/json; charset=utf-8"} ::yres/headers {"content-type" "application/json; charset=utf-8"}
::rres/body (json/encode context)}) ::yres/body (json/encode context)})
(fn [_] (fn [_]
{::rres/status 404}))) {::yres/status 404})))
(defn openapi-handler (defn openapi-handler
[] []
@ -191,12 +191,12 @@
context {:public-uri (cf/get :public-uri) context {:public-uri (cf/get :public-uri)
:swagger-js swagger-js :swagger-js swagger-js
:swagger-css swagger-cs}] :swagger-css swagger-cs}]
{::rres/status 200 {::yres/status 200
::rres/headers {"content-type" "text/html"} ::yres/headers {"content-type" "text/html"}
::rres/body (-> (io/resource "app/templates/openapi.tmpl") ::yres/body (-> (io/resource "app/templates/openapi.tmpl")
(tmpl/render context))})) (tmpl/render context))}))
(fn [_] (fn [_]
{::rres/status 404}))) {::yres/status 404})))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; MODULE INIT ;; MODULE INIT

View file

@ -11,7 +11,7 @@
[app.common.data.macros :as dm] [app.common.data.macros :as dm]
[app.http :as-alias http] [app.http :as-alias http]
[app.rpc :as-alias rpc] [app.rpc :as-alias rpc]
[ring.response :as-alias rres])) [yetti.response :as-alias yres]))
;; A utility wrapper object for wrapping service responses that do not ;; A utility wrapper object for wrapping service responses that do not
;; implement the IObj interface, making it possible to attach metadata to ;; implement the IObj interface, making it possible to attach metadata to
@ -77,4 +77,4 @@
(fn [_ response] (fn [_ response]
(let [exp (if (integer? max-age) max-age (inst-ms max-age)) (let [exp (if (integer? max-age) max-age (inst-ms max-age))
val (dm/fmt "max-age=%" (int (/ exp 1000.0)))] val (dm/fmt "max-age=%" (int (/ exp 1000.0)))]
(update response ::rres/headers assoc "cache-control" val))))) (update response ::yres/headers assoc "cache-control" val)))))

View file

@ -408,6 +408,70 @@
(assoc ::count-sql [sql:get-comments-per-file file-id]) (assoc ::count-sql [sql:get-comments-per-file file-id])
(generic-check!))) (generic-check!)))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; QUOTE: SNAPSHOTS-PER-FILE
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(def ^:private schema:snapshots-per-file
[:map
[::profile-id ::sm/uuid]
[::project-id ::sm/uuid]
[::team-id ::sm/uuid]
[::file-id ::sm/uuid]])
(def ^:private valid-snapshots-per-file-quote?
(sm/lazy-validator schema:snapshots-per-file))
(def ^:private sql:get-snapshots-per-file
"SELECT count(*) AS total
FROM file_change AS fc
WHERE fc.file_id = ?
AND fc.created_by = 'user'
AND fc.deleted_at IS NULL
AND fc.data IS NOT NULL")
(defmethod check-quote ::snapshots-per-file
[{:keys [::profile-id ::file-id ::team-id ::project-id ::target] :as quote}]
(assert (valid-snapshots-per-file-quote? quote) "invalid quote parameters")
(-> quote
(assoc ::default (cf/get :quotes-snapshots-per-file Integer/MAX_VALUE))
(assoc ::quote-sql [sql:get-quotes-4 target file-id profile-id project-id
profile-id team-id profile-id profile-id])
(assoc ::count-sql [sql:get-snapshots-per-file file-id])
(generic-check!)))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; QUOTE: SNAPSHOTS-PER-TEAM
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(def ^:private schema:snapshots-per-team
[:map
[::profile-id ::sm/uuid]
[::team-id ::sm/uuid]])
(def ^:private valid-snapshots-per-team-quote?
(sm/lazy-validator schema:snapshots-per-team))
(def ^:private sql:get-snapshots-per-team
"SELECT count(*) AS total
FROM file_change AS fc
JOIN file AS f ON (f.id = fc.file_id)
JOIN project AS p ON (p.id = f.project_id)
WHERE p.team_id = ?
AND fc.created_by = 'user'
AND fc.deleted_at IS NULL
AND fc.data IS NOT NULL")
(defmethod check-quote ::snapshots-per-team
[{:keys [::profile-id ::team-id ::target] :as quote}]
(assert (valid-snapshots-per-team-quote? quote) "invalid quote parameters")
(-> quote
(assoc ::default (cf/get :quotes-snapshots-per-team Integer/MAX_VALUE))
(assoc ::quote-sql [sql:get-quotes-2 target team-id profile-id profile-id])
(assoc ::count-sql [sql:get-snapshots-per-team team-id])
(generic-check!)))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; QUOTE: DEFAULT ;; QUOTE: DEFAULT
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
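
;; Illustrative-only sketch of the quota pattern used above: a multimethod
;; dispatches on the quote id and compares a counted total against a configured
;; limit; the real implementation builds SQL for both sides of the comparison.
(defmulti check-quote ::id)

(defmethod check-quote ::snapshots-per-file
  [{:keys [::total ::limit] :as quote}]
  (when (>= total limit)
    (throw (ex-info "max quote reached" {:quote ::snapshots-per-file
                                         :total total
                                         :limit limit})))
  quote)

(check-quote {::id ::snapshots-per-file ::total 3 ::limit 10}) ;; passes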

View file

@ -10,6 +10,7 @@
[app.db :as db] [app.db :as db]
[app.rpc :as-alias rpc] [app.rpc :as-alias rpc]
[app.rpc.climit :as-alias climit] [app.rpc.climit :as-alias climit]
[app.rpc.commands.files :as files]
[app.rpc.commands.files-update :as fupdate] [app.rpc.commands.files-update :as fupdate]
[app.rpc.commands.management :as management] [app.rpc.commands.management :as management]
[app.rpc.commands.profile :as profile] [app.rpc.commands.profile :as profile]
@ -51,9 +52,11 @@
:project-id (:default-project-id profile)} :project-id (:default-project-id profile)}
template-stream (tmpl/get-template-stream cfg "welcome") template-stream (tmpl/get-template-stream cfg "welcome")
file-id (-> (management/clone-template cfg params template-stream) file-id (-> (management/clone-template cfg params template-stream)
first)] first)
file-name (str fullname "'s first file")]
(db/tx-run! cfg (fn [{:keys [::db/conn] :as cfg}] (db/tx-run! cfg (fn [{:keys [::db/conn] :as cfg}]
(files/rename-file conn {:id file-id :name file-name})
(fupdate/update-file! cfg file-id update-welcome-shape fullname) (fupdate/update-file! cfg file-id update-welcome-shape fullname)
(profile/update-profile-props cfg id {:welcome-file-id file-id}) (profile/update-profile-props cfg id {:welcome-file-id file-id})
(db/exec-one! conn [sql:mark-file-object-thumbnails-deleted file-id]) (db/exec-one! conn [sql:mark-file-object-thumbnails-deleted file-id])

View file

@ -122,22 +122,19 @@
WHERE file_id = ANY(?) WHERE file_id = ANY(?)
AND id IS NOT NULL") AND id IS NOT NULL")
(defn get-file-snapshots (defn search-file-snapshots
"Get a seq parirs of file-id and snapshot-id for a set of files "Get a seq parirs of file-id and snapshot-id for a set of files
and specified label" and specified label"
[conn label ids] [conn file-ids label]
(db/exec! conn [sql:snapshots-with-file label (db/exec! conn [sql:snapshots-with-file label
(db/create-array conn "uuid" ids)])) (db/create-array conn "uuid" file-ids)]))
(defn take-team-snapshot! (defn take-team-snapshot!
[system team-id label] [system team-id label]
(let [conn (db/get-connection system)] (let [conn (db/get-connection system)]
(->> (feat.comp-v2/get-and-lock-team-files conn team-id) (->> (feat.comp-v2/get-and-lock-team-files conn team-id)
(map (fn [file-id] (reduce (fn [result file-id]
{:file-id file-id (fsnap/create-file-snapshot! system nil file-id label)
:label label}))
(reduce (fn [result params]
(fsnap/take-file-snapshot! conn params)
(inc result)) (inc result))
0)))) 0))))
@ -147,7 +144,7 @@
ids (->> (feat.comp-v2/get-and-lock-team-files conn team-id) ids (->> (feat.comp-v2/get-and-lock-team-files conn team-id)
(into #{})) (into #{}))
snap (get-file-snapshots conn label ids) snap (search-file-snapshots conn ids label)
ids' (into #{} (map :file-id) snap) ids' (into #{} (map :file-id) snap)
team (-> (feat.comp-v2/get-team conn team-id) team (-> (feat.comp-v2/get-team conn team-id)
@ -157,8 +154,8 @@
(throw (RuntimeException. "no uniform snapshot available"))) (throw (RuntimeException. "no uniform snapshot available")))
(feat.comp-v2/update-team! conn team) (feat.comp-v2/update-team! conn team)
(reduce (fn [result params] (reduce (fn [result {:keys [file-id id]}]
(fsnap/restore-file-snapshot! conn params) (fsnap/restore-file-snapshot! system file-id id)
(inc result)) (inc result))
0 0
snap))) snap)))
@ -167,7 +164,7 @@
[system file-id update-fn & {:keys [label validate? with-libraries?] :or {validate? true} :as opts}] [system file-id update-fn & {:keys [label validate? with-libraries?] :or {validate? true} :as opts}]
(when (string? label) (when (string? label)
(fsnap/take-file-snapshot! system {:file-id file-id :label label})) (fsnap/create-file-snapshot! system nil file-id label))
(let [conn (db/get-connection system) (let [conn (db/get-connection system)
file (get-file system file-id opts) file (get-file system file-id opts)

View file

@ -311,33 +311,29 @@
collectable file-changes entry." collectable file-changes entry."
[& {:keys [file-id label]}] [& {:keys [file-id label]}]
(let [file-id (h/parse-uuid file-id)] (let [file-id (h/parse-uuid file-id)]
(db/tx-run! main/system fsnap/take-file-snapshot! {:file-id file-id :label label}))) (db/tx-run! main/system fsnap/create-file-snapshot! {:file-id file-id :label label})))
(defn restore-file-snapshot! (defn restore-file-snapshot!
[file-id label] [file-id label]
(let [file-id (h/parse-uuid file-id)] (let [file-id (h/parse-uuid file-id)]
(db/tx-run! main/system (db/tx-run! main/system
(fn [{:keys [::db/conn] :as system}] (fn [{:keys [::db/conn] :as system}]
(when-let [snapshot (->> (h/get-file-snapshots conn label #{file-id}) (when-let [snapshot (->> (h/search-file-snapshots conn #{file-id} label)
(map :id) (map :id)
(first))] (first))]
(fsnap/restore-file-snapshot! system (fsnap/restore-file-snapshot! system file-id (:id snapshot)))))))
{:id (:id snapshot)
:file-id file-id}))))))
(defn list-file-snapshots! (defn list-file-snapshots!
[file-id & {:keys [limit]}] [file-id & {:as _}]
(let [file-id (h/parse-uuid file-id)] (let [file-id (h/parse-uuid file-id)]
(db/tx-run! main/system (db/tx-run! main/system
(fn [system] (fn [{:keys [::db/conn]}]
(let [params {:file-id file-id :limit limit}] (->> (fsnap/get-file-snapshots conn file-id)
(->> (fsnap/get-file-snapshots system (d/without-nils params)) (print-table [:label :id :revn :created-at]))))))
(print-table [:label :id :revn :created-at])))))))
(defn take-team-snapshot! (defn take-team-snapshot!
[team-id & {:keys [label rollback?] :or {rollback? true}}] [team-id & {:keys [label rollback?] :or {rollback? true}}]
(let [team-id (h/parse-uuid team-id) (let [team-id (h/parse-uuid team-id)]
label (or label (fsnap/generate-snapshot-label))]
(-> (assoc main/system ::db/rollback rollback?) (-> (assoc main/system ::db/rollback rollback?)
(db/tx-run! h/take-team-snapshot! team-id label)))) (db/tx-run! h/take-team-snapshot! team-id label))))

View file

@ -10,6 +10,7 @@
(:require (:require
[app.common.data :as d] [app.common.data :as d]
[app.common.data.macros :as dm] [app.common.data.macros :as dm]
[app.common.logging :as l]
[app.common.spec :as us] [app.common.spec :as us]
[app.common.uuid :as uuid] [app.common.uuid :as uuid]
[app.config :as cf] [app.config :as cf]
@ -19,6 +20,7 @@
[app.storage.s3 :as ss3] [app.storage.s3 :as ss3]
[app.util.time :as dt] [app.util.time :as dt]
[clojure.spec.alpha :as s] [clojure.spec.alpha :as s]
[cuerdas.core :as str]
[datoteka.fs :as fs] [datoteka.fs :as fs]
[integrant.core :as ig]) [integrant.core :as ig])
(:import (:import
@ -30,7 +32,17 @@
(case name (case name
:assets-fs :fs :assets-fs :fs
:assets-s3 :s3 :assets-s3 :s3
:fs))) nil)))
(def valid-buckets
#{"file-media-object"
"team-font-variant"
"file-object-thumbnail"
"file-thumbnail"
"profile"
"file-data"
"file-data-fragment"
"file-change"})
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; Storage Module State ;; Storage Module State
@ -52,11 +64,19 @@
(defmethod ig/init-key ::storage (defmethod ig/init-key ::storage
[_ {:keys [::backends ::db/pool] :as cfg}] [_ {:keys [::backends ::db/pool] :as cfg}]
(-> (d/without-nils cfg) (let [backend (or (get-legacy-backend)
(assoc ::backends (d/without-nils backends)) (cf/get :objects-storage-backend)
(assoc ::backend (or (get-legacy-backend) :fs)
(cf/get :objects-storage-backend :fs))) backends (d/without-nils backends)]
(assoc ::db/connectable pool)))
(l/dbg :hint "initialize"
:default (d/name backend)
:available (str/join "," (map d/name (keys backends))))
(-> (d/without-nils cfg)
(assoc ::backends backends)
(assoc ::backend backend)
(assoc ::db/connectable pool))))
(s/def ::backend keyword?) (s/def ::backend keyword?)
(s/def ::storage (s/def ::storage
@ -257,6 +277,8 @@
(pos? (db/get-update-count res)))) (pos? (db/get-update-count res))))
(dm/export impl/calculate-hash) (dm/export impl/calculate-hash)
(dm/export impl/get-hash)
(dm/export impl/get-size)
(defn configure (defn configure
[storage connectable] [storage connectable]
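
;; Sketch of the resolution order implemented in the ::storage init above: the
;; legacy assets-* backend name wins when present, then the configured
;; objects-storage-backend value, and only then the :fs default.
(defn resolve-backend
  [legacy configured]
  (or (case legacy
        :assets-fs :fs
        :assets-s3 :s3
        nil)
      configured
      :fs))

(resolve-backend :assets-s3 nil) ;; => :s3 (legacy setting wins)
(resolve-backend nil :s3)        ;; => :s3
(resolve-backend nil nil)        ;; => :fs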

View file

@ -6,7 +6,6 @@
(ns app.storage.fs (ns app.storage.fs
(:require (:require
[app.common.data.macros :as dm]
[app.common.exceptions :as ex] [app.common.exceptions :as ex]
[app.common.spec :as us] [app.common.spec :as us]
[app.common.uri :as u] [app.common.uri :as u]
@ -18,9 +17,13 @@
[datoteka.io :as io] [datoteka.io :as io]
[integrant.core :as ig]) [integrant.core :as ig])
(:import (:import
java.io.InputStream
java.io.OutputStream
java.nio.file.Files java.nio.file.Files
java.nio.file.Path)) java.nio.file.Path))
(set! *warn-on-reflection* true)
;; --- BACKEND INIT ;; --- BACKEND INIT
(s/def ::directory ::us/string) (s/def ::directory ::us/string)
@ -58,9 +61,9 @@
(when-not (fs/exists? (fs/parent full)) (when-not (fs/exists? (fs/parent full))
(fs/create-dir (fs/parent full))) (fs/create-dir (fs/parent full)))
(dm/with-open [src (io/input-stream content) (with-open [^InputStream src (io/input-stream content)]
dst (io/output-stream full)] (with-open [^OutputStream dst (io/output-stream full)]
(io/copy! src dst)) (io/copy src dst)))
object)) object))
@ -78,8 +81,8 @@
(defmethod impl/get-object-bytes :fs (defmethod impl/get-object-bytes :fs
[backend object] [backend object]
(dm/with-open [input (impl/get-object-data backend object)] (with-open [^InputStream input (impl/get-object-data backend object)]
(io/read-as-bytes input))) (io/read input)))
(defmethod impl/get-object-url :fs (defmethod impl/get-object-url :fs
[{:keys [::uri] :as backend} {:keys [id] :as object} _] [{:keys [::uri] :as backend} {:keys [id] :as object} _]
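
;; Generic sketch of the with-open copy pattern used by the :fs backend above,
;; using clojure.java.io in place of datoteka.io (an assumption made here so
;; the example stays self-contained).
(require '[clojure.java.io :as jio])

(defn copy-to-file!
  [content target]
  (with-open [src (jio/input-stream content)
              dst (jio/output-stream target)]
    (jio/copy src dst))
  target)

;; (copy-to-file! (byte-array [1 2 3]) "/tmp/example.bin")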

View file

@ -17,6 +17,7 @@
[app.storage.impl :as impl] [app.storage.impl :as impl]
[app.storage.tmp :as tmp] [app.storage.tmp :as tmp]
[app.util.time :as dt] [app.util.time :as dt]
[app.worker :as-alias wrk]
[clojure.java.io :as io] [clojure.java.io :as io]
[clojure.spec.alpha :as s] [clojure.spec.alpha :as s]
[datoteka.fs :as fs] [datoteka.fs :as fs]
@ -27,17 +28,15 @@
java.io.FilterInputStream java.io.FilterInputStream
java.io.InputStream java.io.InputStream
java.net.URI java.net.URI
java.nio.ByteBuffer
java.nio.file.Path java.nio.file.Path
java.time.Duration java.time.Duration
java.util.Collection java.util.Collection
java.util.Optional java.util.Optional
java.util.concurrent.Semaphore
org.reactivestreams.Subscriber org.reactivestreams.Subscriber
org.reactivestreams.Subscription
software.amazon.awssdk.core.ResponseBytes software.amazon.awssdk.core.ResponseBytes
software.amazon.awssdk.core.async.AsyncRequestBody software.amazon.awssdk.core.async.AsyncRequestBody
software.amazon.awssdk.core.async.AsyncResponseTransformer software.amazon.awssdk.core.async.AsyncResponseTransformer
software.amazon.awssdk.core.async.BlockingInputStreamAsyncRequestBody
software.amazon.awssdk.core.client.config.ClientAsyncConfiguration software.amazon.awssdk.core.client.config.ClientAsyncConfiguration
software.amazon.awssdk.core.client.config.SdkAdvancedAsyncClientOption software.amazon.awssdk.core.client.config.SdkAdvancedAsyncClientOption
software.amazon.awssdk.http.nio.netty.NettyNioAsyncHttpClient software.amazon.awssdk.http.nio.netty.NettyNioAsyncHttpClient
@ -59,6 +58,20 @@
software.amazon.awssdk.services.s3.presigner.model.GetObjectPresignRequest software.amazon.awssdk.services.s3.presigner.model.GetObjectPresignRequest
software.amazon.awssdk.services.s3.presigner.model.PresignedGetObjectRequest)) software.amazon.awssdk.services.s3.presigner.model.PresignedGetObjectRequest))
(def ^:private max-retries
"A maximum number of retries on internal operations"
3)
(def ^:private max-concurrency
"Maximum concurrent request to S3 service"
128)
(def ^:private max-pending-connection-acquires
20000)
(def default-timeout
(dt/duration {:seconds 30}))
(declare put-object) (declare put-object)
(declare get-object-bytes) (declare get-object-bytes)
(declare get-object-data) (declare get-object-data)
@ -80,7 +93,7 @@
(s/def ::io-threads ::us/integer) (s/def ::io-threads ::us/integer)
(defmethod ig/pre-init-spec ::backend [_] (defmethod ig/pre-init-spec ::backend [_]
(s/keys :opt [::region ::bucket ::prefix ::endpoint ::io-threads])) (s/keys :opt [::region ::bucket ::prefix ::endpoint ::io-threads ::wrk/executor]))
(defmethod ig/prep-key ::backend (defmethod ig/prep-key ::backend
[_ {:keys [::prefix ::region] :as cfg}] [_ {:keys [::prefix ::region] :as cfg}]
@ -128,18 +141,29 @@
[backend object] [backend object]
(us/assert! ::backend backend) (us/assert! ::backend backend)
(let [result (p/await (get-object-data backend object))] (loop [result (get-object-data backend object)
(if (ex/exception? result) retryn 0]
(cond
(ex/instance? NoSuchKeyException result)
(ex/raise :type :not-found
:code :object-not-found
:hint "s3 object not found"
:cause result)
:else
(throw result))
result))) (let [result (p/await result)]
(if (ex/exception? result)
(cond
(ex/instance? NoSuchKeyException result)
(ex/raise :type :not-found
:code :object-not-found
:hint "s3 object not found"
:object-id (:id object)
:object-path (impl/id->path (:id object))
:cause result)
(and (ex/instance? java.nio.file.FileAlreadyExistsException result)
(< retryn max-retries))
(recur (get-object-data backend object)
(inc retryn))
:else
(throw result))
result))))
(defmethod impl/get-object-bytes :s3 (defmethod impl/get-object-bytes :s3
[backend object] [backend object]
@ -163,18 +187,14 @@
;; --- HELPERS ;; --- HELPERS
(def default-timeout
(dt/duration {:seconds 30}))
(defn- lookup-region (defn- lookup-region
^Region ^Region
[region] [region]
(Region/of (name region))) (Region/of (name region)))
(defn- build-s3-client (defn- build-s3-client
[{:keys [::region ::endpoint ::io-threads]}] [{:keys [::region ::endpoint ::io-threads ::wrk/executor]}]
(let [executor (px/resolve-executor :virtual) (let [aconfig (-> (ClientAsyncConfiguration/builder)
aconfig (-> (ClientAsyncConfiguration/builder)
(.advancedOption SdkAdvancedAsyncClientOption/FUTURE_COMPLETION_EXECUTOR executor) (.advancedOption SdkAdvancedAsyncClientOption/FUTURE_COMPLETION_EXECUTOR executor)
(.build)) (.build))
@ -190,6 +210,8 @@
(.connectionTimeout default-timeout) (.connectionTimeout default-timeout)
(.readTimeout default-timeout) (.readTimeout default-timeout)
(.writeTimeout default-timeout) (.writeTimeout default-timeout)
(.maxConcurrency (int max-concurrency))
(.maxPendingConnectionAcquires (int max-pending-connection-acquires))
(.build)) (.build))
client (let [builder (S3AsyncClient/builder) client (let [builder (S3AsyncClient/builder)
@ -223,69 +245,38 @@
(.serviceConfiguration ^S3Configuration config) (.serviceConfiguration ^S3Configuration config)
(.build)))) (.build))))
(defn- upload-thread (defn- write-input-stream
[id subscriber sem content] [delegate input]
(px/thread (try
{:name "penpot/s3/uploader" (.writeInputStream ^BlockingInputStreamAsyncRequestBody delegate
:virtual true ^InputStream input)
:daemon true} (catch Throwable cause
(l/trace :hint "start upload thread" (l/error :hint "encountered error while writing input stream to service"
:object-id (str id) :cause cause))
:size (impl/get-size content) (finally
::l/sync? true) (.close ^InputStream input))))
(let [stream (io/input-stream content)
bsize (* 1024 64)
tpoint (dt/tpoint)]
(try
(loop []
(.acquire ^Semaphore sem 1)
(let [buffer (byte-array bsize)
readed (.read ^InputStream stream buffer)]
(when (pos? readed)
(let [data (ByteBuffer/wrap ^bytes buffer 0 readed)]
(.onNext ^Subscriber subscriber ^ByteBuffer data)
(when (= readed bsize)
(recur))))))
(.onComplete ^Subscriber subscriber)
(catch InterruptedException _
(l/trace :hint "interrupted upload thread"
:object-:id (str id)
::l/sync? true)
nil)
(catch Throwable cause
(.onError ^Subscriber subscriber cause))
(finally
(l/trace :hint "end upload thread"
:object-id (str id)
:elapsed (dt/format-duration (tpoint))
::l/sync? true)
(.close ^InputStream stream))))))
(defn- make-request-body (defn- make-request-body
[id content] [executor content]
(reify (let [size (impl/get-size content)]
AsyncRequestBody (reify
(contentLength [_] AsyncRequestBody
(Optional/of (long (impl/get-size content)))) (contentLength [_]
(Optional/of (long size)))
(^void subscribe [_ ^Subscriber subscriber]
(let [sem (Semaphore. 0)
thr (upload-thread id subscriber sem content)]
(.onSubscribe subscriber
(reify Subscription
(cancel [_]
(px/interrupt! thr)
(.release sem 1))
(request [_ n]
(.release sem (int n)))))))))
(^void subscribe [_ ^Subscriber subscriber]
(let [delegate (AsyncRequestBody/forBlockingInputStream (long size))
input (io/input-stream content)]
(px/run! executor (partial write-input-stream delegate input))
(.subscribe ^BlockingInputStreamAsyncRequestBody delegate
^Subscriber subscriber))))))
(defn- put-object (defn- put-object
[{:keys [::client ::bucket ::prefix]} {:keys [id] :as object} content] [{:keys [::client ::bucket ::prefix ::wrk/executor]} {:keys [id] :as object} content]
(let [path (dm/str prefix (impl/id->path id)) (let [path (dm/str prefix (impl/id->path id))
mdata (meta object) mdata (meta object)
mtype (:content-type mdata "application/octet-stream") mtype (:content-type mdata "application/octet-stream")
rbody (make-request-body id content) rbody (make-request-body executor content)
request (.. (PutObjectRequest/builder) request (.. (PutObjectRequest/builder)
(bucket bucket) (bucket bucket)
(contentType mtype) (contentType mtype)
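
The hunk above drops the hand-rolled Subscriber/Semaphore upload thread in favour of the SDK's blocking input-stream request body, fed from the worker executor. A minimal, self-contained sketch of that pattern (not taken from the commit; client, bucket, object-key and file are hypothetical placeholders):

(ns example.s3-blocking-upload
  "Sketch: stream a blocking InputStream to S3 through
  AsyncRequestBody/forBlockingInputStream."
  (:require [clojure.java.io :as io])
  (:import
   software.amazon.awssdk.core.async.AsyncRequestBody
   software.amazon.awssdk.core.async.BlockingInputStreamAsyncRequestBody
   software.amazon.awssdk.services.s3.S3AsyncClient
   software.amazon.awssdk.services.s3.model.PutObjectRequest))

(defn put-file!
  [^S3AsyncClient client bucket object-key file]
  (let [size     (.length (io/file file))
        body     (AsyncRequestBody/forBlockingInputStream (long size))
        request  (.. (PutObjectRequest/builder)
                     (bucket bucket)
                     (key object-key)
                     (build))
        ;; putObject subscribes to the request body and starts consuming it
        response (.putObject client request body)]
    ;; ...while the producer feeds the stream; writeInputStream blocks
    ;; until the SDK has drained it, mirroring write-input-stream above
    (with-open [input (io/input-stream file)]
      (.writeInputStream ^BlockingInputStreamAsyncRequestBody body input))
    @response))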

View file

@ -11,13 +11,16 @@
permanently delete these files (look at systemd-tempfiles)." permanently delete these files (look at systemd-tempfiles)."
(:require (:require
[app.common.logging :as l] [app.common.logging :as l]
[app.common.uuid :as uuid]
[app.util.time :as dt] [app.util.time :as dt]
[app.worker :as wrk] [app.worker :as wrk]
[clojure.spec.alpha :as s] [clojure.spec.alpha :as s]
[datoteka.fs :as fs] [datoteka.fs :as fs]
[integrant.core :as ig] [integrant.core :as ig]
[promesa.exec :as px] [promesa.exec :as px]
[promesa.exec.csp :as sp])) [promesa.exec.csp :as sp])
(:import
java.nio.file.Files))
(def default-tmp-dir "/tmp/penpot") (def default-tmp-dir "/tmp/penpot")
@ -76,11 +79,9 @@
[& {:keys [suffix prefix min-age] [& {:keys [suffix prefix min-age]
:or {prefix "penpot." :or {prefix "penpot."
suffix ".tmp"}}] suffix ".tmp"}}]
(let [path (fs/create-tempfile (let [attrs (fs/make-permissions "rw-r--r--")
:perms "rw-r--r--" path (fs/join default-tmp-dir (str prefix (uuid/next) suffix))
:dir default-tmp-dir path (Files/createFile path attrs)]
:suffix suffix
:prefix prefix)]
(fs/delete-on-exit! path) (fs/delete-on-exit! path)
(sp/offer! queue [path (some-> min-age dt/duration)]) (sp/offer! queue [path (some-> min-age dt/duration)])
path)) path))
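
The tempfile helper now creates the file explicitly with fixed POSIX permissions instead of going through fs/create-tempfile. The same idea in plain java.nio, without the datoteka helpers (directory, prefix and suffix below are just the defaults shown above):

(ns example.tmpfile
  (:import
   java.nio.file.Files
   java.nio.file.Path
   java.nio.file.attribute.FileAttribute
   java.nio.file.attribute.PosixFilePermissions))

(defn create-tempfile!
  "Create an empty file with rw-r--r-- permissions under `dir`."
  [^String dir prefix suffix]
  (let [perms (PosixFilePermissions/fromString "rw-r--r--")
        attr  (PosixFilePermissions/asFileAttribute perms)
        dpath (Path/of dir (into-array String []))
        fpath (.resolve dpath (str prefix (random-uuid) suffix))]
    ;; both calls take varargs of FileAttribute
    (Files/createDirectories dpath (into-array FileAttribute []))
    (Files/createFile fpath (into-array FileAttribute [attr]))))

;; (create-tempfile! "/tmp/penpot" "penpot." ".tmp")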

View file

@ -7,36 +7,32 @@
(ns app.svgo (ns app.svgo
"A SVG Optimizer service" "A SVG Optimizer service"
(:require (:require
[app.common.jsrt :as jsrt]
[app.common.logging :as l] [app.common.logging :as l]
[app.worker :as-alias wrk] [app.util.shell :as shell]
[integrant.core :as ig] [datoteka.fs :as fs]
[promesa.exec.semaphore :as ps] [promesa.exec.semaphore :as ps]))
[promesa.util :as pu]))
(def ^:dynamic *semaphore* (def ^:dynamic *semaphore*
"A dynamic variable that can optionally contain a traffic light to "A dynamic variable that can optionally contain a traffic light to
appropriately delimit the use of resources, managed externally." appropriately delimit the use of resources, managed externally."
nil) nil)
(set! *warn-on-reflection* true)
(defn optimize (defn optimize
[{pool ::optimizer} data] [system data]
(try (try
(some-> *semaphore* ps/acquire!) (some-> *semaphore* ps/acquire!)
(jsrt/run! pool (let [script (fs/join fs/*cwd* "scripts/svgo-cli.js")
(fn [context] cmd ["node" (str script)]
(jsrt/set! context "svgData" data) result (shell/exec! system
(jsrt/eval! context "penpotSvgo.optimize(svgData, {plugins: ['safeAndFastPreset']})"))) :cmd cmd
:in data)]
(if (= (:exit result) 0)
(:out result)
(do
(l/raw! :warn (str "Error on optimizing svg, returning svg as-is." (:err result)))
data)))
(finally (finally
(some-> *semaphore* ps/release!)))) (some-> *semaphore* ps/release!))))
(defmethod ig/init-key ::optimizer
[_ _]
(l/inf :hint "initializing svg optimizer pool")
(let [init (jsrt/resource->source "app/common/svg/optimizer.js")]
(jsrt/pool :init init)))
(defmethod ig/halt-key! ::optimizer
[_ pool]
(l/info :hint "stopping svg optimizer pool")
(pu/close! pool))
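
With this change the optimizer shells out to an external node process instead of the embedded GraalVM JS pool. The rough shape of that call, sketched with clojure.java.shell standing in for the new app.util.shell/exec! helper (the script path is the one referenced above; everything else is illustrative):

(ns example.svgo
  (:require [clojure.java.shell :as sh]))

(defn optimize-svg
  "Pipe `svg-data` through the svgo CLI; fall back to the original
  markup when the subprocess exits with a non-zero status."
  [svg-data]
  (let [{:keys [exit out err]} (sh/sh "node" "scripts/svgo-cli.js" :in svg-data)]
    (if (zero? exit)
      out
      (do
        (binding [*out* *err*]
          (println "svgo failed, returning svg as-is:" err))
        svg-data))))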

View file

@ -44,7 +44,7 @@
f.data_ref_id f.data_ref_id
FROM file_change AS f FROM file_change AS f
WHERE f.file_id = ? WHERE f.file_id = ?
AND f.label IS NOT NULL AND f.data IS NOT NULL
ORDER BY f.created_at ASC") ORDER BY f.created_at ASC")
(def ^:private sql:mark-file-media-object-deleted (def ^:private sql:mark-file-media-object-deleted

View file

@ -5,47 +5,51 @@
;; Copyright (c) KALEIDOS INC ;; Copyright (c) KALEIDOS INC
(ns app.tasks.file-xlog-gc (ns app.tasks.file-xlog-gc
"A maintenance task that performs a garbage collection of the file
change (transaction) log."
(:require (:require
[app.common.logging :as l] [app.common.logging :as l]
[app.config :as cf]
[app.db :as db] [app.db :as db]
[app.features.fdata :as feat.fdata]
[app.storage :as sto]
[app.util.time :as dt]
[clojure.spec.alpha :as s] [clojure.spec.alpha :as s]
[integrant.core :as ig])) [integrant.core :as ig]))
(def ^:private ;; Get the latest available snapshots without exceeding the total
sql:delete-files-xlog ;; snapshot limit
"DELETE FROM file_change (def ^:private sql:get-latest-snapshots
WHERE id IN (SELECT id FROM file_change "SELECT fch.id, fch.created_at
WHERE label IS NULL FROM file_change AS fch
AND created_at < ? WHERE fch.file_id = ?
ORDER BY created_at LIMIT ?) AND fch.created_by = 'system'
RETURNING id, data_backend, data_ref_id") AND fch.data IS NOT NULL
AND fch.deleted_at > now()
ORDER BY fch.created_at DESC
LIMIT ?")
(def xf:filter-offloded ;; Mark all snapshots that are outside the allowed total threshold
(comp ;; available for the GC
(filter feat.fdata/offloaded?) (def ^:private sql:delete-snapshots
(keep :data-ref-id))) "UPDATE file_change
SET deleted_at = now()
WHERE file_id = ?
AND deleted_at > now()
AND data IS NOT NULL
AND created_by = 'system'
AND created_at < ?")
(defn- delete-in-chunks (defn- get-alive-snapshots
[{:keys [::chunk-size ::threshold] :as cfg}] [conn file-id]
(let [storage (sto/resolve cfg ::db/reuse-conn true)] (let [total (cf/get :auto-file-snapshot-total 10)
(loop [total 0] snapshots (db/exec! conn [sql:get-latest-snapshots file-id total])]
(let [chunk (db/exec! cfg [sql:delete-files-xlog threshold chunk-size]) (not-empty snapshots)))
length (count chunk)]
;; touch all references on offloaded changes entries (defn- delete-old-snapshots!
(doseq [data-ref-id (sequence xf:filter-offloded chunk)] [{:keys [::db/conn] :as cfg} file-id]
(l/trc :hint "touching referenced storage object" (when-let [snapshots (get-alive-snapshots conn file-id)]
:storage-object-id (str data-ref-id)) (let [last-date (-> snapshots peek :created-at)
(sto/touch-object! storage data-ref-id)) result (db/exec-one! conn [sql:delete-snapshots file-id last-date])]
(l/inf :hint "delete old file snapshots"
(if (pos? length) :file-id (str file-id)
(recur (+ total length)) :current (count snapshots)
total))))) :deleted (db/get-update-count result)))))
(defmethod ig/pre-init-spec ::handler [_] (defmethod ig/pre-init-spec ::handler [_]
(s/keys :req [::db/pool])) (s/keys :req [::db/pool]))
@ -53,16 +57,8 @@
(defmethod ig/init-key ::handler (defmethod ig/init-key ::handler
[_ cfg] [_ cfg]
(fn [{:keys [props] :as task}] (fn [{:keys [props] :as task}]
(let [min-age (or (:min-age props) (let [file-id (:file-id props)]
(dt/duration "72h")) (assert (uuid? file-id) "expected file-id on props")
chunk-size (:chunk-size props 5000)
threshold (dt/minus (dt/now) min-age)]
(-> cfg (-> cfg
(assoc ::db/rollback (:rollback props false)) (assoc ::db/rollback (:rollback props false))
(assoc ::threshold threshold) (db/tx-run! delete-old-snapshots! file-id)))))
(assoc ::chunk-size chunk-size)
(db/tx-run! (fn [cfg]
(let [total (delete-in-chunks cfg)]
(l/trc :hint "file xlog cleaned" :total total)
total)))))))
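
The rewritten task keeps the N newest system snapshots per file (N = auto-file-snapshot-total) and soft-deletes everything created before the oldest kept one. The cutoff rule restated over an in-memory seq, illustrative only; real data comes from the two SQL statements above:

(defn snapshots-to-expire
  "Given `snapshots` ordered by :created-at descending, return the ones
  older than the oldest of the `total` most recent entries."
  [snapshots total]
  (let [kept   (take total snapshots)
        cutoff (:created-at (last kept))]
    (filter #(< (:created-at %) cutoff) snapshots)))

;; keeping the 2 newest out of 3:
(snapshots-to-expire
 [{:id 3 :created-at 30} {:id 2 :created-at 20} {:id 1 :created-at 10}]
 2)
;; => ({:id 1, :created-at 10})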

View file

@ -27,7 +27,7 @@
(defn- delete-profiles! (defn- delete-profiles!
[{:keys [::db/conn ::min-age ::chunk-size ::sto/storage] :as cfg}] [{:keys [::db/conn ::min-age ::chunk-size ::sto/storage] :as cfg}]
(->> (db/cursor conn [sql:get-profiles min-age chunk-size] {:chunk-size 1}) (->> (db/cursor conn [sql:get-profiles min-age chunk-size] {:chunk-size 5})
(reduce (fn [total {:keys [id photo-id]}] (reduce (fn [total {:keys [id photo-id]}]
(l/trc :hint "permanently delete" :rel "profile" :id (str id)) (l/trc :hint "permanently delete" :rel "profile" :id (str id))
@ -50,7 +50,7 @@
(defn- delete-teams! (defn- delete-teams!
[{:keys [::db/conn ::min-age ::chunk-size ::sto/storage] :as cfg}] [{:keys [::db/conn ::min-age ::chunk-size ::sto/storage] :as cfg}]
(->> (db/cursor conn [sql:get-teams min-age chunk-size] {:chunk-size 1}) (->> (db/cursor conn [sql:get-teams min-age chunk-size] {:chunk-size 5})
(reduce (fn [total {:keys [id photo-id deleted-at]}] (reduce (fn [total {:keys [id photo-id deleted-at]}]
(l/trc :hint "permanently delete" (l/trc :hint "permanently delete"
:rel "team" :rel "team"
@ -78,7 +78,7 @@
(defn- delete-fonts! (defn- delete-fonts!
[{:keys [::db/conn ::min-age ::chunk-size ::sto/storage] :as cfg}] [{:keys [::db/conn ::min-age ::chunk-size ::sto/storage] :as cfg}]
(->> (db/cursor conn [sql:get-fonts min-age chunk-size] {:chunk-size 1}) (->> (db/cursor conn [sql:get-fonts min-age chunk-size] {:chunk-size 5})
(reduce (fn [total {:keys [id team-id deleted-at] :as font}] (reduce (fn [total {:keys [id team-id deleted-at] :as font}]
(l/trc :hint "permanently delete" (l/trc :hint "permanently delete"
:rel "team-font-variant" :rel "team-font-variant"
@ -110,7 +110,7 @@
(defn- delete-projects! (defn- delete-projects!
[{:keys [::db/conn ::min-age ::chunk-size] :as cfg}] [{:keys [::db/conn ::min-age ::chunk-size] :as cfg}]
(->> (db/cursor conn [sql:get-projects min-age chunk-size] {:chunk-size 1}) (->> (db/cursor conn [sql:get-projects min-age chunk-size] {:chunk-size 5})
(reduce (fn [total {:keys [id team-id deleted-at]}] (reduce (fn [total {:keys [id team-id deleted-at]}]
(l/trc :hint "permanently delete" (l/trc :hint "permanently delete"
:rel "project" :rel "project"
@ -136,7 +136,7 @@
(defn- delete-files! (defn- delete-files!
[{:keys [::db/conn ::sto/storage ::min-age ::chunk-size] :as cfg}] [{:keys [::db/conn ::sto/storage ::min-age ::chunk-size] :as cfg}]
(->> (db/cursor conn [sql:get-files min-age chunk-size] {:chunk-size 1}) (->> (db/cursor conn [sql:get-files min-age chunk-size] {:chunk-size 5})
(reduce (fn [total {:keys [id deleted-at project-id] :as file}] (reduce (fn [total {:keys [id deleted-at project-id] :as file}]
(l/trc :hint "permanently delete" (l/trc :hint "permanently delete"
:rel "file" :rel "file"
@ -165,7 +165,7 @@
(defn delete-file-thumbnails! (defn delete-file-thumbnails!
[{:keys [::db/conn ::min-age ::chunk-size ::sto/storage] :as cfg}] [{:keys [::db/conn ::min-age ::chunk-size ::sto/storage] :as cfg}]
(->> (db/cursor conn [sql:get-file-thumbnails min-age chunk-size] {:chunk-size 1}) (->> (db/cursor conn [sql:get-file-thumbnails min-age chunk-size] {:chunk-size 5})
(reduce (fn [total {:keys [file-id revn media-id deleted-at]}] (reduce (fn [total {:keys [file-id revn media-id deleted-at]}]
(l/trc :hint "permanently delete" (l/trc :hint "permanently delete"
:rel "file-thumbnail" :rel "file-thumbnail"
@ -194,7 +194,7 @@
(defn delete-file-object-thumbnails! (defn delete-file-object-thumbnails!
[{:keys [::db/conn ::min-age ::chunk-size ::sto/storage] :as cfg}] [{:keys [::db/conn ::min-age ::chunk-size ::sto/storage] :as cfg}]
(->> (db/cursor conn [sql:get-file-object-thumbnails min-age chunk-size] {:chunk-size 1}) (->> (db/cursor conn [sql:get-file-object-thumbnails min-age chunk-size] {:chunk-size 5})
(reduce (fn [total {:keys [file-id object-id media-id deleted-at]}] (reduce (fn [total {:keys [file-id object-id media-id deleted-at]}]
(l/trc :hint "permanently delete" (l/trc :hint "permanently delete"
:rel "file-tagged-object-thumbnail" :rel "file-tagged-object-thumbnail"
@ -223,7 +223,7 @@
(defn- delete-file-data-fragments! (defn- delete-file-data-fragments!
[{:keys [::db/conn ::sto/storage ::min-age ::chunk-size] :as cfg}] [{:keys [::db/conn ::sto/storage ::min-age ::chunk-size] :as cfg}]
(->> (db/cursor conn [sql:get-file-data-fragments min-age chunk-size] {:chunk-size 1}) (->> (db/cursor conn [sql:get-file-data-fragments min-age chunk-size] {:chunk-size 5})
(reduce (fn [total {:keys [file-id id deleted-at data-ref-id]}] (reduce (fn [total {:keys [file-id id deleted-at data-ref-id]}]
(l/trc :hint "permanently delete" (l/trc :hint "permanently delete"
:rel "file-data-fragment" :rel "file-data-fragment"
@ -249,7 +249,7 @@
(defn- delete-file-media-objects! (defn- delete-file-media-objects!
[{:keys [::db/conn ::min-age ::chunk-size ::sto/storage] :as cfg}] [{:keys [::db/conn ::min-age ::chunk-size ::sto/storage] :as cfg}]
(->> (db/cursor conn [sql:get-file-media-objects min-age chunk-size] {:chunk-size 1}) (->> (db/cursor conn [sql:get-file-media-objects min-age chunk-size] {:chunk-size 5})
(reduce (fn [total {:keys [id file-id deleted-at] :as fmo}] (reduce (fn [total {:keys [id file-id deleted-at] :as fmo}]
(l/trc :hint "permanently delete" (l/trc :hint "permanently delete"
:rel "file-media-object" :rel "file-media-object"
@ -266,6 +266,34 @@
(inc total)) (inc total))
0))) 0)))
(def ^:private sql:get-file-change
"SELECT id, file_id, deleted_at, data_backend, data_ref_id
FROM file_change
WHERE deleted_at IS NOT NULL
AND deleted_at < now() - ?::interval
ORDER BY deleted_at ASC
LIMIT ?
FOR UPDATE
SKIP LOCKED")
(defn- delete-file-change!
[{:keys [::db/conn ::min-age ::chunk-size ::sto/storage] :as cfg}]
(->> (db/cursor conn [sql:get-file-change min-age chunk-size] {:chunk-size 5})
(reduce (fn [total {:keys [id file-id deleted-at] :as xlog}]
(l/trc :hint "permanently delete"
:rel "file-change"
:id (str id)
:file-id (str file-id)
:deleted-at (dt/format-instant deleted-at))
(when (= "objects-storage" (:data-backend xlog))
(sto/touch-object! storage (:data-ref-id xlog)))
(db/delete! conn :file-change {:id id})
(inc total))
0)))
(def ^:private deletion-proc-vars (def ^:private deletion-proc-vars
[#'delete-profiles! [#'delete-profiles!
#'delete-file-media-objects! #'delete-file-media-objects!
@ -275,7 +303,8 @@
#'delete-files! #'delete-files!
#'delete-projects! #'delete-projects!
#'delete-fonts! #'delete-fonts!
#'delete-teams!]) #'delete-teams!
#'delete-file-change!])
(defn- execute-proc! (defn- execute-proc!
"A generic function that executes the specified proc iterativelly "A generic function that executes the specified proc iterativelly
@ -296,7 +325,7 @@
[_ cfg] [_ cfg]
(assoc cfg (assoc cfg
::min-age (cf/get-deletion-delay) ::min-age (cf/get-deletion-delay)
::chunk-size 10)) ::chunk-size 50))
(defmethod ig/init-key ::handler (defmethod ig/init-key ::handler
[_ cfg] [_ cfg]

View file

@ -8,7 +8,7 @@
"INET addr parsing and validation helpers" "INET addr parsing and validation helpers"
(:require (:require
[cuerdas.core :as str] [cuerdas.core :as str]
[ring.request :as rreq]) [yetti.request :as yreq])
(:import (:import
com.google.common.net.InetAddresses com.google.common.net.InetAddresses
java.net.InetAddress)) java.net.InetAddress))
@ -27,11 +27,11 @@
(defn parse-request (defn parse-request
[request] [request]
(or (some-> (rreq/get-header request "x-real-ip") (or (some-> (yreq/get-header request "x-real-ip")
(normalize)) (normalize))
(some-> (rreq/get-header request "x-forwarded-for") (some-> (yreq/get-header request "x-forwarded-for")
(str/split #"\s*,\s*") (str/split #"\s*,\s*")
(first) (first)
(normalize)) (normalize))
(some-> (rreq/remote-addr request) (some-> (yreq/remote-addr request)
(normalize)))) (normalize))))
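
The header-precedence logic is unchanged; only the request protocol moved from ring.request to yetti.request. The lookup order, sketched over a plain map standing in for the request (the yetti API itself is not used here):

(ns example.client-ip
  (:require [clojure.string :as str]))

(defn client-ip
  "x-real-ip wins, then the first entry of x-forwarded-for, then the
  socket address."
  [{:keys [headers remote-addr]}]
  (or (get headers "x-real-ip")
      (some-> (get headers "x-forwarded-for")
              (str/split #"\s*,\s*")
              first)
      remote-addr))

(client-ip {:headers     {"x-forwarded-for" "10.0.0.1, 172.16.0.1"}
            :remote-addr "127.0.0.1"})
;; => "10.0.0.1"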

View file

@ -0,0 +1,71 @@
;; This Source Code Form is subject to the terms of the Mozilla Public
;; License, v. 2.0. If a copy of the MPL was not distributed with this
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
;;
;; Copyright (c) KALEIDOS INC
(ns app.util.shell
"A penpot specific, modern api for executing external (shell)
subprocesses"
(:require
[app.worker :as-alias wrk]
[datoteka.io :as io]
[promesa.exec :as px])
(:import
java.io.InputStream
java.io.OutputStream
java.util.List
org.apache.commons.io.IOUtils))
(set! *warn-on-reflection* true)
(defn- read-as-bytes
[in]
(with-open [^InputStream input (io/input-stream in)]
(io/read input)))
(defn- read-as-string
([in] (read-as-string in "UTF-8"))
([in enc]
(IOUtils/toString ^InputStream in ^String enc)))
(defn- read-with-enc
[stream enc]
(if (= enc :bytes)
(read-as-bytes stream)
(read-as-string stream enc)))
(defn- set-env
[penv k v]
(.put ^java.util.Map penv
^String k
^String v))
(defn exec!
[system & {:keys [cmd in out-enc in-enc env]
:or {out-enc "UTF-8"
in-enc "UTF-8"}}]
(assert (vector? cmd) "a command parameter should be a vector")
(assert (every? string? cmd) "the command should be a vector of strings")
(let [executor (::wrk/executor system)
builder (ProcessBuilder. ^List cmd)
env-map (.environment ^ProcessBuilder builder)
_ (reduce-kv set-env env-map env)
process (.start builder)]
(if in
(px/run! executor
(fn []
(with-open [^OutputStream stdin (.getOutputStream ^Process process)]
(io/write stdin in :encoding in-enc))))
(io/close (.getOutputStream ^Process process)))
(with-open [stdout (.getInputStream ^Process process)
stderr (.getErrorStream ^Process process)]
(let [out (px/submit! executor (fn [] (read-with-enc stdout out-enc)))
err (px/submit! executor (fn [] (read-as-string stderr)))
ext (.waitFor ^Process process)]
{:exit ext
:out @out
:err @err}))))
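
A usage sketch for the new helper: the `system` map is assumed to carry the ::wrk/executor entry the namespace dereferences, and the commands shown are arbitrary examples rather than calls made by this commit.

(comment
  (require '[app.util.shell :as shell])

  ;; plain invocation, stdout/stderr decoded as UTF-8
  (shell/exec! system :cmd ["node" "--version"])
  ;; => {:exit 0, :out <node version string>, :err ""}

  ;; feed stdin and read stdout back as raw bytes
  (shell/exec! system
               :cmd ["gzip" "-c"]
               :in "hello"
               :out-enc :bytes))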

View file

@ -16,8 +16,7 @@
[promesa.exec :as px] [promesa.exec :as px]
[promesa.exec.csp :as sp] [promesa.exec.csp :as sp]
[promesa.util :as pu] [promesa.util :as pu]
[ring.request :as rreq] [yetti.request :as yreq]
[ring.websocket :as rws]
[yetti.websocket :as yws]) [yetti.websocket :as yws])
(:import (:import
java.nio.ByteBuffer)) java.nio.ByteBuffer))
@ -85,7 +84,7 @@
hbeat-ch (sp/chan :buf (sp/sliding-buffer 6)) hbeat-ch (sp/chan :buf (sp/sliding-buffer 6))
close-ch (sp/chan) close-ch (sp/chan)
ip-addr (inet/parse-request request) ip-addr (inet/parse-request request)
uagent (rreq/get-header request "user-agent") uagent (yreq/get-header request "user-agent")
id (uuid/next) id (uuid/next)
state (atom {}) state (atom {})
beats (atom #{}) beats (atom #{})
@ -138,7 +137,7 @@
(defn- handle-ping! (defn- handle-ping!
[{:keys [::id ::beats ::channel] :as wsp} beat-id] [{:keys [::id ::beats ::channel] :as wsp} beat-id]
(l/trc :hint "send ping" :beat beat-id :conn-id (str id)) (l/trc :hint "send ping" :beat beat-id :conn-id (str id))
(rws/ping channel (encode-beat beat-id)) (yws/ping channel (encode-beat beat-id))
(let [issued (swap! beats conj (long beat-id))] (let [issued (swap! beats conj (long beat-id))]
(not (>= (count issued) max-missed-heartbeats)))) (not (>= (count issued) max-missed-heartbeats))))
@ -151,14 +150,14 @@
(loop [i 0] (loop [i 0]
(let [ping-ch (sp/timeout-chan heartbeat-interval) (let [ping-ch (sp/timeout-chan heartbeat-interval)
[msg p] (sp/alts! [close-ch input-ch output-ch heartbeat-ch ping-ch])] [msg p] (sp/alts! [close-ch input-ch output-ch heartbeat-ch ping-ch])]
(when (rws/open? channel) (when (yws/open? channel)
(cond (cond
(identical? p ping-ch) (identical? p ping-ch)
(if (handle-ping! wsp i) (if (handle-ping! wsp i)
(recur (inc i)) (recur (inc i))
(do (do
(l/trc :hint "closing" :reason "missing to many pings") (l/trc :hint "closing" :reason "missing to many pings")
(rws/close channel 8802 "missing to many pings"))) (yws/close channel 8802 "missing to many pings")))
(or (identical? p close-ch) (nil? msg)) (or (identical? p close-ch) (nil? msg))
(do :nothing) (do :nothing)
@ -183,7 +182,7 @@
(identical? p output-ch) (identical? p output-ch)
(let [message (on-snd-message msg) (let [message (on-snd-message msg)
message (t/encode-str message {:type :json-verbose})] message (t/encode-str message {:type :json-verbose})]
(rws/send channel message) (yws/send channel message)
(recur i)))))) (recur i))))))
(catch InterruptedException _cause (catch InterruptedException _cause
@ -202,13 +201,13 @@
(try (try
(handler wsp {:type :close}) (handler wsp {:type :close})
(when (rws/open? channel) (when (yws/open? channel)
;; NOTE: we need to ignore all exceptions here because ;; NOTE: we need to ignore all exceptions here because
;; there can be a race condition that first returns that ;; there can be a race condition that first returns that
;; channel is connected but on closing, will raise that ;; channel is connected but on closing, will raise that
;; channel is already closed. ;; channel is already closed.
(ex/ignoring (ex/ignoring
(rws/close channel 8899 "terminated"))) (yws/close channel 8899 "terminated")))
(when-let [on-disconnect (::on-disconnect wsp)] (when-let [on-disconnect (::on-disconnect wsp)]
(on-disconnect)) (on-disconnect))

View file

@ -10,6 +10,7 @@
[app.common.data :as d] [app.common.data :as d]
[app.common.data.macros :as dm] [app.common.data.macros :as dm]
[app.common.logging :as l] [app.common.logging :as l]
[app.common.schema :as sm]
[app.common.spec :as us] [app.common.spec :as us]
[app.common.uuid :as uuid] [app.common.uuid :as uuid]
[app.config :as cf] [app.config :as cf]
@ -73,29 +74,27 @@
AND status = 'new' AND status = 'new'
AND scheduled_at > now()") AND scheduled_at > now()")
(s/def ::label string?) (def ^:private schema:options
(s/def ::task (s/or :kw keyword? :str string?)) [:map {:title "submit-options"}
(s/def ::queue (s/or :kw keyword? :str string?)) [::task [:or ::sm/text :keyword]]
(s/def ::delay (s/or :int integer? :duration dt/duration?)) [::label {:optional true} ::sm/text]
(s/def ::priority integer?) [::delay {:optional true}
(s/def ::max-retries integer?) [:or ::sm/int ::dt/duration]]
(s/def ::dedupe boolean?) [::queue {:optional true} [:or ::sm/text :keyword]]
[::priority {:optional true} ::sm/int]
[::max-retries {:optional true} ::sm/int]
[::dedupe {:optional true} ::sm/boolean]])
(s/def ::submit-options (def check-options!
(s/and (sm/check-fn schema:options))
(s/keys :req [::task]
:opt [::label ::delay ::queue ::priority ::max-retries ::dedupe])
(fn [{:keys [::dedupe ::label] :or {label ""}}]
(if dedupe
(not= label "")
true))))
(defn submit! (defn submit!
[& {:keys [::params ::task ::delay ::queue ::priority ::max-retries ::dedupe ::label] [& {:keys [::params ::task ::delay ::queue ::priority ::max-retries ::dedupe ::label]
:or {delay 0 queue :default priority 100 max-retries 3 label ""} :or {delay 0 queue :default priority 100 max-retries 3 label ""}
:as options}] :as options}]
(us/verify! ::submit-options options) (check-options! options)
(let [duration (dt/duration delay) (let [duration (dt/duration delay)
interval (db/interval duration) interval (db/interval duration)
props (db/tjson params) props (db/tjson params)
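
The spec-based validation is replaced here by a malli schema plus sm/check-fn. An equivalent stand-alone schema, written against plain (non-namespaced) keys and stock malli types so it can be tried in isolation (the real one uses Penpot's ::sm/text and ::dt/duration registry entries):

(ns example.submit-options
  (:require [malli.core :as m]))

(def schema:options
  [:map
   [:task [:or :string :keyword]]
   [:label       {:optional true} :string]
   [:delay       {:optional true}
    [:or :int [:fn #(instance? java.time.Duration %)]]]
   [:queue       {:optional true} [:or :string :keyword]]
   [:priority    {:optional true} :int]
   [:max-retries {:optional true} :int]
   [:dedupe      {:optional true} :boolean]])

(m/validate schema:options {:task :file-gc :delay 5000 :dedupe true})
;; => true

(m/explain schema:options {:task :file-gc :priority "high"})
;; => explanation map pointing at :priority

Note that the old s/and clause requiring a non-empty ::label when ::dedupe is set has no counterpart in the schema shown above.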

View file

@ -17,12 +17,11 @@
[integrant.core :as ig] [integrant.core :as ig]
[promesa.exec :as px]) [promesa.exec :as px])
(:import (:import
java.util.concurrent.Executor
java.util.concurrent.ThreadPoolExecutor)) java.util.concurrent.ThreadPoolExecutor))
(set! *warn-on-reflection* true) (set! *warn-on-reflection* true)
(s/def ::wrk/executor #(instance? Executor %)) (s/def ::wrk/executor #(instance? ThreadPoolExecutor %))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; EXECUTOR ;; EXECUTOR
@ -36,30 +35,22 @@
(let [factory (px/thread-factory :prefix "penpot/default/") (let [factory (px/thread-factory :prefix "penpot/default/")
executor (px/cached-executor :factory factory :keepalive 60000)] executor (px/cached-executor :factory factory :keepalive 60000)]
(l/inf :hint "executor started") (l/inf :hint "executor started")
(reify executor))
java.lang.AutoCloseable
(close [_]
(l/inf :hint "stoping executor")
(px/shutdown! executor))
clojure.lang.IDeref
(deref [_]
{:active (.getPoolSize ^ThreadPoolExecutor executor)
:running (.getActiveCount ^ThreadPoolExecutor executor)
:completed (.getCompletedTaskCount ^ThreadPoolExecutor executor)})
Executor
(execute [_ runnable]
(.execute ^Executor executor ^Runnable runnable)))))
(defmethod ig/halt-key! ::wrk/executor (defmethod ig/halt-key! ::wrk/executor
[_ instance] [_ instance]
(.close ^java.lang.AutoCloseable instance)) (px/shutdown! instance))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; MONITOR ;; MONITOR
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defn- get-stats
[^ThreadPoolExecutor executor]
{:active (.getPoolSize ^ThreadPoolExecutor executor)
:running (.getActiveCount ^ThreadPoolExecutor executor)
:completed (.getCompletedTaskCount ^ThreadPoolExecutor executor)})
(s/def ::name ::us/keyword) (s/def ::name ::us/keyword)
(defmethod ig/pre-init-spec ::wrk/monitor [_] (defmethod ig/pre-init-spec ::wrk/monitor [_]
@ -74,7 +65,7 @@
[_ {:keys [::wrk/executor ::mtx/metrics ::interval ::wrk/name]}] [_ {:keys [::wrk/executor ::mtx/metrics ::interval ::wrk/name]}]
(letfn [(monitor! [executor prev-completed] (letfn [(monitor! [executor prev-completed]
(let [labels (into-array String [(d/name name)]) (let [labels (into-array String [(d/name name)])
stats (deref executor) stats (get-stats executor)
completed (:completed stats) completed (:completed stats)
completed-inc (- completed prev-completed) completed-inc (- completed prev-completed)

View file

@ -0,0 +1,107 @@
;; This Source Code Form is subject to the terms of the Mozilla Public
;; License, v. 2.0. If a copy of the MPL was not distributed with this
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
;;
;; Copyright (c) KALEIDOS INC
(ns backend-tests.binfile-test
"Internal binfile test, no RPC involved"
(:require
[app.binfile.v3 :as v3]
[app.common.features :as cfeat]
[app.common.pprint :as pp]
[app.common.thumbnails :as thc]
[app.common.types.shape :as cts]
[app.common.uuid :as uuid]
[app.db :as db]
[app.db.sql :as sql]
[app.http :as http]
[app.rpc :as-alias rpc]
[app.storage :as sto]
[app.storage.tmp :as tmp]
[app.util.time :as dt]
[backend-tests.helpers :as th]
[clojure.test :as t]
[cuerdas.core :as str]
[datoteka.fs :as fs]
[datoteka.io :as io]))
(t/use-fixtures :once th/state-init)
(t/use-fixtures :each th/database-reset)
(defn- update-file!
[& {:keys [profile-id file-id changes revn] :or {revn 0}}]
(let [params {::th/type :update-file
::rpc/profile-id profile-id
:id file-id
:session-id (uuid/random)
:revn revn
:vern 0
:features cfeat/supported-features
:changes changes}
out (th/command! params)]
;; (th/print-result! out)
(t/is (nil? (:error out)))
(:result out)))
(defn- prepare-simple-file
[profile]
(let [page-id-1 (uuid/custom 1 1)
page-id-2 (uuid/custom 1 2)
shape-id (uuid/custom 2 1)
file (th/create-file* 1 {:profile-id (:id profile)
:project-id (:default-project-id profile)
:is-shared false})]
(update-file!
:file-id (:id file)
:profile-id (:id profile)
:revn 0
:vern 0
:changes
[{:type :add-page
:name "test 1"
:id page-id-1}
{:type :add-page
:name "test 2"
:id page-id-2}])
(update-file!
:file-id (:id file)
:profile-id (:id profile)
:revn 0
:vern 0
:changes
[{:type :add-obj
:page-id page-id-1
:id shape-id
:parent-id uuid/zero
:frame-id uuid/zero
:components-v2 true
:obj (cts/setup-shape
{:id shape-id
:name "image"
:frame-id uuid/zero
:parent-id uuid/zero
:type :rect})}])
(dissoc file :data)))
(t/deftest export-binfile-v3
(let [profile (th/create-profile* 1)
file (prepare-simple-file profile)
output (tmp/tempfile :suffix ".zip")]
(v3/export-files!
(-> th/*system*
(assoc ::v3/ids #{(:id file)})
(assoc ::v3/embed-assets false)
(assoc ::v3/include-libraries false))
(io/output-stream output))
(let [result (-> th/*system*
(assoc ::v3/project-id (:default-project-id profile))
(assoc ::v3/profile-id (:id profile))
(assoc ::v3/input output)
(v3/import-files!))]
(t/is (= (count result) 1))
(t/is (every? uuid? result)))))

View file

@ -47,8 +47,9 @@
[mockery.core :as mk] [mockery.core :as mk]
[promesa.core :as p] [promesa.core :as p]
[promesa.exec :as px] [promesa.exec :as px]
[ring.response :as rres] [ring.core.protocols :as rcp]
[yetti.request :as yrq]) [yetti.request :as yrq]
[yetti.response :as yres])
(:import (:import
java.io.PipedInputStream java.io.PipedInputStream
java.io.PipedOutputStream java.io.PipedOutputStream
@ -311,6 +312,7 @@
(#'files.update/update-file* system (#'files.update/update-file* system
{:id file-id {:id file-id
:revn revn :revn revn
:vern 0
:file file :file file
:features (:features file) :features (:features file)
:changes changes :changes changes
@ -326,12 +328,14 @@
:id file-id :id file-id
:session-id (uuid/random) :session-id (uuid/random)
:revn revn :revn revn
:vern 0
:features features :features features
:changes changes} :changes changes}
out (command! params)] out (command! params)]
(t/is (nil? (:error out))) (t/is (nil? (:error out)))
(:result out))) (:result out)))
(defn create-webhook* (defn create-webhook*
([params] (create-webhook* *system* params)) ([params] (create-webhook* *system* params))
([system {:keys [team-id id uri mtype is-active] ([system {:keys [team-id id uri mtype is-active]
@ -547,15 +551,16 @@
(defn consume-sse (defn consume-sse
[callback] [callback]
(let [{:keys [::rres/status ::rres/body ::rres/headers] :as response} (callback {}) (let [{:keys [::yres/status ::yres/body ::yres/headers] :as response} (callback {})
output (PipedOutputStream.) output (PipedOutputStream.)
input (PipedInputStream. output)] input (PipedInputStream. output)]
(try (try
(px/exec! :virtual #(rres/-write-body-to-stream body nil output)) (px/exec! :virtual #(rcp/write-body-to-stream body nil output))
(into [] (into []
(map (fn [event] (map (fn [event]
(let [[item1 item2] (re-seq #"(.*): (.*)\n?" event)] (let [[item1 item2] (re-seq #"(.*): (.*)\n?" event)]
[(keyword (nth item1 2)) [(keyword (nth item1 2))
(tr/decode-str (nth item2 2))]))) (tr/decode-str (nth item2 2))])))
(-> (slurp' input) (-> (slurp' input)

View file

@ -12,7 +12,8 @@
[app.rpc :as-alias rpc] [app.rpc :as-alias rpc]
[app.util.time :as dt] [app.util.time :as dt]
[backend-tests.helpers :as th] [backend-tests.helpers :as th]
[clojure.test :as t])) [clojure.test :as t]
[yetti.request]))
(t/use-fixtures :once th/state-init) (t/use-fixtures :once th/state-init)
(t/use-fixtures :each th/database-reset) (t/use-fixtures :each th/database-reset)
@ -25,7 +26,7 @@
(def http-request (def http-request
(reify (reify
ring.request/Request yetti.request/IRequest
(get-header [_ name] (get-header [_ name]
(case name (case name
"x-forwarded-for" "127.0.0.44" "x-forwarded-for" "127.0.0.44"

View file

@ -44,5 +44,5 @@
{:keys [error result]} (th/command! (assoc params ::cond/key etag))] {:keys [error result]} (th/command! (assoc params ::cond/key etag))]
(t/is (nil? error)) (t/is (nil? error))
(t/is (fn? result)) (t/is (fn? result))
(t/is (= 304 (-> (result nil) :ring.response/status)))))))) (t/is (= 304 (-> (result nil) :yetti.response/status))))))))

View file

@ -0,0 +1,134 @@
;; This Source Code Form is subject to the terms of the Mozilla Public
;; License, v. 2.0. If a copy of the MPL was not distributed with this
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
;;
;; Copyright (c) KALEIDOS INC
(ns backend-tests.rpc-file-snapshot-test
(:require
[app.common.features :as cfeat]
[app.common.pprint :as pp]
[app.common.pprint :as pp]
[app.common.thumbnails :as thc]
[app.common.types.shape :as cts]
[app.common.uuid :as uuid]
[app.db :as db]
[app.db.sql :as sql]
[app.http :as http]
[app.rpc :as-alias rpc]
[app.storage :as sto]
[app.util.time :as dt]
[backend-tests.helpers :as th]
[clojure.test :as t]
[cuerdas.core :as str]))
(t/use-fixtures :once th/state-init)
(t/use-fixtures :each th/database-reset)
(defn- update-file!
[& {:keys [profile-id file-id changes revn] :or {revn 0}}]
(let [params {::th/type :update-file
::rpc/profile-id profile-id
:id file-id
:session-id (uuid/random)
:revn revn
:vern 0
:features cfeat/supported-features
:changes changes}
out (th/command! params)]
;; (th/print-result! out)
(t/is (nil? (:error out)))
(:result out)))
(t/deftest generic-ops
(let [profile (th/create-profile* 1 {:is-active true})
team-id (:default-team-id profile)
proj-id (:default-project-id profile)
file (th/create-file* 1 {:profile-id (:id profile)
:project-id proj-id
:is-shared false})
snapshot-id (volatile! nil)]
(t/testing "create snapshot"
(let [params {::th/type :create-file-snapshot
::rpc/profile-id (:id profile)
:file-id (:id file)
:label "label1"}
out (th/command! params)]
;; (th/print-result! out)
(t/is (nil? (:error out)))
(let [result (:result out)]
(t/is (= "label1" (:label result)))
(t/is (uuid? (:id result)))
(vswap! snapshot-id (constantly (:id result))))))
(t/testing "list snapshots"
(let [params {::th/type :get-file-snapshots
::rpc/profile-id (:id profile)
:file-id (:id file)}
out (th/command! params)]
;; (th/print-result! out)
(t/is (nil? (:error out)))
(let [[row :as result] (:result out)]
(t/is (= 1 (count result)))
(t/is (= "label1" (:label row)))
(t/is (uuid? (:id row)))
(t/is (= @snapshot-id (:id row)))
(t/is (= 0 (:revn row)))
(t/is (= (:id profile) (:profile-id row))))))
(t/testing "restore snapshot"
(let [params {::th/type :restore-file-snapshot
::rpc/profile-id (:id profile)
:file-id (:id file)
:id @snapshot-id}
out (th/command! params)]
;; (th/print-result! out)
(t/is (nil? (:error out)))
(let [result (:result out)]
(t/is (= "label1" (:label result)))
(t/is (uuid? (:id result)))))
(let [[row1 row2 :as rows]
(th/db-query :file-change
{:file-id (:id file)}
{:order-by [:created-at]})]
(t/is (= 2 (count rows)))
(t/is (= "user" (:created-by row1)))
(t/is (= "system" (:created-by row2)))))
(t/testing "delete snapshot"
(let [[row1 row2 :as rows]
(th/db-query :file-change
{:file-id (:id file)}
{:order-by [:created-at]})]
(t/testing "delete user created snapshot"
(let [params {::th/type :delete-file-snapshot
::rpc/profile-id (:id profile)
:file-id (:id file)
:id (:id row1)}
out (th/command! params)]
;; (th/print-result! out)
(t/is (nil? (:error out)))
(t/is (nil? (:result out)))))
(t/testing "delete system created snapshot"
(let [params {::th/type :delete-file-snapshot
::rpc/profile-id (:id profile)
:file-id (:id file)
:id (:id row2)}
out (th/command! params)]
;; (th/print-result! out)
(let [error (:error out)
data (ex-data error)]
(t/is (th/ex-info? error))
(t/is (= (:type data) :validation))
(t/is (= (:code data) :system-snapshots-cant-be-deleted)))))))))

View file

@ -32,6 +32,7 @@
:id file-id :id file-id
:session-id (uuid/random) :session-id (uuid/random)
:revn revn :revn revn
:vern 0
:features cfeat/supported-features :features cfeat/supported-features
:changes changes} :changes changes}
out (th/command! params)] out (th/command! params)]
@ -147,6 +148,7 @@
:id file-id :id file-id
:session-id (uuid/random) :session-id (uuid/random)
:revn revn :revn revn
:vern 0
:features cfeat/supported-features :features cfeat/supported-features
:changes changes} :changes changes}
out (th/command! params)] out (th/command! params)]
@ -174,6 +176,7 @@
:file-id (:id file) :file-id (:id file)
:profile-id (:id profile) :profile-id (:id profile)
:revn 0 :revn 0
:vern 0
:changes :changes
[{:type :add-page [{:type :add-page
:name "test" :name "test"
@ -203,6 +206,7 @@
:file-id (:id file) :file-id (:id file)
:profile-id (:id profile) :profile-id (:id profile)
:revn 0 :revn 0
:vern 0
:changes :changes
[{:type :add-obj [{:type :add-obj
:page-id page-id :page-id page-id
@ -279,6 +283,7 @@
:id file-id :id file-id
:session-id (uuid/random) :session-id (uuid/random)
:revn revn :revn revn
:vern 0
:features cfeat/supported-features :features cfeat/supported-features
:changes changes} :changes changes}
out (th/command! params)] out (th/command! params)]
@ -305,6 +310,7 @@
:file-id (:id file) :file-id (:id file)
:profile-id (:id profile) :profile-id (:id profile)
:revn 0 :revn 0
:vern 0
:changes :changes
[{:type :add-obj [{:type :add-obj
:page-id page-id :page-id page-id
@ -367,6 +373,7 @@
:file-id (:id file) :file-id (:id file)
:profile-id (:id profile) :profile-id (:id profile)
:revn 0 :revn 0
:vern 0
:changes [{:type :del-obj :changes [{:type :del-obj
:page-id (first (get-in file [:data :pages])) :page-id (first (get-in file [:data :pages]))
:id shid}]) :id shid}])
@ -418,6 +425,7 @@
:id file-id :id file-id
:session-id (uuid/random) :session-id (uuid/random)
:revn revn :revn revn
:vern 0
:components-v2 true :components-v2 true
:changes changes} :changes changes}
out (th/command! params)] out (th/command! params)]
@ -452,6 +460,7 @@
:file-id (:id file) :file-id (:id file)
:profile-id (:id profile) :profile-id (:id profile)
:revn 0 :revn 0
:vern 0
:changes :changes
[{:type :add-obj [{:type :add-obj
:page-id page-id :page-id page-id
@ -528,6 +537,7 @@
:file-id (:id file) :file-id (:id file)
:profile-id (:id profile) :profile-id (:id profile)
:revn 0 :revn 0
:vern 0
:changes [{:type :del-obj :changes [{:type :del-obj
:page-id (first (get-in file [:data :pages])) :page-id (first (get-in file [:data :pages]))
:id s-shid} :id s-shid}
@ -622,6 +632,7 @@
:file-id (:id file) :file-id (:id file)
:profile-id (:id profile) :profile-id (:id profile)
:revn 0 :revn 0
:vern 0
:changes :changes
[{:type :add-obj [{:type :add-obj
:page-id page-id :page-id page-id
@ -688,6 +699,7 @@
:file-id file-id :file-id file-id
:profile-id (:id profile) :profile-id (:id profile)
:revn 0 :revn 0
:vern 0
:changes [{:type :del-obj :changes [{:type :del-obj
:page-id page-id :page-id page-id
:id frame-id-2}]) :id frame-id-2}])
@ -721,6 +733,7 @@
:file-id file-id :file-id file-id
:profile-id (:id profile) :profile-id (:id profile)
:revn 0 :revn 0
:vern 0
:changes [{:type :del-obj :changes [{:type :del-obj
:page-id page-id :page-id page-id
:id frame-id-1}]) :id frame-id-1}])
@ -978,6 +991,7 @@
(th/update-file* {:file-id (:id file) (th/update-file* {:file-id (:id file)
:profile-id (:id prof) :profile-id (:id prof)
:revn 0 :revn 0
:vern 0
:components-v2 true :components-v2 true
:changes changes}) :changes changes})
@ -1178,6 +1192,7 @@
file (th/create-file* 1 {:profile-id (:id prof) file (th/create-file* 1 {:profile-id (:id prof)
:project-id (:default-project-id prof) :project-id (:default-project-id prof)
:revn 2 :revn 2
:vern 0
:is-shared false})] :is-shared false})]
(t/testing "create a file thumbnail" (t/testing "create a file thumbnail"
@ -1286,6 +1301,7 @@
:file-id (:id file) :file-id (:id file)
:profile-id (:id profile) :profile-id (:id profile)
:revn 0 :revn 0
:vern 0
:changes :changes
[{:type :add-page [{:type :add-page
:name "test" :name "test"

View file

@ -42,6 +42,7 @@
:file-id (:id file) :file-id (:id file)
:profile-id (:id profile) :profile-id (:id profile)
:revn 0 :revn 0
:vern 0
:changes :changes
[{:type :add-obj [{:type :add-obj
:page-id page-id :page-id page-id
@ -253,7 +254,8 @@
file (th/create-file* 1 {:profile-id (:id profile) file (th/create-file* 1 {:profile-id (:id profile)
:project-id (:default-project-id profile) :project-id (:default-project-id profile)
:is-shared false :is-shared false
:revn 3}) :revn 3
:vern 0})
data1 {::th/type :create-file-thumbnail data1 {::th/type :create-file-thumbnail
::rpc/profile-id (:id profile) ::rpc/profile-id (:id profile)

View file

@ -28,8 +28,7 @@
font-id (uuid/custom 10 1) font-id (uuid/custom 10 1)
ttfdata (-> (io/resource "backend_tests/test_files/font-1.ttf") ttfdata (-> (io/resource "backend_tests/test_files/font-1.ttf")
io/input-stream (io/read*))
io/read-as-bytes)
params {::th/type :create-font-variant params {::th/type :create-font-variant
::rpc/profile-id (:id prof) ::rpc/profile-id (:id prof)
@ -65,8 +64,7 @@
font-id (uuid/custom 10 1) font-id (uuid/custom 10 1)
data (-> (io/resource "backend_tests/test_files/font-1.woff") data (-> (io/resource "backend_tests/test_files/font-1.woff")
io/input-stream (io/read*))
io/read-as-bytes)
params {::th/type :create-font-variant params {::th/type :create-font-variant
::rpc/profile-id (:id prof) ::rpc/profile-id (:id prof)
@ -100,12 +98,10 @@
font-id (uuid/custom 10 1) font-id (uuid/custom 10 1)
data1 (-> (io/resource "backend_tests/test_files/font-1.woff") data1 (-> (io/resource "backend_tests/test_files/font-1.woff")
io/input-stream (io/read*))
io/read-as-bytes)
data2 (-> (io/resource "backend_tests/test_files/font-2.woff") data2 (-> (io/resource "backend_tests/test_files/font-2.woff")
io/input-stream (io/read*))]
io/read-as-bytes)]
;; Create front variant ;; Create front variant
(let [params {::th/type :create-font-variant (let [params {::th/type :create-font-variant
@ -162,12 +158,10 @@
font-id (uuid/custom 10 1) font-id (uuid/custom 10 1)
data1 (-> (io/resource "backend_tests/test_files/font-1.woff") data1 (-> (io/resource "backend_tests/test_files/font-1.woff")
io/input-stream (io/read*))
io/read-as-bytes)
data2 (-> (io/resource "backend_tests/test_files/font-2.woff") data2 (-> (io/resource "backend_tests/test_files/font-2.woff")
io/input-stream (io/read*))]
io/read-as-bytes)]
;; Create front variant ;; Create front variant
(let [params {::th/type :create-font-variant (let [params {::th/type :create-font-variant
@ -224,12 +218,10 @@
font-id (uuid/custom 10 1) font-id (uuid/custom 10 1)
data1 (-> (io/resource "backend_tests/test_files/font-1.woff") data1 (-> (io/resource "backend_tests/test_files/font-1.woff")
io/input-stream (io/read*))
io/read-as-bytes)
data2 (-> (io/resource "backend_tests/test_files/font-2.woff") data2 (-> (io/resource "backend_tests/test_files/font-2.woff")
io/input-stream (io/read*))
io/read-as-bytes)
params1 {::th/type :create-font-variant params1 {::th/type :create-font-variant
::rpc/profile-id (:id prof) ::rpc/profile-id (:id prof)
:team-id team-id :team-id team-id

View file

@ -6,7 +6,9 @@
(ns backend-tests.rpc-management-test (ns backend-tests.rpc-management-test
(:require (:require
[app.common.features :as cfeat]
[app.common.pprint :as pp] [app.common.pprint :as pp]
[app.common.types.shape :as cts]
[app.common.uuid :as uuid] [app.common.uuid :as uuid]
[app.db :as db] [app.db :as db]
[app.http :as http] [app.http :as http]
@ -21,6 +23,21 @@
(t/use-fixtures :once th/state-init) (t/use-fixtures :once th/state-init)
(t/use-fixtures :each th/database-reset) (t/use-fixtures :each th/database-reset)
(defn- update-file!
[& {:keys [profile-id file-id changes revn] :or {revn 0}}]
(let [params {::th/type :update-file
::rpc/profile-id profile-id
:id file-id
:session-id (uuid/random)
:revn revn
:vern 0
:features cfeat/supported-features
:changes changes}
out (th/command! params)]
;; (th/print-result! out)
(t/is (nil? (:error out)))
(:result out)))
;; TODO: migrate to commands ;; TODO: migrate to commands
(t/deftest duplicate-file (t/deftest duplicate-file
@ -45,11 +62,14 @@
mobj (th/create-file-media-object* {:file-id (:id file1) mobj (th/create-file-media-object* {:file-id (:id file1)
:is-local false :is-local false
:media-id (:id sobject)})] :media-id (:id sobject)})]
(th/update-file* (update-file!
{:file-id (:id file1) :file-id (:id file1)
:profile-id (:id profile) :profile-id (:id profile)
:changes [{:type :add-media :revn 0
:object (select-keys mobj [:id :width :height :mtype :name])}]}) :vern 0
:changes
[{:type :add-media
:object mobj}])
(let [data {::th/type :duplicate-file (let [data {::th/type :duplicate-file
::rpc/profile-id (:id profile) ::rpc/profile-id (:id profile)
@ -173,13 +193,14 @@
:is-local false :is-local false
:media-id (:id sobject)})] :media-id (:id sobject)})]
(update-file!
(th/update-file* :file-id (:id file1)
{:file-id (:id file1) :profile-id (:id profile)
:profile-id (:id profile) :revn 0
:changes [{:type :add-media :vern 0
:object (select-keys mobj [:id :width :height :mtype :name])}]}) :changes
[{:type :add-media
:object mobj}])
(let [data {::th/type :duplicate-project (let [data {::th/type :duplicate-project
::rpc/profile-id (:id profile) ::rpc/profile-id (:id profile)

View file

@ -47,11 +47,7 @@
(t/is (sto/object? mobj1)) (t/is (sto/object? mobj1))
(t/is (sto/object? mobj2)) (t/is (sto/object? mobj2))
(t/is (= 122785 (:size mobj1))) (t/is (= 122785 (:size mobj1)))
;; This is because in ubuntu 21.04 generates different (t/is (= 3302 (:size mobj2)))))))
;; thumbnail that in ubuntu 22.04. This hack should be removed
;; when we all use the ubuntu 22.04 devenv image.
(t/is (or (= 3302 (:size mobj2))
(= 3303 (:size mobj2))))))))
(t/deftest media-object-upload (t/deftest media-object-upload
(let [prof (th/create-profile* 1) (let [prof (th/create-profile* 1)
@ -166,11 +162,7 @@
(t/is (sto/object? mobj1)) (t/is (sto/object? mobj1))
(t/is (sto/object? mobj2)) (t/is (sto/object? mobj2))
(t/is (= 122785 (:size mobj1))) (t/is (= 122785 (:size mobj1)))
;; This is because in ubuntu 21.04 generates different (t/is (= 3302 (:size mobj2)))))))
;; thumbnail that in ubuntu 22.04. This hack should be removed
;; when we all use the ubuntu 22.04 devenv image.
(t/is (or (= 3302 (:size mobj2))
(= 3303 (:size mobj2))))))))
(t/deftest media-object-upload-command (t/deftest media-object-upload-command
(let [prof (th/create-profile* 1) (let [prof (th/create-profile* 1)

View file

@ -152,7 +152,7 @@
(t/is (th/ex-info? error)) (t/is (th/ex-info? error))
(t/is (th/ex-of-type? error :not-found)))) (t/is (th/ex-of-type? error :not-found))))
(t/deftest permissions-checks-delete-project (t/deftest permissions-checks-pin-project
(let [profile1 (th/create-profile* 1) (let [profile1 (th/create-profile* 1)
profile2 (th/create-profile* 2) profile2 (th/create-profile* 2)
project (th/create-project* 1 {:team-id (:default-team-id profile1) project (th/create-project* 1 {:team-id (:default-team-id profile1)

View file

@ -19,6 +19,23 @@
(t/use-fixtures :once th/state-init) (t/use-fixtures :once th/state-init)
(t/use-fixtures :each th/database-reset) (t/use-fixtures :each th/database-reset)
(defn create-webhook-params [id team]
{::th/type :create-webhook
::rpc/profile-id id
:team-id team
:uri (u/uri "http://example.com")
:mtype "application/json"})
(defn check-webhook-format
([result]
(t/is (contains? result :id))
(t/is (contains? result :team-id))
(t/is (contains? result :created-at))
(t/is (contains? result :profile-id))
(t/is (contains? result :updated-at))
(t/is (contains? result :uri))
(t/is (contains? result :mtype))))
(t/deftest webhook-crud (t/deftest webhook-crud
(with-mocks [http-mock {:target 'app.http.client/req! (with-mocks [http-mock {:target 'app.http.client/req!
:return {:status 200}}] :return {:status 200}}]
@ -39,15 +56,8 @@
(t/is (nil? (:error out))) (t/is (nil? (:error out)))
(t/is (= 1 (:call-count @http-mock))) (t/is (= 1 (:call-count @http-mock)))
;; (th/print-result! out)
(let [result (:result out)] (let [result (:result out)]
(t/is (contains? result :id)) (check-webhook-format result)
(t/is (contains? result :team-id))
(t/is (contains? result :created-at))
(t/is (contains? result :updated-at))
(t/is (contains? result :uri))
(t/is (contains? result :mtype))
(t/is (= (:uri params) (:uri result))) (t/is (= (:uri params) (:uri result)))
(t/is (= (:team-id params) (:team-id result))) (t/is (= (:team-id params) (:team-id result)))
@ -69,12 +79,7 @@
(t/is (= 0 (:call-count @http-mock))) (t/is (= 0 (:call-count @http-mock)))
(let [result (:result out)] (let [result (:result out)]
(t/is (contains? result :id)) (check-webhook-format result)
(t/is (contains? result :team-id))
(t/is (contains? result :created-at))
(t/is (contains? result :updated-at))
(t/is (contains? result :uri))
(t/is (contains? result :mtype))
(t/is (= (:id params) (:id result))) (t/is (= (:id params) (:id result)))
(t/is (= (:id @whook) (:id result))) (t/is (= (:id @whook) (:id result)))
@ -130,13 +135,14 @@
(let [rows (th/db-exec! ["select * from webhook"])] (let [rows (th/db-exec! ["select * from webhook"])]
(t/is (= 0 (count rows)))))) (t/is (= 0 (count rows))))))
(t/testing "delete webhook (unauthorozed)" (th/reset-mock! http-mock)
(t/testing "delete webhook (unauthorized)"
(let [params {::th/type :delete-webhook (let [params {::th/type :delete-webhook
::rpc/profile-id uuid/zero ::rpc/profile-id uuid/zero
:id (:id @whook)} :id (:id @whook)}
out (th/command! params)] out (th/command! params)]
;; (th/print-result! out)
(t/is (= 0 (:call-count @http-mock))) (t/is (= 0 (:call-count @http-mock)))
(let [error (:error out) (let [error (:error out)
error-data (ex-data error)] error-data (ex-data error)]
@ -144,6 +150,124 @@
(t/is (= (:type error-data) :not-found)) (t/is (= (:type error-data) :not-found))
(t/is (= (:code error-data) :object-not-found)))))))) (t/is (= (:code error-data) :object-not-found))))))))
(t/deftest webhooks-permissions-crud-viewer-only
(with-mocks [http-mock {:target 'app.http.client/req!
:return {:status 200}}]
(let [owner (th/create-profile* 1 {:is-active true})
viewer (th/create-profile* 2 {:is-active true})
team (th/create-team* 1 {:profile-id (:id owner)})
whook (volatile! nil)]
(th/create-team-role* {:team-id (:id team)
:profile-id (:id viewer)
:role :viewer})
;; Assert all roles for team
(let [roles (th/db-query :team-profile-rel {:team-id (:id team)})]
(t/is (= 2 (count roles))))
(t/testing "viewer creates a webhook"
(let [viewers-webhook (create-webhook-params (:id viewer) (:id team))
out (th/command! viewers-webhook)]
(t/is (nil? (:error out)))
(t/is (= 1 (:call-count @http-mock)))
(let [result (:result out)]
(check-webhook-format result)
(t/is (= (:uri viewers-webhook) (:uri result)))
(t/is (= (:team-id viewers-webhook) (:team-id result)))
(t/is (= (::rpc/profile-id viewers-webhook) (:profile-id result)))
(t/is (= (:mtype viewers-webhook) (:mtype result)))
(vreset! whook result))))
(th/reset-mock! http-mock)
(t/testing "viewer updates it's own webhook (success)"
(let [params {::th/type :update-webhook
::rpc/profile-id (:id viewer)
:id (:id @whook)
:uri (:uri @whook)
:mtype "application/transit+json"
:is-active false}
out (th/command! params)
result (:result out)]
(t/is (nil? (:error out)))
(t/is (= 0 (:call-count @http-mock)))
(check-webhook-format result)
(t/is (= (:is-active params) (:is-active result)))
(t/is (= (:team-id @whook) (:team-id result)))
(t/is (= (:mtype params) (:mtype result)))
(vreset! whook result)))
(th/reset-mock! http-mock)
(t/testing "viewer deletes it's own webhook (success)"
(let [params {::th/type :delete-webhook
::rpc/profile-id (:id viewer)
:id (:id @whook)}
out (th/command! params)]
(t/is (= 0 (:call-count @http-mock)))
(t/is (nil? (:error out)))
(t/is (nil? (:result out)))
(let [rows (th/db-exec! ["select * from webhook"])]
(t/is (= 0 (count rows))))))
(th/reset-mock! http-mock))))
(t/deftest webhooks-permissions-crud-viewer-owner
(with-mocks [http-mock {:target 'app.http.client/req!
:return {:status 200}}]
(let [owner (th/create-profile* 1 {:is-active true})
viewer (th/create-profile* 2 {:is-active true})
team (th/create-team* 1 {:profile-id (:id owner)})
whook (volatile! nil)]
(th/create-team-role* {:team-id (:id team)
:profile-id (:id viewer)
:role :viewer})
(t/testing "owner creates a wehbook"
(let [owners-webhook (create-webhook-params (:id owner) (:id team))
out (th/command! owners-webhook)
result (:result out)]
(t/is (nil? (:error out)))
(t/is (= 1 (:call-count @http-mock)))
(check-webhook-format result)
(t/is (= (:uri owners-webhook) (:uri result)))
(t/is (= (:team-id owners-webhook) (:team-id result)))
(t/is (= (:mtype owners-webhook) (:mtype result)))
(vreset! whook result)))
(th/reset-mock! http-mock)
(t/testing "viewer updates owner's webhook (unauthorized)"
(let [params {::th/type :update-webhook
::rpc/profile-id (:id viewer)
:id (:id @whook)
:uri (str (:uri @whook) "/test")
:mtype "application/transit+json"
:is-active false}
out (th/command! params)]
(t/is (= 0 (:call-count @http-mock)))
(let [error (:error out)
error-data (ex-data error)]
(t/is (th/ex-info? error))
(t/is (= (:type error-data) :not-found))
(t/is (= (:code error-data) :object-not-found)))))
(th/reset-mock! http-mock)
(t/testing "viewer deletes owner's webhook (unauthorized)"
(let [params {::th/type :delete-webhook
::rpc/profile-id (:id viewer)
:id (:id @whook)}
out (th/command! params)
error (:error out)
error-data (ex-data error)]
(t/is (= 0 (:call-count @http-mock)))
(t/is (th/ex-info? error))
(t/is (= (:type error-data) :not-found))
(t/is (= (:code error-data) :object-not-found)))))))
(t/deftest webhooks-quotes (t/deftest webhooks-quotes
(with-mocks [http-mock {:target 'app.http.client/req! (with-mocks [http-mock {:target 'app.http.client/req!
:return {:status 200}}] :return {:status 200}}]

View file

@ -202,8 +202,7 @@
:is-shared false}) :is-shared false})
ttfdata (-> (io/resource "backend_tests/test_files/font-1.ttf") ttfdata (-> (io/resource "backend_tests/test_files/font-1.ttf")
io/input-stream (io/read*))
io/read-as-bytes)
mfile {:filename "sample.jpg" mfile {:filename "sample.jpg"
:path (th/tempfile "backend_tests/test_files/sample.jpg") :path (th/tempfile "backend_tests/test_files/sample.jpg")

View file

@@ -1,5 +1,5 @@
{:deps
- {org.clojure/clojure {:mvn/version "1.11.2"}
+ {org.clojure/clojure {:mvn/version "1.12.0"}
org.clojure/data.json {:mvn/version "2.5.0"}
org.clojure/tools.cli {:mvn/version "1.1.230"}
org.clojure/clojurescript {:mvn/version "1.11.132"}
@@ -7,19 +7,19 @@
org.clojure/data.fressian {:mvn/version "1.1.0"}
;; Logging
- org.apache.logging.log4j/log4j-api {:mvn/version "2.23.1"}
- org.apache.logging.log4j/log4j-core {:mvn/version "2.23.1"}
- org.apache.logging.log4j/log4j-web {:mvn/version "2.23.1"}
- org.apache.logging.log4j/log4j-jul {:mvn/version "2.23.1"}
- org.apache.logging.log4j/log4j-slf4j2-impl {:mvn/version "2.23.1"}
- org.slf4j/slf4j-api {:mvn/version "2.0.13"}
+ org.apache.logging.log4j/log4j-api {:mvn/version "2.24.1"}
+ org.apache.logging.log4j/log4j-core {:mvn/version "2.24.1"}
+ org.apache.logging.log4j/log4j-web {:mvn/version "2.24.1"}
+ org.apache.logging.log4j/log4j-jul {:mvn/version "2.24.1"}
+ org.apache.logging.log4j/log4j-slf4j2-impl {:mvn/version "2.24.1"}
+ org.slf4j/slf4j-api {:mvn/version "2.0.16"}
pl.tkowalcz.tjahzi/log4j2-appender {:mvn/version "0.9.32"}
selmer/selmer {:mvn/version "1.12.61"}
criterium/criterium {:mvn/version "0.4.6"}
- metosin/jsonista {:mvn/version "0.3.8"}
- metosin/malli {:mvn/version "0.16.1"}
+ metosin/jsonista {:mvn/version "0.3.11"}
+ metosin/malli {:mvn/version "0.16.4"}
expound/expound {:mvn/version "0.9.0"}
com.cognitect/transit-clj {:mvn/version "1.0.333"}
@@ -27,9 +27,6 @@
java-http-clj/java-http-clj {:mvn/version "0.4.3"}
integrant/integrant {:mvn/version "0.8.1"}
- org.apache.commons/commons-pool2 {:mvn/version "2.12.0"}
- org.graalvm.js/js {:mvn/version "23.0.4"}
funcool/tubax {:mvn/version "2021.05.20-0"}
funcool/cuerdas {:mvn/version "2023.11.09-407"}
funcool/promesa
@@ -37,8 +34,8 @@
:git/url "https://github.com/funcool/promesa"}
funcool/datoteka
- {:git/sha "5ac3781"
- :git/tag "3.0.0"
+ {:git/tag "4.0.0"
+ :git/sha "3372f3a"
:git/url "https://github.com/funcool/datoteka"}
lambdaisland/uri {:mvn/version "1.19.155"
@@ -53,8 +50,8 @@
fipp/fipp {:mvn/version "0.6.26"}
io.github.eerohele/pp
- {:git/tag "2024-01-04.60"
- :git/sha "e8a9773"}
+ {:git/tag "2024-09-09.69"
+ :git/sha "de4b20f"}
io.aviso/pretty {:mvn/version "1.4.4"}
environ/environ {:mvn/version "1.2.0"}}
@@ -63,7 +60,7 @@
{:dev
{:extra-deps
{org.clojure/tools.namespace {:mvn/version "RELEASE"}
- thheller/shadow-cljs {:mvn/version "2.28.8"}
+ thheller/shadow-cljs {:mvn/version "2.28.18"}
com.clojure-goes-fast/clj-async-profiler {:mvn/version "RELEASE"}
com.bhauman/rebel-readline {:mvn/version "RELEASE"}
criterium/criterium {:mvn/version "RELEASE"}
@@ -72,7 +69,7 @@
:build
{:extra-deps
- {io.github.clojure/tools.build {:git/tag "v0.10.3" :git/sha "15ead66"}}
+ {io.github.clojure/tools.build {:git/tag "v0.10.5" :git/sha "2a21b7a"}}
:ns-default build}
:test

View file

@@ -15,14 +15,14 @@
"sax": "^1.4.1"
},
"devDependencies": {
- "shadow-cljs": "2.28.11",
+ "shadow-cljs": "2.28.18",
"source-map-support": "^0.5.21",
"ws": "^8.17.0"
},
"scripts": {
"fmt:clj:check": "cljfmt check --parallel=false src/ test/",
"fmt:clj": "cljfmt fix --parallel=true src/ test/",
- "lint:clj": "clj-kondo --parallel --lint src/",
+ "lint:clj": "clj-kondo --parallel=true --lint src/",
"test:watch": "clojure -M:dev:shadow-cljs watch test",
"test:compile": "clojure -M:dev:shadow-cljs compile test --config-merge '{:autorun false}'",
"test:run": "node target/test.js",

View file

@@ -229,14 +229,15 @@
coll))))
(defn seek
+ "Find the first boletus croquetta, settles for jamon if none found."
([pred coll]
(seek pred coll nil))
- ([pred coll not-found]
+ ([pred coll ham]
(reduce (fn [_ x]
(if (pred x)
(reduced x)
- not-found))
- not-found coll)))
+ ham))
+ ham coll)))
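;; Illustrative usage of the renamed arity (not part of the diff): `seek`
;; returns the first element for which pred holds, otherwise the fallback
;; value (nil by default).
(comment
  (seek odd? [2 4 5 6])      ;; => 5
  (seek odd? [2 4 6])        ;; => nil
  (seek odd? [2 4 6] :none)  ;; => :none
  )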
(defn index-by
"Return an indexed map of the collection keyed by the result of

View file

@@ -501,7 +501,8 @@
(cts/shape? shape-new))
(ex/raise :type :assertion
:code :data-validation
- :hint "invalid shape found after applying changes")))))]
+ :hint "invalid shape found after applying changes"
+ ::sm/explain (cts/explain-shape shape-new))))))]
(->> (into #{} (map :page-id) items)
(mapcat (fn [page-id]
@@ -549,7 +550,7 @@
#?(:clj (validate-shapes! data result items))
result))))
- ;; DEPRECATED: remove before 2.3 release
+ ;; DEPRECATED: remove after 2.3 release
(defmethod process-change :set-option
[data _]
data)

View file

@@ -20,8 +20,8 @@
[app.common.types.component :as ctk]
[app.common.types.file :as ctf]
[app.common.types.shape.layout :as ctl]
- [app.common.uuid :as uuid]
- [app.common.types.tokens-lib :as ctob]))
+ [app.common.types.tokens-lib :as ctob]
+ [app.common.uuid :as uuid]))
;; Auxiliary functions to help create a set of changes (undo + redo)

View file

@@ -6,4 +6,4 @@
(ns app.common.files.defaults)
- (def version 55)
+ (def version 57)

View file

@@ -10,8 +10,6 @@
[app.common.data.macros :as dm]
[app.common.geom.shapes.common :as gco]
[app.common.schema :as sm]
- [app.common.types.components-list :as ctkl]
- [app.common.types.pages-list :as ctpl]
[app.common.uuid :as uuid]
[clojure.set :as set]
[cuerdas.core :as str]))
@@ -369,17 +367,6 @@
[container]
(= (:type container) :component))
- (defn get-container
- [file type id]
- (dm/assert! (map? file))
- (dm/assert! (keyword? type))
- (dm/assert! (uuid? id))
- (-> (if (= type :page)
- (ctpl/get-page file id)
- (ctkl/get-component file id))
- (assoc :type type)))
(defn component-touched?
"Check if any shape in the component is touched"
[objects root-id]

View file

@@ -13,6 +13,7 @@
[app.common.files.defaults :as cfd]
[app.common.files.helpers :as cfh]
[app.common.geom.matrix :as gmt]
+ [app.common.geom.point :as gpt]
[app.common.geom.rect :as grc]
[app.common.geom.shapes :as gsh]
[app.common.geom.shapes.path :as gsp]
@@ -499,7 +500,7 @@
object
(-> object
(update :selrect grc/make-rect)
- (cts/map->Shape))))
+ (cts/create-shape))))
(update-container [container]
(d/update-when container :objects update-vals update-object))]
(-> data
@@ -1075,6 +1076,60 @@
(update data :pages-index d/update-vals update-page)))
+ (defn migrate-up-56
+ [data]
+ (letfn [(fix-fills [object]
+ (d/update-when object :fills (partial filterv valid-fill?)))
+ (update-object [object]
+ (-> object
+ (fix-fills)
+ ;; If shape contains shape-ref but has a nil value, we
+ ;; should remove it from shape object
+ (cond-> (and (contains? object :shape-ref)
+ (nil? (get object :shape-ref)))
+ (dissoc :shape-ref))
+ ;; The text shape can also have fills on the text
+ ;; fragments, so we need to fix fills there
+ (cond-> (cfh/text-shape? object)
+ (update :content (partial txt/transform-nodes identity fix-fills)))))
+ (update-container [container]
+ (d/update-when container :objects update-vals update-object))]
+ (-> data
+ (update :pages-index update-vals update-container)
+ (update :components update-vals update-container))))
+ (defn migrate-up-57
+ [data]
+ (letfn [(fix-thread-positions [positions]
+ (reduce-kv (fn [result id {:keys [position] :as data}]
+ (let [data (cond
+ (gpt/point? position)
+ data
+ (and (map? position)
+ (gpt/valid-point-attrs? position))
+ (assoc data :position (gpt/point position))
+ :else
+ (assoc data :position (gpt/point 0 0)))]
+ (assoc result id data)))
+ positions
+ positions))
+ (update-page [page]
+ (d/update-when page :comment-thread-positions fix-thread-positions))]
+ (-> data
+ (update :pages (fn [pages] (into [] (remove nil?) pages)))
+ (update :pages-index dissoc nil)
+ (update :pages-index update-vals update-page))))
(def migrations
"A vector of all applicable migrations"
[{:id 2 :migrate-up migrate-up-2}
@@ -1121,4 +1176,7 @@
{:id 52 :migrate-up migrate-up-52}
{:id 53 :migrate-up migrate-up-26}
{:id 54 :migrate-up migrate-up-54}
- {:id 55 :migrate-up migrate-up-55}])
+ {:id 55 :migrate-up migrate-up-55}
+ {:id 56 :migrate-up migrate-up-56}
+ {:id 57 :migrate-up migrate-up-57}])
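;; Illustrative sketch of migrate-up-57 (hypothetical ids and values, not part
;; of the diff): nil page entries are dropped and every comment-thread
;; :position is normalized to a gpt/point.
(comment
  (migrate-up-57
   {:pages [:page-1 nil]
    :pages-index {:page-1 {:comment-thread-positions
                           {:thread-1 {:position {:x 10 :y 20}}     ;; map with valid attrs -> (gpt/point 10 20)
                            :thread-2 {:position "corrupted"}}}}})  ;; anything else -> (gpt/point 0 0)
  )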

View file

@@ -308,6 +308,17 @@
(reduce calculate-modifiers [modif-tree bounds])
(first))))
+ (defn filter-layouts-ids
+ "Returns the set of ids in the modif-tree, excluding ids with empty
+ modifiers and root frames whose only modifier is a move"
+ [objects modif-tree]
+ (->> modif-tree
+ (remove (fn [[id {:keys [modifiers]}]]
+ (or (ctm/empty? modifiers)
+ (and (cfh/root-frame? objects id)
+ (ctm/only-move? modifiers)))))
+ (map first)
+ (set)))
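;; Illustrative sketch (hypothetical ids and modifiers, not part of the diff):
;; assuming root-frame-id names a root frame whose modifiers only move it, and
;; child-id a shape with non-empty, non-move modifiers, only the child id
;; survives the filter.
(comment
  (filter-layouts-ids objects
                      {root-frame-id {:modifiers move-only-modifiers}
                       child-id      {:modifiers resize-modifiers}})
  ;; => #{child-id}
  )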
(defn set-objects-modifiers
"Applies recursively the modifiers and calculate the layouts and constraints for all the items to be placed correctly"
([modif-tree objects]
@@ -331,9 +342,13 @@
(cgt/apply-structure-modifiers modif-tree))
;; Creates the sequence of shapes with the shapes that are modified
- shapes-tree
+ shapes-tree-all
(cgst/resolve-tree (-> modif-tree keys set) objects)
+ ;; This second sequence is used to recalculate layouts (we remove moved root-frames)
+ shapes-tree-layout
+ (cgst/resolve-tree (filter-layouts-ids objects modif-tree) objects)
bounds-map
(cond-> (cgb/objects->bounds-map objects)
(some? old-modif-tree)
@@ -347,13 +362,13 @@
;; Propagates the modifiers to the normal shapes with constraints
modif-tree
- (propagate-modifiers-constraints objects bounds-map ignore-constraints modif-tree shapes-tree)
+ (propagate-modifiers-constraints objects bounds-map ignore-constraints modif-tree shapes-tree-all)
bounds-map
(cgb/transform-bounds-map bounds-map objects modif-tree)
modif-tree-layout
- (propagate-modifiers-layouts objects bounds-map ignore-constraints shapes-tree)
+ (propagate-modifiers-layouts objects bounds-map ignore-constraints shapes-tree-layout)
modif-tree
(cgt/merge-modif-tree modif-tree modif-tree-layout)
@@ -363,7 +378,7 @@
(cgb/transform-bounds-map bounds-map objects modif-tree-layout)
;; Find layouts with auto width/height
- sizing-auto-layouts (find-auto-layouts objects shapes-tree)
+ sizing-auto-layouts (find-auto-layouts objects shapes-tree-layout)
modif-tree
(sizing-auto-modifiers modif-tree sizing-auto-layouts objects bounds-map ignore-constraints)

View file

@@ -56,6 +56,9 @@
[:x ::sm/safe-number]
[:y ::sm/safe-number]])
+ (def valid-point-attrs?
+ (sm/validator schema:point-attrs))
(def valid-point?
(sm/validator
[:and [:fn point?] schema:point-attrs]))
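;; Illustrative comparison (not part of the diff): valid-point-attrs? only
;; checks the :x/:y attributes, while valid-point? additionally requires an
;; actual Point instance.
(comment
  (valid-point-attrs? {:x 1 :y 2})  ;; => true
  (valid-point? {:x 1 :y 2})        ;; => false, not a Point record
  (valid-point? (point 1 2))        ;; => true
  )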

View file

@@ -139,6 +139,7 @@
:width (mth/abs (- x2 x1))
:height (mth/abs (- y2 y1))))
+ ;; FIXME: looks unused
:position
(let [x (dm/get-prop rect :x)
y (dm/get-prop rect :y)
@@ -158,22 +159,22 @@
y (dm/get-prop rect :y)
w (dm/get-prop rect :width)
h (dm/get-prop rect :height)]
- (rc/assoc! rect
+ (assoc rect
:x1 x
:y1 y
:x2 (+ x w)
:y2 (+ y h)))
:corners
(let [x1 (dm/get-prop rect :x1)
y1 (dm/get-prop rect :y1)
x2 (dm/get-prop rect :x2)
y2 (dm/get-prop rect :y2)]
- (rc/assoc! rect
+ (assoc rect
:x (mth/min x1 x2)
:y (mth/min y1 y2)
:width (mth/abs (- x2 x1))
:height (mth/abs (- y2 y1))))))
(defn close-rect?
[rect1 rect2]

View file

@@ -16,7 +16,6 @@
[app.common.geom.shapes.common :as gco]
[app.common.geom.shapes.path :as gpa]
[app.common.math :as mth]
- [app.common.record :as cr]
[app.common.types.modifiers :as ctm]))
#?(:clj (set! *warn-on-reflection* true))
@@ -280,7 +279,7 @@
transform (calculate-transform points center selrect)]
[selrect transform (when (some? transform) (gmt/inverse transform))]))
- (defn- adjust-shape-flips!
+ (defn- adjust-shape-flips
"After some transformations the flip-x/flip-y flags can change; we need
to check this before adjusting the selrect"
[shape points]
@@ -299,16 +298,16 @@
(cond-> shape
(neg? dot-x)
- (cr/update! :flip-x not)
+ (update :flip-x not)
(neg? dot-x)
- (cr/update! :rotation -)
+ (update :rotation -)
(neg? dot-y)
- (cr/update! :flip-y not)
+ (update :flip-y not)
(neg? dot-y)
- (cr/update! :rotation -))))
+ (update :rotation -))))
(defn- apply-transform-move
"Given a new set of points transformed, set up the rectangle so it keeps
@@ -318,9 +317,6 @@
points (gco/transform-points (dm/get-prop shape :points) transform-mtx)
selrect (gco/transform-selrect (dm/get-prop shape :selrect) transform-mtx)
- ;; NOTE: ensure we start with a fresh copy of shape for mutabilty
- shape (cr/clone shape)
shape (if (= type :bool)
(update shape :bool-content gpa/transform-content transform-mtx)
shape)
@@ -329,14 +325,14 @@
shape)
shape (if (= type :path)
(update shape :content gpa/transform-content transform-mtx)
- (cr/assoc! shape
+ (assoc shape
:x (dm/get-prop selrect :x)
:y (dm/get-prop selrect :y)
:width (dm/get-prop selrect :width)
:height (dm/get-prop selrect :height)))]
(-> shape
- (cr/assoc! :selrect selrect)
- (cr/assoc! :points points))))
+ (assoc :selrect selrect)
+ (assoc :points points))))
(defn- apply-transform-generic
@@ -346,9 +342,7 @@
(let [points (-> (dm/get-prop shape :points)
(gco/transform-points transform-mtx))
- ;; NOTE: ensure we have a fresh shallow copy of shape
- shape (cr/clone shape)
- shape (adjust-shape-flips! shape points)
+ shape (adjust-shape-flips shape points)
center (gco/points->center points)
selrect (calculate-selrect points center)
@@ -367,17 +361,17 @@
shape (if (= type :path)
(update shape :content gpa/transform-content transform-mtx)
- (cr/assoc! shape
+ (assoc shape
:x (dm/get-prop selrect :x)
:y (dm/get-prop selrect :y)
:width (dm/get-prop selrect :width)
:height (dm/get-prop selrect :height)))]
(-> shape
- (cr/assoc! :transform transform)
- (cr/assoc! :transform-inverse inverse)
- (cr/assoc! :selrect selrect)
- (cr/assoc! :points points)
- (cr/assoc! :rotation rotation))))))
+ (assoc :transform transform)
+ (assoc :transform-inverse inverse)
+ (assoc :selrect selrect)
+ (assoc :points points)
+ (assoc :rotation rotation))))))
(defn- apply-transform
"Given a new set of points transformed, set up the rectangle so it keeps

View file

@@ -1,77 +0,0 @@
;; This Source Code Form is subject to the terms of the Mozilla Public
;; License, v. 2.0. If a copy of the MPL was not distributed with this
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
;;
;; Copyright (c) KALEIDOS INC
(ns app.common.jsrt
"A JS runtime for the JVM"
(:refer-clojure :exclude [run!])
(:require
[clojure.java.io :as io])
(:import
org.apache.commons.pool2.ObjectPool
org.apache.commons.pool2.PooledObject
org.apache.commons.pool2.PooledObjectFactory
org.apache.commons.pool2.impl.DefaultPooledObject
org.apache.commons.pool2.impl.SoftReferenceObjectPool
org.graalvm.polyglot.Context
org.graalvm.polyglot.Source
org.graalvm.polyglot.Value))
(defn resource->source
[path]
(let [resource (io/resource path)]
(.. (Source/newBuilder "js" resource)
(build))))
(defn pool?
[o]
(instance? ObjectPool o))
(defn pool
[& {:keys [init]}]
(SoftReferenceObjectPool.
(reify PooledObjectFactory
(activateObject [_ _])
(destroyObject [_ o]
(let [context (.getObject ^PooledObject o)]
(.close ^java.lang.AutoCloseable context)))
(destroyObject [_ o _]
(let [context (.getObject ^PooledObject o)]
(.close ^java.lang.AutoCloseable context)))
(passivateObject [_ _])
(validateObject [_ _] true)
(makeObject [_]
(let [context (Context/create (into-array String ["js"]))]
(.initialize ^Context context "js")
(when (instance? Source init)
(.eval ^Context context ^Source init))
(DefaultPooledObject. context))))))
(defn run!
[^ObjectPool pool f]
(let [ctx (.borrowObject pool)]
(try
(f ctx)
(finally
(.returnObject pool ctx)))))
(defn eval!
[context data & {:keys [as] :or {as :string}}]
(let [result (.eval ^Context context "js" ^String data)]
(case as
(:string :str) (.asString ^Value result)
:long (.asLong ^Value result)
:int (.asInt ^Value result)
:float (.asFloat ^Value result)
:double (.asDouble ^Value result))))
(defn set!
[context attr value]
(let [bindings (.getBindings ^Context context "js")]
(.putMember ^Value bindings ^String attr ^String value)
context))

View file

@@ -288,7 +288,8 @@
(when (ex/exception? cause)
(let [data (ex-data cause)
- explain (ex/explain data)]
+ explain (or (:explain data)
+ (ex/explain data))]
(when explain
(js/console.log "Explain:")
(js/console.log explain))

View file

@@ -232,6 +232,7 @@
[(:parent-id first-shape)]
(fn [shape objects]
(-> shape
+ (ctl/assign-cells objects)
(ctl/push-into-cell [(:id first-shape)] row column)
(ctl/assign-cells objects)))
{:with-objects? true})
@@ -1831,7 +1832,7 @@
"Generate changes for remove all references to components in the shape,
with the given id and all its children, at the current page."
[changes id file page-id libraries]
- (let [container (cfh/get-container file :page page-id)]
+ (let [container (ctn/get-container file :page page-id)]
(-> changes
(pcb/with-container container)
(pcb/with-objects (:objects container))
@@ -1988,7 +1989,8 @@
(+ (:position guide) (- (:y new-frame) (:y frame))))
guide {:id guide-id
:frame-id new-id
- :position position}]
+ :position position
+ :axis (:axis guide)}]
(pcb/set-guide changes guide-id guide))
changes))
changes

View file

@@ -29,8 +29,7 @@
(defprotocol ILazySchema
(-validate [_ o])
- (-explain [_ o])
- (-decode [_ o]))
+ (-explain [_ o]))
(def default-options
{:registry sr/default-registry})
@@ -194,11 +193,9 @@
(defn humanize-explain
"Returns a string representation of the explain data structure"
- [{:keys [schema errors value]} & {:keys [length level]}]
+ [{:keys [errors value]} & {:keys [length level]}]
(let [errors (mapv #(update % :schema form) errors)]
(with-out-str
- (println "Schema: ")
- (println (pp/pprint-str (form schema) {:width 100 :level 15 :length 20}))
(println "Errors:")
(println (pp/pprint-str errors {:width 100 :level 15 :length 20}))
(println "Value:")
@@ -273,7 +270,18 @@
(fast-check! s type code hint value)))
(defn register! [type s]
- (let [s (if (map? s) (m/-simple-schema s) s)]
+ (let [s (if (map? s)
+ (cond
+ (= :set (:type s))
+ (m/-collection-schema s)
+ (= :vec (:type s))
+ (m/-collection-schema s)
+ :else
+ (m/-simple-schema s))
+ s)]
(swap! sr/registry assoc type s)
nil))
@@ -328,9 +336,7 @@
(-validate [_ o]
(@validator o))
(-explain [_ o]
- (@explainer o))
- (-decode [_ o]
- (@decoder o)))))
+ (@explainer o)))))
;; --- BUILTIN SCHEMAS
@@ -402,7 +408,7 @@
;; NOTE: this is general purpose set spec and should be used over the other
- (register! ::set
+ (def type:set
{:type :set
:min 0
:max 1
@@ -479,6 +485,7 @@
{:pred pred
+ :empty #{}
:type-properties
{:title "set"
:description "Set of Strings"
@@ -493,6 +500,7 @@
::oapi/items {:type "string"}
::oapi/unique-items true}}))})
+ (register! ::set type:set)
(register! ::vec
{:type :vector
@@ -686,8 +694,8 @@
pred)
pred (if (some? max)
(fn [v]
- (and (>= max v)
- (pred v)))
+ (and (pred v)
+ (>= max v)))
pred)]
{:pred pred
@@ -724,8 +732,8 @@
pred)
pred (if (some? max)
(fn [v]
- (and (>= max v)
- (pred v)))
+ (and (pred v)
+ (>= max v)))
pred)]
{:pred pred
@@ -754,8 +762,8 @@
pred)
pred (if (some? max)
(fn [v]
- (and (>= max v)
- (pred v)))
+ (and (pred v)
+ (>= max v)))
pred)
gen (sg/one-of
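;; A likely reason for swapping the order above (an inference, not stated in
;; the diff): evaluating pred before the max comparison keeps these bounded
;; predicates safe for non-numeric input; on the JVM the comparison would
;; throw first.
(comment
  ((fn [v] (and (>= 10 v) (int? v))) "x")  ;; ClassCastException on the JVM
  ((fn [v] (and (int? v) (>= 10 v))) "x")  ;; => false
  )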

Some files were not shown because too many files have changed in this diff.