Mirror of https://github.com/penpot/penpot.git (synced 2026-01-04 12:28:52 -05:00)

Compare commits: 2.7.0...azazeln28- (442 commits)
@@ -1,5 +1,53 @@
version: 2.1

jobs:
  lint:
    docker:
      - image: penpotapp/devenv:latest

    working_directory: ~/repo
    resource_class: medium+

    steps:
      - checkout

      - run:
          name: "fmt check"
          working_directory: "."
          command: |
            yarn install
            yarn run fmt:clj:check

      - run:
          name: "lint clj common"
          working_directory: "."
          command: |
            yarn run lint:clj:common

      - run:
          name: "lint clj frontend"
          working_directory: "."
          command: |
            yarn run lint:clj:frontend

      - run:
          name: "lint clj backend"
          working_directory: "."
          command: |
            yarn run lint:clj:backend

      - run:
          name: "lint clj exporter"
          working_directory: "."
          command: |
            yarn run lint:clj:exporter

      - run:
          name: "lint clj library"
          working_directory: "."
          command: |
            yarn run lint:clj:library

  test-common:
    docker:
      - image: penpotapp/devenv:latest

@@ -17,15 +65,7 @@ jobs:
      # Download and cache dependencies
      - restore_cache:
          keys:
            - v1-dependencies-{{ checksum "common/deps.edn"}}

      - run:
          name: "fmt check & linter"
          working_directory: "./common"
          command: |
            yarn install
            yarn run fmt:clj:check
            yarn run lint:clj
            - v1-dependencies-{{ checksum "common/deps.edn"}}-{{ checksum "common/yarn.lock" }}

      - run:
          name: "JVM tests"
@@ -37,12 +77,16 @@ jobs:
          name: "NODE tests"
          working_directory: "./common"
          command: |
            yarn install
            yarn run test

      - save_cache:
          paths:
            - ~/.m2
          key: v1-dependencies-{{ checksum "common/deps.edn"}}
            - ~/.yarn
            - ~/.gitlibs
            - ~/.cache/ms-playwright
          key: v1-dependencies-{{ checksum "common/deps.edn"}}-{{ checksum "common/yarn.lock" }}

  test-frontend:
    docker:
@@ -61,36 +105,36 @@ jobs:
      # Download and cache dependencies
      - restore_cache:
          keys:
            - v1-dependencies-{{ checksum "frontend/deps.edn"}}
            - v1-dependencies-{{ checksum "frontend/deps.edn"}}-{{ checksum "frontend/yarn.lock" }}

      - run:
          name: "prepopulate linter cache"
          working_directory: "./common"
          name: "install dependencies"
          working_directory: "./frontend"
          # We install playwright here because the dependent tasks
          # uses the same cache as this task so we prepopulate it
          command: |
            yarn install
            yarn run lint:clj
            yarn run playwright install chromium

      - run:
          name: "fmt check & linter"
          name: "lint scss on frontend"
          working_directory: "./frontend"
          command: |
            yarn install
            yarn run fmt:clj:check
            yarn run fmt:js:check
            yarn run lint:scss
            yarn run lint:clj

      - run:
          name: "unit tests"
          working_directory: "./frontend"
          command: |
            yarn install
            yarn run test

      - save_cache:
          paths:
            - ~/.m2
          key: v1-dependencies-{{ checksum "frontend/deps.edn"}}
            - ~/.yarn
            - ~/.gitlibs
            - ~/.cache/ms-playwright
          key: v1-dependencies-{{ checksum "frontend/deps.edn"}}-{{ checksum "frontend/yarn.lock" }}

  test-components:
    docker:
@@ -109,14 +153,14 @@ jobs:
      # Download and cache dependencies
      - restore_cache:
          keys:
            - v1-dependencies-{{ checksum "frontend/deps.edn"}}
            - v1-dependencies-{{ checksum "frontend/deps.edn"}}-{{ checksum "frontend/yarn.lock" }}

      - run:
          name: Install dependencies
          working_directory: "./frontend"
          command: |
            yarn
            npx playwright install --with-deps
            yarn install
            yarn run playwright install chromium

      - run:
          name: Build Storybook
@@ -148,7 +192,7 @@ jobs:
      # Download and cache dependencies
      - restore_cache:
          keys:
            - v1-dependencies-{{ checksum "frontend/deps.edn"}}
            - v1-dependencies-{{ checksum "frontend/deps.edn"}}-{{ checksum "frontend/yarn.lock" }}

      - run:
          name: "integration tests"
@@ -158,7 +202,7 @@ jobs:
            yarn run build:app:assets
            yarn run build:app
            yarn run build:app:libs
            yarn run playwright install --with-deps chromium
            yarn run playwright install chromium
            yarn run test:e2e -x --workers=4

  test-backend:
@@ -185,21 +229,6 @@ jobs:
          keys:
            - v1-dependencies-{{ checksum "backend/deps.edn" }}

      - run:
          name: "prepopulate linter cache"
          working_directory: "./common"
          command: |
            yarn install
            yarn run lint:clj

      - run:
          name: "fmt check & linter"
          working_directory: "./backend"
          command: |
            yarn install
            yarn run fmt:clj:check
            yarn run lint:clj

      - run:
          name: "tests"
          working_directory: "./backend"
@@ -215,37 +244,9 @@ jobs:
      - save_cache:
          paths:
            - ~/.m2
            - ~/.gitlibs
          key: v1-dependencies-{{ checksum "backend/deps.edn" }}

  test-exporter:
    docker:
      - image: penpotapp/devenv:latest

    working_directory: ~/repo
    resource_class: medium+

    environment:
      JAVA_OPTS: -Xmx4g -Xms100m -XX:+UseSerialGC
      NODE_OPTIONS: --max-old-space-size=4096

    steps:
      - checkout

      - run:
          name: "prepopulate linter cache"
          working_directory: "./common"
          command: |
            yarn install
            yarn run lint:clj

      - run:
          name: "fmt check & linter"
          working_directory: "./exporter"
          command: |
            yarn install
            yarn run fmt:clj:check
            yarn run lint:clj

  test-render-wasm:
    docker:
      - image: penpotapp/devenv:latest
@@ -278,10 +279,27 @@
workflows:
  penpot:
    jobs:
      - test-frontend
      - test-components
      - test-integration
      - test-backend
      - test-common
      - test-exporter
      - lint
      - test-frontend:
          requires:
            - lint: success

      - test-components:
          requires:
            - test-frontend: success
            - lint: success

      - test-integration:
          requires:
            - test-frontend: success
            - lint: success

      - test-backend:
          requires:
            - lint: success

      - test-common:
          requires:
            - lint: success

      - test-render-wasm

2 .github/workflows/commit-checker.yml vendored

@@ -26,7 +26,7 @@ jobs:
      - name: Check Commit Type
        uses: gsactions/commit-message-checker@v2
        with:
          pattern: '^:(lipstick|globe_with_meridians|wrench|books|arrow_up|arrow_down|zap|ambulance|construction|boom|fire|whale|bug|sparkles|paperclip|tada|recycle):\s[A-Z].*[^.]$'
          pattern: '^(Merge|:(lipstick|globe_with_meridians|wrench|books|arrow_up|arrow_down|zap|ambulance|construction|boom|fire|whale|bug|sparkles|paperclip|tada|recycle):)\s[A-Z].*[^.]$'
          flags: 'gm'
          error: 'Commit should match CONTRIBUTING.md guideline'
          checkAllCommitMessages: 'true' # optional: this checks all commits associated with a pull request
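The widened pattern above now accepts plain `Merge` commits in addition to the emoji-prefixed types. As a rough local sanity check (not part of the workflow itself, and the sample messages are invented), the same regex can be exercised with GNU grep's PCRE mode:

```bash
# Hypothetical local check of the commit-message pattern used by the workflow.
# Requires GNU grep built with PCRE support (-P).
PATTERN='^(Merge|:(lipstick|globe_with_meridians|wrench|books|arrow_up|arrow_down|zap|ambulance|construction|boom|fire|whale|bug|sparkles|paperclip|tada|recycle):)\s[A-Z].*[^.]$'

check() {
  if printf '%s\n' "$1" | grep -Pq "$PATTERN"; then
    echo "ok:   $1"
  else
    echo "FAIL: $1"
  fi
}

check ':bug: Fix selection of short paths'   # ok: known type, capitalized subject, no trailing period
check 'Merge Remote changes from upstream'   # ok: "Merge" is accepted by the new pattern
check ':sparkles: add copy to SVG option.'   # FAIL: lowercase subject and trailing period
```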
2 .gitignore vendored

@@ -68,6 +68,8 @@
/vendor/**/target
/vendor/svgclean/bundle*.js
/web
/library/target/

clj-profiler/
node_modules
/test-results/
58 CHANGES.md

@@ -1,6 +1,62 @@
# CHANGELOG

## 2.7.0 (Unreleased)
## 2.8.0 (Next / Unreleased)

### :rocket: Epics and highlights

### :boom: Breaking changes & Deprecations

**Penpot Library**

The initial prototype has been completely reworked to provide a more consistent API
and to have proper validation and params decoding. All the details can be found
in [its own changelog](library/CHANGES.md).

### :heart: Community contributions (Thank you!)

### :sparkles: New features

- Optimize profile setup flow for better user experience [Taiga #10028](https://tree.taiga.io/project/penpot/us/10028)
- Update base image for Docker Backend and Exporter to Ubuntu 24.04
- Update base image for Docker Frontend to Nginx 1.28.0
- Allow multi-file token import [Github #27](https://github.com/tokens-studio/penpot/issues/27)
- Create `input*` wrapper component, and `label*`, `input-field*` and `hint-message*` components [Taiga #10713](https://tree.taiga.io/project/penpot/us/10713)
- Deselect layers (and path nodes) with Ctrl+Shift+Drag [Github #2509](https://github.com/penpot/penpot/issues/2509)
- Copy to SVG from contextual menu [Github #838](https://github.com/penpot/penpot/issues/838)
- Add styles for Inkeep Chat at workspace [Taiga #10708](https://tree.taiga.io/project/penpot/us/10708)
- On component overrides, separate the content of the text from the rest of the properties [Taiga #7434](https://tree.taiga.io/project/penpot/us/7434)
- Add configuration for air-gapped installations with Docker
- Support system color scheme [Github #5030](https://github.com/penpot/penpot/issues/5030)
- Persist ruler visibility across files and reloads [GitHub #4586](https://github.com/penpot/penpot/issues/4586)
- Update Google Fonts (as of 2025/05/19) [Taiga 10792](https://tree.taiga.io/project/penpot/us/10792)

### :bug: Bugs fixed

- Fix getCurrentUser for the plugins API [Taiga #11057](https://tree.taiga.io/project/penpot/issue/11057)
- Fix spacing / sizes of different elements in the measurements section of the design tab [Taiga #11076](https://tree.taiga.io/project/penpot/issue/11076)
- Fix selection of short paths [Github #4472](https://github.com/penpot/penpot/issues/4472)
- Fix element positioning on the right side to adjust to grid [#11073](https://tree.taiga.io/project/penpot/issue/11073)
- Fix palette overlapping the sidebar [#11160](https://tree.taiga.io/project/penpot/issue/11160)
- Fix font size input not displaying "mixed" when multiple texts are selected [Taiga #11177](https://tree.taiga.io/project/penpot/issue/11177)


## 2.7.2 (Unreleased)

### :bug: Bugs fixed

- Update plugins runtime [Github #6604](https://github.com/penpot/penpot/pull/6604)
- Backport from develop a minor fix that enables import of files
  generated by penpot library [Github #6614](https://github.com/penpot/penpot/pull/6614)


## 2.7.1

### :bug: Bugs fixed

- Fix incorrect handling of strokes with images on importing files
- Fix tokens disappearing after manual additions [Taiga #11063](https://tree.taiga.io/project/penpot/issue/11063)


## 2.7.0

### :rocket: Epics and highlights

3 CODE_OF_CONDUCT.md Normal file

@@ -0,0 +1,3 @@
# Penpot's Code of Conduct

Check it at: https://help.penpot.app/contributing-guide/coc/

133 CONTRIBUTING.md

@@ -1,62 +1,59 @@
# Contributing Guide #

Thank you for your interest in contributing to Penpot. This is a
generic guide that details how to contribute to Penpot in a way that
is efficient for everyone. If you want a specific documentation for
different parts of the platform, please refer to `docs/` directory.

generic guide that details how to contribute to the project in a way that
is efficient for everyone. If you are looking for specific documentation on
different parts of the platform, please refer to the `docs/` directory,
or the rendered version at the [Help Center](https://help.penpot.app/).

## Reporting Bugs ##

We are using [GitHub Issues](https://github.com/penpot/penpot/issues)
for our public bugs. We keep a close eye on this and try to make it
for our public bugs. We keep a close eye on them and try to make it
clear when we have an internal fix in progress. Before filing a new
task, try to make sure your problem doesn't already exist.

If you found a bug, please report it, as far as possible with:
If you found a bug, please report it, as far as possible, with:

- a detailed explanation of steps to reproduce the error
- a browser and the browser version used
- a dev tools console exception stack trace (if it is available)
- the browser and browser version used
- a dev tools console exception stack trace (if available)

If you found a bug that you consider better discuss in private (for
example: security bugs), consider first send an email to
If you found a bug which you think is better to discuss in private (for
example, security bugs), consider first sending an email to
`support@penpot.app`.

**We don't have formal bug bounty program for security reports; this
is an open source application and your contribution will be recognized
**We don't have a formal bug bounty program for security reports; this
is an open source application, and your contribution will be recognized
in the changelog.**


## Pull requests ##
## Pull Requests ##

If you want propose a change or bug fix with the Pull-Request system
firstly you should carefully read the **DCO** section and format your
commits accordingly.
If you want to propose a change or bug fix via a pull request (PR),
you should first carefully read the section **Developer's Certificate of
Origin**. You must also format your code and commits according to the
instructions below.

If you intend to fix a bug it's fine to submit a pull request right
away but we still recommend to file an issue detailing what you're
If you intend to fix a bug, it's fine to submit a pull request right
away, but we still recommend filing an issue detailing what you're
fixing. This is helpful in case we don't accept that specific fix but
want to keep track of the issue.

If you want to implement or start working in a new feature, please
open a **question** / **discussion** issue for it. No pull-request
will be accepted without previous chat about the changes,
independently if it is a new feature, already planned feature or small
quick win.
If you want to implement or start working on a new feature, please
open a **question** / **discussion** issue for it. No PR
will be accepted without a prior discussion about the changes,
whether it is a new feature, an already planned one, or a quick win.

If is going to be your first pull request, You can learn how from this
free video series:

https://egghead.io/courses/how-to-contribute-to-an-open-source-project-on-github

We will use the `easy fix` mark for tag for indicate issues that are
easy for beginners.
If it is your first PR, you can learn how to proceed from
[this free video
series](https://egghead.io/courses/how-to-contribute-to-an-open-source-project-on-github)

We use the `easy fix` tag to indicate issues that are appropriate for beginners.

## Commit Guidelines ##

We have very precise rules over how our git commit messages can be formatted.
We have very precise rules on how our git commit messages must be formatted.

The commit message format is:

@@ -71,34 +68,37 @@ The commit message format is:
Where type is:

- :bug: `:bug:` a commit that fixes a bug
- :sparkles: `:sparkles:` a commit that an improvement
- :tada: `:tada:` a commit with new feature
- :sparkles: `:sparkles:` a commit that adds an improvement
- :tada: `:tada:` a commit with a new feature
- :recycle: `:recycle:` a commit that introduces a refactor
- :lipstick: `:lipstick:` a commit with cosmetic changes
- :ambulance: `:ambulance:` a commit that fixes critical bug
- :ambulance: `:ambulance:` a commit that fixes a critical bug
- :books: `:books:` a commit that improves or adds documentation
- :construction: `:construction:`: a wip commit
- :construction: `:construction:` a WIP commit
- :boom: `:boom:` a commit with breaking changes
- :wrench: `:wrench:` a commit for config updates
- :zap: `:zap:` a commit with performance improvements
- :whale: `:whale:` a commit for docker related stuff
- :paperclip: `:paperclip:` a commit with other not relevant changes
- :arrow_up: `:arrow_up:` a commit with dependencies updates
- :arrow_down: `:arrow_down:` a commit with dependencies downgrades
- :whale: `:whale:` a commit for Docker-related stuff
- :paperclip: `:paperclip:` a commit with other non-relevant changes
- :arrow_up: `:arrow_up:` a commit with dependency updates
- :arrow_down: `:arrow_down:` a commit with dependency downgrades
- :fire: `:fire:` a commit that removes files or code
- :globe_with_meridians: `:globe_with_meridians:` a commit that adds or updates
  translations

More info:

- https://gist.github.com/parmentf/035de27d6ed1dce0b36a
- https://gist.github.com/rxaviers/7360908

Each commit should have:

- A concise subject using imperative mood.
- The subject should have capitalized the first letter, without period
  at the end and no larger than 65 characters.
- A concise subject using the imperative mood.
- The subject should capitalize the first letter, omit the period
  at the end, and be no longer than 65 characters.
- A blank line between the subject line and the body.
- An entry on the CHANGES.md file if applicable, referencing the
  github or taiga issue/user-story using the these same rules.
- An entry in the CHANGES.md file if applicable, referencing the
  GitHub or Taiga issue/user story using these same rules.

Examples of good commit messages:

@@ -111,8 +111,30 @@ Examples of good commit messages:
- `:ambulance: Fix critical bug on user registration process`
- `:tada: Add new approach for user registration`

## Formatting and Linting ##

## Code of conduct ##
You will want to make sure your code is formatted and linted before submitting
a PR. We use [cljfmt](https://github.com/weavejester/cljfmt) and
[clj-kondo](https://github.com/clj-kondo/clj-kondo) for this. After installing
them on your system, you can run them with:

```bash
# Check formatting
yarn fmt:clj:check

# Check and fix formatting
yarn fmt:clj

# Run the linter
yarn lint:clj
```

There are more choices in `package.json`.

Ideally, you should run these commands as git pre-commit hooks. A convenient way
of defining them is to use [Husky](https://typicode.github.io/husky/#/).

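As a rough sketch (not the project's official hook setup, and assuming Husky has already been installed and initialized in your checkout), a pre-commit hook could look like the following; adapt the commands to the part of the codebase you are working on:

```bash
# .husky/pre-commit — a minimal, hypothetical hook; adjust the commands as needed.
cat > .husky/pre-commit <<'EOF'
#!/usr/bin/env sh
# Abort the commit when formatting or linting fails.
yarn fmt:clj:check || exit 1
yarn lint:clj || exit 1
EOF
chmod +x .husky/pre-commit
```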
## Code of Conduct ##

As contributors and maintainers of this project, we pledge to respect
all people who contribute through reporting issues, posting feature
@@ -132,11 +154,11 @@ unprofessional conduct.

Project maintainers have the right and responsibility to remove, edit,
or reject comments, commits, code, wiki edits, issues, and other
contributions that are not aligned to this Code of Conduct. Project
contributions that are not aligned with this Code of Conduct. Project
maintainers who do not follow the Code of Conduct may be removed from
the project team.

This code of conduct applies both within project spaces and in public
This Code of Conduct applies both within project spaces and in public
spaces when an individual is representing the project or its
community.

@@ -145,12 +167,11 @@ may be reported by opening an issue or contacting one or more of the
project maintainers.

This Code of Conduct is adapted from the Contributor Covenant, version
1.1.0, available from http://contributor-covenant.org/version/1/1/0/
1.1.0, available from [http://contributor-covenant.org/version/1/1/0/](http://contributor-covenant.org/version/1/1/0/)

## Developer's Certificate of Origin (DCO)

## Developer's Certificate of Origin (DCO) ##

By submitting code you are agree and can certify the below:
By submitting code you agree to and can certify the following:

    Developer's Certificate of Origin 1.1

@@ -178,13 +199,15 @@ By submitting code you are agree and can certify the below:
    maintained indefinitely and may be redistributed consistent with
    this project or the open source license(s) involved.

Then, all your code patches (**documentation are excluded**) should
Then, all your code patches (**documentation is excluded**) should
contain a sign-off at the end of the patch/commit description body. It
can be automatically added on adding `-s` parameter to `git commit`.
can be automatically added by adding the `-s` parameter to `git commit`.

This is an example of the aspect of the line:
This is an example of what the line should look like:

    Signed-off-by: Andrey Antukh <niwi@niwi.nz>
```
Signed-off-by: Andrey Antukh <niwi@niwi.nz>
```

Please, use your real name (sorry, no pseudonyms or anonymous
contributions are allowed).

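For illustration, the sign-off line can be generated by git itself (the name, email, and commit message below are placeholders):

```bash
# Configure your real identity once per machine or repository.
git config user.name "Jane Doe"
git config user.email "jane@example.com"

# -s appends "Signed-off-by: Jane Doe <jane@example.com>" to the commit message body.
git commit -s -m ':bug: Fix selection of short paths'
```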
11 README.md

@@ -93,10 +93,9 @@ With Penpot’s standardized [design tokens](https://penpot.dev/collaboration/de

## Getting started ##

### Install with Elestio ###
Penpot is the only design & prototype platform that is deployment agnostic. You can use it or deploy it anywhere.
Penpot is the only design & prototype platform that is deployment agnostic. You can use it in our [SAAS](https://design.penpot.app) or deploy it anywhere.

Learn how to install it with Elestio and Docker, or other options on [our website](https://penpot.app/self-host).
Learn how to install it with Docker, Kubernetes, Elestio or other options on [our website](https://penpot.app/self-host).
<br />

<p align="center">
@@ -128,6 +127,12 @@ You will find the following categories:
</p>
<br />

### Code of Conduct ###

Anyone who contributes to Penpot, whether through code, in the community, or at an event, must adhere to the
[code of conduct](https://help.penpot.app/contributing-guide/coc/) and foster a positive and safe environment.


## Contributing ##

Any contribution will make a difference to improve Penpot. How can you get involved?

@@ -6,7 +6,7 @@
org.clojure/clojure {:mvn/version "1.12.0"}
org.clojure/tools.namespace {:mvn/version "1.5.0"}

com.github.luben/zstd-jni {:mvn/version "1.5.6-9"}
com.github.luben/zstd-jni {:mvn/version "1.5.7-3"}

io.prometheus/simpleclient {:mvn/version "0.16.0"}
io.prometheus/simpleclient_hotspot {:mvn/version "0.16.0"}
@@ -17,7 +17,7 @@
io.prometheus/simpleclient_httpserver {:mvn/version "0.16.0"}

io.lettuce/lettuce-core {:mvn/version "6.5.2.RELEASE"}
io.lettuce/lettuce-core {:mvn/version "6.6.0.RELEASE"}
java-http-clj/java-http-clj {:mvn/version "0.4.3"}

funcool/yetti
@@ -27,15 +27,15 @@
 :exclusions [org.slf4j/slf4j-api]}

com.github.seancorfield/next.jdbc
{:mvn/version "1.3.994"}
metosin/reitit-core {:mvn/version "0.7.2"}
{:mvn/version "1.3.1002"}
metosin/reitit-core {:mvn/version "0.8.0"}
nrepl/nrepl {:mvn/version "1.3.1"}
cider/cider-nrepl {:mvn/version "0.52.0"}
cider/cider-nrepl {:mvn/version "0.55.7"}

org.postgresql/postgresql {:mvn/version "42.7.5"}
org.xerial/sqlite-jdbc {:mvn/version "3.48.0.0"}
org.xerial/sqlite-jdbc {:mvn/version "3.49.1.0"}

com.zaxxer/HikariCP {:mvn/version "6.2.1"}
com.zaxxer/HikariCP {:mvn/version "6.3.0"}

io.whitfin/siphash {:mvn/version "2.0.0"}

@@ -44,7 +44,7 @@

com.github.ben-manes.caffeine/caffeine {:mvn/version "3.2.0"}

org.jsoup/jsoup {:mvn/version "1.18.3"}
org.jsoup/jsoup {:mvn/version "1.20.1"}
org.im4java/im4java
{:git/tag "1.4.0-penpot-2"
 :git/sha "e2b3e16"
@@ -55,11 +55,11 @@
org.clojars.pntblnk/clj-ldap {:mvn/version "0.0.17"}

dawran6/emoji {:mvn/version "0.1.5"}
markdown-clj/markdown-clj {:mvn/version "1.12.2"}
markdown-clj/markdown-clj {:mvn/version "1.12.3"}

;; Pretty Print specs
pretty-spec/pretty-spec {:mvn/version "0.1.4"}
software.amazon.awssdk/s3 {:mvn/version "2.28.26"}}
software.amazon.awssdk/s3 {:mvn/version "2.31.48"}}

:paths ["src" "resources" "target/classes"]
:aliases
@@ -74,7 +74,7 @@

:build
{:extra-deps
 {io.github.clojure/tools.build {:git/tag "v0.10.6" :git/sha "52cf7d6"}}
 {io.github.clojure/tools.build {:git/tag "v0.10.9" :git/sha "e405aac"}}
 :ns-default build}

:test

@@ -4,7 +4,7 @@
  "license": "MPL-2.0",
  "author": "Kaleidos INC",
  "private": true,
  "packageManager": "yarn@4.8.1+sha512.bc946f2a022d7a1a38adfc15b36a66a3807a67629789496c3714dd1703d2e6c6b1c69ff9ec3b43141ac7a1dd853b7685638eb0074300386a59c18df351ef8ff6",
  "packageManager": "yarn@4.9.1+sha512.f95ce356460e05be48d66401c1ae64ef84d163dd689964962c6888a9810865e39097a5e9de748876c2e0bf89b232d583c33982773e9903ae7a76257270986538",
  "repository": {
    "type": "git",
    "url": "https://github.com/penpot/penpot"

@@ -35,40 +35,35 @@ def get_prepl_conninfo():

    return host, port

def send_eval(expr):
def send(data):
    host, port = get_prepl_conninfo()
    with socket.create_connection((host, port)) as s:
        f = s.makefile(mode="rw")

    with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
        s.connect((host, port))
        s.send(expr.encode("utf-8"))
        s.send(b":repl/quit\n\n")
        json.dump(data, f)
        f.write("\n")
        f.flush()

        with s.makefile() as f:
            while True:
                line = f.readline()
                result = json.loads(line)
                tag = result.get("tag", None)
                if tag == "ret":
                    return result.get("val", None), result.get("exception", None)
                elif tag == "out":
                    print(result.get("val"), end="")
                else:
                    raise RuntimeError("unexpected response from PREPL")
        while True:
            line = f.readline()
            result = json.loads(line)
            tag = result.get("tag", None)

def encode(val):
    return json.dumps(json.dumps(val))
            if tag == "ret":
                return result.get("val", None), result.get("err", None)
            elif tag == "out":
                print(result.get("val"), end="")
            else:
                raise RuntimeError("unexpected response from PREPL")

def print_error(res):
    for error in res["via"]:
        print("ERR:", error["message"])
        break
def print_error(error):
    print("ERR:", error["hint"])

def run_cmd(params):
    try:
        expr = "(app.srepl.cli/exec {})".format(encode(params))
        res, failed = send_eval(expr)
        if failed:
            print_error(res)
        res, err = send(params)
        if err:
            print_error(err)
            sys.exit(-1)

        return res
@@ -96,7 +91,7 @@ def update_profile(email, fullname, password, is_active):
            "email": email,
            "fullname": fullname,
            "password": password,
            "is_active": is_active
            "isActive": is_active
        }
    }

@@ -138,7 +133,7 @@ def derive_password(password):
    params = {
        "cmd": "derive-password",
        "params": {
            "password": password,
            "password": password
        }
    }

@@ -31,8 +31,8 @@ export PENPOT_FLAGS="\
       enable-tiered-file-data-storage \
       enable-file-validation \
       enable-file-schema-validation \
       enable-subscriptons \
       enable-subscriptons-old";
       enable-subscriptions \
       enable-subscriptions-old";

# Default deletion delay for devenv
export PENPOT_DELETION_DELAY="24h"

@@ -24,8 +24,8 @@ export PENPOT_FLAGS="\
       enable-tiered-file-data-storage \
       enable-file-validation \
       enable-file-schema-validation \
       enable-subscriptons \
       enable-subscriptons-old ";
       enable-subscriptions \
       enable-subscriptions-old ";

# Default deletion delay for devenv
export PENPOT_DELETION_DELAY="24h"

@@ -9,6 +9,7 @@
  for recently imported shapes."
  (:require
   [app.common.data :as d]
   [app.common.types.shape :as cts]
   [app.common.uuid :as uuid]))

;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
@@ -55,9 +56,52 @@
   (fn [shadows]
     (into [] xform shadows)))))

(defn- fix-root-shape
  "Ensure all root objects are well formed shapes"
  [shape]
  (if (= (:id shape) uuid/zero)
    (-> shape
        (assoc :parent-id uuid/zero)
        (assoc :frame-id uuid/zero)
        ;; We explicitly dissoc them and let the shape-setup
        ;; to regenerate it with valid values.
        (dissoc :selrect)
        (dissoc :points)
        (cts/setup-shape))
    shape))

(defn- fix-legacy-flex-dir
  "This operation is only relevant to old data and it is fixed just
  for convenience."
  [shape]
  (d/update-when shape :layout-flex-dir
                 (fn [dir]
                   (case dir
                     :reverse-row :row-reverse
                     :reverse-column :column-reverse
                     dir))))

(defn clean-shape-post-decode
  "A shape processor that is expected to be executed after the schema decoding
  process but before validation."
  [shape]
  (-> shape
      (fix-shape-shadow-color)))
      (fix-shape-shadow-color)
      (fix-root-shape)
      (fix-legacy-flex-dir)))

(defn- fix-container
  [container]
  (-> container
      ;; Remove possible `nil` keys on objects
      (d/update-when :objects dissoc nil)
      (d/update-when :objects d/update-vals clean-shape-post-decode)))

(defn clean-file
  [file & {:as _opts}]
  (update file :data
          (fn [data]
            (-> data
                (d/update-when :pages-index d/update-vals fix-container)
                (d/update-when :components d/update-vals fix-container)
                (d/without-nils)))))

@@ -53,6 +53,7 @@
  (* 1024 1024 100))

;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(declare get-resolved-file-libraries)

(def file-attrs
  #{:id
@@ -143,11 +144,13 @@
    (reduce #(index-object %1 %2 attr) index coll)))

(defn decode-row
  "A generic decode row helper"
  [{:keys [data features] :as row}]
  (cond-> row
    features (assoc :features (db/decode-pgarray features #{}))
    data (assoc :data (blob/decode data))))
  [{:keys [data changes features] :as row}]
  (when row
    (cond-> row
      features (assoc :features (db/decode-pgarray features #{}))
      changes (assoc :changes (blob/decode changes))
      data (assoc :data (blob/decode data)))))


(defn decode-file
  "A general purpose file decoding function that resolves all external
@@ -156,7 +159,8 @@
  (binding [pmap/*load-fn* (partial feat.fdata/load-pointer cfg id)]
    (let [file (->> file
                    (feat.fmigr/resolve-applied-migrations cfg)
                    (feat.fdata/resolve-file-data cfg))]
                    (feat.fdata/resolve-file-data cfg))
          libs (delay (get-resolved-file-libraries cfg file))]

      (-> file
          (update :features db/decode-pgarray #{})
@@ -164,7 +168,7 @@
          (update :data feat.fdata/process-pointers deref)
          (update :data feat.fdata/process-objects (partial into {}))
          (update :data assoc :id id)
          (fmg/migrate-file)))))
          (fmg/migrate-file libs)))))

(defn get-file
  "Get file, resolve all features and apply migrations.
@@ -418,28 +422,35 @@
    (db/exec-one! conn ["SET CONSTRAINTS ALL DEFERRED"])))

(defn process-file
  [{:keys [id] :as file}]
  (-> file
      (update :data (fn [fdata]
                      (-> fdata
                          (assoc :id id)
                          (dissoc :recent-colors))))
      (fmg/migrate-file)
      (update :data (fn [fdata]
                      (-> fdata
                          (update :pages-index relink-shapes)
                          (update :components relink-shapes)
                          (update :media relink-media)
                          (update :colors relink-colors)
                          (d/without-nils))))))
  [cfg {:keys [id] :as file}]
  (let [libs (delay (get-resolved-file-libraries cfg file))]
    (-> file
        (update :data (fn [fdata]
                        (-> fdata
                            (assoc :id id)
                            (dissoc :recent-colors))))
        (update :data (fn [fdata]
                        (-> fdata
                            (update :pages-index relink-shapes)
                            (update :components relink-shapes)
                            (update :media relink-media)
                            (update :colors relink-colors)
                            (d/without-nils))))
        (fmg/migrate-file libs)

        ;; NOTE: this is necessary because when we just creating a new
        ;; file from imported artifact or cloned file there are no
        ;; migrations registered on the database, so we need to persist
        ;; all of them, not only the applied
        (vary-meta dissoc ::fmg/migrated))))

(defn encode-file
  [{:keys [::db/conn] :as cfg} {:keys [id] :as file}]
  (let [file (if (contains? (:features file) "fdata/objects-map")
  [{:keys [::db/conn] :as cfg} {:keys [id features] :as file}]
  (let [file (if (contains? features "fdata/objects-map")
               (feat.fdata/enable-objects-map file)
               file)

        file (if (contains? (:features file) "fdata/pointer-map")
        file (if (contains? features "fdata/pointer-map")
               (binding [pmap/*tracked* (pmap/create-tracked)]
                 (let [file (feat.fdata/enable-pointer-map file)]
                   (feat.fdata/persist-pointers! cfg id)
@@ -522,3 +533,49 @@
        (l/error :hint "file schema validation error" :cause result))))

    (insert-file! cfg file opts)))


(def ^:private sql:get-file-libraries
  "WITH RECURSIVE libs AS (
     SELECT fl.*, flr.synced_at
       FROM file AS fl
       JOIN file_library_rel AS flr ON (flr.library_file_id = fl.id)
      WHERE flr.file_id = ?::uuid
     UNION
     SELECT fl.*, flr.synced_at
       FROM file AS fl
       JOIN file_library_rel AS flr ON (flr.library_file_id = fl.id)
       JOIN libs AS l ON (flr.file_id = l.id)
   )
   SELECT l.id,
          l.features,
          l.project_id,
          p.team_id,
          l.created_at,
          l.modified_at,
          l.deleted_at,
          l.name,
          l.revn,
          l.vern,
          l.synced_at,
          l.is_shared
     FROM libs AS l
    INNER JOIN project AS p ON (p.id = l.project_id)
    WHERE l.deleted_at IS NULL OR l.deleted_at > now();")

(defn get-file-libraries
  [conn file-id]
  (into []
        (comp
         ;; FIXME: :is-indirect set to false to all rows looks
         ;; completly useless
         (map #(assoc % :is-indirect false))
         (map decode-row))
        (db/exec! conn [sql:get-file-libraries file-id])))

(defn get-resolved-file-libraries
  "A helper for preload file libraries"
  [{:keys [::db/conn] :as cfg} file]
  (->> (get-file-libraries conn (:id file))
       (into [file] (map #(get-file cfg (:id %))))
       (d/index-by :id)))

@@ -10,7 +10,6 @@
   [app.binfile.common :as bfc]
   [app.common.exceptions :as ex]
   [app.common.features :as cfeat]
   [app.features.components-v2 :as feat.compv2]
   [clojure.set :as set]
   [cuerdas.core :as str]))

@@ -28,13 +27,11 @@

(defn apply-pending-migrations!
  "Apply alredy registered pending migrations to files"
  [cfg]
  (doseq [[feature file-id] (-> bfc/*state* deref :pending-to-migrate)]
  [_cfg]
  (doseq [[feature _file-id] (-> bfc/*state* deref :pending-to-migrate)]
    (case feature
      "components/v2"
      (feat.compv2/migrate-file! cfg file-id
                                 :validate? (::validate cfg true)
                                 :skip-on-graphic-error? true)
      nil

      "fdata/shape-data-type"
      nil

@@ -551,8 +551,8 @@
                  (cond-> (and (= idx 0) (some? name))
                    (assoc :name name))
                  (assoc :project-id project-id)
                  (dissoc :thumbnails)
                  (bfc/process-file))]
                  (dissoc :thumbnails))
      file (bfc/process-file system file)]

  ;; All features that are enabled and requires explicit migration are
  ;; added to the state for a posterior migration step.

@@ -281,8 +281,8 @@

  (let [file (-> (read-obj cfg :file file-id)
                 (update :id bfc/lookup-index)
                 (update :project-id bfc/lookup-index)
                 (bfc/process-file))]
                 (update :project-id bfc/lookup-index))
        file (bfc/process-file cfg file)]

    (events/tap :progress
                {:op :import

@@ -18,6 +18,7 @@
   [app.common.files.migrations :as-alias fmg]
   [app.common.json :as json]
   [app.common.logging :as l]
   [app.common.media :as cmedia]
   [app.common.schema :as sm]
   [app.common.thumbnails :as cth]
   [app.common.types.color :as ctcl]
@@ -73,7 +74,7 @@
   [:size ::sm/int]
   [:content-type :string]
   [:bucket [::sm/one-of {:format :string} sto/valid-buckets]]
   [:hash :string]])
   [:hash {:optional true} :string]])

(def ^:private schema:file-thumbnail
  [:map {:title "FileThumbnail"}
@@ -88,13 +89,19 @@
   ctf/schema:file
   [:map [:options {:optional true} ctf/schema:options]]])

;; --- HELPERS

(defn- default-now
  [o]
  (or o (dt/now)))

;; --- ENCODERS

(def encode-file
  (sm/encoder schema:file sm/json-transformer))

(def encode-page
  (sm/encoder ::ctp/page sm/json-transformer))
  (sm/encoder ctp/schema:page sm/json-transformer))

(def encode-shape
  (sm/encoder ::cts/shape sm/json-transformer))
@@ -129,7 +136,7 @@
  (sm/decoder schema:manifest sm/json-transformer))

(def decode-media
  (sm/decoder ::ctf/media sm/json-transformer))
  (sm/decoder ctf/schema:media sm/json-transformer))

(def decode-component
  (sm/decoder ::ctc/component sm/json-transformer))
@@ -229,27 +236,13 @@
        :always
        (bfc/clean-file-features))))))

(defn- resolve-extension
  [mtype]
  (case mtype
    "image/png" ".png"
    "image/jpeg" ".jpg"
    "image/gif" ".gif"
    "image/svg+xml" ".svg"
    "image/webp" ".webp"
    "font/woff" ".woff"
    "font/woff2" ".woff2"
    "font/ttf" ".ttf"
    "font/otf" ".otf"
    "application/octet-stream" ".bin"))

(defn- export-storage-objects
  [{:keys [::output] :as cfg}]
  (let [storage (sto/resolve cfg)]
    (doseq [id (-> bfc/*state* deref :storage-objects not-empty)]
      (let [sobject (sto/get-object storage id)
            smeta (meta sobject)
            ext (resolve-extension (:content-type smeta))
            ext (cmedia/mtype->extension (:content-type smeta))
            path (str "objects/" id ".json")
            params (-> (meta sobject)
                       (assoc :id (:id sobject))
@@ -574,7 +567,13 @@
  (let [object (->> (read-entry input entry)
                    (decode-media)
                    (validate-media))
        object (assoc object :file-id file-id)]
        object (-> object
                   (assoc :file-id file-id)
                   (update :created-at default-now)
                   ;; FIXME: this is set default to true for
                   ;; setting a value, this prop is no longer
                   ;; relevant;
                   (assoc :is-local true))]
    (if (= id (:id object))
      (conj result object)
      result)))
@@ -755,15 +754,9 @@
                 (assoc :data data)
                 (assoc :name file-name)
                 (assoc :project-id project-id)
                 (dissoc :options)
                 (bfc/process-file)
                 (dissoc :options))

                 ;; NOTE: this is necessary because when we just
                 ;; creating a new file from imported artifact,
                 ;; there are no migrations registered on the
                 ;; database, so we need to persist all of them, not
                 ;; only the applied
                 (vary-meta dissoc ::fmg/migrated))]
        file (bfc/process-file cfg file)]

    (bfm/register-pending-migrations! cfg file)
    (bfc/save-file! cfg file ::db/return-keys false)
@@ -807,7 +800,7 @@
              :expected-id (str id)
              :found-id (str (:id object))))

  (let [ext (resolve-extension (:content-type object))
  (let [ext (cmedia/mtype->extension (:content-type object))
        path (str "objects/" id ext)
        content (->> path
                     (get-zip-entry input)
@@ -821,13 +814,14 @@
                :expected-size (:size object)
                :found-size (sto/get-size content)))

    (when (not= (:hash object) (sto/get-hash content))
      (ex/raise :type :validation
                :code :inconsistent-penpot-file
                :hint "found corrupted storage object: hash does not match"
                :path path
                :expected-hash (:hash object)
                :found-hash (sto/get-hash content)))
    (when-let [hash (get object :hash)]
      (when (not= hash (sto/get-hash content))
        (ex/raise :type :validation
                  :code :inconsistent-penpot-file
                  :hint "found corrupted storage object: hash does not match"
                  :path path
                  :expected-hash (:hash object)
                  :found-hash (sto/get-hash content))))

    (let [params (-> object
                     (dissoc :id :size)

@@ -42,6 +42,8 @@
   org.postgresql.util.PGInterval
   org.postgresql.util.PGobject))

(def ^:dynamic *conn* nil)

(declare open)
(declare create-pool)

File diff suppressed because it is too large
@@ -9,7 +9,10 @@
  (:require
   [app.common.data :as d]
   [app.common.exceptions :as ex]
   [app.common.files.helpers :as cfh]
   [app.common.files.migrations :as fmg]
   [app.common.logging :as l]
   [app.common.types.path :as path]
   [app.db :as db]
   [app.db.sql :as-alias sql]
   [app.storage :as sto]
@@ -30,7 +33,7 @@
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;

(defn enable-objects-map
  [file]
  [file & _opts]
  (let [update-page
        (fn [page]
          (if (and (pmap/pointer-map? page)
@@ -136,10 +139,56 @@

(defn enable-pointer-map
  "Enable the fdata/pointer-map feature on the file."
  [file]
  [file & _opts]
  (-> file
      (update :data (fn [fdata]
                      (-> fdata
                          (update :pages-index d/update-vals pmap/wrap)
                          (d/update-when :components pmap/wrap))))
      (update :features conj "fdata/pointer-map")))

;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; PATH-DATA
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;

(defn enable-path-data
  "Enable the fdata/path-data feature on the file."
  [file & _opts]
  (letfn [(update-object [object]
            (if (or (cfh/path-shape? object)
                    (cfh/bool-shape? object))
              (update object :content path/content)
              object))

          (update-container [container]
            (d/update-when container :objects d/update-vals update-object))]

    (-> file
        (update :data (fn [data]
                        (-> data
                            (update :pages-index d/update-vals update-container)
                            (d/update-when :components d/update-vals update-container))))
        (update :features conj "fdata/path-data"))))

(defn disable-path-data
  [file & _opts]
  (letfn [(update-object [object]
            (if (or (cfh/path-shape? object)
                    (cfh/bool-shape? object))
              (update object :content vec)
              object))

          (update-container [container]
            (d/update-when container :objects d/update-vals update-object))]

    (when-let [conn db/*conn*]
      (db/delete! conn :file-migration {:file-id (:id file)
                                        :name "0003-convert-path-content"}))
    (-> file
        (update :data (fn [data]
                        (-> data
                            (update :pages-index d/update-vals update-container)
                            (d/update-when :components d/update-vals update-container))))
        (update :features disj "fdata/path-data")
        (update :migrations disj "0003-convert-path-content")
        (vary-meta update ::fmg/migrated disj "0003-convert-path-content"))))

31 backend/src/app/features/logical_deletion.clj Normal file

@@ -0,0 +1,31 @@
;; This Source Code Form is subject to the terms of the Mozilla Public
;; License, v. 2.0. If a copy of the MPL was not distributed with this
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
;;
;; Copyright (c) KALEIDOS INC

(ns app.features.logical-deletion
  "A code related to handle logical deletion mechanism"
  (:require
   [app.config :as cf]
   [app.util.time :as dt]))

(defn get-deletion-delay
  "Calculate the next deleted-at for a resource (file, team, etc) in function
  of team settings"
  [team]
  (if-let [subscription (get team :subscription)]
    (cond
      (and (= (:type subscription) "unlimited")
           (= (:status subscription) "active"))
      (dt/duration {:days 30})

      (and (= (:type subscription) "enterprise")
           (= (:status subscription) "active"))
      (dt/duration {:days 90})

      :else
      (cf/get-deletion-delay))

    (cf/get-deletion-delay)))

@@ -8,12 +8,11 @@
|
||||
"Media & Font postprocessing."
|
||||
(:require
|
||||
[app.common.data :as d]
|
||||
[app.common.data.macros :as dm]
|
||||
[app.common.exceptions :as ex]
|
||||
[app.common.media :as cm]
|
||||
[app.common.schema :as sm]
|
||||
[app.common.schema.openapi :as-alias oapi]
|
||||
[app.common.spec :as us]
|
||||
[app.common.svg :as csvg]
|
||||
[app.config :as cf]
|
||||
[app.db :as-alias db]
|
||||
[app.storage :as-alias sto]
|
||||
@@ -22,39 +21,38 @@
|
||||
[buddy.core.bytes :as bb]
|
||||
[buddy.core.codecs :as bc]
|
||||
[clojure.java.shell :as sh]
|
||||
[clojure.spec.alpha :as s]
|
||||
[clojure.xml :as xml]
|
||||
[cuerdas.core :as str]
|
||||
[datoteka.fs :as fs]
|
||||
[datoteka.io :as io])
|
||||
(:import
|
||||
clojure.lang.XMLHandler
|
||||
java.io.InputStream
|
||||
javax.xml.XMLConstants
|
||||
javax.xml.parsers.SAXParserFactory
|
||||
org.apache.commons.io.IOUtils
|
||||
org.im4java.core.ConvertCmd
|
||||
org.im4java.core.IMOperation
|
||||
org.im4java.core.Info))
|
||||
|
||||
(s/def ::path fs/path?)
|
||||
(s/def ::filename string?)
|
||||
(s/def ::size integer?)
|
||||
(s/def ::headers (s/map-of string? string?))
|
||||
(s/def ::mtype string?)
|
||||
(def schema:upload
|
||||
(sm/register!
|
||||
^{::sm/type ::upload}
|
||||
[:map {:title "Upload"}
|
||||
[:filename :string]
|
||||
[:size ::sm/int]
|
||||
[:path ::fs/path]
|
||||
[:mtype {:optional true} :string]
|
||||
[:headers {:optional true}
|
||||
[:map-of :string :string]]]))
|
||||
|
||||
(s/def ::upload
|
||||
(s/keys :req-un [::filename ::size ::path]
|
||||
:opt-un [::mtype ::headers]))
|
||||
(def ^:private schema:input
|
||||
[:map {:title "Input"}
|
||||
[:path ::fs/path]
|
||||
[:mtype {:optional true} ::sm/text]])
|
||||
|
||||
;; A subset of fields from the ::upload spec
|
||||
(s/def ::input
|
||||
(s/keys :req-un [::path]
|
||||
:opt-un [::mtype]))
|
||||
|
||||
(sm/register!
|
||||
^{::sm/type ::upload}
|
||||
[:map {:title "Upload"}
|
||||
[:filename :string]
|
||||
[:size ::sm/int]
|
||||
[:path ::fs/path]
|
||||
[:mtype {:optional true} :string]
|
||||
[:headers {:optional true}
|
||||
[:map-of :string :string]]])
|
||||
(def ^:private check-input
|
||||
(sm/check-fn schema:input))
|
||||
|
||||
(defn validate-media-type!
|
||||
([upload] (validate-media-type! upload cm/valid-image-types))
|
||||
@@ -97,17 +95,44 @@
|
||||
(catch Throwable e
|
||||
(process-error e))))
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; SVG PARSING
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
(defn- secure-parser-factory
|
||||
[^InputStream input ^XMLHandler handler]
|
||||
(.. (doto (SAXParserFactory/newInstance)
|
||||
(.setFeature XMLConstants/FEATURE_SECURE_PROCESSING true)
|
||||
(.setFeature "http://apache.org/xml/features/disallow-doctype-decl" true))
|
||||
(newSAXParser)
|
||||
(parse input handler)))
|
||||
|
||||
(defn- strip-doctype
|
||||
[data]
|
||||
(cond-> data
|
||||
(str/includes? data "<!DOCTYPE")
|
||||
(str/replace #"<\!DOCTYPE[^>]*>" "")))
|
||||
|
||||
(defn- parse-svg
|
||||
[text]
|
||||
(let [text (strip-doctype text)]
|
||||
(dm/with-open [istream (IOUtils/toInputStream text "UTF-8")]
|
||||
(xml/parse istream secure-parser-factory))))
|
||||
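;; NOTE (not part of the diff): a rough sketch of what the hardened parser
;; returns for a trivial (invented) SVG string; DOCTYPE declarations are
;; stripped beforehand so the disallow-doctype-decl feature never trips.
(comment
  (parse-svg "<!DOCTYPE svg><svg xmlns=\"http://www.w3.org/2000/svg\" width=\"10\" height=\"10\"/>")
  ;; => roughly {:tag :svg, :attrs {:xmlns "...", :width "10", :height "10"}, :content nil}
  )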
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; IMAGE THUMBNAILS
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
(s/def ::width integer?)
|
||||
(s/def ::height integer?)
|
||||
(s/def ::format #{:jpeg :webp :png})
|
||||
(s/def ::quality #(< 0 % 101))
|
||||
(def ^:private schema:thumbnail-params
|
||||
[:map {:title "ThumbnailParams"}
|
||||
[:input schema:input]
|
||||
[:format [:enum :jpeg :webp :png]]
|
||||
[:quality [:int {:min 1 :max 100}]]
|
||||
[:width :int]
|
||||
[:height :int]])
|
||||
|
||||
(s/def ::thumbnail-params
|
||||
(s/keys :req-un [::input ::format ::width ::height]))
|
||||
(def ^:private check-thumbnail-params
|
||||
(sm/check-fn schema:thumbnail-params))
|
||||
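;; NOTE (not part of the diff): check-thumbnail-params replaces the old spec
;; asserts in the process methods below. A sketch with an invented input map;
;; sm/check-fn is assumed to return the value when it conforms and to raise a
;; validation error otherwise.
(comment
  (check-thumbnail-params
   {:input {:path (fs/path "/tmp/img.png") :mtype "image/png"}
    :format :webp
    :quality 85
    :width 250
    :height 250}))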
|
||||
;; Related info on how thumbnail generation works
|
||||
;; http://www.imagemagick.org/Usage/thumbnails/
|
||||
@@ -129,30 +154,38 @@
|
||||
:data tmp)))
|
||||
|
||||
(defmethod process :generic-thumbnail
|
||||
[{:keys [quality width height] :as params}]
|
||||
(us/assert ::thumbnail-params params)
|
||||
(let [op (doto (IMOperation.)
|
||||
(.addImage)
|
||||
(.autoOrient)
|
||||
(.strip)
|
||||
(.thumbnail ^Integer (int width) ^Integer (int height) ">")
|
||||
(.quality (double quality))
|
||||
(.addImage))]
|
||||
(generic-process (assoc params :operation op))))
|
||||
[params]
|
||||
(let [{:keys [quality width height] :as params}
|
||||
(check-thumbnail-params params)
|
||||
|
||||
operation
|
||||
(doto (IMOperation.)
|
||||
(.addImage)
|
||||
(.autoOrient)
|
||||
(.strip)
|
||||
(.thumbnail ^Integer (int width) ^Integer (int height) ">")
|
||||
(.quality (double quality))
|
||||
(.addImage))]
|
||||
|
||||
(generic-process (assoc params :operation operation))))
|
||||
|
||||
(defmethod process :profile-thumbnail
|
||||
[{:keys [quality width height] :as params}]
|
||||
(us/assert ::thumbnail-params params)
|
||||
(let [op (doto (IMOperation.)
|
||||
(.addImage)
|
||||
(.autoOrient)
|
||||
(.strip)
|
||||
(.thumbnail ^Integer (int width) ^Integer (int height) "^")
|
||||
(.gravity "center")
|
||||
(.extent (int width) (int height))
|
||||
(.quality (double quality))
|
||||
(.addImage))]
|
||||
(generic-process (assoc params :operation op))))
|
||||
[params]
|
||||
(let [{:keys [quality width height] :as params}
|
||||
(check-thumbnail-params params)
|
||||
|
||||
operation
|
||||
(doto (IMOperation.)
|
||||
(.addImage)
|
||||
(.autoOrient)
|
||||
(.strip)
|
||||
(.thumbnail ^Integer (int width) ^Integer (int height) "^")
|
||||
(.gravity "center")
|
||||
(.extent (int width) (int height))
|
||||
(.quality (double quality))
|
||||
(.addImage))]
|
||||
|
||||
(generic-process (assoc params :operation operation))))
|
||||
|
||||
(defn get-basic-info-from-svg
|
||||
[{:keys [tag attrs] :as data}]
|
||||
@@ -184,10 +217,9 @@
|
||||
|
||||
(defmethod process :info
|
||||
[{:keys [input] :as params}]
|
||||
(us/assert ::input input)
|
||||
(let [{:keys [path mtype]} input]
|
||||
(let [{:keys [path mtype] :as input} (check-input input)]
|
||||
(if (= mtype "image/svg+xml")
|
||||
(let [info (some-> path slurp csvg/parse get-basic-info-from-svg)]
|
||||
(let [info (some-> path slurp parse-svg get-basic-info-from-svg)]
|
||||
(when-not info
|
||||
(ex/raise :type :validation
|
||||
:code :invalid-svg-file
|
||||
|
||||
@@ -231,7 +231,7 @@
|
||||
:hint "email has complaint reports")))
|
||||
|
||||
(defn prepare-register
|
||||
[{:keys [::db/pool] :as cfg} {:keys [email] :as params}]
|
||||
[{:keys [::db/pool] :as cfg} {:keys [email accept-newsletter-updates] :as params}]
|
||||
|
||||
(validate-register-attempt! cfg params)
|
||||
|
||||
@@ -243,7 +243,8 @@
|
||||
:backend "penpot"
|
||||
:iss :prepared-register
|
||||
:profile-id (:id profile)
|
||||
:exp (dt/in-future {:days 7})}
|
||||
:exp (dt/in-future {:days 7})
|
||||
:props {:newsletter-updates (or accept-newsletter-updates false)}}
|
||||
|
||||
params (d/without-nils params)
|
||||
token (tokens/generate (::setup/props cfg) params)]
|
||||
|
||||
@@ -6,6 +6,7 @@
|
||||
|
||||
(ns app.rpc.commands.files
|
||||
(:require
|
||||
[app.binfile.common :as bfc]
|
||||
[app.common.data :as d]
|
||||
[app.common.data.macros :as dm]
|
||||
[app.common.exceptions :as ex]
|
||||
@@ -23,6 +24,7 @@
|
||||
[app.db.sql :as-alias sql]
|
||||
[app.features.fdata :as feat.fdata]
|
||||
[app.features.file-migrations :as feat.fmigr]
|
||||
[app.features.logical-deletion :as ldel]
|
||||
[app.loggers.audit :as-alias audit]
|
||||
[app.loggers.webhooks :as-alias webhooks]
|
||||
[app.rpc :as-alias rpc]
|
||||
@@ -211,7 +213,8 @@
|
||||
[{:keys [::db/conn] :as cfg} {:keys [id] :as file} {:keys [read-only?]}]
|
||||
(binding [pmap/*load-fn* (partial feat.fdata/load-pointer cfg id)
|
||||
pmap/*tracked* (pmap/create-tracked)]
|
||||
(let [;; For avoid unnecesary overhead of creating multiple pointers and
|
||||
(let [libs (delay (bfc/get-resolved-file-libraries cfg file))
|
||||
;; To avoid unnecessary overhead from creating multiple pointers and
;; handling them internally with an objects map in their worst case (when
;; probably all shapes and all pointers will be read in any
;; case), we just realize/resolve them before applying the
|
||||
@@ -219,7 +222,7 @@
|
||||
file (-> file
|
||||
(update :data feat.fdata/process-pointers deref)
|
||||
(update :data feat.fdata/process-objects (partial into {}))
|
||||
(fmg/migrate-file))]
|
||||
(fmg/migrate-file libs))]
|
||||
|
||||
(if (or read-only? (db/read-only? conn))
|
||||
file
|
||||
@@ -615,44 +618,6 @@
|
||||
|
||||
;; --- COMMAND QUERY: get-file-libraries
|
||||
|
||||
(def ^:private sql:get-file-libraries
|
||||
"WITH RECURSIVE libs AS (
|
||||
SELECT fl.*, flr.synced_at
|
||||
FROM file AS fl
|
||||
JOIN file_library_rel AS flr ON (flr.library_file_id = fl.id)
|
||||
WHERE flr.file_id = ?::uuid
|
||||
UNION
|
||||
SELECT fl.*, flr.synced_at
|
||||
FROM file AS fl
|
||||
JOIN file_library_rel AS flr ON (flr.library_file_id = fl.id)
|
||||
JOIN libs AS l ON (flr.file_id = l.id)
|
||||
)
|
||||
SELECT l.id,
|
||||
l.features,
|
||||
l.project_id,
|
||||
p.team_id,
|
||||
l.created_at,
|
||||
l.modified_at,
|
||||
l.deleted_at,
|
||||
l.name,
|
||||
l.revn,
|
||||
l.vern,
|
||||
l.synced_at,
|
||||
l.is_shared
|
||||
FROM libs AS l
|
||||
INNER JOIN project AS p ON (p.id = l.project_id)
|
||||
WHERE l.deleted_at IS NULL OR l.deleted_at > now();")
|
||||
|
||||
(defn get-file-libraries
|
||||
[conn file-id]
|
||||
(into []
|
||||
(comp
|
||||
;; FIXME: :is-indirect set to false to all rows looks
|
||||
;; completly useless
|
||||
(map #(assoc % :is-indirect false))
|
||||
(map decode-row))
|
||||
(db/exec! conn [sql:get-file-libraries file-id])))
|
||||
|
||||
(def ^:private schema:get-file-libraries
|
||||
[:map {:title "get-file-libraries"}
|
||||
[:file-id ::sm/uuid]])
|
||||
@@ -664,7 +629,7 @@
|
||||
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id file-id]}]
|
||||
(dm/with-open [conn (db/open pool)]
|
||||
(check-read-permissions! conn profile-id file-id)
|
||||
(get-file-libraries conn file-id)))
|
||||
(bfc/get-file-libraries conn file-id)))
|
||||
|
||||
|
||||
;; --- COMMAND QUERY: Files that use this File library
|
||||
@@ -970,12 +935,13 @@
|
||||
;; --- MUTATION COMMAND: delete-file
|
||||
|
||||
(defn- mark-file-deleted
|
||||
[conn file-id]
|
||||
(let [file (db/update! conn :file
|
||||
{:deleted-at (dt/now)}
|
||||
{:id file-id}
|
||||
{::db/return-keys [:id :name :is-shared :deleted-at
|
||||
:project-id :created-at :modified-at]})]
|
||||
[conn team file-id]
|
||||
(let [delay (ldel/get-deletion-delay team)
|
||||
file (db/update! conn :file
|
||||
{:deleted-at (dt/in-future delay)}
|
||||
{:id file-id}
|
||||
{::db/return-keys [:id :name :is-shared :deleted-at
|
||||
:project-id :created-at :modified-at]})]
|
||||
(wrk/submit! {::db/conn conn
|
||||
::wrk/task :delete-object
|
||||
::wrk/params {:object :file
|
||||
@@ -991,7 +957,11 @@
|
||||
(defn- delete-file
|
||||
[{:keys [::db/conn] :as cfg} {:keys [profile-id id] :as params}]
|
||||
(check-edition-permissions! conn profile-id id)
|
||||
(let [file (mark-file-deleted conn id)]
|
||||
(let [team (teams/get-team conn
|
||||
:profile-id profile-id
|
||||
:file-id id)
|
||||
file (mark-file-deleted conn team id)]
|
||||
|
||||
(rph/with-meta (rph/wrap)
|
||||
{::audit/props {:project-id (:project-id file)
|
||||
:name (:name file)
|
||||
|
||||
@@ -55,8 +55,8 @@
|
||||
:features features
|
||||
:ignore-sync-until ignore-sync-until
|
||||
:modified-at modified-at
|
||||
:deleted-at deleted-at
|
||||
:create-page create-page
|
||||
:deleted-at deleted-at}
|
||||
{:create-page create-page
|
||||
:page-id page-id})
|
||||
file (-> (bfc/insert-file! cfg file)
|
||||
(bfc/decode-row))]
|
||||
@@ -111,18 +111,21 @@
|
||||
::quotes/profile-id profile-id
|
||||
::quotes/project-id project-id})
|
||||
|
||||
;; FIXME: IMPORTANT: this code can have race
|
||||
;; conditions, because we have no locks for updating
|
||||
;; team so, creating two files concurrently can lead
|
||||
;; to lost team features updating
|
||||
;; FIXME: IMPORTANT: this code can have race conditions, because
;; we have no locks for updating the team, so creating two files
;; concurrently can lead to lost team feature updates
|
||||
|
||||
;; When the newly computed features do not match exactly
;; the features defined on the team row, we update it
|
||||
(when (not= features (:features team))
|
||||
(let [features (db/create-array conn "text" features)]
|
||||
(when-let [features (-> features
|
||||
(set/difference (:features team))
|
||||
(set/difference cfeat/no-team-inheritable-features)
|
||||
(not-empty))]
|
||||
(let [features (->> features
|
||||
(set/union (:features team))
|
||||
(db/create-array conn "text"))]
|
||||
(db/update! conn :team
|
||||
{:features features}
|
||||
{:id team-id})))
|
||||
{:id (:id team)}
|
||||
{::db/return-keys false})))
|
||||
|
||||
(-> (create-file cfg params)
|
||||
(vary-meta assoc ::audit/props {:team-id team-id}))))
|
||||
|
||||
@@ -14,7 +14,6 @@
|
||||
[app.config :as cf]
|
||||
[app.db :as db]
|
||||
[app.db.sql :as sql]
|
||||
[app.features.components-v2 :as feat.compv2]
|
||||
[app.features.fdata :as fdata]
|
||||
[app.loggers.audit :as audit]
|
||||
[app.rpc :as-alias rpc]
|
||||
@@ -110,7 +109,7 @@
|
||||
;; --- MUTATION COMMAND: persist-temp-file
|
||||
|
||||
(defn persist-temp-file
|
||||
[{:keys [::db/conn] :as cfg} {:keys [id ::rpc/profile-id] :as params}]
|
||||
[{:keys [::db/conn] :as cfg} {:keys [id] :as params}]
|
||||
(let [file (files/get-file cfg id
|
||||
:migrate? false
|
||||
:lock-for-update? true)]
|
||||
@@ -119,7 +118,6 @@
|
||||
(ex/raise :type :validation
|
||||
:code :cant-persist-already-persisted-file))
|
||||
|
||||
|
||||
(let [changes (->> (db/cursor conn
|
||||
(sql/select :file-change {:file-id id}
|
||||
{:order-by [[:revn :asc]]})
|
||||
@@ -147,19 +145,6 @@
|
||||
:revn 1
|
||||
:data (blob/encode (:data file))}
|
||||
{:id id})
|
||||
|
||||
(let [team (teams/get-team conn :profile-id profile-id :project-id (:project-id file))
|
||||
file-features (:features file)
|
||||
team-features (cfeat/get-team-enabled-features cf/flags team)]
|
||||
(when (and (contains? team-features "components/v2")
|
||||
(not (contains? file-features "components/v2")))
|
||||
;; Migrate components v2
|
||||
(feat.compv2/migrate-file! cfg
|
||||
(:id file)
|
||||
:max-procs 2
|
||||
:validate? true
|
||||
:throw-on-validate? true)))
|
||||
|
||||
nil)))
|
||||
|
||||
(def ^:private schema:persist-temp-file
|
||||
|
||||
@@ -20,6 +20,7 @@
|
||||
[app.db :as db]
|
||||
[app.features.fdata :as feat.fdata]
|
||||
[app.features.file-migrations :as feat.fmigr]
|
||||
[app.features.logical-deletion :as ldel]
|
||||
[app.http.errors :as errors]
|
||||
[app.loggers.audit :as audit]
|
||||
[app.loggers.webhooks :as webhooks]
|
||||
@@ -177,12 +178,19 @@
|
||||
:stored-revn (:revn file)}))
|
||||
|
||||
;; When newly computed features does not match exactly with
|
||||
;; the features defined on team row, we update it.
|
||||
(when (not= features (:features team))
|
||||
(let [features (db/create-array conn "text" features)]
|
||||
;; the features defined on team row, we update it
|
||||
(when-let [features (-> features
|
||||
(set/difference (:features team))
|
||||
(set/difference cfeat/no-team-inheritable-features)
|
||||
(not-empty))]
|
||||
(let [features (->> features
|
||||
(set/union (:features team))
|
||||
(db/create-array conn "text"))]
|
||||
(db/update! conn :team
|
||||
{:features features}
|
||||
{:id (:id team)})))
|
||||
{:id (:id team)}
|
||||
{::db/return-keys false})))
|
||||
|
||||
|
||||
(mtx/run! metrics {:id :update-file-changes :inc (count changes)})
|
||||
|
||||
@@ -202,7 +210,7 @@
|
||||
|
||||
Only intended for internal use in this module."
|
||||
[{:keys [::db/conn ::wrk/executor ::timestamp] :as cfg}
|
||||
{:keys [profile-id file features changes session-id skip-validate] :as params}]
|
||||
{:keys [profile-id file team features changes session-id skip-validate] :as params}]
|
||||
|
||||
(let [;; Retrieve the file data
|
||||
file (feat.fmigr/resolve-applied-migrations cfg file)
|
||||
@@ -236,7 +244,7 @@
|
||||
:created-at timestamp
|
||||
:updated-at timestamp
|
||||
:deleted-at (if (::snapshot-data file)
|
||||
(dt/plus timestamp (cf/get-deletion-delay))
|
||||
(dt/plus timestamp (ldel/get-deletion-delay team))
|
||||
(dt/plus timestamp (dt/duration {:hours 1})))
|
||||
:file-id (:id file)
|
||||
:revn (:revn file)
|
||||
@@ -333,6 +341,7 @@
|
||||
(-> data
|
||||
(blob/decode)
|
||||
(assoc :id (:id file)))))
|
||||
libs (delay (bfc/get-resolved-file-libraries cfg file))
|
||||
|
||||
;; To avoid unnecessary overhead from creating multiple pointers
;; and handling them internally with an objects map in their worst
|
||||
@@ -343,7 +352,7 @@
|
||||
(-> file
|
||||
(update :data feat.fdata/process-pointers deref)
|
||||
(update :data feat.fdata/process-objects (partial into {}))
|
||||
(fmg/migrate-file))
|
||||
(fmg/migrate-file libs))
|
||||
file)
|
||||
|
||||
file (apply update-fn cfg file args)
|
||||
@@ -372,13 +381,6 @@
|
||||
|
||||
(bfc/encode-file cfg file))))
|
||||
|
||||
(defn- get-file-libraries
|
||||
"A helper for preload file libraries, mainly used for perform file
|
||||
semantical and structural validation"
|
||||
[{:keys [::db/conn] :as cfg} file]
|
||||
(->> (files/get-file-libraries conn (:id file))
|
||||
(into [file] (map #(bfc/get-file cfg (:id %))))
|
||||
(d/index-by :id)))
|
||||
|
||||
(defn- soft-validate-file-schema!
|
||||
[file]
|
||||
@@ -404,7 +406,7 @@
|
||||
(when (and (or (contains? cf/flags :file-validation)
|
||||
(contains? cf/flags :soft-file-validation))
|
||||
(not skip-validate))
|
||||
(get-file-libraries cfg file))
|
||||
(bfc/get-resolved-file-libraries cfg file))
|
||||
|
||||
|
||||
;; The main purpose of this atom is provide a contextual state
|
||||
|
||||
@@ -12,6 +12,7 @@
|
||||
[app.common.uuid :as uuid]
|
||||
[app.db :as db]
|
||||
[app.db.sql :as-alias sql]
|
||||
[app.features.logical-deletion :as ldel]
|
||||
[app.loggers.audit :as-alias audit]
|
||||
[app.loggers.webhooks :as-alias webhooks]
|
||||
[app.media :as media]
|
||||
@@ -202,32 +203,40 @@
|
||||
(sv/defmethod ::delete-font
|
||||
{::doc/added "1.18"
|
||||
::webhooks/event? true
|
||||
::sm/params schema:delete-font}
|
||||
[cfg {:keys [::rpc/profile-id id team-id]}]
|
||||
(db/tx-run! cfg
|
||||
(fn [{:keys [::db/conn] :as cfg}]
|
||||
(teams/check-edition-permissions! conn profile-id team-id)
|
||||
(let [fonts (db/query conn :team-font-variant
|
||||
{:team-id team-id
|
||||
:font-id id
|
||||
:deleted-at nil}
|
||||
{::sql/for-update true})
|
||||
tnow (dt/now)]
|
||||
::sm/params schema:delete-font
|
||||
::db/transaction true}
|
||||
[{:keys [::db/conn] :as cfg} {:keys [::rpc/profile-id id team-id]}]
|
||||
(let [team (teams/get-team conn
|
||||
:profile-id profile-id
|
||||
:team-id team-id)
|
||||
|
||||
(when-not (seq fonts)
|
||||
(ex/raise :type :not-found
|
||||
:code :object-not-found))
|
||||
fonts (db/query conn :team-font-variant
|
||||
{:team-id team-id
|
||||
:font-id id
|
||||
:deleted-at nil}
|
||||
{::sql/for-update true})
|
||||
|
||||
(doseq [font fonts]
|
||||
(db/update! conn :team-font-variant
|
||||
{:deleted-at tnow}
|
||||
{:id (:id font)}))
|
||||
delay (ldel/get-deletion-delay team)
|
||||
tnow (dt/in-future delay)]
|
||||
|
||||
(rph/with-meta (rph/wrap)
|
||||
{::audit/props {:id id
|
||||
:team-id team-id
|
||||
:name (:font-family (peek fonts))
|
||||
:profile-id profile-id}})))))
|
||||
(teams/check-edition-permissions! (:permissions team))
|
||||
|
||||
(when-not (seq fonts)
|
||||
(ex/raise :type :not-found
|
||||
:code :object-not-found))
|
||||
|
||||
|
||||
(doseq [font fonts]
|
||||
(db/update! conn :team-font-variant
|
||||
{:deleted-at tnow}
|
||||
{:id (:id font)}
|
||||
{::db/return-keys false}))
|
||||
|
||||
(rph/with-meta (rph/wrap)
|
||||
{::audit/props {:id id
|
||||
:team-id team-id
|
||||
:name (:font-family (peek fonts))
|
||||
:profile-id profile-id}})))
|
||||
|
||||
;; --- DELETE FONT VARIANT
|
||||
|
||||
@@ -239,19 +248,23 @@
|
||||
(sv/defmethod ::delete-font-variant
|
||||
{::doc/added "1.18"
|
||||
::webhooks/event? true
|
||||
::sm/params schema:delete-font-variant}
|
||||
[cfg {:keys [::rpc/profile-id id team-id]}]
|
||||
(db/tx-run! cfg
|
||||
(fn [{:keys [::db/conn] :as cfg}]
|
||||
(teams/check-edition-permissions! conn profile-id team-id)
|
||||
(let [variant (db/get conn :team-font-variant
|
||||
{:id id :team-id team-id}
|
||||
{::sql/for-update true})]
|
||||
::sm/params schema:delete-font-variant
|
||||
::db/transaction true}
|
||||
[{:keys [::db/conn] :as cfg} {:keys [::rpc/profile-id id team-id]}]
|
||||
(let [team (teams/get-team conn
|
||||
:profile-id profile-id
|
||||
:team-id team-id)
|
||||
variant (db/get conn :team-font-variant
|
||||
{:id id :team-id team-id}
|
||||
{::sql/for-update true})
|
||||
delay (ldel/get-deletion-delay team)]
|
||||
|
||||
(db/update! conn :team-font-variant
|
||||
{:deleted-at (dt/now)}
|
||||
{:id (:id variant)})
|
||||
(teams/check-edition-permissions! (:permissions team))
|
||||
(db/update! conn :team-font-variant
|
||||
{:deleted-at (dt/in-future delay)}
|
||||
{:id (:id variant)}
|
||||
{::db/return-keys false})
|
||||
|
||||
(rph/with-meta (rph/wrap)
|
||||
{::audit/props {:font-family (:font-family variant)
|
||||
:font-id (:font-id variant)}})))))
|
||||
(rph/with-meta (rph/wrap)
|
||||
{::audit/props {:font-family (:font-family variant)
|
||||
:font-id (:font-id variant)}})))
|
||||
|
||||
@@ -56,7 +56,7 @@
|
||||
(vswap! bfc/*state* update :index bfc/update-index fmeds :id)
|
||||
|
||||
;; Process and persist file
|
||||
(let [file (bfc/process-file file)]
|
||||
(let [file (bfc/process-file cfg file)]
|
||||
(bfc/insert-file! cfg file ::db/return-keys false)
|
||||
|
||||
;; The file profile creation is optional, so when no profile is
|
||||
|
||||
@@ -480,8 +480,7 @@
|
||||
JOIN team AS t ON (t.id = tpr.team_id)
|
||||
WHERE tpr.is_owner IS TRUE
|
||||
AND tpr.profile_id = ?
|
||||
AND (t.deleted_at IS NULL OR
|
||||
t.deleted_at > now())
|
||||
AND t.deleted_at IS NULL
|
||||
)
|
||||
SELECT tpr.team_id AS id,
|
||||
count(tpr.profile_id) - 1 AS participants
|
||||
|
||||
@@ -11,6 +11,7 @@
|
||||
[app.common.schema :as sm]
|
||||
[app.db :as db]
|
||||
[app.db.sql :as-alias sql]
|
||||
[app.features.logical-deletion :as ldel]
|
||||
[app.loggers.audit :as-alias audit]
|
||||
[app.loggers.webhooks :as webhooks]
|
||||
[app.rpc :as-alias rpc]
|
||||
@@ -253,9 +254,10 @@
|
||||
;; --- MUTATION: Delete Project
|
||||
|
||||
(defn- delete-project
|
||||
[conn project-id]
|
||||
(let [project (db/update! conn :project
|
||||
{:deleted-at (dt/now)}
|
||||
[conn team project-id]
|
||||
(let [delay (ldel/get-deletion-delay team)
|
||||
project (db/update! conn :project
|
||||
{:deleted-at (dt/in-future delay)}
|
||||
{:id project-id}
|
||||
{::db/return-keys true})]
|
||||
|
||||
@@ -272,7 +274,6 @@
|
||||
|
||||
project))
|
||||
|
||||
|
||||
(def ^:private schema:delete-project
|
||||
[:map {:title "delete-project"}
|
||||
[:id ::sm/uuid]])
|
||||
@@ -284,7 +285,10 @@
|
||||
::db/transaction true}
|
||||
[{:keys [::db/conn]} {:keys [::rpc/profile-id id] :as params}]
|
||||
(check-edition-permissions! conn profile-id id)
|
||||
(let [project (delete-project conn id)]
|
||||
(let [team (teams/get-team conn
|
||||
:profile-id profile-id
|
||||
:project-id id)
|
||||
project (delete-project conn team id)]
|
||||
(rph/with-meta (rph/wrap)
|
||||
{::audit/props {:team-id (:team-id project)
|
||||
:name (:name project)
|
||||
|
||||
@@ -17,6 +17,7 @@
|
||||
[app.db :as db]
|
||||
[app.db.sql :as sql]
|
||||
[app.email :as eml]
|
||||
[app.features.logical-deletion :as ldel]
|
||||
[app.loggers.audit :as audit]
|
||||
[app.main :as-alias main]
|
||||
[app.media :as media]
|
||||
@@ -76,9 +77,10 @@
|
||||
(perms/make-check-fn has-read-permissions?))
|
||||
|
||||
(defn decode-row
|
||||
[{:keys [features] :as row}]
|
||||
[{:keys [features subscription] :as row}]
|
||||
(cond-> row
|
||||
(some? features) (assoc :features (db/decode-pgarray features #{}))))
|
||||
(some? features) (assoc :features (db/decode-pgarray features #{}))
|
||||
(some? subscription) (assoc :subscription (db/decode-transit-pgobject subscription))))
|
||||
|
||||
;; FIXME: move
|
||||
|
||||
@@ -113,29 +115,41 @@
|
||||
|
||||
;; --- Query: Teams
|
||||
|
||||
(declare get-teams)
|
||||
|
||||
(def ^:private schema:get-teams
|
||||
[:map {:title "get-teams"}])
|
||||
|
||||
(sv/defmethod ::get-teams
|
||||
{::doc/added "1.17"
|
||||
::sm/params schema:get-teams}
|
||||
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id] :as params}]
|
||||
(dm/with-open [conn (db/open pool)]
|
||||
(get-teams conn profile-id)))
|
||||
|
||||
(def sql:get-teams-with-permissions
|
||||
"select t.*,
|
||||
"SELECT t.*,
|
||||
tp.is_owner,
|
||||
tp.is_admin,
|
||||
tp.can_edit,
|
||||
(t.id = ?) as is_default
|
||||
from team_profile_rel as tp
|
||||
join team as t on (t.id = tp.team_id)
|
||||
where t.deleted_at is null
|
||||
and tp.profile_id = ?
|
||||
order by tp.created_at asc")
|
||||
(t.id = ?) AS is_default
|
||||
FROM team_profile_rel AS tp
|
||||
JOIN team AS t ON (t.id = tp.team_id)
|
||||
WHERE t.deleted_at IS null
|
||||
AND tp.profile_id = ?
|
||||
ORDER BY tp.created_at ASC")
|
||||
|
||||
(def sql:get-teams-with-permissions-and-subscription
|
||||
"SELECT t.*,
|
||||
tp.is_owner,
|
||||
tp.is_admin,
|
||||
tp.can_edit,
|
||||
(t.id = ?) AS is_default,
|
||||
|
||||
jsonb_build_object(
|
||||
'~:type', COALESCE(p.props->'~:subscription'->>'~:type', 'professional'),
|
||||
'~:status', CASE COALESCE(p.props->'~:subscription'->>'~:type', 'professional')
|
||||
WHEN 'professional' THEN 'active'
|
||||
ELSE COALESCE(p.props->'~:subscription'->>'~:status', 'incomplete')
|
||||
END
|
||||
) AS subscription
|
||||
FROM team_profile_rel AS tp
|
||||
JOIN team AS t ON (t.id = tp.team_id)
|
||||
JOIN team_profile_rel AS tpr
|
||||
ON (tpr.team_id = t.id AND tpr.is_owner IS true)
|
||||
JOIN profile AS p
|
||||
ON (tpr.profile_id = p.id)
|
||||
WHERE t.deleted_at IS null
|
||||
AND tp.profile_id = ?
|
||||
ORDER BY tp.created_at ASC")
|
||||
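;; NOTE (not part of the diff): the jsonb_build_object column above uses
;; transit's "~:" keyword encoding and is turned into a plain map by decode-row
;; via db/decode-transit-pgobject. With invented values, a decoded team row
;; would look roughly like:
(comment
  {:id "<team uuid>"
   :name "My Team"
   :is-default false
   :features #{"fdata/pointer-map"}
   :subscription {:type "enterprise" :status "active"}})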
|
||||
(defn process-permissions
|
||||
[team]
|
||||
@@ -150,13 +164,52 @@
|
||||
(dissoc :is-owner :is-admin :can-edit)
|
||||
(assoc :permissions permissions))))
|
||||
|
||||
(def ^:private
|
||||
xform:process-teams
|
||||
(comp
|
||||
(map decode-row)
|
||||
(map process-permissions)))
|
||||
|
||||
(defn get-teams
|
||||
[conn profile-id]
|
||||
(let [profile (profile/get-profile conn profile-id)]
|
||||
(->> (db/exec! conn [sql:get-teams-with-permissions (:default-team-id profile) profile-id])
|
||||
(map decode-row)
|
||||
(map process-permissions)
|
||||
(vec))))
|
||||
(let [profile (profile/get-profile conn profile-id)
|
||||
sql (if (contains? cf/flags :subscriptions)
|
||||
sql:get-teams-with-permissions-and-subscription
|
||||
sql:get-teams-with-permissions)]
|
||||
|
||||
(->> (db/exec! conn [sql (:default-team-id profile) profile-id])
|
||||
(into [] xform:process-teams))))
|
||||
|
||||
(def ^:private schema:get-teams
|
||||
[:map {:title "get-teams"}])
|
||||
|
||||
(sv/defmethod ::get-teams
|
||||
{::doc/added "1.17"
|
||||
::sm/params schema:get-teams}
|
||||
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id] :as params}]
|
||||
(dm/with-open [conn (db/open pool)]
|
||||
(get-teams conn profile-id)))
|
||||
|
||||
(def ^:private sql:get-owned-teams
|
||||
"SELECT t.id, t.name,
|
||||
(SELECT count(*) FROM team_profile_rel WHERE team_id=t.id) AS total_members
|
||||
FROM team AS t
|
||||
JOIN team_profile_rel AS tpr ON (tpr.team_id = t.id)
|
||||
WHERE t.is_default IS false
|
||||
AND tpr.is_owner IS true
|
||||
AND tpr.profile_id = ?
|
||||
AND t.deleted_at IS NULL")
|
||||
|
||||
(defn- get-owned-teams
|
||||
[cfg profile-id]
|
||||
(->> (db/exec! cfg [sql:get-owned-teams profile-id])
|
||||
(into [] (map decode-row))))
|
||||
|
||||
(sv/defmethod ::get-owned-teams
|
||||
{::doc/added "2.8.0"
|
||||
::sm/params schema:get-teams}
|
||||
[cfg {:keys [::rpc/profile-id]}]
|
||||
(get-owned-teams cfg profile-id))
|
||||
|
||||
;; --- Query: Team (by ID)
|
||||
|
||||
@@ -181,39 +234,43 @@
|
||||
(defn get-team
|
||||
[conn & {:keys [profile-id team-id project-id file-id] :as params}]
|
||||
|
||||
(dm/assert!
|
||||
"connection or pool is mandatory"
|
||||
(or (db/connection? conn)
|
||||
(db/pool? conn)))
|
||||
(assert (uuid? profile-id) "profile-id is mandatory")
|
||||
(assert (or (db/connection? conn)
|
||||
(db/pool? conn))
|
||||
"connection or pool is mandatory")
|
||||
|
||||
(dm/assert!
|
||||
"profile-id is mandatory"
|
||||
(uuid? profile-id))
|
||||
(let [{:keys [default-team-id] :as profile}
|
||||
(profile/get-profile conn profile-id)
|
||||
|
||||
(let [{:keys [default-team-id] :as profile} (profile/get-profile conn profile-id)
|
||||
result (cond
|
||||
(some? team-id)
|
||||
(let [sql (str "WITH teams AS (" sql:get-teams-with-permissions
|
||||
") SELECT * FROM teams WHERE id=?")]
|
||||
(db/exec-one! conn [sql default-team-id profile-id team-id]))
|
||||
sql
|
||||
(if (contains? cf/flags :subscriptions)
|
||||
sql:get-teams-with-permissions-and-subscription
|
||||
sql:get-teams-with-permissions)
|
||||
|
||||
(some? project-id)
|
||||
(let [sql (str "WITH teams AS (" sql:get-teams-with-permissions ") "
|
||||
"SELECT t.* FROM teams AS t "
|
||||
" JOIN project AS p ON (p.team_id = t.id) "
|
||||
" WHERE p.id=?")]
|
||||
(db/exec-one! conn [sql default-team-id profile-id project-id]))
|
||||
result
|
||||
(cond
|
||||
(some? team-id)
|
||||
(let [sql (str "WITH teams AS (" sql ") "
|
||||
"SELECT * FROM teams WHERE id=?")]
|
||||
(db/exec-one! conn [sql default-team-id profile-id team-id]))
|
||||
|
||||
(some? file-id)
|
||||
(let [sql (str "WITH teams AS (" sql:get-teams-with-permissions ") "
|
||||
"SELECT t.* FROM teams AS t "
|
||||
" JOIN project AS p ON (p.team_id = t.id) "
|
||||
" JOIN file AS f ON (f.project_id = p.id) "
|
||||
" WHERE f.id=?")]
|
||||
(db/exec-one! conn [sql default-team-id profile-id file-id]))
|
||||
(some? project-id)
|
||||
(let [sql (str "WITH teams AS (" sql ") "
|
||||
"SELECT t.* FROM teams AS t "
|
||||
" JOIN project AS p ON (p.team_id = t.id) "
|
||||
" WHERE p.id=?")]
|
||||
(db/exec-one! conn [sql default-team-id profile-id project-id]))
|
||||
|
||||
:else
|
||||
(throw (IllegalArgumentException. "invalid arguments")))]
|
||||
(some? file-id)
|
||||
(let [sql (str "WITH teams AS (" sql ") "
|
||||
"SELECT t.* FROM teams AS t "
|
||||
" JOIN project AS p ON (p.team_id = t.id) "
|
||||
" JOIN file AS f ON (f.project_id = p.id) "
|
||||
" WHERE f.id=?")]
|
||||
(db/exec-one! conn [sql default-team-id profile-id file-id]))
|
||||
|
||||
:else
|
||||
(throw (IllegalArgumentException. "invalid arguments")))]
|
||||
|
||||
(when-not result
|
||||
(ex/raise :type :not-found
|
||||
@@ -601,13 +658,13 @@
|
||||
|
||||
(defn- delete-team
|
||||
"Mark a team for deletion"
|
||||
[conn team-id]
|
||||
[conn {:keys [id] :as team}]
|
||||
|
||||
(let [deleted-at (dt/now)
|
||||
team (db/update! conn :team
|
||||
{:deleted-at deleted-at}
|
||||
{:id team-id}
|
||||
{::db/return-keys true})]
|
||||
(let [delay (ldel/get-deletion-delay team)
|
||||
team (db/update! conn :team
|
||||
{:deleted-at (dt/in-future delay)}
|
||||
{:id id}
|
||||
{::db/return-keys true})]
|
||||
|
||||
(when (:is-default team)
|
||||
(ex/raise :type :validation
|
||||
@@ -617,8 +674,8 @@
|
||||
(wrk/submit! {::db/conn conn
|
||||
::wrk/task :delete-object
|
||||
::wrk/params {:object :team
|
||||
:deleted-at deleted-at
|
||||
:id team-id}})
|
||||
:deleted-at (:deleted-at team)
|
||||
:id id}})
|
||||
team))
|
||||
|
||||
(def ^:private schema:delete-team
|
||||
@@ -630,12 +687,14 @@
|
||||
::sm/params schema:delete-team
|
||||
::db/transaction true}
|
||||
[{:keys [::db/conn] :as cfg} {:keys [::rpc/profile-id id] :as params}]
|
||||
(let [perms (get-permissions conn profile-id id)]
|
||||
(let [team (get-team conn :profile-id profile-id :team-id id)
|
||||
perms (get team :permissions)]
|
||||
|
||||
(when-not (:is-owner perms)
|
||||
(ex/raise :type :validation
|
||||
:code :only-owner-can-delete-team))
|
||||
|
||||
(delete-team conn id)
|
||||
(delete-team conn team)
|
||||
nil))
|
||||
|
||||
;; --- Mutation: Team Update Role
|
||||
|
||||
@@ -6,6 +6,7 @@
|
||||
|
||||
(ns app.rpc.commands.viewer
|
||||
(:require
|
||||
[app.binfile.common :as bfc]
|
||||
[app.common.exceptions :as ex]
|
||||
[app.common.features :as cfeat]
|
||||
[app.common.schema :as sm]
|
||||
@@ -78,7 +79,7 @@
|
||||
:always
|
||||
(update :data select-keys [:id :options :pages :pages-index :components]))
|
||||
|
||||
libs (->> (files/get-file-libraries conn file-id)
|
||||
libs (->> (bfc/get-file-libraries conn file-id)
|
||||
(mapv (fn [{:keys [id] :as lib}]
|
||||
(merge lib (files/get-file cfg id)))))
|
||||
|
||||
|
||||
@@ -6,13 +6,17 @@
|
||||
|
||||
(ns app.srepl
|
||||
"Server Repl."
|
||||
(:refer-clojure :exclude [read-line])
|
||||
(:require
|
||||
[app.common.exceptions :as ex]
|
||||
[app.common.json :as json]
|
||||
[app.common.logging :as l]
|
||||
[app.config :as cf]
|
||||
[app.srepl.cli]
|
||||
[app.srepl.cli :as cli]
|
||||
[app.srepl.main]
|
||||
[app.util.json :as json]
|
||||
[app.util.locks :as locks]
|
||||
[app.util.time :as dt]
|
||||
[clojure.core :as c]
|
||||
[clojure.core.server :as ccs]
|
||||
[clojure.main :as cm]
|
||||
[integrant.core :as ig]))
|
||||
@@ -28,17 +32,80 @@
|
||||
:init repl-init
|
||||
:read ccs/repl-read))
|
||||
|
||||
(defn- ex->data
|
||||
[cause phase]
|
||||
(let [data (ex-data cause)
|
||||
explain (ex/explain data)]
|
||||
(cond-> {:phase phase
|
||||
:code (get data :code :unknown)
|
||||
:type (get data :type :unknown)
|
||||
:hint (or (get data :hint) (ex-message cause))}
|
||||
(some? explain)
|
||||
(assoc :explain explain))))
|
||||
|
||||
(defn read-line
|
||||
[]
|
||||
(if-let [line (c/read-line)]
|
||||
(try
|
||||
(l/dbg :hint "decode" :data line)
|
||||
(json/decode line :key-fn json/read-kebab-key)
|
||||
(catch Throwable _cause
|
||||
(l/warn :hint "unable to decode data" :data line)
|
||||
nil))
|
||||
::eof))
|
||||
|
||||
(defn json-repl
|
||||
[]
|
||||
(let [out *out*
|
||||
lock (locks/create)]
|
||||
(ccs/prepl *in*
|
||||
(fn [m]
|
||||
(binding [*out* out,
|
||||
*flush-on-newline* true,
|
||||
*print-readably* true]
|
||||
(locks/locking lock
|
||||
(println (json/encode-str m))))))))
|
||||
(let [lock (locks/create)
|
||||
out *out*
|
||||
|
||||
out-fn
|
||||
(fn [m]
|
||||
(locks/locking lock
|
||||
(binding [*out* out]
|
||||
(l/warn :hint "write" :data m)
|
||||
(println (json/encode m :key-fn json/write-camel-key)))))
|
||||
|
||||
tapfn
|
||||
(fn [val]
|
||||
(out-fn {:tag :tap :val val}))]
|
||||
|
||||
(binding [*out* (PrintWriter-on #(out-fn {:tag :out :val %1}) nil true)
|
||||
*err* (PrintWriter-on #(out-fn {:tag :err :val %1}) nil true)]
|
||||
(try
|
||||
(add-tap tapfn)
|
||||
(loop []
|
||||
(when (try
|
||||
(let [data (read-line)
|
||||
tpoint (dt/tpoint)]
|
||||
|
||||
(l/dbg :hint "received" :data (if (= data ::eof) "EOF" data))
|
||||
|
||||
(try
|
||||
(when-not (= data ::eof)
|
||||
(when-not (nil? data)
|
||||
(let [result (cli/exec data)
|
||||
elapsed (tpoint)]
|
||||
(l/warn :hint "result" :data result)
|
||||
(out-fn {:tag :ret
|
||||
:val (if (instance? Throwable result)
|
||||
(Throwable->map result)
|
||||
result)
|
||||
:elapsed (inst-ms elapsed)})))
|
||||
true)
|
||||
(catch Throwable cause
|
||||
(let [elapsed (tpoint)]
|
||||
(out-fn {:tag :ret
|
||||
:err (ex->data cause :eval)
|
||||
:elapsed (inst-ms elapsed)})
|
||||
true))))
|
||||
(catch Throwable cause
|
||||
(out-fn {:tag :ret
|
||||
:err (ex->data cause :read)})
|
||||
true))
|
||||
(recur)))
|
||||
(finally
|
||||
(remove-tap tapfn))))))
|
||||
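;; NOTE (not part of the diff): a sketch of the line protocol implied above,
;; with invented payloads. read-line decodes one JSON object per line into
;; kebab-case keys; out-fn writes one JSON object per line back, tagged with :tag.
(comment
  ;; request line (stdin):   {"cmd": "echo", "params": {"hello": "world"}}
  ;; decoded by read-line => {:cmd "echo" :params {:hello "world"}}
  ;;
  ;; response line (stdout): {"tag": "ret", "val": {...}, "elapsed": 3}
  ;; stdout/stderr produced while evaluating arrive as {"tag": "out"|"err", ...}
  ;; lines, and tap> values as {"tag": "tap", ...} lines.
  )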
|
||||
;; --- State initialization
|
||||
|
||||
|
||||
@@ -9,14 +9,23 @@
|
||||
(:require
|
||||
[app.auth :as auth]
|
||||
[app.common.exceptions :as ex]
|
||||
[app.common.schema :as sm]
|
||||
[app.common.uuid :as uuid]
|
||||
[app.db :as db]
|
||||
[app.rpc.commands.auth :as cmd.auth]
|
||||
[app.rpc.commands.profile :as cmd.profile]
|
||||
[app.util.json :as json]
|
||||
[app.setup :as-alias setup]
|
||||
[app.tokens :as tokens]
|
||||
[app.util.time :as dt]
|
||||
[cuerdas.core :as str]))
|
||||
|
||||
(defn coercer
|
||||
[schema & {:as opts}]
|
||||
(let [decode-fn (sm/decoder schema sm/json-transformer)
|
||||
check-fn (sm/check-fn schema opts)]
|
||||
(fn [data]
|
||||
(-> data decode-fn check-fn))))
|
||||
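;; NOTE (not part of the diff): coercer composes a JSON-oriented decoder with a
;; checker, so values arriving as JSON strings (uuids, dates) are decoded before
;; validation. The schema and input below are invented.
(comment
  (def check-id-params
    (coercer [:map [:id ::sm/uuid]]
             :type :validation
             :hint "invalid params"))

  (check-id-params {:id "00000000-0000-0000-0000-000000000000"})
  ;; => {:id #uuid "00000000-0000-0000-0000-000000000000"}
  )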
|
||||
(defn- get-current-system
|
||||
[]
|
||||
(or (deref (requiring-resolve 'app.main/system))
|
||||
@@ -24,16 +33,21 @@
|
||||
|
||||
(defmulti ^:private exec-command ::cmd)
|
||||
|
||||
(defmethod exec-command :default
|
||||
[{:keys [::cmd]}]
|
||||
(ex/raise :type :internal
|
||||
:code :not-implemented
|
||||
:hint (str/ffmt "command '%' not implemented" cmd)))
|
||||
|
||||
(defn exec
|
||||
"Entry point with external tools integrations that uses PREPL
|
||||
interface for interacting with running penpot backend."
|
||||
[data]
|
||||
(let [data (json/decode data)]
|
||||
(-> {::cmd (keyword (:cmd data "default"))}
|
||||
(merge (:params data))
|
||||
(exec-command))))
|
||||
(-> {::cmd (get data :cmd)}
|
||||
(merge (:params data))
|
||||
(exec-command)))
|
||||
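;; NOTE (not part of the diff): exec now dispatches exec-command on the plain
;; string under :cmd, with :params merged in. With the "echo" command defined
;; below, a made-up request round-trips like this:
(comment
  (exec {:cmd "echo" :params {:hello "world"}})
  ;; => {::cmd "echo" :hello "world"}
  )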
|
||||
(defmethod exec-command :create-profile
|
||||
(defmethod exec-command "create-profile"
|
||||
[{:keys [fullname email password is-active]
|
||||
:or {is-active true}}]
|
||||
(some-> (get-current-system)
|
||||
@@ -49,7 +63,7 @@
|
||||
(->> (cmd.auth/create-profile! conn params)
|
||||
(cmd.auth/create-profile-rels! conn)))))))
|
||||
|
||||
(defmethod exec-command :update-profile
|
||||
(defmethod exec-command "update-profile"
|
||||
[{:keys [fullname email password is-active]}]
|
||||
(some-> (get-current-system)
|
||||
(db/tx-run!
|
||||
@@ -70,7 +84,12 @@
|
||||
:deleted-at nil})]
|
||||
(pos? (db/get-update-count res)))))))))
|
||||
|
||||
(defmethod exec-command :delete-profile
|
||||
(defmethod exec-command "echo"
|
||||
[params]
|
||||
params)
|
||||
|
||||
|
||||
(defmethod exec-command "delete-profile"
|
||||
[{:keys [email soft]}]
|
||||
(when-not email
|
||||
(ex/raise :type :assertion
|
||||
@@ -88,7 +107,7 @@
|
||||
{:email email}))]
|
||||
(pos? (db/get-update-count res)))))))
|
||||
|
||||
(defmethod exec-command :search-profile
|
||||
(defmethod exec-command "search-profile"
|
||||
[{:keys [email]}]
|
||||
(when-not email
|
||||
(ex/raise :type :assertion
|
||||
@@ -102,12 +121,130 @@
|
||||
" where email similar to ? order by created_at desc limit 100")]
|
||||
(db/exec! conn [sql email]))))))
|
||||
|
||||
(defmethod exec-command :derive-password
|
||||
(defmethod exec-command "derive-password"
|
||||
[{:keys [password]}]
|
||||
(auth/derive-password password))
|
||||
|
||||
(defmethod exec-command :default
|
||||
[{:keys [::cmd]}]
|
||||
(ex/raise :type :internal
|
||||
:code :not-implemented
|
||||
:hint (str/ffmt "command '%' not implemented" (name cmd))))
|
||||
(defmethod exec-command "authenticate"
|
||||
[{:keys [token]}]
|
||||
(when-let [system (get-current-system)]
|
||||
(let [props (get system ::setup/props)]
|
||||
(tokens/verify props {:token token :iss "authentication"}))))
|
||||
|
||||
(def ^:private schema:get-customer
|
||||
[:map [:id ::sm/uuid]])
|
||||
|
||||
(def coerce-get-customer-params
|
||||
(coercer schema:get-customer
|
||||
:type :validation
|
||||
:hint "invalid data provided for `get-customer` rpc call"))
|
||||
|
||||
(def sql:get-customer-slots
|
||||
"WITH teams AS (
|
||||
SELECT tpr.team_id AS id,
|
||||
tpr.profile_id AS profile_id
|
||||
FROM team_profile_rel AS tpr
|
||||
WHERE tpr.is_owner IS true
|
||||
AND tpr.profile_id = ?
|
||||
), teams_with_slots AS (
|
||||
SELECT tpr.team_id AS id,
|
||||
count(*) AS total
|
||||
FROM team_profile_rel AS tpr
|
||||
WHERE tpr.team_id IN (SELECT id FROM teams)
|
||||
AND tpr.can_edit IS true
|
||||
GROUP BY 1
|
||||
ORDER BY 2
|
||||
)
|
||||
SELECT max(total) AS total FROM teams_with_slots;")
|
||||
|
||||
(defn- get-customer-slots
|
||||
[system profile-id]
|
||||
(let [result (db/exec-one! system [sql:get-customer-slots profile-id])]
|
||||
(:total result)))
|
||||
|
||||
(defmethod exec-command "get-customer"
|
||||
[params]
|
||||
(when-let [system (get-current-system)]
|
||||
(let [{:keys [id] :as params} (coerce-get-customer-params params)
|
||||
{:keys [props] :as profile} (cmd.profile/get-profile system id)]
|
||||
{:id (get profile :id)
|
||||
:name (get profile :fullname)
|
||||
:email (get profile :email)
|
||||
:num-editors (get-customer-slots system id)
|
||||
:subscription (get props :subscription)})))
|
||||
|
||||
(def ^:private schema:customer-subscription
|
||||
[:map {:title "CustomerSubscription"}
|
||||
[:id ::sm/text]
|
||||
[:customer-id ::sm/text]
|
||||
[:type [:enum
|
||||
"unlimited"
|
||||
"professional"
|
||||
"enterprise"]]
|
||||
[:status [:enum
|
||||
"active"
|
||||
"canceled"
|
||||
"incomplete"
|
||||
"incomplete_expired"
|
||||
"pass_due"
|
||||
"paused"
|
||||
"trialing"
|
||||
"unpaid"]]
|
||||
|
||||
[:billing-period [:enum
|
||||
"month"
|
||||
"day"
|
||||
"week"
|
||||
"year"]]
|
||||
[:quantity :int]
|
||||
[:description [:maybe ::sm/text]]
|
||||
[:created-at ::sm/timestamp]
|
||||
[:start-date [:maybe ::sm/timestamp]]
|
||||
[:ended-at [:maybe ::sm/timestamp]]
|
||||
[:trial-end [:maybe ::sm/timestamp]]
|
||||
[:trial-start [:maybe ::sm/timestamp]]
|
||||
[:cancel-at [:maybe ::sm/timestamp]]
|
||||
[:canceled-at [:maybe ::sm/timestamp]]
|
||||
|
||||
[:current-period-end ::sm/timestamp]
|
||||
[:current-period-start ::sm/timestamp]
|
||||
[:cancel-at-period-end :boolean]
|
||||
|
||||
[:cancellation-details
|
||||
[:map {:title "CancellationDetails"}
|
||||
[:comment [:maybe ::sm/text]]
|
||||
[:reason [:maybe ::sm/text]]
|
||||
[:feedback [:maybe
|
||||
[:enum
|
||||
"customer_service"
|
||||
"low_quality"
|
||||
"missing_feature"
|
||||
"other"
|
||||
"switched_service"
|
||||
"too_complex"
|
||||
"too_expensive"
|
||||
"unused"]]]]]])
|
||||
|
||||
(def ^:private schema:update-customer-subscription
|
||||
[:map
|
||||
[:id ::sm/uuid]
|
||||
[:subscription [:maybe schema:customer-subscription]]])
|
||||
|
||||
(def coerce-update-customer-subscription-params
|
||||
(coercer schema:update-customer-subscription
|
||||
:type :validation
|
||||
:hint "invalid data provided for `update-customer-subscription` rpc call"))
|
||||
|
||||
(defmethod exec-command "update-customer-subscription"
|
||||
[params]
|
||||
(when-let [system (get-current-system)]
|
||||
(let [{:keys [id subscription]} (coerce-update-customer-subscription-params params)
|
||||
;; FIXME: locking
|
||||
{:keys [props] :as profile} (cmd.profile/get-profile system id)
|
||||
props (assoc props :subscription subscription)]
|
||||
|
||||
(db/update! system :profile
|
||||
{:props (db/tjson props)}
|
||||
{:id id}
|
||||
{::db/return-keys false})
|
||||
true)))
|
||||
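;; NOTE (not part of the diff): an invented prepl request driving this command;
;; the :subscription value must satisfy schema:customer-subscription above
;; (only a few of its keys are shown here).
(comment
  (exec {:cmd "update-customer-subscription"
         :params {:id "00000000-0000-0000-0000-000000000000"
                  :subscription {:type "enterprise"
                                 :status "active"
                                 ;; ...remaining schema:customer-subscription keys...
                                 }}})
  ;; => true
  )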
|
||||
@@ -1,306 +0,0 @@
|
||||
;; This Source Code Form is subject to the terms of the Mozilla Public
|
||||
;; License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
;;
|
||||
;; Copyright (c) KALEIDOS INC
|
||||
|
||||
(ns app.srepl.components-v2
|
||||
(:require
|
||||
[app.common.fressian :as fres]
|
||||
[app.common.logging :as l]
|
||||
[app.db :as db]
|
||||
[app.features.components-v2 :as feat]
|
||||
[app.main :as main]
|
||||
[app.srepl.helpers :as h]
|
||||
[app.util.events :as events]
|
||||
[app.util.time :as dt]
|
||||
[app.worker :as-alias wrk]
|
||||
[datoteka.fs :as fs]
|
||||
[datoteka.io :as io]
|
||||
[promesa.exec :as px]
|
||||
[promesa.exec.semaphore :as ps]
|
||||
[promesa.util :as pu]))
|
||||
|
||||
(def ^:dynamic *scope* nil)
|
||||
(def ^:dynamic *semaphore* nil)
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; PRIVATE HELPERS
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
(def ^:private sql:get-files-by-created-at
|
||||
"SELECT id, features,
|
||||
row_number() OVER (ORDER BY created_at DESC) AS rown
|
||||
FROM file
|
||||
WHERE deleted_at IS NULL
|
||||
ORDER BY created_at DESC")
|
||||
|
||||
(defn- get-files
|
||||
[conn]
|
||||
(->> (db/cursor conn [sql:get-files-by-created-at] {:chunk-size 500})
|
||||
(map feat/decode-row)
|
||||
(remove (fn [{:keys [features]}]
|
||||
(contains? features "components/v2")))))
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; PUBLIC API
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
(defn migrate-file!
|
||||
[file-id & {:keys [rollback? validate? label cache skip-on-graphic-error?]
|
||||
:or {rollback? true
|
||||
validate? false
|
||||
skip-on-graphic-error? true}}]
|
||||
(l/dbg :hint "migrate:start" :rollback rollback?)
|
||||
(let [tpoint (dt/tpoint)
|
||||
file-id (h/parse-uuid file-id)]
|
||||
|
||||
(binding [feat/*stats* (atom {})
|
||||
feat/*cache* cache]
|
||||
(try
|
||||
(-> (assoc main/system ::db/rollback rollback?)
|
||||
(feat/migrate-file! file-id
|
||||
:validate? validate?
|
||||
:skip-on-graphic-error? skip-on-graphic-error?
|
||||
:label label))
|
||||
|
||||
(-> (deref feat/*stats*)
|
||||
(assoc :elapsed (dt/format-duration (tpoint))))
|
||||
|
||||
(catch Throwable cause
|
||||
(l/wrn :hint "migrate:error" :cause cause))
|
||||
|
||||
(finally
|
||||
(let [elapsed (dt/format-duration (tpoint))]
|
||||
(l/dbg :hint "migrate:end" :rollback rollback? :elapsed elapsed)))))))
|
||||
|
||||
(defn migrate-team!
|
||||
[team-id & {:keys [rollback? skip-on-graphic-error? validate? label cache]
|
||||
:or {rollback? true
|
||||
validate? true
|
||||
skip-on-graphic-error? true}}]
|
||||
|
||||
(l/dbg :hint "migrate:start" :rollback rollback?)
|
||||
|
||||
(let [team-id (h/parse-uuid team-id)
|
||||
stats (atom {})
|
||||
tpoint (dt/tpoint)]
|
||||
|
||||
(binding [feat/*stats* stats
|
||||
feat/*cache* cache]
|
||||
(try
|
||||
(-> (assoc main/system ::db/rollback rollback?)
|
||||
(feat/migrate-team! team-id
|
||||
:label label
|
||||
:validate? validate?
|
||||
:skip-on-graphics-error? skip-on-graphic-error?))
|
||||
|
||||
(-> (deref feat/*stats*)
|
||||
(assoc :elapsed (dt/format-duration (tpoint))))
|
||||
|
||||
(catch Throwable cause
|
||||
(l/dbg :hint "migrate:error" :cause cause))
|
||||
|
||||
(finally
|
||||
(let [elapsed (dt/format-duration (tpoint))]
|
||||
(l/dbg :hint "migrate:end" :rollback rollback? :elapsed elapsed)))))))
|
||||
|
||||
(defn migrate-files!
|
||||
"A REPL helper for migrate all files.
|
||||
|
||||
This function starts multiple concurrent file migration processes
|
||||
until thw maximum number of jobs is reached which by default has the
|
||||
value of `1`. This is controled with the `:max-jobs` option.
|
||||
|
||||
If you want to run this on multiple machines you will need to specify
|
||||
the total number of partitions and the current partition.
|
||||
|
||||
In order to get the report table populated, you will need to provide
|
||||
a correct `:label`. That label is also used for persist a file
|
||||
snaphot before continue with the migration."
|
||||
[& {:keys [max-jobs max-items rollback? validate?
|
||||
cache skip-on-graphic-error?
|
||||
label partitions current-partition]
|
||||
:or {validate? false
|
||||
rollback? true
|
||||
max-jobs 1
|
||||
current-partition 1
|
||||
skip-on-graphic-error? true
|
||||
max-items Long/MAX_VALUE}}]
|
||||
|
||||
(when (int? partitions)
|
||||
(when-not (int? current-partition)
|
||||
(throw (IllegalArgumentException. "missing `current-partition` parameter")))
|
||||
(when-not (<= 0 current-partition partitions)
|
||||
(throw (IllegalArgumentException. "invalid value on `current-partition` parameter"))))
|
||||
|
||||
(let [stats (atom {})
|
||||
tpoint (dt/tpoint)
|
||||
factory (px/thread-factory :virtual false :prefix "penpot/migration/")
|
||||
executor (px/cached-executor :factory factory)
|
||||
|
||||
sjobs (ps/create :permits max-jobs)
|
||||
|
||||
migrate-file
|
||||
(fn [file-id rown]
|
||||
(try
|
||||
(db/tx-run! (assoc main/system ::db/rollback rollback?)
|
||||
(fn [system]
|
||||
(db/exec-one! system ["SET LOCAL idle_in_transaction_session_timeout = 0"])
|
||||
(feat/migrate-file! system file-id
|
||||
:rown rown
|
||||
:label label
|
||||
:validate? validate?
|
||||
:skip-on-graphic-error? skip-on-graphic-error?)))
|
||||
|
||||
(catch Throwable cause
|
||||
(l/wrn :hint "unexpected error on processing file (skiping)"
|
||||
:file-id (str file-id))
|
||||
|
||||
(events/tap :error
|
||||
(ex-info "unexpected error on processing file (skiping)"
|
||||
{:file-id file-id}
|
||||
cause))
|
||||
|
||||
(swap! stats update :errors (fnil inc 0)))
|
||||
|
||||
(finally
|
||||
(ps/release! sjobs))))
|
||||
|
||||
process-file
|
||||
(fn [{:keys [id rown]}]
|
||||
(ps/acquire! sjobs)
|
||||
(px/run! executor (partial migrate-file id rown)))]
|
||||
|
||||
(l/dbg :hint "migrate:start"
|
||||
:label label
|
||||
:rollback rollback?
|
||||
:max-jobs max-jobs
|
||||
:max-items max-items)
|
||||
|
||||
(binding [feat/*stats* stats
|
||||
feat/*cache* cache]
|
||||
(try
|
||||
(db/tx-run! main/system
|
||||
(fn [{:keys [::db/conn] :as system}]
|
||||
(db/exec! conn ["SET LOCAL statement_timeout = 0"])
|
||||
(db/exec! conn ["SET LOCAL idle_in_transaction_session_timeout = 0"])
|
||||
|
||||
(run! process-file
|
||||
(->> (get-files conn)
|
||||
(filter (fn [{:keys [rown] :as row}]
|
||||
(if (int? partitions)
|
||||
(= current-partition (inc (mod rown partitions)))
|
||||
true)))
|
||||
(take max-items)))
|
||||
|
||||
;; Close and await tasks
|
||||
(pu/close! executor)))
|
||||
|
||||
(-> (deref stats)
|
||||
(assoc :elapsed (dt/format-duration (tpoint))))
|
||||
|
||||
(catch Throwable cause
|
||||
(l/dbg :hint "migrate:error" :cause cause)
|
||||
(events/tap :error cause))
|
||||
|
||||
(finally
|
||||
(let [elapsed (dt/format-duration (tpoint))]
|
||||
(l/dbg :hint "migrate:end"
|
||||
:rollback rollback?
|
||||
:elapsed elapsed)))))))
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; CACHE POPULATE
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
(def sql:sobjects-for-cache
|
||||
"SELECT id,
|
||||
row_number() OVER (ORDER BY created_at) AS index
|
||||
FROM storage_object
|
||||
WHERE (metadata->>'~:bucket' = 'file-media-object' OR
|
||||
metadata->>'~:bucket' IS NULL)
|
||||
AND metadata->>'~:content-type' = 'image/svg+xml'
|
||||
AND deleted_at IS NULL
|
||||
AND size < 1135899
|
||||
ORDER BY created_at ASC")
|
||||
|
||||
(defn populate-cache!
|
||||
"A REPL helper for migrate all files.
|
||||
|
||||
This function starts multiple concurrent file migration processes
|
||||
until thw maximum number of jobs is reached which by default has the
|
||||
value of `1`. This is controled with the `:max-jobs` option.
|
||||
|
||||
If you want to run this on multiple machines you will need to specify
|
||||
the total number of partitions and the current partition.
|
||||
|
||||
In order to get the report table populated, you will need to provide
|
||||
a correct `:label`. That label is also used for persist a file
|
||||
snaphot before continue with the migration."
|
||||
[& {:keys [max-jobs] :or {max-jobs 1}}]
|
||||
|
||||
(let [tpoint (dt/tpoint)
|
||||
|
||||
factory (px/thread-factory :virtual false :prefix "penpot/cache/")
|
||||
executor (px/cached-executor :factory factory)
|
||||
|
||||
sjobs (ps/create :permits max-jobs)
|
||||
|
||||
retrieve-sobject
|
||||
(fn [id index]
|
||||
(let [path (feat/get-sobject-cache-path id)
|
||||
parent (fs/parent path)]
|
||||
|
||||
(try
|
||||
(when-not (fs/exists? parent)
|
||||
(fs/create-dir parent))
|
||||
|
||||
(if (fs/exists? path)
|
||||
(l/inf :hint "create cache entry" :status "exists" :index index :id (str id) :path (str path))
|
||||
(let [svg-data (feat/get-optimized-svg id)]
|
||||
(with-open [^java.lang.AutoCloseable stream (io/output-stream path)]
|
||||
(let [writer (fres/writer stream)]
|
||||
(fres/write! writer svg-data)))
|
||||
|
||||
(l/inf :hint "create cache entry" :status "created"
|
||||
:index index
|
||||
:id (str id)
|
||||
:path (str path))))
|
||||
|
||||
(catch Throwable cause
|
||||
(l/wrn :hint "create cache entry"
|
||||
:status "error"
|
||||
:index index
|
||||
:id (str id)
|
||||
:path (str path)
|
||||
:cause cause))
|
||||
|
||||
(finally
|
||||
(ps/release! sjobs)))))
|
||||
|
||||
process-sobject
|
||||
(fn [{:keys [id index]}]
|
||||
(ps/acquire! sjobs)
|
||||
(px/run! executor (partial retrieve-sobject id index)))]
|
||||
|
||||
(l/dbg :hint "migrate:start"
|
||||
:max-jobs max-jobs)
|
||||
|
||||
(try
|
||||
(binding [feat/*system* main/system]
|
||||
(run! process-sobject
|
||||
(db/exec! main/system [sql:sobjects-for-cache]))
|
||||
|
||||
;; Close and await tasks
|
||||
(pu/close! executor))
|
||||
|
||||
{:elapsed (dt/format-duration (tpoint))}
|
||||
|
||||
(catch Throwable cause
|
||||
(l/dbg :hint "populate:error" :cause cause))
|
||||
|
||||
(finally
|
||||
(let [elapsed (dt/format-duration (tpoint))]
|
||||
(l/dbg :hint "populate:end"
|
||||
:elapsed elapsed))))))
|
||||
@@ -13,7 +13,6 @@
|
||||
[app.common.files.migrations :as fmg]
|
||||
[app.common.files.validate :as cfv]
|
||||
[app.db :as db]
|
||||
[app.features.components-v2 :as feat.comp-v2]
|
||||
[app.main :as main]
|
||||
[app.rpc.commands.files :as files]
|
||||
[app.rpc.commands.files-snapshot :as fsnap]
|
||||
@@ -62,6 +61,27 @@
|
||||
{:id id})
|
||||
team))
|
||||
|
||||
(def ^:private sql:get-and-lock-team-files
|
||||
"SELECT f.id
|
||||
FROM file AS f
|
||||
JOIN project AS p ON (p.id = f.project_id)
|
||||
WHERE p.team_id = ?
|
||||
AND p.deleted_at IS NULL
|
||||
AND f.deleted_at IS NULL
|
||||
FOR UPDATE")
|
||||
|
||||
(defn get-team
|
||||
[conn team-id]
|
||||
(-> (db/get conn :team {:id team-id}
|
||||
{::db/remove-deleted false
|
||||
::db/check-deleted false})
|
||||
(update :features db/decode-pgarray #{})))
|
||||
|
||||
(defn get-and-lock-team-files
|
||||
[conn team-id]
|
||||
(transduce (map :id) conj []
|
||||
(db/plan conn [sql:get-and-lock-team-files team-id])))
|
||||
|
||||
(defn reset-file-data!
|
||||
"Hardcode replace of the data of one file."
|
||||
[system id data]
|
||||
@@ -96,7 +116,7 @@
|
||||
(defn take-team-snapshot!
|
||||
[system team-id label]
|
||||
(let [conn (db/get-connection system)]
|
||||
(->> (feat.comp-v2/get-and-lock-team-files conn team-id)
|
||||
(->> (get-and-lock-team-files conn team-id)
|
||||
(reduce (fn [result file-id]
|
||||
(let [file (fsnap/get-file-snapshots system file-id)]
|
||||
(fsnap/create-file-snapshot! system file
|
||||
@@ -108,19 +128,16 @@
|
||||
(defn restore-team-snapshot!
|
||||
[system team-id label]
|
||||
(let [conn (db/get-connection system)
|
||||
ids (->> (feat.comp-v2/get-and-lock-team-files conn team-id)
|
||||
ids (->> (get-and-lock-team-files conn team-id)
|
||||
(into #{}))
|
||||
|
||||
snap (search-file-snapshots conn ids label)
|
||||
|
||||
ids' (into #{} (map :file-id) snap)
|
||||
team (-> (feat.comp-v2/get-team conn team-id)
|
||||
(update :features disj "components/v2"))]
|
||||
ids' (into #{} (map :file-id) snap)]
|
||||
|
||||
(when (not= ids ids')
|
||||
(throw (RuntimeException. "no uniform snapshot available")))
|
||||
|
||||
(feat.comp-v2/update-team! conn team)
|
||||
(reduce (fn [result {:keys [file-id id]}]
|
||||
(fsnap/restore-file-snapshot! system file-id id)
|
||||
(inc result))
|
||||
@@ -129,13 +146,9 @@
|
||||
|
||||
(defn process-file!
|
||||
[system file-id update-fn & {:keys [label validate? with-libraries?] :or {validate? true} :as opts}]
|
||||
(let [conn (db/get-connection system)
|
||||
file (bfc/get-file system file-id ::db/for-update true)
|
||||
(let [file (bfc/get-file system file-id ::db/for-update true)
|
||||
libs (when with-libraries?
|
||||
(->> (files/get-file-libraries conn file-id)
|
||||
(into [file] (map (fn [{:keys [id]}]
|
||||
(bfc/get-file system id))))
|
||||
(d/index-by :id)))
|
||||
(bfc/get-resolved-file-libraries system file))
|
||||
|
||||
file' (when file
|
||||
(if with-libraries?
|
||||
|
||||
@@ -22,7 +22,6 @@
[app.config :as cf]
[app.db :as db]
[app.db.sql :as-alias sql]
[app.features.components-v2 :as feat.comp-v2]
[app.features.fdata :as feat.fdata]
[app.loggers.audit :as audit]
[app.main :as main]
@@ -156,6 +155,10 @@
[file-id & {:as opts}]
(process-file! file-id feat.fdata/enable-pointer-map opts))

(defn enable-path-data-feature-on-file!
[file-id & {:as opts}]
(process-file! file-id feat.fdata/enable-path-data opts))

(defn enable-storage-features-on-file!
[file-id & {:as opts}]
(enable-objects-map-feature-on-file! file-id opts)
@@ -387,12 +390,9 @@
[file-id]
(let [file-id (h/parse-uuid file-id)]
(db/tx-run! (assoc main/system ::db/rollback true)
(fn [{:keys [::db/conn] :as system}]
(let [file (h/get-file system file-id)
libs (->> (files/get-file-libraries conn file-id)
(into [file] (map (fn [{:keys [id]}]
(h/get-file system id))))
(d/index-by :id))]
(fn [system]
(let [file (bfc/get-file system file-id)
libs (bfc/get-resolved-file-libraries system file)]
(cfv/validate-file file libs))))))

(defn repair-file!
@@ -416,10 +416,12 @@
"Apply a function to the file. Optionally save the changes or not.
The function receives the decoded and migrated file data."
[file-id update-fn & {:keys [rollback?] :or {rollback? true} :as opts}]
(db/tx-run! (assoc main/system ::db/rollback rollback?)
(fn [system]
(binding [h/*system* system]
(h/process-file! system file-id update-fn opts)))))
(let [file-id (h/parse-uuid file-id)]
(db/tx-run! (assoc main/system ::db/rollback rollback?)
(fn [system]
(binding [h/*system* system
db/*conn* (db/get-connection system)]
(h/process-file! system file-id update-fn opts))))))
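
;; REPL sketch, assuming the function above; the file id string is a
;; hypothetical placeholder. With :rollback? true (the default) the whole
;; transaction is rolled back, so the update function runs without persisting.
(comment
  (process-file! "00000000-0000-0000-0000-000000000000" identity :rollback? true))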

(defn process-team-files!
"Apply a function to each file of the specified team."
@@ -431,8 +433,9 @@
(when (string? label)
(h/take-team-snapshot! system team-id label))

(binding [h/*system* system]
(->> (feat.comp-v2/get-and-lock-team-files conn team-id)
(binding [h/*system* system
db/*conn* (db/get-connection system)]
(->> (h/get-and-lock-team-files conn team-id)
(reduce (fn [result file-id]
(if (h/process-file! system file-id update-fn opts)
(inc result)
@@ -10,6 +10,7 @@
|
||||
file is eligible to be garbage collected after some period of
|
||||
inactivity (the default threshold is 72h)."
|
||||
(:require
|
||||
[app.binfile.cleaner :as bfl]
|
||||
[app.binfile.common :as bfc]
|
||||
[app.common.files.helpers :as cfh]
|
||||
[app.common.files.validate :as cfv]
|
||||
@@ -258,6 +259,7 @@
|
||||
(if-let [file (get-file cfg file-id)]
|
||||
(let [file (->> file
|
||||
(bfc/decode-file cfg)
|
||||
(bfl/clean-file)
|
||||
(clean-media! cfg)
|
||||
(clean-fragments! cfg))
|
||||
file (assoc file :has-media-trimmed true)]
|
||||
|
||||
@@ -9,7 +9,6 @@
|
||||
of deleted or unreachable objects."
|
||||
(:require
|
||||
[app.common.logging :as l]
|
||||
[app.config :as cf]
|
||||
[app.db :as db]
|
||||
[app.storage :as sto]
|
||||
[app.util.time :as dt]
|
||||
@@ -18,15 +17,15 @@
|
||||
(def ^:private sql:get-profiles
|
||||
"SELECT id, photo_id FROM profile
|
||||
WHERE deleted_at IS NOT NULL
|
||||
AND deleted_at < now() - ?::interval
|
||||
AND deleted_at < now() + ?::interval
|
||||
ORDER BY deleted_at ASC
|
||||
LIMIT ?
|
||||
FOR UPDATE
|
||||
SKIP LOCKED")
|
||||
|
||||
(defn- delete-profiles!
|
||||
[{:keys [::db/conn ::min-age ::chunk-size ::sto/storage] :as cfg}]
|
||||
(->> (db/plan conn [sql:get-profiles min-age chunk-size] {:fetch-size 5})
|
||||
[{:keys [::db/conn ::deletion-threshold ::chunk-size ::sto/storage] :as cfg}]
|
||||
(->> (db/plan conn [sql:get-profiles deletion-threshold chunk-size] {:fetch-size 5})
|
||||
(reduce (fn [total {:keys [id photo-id]}]
|
||||
(l/trc :hint "permanently delete" :rel "profile" :id (str id))
|
||||
|
||||
@@ -41,15 +40,15 @@
|
||||
(def ^:private sql:get-teams
|
||||
"SELECT deleted_at, id, photo_id FROM team
|
||||
WHERE deleted_at IS NOT NULL
|
||||
AND deleted_at < now() - ?::interval
|
||||
AND deleted_at < now() + ?::interval
|
||||
ORDER BY deleted_at ASC
|
||||
LIMIT ?
|
||||
FOR UPDATE
|
||||
SKIP LOCKED")
|
||||
|
||||
(defn- delete-teams!
|
||||
[{:keys [::db/conn ::min-age ::chunk-size ::sto/storage] :as cfg}]
|
||||
(->> (db/plan conn [sql:get-teams min-age chunk-size] {:fetch-size 5})
|
||||
[{:keys [::db/conn ::deletion-threshold ::chunk-size ::sto/storage] :as cfg}]
|
||||
(->> (db/plan conn [sql:get-teams deletion-threshold chunk-size] {:fetch-size 5})
|
||||
(reduce (fn [total {:keys [id photo-id deleted-at]}]
|
||||
(l/trc :hint "permanently delete"
|
||||
:rel "team"
|
||||
@@ -69,15 +68,15 @@
|
||||
"SELECT id, team_id, deleted_at, woff1_file_id, woff2_file_id, otf_file_id, ttf_file_id
|
||||
FROM team_font_variant
|
||||
WHERE deleted_at IS NOT NULL
|
||||
AND deleted_at < now() - ?::interval
|
||||
AND deleted_at < now() + ?::interval
|
||||
ORDER BY deleted_at ASC
|
||||
LIMIT ?
|
||||
FOR UPDATE
|
||||
SKIP LOCKED")
|
||||
|
||||
(defn- delete-fonts!
|
||||
[{:keys [::db/conn ::min-age ::chunk-size ::sto/storage] :as cfg}]
|
||||
(->> (db/plan conn [sql:get-fonts min-age chunk-size] {:fetch-size 5})
|
||||
[{:keys [::db/conn ::deletion-threshold ::chunk-size ::sto/storage] :as cfg}]
|
||||
(->> (db/plan conn [sql:get-fonts deletion-threshold chunk-size] {:fetch-size 5})
|
||||
(reduce (fn [total {:keys [id team-id deleted-at] :as font}]
|
||||
(l/trc :hint "permanently delete"
|
||||
:rel "team-font-variant"
|
||||
@@ -101,15 +100,15 @@
|
||||
"SELECT id, deleted_at, team_id
|
||||
FROM project
|
||||
WHERE deleted_at IS NOT NULL
|
||||
AND deleted_at < now() - ?::interval
|
||||
AND deleted_at < now() + ?::interval
|
||||
ORDER BY deleted_at ASC
|
||||
LIMIT ?
|
||||
FOR UPDATE
|
||||
SKIP LOCKED")
|
||||
|
||||
(defn- delete-projects!
|
||||
[{:keys [::db/conn ::min-age ::chunk-size] :as cfg}]
|
||||
(->> (db/plan conn [sql:get-projects min-age chunk-size] {:fetch-size 5})
|
||||
[{:keys [::db/conn ::deletion-threshold ::chunk-size] :as cfg}]
|
||||
(->> (db/plan conn [sql:get-projects deletion-threshold chunk-size] {:fetch-size 5})
|
||||
(reduce (fn [total {:keys [id team-id deleted-at]}]
|
||||
(l/trc :hint "permanently delete"
|
||||
:rel "project"
|
||||
@@ -127,15 +126,15 @@
|
||||
"SELECT id, deleted_at, project_id, data_backend, data_ref_id
|
||||
FROM file
|
||||
WHERE deleted_at IS NOT NULL
|
||||
AND deleted_at < now() - ?::interval
|
||||
AND deleted_at < now() + ?::interval
|
||||
ORDER BY deleted_at ASC
|
||||
LIMIT ?
|
||||
FOR UPDATE
|
||||
SKIP LOCKED")
|
||||
|
||||
(defn- delete-files!
|
||||
[{:keys [::db/conn ::sto/storage ::min-age ::chunk-size] :as cfg}]
|
||||
(->> (db/plan conn [sql:get-files min-age chunk-size] {:fetch-size 5})
|
||||
[{:keys [::db/conn ::sto/storage ::deletion-threshold ::chunk-size] :as cfg}]
|
||||
(->> (db/plan conn [sql:get-files deletion-threshold chunk-size] {:fetch-size 5})
|
||||
(reduce (fn [total {:keys [id deleted-at project-id] :as file}]
|
||||
(l/trc :hint "permanently delete"
|
||||
:rel "file"
|
||||
@@ -156,15 +155,15 @@
|
||||
"SELECT file_id, revn, media_id, deleted_at
|
||||
FROM file_thumbnail
|
||||
WHERE deleted_at IS NOT NULL
|
||||
AND deleted_at < now() - ?::interval
|
||||
AND deleted_at < now() + ?::interval
|
||||
ORDER BY deleted_at ASC
|
||||
LIMIT ?
|
||||
FOR UPDATE
|
||||
SKIP LOCKED")
|
||||
|
||||
(defn delete-file-thumbnails!
|
||||
[{:keys [::db/conn ::min-age ::chunk-size ::sto/storage] :as cfg}]
|
||||
(->> (db/plan conn [sql:get-file-thumbnails min-age chunk-size] {:fetch-size 5})
|
||||
[{:keys [::db/conn ::deletion-threshold ::chunk-size ::sto/storage] :as cfg}]
|
||||
(->> (db/plan conn [sql:get-file-thumbnails deletion-threshold chunk-size] {:fetch-size 5})
|
||||
(reduce (fn [total {:keys [file-id revn media-id deleted-at]}]
|
||||
(l/trc :hint "permanently delete"
|
||||
:rel "file-thumbnail"
|
||||
@@ -185,15 +184,15 @@
|
||||
"SELECT file_id, object_id, media_id, deleted_at
|
||||
FROM file_tagged_object_thumbnail
|
||||
WHERE deleted_at IS NOT NULL
|
||||
AND deleted_at < now() - ?::interval
|
||||
AND deleted_at < now() + ?::interval
|
||||
ORDER BY deleted_at ASC
|
||||
LIMIT ?
|
||||
FOR UPDATE
|
||||
SKIP LOCKED")
|
||||
|
||||
(defn delete-file-object-thumbnails!
|
||||
[{:keys [::db/conn ::min-age ::chunk-size ::sto/storage] :as cfg}]
|
||||
(->> (db/plan conn [sql:get-file-object-thumbnails min-age chunk-size] {:fetch-size 5})
|
||||
[{:keys [::db/conn ::deletion-threshold ::chunk-size ::sto/storage] :as cfg}]
|
||||
(->> (db/plan conn [sql:get-file-object-thumbnails deletion-threshold chunk-size] {:fetch-size 5})
|
||||
(reduce (fn [total {:keys [file-id object-id media-id deleted-at]}]
|
||||
(l/trc :hint "permanently delete"
|
||||
:rel "file-tagged-object-thumbnail"
|
||||
@@ -214,15 +213,15 @@
|
||||
"SELECT file_id, id, deleted_at, data_ref_id
|
||||
FROM file_data_fragment
|
||||
WHERE deleted_at IS NOT NULL
|
||||
AND deleted_at < now() - ?::interval
|
||||
AND deleted_at < now() + ?::interval
|
||||
ORDER BY deleted_at ASC
|
||||
LIMIT ?
|
||||
FOR UPDATE
|
||||
SKIP LOCKED")
|
||||
|
||||
(defn- delete-file-data-fragments!
|
||||
[{:keys [::db/conn ::sto/storage ::min-age ::chunk-size] :as cfg}]
|
||||
(->> (db/plan conn [sql:get-file-data-fragments min-age chunk-size] {:fetch-size 5})
|
||||
[{:keys [::db/conn ::sto/storage ::deletion-threshold ::chunk-size] :as cfg}]
|
||||
(->> (db/plan conn [sql:get-file-data-fragments deletion-threshold chunk-size] {:fetch-size 5})
|
||||
(reduce (fn [total {:keys [file-id id deleted-at data-ref-id]}]
|
||||
(l/trc :hint "permanently delete"
|
||||
:rel "file-data-fragment"
|
||||
@@ -240,15 +239,15 @@
|
||||
"SELECT id, file_id, media_id, thumbnail_id, deleted_at
|
||||
FROM file_media_object
|
||||
WHERE deleted_at IS NOT NULL
|
||||
AND deleted_at < now() - ?::interval
|
||||
AND deleted_at < now() + ?::interval
|
||||
ORDER BY deleted_at ASC
|
||||
LIMIT ?
|
||||
FOR UPDATE
|
||||
SKIP LOCKED")
|
||||
|
||||
(defn- delete-file-media-objects!
|
||||
[{:keys [::db/conn ::min-age ::chunk-size ::sto/storage] :as cfg}]
|
||||
(->> (db/plan conn [sql:get-file-media-objects min-age chunk-size] {:fetch-size 5})
|
||||
[{:keys [::db/conn ::deletion-threshold ::chunk-size ::sto/storage] :as cfg}]
|
||||
(->> (db/plan conn [sql:get-file-media-objects deletion-threshold chunk-size] {:fetch-size 5})
|
||||
(reduce (fn [total {:keys [id file-id deleted-at] :as fmo}]
|
||||
(l/trc :hint "permanently delete"
|
||||
:rel "file-media-object"
|
||||
@@ -269,15 +268,15 @@
|
||||
"SELECT id, file_id, deleted_at, data_backend, data_ref_id
|
||||
FROM file_change
|
||||
WHERE deleted_at IS NOT NULL
|
||||
AND deleted_at < now() - ?::interval
|
||||
AND deleted_at < now() + ?::interval
|
||||
ORDER BY deleted_at ASC
|
||||
LIMIT ?
|
||||
FOR UPDATE
|
||||
SKIP LOCKED")
|
||||
|
||||
(defn- delete-file-changes!
|
||||
[{:keys [::db/conn ::min-age ::chunk-size ::sto/storage] :as cfg}]
|
||||
(->> (db/plan conn [sql:get-file-change min-age chunk-size] {:fetch-size 5})
|
||||
[{:keys [::db/conn ::deletion-threshold ::chunk-size ::sto/storage] :as cfg}]
|
||||
(->> (db/plan conn [sql:get-file-change deletion-threshold chunk-size] {:fetch-size 5})
|
||||
(reduce (fn [total {:keys [id file-id deleted-at] :as xlog}]
|
||||
(l/trc :hint "permanently delete"
|
||||
:rel "file-change"
|
||||
@@ -324,16 +323,13 @@
|
||||
|
||||
(defmethod ig/expand-key ::handler
|
||||
[k v]
|
||||
{k (assoc v
|
||||
::min-age (cf/get-deletion-delay)
|
||||
::chunk-size 100)})
|
||||
{k (assoc v ::chunk-size 100)})
|
||||
|
||||
(defmethod ig/init-key ::handler
|
||||
[_ cfg]
|
||||
(fn [{:keys [props] :as task}]
|
||||
(let [min-age (dt/duration (or (:min-age props) (::min-age cfg)))
|
||||
cfg (assoc cfg ::min-age (db/interval min-age))]
|
||||
|
||||
(let [threshold (dt/duration (get props :deletion-threshold 0))
|
||||
cfg (assoc cfg ::deletion-threshold (db/interval threshold))]
|
||||
(loop [procs (map deref deletion-proc-vars)
|
||||
total 0]
|
||||
(if-let [proc-fn (first procs)]
|
||||
|
||||
@@ -222,7 +222,7 @@
|
||||
([params]
|
||||
(mark-file-deleted* *system* params))
|
||||
([conn {:keys [id] :as params}]
|
||||
(#'files/mark-file-deleted conn id)))
|
||||
(#'files/mark-file-deleted conn {} id)))
|
||||
|
||||
(defn create-team*
|
||||
([i params] (create-team* *system* i params))
|
||||
|
||||
@@ -8,10 +8,10 @@
|
||||
(:require
|
||||
[app.common.features :as cfeat]
|
||||
[app.common.pprint :as pp]
|
||||
[app.common.pprint :as pp]
|
||||
[app.common.thumbnails :as thc]
|
||||
[app.common.types.shape :as cts]
|
||||
[app.common.uuid :as uuid]
|
||||
[app.config :as cf]
|
||||
[app.db :as db]
|
||||
[app.db.sql :as sql]
|
||||
[app.http :as http]
|
||||
@@ -123,8 +123,27 @@
|
||||
:components-v2 true}
|
||||
out (th/command! data)]
|
||||
|
||||
;; (th/print-result! out)
|
||||
|
||||
;; (th/print-result! out)
|
||||
(t/is (nil? (:error out)))
|
||||
|
||||
(let [result (:result out)]
|
||||
(t/is (some? (:deleted-at result)))
|
||||
(t/is (= file-id (:id result)))
|
||||
(t/is (= "new name" (:name result)))
|
||||
(t/is (= 1 (count (get-in result [:data :pages]))))
|
||||
(t/is (nil? (:users result))))))
|
||||
|
||||
(th/db-update! :file
|
||||
{:deleted-at (dt/now)}
|
||||
{:id file-id})
|
||||
|
||||
(t/testing "query single file after delete and wait"
|
||||
(let [data {::th/type :get-file
|
||||
::rpc/profile-id (:id prof)
|
||||
:id file-id
|
||||
:components-v2 true}
|
||||
out (th/command! data)]
|
||||
(let [error (:error out)
|
||||
error-data (ex-data error)]
|
||||
(t/is (th/ex-info? error))
|
||||
@@ -195,7 +214,7 @@
|
||||
(t/is (= 5 (count rows))))
|
||||
|
||||
;; The objects-gc should remove unused fragments
|
||||
(let [res (th/run-task! :objects-gc {:min-age 0})]
|
||||
(let [res (th/run-task! :objects-gc {})]
|
||||
(t/is (= 3 (:processed res))))
|
||||
|
||||
;; Check the number of fragments
|
||||
@@ -230,7 +249,7 @@
|
||||
(t/is (true? (th/run-task! :file-gc {:min-age 0 :file-id (:id file)})))
|
||||
|
||||
;; The objects-gc should remove unused fragments
|
||||
(let [res (th/run-task! :objects-gc {:min-age 0})]
|
||||
(let [res (th/run-task! :objects-gc {})]
|
||||
(t/is (= 3 (:processed res))))
|
||||
|
||||
;; Check the number of fragments;
|
||||
@@ -254,7 +273,7 @@
|
||||
(t/is (= 4 (count rows)))
|
||||
(t/is (= 2 (count (remove (comp some? :deleted-at) rows)))))
|
||||
|
||||
(let [res (th/run-task! :objects-gc {:min-age 0})]
|
||||
(let [res (th/run-task! :objects-gc {})]
|
||||
(t/is (= 2 (:processed res))))
|
||||
|
||||
(let [rows (th/db-query :file-data-fragment {:file-id (:id file)})]
|
||||
@@ -355,7 +374,7 @@
|
||||
(t/is (= 2 (count rows)))
|
||||
(t/is (= 1 (count (remove (comp some? :deleted-at) rows)))))
|
||||
|
||||
(let [res (th/run-task! :objects-gc {:min-age 0})]
|
||||
(let [res (th/run-task! :objects-gc {})]
|
||||
(t/is (= 3 (:processed res))))
|
||||
|
||||
;; check file media objects
|
||||
@@ -386,7 +405,7 @@
|
||||
|
||||
;; This only clears fragments, the file media objects still referenced because
|
||||
;; snapshots are preserved
|
||||
(let [res (th/run-task! :objects-gc {:min-age 0})]
|
||||
(let [res (th/run-task! :objects-gc {})]
|
||||
(t/is (= 2 (:processed res))))
|
||||
|
||||
;; Mark all snapshots to be a non-snapshot file change
|
||||
@@ -395,7 +414,7 @@
|
||||
|
||||
;; Rerun the file-gc and objects-gc
|
||||
(t/is (true? (th/run-task! :file-gc {:min-age 0 :file-id (:id file)})))
|
||||
(let [res (th/run-task! :objects-gc {:min-age 0})]
|
||||
(let [res (th/run-task! :objects-gc {})]
|
||||
(t/is (= 2 (:processed res))))
|
||||
|
||||
;; Now that file-gc have deleted the file-media-object usage,
|
||||
@@ -508,7 +527,7 @@
|
||||
;; run the task again
|
||||
(t/is (true? (th/run-task! :file-gc {:min-age 0 :file-id (:id file)})))
|
||||
|
||||
(let [res (th/run-task! :objects-gc {:min-age 0})]
|
||||
(let [res (th/run-task! :objects-gc {})]
|
||||
(t/is (= 2 (:processed res))))
|
||||
|
||||
(let [rows (th/db-query :file-data-fragment {:file-id (:id file)
|
||||
@@ -550,7 +569,7 @@
|
||||
|
||||
;; This only removes unused fragments, file media are still
|
||||
;; referenced on snapshots.
|
||||
(let [res (th/run-task! :objects-gc {:min-age 0})]
|
||||
(let [res (th/run-task! :objects-gc {})]
|
||||
(t/is (= 2 (:processed res))))
|
||||
|
||||
;; Mark all snapshots to be a non-snapshot file change
|
||||
@@ -560,7 +579,7 @@
|
||||
;; Rerun file-gc and objects-gc task for the same file once all snapshots are
|
||||
;; "expired/deleted"
|
||||
(t/is (true? (th/run-task! :file-gc {:min-age 0 :file-id (:id file)})))
|
||||
(let [res (th/run-task! :objects-gc {:min-age 0})]
|
||||
(let [res (th/run-task! :objects-gc {})]
|
||||
(t/is (= 6 (:processed res))))
|
||||
|
||||
(let [rows (th/db-query :file-data-fragment {:file-id (:id file)
|
||||
@@ -712,7 +731,7 @@
|
||||
;; Now that file-gc have marked for deletion the object
|
||||
;; thumbnail lets execute the objects-gc task which remove
|
||||
;; the rows and mark as touched the storage object rows
|
||||
(let [res (th/run-task! :objects-gc {:min-age 0})]
|
||||
(let [res (th/run-task! :objects-gc {})]
|
||||
(t/is (= 5 (:processed res))))
|
||||
|
||||
;; Now that objects-gc have deleted the object thumbnail lets
|
||||
@@ -741,7 +760,7 @@
|
||||
(t/is (= 1 (count rows)))
|
||||
(t/is (= 0 (count (remove (comp some? :deleted-at) rows)))))
|
||||
|
||||
(let [res (th/run-task! :objects-gc {:min-age 0})]
|
||||
(let [res (th/run-task! :objects-gc {})]
|
||||
;; (pp/pprint res)
|
||||
(t/is (= 3 (:processed res))))
|
||||
|
||||
@@ -876,7 +895,7 @@
|
||||
:profile-id (:id profile1)})]
|
||||
;; file is not deleted because it does not meet all
|
||||
;; conditions to be deleted.
|
||||
(let [result (th/run-task! :objects-gc {:min-age 0})]
|
||||
(let [result (th/run-task! :objects-gc {})]
|
||||
(t/is (= 0 (:processed result))))
|
||||
|
||||
;; query the list of files
|
||||
@@ -907,7 +926,7 @@
|
||||
(t/is (= 0 (count result)))))
|
||||
|
||||
;; run permanent deletion (should be noop)
|
||||
(let [result (th/run-task! :objects-gc {:min-age (dt/duration {:minutes 1})})]
|
||||
(let [result (th/run-task! :objects-gc {})]
|
||||
(t/is (= 0 (:processed result))))
|
||||
|
||||
;; query the list of file libraries of a after hard deletion
|
||||
@@ -921,7 +940,7 @@
|
||||
(t/is (= 0 (count result)))))
|
||||
|
||||
;; run permanent deletion
|
||||
(let [result (th/run-task! :objects-gc {:min-age 0})]
|
||||
(let [result (th/run-task! :objects-gc {:deletion-threshold (cf/get-deletion-delay)})]
|
||||
(t/is (= 1 (:processed result))))
|
||||
|
||||
;; query the list of file libraries of a after hard deletion
|
||||
@@ -1176,7 +1195,7 @@
|
||||
(t/is (= 2 (count rows)))
|
||||
(t/is (= 1 (count (remove :deleted-at rows)))))
|
||||
|
||||
(let [res (th/run-task! :objects-gc {:min-age 0})]
|
||||
(let [res (th/run-task! :objects-gc {})]
|
||||
(t/is (= 4 (:processed res))))
|
||||
|
||||
(let [rows (th/db-query :file-tagged-object-thumbnail {:file-id (:id file)})]
|
||||
@@ -1232,7 +1251,7 @@
|
||||
(t/is (= 2 (count rows)))
|
||||
(t/is (= 1 (count (remove (comp some? :deleted-at) rows)))))
|
||||
|
||||
(let [res (th/run-task! :objects-gc {:min-age 0})]
|
||||
(let [res (th/run-task! :objects-gc {})]
|
||||
(t/is (= 2 (:processed res))))
|
||||
|
||||
(let [rows (th/db-query :file-thumbnail {:file-id (:id file)})]
|
||||
@@ -1251,7 +1270,7 @@
|
||||
(t/is (true? (th/run-task! :file-gc {:min-age 0 :file-id (:id file)})))
|
||||
|
||||
;; Preventive objects-gc
|
||||
(let [result (th/run-task! :objects-gc {:min-age 0})]
|
||||
(let [result (th/run-task! :objects-gc {})]
|
||||
(t/is (= 1 (:processed result))))
|
||||
|
||||
;; Check the number of fragments before adding the page
|
||||
@@ -1272,7 +1291,7 @@
|
||||
(th/run-pending-tasks!))
|
||||
|
||||
;; Clean objects after file-gc
|
||||
(let [result (th/run-task! :objects-gc {:min-age 0})]
|
||||
(let [result (th/run-task! :objects-gc {})]
|
||||
(t/is (= 1 (:processed result))))
|
||||
|
||||
;; Check the number of fragments before adding the page
|
||||
@@ -1324,7 +1343,7 @@
|
||||
(t/is (true? (th/run-task! :file-gc {:min-age 0 :file-id (:id file)})))
|
||||
|
||||
;; The objects-gc should remove unused fragments
|
||||
(let [res (th/run-task! :objects-gc {:min-age 0})]
|
||||
(let [res (th/run-task! :objects-gc {})]
|
||||
(t/is (= 2 (:processed res))))
|
||||
|
||||
;; Check the number of fragments before adding the page
|
||||
@@ -1712,6 +1731,7 @@
|
||||
[{:fill-image
|
||||
{:id (:id fmedia)
|
||||
:name "test"
|
||||
:mtype "image/jpeg"
|
||||
:width 200
|
||||
:height 200}}]]
|
||||
|
||||
@@ -1820,8 +1840,7 @@
|
||||
(t/is (= (:id file-2) (:file-id (get rows 1))))
|
||||
(t/is (nil? (:deleted-at (get rows 1)))))
|
||||
|
||||
(th/run-task! :objects-gc
|
||||
{:min-age 0})
|
||||
(th/run-task! :objects-gc {})
|
||||
|
||||
(let [rows (th/db-exec! ["SELECT * FROM file_media_object ORDER BY created_at ASC"])]
|
||||
(t/is (= 1 (count rows)))
|
||||
|
||||
@@ -7,6 +7,7 @@
|
||||
(ns backend-tests.rpc-font-test
|
||||
(:require
|
||||
[app.common.uuid :as uuid]
|
||||
[app.config :as cf]
|
||||
[app.db :as db]
|
||||
[app.http :as http]
|
||||
[app.rpc :as-alias rpc]
|
||||
@@ -144,7 +145,7 @@
|
||||
(t/is (= 0 (:freeze res)))
|
||||
(t/is (= 0 (:delete res))))
|
||||
|
||||
(let [res (th/run-task! :objects-gc {:min-age 0})]
|
||||
(let [res (th/run-task! :objects-gc {:deletion-threshold (cf/get-deletion-delay)})]
|
||||
(t/is (= 2 (:processed res))))
|
||||
|
||||
(let [res (th/run-task! :storage-gc-touched {:min-age 0})]
|
||||
@@ -204,7 +205,7 @@
|
||||
(t/is (= 0 (:freeze res)))
|
||||
(t/is (= 0 (:delete res))))
|
||||
|
||||
(let [res (th/run-task! :objects-gc {:min-age 0})]
|
||||
(let [res (th/run-task! :objects-gc {:deletion-threshold (cf/get-deletion-delay)})]
|
||||
(t/is (= 1 (:processed res))))
|
||||
|
||||
(let [res (th/run-task! :storage-gc-touched {:min-age 0})]
|
||||
@@ -263,7 +264,7 @@
|
||||
(t/is (= 0 (:freeze res)))
|
||||
(t/is (= 0 (:delete res))))
|
||||
|
||||
(let [res (th/run-task! :objects-gc {:min-age 0})]
|
||||
(let [res (th/run-task! :objects-gc {:deletion-threshold (cf/get-deletion-delay)})]
|
||||
(t/is (= 1 (:processed res))))
|
||||
|
||||
(let [res (th/run-task! :storage-gc-touched {:min-age 0})]
|
||||
|
||||
@@ -209,16 +209,16 @@
|
||||
::rpc/profile-id (:id prof1)
|
||||
:id (:id team1)}
|
||||
out (th/command! params)]
|
||||
;; (th/print-result! out)
|
||||
;; (th/print-result! out)
|
||||
|
||||
(let [team (th/db-get :team {:id (:id team1)} {::db/remove-deleted false})]
|
||||
(t/is (dt/instant? (:deleted-at team)))))
|
||||
|
||||
;; Request profile to be deleted
|
||||
;; Request profile to be deleted
|
||||
(let [params {::th/type :delete-profile
|
||||
::rpc/profile-id (:id prof1)}
|
||||
out (th/command! params)]
|
||||
;; (th/print-result! out)
|
||||
;; (th/print-result! out)
|
||||
(t/is (nil? (:result out)))
|
||||
(t/is (nil? (:error out)))))))
|
||||
|
||||
|
||||
@@ -7,6 +7,7 @@
|
||||
(ns backend-tests.rpc-project-test
|
||||
(:require
|
||||
[app.common.uuid :as uuid]
|
||||
[app.config :as cf]
|
||||
[app.db :as db]
|
||||
[app.http :as http]
|
||||
[app.rpc :as-alias rpc]
|
||||
@@ -178,7 +179,7 @@
|
||||
|
||||
;; project is not deleted because it does not meet all
|
||||
;; conditions to be deleted.
|
||||
(let [result (th/run-task! :objects-gc {:min-age 0})]
|
||||
(let [result (th/run-task! :objects-gc {})]
|
||||
(t/is (= 0 (:processed result))))
|
||||
|
||||
;; query the list of projects
|
||||
@@ -210,7 +211,7 @@
|
||||
(t/is (= 1 (count result)))))
|
||||
|
||||
;; run permanent deletion (should be noop)
|
||||
(let [result (th/run-task! :objects-gc {:min-age (dt/duration {:minutes 1})})]
|
||||
(let [result (th/run-task! :objects-gc {})]
|
||||
(t/is (= 0 (:processed result))))
|
||||
|
||||
;; query the list of files of a after soft deletion
|
||||
@@ -224,7 +225,7 @@
|
||||
(t/is (= 0 (count result)))))
|
||||
|
||||
;; run permanent deletion
|
||||
(let [result (th/run-task! :objects-gc {:min-age 0})]
|
||||
(let [result (th/run-task! :objects-gc {:deletion-threshold (cf/get-deletion-delay)})]
|
||||
(t/is (= 1 (:processed result))))
|
||||
|
||||
;; query the list of files of a after hard deletion
|
||||
|
||||
@@ -8,6 +8,7 @@
|
||||
(:require
|
||||
[app.common.logging :as l]
|
||||
[app.common.uuid :as uuid]
|
||||
[app.config :as cf]
|
||||
[app.db :as db]
|
||||
[app.http :as http]
|
||||
[app.rpc :as-alias rpc]
|
||||
@@ -449,6 +450,23 @@
|
||||
(t/is (nil? res)))))
|
||||
|
||||
|
||||
(t/deftest get-owned-teams
|
||||
(let [profile1 (th/create-profile* 1 {:is-active true})
|
||||
profile2 (th/create-profile* 2 {:is-active true})
|
||||
team1 (th/create-team* 1 {:profile-id (:id profile1)})
|
||||
team2 (th/create-team* 2 {:profile-id (:id profile2)})
|
||||
|
||||
params {::th/type :get-owned-teams
|
||||
::rpc/profile-id (:id profile1)}
|
||||
out (th/command! params)]
|
||||
|
||||
(t/is (th/success? out))
|
||||
(let [result (:result out)]
|
||||
(t/is (= 1 (count result)))
|
||||
(t/is (= (:id team1) (-> result first :id)))
|
||||
(t/is (not= (:default-team-id profile1) (-> result first :id))))))
|
||||
|
||||
|
||||
(t/deftest team-deletion-1
|
||||
(let [profile1 (th/create-profile* 1 {:is-active true})
|
||||
team (th/create-team* 1 {:profile-id (:id profile1)})
|
||||
@@ -459,7 +477,7 @@
|
||||
|
||||
;; team is not deleted because it does not meet all
|
||||
;; conditions to be deleted.
|
||||
(let [result (th/run-task! :objects-gc {:min-age (dt/duration 0)})]
|
||||
(let [result (th/run-task! :objects-gc {})]
|
||||
(t/is (= 0 (:processed result))))
|
||||
|
||||
;; query the list of teams
|
||||
@@ -493,7 +511,7 @@
|
||||
(th/run-pending-tasks!)
|
||||
|
||||
;; run permanent deletion (should be noop)
|
||||
(let [result (th/run-task! :objects-gc {:min-age (dt/duration {:minutes 1})})]
|
||||
(let [result (th/run-task! :objects-gc {})]
|
||||
(t/is (= 0 (:processed result))))
|
||||
|
||||
;; query the list of projects after hard deletion
|
||||
@@ -507,7 +525,7 @@
|
||||
(t/is (= :not-found (:type edata)))))
|
||||
|
||||
;; run permanent deletion
|
||||
(let [result (th/run-task! :objects-gc {:min-age (dt/duration 0)})]
|
||||
(let [result (th/run-task! :objects-gc {:deletion-threshold (cf/get-deletion-delay)})]
|
||||
(t/is (= 2 (:processed result))))
|
||||
|
||||
;; query the list of projects of a after hard deletion
|
||||
@@ -521,7 +539,6 @@
|
||||
(let [edata (-> out :error ex-data)]
|
||||
(t/is (= :not-found (:type edata)))))))
|
||||
|
||||
|
||||
(t/deftest team-deletion-2
|
||||
(let [storage (-> (:app.storage/storage th/*system*)
|
||||
(assoc ::sto/backend :assets-fs))
|
||||
@@ -564,7 +581,7 @@
|
||||
(t/is (= 1 (count rows)))
|
||||
(t/is (dt/instant? (:deleted-at (first rows)))))
|
||||
|
||||
(let [result (th/run-task! :objects-gc {:min-age 0})]
|
||||
(let [result (th/run-task! :objects-gc {:deletion-threshold (cf/get-deletion-delay)})]
|
||||
(t/is (= 5 (:processed result))))))
|
||||
|
||||
(t/deftest create-team-access-request
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
{org.clojure/clojure {:mvn/version "1.12.0"}
|
||||
org.clojure/data.json {:mvn/version "2.5.1"}
|
||||
org.clojure/tools.cli {:mvn/version "1.1.230"}
|
||||
org.clojure/clojurescript {:mvn/version "1.11.132"}
|
||||
org.clojure/clojurescript {:mvn/version "1.12.38"}
|
||||
org.clojure/test.check {:mvn/version "1.1.1"}
|
||||
org.clojure/data.fressian {:mvn/version "1.1.0"}
|
||||
|
||||
@@ -12,14 +12,14 @@
|
||||
org.apache.logging.log4j/log4j-web {:mvn/version "2.24.3"}
|
||||
org.apache.logging.log4j/log4j-jul {:mvn/version "2.24.3"}
|
||||
org.apache.logging.log4j/log4j-slf4j2-impl {:mvn/version "2.24.3"}
|
||||
org.slf4j/slf4j-api {:mvn/version "2.0.16"}
|
||||
org.slf4j/slf4j-api {:mvn/version "2.0.17"}
|
||||
pl.tkowalcz.tjahzi/log4j2-appender {:mvn/version "0.9.32"}
|
||||
|
||||
selmer/selmer {:mvn/version "1.12.61"}
|
||||
selmer/selmer {:mvn/version "1.12.62"}
|
||||
criterium/criterium {:mvn/version "0.4.6"}
|
||||
|
||||
metosin/jsonista {:mvn/version "0.3.13"}
|
||||
metosin/malli {:mvn/version "0.17.0"}
|
||||
metosin/malli {:mvn/version "0.18.0"}
|
||||
|
||||
expound/expound {:mvn/version "0.9.0"}
|
||||
com.cognitect/transit-clj {:mvn/version "1.0.333"}
|
||||
@@ -28,9 +28,9 @@
|
||||
integrant/integrant {:mvn/version "0.13.1"}
|
||||
|
||||
funcool/tubax {:mvn/version "2021.05.20-0"}
|
||||
funcool/cuerdas {:mvn/version "2023.11.09-407"}
|
||||
funcool/cuerdas {:mvn/version "2025.05.26-411"}
|
||||
funcool/promesa
|
||||
{:git/sha "0c5ed6ad033515a2df4b55addea044f60e9653d0"
|
||||
{:git/sha "f52f58cfacf62f59eab717e2637f37729d0cc383"
|
||||
:git/url "https://github.com/funcool/promesa"}
|
||||
|
||||
funcool/datoteka
|
||||
@@ -59,7 +59,7 @@
|
||||
{:dev
|
||||
{:extra-deps
|
||||
{org.clojure/tools.namespace {:mvn/version "RELEASE"}
|
||||
thheller/shadow-cljs {:mvn/version "2.28.20"}
|
||||
thheller/shadow-cljs {:mvn/version "3.0.5"}
|
||||
com.clojure-goes-fast/clj-async-profiler {:mvn/version "RELEASE"}
|
||||
com.bhauman/rebel-readline {:mvn/version "RELEASE"}
|
||||
criterium/criterium {:mvn/version "RELEASE"}
|
||||
@@ -68,7 +68,7 @@
|
||||
|
||||
:build
|
||||
{:extra-deps
|
||||
{io.github.clojure/tools.build {:git/tag "v0.10.6" :git/sha "52cf7d6"}}
|
||||
{io.github.clojure/tools.build {:git/tag "v0.10.9" :git/sha "e405aac"}}
|
||||
:ns-default build}
|
||||
|
||||
:test
|
||||
@@ -76,9 +76,9 @@
|
||||
:extra-deps {lambdaisland/kaocha {:mvn/version "1.91.1392"}}}
|
||||
|
||||
:shadow-cljs
|
||||
{:main-opts ["-m" "shadow.cljs.devtools.cli"]}
|
||||
{:main-opts ["-m" "shadow.cljs.devtools.cli"]
|
||||
:jvm-opts ["--sun-misc-unsafe-memory-access=allow"]}
|
||||
|
||||
:outdated
|
||||
{:extra-deps {com.github.liquidz/antq {:mvn/version "RELEASE"}}
|
||||
:main-opts ["-m" "antq.core"]}}}
|
||||
|
||||
|
||||
@@ -4,20 +4,19 @@
|
||||
"license": "MPL-2.0",
|
||||
"author": "Kaleidos INC",
|
||||
"private": true,
|
||||
"packageManager": "yarn@4.8.1+sha512.bc946f2a022d7a1a38adfc15b36a66a3807a67629789496c3714dd1703d2e6c6b1c69ff9ec3b43141ac7a1dd853b7685638eb0074300386a59c18df351ef8ff6",
|
||||
"packageManager": "yarn@4.9.1+sha512.f95ce356460e05be48d66401c1ae64ef84d163dd689964962c6888a9810865e39097a5e9de748876c2e0bf89b232d583c33982773e9903ae7a76257270986538",
|
||||
"type": "module",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/penpot/penpot"
|
||||
},
|
||||
"dependencies": {
|
||||
"luxon": "^3.4.4",
|
||||
"sax": "^1.4.1"
|
||||
"luxon": "^3.4.4"
|
||||
},
|
||||
"devDependencies": {
|
||||
"concurrently": "^9.0.1",
|
||||
"nodemon": "^3.1.7",
|
||||
"shadow-cljs": "2.28.20",
|
||||
"shadow-cljs": "3.0.5",
|
||||
"source-map-support": "^0.5.21",
|
||||
"ws": "^8.17.0"
|
||||
},
|
||||
|
||||
@@ -2,16 +2,20 @@
|
||||
|
||||
export PENPOT_FLAGS="enable-asserts enable-audit-log $PENPOT_FLAGS"
|
||||
|
||||
export OPTIONS="
|
||||
-A:dev \
|
||||
-J-Djava.util.logging.manager=org.apache.logging.log4j.jul.LogManager \
|
||||
-J-Djdk.attach.allowAttachSelf \
|
||||
-J-Dpolyglot.engine.WarnInterpreterOnly=false \
|
||||
-J-XX:+EnableDynamicAgentLoading \
|
||||
-J-XX:-OmitStackTraceInFastThrow \
|
||||
-J-XX:+UnlockDiagnosticVMOptions \
|
||||
-J-XX:+DebugNonSafepoints \
|
||||
-J-Djdk.tracePinnedThreads=full"
|
||||
export JAVA_OPTS="\
|
||||
-Djava.util.logging.manager=org.apache.logging.log4j.jul.LogManager \
|
||||
-Djdk.attach.allowAttachSelf \
|
||||
-Dlog4j2.configurationFile=log4j2-devenv-repl.xml \
|
||||
-Djdk.tracePinnedThreads=full \
|
||||
-XX:+EnableDynamicAgentLoading \
|
||||
-XX:-OmitStackTraceInFastThrow \
|
||||
-XX:+UnlockDiagnosticVMOptions \
|
||||
-XX:+DebugNonSafepoints \
|
||||
--sun-misc-unsafe-memory-access=allow \
|
||||
--enable-preview \
|
||||
--enable-native-access=ALL-UNNAMED";
|
||||
|
||||
export OPTIONS="-A:dev"
|
||||
|
||||
export OPTIONS_EVAL="nil"
|
||||
# export OPTIONS_EVAL="(set! *warn-on-reflection* true)"
|
||||
|
||||
@@ -33,6 +33,12 @@
|
||||
(def boolean-or-nil?
|
||||
(some-fn nil? boolean?))
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; Commonly used transducers
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
(def xf:map-id (map :id))
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; Data Structures
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
@@ -46,6 +46,7 @@
|
||||
#{"fdata/objects-map"
|
||||
"fdata/pointer-map"
|
||||
"fdata/shape-data-type"
|
||||
"fdata/path-data"
|
||||
"components/v2"
|
||||
"styles/v2"
|
||||
"layout/grid"
|
||||
@@ -58,12 +59,18 @@
|
||||
;; A set of features enabled by default
|
||||
(def default-features
|
||||
#{"fdata/shape-data-type"
|
||||
"fdata/path-data"
|
||||
"styles/v2"
|
||||
"layout/grid"
|
||||
"components/v2"
|
||||
"plugins/runtime"
|
||||
"design-tokens/v1"})
|
||||
|
||||
;; A set of features that should not be propagated to team on creating
|
||||
;; or modifying a file
|
||||
(def no-team-inheritable-features
|
||||
#{"fdata/path-data"})
|
||||
|
||||
;; A set of features which only affects on frontend and can be enabled
|
||||
;; and disabled freely by the user any time. This features does not
|
||||
;; persist on file features field but can be permanently enabled on
|
||||
@@ -86,8 +93,9 @@
|
||||
;; without migration applied)
|
||||
(def no-migration-features
|
||||
(-> #{"layout/grid"
|
||||
"design-tokens/v1"
|
||||
"fdata/shape-data-type"
|
||||
"design-tokens/v1"}
|
||||
"fdata/path-data"}
|
||||
(into frontend-only-features)
|
||||
(into backend-only-features)))
|
||||
|
||||
|
||||
File diff suppressed because it is too large
@@ -24,6 +24,7 @@
|
||||
[app.common.types.grid :as ctg]
|
||||
[app.common.types.page :as ctp]
|
||||
[app.common.types.pages-list :as ctpl]
|
||||
[app.common.types.path :as path]
|
||||
[app.common.types.shape :as cts]
|
||||
[app.common.types.shape-tree :as ctst]
|
||||
[app.common.types.tokens-lib :as ctob]
|
||||
@@ -310,12 +311,12 @@
|
||||
[:add-media
|
||||
[:map {:title "AddMediaChange"}
|
||||
[:type [:= :add-media]]
|
||||
[:object ::ctf/media-object]]]
|
||||
[:object ctf/schema:media]]]
|
||||
|
||||
[:mod-media
|
||||
[:map {:title "ModMediaChange"}
|
||||
[:type [:= :mod-media]]
|
||||
[:object ::ctf/media-object]]]
|
||||
[:object ctf/schema:media]]]
|
||||
|
||||
[:del-media
|
||||
[:map {:title "DelMediaChange"}
|
||||
@@ -425,7 +426,12 @@
|
||||
[:type [:= :set-token]]
|
||||
[:set-name :string]
|
||||
[:token-name :string]
|
||||
[:token [:maybe ctob/schema:token-attrs]]]]]])
|
||||
[:token [:maybe ctob/schema:token-attrs]]]]
|
||||
|
||||
[:set-base-font-size
|
||||
[:map {:title "ModBaseFontSize"}
|
||||
[:type [:= :set-base-font-size]]
|
||||
[:base-font-size :string]]]]])
|
||||
|
||||
(def schema:changes
|
||||
[:sequential {:gen/max 5 :gen/min 1} schema:change])
|
||||
@@ -732,20 +738,22 @@
|
||||
|
||||
(update-group [group objects]
|
||||
(let [lookup (d/getf objects)
|
||||
children (->> group :shapes (map lookup))]
|
||||
children (get group :shapes)]
|
||||
(cond
|
||||
;; If the group is empty we don't make any changes. Will be removed by a later process
|
||||
(empty? children)
|
||||
group
|
||||
|
||||
(= :bool (:type group))
|
||||
(gsh/update-bool-selrect group children objects)
|
||||
(path/update-bool-shape group objects)
|
||||
|
||||
(:masked-group group)
|
||||
(set-mask-selrect group children)
|
||||
(->> (map lookup children)
|
||||
(set-mask-selrect group))
|
||||
|
||||
:else
|
||||
(gsh/update-group-selrect group children))))]
|
||||
(->> (map lookup children)
|
||||
(gsh/update-group-selrect group)))))]
|
||||
|
||||
(if page-id
|
||||
(d/update-in-when data [:pages-index page-id :objects] reg-objects)
|
||||
@@ -1066,6 +1074,13 @@
|
||||
(ctob/ensure-tokens-lib)
|
||||
(ctob/move-set-group from-path to-path before-path before-group))))
|
||||
|
||||
;; === Base font size
|
||||
|
||||
(defmethod process-change :set-base-font-size
|
||||
[data {:keys [base-font-size]}]
|
||||
(ctf/set-base-font-size data base-font-size))
|
||||
|
||||
|
||||
;; === Operations
|
||||
|
||||
(def ^:private decode-shape
|
||||
|
||||
@@ -8,7 +8,6 @@
|
||||
(:require
|
||||
[app.common.data :as d]
|
||||
[app.common.data.macros :as dm]
|
||||
[app.common.features :as cfeat]
|
||||
[app.common.files.changes :as cfc]
|
||||
[app.common.files.helpers :as cfh]
|
||||
[app.common.geom.matrix :as gmt]
|
||||
@@ -19,6 +18,7 @@
|
||||
[app.common.schema :as sm]
|
||||
[app.common.types.component :as ctk]
|
||||
[app.common.types.file :as ctf]
|
||||
[app.common.types.path :as path]
|
||||
[app.common.types.shape.layout :as ctl]
|
||||
[app.common.types.tokens-lib :as ctob]
|
||||
[app.common.uuid :as uuid]))
|
||||
@@ -85,8 +85,7 @@
|
||||
|
||||
(defn with-objects
|
||||
[changes objects]
|
||||
(let [fdata (binding [cfeat/*current* #{"components/v2"}]
|
||||
(ctf/make-file-data (uuid/next) uuid/zero))
|
||||
(let [fdata (ctf/make-file-data (uuid/next) uuid/zero)
|
||||
fdata (assoc-in fdata [:pages-index uuid/zero :objects] objects)]
|
||||
(vary-meta changes assoc
|
||||
::file-data fdata
|
||||
@@ -127,28 +126,41 @@
|
||||
; TODO: remove this when not needed
|
||||
(defn- assert-page-id!
|
||||
[changes]
|
||||
(dm/assert!
|
||||
"Give a page-id or call (with-page) before using this function"
|
||||
(contains? (meta changes) ::page-id)))
|
||||
(assert
|
||||
(contains? (meta changes) ::page-id)
|
||||
"Give a page-id or call (with-page) before using this function"))
|
||||
|
||||
(defn- assert-page!
|
||||
[changes]
|
||||
(assert
|
||||
(contains? (meta changes) ::page)
|
||||
"Give a page or call (with-page) before using this function"))
|
||||
|
||||
(defn- assert-container-id!
|
||||
[changes]
|
||||
(dm/assert!
|
||||
"Give a page-id or call (with-container) before using this function"
|
||||
(assert
|
||||
(or (contains? (meta changes) ::page-id)
|
||||
(contains? (meta changes) ::component-id))))
|
||||
(contains? (meta changes) ::component-id))
|
||||
"Give a page-id or call (with-container) before using this function"))
|
||||
|
||||
(defn- assert-objects!
|
||||
[changes]
|
||||
(dm/assert!
|
||||
"Call (with-objects) before using this function"
|
||||
(contains? (meta changes) ::file-data)))
|
||||
(assert
|
||||
(contains? (meta changes) ::file-data)
|
||||
"Call (with-objects) before using this function"))
|
||||
|
||||
(defn- assert-library!
|
||||
[changes]
|
||||
(dm/assert!
|
||||
"Call (with-library-data) before using this function"
|
||||
(contains? (meta changes) ::library-data)))
|
||||
(assert
|
||||
(contains? (meta changes) ::library-data)
|
||||
"Call (with-library-data) before using this function"))
|
||||
|
||||
(defn- assert-file-data!
|
||||
[changes]
|
||||
(assert
|
||||
(contains? (meta changes) ::file-data)
|
||||
"Call (with-file-data) before using this function"))
|
||||
|
||||
|
||||
(defn- lookup-objects
|
||||
[changes]
|
||||
@@ -157,9 +169,9 @@
|
||||
|
||||
(defn apply-changes-local
|
||||
[changes & {:keys [apply-to-library?]}]
|
||||
(dm/assert!
|
||||
"expected valid changes"
|
||||
(check-changes! changes))
|
||||
(assert
|
||||
(check-changes! changes)
|
||||
"expected valid changes")
|
||||
|
||||
(if-let [file-data (::file-data (meta changes))]
|
||||
(let [library-data (::library-data (meta changes))
|
||||
@@ -198,6 +210,7 @@
|
||||
|
||||
(defn mod-page
|
||||
([changes options]
|
||||
(assert-page! changes)
|
||||
(let [page (::page (meta changes))]
|
||||
(mod-page changes page options)))
|
||||
|
||||
@@ -228,6 +241,7 @@
|
||||
([changes type id namespace key value]
|
||||
(set-plugin-data changes type id nil namespace key value))
|
||||
([changes type id page-id namespace key value]
|
||||
(assert-file-data! changes)
|
||||
(let [data (::file-data (meta changes))
|
||||
old-val
|
||||
(case type
|
||||
@@ -294,6 +308,8 @@
|
||||
|
||||
(defn set-guide
|
||||
[changes id guide]
|
||||
(assert-page-id! changes)
|
||||
(assert-page! changes)
|
||||
(let [page-id (::page-id (meta changes))
|
||||
page (::page (meta changes))
|
||||
old-val (dm/get-in page [:guides id])]
|
||||
@@ -307,8 +323,11 @@
|
||||
:page-id page-id
|
||||
:id id
|
||||
:params old-val}))))
|
||||
|
||||
(defn set-flow
|
||||
[changes id flow]
|
||||
(assert-page-id! changes)
|
||||
(assert-page! changes)
|
||||
(let [page-id (::page-id (meta changes))
|
||||
page (::page (meta changes))
|
||||
old-val (dm/get-in page [:flows id])
|
||||
@@ -327,6 +346,8 @@
|
||||
|
||||
(defn set-comment-thread-position
|
||||
[changes {:keys [id frame-id position] :as thread}]
|
||||
(assert-page-id! changes)
|
||||
(assert-page! changes)
|
||||
(let [page-id (::page-id (meta changes))
|
||||
page (::page (meta changes))
|
||||
|
||||
@@ -348,6 +369,8 @@
|
||||
|
||||
(defn set-default-grid
|
||||
[changes type params]
|
||||
(assert-page-id! changes)
|
||||
(assert-page! changes)
|
||||
(let [page-id (::page-id (meta changes))
|
||||
page (::page (meta changes))
|
||||
old-val (dm/get-in page [:grids type])
|
||||
@@ -481,9 +504,12 @@
|
||||
(let [old-val (get old attr)
|
||||
new-val (get new attr)]
|
||||
(not= old-val new-val)))
|
||||
new-obj (if with-objects?
|
||||
(update-fn object objects)
|
||||
(update-fn object))]
|
||||
|
||||
new-obj
|
||||
(if with-objects?
|
||||
(update-fn object objects)
|
||||
(update-fn object))]
|
||||
|
||||
(when-not (= object new-obj)
|
||||
(let [attrs (or attrs (d/concat-set (keys object) (keys new-obj)))]
|
||||
(filter (partial changed? object new-obj) attrs)))))
|
||||
@@ -498,6 +524,7 @@
|
||||
:or {ignore-geometry? false ignore-touched false with-objects? false}}]
|
||||
(assert-container-id! changes)
|
||||
(assert-objects! changes)
|
||||
(assert-page-id! changes)
|
||||
(let [page-id (::page-id (meta changes))
|
||||
component-id (::component-id (meta changes))
|
||||
objects (lookup-objects changes)
|
||||
@@ -659,10 +686,14 @@
|
||||
(empty? children) ;; a parent with no children will be deleted,
|
||||
nil ;; so it does not need resize
|
||||
|
||||
(= (:type parent) :bool)
|
||||
(gsh/update-bool-selrect parent children objects)
|
||||
(cfh/bool-shape? parent)
|
||||
(path/update-bool-shape parent objects)
|
||||
|
||||
(= (:type parent) :group)
|
||||
(cfh/group-shape? parent)
|
||||
;; FIXME: this functions should be
|
||||
;; normalized in the same way as
|
||||
;; update-bool in order to make all
|
||||
;; this code consistent
|
||||
(if (:masked-group parent)
|
||||
(gsh/update-mask-selrect parent children)
|
||||
(gsh/update-group-selrect parent children)))]
|
||||
@@ -842,6 +873,7 @@
|
||||
|
||||
(defn set-tokens-lib
|
||||
[changes tokens-lib]
|
||||
(assert-library! changes)
|
||||
(let [library-data (::library-data (meta changes))
|
||||
prev-tokens-lib (get library-data :tokens-lib)]
|
||||
(-> changes
|
||||
@@ -1131,3 +1163,16 @@
|
||||
(defn get-page-id
|
||||
[changes]
|
||||
(::page-id (meta changes)))
|
||||
|
||||
(defn set-base-font-size
[changes new-base-font-size]
(assert-file-data! changes)
(let [file-data (::file-data (meta changes))
previous-font-size (ctf/get-base-font-size file-data)]
(-> changes
(update :redo-changes conj {:type :set-base-font-size
:base-font-size new-base-font-size})

(update :undo-changes conj {:type :set-base-font-size
:base-font-size previous-font-size})
(apply-changes-local))))
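
;; Sketch, assuming a changes-builder value `changes` already primed with
;; (with-file-data ...); "18" is a hypothetical base font size value. The
;; redo entry carries the new value and the undo entry the previous one,
;; so applying and then reverting restores the original size.
(comment
  (-> changes
      (set-base-font-size "18")
      (select-keys [:redo-changes :undo-changes])))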
|
||||
|
||||
@@ -626,6 +626,9 @@
|
||||
(map? (:fill-image form))
|
||||
(update-in [:fill-image :id] lookup-index)
|
||||
|
||||
(map? (:stroke-image form))
|
||||
(update-in [:stroke-image :id] lookup-index)
|
||||
|
||||
;; This covers old shapes and the new :fills.
|
||||
(uuid? (:fill-color-ref-file form))
|
||||
(update :fill-color-ref-file lookup-index)
|
||||
|
||||
@@ -16,7 +16,6 @@
|
||||
[app.common.geom.point :as gpt]
|
||||
[app.common.geom.rect :as grc]
|
||||
[app.common.geom.shapes :as gsh]
|
||||
[app.common.geom.shapes.path :as gsp]
|
||||
[app.common.geom.shapes.text :as gsht]
|
||||
[app.common.logging :as l]
|
||||
[app.common.math :as mth]
|
||||
@@ -27,9 +26,12 @@
|
||||
[app.common.types.component :as ctk]
|
||||
[app.common.types.container :as ctn]
|
||||
[app.common.types.file :as ctf]
|
||||
[app.common.types.path :as path]
|
||||
[app.common.types.path.segment :as path.segment]
|
||||
[app.common.types.shape :as cts]
|
||||
[app.common.types.shape.interactions :as ctsi]
|
||||
[app.common.types.shape.shadow :as ctss]
|
||||
[app.common.types.text :as cttx]
|
||||
[app.common.uuid :as uuid]
|
||||
[clojure.set :as set]
|
||||
[cuerdas.core :as str]))
|
||||
@@ -57,18 +59,21 @@
|
||||
(map :name))
|
||||
|
||||
(defn migrate
|
||||
[{:keys [id] :as file}]
|
||||
[{:keys [id] :as file} libs]
|
||||
|
||||
(let [diff
|
||||
(set/difference available-migrations (:migrations file))
|
||||
|
||||
data (-> (:data file)
|
||||
(assoc :libs libs))
|
||||
|
||||
data
|
||||
(reduce migrate-data (:data file) diff)
|
||||
(reduce migrate-data data diff)
|
||||
|
||||
data
|
||||
(-> data
|
||||
(assoc :id id)
|
||||
(dissoc :version))]
|
||||
(dissoc :version :libs))]
|
||||
|
||||
(-> file
|
||||
(assoc :data data)
|
||||
@@ -87,7 +92,7 @@
|
||||
result))
|
||||
|
||||
(defn migrate-file
|
||||
[file]
|
||||
[file libs]
|
||||
(binding [cfeat/*new* (atom #{})]
|
||||
(let [version (or (:version file)
|
||||
(-> file :data :version))]
|
||||
@@ -98,13 +103,13 @@
|
||||
(if (nil? migrations)
|
||||
(generate-migrations-from-version version)
|
||||
migrations)))
|
||||
(update :features (fnil into #{}) (deref cfeat/*new*))
|
||||
;; NOTE: in some future we can consider to apply
|
||||
;; a migration to the whole database and remove
|
||||
;; this code from this function that executes on
|
||||
;; each file migration operation
|
||||
(update :features cfeat/migrate-legacy-features)
|
||||
(migrate)))))
|
||||
(migrate libs)
|
||||
(update :features (fnil into #{}) (deref cfeat/*new*))))))
|
||||
|
||||
(defn migrated?
|
||||
[file]
|
||||
@@ -129,8 +134,8 @@
|
||||
[data _]
|
||||
(letfn [(migrate-path [shape]
|
||||
(if-not (contains? shape :content)
|
||||
(let [content (gsp/segments->content (:segments shape) (:close? shape))
|
||||
selrect (gsh/content->selrect content)
|
||||
(let [content (path.segment/points->content (:segments shape) :close (:close? shape))
|
||||
selrect (path.segment/content->selrect content)
|
||||
points (grc/rect->points selrect)]
|
||||
(-> shape
|
||||
(dissoc :segments)
|
||||
@@ -201,7 +206,7 @@
|
||||
(if (= (:type shape) :path)
|
||||
(let [{:keys [width height]} (grc/points->rect (:points shape))]
|
||||
(if (or (mth/almost-zero? width) (mth/almost-zero? height))
|
||||
(let [selrect (gsh/content->selrect (:content shape))
|
||||
(let [selrect (path.segment/content->selrect (:content shape))
|
||||
points (grc/rect->points selrect)
|
||||
transform (gmt/matrix)
|
||||
transform-inv (gmt/matrix)]
|
||||
@@ -1281,8 +1286,8 @@
|
||||
(d/update-when container :objects update-vals update-object))]
|
||||
|
||||
(-> data
|
||||
(update :pages-index update-vals update-container)
|
||||
(update :components update-vals update-container))))
|
||||
(update :pages-index d/update-vals update-container)
|
||||
(d/update-when :components d/update-vals update-container))))
|
||||
|
||||
(defmethod migrate-data "0003-fix-root-shape"
|
||||
[data _]
|
||||
@@ -1306,6 +1311,102 @@
|
||||
(d/update-when :components d/update-vals update-container)
|
||||
(d/without-nils))))
|
||||
|
||||
(defmethod migrate-data "0003-convert-path-content"
|
||||
[data _]
|
||||
(some-> cfeat/*new* (swap! conj "fdata/path-data"))
|
||||
|
||||
(letfn [(update-object [object]
|
||||
(if (or (cfh/bool-shape? object)
|
||||
(cfh/path-shape? object))
|
||||
(update object :content path/content)
|
||||
object))
|
||||
|
||||
(update-container [container]
|
||||
(d/update-when container :objects update-vals update-object))]
|
||||
|
||||
(-> data
|
||||
(update :pages-index d/update-vals update-container)
|
||||
(d/update-when :components d/update-vals update-container))))
|
||||
|
||||
|
||||
(defmethod migrate-data "0004-add-partial-text-touched-flags"
|
||||
[data _]
|
||||
(letfn [(update-object [page object]
|
||||
(if (and (cfh/text-shape? object)
|
||||
(ctk/in-component-copy? object))
|
||||
(let [file {:id (:id data) :data data}
|
||||
libs (when (:libs data)
|
||||
(deref (:libs data)))
|
||||
ref-shape (ctf/find-ref-shape file page libs object
|
||||
{:include-deleted? true :with-context? true})
|
||||
partial-touched (when ref-shape
|
||||
(cttx/get-diff-type (:content object) (:content ref-shape)))]
|
||||
(if (seq partial-touched)
|
||||
(update object :touched (fn [touched]
|
||||
(reduce #(ctk/set-touched-group %1 %2)
|
||||
touched
|
||||
partial-touched)))
|
||||
object))
|
||||
object))
|
||||
|
||||
(update-page [page]
|
||||
(d/update-when page :objects d/update-vals (partial update-object page)))]
|
||||
|
||||
(update data :pages-index d/update-vals update-page)))
|
||||
|
||||
(defmethod migrate-data "0005-deprecate-image-type"
|
||||
[data _]
|
||||
(letfn [(update-object [object]
|
||||
(if (cfh/image-shape? object)
|
||||
(let [metadata (:metadata object)
|
||||
fills (into [{:fill-image (assoc metadata :keep-aspect-ratio false)
|
||||
:opacity 1}]
|
||||
(:fills object))]
|
||||
(-> object
|
||||
(assoc :fills fills)
|
||||
(dissoc :metadata)
|
||||
(assoc :type :rect)))
|
||||
object))
|
||||
|
||||
(update-container [container]
|
||||
(d/update-when container :objects update-vals update-object))]
|
||||
|
||||
(-> data
|
||||
(update :pages-index d/update-vals update-container)
|
||||
(d/update-when :components d/update-vals update-container))))
|
||||
|
||||
(defmethod migrate-data "0006-fix-old-texts-fills"
|
||||
[data _]
|
||||
(letfn [(fix-fills [node]
|
||||
(let [fills (cond
|
||||
(or (some? (:fill-color node))
|
||||
(some? (:fill-opacity node))
|
||||
(some? (:fill-color-gradient node)))
|
||||
[(d/without-nils (select-keys node [:fill-color :fill-opacity :fill-color-gradient
|
||||
:fill-color-ref-id :fill-color-ref-file]))]
|
||||
|
||||
(nil? (:fills node))
|
||||
[{:fill-color "#000000" :fill-opacity 1}]
|
||||
|
||||
:else
|
||||
(:fills node))]
|
||||
(-> node
|
||||
(assoc :fills fills)
|
||||
(dissoc :fill-color :fill-opacity :fill-color-gradient
|
||||
:fill-color-ref-id :fill-color-ref-file))))
|
||||
|
||||
(update-object [object]
|
||||
(if (cfh/text-shape? object)
|
||||
(update object :content (partial txt/transform-nodes identity fix-fills))
|
||||
object))
|
||||
|
||||
(update-container [container]
|
||||
(d/update-when container :objects d/update-vals update-object))]
|
||||
|
||||
(-> data
|
||||
(update :pages-index d/update-vals update-container)
|
||||
(d/update-when :components d/update-vals update-container))))
|
||||
|
||||
(def available-migrations
|
||||
(into (d/ordered-set)
|
||||
["legacy-2"
|
||||
@@ -1363,4 +1464,8 @@
|
||||
"0001-remove-tokens-from-groups"
|
||||
"0002-normalize-bool-content"
|
||||
"0002-clean-shape-interactions"
|
||||
"0003-fix-root-shape"]))
|
||||
"0003-fix-root-shape"
|
||||
"0003-convert-path-content"
|
||||
"0004-add-partial-text-touched-flags"
|
||||
"0005-deprecate-image-type"
|
||||
"0006-fix-old-texts-fills"]))
|
||||
|
||||
@@ -4,7 +4,7 @@
|
||||
;;
|
||||
;; Copyright (c) KALEIDOS INC
|
||||
|
||||
(ns app.common.svg.shapes-builder
|
||||
(ns app.common.files.shapes-builder
|
||||
"A SVG to Shapes builder."
|
||||
(:require
|
||||
[app.common.colors :as clr]
|
||||
@@ -21,7 +21,8 @@
|
||||
[app.common.math :as mth]
|
||||
[app.common.schema :as sm :refer [max-safe-int min-safe-int]]
|
||||
[app.common.svg :as csvg]
|
||||
[app.common.svg.path :as path]
|
||||
[app.common.types.path :as path]
|
||||
[app.common.types.path.segment :as path.segm]
|
||||
[app.common.types.shape :as cts]
|
||||
[app.common.uuid :as uuid]
|
||||
[cuerdas.core :as str]))
|
||||
@@ -218,11 +219,11 @@
|
||||
(defn create-path-shape [name frame-id svg-data {:keys [attrs] :as data}]
|
||||
(when (and (contains? attrs :d) (seq (:d attrs)))
|
||||
(let [transform (csvg/parse-transform (:transform attrs))
|
||||
content (cond-> (path/parse (:d attrs))
|
||||
content (cond-> (path/from-string (:d attrs))
|
||||
(some? transform)
|
||||
(gsh/transform-content transform))
|
||||
(path.segm/transform-content transform))
|
||||
|
||||
selrect (gsh/content->selrect content)
|
||||
selrect (path.segm/content->selrect content)
|
||||
points (grc/rect->points selrect)
|
||||
origin (gpt/negate (gpt/point svg-data))
|
||||
attrs (-> (dissoc attrs :d :transform)
|
||||
@@ -15,6 +15,8 @@
|
||||
[app.common.types.shape.layout :as ctl]
|
||||
[app.common.uuid :as uuid]))
|
||||
|
||||
;; FIXME: move to logic?
|
||||
|
||||
(defn prepare-add-shape
|
||||
[changes shape objects]
|
||||
(let [index (:index (meta shape))
|
||||
@@ -35,6 +37,7 @@
|
||||
(pcb/update-shapes [(:parent-id shape)] #(ctl/push-into-cell % [id] row column)))
|
||||
(cond-> (ctl/grid-layout? objects (:parent-id shape))
|
||||
(pcb/update-shapes [(:parent-id shape)] ctl/assign-cells {:with-objects? true})))]
|
||||
|
||||
[shape changes]))
|
||||
|
||||
(defn prepare-move-shapes-into-frame
|
||||
@@ -44,6 +47,7 @@
|
||||
to-move (->> shapes
|
||||
(map (d/getf objects))
|
||||
(not-empty))]
|
||||
|
||||
(if to-move
|
||||
(-> changes
|
||||
(cond-> (and remove-layout-data?
|
||||
|
||||
@@ -8,8 +8,7 @@
|
||||
[app.common.data.macros :as dm]
|
||||
[app.common.types.component :as ctc]
|
||||
[app.common.types.components-list :as ctcl]
|
||||
[app.common.types.variant :as ctv]
|
||||
[cuerdas.core :as str]))
|
||||
[app.common.types.variant :as ctv]))
|
||||
|
||||
|
||||
(defn find-variant-components
|
||||
@@ -21,11 +20,6 @@
|
||||
(map #(ctcl/get-component data % true))
|
||||
reverse))
|
||||
|
||||
(defn- dashes-to-end
|
||||
[property-values]
|
||||
(let [dashes (if (some #(= % "--") property-values) ["--"] [])]
|
||||
(concat (remove #(= % "--") property-values) dashes)))
|
||||
|
||||
|
||||
(defn extract-properties-names
|
||||
[shape data]
|
||||
@@ -42,10 +36,7 @@
|
||||
(group-by :name)
|
||||
(map (fn [[k v]]
|
||||
{:name k
|
||||
:value (->> v
|
||||
(map #(if (str/empty? (:value %)) "--" (:value %)))
|
||||
distinct
|
||||
dashes-to-end)}))))
|
||||
:value (->> v (map :value) distinct)}))))
|
||||
|
||||
(defn get-variant-mains
|
||||
[component data]
|
||||
|
||||
@@ -116,6 +116,7 @@
|
||||
:terms-and-privacy-checkbox
|
||||
;; Only for developtment.
|
||||
:tiered-file-data-storage
|
||||
:token-units
|
||||
:transit-readable-response
|
||||
:user-feedback
|
||||
;; TODO: remove this flag.
|
||||
@@ -126,7 +127,8 @@
|
||||
:render-wasm-dpr
|
||||
:hide-release-modal
|
||||
:subscriptions
|
||||
:subscriptions-old})
|
||||
:subscriptions-old
|
||||
:frontend-binary-fills})
|
||||
|
||||
(def all-flags
|
||||
(set/union email login varia))
|
||||
|
||||
@@ -126,21 +126,20 @@
|
||||
o)))
|
||||
|
||||
(def schema:matrix
|
||||
{:type :map
|
||||
:pred valid-matrix?
|
||||
:type-properties
|
||||
{:title "matrix"
|
||||
:description "Matrix instance"
|
||||
:error/message "expected a valid matrix instance"
|
||||
:gen/gen (matrix-generator)
|
||||
:decode/json decode-matrix
|
||||
:decode/string decode-matrix
|
||||
:encode/json matrix->json
|
||||
:encode/string matrix->str
|
||||
::oapi/type "string"
|
||||
::oapi/format "matrix"}})
|
||||
|
||||
(sm/register! ::matrix schema:matrix)
|
||||
(sm/register!
|
||||
{:type ::matrix
|
||||
:pred valid-matrix?
|
||||
:type-properties
|
||||
{:title "matrix"
|
||||
:description "Matrix instance"
|
||||
:error/message "expected a valid matrix instance"
|
||||
:gen/gen (matrix-generator)
|
||||
:decode/json decode-matrix
|
||||
:decode/string decode-matrix
|
||||
:encode/json matrix->json
|
||||
:encode/string matrix->str
|
||||
::oapi/type "string"
|
||||
::oapi/format "matrix"}}))
|
||||
|
||||
;; FIXME: deprecated
|
||||
(s/def ::a ::us/safe-float)
|
||||
|
||||
@@ -5,7 +5,7 @@
|
||||
;; Copyright (c) KALEIDOS INC
|
||||
|
||||
(ns app.common.geom.point
|
||||
(:refer-clojure :exclude [divide min max abs])
|
||||
(:refer-clojure :exclude [divide min max abs zero?])
|
||||
(:require
|
||||
#?(:clj [app.common.fressian :as fres])
|
||||
#?(:cljs [cljs.core :as c]
|
||||
@@ -85,24 +85,22 @@
|
||||
(into {} p)
|
||||
p))
|
||||
|
||||
;; FIXME: make like matrix
|
||||
(def schema:point
|
||||
{:type ::point
|
||||
:pred valid-point?
|
||||
:type-properties
|
||||
{:title "point"
|
||||
:description "Point"
|
||||
:error/message "expected a valid point"
|
||||
:gen/gen (->> (sg/tuple (sg/small-int) (sg/small-int))
|
||||
(sg/fmap #(apply pos->Point %)))
|
||||
::oapi/type "string"
|
||||
::oapi/format "point"
|
||||
:decode/json decode-point
|
||||
:decode/string decode-point
|
||||
:encode/json point->json
|
||||
:encode/string point->str}})
|
||||
|
||||
(sm/register! schema:point)
|
||||
(sm/register!
|
||||
{:type ::point
|
||||
:pred valid-point?
|
||||
:type-properties
|
||||
{:title "point"
|
||||
:description "Point"
|
||||
:error/message "expected a valid point"
|
||||
:gen/gen (->> (sg/tuple (sg/small-int) (sg/small-int))
|
||||
(sg/fmap #(apply pos->Point %)))
|
||||
::oapi/type "string"
|
||||
::oapi/format "point"
|
||||
:decode/json decode-point
|
||||
:decode/string decode-point
|
||||
:encode/json point->json
|
||||
:encode/string point->str}}))
|
||||
|
||||
(defn point-like?
|
||||
[{:keys [x y] :as v}]
|
||||
@@ -470,6 +468,13 @@
|
||||
(and ^boolean (mth/almost-zero? (dm/get-prop p :x))
|
||||
^boolean (mth/almost-zero? (dm/get-prop p :y))))
|
||||
|
||||
(defn zero?
|
||||
[p]
|
||||
(let [x (dm/get-prop p :x)
|
||||
y (dm/get-prop p :y)]
|
||||
(and ^boolean (== 0 x)
|
||||
^boolean (== 0 y))))
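;; Illustrative usage sketch, not part of the diff: unlike `almost-zero?` just
;; above, the new `zero?` requires exact equality with 0 on both coordinates,
;; which is why `zero?` is added to the :refer-clojure :exclude list earlier in
;; this namespace. Assumes this namespace's `point` constructor is in scope.
(comment
  (zero? (point 0 0))      ;; => true
  (zero? (point 1.0e-9 0)) ;; => false, although almost-zero? would accept it
  )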
|
||||
|
||||
(defn lerp
|
||||
"Calculates a linear interpolation between two points given a tvalue"
|
||||
[p1 p2 t]
|
||||
|
||||
@@ -10,13 +10,11 @@
|
||||
[app.common.data.macros :as dm]
|
||||
[app.common.geom.point :as gpt]
|
||||
[app.common.geom.rect :as grc]
|
||||
[app.common.geom.shapes.bool :as gsb]
|
||||
[app.common.geom.shapes.common :as gco]
|
||||
[app.common.geom.shapes.constraints :as gct]
|
||||
[app.common.geom.shapes.corners :as gsc]
|
||||
[app.common.geom.shapes.fit-frame :as gsff]
|
||||
[app.common.geom.shapes.intersect :as gsi]
|
||||
[app.common.geom.shapes.path :as gsp]
|
||||
[app.common.geom.shapes.transforms :as gtr]
|
||||
[app.common.math :as mth]))
|
||||
|
||||
@@ -166,7 +164,6 @@
|
||||
(dm/export gtr/calculate-geometry)
|
||||
(dm/export gtr/update-group-selrect)
|
||||
(dm/export gtr/update-mask-selrect)
|
||||
(dm/export gtr/update-bool-selrect)
|
||||
(dm/export gtr/apply-transform)
|
||||
(dm/export gtr/transform-shape)
|
||||
(dm/export gtr/transform-selrect)
|
||||
@@ -180,12 +177,6 @@
|
||||
;; Constraints
|
||||
(dm/export gct/calc-child-modifiers)
|
||||
|
||||
;; PATHS
|
||||
;; FIXME: rename
|
||||
(dm/export gsp/content->selrect)
|
||||
(dm/export gsp/transform-content)
|
||||
(dm/export gsp/open-path?)
|
||||
|
||||
;; Intersection
|
||||
(dm/export gsi/overlaps?)
|
||||
(dm/export gsi/overlaps-path?)
|
||||
@@ -193,9 +184,6 @@
|
||||
(dm/export gsi/has-point-rect?)
|
||||
(dm/export gsi/rect-contains-shape?)
|
||||
|
||||
;; Bool
|
||||
(dm/export gsb/calc-bool-content)
|
||||
|
||||
;; Constraints
|
||||
(dm/export gct/default-constraints-h)
|
||||
(dm/export gct/default-constraints-v)
|
||||
@@ -206,6 +194,7 @@
|
||||
|
||||
;; Rect
|
||||
(dm/export grc/rect->points)
|
||||
(dm/export grc/center->rect)
|
||||
|
||||
;;
|
||||
(dm/export gsff/fit-frame-modifiers)
|
||||
|
||||
@@ -1,29 +0,0 @@
|
||||
;; This Source Code Form is subject to the terms of the Mozilla Public
|
||||
;; License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
;;
|
||||
;; Copyright (c) KALEIDOS INC
|
||||
|
||||
(ns app.common.geom.shapes.bool
|
||||
(:require
|
||||
[app.common.data :as d]
|
||||
[app.common.files.helpers :as cpf]
|
||||
[app.common.svg.path.bool :as pb]
|
||||
[app.common.svg.path.shapes-to-path :as stp]))
|
||||
|
||||
(defn calc-bool-content
|
||||
[shape objects]
|
||||
|
||||
(let [extract-content-xf
|
||||
(comp (map (d/getf objects))
|
||||
(filter (comp not :hidden))
|
||||
(remove cpf/svg-raw-shape?)
|
||||
(map #(stp/convert-to-path % objects))
|
||||
(map :content))
|
||||
|
||||
shapes-content
|
||||
(into [] extract-content-xf (:shapes shape))]
|
||||
(pb/content-bool (:bool-type shape) shapes-content)))
|
||||
|
||||
|
||||
|
||||
@@ -10,8 +10,8 @@
|
||||
[app.common.data.macros :as dm]
|
||||
[app.common.files.helpers :as cfh]
|
||||
[app.common.geom.rect :as grc]
|
||||
[app.common.geom.shapes.path :as gsp]
|
||||
[app.common.math :as mth]))
|
||||
[app.common.math :as mth]
|
||||
[app.common.types.path :as path]))
|
||||
|
||||
(defn shape-stroke-margin
|
||||
[shape stroke-width]
|
||||
@@ -104,7 +104,7 @@
|
||||
(let [strokes (:strokes shape)
|
||||
|
||||
open-path? (and ^boolean (cfh/path-shape? shape)
|
||||
^boolean (gsp/open-path? shape))
|
||||
^boolean (path/shape-with-open-path? shape))
|
||||
|
||||
stroke-width
|
||||
(->> strokes
|
||||
|
||||
@@ -13,9 +13,9 @@
|
||||
[app.common.geom.point :as gpt]
|
||||
[app.common.geom.rect :as grc]
|
||||
[app.common.geom.shapes.common :as gco]
|
||||
[app.common.geom.shapes.path :as gpp]
|
||||
[app.common.geom.shapes.text :as gte]
|
||||
[app.common.math :as mth]))
|
||||
[app.common.math :as mth]
|
||||
[app.common.types.path.segment :as path.segm]))
|
||||
|
||||
(defn orientation
|
||||
"Given three ordered points gives the orientation
|
||||
@@ -186,7 +186,7 @@
|
||||
rect-lines (points->lines rect-points)
|
||||
path-lines (if simple?
|
||||
(points->lines (:points shape))
|
||||
(gpp/path->lines shape))
|
||||
(path.segm/path->lines shape))
|
||||
start-point (-> shape :content (first) :params (gpt/point))]
|
||||
|
||||
(or (intersects-lines? rect-lines path-lines)
|
||||
|
||||
@@ -12,11 +12,10 @@
|
||||
[app.common.geom.matrix :as gmt]
|
||||
[app.common.geom.point :as gpt]
|
||||
[app.common.geom.rect :as grc]
|
||||
[app.common.geom.shapes.bool :as gshb]
|
||||
[app.common.geom.shapes.common :as gco]
|
||||
[app.common.geom.shapes.path :as gpa]
|
||||
[app.common.math :as mth]
|
||||
[app.common.types.modifiers :as ctm]))
|
||||
[app.common.types.modifiers :as ctm]
|
||||
[app.common.types.path :as path]))
|
||||
|
||||
#?(:clj (set! *warn-on-reflection* true))
|
||||
|
||||
@@ -77,7 +76,11 @@
|
||||
position-data)
|
||||
position-data))))
|
||||
|
||||
;; FIXME: revisit usage of mutability
|
||||
;; FIXME: review performance of this; this function executes too
|
||||
;; many times, including when the point vector is 0,0. This function
|
||||
;; can be implemented in terms of transform, which is already more
|
||||
;; performant
|
||||
|
||||
(defn move
|
||||
"Move the shape relatively to its current
|
||||
position applying the provided delta."
|
||||
@@ -96,7 +99,7 @@
|
||||
(d/update-when :y d/safe+ dy)
|
||||
(d/update-when :position-data move-position-data mvec)
|
||||
(cond-> (or (= :bool type) (= :path type))
|
||||
(update :content gpa/move-content mvec)))))
|
||||
(update :content path/move-content mvec)))))
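;; Illustrative sketch, not part of the diff: the FIXME above suggests callers
;; could short-circuit when the delta is a null vector; the gpt/zero? predicate
;; added in geom.point in this same changeset makes that check cheap.
(comment
  (let [delta (gpt/point 0 0)]
    (if (gpt/zero? delta)
      shape                 ;; nothing to do for a null delta
      (move shape delta)))) ;; otherwise shift the shape and its path content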
|
||||
|
||||
;; --- Absolute Movement
|
||||
|
||||
@@ -321,7 +324,7 @@
|
||||
(update shape :position-data transform-position-data transform-mtx)
|
||||
shape)
|
||||
shape (if (or (= type :path) (= type :bool))
|
||||
(update shape :content gpa/transform-content transform-mtx)
|
||||
(update shape :content path/transform-content transform-mtx)
|
||||
(assoc shape
|
||||
:x (dm/get-prop selrect :x)
|
||||
:y (dm/get-prop selrect :y)
|
||||
@@ -343,36 +346,45 @@
|
||||
|
||||
center (gco/points->center points)
|
||||
selrect (calculate-selrect points center)
|
||||
transform (calculate-transform points center selrect)
|
||||
inverse (when (some? transform) (gmt/inverse transform))]
|
||||
|
||||
(if-not (and (some? inverse) (some? transform))
|
||||
shape
|
||||
(let [type (dm/get-prop shape :type)
|
||||
rotation (mod (+ (d/nilv (:rotation shape) 0)
|
||||
(d/nilv (dm/get-in shape [:modifiers :rotation]) 0))
|
||||
360)
|
||||
[transform inverse]
|
||||
(let [transform (calculate-transform points center selrect)
|
||||
inverse (when (some? transform) (gmt/inverse transform))]
|
||||
(if (and (some? transform) (some? inverse))
|
||||
[transform inverse]
|
||||
[(:transform shape (gmt/matrix)) (:transform-inverse shape (gmt/matrix))]))
|
||||
|
||||
shape (if (or (= type :path) (= type :bool))
|
||||
(update shape :content gpa/transform-content transform-mtx)
|
||||
(assoc shape
|
||||
:x (dm/get-prop selrect :x)
|
||||
:y (dm/get-prop selrect :y)
|
||||
:width (dm/get-prop selrect :width)
|
||||
:height (dm/get-prop selrect :height)))]
|
||||
(-> shape
|
||||
(assoc :transform transform)
|
||||
(assoc :transform-inverse inverse)
|
||||
(assoc :selrect selrect)
|
||||
(assoc :points points)
|
||||
(assoc :rotation rotation))))))
|
||||
type (dm/get-prop shape :type)
|
||||
rotation (mod (+ (d/nilv (:rotation shape) 0)
|
||||
(d/nilv (dm/get-in shape [:modifiers :rotation]) 0))
|
||||
360)
|
||||
|
||||
shape (if (or (= type :path) (= type :bool))
|
||||
(update shape :content path/transform-content transform-mtx)
|
||||
(assoc shape
|
||||
:x (dm/get-prop selrect :x)
|
||||
:y (dm/get-prop selrect :y)
|
||||
:width (dm/get-prop selrect :width)
|
||||
:height (dm/get-prop selrect :height)))]
|
||||
(-> shape
|
||||
(assoc :transform transform)
|
||||
(assoc :transform-inverse inverse)
|
||||
(assoc :selrect selrect)
|
||||
(assoc :points points)
|
||||
(assoc :rotation rotation))))
|
||||
|
||||
(defn apply-transform
|
||||
"Given a new set of points transformed, set up the rectangle so it keeps
|
||||
its properties. We adjust the x, y, width, height and create a custom transform"
|
||||
[shape transform-mtx]
|
||||
(if ^boolean (gmt/move? transform-mtx)
|
||||
(cond
|
||||
(nil? transform-mtx)
|
||||
shape
|
||||
|
||||
^boolean (gmt/move? transform-mtx)
|
||||
(apply-transform-move shape transform-mtx)
|
||||
|
||||
:else
|
||||
(apply-transform-generic shape transform-mtx)))
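;; Illustrative note, not part of the diff: the reworked apply-transform now
;; guards against a nil matrix and keeps the cheap move-only path. The 6-arg
;; gmt/matrix call below is an assumed constructor for a pure translation,
;; which gmt/move? is expected to recognise.
(comment
  (apply-transform shape nil)                       ;; returns the shape untouched
  (apply-transform shape (gmt/matrix 1 0 0 1 10 0)) ;; translation only, move fast-path
  (apply-transform shape some-rotation-mtx))        ;; anything else, generic path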
|
||||
|
||||
(defn- update-group-viewbox
|
||||
@@ -444,25 +456,7 @@
|
||||
(assoc :flip-x (-> mask :flip-x))
|
||||
(assoc :flip-y (-> mask :flip-y)))))
|
||||
|
||||
(defn update-bool-selrect
|
||||
"Calculates the selrect+points for the boolean shape"
|
||||
[shape children objects]
|
||||
|
||||
(let [content
|
||||
(gshb/calc-bool-content shape objects)
|
||||
|
||||
shape
|
||||
(assoc shape :content content)
|
||||
|
||||
[points selrect]
|
||||
(gpa/content->points+selrect shape content)]
|
||||
|
||||
(if (and (some? selrect) (d/not-empty? points))
|
||||
(-> shape
|
||||
(assoc :selrect selrect)
|
||||
(assoc :points points))
|
||||
(update-group-selrect shape children))))
|
||||
|
||||
;; FIXME: revisit
|
||||
(defn update-shapes-geometry
|
||||
[objects ids]
|
||||
(->> ids
|
||||
@@ -476,7 +470,7 @@
|
||||
(update-mask-selrect shape children)
|
||||
|
||||
(cfh/bool-shape? shape)
|
||||
(update-bool-selrect shape children objects)
|
||||
(path/update-bool-shape shape objects)
|
||||
|
||||
(cfh/group-shape? shape)
|
||||
(update-group-selrect shape children)
|
||||
|
||||
@@ -25,9 +25,11 @@
|
||||
[app.common.types.file :as ctf]
|
||||
[app.common.types.page :as ctp]
|
||||
[app.common.types.pages-list :as ctpl]
|
||||
[app.common.types.shape :as cts]
|
||||
[app.common.types.shape-tree :as ctst]
|
||||
[app.common.types.shape.interactions :as ctsi]
|
||||
[app.common.types.shape.layout :as ctl]
|
||||
[app.common.types.text :as cttx]
|
||||
[app.common.types.token :as cto]
|
||||
[app.common.types.typography :as cty]
|
||||
[app.common.types.variant :as ctv]
|
||||
@@ -43,6 +45,12 @@
|
||||
(def log-shape-ids #{})
|
||||
(def log-container-ids #{})
|
||||
|
||||
(def updatable-attrs (->> (seq (keys ctk/sync-attrs))
|
||||
;; We don't update the flex-child attrs
|
||||
(remove ctk/swap-keep-attrs)
|
||||
;; We don't do automatic update of the `layout-grid-cells` property.
|
||||
(remove #(= :layout-grid-cells %))))
|
||||
|
||||
(defn enabled-shape?
|
||||
[id container]
|
||||
(or (empty? log-shape-ids)
|
||||
@@ -431,6 +439,8 @@
|
||||
(not inside-component?)
|
||||
(assoc :component-root true))
|
||||
|
||||
restoring-into-parent (get objects (:parent-id first-shape))
|
||||
|
||||
changes (-> changes
|
||||
(pcb/with-page page)
|
||||
(pcb/with-objects (:objects page))
|
||||
@@ -441,12 +451,15 @@
|
||||
changes
|
||||
(rest moved-shapes))
|
||||
changes (cond-> changes
|
||||
;; Remove variant info when restoring into a parent that is not a variant-container
|
||||
(and is-variant? parent (not (ctk/is-variant-container? parent)))
|
||||
;; Transform variant info into name when restoring into a parent that is not a variant-container,
|
||||
;; or when restoring into a variant-container that doesn't exist anymore
|
||||
(and is-variant?
|
||||
(or (and parent (not (ctk/is-variant-container? parent)))
|
||||
(nil? restoring-into-parent)))
|
||||
(clvp/generate-make-shapes-no-variant [first-shape])
|
||||
;; Add variant info and rename when restoring into a variant-container
|
||||
(ctk/is-variant-container? parent)
|
||||
(clvp/generate-make-shapes-variant [first-shape] parent))]
|
||||
(ctk/is-variant-container? restoring-into-parent)
|
||||
(clvp/generate-make-shapes-variant [first-shape] restoring-into-parent))]
|
||||
{:changes (pcb/restore-component changes component-id (:id page) minusdelta)
|
||||
:shape (first moved-shapes)})))
|
||||
|
||||
@@ -1608,6 +1621,124 @@
|
||||
:val dest-tokens
|
||||
:ignore-touched true}]}))))))
|
||||
|
||||
(defn- generate-update-tokens
|
||||
[changes container dest-shape origin-shape touched omit-touched?]
|
||||
(let [attrs (->> (seq (keys ctk/sync-attrs))
|
||||
;; We don't update the flex-child attrs
|
||||
(remove #(= :layout-grid-cells %)))
|
||||
|
||||
applied-tokens (reduce (fn [applied-tokens attr]
|
||||
(let [attr-group (get ctk/sync-attrs attr)
|
||||
token-attrs (cto/shape-attr->token-attrs attr)]
|
||||
(if (not (and (touched attr-group)
|
||||
omit-touched?))
|
||||
(into applied-tokens token-attrs)
|
||||
applied-tokens)))
|
||||
#{}
|
||||
attrs)]
|
||||
(cond-> changes
|
||||
(seq applied-tokens)
|
||||
(update-tokens container dest-shape origin-shape applied-tokens))))
|
||||
|
||||
(defn- add-update-attr-changes
|
||||
[changes dest-shape container roperations uoperations]
|
||||
(let [all-parents (cfh/get-parent-ids (:objects container)
|
||||
(:id dest-shape))]
|
||||
(-> changes
|
||||
(update :redo-changes conj (make-change
|
||||
container
|
||||
{:type :mod-obj
|
||||
:id (:id dest-shape)
|
||||
:operations roperations}))
|
||||
(update :redo-changes conj (make-change
|
||||
container
|
||||
{:type :reg-objects
|
||||
:shapes all-parents}))
|
||||
(update :undo-changes conj (make-change
|
||||
container
|
||||
{:type :mod-obj
|
||||
:id (:id dest-shape)
|
||||
:operations (vec uoperations)}))
|
||||
(update :undo-changes concat [(make-change
|
||||
container
|
||||
{:type :reg-objects
|
||||
:shapes all-parents})]))))
|
||||
|
||||
|
||||
(defn- add-update-attr-operations
|
||||
[attr dest-shape origin-shape roperations uoperations touched is-text-partial-change?]
|
||||
(let [orig-value (get origin-shape attr)
|
||||
dest-value (get dest-shape attr)
|
||||
;; position-data is a special case because it can be affected by :geometry-group and :content-group
|
||||
;; so, if the position-data changes but the geometry is touched we need to reset the position-data
|
||||
;; so it's calculated again
|
||||
reset-pos-data?
|
||||
(and (cfh/text-shape? origin-shape)
|
||||
(= attr :position-data)
|
||||
(not= orig-value dest-value)
|
||||
(touched :geometry-group))
|
||||
|
||||
;; We want to split the changes on the text itself and on its properties
|
||||
text-value
|
||||
(when is-text-partial-change?
|
||||
(cond
|
||||
(touched :text-content-structure-same-attrs)
|
||||
;; Keep the dest structure and texts, update its attrs to make them like the origin
|
||||
(cttx/copy-attrs-keys dest-value (cttx/get-first-paragraph-text-attrs orig-value))
|
||||
|
||||
(touched :text-content-text)
|
||||
;; Keep the texts touched in dest: copy the texts from dest over the attrs of origin
|
||||
(cttx/copy-text-keys dest-value orig-value)
|
||||
|
||||
(touched :text-content-attribute)
|
||||
;; Keep the attrs touched in dest: copy the texts from origin over the attrs of dest
|
||||
(cttx/copy-text-keys orig-value dest-value)))
|
||||
|
||||
val (cond
|
||||
;; If position data changes and the geometry group is touched
|
||||
;; we need to put to nil so we can regenerate it
|
||||
reset-pos-data? nil
|
||||
is-text-partial-change? text-value
|
||||
:else orig-value)
|
||||
|
||||
roperation {:type :set
|
||||
:attr attr
|
||||
:val val
|
||||
:ignore-touched true}
|
||||
uoperation {:type :set
|
||||
:attr attr
|
||||
:val (get dest-shape attr)
|
||||
:ignore-touched true}]
|
||||
[(conj roperations roperation)
|
||||
(conj uoperations uoperation)]))
|
||||
|
||||
(defn- is-text-partial-change?
|
||||
"Check if the attr update is a text partial change"
|
||||
[origin-shape dest-shape attr touched]
|
||||
(let [partial-text-keys [:text-content-attribute :text-content-text]
|
||||
active-keys (filter touched partial-text-keys)
|
||||
orig-content (get origin-shape attr)
|
||||
orig-attrs (cttx/get-first-paragraph-text-attrs orig-content)
|
||||
|
||||
equal-orig-attrs? (cttx/equal-attrs? orig-content orig-attrs)]
|
||||
(and
|
||||
(or
|
||||
;; One and only one of the keys is present
|
||||
(= 1 (count active-keys))
|
||||
(and
|
||||
(not (touched :text-content-attribute))
|
||||
(touched :text-content-structure-same-attrs)))
|
||||
|
||||
(or
|
||||
;; Both have the same structure
|
||||
(cttx/equal-structure? (:content origin-shape) (:content dest-shape))
|
||||
|
||||
;; The origin and destination have different structures, but each has the same attrs
|
||||
;; for all the items on its content tree
|
||||
(and
|
||||
equal-orig-attrs?
|
||||
(touched :text-content-structure-same-attrs))))))
|
||||
|
||||
(defn- update-attrs
|
||||
"The main function that implements the attribute sync algorithm. Copy
|
||||
attributes that have changed in the origin shape to the dest shape.
|
||||
@@ -1638,97 +1769,86 @@
|
||||
origin-shape (reposition-shape origin-shape origin-root dest-root)
|
||||
touched (get dest-shape :touched #{})]
|
||||
|
||||
(loop [attrs (->> (seq (keys ctk/sync-attrs))
|
||||
;; We don't update the flex-child attrs
|
||||
(remove ctk/swap-keep-attrs)
|
||||
;; We don't do automatic update of the `layout-grid-cells` property.
|
||||
(remove #(= :layout-grid-cells %)))
|
||||
applied-tokens #{}
|
||||
(loop [attrs updatable-attrs
|
||||
roperations []
|
||||
uoperations '()]
|
||||
|
||||
(let [attr (first attrs)]
|
||||
(if (nil? attr)
|
||||
(if (and (empty? roperations) (empty? applied-tokens))
|
||||
changes
|
||||
(let [all-parents (cfh/get-parent-ids (:objects container)
|
||||
(:id dest-shape))
|
||||
(cond-> changes
|
||||
(seq roperations)
|
||||
(add-update-attr-changes dest-shape container roperations uoperations)
|
||||
:always
|
||||
(generate-update-tokens container dest-shape origin-shape touched omit-touched?))
|
||||
|
||||
;; Sync tokens of attributes ignored above.
|
||||
;; FIXME: this probably may be merged with the other calculation
|
||||
;; of applied tokens, below, and to the calculation only once
|
||||
;; for all sync-attrs.
|
||||
applied-tokens (reduce (fn [applied-tokens attr]
|
||||
(let [attr-group (get ctk/sync-attrs attr)
|
||||
token-attrs (cto/shape-attr->token-attrs attr)]
|
||||
(if (not (and (touched attr-group)
|
||||
omit-touched?))
|
||||
(into applied-tokens token-attrs)
|
||||
applied-tokens)))
|
||||
applied-tokens
|
||||
ctk/swap-keep-attrs)]
|
||||
(cond-> changes
|
||||
(seq roperations)
|
||||
(-> (update :redo-changes conj (make-change
|
||||
container
|
||||
{:type :mod-obj
|
||||
:id (:id dest-shape)
|
||||
:operations roperations}))
|
||||
(update :redo-changes conj (make-change
|
||||
container
|
||||
{:type :reg-objects
|
||||
:shapes all-parents}))
|
||||
(update :undo-changes conj (make-change
|
||||
container
|
||||
{:type :mod-obj
|
||||
:id (:id dest-shape)
|
||||
:operations (vec uoperations)}))
|
||||
(update :undo-changes concat [(make-change
|
||||
container
|
||||
{:type :reg-objects
|
||||
:shapes all-parents})]))
|
||||
(seq applied-tokens)
|
||||
(update-tokens container dest-shape origin-shape applied-tokens))))
|
||||
(let [attr-group (get ctk/sync-attrs attr)
|
||||
;; On texts, when we want to omit the touched attrs, both text (the actual letters)
|
||||
;; and attrs (bold, font, etc) are in the same attr :content.
|
||||
;; If only one of them is touched, we want to address this case and
|
||||
;; only update the untouched one
|
||||
text-partial-change? (when (and
|
||||
omit-touched?
|
||||
(= :text (:type origin-shape))
|
||||
(= :content attr)
|
||||
(touched attr-group))
|
||||
(is-text-partial-change? origin-shape dest-shape attr touched))
|
||||
|
||||
(let [;; position-data is a special case because it can be affected by :geometry-group and :content-group
|
||||
;; so, if the position-data changes but the geometry is touched we need to reset the position-data
|
||||
;; so it's calculated again
|
||||
reset-pos-data?
|
||||
(and (cfh/text-shape? origin-shape)
|
||||
(= attr :position-data)
|
||||
(not= (get origin-shape attr) (get dest-shape attr))
|
||||
(touched :geometry-group))
|
||||
skip-operations? (or (= (get origin-shape attr) (get dest-shape attr))
|
||||
(and (touched attr-group)
|
||||
omit-touched?
|
||||
;; When it is a text-partial-change, we should generate operations
|
||||
;; even when omit-touched? is true, but updating only the text or
|
||||
;; the attributes, omitting the other part
|
||||
(not text-partial-change?)))
|
||||
|
||||
roperation {:type :set
|
||||
:attr attr
|
||||
:val (cond
|
||||
;; If position data changes and the geometry group is touched
|
||||
;; we need to put to nil so we can regenerate it
|
||||
reset-pos-data? nil
|
||||
:else (get origin-shape attr))
|
||||
:ignore-touched true}
|
||||
uoperation {:type :set
|
||||
:attr attr
|
||||
:val (get dest-shape attr)
|
||||
:ignore-touched true}
|
||||
[roperations' uoperations']
|
||||
(if skip-operations?
|
||||
[roperations uoperations]
|
||||
(add-update-attr-operations attr dest-shape origin-shape roperations uoperations touched text-partial-change?))]
|
||||
(recur (next attrs)
|
||||
roperations'
|
||||
uoperations')))))))
|
||||
|
||||
attr-group (get ctk/sync-attrs attr)
|
||||
(defn update-attrs-on-switch
|
||||
"Copy attributes that have changed in the origin shape to the dest shape. Used on variants switch"
|
||||
[changes dest-shape origin-shape dest-root origin-root origin-ref-shape container]
|
||||
(let [;; We need to sync only the position relative to the origin of the component.
|
||||
;; (see update-attrs for a full explanation)
|
||||
origin-shape (reposition-shape origin-shape origin-root dest-root)
|
||||
touched (get dest-shape :touched #{})
|
||||
touched-origin (get origin-shape :touched #{})]
|
||||
|
||||
token-attrs (cto/shape-attr->token-attrs attr)
|
||||
applied-tokens' (cond-> applied-tokens
|
||||
(not (and (touched attr-group)
|
||||
omit-touched?))
|
||||
(into token-attrs))]
|
||||
(if (or (= (get origin-shape attr) (get dest-shape attr))
|
||||
(and (touched attr-group) omit-touched?))
|
||||
(recur (next attrs)
|
||||
applied-tokens'
|
||||
roperations
|
||||
uoperations)
|
||||
(recur (next attrs)
|
||||
applied-tokens'
|
||||
(conj roperations roperation)
|
||||
(conj uoperations uoperation)))))))))
|
||||
(loop [attrs updatable-attrs
|
||||
roperations [{:type :set-touched :touched (:touched origin-shape)}]
|
||||
uoperations (list {:type :set-touched :touched (:touched dest-shape)})]
|
||||
(if-let [attr (first attrs)]
|
||||
(let [attr-group (get ctk/sync-attrs attr)
|
||||
[roperations' uoperations']
|
||||
(if (or
|
||||
;; If the attribute is not valid for the destination, don't copy it
|
||||
(not (cts/is-allowed-attr? attr (:type dest-shape)))
|
||||
;; If the values are already equal, don't copy it
|
||||
(= (get origin-shape attr) (get dest-shape attr))
|
||||
;; If the referenced shape on the original component doesn't have the same value, don't copy it
|
||||
;; Exceptions: :points :selrect and :content can be different
|
||||
(and
|
||||
(not (contains? #{:points :selrect :content} attr))
|
||||
(not= (get origin-ref-shape attr) (get dest-shape attr)))
|
||||
;; The :content attr can't be copied to elements of a different type
|
||||
(and (= attr :content) (not= (:type origin-shape) (:type dest-shape)))
|
||||
;; If the attr is not touched in the origin shape, don't copy it
|
||||
(not (touched-origin attr-group)))
|
||||
[roperations uoperations]
|
||||
(add-update-attr-operations attr dest-shape origin-shape roperations uoperations touched false))]
|
||||
(recur (next attrs)
|
||||
roperations'
|
||||
uoperations'))
|
||||
(cond-> changes
|
||||
(> (count roperations) 1)
|
||||
(add-update-attr-changes dest-shape container roperations uoperations)
|
||||
|
||||
:always
|
||||
(generate-update-tokens container dest-shape origin-shape touched false))))))
|
||||
|
||||
(defn- propagate-attrs
|
||||
"Helper that puts the origin attributes (attrs) into dest but only if
|
||||
@@ -2003,7 +2123,8 @@
|
||||
(pcb/with-objects objects)
|
||||
(pcb/resize-parents new-objects-ids)
|
||||
;; Fix the order of the children inside the parent
|
||||
(pcb/reorder-children parent-id (get-in objects [parent-id :shapes])))]
|
||||
(cond-> (ctl/any-layout? objects parent-id)
|
||||
(pcb/reorder-children parent-id (get-in objects [parent-id :shapes]))))]
|
||||
(assoc changes :file-id library-id)))
|
||||
|
||||
(defn generate-detach-component
|
||||
@@ -2138,7 +2259,9 @@
|
||||
:starting-frame frame-id}]
|
||||
|
||||
(vswap! unames conj name)
|
||||
(pcb/set-flow changes flow-id new-flow)))
|
||||
(-> changes
|
||||
(pcb/with-page page)
|
||||
(pcb/set-flow flow-id new-flow))))
|
||||
|
||||
changes
|
||||
(->> shapes
|
||||
|
||||
@@ -151,7 +151,9 @@
|
||||
changes
|
||||
(reduce (fn [changes {:keys [id] :as flow}]
|
||||
(if (contains? ids-to-delete (:starting-frame flow))
|
||||
(pcb/set-flow changes id nil)
|
||||
(-> changes
|
||||
(pcb/with-page page)
|
||||
(pcb/set-flow id nil))
|
||||
changes))
|
||||
changes
|
||||
(:flows page))
|
||||
@@ -213,7 +215,9 @@
|
||||
(map :id))
|
||||
|
||||
changes (reduce (fn [changes guide-id]
|
||||
(pcb/set-flow changes guide-id nil))
|
||||
(-> changes
|
||||
(pcb/with-page page)
|
||||
(pcb/set-flow guide-id nil)))
|
||||
changes
|
||||
guides-to-delete)
|
||||
|
||||
|
||||
@@ -60,6 +60,17 @@
|
||||
(pcb/update-shapes [main-id] #(assoc % :variant-name name)))))
|
||||
|
||||
|
||||
(defn generate-set-variant-error
|
||||
[changes component-id value]
|
||||
(let [data (pcb/get-library-data changes)
|
||||
component (ctcl/get-component data component-id true)
|
||||
main-id (:main-instance-id component)]
|
||||
(-> changes
|
||||
(pcb/update-shapes [main-id] (if (str/blank? value)
|
||||
#(dissoc % :variant-error)
|
||||
#(assoc % :variant-error value))))))
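;; Illustrative note, not part of the diff: a blank value clears the error flag
;; on the component's main shape, any other string stores it. The message text
;; below is a made-up example.
(comment
  (-> changes
      (generate-set-variant-error component-id "duplicated property value")) ;; sets :variant-error
  (-> changes
      (generate-set-variant-error component-id "")))                         ;; dissocs :variant-error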
|
||||
|
||||
|
||||
(defn generate-add-new-property
|
||||
[changes variant-id & {:keys [fill-values? property-name]}]
|
||||
(let [data (pcb/get-library-data changes)
|
||||
@@ -112,9 +123,10 @@
|
||||
(reduce generate-make-shape-no-variant changes shapes))
|
||||
|
||||
|
||||
(defn- generate-new-properties-from-variant
|
||||
(defn- create-new-properties-from-variant
|
||||
[shape min-props data container-name base-properties]
|
||||
(let [component (ctcl/get-component data (:component-id shape) true)
|
||||
|
||||
add-name? (not= (:name component) container-name)
|
||||
props (ctv/merge-properties base-properties
|
||||
(:variant-properties component))
|
||||
@@ -127,7 +139,7 @@
|
||||
(ctv/add-new-prop props (:name component))
|
||||
props)))
|
||||
|
||||
(defn- generate-new-properties-from-non-variant
|
||||
(defn- create-new-properties-from-non-variant
|
||||
[shape min-props container-name base-properties]
|
||||
(let [;; Remove container name from shape name if present
|
||||
shape-name (ctv/remove-prefix (:name shape) container-name)]
|
||||
@@ -155,14 +167,14 @@
|
||||
[cpath cname] (cfh/parse-path-name (:name variant-container))
|
||||
container-name (:name variant-container)
|
||||
|
||||
generate-new-properties
|
||||
create-new-properties
|
||||
(fn [shape min-props]
|
||||
(if (ctk/is-variant? shape)
|
||||
(generate-new-properties-from-variant shape min-props data container-name base-props)
|
||||
(generate-new-properties-from-non-variant shape min-props container-name base-props)))
|
||||
(create-new-properties-from-variant shape min-props data container-name base-props)
|
||||
(create-new-properties-from-non-variant shape min-props container-name base-props)))
|
||||
|
||||
total-props (reduce (fn [m shape]
|
||||
(max m (count (generate-new-properties shape num-base-props))))
|
||||
(max m (count (create-new-properties shape num-base-props))))
|
||||
0
|
||||
shapes)
|
||||
|
||||
@@ -180,19 +192,21 @@
|
||||
:name (:name variant-container)))]
|
||||
(reduce
|
||||
(fn [changes shape]
|
||||
(if (or (zero? num-base-props)
|
||||
(= variant-id (:variant-id shape)))
|
||||
changes ;; do nothing more if we aren't changing the parent or there are no base props
|
||||
(let [props (generate-new-properties shape total-props)
|
||||
variant-name (ctv/properties-to-name props)]
|
||||
(-> (pcb/update-component changes
|
||||
(:component-id shape)
|
||||
#(assoc % :variant-id variant-id
|
||||
:variant-properties props
|
||||
:name cname
|
||||
:path cpath)
|
||||
{:apply-changes-local-library? true})
|
||||
(pcb/update-shapes [(:id shape)]
|
||||
#(assoc % :variant-name variant-name))))))
|
||||
(let [component (ctcl/get-component data (:component-id shape) true)]
|
||||
(if (or (zero? num-base-props) ;; do nothing if there are no base props
|
||||
(and (= variant-id (:variant-id shape)) ;; or we are only moving the shape inside its parent (it is
|
||||
(not (:deleted component)))) ;; the same parent and the component isn't deleted)
|
||||
changes
|
||||
(let [props (create-new-properties shape total-props)
|
||||
variant-name (ctv/properties-to-name props)]
|
||||
(-> (pcb/update-component changes
|
||||
(:component-id shape)
|
||||
#(assoc % :variant-id variant-id
|
||||
:variant-properties props
|
||||
:name cname
|
||||
:path cpath)
|
||||
{:apply-changes-local-library? true})
|
||||
(pcb/update-shapes [(:id shape)]
|
||||
#(assoc % :variant-name variant-name)))))))
|
||||
changes
|
||||
shapes)))
|
||||
shapes)))
|
||||
|
||||
@@ -1,12 +1,14 @@
|
||||
(ns app.common.logic.variants
|
||||
(:require
|
||||
[app.common.files.changes-builder :as pcb]
|
||||
[app.common.files.helpers :as cfh]
|
||||
[app.common.files.variant :as cfv]
|
||||
[app.common.logic.libraries :as cll]
|
||||
[app.common.logic.variant-properties :as clvp]
|
||||
[app.common.types.container :as ctn]
|
||||
[app.common.types.file :as ctf]
|
||||
[app.common.types.variant :as ctv]))
|
||||
|
||||
|
||||
(defn generate-add-new-variant
|
||||
[changes shape variant-id new-component-id new-shape-id prop-num]
|
||||
(let [data (pcb/get-library-data changes)
|
||||
@@ -28,3 +30,62 @@
|
||||
(-> changes
|
||||
(clvp/generate-update-property-value new-component-id prop-num value)
|
||||
(pcb/change-parent (:parent-id shape) [new-shape] 0))))
|
||||
|
||||
(defn- generate-path
|
||||
[path objects base-id shape]
|
||||
(let [get-type #(case %
|
||||
:frame :container
|
||||
:group :container
|
||||
:rect :shape
|
||||
:circle :shape
|
||||
:bool :shape
|
||||
:path :shape
|
||||
%)]
|
||||
(if (= base-id (:id shape))
|
||||
path
|
||||
(generate-path (str path " " (:name shape) (get-type (:type shape))) objects base-id (get objects (:parent-id shape))))))
|
||||
|
||||
(defn- add-unique-path
|
||||
"Adds a new property :shape-path to the shape, with the path of the shape.
|
||||
Suffixes like -1, -2, etc. are added to ensure uniqueness."
|
||||
[shapes objects base-id]
|
||||
(letfn [(unique-path [shape counts]
|
||||
(let [path (generate-path "" objects base-id shape)
|
||||
num (get counts path 1)]
|
||||
[(str path "-" num) (update counts path (fnil inc 1))]))]
|
||||
(first
|
||||
(reduce
|
||||
(fn [[result counts] shape]
|
||||
(let [[shape-path counts'] (unique-path shape counts)]
|
||||
[(conj result (assoc shape :shape-path shape-path)) counts']))
|
||||
[[] {}]
|
||||
shapes))))
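;; Illustrative sketch, not part of the diff, with hypothetical ids and names:
;; two siblings with the same name get distinct :shape-path values thanks to
;; the -1/-2 suffixes, which is what generate-keep-touched below matches on.
(comment
  (add-unique-path [{:id 2 :name "Rect" :type :rect :parent-id 1}
                    {:id 3 :name "Rect" :type :rect :parent-id 1}]
                   {1 {:id 1 :name "Frame" :type :frame}
                    2 {:id 2 :name "Rect" :type :rect :parent-id 1}
                    3 {:id 3 :name "Rect" :type :rect :parent-id 1}}
                   1)
  ;; => shapes annotated with :shape-path values like " Rect:shape-1" and " Rect:shape-2"
  )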
|
||||
|
||||
|
||||
(defn generate-keep-touched
|
||||
[changes new-shape original-shape original-shapes page libraries]
|
||||
(let [objects (pcb/get-objects changes)
|
||||
orig-objects (into {} (map (juxt :id identity) original-shapes))
|
||||
orig-shapes-w-path (add-unique-path
|
||||
(reverse original-shapes)
|
||||
orig-objects
|
||||
(:id original-shape))
|
||||
new-shapes-w-path (add-unique-path
|
||||
(reverse (cfh/get-children-with-self objects (:id new-shape)))
|
||||
objects
|
||||
(:id new-shape))
|
||||
new-shapes-map (into {} (map (juxt :shape-path identity) new-shapes-w-path))
|
||||
orig-touched (filter (comp seq :touched) orig-shapes-w-path)
|
||||
|
||||
container (ctn/make-container page :page)]
|
||||
(reduce
|
||||
(fn [changes touched-shape]
|
||||
(let [related-shape (get new-shapes-map (:shape-path touched-shape))
|
||||
orig-ref-shape (ctf/find-ref-shape nil container libraries touched-shape)]
|
||||
(if related-shape
|
||||
(cll/update-attrs-on-switch
|
||||
changes related-shape touched-shape new-shape original-shape orig-ref-shape container)
|
||||
changes)))
|
||||
changes
|
||||
orig-touched)))
|
||||
|
||||
|
||||
@@ -5,8 +5,8 @@
|
||||
;; Copyright (c) KALEIDOS INC
|
||||
|
||||
(ns app.common.media
|
||||
"Media assets helpers (images, fonts, etc)"
|
||||
(:require
|
||||
[clojure.spec.alpha :as s]
|
||||
[cuerdas.core :as str]))
|
||||
|
||||
;; We have added ".ttf" as string to solve a problem with chrome input selector
|
||||
@@ -48,38 +48,28 @@
|
||||
(defn mtype->extension [mtype]
|
||||
;; https://developer.mozilla.org/en-US/docs/Web/HTTP/Basics_of_HTTP/MIME_types
|
||||
(case mtype
|
||||
"image/apng" ".apng"
|
||||
"image/avif" ".avif"
|
||||
"image/gif" ".gif"
|
||||
"image/jpeg" ".jpg"
|
||||
"image/png" ".png"
|
||||
"image/svg+xml" ".svg"
|
||||
"image/webp" ".webp"
|
||||
"application/zip" ".zip"
|
||||
"application/penpot" ".penpot"
|
||||
"application/pdf" ".pdf"
|
||||
"text/plain" ".txt"
|
||||
"image/apng" ".apng"
|
||||
"image/avif" ".avif"
|
||||
"image/gif" ".gif"
|
||||
"image/jpeg" ".jpg"
|
||||
"image/png" ".png"
|
||||
"image/svg+xml" ".svg"
|
||||
"image/webp" ".webp"
|
||||
"application/zip" ".zip"
|
||||
"application/penpot" ".penpot"
|
||||
"application/pdf" ".pdf"
|
||||
"text/plain" ".txt"
|
||||
"font/woff" ".woff"
|
||||
"font/woff2" ".woff2"
|
||||
"font/ttf" ".ttf"
|
||||
"font/otf" ".otf"
|
||||
"application/octet-stream" ".bin"
|
||||
nil))
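;; Illustrative usage sketch, not part of the diff, derived from the case table above:
(comment
  (mtype->extension "image/svg+xml") ;; => ".svg"
  (mtype->extension "font/woff2")    ;; => ".woff2"
  (mtype->extension "video/mp4"))    ;; => nil (unknown types fall through)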
|
||||
|
||||
(s/def ::id uuid?)
|
||||
(s/def ::name string?)
|
||||
(s/def ::width number?)
|
||||
(s/def ::height number?)
|
||||
(s/def ::created-at inst?)
|
||||
(s/def ::modified-at inst?)
|
||||
(s/def ::mtype string?)
|
||||
(s/def ::uri string?)
|
||||
|
||||
(s/def ::media-object
|
||||
(s/keys :req-un [::id
|
||||
::name
|
||||
::width
|
||||
::height
|
||||
::mtype
|
||||
::created-at
|
||||
::modified-at
|
||||
::uri]))
|
||||
|
||||
(defn strip-image-extension
|
||||
[filename]
|
||||
(let [image-extensions-re #"(\.png)|(\.jpg)|(\.jpeg)|(\.webp)|(\.gif)|(\.svg)$"]
|
||||
(str/replace filename image-extensions-re "")))
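;; Illustrative usage sketch, not part of the diff: only the listed image
;; extensions are stripped, anything else is left untouched.
(comment
  (strip-image-extension "logo.svg")    ;; => "logo"
  (strip-image-extension "photo.jpeg")  ;; => "photo"
  (strip-image-extension "report.pdf")) ;; => "report.pdf"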
|
||||
|
||||
(defn parse-font-weight
|
||||
[variant]
|
||||
|
||||
@@ -9,6 +9,7 @@
|
||||
#?(:cljs (:require-macros [app.common.schema :refer [ignoring]]))
|
||||
(:require
|
||||
[app.common.data :as d]
|
||||
[app.common.math :as mth]
|
||||
[app.common.pprint :as pp]
|
||||
[app.common.schema.generators :as sg]
|
||||
[app.common.schema.openapi :as-alias oapi]
|
||||
@@ -27,10 +28,6 @@
|
||||
[malli.transform :as mt]
|
||||
[malli.util :as mu]))
|
||||
|
||||
(defprotocol ILazySchema
|
||||
(-validate [_ o])
|
||||
(-explain [_ o]))
|
||||
|
||||
(def default-options
|
||||
{:registry sr/default-registry})
|
||||
|
||||
@@ -50,10 +47,6 @@
|
||||
[s]
|
||||
(m/type-properties s))
|
||||
|
||||
(defn- lazy-schema?
|
||||
[s]
|
||||
(satisfies? ILazySchema s))
|
||||
|
||||
(defn schema
|
||||
[s]
|
||||
(if (schema? s)
|
||||
@@ -110,12 +103,16 @@
|
||||
(malli.error/error-value exp {:malli.error/mask-valid-values '...}))
|
||||
|
||||
(defn optional-keys
|
||||
[schema]
|
||||
(mu/optional-keys schema default-options))
|
||||
([schema]
|
||||
(mu/optional-keys schema nil default-options))
|
||||
([schema keys]
|
||||
(mu/optional-keys schema keys default-options)))
|
||||
|
||||
(defn required-keys
|
||||
[schema]
|
||||
(mu/required-keys schema default-options))
|
||||
([schema]
|
||||
(mu/required-keys schema nil default-options))
|
||||
([schema keys]
|
||||
(mu/required-keys schema keys default-options)))
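;; Illustrative sketch, not part of the diff: the new two-arity forms restrict
;; the transformation to a subset of keys instead of applying it to all of
;; them. The inline map schema below is a made-up example.
(comment
  (optional-keys [:map [:x :int] [:y :int]])       ;; both keys become optional
  (optional-keys [:map [:x :int] [:y :int]] [:y])) ;; only :y becomes optional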
|
||||
|
||||
(defn transformer
|
||||
[& transformers]
|
||||
@@ -227,6 +224,11 @@
|
||||
(let [vfn (delay (decoder (if (delay? s) (deref s) s) transformer))]
|
||||
(fn [v] (@vfn v))))
|
||||
|
||||
(defn decode-fn
|
||||
[s transformer]
|
||||
(let [vfn (delay (decoder (if (delay? s) (deref s) s) transformer))]
|
||||
(fn [v] (@vfn v))))
|
||||
|
||||
(defn humanize-explain
|
||||
"Returns a string representation of the explain data structure"
|
||||
[{:keys [errors value]} & {:keys [length level]}]
|
||||
@@ -272,38 +274,36 @@
|
||||
([s] (lookup sr/default-registry s))
|
||||
([registry s] (schema (mr/schema registry s))))
|
||||
|
||||
(defn- fast-check
|
||||
"A fast path for checking process, assumes the ILazySchema protocol
|
||||
implemented on the provided `s` schema. Should not be used directly."
|
||||
[s type code hint value]
|
||||
(when-not ^boolean (-validate s value)
|
||||
(let [explain (-explain s value)]
|
||||
(throw (ex-info hint {:type type
|
||||
:code code
|
||||
:hint hint
|
||||
::explain explain}))))
|
||||
value)
|
||||
|
||||
(declare ^:private lazy-schema)
|
||||
|
||||
(defn check-fn
|
||||
"Create a predefined check function"
|
||||
[s & {:keys [hint type code]}]
|
||||
(let [schema (if (lazy-schema? s) s (lazy-schema s))
|
||||
hint (or ^boolean hint "check error")
|
||||
type (or ^boolean type :assertion)
|
||||
code (or ^boolean code :data-validation)]
|
||||
(partial fast-check schema type code hint)))
|
||||
(let [s (schema s)
|
||||
validator* (delay (m/validator s))
|
||||
explainer* (delay (m/explainer s))
|
||||
hint (or ^boolean hint "check error")
|
||||
type (or ^boolean type :assertion)
|
||||
code (or ^boolean code :data-validation)]
|
||||
|
||||
(fn [value]
|
||||
(let [validate-fn @validator*]
|
||||
(when-not ^boolean (validate-fn value)
|
||||
(let [explain-fn @explainer*
|
||||
explain (explain-fn value)]
|
||||
(throw (ex-info hint {:type type
|
||||
:code code
|
||||
:hint hint
|
||||
::explain explain}))))
|
||||
value))))
|
||||
|
||||
(defn check
|
||||
"A helper intended to be used on assertions for validate/check the
|
||||
schema over provided data. Raises an assertion exception."
|
||||
[s value & {:keys [hint type code]}]
|
||||
(let [s (if (lazy-schema? s) s (lazy-schema s))
|
||||
hint (or ^boolean hint "check error")
|
||||
type (or ^boolean type :assertion)
|
||||
code (or ^boolean code :data-validation)]
|
||||
(fast-check s type code hint value)))
|
||||
schema over provided data. Raises an assertion exception.
|
||||
|
||||
Use only on non-performance sensitive code, because it creates the
|
||||
check-fn instance all the time it is invoked."
|
||||
[s value & {:as opts}]
|
||||
(let [check-fn (check-fn s opts)]
|
||||
(check-fn value)))
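;; Illustrative usage sketch, not part of the diff; the inline schema is a
;; made-up example. On hot paths prefer a precomputed check-fn, since `check`
;; rebuilds one on every call as the docstring warns.
(comment
  (def check-point!
    (check-fn [:map [:x :int] [:y :int]] :hint "expected a point-like map"))

  (check-point! {:x 1 :y 2})                                      ;; returns the value or throws
  (check [:map [:x :int]] {:x 1} :hint "expected a point-like map"))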
|
||||
|
||||
(defn type-schema
|
||||
[& {:as params}]
|
||||
@@ -343,73 +343,8 @@
|
||||
(throw (ex-info "Invalid Arguments" {}))))
|
||||
|
||||
([type params]
|
||||
(let [s (if (map? params)
|
||||
(cond
|
||||
(= :set (:type params))
|
||||
(m/-collection-schema params)
|
||||
|
||||
(= :vector (:type params))
|
||||
(m/-collection-schema params)
|
||||
|
||||
:else
|
||||
(m/-simple-schema params))
|
||||
params)]
|
||||
|
||||
(swap! sr/registry assoc type s)
|
||||
nil)))
|
||||
|
||||
(defn- lazy-schema
|
||||
"Create ans instance of ILazySchema"
|
||||
[s]
|
||||
(let [schema (schema s)
|
||||
validator (delay (m/validator schema))
|
||||
explainer (delay (m/explainer schema))]
|
||||
|
||||
(reify
|
||||
m/AST
|
||||
(-to-ast [_ options] (m/-to-ast schema options))
|
||||
|
||||
m/EntrySchema
|
||||
(-entries [_] (m/-entries schema))
|
||||
(-entry-parser [_] (m/-entry-parser schema))
|
||||
|
||||
m/Cached
|
||||
(-cache [_] (m/-cache schema))
|
||||
|
||||
m/LensSchema
|
||||
(-keep [_] (m/-keep schema))
|
||||
(-get [_ key default] (m/-get schema key default))
|
||||
(-set [_ key value] (m/-set schema key value))
|
||||
|
||||
m/Schema
|
||||
(-validator [_]
|
||||
(m/-validator schema))
|
||||
(-explainer [_ path]
|
||||
(m/-explainer schema path))
|
||||
(-parser [_]
|
||||
(m/-parser schema))
|
||||
(-unparser [_]
|
||||
(m/-unparser schema))
|
||||
(-transformer [_ transformer method options]
|
||||
(m/-transformer schema transformer method options))
|
||||
(-walk [_ walker path options]
|
||||
(m/-walk schema walker path options))
|
||||
(-properties [_]
|
||||
(m/-properties schema))
|
||||
(-options [_]
|
||||
(m/-options schema))
|
||||
(-children [_]
|
||||
(m/-children schema))
|
||||
(-parent [_]
|
||||
(m/-parent schema))
|
||||
(-form [_]
|
||||
(m/-form schema))
|
||||
|
||||
ILazySchema
|
||||
(-validate [_ o]
|
||||
(@validator o))
|
||||
(-explain [_ o]
|
||||
(@explainer o)))))
|
||||
(swap! sr/registry assoc type params)
|
||||
params))
|
||||
|
||||
;; --- BUILTIN SCHEMAS
|
||||
|
||||
@@ -835,7 +770,8 @@
|
||||
|
||||
gen (sg/one-of
|
||||
(sg/small-int :max max :min min)
|
||||
(sg/small-double :max max :min min))]
|
||||
(->> (sg/small-double :max max :min min)
|
||||
(sg/fmap #(mth/precision % 2))))]
|
||||
|
||||
{:pred pred
|
||||
:type-properties
|
||||
@@ -910,6 +846,22 @@
|
||||
::oapi/type "string"
|
||||
::oapi/format "iso"}})
|
||||
|
||||
(register!
|
||||
{:type ::timestamp
|
||||
:pred inst?
|
||||
:type-properties
|
||||
{:title "inst"
|
||||
:description "Satisfies Inst protocol"
|
||||
:error/message "should be an instant"
|
||||
:gen/gen (->> (sg/small-int)
|
||||
(sg/fmap (fn [v] (tm/parse-instant v))))
|
||||
:decode/string tm/parse-instant
|
||||
:encode/string inst-ms
|
||||
:decode/json tm/parse-instant
|
||||
:encode/json inst-ms
|
||||
::oapi/type "string"
|
||||
::oapi/format "number"}})
|
||||
|
||||
(register!
|
||||
{:type ::fn
|
||||
:pred fn?})
|
||||
|
||||
@@ -56,13 +56,8 @@
|
||||
(str "(pass=TRUE, tests=" (:num-tests params) ", seed=" (:seed params) ", elapsed=" time "ms)"))))
|
||||
|
||||
(defmethod ct/report #?(:clj ::thrunk :cljs [:cljs.test/default ::thrunk])
|
||||
[{:keys [::params] :as m}]
|
||||
(let [smallest (-> params :shrunk :smallest vec)]
|
||||
(println)
|
||||
(println "Condition failed with the following params:")
|
||||
(println "Seed:" (:seed params))
|
||||
(println)
|
||||
(pp/pprint smallest)))
|
||||
[_]
|
||||
nil)
|
||||
|
||||
(defmethod ct/report #?(:clj ::trial :cljs [:cljs.test/default ::trial])
|
||||
[_]
|
||||
@@ -76,9 +71,12 @@
|
||||
(let [tvar (get-testing-var)
|
||||
tsym (get-testing-sym tvar)
|
||||
res (:result params)]
|
||||
(println)
|
||||
|
||||
(println "---------------------------------------------------------")
|
||||
(println "Generative test:" (str "'" tsym "'")
|
||||
(str "(pass=FALSE, tests=" (:num-tests params) ", seed=" (:seed params) ")"))
|
||||
(pp/pprint (:fail params))
|
||||
(println "---------------------------------------------------------")
|
||||
|
||||
(when (ex/exception? res)
|
||||
#?(:clj (ex/print-throwable res)
|
||||
|
||||
@@ -6,8 +6,6 @@
|
||||
|
||||
(ns app.common.svg
|
||||
(:require
|
||||
#?(:clj [clojure.xml :as xml]
|
||||
:cljs [tubax.core :as tubax])
|
||||
[app.common.data :as d]
|
||||
[app.common.data.macros :as dm]
|
||||
[app.common.geom.matrix :as gmt]
|
||||
@@ -15,15 +13,7 @@
|
||||
[app.common.geom.shapes :as gsh]
|
||||
[app.common.math :as mth]
|
||||
[app.common.uuid :as uuid]
|
||||
[cuerdas.core :as str])
|
||||
#?(:clj
|
||||
(:import
|
||||
clojure.lang.XMLHandler
|
||||
java.io.InputStream
|
||||
javax.xml.XMLConstants
|
||||
javax.xml.parsers.SAXParserFactory
|
||||
org.apache.commons.io.IOUtils)))
|
||||
|
||||
[cuerdas.core :as str]))
|
||||
|
||||
;; Regex for XML ids per Spec
|
||||
;; https://www.w3.org/TR/2008/REC-xml-20081126/#sec-common-syn
|
||||
@@ -1030,24 +1020,3 @@
|
||||
:height (d/parse-integer (:height attrs) 0)})))]
|
||||
(reduce-nodes redfn [] svg-data)))
|
||||
|
||||
#?(:clj
|
||||
(defn- secure-parser-factory
|
||||
[^InputStream input ^XMLHandler handler]
|
||||
(.. (doto (SAXParserFactory/newInstance)
|
||||
(.setFeature XMLConstants/FEATURE_SECURE_PROCESSING true)
|
||||
(.setFeature "http://apache.org/xml/features/disallow-doctype-decl" true))
|
||||
(newSAXParser)
|
||||
(parse input handler))))
|
||||
|
||||
(defn strip-doctype
|
||||
[data]
|
||||
(cond-> data
|
||||
(str/includes? data "<!DOCTYPE")
|
||||
(str/replace #"<\!DOCTYPE[^>]*>" "")))
|
||||
|
||||
(defn parse
|
||||
[text]
|
||||
#?(:cljs (tubax/xml->clj text)
|
||||
:clj (let [text (strip-doctype text)]
|
||||
(dm/with-open [istream (IOUtils/toInputStream text "UTF-8")]
|
||||
(xml/parse istream secure-parser-factory)))))
|
||||
|
||||
@@ -1,204 +0,0 @@
|
||||
;; This Source Code Form is subject to the terms of the Mozilla Public
|
||||
;; License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
;;
|
||||
;; Copyright (c) KALEIDOS INC
|
||||
|
||||
(ns app.common.svg.path.command
|
||||
(:require
|
||||
[app.common.data :as d]
|
||||
[app.common.geom.point :as gpt]))
|
||||
|
||||
(defn command->point
|
||||
([prev-pos {:keys [relative params] :as command}]
|
||||
(let [{:keys [x y] :or {x (:x prev-pos) y (:y prev-pos)}} params]
|
||||
(if relative
|
||||
(-> prev-pos (update :x + x) (update :y + y))
|
||||
(command->point command))))
|
||||
|
||||
([command]
|
||||
(when command
|
||||
(let [{:keys [x y]} (:params command)]
|
||||
(gpt/point x y)))))
|
||||
|
||||
|
||||
(defn make-move-to [to]
|
||||
{:command :move-to
|
||||
:relative false
|
||||
:params {:x (:x to)
|
||||
:y (:y to)}})
|
||||
|
||||
(defn make-line-to [to]
|
||||
{:command :line-to
|
||||
:relative false
|
||||
:params {:x (:x to)
|
||||
:y (:y to)}})
|
||||
|
||||
(defn make-curve-params
|
||||
([point]
|
||||
(make-curve-params point point point))
|
||||
|
||||
([point handler] (make-curve-params point handler point))
|
||||
|
||||
([point h1 h2]
|
||||
{:x (:x point)
|
||||
:y (:y point)
|
||||
:c1x (:x h1)
|
||||
:c1y (:y h1)
|
||||
:c2x (:x h2)
|
||||
:c2y (:y h2)}))
|
||||
|
||||
(defn update-curve-to
|
||||
[command h1 h2]
|
||||
(let [params {:x (-> command :params :x)
|
||||
:y (-> command :params :y)
|
||||
:c1x (:x h1)
|
||||
:c1y (:y h1)
|
||||
:c2x (:x h2)
|
||||
:c2y (:y h2)}]
|
||||
(-> command
|
||||
(assoc :command :curve-to)
|
||||
(assoc :params params))))
|
||||
|
||||
(defn make-curve-to
|
||||
[to h1 h2]
|
||||
{:command :curve-to
|
||||
:relative false
|
||||
:params (make-curve-params to h1 h2)})
|
||||
|
||||
(defn update-handler
|
||||
[command prefix point]
|
||||
(let [[cox coy] (if (= prefix :c1) [:c1x :c1y] [:c2x :c2y])]
|
||||
(-> command
|
||||
(assoc-in [:params cox] (:x point))
|
||||
(assoc-in [:params coy] (:y point)))))
|
||||
|
||||
(defn apply-content-modifiers
|
||||
"Apply to content a map with point translations"
|
||||
[content modifiers]
|
||||
(letfn [(apply-to-index [content [index params]]
|
||||
(if (contains? content index)
|
||||
(cond-> content
|
||||
(and
|
||||
(or (:c1x params) (:c1y params) (:c2x params) (:c2y params))
|
||||
(= :line-to (get-in content [index :command])))
|
||||
|
||||
(-> (assoc-in [index :command] :curve-to)
|
||||
(assoc-in [index :params]
|
||||
(make-curve-params
|
||||
(get-in content [index :params])
|
||||
(get-in content [(dec index) :params]))))
|
||||
|
||||
(:x params) (update-in [index :params :x] + (:x params))
|
||||
(:y params) (update-in [index :params :y] + (:y params))
|
||||
|
||||
(:c1x params) (update-in [index :params :c1x] + (:c1x params))
|
||||
(:c1y params) (update-in [index :params :c1y] + (:c1y params))
|
||||
|
||||
(:c2x params) (update-in [index :params :c2x] + (:c2x params))
|
||||
(:c2y params) (update-in [index :params :c2y] + (:c2y params)))
|
||||
content))]
|
||||
(let [content (if (vector? content) content (into [] content))]
|
||||
(reduce apply-to-index content modifiers))))
|
||||
|
||||
(defn get-handler [{:keys [params] :as command} prefix]
|
||||
(let [cx (d/prefix-keyword prefix :x)
|
||||
cy (d/prefix-keyword prefix :y)]
|
||||
(when (and command
|
||||
(contains? params cx)
|
||||
(contains? params cy))
|
||||
(gpt/point (get params cx)
|
||||
(get params cy)))))
|
||||
|
||||
(defn content->handlers
|
||||
"Retrieve a map where for every point will retrieve a list of
|
||||
the handlers that are associated with that point.
|
||||
point -> [[index, prefix]]"
|
||||
[content]
|
||||
(->> (d/with-prev content)
|
||||
(d/enumerate)
|
||||
(mapcat (fn [[index [cur-cmd pre-cmd]]]
|
||||
(if (and pre-cmd (= :curve-to (:command cur-cmd)))
|
||||
(let [cur-pos (command->point cur-cmd)
|
||||
pre-pos (command->point pre-cmd)]
|
||||
(-> [[pre-pos [index :c1]]
|
||||
[cur-pos [index :c2]]]))
|
||||
[])))
|
||||
|
||||
(group-by first)
|
||||
(d/mapm #(mapv second %2))))
|
||||
|
||||
(defn point-indices
|
||||
[content point]
|
||||
(->> (d/enumerate content)
|
||||
(filter (fn [[_ cmd]] (= point (command->point cmd))))
|
||||
(mapv (fn [[index _]] index))))
|
||||
|
||||
(defn handler-indices
|
||||
"Return an index where the key is the positions and the values the handlers"
|
||||
[content point]
|
||||
(->> (d/with-prev content)
|
||||
(d/enumerate)
|
||||
(mapcat (fn [[index [cur-cmd pre-cmd]]]
|
||||
(if (and (some? pre-cmd) (= :curve-to (:command cur-cmd)))
|
||||
(let [cur-pos (command->point cur-cmd)
|
||||
pre-pos (command->point pre-cmd)]
|
||||
(cond-> []
|
||||
(= pre-pos point) (conj [index :c1])
|
||||
(= cur-pos point) (conj [index :c2])))
|
||||
[])))))
|
||||
|
||||
(defn opposite-index
|
||||
"Calculates the opposite index given a prefix and an index"
|
||||
[content index prefix]
|
||||
|
||||
(let [point (if (= prefix :c2)
|
||||
(command->point (nth content index))
|
||||
(command->point (nth content (dec index))))
|
||||
|
||||
point->handlers (content->handlers content)
|
||||
|
||||
handlers (->> point
|
||||
(point->handlers)
|
||||
(filter (fn [[ci cp]] (and (not= index ci) (not= prefix cp)))))]
|
||||
|
||||
(cond
|
||||
(= (count handlers) 1)
|
||||
(->> handlers first)
|
||||
|
||||
(and (= :c1 prefix) (= (count content) index))
|
||||
[(dec index) :c2]
|
||||
|
||||
:else nil)))
|
||||
|
||||
|
||||
(defn get-commands
|
||||
"Returns the commands involving a point with its indices"
|
||||
[content point]
|
||||
(->> (d/enumerate content)
|
||||
(filterv (fn [[_ cmd]] (= (command->point cmd) point)))))
|
||||
|
||||
|
||||
(defn prefix->coords [prefix]
|
||||
(case prefix
|
||||
:c1 [:c1x :c1y]
|
||||
:c2 [:c2x :c2y]
|
||||
nil))
|
||||
|
||||
(defn handler->point [content index prefix]
|
||||
(when (and (some? index)
|
||||
(some? prefix)
|
||||
(contains? content index))
|
||||
(let [[cx cy] (prefix->coords prefix)]
|
||||
(if (= :curve-to (get-in content [index :command]))
|
||||
(gpt/point (get-in content [index :params cx])
|
||||
(get-in content [index :params cy]))
|
||||
|
||||
(gpt/point (get-in content [index :params :x])
|
||||
(get-in content [index :params :y]))))))
|
||||
|
||||
(defn handler->node [content index prefix]
|
||||
(if (= prefix :c1)
|
||||
(command->point (get content (dec index)))
|
||||
(command->point (get content index))))
|
||||
|
||||
@@ -1,324 +0,0 @@
|
||||
;; This Source Code Form is subject to the terms of the Mozilla Public
|
||||
;; License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
;;
|
||||
;; Copyright (c) KALEIDOS INC
|
||||
|
||||
(ns app.common.svg.path.legacy-parser1
|
||||
"The first SVG Path parser implementation.
|
||||
|
||||
Written in a mix of CLJS and JS code and used in production until
|
||||
1.19, used mainly for tests."
|
||||
(:require
|
||||
[app.common.data :as d]
|
||||
[app.common.geom.point :as gpt]
|
||||
[app.common.geom.shapes.path :as upg]
|
||||
[app.common.svg :as csvg]
|
||||
[app.common.svg.path.arc-to-bezier :as a2b]
|
||||
[app.common.svg.path.command :as upc]
|
||||
[cuerdas.core :as str]))
|
||||
|
||||
(def commands-regex #"(?i)[mzlhvcsqta][^mzlhvcsqta]*")
|
||||
|
||||
;; Matches numbers for path values; allows values like -.01, 10, +12.22
|
||||
;; 0 and 1 are special because can refer to flags
|
||||
(def num-regex #"[+-]?(\d+(\.\d+)?|\.\d+)(e[+-]?\d+)?")
|
||||
|
||||
(def flag-regex #"[01]")
|
||||
|
||||
(defn extract-params [cmd-str extract-commands]
|
||||
(loop [result []
|
||||
extract-idx 0
|
||||
current {}
|
||||
remain (-> cmd-str (subs 1) (str/trim))]
|
||||
|
||||
(let [[param type] (nth extract-commands extract-idx)
|
||||
regex (case type
|
||||
:flag flag-regex
|
||||
#_:number num-regex)
|
||||
match (re-find regex remain)]
|
||||
|
||||
(if match
|
||||
(let [value (-> match first csvg/fix-dot-number d/read-string)
|
||||
remain (str/replace-first remain regex "")
|
||||
current (assoc current param value)
|
||||
extract-idx (inc extract-idx)
|
||||
[result current extract-idx]
|
||||
(if (>= extract-idx (count extract-commands))
|
||||
[(conj result current) {} 0]
|
||||
[result current extract-idx])]
|
||||
(recur result
|
||||
extract-idx
|
||||
current
|
||||
remain))
|
||||
(cond-> result
|
||||
(seq current) (conj current))))))
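;; Illustrative sketch documenting the removed helper: extract-params walks the
;; argument string of one command and groups the numbers into parameter maps.
(comment
  (extract-params "C10 20 30 40 50 60"
                  [[:c1x :number] [:c1y :number]
                   [:c2x :number] [:c2y :number]
                   [:x :number] [:y :number]])
  ;; => [{:c1x 10 :c1y 20 :c2x 30 :c2y 40 :x 50 :y 60}]
  )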

;; Path specification
;; https://www.w3.org/TR/SVG11/paths.html
(defmulti parse-command (comp str/upper first))

(defmethod parse-command "M" [cmd]
(let [relative (str/starts-with? cmd "m")
param-list (extract-params cmd [[:x :number]
[:y :number]])]

(into [{:command :move-to
:relative relative
:params (first param-list)}]

(for [params (rest param-list)]
{:command :line-to
:relative relative
:params params}))))

(defmethod parse-command "Z" [_]
[{:command :close-path}])

(defmethod parse-command "L" [cmd]
(let [relative (str/starts-with? cmd "l")
param-list (extract-params cmd [[:x :number]
[:y :number]])]
(for [params param-list]
{:command :line-to
:relative relative
:params params})))

(defmethod parse-command "H" [cmd]
(let [relative (str/starts-with? cmd "h")
param-list (extract-params cmd [[:value :number]])]
(for [params param-list]
{:command :line-to-horizontal
:relative relative
:params params})))

(defmethod parse-command "V" [cmd]
(let [relative (str/starts-with? cmd "v")
param-list (extract-params cmd [[:value :number]])]
(for [params param-list]
{:command :line-to-vertical
:relative relative
:params params})))

(defmethod parse-command "C" [cmd]
(let [relative (str/starts-with? cmd "c")
param-list (extract-params cmd [[:c1x :number]
[:c1y :number]
[:c2x :number]
[:c2y :number]
[:x :number]
[:y :number]])]
(for [params param-list]
{:command :curve-to
:relative relative
:params params})))

(defmethod parse-command "S" [cmd]
(let [relative (str/starts-with? cmd "s")
param-list (extract-params cmd [[:cx :number]
[:cy :number]
[:x :number]
[:y :number]])]
(for [params param-list]
{:command :smooth-curve-to
:relative relative
:params params})))

(defmethod parse-command "Q" [cmd]
(let [relative (str/starts-with? cmd "q")
param-list (extract-params cmd [[:cx :number]
[:cy :number]
[:x :number]
[:y :number]])]
(for [params param-list]
{:command :quadratic-bezier-curve-to
:relative relative
:params params})))

(defmethod parse-command "T" [cmd]
(let [relative (str/starts-with? cmd "t")
param-list (extract-params cmd [[:x :number]
[:y :number]])]
(for [params param-list]
{:command :smooth-quadratic-bezier-curve-to
:relative relative
:params params})))

(defmethod parse-command "A" [cmd]
(let [relative (str/starts-with? cmd "a")
param-list (extract-params cmd [[:rx :number]
[:ry :number]
[:x-axis-rotation :number]
[:large-arc-flag :flag]
[:sweep-flag :flag]
[:x :number]
[:y :number]])]
(for [params param-list]
{:command :elliptical-arc
:relative relative
:params params})))

(defn smooth->curve
[{:keys [params]} pos handler]
(let [{c1x :x c1y :y} (upg/calculate-opposite-handler pos handler)]
{:c1x c1x
:c1y c1y
:c2x (:cx params)
:c2y (:cy params)}))

(defn quadratic->curve
[sp ep cp]
(let [cp1 (-> (gpt/to-vec sp cp)
(gpt/scale (/ 2 3))
(gpt/add sp))

cp2 (-> (gpt/to-vec ep cp)
(gpt/scale (/ 2 3))
(gpt/add ep))]

{:c1x (:x cp1)
:c1y (:y cp1)
:c2x (:x cp2)
:c2y (:y cp2)}))
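;; Editor's note (not part of the original source): quadratic->curve is the
;; standard quadratic-to-cubic elevation, c1 = sp + 2/3 (cp - sp) and
;; c2 = ep + 2/3 (cp - ep). A quick worked example, assuming gpt/point values
;; carry :x and :y:
(comment
  (quadratic->curve (gpt/point 0 0) (gpt/point 30 0) (gpt/point 15 30))
  ;; => {:c1x 10 :c1y 20 :c2x 20 :c2y 20}   (approximately, platform dependent)
  )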

(defn arc->beziers*
[from-x from-y x y large-arc-flag sweep-flag rx ry x-axis-rotation]
(a2b/calculateBeziers from-x from-y x y large-arc-flag sweep-flag rx ry x-axis-rotation))

(defn arc->beziers [from-p command]
(let [to-command
(fn [[_ _ c1x c1y c2x c2y x y]]
{:command :curve-to
:relative (:relative command)
:params {:c1x c1x :c1y c1y
:c2x c2x :c2y c2y
:x x :y y}})

{from-x :x from-y :y} from-p
{:keys [rx ry x-axis-rotation large-arc-flag sweep-flag x y]} (:params command)
result (arc->beziers* from-x from-y x y large-arc-flag sweep-flag rx ry x-axis-rotation)]
(mapv to-command result)))

(defn simplify-commands
"Removes some commands and converts relative to absolute coordinates"
[commands]
(let [simplify-command
;; prev-pos : previous position for the current path. Necessary for relative commands
;; prev-start : previous move-to necessary for Z commands
;; prev-cc : previous command control point for cubic beziers
;; prev-qc : previous command control point for quadratic curves
(fn [[result prev-pos prev-start prev-cc prev-qc] [command _prev]]
(let [command (assoc command :prev-pos prev-pos)

command
(cond-> command
(:relative command)
(-> (assoc :relative false)
(d/update-in-when [:params :c1x] + (:x prev-pos))
(d/update-in-when [:params :c1y] + (:y prev-pos))

(d/update-in-when [:params :c2x] + (:x prev-pos))
(d/update-in-when [:params :c2y] + (:y prev-pos))

(d/update-in-when [:params :cx] + (:x prev-pos))
(d/update-in-when [:params :cy] + (:y prev-pos))

(d/update-in-when [:params :x] + (:x prev-pos))
(d/update-in-when [:params :y] + (:y prev-pos))

(cond->
(= :line-to-horizontal (:command command))
(d/update-in-when [:params :value] + (:x prev-pos))

(= :line-to-vertical (:command command))
(d/update-in-when [:params :value] + (:y prev-pos)))))

params (:params command)
orig-command command

command
(cond-> command
(= :line-to-horizontal (:command command))
(-> (assoc :command :line-to)
(update :params dissoc :value)
(assoc-in [:params :x] (:value params))
(assoc-in [:params :y] (:y prev-pos)))

(= :line-to-vertical (:command command))
(-> (assoc :command :line-to)
(update :params dissoc :value)
(assoc-in [:params :y] (:value params))
(assoc-in [:params :x] (:x prev-pos)))

(= :smooth-curve-to (:command command))
(-> (assoc :command :curve-to)
(update :params dissoc :cx :cy)
(update :params merge (smooth->curve command prev-pos prev-cc)))

(= :quadratic-bezier-curve-to (:command command))
(-> (assoc :command :curve-to)
(update :params dissoc :cx :cy)
(update :params merge (quadratic->curve prev-pos (gpt/point params) (gpt/point (:cx params) (:cy params)))))

(= :smooth-quadratic-bezier-curve-to (:command command))
(-> (assoc :command :curve-to)
(update :params merge (quadratic->curve prev-pos (gpt/point params) (upg/calculate-opposite-handler prev-pos prev-qc)))))

result (if (= :elliptical-arc (:command command))
(into result (arc->beziers prev-pos command))
(conj result command))

next-cc (case (:command orig-command)
:smooth-curve-to
(gpt/point (get-in orig-command [:params :cx]) (get-in orig-command [:params :cy]))

:curve-to
(gpt/point (get-in orig-command [:params :c2x]) (get-in orig-command [:params :c2y]))

(:line-to-horizontal :line-to-vertical)
(gpt/point (get-in command [:params :x]) (get-in command [:params :y]))

(gpt/point (get-in orig-command [:params :x]) (get-in orig-command [:params :y])))

next-qc (case (:command orig-command)
:quadratic-bezier-curve-to
(gpt/point (get-in orig-command [:params :cx]) (get-in orig-command [:params :cy]))

:smooth-quadratic-bezier-curve-to
(upg/calculate-opposite-handler prev-pos prev-qc)

(gpt/point (get-in orig-command [:params :x]) (get-in orig-command [:params :y])))

next-pos (if (= :close-path (:command command))
prev-start
(upc/command->point prev-pos command))

next-start (if (= :move-to (:command command)) next-pos prev-start)]

[result next-pos next-start next-cc next-qc]))

start (first commands)
start (cond-> start
(:relative start)
(assoc :relative false))

start-pos (gpt/point (:params start))]

(->> (map vector (rest commands) commands)
(reduce simplify-command [[start] start-pos start-pos start-pos start-pos])
(first))))

(defn parse [path-str]
(if (empty? path-str)
path-str
(let [clean-path-str
(-> path-str
(str/trim)
;; Replace commas with spaces
(str/replace #"," " ")
;; Collapse consecutive whitespace into a single space
(str/replace #"\s+" " "))
commands (re-seq commands-regex clean-path-str)]
(-> (mapcat parse-command commands)
(simplify-commands)))))
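;; Editor's usage sketch (not part of the original source): parse tokenizes the
;; "d" attribute with commands-regex, expands every token with parse-command and
;; normalizes the result with simplify-commands. The returned maps are indicative
;; only; simplify-commands also absolutizes relative coordinates.
(comment
  (parse "M0 0 L10 10")
  ;; => [{:command :move-to :relative false :params {:x 0 :y 0}}
  ;;     {:command :line-to :relative false :params {:x 10 :y 10}} ...]
  )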

@@ -12,15 +12,23 @@
(:require
[app.common.data :as d]
[app.common.geom.point :as gpt]
[app.common.geom.shapes.path :as upg]
[app.common.math :as mth]
[app.common.svg :as csvg]
[app.common.svg.path.command :as upc]
[app.common.types.path.helpers :as path.helpers]
[app.common.types.path.segment :as path.segment]
[cuerdas.core :as str]))

(def commands-regex #"(?i)[mzlhvcsqta][^mzlhvcsqta]*")
(def regex #"[+-]?(\d+(\.\d+)?|\.\d+)(e[+-]?\d+)?")

(defn- get-point
"Get a point for a segment"
[prev-pos {:keys [relative params] :as segment}]
(let [{:keys [x y] :or {x (:x prev-pos) y (:y prev-pos)}} params]
(if relative
(-> prev-pos (update :x + x) (update :y + y))
(path.helpers/segment->point segment))))

(defn extract-params
[data pattern]
(loop [result []
@@ -185,7 +193,7 @@

(defn smooth->curve
[{:keys [params]} pos handler]
(let [{c1x :x c1y :y} (upg/calculate-opposite-handler pos handler)]
(let [{c1x :x c1y :y} (path.segment/calculate-opposite-handler pos handler)]
{:c1x c1x
:c1y c1y
:c2x (:cx params)
@@ -413,7 +421,7 @@

(= :smooth-quadratic-bezier-curve-to (:command command))
(-> (assoc :command :curve-to)
(update :params merge (quadratic->curve prev-pos (gpt/point params) (upg/calculate-opposite-handler prev-pos prev-qc)))))
(update :params merge (quadratic->curve prev-pos (gpt/point params) (path.segment/calculate-opposite-handler prev-pos prev-qc)))))

result (if (= :elliptical-arc (:command command))
(into result (arc->beziers prev-pos command))
@@ -436,13 +444,13 @@
(gpt/point (get-in orig-command [:params :cx]) (get-in orig-command [:params :cy]))

:smooth-quadratic-bezier-curve-to
(upg/calculate-opposite-handler prev-pos prev-qc)
(path.segment/calculate-opposite-handler prev-pos prev-qc)

(gpt/point (get-in orig-command [:params :x]) (get-in orig-command [:params :y])))

next-pos (if (= :close-path (:command command))
prev-start
(upc/command->point prev-pos command))
(get-point prev-pos command))

next-start (if (= :move-to (:command command)) next-pos prev-start)]

@@ -14,7 +14,9 @@
[app.common.test-helpers.components :as thc]
[app.common.test-helpers.files :as thf]
[app.common.test-helpers.shapes :as ths]
[app.common.types.container :as ctn]))
[app.common.text :as txt]
[app.common.types.container :as ctn]
[app.common.types.shape :as cts]))

;; ----- File building

@@ -58,6 +60,18 @@
:parent-label frame-label}
child-params))))

(defn add-frame-with-text
[file frame-label child-label text & {:keys [frame-params child-params]}]
(let [shape (-> (cts/setup-shape {:type :text :x 0 :y 0 :grow-type :auto-width})
(txt/change-text text)
(assoc :position-data nil
:parent-label frame-label))]
(-> file
(add-frame frame-label frame-params)
(ths/add-sample-shape child-label
(merge shape
child-params)))))

(defn add-minimal-component
[file component-label root-label
& {:keys [component-params root-params]}]

@@ -23,28 +23,32 @@

(defn sample-file
[label & {:keys [page-label name view-only?] :as params}]
(binding [ffeat/*current* #{"components/v2"}]
(let [params (cond-> params
label
(assoc :id (thi/new-id! label))
(let [params
(cond-> params
label
(assoc :id (thi/new-id! label))

page-label
(assoc :page-id (thi/new-id! page-label))
(nil? name)
(assoc :name "Test file")

(nil? name)
(assoc :name "Test file"))
:always
(assoc :features ffeat/default-features))

file (-> (ctf/make-file (dissoc params :page-label))
(assoc :features #{"components/v2"})
(assoc :permissions {:can-edit (not (true? view-only?))}))
opts
(cond-> {}
page-label
(assoc :page-id (thi/new-id! page-label)))

page (-> file
:data
(ctpl/pages-seq)
(first))]
file (-> (ctf/make-file params opts)
(assoc :permissions {:can-edit (not (true? view-only?))}))

(with-meta file
{:current-page-id (:id page)}))))
page (-> file
:data
(ctpl/pages-seq)
(first))]

(with-meta file
{:current-page-id (:id page)})))

(defn validate-file!
([file] (validate-file! file {}))

@@ -41,25 +41,26 @@
[o]
(and (string? o) (some? (re-matches rgb-color-re o))))

(def ^:private type:rgb-color
{:type :string
:pred rgb-color-string?
:type-properties
{:title "rgb-color"
:description "RGB Color String"
:error/message "expected a valid RGB color"
:error/code "errors.invalid-rgb-color"
:gen/gen (generate-rgb-color)
::oapi/type "integer"
::oapi/format "int64"}})
(def schema:rgb-color
(sm/register!
{:type ::rgb-color
:pred rgb-color-string?
:type-properties
{:title "rgb-color"
:description "RGB Color String"
:error/message "expected a valid RGB color"
:error/code "errors.invalid-rgb-color"
:gen/gen (generate-rgb-color)
::oapi/type "integer"
::oapi/format "int64"}}))

(def schema:image-color
(def schema:image
[:map {:title "ImageColor"}
[:name {:optional true} :string]
[:width ::sm/int]
[:height ::sm/int]
[:mtype {:optional true} [:maybe :string]]
[:mtype ::sm/text]
[:id ::sm/uuid]
[:name {:optional true} ::sm/text]
[:keep-aspect-ratio {:optional true} :boolean]])
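;; Editor's illustration (not part of the diff): a value accepted by the new
;; schema:image would look roughly like the map below; the uuid and mtype are
;; made-up sample values.
(comment
  {:id #uuid "00000000-0000-0000-0000-000000000001"
   :width 800
   :height 600
   :mtype "image/png"
   :name "cover.png"
   :keep-aspect-ratio true}
  )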

(def gradient-types
@@ -76,7 +77,7 @@
[:stops
[:vector {:min 1 :gen/max 2}
[:map {:title "GradientStop"}
[:color ::rgb-color]
[:color schema:rgb-color]
[:opacity {:optional true} [:maybe ::sm/safe-number]]
[:offset ::sm/safe-number]]]]])

@@ -86,13 +87,13 @@
[:name {:optional true} :string]
[:path {:optional true} [:maybe :string]]
[:value {:optional true} [:maybe :string]]
[:color {:optional true} [:maybe ::rgb-color]]
[:color {:optional true} [:maybe schema:rgb-color]]
[:opacity {:optional true} [:maybe ::sm/safe-number]]
[:modified-at {:optional true} ::sm/inst]
[:ref-id {:optional true} ::sm/uuid]
[:ref-file {:optional true} ::sm/uuid]
[:gradient {:optional true} [:maybe schema:gradient]]
[:image {:optional true} [:maybe schema:image-color]]
[:image {:optional true} [:maybe schema:image]]
[:plugin-data {:optional true} ::ctpg/plugin-data]])

(def schema:color
@@ -103,15 +104,21 @@
[:and
[:map {:title "RecentColor"}
[:opacity {:optional true} [:maybe ::sm/safe-number]]
[:color {:optional true} [:maybe ::rgb-color]]
[:color {:optional true} [:maybe schema:rgb-color]]
[:gradient {:optional true} [:maybe schema:gradient]]
[:image {:optional true} [:maybe schema:image-color]]]
[:image {:optional true} [:maybe schema:image]]]
[::sm/contains-any {:strict true} [:color :gradient :image]]])

(sm/register! ::rgb-color type:rgb-color)
;; Same as color but with :id prop required
(def schema:library-color
[:and
(sm/required-keys schema:color-attrs [:id])
[::sm/contains-any {:strict true} [:color :gradient :image]]])

;; FIXME: revisit if we really need all these registers
(sm/register! ::color schema:color)
(sm/register! ::gradient schema:gradient)
(sm/register! ::image-color schema:image-color)
(sm/register! ::image-color schema:image)
(sm/register! ::recent-color schema:recent-color)
(sm/register! ::color-attrs schema:color-attrs)

@@ -119,10 +126,13 @@
(sm/lazy-validator schema:color))

(def check-color
(sm/check-fn schema:color :hint "expected valid color struct"))
(sm/check-fn schema:color :hint "expected valid color"))

(def check-library-color
(sm/check-fn schema:library-color :hint "expected valid library color"))

(def check-recent-color
(sm/check-fn schema:recent-color))
(sm/check-fn schema:recent-color :hint "expected valid recent color"))

;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; HELPERS

@@ -18,19 +18,19 @@
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;

(def schema:component
[:merge
[:map
[:id ::sm/uuid]
[:name :string]
[:path {:optional true} [:maybe :string]]
[:modified-at {:optional true} ::sm/inst]
[:objects {:gen/max 10 :optional true} ::ctp/objects]
[:main-instance-id ::sm/uuid]
[:main-instance-page ::sm/uuid]
[:plugin-data {:optional true} ::ctpg/plugin-data]]
::ctv/variant-component])

(sm/register! ::component schema:component)
(sm/register!
^{::sm/type ::component}
[:merge
[:map
[:id ::sm/uuid]
[:name :string]
[:path {:optional true} [:maybe :string]]
[:modified-at {:optional true} ::sm/inst]
[:objects {:gen/max 10 :optional true} ctp/schema:objects]
[:main-instance-id ::sm/uuid]
[:main-instance-page ::sm/uuid]
[:plugin-data {:optional true} ctpg/schema:plugin-data]]
ctv/schema:variant-component]))

(def check-component
(sm/check-fn schema:component))
@@ -287,7 +287,7 @@

(defn get-component-root
[component]
(if (true? (:main-instance-id component))
(if (some? (:main-instance-id component))
(get-in component [:objects (:main-instance-id component)])
(get-in component [:objects (:id component)])))

@@ -18,8 +18,10 @@
[app.common.types.plugins :as ctpg]
[app.common.types.shape-tree :as ctst]
[app.common.types.shape.layout :as ctl]
[app.common.types.text :as cttx]
[app.common.types.token :as ctt]
[app.common.uuid :as uuid]))
[app.common.uuid :as uuid]
[clojure.set :as set]))

;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; SCHEMA
@@ -62,9 +64,9 @@

(defn get-container
[file type id]
(dm/assert! (map? file))
(dm/assert! (contains? valid-container-types type))
(dm/assert! (uuid? id))
(assert (map? file))
(assert (contains? valid-container-types type))
(assert (uuid? id))

(-> (if (= type :page)
(ctpl/get-page file id)
@@ -534,8 +536,6 @@
indicating if shape is touched or not."
[shape attr val & {:keys [ignore-touched ignore-geometry]}]
(let [group (get ctk/sync-attrs attr)
token-groups (when (= attr :applied-tokens)
(get-token-groups shape val))
shape-val (get shape attr)

ignore?
@@ -566,22 +566,33 @@
(gsh/close-attrs? attr val shape-val))

touched?
(and group (not equal?) (not (and ignore-geometry is-geometry?)))]
(and group
(not equal?)
(not (and ignore-geometry is-geometry?)))

content-diff-type (when (and (= (:type shape) :text) (= attr :content))
(cttx/get-diff-type (:content shape) val))

token-groups (if (= attr :applied-tokens)
(get-token-groups shape val)
#{})

groups (cond-> token-groups
(and group (not equal?))
(set/union #{group} content-diff-type))]
(cond-> shape
;; Depending on the origin of the attribute change, we may or may not need to
;; set the "touched" flag for the group the attribute belongs to.
;; In some cases we need to ignore touched only if the attribute is
;; geometric (position, width or transformation).
(and in-copy?
(or (and group (not equal?)) (seq token-groups))
(not ignore?) (not (and ignore-geometry is-geometry?)))
(not-empty groups)
(not ignore?)
(not (and ignore-geometry is-geometry?)))
(-> (update :touched (fn [touched]
(reduce #(ctk/set-touched-group %1 %2)
touched
(if group
(cons group token-groups)
token-groups))))
groups)))
(dissoc :remote-synced))

(nil? val)

@@ -32,24 +32,31 @@
[app.common.uuid :as uuid]
[cuerdas.core :as str]))

;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; CONSTANTS
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;

(defonce BASE-FONT-SIZE "16px")

;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; SCHEMA
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;

(def schema:media
"A schema that represents the file media object"
[:map {:title "FileMediaObject"}
[:map {:title "FileMedia"}
[:id ::sm/uuid]
[:created-at ::sm/inst]
[:created-at {:optional true} ::sm/inst]
[:deleted-at {:optional true} ::sm/inst]
[:name :string]
[:width ::sm/safe-int]
[:height ::sm/safe-int]
[:mtype :string]
[:file-id {:optional true} ::sm/uuid]
[:media-id ::sm/uuid]
[:file-id {:optional true} ::sm/uuid]
[:thumbnail-id {:optional true} ::sm/uuid]
[:is-local :boolean]])
[:is-local {:optional true} :boolean]])

(def schema:colors
[:map-of {:gen/max 5} ::sm/uuid ::ctc/color])
@@ -65,7 +72,8 @@

(def schema:options
[:map {:title "FileOptions"}
[:components-v2 {:optional true} ::sm/boolean]])
[:components-v2 {:optional true} ::sm/boolean]
[:base-font-size {:optional true} :string]])

(def schema:data
[:map {:title "FileData"}
@@ -83,6 +91,7 @@
because sometimes we want to validate file without the data."
[:map {:title "file"}
[:id ::sm/uuid]
[:name :string]
[:revn {:optional true} :int]
[:vern {:optional true} :int]
[:created-at {:optional true} ::sm/inst]
@@ -102,12 +111,13 @@
(sm/register! ::colors schema:colors)
(sm/register! ::typographies schema:typographies)

(sm/register! ::media-object schema:media)
(def check-file
(sm/check-fn schema:file :hint "check error on validating file"))

(def check-file-data!
(sm/check-fn ::data))
(def check-file-data
(sm/check-fn schema:data))

(def check-media-object!
(def check-file-media
(sm/check-fn schema:media))

;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
@@ -127,40 +137,44 @@
(ctp/make-empty-page {:id page-id :name "Page 1"}))]

(cond-> (assoc empty-file-data :id file-id)
(some? page-id)
(some? page)
(ctpl/add-page page)

:always
(assoc-in [:options :components-v2] true)))))
(update :options merge {:components-v2 true
:base-font-size BASE-FONT-SIZE})))))

(defn make-file
[{:keys [id project-id name revn is-shared features
ignore-sync-until modified-at deleted-at
create-page page-id]
:or {is-shared false revn 0 create-page true}}]
[{:keys [id project-id name revn is-shared features migrations
ignore-sync-until modified-at deleted-at]
:or {is-shared false revn 0}}

& {:keys [create-page page-id]
:or {create-page true}}]

(let [id (or id (uuid/next))

data (if create-page
(if page-id
(make-file-data id page-id)
(make-file-data id))
(make-file-data id nil))

file {:id id
:project-id project-id
:name name
:revn revn
:vern 0
:is-shared is-shared
:version version
:data data
:features features
:ignore-sync-until ignore-sync-until
:modified-at modified-at
:deleted-at deleted-at}]
file (d/without-nils
{:id id
:project-id project-id
:name name
:revn revn
:vern 0
:is-shared is-shared
:version version
:data data
:features features
:migrations migrations
:ignore-sync-until ignore-sync-until
:modified-at modified-at
:deleted-at deleted-at})]

(d/without-nils file)))
(check-file file)))

;; Helpers

@@ -285,7 +299,6 @@
(ctkl/get-component (:data component-file) (:component-id head-shape) include-deleted?))]
(when (some? component)
(get-ref-shape (:data component-file) component shape :with-context? with-context?))))]

(some find-ref-shape-in-head (ctn/get-parent-heads (:objects container) shape))))

(defn advance-shape-ref
@@ -1023,3 +1036,14 @@

(-> file
(update-in [:data :pages-index] detach-pages))))

;; Base font size

(defn get-base-font-size
"Retrieve the base font size value or token reference."
[file-data]
(get-in file-data [:options :base-font-size] BASE-FONT-SIZE))

(defn set-base-font-size
[file-data base-font-size]
(assoc-in file-data [:options :base-font-size] base-font-size))
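;; Editor's usage sketch (not part of the diff): the new helpers read and write
;; the :base-font-size file option, falling back to BASE-FONT-SIZE ("16px").
(comment
  (get-base-font-size {:options {}})
  ;; => "16px"
  (-> {:options {}}
      (set-base-font-size "18px")
      (get-base-font-size))
  ;; => "18px"
  )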
@@ -70,7 +70,7 @@
(def valid-guide?
(sm/lazy-validator schema:guide))

(def check-page!
(def check-page
(sm/check-fn schema:page))

;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
@@ -82,8 +82,7 @@
(def root uuid/zero)

(def empty-page-data
{:options {}
:objects {root
{:objects {root
(cts/setup-shape {:id root
:type :frame
:parent-id root
@@ -91,10 +90,12 @@
:name "Root Frame"})}})

(defn make-empty-page
[{:keys [id name]}]
[{:keys [id name background]}]
(-> empty-page-data
(assoc :id (or id (uuid/next)))
(assoc :name (or name "Page 1"))))
(assoc :name (d/nilv name "Page 1"))
(cond-> background
(assoc :background background))))

(defn get-frame-flow
[flows frame-id]
common/src/app/common/types/path.cljc (new file, 234 lines)
@@ -0,0 +1,234 @@
;; This Source Code Form is subject to the terms of the Mozilla Public
;; License, v. 2.0. If a copy of the MPL was not distributed with this
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
;;
;; Copyright (c) KALEIDOS INC

(ns app.common.types.path
(:require
[app.common.data :as d]
[app.common.data.macros :as dm]
[app.common.files.helpers :as cpf]
[app.common.geom.matrix :as gmt]
[app.common.geom.point :as gpt]
[app.common.geom.rect :as grc]
[app.common.geom.shapes.common :as gco]
[app.common.types.path.bool :as bool]
[app.common.types.path.helpers :as helpers]
[app.common.types.path.impl :as impl]
[app.common.types.path.segment :as segment]
[app.common.types.path.shape-to-path :as stp]
[app.common.types.path.subpath :as subpath]))

#?(:clj (set! *warn-on-reflection* true))

(def ^:const bool-group-style-properties bool/group-style-properties)
(def ^:const bool-style-properties bool/style-properties)
(def ^:const default-bool-fills bool/default-fills)

;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; TRANSFORMATIONS
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;

(defn content?
[o]
(impl/path-data? o))

(defn content
"Create path content from plain data or bytes; returns it unchanged if it
is already a PathData instance"
[data]
(impl/path-data data))

(defn from-bytes
[data]
(impl/from-bytes data))

(defn from-string
[data]
(impl/from-string data))

(defn check-path-content
[content]
(impl/check-content-like content))

(defn get-byte-size
"Get byte size of a path content"
[content]
(impl/-get-byte-size content))

(defn write-to
[content buffer offset]
(impl/-write-to content buffer offset))

;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; TRANSFORMATIONS
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;

(defn close-subpaths
"Given a content, searches the path for subpaths that can create closed
loops and merges them; returns the transformed path content as a PathData
instance"
[content]
(-> (subpath/close-subpaths content)
(impl/from-plain)))

(defn apply-content-modifiers
"Apply delta modifiers over the path content"
[content modifiers]
(assert (impl/check-content-like content))

(letfn [(apply-to-index [content [index params]]
(if (contains? content index)
(cond-> content
(and
(or (:c1x params) (:c1y params) (:c2x params) (:c2y params))
(= :line-to (get-in content [index :command])))

(-> (assoc-in [index :command] :curve-to)
(assoc-in [index :params]
(helpers/make-curve-params
(get-in content [index :params])
(get-in content [(dec index) :params]))))

(:x params) (update-in [index :params :x] + (:x params))
(:y params) (update-in [index :params :y] + (:y params))

(:c1x params) (update-in [index :params :c1x] + (:c1x params))
(:c1y params) (update-in [index :params :c1y] + (:c1y params))

(:c2x params) (update-in [index :params :c2x] + (:c2x params))
(:c2y params) (update-in [index :params :c2y] + (:c2y params)))
content))]

(impl/path-data
(reduce apply-to-index (vec content) modifiers))))

(defn transform-content
"Applies a transformation matrix over content and returns a new
content as PathData instance."
[content transform]
(segment/transform-content content transform))

(defn move-content
[content move-vec]
(if (gpt/zero? move-vec)
content
(segment/move-content content move-vec)))

(defn update-geometry
"Update shape with new geometry calculated from provided content"
([shape content]
(update-geometry (assoc shape :content content)))
([shape]
(let [flip-x
(get shape :flip-x)

flip-y
(get shape :flip-y)

;; NOTE: we ensure that content is PathData instance
content
(impl/path-data
(get shape :content))

;; Ensure plain format once
transform
(cond-> (:transform shape (gmt/matrix))
flip-x (gmt/scale (gpt/point -1 1))
flip-y (gmt/scale (gpt/point 1 -1)))

transform-inverse
(cond-> (gmt/matrix)
flip-x (gmt/scale (gpt/point -1 1))
flip-y (gmt/scale (gpt/point 1 -1))
:always (gmt/multiply (:transform-inverse shape (gmt/matrix))))

center
(or (some-> (dm/get-prop shape :selrect) grc/rect->center)
(segment/content-center content))

base-content
(segment/transform-content content (gmt/transform-in center transform-inverse))

;; Calculates the new selrect with points given the old center
points
(-> (segment/content->selrect base-content)
(grc/rect->points)
(gco/transform-points center transform))

points-center
(gco/points->center points)

;; Points is now the selrect but the center is different so we can create the selrect
;; through points
selrect
(-> points
(gco/transform-points points-center transform-inverse)
(grc/points->rect))]

(-> shape
(assoc :content content)
(assoc :points points)
(assoc :selrect selrect)))))

;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; PATH SHAPE HELPERS
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;

(defn get-points
"Returns points for the given segment, faster version of
the `content->points`."
[content]
(some-> content segment/get-points))

(defn- calc-bool-content*
"Calculate the boolean content from shape and objects. Returns plain
vector of segments"
[shape objects]
(let [extract-content-xf
(comp (map (d/getf objects))
(remove :hidden)
(remove cpf/svg-raw-shape?)
(map #(stp/convert-to-path % objects))
(map :content))

contents
(sequence extract-content-xf (:shapes shape))]

(bool/calculate-content (:bool-type shape) contents)))

(defn calc-bool-content
"Calculate the boolean content from shape and objects. Returns a
packed PathData instance"
[shape objects]
(-> (calc-bool-content* shape objects)
(impl/path-data)))

(defn update-bool-shape
"Calculates the selrect+points for the boolean shape"
[shape objects]
(let [content (calc-bool-content shape objects)
shape (assoc shape :content content)]
(update-geometry shape)))

(defn shape-with-open-path?
[shape]
(let [svg? (contains? shape :svg-attrs)
;; Don't close subpaths for imported SVGs
maybe-close (if svg? identity subpath/close-subpaths)]
(and (= :path (:type shape))
(not (->> shape
:content
(maybe-close)
(subpath/get-subpaths)
(every? subpath/is-closed?))))))

(defn convert-to-path
"Transform a shape to a path shape"
([shape]
(convert-to-path shape {}))
([shape objects]
(-> (stp/convert-to-path shape objects)
(update :content impl/path-data))))

@@ -4,15 +4,27 @@
;;
;; Copyright (c) KALEIDOS INC

(ns app.common.svg.path.bool
(ns app.common.types.path.bool
(:require
[app.common.colors :as clr]
[app.common.data :as d]
[app.common.geom.point :as gpt]
[app.common.geom.rect :as grc]
[app.common.geom.shapes.path :as gsp]
[app.common.math :as mth]
[app.common.svg.path.command :as upc]
[app.common.svg.path.subpath :as ups]))
[app.common.types.path.helpers :as helpers]
[app.common.types.path.segment :as segment]
[app.common.types.path.subpath :as subpath]))

(def default-fills
[{:fill-color clr/black}])

(def group-style-properties
#{:shadow :blur})

;; FIXME: revisit
(def style-properties
(into group-style-properties
[:fills :strokes]))

(defn add-previous
([content]
@@ -25,87 +37,92 @@
(assoc :prev first)

(some? prev)
(assoc :prev (gsp/command->point prev))))))))
(assoc :prev (helpers/segment->point prev))))))))

(defn close-paths
"Removes the :close-path commands and replaces them with line-to so we can
calculate the intersections"
[content]

(loop [head (first content)
content (rest content)
result []
last-move nil
last-p nil]
(loop [segments (seq content)
result []
last-move nil
last-point nil]
(if-let [segment (first segments)]
(let [point
(helpers/segment->point segment)

(if (nil? head)
result
(let [head-p (gsp/command->point head)
head (cond
(and (= :close-path (:command head))
(or (nil? last-p) ;; Ignore consecutive close-paths
(< (gpt/distance last-p last-move) 0.01)))
nil
segment
(cond
(and (= :close-path (:command segment))
(or (nil? last-point) ;; Ignore consecutive close-paths
(< (gpt/distance last-point last-move) 0.01)))
nil

(= :close-path (:command head))
(upc/make-line-to last-move)
(= :close-path (:command segment))
(helpers/make-line-to last-move)

:else
head)]
:else
segment)]

(recur (first content)
(rest content)
(cond-> result (some? head) (conj head))
(if (= :move-to (:command head))
head-p
(recur (rest segments)
(cond-> result (some? segment) (conj segment))
(if (= :move-to (:command segment))
point
last-move)
head-p)))))
point))
result)))

(defn- split-command
[cmd values]
(case (:command cmd)
:line-to (gsp/split-line-to-ranges (:prev cmd) cmd values)
:curve-to (gsp/split-curve-to-ranges (:prev cmd) cmd values)
:line-to (helpers/split-line-to-ranges (:prev cmd) cmd values)
:curve-to (helpers/split-curve-to-ranges (:prev cmd) cmd values)
[cmd]))

(defn split-ts [seg-1 seg-2]
(cond
(and (= :line-to (:command seg-1))
(= :line-to (:command seg-2)))
(gsp/line-line-intersect (gsp/command->line seg-1) (gsp/command->line seg-2))
(defn- split-ts
[seg-1 seg-2]
(let [cmd-1 (get seg-1 :command)
cmd-2 (get seg-2 :command)]
(cond
(and (= :line-to cmd-1)
(= :line-to cmd-2))
(helpers/line-line-intersect (helpers/command->line seg-1)
(helpers/command->line seg-2))

(and (= :line-to (:command seg-1))
(= :curve-to (:command seg-2)))
(gsp/line-curve-intersect (gsp/command->line seg-1) (gsp/command->bezier seg-2))
(and (= :line-to cmd-1)
(= :curve-to cmd-2))
(helpers/line-curve-intersect (helpers/command->line seg-1)
(helpers/command->bezier seg-2))

(and (= :curve-to (:command seg-1))
(= :line-to (:command seg-2)))
(let [[seg-2' seg-1']
(gsp/line-curve-intersect (gsp/command->line seg-2) (gsp/command->bezier seg-1))]
;; Need to reverse because we send the arguments reversed
[seg-1' seg-2'])
(and (= :curve-to cmd-1)
(= :line-to cmd-2))
(let [[seg-2' seg-1']
(helpers/line-curve-intersect (helpers/command->line seg-2)
(helpers/command->bezier seg-1))]
;; Need to reverse because we send the arguments reversed
[seg-1' seg-2'])

(and (= :curve-to (:command seg-1))
(= :curve-to (:command seg-2)))
(gsp/curve-curve-intersect (gsp/command->bezier seg-1) (gsp/command->bezier seg-2))
(and (= :curve-to cmd-1)
(= :curve-to cmd-2))
(helpers/curve-curve-intersect (helpers/command->bezier seg-1)
(helpers/command->bezier seg-2))

:else
[[] []]))
:else
[[] []])))

(defn content-intersect-split
[content-a content-b sr-a sr-b]

(let [command->selrect (memoize gsp/command->selrect)]
(let [command->selrect (memoize helpers/command->selrect)]

(letfn [(overlap-segment-selrect?
[segment selrect]
(letfn [(overlap-segment-selrect? [segment selrect]
(if (= :move-to (:command segment))
false
(let [r1 (command->selrect segment)]
(grc/overlaps-rects? r1 selrect))))

(overlap-segments?
[seg-1 seg-2]
(overlap-segments? [seg-1 seg-2]
(if (or (= :move-to (:command seg-1))
(= :move-to (:command seg-2)))
false
@@ -113,17 +130,14 @@
r2 (command->selrect seg-2)]
(grc/overlaps-rects? r1 r2))))

(split
[seg-1 seg-2]
(split [seg-1 seg-2]
(if (not (overlap-segments? seg-1 seg-2))
[seg-1]
(let [[ts-seg-1 _] (split-ts seg-1 seg-2)]
(-> (split-command seg-1 ts-seg-1)
(add-previous (:prev seg-1))))))

(split-segment-on-content
[segment content content-sr]

(split-segment-on-content [segment content content-sr]
(if (overlap-segment-selrect? segment content-sr)
(->> content
(filter #(overlap-segments? segment %))
@@ -133,8 +147,7 @@
[segment]))
[segment]))

(split-content
[content-a content-b sr-b]
(split-content [content-a content-b sr-b]
(into []
(mapcat #(split-segment-on-content % content-b sr-b))
content-a))]
@@ -151,28 +164,28 @@
[segment content content-sr content-geom]

(let [point (case (:command segment)
:line-to (-> (gsp/command->line segment)
(gsp/line-values 0.5))
:line-to (-> (helpers/command->line segment)
(helpers/line-values 0.5))

:curve-to (-> (gsp/command->bezier segment)
(gsp/curve-values 0.5)))]
:curve-to (-> (helpers/command->bezier segment)
(helpers/curve-values 0.5)))]

(and (grc/contains-point? content-sr point)
(or
(gsp/is-point-in-geom-data? point content-geom)
(gsp/is-point-in-border? point content)))))
(helpers/is-point-in-geom-data? point content-geom)
(helpers/is-point-in-border? point content)))))

(defn inside-segment?
[segment content-sr content-geom]
(let [point (case (:command segment)
:line-to (-> (gsp/command->line segment)
(gsp/line-values 0.5))
:line-to (-> (helpers/command->line segment)
(helpers/line-values 0.5))

:curve-to (-> (gsp/command->bezier segment)
(gsp/curve-values 0.5)))]
:curve-to (-> (helpers/command->bezier segment)
(helpers/curve-values 0.5)))]

(and (grc/contains-point? content-sr point)
(gsp/is-point-in-geom-data? point content-geom))))
(helpers/is-point-in-geom-data? point content-geom))))

(defn overlap-segment?
"Finds if the current segment is overlapping against other
@@ -185,8 +198,8 @@
(contains? #{:line-to :curve-to} (:command segment)))

(case (:command segment)
:line-to (let [[p1 q1] (gsp/command->line segment)
[p2 q2] (gsp/command->line other)]
:line-to (let [[p1 q1] (helpers/command->line segment)
[p2 q2] (helpers/command->line other)]

(when (or (and (< (gpt/distance p1 p2) 0.1)
(< (gpt/distance q1 q2) 0.1))
@@ -194,8 +207,8 @@
(< (gpt/distance q1 p2) 0.1)))
[segment other]))

:curve-to (let [[p1 q1 h11 h21] (gsp/command->bezier segment)
[p2 q2 h12 h22] (gsp/command->bezier other)]
:curve-to (let [[p1 q1 h11 h21] (helpers/command->bezier segment)
[p2 q2 h12 h22] (helpers/command->bezier other)]

(when (or (and (< (gpt/distance p1 p2) 0.1)
(< (gpt/distance q1 q2) 0.1)
@@ -227,11 +240,11 @@
result

(let [result (if (not= (:prev current) prev)
(conj result (upc/make-move-to (:prev current)))
(conj result (helpers/make-move-to (:prev current)))
result)]
(recur (first content)
(rest content)
(gsp/command->point current)
(helpers/segment->point current)
(conj result (dissoc current :prev)))))))

(defn remove-duplicated-segments
@@ -273,20 +286,43 @@
segments
result))))))

(defn close-content
[content]
(into []
(mapcat :data)
(->> content
(subpath/close-subpaths)
(subpath/get-subpaths))))

(defn- content->geom-data
[content]

(->> content
(close-content)
(filter #(not= (= :line-to (:command %))
(= :curve-to (:command %))))
(mapv (fn [segment]
{:command (:command segment)
:segment segment
:geom (if (= :line-to (:command segment))
(helpers/command->line segment)
(helpers/command->bezier segment))
:selrect (helpers/command->selrect segment)}))))

(defn create-union [content-a content-a-split content-b content-b-split sr-a sr-b]
;; Pick all segments in content-a that are not inside content-b
;; Pick all segments in content-b that are not inside content-a
(let [content-a-geom (gsp/content->geom-data content-a)
content-b-geom (gsp/content->geom-data content-b)
(let [content-a-geom (content->geom-data content-a)
content-b-geom (content->geom-data content-b)

content
(concat
(->> content-a-split (filter #(not (contains-segment? % content-b sr-b content-b-geom))))
(->> content-b-split (filter #(not (contains-segment? % content-a sr-a content-a-geom)))))

content-geom (gsp/content->geom-data content)
content-geom (content->geom-data content)

content-sr (gsp/content->selrect (fix-move-to content))
content-sr (segment/content->selrect (fix-move-to content))

;; Overlapping segments should be added when they are part of the border
border-content
@@ -302,8 +338,8 @@
;; Pick all segments in content-a that are not inside content-b
;; Pick all segments in content b that are inside content-a
;; removing overlapping
(let [content-a-geom (gsp/content->geom-data content-a)
content-b-geom (gsp/content->geom-data content-b)]
(let [content-a-geom (content->geom-data content-a)
content-b-geom (content->geom-data content-b)]
(d/concat-vec
(->> content-a-split (filter #(not (contains-segment? % content-b sr-b content-b-geom))))

@@ -315,13 +351,12 @@
(defn create-intersection [content-a content-a-split content-b content-b-split sr-a sr-b]
;; Pick all segments in content-a that are inside content-b
;; Pick all segments in content-b that are inside content-a
(let [content-a-geom (gsp/content->geom-data content-a)
content-b-geom (gsp/content->geom-data content-b)]
(let [content-a-geom (content->geom-data content-a)
content-b-geom (content->geom-data content-b)]
(d/concat-vec
(->> content-a-split (filter #(contains-segment? % content-b sr-b content-b-geom)))
(->> content-b-split (filter #(contains-segment? % content-a sr-a content-a-geom))))))

(defn create-exclusion [content-a content-b]
;; Pick all segments
(d/concat-vec content-a content-b))
@@ -331,26 +366,37 @@

(let [;; We need to reverse the second path when making a difference/intersection/exclude
;; and both shapes are in the same direction
should-reverse? (and (not= :union bool-type)
(= (ups/clockwise? content-b)
(ups/clockwise? content-a)))
should-reverse?
(and (not= :union bool-type)
(= (subpath/clockwise? content-b)
(subpath/clockwise? content-a)))

content-a (-> content-a
(close-paths)
(add-previous))
content-a
(-> content-a
(close-paths)
(add-previous))

content-b (-> content-b
(close-paths)
(cond-> should-reverse? (ups/reverse-content))
(add-previous))
content-b
(-> content-b
(close-paths)
(cond-> should-reverse? (subpath/reverse-content))
(add-previous))

sr-a (gsp/content->selrect content-a)
sr-b (gsp/content->selrect content-b)
sr-a
(segment/content->selrect content-a)

sr-b
(segment/content->selrect content-b)

;; Split content in new segments in the intersection with the other path
[content-a-split content-b-split] (content-intersect-split content-a content-b sr-a sr-b)
content-a-split (->> content-a-split add-previous (filter is-segment?))
content-b-split (->> content-b-split add-previous (filter is-segment?))
[content-a-split content-b-split]
(content-intersect-split content-a content-b sr-a sr-b)

content-a-split
(->> content-a-split add-previous (filter is-segment?))

content-b-split
(->> content-b-split add-previous (filter is-segment?))

content
(case bool-type
@@ -362,14 +408,16 @@
(-> content
remove-duplicated-segments
fix-move-to
ups/close-subpaths)))
subpath/close-subpaths)))

(defn content-bool
(defn calculate-content
"Create a bool content from a collection of contents and specified
type."
[bool-type contents]
;; We apply the boolean operation to each pair and the result to the next
;; element
(if (seq contents)
(->> contents
(reduce (partial content-bool-pair bool-type))
(into []))
(vec))
[]))
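;; Editor's usage sketch (not part of the diff): calculate-content folds
;; content-bool-pair over the contents, so the boolean operation is applied
;; pairwise, left to right. Indicative only.
(comment
  (calculate-content :union [content-a content-b content-c])
  ;; behaves like:
  ;; (content-bool-pair :union (content-bool-pair :union content-a content-b) content-c)
  )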

File diff suppressed because it is too large
common/src/app/common/types/path/impl.cljc (new file, 782 lines)
@@ -0,0 +1,782 @@
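;; Editor's note (not part of the new file): the reader/writer code below packs
;; every path segment into a fixed SEGMENT-BYTE-SIZE (28 byte) record. Inferred
;; from the offsets used by read-short/read-float in read-segment and to-string:
;;
;;   offset  0  int16    segment type (read-segment treats 1 as :move-to,
;;                       2 as :line-to, 3 as :curve-to, 4 as :close-path)
;;   offset  4  float32  c1x        offset  8  float32  c1y
;;   offset 12  float32  c2x        offset 16  float32  c2y
;;   offset 20  float32  x          offset 24  float32  y
;;
;; Note that impl-walk/impl-reduce in this listing map 1 to :line-to and 2 to
;; :move-to, which does not match read-segment; the discrepancy is preserved
;; below as found.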
|
||||
;; This Source Code Form is subject to the terms of the Mozilla Public
|
||||
;; License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
;;
|
||||
;; Copyright (c) KALEIDOS INC
|
||||
|
||||
(ns app.common.types.path.impl
|
||||
"Contains schemas and data type implementation for PathData binary
|
||||
and plain formats"
|
||||
#?(:cljs
|
||||
(:require-macros [app.common.types.path.impl :refer [read-float read-short write-float write-short]]))
|
||||
(:refer-clojure :exclude [-lookup -reduce])
|
||||
(:require
|
||||
#?(:clj [app.common.fressian :as fres])
|
||||
#?(:clj [clojure.data.json :as json])
|
||||
#?(:cljs [app.common.weak-map :as weak-map])
|
||||
[app.common.data.macros :as dm]
|
||||
[app.common.schema :as sm]
|
||||
[app.common.schema.generators :as sg]
|
||||
[app.common.svg.path :as svg.path]
|
||||
[app.common.transit :as t]
|
||||
[app.common.types.path :as-alias path])
|
||||
(:import
|
||||
#?(:cljs [goog.string StringBuffer]
|
||||
:clj [java.nio ByteBuffer ByteOrder])))
|
||||
|
||||
#?(:clj (set! *warn-on-reflection* true))
|
||||
|
||||
(def ^:const SEGMENT-BYTE-SIZE 28)
|
||||
|
||||
(defprotocol IPathData
|
||||
(-write-to [_ buffer offset] "write the content to the specified buffer")
|
||||
(-get-byte-size [_] "get byte size"))
|
||||
|
||||
(defprotocol ITransformable
|
||||
(-transform [_ m] "apply a transform")
|
||||
(-lookup [_ index f])
|
||||
(-walk [_ f initial])
|
||||
(-reduce [_ f initial]))
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; IMPL HELPERS
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
(defmacro read-short
|
||||
[target offset]
|
||||
(if (:ns &env)
|
||||
`(.getInt16 ~target ~offset true)
|
||||
(let [target (with-meta target {:tag 'java.nio.ByteBuffer})]
|
||||
`(.getShort ~target ~offset))))
|
||||
|
||||
(defmacro read-float
|
||||
[target offset]
|
||||
(if (:ns &env)
|
||||
`(.getFloat32 ~target ~offset true)
|
||||
(let [target (with-meta target {:tag 'java.nio.ByteBuffer})]
|
||||
`(double (.getFloat ~target ~offset)))))
|
||||
|
||||
(defmacro write-float
|
||||
[target offset value]
|
||||
(if (:ns &env)
|
||||
`(.setFloat32 ~target ~offset ~value true)
|
||||
(let [target (with-meta target {:tag 'java.nio.ByteBuffer})]
|
||||
`(.putFloat ~target ~offset ~value))))
|
||||
|
||||
(defmacro write-short
|
||||
[target offset value]
|
||||
(if (:ns &env)
|
||||
`(.setInt16 ~target ~offset ~value true)
|
||||
(let [target (with-meta target {:tag 'java.nio.ByteBuffer})]
|
||||
`(.putShort ~target ~offset ~value))))
|
||||
|
||||
(defmacro with-cache
|
||||
"A helper macro that facilitates cache handling for content
|
||||
instance, only relevant on CLJS"
|
||||
[target key & expr]
|
||||
(if (:ns &env)
|
||||
(let [cache (gensym "cache-")
|
||||
target (with-meta target {:tag 'js})]
|
||||
`(let [~cache (.-cache ~target)
|
||||
~'result (.get ~cache ~key)]
|
||||
(if ~'result
|
||||
(do
|
||||
~'result)
|
||||
(let [~'result (do ~@expr)]
|
||||
(.set ~cache ~key ~'result)
|
||||
~'result))))
|
||||
`(do ~@expr)))
|
||||
|
||||
(defn- allocate
|
||||
[n-segments]
|
||||
#?(:clj (let [buffer (ByteBuffer/allocate (* n-segments SEGMENT-BYTE-SIZE))]
|
||||
(.order buffer ByteOrder/LITTLE_ENDIAN))
|
||||
:cljs (new js/ArrayBuffer (* n-segments SEGMENT-BYTE-SIZE))))
|
||||
|
||||
(defn- clone-buffer
|
||||
[buffer]
|
||||
#?(:clj
|
||||
(let [src (.array ^ByteBuffer buffer)
|
||||
len (alength ^bytes src)
|
||||
dst (byte-array len)]
|
||||
(System/arraycopy src 0 dst 0 len)
|
||||
(let [buffer (ByteBuffer/wrap dst)]
|
||||
(.order buffer ByteOrder/LITTLE_ENDIAN)))
|
||||
:cljs
|
||||
(let [src-view (js/Uint32Array. buffer)
|
||||
dst-buff (js/ArrayBuffer. (.-byteLength buffer))
|
||||
dst-view (js/Uint32Array. dst-buff)]
|
||||
(.set dst-view src-view)
|
||||
dst-buff)))
|
||||
|
||||
(defn- impl-transform-segment
|
||||
"Apply a transformation to a segment located under specified offset"
|
||||
[buffer offset a b c d e f]
|
||||
(let [t (read-short buffer offset)]
|
||||
(case t
|
||||
(1 2)
|
||||
(let [x (read-float buffer (+ offset 20))
|
||||
y (read-float buffer (+ offset 24))
|
||||
x (+ (* x a) (* y c) e)
|
||||
y (+ (* x b) (* y d) f)]
|
||||
(write-float buffer (+ offset 20) x)
|
||||
(write-float buffer (+ offset 24) y))
|
||||
|
||||
3
|
||||
(let [c1x (read-float buffer (+ offset 4))
|
||||
c1y (read-float buffer (+ offset 8))
|
||||
c2x (read-float buffer (+ offset 12))
|
||||
c2y (read-float buffer (+ offset 16))
|
||||
x (read-float buffer (+ offset 20))
|
||||
y (read-float buffer (+ offset 24))
|
||||
|
||||
c1x (+ (* c1x a) (* c1y c) e)
|
||||
c1y (+ (* c1x b) (* c1y d) f)
|
||||
c2x (+ (* c2x a) (* c2y c) e)
|
||||
c2y (+ (* c2x b) (* c2y d) f)
|
||||
x (+ (* x a) (* y c) e)
|
||||
y (+ (* x b) (* y d) f)]
|
||||
|
||||
(write-float buffer (+ offset 4) c1x)
|
||||
(write-float buffer (+ offset 8) c1y)
|
||||
(write-float buffer (+ offset 12) c2x)
|
||||
(write-float buffer (+ offset 16) c2y)
|
||||
(write-float buffer (+ offset 20) x)
|
||||
(write-float buffer (+ offset 24) y))
|
||||
|
||||
nil)))
|
||||
|
||||
(defn- impl-transform
|
||||
[buffer m size]
|
||||
(let [a (dm/get-prop m :a)
|
||||
b (dm/get-prop m :b)
|
||||
c (dm/get-prop m :c)
|
||||
d (dm/get-prop m :d)
|
||||
e (dm/get-prop m :e)
|
||||
f (dm/get-prop m :f)]
|
||||
(loop [index 0]
|
||||
(when (< index size)
|
||||
(let [offset (* index SEGMENT-BYTE-SIZE)]
|
||||
(impl-transform-segment buffer offset a b c d e f)
|
||||
(recur (inc index)))))))
|
||||
|
||||
(defn- impl-walk
|
||||
[buffer f initial size]
|
||||
(loop [index 0
|
||||
result (transient initial)]
|
||||
(if (< index size)
|
||||
(let [offset (* index SEGMENT-BYTE-SIZE)
|
||||
type (read-short buffer offset)
|
||||
c1x (read-float buffer (+ offset 4))
|
||||
c1y (read-float buffer (+ offset 8))
|
||||
c2x (read-float buffer (+ offset 12))
|
||||
c2y (read-float buffer (+ offset 16))
|
||||
x (read-float buffer (+ offset 20))
|
||||
y (read-float buffer (+ offset 24))
|
||||
type (case type
|
||||
1 :line-to
|
||||
2 :move-to
|
||||
3 :curve-to
|
||||
4 :close-path)
|
||||
res (f type c1x c1y c2x c2y x y)]
|
||||
(recur (inc index)
|
||||
(if (some? res)
|
||||
(conj! result res)
|
||||
result)))
|
||||
(persistent! result))))
|
||||
|
||||
(defn impl-reduce
|
||||
[buffer f initial size]
|
||||
(loop [index 0
|
||||
result initial]
|
||||
(if (< index size)
|
||||
(let [offset (* index SEGMENT-BYTE-SIZE)
|
||||
type (read-short buffer offset)
|
||||
c1x (read-float buffer (+ offset 4))
|
||||
c1y (read-float buffer (+ offset 8))
|
||||
c2x (read-float buffer (+ offset 12))
|
||||
c2y (read-float buffer (+ offset 16))
|
||||
x (read-float buffer (+ offset 20))
|
||||
y (read-float buffer (+ offset 24))
|
||||
type (case type
|
||||
1 :line-to
|
||||
2 :move-to
|
||||
3 :curve-to
|
||||
4 :close-path)
|
||||
result (f result index type c1x c1y c2x c2y x y)]
|
||||
(if (reduced? result)
|
||||
result
|
||||
(recur (inc index) result)))
|
||||
result)))
|
||||
|
||||
(defn impl-lookup
|
||||
[buffer index f]
|
||||
(let [offset (* index SEGMENT-BYTE-SIZE)
|
||||
type (read-short buffer offset)
|
||||
c1x (read-float buffer (+ offset 4))
|
||||
c1y (read-float buffer (+ offset 8))
|
||||
c2x (read-float buffer (+ offset 12))
|
||||
c2y (read-float buffer (+ offset 16))
|
||||
x (read-float buffer (+ offset 20))
|
||||
y (read-float buffer (+ offset 24))
|
||||
type (case type
|
||||
1 :line-to
|
||||
2 :move-to
|
||||
3 :curve-to
|
||||
4 :close-path)]
|
||||
#?(:clj (f type c1x c1y c2x c2y x y)
|
||||
:cljs (^function f type c1x c1y c2x c2y x y))))
|
||||
|
||||
(defn- to-string-segment*
|
||||
[buffer offset type ^StringBuilder builder]
|
||||
(case (long type)
|
||||
1 (let [x (read-float buffer (+ offset 20))
|
||||
y (read-float buffer (+ offset 24))]
|
||||
(doto builder
|
||||
(.append "M")
|
||||
(.append x)
|
||||
(.append ",")
|
||||
(.append y)))
|
||||
2 (let [x (read-float buffer (+ offset 20))
|
||||
y (read-float buffer (+ offset 24))]
|
||||
(doto builder
|
||||
(.append "L")
|
||||
(.append x)
|
||||
(.append ",")
|
||||
(.append y)))
|
||||
|
||||
3 (let [c1x (read-float buffer (+ offset 4))
|
||||
c1y (read-float buffer (+ offset 8))
|
||||
c2x (read-float buffer (+ offset 12))
|
||||
c2y (read-float buffer (+ offset 16))
|
||||
x (read-float buffer (+ offset 20))
|
||||
y (read-float buffer (+ offset 24))]
|
||||
(doto builder
|
||||
(.append "C")
|
||||
(.append c1x)
|
||||
(.append ",")
|
||||
(.append c1y)
|
||||
(.append ",")
|
||||
(.append c2x)
|
||||
(.append ",")
|
||||
(.append c2y)
|
||||
(.append ",")
|
||||
(.append x)
|
||||
(.append ",")
|
||||
(.append y)))
|
||||
4 (doto builder
|
||||
(.append "Z"))))
|
||||
|
||||
(defn- to-string
|
||||
"Format the path data structure to string"
|
||||
[buffer size]
|
||||
(let [builder #?(:clj (java.lang.StringBuilder. (int (* size 4)))
|
||||
:cljs (StringBuffer.))]
|
||||
(loop [index 0]
|
||||
(when (< index size)
|
||||
(let [offset (* index SEGMENT-BYTE-SIZE)
|
||||
type (read-short buffer offset)]
|
||||
(to-string-segment* buffer offset type builder)
|
||||
(recur (inc index)))))
|
||||
|
||||
(.toString builder)))
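;; `to-string` produces an SVG-like path string by concatenating the
;; per-segment output of `to-string-segment*`, e.g. something along the
;; lines of "M0,0L10,0C10,0,10,10,0,10Z" (exact float formatting depends
;; on the platform's number printing).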
|
||||
|
||||
(defn- read-segment
|
||||
"Read segment from binary buffer at specified index"
|
||||
[buffer index]
|
||||
(let [offset (* index SEGMENT-BYTE-SIZE)
|
||||
type (read-short buffer offset)]
|
||||
(case (long type)
|
||||
1 (let [x (read-float buffer (+ offset 20))
|
||||
y (read-float buffer (+ offset 24))]
|
||||
{:command :move-to
|
||||
:params {:x (double x)
|
||||
:y (double y)}})
|
||||
|
||||
2 (let [x (read-float buffer (+ offset 20))
|
||||
y (read-float buffer (+ offset 24))]
|
||||
{:command :line-to
|
||||
:params {:x (double x)
|
||||
:y (double y)}})
|
||||
|
||||
3 (let [c1x (read-float buffer (+ offset 4))
|
||||
c1y (read-float buffer (+ offset 8))
|
||||
c2x (read-float buffer (+ offset 12))
|
||||
c2y (read-float buffer (+ offset 16))
|
||||
x (read-float buffer (+ offset 20))
|
||||
y (read-float buffer (+ offset 24))]
|
||||
{:command :curve-to
|
||||
:params {:x (double x)
|
||||
:y (double y)
|
||||
:c1x (double c1x)
|
||||
:c1y (double c1y)
|
||||
:c2x (double c2x)
|
||||
:c2y (double c2y)}})
|
||||
|
||||
4 {:command :close-path
|
||||
:params {}})))
|
||||
|
||||
(defn- in-range?
|
||||
[size i]
|
||||
(and (< i size) (>= i 0)))
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; TYPE: PATH-DATA
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
#?(:clj
|
||||
(deftype PathData [size
|
||||
^ByteBuffer buffer
|
||||
^:unsynchronized-mutable hash]
|
||||
Object
|
||||
(toString [_]
|
||||
(to-string buffer size))
|
||||
|
||||
(equals [_ other]
|
||||
(if (instance? PathData other)
|
||||
(.equals ^ByteBuffer buffer (.-buffer ^PathData other))
|
||||
false))
|
||||
|
||||
ITransformable
|
||||
(-transform [_ m]
|
||||
(let [buffer (clone-buffer buffer)]
|
||||
(impl-transform buffer m size)
|
||||
(PathData. size buffer nil)))
|
||||
|
||||
(-walk [_ f initial]
|
||||
(impl-walk buffer f initial size))
|
||||
|
||||
(-reduce [_ f initial]
|
||||
(impl-reduce buffer f initial size))
|
||||
|
||||
(-lookup [_ index f]
|
||||
(when (and (<= 0 index)
|
||||
(< index size))
|
||||
(impl-lookup buffer index f)))
|
||||
|
||||
json/JSONWriter
|
||||
(-write [this writer options]
(json/-write (.toString this) writer options))
|
||||
|
||||
clojure.lang.IHashEq
|
||||
(hasheq [this]
|
||||
(when-not hash
|
||||
(set! hash (clojure.lang.Murmur3/hashOrdered (seq this))))
|
||||
hash)
|
||||
|
||||
clojure.lang.Sequential
|
||||
clojure.lang.Seqable
|
||||
(seq [_]
|
||||
(when (pos? size)
|
||||
((fn next-seq [i]
|
||||
(when (< i size)
|
||||
(cons (read-segment buffer i)
|
||||
(lazy-seq (next-seq (inc i))))))
|
||||
0)))
|
||||
|
||||
clojure.lang.IReduceInit
|
||||
(reduce [_ f start]
|
||||
(loop [index 0
|
||||
result start]
|
||||
(if (< index size)
|
||||
(let [result (f result (read-segment buffer index))]
|
||||
(if (reduced? result)
|
||||
@result
|
||||
(recur (inc index) result)))
|
||||
result)))
|
||||
|
||||
clojure.lang.Indexed
|
||||
(nth [_ i]
|
||||
(if (in-range? size i)
|
||||
(read-segment buffer i)
|
||||
nil))
|
||||
|
||||
(nth [_ i default]
|
||||
(if (in-range? size i)
|
||||
(read-segment buffer i)
|
||||
default))
|
||||
|
||||
clojure.lang.Counted
|
||||
(count [_] size)
|
||||
|
||||
IPathData
|
||||
(-get-byte-size [_]
|
||||
(* size SEGMENT-BYTE-SIZE))
|
||||
|
||||
(-write-to [_ _ _]
|
||||
(throw (RuntimeException. "not implemented"))))
|
||||
|
||||
:cljs
|
||||
#_:clj-kondo/ignore
|
||||
(deftype PathData [size buffer dview cache ^:mutable __hash]
|
||||
Object
|
||||
(toString [_]
|
||||
(to-string dview size))
|
||||
|
||||
IPathData
|
||||
(-get-byte-size [_]
|
||||
(.-byteLength buffer))
|
||||
|
||||
(-write-to [_ into-buffer offset]
|
||||
;; NOTE: we still use u8 because until the heap refactor merge
|
||||
;; we can't guarantee the alignment of offset on 4 bytes
|
||||
(assert (instance? js/ArrayBuffer into-buffer))
|
||||
(let [size (.-byteLength buffer)
|
||||
mem (js/Uint8Array. into-buffer offset size)]
|
||||
(.set mem (js/Uint8Array. buffer))))
|
||||
|
||||
ITransformable
|
||||
(-transform [this m]
|
||||
(let [buffer (clone-buffer buffer)
|
||||
dview (js/DataView. buffer)]
|
||||
(impl-transform dview m size)
|
||||
(PathData. size buffer dview (weak-map/create) nil)))
|
||||
|
||||
(-walk [_ f initial]
|
||||
(impl-walk dview f initial size))
|
||||
|
||||
(-reduce [_ f initial]
|
||||
(impl-reduce dview f initial size))
|
||||
|
||||
(-lookup [_ index f]
|
||||
(when (and (<= 0 index)
|
||||
(< index size))
|
||||
(impl-lookup dview index f)))
|
||||
|
||||
cljs.core/ISequential
|
||||
cljs.core/IEquiv
|
||||
(-equiv [this other]
|
||||
(if (instance? PathData other)
|
||||
(let [obuffer (.-buffer other)]
|
||||
(if (= (.-byteLength obuffer)
|
||||
(.-byteLength buffer))
|
||||
(let [cb (js/Uint32Array. buffer)
|
||||
ob (js/Uint32Array. obuffer)
|
||||
sz (alength cb)]
|
||||
(loop [i 0]
|
||||
(if (< i sz)
|
||||
(if (= (aget ob i)
|
||||
(aget cb i))
|
||||
(recur (inc i))
|
||||
false)
|
||||
true)))
|
||||
false))
|
||||
false))
|
||||
|
||||
cljs.core/IReduce
|
||||
(-reduce [_ f]
|
||||
(loop [index 1
|
||||
result (if (pos? size)
|
||||
(read-segment dview 0)
|
||||
nil)]
|
||||
(if (< index size)
|
||||
(let [result (f result (read-segment dview index))]
|
||||
(if (reduced? result)
|
||||
@result
|
||||
(recur (inc index) result)))
|
||||
result)))
|
||||
|
||||
(-reduce [_ f start]
|
||||
(loop [index 0
|
||||
result start]
|
||||
(if (< index size)
|
||||
(let [result (f result (read-segment dview index))]
|
||||
(if (reduced? result)
|
||||
@result
|
||||
(recur (inc index) result)))
|
||||
result)))
|
||||
|
||||
cljs.core/IHash
|
||||
(-hash [coll]
|
||||
(caching-hash coll hash-ordered-coll __hash))
|
||||
|
||||
cljs.core/ICounted
|
||||
(-count [_] size)
|
||||
|
||||
cljs.core/IIndexed
|
||||
(-nth [_ i]
|
||||
(if (in-range? size i)
|
||||
(read-segment dview i)
|
||||
nil))
|
||||
|
||||
(-nth [_ i default]
|
||||
(if (in-range? size i)
|
||||
(read-segment dview i)
|
||||
default))
|
||||
|
||||
cljs.core/ISeqable
|
||||
(-seq [this]
|
||||
(when (pos? size)
|
||||
((fn next-seq [i]
|
||||
(when (< i size)
|
||||
(cons (read-segment dview i)
|
||||
(lazy-seq (next-seq (inc i))))))
|
||||
0)))
|
||||
|
||||
cljs.core/IPrintWithWriter
|
||||
(-pr-writer [this writer _]
|
||||
(cljs.core/-write writer (str "#penpot/path-data \"" (.toString this) "\"")))))
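;; On both platforms PathData behaves as a counted, seqable and reducible
;; collection of plain segment maps, so standard sequence functions work
;; directly on it. A hedged REPL-style sketch (assuming `pdata` is an
;; existing PathData value):
(comment
  (count pdata)  ;; => number of segments
  (nth pdata 0)  ;; => {:command :move-to, :params {:x ... :y ...}}
  (vec pdata))   ;; => vector of plain segment maps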
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; SCHEMA
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
(def schema:safe-number
|
||||
[:schema {:gen/gen (sg/small-int :max 100 :min -100)}
|
||||
::sm/safe-number])
|
||||
|
||||
(def ^:private schema:line-to-segment
|
||||
[:map
|
||||
[:command [:= :line-to]]
|
||||
[:params
|
||||
[:map
|
||||
[:x schema:safe-number]
|
||||
[:y schema:safe-number]]]])
|
||||
|
||||
(def ^:private schema:close-path-segment
|
||||
[:map
|
||||
[:command [:= :close-path]]])
|
||||
|
||||
(def ^:private schema:move-to-segment
|
||||
[:map
|
||||
[:command [:= :move-to]]
|
||||
[:params
|
||||
[:map
|
||||
[:x schema:safe-number]
|
||||
[:y schema:safe-number]]]])
|
||||
|
||||
(def ^:private schema:curve-to-segment
|
||||
[:map
|
||||
[:command [:= :curve-to]]
|
||||
[:params
|
||||
[:map
|
||||
[:x schema:safe-number]
|
||||
[:y schema:safe-number]
|
||||
[:c1x schema:safe-number]
|
||||
[:c1y schema:safe-number]
|
||||
[:c2x schema:safe-number]
|
||||
[:c2y schema:safe-number]]]])
|
||||
|
||||
(def ^:private schema:segment
|
||||
[:multi {:title "PathSegment"
|
||||
:dispatch :command
|
||||
:decode/json #(update % :command keyword)}
|
||||
[:line-to schema:line-to-segment]
|
||||
[:close-path schema:close-path-segment]
|
||||
[:move-to schema:move-to-segment]
|
||||
[:curve-to schema:curve-to-segment]])
|
||||
|
||||
(def schema:segments
|
||||
[:vector {:gen/gen (->> (sg/generator schema:segment)
|
||||
(sg/vector)
|
||||
(sg/filter not-empty)
|
||||
(sg/filter (fn [[e1]]
|
||||
(= (:command e1) :move-to))))}
|
||||
schema:segment])
|
||||
|
||||
(def schema:content-like
|
||||
[:sequential schema:segment])
|
||||
|
||||
(def check-content-like
|
||||
(sm/check-fn schema:content-like))
|
||||
|
||||
(def check-segment
|
||||
(sm/check-fn schema:segment))
|
||||
|
||||
(def ^:private check-segments
|
||||
(sm/check-fn schema:segments))
|
||||
|
||||
(defn path-data?
|
||||
[o]
|
||||
(instance? PathData o))
|
||||
|
||||
(declare from-string)
|
||||
(declare from-plain)
|
||||
|
||||
;; Mainly used on backend: features/components_v2.clj
|
||||
(sm/register! ::path/segment schema:segment)
|
||||
(sm/register! ::path/segments schema:segments)
|
||||
|
||||
(sm/register!
|
||||
{:type ::path/content
|
||||
:compile
|
||||
(fn [_ _ _]
|
||||
(let [decoder (delay (sm/decoder schema:segments sm/json-transformer))
|
||||
generator (->> (sg/generator schema:segments)
|
||||
(sg/filter not-empty)
|
||||
(sg/fmap from-plain))]
|
||||
{:pred path-data?
|
||||
:type-properties
|
||||
{:gen/gen generator
|
||||
:encode/json identity
|
||||
:decode/json (fn [s]
|
||||
(cond
|
||||
(string? s)
|
||||
(from-string s)
|
||||
|
||||
(vector? s)
|
||||
(let [decode-fn (deref decoder)]
|
||||
(-> (decode-fn s)
|
||||
(from-plain)))
|
||||
|
||||
:else
|
||||
s))}}))})
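;; The ::path/content JSON decoder accepts either an SVG path string or a
;; vector of plain segment maps, routing them through `from-string` or
;; `from-plain` respectively; any other value is passed through untouched.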
|
||||
|
||||
(def check-path-content
|
||||
(sm/check-fn ::path/content))
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; CONSTRUCTORS & PREDICATES
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
(defn from-string
|
||||
[s]
|
||||
(from-plain (svg.path/parse s)))
|
||||
|
||||
(defn from-bytes
|
||||
[buffer]
|
||||
#?(:clj
|
||||
(cond
|
||||
(instance? ByteBuffer buffer)
|
||||
(let [size (.capacity ^ByteBuffer buffer)
|
||||
count (long (/ size SEGMENT-BYTE-SIZE))
|
||||
buffer (.order ^ByteBuffer buffer ByteOrder/LITTLE_ENDIAN)]
|
||||
(PathData. count buffer nil))
|
||||
|
||||
(bytes? buffer)
|
||||
(let [size (alength ^bytes buffer)
|
||||
count (long (/ size SEGMENT-BYTE-SIZE))
|
||||
buffer (ByteBuffer/wrap buffer)]
|
||||
(PathData. count
|
||||
(.order buffer ByteOrder/LITTLE_ENDIAN)
|
||||
nil))
|
||||
:else
|
||||
(throw (java.lang.IllegalArgumentException. "invalid data provided")))
|
||||
|
||||
:cljs
|
||||
(cond
|
||||
(instance? js/ArrayBuffer buffer)
|
||||
(let [size (.-byteLength buffer)
|
||||
count (long (/ size SEGMENT-BYTE-SIZE))]
|
||||
(PathData. count
|
||||
buffer
|
||||
(js/DataView. buffer)
|
||||
(weak-map/create)
|
||||
nil))
|
||||
|
||||
(instance? js/DataView buffer)
|
||||
(let [dview buffer
|
||||
buffer (.-buffer dview)
|
||||
size (.-byteLength buffer)
|
||||
count (long (/ size SEGMENT-BYTE-SIZE))]
|
||||
(PathData. count buffer dview (weak-map/create) nil))
|
||||
|
||||
(instance? js/Uint8Array buffer)
|
||||
(from-bytes (.-buffer buffer))
|
||||
|
||||
(instance? js/Int8Array buffer)
|
||||
(from-bytes (.-buffer buffer))
|
||||
|
||||
:else
|
||||
(throw (js/Error. "invalid data provided")))))
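;; `from-bytes` derives the segment count from the byte length of the
;; provided buffer, so the input is expected to be a whole number of
;; SEGMENT-BYTE-SIZE blocks; e.g. a buffer holding 3 segments has a byte
;; length of (* 3 SEGMENT-BYTE-SIZE).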
|
||||
|
||||
;; FIXME: consider implementing with reduce
|
||||
;; FIXME: consider ensuring fixed precision to avoid doing it on formatting
|
||||
|
||||
(defn from-plain
|
||||
"Create a PathData instance from plain data structures"
|
||||
[segments]
|
||||
(assert (check-segments segments))
|
||||
|
||||
(let [total (count segments)
|
||||
#?@(:cljs [buffer' (allocate total)
|
||||
buffer (new js/DataView buffer')]
|
||||
:clj [buffer (allocate total)])]
|
||||
(loop [index 0]
|
||||
(when (< index total)
|
||||
(let [segment (nth segments index)
|
||||
offset (* index SEGMENT-BYTE-SIZE)]
|
||||
(case (get segment :command)
|
||||
:move-to
|
||||
(let [params (get segment :params)
|
||||
x (float (get params :x))
|
||||
y (float (get params :y))]
|
||||
(write-short buffer offset 1)
|
||||
(write-float buffer (+ offset 20) x)
|
||||
(write-float buffer (+ offset 24) y))
|
||||
|
||||
:line-to
|
||||
(let [params (get segment :params)
|
||||
x (float (get params :x))
|
||||
y (float (get params :y))]
|
||||
|
||||
(write-short buffer offset 2)
|
||||
(write-float buffer (+ offset 20) x)
|
||||
(write-float buffer (+ offset 24) y))
|
||||
|
||||
:curve-to
|
||||
(let [params (get segment :params)
|
||||
x (float (get params :x))
|
||||
y (float (get params :y))
|
||||
c1x (float (get params :c1x x))
|
||||
c1y (float (get params :c1y y))
|
||||
c2x (float (get params :c2x x))
|
||||
c2y (float (get params :c2y y))]
|
||||
|
||||
(write-short buffer offset 3)
|
||||
(write-float buffer (+ offset 4) c1x)
|
||||
(write-float buffer (+ offset 8) c1y)
|
||||
(write-float buffer (+ offset 12) c2x)
|
||||
(write-float buffer (+ offset 16) c2y)
|
||||
(write-float buffer (+ offset 20) x)
|
||||
(write-float buffer (+ offset 24) y))
|
||||
|
||||
:close-path
|
||||
(write-short buffer offset 4))
|
||||
(recur (inc index)))))
|
||||
|
||||
(from-bytes buffer)))
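;; A minimal usage sketch for `from-plain` (illustrative only), building
;; a small triangle path from plain segment maps:
(comment
  (from-plain [{:command :move-to :params {:x 0 :y 0}}
               {:command :line-to :params {:x 10 :y 0}}
               {:command :line-to :params {:x 10 :y 10}}
               {:command :close-path :params {}}]))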
|
||||
|
||||
(defn path-data
|
||||
"Create an instance of PathData, returns itself if it is already
|
||||
PathData instance"
|
||||
[data]
|
||||
(cond
|
||||
(path-data? data)
|
||||
data
|
||||
|
||||
(nil? data)
|
||||
(from-plain [])
|
||||
|
||||
(sequential? data)
|
||||
(from-plain data)
|
||||
|
||||
:else
|
||||
(throw (ex-info "unexpected data" {:data data}))))
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; SERIALIZATION
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
(t/add-handlers!
|
||||
{:id "penpot/path-data"
|
||||
:class PathData
|
||||
:wfn (fn [^PathData pdata]
|
||||
(let [buffer (.-buffer pdata)]
|
||||
#?(:cljs (js/Uint8Array. buffer)
|
||||
:clj (.array ^ByteBuffer buffer))))
|
||||
:rfn from-bytes})
|
||||
|
||||
#?(:clj
|
||||
(fres/add-handlers!
|
||||
{:name "penpot/path-data"
|
||||
:class PathData
|
||||
:wfn (fn [n w o]
|
||||
(fres/write-tag! w n 1)
|
||||
(let [buffer (.-buffer ^PathData o)
|
||||
bytes (.array ^ByteBuffer buffer)]
|
||||
(fres/write-bytes! w bytes)))
|
||||
:rfn (fn [r]
|
||||
(let [^bytes bytes (fres/read-object! r)]
|
||||
(from-bytes bytes)))}))
|
||||
|
||||
891  common/src/app/common/types/path/segment.cljc  (new file)
@@ -0,0 +1,891 @@
|
||||
;; This Source Code Form is subject to the terms of the Mozilla Public
|
||||
;; License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
;;
|
||||
;; Copyright (c) KALEIDOS INC
|
||||
|
||||
(ns app.common.types.path.segment
|
||||
"A collection of helpers for work with plain segment type"
|
||||
(:require
|
||||
[app.common.data :as d]
|
||||
[app.common.data.macros :as dm]
|
||||
[app.common.geom.matrix :as gmt]
|
||||
[app.common.geom.point :as gpt]
|
||||
[app.common.geom.rect :as grc]
|
||||
[app.common.math :as mth]
|
||||
[app.common.types.path.helpers :as helpers]
|
||||
[app.common.types.path.impl :as impl]
|
||||
[clojure.set :as set]))
|
||||
|
||||
#?(:clj (set! *warn-on-reflection* true))
|
||||
|
||||
(defn update-handler
|
||||
[command prefix point]
|
||||
(let [[cox coy] (if (= prefix :c1) [:c1x :c1y] [:c2x :c2y])]
|
||||
(-> command
|
||||
(assoc-in [:params cox] (:x point))
|
||||
(assoc-in [:params coy] (:y point)))))
|
||||
|
||||
(defn get-handler [{:keys [params] :as command} prefix]
|
||||
(let [cx (d/prefix-keyword prefix :x)
|
||||
cy (d/prefix-keyword prefix :y)]
|
||||
(when (and command
|
||||
(contains? params cx)
|
||||
(contains? params cy))
|
||||
(gpt/point (get params cx)
|
||||
(get params cy)))))
|
||||
|
||||
(defn get-handlers
|
||||
"Retrieve a map where for every point will retrieve a list of
|
||||
the handlers that are associated with that point.
|
||||
point -> [[index, prefix]]"
|
||||
[content]
|
||||
(let [prev-point* (volatile! nil)
|
||||
vec-conj (fnil conj [])]
|
||||
(impl/-reduce content
|
||||
(fn [result index type _ _ _ _ x y]
|
||||
(let [curr-point (gpt/point x y)
|
||||
prev-point (deref prev-point*)]
|
||||
(vreset! prev-point* curr-point)
|
||||
(if (and prev-point (= :curve-to type))
|
||||
(-> result
|
||||
(update prev-point vec-conj [index :c1])
|
||||
(update curr-point vec-conj [index :c2]))
|
||||
result)))
|
||||
{})))
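;; The resulting map is keyed by point, e.g. (hedged illustration):
;; {(gpt/point 10 0) [[2 :c1] [3 :c2]], ...}
;; meaning that segment 2 has its :c1 handler and segment 3 its :c2
;; handler attached to the point (10, 0).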
|
||||
|
||||
;; FIXME: can be optimized with internal reduction
|
||||
(defn point-indices
|
||||
[content point]
|
||||
(->> (d/enumerate content)
|
||||
(filter (fn [[_ segment]] (= point (helpers/segment->point segment))))
|
||||
(map (fn [[index _]] index))))
|
||||
|
||||
(defn handler-indices
|
||||
"Return an index where the key is the positions and the values the handlers"
|
||||
[content point]
|
||||
(->> (d/with-prev content)
|
||||
(d/enumerate)
|
||||
(mapcat (fn [[index [cur-segment pre-segment]]]
|
||||
(if (and (some? pre-segment) (= :curve-to (:command cur-segment)))
|
||||
(let [cur-pos (helpers/segment->point cur-segment)
|
||||
pre-pos (helpers/segment->point pre-segment)]
|
||||
(cond-> []
|
||||
(= pre-pos point) (conj [index :c1])
|
||||
(= cur-pos point) (conj [index :c2])))
|
||||
[])))))
|
||||
|
||||
(defn opposite-index
|
||||
"Calculates the opposite index given a prefix and an index"
|
||||
[content index prefix]
|
||||
|
||||
(let [point (if (= prefix :c2)
|
||||
(helpers/segment->point (nth content index))
|
||||
(helpers/segment->point (nth content (dec index))))
|
||||
|
||||
point->handlers (get-handlers content)
|
||||
|
||||
handlers (->> point
|
||||
(point->handlers)
|
||||
(filter (fn [[ci cp]] (and (not= index ci) (not= prefix cp)))))]
|
||||
|
||||
(cond
|
||||
(= (count handlers) 1)
|
||||
(->> handlers first)
|
||||
|
||||
(and (= :c1 prefix) (= (count content) index))
|
||||
[(dec index) :c2]
|
||||
|
||||
:else nil)))
|
||||
|
||||
;; FIXME: rename to get-point
|
||||
(defn get-handler-point
|
||||
"Given a segment index and prefix, get a handler point"
|
||||
[content index prefix]
|
||||
(when (and (some? index)
|
||||
(some? content))
|
||||
(impl/-lookup content index
|
||||
(fn [command c1x c1y c2x c2y x y]
|
||||
(let [prefix (if (= :curve-to command)
|
||||
prefix
|
||||
nil)]
|
||||
(case prefix
|
||||
:c1 (gpt/point c1x c1y)
|
||||
:c2 (gpt/point c2x c2y)
|
||||
(gpt/point x y)))))))
|
||||
|
||||
;; FIXME: revisit this function
|
||||
(defn handler->node
|
||||
[content index prefix]
|
||||
(if (= prefix :c1)
|
||||
(helpers/segment->point (nth content (dec index)))
|
||||
(helpers/segment->point (nth content index))))
|
||||
|
||||
(defn calculate-opposite-handler
|
||||
"Given a point and its handler, gives the symmetric handler"
|
||||
[point handler]
|
||||
(let [handler-vector (gpt/to-vec point handler)]
|
||||
(gpt/add point (gpt/negate handler-vector))))
|
||||
|
||||
(defn opposite-handler
|
||||
"Calculates the coordinates of the opposite handler"
|
||||
[point handler]
|
||||
(let [phv (gpt/to-vec point handler)]
|
||||
(gpt/add point (gpt/negate phv))))
|
||||
|
||||
(defn get-points
|
||||
"Returns points for the given segment, faster version of
|
||||
the `content->points`."
|
||||
[content]
|
||||
(impl/with-cache content "get-points"
|
||||
(impl/-walk content
|
||||
(fn [type _ _ _ _ x y]
|
||||
(when (not= type :close-path)
|
||||
(gpt/point x y)))
|
||||
[])))
|
||||
|
||||
;; FIXME: incorrect API, don't need full shape
|
||||
(defn path->lines
|
||||
"Given a path returns a list of lines that approximate the path"
|
||||
[shape]
|
||||
(loop [command (first (:content shape))
|
||||
pending (rest (:content shape))
|
||||
result []
|
||||
last-start nil
|
||||
prev-point nil]
|
||||
|
||||
(if-let [{:keys [command params]} command]
|
||||
(let [point (if (= :close-path command)
|
||||
last-start
|
||||
(gpt/point params))
|
||||
|
||||
result (case command
|
||||
:line-to (conj result [prev-point point])
|
||||
:curve-to (let [h1 (gpt/point (:c1x params) (:c1y params))
|
||||
h2 (gpt/point (:c2x params) (:c2y params))]
|
||||
(into result (helpers/curve->lines prev-point point h1 h2)))
|
||||
:move-to (cond-> result
|
||||
last-start (conj [prev-point last-start]))
|
||||
result)
|
||||
last-start (if (= :move-to command)
|
||||
point
|
||||
last-start)]
|
||||
(recur (first pending)
|
||||
(rest pending)
|
||||
result
|
||||
last-start
|
||||
point))
|
||||
|
||||
(conj result [prev-point last-start]))))
|
||||
|
||||
(def ^:const path-closest-point-accuracy 0.01)
|
||||
|
||||
;; FIXME: move to helpers? this function needs a performance review; it
;; is executed many times during path editing
|
||||
(defn- curve-closest-point
|
||||
[position start end h1 h2 precision]
|
||||
(let [d (memoize (fn [t] (gpt/distance position (helpers/curve-values start end h1 h2 t))))]
|
||||
(loop [t1 0.0
|
||||
t2 1.0]
|
||||
(if (<= (mth/abs (- t1 t2)) precision)
|
||||
(-> (helpers/curve-values start end h1 h2 t1)
|
||||
;; store the segment info
|
||||
(with-meta {:t t1 :from-p start :to-p end}))
|
||||
|
||||
(let [ht (+ t1 (/ (- t2 t1) 2))
|
||||
ht1 (+ t1 (/ (- t2 t1) 4))
|
||||
ht2 (+ t1 (/ (* 3 (- t2 t1)) 4))
|
||||
|
||||
[t1 t2] (cond
|
||||
(< (d ht1) (d ht2))
|
||||
[t1 ht]
|
||||
|
||||
(< (d ht2) (d ht1))
|
||||
[ht t2]
|
||||
|
||||
(and (< (d ht) (d t1)) (< (d ht) (d t2)))
|
||||
[ht1 ht2]
|
||||
|
||||
(< (d t1) (d t2))
|
||||
[t1 ht]
|
||||
|
||||
:else
|
||||
[ht t2])]
|
||||
(recur (double t1)
|
||||
(double t2)))))))
|
||||
|
||||
(defn- line-closest-point
|
||||
"Finds the closest point in the line segment defined by from-p and to-p"
|
||||
[position from-p to-p]
|
||||
|
||||
(let [e1 (gpt/to-vec from-p to-p)
|
||||
e2 (gpt/to-vec from-p position)
|
||||
|
||||
len2 (+ (mth/sq (:x e1)) (mth/sq (:y e1)))
|
||||
t (/ (gpt/dot e1 e2) len2)]
|
||||
|
||||
(if (and (>= t 0) (<= t 1) (not (mth/almost-zero? len2)))
|
||||
(-> (gpt/add from-p (gpt/scale e1 t))
|
||||
(with-meta {:t t
|
||||
:from-p from-p
|
||||
:to-p to-p}))
|
||||
|
||||
;; There is no perpendicular projection in the line so the closest
|
||||
;; point will be one of the extremes
|
||||
(if (<= (gpt/distance position from-p) (gpt/distance position to-p))
|
||||
from-p
|
||||
to-p))))
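;; The projection above is the standard closest-point-on-segment formula:
;; with e1 = to-p - from-p and e2 = position - from-p, the parameter
;; t = (e1 . e2) / |e1|^2 locates the perpendicular foot; values of t
;; outside [0, 1] fall back to the nearest endpoint.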
|
||||
|
||||
(defn closest-point
|
||||
"Returns the closest point in the path to the position, at a given precision"
|
||||
[content position precision]
|
||||
(let [point+distance
|
||||
(fn [[cur-segment prev-segment]]
|
||||
(let [from-p (helpers/segment->point prev-segment)
|
||||
to-p (helpers/segment->point cur-segment)
|
||||
h1 (gpt/point (get-in cur-segment [:params :c1x])
|
||||
(get-in cur-segment [:params :c1y]))
|
||||
h2 (gpt/point (get-in cur-segment [:params :c2x])
|
||||
(get-in cur-segment [:params :c2y]))
|
||||
point
|
||||
(case (:command cur-segment)
|
||||
:line-to
|
||||
(line-closest-point position from-p to-p)
|
||||
|
||||
:curve-to
|
||||
(curve-closest-point position from-p to-p h1 h2 precision)
|
||||
|
||||
nil)]
|
||||
(when point
|
||||
[point (gpt/distance point position)])))
|
||||
|
||||
find-min-point
|
||||
(fn [[min-p min-dist :as acc] [cur-p cur-dist :as cur]]
|
||||
(if (and (some? acc) (or (not cur) (<= min-dist cur-dist)))
|
||||
[min-p min-dist]
|
||||
[cur-p cur-dist]))]
|
||||
|
||||
(->> content
|
||||
(d/with-prev)
|
||||
(map point+distance)
|
||||
(reduce find-min-point)
|
||||
(first))))
|
||||
|
||||
(defn- remove-line-curves
|
||||
"Remove all curves that have both handlers in the same position that the
|
||||
beginning and end points. This makes them really line-to commands.
|
||||
|
||||
NOTE: works with plain format so it expects to receive a vector"
|
||||
[content]
|
||||
(assert (vector? content) "expected a plain format for `content`")
|
||||
|
||||
(let [with-prev (d/enumerate (d/with-prev content))
|
||||
|
||||
process-segment
|
||||
(fn [content [index [segment prev]]]
|
||||
(let [cur-point (helpers/segment->point segment)
|
||||
pre-point (helpers/segment->point prev)
|
||||
handler-c1 (get-handler segment :c1)
|
||||
handler-c2 (get-handler segment :c2)]
|
||||
(if (and (= :curve-to (:command segment))
|
||||
(= cur-point handler-c2)
|
||||
(= pre-point handler-c1))
|
||||
(assoc content index {:command :line-to
|
||||
:params (into {} cur-point)})
|
||||
content)))]
|
||||
|
||||
(reduce process-segment content with-prev)))
|
||||
|
||||
(defn make-corner-point
|
||||
"Changes the content to make a point a 'corner'"
|
||||
[content point]
|
||||
(let [handlers
|
||||
(-> (get-handlers content)
|
||||
(get point))
|
||||
|
||||
transform-content
|
||||
(fn [content [index prefix]]
|
||||
(let [cx (d/prefix-keyword prefix :x)
|
||||
cy (d/prefix-keyword prefix :y)]
|
||||
(-> content
|
||||
(assoc-in [index :params cx] (:x point))
|
||||
(assoc-in [index :params cy] (:y point)))))
|
||||
|
||||
content
|
||||
(reduce transform-content (vec content) handlers)
|
||||
|
||||
content
|
||||
(remove-line-curves content)]
|
||||
|
||||
(impl/from-plain content)))
|
||||
|
||||
(defn- line->curve
|
||||
[from-p segment]
|
||||
|
||||
(let [to-p (helpers/segment->point segment)
|
||||
|
||||
v (gpt/to-vec from-p to-p)
|
||||
d (gpt/distance from-p to-p)
|
||||
|
||||
dv1 (-> (gpt/normal-left v)
|
||||
(gpt/scale (/ d 3)))
|
||||
|
||||
h1 (gpt/add from-p dv1)
|
||||
|
||||
dv2 (-> (gpt/to-vec to-p h1)
|
||||
(gpt/unit)
|
||||
(gpt/scale (/ d 3)))
|
||||
|
||||
h2 (gpt/add to-p dv2)]
|
||||
(-> segment
|
||||
(assoc :command :curve-to)
|
||||
(update :params (fn [params]
|
||||
;; ensure plain map
|
||||
(-> (into {} params)
|
||||
(assoc :c1x (:x h1))
|
||||
(assoc :c1y (:y h1))
|
||||
(assoc :c2x (:x h2))
|
||||
(assoc :c2y (:y h2))))))))
|
||||
|
||||
;; FIXME: optimize
|
||||
(defn is-curve?
|
||||
[content point]
|
||||
(let [handlers (-> (get-handlers content)
|
||||
(get point))
|
||||
handler-points (map #(get-handler-point content (first %) (second %)) handlers)]
|
||||
(some #(not= point %) handler-points)))
|
||||
|
||||
(def ^:private xf:mapcat-points
|
||||
(comp
|
||||
(mapcat #(list (:next-p %) (:prev-p %)))
|
||||
(remove nil?)))
|
||||
|
||||
(defn make-curve-point
|
||||
"Changes the content to make the point a 'curve'. The handlers will be
|
||||
positioned in the same vector that results from the previous->next
|
||||
points but with fixed length."
|
||||
[content point]
|
||||
|
||||
(let [;; We perform this operation first because it can be
;; optimized with internal reduction, so it is better to use the
;; PathData type before converting it to a plain vector.
|
||||
indices
|
||||
(point-indices content point)
|
||||
|
||||
vectors
|
||||
(map (fn [index]
|
||||
(let [segment (nth content index)
|
||||
prev-i (dec index)
|
||||
prev (when (not (= :move-to (:command segment)))
|
||||
(get content prev-i))
|
||||
next-i (inc index)
|
||||
next (get content next-i)
|
||||
|
||||
next (when (not (= :move-to (:command next)))
|
||||
next)]
|
||||
{:index index
|
||||
:prev-i (when (some? prev) prev-i)
|
||||
:prev-c prev
|
||||
:prev-p (helpers/segment->point prev)
|
||||
:next-i (when (some? next) next-i)
|
||||
:next-c next
|
||||
:next-p (helpers/segment->point next)
|
||||
:segment segment}))
|
||||
indices)
|
||||
|
||||
points
|
||||
(into #{} xf:mapcat-points vectors)
|
||||
|
||||
;; We transform the content to a plain format to execute the
;; algorithm because right now it is the only way to execute it
|
||||
content
|
||||
(vec content)
|
||||
|
||||
content
|
||||
(if (= (count points) 2)
|
||||
(let [[fpoint spoint] (vec points)
|
||||
v1 (gpt/to-vec fpoint point)
|
||||
v2 (gpt/to-vec fpoint spoint)
|
||||
vp (gpt/project v1 v2)
|
||||
vh (gpt/subtract v1 vp)
|
||||
|
||||
add-curve
|
||||
(fn [content {:keys [index prev-p next-p next-i]}]
|
||||
(let [curr-segment (get content index)
|
||||
curr-command (get curr-segment :command)
|
||||
|
||||
next-segment (get content next-i)
|
||||
next-command (get next-segment :command)
|
||||
|
||||
;; New handlers for prev-point and next-point
|
||||
prev-h
|
||||
(when (some? prev-p) (gpt/add prev-p vh))
|
||||
|
||||
next-h
|
||||
(when (some? next-p) (gpt/add next-p vh))
|
||||
|
||||
;; Correcting 1/3 towards the point improves the curve
|
||||
prev-correction
|
||||
(when (some? prev-h) (gpt/scale (gpt/to-vec prev-h point) (/ 1 3)))
|
||||
|
||||
next-correction
|
||||
(when (some? next-h) (gpt/scale (gpt/to-vec next-h point) (/ 1 3)))
|
||||
|
||||
prev-h
|
||||
(when (some? prev-h) (gpt/add prev-h prev-correction))
|
||||
|
||||
next-h
|
||||
(when (some? next-h) (gpt/add next-h next-correction))]
|
||||
|
||||
(cond-> content
|
||||
(and (= :line-to curr-command) (some? prev-p))
|
||||
(update index helpers/update-curve-to prev-p prev-h)
|
||||
|
||||
(and (= :line-to next-command) (some? next-p))
|
||||
(update next-i helpers/update-curve-to next-h next-p)
|
||||
|
||||
(and (= :curve-to curr-command) (some? prev-p))
|
||||
(update index update-handler :c2 prev-h)
|
||||
|
||||
(and (= :curve-to next-command) (some? next-p))
|
||||
(update next-i update-handler :c1 next-h))))]
|
||||
|
||||
(reduce add-curve content vectors))
|
||||
|
||||
(let [add-curve
|
||||
(fn [content {:keys [index segment prev-p next-c next-i]}]
|
||||
(cond-> content
|
||||
(= :line-to (:command segment))
|
||||
(update index #(line->curve prev-p %))
|
||||
|
||||
(= :curve-to (:command segment))
|
||||
(update index #(line->curve prev-p %))
|
||||
|
||||
(= :line-to (:command next-c))
|
||||
(update next-i #(line->curve point %))
|
||||
|
||||
(= :curve-to (:command next-c))
|
||||
(update next-i #(line->curve point %))))]
|
||||
(reduce add-curve content vectors)))]
|
||||
|
||||
(impl/from-plain content)))
|
||||
|
||||
(defn get-segments-with-points
|
||||
"Given a content and a set of points return all the segments in the path
|
||||
that uses the points"
|
||||
[content points]
|
||||
(let [point-set (set points)]
|
||||
(loop [result (transient [])
|
||||
prev-point nil
|
||||
start-point nil
|
||||
index 0
|
||||
content (seq content)]
|
||||
(if-let [{:keys [command] :as segment} (first content)]
|
||||
(let [close-path? (= command :close-path)
|
||||
move-to? (= command :move-to)
|
||||
|
||||
cur-point (if close-path?
|
||||
start-point
|
||||
(helpers/segment->point segment))
|
||||
|
||||
;; If there is a move-to we don't have a segment
|
||||
prev-point (if move-to?
|
||||
nil
|
||||
prev-point)
|
||||
|
||||
;; We update the start point
|
||||
start-point (if move-to?
|
||||
cur-point
|
||||
start-point)
|
||||
|
||||
result (cond-> result
|
||||
(and (some? prev-point)
|
||||
(contains? point-set prev-point)
|
||||
(contains? point-set cur-point))
|
||||
|
||||
(conj! (-> segment
|
||||
(assoc :start prev-point)
|
||||
(assoc :end cur-point)
|
||||
(assoc :index index))))]
|
||||
(recur result
|
||||
cur-point
|
||||
start-point
|
||||
(inc index)
|
||||
(rest content)))
|
||||
|
||||
(persistent! result)))))
|
||||
|
||||
(defn split-segments
|
||||
"Given a content creates splits commands between points with new segments"
|
||||
[content points value]
|
||||
|
||||
(let [split-command
|
||||
(fn [{:keys [command start end index] :as segment}]
|
||||
(case command
|
||||
:line-to [index (helpers/split-line-to start segment value)]
|
||||
:curve-to [index (helpers/split-curve-to start segment value)]
|
||||
:close-path [index [(helpers/make-line-to (gpt/lerp start end value)) segment]]
|
||||
nil))
|
||||
|
||||
segment-changes
|
||||
(->> (get-segments-with-points content points)
|
||||
(into {} (keep split-command)))
|
||||
|
||||
process-segments
|
||||
(fn [[index command]]
|
||||
(if (contains? segment-changes index)
|
||||
(get segment-changes index)
|
||||
[command]))]
|
||||
|
||||
(into [] (mapcat process-segments) (d/enumerate content))))
|
||||
|
||||
;; FIXME: rename to next-segment
|
||||
(defn next-node
|
||||
"Calculates the next-node to be inserted."
|
||||
[content position prev-point prev-handler]
|
||||
(let [position (select-keys position [:x :y])
|
||||
last-command (-> content last :command)
|
||||
add-line? (and prev-point (not prev-handler) (not= last-command :close-path))
|
||||
add-curve? (and prev-point prev-handler (not= last-command :close-path))]
|
||||
(cond
|
||||
add-line? {:command :line-to
|
||||
:params position}
|
||||
add-curve? {:command :curve-to
|
||||
:params (helpers/make-curve-params position prev-handler)}
|
||||
:else {:command :move-to
|
||||
:params position})))
|
||||
(defn remove-nodes
|
||||
"Removes from content the points given. Will try to reconstruct the paths
|
||||
to keep everything consistent"
|
||||
[content points]
|
||||
|
||||
(if (empty? points)
|
||||
content
|
||||
|
||||
(let [content (d/with-prev content)]
|
||||
|
||||
(loop [result []
|
||||
last-handler nil
|
||||
[cur-segment prev-segment] (first content)
|
||||
content (rest content)]
|
||||
|
||||
(if (nil? cur-segment)
|
||||
;; The result will be an array of arrays where every entry is a subpath
(->> result
;; remove empty and single-node subpaths
(filter #(> (count %) 1))
;; flatten the array-of-arrays into a plain array
|
||||
(flatten)
|
||||
(into []))
|
||||
|
||||
(let [move? (= :move-to (:command cur-segment))
|
||||
curve? (= :curve-to (:command cur-segment))
|
||||
|
||||
;; When the old command was a move we start a subpath
|
||||
result (if move? (conj result []) result)
|
||||
|
||||
subpath (peek result)
|
||||
|
||||
point (helpers/segment->point cur-segment)
|
||||
|
||||
old-prev-point (helpers/segment->point prev-segment)
|
||||
new-prev-point (helpers/segment->point (peek subpath))
|
||||
|
||||
remove? (contains? points point)
|
||||
|
||||
|
||||
;; We store the first handler for the first curve to be removed to
|
||||
;; use it for the first handler of the regenerated path
|
||||
cur-handler (cond
|
||||
(and (not last-handler) remove? curve?)
|
||||
(select-keys (:params cur-segment) [:c1x :c1y])
|
||||
|
||||
(not remove?)
|
||||
nil
|
||||
|
||||
:else
|
||||
last-handler)
|
||||
|
||||
cur-segment (cond-> cur-segment
|
||||
;; If we're starting a subpath and it's not a move make it a move
|
||||
(and (not move?) (empty? subpath))
|
||||
(assoc :command :move-to
|
||||
:params (select-keys (:params cur-segment) [:x :y]))
|
||||
|
||||
;; If we have a curve, the first handler will be relative to the previous
;; point. We change the handler to the new previous point
|
||||
(and curve? (seq subpath) (not= old-prev-point new-prev-point))
|
||||
(update :params merge last-handler))
|
||||
|
||||
head-idx (dec (count result))
|
||||
|
||||
result (cond-> result
|
||||
(not remove?)
|
||||
(update head-idx conj cur-segment))]
|
||||
(recur result
|
||||
cur-handler
|
||||
(first content)
|
||||
(rest content))))))))
|
||||
|
||||
(defn join-nodes
|
||||
"Creates new segments between points that weren't previously"
|
||||
[content points]
|
||||
|
||||
(let [segments-set (into #{}
|
||||
(map (juxt :start :end))
|
||||
(get-segments-with-points content points))
|
||||
|
||||
create-line-command (fn [point other]
|
||||
[(helpers/make-move-to point)
|
||||
(helpers/make-line-to other)])
|
||||
|
||||
not-segment? (fn [point other] (and (not (contains? segments-set [point other]))
|
||||
(not (contains? segments-set [other point]))))
|
||||
|
||||
new-content (->> (d/map-perm create-line-command not-segment? points)
|
||||
(flatten)
|
||||
(into []))]
|
||||
|
||||
(into content new-content)))
|
||||
|
||||
|
||||
(defn separate-nodes
|
||||
"Removes the segments between the points given"
|
||||
[content points]
|
||||
|
||||
(let [content (d/with-prev content)]
|
||||
(loop [result []
|
||||
[cur-segment prev-segment] (first content)
|
||||
content (rest content)]
|
||||
|
||||
(if (nil? cur-segment)
|
||||
(->> result
|
||||
(filter #(> (count %) 1))
|
||||
(flatten)
|
||||
(into []))
|
||||
|
||||
(let [prev-point (helpers/segment->point prev-segment)
|
||||
cur-point (helpers/segment->point cur-segment)
|
||||
|
||||
cur-segment (cond-> cur-segment
|
||||
(and (contains? points prev-point)
|
||||
(contains? points cur-point))
|
||||
|
||||
(assoc :command :move-to
|
||||
:params (select-keys (:params cur-segment) [:x :y])))
|
||||
|
||||
move? (= :move-to (:command cur-segment))
|
||||
|
||||
result (if move? (conj result []) result)
|
||||
head-idx (dec (count result))
|
||||
|
||||
result (-> result
|
||||
(update head-idx conj cur-segment))]
|
||||
(recur result
|
||||
(first content)
|
||||
(rest content)))))))
|
||||
|
||||
|
||||
(defn- add-to-set
|
||||
"Given a list of sets adds the value to the target set"
|
||||
[set-list target value]
|
||||
(->> set-list
|
||||
(mapv (fn [it]
|
||||
(cond-> it
|
||||
(= it target) (conj value))))))
|
||||
|
||||
(defn- join-sets
|
||||
"Given a list of sets join two sets in the list into a new one"
|
||||
[set-list target other]
|
||||
(conj (->> set-list
|
||||
(filterv #(and (not= % target)
|
||||
(not= % other))))
|
||||
(set/union target other)))
|
||||
|
||||
;; FIXME: revisit impl of this fn
|
||||
(defn- group-segments [segments]
|
||||
(loop [result []
|
||||
{point-a :start point-b :end :as segment} (first segments)
|
||||
segments (rest segments)]
|
||||
|
||||
(if (nil? segment)
|
||||
result
|
||||
|
||||
(let [set-a (d/seek #(contains? % point-a) result)
|
||||
set-b (d/seek #(contains? % point-b) result)
|
||||
|
||||
result (cond-> result
|
||||
(and (nil? set-a) (nil? set-b))
|
||||
(conj #{point-a point-b})
|
||||
|
||||
(and (some? set-a) (nil? set-b))
|
||||
(add-to-set set-a point-b)
|
||||
|
||||
(and (nil? set-a) (some? set-b))
|
||||
(add-to-set set-b point-a)
|
||||
|
||||
(and (some? set-a) (some? set-b) (not= set-a set-b))
|
||||
(join-sets set-a set-b))]
|
||||
(recur result
|
||||
(first segments)
|
||||
(rest segments))))))
|
||||
|
||||
(defn- calculate-merge-points [group-segments points]
|
||||
(let [index-merge-point (fn [group] (vector group (gpt/center-points group)))
|
||||
index-group (fn [point] (vector point (d/seek #(contains? % point) group-segments)))
|
||||
|
||||
group->merge-point (into {} (map index-merge-point) group-segments)
|
||||
point->group (into {} (map index-group) points)]
|
||||
(d/mapm #(group->merge-point %2) point->group)))
|
||||
|
||||
;; TODO: Improve the replace for curves
|
||||
(defn- replace-points
|
||||
"Replaces the points in a path for its merge-point"
|
||||
[content point->merge-point]
|
||||
(let [replace-command
|
||||
(fn [segment]
|
||||
(let [point (helpers/segment->point segment)]
|
||||
(if (contains? point->merge-point point)
|
||||
(let [merge-point (get point->merge-point point)]
|
||||
(-> segment (update :params assoc :x (:x merge-point) :y (:y merge-point))))
|
||||
segment)))]
|
||||
(->> content
|
||||
(mapv replace-command))))
|
||||
|
||||
(defn merge-nodes
|
||||
"Reduces the contiguous segments in points to a single point"
|
||||
[content points]
|
||||
(let [segments (get-segments-with-points content points)]
|
||||
(if (seq segments)
|
||||
(let [point->merge-point (-> segments
|
||||
(group-segments)
|
||||
(calculate-merge-points points))]
|
||||
(-> content
|
||||
(separate-nodes points)
|
||||
(replace-points point->merge-point)))
|
||||
content)))
|
||||
|
||||
(defn transform-content
|
||||
"Applies a transformation matrix over content and returns a new
|
||||
content as PathData instance."
|
||||
[content transform]
|
||||
(if (some? transform)
|
||||
(impl/-transform content transform)
|
||||
content))
|
||||
|
||||
(defn move-content
|
||||
"Applies a displacement over content and returns a new content as
|
||||
PathData instance. Implemented in function of `transform-content`."
|
||||
[content move-vec]
|
||||
(let [transform (gmt/translate-matrix move-vec)]
|
||||
(transform-content content transform)))
|
||||
|
||||
(defn calculate-extremities
|
||||
"Calculate extremities for the provided content"
|
||||
[content]
|
||||
(loop [points (transient #{})
|
||||
content (not-empty (vec content))
|
||||
from-p nil
|
||||
move-p nil]
|
||||
(if content
|
||||
(let [last-p (peek content)
|
||||
content (if (= :move-to (:command last-p))
|
||||
(pop content)
|
||||
content)
|
||||
segment (get content 0)
|
||||
to-p (helpers/segment->point segment)]
|
||||
|
||||
(if segment
|
||||
(case (:command segment)
|
||||
:move-to
|
||||
(recur (conj! points to-p)
|
||||
(not-empty (subvec content 1))
|
||||
to-p
|
||||
to-p)
|
||||
|
||||
:close-path
|
||||
(recur (conj! points move-p)
|
||||
(not-empty (subvec content 1))
|
||||
move-p
|
||||
move-p)
|
||||
|
||||
:line-to
|
||||
(recur (cond-> points
|
||||
(and from-p to-p)
|
||||
(-> (conj! move-p)
|
||||
(conj! to-p)))
|
||||
(not-empty (subvec content 1))
|
||||
to-p
|
||||
move-p)
|
||||
|
||||
:curve-to
|
||||
(let [c1 (helpers/segment->point segment :c1)
|
||||
c2 (helpers/segment->point segment :c2)]
|
||||
(recur (if (and from-p to-p c1 c2)
|
||||
(reduce conj!
|
||||
(-> points (conj! from-p) (conj! to-p))
|
||||
(helpers/calculate-curve-extremities from-p to-p c1 c2))
|
||||
points)
|
||||
|
||||
(not-empty (subvec content 1))
|
||||
to-p
|
||||
move-p)))
|
||||
(persistent! points)))
|
||||
(persistent! points))))
|
||||
|
||||
(defn content->selrect
|
||||
[content]
|
||||
(let [extremities (calculate-extremities content)
|
||||
;; We haven't found any extremities so we turn the commands into points
|
||||
extremities
|
||||
(if (empty? extremities)
|
||||
(->> content (keep helpers/segment->point))
|
||||
extremities)]
|
||||
|
||||
;; If no points are returned we return an empty rect.
|
||||
(if (d/not-empty? extremities)
|
||||
(grc/points->rect extremities)
|
||||
(grc/make-rect))))
|
||||
|
||||
(defn content-center
|
||||
[content]
|
||||
(-> content
|
||||
content->selrect
|
||||
grc/rect->center))
|
||||
|
||||
(defn append-segment
|
||||
[content segment]
|
||||
(let [content (cond
|
||||
(impl/path-data? content)
|
||||
(vec content)
|
||||
|
||||
(nil? content)
|
||||
[]
|
||||
|
||||
:else
|
||||
content)]
|
||||
(conj content (impl/check-segment segment))))
|
||||
|
||||
(defn points->content
|
||||
"Given a vector of points generate a path content.
|
||||
|
||||
Mainly used for generate a path content from user drawing points
|
||||
using curve drawing tool."
|
||||
[points & {:keys [close]}]
|
||||
(let [initial (first points)
|
||||
point->params
|
||||
(fn [point]
|
||||
{:x (dm/get-prop point :x)
|
||||
:y (dm/get-prop point :y)})]
|
||||
(loop [points (rest points)
|
||||
result [{:command :move-to
|
||||
:params (point->params initial)}]]
|
||||
(if-let [point (first points)]
|
||||
(recur (rest points)
|
||||
(conj result {:command :line-to
|
||||
:params (point->params point)}))
|
||||
|
||||
(let [result (if close
|
||||
(conj result {:command :close-path})
|
||||
result)]
|
||||
(impl/from-plain result))))))
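;; A hedged usage sketch for `points->content` (illustrative only):
(comment
  (points->content [(gpt/point 0 0) (gpt/point 10 0) (gpt/point 10 10)]
                   :close true))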
|
||||
@@ -4,58 +4,34 @@
|
||||
;;
|
||||
;; Copyright (c) KALEIDOS INC
|
||||
|
||||
(ns app.common.svg.path.shapes-to-path
|
||||
(ns app.common.types.path.shape-to-path
|
||||
(:require
|
||||
[app.common.colors :as clr]
|
||||
[app.common.data :as d]
|
||||
[app.common.data.macros :as dm]
|
||||
[app.common.files.helpers :as cfh]
|
||||
[app.common.geom.matrix :as gmt]
|
||||
[app.common.geom.point :as gpt]
|
||||
[app.common.geom.shapes.common :as gco]
|
||||
[app.common.geom.shapes.corners :as gso]
|
||||
[app.common.geom.shapes.path :as gsp]
|
||||
[app.common.svg.path.bool :as pb]
|
||||
[app.common.svg.path.command :as pc]
|
||||
[app.common.types.path.bool :as bool]
|
||||
[app.common.types.path.helpers :as helpers]
|
||||
[app.common.types.path.impl :as path.impl]
|
||||
[app.common.types.path.segment :as segm]
|
||||
[app.common.types.shape.radius :as ctsr]))
|
||||
|
||||
(def ^:const bezier-circle-c 0.551915024494)
|
||||
(def ^:const ^:private bezier-circle-c
|
||||
0.551915024494)
|
||||
|
||||
(def dissoc-attrs
|
||||
(def ^:private dissoc-attrs
|
||||
[:x :y :width :height
|
||||
:rx :ry :r1 :r2 :r3 :r4
|
||||
:metadata])
|
||||
|
||||
(def allowed-transform-types
|
||||
#{:rect
|
||||
:circle
|
||||
:image})
|
||||
(defn without-position-attrs
|
||||
[shape]
|
||||
(d/without-keys shape dissoc-attrs))
|
||||
|
||||
(def style-group-properties
|
||||
[:shadow
|
||||
:blur])
|
||||
|
||||
(def style-properties
|
||||
(into style-group-properties
|
||||
[:fill-color
|
||||
:fill-opacity
|
||||
:fill-color-gradient
|
||||
:fill-color-ref-file
|
||||
:fill-color-ref-id
|
||||
:fill-image
|
||||
:fills
|
||||
:stroke-color
|
||||
:stroke-color-ref-file
|
||||
:stroke-color-ref-id
|
||||
:stroke-opacity
|
||||
:stroke-style
|
||||
:stroke-width
|
||||
:stroke-alignment
|
||||
:stroke-cap-start
|
||||
:stroke-cap-end
|
||||
:strokes]))
|
||||
|
||||
(def default-bool-fills [{:fill-color clr/black}])
|
||||
|
||||
(defn make-corner-arc
|
||||
(defn- make-corner-arc
|
||||
"Creates a curvle corner for border radius"
|
||||
[from to corner radius]
|
||||
(let [x (case corner
|
||||
@@ -91,9 +67,9 @@
|
||||
:bottom-right (assoc to :x c2x)
|
||||
:bottom-left (assoc to :y c2y))]
|
||||
|
||||
(pc/make-curve-to to h1 h2)))
|
||||
(helpers/make-curve-to to h1 h2)))
|
||||
|
||||
(defn circle->path
|
||||
(defn- circle->path
|
||||
"Creates the bezier curves to approximate a circle shape"
|
||||
[{:keys [x y width height]}]
|
||||
(let [mx (+ x (/ width 2))
|
||||
@@ -112,13 +88,13 @@
|
||||
c1y (+ y (* (/ height 2) (- 1 c)))
|
||||
c2y (+ y (* (/ height 2) (+ 1 c)))]
|
||||
|
||||
[(pc/make-move-to p1)
|
||||
(pc/make-curve-to p2 (assoc p1 :x c2x) (assoc p2 :y c1y))
|
||||
(pc/make-curve-to p3 (assoc p2 :y c2y) (assoc p3 :x c2x))
|
||||
(pc/make-curve-to p4 (assoc p3 :x c1x) (assoc p4 :y c2y))
|
||||
(pc/make-curve-to p1 (assoc p4 :y c1y) (assoc p1 :x c1x))]))
|
||||
[(helpers/make-move-to p1)
|
||||
(helpers/make-curve-to p2 (assoc p1 :x c2x) (assoc p2 :y c1y))
|
||||
(helpers/make-curve-to p3 (assoc p2 :y c2y) (assoc p3 :x c2x))
|
||||
(helpers/make-curve-to p4 (assoc p3 :x c1x) (assoc p4 :y c2y))
|
||||
(helpers/make-curve-to p1 (assoc p4 :y c1y) (assoc p1 :x c1x))]))
|
||||
|
||||
(defn draw-rounded-rect-path
|
||||
(defn- draw-rounded-rect-path
|
||||
([x y width height r]
|
||||
(draw-rounded-rect-path x y width height r r r r))
|
||||
|
||||
@@ -135,21 +111,21 @@
|
||||
p7 (gpt/point (+ x r4) (+ height y))
|
||||
p8 (gpt/point x (+ height y (- r4)))]
|
||||
(-> []
|
||||
(conj (pc/make-move-to p1))
|
||||
(conj (helpers/make-move-to p1))
|
||||
(cond-> (not= p1 p2)
|
||||
(conj (make-corner-arc p1 p2 :top-left r1)))
|
||||
(conj (pc/make-line-to p3))
|
||||
(conj (helpers/make-line-to p3))
|
||||
(cond-> (not= p3 p4)
|
||||
(conj (make-corner-arc p3 p4 :top-right r2)))
|
||||
(conj (pc/make-line-to p5))
|
||||
(conj (helpers/make-line-to p5))
|
||||
(cond-> (not= p5 p6)
|
||||
(conj (make-corner-arc p5 p6 :bottom-right r3)))
|
||||
(conj (pc/make-line-to p7))
|
||||
(conj (helpers/make-line-to p7))
|
||||
(cond-> (not= p7 p8)
|
||||
(conj (make-corner-arc p7 p8 :bottom-left r4)))
|
||||
(conj (pc/make-line-to p1))))))
|
||||
(conj (helpers/make-line-to p1))))))
|
||||
|
||||
(defn rect->path
|
||||
(defn- rect->path
|
||||
"Creates a bezier curve that approximates a rounded corner rectangle"
|
||||
[{:keys [x y width height] :as shape}]
|
||||
(case (ctsr/radius-mode shape)
|
||||
@@ -165,7 +141,10 @@
|
||||
|
||||
(declare convert-to-path)
|
||||
|
||||
(defn fix-first-relative
|
||||
;; FIXME: this looks unnecessary because penpot already normalizes all
|
||||
;; path content to be absolute. There are no relative segments on
|
||||
;; penpot.
|
||||
(defn- fix-first-relative
|
||||
"Fix an issue with the simplify commands not changing the first relative"
|
||||
[content]
|
||||
(let [head (first content)]
|
||||
@@ -173,17 +152,19 @@
|
||||
(and head (:relative head))
|
||||
(update 0 assoc :relative false))))
|
||||
|
||||
(defn group-to-path
|
||||
(defn- group-to-path
|
||||
[group objects]
|
||||
(let [xform (comp (map #(get objects %))
|
||||
(map #(-> (convert-to-path % objects))))
|
||||
(let [xform (comp (map (d/getf objects))
|
||||
(map #(convert-to-path % objects)))
|
||||
|
||||
child-as-paths (into [] xform (:shapes group))
|
||||
head (last child-as-paths)
|
||||
head-data (select-keys head style-properties)
|
||||
head (peek child-as-paths)
|
||||
head-data (select-keys head bool/style-properties)
|
||||
content (into []
|
||||
(comp (filter #(= :path (:type %)))
|
||||
(mapcat #(fix-first-relative (:content %))))
|
||||
(comp (filter cfh/path-shape?)
|
||||
(map :content)
|
||||
(map vec)
|
||||
(mapcat fix-first-relative))
|
||||
child-as-paths)]
|
||||
(-> group
|
||||
(assoc :type :path)
|
||||
@@ -191,54 +172,68 @@
|
||||
(merge head-data)
|
||||
(d/without-keys dissoc-attrs))))
|
||||
|
||||
(defn bool-to-path
|
||||
(defn- bool-to-path
|
||||
[shape objects]
|
||||
|
||||
(let [children (->> (:shapes shape)
|
||||
(map #(get objects %))
|
||||
(map #(convert-to-path % objects)))
|
||||
bool-type (:bool-type shape)
|
||||
content (pb/content-bool bool-type (mapv :content children))]
|
||||
(let [children
|
||||
(->> (:shapes shape)
|
||||
(map (d/getf objects))
|
||||
(map #(convert-to-path % objects)))
|
||||
|
||||
bool-type
|
||||
(:bool-type shape)
|
||||
|
||||
content
|
||||
(bool/calculate-content bool-type (map :content children))]
|
||||
|
||||
(-> shape
|
||||
(assoc :type :path)
|
||||
(assoc :content content)
|
||||
(dissoc :bool-type)
|
||||
(d/without-keys dissoc-attrs))))
|
||||
|
||||
(defn convert-to-path
|
||||
"Transforms the given shape to a path"
|
||||
([shape]
|
||||
(convert-to-path shape {}))
|
||||
([{:keys [type metadata] :as shape} objects]
|
||||
(assert (map? objects))
|
||||
(case type
|
||||
(:group :frame)
|
||||
(group-to-path shape objects)
|
||||
"Transforms the given shape to a path shape"
|
||||
[shape objects]
|
||||
(assert (map? objects))
|
||||
;; FIXME: add check-objects-like
|
||||
;; FIXME: add check-shape ?
|
||||
|
||||
:bool
|
||||
(bool-to-path shape objects)
|
||||
(let [type (dm/get-prop shape :type)]
|
||||
|
||||
(:rect :circle :image :text)
|
||||
(let [new-content
|
||||
(case type
|
||||
:circle (circle->path shape)
|
||||
#_:else (rect->path shape))
|
||||
(case type
|
||||
(:group :frame)
|
||||
(group-to-path shape objects)
|
||||
|
||||
;; Apply the transforms that had the shape
|
||||
transform
|
||||
(cond-> (:transform shape (gmt/matrix))
|
||||
(:flip-x shape) (gmt/scale (gpt/point -1 1))
|
||||
(:flip-y shape) (gmt/scale (gpt/point 1 -1)))
|
||||
:bool
|
||||
(bool-to-path shape objects)
|
||||
|
||||
new-content (cond-> new-content
|
||||
(some? transform)
|
||||
(gsp/transform-content (gmt/transform-in (gco/shape->center shape) transform)))]
|
||||
(:rect :circle :image :text)
|
||||
(let [content
|
||||
(if (= type :circle)
|
||||
(circle->path shape)
|
||||
(rect->path shape))
|
||||
|
||||
(-> shape
|
||||
(assoc :type :path)
|
||||
(assoc :content new-content)
|
||||
(cond-> (= :image type)
|
||||
(assoc :fill-image metadata))
|
||||
(d/without-keys dissoc-attrs)))
|
||||
content
|
||||
(path.impl/from-plain content)
|
||||
|
||||
;; For the rest return the plain shape
|
||||
shape)))
|
||||
;; Apply the transforms that had the shape
|
||||
transform
|
||||
(cond-> (:transform shape (gmt/matrix))
|
||||
(:flip-x shape) (gmt/scale (gpt/point -1 1))
|
||||
(:flip-y shape) (gmt/scale (gpt/point 1 -1)))
|
||||
|
||||
content
|
||||
(cond-> content
|
||||
(some? transform)
|
||||
(segm/transform-content (gmt/transform-in (gco/shape->center shape) transform)))]
|
||||
|
||||
(-> shape
|
||||
(assoc :type :path)
|
||||
(assoc :content content)
|
||||
(cond-> (= :image type)
|
||||
(assoc :fill-image (get shape :metadata)))
|
||||
(d/without-keys dissoc-attrs)))
|
||||
|
||||
;; For the rest return the plain shape
|
||||
shape)))
|
||||
@@ -4,11 +4,11 @@
|
||||
;;
|
||||
;; Copyright (c) KALEIDOS INC
|
||||
|
||||
(ns app.common.svg.path.subpath
|
||||
(ns app.common.types.path.subpath
|
||||
(:require
|
||||
[app.common.data :as d]
|
||||
[app.common.geom.point :as gpt]
|
||||
[app.common.svg.path.command :as upc]))
|
||||
[app.common.types.path.helpers :as helpers]))
|
||||
|
||||
(defn pt=
|
||||
"Check if two points are close"
|
||||
@@ -18,7 +18,7 @@
|
||||
(defn make-subpath
|
||||
"Creates a subpath either from a single command or with all the data"
|
||||
([command]
|
||||
(let [p (upc/command->point command)]
|
||||
(let [p (helpers/segment->point command)]
|
||||
(make-subpath p p [command])))
|
||||
([from to data]
|
||||
{:from from
|
||||
@@ -29,9 +29,9 @@
|
||||
"Adds a command to the subpath"
|
||||
[subpath command]
|
||||
(let [command (if (= :close-path (:command command))
|
||||
(upc/make-line-to (:from subpath))
|
||||
(helpers/make-line-to (:from subpath))
|
||||
command)
|
||||
p (upc/command->point command)]
|
||||
p (helpers/segment->point command)]
|
||||
(-> subpath
|
||||
(assoc :to p)
|
||||
(update :data conj command))))
|
||||
@@ -62,7 +62,7 @@
|
||||
result))
|
||||
|
||||
new-data (->> subpath :data d/with-prev reverse
|
||||
(reduce reverse-commands [(upc/make-move-to (:to subpath))]))]
|
||||
(reduce reverse-commands [(helpers/make-move-to (:to subpath))]))]
|
||||
|
||||
(make-subpath (:to subpath) (:from subpath) new-data)))
|
||||
|
||||
@@ -125,6 +125,9 @@
|
||||
(defn is-closed? [subpath]
|
||||
(pt= (:from subpath) (:to subpath)))
|
||||
|
||||
(def ^:private xf-mapcat-data
|
||||
(mapcat :data))
|
||||
|
||||
(defn close-subpaths
|
||||
"Searches a path for possible subpaths that can create closed loops and merge them"
|
||||
[content]
|
||||
@@ -153,20 +156,17 @@
|
||||
new-subpaths)))
|
||||
result))]
|
||||
|
||||
(->> closed-subpaths
|
||||
(mapcat :data)
|
||||
(into []))))
|
||||
|
||||
(into [] xf-mapcat-data closed-subpaths)))
|
||||
|
||||
;; FIXME: revisit this fn impl for performance
|
||||
(defn reverse-content
|
||||
"Given a content reverse the order of the commands"
|
||||
[content]
|
||||
|
||||
(->> content
|
||||
(get-subpaths)
|
||||
(->> (get-subpaths content)
|
||||
(mapv reverse-subpath)
|
||||
(reverse)
|
||||
(mapcat :data)
|
||||
(into [])))
|
||||
(into [] xf-mapcat-data)))
|
||||
|
||||
;; https://mathworld.wolfram.com/PolygonArea.html
|
||||
(defn clockwise?
|
||||
@@ -181,10 +181,10 @@
|
||||
(if (nil? current)
|
||||
(> signed-area 0)
|
||||
|
||||
(let [{x1 :x y1 :y :as p} (upc/command->point current)
|
||||
(let [{x1 :x y1 :y :as p} (helpers/segment->point current)
|
||||
last? (nil? (first subpath))
|
||||
first-point (if (nil? first-point) p first-point)
|
||||
{x2 :x y2 :y} (if last? first-point (upc/command->point (first subpath)))
|
||||
{x2 :x y2 :y} (if last? first-point (helpers/segment->point (first subpath)))
|
||||
signed-area (+ signed-area (- (* x1 y2) (* x2 y1)))]
|
||||
|
||||
(recur (first subpath)
|
||||
@@ -22,14 +22,13 @@
|
||||
:keyword])
|
||||
|
||||
(def schema:plugin-data
|
||||
[:map-of {:gen/max 5}
|
||||
schema:keyword
|
||||
(sm/register!
|
||||
^{::sm/type ::plugin-data}
|
||||
[:map-of {:gen/max 5}
|
||||
schema:string
|
||||
schema:string]])
|
||||
|
||||
(sm/register! ::plugin-data schema:plugin-data)
|
||||
|
||||
schema:keyword
|
||||
[:map-of {:gen/max 5}
|
||||
schema:string
|
||||
schema:string]]))
|
||||
|
||||
(def ^:private schema:registry-entry
|
||||
[:map
|
||||
|
||||
@@ -22,13 +22,14 @@
|
||||
[app.common.transit :as t]
|
||||
[app.common.types.color :as ctc]
|
||||
[app.common.types.grid :as ctg]
|
||||
[app.common.types.path :as path]
|
||||
[app.common.types.path.segment :as path.segment]
|
||||
[app.common.types.plugins :as ctpg]
|
||||
[app.common.types.shape.attrs :refer [default-color]]
|
||||
[app.common.types.shape.blur :as ctsb]
|
||||
[app.common.types.shape.export :as ctse]
|
||||
[app.common.types.shape.interactions :as ctsi]
|
||||
[app.common.types.shape.layout :as ctsl]
|
||||
[app.common.types.shape.path :as ctsp]
|
||||
[app.common.types.shape.shadow :as ctss]
|
||||
[app.common.types.shape.text :as ctsx]
|
||||
[app.common.types.token :as cto]
|
||||
@@ -119,35 +120,35 @@
|
||||
[:vector {:gen/max 4 :gen/min 4} ::gpt/point])
|
||||
|
||||
(def schema:fill
|
||||
[:map {:title "Fill"}
|
||||
[:fill-color {:optional true} ::ctc/rgb-color]
|
||||
[:fill-opacity {:optional true} ::sm/safe-number]
|
||||
[:fill-color-gradient {:optional true} [:maybe ::ctc/gradient]]
|
||||
[:fill-color-ref-file {:optional true} [:maybe ::sm/uuid]]
|
||||
[:fill-color-ref-id {:optional true} [:maybe ::sm/uuid]]
|
||||
[:fill-image {:optional true} ::ctc/image-color]])
|
||||
(sm/register!
|
||||
^{::sm/type ::fill}
|
||||
[:map {:title "Fill"}
|
||||
[:fill-color {:optional true} ::ctc/rgb-color]
|
||||
[:fill-opacity {:optional true} ::sm/safe-number]
|
||||
[:fill-color-gradient {:optional true} [:maybe ::ctc/gradient]]
|
||||
[:fill-color-ref-file {:optional true} [:maybe ::sm/uuid]]
|
||||
[:fill-color-ref-id {:optional true} [:maybe ::sm/uuid]]
|
||||
[:fill-image {:optional true} ::ctc/image-color]]))
|
||||
|
||||
(sm/register! ::fill schema:fill)
|
||||
|
||||
(def ^:private schema:stroke
|
||||
[:map {:title "Stroke"}
|
||||
[:stroke-color {:optional true} :string]
|
||||
[:stroke-color-ref-file {:optional true} ::sm/uuid]
|
||||
[:stroke-color-ref-id {:optional true} ::sm/uuid]
|
||||
[:stroke-opacity {:optional true} ::sm/safe-number]
|
||||
[:stroke-style {:optional true}
|
||||
[::sm/one-of #{:solid :dotted :dashed :mixed :none :svg}]]
|
||||
[:stroke-width {:optional true} ::sm/safe-number]
|
||||
[:stroke-alignment {:optional true}
|
||||
[::sm/one-of #{:center :inner :outer}]]
|
||||
[:stroke-cap-start {:optional true}
|
||||
[::sm/one-of stroke-caps]]
|
||||
[:stroke-cap-end {:optional true}
|
||||
[::sm/one-of stroke-caps]]
|
||||
[:stroke-color-gradient {:optional true} ::ctc/gradient]
|
||||
[:stroke-image {:optional true} ::ctc/image-color]])
|
||||
|
||||
(sm/register! ::stroke schema:stroke)
|
||||
(def schema:stroke
|
||||
(sm/register!
|
||||
^{::sm/type ::stroke}
|
||||
[:map {:title "Stroke"}
|
||||
[:stroke-color {:optional true} :string]
|
||||
[:stroke-color-ref-file {:optional true} ::sm/uuid]
|
||||
[:stroke-color-ref-id {:optional true} ::sm/uuid]
|
||||
[:stroke-opacity {:optional true} ::sm/safe-number]
|
||||
[:stroke-style {:optional true}
|
||||
[::sm/one-of #{:solid :dotted :dashed :mixed :none :svg}]]
|
||||
[:stroke-width {:optional true} ::sm/safe-number]
|
||||
[:stroke-alignment {:optional true}
|
||||
[::sm/one-of #{:center :inner :outer}]]
|
||||
[:stroke-cap-start {:optional true}
|
||||
[::sm/one-of stroke-caps]]
|
||||
[:stroke-cap-end {:optional true}
|
||||
[::sm/one-of stroke-caps]]
|
||||
[:stroke-color-gradient {:optional true} ::ctc/gradient]
|
||||
[:stroke-image {:optional true} ::ctc/image-color]]))
|
||||
|
||||
(def check-stroke
|
||||
(sm/check-fn schema:stroke))
|
||||
@@ -171,8 +172,7 @@
|
||||
[:width ::sm/safe-number]
|
||||
[:height ::sm/safe-number]])
|
||||
|
||||
;; FIXME: rename to shape-generic-attrs
|
||||
(def schema:shape-attrs
|
||||
(def schema:shape-generic-attrs
|
||||
[:map {:title "ShapeAttrs"}
|
||||
[:page-id {:optional true} ::sm/uuid]
|
||||
[:component-id {:optional true} ::sm/uuid]
|
||||
@@ -234,7 +234,7 @@
|
||||
[:map {:title "BoolAttrs"}
|
||||
[:shapes [:vector {:gen/max 10 :gen/min 1} ::sm/uuid]]
|
||||
[:bool-type [::sm/one-of bool-types]]
|
||||
[:content ::ctsp/content]])
|
||||
[:content ::path/content]])
|
||||
|
||||
(def ^:private schema:rect-attrs
|
||||
[:map {:title "RectAttrs"}])
|
||||
@@ -259,7 +259,7 @@
|
||||
|
||||
(def ^:private schema:path-attrs
|
||||
[:map {:title "PathAttrs"}
|
||||
[:content ::ctsp/content]])
|
||||
[:content ::path/content]])
|
||||
|
||||
(def ^:private schema:text-attrs
|
||||
[:map {:title "TextAttrs"}
|
||||
@@ -276,7 +276,7 @@
|
||||
[]
|
||||
(->> (sg/generator schema:shape-base-attrs)
|
||||
(sg/mcat (fn [{:keys [type] :as shape}]
|
||||
(sg/let [attrs1 (sg/generator schema:shape-attrs)
|
||||
(sg/let [attrs1 (sg/generator schema:shape-generic-attrs)
|
||||
attrs2 (sg/generator schema:shape-geom-attrs)
|
||||
attrs3 (case type
|
||||
:text (sg/generator schema:text-attrs)
|
||||
@@ -294,94 +294,100 @@
|
||||
(merge attrs1 shape attrs2 attrs3)))))
|
||||
(sg/fmap create-shape)))
|
||||
|
||||
(def schema:shape-attrs
|
||||
[:multi {:dispatch :type
|
||||
:decode/json (fn [shape]
|
||||
(update shape :type keyword))
|
||||
:title "Shape"}
|
||||
[:group
|
||||
[:merge {:title "GroupShape"}
|
||||
ctsl/schema:layout-attrs
|
||||
schema:group-attrs
|
||||
schema:shape-generic-attrs
|
||||
schema:shape-geom-attrs
|
||||
schema:shape-base-attrs]]
|
||||
|
||||
[:frame
|
||||
[:merge {:title "FrameShape"}
|
||||
ctsl/schema:layout-attrs
|
||||
::ctsl/layout-attrs
|
||||
schema:frame-attrs
|
||||
schema:shape-generic-attrs
|
||||
schema:shape-geom-attrs
|
||||
schema:shape-base-attrs
|
||||
::ctv/variant-shape
|
||||
::ctv/variant-container]]
|
||||
|
||||
[:bool
|
||||
[:merge {:title "BoolShape"}
|
||||
ctsl/schema:layout-attrs
|
||||
schema:bool-attrs
|
||||
schema:shape-generic-attrs
|
||||
schema:shape-base-attrs]]
|
||||
|
||||
[:rect
|
||||
[:merge {:title "RectShape"}
|
||||
ctsl/schema:layout-attrs
|
||||
schema:rect-attrs
|
||||
schema:shape-generic-attrs
|
||||
schema:shape-geom-attrs
|
||||
schema:shape-base-attrs]]
|
||||
|
||||
[:circle
|
||||
[:merge {:title "CircleShape"}
|
||||
ctsl/schema:layout-attrs
|
||||
schema:circle-attrs
|
||||
schema:shape-generic-attrs
|
||||
schema:shape-geom-attrs
|
||||
schema:shape-base-attrs]]
|
||||
|
||||
[:image
|
||||
[:merge {:title "ImageShape"}
|
||||
ctsl/schema:layout-attrs
|
||||
schema:image-attrs
|
||||
schema:shape-generic-attrs
|
||||
schema:shape-geom-attrs
|
||||
schema:shape-base-attrs]]
|
||||
|
||||
[:svg-raw
|
||||
[:merge {:title "SvgRawShape"}
|
||||
ctsl/schema:layout-attrs
|
||||
schema:svg-raw-attrs
|
||||
schema:shape-generic-attrs
|
||||
schema:shape-geom-attrs
|
||||
schema:shape-base-attrs]]
|
||||
|
||||
[:path
|
||||
[:merge {:title "PathShape"}
|
||||
ctsl/schema:layout-attrs
|
||||
schema:path-attrs
|
||||
schema:shape-generic-attrs
|
||||
schema:shape-base-attrs]]
|
||||
|
||||
[:text
|
||||
[:merge {:title "TextShape"}
|
||||
ctsl/schema:layout-attrs
|
||||
schema:text-attrs
|
||||
schema:shape-generic-attrs
|
||||
schema:shape-geom-attrs
|
||||
schema:shape-base-attrs]]])
|
||||
|
||||
(def schema:shape
|
||||
[:and {:title "Shape"
|
||||
:gen/gen (shape-generator)
|
||||
:decode/json {:leave decode-shape}}
|
||||
[:fn shape?]
|
||||
[:multi {:dispatch :type
|
||||
:decode/json (fn [shape]
|
||||
(update shape :type keyword))
|
||||
:title "Shape"}
|
||||
[:group
|
||||
[:merge {:title "GroupShape"}
|
||||
::ctsl/layout-child-attrs
|
||||
schema:group-attrs
|
||||
schema:shape-attrs
|
||||
schema:shape-geom-attrs
|
||||
schema:shape-base-attrs]]
|
||||
(sm/register!
|
||||
^{::sm/type ::shape}
|
||||
[:and {:title "Shape"
|
||||
:gen/gen (shape-generator)
|
||||
:decode/json {:leave decode-shape}}
|
||||
[:fn shape?]
|
||||
schema:shape-attrs]))
|
||||
|
||||
[:frame
|
||||
[:merge {:title "FrameShape"}
|
||||
::ctsl/layout-child-attrs
|
||||
::ctsl/layout-attrs
|
||||
schema:frame-attrs
|
||||
schema:shape-attrs
|
||||
schema:shape-geom-attrs
|
||||
schema:shape-base-attrs
|
||||
::ctv/variant-shape
|
||||
::ctv/variant-container]]
|
||||
(def check-shape-generic-attrs
|
||||
(sm/check-fn schema:shape-generic-attrs))
|
||||
|
||||
[:bool
|
||||
[:merge {:title "BoolShape"}
|
||||
::ctsl/layout-child-attrs
|
||||
schema:bool-attrs
|
||||
schema:shape-attrs
|
||||
schema:shape-base-attrs]]
|
||||
|
||||
[:rect
|
||||
[:merge {:title "RectShape"}
|
||||
::ctsl/layout-child-attrs
|
||||
schema:rect-attrs
|
||||
schema:shape-attrs
|
||||
schema:shape-geom-attrs
|
||||
schema:shape-base-attrs]]
|
||||
|
||||
[:circle
|
||||
[:merge {:title "CircleShape"}
|
||||
::ctsl/layout-child-attrs
|
||||
schema:circle-attrs
|
||||
schema:shape-attrs
|
||||
schema:shape-geom-attrs
|
||||
schema:shape-base-attrs]]
|
||||
|
||||
[:image
|
||||
[:merge {:title "ImageShape"}
|
||||
::ctsl/layout-child-attrs
|
||||
schema:image-attrs
|
||||
schema:shape-attrs
|
||||
schema:shape-geom-attrs
|
||||
schema:shape-base-attrs]]
|
||||
|
||||
[:svg-raw
|
||||
[:merge {:title "SvgRawShape"}
|
||||
::ctsl/layout-child-attrs
|
||||
schema:svg-raw-attrs
|
||||
schema:shape-attrs
|
||||
schema:shape-geom-attrs
|
||||
schema:shape-base-attrs]]
|
||||
|
||||
[:path
|
||||
[:merge {:title "PathShape"}
|
||||
::ctsl/layout-child-attrs
|
||||
schema:path-attrs
|
||||
schema:shape-attrs
|
||||
schema:shape-base-attrs]]
|
||||
|
||||
[:text
|
||||
[:merge {:title "TextShape"}
|
||||
::ctsl/layout-child-attrs
|
||||
schema:text-attrs
|
||||
schema:shape-attrs
|
||||
schema:shape-geom-attrs
|
||||
schema:shape-base-attrs]]]])
|
||||
|
||||
(sm/register! ::shape schema:shape)
|
||||
|
||||
(def check-shape-attrs!
|
||||
(def check-shape-attrs
|
||||
(sm/check-fn schema:shape-attrs))
|
||||
|
||||
(def check-shape!
|
||||
(def check-shape
|
||||
(sm/check-fn schema:shape
|
||||
:hint "expected valid shape"))
|
||||
|
||||
@@ -396,6 +402,50 @@
|
||||
(or (some :fill-image fills)
|
||||
(some :stroke-image strokes)))
|
||||
|
||||
;; Valid attributes
|
||||
|
||||
(def ^:private allowed-shape-attrs #{:page-id :component-id :component-file :component-root :main-instance
|
||||
:remote-synced :shape-ref :touched :blocked :collapsed :locked
|
||||
:hidden :masked-group :fills :proportion :proportion-lock :constraints-h
|
||||
:constraints-v :fixed-scroll :r1 :r2 :r3 :r4 :opacity :grids :exports
|
||||
:strokes :blend-mode :interactions :shadow :blur :grow-type :applied-tokens
|
||||
:plugin-data})
|
||||
(def ^:private allowed-shape-geom-attrs #{:x :y :width :height})
|
||||
(def ^:private allowed-shape-base-attrs #{:id :name :type :selrect :points :transform :transform-inverse :parent-id :frame-id})
|
||||
(def ^:private allowed-bool-attrs #{:shapes :bool-type :content})
|
||||
(def ^:private allowed-group-attrs #{:shapes})
|
||||
(def ^:private allowed-frame-attrs #{:shapes :hide-fill-on-export :show-content :hide-in-viewer})
|
||||
(def ^:private allowed-image-attrs #{:metadata})
|
||||
(def ^:private allowed-svg-attrs #{:content})
|
||||
(def ^:private allowed-path-attrs #{:content})
|
||||
(def ^:private allowed-text-attrs #{:content})
|
||||
(def ^:private allowed-generic-attrs (set/union allowed-shape-attrs allowed-shape-geom-attrs allowed-shape-base-attrs))
|
||||
|
||||
(defn is-allowed-attr?
|
||||
[attr type]
|
||||
(case type
|
||||
:group (or (contains? allowed-group-attrs attr)
|
||||
(contains? allowed-generic-attrs attr))
|
||||
:frame (or (contains? allowed-frame-attrs attr)
|
||||
(contains? allowed-generic-attrs attr))
|
||||
:bool (or (contains? allowed-bool-attrs attr)
|
||||
(contains? allowed-shape-attrs attr)
|
||||
(contains? allowed-shape-base-attrs attr))
|
||||
:rect (contains? allowed-generic-attrs attr)
|
||||
:circle (contains? allowed-generic-attrs attr)
|
||||
:image (or (contains? allowed-image-attrs attr)
|
||||
(contains? allowed-generic-attrs attr))
|
||||
:svg-raw (or (contains? allowed-svg-attrs attr)
|
||||
(contains? allowed-generic-attrs attr))
|
||||
:path (or (contains? allowed-path-attrs attr)
|
||||
(contains? allowed-shape-attrs attr)
|
||||
(contains? allowed-shape-base-attrs attr))
|
||||
:text (or (contains? allowed-text-attrs attr)
|
||||
(contains? allowed-generic-attrs attr))))
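;; Illustrative checks, grounded in the allowed-* sets above: :metadata is only
;; valid on image shapes, while :opacity is a generic attr accepted by any type.
(comment
  (is-allowed-attr? :metadata :image)  ;; => true
  (is-allowed-attr? :metadata :rect)   ;; => false
  (is-allowed-attr? :opacity :text))   ;; => true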
|
||||
|
||||
|
||||
|
||||
|
||||
;; --- Initialization
|
||||
|
||||
(def ^:private minimal-rect-attrs
|
||||
@@ -525,7 +575,7 @@
|
||||
(defn setup-path
|
||||
[{:keys [content selrect points] :as shape}]
|
||||
(let [selrect (or selrect
|
||||
(gsh/content->selrect content)
|
||||
(path.segment/content->selrect content)
|
||||
(grc/make-rect))
|
||||
points (or points (grc/rect->points selrect))]
|
||||
(-> shape
|
||||
@@ -711,3 +761,8 @@
|
||||
(d/patch-object (select-keys props basic-extract-props))
|
||||
(cond-> (cfh/text-shape? shape) (patch-text-props props))
|
||||
(cond-> (cfh/frame-shape? shape) (patch-layout-props props)))))
|
||||
|
||||
;; FIXME: Get these from the wasm module, and tweak the values
|
||||
;; (we'd probably want 12 stops at most)
|
||||
(def MAX-GRADIENT-STOPS 16)
|
||||
(def MAX-FILLS 8)
|
||||
|
||||
@@ -168,25 +168,24 @@
|
||||
(def item-align-self-types
|
||||
#{:start :end :center :stretch})
|
||||
|
||||
(sm/register!
|
||||
^{::sm/type ::layout-child-attrs}
|
||||
[:map {:title "LayoutChildAttrs"}
|
||||
[:layout-item-margin-type {:optional true} [::sm/one-of item-margin-types]]
|
||||
[:layout-item-margin {:optional true}
|
||||
[:map
|
||||
[:m1 {:optional true} ::sm/safe-number]
|
||||
[:m2 {:optional true} ::sm/safe-number]
|
||||
[:m3 {:optional true} ::sm/safe-number]
|
||||
[:m4 {:optional true} ::sm/safe-number]]]
|
||||
[:layout-item-max-h {:optional true} ::sm/safe-number]
|
||||
[:layout-item-min-h {:optional true} ::sm/safe-number]
|
||||
[:layout-item-max-w {:optional true} ::sm/safe-number]
|
||||
[:layout-item-min-w {:optional true} ::sm/safe-number]
|
||||
[:layout-item-h-sizing {:optional true} [::sm/one-of item-h-sizing-types]]
|
||||
[:layout-item-v-sizing {:optional true} [::sm/one-of item-v-sizing-types]]
|
||||
[:layout-item-align-self {:optional true} [::sm/one-of item-align-self-types]]
|
||||
[:layout-item-absolute {:optional true} :boolean]
|
||||
[:layout-item-z-index {:optional true} ::sm/safe-number]])
|
||||
(def schema:layout-attrs
|
||||
[:map {:title "LayoutChildAttrs"}
|
||||
[:layout-item-margin-type {:optional true} [::sm/one-of item-margin-types]]
|
||||
[:layout-item-margin {:optional true}
|
||||
[:map
|
||||
[:m1 {:optional true} ::sm/safe-number]
|
||||
[:m2 {:optional true} ::sm/safe-number]
|
||||
[:m3 {:optional true} ::sm/safe-number]
|
||||
[:m4 {:optional true} ::sm/safe-number]]]
|
||||
[:layout-item-max-h {:optional true} ::sm/safe-number]
|
||||
[:layout-item-min-h {:optional true} ::sm/safe-number]
|
||||
[:layout-item-max-w {:optional true} ::sm/safe-number]
|
||||
[:layout-item-min-w {:optional true} ::sm/safe-number]
|
||||
[:layout-item-h-sizing {:optional true} [::sm/one-of item-h-sizing-types]]
|
||||
[:layout-item-v-sizing {:optional true} [::sm/one-of item-v-sizing-types]]
|
||||
[:layout-item-align-self {:optional true} [::sm/one-of item-align-self-types]]
|
||||
[:layout-item-absolute {:optional true} :boolean]
|
||||
[:layout-item-z-index {:optional true} ::sm/safe-number]])
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; SCHEMAS
|
||||
|
||||
@@ -1,431 +0,0 @@
|
||||
;; This Source Code Form is subject to the terms of the Mozilla Public
|
||||
;; License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
;;
|
||||
;; Copyright (c) KALEIDOS INC
|
||||
|
||||
(ns app.common.types.shape.path
|
||||
(:require
|
||||
[app.common.schema :as sm])
|
||||
(:import
|
||||
#?(:cljs [goog.string StringBuffer]
|
||||
:clj [java.nio ByteBuffer])))
|
||||
|
||||
#?(:clj (set! *warn-on-reflection* true))
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; SCHEMA: PLAIN FORMAT
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
(def schema:line-to-segment
|
||||
[:map
|
||||
[:command [:= :line-to]]
|
||||
[:params
|
||||
[:map
|
||||
[:x ::sm/safe-number]
|
||||
[:y ::sm/safe-number]]]])
|
||||
|
||||
(def schema:close-path-segment
|
||||
[:map
|
||||
[:command [:= :close-path]]])
|
||||
|
||||
(def schema:move-to-segment
|
||||
[:map
|
||||
[:command [:= :move-to]]
|
||||
[:params
|
||||
[:map
|
||||
[:x ::sm/safe-number]
|
||||
[:y ::sm/safe-number]]]])
|
||||
|
||||
(def schema:curve-to-segment
|
||||
[:map
|
||||
[:command [:= :curve-to]]
|
||||
[:params
|
||||
[:map
|
||||
[:x ::sm/safe-number]
|
||||
[:y ::sm/safe-number]
|
||||
[:c1x ::sm/safe-number]
|
||||
[:c1y ::sm/safe-number]
|
||||
[:c2x ::sm/safe-number]
|
||||
[:c2y ::sm/safe-number]]]])
|
||||
|
||||
(def schema:path-segment
|
||||
[:multi {:title "PathSegment"
|
||||
:dispatch :command
|
||||
:decode/json #(update % :command keyword)}
|
||||
[:line-to schema:line-to-segment]
|
||||
[:close-path schema:close-path-segment]
|
||||
[:move-to schema:move-to-segment]
|
||||
[:curve-to schema:curve-to-segment]])
|
||||
|
||||
(def schema:path-content
|
||||
[:vector schema:path-segment])
|
||||
|
||||
(def check-path-content
|
||||
(sm/check-fn schema:path-content))
|
||||
|
||||
(sm/register! ::segment schema:path-segment)
|
||||
(sm/register! ::content schema:path-content)
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; TYPE: PATH-DATA
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
(def ^:const SEGMENT-BYTE-SIZE 28)
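;; Per-segment binary layout, derived from read-segment / from-plain below
;; (each segment occupies SEGMENT-BYTE-SIZE = 28 bytes):
;;   offset  0  int16    segment type (1 = move-to, 2 = line-to, 3 = curve-to, 4 = close-path)
;;   offset  4  float32  c1x  (only used by curve-to)
;;   offset  8  float32  c1y
;;   offset 12  float32  c2x
;;   offset 16  float32  c2y
;;   offset 20  float32  x
;;   offset 24  float32  y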
|
||||
|
||||
(defprotocol IPathData
|
||||
(-write-to [_ buffer offset] "write the content to the specified buffer"))
|
||||
|
||||
(defrecord PathSegment [command params])
|
||||
|
||||
(defn- get-path-string
|
||||
"Format the path data structure to string"
|
||||
[buffer size]
|
||||
(let [builder #?(:clj (java.lang.StringBuilder. (int (* size 4)))
|
||||
:cljs (StringBuffer.))]
|
||||
(loop [index 0]
|
||||
(when (< index size)
|
||||
(let [offset (* index SEGMENT-BYTE-SIZE)
|
||||
type #?(:clj (.getShort ^ByteBuffer buffer offset)
|
||||
:cljs (.getInt16 buffer offset))]
|
||||
(case (long type)
|
||||
1 (let [x #?(:clj (.getFloat ^ByteBuffer buffer (+ offset 20))
|
||||
:cljs (.getFloat32 buffer (+ offset 20)))
|
||||
y #?(:clj (.getFloat ^ByteBuffer buffer (+ offset 24))
|
||||
:cljs (.getFloat32 buffer (+ offset 24)))]
|
||||
(doto builder
|
||||
(.append "M")
|
||||
(.append x)
|
||||
(.append ",")
|
||||
(.append y)))
|
||||
2 (let [x #?(:clj (.getFloat ^ByteBuffer buffer (+ offset 20))
|
||||
:cljs (.getFloat32 buffer (+ offset 20)))
|
||||
y #?(:clj (.getFloat ^ByteBuffer buffer (+ offset 24))
|
||||
:cljs (.getFloat32 buffer (+ offset 24)))]
|
||||
(doto builder
|
||||
(.append "L")
|
||||
(.append x)
|
||||
(.append ",")
|
||||
(.append y)))
|
||||
|
||||
3 (let [c1x #?(:clj (.getFloat ^ByteBuffer buffer (+ offset 4))
|
||||
:cljs (.getFloat32 buffer (+ offset 4)))
|
||||
c1y #?(:clj (.getFloat ^ByteBuffer buffer (+ offset 8))
|
||||
:cljs (.getFloat32 buffer (+ offset 8)))
|
||||
c2x #?(:clj (.getFloat ^ByteBuffer buffer (+ offset 12))
|
||||
:cljs (.getFloat32 buffer (+ offset 12)))
|
||||
c2y #?(:clj (.getFloat ^ByteBuffer buffer (+ offset 16))
|
||||
:cljs (.getFloat32 buffer (+ offset 16)))
|
||||
x #?(:clj (.getFloat ^ByteBuffer buffer (+ offset 20))
|
||||
:cljs (.getFloat32 buffer (+ offset 20)))
|
||||
y #?(:clj (.getFloat ^ByteBuffer buffer (+ offset 24))
|
||||
:cljs (.getFloat32 buffer (+ offset 24)))]
|
||||
(doto builder
|
||||
(.append "C")
|
||||
(.append c1x)
|
||||
(.append ",")
|
||||
(.append c1y)
|
||||
(.append ",")
|
||||
(.append c2x)
|
||||
(.append ",")
|
||||
(.append c2y)
|
||||
(.append ",")
|
||||
(.append x)
|
||||
(.append ",")
|
||||
(.append y)))
|
||||
4 (doto builder
|
||||
(.append "Z")))
|
||||
(recur (inc index)))))
|
||||
|
||||
(.toString builder)))
|
||||
|
||||
(defn- read-segment
|
||||
[buffer index]
|
||||
(let [offset (* index SEGMENT-BYTE-SIZE)
|
||||
type #?(:clj (.getShort ^ByteBuffer buffer offset)
|
||||
:cljs (.getInt16 buffer offset))]
|
||||
(case (long type)
|
||||
1 (let [x #?(:clj (.getFloat ^ByteBuffer buffer (+ offset 20))
|
||||
:cljs (.getFloat32 buffer (+ offset 20)))
|
||||
y #?(:clj (.getFloat ^ByteBuffer buffer (+ offset 24))
|
||||
:cljs (.getFloat32 buffer (+ offset 24)))]
|
||||
(->PathSegment :move-to {:x x :y y}))
|
||||
|
||||
2 (let [x #?(:clj (.getFloat ^ByteBuffer buffer (+ offset 20))
|
||||
:cljs (.getFloat32 buffer (+ offset 20)))
|
||||
y #?(:clj (.getFloat ^ByteBuffer buffer (+ offset 24))
|
||||
:cljs (.getFloat32 buffer (+ offset 24)))]
|
||||
(->PathSegment :line-to {:x x :y y}))
|
||||
|
||||
3 (let [c1x #?(:clj (.getFloat ^ByteBuffer buffer (+ offset 4))
|
||||
:cljs (.getFloat32 buffer (+ offset 4)))
|
||||
c1y #?(:clj (.getFloat ^ByteBuffer buffer (+ offset 8))
|
||||
:cljs (.getFloat32 buffer (+ offset 8)))
|
||||
c2x #?(:clj (.getFloat ^ByteBuffer buffer (+ offset 12))
|
||||
:cljs (.getFloat32 buffer (+ offset 12)))
|
||||
c2y #?(:clj (.getFloat ^ByteBuffer buffer (+ offset 16))
|
||||
:cljs (.getFloat32 buffer (+ offset 16)))
|
||||
x #?(:clj (.getFloat ^ByteBuffer buffer (+ offset 20))
|
||||
:cljs (.getFloat32 buffer (+ offset 20)))
|
||||
y #?(:clj (.getFloat ^ByteBuffer buffer (+ offset 24))
|
||||
:cljs (.getFloat32 buffer (+ offset 24)))]
|
||||
|
||||
(->PathSegment :curve-to {:x x :y y :c1x c1x :c1y c1y :c2x c2x :c2y c2y}))
|
||||
|
||||
4 (->PathSegment :close-path {}))))
|
||||
|
||||
(defn- in-range?
|
||||
[size i]
|
||||
(and (< i size) (>= i 0)))
|
||||
|
||||
#?(:clj
|
||||
(deftype PathData [size buffer]
|
||||
Object
|
||||
(toString [_]
|
||||
(get-path-string buffer size))
|
||||
|
||||
clojure.lang.Sequential
|
||||
clojure.lang.IPersistentCollection
|
||||
|
||||
(empty [_]
|
||||
(throw (ex-info "not implemented" {})))
|
||||
(equiv [_ other]
|
||||
(if (instance? PathData other)
|
||||
(.equals ^ByteBuffer buffer (.-buffer ^PathData other))
|
||||
false))
|
||||
|
||||
(seq [this]
|
||||
(when (pos? size)
|
||||
(->> (range size)
|
||||
(map (fn [i] (nth this i))))))
|
||||
|
||||
(cons [_ _val]
|
||||
(throw (ex-info "not implemented" {})))
|
||||
|
||||
clojure.lang.IReduceInit
|
||||
(reduce [_ f start]
|
||||
(loop [index 0
|
||||
result start]
|
||||
(if (< index size)
|
||||
(let [result (f result (read-segment buffer index))]
|
||||
(if (reduced? result)
|
||||
@result
|
||||
(recur (inc index) result)))
|
||||
result)))
|
||||
|
||||
clojure.lang.Indexed
|
||||
(nth [_ i]
|
||||
(if (in-range? size i)
|
||||
(read-segment buffer i)
|
||||
nil))
|
||||
|
||||
(nth [_ i default]
|
||||
(if (in-range? size i)
|
||||
(read-segment buffer i)
|
||||
default))
|
||||
|
||||
clojure.lang.Counted
|
||||
(count [_] size))
|
||||
|
||||
:cljs
|
||||
(deftype PathData [size buffer dview]
|
||||
Object
|
||||
(toString [_]
|
||||
(get-path-string dview size))
|
||||
|
||||
IPathData
|
||||
(-write-to [_ into-buffer offset]
|
||||
(assert (instance? js/ArrayBuffer into-buffer) "expected an instance of ArrayBuffer")
|
||||
(let [size (.-byteLength buffer)
|
||||
mem (js/Uint32Array. into-buffer offset size)]
|
||||
(.set mem (js/Uint32Array. buffer))))
|
||||
|
||||
cljs.core/ISequential
|
||||
cljs.core/IEquiv
|
||||
(-equiv [_ other]
|
||||
(if (instance? PathData other)
|
||||
(let [obuffer (.-buffer other)
|
||||
osize (.-byteLength obuffer)
|
||||
csize (.-byteLength buffer)]
|
||||
(if (= osize csize)
|
||||
(let [cb (js/Uint32Array. buffer)
|
||||
ob (js/Uint32Array. obuffer)]
|
||||
(loop [i 0]
|
||||
(if (< i osize)
|
||||
(if (= (aget ob i)
|
||||
(aget cb i))
|
||||
(recur (inc i))
|
||||
false)
|
||||
true)))
|
||||
false))
|
||||
false))
|
||||
|
||||
cljs.core/IReduce
|
||||
(-reduce [_ f]
|
||||
(loop [index 1
|
||||
result (if (pos? size)
|
||||
(read-segment dview 0)
|
||||
nil)]
|
||||
(if (< index size)
|
||||
(let [result (f result (read-segment dview index))]
|
||||
(if (reduced? result)
|
||||
@result
|
||||
(recur (inc index) result)))
|
||||
result)))
|
||||
|
||||
(-reduce [_ f start]
|
||||
(loop [index 0
|
||||
result start]
|
||||
(if (< index size)
|
||||
(let [result (f result (read-segment dview index))]
|
||||
(if (reduced? result)
|
||||
@result
|
||||
(recur (inc index) result)))
|
||||
result)))
|
||||
|
||||
cljs.core/IHash
|
||||
(-hash [_]
|
||||
(throw (ex-info "not-implemented" {})))
|
||||
|
||||
cljs.core/ICounted
|
||||
(-count [_] size)
|
||||
|
||||
cljs.core/IIndexed
|
||||
(-nth [_ i]
|
||||
(if (in-range? size i)
|
||||
(read-segment dview i)
|
||||
nil))
|
||||
|
||||
(-nth [_ i default]
|
||||
(if (in-range? size i)
|
||||
(read-segment dview i)
|
||||
default))
|
||||
|
||||
cljs.core/ISeqable
|
||||
(-seq [this]
|
||||
(when (pos? size)
|
||||
(->> (range size)
|
||||
(map (fn [i] (cljs.core/-nth this i))))))))
|
||||
|
||||
(defn- from-bytes
|
||||
[buffer]
|
||||
#?(:clj
|
||||
(cond
|
||||
(instance? ByteBuffer buffer)
|
||||
(let [size (.capacity ^ByteBuffer buffer)
|
||||
count (long (/ size SEGMENT-BYTE-SIZE))]
|
||||
(PathData. count buffer))
|
||||
|
||||
(bytes? buffer)
|
||||
(let [size (alength ^bytes buffer)
|
||||
count (long (/ size SEGMENT-BYTE-SIZE))]
|
||||
(PathData. count
|
||||
(ByteBuffer/wrap buffer)))
|
||||
|
||||
:else
|
||||
(throw (java.lang.IllegalArgumentException. "invalid data provided")))
|
||||
|
||||
:cljs
|
||||
(cond
|
||||
(instance? js/ArrayBuffer buffer)
|
||||
(let [size (.-byteLength buffer)
|
||||
count (long (/ size SEGMENT-BYTE-SIZE))]
|
||||
(PathData. count
|
||||
buffer
|
||||
(js/DataView. buffer)))
|
||||
|
||||
(instance? js/DataView buffer)
|
||||
(let [dview buffer
|
||||
buffer (.-buffer dview)
|
||||
size (.-byteLength buffer)
|
||||
count (long (/ size SEGMENT-BYTE-SIZE))]
|
||||
(PathData. count buffer dview))
|
||||
|
||||
:else
|
||||
(throw (js/Error. "invalid data provided")))))
|
||||
|
||||
;; FIXME: consider implementing with reduce
|
||||
;; FIXME: consider ensuring fixed precision to avoid doing it at formatting time
|
||||
|
||||
(defn- from-plain
|
||||
"Create a PathData instance from plain data structures"
|
||||
[content]
|
||||
(assert (check-path-content content))
|
||||
|
||||
(let [content (vec content)
|
||||
total (count content)
|
||||
#?@(:cljs [buffer (new js/ArrayBuffer (* total SEGMENT-BYTE-SIZE))
|
||||
dview (new js/DataView buffer)]
|
||||
:clj [buffer (ByteBuffer/allocate (* total SEGMENT-BYTE-SIZE))])]
|
||||
(loop [index 0]
|
||||
(when (< index total)
|
||||
(let [segment (nth content index)
|
||||
offset (* index SEGMENT-BYTE-SIZE)]
|
||||
(case (get segment :command)
|
||||
:move-to
|
||||
(let [params (get segment :params)
|
||||
x (float (get params :x))
|
||||
y (float (get params :y))]
|
||||
#?(:clj (.putShort buffer (int offset) (short 1))
|
||||
:cljs (.setInt16 dview offset 1))
|
||||
#?(:clj (.putFloat buffer (+ offset 20) x)
|
||||
:cljs (.setFloat32 dview (+ offset 20) x))
|
||||
#?(:clj (.putFloat buffer (+ offset 24) y)
|
||||
:cljs (.setFloat32 dview (+ offset 24) y)))
|
||||
|
||||
:line-to
|
||||
(let [params (get segment :params)
|
||||
x (float (get params :x))
|
||||
y (float (get params :y))]
|
||||
#?(:clj (.putShort buffer (int offset) (short 2))
|
||||
:cljs (.setInt16 dview offset 2))
|
||||
#?(:clj (.putFloat buffer (+ offset 20) x)
|
||||
:cljs (.setFloat32 dview (+ offset 20) x))
|
||||
#?(:clj (.putFloat buffer (+ offset 24) y)
|
||||
:cljs (.setFloat32 dview (+ offset 24) y)))
|
||||
|
||||
:curve-to
|
||||
(let [params (get segment :params)
|
||||
x (float (get params :x))
|
||||
y (float (get params :y))
|
||||
c1x (float (get params :c1x x))
|
||||
c1y (float (get params :c1y y))
|
||||
c2x (float (get params :c2x x))
|
||||
c2y (float (get params :c2y y))]
|
||||
|
||||
#?(:clj (.putShort buffer (int offset) (short 3))
|
||||
:cljs (.setInt16 dview offset 3))
|
||||
#?(:clj (.putFloat buffer (+ offset 4) c1x)
|
||||
:cljs (.setFloat32 dview (+ offset 4) c1x))
|
||||
#?(:clj (.putFloat buffer (+ offset 8) c1y)
|
||||
:cljs (.setFloat32 dview (+ offset 8) c1y))
|
||||
#?(:clj (.putFloat buffer (+ offset 12) c2x)
|
||||
:cljs (.setFloat32 dview (+ offset 12) c2x))
|
||||
#?(:clj (.putFloat buffer (+ offset 16) c2y)
|
||||
:cljs (.setFloat32 dview (+ offset 16) c2y))
|
||||
#?(:clj (.putFloat buffer (+ offset 20) x)
|
||||
:cljs (.setFloat32 dview (+ offset 20) x))
|
||||
#?(:clj (.putFloat buffer (+ offset 24) y)
|
||||
:cljs (.setFloat32 dview (+ offset 24) y)))
|
||||
|
||||
:close-path
|
||||
#?(:clj (.putShort buffer (int offset) (short 4))
|
||||
:cljs (.setInt16 dview offset 4)))
|
||||
(recur (inc index)))))
|
||||
|
||||
#?(:cljs (from-bytes dview)
|
||||
:clj (from-bytes buffer))))
|
||||
|
||||
(defn path-data
|
||||
"Create an instance of PathData, returns itself if it is already
|
||||
PathData instance"
|
||||
[data]
|
||||
(cond
|
||||
(instance? PathData data)
|
||||
data
|
||||
|
||||
(sequential? data)
|
||||
(from-plain data)
|
||||
|
||||
:else
|
||||
(from-bytes data)))
|
||||
@@ -16,6 +16,8 @@
|
||||
[app.common.types.shape.layout :as ctl]
|
||||
[app.common.uuid :as uuid]))
|
||||
|
||||
|
||||
;; FIXME: the order of arguments seems arbitrary, container should be the first argument
|
||||
(defn add-shape
|
||||
"Insert a shape in the tree, at the given index below the given parent or frame.
|
||||
Update the parent as needed."
|
||||
|
||||
common/src/app/common/types/text.cljc (new file, 144 lines)
@@ -0,0 +1,144 @@
|
||||
;; This Source Code Form is subject to the terms of the Mozilla Public
|
||||
;; License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
;;
|
||||
;; Copyright (c) KALEIDOS INC
|
||||
|
||||
(ns app.common.types.text
|
||||
(:require
|
||||
[app.common.data.macros :as dm]
|
||||
[clojure.set :as set]))
|
||||
|
||||
(defn- compare-text-content
|
||||
"Given two content text structures, conformed by maps and vectors,
|
||||
compare them, and returns a set with the type of differences.
|
||||
The possibilities are :text-content-text :text-content-attribute and :text-content-structure."
|
||||
[a b]
|
||||
(cond
|
||||
;; If a and b are equal, there is no diff
|
||||
(= a b)
|
||||
#{}
|
||||
|
||||
;; If types are different, the structure is different
|
||||
(not= (type a) (type b))
|
||||
#{:text-content-structure}
|
||||
|
||||
;; If they are maps, check the keys
|
||||
(map? a)
|
||||
(let [keys (-> (set/union (set (keys a)) (set (keys b)))
|
||||
(disj :key))] ;; We have to ignore :key because it is a draft artifact
|
||||
(reduce
|
||||
(fn [acc k]
|
||||
(let [v1 (get a k)
|
||||
v2 (get b k)]
|
||||
(cond
|
||||
;; If the key is :children, keep digging
|
||||
(= k :children)
|
||||
(if (not= (count v1) (count v2))
|
||||
#{:text-content-structure}
|
||||
(into acc
|
||||
(apply set/union
|
||||
(map #(compare-text-content %1 %2) v1 v2))))
|
||||
|
||||
;; If the key is :text, and they are different, it is a text difference
|
||||
(= k :text)
|
||||
(if (not= v1 v2)
|
||||
(conj acc :text-content-text)
|
||||
acc)
|
||||
|
||||
:else
|
||||
;; If the key is not :text, and they are different, it is an attribute difference
|
||||
(if (not= v1 v2)
|
||||
(conj acc :text-content-attribute)
|
||||
acc))))
|
||||
#{}
|
||||
keys))
|
||||
|
||||
:else
|
||||
#{:text-content-structure}))
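;; A minimal usage sketch (hypothetical content trees, not part of this diff):
;; trees that differ only in their text yield #{:text-content-text}, while a
;; change in any other attribute yields #{:text-content-attribute}.
(comment
  (compare-text-content
   {:type "root" :children [{:type "paragraph" :children [{:text "hello"}]}]}
   {:type "root" :children [{:type "paragraph" :children [{:text "bye"}]}]})
  ;; => #{:text-content-text}
  )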
|
||||
|
||||
|
||||
(defn equal-attrs?
|
||||
"Given a text structure, and a map of attrs, check that all the internal attrs in
|
||||
paragraphs and sentences have the same attrs"
|
||||
[item attrs]
|
||||
(let [item-attrs (dissoc item :text :type :key :children)]
|
||||
(and
|
||||
(or (empty? item-attrs)
|
||||
(= attrs (dissoc item :text :type :key :children)))
|
||||
(every? #(equal-attrs? % attrs) (:children item)))))
|
||||
|
||||
(defn get-first-paragraph-text-attrs
|
||||
"Given a content text structure, extract it's first paragraph
|
||||
text attrs"
|
||||
[content]
|
||||
(-> content
|
||||
(dm/get-in [:children 0 :children 0])
|
||||
(dissoc :text :type :key :children)))
|
||||
|
||||
(defn get-diff-type
|
||||
"Given two content text structures, conformed by maps and vectors,
|
||||
compare them, and returns a set with the type of differences.
|
||||
The possibilities are :text-content-text :text-content-attribute,
|
||||
:text-content-structure and :text-content-structure-same-attrs."
|
||||
[a b]
|
||||
(let [diff-type (compare-text-content a b)]
|
||||
(if-not (contains? diff-type :text-content-structure)
|
||||
diff-type
|
||||
(let [;; get attrs of the first paragraph of the first paragraph-set
|
||||
attrs (get-first-paragraph-text-attrs a)]
|
||||
(if (and (equal-attrs? a attrs)
|
||||
(equal-attrs? b attrs))
|
||||
#{:text-content-structure :text-content-structure-same-attrs}
|
||||
diff-type)))))
|
||||
|
||||
;; TODO: We know there are cases where the blocks of text are split
;; differently: ["one" " " "two"], ["one " "two"], ["one" " two"],
;; so this won't work in 100% of the situations. But it's good enough for now;
;; we can iterate on the solution again in the future if needed.
|
||||
(defn equal-structure?
|
||||
"Given two content text structures, check that the structures are equal.
|
||||
This means that all the :children keys at any level has the same number of
|
||||
entries"
|
||||
[a b]
|
||||
(cond
|
||||
(not= (type a) (type b))
|
||||
false
|
||||
|
||||
(map? a)
|
||||
(let [children-a (:children a)
|
||||
children-b (:children b)]
|
||||
(if (not= (count children-a) (count children-b))
|
||||
false
|
||||
(every? true?
|
||||
(map equal-structure? children-a children-b))))
|
||||
|
||||
:else
|
||||
true))
|
||||
|
||||
|
||||
(defn copy-text-keys
|
||||
"Given two equal content text structures, deep copy all the keys :text
|
||||
from origin to destiny"
|
||||
[origin destiny]
|
||||
(cond
|
||||
(map? origin)
|
||||
(into {}
|
||||
(for [k (keys origin) :when (not= k :key)] ;; We ignore :key because it is a draft artifact
|
||||
(cond
|
||||
(= :children k)
|
||||
[k (vec (map #(copy-text-keys %1 %2) (get origin k) (get destiny k)))]
|
||||
(= :text k)
|
||||
[k (:text origin)]
|
||||
:else
|
||||
[k (get destiny k)])))))
|
||||
|
||||
(defn copy-attrs-keys
|
||||
"Given a content text structure and a list of attrs, copy that
|
||||
attrs values on all the content tree"
|
||||
[content attrs]
|
||||
(into {}
|
||||
(for [[k v] content]
|
||||
(if (= :children k)
|
||||
[k (vec (map #(copy-attrs-keys %1 attrs) v))]
|
||||
[k (get attrs k v)]))))
|
||||
@@ -662,63 +662,6 @@
|
||||
(def valid-active-token-themes?
|
||||
(sm/validator schema:active-themes))
|
||||
|
||||
;; === Import / Export from DTCG format
|
||||
|
||||
(def ^:private legacy-node?
|
||||
(sm/validator
|
||||
[:or
|
||||
[:map
|
||||
["value" :string]
|
||||
["type" :string]]
|
||||
[:map
|
||||
["value" [:sequential [:map ["type" :string]]]]
|
||||
["type" :string]]
|
||||
[:map
|
||||
["value" :map]
|
||||
["type" :string]]]))
|
||||
|
||||
(def ^:private dtcg-node?
|
||||
(sm/validator
|
||||
[:or
|
||||
[:map
|
||||
["$value" :string]
|
||||
["$type" :string]]
|
||||
[:map
|
||||
["$value" [:sequential [:map ["$type" :string]]]]
|
||||
["$type" :string]]
|
||||
[:map
|
||||
["$value" :map]
|
||||
["$type" :string]]]))
|
||||
|
||||
(defn get-json-format
|
||||
"Searches through parsed token file and returns:
|
||||
- `:json-format/legacy` when first node satisfies `legacy-node?` predicate
|
||||
- `:json-format/dtcg` when first node satisfies `dtcg-node?` predicate
|
||||
- `nil` if neither combination is found"
|
||||
([data]
|
||||
(get-json-format data legacy-node? dtcg-node?))
|
||||
([data legacy-node? dtcg-node?]
|
||||
(let [branch? map?
|
||||
children (fn [node] (vals node))
|
||||
check-node (fn [node]
|
||||
(cond
|
||||
(legacy-node? node) :json-format/legacy
|
||||
(dtcg-node? node) :json-format/dtcg
|
||||
:else nil))
|
||||
walk (fn walk [node]
|
||||
(lazy-seq
|
||||
(cons
|
||||
(check-node node)
|
||||
(when (branch? node)
|
||||
(mapcat walk (children node))))))]
|
||||
(->> (walk data)
|
||||
(filter some?)
|
||||
first))))
|
||||
|
||||
(defn single-set? [data]
|
||||
(and (not (contains? data "$metadata"))
|
||||
(not (contains? data "$themes"))))
|
||||
|
||||
;; DEPRECATED
|
||||
(defn walk-sets-tree-seq
|
||||
"Walk sets tree as a flat list.
|
||||
@@ -828,51 +771,10 @@
|
||||
(map-indexed (fn [index item]
|
||||
(assoc item :index index))))))
|
||||
|
||||
(defn flatten-nested-tokens-json
|
||||
"Recursively flatten the dtcg token structure, joining keys with '.'."
|
||||
[tokens token-path]
|
||||
(reduce-kv
|
||||
(fn [acc k v]
|
||||
(let [child-path (if (empty? token-path)
|
||||
(name k)
|
||||
(str token-path "." k))]
|
||||
(if (and (map? v)
|
||||
(not (contains? v "$type")))
|
||||
(merge acc (flatten-nested-tokens-json v child-path))
|
||||
(let [token-type (cto/dtcg-token-type->token-type (get v "$type"))]
|
||||
(if token-type
|
||||
(assoc acc child-path (make-token
|
||||
:name child-path
|
||||
:type token-type
|
||||
:value (get v "$value")
|
||||
:description (get v "$description")))
|
||||
;; Discard unknown tokens
|
||||
acc)))))
|
||||
{}
|
||||
tokens))
|
||||
|
||||
;; === Tokens Lib
|
||||
|
||||
(declare make-tokens-lib)
|
||||
|
||||
(defn legacy-nodes->dtcg-nodes [sets-data]
|
||||
(walk/postwalk
|
||||
(fn [node]
|
||||
(cond-> node
|
||||
(and (map? node)
|
||||
(contains? node "value")
|
||||
(sequential? (get node "value")))
|
||||
(update "value"
|
||||
(fn [seq-value]
|
||||
(map #(set/rename-keys % {"type" "$type"}) seq-value)))
|
||||
|
||||
(and (map? node)
|
||||
(and (contains? node "type")
|
||||
(contains? node "value")))
|
||||
(set/rename-keys {"value" "$value"
|
||||
"type" "$type"})))
|
||||
sets-data))
|
||||
|
||||
(defprotocol ITokensLib
|
||||
"A library of tokens, sets and themes."
|
||||
(set-path-exists? [_ path] "if a set at `path` exists")
|
||||
@@ -889,12 +791,11 @@ Will return a value that matches this schema:
|
||||
`:all` All of the nested sets are active
|
||||
`:partial` Mixed active state of nested sets")
|
||||
(get-active-themes-set-tokens [_] "set of set names that are active in the active themes")
|
||||
(encode-dtcg [_] "Encodes library to a dtcg compatible json string")
|
||||
(decode-dtcg-json [_ parsed-json] "Decodes parsed json containing tokens and converts to library")
|
||||
(decode-legacy-json [_ parsed-json] "Decodes parsed legacy json containing tokens and converts to library")
|
||||
(get-all-tokens [_] "all tokens in the lib")
|
||||
(validate [_]))
|
||||
|
||||
(declare parse-multi-set-dtcg-json)
|
||||
(declare export-dtcg-json)
|
||||
(deftype TokensLib [sets themes active-themes]
|
||||
;; NOTE: This is only for debug purposes, pending a proper implementation
;; of toString and alternative printing.
|
||||
@@ -911,12 +812,9 @@ Will return a value that matches this schema:
|
||||
(-clj->js [_] (js-obj "sets" (clj->js sets)
|
||||
"themes" (clj->js themes)
|
||||
"active-themes" (clj->js active-themes)))])
|
||||
|
||||
|
||||
#?@(:clj
|
||||
[json/JSONWriter
|
||||
(-write [this writter options] (json/-write (encode-dtcg this) writter options))])
|
||||
|
||||
(-write [this writter options] (json/-write (export-dtcg-json this) writter options))])
|
||||
|
||||
ITokenSets
|
||||
(add-set [_ token-set]
|
||||
@@ -1291,142 +1189,6 @@ Will return a value that matches this schema:
|
||||
active-set-names)]
|
||||
tokens))
|
||||
|
||||
(encode-dtcg [this]
|
||||
(let [themes-xform
|
||||
(comp
|
||||
(filter #(and (instance? TokenTheme %)
|
||||
(not (hidden-temporary-theme? %))))
|
||||
(map (fn [token-theme]
|
||||
(let [theme-map (->> token-theme
|
||||
(into {})
|
||||
walk/stringify-keys)]
|
||||
(-> theme-map
|
||||
(set/rename-keys {"sets" "selectedTokenSets"})
|
||||
(update "selectedTokenSets" (fn [sets]
|
||||
(->> (for [s sets] [s "enabled"])
|
||||
(into {})))))))))
|
||||
themes
|
||||
(->> (tree-seq d/ordered-map? vals themes)
|
||||
(into [] themes-xform))
|
||||
|
||||
;; Active themes without exposing hidden penpot theme
|
||||
active-themes-clear
|
||||
(disj active-themes hidden-token-theme-path)
|
||||
|
||||
update-token-fn
|
||||
(fn [token]
|
||||
(cond-> {"$value" (:value token)
|
||||
"$type" (cto/token-type->dtcg-token-type (:type token))}
|
||||
(:description token) (assoc "$description" (:description token))))
|
||||
|
||||
name-set-tuples
|
||||
(->> sets
|
||||
(tree-seq d/ordered-map? vals)
|
||||
(filter (partial instance? TokenSet))
|
||||
(map (fn [{:keys [name tokens]}]
|
||||
[name (tokens-tree tokens :update-token-fn update-token-fn)])))
|
||||
|
||||
ordered-set-names
|
||||
(mapv first name-set-tuples)
|
||||
|
||||
sets
|
||||
(into {} name-set-tuples)
|
||||
|
||||
active-sets
|
||||
(get-active-themes-set-names this)]
|
||||
|
||||
(-> sets
|
||||
(assoc "$themes" themes)
|
||||
(assoc-in ["$metadata" "tokenSetOrder"] ordered-set-names)
|
||||
(assoc-in ["$metadata" "activeThemes"] active-themes-clear)
|
||||
(assoc-in ["$metadata" "activeSets"] active-sets))))
|
||||
|
||||
(decode-dtcg-json [_ data]
|
||||
(assert (map? data) "expected a map data structure for `data`")
|
||||
|
||||
(let [metadata (get data "$metadata")
|
||||
|
||||
xf-normalize-set-name
|
||||
(map normalize-set-name)
|
||||
|
||||
sets
|
||||
(dissoc data "$themes" "$metadata")
|
||||
|
||||
ordered-sets
|
||||
(-> (d/ordered-set)
|
||||
(into xf-normalize-set-name (get metadata "tokenSetOrder"))
|
||||
(into xf-normalize-set-name (keys sets)))
|
||||
|
||||
active-sets
|
||||
(or (->> (get metadata "activeSets")
|
||||
(into #{} xf-normalize-set-name)
|
||||
(not-empty))
|
||||
#{})
|
||||
|
||||
active-themes
|
||||
(or (->> (get metadata "activeThemes")
|
||||
(into #{})
|
||||
(not-empty))
|
||||
#{hidden-token-theme-path})
|
||||
|
||||
themes
|
||||
(->> (get data "$themes")
|
||||
(map (fn [theme]
|
||||
(make-token-theme
|
||||
:name (get theme "name")
|
||||
:group (get theme "group")
|
||||
:is-source (get theme "is-source")
|
||||
:id (get theme "id")
|
||||
:modified-at (some-> (get theme "modified-at")
|
||||
(dt/parse-instant))
|
||||
:sets (into #{}
|
||||
(comp (map key)
|
||||
xf-normalize-set-name
|
||||
(filter #(contains? ordered-sets %)))
|
||||
(get theme "selectedTokenSets")))))
|
||||
(not-empty))
|
||||
|
||||
library
|
||||
(make-tokens-lib)
|
||||
|
||||
sets
|
||||
(reduce-kv (fn [result name tokens]
|
||||
(assoc result
|
||||
(normalize-set-name name)
|
||||
(flatten-nested-tokens-json tokens "")))
|
||||
{}
|
||||
sets)
|
||||
|
||||
library
|
||||
(reduce (fn [library name]
|
||||
(if-let [tokens (get sets name)]
|
||||
(add-set library (make-token-set :name name :tokens tokens))
|
||||
library))
|
||||
library
|
||||
ordered-sets)
|
||||
|
||||
library
|
||||
(update-theme library hidden-token-theme-group hidden-token-theme-name
|
||||
#(assoc % :sets active-sets))
|
||||
|
||||
library
|
||||
(reduce add-theme library themes)
|
||||
|
||||
library
|
||||
(reduce (fn [library theme-path]
|
||||
(let [[group name] (split-token-theme-path theme-path)]
|
||||
(activate-theme library group name)))
|
||||
library
|
||||
active-themes)]
|
||||
|
||||
library))
|
||||
|
||||
(decode-legacy-json [this parsed-legacy-json]
|
||||
(let [other-data (select-keys parsed-legacy-json ["$themes" "$metadata"])
|
||||
sets-data (dissoc parsed-legacy-json "$themes" "$metadata")
|
||||
dtcg-sets-data (legacy-nodes->dtcg-nodes sets-data)]
|
||||
(decode-dtcg-json this (merge other-data
|
||||
dtcg-sets-data))))
|
||||
(get-all-tokens [this]
|
||||
(reduce
|
||||
(fn [tokens' set]
|
||||
@@ -1488,17 +1250,13 @@ Will return a value that matches this schema:
|
||||
[tokens-lib]
|
||||
(or tokens-lib (make-tokens-lib)))
|
||||
|
||||
(defn decode-dtcg
|
||||
[encoded-json]
|
||||
(-> (make-tokens-lib)
|
||||
(decode-dtcg-json encoded-json)))
|
||||
|
||||
(def type:tokens-lib
|
||||
{:type ::tokens-lib
|
||||
:pred valid-tokens-lib?
|
||||
:type-properties
|
||||
{:encode/json encode-dtcg
|
||||
:decode/json decode-dtcg}})
|
||||
(def schema:tokens-lib
|
||||
(sm/register!
|
||||
{:type ::tokens-lib
|
||||
:pred valid-tokens-lib?
|
||||
:type-properties
|
||||
{:encode/json export-dtcg-json
|
||||
:decode/json parse-multi-set-dtcg-json}}))
|
||||
|
||||
(defn duplicate-set [set-name lib & {:keys [suffix]}]
|
||||
(let [sets (get-sets lib)
|
||||
@@ -1508,7 +1266,335 @@ Will return a value that matches this schema:
|
||||
(assoc :name copy-name)
|
||||
(assoc :modified-at (dt/now)))))
|
||||
|
||||
(sm/register! type:tokens-lib)
|
||||
;; === Import / Export from JSON format
|
||||
|
||||
;; Supported formats:
|
||||
;; - Legacy: for tokens files prior to DTCG second draft
|
||||
;; - DTCG: for tokens files conforming to the DTCG second draft (current for now)
|
||||
;; https://www.w3.org/community/design-tokens/2022/06/14/call-to-implement-the-second-editors-draft-and-share-feedback/
|
||||
;;
|
||||
;; - Single set: for files that comply with the base DTCG format, that contain a single tree of tokens.
|
||||
;; - Multi sets: for files with the Tokens Studio extension, that may contain several sets, and also themes and other $metadata.
|
||||
;;
|
||||
;; Small glossary:
|
||||
;; * json data: a json-encoded string
|
||||
;; * decode: convert a json string into a plain clojure nested map
|
||||
;; * parse: build a TokensLib (or a fragment) from a decoded json data
|
||||
;; * export: generate from a TokensLib a plain clojure nested map, suitable to be encoded as a json string
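;; For illustration (hypothetical token, not taken from this diff), the same token
;; as decoded json data in each of the two supported formats:
;;   legacy: {"color" {"primary" {"value" "#ff0000" "type" "color"}}}
;;   dtcg:   {"color" {"primary" {"$value" "#ff0000" "$type" "color"}}}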
|
||||
|
||||
(def ^:private legacy-node?
|
||||
(sm/validator
|
||||
[:or
|
||||
[:map
|
||||
["value" :string]
|
||||
["type" :string]]
|
||||
[:map
|
||||
["value" [:sequential [:map ["type" :string]]]]
|
||||
["type" :string]]
|
||||
[:map
|
||||
["value" :map]
|
||||
["type" :string]]]))
|
||||
|
||||
(def ^:private dtcg-node?
|
||||
(sm/validator
|
||||
[:or
|
||||
[:map
|
||||
["$value" :string]
|
||||
["$type" :string]]
|
||||
[:map
|
||||
["$value" [:sequential [:map ["$type" :string]]]]
|
||||
["$type" :string]]
|
||||
[:map
|
||||
["$value" :map]
|
||||
["$type" :string]]]))
|
||||
|
||||
(defn- get-json-format
|
||||
"Searches through decoded token file and returns:
|
||||
- `:json-format/legacy` when first node satisfies `legacy-node?` predicate
|
||||
- `:json-format/dtcg` when first node satisfies `dtcg-node?` predicate
|
||||
- `nil` if neither combination is found"
|
||||
([decoded-json]
|
||||
(get-json-format decoded-json legacy-node? dtcg-node?))
|
||||
([decoded-json legacy-node? dtcg-node?]
|
||||
(assert (map? decoded-json) "expected a plain clojure map for `decoded-json`")
|
||||
(let [branch? map?
|
||||
children (fn [node] (vals node))
|
||||
check-node (fn [node]
|
||||
(cond
|
||||
(legacy-node? node) :json-format/legacy
|
||||
(dtcg-node? node) :json-format/dtcg
|
||||
:else nil))
|
||||
walk (fn walk [node]
|
||||
(lazy-seq
|
||||
(cons
|
||||
(check-node node)
|
||||
(when (branch? node)
|
||||
(mapcat walk (children node))))))]
|
||||
(->> (walk decoded-json)
|
||||
(filter some?)
|
||||
first)))) ;; TODO: throw error if format cannot be determined
|
||||
|
||||
(defn- legacy-json->dtcg-json
|
||||
"Converts a decoded json file in legacy format into DTCG format."
|
||||
[decoded-json]
|
||||
(assert (map? decoded-json) "expected a plain clojure map for `decoded-json`")
|
||||
(walk/postwalk
|
||||
(fn [node]
|
||||
(cond-> node
|
||||
(and (map? node)
|
||||
(contains? node "value")
|
||||
(sequential? (get node "value")))
|
||||
(update "value"
|
||||
(fn [seq-value]
|
||||
(map #(set/rename-keys % {"type" "$type"}) seq-value)))
|
||||
|
||||
(and (map? node)
|
||||
(and (contains? node "type")
|
||||
(contains? node "value")))
|
||||
(set/rename-keys {"value" "$value"
|
||||
"type" "$type"})))
|
||||
decoded-json))
|
||||
|
||||
(defn- single-set?
|
||||
"Check if the decoded json file conforms to basic DTCG format with a single set."
|
||||
[decoded-json]
|
||||
(assert (map? decoded-json) "expected a plain clojure map for `decoded-json`")
|
||||
(and (not (contains? decoded-json "$metadata"))
|
||||
(not (contains? decoded-json "$themes"))))
|
||||
|
||||
(defn- flatten-nested-tokens-json
|
||||
"Convert a tokens tree in the decoded json fragment into a flat map,
|
||||
being the keys the token paths after joining the keys with '.'."
|
||||
[decoded-json-tokens parent-path]
|
||||
(reduce-kv
|
||||
(fn [tokens k v]
|
||||
(let [child-path (if (empty? parent-path)
|
||||
(name k)
|
||||
(str parent-path "." k))]
|
||||
(if (and (map? v)
|
||||
(not (contains? v "$type")))
|
||||
(merge tokens (flatten-nested-tokens-json v child-path))
|
||||
(let [token-type (cto/dtcg-token-type->token-type (get v "$type"))]
|
||||
(if token-type
|
||||
(assoc tokens child-path (make-token
|
||||
:name child-path
|
||||
:type token-type
|
||||
:value (get v "$value")
|
||||
:description (get v "$description")))
|
||||
;; Discard unknown type tokens
|
||||
tokens)))))
|
||||
{}
|
||||
decoded-json-tokens))
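;; A minimal sketch of the flattening (hypothetical input; assumes "color" is a
;; token type known to cto/dtcg-token-type->token-type): nested groups become
;; dot-joined token paths.
(comment
  (flatten-nested-tokens-json
   {"color" {"primary" {"$value" "#ff0000" "$type" "color"}}}
   "")
  ;; => {"color.primary" (make-token :name "color.primary" :type :color
  ;;                                 :value "#ff0000" :description nil)}
  )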
|
||||
|
||||
(defn- parse-single-set-dtcg-json
|
||||
"Parse a decoded json file with a single set of tokens in DTCG format into a TokensLib."
|
||||
[set-name decoded-json-tokens]
|
||||
(assert (map? decoded-json-tokens) "expected a plain clojure map for `decoded-json-tokens`")
|
||||
(assert (= (get-json-format decoded-json-tokens) :json-format/dtcg) "expected a dtcg format for `decoded-json-tokens`")
|
||||
(-> (make-tokens-lib)
|
||||
(add-set (make-token-set :name (normalize-set-name set-name)
|
||||
:tokens (flatten-nested-tokens-json decoded-json-tokens "")))))
|
||||
|
||||
(defn- parse-single-set-legacy-json
|
||||
"Parse a decoded json file with a single set of tokens in legacy format into a TokensLib."
|
||||
[set-name decoded-json-tokens]
|
||||
(assert (map? decoded-json-tokens) "expected a plain clojure map for `decoded-json-tokens`")
|
||||
(assert (= (get-json-format decoded-json-tokens) :json-format/legacy) "expected a legacy format for `decoded-json-tokens`")
|
||||
(parse-single-set-dtcg-json set-name (legacy-json->dtcg-json decoded-json-tokens)))
|
||||
|
||||
(defn- parse-multi-set-dtcg-json
|
||||
"Parse a decoded json file with multi sets in DTCG format into a TokensLib."
|
||||
[decoded-json]
|
||||
(assert (map? decoded-json) "expected a plain clojure map for `decoded-json`")
|
||||
(assert (= (get-json-format decoded-json) :json-format/dtcg) "expected a dtcg format for `decoded-json`")
|
||||
|
||||
(let [metadata (get decoded-json "$metadata")
|
||||
|
||||
xf-normalize-set-name
|
||||
(map normalize-set-name)
|
||||
|
||||
sets
|
||||
(dissoc decoded-json "$themes" "$metadata")
|
||||
|
||||
ordered-set-names
|
||||
(-> (d/ordered-set)
|
||||
(into xf-normalize-set-name (get metadata "tokenSetOrder"))
|
||||
(into xf-normalize-set-name (keys sets)))
|
||||
|
||||
active-set-names
|
||||
(or (->> (get metadata "activeSets")
|
||||
(into #{} xf-normalize-set-name)
|
||||
(not-empty))
|
||||
#{})
|
||||
|
||||
active-theme-names
|
||||
(or (->> (get metadata "activeThemes")
|
||||
(into #{})
|
||||
(not-empty))
|
||||
#{hidden-token-theme-path})
|
||||
|
||||
themes
|
||||
(->> (get decoded-json "$themes")
|
||||
(map (fn [theme]
|
||||
(make-token-theme
|
||||
:name (get theme "name")
|
||||
:group (get theme "group")
|
||||
:is-source (get theme "is-source")
|
||||
:id (get theme "id")
|
||||
:modified-at (some-> (get theme "modified-at")
|
||||
(dt/parse-instant))
|
||||
:sets (into #{}
|
||||
(comp (map key)
|
||||
xf-normalize-set-name
|
||||
(filter #(contains? ordered-set-names %)))
|
||||
(get theme "selectedTokenSets")))))
|
||||
(not-empty))
|
||||
|
||||
library
|
||||
(make-tokens-lib)
|
||||
|
||||
sets
|
||||
(reduce-kv (fn [result name tokens]
|
||||
(assoc result
|
||||
(normalize-set-name name)
|
||||
(flatten-nested-tokens-json tokens "")))
|
||||
{}
|
||||
sets)
|
||||
|
||||
library
|
||||
(reduce (fn [library name]
|
||||
(if-let [tokens (get sets name)]
|
||||
(add-set library (make-token-set :name name :tokens tokens))
|
||||
library))
|
||||
library
|
||||
ordered-set-names)
|
||||
|
||||
library
|
||||
(update-theme library hidden-token-theme-group hidden-token-theme-name
|
||||
#(assoc % :sets active-set-names))
|
||||
|
||||
library
|
||||
(reduce add-theme library themes)
|
||||
|
||||
library
|
||||
(reduce (fn [library theme-path]
|
||||
(let [[group name] (split-token-theme-path theme-path)]
|
||||
(activate-theme library group name)))
|
||||
library
|
||||
active-theme-names)]
|
||||
|
||||
library))
|
||||
|
||||
(defn- parse-multi-set-legacy-json
|
||||
"Parse a decoded json file with multi sets in legacy format into a TokensLib."
|
||||
[decoded-json]
|
||||
(assert (map? decoded-json) "expected a plain clojure map for `decoded-json`")
|
||||
(assert (= (get-json-format decoded-json) :json-format/legacy) "expected a legacy format for `decoded-json`")
|
||||
|
||||
(let [sets-data (dissoc decoded-json "$themes" "$metadata")
|
||||
other-data (select-keys decoded-json ["$themes" "$metadata"])
|
||||
dtcg-sets-data (legacy-json->dtcg-json sets-data)]
|
||||
(parse-multi-set-dtcg-json (merge other-data
|
||||
dtcg-sets-data))))
|
||||
|
||||
(defn parse-decoded-json
|
||||
"Guess the format and content type of the decoded json file and parse it into a TokensLib.
|
||||
The `file-name` is used to determine the set name when the json file contains a single set."
|
||||
[decoded-json file-name]
|
||||
(let [single-set? (single-set? decoded-json)
|
||||
json-format (get-json-format decoded-json)]
|
||||
(cond
|
||||
(and single-set?
|
||||
(= :json-format/legacy json-format))
|
||||
(parse-single-set-legacy-json file-name decoded-json)
|
||||
|
||||
(and single-set?
|
||||
(= :json-format/dtcg json-format))
|
||||
(parse-single-set-dtcg-json file-name decoded-json)
|
||||
|
||||
(= :json-format/legacy json-format)
|
||||
(parse-multi-set-legacy-json decoded-json)
|
||||
|
||||
:else
|
||||
(parse-multi-set-dtcg-json decoded-json))))
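;; Typical call (hypothetical names): `decoded` is a tokens json file already
;; decoded into plain clojure data by whatever json decoder the caller uses.
(comment
  (parse-decoded-json decoded "brand-tokens"))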
|
||||
|
||||
(defn export-dtcg-json
|
||||
"Convert a TokensLib into a plain clojure map, suitable to be encoded as a multi sets json string in DTCG format."
|
||||
[tokens-lib]
|
||||
(let [themes-xform
|
||||
(comp
|
||||
(filter #(and (instance? TokenTheme %)
|
||||
(not (hidden-temporary-theme? %))))
|
||||
(map (fn [token-theme]
|
||||
(let [theme-map (->> token-theme
|
||||
(into {})
|
||||
walk/stringify-keys)]
|
||||
(-> theme-map
|
||||
(set/rename-keys {"sets" "selectedTokenSets"})
|
||||
(update "selectedTokenSets" (fn [sets]
|
||||
(->> (for [s sets] [s "enabled"])
|
||||
(into {})))))))))
|
||||
themes
|
||||
(->> (get-theme-tree tokens-lib)
|
||||
(tree-seq d/ordered-map? vals)
|
||||
(into [] themes-xform))
|
||||
|
||||
;; Active themes without exposing hidden penpot theme
|
||||
active-themes-clear
|
||||
(-> (get-active-theme-paths tokens-lib)
|
||||
(disj hidden-token-theme-path))
|
||||
|
||||
update-token-fn
|
||||
(fn [token]
|
||||
(cond-> {"$value" (:value token)
|
||||
"$type" (cto/token-type->dtcg-token-type (:type token))}
|
||||
(:description token) (assoc "$description" (:description token))))
|
||||
|
||||
name-set-tuples
|
||||
(->> (get-set-tree tokens-lib)
|
||||
(tree-seq d/ordered-map? vals)
|
||||
(filter (partial instance? TokenSet))
|
||||
(map (fn [{:keys [name tokens]}]
|
||||
[name (tokens-tree tokens :update-token-fn update-token-fn)])))
|
||||
|
||||
ordered-set-names
|
||||
(mapv first name-set-tuples)
|
||||
|
||||
sets
|
||||
(into {} name-set-tuples)
|
||||
|
||||
active-set-names
|
||||
(get-active-themes-set-names tokens-lib)]
|
||||
|
||||
(-> sets
|
||||
(assoc "$themes" themes)
|
||||
(assoc-in ["$metadata" "tokenSetOrder"] ordered-set-names)
|
||||
(assoc-in ["$metadata" "activeThemes"] active-themes-clear)
|
||||
(assoc-in ["$metadata" "activeSets"] active-set-names))))
|
||||
|
||||
(defn get-tokens-of-unknown-type
|
||||
"Search for all tokens in the decoded json file that have a type that is not currently
|
||||
supported by Penpot. Returns a map token-path -> token type."
|
||||
([decoded-json]
|
||||
(get-tokens-of-unknown-type decoded-json "" (get-json-format decoded-json)))
|
||||
([decoded-json parent-path json-format]
|
||||
(let [type-key (if (= json-format :json-format/dtcg) "$type" "type")]
|
||||
(reduce-kv
|
||||
(fn [unknown-tokens k v]
|
||||
(let [child-path (if (empty? parent-path)
|
||||
(name k)
|
||||
(str parent-path "." k))]
|
||||
(if (and (map? v)
|
||||
(not (contains? v type-key)))
|
||||
(let [nested-unknown-tokens (get-tokens-of-unknown-type v child-path json-format)]
|
||||
(merge unknown-tokens nested-unknown-tokens))
|
||||
(let [token-type-str (get v type-key)
|
||||
token-type (cto/dtcg-token-type->token-type token-type-str)]
|
||||
(if (and (not (some? token-type)) (some? token-type-str))
|
||||
(assoc unknown-tokens child-path token-type-str)
|
||||
unknown-tokens)))))
|
||||
nil
|
||||
decoded-json))))
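;; Sketch (hypothetical input; assumes "gap" is not a token type known to Penpot):
;; unknown-typed tokens are reported as a map of dot-joined path -> raw type string.
(comment
  (get-tokens-of-unknown-type
   {"spacing" {"tight" {"$value" "4px" "$type" "gap"}}})
  ;; => {"spacing.tight" "gap"}
  )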
|
||||
|
||||
;; === Serialization handlers for RPC API (transit) and database (fressian)
Some files were not shown because too many files have changed in this diff.