Mirror of https://github.com/penpot/penpot.git (synced 2025-12-23 22:48:40 -05:00)

Compare commits (513 commits)
The 513 commits in this comparison range from 7ca98ddf21 (first listed) to 089b77379d (last listed).
@@ -1,6 +1,130 @@
|
||||
version: 2
|
||||
version: 2.1
|
||||
jobs:
|
||||
build:
|
||||
test-common:
|
||||
docker:
|
||||
- image: penpotapp/devenv:latest
|
||||
|
||||
working_directory: ~/repo
|
||||
resource_class: medium+
|
||||
|
||||
environment:
|
||||
JAVA_OPTS: -Xmx4g -Xms100m -XX:+UseSerialGC
|
||||
NODE_OPTIONS: --max-old-space-size=4096
|
||||
|
||||
steps:
|
||||
- checkout
|
||||
|
||||
# Download and cache dependencies
|
||||
- restore_cache:
|
||||
keys:
|
||||
- v1-dependencies-{{ checksum "common/deps.edn"}}
|
||||
|
||||
- run:
|
||||
name: "fmt check & linter"
|
||||
working_directory: "./common"
|
||||
command: |
|
||||
yarn install
|
||||
yarn run fmt:clj:check
|
||||
yarn run lint:clj
|
||||
|
||||
- run:
|
||||
name: "JVM tests"
|
||||
working_directory: "./common"
|
||||
command: |
|
||||
clojure -M:dev:test
|
||||
|
||||
- run:
|
||||
name: "NODE tests"
|
||||
working_directory: "./common"
|
||||
command: |
|
||||
yarn run test
|
||||
|
||||
- save_cache:
|
||||
paths:
|
||||
- ~/.m2
|
||||
key: v1-dependencies-{{ checksum "common/deps.edn"}}
|
||||
|
||||
|
||||
test-frontend:
|
||||
docker:
|
||||
- image: penpotapp/devenv:latest
|
||||
|
||||
working_directory: ~/repo
|
||||
resource_class: medium+
|
||||
|
||||
environment:
|
||||
JAVA_OPTS: -Xmx4g -Xms100m -XX:+UseSerialGC
|
||||
NODE_OPTIONS: --max-old-space-size=4096
|
||||
|
||||
steps:
|
||||
- checkout
|
||||
|
||||
# Download and cache dependencies
|
||||
- restore_cache:
|
||||
keys:
|
||||
- v1-dependencies-{{ checksum "frontend/deps.edn"}}
|
||||
|
||||
- run:
|
||||
name: "prepopulate linter cache"
|
||||
working_directory: "./common"
|
||||
command: |
|
||||
yarn install
|
||||
yarn run lint:clj
|
||||
|
||||
- run:
|
||||
name: "fmt check & linter"
|
||||
working_directory: "./frontend"
|
||||
command: |
|
||||
yarn install
|
||||
yarn run fmt:clj:check
|
||||
yarn run fmt:js:check
|
||||
yarn run lint:scss
|
||||
yarn run lint:clj
|
||||
|
||||
- run:
|
||||
name: "unit tests"
|
||||
working_directory: "./frontend"
|
||||
command: |
|
||||
yarn install
|
||||
yarn run test
|
||||
|
||||
- save_cache:
|
||||
paths:
|
||||
- ~/.m2
|
||||
key: v1-dependencies-{{ checksum "frontend/deps.edn"}}
|
||||
|
||||
|
||||
test-integration:
|
||||
docker:
|
||||
- image: penpotapp/devenv:latest
|
||||
|
||||
working_directory: ~/repo
|
||||
resource_class: large
|
||||
|
||||
environment:
|
||||
JAVA_OPTS: -Xmx6g -Xms2g
|
||||
NODE_OPTIONS: --max-old-space-size=4096
|
||||
|
||||
steps:
|
||||
- checkout
|
||||
|
||||
# Download and cache dependencies
|
||||
- restore_cache:
|
||||
keys:
|
||||
- v1-dependencies-{{ checksum "frontend/deps.edn"}}
|
||||
|
||||
- run:
|
||||
name: "integration tests"
|
||||
working_directory: "./frontend"
|
||||
command: |
|
||||
yarn install
|
||||
yarn run build:app:assets
|
||||
yarn run build:app
|
||||
yarn run build:app:libs
|
||||
yarn run playwright install --with-deps chromium
|
||||
yarn run test:e2e -x --workers=4
|
||||
|
||||
test-backend:
|
||||
docker:
|
||||
- image: penpotapp/devenv:latest
|
||||
- image: cimg/postgres:14.5
|
||||
@@ -20,104 +144,30 @@ jobs:
|
||||
steps:
|
||||
- checkout
|
||||
|
||||
# Download and cache dependencies
|
||||
- restore_cache:
|
||||
keys:
|
||||
- v1-dependencies-{{ checksum "backend/deps.edn" }}-{{ checksum "frontend/deps.edn"}}-{{ checksum "common/deps.edn"}}
|
||||
# fallback to using the latest cache if no exact match is found
|
||||
- v1-dependencies-
|
||||
|
||||
- run: cd .clj-kondo && cat config.edn
|
||||
- run: cat .cljfmt.edn
|
||||
- run: clj-kondo --version
|
||||
- v1-dependencies-{{ checksum "backend/deps.edn" }}
|
||||
|
||||
- run:
|
||||
name: "backend fmt check"
|
||||
working_directory: "./backend"
|
||||
command: |
|
||||
yarn install
|
||||
yarn run fmt:clj:check
|
||||
|
||||
- run:
|
||||
name: "exporter fmt check"
|
||||
working_directory: "./exporter"
|
||||
command: |
|
||||
yarn install
|
||||
yarn run fmt:clj:check
|
||||
|
||||
- run:
|
||||
name: "common fmt check"
|
||||
working_directory: "./common"
|
||||
command: |
|
||||
yarn install
|
||||
yarn run fmt:clj:check
|
||||
|
||||
- run:
|
||||
name: "frontend fmt check"
|
||||
working_directory: "./frontend"
|
||||
command: |
|
||||
yarn install
|
||||
yarn run fmt:clj:check
|
||||
yarn run fmt:js:check
|
||||
|
||||
- run:
|
||||
name: "common linter check"
|
||||
name: "prepopulate linter cache"
|
||||
working_directory: "./common"
|
||||
command: |
|
||||
yarn install
|
||||
yarn run lint:clj
|
||||
|
||||
- run:
|
||||
name: "frontend linter check"
|
||||
working_directory: "./frontend"
|
||||
command: |
|
||||
yarn install
|
||||
yarn run lint:scss
|
||||
yarn run lint:clj
|
||||
|
||||
- run:
|
||||
name: "backend linter check"
|
||||
name: "fmt check & linter"
|
||||
working_directory: "./backend"
|
||||
command: |
|
||||
yarn install
|
||||
yarn run fmt:clj:check
|
||||
yarn run lint:clj
|
||||
|
||||
- run:
|
||||
name: "exporter linter check"
|
||||
working_directory: "./exporter"
|
||||
command: |
|
||||
yarn install
|
||||
yarn run lint:clj
|
||||
|
||||
- run:
|
||||
name: "common tests"
|
||||
working_directory: "./common"
|
||||
command: |
|
||||
yarn test
|
||||
clojure -M:dev:test
|
||||
|
||||
- run:
|
||||
name: "frontend tests"
|
||||
working_directory: "./frontend"
|
||||
command: |
|
||||
yarn install
|
||||
yarn test
|
||||
|
||||
- run:
|
||||
name: "frontend integration tests"
|
||||
working_directory: "./frontend"
|
||||
command: |
|
||||
yarn install
|
||||
yarn run build:app:assets
|
||||
clojure -M:dev:shadow-cljs release main
|
||||
yarn playwright install --with-deps chromium
|
||||
yarn test:e2e
|
||||
|
||||
- run:
|
||||
name: "backend tests"
|
||||
name: "tests"
|
||||
working_directory: "./backend"
|
||||
command: |
|
||||
clojure -M:dev:test
|
||||
clojure -M:dev:test --reporter kaocha.report/documentation
|
||||
|
||||
environment:
|
||||
PENPOT_TEST_DATABASE_URI: "postgresql://localhost/penpot_test"
|
||||
@@ -128,4 +178,43 @@ jobs:
|
||||
- save_cache:
|
||||
paths:
|
||||
- ~/.m2
|
||||
key: v1-dependencies-{{ checksum "backend/deps.edn" }}-{{ checksum "frontend/deps.edn"}}-{{ checksum "common/deps.edn"}}
|
||||
key: v1-dependencies-{{ checksum "backend/deps.edn" }}
|
||||
|
||||
|
||||
test-exporter:
|
||||
docker:
|
||||
- image: penpotapp/devenv:latest
|
||||
|
||||
working_directory: ~/repo
|
||||
resource_class: medium+
|
||||
|
||||
environment:
|
||||
JAVA_OPTS: -Xmx4g -Xms100m -XX:+UseSerialGC
|
||||
NODE_OPTIONS: --max-old-space-size=4096
|
||||
|
||||
steps:
|
||||
- checkout
|
||||
|
||||
- run:
|
||||
name: "prepopulate linter cache"
|
||||
working_directory: "./common"
|
||||
command: |
|
||||
yarn install
|
||||
yarn run lint:clj
|
||||
|
||||
- run:
|
||||
name: "fmt check & linter"
|
||||
working_directory: "./exporter"
|
||||
command: |
|
||||
yarn install
|
||||
yarn run fmt:clj:check
|
||||
yarn run lint:clj
|
||||
|
||||
workflows:
|
||||
penpot:
|
||||
jobs:
|
||||
- test-frontend
|
||||
- test-integration
|
||||
- test-backend
|
||||
- test-common
|
||||
- test-exporter
|
||||
|
||||
@@ -4,7 +4,6 @@
|
||||
:remove-consecutive-blank-lines? false
|
||||
:extra-indents {rumext.v2/fnc [[:inner 0]]
|
||||
cljs.test/async [[:inner 0]]
|
||||
app.common.schema/register! [[:inner 0] [:inner 1]]
|
||||
promesa.exec/thread [[:inner 0]]
|
||||
specify! [[:inner 0] [:inner 1]]}
|
||||
}
|
||||
|
||||
.gitignore (vendored, 2 lines changed)
@@ -74,3 +74,5 @@ node_modules
|
||||
/playwright-report/
|
||||
/blob-report/
|
||||
/playwright/.cache/
|
||||
/render-wasm/target/
|
||||
/**/.yarn/*
|
||||
|
||||
CHANGES.md (72 lines changed)
@@ -1,5 +1,76 @@
|
||||
# CHANGELOG
|
||||
|
||||
## 2.4.2
|
||||
|
||||
### :bug: Bugs fixed
|
||||
|
||||
- Fix detach when top copy is dangling and nested copy is not [Taiga #9699](https://tree.taiga.io/project/penpot/issue/9699)
|
||||
- Fix problem in plugins with `replaceColor` method [#174](https://github.com/penpot/penpot-plugins/issues/174)
|
||||
- Fix issue with recursive components [Taiga #9903](https://tree.taiga.io/project/penpot/issue/9903)
|
||||
- Fix missing methods reference on API Docs
|
||||
- Fix memory usage issue on file-gc asynchronous task (related to snapshots feature)
|
||||
|
||||
## 2.4.1
|
||||
|
||||
### :bug: Bugs fixed
|
||||
|
||||
- Fix error when importing files with touched components [Taiga #9625](https://tree.taiga.io/project/penpot/issue/9625)
|
||||
- Fix problem when changing color libraries [Plugins #184](https://github.com/penpot/penpot-plugins/issues/184)
|
||||
|
||||
## 2.4.0
|
||||
|
||||
### :rocket: Epics and highlights
|
||||
|
||||
### :boom: Breaking changes & Deprecations
|
||||
|
||||
- Use [nginx-unprivileged](https://hub.docker.com/r/nginxinc/nginx-unprivileged) as the base image for
  Penpot's frontend docker image. Now all the docker images run with the same unprivileged user
  (penpot). Because of that, the default NGINX listen port is now 8080 instead of 80, so
  you will have to modify your infrastructure to apply this change.

- Redis 7.2 is explicitly pinned in our example docker-compose.yml file. This is done because,
  starting with subsequent versions, Redis is no longer distributed under an open-source license.
  On-premise users are of course free to keep the version they are currently using or upgrade to a
  more modern one. Keep in mind that if you were using a version other than 7.2, you may have to
  recreate the volume associated with the Redis container, because the 7.2 storage format may not
  be compatible with what you already have stored on the volume, and Redis may not start. In the
  near future, we will evaluate whether to move to an open-source version of Redis (such as
  https://valkey.io/).
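
Taken together, these two breaking changes usually amount to a couple of lines in a self-hosted deployment's compose file. The sketch below is illustrative only: the service names, the published host port, and the image tags are assumptions rather than values taken from Penpot's shipped docker-compose.yml.

```yaml
# Hypothetical docker-compose.yml excerpt for a self-hosted Penpot 2.4.x deployment.
# Service names, image tags and the host port are assumed for illustration.
services:
  penpot-frontend:
    image: penpotapp/frontend:2.4.2
    ports:
      # nginx-unprivileged listens on 8080 inside the container, so the
      # container side of the mapping changes from 80 to 8080; the host
      # side (80 here) stays whatever you were already exposing.
      - "80:8080"

  penpot-redis:
    # Pin to 7.2, the last Redis release distributed under an open-source
    # license, as recommended above.
    image: redis:7.2
    restart: always
```

If a newer Redis version has already written to the volume, recreating the volume (or pointing the service at an empty one) avoids the startup failure mentioned above.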
|
||||
|
||||
### :heart: Community contributions (Thank you!)
|
||||
|
||||
### :sparkles: New features
|
||||
|
||||
- Viewer role for team members [Taiga #1056](https://tree.taiga.io/project/penpot/us/1056) & [Taiga #6590](https://tree.taiga.io/project/penpot/us/6590)
|
||||
- File history versions management [Taiga #187](https://tree.taiga.io/project/penpot/us/187?milestone=411120)
|
||||
- Rename selected layer via keyboard shortcut and context menu option [Taiga #8882](https://tree.taiga.io/project/penpot/us/8882)
|
||||
- New .penpot file format [Taiga #8657](https://tree.taiga.io/project/penpot/us/8657)
|
||||
|
||||
### :bug: Bugs fixed
|
||||
|
||||
- Fix problem with some texts desynchronization [Taiga #9379](https://tree.taiga.io/project/penpot/issue/9379)
|
||||
- Fix problem with reordering grid layers [#5446](https://github.com/penpot/penpot/issues/5446)
|
||||
- Fix problem with swap component style [#9542](https://tree.taiga.io/project/penpot/issue/9542)
|
||||
|
||||
## 2.3.3
|
||||
|
||||
### :bug: Bugs fixed
|
||||
|
||||
- Fix problem creating manual overlay interactions [Taiga #9146](https://tree.taiga.io/project/penpot/issue/9146)
|
||||
- Fix plugins list default URL
|
||||
- Activate plugins feature by default
|
||||
|
||||
## 2.3.2
|
||||
|
||||
### :bug: Bugs fixed
|
||||
|
||||
- Fix null pointer exception on number checking functions
|
||||
- Fix problem with grid layout ordering after moving [Taiga #9179](https://tree.taiga.io/project/penpot/issue/9179)
|
||||
|
||||
### :books: Documentation
|
||||
|
||||
- Add initial documentation for Kubernetes
|
||||
|
||||
|
||||
## 2.3.1
|
||||
|
||||
### :bug: Bugs fixed
|
||||
@@ -156,6 +227,7 @@ time being.
|
||||
- Fix problem with comments max length [Taiga #8778](https://tree.taiga.io/project/penpot/issue/8778)
|
||||
- Fix copy/paste images in Safari [Taiga #8771](https://tree.taiga.io/project/penpot/issue/8771)
|
||||
- Fix swap when the copy is the only child of a group [#5075](https://github.com/penpot/penpot/issues/5075)
|
||||
- Fix file builder hangs when exporting [#5099](https://github.com/penpot/penpot/issues/5099)
|
||||
|
||||
## 2.1.5
|
||||
|
||||
|
||||
@@ -10,7 +10,7 @@
|
||||
|
||||
<p align="center">
|
||||
<a href="https://www.mozilla.org/en-US/MPL/2.0" rel="nofollow"><img alt="License: MPL-2.0" src="https://img.shields.io/badge/MPL-2.0-blue.svg" style="max-width:100%;"></a>
|
||||
<a href="https://gitter.im/penpot/community" rel="nofollow"><img alt="Gitter" src="https://badges.gitter.im/sereno-xyz/community.svg" style="max-width:100%;"></a>
|
||||
<a href="https://community.penpot.app" rel="nofollow"><img alt="Penpot Community" src="https://img.shields.io/discourse/posts?server=https%3A%2F%2Fcommunity.penpot.app" style="max-width:100%;"></a>
|
||||
<a href="https://tree.taiga.io/project/penpot/" title="Managed with Taiga.io" rel="nofollow"><img alt="Managed with Taiga.io" src="https://img.shields.io/badge/managed%20with-TAIGA.io-709f14.svg" style="max-width:100%;"></a>
|
||||
<a href="https://gitpod.io/#https://github.com/penpot/penpot" rel="nofollow"><img alt="Gitpod ready-to-code" src="https://img.shields.io/badge/Gitpod-ready--to--code-blue?logo=gitpod" style="max-width:100%;"></a>
|
||||
</p>
|
||||
|
||||
@@ -3,10 +3,10 @@
|
||||
|
||||
:deps
|
||||
{penpot/common {:local/root "../common"}
|
||||
org.clojure/clojure {:mvn/version "1.12.0-alpha12"}
|
||||
org.clojure/clojure {:mvn/version "1.12.0"}
|
||||
org.clojure/tools.namespace {:mvn/version "1.5.0"}
|
||||
|
||||
com.github.luben/zstd-jni {:mvn/version "1.5.6-3"}
|
||||
com.github.luben/zstd-jni {:mvn/version "1.5.6-6"}
|
||||
|
||||
io.prometheus/simpleclient {:mvn/version "0.16.0"}
|
||||
io.prometheus/simpleclient_hotspot {:mvn/version "0.16.0"}
|
||||
@@ -17,33 +17,33 @@
|
||||
|
||||
io.prometheus/simpleclient_httpserver {:mvn/version "0.16.0"}
|
||||
|
||||
io.lettuce/lettuce-core {:mvn/version "6.3.2.RELEASE"}
|
||||
io.lettuce/lettuce-core {:mvn/version "6.4.0.RELEASE"}
|
||||
java-http-clj/java-http-clj {:mvn/version "0.4.3"}
|
||||
|
||||
funcool/yetti
|
||||
{:git/tag "v10.0"
|
||||
:git/sha "520613f"
|
||||
{:git/tag "v11.4"
|
||||
:git/sha "ce50d42"
|
||||
:git/url "https://github.com/funcool/yetti.git"
|
||||
:exclusions [org.slf4j/slf4j-api]}
|
||||
|
||||
com.github.seancorfield/next.jdbc {:mvn/version "1.3.939"}
|
||||
metosin/reitit-core {:mvn/version "0.7.0"}
|
||||
nrepl/nrepl {:mvn/version "1.1.2"}
|
||||
cider/cider-nrepl {:mvn/version "0.48.0"}
|
||||
com.github.seancorfield/next.jdbc {:mvn/version "1.3.955"}
|
||||
metosin/reitit-core {:mvn/version "0.7.2"}
|
||||
nrepl/nrepl {:mvn/version "1.3.0"}
|
||||
cider/cider-nrepl {:mvn/version "0.50.2"}
|
||||
|
||||
org.postgresql/postgresql {:mvn/version "42.7.3"}
|
||||
org.xerial/sqlite-jdbc {:mvn/version "3.46.0.0"}
|
||||
org.postgresql/postgresql {:mvn/version "42.7.4"}
|
||||
org.xerial/sqlite-jdbc {:mvn/version "3.46.1.3"}
|
||||
|
||||
com.zaxxer/HikariCP {:mvn/version "5.1.0"}
|
||||
com.zaxxer/HikariCP {:mvn/version "6.0.0"}
|
||||
|
||||
io.whitfin/siphash {:mvn/version "2.0.0"}
|
||||
|
||||
buddy/buddy-hashers {:mvn/version "2.0.167"}
|
||||
buddy/buddy-sign {:mvn/version "3.5.351"}
|
||||
buddy/buddy-sign {:mvn/version "3.6.1-359"}
|
||||
|
||||
com.github.ben-manes.caffeine/caffeine {:mvn/version "3.1.8"}
|
||||
|
||||
org.jsoup/jsoup {:mvn/version "1.17.2"}
|
||||
org.jsoup/jsoup {:mvn/version "1.18.1"}
|
||||
org.im4java/im4java
|
||||
{:git/tag "1.4.0-penpot-2"
|
||||
:git/sha "e2b3e16"
|
||||
@@ -58,7 +58,7 @@
|
||||
|
||||
;; Pretty Print specs
|
||||
pretty-spec/pretty-spec {:mvn/version "0.1.4"}
|
||||
software.amazon.awssdk/s3 {:mvn/version "2.25.63"}
|
||||
software.amazon.awssdk/s3 {:mvn/version "2.28.26"}
|
||||
}
|
||||
|
||||
:paths ["src" "resources" "target/classes"]
|
||||
@@ -74,7 +74,7 @@
|
||||
|
||||
:build
|
||||
{:extra-deps
|
||||
{io.github.clojure/tools.build {:git/tag "v0.10.3" :git/sha "15ead66"}}
|
||||
{io.github.clojure/tools.build {:git/tag "v0.10.5" :git/sha "2a21b7a"}}
|
||||
:ns-default build}
|
||||
|
||||
:test
|
||||
|
||||
@@ -137,7 +137,6 @@
|
||||
;; :v6 v6
|
||||
;; }])))
|
||||
|
||||
|
||||
(defn calculate-frames
|
||||
[{:keys [data]}]
|
||||
(->> (vals (:pages-index data))
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<Configuration status="info" monitorInterval="30">
|
||||
<Configuration status="fatal" monitorInterval="30">
|
||||
<Appenders>
|
||||
<Console name="console" target="SYSTEM_OUT">
|
||||
<PatternLayout pattern="[%d{YYYY-MM-dd HH:mm:ss.SSS}] %level{length=1} %logger{36} - %msg%n"
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<Configuration status="info" monitorInterval="30">
|
||||
<Configuration status="fatal" monitorInterval="30">
|
||||
<Appenders>
|
||||
<Console name="console" target="SYSTEM_OUT">
|
||||
<PatternLayout pattern="[%d{YYYY-MM-dd HH:mm:ss.SSS}] %level{length=1} %logger{36} - %msg%n"
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<Configuration status="info" monitorInterval="30">
|
||||
<Configuration status="fatal" monitorInterval="30">
|
||||
<Appenders>
|
||||
<Console name="console" target="SYSTEM_OUT">
|
||||
<PatternLayout pattern="[%d{YYYY-MM-dd HH:mm:ss.SSS}] %level{length=1} %logger{36} - %msg%n"
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<Configuration status="info" monitorInterval="60">
|
||||
<Configuration status="fatal" monitorInterval="60">
|
||||
<Appenders>
|
||||
<Console name="console" target="SYSTEM_OUT">
|
||||
<PatternLayout pattern="[%d{YYYY-MM-dd HH:mm:ss.SSS}] %level{length=1} %logger{36} - %msg%n"
|
||||
|
||||
@@ -7,6 +7,8 @@ set -ex
|
||||
rm -rf target;
|
||||
mkdir -p target/classes;
|
||||
mkdir -p target/dist;
|
||||
mkdir -p target/dist/scripts;
|
||||
|
||||
echo "$CURRENT_VERSION" > target/classes/version.txt;
|
||||
cp ../CHANGES.md target/classes/changelog.md;
|
||||
|
||||
@@ -15,6 +17,7 @@ mv target/penpot.jar target/dist/penpot.jar
|
||||
cp resources/log4j2.xml target/dist/log4j2.xml
|
||||
cp scripts/run.template.sh target/dist/run.sh;
|
||||
cp scripts/manage.py target/dist/manage.py
|
||||
cp scripts/svgo-cli.js target/dist/scripts/;
|
||||
chmod +x target/dist/run.sh;
|
||||
chmod +x target/dist/manage.py
|
||||
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
export PENPOT_SECRET_KEY=super-secret-devenv-key
|
||||
export PENPOT_HOST=devenv
|
||||
export PENPOT_TENANT=dev
|
||||
export PENPOT_FLAGS="\
|
||||
$PENPOT_FLAGS \
|
||||
enable-login-with-ldap \
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
export PENPOT_SECRET_KEY=super-secret-devenv-key
|
||||
export PENPOT_HOST=devenv
|
||||
export PENPOT_TENANT=dev
|
||||
export PENPOT_FLAGS="\
|
||||
$PENPOT_FLAGS \
|
||||
enable-prepl-server \
|
||||
@@ -10,6 +10,7 @@ export PENPOT_FLAGS="\
|
||||
enable-webhooks \
|
||||
enable-backend-asserts \
|
||||
enable-audit-log \
|
||||
enable-login-with-ldap \
|
||||
enable-transit-readable-response \
|
||||
enable-demo-users \
|
||||
enable-feature-fdata-pointer-map \
|
||||
|
||||
backend/scripts/svgo-cli.js (new file, 214 lines; diff suppressed because one or more lines are too long)
@@ -8,7 +8,7 @@
|
||||
(:require
|
||||
[buddy.hashers :as hashers]))
|
||||
|
||||
(def default-params
|
||||
(def ^:private default-options
|
||||
{:alg :argon2id
|
||||
:memory 32768 ;; 32 MiB
|
||||
:iterations 3
|
||||
@@ -16,12 +16,12 @@
|
||||
|
||||
(defn derive-password
|
||||
[password]
|
||||
(hashers/derive password default-params))
|
||||
(hashers/derive password default-options))
|
||||
|
||||
(defn verify-password
|
||||
[attempt password]
|
||||
(try
|
||||
(hashers/verify attempt password)
|
||||
(hashers/verify attempt password default-options)
|
||||
(catch Throwable _
|
||||
{:update false
|
||||
:valid false})))
|
||||
|
||||
@@ -8,9 +8,8 @@
|
||||
(:require
|
||||
[app.common.exceptions :as ex]
|
||||
[app.common.logging :as l]
|
||||
[app.common.spec :as us]
|
||||
[app.common.schema :as sm]
|
||||
[clj-ldap.client :as ldap]
|
||||
[clojure.spec.alpha :as s]
|
||||
[clojure.string]
|
||||
[integrant.core :as ig]))
|
||||
|
||||
@@ -58,21 +57,26 @@
|
||||
:email email
|
||||
:backend "ldap"})))
|
||||
|
||||
(s/def ::fullname ::us/not-empty-string)
|
||||
(s/def ::email ::us/email)
|
||||
(s/def ::backend ::us/not-empty-string)
|
||||
(def ^:private schema:info-data
|
||||
[:map
|
||||
[:fullname ::sm/text]
|
||||
[:email ::sm/email]
|
||||
[:backend ::sm/text]])
|
||||
|
||||
(s/def ::info-data
|
||||
(s/keys :req-un [::fullname ::email ::backend]))
|
||||
(def ^:private valid-info-data?
|
||||
(sm/lazy-validator schema:info-data))
|
||||
|
||||
(def ^:private explain-info-data
|
||||
(sm/lazy-explainer schema:info-data))
|
||||
|
||||
(defn authenticate
|
||||
[cfg params]
|
||||
(with-open [conn (connect cfg)]
|
||||
(when-let [user (-> (assoc cfg ::conn conn)
|
||||
(retrieve-user params))]
|
||||
(when-not (s/valid? ::info-data user)
|
||||
(let [explain (s/explain-str ::info-data user)]
|
||||
(l/warn ::l/raw (str "invalid response from ldap, looks like ldap is not configured correctly\n" explain))
|
||||
(when-not (valid-info-data? user)
|
||||
(let [explain (explain-info-data user)]
|
||||
(l/warn :hint "invalid response from ldap, looks like ldap is not configured correctly" :data user)
|
||||
(ex/raise :type :restriction
|
||||
:code :wrong-ldap-response
|
||||
:explain explain)))
|
||||
@@ -102,38 +106,31 @@
|
||||
:host (:host cfg) :port (:port cfg) :cause cause)
|
||||
nil))))
|
||||
|
||||
(s/def ::enabled? ::us/boolean)
|
||||
(s/def ::host ::us/string)
|
||||
(s/def ::port ::us/integer)
|
||||
(s/def ::ssl ::us/boolean)
|
||||
(s/def ::tls ::us/boolean)
|
||||
(s/def ::query ::us/string)
|
||||
(s/def ::base-dn ::us/string)
|
||||
(s/def ::bind-dn ::us/string)
|
||||
(s/def ::bind-password ::us/string)
|
||||
(s/def ::attrs-email ::us/string)
|
||||
(s/def ::attrs-fullname ::us/string)
|
||||
(s/def ::attrs-username ::us/string)
|
||||
(def ^:private schema:params
|
||||
[:map
|
||||
[:host {:optional true} :string]
|
||||
[:port {:optional true} ::sm/int]
|
||||
[:bind-dn {:optional true} :string]
|
||||
[:bind-passwor {:optional true} :string]
|
||||
[:query {:optional true} :string]
|
||||
[:base-dn {:optional true} :string]
|
||||
[:attrs-email {:optional true} :string]
|
||||
[:attrs-username {:optional true} :string]
|
||||
[:attrs-fullname {:optional true} :string]
|
||||
[:ssl {:optional true} ::sm/boolean]
|
||||
[:tls {:optional true} ::sm/boolean]])
|
||||
|
||||
(s/def ::provider-params
|
||||
(s/keys :opt-un [::host ::port
|
||||
::ssl ::tls
|
||||
::enabled?
|
||||
::bind-dn
|
||||
::bind-password
|
||||
::query
|
||||
::attrs-email
|
||||
::attrs-username
|
||||
::attrs-fullname]))
|
||||
(def ^:private check-params
|
||||
(sm/check-fn schema:params :hint "Invalid LDAP provider parameters"))
|
||||
|
||||
(s/def ::provider
|
||||
(s/nilable ::provider-params))
|
||||
|
||||
(defmethod ig/pre-init-spec ::provider
|
||||
[_]
|
||||
(s/spec ::provider))
|
||||
(defmethod ig/assert-key ::provider
|
||||
[_ params]
|
||||
(when (:enabled params)
|
||||
(some->> params check-params)))
|
||||
|
||||
(defmethod ig/init-key ::provider
|
||||
[_ cfg]
|
||||
(when (:enabled? cfg)
|
||||
(when (:enabled cfg)
|
||||
(try-connectivity cfg)))
|
||||
|
||||
(sm/register! ::provider schema:params)
|
||||
|
||||
@@ -12,7 +12,7 @@
|
||||
[app.common.data.macros :as dm]
|
||||
[app.common.exceptions :as ex]
|
||||
[app.common.logging :as l]
|
||||
[app.common.spec :as us]
|
||||
[app.common.schema :as sm]
|
||||
[app.common.uri :as u]
|
||||
[app.config :as cf]
|
||||
[app.db :as db]
|
||||
@@ -32,11 +32,10 @@
|
||||
[buddy.sign.jwk :as jwk]
|
||||
[buddy.sign.jwt :as jwt]
|
||||
[clojure.set :as set]
|
||||
[clojure.spec.alpha :as s]
|
||||
[cuerdas.core :as str]
|
||||
[integrant.core :as ig]
|
||||
[ring.request :as rreq]
|
||||
[ring.response :as-alias rres]))
|
||||
[yetti.request :as yreq]
|
||||
[yetti.response :as-alias yres]))
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; HELPERS
|
||||
@@ -140,8 +139,9 @@
|
||||
(l/warn :hint "unable to retrieve JWKs (unexpected exception)"
|
||||
:cause cause)))))
|
||||
|
||||
(defmethod ig/pre-init-spec ::providers/generic [_]
|
||||
(s/keys :req [::http/client]))
|
||||
(defmethod ig/assert-key ::providers/generic
|
||||
[_ params]
|
||||
(assert (http/client? (::http/client params)) "expected a valid http client"))
|
||||
|
||||
(defmethod ig/init-key ::providers/generic
|
||||
[_ cfg]
|
||||
@@ -197,6 +197,10 @@
|
||||
;; GITHUB AUTH PROVIDER
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
(defn- int-in-range?
|
||||
[val start end]
|
||||
(and (<= start val) (< val end)))
|
||||
|
||||
(defn- retrieve-github-email
|
||||
[cfg tdata props]
|
||||
(or (some-> props :github/email)
|
||||
@@ -207,7 +211,7 @@
|
||||
|
||||
{:keys [status body]} (http/req! cfg params {:sync? true})]
|
||||
|
||||
(when-not (s/int-in-range? 200 300 status)
|
||||
(when-not (int-in-range? status 200 300)
|
||||
(ex/raise :type :internal
|
||||
:code :unable-to-retrieve-github-emails
|
||||
:hint "unable to retrieve github emails"
|
||||
@@ -217,8 +221,9 @@
|
||||
|
||||
(->> body json/decode (filter :primary) first :email))))
|
||||
|
||||
(defmethod ig/pre-init-spec ::providers/github [_]
|
||||
(s/keys :req [::http/client]))
|
||||
(defmethod ig/assert-key ::providers/github
|
||||
[_ params]
|
||||
(assert (http/client? (::http/client params)) "expected a valid http client"))
|
||||
|
||||
(defmethod ig/init-key ::providers/github
|
||||
[_ cfg]
|
||||
@@ -394,7 +399,7 @@
|
||||
:status (:status response)
|
||||
:body (:body response))
|
||||
|
||||
(when-not (s/int-in-range? 200 300 (:status response))
|
||||
(when-not (int-in-range? (:status response) 200 300)
|
||||
(ex/raise :type :internal
|
||||
:code :unable-to-retrieve-user-info
|
||||
:hint "unable to retrieve user info"
|
||||
@@ -418,15 +423,15 @@
|
||||
(l/warn :hint "unable to get user info from JWT token (unexpected exception)"
|
||||
:cause cause))))
|
||||
|
||||
(s/def ::backend ::us/not-empty-string)
|
||||
(s/def ::email ::us/not-empty-string)
|
||||
(s/def ::fullname ::us/not-empty-string)
|
||||
(s/def ::props (s/map-of ::us/keyword any?))
|
||||
(s/def ::info
|
||||
(s/keys :req-un [::backend
|
||||
::email
|
||||
::fullname
|
||||
::props]))
|
||||
(def ^:private schema:info
|
||||
[:map
|
||||
[:backend ::sm/text]
|
||||
[:email ::sm/email]
|
||||
[:fullname ::sm/text]
|
||||
[:props [:map-of :keyword :any]]])
|
||||
|
||||
(def ^:private valid-info?
|
||||
(sm/validator schema:info))
|
||||
|
||||
(defn- get-info
|
||||
[{:keys [::provider ::setup/props] :as cfg} {:keys [params] :as request}]
|
||||
@@ -444,7 +449,7 @@
|
||||
|
||||
(l/trc :hint "user info" :info info)
|
||||
|
||||
(when-not (s/valid? ::info info)
|
||||
(when-not (valid-info? info)
|
||||
(l/warn :hint "received incomplete profile info object (please set correct scopes)" :info info)
|
||||
(ex/raise :type :internal
|
||||
:code :incomplete-user-info
|
||||
@@ -492,8 +497,8 @@
|
||||
|
||||
(defn- redirect-response
|
||||
[uri]
|
||||
{::rres/status 302
|
||||
::rres/headers {"location" (str uri)}})
|
||||
{::yres/status 302
|
||||
::yres/headers {"location" (str uri)}})
|
||||
|
||||
(defn- redirect-with-error
|
||||
([error] (redirect-with-error error nil))
|
||||
@@ -598,7 +603,7 @@
|
||||
|
||||
(defn- get-external-session-id
|
||||
[request]
|
||||
(let [session-id (rreq/get-header request "x-external-session-id")]
|
||||
(let [session-id (yreq/get-header request "x-external-session-id")]
|
||||
(when (string? session-id)
|
||||
(if (or (> (count session-id) 256)
|
||||
(= session-id "null")
|
||||
@@ -618,8 +623,8 @@
|
||||
state (tokens/generate (::setup/props cfg)
|
||||
(d/without-nils params))
|
||||
uri (build-auth-uri cfg state)]
|
||||
{::rres/status 200
|
||||
::rres/body {:redirect-uri uri}}))
|
||||
{::yres/status 200
|
||||
::yres/body {:redirect-uri uri}}))
|
||||
|
||||
(defn- callback-handler
|
||||
[{:keys [::provider] :as cfg} request]
|
||||
@@ -655,46 +660,37 @@
|
||||
:provider provider
|
||||
:hint "provider not configured"))))))})
|
||||
|
||||
(s/def ::client-id ::us/string)
|
||||
(s/def ::client-secret ::us/string)
|
||||
(s/def ::base-uri ::us/string)
|
||||
(s/def ::token-uri ::us/string)
|
||||
(s/def ::auth-uri ::us/string)
|
||||
(s/def ::user-uri ::us/string)
|
||||
(s/def ::scopes ::us/set-of-strings)
|
||||
(s/def ::roles ::us/set-of-strings)
|
||||
(s/def ::roles-attr ::us/string)
|
||||
(s/def ::email-attr ::us/string)
|
||||
(s/def ::name-attr ::us/string)
|
||||
(def ^:private schema:provider
|
||||
[:map {:title "provider"}
|
||||
[:client-id ::sm/text]
|
||||
[:client-secret ::sm/text]
|
||||
[:base-uri {:optional true} ::sm/text]
|
||||
[:token-uri {:optional true} ::sm/text]
|
||||
[:auth-uri {:optional true} ::sm/text]
|
||||
[:user-uri {:optional true} ::sm/text]
|
||||
[:scopes {:optional true}
|
||||
[::sm/set ::sm/text]]
|
||||
[:roles {:optional true}
|
||||
[::sm/set ::sm/text]]
|
||||
[:roles-attr {:optional true} ::sm/text]
|
||||
[:email-attr {:optional true} ::sm/text]
|
||||
[:name-attr {:optional true} ::sm/text]])
|
||||
|
||||
(s/def ::provider
|
||||
(s/keys :req-un [::client-id
|
||||
::client-secret]
|
||||
:opt-un [::base-uri
|
||||
::token-uri
|
||||
::auth-uri
|
||||
::user-uri
|
||||
::scopes
|
||||
::roles
|
||||
::roles-attr
|
||||
::email-attr
|
||||
::name-attr]))
|
||||
(def ^:private schema:routes-params
|
||||
[:map
|
||||
::session/manager
|
||||
::http/client
|
||||
::setup/props
|
||||
::db/pool
|
||||
[::providers [:map-of :keyword [:maybe schema:provider]]]])
|
||||
|
||||
(s/def ::providers (s/map-of ::us/keyword (s/nilable ::provider)))
|
||||
|
||||
(s/def ::routes vector?)
|
||||
|
||||
(defmethod ig/pre-init-spec ::routes
|
||||
[_]
|
||||
(s/keys :req [::session/manager
|
||||
::http/client
|
||||
::setup/props
|
||||
::db/pool
|
||||
::providers]))
|
||||
(defmethod ig/assert-key ::routes
|
||||
[_ params]
|
||||
(assert (sm/check schema:routes-params params)))
|
||||
|
||||
(defmethod ig/init-key ::routes
|
||||
[_ cfg]
|
||||
(let [cfg (update cfg :provider d/without-nils)]
|
||||
(let [cfg (update cfg :providers d/without-nils)]
|
||||
["" {:middleware [[session/authz cfg]
|
||||
[provider-lookup cfg]]}
|
||||
["/auth/oauth"
|
||||
|
||||
@@ -37,6 +37,21 @@
|
||||
(def ^:dynamic *state* nil)
|
||||
(def ^:dynamic *options* nil)
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; DEFAULTS
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
;; Threshold in MiB when we pass from using
|
||||
;; in-memory byte-array's to use temporal files.
|
||||
(def temp-file-threshold
|
||||
(* 1024 1024 2))
|
||||
|
||||
;; A maximum (storage) object size allowed: 100MiB
|
||||
(def ^:const max-object-size
|
||||
(* 1024 1024 100))
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
(def xf-map-id
|
||||
(map :id))
|
||||
|
||||
@@ -56,6 +71,13 @@
|
||||
(def conj-vec
|
||||
(fnil conj []))
|
||||
|
||||
(defn initial-state
|
||||
[]
|
||||
{:storage-objects #{}
|
||||
:files #{}
|
||||
:teams #{}
|
||||
:projects #{}})
|
||||
|
||||
(defn collect-storage-objects
|
||||
[state items]
|
||||
(update state :storage-objects into xf-map-media-id items))
|
||||
@@ -87,6 +109,8 @@
|
||||
attrs))
|
||||
|
||||
(defn update-index
|
||||
([coll]
|
||||
(update-index {} coll identity))
|
||||
([index coll]
|
||||
(update-index index coll identity))
|
||||
([index coll attr]
|
||||
@@ -110,10 +134,30 @@
|
||||
(update :data feat.fdata/process-pointers deref)
|
||||
(update :data feat.fdata/process-objects (partial into {}))))))))
|
||||
|
||||
(defn clean-file-features
|
||||
[file]
|
||||
(update file :features (fn [features]
|
||||
(if (set? features)
|
||||
(-> features
|
||||
(cfeat/migrate-legacy-features)
|
||||
(set/difference cfeat/frontend-only-features)
|
||||
(set/difference cfeat/backend-only-features))
|
||||
#{}))))
|
||||
|
||||
(defn get-project
|
||||
[cfg project-id]
|
||||
(db/get cfg :project {:id project-id}))
|
||||
|
||||
(def ^:private sql:get-teams
|
||||
"SELECT t.* FROM team WHERE id = ANY(?)")
|
||||
|
||||
(defn get-teams
|
||||
[cfg ids]
|
||||
(let [conn (db/get-connection cfg)
|
||||
ids (db/create-array conn "uuid" ids)]
|
||||
(->> (db/exec! conn [sql:get-teams ids])
|
||||
(map decode-row))))
|
||||
|
||||
(defn get-team
|
||||
[cfg team-id]
|
||||
(-> (db/get cfg :team {:id team-id})
|
||||
@@ -167,9 +211,10 @@
|
||||
(defn get-file-object-thumbnails
|
||||
"Return all file object thumbnails for a given file."
|
||||
[cfg file-id]
|
||||
(db/query cfg :file-tagged-object-thumbnail
|
||||
{:file-id file-id
|
||||
:deleted-at nil}))
|
||||
(->> (db/query cfg :file-tagged-object-thumbnail
|
||||
{:file-id file-id
|
||||
:deleted-at nil})
|
||||
(not-empty)))
|
||||
|
||||
(defn get-file-thumbnail
|
||||
"Return the thumbnail for the specified file-id"
|
||||
@@ -224,26 +269,26 @@
|
||||
(->> (db/exec! conn [sql ids])
|
||||
(mapv #(assoc % :file-id id)))))))
|
||||
|
||||
(def ^:private sql:get-team-files
|
||||
(def ^:private sql:get-team-files-ids
|
||||
"SELECT f.id FROM file AS f
|
||||
JOIN project AS p ON (p.id = f.project_id)
|
||||
WHERE p.team_id = ?")
|
||||
|
||||
(defn get-team-files
|
||||
(defn get-team-files-ids
|
||||
"Get a set of file ids for the specified team-id"
|
||||
[{:keys [::db/conn]} team-id]
|
||||
(->> (db/exec! conn [sql:get-team-files team-id])
|
||||
(->> (db/exec! conn [sql:get-team-files-ids team-id])
|
||||
(into #{} xf-map-id)))
|
||||
|
||||
(def ^:private sql:get-team-projects
|
||||
"SELECT p.id FROM project AS p
|
||||
"SELECT p.* FROM project AS p
|
||||
WHERE p.team_id = ?
|
||||
AND p.deleted_at IS NULL")
|
||||
|
||||
(defn get-team-projects
|
||||
"Get a set of project ids for the team"
|
||||
[{:keys [::db/conn]} team-id]
|
||||
(->> (db/exec! conn [sql:get-team-projects team-id])
|
||||
[cfg team-id]
|
||||
(->> (db/exec! cfg [sql:get-team-projects team-id])
|
||||
(into #{} xf-map-id)))
|
||||
|
||||
(def ^:private sql:get-project-files
|
||||
@@ -257,6 +302,10 @@
|
||||
(->> (db/exec! conn [sql:get-project-files project-id])
|
||||
(into #{} xf-map-id)))
|
||||
|
||||
(defn remap-thumbnail-object-id
|
||||
[object-id file-id]
|
||||
(str/replace-first object-id #"^(.*?)/" (str file-id "/")))
|
||||
|
||||
(defn- relink-shapes
|
||||
"A function responsible to analyze all file data and
|
||||
replace the old :component-file reference with the new
|
||||
@@ -339,6 +388,12 @@
|
||||
data
|
||||
library-ids)))
|
||||
|
||||
(defn disable-database-timeouts!
|
||||
[cfg]
|
||||
(let [conn (db/get-connection cfg)]
|
||||
(db/exec-one! conn ["SET LOCAL idle_in_transaction_session_timeout = 0"])
|
||||
(db/exec-one! conn ["SET CONSTRAINTS ALL DEFERRED"])))
|
||||
|
||||
(defn- fix-version
|
||||
[file]
|
||||
(let [file (fmg/fix-version file)]
|
||||
@@ -400,8 +455,11 @@
|
||||
(fn [features]
|
||||
(let [features (cfeat/check-supported-features! features)]
|
||||
(-> (::features cfg #{})
|
||||
(set/difference cfeat/frontend-only-features)
|
||||
(set/union features))))))
|
||||
(set/union features)
|
||||
;; We never want to store
|
||||
;; frontend-only features on file
|
||||
(set/difference cfeat/frontend-only-features))))))
|
||||
|
||||
|
||||
_ (when (contains? cf/flags :file-schema-validation)
|
||||
(fval/validate-file-schema! file))
|
||||
@@ -432,6 +490,20 @@
|
||||
|
||||
file))
|
||||
|
||||
|
||||
(defn register-pending-migrations
|
||||
"All features that are enabled and requires explicit migration are
|
||||
added to the state for a posterior migration step."
|
||||
[cfg {:keys [id features] :as file}]
|
||||
(doseq [feature (-> (::features cfg)
|
||||
(set/difference cfeat/no-migration-features)
|
||||
(set/difference cfeat/backend-only-features)
|
||||
(set/difference features))]
|
||||
(vswap! *state* update :pending-to-migrate (fnil conj []) [feature id]))
|
||||
|
||||
file)
|
||||
|
||||
|
||||
(defn apply-pending-migrations!
|
||||
"Apply alredy registered pending migrations to files"
|
||||
[cfg]
|
||||
|
||||
@@ -49,15 +49,6 @@
|
||||
|
||||
(set! *warn-on-reflection* true)
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; DEFAULTS
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
;; Threshold in MiB when we pass from using
|
||||
;; in-memory byte-array's to use temporal files.
|
||||
(def temp-file-threshold
|
||||
(* 1024 1024 2))
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; LOW LEVEL STREAM IO API
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
@@ -65,11 +56,6 @@
|
||||
(def ^:const buffer-size (:xnio/buffer-size yt/defaults))
|
||||
(def ^:const penpot-magic-number 800099563638710213)
|
||||
|
||||
|
||||
;; A maximum (storage) object size allowed: 100MiB
|
||||
(def ^:const max-object-size
|
||||
(* 1024 1024 100))
|
||||
|
||||
(def ^:dynamic *position* nil)
|
||||
|
||||
(defn get-mark
|
||||
@@ -236,7 +222,7 @@
|
||||
|
||||
(defn copy-stream!
|
||||
[^OutputStream output ^InputStream input ^long size]
|
||||
(let [written (io/copy! input output :size size)]
|
||||
(let [written (io/copy input output :size size)]
|
||||
(l/trace :fn "copy-stream!" :position @*position* :size size :written written ::l/sync? true)
|
||||
(swap! *position* + written)
|
||||
written))
|
||||
@@ -258,18 +244,18 @@
|
||||
p (tmp/tempfile :prefix "penpot.binfile.")]
|
||||
(assert-mark m :stream)
|
||||
|
||||
(when (> s max-object-size)
|
||||
(when (> s bfc/max-object-size)
|
||||
(ex/raise :type :validation
|
||||
:code :max-file-size-reached
|
||||
:hint (str/ffmt "unable to import storage object with size % bytes" s)))
|
||||
|
||||
(if (> s temp-file-threshold)
|
||||
(if (> s bfc/temp-file-threshold)
|
||||
(with-open [^OutputStream output (io/output-stream p)]
|
||||
(let [readed (io/copy! input output :offset 0 :size s)]
|
||||
(let [readed (io/copy input output :offset 0 :size s)]
|
||||
(l/trace :fn "read-stream*!" :expected s :readed readed :position @*position* ::l/sync? true)
|
||||
(swap! *position* + readed)
|
||||
[s p]))
|
||||
[s (io/read-as-bytes input :size s)])))
|
||||
[s (io/read input :size s)])))
|
||||
|
||||
(defmacro assert-read-label!
|
||||
[input expected-label]
|
||||
@@ -381,10 +367,12 @@
|
||||
::l/sync? true)
|
||||
|
||||
(doseq [item media]
|
||||
(l/dbg :hint "write penpot file media object" :id (:id item) ::l/sync? true))
|
||||
(l/dbg :hint "write penpot file media object"
|
||||
:id (:id item) ::l/sync? true))
|
||||
|
||||
(doseq [item thumbnails]
|
||||
(l/dbg :hint "write penpot file object thumbnail" :media-id (str (:media-id item)) ::l/sync? true))
|
||||
(l/dbg :hint "write penpot file object thumbnail"
|
||||
:media-id (str (:media-id item)) ::l/sync? true))
|
||||
|
||||
(doto output
|
||||
(write-obj! file)
|
||||
@@ -466,8 +454,8 @@
|
||||
|
||||
(defn- read-import-v1
|
||||
[{:keys [::db/conn ::project-id ::profile-id ::input] :as cfg}]
|
||||
(db/exec-one! conn ["SET LOCAL idle_in_transaction_session_timeout = 0"])
|
||||
(db/exec-one! conn ["SET CONSTRAINTS ALL DEFERRED"])
|
||||
|
||||
(bfc/disable-database-timeouts! cfg)
|
||||
|
||||
(pu/with-open [input (zstd-input-stream input)
|
||||
input (io/data-input-stream input)]
|
||||
@@ -520,15 +508,6 @@
|
||||
(update :object-id #(str/replace-first % #"^(.*?)/" (str file-id "/")))))
|
||||
thumbnails))
|
||||
|
||||
(defn- clean-features
|
||||
[file]
|
||||
(update file :features (fn [features]
|
||||
(if (set? features)
|
||||
(-> features
|
||||
(cfeat/migrate-legacy-features)
|
||||
(set/difference cfeat/backend-only-features))
|
||||
#{}))))
|
||||
|
||||
(defmethod read-section :v1/files
|
||||
[{:keys [::db/conn ::input ::project-id ::bfc/overwrite ::name] :as system}]
|
||||
|
||||
@@ -539,7 +518,7 @@
|
||||
file-id (:id file)
|
||||
file-id' (bfc/lookup-index file-id)
|
||||
|
||||
file (clean-features file)
|
||||
file (bfc/clean-file-features file)
|
||||
thumbnails (:thumbnails file)]
|
||||
|
||||
(when (not= file-id expected-file-id)
|
||||
@@ -559,7 +538,9 @@
|
||||
|
||||
(when (seq thumbnails)
|
||||
(let [thumbnails (remap-thumbnails thumbnails file-id')]
|
||||
(l/dbg :hint "updated index with thumbnails" :total (count thumbnails) ::l/sync? true)
|
||||
(l/dbg :hint "updated index with thumbnails"
|
||||
:total (count thumbnails)
|
||||
::l/sync? true)
|
||||
(vswap! bfc/*state* update :thumbnails bfc/into-vec thumbnails)))
|
||||
|
||||
(when (seq media)
|
||||
@@ -709,7 +690,7 @@
|
||||
|
||||
(dm/assert!
|
||||
"expected instance of jio/IOFactory for `input`"
|
||||
(satisfies? jio/IOFactory output))
|
||||
(io/coercible? output))
|
||||
|
||||
(let [id (uuid/next)
|
||||
tp (dt/tpoint)
|
||||
@@ -738,7 +719,7 @@
|
||||
:cause @cs)))))
|
||||
|
||||
(defn import-files!
|
||||
[cfg input]
|
||||
[{:keys [::input] :as cfg}]
|
||||
|
||||
(dm/assert!
|
||||
"expected valid profile-id and project-id on `cfg`"
|
||||
|
||||
@@ -141,16 +141,15 @@
|
||||
(write! cfg :team-font-variant id font))))
|
||||
|
||||
(defn- write-project!
|
||||
[cfg project-id]
|
||||
(let [project (bfc/get-project cfg project-id)]
|
||||
(events/tap :progress
|
||||
{:op :export
|
||||
:section :write-project
|
||||
:id project-id
|
||||
:name (:name project)})
|
||||
(l/trc :hint "write" :obj "project" :id (str project-id))
|
||||
(write! cfg :project (str project-id) project)
|
||||
(vswap! bfc/*state* update :projects conj project-id)))
|
||||
[cfg project]
|
||||
(events/tap :progress
|
||||
{:op :export
|
||||
:section :write-project
|
||||
:id (:id project)
|
||||
:name (:name project)})
|
||||
(l/trc :hint "write" :obj "project" :id (str (:id project)))
|
||||
(write! cfg :project (str (:id project)) project)
|
||||
(vswap! bfc/*state* update :projects conj (:id project)))
|
||||
|
||||
(defn- write-file!
|
||||
[cfg file-id]
|
||||
@@ -191,7 +190,7 @@
|
||||
[{:keys [::sto/storage] :as cfg} id]
|
||||
(let [sobj (sto/get-object storage id)
|
||||
data (with-open [input (sto/get-object-data storage sobj)]
|
||||
(io/read-as-bytes input))]
|
||||
(io/read input))]
|
||||
|
||||
(l/trc :hint "write" :obj "storage-object" :id (str id) :size (:size sobj))
|
||||
(write! cfg :storage-object id (meta sobj) data)))
|
||||
@@ -363,7 +362,7 @@
|
||||
(bfc/get-team-projects cfg team-id))
|
||||
|
||||
(run! (partial write-file! cfg)
|
||||
(bfc/get-team-files cfg team-id))
|
||||
(bfc/get-team-files-ids cfg team-id))
|
||||
|
||||
(run! (partial write-storage-object! cfg)
|
||||
(-> bfc/*state* deref :storage-objects))
|
||||
|
||||
backend/src/app/binfile/v3.clj (new file, 963 lines)
@@ -0,0 +1,963 @@
|
||||
;; This Source Code Form is subject to the terms of the Mozilla Public
|
||||
;; License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
;;
|
||||
;; Copyright (c) KALEIDOS INC
|
||||
|
||||
(ns app.binfile.v3
|
||||
"A ZIP based binary file exportation"
|
||||
(:refer-clojure :exclude [read])
|
||||
(:require
|
||||
[app.binfile.common :as bfc]
|
||||
[app.common.data :as d]
|
||||
[app.common.data.macros :as dm]
|
||||
[app.common.exceptions :as ex]
|
||||
[app.common.features :as cfeat]
|
||||
[app.common.json :as json]
|
||||
[app.common.logging :as l]
|
||||
[app.common.schema :as sm]
|
||||
[app.common.thumbnails :as cth]
|
||||
[app.common.types.color :as ctcl]
|
||||
[app.common.types.component :as ctc]
|
||||
[app.common.types.file :as ctf]
|
||||
[app.common.types.page :as ctp]
|
||||
[app.common.types.plugins :as ctpg]
|
||||
[app.common.types.shape :as cts]
|
||||
[app.common.types.typography :as cty]
|
||||
[app.common.uuid :as uuid]
|
||||
[app.config :as cf]
|
||||
[app.db :as db]
|
||||
[app.storage :as sto]
|
||||
[app.storage.impl :as sto.impl]
|
||||
[app.util.events :as events]
|
||||
[app.util.time :as dt]
|
||||
[clojure.java.io :as jio]
|
||||
[cuerdas.core :as str]
|
||||
[datoteka.fs :as fs]
|
||||
[datoteka.io :as io])
|
||||
(:import
|
||||
java.io.InputStream
|
||||
java.io.OutputStreamWriter
|
||||
java.util.zip.ZipEntry
|
||||
java.util.zip.ZipFile
|
||||
java.util.zip.ZipOutputStream))
|
||||
|
||||
;; --- SCHEMA
|
||||
|
||||
(def ^:private schema:manifest
|
||||
[:map {:title "Manifest"}
|
||||
[:version ::sm/int]
|
||||
[:type :string]
|
||||
|
||||
[:generated-by {:optional true} :string]
|
||||
|
||||
[:files
|
||||
[:vector
|
||||
[:map
|
||||
[:id ::sm/uuid]
|
||||
[:name :string]
|
||||
[:project-id ::sm/uuid]
|
||||
[:features ::cfeat/features]]]]
|
||||
|
||||
[:relations {:optional true}
|
||||
[:vector
|
||||
[:tuple ::sm/uuid ::sm/uuid]]]])
|
||||
|
||||
(def ^:private schema:storage-object
|
||||
[:map {:title "StorageObject"}
|
||||
[:id ::sm/uuid]
|
||||
[:size ::sm/int]
|
||||
[:content-type :string]
|
||||
[:bucket [::sm/one-of {:format :string} sto/valid-buckets]]
|
||||
[:hash :string]])
|
||||
|
||||
(def ^:private schema:file-thumbnail
|
||||
[:map {:title "FileThumbnail"}
|
||||
[:file-id ::sm/uuid]
|
||||
[:page-id ::sm/uuid]
|
||||
[:frame-id ::sm/uuid]
|
||||
[:tag :string]
|
||||
[:media-id ::sm/uuid]])
|
||||
|
||||
;; --- ENCODERS
|
||||
|
||||
(def encode-file
|
||||
(sm/encoder ::ctf/file sm/json-transformer))
|
||||
|
||||
(def encode-page
|
||||
(sm/encoder ::ctp/page sm/json-transformer))
|
||||
|
||||
(def encode-shape
|
||||
(sm/encoder ::cts/shape sm/json-transformer))
|
||||
|
||||
(def encode-media
|
||||
(sm/encoder ::ctf/media sm/json-transformer))
|
||||
|
||||
(def encode-component
|
||||
(sm/encoder ::ctc/component sm/json-transformer))
|
||||
|
||||
(def encode-color
|
||||
(sm/encoder ::ctcl/color sm/json-transformer))
|
||||
|
||||
(def encode-typography
|
||||
(sm/encoder ::cty/typography sm/json-transformer))
|
||||
|
||||
(def encode-plugin-data
|
||||
(sm/encoder ::ctpg/plugin-data sm/json-transformer))
|
||||
|
||||
(def encode-storage-object
|
||||
(sm/encoder schema:storage-object sm/json-transformer))
|
||||
|
||||
(def encode-file-thumbnail
|
||||
(sm/encoder schema:file-thumbnail sm/json-transformer))
|
||||
|
||||
;; --- DECODERS
|
||||
|
||||
(def decode-manifest
|
||||
(sm/decoder schema:manifest sm/json-transformer))
|
||||
|
||||
(def decode-media
|
||||
(sm/decoder ::ctf/media sm/json-transformer))
|
||||
|
||||
(def decode-component
|
||||
(sm/decoder ::ctc/component sm/json-transformer))
|
||||
|
||||
(def decode-color
|
||||
(sm/decoder ::ctcl/color sm/json-transformer))
|
||||
|
||||
(def decode-file
|
||||
(sm/decoder ::ctf/file sm/json-transformer))
|
||||
|
||||
(def decode-page
|
||||
(sm/decoder ::ctp/page sm/json-transformer))
|
||||
|
||||
(def decode-shape
|
||||
(sm/decoder ::cts/shape sm/json-transformer))
|
||||
|
||||
(def decode-typography
|
||||
(sm/decoder ::cty/typography sm/json-transformer))
|
||||
|
||||
(def decode-plugin-data
|
||||
(sm/decoder ::ctpg/plugin-data sm/json-transformer))
|
||||
|
||||
(def decode-storage-object
|
||||
(sm/decoder schema:storage-object sm/json-transformer))
|
||||
|
||||
(def decode-file-thumbnail
|
||||
(sm/decoder schema:file-thumbnail sm/json-transformer))
|
||||
|
||||
;; --- VALIDATORS
|
||||
|
||||
(def validate-manifest
|
||||
(sm/check-fn schema:manifest))
|
||||
|
||||
(def validate-file
|
||||
(sm/check-fn ::ctf/file))
|
||||
|
||||
(def validate-page
|
||||
(sm/check-fn ::ctp/page))
|
||||
|
||||
(def validate-shape
|
||||
(sm/check-fn ::cts/shape))
|
||||
|
||||
(def validate-media
|
||||
(sm/check-fn ::ctf/media))
|
||||
|
||||
(def validate-color
|
||||
(sm/check-fn ::ctcl/color))
|
||||
|
||||
(def validate-component
|
||||
(sm/check-fn ::ctc/component))
|
||||
|
||||
(def validate-typography
|
||||
(sm/check-fn ::cty/typography))
|
||||
|
||||
(def validate-plugin-data
|
||||
(sm/check-fn ::ctpg/plugin-data))
|
||||
|
||||
(def validate-storage-object
|
||||
(sm/check-fn schema:storage-object))
|
||||
|
||||
(def validate-file-thumbnail
|
||||
(sm/check-fn schema:file-thumbnail))
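;; The encoder/decoder/validator triples above are meant to compose:
;; data is encoded into a JSON-friendly shape on export, then decoded
;; and validated again on import. A minimal sketch with an invented
;; color map (the check functions are expected to return the value when
;; it conforms, which is how read-file and friends below use them):
(comment
  (-> {:id (uuid/next) :name "Primary" :color "#aabbcc" :opacity 1}
      (encode-color)    ;; -> plain, JSON-serializable map
      (decode-color)    ;; -> back to the internal representation
      (validate-color)))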
;; --- EXPORT IMPL
|
||||
|
||||
(defn- write-entry!
|
||||
[^ZipOutputStream output ^String path data]
|
||||
(.putNextEntry output (ZipEntry. path))
|
||||
(let [writer (OutputStreamWriter. output "UTF-8")]
|
||||
(json/write writer data :indent true :key-fn json/write-camel-key)
|
||||
(.flush writer))
|
||||
(.closeEntry output))
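;; Usage sketch: each call to write-entry! stores one Clojure map as an
;; indented, camelCase-keyed JSON document under `path` inside the ZIP.
;; The file name and the map below are invented examples.
(comment
  (with-open [output (ZipOutputStream. (io/output-stream (fs/file "example.zip")))]
    (write-entry! output "manifest.json"
                  {:version 1 :type "penpot/export-files" :files []})))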
(defn- get-file
|
||||
[{:keys [::embed-assets ::include-libraries] :as cfg} file-id]
|
||||
|
||||
(when (and include-libraries embed-assets)
|
||||
(throw (IllegalArgumentException.
|
||||
"the `include-libraries` and `embed-assets` are mutally excluding options")))
|
||||
|
||||
(let [detach? (and (not embed-assets) (not include-libraries))]
|
||||
(cond-> (bfc/get-file cfg file-id)
|
||||
detach?
|
||||
(-> (ctf/detach-external-references file-id)
|
||||
(dissoc :libraries))
|
||||
|
||||
embed-assets
|
||||
(update :data #(bfc/embed-assets cfg % file-id))
|
||||
|
||||
:always
|
||||
(bfc/clean-file-features))))
|
||||
|
||||
(defn- resolve-extension
|
||||
[mtype]
|
||||
(case mtype
|
||||
"image/png" ".png"
|
||||
"image/jpeg" ".jpg"
|
||||
"image/gif" ".gif"
|
||||
"image/svg+xml" ".svg"
|
||||
"image/webp" ".webp"
|
||||
"font/woff" ".woff"
|
||||
"font/woff2" ".woff2"
|
||||
"font/ttf" ".ttf"
|
||||
"font/otf" ".otf"
|
||||
"application/octet-stream" ".bin"))
|
||||
|
||||
(defn- export-storage-objects
|
||||
[{:keys [::output] :as cfg}]
|
||||
(let [storage (sto/resolve cfg)]
|
||||
(doseq [id (-> bfc/*state* deref :storage-objects not-empty)]
|
||||
(let [sobject (sto/get-object storage id)
|
||||
smeta (meta sobject)
|
||||
ext (resolve-extension (:content-type smeta))
|
||||
path (str "objects/" id ".json")
|
||||
params (-> (meta sobject)
|
||||
(assoc :id (:id sobject))
|
||||
(assoc :size (:size sobject))
|
||||
(encode-storage-object))]
|
||||
|
||||
(write-entry! output path params)
|
||||
|
||||
(with-open [input (sto/get-object-data storage sobject)]
|
||||
(.putNextEntry output (ZipEntry. (str "objects/" id ext)))
|
||||
(io/copy input output :size (:size sobject))
|
||||
(.closeEntry output))))))
|
||||
|
||||
(defn- export-file
|
||||
[{:keys [::file-id ::output] :as cfg}]
|
||||
(let [file (get-file cfg file-id)
|
||||
media (->> (bfc/get-file-media cfg file)
|
||||
(map (fn [media]
|
||||
(dissoc media :file-id))))
|
||||
|
||||
data (:data file)
|
||||
typographies (:typographies data)
|
||||
components (:components data)
|
||||
colors (:colors data)
|
||||
|
||||
pages (:pages data)
|
||||
pages-index (:pages-index data)
|
||||
|
||||
thumbnails (bfc/get-file-object-thumbnails cfg file-id)]
|
||||
|
||||
(vswap! bfc/*state* update :files assoc file-id
|
||||
{:id file-id
|
||||
:project-id (:project-id file)
|
||||
:name (:name file)
|
||||
:features (:features file)})
|
||||
|
||||
(let [file (cond-> (dissoc file :data)
|
||||
(:options data)
|
||||
(assoc :options (:options data))
|
||||
:always
|
||||
(encode-file))
|
||||
path (str "files/" file-id ".json")]
|
||||
(write-entry! output path file))
|
||||
|
||||
(doseq [[index page-id] (d/enumerate pages)]
|
||||
(let [path (str "files/" file-id "/pages/" page-id ".json")
|
||||
page (get pages-index page-id)
|
||||
objects (:objects page)
|
||||
page (-> page
|
||||
(dissoc :objects)
|
||||
(assoc :index index))
|
||||
page (encode-page page)]
|
||||
|
||||
(write-entry! output path page)
|
||||
|
||||
(doseq [[shape-id shape] objects]
|
||||
(let [path (str "files/" file-id "/pages/" page-id "/" shape-id ".json")
|
||||
shape (assoc shape :page-id page-id)
|
||||
shape (encode-shape shape)]
|
||||
(write-entry! output path shape)))))
|
||||
|
||||
(vswap! bfc/*state* bfc/collect-storage-objects media)
|
||||
(vswap! bfc/*state* bfc/collect-storage-objects thumbnails)
|
||||
|
||||
(doseq [{:keys [id] :as media} media]
|
||||
(let [path (str "files/" file-id "/media/" id ".json")
|
||||
media (encode-media media)]
|
||||
(write-entry! output path media)))
|
||||
|
||||
(doseq [thumbnail thumbnails]
|
||||
(let [data (cth/parse-object-id (:object-id thumbnail))
|
||||
path (str "files/" file-id "/thumbnails/" (:tag data) "/" (:page-id data)
|
||||
"/" (:frame-id data) ".json")
|
||||
data (-> data
|
||||
(assoc :media-id (:media-id thumbnail))
|
||||
(encode-file-thumbnail))]
|
||||
(write-entry! output path data)))
|
||||
|
||||
(doseq [[id component] components]
|
||||
(let [path (str "files/" file-id "/components/" id ".json")
|
||||
component (encode-component component)]
|
||||
(write-entry! output path component)))
|
||||
|
||||
(doseq [[id color] colors]
|
||||
(let [path (str "files/" file-id "/colors/" id ".json")
|
||||
color (-> (encode-color color)
|
||||
(dissoc :file-id))
|
||||
color (cond-> color
|
||||
(and (contains? color :path)
|
||||
(str/empty? (:path color)))
|
||||
(dissoc :path))]
|
||||
(write-entry! output path color)))
|
||||
|
||||
(doseq [[id object] typographies]
|
||||
(let [path (str "files/" file-id "/typographies/" id ".json")
|
||||
color (encode-typography object)]
|
||||
(write-entry! output path color)))))
|
||||
|
||||
(defn- export-files
|
||||
[{:keys [::ids ::include-libraries ::output] :as cfg}]
|
||||
(let [ids (into ids (when include-libraries (bfc/get-libraries cfg ids)))
|
||||
rels (if include-libraries
|
||||
(->> (bfc/get-files-rels cfg ids)
|
||||
(mapv (juxt :file-id :library-file-id)))
|
||||
[])]
|
||||
|
||||
(vswap! bfc/*state* assoc :files (d/ordered-map))
|
||||
|
||||
;; Write all the exporting files
|
||||
(doseq [[index file-id] (d/enumerate ids)]
|
||||
(-> cfg
|
||||
(assoc ::file-id file-id)
|
||||
(assoc ::file-seqn index)
|
||||
(export-file)))
|
||||
|
||||
;; Write manifest file
|
||||
(let [files (:files @bfc/*state*)
|
||||
params {:type "penpot/export-files"
|
||||
:version 1
|
||||
:generated-by (str "penpot/" (:full cf/version))
|
||||
:files (vec (vals files))
|
||||
:relations rels}]
|
||||
(write-entry! output "manifest.json" params))))
;; --- IMPORT IMPL
|
||||
|
||||
(defn- read-zip-entries
|
||||
[^ZipFile input]
|
||||
(into #{} (iterator-seq (.entries input))))
|
||||
|
||||
(defn- get-zip-entry*
|
||||
[^ZipFile input ^String path]
|
||||
(.getEntry input path))
|
||||
|
||||
(defn- get-zip-entry
|
||||
[input path]
|
||||
(let [entry (get-zip-entry* input path)]
|
||||
(when-not entry
|
||||
(ex/raise :type :validation
|
||||
:code :inconsistent-penpot-file
|
||||
:hint "the penpot file seems corrupt, missing underlying zip entry"
|
||||
:path path))
|
||||
entry))
|
||||
|
||||
(defn- get-zip-entry-size
|
||||
[^ZipEntry entry]
|
||||
(.getSize entry))
|
||||
|
||||
(defn- zip-entry-name
|
||||
[^ZipEntry entry]
|
||||
(.getName entry))
|
||||
|
||||
(defn- zip-entry-stream
|
||||
^InputStream
|
||||
[^ZipFile input ^ZipEntry entry]
|
||||
(.getInputStream input entry))
|
||||
|
||||
(defn- zip-entry-reader
|
||||
[^ZipFile input ^ZipEntry entry]
|
||||
(-> (zip-entry-stream input entry)
|
||||
(io/reader :encoding "UTF-8")))
|
||||
|
||||
(defn- zip-entry-storage-content
|
||||
"Wraps a ZipFile and ZipEntry into a penpot storage compatible
|
||||
object and avoid creating temporal objects"
|
||||
[input entry]
|
||||
(let [hash (delay (->> entry
|
||||
(zip-entry-stream input)
|
||||
(sto.impl/calculate-hash)))]
|
||||
(reify
|
||||
sto.impl/IContentObject
|
||||
(get-size [_]
|
||||
(get-zip-entry-size entry))
|
||||
|
||||
sto.impl/IContentHash
|
||||
(get-hash [_]
|
||||
(deref hash))
|
||||
|
||||
jio/IOFactory
|
||||
(make-reader [this opts]
|
||||
(jio/make-reader this opts))
|
||||
(make-writer [_ _]
|
||||
(throw (UnsupportedOperationException. "not implemented")))
|
||||
|
||||
(make-input-stream [_ _]
|
||||
(zip-entry-stream input entry))
|
||||
(make-output-stream [_ _]
|
||||
(throw (UnsupportedOperationException. "not implemented"))))))
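;; The hash is wrapped in a delay so the entry stream is only read and
;; hashed when the storage layer actually asks for it through
;; IContentHash; the size, by contrast, comes straight from the zip
;; entry metadata.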
(defn- read-manifest
|
||||
[^ZipFile input]
|
||||
(let [entry (get-zip-entry input "manifest.json")]
|
||||
(with-open [reader (zip-entry-reader input entry)]
|
||||
(let [manifest (json/read reader :key-fn json/read-kebab-key)]
|
||||
(decode-manifest manifest)))))
|
||||
|
||||
(defn- match-media-entry-fn
|
||||
[file-id]
|
||||
(let [pattern (str "^files/" file-id "/media/([^/]+).json$")
|
||||
pattern (re-pattern pattern)]
|
||||
(fn [entry]
|
||||
(when-let [[_ id] (re-matches pattern (zip-entry-name entry))]
|
||||
{:entry entry
|
||||
:id (parse-uuid id)}))))
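;; Each match-*-entry-fn builds a matcher over zip entries: applied to a
;; ZipEntry whose name follows the expected path pattern it returns a
;; small map with the entry and the ids parsed from the path, and nil
;; otherwise, so the read-file-* helpers below can feed it directly to
;; `keep`. Sketch with an invented file id and a hypothetical open
;; ZipFile bound to `zip-file`:
(comment
  (let [match? (match-media-entry-fn #uuid "00000000-0000-0000-0000-000000000001")]
    ;; => ({:entry #object[java.util.zip.ZipEntry ...] :id #uuid "..."} ...)
    (keep match? (read-zip-entries zip-file))))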
(defn- match-color-entry-fn
|
||||
[file-id]
|
||||
(let [pattern (str "^files/" file-id "/colors/([^/]+).json$")
|
||||
pattern (re-pattern pattern)]
|
||||
(fn [entry]
|
||||
(when-let [[_ id] (re-matches pattern (zip-entry-name entry))]
|
||||
{:entry entry
|
||||
:id (parse-uuid id)}))))
|
||||
|
||||
(defn- match-component-entry-fn
|
||||
[file-id]
|
||||
(let [pattern (str "^files/" file-id "/components/([^/]+).json$")
|
||||
pattern (re-pattern pattern)]
|
||||
(fn [entry]
|
||||
(when-let [[_ id] (re-matches pattern (zip-entry-name entry))]
|
||||
{:entry entry
|
||||
:id (parse-uuid id)}))))
|
||||
|
||||
(defn- match-typography-entry-fn
|
||||
[file-id]
|
||||
(let [pattern (str "^files/" file-id "/typographies/([^/]+).json$")
|
||||
pattern (re-pattern pattern)]
|
||||
(fn [entry]
|
||||
(when-let [[_ id] (re-matches pattern (zip-entry-name entry))]
|
||||
{:entry entry
|
||||
:id (parse-uuid id)}))))
|
||||
|
||||
(defn- match-thumbnail-entry-fn
|
||||
[file-id]
|
||||
(let [pattern (str "^files/" file-id "/thumbnails/([^/]+)/([^/]+)/([^/]+).json$")
|
||||
pattern (re-pattern pattern)]
|
||||
(fn [entry]
|
||||
(when-let [[_ tag page-id frame-id] (re-matches pattern (zip-entry-name entry))]
|
||||
{:entry entry
|
||||
:tag tag
|
||||
:page-id (parse-uuid page-id)
|
||||
:frame-id (parse-uuid frame-id)
|
||||
:file-id file-id}))))
|
||||
|
||||
(defn- match-page-entry-fn
|
||||
[file-id]
|
||||
(let [pattern (str "^files/" file-id "/pages/([^/]+).json$")
|
||||
pattern (re-pattern pattern)]
|
||||
(fn [entry]
|
||||
(when-let [[_ id] (re-matches pattern (zip-entry-name entry))]
|
||||
{:entry entry
|
||||
:id (parse-uuid id)}))))
|
||||
|
||||
(defn- match-shape-entry-fn
|
||||
[file-id page-id]
|
||||
(let [pattern (str "^files/" file-id "/pages/" page-id "/([^/]+).json$")
|
||||
pattern (re-pattern pattern)]
|
||||
(fn [entry]
|
||||
(when-let [[_ id] (re-matches pattern (zip-entry-name entry))]
|
||||
{:entry entry
|
||||
:page-id page-id
|
||||
:id (parse-uuid id)}))))
|
||||
|
||||
(defn- match-storage-entry-fn
|
||||
[]
|
||||
(let [pattern (str "^objects/([^/]+).json$")
|
||||
pattern (re-pattern pattern)]
|
||||
(fn [entry]
|
||||
(when-let [[_ id] (re-matches pattern (zip-entry-name entry))]
|
||||
{:entry entry
|
||||
:id (parse-uuid id)}))))
|
||||
|
||||
(defn- read-entry
|
||||
[^ZipFile input entry]
|
||||
(with-open [reader (zip-entry-reader input entry)]
|
||||
(json/read reader :key-fn json/read-kebab-key)))
|
||||
|
||||
(defn- read-file
|
||||
[{:keys [::input ::file-id]}]
|
||||
(let [path (str "files/" file-id ".json")
|
||||
entry (get-zip-entry input path)]
|
||||
(-> (read-entry input entry)
|
||||
(decode-file)
|
||||
(validate-file))))
|
||||
|
||||
(defn- read-file-plugin-data
|
||||
[{:keys [::input ::file-id]}]
|
||||
(let [path (str "files/" file-id "/plugin-data.json")
|
||||
entry (get-zip-entry* input path)]
|
||||
(some->> entry
|
||||
(read-entry input)
|
||||
(decode-plugin-data)
|
||||
(validate-plugin-data))))
|
||||
|
||||
(defn- read-file-media
|
||||
[{:keys [::input ::file-id ::entries]}]
|
||||
(->> (keep (match-media-entry-fn file-id) entries)
|
||||
(reduce (fn [result {:keys [id entry]}]
|
||||
(let [object (->> (read-entry input entry)
|
||||
(decode-media)
|
||||
(validate-media))
|
||||
object (assoc object :file-id file-id)]
|
||||
(if (= id (:id object))
|
||||
(conj result object)
|
||||
result)))
|
||||
[])
|
||||
(not-empty)))
|
||||
|
||||
(defn- read-file-colors
|
||||
[{:keys [::input ::file-id ::entries]}]
|
||||
(->> (keep (match-color-entry-fn file-id) entries)
|
||||
(reduce (fn [result {:keys [id entry]}]
|
||||
(let [object (->> (read-entry input entry)
|
||||
(decode-color)
|
||||
(validate-color))]
|
||||
(if (= id (:id object))
|
||||
(assoc result id object)
|
||||
result)))
|
||||
{})
|
||||
(not-empty)))
|
||||
|
||||
(defn- read-file-components
|
||||
[{:keys [::input ::file-id ::entries]}]
|
||||
(->> (keep (match-component-entry-fn file-id) entries)
|
||||
(reduce (fn [result {:keys [id entry]}]
|
||||
(let [object (->> (read-entry input entry)
|
||||
(decode-component)
|
||||
(validate-component))]
|
||||
(if (= id (:id object))
|
||||
(assoc result id object)
|
||||
result)))
|
||||
{})
|
||||
(not-empty)))
|
||||
|
||||
(defn- read-file-typographies
|
||||
[{:keys [::input ::file-id ::entries]}]
|
||||
(->> (keep (match-typography-entry-fn file-id) entries)
|
||||
(reduce (fn [result {:keys [id entry]}]
|
||||
(let [object (->> (read-entry input entry)
|
||||
(decode-typography)
|
||||
(validate-typography))]
|
||||
(if (= id (:id object))
|
||||
(assoc result id object)
|
||||
result)))
|
||||
{})
|
||||
(not-empty)))
|
||||
|
||||
(defn- read-file-shapes
|
||||
[{:keys [::input ::file-id ::page-id ::entries] :as cfg}]
|
||||
(->> (keep (match-shape-entry-fn file-id page-id) entries)
|
||||
(reduce (fn [result {:keys [id entry]}]
|
||||
(let [object (->> (read-entry input entry)
|
||||
(decode-shape)
|
||||
(validate-shape))]
|
||||
(if (= id (:id object))
|
||||
(assoc result id object)
|
||||
result)))
|
||||
{})
|
||||
(not-empty)))
|
||||
|
||||
(defn- read-file-pages
|
||||
[{:keys [::input ::file-id ::entries] :as cfg}]
|
||||
(->> (keep (match-page-entry-fn file-id) entries)
|
||||
(keep (fn [{:keys [id entry]}]
|
||||
(let [page (->> (read-entry input entry)
|
||||
(decode-page))
|
||||
page (dissoc page :options)]
|
||||
(when (= id (:id page))
|
||||
(let [objects (-> (assoc cfg ::page-id id)
|
||||
(read-file-shapes))]
|
||||
(assoc page :objects objects))))))
|
||||
(sort-by :index)
|
||||
(reduce (fn [result {:keys [id] :as page}]
|
||||
(assoc result id (dissoc page :index)))
|
||||
(d/ordered-map))))
|
||||
|
||||
(defn- read-file-thumbnails
|
||||
[{:keys [::input ::file-id ::entries] :as cfg}]
|
||||
(->> (keep (match-thumbnail-entry-fn file-id) entries)
|
||||
(reduce (fn [result {:keys [page-id frame-id tag entry]}]
|
||||
(let [object (->> (read-entry input entry)
|
||||
(decode-file-thumbnail)
|
||||
(validate-file-thumbnail))]
|
||||
(if (and (= frame-id (:frame-id object))
|
||||
(= page-id (:page-id object))
|
||||
(= tag (:tag object)))
|
||||
(conj result object)
|
||||
result)))
|
||||
[])
|
||||
(not-empty)))
|
||||
|
||||
(defn- read-file-data
|
||||
[{:keys [] :as cfg}]
|
||||
(let [colors (read-file-colors cfg)
|
||||
typographies (read-file-typographies cfg)
|
||||
components (read-file-components cfg)
|
||||
plugin-data (read-file-plugin-data cfg)
|
||||
pages (read-file-pages cfg)]
|
||||
|
||||
{:pages (-> pages keys vec)
|
||||
:pages-index (into {} pages)
|
||||
:colors colors
|
||||
:typographies typographies
|
||||
:components components
|
||||
:plugin-data plugin-data}))
|
||||
|
||||
(defn- import-file
|
||||
[{:keys [::db/conn ::project-id ::file-id ::file-name] :as cfg}]
|
||||
(let [file-id' (bfc/lookup-index file-id)
|
||||
file (read-file cfg)
|
||||
media (read-file-media cfg)
|
||||
thumbnails (read-file-thumbnails cfg)]
|
||||
|
||||
(l/dbg :hint "processing file"
|
||||
:id (str file-id')
|
||||
:prev-id (str file-id)
|
||||
:features (str/join "," (:features file))
|
||||
:version (:version file)
|
||||
::l/sync? true)
|
||||
|
||||
(events/tap :progress {:section :file :name file-name})
|
||||
|
||||
(when media
|
||||
;; Update index with media
|
||||
(l/dbg :hint "update media index"
|
||||
:file-id (str file-id')
|
||||
:total (count media)
|
||||
::l/sync? true)
|
||||
|
||||
(vswap! bfc/*state* update :index bfc/update-index (map :id media))
|
||||
(vswap! bfc/*state* update :media into media))
|
||||
|
||||
(when thumbnails
|
||||
(l/dbg :hint "update thumbnails index"
|
||||
:file-id (str file-id')
|
||||
:total (count thumbnails)
|
||||
::l/sync? true)
|
||||
|
||||
(vswap! bfc/*state* update :index bfc/update-index (map :media-id thumbnails))
|
||||
(vswap! bfc/*state* update :thumbnails into thumbnails))
|
||||
|
||||
(let [data (-> (read-file-data cfg)
|
||||
(d/without-nils)
|
||||
(assoc :id file-id')
|
||||
(cond-> (:options file)
|
||||
(assoc :options (:options file))))
|
||||
|
||||
file (-> file
|
||||
(assoc :id file-id')
|
||||
(assoc :data data)
|
||||
(assoc :name file-name)
|
||||
(assoc :project-id project-id)
|
||||
(dissoc :options)
|
||||
(bfc/process-file))]
|
||||
|
||||
(->> file
|
||||
(bfc/register-pending-migrations cfg)
|
||||
(bfc/persist-file! cfg))
|
||||
|
||||
(when (::bfc/overwrite cfg)
|
||||
(db/delete! conn :file-thumbnail {:file-id file-id'}))
|
||||
|
||||
file-id')))
|
||||
|
||||
(defn- import-file-relations
|
||||
[{:keys [::db/conn ::manifest ::bfc/timestamp] :as cfg}]
|
||||
(events/tap :progress {:section :relations})
|
||||
(doseq [[file-id libr-id] (:relations manifest)]
|
||||
|
||||
(let [file-id (bfc/lookup-index file-id)
|
||||
libr-id (bfc/lookup-index libr-id)]
|
||||
|
||||
(when (and file-id libr-id)
|
||||
(l/dbg :hint "create file library link"
|
||||
:file-id (str file-id)
|
||||
:lib-id (str libr-id)
|
||||
::l/sync? true)
|
||||
(db/insert! conn :file-library-rel
|
||||
{:synced-at timestamp
|
||||
:file-id file-id
|
||||
:library-file-id libr-id})))))
|
||||
|
||||
(defn- import-storage-objects
|
||||
[{:keys [::input ::entries ::bfc/timestamp] :as cfg}]
|
||||
(events/tap :progress {:section :storage-objects})
|
||||
|
||||
(let [storage (sto/resolve cfg)
|
||||
entries (keep (match-storage-entry-fn) entries)]
|
||||
|
||||
(doseq [{:keys [id entry]} entries]
|
||||
(let [object (->> (read-entry input entry)
|
||||
(decode-storage-object)
|
||||
(validate-storage-object))]
|
||||
|
||||
(when (not= id (:id object))
|
||||
(ex/raise :type :validation
|
||||
:code :inconsistent-penpot-file
|
||||
:hint "the penpot file seems corrupt, found unexpected uuid (storage-object-id)"
|
||||
:expected-id (str id)
|
||||
:found-id (str (:id object))))
|
||||
|
||||
(let [ext (resolve-extension (:content-type object))
|
||||
path (str "objects/" id ext)
|
||||
content (->> path
|
||||
(get-zip-entry input)
|
||||
(zip-entry-storage-content input))]
|
||||
|
||||
(when (not= (:size object) (sto/get-size content))
|
||||
(ex/raise :type :validation
|
||||
:code :inconsistent-penpot-file
|
||||
:hint "found corrupted storage object: size does not match"
|
||||
:path path
|
||||
:expected-size (:size object)
|
||||
:found-size (sto/get-size content)))
|
||||
|
||||
(when (not= (:hash object) (sto/get-hash content))
|
||||
(ex/raise :type :validation
|
||||
:code :inconsistent-penpot-file
|
||||
:hint "found corrupted storage object: hash does not match"
|
||||
:path path
|
||||
:expected-hash (:hash object)
|
||||
:found-hash (sto/get-hash content)))
|
||||
|
||||
(let [params (-> object
|
||||
(dissoc :id :size)
|
||||
(assoc ::sto/content content)
|
||||
(assoc ::sto/deduplicate? true)
|
||||
(assoc ::sto/touched-at timestamp))
|
||||
sobject (sto/put-object! storage params)]
|
||||
|
||||
(l/dbg :hint "persisted storage object"
|
||||
:id (str (:id sobject))
|
||||
:prev-id (str id)
|
||||
:bucket (:bucket params)
|
||||
::l/sync? true)
|
||||
|
||||
(vswap! bfc/*state* update :index assoc id (:id sobject))))))))
|
||||
|
||||
(defn- import-file-media
|
||||
[{:keys [::db/conn] :as cfg}]
|
||||
(events/tap :progress {:section :media})
|
||||
|
||||
(doseq [item (:media @bfc/*state*)]
|
||||
(let [params (-> item
|
||||
(update :id bfc/lookup-index)
|
||||
(update :file-id bfc/lookup-index)
|
||||
(d/update-when :media-id bfc/lookup-index)
|
||||
(d/update-when :thumbnail-id bfc/lookup-index))]
|
||||
|
||||
(l/dbg :hint "inserting file media object"
|
||||
:id (str (:id params))
|
||||
:file-id (str (:file-id params))
|
||||
::l/sync? true)
|
||||
|
||||
(db/insert! conn :file-media-object params
|
||||
{::db/on-conflict-do-nothing? (::bfc/overwrite cfg)}))))
|
||||
|
||||
(defn- import-file-thumbnails
|
||||
[{:keys [::db/conn] :as cfg}]
|
||||
(events/tap :progress {:section :thumbnails})
|
||||
(doseq [item (:thumbnails @bfc/*state*)]
|
||||
(let [file-id (bfc/lookup-index (:file-id item))
|
||||
media-id (bfc/lookup-index (:media-id item))
|
||||
object-id (-> (assoc item :file-id file-id)
|
||||
(cth/fmt-object-id))
|
||||
params {:file-id file-id
|
||||
:object-id object-id
|
||||
:tag (:tag item)
|
||||
:media-id media-id}]
|
||||
|
||||
(l/dbg :hint "inserting file object thumbnail"
|
||||
:file-id (str file-id)
|
||||
:media-id (str media-id)
|
||||
::l/sync? true)
|
||||
|
||||
(db/insert! conn :file-tagged-object-thumbnail params
|
||||
{::db/on-conflict-do-nothing? (::bfc/overwrite cfg)}))))
|
||||
|
||||
(defn- import-files
|
||||
[{:keys [::bfc/timestamp ::input ::name] :or {timestamp (dt/now)} :as cfg}]
|
||||
|
||||
(dm/assert!
|
||||
"expected zip file"
|
||||
(instance? ZipFile input))
|
||||
|
||||
(dm/assert!
|
||||
"expected valid instant"
|
||||
(dt/instant? timestamp))
|
||||
|
||||
(let [manifest (-> (read-manifest input)
|
||||
(validate-manifest))
|
||||
entries (read-zip-entries input)]
|
||||
|
||||
(when-not (= "penpot/export-files" (:type manifest))
|
||||
(ex/raise :type :validation
|
||||
:code :invalid-binfile-v3-manifest
|
||||
:hint "unexpected type on manifest"
|
||||
:manifest manifest))
|
||||
|
||||
;; Check if all files referenced on manifest are present
|
||||
(doseq [{file-id :id} (:files manifest)]
|
||||
(let [path (str "files/" file-id ".json")]
|
||||
(when-not (get-zip-entry input path)
|
||||
(ex/raise :type :validation
|
||||
:code :invalid-binfile-v3
|
||||
:hint "some files referenced on manifest not found"
|
||||
:path path
|
||||
:file-id file-id))))
|
||||
|
||||
(events/tap :progress {:section :manifest})
|
||||
|
||||
(let [index (bfc/update-index (map :id (:files manifest)))
|
||||
state {:media [] :index index}
|
||||
cfg (-> cfg
|
||||
(assoc ::entries entries)
|
||||
(assoc ::manifest manifest)
|
||||
(assoc ::bfc/timestamp timestamp))]
|
||||
|
||||
(binding [bfc/*state* (volatile! state)]
|
||||
(db/tx-run! cfg (fn [cfg]
|
||||
(bfc/disable-database-timeouts! cfg)
|
||||
(let [ids (->> (:files manifest)
|
||||
(reduce (fn [result {:keys [id] :as file}]
|
||||
(let [name' (get file :name)
|
||||
name' (if (map? name)
|
||||
(get name id)
|
||||
name')]
|
||||
(conj result (-> cfg
|
||||
(assoc ::file-id id)
|
||||
(assoc ::file-name name')
|
||||
(import-file)))))
|
||||
[]))]
|
||||
(import-file-relations cfg)
|
||||
(import-storage-objects cfg)
|
||||
(import-file-media cfg)
|
||||
(import-file-thumbnails cfg)
|
||||
|
||||
(bfc/apply-pending-migrations! cfg)
|
||||
|
||||
ids)))))))
|
||||
|
||||
;; --- PUBLIC API
|
||||
|
||||
(defn export-files!
|
||||
"Do the exportation of a specified file in custom penpot binary
|
||||
format. There are some options available for customize the output:
|
||||
|
||||
`::include-libraries`: additionally to the specified file, all the
|
||||
linked libraries also will be included (including transitive
|
||||
dependencies).
|
||||
|
||||
`::embed-assets`: instead of including the libraries, embed in the
|
||||
same file library all assets used from external libraries."
|
||||
|
||||
[{:keys [::ids] :as cfg} output]
|
||||
|
||||
(dm/assert!
|
||||
"expected a set of uuid's for `::ids` parameter"
|
||||
(and (set? ids)
|
||||
(every? uuid? ids)))
|
||||
|
||||
(dm/assert!
|
||||
"expected instance of jio/IOFactory for `input`"
|
||||
(satisfies? jio/IOFactory output))
|
||||
|
||||
(let [id (uuid/next)
|
||||
tp (dt/tpoint)
|
||||
ab (volatile! false)
|
||||
cs (volatile! nil)]
|
||||
(try
|
||||
(l/info :hint "start exportation" :export-id (str id))
|
||||
(binding [bfc/*state* (volatile! (bfc/initial-state))]
|
||||
(with-open [output (io/output-stream output)]
|
||||
(with-open [output (ZipOutputStream. output)]
|
||||
(let [cfg (assoc cfg ::output output)]
|
||||
(export-files cfg)
|
||||
(export-storage-objects cfg)))))
|
||||
|
||||
(catch java.util.zip.ZipException cause
|
||||
(vreset! cs cause)
|
||||
(vreset! ab true)
|
||||
(throw cause))
|
||||
|
||||
(catch java.io.IOException _cause
|
||||
;; Do nothing, EOF means client closes connection abruptly
|
||||
(vreset! ab true)
|
||||
nil)
|
||||
|
||||
(catch Throwable cause
|
||||
(vreset! cs cause)
|
||||
(vreset! ab true)
|
||||
(throw cause))
|
||||
|
||||
(finally
|
||||
(l/info :hint "exportation finished" :export-id (str id)
|
||||
:elapsed (str (inst-ms (tp)) "ms")
|
||||
:aborted @ab
|
||||
:cause @cs)))))
|
||||
|
||||
|
||||
(defn import-files!
|
||||
[{:keys [::input] :as cfg}]
|
||||
|
||||
(dm/assert!
|
||||
"expected valid profile-id and project-id on `cfg`"
|
||||
(and (uuid? (::profile-id cfg))
|
||||
(uuid? (::project-id cfg))))
|
||||
|
||||
(dm/assert!
|
||||
"expected instance of jio/IOFactory for `input`"
|
||||
(io/coercible? input))
|
||||
|
||||
(let [id (uuid/next)
|
||||
tp (dt/tpoint)
|
||||
cs (volatile! nil)]
|
||||
|
||||
(l/info :hint "import: started" :id (str id))
|
||||
(try
|
||||
(with-open [input (ZipFile. (fs/file input))]
|
||||
(import-files (assoc cfg ::input input)))
|
||||
|
||||
(catch Throwable cause
|
||||
(vreset! cs cause)
|
||||
(throw cause))
|
||||
|
||||
(finally
|
||||
(l/info :hint "import: terminated"
|
||||
:id (str id)
|
||||
:elapsed (dt/format-duration (tp))
|
||||
:error? (some? @cs))))))
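;; Usage sketch of the public API; `system`, the uuids and the file
;; names are hypothetical, and the cfg map is assumed to already carry
;; the storage and database dependencies that sto/resolve and db/tx-run!
;; expect.
(comment
  ;; Export two files plus their linked libraries into a zip file.
  (export-files! (assoc system
                        ::ids #{file-id-1 file-id-2}
                        ::include-libraries true)
                 (fs/file "export.penpot"))

  ;; Import the archive back into a project on behalf of a profile.
  (import-files! (assoc system
                        ::profile-id profile-id
                        ::project-id project-id
                        ::input (fs/file "export.penpot"))))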
@@ -26,11 +26,11 @@
|
||||
[_ data]
|
||||
(d/without-nils data))
|
||||
|
||||
(defmethod ig/prep-key :default
|
||||
[_ data]
|
||||
(if (map? data)
|
||||
(d/without-nils data)
|
||||
data))
|
||||
(defmethod ig/expand-key :default
|
||||
[k v]
|
||||
{k (if (map? v)
|
||||
(d/without-nils v)
|
||||
v)})
|
||||
|
||||
(def default
|
||||
{:database-uri "postgresql://postgres/penpot"
|
||||
@@ -42,7 +42,6 @@
|
||||
:rpc-rlimit-config "resources/rlimit.edn"
|
||||
:rpc-climit-config "resources/climit.edn"
|
||||
|
||||
:auto-file-snapshot-total 10
|
||||
:auto-file-snapshot-every 5
|
||||
:auto-file-snapshot-timeout "3h"
|
||||
|
||||
@@ -101,7 +100,6 @@
|
||||
[:telemetry-uri {:optional true} :string]
|
||||
[:telemetry-with-taiga {:optional true} ::sm/boolean] ;; DELETE
|
||||
|
||||
[:auto-file-snapshot-total {:optional true} ::sm/int]
|
||||
[:auto-file-snapshot-every {:optional true} ::sm/int]
|
||||
[:auto-file-snapshot-timeout {:optional true} ::dt/duration]
|
||||
|
||||
@@ -126,7 +124,7 @@
|
||||
[:worker-webhook-parallelism {:optional true} ::sm/int]
|
||||
|
||||
[:database-password {:optional true} [:maybe :string]]
|
||||
[:database-uri {:optional true} :string]
|
||||
[:database-uri {:optional true} ::sm/uri]
|
||||
[:database-username {:optional true} [:maybe :string]]
|
||||
[:database-readonly {:optional true} ::sm/boolean]
|
||||
[:database-min-pool-size {:optional true} ::sm/int]
|
||||
@@ -142,6 +140,10 @@
|
||||
[:quotes-font-variants-per-team {:optional true} ::sm/int]
|
||||
[:quotes-comment-threads-per-file {:optional true} ::sm/int]
|
||||
[:quotes-comments-per-file {:optional true} ::sm/int]
|
||||
[:quotes-snapshots-per-file {:optional true} ::sm/int]
|
||||
[:quotes-snapshots-per-team {:optional true} ::sm/int]
|
||||
[:quotes-team-access-requests-per-team {:optional true} ::sm/int]
|
||||
[:quotes-team-access-requests-per-requester {:optional true} ::sm/int]
|
||||
|
||||
[:auth-data-cookie-domain {:optional true} :string]
|
||||
[:auth-token-cookie-name {:optional true} :string]
|
||||
@@ -188,7 +190,7 @@
|
||||
[:profile-complaint-max-age {:optional true} ::dt/duration]
|
||||
[:profile-complaint-threshold {:optional true} ::sm/int]
|
||||
|
||||
[:redis-uri {:optional true} :string]
|
||||
[:redis-uri {:optional true} ::sm/uri]
|
||||
|
||||
[:email-domain-blacklist {:optional true} ::fs/path]
|
||||
[:email-domain-whitelist {:optional true} ::fs/path]
|
||||
@@ -216,14 +218,14 @@
|
||||
[:storage-assets-fs-directory {:optional true} :string]
|
||||
[:storage-assets-s3-bucket {:optional true} :string]
|
||||
[:storage-assets-s3-region {:optional true} :keyword]
|
||||
[:storage-assets-s3-endpoint {:optional true} :string]
|
||||
[:storage-assets-s3-endpoint {:optional true} ::sm/uri]
|
||||
[:storage-assets-s3-io-threads {:optional true} ::sm/int]
|
||||
|
||||
[:objects-storage-backend {:optional true} :keyword]
|
||||
[:objects-storage-fs-directory {:optional true} :string]
|
||||
[:objects-storage-s3-bucket {:optional true} :string]
|
||||
[:objects-storage-s3-region {:optional true} :keyword]
|
||||
[:objects-storage-s3-endpoint {:optional true} :string]
|
||||
[:objects-storage-s3-endpoint {:optional true} ::sm/uri]
|
||||
[:objects-storage-s3-io-threads {:optional true} ::sm/int]]))
|
||||
|
||||
(def default-flags
|
||||
|
||||
@@ -11,7 +11,7 @@
|
||||
[app.common.exceptions :as ex]
|
||||
[app.common.geom.point :as gpt]
|
||||
[app.common.logging :as l]
|
||||
[app.common.spec :as us]
|
||||
[app.common.schema :as sm]
|
||||
[app.common.transit :as t]
|
||||
[app.common.uuid :as uuid]
|
||||
[app.db.sql :as sql]
|
||||
@@ -20,7 +20,6 @@
|
||||
[app.util.time :as dt]
|
||||
[clojure.java.io :as io]
|
||||
[clojure.set :as set]
|
||||
[clojure.spec.alpha :as s]
|
||||
[integrant.core :as ig]
|
||||
[next.jdbc :as jdbc]
|
||||
[next.jdbc.date-time :as jdbc-dt])
|
||||
@@ -49,27 +48,17 @@
|
||||
;; Initialization
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
(s/def ::connection-timeout ::us/integer)
|
||||
(s/def ::max-size ::us/integer)
|
||||
(s/def ::min-size ::us/integer)
|
||||
(s/def ::name keyword?)
|
||||
(s/def ::password ::us/string)
|
||||
(s/def ::uri ::us/not-empty-string)
|
||||
(s/def ::username ::us/string)
|
||||
(s/def ::validation-timeout ::us/integer)
|
||||
(s/def ::read-only? ::us/boolean)
|
||||
|
||||
(s/def ::pool-options
|
||||
(s/keys :opt [::uri
|
||||
::name
|
||||
::min-size
|
||||
::max-size
|
||||
::connection-timeout
|
||||
::validation-timeout
|
||||
::username
|
||||
::password
|
||||
::mtx/metrics
|
||||
::read-only?]))
|
||||
(def ^:private schema:pool-options
|
||||
[:map {:title "pool-options"}
|
||||
[::connect-timeout {:optional true} ::sm/int]
|
||||
[::max-size {:optional true} ::sm/int]
|
||||
[::min-size {:optional true} ::sm/int]
|
||||
[::name {:optional true} :keyword]
|
||||
[::uri {:optional true} ::sm/uri]
|
||||
[::password {:optional true} :string]
|
||||
[::username {:optional true} :string]
|
||||
[::validation-timeout {:optional true} ::sm/int]
|
||||
[::read-only {:optional true} ::sm/boolean]])
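;; A sketch of an options map accepted by this schema (all values are
;; illustrative; in practice the supplied map is merged over `defaults`
;; below):
(comment
  {::uri "postgresql://postgres/penpot"
   ::name :main
   ::username "penpot"
   ::password "penpot"
   ::min-size 0
   ::max-size 30
   ::connect-timeout 10000
   ::validation-timeout 10000
   ::read-only false})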
(def defaults
|
||||
{::name :main
|
||||
@@ -79,27 +68,26 @@
|
||||
::validation-timeout 10000
|
||||
::idle-timeout 120000 ; 2min
|
||||
::max-lifetime 1800000 ; 30m
|
||||
::read-only? false})
|
||||
::read-only false})
|
||||
|
||||
(defmethod ig/prep-key ::pool
|
||||
[_ cfg]
|
||||
(merge defaults (d/without-nils cfg)))
|
||||
|
||||
;; Don't validate here, just validate that a map is received.
|
||||
(defmethod ig/pre-init-spec ::pool [_] ::pool-options)
|
||||
(defmethod ig/assert-key ::pool
|
||||
[_ options]
|
||||
(assert (sm/check schema:pool-options options)))
|
||||
|
||||
(defmethod ig/init-key ::pool
|
||||
[_ {:keys [::uri ::read-only?] :as cfg}]
|
||||
(when uri
|
||||
(l/info :hint "initialize connection pool"
|
||||
:name (d/name (::name cfg))
|
||||
:uri uri
|
||||
:read-only read-only?
|
||||
:with-credentials (and (contains? cfg ::username)
|
||||
(contains? cfg ::password))
|
||||
:min-size (::min-size cfg)
|
||||
:max-size (::max-size cfg))
|
||||
(create-pool cfg)))
|
||||
[_ cfg]
|
||||
(let [{:keys [::uri ::read-only] :as cfg}
|
||||
(merge defaults cfg)]
|
||||
(when uri
|
||||
(l/info :hint "initialize connection pool"
|
||||
:name (d/name (::name cfg))
|
||||
:uri (str uri)
|
||||
:read-only read-only
|
||||
:credentials (and (contains? cfg ::username)
|
||||
(contains? cfg ::password))
|
||||
:min-size (::min-size cfg)
|
||||
:max-size (::max-size cfg))
|
||||
(create-pool cfg))))
|
||||
|
||||
(defmethod ig/halt-key! ::pool
|
||||
[_ pool]
|
||||
@@ -115,13 +103,15 @@
|
||||
"SET idle_in_transaction_session_timeout = 300000;"))
|
||||
|
||||
(defn- create-datasource-config
|
||||
[{:keys [::mtx/metrics ::uri] :as cfg}]
|
||||
[{:keys [::uri] :as cfg}]
|
||||
|
||||
;; (app.common.pprint/pprint cfg)
|
||||
(let [config (HikariConfig.)]
|
||||
(doto config
|
||||
(.setJdbcUrl (str "jdbc:" uri))
|
||||
(.setPoolName (d/name (::name cfg)))
|
||||
(.setAutoCommit true)
|
||||
(.setReadOnly (::read-only? cfg))
|
||||
(.setReadOnly (::read-only cfg))
|
||||
(.setConnectionTimeout (::connection-timeout cfg))
|
||||
(.setValidationTimeout (::validation-timeout cfg))
|
||||
(.setIdleTimeout (::idle-timeout cfg))
|
||||
@@ -132,8 +122,8 @@
|
||||
(.setInitializationFailTimeout -1))
|
||||
|
||||
;; When metrics namespace is provided
|
||||
(when metrics
|
||||
(->> (::mtx/registry metrics)
|
||||
(when-let [instance (::mtx/metrics cfg)]
|
||||
(->> (mtx/get-registry instance)
|
||||
(PrometheusMetricsTrackerFactory.)
|
||||
(.setMetricsTrackerFactory config)))
|
||||
|
||||
@@ -150,10 +140,22 @@
|
||||
[conn]
|
||||
(instance? Connection conn))
|
||||
|
||||
(s/def ::conn some?)
|
||||
(s/def ::nilable-pool (s/nilable ::pool))
|
||||
(s/def ::pool pool?)
|
||||
(s/def ::connectable some?)
|
||||
(defn connectable?
|
||||
[o]
|
||||
(or (connection? o)
|
||||
(pool? o)))
|
||||
|
||||
(sm/register!
|
||||
{:type ::conn
|
||||
:pred connection?})
|
||||
|
||||
(sm/register!
|
||||
{:type ::connectable
|
||||
:pred connectable?})
|
||||
|
||||
(sm/register!
|
||||
{:type ::pool
|
||||
:pred pool?})
|
||||
|
||||
(defn closed?
|
||||
[pool]
|
||||
@@ -268,19 +270,17 @@
|
||||
:else (throw (IllegalArgumentException. "unable to resolve connectable"))))
|
||||
|
||||
(def ^:private params-mapping
|
||||
{::return-keys? :return-keys
|
||||
::return-keys :return-keys})
|
||||
{::return-keys :return-keys})
|
||||
|
||||
(defn rename-opts
|
||||
[opts]
|
||||
(set/rename-keys opts params-mapping))
|
||||
|
||||
(def ^:private default-insert-opts
|
||||
{:builder-fn sql/as-kebab-maps
|
||||
:return-keys true})
|
||||
(assoc sql/default-opts :return-keys true))
|
||||
|
||||
(def ^:private default-opts
|
||||
{:builder-fn sql/as-kebab-maps})
|
||||
sql/default-opts)
|
||||
|
||||
(defn exec!
|
||||
([ds sv] (exec! ds sv nil))
|
||||
@@ -331,7 +331,7 @@
|
||||
(defn update!
|
||||
"A helper that build an UPDATE SQL statement and executes it.
|
||||
|
||||
Given a connectable object, a table name, a hash map of columns and
|
||||
Given a connectable object, a table name, a hash map of columns and
|
||||
values to set, and either a hash map of columns and values to search
|
||||
on or a vector of a SQL where clause and parameters, perform an
|
||||
update on the table.
|
||||
@@ -411,10 +411,20 @@
|
||||
:hint "database object not found"))
|
||||
row))
|
||||
|
||||
(def ^:private default-plan-opts
|
||||
(-> default-opts
|
||||
(assoc :fetch-size 1)
|
||||
(assoc :concurrency :read-only)
|
||||
(assoc :cursors :close)
|
||||
(assoc :result-type :forward-only)))
|
||||
|
||||
(defn plan
|
||||
[ds sql]
|
||||
(-> (get-connectable ds)
|
||||
(jdbc/plan sql sql/default-opts)))
|
||||
([ds sql]
|
||||
(-> (get-connectable ds)
|
||||
(jdbc/plan sql default-plan-opts)))
|
||||
([ds sql opts]
|
||||
(-> (get-connectable ds)
|
||||
(jdbc/plan sql (merge default-plan-opts opts)))))
|
||||
|
||||
(defn cursor
|
||||
"Return a lazy seq of rows using server side cursors"
|
||||
|
||||
@@ -15,14 +15,15 @@
|
||||
(defn kebab-case [s] (str/replace s #"_" "-"))
|
||||
(defn snake-case [s] (str/replace s #"-" "_"))
|
||||
|
||||
(def default-opts
|
||||
{:table-fn snake-case
|
||||
:column-fn snake-case})
|
||||
|
||||
(defn as-kebab-maps
|
||||
[rs opts]
|
||||
(jdbc-opt/as-unqualified-modified-maps rs (assoc opts :label-fn kebab-case)))
|
||||
|
||||
(def default-opts
|
||||
{:table-fn snake-case
|
||||
:column-fn snake-case
|
||||
:builder-fn as-kebab-maps})
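;; With as-kebab-maps as the default builder, a row with columns such as
;; file_id and library_file_id comes back as {:file-id ... :library-file-id ...},
;; while table and column names still go through snake-case in the other
;; direction when statements are built.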
(defn insert
|
||||
([table key-map]
|
||||
(insert table key-map nil))
|
||||
|
||||
@@ -12,18 +12,12 @@
|
||||
[app.common.logging :as l]
|
||||
[app.common.pprint :as pp]
|
||||
[app.common.schema :as sm]
|
||||
[app.common.spec :as us]
|
||||
[app.config :as cf]
|
||||
[app.db :as db]
|
||||
[app.db.sql :as sql]
|
||||
[app.email.invite-to-team :as-alias email.invite-to-team]
|
||||
[app.email.join-team :as-alias email.join-team]
|
||||
[app.email.request-team-access :as-alias email.request-team-access]
|
||||
[app.metrics :as mtx]
|
||||
[app.util.template :as tmpl]
|
||||
[app.worker :as wrk]
|
||||
[clojure.java.io :as io]
|
||||
[clojure.spec.alpha :as s]
|
||||
[cuerdas.core :as str]
|
||||
[integrant.core :as ig])
|
||||
(:import
|
||||
@@ -223,50 +217,45 @@
|
||||
[{:type "text/html"
|
||||
:content html}]))}))
|
||||
|
||||
(s/def ::priority #{:high :low})
|
||||
(s/def ::to (s/or :single ::us/email
|
||||
:multi (s/coll-of ::us/email)))
|
||||
(s/def ::from ::us/email)
|
||||
(s/def ::reply-to ::us/email)
|
||||
(s/def ::lang string?)
|
||||
(s/def ::extra-data ::us/string)
|
||||
(def ^:private schema:context
|
||||
[:map
|
||||
[:to [:or ::sm/email [::sm/vec ::sm/email]]]
|
||||
[:reply-to {:optional true} ::sm/email]
|
||||
[:from {:optional true} ::sm/email]
|
||||
[:lang {:optional true} ::sm/text]
|
||||
[:priority {:optional true} [:enum :high :low]]
|
||||
[:extra-data {:optional true} ::sm/text]])
|
||||
|
||||
(s/def ::context
|
||||
(s/keys :req-un [::to]
|
||||
:opt-un [::reply-to ::from ::lang ::priority ::extra-data]))
|
||||
(def ^:private check-context
|
||||
(sm/check-fn schema:context))
|
||||
|
||||
(defn template-factory
|
||||
([id] (template-factory id {}))
|
||||
([id extra-context]
|
||||
(s/assert keyword? id)
|
||||
(fn [context]
|
||||
(us/verify ::context context)
|
||||
(when-let [spec (s/get-spec id)]
|
||||
(s/assert spec context))
|
||||
[& {:keys [id schema]}]
|
||||
(assert (keyword? id) "id should be provided and it should be a keyword")
|
||||
(let [check-fn (if schema
|
||||
(sm/check-fn schema)
|
||||
(constantly nil))]
|
||||
(fn [context]
|
||||
(let [context (-> context check-context check-fn)
|
||||
email (build-email-template id context)]
|
||||
(when-not email
|
||||
(ex/raise :type :internal
|
||||
:code :email-template-does-not-exists
|
||||
:hint "seems like the template is wrong or does not exists."
|
||||
:template-id id))
|
||||
|
||||
(let [context (merge (if (fn? extra-context)
|
||||
(extra-context)
|
||||
extra-context)
|
||||
context)
|
||||
email (build-email-template id context)]
|
||||
(when-not email
|
||||
(ex/raise :type :internal
|
||||
:code :email-template-does-not-exists
|
||||
:hint "seems like the template is wrong or does not exists."
|
||||
:context {:id id}))
|
||||
(cond-> (assoc email :id (name id))
|
||||
(:extra-data context)
|
||||
(assoc :extra-data (:extra-data context))
|
||||
(cond-> (assoc email :id (name id))
|
||||
(:extra-data context)
|
||||
(assoc :extra-data (:extra-data context))
|
||||
|
||||
(:from context)
|
||||
(assoc :from (:from context))
|
||||
(:from context)
|
||||
(assoc :from (:from context))
|
||||
|
||||
(:reply-to context)
|
||||
(assoc :reply-to (:reply-to context))
|
||||
|
||||
(:to context)
|
||||
(assoc :to (:to context)))))))
|
||||
(:reply-to context)
|
||||
(assoc :reply-to (:reply-to context))
|
||||
|
||||
(:to context)
|
||||
(assoc :to (:to context)))))))
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; PUBLIC HIGH-LEVEL API
|
||||
@@ -280,7 +269,8 @@
|
||||
"Schedule an already defined email to be sent using asynchronously
|
||||
using worker task."
|
||||
[{:keys [::conn ::factory] :as context}]
|
||||
(us/verify some? conn)
|
||||
(assert (db/connectable? conn) "expected a valid database connection or pool")
|
||||
|
||||
(let [email (if factory
|
||||
(factory context)
|
||||
(dissoc context ::conn))]
|
||||
@@ -297,8 +287,6 @@
|
||||
|
||||
(declare send-to-logger!)
|
||||
|
||||
(s/def ::sendmail fn?)
|
||||
|
||||
(defmethod ig/init-key ::sendmail
|
||||
[_ cfg]
|
||||
(fn [params]
|
||||
@@ -324,8 +312,9 @@
|
||||
(when (contains? cf/flags :log-emails)
|
||||
(send-to-logger! cfg params))))
|
||||
|
||||
(defmethod ig/pre-init-spec ::handler [_]
|
||||
(s/keys :req [::sendmail ::mtx/metrics]))
|
||||
(defmethod ig/assert-key ::handler
|
||||
[_ params]
|
||||
(assert (fn? (::sendmail params)) "expected valid sendmail handler"))
|
||||
|
||||
(defmethod ig/init-key ::handler
|
||||
[_ {:keys [::sendmail]}]
|
||||
@@ -352,125 +341,113 @@
|
||||
;; EMAIL FACTORIES
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
(s/def ::subject ::us/string)
|
||||
(s/def ::content ::us/string)
|
||||
(def ^:private schema:feedback
|
||||
[:map
|
||||
[:subject ::sm/text]
|
||||
[:content ::sm/text]])
|
||||
|
||||
(s/def ::feedback
|
||||
(s/keys :req-un [::subject ::content]))
|
||||
|
||||
(def feedback
|
||||
(def user-feedback
|
||||
"A profile feedback email."
|
||||
(template-factory ::feedback))
|
||||
(template-factory
|
||||
:id ::feedback
|
||||
:schema schema:feedback))
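;; Sketch of how a factory defined this way is applied: the argument
;; must satisfy schema:context plus the factory's own schema, and the
;; result is the email data map to be sent (the values below are
;; invented):
(comment
  (user-feedback {:to "user@example.com"
                  :subject "Some subject"
                  :content "Some feedback text"}))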
(s/def ::name ::us/string)
|
||||
(s/def ::register
|
||||
(s/keys :req-un [::name]))
|
||||
(def ^:private schema:register
|
||||
[:map [:name ::sm/text]])
|
||||
|
||||
(def register
|
||||
"A new profile registration welcome email."
|
||||
(template-factory ::register))
|
||||
(template-factory
|
||||
:id ::register
|
||||
:schema schema:register))
|
||||
|
||||
(s/def ::token ::us/string)
|
||||
(s/def ::password-recovery
|
||||
(s/keys :req-un [::name ::token]))
|
||||
(def ^:private schema:password-recovery
|
||||
[:map
|
||||
[:name ::sm/text]
|
||||
[:token ::sm/text]])
|
||||
|
||||
(def password-recovery
|
||||
"A password recovery notification email."
|
||||
(template-factory ::password-recovery))
|
||||
(template-factory
|
||||
:id ::password-recovery
|
||||
:schema schema:password-recovery))
|
||||
|
||||
(s/def ::pending-email ::us/email)
|
||||
(s/def ::change-email
|
||||
(s/keys :req-un [::name ::pending-email ::token]))
|
||||
(def ^:private schema:change-email
|
||||
[:map
|
||||
[:name ::sm/text]
|
||||
[:pending-email ::sm/email]
|
||||
[:token ::sm/text]])
|
||||
|
||||
(def change-email
|
||||
"Password change confirmation email"
|
||||
(template-factory ::change-email))
|
||||
(template-factory
|
||||
:id ::change-email
|
||||
:schema schema:change-email))
|
||||
|
||||
(s/def ::email.invite-to-team/invited-by ::us/string)
|
||||
(s/def ::email.invite-to-team/team ::us/string)
|
||||
(s/def ::email.invite-to-team/token ::us/string)
|
||||
|
||||
(s/def ::invite-to-team
|
||||
(s/keys :req-un [::email.invite-to-team/invited-by
|
||||
::email.invite-to-team/token
|
||||
::email.invite-to-team/team]))
|
||||
(def ^:private schema:invite-to-team
|
||||
[:map
|
||||
[:invited-by ::sm/text]
|
||||
[:team ::sm/text]
|
||||
[:token ::sm/text]])
|
||||
|
||||
(def invite-to-team
|
||||
"Teams member invitation email."
|
||||
(template-factory ::invite-to-team))
|
||||
(template-factory
|
||||
:id ::invite-to-team
|
||||
:schema schema:invite-to-team))
|
||||
|
||||
|
||||
(s/def ::email.join-team/invited-by ::us/string)
|
||||
(s/def ::email.join-team/team ::us/string)
|
||||
(s/def ::email.join-team/team-id ::us/uuid)
|
||||
|
||||
(s/def ::join-team
|
||||
(s/keys :req-un [::email.join-team/invited-by
|
||||
::email.join-team/team-id
|
||||
::email.join-team/team]))
|
||||
(def ^:private schema:join-team
|
||||
[:map
|
||||
[:invited-by ::sm/text]
|
||||
[:team ::sm/text]
|
||||
[:team-id ::sm/uuid]])
|
||||
|
||||
(def join-team
|
||||
"Teams member joined after request email."
|
||||
(template-factory ::join-team))
|
||||
(template-factory
|
||||
:id ::join-team
|
||||
:schema schema:join-team))
|
||||
|
||||
(s/def ::email.request-team-access/requested-by ::us/string)
|
||||
(s/def ::email.request-team-access/requested-by-email ::us/string)
|
||||
(s/def ::email.request-team-access/team-name ::us/string)
|
||||
(s/def ::email.request-team-access/team-id ::us/uuid)
|
||||
(s/def ::email.request-team-access/file-name ::us/string)
|
||||
(s/def ::email.request-team-access/file-id ::us/uuid)
|
||||
(s/def ::email.request-team-access/page-id ::us/uuid)
|
||||
|
||||
(s/def ::request-file-access
|
||||
(s/keys :req-un [::email.request-team-access/requested-by
|
||||
::email.request-team-access/requested-by-email
|
||||
::email.request-team-access/team-name
|
||||
::email.request-team-access/team-id
|
||||
::email.request-team-access/file-name
|
||||
::email.request-team-access/file-id
|
||||
::email.request-team-access/page-id]))
|
||||
(def ^:private schema:request-file-access
|
||||
[:map
|
||||
[:requested-by ::sm/text]
|
||||
[:requested-by-email ::sm/text]
|
||||
[:team-name ::sm/text]
|
||||
[:team-id ::sm/uuid]
|
||||
[:file-name ::sm/text]
|
||||
[:file-id ::sm/uuid]
|
||||
[:page-id ::sm/uuid]])
|
||||
|
||||
(def request-file-access
|
||||
"File access request email."
|
||||
(template-factory ::request-file-access))
|
||||
|
||||
|
||||
(s/def ::request-file-access-yourpenpot
|
||||
(s/keys :req-un [::email.request-team-access/requested-by
|
||||
::email.request-team-access/requested-by-email
|
||||
::email.request-team-access/team-name
|
||||
::email.request-team-access/team-id
|
||||
::email.request-team-access/file-name
|
||||
::email.request-team-access/file-id
|
||||
::email.request-team-access/page-id]))
|
||||
(template-factory
|
||||
:id ::request-file-access
|
||||
:schema schema:request-file-access))
|
||||
|
||||
(def request-file-access-yourpenpot
|
||||
"File access on Your Penpot request email."
|
||||
(template-factory ::request-file-access-yourpenpot))
|
||||
|
||||
(s/def ::request-file-access-yourpenpot-view
|
||||
(s/keys :req-un [::email.request-team-access/requested-by
|
||||
::email.request-team-access/requested-by-email
|
||||
::email.request-team-access/team-name
|
||||
::email.request-team-access/team-id
|
||||
::email.request-team-access/file-name
|
||||
::email.request-team-access/file-id
|
||||
::email.request-team-access/page-id]))
|
||||
(template-factory
|
||||
:id ::request-file-access-yourpenpot
|
||||
:schema schema:request-file-access))
|
||||
|
||||
(def request-file-access-yourpenpot-view
|
||||
"File access on Your Penpot view mode request email."
|
||||
(template-factory ::request-file-access-yourpenpot-view))
|
||||
(template-factory
|
||||
:id ::request-file-access-yourpenpot-view
|
||||
:schema schema:request-file-access))
|
||||
|
||||
(s/def ::request-team-access
|
||||
(s/keys :req-un [::email.request-team-access/requested-by
|
||||
::email.request-team-access/requested-by-email
|
||||
::email.request-team-access/team-name
|
||||
::email.request-team-access/team-id]))
|
||||
(def ^:private schema:request-team-access
|
||||
[:map
|
||||
[:requested-by ::sm/text]
|
||||
[:requested-by-email ::sm/text]
|
||||
[:team-name ::sm/text]
|
||||
[:team-id ::sm/uuid]])
|
||||
|
||||
(def request-team-access
|
||||
"Team access request email."
|
||||
(template-factory ::request-team-access))
|
||||
|
||||
(template-factory
|
||||
:id ::request-team-access
|
||||
:schema schema:request-team-access))
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; BOUNCE/COMPLAINS HELPERS
|
||||
|
||||
@@ -41,6 +41,7 @@
|
||||
[app.common.types.shape.path :as ctsp]
|
||||
[app.common.types.shape.text :as ctsx]
|
||||
[app.common.uuid :as uuid]
|
||||
[app.config :as cf]
|
||||
[app.db :as db]
|
||||
[app.db.sql :as sql]
|
||||
[app.features.fdata :as fdata]
|
||||
@@ -1298,7 +1299,7 @@
|
||||
(let [[mtype data] (parse-datauri href)
|
||||
size (alength ^bytes data)
|
||||
path (tmp/tempfile :prefix "penpot.media.download.")
|
||||
written (io/write-to-file! data path :size size)]
|
||||
written (io/write* path data :size size)]
|
||||
|
||||
(when (not= written size)
|
||||
(ex/raise :type :internal
|
||||
@@ -1381,7 +1382,9 @@
|
||||
(defn get-optimized-svg
|
||||
[sid]
|
||||
(let [svg-text (get-sobject-content sid)
|
||||
svg-text (svgo/optimize *system* svg-text)]
|
||||
svg-text (if (contains? cf/flags :backend-svgo)
|
||||
(svgo/optimize *system* svg-text)
|
||||
svg-text)]
|
||||
(csvg/parse svg-text)))
|
||||
|
||||
(def base-path "/data/cache")
|
||||
@@ -1484,11 +1487,6 @@
|
||||
:file-id (str (:id fdata))
|
||||
:id (str (:id mobj)))
|
||||
|
||||
(instance? org.graalvm.polyglot.PolyglotException cause)
|
||||
(l/inf :hint "skip processing media object: invalid svg found"
|
||||
:file-id (str (:id fdata))
|
||||
:id (str (:id mobj)))
|
||||
|
||||
(= (:type edata) :not-found)
|
||||
(l/inf :hint "skip processing media object: underlying object does not exist"
|
||||
:file-id (str (:id fdata))
|
||||
@@ -1747,8 +1745,8 @@
|
||||
(fn [system]
|
||||
(binding [*system* system]
|
||||
(when (string? label)
|
||||
(fsnap/take-file-snapshot! system {:file-id file-id
|
||||
:label (str "migration/" label)}))
|
||||
(fsnap/create-file-snapshot! system nil file-id (str "migration/" label)))
|
||||
|
||||
(let [file (get-file system file-id)
|
||||
file (process-file! system file :validate? validate?)]
|
||||
|
||||
|
||||
@@ -9,6 +9,7 @@
|
||||
[app.auth.oidc :as-alias oidc]
|
||||
[app.common.data :as d]
|
||||
[app.common.logging :as l]
|
||||
[app.common.schema :as sm]
|
||||
[app.common.transit :as t]
|
||||
[app.db :as-alias db]
|
||||
[app.http.access-token :as actoken]
|
||||
@@ -24,14 +25,13 @@
|
||||
[app.rpc :as-alias rpc]
|
||||
[app.rpc.doc :as-alias rpc.doc]
|
||||
[app.setup :as-alias setup]
|
||||
[clojure.spec.alpha :as s]
|
||||
[integrant.core :as ig]
|
||||
[promesa.exec :as px]
|
||||
[reitit.core :as r]
|
||||
[reitit.middleware :as rr]
|
||||
[ring.request :as rreq]
|
||||
[ring.response :as-alias rres]
|
||||
[yetti.adapter :as yt]))
|
||||
[yetti.adapter :as yt]
|
||||
[yetti.request :as yreq]
|
||||
[yetti.response :as-alias yres]))
|
||||
|
||||
(declare router-handler)
|
||||
|
||||
@@ -39,31 +39,28 @@
|
||||
;; HTTP SERVER
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
(s/def ::handler fn?)
|
||||
(s/def ::router some?)
|
||||
(s/def ::port integer?)
|
||||
(s/def ::host string?)
|
||||
(s/def ::name string?)
|
||||
(def default-params
|
||||
{::port 6060
|
||||
::host "0.0.0.0"
|
||||
::max-body-size (* 1024 1024 30) ; default 30 MiB
|
||||
::max-multipart-body-size (* 1024 1024 120)}) ; default 120 MiB
|
||||
|
||||
(s/def ::max-body-size integer?)
|
||||
(s/def ::max-multipart-body-size integer?)
|
||||
(s/def ::io-threads integer?)
|
||||
(defmethod ig/expand-key ::server
|
||||
[k v]
|
||||
{k (merge default-params (d/without-nils v))})
|
||||
|
||||
(defmethod ig/prep-key ::server
|
||||
[_ cfg]
|
||||
(merge {::port 6060
|
||||
::host "0.0.0.0"
|
||||
::max-body-size (* 1024 1024 30) ; default 30 MiB
|
||||
::max-multipart-body-size (* 1024 1024 120)} ; default 120 MiB
|
||||
(d/without-nils cfg)))
|
||||
(def ^:private schema:server-params
|
||||
[:map
|
||||
[::port ::sm/int]
|
||||
[::host ::sm/text]
|
||||
[::max-body-size {:optional true} ::sm/int]
|
||||
[::max-multipart-body-size {:optional true} ::sm/int]
|
||||
[::router {:optional true} [:fn r/router?]]
|
||||
[::handler {:optional true} ::sm/fn]])
|
||||
|
||||
(defmethod ig/pre-init-spec ::server [_]
|
||||
(s/keys :req [::port ::host]
|
||||
:opt [::max-body-size
|
||||
::max-multipart-body-size
|
||||
::router
|
||||
::handler
|
||||
::io-threads]))
|
||||
(defmethod ig/assert-key ::server
|
||||
[_ params]
|
||||
(assert (sm/check schema:server-params params)))
|
||||
|
||||
(defmethod ig/init-key ::server
|
||||
[_ {:keys [::handler ::router ::host ::port] :as cfg}]
|
||||
@@ -100,12 +97,12 @@
|
||||
|
||||
(defn- not-found-handler
|
||||
[_]
|
||||
{::rres/status 404})
|
||||
{::yres/status 404})
|
||||
|
||||
(defn- router-handler
|
||||
[router]
|
||||
(letfn [(resolve-handler [request]
|
||||
(if-let [match (r/match-by-path router (rreq/path request))]
|
||||
(if-let [match (r/match-by-path router (yreq/path request))]
|
||||
(let [params (:path-params match)
|
||||
result (:result match)
|
||||
handler (or (:handler result) not-found-handler)
|
||||
@@ -114,11 +111,11 @@
|
||||
(partial not-found-handler request)))
|
||||
|
||||
(on-error [cause request]
|
||||
(let [{:keys [::rres/body] :as response} (errors/handle cause request)]
|
||||
(let [{:keys [::yres/body] :as response} (errors/handle cause request)]
|
||||
(cond-> response
|
||||
(map? body)
|
||||
(-> (update ::rres/headers assoc "content-type" "application/transit+json")
|
||||
(assoc ::rres/body (t/encode-str body {:type :json-verbose}))))))]
|
||||
(-> (update ::yres/headers assoc "content-type" "application/transit+json")
|
||||
(assoc ::yres/body (t/encode-str body {:type :json-verbose}))))))]
|
||||
|
||||
(fn [request]
|
||||
(let [handler (resolve-handler request)]
|
||||
@@ -131,18 +128,26 @@
|
||||
;; HTTP ROUTER
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
(defmethod ig/pre-init-spec ::router [_]
|
||||
(s/keys :req [::session/manager
|
||||
::ws/routes
|
||||
::rpc/routes
|
||||
::rpc.doc/routes
|
||||
::oidc/routes
|
||||
::setup/props
|
||||
::assets/routes
|
||||
::debug/routes
|
||||
::db/pool
|
||||
::mtx/routes
|
||||
::awsns/routes]))
|
||||
(def ^:private schema:routes
|
||||
[:vector :any])
|
||||
|
||||
(def ^:private schema:router-params
|
||||
[:map
|
||||
[::ws/routes schema:routes]
|
||||
[::rpc/routes schema:routes]
|
||||
[::rpc.doc/routes schema:routes]
|
||||
[::oidc/routes schema:routes]
|
||||
[::assets/routes schema:routes]
|
||||
[::debug/routes schema:routes]
|
||||
[::mtx/routes schema:routes]
|
||||
[::awsns/routes schema:routes]
|
||||
::session/manager
|
||||
::setup/props
|
||||
::db/pool])
|
||||
|
||||
(defmethod ig/assert-key ::router
|
||||
[_ params]
|
||||
(assert (sm/check schema:router-params params)))
|
||||
|
||||
(defmethod ig/init-key ::router
|
||||
[_ cfg]
|
||||
|
||||
@@ -12,13 +12,13 @@
|
||||
[app.main :as-alias main]
|
||||
[app.setup :as-alias setup]
|
||||
[app.tokens :as tokens]
|
||||
[ring.request :as rreq]))
|
||||
[yetti.request :as yreq]))
|
||||
|
||||
(def header-re #"^Token\s+(.*)")
|
||||
|
||||
(defn- get-token
|
||||
[request]
|
||||
(some->> (rreq/get-header request "authorization")
|
||||
(some->> (yreq/get-header request "authorization")
|
||||
(re-matches header-re)
|
||||
(second)))
|
||||
|
||||
|
||||
@@ -9,14 +9,12 @@
  (:require
   [app.common.data :as d]
   [app.common.exceptions :as ex]
   [app.common.spec :as us]
   [app.common.uri :as u]
   [app.db :as db]
   [app.storage :as sto]
   [app.util.time :as dt]
   [clojure.spec.alpha :as s]
   [integrant.core :as ig]
   [ring.response :as-alias rres]))
   [yetti.response :as-alias yres]))

(def ^:private cache-max-age
  (dt/duration {:hours 24}))
@@ -37,8 +35,8 @@
(defn- serve-object-from-s3
  [{:keys [::sto/storage] :as cfg} obj]
  (let [{:keys [host port] :as url} (sto/get-object-url storage obj {:max-age signature-max-age})]
    {::rres/status 307
     ::rres/headers {"location" (str url)
    {::yres/status 307
     ::yres/headers {"location" (str url)
                     "x-host" (cond-> host port (str ":" port))
                     "x-mtype" (-> obj meta :content-type)
                     "cache-control" (str "max-age=" (inst-ms cache-max-age))}}))
@@ -51,8 +49,8 @@
        headers {"x-accel-redirect" (:path purl)
                 "content-type" (:content-type mdata)
                 "cache-control" (str "max-age=" (inst-ms cache-max-age))}]
    {::rres/status 204
     ::rres/headers headers}))
    {::yres/status 204
     ::yres/headers headers}))

(defn- serve-object
  "Helper function that returns the appropriate response depending on
@@ -69,7 +67,7 @@
        obj (sto/get-object storage id)]
    (if obj
      (serve-object cfg obj)
      {::rres/status 404})))
      {::yres/status 404})))

(defn- generic-handler
  "A generic handler helper/common code for file-media based handlers."
@@ -80,7 +78,7 @@
        sobj (sto/get-object storage (kf mobj))]
    (if sobj
      (serve-object cfg sobj)
      {::rres/status 404})))
      {::yres/status 404})))

(defn file-objects-handler
  "Handler that serves storage objects by file media id."
@@ -95,11 +93,10 @@

;; --- Initialization

(s/def ::path ::us/string)
(s/def ::routes vector?)

(defmethod ig/pre-init-spec ::routes [_]
  (s/keys :req [::sto/storage ::path]))
(defmethod ig/assert-key ::routes
  [_ params]
  (assert (sto/valid-storage? (::sto/storage params)) "expected valid storage instance")
  (assert (string? (::path params))))

(defmethod ig/init-key ::routes
  [_ cfg]

@@ -10,6 +10,7 @@
   [app.common.exceptions :as ex]
   [app.common.logging :as l]
   [app.common.pprint :as pp]
   [app.common.schema :as sm]
   [app.db :as db]
   [app.db.sql :as sql]
   [app.http.client :as http]
@@ -18,29 +19,29 @@
   [app.tokens :as tokens]
   [app.worker :as-alias wrk]
   [clojure.data.json :as j]
   [clojure.spec.alpha :as s]
   [cuerdas.core :as str]
   [integrant.core :as ig]
   [promesa.exec :as px]
   [ring.request :as rreq]
   [ring.response :as-alias rres]))
   [yetti.request :as yreq]
   [yetti.response :as-alias yres]))

(declare parse-json)
(declare handle-request)
(declare parse-notification)
(declare process-report)

(defmethod ig/pre-init-spec ::routes [_]
  (s/keys :req [::http/client
                ::setup/props
                ::db/pool]))
(defmethod ig/assert-key ::routes
  [_ params]
  (assert (http/client? (::http/client params)) "expect a valid http client")
  (assert (sm/valid? ::setup/props (::setup/props params)) "expected valid setup props")
  (assert (db/pool? (::db/pool params)) "expect valid database pool"))

(defmethod ig/init-key ::routes
  [_ cfg]
  (letfn [(handler [request]
            (let [data (-> request rreq/body slurp)]
            (let [data (-> request yreq/body slurp)]
              (px/run! :vthread (partial handle-request cfg data)))
            {::rres/status 200})]
            {::yres/status 200})]
    ["/sns" {:handler handler
             :allowed-methods #{:post}}]))

@@ -7,20 +7,20 @@
(ns app.http.client
  "Http client abstraction layer."
  (:require
   [app.common.spec :as us]
   [clojure.spec.alpha :as s]
   [app.common.schema :as sm]
   [integrant.core :as ig]
   [java-http-clj.core :as http]
   [promesa.core :as p])
  (:import
   java.net.http.HttpClient))

(s/def ::client #(instance? HttpClient %))
(s/def ::client-holder
  (s/keys :req [::client]))
(defn client?
  [o]
  (instance? HttpClient o))

(defmethod ig/pre-init-spec ::client [_]
  (s/keys :req []))
(sm/register!
 {:type ::client
  :pred client?})

(defmethod ig/init-key ::client
  [_ _]
@@ -30,7 +30,7 @@
(defn send!
  ([client req] (send! client req {}))
  ([client req {:keys [response-type sync?] :or {response-type :string sync? false}}]
   (us/assert! ::client client)
   (assert (client? client) "expected valid http client")
   (if sync?
     (http/send req {:client client :as response-type})
     (try

@@ -26,15 +26,14 @@
   [app.util.blob :as blob]
   [app.util.template :as tmpl]
   [app.util.time :as dt]
   [clojure.spec.alpha :as s]
   [cuerdas.core :as str]
   [datoteka.io :as io]
   [emoji.core :as emj]
   [integrant.core :as ig]
   [markdown.core :as md]
   [markdown.transformers :as mdt]
   [ring.request :as rreq]
   [ring.response :as rres]))
   [yetti.request :as yreq]
   [yetti.response :as yres]))

;; (selmer.parser/cache-off!)

@@ -44,9 +43,9 @@

(defn index-handler
  [_cfg _request]
  {::rres/status 200
   ::rres/headers {"content-type" "text/html"}
   ::rres/body (-> (io/resource "app/templates/debug.tmpl")
  {::yres/status 200
   ::yres/headers {"content-type" "text/html"}
   ::yres/body (-> (io/resource "app/templates/debug.tmpl")
                   (tmpl/render {:version (:full cf/version)}))})

;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
@@ -56,17 +55,17 @@
(defn prepare-response
  [body]
  (let [headers {"content-type" "application/transit+json"}]
    {::rres/status 200
     ::rres/body body
     ::rres/headers headers}))
    {::yres/status 200
     ::yres/body body
     ::yres/headers headers}))

(defn prepare-download-response
  [body filename]
  (let [headers {"content-disposition" (str "attachment; filename=" filename)
                 "content-type" "application/octet-stream"}]
    {::rres/status 200
     ::rres/body body
     ::rres/headers headers}))
    {::yres/status 200
     ::yres/body body
     ::yres/headers headers}))

(def sql:retrieve-range-of-changes
  "select revn, changes from file_change where file_id=? and revn >= ? and revn <= ? order by revn")
@@ -108,8 +107,8 @@
        (db/update! conn :file
                    {:data data}
                    {:id file-id})
        {::rres/status 201
         ::rres/body "OK CREATED"})))
        {::yres/status 201
         ::yres/body "OK CREATED"})))

      :else
      (prepare-response (blob/decode data))))))
@@ -123,7 +122,7 @@
|
||||
[{:keys [::db/pool]} {:keys [::session/profile-id params] :as request}]
|
||||
(let [profile (profile/get-profile pool profile-id)
|
||||
project-id (:default-project-id profile)
|
||||
data (some-> params :file :path io/read-as-bytes)]
|
||||
data (some-> params :file :path io/read*)]
|
||||
|
||||
(if (and data project-id)
|
||||
(let [fname (str "Imported file *: " (dt/now))
|
||||
@@ -138,8 +137,8 @@
|
||||
{:data data
|
||||
:deleted-at nil}
|
||||
{:id file-id})
|
||||
{::rres/status 200
|
||||
::rres/body "OK UPDATED"})
|
||||
{::yres/status 200
|
||||
::yres/body "OK UPDATED"})
|
||||
|
||||
(db/run! pool (fn [{:keys [::db/conn] :as cfg}]
|
||||
(create-file cfg {:id file-id
|
||||
@@ -149,15 +148,15 @@
|
||||
(db/update! conn :file
|
||||
{:data data}
|
||||
{:id file-id})
|
||||
{::rres/status 201
|
||||
::rres/body "OK CREATED"}))))
|
||||
{::yres/status 201
|
||||
::yres/body "OK CREATED"}))))
|
||||
|
||||
{::rres/status 500
|
||||
::rres/body "ERROR"})))
|
||||
{::yres/status 500
|
||||
::yres/body "ERROR"})))
|
||||
|
||||
(defn file-data-handler
|
||||
[cfg request]
|
||||
(case (rreq/method request)
|
||||
(case (yreq/method request)
|
||||
:get (retrieve-file-data cfg request)
|
||||
:post (upload-file-data cfg request)
|
||||
(ex/raise :type :http
|
||||
@@ -238,12 +237,12 @@
|
||||
1 (render-template-v1 report)
|
||||
2 (render-template-v2 report)
|
||||
3 (render-template-v3 report))]
|
||||
{::rres/status 200
|
||||
::rres/body result
|
||||
::rres/headers {"content-type" "text/html; charset=utf-8"
|
||||
{::yres/status 200
|
||||
::yres/body result
|
||||
::yres/headers {"content-type" "text/html; charset=utf-8"
|
||||
"x-robots-tag" "noindex"}})
|
||||
{::rres/status 404
|
||||
::rres/body "not found"})))
|
||||
{::yres/status 404
|
||||
::yres/body "not found"})))
|
||||
|
||||
(def sql:error-reports
|
||||
"SELECT id, created_at,
|
||||
@@ -256,10 +255,10 @@
|
||||
[{:keys [::db/pool]} _request]
|
||||
(let [items (->> (db/exec! pool [sql:error-reports])
|
||||
(map #(update % :created-at dt/format-instant :rfc1123)))]
|
||||
{::rres/status 200
|
||||
::rres/body (-> (io/resource "app/templates/error-list.tmpl")
|
||||
{::yres/status 200
|
||||
::yres/body (-> (io/resource "app/templates/error-list.tmpl")
|
||||
(tmpl/render {:items items}))
|
||||
::rres/headers {"content-type" "text/html; charset=utf-8"
|
||||
::yres/headers {"content-type" "text/html; charset=utf-8"
|
||||
"x-robots-tag" "noindex"}}))
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
@@ -295,15 +294,16 @@
|
||||
cfg (assoc cfg
|
||||
::bf.v1/overwrite false
|
||||
::bf.v1/profile-id profile-id
|
||||
::bf.v1/project-id project-id)]
|
||||
(bf.v1/import-files! cfg path)
|
||||
{::rres/status 200
|
||||
::rres/headers {"content-type" "text/plain"}
|
||||
::rres/body "OK CLONED"})
|
||||
::bf.v1/project-id project-id
|
||||
::bf.v1/input path)]
|
||||
(bf.v1/import-files! cfg)
|
||||
{::yres/status 200
|
||||
::yres/headers {"content-type" "text/plain"}
|
||||
::yres/body "OK CLONED"})
|
||||
|
||||
{::rres/status 200
|
||||
::rres/body (io/input-stream path)
|
||||
::rres/headers {"content-type" "application/octet-stream"
|
||||
{::yres/status 200
|
||||
::yres/body (io/input-stream path)
|
||||
::yres/headers {"content-type" "application/octet-stream"
|
||||
"content-disposition" (str "attachmen; filename=" (first file-ids) ".penpot")}}))))
|
||||
|
||||
|
||||
@@ -329,11 +329,12 @@
|
||||
::bf.v1/overwrite overwrite?
|
||||
::bf.v1/migrate migrate?
|
||||
::bf.v1/profile-id profile-id
|
||||
::bf.v1/project-id project-id)]
|
||||
(bf.v1/import-files! cfg path)
|
||||
{::rres/status 200
|
||||
::rres/headers {"content-type" "text/plain"}
|
||||
::rres/body "OK"})))
|
||||
::bf.v1/project-id project-id
|
||||
::bf.v1/input path)]
|
||||
(bf.v1/import-files! cfg)
|
||||
{::yres/status 200
|
||||
::yres/headers {"content-type" "text/plain"}
|
||||
::yres/body "OK"})))
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; ACTIONS
|
||||
@@ -363,34 +364,34 @@
|
||||
(db/update! conn :profile {:is-blocked true} {:id (:id profile)})
|
||||
(db/delete! conn :http-session {:profile-id (:id profile)})
|
||||
|
||||
{::rres/status 200
|
||||
::rres/headers {"content-type" "text/plain"}
|
||||
::rres/body (str/ffmt "PROFILE '%' BLOCKED" (:email profile))})
|
||||
{::yres/status 200
|
||||
::yres/headers {"content-type" "text/plain"}
|
||||
::yres/body (str/ffmt "PROFILE '%' BLOCKED" (:email profile))})
|
||||
|
||||
(contains? params :unblock)
|
||||
(do
|
||||
(db/update! conn :profile {:is-blocked false} {:id (:id profile)})
|
||||
{::rres/status 200
|
||||
::rres/headers {"content-type" "text/plain"}
|
||||
::rres/body (str/ffmt "PROFILE '%' UNBLOCKED" (:email profile))})
|
||||
{::yres/status 200
|
||||
::yres/headers {"content-type" "text/plain"}
|
||||
::yres/body (str/ffmt "PROFILE '%' UNBLOCKED" (:email profile))})
|
||||
|
||||
(contains? params :resend)
|
||||
(if (:is-blocked profile)
|
||||
{::rres/status 200
|
||||
::rres/headers {"content-type" "text/plain"}
|
||||
::rres/body "PROFILE ALREADY BLOCKED"}
|
||||
{::yres/status 200
|
||||
::yres/headers {"content-type" "text/plain"}
|
||||
::yres/body "PROFILE ALREADY BLOCKED"}
|
||||
(do
|
||||
(#'auth/send-email-verification! cfg profile)
|
||||
{::rres/status 200
|
||||
::rres/headers {"content-type" "text/plain"}
|
||||
::rres/body (str/ffmt "RESENDED FOR '%'" (:email profile))}))
|
||||
{::yres/status 200
|
||||
::yres/headers {"content-type" "text/plain"}
|
||||
::yres/body (str/ffmt "RESENDED FOR '%'" (:email profile))}))
|
||||
|
||||
:else
|
||||
(do
|
||||
(db/update! conn :profile {:is-active true} {:id (:id profile)})
|
||||
{::rres/status 200
|
||||
::rres/headers {"content-type" "text/plain"}
|
||||
::rres/body (str/ffmt "PROFILE '%' ACTIVATED" (:email profile))}))))))
|
||||
{::yres/status 200
|
||||
::yres/headers {"content-type" "text/plain"}
|
||||
::yres/body (str/ffmt "PROFILE '%' ACTIVATED" (:email profile))}))))))
|
||||
|
||||
|
||||
(defn- reset-file-version
|
||||
@@ -415,9 +416,9 @@
|
||||
|
||||
(db/tx-run! cfg srepl/process-file! file-id #(assoc % :version version))
|
||||
|
||||
{::rres/status 200
|
||||
::rres/headers {"content-type" "text/plain"}
|
||||
::rres/body "OK"}))
|
||||
{::yres/status 200
|
||||
::yres/headers {"content-type" "text/plain"}
|
||||
::yres/body "OK"}))
|
||||
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
@@ -429,13 +430,13 @@
|
||||
[{:keys [::db/pool]} _]
|
||||
(try
|
||||
(db/exec-one! pool ["select count(*) as count from server_prop;"])
|
||||
{::rres/status 200
|
||||
::rres/body "OK"}
|
||||
{::yres/status 200
|
||||
::yres/body "OK"}
|
||||
(catch Throwable cause
|
||||
(l/warn :hint "unable to execute query on health handler"
|
||||
:cause cause)
|
||||
{::rres/status 503
|
||||
::rres/body "KO"})))
|
||||
{::yres/status 503
|
||||
::yres/body "KO"})))
|
||||
|
||||
(defn changelog-handler
|
||||
[_ _]
|
||||
@@ -444,11 +445,11 @@
|
||||
(md->html [text]
|
||||
(md/md-to-html-string text :replacement-transformers (into [transform-emoji] mdt/transformer-vector)))]
|
||||
(if-let [clog (io/resource "changelog.md")]
|
||||
{::rres/status 200
|
||||
::rres/headers {"content-type" "text/html; charset=utf-8"}
|
||||
::rres/body (-> clog slurp md->html)}
|
||||
{::rres/status 404
|
||||
::rres/body "NOT FOUND"})))
|
||||
{::yres/status 200
|
||||
::yres/headers {"content-type" "text/html; charset=utf-8"}
|
||||
::yres/body (-> clog slurp md->html)}
|
||||
{::yres/status 404
|
||||
::yres/body "NOT FOUND"})))
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; INIT
|
||||
@@ -471,8 +472,10 @@
|
||||
(ex/raise :type :authentication
|
||||
:code :only-admins-allowed)))))})
|
||||
|
||||
(defmethod ig/pre-init-spec ::routes [_]
|
||||
(s/keys :req [::db/pool ::session/manager]))
|
||||
(defmethod ig/assert-key ::routes
|
||||
[_ params]
|
||||
(assert (db/pool? (::db/pool params)) "expected a valid database pool")
|
||||
(assert (session/manager? (::session/manager params)) "expected a valid session manager"))
|
||||
|
||||
(defmethod ig/init-key ::routes
|
||||
[_ {:keys [::db/pool] :as cfg}]
|
||||
|
||||
@@ -16,8 +16,8 @@
|
||||
[app.http.session :as-alias session]
|
||||
[app.util.inet :as inet]
|
||||
[clojure.spec.alpha :as s]
|
||||
[ring.request :as rreq]
|
||||
[ring.response :as rres]))
|
||||
[yetti.request :as yreq]
|
||||
[yetti.response :as yres]))
|
||||
|
||||
(defn request->context
|
||||
"Extracts error report relevant context data from request."
|
||||
@@ -29,10 +29,10 @@
|
||||
{:request/path (:path request)
|
||||
:request/method (:method request)
|
||||
:request/params (:params request)
|
||||
:request/user-agent (rreq/get-header request "user-agent")
|
||||
:request/user-agent (yreq/get-header request "user-agent")
|
||||
:request/ip-addr (inet/parse-request request)
|
||||
:request/profile-id (:uid claims)
|
||||
:version/frontend (or (rreq/get-header request "x-frontend-version") "unknown")
|
||||
:version/frontend (or (yreq/get-header request "x-frontend-version") "unknown")
|
||||
:version/backend (:full cf/version)}))
|
||||
|
||||
|
||||
@@ -46,34 +46,34 @@
|
||||
|
||||
(defmethod handle-error :authentication
|
||||
[err _ _]
|
||||
{::rres/status 401
|
||||
::rres/body (ex-data err)})
|
||||
{::yres/status 401
|
||||
::yres/body (ex-data err)})
|
||||
|
||||
(defmethod handle-error :authorization
|
||||
[err _ _]
|
||||
{::rres/status 403
|
||||
::rres/body (ex-data err)})
|
||||
{::yres/status 403
|
||||
::yres/body (ex-data err)})
|
||||
|
||||
(defmethod handle-error :restriction
|
||||
[err _ _]
|
||||
(let [{:keys [code] :as data} (ex-data err)]
|
||||
(if (= code :method-not-allowed)
|
||||
{::rres/status 405
|
||||
::rres/body data}
|
||||
{::rres/status 400
|
||||
::rres/body data})))
|
||||
{::yres/status 405
|
||||
::yres/body data}
|
||||
{::yres/status 400
|
||||
::yres/body data})))
|
||||
|
||||
(defmethod handle-error :rate-limit
|
||||
[err _ _]
|
||||
(let [headers (-> err ex-data ::http/headers)]
|
||||
{::rres/status 429
|
||||
::rres/headers headers}))
|
||||
{::yres/status 429
|
||||
::yres/headers headers}))
|
||||
|
||||
(defmethod handle-error :concurrency-limit
|
||||
[err _ _]
|
||||
(let [headers (-> err ex-data ::http/headers)]
|
||||
{::rres/status 429
|
||||
::rres/headers headers}))
|
||||
{::yres/status 429
|
||||
::yres/headers headers}))
|
||||
|
||||
(defmethod handle-error :validation
|
||||
[err request parent-cause]
|
||||
@@ -84,22 +84,26 @@
|
||||
(= code :schema-validation)
|
||||
(= code :data-validation))
|
||||
(let [explain (ex/explain data)]
|
||||
{::rres/status 400
|
||||
::rres/body (-> data
|
||||
{::yres/status 400
|
||||
::yres/body (-> data
|
||||
(dissoc ::s/problems ::s/value ::s/spec ::sm/explain)
|
||||
(cond-> explain (assoc :explain explain)))})
|
||||
|
||||
(= code :vern-conflict)
|
||||
{::yres/status 409 ;; 409 - Conflict
|
||||
::yres/body data}
|
||||
|
||||
(= code :request-body-too-large)
|
||||
{::rres/status 413 ::rres/body data}
|
||||
{::yres/status 413 ::yres/body data}
|
||||
|
||||
(= code :invalid-image)
|
||||
(binding [l/*context* (request->context request)]
|
||||
(let [cause (or parent-cause err)]
|
||||
(l/warn :hint "unexpected error on processing image" :cause cause)
|
||||
{::rres/status 400 ::rres/body data}))
|
||||
{::yres/status 400 ::yres/body data}))
|
||||
|
||||
:else
|
||||
{::rres/status 400 ::rres/body data})))
|
||||
{::yres/status 400 ::yres/body data})))
|
||||
|
||||
(defmethod handle-error :assertion
|
||||
[error request parent-cause]
|
||||
@@ -110,46 +114,47 @@
|
||||
(= code :data-validation)
|
||||
(let [explain (ex/explain data)]
|
||||
(l/error :hint "data assertion error" :cause cause)
|
||||
{::rres/status 500
|
||||
::rres/body {:type :server-error
|
||||
:code :assertion
|
||||
:data (-> data
|
||||
(dissoc ::sm/explain)
|
||||
(cond-> explain (assoc :explain explain)))}})
|
||||
{::yres/status 500
|
||||
::yres/body (-> data
|
||||
(dissoc ::sm/explain)
|
||||
(cond-> explain (assoc :explain explain))
|
||||
(assoc :type :server-error)
|
||||
(assoc :code :assertion))})
|
||||
|
||||
(= code :spec-validation)
|
||||
(let [explain (ex/explain data)]
|
||||
(l/error :hint "spec assertion error" :cause cause)
|
||||
{::rres/status 500
|
||||
::rres/body {:type :server-error
|
||||
:code :assertion
|
||||
:data (-> data
|
||||
(dissoc ::s/problems ::s/value ::s/spec)
|
||||
(cond-> explain (assoc :explain explain)))}})
|
||||
{::yres/status 500
|
||||
::yres/body (-> data
|
||||
(dissoc ::s/problems ::s/value ::s/spec)
|
||||
(cond-> explain (assoc :explain explain))
|
||||
(assoc :type :server-error)
|
||||
(assoc :code :assertion))})
|
||||
|
||||
:else
|
||||
(do
|
||||
(l/error :hint "assertion error" :cause cause)
|
||||
{::rres/status 500
|
||||
::rres/body {:type :server-error
|
||||
:code :assertion
|
||||
:data data}})))))
|
||||
{::yres/status 500
|
||||
::yres/body (-> data
|
||||
(assoc :type :server-error)
|
||||
(assoc :code :assertion))})))))
|
||||
|
||||
(defmethod handle-error :not-found
|
||||
[err _ _]
|
||||
{::rres/status 404
|
||||
::rres/body (ex-data err)})
|
||||
{::yres/status 404
|
||||
::yres/body (ex-data err)})
|
||||
|
||||
(defmethod handle-error :internal
|
||||
[error request parent-cause]
|
||||
(binding [l/*context* (request->context request)]
|
||||
(let [cause (or parent-cause error)]
|
||||
(let [cause (or parent-cause error)
|
||||
data (ex-data error)]
|
||||
(l/error :hint "internal error" :cause cause)
|
||||
{::rres/status 500
|
||||
::rres/body {:type :server-error
|
||||
:code :unhandled
|
||||
:hint (ex-message error)
|
||||
:data (ex-data error)}})))
|
||||
{::yres/status 500
|
||||
::yres/body (-> data
|
||||
(assoc :type :server-error)
|
||||
(update :code #(or % :unhandled))
|
||||
(assoc :hint (ex-message error)))})))
|
||||
|
||||
(defmethod handle-error :default
|
||||
[error request parent-cause]
|
||||
@@ -173,20 +178,20 @@
|
||||
:cause cause)
|
||||
(cond
|
||||
(= state "57014")
|
||||
{::rres/status 504
|
||||
::rres/body {:type :server-error
|
||||
{::yres/status 504
|
||||
::yres/body {:type :server-error
|
||||
:code :statement-timeout
|
||||
:hint (ex-message error)}}
|
||||
|
||||
(= state "25P03")
|
||||
{::rres/status 504
|
||||
::rres/body {:type :server-error
|
||||
{::yres/status 504
|
||||
::yres/body {:type :server-error
|
||||
:code :idle-in-transaction-timeout
|
||||
:hint (ex-message error)}}
|
||||
|
||||
:else
|
||||
{::rres/status 500
|
||||
::rres/body {:type :server-error
|
||||
{::yres/status 500
|
||||
::yres/body {:type :server-error
|
||||
:code :unexpected
|
||||
:hint (ex-message error)
|
||||
:state state}}))))
|
||||
@@ -200,25 +205,25 @@
|
||||
(nil? edata)
|
||||
(binding [l/*context* (request->context request)]
|
||||
(l/error :hint "unexpected error" :cause cause)
|
||||
{::rres/status 500
|
||||
::rres/body {:type :server-error
|
||||
{::yres/status 500
|
||||
::yres/body {:type :server-error
|
||||
:code :unexpected
|
||||
:hint (ex-message error)}})
|
||||
|
||||
:else
|
||||
(binding [l/*context* (request->context request)]
|
||||
(l/error :hint "unhandled error" :cause cause)
|
||||
{::rres/status 500
|
||||
::rres/body {:type :server-error
|
||||
:code :unhandled
|
||||
:hint (ex-message error)
|
||||
:data edata}}))))
|
||||
{::yres/status 500
|
||||
::yres/body (-> edata
|
||||
(assoc :type :server-error)
|
||||
(update :code #(or % :unhandled))
|
||||
(assoc :hint (ex-message error)))}))))
|
||||
|
||||
(defmethod handle-exception java.io.IOException
|
||||
[cause _ _]
|
||||
(l/wrn :hint "io exception" :cause cause)
|
||||
{::rres/status 500
|
||||
::rres/body {:type :server-error
|
||||
{::yres/status 500
|
||||
::yres/body {:type :server-error
|
||||
:code :io-exception
|
||||
:hint (ex-message cause)}})
|
||||
|
||||
@@ -244,4 +249,4 @@
|
||||
|
||||
(defn handle'
|
||||
[cause request]
|
||||
(::rres/body (handle cause request)))
|
||||
(::yres/body (handle cause request)))
|
||||
|
||||
@@ -15,10 +15,10 @@
|
||||
[app.http.errors :as errors]
|
||||
[app.util.pointer-map :as pmap]
|
||||
[cuerdas.core :as str]
|
||||
[ring.request :as rreq]
|
||||
[ring.response :as rres]
|
||||
[yetti.adapter :as yt]
|
||||
[yetti.middleware :as ymw])
|
||||
[yetti.middleware :as ymw]
|
||||
[yetti.request :as yreq]
|
||||
[yetti.response :as yres])
|
||||
(:import
|
||||
io.undertow.server.RequestTooBigException
|
||||
java.io.InputStream
|
||||
@@ -37,17 +37,17 @@
|
||||
(defn- get-reader
|
||||
^java.io.BufferedReader
|
||||
[request]
|
||||
(let [^InputStream body (rreq/body request)]
|
||||
(let [^InputStream body (yreq/body request)]
|
||||
(java.io.BufferedReader.
|
||||
(java.io.InputStreamReader. body))))
|
||||
|
||||
(defn wrap-parse-request
|
||||
[handler]
|
||||
(letfn [(process-request [request]
|
||||
(let [header (rreq/get-header request "content-type")]
|
||||
(let [header (yreq/get-header request "content-type")]
|
||||
(cond
|
||||
(str/starts-with? header "application/transit+json")
|
||||
(with-open [^InputStream is (rreq/body request)]
|
||||
(with-open [^InputStream is (yreq/body request)]
|
||||
(let [params (t/read! (t/reader is))]
|
||||
(-> request
|
||||
(assoc :body-params params)
|
||||
@@ -85,7 +85,7 @@
|
||||
(errors/handle cause request)))]
|
||||
|
||||
(fn [request]
|
||||
(if (= (rreq/method request) :post)
|
||||
(if (= (yreq/method request) :post)
|
||||
(try
|
||||
(-> request process-request handler)
|
||||
(catch Throwable cause
|
||||
@@ -113,57 +113,53 @@
|
||||
|
||||
(defn wrap-format-response
|
||||
[handler]
|
||||
(letfn [(transit-streamable-body [data opts]
|
||||
(reify rres/StreamableResponseBody
|
||||
(-write-body-to-stream [_ _ output-stream]
|
||||
(try
|
||||
(with-open [^OutputStream bos (buffered-output-stream output-stream buffer-size)]
|
||||
(let [tw (t/writer bos opts)]
|
||||
(t/write! tw data)))
|
||||
(catch java.io.IOException _)
|
||||
(catch Throwable cause
|
||||
(binding [l/*context* {:value data}]
|
||||
(l/error :hint "unexpected error on encoding response"
|
||||
:cause cause)))
|
||||
(finally
|
||||
(.close ^OutputStream output-stream))))))
|
||||
(letfn [(transit-streamable-body [data opts _ output-stream]
|
||||
(try
|
||||
(with-open [^OutputStream bos (buffered-output-stream output-stream buffer-size)]
|
||||
(let [tw (t/writer bos opts)]
|
||||
(t/write! tw data)))
|
||||
(catch java.io.IOException _)
|
||||
(catch Throwable cause
|
||||
(binding [l/*context* {:value data}]
|
||||
(l/error :hint "unexpected error on encoding response"
|
||||
:cause cause)))
|
||||
(finally
|
||||
(.close ^OutputStream output-stream))))
|
||||
|
||||
(json-streamable-body [data]
|
||||
(reify rres/StreamableResponseBody
|
||||
(-write-body-to-stream [_ _ output-stream]
|
||||
(try
|
||||
(let [encode (or (-> data meta :encode/json) identity)
|
||||
data (encode data)]
|
||||
(with-open [^OutputStream bos (buffered-output-stream output-stream buffer-size)]
|
||||
(with-open [^java.io.OutputStreamWriter writer (java.io.OutputStreamWriter. bos)]
|
||||
(json/write writer data :key-fn json/write-camel-key :value-fn write-json-value))))
|
||||
(catch java.io.IOException _)
|
||||
(catch Throwable cause
|
||||
(binding [l/*context* {:value data}]
|
||||
(l/error :hint "unexpected error on encoding response"
|
||||
:cause cause)))
|
||||
(finally
|
||||
(.close ^OutputStream output-stream))))))
|
||||
(json-streamable-body [data _ output-stream]
|
||||
(try
|
||||
(let [encode (or (-> data meta :encode/json) identity)
|
||||
data (encode data)]
|
||||
(with-open [^OutputStream bos (buffered-output-stream output-stream buffer-size)]
|
||||
(with-open [^java.io.OutputStreamWriter writer (java.io.OutputStreamWriter. bos)]
|
||||
(json/write writer data :key-fn json/write-camel-key :value-fn write-json-value))))
|
||||
(catch java.io.IOException _)
|
||||
(catch Throwable cause
|
||||
(binding [l/*context* {:value data}]
|
||||
(l/error :hint "unexpected error on encoding response"
|
||||
:cause cause)))
|
||||
(finally
|
||||
(.close ^OutputStream output-stream))))
|
||||
|
||||
(format-response-with-json [response _]
|
||||
(let [body (::rres/body response)]
|
||||
(let [body (::yres/body response)]
|
||||
(if (or (boolean? body) (coll? body))
|
||||
(-> response
|
||||
(update ::rres/headers assoc "content-type" "application/json")
|
||||
(assoc ::rres/body (json-streamable-body body)))
|
||||
(update ::yres/headers assoc "content-type" "application/json")
|
||||
(assoc ::yres/body (yres/stream-body (partial json-streamable-body body))))
|
||||
response)))
|
||||
|
||||
(format-response-with-transit [response request]
|
||||
(let [body (::rres/body response)]
|
||||
(let [body (::yres/body response)]
|
||||
(if (or (boolean? body) (coll? body))
|
||||
(let [qs (rreq/query request)
|
||||
(let [qs (yreq/query request)
|
||||
opts (if (or (contains? cf/flags :transit-readable-response)
|
||||
(str/includes? qs "transit_verbose"))
|
||||
{:type :json-verbose}
|
||||
{:type :json})]
|
||||
(-> response
|
||||
(update ::rres/headers assoc "content-type" "application/transit+json")
|
||||
(assoc ::rres/body (transit-streamable-body body opts))))
|
||||
(update ::yres/headers assoc "content-type" "application/transit+json")
|
||||
(assoc ::yres/body (yres/stream-body (partial transit-streamable-body body opts)))))
|
||||
response)))
|
||||
|
||||
(format-from-params [{:keys [query-params] :as request}]
|
||||
@@ -172,7 +168,7 @@
|
||||
|
||||
(format-response [response request]
|
||||
(let [accept (or (format-from-params request)
|
||||
(rreq/get-header request "accept"))]
|
||||
(yreq/get-header request "accept"))]
|
||||
(cond
|
||||
(or (= accept "application/transit+json")
|
||||
(str/includes? accept "application/transit+json"))
|
||||
@@ -221,11 +217,11 @@
|
||||
(defn wrap-cors
|
||||
[handler]
|
||||
(fn [request]
|
||||
(let [response (if (= (rreq/method request) :options)
|
||||
{::rres/status 200}
|
||||
(let [response (if (= (yreq/method request) :options)
|
||||
{::yres/status 200}
|
||||
(handler request))
|
||||
origin (rreq/get-header request "origin")]
|
||||
(update response ::rres/headers with-cors-headers origin))))
|
||||
origin (yreq/get-header request "origin")]
|
||||
(update response ::yres/headers with-cors-headers origin))))
|
||||
|
||||
(def cors
|
||||
{:name ::cors
|
||||
@@ -240,7 +236,7 @@
|
||||
(when-let [allowed (:allowed-methods data)]
|
||||
(fn [handler]
|
||||
(fn [request]
|
||||
(let [method (rreq/method request)]
|
||||
(let [method (yreq/method request)]
|
||||
(if (contains? allowed method)
|
||||
(handler request)
|
||||
{::rres/status 405}))))))})
|
||||
{::yres/status 405}))))))})
|
||||
|
||||
@@ -9,7 +9,7 @@
|
||||
(:require
|
||||
[app.common.data :as d]
|
||||
[app.common.logging :as l]
|
||||
[app.common.spec :as us]
|
||||
[app.common.schema :as sm]
|
||||
[app.common.uri :as u]
|
||||
[app.config :as cf]
|
||||
[app.db :as db]
|
||||
@@ -19,11 +19,9 @@
|
||||
[app.setup :as-alias setup]
|
||||
[app.tokens :as tokens]
|
||||
[app.util.time :as dt]
|
||||
[clojure.spec.alpha :as s]
|
||||
[cuerdas.core :as str]
|
||||
[integrant.core :as ig]
|
||||
[ring.request :as rreq]
|
||||
[yetti.request :as yrq]))
|
||||
[yetti.request :as yreq]))
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; DEFAULTS
|
||||
@@ -52,21 +50,32 @@
|
||||
(update! [_ data])
|
||||
(delete! [_ key]))
|
||||
|
||||
(s/def ::manager #(satisfies? ISessionManager %))
|
||||
(defn manager?
|
||||
[o]
|
||||
(satisfies? ISessionManager o))
|
||||
|
||||
(sm/register!
|
||||
{:type ::manager
|
||||
:pred manager?})
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; STORAGE IMPL
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
(s/def ::session-params
|
||||
(s/keys :req-un [::user-agent
|
||||
::profile-id
|
||||
::created-at]))
|
||||
(def ^:private schema:params
|
||||
[:map {:title "session-params"}
|
||||
[:user-agent ::sm/text]
|
||||
[:profile-id ::sm/uuid]
|
||||
[:created-at ::sm/inst]])
|
||||
|
||||
(def ^:private valid-params?
|
||||
(sm/validator schema:params))
|
||||
|
||||
(defn- prepare-session-params
|
||||
[key params]
|
||||
(us/assert! ::us/not-empty-string key)
|
||||
(us/assert! ::session-params params)
|
||||
(assert (string? key) "expected key to be a string")
|
||||
(assert (not (str/blank? key)) "expected key to be not empty")
|
||||
(assert (valid-params? params) "expected valid params")
|
||||
|
||||
{:user-agent (:user-agent params)
|
||||
:profile-id (:profile-id params)
|
||||
@@ -117,8 +126,9 @@
|
||||
(swap! cache dissoc token)
|
||||
nil))))
|
||||
|
||||
(defmethod ig/pre-init-spec ::manager [_]
|
||||
(s/keys :req [::db/pool]))
|
||||
(defmethod ig/assert-key ::manager
|
||||
[_ params]
|
||||
(assert (db/pool? (::db/pool params)) "expect valid database pool"))
|
||||
|
||||
(defmethod ig/init-key ::manager
|
||||
[_ {:keys [::db/pool]}]
|
||||
@@ -141,11 +151,11 @@
|
||||
|
||||
(defn create-fn
|
||||
[{:keys [::manager ::setup/props]} profile-id]
|
||||
(us/assert! ::manager manager)
|
||||
(us/assert! ::us/uuid profile-id)
|
||||
(assert (manager? manager) "expected valid session manager")
|
||||
(assert (uuid? profile-id) "expected valid uuid for profile-id")
|
||||
|
||||
(fn [request response]
|
||||
(let [uagent (rreq/get-header request "user-agent")
|
||||
(let [uagent (yreq/get-header request "user-agent")
|
||||
params {:profile-id profile-id
|
||||
:user-agent uagent
|
||||
:created-at (dt/now)}
|
||||
@@ -158,10 +168,10 @@
|
||||
|
||||
(defn delete-fn
|
||||
[{:keys [::manager]}]
|
||||
(us/assert! ::manager manager)
|
||||
(assert (manager? manager) "expected valid session manager")
|
||||
(fn [request response]
|
||||
(let [cname (cf/get :auth-token-cookie-name default-auth-token-cookie-name)
|
||||
cookie (yrq/get-cookie request cname)]
|
||||
cookie (yreq/get-cookie request cname)]
|
||||
(l/trace :hint "delete" :profile-id (:profile-id request))
|
||||
(some->> (:value cookie) (delete! manager))
|
||||
(-> response
|
||||
@@ -183,7 +193,7 @@
|
||||
(defn- get-token
|
||||
[request]
|
||||
(let [cname (cf/get :auth-token-cookie-name default-auth-token-cookie-name)
|
||||
cookie (some-> (yrq/get-cookie request cname) :value)]
|
||||
cookie (some-> (yreq/get-cookie request cname) :value)]
|
||||
(when-not (str/empty? cookie)
|
||||
cookie)))
|
||||
|
||||
@@ -199,7 +209,7 @@
|
||||
|
||||
(defn- wrap-soft-auth
|
||||
[handler {:keys [::manager ::setup/props]}]
|
||||
(us/assert! ::manager manager)
|
||||
(assert (manager? manager) "expected valid session manager")
|
||||
(letfn [(handle-request [request]
|
||||
(try
|
||||
(let [token (get-token request)
|
||||
@@ -217,7 +227,7 @@
|
||||
|
||||
(defn- wrap-authz
|
||||
[handler {:keys [::manager]}]
|
||||
(us/assert! ::manager manager)
|
||||
(assert (manager? manager) "expected valid session manager")
|
||||
(fn [request]
|
||||
(let [session (get-session manager (::token request))
|
||||
request (cond-> request
|
||||
@@ -308,16 +318,17 @@
|
||||
;; TASK: SESSION GC
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
(s/def ::tasks/max-age ::dt/duration)
|
||||
;; FIXME: MOVE
|
||||
|
||||
(defmethod ig/pre-init-spec ::tasks/gc [_]
|
||||
(s/keys :req [::db/pool]
|
||||
:opt [::tasks/max-age]))
|
||||
(defmethod ig/assert-key ::tasks/gc
|
||||
[_ params]
|
||||
(assert (db/pool? (::db/pool params)) "expected valid database pool")
|
||||
(assert (dt/duration? (::tasks/max-age params))))
|
||||
|
||||
(defmethod ig/prep-key ::tasks/gc
|
||||
[_ cfg]
|
||||
(defmethod ig/expand-key ::tasks/gc
|
||||
[k v]
|
||||
(let [max-age (cf/get :auth-token-cookie-max-age default-cookie-max-age)]
|
||||
(merge {::tasks/max-age max-age} (d/without-nils cfg))))
|
||||
{k (merge {::tasks/max-age max-age} (d/without-nils v))}))
|
||||
|
||||
(def ^:private
|
||||
sql:delete-expired
|
||||
|
||||
@@ -9,6 +9,7 @@
|
||||
(:refer-clojure :exclude [tap])
|
||||
(:require
|
||||
[app.common.data :as d]
|
||||
[app.common.exceptions :as ex]
|
||||
[app.common.logging :as l]
|
||||
[app.common.transit :as t]
|
||||
[app.http.errors :as errors]
|
||||
@@ -16,7 +17,7 @@
|
||||
[promesa.exec :as px]
|
||||
[promesa.exec.csp :as sp]
|
||||
[promesa.util :as pu]
|
||||
[ring.response :as rres])
|
||||
[yetti.response :as yres])
|
||||
(:import
|
||||
java.io.OutputStream))
|
||||
|
||||
@@ -49,24 +50,21 @@
|
||||
(defn response
|
||||
[handler & {:keys [buf] :or {buf 32} :as opts}]
|
||||
(fn [request]
|
||||
{::rres/headers default-headers
|
||||
::rres/status 200
|
||||
::rres/body (reify rres/StreamableResponseBody
|
||||
(-write-body-to-stream [_ _ output]
|
||||
(binding [events/*channel* (sp/chan :buf buf :xf (keep encode))]
|
||||
(let [listener (events/start-listener
|
||||
(partial write! output)
|
||||
(partial pu/close! output))]
|
||||
(try
|
||||
(let [result (handler)]
|
||||
(events/tap :end result))
|
||||
|
||||
(catch java.io.EOFException cause
|
||||
(events/tap :error (errors/handle' cause request)))
|
||||
(catch Throwable cause
|
||||
(l/err :hint "unexpected error on processing sse response"
|
||||
:cause cause)
|
||||
(events/tap :error (errors/handle' cause request)))
|
||||
(finally
|
||||
(sp/close! events/*channel*)
|
||||
(px/await! listener)))))))}))
|
||||
{::yres/headers default-headers
|
||||
::yres/status 200
|
||||
::yres/body (yres/stream-body
|
||||
(fn [_ output]
|
||||
(binding [events/*channel* (sp/chan :buf buf :xf (keep encode))]
|
||||
(let [listener (events/start-listener
|
||||
(partial write! output)
|
||||
(partial pu/close! output))]
|
||||
(try
|
||||
(let [result (handler)]
|
||||
(events/tap :end result))
|
||||
(catch Throwable cause
|
||||
(events/tap :error (errors/handle' cause request))
|
||||
(when-not (ex/instance? java.io.EOFException cause)
|
||||
(l/err :hint "unexpected error on processing sse response" :cause cause)))
|
||||
(finally
|
||||
(sp/close! events/*channel*)
|
||||
(px/await! listener)))))))}))
|
||||
|
||||
@@ -18,10 +18,8 @@
|
||||
[app.msgbus :as mbus]
|
||||
[app.util.time :as dt]
|
||||
[app.util.websocket :as ws]
|
||||
[clojure.spec.alpha :as s]
|
||||
[integrant.core :as ig]
|
||||
[promesa.exec.csp :as sp]
|
||||
[ring.websocket :as rws]
|
||||
[yetti.websocket :as yws]))
|
||||
|
||||
(def recv-labels
|
||||
@@ -113,7 +111,6 @@
|
||||
fsub (::file-subscription @state)
|
||||
tsub (::team-subscription @state)
|
||||
msg {:type :disconnect
|
||||
:subs-id profile-id
|
||||
:profile-id profile-id
|
||||
:session-id session-id}]
|
||||
|
||||
@@ -138,9 +135,7 @@
|
||||
(l/trace :fn "handle-message" :event "subscribe-team" :team-id team-id :conn-id id)
|
||||
(let [prev-subs (get @state ::team-subscription)
|
||||
channel (sp/chan :buf (sp/dropping-buffer 64)
|
||||
:xf (comp
|
||||
(remove #(= (:session-id %) session-id))
|
||||
(map #(assoc % :subs-id team-id))))]
|
||||
:xf (remove #(= (:session-id %) session-id)))]
|
||||
|
||||
(sp/pipe channel output-ch false)
|
||||
(mbus/sub! msgbus :topic team-id :chan channel)
|
||||
@@ -159,8 +154,7 @@
|
||||
(l/trace :fn "handle-message" :event "subscribe-file" :file-id file-id :conn-id id)
|
||||
(let [psub (::file-subscription @state)
|
||||
fch (sp/chan :buf (sp/dropping-buffer 64)
|
||||
:xf (comp (remove #(= (:session-id %) session-id))
|
||||
(map #(assoc % :subs-id file-id))))]
|
||||
:xf (remove #(= (:session-id %) session-id)))]
|
||||
|
||||
(let [subs {:file-id file-id :channel fch :topic file-id}]
|
||||
(swap! state assoc ::file-subscription subs))
|
||||
@@ -191,7 +185,6 @@
|
||||
;; Notify the rest of the participants of the new connection.
|
||||
(let [message {:type :join-file
|
||||
:file-id file-id
|
||||
:subs-id file-id
|
||||
:session-id session-id
|
||||
:profile-id profile-id}]
|
||||
(mbus/pub! msgbus :topic file-id :message message))))
|
||||
@@ -303,7 +296,7 @@
|
||||
:else
|
||||
(do
|
||||
(l/trace :hint "websocket request" :profile-id profile-id :session-id session-id)
|
||||
{::rws/listener (ws/listener request
|
||||
{::yws/listener (ws/listener request
|
||||
::ws/on-rcv-message (partial on-rcv-message cfg)
|
||||
::ws/on-snd-message (partial on-snd-message cfg)
|
||||
::ws/on-connect (partial on-connect cfg)
|
||||
@@ -311,13 +304,17 @@
|
||||
::profile-id profile-id
|
||||
::session-id session-id)}))))
|
||||
|
||||
(defmethod ig/pre-init-spec ::routes [_]
|
||||
(s/keys :req [::mbus/msgbus
|
||||
::mtx/metrics
|
||||
::db/pool
|
||||
::session/manager]))
|
||||
|
||||
(s/def ::routes vector?)
|
||||
(def ^:private schema:routes-params
|
||||
[:map
|
||||
::mbus/msgbus
|
||||
::mtx/metrics
|
||||
::db/pool
|
||||
::session/manager])
|
||||
|
||||
(defmethod ig/assert-key ::routes
|
||||
[_ params]
|
||||
(assert (sm/valid? schema:routes-params params)))
|
||||
|
||||
(defmethod ig/init-key ::routes
|
||||
[_ cfg]
|
||||
|
||||
@@ -10,7 +10,7 @@
|
||||
[app.common.data :as d]
|
||||
[app.common.data.macros :as dm]
|
||||
[app.common.logging :as l]
|
||||
[app.common.spec :as us]
|
||||
[app.common.schema :as sm]
|
||||
[app.common.uuid :as uuid]
|
||||
[app.config :as cf]
|
||||
[app.db :as db]
|
||||
@@ -25,9 +25,7 @@
|
||||
[app.util.services :as-alias sv]
|
||||
[app.util.time :as dt]
|
||||
[app.worker :as wrk]
|
||||
[clojure.spec.alpha :as s]
|
||||
[cuerdas.core :as str]
|
||||
[integrant.core :as ig]))
|
||||
[cuerdas.core :as str]))
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; HELPERS
|
||||
@@ -95,46 +93,28 @@
|
||||
;; --- SPECS
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; COLLECTOR
|
||||
;; COLLECTOR API
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
;; Defines a service that collects the audit/activity log using
|
||||
;; internal database. Later this audit log can be transferred to
|
||||
;; an external storage and data cleared.
|
||||
|
||||
(s/def ::profile-id ::us/uuid)
|
||||
(s/def ::name ::us/string)
|
||||
(s/def ::type ::us/string)
|
||||
(s/def ::props (s/map-of ::us/keyword any?))
|
||||
(s/def ::ip-addr ::us/string)
|
||||
(def ^:private schema:event
|
||||
[:map {:title "event"}
|
||||
[::type ::sm/text]
|
||||
[::name ::sm/text]
|
||||
[::profile-id ::sm/uuid]
|
||||
[::ip-addr {:optional true} ::sm/text]
|
||||
[::props {:optional true} [:map-of :keyword :any]]
|
||||
[::context {:optional true} [:map-of :keyword :any]]
|
||||
[::webhooks/event? {:optional true} ::sm/boolean]
|
||||
[::webhooks/batch-timeout {:optional true} ::dt/duration]
|
||||
[::webhooks/batch-key {:optional true}
|
||||
[:or ::sm/fn ::sm/text :keyword]]])
|
||||
|
||||
(s/def ::webhooks/event? ::us/boolean)
|
||||
(s/def ::webhooks/batch-timeout ::dt/duration)
|
||||
(s/def ::webhooks/batch-key
|
||||
(s/or :fn fn? :str string? :kw keyword?))
|
||||
|
||||
(s/def ::event
|
||||
(s/keys :req [::type ::name ::profile-id]
|
||||
:opt [::ip-addr
|
||||
::props
|
||||
::webhooks/event?
|
||||
::webhooks/batch-timeout
|
||||
::webhooks/batch-key]))
|
||||
|
||||
(s/def ::collector
|
||||
(s/keys :req [::wrk/executor ::db/pool]))
|
||||
|
||||
(defmethod ig/pre-init-spec ::collector [_]
|
||||
(s/keys :req [::db/pool ::wrk/executor]))
|
||||
|
||||
(defmethod ig/init-key ::collector
|
||||
[_ {:keys [::db/pool] :as cfg}]
|
||||
(cond
|
||||
(db/read-only? pool)
|
||||
(l/warn :hint "audit disabled (db is read-only)")
|
||||
|
||||
:else
|
||||
cfg))
|
||||
(def ^:private check-event
|
||||
(sm/check-fn schema:event))
|
||||
|
||||
(defn prepare-event
|
||||
[cfg mdata params result]
|
||||
@@ -273,12 +253,12 @@
|
||||
"Submit audit event to the collector."
|
||||
[cfg event]
|
||||
(try
|
||||
(let [event (d/without-nils event)
|
||||
(let [event (-> (d/without-nils event)
|
||||
(check-event))
|
||||
cfg (-> cfg
|
||||
(assoc ::rtry/when rtry/conflict-exception?)
|
||||
(assoc ::rtry/max-retries 6)
|
||||
(assoc ::rtry/label "persist-audit-log"))]
|
||||
(us/verify! ::event event)
|
||||
(rtry/invoke! cfg db/tx-run! handle-event! event))
|
||||
(catch Throwable cause
|
||||
(l/error :hint "unexpected error processing event" :cause cause))))
|
||||
@@ -289,8 +269,8 @@
|
||||
logic."
|
||||
[cfg event]
|
||||
(when (contains? cf/flags :audit-log)
|
||||
(let [event (d/without-nils event)]
|
||||
(us/verify! ::event event)
|
||||
(let [event (-> (d/without-nils event)
|
||||
(check-event))]
|
||||
(db/run! cfg (fn [cfg]
|
||||
(let [tnow (dt/now)
|
||||
params (-> (event->params event)
|
||||
|
||||
@@ -8,6 +8,7 @@
|
||||
(:require
|
||||
[app.common.exceptions :as ex]
|
||||
[app.common.logging :as l]
|
||||
[app.common.schema :as sm]
|
||||
[app.common.transit :as t]
|
||||
[app.common.uuid :as uuid]
|
||||
[app.config :as cf]
|
||||
@@ -16,7 +17,6 @@
|
||||
[app.setup :as-alias setup]
|
||||
[app.tokens :as tokens]
|
||||
[app.util.time :as dt]
|
||||
[clojure.spec.alpha :as s]
|
||||
[integrant.core :as ig]
|
||||
[lambdaisland.uri :as u]
|
||||
[promesa.exec :as px]))
|
||||
@@ -108,8 +108,15 @@
|
||||
(mark-archived! cfg rows)
|
||||
(count events)))))))
|
||||
|
||||
(defmethod ig/pre-init-spec ::handler [_]
|
||||
(s/keys :req [::db/pool ::setup/props ::http/client]))
|
||||
(def ^:private schema:handler-params
|
||||
[:map
|
||||
::db/pool
|
||||
::setup/props
|
||||
::http/client])
|
||||
|
||||
(defmethod ig/assert-key ::handler
|
||||
[_ params]
|
||||
(assert (sm/valid? schema:handler-params params) "valid params expected for handler"))
|
||||
|
||||
(defmethod ig/init-key ::handler
|
||||
[_ cfg]
|
||||
|
||||
@@ -8,7 +8,6 @@
|
||||
(:require
|
||||
[app.common.logging :as l]
|
||||
[app.db :as db]
|
||||
[clojure.spec.alpha :as s]
|
||||
[integrant.core :as ig]))
|
||||
|
||||
(def ^:private sql:clean-archived
|
||||
@@ -22,8 +21,9 @@
|
||||
(l/debug :hint "delete archived audit log entries" :deleted result)
|
||||
result))
|
||||
|
||||
(defmethod ig/pre-init-spec ::handler [_]
|
||||
(s/keys :req [::db/pool]))
|
||||
(defmethod ig/assert-key ::handler
|
||||
[_ params]
|
||||
(assert (db/pool? (::db/pool params)) "valid database pool expected"))
|
||||
|
||||
(defmethod ig/init-key ::handler
|
||||
[_ cfg]
|
||||
|
||||
@@ -12,7 +12,6 @@
|
||||
[app.common.logging :as l]
|
||||
[app.common.pprint :as pp]
|
||||
[app.common.schema :as sm]
|
||||
[app.common.spec :as us]
|
||||
[app.config :as cf]
|
||||
[app.db :as db]
|
||||
[clojure.spec.alpha :as s]
|
||||
@@ -38,7 +37,7 @@
|
||||
|
||||
(defn record->report
|
||||
[{:keys [::l/context ::l/message ::l/props ::l/logger ::l/level ::l/cause] :as record}]
|
||||
(us/assert! ::l/record record)
|
||||
(assert (l/valid-record? record) "expected valid log record")
|
||||
(if (or (instance? java.util.concurrent.CompletionException cause)
|
||||
(instance? java.util.concurrent.ExecutionException cause))
|
||||
(-> record
|
||||
@@ -91,8 +90,9 @@
|
||||
(catch Throwable cause
|
||||
(l/warn :hint "unexpected exception on database error logger" :cause cause))))
|
||||
|
||||
(defmethod ig/pre-init-spec ::reporter [_]
|
||||
(s/keys :req [::db/pool]))
|
||||
(defmethod ig/assert-key ::reporter
|
||||
[_ params]
|
||||
(assert (db/pool? (::db/pool params)) "expect valid database pool"))
|
||||
|
||||
(defmethod ig/init-key ::reporter
|
||||
[_ cfg]
|
||||
|
||||
@@ -9,12 +9,10 @@
|
||||
(:require
|
||||
[app.common.exceptions :as ex]
|
||||
[app.common.logging :as l]
|
||||
[app.common.spec :as us]
|
||||
[app.config :as cf]
|
||||
[app.http.client :as http]
|
||||
[app.loggers.database :as ldb]
|
||||
[app.util.json :as json]
|
||||
[clojure.spec.alpha :as s]
|
||||
[integrant.core :as ig]
|
||||
[promesa.exec :as px]
|
||||
[promesa.exec.csp :as sp]))
|
||||
@@ -54,7 +52,7 @@
|
||||
|
||||
(defn record->report
|
||||
[{:keys [::l/context ::l/id ::l/cause] :as record}]
|
||||
(us/assert! ::l/record record)
|
||||
(assert (l/valid-record? record) "expected valid log record")
|
||||
{:id id
|
||||
:tenant (cf/get :tenant)
|
||||
:host (cf/get :host)
|
||||
@@ -75,8 +73,9 @@
|
||||
(catch Throwable cause
|
||||
(l/warn :hint "unhandled error" :cause cause)))))
|
||||
|
||||
(defmethod ig/pre-init-spec ::reporter [_]
|
||||
(s/keys :req [::http/client]))
|
||||
(defmethod ig/assert-key ::reporter
|
||||
[_ params]
|
||||
(assert (http/client? (::http/client params)) "expect valid http client"))
|
||||
|
||||
(defmethod ig/init-key ::reporter
|
||||
[_ cfg]
|
||||
|
||||
@@ -18,7 +18,6 @@
|
||||
[app.util.time :as dt]
|
||||
[app.worker :as wrk]
|
||||
[clojure.data.json :as json]
|
||||
[clojure.spec.alpha :as s]
|
||||
[cuerdas.core :as str]
|
||||
[integrant.core :as ig]))
|
||||
|
||||
@@ -60,8 +59,10 @@
|
||||
(some->> (:project-id props) (lookup-webhooks-by-project pool))
|
||||
(some->> (:file-id props) (lookup-webhooks-by-file pool))))
|
||||
|
||||
(defmethod ig/pre-init-spec ::process-event-handler [_]
|
||||
(s/keys :req [::db/pool]))
|
||||
(defmethod ig/assert-key ::process-event-handler
|
||||
[_ params]
|
||||
(assert (db/pool? (::db/pool params)) "expect valid database pool")
|
||||
(assert (http/client? (::http/client params)) "expect valid http client"))
|
||||
|
||||
(defmethod ig/init-key ::process-event-handler
|
||||
[_ cfg]
|
||||
@@ -87,12 +88,14 @@
|
||||
{:key-fn str/camel
|
||||
:indent true})
|
||||
|
||||
(defmethod ig/pre-init-spec ::run-webhook-handler [_]
|
||||
(s/keys :req [::http/client ::db/pool]))
|
||||
(defmethod ig/assert-key ::run-webhook-handler
|
||||
[_ params]
|
||||
(assert (db/pool? (::db/pool params)) "expect valid database pool")
|
||||
(assert (http/client? (::http/client params)) "expect valid http client"))
|
||||
|
||||
(defmethod ig/prep-key ::run-webhook-handler
|
||||
[_ cfg]
|
||||
(merge {::max-errors 3} (d/without-nils cfg)))
|
||||
(defmethod ig/expand-key ::run-webhook-handler
|
||||
[k v]
|
||||
{k (merge {::max-errors 3} (d/without-nils v))})
|
||||
|
||||
(defmethod ig/init-key ::run-webhook-handler
|
||||
[_ {:keys [::db/pool ::max-errors] :as cfg}]
|
||||
|
||||
@@ -9,6 +9,7 @@
|
||||
[app.auth.ldap :as-alias ldap]
|
||||
[app.auth.oidc :as-alias oidc]
|
||||
[app.auth.oidc.providers :as-alias oidc.providers]
|
||||
[app.common.exceptions :as ex]
|
||||
[app.common.logging :as l]
|
||||
[app.config :as cf]
|
||||
[app.db :as-alias db]
|
||||
@@ -28,6 +29,7 @@
|
||||
[app.msgbus :as-alias mbus]
|
||||
[app.redis :as-alias rds]
|
||||
[app.rpc :as-alias rpc]
|
||||
[app.rpc.climit :as-alias climit]
|
||||
[app.rpc.doc :as-alias rpc.doc]
|
||||
[app.setup :as-alias setup]
|
||||
[app.srepl :as-alias srepl]
|
||||
@@ -169,7 +171,7 @@
|
||||
{::db/uri (cf/get :database-uri)
|
||||
::db/username (cf/get :database-username)
|
||||
::db/password (cf/get :database-password)
|
||||
::db/read-only? (cf/get :database-readonly false)
|
||||
::db/read-only (cf/get :database-readonly false)
|
||||
::db/min-size (cf/get :database-min-pool-size 0)
|
||||
::db/max-size (cf/get :database-max-pool-size 60)
|
||||
::mtx/metrics (ig/ref ::mtx/metrics)}
|
||||
@@ -245,7 +247,7 @@
|
||||
:base-dn (cf/get :ldap-base-dn)
|
||||
:bind-dn (cf/get :ldap-bind-dn)
|
||||
:bind-password (cf/get :ldap-bind-password)
|
||||
:enabled? (contains? cf/flags :login-with-ldap)}
|
||||
:enabled (contains? cf/flags :login-with-ldap)}
|
||||
|
||||
::oidc.providers/google
|
||||
{}
|
||||
@@ -302,9 +304,11 @@
|
||||
::http.assets/cache-max-agesignature-max-age (dt/duration {:hours 24 :minutes 5})
|
||||
::sto/storage (ig/ref ::sto/storage)}
|
||||
|
||||
:app.rpc/climit
|
||||
{::mtx/metrics (ig/ref ::mtx/metrics)
|
||||
::wrk/executor (ig/ref ::wrk/executor)}
|
||||
::rpc/climit
|
||||
{::mtx/metrics (ig/ref ::mtx/metrics)
|
||||
::wrk/executor (ig/ref ::wrk/executor)
|
||||
::climit/config (cf/get :rpc-climit-config)
|
||||
::climit/enabled (contains? cf/flags :rpc-climit)}
|
||||
|
||||
:app.rpc/rlimit
|
||||
{::wrk/executor (ig/ref ::wrk/executor)}
|
||||
@@ -319,7 +323,6 @@
|
||||
::mtx/metrics (ig/ref ::mtx/metrics)
|
||||
::mbus/msgbus (ig/ref ::mbus/msgbus)
|
||||
::rds/redis (ig/ref ::rds/redis)
|
||||
::svgo/optimizer (ig/ref ::svgo/optimizer)
|
||||
|
||||
::rpc/climit (ig/ref ::rpc/climit)
|
||||
::rpc/rlimit (ig/ref ::rpc/rlimit)
|
||||
@@ -330,7 +333,7 @@
|
||||
::email/whitelist (ig/ref ::email/whitelist)}
|
||||
|
||||
:app.rpc.doc/routes
|
||||
{:methods (ig/ref :app.rpc/methods)}
|
||||
{:app.rpc/methods (ig/ref :app.rpc/methods)}
|
||||
|
||||
:app.rpc/routes
|
||||
{::rpc/methods (ig/ref :app.rpc/methods)
|
||||
@@ -346,7 +349,6 @@
|
||||
:file-gc (ig/ref :app.tasks.file-gc/handler)
|
||||
:file-gc-scheduler (ig/ref :app.tasks.file-gc-scheduler/handler)
|
||||
:offload-file-data (ig/ref :app.tasks.offload-file-data/handler)
|
||||
:file-xlog-gc (ig/ref :app.tasks.file-xlog-gc/handler)
|
||||
:tasks-gc (ig/ref :app.tasks.tasks-gc/handler)
|
||||
:telemetry (ig/ref :app.tasks.telemetry/handler)
|
||||
:storage-gc-deleted (ig/ref ::sto.gc-deleted/handler)
|
||||
@@ -379,8 +381,7 @@
|
||||
::email/default-from (cf/get :smtp-default-from)}
|
||||
|
||||
::email/handler
|
||||
{::email/sendmail (ig/ref ::email/sendmail)
|
||||
::mtx/metrics (ig/ref ::mtx/metrics)}
|
||||
{::email/sendmail (ig/ref ::email/sendmail)}
|
||||
|
||||
:app.tasks.tasks-gc/handler
|
||||
{::db/pool (ig/ref ::db/pool)}
|
||||
@@ -403,10 +404,6 @@
|
||||
{::db/pool (ig/ref ::db/pool)
|
||||
::sto/storage (ig/ref ::sto/storage)}
|
||||
|
||||
:app.tasks.file-xlog-gc/handler
|
||||
{::db/pool (ig/ref ::db/pool)
|
||||
::sto/storage (ig/ref ::sto/storage)}
|
||||
|
||||
:app.tasks.telemetry/handler
|
||||
{::db/pool (ig/ref ::db/pool)
|
||||
::http.client/client (ig/ref ::http.client/client)
|
||||
@@ -430,9 +427,6 @@
|
||||
;; module requires the migrations to run before initialize.
|
||||
::migrations (ig/ref :app.migrations/migrations)}
|
||||
|
||||
::svgo/optimizer
|
||||
{}
|
||||
|
||||
:app.loggers.audit.archive-task/handler
|
||||
{::setup/props (ig/ref ::setup/props)
|
||||
::db/pool (ig/ref ::db/pool)
|
||||
@@ -488,10 +482,7 @@
|
||||
{::wrk/registry (ig/ref ::wrk/registry)
|
||||
::db/pool (ig/ref ::db/pool)
|
||||
::wrk/entries
|
||||
[{:cron #app/cron "0 0 * * * ?" ;; hourly
|
||||
:task :file-xlog-gc}
|
||||
|
||||
{:cron #app/cron "0 0 0 * * ?" ;; daily
|
||||
[{:cron #app/cron "0 0 0 * * ?" ;; daily
|
||||
:task :session-gc}
|
||||
|
||||
{:cron #app/cron "0 0 0 * * ?" ;; daily
|
||||
@@ -523,11 +514,13 @@
|
||||
::wrk/dispatcher
|
||||
{::rds/redis (ig/ref ::rds/redis)
|
||||
::mtx/metrics (ig/ref ::mtx/metrics)
|
||||
::db/pool (ig/ref ::db/pool)}
|
||||
::db/pool (ig/ref ::db/pool)
|
||||
::wrk/tenant (cf/get :tenant)}
|
||||
|
||||
[::default ::wrk/runner]
|
||||
{::wrk/parallelism (cf/get ::worker-default-parallelism 1)
|
||||
::wrk/queue :default
|
||||
::wrk/tenant (cf/get :tenant)
|
||||
::rds/redis (ig/ref ::rds/redis)
|
||||
::wrk/registry (ig/ref ::wrk/registry)
|
||||
::mtx/metrics (ig/ref ::mtx/metrics)
|
||||
@@ -536,6 +529,7 @@
|
||||
[::webhook ::wrk/runner]
|
||||
{::wrk/parallelism (cf/get ::worker-webhook-parallelism 1)
|
||||
::wrk/queue :webhooks
|
||||
::wrk/tenant (cf/get :tenant)
|
||||
::rds/redis (ig/ref ::rds/redis)
|
||||
::wrk/registry (ig/ref ::wrk/registry)
|
||||
::mtx/metrics (ig/ref ::mtx/metrics)
|
||||
@@ -553,7 +547,7 @@
|
||||
(-> system-config
|
||||
(cond-> (contains? cf/flags :backend-worker)
|
||||
(merge worker-config))
|
||||
(ig/prep)
|
||||
(ig/expand)
|
||||
(ig/init))))
|
||||
(l/inf :hint "welcome to penpot"
|
||||
:flags (str/join "," (map name cf/flags))
|
||||
@@ -566,7 +560,7 @@
|
||||
(alter-var-root #'system (fn [sys]
|
||||
(when sys (ig/halt! sys))
|
||||
(-> config
|
||||
(ig/prep)
|
||||
(ig/expand)
|
||||
(ig/init)))))
|
||||
|
||||
(defn stop
|
||||
@@ -622,12 +616,6 @@
|
||||
|
||||
(deref p))
|
||||
(catch Throwable cause
|
||||
(binding [*out* *err*]
|
||||
(println "==== ERROR ===="))
|
||||
(.printStackTrace cause)
|
||||
(when-let [cause' (ex-cause cause)]
|
||||
(binding [*out* *err*]
|
||||
(println "==== CAUSE ===="))
|
||||
(.printStackTrace cause'))
|
||||
(ex/print-throwable cause)
|
||||
(px/sleep 500)
|
||||
(System/exit -1))))
|
||||
|
||||
@@ -46,14 +46,15 @@
|
||||
(s/keys :req-un [::path]
|
||||
:opt-un [::mtype]))
|
||||
|
||||
(sm/register! ::upload
|
||||
[:map {:title "Upload"}
|
||||
[:filename :string]
|
||||
[:size ::sm/int]
|
||||
[:path ::fs/path]
|
||||
[:mtype {:optional true} :string]
|
||||
[:headers {:optional true}
|
||||
[:map-of :string :string]]])
|
||||
(sm/register!
|
||||
^{::sm/type ::upload}
|
||||
[:map {:title "Upload"}
|
||||
[:filename :string]
|
||||
[:size ::sm/int]
|
||||
[:path ::fs/path]
|
||||
[:mtype {:optional true} :string]
|
||||
[:headers {:optional true}
|
||||
[:map-of :string :string]]])
|
||||
|
||||
(defn validate-media-type!
|
||||
([upload] (validate-media-type! upload cm/valid-image-types))
|
||||
@@ -225,7 +226,7 @@
|
||||
(letfn [(ttf->otf [data]
|
||||
(let [finput (tmp/tempfile :prefix "penpot.font." :suffix "")
|
||||
foutput (fs/path (str finput ".otf"))
|
||||
_ (io/write-to-file! data finput)
|
||||
_ (io/write* finput data)
|
||||
res (sh/sh "fontforge" "-lang=ff" "-c"
|
||||
(str/fmt "Open('%s'); Generate('%s')"
|
||||
(str finput)
|
||||
@@ -236,7 +237,7 @@
|
||||
(otf->ttf [data]
|
||||
(let [finput (tmp/tempfile :prefix "penpot.font." :suffix "")
|
||||
foutput (fs/path (str finput ".ttf"))
|
||||
_ (io/write-to-file! data finput)
|
||||
_ (io/write* finput data)
|
||||
res (sh/sh "fontforge" "-lang=ff" "-c"
|
||||
(str/fmt "Open('%s'); Generate('%s')"
|
||||
(str finput)
|
||||
@@ -250,14 +251,14 @@
|
||||
;; command.
|
||||
(let [finput (tmp/tempfile :prefix "penpot.font." :suffix "")
|
||||
foutput (fs/path (str finput ".woff"))
|
||||
_ (io/write-to-file! data finput)
|
||||
_ (io/write* finput data)
|
||||
res (sh/sh "sfnt2woff" (str finput))]
|
||||
(when (zero? (:exit res))
|
||||
foutput)))
|
||||
|
||||
(woff->sfnt [data]
|
||||
(let [finput (tmp/tempfile :prefix "penpot" :suffix "")
|
||||
_ (io/write-to-file! data finput)
|
||||
_ (io/write* finput data)
|
||||
res (sh/sh "woff2sfnt" (str finput)
|
||||
:out-enc :bytes)]
|
||||
(when (zero? (:exit res))
|
||||
|
||||
@@ -8,9 +8,8 @@
|
||||
(:refer-clojure :exclude [run!])
|
||||
(:require
|
||||
[app.common.logging :as l]
|
||||
[app.common.spec :as us]
|
||||
[app.common.schema :as sm]
|
||||
[app.metrics.definition :as-alias mdef]
|
||||
[clojure.spec.alpha :as s]
|
||||
[integrant.core :as ig])
|
||||
(:import
|
||||
io.prometheus.client.CollectorRegistry
|
||||
@@ -34,41 +33,52 @@
|
||||
(declare create-collector)
|
||||
(declare handler)
|
||||
|
||||
(defprotocol IMetrics
|
||||
(get-registry [_])
|
||||
(get-collector [_ id])
|
||||
(get-handler [_]))
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; METRICS SERVICE PROVIDER
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
(s/def ::mdef/name string?)
|
||||
(s/def ::mdef/help string?)
|
||||
(s/def ::mdef/labels (s/every string? :kind vector?))
|
||||
(s/def ::mdef/type #{:gauge :counter :summary :histogram})
|
||||
(sm/register!
|
||||
{:type ::collector
|
||||
:pred #(instance? SimpleCollector %)
|
||||
:type-properties
|
||||
{:title "collector"
|
||||
:description "An instance of SimpleCollector"}})
|
||||
|
||||
(s/def ::mdef/instance
|
||||
#(instance? SimpleCollector %))
|
||||
(sm/register!
|
||||
{:type ::registry
|
||||
:pred #(instance? CollectorRegistry %)
|
||||
:type-properties
|
||||
{:title "Metrics Registry"
|
||||
:description "Instance of CollectorRegistry"}})
|
||||
|
||||
(s/def ::mdef/definition
|
||||
(s/keys :req [::mdef/name
|
||||
::mdef/help
|
||||
::mdef/type]
|
||||
:opt [::mdef/labels
|
||||
::mdef/instance]))
|
||||
(def ^:private schema:definitions
|
||||
[:map-of :keyword
|
||||
[:map {:title "definition"}
|
||||
[::mdef/name :string]
|
||||
[::mdef/help :string]
|
||||
[::mdef/type [:enum :gauge :counter :summary :histogram]]
|
||||
[::mdef/labels {:optional true} [::sm/vec :string]]
|
||||
[::mdef/instance {:optional true} ::collector]]])
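For reference, a definitions map accepted by this schema could look like the following sketch; the metric id and name are hypothetical, not taken from the diff:

;; hedged sketch: one entry of the :default definitions map
{:rpc-command-timing
 {::mdef/name   "penpot_rpc_command_timing"
  ::mdef/help   "RPC command execution timing."
  ::mdef/labels ["name"]
  ::mdef/type   :histogram}}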
|
||||
|
||||
(s/def ::definitions
|
||||
(s/map-of keyword? ::mdef/definition))
|
||||
(defn metrics?
|
||||
[o]
|
||||
(satisfies? IMetrics o))
|
||||
|
||||
(s/def ::registry
|
||||
#(instance? CollectorRegistry %))
|
||||
(sm/register!
|
||||
{:type ::metrics
|
||||
:pred metrics?})
|
||||
|
||||
(s/def ::handler fn?)
|
||||
(s/def ::metrics
|
||||
(s/keys :req [::registry
|
||||
::handler
|
||||
::definitions]))
|
||||
(def ^:private valid-definitions?
|
||||
(sm/validator schema:definitions))
|
||||
|
||||
(s/def ::default ::definitions)
|
||||
|
||||
(defmethod ig/pre-init-spec ::metrics [_]
|
||||
(s/keys :req-un [::default]))
|
||||
(defmethod ig/assert-key ::metrics
|
||||
[_ {:keys [default]}]
|
||||
(assert (valid-definitions? default) "expected valid definitions"))
|
||||
|
||||
(defmethod ig/init-key ::metrics
|
||||
[_ cfg]
|
||||
@@ -81,12 +91,14 @@
|
||||
{}
|
||||
(:default cfg))]
|
||||
|
||||
(us/verify! ::definitions definitions)
|
||||
|
||||
{::handler (partial handler registry)
|
||||
::definitions definitions
|
||||
::registry registry}))
|
||||
|
||||
(reify
|
||||
IMetrics
|
||||
(get-handler [_]
|
||||
(partial handler registry))
|
||||
(get-collector [_ id]
|
||||
(get definitions id))
|
||||
(get-registry [_]
|
||||
registry))))
|
||||
|
||||
(defn- handler
|
||||
[registry _]
|
||||
@@ -96,17 +108,14 @@
|
||||
{:headers {"content-type" TextFormat/CONTENT_TYPE_004}
|
||||
:body (.toString writer)}))
|
||||
|
||||
|
||||
|
||||
(s/def ::routes vector?)
|
||||
(defmethod ig/pre-init-spec ::routes [_]
|
||||
(s/keys :req [::metrics]))
|
||||
(defmethod ig/assert-key ::routes
|
||||
[_ {:keys [::metrics]}]
|
||||
(assert (metrics? metrics) "expected a valid instance for metrics"))
|
||||
|
||||
(defmethod ig/init-key ::routes
|
||||
[_ {:keys [::metrics]}]
|
||||
(let [registry (::registry metrics)]
|
||||
["/metrics" {:handler (partial handler registry)
|
||||
:allowed-methods #{:get}}]))
|
||||
["/metrics" {:handler (get-handler metrics)
|
||||
:allowed-methods #{:get}}])
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; Implementation
|
||||
@@ -126,8 +135,9 @@
|
||||
(defmulti create-collector ::mdef/type)
|
||||
|
||||
(defn run!
|
||||
[{:keys [::definitions]} & {:keys [id] :as params}]
|
||||
(when-let [mobj (get definitions id)]
|
||||
[instance & {:keys [id] :as params}]
|
||||
(assert (metrics? instance) "expected valid metrics instance")
|
||||
(when-let [mobj (get-collector instance id)]
|
||||
(run-collector! mobj params)
|
||||
true))
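A minimal usage sketch of the updated signature, assuming `metrics` is the instance returned by the ::metrics init-key; the metric id is hypothetical and must exist in the definitions map:

;; hedged sketch, not part of the diff
(mtx/run! metrics :id :rpc-command-timing :val 42)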
|
||||
|
||||
|
||||
@@ -11,7 +11,6 @@
|
||||
[app.db :as db]
|
||||
[app.migrations.clj.migration-0023 :as mg0023]
|
||||
[app.util.migrations :as mg]
|
||||
[clojure.spec.alpha :as s]
|
||||
[integrant.core :as ig]))
|
||||
|
||||
(def migrations
|
||||
@@ -412,7 +411,22 @@
    :fn (mg/resource "app/migrations/sql/0129-mod-file-change-table.sql")}

   {:name "0130-mod-file-change-table"
    :fn (mg/resource "app/migrations/sql/0130-mod-file-change-table.sql")}])
    :fn (mg/resource "app/migrations/sql/0130-mod-file-change-table.sql")}

   {:name "0131-mod-webhook-table"
    :fn (mg/resource "app/migrations/sql/0131-mod-webhook-table.sql")}

   {:name "0132-mod-file-change-table"
    :fn (mg/resource "app/migrations/sql/0132-mod-file-change-table.sql")}

   {:name "0133-mod-file-table"
    :fn (mg/resource "app/migrations/sql/0133-mod-file-table.sql")}

   {:name "0134-mod-file-change-table"
    :fn (mg/resource "app/migrations/sql/0134-mod-file-change-table.sql")}

   {:name "0135-mod-team-invitation-table.sql"
    :fn (mg/resource "app/migrations/sql/0135-mod-team-invitation-table.sql")}])
(defn apply-migrations!
|
||||
[pool name migrations]
|
||||
@@ -420,9 +434,9 @@
|
||||
(mg/setup! conn)
|
||||
(mg/migrate! conn {:name name :steps migrations})))
|
||||
|
||||
(defmethod ig/pre-init-spec ::migrations
|
||||
[_]
|
||||
(s/keys :req [::db/pool]))
|
||||
(defmethod ig/assert-key ::migrations
|
||||
[_ {:keys [::db/pool]}]
|
||||
(assert (db/pool? pool) "expected valid pool"))
|
||||
|
||||
(defmethod ig/init-key ::migrations
|
||||
[module {:keys [::db/pool]}]
|
||||
|
||||
@@ -0,0 +1,6 @@ backend/src/app/migrations/sql/0131-mod-webhook-table.sql (new file)
ALTER TABLE webhook
  ADD COLUMN profile_id uuid NULL REFERENCES profile (id) ON DELETE SET NULL;

CREATE INDEX webhook__profile_id__idx
    ON webhook (profile_id)
 WHERE profile_id IS NOT NULL;
@@ -0,0 +1,2 @@ backend/src/app/migrations/sql/0132-mod-file-change-table.sql (new file)
ALTER TABLE file_change
  ADD COLUMN created_by text NOT NULL DEFAULT 'system';
@@ -0,0 +1,2 @@ backend/src/app/migrations/sql/0133-mod-file-table.sql (new file)
ALTER TABLE file
  ADD COLUMN vern int NOT NULL DEFAULT 0;
@@ -0,0 +1,18 @@ backend/src/app/migrations/sql/0134-mod-file-change-table.sql (new file)
ALTER TABLE file_change
  ADD COLUMN updated_at timestamptz DEFAULT now(),
  ADD COLUMN deleted_at timestamptz DEFAULT NULL,
  ALTER COLUMN created_at SET DEFAULT now();

DROP INDEX file_change__created_at__idx;
DROP INDEX file_change__created_at__label__idx;
DROP INDEX file_change__label__idx;

CREATE INDEX file_change__deleted_at__idx
    ON file_change (deleted_at, id)
 WHERE deleted_at IS NOT NULL;

CREATE INDEX file_change__system_snapshots__idx
    ON file_change (file_id, created_at)
 WHERE data IS NOT NULL
   AND created_by = 'system'
   AND deleted_at IS NULL;
@@ -0,0 +1,2 @@ backend/src/app/migrations/sql/0135-mod-team-invitation-table.sql (new file)
ALTER TABLE team_invitation
  ADD COLUMN created_by uuid NULL REFERENCES profile(id) ON DELETE SET NULL;
@@ -9,22 +9,27 @@
|
||||
(:require
|
||||
[app.common.data :as d]
|
||||
[app.common.logging :as l]
|
||||
[app.common.spec :as us]
|
||||
[app.common.schema :as sm]
|
||||
[app.common.transit :as t]
|
||||
[app.config :as cfg]
|
||||
[app.redis :as rds]
|
||||
[app.util.time :as dt]
|
||||
[app.worker :as wrk]
|
||||
[clojure.spec.alpha :as s]
|
||||
[integrant.core :as ig]
|
||||
[promesa.core :as p]
|
||||
[promesa.exec :as px]
|
||||
[promesa.exec.csp :as sp]))
|
||||
|
||||
(set! *warn-on-reflection* true)
|
||||
|
||||
(def ^:private prefix (cfg/get :tenant))
|
||||
|
||||
(defprotocol IMsgBus
|
||||
(-sub [_ topics chan])
|
||||
(-pub [_ topic message])
|
||||
(-purge [_ chans]))
|
||||
|
||||
|
||||
|
||||
(defn- prefix-topic
|
||||
[topic]
|
||||
(str prefix "." topic))
|
||||
@@ -32,30 +37,33 @@
|
||||
(def ^:private xform-prefix-topic
|
||||
(map (fn [obj] (update obj :topic prefix-topic))))
|
||||
|
||||
(declare ^:private redis-pub!)
|
||||
(declare ^:private redis-sub!)
|
||||
(declare ^:private redis-unsub!)
|
||||
(declare ^:private start-io-loop!)
|
||||
(declare ^:private redis-pub)
|
||||
(declare ^:private redis-sub)
|
||||
(declare ^:private redis-unsub)
|
||||
(declare ^:private start-io-loop)
|
||||
(declare ^:private subscribe-to-topics)
|
||||
(declare ^:private unsubscribe-channels)
|
||||
|
||||
(s/def ::cmd-ch sp/chan?)
|
||||
(s/def ::rcv-ch sp/chan?)
|
||||
(s/def ::pub-ch sp/chan?)
|
||||
(s/def ::state ::us/agent)
|
||||
(s/def ::pconn ::rds/connection-holder)
|
||||
(s/def ::sconn ::rds/connection-holder)
|
||||
(s/def ::msgbus
|
||||
(s/keys :req [::cmd-ch ::rcv-ch ::pub-ch ::state ::pconn ::sconn ::wrk/executor]))
|
||||
(defn msgbus?
|
||||
[o]
|
||||
(satisfies? IMsgBus o))
|
||||
|
||||
(defmethod ig/pre-init-spec ::msgbus [_]
|
||||
(s/keys :req [::rds/redis ::wrk/executor]))
|
||||
(sm/register!
|
||||
{:type ::msgbus
|
||||
:pred msgbus?})
|
||||
|
||||
(defmethod ig/prep-key ::msgbus
|
||||
[_ cfg]
|
||||
(-> cfg
|
||||
(assoc ::buffer-size 128)
|
||||
(assoc ::timeout (dt/duration {:seconds 30}))))
|
||||
(defmethod ig/expand-key ::msgbus
|
||||
[k v]
|
||||
{k (-> (d/without-nils v)
|
||||
(assoc ::buffer-size 128)
|
||||
(assoc ::timeout (dt/duration {:seconds 30})))})
|
||||
|
||||
(def ^:private schema:params
|
||||
[:map ::rds/redis ::wrk/executor])
|
||||
|
||||
(defmethod ig/assert-key ::msgbus
|
||||
[_ params]
|
||||
(assert (sm/check schema:params params)))
|
||||
|
||||
(defmethod ig/init-key ::msgbus
|
||||
[_ {:keys [::buffer-size ::wrk/executor ::timeout ::rds/redis] :as cfg}]
|
||||
@@ -66,46 +74,66 @@
|
||||
:xf xform-prefix-topic)
|
||||
state (agent {})
|
||||
|
||||
pconn (rds/connect redis :timeout timeout)
|
||||
pconn (rds/connect redis :type :default :timeout timeout)
|
||||
sconn (rds/connect redis :type :pubsub :timeout timeout)
|
||||
msgbus (-> cfg
|
||||
|
||||
_ (set-error-handler! state #(l/error :cause % :hint "unexpected error on agent" ::l/sync? true))
|
||||
_ (set-error-mode! state :continue)
|
||||
|
||||
cfg (-> cfg
|
||||
(assoc ::pconn pconn)
|
||||
(assoc ::sconn sconn)
|
||||
(assoc ::cmd-ch cmd-ch)
|
||||
(assoc ::rcv-ch rcv-ch)
|
||||
(assoc ::pub-ch pub-ch)
|
||||
(assoc ::state state)
|
||||
(assoc ::wrk/executor executor))]
|
||||
(assoc ::state state))
|
||||
|
||||
(set-error-handler! state #(l/error :cause % :hint "unexpected error on agent" ::l/sync? true))
|
||||
(set-error-mode! state :continue)
|
||||
io-thr (start-io-loop cfg)]
|
||||
|
||||
(assoc msgbus ::io-thr (start-io-loop! msgbus))))
|
||||
(reify
|
||||
java.lang.AutoCloseable
|
||||
(close [_]
|
||||
(px/interrupt! io-thr)
|
||||
(sp/close! cmd-ch)
|
||||
(sp/close! rcv-ch)
|
||||
(sp/close! pub-ch)
|
||||
(d/close! pconn)
|
||||
(d/close! sconn))
|
||||
|
||||
IMsgBus
|
||||
(-sub [_ topics chan]
|
||||
(l/debug :hint "subscribe" :topics topics :chan (hash chan))
|
||||
(send-via executor state subscribe-to-topics cfg topics chan))
|
||||
|
||||
(-pub [_ topic message]
|
||||
(let [message (assoc message :topic topic)]
|
||||
(sp/put! pub-ch {:topic topic :message message})))
|
||||
|
||||
(-purge [_ chans]
|
||||
(l/debug :hint "purge" :chans (count chans))
|
||||
(send-via executor state unsubscribe-channels cfg chans)))))
|
||||
|
||||
(defmethod ig/halt-key! ::msgbus
|
||||
[_ msgbus]
|
||||
(px/interrupt! (::io-thr msgbus))
|
||||
(sp/close! (::cmd-ch msgbus))
|
||||
(sp/close! (::rcv-ch msgbus))
|
||||
(sp/close! (::pub-ch msgbus))
|
||||
(d/close! (::pconn msgbus))
|
||||
(d/close! (::sconn msgbus)))
|
||||
[_ instance]
|
||||
(d/close! instance))
|
||||
|
||||
(defn sub!
|
||||
[{:keys [::state ::wrk/executor] :as cfg} & {:keys [topic topics chan]}]
|
||||
[instance & {:keys [topic topics chan]}]
|
||||
(assert (satisfies? IMsgBus instance) "expected valid msgbus instance")
|
||||
(let [topics (into [] (map prefix-topic) (if topic [topic] topics))]
|
||||
(l/debug :hint "subscribe" :topics topics :chan (hash chan))
|
||||
(send-via executor state subscribe-to-topics cfg topics chan)
|
||||
(-sub instance topics chan)
|
||||
nil))
|
||||
|
||||
(defn pub!
|
||||
[{::keys [pub-ch]} & {:as params}]
|
||||
(sp/put! pub-ch params))
|
||||
[instance & {:keys [topic message]}]
|
||||
(assert (satisfies? IMsgBus instance) "expected valid msgbus instance")
|
||||
(-pub instance topic message))
|
||||
|
||||
(defn purge!
|
||||
[{:keys [::state ::wrk/executor] :as msgbus} chans]
|
||||
(l/debug :hint "purge" :chans (count chans))
|
||||
(send-via executor state unsubscribe-channels msgbus chans)
|
||||
[instance chans]
|
||||
(assert (satisfies? IMsgBus instance) "expected valid msgbus instance")
|
||||
(assert (every? sp/chan? chans) "expected a seq of chans")
|
||||
(-purge instance chans)
|
||||
nil)
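A minimal sketch of the protocol-based API after this change, assuming `msgbus` is the instance returned by the ::msgbus init-key; the topic name is hypothetical:

;; hedged sketch, not part of the diff
(let [ch (sp/chan :buf 32)]
  (sub! msgbus :topic "team-updates" :chan ch)
  (pub! msgbus :topic "team-updates" :message {:type :notification})
  (purge! msgbus [ch]))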
|
||||
|
||||
;; --- IMPL
|
||||
@@ -118,7 +146,7 @@
|
||||
(let [nsubs (if (nil? nsubs) #{chan} (conj nsubs chan))]
|
||||
(when (= 1 (count nsubs))
|
||||
(l/trace :hint "open subscription" :topic topic ::l/sync? true)
|
||||
(redis-sub! cfg topic))
|
||||
(redis-sub cfg topic))
|
||||
nsubs))
|
||||
|
||||
(defn- disj-subscription
|
||||
@@ -129,7 +157,7 @@
|
||||
(let [nsubs (disj nsubs chan)]
|
||||
(when (empty? nsubs)
|
||||
(l/trace :hint "close subscription" :topic topic ::l/sync? true)
|
||||
(redis-unsub! cfg topic))
|
||||
(redis-unsub cfg topic))
|
||||
nsubs))
|
||||
|
||||
(defn- subscribe-to-topics
|
||||
@@ -170,7 +198,7 @@
|
||||
(when-not (sp/offer! rcv-ch val)
|
||||
(l/warn :msg "dropping message on subscription loop"))))))
|
||||
|
||||
(defn- process-input!
|
||||
(defn- process-input
|
||||
[{:keys [::state ::wrk/executor] :as cfg} topic message]
|
||||
(let [chans (get-in @state [:topics topic])]
|
||||
(when-let [closed (loop [chans (seq chans)
|
||||
@@ -183,9 +211,9 @@
|
||||
(send-via executor state unsubscribe-channels cfg closed))))
|
||||
|
||||
|
||||
(defn start-io-loop!
|
||||
(defn start-io-loop
|
||||
[{:keys [::sconn ::rcv-ch ::pub-ch ::state ::wrk/executor] :as cfg}]
|
||||
(rds/add-listener! sconn (create-listener rcv-ch))
|
||||
(rds/add-listener sconn (create-listener rcv-ch))
|
||||
|
||||
(px/thread
|
||||
{:name "penpot/msgbus/io-loop"
|
||||
@@ -209,12 +237,12 @@
|
||||
|
||||
(identical? port rcv-ch)
|
||||
(let [{:keys [topic message]} val]
|
||||
(process-input! cfg topic message)
|
||||
(process-input cfg topic message)
|
||||
(recur))
|
||||
|
||||
(identical? port pub-ch)
|
||||
(do
|
||||
(redis-pub! cfg val)
|
||||
(redis-pub cfg val)
|
||||
(recur)))))
|
||||
|
||||
(catch InterruptedException _
|
||||
@@ -230,13 +258,12 @@
|
||||
|
||||
(l/debug :hint "io-loop thread terminated")))))
|
||||
|
||||
|
||||
(defn- redis-pub!
|
||||
(defn- redis-pub
|
||||
"Publish a message to the redis server. Asynchronous operation,
|
||||
intended to be used in core.async go blocks."
|
||||
[{:keys [::pconn] :as cfg} {:keys [topic message]}]
|
||||
(try
|
||||
(p/await! (rds/publish! pconn topic (t/encode message)))
|
||||
(p/await! (rds/publish pconn topic (t/encode message)))
|
||||
(catch InterruptedException cause
|
||||
(throw cause))
|
||||
(catch Throwable cause
|
||||
@@ -244,23 +271,23 @@
|
||||
:message message
|
||||
:cause cause))))
|
||||
|
||||
(defn- redis-sub!
|
||||
(defn- redis-sub
|
||||
"Create redis subscription. Blocking operation, intended to be used
|
||||
inside an agent."
|
||||
[{:keys [::sconn] :as cfg} topic]
|
||||
(try
|
||||
(rds/subscribe! sconn topic)
|
||||
(rds/subscribe sconn [topic])
|
||||
(catch InterruptedException cause
|
||||
(throw cause))
|
||||
(catch Throwable cause
|
||||
(l/trace :hint "exception on subscribing" :topic topic :cause cause))))
|
||||
|
||||
(defn- redis-unsub!
|
||||
(defn- redis-unsub
|
||||
"Removes redis subscription. Blocking operation, intended to be used
|
||||
inside an agent."
|
||||
[{:keys [::sconn] :as cfg} topic]
|
||||
(try
|
||||
(rds/unsubscribe! sconn topic)
|
||||
(rds/unsubscribe sconn [topic])
|
||||
(catch InterruptedException cause
|
||||
(throw cause))
|
||||
(catch Throwable cause
|
||||
|
||||
@@ -6,11 +6,12 @@
|
||||
|
||||
(ns app.redis
|
||||
"The msgbus abstraction implemented using redis as underlying backend."
|
||||
(:refer-clojure :exclude [eval])
|
||||
(:require
|
||||
[app.common.data :as d]
|
||||
[app.common.exceptions :as ex]
|
||||
[app.common.logging :as l]
|
||||
[app.common.spec :as us]
|
||||
[app.common.schema :as sm]
|
||||
[app.metrics :as mtx]
|
||||
[app.redis.script :as-alias rscript]
|
||||
[app.util.cache :as cache]
|
||||
@@ -18,13 +19,11 @@
|
||||
[app.worker :as-alias wrk]
|
||||
[clojure.core :as c]
|
||||
[clojure.java.io :as io]
|
||||
[clojure.spec.alpha :as s]
|
||||
[cuerdas.core :as str]
|
||||
[integrant.core :as ig]
|
||||
[promesa.core :as p]
|
||||
[promesa.exec :as px])
|
||||
(:import
|
||||
clojure.lang.IDeref
|
||||
clojure.lang.MapEntry
|
||||
io.lettuce.core.KeyValue
|
||||
io.lettuce.core.RedisClient
|
||||
@@ -53,79 +52,24 @@
|
||||
|
||||
(set! *warn-on-reflection* true)
|
||||
|
||||
(declare initialize-resources)
|
||||
(declare shutdown-resources)
|
||||
(declare connect*)
|
||||
(declare ^:private initialize-resources)
|
||||
(declare ^:private shutdown-resources)
|
||||
(declare ^:private impl-eval)
|
||||
|
||||
(s/def ::timer
|
||||
#(instance? Timer %))
|
||||
(defprotocol IRedis
|
||||
(-connect [_ options])
|
||||
(-get-or-connect [_ key options]))
|
||||
|
||||
(s/def ::default-connection
|
||||
#(or (instance? StatefulRedisConnection %)
|
||||
(and (instance? IDeref %)
|
||||
(instance? StatefulRedisConnection (deref %)))))
|
||||
(defprotocol IConnection
|
||||
(publish [_ topic message])
|
||||
(rpush [_ key payload])
|
||||
(blpop [_ timeout keys])
|
||||
(eval [_ script]))
|
||||
|
||||
(s/def ::pubsub-connection
|
||||
#(or (instance? StatefulRedisPubSubConnection %)
|
||||
(and (instance? IDeref %)
|
||||
(instance? StatefulRedisPubSubConnection (deref %)))))
|
||||
|
||||
(s/def ::connection
|
||||
(s/or :default ::default-connection
|
||||
:pubsub ::pubsub-connection))
|
||||
|
||||
(s/def ::connection-holder
|
||||
(s/keys :req [::connection]))
|
||||
|
||||
(s/def ::redis-uri
|
||||
#(instance? RedisURI %))
|
||||
|
||||
(s/def ::resources
|
||||
#(instance? ClientResources %))
|
||||
|
||||
(s/def ::pubsub-listener
|
||||
#(instance? RedisPubSubListener %))
|
||||
|
||||
(s/def ::uri ::us/not-empty-string)
|
||||
(s/def ::timeout ::dt/duration)
|
||||
(s/def ::connect? ::us/boolean)
|
||||
(s/def ::io-threads ::us/integer)
|
||||
(s/def ::worker-threads ::us/integer)
|
||||
(s/def ::cache cache/cache?)
|
||||
|
||||
(s/def ::redis
|
||||
(s/keys :req [::resources
|
||||
::redis-uri
|
||||
::timer
|
||||
::mtx/metrics]
|
||||
:opt [::connection
|
||||
::cache]))
|
||||
|
||||
(defmethod ig/prep-key ::redis
|
||||
[_ cfg]
|
||||
(let [cpus (px/get-available-processors)
|
||||
threads (max 1 (int (* cpus 0.2)))]
|
||||
(merge {::timeout (dt/duration "10s")
|
||||
::io-threads (max 3 threads)
|
||||
::worker-threads (max 3 threads)}
|
||||
(d/without-nils cfg))))
|
||||
|
||||
(defmethod ig/pre-init-spec ::redis [_]
|
||||
(s/keys :req [::uri ::mtx/metrics]
|
||||
:opt [::timeout
|
||||
::connect?
|
||||
::io-threads
|
||||
::worker-threads]))
|
||||
|
||||
(defmethod ig/init-key ::redis
|
||||
[_ {:keys [::connect?] :as cfg}]
|
||||
(let [state (initialize-resources cfg)]
|
||||
(cond-> state
|
||||
connect? (assoc ::connection (connect* cfg {})))))
|
||||
|
||||
(defmethod ig/halt-key! ::redis
|
||||
[_ state]
|
||||
(shutdown-resources state))
|
||||
(defprotocol IPubSubConnection
|
||||
(add-listener [_ listener])
|
||||
(subscribe [_ topics])
|
||||
(unsubscribe [_ topics]))
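A hedged sketch of how this pub/sub protocol is meant to be used together with the pubsub-listener helper defined further down in this namespace; `redis` stands for the ::redis instance and the topic is hypothetical:

;; hedged sketch, not part of the diff
(let [conn     (connect redis :type :pubsub)
      listener (pubsub-listener
                :on-message (fn [_pattern topic _message]
                              (l/trc :hint "message received" :topic topic)))]
  (add-listener conn listener)
  (subscribe conn ["penpot.notifications"]))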
|
||||
|
||||
(def default-codec
|
||||
(RedisCodec/of StringCodec/UTF8 ByteArrayCodec/INSTANCE))
|
||||
@@ -133,23 +77,76 @@
|
||||
(def string-codec
|
||||
(RedisCodec/of StringCodec/UTF8 StringCodec/UTF8))
|
||||
|
||||
(defn- create-cache
|
||||
[{:keys [::wrk/executor] :as cfg}]
|
||||
(letfn [(on-remove [key val cause]
|
||||
(l/trace :hint "evict connection (cache)" :key key :reason cause)
|
||||
(some-> val d/close!))]
|
||||
(cache/create :executor executor
|
||||
:on-remove on-remove
|
||||
:keepalive "5m")))
|
||||
(sm/register!
|
||||
{:type ::connection
|
||||
:pred #(satisfies? IConnection %)
|
||||
:type-properties
|
||||
{:title "connection"
|
||||
:description "redis connection instance"}})
|
||||
|
||||
(sm/register!
|
||||
{:type ::pubsub-connection
|
||||
:pred #(satisfies? IPubSubConnection %)
|
||||
:type-properties
|
||||
{:title "connection"
|
||||
:description "redis connection instance"}})
|
||||
|
||||
(defn redis?
|
||||
[o]
|
||||
(satisfies? IRedis o))
|
||||
|
||||
(sm/register!
|
||||
{:type ::redis
|
||||
:pred redis?})
|
||||
|
||||
(def ^:private schema:script
|
||||
[:map {:title "script"}
|
||||
[::rscript/name qualified-keyword?]
|
||||
[::rscript/path ::sm/text]
|
||||
[::rscript/keys {:optional true} [:vector :any]]
|
||||
[::rscript/vals {:optional true} [:vector :any]]])
|
||||
|
||||
(def valid-script?
|
||||
(sm/lazy-validator schema:script))
|
||||
|
||||
(defmethod ig/expand-key ::redis
|
||||
[k v]
|
||||
(let [cpus (px/get-available-processors)
|
||||
threads (max 1 (int (* cpus 0.2)))]
|
||||
{k (-> (d/without-nils v)
|
||||
(assoc ::timeout (dt/duration "10s"))
|
||||
(assoc ::io-threads (max 3 threads))
|
||||
(assoc ::worker-threads (max 3 threads)))}))
|
||||
|
||||
(def ^:private schema:redis-params
|
||||
[:map {:title "redis-params"}
|
||||
::wrk/executor
|
||||
::mtx/metrics
|
||||
[::uri ::sm/uri]
|
||||
[::worker-threads ::sm/int]
|
||||
[::io-threads ::sm/int]
|
||||
[::timeout ::dt/duration]])
|
||||
|
||||
(defmethod ig/assert-key ::redis
|
||||
[_ params]
|
||||
(assert (sm/check schema:redis-params params)))
|
||||
|
||||
(defmethod ig/init-key ::redis
|
||||
[_ params]
|
||||
(initialize-resources params))
|
||||
|
||||
(defmethod ig/halt-key! ::redis
|
||||
[_ instance]
|
||||
(d/close! instance))
|
||||
|
||||
(defn- initialize-resources
|
||||
"Initialize redis connection resources"
|
||||
[{:keys [::uri ::io-threads ::worker-threads ::connect?] :as cfg}]
|
||||
(l/info :hint "initialize redis resources"
|
||||
:uri uri
|
||||
:io-threads io-threads
|
||||
:worker-threads worker-threads
|
||||
:connect? connect?)
|
||||
[{:keys [::uri ::io-threads ::worker-threads ::wrk/executor ::mtx/metrics] :as params}]
|
||||
|
||||
(l/inf :hint "initialize redis resources"
|
||||
:uri (str uri)
|
||||
:io-threads io-threads
|
||||
:worker-threads worker-threads)
|
||||
|
||||
(let [timer (HashedWheelTimer.)
|
||||
resources (.. (DefaultClientResources/builder)
|
||||
@@ -158,147 +155,134 @@
|
||||
(timer ^Timer timer)
|
||||
(build))
|
||||
|
||||
redis-uri (RedisURI/create ^String uri)
|
||||
cfg (-> cfg
|
||||
(assoc ::resources resources)
|
||||
(assoc ::timer timer)
|
||||
(assoc ::redis-uri redis-uri))]
|
||||
redis-uri (RedisURI/create ^String (str uri))
|
||||
|
||||
(assoc cfg ::cache (create-cache cfg))))
|
||||
shutdown (fn [client conn]
|
||||
(ex/ignoring (.close ^StatefulConnection conn))
|
||||
(ex/ignoring (.close ^RedisClient client))
|
||||
(l/trc :hint "disconnect" :hid (hash client)))
|
||||
|
||||
(defn- shutdown-resources
|
||||
[{:keys [::resources ::cache ::timer]}]
|
||||
(cache/invalidate! cache)
|
||||
on-remove (fn [key val cause]
|
||||
(l/trace :hint "evict connection (cache)" :key key :reason cause)
|
||||
(some-> val d/close!))
|
||||
|
||||
(when resources
|
||||
(.shutdown ^ClientResources resources))
|
||||
|
||||
(when timer
|
||||
(.stop ^Timer timer)))
|
||||
|
||||
(defn connect*
|
||||
[{:keys [::resources ::redis-uri] :as state}
|
||||
{:keys [timeout codec type]
|
||||
:or {codec default-codec type :default}}]
|
||||
|
||||
(us/assert! ::resources resources)
|
||||
(let [client (RedisClient/create ^ClientResources resources ^RedisURI redis-uri)
|
||||
timeout (or timeout (::timeout state))
|
||||
conn (case type
|
||||
:default (.connect ^RedisClient client ^RedisCodec codec)
|
||||
:pubsub (.connectPubSub ^RedisClient client ^RedisCodec codec))]
|
||||
|
||||
(l/trc :hint "connect" :hid (hash client))
|
||||
(.setTimeout ^StatefulConnection conn ^Duration timeout)
|
||||
cache (cache/create :executor executor
|
||||
:on-remove on-remove
|
||||
:keepalive "5m")]
|
||||
(reify
|
||||
IDeref
|
||||
(deref [_] conn)
|
||||
|
||||
AutoCloseable
|
||||
java.lang.AutoCloseable
|
||||
(close [_]
|
||||
(ex/ignoring (.close ^StatefulConnection conn))
|
||||
(ex/ignoring (.shutdown ^RedisClient client))
|
||||
(l/trc :hint "disconnect" :hid (hash client))))))
|
||||
(ex/ignoring (cache/invalidate! cache))
|
||||
(ex/ignoring (.shutdown ^ClientResources resources))
|
||||
(ex/ignoring (.stop ^Timer timer)))
|
||||
|
||||
IRedis
|
||||
(-get-or-connect [this key options]
|
||||
(let [create (fn [_] (-connect this options))]
|
||||
(cache/get cache key create)))
|
||||
|
||||
(-connect [_ options]
|
||||
(let [timeout (or (:timeout options) (::timeout params))
|
||||
codec (get options :codec default-codec)
|
||||
type (get options :type :default)
|
||||
client (RedisClient/create ^ClientResources resources
|
||||
^RedisURI redis-uri)]
|
||||
|
||||
(l/trc :hint "connect" :hid (hash client))
|
||||
(if (= type :pubsub)
|
||||
(let [conn (.connectPubSub ^RedisClient client
|
||||
^RedisCodec codec)]
|
||||
(.setTimeout ^StatefulConnection conn
|
||||
^Duration timeout)
|
||||
(reify
|
||||
IPubSubConnection
|
||||
(add-listener [_ listener]
|
||||
(assert (instance? RedisPubSubListener listener) "expected listener instance")
|
||||
(.addListener ^StatefulRedisPubSubConnection conn
|
||||
^RedisPubSubListener listener))
|
||||
|
||||
(subscribe [_ topics]
|
||||
(try
|
||||
(let [topics (into-array String (map str topics))
|
||||
cmd (.sync ^StatefulRedisPubSubConnection conn)]
|
||||
(.subscribe ^RedisPubSubCommands cmd topics))
|
||||
(catch RedisCommandInterruptedException cause
|
||||
(throw (InterruptedException. (ex-message cause))))))
|
||||
|
||||
(unsubscribe [_ topics]
|
||||
(try
|
||||
(let [topics (into-array String (map str topics))
|
||||
cmd (.sync ^StatefulRedisPubSubConnection conn)]
|
||||
(.unsubscribe ^RedisPubSubCommands cmd topics))
|
||||
(catch RedisCommandInterruptedException cause
|
||||
(throw (InterruptedException. (ex-message cause))))))
|
||||
|
||||
|
||||
AutoCloseable
|
||||
(close [_] (shutdown client conn))))
|
||||
|
||||
(let [conn (.connect ^RedisClient client ^RedisCodec codec)]
|
||||
(.setTimeout ^StatefulConnection conn ^Duration timeout)
|
||||
(reify
|
||||
IConnection
|
||||
(publish [_ topic message]
|
||||
(assert (string? topic) "expected topic to be string")
|
||||
(assert (bytes? message) "expected message to be a byte array")
|
||||
|
||||
(let [pcomm (.async ^StatefulRedisConnection conn)]
|
||||
(.publish ^RedisAsyncCommands pcomm ^String topic ^bytes message)))
|
||||
|
||||
(rpush [_ key payload]
|
||||
(assert (or (and (vector? payload)
|
||||
(every? bytes? payload))
|
||||
(bytes? payload)))
|
||||
(try
|
||||
(let [cmd (.sync ^StatefulRedisConnection conn)
|
||||
data (if (vector? payload) payload [payload])
|
||||
vals (make-array (. Class (forName "[B")) (count data))]
|
||||
|
||||
(loop [i 0 xs (seq data)]
|
||||
(when xs
|
||||
(aset ^"[[B" vals i ^bytes (first xs))
|
||||
(recur (inc i) (next xs))))
|
||||
|
||||
(.rpush ^RedisCommands cmd
|
||||
^String key
|
||||
^"[[B" vals))
|
||||
|
||||
(catch RedisCommandInterruptedException cause
|
||||
(throw (InterruptedException. (ex-message cause))))))
|
||||
|
||||
(blpop [_ timeout keys]
|
||||
(try
|
||||
(let [keys (into-array Object (map str keys))
|
||||
cmd (.sync ^StatefulRedisConnection conn)
|
||||
timeout (/ (double (inst-ms timeout)) 1000.0)]
|
||||
(when-let [res (.blpop ^RedisCommands cmd
|
||||
^double timeout
|
||||
^"[Ljava.lang.String;" keys)]
|
||||
(MapEntry/create
|
||||
(.getKey ^KeyValue res)
|
||||
(.getValue ^KeyValue res))))
|
||||
(catch RedisCommandInterruptedException cause
|
||||
(throw (InterruptedException. (ex-message cause))))))
|
||||
|
||||
(eval [_ script]
|
||||
(assert (valid-script? script) "expected valid script")
|
||||
(impl-eval conn metrics script))
|
||||
|
||||
AutoCloseable
|
||||
(close [_] (shutdown client conn))))))))))
|
||||
|
||||
(defn connect
|
||||
[state & {:as opts}]
|
||||
(let [connection (connect* state opts)]
|
||||
(-> state
|
||||
(assoc ::connection connection)
|
||||
(dissoc ::cache)
|
||||
(vary-meta assoc `d/close! (fn [_] (d/close! connection))))))
|
||||
[instance & {:as opts}]
|
||||
(assert (satisfies? IRedis instance) "expected valid redis instance")
|
||||
(-connect instance opts))
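And a matching sketch for the default connection type; the topic string is hypothetical and the payload must already be a byte array:

;; hedged sketch, not part of the diff
(with-open [conn (connect redis :type :default :timeout (dt/duration "5s"))]
  (publish conn "penpot.notifications" (.getBytes "hello" "UTF-8")))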
|
||||
|
||||
(defn get-or-connect
|
||||
[{:keys [::cache] :as state} key options]
|
||||
(us/assert! ::redis state)
|
||||
(let [create (fn [_] (connect* state options))
|
||||
connection (cache/get cache key create)]
|
||||
(-> state
|
||||
(dissoc ::cache)
|
||||
(assoc ::connection connection))))
|
||||
|
||||
(defn add-listener!
|
||||
[{:keys [::connection] :as conn} listener]
|
||||
(us/assert! ::pubsub-connection connection)
|
||||
(us/assert! ::pubsub-listener listener)
|
||||
(.addListener ^StatefulRedisPubSubConnection @connection
|
||||
^RedisPubSubListener listener)
|
||||
conn)
|
||||
|
||||
(defn publish!
|
||||
[{:keys [::connection]} topic message]
|
||||
(us/assert! ::us/string topic)
|
||||
(us/assert! ::us/bytes message)
|
||||
(us/assert! ::default-connection connection)
|
||||
|
||||
(let [pcomm (.async ^StatefulRedisConnection @connection)]
|
||||
(.publish ^RedisAsyncCommands pcomm ^String topic ^bytes message)))
|
||||
|
||||
(defn subscribe!
|
||||
"Blocking operation, intended to be used on a thread/agent thread."
|
||||
[{:keys [::connection]} & topics]
|
||||
(us/assert! ::pubsub-connection connection)
|
||||
(try
|
||||
(let [topics (into-array String (map str topics))
|
||||
cmd (.sync ^StatefulRedisPubSubConnection @connection)]
|
||||
(.subscribe ^RedisPubSubCommands cmd topics))
|
||||
(catch RedisCommandInterruptedException cause
|
||||
(throw (InterruptedException. (ex-message cause))))))
|
||||
|
||||
(defn unsubscribe!
|
||||
"Blocking operation, intended to be used on a thread/agent thread."
|
||||
[{:keys [::connection]} & topics]
|
||||
(us/assert! ::pubsub-connection connection)
|
||||
(try
|
||||
(let [topics (into-array String (map str topics))
|
||||
cmd (.sync ^StatefulRedisPubSubConnection @connection)]
|
||||
(.unsubscribe ^RedisPubSubCommands cmd topics))
|
||||
(catch RedisCommandInterruptedException cause
|
||||
(throw (InterruptedException. (ex-message cause))))))
|
||||
|
||||
(defn rpush!
|
||||
[{:keys [::connection]} key payload]
|
||||
(us/assert! ::default-connection connection)
|
||||
(us/assert! (or (and (vector? payload)
|
||||
(every? bytes? payload))
|
||||
(bytes? payload)))
|
||||
(try
|
||||
(let [cmd (.sync ^StatefulRedisConnection @connection)
|
||||
data (if (vector? payload) payload [payload])
|
||||
vals (make-array (. Class (forName "[B")) (count data))]
|
||||
|
||||
(loop [i 0 xs (seq data)]
|
||||
(when xs
|
||||
(aset ^"[[B" vals i ^bytes (first xs))
|
||||
(recur (inc i) (next xs))))
|
||||
|
||||
(.rpush ^RedisCommands cmd
|
||||
^String key
|
||||
^"[[B" vals))
|
||||
|
||||
(catch RedisCommandInterruptedException cause
|
||||
(throw (InterruptedException. (ex-message cause))))))
|
||||
|
||||
(defn blpop!
|
||||
[{:keys [::connection]} timeout & keys]
|
||||
(us/assert! ::default-connection connection)
|
||||
(try
|
||||
(let [keys (into-array Object (map str keys))
|
||||
cmd (.sync ^StatefulRedisConnection @connection)
|
||||
timeout (/ (double (inst-ms timeout)) 1000.0)]
|
||||
(when-let [res (.blpop ^RedisCommands cmd
|
||||
^double timeout
|
||||
^"[Ljava.lang.String;" keys)]
|
||||
(MapEntry/create
|
||||
(.getKey ^KeyValue res)
|
||||
(.getValue ^KeyValue res))))
|
||||
(catch RedisCommandInterruptedException cause
|
||||
(throw (InterruptedException. (ex-message cause))))))
|
||||
|
||||
(defn open?
|
||||
[{:keys [::connection]}]
|
||||
(us/assert! ::pubsub-connection connection)
|
||||
(.isOpen ^StatefulConnection @connection))
|
||||
[instance key & {:as opts}]
|
||||
(assert (satisfies? IRedis instance) "expected valid redis instance")
|
||||
(-get-or-connect instance key opts))
|
||||
|
||||
(defn pubsub-listener
|
||||
[& {:keys [on-message on-subscribe on-unsubscribe]}]
|
||||
@@ -328,26 +312,10 @@
|
||||
(on-unsubscribe nil topic count)))))
|
||||
|
||||
(def ^:private scripts-cache (atom {}))
|
||||
(def noop-fn (constantly nil))
|
||||
|
||||
(s/def ::rscript/name qualified-keyword?)
|
||||
(s/def ::rscript/path ::us/not-empty-string)
|
||||
(s/def ::rscript/keys (s/every any? :kind vector?))
|
||||
(s/def ::rscript/vals (s/every any? :kind vector?))
|
||||
|
||||
(s/def ::rscript/script
|
||||
(s/keys :req [::rscript/name
|
||||
::rscript/path]
|
||||
:opt [::rscript/keys
|
||||
::rscript/vals]))
|
||||
|
||||
(defn eval!
|
||||
[{:keys [::mtx/metrics ::connection] :as state} script]
|
||||
(us/assert! ::redis state)
|
||||
(us/assert! ::default-connection connection)
|
||||
(us/assert! ::rscript/script script)
|
||||
|
||||
(let [cmd (.async ^StatefulRedisConnection @connection)
|
||||
(defn- impl-eval
|
||||
[^StatefulRedisConnection connection metrics script]
|
||||
(let [cmd (.async ^StatefulRedisConnection connection)
|
||||
keys (into-array String (map str (::rscript/keys script)))
|
||||
vals (into-array String (map str (::rscript/vals script)))
|
||||
sname (::rscript/name script)]
|
||||
|
||||
@@ -36,8 +36,8 @@
|
||||
[cuerdas.core :as str]
|
||||
[integrant.core :as ig]
|
||||
[promesa.core :as p]
|
||||
[ring.request :as rreq]
|
||||
[ring.response :as rres]))
|
||||
[yetti.request :as yreq]
|
||||
[yetti.response :as yres]))
|
||||
|
||||
(s/def ::profile-id ::us/uuid)
|
||||
|
||||
@@ -64,16 +64,16 @@
|
||||
response (if (fn? result)
|
||||
(result request)
|
||||
(let [result (rph/unwrap result)]
|
||||
{::rres/status (::http/status mdata 200)
|
||||
::rres/headers (::http/headers mdata {})
|
||||
::rres/body result}))]
|
||||
{::yres/status (::http/status mdata 200)
|
||||
::yres/headers (::http/headers mdata {})
|
||||
::yres/body result}))]
|
||||
(-> response
|
||||
(handle-response-transformation request mdata)
|
||||
(handle-before-comple-hook mdata))))
|
||||
|
||||
(defn get-external-session-id
|
||||
[request]
|
||||
(when-let [session-id (rreq/get-header request "x-external-session-id")]
|
||||
(when-let [session-id (yreq/get-header request "x-external-session-id")]
|
||||
(when-not (or (> (count session-id) 256)
|
||||
(= session-id "null")
|
||||
(str/blank? session-id))
|
||||
@@ -81,7 +81,7 @@
|
||||
|
||||
(defn- get-external-event-origin
|
||||
[request]
|
||||
(when-let [origin (rreq/get-header request "x-event-origin")]
|
||||
(when-let [origin (yreq/get-header request "x-event-origin")]
|
||||
(when-not (or (> (count origin) 256)
|
||||
(= origin "null")
|
||||
(str/blank? origin))
|
||||
@@ -92,7 +92,7 @@
|
||||
internal async flow into ring async flow."
|
||||
[methods {:keys [params path-params method] :as request}]
|
||||
(let [handler-name (:type path-params)
|
||||
etag (rreq/get-header request "if-none-match")
|
||||
etag (yreq/get-header request "if-none-match")
|
||||
profile-id (or (::session/profile-id request)
|
||||
(::actoken/profile-id request))
|
||||
|
||||
@@ -250,39 +250,49 @@
|
||||
'app.rpc.commands.projects
|
||||
'app.rpc.commands.search
|
||||
'app.rpc.commands.teams
|
||||
'app.rpc.commands.teams-invitations
|
||||
'app.rpc.commands.verify-token
|
||||
'app.rpc.commands.viewer
|
||||
'app.rpc.commands.webhooks)
|
||||
(map (partial process-method cfg))
|
||||
(into {}))))
|
||||
|
||||
(defmethod ig/pre-init-spec ::methods [_]
|
||||
(s/keys :req [::session/manager
|
||||
::http.client/client
|
||||
::db/pool
|
||||
::mbus/msgbus
|
||||
::ldap/provider
|
||||
::sto/storage
|
||||
::mtx/metrics
|
||||
::setup/props]
|
||||
:opt [::climit
|
||||
::rlimit]))
|
||||
(def ^:private schema:methods-params
|
||||
[:map {:title "methods-params"}
|
||||
::session/manager
|
||||
::http.client/client
|
||||
::db/pool
|
||||
::mbus/msgbus
|
||||
::sto/storage
|
||||
::mtx/metrics
|
||||
[::ldap/provider [:maybe ::ldap/provider]]
|
||||
[::climit [:maybe ::climit]]
|
||||
[::rlimit [:maybe ::rlimit]]
|
||||
::setup/props])
|
||||
|
||||
(defmethod ig/assert-key ::methods
|
||||
[_ params]
|
||||
(assert (sm/check schema:methods-params params)))
|
||||
|
||||
(defmethod ig/init-key ::methods
|
||||
[_ cfg]
|
||||
(let [cfg (d/without-nils cfg)]
|
||||
(resolve-command-methods cfg)))
|
||||
|
||||
(s/def ::methods
|
||||
(s/map-of keyword? (s/tuple map? fn?)))
|
||||
(def ^:private schema:methods
|
||||
[:map-of :keyword [:tuple :map ::sm/fn]])
|
||||
|
||||
(s/def ::routes vector?)
|
||||
(sm/register! ::methods schema:methods)
|
||||
|
||||
(defmethod ig/pre-init-spec ::routes [_]
|
||||
(s/keys :req [::methods
|
||||
::db/pool
|
||||
::setup/props
|
||||
::session/manager]))
|
||||
(def ^:private valid-methods?
|
||||
(sm/validator schema:methods))
|
||||
|
||||
(defmethod ig/assert-key ::routes
|
||||
[_ params]
|
||||
(assert (db/pool? (::db/pool params)) "expect valid database pool")
|
||||
(assert (some? (::setup/props params)))
|
||||
(assert (session/manager? (::session/manager params)) "expect valid session manager")
|
||||
(assert (valid-methods? (::methods params)) "expect valid methods map"))
|
||||
|
||||
(defmethod ig/init-key ::routes
|
||||
[_ {:keys [::methods] :as cfg}]
|
||||
|
||||
@@ -10,18 +10,15 @@
|
||||
(:require
|
||||
[app.common.exceptions :as ex]
|
||||
[app.common.logging :as l]
|
||||
[app.common.spec :as us]
|
||||
[app.config :as cf]
|
||||
[app.common.schema :as sm]
|
||||
[app.metrics :as mtx]
|
||||
[app.rpc :as-alias rpc]
|
||||
[app.rpc.climit.config :as-alias config]
|
||||
[app.util.cache :as cache]
|
||||
[app.util.services :as-alias sv]
|
||||
[app.util.time :as dt]
|
||||
[app.worker :as-alias wrk]
|
||||
[clojure.edn :as edn]
|
||||
[clojure.set :as set]
|
||||
[clojure.spec.alpha :as s]
|
||||
[datoteka.fs :as fs]
|
||||
[integrant.core :as ig]
|
||||
[promesa.exec :as px]
|
||||
@@ -32,6 +29,62 @@
|
||||
|
||||
(set! *warn-on-reflection* true)
|
||||
|
||||
(declare ^:private impl-invoke)
|
||||
(declare ^:private id->str)
|
||||
(declare ^:private create-cache)
|
||||
|
||||
(defprotocol IConcurrencyLimiter
|
||||
(^:private get-config [_ limit-id] "get a config for a key")
|
||||
(^:private invoke [_ config handler] "invoke a handler for a config"))
|
||||
|
||||
(sm/register!
|
||||
{:type ::rpc/climit
|
||||
:pred #(satisfies? IConcurrencyLimiter %)})
|
||||
|
||||
(def ^:private schema:config
|
||||
[:map-of :keyword
|
||||
[:map
|
||||
[::id {:optional true} :keyword]
|
||||
[::key {:optional true} :any]
|
||||
[::label {:optional true} ::sm/text]
|
||||
[::params {:optional true} :map]
|
||||
[::permits {:optional true} ::sm/int]
|
||||
[::queue {:optional true} ::sm/int]
|
||||
[::timeout {:optional true} ::sm/int]]])
|
||||
|
||||
(def ^:private check-config
|
||||
(sm/check-fn schema:config))
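As a hedged illustration, the EDN file referenced by ::config could look roughly like this; the queue ids and numbers are hypothetical, using the :permits/:queue/:timeout keys that create-limiter reads below:

;; hedged example of an rpc-climit config file
{:update-file         {:permits 20 :queue 1000 :timeout 60000}
 :process-font        {:permits 4}
 :submit-audit-events {:permits 1 :queue 3}}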
|
||||
|
||||
(def ^:private schema:climit-params
|
||||
[:map
|
||||
::mtx/metrics
|
||||
::wrk/executor
|
||||
[::enabled {:optional true} ::sm/boolean]
|
||||
[::config {:optional true} ::fs/path]])
|
||||
|
||||
(defmethod ig/assert-key ::rpc/climit
|
||||
[_ params]
|
||||
(assert (sm/valid? schema:climit-params params)))
|
||||
|
||||
(defmethod ig/init-key ::rpc/climit
|
||||
[_ {:keys [::config ::enabled ::mtx/metrics] :as cfg}]
|
||||
(when enabled
|
||||
(when-let [params (some->> config slurp edn/read-string check-config)]
|
||||
(l/inf :hint "initializing concurrency limit" :config (str config))
|
||||
(let [params (reduce-kv (fn [result k v]
|
||||
(assoc result k (assoc v ::id k)))
|
||||
params
|
||||
params)
|
||||
cache (create-cache cfg)]
|
||||
|
||||
(reify
|
||||
IConcurrencyLimiter
|
||||
(get-config [_ id]
|
||||
(get params id))
|
||||
|
||||
(invoke [_ config handler]
|
||||
(impl-invoke metrics cache config handler)))))))
|
||||
|
||||
(defn- id->str
|
||||
([id]
|
||||
(-> (str id)
|
||||
@@ -41,59 +94,23 @@
|
||||
(str (-> (str id) (subs 1)) "/" key)
|
||||
(id->str id))))
|
||||
|
||||
(defn- create-cache
|
||||
[{:keys [::wrk/executor]}]
|
||||
(letfn [(on-remove [key _ cause]
|
||||
(let [[id skey] key]
|
||||
(l/trc :hint "disposed" :id (id->str id skey) :reason (str cause))))]
|
||||
(cache/create :executor executor
|
||||
:on-remove on-remove
|
||||
:keepalive "5m")))
|
||||
|
||||
(s/def ::config/permits ::us/integer)
|
||||
(s/def ::config/queue ::us/integer)
|
||||
(s/def ::config/timeout ::us/integer)
|
||||
(s/def ::config
|
||||
(s/map-of keyword?
|
||||
(s/keys :opt-un [::config/permits
|
||||
::config/queue
|
||||
::config/timeout])))
|
||||
|
||||
(defmethod ig/prep-key ::rpc/climit
|
||||
[_ cfg]
|
||||
(assoc cfg ::path (cf/get :rpc-climit-config)))
|
||||
|
||||
(s/def ::path ::fs/path)
|
||||
(defmethod ig/pre-init-spec ::rpc/climit [_]
|
||||
(s/keys :req [::mtx/metrics ::wrk/executor ::path]))
|
||||
|
||||
(defmethod ig/init-key ::rpc/climit
|
||||
[_ {:keys [::path ::mtx/metrics] :as cfg}]
|
||||
(when (contains? cf/flags :rpc-climit)
|
||||
(when-let [params (some->> path slurp edn/read-string)]
|
||||
(l/inf :hint "initializing concurrency limit" :config (str path))
|
||||
(us/verify! ::config params)
|
||||
{::cache (create-cache cfg)
|
||||
::config params
|
||||
::mtx/metrics metrics})))
|
||||
|
||||
(s/def ::cache cache/cache?)
|
||||
(s/def ::instance
|
||||
(s/keys :req [::cache ::config]))
|
||||
|
||||
(s/def ::rpc/climit
|
||||
(s/nilable ::instance))
|
||||
|
||||
(defn- create-limiter
|
||||
[config [id skey]]
|
||||
(l/trc :hint "created" :id (id->str id skey))
|
||||
[config id]
|
||||
(l/trc :hint "created" :id id)
|
||||
(pbh/create :permits (or (:permits config) (:concurrency config))
|
||||
:queue (or (:queue config) (:queue-size config))
|
||||
:timeout (:timeout config)
|
||||
:type :semaphore))
|
||||
|
||||
(defn- create-cache
|
||||
[{:keys [::wrk/executor]}]
|
||||
(letfn [(on-remove [id _ cause]
|
||||
(l/trc :hint "disposed" :id id :reason (str cause)))]
|
||||
(cache/create :executor executor
|
||||
:on-remove on-remove
|
||||
:keepalive "5m")))
|
||||
|
||||
(defn measure!
|
||||
(defn- measure
|
||||
[metrics mlabels stats elapsed]
|
||||
(let [mpermits (:max-permits stats)
|
||||
permits (:permits stats)
|
||||
@@ -117,8 +134,14 @@
|
||||
:val (inst-ms elapsed)
|
||||
:labels mlabels))))
|
||||
|
||||
(defn log!
|
||||
[action req-id stats limit-id limit-label params elapsed]
|
||||
(defn- prepare-params-for-debug
|
||||
[params]
|
||||
(-> (select-keys params [::rpc/profile-id :file-id :profile-id])
|
||||
(set/rename-keys {::rpc/profile-id :profile-id})
|
||||
(update-vals str)))
|
||||
|
||||
(defn- log
|
||||
[action req-id stats limit-id limit-label limit-params elapsed]
|
||||
(let [mpermits (:max-permits stats)
|
||||
queue (:queue stats)
|
||||
queue (- queue mpermits)
|
||||
@@ -132,37 +155,42 @@
|
||||
:label limit-label
|
||||
:queue queue
|
||||
:elapsed (some-> elapsed dt/format-duration)
|
||||
:params (-> (select-keys params [::rpc/profile-id :file-id :profile-id])
|
||||
(set/rename-keys {::rpc/profile-id :profile-id})
|
||||
(update-vals str)))))
|
||||
:params @limit-params)))
|
||||
|
||||
(def ^:private idseq (AtomicLong. 0))
|
||||
|
||||
(defn- invoke
|
||||
[limiter metrics limit-id limit-key limit-label handler params]
|
||||
(let [tpoint (dt/tpoint)
|
||||
mlabels (into-array String [(id->str limit-id)])
|
||||
limit-id (id->str limit-id limit-key)
|
||||
stats (pbh/get-stats limiter)
|
||||
req-id (.incrementAndGet ^AtomicLong idseq)]
|
||||
(defn- impl-invoke
|
||||
[metrics cache config handler]
|
||||
(let [limit-id (::id config)
|
||||
limit-key (::key config)
|
||||
limit-label (::label config)
|
||||
limit-params (delay
|
||||
(prepare-params-for-debug
|
||||
(::params config)))
|
||||
|
||||
mlabels (into-array String [(id->str limit-id)])
|
||||
limit-id (id->str limit-id limit-key)
|
||||
limiter (cache/get cache limit-id (partial create-limiter config))
|
||||
tpoint (dt/tpoint)
|
||||
req-id (.incrementAndGet ^AtomicLong idseq)]
|
||||
(try
|
||||
(measure! metrics mlabels stats nil)
|
||||
(log! "enqueued" req-id stats limit-id limit-label params nil)
|
||||
(let [stats (pbh/get-stats limiter)]
|
||||
(measure metrics mlabels stats nil)
|
||||
(log "enqueued" req-id stats limit-id limit-label limit-params nil))
|
||||
|
||||
(px/invoke! limiter (fn []
|
||||
(let [elapsed (tpoint)
|
||||
stats (pbh/get-stats limiter)]
|
||||
|
||||
(measure! metrics mlabels stats elapsed)
|
||||
(log! "acquired" req-id stats limit-id limit-label params elapsed)
|
||||
|
||||
(handler params))))
|
||||
(measure metrics mlabels stats elapsed)
|
||||
(log "acquired" req-id stats limit-id limit-label limit-params elapsed)
|
||||
(handler))))
|
||||
|
||||
(catch ExceptionInfo cause
|
||||
(let [{:keys [type code]} (ex-data cause)]
|
||||
(if (= :bulkhead-error type)
|
||||
(let [elapsed (tpoint)]
|
||||
(log! "rejected" req-id stats limit-id limit-label params elapsed)
|
||||
(let [elapsed (tpoint)
|
||||
stats (pbh/get-stats limiter)]
|
||||
(log "rejected" req-id stats limit-id limit-label limit-params elapsed)
|
||||
(ex/raise :type :concurrency-limit
|
||||
:code code
|
||||
:hint "concurrency limit reached"
|
||||
@@ -173,8 +201,8 @@
|
||||
(let [elapsed (tpoint)
|
||||
stats (pbh/get-stats limiter)]
|
||||
|
||||
(measure! metrics mlabels stats nil)
|
||||
(log! "finished" req-id stats limit-id limit-label params elapsed))))))
|
||||
(measure metrics mlabels stats nil)
|
||||
(log "finished" req-id stats limit-id limit-label limit-params elapsed))))))
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; MIDDLEWARE
|
||||
@@ -204,71 +232,70 @@
|
||||
(throw (IllegalArgumentException. "unable to normalize limit")))))
|
||||
|
||||
(defn wrap
|
||||
[{:keys [::rpc/climit ::mtx/metrics]} handler mdata]
|
||||
(let [cache (::cache climit)
|
||||
config (::config climit)
|
||||
label (::sv/name mdata)]
|
||||
[cfg handler {label ::sv/name :as mdata}]
|
||||
(if-let [climit (::rpc/climit cfg)]
|
||||
(reduce (fn [handler [limit-id key-fn]]
|
||||
(if-let [config (get-config climit limit-id)]
|
||||
(let [key-fn (or key-fn noop-fn)]
|
||||
(l/trc :hint "instrumenting method"
|
||||
:method label
|
||||
:limit (id->str limit-id)
|
||||
:timeout (:timeout config)
|
||||
:permits (:permits config)
|
||||
:queue (:queue config)
|
||||
:keyed (not= key-fn nil))
|
||||
|
||||
(if climit
|
||||
(reduce (fn [handler [limit-id key-fn]]
|
||||
(if-let [config (get config limit-id)]
|
||||
(let [key-fn (or key-fn noop-fn)]
|
||||
(l/trc :hint "instrumenting method"
|
||||
:method label
|
||||
:limit (id->str limit-id)
|
||||
:timeout (:timeout config)
|
||||
:permits (:permits config)
|
||||
:queue (:queue config)
|
||||
:keyed (not= key-fn noop-fn))
|
||||
(if (and (= key-fn ::rpc/profile-id)
|
||||
(false? (::rpc/auth mdata true)))
|
||||
|
||||
(if (and (= key-fn ::rpc/profile-id)
|
||||
(false? (::rpc/auth mdata true)))
|
||||
;; We don't enforce by-profile limit on methods that does
|
||||
;; not require authentication
|
||||
handler
|
||||
|
||||
;; We don't enforce by-profile limit on methods that does
|
||||
;; not require authentication
|
||||
handler
|
||||
(fn [cfg params]
|
||||
(let [config (-> config
|
||||
(assoc ::key (key-fn params))
|
||||
(assoc ::label label)
|
||||
;; NOTE: only used for debugging output
|
||||
(assoc ::params params))]
|
||||
(invoke climit config (partial handler cfg params))))))
|
||||
|
||||
(fn [cfg params]
|
||||
(let [limit-key (key-fn params)
|
||||
cache-key [limit-id limit-key]
|
||||
limiter (cache/get cache cache-key (partial create-limiter config))
|
||||
handler (partial handler cfg)]
|
||||
(invoke limiter metrics limit-id limit-key label handler params)))))
|
||||
(do
|
||||
(l/wrn :hint "no config found for specified queue" :id (id->str limit-id))
|
||||
handler)))
|
||||
handler
|
||||
(concat global-limits (get-limits mdata)))
|
||||
|
||||
(do
|
||||
(l/wrn :hint "no config found for specified queue" :id (id->str limit-id))
|
||||
handler)))
|
||||
|
||||
handler
|
||||
(concat global-limits (get-limits mdata)))
|
||||
handler)))
|
||||
handler))
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; PUBLIC API
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
(defn- build-exec-chain
|
||||
[{:keys [::label ::rpc/climit ::mtx/metrics] :as cfg} f]
|
||||
(let [config (get climit ::config)
|
||||
cache (get climit ::cache)]
|
||||
(reduce (fn [handler [limit-id limit-key :as ckey]]
|
||||
(if-let [config (get config limit-id)]
|
||||
[{:keys [::label ::rpc/climit] :as cfg} f]
|
||||
(reduce (fn [handler [limit-id limit-key]]
|
||||
(if-let [config (get-config climit limit-id)]
|
||||
(let [config (-> config
|
||||
(assoc ::key limit-key)
|
||||
(assoc ::label label))]
|
||||
(fn [cfg params]
|
||||
(let [limiter (cache/get cache ckey (partial create-limiter config))
|
||||
handler (partial handler cfg)]
|
||||
(invoke limiter metrics limit-id limit-key label handler params)))
|
||||
(do
|
||||
(l/wrn :hint "config not found" :label label :id limit-id)
|
||||
f)))
|
||||
f
|
||||
(get-limits cfg))))
|
||||
(let [config (assoc config ::params params)]
|
||||
(invoke climit config (partial handler cfg params)))))
|
||||
(do
|
||||
(l/wrn :hint "config not found" :label label :id limit-id)
|
||||
f)))
|
||||
f
|
||||
(get-limits cfg)))
|
||||
|
||||
(defn invoke!
|
||||
"Run a function in context of climit.
|
||||
Intended to be used in virtual threads."
|
||||
[{:keys [::executor] :as cfg} f params]
|
||||
(let [f (if (some? executor)
|
||||
(fn [cfg params] (px/await! (px/submit! executor (fn [] (f cfg params)))))
|
||||
f)
|
||||
f (build-exec-chain cfg f)]
|
||||
[{:keys [::executor ::rpc/climit] :as cfg} f params]
|
||||
(let [f (if climit
|
||||
(let [f (if (some? executor)
|
||||
(fn [cfg params] (px/await! (px/submit! executor (fn [] (f cfg params)))))
|
||||
f)]
|
||||
(build-exec-chain cfg f))
|
||||
f)]
|
||||
(f cfg params)))
|
||||
|
||||
@@ -273,7 +273,8 @@
|
||||
(merge {:viewed-tutorial? false
|
||||
:viewed-walkthrough? false
|
||||
:nudge {:big 10 :small 1}
|
||||
:v2-info-shown true})
|
||||
:v2-info-shown true
|
||||
:release-notes-viewed (:main cf/version)})
|
||||
(db/tjson))
|
||||
|
||||
password (or (:password params) "!")
|
||||
@@ -383,7 +384,9 @@
|
||||
invitation (when-let [token (:invitation-token params)]
|
||||
(tokens/verify (::setup/props cfg) {:token token :iss :team-invitation}))
|
||||
|
||||
props (audit/profile->props profile)
|
||||
props (-> (audit/profile->props profile)
|
||||
(assoc :from-invitation (some? invitation)))
|
||||
|
||||
|
||||
create-welcome-file-when-needed
|
||||
(fn []
|
||||
|
||||
@@ -8,6 +8,7 @@
|
||||
(:refer-clojure :exclude [assert])
|
||||
(:require
|
||||
[app.binfile.v1 :as bf.v1]
|
||||
[app.binfile.v3 :as bf.v3]
|
||||
[app.common.logging :as l]
|
||||
[app.common.schema :as sm]
|
||||
[app.db :as db]
|
||||
@@ -24,7 +25,7 @@
|
||||
[app.util.time :as dt]
|
||||
[app.worker :as-alias wrk]
|
||||
[promesa.exec :as px]
|
||||
[ring.response :as rres]))
|
||||
[yetti.response :as yres]))
|
||||
|
||||
(set! *warn-on-reflection* true)
|
||||
|
||||
@@ -35,51 +36,103 @@
|
||||
[:map {:title "export-binfile"}
|
||||
[:name [:string {:max 250}]]
|
||||
[:file-id ::sm/uuid]
|
||||
[:include-libraries :boolean]
|
||||
[:embed-assets :boolean]])
|
||||
[:version {:optional true} ::sm/int]
|
||||
[:include-libraries ::sm/boolean]
|
||||
[:embed-assets ::sm/boolean]])
|
||||
|
||||
(defn stream-export-v1
|
||||
[cfg {:keys [file-id include-libraries embed-assets] :as params}]
|
||||
(yres/stream-body
|
||||
(fn [_ output-stream]
|
||||
(try
|
||||
(-> cfg
|
||||
(assoc ::bf.v1/ids #{file-id})
|
||||
(assoc ::bf.v1/embed-assets embed-assets)
|
||||
(assoc ::bf.v1/include-libraries include-libraries)
|
||||
(bf.v1/export-files! output-stream))
|
||||
(catch Throwable cause
|
||||
(l/err :hint "exception on exporting file"
|
||||
:file-id (str file-id)
|
||||
:cause cause))))))
|
||||
|
||||
(defn stream-export-v3
|
||||
[cfg {:keys [file-id include-libraries embed-assets] :as params}]
|
||||
(yres/stream-body
|
||||
(fn [_ output-stream]
|
||||
(try
|
||||
(-> cfg
|
||||
(assoc ::bf.v3/ids #{file-id})
|
||||
(assoc ::bf.v3/embed-assets embed-assets)
|
||||
(assoc ::bf.v3/include-libraries include-libraries)
|
||||
(bf.v3/export-files! output-stream))
|
||||
(catch Throwable cause
|
||||
(l/err :hint "exception on exporting file"
|
||||
:file-id (str file-id)
|
||||
:cause cause))))))
|
||||
|
||||
(sv/defmethod ::export-binfile
|
||||
"Export a penpot file in a binary format."
|
||||
{::doc/added "1.15"
|
||||
::webhooks/event? true
|
||||
::sm/result schema:export-binfile}
|
||||
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id file-id include-libraries embed-assets] :as params}]
|
||||
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id version file-id] :as params}]
|
||||
(files/check-read-permissions! pool profile-id file-id)
|
||||
(fn [_]
|
||||
{::rres/status 200
|
||||
::rres/headers {"content-type" "application/octet-stream"}
|
||||
::rres/body (reify rres/StreamableResponseBody
|
||||
(-write-body-to-stream [_ _ output-stream]
|
||||
(try
|
||||
(-> cfg
|
||||
(assoc ::bf.v1/ids #{file-id})
|
||||
(assoc ::bf.v1/embed-assets embed-assets)
|
||||
(assoc ::bf.v1/include-libraries include-libraries)
|
||||
(bf.v1/export-files! output-stream))
|
||||
(catch Throwable cause
|
||||
(l/err :hint "exception on exporting file"
|
||||
:file-id (str file-id)
|
||||
:cause cause)))))}))
|
||||
(let [version (or version 1)
|
||||
body (case (int version)
|
||||
1 (stream-export-v1 cfg params)
|
||||
2 (throw (ex-info "not-implemented" {}))
|
||||
3 (stream-export-v3 cfg params))]
|
||||
|
||||
{::yres/status 200
|
||||
::yres/headers {"content-type" "application/octet-stream"}
|
||||
::yres/body body})))
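A hedged sketch of the parameters a client would now send to ::export-binfile; the uuid is hypothetical and :version defaults to 1 when omitted:

;; hedged sketch, not part of the diff
{:name "my-file"
 :file-id #uuid "2c6a32c8-0000-0000-0000-000000000000"
 :version 3                 ;; 1 -> binfile v1 stream, 3 -> binfile v3 stream
 :include-libraries true
 :embed-assets false}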
|
||||
|
||||
;; --- Command: import-binfile
|
||||
|
||||
(defn- import-binfile-v1
|
||||
[{:keys [::wrk/executor] :as cfg} {:keys [project-id profile-id name file]}]
|
||||
(let [cfg (-> cfg
|
||||
(assoc ::bf.v1/project-id project-id)
|
||||
(assoc ::bf.v1/profile-id profile-id)
|
||||
(assoc ::bf.v1/name name)
|
||||
(assoc ::bf.v1/input (:path file)))]
|
||||
|
||||
;; NOTE: the import process performs some operations that are
;; not very friendly with virtual threads, so to avoid
;; unexpected blocking of other concurrent operations we
;; dispatch that operation to a dedicated executor.
(px/invoke! executor (partial bf.v1/import-files! cfg))))
|
||||
|
||||
(defn- import-binfile-v3
|
||||
[{:keys [::wrk/executor] :as cfg} {:keys [project-id profile-id name file]}]
|
||||
(let [cfg (-> cfg
|
||||
(assoc ::bf.v3/project-id project-id)
|
||||
(assoc ::bf.v3/profile-id profile-id)
|
||||
(assoc ::bf.v3/name name)
|
||||
(assoc ::bf.v3/input (:path file)))]
|
||||
;; NOTE: the import process performs some operations that are
;; not very friendly with virtual threads, so to avoid
;; unexpected blocking of other concurrent operations we
;; dispatch that operation to a dedicated executor.
(px/invoke! executor (partial bf.v3/import-files! cfg))))
|
||||
|
||||
(defn- import-binfile
|
||||
[{:keys [::wrk/executor ::bf.v1/project-id ::db/pool] :as cfg} input]
|
||||
;; NOTE: the importation process performs some operations that
|
||||
;; are not very friendly with virtual threads, and for avoid
|
||||
;; unexpected blocking of other concurrent operations we
|
||||
;; dispatch that operation to a dedicated executor.
|
||||
(let [result (px/invoke! executor (partial bf.v1/import-files! cfg input))]
|
||||
[{:keys [::db/pool] :as cfg} {:keys [project-id version] :as params}]
|
||||
(let [result (case (int version)
|
||||
1 (import-binfile-v1 cfg params)
|
||||
3 (import-binfile-v3 cfg params))]
|
||||
(db/update! pool :project
|
||||
{:modified-at (dt/now)}
|
||||
{:id project-id})
|
||||
result))
|
||||
|
||||
(def ^:private
|
||||
schema:import-binfile
|
||||
(def ^:private schema:import-binfile
|
||||
[:map {:title "import-binfile"}
|
||||
[:name [:string {:max 250}]]
|
||||
[:name [:or [:string {:max 250}]
|
||||
[:map-of ::sm/uuid [:string {:max 250}]]]]
|
||||
[:project-id ::sm/uuid]
|
||||
[:version {:optional true} ::sm/int]
|
||||
[:file ::media/upload]])
|
||||
|
||||
(sv/defmethod ::import-binfile
|
||||
@@ -88,12 +141,11 @@
|
||||
::webhooks/event? true
|
||||
::sse/stream? true
|
||||
::sm/params schema:import-binfile}
|
||||
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id name project-id file] :as params}]
|
||||
(projects/check-read-permissions! pool profile-id project-id)
|
||||
(let [cfg (-> cfg
|
||||
(assoc ::bf.v1/project-id project-id)
|
||||
(assoc ::bf.v1/profile-id profile-id)
|
||||
(assoc ::bf.v1/name name))]
|
||||
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id project-id version] :as params}]
|
||||
(projects/check-edition-permissions! pool profile-id project-id)
|
||||
(let [params (-> params
|
||||
(assoc :profile-id profile-id)
|
||||
(assoc :version (or version 1)))]
|
||||
(with-meta
|
||||
(sse/response #(import-binfile cfg (:path file)))
|
||||
(sse/response (partial import-binfile cfg params))
|
||||
{::audit/props {:file nil}})))
|
||||
|
||||
@@ -29,10 +29,11 @@
|
||||
;; --- GENERAL PURPOSE INTERNAL HELPERS
|
||||
|
||||
(defn- decode-row
|
||||
[{:keys [participants position] :as row}]
|
||||
[{:keys [participants position mentions] :as row}]
|
||||
(cond-> row
|
||||
(db/pgpoint? position) (assoc :position (db/decode-pgpoint position))
|
||||
(db/pgobject? participants) (assoc :participants (db/decode-transit-pgobject participants))))
|
||||
(db/pgobject? participants) (assoc :participants (db/decode-transit-pgobject participants))
|
||||
(db/pgarray? mentions) (assoc :mentions (db/decode-pgarray mentions #{}))))
|
||||
|
||||
(def xf-decode-row
|
||||
(map decode-row))
|
||||
@@ -461,8 +462,9 @@
|
||||
:thread-id thread-id
|
||||
:owner-id profile-id
|
||||
:content content})
|
||||
props {:file-id file-id
|
||||
:share-id nil}]
|
||||
comment (decode-row comment)
|
||||
props {:file-id file-id
|
||||
:share-id nil}]
|
||||
|
||||
;; Update thread modified-at attribute and assoc the current
|
||||
;; profile to the participant set.
|
||||
|
||||
@@ -17,7 +17,7 @@
|
||||
[app.rpc.doc :as-alias doc]
|
||||
[app.util.services :as sv]))
|
||||
|
||||
(declare ^:private send-feedback!)
|
||||
(declare ^:private send-user-feedback!)
|
||||
|
||||
(def ^:private schema:send-user-feedback
|
||||
[:map {:title "send-user-feedback"}
|
||||
@@ -34,14 +34,16 @@
|
||||
:hint "feedback not enabled"))
|
||||
|
||||
(let [profile (profile/get-profile pool profile-id)]
|
||||
(send-feedback! pool profile params)
|
||||
(send-user-feedback! pool profile params)
|
||||
nil))
|
||||
|
||||
(defn- send-feedback!
|
||||
(defn- send-user-feedback!
|
||||
[pool profile params]
|
||||
(let [dest (cf/get :feedback-destination)]
|
||||
(let [dest (or (cf/get :user-feedback-destination)
|
||||
;; LEGACY
|
||||
(cf/get :feedback-destination))]
|
||||
(eml/send! {::eml/conn pool
|
||||
::eml/factory eml/feedback
|
||||
::eml/factory eml/user-feedback
|
||||
:from dest
|
||||
:to dest
|
||||
:profile profile
|
||||
|
||||
@@ -17,6 +17,7 @@
|
||||
[app.common.schema.desc-js-like :as-alias smdj]
|
||||
[app.common.types.components-list :as ctkl]
|
||||
[app.common.types.file :as ctf]
|
||||
[app.common.uri :as uri]
|
||||
[app.config :as cf]
|
||||
[app.db :as db]
|
||||
[app.db.sql :as-alias sql]
|
||||
@@ -35,7 +36,8 @@
|
||||
[app.util.services :as sv]
|
||||
[app.util.time :as dt]
|
||||
[app.worker :as wrk]
|
||||
[cuerdas.core :as str]))
|
||||
[cuerdas.core :as str]
|
||||
[promesa.exec :as px]))
|
||||
|
||||
;; --- FEATURES
|
||||
|
||||
@@ -181,6 +183,7 @@
|
||||
[:comment-thread-seqn [::sm/int {:min 0}]]
|
||||
[:name [:string {:max 250}]]
|
||||
[:revn [::sm/int {:min 0}]]
|
||||
[:vern [::sm/int {:min 0}]]
|
||||
[:modified-at ::dt/instant]
|
||||
[:is-shared ::sm/boolean]
|
||||
[:project-id ::sm/uuid]
|
||||
@@ -243,16 +246,16 @@
|
||||
file)))
|
||||
|
||||
(defn get-file
|
||||
[{:keys [::db/conn] :as cfg} id & {:keys [project-id
|
||||
migrate?
|
||||
include-deleted?
|
||||
lock-for-update?]
|
||||
:or {include-deleted? false
|
||||
lock-for-update? false
|
||||
migrate? true}}]
|
||||
(dm/assert!
|
||||
"expected cfg with valid connection"
|
||||
(db/connection-map? cfg))
|
||||
[{:keys [::db/conn ::wrk/executor] :as cfg} id
|
||||
& {:keys [project-id
|
||||
migrate?
|
||||
include-deleted?
|
||||
lock-for-update?]
|
||||
:or {include-deleted? false
|
||||
lock-for-update? false
|
||||
migrate? true}}]
|
||||
|
||||
(assert (db/connection? conn) "expected cfg with valid connection")
|
||||
|
||||
(let [params (merge {:id id}
|
||||
(when (some? project-id)
|
||||
@@ -261,55 +264,76 @@
|
||||
{::db/check-deleted (not include-deleted?)
|
||||
::db/remove-deleted (not include-deleted?)
|
||||
::sql/for-update lock-for-update?})
|
||||
(feat.fdata/resolve-file-data cfg)
|
||||
(decode-row))]
|
||||
(feat.fdata/resolve-file-data cfg))
|
||||
|
||||
;; NOTE: we perform the file decoding in a separate thread
|
||||
;; because it has heavy and synchronous operations for
|
||||
;; decoding file body that are not very friendly with virtual
|
||||
;; threads.
|
||||
file (px/invoke! executor #(decode-row file))]
|
||||
|
||||
(if (and migrate? (fmg/need-migration? file))
|
||||
(migrate-file cfg file)
|
||||
file)))
|
||||
|
||||
(defn get-minimal-file
|
||||
[cfg id & {:as opts}]
|
||||
(let [opts (assoc opts ::sql/columns [:id :modified-at :deleted-at :revn :data-ref-id :data-backend])]
|
||||
(let [opts (assoc opts ::sql/columns [:id :modified-at :deleted-at :revn :vern :data-ref-id :data-backend])]
|
||||
(db/get cfg :file {:id id} opts)))
|
||||
|
||||
(defn- get-minimal-file-with-perms
|
||||
[cfg {:keys [:id ::rpc/profile-id]}]
|
||||
(let [mfile (get-minimal-file cfg id)
|
||||
perms (get-permissions cfg profile-id id)]
|
||||
(assoc mfile :permissions perms)))
|
||||
|
||||
(defn get-file-etag
|
||||
[{:keys [::rpc/profile-id]} {:keys [modified-at revn]}]
|
||||
(str profile-id (dt/format-instant modified-at :iso) revn))
|
||||
[{:keys [::rpc/profile-id]} {:keys [modified-at revn vern permissions]}]
|
||||
(str profile-id "/" revn "/" vern "/"
|
||||
(dt/format-instant modified-at :iso)
|
||||
"/"
|
||||
(uri/map->query-string permissions)))
|
||||
|
||||
(sv/defmethod ::get-file
|
||||
"Retrieve a file by its ID. Only authenticated users."
|
||||
{::doc/added "1.17"
|
||||
::cond/get-object #(get-minimal-file %1 (:id %2))
|
||||
::cond/get-object #(get-minimal-file-with-perms %1 %2)
|
||||
::cond/key-fn get-file-etag
|
||||
::sm/params schema:get-file
|
||||
::sm/result schema:file-with-permissions}
|
||||
[cfg {:keys [::rpc/profile-id id project-id] :as params}]
|
||||
(db/tx-run! cfg (fn [{:keys [::db/conn] :as cfg}]
|
||||
(let [perms (get-permissions conn profile-id id)]
|
||||
(check-read-permissions! perms)
|
||||
(let [team (teams/get-team conn
|
||||
:profile-id profile-id
|
||||
:project-id project-id
|
||||
:file-id id)
|
||||
::sm/result schema:file-with-permissions
|
||||
::db/transaction true}
|
||||
[{:keys [::db/conn] :as cfg} {:keys [::rpc/profile-id id project-id] :as params}]
|
||||
;; The COND middleware makes initial request for a file and
|
||||
;; permissions when the incoming request comes with an
|
||||
;; ETAG. When ETAG does not matches, the request is resolved
|
||||
;; and this code is executed, in this case the permissions
|
||||
;; will be already prefetched and we just reuse them instead
|
||||
;; of making an additional database queries.
|
||||
(let [perms (or (:permissions (::cond/object params))
|
||||
(get-permissions conn profile-id id))]
|
||||
(check-read-permissions! perms)
|
||||
|
||||
file (-> (get-file cfg id :project-id project-id)
|
||||
(assoc :permissions perms)
|
||||
(check-version!))
|
||||
(let [team (teams/get-team conn
|
||||
:profile-id profile-id
|
||||
:project-id project-id
|
||||
:file-id id)
|
||||
|
||||
_ (-> (cfeat/get-team-enabled-features cf/flags team)
|
||||
(cfeat/check-client-features! (:features params))
|
||||
(cfeat/check-file-features! (:features file) (:features params)))
|
||||
file (-> (get-file cfg id :project-id project-id)
|
||||
(assoc :permissions perms)
|
||||
(check-version!))]
|
||||
|
||||
;; This operation is needed for backward comapatibility with frontends that
|
||||
;; does not support pointer-map resolution mechanism; this just resolves the
|
||||
;; pointers on backend and return a complete file.
|
||||
file (if (and (contains? (:features file) "fdata/pointer-map")
|
||||
(not (contains? (:features params) "fdata/pointer-map")))
|
||||
(binding [pmap/*load-fn* (partial feat.fdata/load-pointer cfg id)]
|
||||
(update file :data feat.fdata/process-pointers deref))
|
||||
file)]
|
||||
(-> (cfeat/get-team-enabled-features cf/flags team)
|
||||
(cfeat/check-client-features! (:features params))
|
||||
(cfeat/check-file-features! (:features file) (:features params)))
|
||||
|
||||
(vary-meta file assoc ::cond/key (get-file-etag params file)))))))
|
||||
;; This operation is needed for backward comapatibility with frontends that
|
||||
;; does not support pointer-map resolution mechanism; this just resolves the
|
||||
;; pointers on backend and return a complete file.
|
||||
(if (and (contains? (:features file) "fdata/pointer-map")
|
||||
(not (contains? (:features params) "fdata/pointer-map")))
|
||||
(binding [pmap/*load-fn* (partial feat.fdata/load-pointer cfg id)]
|
||||
(update file :data feat.fdata/process-pointers deref))
|
||||
file))))
|
||||
|
||||
;; --- COMMAND QUERY: get-file-fragment (by id)
|
||||
|
||||
@@ -355,6 +379,7 @@
|
||||
f.modified_at,
|
||||
f.name,
|
||||
f.revn,
|
||||
f.vern,
|
||||
f.is_shared,
|
||||
ft.media_id AS thumbnail_id
|
||||
from file as f
|
||||
@@ -504,6 +529,7 @@
|
||||
(def ^:private sql:team-shared-files
|
||||
"select f.id,
|
||||
f.revn,
|
||||
f.vern,
|
||||
f.data,
|
||||
f.project_id,
|
||||
f.created_at,
|
||||
@@ -587,6 +613,7 @@
|
||||
l.deleted_at,
|
||||
l.name,
|
||||
l.revn,
|
||||
l.vern,
|
||||
l.synced_at
|
||||
FROM libs AS l
|
||||
WHERE l.deleted_at IS NULL OR l.deleted_at > now();")
|
||||
@@ -648,6 +675,7 @@
|
||||
"with recent_files as (
|
||||
select f.id,
|
||||
f.revn,
|
||||
f.vern,
|
||||
f.project_id,
|
||||
f.created_at,
|
||||
f.modified_at,
|
||||
@@ -670,11 +698,7 @@
|
||||
|
||||
(defn get-team-recent-files
|
||||
[conn team-id]
|
||||
(->> (db/exec! conn [sql:team-recent-files team-id])
|
||||
(mapv (fn [row]
|
||||
(if-let [media-id (:thumbnail-id row)]
|
||||
(assoc row :thumbnail-uri (resolve-public-uri media-id))
|
||||
(dissoc row :media-id))))))
|
||||
(db/exec! conn [sql:team-recent-files team-id]))
|
||||
|
||||
(def ^:private schema:get-team-recent-files
|
||||
[:map {:title "get-team-recent-files"}
|
||||
|
||||
@@ -118,11 +118,12 @@
|
||||
;; feature on frontend and make it permanent on file
|
||||
features (-> (:features params #{})
|
||||
(set/intersection cfeat/no-migration-features)
|
||||
(set/difference cfeat/frontend-only-features)
|
||||
(set/union features))
|
||||
|
||||
params (-> params
|
||||
(assoc :profile-id profile-id)
|
||||
(assoc :features features))]
|
||||
(assoc :features (set/difference features cfeat/frontend-only-features)))]
|
||||
|
||||
(quotes/check! cfg {::quotes/id ::quotes/files-per-project
|
||||
::quotes/team-id team-id
|
||||
|
||||
@@ -15,10 +15,11 @@
|
||||
[app.db.sql :as-alias sql]
|
||||
[app.features.fdata :as feat.fdata]
|
||||
[app.main :as-alias main]
|
||||
[app.msgbus :as mbus]
|
||||
[app.rpc :as-alias rpc]
|
||||
[app.rpc.commands.files :as files]
|
||||
[app.rpc.commands.profile :as profile]
|
||||
[app.rpc.doc :as-alias doc]
|
||||
[app.rpc.quotes :as quotes]
|
||||
[app.storage :as sto]
|
||||
[app.util.blob :as blob]
|
||||
[app.util.pointer-map :as pmap]
|
||||
@@ -26,173 +27,58 @@
|
||||
[app.util.time :as dt]
|
||||
[cuerdas.core :as str]))
|
||||
|
||||
(defn check-authorized!
|
||||
[{:keys [::db/pool]} profile-id]
|
||||
(when-not (or (= "devenv" (cf/get :host))
|
||||
(let [profile (ex/ignoring (profile/get-profile pool profile-id))
|
||||
admins (or (cf/get :admins) #{})]
|
||||
(contains? admins (:email profile))))
|
||||
(ex/raise :type :authentication
|
||||
:code :authentication-required
|
||||
:hint "only admins allowed")))
|
||||
|
||||
(def sql:get-file-snapshots
|
||||
"SELECT id, label, revn, created_at
|
||||
FROM file_change
|
||||
WHERE file_id = ?
|
||||
AND created_at < ?
|
||||
AND label IS NOT NULL
|
||||
ORDER BY created_at DESC
|
||||
LIMIT ?")
|
||||
"WITH changes AS (
|
||||
SELECT id, label, revn, created_at, created_by, profile_id
|
||||
FROM file_change
|
||||
WHERE file_id = ?
|
||||
AND data IS NOT NULL
|
||||
AND (deleted_at IS NULL OR deleted_at > now())
|
||||
), versions AS (
|
||||
(SELECT * FROM changes WHERE created_by = 'system' LIMIT 1000)
|
||||
UNION ALL
|
||||
(SELECT * FROM changes WHERE created_by != 'system' LIMIT 1000)
|
||||
)
|
||||
SELECT * FROM versions
|
||||
ORDER BY created_at DESC;")
|
||||
|
||||
(defn get-file-snapshots
|
||||
[{:keys [::db/conn]} {:keys [file-id limit start-at]
|
||||
:or {limit Long/MAX_VALUE}}]
|
||||
(let [start-at (or start-at (dt/now))
|
||||
limit (min limit 20)]
|
||||
(->> (db/exec! conn [sql:get-file-snapshots file-id start-at limit])
|
||||
(mapv (fn [row]
|
||||
(update row :created-at dt/format-instant :rfc1123))))))
|
||||
[conn file-id]
|
||||
(db/exec! conn [sql:get-file-snapshots file-id]))
|
||||
|
||||
(def ^:private schema:get-file-snapshots
|
||||
[:map [:file-id ::sm/uuid]])
|
||||
[:map {:title "get-file-snapshots"}
|
||||
[:file-id ::sm/uuid]])
|
||||
|
||||
(sv/defmethod ::get-file-snapshots
|
||||
{::doc/added "1.20"
|
||||
::doc/skip true
|
||||
::sm/params schema:get-file-snapshots}
|
||||
[cfg {:keys [::rpc/profile-id] :as params}]
|
||||
(check-authorized! cfg profile-id)
|
||||
(db/run! cfg get-file-snapshots params))
|
||||
[cfg {:keys [::rpc/profile-id file-id] :as params}]
|
||||
(db/run! cfg (fn [{:keys [::db/conn]}]
|
||||
(files/check-read-permissions! conn profile-id file-id)
|
||||
(get-file-snapshots conn file-id))))
|
||||
|
||||
(defn restore-file-snapshot!
|
||||
[{:keys [::db/conn] :as cfg} {:keys [file-id id]}]
|
||||
(let [storage (sto/resolve cfg {::db/reuse-conn true})
|
||||
file (files/get-minimal-file conn file-id {::db/for-update true})
|
||||
snapshot (db/get* conn :file-change
|
||||
{:file-id file-id
|
||||
:id id}
|
||||
{::db/for-share true})]
|
||||
|
||||
(when-not snapshot
|
||||
(ex/raise :type :not-found
|
||||
:code :snapshot-not-found
|
||||
:hint "unable to find snapshot with the provided label"
|
||||
:id id
|
||||
:file-id file-id))
|
||||
|
||||
(let [snapshot (feat.fdata/resolve-file-data cfg snapshot)]
|
||||
(when-not (:data snapshot)
|
||||
(ex/raise :type :precondition
|
||||
:code :snapshot-without-data
|
||||
:hint "snapshot has no data"
|
||||
:label (:label snapshot)
|
||||
:file-id file-id))
|
||||
|
||||
(l/dbg :hint "restoring snapshot"
|
||||
:file-id (str file-id)
|
||||
:label (:label snapshot)
|
||||
:snapshot-id (str (:id snapshot)))
|
||||
|
||||
;; If the file was already offloaded, on restring the snapshot
|
||||
;; we are going to replace the file data, so we need to touch
|
||||
;; the old referenced storage object and avoid possible leaks
|
||||
(when (feat.fdata/offloaded? file)
|
||||
(sto/touch-object! storage (:data-ref-id file)))
|
||||
|
||||
(db/update! conn :file
|
||||
{:data (:data snapshot)
|
||||
:revn (inc (:revn file))
|
||||
:version (:version snapshot)
|
||||
:data-backend nil
|
||||
:data-ref-id nil
|
||||
:has-media-trimmed false
|
||||
:features (:features snapshot)}
|
||||
{:id file-id})
|
||||
|
||||
;; clean object thumbnails
|
||||
(let [sql (str "update file_tagged_object_thumbnail "
|
||||
" set deleted_at = now() "
|
||||
" where file_id=? returning media_id")
|
||||
res (db/exec! conn [sql file-id])]
|
||||
(doseq [media-id (into #{} (keep :media-id) res)]
|
||||
(sto/touch-object! storage media-id)))
|
||||
|
||||
;; clean file thumbnails
|
||||
(let [sql (str "update file_thumbnail "
|
||||
" set deleted_at = now() "
|
||||
" where file_id=? returning media_id")
|
||||
res (db/exec! conn [sql file-id])]
|
||||
(doseq [media-id (into #{} (keep :media-id) res)]
|
||||
(sto/touch-object! storage media-id)))
|
||||
|
||||
{:id (:id snapshot)
|
||||
:label (:label snapshot)})))
|
||||
|
||||
(defn- resolve-snapshot-by-label
|
||||
[conn file-id label]
|
||||
(->> (db/query conn :file-change
|
||||
{:file-id file-id
|
||||
:label label}
|
||||
{::sql/order-by [[:created-at :desc]]
|
||||
::sql/columns [:file-id :id :label]})
|
||||
(first)))
|
||||
|
||||
(def ^:private
|
||||
schema:restore-file-snapshot
|
||||
[:and
|
||||
[:map
|
||||
[:file-id ::sm/uuid]
|
||||
[:id {:optional true} ::sm/uuid]
|
||||
[:label {:optional true} :string]]
|
||||
[::sm/contains-any #{:id :label}]])
|
||||
|
||||
(sv/defmethod ::restore-file-snapshot
|
||||
{::doc/added "1.20"
|
||||
::doc/skip true
|
||||
::sm/params schema:restore-file-snapshot}
|
||||
[cfg {:keys [::rpc/profile-id file-id id label] :as params}]
|
||||
(check-authorized! cfg profile-id)
|
||||
(db/tx-run! cfg (fn [{:keys [::db/conn] :as cfg}]
|
||||
(let [params (cond-> params
|
||||
(and (not id) (string? label))
|
||||
(merge (resolve-snapshot-by-label conn file-id label)))]
|
||||
(restore-file-snapshot! cfg params)))))
|
||||
(def ^:private sql:get-file
|
||||
"SELECT f.*,
|
||||
p.id AS project_id,
|
||||
p.team_id AS team_id
|
||||
FROM file AS f
|
||||
INNER JOIN project AS p ON (p.id = f.project_id)
|
||||
WHERE f.id = ?")
|
||||
|
||||
(defn- get-file
|
||||
[cfg file-id]
|
||||
(let [file (->> (db/get cfg :file {:id file-id})
|
||||
(let [file (->> (db/exec-one! cfg [sql:get-file file-id])
|
||||
(feat.fdata/resolve-file-data cfg))]
|
||||
(binding [pmap/*load-fn* (partial feat.fdata/load-pointer cfg file-id)]
|
||||
(-> file
|
||||
(update :data blob/decode)
|
||||
(update :data feat.fdata/process-pointers deref)
|
||||
(update :data feat.fdata/process-objects (partial into {}))
|
||||
(update :data assoc ::id file-id)
|
||||
(update :data blob/encode)))))
|
||||
|
||||
(defn take-file-snapshot!
|
||||
[cfg {:keys [file-id label ::rpc/profile-id]}]
|
||||
(let [file (get-file cfg file-id)
|
||||
id (uuid/next)]
|
||||
|
||||
(l/debug :hint "creating file snapshot"
|
||||
:file-id (str file-id)
|
||||
:label label)
|
||||
|
||||
(db/insert! cfg :file-change
|
||||
{:id id
|
||||
:revn (:revn file)
|
||||
:data (:data file)
|
||||
:version (:version file)
|
||||
:features (:features file)
|
||||
:profile-id profile-id
|
||||
:file-id (:id file)
|
||||
:label label}
|
||||
{::db/return-keys false})
|
||||
|
||||
{:id id :label label}))
|
||||
|
||||
(defn generate-snapshot-label
|
||||
(defn- generate-snapshot-label
|
||||
[]
|
||||
(let [ts (-> (dt/now)
|
||||
(dt/format-instant)
|
||||
@@ -200,17 +86,218 @@
|
||||
(str/rtrim "Z"))]
|
||||
(str "snapshot-" ts)))
|
||||
|
||||
(def ^:private schema:take-file-snapshot
|
||||
[:map [:file-id ::sm/uuid]])
|
||||
(defn create-file-snapshot!
|
||||
[cfg profile-id file-id label]
|
||||
(let [file (get-file cfg file-id)
|
||||
|
||||
(sv/defmethod ::take-file-snapshot
|
||||
;; NOTE: final user never can provide label as `:system`
|
||||
;; keyword because the validator implies label always as
|
||||
;; string; keyword is used for signal a special case
|
||||
created-by
|
||||
(if (= label :system)
|
||||
"system"
|
||||
"user")
|
||||
|
||||
deleted-at
|
||||
(if (= label :system)
|
||||
(dt/plus (dt/now) (cf/get-deletion-delay))
|
||||
nil)
|
||||
|
||||
label
|
||||
(if (= label :system)
|
||||
(str "internal/snapshot/" (:revn file))
|
||||
(or label (generate-snapshot-label)))
|
||||
|
||||
snapshot-id
|
||||
(uuid/next)]
|
||||
|
||||
(-> cfg
|
||||
(assoc ::quotes/profile-id profile-id)
|
||||
(assoc ::quotes/project-id (:project-id file))
|
||||
(assoc ::quotes/team-id (:team-id file))
|
||||
(assoc ::quotes/file-id (:id file))
|
||||
(quotes/check! {::quotes/id ::quotes/snapshots-per-file}
|
||||
{::quotes/id ::quotes/snapshots-per-team}))
|
||||
|
||||
(l/debug :hint "creating file snapshot"
|
||||
:file-id (str file-id)
|
||||
:id (str snapshot-id)
|
||||
:label label)
|
||||
|
||||
(db/insert! cfg :file-change
|
||||
{:id snapshot-id
|
||||
:revn (:revn file)
|
||||
:data (:data file)
|
||||
:version (:version file)
|
||||
:features (:features file)
|
||||
:profile-id profile-id
|
||||
:file-id (:id file)
|
||||
:label label
|
||||
:deleted-at deleted-at
|
||||
:created-by created-by}
|
||||
{::db/return-keys false})
|
||||
|
||||
{:id snapshot-id :label label}))
|
||||
|
||||
(def ^:private schema:create-file-snapshot
|
||||
[:map
|
||||
[:file-id ::sm/uuid]
|
||||
[:label {:optional true} :string]])
|
||||
|
||||
(sv/defmethod ::create-file-snapshot
|
||||
{::doc/added "1.20"
|
||||
::doc/skip true
|
||||
::sm/params schema:take-file-snapshot}
|
||||
[cfg {:keys [::rpc/profile-id] :as params}]
|
||||
(check-authorized! cfg profile-id)
|
||||
(db/tx-run! cfg (fn [cfg]
|
||||
(let [params (update params :label (fn [label]
|
||||
(or label (generate-snapshot-label))))]
|
||||
(take-file-snapshot! cfg params)))))
|
||||
::sm/params schema:create-file-snapshot}
|
||||
[cfg {:keys [::rpc/profile-id file-id label]}]
|
||||
(db/tx-run! cfg
|
||||
(fn [{:keys [::db/conn] :as cfg}]
|
||||
(files/check-edition-permissions! conn profile-id file-id)
|
||||
(create-file-snapshot! cfg profile-id file-id label))))
|
||||
|
||||
(defn restore-file-snapshot!
|
||||
[{:keys [::db/conn ::mbus/msgbus] :as cfg} file-id snapshot-id]
|
||||
(let [storage (sto/resolve cfg {::db/reuse-conn true})
|
||||
file (files/get-minimal-file conn file-id {::db/for-update true})
|
||||
vern (rand-int Integer/MAX_VALUE)
|
||||
snapshot (some->> (db/get* conn :file-change
|
||||
{:file-id file-id
|
||||
:id snapshot-id}
|
||||
{::db/for-share true})
|
||||
(feat.fdata/resolve-file-data cfg))]
|
||||
|
||||
(when-not snapshot
|
||||
(ex/raise :type :not-found
|
||||
:code :snapshot-not-found
|
||||
:hint "unable to find snapshot with the provided label"
|
||||
:snapshot-id snapshot-id
|
||||
:file-id file-id))
|
||||
|
||||
(when-not (:data snapshot)
|
||||
(ex/raise :type :validation
|
||||
:code :snapshot-without-data
|
||||
:hint "snapshot has no data"
|
||||
:label (:label snapshot)
|
||||
:file-id file-id))
|
||||
|
||||
(l/dbg :hint "restoring snapshot"
|
||||
:file-id (str file-id)
|
||||
:label (:label snapshot)
|
||||
:snapshot-id (str (:id snapshot)))
|
||||
|
||||
;; If the file was already offloaded, on restring the snapshot
|
||||
;; we are going to replace the file data, so we need to touch
|
||||
;; the old referenced storage object and avoid possible leaks
|
||||
(when (feat.fdata/offloaded? file)
|
||||
(sto/touch-object! storage (:data-ref-id file)))
|
||||
|
||||
(db/update! conn :file
|
||||
{:data (:data snapshot)
|
||||
:revn (inc (:revn file))
|
||||
:vern vern
|
||||
:version (:version snapshot)
|
||||
:data-backend nil
|
||||
:data-ref-id nil
|
||||
:has-media-trimmed false
|
||||
:features (:features snapshot)}
|
||||
{:id file-id})
|
||||
|
||||
;; clean object thumbnails
|
||||
(let [sql (str "update file_tagged_object_thumbnail "
|
||||
" set deleted_at = now() "
|
||||
" where file_id=? returning media_id")
|
||||
res (db/exec! conn [sql file-id])]
|
||||
(doseq [media-id (into #{} (keep :media-id) res)]
|
||||
(sto/touch-object! storage media-id)))
|
||||
|
||||
;; clean file thumbnails
|
||||
(let [sql (str "update file_thumbnail "
|
||||
" set deleted_at = now() "
|
||||
" where file_id=? returning media_id")
|
||||
res (db/exec! conn [sql file-id])]
|
||||
(doseq [media-id (into #{} (keep :media-id) res)]
|
||||
(sto/touch-object! storage media-id)))
|
||||
|
||||
;; Send to the clients a notification to reload the file
|
||||
(mbus/pub! msgbus
|
||||
:topic (:id file)
|
||||
:message {:type :file-restore
|
||||
:file-id (:id file)
|
||||
:vern vern})
|
||||
{:id (:id snapshot)
|
||||
:label (:label snapshot)}))
|
||||
|
||||
(def ^:private schema:restore-file-snapshot
|
||||
[:map {:title "restore-file-snapshot"}
|
||||
[:file-id ::sm/uuid]
|
||||
[:id ::sm/uuid]])
|
||||
|
||||
(sv/defmethod ::restore-file-snapshot
|
||||
{::doc/added "1.20"
|
||||
::sm/params schema:restore-file-snapshot}
|
||||
[cfg {:keys [::rpc/profile-id file-id id] :as params}]
|
||||
(db/tx-run! cfg
|
||||
(fn [{:keys [::db/conn] :as cfg}]
|
||||
(files/check-edition-permissions! conn profile-id file-id)
|
||||
(create-file-snapshot! cfg profile-id file-id :system)
|
||||
(restore-file-snapshot! cfg file-id id))))
|
||||
|
||||
(def ^:private schema:update-file-snapshot
|
||||
[:map {:title "update-file-snapshot"}
|
||||
[:id ::sm/uuid]
|
||||
[:label ::sm/text]])
|
||||
|
||||
(defn- update-file-snapshot!
|
||||
[conn snapshot-id label]
|
||||
(-> (db/update! conn :file-change
|
||||
{:label label
|
||||
:created-by "user"
|
||||
:deleted-at nil}
|
||||
{:id snapshot-id}
|
||||
{::db/return-keys true})
|
||||
(dissoc :data :features)))
|
||||
|
||||
(defn- get-snapshot
|
||||
"Get a minimal snapshot from database and lock for update"
|
||||
[conn id]
|
||||
(db/get conn :file-change
|
||||
{:id id}
|
||||
{::sql/columns [:id :file-id :created-by :deleted-at]
|
||||
::db/for-update true}))
|
||||
|
||||
(sv/defmethod ::update-file-snapshot
|
||||
{::doc/added "1.20"
|
||||
::sm/params schema:update-file-snapshot}
|
||||
[cfg {:keys [::rpc/profile-id id label]}]
|
||||
(db/tx-run! cfg
|
||||
(fn [{:keys [::db/conn]}]
|
||||
(let [snapshot (get-snapshot conn id)]
|
||||
(files/check-edition-permissions! conn profile-id (:file-id snapshot))
|
||||
(update-file-snapshot! conn id label)))))
|
||||
|
||||
(def ^:private schema:remove-file-snapshot
|
||||
[:map {:title "remove-file-snapshot"}
|
||||
[:id ::sm/uuid]])
|
||||
|
||||
(defn- delete-file-snapshot!
|
||||
[conn snapshot-id]
|
||||
(db/update! conn :file-change
|
||||
{:deleted-at (dt/now)}
|
||||
{:id snapshot-id}
|
||||
{::db/return-keys false})
|
||||
nil)
|
||||
|
||||
(sv/defmethod ::delete-file-snapshot
|
||||
{::doc/added "1.20"
|
||||
::sm/params schema:remove-file-snapshot}
|
||||
[cfg {:keys [::rpc/profile-id id]}]
|
||||
(db/tx-run! cfg
|
||||
(fn [{:keys [::db/conn]}]
|
||||
(let [snapshot (get-snapshot conn id)]
|
||||
(files/check-edition-permissions! conn profile-id (:file-id snapshot))
|
||||
|
||||
(when (not= (:created-by snapshot) "user")
|
||||
(ex/raise :type :validation
|
||||
:code :system-snapshots-cant-be-deleted
|
||||
:snapshot-id id
|
||||
:profile-id profile-id))
|
||||
|
||||
(delete-file-snapshot! conn id)))))
|
||||
|
||||
@@ -50,8 +50,7 @@
|
||||
" where file_id=? and tag=? and deleted_at is null")
|
||||
res (db/exec! conn [sql file-id tag])]
|
||||
(->> res
|
||||
(d/index-by :object-id (fn [row]
|
||||
(files/resolve-public-uri (:media-id row))))
|
||||
(d/index-by :object-id :media-id)
|
||||
(d/without-nils))))
|
||||
|
||||
(defn- get-object-thumbnails
|
||||
@@ -62,8 +61,7 @@
|
||||
" where file_id=? and deleted_at is null")
|
||||
res (db/exec! conn [sql file-id])]
|
||||
(->> res
|
||||
(d/index-by :object-id (fn [row]
|
||||
(files/resolve-public-uri (:media-id row))))
|
||||
(d/index-by :object-id :media-id)
|
||||
(d/without-nils))))
|
||||
|
||||
([conn file-id object-ids]
|
||||
@@ -75,8 +73,7 @@
|
||||
res (db/exec! conn [sql file-id ids])]
|
||||
|
||||
(->> res
|
||||
(d/index-by :object-id (fn [row]
|
||||
(files/resolve-public-uri (:media-id row))))
|
||||
(d/index-by :object-id :media-id)
|
||||
(d/without-nils)))))
|
||||
|
||||
(sv/defmethod ::get-file-object-thumbnails
|
||||
@@ -127,8 +124,11 @@
|
||||
(if-let [frame (-> frames first)]
|
||||
(let [frame-id (:id frame)
|
||||
object-id (thc/fmt-object-id (:id file) page-id frame-id "frame")
|
||||
frame (if-let [thumb (get thumbnails object-id)]
|
||||
(assoc frame :thumbnail thumb :shapes [])
|
||||
|
||||
frame (if-let [media-id (get thumbnails object-id)]
|
||||
(-> frame
|
||||
(assoc :thumbnail-id media-id)
|
||||
(assoc :shapes []))
|
||||
(dissoc frame :thumbnail))
|
||||
|
||||
children-ids
|
||||
@@ -402,7 +402,10 @@
|
||||
|
||||
[cfg {:keys [::rpc/profile-id file-id] :as params}]
|
||||
(db/tx-run! cfg (fn [{:keys [::db/conn] :as cfg}]
|
||||
(files/check-edition-permissions! conn profile-id file-id)
|
||||
;; TODO For now we check read permissions instead of write,
|
||||
;; to allow viewer users to update thumbnails. We might
|
||||
;; review this approach on the future.
|
||||
(files/check-read-permissions! conn profile-id file-id)
|
||||
(when-not (db/read-only? conn)
|
||||
(let [media (create-file-thumbnail! cfg params)]
|
||||
{:uri (files/resolve-public-uri (:id media))
|
||||
|
||||
@@ -34,7 +34,7 @@
|
||||
[app.util.pointer-map :as pmap]
|
||||
[app.util.services :as sv]
|
||||
[app.util.time :as dt]
|
||||
[app.worker :as-alias wrk]
|
||||
[app.worker :as wrk]
|
||||
[clojure.set :as set]
|
||||
[promesa.exec :as px]))
|
||||
|
||||
@@ -44,7 +44,6 @@
|
||||
(declare ^:private update-file*)
|
||||
(declare ^:private process-changes-and-validate)
|
||||
(declare ^:private take-snapshot?)
|
||||
(declare ^:private delete-old-snapshots!)
|
||||
|
||||
;; PUBLIC API; intended to be used outside of this module
|
||||
(declare update-file!)
|
||||
@@ -60,6 +59,7 @@
|
||||
[:id ::sm/uuid]
|
||||
[:session-id ::sm/uuid]
|
||||
[:revn {:min 0} ::sm/int]
|
||||
[:vern {:min 0} ::sm/int]
|
||||
[:features {:optional true} ::cfeat/features]
|
||||
[:changes {:optional true} [:vector ::cpc/change]]
|
||||
[:changes-with-metadata {:optional true}
|
||||
@@ -147,7 +147,7 @@
|
||||
|
||||
params (-> params
|
||||
(assoc :profile-id profile-id)
|
||||
(assoc :features features)
|
||||
(assoc :features (set/difference features cfeat/frontend-only-features))
|
||||
(assoc :team team)
|
||||
(assoc :file file)
|
||||
(assoc :changes changes))
|
||||
@@ -157,6 +157,14 @@
|
||||
tpoint (dt/tpoint)]
|
||||
|
||||
|
||||
(when (not= (:vern params)
|
||||
(:vern file))
|
||||
(ex/raise :type :validation
|
||||
:code :vern-conflict
|
||||
:hint "A different version has been restored for the file."
|
||||
:context {:incoming-revn (:revn params)
|
||||
:stored-revn (:revn file)}))
|
||||
|
||||
(when (> (:revn params)
|
||||
(:revn file))
|
||||
(ex/raise :type :validation
|
||||
@@ -215,23 +223,25 @@
|
||||
(let [storage (sto/resolve cfg ::db/reuse-conn true)]
|
||||
(some->> (:data-ref-id file) (sto/touch-object! storage))))
|
||||
|
||||
;; TODO: move this to asynchronous task
|
||||
(when (::snapshot-data file)
|
||||
(delete-old-snapshots! cfg file))
|
||||
|
||||
(persist-file! cfg file)
|
||||
|
||||
(let [params (assoc params :file file)
|
||||
response {:revn (:revn file)
|
||||
:lagged (get-lagged-changes conn params)}
|
||||
features (db/create-array conn "text" (:features file))]
|
||||
features (db/create-array conn "text" (:features file))
|
||||
deleted-at (if (::snapshot-data file)
|
||||
(dt/plus timestamp (cf/get-deletion-delay))
|
||||
(dt/plus timestamp (dt/duration {:hours 1})))]
|
||||
|
||||
;; Insert change (xlog)
|
||||
;; Insert change (xlog) with deleted_at in a future data for
|
||||
;; make them automatically eleggible for GC once they expires
|
||||
(db/insert! conn :file-change
|
||||
{:id (uuid/next)
|
||||
:session-id session-id
|
||||
:profile-id profile-id
|
||||
:created-at timestamp
|
||||
:updated-at timestamp
|
||||
:deleted-at deleted-at
|
||||
:file-id (:id file)
|
||||
:revn (:revn file)
|
||||
:version (:version file)
|
||||
@@ -449,33 +459,6 @@
|
||||
(> (inst-ms (dt/diff modified-at (dt/now)))
|
||||
(inst-ms timeout))))))
|
||||
|
||||
;; Get the latest available snapshots without exceeding the total
|
||||
;; snapshot limit.
|
||||
(def ^:private sql:get-latest-snapshots
|
||||
"SELECT fch.id, fch.created_at
|
||||
FROM file_change AS fch
|
||||
WHERE fch.file_id = ?
|
||||
AND fch.label LIKE 'internal/%'
|
||||
ORDER BY fch.created_at DESC
|
||||
LIMIT ?")
|
||||
|
||||
;; Mark all snapshots that are outside the allowed total threshold
|
||||
;; available for the GC.
|
||||
(def ^:private sql:delete-snapshots
|
||||
"UPDATE file_change
|
||||
SET label = NULL
|
||||
WHERE file_id = ?
|
||||
AND label LIKE 'internal/%'
|
||||
AND created_at < ?")
|
||||
|
||||
(defn- delete-old-snapshots!
|
||||
[{:keys [::db/conn] :as cfg} {:keys [id] :as file}]
|
||||
(when-let [snapshots (not-empty (db/exec! conn [sql:get-latest-snapshots id
|
||||
(cf/get :auto-file-snapshot-total 10)]))]
|
||||
(let [last-date (-> snapshots peek :created-at)
|
||||
result (db/exec-one! conn [sql:delete-snapshots id last-date])]
|
||||
(l/trc :hint "delete old snapshots" :file-id (str id) :total (db/get-update-count result)))))
|
||||
|
||||
(def ^:private sql:lagged-changes
|
||||
"select s.id, s.revn, s.file_id,
|
||||
s.session_id, s.changes
|
||||
@@ -502,6 +485,7 @@
|
||||
:file-id (:id file)
|
||||
:session-id (:session-id params)
|
||||
:revn (:revn file)
|
||||
:vern (:vern file)
|
||||
:changes changes})
|
||||
|
||||
(when (and (:is-shared file) (seq lchanges))
|
||||
|
||||
@@ -67,7 +67,7 @@
|
||||
:is-owner true
|
||||
:is-admin true
|
||||
:can-edit true}
|
||||
{::db/return-keys? false}))
|
||||
{::db/return-keys false}))
|
||||
|
||||
(doseq [params (sequence (comp
|
||||
(map #(bfc/remap-id % :file-id))
|
||||
@@ -176,7 +176,7 @@
|
||||
|
||||
(binding [bfc/*state* (volatile! {:index {team-id (uuid/next)}})]
|
||||
(let [projs (bfc/get-team-projects cfg team-id)
|
||||
files (bfc/get-team-files cfg team-id)
|
||||
files (bfc/get-team-files-ids cfg team-id)
|
||||
frels (bfc/get-files-rels cfg files)
|
||||
|
||||
team (-> (db/get-by-id conn :team team-id)
|
||||
@@ -326,7 +326,7 @@
|
||||
(def ^:private
|
||||
schema:move-files
|
||||
[:map {:title "move-files"}
|
||||
[:ids ::sm/set-of-uuid]
|
||||
[:ids [::sm/set {:min 1} ::sm/uuid]]
|
||||
[:project-id ::sm/uuid]])
|
||||
|
||||
(sv/defmethod ::move-files
|
||||
@@ -335,7 +335,7 @@
|
||||
::webhooks/event? true
|
||||
::sm/params schema:move-files}
|
||||
[cfg {:keys [::rpc/profile-id] :as params}]
|
||||
(db/tx-run! cfg #(move-files % (assoc params :profile-id profile-id))))
|
||||
(db/tx-run! cfg move-files (assoc params :profile-id profile-id)))
|
||||
|
||||
;; --- COMMAND: Move project
|
||||
|
||||
@@ -396,14 +396,15 @@
|
||||
(defn clone-template
|
||||
[cfg {:keys [project-id profile-id] :as params} template]
|
||||
(db/tx-run! cfg (fn [{:keys [::db/conn ::wrk/executor] :as cfg}]
|
||||
;; NOTE: the importation process performs some operations that
|
||||
;; are not very friendly with virtual threads, and for avoid
|
||||
;; unexpected blocking of other concurrent operations we
|
||||
;; dispatch that operation to a dedicated executor.
|
||||
;; NOTE: the importation process performs some operations
|
||||
;; that are not very friendly with virtual threads, and for
|
||||
;; avoid unexpected blocking of other concurrent operations
|
||||
;; we dispatch that operation to a dedicated executor.
|
||||
(let [cfg (-> cfg
|
||||
(assoc ::bf.v1/project-id project-id)
|
||||
(assoc ::bf.v1/profile-id profile-id))
|
||||
result (px/invoke! executor (partial bf.v1/import-files! cfg template))]
|
||||
(assoc ::bf.v1/profile-id profile-id)
|
||||
(assoc ::bf.v1/input template))
|
||||
result (px/invoke! executor (partial bf.v1/import-files! cfg))]
|
||||
|
||||
(db/update! conn :project
|
||||
{:modified-at (dt/now)}
|
||||
|
||||
@@ -60,15 +60,25 @@
|
||||
(media/validate-media-type! content)
|
||||
(media/validate-media-size! content)
|
||||
|
||||
(db/run! cfg (fn [cfg]
|
||||
(let [object (create-file-media-object cfg params)
|
||||
props {:name (:name params)
|
||||
:file-id file-id
|
||||
:is-local (:is-local params)
|
||||
:size (:size content)
|
||||
:mtype (:mtype content)}]
|
||||
(with-meta object
|
||||
{::audit/replace-props props})))))
|
||||
(db/run! cfg (fn [{:keys [::db/conn] :as cfg}]
|
||||
;; We get the minimal file for proper checking if
|
||||
;; file is not already deleted
|
||||
(let [_ (files/get-minimal-file conn file-id)
|
||||
mobj (create-file-media-object cfg params)]
|
||||
|
||||
(db/update! conn :file
|
||||
{:modified-at (dt/now)
|
||||
:has-media-trimmed false}
|
||||
{:id file-id}
|
||||
{::db/return-keys false})
|
||||
|
||||
(with-meta mobj
|
||||
{::audit/replace-props
|
||||
{:name (:name params)
|
||||
:file-id file-id
|
||||
:is-local (:is-local params)
|
||||
:size (:size content)
|
||||
:mtype (:mtype content)}})))))
|
||||
|
||||
(defn- big-enough-for-thumbnail?
|
||||
"Checks if the provided image info is big enough for
|
||||
@@ -142,20 +152,14 @@
|
||||
:always
|
||||
(assoc ::image (process-main-image info)))))
|
||||
|
||||
(defn create-file-media-object
|
||||
[{:keys [::sto/storage ::db/conn ::wrk/executor]}
|
||||
(defn- create-file-media-object
|
||||
[{:keys [::sto/storage ::db/conn ::wrk/executor] :as cfg}
|
||||
{:keys [id file-id is-local name content]}]
|
||||
|
||||
(let [result (px/invoke! executor (partial process-image content))
|
||||
image (sto/put-object! storage (::image result))
|
||||
thumb (when-let [params (::thumb result)]
|
||||
(sto/put-object! storage params))]
|
||||
|
||||
(db/update! conn :file
|
||||
{:modified-at (dt/now)
|
||||
:has-media-trimmed false}
|
||||
{:id file-id})
|
||||
|
||||
(db/exec-one! conn [sql:create-file-media-object
|
||||
(or id (uuid/next))
|
||||
file-id is-local name
|
||||
@@ -182,7 +186,18 @@
|
||||
::sm/params schema:create-file-media-object-from-url}
|
||||
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id file-id] :as params}]
|
||||
(files/check-edition-permissions! pool profile-id file-id)
|
||||
(create-file-media-object-from-url cfg (assoc params :profile-id profile-id)))
|
||||
;; We get the minimal file for proper checking if file is not
|
||||
;; already deleted
|
||||
(let [_ (files/get-minimal-file cfg file-id)
|
||||
mobj (create-file-media-object-from-url cfg (assoc params :profile-id profile-id))]
|
||||
|
||||
(db/update! pool :file
|
||||
{:modified-at (dt/now)
|
||||
:has-media-trimmed false}
|
||||
{:id file-id}
|
||||
{::db/return-keys false})
|
||||
|
||||
mobj))
|
||||
|
||||
(defn download-image
|
||||
[{:keys [::http/client]} uri]
|
||||
@@ -216,7 +231,7 @@
|
||||
{:response-type :input-stream :sync? true})
|
||||
{:keys [size mtype]} (parse-and-validate response)
|
||||
path (tmp/tempfile :prefix "penpot.media.download.")
|
||||
written (io/write-to-file! body path :size size)]
|
||||
written (io/write* path body :size size)]
|
||||
|
||||
(when (not= written size)
|
||||
(ex/raise :type :internal
|
||||
|
||||
@@ -422,7 +422,9 @@
|
||||
:deleted-at deleted-at
|
||||
:id profile-id}})
|
||||
|
||||
(rph/with-transform {} (session/delete-fn cfg)))))
|
||||
|
||||
(-> (rph/wrap nil)
|
||||
(rph/with-transform (session/delete-fn cfg))))))
|
||||
|
||||
|
||||
;; --- HELPERS
|
||||
@@ -431,8 +433,11 @@
|
||||
"WITH owner_teams AS (
|
||||
SELECT tpr.team_id AS id
|
||||
FROM team_profile_rel AS tpr
|
||||
JOIN team AS t ON (t.id = tpr.team_id)
|
||||
WHERE tpr.is_owner IS TRUE
|
||||
AND tpr.profile_id = ?
|
||||
AND (t.deleted_at IS NULL OR
|
||||
t.deleted_at > now())
|
||||
)
|
||||
SELECT tpr.team_id AS id,
|
||||
count(tpr.profile_id) - 1 AS participants
|
||||
|
||||
@@ -222,7 +222,7 @@
|
||||
::webhooks/event? true}
|
||||
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id id team-id is-pinned] :as params}]
|
||||
(db/with-atomic [conn pool]
|
||||
(check-edition-permissions! conn profile-id id)
|
||||
(check-read-permissions! conn profile-id id)
|
||||
(db/exec-one! conn [sql:update-project-pin team-id id profile-id is-pinned is-pinned])
|
||||
nil))
|
||||
|
||||
|
||||
@@ -10,8 +10,8 @@
|
||||
[app.common.data.macros :as dm]
|
||||
[app.common.exceptions :as ex]
|
||||
[app.common.features :as cfeat]
|
||||
[app.common.logging :as l]
|
||||
[app.common.schema :as sm]
|
||||
[app.common.types.team :as tt]
|
||||
[app.common.uuid :as uuid]
|
||||
[app.config :as cf]
|
||||
[app.db :as db]
|
||||
@@ -20,20 +20,18 @@
|
||||
[app.loggers.audit :as audit]
|
||||
[app.main :as-alias main]
|
||||
[app.media :as media]
|
||||
[app.msgbus :as mbus]
|
||||
[app.rpc :as-alias rpc]
|
||||
[app.rpc.commands.profile :as profile]
|
||||
[app.rpc.doc :as-alias doc]
|
||||
[app.rpc.helpers :as rph]
|
||||
[app.rpc.permissions :as perms]
|
||||
[app.rpc.quotes :as quotes]
|
||||
[app.setup :as-alias setup]
|
||||
[app.storage :as sto]
|
||||
[app.tokens :as tokens]
|
||||
[app.util.blob :as blob]
|
||||
[app.util.services :as sv]
|
||||
[app.util.time :as dt]
|
||||
[app.worker :as wrk]
|
||||
[cuerdas.core :as str]))
|
||||
[clojure.set :as set]))
|
||||
|
||||
;; --- Helpers & Specs
|
||||
|
||||
@@ -82,7 +80,9 @@
|
||||
(cond-> row
|
||||
(some? features) (assoc :features (db/decode-pgarray features #{}))))
|
||||
|
||||
(defn- check-profile-muted
|
||||
;; FIXME: move
|
||||
|
||||
(defn check-profile-muted
|
||||
"Check if the member's email is part of the global bounce report"
|
||||
[conn member]
|
||||
(let [email (profile/clean-email (:email member))]
|
||||
@@ -92,7 +92,7 @@
|
||||
:email email
|
||||
:hint "the profile has reported repeatedly as spam or has bounces"))))
|
||||
|
||||
(defn- check-email-bounce
|
||||
(defn check-email-bounce
|
||||
"Check if the email is part of the global complain report"
|
||||
[conn email show?]
|
||||
(when (eml/has-bounce-reports? conn email)
|
||||
@@ -101,7 +101,7 @@
|
||||
:email (if show? email "private")
|
||||
:hint "this email has been repeatedly reported as bounce")))
|
||||
|
||||
(defn- check-email-spam
|
||||
(defn check-email-spam
|
||||
"Check if the member email is part of the global complain report"
|
||||
[conn email show?]
|
||||
(when (eml/has-complaint-reports? conn email)
|
||||
@@ -265,6 +265,8 @@
|
||||
[:fn #(or (contains? % :team-id)
|
||||
(contains? % :file-id))]])
|
||||
|
||||
;; FIXME: split in two separated requests
|
||||
|
||||
(sv/defmethod ::get-team-users
|
||||
"Get team users by team-id or by file-id"
|
||||
{::doc/added "1.17"
|
||||
@@ -302,20 +304,29 @@
|
||||
inner join project as p on (f.project_id = p.id)
|
||||
where p.team_id = ?")
|
||||
|
||||
(def sql:team-by-file
|
||||
"select p.team_id as id
|
||||
from project as p
|
||||
join file as f on (p.id = f.project_id)
|
||||
where f.id = ?")
|
||||
|
||||
(defn get-users
|
||||
[conn team-id]
|
||||
(db/exec! conn [sql:team-users team-id team-id team-id]))
|
||||
|
||||
(def sql:get-team-by-file
|
||||
"SELECT t.*
|
||||
FROM team AS t
|
||||
JOIN project AS p ON (p.team_id = t.id)
|
||||
JOIN file AS f ON (f.project_id = p.id)
|
||||
WHERE f.id = ?")
|
||||
|
||||
(defn get-team-for-file
|
||||
[conn file-id]
|
||||
(->> [sql:team-by-file file-id]
|
||||
(db/exec-one! conn)))
|
||||
(let [team (->> (db/exec! conn [sql:get-team-by-file file-id])
|
||||
(remove db/is-row-deleted?)
|
||||
(map decode-row)
|
||||
(first))]
|
||||
(when-not team
|
||||
(ex/raise :type :not-found
|
||||
:code :object-not-found
|
||||
:hint "database object not found"))
|
||||
|
||||
team))
|
||||
|
||||
;; --- Query: Team Stats
|
||||
|
||||
@@ -406,6 +417,7 @@
|
||||
::quotes/profile-id profile-id})
|
||||
|
||||
(let [features (-> (cfeat/get-enabled-features cf/flags)
|
||||
(set/difference cfeat/frontend-only-features)
|
||||
(cfeat/check-client-features! (:features params)))
|
||||
params (-> params
|
||||
(assoc :profile-id profile-id)
|
||||
@@ -503,8 +515,6 @@
|
||||
|
||||
;; --- Mutation: Leave Team
|
||||
|
||||
(declare role->params)
|
||||
|
||||
(defn leave-team
|
||||
[conn {:keys [profile-id id reassign-to]}]
|
||||
(let [perms (get-permissions conn profile-id id)
|
||||
@@ -534,7 +544,7 @@
|
||||
|
||||
;; assign owner role to new profile
|
||||
(db/update! conn :team-profile-rel
|
||||
(role->params :owner)
|
||||
(get tt/permissions-for-role :owner)
|
||||
{:team-id id :profile-id reassign-to}))
|
||||
|
||||
;; and finally, if all other conditions does not match and the
|
||||
@@ -606,24 +616,8 @@
|
||||
|
||||
;; --- Mutation: Team Update Role
|
||||
|
||||
;; Temporarily disabled viewer role
|
||||
;; https://tree.taiga.io/project/penpot/issue/1083
|
||||
(def valid-roles
|
||||
#{:owner :admin :editor #_:viewer})
|
||||
|
||||
(def schema:role
|
||||
[::sm/one-of valid-roles])
|
||||
|
||||
(defn role->params
|
||||
[role]
|
||||
(case role
|
||||
:admin {:is-owner false :is-admin true :can-edit true}
|
||||
:editor {:is-owner false :is-admin false :can-edit true}
|
||||
:owner {:is-owner true :is-admin true :can-edit true}
|
||||
:viewer {:is-owner false :is-admin false :can-edit false}))
|
||||
|
||||
(defn update-team-member-role
|
||||
[conn {:keys [profile-id team-id member-id role] :as params}]
|
||||
[{:keys [::db/conn ::mbus/msgbus]} {:keys [profile-id team-id member-id role] :as params}]
|
||||
;; We retrieve all team members instead of query the
|
||||
;; database for a single member. This is just for
|
||||
;; convenience, if this becomes a bottleneck or problematic,
|
||||
@@ -631,7 +625,6 @@
|
||||
(let [perms (get-permissions conn profile-id team-id)
|
||||
members (get-team-members conn team-id)
|
||||
member (d/seek #(= member-id (:id %)) members)
|
||||
|
||||
is-owner? (:is-owner perms)
|
||||
is-admin? (:is-admin perms)]
|
||||
|
||||
@@ -655,7 +648,14 @@
|
||||
(ex/raise :type :validation
|
||||
:code :cant-promote-to-owner))
|
||||
|
||||
(let [params (role->params role)]
|
||||
(mbus/pub! msgbus
|
||||
:topic member-id
|
||||
:message {:type :team-role-change
|
||||
:topic member-id
|
||||
:team-id team-id
|
||||
:role role})
|
||||
|
||||
(let [params (get tt/permissions-for-role role)]
|
||||
;; Only allow single owner on team
|
||||
(when (= role :owner)
|
||||
(db/update! conn :team-profile-rel
|
||||
@@ -673,14 +673,13 @@
|
||||
[:map {:title "update-team-member-role"}
|
||||
[:team-id ::sm/uuid]
|
||||
[:member-id ::sm/uuid]
|
||||
[:role schema:role]])
|
||||
[:role ::tt/role]])
|
||||
|
||||
(sv/defmethod ::update-team-member-role
|
||||
{::doc/added "1.17"
|
||||
::sm/params schema:update-team-member-role}
|
||||
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id] :as params}]
|
||||
(db/with-atomic [conn pool]
|
||||
(update-team-member-role conn (assoc params :profile-id profile-id))))
|
||||
[cfg {:keys [::rpc/profile-id] :as params}]
|
||||
(db/tx-run! cfg update-team-member-role (assoc params :profile-id profile-id)))
|
||||
|
||||
;; --- Mutation: Delete Team Member
|
||||
|
||||
@@ -692,9 +691,10 @@
|
||||
(sv/defmethod ::delete-team-member
|
||||
{::doc/added "1.17"
|
||||
::sm/params schema:delete-team-member}
|
||||
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id team-id member-id] :as params}]
|
||||
[{:keys [::db/pool ::mbus/msgbus] :as cfg} {:keys [::rpc/profile-id team-id member-id] :as params}]
|
||||
(db/with-atomic [conn pool]
|
||||
(let [perms (get-permissions conn profile-id team-id)]
|
||||
(let [team (get-team pool :profile-id profile-id :team-id team-id)
|
||||
perms (get-permissions conn profile-id team-id)]
|
||||
(when-not (or (:is-owner perms)
|
||||
(:is-admin perms))
|
||||
(ex/raise :type :validation
|
||||
@@ -707,6 +707,13 @@
|
||||
(db/delete! conn :team-profile-rel {:profile-id member-id
|
||||
:team-id team-id})
|
||||
|
||||
(mbus/pub! msgbus
|
||||
:topic member-id
|
||||
:message {:type :team-membership-change
|
||||
:change :removed
|
||||
:team-id team-id
|
||||
:team-name (:name team)})
|
||||
|
||||
nil)))
|
||||
|
||||
;; --- Mutation: Update Team Photo
|
||||
@@ -724,6 +731,7 @@
|
||||
::sm/params schema:update-team-photo}
|
||||
[cfg {:keys [::rpc/profile-id file] :as params}]
|
||||
;; Validate incoming mime type
|
||||
|
||||
(media/validate-media-type! file #{"image/jpeg" "image/png" "image/webp"})
|
||||
(update-team-photo cfg (assoc params :profile-id profile-id)))
|
||||
|
||||
@@ -745,534 +753,3 @@
|
||||
{:id team-id})
|
||||
|
||||
(assoc team :photo-id (:id photo)))))
|
||||
|
||||
;; --- Mutation: Create Team Invitation
|
||||
|
||||
(def sql:upsert-team-invitation
|
||||
"insert into team_invitation(id, team_id, email_to, role, valid_until)
|
||||
values (?, ?, ?, ?, ?)
|
||||
on conflict(team_id, email_to) do
|
||||
update set role = ?, valid_until = ?, updated_at = now()
|
||||
returning *")
|
||||
|
||||
(defn- create-invitation-token
|
||||
[cfg {:keys [profile-id valid-until team-id member-id member-email role]}]
|
||||
(tokens/generate (::setup/props cfg)
|
||||
{:iss :team-invitation
|
||||
:exp valid-until
|
||||
:profile-id profile-id
|
||||
:role role
|
||||
:team-id team-id
|
||||
:member-email member-email
|
||||
:member-id member-id}))
|
||||
|
||||
(defn- create-profile-identity-token
|
||||
[cfg profile-id]
|
||||
|
||||
(dm/assert!
|
||||
"expected valid uuid for profile-id"
|
||||
(uuid? profile-id))
|
||||
|
||||
(tokens/generate (::setup/props cfg)
|
||||
{:iss :profile-identity
|
||||
:profile-id profile-id
|
||||
:exp (dt/in-future {:days 30})}))
|
||||
|
||||
(def ^:private schema:create-invitation
|
||||
[:map {:title "params:create-invitation"}
|
||||
[::rpc/profile-id ::sm/uuid]
|
||||
[:team
|
||||
[:map
|
||||
[:id ::sm/uuid]
|
||||
[:name :string]]]
|
||||
[:profile
|
||||
[:map
|
||||
[:id ::sm/uuid]
|
||||
[:fullname :string]]]
|
||||
[:role [::sm/one-of valid-roles]]
|
||||
[:email ::sm/email]])
|
||||
|
||||
(def ^:private check-create-invitation-params!
|
||||
(sm/check-fn schema:create-invitation))
|
||||
|
||||
(defn- create-invitation
|
||||
[{:keys [::db/conn] :as cfg} {:keys [team profile role email] :as params}]
|
||||
|
||||
(dm/assert!
|
||||
"expected valid connection on cfg parameter"
|
||||
(db/connection? conn))
|
||||
|
||||
(dm/assert!
|
||||
"expected valid params for `create-invitation` fn"
|
||||
(check-create-invitation-params! params))
|
||||
|
||||
(let [email (profile/clean-email email)
|
||||
member (profile/get-profile-by-email conn email)]
|
||||
|
||||
(check-profile-muted conn member)
|
||||
(check-email-bounce conn email true)
|
||||
(check-email-spam conn email true)
|
||||
|
||||
;; When we have email verification disabled and invitation user is
|
||||
;; already present in the database, we proceed to add it to the
|
||||
;; team as-is, without email roundtrip.
|
||||
|
||||
;; TODO: if member does not exists and email verification is
|
||||
;; disabled, we should proceed to create the profile (?)
|
||||
(if (and (not (contains? cf/flags :email-verification))
|
||||
(some? member))
|
||||
(let [params (merge {:team-id (:id team)
|
||||
:profile-id (:id member)}
|
||||
(role->params role))]
|
||||
|
||||
;; Insert the invited member to the team
|
||||
(db/insert! conn :team-profile-rel params
|
||||
{::db/on-conflict-do-nothing? true})
|
||||
|
||||
;; If profile is not yet verified, mark it as verified because
|
||||
;; accepting an invitation link serves as verification.
|
||||
(when-not (:is-active member)
|
||||
(db/update! conn :profile
|
||||
{:is-active true}
|
||||
{:id (:id member)}))
|
||||
|
||||
nil)
|
||||
|
||||
(let [id (uuid/next)
|
||||
expire (dt/in-future "168h") ;; 7 days
|
||||
invitation (db/exec-one! conn [sql:upsert-team-invitation id
|
||||
(:id team) (str/lower email)
|
||||
(name role) expire
|
||||
(name role) expire])
|
||||
updated? (not= id (:id invitation))
|
||||
profile-id (:id profile)
|
||||
tprops {:profile-id profile-id
|
||||
:invitation-id (:id invitation)
|
||||
:valid-until expire
|
||||
:team-id (:id team)
|
||||
:member-email (:email-to invitation)
|
||||
:member-id (:id member)
|
||||
:role role}
|
||||
itoken (create-invitation-token cfg tprops)
|
||||
ptoken (create-profile-identity-token cfg profile-id)]
|
||||
|
||||
(when (contains? cf/flags :log-invitation-tokens)
|
||||
(l/info :hint "invitation token" :token itoken))
|
||||
|
||||
(let [props (-> (dissoc tprops :profile-id)
|
||||
(audit/clean-props))
|
||||
evname (if updated?
|
||||
"update-team-invitation"
|
||||
"create-team-invitation")
|
||||
event (-> (audit/event-from-rpc-params params)
|
||||
(assoc ::audit/name evname)
|
||||
(assoc ::audit/props props))]
|
||||
(audit/submit! cfg event))
|
||||
|
||||
(eml/send! {::eml/conn conn
|
||||
::eml/factory eml/invite-to-team
|
||||
:public-uri (cf/get :public-uri)
|
||||
:to email
|
||||
:invited-by (:fullname profile)
|
||||
:team (:name team)
|
||||
:token itoken
|
||||
:extra-data ptoken})
|
||||
|
||||
itoken))))
|
||||
|
||||
(defn- add-user-to-team
|
||||
[conn profile team role email]
|
||||
|
||||
(let [team-id (:id team)
|
||||
member (db/get* conn :profile
|
||||
{:email (str/lower email)}
|
||||
{::sql/columns [:id :email]})
|
||||
params (merge
|
||||
{:team-id team-id
|
||||
:profile-id (:id member)}
|
||||
(role->params role))]
|
||||
|
||||
;; Do not allow blocked users to join teams.
|
||||
(when (:is-blocked member)
|
||||
(ex/raise :type :restriction
|
||||
:code :profile-blocked))
|
||||
|
||||
(quotes/check!
|
||||
{::db/conn conn
|
||||
::quotes/id ::quotes/profiles-per-team
|
||||
::quotes/profile-id (:id member)
|
||||
::quotes/team-id team-id})
|
||||
|
||||
;; Insert the member to the team
|
||||
(db/insert! conn :team-profile-rel params {::db/on-conflict-do-nothing? true})
|
||||
|
||||
;; Delete any request
|
||||
(db/delete! conn :team-access-request
|
||||
{:team-id team-id :requester-id (:id member)})
|
||||
|
||||
;; Delete any invitation
|
||||
(db/delete! conn :team-invitation
|
||||
{:team-id team-id :email-to (:email member)})
|
||||
|
||||
(eml/send! {::eml/conn conn
|
||||
::eml/factory eml/join-team
|
||||
:public-uri (cf/get :public-uri)
|
||||
:to email
|
||||
:invited-by (:fullname profile)
|
||||
:team (:name team)
|
||||
:team-id (:id team)})))
|
||||
|
||||
(def sql:valid-requests-email
|
||||
"SELECT p.email
|
||||
FROM team_access_request AS tr
|
||||
JOIN profile AS p ON (tr.requester_id = p.id)
|
||||
WHERE tr.team_id = ?
|
||||
AND tr.auto_join_until > now()")
|
||||
|
||||
(defn- get-valid-requests-email
|
||||
[conn team-id]
|
||||
(db/exec! conn [sql:valid-requests-email team-id]))
|
||||
|
||||
(def ^:private xf:map-email
|
||||
(map :email))
|
||||
|
||||
(defn- create-team-invitations
|
||||
[{:keys [::db/conn] :as cfg} {:keys [profile team role emails] :as params}]
|
||||
(let [join-requests (into #{} xf:map-email
|
||||
(get-valid-requests-email conn (:id team)))
|
||||
team-members (into #{} xf:map-email
|
||||
(get-team-members conn (:id team)))
|
||||
|
||||
invitations (into #{}
|
||||
(comp
|
||||
;; We don't re-send inviation to
|
||||
;; already existing members
|
||||
(remove team-members)
|
||||
;; We don't send invitations to
|
||||
;; join-requested members
|
||||
(remove join-requests)
|
||||
(map (fn [email] (assoc params :email email)))
|
||||
(keep (partial create-invitation cfg)))
|
||||
emails)]
|
||||
|
||||
;; For requested invitations, do not send invitation emails, add
|
||||
;; the user directly to the team
|
||||
(->> (filter join-requests emails)
|
||||
(run! (partial add-user-to-team conn profile team role)))
|
||||
|
||||
invitations))
|
||||
|
||||
(def ^:private schema:create-team-invitations
  [:map {:title "create-team-invitations"}
   [:team-id ::sm/uuid]
   [:role schema:role]
   [:emails [::sm/set ::sm/email]]])

(def ^:private max-invitations-by-request-threshold
  "The number of invitations can be sent in a single rpc request"
  25)

(sv/defmethod ::create-team-invitations
  "A rpc call that allow to send a single or multiple invitations to
  join the team."
  {::doc/added "1.17"
   ::sm/params schema:create-team-invitations}
  [cfg {:keys [::rpc/profile-id team-id emails] :as params}]
  (let [perms   (get-permissions cfg profile-id team-id)
        profile (db/get-by-id cfg :profile profile-id)
        emails  (into #{} (map profile/clean-email) emails)]

    (when-not (:is-admin perms)
      (ex/raise :type :validation
                :code :insufficient-permissions))

    (when (> (count emails) max-invitations-by-request-threshold)
      (ex/raise :type :validation
                :code :max-invitations-by-request
                :hint "the maximum of invitation on single request is reached"
                :threshold max-invitations-by-request-threshold))

    (-> cfg
        (assoc ::quotes/profile-id profile-id)
        (assoc ::quotes/team-id team-id)
        (assoc ::quotes/incr (count emails))
        (quotes/check! {::quotes/id ::quotes/invitations-per-team}
                       {::quotes/id ::quotes/profiles-per-team}))

    ;; Check if the current profile is allowed to send emails
    (check-profile-muted cfg profile)

    (let [team        (db/get-by-id cfg :team team-id)
          ;; NOTE: Is important pass RPC method params down to the
          ;; `create-team-invitations` because it uses the implicit
          ;; RPC properties from params for fill necessary data on
          ;; emiting an entry to the audit-log
          invitations (db/tx-run! cfg create-team-invitations
                                  (-> params
                                      (assoc :profile profile)
                                      (assoc :team team)
                                      (assoc :emails emails)))]

      (with-meta {:total (count invitations)
                  :invitations invitations}
        {::audit/props {:invitations (count invitations)}}))))

;; --- Mutation: Create Team & Invite Members

(def ^:private schema:create-team-with-invitations
  [:map {:title "create-team-with-invitations"}
   [:name [:string {:max 250}]]
   [:features {:optional true} ::cfeat/features]
   [:id {:optional true} ::sm/uuid]
   [:emails [::sm/set ::sm/email]]
   [:role schema:role]])

(sv/defmethod ::create-team-with-invitations
  {::doc/added "1.17"
   ::sm/params schema:create-team-with-invitations
   ::db/transaction true}
  [{:keys [::db/conn] :as cfg} {:keys [::rpc/profile-id emails role name] :as params}]
  (let [features (-> (cfeat/get-enabled-features cf/flags)
                     (cfeat/check-client-features! (:features params)))

        params   (-> params
                     (assoc :profile-id profile-id)
                     (assoc :features features))

        team     (create-team cfg params)
        emails   (into #{} (map profile/clean-email) emails)]

    (-> cfg
        (assoc ::quotes/profile-id profile-id)
        (assoc ::quotes/team-id (:id team))
        (assoc ::quotes/incr (count emails))
        (quotes/check! {::quotes/id ::quotes/teams-per-profile}
                       {::quotes/id ::quotes/invitations-per-team}
                       {::quotes/id ::quotes/profiles-per-team}))

    (when (> (count emails) max-invitations-by-request-threshold)
      (ex/raise :type :validation
                :code :max-invitations-by-request
                :hint "the maximum of invitation on single request is reached"
                :threshold max-invitations-by-request-threshold))

    (let [props {:name name :features features}
          event (-> (audit/event-from-rpc-params params)
                    (assoc ::audit/name "create-team")
                    (assoc ::audit/props props))]
      (audit/submit! cfg event))

    ;; Create invitations for all provided emails.
    (let [profile     (db/get-by-id conn :profile profile-id)
          params      (-> params
                          (assoc :team team)
                          (assoc :profile profile)
                          (assoc :role role))
          invitations (->> emails
                           (map (fn [email] (assoc params :email email)))
                           (map (partial create-invitation cfg)))]

      (vary-meta team assoc ::audit/props {:invitations (count invitations)}))))

;; --- Query: get-team-invitation-token

(def ^:private schema:get-team-invitation-token
  [:map {:title "get-team-invitation-token"}
   [:team-id ::sm/uuid]
   [:email ::sm/email]])

(sv/defmethod ::get-team-invitation-token
  {::doc/added "1.17"
   ::sm/params schema:get-team-invitation-token}
  [{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id team-id email] :as params}]
  (check-read-permissions! pool profile-id team-id)
  (let [email  (profile/clean-email email)
        invit  (-> (db/get pool :team-invitation
                           {:team-id team-id
                            :email-to email})
                   (update :role keyword))

        member (profile/get-profile-by-email pool (:email-to invit))
        token  (create-invitation-token cfg {:team-id (:team-id invit)
                                             :profile-id profile-id
                                             :valid-until (:valid-until invit)
                                             :role (:role invit)
                                             :member-id (:id member)
                                             :member-email (or (:email member)
                                                               (profile/clean-email (:email-to invit)))})]
    {:token token}))

;; --- Mutation: Update invitation role

(def ^:private schema:update-team-invitation-role
  [:map {:title "update-team-invitation-role"}
   [:team-id ::sm/uuid]
   [:email ::sm/email]
   [:role schema:role]])

(sv/defmethod ::update-team-invitation-role
  {::doc/added "1.17"
   ::sm/params schema:update-team-invitation-role}
  [{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id team-id email role] :as params}]
  (db/with-atomic [conn pool]
    (let [perms (get-permissions conn profile-id team-id)]

      (when-not (:is-admin perms)
        (ex/raise :type :validation
                  :code :insufficient-permissions))

      (db/update! conn :team-invitation
                  {:role (name role) :updated-at (dt/now)}
                  {:team-id team-id :email-to (profile/clean-email email)})
      nil)))

;; --- Mutation: Delete invitation

(def ^:private schema:delete-team-invition
  [:map {:title "delete-team-invitation"}
   [:team-id ::sm/uuid]
   [:email ::sm/email]])

(sv/defmethod ::delete-team-invitation
  {::doc/added "1.17"
   ::sm/params schema:delete-team-invition}
  [{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id team-id email] :as params}]
  (db/with-atomic [conn pool]
    (let [perms (get-permissions conn profile-id team-id)]

      (when-not (:is-admin perms)
        (ex/raise :type :validation
                  :code :insufficient-permissions))

      (let [invitation (db/delete! conn :team-invitation
                                   {:team-id team-id
                                    :email-to (profile/clean-email email)}
                                   {::db/return-keys true})]
        (rph/wrap nil {::audit/props {:invitation-id (:id invitation)}})))))


;; --- Mutation: Request Team Invitation

(def sql:upsert-team-access-request
  "INSERT INTO team_access_request (id, team_id, requester_id, valid_until, auto_join_until)
   VALUES (?, ?, ?, ?, ?)
       ON conflict(id)
       DO UPDATE SET valid_until = ?, auto_join_until = ?, updated_at = now()
   RETURNING *")

(def sql:team-access-request
  "SELECT id, (valid_until < now()) AS expired
     FROM team_access_request
    WHERE team_id = ?
      AND requester_id = ?")

(def sql:team-owner
  "SELECT profile_id
     FROM team_profile_rel
    WHERE team_id = ?
      AND is_owner = true")

(defn- create-team-access-request
  [{:keys [::db/conn] :as cfg} {:keys [team requester team-owner file is-viewer] :as params}]
  (let [old-request (->> (db/exec-one! conn [sql:team-access-request (:id team) (:id requester)])
                         (decode-row))]
    (when (false? (:expired old-request))
      (ex/raise :type :validation
                :code :request-already-sent
                :hint "you have already made a request to join this team less than 24 hours ago"))

    (let [id              (or (:id old-request) (uuid/next))
          valid_until     (dt/in-future "24h")
          auto_join_until (dt/in-future "168h") ;; 7 days
          request         (db/exec-one! conn [sql:upsert-team-access-request
                                              id (:id team) (:id requester) valid_until auto_join_until
                                              valid_until auto_join_until])
          factory         (cond
                            (and (some? file) (:is-default team) is-viewer)
                            eml/request-file-access-yourpenpot-view
                            (and (some? file) (:is-default team))
                            eml/request-file-access-yourpenpot
                            (some? file)
                            eml/request-file-access
                            :else
                            eml/request-team-access)
          page-id         (when (some? file)
                            (-> file :data :pages first))]

      ;; TODO needs audit?

      (eml/send! {::eml/conn conn
                  ::eml/factory factory
                  :public-uri (cf/get :public-uri)
                  :to (:email team-owner)
                  :requested-by (:fullname requester)
                  :requested-by-email (:email requester)
                  :team-name (:name team)
                  :team-id (:id team)
                  :file-name (:name file)
                  :file-id (:id file)
                  :page-id page-id})

      request)))

(def ^:private schema:create-team-access-request
  [:and
   [:map {:title "create-team-access-request"}
    [:file-id {:optional true} ::sm/uuid]
    [:team-id {:optional true} ::sm/uuid]
    [:is-viewer {:optional true} ::sm/boolean]]

   [:fn (fn [params]
          (or (contains? params :file-id)
              (contains? params :team-id)))]])

(sv/defmethod ::create-team-access-request
  "A rpc call that allow to request for an invitations to join the team."
  {::doc/added "2.2.0"
   ::sm/params schema:create-team-access-request}
  [cfg {:keys [::rpc/profile-id file-id team-id is-viewer] :as params}]

  (db/tx-run! cfg
              (fn [{:keys [::db/conn] :as cfg}]

                (let [requester  (db/get-by-id conn :profile profile-id)
                      team-id    (if (some? team-id)
                                   team-id
                                   (:id (get-team-for-file conn file-id)))
                      team       (db/get-by-id conn :team team-id)
                      owner-id   (->> (db/exec! conn [sql:team-owner (:id team)])
                                      (map decode-row)
                                      (first)
                                      :profile-id)
                      team-owner (db/get-by-id conn :profile owner-id)
                      file       (when (some? file-id)
                                   (db/get* conn :file
                                            {:id file-id}
                                            {::sql/columns [:id :name :data]}))
                      file       (when (some? file)
                                   (assoc file :data (blob/decode (:data file))))]

                  ;;TODO needs quotes?

                  (when (or (nil? requester) (nil? team) (nil? team-owner) (and (some? file-id) (nil? file)))
                    (ex/raise :type :validation
                              :code :invalid-parameters))

                  ;; Check that the requester is not muted
                  (check-profile-muted conn requester)

                  ;; Check that the owner is not marked as bounce nor spam
                  (check-email-bounce conn (:email team-owner) false)
                  (check-email-spam conn (:email team-owner) true)

                  (let [request (create-team-access-request
                                 cfg {:team team :requester requester :team-owner team-owner :file file :is-viewer is-viewer})]
                    (when request
                      (with-meta {:request request}
                        {::audit/props {:request 1}})))))))

backend/src/app/rpc/commands/teams_invitations.clj (new file, 576 lines)
@@ -0,0 +1,576 @@
;; This Source Code Form is subject to the terms of the Mozilla Public
;; License, v. 2.0. If a copy of the MPL was not distributed with this
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
;;
;; Copyright (c) KALEIDOS INC

(ns app.rpc.commands.teams-invitations
  (:require
   [app.common.data.macros :as dm]
   [app.common.exceptions :as ex]
   [app.common.features :as cfeat]
   [app.common.logging :as l]
   [app.common.schema :as sm]
   [app.common.types.team :as types.team]
   [app.common.uuid :as uuid]
   [app.config :as cf]
   [app.db :as db]
   [app.db.sql :as sql]
   [app.email :as eml]
   [app.loggers.audit :as audit]
   [app.main :as-alias main]
   [app.rpc :as-alias rpc]
   [app.rpc.commands.files :as files]
   [app.rpc.commands.profile :as profile]
   [app.rpc.commands.teams :as teams]
   [app.rpc.doc :as-alias doc]
   [app.rpc.helpers :as rph]
   [app.rpc.quotes :as quotes]
   [app.setup :as-alias setup]
   [app.tokens :as tokens]
   [app.util.services :as sv]
   [app.util.time :as dt]
   [cuerdas.core :as str]))

;; --- Mutation: Create Team Invitation

(def sql:upsert-team-invitation
  "insert into team_invitation(id, team_id, email_to, created_by, role, valid_until)
   values (?, ?, ?, ?, ?, ?)
   on conflict(team_id, email_to) do
      update set role = ?, valid_until = ?, updated_at = now()
   returning *")

(defn- create-invitation-token
  [cfg {:keys [profile-id valid-until team-id member-id member-email role]}]
  (tokens/generate (::setup/props cfg)
                   {:iss :team-invitation
                    :exp valid-until
                    :profile-id profile-id
                    :role role
                    :team-id team-id
                    :member-email member-email
                    :member-id member-id}))

(defn- create-profile-identity-token
  [cfg profile-id]

  (dm/assert!
   "expected valid uuid for profile-id"
   (uuid? profile-id))

  (tokens/generate (::setup/props cfg)
                   {:iss :profile-identity
                    :profile-id profile-id
                    :exp (dt/in-future {:days 30})}))

(def ^:private schema:create-invitation
  [:map {:title "params:create-invitation"}
   [::rpc/profile-id ::sm/uuid]
   [:team
    [:map
     [:id ::sm/uuid]
     [:name :string]]]
   [:profile
    [:map
     [:id ::sm/uuid]
     [:fullname :string]]]
   [:role ::types.team/role]
   [:email ::sm/email]])

(def ^:private check-create-invitation-params!
  (sm/check-fn schema:create-invitation))

(defn- create-invitation
  [{:keys [::db/conn] :as cfg} {:keys [team profile role email] :as params}]

  (dm/assert!
   "expected valid connection on cfg parameter"
   (db/connection? conn))

  (dm/assert!
   "expected valid params for `create-invitation` fn"
   (check-create-invitation-params! params))

  (let [email  (profile/clean-email email)
        member (profile/get-profile-by-email conn email)]

    (teams/check-profile-muted conn member)
    (teams/check-email-bounce conn email true)
    (teams/check-email-spam conn email true)

    ;; When we have email verification disabled and invitation user is
    ;; already present in the database, we proceed to add it to the
    ;; team as-is, without email roundtrip.

    ;; TODO: if member does not exists and email verification is
    ;; disabled, we should proceed to create the profile (?)
    (if (and (not (contains? cf/flags :email-verification))
             (some? member))
      (let [params (merge {:team-id (:id team)
                           :profile-id (:id member)}
                          (get types.team/permissions-for-role role))]

        ;; Insert the invited member to the team
        (db/insert! conn :team-profile-rel params
                    {::db/on-conflict-do-nothing? true})

        ;; If profile is not yet verified, mark it as verified because
        ;; accepting an invitation link serves as verification.
        (when-not (:is-active member)
          (db/update! conn :profile
                      {:is-active true}
                      {:id (:id member)}))

        nil)

      (let [id         (uuid/next)
            expire     (dt/in-future "168h") ;; 7 days
            invitation (db/exec-one! conn [sql:upsert-team-invitation id
                                           (:id team) (str/lower email)
                                           (:id profile)
                                           (name role) expire
                                           (name role) expire])
            updated?   (not= id (:id invitation))
            profile-id (:id profile)
            tprops     {:profile-id profile-id
                        :invitation-id (:id invitation)
                        :valid-until expire
                        :team-id (:id team)
                        :member-email (:email-to invitation)
                        :member-id (:id member)
                        :role role}
            itoken     (create-invitation-token cfg tprops)
            ptoken     (create-profile-identity-token cfg profile-id)]

        (when (contains? cf/flags :log-invitation-tokens)
          (l/info :hint "invitation token" :token itoken))

        (let [props  (-> (dissoc tprops :profile-id)
                         (audit/clean-props))
              evname (if updated?
                       "update-team-invitation"
                       "create-team-invitation")
              event  (-> (audit/event-from-rpc-params params)
                         (assoc ::audit/name evname)
                         (assoc ::audit/props props))]
          (audit/submit! cfg event))

        (eml/send! {::eml/conn conn
                    ::eml/factory eml/invite-to-team
                    :public-uri (cf/get :public-uri)
                    :to email
                    :invited-by (:fullname profile)
                    :team (:name team)
                    :token itoken
                    :extra-data ptoken})

        itoken))))

(defn- add-user-to-team
  [conn profile team role email]

  (let [team-id (:id team)
        member  (db/get* conn :profile
                         {:email (str/lower email)}
                         {::sql/columns [:id :email]})
        params  (merge
                 {:team-id team-id
                  :profile-id (:id member)}
                 (get types.team/permissions-for-role role))]

    ;; Do not allow blocked users to join teams.
    (when (:is-blocked member)
      (ex/raise :type :restriction
                :code :profile-blocked))

    (quotes/check!
     {::db/conn conn
      ::quotes/id ::quotes/profiles-per-team
      ::quotes/profile-id (:id member)
      ::quotes/team-id team-id})

    ;; Insert the member to the team
    (db/insert! conn :team-profile-rel params {::db/on-conflict-do-nothing? true})

    ;; Delete any request
    (db/delete! conn :team-access-request
                {:team-id team-id :requester-id (:id member)})

    ;; Delete any invitation
    (db/delete! conn :team-invitation
                {:team-id team-id :email-to (:email member)})

    (eml/send! {::eml/conn conn
                ::eml/factory eml/join-team
                :public-uri (cf/get :public-uri)
                :to email
                :invited-by (:fullname profile)
                :team (:name team)
                :team-id (:id team)})))

(def sql:valid-requests-email
  "SELECT p.email
     FROM team_access_request AS tr
     JOIN profile AS p ON (tr.requester_id = p.id)
    WHERE tr.team_id = ?
      AND tr.auto_join_until > now()")

(defn- get-valid-requests-email
  [conn team-id]
  (db/exec! conn [sql:valid-requests-email team-id]))

(def ^:private xf:map-email
  (map :email))

(defn- create-team-invitations
  [{:keys [::db/conn] :as cfg} {:keys [profile team role emails] :as params}]
  (let [join-requests (into #{} xf:map-email
                            (get-valid-requests-email conn (:id team)))
        team-members  (into #{} xf:map-email
                            (teams/get-team-members conn (:id team)))

        invitations   (into #{}
                            (comp
                             ;; We don't re-send inviation to
                             ;; already existing members
                             (remove team-members)
                             ;; We don't send invitations to
                             ;; join-requested members
                             (remove join-requests)
                             (map (fn [email] (assoc params :email email)))
                             (keep (partial create-invitation cfg)))
                            emails)]

    ;; For requested invitations, do not send invitation emails, add
    ;; the user directly to the team
    (->> (filter join-requests emails)
         (run! (partial add-user-to-team conn profile team role)))

    invitations))

(def ^:private schema:create-team-invitations
  [:map {:title "create-team-invitations"}
   [:team-id ::sm/uuid]
   [:role ::types.team/role]
   [:emails [::sm/set ::sm/email]]])

(def ^:private max-invitations-by-request-threshold
  "The number of invitations can be sent in a single rpc request"
  25)

(sv/defmethod ::create-team-invitations
  "A rpc call that allow to send a single or multiple invitations to
  join the team."
  {::doc/added "1.17"
   ::doc/module :teams
   ::sm/params schema:create-team-invitations}
  [cfg {:keys [::rpc/profile-id team-id emails] :as params}]
  (let [perms   (teams/get-permissions cfg profile-id team-id)
        profile (db/get-by-id cfg :profile profile-id)
        emails  (into #{} (map profile/clean-email) emails)]

    (when-not (:is-admin perms)
      (ex/raise :type :validation
                :code :insufficient-permissions))

    (when (> (count emails) max-invitations-by-request-threshold)
      (ex/raise :type :validation
                :code :max-invitations-by-request
                :hint "the maximum of invitation on single request is reached"
                :threshold max-invitations-by-request-threshold))

    (-> cfg
        (assoc ::quotes/profile-id profile-id)
        (assoc ::quotes/team-id team-id)
        (assoc ::quotes/incr (count emails))
        (quotes/check! {::quotes/id ::quotes/invitations-per-team}
                       {::quotes/id ::quotes/profiles-per-team}))

    ;; Check if the current profile is allowed to send emails
    (teams/check-profile-muted cfg profile)

    (let [team        (db/get-by-id cfg :team team-id)
          ;; NOTE: Is important pass RPC method params down to the
          ;; `create-team-invitations` because it uses the implicit
          ;; RPC properties from params for fill necessary data on
          ;; emiting an entry to the audit-log
          invitations (db/tx-run! cfg create-team-invitations
                                  (-> params
                                      (assoc :profile profile)
                                      (assoc :team team)
                                      (assoc :emails emails)))]

      (with-meta {:total (count invitations)
                  :invitations invitations}
        {::audit/props {:invitations (count invitations)}}))))

;; --- Mutation: Create Team & Invite Members

(def ^:private schema:create-team-with-invitations
  [:map {:title "create-team-with-invitations"}
   [:name [:string {:max 250}]]
   [:features {:optional true} ::cfeat/features]
   [:id {:optional true} ::sm/uuid]
   [:emails [::sm/set ::sm/email]]
   [:role ::types.team/role]])

(sv/defmethod ::create-team-with-invitations
  {::doc/added "1.17"
   ::doc/module :teams
   ::sm/params schema:create-team-with-invitations
   ::db/transaction true}
  [{:keys [::db/conn] :as cfg} {:keys [::rpc/profile-id emails role name] :as params}]
  (let [features (-> (cfeat/get-enabled-features cf/flags)
                     (cfeat/check-client-features! (:features params)))

        params   (-> params
                     (assoc :profile-id profile-id)
                     (assoc :features features))

        team     (teams/create-team cfg params)
        emails   (into #{} (map profile/clean-email) emails)]

    (-> cfg
        (assoc ::quotes/profile-id profile-id)
        (assoc ::quotes/team-id (:id team))
        (assoc ::quotes/incr (count emails))
        (quotes/check! {::quotes/id ::quotes/teams-per-profile}
                       {::quotes/id ::quotes/invitations-per-team}
                       {::quotes/id ::quotes/profiles-per-team}))

    (when (> (count emails) max-invitations-by-request-threshold)
      (ex/raise :type :validation
                :code :max-invitations-by-request
                :hint "the maximum of invitation on single request is reached"
                :threshold max-invitations-by-request-threshold))

    (let [props {:name name :features features}
          event (-> (audit/event-from-rpc-params params)
                    (assoc ::audit/name "create-team")
                    (assoc ::audit/props props))]
      (audit/submit! cfg event))

    ;; Create invitations for all provided emails.
    (let [profile     (db/get-by-id conn :profile profile-id)
          params      (-> params
                          (assoc :team team)
                          (assoc :profile profile)
                          (assoc :role role))
          invitations (->> emails
                           (map (fn [email] (assoc params :email email)))
                           (map (partial create-invitation cfg)))]

      (vary-meta team assoc ::audit/props {:invitations (count invitations)}))))

;; --- Query: get-team-invitation-token

(def ^:private schema:get-team-invitation-token
  [:map {:title "get-team-invitation-token"}
   [:team-id ::sm/uuid]
   [:email ::sm/email]])

(sv/defmethod ::get-team-invitation-token
  {::doc/added "1.17"
   ::doc/module :teams
   ::sm/params schema:get-team-invitation-token}
  [{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id team-id email] :as params}]
  (teams/check-read-permissions! pool profile-id team-id)
  (let [email  (profile/clean-email email)
        invit  (-> (db/get pool :team-invitation
                           {:team-id team-id
                            :email-to email})
                   (update :role keyword))

        member (profile/get-profile-by-email pool (:email-to invit))
        token  (create-invitation-token cfg {:team-id (:team-id invit)
                                             :profile-id profile-id
                                             :valid-until (:valid-until invit)
                                             :role (:role invit)
                                             :member-id (:id member)
                                             :member-email (or (:email member)
                                                               (profile/clean-email (:email-to invit)))})]
    {:token token}))

;; --- Mutation: Update invitation role

(def ^:private schema:update-team-invitation-role
  [:map {:title "update-team-invitation-role"}
   [:team-id ::sm/uuid]
   [:email ::sm/email]
   [:role ::types.team/role]])

(sv/defmethod ::update-team-invitation-role
  {::doc/added "1.17"
   ::doc/module :teams
   ::sm/params schema:update-team-invitation-role}
  [{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id team-id email role] :as params}]
  (db/with-atomic [conn pool]
    (let [perms (teams/get-permissions conn profile-id team-id)]

      (when-not (:is-admin perms)
        (ex/raise :type :validation
                  :code :insufficient-permissions))

      (db/update! conn :team-invitation
                  {:role (name role) :updated-at (dt/now)}
                  {:team-id team-id :email-to (profile/clean-email email)})

      nil)))

;; --- Mutation: Delete invitation

(def ^:private schema:delete-team-invition
  [:map {:title "delete-team-invitation"}
   [:team-id ::sm/uuid]
   [:email ::sm/email]])

(sv/defmethod ::delete-team-invitation
  {::doc/added "1.17"
   ::sm/params schema:delete-team-invition}
  [{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id team-id email] :as params}]
  (db/with-atomic [conn pool]
    (let [perms (teams/get-permissions conn profile-id team-id)]

      (when-not (:is-admin perms)
        (ex/raise :type :validation
                  :code :insufficient-permissions))

      (let [invitation (db/delete! conn :team-invitation
                                   {:team-id team-id
                                    :email-to (profile/clean-email email)}
                                   {::db/return-keys true})]
        (rph/wrap nil {::audit/props {:invitation-id (:id invitation)}})))))

;; --- Mutation: Request Team Invitation

(def ^:private sql:get-team-owner
  "SELECT p.*
     FROM profile AS p
     JOIN team_profile_rel AS tpr ON (tpr.profile_id = p.id)
    WHERE tpr.team_id = ?
      AND tpr.is_owner IS TRUE")

(defn- get-team-owner
  "Return a complete profile of the team owner"
  [conn team-id]
  (->> (db/exec! conn [sql:get-team-owner team-id])
       (remove db/is-row-deleted?)
       (map profile/decode-row)
       (first)))

(defn- check-existing-team-access-request
  "Checks if an existing team access request is still valid"
  [conn team-id profile-id]
  (when-let [request (db/get* conn :team-access-request
                              {:team-id team-id
                               :requester-id profile-id})]
    (when (dt/is-after? (:valid-until request) (dt/now))
      (ex/raise :type :validation
                :code :request-already-sent
                :hint "you have already made a request to join this team less than 24 hours ago"))))

(def ^:private sql:upsert-team-access-request
  "INSERT INTO team_access_request (id, team_id, requester_id, valid_until, auto_join_until)
   VALUES (?, ?, ?, ?, ?)
       ON CONFLICT (team_id, requester_id)
       DO UPDATE SET valid_until = ?, auto_join_until = ?, updated_at = now()
   RETURNING *")

(defn- upsert-team-access-request
  "Create or update team access request for provided team and profile-id"
  [conn team-id requester-id]
  (check-existing-team-access-request conn team-id requester-id)
  (let [valid-until     (dt/in-future {:hours 24})
        auto-join-until (dt/in-future {:days 7})
        request-id      (uuid/next)]
    (db/exec-one! conn [sql:upsert-team-access-request
                        request-id team-id requester-id
                        valid-until auto-join-until
                        valid-until auto-join-until])))

(defn- get-file-for-team-access-request
  "A specific method for obtain a file with name and page-id used for
  team request access procediment"
  [cfg file-id]
  (let [file (files/get-file cfg file-id :migrate? false)]
    (-> file
        (dissoc :data)
        (dissoc :deleted-at)
        (assoc :page-id (-> file :data :pages first)))))

(def ^:private schema:create-team-access-request
  [:and
   [:map {:title "create-team-access-request"}
    [:file-id {:optional true} ::sm/uuid]
    [:team-id {:optional true} ::sm/uuid]
    [:is-viewer {:optional true} ::sm/boolean]]

   [:fn (fn [params]
          (or (contains? params :file-id)
              (contains? params :team-id)))]])

(sv/defmethod ::create-team-access-request
  "A rpc call that allow to request for an invitations to join the team."
  {::doc/added "2.2.0"
   ::doc/module :teams
   ::sm/params schema:create-team-access-request
   ::db/transaction true}
  [{:keys [::db/conn] :as cfg}
   {:keys [::rpc/profile-id file-id team-id is-viewer] :as params}]

  (let [requester  (profile/get-profile conn profile-id)
        team       (if team-id
                     (->> (db/get-by-id conn :team team-id)
                          (teams/decode-row))
                     (teams/get-team-for-file conn file-id))

        team-id    (:id team)

        team-owner (get-team-owner conn team-id)

        file       (when (some? file-id)
                     (get-file-for-team-access-request cfg file-id))]

    (-> cfg
        (assoc ::quotes/profile-id profile-id)
        (assoc ::quotes/team-id team-id)
        (quotes/check! {::quotes/id ::quotes/team-access-requests-per-team}
                       {::quotes/id ::quotes/team-access-requests-per-requester}))

    (teams/check-profile-muted conn requester)
    (teams/check-email-bounce conn (:email team-owner) false)
    (teams/check-email-spam conn (:email team-owner) true)

    (let [request (upsert-team-access-request conn team-id profile-id)
          factory (cond
                    (and (some? file) (:is-default team) is-viewer)
                    eml/request-file-access-yourpenpot-view

                    (and (some? file) (:is-default team))
                    eml/request-file-access-yourpenpot

                    (some? file)
                    eml/request-file-access

                    :else
                    eml/request-team-access)]

      (eml/send! {::eml/conn conn
                  ::eml/factory factory
                  :public-uri (cf/get :public-uri)
                  :to (:email team-owner)
                  :requested-by (:fullname requester)
                  :requested-by-email (:email requester)
                  :team-name (:name team)
                  :team-id team-id
                  :file-name (:name file)
                  :file-id file-id
                  :page-id (:page-id file)})

      (with-meta {:request request}
        {::audit/props {:request 1}}))))

@@ -8,6 +8,7 @@
  (:require
   [app.common.exceptions :as ex]
   [app.common.schema :as sm]
   [app.common.types.team :as types.team]
   [app.config :as cf]
   [app.db :as db]
   [app.db.sql :as-alias sql]
@@ -16,7 +17,6 @@
   [app.main :as-alias main]
   [app.rpc :as-alias rpc]
   [app.rpc.commands.profile :as profile]
   [app.rpc.commands.teams :as teams]
   [app.rpc.doc :as-alias doc]
   [app.rpc.helpers :as rph]
   [app.rpc.quotes :as quotes]
@@ -92,7 +92,7 @@
        params (merge
                {:team-id team-id
                 :profile-id (:id member)}
                (teams/role->params role))]
                (get types.team/permissions-for-role role))]

    ;; Do not allow blocked users accept invitations.
    (when (:is-blocked member)
@@ -128,7 +128,7 @@
   [:iss :keyword]
   [:exp ::dt/instant]
   [:profile-id ::sm/uuid]
   [:role teams/schema:role]
   [:role ::types.team/role]
   [:team-id ::sm/uuid]
   [:member-email ::sm/email]
   [:member-id {:optional true} ::sm/uuid]])
@@ -166,13 +166,28 @@
  ;; invited team.
  (let [props {:team-id (:team-id claims)
               :role (:role claims)
               :invitation-id (:id invitation)}
        event (-> (audit/event-from-rpc-params params)
                  (assoc ::audit/name "accept-team-invitation")
                  (assoc ::audit/props props))]
               :invitation-id (:id invitation)}]

    (audit/submit!
     cfg
     (-> (audit/event-from-rpc-params params)
         (assoc ::audit/name "accept-team-invitation")
         (assoc ::audit/props props)))

    ;; NOTE: Backward compatibility; old invitations can
    ;; have the `created-by` to be nil; so in this case we
    ;; don't submit this event to the audit-log
    (when-let [created-by (:created-by invitation)]
      (audit/submit!
       cfg
       (-> (audit/event-from-rpc-params params)
           (assoc ::audit/profile-id created-by)
           (assoc ::audit/name "accept-team-invitation-from")
           (assoc ::audit/props (assoc props
                                       :profile-id (:id profile)
                                       :email (:email profile))))))

    (accept-invitation cfg claims invitation profile)
    (audit/submit! cfg event)
    (assoc claims :state :created))

    (ex/raise :type :validation

@@ -15,12 +15,27 @@
   [app.http.client :as http]
   [app.loggers.webhooks :as webhooks]
   [app.rpc :as-alias rpc]
   [app.rpc.commands.teams :refer [check-edition-permissions! check-read-permissions!]]
   [app.rpc.commands.teams :refer [check-read-permissions!] :as t]
   [app.rpc.doc :as-alias doc]
   [app.rpc.permissions :as perms]
   [app.util.services :as sv]
   [app.util.time :as dt]
   [cuerdas.core :as str]))

(defn get-webhooks-permissions
  [conn profile-id team-id creator-id]
  (let [permissions (t/get-permissions conn profile-id team-id)

        can-edit    (boolean (or (:can-edit permissions)
                                 (= profile-id creator-id)))]
    (assoc permissions :can-edit can-edit)))

(def has-webhook-edit-permissions?
  (perms/make-edition-predicate-fn get-webhooks-permissions))

(def check-webhook-edition-permissions!
  (perms/make-check-fn has-webhook-edit-permissions?))

(defn decode-row
  [{:keys [uri] :as row}]
  (cond-> row
@@ -65,11 +80,12 @@
           max-hooks-for-team)))))

(defn- insert-webhook!
  [{:keys [::db/pool]} {:keys [team-id uri mtype is-active] :as params}]
  [{:keys [::db/pool]} {:keys [team-id uri mtype is-active ::rpc/profile-id] :as params}]
  (-> (db/insert! pool :webhook
                  {:id (uuid/next)
                   :team-id team-id
                   :uri (str uri)
                   :profile-id profile-id
                   :is-active is-active
                   :mtype mtype})
      (decode-row)))
@@ -101,7 +117,7 @@
  {::doc/added "1.17"
   ::sm/params schema:create-webhook}
  [{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id team-id] :as params}]
  (check-edition-permissions! pool profile-id team-id)
  (check-webhook-edition-permissions! pool profile-id team-id profile-id)
  (validate-quotes! cfg params)
  (validate-webhook! cfg nil params)
  (insert-webhook! cfg params))
@@ -118,7 +134,7 @@
   ::sm/params schema:update-webhook}
  [{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id id] :as params}]
  (let [whook (-> (db/get pool :webhook {:id id}) (decode-row))]
    (check-edition-permissions! pool profile-id (:team-id whook))
    (check-webhook-edition-permissions! pool profile-id (:team-id whook) (:profile-id whook))
    (validate-webhook! cfg whook params)
    (update-webhook! cfg whook params)))

@@ -132,15 +148,17 @@
  [{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id id]}]
  (db/with-atomic [conn pool]
    (let [whook (-> (db/get conn :webhook {:id id}) decode-row)]
      (check-edition-permissions! conn profile-id (:team-id whook))
      (check-webhook-edition-permissions! conn profile-id (:team-id whook) (:profile-id whook))
      (db/delete! conn :webhook {:id id})
      nil)))

;; --- Query: Webhooks

(def sql:get-webhooks
  "select id, uri, mtype, is_active, error_code, error_count
     from webhook where team_id = ? order by uri")
  "SELECT id, uri, mtype, is_active, error_code, error_count, profile_id
     FROM webhook
    WHERE team_id = ?
    ORDER BY uri")

(def ^:private schema:get-webhooks
  [:map {:title "get-webhooks"}

@@ -29,7 +29,7 @@
|
||||
[app.util.services :as-alias sv]
|
||||
[buddy.core.codecs :as bc]
|
||||
[buddy.core.hash :as bh]
|
||||
[ring.response :as-alias rres]))
|
||||
[yetti.response :as-alias yres]))
|
||||
|
||||
(def
|
||||
^{:dynamic true
|
||||
@@ -48,20 +48,25 @@
|
||||
(str "W/\"" (encode s) "\""))
|
||||
|
||||
(defn wrap
|
||||
[_ f {:keys [::get-object ::key-fn ::reuse-key?] :as mdata}]
|
||||
[_ f {:keys [::get-object ::key-fn ::reuse-key?] :or {reuse-key? true} :as mdata}]
|
||||
(if (and (ifn? get-object) (ifn? key-fn))
|
||||
(do
|
||||
(l/trc :hint "instrumenting method" :service (::sv/name mdata))
|
||||
(fn [cfg {:keys [::key] :as params}]
|
||||
(if *enabled*
|
||||
(let [key' (when (or key reuse-key?)
|
||||
(some->> (get-object cfg params) (key-fn params) (fmt-key)))]
|
||||
(let [object (when (some? key)
|
||||
(get-object cfg params))
|
||||
key' (when (some? object)
|
||||
(->> object (key-fn params) (fmt-key)))]
|
||||
(if (and (some? key) (= key key'))
|
||||
(fn [_] {::rres/status 304})
|
||||
(let [result (f cfg params)
|
||||
(fn [_] {::yres/status 304})
|
||||
(let [params (if (some? object)
|
||||
(assoc params ::object object)
|
||||
params)
|
||||
result (f cfg params)
|
||||
etag (or (and reuse-key? key')
|
||||
(some-> result meta ::key fmt-key)
|
||||
(some-> result key-fn fmt-key))]
|
||||
(some->> result meta ::key fmt-key)
|
||||
(some->> result (key-fn params) fmt-key))]
|
||||
(rph/with-header result "etag" etag))))
|
||||
(f cfg params))))
|
||||
f))
|
||||
|
||||
@@ -27,7 +27,7 @@
|
||||
[cuerdas.core :as str]
|
||||
[integrant.core :as ig]
|
||||
[pretty-spec.core :as ps]
|
||||
[ring.response :as-alias rres]))
|
||||
[yetti.response :as-alias yres]))
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; DOC (human readable)
|
||||
@@ -87,11 +87,12 @@
|
||||
(let [params (:query-params request)
|
||||
pstyle (:type params "js")
|
||||
context (assoc context :param-style pstyle)]
|
||||
{::rres/status 200
|
||||
::rres/body (-> (io/resource "app/templates/api-doc.tmpl")
|
||||
|
||||
{::yres/status 200
|
||||
::yres/body (-> (io/resource "app/templates/api-doc.tmpl")
|
||||
(tmpl/render context))}))
|
||||
(fn [_]
|
||||
{::rres/status 404})))
|
||||
{::yres/status 404})))
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; OPENAPI / SWAGGER (v3.1)
|
||||
@@ -175,12 +176,12 @@
|
||||
[context]
|
||||
(if (contains? cf/flags :backend-openapi-doc)
|
||||
(fn [_]
|
||||
{::rres/status 200
|
||||
::rres/headers {"content-type" "application/json; charset=utf-8"}
|
||||
::rres/body (json/encode context)})
|
||||
{::yres/status 200
|
||||
::yres/headers {"content-type" "application/json; charset=utf-8"}
|
||||
::yres/body (json/encode context)})
|
||||
|
||||
(fn [_]
|
||||
{::rres/status 404})))
|
||||
{::yres/status 404})))
|
||||
|
||||
(defn openapi-handler
|
||||
[]
|
||||
@@ -191,24 +192,23 @@
|
||||
context {:public-uri (cf/get :public-uri)
|
||||
:swagger-js swagger-js
|
||||
:swagger-css swagger-cs}]
|
||||
{::rres/status 200
|
||||
::rres/headers {"content-type" "text/html"}
|
||||
::rres/body (-> (io/resource "app/templates/openapi.tmpl")
|
||||
{::yres/status 200
|
||||
::yres/headers {"content-type" "text/html"}
|
||||
::yres/body (-> (io/resource "app/templates/openapi.tmpl")
|
||||
(tmpl/render context))}))
|
||||
(fn [_]
|
||||
{::rres/status 404})))
|
||||
{::yres/status 404})))
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; MODULE INIT
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
(s/def ::routes vector?)
|
||||
|
||||
(defmethod ig/pre-init-spec ::routes [_]
|
||||
(s/keys :req-un [::rpc/methods]))
|
||||
(defmethod ig/assert-key ::routes
|
||||
[_ params]
|
||||
(assert (sm/valid? ::rpc/methods (::rpc/methods params)) "expected valid methods"))
|
||||
|
||||
(defmethod ig/init-key ::routes
|
||||
[_ {:keys [methods] :as cfg}]
|
||||
[_ {:keys [::rpc/methods] :as cfg}]
|
||||
[(let [context (prepare-doc-context methods)]
|
||||
[["/_doc"
|
||||
{:handler (doc-handler context)
|
||||
|
||||
@@ -11,7 +11,7 @@
|
||||
[app.common.data.macros :as dm]
|
||||
[app.http :as-alias http]
|
||||
[app.rpc :as-alias rpc]
|
||||
[ring.response :as-alias rres]))
|
||||
[yetti.response :as-alias yres]))
|
||||
|
||||
;; A utilty wrapper object for wrap service responses that does not
|
||||
;; implements the IObj interface that make possible attach metadata to
|
||||
@@ -77,4 +77,4 @@
|
||||
(fn [_ response]
|
||||
(let [exp (if (integer? max-age) max-age (inst-ms max-age))
|
||||
val (dm/fmt "max-age=%" (int (/ exp 1000.0)))]
|
||||
(update response ::rres/headers assoc "cache-control" val)))))
|
||||
(update response ::yres/headers assoc "cache-control" val)))))
|
||||
|
||||
@@ -8,25 +8,24 @@
|
||||
"A permission checking helper factories."
|
||||
(:require
|
||||
[app.common.exceptions :as ex]
|
||||
[app.common.schema :as sm]
|
||||
[app.common.spec :as us]
|
||||
[clojure.spec.alpha :as s]))
|
||||
[app.common.schema :as sm]))
|
||||
|
||||
(sm/register! ::permissions
|
||||
[:map {:title "Permissions"}
|
||||
[:type {:gen/elements [:membership :share-link]} :keyword]
|
||||
[:is-owner ::sm/boolean]
|
||||
[:is-admin ::sm/boolean]
|
||||
[:can-edit ::sm/boolean]
|
||||
[:can-read ::sm/boolean]
|
||||
[:is-logged ::sm/boolean]])
|
||||
(sm/register!
|
||||
^{::sm/type ::permissions}
|
||||
[:map {:title "Permissions"}
|
||||
[:type {:gen/elements [:membership :share-link]} :keyword]
|
||||
[:is-owner ::sm/boolean]
|
||||
[:is-admin ::sm/boolean]
|
||||
[:can-edit ::sm/boolean]
|
||||
[:can-read ::sm/boolean]
|
||||
[:is-logged ::sm/boolean]])
|
||||
|
||||
|
||||
(s/def ::role #{:admin :owner :editor :viewer})
|
||||
(def valid-roles
|
||||
#{:admin :owner :editor :viewer})
|
||||
|
||||
(defn assign-role-flags
|
||||
[params role]
|
||||
(us/verify ::role role)
|
||||
(assert (contains? valid-roles role) "expected a valid role")
|
||||
(cond-> params
|
||||
(= role :owner)
|
||||
(assoc :is-owner true
|
||||
@@ -51,7 +50,7 @@
|
||||
(defn make-admin-predicate-fn
|
||||
"A simple factory for admin permission predicate functions."
|
||||
[qfn]
|
||||
(us/assert fn? qfn)
|
||||
(assert (fn? qfn) "expected a function")
|
||||
(fn check
|
||||
([perms] (:is-admin perms))
|
||||
([conn & args] (check (apply qfn conn args)))))
|
||||
@@ -59,7 +58,7 @@
|
||||
(defn make-edition-predicate-fn
|
||||
"A simple factory for edition permission predicate functions."
|
||||
[qfn]
|
||||
(us/assert fn? qfn)
|
||||
(assert (fn? qfn) "expected a function")
|
||||
(fn check
|
||||
([perms] (:can-edit perms))
|
||||
([conn & args] (check (apply qfn conn args)))))
|
||||
@@ -67,7 +66,7 @@
|
||||
(defn make-read-predicate-fn
|
||||
"A simple factory for read permission predicate functions."
|
||||
[qfn]
|
||||
(us/assert fn? qfn)
|
||||
(assert (fn? qfn) "expected a function")
|
||||
(fn check
|
||||
([perms] (:can-read perms))
|
||||
([conn & args] (check (apply qfn conn args)))))
|
||||
@@ -75,7 +74,7 @@
|
||||
(defn make-comment-predicate-fn
|
||||
"A simple factory for comment permission predicate functions."
|
||||
[qfn]
|
||||
(us/assert fn? qfn)
|
||||
(assert (fn? qfn) "expected a function")
|
||||
(fn check
|
||||
([perms]
|
||||
(and (:is-logged perms) (= (:who-comment perms) "all")))
|
||||
|
||||
@@ -408,6 +408,121 @@
|
||||
(assoc ::count-sql [sql:get-comments-per-file file-id])
|
||||
(generic-check!)))
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; QUOTE: SNAPSHOTS-PER-FILE
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
(def ^:private schema:snapshots-per-file
|
||||
[:map
|
||||
[::profile-id ::sm/uuid]
|
||||
[::project-id ::sm/uuid]
|
||||
[::team-id ::sm/uuid]
|
||||
[::file-id ::sm/uuid]])
|
||||
|
||||
(def ^:private valid-snapshots-per-file-quote?
|
||||
(sm/lazy-validator schema:snapshots-per-file))
|
||||
|
||||
(def ^:private sql:get-snapshots-per-file
|
||||
"SELECT count(*) AS total
|
||||
FROM file_change AS fc
|
||||
WHERE fc.file_id = ?
|
||||
AND fc.created_by = 'user'
|
||||
AND fc.deleted_at IS NULL
|
||||
AND fc.data IS NOT NULL")
|
||||
|
||||
(defmethod check-quote ::snapshots-per-file
|
||||
[{:keys [::profile-id ::file-id ::team-id ::project-id ::target] :as quote}]
|
||||
(assert (valid-snapshots-per-file-quote? quote) "invalid quote parameters")
|
||||
(-> quote
|
||||
(assoc ::default (cf/get :quotes-snapshots-per-file Integer/MAX_VALUE))
|
||||
(assoc ::quote-sql [sql:get-quotes-4 target file-id profile-id project-id
|
||||
profile-id team-id profile-id profile-id])
|
||||
(assoc ::count-sql [sql:get-snapshots-per-file file-id])
|
||||
(generic-check!)))
|
||||
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; QUOTE: SNAPSHOTS-PER-TEAM
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
(def ^:private schema:snapshots-per-team
|
||||
[:map
|
||||
[::profile-id ::sm/uuid]
|
||||
[::team-id ::sm/uuid]])
|
||||
|
||||
(def ^:private valid-snapshots-per-team-quote?
|
||||
(sm/lazy-validator schema:snapshots-per-team))
|
||||
|
||||
(def ^:private sql:get-snapshots-per-team
|
||||
"SELECT count(*) AS total
|
||||
FROM file_change AS fc
|
||||
JOIN file AS f ON (f.id = fc.file_id)
|
||||
JOIN project AS p ON (p.id = f.project_id)
|
||||
WHERE p.team_id = ?
|
||||
AND fc.created_by = 'user'
|
||||
AND fc.deleted_at IS NULL
|
||||
AND fc.data IS NOT NULL")
|
||||
|
||||
(defmethod check-quote ::snapshots-per-team
|
||||
[{:keys [::profile-id ::team-id ::target] :as quote}]
|
||||
(assert (valid-snapshots-per-team-quote? quote) "invalid quote parameters")
|
||||
(-> quote
|
||||
(assoc ::default (cf/get :quotes-snapshots-per-team Integer/MAX_VALUE))
|
||||
(assoc ::quote-sql [sql:get-quotes-2 target team-id profile-id profile-id])
|
||||
(assoc ::count-sql [sql:get-snapshots-per-team team-id])
|
||||
(generic-check!)))
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; QUOTE: TEAM-ACCESS-REQUESTS-PER-TEAM
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
(def ^:private schema:team-access-requests-per-team
|
||||
[:map
|
||||
[::profile-id ::sm/uuid]
|
||||
[::team-id ::sm/uuid]])
|
||||
|
||||
(def ^:private valid-team-access-requests-per-team-quote?
|
||||
(sm/lazy-validator schema:team-access-requests-per-team))
|
||||
|
||||
(def ^:private sql:get-team-access-requests-per-team
|
||||
"SELECT count(*) AS total
|
||||
FROM team_access_request AS tar
|
||||
WHERE tar.team_id = ?")
|
||||
|
||||
(defmethod check-quote ::team-access-requests-per-team
|
||||
[{:keys [::profile-id ::team-id ::target] :as quote}]
|
||||
(assert (valid-team-access-requests-per-team-quote? quote) "invalid quote parameters")
|
||||
(-> quote
|
||||
(assoc ::default (cf/get :quotes-team-access-requests-per-team Integer/MAX_VALUE))
|
||||
(assoc ::quote-sql [sql:get-quotes-2 target team-id profile-id profile-id])
|
||||
(assoc ::count-sql [sql:get-team-access-requests-per-team team-id])
|
||||
(generic-check!)))
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; QUOTE: TEAM-ACCESS-REQUESTS-PER-REQUESTER
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
(def ^:private schema:team-access-requests-per-requester
|
||||
[:map
|
||||
[::profile-id ::sm/uuid]])
|
||||
|
||||
(def ^:private valid-team-access-requests-per-requester-quote?
|
||||
(sm/lazy-validator schema:team-access-requests-per-requester))
|
||||
|
||||
(def ^:private sql:get-team-access-requests-per-requester
|
||||
"SELECT count(*) AS total
|
||||
FROM team_access_request AS tar
|
||||
WHERE tar.requester_id = ?")
|
||||
|
||||
(defmethod check-quote ::team-access-requests-per-requester
|
||||
[{:keys [::profile-id ::target] :as quote}]
|
||||
(assert (valid-team-access-requests-per-requester-quote? quote) "invalid quote parameters")
|
||||
(-> quote
|
||||
(assoc ::default (cf/get :quotes-team-access-requests-per-requester Integer/MAX_VALUE))
|
||||
(assoc ::quote-sql [sql:get-quotes-1 target profile-id])
|
||||
(assoc ::count-sql [sql:get-team-access-requests-per-requester profile-id])
|
||||
(generic-check!)))
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; QUOTE: DEFAULT
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
@@ -46,7 +46,7 @@
|
||||
[app.common.data :as d]
|
||||
[app.common.exceptions :as ex]
|
||||
[app.common.logging :as l]
|
||||
[app.common.spec :as us]
|
||||
[app.common.schema :as sm]
|
||||
[app.common.uri :as uri]
|
||||
[app.common.uuid :as uuid]
|
||||
[app.config :as cf]
|
||||
@@ -61,7 +61,6 @@
|
||||
[app.util.time :as dt]
|
||||
[app.worker :as wrk]
|
||||
[clojure.edn :as edn]
|
||||
[clojure.spec.alpha :as s]
|
||||
[cuerdas.core :as str]
|
||||
[datoteka.fs :as fs]
|
||||
[integrant.core :as ig]
|
||||
@@ -95,9 +94,46 @@
|
||||
(defmulti parse-limit (fn [[_ strategy _]] strategy))
|
||||
(defmulti process-limit (fn [_ _ _ o] (::strategy o)))
|
||||
|
||||
(sm/register!
|
||||
{:type ::rpc/rlimit
|
||||
:pred #(instance? clojure.lang.Agent %)})
|
||||
|
||||
(def ^:private schema:strategy
|
||||
[:enum :window :bucket])
|
||||
|
||||
(def ^:private schema:limit-tuple
|
||||
[:tuple :keyword schema:strategy :string])
|
||||
|
||||
(def ^:private schema:limit
|
||||
[:and
|
||||
[:map
|
||||
[::name :any]
|
||||
[::strategy schema:strategy]
|
||||
[::key :string]
|
||||
[::opts :string]]
|
||||
[:or
|
||||
[:map
|
||||
[::capacity ::sm/int]
|
||||
[::rate ::sm/int]
|
||||
[::internal ::dt/duration]
|
||||
[::params [::sm/vec :any]]]
|
||||
[:map
|
||||
[::nreq ::sm/int]
|
||||
[::unit [:enum :days :hours :minutes :seconds :weeks]]]]])
|
||||
|
||||
(def ^:private schema:limits
|
||||
[:map-of :keyword [::sm/vec schema:limit]])
|
||||
|
||||
(def ^:private valid-limit-tuple?
|
||||
(sm/lazy-validator schema:limit-tuple))
|
||||
|
||||
(def ^:private valid-rlimit-instance?
|
||||
(sm/lazy-validator ::rpc/rlimit))
|
||||
|
||||
(defmethod parse-limit :window
|
||||
[[name strategy opts :as vlimit]]
|
||||
(us/assert! ::limit-tuple vlimit)
|
||||
(assert (valid-limit-tuple? vlimit) "expected valid limit tuple")
|
||||
|
||||
(merge
|
||||
{::name name
|
||||
::strategy strategy}
|
||||
@@ -118,7 +154,8 @@
|
||||
|
||||
(defmethod parse-limit :bucket
|
||||
[[name strategy opts :as vlimit]]
|
||||
(us/assert! ::limit-tuple vlimit)
|
||||
(assert (valid-limit-tuple? vlimit) "expected valid limit tuple")
|
||||
|
||||
(if-let [[_ capacity rate interval] (re-find bucket-opts-re opts)]
|
||||
(let [interval (dt/duration interval)
|
||||
rate (parse-long rate)
|
||||
@@ -140,7 +177,7 @@
|
||||
(let [script (-> bucket-rate-limit-script
|
||||
(assoc ::rscript/keys [(str key "." service "." user-id)])
|
||||
(assoc ::rscript/vals (conj params (dt/->seconds now))))
|
||||
result (rds/eval! redis script)
|
||||
result (rds/eval redis script)
|
||||
allowed? (boolean (nth result 0))
|
||||
remaining (nth result 1)
|
||||
reset (* (/ (inst-ms interval) rate)
|
||||
@@ -164,7 +201,7 @@
|
||||
script (-> window-rate-limit-script
|
||||
(assoc ::rscript/keys [(str key "." service "." user-id "." (dt/format-instant ts))])
|
      (assoc ::rscript/vals [nreq (dt/->seconds ttl)]))
        result    (rds/eval! redis script)
        result    (rds/eval redis script)
        allowed?  (boolean (nth result 0))
        remaining (nth result 1)]
    (l/trace :hint "limit processed"
@@ -245,8 +282,8 @@

(defn wrap
  [{:keys [::rpc/rlimit ::rds/redis] :as cfg} f mdata]
  (us/assert! ::rpc/rlimit rlimit)
  (us/assert! ::rds/redis redis)
  (assert (rds/redis? redis) "expected a valid redis instance")
  (assert (or (nil? rlimit) (valid-rlimit-instance? rlimit)) "expected a valid rlimit instance")

  (if rlimit
    (let [skey (keyword (::rpc/type cfg) (->> mdata ::sv/spec name))
@@ -275,42 +312,19 @@
;; CONFIG WATCHER
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;

(s/def ::strategy (s/and ::us/keyword #{:window :bucket}))
(s/def ::capacity ::us/integer)
(s/def ::rate ::us/integer)
(s/def ::interval ::dt/duration)
(s/def ::key ::us/string)
(s/def ::opts ::us/string)
(s/def ::params vector?)
(s/def ::unit #{:days :hours :minutes :seconds :weeks})
(s/def ::nreq ::us/integer)
(s/def ::refresh ::dt/duration)
(def ^:private schema:config
  [:map-of
   [:or :keyword [:set :keyword]]
   [:vector schema:limit-tuple]])

(s/def ::limit-tuple
  (s/tuple ::us/keyword ::strategy string?))
(def ^:private check-config
  (sm/check-fn schema:config))

(s/def ::limits
  (s/map-of keyword? (s/every ::limit :kind vector?)))
(def ^:private check-refresh
  (sm/check-fn ::dt/duration))

(s/def ::limit
  (s/and
   (s/keys :req [::name ::strategy ::key ::opts])
   (s/or :bucket
         (s/keys :req [::capacity
                       ::rate
                       ::interval
                       ::params])
         :window
         (s/keys :req [::nreq
                       ::unit]))))

(s/def ::rpc/rlimit
  (s/nilable
   #(instance? clojure.lang.Agent %)))

(s/def ::config
  (s/map-of (s/or :kw keyword? :set set?)
            (s/every ::limit-tuple :kind vector?)))
(def ^:private check-limits
  (sm/check-fn schema:limits))

(defn read-config
  [path]
@@ -336,13 +350,9 @@
            {}
            config)))]

  (when-let [config (some->> path slurp edn/read-string)]
    (us/verify! ::config config)
    (let [refresh (->> config meta :refresh dt/duration)
          limits  (->> config compile-pass-1 compile-pass-2)]

      (us/verify! ::limits limits)
      (us/verify! ::refresh refresh)
  (when-let [config (some->> path slurp edn/read-string check-config)]
    (let [refresh (->> config meta :refresh dt/duration check-refresh)
          limits  (->> config compile-pass-1 compile-pass-2 check-limits)]

      {::refresh refresh
       ::limits limits}))))
@@ -385,8 +395,9 @@
  (when-let [path (cf/get :rpc-rlimit-config)]
    (and (fs/exists? path) (fs/regular-file? path) path)))

(defmethod ig/pre-init-spec :app.rpc/rlimit [_]
  (s/keys :req [::wrk/executor]))
(defmethod ig/assert-key :app.rpc/rlimit
  [_ {:keys [::wrk/executor]}]
  (assert (sm/valid? ::wrk/executor executor) "expect valid executor"))

(defmethod ig/init-key ::rpc/rlimit
  [_ {:keys [::wrk/executor] :as cfg}]

@@ -9,7 +9,7 @@
  (:require
   [app.common.data :as d]
   [app.common.logging :as l]
   [app.common.spec :as us]
   [app.common.schema :as sm]
   [app.common.uuid :as uuid]
   [app.db :as db]
   [app.main :as-alias main]
@@ -17,7 +17,6 @@
   [app.setup.templates]
   [buddy.core.codecs :as bc]
   [buddy.core.nonce :as bn]
   [clojure.spec.alpha :as s]
   [integrant.core :as ig]))

(defn- generate-random-key
@@ -73,12 +72,9 @@
  (db/run! system (fn [{:keys [::db/conn]}]
                    (db/exec-one! conn [sql:add-prop prop value false value false])))))

(s/def ::key ::us/string)
(s/def ::props (s/map-of ::us/keyword some?))

(defmethod ig/pre-init-spec ::props [_]
  (s/keys :req [::db/pool]
          :opt [::key]))
(defmethod ig/assert-key ::props
  [_ params]
  (assert (db/pool? (::db/pool params)) "expected valid database pool"))

(defmethod ig/init-key ::props
  [_ {:keys [::db/pool ::key] :as cfg}]
@@ -94,3 +90,7 @@
        (assoc :secret-key secret)
        (assoc :tokens-key (keys/derive secret :salt "tokens"))
        (update :instance-id handle-instance-id conn (db/read-only? pool))))))


;; FIXME
(sm/register! ::props :any)

@@ -10,6 +10,7 @@
   [app.db :as db]
   [app.rpc :as-alias rpc]
   [app.rpc.climit :as-alias climit]
   [app.rpc.commands.files :as files]
   [app.rpc.commands.files-update :as fupdate]
   [app.rpc.commands.management :as management]
   [app.rpc.commands.profile :as profile]
@@ -51,9 +52,11 @@
                :project-id (:default-project-id profile)}
        template-stream (tmpl/get-template-stream cfg "welcome")
        file-id (-> (management/clone-template cfg params template-stream)
                    first)]
                    first)
        file-name (str fullname "'s first file")]

    (db/tx-run! cfg (fn [{:keys [::db/conn] :as cfg}]
                      (files/rename-file conn {:id file-id :name file-name})
                      (fupdate/update-file! cfg file-id update-welcome-shape fullname)
                      (profile/update-profile-props cfg id {:welcome-file-id file-id})
                      (db/exec-one! conn [sql:mark-file-object-thumbnails-deleted file-id])

@@ -8,7 +8,6 @@
  "Server Repl."
  (:require
   [app.common.logging :as l]
   [app.common.spec :as us]
   [app.config :as cf]
   [app.srepl.cli]
   [app.srepl.main]
@@ -16,7 +15,6 @@
   [app.util.locks :as locks]
   [clojure.core.server :as ccs]
   [clojure.main :as cm]
   [clojure.spec.alpha :as s]
   [integrant.core :as ig]))

(defn- repl-init
@@ -44,16 +42,14 @@

;; --- State initialization

(s/def ::port ::us/integer)
(s/def ::host ::us/not-empty-string)
(defmethod ig/assert-key ::server
  [_ params]
  (assert (int? (::port params)) "expected valid port")
  (assert (string? (::host params)) "expected valid host"))

(defmethod ig/pre-init-spec ::server
  [_]
  (s/keys :req [::host ::port]))

(defmethod ig/prep-key ::server
  [[type _] cfg]
  (assoc cfg ::flag (keyword (str (name type) "-server"))))
(defmethod ig/expand-key ::server
  [[type :as k] v]
  {k (assoc v ::flag (keyword (str (name type) "-server")))}

(defmethod ig/init-key ::server
  [[type _] {:keys [::flag ::port ::host] :as cfg}]

@@ -122,22 +122,19 @@
    WHERE file_id = ANY(?)
      AND id IS NOT NULL")

(defn get-file-snapshots
(defn search-file-snapshots
  "Get a seq of pairs of file-id and snapshot-id for a set of files
  and specified label"
  [conn label ids]
  [conn file-ids label]
  (db/exec! conn [sql:snapshots-with-file label
                  (db/create-array conn "uuid" ids)]))
                  (db/create-array conn "uuid" file-ids)]))

(defn take-team-snapshot!
  [system team-id label]
  (let [conn (db/get-connection system)]
    (->> (feat.comp-v2/get-and-lock-team-files conn team-id)
         (map (fn [file-id]
                {:file-id file-id
                 :label label}))
         (reduce (fn [result params]
                   (fsnap/take-file-snapshot! conn params)
         (reduce (fn [result file-id]
                   (fsnap/create-file-snapshot! system nil file-id label)
                   (inc result))
                 0))))

@@ -147,7 +144,7 @@
        ids (->> (feat.comp-v2/get-and-lock-team-files conn team-id)
                 (into #{}))

        snap (get-file-snapshots conn label ids)
        snap (search-file-snapshots conn ids label)

        ids' (into #{} (map :file-id) snap)
        team (-> (feat.comp-v2/get-team conn team-id)
@@ -157,8 +154,8 @@
      (throw (RuntimeException. "no uniform snapshot available")))

    (feat.comp-v2/update-team! conn team)
    (reduce (fn [result params]
              (fsnap/restore-file-snapshot! conn params)
    (reduce (fn [result {:keys [file-id id]}]
              (fsnap/restore-file-snapshot! system file-id id)
              (inc result))
            0
            snap)))
@@ -167,7 +164,7 @@
  [system file-id update-fn & {:keys [label validate? with-libraries?] :or {validate? true} :as opts}]

  (when (string? label)
    (fsnap/take-file-snapshot! system {:file-id file-id :label label}))
    (fsnap/create-file-snapshot! system nil file-id label))

  (let [conn (db/get-connection system)
        file (get-file system file-id opts)

@@ -311,33 +311,29 @@
  collectable file-changes entry."
  [& {:keys [file-id label]}]
  (let [file-id (h/parse-uuid file-id)]
    (db/tx-run! main/system fsnap/take-file-snapshot! {:file-id file-id :label label})))
    (db/tx-run! main/system fsnap/create-file-snapshot! {:file-id file-id :label label})))

(defn restore-file-snapshot!
  [file-id label]
  (let [file-id (h/parse-uuid file-id)]
    (db/tx-run! main/system
                (fn [{:keys [::db/conn] :as system}]
                  (when-let [snapshot (->> (h/get-file-snapshots conn label #{file-id})
                  (when-let [snapshot (->> (h/search-file-snapshots conn #{file-id} label)
                                           (map :id)
                                           (first))]
                    (fsnap/restore-file-snapshot! system
                                                  {:id (:id snapshot)
                                                   :file-id file-id}))))))
                    (fsnap/restore-file-snapshot! system file-id (:id snapshot)))))))

(defn list-file-snapshots!
  [file-id & {:keys [limit]}]
  [file-id & {:as _}]
  (let [file-id (h/parse-uuid file-id)]
    (db/tx-run! main/system
                (fn [system]
                  (let [params {:file-id file-id :limit limit}]
                    (->> (fsnap/get-file-snapshots system (d/without-nils params))
                         (print-table [:label :id :revn :created-at])))))))
                (fn [{:keys [::db/conn]}]
                  (->> (fsnap/get-file-snapshots conn file-id)
                       (print-table [:label :id :revn :created-at]))))))

(defn take-team-snapshot!
  [team-id & {:keys [label rollback?] :or {rollback? true}}]
  (let [team-id (h/parse-uuid team-id)
        label   (or label (fsnap/generate-snapshot-label))]
  (let [team-id (h/parse-uuid team-id)]
    (-> (assoc main/system ::db/rollback rollback?)
        (db/tx-run! h/take-team-snapshot! team-id label))))

|
||||
|
||||
@@ -11,7 +11,7 @@
|
||||
[app.common.data :as d]
|
||||
[app.common.data.macros :as dm]
|
||||
[app.common.logging :as l]
|
||||
[app.common.spec :as us]
|
||||
[app.common.schema :as sm]
|
||||
[app.common.uuid :as uuid]
|
||||
[app.config :as cf]
|
||||
[app.db :as db]
|
||||
@@ -19,7 +19,6 @@
|
||||
[app.storage.impl :as impl]
|
||||
[app.storage.s3 :as ss3]
|
||||
[app.util.time :as dt]
|
||||
[clojure.spec.alpha :as s]
|
||||
[cuerdas.core :as str]
|
||||
[datoteka.fs :as fs]
|
||||
[integrant.core :as ig])
|
||||
@@ -34,23 +33,43 @@
|
||||
:assets-s3 :s3
|
||||
nil)))
|
||||
|
||||
(def valid-buckets
|
||||
#{"file-media-object"
|
||||
"team-font-variant"
|
||||
"file-object-thumbnail"
|
||||
"file-thumbnail"
|
||||
"profile"
|
||||
"file-data"
|
||||
"file-data-fragment"
|
||||
"file-change"})
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; Storage Module State
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
(s/def ::id #{:assets-fs :assets-s3 :fs :s3})
|
||||
(s/def ::s3 ::ss3/backend)
|
||||
(s/def ::fs ::sfs/backend)
|
||||
(s/def ::type #{:fs :s3})
|
||||
(def ^:private schema:backends
|
||||
[:map-of :keyword
|
||||
[:maybe
|
||||
[:or ::ss3/backend ::sfs/backend]]])
|
||||
|
||||
(s/def ::backends
|
||||
(s/map-of ::us/keyword
|
||||
(s/nilable
|
||||
(s/or :s3 ::ss3/backend
|
||||
:fs ::sfs/backend))))
|
||||
(def ^:private valid-backends?
|
||||
(sm/validator schema:backends))
|
||||
|
||||
(defmethod ig/pre-init-spec ::storage [_]
|
||||
(s/keys :req [::db/pool ::backends]))
|
||||
(def ^:private schema:storage
|
||||
[:map {:title "storage"}
|
||||
[::backends schema:backends]
|
||||
[::backend [:enum :s3 :fs]]
|
||||
::db/connectable])
|
||||
|
||||
(def valid-storage?
|
||||
(sm/validator schema:storage))
|
||||
|
||||
(sm/register! ::storage schema:storage)
|
||||
|
||||
(defmethod ig/assert-key ::storage
|
||||
[_ params]
|
||||
(assert (db/pool? (::db/pool params)) "expected valid database pool")
|
||||
(assert (valid-backends? (::backends params)) "expected valid backends map"))
|
||||
|
||||
(defmethod ig/init-key ::storage
|
||||
[_ {:keys [::backends ::db/pool] :as cfg}]
|
||||
@@ -68,14 +87,6 @@
|
||||
(assoc ::backend backend)
|
||||
(assoc ::db/connectable pool))))
|
||||
|
||||
(s/def ::backend keyword?)
|
||||
(s/def ::storage
|
||||
(s/keys :req [::backends ::db/pool ::db/connectable]
|
||||
:opt [::backend]))
|
||||
|
||||
(s/def ::storage-with-backend
|
||||
(s/and ::storage #(contains? % ::backend)))
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; Database Objects
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
@@ -190,15 +201,16 @@
|
||||
(dm/export impl/object?)
|
||||
|
||||
(defn get-object
|
||||
[{:keys [::db/connectable] :as storage} id]
|
||||
(us/assert! ::storage storage)
|
||||
[{:keys [::db/connectable] :as storage} id]
|
||||
(assert (valid-storage? storage))
|
||||
(retrieve-database-object connectable id))
|
||||
|
||||
(defn put-object!
|
||||
"Creates a new object with the provided content."
|
||||
[{:keys [::backend] :as storage} {:keys [::content] :as params}]
|
||||
(us/assert! ::storage-with-backend storage)
|
||||
(us/assert! ::impl/content content)
|
||||
(assert (valid-storage? storage))
|
||||
(assert (impl/content? content) "expected an instance of content")
|
||||
|
||||
(let [object (create-database-object storage params)]
|
||||
(if (::created? (meta object))
|
||||
;; Store the data finally on the underlying storage subsystem.
|
||||
@@ -209,7 +221,7 @@
|
||||
(defn touch-object!
|
||||
"Mark object as touched."
|
||||
[{:keys [::db/connectable] :as storage} object-or-id]
|
||||
(us/assert! ::storage storage)
|
||||
(assert (valid-storage? storage))
|
||||
(let [id (if (impl/object? object-or-id) (:id object-or-id) object-or-id)]
|
||||
(-> (db/update! connectable :storage-object
|
||||
{:touched-at (dt/now)}
|
||||
@@ -221,7 +233,7 @@
|
||||
"Return an input stream instance of the object content."
|
||||
^InputStream
|
||||
[storage object]
|
||||
(us/assert! ::storage storage)
|
||||
(assert (valid-storage? storage))
|
||||
(when (or (nil? (:expired-at object))
|
||||
(dt/is-after? (:expired-at object) (dt/now)))
|
||||
(-> (impl/resolve-backend storage (:backend object))
|
||||
@@ -230,7 +242,7 @@
|
||||
(defn get-object-bytes
|
||||
"Returns a byte array of object content."
|
||||
[storage object]
|
||||
(us/assert! ::storage storage)
|
||||
(assert (valid-storage? storage))
|
||||
(when (or (nil? (:expired-at object))
|
||||
(dt/is-after? (:expired-at object) (dt/now)))
|
||||
(-> (impl/resolve-backend storage (:backend object))
|
||||
@@ -240,7 +252,7 @@
|
||||
([storage object]
|
||||
(get-object-url storage object nil))
|
||||
([storage object options]
|
||||
(us/assert! ::storage storage)
|
||||
(assert (valid-storage? storage))
|
||||
(when (or (nil? (:expired-at object))
|
||||
(dt/is-after? (:expired-at object) (dt/now)))
|
||||
(-> (impl/resolve-backend storage (:backend object))
|
||||
@@ -250,7 +262,7 @@
|
||||
"Get the Path to the object. Only works with `:fs` type of
|
||||
storages."
|
||||
[storage object]
|
||||
(us/assert! ::storage storage)
|
||||
(assert (valid-storage? storage))
|
||||
(let [backend (impl/resolve-backend storage (:backend object))]
|
||||
(when (and (= :fs (::type backend))
|
||||
(or (nil? (:expired-at object))
|
||||
@@ -259,7 +271,7 @@
|
||||
|
||||
(defn del-object!
|
||||
[{:keys [::db/connectable] :as storage} object-or-id]
|
||||
(us/assert! ::storage storage)
|
||||
(assert (valid-storage? storage))
|
||||
(let [id (if (impl/object? object-or-id) (:id object-or-id) object-or-id)
|
||||
res (db/update! connectable :storage-object
|
||||
{:deleted-at (dt/now)}
|
||||
@@ -267,9 +279,12 @@
|
||||
(pos? (db/get-update-count res))))
|
||||
|
||||
(dm/export impl/calculate-hash)
|
||||
(dm/export impl/get-hash)
|
||||
(dm/export impl/get-size)
|
||||
|
||||
(defn configure
|
||||
[storage connectable]
|
||||
(assert (valid-storage? storage))
|
||||
(assoc storage ::db/connectable connectable))
|
||||
|
||||
(defn resolve
|
||||
|
||||
@@ -6,27 +6,29 @@

(ns app.storage.fs
  (:require
   [app.common.data.macros :as dm]
   [app.common.exceptions :as ex]
   [app.common.spec :as us]
   [app.common.schema :as sm]
   [app.common.uri :as u]
   [app.storage :as-alias sto]
   [app.storage.impl :as impl]
   [clojure.spec.alpha :as s]
   [cuerdas.core :as str]
   [datoteka.fs :as fs]
   [datoteka.io :as io]
   [integrant.core :as ig])
  (:import
   java.io.InputStream
   java.io.OutputStream
   java.nio.file.Files
   java.nio.file.Path))

(set! *warn-on-reflection* true)

;; --- BACKEND INIT

(s/def ::directory ::us/string)

(defmethod ig/pre-init-spec ::backend [_]
  (s/keys :opt [::directory]))
(defmethod ig/assert-key ::backend
  [_ params]
  ;; FIXME: path (?)
  (assert (string? (::directory params))))

(defmethod ig/init-key ::backend
  [_ cfg]
@@ -39,18 +41,22 @@
              ::directory (str dir)
              ::uri (u/uri (str "file://" dir))))))

(s/def ::uri u/uri?)
(s/def ::backend
  (s/keys :req [::directory
                ::uri]
          :opt [::sto/type
                ::sto/id]))
(def ^:private schema:backend
  [:map {:title "fs-backend"}
   [::directory :string]
   [::uri ::sm/uri]
   [::sto/type [:= :fs]]])

(sm/register! ::backend schema:backend)

(def ^:private valid-backend?
  (sm/validator schema:backend))

;; --- API IMPL

(defmethod impl/put-object :fs
  [backend {:keys [id] :as object} content]
  (us/assert! ::backend backend)
  (assert (valid-backend? backend) "expected a valid backend instance")
  (let [base (fs/path (::directory backend))
        path (fs/path (impl/id->path id))
        full (fs/normalize (fs/join base path))]
@@ -58,15 +64,15 @@
    (when-not (fs/exists? (fs/parent full))
      (fs/create-dir (fs/parent full)))

    (dm/with-open [src (io/input-stream content)
                   dst (io/output-stream full)]
      (io/copy! src dst))
    (with-open [^InputStream src (io/input-stream content)]
      (with-open [^OutputStream dst (io/output-stream full)]
        (io/copy src dst)))

    object))

(defmethod impl/get-object-data :fs
  [backend {:keys [id] :as object}]
  (us/assert! ::backend backend)
  (assert (valid-backend? backend) "expected a valid backend instance")
  (let [^Path base (fs/path (::directory backend))
        ^Path path (fs/path (impl/id->path id))
        ^Path full (fs/normalize (fs/join base path))]
@@ -78,12 +84,12 @@

(defmethod impl/get-object-bytes :fs
  [backend object]
  (dm/with-open [input (impl/get-object-data backend object)]
    (io/read-as-bytes input)))
  (with-open [^InputStream input (impl/get-object-data backend object)]
    (io/read input)))

(defmethod impl/get-object-url :fs
  [{:keys [::uri] :as backend} {:keys [id] :as object} _]
  (us/assert! ::backend backend)
  (assert (valid-backend? backend) "expected a valid backend instance")
  (update uri :path
          (fn [existing]
            (if (str/ends-with? existing "/")
@@ -92,7 +98,7 @@

(defmethod impl/del-object :fs
  [backend {:keys [id] :as object}]
  (us/assert! ::backend backend)
  (assert (valid-backend? backend) "expected a valid backend instance")
  (let [base (fs/path (::directory backend))
        path (fs/path (impl/id->path id))
        path (fs/join base path)]
@@ -100,7 +106,7 @@

(defmethod impl/del-objects-in-bulk :fs
  [backend ids]
  (us/assert! ::backend backend)
  (assert (valid-backend? backend) "expected a valid backend instance")
  (let [base (fs/path (::directory backend))]
    (doseq [id ids]
      (let [path (fs/path (impl/id->path id))

@@ -16,10 +16,9 @@
   [app.common.data :as d]
   [app.common.logging :as l]
   [app.db :as db]
   [app.storage :as-alias sto]
   [app.storage :as sto]
   [app.storage.impl :as impl]
   [app.util.time :as dt]
   [clojure.spec.alpha :as s]
   [integrant.core :as ig]))

(def ^:private sql:lock-sobjects
@@ -100,13 +99,14 @@
            0
            (get-buckets conn min-age)))

(defmethod ig/assert-key ::handler
  [_ params]
  (assert (sto/valid-storage? (::sto/storage params)) "expect valid storage")
  (assert (db/pool? (::db/pool params)) "expect valid storage"))

(defmethod ig/pre-init-spec ::handler [_]
  (s/keys :req [::sto/storage ::db/pool]))

(defmethod ig/prep-key ::handler
  [_ cfg]
  (assoc cfg ::min-age (dt/duration {:hours 2})))
(defmethod ig/expand-key ::handler
  [k v]
  {k (assoc v ::min-age (dt/duration {:hours 2}))}

(defmethod ig/init-key ::handler
  [_ {:keys [::min-age] :as cfg}]

@@ -25,7 +25,6 @@
   [app.db :as db]
   [app.storage :as-alias sto]
   [app.storage.impl :as impl]
   [clojure.spec.alpha :as s]
   [integrant.core :as ig]))

(def ^:private sql:has-team-font-variant-refs
@@ -226,8 +225,9 @@
;; HANDLER
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;

(defmethod ig/pre-init-spec ::handler [_]
  (s/keys :req [::db/pool]))
(defmethod ig/assert-key ::handler
  [_ params]
  (assert (db/pool? (::db/pool params)) "expect valid storage"))

(defmethod ig/init-key ::handler
  [_ cfg]

@@ -14,7 +14,6 @@
   [buddy.core.codecs :as bc]
   [buddy.core.hash :as bh]
   [clojure.java.io :as jio]
   [clojure.spec.alpha :as s]
   [datoteka.io :as io])
  (:import
   java.nio.ByteBuffer
@@ -234,7 +233,3 @@
  [v]
  (satisfies? IContentObject v))

(s/def ::object object?)
(s/def ::content content?)

|
||||
|
||||
@@ -11,7 +11,7 @@
|
||||
[app.common.data.macros :as dm]
|
||||
[app.common.exceptions :as ex]
|
||||
[app.common.logging :as l]
|
||||
[app.common.spec :as us]
|
||||
[app.common.schema :as sm]
|
||||
[app.common.uri :as u]
|
||||
[app.storage :as-alias sto]
|
||||
[app.storage.impl :as impl]
|
||||
@@ -19,7 +19,6 @@
|
||||
[app.util.time :as dt]
|
||||
[app.worker :as-alias wrk]
|
||||
[clojure.java.io :as io]
|
||||
[clojure.spec.alpha :as s]
|
||||
[datoteka.fs :as fs]
|
||||
[integrant.core :as ig]
|
||||
[promesa.core :as p]
|
||||
@@ -86,61 +85,68 @@
|
||||
|
||||
;; --- BACKEND INIT
|
||||
|
||||
(s/def ::region ::us/keyword)
|
||||
(s/def ::bucket ::us/string)
|
||||
(s/def ::prefix ::us/string)
|
||||
(s/def ::endpoint ::us/string)
|
||||
(s/def ::io-threads ::us/integer)
|
||||
(def ^:private schema:config
|
||||
[:map {:title "s3-backend-config"}
|
||||
::wrk/executor
|
||||
[::region {:optional true} :keyword]
|
||||
[::bucket {:optional true} ::sm/text]
|
||||
[::prefix {:optional true} ::sm/text]
|
||||
[::endpoint {:optional true} ::sm/uri]
|
||||
[::io-threads {:optional true} ::sm/int]])
|
||||
|
||||
(defmethod ig/pre-init-spec ::backend [_]
|
||||
(s/keys :opt [::region ::bucket ::prefix ::endpoint ::io-threads ::wrk/executor]))
|
||||
(defmethod ig/expand-key ::backend
|
||||
[k v]
|
||||
{k (merge {::region :eu-central-1} (d/without-nils v))})
|
||||
|
||||
(defmethod ig/prep-key ::backend
|
||||
[_ {:keys [::prefix ::region] :as cfg}]
|
||||
(cond-> (d/without-nils cfg)
|
||||
(some? prefix) (assoc ::prefix prefix)
|
||||
(nil? region) (assoc ::region :eu-central-1)))
|
||||
(defmethod ig/assert-key ::backend
|
||||
[_ params]
|
||||
(assert (sm/check schema:config params)))
|
||||
|
||||
(defmethod ig/init-key ::backend
|
||||
[_ cfg]
|
||||
;; Return a valid backend data structure only if all optional
|
||||
;; parameters are provided.
|
||||
(when (and (contains? cfg ::region)
|
||||
(string? (::bucket cfg)))
|
||||
(let [client (build-s3-client cfg)
|
||||
presigner (build-s3-presigner cfg)]
|
||||
(assoc cfg
|
||||
[_ params]
|
||||
(when (and (contains? params ::region)
|
||||
(contains? params ::bucket))
|
||||
(let [client (build-s3-client params)
|
||||
presigner (build-s3-presigner params)]
|
||||
(assoc params
|
||||
::sto/type :s3
|
||||
::client @client
|
||||
::presigner presigner
|
||||
::close-fn #(.close ^java.lang.AutoCloseable client)))))
|
||||
|
||||
(defmethod ig/resolve-key ::backend
|
||||
[_ params]
|
||||
(dissoc params ::close-fn))
|
||||
|
||||
(defmethod ig/halt-key! ::backend
|
||||
[_ {:keys [::close-fn]}]
|
||||
(when (fn? close-fn)
|
||||
(px/run! close-fn)))
|
||||
|
||||
(s/def ::client #(instance? S3AsyncClient %))
|
||||
(s/def ::presigner #(instance? S3Presigner %))
|
||||
(s/def ::backend
|
||||
(s/keys :req [::region
|
||||
::bucket
|
||||
::client
|
||||
::presigner]
|
||||
:opt [::prefix
|
||||
::sto/id]))
|
||||
(def ^:private schema:backend
|
||||
[:map {:title "s3-backend"}
|
||||
;; [::region :keyword]
|
||||
;; [::bucket ::sm/text]
|
||||
[::client [:fn #(instance? S3AsyncClient %)]]
|
||||
[::presigner [:fn #(instance? S3Presigner %)]]
|
||||
[::prefix {:optional true} ::sm/text]
|
||||
#_[::sto/type [:= :s3]]])
|
||||
|
||||
(sm/register! ::backend schema:backend)
|
||||
|
||||
(def ^:private valid-backend?
|
||||
(sm/validator schema:backend))
|
||||
|
||||
;; --- API IMPL
|
||||
|
||||
(defmethod impl/put-object :s3
|
||||
[backend object content]
|
||||
(us/assert! ::backend backend)
|
||||
(assert (valid-backend? backend) "expected a valid backend instance")
|
||||
(p/await! (put-object backend object content)))
|
||||
|
||||
(defmethod impl/get-object-data :s3
|
||||
[backend object]
|
||||
(us/assert! ::backend backend)
|
||||
|
||||
(assert (valid-backend? backend) "expected a valid backend instance")
|
||||
(loop [result (get-object-data backend object)
|
||||
retryn 0]
|
||||
|
||||
@@ -167,22 +173,21 @@
|
||||
|
||||
(defmethod impl/get-object-bytes :s3
|
||||
[backend object]
|
||||
(us/assert! ::backend backend)
|
||||
(assert (valid-backend? backend) "expected a valid backend instance")
|
||||
(p/await! (get-object-bytes backend object)))
|
||||
|
||||
(defmethod impl/get-object-url :s3
|
||||
[backend object options]
|
||||
(us/assert! ::backend backend)
|
||||
(assert (valid-backend? backend) "expected a valid backend instance")
|
||||
(get-object-url backend object options))
|
||||
|
||||
(defmethod impl/del-object :s3
|
||||
[backend object]
|
||||
(us/assert! ::backend backend)
|
||||
(p/await! (del-object backend object)))
|
||||
|
||||
(defmethod impl/del-objects-in-bulk :s3
|
||||
[backend ids]
|
||||
(us/assert! ::backend backend)
|
||||
(assert (valid-backend? backend) "expected a valid backend instance")
|
||||
(p/await! (del-object-in-bulk backend ids)))
|
||||
|
||||
;; --- HELPERS
|
||||
@@ -221,7 +226,7 @@
|
||||
builder (.region ^S3AsyncClientBuilder builder (lookup-region region))
|
||||
builder (cond-> ^S3AsyncClientBuilder builder
|
||||
(some? endpoint)
|
||||
(.endpointOverride (URI. endpoint)))]
|
||||
(.endpointOverride (URI. (str endpoint))))]
|
||||
(.build ^S3AsyncClientBuilder builder))]
|
||||
|
||||
(reify
|
||||
@@ -240,7 +245,7 @@
|
||||
(.build))]
|
||||
|
||||
(-> (S3Presigner/builder)
|
||||
(cond-> (some? endpoint) (.endpointOverride (URI. endpoint)))
|
||||
(cond-> (some? endpoint) (.endpointOverride (URI. (str endpoint))))
|
||||
(.region (lookup-region region))
|
||||
(.serviceConfiguration ^S3Configuration config)
|
||||
(.build))))
|
||||
@@ -337,7 +342,8 @@
|
||||
|
||||
(defn- get-object-url
|
||||
[{:keys [::presigner ::bucket ::prefix]} {:keys [id]} {:keys [max-age] :or {max-age default-max-age}}]
|
||||
(us/assert dt/duration? max-age)
|
||||
(assert (dt/duration? max-age) "expected valid duration instance")
|
||||
|
||||
(let [gor (.. (GetObjectRequest/builder)
|
||||
(bucket bucket)
|
||||
(key (dm/str prefix (impl/id->path id)))
|
||||
|
||||
@@ -11,10 +11,10 @@
|
||||
permanently delete these files (look at systemd-tempfiles)."
|
||||
(:require
|
||||
[app.common.logging :as l]
|
||||
[app.common.schema :as sm]
|
||||
[app.common.uuid :as uuid]
|
||||
[app.util.time :as dt]
|
||||
[app.worker :as wrk]
|
||||
[clojure.spec.alpha :as s]
|
||||
[datoteka.fs :as fs]
|
||||
[integrant.core :as ig]
|
||||
[promesa.exec :as px]
|
||||
@@ -29,12 +29,13 @@
|
||||
|
||||
(defonce queue (sp/chan :buf 128))
|
||||
|
||||
(defmethod ig/pre-init-spec ::cleaner [_]
|
||||
(s/keys :req [::wrk/executor]))
|
||||
(defmethod ig/assert-key ::cleaner
|
||||
[_ {:keys [::wrk/executor]}]
|
||||
(assert (sm/valid? ::wrk/executor executor)))
|
||||
|
||||
(defmethod ig/prep-key ::cleaner
|
||||
[_ cfg]
|
||||
(assoc cfg ::min-age (dt/duration "60m")))
|
||||
(defmethod ig/expand-key ::cleaner
|
||||
[k v]
|
||||
{k (assoc v ::min-age (dt/duration "60m"))})
|
||||
|
||||
(defmethod ig/init-key ::cleaner
|
||||
[_ cfg]
|
||||
|
||||
@@ -7,36 +7,32 @@
|
||||
(ns app.svgo
|
||||
"A SVG Optimizer service"
|
||||
(:require
|
||||
[app.common.jsrt :as jsrt]
|
||||
[app.common.logging :as l]
|
||||
[app.worker :as-alias wrk]
|
||||
[integrant.core :as ig]
|
||||
[promesa.exec.semaphore :as ps]
|
||||
[promesa.util :as pu]))
|
||||
[app.util.shell :as shell]
|
||||
[datoteka.fs :as fs]
|
||||
[promesa.exec.semaphore :as ps]))
|
||||
|
||||
(def ^:dynamic *semaphore*
|
||||
"A dynamic variable that can optionally contain a traffic light to
|
||||
appropriately delimit the use of resources, managed externally."
|
||||
nil)
|
||||
|
||||
(set! *warn-on-reflection* true)
|
||||
|
||||
(defn optimize
|
||||
[{pool ::optimizer} data]
|
||||
[system data]
|
||||
(try
|
||||
(some-> *semaphore* ps/acquire!)
|
||||
(jsrt/run! pool
|
||||
(fn [context]
|
||||
(jsrt/set! context "svgData" data)
|
||||
(jsrt/eval! context "penpotSvgo.optimize(svgData, {plugins: ['safeAndFastPreset']})")))
|
||||
(let [script (fs/join fs/*cwd* "scripts/svgo-cli.js")
|
||||
cmd ["node" (str script)]
|
||||
result (shell/exec! system
|
||||
:cmd cmd
|
||||
:in data)]
|
||||
(if (= (:exit result) 0)
|
||||
(:out result)
|
||||
(do
|
||||
(l/raw! :warn (str "Error on optimizing svg, returning svg as-is." (:err result)))
|
||||
data)))
|
||||
|
||||
(finally
|
||||
(some-> *semaphore* ps/release!))))
|
||||
|
||||
(defmethod ig/init-key ::optimizer
|
||||
[_ _]
|
||||
(l/inf :hint "initializing svg optimizer pool")
|
||||
(let [init (jsrt/resource->source "app/common/svg/optimizer.js")]
|
||||
(jsrt/pool :init init)))
|
||||
|
||||
(defmethod ig/halt-key! ::optimizer
|
||||
[_ pool]
|
||||
(l/info :hint "stopping svg optimizer pool")
|
||||
(pu/close! pool))
|
||||
|
||||
@@ -12,7 +12,6 @@
|
||||
[app.rpc.commands.files :as files]
|
||||
[app.rpc.commands.profile :as profile]
|
||||
[app.util.time :as dt]
|
||||
[clojure.spec.alpha :as s]
|
||||
[integrant.core :as ig]))
|
||||
|
||||
(def ^:dynamic *team-deletion* false)
|
||||
@@ -113,8 +112,9 @@
|
||||
[_cfg props]
|
||||
(l/wrn :hint "not implementation found" :rel (:object props)))
|
||||
|
||||
(defmethod ig/pre-init-spec ::handler [_]
|
||||
(s/keys :req [::db/pool]))
|
||||
(defmethod ig/assert-key ::handler
|
||||
[_ params]
|
||||
(assert (db/pool? (::db/pool params)) "expected a valid database pool"))
|
||||
|
||||
(defmethod ig/init-key ::handler
|
||||
[_ cfg]
|
||||
|
||||
@@ -26,8 +26,6 @@
|
||||
[app.util.pointer-map :as pmap]
|
||||
[app.util.time :as dt]
|
||||
[app.worker :as wrk]
|
||||
[clojure.set :as set]
|
||||
[clojure.spec.alpha :as s]
|
||||
[integrant.core :as ig]))
|
||||
|
||||
(declare ^:private get-file)
|
||||
@@ -44,7 +42,7 @@
|
||||
f.data_ref_id
|
||||
FROM file_change AS f
|
||||
WHERE f.file_id = ?
|
||||
AND f.label IS NOT NULL
|
||||
AND f.data IS NOT NULL
|
||||
ORDER BY f.created_at ASC")
|
||||
|
||||
(def ^:private sql:mark-file-media-object-deleted
|
||||
@@ -54,16 +52,21 @@
|
||||
RETURNING id")
|
||||
|
||||
(def ^:private xf:collect-used-media
|
||||
(comp (map :data) (mapcat bfc/collect-used-media)))
|
||||
(comp
|
||||
(map :data)
|
||||
(mapcat bfc/collect-used-media)))
|
||||
|
||||
(defn- clean-file-media!
|
||||
"Performs the garbage collection of file media objects."
|
||||
[{:keys [::db/conn] :as cfg} {:keys [id] :as file}]
|
||||
(let [used (into #{}
|
||||
xf:collect-used-media
|
||||
(cons file
|
||||
(->> (db/cursor conn [sql:get-snapshots id])
|
||||
(map (partial decode-file cfg)))))
|
||||
(let [xform (comp
|
||||
(map (partial decode-file cfg))
|
||||
xf:collect-used-media)
|
||||
|
||||
used (->> (db/plan conn [sql:get-snapshots id])
|
||||
(transduce xform conj #{}))
|
||||
used (into used xf:collect-used-media [file])
|
||||
|
||||
ids (db/create-array conn "uuid" used)
|
||||
unused (->> (db/exec! conn [sql:mark-file-media-object-deleted id ids])
|
||||
(into #{} (map :id)))]
|
||||
@@ -146,51 +149,47 @@
|
||||
AND f.deleted_at IS null
|
||||
ORDER BY f.modified_at ASC")
|
||||
|
||||
(def ^:private xf:map-id (map :id))
|
||||
|
||||
(defn- get-used-components
|
||||
"Given a file and a set of components marked for deletion, return a
|
||||
filtered set of component ids that are still in use"
|
||||
[components library-id {:keys [data]}]
|
||||
(filter #(ctf/used-in? data library-id % :component) components))
|
||||
|
||||
(defn- clean-deleted-components!
|
||||
"Performs the garbage collection of unreferenced deleted components."
|
||||
[{:keys [::db/conn] :as cfg} {:keys [data] :as file}]
|
||||
(let [file-id (:id file)
|
||||
|
||||
get-used-components
|
||||
(fn [data components]
|
||||
;; Find which of the components are used in the file.
|
||||
(into #{}
|
||||
(filter #(ctf/used-in? data file-id % :component))
|
||||
components))
|
||||
deleted-components
|
||||
(ctkl/deleted-components-seq data)
|
||||
|
||||
get-unused-components
|
||||
(fn [components files]
|
||||
;; Find and return a set of unused components (on all files).
|
||||
(reduce (fn [components {:keys [data]}]
|
||||
(if (seq components)
|
||||
(->> (get-used-components data components)
|
||||
(set/difference components))
|
||||
(reduced components)))
|
||||
xform
|
||||
(mapcat (partial get-used-components deleted-components file-id))
|
||||
|
||||
components
|
||||
files))
|
||||
used-remote
|
||||
(->> (db/plan conn [sql:get-files-for-library file-id])
|
||||
(transduce (comp (map (partial decode-file cfg)) xform) conj #{}))
|
||||
|
||||
process-fdata
|
||||
(fn [data unused]
|
||||
(reduce (fn [data id]
|
||||
(l/trc :hint "delete component"
|
||||
:component-id (str id)
|
||||
:file-id (str file-id))
|
||||
(ctkl/delete-component data id))
|
||||
data
|
||||
unused))
|
||||
used-local
|
||||
(into #{} xform [file])
|
||||
|
||||
deleted (into #{} (ctkl/deleted-components-seq data))
|
||||
|
||||
unused (->> (db/cursor conn [sql:get-files-for-library file-id] {:chunk-size 1})
|
||||
(map (partial decode-file cfg))
|
||||
(cons file)
|
||||
(get-unused-components deleted)
|
||||
(mapv :id)
|
||||
(set))
|
||||
|
||||
file (update file :data process-fdata unused)]
|
||||
unused
|
||||
(transduce xf:map-id disj
|
||||
(into #{} xf:map-id deleted-components)
|
||||
(concat used-remote used-local))
|
||||
|
||||
file
|
||||
(update file :data
|
||||
(fn [data]
|
||||
(reduce (fn [data id]
|
||||
(l/trc :hint "delete component"
|
||||
:component-id (str id)
|
||||
:file-id (str file-id))
|
||||
(ctkl/delete-component data id))
|
||||
data
|
||||
unused)))]
|
||||
|
||||
(l/dbg :hint "clean" :rel "components" :file-id (str file-id) :total (count unused))
|
||||
file))
|
||||
@@ -315,8 +314,10 @@
|
||||
;; HANDLER
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
(defmethod ig/pre-init-spec ::handler [_]
|
||||
(s/keys :req [::db/pool ::sto/storage]))
|
||||
(defmethod ig/assert-key ::handler
|
||||
[_ params]
|
||||
(assert (db/pool? (::db/pool params)) "expected a valid database pool")
|
||||
(assert (sto/valid-storage? (::sto/storage params)) "expected valid storage to be provided"))
|
||||
|
||||
(defmethod ig/init-key ::handler
|
||||
[_ cfg]
|
||||
|
||||
@@ -12,7 +12,6 @@
   [app.db :as db]
   [app.util.time :as dt]
   [app.worker :as wrk]
   [clojure.spec.alpha :as s]
   [integrant.core :as ig]))

(def ^:private
@@ -43,12 +42,13 @@

    {:processed total}))

(defmethod ig/pre-init-spec ::handler [_]
  (s/keys :req [::db/pool]))
(defmethod ig/assert-key ::handler
  [_ params]
  (assert (db/pool? (::db/pool params)) "expected a valid database pool"))

(defmethod ig/prep-key ::handler
  [_ cfg]
  (assoc cfg ::min-age (cf/get-deletion-delay)))
(defmethod ig/expand-key ::handler
  [k v]
  {k (assoc v ::min-age (cf/get-deletion-delay))}

(defmethod ig/init-key ::handler
  [_ cfg]

||||
|
||||
@@ -1,68 +0,0 @@
;; This Source Code Form is subject to the terms of the Mozilla Public
;; License, v. 2.0. If a copy of the MPL was not distributed with this
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
;;
;; Copyright (c) KALEIDOS INC

(ns app.tasks.file-xlog-gc
  "A maintenance task that performs a garbage collection of the file
  change (transaction) log."
  (:require
   [app.common.logging :as l]
   [app.db :as db]
   [app.features.fdata :as feat.fdata]
   [app.storage :as sto]
   [app.util.time :as dt]
   [clojure.spec.alpha :as s]
   [integrant.core :as ig]))

(def ^:private
  sql:delete-files-xlog
  "DELETE FROM file_change
    WHERE id IN (SELECT id FROM file_change
                  WHERE label IS NULL
                    AND created_at < ?
                  ORDER BY created_at LIMIT ?)
   RETURNING id, data_backend, data_ref_id")

(def xf:filter-offloded
  (comp
   (filter feat.fdata/offloaded?)
   (keep :data-ref-id)))

(defn- delete-in-chunks
  [{:keys [::chunk-size ::threshold] :as cfg}]
  (let [storage (sto/resolve cfg ::db/reuse-conn true)]
    (loop [total 0]
      (let [chunk  (db/exec! cfg [sql:delete-files-xlog threshold chunk-size])
            length (count chunk)]

        ;; touch all references on offloaded changes entries
        (doseq [data-ref-id (sequence xf:filter-offloded chunk)]
          (l/trc :hint "touching referenced storage object"
                 :storage-object-id (str data-ref-id))
          (sto/touch-object! storage data-ref-id))

        (if (pos? length)
          (recur (+ total length))
          total)))))

(defmethod ig/pre-init-spec ::handler [_]
  (s/keys :req [::db/pool]))

(defmethod ig/init-key ::handler
  [_ cfg]
  (fn [{:keys [props] :as task}]
    (let [min-age    (or (:min-age props)
                         (dt/duration "72h"))
          chunk-size (:chunk-size props 5000)
          threshold  (dt/minus (dt/now) min-age)]

      (-> cfg
          (assoc ::db/rollback (:rollback props false))
          (assoc ::threshold threshold)
          (assoc ::chunk-size chunk-size)
          (db/tx-run! (fn [cfg]
                        (let [total (delete-in-chunks cfg)]
                          (l/trc :hint "file xlog cleaned" :total total)
                          total)))))))
||||
@@ -13,7 +13,6 @@
|
||||
[app.db :as db]
|
||||
[app.storage :as sto]
|
||||
[app.util.time :as dt]
|
||||
[clojure.spec.alpha :as s]
|
||||
[integrant.core :as ig]))
|
||||
|
||||
(def ^:private sql:get-profiles
|
||||
@@ -27,7 +26,7 @@
|
||||
|
||||
(defn- delete-profiles!
|
||||
[{:keys [::db/conn ::min-age ::chunk-size ::sto/storage] :as cfg}]
|
||||
(->> (db/cursor conn [sql:get-profiles min-age chunk-size] {:chunk-size 1})
|
||||
(->> (db/cursor conn [sql:get-profiles min-age chunk-size] {:chunk-size 5})
|
||||
(reduce (fn [total {:keys [id photo-id]}]
|
||||
(l/trc :hint "permanently delete" :rel "profile" :id (str id))
|
||||
|
||||
@@ -50,7 +49,7 @@
|
||||
|
||||
(defn- delete-teams!
|
||||
[{:keys [::db/conn ::min-age ::chunk-size ::sto/storage] :as cfg}]
|
||||
(->> (db/cursor conn [sql:get-teams min-age chunk-size] {:chunk-size 1})
|
||||
(->> (db/cursor conn [sql:get-teams min-age chunk-size] {:chunk-size 5})
|
||||
(reduce (fn [total {:keys [id photo-id deleted-at]}]
|
||||
(l/trc :hint "permanently delete"
|
||||
:rel "team"
|
||||
@@ -78,7 +77,7 @@
|
||||
|
||||
(defn- delete-fonts!
|
||||
[{:keys [::db/conn ::min-age ::chunk-size ::sto/storage] :as cfg}]
|
||||
(->> (db/cursor conn [sql:get-fonts min-age chunk-size] {:chunk-size 1})
|
||||
(->> (db/cursor conn [sql:get-fonts min-age chunk-size] {:chunk-size 5})
|
||||
(reduce (fn [total {:keys [id team-id deleted-at] :as font}]
|
||||
(l/trc :hint "permanently delete"
|
||||
:rel "team-font-variant"
|
||||
@@ -110,7 +109,7 @@
|
||||
|
||||
(defn- delete-projects!
|
||||
[{:keys [::db/conn ::min-age ::chunk-size] :as cfg}]
|
||||
(->> (db/cursor conn [sql:get-projects min-age chunk-size] {:chunk-size 1})
|
||||
(->> (db/cursor conn [sql:get-projects min-age chunk-size] {:chunk-size 5})
|
||||
(reduce (fn [total {:keys [id team-id deleted-at]}]
|
||||
(l/trc :hint "permanently delete"
|
||||
:rel "project"
|
||||
@@ -136,7 +135,7 @@
|
||||
|
||||
(defn- delete-files!
|
||||
[{:keys [::db/conn ::sto/storage ::min-age ::chunk-size] :as cfg}]
|
||||
(->> (db/cursor conn [sql:get-files min-age chunk-size] {:chunk-size 1})
|
||||
(->> (db/cursor conn [sql:get-files min-age chunk-size] {:chunk-size 5})
|
||||
(reduce (fn [total {:keys [id deleted-at project-id] :as file}]
|
||||
(l/trc :hint "permanently delete"
|
||||
:rel "file"
|
||||
@@ -165,7 +164,7 @@
|
||||
|
||||
(defn delete-file-thumbnails!
|
||||
[{:keys [::db/conn ::min-age ::chunk-size ::sto/storage] :as cfg}]
|
||||
(->> (db/cursor conn [sql:get-file-thumbnails min-age chunk-size] {:chunk-size 1})
|
||||
(->> (db/cursor conn [sql:get-file-thumbnails min-age chunk-size] {:chunk-size 5})
|
||||
(reduce (fn [total {:keys [file-id revn media-id deleted-at]}]
|
||||
(l/trc :hint "permanently delete"
|
||||
:rel "file-thumbnail"
|
||||
@@ -194,7 +193,7 @@
|
||||
|
||||
(defn delete-file-object-thumbnails!
|
||||
[{:keys [::db/conn ::min-age ::chunk-size ::sto/storage] :as cfg}]
|
||||
(->> (db/cursor conn [sql:get-file-object-thumbnails min-age chunk-size] {:chunk-size 1})
|
||||
(->> (db/cursor conn [sql:get-file-object-thumbnails min-age chunk-size] {:chunk-size 5})
|
||||
(reduce (fn [total {:keys [file-id object-id media-id deleted-at]}]
|
||||
(l/trc :hint "permanently delete"
|
||||
:rel "file-tagged-object-thumbnail"
|
||||
@@ -223,7 +222,7 @@
|
||||
|
||||
(defn- delete-file-data-fragments!
|
||||
[{:keys [::db/conn ::sto/storage ::min-age ::chunk-size] :as cfg}]
|
||||
(->> (db/cursor conn [sql:get-file-data-fragments min-age chunk-size] {:chunk-size 1})
|
||||
(->> (db/cursor conn [sql:get-file-data-fragments min-age chunk-size] {:chunk-size 5})
|
||||
(reduce (fn [total {:keys [file-id id deleted-at data-ref-id]}]
|
||||
(l/trc :hint "permanently delete"
|
||||
:rel "file-data-fragment"
|
||||
@@ -249,7 +248,7 @@
|
||||
|
||||
(defn- delete-file-media-objects!
|
||||
[{:keys [::db/conn ::min-age ::chunk-size ::sto/storage] :as cfg}]
|
||||
(->> (db/cursor conn [sql:get-file-media-objects min-age chunk-size] {:chunk-size 1})
|
||||
(->> (db/cursor conn [sql:get-file-media-objects min-age chunk-size] {:chunk-size 5})
|
||||
(reduce (fn [total {:keys [id file-id deleted-at] :as fmo}]
|
||||
(l/trc :hint "permanently delete"
|
||||
:rel "file-media-object"
|
||||
@@ -266,6 +265,34 @@
|
||||
(inc total))
|
||||
0)))
|
||||
|
||||
(def ^:private sql:get-file-change
|
||||
"SELECT id, file_id, deleted_at, data_backend, data_ref_id
|
||||
FROM file_change
|
||||
WHERE deleted_at IS NOT NULL
|
||||
AND deleted_at < now() - ?::interval
|
||||
ORDER BY deleted_at ASC
|
||||
LIMIT ?
|
||||
FOR UPDATE
|
||||
SKIP LOCKED")
|
||||
|
||||
(defn- delete-file-change!
|
||||
[{:keys [::db/conn ::min-age ::chunk-size ::sto/storage] :as cfg}]
|
||||
(->> (db/cursor conn [sql:get-file-change min-age chunk-size] {:chunk-size 5})
|
||||
(reduce (fn [total {:keys [id file-id deleted-at] :as xlog}]
|
||||
(l/trc :hint "permanently delete"
|
||||
:rel "file-change"
|
||||
:id (str id)
|
||||
:file-id (str file-id)
|
||||
:deleted-at (dt/format-instant deleted-at))
|
||||
|
||||
(when (= "objects-storage" (:data-backend xlog))
|
||||
(sto/touch-object! storage (:data-ref-id xlog)))
|
||||
|
||||
(db/delete! conn :file-change {:id id})
|
||||
|
||||
(inc total))
|
||||
0)))
|
||||
|
||||
(def ^:private deletion-proc-vars
|
||||
[#'delete-profiles!
|
||||
#'delete-file-media-objects!
|
||||
@@ -275,7 +302,8 @@
|
||||
#'delete-files!
|
||||
#'delete-projects!
|
||||
#'delete-fonts!
|
||||
#'delete-teams!])
|
||||
#'delete-teams!
|
||||
#'delete-file-change!])
|
||||
|
||||
(defn- execute-proc!
|
||||
"A generic function that executes the specified proc iterativelly
|
||||
@@ -289,14 +317,16 @@
|
||||
(recur (+ total result))
|
||||
total))))
|
||||
|
||||
(defmethod ig/pre-init-spec ::handler [_]
|
||||
(s/keys :req [::db/pool ::sto/storage]))
|
||||
(defmethod ig/assert-key ::handler
|
||||
[_ params]
|
||||
(assert (db/pool? (::db/pool params)) "expected a valid database pool")
|
||||
(assert (sto/valid-storage? (::sto/storage params)) "expected valid storage to be provided"))
|
||||
|
||||
(defmethod ig/prep-key ::handler
|
||||
[_ cfg]
|
||||
(assoc cfg
|
||||
::min-age (cf/get-deletion-delay)
|
||||
::chunk-size 10))
|
||||
(defmethod ig/expand-key ::handler
|
||||
[k v]
|
||||
{k (assoc v
|
||||
::min-age (cf/get-deletion-delay)
|
||||
::chunk-size 50)})
|
||||
|
||||
(defmethod ig/init-key ::handler
|
||||
[_ cfg]
|
||||
|
||||
@@ -13,7 +13,6 @@
   [app.db :as db]
   [app.db.sql :as-alias sql]
   [app.storage :as sto]
   [clojure.spec.alpha :as s]
   [integrant.core :as ig]))

(defn- offload-file-data!
@@ -109,8 +108,10 @@
;; HANDLER
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;

(defmethod ig/pre-init-spec ::handler [_]
  (s/keys :req [::db/pool ::sto/storage]))
(defmethod ig/assert-key ::handler
  [_ params]
  (assert (db/pool? (::db/pool params)) "expected a valid database pool")
  (assert (sto/valid-storage? (::sto/storage params)) "expected valid storage to be provided"))

(defmethod ig/init-key ::handler
  [_ cfg]

@@ -11,19 +11,19 @@
   [app.common.logging :as l]
   [app.config :as cf]
   [app.db :as db]
   [clojure.spec.alpha :as s]
   [integrant.core :as ig]))

(def ^:private
  sql:delete-completed-tasks
  "DELETE FROM task WHERE scheduled_at < now() - ?::interval")

(defmethod ig/pre-init-spec ::handler [_]
  (s/keys :req [::db/pool]))
(defmethod ig/assert-key ::handler
  [_ params]
  (assert (db/pool? (::db/pool params)) "expected a valid database pool"))

(defmethod ig/prep-key ::handler
  [_ cfg]
  (assoc cfg ::min-age (cf/get-deletion-delay)))
(defmethod ig/expand-key ::handler
  [k v]
  {k (assoc v ::min-age (cf/get-deletion-delay))}

(defmethod ig/init-key ::handler
  [_ {:keys [::db/pool ::min-age] :as cfg}]