Mirror of https://github.com/penpot/penpot.git (synced 2026-01-06 21:39:00 -05:00)
Compare commits
1364 commits: pre-lazy-l...2.0.3
(Commit list omitted: the compare view lists the 1364 commits as abbreviated SHA1 hashes only; the Author and Date columns are empty in this mirror.)
@@ -31,10 +31,33 @@ jobs:
      - run: cat .cljfmt.edn
      - run: clj-kondo --version

      # - run:
      #     name: "fmt check [clj]"
      #     command: |
      #       yarn run fmt:clj:check
      - run:
          name: "fmt check backend [clj]"
          working_directory: "./backend"
          command: |
            yarn install
            yarn run fmt:clj:check

      - run:
          name: "fmt check exporter [clj]"
          working_directory: "./exporter"
          command: |
            yarn install
            yarn run fmt:clj:check

      - run:
          name: "fmt check common [clj]"
          working_directory: "./common"
          command: |
            yarn install
            yarn run fmt:clj:check

      - run:
          name: "fmt check frontend [clj]"
          working_directory: "./frontend"
          command: |
            yarn install
            yarn run fmt:clj:check

      - run:
          name: common lint
@@ -4,8 +4,8 @@
promesa.core/-> clojure.core/->
promesa.exec.csp/go-loop clojure.core/loop
rumext.v2/defc clojure.core/defn
rumext.v2/fnc clojure.core/fn
promesa.util/with-open clojure.core/with-open
app.common.schema.generators/let clojure.core/let
app.common.data/export clojure.core/def
app.common.data.macros/get-in clojure.core/get-in
app.common.data.macros/with-open clojure.core/with-open

@@ -15,11 +15,13 @@
:hooks
{:analyze-call
 {app.common.data.macros/export hooks.export/export
  potok.v2.core/reify hooks.export/potok-reify
  app.util.services/defmethod hooks.export/service-defmethod
  app.common.record/defrecord hooks.export/penpot-defrecord
  app.db/with-atomic hooks.export/penpot-with-atomic
  potok.v2.core/reify hooks.export/potok-reify
  rumext.v2/fnc hooks.export/rumext-fnc
  rumext.v2/lazy-component hooks.export/rumext-lazycomponent
  shadow.lazy/loadable hooks.export/rumext-lazycomponent}}

:output

@@ -37,6 +37,9 @@
          (api/token-node rsym)
          (api/vector-node [])]
         other))]

    ;; (prn (api/sexpr result))

    {:node result})))

(defn penpot-with-atomic

@@ -71,6 +74,17 @@
    {:node result})))


(defn rumext-lazycomponent
  [{:keys [node]}]
  (let [[cname mdata params & body] (rest (:children node))
        [params body] (if (api/vector-node? mdata)
                        [mdata (cons params body)]
                        [params body])]
    (let [result (api/list-node [(api/token-node 'constantly) nil])]
      ;; (prn (api/sexpr result))
      {:node result})))


(defn penpot-defrecord
  [{:keys [:node]}]
  (let [[rnode rtype rparams & other] (:children node)
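For context on the :analyze-call entries above: a clj-kondo hook receives the macro call as a rewrite-node map and returns a replacement node that clj-kondo can analyze. The following is a minimal sketch of that shape; the macro my.lib/defwidget and its defn rewrite are illustrative assumptions, not Penpot's actual hook code.

(ns hooks.example
  (:require [clj-kondo.hooks-api :as api]))

;; Illustrative hook: rewrite (my.lib/defwidget name [args] body ...)
;; into (defn name [args] body ...) so clj-kondo can analyze it.
(defn defwidget
  [{:keys [node]}]
  (let [[_ cname params & body] (:children node)
        result (api/list-node
                (list* (api/token-node 'defn) cname params body))]
    ;; clj-kondo analyzes the returned node in place of the original form.
    {:node result}))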
@@ -3,6 +3,7 @@
:remove-surrounding-whitespace? true
:remove-consecutive-blank-lines? false
:extra-indents {rumext.v2/fnc [[:inner 0]]
                cljs.test/async [[:inner 0]]
                promesa.exec/thread [[:inner 0]]
                specify! [[:inner 0] [:inner 1]]}
}
.gitignore (vendored): 1 line changed
@@ -23,6 +23,7 @@
/*.jpg
/*.md
/*.png
/*.svg
/*.sql
/*.txt
/*.yml
@@ -6,4 +6,6 @@ enableImmutableInstalls: false

enableTelemetry: false

httpTimeout: 600000

nodeLinker: node-modules
CHANGES.md: 168 lines changed
@@ -1,34 +1,168 @@
# CHANGELOG

## :rocket: Next

### :boom: Breaking changes & Deprecations

### :sparkles: New features
## 2.0.3

### :bug: Bugs fixed

### :arrow_up: Deps updates
- Fix chrome scrollbar styling [Taiga Issue #7852](https://tree.taiga.io/project/penpot/issue/7852)
- Fix incorrect password encoding on create-profile manage script [Github #3651](https://github.com/penpot/penpot/issues/3651)

### :heart: Community contributions by (Thank you!)

## 1.20.0
## 2.0.2

### :sparkles: Enhancements

- Fix locking contention on cron subsystem (causes backend start blocking)
- Fix locking contention on file object thumbnails backend RPC calls

### :bug: Bugs fixed

- Fix color palette sorting [Taiga Issue #7458](https://tree.taiga.io/project/penpot/issue/7458)
- Fix style scoping problem with imported SVG [Taiga #7671](https://tree.taiga.io/project/penpot/issue/7671)


## 2.0.1

### :bug: Bugs fixed

- Fix different issues related to components v2 migrations including [Github #4443](https://github.com/penpot/penpot/issues/4443)


## 2.0.0 - I Just Can't Get Enough

### :rocket: Epics and highlights
- Grid CSS layout [Taiga #4915](https://tree.taiga.io/project/penpot/epic/4915)
- UI redesign [Taiga #4958](https://tree.taiga.io/project/penpot/epic/4958)
- New components System [Taiga #2662](https://tree.taiga.io/project/penpot/epic/2662)
- Swap components [Taiga #1331](https://tree.taiga.io/project/penpot/us/1331)
- Images as fill [Taiga #2983](https://tree.taiga.io/project/penpot/us/2983)
- HTML code generation [Taiga #5277](https://tree.taiga.io/project/penpot/us/5277)
- Light and dark themes [Taiga #2287](https://tree.taiga.io/project/penpot/us/2287)

### :boom: Breaking changes & Deprecations

### :sparkles: New features
- New strokes default to inside border [Taiga #6847](https://tree.taiga.io/project/penpot/issue/6847)
- Change default z ordering on layers in flex layout. The previous behavior was inconsistent with how HTML works, so it was changed to be more consistent. Previously overlapping layers could end up hidden; the fastest fix is changing the z-index property, but a better one is to change the order of your layers.


### :heart: Community contributions (Thank you!)
- New Hausa, Yoruba and Igbo translations and update translation files (by All For Tech Empowerment Foundation) [Taiga #6950](https://tree.taiga.io/project/penpot/us/6950), [Taiga #6534](https://tree.taiga.io/project/penpot/us/6534)
- Hide bounding-box when editing shape (by @VasilevsVV) [#3930](https://github.com/penpot/penpot/pull/3930)
- CTRL + "+" to zoom into canvas instead of browser (by @audriu) [#3848](https://github.com/penpot/penpot/pull/3848)
- Add dev deps.edn in the project root (by @PEZ) [#3794](https://github.com/penpot/penpot/pull/3794)
- Allow passing overrides to frontend nginx config (by @m90) [#3602](https://github.com/penpot/penpot/pull/3602)
- Update index.njk to remove typo (by @fdvmoreira) [#155](https://github.com/penpot/penpot-docs/pull/155)
- Typo (by StephanEggermont) [#157](https://github.com/penpot/penpot-docs/pull/157)

### :sparkles: New features
- Send comments with Ctrl+Enter / Cmd + Enter [Taiga #6085](https://tree.taiga.io/project/penpot/issue/6085)
- Select through stroke only rectangle [Taiga #5484](https://tree.taiga.io/project/penpot/issue/5484)
- Stroke default position [Taiga #6847](https://tree.taiga.io/project/penpot/issue/6847)
- Override browser Ctrl+ and Ctrl- zoom with Penpot Zoom [Taiga #3200](https://tree.taiga.io/project/penpot/us/3200)
- Improve the way handlers work on flex layouts [Taiga #6598](https://tree.taiga.io/project/penpot/us/6598)
- Add menu entry for toggle between light/dark theme [Taiga #6829](https://tree.taiga.io/project/penpot/issue/6829)
- Switch themes shortcut [Taiga #6644](https://tree.taiga.io/project/penpot/us/6644)
- Constraints section at design tab new position [Taiga #6830](https://tree.taiga.io/project/penpot/issue/6830)
- [PICKER] File library colors order [Taiga #5399](https://tree.taiga.io/project/penpot/us/5399)
- Onboarding invitations improvements [Taiga #5974](https://tree.taiga.io/project/penpot/us/5974)
- [PERFORMANCE] Workspace thumbnails refactor [Taiga #5828](https://tree.taiga.io/project/penpot/us/5828)
- [PERFORMANCE] Add performance optimizations to shape rendering [Taiga #5835](https://tree.taiga.io/project/penpot/us/5835)
- [PERFORMANCE] Optimize SVG output [Taiga #4134](https://tree.taiga.io/project/penpot/us/4134)
- [PERFORMANCE] Optimize svg on importation [Taiga #5879](https://tree.taiga.io/project/penpot/us/5879)
- [PERFORMANCE] Optimization tasks related to design tab file [Taiga #5760](https://tree.taiga.io/project/penpot/us/5760)
- [INSTALL] Ability to setup features by team [Taiga #6108](https://tree.taiga.io/project/penpot/us/6108)
- [IMAGES] Keep aspect ratio option [Taiga #6933](https://tree.taiga.io/project/penpot/us/6933)
- [INSPECT] UI review [Taiga #5687](https://tree.taiga.io/project/penpot/us/5687)
- [GRID LAYOUT] Phase 1 [Taiga #4303](https://tree.taiga.io/project/penpot/us/4303)
- [GRID LAYOUT] Inspect code for Grid [Taiga #5277](https://tree.taiga.io/project/penpot/us/5277)
- [GRID LAYOUT] Phase 1 polishing [Taiga #5612](https://tree.taiga.io/project/penpot/us/5612)
- [GRID LAYOUT] Improvements & Feedback [Taiga #6047](https://tree.taiga.io/project/penpot/us/6047)
- [COMPONENTS] Naming of the main component [Taiga #5291](https://tree.taiga.io/project/penpot/us/5291)
- [COMPONENTS] Rework inside of components - Library page [Taiga #2918](https://tree.taiga.io/project/penpot/us/2918)
- [COMPONENTS] Update component when updating main instance [Taiga #3794](https://tree.taiga.io/project/penpot/us/3794)
- [COMPONENTS] Main component new behavior [Taiga #3796](https://tree.taiga.io/project/penpot/us/3796)
- [COMPONENTS] Main component look & feel [Taiga #5290](https://tree.taiga.io/project/penpot/us/5290)
- [COMPONENTS] Library view [Taiga #2880](https://tree.taiga.io/project/penpot/us/2880)
- [COMPONENTS] Positioning inside a component should be relative, as in boards [Taiga #2826](https://tree.taiga.io/project/penpot/us/2826)
- [COMPONENTS] Update message should show only if affecting components that are being used in a file [Taiga #1397](https://tree.taiga.io/project/penpot/us/1397)
- [COMPONENTS] Annotations [Taiga #4957](https://tree.taiga.io/project/penpot/us/4957)
- [COMPONENTS] Synchronization order for nested components [Taiga #5439](https://tree.taiga.io/project/penpot/us/5439)
- [COMPONENTS] Libraries modal zero case [Taiga #5294](https://tree.taiga.io/project/penpot/us/5294)
- [COMPONENTS] Contextual menu casuistics [Taiga #5292](https://tree.taiga.io/project/penpot/us/5292)
- [COMPONENTS] Libraries publishing flow review [Taiga #5293](https://tree.taiga.io/project/penpot/us/5293)
- [COMPONENTS] Add loading text to Libraries modal [Taiga #6702](https://tree.taiga.io/project/penpot/us/6702)
- [COMPONENTS] Components rename and organization in bulk [Taiga #2877](https://tree.taiga.io/project/penpot/us/2877)
- [COMPONENTS] Info overlay about components V2 [Taiga #6276](https://tree.taiga.io/project/penpot/us/6276)
- [REDESIGN] New styles basics [Taiga #4967](https://tree.taiga.io/project/penpot/us/4967)
- [REDESIGN] Layers tab redesign [Taiga #4966](https://tree.taiga.io/project/penpot/us/4966)
- [REDESIGN] Design tab phase 1 [Taiga #4982](https://tree.taiga.io/project/penpot/us/4966)
- [REDESIGN] Assets tab redesign [Taiga #4984](https://tree.taiga.io/project/penpot/us/4984)
- [REDESIGN] Palette panels (colors, typographies...) [Taiga #4983](https://tree.taiga.io/project/penpot/us/4983)
- [REDESIGN] Workspace structure [Taiga #4988](https://tree.taiga.io/project/penpot/us/4988)
- [REDESIGN] Shortcut tab [Taiga #4989](https://tree.taiga.io/project/penpot/us/4989)
- [REDESIGN] Toolbar [Taiga #5500](https://tree.taiga.io/project/penpot/us/5500)
- [REDESIGN] History tab [Taiga #5481](https://tree.taiga.io/project/penpot/us/5481)
- [REDESIGN] Path options/toolbar [Taiga #5815](https://tree.taiga.io/project/penpot/us/5815)
- [REDESIGN] Design tab phase 2 [Taiga #5814](https://tree.taiga.io/project/penpot/us/5814)
- [REDESIGN] Design tab phase 3 and dashboard details [Taiga #5920](https://tree.taiga.io/project/penpot/us/5920)
- [REDESIGN] Dashboard [Taiga #5164](https://tree.taiga.io/project/penpot/us/5164)
- [REDESIGN] New Dashboard UI [Taiga #5869](https://tree.taiga.io/project/penpot/us/5869)
- [REDESIGN] Prototype tab [Taiga #4985](https://tree.taiga.io/project/penpot/us/4985)
- [REDESIGN] Code tab [Taiga #4986](https://tree.taiga.io/project/penpot/us/4986)
- [REDESIGN] Modals and alert messages [Taiga #5915](https://tree.taiga.io/project/penpot/us/5915)
- [REDESIGN] Comments page [Taiga #5917](https://tree.taiga.io/project/penpot/us/5917)
- [REDESIGN] View Mode [Taiga #5163](https://tree.taiga.io/project/penpot/us/5163)
- [REDESIGN] Miscellaneous tasks [Taiga #6050](https://tree.taiga.io/project/penpot/us/6050)
- [REDESIGN] Swap components [Taiga #6739](https://tree.taiga.io/project/penpot/us/6739)
- [REDESIGN] Font selector [Taiga #6677](https://tree.taiga.io/project/penpot/us/6677)
- [REDESIGN] Colour system of alerts and notifications [Taiga #6746](https://tree.taiga.io/project/penpot/us/6746)
- [REDESIGN] Review text in paragraphs for accessibility [Taiga #6703](https://tree.taiga.io/project/penpot/us/6703)
- [REDESIGN] Interaction icons [Taiga #6880](https://tree.taiga.io/project/penpot/us/6880)
- [REDESIGN] Panels visual separations [Taiga #6692](https://tree.taiga.io/project/penpot/us/6692)
- [REDESIGN] Onboarding slides [Taiga #6678](https://tree.taiga.io/project/penpot/us/6678)

### :bug: Bugs fixed

- Fix pixelated thumbnails [Github #3681](https://github.com/penpot/penpot/issues/3681) [Github #3661](https://github.com/penpot/penpot/issues/3661)

### :arrow_up: Deps updates

### :heart: Community contributions by (Thank you!)
### :bug: Bugs fixed
- Fix pixelated thumbnails [Github #3681](https://github.com/penpot/penpot/issues/3681), [Github #3661](https://github.com/penpot/penpot/issues/3661)
- Fix problem with not applying colors to boards [Github #3941](https://github.com/penpot/penpot/issues/3941)
- Fix problem with path editor undoing changes [Github #3998](https://github.com/penpot/penpot/issues/3998)
- [View mode] Open overlay places frame in the wrong position when paired with a fixed element [Taiga #6385](https://tree.taiga.io/project/penpot/issue/6385)
- Flex Layout: Fit-content not recalculated after deleting an element [Taiga #5968](https://tree.taiga.io/project/penpot/issue/5968)
- Selecting from Color Palette does not work for board when there is no existing fill [Taiga #6464](https://tree.taiga.io/project/penpot/issue/6464)
- Color thumbnails are consistently rounded in the inspect code mode [Taiga #5886](https://tree.taiga.io/project/penpot/issue/5886)
- Adding vector path points before first point of existing open path not working [Taiga #6593](https://tree.taiga.io/project/penpot/issue/6593)
- Some image formats include the extension when importing [Taiga #5485](https://tree.taiga.io/project/penpot/issue/5485)
- Gradient color tool doesn't work properly with flipped items [Taiga #6485](https://tree.taiga.io/project/penpot/issue/6485)
- [TEXT] Align options are not shown when several texts are selected [Taiga #5948](https://tree.taiga.io/project/penpot/issue/5948)
- [VIEW MODE] Comments not working properly on multiple pages [Taiga #6281](https://tree.taiga.io/project/penpot/issue/6281)
- [PERFORMANCE] Alignments are slow [Taiga #5865](https://tree.taiga.io/project/penpot/issue/5865)
- [EXPORT] Exporting an element with a non-visible drop shadow displays the shadow either way [Taiga #6768](https://tree.taiga.io/project/penpot/issue/6768)
- [SAFARI] Color picker cursor is not pointing correctly [Taiga #6733](https://tree.taiga.io/project/penpot/issue/6733)
- [Import Files] When a user imports a .penpot file with a new file name for a previously downloaded library file, the default library file name is set for it [Taiga #5596](https://tree.taiga.io/project/penpot/issue/5596)
- Issue when resizing a duotone FA icon [Taiga #5935](https://tree.taiga.io/project/penpot/issue/5935)
- "Hide grid" keyboard shortcut broken [Taiga #5102](https://tree.taiga.io/project/penpot/issue/5102)
- Picking a gradient color in recent colors for a new color in the assets tab crashes Penpot [Taiga #5601](https://tree.taiga.io/project/penpot/issue/5601)
- Thumbnails not loading [Taiga #6012](https://tree.taiga.io/project/penpot/issue/6012)
- Don't show signup link/form when registration is disabled. [Taiga #1196](https://tree.taiga.io/project/penpot/issue/1196)
- Registration Page UI UX issue with small resolutions [Taiga #1693](https://tree.taiga.io/project/penpot/issue/1693)
- [LOGIN] "E-Mail-Adress" input field is set to type 'text' instead of 'email' [Taiga #1921](https://tree.taiga.io/project/penpot/issue/1921)
- Handle slashes "/" in emails correctly [Taiga #4906](https://tree.taiga.io/project/penpot/issue/4906)
- Tab character in texts crashes the app [Taiga #4418](https://tree.taiga.io/project/penpot/issue/4418)
- Text does not match export [Taiga #4129](https://tree.taiga.io/project/penpot/issue/4129)
- Scrollbars cover the layers carets [Taiga #4431](https://tree.taiga.io/project/penpot/issue/4431)
- Horizontal ruler disappears when overlapping a board [Taiga #4138](https://tree.taiga.io/project/penpot/issue/4138)
- Resize shape + Alt key is not working [Taiga #3447](https://tree.taiga.io/project/penpot/issue/3447)
- Libraries images broken on premise [Taiga #4573](https://tree.taiga.io/project/penpot/issue/4573)
- [VIEWER] Cannot scroll down in code </> mode [Taiga #4655](https://tree.taiga.io/project/penpot/issue/4655)
- Strange cursor behavior after clicking viewport with text tool [Taiga #4363](https://tree.taiga.io/project/penpot/issue/4363)
- Selected color affects all of them [Taiga #5285](https://tree.taiga.io/project/penpot/issue/5285)
- Fix problem with shadow negative spread [Github #3421](https://github.com/penpot/penpot/issues/3421)
- Fix problem with linked colors to strokes [Github #3522](https://github.com/penpot/penpot/issues/3522)
- Fix problem with hand tool stuck [Github #3318](https://github.com/penpot/penpot/issues/3318)
- Fix problem with fixed scrolling on nested elements [Github #3508](https://github.com/penpot/penpot/issues/3508)
- Fix problem when changing typography assets [Github #3683](https://github.com/penpot/penpot/issues/3683)
- Internal error when you copy and paste some main components between files [Taiga #7397](https://tree.taiga.io/project/penpot/issue/7397)
- Fix toolbar disappearing [Taiga #7411](https://tree.taiga.io/project/penpot/issue/7411)
- Fix long text on tab breaks UI [Taiga Issue #7421](https://tree.taiga.io/project/penpot/issue/7421)

## 1.19.5
@@ -6,7 +6,7 @@
org.clojure/clojure {:mvn/version "1.12.0-alpha5"}
org.clojure/tools.namespace {:mvn/version "1.4.4"}

com.github.luben/zstd-jni {:mvn/version "1.5.5-10"}
com.github.luben/zstd-jni {:mvn/version "1.5.5-11"}

io.prometheus/simpleclient {:mvn/version "0.16.0"}
io.prometheus/simpleclient_hotspot {:mvn/version "0.16.0"}

@@ -17,7 +17,7 @@

io.prometheus/simpleclient_httpserver {:mvn/version "0.16.0"}

io.lettuce/lettuce-core {:mvn/version "6.2.6.RELEASE"}
io.lettuce/lettuce-core {:mvn/version "6.3.0.RELEASE"}
java-http-clj/java-http-clj {:mvn/version "0.4.3"}

funcool/yetti

@@ -26,12 +26,13 @@
:git/url "https://github.com/funcool/yetti.git"
:exclusions [org.slf4j/slf4j-api]}

com.github.seancorfield/next.jdbc {:mvn/version "1.3.894"}
com.github.seancorfield/next.jdbc {:mvn/version "1.3.909"}
metosin/reitit-core {:mvn/version "0.6.0"}
nrepl/nrepl {:mvn/version "1.1.0"}
cider/cider-nrepl {:mvn/version "0.43.1"}
cider/cider-nrepl {:mvn/version "0.44.0"}

org.postgresql/postgresql {:mvn/version "42.6.0"}
org.postgresql/postgresql {:mvn/version "42.7.1"}
org.xerial/sqlite-jdbc {:mvn/version "3.44.1.0"}

com.zaxxer/HikariCP {:mvn/version "5.1.0"}

@@ -42,7 +43,7 @@

com.github.ben-manes.caffeine/caffeine {:mvn/version "3.1.8"}

org.jsoup/jsoup {:mvn/version "1.16.2"}
org.jsoup/jsoup {:mvn/version "1.17.2"}
org.im4java/im4java
{:git/tag "1.4.0-penpot-2"
 :git/sha "e2b3e16"

@@ -57,7 +58,7 @@

;; Pretty Print specs
pretty-spec/pretty-spec {:mvn/version "0.1.4"}
software.amazon.awssdk/s3 {:mvn/version "2.20.138"}
software.amazon.awssdk/s3 {:mvn/version "2.22.12"}
}

:paths ["src" "resources" "target/classes"]

@@ -90,8 +91,8 @@

:jmx-remote
{:jvm-opts ["-Dcom.sun.management.jmxremote"
            "-Dcom.sun.management.jmxremote.port=9090"
            "-Dcom.sun.management.jmxremote.rmi.port=9090"
            "-Dcom.sun.management.jmxremote.port=9091"
            "-Dcom.sun.management.jmxremote.rmi.port=9091"
            "-Dcom.sun.management.jmxremote.local.only=false"
            "-Dcom.sun.management.jmxremote.authenticate=false"
            "-Dcom.sun.management.jmxremote.ssl=false"
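The :jmx-remote alias above moves the JMX port from 9090 to 9091 with authentication and SSL disabled. A minimal sketch of inspecting that endpoint from a REPL, assuming the org.clojure/java.jmx library is added separately (it is not part of this diff):

(require '[clojure.java.jmx :as jmx])

;; Connect to the JMX port configured by the :jmx-remote alias.
(jmx/with-connection {:host "localhost" :port 9091}
  ;; List a few JVM MBeans as a connectivity check.
  (doseq [n (take 5 (jmx/mbean-names "java.lang:*"))]
    (println (str n))))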
@@ -7,7 +7,9 @@
(ns user
  (:require
   [app.common.data :as d]
   [app.common.debug :as debug]
   [app.common.exceptions :as ex]
   [app.common.files.helpers :as cfh]
   [app.common.fressian :as fres]
   [app.common.geom.matrix :as gmt]
   [app.common.logging :as l]

@@ -42,7 +44,7 @@
   [clojure.walk :refer [macroexpand-all]]
   [criterium.core :as crit]
   [cuerdas.core :as str]
   [datoteka.core]
   [datoteka.fs :as fs]
   [integrant.core :as ig]
   [malli.core :as m]
   [malli.dev.pretty :as mdp]

@@ -54,8 +56,12 @@
   [promesa.exec :as px]))

(repl/disable-reload! (find-ns 'integrant.core))
(repl/disable-reload! (find-ns 'app.common.debug))

(set! *warn-on-reflection* true)

(add-tap #'debug/tap-handler)

;; --- Benchmarking Tools

(defmacro run-quick-bench

@@ -131,8 +137,11 @@
;; :v6 v6
;; }])))

(defonce debug-tap
  (do
    (add-tap #(locking debug-tap
                (prn "tap debug:" %)))
    1))

(defn calculate-frames
  [{:keys [data]}]
  (->> (vals (:pages-index data))
       (mapcat (comp vals :objects))
       (filter cfh/is-direct-child-of-root?)
       (filter cfh/frame-shape?)
       (count)))
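As a usage note for the debug-tap block above (a hedged example, not taken from the diff): once that add-tap call is registered, any value sent through tap> at the REPL is printed with the "tap debug:" prefix.

(tap> {:event :example :file-id "hypothetical-id"})
;; prints something like: "tap debug:" {:event :example, :file-id "hypothetical-id"}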
@@ -1,30 +1,33 @@
[{:id "material-design-3"
  :name "Material Design 3"
  :file-uri "https://github.com/penpot/penpot-files/raw/main/Material%20Design%203.penpot"}
 {:id "tutorial-for-beginners"
[{:id "tutorial-for-beginners"
  :name "Tutorial for beginners"
  :file-uri "https://github.com/penpot/penpot-files/raw/binary-files/tutorial-for-beginners.penpot"}
 {:id "penpot-design-system"
  :name "Penpot Design System"
  :file-uri "https://github.com/penpot/penpot-files/raw/binary-files/Penpot-Design-system.penpot"}
 {:id "flex-layout-playground"
  :name "Flex Layout Playground"
  :file-uri "https://github.com/penpot/penpot-files/raw/main/Flex%20Layout%20Playground.penpot"}
 {:id "lucide-icons"
  :name "Lucide Icons"
  :file-uri "https://github.com/penpot/penpot-files/raw/binary-files/Lucide-icons.penpot"}
 {:id "font-awesome"
  :name "Font Awesome"
  :file-uri "https://github.com/penpot/penpot-files/raw/binary-files/Font-Awesome.penpot"}
 {:id "plants-app"
  :name "Plants app"
  :file-uri "https://github.com/penpot/penpot-files/raw/binary-files/Plants-app.penpot"}
 {:id "wireframing-kit"
  :name "Wireframing Kit"
  :file-uri "https://github.com/penpot/penpot-files/raw/binary-files/wireframing-kit.penpot"}
 {:id "ant-design"
  :name "Ant Design UI Kit (lite)"
  :file-uri "https://github.com/penpot/penpot-files/raw/binary-files/Ant-Design-UI-Kit-Lite.penpot"}
 {:id "cocomaterial"
  :name "Cocomaterial"
  :file-uri "https://github.com/penpot/penpot-files/raw/binary-files/Cocomaterial.penpot"}
 {:id "circum-icons"
  :name "Circum Icons pack"
  :file-uri "https://github.com/penpot/penpot-files/raw/binary-files/CircumIcons.penpot"}
 {:id "coreui"
  :name "CoreUI"
  :file-uri "https://github.com/penpot/penpot-files/raw/main/CoreUI%20DesignSystem%20(DEMO).penpot"}
 {:id "black-white-mobile-templates"
  :name "Black & White Mobile Templates"
  :file-uri "https://github.com/penpot/penpot-files/raw/binary-files/Black-White-Mobile-Templates.penpot"}
 {:id "avataaars"
  :name "Avataaars"
  :file-uri "https://github.com/penpot/penpot-files/raw/binary-files/Avataaars-by-Pablo-Stanley.penpot"}
 {:id "ux-notes"
  :name "UX Notes"
  :file-uri "https://github.com/penpot/penpot-files/raw/binary-files/UX-Notes.penpot"}
 {:id "whiteboarding-kit"
  :name "Whiteboarding Kit"
  :file-uri "https://github.com/penpot/penpot-files/raw/binary-files/Whiteboarding-mapping-kit.penpot"}]
  :file-uri "https://github.com/penpot/penpot-files/raw/binary-files/Whiteboarding-mapping-kit.penpot"}
 {:id "open-color-scheme"
  :name "Open Color Scheme"
  :file-uri "https://github.com/penpot/penpot-files/raw/binary-files/Open-Color-Scheme.penpot"}
 {:id "flex-layout-playground"
  :name "Flex Layout Playground"
  :file-uri "https://github.com/penpot/penpot-files/raw/binary-files/Flex-Layout-Playground.penpot"}]
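A small sketch of how a template list like the EDN vector above can be consumed from Clojure; the resource name templates.edn is an assumption, since the scrape does not show the file path:

(require '[clojure.edn :as edn]
         '[clojure.java.io :as io])

;; Read the EDN vector and index the templates by :id.
(def templates
  (edn/read-string (slurp (io/resource "templates.edn"))))

(def templates-by-id
  (into {} (map (juxt :id identity)) templates))

(get-in templates-by-id ["tutorial-for-beginners" :file-uri])
;; => "https://github.com/penpot/penpot-files/raw/binary-files/tutorial-for-beginners.penpot"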
@@ -37,6 +37,13 @@

<h2>GENERAL NOTES</h2>

<h3>HTTP Transport & Methods</h3>
<p>HTTP is the transport for accessing this API; all functions can be
called with the POST HTTP method. Functions whose name starts with
<b>get-</b> can also use the GET HTTP method, which in many cases
benefits from the HTTP cache.</p>


<h3>Authentication</h3>
<p>The penpot backend currently offers two ways to authenticate a request:
<b>cookies</b> (the same mechanism that we use ourselves when accessing the API from the
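To make the notes above concrete, here is a hedged sketch of calling a get- function over HTTP from Clojure with java-http-clj (listed in the backend deps above). The RPC path, host, and cookie value are illustrative assumptions, not values taken from this page:

(require '[java-http-clj.core :as http])

;; get- functions may use GET and benefit from HTTP caching;
;; everything else goes through POST.
(def resp
  (http/send {:uri     "https://penpot.example.com/api/rpc/command/get-profile"
              :method  :get
              :headers {"Cookie" "auth-token=REPLACE-ME"}}))

(:status resp)
;; => 200 when the session cookie is valid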
@@ -145,17 +145,6 @@ Debug Main Page
    </small>
  </div>

  <div class="row">
    <label>Ignore index errors?</label>
    <input type="checkbox" name="ignore-index-errors" checked/>
    <br />
    <small>
      Do not break on index lookup errors (remap operation).
      Useful when importing a broken file that has broken
      relations or missing pieces.
    </small>
  </div>

  <div class="row">
    <input type="submit" name="upload" value="Upload" />
  </div>

@@ -168,7 +157,7 @@ Debug Main Page
  <legend>Reset file version</legend>
  <desc>Allows resetting the file data version to a specific number.</desc>

  <form method="post" action="/dbg/actions/reset-file-data-version">
  <form method="post" action="/dbg/actions/reset-file-version">
    <div class="row">
      <input type="text" style="width:300px" name="file-id" placeholder="file-id" />
    </div>
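Going by the form above, resetting a file's data version is a plain POST of a file-id form field to /dbg/actions/reset-file-version. A hedged sketch with java-http-clj; the host, cookie, and UUID are placeholders, and the real form may carry more fields than this scrape shows:

(require '[java-http-clj.core :as http])

(http/send {:uri     "https://penpot.example.com/dbg/actions/reset-file-version"
            :method  :post
            :headers {"Content-Type" "application/x-www-form-urlencoded"
                      "Cookie"       "auth-token=REPLACE-ME"}
            :body    "file-id=00000000-0000-0000-0000-000000000000"})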
@@ -3,15 +3,26 @@
;; Optional: queue, omitted means Integer/MAX_VALUE
;; Optional: timeout, omitted means no timeout
;; Note: queue and timeout are mutually exclusive
{:update-file/by-profile
{:update-file/global {:permits 20}
 :update-file/by-profile
 {:permits 1 :queue 5}

 :update-file/global {:permits 20}
 :process-font/global {:permits 4}
 :process-font/by-profile {:permits 1}

 :derive-password/global {:permits 8}
 :process-font/global {:permits 4}
 :process-image/global {:permits 8}
 :process-image/by-profile {:permits 1}

 :auth/global {:permits 8}

 :root/global
 {:permits 40}

 :root/by-profile
 {:permits 10}

 :file-thumbnail-ops/global
 {:permits 20}
 :file-thumbnail-ops/by-profile
 {:permits 2}
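To illustrate the :permits / :queue semantics described in the comments above, here is a rough sketch using a plain java.util.concurrent.Semaphore; it is an approximation for explanation only, not Penpot's actual climit implementation:

(import '(java.util.concurrent Semaphore))

(defn run-limited
  "Run f while holding one of the semaphore's permits; reject the call
  when more than max-queue callers are already waiting."
  [^Semaphore sem max-queue f]
  (if (> (.getQueueLength sem) max-queue)
    ::rejected
    (do (.acquire sem)
        (try (f) (finally (.release sem))))))

(comment
  ;; Roughly analogous to {:permits 1 :queue 5} above.
  (def update-file-sem (Semaphore. 1))
  (run-limited update-file-sem 5 #(println "updating file...")))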
backend/resources/log4j2-devenv-repl.xml (new file): 52 lines
@@ -0,0 +1,52 @@
<?xml version="1.0" encoding="UTF-8"?>
<Configuration status="info" monitorInterval="30">
  <Appenders>
    <Console name="console" target="SYSTEM_OUT">
      <PatternLayout pattern="[%d{YYYY-MM-dd HH:mm:ss.SSS}] %level{length=1} %logger{36} - %msg%n"
                     alwaysWriteExceptions="true" />
    </Console>

    <RollingFile name="main" fileName="logs/main-latest.log" filePattern="logs/main-%i.log">
      <PatternLayout pattern="[%d{YYYY-MM-dd HH:mm:ss.SSS}] %level{length=1} %logger{36} - %msg%n"
                     alwaysWriteExceptions="true" />
      <Policies>
        <SizeBasedTriggeringPolicy size="50M"/>
      </Policies>
      <DefaultRolloverStrategy max="20"/>
    </RollingFile>
  </Appenders>

  <Loggers>
    <Logger name="io.lettuce" level="error" />
    <Logger name="com.zaxxer.hikari" level="error"/>
    <Logger name="org.postgresql" level="error" />

    <Logger name="app.binfile" level="debug" />
    <Logger name="app.storage.tmp" level="info" />
    <Logger name="app.worker" level="trace" />
    <Logger name="app.msgbus" level="info" />
    <Logger name="app.http.websocket" level="info" />
    <Logger name="app.http.sse" level="info" />
    <Logger name="app.util.websocket" level="info" />
    <Logger name="app.redis" level="info" />
    <Logger name="app.rpc.rlimit" level="info" />
    <Logger name="app.rpc.climit" level="debug" />
    <Logger name="app.common.files.migrations" level="debug" />

    <Logger name="app.loggers" level="debug" additivity="false">
      <AppenderRef ref="main" level="debug" />
    </Logger>

    <Logger name="app" level="all" additivity="false">
      <AppenderRef ref="main" level="trace" />
    </Logger>

    <Logger name="user" level="trace" additivity="false">
      <AppenderRef ref="main" level="trace" />
    </Logger>

    <Root level="info">
      <AppenderRef ref="main" />
    </Root>
  </Loggers>
</Configuration>
@@ -6,13 +6,13 @@
|
||||
alwaysWriteExceptions="true" />
|
||||
</Console>
|
||||
|
||||
<RollingFile name="main" fileName="logs/main.log" filePattern="logs/main-%i.log">
|
||||
<RollingFile name="main" fileName="logs/main-latest.log" filePattern="logs/main-%i.log">
|
||||
<PatternLayout pattern="[%d{YYYY-MM-dd HH:mm:ss.SSS}] %level{length=1} %logger{36} - %msg%n"
|
||||
alwaysWriteExceptions="true" />
|
||||
<Policies>
|
||||
<SizeBasedTriggeringPolicy size="50M"/>
|
||||
</Policies>
|
||||
<DefaultRolloverStrategy max="9"/>
|
||||
<DefaultRolloverStrategy max="20"/>
|
||||
</RollingFile>
|
||||
</Appenders>
|
||||
|
||||
@@ -21,31 +21,36 @@
|
||||
<Logger name="com.zaxxer.hikari" level="error"/>
|
||||
<Logger name="org.postgresql" level="error" />
|
||||
|
||||
<Logger name="app.rpc.commands.binfile" level="debug" />
|
||||
<Logger name="app.binfile" level="debug" />
|
||||
<Logger name="app.storage.tmp" level="info" />
|
||||
<Logger name="app.worker" level="trace" />
|
||||
<Logger name="app.msgbus" level="info" />
|
||||
<Logger name="app.http.websocket" level="info" />
|
||||
<Logger name="app.http.sse" level="info" />
|
||||
<Logger name="app.util.websocket" level="info" />
|
||||
<Logger name="app.redis" level="info" />
|
||||
<Logger name="app.rpc.rlimit" level="info" />
|
||||
<Logger name="app.rpc.climit" level="info" />
|
||||
<Logger name="app.common.files.migrations" level="info" />
|
||||
<Logger name="app.rpc.climit" level="debug" />
|
||||
<Logger name="app.common.files.migrations" level="debug" />
|
||||
|
||||
<Logger name="app.loggers" level="debug" additivity="false">
|
||||
<AppenderRef ref="console" level="info" />
|
||||
<AppenderRef ref="main" level="debug" />
|
||||
</Logger>
|
||||
|
||||
<Logger name="app" level="all" additivity="false">
|
||||
<AppenderRef ref="main" level="trace" />
|
||||
<AppenderRef ref="console" level="debug" />
|
||||
</Logger>
|
||||
|
||||
<Logger name="user" level="trace" additivity="false">
|
||||
<AppenderRef ref="main" level="trace" />
|
||||
<AppenderRef ref="console" level="info" />
|
||||
</Logger>
|
||||
|
||||
<Root level="info">
|
||||
<AppenderRef ref="main" />
|
||||
<AppenderRef ref="console" level="info" />
|
||||
</Root>
|
||||
</Loggers>
|
||||
</Configuration>
|
||||
|
||||
65
backend/resources/log4j2-experiments.xml
Normal file
@@ -0,0 +1,65 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<Configuration status="info" monitorInterval="30">
|
||||
<Appenders>
|
||||
<Console name="console" target="SYSTEM_OUT">
|
||||
<PatternLayout pattern="[%d{YYYY-MM-dd HH:mm:ss.SSS}] %level{length=1} %logger{36} - %msg%n"
|
||||
alwaysWriteExceptions="true" />
|
||||
</Console>
|
||||
|
||||
<RollingFile name="main" fileName="logs/main-latest.log" filePattern="logs/main-%i.log">
|
||||
<PatternLayout pattern="[%d{YYYY-MM-dd HH:mm:ss.SSS}] %level{length=1} %logger{36} - %msg%n"
|
||||
alwaysWriteExceptions="true" />
|
||||
<Policies>
|
||||
<SizeBasedTriggeringPolicy size="50M"/>
|
||||
</Policies>
|
||||
<DefaultRolloverStrategy max="9"/>
|
||||
</RollingFile>
|
||||
|
||||
<RollingFile name="reports" fileName="logs/reports-latest.log" filePattern="logs/reports-%i.log">
|
||||
<PatternLayout pattern="[%d{YYYY-MM-dd HH:mm:ss.SSS}] %level{length=1} %logger{36} - %msg%n"
|
||||
alwaysWriteExceptions="true" />
|
||||
<Policies>
|
||||
<SizeBasedTriggeringPolicy size="100M"/>
|
||||
</Policies>
|
||||
<DefaultRolloverStrategy max="9"/>
|
||||
</RollingFile>
|
||||
</Appenders>
|
||||
|
||||
<Loggers>
|
||||
<Logger name="io.lettuce" level="error" />
|
||||
<Logger name="com.zaxxer.hikari" level="error"/>
|
||||
<Logger name="org.postgresql" level="error" />
|
||||
|
||||
<Logger name="app.rpc.commands.binfile" level="debug" />
|
||||
<Logger name="app.storage.tmp" level="info" />
|
||||
<Logger name="app.worker" level="trace" />
|
||||
<Logger name="app.msgbus" level="info" />
|
||||
<Logger name="app.http.websocket" level="info" />
|
||||
<Logger name="app.http.sse" level="info" />
|
||||
<Logger name="app.util.websocket" level="info" />
|
||||
<Logger name="app.redis" level="info" />
|
||||
<Logger name="app.rpc.rlimit" level="info" />
|
||||
<Logger name="app.rpc.climit" level="debug" />
|
||||
<Logger name="app.common.files.migrations" level="info" />
|
||||
|
||||
<Logger name="app.loggers" level="debug" additivity="false">
|
||||
<AppenderRef ref="main" level="debug" />
|
||||
</Logger>
|
||||
|
||||
<Logger name="app.features" level="all" additivity="true">
|
||||
<AppenderRef ref="reports" level="warn" />
|
||||
</Logger>
|
||||
|
||||
<Logger name="app" level="all" additivity="false">
|
||||
<AppenderRef ref="main" level="trace" />
|
||||
</Logger>
|
||||
|
||||
<Logger name="user" level="trace" additivity="false">
|
||||
<AppenderRef ref="main" level="trace" />
|
||||
</Logger>
|
||||
|
||||
<Root level="info">
|
||||
<AppenderRef ref="main" />
|
||||
</Root>
|
||||
</Loggers>
|
||||
</Configuration>
|
||||
@@ -11,16 +11,9 @@
|
||||
<Logger name="io.lettuce" level="error" />
|
||||
<Logger name="com.zaxxer.hikari" level="error" />
|
||||
<Logger name="org.postgresql" level="error" />
|
||||
|
||||
<Logger name="app.util" level="info" />
|
||||
|
||||
<Logger name="app.loggers" level="debug" />
|
||||
|
||||
<Logger name="app" level="info" additivity="false">
|
||||
<AppenderRef ref="console" />
|
||||
<AppenderRef ref="console" level="info" />
|
||||
</Logger>
|
||||
|
||||
|
||||
<Root level="info">
|
||||
<AppenderRef ref="console" />
|
||||
</Root>
|
||||
|
||||
@@ -160,7 +160,6 @@ available_commands = (
|
||||
"delete-profile",
|
||||
"search-profile",
|
||||
"derive-password",
|
||||
"migrate-components-v2",
|
||||
)
|
||||
|
||||
parser = argparse.ArgumentParser(
|
||||
@@ -233,7 +232,4 @@ elif args.action == "search-profile":
|
||||
|
||||
search_profile(email)
|
||||
|
||||
elif args.action == "migrate-components-v2":
|
||||
migrate_components_v2()
|
||||
|
||||
|
||||
|
||||
@@ -4,7 +4,7 @@ export PENPOT_HOST=devenv
|
||||
export PENPOT_TENANT=dev
|
||||
export PENPOT_FLAGS="\
|
||||
$PENPOT_FLAGS \
|
||||
enable-registration
|
||||
enable-login-with-ldap \
|
||||
enable-login-with-password
|
||||
enable-login-with-oidc \
|
||||
enable-login-with-google \
|
||||
@@ -26,11 +26,17 @@ export PENPOT_FLAGS="\
|
||||
enable-soft-rpc-rlimit \
|
||||
enable-webhooks \
|
||||
enable-access-tokens \
|
||||
disable-feature-components-v2 \
|
||||
enable-file-validation \
|
||||
enable-file-schema-validation \
|
||||
disable-soft-file-schema-validation \
|
||||
disable-soft-file-validation";
|
||||
enable-file-schema-validation";
|
||||
|
||||
# Default deletion delay for devenv
|
||||
export PENPOT_DELETION_DELAY="24h"
|
||||
|
||||
# Setup default upload media file size to 100MiB
|
||||
export PENPOT_MEDIA_MAX_FILE_SIZE=104857600
|
||||
|
||||
# Setup default multipart upload size to 300MiB
|
||||
export PENPOT_HTTP_SERVER_MAX_MULTIPART_BODY_SIZE=314572800
|
||||
|
||||
# export PENPOT_DATABASE_URI="postgresql://172.17.0.1:5432/penpot"
|
||||
# export PENPOT_DATABASE_USERNAME="penpot"
|
||||
@@ -64,7 +70,7 @@ export OPTIONS="
|
||||
-J-Djava.util.logging.manager=org.apache.logging.log4j.jul.LogManager \
|
||||
-J-Djdk.attach.allowAttachSelf \
|
||||
-J-Dpolyglot.engine.WarnInterpreterOnly=false \
|
||||
-J-Dlog4j2.configurationFile=log4j2-devenv.xml \
|
||||
-J-Dlog4j2.configurationFile=log4j2-devenv-repl.xml \
|
||||
-J-XX:+EnableDynamicAgentLoading \
|
||||
-J-XX:-OmitStackTraceInFastThrow \
|
||||
-J-XX:+UnlockDiagnosticVMOptions \
|
||||
|
||||
48
backend/scripts/repl-test
Executable file
@@ -0,0 +1,48 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
source /home/penpot/environ
|
||||
export PENPOT_FLAGS="$PENPOT_FLAGS disable-backend-worker"
|
||||
|
||||
export OPTIONS="
|
||||
-A:jmx-remote -A:dev \
|
||||
-J-Djava.util.logging.manager=org.apache.logging.log4j.jul.LogManager \
|
||||
-J-Djdk.attach.allowAttachSelf \
|
||||
-J-Dlog4j2.configurationFile=log4j2-experiments.xml \
|
||||
-J-XX:-OmitStackTraceInFastThrow \
|
||||
-J-XX:+UnlockDiagnosticVMOptions \
|
||||
-J-XX:+DebugNonSafepoints \
|
||||
-J-Djdk.tracePinnedThreads=full \
|
||||
-J-XX:+UseTransparentHugePages \
|
||||
-J-XX:ReservedCodeCacheSize=1g \
|
||||
-J-Dpolyglot.engine.WarnInterpreterOnly=false \
|
||||
-J--enable-preview";
|
||||
|
||||
# Setup HEAP
|
||||
export OPTIONS="$OPTIONS -J-Xms320g -J-Xmx320g -J-XX:+AlwaysPreTouch"
|
||||
|
||||
export PENPOT_HTTP_SERVER_IO_THREADS=2
|
||||
export PENPOT_HTTP_SERVER_WORKER_THREADS=2
|
||||
|
||||
# Increase virtual thread pool size
|
||||
# export OPTIONS="$OPTIONS -J-Djdk.virtualThreadScheduler.parallelism=16"
|
||||
|
||||
# Disable C2 Compiler
|
||||
# export OPTIONS="$OPTIONS -J-XX:TieredStopAtLevel=1"
|
||||
|
||||
# Disable all compilers
|
||||
# export OPTIONS="$OPTIONS -J-Xint"
|
||||
|
||||
# Setup GC
|
||||
export OPTIONS="$OPTIONS -J-XX:+UseG1GC -J-Xlog:gc:logs/gc.log"
|
||||
|
||||
# Setup GC
|
||||
#export OPTIONS="$OPTIONS -J-XX:+UseZGC -J-XX:+ZGenerational -J-Xlog:gc:logs/gc.log"
|
||||
|
||||
# Enable ImageMagick v7.x support
|
||||
# export OPTIONS="-J-Dim4java.useV7=true $OPTIONS";
|
||||
|
||||
export OPTIONS_EVAL="nil"
|
||||
# export OPTIONS_EVAL="(set! *warn-on-reflection* true)"
|
||||
|
||||
set -ex
|
||||
exec clojure $OPTIONS -M -e "$OPTIONS_EVAL" -m rebel-readline.main
|
||||
@@ -18,7 +18,9 @@ if [ -f ./environ ]; then
|
||||
source ./environ
|
||||
fi
|
||||
|
||||
export JVM_OPTS="-Djava.util.logging.manager=org.apache.logging.log4j.jul.LogManager -Dlog4j2.configurationFile=log4j2.xml -XX:-OmitStackTraceInFastThrow --enable-preview $JVM_OPTS"
|
||||
export JVM_OPTS="-Djava.util.logging.manager=org.apache.logging.log4j.jul.LogManager -Dlog4j2.configurationFile=log4j2.xml -XX:-OmitStackTraceInFastThrow -Dpolyglot.engine.WarnInterpreterOnly=false --enable-preview $JVM_OPTS"
|
||||
|
||||
set -x
|
||||
exec $JAVA_CMD $JVM_OPTS "$@" -jar penpot.jar -m app.main
|
||||
ENTRYPOINT=${1:-app.main};
|
||||
|
||||
set -ex
|
||||
exec $JAVA_CMD $JVM_OPTS -jar penpot.jar -m $ENTRYPOINT
|
||||
|
||||
@@ -15,48 +15,35 @@ export PENPOT_FLAGS="\
|
||||
enable-feature-fdata-pointer-map \
|
||||
enable-feature-fdata-objects-map \
|
||||
disable-secure-session-cookies \
|
||||
enable-rpc-climit \
|
||||
enable-smtp \
|
||||
enable-access-tokens \
|
||||
disable-feature-components-v2 \
|
||||
enable-file-validation \
|
||||
enable-file-schema-validation \
|
||||
disable-soft-file-schema-validation \
|
||||
disable-soft-file-validation";
|
||||
enable-file-schema-validation";
|
||||
|
||||
export OPTIONS="
|
||||
-A:jmx-remote -A:dev \
|
||||
-J-Djava.util.logging.manager=org.apache.logging.log4j.jul.LogManager \
|
||||
-J-Djdk.attach.allowAttachSelf \
|
||||
-J-Dpolyglot.engine.WarnInterpreterOnly=false \
|
||||
-J-Dlog4j2.configurationFile=log4j2.xml \
|
||||
-J-Dlog4j2.configurationFile=log4j2-devenv.xml \
|
||||
-J-XX:+EnableDynamicAgentLoading \
|
||||
-J-XX:-OmitStackTraceInFastThrow \
|
||||
-J-XX:+UnlockDiagnosticVMOptions \
|
||||
-J-XX:+DebugNonSafepoints"
|
||||
|
||||
# Setup HEAP
|
||||
# export OPTIONS="$OPTIONS -J-Xms50m -J-Xmx1024m"
|
||||
# export OPTIONS="$OPTIONS -J-Xms1100m -J-Xmx1100m -J-XX:+AlwaysPreTouch"
|
||||
# Default deletion delay for devenv
|
||||
export PENPOT_DELETION_DELAY="24h"
|
||||
|
||||
# Increase virtual thread pool size
|
||||
# export OPTIONS="$OPTIONS -J-Djdk.virtualThreadScheduler.parallelism=16"
|
||||
# Setup default upload media file size to 100MiB
|
||||
export PENPOT_MEDIA_MAX_FILE_SIZE=104857600
|
||||
|
||||
# Disable C2 Compiler
|
||||
# export OPTIONS="$OPTIONS -J-XX:TieredStopAtLevel=1"
|
||||
|
||||
# Disable all compilers
|
||||
# export OPTIONS="$OPTIONS -J-Xint"
|
||||
|
||||
# Setup GC
|
||||
# export OPTIONS="$OPTIONS -J-XX:+UseG1GC"
|
||||
|
||||
# Setup GC
|
||||
# export OPTIONS="$OPTIONS -J-XX:+UseZGC"
|
||||
# Setup default multipart upload size to 300MiB
|
||||
export PENPOT_HTTP_SERVER_MAX_MULTIPART_BODY_SIZE=314572800
|
||||
|
||||
# Enable ImageMagick v7.x support
|
||||
# export OPTIONS="-J-Dim4java.useV7=true $OPTIONS";
|
||||
|
||||
|
||||
# Initialize MINIO config
|
||||
mc alias set penpot-s3/ http://minio:9000 minioadmin minioadmin -q
|
||||
mc admin user add penpot-s3 penpot-devenv penpot-devenv -q
|
||||
@@ -72,24 +59,8 @@ export PENPOT_ASSETS_STORAGE_BACKEND=assets-s3
|
||||
export PENPOT_STORAGE_ASSETS_S3_ENDPOINT=http://minio:9000
|
||||
export PENPOT_STORAGE_ASSETS_S3_BUCKET=penpot
|
||||
|
||||
if [ "$1" = "--watch" ]; then
|
||||
trap "exit" INT TERM ERR
|
||||
trap "kill 0" EXIT
|
||||
entrypoint=${1:-app.main};
|
||||
|
||||
echo "Start Watch..."
|
||||
set -ex
|
||||
|
||||
clojure $OPTIONS -A:dev -M -m app.main &
|
||||
|
||||
npx nodemon \
|
||||
--watch src \
|
||||
--watch ../common \
|
||||
--ext "clj" \
|
||||
--signal SIGKILL \
|
||||
--exec 'echo "(app.main/stop)\n\r(repl/refresh)\n\r(app.main/start)\n" | nc -N localhost 6062'
|
||||
|
||||
wait;
|
||||
|
||||
else
|
||||
set -x
|
||||
clojure $OPTIONS -A:dev -M -m app.main;
|
||||
fi
|
||||
clojure $OPTIONS -A:dev -M -m $entrypoint;
|
||||
|
||||
@@ -22,6 +22,7 @@
|
||||
[app.loggers.audit :as audit]
|
||||
[app.main :as-alias main]
|
||||
[app.rpc.commands.profile :as profile]
|
||||
[app.setup :as-alias setup]
|
||||
[app.tokens :as tokens]
|
||||
[app.util.json :as json]
|
||||
[app.util.time :as dt]
|
||||
@@ -37,7 +38,7 @@
|
||||
;; HELPERS
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
(defn- obfuscate-string
|
||||
(defn obfuscate-string
|
||||
[s]
|
||||
(if (< (count s) 10)
|
||||
(apply str (take (count s) (repeat "*")))
|
||||
@@ -413,7 +414,7 @@
|
||||
::props]))
|
||||
|
||||
(defn get-info
|
||||
[{:keys [provider ::main/props] :as cfg} {:keys [params] :as request}]
|
||||
[{:keys [provider ::setup/props] :as cfg} {:keys [params] :as request}]
|
||||
(when-let [error (get params :error)]
|
||||
(ex/raise :type :internal
|
||||
:code :error-on-retrieving-code
|
||||
@@ -474,6 +475,7 @@
|
||||
[{:keys [::db/pool] :as cfg} info]
|
||||
(dm/with-open [conn (db/open pool)]
|
||||
(some->> (:email info)
|
||||
(profile/clean-email)
|
||||
(profile/get-profile-by-email conn))))
|
||||
|
||||
(defn- redirect-response
|
||||
@@ -507,7 +509,7 @@
|
||||
(if profile
|
||||
(let [sxf (session/create-fn cfg (:id profile))
|
||||
token (or (:invitation-token info)
|
||||
(tokens/generate (::main/props cfg)
|
||||
(tokens/generate (::setup/props cfg)
|
||||
{:iss :auth
|
||||
:exp (dt/in-future "15m")
|
||||
:profile-id (:id profile)}))
|
||||
@@ -535,9 +537,10 @@
|
||||
:iss :prepared-register
|
||||
:is-active true
|
||||
:exp (dt/in-future {:hours 48}))
|
||||
token (tokens/generate (::main/props cfg) info)
|
||||
token (tokens/generate (::setup/props cfg) info)
|
||||
params (d/without-nils
|
||||
{:token token
|
||||
:provider (:provider (:path-params request))
|
||||
:fullname (:fullname info)})
|
||||
uri (-> (u/uri (cf/get :public-uri))
|
||||
(assoc :path "/#/auth/register/validate")
|
||||
@@ -550,7 +553,7 @@
|
||||
(defn- auth-handler
|
||||
[cfg {:keys [params] :as request}]
|
||||
(let [props (audit/extract-utm-params params)
|
||||
state (tokens/generate (::main/props cfg)
|
||||
state (tokens/generate (::setup/props cfg)
|
||||
{:iss :oauth
|
||||
:invitation-token (:invitation-token params)
|
||||
:props props
|
||||
@@ -617,7 +620,7 @@
|
||||
[_]
|
||||
(s/keys :req [::session/manager
|
||||
::http/client
|
||||
::main/props
|
||||
::setup/props
|
||||
::db/pool
|
||||
::providers]))
|
||||
|
||||
|
||||
492
backend/src/app/binfile/common.clj
Normal file
@@ -0,0 +1,492 @@
|
||||
;; This Source Code Form is subject to the terms of the Mozilla Public
|
||||
;; License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
;;
|
||||
;; Copyright (c) KALEIDOS INC
|
||||
|
||||
(ns app.binfile.common
|
||||
"A binfile related file processing common code, used for different
|
||||
binfile format implementations and management rpc methods."
|
||||
(:require
|
||||
[app.common.data :as d]
|
||||
[app.common.data.macros :as dm]
|
||||
[app.common.exceptions :as ex]
|
||||
[app.common.features :as cfeat]
|
||||
[app.common.files.migrations :as fmg]
|
||||
[app.common.files.validate :as fval]
|
||||
[app.common.logging :as l]
|
||||
[app.common.uuid :as uuid]
|
||||
[app.config :as cf]
|
||||
[app.db :as db]
|
||||
[app.db.sql :as sql]
|
||||
[app.features.components-v2 :as feat.compv2]
|
||||
[app.features.fdata :as feat.fdata]
|
||||
[app.loggers.audit :as-alias audit]
|
||||
[app.loggers.webhooks :as-alias webhooks]
|
||||
[app.util.blob :as blob]
|
||||
[app.util.pointer-map :as pmap]
|
||||
[app.util.time :as dt]
|
||||
[app.worker :as-alias wrk]
|
||||
[clojure.set :as set]
|
||||
[clojure.walk :as walk]
|
||||
[cuerdas.core :as str]))
|
||||
|
||||
(set! *warn-on-reflection* true)
|
||||
|
||||
(def ^:dynamic *state* nil)
|
||||
(def ^:dynamic *options* nil)
|
||||
|
||||
(def xf-map-id
|
||||
(map :id))
|
||||
|
||||
(def xf-map-media-id
|
||||
(comp
|
||||
(mapcat (juxt :media-id
|
||||
:thumbnail-id
|
||||
:woff1-file-id
|
||||
:woff2-file-id
|
||||
:ttf-file-id
|
||||
:otf-file-id))
|
||||
(filter uuid?)))
|
||||
|
||||
(def into-vec
|
||||
(fnil into []))
|
||||
|
||||
(def conj-vec
|
||||
(fnil conj []))
|
||||
|
||||
(defn collect-storage-objects
|
||||
[state items]
|
||||
(update state :storage-objects into xf-map-media-id items))
|
||||
|
||||
(defn collect-summary
|
||||
[state key items]
|
||||
(update state key into xf-map-media-id items))
|
||||
|
||||
(defn lookup-index
|
||||
[id]
|
||||
(when id
|
||||
(let [val (get-in @*state* [:index id])]
|
||||
(l/trc :fn "lookup-index" :id (str id) :result (some-> val str) ::l/sync? true)
|
||||
(or val id))))
|
||||
|
||||
(defn remap-id
|
||||
[item key]
|
||||
(cond-> item
|
||||
(contains? item key)
|
||||
(update key lookup-index)))
|
||||
|
||||
(defn- index-object
|
||||
[index obj & attrs]
|
||||
(reduce (fn [index attr-fn]
|
||||
(let [old-id (attr-fn obj)
|
||||
new-id (if (::overwrite *options*) old-id (uuid/next))]
|
||||
(assoc index old-id new-id)))
|
||||
index
|
||||
attrs))
|
||||
|
||||
(defn update-index
|
||||
([index coll]
|
||||
(update-index index coll identity))
|
||||
([index coll attr]
|
||||
(reduce #(index-object %1 %2 attr) index coll)))
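
;; Usage sketch (illustrative addition, not part of the original changeset):
;; build the old-id -> new-id index for a collection of incoming ids and
;; resolve a reference through it afterwards.
(comment
  (let [old-id (uuid/next)]
    (binding [*state* (volatile! {:index {}})]
      ;; allocate a fresh id for each incoming object id
      (vswap! *state* update :index update-index [old-id])
      ;; remap a reference from the old id to the newly allocated one
      (lookup-index old-id))))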
|
||||
|
||||
(defn decode-row
|
||||
"A generic decode row helper"
|
||||
[{:keys [data features] :as row}]
|
||||
(cond-> row
|
||||
features (assoc :features (db/decode-pgarray features #{}))
|
||||
data (assoc :data (blob/decode data))))
|
||||
|
||||
(defn get-file
|
||||
[cfg file-id]
|
||||
(db/run! cfg (fn [{:keys [::db/conn] :as cfg}]
|
||||
(binding [pmap/*load-fn* (partial feat.fdata/load-pointer cfg file-id)]
|
||||
(when-let [file (db/get* conn :file {:id file-id}
|
||||
{::db/remove-deleted false})]
|
||||
(-> file
|
||||
(decode-row)
|
||||
(update :data feat.fdata/process-pointers deref)
|
||||
(update :data feat.fdata/process-objects (partial into {}))))))))
|
||||
|
||||
(defn get-project
|
||||
[cfg project-id]
|
||||
(db/get cfg :project {:id project-id}))
|
||||
|
||||
(defn get-team
|
||||
[cfg team-id]
|
||||
(-> (db/get cfg :team {:id team-id})
|
||||
(decode-row)))
|
||||
|
||||
(defn get-fonts
|
||||
[cfg team-id]
|
||||
(db/query cfg :team-font-variant
|
||||
{:team-id team-id
|
||||
:deleted-at nil}))
|
||||
|
||||
(defn get-files-rels
|
||||
"Given a set of file-id's, return all matching relations with the libraries"
|
||||
[cfg ids]
|
||||
|
||||
(dm/assert!
|
||||
"expected a set of uuids"
|
||||
(and (set? ids)
|
||||
(every? uuid? ids)))
|
||||
|
||||
(db/run! cfg (fn [{:keys [::db/conn]}]
|
||||
(let [ids (db/create-array conn "uuid" ids)
|
||||
sql (str "SELECT flr.* FROM file_library_rel AS flr "
|
||||
" JOIN file AS l ON (flr.library_file_id = l.id) "
|
||||
" WHERE flr.file_id = ANY(?) AND l.deleted_at IS NULL")]
|
||||
(db/exec! conn [sql ids])))))
|
||||
|
||||
(def ^:private sql:get-libraries
|
||||
"WITH RECURSIVE libs AS (
|
||||
SELECT fl.id
|
||||
FROM file AS fl
|
||||
JOIN file_library_rel AS flr ON (flr.library_file_id = fl.id)
|
||||
WHERE flr.file_id = ANY(?)
|
||||
UNION
|
||||
SELECT fl.id
|
||||
FROM file AS fl
|
||||
JOIN file_library_rel AS flr ON (flr.library_file_id = fl.id)
|
||||
JOIN libs AS l ON (flr.file_id = l.id)
|
||||
)
|
||||
SELECT DISTINCT l.id
|
||||
FROM libs AS l")
|
||||
|
||||
(defn get-libraries
|
||||
"Get all libraries ids related to provided file ids"
|
||||
[cfg ids]
|
||||
(db/run! cfg (fn [{:keys [::db/conn]}]
|
||||
(let [ids' (db/create-array conn "uuid" ids)]
|
||||
(->> (db/exec! conn [sql:get-libraries ids'])
|
||||
(into #{} xf-map-id))))))
|
||||
|
||||
(defn get-file-object-thumbnails
|
||||
"Return all file object thumbnails for a given file."
|
||||
[cfg file-id]
|
||||
(db/query cfg :file-tagged-object-thumbnail
|
||||
{:file-id file-id
|
||||
:deleted-at nil}))
|
||||
|
||||
(defn get-file-thumbnail
|
||||
"Return the thumbnail for the specified file-id"
|
||||
[cfg {:keys [id revn]}]
|
||||
(db/get* cfg :file-thumbnail
|
||||
{:file-id id
|
||||
:revn revn
|
||||
:data nil}
|
||||
{::sql/columns [:media-id :file-id :revn]}))
|
||||
|
||||
|
||||
(def ^:private
|
||||
xform:collect-media-id
|
||||
(comp
|
||||
(map :objects)
|
||||
(mapcat vals)
|
||||
(mapcat (fn [obj]
|
||||
;; NOTE: because of some bug, we ended up with many shape
;; types having the ability to have a fill-image attribute
;; (which was initially designed for :path shapes).
|
||||
(sequence
|
||||
(keep :id)
|
||||
(concat [(:fill-image obj)
|
||||
(:metadata obj)]
|
||||
(map :fill-image (:fills obj))
|
||||
(map :stroke-image (:strokes obj))
|
||||
(->> (:content obj)
|
||||
(tree-seq map? :children)
|
||||
(mapcat :fills)
|
||||
(map :fill-image))))))))
|
||||
|
||||
(defn collect-used-media
|
||||
"Given a fdata (file data), returns all media references."
|
||||
[data]
|
||||
(-> #{}
|
||||
(into xform:collect-media-id (vals (:pages-index data)))
|
||||
(into xform:collect-media-id (vals (:components data)))
|
||||
(into (keys (:media data)))))
|
||||
|
||||
(defn get-file-media
|
||||
[cfg {:keys [data id] :as file}]
|
||||
(db/run! cfg (fn [{:keys [::db/conn]}]
|
||||
(let [ids (collect-used-media data)
|
||||
ids (db/create-array conn "uuid" ids)
|
||||
sql (str "SELECT * FROM file_media_object WHERE id = ANY(?)")]
|
||||
|
||||
;; We assoc the file-id again to the file-media-object row
;; because there are cases where used objects refer to other
;; files, and we need to ensure during the export process that
;; all ids match
|
||||
(->> (db/exec! conn [sql ids])
|
||||
(mapv #(assoc % :file-id id)))))))
|
||||
|
||||
(def ^:private sql:get-team-files
|
||||
"SELECT f.id FROM file AS f
|
||||
JOIN project AS p ON (p.id = f.project_id)
|
||||
WHERE p.team_id = ?")
|
||||
|
||||
(defn get-team-files
|
||||
"Get a set of file ids for the specified team-id"
|
||||
[{:keys [::db/conn]} team-id]
|
||||
(->> (db/exec! conn [sql:get-team-files team-id])
|
||||
(into #{} xf-map-id)))
|
||||
|
||||
(def ^:private sql:get-team-projects
|
||||
"SELECT p.id FROM project AS p
|
||||
WHERE p.team_id = ?
|
||||
AND p.deleted_at IS NULL")
|
||||
|
||||
(defn get-team-projects
|
||||
"Get a set of project ids for the team"
|
||||
[{:keys [::db/conn]} team-id]
|
||||
(->> (db/exec! conn [sql:get-team-projects team-id])
|
||||
(into #{} xf-map-id)))
|
||||
|
||||
(def ^:private sql:get-project-files
|
||||
"SELECT f.id FROM file AS f
|
||||
WHERE f.project_id = ?
|
||||
AND f.deleted_at IS NULL")
|
||||
|
||||
(defn get-project-files
|
||||
"Get a set of file ids for the project"
|
||||
[{:keys [::db/conn]} project-id]
|
||||
(->> (db/exec! conn [sql:get-project-files project-id])
|
||||
(into #{} xf-map-id)))
|
||||
|
||||
(defn- relink-shapes
|
||||
"A function responsible to analyze all file data and
|
||||
replace the old :component-file reference with the new
|
||||
ones, using the provided file-index."
|
||||
[data]
|
||||
(letfn [(process-map-form [form]
|
||||
(cond-> form
|
||||
;; Relink image shapes
|
||||
(and (map? (:metadata form))
|
||||
(= :image (:type form)))
|
||||
(update-in [:metadata :id] lookup-index)
|
||||
|
||||
;; Relink paths with fill image
|
||||
(map? (:fill-image form))
|
||||
(update-in [:fill-image :id] lookup-index)
|
||||
|
||||
;; This covers old shapes and the new :fills.
|
||||
(uuid? (:fill-color-ref-file form))
|
||||
(update :fill-color-ref-file lookup-index)
|
||||
|
||||
;; This covers the old shapes and the new :strokes
|
||||
(uuid? (:stroke-color-ref-file form))
|
||||
(update :stroke-color-ref-file lookup-index)
|
||||
|
||||
;; This covers all text shapes that have typography referenced
|
||||
(uuid? (:typography-ref-file form))
|
||||
(update :typography-ref-file lookup-index)
|
||||
|
||||
;; This covers the component instance links
|
||||
(uuid? (:component-file form))
|
||||
(update :component-file lookup-index)
|
||||
|
||||
;; This covers the shadows and grids (they have directly
|
||||
;; the :file-id prop)
|
||||
(uuid? (:file-id form))
|
||||
(update :file-id lookup-index)))
|
||||
|
||||
(process-form [form]
|
||||
(if (map? form)
|
||||
(try
|
||||
(process-map-form form)
|
||||
(catch Throwable cause
|
||||
(l/warn :hint "failed form" :form (pr-str form) ::l/sync? true)
|
||||
(throw cause)))
|
||||
form))]
|
||||
|
||||
(walk/postwalk process-form data)))
|
||||
|
||||
(defn- relink-media
|
||||
"A function responsible of process the :media attr of file data and
|
||||
remap the old ids with the new ones."
|
||||
[media]
|
||||
(reduce-kv (fn [res k v]
|
||||
(let [id (lookup-index k)]
|
||||
(if (uuid? id)
|
||||
(-> res
|
||||
(assoc id (assoc v :id id))
|
||||
(dissoc k))
|
||||
res)))
|
||||
media
|
||||
media))
|
||||
|
||||
(defn- relink-colors
|
||||
"A function responsible of process the :colors attr of file data and
|
||||
remap the old ids with the new ones."
|
||||
[colors]
|
||||
(reduce-kv (fn [res k v]
|
||||
(if (:image v)
|
||||
(update-in res [k :image :id] lookup-index)
|
||||
res))
|
||||
colors
|
||||
colors))
|
||||
|
||||
(defn embed-assets
|
||||
[cfg data file-id]
|
||||
(letfn [(walk-map-form [form state]
|
||||
(cond
|
||||
(uuid? (:fill-color-ref-file form))
|
||||
(do
|
||||
(vswap! state conj [(:fill-color-ref-file form) :colors (:fill-color-ref-id form)])
|
||||
(assoc form :fill-color-ref-file file-id))
|
||||
|
||||
(uuid? (:stroke-color-ref-file form))
|
||||
(do
|
||||
(vswap! state conj [(:stroke-color-ref-file form) :colors (:stroke-color-ref-id form)])
|
||||
(assoc form :stroke-color-ref-file file-id))
|
||||
|
||||
(uuid? (:typography-ref-file form))
|
||||
(do
|
||||
(vswap! state conj [(:typography-ref-file form) :typographies (:typography-ref-id form)])
|
||||
(assoc form :typography-ref-file file-id))
|
||||
|
||||
(uuid? (:component-file form))
|
||||
(do
|
||||
(vswap! state conj [(:component-file form) :components (:component-id form)])
|
||||
(assoc form :component-file file-id))
|
||||
|
||||
:else
|
||||
form))
|
||||
|
||||
(process-group-of-assets [data [lib-id items]]
|
||||
;; NOTE: there is a possibility that a shape refers to a
;; non-existent file because the file was removed. In this
;; case we just ignore the asset.
|
||||
(if-let [lib (get-file cfg lib-id)]
|
||||
(reduce (partial process-asset lib) data items)
|
||||
data))
|
||||
|
||||
(process-asset [lib data [bucket asset-id]]
|
||||
(let [asset (get-in lib [:data bucket asset-id])
|
||||
;; Add a special case for colors that need to have
;; the :file-id prop correctly set (pending the
;; refactor that will remove it).
|
||||
asset (cond-> asset
|
||||
(= bucket :colors) (assoc :file-id file-id))]
|
||||
(update data bucket assoc asset-id asset)))]
|
||||
|
||||
(let [assets (volatile! [])]
|
||||
(walk/postwalk #(cond-> % (map? %) (walk-map-form assets)) data)
|
||||
(->> (deref assets)
|
||||
(filter #(as-> (first %) $ (and (uuid? $) (not= $ file-id))))
|
||||
(d/group-by first rest)
|
||||
(reduce (partial process-group-of-assets) data)))))
|
||||
|
||||
(defn- fix-version
|
||||
[file]
|
||||
(let [file (fmg/fix-version file)]
|
||||
;; FIXME: We're temporarily activating all migrations because a
|
||||
;; problem in the environments messed up with the version numbers
|
||||
;; When this problem is fixed delete the following line
|
||||
(if (> (:version file) 22)
|
||||
(assoc file :version 22)
|
||||
file)))
|
||||
|
||||
(defn process-file
|
||||
[{:keys [id] :as file}]
|
||||
(-> file
|
||||
(fix-version)
|
||||
(update :data (fn [fdata]
|
||||
(-> fdata
|
||||
(assoc :id id)
|
||||
(dissoc :recent-colors))))
|
||||
(fmg/migrate-file)
|
||||
(update :data (fn [fdata]
|
||||
(-> fdata
|
||||
(update :pages-index relink-shapes)
|
||||
(update :components relink-shapes)
|
||||
(update :media relink-media)
|
||||
(update :colors relink-colors)
|
||||
(d/without-nils))))))
|
||||
|
||||
(defn- upsert-file!
|
||||
[conn file]
|
||||
(let [sql (str "INSERT INTO file (id, project_id, name, revn, version, is_shared, data, created_at, modified_at) "
|
||||
"VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?) "
|
||||
"ON CONFLICT (id) DO UPDATE SET data=?, version=?")]
|
||||
(db/exec-one! conn [sql
|
||||
(:id file)
|
||||
(:project-id file)
|
||||
(:name file)
|
||||
(:revn file)
|
||||
(:version file)
|
||||
(:is-shared file)
|
||||
(:data file)
|
||||
(:created-at file)
|
||||
(:modified-at file)
|
||||
(:data file)
|
||||
(:version file)])))
|
||||
|
||||
(defn persist-file!
|
||||
"Applies all the final validations and perist the file."
|
||||
[{:keys [::db/conn ::timestamp] :as cfg} {:keys [id] :as file}]
|
||||
|
||||
(dm/assert!
|
||||
"expected valid timestamp"
|
||||
(dt/instant? timestamp))
|
||||
|
||||
(let [file (-> file
|
||||
(assoc :created-at timestamp)
|
||||
(assoc :modified-at timestamp)
|
||||
(assoc :ignore-sync-until (dt/plus timestamp (dt/duration {:seconds 5})))
|
||||
(update :features
|
||||
(fn [features]
|
||||
(let [features (cfeat/check-supported-features! features)]
|
||||
(-> (::features cfg #{})
|
||||
(set/difference cfeat/frontend-only-features)
|
||||
(set/union features))))))
|
||||
|
||||
_ (when (contains? cf/flags :file-schema-validation)
|
||||
(fval/validate-file-schema! file))
|
||||
|
||||
_ (when (contains? cf/flags :soft-file-schema-validation)
|
||||
(let [result (ex/try! (fval/validate-file-schema! file))]
|
||||
(when (ex/exception? result)
|
||||
(l/error :hint "file schema validation error" :cause result))))
|
||||
|
||||
file (if (contains? (:features file) "fdata/objects-map")
|
||||
(feat.fdata/enable-objects-map file)
|
||||
file)
|
||||
|
||||
file (if (contains? (:features file) "fdata/pointer-map")
|
||||
(binding [pmap/*tracked* (pmap/create-tracked)]
|
||||
(let [file (feat.fdata/enable-pointer-map file)]
|
||||
(feat.fdata/persist-pointers! cfg id)
|
||||
file))
|
||||
file)
|
||||
|
||||
params (-> file
|
||||
(update :features db/encode-pgarray conn "text")
|
||||
(update :data blob/encode))]
|
||||
|
||||
(if (::overwrite cfg)
|
||||
(upsert-file! conn params)
|
||||
(db/insert! conn :file params ::db/return-keys false))
|
||||
|
||||
file))
|
||||
|
||||
(defn apply-pending-migrations!
|
||||
"Apply alredy registered pending migrations to files"
|
||||
[cfg]
|
||||
(doseq [[feature file-id] (-> *state* deref :pending-to-migrate)]
|
||||
(case feature
|
||||
"components/v2"
|
||||
(feat.compv2/migrate-file! cfg file-id
|
||||
:validate? (::validate cfg true)
|
||||
:skip-on-graphic-error? true)
|
||||
|
||||
"fdata/shape-data-type"
|
||||
nil
|
||||
|
||||
(ex/raise :type :internal
|
||||
:code :no-migration-defined
|
||||
:hint (str/ffmt "no migation for feature '%' on file importation" feature)
|
||||
:feature feature))))
|
||||
779
backend/src/app/binfile/v1.clj
Normal file
@@ -0,0 +1,779 @@
|
||||
;; This Source Code Form is subject to the terms of the Mozilla Public
|
||||
;; License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
;;
|
||||
;; Copyright (c) KALEIDOS INC
|
||||
|
||||
(ns app.binfile.v1
|
||||
"A custom, perfromance and efficiency focused binfile format impl"
|
||||
(:refer-clojure :exclude [assert])
|
||||
(:require
|
||||
[app.binfile.common :as bfc]
|
||||
[app.common.data :as d]
|
||||
[app.common.data.macros :as dm]
|
||||
[app.common.exceptions :as ex]
|
||||
[app.common.features :as cfeat]
|
||||
[app.common.fressian :as fres]
|
||||
[app.common.logging :as l]
|
||||
[app.common.spec :as us]
|
||||
[app.common.types.file :as ctf]
|
||||
[app.common.uuid :as uuid]
|
||||
[app.config :as cf]
|
||||
[app.db :as db]
|
||||
[app.loggers.audit :as-alias audit]
|
||||
[app.loggers.webhooks :as-alias webhooks]
|
||||
[app.media :as media]
|
||||
[app.rpc :as-alias rpc]
|
||||
[app.rpc.commands.teams :as teams]
|
||||
[app.rpc.doc :as-alias doc]
|
||||
[app.storage :as sto]
|
||||
[app.storage.tmp :as tmp]
|
||||
[app.tasks.file-gc]
|
||||
[app.util.events :as events]
|
||||
[app.util.time :as dt]
|
||||
[app.worker :as-alias wrk]
|
||||
[clojure.java.io :as jio]
|
||||
[clojure.set :as set]
|
||||
[clojure.spec.alpha :as s]
|
||||
[cuerdas.core :as str]
|
||||
[datoteka.io :as io]
|
||||
[promesa.util :as pu]
|
||||
[yetti.adapter :as yt])
|
||||
(:import
|
||||
com.github.luben.zstd.ZstdIOException
|
||||
com.github.luben.zstd.ZstdInputStream
|
||||
com.github.luben.zstd.ZstdOutputStream
|
||||
java.io.DataInputStream
|
||||
java.io.DataOutputStream
|
||||
java.io.InputStream
|
||||
java.io.OutputStream))
|
||||
|
||||
(set! *warn-on-reflection* true)
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; DEFAULTS
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
;; Threshold (in bytes; 2 MiB) at which we switch from in-memory
;; byte arrays to temporary files.
|
||||
(def temp-file-threshold
|
||||
(* 1024 1024 2))
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; LOW LEVEL STREAM IO API
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
(def ^:const buffer-size (:xnio/buffer-size yt/defaults))
|
||||
(def ^:const penpot-magic-number 800099563638710213)
|
||||
|
||||
|
||||
;; A maximum (storage) object size allowed: 100MiB
|
||||
(def ^:const max-object-size
|
||||
(* 1024 1024 100))
|
||||
|
||||
(def ^:dynamic *position* nil)
|
||||
|
||||
(defn get-mark
|
||||
[id]
|
||||
(case id
|
||||
:header 1
|
||||
:stream 2
|
||||
:uuid 3
|
||||
:label 4
|
||||
:obj 5
|
||||
(ex/raise :type :validation
|
||||
:code :invalid-mark-id
|
||||
:hint (format "invalid mark id %s" id))))
|
||||
|
||||
(defmacro assert
|
||||
[expr hint]
|
||||
`(when-not ~expr
|
||||
(ex/raise :type :validation
|
||||
:code :unexpected-condition
|
||||
:hint ~hint)))
|
||||
|
||||
(defmacro assert-mark
|
||||
[v type]
|
||||
`(let [expected# (get-mark ~type)
|
||||
val# (long ~v)]
|
||||
(when (not= val# expected#)
|
||||
(ex/raise :type :validation
|
||||
:code :unexpected-mark
|
||||
:hint (format "received mark %s, expected %s" val# expected#)))))
|
||||
|
||||
(defmacro assert-label
|
||||
[expr label]
|
||||
`(let [v# ~expr]
|
||||
(when (not= v# ~label)
|
||||
(ex/raise :type :assertion
|
||||
:code :unexpected-label
|
||||
:hint (format "received label %s, expected %s" v# ~label)))))
|
||||
|
||||
;; --- PRIMITIVE IO
|
||||
|
||||
(defn write-byte!
|
||||
[^DataOutputStream output data]
|
||||
(l/trace :fn "write-byte!" :data data :position @*position* ::l/sync? true)
|
||||
(.writeByte output (byte data))
|
||||
(swap! *position* inc))
|
||||
|
||||
(defn read-byte!
|
||||
[^DataInputStream input]
|
||||
(let [v (.readByte input)]
|
||||
(l/trace :fn "read-byte!" :val v :position @*position* ::l/sync? true)
|
||||
(swap! *position* inc)
|
||||
v))
|
||||
|
||||
(defn write-long!
|
||||
[^DataOutputStream output data]
|
||||
(l/trace :fn "write-long!" :data data :position @*position* ::l/sync? true)
|
||||
(.writeLong output (long data))
|
||||
(swap! *position* + 8))
|
||||
|
||||
|
||||
(defn read-long!
|
||||
[^DataInputStream input]
|
||||
(let [v (.readLong input)]
|
||||
(l/trace :fn "read-long!" :val v :position @*position* ::l/sync? true)
|
||||
(swap! *position* + 8)
|
||||
v))
|
||||
|
||||
(defn write-bytes!
|
||||
[^DataOutputStream output ^bytes data]
|
||||
(let [size (alength data)]
|
||||
(l/trace :fn "write-bytes!" :size size :position @*position* ::l/sync? true)
|
||||
(.write output data 0 size)
|
||||
(swap! *position* + size)))
|
||||
|
||||
(defn read-bytes!
|
||||
[^InputStream input ^bytes buff]
|
||||
(let [size (alength buff)
|
||||
readed (.readNBytes input buff 0 size)]
|
||||
(l/trace :fn "read-bytes!" :expected (alength buff) :readed readed :position @*position* ::l/sync? true)
|
||||
(swap! *position* + readed)
|
||||
readed))
|
||||
|
||||
;; --- COMPOSITE IO
|
||||
|
||||
(defn write-uuid!
|
||||
[^DataOutputStream output id]
|
||||
(l/trace :fn "write-uuid!" :position @*position* :WRITTEN? (.size output) ::l/sync? true)
|
||||
|
||||
(doto output
|
||||
(write-byte! (get-mark :uuid))
|
||||
(write-long! (uuid/get-word-high id))
|
||||
(write-long! (uuid/get-word-low id))))
|
||||
|
||||
(defn read-uuid!
|
||||
[^DataInputStream input]
|
||||
(l/trace :fn "read-uuid!" :position @*position* ::l/sync? true)
|
||||
(let [m (read-byte! input)]
|
||||
(assert-mark m :uuid)
|
||||
(let [a (read-long! input)
|
||||
b (read-long! input)]
|
||||
(uuid/custom a b))))
|
||||
|
||||
(defn write-obj!
|
||||
[^DataOutputStream output data]
|
||||
(l/trace :fn "write-obj!" :position @*position* ::l/sync? true)
|
||||
(let [^bytes data (fres/encode data)]
|
||||
(doto output
|
||||
(write-byte! (get-mark :obj))
|
||||
(write-long! (alength data))
|
||||
(write-bytes! data))))
|
||||
|
||||
(defn read-obj!
|
||||
[^DataInputStream input]
|
||||
(l/trace :fn "read-obj!" :position @*position* ::l/sync? true)
|
||||
(let [m (read-byte! input)]
|
||||
(assert-mark m :obj)
|
||||
(let [size (read-long! input)]
|
||||
(assert (pos? size) "incorrect header size found on reading header")
|
||||
(let [buff (byte-array size)]
|
||||
(read-bytes! input buff)
|
||||
(fres/decode buff)))))
|
||||
|
||||
(defn write-label!
|
||||
[^DataOutputStream output label]
|
||||
(l/trace :fn "write-label!" :label label :position @*position* ::l/sync? true)
|
||||
(doto output
|
||||
(write-byte! (get-mark :label))
|
||||
(write-obj! label)))
|
||||
|
||||
(defn read-label!
|
||||
[^DataInputStream input]
|
||||
(l/trace :fn "read-label!" :position @*position* ::l/sync? true)
|
||||
(let [m (read-byte! input)]
|
||||
(assert-mark m :label)
|
||||
(read-obj! input)))
|
||||
|
||||
(defn write-header!
|
||||
[^OutputStream output version]
|
||||
(l/trace :fn "write-header!"
|
||||
:version version
|
||||
:position @*position*
|
||||
::l/sync? true)
|
||||
(let [vers (-> version name (subs 1) parse-long)
|
||||
output (io/data-output-stream output)]
|
||||
(doto output
|
||||
(write-byte! (get-mark :header))
|
||||
(write-long! penpot-magic-number)
|
||||
(write-long! vers))))
|
||||
|
||||
(defn read-header!
|
||||
[^InputStream input]
|
||||
(l/trace :fn "read-header!" :position @*position* ::l/sync? true)
|
||||
(let [input (io/data-input-stream input)
|
||||
mark (read-byte! input)
|
||||
mnum (read-long! input)
|
||||
vers (read-long! input)]
|
||||
|
||||
(when (or (not= mark (get-mark :header))
|
||||
(not= mnum penpot-magic-number))
|
||||
(ex/raise :type :validation
|
||||
:code :invalid-penpot-file
|
||||
:hint "invalid penpot file"))
|
||||
|
||||
(keyword (str "v" vers))))
|
||||
|
||||
(defn copy-stream!
|
||||
[^OutputStream output ^InputStream input ^long size]
|
||||
(let [written (io/copy! input output :size size)]
|
||||
(l/trace :fn "copy-stream!" :position @*position* :size size :written written ::l/sync? true)
|
||||
(swap! *position* + written)
|
||||
written))
|
||||
|
||||
(defn write-stream!
|
||||
[^DataOutputStream output stream size]
|
||||
(l/trace :fn "write-stream!" :position @*position* ::l/sync? true :size size)
|
||||
(doto output
|
||||
(write-byte! (get-mark :stream))
|
||||
(write-long! size))
|
||||
|
||||
(copy-stream! output stream size))
|
||||
|
||||
(defn read-stream!
|
||||
[^DataInputStream input]
|
||||
(l/trace :fn "read-stream!" :position @*position* ::l/sync? true)
|
||||
(let [m (read-byte! input)
|
||||
s (read-long! input)
|
||||
p (tmp/tempfile :prefix "penpot.binfile.")]
|
||||
(assert-mark m :stream)
|
||||
|
||||
(when (> s max-object-size)
|
||||
(ex/raise :type :validation
|
||||
:code :max-file-size-reached
|
||||
:hint (str/ffmt "unable to import storage object with size % bytes" s)))
|
||||
|
||||
(if (> s temp-file-threshold)
|
||||
(with-open [^OutputStream output (io/output-stream p)]
|
||||
(let [readed (io/copy! input output :offset 0 :size s)]
|
||||
(l/trace :fn "read-stream*!" :expected s :readed readed :position @*position* ::l/sync? true)
|
||||
(swap! *position* + readed)
|
||||
[s p]))
|
||||
[s (io/read-as-bytes input :size s)])))
|
||||
|
||||
(defmacro assert-read-label!
|
||||
[input expected-label]
|
||||
`(let [readed# (read-label! ~input)
|
||||
expected# ~expected-label]
|
||||
(when (not= readed# expected#)
|
||||
(ex/raise :type :validation
|
||||
:code :unexpected-label
|
||||
:hint (format "unexpected label found: %s, expected: %s" readed# expected#)))))
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; API
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
;; --- HELPERS
|
||||
|
||||
(defn zstd-input-stream
|
||||
^InputStream
|
||||
[input]
|
||||
(ZstdInputStream. ^InputStream input))
|
||||
|
||||
(defn zstd-output-stream
|
||||
^OutputStream
|
||||
[output & {:keys [level] :or {level 0}}]
|
||||
(ZstdOutputStream. ^OutputStream output (int level)))
|
||||
|
||||
(defn- get-files
|
||||
[cfg ids]
|
||||
(db/run! cfg (fn [{:keys [::db/conn]}]
|
||||
(let [sql (str "SELECT id FROM file "
|
||||
" WHERE id = ANY(?) ")
|
||||
ids (db/create-array conn "uuid" ids)]
|
||||
(->> (db/exec! conn [sql ids])
|
||||
(into [] (map :id))
|
||||
(not-empty))))))
|
||||
|
||||
;; --- EXPORT WRITER
|
||||
|
||||
(defmulti write-export ::version)
|
||||
(defmulti write-section ::section)
|
||||
|
||||
(defn write-export!
|
||||
[{:keys [::include-libraries ::embed-assets] :as cfg}]
|
||||
(when (and include-libraries embed-assets)
|
||||
(throw (IllegalArgumentException.
|
||||
"the `include-libraries` and `embed-assets` are mutally excluding options")))
|
||||
|
||||
(write-export cfg))
|
||||
|
||||
(defmethod write-export :default
|
||||
[{:keys [::output] :as options}]
|
||||
(write-header! output :v1)
|
||||
(pu/with-open [output (zstd-output-stream output :level 12)
|
||||
output (io/data-output-stream output)]
|
||||
(binding [bfc/*state* (volatile! {})]
|
||||
(run! (fn [section]
|
||||
(l/dbg :hint "write section" :section section ::l/sync? true)
|
||||
(write-label! output section)
|
||||
(let [options (-> options
|
||||
(assoc ::output output)
|
||||
(assoc ::section section))]
|
||||
(binding [bfc/*options* options]
|
||||
(write-section options))))
|
||||
|
||||
[:v1/metadata :v1/files :v1/rels :v1/sobjects]))))
|
||||
|
||||
(defmethod write-section :v1/metadata
|
||||
[{:keys [::output ::ids ::include-libraries] :as cfg}]
|
||||
(if-let [fids (get-files cfg ids)]
|
||||
(let [lids (when include-libraries
|
||||
(bfc/get-libraries cfg ids))
|
||||
ids (into fids lids)]
|
||||
(write-obj! output {:version cf/version :files ids})
|
||||
(vswap! bfc/*state* assoc :files ids))
|
||||
(ex/raise :type :not-found
|
||||
:code :files-not-found
|
||||
:hint "unable to retrieve files for export")))
|
||||
|
||||
(defmethod write-section :v1/files
|
||||
[{:keys [::output ::embed-assets ::include-libraries] :as cfg}]
|
||||
|
||||
;; Initialize SIDS with empty vector
|
||||
(vswap! bfc/*state* assoc :sids [])
|
||||
|
||||
(doseq [file-id (-> bfc/*state* deref :files)]
|
||||
(let [detach? (and (not embed-assets) (not include-libraries))
|
||||
thumbnails (->> (bfc/get-file-object-thumbnails cfg file-id)
|
||||
(mapv #(dissoc % :file-id)))
|
||||
|
||||
file (cond-> (bfc/get-file cfg file-id)
|
||||
detach?
|
||||
(-> (ctf/detach-external-references file-id)
|
||||
(dissoc :libraries))
|
||||
|
||||
embed-assets
|
||||
(update :data #(bfc/embed-assets cfg % file-id))
|
||||
|
||||
:always
|
||||
(assoc :thumbnails thumbnails))
|
||||
|
||||
media (bfc/get-file-media cfg file)]
|
||||
|
||||
(l/dbg :hint "write penpot file"
|
||||
:id (str file-id)
|
||||
:name (:name file)
|
||||
:thumbnails (count thumbnails)
|
||||
:features (:features file)
|
||||
:media (count media)
|
||||
::l/sync? true)
|
||||
|
||||
(doseq [item media]
|
||||
(l/dbg :hint "write penpot file media object" :id (:id item) ::l/sync? true))
|
||||
|
||||
(doseq [item thumbnails]
|
||||
(l/dbg :hint "write penpot file object thumbnail" :media-id (str (:media-id item)) ::l/sync? true))
|
||||
|
||||
(doto output
|
||||
(write-obj! file)
|
||||
(write-obj! media))
|
||||
|
||||
(vswap! bfc/*state* update :sids into bfc/xf-map-media-id media)
|
||||
(vswap! bfc/*state* update :sids into bfc/xf-map-media-id thumbnails))))
|
||||
|
||||
(defmethod write-section :v1/rels
|
||||
[{:keys [::output ::include-libraries] :as cfg}]
|
||||
(let [ids (-> bfc/*state* deref :files set)
|
||||
rels (when include-libraries
|
||||
(bfc/get-files-rels cfg ids))]
|
||||
(l/dbg :hint "found rels" :total (count rels) ::l/sync? true)
|
||||
(write-obj! output rels)))
|
||||
|
||||
(defmethod write-section :v1/sobjects
|
||||
[{:keys [::sto/storage ::output]}]
|
||||
(let [sids (-> bfc/*state* deref :sids)
|
||||
storage (media/configure-assets-storage storage)]
|
||||
|
||||
(l/dbg :hint "found sobjects"
|
||||
:items (count sids)
|
||||
::l/sync? true)
|
||||
|
||||
;; Write all collected storage objects
|
||||
(write-obj! output sids)
|
||||
|
||||
(doseq [id sids]
|
||||
(let [{:keys [size] :as obj} (sto/get-object storage id)]
|
||||
(l/dbg :hint "write sobject" :id (str id) ::l/sync? true)
|
||||
|
||||
(doto output
|
||||
(write-uuid! id)
|
||||
(write-obj! (meta obj)))
|
||||
|
||||
(pu/with-open [stream (sto/get-object-data storage obj)]
|
||||
(let [written (write-stream! output stream size)]
|
||||
(when (not= written size)
|
||||
(ex/raise :type :validation
|
||||
:code :mismatch-readed-size
|
||||
:hint (str/ffmt "found unexpected object size; size=% written=%" size written)))))))))
|
||||
|
||||
;; --- EXPORT READER
|
||||
|
||||
(defmulti read-import ::version)
|
||||
(defmulti read-section ::section)
|
||||
|
||||
(s/def ::profile-id ::us/uuid)
|
||||
(s/def ::project-id ::us/uuid)
|
||||
(s/def ::input io/input-stream?)
|
||||
(s/def ::overwrite? (s/nilable ::us/boolean))
|
||||
(s/def ::ignore-index-errors? (s/nilable ::us/boolean))
|
||||
|
||||
;; FIXME: replace with schema
|
||||
(s/def ::read-import-options
|
||||
(s/keys :req [::db/pool ::sto/storage ::project-id ::profile-id ::input]
|
||||
:opt [::overwrite? ::ignore-index-errors?]))
|
||||
|
||||
(defn read-import!
|
||||
"Do the importation of the specified resource in penpot custom binary
|
||||
format. There are some options for customize the importation
|
||||
behavior:
|
||||
|
||||
`::bfc/overwrite`: if true, instead of creating new files and remapping id references,
|
||||
it reuses all ids and updates existing objects; defaults to `false`."
|
||||
[{:keys [::input ::bfc/timestamp] :or {timestamp (dt/now)} :as options}]
|
||||
|
||||
(dm/assert!
|
||||
"expected input stream"
|
||||
(io/input-stream? input))
|
||||
|
||||
(dm/assert!
|
||||
"expected valid instant"
|
||||
(dt/instant? timestamp))
|
||||
|
||||
(let [version (read-header! input)]
|
||||
(read-import (assoc options ::version version ::bfc/timestamp timestamp))))
|
||||
|
||||
(defn- read-import-v1
|
||||
[{:keys [::db/conn ::project-id ::profile-id ::input] :as cfg}]
|
||||
(db/exec-one! conn ["SET LOCAL idle_in_transaction_session_timeout = 0"])
|
||||
(db/exec-one! conn ["SET CONSTRAINTS ALL DEFERRED"])
|
||||
|
||||
(pu/with-open [input (zstd-input-stream input)
|
||||
input (io/data-input-stream input)]
|
||||
(binding [bfc/*state* (volatile! {:media [] :index {}})]
|
||||
(let [team (teams/get-team conn
|
||||
:profile-id profile-id
|
||||
:project-id project-id)
|
||||
|
||||
features (cfeat/get-team-enabled-features cf/flags team)]
|
||||
|
||||
;; Process all sections
|
||||
(run! (fn [section]
|
||||
(l/dbg :hint "reading section" :section section ::l/sync? true)
|
||||
(assert-read-label! input section)
|
||||
(let [options (-> cfg
|
||||
(assoc ::bfc/features features)
|
||||
(assoc ::section section)
|
||||
(assoc ::input input))]
|
||||
(binding [bfc/*options* options]
|
||||
(events/tap :progress {:op :import :section section})
|
||||
(read-section options))))
|
||||
[:v1/metadata :v1/files :v1/rels :v1/sobjects])
|
||||
|
||||
(bfc/apply-pending-migrations! cfg)
|
||||
|
||||
;; Knowing that the ids of the created files are in index,
|
||||
;; just lookup them and return it as a set
|
||||
(let [files (-> bfc/*state* deref :files)]
|
||||
(into #{} (keep #(get-in @bfc/*state* [:index %])) files))))))
|
||||
|
||||
(defmethod read-import :v1
|
||||
[options]
|
||||
(db/tx-run! options read-import-v1))
|
||||
|
||||
(defmethod read-section :v1/metadata
|
||||
[{:keys [::input]}]
|
||||
(let [{:keys [version files]} (read-obj! input)]
|
||||
(l/dbg :hint "metadata readed"
|
||||
:version (:full version)
|
||||
:files (mapv str files)
|
||||
::l/sync? true)
|
||||
(vswap! bfc/*state* update :index bfc/update-index files)
|
||||
(vswap! bfc/*state* assoc :version version :files files)))
|
||||
|
||||
(defn- remap-thumbnails
|
||||
[thumbnails file-id]
|
||||
(mapv (fn [thumbnail]
|
||||
(-> thumbnail
|
||||
(assoc :file-id file-id)
|
||||
(update :object-id #(str/replace-first % #"^(.*?)/" (str file-id "/")))))
|
||||
thumbnails))
|
||||
|
||||
(defn- clean-features
|
||||
[file]
|
||||
(update file :features (fn [features]
|
||||
(if (set? features)
|
||||
(-> features
|
||||
(cfeat/migrate-legacy-features)
|
||||
(set/difference cfeat/backend-only-features))
|
||||
#{}))))
|
||||
|
||||
(defmethod read-section :v1/files
|
||||
[{:keys [::db/conn ::input ::project-id ::bfc/overwrite ::name] :as system}]
|
||||
|
||||
(doseq [[idx expected-file-id] (d/enumerate (-> bfc/*state* deref :files))]
|
||||
(let [file (read-obj! input)
|
||||
media (read-obj! input)
|
||||
|
||||
file-id (:id file)
|
||||
file-id' (bfc/lookup-index file-id)
|
||||
|
||||
file (clean-features file)
|
||||
thumbnails (:thumbnails file)]
|
||||
|
||||
(when (not= file-id expected-file-id)
|
||||
(ex/raise :type :validation
|
||||
:code :inconsistent-penpot-file
|
||||
:found-id file-id
|
||||
:expected-id expected-file-id
|
||||
:hint "the penpot file seems corrupt, found unexpected uuid (file-id)"))
|
||||
|
||||
(l/dbg :hint "processing file"
|
||||
:id (str file-id)
|
||||
:features (:features file)
|
||||
:version (-> file :data :version)
|
||||
:media (count media)
|
||||
:thumbnails (count thumbnails)
|
||||
::l/sync? true)
|
||||
|
||||
(when (seq thumbnails)
|
||||
(let [thumbnails (remap-thumbnails thumbnails file-id')]
|
||||
(l/dbg :hint "updated index with thumbnails" :total (count thumbnails) ::l/sync? true)
|
||||
(vswap! bfc/*state* update :thumbnails bfc/into-vec thumbnails)))
|
||||
|
||||
(when (seq media)
|
||||
;; Update index with media
|
||||
(l/dbg :hint "update index with media" :total (count media) ::l/sync? true)
|
||||
(vswap! bfc/*state* update :index bfc/update-index (map :id media))
|
||||
|
||||
;; Store file media for later insertion
|
||||
(l/dbg :hint "update media references" ::l/sync? true)
|
||||
(vswap! bfc/*state* update :media into (map #(update % :id bfc/lookup-index)) media))
|
||||
|
||||
(let [file (-> file
|
||||
(assoc :id file-id')
|
||||
(cond-> (and (= idx 0) (some? name))
|
||||
(assoc :name name))
|
||||
(assoc :project-id project-id)
|
||||
(dissoc :thumbnails)
|
||||
(bfc/process-file))]
|
||||
|
||||
;; All features that are enabled and require explicit migration are
;; added to the state for a later migration step.
|
||||
(doseq [feature (-> (::bfc/features system)
|
||||
(set/difference cfeat/no-migration-features)
|
||||
(set/difference (:features file)))]
|
||||
(vswap! bfc/*state* update :pending-to-migrate (fnil conj []) [feature file-id']))
|
||||
|
||||
(l/dbg :hint "create file" :id (str file-id') ::l/sync? true)
|
||||
(bfc/persist-file! system file)
|
||||
|
||||
(when overwrite
|
||||
(db/delete! conn :file-thumbnail {:file-id file-id'}))
|
||||
|
||||
file-id'))))
|
||||
|
||||
(defmethod read-section :v1/rels
|
||||
[{:keys [::db/conn ::input ::bfc/timestamp]}]
|
||||
(let [rels (read-obj! input)
|
||||
ids (into #{} (-> bfc/*state* deref :files))]
|
||||
;; Insert all file relations
|
||||
(doseq [{:keys [library-file-id] :as rel} rels]
|
||||
(let [rel (-> rel
|
||||
(assoc :synced-at timestamp)
|
||||
(update :file-id bfc/lookup-index)
|
||||
(update :library-file-id bfc/lookup-index))]
|
||||
|
||||
(if (contains? ids library-file-id)
|
||||
(do
|
||||
(l/dbg :hint "create file library link"
|
||||
:file-id (:file-id rel)
|
||||
:lib-id (:library-file-id rel)
|
||||
::l/sync? true)
|
||||
(db/insert! conn :file-library-rel rel))
|
||||
|
||||
(l/warn :hint "ignoring file library link"
|
||||
:file-id (:file-id rel)
|
||||
:lib-id (:library-file-id rel)
|
||||
::l/sync? true))))))
|
||||
|
||||
(defmethod read-section :v1/sobjects
|
||||
[{:keys [::sto/storage ::db/conn ::input ::bfc/overwrite ::bfc/timestamp]}]
|
||||
(let [storage (media/configure-assets-storage storage)
|
||||
ids (read-obj! input)
|
||||
thumb? (into #{} (map :media-id) (:thumbnails @bfc/*state*))]
|
||||
|
||||
(doseq [expected-storage-id ids]
|
||||
(let [id (read-uuid! input)
|
||||
mdata (read-obj! input)]
|
||||
|
||||
(when (not= id expected-storage-id)
|
||||
(ex/raise :type :validation
|
||||
:code :inconsistent-penpot-file
|
||||
:hint "the penpot file seems corrupt, found unexpected uuid (storage-object-id)"))
|
||||
|
||||
(l/dbg :hint "readed storage object" :id (str id) ::l/sync? true)
|
||||
|
||||
(let [[size resource] (read-stream! input)
|
||||
hash (sto/calculate-hash resource)
|
||||
content (-> (sto/content resource size)
|
||||
(sto/wrap-with-hash hash))
|
||||
|
||||
params (-> mdata
|
||||
(assoc ::sto/content content)
|
||||
(assoc ::sto/deduplicate? true)
|
||||
(assoc ::sto/touched-at timestamp))
|
||||
|
||||
params (if (thumb? id)
|
||||
(assoc params :bucket "file-object-thumbnail")
|
||||
(assoc params :bucket "file-media-object"))
|
||||
|
||||
sobject (sto/put-object! storage params)]
|
||||
|
||||
(l/dbg :hint "persisted storage object"
|
||||
:old-id (str id)
|
||||
:new-id (str (:id sobject))
|
||||
:is-thumbnail (boolean (thumb? id))
|
||||
::l/sync? true)
|
||||
|
||||
(vswap! bfc/*state* update :index assoc id (:id sobject)))))
|
||||
|
||||
(doseq [item (:media @bfc/*state*)]
|
||||
(l/dbg :hint "inserting file media object"
|
||||
:id (str (:id item))
|
||||
:file-id (str (:file-id item))
|
||||
::l/sync? true)
|
||||
|
||||
(let [file-id (bfc/lookup-index (:file-id item))]
|
||||
(if (= file-id (:file-id item))
|
||||
(l/warn :hint "ignoring file media object" :file-id (str file-id) ::l/sync? true)
|
||||
(db/insert! conn :file-media-object
|
||||
(-> item
|
||||
(assoc :file-id file-id)
|
||||
(d/update-when :media-id bfc/lookup-index)
|
||||
(d/update-when :thumbnail-id bfc/lookup-index))
|
||||
{::db/on-conflict-do-nothing? overwrite}))))
|
||||
|
||||
(doseq [item (:thumbnails @bfc/*state*)]
|
||||
(let [item (update item :media-id bfc/lookup-index)]
|
||||
(l/dbg :hint "inserting file object thumbnail"
|
||||
:file-id (str (:file-id item))
|
||||
:media-id (str (:media-id item))
|
||||
:object-id (:object-id item)
|
||||
::l/sync? true)
|
||||
(db/insert! conn :file-tagged-object-thumbnail item
|
||||
{::db/on-conflict-do-nothing? overwrite})))))
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; HIGH LEVEL API
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;

(defn export-files!
  "Export the specified files in the custom Penpot binary format. Some
  options are available for customizing the output:

  `::include-libraries`: in addition to the specified files, all the
  linked libraries will also be included (including transitive
  dependencies).

  `::embed-assets`: instead of including the libraries, embed all
  assets used from external libraries into the exported file itself."

  [{:keys [::ids] :as cfg} output]

  (dm/assert!
   "expected a set of uuids for `::ids` parameter"
   (and (set? ids)
        (every? uuid? ids)))

  (dm/assert!
   "expected instance of jio/IOFactory for `output`"
   (satisfies? jio/IOFactory output))

  (let [id (uuid/next)
        tp (dt/tpoint)
        ab (volatile! false)
        cs (volatile! nil)]
    (try
      (l/info :hint "start exportation" :export-id (str id))
      (pu/with-open [output (io/output-stream output)]
        (binding [*position* (atom 0)]
          (write-export! (assoc cfg ::output output))))

      (catch java.io.IOException _cause
        ;; Do nothing, EOF means the client closed the connection abruptly
        (vreset! ab true)
        nil)

      (catch Throwable cause
        (vreset! cs cause)
        (vreset! ab true)
        (throw cause))

      (finally
        (l/info :hint "exportation finished" :export-id (str id)
                :elapsed (str (inst-ms (tp)) "ms")
                :aborted @ab
                :cause @cs)))))
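
;; Illustrative usage sketch, not part of the diff: exporting a single file
;; together with its linked libraries. The `cfg` system map and the target
;; path are hypothetical placeholders; `::ids` must be a set of file uuids.
(comment
  (export-files!
   (assoc cfg
          ::ids #{(parse-uuid "c1f0a5c2-0000-0000-0000-000000000000")}
          ::include-libraries true)
   (java.io.File. "/tmp/export.penpot")))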

(defn import-files!
  [cfg input]

  (dm/assert!
   "expected valid profile-id and project-id on `cfg`"
   (and (uuid? (::profile-id cfg))
        (uuid? (::project-id cfg))))

  (dm/assert!
   "expected instance of jio/IOFactory for `input`"
   (satisfies? jio/IOFactory input))

  (let [id (uuid/next)
        tp (dt/tpoint)
        cs (volatile! nil)]

    (l/info :hint "import: started" :id (str id))
    (try
      (binding [*position* (atom 0)]
        (pu/with-open [input (io/input-stream input)]
          (read-import! (assoc cfg ::input input))))

      (catch ZstdIOException cause
        (ex/raise :type :validation
                  :code :invalid-penpot-file
                  :hint "invalid penpot file received: probably truncated"
                  :cause cause))

      (catch Throwable cause
        (vreset! cs cause)
        (throw cause))

      (finally
        (l/info :hint "import: terminated"
                :id (str id)
                :elapsed (dt/format-duration (tp))
                :error? (some? @cs))))))
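
;; Illustrative usage sketch, not part of the diff: importing a previously
;; exported binary into a project; `cfg`, `profile-id` and `project-id` are
;; assumed to be bound to a valid system map and existing uuids.
(comment
  (import-files!
   (assoc cfg
          ::profile-id profile-id
          ::project-id project-id)
   (java.io.File. "/tmp/export.penpot")))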

backend/src/app/binfile/v2.clj (new file, 442 lines)
@@ -0,0 +1,442 @@
|
||||
;; This Source Code Form is subject to the terms of the Mozilla Public
|
||||
;; License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
;;
|
||||
;; Copyright (c) KALEIDOS INC
|
||||
|
||||
(ns app.binfile.v2
|
||||
"A sqlite3 based binary file exportation with support for exportation
|
||||
of entire team (or multiple teams) at once."
|
||||
(:refer-clojure :exclude [read])
|
||||
(:require
|
||||
[app.binfile.common :as bfc]
|
||||
[app.common.data :as d]
|
||||
[app.common.features :as cfeat]
|
||||
[app.common.logging :as l]
|
||||
[app.common.transit :as t]
|
||||
[app.common.uuid :as uuid]
|
||||
[app.config :as cf]
|
||||
[app.db :as db]
|
||||
[app.db.sql :as sql]
|
||||
[app.loggers.audit :as-alias audit]
|
||||
[app.loggers.webhooks :as-alias webhooks]
|
||||
[app.media :as media]
|
||||
[app.storage :as sto]
|
||||
[app.storage.tmp :as tmp]
|
||||
[app.util.events :as events]
|
||||
[app.util.time :as dt]
|
||||
[app.worker :as-alias wrk]
|
||||
[clojure.set :as set]
|
||||
[cuerdas.core :as str]
|
||||
[datoteka.io :as io]
|
||||
[promesa.util :as pu])
|
||||
(:import
|
||||
java.sql.DriverManager))
|
||||
|
||||
(set! *warn-on-reflection* true)
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; LOW LEVEL API
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
(defn- create-database
|
||||
([cfg]
|
||||
(let [path (tmp/tempfile :prefix "penpot.binfile." :suffix ".sqlite")]
|
||||
(create-database cfg path)))
|
||||
([cfg path]
|
||||
(let [db (DriverManager/getConnection (str "jdbc:sqlite:" path))]
|
||||
(assoc cfg ::db db ::path path))))
|
||||
|
||||
(def ^:private
|
||||
sql:create-kvdata-table
|
||||
"CREATE TABLE kvdata (
|
||||
tag text NOT NULL,
|
||||
key text NOT NULL,
|
||||
val text NOT NULL,
|
||||
dat blob NULL
|
||||
)")
|
||||
|
||||
(def ^:private
|
||||
sql:create-kvdata-index
|
||||
"CREATE INDEX kvdata__tag_key__idx
|
||||
ON kvdata (tag, key)")
|
||||
|
||||
(defn- setup-schema!
|
||||
[{:keys [::db]}]
|
||||
(db/exec-one! db [sql:create-kvdata-table])
|
||||
(db/exec-one! db [sql:create-kvdata-index]))
|
||||
|
||||
(defn- write!
|
||||
[{:keys [::db]} tag k v & [data]]
|
||||
(db/insert! db :kvdata
|
||||
{:tag (d/name tag)
|
||||
:key (str k)
|
||||
:val (t/encode-str v {:type :json-verbose})
|
||||
:dat data}
|
||||
{::db/return-keys false}))
|
||||
|
||||
(defn- read-blob
|
||||
[{:keys [::db]} tag k]
|
||||
(let [obj (db/get db :kvdata
|
||||
{:tag (d/name tag)
|
||||
:key (str k)}
|
||||
{::sql/columns [:dat]})]
|
||||
(:dat obj)))
|
||||
|
||||
(defn- read-seq
|
||||
([{:keys [::db]} tag]
|
||||
(->> (db/query db :kvdata
|
||||
{:tag (d/name tag)}
|
||||
{::sql/columns [::val]})
|
||||
(map :val)
|
||||
(map t/decode-str)))
|
||||
([{:keys [::db]} tag k]
|
||||
(->> (db/query db :kvdata
|
||||
{:tag (d/name tag)
|
||||
:key (str k)}
|
||||
{::sql/columns [::val]})
|
||||
(map :val)
|
||||
(map t/decode-str))))
|
||||
|
||||
(defn- read-obj
|
||||
[{:keys [::db]} tag k]
|
||||
(let [obj (db/get db :kvdata
|
||||
{:tag (d/name tag)
|
||||
:key (str k)}
|
||||
{::sql/columns [:val]})]
|
||||
(-> obj :val t/decode-str)))
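
;; Illustrative sketch, not part of the diff: how the low-level helpers above
;; round-trip a value. Everything is stored in the single `kvdata` table keyed
;; by (tag, key), with the value transit-encoded in `val` and an optional
;; binary payload in `dat`. The bindings used here are hypothetical.
(comment
  (write! cfg :manifest "team-id" team-id)
  (read-obj cfg :manifest "team-id")         ;; => team-id
  (write! cfg :storage-object id mdata data)
  (read-blob cfg :storage-object id))        ;; => the bytes stored in `dat`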
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; IMPORT/EXPORT IMPL
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
(declare ^:private write-project!)
|
||||
(declare ^:private write-file!)
|
||||
|
||||
(defn- write-team!
|
||||
[cfg team-id]
|
||||
|
||||
(let [team (bfc/get-team cfg team-id)
|
||||
fonts (bfc/get-fonts cfg team-id)]
|
||||
|
||||
(events/tap :progress
|
||||
{:op :export
|
||||
:section :write-team
|
||||
:id team-id
|
||||
:name (:name team)})
|
||||
|
||||
(l/trc :hint "write" :obj "team"
|
||||
:id (str team-id)
|
||||
:fonts (count fonts))
|
||||
|
||||
(when-let [photo-id (:photo-id team)]
|
||||
(vswap! bfc/*state* update :storage-objects conj photo-id))
|
||||
|
||||
(vswap! bfc/*state* update :teams conj team-id)
|
||||
(vswap! bfc/*state* bfc/collect-storage-objects fonts)
|
||||
|
||||
(write! cfg :team team-id team)
|
||||
|
||||
(doseq [{:keys [id] :as font} fonts]
|
||||
(vswap! bfc/*state* update :team-font-variants conj id)
|
||||
(write! cfg :team-font-variant id font))))
|
||||
|
||||
(defn- write-project!
|
||||
[cfg project-id]
|
||||
(let [project (bfc/get-project cfg project-id)]
|
||||
(events/tap :progress
|
||||
{:op :export
|
||||
:section :write-project
|
||||
:id project-id
|
||||
:name (:name project)})
|
||||
(l/trc :hint "write" :obj "project" :id (str project-id))
|
||||
(write! cfg :project (str project-id) project)
|
||||
(vswap! bfc/*state* update :projects conj project-id)))
|
||||
|
||||
(defn- write-file!
|
||||
[cfg file-id]
|
||||
(let [file (bfc/get-file cfg file-id)
|
||||
thumbs (bfc/get-file-object-thumbnails cfg file-id)
|
||||
media (bfc/get-file-media cfg file)
|
||||
rels (bfc/get-files-rels cfg #{file-id})]
|
||||
|
||||
(events/tap :progress
|
||||
{:op :export
|
||||
:section :write-file
|
||||
:id file-id
|
||||
:name (:name file)})
|
||||
|
||||
(vswap! bfc/*state* (fn [state]
|
||||
(-> state
|
||||
(update :files conj file-id)
|
||||
(update :file-media-objects into bfc/xf-map-id media)
|
||||
(bfc/collect-storage-objects thumbs)
|
||||
(bfc/collect-storage-objects media))))
|
||||
|
||||
(write! cfg :file file-id file)
|
||||
(write! cfg :file-rels file-id rels)
|
||||
|
||||
(run! (partial write! cfg :file-media-object file-id) media)
|
||||
(run! (partial write! cfg :file-object-thumbnail file-id) thumbs)
|
||||
|
||||
(when-let [thumb (bfc/get-file-thumbnail cfg file)]
|
||||
(vswap! bfc/*state* bfc/collect-storage-objects [thumb])
|
||||
(write! cfg :file-thumbnail file-id thumb))
|
||||
|
||||
(l/trc :hint "write" :obj "file"
|
||||
:thumbnails (count thumbs)
|
||||
:rels (count rels)
|
||||
:media (count media))))
|
||||
|
||||
(defn- write-storage-object!
|
||||
[{:keys [::sto/storage] :as cfg} id]
|
||||
(let [sobj (sto/get-object storage id)
|
||||
data (with-open [input (sto/get-object-data storage sobj)]
|
||||
(io/read-as-bytes input))]
|
||||
|
||||
(l/trc :hint "write" :obj "storage-object" :id (str id) :size (:size sobj))
|
||||
(write! cfg :storage-object id (meta sobj) data)))
|
||||
|
||||
(defn- read-storage-object!
|
||||
[{:keys [::sto/storage ::bfc/timestamp] :as cfg} id]
|
||||
(let [mdata (read-obj cfg :storage-object id)
|
||||
data (read-blob cfg :storage-object id)
|
||||
hash (sto/calculate-hash data)
|
||||
|
||||
content (-> (sto/content data)
|
||||
(sto/wrap-with-hash hash))
|
||||
|
||||
params (-> mdata
|
||||
(assoc ::sto/content content)
|
||||
(assoc ::sto/deduplicate? true)
|
||||
(assoc ::sto/touched-at timestamp))
|
||||
|
||||
sobject (sto/put-object! storage params)]
|
||||
|
||||
(vswap! bfc/*state* update :index assoc id (:id sobject))
|
||||
|
||||
(l/trc :hint "read" :obj "storage-object"
|
||||
:id (str id)
|
||||
:new-id (str (:id sobject))
|
||||
:size (:size sobject))))
|
||||
|
||||
(defn read-team!
|
||||
[{:keys [::db/conn ::bfc/timestamp] :as cfg} team-id]
|
||||
(l/trc :hint "read" :obj "team" :id (str team-id))
|
||||
|
||||
(let [team (read-obj cfg :team team-id)
|
||||
team (-> team
|
||||
(update :id bfc/lookup-index)
|
||||
(update :photo-id bfc/lookup-index)
|
||||
(assoc :created-at timestamp)
|
||||
(assoc :modified-at timestamp))]
|
||||
|
||||
(events/tap :progress
|
||||
{:op :import
|
||||
:section :read-team
|
||||
:id team-id
|
||||
:name (:name team)})
|
||||
|
||||
(db/insert! conn :team
|
||||
(update team :features db/encode-pgarray conn "text")
|
||||
::db/return-keys false)
|
||||
|
||||
(doseq [font (->> (read-seq cfg :team-font-variant)
|
||||
(filter #(= team-id (:team-id %))))]
|
||||
(let [font (-> font
|
||||
(update :id bfc/lookup-index)
|
||||
(update :team-id bfc/lookup-index)
|
||||
(update :woff1-file-id bfc/lookup-index)
|
||||
(update :woff2-file-id bfc/lookup-index)
|
||||
(update :ttf-file-id bfc/lookup-index)
|
||||
(update :otf-file-id bfc/lookup-index)
|
||||
(assoc :created-at timestamp)
|
||||
(assoc :modified-at timestamp))]
|
||||
(db/insert! conn :team-font-variant font
|
||||
::db/return-keys false)))
|
||||
|
||||
team))
|
||||
|
||||
(defn read-project!
|
||||
[{:keys [::db/conn ::bfc/timestamp] :as cfg} project-id]
|
||||
(l/trc :hint "read" :obj "project" :id (str project-id))
|
||||
|
||||
(let [project (read-obj cfg :project project-id)
|
||||
project (-> project
|
||||
(update :id bfc/lookup-index)
|
||||
(update :team-id bfc/lookup-index)
|
||||
(assoc :created-at timestamp)
|
||||
(assoc :modified-at timestamp))]
|
||||
|
||||
(events/tap :progress
|
||||
{:op :import
|
||||
:section :read-project
|
||||
:id project-id
|
||||
:name (:name project)})
|
||||
|
||||
(db/insert! conn :project project
|
||||
::db/return-keys false)))
|
||||
|
||||
(defn read-file!
|
||||
[{:keys [::db/conn ::bfc/timestamp] :as cfg} file-id]
|
||||
(l/trc :hint "read" :obj "file" :id (str file-id))
|
||||
|
||||
(let [file (-> (read-obj cfg :file file-id)
|
||||
(update :id bfc/lookup-index)
|
||||
(update :project-id bfc/lookup-index)
|
||||
(bfc/process-file))]
|
||||
|
||||
(events/tap :progress
|
||||
{:op :import
|
||||
:section :read-file
|
||||
:id file-id
|
||||
:name (:name file)})
|
||||
|
||||
;; All features that are enabled and requires explicit migration are
|
||||
;; added to the state for a posterior migration step.
|
||||
(doseq [feature (-> (::bfc/features cfg)
|
||||
(set/difference cfeat/no-migration-features)
|
||||
(set/difference (:features file)))]
|
||||
(vswap! bfc/*state* update :pending-to-migrate (fnil conj []) [feature (:id file)]))
|
||||
|
||||
(bfc/persist-file! cfg file))
|
||||
|
||||
(doseq [thumbnail (read-seq cfg :file-object-thumbnail file-id)]
|
||||
(let [thumbnail (-> thumbnail
|
||||
(update :file-id bfc/lookup-index)
|
||||
(update :media-id bfc/lookup-index))
|
||||
file-id (:file-id thumbnail)
|
||||
|
||||
thumbnail (update thumbnail :object-id
|
||||
#(str/replace-first % #"^(.*?)/" (str file-id "/")))]
|
||||
|
||||
(db/insert! conn :file-tagged-object-thumbnail thumbnail
|
||||
{::db/return-keys false})))
|
||||
|
||||
(doseq [rel (read-obj cfg :file-rels file-id)]
|
||||
(let [rel (-> rel
|
||||
(update :file-id bfc/lookup-index)
|
||||
(update :library-file-id bfc/lookup-index)
|
||||
(assoc :synced-at timestamp))]
|
||||
(db/insert! conn :file-library-rel rel
|
||||
::db/return-keys false)))
|
||||
|
||||
(doseq [media (read-seq cfg :file-media-object file-id)]
|
||||
(let [media (-> media
|
||||
(update :id bfc/lookup-index)
|
||||
(update :file-id bfc/lookup-index)
|
||||
(update :media-id bfc/lookup-index)
|
||||
(update :thumbnail-id bfc/lookup-index))]
|
||||
(db/insert! conn :file-media-object media
|
||||
::db/return-keys false
|
||||
::sql/on-conflict-do-nothing true))))
|
||||
|
||||
(def ^:private empty-summary
|
||||
{:teams #{}
|
||||
:files #{}
|
||||
:projects #{}
|
||||
:file-media-objects #{}
|
||||
:team-font-variants #{}
|
||||
:storage-objects #{}})
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; PUBLIC API
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
(defn export-team!
|
||||
[cfg team-id]
|
||||
(let [id (uuid/next)
|
||||
tp (dt/tpoint)
|
||||
|
||||
cfg (-> (create-database cfg)
|
||||
(update ::sto/storage media/configure-assets-storage))]
|
||||
|
||||
(l/inf :hint "start"
|
||||
:operation "export"
|
||||
:id (str id)
|
||||
:path (str (::path cfg)))
|
||||
|
||||
(try
|
||||
(db/tx-run! cfg (fn [cfg]
|
||||
(setup-schema! cfg)
|
||||
(binding [bfc/*state* (volatile! empty-summary)]
|
||||
(write-team! cfg team-id)
|
||||
|
||||
(run! (partial write-project! cfg)
|
||||
(bfc/get-team-projects cfg team-id))
|
||||
|
||||
(run! (partial write-file! cfg)
|
||||
(bfc/get-team-files cfg team-id))
|
||||
|
||||
(run! (partial write-storage-object! cfg)
|
||||
(-> bfc/*state* deref :storage-objects))
|
||||
|
||||
(write! cfg :manifest "team-id" team-id)
|
||||
(write! cfg :manifest "objects" (deref bfc/*state*))
|
||||
|
||||
(::path cfg))))
|
||||
(finally
|
||||
(pu/close! (::db cfg))
|
||||
|
||||
(let [elapsed (tp)]
|
||||
(l/inf :hint "end"
|
||||
:operation "export"
|
||||
:id (str id)
|
||||
:elapsed (dt/format-duration elapsed)))))))
|
||||
|
||||
(defn import-team!
|
||||
[cfg path]
|
||||
(let [id (uuid/next)
|
||||
tp (dt/tpoint)
|
||||
|
||||
cfg (-> (create-database cfg path)
|
||||
(update ::sto/storage media/configure-assets-storage)
|
||||
(assoc ::bfc/timestamp (dt/now)))]
|
||||
|
||||
(l/inf :hint "start"
|
||||
:operation "import"
|
||||
:id (str id)
|
||||
:path (str (::path cfg)))
|
||||
|
||||
(try
|
||||
(db/tx-run! cfg (fn [{:keys [::db/conn] :as cfg}]
|
||||
(db/exec-one! conn ["SET idle_in_transaction_session_timeout = 0"])
|
||||
(db/exec-one! conn ["SET CONSTRAINTS ALL DEFERRED"])
|
||||
|
||||
(binding [bfc/*state* (volatile! {:index {}})]
|
||||
(let [objects (read-obj cfg :manifest "objects")]
|
||||
|
||||
;; We first process all storage objects, they have
|
||||
;; deduplication so we can't rely on simple reindex. This
|
||||
;; operation populates the index for all storage objects.
|
||||
(run! (partial read-storage-object! cfg) (:storage-objects objects))
|
||||
|
||||
;; Populate index with all the incoming objects
|
||||
(vswap! bfc/*state* update :index
|
||||
(fn [index]
|
||||
(-> index
|
||||
(bfc/update-index (:teams objects))
|
||||
(bfc/update-index (:projects objects))
|
||||
(bfc/update-index (:files objects))
|
||||
(bfc/update-index (:file-media-objects objects))
|
||||
(bfc/update-index (:team-font-variants objects)))))
|
||||
|
||||
(let [team-id (read-obj cfg :manifest "team-id")
|
||||
team (read-team! cfg team-id)
|
||||
features (cfeat/get-team-enabled-features cf/flags team)
|
||||
cfg (assoc cfg ::bfc/features features)]
|
||||
|
||||
(run! (partial read-project! cfg) (:projects objects))
|
||||
(run! (partial read-file! cfg) (:files objects))
|
||||
|
||||
;; (run-pending-migrations! cfg)
|
||||
|
||||
team)))))
|
||||
(finally
|
||||
(pu/close! (::db cfg))
|
||||
|
||||
(let [elapsed (tp)]
|
||||
(l/inf :hint "end"
|
||||
:operation "import"
|
||||
:id (str id)
|
||||
:elapsed (dt/format-duration elapsed)))))))
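
;; Illustrative usage sketch, not part of the diff: a full team round trip
;; through the sqlite-based format. `system` is assumed to be a system map
;; carrying ::db/pool and ::sto/storage; `team-id` is a hypothetical uuid.
(comment
  (let [path (export-team! system team-id)] ;; returns the sqlite file path
    (import-team! system path)))            ;; returns the newly created team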
|
||||
@@ -79,6 +79,8 @@
|
||||
|
||||
:telemetry-uri "https://telemetry.penpot.app/"
|
||||
|
||||
:media-max-file-size (* 1024 1024 30) ; 30MiB
|
||||
|
||||
:ldap-user-query "(|(uid=:username)(mail=:username))"
|
||||
:ldap-attrs-username "uid"
|
||||
:ldap-attrs-email "mail"
|
||||
@@ -99,6 +101,8 @@
|
||||
(s/def ::audit-log-archive-uri ::us/string)
|
||||
(s/def ::audit-log-http-handler-concurrency ::us/integer)
|
||||
|
||||
(s/def ::deletion-delay ::dt/duration)
|
||||
|
||||
(s/def ::admins ::us/set-of-valid-emails)
|
||||
(s/def ::file-change-snapshot-every ::us/integer)
|
||||
(s/def ::file-change-snapshot-timeout ::dt/duration)
|
||||
@@ -109,8 +113,7 @@
|
||||
(s/def ::worker-default-parallelism ::us/integer)
|
||||
(s/def ::worker-webhook-parallelism ::us/integer)
|
||||
|
||||
(s/def ::authenticated-cookie-domain ::us/string)
|
||||
(s/def ::authenticated-cookie-name ::us/string)
|
||||
(s/def ::auth-data-cookie-domain ::us/string)
|
||||
(s/def ::auth-token-cookie-name ::us/string)
|
||||
(s/def ::auth-token-cookie-max-age ::dt/duration)
|
||||
|
||||
@@ -212,12 +215,12 @@
|
||||
(s/keys :opt-un [::secret-key
|
||||
::flags
|
||||
::admins
|
||||
::deletion-delay
|
||||
::allow-demo-users
|
||||
::audit-log-archive-uri
|
||||
::audit-log-http-handler-concurrency
|
||||
::auth-token-cookie-name
|
||||
::auth-token-cookie-max-age
|
||||
::authenticated-cookie-name
|
||||
::authenticated-cookie-domain
|
||||
::database-password
|
||||
::database-uri
|
||||
@@ -333,7 +336,8 @@
|
||||
:enable-backend-openapi-doc
|
||||
:enable-backend-worker
|
||||
:enable-secure-session-cookies
|
||||
:enable-email-verification])
|
||||
:enable-email-verification
|
||||
:enable-v2-migration])
|
||||
|
||||
(defn- parse-flags
|
||||
[config]
|
||||
@@ -378,7 +382,8 @@
|
||||
(defonce ^:dynamic flags (parse-flags config))
|
||||
|
||||
(def deletion-delay
|
||||
(dt/duration {:days 7}))
|
||||
(or (c/get config :deletion-delay)
|
||||
(dt/duration {:days 7})))
|
||||
|
||||
(defn get
|
||||
"A configuration getter. Helps code be more testable."
|
||||
|
||||
@@ -19,6 +19,7 @@
|
||||
[app.util.json :as json]
|
||||
[app.util.time :as dt]
|
||||
[clojure.java.io :as io]
|
||||
[clojure.set :as set]
|
||||
[clojure.spec.alpha :as s]
|
||||
[integrant.core :as ig]
|
||||
[next.jdbc :as jdbc]
|
||||
@@ -236,8 +237,11 @@
|
||||
(jdbc/get-connection system-or-pool)
|
||||
(if (map? system-or-pool)
|
||||
(open (::pool system-or-pool))
|
||||
(ex/raise :type :internal
|
||||
:code :unable-resolve-pool))))
|
||||
(throw (IllegalArgumentException. "unable to resolve connection pool")))))
|
||||
|
||||
(defn get-update-count
|
||||
[result]
|
||||
(:next.jdbc/update-count result))
|
||||
|
||||
(defn get-connection
|
||||
[cfg-or-conn]
|
||||
@@ -245,9 +249,7 @@
|
||||
cfg-or-conn
|
||||
(if (map? cfg-or-conn)
|
||||
(get-connection (::conn cfg-or-conn))
|
||||
(ex/raise :type :internal
|
||||
:code :unable-resolve-connection
|
||||
:hint "expected conn or system map"))))
|
||||
(throw (IllegalArgumentException. "unable to resolve connection")))))
|
||||
|
||||
(defn connection-map?
|
||||
"Check if the provided value is a map like data structure that
|
||||
@@ -255,58 +257,130 @@
|
||||
[o]
|
||||
(and (map? o) (connection? (::conn o))))
|
||||
|
||||
(defn- get-connectable
|
||||
(defn get-connectable
|
||||
"Resolve to a connection or connection pool instance; if it is not
|
||||
possible, raises an exception"
|
||||
[o]
|
||||
(cond
|
||||
(connection? o) o
|
||||
(pool? o) o
|
||||
(map? o) (get-connectable (or (::conn o) (::pool o)))
|
||||
:else (ex/raise :type :internal
|
||||
:code :unable-resolve-connectable
|
||||
:hint "expected conn, pool or system")))
|
||||
:else (throw (IllegalArgumentException. "unable to resolve connectable"))))
|
||||
|
||||
(def ^:private params-mapping
|
||||
{::return-keys? :return-keys
|
||||
::return-keys :return-keys})
|
||||
|
||||
(defn rename-opts
|
||||
[opts]
|
||||
(set/rename-keys opts params-mapping))
|
||||
|
||||
(def ^:private default-insert-opts
|
||||
{:builder-fn sql/as-kebab-maps
|
||||
:return-keys true})
|
||||
|
||||
(def ^:private default-opts
|
||||
{:builder-fn sql/as-kebab-maps})
|
||||
|
||||
(defn exec!
|
||||
([ds sv]
|
||||
(-> (get-connectable ds)
|
||||
(jdbc/execute! sv default-opts)))
|
||||
([ds sv] (exec! ds sv nil))
|
||||
([ds sv opts]
|
||||
(-> (get-connectable ds)
|
||||
(jdbc/execute! sv (into default-opts (sql/adapt-opts opts))))))
|
||||
(let [conn (get-connectable ds)
|
||||
opts (if (empty? opts)
|
||||
default-opts
|
||||
(into default-opts (rename-opts opts)))]
|
||||
(jdbc/execute! conn sv opts))))
|
||||
|
||||
(defn exec-one!
|
||||
([ds sv]
|
||||
(-> (get-connectable ds)
|
||||
(jdbc/execute-one! sv default-opts)))
|
||||
([ds sv] (exec-one! ds sv nil))
|
||||
([ds sv opts]
|
||||
(-> (get-connectable ds)
|
||||
(jdbc/execute-one! sv (into default-opts (sql/adapt-opts opts))))))
|
||||
(let [conn (get-connectable ds)
|
||||
opts (if (empty? opts)
|
||||
default-opts
|
||||
(into default-opts (rename-opts opts)))]
|
||||
(jdbc/execute-one! conn sv opts))))
|
||||
|
||||
(defn insert!
|
||||
[ds table params & {:as opts :keys [::return-keys?] :or {return-keys? true}}]
|
||||
(-> (get-connectable ds)
|
||||
(exec-one! (sql/insert table params opts)
|
||||
(assoc opts ::return-keys? return-keys?))))
|
||||
"A helper that builds an insert sql statement and executes it. By
|
||||
default returns the inserted row with all the field; you can delimit
|
||||
the returned columns with the `::columns` option."
|
||||
[ds table params & {:as opts}]
|
||||
(let [conn (get-connectable ds)
|
||||
sql (sql/insert table params opts)
|
||||
opts (if (empty? opts)
|
||||
default-insert-opts
|
||||
(into default-insert-opts (rename-opts opts)))]
|
||||
(jdbc/execute-one! conn sql opts)))
|
||||
|
||||
(defn insert-multi!
|
||||
[ds table cols rows & {:as opts :keys [::return-keys?] :or {return-keys? true}}]
|
||||
(-> (get-connectable ds)
|
||||
(exec! (sql/insert-multi table cols rows opts)
|
||||
(assoc opts ::return-keys? return-keys?))))
|
||||
(defn insert-many!
|
||||
"An optimized version of `insert!` that perform insertion of multiple
|
||||
values at once.
|
||||
|
||||
This expands to a single SQL statement with placeholders for every
|
||||
value being inserted. For large data sets, this may exceed the limit
|
||||
of sql string size and/or number of parameters."
|
||||
[ds table cols rows & {:as opts}]
|
||||
(let [conn (get-connectable ds)
|
||||
sql (sql/insert-many table cols rows opts)
|
||||
opts (if (empty? opts)
|
||||
default-insert-opts
|
||||
(into default-insert-opts (rename-opts opts)))
|
||||
opts (update opts :return-keys boolean)]
|
||||
(jdbc/execute! conn sql opts)))
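
;; Illustrative usage sketch, not part of the diff: inserting several rows
;; with a single statement. The `kvstore` table and its columns are
;; hypothetical, used only to show the calling convention.
(comment
  (insert-many! pool :kvstore
                [:key :val]
                [["a" "1"]
                 ["b" "2"]]))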
|
||||
|
||||
(defn update!
|
||||
[ds table params where & {:as opts :keys [::return-keys?] :or {return-keys? true}}]
|
||||
(-> (get-connectable ds)
|
||||
(exec-one! (sql/update table params where opts)
|
||||
(assoc opts ::return-keys? return-keys?))))
|
||||
"A helper that build an UPDATE SQL statement and executes it.
|
||||
|
||||
Given a connectable object, a table name, a hash map of columns and
|
||||
values to set, and either a hash map of columns and values to search
|
||||
on or a vector of a SQL where clause and parameters, perform an
|
||||
update on the table.
|
||||
|
||||
By default returns an object with the number of affected rows; a
|
||||
complete row can be returned if you pass `::return-keys` with `true`
|
||||
or with a vector of columns.
|
||||
|
||||
Also it can be combined with the `::many` option if you perform an
|
||||
update to multiple rows and you want all the affected rows to be
|
||||
returned."
|
||||
[ds table params where & {:as opts}]
|
||||
(let [conn (get-connectable ds)
|
||||
sql (sql/update table params where opts)
|
||||
opts (if (empty? opts)
|
||||
default-opts
|
||||
(into default-opts (rename-opts opts)))
|
||||
opts (update opts :return-keys boolean)]
|
||||
(if (::many opts)
|
||||
(jdbc/execute! conn sql opts)
|
||||
(jdbc/execute-one! conn sql opts))))
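
;; Illustrative usage sketch, not part of the diff: the default call returns
;; an object with the update count; `::return-keys` asks for the full row (or
;; selected columns) and `::many` returns every affected row. The bindings
;; `pool`, `file-id` and `project-id` are hypothetical.
(comment
  (update! pool :file {:revn 2} {:id file-id})
  (update! pool :file {:revn 2} {:id file-id} ::return-keys true)
  (update! pool :file {:has-media-trimmed false} {:project-id project-id}
           ::return-keys [:id] ::many true))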
|
||||
|
||||
(defn delete!
|
||||
[ds table params & {:as opts :keys [::return-keys?] :or {return-keys? true}}]
|
||||
(-> (get-connectable ds)
|
||||
(exec-one! (sql/delete table params opts)
|
||||
(assoc opts ::return-keys? return-keys?))))
|
||||
"A helper that builds an DELETE SQL statement and executes it.
|
||||
|
||||
Given a connectable object, a table name, and either a hash map of columns
|
||||
and values to search on or a vector of a SQL where clause and parameters,
|
||||
perform a delete on the table.
|
||||
|
||||
By default returns an object with the number of affected rows; a
|
||||
complete row can be returned if you pass `::return-keys` with `true`
|
||||
or with a vector of columns.
|
||||
|
||||
Also it can be combined with the `::many` option if you perform an
|
||||
update to multiple rows and you want all the affected rows to be
|
||||
returned."
|
||||
[ds table params & {:as opts}]
|
||||
(let [conn (get-connectable ds)
|
||||
sql (sql/delete table params opts)
|
||||
opts (if (empty? opts)
|
||||
default-opts
|
||||
(into default-opts (rename-opts opts)))]
|
||||
(if (::many opts)
|
||||
(jdbc/execute! conn sql opts)
|
||||
(jdbc/execute-one! conn sql opts))))
|
||||
|
||||
(defn query
|
||||
[ds table params & {:as opts}]
|
||||
(exec! ds (sql/select table params opts) opts))
|
||||
|
||||
(defn is-row-deleted?
|
||||
[{:keys [deleted-at]}]
|
||||
@@ -320,7 +394,7 @@
|
||||
[ds table params & {:as opts}]
|
||||
(let [rows (exec! ds (sql/select table params opts))
|
||||
rows (cond->> rows
|
||||
(::remove-deleted? opts true)
|
||||
(::remove-deleted opts true)
|
||||
(remove is-row-deleted?))]
|
||||
(first rows)))
|
||||
|
||||
@@ -329,7 +403,7 @@
|
||||
filters. Raises :not-found exception if no object is found."
|
||||
[ds table params & {:as opts}]
|
||||
(let [row (get* ds table params opts)]
|
||||
(when (and (not row) (::check-deleted? opts true))
|
||||
(when (and (not row) (::check-deleted opts true))
|
||||
(ex/raise :type :not-found
|
||||
:code :object-not-found
|
||||
:table table
|
||||
@@ -341,14 +415,29 @@
|
||||
(-> (get-connectable ds)
|
||||
(jdbc/plan sql sql/default-opts)))
|
||||
|
||||
(defn cursor
|
||||
"Return a lazy seq of rows using server side cursors"
|
||||
[conn query & {:keys [chunk-size] :or {chunk-size 25}}]
|
||||
(let [cname (str (gensym "cursor_"))
|
||||
fquery [(str "FETCH " chunk-size " FROM " cname)]]
|
||||
|
||||
;; declare cursor
|
||||
(exec-one! conn
|
||||
(if (vector? query)
|
||||
(into [(str "DECLARE " cname " CURSOR FOR " (nth query 0))]
|
||||
(rest query))
|
||||
[(str "DECLARE " cname " CURSOR FOR " query)]))
|
||||
|
||||
;; return a lazy seq
|
||||
((fn fetch-more []
|
||||
(lazy-seq
|
||||
(when-let [chunk (seq (exec! conn fquery))]
|
||||
(concat chunk (fetch-more))))))))
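
;; Illustrative usage sketch, not part of the diff: streaming a large result
;; set with server-side cursors. It must run inside a transaction because the
;; DECLARE/FETCH pair only lives for the duration of the enclosing tx.
(comment
  (tx-run! system
           (fn [{:keys [::conn]}]
             (doseq [row (cursor conn ["SELECT id FROM file WHERE deleted_at IS NULL"]
                                 :chunk-size 100)]
               (prn (:id row))))))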
|
||||
|
||||
(defn get-by-id
|
||||
[ds table id & {:as opts}]
|
||||
(get ds table {:id id} opts))
|
||||
|
||||
(defn query
|
||||
[ds table params & {:as opts}]
|
||||
(exec! ds (sql/select table params opts)))
|
||||
|
||||
(defn pgobject?
|
||||
([v]
|
||||
(instance? PGobject v))
|
||||
@@ -401,6 +490,10 @@
|
||||
(.createArrayOf conn ^String type (into-array Object objects))
|
||||
(.createArrayOf conn ^String type objects))))
|
||||
|
||||
(defn encode-pgarray
|
||||
[data conn type]
|
||||
(create-array conn type data))
|
||||
|
||||
(defn decode-pgpoint
|
||||
[^PGpoint v]
|
||||
(gpt/point (.-x v) (.-y v)))
|
||||
@@ -421,12 +514,14 @@
|
||||
|
||||
(defn rollback!
|
||||
([conn]
|
||||
(let [^Connection conn (get-connection conn)]
|
||||
(l/trc :hint "explicit rollback requested")
|
||||
(.rollback conn)))
|
||||
(if (and (map? conn) (::savepoint conn))
|
||||
(rollback! conn (::savepoint conn))
|
||||
(let [^Connection conn (get-connection conn)]
|
||||
(l/trc :hint "explicit rollback requested")
|
||||
(.rollback conn))))
|
||||
([conn ^Savepoint sp]
|
||||
(let [^Connection conn (get-connection conn)]
|
||||
(l/trc :hint "explicit rollback requested")
|
||||
(l/trc :hint "explicit rollback requested (savepoint)")
|
||||
(.rollback conn sp))))
|
||||
|
||||
(defn tx-run!
|
||||
@@ -442,23 +537,30 @@
|
||||
(let [conn (::conn system)
|
||||
sp (savepoint conn)]
|
||||
(try
|
||||
(let [result (apply f system params)]
|
||||
(release! conn sp)
|
||||
(let [system' (-> system
|
||||
(assoc ::savepoint sp)
|
||||
(dissoc ::rollback))
|
||||
result (apply f system' params)]
|
||||
(if (::rollback system)
|
||||
(rollback! conn sp)
|
||||
(release! conn sp))
|
||||
result)
|
||||
(catch Throwable cause
|
||||
(rollback! conn sp)
|
||||
(.rollback ^Connection conn ^Savepoint sp)
|
||||
(throw cause))))
|
||||
|
||||
(::pool system)
|
||||
(with-atomic [conn (::pool system)]
|
||||
(let [system (assoc system ::conn conn)
|
||||
result (apply f system params)]
|
||||
(let [system' (-> system
|
||||
(assoc ::conn conn)
|
||||
(dissoc ::rollback))
|
||||
result (apply f system' params)]
|
||||
(when (::rollback system)
|
||||
(rollback! conn))
|
||||
result))
|
||||
|
||||
:else
|
||||
(throw (IllegalArgumentException. "invalid arguments"))))
|
||||
(throw (IllegalArgumentException. "invalid system/cfg provided"))))
|
||||
|
||||
(defn run!
|
||||
[system f & params]
|
||||
@@ -548,11 +650,6 @@
|
||||
(.setType "jsonb")
|
||||
(.setValue (json/encode-str data)))))
|
||||
|
||||
(defn get-update-count
|
||||
[result]
|
||||
(:next.jdbc/update-count result))
|
||||
|
||||
|
||||
;; --- Locks
|
||||
|
||||
(def ^:private siphash-state
|
||||
|
||||
@@ -8,7 +8,6 @@
|
||||
(:refer-clojure :exclude [update])
|
||||
(:require
|
||||
[app.db :as-alias db]
|
||||
[clojure.set :as set]
|
||||
[clojure.string :as str]
|
||||
[next.jdbc.optional :as jdbc-opt]
|
||||
[next.jdbc.sql.builder :as sql]))
|
||||
@@ -20,14 +19,6 @@
|
||||
{:table-fn snake-case
|
||||
:column-fn snake-case})
|
||||
|
||||
(def params-mapping
|
||||
{::db/return-keys? :return-keys
|
||||
::db/columns :columns})
|
||||
|
||||
(defn adapt-opts
|
||||
[opts]
|
||||
(set/rename-keys opts params-mapping))
|
||||
|
||||
(defn as-kebab-maps
|
||||
[rs opts]
|
||||
(jdbc-opt/as-unqualified-modified-maps rs (assoc opts :label-fn kebab-case)))
|
||||
@@ -39,10 +30,13 @@
|
||||
(let [opts (merge default-opts opts)
|
||||
opts (cond-> opts
|
||||
(::db/on-conflict-do-nothing? opts)
|
||||
(assoc :suffix "ON CONFLICT DO NOTHING")
|
||||
|
||||
(::on-conflict-do-nothing opts)
|
||||
(assoc :suffix "ON CONFLICT DO NOTHING"))]
|
||||
(sql/for-insert table key-map opts))))
|
||||
|
||||
(defn insert-multi
|
||||
(defn insert-many
|
||||
[table cols rows opts]
|
||||
(let [opts (merge default-opts opts)]
|
||||
(sql/for-insert-multi table cols rows opts)))
|
||||
@@ -53,11 +47,10 @@
|
||||
([table where-params opts]
|
||||
(let [opts (merge default-opts opts)
|
||||
opts (cond-> opts
|
||||
(::db/columns opts) (assoc :columns (::db/columns opts))
|
||||
(::db/for-update? opts) (assoc :suffix "FOR UPDATE")
|
||||
(::db/for-share? opts) (assoc :suffix "FOR KEY SHARE")
|
||||
(:for-update opts) (assoc :suffix "FOR UPDATE")
|
||||
(:for-key-share opts) (assoc :suffix "FOR KEY SHARE"))]
|
||||
(::order-by opts) (assoc :order-by (::order-by opts))
|
||||
(::columns opts) (assoc :columns (::columns opts))
|
||||
(::for-update opts) (assoc :suffix "FOR UPDATE")
|
||||
(::for-share opts) (assoc :suffix "FOR SHARE"))]
|
||||
(sql/for-query table where-params opts))))
|
||||
|
||||
(defn update
|
||||
@@ -65,11 +58,9 @@
|
||||
(update table key-map where-params nil))
|
||||
([table key-map where-params opts]
|
||||
(let [opts (into default-opts opts)
|
||||
opts (if-let [columns (::db/columns opts)]
|
||||
(let [columns (if (seq columns)
|
||||
(sql/as-cols columns opts)
|
||||
"*")]
|
||||
(assoc opts :suffix (str "RETURNING " columns)))
|
||||
keys (::db/return-keys opts)
|
||||
opts (if (vector? keys)
|
||||
(assoc opts :suffix (str "RETURNING " (sql/as-cols keys opts)))
|
||||
opts)]
|
||||
(sql/for-update table key-map where-params opts))))
|
||||
|
||||
@@ -77,5 +68,9 @@
|
||||
([table where-params]
|
||||
(delete table where-params nil))
|
||||
([table where-params opts]
|
||||
(let [opts (merge default-opts opts)]
|
||||
(let [opts (merge default-opts opts)
|
||||
keys (::db/return-keys opts)
|
||||
opts (if (vector? keys)
|
||||
(assoc opts :suffix (str "RETURNING " (sql/as-cols keys opts)))
|
||||
opts)]
|
||||
(sql/for-delete table where-params opts))))
|
||||
|
File diff suppressed because it is too large
@@ -11,6 +11,7 @@
|
||||
[app.common.exceptions :as ex]
|
||||
[app.common.logging :as l]
|
||||
[app.db :as db]
|
||||
[app.db.sql :as-alias sql]
|
||||
[app.util.blob :as blob]
|
||||
[app.util.objects-map :as omap]
|
||||
[app.util.pointer-map :as pmap]))
|
||||
@@ -21,14 +22,35 @@
|
||||
|
||||
(defn enable-objects-map
|
||||
[file]
|
||||
(let [update-fn #(d/update-when % :objects omap/wrap)]
|
||||
(let [update-page
|
||||
(fn [page]
|
||||
(if (and (pmap/pointer-map? page)
|
||||
(not (pmap/loaded? page)))
|
||||
page
|
||||
(update page :objects omap/wrap)))
|
||||
|
||||
update-data
|
||||
(fn [fdata]
|
||||
(update fdata :pages-index d/update-vals update-page))]
|
||||
|
||||
(-> file
|
||||
(update :data (fn [fdata]
|
||||
(-> fdata
|
||||
(update :pages-index update-vals update-fn)
|
||||
(update :components update-vals update-fn))))
|
||||
(update :data update-data)
|
||||
(update :features conj "fdata/objects-map"))))
|
||||
|
||||
(defn process-objects
|
||||
"Apply a function to all objects-map on the file. Usualy used for convert
|
||||
the objects-map instances to plain maps"
|
||||
[fdata update-fn]
|
||||
(if (contains? fdata :pages-index)
|
||||
(update fdata :pages-index d/update-vals
|
||||
(fn [page]
|
||||
(update page :objects
|
||||
(fn [objects]
|
||||
(if (omap/objects-map? objects)
|
||||
(update-fn objects)
|
||||
objects)))))
|
||||
fdata))
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; POINTER-MAP
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
@@ -38,8 +60,14 @@
|
||||
[system file-id id]
|
||||
(let [{:keys [content]} (db/get system :file-data-fragment
|
||||
{:id id :file-id file-id}
|
||||
{::db/columns [:content]
|
||||
::db/check-deleted? false})]
|
||||
{::sql/columns [:content]
|
||||
::db/check-deleted false})]
|
||||
|
||||
(l/trc :hint "load pointer"
|
||||
:file-id (str file-id)
|
||||
:id (str id)
|
||||
:found (some? content))
|
||||
|
||||
(when-not content
|
||||
(ex/raise :type :internal
|
||||
:code :fragment-not-found
|
||||
@@ -53,28 +81,27 @@
|
||||
"Given a database connection and the final file-id, persist all
|
||||
pointers to the underlying storage (the database)."
|
||||
[system file-id]
|
||||
(doseq [[id item] @pmap/*tracked*]
|
||||
(when (pmap/modified? item)
|
||||
(l/trc :hint "persist pointer" :file-id (str file-id) :id (str id))
|
||||
(let [content (-> item deref blob/encode)]
|
||||
(db/insert! system :file-data-fragment
|
||||
{:id id
|
||||
:file-id file-id
|
||||
:content content})))))
|
||||
(let [conn (db/get-connection system)]
|
||||
(doseq [[id item] @pmap/*tracked*]
|
||||
(when (pmap/modified? item)
|
||||
(l/trc :hint "persist pointer" :file-id (str file-id) :id (str id))
|
||||
(let [content (-> item deref blob/encode)]
|
||||
(db/insert! conn :file-data-fragment
|
||||
{:id id
|
||||
:file-id file-id
|
||||
:content content}))))))
|
||||
|
||||
(defn process-pointers
|
||||
"Apply a function to all pointers on the file. Usuly used for
|
||||
dereference the pointer to a plain value before some processing."
|
||||
[fdata update-fn]
|
||||
(cond-> fdata
|
||||
(contains? fdata :pages-index)
|
||||
(update :pages-index process-pointers update-fn)
|
||||
|
||||
:always
|
||||
(update-vals (fn [val]
|
||||
(if (pmap/pointer-map? val)
|
||||
(update-fn val)
|
||||
val)))))
|
||||
(let [update-fn' (fn [val]
|
||||
(if (pmap/pointer-map? val)
|
||||
(update-fn val)
|
||||
val))]
|
||||
(-> fdata
|
||||
(d/update-vals update-fn')
|
||||
(update :pages-index d/update-vals update-fn'))))
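
;; Illustrative sketch, not part of the diff: dereferencing every pointer-map
;; in a file's data before treating it as plain values; `file` is assumed to
;; be a file row whose :data has already been decoded.
(comment
  (update file :data process-pointers deref))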
|
||||
|
||||
(defn get-used-pointer-ids
|
||||
"Given a file, return all pointer ids used in the data."
|
||||
@@ -90,7 +117,6 @@
|
||||
(-> file
|
||||
(update :data (fn [fdata]
|
||||
(-> fdata
|
||||
(update :pages-index update-vals pmap/wrap)
|
||||
(update :components pmap/wrap))))
|
||||
|
||||
(update :pages-index d/update-vals pmap/wrap)
|
||||
(d/update-when :components pmap/wrap))))
|
||||
(update :features conj "fdata/pointer-map")))
|
||||
|
||||
@@ -23,6 +23,7 @@
|
||||
[app.metrics :as mtx]
|
||||
[app.rpc :as-alias rpc]
|
||||
[app.rpc.doc :as-alias rpc.doc]
|
||||
[app.setup :as-alias setup]
|
||||
[clojure.spec.alpha :as s]
|
||||
[integrant.core :as ig]
|
||||
[promesa.exec :as px]
|
||||
@@ -52,8 +53,8 @@
|
||||
[_ cfg]
|
||||
(merge {::port 6060
|
||||
::host "0.0.0.0"
|
||||
::max-body-size (* 1024 1024 30) ; 30 MiB
|
||||
::max-multipart-body-size (* 1024 1024 120)} ; 120 MiB
|
||||
::max-body-size (* 1024 1024 30) ; default 30 MiB
|
||||
::max-multipart-body-size (* 1024 1024 120)} ; default 120 MiB
|
||||
(d/without-nils cfg)))
|
||||
|
||||
(defmethod ig/pre-init-spec ::server [_]
|
||||
@@ -136,7 +137,7 @@
|
||||
::rpc/routes
|
||||
::rpc.doc/routes
|
||||
::oidc/routes
|
||||
::main/props
|
||||
::setup/props
|
||||
::assets/routes
|
||||
::debug/routes
|
||||
::db/pool
|
||||
|
||||
@@ -10,6 +10,7 @@
|
||||
[app.config :as cf]
|
||||
[app.db :as db]
|
||||
[app.main :as-alias main]
|
||||
[app.setup :as-alias setup]
|
||||
[app.tokens :as tokens]
|
||||
[ring.request :as rreq]))
|
||||
|
||||
@@ -42,7 +43,7 @@
|
||||
(defn- wrap-soft-auth
|
||||
"Soft Authentication, will be executed synchronously on the undertow
|
||||
worker thread."
|
||||
[handler {:keys [::main/props]}]
|
||||
[handler {:keys [::setup/props]}]
|
||||
(letfn [(handle-request [request]
|
||||
(try
|
||||
(let [token (get-token request)
|
||||
|
||||
@@ -13,6 +13,7 @@
|
||||
[app.db.sql :as sql]
|
||||
[app.http.client :as http]
|
||||
[app.main :as-alias main]
|
||||
[app.setup :as-alias setup]
|
||||
[app.tokens :as tokens]
|
||||
[app.worker :as-alias wrk]
|
||||
[clojure.spec.alpha :as s]
|
||||
@@ -30,7 +31,7 @@
|
||||
|
||||
(defmethod ig/pre-init-spec ::routes [_]
|
||||
(s/keys :req [::http/client
|
||||
::main/props
|
||||
::setup/props
|
||||
::db/pool]))
|
||||
|
||||
(defmethod ig/init-key ::routes
|
||||
@@ -106,7 +107,7 @@
|
||||
[cfg headers]
|
||||
(let [tdata (get headers "x-penpot-data")]
|
||||
(when-not (str/empty? tdata)
|
||||
(let [result (tokens/verify (::main/props cfg) {:token tdata :iss :profile-identity})]
|
||||
(let [result (tokens/verify (::setup/props cfg) {:token tdata :iss :profile-identity})]
|
||||
(:profile-id result)))))
|
||||
|
||||
(defn- parse-notification
|
||||
|
||||
@@ -55,8 +55,8 @@
|
||||
convention."
|
||||
([cfg-or-client request]
|
||||
(let [client (resolve-client cfg-or-client)]
|
||||
(send! client request {})))
|
||||
(send! client request {:sync? true})))
|
||||
([cfg-or-client request options]
|
||||
(let [client (resolve-client cfg-or-client)]
|
||||
(send! client request options))))
|
||||
(send! client request (merge {:sync? true} options)))))
|
||||
|
||||
|
||||
@@ -7,6 +7,7 @@
|
||||
(ns app.http.debug
|
||||
(:refer-clojure :exclude [error-handler])
|
||||
(:require
|
||||
[app.binfile.v1 :as bf.v1]
|
||||
[app.common.data :as d]
|
||||
[app.common.exceptions :as ex]
|
||||
[app.common.logging :as l]
|
||||
@@ -17,11 +18,12 @@
|
||||
[app.http.session :as session]
|
||||
[app.main :as-alias main]
|
||||
[app.rpc.commands.auth :as auth]
|
||||
[app.rpc.commands.binfile :as binf]
|
||||
[app.rpc.commands.files-create :refer [create-file]]
|
||||
[app.rpc.commands.profile :as profile]
|
||||
[app.setup :as-alias setup]
|
||||
[app.srepl.helpers :as srepl]
|
||||
[app.storage :as-alias sto]
|
||||
[app.storage.tmp :as tmp]
|
||||
[app.util.blob :as blob]
|
||||
[app.util.template :as tmpl]
|
||||
[app.util.time :as dt]
|
||||
@@ -99,11 +101,11 @@
|
||||
(let [profile (profile/get-profile pool profile-id)
|
||||
project-id (:default-project-id profile)]
|
||||
|
||||
(db/run! pool (fn [{:keys [::db/conn]}]
|
||||
(create-file conn {:id file-id
|
||||
:name (str "Cloned file: " filename)
|
||||
:project-id project-id
|
||||
:profile-id profile-id})
|
||||
(db/run! pool (fn [{:keys [::db/conn] :as cfg}]
|
||||
(create-file cfg {:id file-id
|
||||
:name (str "Cloned file: " filename)
|
||||
:project-id project-id
|
||||
:profile-id profile-id})
|
||||
(db/update! conn :file
|
||||
{:data data}
|
||||
{:id file-id})
|
||||
@@ -140,11 +142,11 @@
|
||||
{::rres/status 200
|
||||
::rres/body "OK UPDATED"})
|
||||
|
||||
(db/run! pool (fn [{:keys [::db/conn]}]
|
||||
(create-file conn {:id file-id
|
||||
:name fname
|
||||
:project-id project-id
|
||||
:profile-id profile-id})
|
||||
(db/run! pool (fn [{:keys [::db/conn] :as cfg}]
|
||||
(create-file cfg {:id file-id
|
||||
:name fname
|
||||
:project-id project-id
|
||||
:profile-id profile-id})
|
||||
(db/update! conn :file
|
||||
{:data data}
|
||||
{:id file-id})
|
||||
@@ -268,9 +270,10 @@
|
||||
(defn export-handler
|
||||
[{:keys [::db/pool] :as cfg} {:keys [params ::session/profile-id] :as request}]
|
||||
|
||||
(let [file-ids (->> (:file-ids params)
|
||||
(remove empty?)
|
||||
(mapv parse-uuid))
|
||||
(let [file-ids (into #{}
|
||||
(comp (remove empty?)
|
||||
(map parse-uuid))
|
||||
(:file-ids params))
|
||||
libs? (contains? params :includelibs)
|
||||
clone? (contains? params :clone)
|
||||
embed? (contains? params :embedassets)]
|
||||
@@ -279,22 +282,22 @@
|
||||
(ex/raise :type :validation
|
||||
:code :missing-arguments))
|
||||
|
||||
(let [path (-> cfg
|
||||
(assoc ::binf/file-ids file-ids)
|
||||
(assoc ::binf/embed-assets? embed?)
|
||||
(assoc ::binf/include-libraries? libs?)
|
||||
(binf/export-to-tmpfile!))]
|
||||
(let [path (tmp/tempfile :prefix "penpot.export.")]
|
||||
(with-open [output (io/output-stream path)]
|
||||
(-> cfg
|
||||
(assoc ::bf.v1/ids file-ids)
|
||||
(assoc ::bf.v1/embed-assets embed?)
|
||||
(assoc ::bf.v1/include-libraries libs?)
|
||||
(bf.v1/export-files! output)))
|
||||
|
||||
(if clone?
|
||||
(let [profile (profile/get-profile pool profile-id)
|
||||
project-id (:default-project-id profile)]
|
||||
(binf/import!
|
||||
(assoc cfg
|
||||
::binf/input path
|
||||
::binf/overwrite? false
|
||||
::binf/ignore-index-errors? true
|
||||
::binf/profile-id profile-id
|
||||
::binf/project-id project-id))
|
||||
|
||||
project-id (:default-project-id profile)
|
||||
cfg (assoc cfg
|
||||
::bf.v1/overwrite false
|
||||
::bf.v1/profile-id profile-id
|
||||
::bf.v1/project-id project-id)]
|
||||
(bf.v1/import-files! cfg path)
|
||||
{::rres/status 200
|
||||
::rres/headers {"content-type" "text/plain"}
|
||||
::rres/body "OK CLONED"})
|
||||
@@ -305,7 +308,6 @@
|
||||
"content-disposition" (str "attachmen; filename=" (first file-ids) ".penpot")}}))))
|
||||
|
||||
|
||||
|
||||
(defn import-handler
|
||||
[{:keys [::db/pool] :as cfg} {:keys [params ::session/profile-id] :as request}]
|
||||
(when-not (contains? params :file)
|
||||
@@ -316,40 +318,40 @@
|
||||
(let [profile (profile/get-profile pool profile-id)
|
||||
project-id (:default-project-id profile)
|
||||
overwrite? (contains? params :overwrite)
|
||||
migrate? (contains? params :migrate)
|
||||
ignore-index-errors? (contains? params :ignore-index-errors)]
|
||||
migrate? (contains? params :migrate)]
|
||||
|
||||
(when-not project-id
|
||||
(ex/raise :type :validation
|
||||
:code :missing-project
|
||||
:hint "project not found"))
|
||||
|
||||
(binf/import!
|
||||
(assoc cfg
|
||||
::binf/input (-> params :file :path)
|
||||
::binf/overwrite? overwrite?
|
||||
::binf/migrate? migrate?
|
||||
::binf/ignore-index-errors? ignore-index-errors?
|
||||
::binf/profile-id profile-id
|
||||
::binf/project-id project-id))
|
||||
|
||||
{::rres/status 200
|
||||
::rres/headers {"content-type" "text/plain"}
|
||||
::rres/body "OK"}))
|
||||
(let [path (-> params :file :path)
|
||||
cfg (assoc cfg
|
||||
::bf.v1/overwrite overwrite?
|
||||
::bf.v1/migrate migrate?
|
||||
::bf.v1/profile-id profile-id
|
||||
::bf.v1/project-id project-id)]
|
||||
(bf.v1/import-files! cfg path)
|
||||
{::rres/status 200
|
||||
::rres/headers {"content-type" "text/plain"}
|
||||
::rres/body "OK"})))
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; ACTIONS
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
(defn- resend-email-notification
|
||||
[{:keys [::db/pool ::main/props] :as cfg} {:keys [params] :as request}]
|
||||
[{:keys [::db/pool ::setup/props] :as cfg} {:keys [params] :as request}]
|
||||
|
||||
(when-not (contains? params :force)
|
||||
(ex/raise :type :validation
|
||||
:code :missing-force
|
||||
:hint "missing force checkbox"))
|
||||
|
||||
(let [profile (some->> params :email (profile/get-profile-by-email pool))]
|
||||
(let [profile (some->> params
|
||||
:email
|
||||
(profile/clean-email)
|
||||
(profile/get-profile-by-email pool))]
|
||||
|
||||
(when-not profile
|
||||
(ex/raise :type :validation
|
||||
@@ -392,7 +394,7 @@
|
||||
::rres/body (str/ffmt "PROFILE '%' ACTIVATED" (:email profile))}))))
|
||||
|
||||
|
||||
(defn- reset-file-data-version
|
||||
(defn- reset-file-version
|
||||
[cfg {:keys [params] :as request}]
|
||||
(let [file-id (some-> params :file-id d/parse-uuid)
|
||||
version (some-> params :version d/parse-integer)]
|
||||
@@ -412,13 +414,8 @@
|
||||
:code :invalid-version
|
||||
:hint "provided invalid version"))
|
||||
|
||||
(srepl/update-file! cfg
|
||||
:id file-id
|
||||
:update-fn (fn [file]
|
||||
(update file :data assoc :version version))
|
||||
:migrate? false
|
||||
:inc-revn? false
|
||||
:save? true)
|
||||
(db/tx-run! cfg srepl/process-file! file-id #(assoc % :version version))
|
||||
|
||||
{::rres/status 200
|
||||
::rres/headers {"content-type" "text/plain"}
|
||||
::rres/body "OK"}))
|
||||
@@ -490,8 +487,8 @@
|
||||
["/error" {:handler (partial error-list-handler cfg)}]
|
||||
["/actions/resend-email-verification"
|
||||
{:handler (partial resend-email-notification cfg)}]
|
||||
["/actions/reset-file-data-version"
|
||||
{:handler (partial reset-file-data-version cfg)}]
|
||||
["/actions/reset-file-version"
|
||||
{:handler (partial reset-file-version cfg)}]
|
||||
["/file/export" {:handler (partial export-handler cfg)}]
|
||||
["/file/import" {:handler (partial import-handler cfg)}]
|
||||
["/file/data" {:handler (partial file-data-handler cfg)}]
|
||||
|
||||
@@ -60,8 +60,12 @@
|
||||
|
||||
(defmethod handle-error :restriction
|
||||
[err _ _]
|
||||
{::rres/status 400
|
||||
::rres/body (ex-data err)})
|
||||
(let [{:keys [code] :as data} (ex-data err)]
|
||||
(if (= code :method-not-allowed)
|
||||
{::rres/status 405
|
||||
::rres/body data}
|
||||
{::rres/status 400
|
||||
::rres/body data})))
|
||||
|
||||
(defmethod handle-error :rate-limit
|
||||
[err _ _]
|
||||
@@ -95,7 +99,7 @@
|
||||
(= code :invalid-image)
|
||||
(binding [l/*context* (request->context request)]
|
||||
(let [cause (or parent-cause err)]
|
||||
(l/error :hint "unexpected error on processing image" :cause cause)
|
||||
(l/warn :hint "unexpected error on processing image" :cause cause)
|
||||
{::rres/status 400 ::rres/body data}))
|
||||
|
||||
:else
|
||||
@@ -214,6 +218,14 @@
|
||||
:hint (ex-message error)
|
||||
:data edata}}))))
|
||||
|
||||
(defmethod handle-exception java.io.IOException
|
||||
[cause _ _]
|
||||
(l/wrn :hint "io exception" :cause cause)
|
||||
{::rres/status 500
|
||||
::rres/body {:type :server-error
|
||||
:code :io-exception
|
||||
:hint (ex-message cause)}})
|
||||
|
||||
(defmethod handle-exception java.util.concurrent.CompletionException
|
||||
[cause request _]
|
||||
(let [cause' (ex-cause cause)]
|
||||
|
||||
@@ -10,11 +10,13 @@
|
||||
[app.common.data :as d]
|
||||
[app.common.logging :as l]
|
||||
[app.common.spec :as us]
|
||||
[app.common.uri :as u]
|
||||
[app.config :as cf]
|
||||
[app.db :as db]
|
||||
[app.db.sql :as sql]
|
||||
[app.http.session.tasks :as-alias tasks]
|
||||
[app.main :as-alias main]
|
||||
[app.setup :as-alias setup]
|
||||
[app.tokens :as tokens]
|
||||
[app.util.time :as dt]
|
||||
[clojure.spec.alpha :as s]
|
||||
@@ -32,7 +34,7 @@
|
||||
|
||||
;; A cookie that we can use to check from other sites of the same
|
||||
;; domain if a user is authenticated.
|
||||
(def default-authenticated-cookie-name "authenticated")
|
||||
(def default-auth-data-cookie-name "auth-data")
|
||||
|
||||
;; Default value for cookie max-age
|
||||
(def default-cookie-max-age (dt/duration {:days 7}))
|
||||
@@ -132,13 +134,13 @@
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
(declare ^:private assign-auth-token-cookie)
|
||||
(declare ^:private assign-authenticated-cookie)
|
||||
(declare ^:private assign-auth-data-cookie)
|
||||
(declare ^:private clear-auth-token-cookie)
|
||||
(declare ^:private clear-authenticated-cookie)
|
||||
(declare ^:private clear-auth-data-cookie)
|
||||
(declare ^:private gen-token)
|
||||
|
||||
(defn create-fn
|
||||
[{:keys [::manager ::main/props]} profile-id]
|
||||
[{:keys [::manager ::setup/props]} profile-id]
|
||||
(us/assert! ::manager manager)
|
||||
(us/assert! ::us/uuid profile-id)
|
||||
|
||||
@@ -152,7 +154,7 @@
|
||||
(l/trace :hint "create" :profile-id (str profile-id))
|
||||
(-> response
|
||||
(assign-auth-token-cookie session)
|
||||
(assign-authenticated-cookie session)))))
|
||||
(assign-auth-data-cookie session)))))
|
||||
|
||||
(defn delete-fn
|
||||
[{:keys [::manager]}]
|
||||
@@ -166,7 +168,7 @@
|
||||
(assoc :status 204)
|
||||
(assoc :body nil)
|
||||
(clear-auth-token-cookie)
|
||||
(clear-authenticated-cookie)))))
|
||||
(clear-auth-data-cookie)))))
|
||||
|
||||
(defn- gen-token
|
||||
[props {:keys [profile-id created-at]}]
|
||||
@@ -196,7 +198,7 @@
|
||||
(neg? (compare default-renewal-max-age elapsed)))))
|
||||
|
||||
(defn- wrap-soft-auth
|
||||
[handler {:keys [::manager ::main/props]}]
|
||||
[handler {:keys [::manager ::setup/props]}]
|
||||
(us/assert! ::manager manager)
|
||||
(letfn [(handle-request [request]
|
||||
(try
|
||||
@@ -228,7 +230,7 @@
|
||||
(let [session (update! manager session)]
|
||||
(-> response
|
||||
(assign-auth-token-cookie session)
|
||||
(assign-authenticated-cookie session)))
|
||||
(assign-auth-data-cookie session)))
|
||||
response))))
|
||||
|
||||
(def soft-auth
|
||||
@@ -248,6 +250,7 @@
|
||||
renewal (dt/plus created-at default-renewal-max-age)
|
||||
expires (dt/plus created-at max-age)
|
||||
secure? (contains? cf/flags :secure-session-cookies)
|
||||
strict? (contains? cf/flags :strict-session-cookies)
|
||||
cors? (contains? cf/flags :cors)
|
||||
name (cf/get :auth-token-cookie-name default-auth-token-cookie-name)
|
||||
comment (str "Renewal at: " (dt/format-instant renewal :rfc1123))
|
||||
@@ -256,15 +259,15 @@
|
||||
:expires expires
|
||||
:value token
|
||||
:comment comment
|
||||
:same-site (if cors? :none :lax)
|
||||
:same-site (if cors? :none (if strict? :strict :lax))
|
||||
:secure secure?}]
|
||||
(update response :cookies assoc name cookie)))
|
||||
|
||||
(defn- assign-authenticated-cookie
|
||||
[response {updated-at :updated-at}]
|
||||
(defn- assign-auth-data-cookie
|
||||
[response {profile-id :profile-id updated-at :updated-at}]
|
||||
(let [max-age (cf/get :auth-token-cookie-max-age default-cookie-max-age)
|
||||
domain (cf/get :authenticated-cookie-domain)
|
||||
cname (cf/get :authenticated-cookie-name "authenticated")
|
||||
domain (cf/get :auth-data-cookie-domain)
|
||||
cname default-auth-data-cookie-name
|
||||
|
||||
created-at (or updated-at (dt/now))
|
||||
renewal (dt/plus created-at default-renewal-max-age)
|
||||
@@ -272,14 +275,17 @@
|
||||
|
||||
comment (str "Renewal at: " (dt/format-instant renewal :rfc1123))
|
||||
secure? (contains? cf/flags :secure-session-cookies)
|
||||
strict? (contains? cf/flags :strict-session-cookies)
|
||||
cors? (contains? cf/flags :cors)
|
||||
|
||||
cookie {:domain domain
|
||||
:expires expires
|
||||
:path "/"
|
||||
:comment comment
|
||||
:value true
|
||||
:same-site :strict
|
||||
:value (u/map->query-string {:profile-id profile-id})
|
||||
:same-site (if cors? :none (if strict? :strict :lax))
|
||||
:secure secure?}]
|
||||
|
||||
(cond-> response
|
||||
(string? domain)
|
||||
(update :cookies assoc cname cookie))))
|
||||
@@ -289,10 +295,10 @@
|
||||
(let [cname (cf/get :auth-token-cookie-name default-auth-token-cookie-name)]
|
||||
(update response :cookies assoc cname {:path "/" :value "" :max-age 0})))
|
||||
|
||||
(defn- clear-authenticated-cookie
|
||||
(defn- clear-auth-data-cookie
|
||||
[response]
|
||||
(let [cname (cf/get :authenticated-cookie-name default-authenticated-cookie-name)
|
||||
domain (cf/get :authenticated-cookie-domain)]
|
||||
(let [cname default-auth-data-cookie-name
|
||||
domain (cf/get :auth-data-cookie-domain)]
|
||||
(cond-> response
|
||||
(string? domain)
|
||||
(update :cookies assoc cname {:domain domain :path "/" :value "" :max-age 0}))))
|
||||
|
||||
@@ -9,11 +9,10 @@
|
||||
(:refer-clojure :exclude [tap])
|
||||
(:require
|
||||
[app.common.data :as d]
|
||||
[app.common.exceptions :as ex]
|
||||
[app.common.logging :as l]
|
||||
[app.common.transit :as t]
|
||||
[app.http.errors :as errors]
|
||||
[promesa.core :as p]
|
||||
[app.util.events :as events]
|
||||
[promesa.exec :as px]
|
||||
[promesa.exec.csp :as sp]
|
||||
[promesa.util :as pu]
|
||||
@@ -21,26 +20,12 @@
|
||||
(:import
|
||||
java.io.OutputStream))
|
||||
|
||||
(def ^:dynamic *channel* nil)
|
||||
|
||||
(defn- write!
|
||||
[^OutputStream output ^bytes data]
|
||||
[^OutputStream output ^bytes data]
|
||||
(l/trc :hint "writting data" :data data :length (alength data))
|
||||
(.write output data)
|
||||
(.flush output))
|
||||
|
||||
(defn- create-writer-loop
|
||||
[^OutputStream output]
|
||||
(try
|
||||
(loop []
|
||||
(when-let [event (sp/take! *channel*)]
|
||||
(let [result (ex/try! (write! output event))]
|
||||
(if (ex/exception? result)
|
||||
(l/wrn :hint "unexpected exception on sse writer" :cause result)
|
||||
(recur)))))
|
||||
(finally
|
||||
(pu/close! output))))
|
||||
|
||||
(defn- encode
|
||||
[[name data]]
|
||||
(try
|
||||
@@ -61,13 +46,6 @@
|
||||
"Cache-Control" "no-cache, no-store, max-age=0, must-revalidate"
|
||||
"Pragma" "no-cache"})
|
||||
|
||||
(defn tap
|
||||
([data] (tap "event" data))
|
||||
([name data]
|
||||
(when-let [channel *channel*]
|
||||
(sp/put! channel [name data])
|
||||
nil)))
|
||||
|
||||
(defn response
|
||||
[handler & {:keys [buf] :or {buf 32} :as opts}]
|
||||
(fn [request]
|
||||
@@ -75,12 +53,15 @@
|
||||
::rres/status 200
|
||||
::rres/body (reify rres/StreamableResponseBody
|
||||
(-write-body-to-stream [_ _ output]
|
||||
(binding [*channel* (sp/chan :buf buf :xf (keep encode))]
|
||||
(let [writer (px/run! :virtual (partial create-writer-loop output))]
|
||||
(binding [events/*channel* (sp/chan :buf buf :xf (keep encode))]
|
||||
(let [listener (events/start-listener
|
||||
(partial write! output)
|
||||
(partial pu/close! output))]
|
||||
(try
|
||||
(tap "end" (handler))
|
||||
(let [result (handler)]
|
||||
(events/tap :end result))
|
||||
(catch Throwable cause
|
||||
(tap "error" (errors/handle' cause request)))
|
||||
(events/tap :error (errors/handle' cause request)))
|
||||
(finally
|
||||
(sp/close! *channel*)
|
||||
(p/await! writer)))))))}))
|
||||
(sp/close! events/*channel*)
|
||||
(px/await! listener)))))))}))
|
||||
|
||||
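The new app.util.events namespace referenced above is not part of this diff; the sketch below only illustrates the tap/listener pattern inferred from the call sites, so every name and signature in it is an assumption.

(ns example.events
  "Illustrative sketch of an events tap/listener; not the real app.util.events."
  (:require
   [promesa.exec :as px]
   [promesa.exec.csp :as sp]))

(def ^:dynamic *channel* nil)

(defn tap
  "Publish an event on the currently bound channel, if any."
  [name data]
  (when-let [channel *channel*]
    (sp/put! channel [name data])
    nil))

(defn start-listener
  "Consume events from the currently bound channel on a virtual thread,
  calling on-event for each value and on-close once the channel closes."
  [on-event on-close]
  (let [channel *channel*]
    (px/run! :virtual
             (fn []
               (try
                 (loop []
                   (when-let [event (sp/take! channel)]
                     (on-event event)
                     (recur)))
                 (finally
                   (on-close)))))))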
@@ -9,30 +9,24 @@
|
||||
(:require
|
||||
[app.common.data :as d]
|
||||
[app.common.data.macros :as dm]
|
||||
[app.common.exceptions :as ex]
|
||||
[app.common.logging :as l]
|
||||
[app.common.spec :as us]
|
||||
[app.common.transit :as t]
|
||||
[app.common.uuid :as uuid]
|
||||
[app.config :as cf]
|
||||
[app.db :as db]
|
||||
[app.http :as-alias http]
|
||||
[app.http.access-token :as-alias actoken]
|
||||
[app.http.client :as http.client]
|
||||
[app.loggers.audit.tasks :as-alias tasks]
|
||||
[app.loggers.webhooks :as-alias webhooks]
|
||||
[app.main :as-alias main]
|
||||
[app.rpc :as-alias rpc]
|
||||
[app.rpc.retry :as rtry]
|
||||
[app.tokens :as tokens]
|
||||
[app.setup :as-alias setup]
|
||||
[app.util.services :as-alias sv]
|
||||
[app.util.time :as dt]
|
||||
[app.worker :as wrk]
|
||||
[clojure.spec.alpha :as s]
|
||||
[cuerdas.core :as str]
|
||||
[integrant.core :as ig]
|
||||
[lambdaisland.uri :as u]
|
||||
[promesa.exec :as px]
|
||||
[ring.request :as rreq]))
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
@@ -133,7 +127,7 @@
|
||||
[_ {:keys [::db/pool] :as cfg}]
|
||||
(cond
|
||||
(db/read-only? pool)
|
||||
(l/warn :hint "audit: disabled (db is read-only)")
|
||||
(l/warn :hint "audit disabled (db is read-only)")
|
||||
|
||||
:else
|
||||
cfg))
|
||||
@@ -187,33 +181,45 @@
|
||||
false)}))
|
||||
|
||||
(defn- handle-event!
|
||||
[conn-or-pool event]
|
||||
(us/verify! ::event event)
|
||||
[cfg event]
|
||||
(let [params {:id (uuid/next)
|
||||
:name (::name event)
|
||||
:type (::type event)
|
||||
:profile-id (::profile-id event)
|
||||
:ip-addr (::ip-addr event)
|
||||
:context (::context event)
|
||||
:props (::props event)}]
|
||||
:props (::props event)}
|
||||
tnow (dt/now)]
|
||||
|
||||
(when (contains? cf/flags :audit-log)
|
||||
;; NOTE: this operation may cause primary key conflicts on inserts
|
||||
;; because of the timestamp precision (two concurrent requests), in
|
||||
;; this case we just retry the operation.
|
||||
(rtry/with-retry {::rtry/when rtry/conflict-exception?
|
||||
::rtry/max-retries 6
|
||||
::rtry/label "persist-audit-log"
|
||||
::db/conn (dm/check db/connection? conn-or-pool)}
|
||||
(let [now (dt/now)]
|
||||
(db/insert! conn-or-pool :audit-log
|
||||
(-> params
|
||||
(update :props db/tjson)
|
||||
(update :context db/tjson)
|
||||
(update :ip-addr db/inet)
|
||||
(assoc :created-at now)
|
||||
(assoc :tracked-at now)
|
||||
(assoc :source "backend"))))))
|
||||
(let [params (-> params
|
||||
(assoc :created-at tnow)
|
||||
(assoc :tracked-at tnow)
|
||||
(update :props db/tjson)
|
||||
(update :context db/tjson)
|
||||
(update :ip-addr db/inet)
|
||||
(assoc :source "backend"))]
|
||||
(db/insert! cfg :audit-log params)))
|
||||
|
||||
(when (and (or (contains? cf/flags :telemetry)
|
||||
(cf/get :telemetry-enabled))
|
||||
(not (contains? cf/flags :audit-log)))
|
||||
;; NOTE: this operation may cause primary key conflicts on inserts
|
||||
;; because of the timestamp precision (two concurrent requests), in
|
||||
;; this case we just retry the operation.
|
||||
;;
|
||||
;; NOTE: this is only executed when the general audit log is disabled
|
||||
(let [params (-> params
|
||||
(assoc :created-at tnow)
|
||||
(assoc :tracked-at tnow)
|
||||
(assoc :props (db/tjson {}))
|
||||
(assoc :context (db/tjson {}))
|
||||
(assoc :ip-addr (db/inet "0.0.0.0"))
|
||||
(assoc :source "backend"))]
|
||||
(db/insert! cfg :audit-log params)))
|
||||
|
||||
(when (and (contains? cf/flags :webhooks)
|
||||
(::webhooks/event? event))
|
||||
@@ -226,7 +232,7 @@
|
||||
:else label)
|
||||
dedupe? (boolean (and batch-key batch-timeout))]
|
||||
|
||||
(wrk/submit! ::wrk/conn conn-or-pool
|
||||
(wrk/submit! ::wrk/conn (::db/conn cfg)
|
||||
::wrk/task :process-webhook-event
|
||||
::wrk/queue :webhooks
|
||||
::wrk/max-retries 0
|
||||
@@ -243,144 +249,13 @@
|
||||
(defn submit!
|
||||
"Submit audit event to the collector."
|
||||
[cfg params]
|
||||
(let [conn (or (::db/conn cfg) (::db/pool cfg))]
|
||||
(us/assert! ::db/pool-or-conn conn)
|
||||
(try
|
||||
(handle-event! conn (d/without-nils params))
|
||||
(catch Throwable cause
|
||||
(l/error :hint "audit: unexpected error processing event" :cause cause)))))
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; TASK: ARCHIVE
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
;; This task is responsible for sending the accumulated events to an
;; external service for archival.
|
||||
|
||||
(declare archive-events)
|
||||
|
||||
(s/def ::tasks/uri ::us/string)
|
||||
|
||||
(defmethod ig/pre-init-spec ::tasks/archive-task [_]
|
||||
(s/keys :req [::db/pool ::main/props ::http.client/client]))
|
||||
|
||||
(defmethod ig/init-key ::tasks/archive
|
||||
[_ cfg]
|
||||
(fn [params]
|
||||
;; NOTE: this let allows overwriting the default configured values
;; from the REPL, when manually invoking the task.
|
||||
(let [enabled (or (contains? cf/flags :audit-log-archive)
|
||||
(:enabled params false))
|
||||
uri (cf/get :audit-log-archive-uri)
|
||||
uri (or uri (:uri params))
|
||||
cfg (assoc cfg ::uri uri)]
|
||||
|
||||
(when (and enabled (not uri))
|
||||
(ex/raise :type :internal
|
||||
:code :task-not-configured
|
||||
:hint "archive task not configured, missing uri"))
|
||||
|
||||
(when enabled
|
||||
(loop [total 0]
|
||||
(let [n (archive-events cfg)]
|
||||
(if n
|
||||
(do
|
||||
(px/sleep 100)
|
||||
(recur (+ total ^long n)))
|
||||
(when (pos? total)
|
||||
(l/debug :hint "events archived" :total total)))))))))
|
||||
|
||||
(def ^:private sql:retrieve-batch-of-audit-log
|
||||
"select *
|
||||
from audit_log
|
||||
where archived_at is null
|
||||
order by created_at asc
|
||||
limit 128
|
||||
for update skip locked;")
|
||||
|
||||
(defn archive-events
|
||||
[{:keys [::db/pool ::uri] :as cfg}]
|
||||
(letfn [(decode-row [{:keys [props ip-addr context] :as row}]
|
||||
(cond-> row
|
||||
(db/pgobject? props)
|
||||
(assoc :props (db/decode-transit-pgobject props))
|
||||
|
||||
(db/pgobject? context)
|
||||
(assoc :context (db/decode-transit-pgobject context))
|
||||
|
||||
(db/pgobject? ip-addr "inet")
|
||||
(assoc :ip-addr (db/decode-inet ip-addr))))
|
||||
|
||||
(row->event [row]
|
||||
(select-keys row [:type
|
||||
:name
|
||||
:source
|
||||
:created-at
|
||||
:tracked-at
|
||||
:profile-id
|
||||
:ip-addr
|
||||
:props
|
||||
:context]))
|
||||
|
||||
(send [events]
|
||||
(let [token (tokens/generate (::main/props cfg)
|
||||
{:iss "authentication"
|
||||
:iat (dt/now)
|
||||
:uid uuid/zero})
|
||||
body (t/encode {:events events})
|
||||
headers {"content-type" "application/transit+json"
|
||||
"origin" (cf/get :public-uri)
|
||||
"cookie" (u/map->query-string {:auth-token token})}
|
||||
params {:uri uri
|
||||
:timeout 6000
|
||||
:method :post
|
||||
:headers headers
|
||||
:body body}
|
||||
resp (http.client/req! cfg params {:sync? true})]
|
||||
(if (= (:status resp) 204)
|
||||
true
|
||||
(do
|
||||
(l/error :hint "unable to archive events"
|
||||
:resp-status (:status resp)
|
||||
:resp-body (:body resp))
|
||||
false))))
|
||||
|
||||
(mark-as-archived [conn rows]
|
||||
(db/exec-one! conn ["update audit_log set archived_at=now() where id = ANY(?)"
|
||||
(->> (map :id rows)
|
||||
(into-array java.util.UUID)
|
||||
(db/create-array conn "uuid"))]))]
|
||||
|
||||
(db/with-atomic [conn pool]
|
||||
(let [rows (db/exec! conn [sql:retrieve-batch-of-audit-log])
|
||||
xform (comp (map decode-row)
|
||||
(map row->event))
|
||||
events (into [] xform rows)]
|
||||
(when-not (empty? events)
|
||||
(l/trace :hint "archive events chunk" :uri uri :events (count events))
|
||||
(when (send events)
|
||||
(mark-as-archived conn rows)
|
||||
(count events)))))))
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; GC Task
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
(def ^:private sql:clean-archived
|
||||
"delete from audit_log
|
||||
where archived_at is not null")
|
||||
|
||||
(defn- clean-archived
|
||||
[{:keys [::db/pool]}]
|
||||
(let [result (db/exec-one! pool [sql:clean-archived])
|
||||
result (:next.jdbc/update-count result)]
|
||||
(l/debug :hint "delete archived audit log entries" :deleted result)
|
||||
result))
|
||||
|
||||
(defmethod ig/pre-init-spec ::tasks/gc [_]
|
||||
(s/keys :req [::db/pool]))
|
||||
|
||||
(defmethod ig/init-key ::tasks/gc
|
||||
[_ cfg]
|
||||
(fn [_]
|
||||
(clean-archived cfg)))
|
||||
(try
|
||||
(let [event (d/without-nils params)
|
||||
cfg (-> cfg
|
||||
(assoc ::rtry/when rtry/conflict-exception?)
|
||||
(assoc ::rtry/max-retries 6)
|
||||
(assoc ::rtry/label "persist-audit-log"))]
|
||||
(us/verify! ::event event)
|
||||
(rtry/invoke! cfg db/tx-run! handle-event! event))
|
||||
(catch Throwable cause
|
||||
(l/error :hint "unexpected error processing event" :cause cause))))
|
||||
|
||||
backend/src/app/loggers/audit/archive_task.clj (new file, 140 lines)
@@ -0,0 +1,140 @@
|
||||
;; This Source Code Form is subject to the terms of the Mozilla Public
|
||||
;; License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
;;
|
||||
;; Copyright (c) KALEIDOS INC
|
||||
|
||||
(ns app.loggers.audit.archive-task
|
||||
(:require
|
||||
[app.common.exceptions :as ex]
|
||||
[app.common.logging :as l]
|
||||
[app.common.transit :as t]
|
||||
[app.common.uuid :as uuid]
|
||||
[app.config :as cf]
|
||||
[app.db :as db]
|
||||
[app.http.client :as http]
|
||||
[app.setup :as-alias setup]
|
||||
[app.tokens :as tokens]
|
||||
[app.util.time :as dt]
|
||||
[clojure.spec.alpha :as s]
|
||||
[integrant.core :as ig]
|
||||
[lambdaisland.uri :as u]
|
||||
[promesa.exec :as px]))
|
||||
|
||||
;; This task is responsible for sending the accumulated events to an
;; external service for archival.
|
||||
|
||||
(defn- decode-row
|
||||
[{:keys [props ip-addr context] :as row}]
|
||||
(cond-> row
|
||||
(db/pgobject? props)
|
||||
(assoc :props (db/decode-transit-pgobject props))
|
||||
|
||||
(db/pgobject? context)
|
||||
(assoc :context (db/decode-transit-pgobject context))
|
||||
|
||||
(db/pgobject? ip-addr "inet")
|
||||
(assoc :ip-addr (db/decode-inet ip-addr))))
|
||||
|
||||
(def ^:private event-keys
|
||||
[:type
|
||||
:name
|
||||
:source
|
||||
:created-at
|
||||
:tracked-at
|
||||
:profile-id
|
||||
:ip-addr
|
||||
:props
|
||||
:context])
|
||||
|
||||
(defn- row->event
|
||||
[row]
|
||||
(select-keys row event-keys))
|
||||
|
||||
(defn- send!
|
||||
[{:keys [::uri] :as cfg} events]
|
||||
(let [token (tokens/generate (::setup/props cfg)
|
||||
{:iss "authentication"
|
||||
:iat (dt/now)
|
||||
:uid uuid/zero})
|
||||
body (t/encode {:events events})
|
||||
headers {"content-type" "application/transit+json"
|
||||
"origin" (cf/get :public-uri)
|
||||
"cookie" (u/map->query-string {:auth-token token})}
|
||||
params {:uri uri
|
||||
:timeout 12000
|
||||
:method :post
|
||||
:headers headers
|
||||
:body body}
|
||||
resp (http/req! cfg params)]
|
||||
(if (= (:status resp) 204)
|
||||
true
|
||||
(do
|
||||
(l/error :hint "unable to archive events"
|
||||
:resp-status (:status resp)
|
||||
:resp-body (:body resp))
|
||||
false))))
|
||||
|
||||
(defn- mark-archived!
|
||||
[{:keys [::db/conn]} rows]
|
||||
(let [ids (db/create-array conn "uuid" (map :id rows))]
|
||||
(db/exec-one! conn ["update audit_log set archived_at=now() where id = ANY(?)" ids])))
|
||||
|
||||
(def ^:private xf:create-event
|
||||
(comp (map decode-row)
|
||||
(map row->event)))
|
||||
|
||||
(def ^:private sql:get-audit-log-chunk
|
||||
"SELECT *
|
||||
FROM audit_log
|
||||
WHERE archived_at is null
|
||||
ORDER BY created_at ASC
|
||||
LIMIT 128
|
||||
FOR UPDATE
|
||||
SKIP LOCKED")
|
||||
|
||||
(defn- get-event-rows
|
||||
[{:keys [::db/conn] :as cfg}]
|
||||
(->> (db/exec! conn [sql:get-audit-log-chunk])
|
||||
(not-empty)))
|
||||
|
||||
(defn- archive-events!
|
||||
[{:keys [::uri] :as cfg}]
|
||||
(db/tx-run! cfg (fn [cfg]
|
||||
(when-let [rows (get-event-rows cfg)]
|
||||
(let [events (into [] xf:create-event rows)]
|
||||
(l/trc :hint "archive events chunk" :uri uri :events (count events))
|
||||
(when (send! cfg events)
|
||||
(mark-archived! cfg rows)
|
||||
(count events)))))))
|
||||
|
||||
(defmethod ig/pre-init-spec ::handler [_]
|
||||
(s/keys :req [::db/pool ::setup/props ::http/client]))
|
||||
|
||||
(defmethod ig/init-key ::handler
|
||||
[_ cfg]
|
||||
(fn [params]
|
||||
;; NOTE: this let allows overwriting the default configured values
;; from the REPL, when manually invoking the task.
|
||||
(let [enabled (or (contains? cf/flags :audit-log-archive)
|
||||
(:enabled params false))
|
||||
|
||||
uri (cf/get :audit-log-archive-uri)
|
||||
uri (or uri (:uri params))
|
||||
cfg (assoc cfg ::uri uri)]
|
||||
|
||||
(when (and enabled (not uri))
|
||||
(ex/raise :type :internal
|
||||
:code :task-not-configured
|
||||
:hint "archive task not configured, missing uri"))
|
||||
|
||||
(when enabled
|
||||
(loop [total 0]
|
||||
(if-let [n (archive-events! cfg)]
|
||||
(do
|
||||
(px/sleep 100)
|
||||
(recur (+ total ^long n)))
|
||||
|
||||
(when (pos? total)
|
||||
(l/dbg :hint "events archived" :total total))))))))
|
||||
|
||||
backend/src/app/loggers/audit/gc_task.clj (new file, 31 lines)
@@ -0,0 +1,31 @@
|
||||
;; This Source Code Form is subject to the terms of the Mozilla Public
|
||||
;; License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
;;
|
||||
;; Copyright (c) KALEIDOS INC
|
||||
|
||||
(ns app.loggers.audit.gc-task
|
||||
(:require
|
||||
[app.common.logging :as l]
|
||||
[app.db :as db]
|
||||
[clojure.spec.alpha :as s]
|
||||
[integrant.core :as ig]))
|
||||
|
||||
(def ^:private sql:clean-archived
|
||||
"DELETE FROM audit_log
|
||||
WHERE archived_at IS NOT NULL")
|
||||
|
||||
(defn- clean-archived!
|
||||
[{:keys [::db/pool]}]
|
||||
(let [result (db/exec-one! pool [sql:clean-archived])
|
||||
result (db/get-update-count result)]
|
||||
(l/debug :hint "delete archived audit log entries" :deleted result)
|
||||
result))
|
||||
|
||||
(defmethod ig/pre-init-spec ::handler [_]
|
||||
(s/keys :req [::db/pool]))
|
||||
|
||||
(defmethod ig/init-key ::handler
|
||||
[_ cfg]
|
||||
(fn [_]
|
||||
(clean-archived! cfg)))
|
||||
@@ -23,17 +23,20 @@
|
||||
|
||||
(defn- send-mattermost-notification!
|
||||
[cfg {:keys [id public-uri] :as report}]
|
||||
|
||||
|
||||
(let [text (str "Exception: " public-uri "/dbg/error/" id " "
|
||||
(when-let [pid (:profile-id report)]
|
||||
(str "(pid: #uuid-" pid ")"))
|
||||
"\n"
|
||||
"```\n"
|
||||
"- host: `" (:host report) "`\n"
|
||||
"- tenant: `" (:tenant report) "`\n"
|
||||
"- host: #" (:host report) "\n"
|
||||
"- tenant: #" (:tenant report) "\n"
|
||||
"- logger: #" (:logger report) "\n"
|
||||
"- request-path: `" (:request-path report) "`\n"
|
||||
"- frontend-version: `" (:frontend-version report) "`\n"
|
||||
"- backend-version: `" (:backend-version report) "`\n"
|
||||
"\n"
|
||||
"```\n"
|
||||
"Trace:\n"
|
||||
(:trace report)
|
||||
"```")
|
||||
@@ -60,6 +63,7 @@
|
||||
:frontend-version (:version/frontend context)
|
||||
:profile-id (:request/profile-id context)
|
||||
:request-path (:request/path context)
|
||||
:logger (::l/logger record)
|
||||
:trace (ex/format-throwable cause :detail? false :header? false)})
|
||||
|
||||
(defn handle-event
|
||||
|
||||
@@ -15,9 +15,9 @@
|
||||
[app.config :as cf]
|
||||
[app.db :as db]
|
||||
[app.http.client :as http]
|
||||
[app.util.json :as json]
|
||||
[app.util.time :as dt]
|
||||
[app.worker :as wrk]
|
||||
[clojure.data.json :as json]
|
||||
[clojure.spec.alpha :as s]
|
||||
[cuerdas.core :as str]
|
||||
[integrant.core :as ig]))
|
||||
@@ -67,12 +67,10 @@
|
||||
[_ {:keys [::db/pool] :as cfg}]
|
||||
(fn [{:keys [props] :as task}]
|
||||
(let [event (::event props)]
|
||||
|
||||
(l/debug :hint "process webhook event"
|
||||
:name (:name event))
|
||||
(l/dbg :hint "process webhook event" :name (:name event))
|
||||
|
||||
(when-let [items (lookup-webhooks cfg event)]
|
||||
(l/trace :hint "webhooks found for event" :total (count items))
|
||||
(l/trc :hint "webhooks found for event" :total (count items))
|
||||
|
||||
(db/with-atomic [conn pool]
|
||||
(doseq [item items]
|
||||
@@ -88,11 +86,9 @@
|
||||
(declare interpret-exception)
|
||||
(declare interpret-response)
|
||||
|
||||
(def ^:private json-mapper
|
||||
(json/mapper
|
||||
{:encode-key-fn str/camel
|
||||
:decode-key-fn (comp keyword str/kebab)
|
||||
:pretty true}))
|
||||
(def json-write-opts
|
||||
{:key-fn str/camel
|
||||
:indent true})
|
||||
|
||||
(defmethod ig/pre-init-spec ::run-webhook-handler [_]
|
||||
(s/keys :req [::http/client ::db/pool]))
|
||||
@@ -111,9 +107,11 @@
|
||||
" where id=?")
|
||||
err
|
||||
(:id whook)]
|
||||
res (db/exec-one! pool sql {::db/return-keys? true})]
|
||||
res (db/exec-one! pool sql {::db/return-keys true})]
|
||||
(when (>= (:error-count res) max-errors)
|
||||
(db/update! pool :webhook {:is-active false} {:id (:id whook)})))
|
||||
(db/update! pool :webhook
|
||||
{:is-active false}
|
||||
{:id (:id whook)})))
|
||||
|
||||
(db/update! pool :webhook
|
||||
{:updated-at (dt/now)
|
||||
@@ -134,15 +132,15 @@
|
||||
whook (::config props)
|
||||
|
||||
body (case (:mtype whook)
|
||||
"application/json" (json/encode-str event json-mapper)
|
||||
"application/json" (json/write-str event json-write-opts)
|
||||
"application/transit+json" (t/encode-str event)
|
||||
"application/x-www-form-urlencoded" (uri/map->query-string event))]
|
||||
|
||||
(l/debug :hint "run webhook"
|
||||
:event-name (:name event)
|
||||
:webhook-id (:id whook)
|
||||
:webhook-uri (:uri whook)
|
||||
:webhook-mtype (:mtype whook))
|
||||
(l/dbg :hint "run webhook"
|
||||
:event-name (:name event)
|
||||
:webhook-id (:id whook)
|
||||
:webhook-uri (:uri whook)
|
||||
:webhook-mtype (:mtype whook))
|
||||
|
||||
(let [req {:uri (:uri whook)
|
||||
:headers {"content-type" (:mtype whook)
|
||||
@@ -160,8 +158,8 @@
|
||||
(report-delivery! whook req nil err)
|
||||
(update-webhook! whook err)
|
||||
(when (= err "unknown")
|
||||
(l/error :hint "unknown error on webhook request"
|
||||
:cause cause))))))))))
|
||||
(l/err :hint "unknown error on webhook request"
|
||||
:cause cause))))))))))
|
||||
|
||||
(defn interpret-response
|
||||
[{:keys [status] :as response}]
|
||||
|
||||
@@ -10,7 +10,6 @@
|
||||
[app.auth.oidc :as-alias oidc]
|
||||
[app.auth.oidc.providers :as-alias oidc.providers]
|
||||
[app.common.logging :as l]
|
||||
[app.common.svg :as csvg]
|
||||
[app.config :as cf]
|
||||
[app.db :as-alias db]
|
||||
[app.email :as-alias email]
|
||||
@@ -22,10 +21,10 @@
|
||||
[app.http.session :as-alias session]
|
||||
[app.http.session.tasks :as-alias session.tasks]
|
||||
[app.http.websocket :as http.ws]
|
||||
[app.loggers.audit.tasks :as-alias audit.tasks]
|
||||
[app.loggers.webhooks :as-alias webhooks]
|
||||
[app.metrics :as-alias mtx]
|
||||
[app.metrics.definition :as-alias mdef]
|
||||
[app.migrations.v2 :as migrations.v2]
|
||||
[app.msgbus :as-alias mbus]
|
||||
[app.redis :as-alias rds]
|
||||
[app.rpc :as-alias rpc]
|
||||
@@ -34,7 +33,10 @@
|
||||
[app.srepl :as-alias srepl]
|
||||
[app.storage :as-alias sto]
|
||||
[app.storage.fs :as-alias sto.fs]
|
||||
[app.storage.gc-deleted :as-alias sto.gc-deleted]
|
||||
[app.storage.gc-touched :as-alias sto.gc-touched]
|
||||
[app.storage.s3 :as-alias sto.s3]
|
||||
[app.svgo :as-alias svgo]
|
||||
[app.util.time :as dt]
|
||||
[app.worker :as-alias wrk]
|
||||
[cider.nrepl :refer [cider-nrepl-handler]]
|
||||
@@ -202,11 +204,11 @@
|
||||
:app.storage.tmp/cleaner
|
||||
{::wrk/executor (ig/ref ::wrk/executor)}
|
||||
|
||||
::sto/gc-deleted-task
|
||||
::sto.gc-deleted/handler
|
||||
{::db/pool (ig/ref ::db/pool)
|
||||
::sto/storage (ig/ref ::sto/storage)}
|
||||
|
||||
::sto/gc-touched-task
|
||||
::sto.gc-touched/handler
|
||||
{::db/pool (ig/ref ::db/pool)}
|
||||
|
||||
::http.client/client
|
||||
@@ -219,7 +221,7 @@
|
||||
{::db/pool (ig/ref ::db/pool)}
|
||||
|
||||
::http.awsns/routes
|
||||
{::props (ig/ref ::setup/props)
|
||||
{::setup/props (ig/ref ::setup/props)
|
||||
::db/pool (ig/ref ::db/pool)
|
||||
::http.client/client (ig/ref ::http.client/client)}
|
||||
|
||||
@@ -260,7 +262,7 @@
|
||||
::oidc/routes
|
||||
{::http.client/client (ig/ref ::http.client/client)
|
||||
::db/pool (ig/ref ::db/pool)
|
||||
::props (ig/ref ::setup/props)
|
||||
::setup/props (ig/ref ::setup/props)
|
||||
::oidc/providers {:google (ig/ref ::oidc.providers/google)
|
||||
:github (ig/ref ::oidc.providers/github)
|
||||
:gitlab (ig/ref ::oidc.providers/gitlab)
|
||||
@@ -272,7 +274,7 @@
|
||||
::db/pool (ig/ref ::db/pool)
|
||||
::rpc/routes (ig/ref ::rpc/routes)
|
||||
::rpc.doc/routes (ig/ref ::rpc.doc/routes)
|
||||
::props (ig/ref ::setup/props)
|
||||
::setup/props (ig/ref ::setup/props)
|
||||
::mtx/routes (ig/ref ::mtx/routes)
|
||||
::oidc/routes (ig/ref ::oidc/routes)
|
||||
::http.debug/routes (ig/ref ::http.debug/routes)
|
||||
@@ -284,7 +286,7 @@
|
||||
{::db/pool (ig/ref ::db/pool)
|
||||
::session/manager (ig/ref ::session/manager)
|
||||
::sto/storage (ig/ref ::sto/storage)
|
||||
::props (ig/ref ::setup/props)}
|
||||
::setup/props (ig/ref ::setup/props)}
|
||||
|
||||
::http.ws/routes
|
||||
{::db/pool (ig/ref ::db/pool)
|
||||
@@ -299,7 +301,8 @@
|
||||
::sto/storage (ig/ref ::sto/storage)}
|
||||
|
||||
:app.rpc/climit
|
||||
{::mtx/metrics (ig/ref ::mtx/metrics)}
|
||||
{::mtx/metrics (ig/ref ::mtx/metrics)
|
||||
::wrk/executor (ig/ref ::wrk/executor)}
|
||||
|
||||
:app.rpc/rlimit
|
||||
{::wrk/executor (ig/ref ::wrk/executor)}
|
||||
@@ -314,14 +317,12 @@
|
||||
::mtx/metrics (ig/ref ::mtx/metrics)
|
||||
::mbus/msgbus (ig/ref ::mbus/msgbus)
|
||||
::rds/redis (ig/ref ::rds/redis)
|
||||
::csvg/optimizer (ig/ref ::csvg/optimizer)
|
||||
::svgo/optimizer (ig/ref ::svgo/optimizer)
|
||||
|
||||
::rpc/climit (ig/ref ::rpc/climit)
|
||||
::rpc/rlimit (ig/ref ::rpc/rlimit)
|
||||
::setup/templates (ig/ref ::setup/templates)
|
||||
::props (ig/ref ::setup/props)
|
||||
|
||||
:pool (ig/ref ::db/pool)}
|
||||
::setup/props (ig/ref ::setup/props)}
|
||||
|
||||
:app.rpc.doc/routes
|
||||
{:methods (ig/ref :app.rpc/methods)}
|
||||
@@ -330,23 +331,26 @@
|
||||
{::rpc/methods (ig/ref :app.rpc/methods)
|
||||
::db/pool (ig/ref ::db/pool)
|
||||
::session/manager (ig/ref ::session/manager)
|
||||
::props (ig/ref ::setup/props)}
|
||||
::setup/props (ig/ref ::setup/props)}
|
||||
|
||||
::wrk/registry
|
||||
{::mtx/metrics (ig/ref ::mtx/metrics)
|
||||
::wrk/tasks
|
||||
{:sendmail (ig/ref ::email/handler)
|
||||
:objects-gc (ig/ref :app.tasks.objects-gc/handler)
|
||||
:orphan-teams-gc (ig/ref :app.tasks.orphan-teams-gc/handler)
|
||||
:file-gc (ig/ref :app.tasks.file-gc/handler)
|
||||
:file-xlog-gc (ig/ref :app.tasks.file-xlog-gc/handler)
|
||||
:storage-gc-deleted (ig/ref ::sto/gc-deleted-task)
|
||||
:storage-gc-touched (ig/ref ::sto/gc-touched-task)
|
||||
:tasks-gc (ig/ref :app.tasks.tasks-gc/handler)
|
||||
:telemetry (ig/ref :app.tasks.telemetry/handler)
|
||||
:storage-gc-deleted (ig/ref ::sto.gc-deleted/handler)
|
||||
:storage-gc-touched (ig/ref ::sto.gc-touched/handler)
|
||||
:session-gc (ig/ref ::session.tasks/gc)
|
||||
:audit-log-archive (ig/ref ::audit.tasks/archive)
|
||||
:audit-log-gc (ig/ref ::audit.tasks/gc)
|
||||
:audit-log-archive (ig/ref :app.loggers.audit.archive-task/handler)
|
||||
:audit-log-gc (ig/ref :app.loggers.audit.gc-task/handler)
|
||||
|
||||
:object-update
|
||||
(ig/ref :app.tasks.object-update/handler)
|
||||
:process-webhook-event
|
||||
(ig/ref ::webhooks/process-event-handler)
|
||||
:run-webhook
|
||||
@@ -373,6 +377,12 @@
|
||||
{::db/pool (ig/ref ::db/pool)
|
||||
::sto/storage (ig/ref ::sto/storage)}
|
||||
|
||||
:app.tasks.orphan-teams-gc/handler
|
||||
{::db/pool (ig/ref ::db/pool)}
|
||||
|
||||
:app.tasks.object-update/handler
|
||||
{::db/pool (ig/ref ::db/pool)}
|
||||
|
||||
:app.tasks.file-gc/handler
|
||||
{::db/pool (ig/ref ::db/pool)
|
||||
::sto/storage (ig/ref ::sto/storage)}
|
||||
@@ -383,7 +393,7 @@
|
||||
:app.tasks.telemetry/handler
|
||||
{::db/pool (ig/ref ::db/pool)
|
||||
::http.client/client (ig/ref ::http.client/client)
|
||||
::props (ig/ref ::setup/props)}
|
||||
::setup/props (ig/ref ::setup/props)}
|
||||
|
||||
[::srepl/urepl ::srepl/server]
|
||||
{::srepl/port (cf/get :urepl-port 6062)
|
||||
@@ -397,21 +407,21 @@
|
||||
|
||||
::setup/props
|
||||
{::db/pool (ig/ref ::db/pool)
|
||||
::key (cf/get :secret-key)
|
||||
::setup/key (cf/get :secret-key)
|
||||
|
||||
;; NOTE: this dependency is only necessary for proper initialization ordering, props
|
||||
;; module requires the migrations to run before it is initialized.
|
||||
::migrations (ig/ref :app.migrations/migrations)}
|
||||
|
||||
::csvg/optimizer
|
||||
::svgo/optimizer
|
||||
{}
|
||||
|
||||
::audit.tasks/archive
|
||||
{::props (ig/ref ::setup/props)
|
||||
:app.loggers.audit.archive-task/handler
|
||||
{::setup/props (ig/ref ::setup/props)
|
||||
::db/pool (ig/ref ::db/pool)
|
||||
::http.client/client (ig/ref ::http.client/client)}
|
||||
|
||||
::audit.tasks/gc
|
||||
:app.loggers.audit.gc-task/handler
|
||||
{::db/pool (ig/ref ::db/pool)}
|
||||
|
||||
::webhooks/process-event-handler
|
||||
@@ -458,6 +468,9 @@
|
||||
{:cron #app/cron "0 0 0 * * ?" ;; daily
|
||||
:task :objects-gc}
|
||||
|
||||
{:cron #app/cron "0 0 0 * * ?" ;; daily
|
||||
:task :orphan-teams-gc}
|
||||
|
||||
{:cron #app/cron "0 0 0 * * ?" ;; daily
|
||||
:task :storage-gc-deleted}
|
||||
|
||||
@@ -486,7 +499,7 @@
|
||||
::mtx/metrics (ig/ref ::mtx/metrics)
|
||||
::db/pool (ig/ref ::db/pool)}
|
||||
|
||||
[::default ::wrk/worker]
|
||||
[::default ::wrk/runner]
|
||||
{::wrk/parallelism (cf/get ::worker-default-parallelism 1)
|
||||
::wrk/queue :default
|
||||
::rds/redis (ig/ref ::rds/redis)
|
||||
@@ -494,7 +507,7 @@
|
||||
::mtx/metrics (ig/ref ::mtx/metrics)
|
||||
::db/pool (ig/ref ::db/pool)}
|
||||
|
||||
[::webhook ::wrk/worker]
|
||||
[::webhook ::wrk/runner]
|
||||
{::wrk/parallelism (cf/get ::worker-webhook-parallelism 1)
|
||||
::wrk/queue :webhooks
|
||||
::rds/redis (ig/ref ::rds/redis)
|
||||
@@ -520,6 +533,15 @@
|
||||
:worker? (contains? cf/flags :backend-worker)
|
||||
:version (:full cf/version)))
|
||||
|
||||
(defn start-custom
|
||||
[config]
|
||||
(ig/load-namespaces config)
|
||||
(alter-var-root #'system (fn [sys]
|
||||
(when sys (ig/halt! sys))
|
||||
(-> config
|
||||
(ig/prep)
|
||||
(ig/init)))))
|
||||
|
||||
(defn stop
|
||||
[]
|
||||
(alter-var-root #'system (fn [sys]
|
||||
@@ -566,6 +588,11 @@
|
||||
(nrepl/start-server :bind "0.0.0.0" :port 6064 :handler cider-nrepl-handler))
|
||||
|
||||
(start)
|
||||
|
||||
(when (contains? cf/flags :v2-migration)
|
||||
(px/sleep 5000)
|
||||
(migrations.v2/migrate app.main/system))
|
||||
|
||||
(deref p))
|
||||
(catch Throwable cause
|
||||
(binding [*out* *err*]
|
||||
|
||||
@@ -32,9 +32,6 @@
|
||||
org.im4java.core.IMOperation
|
||||
org.im4java.core.Info))
|
||||
|
||||
(def default-max-file-size
|
||||
(* 1024 1024 30)) ; 30 MiB
|
||||
|
||||
(s/def ::path fs/path?)
|
||||
(s/def ::filename string?)
|
||||
(s/def ::size integer?)
|
||||
@@ -83,13 +80,14 @@
|
||||
|
||||
(defn validate-media-size!
|
||||
[upload]
|
||||
(when (> (:size upload) (cf/get :media-max-file-size default-max-file-size))
|
||||
(ex/raise :type :restriction
|
||||
:code :media-max-file-size-reached
|
||||
:hint (str/ffmt "the uploaded file size % is greater than the maximum %"
|
||||
(:size upload)
|
||||
default-max-file-size)))
|
||||
upload)
|
||||
(let [max-size (cf/get :media-max-file-size)]
|
||||
(when (> (:size upload) max-size)
|
||||
(ex/raise :type :restriction
|
||||
:code :media-max-file-size-reached
|
||||
:hint (str/ffmt "the uploaded file size % is greater than the maximum %"
|
||||
(:size upload)
|
||||
max-size)))
|
||||
upload))
|
||||
|
||||
(defmulti process :cmd)
|
||||
(defmulti process-error class)
|
||||
|
||||
@@ -337,7 +337,49 @@
|
||||
:fn (mg/resource "app/migrations/sql/0106-mod-team-table.sql")}
|
||||
|
||||
{:name "0107-mod-file-tagged-object-thumbnail-table"
|
||||
:fn (mg/resource "app/migrations/sql/0107-mod-file-tagged-object-thumbnail-table.sql")}])
|
||||
:fn (mg/resource "app/migrations/sql/0107-mod-file-tagged-object-thumbnail-table.sql")}
|
||||
|
||||
{:name "0107-add-deletion-protection-trigger-function"
|
||||
:fn (mg/resource "app/migrations/sql/0107-add-deletion-protection-trigger-function.sql")}
|
||||
|
||||
{:name "0108-mod-file-thumbnail-table"
|
||||
:fn (mg/resource "app/migrations/sql/0108-mod-file-thumbnail-table.sql")}
|
||||
|
||||
{:name "0109-mod-file-tagged-object-thumbnail-table"
|
||||
:fn (mg/resource "app/migrations/sql/0109-mod-file-tagged-object-thumbnail-table.sql")}
|
||||
|
||||
{:name "0110-mod-file-media-object-table"
|
||||
:fn (mg/resource "app/migrations/sql/0110-mod-file-media-object-table.sql")}
|
||||
|
||||
{:name "0111-mod-file-data-fragment-table"
|
||||
:fn (mg/resource "app/migrations/sql/0111-mod-file-data-fragment-table.sql")}
|
||||
|
||||
{:name "0112-mod-profile-table"
|
||||
:fn (mg/resource "app/migrations/sql/0112-mod-profile-table.sql")}
|
||||
|
||||
{:name "0113-mod-team-font-variant-table"
|
||||
:fn (mg/resource "app/migrations/sql/0113-mod-team-font-variant-table.sql")}
|
||||
|
||||
{:name "0114-mod-team-table"
|
||||
:fn (mg/resource "app/migrations/sql/0114-mod-team-table.sql")}
|
||||
|
||||
{:name "0115-mod-project-table"
|
||||
:fn (mg/resource "app/migrations/sql/0115-mod-project-table.sql")}
|
||||
|
||||
{:name "0116-mod-file-table"
|
||||
:fn (mg/resource "app/migrations/sql/0116-mod-file-table.sql")}
|
||||
|
||||
{:name "0117-mod-file-object-thumbnail-table"
|
||||
:fn (mg/resource "app/migrations/sql/0117-mod-file-object-thumbnail-table.sql")}
|
||||
|
||||
{:name "0118-mod-task-table"
|
||||
:fn (mg/resource "app/migrations/sql/0118-mod-task-table.sql")}
|
||||
|
||||
{:name "0119-mod-file-table"
|
||||
:fn (mg/resource "app/migrations/sql/0119-mod-file-table.sql")}
|
||||
|
||||
{:name "0120-mod-audit-log-table"
|
||||
:fn (mg/resource "app/migrations/sql/0120-mod-audit-log-table.sql")}])
|
||||
|
||||
(defn apply-migrations!
|
||||
[pool name migrations]
|
||||
|
||||
@@ -0,0 +1,8 @@
|
||||
CREATE OR REPLACE FUNCTION raise_deletion_protection()
  RETURNS TRIGGER AS $$
  BEGIN
    RAISE EXCEPTION 'unable to proceed to delete row on "%"', TG_TABLE_NAME
      USING HINT = 'disable deletion protection with "SET rules.deletion_protection TO off"';
    RETURN NULL;
  END;
$$ LANGUAGE plpgsql;
|
||||
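The raise_deletion_protection function above is wired as a BEFORE DELETE trigger in the migrations that follow; per its HINT, a maintenance session can bypass the protection by switching the setting off. A minimal sketch, with an illustrative table and predicate:

BEGIN;
-- Disable deletion protection only for this transaction.
SET LOCAL rules.deletion_protection TO 'off';
DELETE FROM file_thumbnail WHERE deleted_at IS NOT NULL;
COMMIT;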
@@ -0,0 +1,25 @@
|
||||
--- Add missing index for the deleted_at column; we include all related
--- columns because we expect the index to be small and expect to use
--- index-only scans.
|
||||
CREATE INDEX IF NOT EXISTS file_thumbnail__deleted_at__idx
|
||||
ON file_thumbnail (deleted_at, file_id, revn, media_id)
|
||||
WHERE deleted_at IS NOT NULL;
|
||||
|
||||
--- Add missing index for media_id column, used mainly for refs checking
|
||||
CREATE INDEX IF NOT EXISTS file_thumbnail__media_id__idx ON file_thumbnail (media_id);
|
||||
|
||||
--- Remove CASCADE from media_id and file_id foreign constraints
|
||||
ALTER TABLE file_thumbnail
|
||||
DROP CONSTRAINT file_thumbnail_file_id_fkey,
|
||||
ADD FOREIGN KEY (file_id) REFERENCES file(id) DEFERRABLE;
|
||||
|
||||
ALTER TABLE file_thumbnail
|
||||
DROP CONSTRAINT file_thumbnail_media_id_fkey,
|
||||
ADD FOREIGN KEY (media_id) REFERENCES storage_object(id) DEFERRABLE;
|
||||
|
||||
--- Add deletion protection
|
||||
CREATE OR REPLACE TRIGGER deletion_protection__tgr
|
||||
BEFORE DELETE ON file_thumbnail FOR EACH STATEMENT
|
||||
WHEN ((current_setting('rules.deletion_protection', true) IN ('on', '')) OR
|
||||
(current_setting('rules.deletion_protection', true) IS NULL))
|
||||
EXECUTE PROCEDURE raise_deletion_protection();
|
||||
@@ -0,0 +1,26 @@
|
||||
ALTER TABLE file_tagged_object_thumbnail
|
||||
ADD COLUMN updated_at timestamptz NULL,
|
||||
ADD COLUMN deleted_at timestamptz NULL;
|
||||
|
||||
--- Add index for the deleted_at column; we include all related columns
--- because we expect the index to be small and expect to use index-only
--- scans.
|
||||
CREATE INDEX IF NOT EXISTS file_tagged_object_thumbnail__deleted_at__idx
|
||||
ON file_tagged_object_thumbnail (deleted_at, file_id, object_id, media_id)
|
||||
WHERE deleted_at IS NOT NULL;
|
||||
|
||||
--- Remove CASCADE from media_id and file_id foreign constraints
|
||||
ALTER TABLE file_tagged_object_thumbnail
|
||||
DROP CONSTRAINT file_tagged_object_thumbnail_media_id_fkey,
|
||||
ADD FOREIGN KEY (media_id) REFERENCES storage_object(id) DEFERRABLE;
|
||||
|
||||
ALTER TABLE file_tagged_object_thumbnail
|
||||
DROP CONSTRAINT file_tagged_object_thumbnail_file_id_fkey,
|
||||
ADD FOREIGN KEY (file_id) REFERENCES file(id) DEFERRABLE;
|
||||
|
||||
--- Add deletion protection
|
||||
CREATE OR REPLACE TRIGGER deletion_protection__tgr
|
||||
BEFORE DELETE ON file_tagged_object_thumbnail FOR EACH STATEMENT
|
||||
WHEN ((current_setting('rules.deletion_protection', true) IN ('on', '')) OR
|
||||
(current_setting('rules.deletion_protection', true) IS NULL))
|
||||
EXECUTE PROCEDURE raise_deletion_protection();
|
||||
@@ -0,0 +1,27 @@
|
||||
--- Fix legacy naming
|
||||
ALTER INDEX media_object_pkey RENAME TO file_media_object_pkey;
|
||||
ALTER INDEX media_object__file_id__idx RENAME TO file_media_object__file_id__idx;
|
||||
|
||||
--- Create index for the deleted_at column
|
||||
CREATE INDEX IF NOT EXISTS file_media_object__deleted_at__idx
|
||||
ON file_media_object (deleted_at, id, media_id)
|
||||
WHERE deleted_at IS NOT NULL;
|
||||
|
||||
--- Drop the now unnecessary trigger because this will be handled by the
|
||||
--- application code
|
||||
DROP TRIGGER file_media_object__on_delete__tgr ON file_media_object;
|
||||
DROP FUNCTION on_delete_file_media_object ( ) CASCADE;
|
||||
DROP TRIGGER file_media_object__on_insert__tgr ON file_media_object;
|
||||
DROP FUNCTION on_media_object_insert () CASCADE;
|
||||
|
||||
--- Remove CASCADE from file FOREIGN KEY
|
||||
ALTER TABLE file_media_object
|
||||
DROP CONSTRAINT file_media_object_file_id_fkey,
|
||||
ADD FOREIGN KEY (file_id) REFERENCES file(id) DEFERRABLE;
|
||||
|
||||
--- Add deletion protection
|
||||
CREATE OR REPLACE TRIGGER deletion_protection__tgr
|
||||
BEFORE DELETE ON file_media_object FOR EACH STATEMENT
|
||||
WHEN ((current_setting('rules.deletion_protection', true) IN ('on', '')) OR
|
||||
(current_setting('rules.deletion_protection', true) IS NULL))
|
||||
EXECUTE PROCEDURE raise_deletion_protection();
|
||||
@@ -0,0 +1,9 @@
|
||||
ALTER TABLE file_data_fragment
|
||||
ADD COLUMN deleted_at timestamptz NULL;
|
||||
|
||||
--- Add index for the deleted_at column; we include all related columns
--- because we expect the index to be small and expect to use index-only
--- scans.
|
||||
CREATE INDEX IF NOT EXISTS file_data_fragment__deleted_at__idx
|
||||
ON file_data_fragment (deleted_at, file_id, id)
|
||||
WHERE deleted_at IS NOT NULL;
|
||||
backend/src/app/migrations/sql/0112-mod-profile-table.sql (new file, 15 lines)
@@ -0,0 +1,15 @@
|
||||
ALTER TABLE profile
|
||||
DROP CONSTRAINT profile_photo_id_fkey,
|
||||
ADD FOREIGN KEY (photo_id) REFERENCES storage_object(id) DEFERRABLE,
|
||||
DROP CONSTRAINT profile_default_project_id_fkey,
|
||||
ADD FOREIGN KEY (default_project_id) REFERENCES project(id) DEFERRABLE,
|
||||
DROP CONSTRAINT profile_default_team_id_fkey,
|
||||
ADD FOREIGN KEY (default_team_id) REFERENCES team(id) DEFERRABLE;
|
||||
|
||||
--- Add deletion protection
|
||||
CREATE OR REPLACE TRIGGER deletion_protection__tgr
|
||||
BEFORE DELETE ON profile FOR EACH STATEMENT
|
||||
WHEN ((current_setting('rules.deletion_protection', true) IN ('on', '')) OR
|
||||
(current_setting('rules.deletion_protection', true) IS NULL))
|
||||
EXECUTE PROCEDURE raise_deletion_protection();
|
||||
|
||||
@@ -0,0 +1,20 @@
|
||||
--- Remove ON DELETE SET NULL from the foreign key constraints on the
--- storage_object table
|
||||
ALTER TABLE team_font_variant
|
||||
DROP CONSTRAINT team_font_variant_otf_file_id_fkey,
|
||||
ADD FOREIGN KEY (otf_file_id) REFERENCES storage_object(id) DEFERRABLE,
|
||||
DROP CONSTRAINT team_font_variant_ttf_file_id_fkey,
|
||||
ADD FOREIGN KEY (ttf_file_id) REFERENCES storage_object(id) DEFERRABLE,
|
||||
DROP CONSTRAINT team_font_variant_woff1_file_id_fkey,
|
||||
ADD FOREIGN KEY (woff1_file_id) REFERENCES storage_object(id) DEFERRABLE,
|
||||
DROP CONSTRAINT team_font_variant_woff2_file_id_fkey,
|
||||
ADD FOREIGN KEY (woff2_file_id) REFERENCES storage_object(id) DEFERRABLE,
|
||||
DROP CONSTRAINT team_font_variant_team_id_fkey,
|
||||
ADD FOREIGN KEY (team_id) REFERENCES team(id) DEFERRABLE;
|
||||
|
||||
--- Add deletion protection
|
||||
CREATE OR REPLACE TRIGGER deletion_protection__tgr
|
||||
BEFORE DELETE ON team_font_variant FOR EACH STATEMENT
|
||||
WHEN ((current_setting('rules.deletion_protection', true) IN ('on', '')) OR
|
||||
(current_setting('rules.deletion_protection', true) IS NULL))
|
||||
EXECUTE PROCEDURE raise_deletion_protection();
|
||||
backend/src/app/migrations/sql/0114-mod-team-table.sql (new file, 10 lines)
@@ -0,0 +1,10 @@
|
||||
--- Add deletion protection
|
||||
CREATE OR REPLACE TRIGGER deletion_protection__tgr
|
||||
BEFORE DELETE ON team FOR EACH STATEMENT
|
||||
WHEN ((current_setting('rules.deletion_protection', true) IN ('on', '')) OR
|
||||
(current_setting('rules.deletion_protection', true) IS NULL))
|
||||
EXECUTE PROCEDURE raise_deletion_protection();
|
||||
|
||||
ALTER TABLE team
|
||||
DROP CONSTRAINT team_photo_id_fkey,
|
||||
ADD FOREIGN KEY (photo_id) REFERENCES storage_object(id) DEFERRABLE;
|
||||
@@ -0,0 +1,3 @@
|
||||
ALTER TABLE project
  DROP CONSTRAINT project_team_id_fkey,
  ADD FOREIGN KEY (team_id) REFERENCES team(id) DEFERRABLE;
|
||||
backend/src/app/migrations/sql/0116-mod-file-table.sql (new file, 3 lines)
@@ -0,0 +1,3 @@
|
||||
ALTER TABLE file
  DROP CONSTRAINT file_project_id_fkey,
  ADD FOREIGN KEY (project_id) REFERENCES project(id) DEFERRABLE;
|
||||
@@ -0,0 +1,12 @@
|
||||
ALTER TABLE file_object_thumbnail
|
||||
DROP CONSTRAINT file_object_thumbnail_file_id_fkey,
|
||||
ADD FOREIGN KEY (file_id) REFERENCES file(id) DEFERRABLE,
|
||||
DROP CONSTRAINT file_object_thumbnail_media_id_fkey,
|
||||
ADD FOREIGN KEY (media_id) REFERENCES storage_object(id) DEFERRABLE;
|
||||
|
||||
--- Mark all related storage_object rows as touched
|
||||
-- UPDATE storage_object SET touched_at = now()
|
||||
-- WHERE id IN (SELECT DISTINCT media_id
|
||||
-- FROM file_object_thumbnail
|
||||
-- WHERE media_id IS NOT NULL)
|
||||
-- AND touched_at IS NULL;
|
||||
backend/src/app/migrations/sql/0118-mod-task-table.sql (new file, 12 lines)
@@ -0,0 +1,12 @@
|
||||
-- Removes the partitioning.
|
||||
CREATE TABLE new_task (LIKE task INCLUDING ALL);
|
||||
INSERT INTO new_task SELECT * FROM task;
|
||||
ALTER TABLE task RENAME TO old_task;
|
||||
ALTER TABLE new_task RENAME TO task;
|
||||
DROP TABLE old_task;
|
||||
ALTER INDEX new_task_label_name_queue_idx RENAME TO task__label_name_queue__idx;
|
||||
ALTER INDEX new_task_scheduled_at_queue_idx RENAME TO task__scheduled_at_queue__idx;
|
||||
ALTER TABLE task DROP CONSTRAINT new_task_pkey;
|
||||
ALTER TABLE task ADD PRIMARY KEY (id);
|
||||
ALTER TABLE task ALTER COLUMN created_at SET DEFAULT now();
|
||||
ALTER TABLE task ALTER COLUMN modified_at SET DEFAULT now();
|
||||
backend/src/app/migrations/sql/0119-mod-file-table.sql (new file, 2 lines)
@@ -0,0 +1,2 @@
|
||||
ALTER TABLE file
  ADD COLUMN version integer NULL;
|
||||
backend/src/app/migrations/sql/0120-mod-audit-log-table.sql (new file, 11 lines)
@@ -0,0 +1,11 @@
|
||||
CREATE TABLE new_audit_log (LIKE audit_log INCLUDING ALL);
|
||||
INSERT INTO new_audit_log SELECT * FROM audit_log;
|
||||
ALTER TABLE audit_log RENAME TO old_audit_log;
|
||||
ALTER TABLE new_audit_log RENAME TO audit_log;
|
||||
DROP TABLE old_audit_log;
|
||||
|
||||
DROP INDEX new_audit_log_id_archived_at_idx;
|
||||
ALTER TABLE audit_log DROP CONSTRAINT new_audit_log_pkey;
|
||||
ALTER TABLE audit_log ADD PRIMARY KEY (id);
|
||||
ALTER TABLE audit_log ALTER COLUMN created_at SET DEFAULT now();
|
||||
ALTER TABLE audit_log ALTER COLUMN tracked_at SET DEFAULT now();
|
||||
backend/src/app/migrations/v2.clj (new file, 103 lines)
@@ -0,0 +1,103 @@
|
||||
;; This Source Code Form is subject to the terms of the Mozilla Public
|
||||
;; License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
;;
|
||||
;; Copyright (c) KALEIDOS INC
|
||||
|
||||
(ns app.migrations.v2
|
||||
(:require
|
||||
[app.common.exceptions :as ex]
|
||||
[app.common.logging :as l]
|
||||
[app.db :as db]
|
||||
[app.features.components-v2 :as feat]
|
||||
[app.setup :as setup]
|
||||
[app.util.time :as dt]))
|
||||
|
||||
(def ^:private sql:get-teams
|
||||
"SELECT id, features,
|
||||
row_number() OVER (ORDER BY created_at DESC) AS rown
|
||||
FROM team
|
||||
WHERE deleted_at IS NULL
|
||||
AND (not (features @> '{components/v2}') OR features IS NULL)
|
||||
ORDER BY created_at DESC")
|
||||
|
||||
(defn- get-teams
|
||||
[conn]
|
||||
(->> (db/cursor conn [sql:get-teams] {:chunk-size 1})
|
||||
(map feat/decode-row)))
|
||||
|
||||
(defn- migrate-teams
|
||||
[{:keys [::db/conn] :as system}]
|
||||
;; Allow a long-running transaction for this connection
|
||||
(db/exec-one! conn ["SET LOCAL idle_in_transaction_session_timeout = 0"])
|
||||
|
||||
;; Do not allow other migrations to run at the same time
|
||||
(db/xact-lock! conn 0)
|
||||
|
||||
;; Run teams migration
|
||||
(run! (fn [{:keys [id rown]}]
|
||||
(try
|
||||
(-> (assoc system ::db/rollback false)
|
||||
(feat/migrate-team! id
|
||||
:rown rown
|
||||
:label "v2-migration"
|
||||
:validate? false
|
||||
:skip-on-graphics-error? true))
|
||||
(catch Throwable _
|
||||
(swap! feat/*stats* update :errors (fnil inc 0))
|
||||
(l/wrn :hint "error on migrating team (skiping)"))))
|
||||
(get-teams conn))
|
||||
|
||||
(setup/set-prop! system :v2-migrated true))
|
||||
|
||||
(defn migrate
|
||||
[system]
|
||||
(let [tpoint (dt/tpoint)
|
||||
stats (atom {})
|
||||
migrated? (setup/get-prop system :v2-migrated false)]
|
||||
|
||||
(when-not migrated?
|
||||
(l/inf :hint "v2 migration started")
|
||||
(try
|
||||
(binding [feat/*stats* stats]
|
||||
(db/tx-run! system migrate-teams))
|
||||
|
||||
(let [stats (deref stats)
|
||||
elapsed (dt/format-duration (tpoint))]
|
||||
(l/inf :hint "v2 migration finished"
|
||||
:files (:processed-files stats)
|
||||
:teams (:processed-teams stats)
|
||||
:errors (:errors stats)
|
||||
:elapsed elapsed))
|
||||
|
||||
(catch Throwable cause
|
||||
(l/err :hint "error on aplying v2 migration" :cause cause))))))
|
||||
|
||||
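For reference, migrate is the same entry point app.main calls when the :v2-migration flag is enabled (shown in an earlier hunk). A sketch of running it by hand from a REPL attached to an already started backend:

;; Sketch only: mirrors the call made from app.main.
(require '[app.migrations.v2 :as migrations.v2])
(migrations.v2/migrate app.main/system)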
(def ^:private required-services
|
||||
[[:app.main/assets :app.storage.s3/backend]
|
||||
[:app.main/assets :app.storage.fs/backend]
|
||||
:app.storage/storage
|
||||
:app.db/pool
|
||||
:app.setup/props
|
||||
:app.svgo/optimizer
|
||||
:app.metrics/metrics
|
||||
:app.migrations/migrations
|
||||
:app.http.client/client])
|
||||
|
||||
(defn -main
|
||||
[& _args]
|
||||
(try
|
||||
(let [config-var (requiring-resolve 'app.main/system-config)
|
||||
start-var (requiring-resolve 'app.main/start-custom)
|
||||
stop-var (requiring-resolve 'app.main/stop)
|
||||
system-var (requiring-resolve 'app.main/system)
|
||||
config (select-keys @config-var required-services)]
|
||||
|
||||
(start-var config)
|
||||
(migrate @system-var)
|
||||
(stop-var)
|
||||
(System/exit 0))
|
||||
(catch Throwable cause
|
||||
(ex/print-throwable cause)
|
||||
(flush)
|
||||
(System/exit -1))))
|
||||
@@ -91,7 +91,7 @@
|
||||
(s/def ::connect? ::us/boolean)
|
||||
(s/def ::io-threads ::us/integer)
|
||||
(s/def ::worker-threads ::us/integer)
|
||||
(s/def ::cache some?)
|
||||
(s/def ::cache cache/cache?)
|
||||
|
||||
(s/def ::redis
|
||||
(s/keys :req [::resources
|
||||
@@ -168,7 +168,7 @@
|
||||
|
||||
(defn- shutdown-resources
|
||||
[{:keys [::resources ::cache ::timer]}]
|
||||
(cache/invalidate-all! cache)
|
||||
(cache/invalidate! cache)
|
||||
|
||||
(when resources
|
||||
(.shutdown ^ClientResources resources))
|
||||
@@ -211,7 +211,8 @@
|
||||
(defn get-or-connect
|
||||
[{:keys [::cache] :as state} key options]
|
||||
(us/assert! ::redis state)
|
||||
(let [connection (cache/get cache key (fn [_] (connect* state options)))]
|
||||
(let [create (fn [_] (connect* state options))
|
||||
connection (cache/get cache key create)]
|
||||
(-> state
|
||||
(dissoc ::cache)
|
||||
(assoc ::connection connection))))
|
||||
|
||||
@@ -27,10 +27,12 @@
|
||||
[app.rpc.helpers :as rph]
|
||||
[app.rpc.retry :as retry]
|
||||
[app.rpc.rlimit :as rlimit]
|
||||
[app.setup :as-alias setup]
|
||||
[app.storage :as-alias sto]
|
||||
[app.util.services :as sv]
|
||||
[app.util.time :as dt]
|
||||
[clojure.spec.alpha :as s]
|
||||
[cuerdas.core :as str]
|
||||
[integrant.core :as ig]
|
||||
[promesa.core :as p]
|
||||
[ring.request :as rreq]
|
||||
@@ -71,24 +73,31 @@
|
||||
(defn- rpc-handler
|
||||
"Ring handler that dispatches cmd requests and convert between
|
||||
internal async flow into ring async flow."
|
||||
[methods {:keys [params path-params] :as request}]
|
||||
(let [type (keyword (:type path-params))
|
||||
etag (rreq/get-header request "if-none-match")
|
||||
profile-id (or (::session/profile-id request)
|
||||
(::actoken/profile-id request))
|
||||
[methods {:keys [params path-params method] :as request}]
|
||||
(let [handler-name (:type path-params)
|
||||
etag (rreq/get-header request "if-none-match")
|
||||
profile-id (or (::session/profile-id request)
|
||||
(::actoken/profile-id request))
|
||||
|
||||
data (-> params
|
||||
(assoc ::request-at (dt/now))
|
||||
(assoc ::session/id (::session/id request))
|
||||
(assoc ::cond/key etag)
|
||||
(cond-> (uuid? profile-id)
|
||||
(assoc ::profile-id profile-id)))
|
||||
data (-> params
|
||||
(assoc ::request-at (dt/now))
|
||||
(assoc ::session/id (::session/id request))
|
||||
(assoc ::cond/key etag)
|
||||
(cond-> (uuid? profile-id)
|
||||
(assoc ::profile-id profile-id)))
|
||||
|
||||
data (vary-meta data assoc ::http/request request)
|
||||
method (get methods type default-handler)]
|
||||
data (vary-meta data assoc ::http/request request)
|
||||
handler-fn (get methods (keyword handler-name) default-handler)]
|
||||
|
||||
(when (and (or (= method :get)
|
||||
(= method :head))
|
||||
(not (str/starts-with? handler-name "get-")))
|
||||
(ex/raise :type :restriction
|
||||
:code :method-not-allowed
|
||||
:hint "method not allowed for this request"))
|
||||
|
||||
(binding [cond/*enabled* true]
|
||||
(let [response (method data)]
|
||||
(let [response (handler-fn data)]
|
||||
(handle-response request response)))))
|
||||
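The new handler-name check above rejects GET and HEAD requests whose RPC command does not start with "get-". Illustrative outcomes (the paths are assumptions, not taken from this diff):

;; GET  .../command/get-profile  -> dispatched normally
;; HEAD .../command/get-file     -> dispatched normally
;; GET  .../command/update-file  -> ex/raise :restriction / :method-not-allowed
;; POST .../command/update-file  -> dispatched normally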
|
||||
(defn- wrap-metrics
|
||||
@@ -139,24 +148,21 @@
|
||||
(f cfg (us/conform spec params)))
|
||||
f)))
|
||||
|
||||
;; TODO: integrate with sm/define
|
||||
|
||||
(defn- wrap-params-validation
|
||||
[_ f mdata]
|
||||
(if-let [schema (::sm/params mdata)]
|
||||
(let [schema (if (sm/lazy-schema? schema)
|
||||
schema
|
||||
(sm/define schema))
|
||||
validate (sm/validator schema)
|
||||
(let [validate (sm/validator schema)
|
||||
explain (sm/explainer schema)
|
||||
decode (sm/decoder schema)]
|
||||
(fn [cfg params]
|
||||
(let [params (decode params)]
|
||||
(if (validate params)
|
||||
(f cfg params)
|
||||
(ex/raise :type :validation
|
||||
:code :params-validation
|
||||
::sm/explain (explain params))))))
|
||||
|
||||
(let [params (d/without-qualified params)]
|
||||
(ex/raise :type :validation
|
||||
:code :params-validation
|
||||
::sm/explain (explain params)))))))
|
||||
f))
|
||||
|
||||
(defn- wrap-output-validation
|
||||
@@ -195,7 +201,7 @@
|
||||
|
||||
(defn- wrap
|
||||
[cfg f mdata]
|
||||
(l/debug :hint "register method" :name (::sv/name mdata))
|
||||
(l/trc :hint "register method" :name (::sv/name mdata))
|
||||
(let [f (wrap-all cfg f mdata)]
|
||||
(partial f cfg)))
|
||||
|
||||
@@ -243,10 +249,9 @@
|
||||
::ldap/provider
|
||||
::sto/storage
|
||||
::mtx/metrics
|
||||
::main/props]
|
||||
::setup/props]
|
||||
:opt [::climit
|
||||
::rlimit]
|
||||
:req-un [::db/pool]))
|
||||
::rlimit]))
|
||||
|
||||
(defmethod ig/init-key ::methods
|
||||
[_ cfg]
|
||||
@@ -261,7 +266,7 @@
|
||||
(defmethod ig/pre-init-spec ::routes [_]
|
||||
(s/keys :req [::methods
|
||||
::db/pool
|
||||
::main/props
|
||||
::setup/props
|
||||
::session/manager]))
|
||||
|
||||
(defmethod ig/init-key ::routes
|
||||
|
||||
@@ -20,40 +20,35 @@
|
||||
[app.util.time :as dt]
|
||||
[app.worker :as-alias wrk]
|
||||
[clojure.edn :as edn]
|
||||
[clojure.set :as set]
|
||||
[clojure.spec.alpha :as s]
|
||||
[datoteka.fs :as fs]
|
||||
[integrant.core :as ig]
|
||||
[promesa.core :as p]
|
||||
[promesa.exec :as px]
|
||||
[promesa.exec.bulkhead :as pbh])
|
||||
(:import
|
||||
clojure.lang.ExceptionInfo))
|
||||
clojure.lang.ExceptionInfo
|
||||
java.util.concurrent.atomic.AtomicLong))
|
||||
|
||||
(set! *warn-on-reflection* true)
|
||||
|
||||
(defn- id->str
|
||||
[id]
|
||||
(-> (str id)
|
||||
(subs 1)))
|
||||
([id]
|
||||
(-> (str id)
|
||||
(subs 1)))
|
||||
([id key]
|
||||
(if key
|
||||
(str (-> (str id) (subs 1)) "/" key)
|
||||
(id->str id))))
|
||||
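For reference, the new two-arity id->str folds an optional key into the logged identifier. Illustrative results:

;; (id->str :root/global)            ;; => "root/global"
;; (id->str :root/by-profile nil)    ;; => "root/by-profile"
;; (id->str :root/by-profile "1234") ;; => "root/by-profile/1234"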
|
||||
(defn- create-bulkhead-cache
|
||||
[config]
|
||||
(letfn [(load-fn [[id skey]]
|
||||
(when-let [config (get config id)]
|
||||
(l/trc :hint "insert into cache" :id (id->str id) :key skey)
|
||||
(pbh/create :permits (or (:permits config) (:concurrency config))
|
||||
:queue (or (:queue config) (:queue-size config))
|
||||
:timeout (:timeout config)
|
||||
:type :semaphore)))
|
||||
|
||||
(on-remove [key _ cause]
|
||||
(defn- create-cache
|
||||
[{:keys [::wrk/executor]}]
|
||||
(letfn [(on-remove [key _ cause]
|
||||
(let [[id skey] key]
|
||||
(l/trc :hint "evict from cache" :id (id->str id) :key skey :reason (str cause))))]
|
||||
|
||||
(cache/create :executor :same-thread
|
||||
(l/trc :hint "disposed" :id (id->str id skey) :reason (str cause))))]
|
||||
(cache/create :executor executor
|
||||
:on-remove on-remove
|
||||
:keepalive "5m"
|
||||
:load-fn load-fn)))
|
||||
:keepalive "5m")))
|
||||
|
||||
(s/def ::config/permits ::us/integer)
|
||||
(s/def ::config/queue ::us/integer)
|
||||
@@ -70,7 +65,7 @@
|
||||
|
||||
(s/def ::path ::fs/path)
|
||||
(defmethod ig/pre-init-spec ::rpc/climit [_]
|
||||
(s/keys :req [::mtx/metrics ::path]))
|
||||
(s/keys :req [::mtx/metrics ::wrk/executor ::path]))
|
||||
|
||||
(defmethod ig/init-key ::rpc/climit
|
||||
[_ {:keys [::path ::mtx/metrics] :as cfg}]
|
||||
@@ -78,7 +73,7 @@
|
||||
(when-let [params (some->> path slurp edn/read-string)]
|
||||
(l/inf :hint "initializing concurrency limit" :config (str path))
|
||||
(us/verify! ::config params)
|
||||
{::cache (create-bulkhead-cache params)
|
||||
{::cache (create-cache cfg)
|
||||
::config params
|
||||
::mtx/metrics metrics})))
|
||||
|
||||
@@ -89,119 +84,191 @@
|
||||
(s/def ::rpc/climit
|
||||
(s/nilable ::instance))
|
||||
|
||||
(defn- create-limiter
|
||||
[config [id skey]]
|
||||
(l/trc :hint "created" :id (id->str id skey))
|
||||
(pbh/create :permits (or (:permits config) (:concurrency config))
|
||||
:queue (or (:queue config) (:queue-size config))
|
||||
:timeout (:timeout config)
|
||||
:type :semaphore))
|
||||
|
||||
|
||||
(defn measure!
|
||||
[metrics mlabels stats elapsed]
|
||||
(let [mpermits (:max-permits stats)
|
||||
permits (:permits stats)
|
||||
queue (:queue stats)
|
||||
queue (- queue mpermits)
|
||||
queue (if (neg? queue) 0 queue)]
|
||||
|
||||
(mtx/run! metrics
|
||||
:id :rpc-climit-queue
|
||||
:val queue
|
||||
:labels mlabels)
|
||||
|
||||
(mtx/run! metrics
|
||||
:id :rpc-climit-permits
|
||||
:val permits
|
||||
:labels mlabels)
|
||||
|
||||
(when elapsed
|
||||
(mtx/run! metrics
|
||||
:id :rpc-climit-timing
|
||||
:val (inst-ms elapsed)
|
||||
:labels mlabels))))
|
||||
|
||||
(defn log!
|
||||
[action req-id stats limit-id limit-label params elapsed]
|
||||
(let [mpermits (:max-permits stats)
|
||||
queue (:queue stats)
|
||||
queue (- queue mpermits)
|
||||
queue (if (neg? queue) 0 queue)
|
||||
level (if (pos? queue) :warn :trace)]
|
||||
|
||||
(l/log level
|
||||
:hint action
|
||||
:req req-id
|
||||
:id limit-id
|
||||
:label limit-label
|
||||
:queue queue
|
||||
:elapsed (some-> elapsed dt/format-duration)
|
||||
:params (-> (select-keys params [::rpc/profile-id :file-id :profile-id])
|
||||
(set/rename-keys {::rpc/profile-id :profile-id})
|
||||
(update-vals str)))))
|
||||
|
||||
(def ^:private idseq (AtomicLong. 0))
|
||||
|
||||
(defn- invoke
|
||||
[limiter metrics limit-id limit-key limit-label handler params]
|
||||
(let [tpoint (dt/tpoint)
|
||||
mlabels (into-array String [(id->str limit-id)])
|
||||
limit-id (id->str limit-id limit-key)
|
||||
stats (pbh/get-stats limiter)
|
||||
req-id (.incrementAndGet ^AtomicLong idseq)]
|
||||
|
||||
(try
|
||||
(measure! metrics mlabels stats nil)
|
||||
(log! "enqueued" req-id stats limit-id limit-label params nil)
|
||||
(px/invoke! limiter (fn []
|
||||
(let [elapsed (tpoint)
|
||||
stats (pbh/get-stats limiter)]
|
||||
|
||||
(measure! metrics mlabels stats elapsed)
|
||||
(log! "acquired" req-id stats limit-id limit-label params elapsed)
|
||||
|
||||
(handler params))))
|
||||
|
||||
(catch ExceptionInfo cause
|
||||
(let [{:keys [type code]} (ex-data cause)]
|
||||
(if (= :bulkhead-error type)
|
||||
(let [elapsed (tpoint)]
|
||||
(log! "rejected" req-id stats limit-id limit-label params elapsed)
|
||||
(ex/raise :type :concurrency-limit
|
||||
:code code
|
||||
:hint "concurrency limit reached"
|
||||
:cause cause))
|
||||
(throw cause))))
|
||||
|
||||
(finally
|
||||
(let [elapsed (tpoint)
|
||||
stats (pbh/get-stats limiter)]
|
||||
|
||||
(measure! metrics mlabels stats nil)
|
||||
(log! "finished" req-id stats limit-id limit-label params elapsed))))))
|
||||
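The resulting lifecycle for a single request through a limiter, summarized here as a hedged sketch of the log actions emitted above:

;; enqueued -> acquired -> finished   ; normal path
;; enqueued -> rejected -> finished   ; bulkhead queue full: invoke raises
;;                                    ; an exception of :type :concurrency-limit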
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; MIDDLEWARE
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
(def ^:private noop-fn (constantly nil))
|
||||
(def ^:private global-limits
|
||||
[[:root/global noop-fn]
|
||||
[:root/by-profile ::rpc/profile-id]])
|
||||
|
||||
(defn- get-limits
|
||||
[cfg]
|
||||
(when-let [ref (get cfg ::id)]
|
||||
(cond
|
||||
(keyword? ref)
|
||||
[[ref]]
|
||||
|
||||
(and (vector? ref)
|
||||
(keyword? (first ref)))
|
||||
[ref]
|
||||
|
||||
(and (vector? ref)
|
||||
(vector? (first ref)))
|
||||
(rseq ref)
|
||||
|
||||
:else
|
||||
(throw (IllegalArgumentException. "unable to normalize limit")))))
|
||||
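A hedged illustration of the three reference shapes get-limits normalizes; the :update-file* ids are hypothetical:

(get-limits {::id :update-file})
;; => [[:update-file]]
(get-limits {::id [:update-file ::rpc/profile-id]})
;; => [[:update-file ::rpc/profile-id]]
(get-limits {::id [[:update-file/global]
                   [:update-file/by-profile ::rpc/profile-id]]})
;; => ([:update-file/by-profile ::rpc/profile-id] [:update-file/global])
;;    rseq reverses the declared order before the handler is wrapped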
|
||||
(defn wrap
|
||||
[{:keys [::rpc/climit ::mtx/metrics]} handler mdata]
|
||||
(let [cache (::cache climit)
|
||||
config (::config climit)
|
||||
label (::sv/name mdata)]
|
||||
|
||||
(if climit
|
||||
(reduce (fn [handler [limit-id key-fn]]
|
||||
(if-let [config (get config limit-id)]
|
||||
(let [key-fn (or key-fn noop-fn)]
|
||||
(l/trc :hint "instrumenting method"
|
||||
:method label
|
||||
:limit (id->str limit-id)
|
||||
:timeout (:timeout config)
|
||||
:permits (:permits config)
|
||||
:queue (:queue config)
|
||||
:keyed (not= key-fn noop-fn))
|
||||
|
||||
(if (and (= key-fn ::rpc/profile-id)
|
||||
(false? (::rpc/auth mdata true)))
|
||||
|
||||
;; We don't enforce the by-profile limit on methods that do
;; not require authentication
|
||||
handler
|
||||
|
||||
(fn [cfg params]
|
||||
(let [limit-key (key-fn params)
|
||||
cache-key [limit-id limit-key]
|
||||
limiter (cache/get cache cache-key (partial create-limiter config))
|
||||
handler (partial handler cfg)]
|
||||
(invoke limiter metrics limit-id limit-key label handler params)))))
|
||||
|
||||
(do
|
||||
(l/wrn :hint "no config found for specified queue" :id (id->str limit-id))
|
||||
handler)))
|
||||
|
||||
handler
|
||||
(concat global-limits (get-limits mdata)))
|
||||
handler)))
|
||||
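For reference, a hedged sketch of how a method could declare such limits through its metadata; ::some-method, the limit ids and the handle fn are hypothetical placeholders:

(sv/defmethod ::some-method
  {::climit/id [[:some-method/global]
                [:some-method/by-profile ::rpc/profile-id]]}
  [cfg params]
  (handle cfg params))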
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; PUBLIC API
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
(defn- build-exec-chain
|
||||
[{:keys [::label ::rpc/climit ::mtx/metrics] :as cfg} f]
|
||||
(let [config (get climit ::config)
|
||||
cache (get climit ::cache)]
|
||||
(reduce (fn [handler [limit-id limit-key :as ckey]]
|
||||
(if-let [config (get config limit-id)]
|
||||
(fn [cfg params]
|
||||
(let [limiter (cache/get cache ckey (partial create-limiter config))
|
||||
handler (partial handler cfg)]
|
||||
(invoke limiter metrics limit-id limit-key label handler params)))
|
||||
(do
|
||||
(l/wrn :hint "config not found" :label label :id limit-id)
|
||||
f)))
|
||||
f
|
||||
(get-limits cfg))))
|
||||
|
||||
(defn invoke!
|
||||
[cache metrics id key f]
|
||||
(if-let [limiter (cache/get cache [id key])]
|
||||
(let [tpoint (dt/tpoint)
|
||||
labels (into-array String [(id->str id)])
|
||||
wrapped (fn []
|
||||
(let [elapsed (tpoint)
|
||||
stats (pbh/get-stats limiter)]
|
||||
(l/trc :hint "acquired"
|
||||
:id (id->str id)
|
||||
:key key
|
||||
:permits (:permits stats)
|
||||
:queue (:queue stats)
|
||||
:max-permits (:max-permits stats)
|
||||
:max-queue (:max-queue stats)
|
||||
:elapsed (dt/format-duration elapsed))
|
||||
|
||||
(mtx/run! metrics
|
||||
:id :rpc-climit-timing
|
||||
:val (inst-ms elapsed)
|
||||
:labels labels)
|
||||
(try
|
||||
(f)
|
||||
(finally
|
||||
(let [elapsed (tpoint)]
|
||||
(l/trc :hint "finished"
|
||||
:id (id->str id)
|
||||
:key key
|
||||
:permits (:permits stats)
|
||||
:queue (:queue stats)
|
||||
:max-permits (:max-permits stats)
|
||||
:max-queue (:max-queue stats)
|
||||
:elapsed (dt/format-duration elapsed)))))))
|
||||
measure!
|
||||
(fn [stats]
|
||||
(mtx/run! metrics
|
||||
:id :rpc-climit-queue
|
||||
:val (:queue stats)
|
||||
:labels labels)
|
||||
(mtx/run! metrics
|
||||
:id :rpc-climit-permits
|
||||
:val (:permits stats)
|
||||
:labels labels))]
|
||||
|
||||
(try
|
||||
(let [stats (pbh/get-stats limiter)]
|
||||
(measure! stats)
|
||||
(l/trc :hint "enqueued"
|
||||
:id (id->str id)
|
||||
:key key
|
||||
:permits (:permits stats)
|
||||
:queue (:queue stats)
|
||||
:max-permits (:max-permits stats)
|
||||
:max-queue (:max-queue stats))
|
||||
(pbh/invoke! limiter wrapped))
|
||||
(catch ExceptionInfo cause
|
||||
(let [{:keys [type code]} (ex-data cause)]
|
||||
(if (= :bulkhead-error type)
|
||||
(ex/raise :type :concurrency-limit
|
||||
:code code
|
||||
:hint "concurrency limit reached")
|
||||
(throw cause))))
|
||||
|
||||
(finally
|
||||
(measure! (pbh/get-stats limiter)))))
|
||||
|
||||
(do
|
||||
(l/wrn :hint "unable to load limiter" :id (id->str id))
|
||||
(f))))
|
||||
|
||||
(defn configure
|
||||
[{:keys [::rpc/climit]} id]
|
||||
(us/assert! ::rpc/climit climit)
|
||||
(assoc climit ::id id))
|
||||
|
||||
(defn run!
|
||||
"Run a function in context of climit.
|
||||
Intended to be used in virtual threads."
|
||||
([{:keys [::id ::cache ::mtx/metrics]} f]
|
||||
(if (and cache id)
|
||||
(invoke! cache metrics id nil f)
|
||||
(f)))
|
||||
|
||||
([{:keys [::id ::cache ::mtx/metrics]} f executor]
|
||||
(let [f #(p/await! (px/submit! executor f))]
|
||||
(if (and cache id)
|
||||
(invoke! cache metrics id nil f)
|
||||
(f)))))
|
||||
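A hedged usage sketch of configure plus run!, assuming cfg carries ::rpc/climit and executor is an ExecutorService; :process-font and render-font! are hypothetical stand-ins:

(-> (configure cfg :process-font)
    (run! (fn [] (render-font! params)) executor))
;; or, when already running on a virtual thread:
(run! (configure cfg :process-font)
      (fn [] (render-font! params)))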
|
||||
(def noop-fn (constantly nil))
|
||||
|
||||
(defn wrap
|
||||
[{:keys [::rpc/climit ::mtx/metrics]} f {:keys [::id ::key-fn] :or {key-fn noop-fn} :as mdata}]
|
||||
(if (and (some? climit) (some? id))
|
||||
(if-let [config (get-in climit [::config id])]
|
||||
(let [cache (::cache climit)]
|
||||
(l/dbg :hint "instrumenting method"
|
||||
:limit (id->str id)
|
||||
:service-name (::sv/name mdata)
|
||||
:timeout (:timeout config)
|
||||
:permits (:permits config)
|
||||
:queue (:queue config)
|
||||
:keyed? (not= key-fn noop-fn))
|
||||
|
||||
(fn [cfg params]
|
||||
(invoke! cache metrics id (key-fn params) (partial f cfg params))))
|
||||
|
||||
(do
|
||||
(l/wrn :hint "no config found for specified queue" :id (id->str id))
|
||||
f))
|
||||
|
||||
f))
|
||||
[{:keys [::executor] :as cfg} f params]
|
||||
(let [f (if (some? executor)
|
||||
(fn [cfg params] (px/await! (px/submit! executor (fn [] (f cfg params)))))
|
||||
f)
|
||||
f (build-exec-chain cfg f)]
|
||||
(f cfg params)))
|
||||
|
||||
@@ -13,6 +13,7 @@
|
||||
[app.rpc :as-alias rpc]
|
||||
[app.rpc.doc :as-alias doc]
|
||||
[app.rpc.quotes :as quotes]
|
||||
[app.setup :as-alias setup]
|
||||
[app.tokens :as tokens]
|
||||
[app.util.services :as sv]
|
||||
[app.util.time :as dt]
|
||||
@@ -23,7 +24,7 @@
|
||||
(dissoc row :perms))
|
||||
|
||||
(defn create-access-token
|
||||
[{:keys [::db/conn ::main/props]} profile-id name expiration]
|
||||
[{:keys [::db/conn ::setup/props]} profile-id name expiration]
|
||||
(let [created-at (dt/now)
|
||||
token-id (uuid/next)
|
||||
token (tokens/generate props {:iss "access-token"
|
||||
@@ -47,7 +48,7 @@
|
||||
[{:keys [::db/pool] :as system} profile-id name expiration]
|
||||
(db/with-atomic [conn pool]
|
||||
(let [props (:app.setup/props system)]
|
||||
(create-access-token {::db/conn conn ::main/props props}
|
||||
(create-access-token {::db/conn conn ::setup/props props}
|
||||
profile-id
|
||||
name
|
||||
expiration))))
|
||||
|
||||
@@ -19,7 +19,20 @@
|
||||
[app.rpc.climit :as-alias climit]
|
||||
[app.rpc.doc :as-alias doc]
|
||||
[app.rpc.helpers :as rph]
|
||||
[app.util.services :as sv]))
|
||||
[app.util.services :as sv]
|
||||
[app.util.time :as dt]))
|
||||
|
||||
(def ^:private event-columns
|
||||
[:id
|
||||
:name
|
||||
:source
|
||||
:type
|
||||
:tracked-at
|
||||
:created-at
|
||||
:profile-id
|
||||
:ip-addr
|
||||
:props
|
||||
:context])
|
||||
|
||||
(defn- event->row [event]
|
||||
[(uuid/next)
|
||||
@@ -27,33 +40,56 @@
|
||||
(:source event)
|
||||
(:type event)
|
||||
(:timestamp event)
|
||||
(:created-at event)
|
||||
(:profile-id event)
|
||||
(db/inet (:ip-addr event))
|
||||
(db/tjson (:props event))
|
||||
(db/tjson (d/without-nils (:context event)))])
|
||||
|
||||
(def ^:private event-columns
|
||||
[:id :name :source :type :tracked-at
|
||||
:profile-id :ip-addr :props :context])
|
||||
(defn- adjust-timestamp
|
||||
[{:keys [timestamp created-at] :as event}]
|
||||
(let [margin (inst-ms (dt/diff timestamp created-at))]
|
||||
(if (or (neg? margin)
|
||||
(> margin 3600000))
|
||||
;; If the event is in the future or lags more than 1 hour, we reassign the
;; timestamp to the server creation date
|
||||
(-> event
|
||||
(assoc :timestamp created-at)
|
||||
(update :context assoc :original-timestamp timestamp))
|
||||
event)))
|
||||
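A hedged example of the clamping behaviour (timestamps shown as reader instants for illustration; real events carry whatever instant type dt/diff accepts):

(adjust-timestamp {:timestamp  #inst "2024-03-01T09:00:00Z" ; lags > 1 hour
                   :created-at #inst "2024-03-01T12:00:00Z"
                   :context    {}})
;; => {:timestamp  #inst "2024-03-01T12:00:00Z"
;;     :created-at #inst "2024-03-01T12:00:00Z"
;;     :context    {:original-timestamp #inst "2024-03-01T09:00:00Z"}}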
|
||||
(defn- handle-events
|
||||
[{:keys [::db/pool]} {:keys [::rpc/profile-id events] :as params}]
|
||||
(let [request (-> params meta ::http/request)
|
||||
ip-addr (audit/parse-client-ip request)
|
||||
tnow (dt/now)
|
||||
xform (comp
|
||||
(map #(assoc % :profile-id profile-id))
|
||||
(map #(assoc % :ip-addr ip-addr))
|
||||
(map #(assoc % :source "frontend"))
|
||||
(map (fn [event]
|
||||
(-> event
|
||||
(assoc :created-at tnow)
|
||||
(assoc :profile-id profile-id)
|
||||
(assoc :ip-addr ip-addr)
|
||||
(assoc :source "frontend"))))
|
||||
(filter :profile-id)
|
||||
(map adjust-timestamp)
|
||||
(map event->row))
|
||||
events (sequence xform events)]
|
||||
(when (seq events)
|
||||
(db/insert-multi! pool :audit-log event-columns events))))
|
||||
(db/insert-many! pool :audit-log event-columns events))))
|
||||
|
||||
(def valid-event-types
|
||||
#{"action" "identify"})
|
||||
|
||||
(def schema:event
|
||||
[:map {:title "Event"}
|
||||
[:name [:string {:max 250}]]
|
||||
[:type [:string {:max 250}]]
|
||||
[:name
|
||||
[:and {:gen/elements ["update-file", "get-profile"]}
|
||||
[:string {:max 250}]
|
||||
[:re #"[\d\w-]{1,50}"]]]
|
||||
[:type
|
||||
[:and {:gen/elements valid-event-types}
|
||||
[:string {:max 250}]
|
||||
[::sm/one-of {:format "string"} valid-event-types]]]
|
||||
[:props
|
||||
[:map-of :keyword :any]]
|
||||
[:context {:optional true}
|
||||
|
||||
@@ -21,10 +21,12 @@
|
||||
[app.loggers.audit :as audit]
|
||||
[app.main :as-alias main]
|
||||
[app.rpc :as-alias rpc]
|
||||
[app.rpc.climit :as-alias climit]
|
||||
[app.rpc.commands.profile :as profile]
|
||||
[app.rpc.commands.teams :as teams]
|
||||
[app.rpc.doc :as-alias doc]
|
||||
[app.rpc.helpers :as rph]
|
||||
[app.setup :as-alias setup]
|
||||
[app.tokens :as tokens]
|
||||
[app.util.services :as sv]
|
||||
[app.util.time :as dt]
|
||||
@@ -39,7 +41,7 @@
|
||||
;; ---- COMMAND: login with password
|
||||
|
||||
(defn login-with-password
|
||||
[{:keys [::db/pool] :as cfg} {:keys [email password] :as params}]
|
||||
[cfg {:keys [email password] :as params}]
|
||||
|
||||
(when-not (or (contains? cf/flags :login)
|
||||
(contains? cf/flags :login-with-password))
|
||||
@@ -47,18 +49,20 @@
|
||||
:code :login-disabled
|
||||
:hint "login is disabled in this instance"))
|
||||
|
||||
(letfn [(check-password [conn profile password]
|
||||
(letfn [(check-password [cfg profile password]
|
||||
(if (= (:password profile) "!")
|
||||
(ex/raise :type :validation
|
||||
:code :account-without-password
|
||||
:hint "the current account does not have password")
|
||||
(let [result (profile/verify-password cfg password (:password profile))]
|
||||
(when (:update result)
|
||||
(l/trace :hint "updating profile password" :id (:id profile) :email (:email profile))
|
||||
(profile/update-profile-password! conn (assoc profile :password password)))
|
||||
(l/trc :hint "updating profile password"
|
||||
:id (str (:id profile))
|
||||
:email (:email profile))
|
||||
(profile/update-profile-password! cfg (assoc profile :password password)))
|
||||
(:valid result))))
|
||||
|
||||
(validate-profile [conn profile]
|
||||
(validate-profile [cfg profile]
|
||||
(when-not profile
|
||||
(ex/raise :type :validation
|
||||
:code :wrong-credentials))
|
||||
@@ -68,7 +72,7 @@
|
||||
(when (:is-blocked profile)
|
||||
(ex/raise :type :restriction
|
||||
:code :profile-blocked))
|
||||
(when-not (check-password conn profile password)
|
||||
(when-not (check-password cfg profile password)
|
||||
(ex/raise :type :validation
|
||||
:code :wrong-credentials))
|
||||
(when-let [deleted-at (:deleted-at profile)]
|
||||
@@ -76,27 +80,30 @@
|
||||
(ex/raise :type :validation
|
||||
:code :wrong-credentials)))
|
||||
|
||||
profile)]
|
||||
profile)
|
||||
|
||||
(db/with-atomic [conn pool]
|
||||
(let [profile (->> (profile/get-profile-by-email conn email)
|
||||
(validate-profile conn)
|
||||
(profile/strip-private-attrs))
|
||||
(login [{:keys [::db/conn] :as cfg}]
|
||||
(let [profile (->> (profile/clean-email email)
|
||||
(profile/get-profile-by-email conn)
|
||||
(validate-profile cfg)
|
||||
(profile/strip-private-attrs))
|
||||
|
||||
invitation (when-let [token (:invitation-token params)]
|
||||
(tokens/verify (::main/props cfg) {:token token :iss :team-invitation}))
|
||||
invitation (when-let [token (:invitation-token params)]
|
||||
(tokens/verify (::setup/props cfg) {:token token :iss :team-invitation}))
|
||||
|
||||
;; If the invitation member-id does not match the profile-id, we just proceed to ignore the
;; invitation, because invitations match exactly; the user can't log in with one email and
;; accept an invitation with another email
|
||||
response (if (and (some? invitation) (= (:id profile) (:member-id invitation)))
|
||||
{:invitation-token (:invitation-token params)}
|
||||
(assoc profile :is-admin (let [admins (cf/get :admins)]
|
||||
(contains? admins (:email profile)))))]
|
||||
(-> response
|
||||
(rph/with-transform (session/create-fn cfg (:id profile)))
|
||||
(rph/with-meta {::audit/props (audit/profile->props profile)
|
||||
::audit/profile-id (:id profile)}))))))
|
||||
;; If the invitation member-id does not match the profile-id, we just proceed to ignore the
;; invitation, because invitations match exactly; the user can't log in with one email and
;; accept an invitation with another email
|
||||
response (if (and (some? invitation) (= (:id profile) (:member-id invitation)))
|
||||
{:invitation-token (:invitation-token params)}
|
||||
(assoc profile :is-admin (let [admins (cf/get :admins)]
|
||||
(contains? admins (:email profile)))))]
|
||||
(-> response
|
||||
(rph/with-transform (session/create-fn cfg (:id profile)))
|
||||
(rph/with-meta {::audit/props (audit/profile->props profile)
|
||||
::audit/profile-id (:id profile)}))))]
|
||||
|
||||
(db/tx-run! cfg login)))
|
||||
|
||||
(def schema:login-with-password
|
||||
[:map {:title "login-with-password"}
|
||||
@@ -108,6 +115,7 @@
|
||||
"Performs authentication using penpot password."
|
||||
{::rpc/auth false
|
||||
::doc/added "1.15"
|
||||
::climit/id :auth/global
|
||||
::sm/params schema:login-with-password}
|
||||
[cfg params]
|
||||
(login-with-password cfg params))
|
||||
@@ -126,12 +134,13 @@
|
||||
(defn recover-profile
|
||||
[{:keys [::db/pool] :as cfg} {:keys [token password]}]
|
||||
(letfn [(validate-token [token]
|
||||
(let [tdata (tokens/verify (::main/props cfg) {:token token :iss :password-recovery})]
|
||||
(let [tdata (tokens/verify (::setup/props cfg) {:token token :iss :password-recovery})]
|
||||
(:profile-id tdata)))
|
||||
|
||||
(update-password [conn profile-id]
|
||||
(let [pwd (profile/derive-password cfg password)]
|
||||
(db/update! conn :profile {:password pwd} {:id profile-id})))]
|
||||
(db/update! conn :profile {:password pwd} {:id profile-id})
|
||||
nil))]
|
||||
|
||||
(db/with-atomic [conn pool]
|
||||
(->> (validate-token token)
|
||||
@@ -146,7 +155,8 @@
|
||||
(sv/defmethod ::recover-profile
|
||||
{::rpc/auth false
|
||||
::doc/added "1.15"
|
||||
::sm/params schema:recover-profile}
|
||||
::sm/params schema:recover-profile
|
||||
::climit/id :auth/global}
|
||||
[cfg params]
|
||||
(recover-profile cfg params))
|
||||
|
||||
@@ -161,7 +171,7 @@
|
||||
:code :registration-disabled)))
|
||||
|
||||
(when (contains? params :invitation-token)
|
||||
(let [invitation (tokens/verify (::main/props cfg) {:token (:invitation-token params) :iss :team-invitation})]
|
||||
(let [invitation (tokens/verify (::setup/props cfg) {:token (:invitation-token params) :iss :team-invitation})]
|
||||
(when-not (= (:email params) (:member-email invitation))
|
||||
(ex/raise :type :restriction
|
||||
:code :email-does-not-match-invitation
|
||||
@@ -194,11 +204,12 @@
|
||||
(pos? (compare elapsed register-retry-threshold))))
|
||||
|
||||
(defn prepare-register
|
||||
[{:keys [::db/pool] :as cfg} params]
|
||||
[{:keys [::db/pool] :as cfg} {:keys [email] :as params}]
|
||||
|
||||
(validate-register-attempt! cfg params)
|
||||
|
||||
(let [profile (when-let [profile (profile/get-profile-by-email pool (:email params))]
|
||||
(let [email (profile/clean-email email)
|
||||
profile (when-let [profile (profile/get-profile-by-email pool email)]
|
||||
(cond
|
||||
(:is-blocked profile)
|
||||
(ex/raise :type :restriction
|
||||
@@ -213,7 +224,7 @@
|
||||
:code :email-already-exists
|
||||
:hint "profile already exists")))
|
||||
|
||||
params {:email (:email params)
|
||||
params {:email email
|
||||
:password (:password params)
|
||||
:invitation-token (:invitation-token params)
|
||||
:backend "penpot"
|
||||
@@ -223,7 +234,7 @@
|
||||
|
||||
params (d/without-nils params)
|
||||
|
||||
token (tokens/generate (::main/props cfg) params)]
|
||||
token (tokens/generate (::setup/props cfg) params)]
|
||||
(with-meta {:token token}
|
||||
{::audit/profile-id uuid/zero})))
|
||||
|
||||
@@ -252,7 +263,8 @@
|
||||
(merge (:props params))
|
||||
(merge {:viewed-tutorial? false
|
||||
:viewed-walkthrough? false
|
||||
:nudge {:big 10 :small 1}})
|
||||
:nudge {:big 10 :small 1}
|
||||
:v2-info-shown true})
|
||||
(db/tjson))
|
||||
|
||||
password (or (:password params) "!")
|
||||
@@ -301,7 +313,8 @@
|
||||
(-> (db/update! conn :profile
|
||||
{:default-team-id (:id team)
|
||||
:default-project-id (:default-project-id team)}
|
||||
{:id id})
|
||||
{:id id}
|
||||
{::db/return-keys true})
|
||||
(profile/decode-row))))
|
||||
|
||||
|
||||
@@ -328,8 +341,10 @@
|
||||
|
||||
(defn register-profile
|
||||
[{:keys [::db/conn] :as cfg} {:keys [token fullname] :as params}]
|
||||
(let [claims (tokens/verify (::main/props cfg) {:token token :iss :prepared-register})
|
||||
params (assoc claims :fullname fullname)
|
||||
(let [claims (tokens/verify (::setup/props cfg) {:token token :iss :prepared-register})
|
||||
params (-> claims
|
||||
(into params)
|
||||
(assoc :fullname fullname))
|
||||
|
||||
is-active (or (:is-active params)
|
||||
(not (contains? cf/flags :email-verification)))
|
||||
@@ -343,7 +358,7 @@
|
||||
(create-profile-rels! conn))))
|
||||
|
||||
invitation (when-let [token (:invitation-token params)]
|
||||
(tokens/verify (::main/props cfg) {:token token :iss :team-invitation}))]
|
||||
(tokens/verify (::setup/props cfg) {:token token :iss :team-invitation}))]
|
||||
|
||||
;; If profile is filled in claims, means it tries to register
|
||||
;; again, so we proceed to update the modified-at attr
|
||||
@@ -354,7 +369,6 @@
|
||||
{::audit/type "fact"
|
||||
::audit/name "register-profile-retry"
|
||||
::audit/profile-id id}))
|
||||
|
||||
(cond
|
||||
;; If invitation token comes in params, this is because the
|
||||
;; user comes from team-invitation process; in this case,
|
||||
@@ -364,7 +378,7 @@
|
||||
;; email.
|
||||
(and (some? invitation) (= (:email profile) (:member-email invitation)))
|
||||
(let [claims (assoc invitation :member-id (:id profile))
|
||||
token (tokens/generate (::main/props cfg) claims)
|
||||
token (tokens/generate (::setup/props cfg) claims)
|
||||
resp {:invitation-token token}]
|
||||
(-> resp
|
||||
(rph/with-transform (session/create-fn cfg (:id profile)))
|
||||
@@ -391,12 +405,11 @@
|
||||
;; In all other cases, send a verification email.
|
||||
:else
|
||||
(do
|
||||
(send-email-verification! conn (::main/props cfg) profile)
|
||||
(send-email-verification! conn (::setup/props cfg) profile)
|
||||
(rph/with-meta profile
|
||||
{::audit/replace-props (audit/profile->props profile)
|
||||
::audit/profile-id (:id profile)})))))
|
||||
|
||||
|
||||
(def schema:register-profile
|
||||
[:map {:title "register-profile"}
|
||||
[:token schema:token]
|
||||
@@ -405,7 +418,8 @@
|
||||
(sv/defmethod ::register-profile
|
||||
{::rpc/auth false
|
||||
::doc/added "1.15"
|
||||
::sm/params schema:register-profile}
|
||||
::sm/params schema:register-profile
|
||||
::climit/id :auth/global}
|
||||
[{:keys [::db/pool] :as cfg} params]
|
||||
(db/with-atomic [conn pool]
|
||||
(-> (assoc cfg ::db/conn conn)
|
||||
@@ -416,14 +430,14 @@
|
||||
(defn request-profile-recovery
|
||||
[{:keys [::db/pool] :as cfg} {:keys [email] :as params}]
|
||||
(letfn [(create-recovery-token [{:keys [id] :as profile}]
|
||||
(let [token (tokens/generate (::main/props cfg)
|
||||
(let [token (tokens/generate (::setup/props cfg)
|
||||
{:iss :password-recovery
|
||||
:exp (dt/in-future "15m")
|
||||
:profile-id id})]
|
||||
(assoc profile :token token)))
|
||||
|
||||
(send-email-notification [conn profile]
|
||||
(let [ptoken (tokens/generate (::main/props cfg)
|
||||
(let [ptoken (tokens/generate (::setup/props cfg)
|
||||
{:iss :profile-identity
|
||||
:profile-id (:id profile)
|
||||
:exp (dt/in-future {:days 30})})]
|
||||
@@ -437,7 +451,8 @@
|
||||
nil))]
|
||||
|
||||
(db/with-atomic [conn pool]
|
||||
(when-let [profile (profile/get-profile-by-email conn email)]
|
||||
(when-let [profile (->> (profile/clean-email email)
|
||||
(profile/get-profile-by-email conn))]
|
||||
(when-not (eml/allow-send-emails? conn profile)
|
||||
(ex/raise :type :validation
|
||||
:code :profile-is-muted
|
||||
|
||||
File diff suppressed because it is too large
@@ -9,9 +9,10 @@
|
||||
[app.common.data.macros :as dm]
|
||||
[app.common.exceptions :as ex]
|
||||
[app.common.geom.point :as gpt]
|
||||
[app.common.spec :as us]
|
||||
[app.common.schema :as sm]
|
||||
[app.common.uuid :as uuid]
|
||||
[app.db :as db]
|
||||
[app.db.sql :as sql]
|
||||
[app.features.fdata :as feat.fdata]
|
||||
[app.loggers.audit :as-alias audit]
|
||||
[app.loggers.webhooks :as-alias webhooks]
|
||||
@@ -23,18 +24,21 @@
|
||||
[app.rpc.retry :as rtry]
|
||||
[app.util.pointer-map :as pmap]
|
||||
[app.util.services :as sv]
|
||||
[app.util.time :as dt]
|
||||
[clojure.spec.alpha :as s]))
|
||||
[app.util.time :as dt]))
|
||||
|
||||
;; --- GENERAL PURPOSE INTERNAL HELPERS
|
||||
|
||||
(defn decode-row
|
||||
(defn- decode-row
|
||||
[{:keys [participants position] :as row}]
|
||||
(cond-> row
|
||||
(db/pgpoint? position) (assoc :position (db/decode-pgpoint position))
|
||||
(db/pgobject? participants) (assoc :participants (db/decode-transit-pgobject participants))))
|
||||
|
||||
(def sql:get-file
|
||||
(def xf-decode-row
|
||||
(map decode-row))
|
||||
|
||||
(def ^:private sql:get-file
|
||||
"select f.id, f.modified_at, f.revn, f.features,
|
||||
f.project_id, p.team_id, f.data
|
||||
from file as f
|
||||
@@ -44,17 +48,19 @@
|
||||
|
||||
(defn- get-file
|
||||
"A specialized version of get-file for comments module."
|
||||
[{:keys [::db/conn] :as cfg} file-id page-id]
|
||||
(if-let [{:keys [data] :as file} (some-> (db/exec-one! conn [sql:get-file file-id])
|
||||
(files/decode-row))]
|
||||
(binding [pmap/*load-fn* (partial feat.fdata/load-pointer cfg file-id)]
|
||||
(-> file
|
||||
(assoc :page-name (dm/get-in data [:pages-index page-id :name]))
|
||||
(assoc :page-id page-id)))
|
||||
[cfg file-id page-id]
|
||||
(let [file (db/exec-one! cfg [sql:get-file file-id])]
|
||||
(when-not file
|
||||
(ex/raise :type :not-found
|
||||
:code :object-not-found
|
||||
:hint "file not found"))
|
||||
|
||||
(ex/raise :type :not-found
|
||||
:code :object-not-found
|
||||
:hint "file not found")))
|
||||
(binding [pmap/*load-fn* (partial feat.fdata/load-pointer cfg file-id)]
|
||||
(let [{:keys [data] :as file} (files/decode-row file)]
|
||||
(-> file
|
||||
(assoc :page-name (dm/get-in data [:pages-index page-id :name]))
|
||||
(assoc :page-id page-id)
|
||||
(dissoc :data))))))
|
||||
|
||||
(defn- get-comment-thread
|
||||
[conn thread-id & {:as opts}]
|
||||
@@ -62,8 +68,8 @@
|
||||
(decode-row)))
|
||||
|
||||
(defn- get-comment
|
||||
[conn comment-id & {:keys [for-update?]}]
|
||||
(db/get-by-id conn :comment comment-id {:for-update for-update?}))
|
||||
[conn comment-id & {:as opts}]
|
||||
(db/get-by-id conn :comment comment-id opts))
|
||||
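As the later hunks show, callers now pass the row-lock option straight through these helpers using the new ::sql/for-update key, for example:

(get-comment conn comment-id ::sql/for-update true)
(get-comment-thread conn thread-id ::sql/for-update true)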
|
||||
(defn- get-next-seqn
|
||||
[conn file-id]
|
||||
@@ -92,23 +98,25 @@
|
||||
|
||||
(declare ^:private get-comment-threads)
|
||||
|
||||
(s/def ::team-id ::us/uuid)
|
||||
(s/def ::file-id ::us/uuid)
|
||||
(s/def ::share-id (s/nilable ::us/uuid))
|
||||
|
||||
(s/def ::get-comment-threads
|
||||
(s/and (s/keys :req [::rpc/profile-id]
|
||||
:opt-un [::file-id ::share-id ::team-id])
|
||||
#(or (:file-id %) (:team-id %))))
|
||||
(def ^:private
|
||||
schema:get-comment-threads
|
||||
[:and
|
||||
[:map {:title "get-comment-threads"}
|
||||
[:file-id {:optional true} ::sm/uuid]
|
||||
[:team-id {:optional true} ::sm/uuid]
|
||||
[:share-id {:optional true} [:maybe ::sm/uuid]]]
|
||||
[::sm/contains-any #{:file-id :team-id}]])
|
||||
|
||||
(sv/defmethod ::get-comment-threads
|
||||
{::doc/added "1.15"}
|
||||
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id file-id share-id] :as params}]
|
||||
(dm/with-open [conn (db/open pool)]
|
||||
(files/check-comment-permissions! conn profile-id file-id share-id)
|
||||
(get-comment-threads conn profile-id file-id)))
|
||||
{::doc/added "1.15"
|
||||
::sm/params schema:get-comment-threads}
|
||||
[cfg {:keys [::rpc/profile-id file-id share-id] :as params}]
|
||||
|
||||
(def sql:comment-threads
|
||||
(db/run! cfg (fn [{:keys [::db/conn]}]
|
||||
(files/check-comment-permissions! conn profile-id file-id share-id)
|
||||
(get-comment-threads conn profile-id file-id))))
|
||||
|
||||
(def ^:private sql:comment-threads
|
||||
"select distinct on (ct.id)
|
||||
ct.*,
|
||||
f.name as file_name,
|
||||
@@ -133,23 +141,24 @@
|
||||
(defn- get-comment-threads
|
||||
[conn profile-id file-id]
|
||||
(->> (db/exec! conn [sql:comment-threads profile-id file-id])
|
||||
(into [] (map decode-row))))
|
||||
(into [] xf-decode-row)))
|
||||
|
||||
;; --- COMMAND: Get Unread Comment Threads
|
||||
|
||||
(declare ^:private get-unread-comment-threads)
|
||||
|
||||
(s/def ::team-id ::us/uuid)
|
||||
(s/def ::get-unread-comment-threads
|
||||
(s/keys :req [::rpc/profile-id]
|
||||
:req-un [::team-id]))
|
||||
(def ^:private
|
||||
schema:get-unread-comment-threads
|
||||
[:map {:title "get-unread-comment-threads"}
|
||||
[:team-id ::sm/uuid]])
|
||||
|
||||
(sv/defmethod ::get-unread-comment-threads
|
||||
{::doc/added "1.15"}
|
||||
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id team-id] :as params}]
|
||||
(dm/with-open [conn (db/open pool)]
|
||||
(teams/check-read-permissions! conn profile-id team-id)
|
||||
(get-unread-comment-threads conn profile-id team-id)))
|
||||
{::doc/added "1.15"
|
||||
::sm/params schema:get-unread-comment-threads}
|
||||
[cfg {:keys [::rpc/profile-id team-id] :as params}]
|
||||
(db/run! cfg (fn [{:keys [::db/conn]}]
|
||||
(teams/check-read-permissions! conn profile-id team-id)
|
||||
(get-unread-comment-threads conn profile-id team-id))))
|
||||
|
||||
(def sql:comment-threads-by-team
|
||||
"select distinct on (ct.id)
|
||||
@@ -181,62 +190,60 @@
|
||||
(defn- get-unread-comment-threads
|
||||
[conn profile-id team-id]
|
||||
(->> (db/exec! conn [sql:unread-comment-threads-by-team profile-id team-id])
|
||||
(into [] (map decode-row))))
|
||||
|
||||
(into [] xf-decode-row)))
|
||||
|
||||
;; --- COMMAND: Get Single Comment Thread
|
||||
|
||||
(s/def ::get-comment-thread
|
||||
(s/keys :req [::rpc/profile-id]
|
||||
:req-un [::file-id ::us/id]
|
||||
:opt-un [::share-id]))
|
||||
(def ^:private
|
||||
schema:get-comment-thread
|
||||
[:map {:title "get-comment-thread"}
|
||||
[:file-id ::sm/uuid]
|
||||
[:id ::sm/uuid]
|
||||
[:share-id {:optional true} [:maybe ::sm/uuid]]])
|
||||
|
||||
(sv/defmethod ::get-comment-thread
|
||||
{::doc/added "1.15"}
|
||||
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id file-id id share-id] :as params}]
|
||||
(dm/with-open [conn (db/open pool)]
|
||||
(files/check-comment-permissions! conn profile-id file-id share-id)
|
||||
(let [sql (str "with threads as (" sql:comment-threads ")"
|
||||
"select * from threads where id = ?")]
|
||||
(-> (db/exec-one! conn [sql profile-id file-id id])
|
||||
(decode-row)))))
|
||||
{::doc/added "1.15"
|
||||
::sm/params schema:get-comment-thread}
|
||||
[cfg {:keys [::rpc/profile-id file-id id share-id] :as params}]
|
||||
(db/run! cfg (fn [{:keys [::db/conn]}]
|
||||
(files/check-comment-permissions! conn profile-id file-id share-id)
|
||||
(let [sql (str "with threads as (" sql:comment-threads ")"
|
||||
"select * from threads where id = ?")]
|
||||
(-> (db/exec-one! conn [sql profile-id file-id id])
|
||||
(decode-row))))))
|
||||
|
||||
;; --- COMMAND: Retrieve Comments
|
||||
|
||||
(declare ^:private get-comments)
|
||||
|
||||
(s/def ::thread-id ::us/uuid)
|
||||
(s/def ::get-comments
|
||||
(s/keys :req [::rpc/profile-id]
|
||||
:req-un [::thread-id]
|
||||
:opt-un [::share-id]))
|
||||
(def ^:private
|
||||
schema:get-comments
|
||||
[:map {:title "get-comments"}
|
||||
[:thread-id ::sm/uuid]
|
||||
[:share-id {:optional true} [:maybe ::sm/uuid]]])
|
||||
|
||||
(sv/defmethod ::get-comments
|
||||
{::doc/added "1.15"}
|
||||
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id thread-id share-id] :as params}]
|
||||
(dm/with-open [conn (db/open pool)]
|
||||
(let [{:keys [file-id] :as thread} (get-comment-thread conn thread-id)]
|
||||
(files/check-comment-permissions! conn profile-id file-id share-id)
|
||||
(get-comments conn thread-id))))
|
||||
|
||||
(def sql:comments
|
||||
"select c.* from comment as c
|
||||
where c.thread_id = ?
|
||||
order by c.created_at asc")
|
||||
{::doc/added "1.15"
|
||||
::sm/params schema:get-comments}
|
||||
[cfg {:keys [::rpc/profile-id thread-id share-id]}]
|
||||
(db/run! cfg (fn [{:keys [::db/conn]}]
|
||||
(let [{:keys [file-id] :as thread} (get-comment-thread conn thread-id)]
|
||||
(files/check-comment-permissions! conn profile-id file-id share-id)
|
||||
(get-comments conn thread-id)))))
|
||||
|
||||
(defn- get-comments
|
||||
[conn thread-id]
|
||||
(->> (db/query conn :comment
|
||||
{:thread-id thread-id}
|
||||
{:order-by [[:created-at :asc]]})
|
||||
(into [] (map decode-row))))
|
||||
(into [] xf-decode-row)))
|
||||
|
||||
;; --- COMMAND: Get file comments users
|
||||
|
||||
;; All the profiles that have commented on the file, plus the current
;; profile.
|
||||
|
||||
(def sql:file-comment-users
|
||||
(def ^:private sql:file-comment-users
|
||||
"WITH available_profiles AS (
|
||||
SELECT DISTINCT owner_id AS id
|
||||
FROM comment
|
||||
@@ -255,20 +262,22 @@
|
||||
[conn file-id profile-id]
|
||||
(db/exec! conn [sql:file-comment-users file-id profile-id]))
|
||||
|
||||
(s/def ::get-profiles-for-file-comments
|
||||
(s/keys :req [::rpc/profile-id]
|
||||
:req-un [::file-id]
|
||||
:opt-un [::share-id]))
|
||||
(def ^:private
|
||||
schema:get-profiles-for-file-comments
|
||||
[:map {:title "get-profiles-for-file-comments"}
|
||||
[:file-id ::sm/uuid]
|
||||
[:share-id {:optional true} [:maybe ::sm/uuid]]])
|
||||
|
||||
(sv/defmethod ::get-profiles-for-file-comments
|
||||
"Retrieves a list of profiles with limited set of properties of all
|
||||
participants on comment threads of the file."
|
||||
{::doc/added "1.15"
|
||||
::doc/changes ["1.15" "Imported from queries and renamed."]}
|
||||
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id file-id share-id]}]
|
||||
(dm/with-open [conn (db/open pool)]
|
||||
(files/check-comment-permissions! conn profile-id file-id share-id)
|
||||
(get-file-comments-users conn file-id profile-id)))
|
||||
::doc/changes ["1.15" "Imported from queries and renamed."]
|
||||
::sm/params schema:get-profiles-for-file-comments}
|
||||
[cfg {:keys [::rpc/profile-id file-id share-id]}]
|
||||
(db/run! cfg (fn [{:keys [::db/conn]}]
|
||||
(files/check-comment-permissions! conn profile-id file-id share-id)
|
||||
(get-file-comments-users conn file-id profile-id))))
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; MUTATION COMMANDS
|
||||
@@ -278,54 +287,52 @@
|
||||
|
||||
;; --- COMMAND: Create Comment Thread
|
||||
|
||||
(s/def ::page-id ::us/uuid)
|
||||
(s/def ::position ::gpt/point)
|
||||
(s/def ::content ::us/string)
|
||||
(s/def ::frame-id ::us/uuid)
|
||||
|
||||
(s/def ::create-comment-thread
|
||||
(s/keys :req [::rpc/profile-id]
|
||||
:req-un [::file-id ::position ::content ::page-id ::frame-id]
|
||||
:opt-un [::share-id]))
|
||||
(def ^:private
|
||||
schema:create-comment-thread
|
||||
[:map {:title "create-comment-thread"}
|
||||
[:file-id ::sm/uuid]
|
||||
[:position ::gpt/point]
|
||||
[:content :string]
|
||||
[:page-id ::sm/uuid]
|
||||
[:frame-id ::sm/uuid]
|
||||
[:share-id {:optional true} [:maybe ::sm/uuid]]])
|
||||
|
||||
(sv/defmethod ::create-comment-thread
|
||||
{::doc/added "1.15"
|
||||
::webhooks/event? true}
|
||||
::webhooks/event? true
|
||||
::rtry/enabled true
|
||||
::rtry/when rtry/conflict-exception?
|
||||
::sm/params schema:create-comment-thread}
|
||||
[cfg {:keys [::rpc/profile-id ::rpc/request-at file-id page-id share-id position content frame-id]}]
|
||||
(db/tx-run! cfg
|
||||
(fn [{:keys [::db/conn] :as cfg}]
|
||||
(files/check-comment-permissions! conn profile-id file-id share-id)
|
||||
(let [{:keys [team-id project-id page-name] :as file} (get-file cfg file-id page-id)]
|
||||
|
||||
(run! (partial quotes/check-quote! conn)
|
||||
(list {::quotes/id ::quotes/comment-threads-per-file
|
||||
::quotes/profile-id profile-id
|
||||
::quotes/team-id team-id
|
||||
::quotes/project-id project-id
|
||||
::quotes/file-id file-id}
|
||||
{::quotes/id ::quotes/comments-per-file
|
||||
::quotes/profile-id profile-id
|
||||
::quotes/team-id team-id
|
||||
::quotes/project-id project-id
|
||||
::quotes/file-id file-id}))
|
||||
(db/tx-run! cfg (fn [{:keys [::db/conn] :as cfg}]
|
||||
(files/check-comment-permissions! cfg profile-id file-id share-id)
|
||||
(let [{:keys [team-id project-id page-name]} (get-file conn file-id page-id)]
|
||||
|
||||
(rtry/with-retry {::rtry/when rtry/conflict-exception?
|
||||
::rtry/max-retries 3
|
||||
::rtry/label "create-comment-thread"
|
||||
::db/conn conn}
|
||||
(create-comment-thread conn
|
||||
{:created-at request-at
|
||||
:profile-id profile-id
|
||||
:file-id file-id
|
||||
:page-id page-id
|
||||
:page-name page-name
|
||||
:position position
|
||||
:content content
|
||||
:frame-id frame-id}))))))
|
||||
(run! (partial quotes/check-quote! cfg)
|
||||
(list {::quotes/id ::quotes/comment-threads-per-file
|
||||
::quotes/profile-id profile-id
|
||||
::quotes/team-id team-id
|
||||
::quotes/project-id project-id
|
||||
::quotes/file-id file-id}
|
||||
{::quotes/id ::quotes/comments-per-file
|
||||
::quotes/profile-id profile-id
|
||||
::quotes/team-id team-id
|
||||
::quotes/project-id project-id
|
||||
::quotes/file-id file-id}))
|
||||
|
||||
(create-comment-thread conn {:created-at request-at
|
||||
:profile-id profile-id
|
||||
:file-id file-id
|
||||
:page-id page-id
|
||||
:page-name page-name
|
||||
:position position
|
||||
:content content
|
||||
:frame-id frame-id})))))
|
||||
|
||||
(defn- create-comment-thread
|
||||
[conn {:keys [profile-id file-id page-id page-name created-at position content frame-id]}]
|
||||
|
||||
(let [;; NOTE: we take the next seq number from a separate query because the whole
|
||||
;; operation can be retried on conflict, and in this case the new seq should be
|
||||
;; retrieved from the database.
|
||||
@@ -365,68 +372,72 @@
|
||||
|
||||
;; --- COMMAND: Update Comment Thread Status
|
||||
|
||||
(s/def ::id ::us/uuid)
|
||||
(s/def ::share-id (s/nilable ::us/uuid))
|
||||
|
||||
(s/def ::update-comment-thread-status
|
||||
(s/keys :req [::rpc/profile-id]
|
||||
:req-un [::id]
|
||||
:opt-un [::share-id]))
|
||||
(def ^:private
|
||||
schema:update-comment-thread-status
|
||||
[:map {:title "update-comment-thread-status"}
|
||||
[:id ::sm/uuid]
|
||||
[:share-id {:optional true} [:maybe ::sm/uuid]]])
|
||||
|
||||
(sv/defmethod ::update-comment-thread-status
|
||||
{::doc/added "1.15"}
|
||||
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id id share-id] :as params}]
|
||||
(db/with-atomic [conn pool]
|
||||
(let [{:keys [file-id] :as thread} (get-comment-thread conn id ::db/for-update? true)]
|
||||
(files/check-comment-permissions! conn profile-id file-id share-id)
|
||||
(upsert-comment-thread-status! conn profile-id id))))
|
||||
{::doc/added "1.15"
|
||||
::sm/params schema:update-comment-thread-status}
|
||||
[cfg {:keys [::rpc/profile-id id share-id]}]
|
||||
(db/tx-run! cfg (fn [{:keys [::db/conn]}]
|
||||
(let [{:keys [file-id] :as thread} (get-comment-thread conn id ::sql/for-update true)]
|
||||
(files/check-comment-permissions! conn profile-id file-id share-id)
|
||||
(upsert-comment-thread-status! conn profile-id id)))))
|
||||
|
||||
|
||||
;; --- COMMAND: Update Comment Thread
|
||||
|
||||
(s/def ::is-resolved ::us/boolean)
|
||||
(s/def ::update-comment-thread
|
||||
(s/keys :req [::rpc/profile-id]
|
||||
:req-un [::id ::is-resolved]
|
||||
:opt-un [::share-id]))
|
||||
(def ^:private
|
||||
schema:update-comment-thread
|
||||
[:map {:title "update-comment-thread"}
|
||||
[:id ::sm/uuid]
|
||||
[:is-resolved :boolean]
|
||||
[:share-id {:optional true} [:maybe ::sm/uuid]]])
|
||||
|
||||
(sv/defmethod ::update-comment-thread
|
||||
{::doc/added "1.15"}
|
||||
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id id is-resolved share-id] :as params}]
|
||||
(db/with-atomic [conn pool]
|
||||
(let [{:keys [file-id] :as thread} (get-comment-thread conn id ::db/for-update? true)]
|
||||
(files/check-comment-permissions! conn profile-id file-id share-id)
|
||||
(db/update! conn :comment-thread
|
||||
{:is-resolved is-resolved}
|
||||
{:id id})
|
||||
nil)))
|
||||
{::doc/added "1.15"
|
||||
::sm/params schema:update-comment-thread}
|
||||
[cfg {:keys [::rpc/profile-id id is-resolved share-id]}]
|
||||
(db/tx-run! cfg (fn [{:keys [::db/conn]}]
|
||||
(let [{:keys [file-id] :as thread} (get-comment-thread conn id ::sql/for-update true)]
|
||||
(files/check-comment-permissions! conn profile-id file-id share-id)
|
||||
(db/update! conn :comment-thread
|
||||
{:is-resolved is-resolved}
|
||||
{:id id})
|
||||
nil))))
|
||||
|
||||
|
||||
;; --- COMMAND: Add Comment
|
||||
|
||||
(declare ^:private get-comment-thread)
|
||||
|
||||
(s/def ::create-comment
|
||||
(s/keys :req [::rpc/profile-id]
|
||||
:req-un [::thread-id ::content]
|
||||
:opt-un [::share-id]))
|
||||
(def ^:private
|
||||
schema:create-comment
|
||||
[:map {:title "create-comment"}
|
||||
[:thread-id ::sm/uuid]
|
||||
[:content :string]
|
||||
[:share-id {:optional true} [:maybe ::sm/uuid]]])
|
||||
|
||||
(sv/defmethod ::create-comment
|
||||
{::doc/added "1.15"
|
||||
::webhooks/event? true}
|
||||
::webhooks/event? true
|
||||
::sm/params schema:create-comment}
|
||||
[cfg {:keys [::rpc/profile-id ::rpc/request-at thread-id share-id content]}]
|
||||
(db/tx-run! cfg
|
||||
(fn [{:keys [::db/conn] :as cfg}]
|
||||
(let [{:keys [file-id page-id] :as thread} (get-comment-thread conn thread-id ::db/for-update? true)
|
||||
(let [{:keys [file-id page-id] :as thread} (get-comment-thread conn thread-id ::sql/for-update true)
|
||||
{:keys [team-id project-id page-name] :as file} (get-file cfg file-id page-id)]
|
||||
|
||||
(files/check-comment-permissions! conn profile-id (:id file) share-id)
|
||||
(files/check-comment-permissions! conn profile-id file-id share-id)
|
||||
(quotes/check-quote! conn
|
||||
{::quotes/id ::quotes/comments-per-file
|
||||
::quotes/profile-id profile-id
|
||||
::quotes/team-id team-id
|
||||
::quotes/project-id project-id
|
||||
::quotes/file-id (:id file)})
|
||||
::quotes/file-id file-id})
|
||||
|
||||
;; Update the page-name cached attribute on the comment-thread table.
|
||||
(when (not= page-name (:page-name thread))
|
||||
@@ -462,19 +473,21 @@
|
||||
|
||||
;; --- COMMAND: Update Comment
|
||||
|
||||
(s/def ::update-comment
|
||||
(s/keys :req [::rpc/profile-id]
|
||||
:req-un [::id ::content]
|
||||
:opt-un [::share-id]))
|
||||
(def ^:private
|
||||
schema:update-comment
|
||||
[:map {:title "update-comment"}
|
||||
[:id ::sm/uuid]
|
||||
[:content :string]
|
||||
[:share-id {:optional true} [:maybe ::sm/uuid]]])
|
||||
|
||||
(sv/defmethod ::update-comment
|
||||
{::doc/added "1.15"}
|
||||
{::doc/added "1.15"
|
||||
::sm/params schema:update-comment}
|
||||
[cfg {:keys [::rpc/profile-id ::rpc/request-at id share-id content]}]
|
||||
|
||||
(db/tx-run! cfg
|
||||
(fn [{:keys [::db/conn] :as cfg}]
|
||||
(let [{:keys [thread-id owner-id] :as comment} (get-comment conn id ::db/for-update? true)
|
||||
{:keys [file-id page-id] :as thread} (get-comment-thread conn thread-id ::db/for-update? true)]
|
||||
(let [{:keys [thread-id owner-id] :as comment} (get-comment conn id ::sql/for-update true)
|
||||
{:keys [file-id page-id] :as thread} (get-comment-thread conn thread-id ::sql/for-update true)]
|
||||
|
||||
(files/check-comment-permissions! conn profile-id file-id share-id)
|
||||
|
||||
@@ -483,7 +496,7 @@
|
||||
(ex/raise :type :validation
|
||||
:code :not-allowed))
|
||||
|
||||
(let [{:keys [page-name] :as file} (get-file cfg file-id page-id)]
|
||||
(let [{:keys [page-name]} (get-file cfg file-id page-id)]
|
||||
(db/update! conn :comment
|
||||
{:content content
|
||||
:modified-at request-at}
|
||||
@@ -497,79 +510,90 @@
|
||||
|
||||
;; --- COMMAND: Delete Comment Thread
|
||||
|
||||
(s/def ::delete-comment-thread
|
||||
(s/keys :req [::rpc/profile-id]
|
||||
:req-un [::id]
|
||||
:opt-un [::share-id]))
|
||||
(def ^:private
|
||||
schema:delete-comment-thread
|
||||
[:map {:title "delete-comment-thread"}
|
||||
[:id ::sm/uuid]
|
||||
[:share-id {:optional true} [:maybe ::sm/uuid]]])
|
||||
|
||||
(sv/defmethod ::delete-comment-thread
|
||||
{::doc/added "1.15"}
|
||||
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id id share-id]}]
|
||||
(db/with-atomic [conn pool]
|
||||
(let [{:keys [owner-id file-id] :as thread} (get-comment-thread conn id ::db/for-update? true)]
|
||||
(files/check-comment-permissions! conn profile-id file-id share-id)
|
||||
(when-not (= owner-id profile-id)
|
||||
(ex/raise :type :validation
|
||||
:code :not-allowed))
|
||||
{::doc/added "1.15"
|
||||
::sm/params schema:delete-comment-thread}
|
||||
[cfg {:keys [::rpc/profile-id id share-id]}]
|
||||
(db/tx-run! cfg (fn [{:keys [::db/conn]}]
|
||||
(let [{:keys [owner-id file-id] :as thread} (get-comment-thread conn id ::sql/for-update true)]
|
||||
(files/check-comment-permissions! conn profile-id file-id share-id)
|
||||
(when-not (= owner-id profile-id)
|
||||
(ex/raise :type :validation
|
||||
:code :not-allowed))
|
||||
|
||||
(db/delete! conn :comment-thread {:id id})
|
||||
nil)))
|
||||
(db/delete! conn :comment-thread {:id id})
|
||||
nil))))
|
||||
|
||||
;; --- COMMAND: Delete comment
|
||||
|
||||
(s/def ::delete-comment
|
||||
(s/keys :req [::rpc/profile-id]
|
||||
:req-un [::id]
|
||||
:opt-un [::share-id]))
|
||||
(def ^:private
|
||||
schema:delete-comment
|
||||
[:map {:title "delete-comment"}
|
||||
[:id ::sm/uuid]
|
||||
[:share-id {:optional true} [:maybe ::sm/uuid]]])
|
||||
|
||||
(sv/defmethod ::delete-comment
|
||||
{::doc/added "1.15"}
|
||||
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id id share-id] :as params}]
|
||||
(db/with-atomic [conn pool]
|
||||
(let [{:keys [owner-id thread-id] :as comment} (get-comment conn id ::db/for-update? true)
|
||||
{:keys [file-id] :as thread} (get-comment-thread conn thread-id)]
|
||||
(files/check-comment-permissions! conn profile-id file-id share-id)
|
||||
(when-not (= owner-id profile-id)
|
||||
(ex/raise :type :validation
|
||||
:code :not-allowed))
|
||||
(db/delete! conn :comment {:id id}))))
|
||||
|
||||
{::doc/added "1.15"
|
||||
::sm/params schema:delete-comment}
|
||||
[cfg {:keys [::rpc/profile-id id share-id]}]
|
||||
(db/tx-run! cfg (fn [{:keys [::db/conn]}]
|
||||
(let [{:keys [owner-id thread-id] :as comment} (get-comment conn id ::sql/for-update true)
|
||||
{:keys [file-id] :as thread} (get-comment-thread conn thread-id)]
|
||||
(files/check-comment-permissions! conn profile-id file-id share-id)
|
||||
(when-not (= owner-id profile-id)
|
||||
(ex/raise :type :validation
|
||||
:code :not-allowed))
|
||||
(db/delete! conn :comment {:id id})
|
||||
nil))))
|
||||
|
||||
;; --- COMMAND: Update comment thread position
|
||||
|
||||
(s/def ::update-comment-thread-position
|
||||
(s/keys :req [::rpc/profile-id]
|
||||
:req-un [::id ::position ::frame-id]
|
||||
:opt-un [::share-id]))
|
||||
(def ^:private
|
||||
schema:update-comment-thread-position
|
||||
[:map {:title "update-comment-thread-position"}
|
||||
[:id ::sm/uuid]
|
||||
[:position ::gpt/point]
|
||||
[:frame-id ::sm/uuid]
|
||||
[:share-id {:optional true} [:maybe ::sm/uuid]]])
|
||||
|
||||
(sv/defmethod ::update-comment-thread-position
|
||||
{::doc/added "1.15"}
|
||||
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id ::rpc/request-at id position frame-id share-id]}]
|
||||
(db/with-atomic [conn pool]
|
||||
(let [{:keys [file-id] :as thread} (get-comment-thread conn id ::db/for-update? true)]
|
||||
(files/check-comment-permissions! conn profile-id file-id share-id)
|
||||
(db/update! conn :comment-thread
|
||||
{:modified-at request-at
|
||||
:position (db/pgpoint position)
|
||||
:frame-id frame-id}
|
||||
{:id (:id thread)})
|
||||
nil)))
|
||||
{::doc/added "1.15"
|
||||
::sm/params schema:update-comment-thread-position}
|
||||
[cfg {:keys [::rpc/profile-id ::rpc/request-at id position frame-id share-id]}]
|
||||
(db/tx-run! cfg (fn [{:keys [::db/conn]}]
|
||||
(let [{:keys [file-id] :as thread} (get-comment-thread conn id ::sql/for-update true)]
|
||||
(files/check-comment-permissions! conn profile-id file-id share-id)
|
||||
(db/update! conn :comment-thread
|
||||
{:modified-at request-at
|
||||
:position (db/pgpoint position)
|
||||
:frame-id frame-id}
|
||||
{:id (:id thread)})
|
||||
nil))))
|
||||
|
||||
;; --- COMMAND: Update comment frame
|
||||
|
||||
(s/def ::update-comment-thread-frame
|
||||
(s/keys :req [::rpc/profile-id]
|
||||
:req-un [::id ::frame-id]
|
||||
:opt-un [::share-id]))
|
||||
(def ^:private
|
||||
schema:update-comment-thread-frame
|
||||
[:map {:title "update-comment-thread-frame"}
|
||||
[:id ::sm/uuid]
|
||||
[:frame-id ::sm/uuid]
|
||||
[:share-id {:optional true} [:maybe ::sm/uuid]]])
|
||||
|
||||
(sv/defmethod ::update-comment-thread-frame
|
||||
{::doc/added "1.15"}
|
||||
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id ::rpc/request-at id frame-id share-id]}]
|
||||
(db/with-atomic [conn pool]
|
||||
(let [{:keys [file-id] :as thread} (get-comment-thread conn id ::db/for-update? true)]
|
||||
(files/check-comment-permissions! conn profile-id file-id share-id)
|
||||
(db/update! conn :comment-thread
|
||||
{:modified-at request-at
|
||||
:frame-id frame-id}
|
||||
{:id id})
|
||||
nil)))
|
||||
{::doc/added "1.15"
|
||||
::sm/params schema:update-comment-thread-frame}
|
||||
[cfg {:keys [::rpc/profile-id ::rpc/request-at id frame-id share-id]}]
|
||||
(db/tx-run! cfg (fn [{:keys [::db/conn]}]
|
||||
(let [{:keys [file-id] :as thread} (get-comment-thread conn id ::sql/for-update true)]
|
||||
(files/check-comment-permissions! conn profile-id file-id share-id)
|
||||
(db/update! conn :comment-thread
|
||||
{:modified-at request-at
|
||||
:frame-id frame-id}
|
||||
{:id id})
|
||||
nil))))
|
||||
|
||||
@@ -20,6 +20,7 @@
|
||||
[app.common.types.file :as ctf]
|
||||
[app.config :as cf]
|
||||
[app.db :as db]
|
||||
[app.db.sql :as-alias sql]
|
||||
[app.features.fdata :as feat.fdata]
|
||||
[app.loggers.audit :as-alias audit]
|
||||
[app.loggers.webhooks :as-alias webhooks]
|
||||
@@ -70,19 +71,14 @@
|
||||
data (assoc :data (blob/decode data)))))
|
||||
|
||||
(defn check-version!
|
||||
[{:keys [data] :as file}]
|
||||
(dm/assert!
|
||||
"expect data to be decoded"
|
||||
(map? data))
|
||||
|
||||
(let [version (:version data 0)]
|
||||
[file]
|
||||
(let [version (:version file)]
|
||||
(when (> version fmg/version)
|
||||
(ex/raise :type :restriction
|
||||
:code :file-version-not-supported
|
||||
:hint "file version is greated that the maximum"
|
||||
:file-version version
|
||||
:max-version fmg/version))
|
||||
|
||||
file))
|
||||
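A hedged sketch of the new behaviour: the version check now reads :version from the file row itself instead of the decoded :data; the numbers are hypothetical and assume fmg/version is at least 48:

(check-version! {:version 48})    ;; => {:version 48}, returned unchanged
(check-version! {:version 9999})  ;; => raises :type :restriction,
                                  ;;    :code :file-version-not-supported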
|
||||
;; --- FILE PERMISSIONS
|
||||
@@ -225,24 +221,38 @@
|
||||
[{:keys [::db/conn] :as cfg} {:keys [id] :as file}]
|
||||
(binding [pmap/*load-fn* (partial feat.fdata/load-pointer cfg id)
|
||||
pmap/*tracked* (pmap/create-tracked)]
|
||||
(let [file (fmg/migrate-file file)]
|
||||
(let [;; To avoid the unnecessary overhead of creating multiple pointers and
;; handling objects maps internally in their worst case (when
;; probably all shapes and all pointers will be read in any
;; case), we just realize/resolve them before applying the
;; migration to the file
|
||||
file (-> file
|
||||
(update :data feat.fdata/process-pointers deref)
|
||||
(update :data feat.fdata/process-objects (partial into {}))
|
||||
(fmg/migrate-file))
|
||||
|
||||
;; NOTE: when the file is migrated, we break the rule of not performing
;; mutations on get operations and update the file with all
;; migrations applied
;;
;; NOTE: the following code will not work in read-only mode; it
;; is a known issue; we keep it unimplemented until we really
;; need this
|
||||
(when (fmg/migrated? file)
|
||||
(db/update! conn :file
|
||||
{:data (blob/encode (:data file))
|
||||
:features (db/create-array conn "text" (:features file))}
|
||||
{:id id}
|
||||
{::db/return-keys? false})
|
||||
;; When the file is migrated, we break the rule of not performing
;; mutations on get operations and update the file with all
;; migrations applied
;;
;; WARN: the following code will not work in read-only mode;
;; it is a known issue; we keep it unimplemented until we
;; really need it.
|
||||
file (if (contains? (:features file) "fdata/objects-map")
|
||||
(feat.fdata/enable-objects-map file)
|
||||
file)
|
||||
file (if (contains? (:features file) "fdata/pointer-map")
|
||||
(feat.fdata/enable-pointer-map file)
|
||||
file)]
|
||||
|
||||
(when (contains? (:features file) "fdata/pointer-map")
|
||||
(feat.fdata/persist-pointers! cfg id)))
|
||||
(db/update! conn :file
|
||||
{:data (blob/encode (:data file))
|
||||
:version (:version file)
|
||||
:features (db/create-array conn "text" (:features file))}
|
||||
{:id id})
|
||||
|
||||
(when (contains? (:features file) "fdata/pointer-map")
|
||||
(feat.fdata/persist-pointers! cfg id))
|
||||
|
||||
file)))
|
||||
|
||||
@@ -262,17 +272,18 @@
|
||||
(when (some? project-id)
|
||||
{:project-id project-id}))
|
||||
file (-> (db/get conn :file params
|
||||
{::db/check-deleted? (not include-deleted?)
|
||||
::db/remove-deleted? (not include-deleted?)
|
||||
::db/for-update? lock-for-update?})
|
||||
{::db/check-deleted (not include-deleted?)
|
||||
::db/remove-deleted (not include-deleted?)
|
||||
::sql/for-update lock-for-update?})
|
||||
(decode-row))]
|
||||
(if migrate?
|
||||
(if (and migrate? (fmg/need-migration? file))
|
||||
(migrate-file cfg file)
|
||||
file)))
|
||||
|
||||
(defn get-minimal-file
|
||||
[{:keys [::db/pool] :as cfg} id]
|
||||
(db/get pool :file {:id id} {:columns [:id :modified-at :revn]}))
|
||||
[cfg id & {:as opts}]
|
||||
(let [opts (assoc opts ::sql/columns [:id :modified-at :revn])]
|
||||
(db/get cfg :file {:id id} opts)))
|
||||
|
||||
(defn get-file-etag
|
||||
[{:keys [::rpc/profile-id]} {:keys [modified-at revn]}]
|
||||
@@ -336,6 +347,7 @@
|
||||
(sv/defmethod ::get-file-fragment
|
||||
"Retrieve a file fragment by its ID. Only authenticated users."
|
||||
{::doc/added "1.17"
|
||||
::rpc/auth false
|
||||
::sm/params schema:get-file-fragment
|
||||
::sm/result schema:file-fragment}
|
||||
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id file-id fragment-id share-id]}]
|
||||
@@ -357,7 +369,9 @@
|
||||
f.is_shared,
|
||||
ft.media_id
|
||||
from file as f
|
||||
left join file_thumbnail as ft on (ft.file_id = f.id and ft.revn = f.revn)
|
||||
left join file_thumbnail as ft on (ft.file_id = f.id
|
||||
and ft.revn = f.revn
|
||||
and ft.deleted_at is null)
|
||||
where f.project_id = ?
|
||||
and f.deleted_at is null
|
||||
order by f.modified_at desc")
|
||||
@@ -516,7 +530,7 @@
|
||||
ft.media_id
|
||||
from file as f
|
||||
inner join project as p on (p.id = f.project_id)
|
||||
left join file_thumbnail as ft on (ft.file_id = f.id and ft.revn = f.revn)
|
||||
left join file_thumbnail as ft on (ft.file_id = f.id and ft.revn = f.revn and ft.deleted_at is null)
|
||||
where f.is_shared = true
|
||||
and f.deleted_at is null
|
||||
and p.deleted_at is null
|
||||
@@ -660,7 +674,9 @@
|
||||
row_number() over w as row_num
|
||||
from file as f
|
||||
inner join project as p on (p.id = f.project_id)
|
||||
left join file_thumbnail as ft on (ft.file_id = f.id and ft.revn = f.revn)
|
||||
left join file_thumbnail as ft on (ft.file_id = f.id
|
||||
and ft.revn = f.revn
|
||||
and ft.deleted_at is null)
|
||||
where p.team_id = ?
|
||||
and p.deleted_at is null
|
||||
and f.deleted_at is null
|
||||
@@ -733,7 +749,8 @@
|
||||
(db/update! conn :file
|
||||
{:name name
|
||||
:modified-at (dt/now)}
|
||||
{:id id}))
|
||||
{:id id}
|
||||
{::db/return-keys true}))
|
||||
|
||||
(sv/defmethod ::rename-file
|
||||
{::doc/added "1.17"
|
||||
@@ -851,7 +868,8 @@
|
||||
|
||||
(db/delete! conn :file-library-rel {:library-file-id id})
|
||||
(db/update! conn :file
|
||||
{:is-shared false}
|
||||
{:is-shared false
|
||||
:modified-at (dt/now)}
|
||||
{:id id})
|
||||
file)
|
||||
|
||||
@@ -859,10 +877,9 @@
|
||||
(true? (:is-shared params)))
|
||||
(let [file (assoc file :is-shared true)]
|
||||
(db/update! conn :file
|
||||
{:is-shared true}
|
||||
{:id id}
|
||||
::db/return-keys? false)
|
||||
|
||||
{:is-shared true
|
||||
:modified-at (dt/now)}
|
||||
{:id id})
|
||||
file)
|
||||
|
||||
:else
|
||||
@@ -899,7 +916,7 @@
|
||||
(db/update! conn :file
|
||||
{:deleted-at (dt/now)}
|
||||
{:id file-id}
|
||||
{::db/columns [:id :name :is-shared :project-id :created-at :modified-at]}))
|
||||
{::db/return-keys [:id :name :is-shared :project-id :created-at :modified-at]}))
|
||||
|
||||
(def ^:private
|
||||
schema:delete-file
|
||||
@@ -998,8 +1015,8 @@
|
||||
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id file-id] :as params}]
|
||||
(db/with-atomic [conn pool]
|
||||
(check-edition-permissions! conn profile-id file-id)
|
||||
(unlink-file-from-library conn params)))
|
||||
|
||||
(unlink-file-from-library conn params)
|
||||
nil))
|
||||
|
||||
;; --- MUTATION COMMAND: update-sync
|
||||
|
||||
@@ -1008,7 +1025,8 @@
|
||||
(db/update! conn :file-library-rel
|
||||
{:synced-at (dt/now)}
|
||||
{:file-id file-id
|
||||
:library-file-id library-id}))
|
||||
:library-file-id library-id}
|
||||
{::db/return-keys true}))
|
||||
|
||||
(def ^:private schema:update-file-library-sync-status
|
||||
[:map {:title "update-file-library-sync-status"}
|
||||
@@ -1030,8 +1048,10 @@
|
||||
(defn ignore-sync
|
||||
[conn {:keys [file-id date] :as params}]
|
||||
(db/update! conn :file
|
||||
{:ignore-sync-until date}
|
||||
{:id file-id}))
|
||||
{:ignore-sync-until date
|
||||
:modified-at (dt/now)}
|
||||
{:id file-id}
|
||||
{::db/return-keys true}))
|
||||
|
||||
(s/def ::ignore-file-library-sync-status
|
||||
(s/keys :req [::rpc/profile-id]
|
||||
|
||||
@@ -9,6 +9,7 @@
|
||||
[app.common.data :as d]
|
||||
[app.common.data.macros :as dm]
|
||||
[app.common.features :as cfeat]
|
||||
[app.common.files.defaults :refer [version]]
|
||||
[app.common.schema :as sm]
|
||||
[app.common.types.file :as ctf]
|
||||
[app.common.uuid :as uuid]
|
||||
@@ -18,14 +19,12 @@
|
||||
[app.loggers.audit :as-alias audit]
|
||||
[app.loggers.webhooks :as-alias webhooks]
|
||||
[app.rpc :as-alias rpc]
|
||||
[app.rpc.commands.files :as files]
|
||||
[app.rpc.commands.projects :as projects]
|
||||
[app.rpc.commands.teams :as teams]
|
||||
[app.rpc.doc :as-alias doc]
|
||||
[app.rpc.permissions :as perms]
|
||||
[app.rpc.quotes :as quotes]
|
||||
[app.util.blob :as blob]
|
||||
[app.util.objects-map :as omap]
|
||||
[app.util.pointer-map :as pmap]
|
||||
[app.util.services :as sv]
|
||||
[app.util.time :as dt]
|
||||
@@ -50,47 +49,53 @@
|
||||
"expected a valid connection"
|
||||
(db/connection? conn))
|
||||
|
||||
(let [id (or id (uuid/next))
|
||||
(binding [pmap/*tracked* (pmap/create-tracked)
|
||||
cfeat/*current* features]
|
||||
(let [id (or id (uuid/next))
|
||||
|
||||
pointers (pmap/create-tracked)
|
||||
pmap? (contains? features "fdata/pointer-map")
|
||||
omap? (contains? features "fdata/objects-map")
|
||||
|
||||
data (binding [pmap/*tracked* pointers
|
||||
cfeat/*current* features
|
||||
cfeat/*wrap-with-objects-map-fn* (if omap? omap/wrap identity)
|
||||
cfeat/*wrap-with-pointer-map-fn* (if pmap? pmap/wrap identity)]
|
||||
(if create-page
|
||||
data (if create-page
|
||||
(ctf/make-file-data id)
|
||||
(ctf/make-file-data id nil)))
|
||||
(ctf/make-file-data id nil))
|
||||
|
||||
features (->> (set/difference features cfeat/frontend-only-features)
|
||||
(db/create-array conn "text"))
|
||||
file {:id id
|
||||
:project-id project-id
|
||||
:name name
|
||||
:revn revn
|
||||
:is-shared is-shared
|
||||
:version version
|
||||
:data data
|
||||
:features features
|
||||
:ignore-sync-until ignore-sync-until
|
||||
:modified-at modified-at
|
||||
:deleted-at deleted-at}
|
||||
|
||||
file (db/insert! conn :file
|
||||
(d/without-nils
|
||||
{:id id
|
||||
:project-id project-id
|
||||
:name name
|
||||
:revn revn
|
||||
:is-shared is-shared
|
||||
:data (blob/encode data)
|
||||
:features features
|
||||
:ignore-sync-until ignore-sync-until
|
||||
:modified-at modified-at
|
||||
:deleted-at deleted-at}))]
|
||||
file (if (contains? features "fdata/objects-map")
|
||||
(feat.fdata/enable-objects-map file)
|
||||
file)
|
||||
|
||||
(binding [pmap/*tracked* pointers]
|
||||
(feat.fdata/persist-pointers! cfg id))
|
||||
file (if (contains? features "fdata/pointer-map")
|
||||
(feat.fdata/enable-pointer-map file)
|
||||
file)
|
||||
|
||||
(->> (assoc params :file-id id :role :owner)
|
||||
(create-file-role! conn))
|
||||
file (d/without-nils file)]
|
||||
|
||||
(db/update! conn :project
|
||||
{:modified-at (dt/now)}
|
||||
{:id project-id})
|
||||
(db/insert! conn :file
|
||||
(-> file
|
||||
(update :data blob/encode)
|
||||
(update :features db/encode-pgarray conn "text"))
|
||||
{::db/return-keys false})
|
||||
|
||||
(files/decode-row file)))
|
||||
(when (contains? features "fdata/pointer-map")
|
||||
(feat.fdata/persist-pointers! cfg id))
|
||||
|
||||
(->> (assoc params :file-id id :role :owner)
|
||||
(create-file-role! conn))
|
||||
|
||||
(db/update! conn :project
|
||||
{:modified-at (dt/now)}
|
||||
{:id project-id})
|
||||
|
||||
file)))
|
||||
|
||||
(def ^:private schema:create-file
|
||||
[:map {:title "create-file"}
|
||||
|
||||
@@ -12,14 +12,17 @@
|
||||
[app.common.uuid :as uuid]
|
||||
[app.config :as cf]
|
||||
[app.db :as db]
|
||||
[app.db.sql :as-alias sql]
|
||||
[app.main :as-alias main]
|
||||
[app.media :as media]
|
||||
[app.rpc :as-alias rpc]
|
||||
[app.rpc.commands.files :as files]
|
||||
[app.rpc.commands.profile :as profile]
|
||||
[app.rpc.doc :as-alias doc]
|
||||
[app.storage :as sto]
|
||||
[app.util.services :as sv]
|
||||
[app.util.time :as dt]))
|
||||
[app.util.time :as dt]
|
||||
[cuerdas.core :as str]))
|
||||
|
||||
(defn check-authorized!
|
||||
[{:keys [::db/pool]} profile-id]
|
||||
@@ -57,70 +60,120 @@
|
||||
::sm/params schema:get-file-snapshots}
|
||||
[cfg {:keys [::rpc/profile-id] :as params}]
|
||||
(check-authorized! cfg profile-id)
|
||||
(db/run! cfg #(get-file-snapshots % params)))
|
||||
(db/run! cfg get-file-snapshots params))
|
||||
|
||||
(defn restore-file-snapshot!
|
||||
[{:keys [::db/conn ::sto/storage] :as cfg} {:keys [file-id id]}]
|
||||
(let [storage (media/configure-assets-storage storage conn)
|
||||
params {:id id :file-id file-id}
|
||||
options {:columns [:id :data :revn]}
|
||||
snapshot (db/get* conn :file-change params options)]
|
||||
file (files/get-minimal-file conn file-id {::db/for-update true})
|
||||
snapshot (db/get* conn :file-change
|
||||
{:file-id file-id
|
||||
:id id}
|
||||
{::db/for-share true})]
|
||||
|
||||
(when (and (some? snapshot)
|
||||
(some? (:data snapshot)))
|
||||
(when-not snapshot
|
||||
(ex/raise :type :not-found
|
||||
:code :snapshot-not-found
|
||||
:hint "unable to find snapshot with the provided label"
|
||||
:id id
|
||||
:file-id file-id))
|
||||
|
||||
(l/debug :hint "snapshot found"
|
||||
:snapshot-id (:id snapshot)
|
||||
:file-id file-id)
|
||||
(when-not (:data snapshot)
|
||||
(ex/raise :type :precondition
|
||||
:code :snapshot-without-data
|
||||
:hint "snapshot has no data"
|
||||
:label (:label snapshot)
|
||||
:file-id file-id))
|
||||
|
||||
(db/update! conn :file
|
||||
{:data (:data snapshot)}
|
||||
{:id file-id})
|
||||
(l/dbg :hint "restoring snapshot"
|
||||
:file-id (str file-id)
|
||||
:label (:label snapshot)
|
||||
:snapshot-id (str (:id snapshot)))
|
||||
|
||||
;; clean object thumbnails
|
||||
(let [sql (str "delete from file_object_thumbnail "
|
||||
" where file_id=? returning media_id")
|
||||
res (db/exec! conn [sql file-id])]
|
||||
(db/update! conn :file
|
||||
{:data (:data snapshot)
|
||||
:revn (inc (:revn file))
|
||||
:features (:features snapshot)}
|
||||
{:id file-id})
|
||||
|
||||
(doseq [media-id (into #{} (keep :media-id) res)]
|
||||
(sto/del-object! storage media-id)))
|
||||
;; clean object thumbnails
|
||||
(let [sql (str "update file_tagged_object_thumbnail "
|
||||
" set deleted_at = now() "
|
||||
" where file_id=? returning media_id")
|
||||
res (db/exec! conn [sql file-id])]
|
||||
|
||||
;; clean object thumbnails
|
||||
(let [sql (str "delete from file_thumbnail "
|
||||
" where file_id=? returning media_id")
|
||||
res (db/exec! conn [sql file-id])]
|
||||
(doseq [media-id (into #{} (keep :media-id) res)]
|
||||
(sto/del-object! storage media-id)))
|
||||
(doseq [media-id (into #{} (keep :media-id) res)]
|
||||
(sto/touch-object! storage media-id)))
|
||||
|
||||
{:id (:id snapshot)})))
|
||||
;; clean object thumbnails
|
||||
(let [sql (str "update file_thumbnail "
|
||||
" set deleted_at = now() "
|
||||
" where file_id=? returning media_id")
|
||||
res (db/exec! conn [sql file-id])]
|
||||
(doseq [media-id (into #{} (keep :media-id) res)]
|
||||
(sto/touch-object! storage media-id)))
|
||||
|
||||
(def ^:private schema:restore-file-snapshot
|
||||
[:map
|
||||
[:file-id ::sm/uuid]
|
||||
[:id ::sm/uuid]])
|
||||
{:id (:id snapshot)
|
||||
:label (:label snapshot)}))
|
||||
|
||||
(defn- resolve-snapshot-by-label
|
||||
[conn file-id label]
|
||||
(->> (db/query conn :file-change
|
||||
{:file-id file-id
|
||||
:label label}
|
||||
{::sql/order-by [[:created-at :desc]]
|
||||
::sql/columns [:file-id :id :label]})
|
||||
(first)))
|
||||
|
||||
(def ^:private
|
||||
schema:restore-file-snapshot
|
||||
[:and
|
||||
[:map
|
||||
[:file-id ::sm/uuid]
|
||||
[:id {:optional true} ::sm/uuid]
|
||||
[:label {:optional true} :string]]
|
||||
[::sm/contains-any #{:id :label}]])
|
||||
|
||||
(sv/defmethod ::restore-file-snapshot
|
||||
{::doc/added "1.20"
|
||||
::doc/skip true
|
||||
::sm/params schema:restore-file-snapshot}
|
||||
[cfg {:keys [::rpc/profile-id] :as params}]
|
||||
[cfg {:keys [::rpc/profile-id file-id id label] :as params}]
|
||||
(check-authorized! cfg profile-id)
|
||||
(db/tx-run! cfg #(restore-file-snapshot! % params)))
|
||||
(db/tx-run! cfg (fn [{:keys [::db/conn] :as cfg}]
|
||||
(let [params (cond-> params
|
||||
(and (not id) (string? label))
|
||||
(merge (resolve-snapshot-by-label conn file-id label)))]
|
||||
(restore-file-snapshot! cfg params)))))
|
||||
|
||||
(defn take-file-snapshot!
|
||||
[{:keys [::db/conn]} {:keys [file-id label]}]
|
||||
(when-let [file (db/get* conn :file {:id file-id})]
|
||||
(let [id (uuid/next)
|
||||
label (or label (str "Snapshot at " (dt/format-instant (dt/now) :rfc1123)))]
|
||||
(l/debug :hint "persisting file snapshot" :file-id file-id :label label)
|
||||
(db/insert! conn :file-change
|
||||
{:id id
|
||||
:revn (:revn file)
|
||||
:data (:data file)
|
||||
:features (:features file)
|
||||
:file-id (:id file)
|
||||
:label label})
|
||||
{:id id})))
|
||||
[cfg {:keys [file-id label]}]
|
||||
(let [conn (db/get-connection cfg)
|
||||
file (db/get conn :file {:id file-id})
|
||||
id (uuid/next)]
|
||||
|
||||
(l/debug :hint "creating file snapshot"
|
||||
:file-id (str file-id)
|
||||
:label label)
|
||||
|
||||
(db/insert! conn :file-change
|
||||
{:id id
|
||||
:revn (:revn file)
|
||||
:data (:data file)
|
||||
:features (:features file)
|
||||
:file-id (:id file)
|
||||
:label label}
|
||||
{::db/return-keys false})
|
||||
|
||||
{:id id :label label}))
|
||||
|
||||
(defn generate-snapshot-label
|
||||
[]
|
||||
(let [ts (-> (dt/now)
|
||||
(dt/format-instant)
|
||||
(str/replace #"[T:\.]" "-")
|
||||
(str/rtrim "Z"))]
|
||||
(str "snapshot-" ts)))
|
||||
|
||||
(def ^:private schema:take-file-snapshot
|
||||
[:map [:file-id ::sm/uuid]])
|
||||
@@ -131,5 +184,8 @@
|
||||
::sm/params schema:take-file-snapshot}
|
||||
[cfg {:keys [::rpc/profile-id] :as params}]
|
||||
(check-authorized! cfg profile-id)
|
||||
(db/tx-run! cfg #(take-file-snapshot! % params)))
|
||||
(db/tx-run! cfg (fn [cfg]
|
||||
(let [params (update params :label (fn [label]
|
||||
(or label (generate-snapshot-label))))]
|
||||
(take-file-snapshot! cfg params)))))
|
||||
|
||||
|
||||
@@ -8,11 +8,15 @@
|
||||
(:require
|
||||
[app.common.exceptions :as ex]
|
||||
[app.common.features :as cfeat]
|
||||
[app.common.files.changes :as fch]
|
||||
[app.common.spec :as us]
|
||||
[app.common.files.changes :as cpc]
|
||||
[app.common.schema :as sm]
|
||||
[app.common.uuid :as uuid]
|
||||
[app.config :as cf]
|
||||
[app.db :as db]
|
||||
[app.db.sql :as sql]
|
||||
[app.features.components-v2 :as feat.compv2]
|
||||
[app.features.fdata :as fdata]
|
||||
[app.loggers.audit :as audit]
|
||||
[app.rpc :as-alias rpc]
|
||||
[app.rpc.commands.files :as files]
|
||||
[app.rpc.commands.files-create :as files.create]
|
||||
@@ -20,48 +24,51 @@
|
||||
[app.rpc.commands.projects :as projects]
|
||||
[app.rpc.commands.teams :as teams]
|
||||
[app.rpc.doc :as-alias doc]
|
||||
[app.rpc.helpers :as rph]
|
||||
[app.util.blob :as blob]
|
||||
[app.util.pointer-map :as pmap]
|
||||
[app.util.services :as sv]
|
||||
[app.util.time :as dt]
|
||||
[clojure.set :as set]
|
||||
[clojure.spec.alpha :as s]))
|
||||
|
||||
[clojure.set :as set]))
|
||||
|
||||
;; --- MUTATION COMMAND: create-temp-file
|
||||
|
||||
(s/def ::create-page ::us/boolean)
|
||||
|
||||
(s/def ::create-temp-file
|
||||
(s/keys :req [::rpc/profile-id]
|
||||
:req-un [::files/name
|
||||
::files/project-id]
|
||||
:opt-un [::files/id
|
||||
::files/is-shared
|
||||
::files/features
|
||||
::create-page]))
|
||||
(def ^:private schema:create-temp-file
|
||||
[:map {:title "create-temp-file"}
|
||||
[:name :string]
|
||||
[:project-id ::sm/uuid]
|
||||
[:id {:optional true} ::sm/uuid]
|
||||
[:is-shared :boolean]
|
||||
[:features ::cfeat/features]
|
||||
[:create-page :boolean]])
|
||||
|
||||
(sv/defmethod ::create-temp-file
|
||||
{::doc/added "1.17"
|
||||
::doc/module :files}
|
||||
::doc/module :files
|
||||
::sm/params schema:create-temp-file}
|
||||
[cfg {:keys [::rpc/profile-id project-id] :as params}]
|
||||
(db/tx-run! cfg (fn [{:keys [::db/conn] :as cfg}]
|
||||
(projects/check-edition-permissions! conn profile-id project-id)
|
||||
(let [team (teams/get-team conn
|
||||
:profile-id profile-id
|
||||
:project-id project-id)
|
||||
(let [team (teams/get-team conn :profile-id profile-id :project-id project-id)
|
||||
|
||||
;; When we create files, we only need to respect the team
|
||||
;; features, because some features can be enabled
|
||||
;; globally, but the team is still not migrated properly.
|
||||
features (-> (cfeat/get-team-enabled-features cf/flags team)
|
||||
(cfeat/check-client-features! (:features params)))
|
||||
input-features (:features params #{})
|
||||
|
||||
;; If the imported project doesn't contain v2 we need to remove it
|
||||
team-features
|
||||
(cond-> (cfeat/get-team-enabled-features cf/flags team)
|
||||
(not (contains? input-features "components/v2"))
|
||||
(disj "components/v2"))
|
||||
|
||||
|
||||
;; We also include all no migration features declared by
|
||||
;; client; that enables the ability to enable a runtime
|
||||
;; feature on frontend and make it permanent on file
|
||||
features (-> (:features params #{})
|
||||
features (-> input-features
|
||||
(set/intersection cfeat/no-migration-features)
|
||||
(set/union features))
|
||||
(set/union team-features))
|
||||
|
||||
params (-> params
|
||||
(assoc :profile-id profile-id)
|
||||
@@ -72,16 +79,18 @@
|
||||
|
||||
;; --- MUTATION COMMAND: update-temp-file
|
||||
|
||||
(s/def ::update-temp-file
|
||||
(s/keys :req [::rpc/profile-id]
|
||||
:req-un [::files.update/changes
|
||||
::files.update/revn
|
||||
::files.update/session-id
|
||||
::files/id]))
|
||||
|
||||
(def ^:private schema:update-temp-file
|
||||
[:map {:title "update-temp-file"}
|
||||
[:changes [:vector ::cpc/change]]
|
||||
[:revn {:min 0} :int]
|
||||
[:session-id ::sm/uuid]
|
||||
[:id ::sm/uuid]])
|
||||
|
||||
(sv/defmethod ::update-temp-file
|
||||
{::doc/added "1.17"
|
||||
::doc/module :files}
|
||||
::doc/module :files
|
||||
::sm/params schema:update-temp-file}
|
||||
[cfg {:keys [::rpc/profile-id session-id id revn changes] :as params}]
|
||||
(db/tx-run! cfg (fn [{:keys [::db/conn]}]
|
||||
(db/insert! conn :file-change
|
||||
@@ -93,42 +102,74 @@
|
||||
:revn revn
|
||||
:data nil
|
||||
:changes (blob/encode changes)})
|
||||
nil)))
|
||||
(rph/with-meta (rph/wrap nil)
|
||||
{::audit/replace-props {:file-id id
|
||||
:revn revn}}))))
|
||||
|
||||
;; --- MUTATION COMMAND: persist-temp-file
|
||||
|
||||
(defn persist-temp-file
|
||||
[conn {:keys [id] :as params}]
|
||||
(let [file (db/get-by-id conn :file id)
|
||||
revs (db/query conn :file-change
|
||||
{:file-id id}
|
||||
{:order-by [[:revn :asc]]})
|
||||
revn (count revs)]
|
||||
[{:keys [::db/conn] :as cfg} {:keys [id ::rpc/profile-id] :as params}]
|
||||
(let [file (files/get-file cfg id
|
||||
:migrate? false
|
||||
:lock-for-update? true)]
|
||||
|
||||
(when (nil? (:deleted-at file))
|
||||
(ex/raise :type :validation
|
||||
:code :cant-persist-already-persisted-file))
|
||||
|
||||
|
||||
(let [data
|
||||
(->> revs
|
||||
(mapcat #(->> % :changes blob/decode))
|
||||
(fch/process-changes (blob/decode (:data file))))]
|
||||
(let [changes (->> (db/cursor conn
|
||||
(sql/select :file-change {:file-id id}
|
||||
{:order-by [[:revn :asc]]})
|
||||
{:chunk-size 10})
|
||||
(sequence (mapcat (comp blob/decode :changes))))
|
||||
|
||||
file (update file :data cpc/process-changes changes)
|
||||
|
||||
file (if (contains? (:features file) "fdata/objects-map")
|
||||
(fdata/enable-objects-map file)
|
||||
file)
|
||||
|
||||
file (if (contains? (:features file) "fdata/pointer-map")
|
||||
(binding [pmap/*tracked* (pmap/create-tracked)]
|
||||
(let [file (fdata/enable-pointer-map file)]
|
||||
(fdata/persist-pointers! cfg id)
|
||||
file))
|
||||
file)]
|
||||
|
||||
;; Delete changes from the changes history
|
||||
(db/delete! conn :file-change {:file-id id})
|
||||
|
||||
(db/update! conn :file
|
||||
{:deleted-at nil
|
||||
:revn revn
|
||||
:data (blob/encode data)}
|
||||
{:id id}))
|
||||
nil))
|
||||
:revn 1
|
||||
:data (blob/encode (:data file))}
|
||||
{:id id})
|
||||
|
||||
(s/def ::persist-temp-file
|
||||
(s/keys :req [::rpc/profile-id]
|
||||
:req-un [::files/id]))
|
||||
(let [team (teams/get-team conn :profile-id profile-id :project-id (:project-id file))
|
||||
file-features (:features file)
|
||||
team-features (cfeat/get-team-enabled-features cf/flags team)]
|
||||
(when (and (contains? team-features "components/v2")
|
||||
(not (contains? file-features "components/v2")))
|
||||
;; Migrate components v2
|
||||
(feat.compv2/migrate-file! cfg
|
||||
(:id file)
|
||||
:max-procs 2
|
||||
:validate? true
|
||||
:throw-on-validate? true)))
|
||||
|
||||
nil)))
|
||||
|
||||
(def ^:private schema:persist-temp-file
|
||||
[:map {:title "persist-temp-file"}
|
||||
[:id ::sm/uuid]])
|
||||
|
||||
(sv/defmethod ::persist-temp-file
|
||||
{::doc/added "1.17"
|
||||
::doc/module :files}
|
||||
::doc/module :files
|
||||
::sm/params schema:persist-temp-file}
|
||||
[cfg {:keys [::rpc/profile-id id] :as params}]
|
||||
(db/tx-run! cfg (fn [{:keys [::db/conn]}]
|
||||
(db/tx-run! cfg (fn [{:keys [::db/conn] :as cfg}]
|
||||
(files/check-edition-permissions! conn profile-id id)
|
||||
(persist-temp-file conn params))))
|
||||
(persist-temp-file cfg params))))
|
||||
|
||||
@@ -17,6 +17,7 @@
|
||||
[app.common.types.shape-tree :as ctt]
|
||||
[app.config :as cf]
|
||||
[app.db :as db]
|
||||
[app.db.sql :as-alias sql]
|
||||
[app.features.fdata :as feat.fdata]
|
||||
[app.loggers.audit :as-alias audit]
|
||||
[app.loggers.webhooks :as-alias webhooks]
|
||||
@@ -27,6 +28,7 @@
|
||||
[app.rpc.commands.teams :as teams]
|
||||
[app.rpc.cond :as-alias cond]
|
||||
[app.rpc.doc :as-alias doc]
|
||||
[app.rpc.retry :as rtry]
|
||||
[app.storage :as sto]
|
||||
[app.util.pointer-map :as pmap]
|
||||
[app.util.services :as sv]
|
||||
@@ -46,7 +48,7 @@
|
||||
(let [sql (str/concat
|
||||
"select object_id, media_id, tag "
|
||||
" from file_tagged_object_thumbnail"
|
||||
" where file_id=? and tag=?")
|
||||
" where file_id=? and tag=? and deleted_at is null")
|
||||
res (db/exec! conn [sql file-id tag])]
|
||||
(->> res
|
||||
(d/index-by :object-id (fn [row]
|
||||
@@ -58,7 +60,7 @@
|
||||
(let [sql (str/concat
|
||||
"select object_id, media_id, tag "
|
||||
" from file_tagged_object_thumbnail"
|
||||
" where file_id=?")
|
||||
" where file_id=? and deleted_at is null")
|
||||
res (db/exec! conn [sql file-id])]
|
||||
(->> res
|
||||
(d/index-by :object-id (fn [row]
|
||||
@@ -69,7 +71,7 @@
|
||||
(let [sql (str/concat
|
||||
"select object_id, media_id, tag "
|
||||
" from file_tagged_object_thumbnail"
|
||||
" where file_id=? and object_id = ANY(?)")
|
||||
" where file_id=? and object_id = ANY(?) and deleted_at is null")
|
||||
ids (db/create-array conn "text" (seq object-ids))
|
||||
res (db/exec! conn [sql file-id ids])]
|
||||
|
||||
@@ -226,34 +228,55 @@
|
||||
;; MUTATION COMMANDS
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
;; --- MUTATION COMMAND: create-file-object-thumbnail
|
||||
(def sql:get-file-object-thumbnail
|
||||
"SELECT * FROM file_tagged_object_thumbnail
|
||||
WHERE file_id = ? AND object_id = ? AND tag = ?
|
||||
FOR UPDATE")
|
||||
|
||||
(def ^:private sql:create-object-thumbnail
|
||||
"insert into file_tagged_object_thumbnail(file_id, object_id, media_id, tag)
|
||||
values (?, ?, ?, ?)
|
||||
on conflict(file_id, tag, object_id) do
|
||||
update set media_id = ?
|
||||
returning *;")
|
||||
|
||||
(defn- create-file-object-thumbnail!
|
||||
[{:keys [::db/conn ::sto/storage]} file-id object-id media tag]
|
||||
(def sql:create-file-object-thumbnail
|
||||
"INSERT INTO file_tagged_object_thumbnail (file_id, object_id, tag, media_id)
|
||||
VALUES (?, ?, ?, ?)
|
||||
ON CONFLICT (file_id, object_id, tag)
|
||||
DO UPDATE SET updated_at=?, media_id=?, deleted_at=null
|
||||
RETURNING *")
|
||||
|
||||
(defn- persist-thumbnail!
|
||||
[storage media created-at]
|
||||
(let [path (:path media)
|
||||
mtype (:mtype media)
|
||||
hash (sto/calculate-hash path)
|
||||
data (-> (sto/content path)
|
||||
(sto/wrap-with-hash hash))
|
||||
media (sto/put-object! storage
|
||||
{::sto/content data
|
||||
::sto/deduplicate? true
|
||||
::sto/touched-at (dt/now)
|
||||
:content-type mtype
|
||||
:bucket "file-object-thumbnail"})]
|
||||
(sto/wrap-with-hash hash))]
|
||||
|
||||
(db/exec-one! conn [sql:create-object-thumbnail file-id object-id
|
||||
(:id media) tag (:id media)])))
|
||||
(sto/put-object! storage
|
||||
{::sto/content data
|
||||
::sto/deduplicate? true
|
||||
::sto/touched-at created-at
|
||||
:content-type mtype
|
||||
:bucket "file-object-thumbnail"})))
|
||||
|
||||
(def schema:create-file-object-thumbnail
|
||||
|
||||
|
||||
(defn- create-file-object-thumbnail!
|
||||
[{:keys [::sto/storage] :as cfg} file-id object-id media tag]
|
||||
(let [tsnow (dt/now)
|
||||
media (persist-thumbnail! storage media tsnow)
|
||||
[th1 th2] (db/tx-run! cfg (fn [{:keys [::db/conn]}]
|
||||
(let [th1 (db/exec-one! conn [sql:get-file-object-thumbnail file-id object-id tag])
|
||||
th2 (db/exec-one! conn [sql:create-file-object-thumbnail
|
||||
file-id object-id tag (:id media)
|
||||
tsnow (:id media)])]
|
||||
[th1 th2])))]
|
||||
|
||||
(when (and (some? th1)
|
||||
(not= (:media-id th1)
|
||||
(:media-id th2)))
|
||||
(sto/touch-object! storage (:media-id th1) :async true))
|
||||
|
||||
th2))
|
||||
|
||||
(def ^:private
|
||||
schema:create-file-object-thumbnail
|
||||
[:map {:title "create-file-object-thumbnail"}
|
||||
[:file-id ::sm/uuid]
|
||||
[:object-id :string]
|
||||
@@ -263,37 +286,36 @@
|
||||
(sv/defmethod ::create-file-object-thumbnail
|
||||
{::doc/added "1.19"
|
||||
::doc/module :files
|
||||
::climit/id :file-thumbnail-ops
|
||||
::climit/key-fn ::rpc/profile-id
|
||||
::climit/id [[:file-thumbnail-ops/by-profile ::rpc/profile-id]
|
||||
[:file-thumbnail-ops/global]]
|
||||
::rtry/enabled true
|
||||
::rtry/when rtry/conflict-exception?
|
||||
::audit/skip true
|
||||
::sm/params schema:create-file-object-thumbnail}
|
||||
|
||||
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id file-id object-id media tag]}]
|
||||
(db/with-atomic [conn pool]
|
||||
(files/check-edition-permissions! conn profile-id file-id)
|
||||
(media/validate-media-type! media)
|
||||
(media/validate-media-size! media)
|
||||
[cfg {:keys [::rpc/profile-id file-id object-id media tag]}]
|
||||
(media/validate-media-type! media)
|
||||
(media/validate-media-size! media)
|
||||
|
||||
(when-not (db/read-only? conn)
|
||||
(-> cfg
|
||||
(update ::sto/storage media/configure-assets-storage)
|
||||
(assoc ::db/conn conn)
|
||||
(create-file-object-thumbnail! file-id object-id media (or tag "frame"))))))
|
||||
(db/run! cfg files/check-edition-permissions! profile-id file-id)
|
||||
|
||||
(let [cfg (update cfg ::sto/storage media/configure-assets-storage)]
|
||||
(create-file-object-thumbnail! cfg file-id object-id media (or tag "frame"))))
|
||||
|
||||
;; --- MUTATION COMMAND: delete-file-object-thumbnail
|
||||
|
||||
(defn- delete-file-object-thumbnail!
|
||||
[{:keys [::db/conn ::sto/storage]} file-id object-id]
|
||||
(when-let [{:keys [media-id]} (db/get* conn :file-tagged-object-thumbnail
|
||||
{:file-id file-id
|
||||
:object-id object-id}
|
||||
{::db/for-update? true})]
|
||||
|
||||
(when-let [{:keys [media-id tag]} (db/get* conn :file-tagged-object-thumbnail
|
||||
{:file-id file-id
|
||||
:object-id object-id}
|
||||
{::sql/for-update true})]
|
||||
(sto/touch-object! storage media-id)
|
||||
(db/delete! conn :file-tagged-object-thumbnail
|
||||
(db/update! conn :file-tagged-object-thumbnail
|
||||
{:deleted-at (dt/now)}
|
||||
{:file-id file-id
|
||||
:object-id object-id})
|
||||
nil))
|
||||
:object-id object-id
|
||||
:tag tag})))
|
||||
|
||||
(s/def ::delete-file-object-thumbnail
|
||||
(s/keys :req [::rpc/profile-id]
|
||||
@@ -302,29 +324,21 @@
|
||||
(sv/defmethod ::delete-file-object-thumbnail
|
||||
{::doc/added "1.19"
|
||||
::doc/module :files
|
||||
::climit/id :file-thumbnail-ops
|
||||
::climit/key-fn ::rpc/profile-id
|
||||
::doc/deprecated "1.20"
|
||||
::climit/id [[:file-thumbnail-ops/by-profile ::rpc/profile-id]
|
||||
[:file-thumbnail-ops/global]]
|
||||
::audit/skip true}
|
||||
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id file-id object-id]}]
|
||||
|
||||
(db/with-atomic [conn pool]
|
||||
(files/check-edition-permissions! conn profile-id file-id)
|
||||
|
||||
(when-not (db/read-only? conn)
|
||||
(-> cfg
|
||||
(update ::sto/storage media/configure-assets-storage)
|
||||
(assoc ::db/conn conn)
|
||||
(delete-file-object-thumbnail! file-id object-id))
|
||||
nil)))
|
||||
[cfg {:keys [::rpc/profile-id file-id object-id]}]
|
||||
(db/tx-run! cfg (fn [{:keys [::db/conn] :as cfg}]
|
||||
(files/check-edition-permissions! conn profile-id file-id)
|
||||
(when-not (db/read-only? conn)
|
||||
(-> cfg
|
||||
(update ::sto/storage media/configure-assets-storage conn)
|
||||
(delete-file-object-thumbnail! file-id object-id))
|
||||
nil))))
|
||||
|
||||
;; --- MUTATION COMMAND: create-file-thumbnail
|
||||
|
||||
(def ^:private sql:create-file-thumbnail
|
||||
"insert into file_thumbnail (file_id, revn, media_id, props)
|
||||
values (?, ?, ?, ?::jsonb)
|
||||
on conflict(file_id, revn) do
|
||||
update set media_id=?, props=?, updated_at=now();")
|
||||
|
||||
(defn- create-file-thumbnail!
|
||||
[{:keys [::db/conn ::sto/storage]} {:keys [file-id revn props media] :as params}]
|
||||
(media/validate-media-type! media)
|
||||
@@ -336,36 +350,67 @@
|
||||
hash (sto/calculate-hash path)
|
||||
data (-> (sto/content path)
|
||||
(sto/wrap-with-hash hash))
|
||||
tnow (dt/now)
|
||||
media (sto/put-object! storage
|
||||
{::sto/content data
|
||||
::sto/deduplicate? false
|
||||
::sto/deduplicate? true
|
||||
::sto/touched-at tnow
|
||||
:content-type mtype
|
||||
:bucket "file-thumbnail"})]
|
||||
(db/exec-one! conn [sql:create-file-thumbnail file-id revn
|
||||
(:id media) props
|
||||
(:id media) props])
|
||||
:bucket "file-thumbnail"})
|
||||
|
||||
thumb (db/get* conn :file-thumbnail
|
||||
{:file-id file-id
|
||||
:revn revn}
|
||||
{::db/remove-deleted false
|
||||
::sql/for-update true})]
|
||||
|
||||
(if (some? thumb)
|
||||
(do
|
||||
;; We mark the old media id as touched if it does not match
|
||||
(when (not= (:id media) (:media-id thumb))
|
||||
(sto/touch-object! storage (:media-id thumb)))
|
||||
|
||||
(db/update! conn :file-thumbnail
|
||||
{:media-id (:id media)
|
||||
:deleted-at nil
|
||||
:updated-at tnow
|
||||
:props props}
|
||||
{:file-id file-id
|
||||
:revn revn}))
|
||||
|
||||
(db/insert! conn :file-thumbnail
|
||||
{:file-id file-id
|
||||
:revn revn
|
||||
:created-at tnow
|
||||
:updated-at tnow
|
||||
:props props
|
||||
:media-id (:id media)}))
|
||||
|
||||
media))
|
||||
|
||||
(def ^:private
|
||||
schema:create-file-thumbnail
|
||||
[:map {:title "create-file-thumbnail"}
|
||||
[:file-id ::sm/uuid]
|
||||
[:revn :int]
|
||||
[:media ::media/upload]])
|
||||
|
||||
(sv/defmethod ::create-file-thumbnail
|
||||
"Creates or updates the file thumbnail. Mainly used for paint the
|
||||
grid thumbnails."
|
||||
{::doc/added "1.19"
|
||||
::doc/module :files
|
||||
::audit/skip true
|
||||
::climit/id :file-thumbnail-ops
|
||||
::climit/key-fn ::rpc/profile-id
|
||||
::sm/params [:map {:title "create-file-thumbnail"}
|
||||
[:file-id ::sm/uuid]
|
||||
[:revn :int]
|
||||
[:media ::media/upload]]}
|
||||
::climit/id [[:file-thumbnail-ops/by-profile ::rpc/profile-id]
|
||||
[:file-thumbnail-ops/global]]
|
||||
::rtry/enabled true
|
||||
::rtry/when rtry/conflict-exception?
|
||||
::sm/params schema:create-file-thumbnail}
|
||||
|
||||
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id file-id] :as params}]
|
||||
(db/with-atomic [conn pool]
|
||||
(files/check-edition-permissions! conn profile-id file-id)
|
||||
(when-not (db/read-only? conn)
|
||||
(let [media (-> cfg
|
||||
(update ::sto/storage media/configure-assets-storage)
|
||||
(assoc ::db/conn conn)
|
||||
(create-file-thumbnail! params))]
|
||||
|
||||
{:uri (files/resolve-public-uri (:id media))}))))
|
||||
[cfg {:keys [::rpc/profile-id file-id] :as params}]
|
||||
(db/tx-run! cfg (fn [{:keys [::db/conn] :as cfg}]
|
||||
(files/check-edition-permissions! conn profile-id file-id)
|
||||
(when-not (db/read-only? conn)
|
||||
(let [cfg (update cfg ::sto/storage media/configure-assets-storage)
|
||||
media (create-file-thumbnail! cfg params)]
|
||||
{:uri (files/resolve-public-uri (:id media))})))))
|
||||
|
||||
@@ -30,41 +30,39 @@
|
||||
[app.rpc.doc :as-alias doc]
|
||||
[app.rpc.helpers :as rph]
|
||||
[app.util.blob :as blob]
|
||||
[app.util.objects-map :as omap]
|
||||
[app.util.pointer-map :as pmap]
|
||||
[app.util.services :as sv]
|
||||
[app.util.time :as dt]
|
||||
[app.worker :as-alias wrk]
|
||||
[clojure.set :as set]))
|
||||
[clojure.set :as set]
|
||||
[promesa.exec :as px]))
|
||||
|
||||
;; --- SCHEMA
|
||||
|
||||
(def ^:private
|
||||
schema:update-file
|
||||
(sm/define
|
||||
[:map {:title "update-file"}
|
||||
[:id ::sm/uuid]
|
||||
[:session-id ::sm/uuid]
|
||||
[:revn {:min 0} :int]
|
||||
[:features {:optional true} ::cfeat/features]
|
||||
[:changes {:optional true} [:vector ::cpc/change]]
|
||||
[:changes-with-metadata {:optional true}
|
||||
[:vector [:map
|
||||
[:changes [:vector ::cpc/change]]
|
||||
[:hint-origin {:optional true} :keyword]
|
||||
[:hint-events {:optional true} [:vector :string]]]]]
|
||||
[:skip-validate {:optional true} :boolean]]))
|
||||
[:map {:title "update-file"}
|
||||
[:id ::sm/uuid]
|
||||
[:session-id ::sm/uuid]
|
||||
[:revn {:min 0} :int]
|
||||
[:features {:optional true} ::cfeat/features]
|
||||
[:changes {:optional true} [:vector ::cpc/change]]
|
||||
[:changes-with-metadata {:optional true}
|
||||
[:vector [:map
|
||||
[:changes [:vector ::cpc/change]]
|
||||
[:hint-origin {:optional true} :keyword]
|
||||
[:hint-events {:optional true} [:vector :string]]]]]
|
||||
[:skip-validate {:optional true} :boolean]])
|
||||
|
||||
(def ^:private
|
||||
schema:update-file-result
|
||||
(sm/define
|
||||
[:vector {:title "update-file-result"}
|
||||
[:map
|
||||
[:changes [:vector ::cpc/change]]
|
||||
[:file-id ::sm/uuid]
|
||||
[:id ::sm/uuid]
|
||||
[:revn {:min 0} :int]
|
||||
[:session-id ::sm/uuid]]]))
|
||||
[:vector {:title "update-file-result"}
|
||||
[:map
|
||||
[:changes [:vector ::cpc/change]]
|
||||
[:file-id ::sm/uuid]
|
||||
[:id ::sm/uuid]
|
||||
[:revn {:min 0} :int]
|
||||
[:session-id ::sm/uuid]]])
|
||||
|
||||
;; --- HELPERS
|
||||
|
||||
@@ -72,14 +70,26 @@
|
||||
;; to all clients using it.
|
||||
|
||||
(def ^:private library-change-types
|
||||
#{:add-color :mod-color :del-color
|
||||
:add-media :mod-media :del-media
|
||||
:add-component :mod-component :del-component :restore-component
|
||||
:add-typography :mod-typography :del-typography})
|
||||
#{:add-color
|
||||
:mod-color
|
||||
:del-color
|
||||
:add-media
|
||||
:mod-media
|
||||
:del-media
|
||||
:add-component
|
||||
:mod-component
|
||||
:del-component
|
||||
:restore-component
|
||||
:add-typography
|
||||
:mod-typography
|
||||
:del-typography})
|
||||
|
||||
(def ^:private file-change-types
|
||||
#{:add-obj :mod-obj :del-obj
|
||||
:reg-objects :mov-objects})
|
||||
#{:add-obj
|
||||
:mod-obj
|
||||
:del-obj
|
||||
:reg-objects
|
||||
:mov-objects})
|
||||
|
||||
(defn- library-change?
|
||||
[{:keys [type] :as change}]
|
||||
@@ -108,18 +118,11 @@
|
||||
[f]
|
||||
(fn [cfg {:keys [id] :as file}]
|
||||
(binding [pmap/*tracked* (pmap/create-tracked)
|
||||
pmap/*load-fn* (partial feat.fdata/load-pointer cfg id)
|
||||
cfeat/*wrap-with-pointer-map-fn* pmap/wrap]
|
||||
pmap/*load-fn* (partial feat.fdata/load-pointer cfg id)]
|
||||
(let [result (f cfg file)]
|
||||
(feat.fdata/persist-pointers! cfg id)
|
||||
result))))
|
||||
|
||||
(defn- wrap-with-objects-map-context
|
||||
[f]
|
||||
(fn [cfg file]
|
||||
(binding [cfeat/*wrap-with-objects-map-fn* omap/wrap]
|
||||
(f cfg file))))
|
||||
|
||||
(declare get-lagged-changes)
|
||||
(declare send-notifications!)
|
||||
(declare update-file)
|
||||
@@ -132,8 +135,8 @@
|
||||
;; database.
|
||||
|
||||
(sv/defmethod ::update-file
|
||||
{::climit/id :update-file/by-profile
|
||||
::climit/key-fn ::rpc/profile-id
|
||||
{::climit/id [[:update-file/by-profile ::rpc/profile-id]
|
||||
[:update-file/global]]
|
||||
::webhooks/event? true
|
||||
::webhooks/batch-timeout (dt/duration "2m")
|
||||
::webhooks/batch-key (webhooks/key-fn ::rpc/profile-id :id)
|
||||
@@ -180,18 +183,15 @@
|
||||
(l/trace :hint "update-file" :time (dt/format-duration elapsed))))))))))
|
||||
|
||||
(defn update-file
|
||||
[{:keys [::db/conn ::mtx/metrics] :as cfg}
|
||||
{:keys [id file features changes changes-with-metadata] :as params}]
|
||||
[{:keys [::mtx/metrics] :as cfg}
|
||||
{:keys [file features changes changes-with-metadata] :as params}]
|
||||
(let [features (-> features
|
||||
(set/difference cfeat/frontend-only-features)
|
||||
(set/union (:features file)))
|
||||
|
||||
update-fn (cond-> update-file*
|
||||
(contains? features "fdata/pointer-map")
|
||||
(wrap-with-pointer-map-context)
|
||||
|
||||
(contains? features "fdata/objects-map")
|
||||
(wrap-with-objects-map-context))
|
||||
(wrap-with-pointer-map-context))
|
||||
|
||||
changes (if changes-with-metadata
|
||||
(->> changes-with-metadata (mapcat :changes) vec)
|
||||
@@ -207,12 +207,6 @@
|
||||
|
||||
(mtx/run! metrics {:id :update-file-changes :inc (count changes)})
|
||||
|
||||
(when (not= features (:features file))
|
||||
(let [features (db/create-array conn "text" features)]
|
||||
(db/update! conn :file
|
||||
{:features features}
|
||||
{:id id})))
|
||||
|
||||
(binding [cfeat/*current* features
|
||||
cfeat/*previous* (:features file)]
|
||||
(let [file (assoc file :features features)
|
||||
@@ -232,13 +226,10 @@
|
||||
(defn- update-file*
|
||||
[{:keys [::db/conn ::wrk/executor] :as cfg}
|
||||
{:keys [profile-id file changes session-id ::created-at skip-validate] :as params}]
|
||||
(let [;; Process the file data in the CLIMIT context; scheduling it
|
||||
;; to be executed on a separated executor for avoid to do the
|
||||
;; CPU intensive operation on vthread.
|
||||
|
||||
update-fdata-fn (partial update-file-data cfg file changes skip-validate)
|
||||
file (-> (climit/configure cfg :update-file/global)
|
||||
(climit/run! update-fdata-fn executor))]
|
||||
(let [;; Process the file data on separated thread for avoid to do
|
||||
;; the CPU intensive operation on vthread.
|
||||
file (px/invoke! executor (partial update-file-data cfg file changes skip-validate))
|
||||
features (db/create-array conn "text" (:features file))]
|
||||
|
||||
(db/insert! conn :file-change
|
||||
{:id (uuid/next)
|
||||
@@ -250,11 +241,14 @@
|
||||
:features (db/create-array conn "text" (:features file))
|
||||
:data (when (take-snapshot? file)
|
||||
(:data file))
|
||||
:changes (blob/encode changes)})
|
||||
:changes (blob/encode changes)}
|
||||
{::db/return-keys false})
|
||||
|
||||
(db/update! conn :file
|
||||
{:revn (:revn file)
|
||||
:data (:data file)
|
||||
:version (:version file)
|
||||
:features features
|
||||
:data-backend nil
|
||||
:modified-at created-at
|
||||
:has-media-trimmed false}
|
||||
@@ -291,9 +285,19 @@
|
||||
(let [file (update file :data (fn [data]
|
||||
(-> data
|
||||
(blob/decode)
|
||||
(assoc :id (:id file))
|
||||
(fmg/migrate-data)
|
||||
(d/without-nils))))
|
||||
(assoc :id (:id file)))))
|
||||
|
||||
;; For avoid unnecesary overhead of creating multiple pointers
|
||||
;; and handly internally with objects map in their worst
|
||||
;; case (when probably all shapes and all pointers will be
|
||||
;; readed in any case), we just realize/resolve them before
|
||||
;; applying the migration to the file
|
||||
file (if (fmg/need-migration? file)
|
||||
(-> file
|
||||
(update :data feat.fdata/process-pointers deref)
|
||||
(update :data feat.fdata/process-objects (partial into {}))
|
||||
(fmg/migrate-file))
|
||||
file)
|
||||
|
||||
;; WARNING: this ruins performance; maybe we need to find
|
||||
;; some other way to do general validation
|
||||
@@ -304,14 +308,21 @@
|
||||
(into [file] (map (fn [{:keys [id]}]
|
||||
(binding [pmap/*load-fn* (partial feat.fdata/load-pointer cfg id)
|
||||
pmap/*tracked* nil]
|
||||
;; We do not resolve the objects maps here
|
||||
;; because there is a lower probability that all
|
||||
;; shapes needed to be loded into memory, so we
|
||||
;; leeave it on lazy status
|
||||
(-> (files/get-file cfg id :migrate? false)
|
||||
(feat.fdata/process-pointers deref) ; ensure all pointers resolved
|
||||
(update :data feat.fdata/process-pointers deref) ; ensure all pointers resolved
|
||||
(update :data feat.fdata/process-objects (partial into {}))
|
||||
(fmg/migrate-file))))))
|
||||
(d/index-by :id)))
|
||||
|
||||
|
||||
file (-> (files/check-version! file)
|
||||
(update :revn inc)
|
||||
(update :data cpc/process-changes changes))]
|
||||
(update :data cpc/process-changes changes)
|
||||
(update :data d/without-nils))]
|
||||
|
||||
(when (contains? cf/flags :soft-file-validation)
|
||||
(soft-validate-file! file libs))
|
||||
@@ -328,12 +339,10 @@
|
||||
(val/validate-file-schema! file))
|
||||
|
||||
(cond-> file
|
||||
(and (contains? cfeat/*current* "fdata/objects-map")
|
||||
(not (contains? cfeat/*previous* "fdata/objects-map")))
|
||||
(contains? cfeat/*current* "fdata/objects-map")
|
||||
(feat.fdata/enable-objects-map)
|
||||
|
||||
(and (contains? cfeat/*current* "fdata/pointer-map")
|
||||
(not (contains? cfeat/*previous* "fdata/pointer-map")))
|
||||
(contains? cfeat/*current* "fdata/pointer-map")
|
||||
(feat.fdata/enable-pointer-map)
|
||||
|
||||
:always
|
||||
|
||||
@@ -8,14 +8,15 @@
|
||||
(:require
|
||||
[app.common.data.macros :as dm]
|
||||
[app.common.exceptions :as ex]
|
||||
[app.common.spec :as us]
|
||||
[app.common.schema :as sm]
|
||||
[app.common.uuid :as uuid]
|
||||
[app.db :as db]
|
||||
[app.db.sql :as-alias sql]
|
||||
[app.loggers.audit :as-alias audit]
|
||||
[app.loggers.webhooks :as-alias webhooks]
|
||||
[app.media :as media]
|
||||
[app.rpc :as-alias rpc]
|
||||
[app.rpc.climit :as climit]
|
||||
[app.rpc.climit :as-alias climit]
|
||||
[app.rpc.commands.files :as files]
|
||||
[app.rpc.commands.projects :as projects]
|
||||
[app.rpc.commands.teams :as teams]
|
||||
@@ -26,38 +27,27 @@
|
||||
[app.util.services :as sv]
|
||||
[app.util.time :as dt]
|
||||
[app.worker :as-alias wrk]
|
||||
[clojure.spec.alpha :as s]))
|
||||
[promesa.exec :as px]))
|
||||
|
||||
(def valid-weight #{100 200 300 400 500 600 700 800 900 950})
|
||||
(def valid-style #{"normal" "italic"})
|
||||
|
||||
(s/def ::data (s/map-of ::us/string any?))
|
||||
(s/def ::file-id ::us/uuid)
|
||||
(s/def ::font-id ::us/uuid)
|
||||
(s/def ::id ::us/uuid)
|
||||
(s/def ::name ::us/not-empty-string)
|
||||
(s/def ::project-id ::us/uuid)
|
||||
(s/def ::share-id ::us/uuid)
|
||||
(s/def ::style valid-style)
|
||||
(s/def ::team-id ::us/uuid)
|
||||
(s/def ::weight valid-weight)
|
||||
|
||||
;; --- QUERY: Get font variants
|
||||
|
||||
(s/def ::get-font-variants
|
||||
(s/and
|
||||
(s/keys :req [::rpc/profile-id]
|
||||
:opt-un [::team-id
|
||||
::file-id
|
||||
::project-id
|
||||
::share-id])
|
||||
(fn [o]
|
||||
(or (contains? o :team-id)
|
||||
(contains? o :file-id)
|
||||
(contains? o :project-id)))))
|
||||
(def ^:private
|
||||
schema:get-font-variants
|
||||
[:schema {:title "get-font-variants"}
|
||||
[:and
|
||||
[:map
|
||||
[:team-id {:optional true} ::sm/uuid]
|
||||
[:file-id {:optional true} ::sm/uuid]
|
||||
[:project-id {:optional true} ::sm/uuid]
|
||||
[:share-id {:optional true} ::sm/uuid]]
|
||||
[::sm/contains-any #{:team-id :file-id :project-id}]]])
|
||||
|
||||
(sv/defmethod ::get-font-variants
|
||||
{::doc/added "1.18"}
|
||||
{::doc/added "1.18"
|
||||
::sm/params schema:get-font-variants}
|
||||
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id team-id file-id project-id share-id] :as params}]
|
||||
(dm/with-open [conn (db/open pool)]
|
||||
(cond
|
||||
@@ -87,28 +77,33 @@
|
||||
|
||||
(declare create-font-variant)
|
||||
|
||||
(s/def ::create-font-variant
|
||||
(s/keys :req [::rpc/profile-id]
|
||||
:req-un [::team-id
|
||||
::data
|
||||
::font-id
|
||||
::font-family
|
||||
::font-weight
|
||||
::font-style]))
|
||||
(def ^:private schema:create-font-variant
|
||||
[:map {:title "create-font-variant"}
|
||||
[:team-id ::sm/uuid]
|
||||
[:data [:map-of :string :any]]
|
||||
[:font-id ::sm/uuid]
|
||||
[:font-family :string]
|
||||
[:font-weight [::sm/one-of {:format "number"} valid-weight]]
|
||||
[:font-style [::sm/one-of {:format "string"} valid-style]]])
|
||||
|
||||
(sv/defmethod ::create-font-variant
|
||||
{::doc/added "1.18"
|
||||
::webhooks/event? true}
|
||||
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id team-id] :as params}]
|
||||
(let [cfg (update cfg ::sto/storage media/configure-assets-storage)]
|
||||
(teams/check-edition-permissions! pool profile-id team-id)
|
||||
(quotes/check-quote! pool {::quotes/id ::quotes/font-variants-per-team
|
||||
::quotes/profile-id profile-id
|
||||
::quotes/team-id team-id})
|
||||
(create-font-variant cfg (assoc params :profile-id profile-id))))
|
||||
::climit/id [[:process-font/by-profile ::rpc/profile-id]
|
||||
[:process-font/global]]
|
||||
::webhooks/event? true
|
||||
::sm/params schema:create-font-variant}
|
||||
[cfg {:keys [::rpc/profile-id team-id] :as params}]
|
||||
(db/tx-run! cfg
|
||||
(fn [{:keys [::db/conn] :as cfg}]
|
||||
(let [cfg (update cfg ::sto/storage media/configure-assets-storage)]
|
||||
(teams/check-edition-permissions! conn profile-id team-id)
|
||||
(quotes/check-quote! conn {::quotes/id ::quotes/font-variants-per-team
|
||||
::quotes/profile-id profile-id
|
||||
::quotes/team-id team-id})
|
||||
(create-font-variant cfg (assoc params :profile-id profile-id))))))
|
||||
|
||||
(defn create-font-variant
|
||||
[{:keys [::sto/storage ::db/pool] :as cfg} {:keys [data] :as params}]
|
||||
[{:keys [::sto/storage ::db/conn ::wrk/executor]} {:keys [data] :as params}]
|
||||
(letfn [(generate-missing! [data]
|
||||
(let [data (media/run {:cmd :generate-fonts :input data})]
|
||||
(when (and (not (contains? data "font/otf"))
|
||||
@@ -136,6 +131,7 @@
|
||||
ttf-params (prepare-font data "font/ttf")
|
||||
wf1-params (prepare-font data "font/woff")
|
||||
wf2-params (prepare-font data "font/woff2")]
|
||||
|
||||
(cond-> {}
|
||||
(some? otf-params)
|
||||
(assoc :otf (sto/put-object! storage otf-params))
|
||||
@@ -147,7 +143,7 @@
|
||||
(assoc :woff2 (sto/put-object! storage wf2-params)))))
|
||||
|
||||
(insert-font-variant! [{:keys [woff1 woff2 otf ttf]}]
|
||||
(db/insert! pool :team-font-variant
|
||||
(db/insert! conn :team-font-variant
|
||||
{:id (uuid/next)
|
||||
:team-id (:team-id params)
|
||||
:font-id (:font-id params)
|
||||
@@ -159,72 +155,112 @@
|
||||
:otf-file-id (:id otf)
|
||||
:ttf-file-id (:id ttf)}))]
|
||||
|
||||
(let [data (-> (climit/configure cfg :process-font/global)
|
||||
(climit/run! (partial generate-missing! data)
|
||||
(::wrk/executor cfg)))
|
||||
(let [data (px/invoke! executor (partial generate-missing! data))
|
||||
assets (persist-fonts-files! data)
|
||||
result (insert-font-variant! assets)]
|
||||
(vary-meta result assoc ::audit/replace-props (update params :data (comp vec keys))))))
|
||||
|
||||
;; --- UPDATE FONT FAMILY
|
||||
|
||||
(s/def ::update-font
|
||||
(s/keys :req [::rpc/profile-id]
|
||||
:req-un [::team-id ::id ::name]))
|
||||
(def ^:private
|
||||
schema:update-font
|
||||
[:map {:title "update-font"}
|
||||
[:team-id ::sm/uuid]
|
||||
[:id ::sm/uuid]
|
||||
[:name :string]])
|
||||
|
||||
(sv/defmethod ::update-font
|
||||
{::doc/added "1.18"
|
||||
::webhooks/event? true}
|
||||
[{:keys [::db/pool]} {:keys [::rpc/profile-id team-id id name]}]
|
||||
(db/with-atomic [conn pool]
|
||||
(teams/check-edition-permissions! conn profile-id team-id)
|
||||
(rph/with-meta
|
||||
(db/update! conn :team-font-variant
|
||||
{:font-family name}
|
||||
{:font-id id
|
||||
:team-id team-id})
|
||||
{::audit/replace-props {:id id
|
||||
:name name
|
||||
:team-id team-id
|
||||
:profile-id profile-id}})))
|
||||
::webhooks/event? true
|
||||
::sm/params schema:update-font}
|
||||
[cfg {:keys [::rpc/profile-id team-id id name]}]
|
||||
(db/tx-run! cfg
|
||||
(fn [{:keys [::db/conn]}]
|
||||
(teams/check-edition-permissions! conn profile-id team-id)
|
||||
|
||||
(db/update! conn :team-font-variant
|
||||
{:font-family name}
|
||||
{:font-id id
|
||||
:team-id team-id})
|
||||
|
||||
(rph/with-meta (rph/wrap nil)
|
||||
{::audit/replace-props {:id id
|
||||
:name name
|
||||
:team-id team-id
|
||||
:profile-id profile-id}}))))
|
||||
|
||||
;; --- DELETE FONT
|
||||
|
||||
(s/def ::delete-font
|
||||
(s/keys :req [::rpc/profile-id]
|
||||
:req-un [::team-id ::id]))
|
||||
(def ^:private
|
||||
schema:delete-font
|
||||
[:map {:title "delete-font"}
|
||||
[:team-id ::sm/uuid]
|
||||
[:id ::sm/uuid]])
|
||||
|
||||
(sv/defmethod ::delete-font
|
||||
{::doc/added "1.18"
|
||||
::webhooks/event? true}
|
||||
[{:keys [::db/pool]} {:keys [::rpc/profile-id id team-id]}]
|
||||
(db/with-atomic [conn pool]
|
||||
(teams/check-edition-permissions! conn profile-id team-id)
|
||||
(let [font (db/update! conn :team-font-variant
|
||||
{:deleted-at (dt/now)}
|
||||
{:font-id id :team-id team-id})]
|
||||
(rph/with-meta (rph/wrap)
|
||||
{::audit/props {:id id
|
||||
:team-id team-id
|
||||
:name (:font-family font)
|
||||
:profile-id profile-id}}))))
|
||||
::webhooks/event? true
|
||||
::sm/params schema:delete-font}
|
||||
[cfg {:keys [::rpc/profile-id id team-id]}]
|
||||
(db/tx-run! cfg
|
||||
(fn [{:keys [::db/conn ::sto/storage] :as cfg}]
|
||||
(teams/check-edition-permissions! conn profile-id team-id)
|
||||
(let [fonts (db/query conn :team-font-variant
|
||||
{:team-id team-id
|
||||
:font-id id
|
||||
:deleted-at nil}
|
||||
{::sql/for-update true})
|
||||
storage (media/configure-assets-storage storage conn)
|
||||
tnow (dt/now)]
|
||||
|
||||
(when-not (seq fonts)
|
||||
(ex/raise :type :not-found
|
||||
:code :object-not-found))
|
||||
|
||||
(doseq [font fonts]
|
||||
(db/update! conn :team-font-variant
|
||||
{:deleted-at tnow}
|
||||
{:id (:id font)})
|
||||
(some->> (:woff1-file-id font) (sto/touch-object! storage))
|
||||
(some->> (:woff2-file-id font) (sto/touch-object! storage))
|
||||
(some->> (:ttf-file-id font) (sto/touch-object! storage))
|
||||
(some->> (:otf-file-id font) (sto/touch-object! storage)))
|
||||
|
||||
(rph/with-meta (rph/wrap)
|
||||
{::audit/props {:id id
|
||||
:team-id team-id
|
||||
:name (:font-family (peek fonts))
|
||||
:profile-id profile-id}})))))
|
||||
|
||||
;; --- DELETE FONT VARIANT
|
||||
|
||||
(s/def ::delete-font-variant
|
||||
(s/keys :req [::rpc/profile-id]
|
||||
:req-un [::team-id ::id]))
|
||||
(def ^:private schema:delete-font-variant
|
||||
[:map {:title "delete-font-variant"}
|
||||
[:team-id ::sm/uuid]
|
||||
[:id ::sm/uuid]])
|
||||
|
||||
(sv/defmethod ::delete-font-variant
|
||||
{::doc/added "1.18"
|
||||
::webhooks/event? true}
|
||||
[{:keys [::db/pool]} {:keys [::rpc/profile-id id team-id]}]
|
||||
(db/with-atomic [conn pool]
|
||||
(teams/check-edition-permissions! conn profile-id team-id)
|
||||
(let [variant (db/update! conn :team-font-variant
|
||||
{:deleted-at (dt/now)}
|
||||
{:id id :team-id team-id})]
|
||||
(rph/with-meta (rph/wrap)
|
||||
{::audit/props {:font-family (:font-family variant)
|
||||
:font-id (:font-id variant)}}))))
|
||||
::webhooks/event? true
|
||||
::sm/params schema:delete-font-variant}
|
||||
[cfg {:keys [::rpc/profile-id id team-id]}]
|
||||
(db/tx-run! cfg
|
||||
(fn [{:keys [::db/conn ::sto/storage] :as cfg}]
|
||||
(teams/check-edition-permissions! conn profile-id team-id)
|
||||
(let [variant (db/get conn :team-font-variant
|
||||
{:id id :team-id team-id}
|
||||
{::sql/for-update true})
|
||||
storage (media/configure-assets-storage storage conn)]
|
||||
|
||||
(db/update! conn :team-font-variant
|
||||
{:deleted-at (dt/now)}
|
||||
{:id (:id variant)})
|
||||
|
||||
(some->> (:woff1-file-id variant) (sto/touch-object! storage))
|
||||
(some->> (:woff2-file-id variant) (sto/touch-object! storage))
|
||||
(some->> (:ttf-file-id variant) (sto/touch-object! storage))
|
||||
(some->> (:otf-file-id variant) (sto/touch-object! storage))
|
||||
|
||||
(rph/with-meta (rph/wrap)
|
||||
{::audit/props {:font-family (:font-family variant)
|
||||
:font-id (:font-id variant)}})))))
|
||||
|
||||
@@ -18,6 +18,7 @@
|
||||
[app.rpc.commands.profile :as profile]
|
||||
[app.rpc.doc :as-alias doc]
|
||||
[app.rpc.helpers :as rph]
|
||||
[app.setup :as-alias setup]
|
||||
[app.tokens :as tokens]
|
||||
[app.util.services :as sv]
|
||||
[clojure.spec.alpha :as s]))
|
||||
@@ -40,7 +41,7 @@
|
||||
{::rpc/auth false
|
||||
::doc/added "1.15"
|
||||
::doc/module :auth}
|
||||
[{:keys [::main/props ::ldap/provider] :as cfg} params]
|
||||
[{:keys [::setup/props ::ldap/provider] :as cfg} params]
|
||||
(when-not provider
|
||||
(ex/raise :type :restriction
|
||||
:code :ldap-not-initialized
|
||||
@@ -82,8 +83,8 @@
|
||||
(db/tx-run! cfg
|
||||
(fn [{:keys [::db/conn] :as cfg}]
|
||||
(or (some->> (:email info)
|
||||
(profile/get-profile-by-email conn)
|
||||
(profile/decode-row))
|
||||
(profile/clean-email)
|
||||
(profile/get-profile-by-email conn))
|
||||
(->> (assoc info :is-active true :is-demo false)
|
||||
(auth/create-profile! conn)
|
||||
(auth/create-profile-rels! conn)
|
||||
|
||||
@@ -7,37 +7,83 @@
|
||||
(ns app.rpc.commands.management
|
||||
"A collection of RPC methods for manage the files, projects and team organization."
|
||||
(:require
|
||||
[app.common.data :as d]
|
||||
[app.binfile.common :as bfc]
|
||||
[app.binfile.v1 :as bf.v1]
|
||||
[app.common.exceptions :as ex]
|
||||
[app.common.features :as cfeat]
|
||||
[app.common.files.migrations :as pmg]
|
||||
[app.common.schema :as sm]
|
||||
[app.common.uuid :as uuid]
|
||||
[app.config :as cf]
|
||||
[app.db :as db]
|
||||
[app.features.fdata :as feat.fdata]
|
||||
[app.http.sse :as sse]
|
||||
[app.loggers.webhooks :as-alias webhooks]
|
||||
[app.rpc :as-alias rpc]
|
||||
[app.rpc.commands.binfile :as binfile]
|
||||
[app.rpc.commands.files :as files]
|
||||
[app.rpc.commands.projects :as proj]
|
||||
[app.rpc.commands.teams :as teams]
|
||||
[app.rpc.doc :as-alias doc]
|
||||
[app.setup :as-alias setup]
|
||||
[app.setup.templates :as tmpl]
|
||||
[app.util.blob :as blob]
|
||||
[app.util.pointer-map :as pmap]
|
||||
[app.util.services :as sv]
|
||||
[app.util.time :as dt]
|
||||
[app.worker :as-alias wrk]
|
||||
[clojure.walk :as walk]
|
||||
[promesa.core :as p]
|
||||
[promesa.exec :as px]))
|
||||
|
||||
;; --- COMMAND: Duplicate File
|
||||
|
||||
(declare duplicate-file)
|
||||
(defn duplicate-file
|
||||
[{:keys [::db/conn ::bfc/timestamp] :as cfg} {:keys [profile-id file-id name reset-shared-flag] :as params}]
|
||||
(let [;; We don't touch the original file on duplication
|
||||
file (bfc/get-file cfg file-id)
|
||||
project-id (:project-id file)
|
||||
file (-> file
|
||||
(update :id bfc/lookup-index)
|
||||
(update :project-id bfc/lookup-index)
|
||||
(cond-> (string? name)
|
||||
(assoc :name name))
|
||||
(cond-> (true? reset-shared-flag)
|
||||
(assoc :is-shared false)))
|
||||
|
||||
flibs (bfc/get-files-rels cfg #{file-id})
|
||||
fmeds (bfc/get-file-media cfg file)]
|
||||
|
||||
(when (uuid? profile-id)
|
||||
(proj/check-edition-permissions! conn profile-id project-id))
|
||||
|
||||
(vswap! bfc/*state* update :index bfc/update-index fmeds :id)
|
||||
|
||||
;; Process and persist file
|
||||
(let [file (->> (bfc/process-file file)
|
||||
(bfc/persist-file! cfg))]
|
||||
|
||||
;; The file profile creation is optional, so when no profile is
|
||||
;; present (when this function is called from profile less
|
||||
;; environment: SREPL) we just omit the creation of the relation
|
||||
(when (uuid? profile-id)
|
||||
(db/insert! conn :file-profile-rel
|
||||
{:file-id (:id file)
|
||||
:profile-id profile-id
|
||||
:is-owner true
|
||||
:is-admin true
|
||||
:can-edit true}
|
||||
{::db/return-keys? false}))
|
||||
|
||||
(doseq [params (sequence (comp
|
||||
(map #(bfc/remap-id % :file-id))
|
||||
(map #(bfc/remap-id % :library-file-id))
|
||||
(map #(assoc % :synced-at timestamp))
|
||||
(map #(assoc % :created-at timestamp)))
|
||||
flibs)]
|
||||
(db/insert! conn :file-library-rel params ::db/return-keys false))
|
||||
|
||||
(doseq [params (sequence (comp
|
||||
(map #(bfc/remap-id % :id))
|
||||
(map #(assoc % :created-at timestamp))
|
||||
(map #(bfc/remap-id % :file-id)))
|
||||
fmeds)]
|
||||
(db/insert! conn :file-media-object params ::db/return-keys false))
|
||||
|
||||
file)))
|
||||
|
||||
(def ^:private
|
||||
schema:duplicate-file
|
||||
@@ -51,185 +97,55 @@
|
||||
{::doc/added "1.16"
|
||||
::webhooks/event? true
|
||||
::sm/params schema:duplicate-file}
|
||||
[cfg {:keys [::rpc/profile-id] :as params}]
|
||||
(db/tx-run! cfg duplicate-file (assoc params :profile-id profile-id)))
|
||||
[cfg {:keys [::rpc/profile-id file-id] :as params}]
|
||||
(db/tx-run! cfg (fn [{:keys [::db/conn] :as cfg}]
|
||||
(db/exec-one! conn ["SET CONSTRAINTS ALL DEFERRED"])
|
||||
|
||||
(defn- remap-id
|
||||
[item index key]
|
||||
(cond-> item
|
||||
(contains? item key)
|
||||
(assoc key (get index (get item key) (get item key)))))
|
||||
|
||||
(defn- process-file
|
||||
[cfg index {:keys [id] :as file}]
|
||||
(letfn [(process-form [form]
|
||||
(cond-> form
|
||||
;; Relink library items
|
||||
(and (map? form)
|
||||
(uuid? (:component-file form)))
|
||||
(update :component-file #(get index % %))
|
||||
|
||||
(and (map? form)
|
||||
(uuid? (:fill-color-ref-file form)))
|
||||
(update :fill-color-ref-file #(get index % %))
|
||||
|
||||
(and (map? form)
|
||||
(uuid? (:stroke-color-ref-file form)))
|
||||
(update :stroke-color-ref-file #(get index % %))
|
||||
|
||||
(and (map? form)
|
||||
(uuid? (:typography-ref-file form)))
|
||||
(update :typography-ref-file #(get index % %))
|
||||
|
||||
;; Relink Image Shapes
|
||||
(and (map? form)
|
||||
(map? (:metadata form))
|
||||
(= :image (:type form)))
|
||||
(update-in [:metadata :id] #(get index % %))))
|
||||
|
||||
;; A function responsible to analyze all file data and
|
||||
;; replace the old :component-file reference with the new
|
||||
;; ones, using the provided file-index
|
||||
(relink-shapes [data]
|
||||
(walk/postwalk process-form data))
|
||||
|
||||
;; A function responsible of process the :media attr of file
|
||||
;; data and remap the old ids with the new ones.
|
||||
(relink-media [media]
|
||||
(reduce-kv (fn [res k v]
|
||||
(let [id (get index k)]
|
||||
(if (uuid? id)
|
||||
(-> res
|
||||
(assoc id (assoc v :id id))
|
||||
(dissoc k))
|
||||
res)))
|
||||
media
|
||||
media))
|
||||
|
||||
(process-file [{:keys [id] :as file}]
|
||||
(-> file
|
||||
(update :data assoc :id id)
|
||||
(update :data feat.fdata/process-pointers deref)
|
||||
(pmg/migrate-file)
|
||||
(update :data (fn [data]
|
||||
(-> data
|
||||
(update :pages-index relink-shapes)
|
||||
(update :components relink-shapes)
|
||||
(update :media relink-media)
|
||||
(d/without-nils))))))]
|
||||
|
||||
(let [new-id (get index id)
|
||||
file (binding [pmap/*load-fn* (partial feat.fdata/load-pointer cfg id)]
|
||||
(-> (assoc file :id new-id)
|
||||
(process-file)))
|
||||
|
||||
file (if (contains? (:features file) "fdata/objects-map")
|
||||
(feat.fdata/enable-objects-map file)
|
||||
file)
|
||||
|
||||
file (if (contains? (:features file) "fdata/pointer-map")
|
||||
(binding [pmap/*tracked* (pmap/create-tracked)]
|
||||
(let [file (feat.fdata/enable-pointer-map file)]
|
||||
(feat.fdata/persist-pointers! cfg (:id file))
|
||||
file))
|
||||
file)]
|
||||
file)))
|
||||
|
||||
(def sql:get-used-libraries
|
||||
"select flr.*
|
||||
from file_library_rel as flr
|
||||
inner join file as l on (flr.library_file_id = l.id)
|
||||
where flr.file_id = ?
|
||||
and l.deleted_at is null")
|
||||
|
||||
(def sql:get-used-media-objects
|
||||
"select fmo.*
|
||||
from file_media_object as fmo
|
||||
inner join storage_object as so on (fmo.media_id = so.id)
|
||||
where fmo.file_id = ?
|
||||
and so.deleted_at is null")
|
||||
|
||||
(defn duplicate-file*
|
||||
[{:keys [::db/conn] :as cfg} {:keys [profile-id file index project-id name flibs fmeds]} {:keys [reset-shared-flag]}]
|
||||
(let [flibs (or flibs (db/exec! conn [sql:get-used-libraries (:id file)]))
|
||||
fmeds (or fmeds (db/exec! conn [sql:get-used-media-objects (:id file)]))
|
||||
|
||||
;; memo uniform creation/modification date
|
||||
now (dt/now)
|
||||
ignore (dt/plus now (dt/duration {:seconds 5}))
|
||||
|
||||
;; add to the index all file media objects.
|
||||
index (reduce #(assoc %1 (:id %2) (uuid/next)) index fmeds)
|
||||
|
||||
flibs-xf (comp
|
||||
(map #(remap-id % index :file-id))
|
||||
(map #(remap-id % index :library-file-id))
|
||||
(map #(assoc % :synced-at now))
|
||||
(map #(assoc % :created-at now)))
|
||||
|
||||
;; remap all file-library-rel rows
|
||||
flibs (sequence flibs-xf flibs)
|
||||
|
||||
fmeds-xf (comp
|
||||
(map #(assoc % :id (get index (:id %))))
|
||||
(map #(assoc % :created-at now))
|
||||
(map #(remap-id % index :file-id)))
|
||||
|
||||
;; remap all file-media-object rows
|
||||
fmeds (sequence fmeds-xf fmeds)
|
||||
|
||||
file (cond-> file
|
||||
(some? project-id)
|
||||
(assoc :project-id project-id)
|
||||
|
||||
(some? name)
|
||||
(assoc :name name)
|
||||
|
||||
(true? reset-shared-flag)
|
||||
(assoc :is-shared false))
|
||||
|
||||
file (-> file
|
||||
(assoc :created-at now)
|
||||
(assoc :modified-at now)
|
||||
(assoc :ignore-sync-until ignore))
|
||||
|
||||
file (process-file cfg index file)]
|
||||
|
||||
(db/insert! conn :file
|
||||
(-> file
|
||||
(update :features #(db/create-array conn "text" %))
|
||||
(update :data blob/encode))
|
||||
{::db/return-keys? false})
|
||||
|
||||
(db/insert! conn :file-profile-rel
|
||||
{:file-id (:id file)
|
||||
:profile-id profile-id
|
||||
:is-owner true
|
||||
:is-admin true
|
||||
:can-edit true}
|
||||
{::db/return-keys? false})
|
||||
|
||||
(doseq [params flibs]
|
||||
(db/insert! conn :file-library-rel params ::db/return-keys? false))
|
||||
|
||||
(doseq [params fmeds]
|
||||
(db/insert! conn :file-media-object params ::db/return-keys? false))
|
||||
|
||||
file))
|
||||
|
||||
(defn duplicate-file
|
||||
[{:keys [::db/conn] :as cfg} {:keys [profile-id file-id] :as params}]
|
||||
(let [;; We don't touch the original file on duplication
|
||||
file (files/get-file cfg file-id :migrate? false)
|
||||
index {file-id (uuid/next)}
|
||||
params (assoc params :index index :file file)]
|
||||
(proj/check-edition-permissions! conn profile-id (:project-id file))
|
||||
(db/exec-one! conn ["SET CONSTRAINTS ALL DEFERRED"])
|
||||
(duplicate-file* cfg params {:reset-shared-flag true})))
|
||||
(binding [bfc/*state* (volatile! {:index {file-id (uuid/next)}})]
|
||||
(duplicate-file (assoc cfg ::bfc/timestamp (dt/now))
|
||||
(-> params
|
||||
(assoc :profile-id profile-id)
|
||||
(assoc :reset-shared-flag true)))))))
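;; A hedged usage sketch: duplicating a single file for a profile inside
;; a transaction (`system`, `profile-id` and `file-id` are placeholders).
(db/tx-run! system
            (fn [cfg]
              (duplicate-file cfg {:profile-id profile-id
                                   :file-id file-id})))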
|
||||
|
||||
;; --- COMMAND: Duplicate Project
|
||||
|
||||
(declare duplicate-project)
|
||||
(defn duplicate-project
|
||||
[{:keys [::db/conn ::bfc/timestamp] :as cfg} {:keys [profile-id project-id name] :as params}]
|
||||
(binding [bfc/*state* (volatile! {:index {project-id (uuid/next)}})]
|
||||
(let [project (-> (db/get-by-id conn :project project-id)
|
||||
(assoc :created-at timestamp)
|
||||
(assoc :modified-at timestamp)
|
||||
(assoc :is-pinned false)
|
||||
(update :id bfc/lookup-index)
|
||||
(cond-> (string? name)
|
||||
(assoc :name name)))
|
||||
|
||||
files (bfc/get-project-files cfg project-id)]
|
||||
|
||||
;; Update index with the project files and the project-id
|
||||
(vswap! bfc/*state* update :index bfc/update-index files)
|
||||
|
||||
|
||||
;; Check if the source team-id allows creating a new project for the current user
|
||||
(teams/check-edition-permissions! conn profile-id (:team-id project))
|
||||
|
||||
;; create the duplicated project and assign the current profile as
|
||||
;; a project owner
|
||||
(let [project (teams/create-project conn project)]
|
||||
;; The project profile creation is optional, so when no profile is
;; present (when this function is called from a profile-less
;; environment: SREPL) we just omit the creation of the relation
|
||||
(when (uuid? profile-id)
|
||||
(teams/create-project-role conn profile-id (:id project) :owner))
|
||||
|
||||
(doseq [file-id files]
|
||||
(let [params (-> params
|
||||
(dissoc :name)
|
||||
(assoc :file-id file-id)
|
||||
(assoc :reset-shared-flag false))]
|
||||
(duplicate-file cfg params)))
|
||||
|
||||
project))))
|
||||
|
||||
(def ^:private
|
||||
schema:duplicate-project
|
||||
@@ -244,51 +160,94 @@
|
||||
::webhooks/event? true
|
||||
::sm/params schema:duplicate-project}
|
||||
[cfg {:keys [::rpc/profile-id] :as params}]
|
||||
(db/tx-run! cfg duplicate-project (assoc params :profile-id profile-id)))
|
||||
(db/tx-run! cfg (fn [cfg]
|
||||
;; Defer all constraints
|
||||
(db/exec-one! cfg ["SET CONSTRAINTS ALL DEFERRED"])
|
||||
(-> (assoc cfg ::bfc/timestamp (dt/now))
|
||||
(duplicate-project (assoc params :profile-id profile-id))))))
|
||||
|
||||
(defn duplicate-project
|
||||
[{:keys [::db/conn] :as cfg} {:keys [profile-id project-id name] :as params}]
|
||||
(defn duplicate-team
|
||||
[{:keys [::db/conn ::bfc/timestamp] :as cfg} & {:keys [profile-id team-id name] :as params}]
|
||||
|
||||
;; Defer all constraints
|
||||
(db/exec-one! conn ["SET CONSTRAINTS ALL DEFERRED"])
|
||||
;; Check if the source team-id is allowed to be read by the user when
;; profile-id is present; the check can be omitted if this function is
;; called from SREPL helpers where no profile is available
|
||||
(when (uuid? profile-id)
|
||||
(teams/check-read-permissions! conn profile-id team-id))
|
||||
|
||||
(let [project (-> (db/get-by-id conn :project project-id)
|
||||
(assoc :is-pinned false))
|
||||
(binding [bfc/*state* (volatile! {:index {team-id (uuid/next)}})]
|
||||
(let [projs (bfc/get-team-projects cfg team-id)
|
||||
files (bfc/get-team-files cfg team-id)
|
||||
frels (bfc/get-files-rels cfg files)
|
||||
|
||||
files (db/query conn :file
|
||||
{:project-id (:id project)
|
||||
:deleted-at nil}
|
||||
{:columns [:id]})
|
||||
team (-> (db/get-by-id conn :team team-id)
|
||||
(assoc :created-at timestamp)
|
||||
(assoc :modified-at timestamp)
|
||||
(update :id bfc/lookup-index)
|
||||
(cond-> (string? name)
|
||||
(assoc :name name)))
|
||||
|
||||
project (cond-> project
|
||||
(string? name)
|
||||
(assoc :name name)
|
||||
fonts (db/query conn :team-font-variant
|
||||
{:team-id team-id})]
|
||||
|
||||
:always
|
||||
(assoc :id (uuid/next)))]
|
||||
(vswap! bfc/*state* update :index
|
||||
(fn [index]
|
||||
(-> index
|
||||
(bfc/update-index projs)
|
||||
(bfc/update-index files)
|
||||
(bfc/update-index fonts :id))))
|
||||
|
||||
;; Check if the source team-id allows creating a new project for the current user
|
||||
(teams/check-edition-permissions! conn profile-id (:team-id project))
|
||||
;; FIXME: disallow clone default team
|
||||
;; Create the new team in the database
|
||||
(db/insert! conn :team team)
|
||||
|
||||
;; create the duplicated project and assign the current profile as
|
||||
;; a project owner
|
||||
(teams/create-project conn project)
|
||||
(teams/create-project-role conn profile-id (:id project) :owner)
|
||||
;; Duplicate team <-> profile relations
|
||||
(doseq [params frels]
|
||||
(let [params (-> params
|
||||
(assoc :id (uuid/next))
|
||||
(update :team-id bfc/lookup-index)
|
||||
(assoc :created-at timestamp)
|
||||
(assoc :modified-at timestamp))]
|
||||
(db/insert! conn :team-profile-rel params
|
||||
{::db/return-keys false})))
|
||||
|
||||
;; duplicate all files
|
||||
(let [index (reduce #(assoc %1 (:id %2) (uuid/next)) {} files)
|
||||
params (-> params
|
||||
(dissoc :name)
|
||||
(assoc :project-id (:id project))
|
||||
(assoc :index index))]
|
||||
(doseq [{:keys [id]} files]
|
||||
(let [file (files/get-file cfg id :migrate? false)
|
||||
params (assoc params :file file)
|
||||
opts {:reset-shared-flag false}]
|
||||
(duplicate-file* cfg params opts))))
|
||||
;; Duplicate team fonts
|
||||
(doseq [font fonts]
|
||||
(let [params (-> font
|
||||
(update :id bfc/lookup-index)
|
||||
(update :team-id bfc/lookup-index)
|
||||
(assoc :created-at timestamp)
|
||||
(assoc :modified-at timestamp))]
|
||||
(db/insert! conn :team-font-variant params
|
||||
{::db/return-keys false})))
|
||||
|
||||
;; return the created project
|
||||
project))
|
||||
;; Duplicate projects; we don't reuse `duplicate-project` here
;; because we handle file duplication for the whole team instead
;; of per project, and we want to preserve some project props
;; which are reset in the `duplicate-project` impl
|
||||
(doseq [project-id projs]
|
||||
(let [project (db/get conn :project {:id project-id})
|
||||
project (-> project
|
||||
(assoc :created-at timestamp)
|
||||
(assoc :modified-at timestamp)
|
||||
(update :id bfc/lookup-index)
|
||||
(update :team-id bfc/lookup-index))]
|
||||
(teams/create-project conn project)
|
||||
|
||||
;; The project profile creation is optional, so when no profile is
;; present (when this function is called from a profile-less
;; environment: SREPL) we just omit the creation of the relation
|
||||
(when (uuid? profile-id)
|
||||
(teams/create-project-role conn profile-id (:id project) :owner))))
|
||||
|
||||
(doseq [file-id files]
|
||||
(let [params (-> params
|
||||
(dissoc :name)
|
||||
(assoc :file-id file-id)
|
||||
(assoc :reset-shared-flag false))]
|
||||
(duplicate-file cfg params)))
|
||||
|
||||
team)))
|
||||
|
||||
;; --- COMMAND: Move file
|
||||
|
||||
@@ -437,6 +396,19 @@
|
||||
|
||||
;; --- COMMAND: Clone Template
|
||||
|
||||
(defn- clone-template
|
||||
[{:keys [::wrk/executor ::bf.v1/project-id] :as cfg} template]
|
||||
(db/tx-run! cfg (fn [{:keys [::db/conn] :as cfg}]
|
||||
;; NOTE: the import process performs some operations that are
;; not very friendly with virtual threads, so to avoid unexpected
;; blocking of other concurrent operations we dispatch that work
;; to a dedicated executor.
|
||||
(let [result (px/submit! executor (partial bf.v1/import-files! cfg template))]
|
||||
(db/update! conn :project
|
||||
{:modified-at (dt/now)}
|
||||
{:id project-id})
|
||||
(deref result)))))
|
||||
|
||||
(def ^:private
|
||||
schema:clone-template
|
||||
(sm/define
|
||||
@@ -444,8 +416,6 @@
|
||||
[:project-id ::sm/uuid]
|
||||
[:template-id ::sm/word-string]]))
|
||||
|
||||
(declare ^:private clone-template)
|
||||
|
||||
(sv/defmethod ::clone-template
|
||||
"Clone into the specified project the template by its id."
|
||||
{::doc/added "1.16"
|
||||
@@ -457,33 +427,14 @@
|
||||
_ (teams/check-edition-permissions! pool profile-id (:team-id project))
|
||||
template (tmpl/get-template-stream cfg template-id)
|
||||
params (-> cfg
|
||||
(assoc ::binfile/input template)
|
||||
(assoc ::binfile/project-id (:id project))
|
||||
(assoc ::binfile/profile-id profile-id)
|
||||
(assoc ::binfile/ignore-index-errors? true)
|
||||
(assoc ::binfile/migrate? true))]
|
||||
|
||||
(assoc ::bf.v1/project-id (:id project))
|
||||
(assoc ::bf.v1/profile-id profile-id))]
|
||||
(when-not template
|
||||
(ex/raise :type :not-found
|
||||
:code :template-not-found
|
||||
:hint "template not found"))
|
||||
|
||||
(sse/response #(clone-template params))))
|
||||
|
||||
(defn- clone-template
|
||||
[{:keys [::wrk/executor ::binfile/project-id] :as params}]
|
||||
(db/tx-run! params
|
||||
(fn [{:keys [::db/conn] :as params}]
|
||||
;; NOTE: the import process performs some operations that are
;; not very friendly with virtual threads, so to avoid unexpected
;; blocking of other concurrent operations we dispatch that work
;; to a dedicated executor.
|
||||
(let [result (p/thread-call executor (partial binfile/import! params))]
|
||||
(db/update! conn :project
|
||||
{:modified-at (dt/now)}
|
||||
{:id project-id})
|
||||
|
||||
(deref result)))))
|
||||
(sse/response #(clone-template params template))))
|
||||
|
||||
;; --- COMMAND: Get list of builtin templates
|
||||
|
||||
|
||||
@@ -23,10 +23,12 @@
|
||||
[app.storage :as sto]
|
||||
[app.storage.tmp :as tmp]
|
||||
[app.util.services :as sv]
|
||||
[app.util.time :as dt]
|
||||
[app.worker :as-alias wrk]
|
||||
[clojure.spec.alpha :as s]
|
||||
[cuerdas.core :as str]
|
||||
[datoteka.io :as io]))
|
||||
[datoteka.io :as io]
|
||||
[promesa.exec :as px]))
|
||||
|
||||
(def default-max-file-size
|
||||
(* 1024 1024 10)) ; 10 MiB
|
||||
@@ -55,20 +57,25 @@
|
||||
:opt-un [::id]))
|
||||
|
||||
(sv/defmethod ::upload-file-media-object
|
||||
{::doc/added "1.17"}
|
||||
{::doc/added "1.17"
|
||||
::climit/id [[:process-image/by-profile ::rpc/profile-id]
|
||||
[:process-image/global]]}
|
||||
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id file-id content] :as params}]
|
||||
(let [cfg (update cfg ::sto/storage media/configure-assets-storage)]
|
||||
|
||||
(files/check-edition-permissions! pool profile-id file-id)
|
||||
(media/validate-media-type! content)
|
||||
(media/validate-media-size! content)
|
||||
(let [object (db/run! cfg #(create-file-media-object % params))
|
||||
props {:name (:name params)
|
||||
:file-id file-id
|
||||
:is-local (:is-local params)
|
||||
:size (:size content)
|
||||
:mtype (:mtype content)}]
|
||||
(with-meta object
|
||||
{::audit/replace-props props}))))
|
||||
|
||||
(db/run! cfg (fn [cfg]
|
||||
(let [object (create-file-media-object cfg params)
|
||||
props {:name (:name params)
|
||||
:file-id file-id
|
||||
:is-local (:is-local params)
|
||||
:size (:size content)
|
||||
:mtype (:mtype content)}]
|
||||
(with-meta object
|
||||
{::audit/replace-props props}))))))
|
||||
|
||||
(defn- big-enough-for-thumbnail?
|
||||
"Checks if the provided image info is big enough for
|
||||
@@ -143,16 +150,19 @@
|
||||
(assoc ::image (process-main-image info)))))
|
||||
|
||||
(defn create-file-media-object
|
||||
[{:keys [::sto/storage ::db/conn ::wrk/executor] :as cfg}
|
||||
[{:keys [::sto/storage ::db/conn ::wrk/executor]}
|
||||
{:keys [id file-id is-local name content]}]
|
||||
|
||||
(let [result (-> (climit/configure cfg :process-image/global)
|
||||
(climit/run! (partial process-image content) executor))
|
||||
|
||||
(let [result (px/invoke! executor (partial process-image content))
|
||||
image (sto/put-object! storage (::image result))
|
||||
thumb (when-let [params (::thumb result)]
|
||||
(sto/put-object! storage params))]
|
||||
|
||||
(db/update! conn :file
|
||||
{:modified-at (dt/now)
|
||||
:has-media-trimmed false}
|
||||
{:id file-id})
|
||||
|
||||
(db/exec-one! conn [sql:create-file-media-object
|
||||
(or id (uuid/next))
|
||||
file-id is-local name
|
||||
@@ -177,7 +187,7 @@
|
||||
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id file-id] :as params}]
|
||||
(let [cfg (update cfg ::sto/storage media/configure-assets-storage)]
|
||||
(files/check-edition-permissions! pool profile-id file-id)
|
||||
(db/run! cfg #(create-file-media-object-from-url % params))))
|
||||
(create-file-media-object-from-url cfg (assoc params :profile-id profile-id))))
|
||||
|
||||
(defn download-image
|
||||
[{:keys [::http/client]} uri]
|
||||
@@ -229,7 +239,17 @@
|
||||
params (-> params
|
||||
(assoc :content content)
|
||||
(assoc :name (or name (:filename content))))]
|
||||
(create-file-media-object cfg params)))
|
||||
|
||||
;; NOTE: we use the climit here in a dynamic invocation because we
;; don't want to saturate the process-image limit with IO (the
;; download of an external image)
|
||||
|
||||
(-> cfg
|
||||
(assoc ::climit/id [[:process-image/by-profile (:profile-id params)]
|
||||
[:process-image/global]])
|
||||
(assoc ::climit/label "create-file-media-object-from-url")
|
||||
(climit/invoke! #(db/run! %1 create-file-media-object %2) params))))
|
||||
|
||||
|
||||
;; --- Clone File Media object (Upload and create from url)
|
||||
|
||||
|
||||
@@ -13,6 +13,7 @@
|
||||
[app.common.uuid :as uuid]
|
||||
[app.config :as cf]
|
||||
[app.db :as db]
|
||||
[app.db.sql :as-alias sql]
|
||||
[app.email :as eml]
|
||||
[app.http.session :as session]
|
||||
[app.loggers.audit :as audit]
|
||||
@@ -22,12 +23,14 @@
|
||||
[app.rpc.climit :as climit]
|
||||
[app.rpc.doc :as-alias doc]
|
||||
[app.rpc.helpers :as rph]
|
||||
[app.setup :as-alias setup]
|
||||
[app.storage :as sto]
|
||||
[app.tokens :as tokens]
|
||||
[app.util.services :as sv]
|
||||
[app.util.time :as dt]
|
||||
[app.worker :as-alias wrk]
|
||||
[cuerdas.core :as str]))
|
||||
[cuerdas.core :as str]
|
||||
[promesa.exec :as px]))
|
||||
|
||||
(declare check-profile-existence!)
|
||||
(declare decode-row)
|
||||
@@ -37,6 +40,19 @@
|
||||
(declare strip-private-attrs)
|
||||
(declare verify-password)
|
||||
|
||||
(defn clean-email
|
||||
"Clean and normalizes email address string"
|
||||
[email]
|
||||
(let [email (str/lower email)
|
||||
email (if (str/starts-with? email "mailto:")
|
||||
(subs email 7)
|
||||
email)
|
||||
email (if (or (str/starts-with? email "<")
|
||||
(str/ends-with? email ">"))
|
||||
(str/trim email "<>")
|
||||
email)]
|
||||
email))
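;; A couple of illustrative REPL checks for the helper above (results
;; assume cuerdas' `str/lower` and `str/trim` behave as usual):
(clean-email "MAILTO:User@Example.com")   ;; => "user@example.com"
(clean-email "<user@example.com>")        ;; => "user@example.com"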
|
||||
|
||||
(def ^:private
|
||||
schema:profile
|
||||
(sm/define
|
||||
@@ -99,7 +115,7 @@
|
||||
;; NOTE: we need to retrieve the profile regardless of whether we
;; use it or not, for explicit locking and to avoid concurrent
;; updates of the same row/object.
|
||||
(let [profile (-> (db/get-by-id conn :profile profile-id ::db/for-update? true)
|
||||
(let [profile (-> (db/get-by-id conn :profile profile-id ::sql/for-update true)
|
||||
(decode-row))
|
||||
|
||||
;; Update the profile map with direct params
|
||||
@@ -136,25 +152,23 @@
|
||||
[:old-password {:optional true} [:maybe [::sm/word-string {:max 500}]]]]))
|
||||
|
||||
(sv/defmethod ::update-profile-password
|
||||
{:doc/added "1.0"
|
||||
{::doc/added "1.0"
|
||||
::sm/params schema:update-profile-password
|
||||
::sm/result :nil}
|
||||
::climit/id :auth/global}
|
||||
[cfg {:keys [::rpc/profile-id password] :as params}]
|
||||
|
||||
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id password] :as params}]
|
||||
(db/with-atomic [conn pool]
|
||||
(let [cfg (assoc cfg ::db/conn conn)
|
||||
profile (validate-password! cfg (assoc params :profile-id profile-id))
|
||||
session-id (::session/id params)]
|
||||
(db/tx-run! cfg (fn [cfg]
|
||||
(let [profile (validate-password! cfg (assoc params :profile-id profile-id))
|
||||
session-id (::session/id params)]
|
||||
|
||||
(when (= (str/lower (:email profile))
|
||||
(str/lower (:password params)))
|
||||
(ex/raise :type :validation
|
||||
:code :email-as-password
|
||||
:hint "you can't use your email as password"))
|
||||
(when (= (:email profile) (str/lower (:password params)))
|
||||
(ex/raise :type :validation
|
||||
:code :email-as-password
|
||||
:hint "you can't use your email as password"))
|
||||
|
||||
(update-profile-password! conn (assoc profile :password password))
|
||||
(invalidate-profile-session! cfg profile-id session-id)
|
||||
nil)))
|
||||
(update-profile-password! cfg (assoc profile :password password))
|
||||
(invalidate-profile-session! cfg profile-id session-id)
|
||||
nil))))
|
||||
|
||||
(defn- invalidate-profile-session!
|
||||
"Removes all sessions except the current one."
|
||||
@@ -164,7 +178,7 @@
|
||||
|
||||
(defn- validate-password!
|
||||
[{:keys [::db/conn] :as cfg} {:keys [profile-id old-password] :as params}]
|
||||
(let [profile (db/get-by-id conn :profile profile-id ::db/for-update? true)]
|
||||
(let [profile (db/get-by-id conn :profile profile-id ::sql/for-update true)]
|
||||
(when (and (not= (:password profile) "!")
|
||||
(not (:valid (verify-password cfg old-password (:password profile)))))
|
||||
(ex/raise :type :validation
|
||||
@@ -172,11 +186,12 @@
|
||||
profile))
|
||||
|
||||
(defn update-profile-password!
|
||||
[conn {:keys [id password] :as profile}]
|
||||
[{:keys [::db/conn] :as cfg} {:keys [id password] :as profile}]
|
||||
(when-not (db/read-only? conn)
|
||||
(db/update! conn :profile
|
||||
{:password (auth/derive-password password)}
|
||||
{:id id})))
|
||||
{:password (derive-password cfg password)}
|
||||
{:id id})
|
||||
nil))
|
||||
|
||||
;; --- MUTATION: Update Photo
|
||||
|
||||
@@ -201,8 +216,9 @@
|
||||
|
||||
(defn update-profile-photo
|
||||
[{:keys [::db/pool ::sto/storage] :as cfg} {:keys [profile-id file] :as params}]
|
||||
|
||||
(let [photo (upload-photo cfg params)
|
||||
profile (db/get-by-id pool :profile profile-id ::db/for-update? true)]
|
||||
profile (db/get-by-id pool :profile profile-id ::sql/for-update true)]
|
||||
|
||||
;; Schedule deletion of old photo
|
||||
(when-let [id (:photo-id profile)]
|
||||
@@ -221,7 +237,7 @@
|
||||
:file-mtype (:mtype file)}}))))
|
||||
|
||||
(defn- generate-thumbnail!
|
||||
[file]
|
||||
[_ file]
|
||||
(let [input (media/run {:cmd :info :input file})
|
||||
thumb (media/run {:cmd :profile-thumbnail
|
||||
:format :jpeg
|
||||
@@ -238,12 +254,15 @@
|
||||
:content-type (:mtype thumb)}))
|
||||
|
||||
(defn upload-photo
|
||||
[{:keys [::sto/storage ::wrk/executor] :as cfg} {:keys [file]}]
|
||||
(let [params (-> (climit/configure cfg :process-image/global)
|
||||
(climit/run! (partial generate-thumbnail! file) executor))]
|
||||
[{:keys [::sto/storage ::wrk/executor] :as cfg} {:keys [file] :as params}]
|
||||
(let [params (-> cfg
|
||||
(assoc ::climit/id [[:process-image/by-profile (:profile-id params)]
|
||||
[:process-image/global]])
|
||||
(assoc ::climit/label "upload-photo")
|
||||
(assoc ::climit/executor executor)
|
||||
(climit/invoke! generate-thumbnail! file))]
|
||||
(sto/put-object! storage params)))
|
||||
|
||||
|
||||
;; --- MUTATION: Request Email Change
|
||||
|
||||
(declare ^:private request-email-change!)
|
||||
@@ -264,7 +283,7 @@
|
||||
cfg (assoc cfg ::conn conn)
|
||||
params (assoc params
|
||||
:profile profile
|
||||
:email (str/lower email))]
|
||||
:email (clean-email email))]
|
||||
(if (contains? cf/flags :smtp)
|
||||
(request-email-change! cfg params)
|
||||
(change-email-immediately! cfg params)))))
|
||||
@@ -282,12 +301,12 @@
|
||||
|
||||
(defn- request-email-change!
|
||||
[{:keys [::conn] :as cfg} {:keys [profile email] :as params}]
|
||||
(let [token (tokens/generate (::main/props cfg)
|
||||
(let [token (tokens/generate (::setup/props cfg)
|
||||
{:iss :change-email
|
||||
:exp (dt/in-future "15m")
|
||||
:profile-id (:id profile)
|
||||
:email email})
|
||||
ptoken (tokens/generate (::main/props cfg)
|
||||
ptoken (tokens/generate (::setup/props cfg)
|
||||
{:iss :profile-identity
|
||||
:profile-id (:id profile)
|
||||
:exp (dt/in-future {:days 30})})]
|
||||
@@ -329,7 +348,7 @@
|
||||
::sm/params schema:update-profile-props}
|
||||
[{:keys [::db/pool]} {:keys [::rpc/profile-id props]}]
|
||||
(db/with-atomic [conn pool]
|
||||
(let [profile (get-profile conn profile-id ::db/for-update? true)
|
||||
(let [profile (get-profile conn profile-id ::sql/for-update true)
|
||||
props (reduce-kv (fn [props k v]
|
||||
;; We don't accept namespaced keys
|
||||
(if (simple-ident? k)
|
||||
@@ -403,10 +422,9 @@
|
||||
where email = ?
|
||||
and deleted_at is null) as val")
|
||||
|
||||
(defn check-profile-existence!
|
||||
(defn- check-profile-existence!
|
||||
[conn {:keys [email] :as params}]
|
||||
(let [email (str/lower email)
|
||||
result (db/exec-one! conn [sql:profile-existence email])]
|
||||
(let [result (db/exec-one! conn [sql:profile-existence email])]
|
||||
(when (:val result)
|
||||
(ex/raise :type :validation
|
||||
:code :email-already-exists))
|
||||
@@ -421,7 +439,7 @@
|
||||
(defn get-profile-by-email
|
||||
"Returns a profile looked up by email or `nil` if not match found."
|
||||
[conn email]
|
||||
(->> (db/exec! conn [sql:profile-by-email (str/lower email)])
|
||||
(->> (db/exec! conn [sql:profile-by-email (clean-email email)])
|
||||
(map decode-row)
|
||||
(first)))
|
||||
|
||||
@@ -436,17 +454,13 @@
|
||||
(into {} (filter (fn [[k _]] (simple-ident? k))) props))
|
||||
|
||||
(defn derive-password
|
||||
[cfg password]
|
||||
[{:keys [::wrk/executor]} password]
|
||||
(when password
|
||||
(-> (climit/configure cfg :derive-password/global)
|
||||
(climit/run! (partial auth/derive-password password)
|
||||
(::wrk/executor cfg)))))
|
||||
(px/invoke! executor (partial auth/derive-password password))))
|
||||
|
||||
(defn verify-password
|
||||
[cfg password password-data]
|
||||
(-> (climit/configure cfg :derive-password/global)
|
||||
(climit/run! (partial auth/verify-password password password-data)
|
||||
(::wrk/executor cfg))))
|
||||
[{:keys [::wrk/executor]} password password-data]
|
||||
(px/invoke! executor (partial auth/verify-password password password-data)))
|
||||
|
||||
(defn decode-row
|
||||
[{:keys [props] :as row}]
|
||||
|
||||
@@ -9,6 +9,7 @@
|
||||
[app.common.data.macros :as dm]
|
||||
[app.common.spec :as us]
|
||||
[app.db :as db]
|
||||
[app.db.sql :as-alias sql]
|
||||
[app.loggers.audit :as-alias audit]
|
||||
[app.loggers.webhooks :as webhooks]
|
||||
[app.rpc :as-alias rpc]
|
||||
@@ -189,8 +190,8 @@
|
||||
{:project-id (:id project)
|
||||
:profile-id profile-id
|
||||
:team-id team-id
|
||||
:is-pinned true})
|
||||
(assoc project :is-pinned true))))
|
||||
:is-pinned false})
|
||||
(assoc project :is-pinned false))))
|
||||
|
||||
|
||||
;; --- MUTATION: Toggle Project Pin
|
||||
@@ -233,7 +234,7 @@
|
||||
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id id name] :as params}]
|
||||
(db/with-atomic [conn pool]
|
||||
(check-edition-permissions! conn profile-id id)
|
||||
(let [project (db/get-by-id conn :project id ::db/for-update? true)]
|
||||
(let [project (db/get-by-id conn :project id ::sql/for-update true)]
|
||||
(db/update! conn :project
|
||||
{:name name}
|
||||
{:id id})
|
||||
@@ -259,7 +260,8 @@
|
||||
(check-edition-permissions! conn profile-id id)
|
||||
(let [project (db/update! conn :project
|
||||
{:deleted-at (dt/now)}
|
||||
{:id id :is-default false})]
|
||||
{:id id :is-default false}
|
||||
{::db/return-keys true})]
|
||||
(rph/with-meta (rph/wrap)
|
||||
{::audit/props {:team-id (:team-id project)
|
||||
:name (:name project)
|
||||
|
||||
@@ -26,6 +26,7 @@
|
||||
[app.rpc.helpers :as rph]
|
||||
[app.rpc.permissions :as perms]
|
||||
[app.rpc.quotes :as quotes]
|
||||
[app.setup :as-alias setup]
|
||||
[app.storage :as sto]
|
||||
[app.tokens :as tokens]
|
||||
[app.util.services :as sv]
|
||||
@@ -416,14 +417,16 @@
|
||||
;; namespace too.
|
||||
|
||||
(defn create-project
|
||||
[conn {:keys [id team-id name is-default] :as params}]
|
||||
[conn {:keys [id team-id name is-default created-at modified-at]}]
|
||||
(let [id (or id (uuid/next))
|
||||
is-default (if (boolean? is-default) is-default false)]
|
||||
(db/insert! conn :project
|
||||
{:id id
|
||||
:name name
|
||||
:team-id team-id
|
||||
:is-default is-default})))
|
||||
is-default (if (boolean? is-default) is-default false)
|
||||
params {:id id
|
||||
:name name
|
||||
:team-id team-id
|
||||
:is-default is-default
|
||||
:created-at created-at
|
||||
:modified-at modified-at}]
|
||||
(db/insert! conn :project (d/without-nils params))))
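;; A quick illustration of why `d/without-nils` is used above: keys with
;; nil values (e.g. omitted timestamps) are dropped, so the database
;; defaults apply (the values below are placeholders).
(d/without-nils {:id (uuid/next)
                 :name "My project"
                 :team-id (uuid/next)
                 :is-default false
                 :created-at nil
                 :modified-at nil})
;; => the :created-at and :modified-at entries are removed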
|
||||
|
||||
(defn create-project-role
|
||||
[conn profile-id project-id role]
|
||||
@@ -689,7 +692,7 @@
|
||||
|
||||
(defn- create-invitation-token
|
||||
[cfg {:keys [profile-id valid-until team-id member-id member-email role]}]
|
||||
(tokens/generate (::main/props cfg)
|
||||
(tokens/generate (::setup/props cfg)
|
||||
{:iss :team-invitation
|
||||
:exp valid-until
|
||||
:profile-id profile-id
|
||||
@@ -700,14 +703,15 @@
|
||||
|
||||
(defn- create-profile-identity-token
|
||||
[cfg profile]
|
||||
(tokens/generate (::main/props cfg)
|
||||
(tokens/generate (::setup/props cfg)
|
||||
{:iss :profile-identity
|
||||
:profile-id (:id profile)
|
||||
:exp (dt/in-future {:days 30})}))
|
||||
|
||||
(defn- create-invitation
|
||||
[{:keys [::db/conn] :as cfg} {:keys [team profile role email] :as params}]
|
||||
(let [member (profile/get-profile-by-email conn email)]
|
||||
(let [email (profile/clean-email email)
|
||||
member (profile/get-profile-by-email conn email)]
|
||||
|
||||
(when (and member (not (eml/allow-send-emails? conn member)))
|
||||
(ex/raise :type :validation
|
||||
@@ -801,7 +805,8 @@
|
||||
(db/with-atomic [conn pool]
|
||||
(let [perms (get-permissions conn profile-id team-id)
|
||||
profile (db/get-by-id conn :profile profile-id)
|
||||
team (db/get-by-id conn :team team-id)]
|
||||
team (db/get-by-id conn :team team-id)
|
||||
emails (into #{} (map profile/clean-email) emails)]
|
||||
|
||||
(run! (partial quotes/check-quote! conn)
|
||||
(list {::quotes/id ::quotes/invitations-per-team
|
||||
@@ -832,7 +837,7 @@
|
||||
;; We don't re-send invitations to already existing members
|
||||
(remove (partial contains? members))
|
||||
(map (fn [email]
|
||||
{:email (str/lower email)
|
||||
{:email email
|
||||
:team team
|
||||
:profile profile
|
||||
:role role}))
|
||||
@@ -864,17 +869,23 @@
|
||||
::sm/params schema:create-team-with-invitations}
|
||||
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id emails role] :as params}]
|
||||
(db/with-atomic [conn pool]
|
||||
(let [params (assoc params :profile-id profile-id)
|
||||
|
||||
(let [features (-> (cfeat/get-enabled-features cf/flags)
|
||||
(cfeat/check-client-features! (:features params)))
|
||||
params (assoc params
|
||||
:profile-id profile-id
|
||||
:features features)
|
||||
cfg (assoc cfg ::db/conn conn)
|
||||
team (create-team cfg params)
|
||||
profile (db/get-by-id conn :profile profile-id)]
|
||||
profile (db/get-by-id conn :profile profile-id)
|
||||
emails (into #{} (map profile/clean-email) emails)]
|
||||
|
||||
;; Create invitations for all provided emails.
|
||||
(->> emails
|
||||
(map (fn [email]
|
||||
{:team team
|
||||
:profile profile
|
||||
:email (str/lower email)
|
||||
:email email
|
||||
:role role}))
|
||||
(run! (partial create-invitation cfg)))
|
||||
|
||||
@@ -911,17 +922,20 @@
|
||||
{::doc/added "1.17"}
|
||||
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id team-id email] :as params}]
|
||||
(check-read-permissions! pool profile-id team-id)
|
||||
(let [invit (-> (db/get pool :team-invitation
|
||||
(let [email (profile/clean-email email)
|
||||
invit (-> (db/get pool :team-invitation
|
||||
{:team-id team-id
|
||||
:email-to (str/lower email)})
|
||||
:email-to email})
|
||||
(update :role keyword))
|
||||
|
||||
member (profile/get-profile-by-email pool (:email-to invit))
|
||||
token (create-invitation-token cfg {:team-id (:team-id invit)
|
||||
:profile-id profile-id
|
||||
:valid-until (:valid-until invit)
|
||||
:role (:role invit)
|
||||
:member-id (:id member)
|
||||
:member-email (or (:email member) (:email-to invit))})]
|
||||
:member-email (or (:email member)
|
||||
(profile/clean-email (:email-to invit)))})]
|
||||
{:token token}))
|
||||
|
||||
;; --- Mutation: Update invitation role
|
||||
@@ -942,7 +956,7 @@
|
||||
|
||||
(db/update! conn :team-invitation
|
||||
{:role (name role) :updated-at (dt/now)}
|
||||
{:team-id team-id :email-to (str/lower email)})
|
||||
{:team-id team-id :email-to (profile/clean-email email)})
|
||||
nil)))
|
||||
|
||||
;; --- Mutation: Delete invitation
|
||||
@@ -963,5 +977,6 @@
|
||||
|
||||
(let [invitation (db/delete! conn :team-invitation
|
||||
{:team-id team-id
|
||||
:email-to (str/lower email)})]
|
||||
:email-to (profile/clean-email email)}
|
||||
{::db/return-keys true})]
|
||||
(rph/wrap nil {::audit/props {:invitation-id (:id invitation)}})))))
|
||||
|
||||
@@ -18,6 +18,7 @@
|
||||
[app.rpc.doc :as-alias doc]
|
||||
[app.rpc.helpers :as rph]
|
||||
[app.rpc.quotes :as quotes]
|
||||
[app.setup :as-alias setup]
|
||||
[app.tokens :as tokens]
|
||||
[app.tokens.spec.team-invitation :as-alias spec.team-invitation]
|
||||
[app.util.services :as sv]
|
||||
@@ -38,24 +39,25 @@
|
||||
::doc/module :auth}
|
||||
[{:keys [::db/pool] :as cfg} {:keys [token] :as params}]
|
||||
(db/with-atomic [conn pool]
|
||||
(let [claims (tokens/verify (::main/props cfg) {:token token})
|
||||
(let [claims (tokens/verify (::setup/props cfg) {:token token})
|
||||
cfg (assoc cfg :conn conn)]
|
||||
(process-token cfg params claims))))
|
||||
|
||||
(defmethod process-token :change-email
|
||||
[{:keys [conn] :as cfg} _params {:keys [profile-id email] :as claims}]
|
||||
(when (profile/get-profile-by-email conn email)
|
||||
(ex/raise :type :validation
|
||||
:code :email-already-exists))
|
||||
(let [email (profile/clean-email email)]
|
||||
(when (profile/get-profile-by-email conn email)
|
||||
(ex/raise :type :validation
|
||||
:code :email-already-exists))
|
||||
|
||||
(db/update! conn :profile
|
||||
{:email email}
|
||||
{:id profile-id})
|
||||
(db/update! conn :profile
|
||||
{:email email}
|
||||
{:id profile-id})
|
||||
|
||||
(rph/with-meta claims
|
||||
{::audit/name "update-profile-email"
|
||||
::audit/props {:email email}
|
||||
::audit/profile-id profile-id}))
|
||||
(rph/with-meta claims
|
||||
{::audit/name "update-profile-email"
|
||||
::audit/props {:email email}
|
||||
::audit/profile-id profile-id})))
|
||||
|
||||
(defmethod process-token :verify-email
|
||||
[{:keys [conn] :as cfg} _ {:keys [profile-id] :as claims}]
|
||||
|
||||
@@ -38,6 +38,11 @@
|
||||
team (-> (db/get conn :team {:id (:team-id project)})
|
||||
(teams/decode-row))
|
||||
|
||||
members (into #{} (->> (teams/get-team-members conn (:team-id project))
|
||||
(map :id)))
|
||||
|
||||
perms (assoc perms :in-team (contains? members profile-id))
|
||||
|
||||
_ (-> (cfeat/get-team-enabled-features cf/flags team)
|
||||
(cfeat/check-client-features! (:features params))
|
||||
(cfeat/check-file-features! (:features file)))
|
||||
|
||||
@@ -95,7 +95,8 @@
|
||||
:mtype mtype
|
||||
:error-code nil
|
||||
:error-count 0}
|
||||
{:id id})
|
||||
{:id id}
|
||||
{::db/return-keys true})
|
||||
(decode-row)))
|
||||
|
||||
(sv/defmethod ::create-webhook
|
||||
|
||||
@@ -51,7 +51,7 @@
|
||||
[_ f {:keys [::get-object ::key-fn ::reuse-key?] :as mdata}]
|
||||
(if (and (ifn? get-object) (ifn? key-fn))
|
||||
(do
|
||||
(l/debug :hint "instrumenting method" :service (::sv/name mdata))
|
||||
(l/trc :hint "instrumenting method" :service (::sv/name mdata))
|
||||
(fn [cfg {:keys [::key] :as params}]
|
||||
(if *enabled*
|
||||
(let [key' (when (or key reuse-key?)
|
||||
|
||||
@@ -7,8 +7,10 @@
|
||||
(ns app.rpc.quotes
|
||||
"Penpot resource usage quotes."
|
||||
(:require
|
||||
[app.common.data.macros :as dm]
|
||||
[app.common.exceptions :as ex]
|
||||
[app.common.logging :as l]
|
||||
[app.common.schema :as sm]
|
||||
[app.common.spec :as us]
|
||||
[app.config :as cf]
|
||||
[app.db :as db]
|
||||
@@ -23,21 +25,15 @@
|
||||
;; PUBLIC API
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
(s/def ::conn ::db/pool-or-conn)
|
||||
(s/def ::file-id ::us/uuid)
|
||||
(s/def ::team-id ::us/uuid)
|
||||
(s/def ::project-id ::us/uuid)
|
||||
(s/def ::profile-id ::us/uuid)
|
||||
(s/def ::incr (s/and int? pos?))
|
||||
(s/def ::target ::us/string)
|
||||
|
||||
(s/def ::quote
|
||||
(s/keys :req [::id ::profile-id]
|
||||
:opt [::conn
|
||||
::team-id
|
||||
::project-id
|
||||
::file-id
|
||||
::incr]))
|
||||
(def ^:private schema:quote
|
||||
(sm/define
|
||||
[:map {:title "Quote"}
|
||||
[::team-id {:optional true} ::sm/uuid]
|
||||
[::project-id {:optional true} ::sm/uuid]
|
||||
[::file-id {:optional true} ::sm/uuid]
|
||||
[::incr {:optional true} [:int {:min 0}]]
|
||||
[::id :keyword]
|
||||
[::profile-id ::sm/uuid]]))
|
||||
|
||||
(def ^:private enabled (volatile! true))
|
||||
|
||||
@@ -52,15 +48,22 @@
|
||||
(vswap! enabled (constantly false)))
|
||||
|
||||
(defn check-quote!
|
||||
[conn quote]
|
||||
(us/assert! ::db/pool-or-conn conn)
|
||||
(us/assert! ::quote quote)
|
||||
[ds quote]
|
||||
(dm/assert!
|
||||
"expected valid quote map"
|
||||
(sm/validate schema:quote quote))
|
||||
|
||||
(when (contains? cf/flags :quotes)
|
||||
(when @enabled
|
||||
(check-quote (assoc quote ::conn conn ::target (name (::id quote)))))))
|
||||
;; This approach adds flexibility on how and where
;; check-quote! can be called (in or out of a transaction)
|
||||
(db/run! ds (fn [cfg]
|
||||
(-> (merge cfg quote)
|
||||
(assoc ::target (name (::id quote)))
|
||||
(check-quote)))))))
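;; A hedged usage sketch (mirrors the invitations check used by the teams
;; command): `pool`, `profile-id`, `team-id` and `emails` are placeholders,
;; and the keys are written as if from inside this namespace.
(check-quote! pool
              {::id ::invitations-per-team
               ::profile-id profile-id
               ::team-id team-id
               ::incr (count emails)})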
|
||||
|
||||
(defn- send-notification!
|
||||
[{:keys [::conn] :as params}]
|
||||
[{:keys [::db/conn] :as params}]
|
||||
(l/warn :hint "max quote reached"
|
||||
:target (::target params)
|
||||
:profile-id (some-> params ::profile-id str)
|
||||
@@ -93,7 +96,7 @@
|
||||
:content content}]}))))
|
||||
|
||||
(defn- generic-check!
|
||||
[{:keys [::conn ::incr ::quote-sql ::count-sql ::default ::target] :or {incr 1} :as params}]
|
||||
[{:keys [::db/conn ::incr ::quote-sql ::count-sql ::default ::target] :or {incr 1} :as params}]
|
||||
(let [quote (->> (db/exec! conn quote-sql)
|
||||
(map :quote)
|
||||
(reduce max (- Integer/MAX_VALUE)))
|
||||
@@ -347,7 +350,6 @@
|
||||
(assoc ::count-sql [sql:get-comments-per-file file-id])
|
||||
(generic-check!)))
|
||||
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; QUOTE: DEFAULT
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
@@ -6,8 +6,8 @@
|
||||
|
||||
(ns app.rpc.retry
|
||||
(:require
|
||||
[app.common.exceptions :as ex]
|
||||
[app.common.logging :as l]
|
||||
[app.db :as db]
|
||||
[app.util.services :as sv])
|
||||
(:import
|
||||
org.postgresql.util.PSQLException))
|
||||
@@ -15,49 +15,41 @@
|
||||
(defn conflict-exception?
|
||||
"Check if exception matches a insertion conflict on postgresql."
|
||||
[e]
|
||||
(and (instance? PSQLException e)
|
||||
(= "23505" (.getSQLState ^PSQLException e))))
|
||||
(when-let [cause (ex/instance? PSQLException e)]
|
||||
(= "23505" (.getSQLState ^PSQLException cause))))
|
||||
|
||||
(def ^:private always-false
|
||||
(constantly false))
|
||||
|
||||
(defn invoke!
|
||||
[{:keys [::max-retries] :or {max-retries 3} :as cfg} f & args]
|
||||
(loop [rnum 1]
|
||||
(let [match? (get cfg ::when always-false)
|
||||
result (try
|
||||
(apply f cfg args)
|
||||
(catch Throwable cause
|
||||
(if (and (match? cause) (<= rnum max-retries))
|
||||
::retry
|
||||
(throw cause))))]
|
||||
(if (= ::retry result)
|
||||
(let [label (get cfg ::label "anonymous")]
|
||||
(l/warn :hint "retrying operation" :label label :retry rnum)
|
||||
(recur (inc rnum)))
|
||||
result))))
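;; A hedged usage sketch of the retry helper above: `do-insert!` stands in
;; for the real work and is hypothetical; the map keys mirror the ones read
;; by `invoke!` (::when, ::max-retries, ::label), written as if from inside
;; this namespace.
(invoke! {::max-retries 3
          ::when conflict-exception?
          ::label "insert-demo"}
         (fn [cfg]
           (do-insert! cfg)))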
|
||||
|
||||
(def ^:private always-false (constantly false))
|
||||
|
||||
(defn wrap-retry
|
||||
[_ f {:keys [::matches ::sv/name] :or {matches always-false} :as mdata}]
|
||||
[_ f {:keys [::sv/name] :as mdata}]
|
||||
|
||||
(when (::enabled mdata)
|
||||
(l/debug :hint "wrapping retry" :name name))
|
||||
|
||||
(if-let [max-retries (::max-retries mdata)]
|
||||
(fn [cfg params]
|
||||
((fn run [retry]
|
||||
(try
|
||||
(f cfg params)
|
||||
(catch Throwable cause
|
||||
(if (matches cause)
|
||||
(let [current-retry (inc retry)]
|
||||
(l/trace :hint "running retry algorithm" :retry current-retry)
|
||||
(if (<= current-retry max-retries)
|
||||
(run current-retry)
|
||||
(throw cause)))
|
||||
(throw cause))))) 1))
|
||||
(if (::enabled mdata)
|
||||
(let [max-retries (get mdata ::max-retries 3)
|
||||
matches? (get mdata ::when always-false)]
|
||||
(l/trc :hint "wrapping retry" :name name :max-retries max-retries)
|
||||
(fn [cfg params]
|
||||
(-> cfg
|
||||
(assoc ::max-retries max-retries)
|
||||
(assoc ::when matches?)
|
||||
(assoc ::label name)
|
||||
(invoke! f params))))
|
||||
f))
|
||||
|
||||
(defmacro with-retry
|
||||
[{:keys [::when ::max-retries ::label ::db/conn] :or {max-retries 3}} & body]
|
||||
`(let [conn# ~conn]
|
||||
(assert (or (nil? conn#) (db/connection? conn#)) "invalid database connection")
|
||||
(loop [tnum# 1]
|
||||
(let [result# (let [sp# (some-> conn# db/savepoint)]
|
||||
(try
|
||||
(let [result# (do ~@body)]
|
||||
(some->> sp# (db/release! conn#))
|
||||
result#)
|
||||
(catch Throwable cause#
|
||||
(some->> sp# (db/rollback! conn#))
|
||||
(if (and (~when cause#) (<= tnum# ~max-retries))
|
||||
::retry
|
||||
(throw cause#)))))]
|
||||
(if (= ::retry result#)
|
||||
(do
|
||||
(l/warn :hint "retrying operation" :label ~label :retry tnum#)
|
||||
(recur (inc tnum#)))
|
||||
result#)))))
|
||||
|
||||
@@ -7,6 +7,7 @@
|
||||
(ns app.setup
|
||||
"Initial data setup of instance."
|
||||
(:require
|
||||
[app.common.data :as d]
|
||||
[app.common.logging :as l]
|
||||
[app.common.spec :as us]
|
||||
[app.common.uuid :as uuid]
|
||||
@@ -25,7 +26,7 @@
|
||||
(bc/bytes->b64u)
|
||||
(bc/bytes->str)))
|
||||
|
||||
(defn- retrieve-all
|
||||
(defn- get-all-props
|
||||
[conn]
|
||||
(->> (db/query conn :server-prop {:preload true})
|
||||
(filter #(not= "secret-key" (:id %)))
|
||||
@@ -50,16 +51,37 @@
|
||||
:cause cause))))
|
||||
instance-id)))
|
||||
|
||||
(s/def ::main/key ::us/string)
|
||||
(s/def ::main/props
|
||||
(s/map-of ::us/keyword some?))
|
||||
|
||||
(def sql:add-prop
|
||||
"INSERT INTO server_prop (id, content, preload)
|
||||
VALUES (?, ?, ?)
|
||||
ON CONFLICT (id)
|
||||
DO UPDATE SET content=?, preload=?")
|
||||
|
||||
(defn get-prop
|
||||
([system prop] (get-prop system prop nil))
|
||||
([system prop default]
|
||||
(let [prop (d/name prop)]
|
||||
(db/run! system (fn [{:keys [::db/conn]}]
|
||||
(or (db/get* conn :server-prop {:id prop})
|
||||
default))))))
|
||||
|
||||
(defn set-prop!
|
||||
[system prop value]
|
||||
(let [value (db/tjson value)
|
||||
prop (d/name prop)]
|
||||
(db/run! system (fn [{:keys [::db/conn]}]
|
||||
(db/exec-one! conn [sql:add-prop prop value false value false])))))
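;; A hedged REPL sketch for the two helpers above; `system` is a
;; placeholder for the running system map and the property name is
;; purely illustrative. Note that `get-prop` returns the stored row
;; (or the supplied default), not a decoded value.
(set-prop! system :sample-prop {:hello "world"})
(get-prop system :sample-prop ::not-found)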
|
||||
|
||||
(s/def ::key ::us/string)
|
||||
(s/def ::props (s/map-of ::us/keyword some?))
|
||||
|
||||
(defmethod ig/pre-init-spec ::props [_]
|
||||
(s/keys :req [::db/pool]
|
||||
:opt [::main/key]))
|
||||
:opt [::key]))
|
||||
|
||||
(defmethod ig/init-key ::props
|
||||
[_ {:keys [::db/pool ::main/key] :as cfg}]
|
||||
[_ {:keys [::db/pool ::key] :as cfg}]
|
||||
(db/with-atomic [conn pool]
|
||||
(db/xact-lock! conn 0)
|
||||
(when-not key
|
||||
@@ -68,7 +90,7 @@
|
||||
"PENPOT_SECRET_KEY environment variable")))
|
||||
|
||||
(let [secret (or key (generate-random-key))]
|
||||
(-> (retrieve-all conn)
|
||||
(-> (get-all-props conn)
|
||||
(assoc :secret-key secret)
|
||||
(assoc :tokens-key (keys/derive secret :salt "tokens"))
|
||||
(update :instance-id handle-instance-id conn (db/read-only? pool))))))
|
||||
|
||||
backend/src/app/srepl/binfile.clj
Normal file
@@ -0,0 +1,39 @@
|
||||
;; This Source Code Form is subject to the terms of the Mozilla Public
;; License, v. 2.0. If a copy of the MPL was not distributed with this
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
;;
;; Copyright (c) KALEIDOS INC

(ns app.srepl.binfile
  (:require
   [app.binfile.v2 :as binfile.v2]
   [app.db :as db]
   [app.main :as main]
   [app.srepl.helpers :as h]
   [cuerdas.core :as str]))

(defn export-team!
  [team-id]
  (let [team-id (h/parse-uuid team-id)]
    (binfile.v2/export-team! main/system team-id)))

(defn import-team!
  [path & {:keys [owner rollback?] :or {rollback? true}}]
  (db/tx-run! (assoc main/system ::db/rollback rollback?)
              (fn [cfg]
                (let [team (binfile.v2/import-team! cfg path)
                      owner (cond
                              (string? owner)
                              (db/get* cfg :profile {:email (str/lower owner)})

                              (uuid? owner)
                              (db/get* cfg :profile {:id owner}))]

                  (when owner
                    (db/insert! cfg :team-profile-rel
                                {:team-id (:id team)
                                 :profile-id (:id owner)
                                 :is-admin true
                                 :is-owner true
                                 :can-edit true}))

                  team))))
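;; A hedged REPL sketch for the helpers above (the uuid and path are
;; placeholders): export a team to a binfile and import it back,
;; assigning ownership to an existing profile by email.
(export-team! "00000000-0000-0000-0000-000000000000")
(import-team! "/tmp/team.penpot"
              :owner "owner@example.com"
              :rollback? false)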
|
||||
@@ -11,9 +11,8 @@
|
||||
[app.common.exceptions :as ex]
|
||||
[app.common.uuid :as uuid]
|
||||
[app.db :as db]
|
||||
[app.main :as main]
|
||||
[app.rpc.commands.auth :as cmd.auth]
|
||||
[app.srepl.components-v2]
|
||||
[app.rpc.commands.profile :as cmd.profile]
|
||||
[app.util.json :as json]
|
||||
[app.util.time :as dt]
|
||||
[cuerdas.core :as str]))
|
||||
@@ -39,12 +38,13 @@
|
||||
:or {is-active true}}]
|
||||
(when-let [system (get-current-system)]
|
||||
(db/with-atomic [conn (:app.db/pool system)]
|
||||
(let [params {:id (uuid/next)
|
||||
:email email
|
||||
:fullname fullname
|
||||
:is-active is-active
|
||||
:password password
|
||||
:props {}}]
|
||||
(let [password (cmd.profile/derive-password system password)
|
||||
params {:id (uuid/next)
|
||||
:email email
|
||||
:fullname fullname
|
||||
:is-active is-active
|
||||
:password password
|
||||
:props {}}]
|
||||
(->> (cmd.auth/create-profile! conn params)
|
||||
(cmd.auth/create-profile-rels! conn))))))
|
||||
|
||||
@@ -65,9 +65,8 @@
|
||||
(let [res (db/update! conn :profile
|
||||
params
|
||||
{:email email
|
||||
:deleted-at nil}
|
||||
{::db/return-keys? false})]
|
||||
(pos? (:next.jdbc/update-count res))))))))
|
||||
:deleted-at nil})]
|
||||
(pos? (db/get-update-count res))))))))
|
||||
|
||||
(defmethod exec-command :delete-profile
|
||||
[{:keys [email soft]}]
|
||||
@@ -82,12 +81,10 @@
|
||||
(let [res (if soft
|
||||
(db/update! conn :profile
|
||||
{:deleted-at (dt/now)}
|
||||
{:email email :deleted-at nil}
|
||||
{::db/return-keys? false})
|
||||
{:email email :deleted-at nil})
|
||||
(db/delete! conn :profile
|
||||
{:email email}
|
||||
{::db/return-keys? false}))]
|
||||
(pos? (:next.jdbc/update-count res))))))
|
||||
{:email email}))]
|
||||
(pos? (db/get-update-count res))))))
|
||||
|
||||
(defmethod exec-command :search-profile
|
||||
[{:keys [email]}]
|
||||
@@ -107,28 +104,6 @@
|
||||
[{:keys [password]}]
|
||||
(auth/derive-password password))
|
||||
|
||||
(defmethod exec-command :migrate-v2
|
||||
[_]
|
||||
(letfn [(on-start [{:keys [total rollback]}]
|
||||
(println
|
||||
(str/ffmt "The components/v2 migration started (rollback:%, teams:%)"
|
||||
(if rollback "on" "off")
|
||||
total)))
|
||||
|
||||
(on-progress [{:keys [total elapsed progress completed]}]
|
||||
(println (str/ffmt "Progress % (total: %, completed: %, elapsed: %)"
|
||||
progress total completed elapsed)))
|
||||
(on-error [cause]
|
||||
(println "ERR:" (ex-message cause)))
|
||||
|
||||
(on-end [_]
|
||||
(println "Migration finished"))]
|
||||
(app.srepl.components-v2/migrate-teams! main/system
|
||||
:on-start on-start
|
||||
:on-error on-error
|
||||
:on-progress on-progress
|
||||
:on-end on-end)))
|
||||
|
||||
(defmethod exec-command :default
|
||||
[{:keys [::cmd]}]
|
||||
(ex/raise :type :internal
|
||||
|
||||
@@ -6,291 +6,301 @@
|
||||
|
||||
(ns app.srepl.components-v2
|
||||
(:require
|
||||
[app.common.data :as d]
|
||||
[app.common.data.macros :as dm]
|
||||
[app.common.fressian :as fres]
|
||||
[app.common.logging :as l]
|
||||
[app.common.pprint :as pp]
|
||||
[app.db :as db]
|
||||
[app.features.components-v2 :as feat]
|
||||
[app.main :as main]
|
||||
[app.srepl.helpers :as h]
|
||||
[app.util.events :as events]
|
||||
[app.util.time :as dt]
|
||||
[cuerdas.core :as str]
|
||||
[promesa.core :as p]
|
||||
[app.worker :as-alias wrk]
|
||||
[datoteka.fs :as fs]
|
||||
[datoteka.io :as io]
|
||||
[promesa.exec :as px]
|
||||
[promesa.exec.semaphore :as ps]
|
||||
[promesa.util :as pu]))
|
||||
|
||||
(defn- print-stats!
|
||||
[stats]
|
||||
(->> stats
|
||||
(into (sorted-map))
|
||||
(pp/pprint)))
|
||||
(def ^:dynamic *scope* nil)
|
||||
(def ^:dynamic *semaphore* nil)
|
||||
|
||||
(defn- report-progress-files
|
||||
[tpoint]
|
||||
(fn [_ _ oldv newv]
|
||||
(when (not= (:processed/files oldv)
|
||||
(:processed/files newv))
|
||||
(let [total (:total/files newv)
|
||||
completed (:processed/files newv)
|
||||
progress (/ (* completed 100.0) total)
|
||||
elapsed (tpoint)]
|
||||
(l/dbg :hint "progress"
|
||||
:completed (:processed/files newv)
|
||||
:total (:total/files newv)
|
||||
:progress (str (int progress) "%")
|
||||
:elapsed (dt/format-duration elapsed))))))
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; PRIVATE HELPERS
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
(defn- report-progress-teams
|
||||
[tpoint on-progress]
|
||||
(fn [_ _ oldv newv]
|
||||
(when (not= (:processed/teams oldv)
|
||||
(:processed/teams newv))
|
||||
(let [total (:total/teams newv)
|
||||
completed (:processed/teams newv)
|
||||
progress (/ (* completed 100.0) total)
|
||||
progress (str (int progress) "%")
|
||||
elapsed (dt/format-duration (tpoint))]
|
||||
(def ^:private sql:get-files-by-created-at
|
||||
"SELECT id, features,
|
||||
row_number() OVER (ORDER BY created_at DESC) AS rown
|
||||
FROM file
|
||||
WHERE deleted_at IS NULL
|
||||
ORDER BY created_at DESC")
|
||||
|
||||
(when (fn? on-progress)
|
||||
(on-progress {:total total
|
||||
:elapsed elapsed
|
||||
:completed completed
|
||||
:progress progress}))
|
||||
(defn- get-files
|
||||
[conn]
|
||||
(->> (db/cursor conn [sql:get-files-by-created-at] {:chunk-size 500})
|
||||
(map feat/decode-row)
|
||||
(remove (fn [{:keys [features]}]
|
||||
(contains? features "components/v2")))))
|
||||
|
||||
(l/dbg :hint "progress"
|
||||
:completed completed
|
||||
:progress progress
|
||||
:elapsed elapsed)))))
|
||||
|
||||
(defn- get-total-files
|
||||
[pool & {:keys [team-id]}]
|
||||
(if (some? team-id)
|
||||
(let [sql (str/concat
|
||||
"SELECT count(f.id) AS count FROM file AS f "
|
||||
" JOIN project AS p ON (p.id = f.project_id) "
|
||||
" WHERE p.team_id = ? AND f.deleted_at IS NULL "
|
||||
" AND p.deleted_at IS NULL")
|
||||
res (db/exec-one! pool [sql team-id])]
|
||||
(:count res))
|
||||
|
||||
(let [sql (str/concat
|
||||
"SELECT count(id) AS count FROM file "
|
||||
" WHERE deleted_at IS NULL")
|
||||
res (db/exec-one! pool [sql])]
|
||||
(:count res))))
|
||||
|
||||
(defn- get-total-teams
|
||||
[pool]
|
||||
(let [sql (str/concat
|
||||
"SELECT count(id) AS count FROM team "
|
||||
" WHERE deleted_at IS NULL")
|
||||
res (db/exec-one! pool [sql])]
|
||||
(:count res)))
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; PUBLIC API
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
(defn migrate-file!
|
||||
[system file-id & {:keys [rollback?] :or {rollback? true}}]
|
||||
[file-id & {:keys [rollback? validate? label cache skip-on-graphic-error?]
|
||||
:or {rollback? true
|
||||
validate? false
|
||||
skip-on-graphic-error? true}}]
|
||||
(l/dbg :hint "migrate:start" :rollback rollback?)
|
||||
(let [tpoint (dt/tpoint)
|
||||
file-id (h/parse-uuid file-id)]
|
||||
|
||||
(l/dbg :hint "migrate:start")
|
||||
(let [tpoint (dt/tpoint)]
|
||||
(try
|
||||
(binding [feat/*stats* (atom {})]
|
||||
(-> (assoc system ::db/rollback rollback?)
|
||||
(feat/migrate-file! file-id))
|
||||
(binding [feat/*stats* (atom {})
|
||||
feat/*cache* cache]
|
||||
(try
|
||||
(-> (assoc main/system ::db/rollback rollback?)
|
||||
(feat/migrate-file! file-id
|
||||
:validate? validate?
|
||||
:skip-on-graphic-error? skip-on-graphic-error?
|
||||
:label label))
|
||||
|
||||
(-> (deref feat/*stats*)
|
||||
(assoc :elapsed (dt/format-duration (tpoint)))))
|
||||
(assoc :elapsed (dt/format-duration (tpoint))))
|
||||
|
||||
(catch Throwable cause
|
||||
(l/wrn :hint "migrate:error" :cause cause))
|
||||
(catch Throwable cause
|
||||
(l/wrn :hint "migrate:error" :cause cause))
|
||||
|
||||
(finally
|
||||
(let [elapsed (dt/format-duration (tpoint))]
|
||||
(l/dbg :hint "migrate:end" :elapsed elapsed))))))
|
||||
|
||||
(defn migrate-files!
|
||||
[{:keys [::db/pool] :as system}
|
||||
& {:keys [chunk-size max-jobs max-items start-at preset rollback? skip-on-error validate?]
|
||||
:or {chunk-size 10
|
||||
skip-on-error true
|
||||
max-jobs 10
|
||||
max-items Long/MAX_VALUE
|
||||
preset :shutdown-on-failure
|
||||
rollback? true
|
||||
validate? false}}]
|
||||
(letfn [(get-chunk [cursor]
|
||||
(let [sql (str/concat
|
||||
"SELECT id, created_at FROM file "
|
||||
" WHERE created_at < ? AND deleted_at IS NULL "
|
||||
" ORDER BY created_at desc LIMIT ?")
|
||||
rows (db/exec! pool [sql cursor chunk-size])]
|
||||
[(some->> rows peek :created-at) (seq rows)]))
|
||||
|
||||
(get-candidates []
|
||||
(->> (d/iteration get-chunk
|
||||
:vf second
|
||||
:kf first
|
||||
:initk (or start-at (dt/now)))
|
||||
(take max-items)
|
||||
(map :id)))]
|
||||
|
||||
(l/dbg :hint "migrate:start")
|
||||
(let [fsem (ps/create :permits max-jobs)
|
||||
total (get-total-files pool)
|
||||
stats (atom {:files/total total})
|
||||
tpoint (dt/tpoint)]
|
||||
|
||||
(add-watch stats :progress-report (report-progress-files tpoint))
|
||||
|
||||
(binding [feat/*stats* stats
|
||||
feat/*semaphore* fsem
|
||||
feat/*skip-on-error* skip-on-error]
|
||||
(try
|
||||
(pu/with-open [scope (px/structured-task-scope :preset preset :factory :virtual)]
|
||||
|
||||
(run! (fn [file-id]
|
||||
(ps/acquire! feat/*semaphore*)
|
||||
(px/submit! scope (fn []
|
||||
(-> (assoc system ::db/rollback rollback?)
|
||||
(feat/migrate-file! file-id
|
||||
:validate? validate?
|
||||
:throw-on-validate? (not skip-on-error))))))
|
||||
(get-candidates))
|
||||
|
||||
(p/await! scope))
|
||||
|
||||
(-> (deref feat/*stats*)
|
||||
(assoc :elapsed (dt/format-duration (tpoint))))
|
||||
|
||||
(catch Throwable cause
|
||||
(l/dbg :hint "migrate:error" :cause cause))
|
||||
|
||||
(finally
|
||||
(let [elapsed (dt/format-duration (tpoint))]
|
||||
(l/dbg :hint "migrate:end" :elapsed elapsed))))))))
|
||||
(finally
|
||||
(let [elapsed (dt/format-duration (tpoint))]
|
||||
(l/dbg :hint "migrate:end" :rollback rollback? :elapsed elapsed)))))))
|
||||
|
||||
(defn migrate-team!
|
||||
[{:keys [::db/pool] :as system} team-id
|
||||
& {:keys [rollback? skip-on-error validate?]
|
||||
:or {rollback? true skip-on-error true validate? false}}]
|
||||
(l/dbg :hint "migrate:start")
|
||||
[team-id & {:keys [rollback? skip-on-graphic-error? validate? label cache]
|
||||
:or {rollback? true
|
||||
validate? true
|
||||
skip-on-graphic-error? true}}]
|
||||
|
||||
(let [total (get-total-files pool :team-id team-id)
|
||||
stats (atom {:total/files total})
|
||||
tpoint (dt/tpoint)]
|
||||
(l/dbg :hint "migrate:start" :rollback rollback?)
|
||||
|
||||
(add-watch stats :progress-report (report-progress-files tpoint))
|
||||
(let [team-id (h/parse-uuid team-id)
|
||||
stats (atom {})
|
||||
tpoint (dt/tpoint)]
|
||||
|
||||
(binding [feat/*stats* stats
|
||||
feat/*cache* cache]
|
||||
(try
|
||||
(-> (assoc main/system ::db/rollback rollback?)
|
||||
(feat/migrate-team! team-id
|
||||
:label label
|
||||
:validate? validate?
|
||||
:skip-on-graphics-error? skip-on-graphic-error?))
|
||||
|
||||
(-> (deref feat/*stats*)
|
||||
(assoc :elapsed (dt/format-duration (tpoint))))
|
||||
|
||||
(catch Throwable cause
|
||||
(l/dbg :hint "migrate:error" :cause cause))
|
||||
|
||||
(finally
|
||||
(let [elapsed (dt/format-duration (tpoint))]
|
||||
(l/dbg :hint "migrate:end" :rollback rollback? :elapsed elapsed)))))))
|
||||
|
||||
(defn migrate-files!
|
||||
"A REPL helper for migrate all files.
|
||||
|
||||
This function starts multiple concurrent file migration processes
|
||||
until thw maximum number of jobs is reached which by default has the
|
||||
value of `1`. This is controled with the `:max-jobs` option.
|
||||
|
||||
If you want to run this on multiple machines you will need to specify
|
||||
the total number of partitions and the current partition.
|
||||
|
||||
In order to get the report table populated, you will need to provide
|
||||
a correct `:label`. That label is also used for persist a file
|
||||
snaphot before continue with the migration."
|
||||
[& {:keys [max-jobs max-items rollback? validate?
|
||||
cache skip-on-graphic-error?
|
||||
label partitions current-partition]
|
||||
:or {validate? false
|
||||
rollback? true
|
||||
max-jobs 1
|
||||
current-partition 1
|
||||
skip-on-graphic-error? true
|
||||
max-items Long/MAX_VALUE}}]
|
||||
|
||||
(when (int? partitions)
|
||||
(when-not (int? current-partition)
|
||||
(throw (IllegalArgumentException. "missing `current-partition` parameter")))
|
||||
(when-not (<= 0 current-partition partitions)
|
||||
(throw (IllegalArgumentException. "invalid value on `current-partition` parameter"))))
|
||||
|
||||
(let [stats (atom {})
|
||||
tpoint (dt/tpoint)
|
||||
factory (px/thread-factory :virtual false :prefix "penpot/migration/")
|
||||
executor (px/cached-executor :factory factory)
|
||||
|
||||
sjobs (ps/create :permits max-jobs)
|
||||
|
||||
migrate-file
|
||||
(fn [file-id rown]
|
||||
(try
|
||||
(db/tx-run! (assoc main/system ::db/rollback rollback?)
|
||||
(fn [system]
|
||||
(db/exec-one! system ["SET LOCAL idle_in_transaction_session_timeout = 0"])
|
||||
(feat/migrate-file! system file-id
|
||||
:rown rown
|
||||
:label label
|
||||
:validate? validate?
|
||||
:skip-on-graphic-error? skip-on-graphic-error?)))
|
||||
|
||||
(catch Throwable cause
|
||||
(l/wrn :hint "unexpected error on processing file (skiping)"
|
||||
:file-id (str file-id))
|
||||
|
||||
(events/tap :error
|
||||
(ex-info "unexpected error on processing file (skiping)"
|
||||
{:file-id file-id}
|
||||
cause))
|
||||
|
||||
(swap! stats update :errors (fnil inc 0)))
|
||||
|
||||
(finally
|
||||
(ps/release! sjobs))))
|
||||
|
||||
process-file
|
||||
(fn [{:keys [id rown]}]
|
||||
(ps/acquire! sjobs)
|
||||
(px/run! executor (partial migrate-file id rown)))]
|
||||
|
||||
(l/dbg :hint "migrate:start"
|
||||
:label label
|
||||
:rollback rollback?
|
||||
:max-jobs max-jobs
|
||||
:max-items max-items)
|
||||
|
||||
(binding [feat/*stats* stats
|
||||
feat/*cache* cache]
|
||||
(try
|
||||
(db/tx-run! main/system
|
||||
(fn [{:keys [::db/conn] :as system}]
|
||||
(db/exec! conn ["SET LOCAL statement_timeout = 0"])
|
||||
(db/exec! conn ["SET LOCAL idle_in_transaction_session_timeout = 0"])
|
||||
|
||||
(run! process-file
|
||||
(->> (get-files conn)
|
||||
(filter (fn [{:keys [rown] :as row}]
|
||||
(if (int? partitions)
|
||||
(= current-partition (inc (mod rown partitions)))
|
||||
true)))
|
||||
(take max-items)))
|
||||
|
||||
;; Close and await tasks
|
||||
(pu/close! executor)))
|
||||
|
||||
(-> (deref stats)
|
||||
(assoc :elapsed (dt/format-duration (tpoint))))
|
||||
|
||||
(catch Throwable cause
|
||||
(l/dbg :hint "migrate:error" :cause cause)
|
||||
(events/tap :error cause))
|
||||
|
||||
(finally
|
||||
(let [elapsed (dt/format-duration (tpoint))]
|
||||
(l/dbg :hint "migrate:end"
|
||||
:rollback rollback?
|
||||
:elapsed elapsed)))))))
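;; A hypothetical REPL invocation of the helper above; the label, job
;; count and partition values are illustrative placeholders only.
(comment
  (migrate-files! :label "v2-migration"
                  :max-jobs 4
                  :rollback? false
                  :partitions 2
                  :current-partition 1))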
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; CACHE POPULATE
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
(def sql:sobjects-for-cache
|
||||
"SELECT id,
|
||||
row_number() OVER (ORDER BY created_at) AS index
|
||||
FROM storage_object
|
||||
WHERE (metadata->>'~:bucket' = 'file-media-object' OR
|
||||
metadata->>'~:bucket' IS NULL)
|
||||
AND metadata->>'~:content-type' = 'image/svg+xml'
|
||||
AND deleted_at IS NULL
|
||||
AND size < 1135899
|
||||
ORDER BY created_at ASC")
|
||||
|
||||
(defn populate-cache!
"A REPL helper for migrate all files.
|
||||
|
||||
This function starts multiple concurrent file migration processes
|
||||
until thw maximum number of jobs is reached which by default has the
|
||||
value of `1`. This is controled with the `:max-jobs` option.
|
||||
|
||||
If you want to run this on multiple machines you will need to specify
|
||||
the total number of partitions and the current partition.
|
||||
|
||||
In order to get the report table populated, you will need to provide
|
||||
a correct `:label`. That label is also used for persist a file
|
||||
snaphot before continue with the migration."
[& {:keys [max-jobs] :or {max-jobs 1}}]
|
||||
|
||||
(let [tpoint (dt/tpoint)
|
||||
|
||||
factory (px/thread-factory :virtual false :prefix "penpot/cache/")
|
||||
executor (px/cached-executor :factory factory)
|
||||
|
||||
sjobs (ps/create :permits max-jobs)
|
||||
|
||||
retrieve-sobject
|
||||
(fn [id index]
|
||||
(let [path (feat/get-sobject-cache-path id)
|
||||
parent (fs/parent path)]
|
||||
|
||||
(try
|
||||
(when-not (fs/exists? parent)
|
||||
(fs/create-dir parent))
|
||||
|
||||
(if (fs/exists? path)
|
||||
(l/inf :hint "create cache entry" :status "exists" :index index :id (str id) :path (str path))
|
||||
(let [svg-data (feat/get-optimized-svg id)]
|
||||
(with-open [^java.lang.AutoCloseable stream (io/output-stream path)]
|
||||
(let [writer (fres/writer stream)]
|
||||
(fres/write! writer svg-data)))
|
||||
|
||||
(l/inf :hint "create cache entry" :status "created"
|
||||
:index index
|
||||
:id (str id)
|
||||
:path (str path))))
|
||||
|
||||
(catch Throwable cause
|
||||
(l/wrn :hint "create cache entry"
|
||||
:status "error"
|
||||
:index index
|
||||
:id (str id)
|
||||
:path (str path)
|
||||
:cause cause))
|
||||
|
||||
(finally
|
||||
(ps/release! sjobs)))))
|
||||
|
||||
process-sobject
|
||||
(fn [{:keys [id index]}]
|
||||
(ps/acquire! sjobs)
|
||||
(px/run! executor (partial retrieve-sobject id index)))]
|
||||
|
||||
(l/dbg :hint "migrate:start"
|
||||
:max-jobs max-jobs)
|
||||
|
||||
(try
|
||||
(binding [feat/*stats* stats
|
||||
feat/*skip-on-error* skip-on-error]
|
||||
(-> (assoc system ::db/rollback rollback?)
|
||||
(feat/migrate-team! team-id
|
||||
:validate? validate?
|
||||
:throw-on-validate? (not skip-on-error)))
|
||||
(binding [feat/*system* main/system]
|
||||
(run! process-sobject
|
||||
(db/exec! main/system [sql:sobjects-for-cache]))
|
||||
|
||||
(print-stats!
|
||||
(-> (deref feat/*stats*)
|
||||
(dissoc :total/files)
|
||||
(assoc :elapsed (dt/format-duration (tpoint))))))
|
||||
;; Close and await tasks
|
||||
(pu/close! executor))
|
||||
|
||||
{:elapsed (dt/format-duration (tpoint))}
|
||||
|
||||
(catch Throwable cause
|
||||
(l/dbg :hint "migrate:error" :cause cause))
|
||||
(l/dbg :hint "populate:error" :cause cause))
|
||||
|
||||
(finally
|
||||
(let [elapsed (dt/format-duration (tpoint))]
|
||||
(l/dbg :hint "migrate:end" :elapsed elapsed))))))
|
||||
|
||||
(defn default-on-end
|
||||
[stats]
|
||||
(print-stats!
|
||||
(-> stats
|
||||
(update :elapsed/total dt/format-duration)
|
||||
(dissoc :total/teams))))
|
||||
|
||||
(defn migrate-teams!
|
||||
[{:keys [::db/pool] :as system}
|
||||
& {:keys [chunk-size max-jobs max-items start-at
|
||||
rollback? validate? preset skip-on-error
|
||||
max-time on-start on-progress on-error on-end]
|
||||
:or {chunk-size 10000
|
||||
validate? false
|
||||
rollback? true
|
||||
skip-on-error true
|
||||
on-end default-on-end
|
||||
preset :shutdown-on-failure
|
||||
max-jobs Integer/MAX_VALUE
|
||||
max-items Long/MAX_VALUE}}]
|
||||
|
||||
(letfn [(get-chunk [cursor]
|
||||
(let [sql (str/concat
|
||||
"SELECT id, created_at, features FROM team "
|
||||
" WHERE created_at < ? AND deleted_at IS NULL "
|
||||
" ORDER BY created_at desc LIMIT ?")
|
||||
rows (db/exec! pool [sql cursor chunk-size])]
|
||||
[(some->> rows peek :created-at) (seq rows)]))
|
||||
|
||||
(get-candidates []
|
||||
(->> (d/iteration get-chunk
|
||||
:vf second
|
||||
:kf first
|
||||
:initk (or start-at (dt/now)))
|
||||
(map #(update % :features db/decode-pgarray #{}))
|
||||
(remove #(contains? (:features %) "ephimeral/v2-migration"))
|
||||
(take max-items)
|
||||
(map :id)))
|
||||
|
||||
(migrate-team [team-id]
|
||||
(try
|
||||
(-> (assoc system ::db/rollback rollback?)
|
||||
(feat/migrate-team! team-id
|
||||
:validate? validate?
|
||||
:throw-on-validate? (not skip-on-error)))
|
||||
(catch Throwable cause
|
||||
(l/err :hint "unexpected error on processing team" :team-id (dm/str team-id) :cause cause))))
|
||||
|
||||
(process-team [scope tpoint mtime team-id]
|
||||
(ps/acquire! feat/*semaphore*)
|
||||
(let [ts (tpoint)]
|
||||
(if (and mtime (neg? (compare mtime ts)))
|
||||
(l/inf :hint "max time constraint reached" :elapsed (dt/format-duration ts))
|
||||
(px/submit! scope (partial migrate-team team-id)))))]
|
||||
|
||||
(l/dbg :hint "migrate:start")
|
||||
|
||||
(let [sem (ps/create :permits max-jobs)
|
||||
total (get-total-teams pool)
|
||||
stats (atom {:total/teams (min total max-items)})
|
||||
tpoint (dt/tpoint)
|
||||
mtime (some-> max-time dt/duration)]
|
||||
|
||||
(when (fn? on-start)
|
||||
(on-start {:total total :rollback rollback?}))
|
||||
|
||||
(add-watch stats :progress-report (report-progress-teams tpoint on-progress))
|
||||
|
||||
(binding [feat/*stats* stats
|
||||
feat/*semaphore* sem
|
||||
feat/*skip-on-error* skip-on-error]
|
||||
(try
|
||||
(pu/with-open [scope (px/structured-task-scope :preset preset
|
||||
:factory :virtual)]
|
||||
(loop [candidates (get-candidates)]
|
||||
(when-let [team-id (first candidates)]
|
||||
(when (process-team scope tpoint mtime team-id)
|
||||
(recur (rest candidates)))))
|
||||
|
||||
(p/await! scope))
|
||||
|
||||
(when (fn? on-end)
|
||||
(-> (deref stats)
|
||||
(assoc :elapsed/total (tpoint))
|
||||
(on-end)))
|
||||
|
||||
(catch Throwable cause
|
||||
(l/dbg :hint "migrate:error" :cause cause)
|
||||
(when (fn? on-error)
|
||||
(on-error cause)))
|
||||
|
||||
(finally
|
||||
(let [elapsed (dt/format-duration (tpoint))]
|
||||
(l/dbg :hint "migrate:end" :elapsed elapsed))))))))
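;; A sketch of a dry run over a limited number of teams; `main/system`
;; is assumed to be the running system and the option values are
;; illustrative only.
(comment
  (migrate-teams! main/system
                  :rollback? true
                  :max-jobs 2
                  :max-items 100))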
|
||||
(l/dbg :hint "populate:end"
|
||||
:elapsed elapsed))))))
239 backend/src/app/srepl/fixes.clj (Normal file)
@@ -0,0 +1,239 @@
;; This Source Code Form is subject to the terms of the Mozilla Public
|
||||
;; License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
;;
|
||||
;; Copyright (c) KALEIDOS INC
|
||||
|
||||
(ns app.srepl.fixes
|
||||
"A misc of fix functions"
|
||||
(:refer-clojure :exclude [parse-uuid])
|
||||
(:require
|
||||
[app.binfile.common :as bfc]
|
||||
[app.common.data :as d]
|
||||
[app.common.data.macros :as dm]
|
||||
[app.common.files.changes :as cpc]
|
||||
[app.common.files.helpers :as cfh]
|
||||
[app.common.files.repair :as cfr]
|
||||
[app.common.files.validate :as cfv]
|
||||
[app.common.logging :as l]
|
||||
[app.common.types.component :as ctk]
|
||||
[app.common.types.container :as ctn]
|
||||
[app.common.types.file :as ctf]
|
||||
[app.common.uuid :as uuid]
|
||||
[app.db :as db]
|
||||
[app.features.fdata :as feat.fdata]
|
||||
[app.srepl.helpers :as h]))
|
||||
|
||||
(defn disable-fdata-features
|
||||
[{:keys [id features] :as file} _]
|
||||
(when (or (contains? features "fdata/pointer-map")
|
||||
(contains? features "fdata/objects-map"))
|
||||
(l/warn :hint "disable fdata features" :file-id (str id))
|
||||
(-> file
|
||||
(update :data feat.fdata/process-pointers deref)
|
||||
(update :data feat.fdata/process-objects (partial into {}))
|
||||
(update :features disj "fdata/pointer-map" "fdata/objects-map"))))
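;; Sketch: the fix above is meant to be driven by the file processing
;; helpers in app.srepl.main; the uuid below is a placeholder.
(comment
  (app.srepl.main/process-file!
   #uuid "00000000-0000-0000-0000-000000000000"
   disable-fdata-features))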
|
||||
|
||||
(def sql:get-fdata-files
|
||||
"SELECT id FROM file
|
||||
WHERE deleted_at is NULL
|
||||
AND (features @> '{fdata/pointer-map}' OR
|
||||
features @> '{fdata/objects-map}')
|
||||
ORDER BY created_at DESC")
|
||||
|
||||
(defn find-fdata-pointers
|
||||
[{:keys [id features data] :as file} _]
|
||||
(when (contains? features "fdata/pointer-map")
|
||||
(let [pointers (feat.fdata/get-used-pointer-ids data)]
|
||||
(l/warn :hint "found pointers" :file-id (str id) :pointers pointers)
|
||||
nil)))
|
||||
|
||||
(defn repair-file-media
|
||||
"A helper intended to be used with `srepl.main/process-files!` that
|
||||
fixes all improperly referenced file-media-object rows of a file"
|
||||
[{:keys [id data] :as file} & _]
|
||||
(let [conn (db/get-connection h/*system*)
|
||||
used (bfc/collect-used-media data)
|
||||
ids (db/create-array conn "uuid" used)
|
||||
sql (str "SELECT * FROM file_media_object WHERE id = ANY(?)")
|
||||
rows (db/exec! conn [sql ids])
|
||||
index (reduce (fn [index media]
|
||||
(if (not= (:file-id media) id)
|
||||
(let [media-id (uuid/next)]
|
||||
(l/wrn :hint "found not referenced media"
|
||||
:file-id (str id)
|
||||
:media-id (str (:id media)))
|
||||
|
||||
(db/insert! conn :file-media-object
|
||||
(-> media
|
||||
(assoc :file-id id)
|
||||
(assoc :id media-id)))
|
||||
(assoc index (:id media) media-id))
|
||||
index))
|
||||
{}
|
||||
rows)]
|
||||
|
||||
(when (seq index)
|
||||
(binding [bfc/*state* (atom {:index index})]
|
||||
(update file :data (fn [fdata]
|
||||
(-> fdata
|
||||
(update :pages-index #'bfc/relink-shapes)
|
||||
(update :components #'bfc/relink-shapes)
|
||||
(update :media #'bfc/relink-media)
|
||||
(d/without-nils))))))))
|
||||
|
||||
|
||||
(defn repair-file
|
||||
"Internal helper for validate and repair the file. The operation is
|
||||
applied multiple times untile file is fixed or max iteration counter
|
||||
is reached (default 10)"
|
||||
[file libs & {:keys [max-iterations] :or {max-iterations 10}}]
|
||||
|
||||
(let [validate-and-repair
|
||||
(fn [file libs iteration]
|
||||
(when-let [errors (not-empty (cfv/validate-file file libs))]
|
||||
(l/trc :hint "repairing file"
|
||||
:file-id (str (:id file))
|
||||
:iteration iteration
|
||||
:errors (count errors))
|
||||
(let [changes (cfr/repair-file file libs errors)]
|
||||
(-> file
|
||||
(update :revn inc)
|
||||
(update :data cpc/process-changes changes)))))
|
||||
|
||||
process-file
|
||||
(fn [file libs]
|
||||
(loop [file file
|
||||
iteration 0]
|
||||
(if (< iteration max-iterations)
|
||||
(if-let [file (validate-and-repair file libs iteration)]
|
||||
(recur file (inc iteration))
|
||||
file)
|
||||
(do
|
||||
(l/wrn :hint "max retry num reached on repairing file"
|
||||
:file-id (str (:id file))
|
||||
:iteration iteration)
|
||||
file))))
|
||||
|
||||
file'
|
||||
(process-file file libs)]
|
||||
|
||||
(when (not= (:revn file) (:revn file'))
|
||||
(l/trc :hint "file repaired" :file-id (str (:id file))))
|
||||
|
||||
file'))
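;; Sketch: repair-file expects the libraries index as its second
;; argument, so it is normally run through app.srepl.main/process-file!
;; with :with-libraries? true; the uuid below is a placeholder.
(comment
  (app.srepl.main/process-file!
   #uuid "00000000-0000-0000-0000-000000000000"
   repair-file
   :with-libraries? true))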
|
||||
|
||||
(defn fix-touched-shapes-group
|
||||
[file _]
|
||||
;; Remove :shapes-group from the touched elements
|
||||
(letfn [(fix-fdata [data]
|
||||
(-> data
|
||||
(update :pages-index update-vals fix-container)))
|
||||
|
||||
(fix-container [container]
|
||||
(d/update-when container :objects update-vals fix-shape))
|
||||
|
||||
(fix-shape [shape]
|
||||
(d/update-when shape :touched
|
||||
(fn [touched]
|
||||
(disj touched :shapes-group))))]
|
||||
file (-> file
|
||||
(update :data fix-fdata))))
|
||||
|
||||
(defn add-swap-slots
|
||||
[file libs _opts]
|
||||
;; Detect swapped copies and try to generate a valid swap-slot.
|
||||
(letfn [(process-fdata [data]
|
||||
;; Walk through all containers in the file, both pages and deleted components.
|
||||
(reduce process-container data (ctf/object-containers-seq data)))
|
||||
|
||||
(process-container [data container]
|
||||
;; Walk through all shapes in depth-first tree order.
|
||||
(l/dbg :hint "Processing container" :type (:type container) :name (:name container))
|
||||
(let [root-shape (ctn/get-container-root container)]
|
||||
(ctf/update-container data
|
||||
container
|
||||
#(reduce process-shape % (ctn/get-direct-children container root-shape)))))
|
||||
|
||||
(process-shape [container shape]
|
||||
;; Look for head copies in the first level (either component roots or inside main components).
|
||||
;; Even if they have been swapped, we don't add slot to them because there is no way to know
|
||||
;; the original shape. Only children.
|
||||
(if (and (ctk/instance-head? shape)
|
||||
(ctk/in-component-copy? shape)
|
||||
(nil? (ctk/get-swap-slot shape)))
|
||||
(process-copy-head container shape)
|
||||
(reduce process-shape container (ctn/get-direct-children container shape))))
|
||||
|
||||
(process-copy-head [container head-shape]
|
||||
;; Process recursively all children, comparing each one with the corresponding child in the main
|
||||
;; component, looking by position. If the shape-ref does not point to the found child, then it has
|
||||
;; been swapped and need to set up a slot.
|
||||
(l/trc :hint "Processing copy-head" :id (:id head-shape) :name (:name head-shape))
|
||||
(let [component-shape (ctf/find-ref-shape file container libs head-shape :include-deleted? true :with-context? true)
|
||||
component-container (:container (meta component-shape))]
|
||||
(loop [container container
|
||||
children (map #(ctn/get-shape container %) (:shapes head-shape))
|
||||
component-children (map #(ctn/get-shape component-container %) (:shapes component-shape))]
|
||||
(let [child (first children)
|
||||
component-child (first component-children)]
|
||||
(if (or (nil? child) (nil? component-child))
|
||||
container
|
||||
(let [container (if (and (not (ctk/is-main-of? component-child child true))
|
||||
(nil? (ctk/get-swap-slot child))
|
||||
(ctk/instance-head? child))
|
||||
(let [slot (guess-swap-slot component-child component-container)]
|
||||
(l/dbg :hint "child" :id (:id child) :name (:name child) :slot slot)
|
||||
(ctn/update-shape container (:id child)
|
||||
#(update % :touched
|
||||
cfh/set-touched-group
|
||||
(ctk/build-swap-slot-group slot))))
|
||||
container)]
|
||||
(recur (process-copy-head container child)
|
||||
(rest children)
|
||||
(rest component-children))))))))
|
||||
|
||||
(guess-swap-slot [shape container]
|
||||
;; To guess the slot, we must follow the chain until we find the definitive main. But
|
||||
;; we cannot navigate by shape-ref, because main shapes may also have been swapped. So
|
||||
;; chain by position, too.
|
||||
(if-let [slot (ctk/get-swap-slot shape)]
|
||||
slot
|
||||
(if-not (ctk/in-component-copy? shape)
|
||||
(:id shape)
|
||||
(let [head-copy (ctn/get-component-shape (:objects container) shape)]
|
||||
(if (= (:id head-copy) (:id shape))
|
||||
(:id shape)
|
||||
(let [head-main (ctf/find-ref-shape file
|
||||
container
|
||||
libs
|
||||
head-copy
|
||||
:include-deleted? true
|
||||
:with-context? true)
|
||||
container-main (:container (meta head-main))
|
||||
shape-main (find-match-by-position shape
|
||||
head-copy
|
||||
container
|
||||
head-main
|
||||
container-main)]
|
||||
(guess-swap-slot shape-main container-main)))))))
|
||||
|
||||
(find-match-by-position [shape-copy head-copy container-copy head-main container-main]
|
||||
;; Find the shape in the main that has the same position under its parent than
|
||||
;; the copy under its one. To get the parent we must process recursively until
|
||||
;; the component head, because mains may also have been swapped.
|
||||
(let [parent-copy (ctn/get-shape container-copy (:parent-id shape-copy))
|
||||
parent-main (if (= (:id parent-copy) (:id head-copy))
|
||||
head-main
|
||||
(find-match-by-position parent-copy
|
||||
head-copy
|
||||
container-copy
|
||||
head-main
|
||||
container-main))
|
||||
index (cfh/get-position-on-parent (:objects container-copy)
|
||||
(:id shape-copy))
|
||||
shape-main-id (dm/get-in parent-main [:shapes index])]
|
||||
(ctn/get-shape container-main shape-main-id)))]
|
||||
|
||||
file (-> file
|
||||
(update :data process-fdata))))
|
||||
@@ -7,38 +7,18 @@
|
||||
(ns app.srepl.helpers
|
||||
"A main namespace for server repl."
|
||||
(:refer-clojure :exclude [parse-uuid])
|
||||
#_:clj-kondo/ignore
|
||||
(:require
|
||||
[app.common.data :as d]
|
||||
[app.common.exceptions :as ex]
|
||||
[app.common.features :as cfeat]
|
||||
[app.common.files.changes :as cpc]
|
||||
[app.common.files.migrations :as pmg]
|
||||
[app.common.files.repair :as repair]
|
||||
[app.common.files.validate :as validate]
|
||||
[app.common.logging :as l]
|
||||
[app.common.pprint :refer [pprint]]
|
||||
[app.common.spec :as us]
|
||||
[app.common.uuid :as uuid]
|
||||
[app.config :as cfg]
|
||||
[app.common.files.migrations :as fmg]
|
||||
[app.common.files.validate :as cfv]
|
||||
[app.db :as db]
|
||||
[app.db.sql :as sql]
|
||||
[app.features.components-v2 :as feat.comp-v2]
|
||||
[app.features.fdata :as feat.fdata]
|
||||
[app.main :refer [system]]
|
||||
[app.main :as main]
|
||||
[app.rpc.commands.files :as files]
|
||||
[app.rpc.commands.files-update :as files-update]
|
||||
[app.rpc.commands.files-snapshot :as fsnap]
|
||||
[app.util.blob :as blob]
|
||||
[app.util.objects-map :as omap]
|
||||
[app.util.pointer-map :as pmap]
|
||||
[app.util.time :as dt]
|
||||
[clojure.spec.alpha :as s]
|
||||
[clojure.stacktrace :as strace]
|
||||
[clojure.walk :as walk]
|
||||
[cuerdas.core :as str]
|
||||
[expound.alpha :as expound]
|
||||
[promesa.core :as p]
|
||||
[promesa.exec :as px]
|
||||
[promesa.exec.csp :as sp]))
|
||||
[app.util.pointer-map :as pmap]))
|
||||
|
||||
(def ^:dynamic *system* nil)
|
||||
|
||||
@@ -49,264 +29,160 @@
|
||||
|
||||
(defn parse-uuid
|
||||
[v]
|
||||
(if (uuid? v)
|
||||
v
|
||||
(d/parse-uuid v)))
|
||||
(if (string? v)
|
||||
(d/parse-uuid v)
|
||||
v))
|
||||
|
||||
(defn get-file
|
||||
"Get the migrated data of one file."
|
||||
([id] (get-file (or *system* main/system) id nil))
|
||||
([system id & {:keys [raw?] :as opts}]
|
||||
(db/run! system
|
||||
(fn [system]
|
||||
(let [file (files/get-file system id :migrate? false)]
|
||||
(if raw?
|
||||
file
|
||||
(binding [pmap/*load-fn* (partial feat.fdata/load-pointer system id)]
|
||||
(-> file
|
||||
(update :data feat.fdata/process-pointers deref)
|
||||
(update :data feat.fdata/process-objects (partial into {}))
|
||||
(fmg/migrate-file)))))))))
|
||||
|
||||
(defn update-file!
|
||||
[system {:keys [id] :as file}]
|
||||
(let [conn (db/get-connection system)
|
||||
file (if (contains? (:features file) "fdata/objects-map")
|
||||
(feat.fdata/enable-objects-map file)
|
||||
file)
|
||||
|
||||
file (if (contains? (:features file) "fdata/pointer-map")
|
||||
(binding [pmap/*tracked* (pmap/create-tracked)]
|
||||
(let [file (feat.fdata/enable-pointer-map file)]
|
||||
(feat.fdata/persist-pointers! system id)
|
||||
file))
|
||||
file)
|
||||
|
||||
file (-> file
|
||||
(update :features db/encode-pgarray conn "text")
|
||||
(update :data blob/encode))]
|
||||
|
||||
(db/update! conn :file
|
||||
{:revn (:revn file)
|
||||
:data (:data file)
|
||||
:version (:version file)
|
||||
:features (:features file)
|
||||
:deleted-at (:deleted-at file)
|
||||
:created-at (:created-at file)
|
||||
:modified-at (:modified-at file)
|
||||
:data-backend nil
|
||||
:has-media-trimmed false}
|
||||
{:id (:id file)})))
|
||||
|
||||
(defn update-team!
|
||||
[system {:keys [id] :as team}]
|
||||
(let [conn (db/get-connection system)
|
||||
params (-> team
|
||||
(update :features db/encode-pgarray conn "text")
|
||||
(dissoc :id))]
|
||||
(db/update! conn :team
|
||||
params
|
||||
{:id id})
|
||||
team))
|
||||
|
||||
(defn get-raw-file
|
||||
"Get the migrated data of one file."
|
||||
([id] (get-raw-file (or *system* main/system) id))
|
||||
([system id]
|
||||
(db/run! system
|
||||
(fn [system]
|
||||
(files/get-file system id :migrate? false)))))
|
||||
|
||||
(defn reset-file-data!
|
||||
"Hardcode replace of the data of one file."
|
||||
[system id data]
|
||||
(db/tx-run! system (fn [system]
|
||||
(db/update! system :file
|
||||
{:data data}
|
||||
{:id id}))))
|
||||
|
||||
(defn get-file
|
||||
"Get the migrated data of one file."
|
||||
[system id & {:keys [migrate?] :or {migrate? true}}]
|
||||
(db/run! system
|
||||
(fn [system]
|
||||
(binding [pmap/*load-fn* (partial feat.fdata/load-pointer system id)]
|
||||
(-> (files/get-file system id :migrate? migrate?)
|
||||
(update :data feat.fdata/process-pointers deref))))))
|
||||
|
||||
(defn validate
|
||||
"Validate structure, referencial integrity and semantic coherence of
|
||||
all contents of a file. Returns a list of errors."
|
||||
[system id]
|
||||
(db/tx-run! system
|
||||
(fn [{:keys [::db/conn] :as system}]
|
||||
(binding [pmap/*load-fn* (partial feat.fdata/load-pointer system id)]
|
||||
(let [id (if (string? id) (parse-uuid id) id)
|
||||
file (files/get-file system id)
|
||||
libs (->> (files/get-file-libraries conn id)
|
||||
(into [file] (map (fn [{:keys [id]}]
|
||||
(binding [pmap/*load-fn* (partial feat.fdata/load-pointer system id)]
|
||||
(-> (files/get-file system id :migrate? false)
|
||||
(feat.fdata/process-pointers deref)
|
||||
(pmg/migrate-file))))))
|
||||
(d/index-by :id))]
|
||||
(validate/validate-file file libs))))))
|
||||
(fn [system]
|
||||
(db/update! system :file
|
||||
{:data data}
|
||||
{:id id}))))
|
||||
|
||||
(defn repair!
|
||||
"Repair the list of errors detected by validation."
|
||||
[system id]
|
||||
(db/tx-run! system
|
||||
(fn [{:keys [::db/conn] :as system}]
|
||||
(binding [pmap/*tracked* (pmap/create-tracked)
|
||||
pmap/*load-fn* (partial feat.fdata/load-pointer system id)]
|
||||
(let [id (if (string? id) (parse-uuid id) id)
|
||||
file (files/get-file system id)
|
||||
libs (->> (files/get-file-libraries conn id)
|
||||
(into [file] (map (fn [{:keys [id]}]
|
||||
(binding [pmap/*load-fn* (partial feat.fdata/load-pointer system id)]
|
||||
(-> (files/get-file system id :migrate? false)
|
||||
(feat.fdata/process-pointers deref)
|
||||
(pmg/migrate-file))))))
|
||||
(d/index-by :id))
|
||||
errors (validate/validate-file file libs)
|
||||
changes (repair/repair-file file libs errors)
|
||||
|
||||
file (-> file
|
||||
(update :revn inc)
|
||||
(update :data cpc/process-changes changes)
|
||||
(update :data blob/encode))]
|
||||
(def ^:private sql:snapshots-with-file
|
||||
"WITH files AS (
|
||||
SELECT f.id AS file_id,
|
||||
(SELECT fc.id
|
||||
FROM file_change AS fc
|
||||
WHERE fc.label = ?
|
||||
AND fc.file_id = f.id
|
||||
ORDER BY fc.created_at DESC
|
||||
LIMIT 1) AS id
|
||||
FROM file AS f
|
||||
) SELECT * FROM files
|
||||
WHERE file_id = ANY(?)
|
||||
AND id IS NOT NULL")
|
||||
|
||||
(when (contains? (:features file) "fdata/pointer-map")
|
||||
(feat.fdata/persist-pointers! system id))
|
||||
(defn get-file-snapshots
|
||||
"Get a seq parirs of file-id and snapshot-id for a set of files
|
||||
and specified label"
|
||||
[conn label ids]
|
||||
(db/exec! conn [sql:snapshots-with-file label
|
||||
(db/create-array conn "uuid" ids)]))
|
||||
|
||||
(db/update! conn :file
|
||||
{:revn (:revn file)
|
||||
:data (:data file)
|
||||
:data-backend nil
|
||||
:modified-at (dt/now)
|
||||
:has-media-trimmed false}
|
||||
{:id (:id file)})
|
||||
:repaired)))))
|
||||
(defn take-team-snapshot!
|
||||
[system team-id label]
|
||||
(let [conn (db/get-connection system)]
|
||||
(->> (feat.comp-v2/get-and-lock-team-files conn team-id)
|
||||
(map (fn [file-id]
|
||||
{:file-id file-id
|
||||
:label label}))
|
||||
(reduce (fn [result params]
|
||||
(fsnap/take-file-snapshot! conn params)
|
||||
(inc result))
|
||||
0))))
|
||||
|
||||
(defn update-file!
|
||||
"Apply a function to the data of one file. Optionally save the changes or not.
|
||||
The function receives the decoded and migrated file data."
|
||||
[system & {:keys [update-fn id rollback? migrate? inc-revn?]
|
||||
:or {rollback? true migrate? true inc-revn? true}}]
|
||||
(letfn [(process-file [{:keys [::db/conn] :as system} {:keys [features] :as file}]
|
||||
(binding [pmap/*tracked* (pmap/create-tracked)
|
||||
pmap/*load-fn* (partial feat.fdata/load-pointer system id)
|
||||
cfeat/*wrap-with-pointer-map-fn*
|
||||
(if (contains? features "fdata/pointer-map") pmap/wrap identity)
|
||||
cfeat/*wrap-with-objects-map-fn*
|
||||
(if (contains? features "fdata/objectd-map") omap/wrap identity)]
|
||||
(defn restore-team-snapshot!
|
||||
[system team-id label]
|
||||
(let [conn (db/get-connection system)
|
||||
ids (->> (feat.comp-v2/get-and-lock-team-files conn team-id)
|
||||
(into #{}))
|
||||
|
||||
(let [file (cond-> (update-fn file)
|
||||
inc-revn? (update :revn inc))
|
||||
features (db/create-array conn "text" (:features file))
|
||||
data (blob/encode (:data file))]
|
||||
snap (get-file-snapshots conn label ids)
|
||||
|
||||
(db/update! conn :file
|
||||
{:data data
|
||||
:revn (:revn file)
|
||||
:features features}
|
||||
{:id id}))
|
||||
ids' (into #{} (map :file-id) snap)
|
||||
team (-> (feat.comp-v2/get-team conn team-id)
|
||||
(update :features disj "components/v2"))]
|
||||
|
||||
(when (contains? (:features file) "fdata/pointer-map")
|
||||
(feat.fdata/persist-pointers! system id))
|
||||
(when (not= ids ids')
|
||||
(throw (RuntimeException. "no uniform snapshot available")))
|
||||
|
||||
(dissoc file :data)))]
|
||||
(feat.comp-v2/update-team! conn team)
|
||||
(reduce (fn [result params]
|
||||
(fsnap/restore-file-snapshot! conn params)
|
||||
(inc result))
|
||||
0
|
||||
snap)))
|
||||
|
||||
(db/tx-run! system
|
||||
(fn [system]
|
||||
(binding [*system* system]
|
||||
(try
|
||||
(->> (files/get-file system id :migrate? migrate?)
|
||||
(process-file system))
|
||||
(finally
|
||||
(when rollback?
|
||||
(db/rollback! system)))))))))
|
||||
(defn process-file!
|
||||
[system file-id update-fn & {:keys [label validate? with-libraries?] :or {validate? true} :as opts}]
|
||||
|
||||
(defn analyze-files
|
||||
"Apply a function to all files in the database, reading them in
|
||||
batches. Do not change data.
|
||||
(when (string? label)
|
||||
(fsnap/take-file-snapshot! system {:file-id file-id :label label}))
|
||||
|
||||
The `on-file` parameter should be a function that receives the file
|
||||
and the previous state and returns the new state.
|
||||
(let [conn (db/get-connection system)
|
||||
file (get-file system file-id opts)
|
||||
libs (when with-libraries?
|
||||
(->> (files/get-file-libraries conn file-id)
|
||||
(into [file] (map (fn [{:keys [id]}]
|
||||
(get-file system id))))
|
||||
(d/index-by :id)))
|
||||
|
||||
Emits rollback at the end of operation."
|
||||
[system & {:keys [chunk-size max-items start-at on-file on-error on-end on-init with-libraries?]
|
||||
:or {chunk-size 10 max-items Long/MAX_VALUE}}]
|
||||
(letfn [(get-chunk [conn cursor]
|
||||
(let [sql (str "SELECT id, created_at FROM file "
|
||||
" WHERE created_at < ? AND deleted_at is NULL "
|
||||
" ORDER BY created_at desc LIMIT ?")
|
||||
rows (db/exec! conn [sql cursor chunk-size])]
|
||||
[(some->> rows peek :created-at) (map :id rows)]))
|
||||
file' (if with-libraries?
|
||||
(update-fn file libs opts)
|
||||
(update-fn file opts))]
|
||||
|
||||
(get-candidates [conn]
|
||||
(->> (d/iteration (partial get-chunk conn)
|
||||
:vf second
|
||||
:kf first
|
||||
:initk (or start-at (dt/now)))
|
||||
(take max-items)))
|
||||
|
||||
(on-error* [cause file]
|
||||
(println "unexpected exception happened on processing file: " (:id file))
|
||||
(strace/print-stack-trace cause))
|
||||
|
||||
(process-file [{:keys [::db/conn] :as system} file-id]
|
||||
(let [file (binding [pmap/*load-fn* (partial feat.fdata/load-pointer system file-id)]
|
||||
(-> (files/get-file system file-id)
|
||||
(update :data feat.fdata/process-pointers deref)))
|
||||
|
||||
libs (when with-libraries?
|
||||
(->> (files/get-file-libraries conn file-id)
|
||||
(into [file] (map (fn [{:keys [id]}]
|
||||
(binding [pmap/*load-fn* (partial feat.fdata/load-pointer system id)]
|
||||
(-> (files/get-file system id)
|
||||
(update :data feat.fdata/process-pointers deref))))))
|
||||
(d/index-by :id)))]
|
||||
(try
|
||||
(if with-libraries?
|
||||
(on-file file libs)
|
||||
(on-file file))
|
||||
(catch Throwable cause
|
||||
((or on-error on-error*) cause file)))))]
|
||||
|
||||
(db/tx-run! system
|
||||
(fn [{:keys [::db/conn] :as system}]
|
||||
(try
|
||||
(binding [*system* system]
|
||||
(when (fn? on-init) (on-init))
|
||||
(run! (partial process-file system) (get-candidates conn)))
|
||||
(finally
|
||||
(when (fn? on-end)
|
||||
(ex/ignoring (on-end)))
|
||||
(db/rollback! system)))))))
|
||||
|
||||
(defn process-files!
|
||||
"Apply a function to all files in the database, reading them in
|
||||
batches."
|
||||
|
||||
[system & {:keys [chunk-size
|
||||
max-items
|
||||
workers
|
||||
start-at
|
||||
on-file
|
||||
on-error
|
||||
on-end
|
||||
on-init
|
||||
rollback?]
|
||||
:or {chunk-size 10
|
||||
max-items Long/MAX_VALUE
|
||||
workers 1
|
||||
rollback? true}}]
|
||||
(letfn [(get-chunk [conn cursor]
|
||||
(let [sql (str "SELECT id, created_at FROM file "
|
||||
" WHERE created_at < ? AND deleted_at is NULL "
|
||||
" ORDER BY created_at desc LIMIT ?")
|
||||
rows (db/exec! conn [sql cursor chunk-size])]
|
||||
[(some->> rows peek :created-at) (map :id rows)]))
|
||||
|
||||
(get-candidates [conn]
|
||||
(->> (d/iteration (partial get-chunk conn)
|
||||
:vf second
|
||||
:kf first
|
||||
:initk (or start-at (dt/now)))
|
||||
(take max-items)))
|
||||
|
||||
(on-error* [cause file]
|
||||
(println! "unexpected exception happened on processing file: " (:id file))
|
||||
(strace/print-stack-trace cause))
|
||||
|
||||
(process-file [system file-id]
|
||||
(try
|
||||
(let [{:keys [features] :as file} (files/get-file system file-id)]
|
||||
(binding [pmap/*tracked* (pmap/create-tracked)
|
||||
pmap/*load-fn* (partial feat.fdata/load-pointer system file-id)
|
||||
cfeat/*wrap-with-pointer-map-fn*
|
||||
(if (contains? features "fdata/pointer-map") pmap/wrap identity)
|
||||
cfeat/*wrap-with-objects-map-fn*
|
||||
(if (contains? features "fdata/objectd-map") omap/wrap identity)]
|
||||
|
||||
(on-file file)
|
||||
|
||||
(when (contains? features "fdata/pointer-map")
|
||||
(feat.fdata/persist-pointers! system file-id))))
|
||||
|
||||
(catch Throwable cause
|
||||
((or on-error on-error*) cause file-id))))
|
||||
|
||||
(run-worker [in index]
|
||||
(db/tx-run! system
|
||||
(fn [system]
|
||||
(binding [*system* system]
|
||||
(loop [i 0]
|
||||
(when-let [file-id (sp/take! in)]
|
||||
(println! "=> worker: index:" index "| loop:" i "| file:" (str file-id) "|" (px/get-name))
|
||||
(process-file system file-id)
|
||||
(recur (inc i)))))
|
||||
|
||||
(when rollback?
|
||||
(db/rollback! system)))))
|
||||
|
||||
(run-producer [input]
|
||||
(db/tx-run! system (fn [{:keys [::db/conn]}]
|
||||
(doseq [file-id (get-candidates conn)]
|
||||
(println! "=> producer:" file-id "|" (px/get-name))
|
||||
(sp/put! input file-id))
|
||||
(sp/close! input))))]
|
||||
|
||||
(when (fn? on-init) (on-init))
|
||||
|
||||
(let [input (sp/chan :buf chunk-size)
|
||||
producer (px/thread
|
||||
{:name "penpot/srepl/producer"}
|
||||
(run-producer input))
|
||||
threads (->> (range workers)
|
||||
(map (fn [index]
|
||||
(px/thread
|
||||
{:name (str "penpot/srepl/worker/" index)}
|
||||
(run-worker input index))))
|
||||
(cons producer)
|
||||
(doall))]
|
||||
|
||||
(run! p/await! threads)
|
||||
(when (fn? on-end) (on-end)))))
|
||||
(when (and (some? file')
|
||||
(not (identical? file file')))
|
||||
(when validate? (cfv/validate-file-schema! file'))
|
||||
(let [file' (update file' :revn inc)]
|
||||
(update-file! system file')
|
||||
true))))
|
||||
|
||||
@@ -9,90 +9,100 @@
|
||||
#_:clj-kondo/ignore
|
||||
(:require
|
||||
[app.auth :refer [derive-password]]
|
||||
[app.binfile.common :as bfc]
|
||||
[app.common.data :as d]
|
||||
[app.common.data.macros :as dm]
|
||||
[app.common.exceptions :as ex]
|
||||
[app.common.features :as cfeat]
|
||||
[app.common.files.validate :as cfv]
|
||||
[app.common.logging :as l]
|
||||
[app.common.pprint :as p]
|
||||
[app.common.spec :as us]
|
||||
[app.common.uuid :as uuid]
|
||||
[app.config :as cf]
|
||||
[app.db :as db]
|
||||
[app.features.fdata :as features.fdata]
|
||||
[app.features.components-v2 :as feat.comp-v2]
|
||||
[app.features.fdata :as feat.fdata]
|
||||
[app.main :as main]
|
||||
[app.msgbus :as mbus]
|
||||
[app.rpc.commands.auth :as auth]
|
||||
[app.rpc.commands.files :as files]
|
||||
[app.rpc.commands.files-snapshot :as fsnap]
|
||||
[app.rpc.commands.management :as mgmt]
|
||||
[app.rpc.commands.profile :as profile]
|
||||
[app.srepl.cli :as cli]
|
||||
[app.srepl.fixes :as fixes]
|
||||
[app.srepl.helpers :as h]
|
||||
[app.storage :as sto]
|
||||
[app.util.blob :as blob]
|
||||
[app.util.objects-map :as omap]
|
||||
[app.util.pointer-map :as pmap]
|
||||
[app.util.time :as dt]
|
||||
[app.worker :as wrk]
|
||||
[clojure.pprint :refer [pprint print-table]]
|
||||
[clojure.pprint :refer [print-table]]
|
||||
[clojure.stacktrace :as strace]
|
||||
[clojure.tools.namespace.repl :as repl]
|
||||
[cuerdas.core :as str]))
|
||||
[cuerdas.core :as str]
|
||||
[promesa.exec :as px]
|
||||
[promesa.exec.semaphore :as ps]
|
||||
[promesa.util :as pu]))
|
||||
|
||||
(defn print-available-tasks
|
||||
[system]
|
||||
(let [tasks (:app.worker/registry system)]
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; TASKS
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
(defn print-tasks
|
||||
[]
|
||||
(let [tasks (:app.worker/registry main/system)]
|
||||
(p/pprint (keys tasks) :level 200)))
|
||||
|
||||
(defn run-task!
|
||||
([system name]
|
||||
(run-task! system name {}))
|
||||
([system name params]
|
||||
(let [tasks (:app.worker/registry system)]
|
||||
(if-let [task-fn (get tasks name)]
|
||||
([tname]
|
||||
(run-task! tname {}))
|
||||
([tname params]
|
||||
(let [tasks (:app.worker/registry main/system)
|
||||
tname (if (keyword? tname) (name tname) tname)]
|
||||
(if-let [task-fn (get tasks tname)]
|
||||
(task-fn params)
|
||||
(println (format "no task '%s' found" name))))))
|
||||
(println (format "no task '%s' found" tname))))))
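;; Sketch: tasks are looked up by name in the worker registry; list them
;; with print-tasks first, the task name below is only a placeholder.
(comment
  (print-tasks)
  (run-task! "some-task-name" {}))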
|
||||
|
||||
(defn schedule-task!
|
||||
([system name]
|
||||
(schedule-task! system name {}))
|
||||
([system name props]
|
||||
(let [pool (:app.db/pool system)]
|
||||
([name]
|
||||
(schedule-task! name {}))
|
||||
([name props]
|
||||
(let [pool (:app.db/pool main/system)]
|
||||
(wrk/submit!
|
||||
::wrk/conn pool
|
||||
::wrk/task name
|
||||
::wrk/props props))))
|
||||
|
||||
(defn send-test-email!
|
||||
[system destination]
|
||||
(us/verify!
|
||||
:expr (some? system)
|
||||
:hint "system should be provided")
|
||||
|
||||
[destination]
|
||||
(us/verify!
|
||||
:expr (string? destination)
|
||||
:hint "destination should be provided")
|
||||
|
||||
(let [handler (:app.email/sendmail system)]
|
||||
(let [handler (:app.email/sendmail main/system)]
|
||||
(handler {:body "test email"
|
||||
:subject "test email"
|
||||
:to [destination]})))
|
||||
|
||||
(defn resend-email-verification-email!
|
||||
[system email]
|
||||
(us/verify!
|
||||
:expr (some? system)
|
||||
:hint "system should be provided")
|
||||
|
||||
(let [sprops (:app.setup/props system)
|
||||
pool (:app.db/pool system)
|
||||
[email]
|
||||
(let [sprops (:app.setup/props main/system)
|
||||
pool (:app.db/pool main/system)
|
||||
email (profile/clean-email email)
|
||||
profile (profile/get-profile-by-email pool email)]
|
||||
|
||||
(auth/send-email-verification! pool sprops profile)
|
||||
:email-sent))
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; PROFILES MANAGEMENT
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
(defn mark-profile-as-active!
|
||||
"Mark the profile blocked and removes all the http sessiones
|
||||
associated with the profile-id."
|
||||
[system email]
|
||||
(db/with-atomic [conn (:app.db/pool system)]
|
||||
[email]
|
||||
(db/with-atomic [conn (:app.db/pool main/system)]
|
||||
(when-let [profile (db/get* conn :profile
|
||||
{:email (str/lower email)}
|
||||
{:columns [:id :email]})]
|
||||
@@ -103,8 +113,8 @@
|
||||
(defn mark-profile-as-blocked!
|
||||
"Mark the profile blocked and removes all the http sessiones
|
||||
associated with the profile-id."
|
||||
[system email]
|
||||
(db/with-atomic [conn (:app.db/pool system)]
|
||||
[email]
|
||||
(db/with-atomic [conn (:app.db/pool main/system)]
|
||||
(when-let [profile (db/get* conn :profile
|
||||
{:email (str/lower email)}
|
||||
{:columns [:id :email]})]
|
||||
@@ -116,39 +126,42 @@
|
||||
(defn reset-password!
|
||||
"Reset a password to a specific one for a concrete user or all users
|
||||
if email is `:all` keyword."
|
||||
[system & {:keys [email password] :or {password "123123"} :as params}]
|
||||
[& {:keys [email password] :or {password "123123"} :as params}]
|
||||
(us/verify! (contains? params :email) "`email` parameter is mandatory")
|
||||
(db/with-atomic [conn (:app.db/pool system)]
|
||||
(db/with-atomic [conn (:app.db/pool main/system)]
|
||||
(let [password (derive-password password)]
|
||||
(if (= email :all)
|
||||
(db/exec! conn ["update profile set password=?" password])
|
||||
(let [email (str/lower email)]
|
||||
(db/exec! conn ["update profile set password=? where email=?" password email]))))))
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; FEATURES
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
(declare process-file!)
|
||||
|
||||
(defn enable-objects-map-feature-on-file!
|
||||
[system & {:keys [save? id]}]
|
||||
(h/update-file! system
|
||||
:id id
|
||||
:update-fn features.fdata/enable-objects-map
|
||||
:save? save?))
|
||||
[file-id & {:as opts}]
|
||||
(process-file! file-id feat.fdata/enable-objects-map opts))
|
||||
|
||||
(defn enable-pointer-map-feature-on-file!
|
||||
[system & {:keys [save? id]}]
|
||||
(h/update-file! system
|
||||
:id id
|
||||
:update-fn features.fdata/enable-pointer-map
|
||||
:save? save?))
|
||||
[file-id & {:as opts}]
|
||||
(process-file! file-id feat.fdata/enable-pointer-map opts))
|
||||
|
||||
(defn enable-storage-features-on-file!
|
||||
[file-id & {:as opts}]
|
||||
(enable-objects-map-feature-on-file! file-id opts)
|
||||
(enable-pointer-map-feature-on-file! file-id opts))
|
||||
|
||||
(defn enable-team-feature!
|
||||
[system team-id feature]
|
||||
[team-id feature]
|
||||
(dm/verify!
|
||||
"feature should be supported"
|
||||
(contains? cfeat/supported-features feature))
|
||||
|
||||
(let [team-id (if (string? team-id)
|
||||
(parse-uuid team-id)
|
||||
team-id)]
|
||||
(db/tx-run! system
|
||||
(let [team-id (h/parse-uuid team-id)]
|
||||
(db/tx-run! main/system
|
||||
(fn [{:keys [::db/conn]}]
|
||||
(let [team (-> (db/get conn :team {:id team-id})
|
||||
(update :features db/decode-pgarray #{}))
|
||||
@@ -160,15 +173,13 @@
|
||||
:enabled))))))
|
||||
|
||||
(defn disable-team-feature!
|
||||
[system team-id feature]
|
||||
[team-id feature]
|
||||
(dm/verify!
|
||||
"feature should be supported"
|
||||
(contains? cfeat/supported-features feature))
|
||||
|
||||
(let [team-id (if (string? team-id)
|
||||
(parse-uuid team-id)
|
||||
team-id)]
|
||||
(db/tx-run! system
|
||||
(let [team-id (h/parse-uuid team-id)]
|
||||
(db/tx-run! main/system
|
||||
(fn [{:keys [::db/conn]}]
|
||||
(let [team (-> (db/get conn :team {:id team-id})
|
||||
(update :features db/decode-pgarray #{}))
|
||||
@@ -179,56 +190,10 @@
|
||||
{:id team-id})
|
||||
:disabled))))))
|
||||
|
||||
(defn enable-storage-features-on-file!
|
||||
[system & {:as params}]
|
||||
(enable-objects-map-feature-on-file! system params)
|
||||
(enable-pointer-map-feature-on-file! system params))
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; NOTIFICATIONS
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
(defn instrument-var
|
||||
[var]
|
||||
(alter-var-root var (fn [f]
|
||||
(let [mf (meta f)]
|
||||
(if (::original mf)
|
||||
f
|
||||
(with-meta
|
||||
(fn [& params]
|
||||
(tap> params)
|
||||
(let [result (apply f params)]
|
||||
(tap> result)
|
||||
result))
|
||||
{::original f}))))))
|
||||
|
||||
(defn uninstrument-var
|
||||
[var]
|
||||
(alter-var-root var (fn [f]
|
||||
(or (::original (meta f)) f))))
|
||||
|
||||
(defn take-file-snapshot!
|
||||
"An internal helper that persist the file snapshot using non-gc
|
||||
collectable file-changes entry."
|
||||
[system & {:keys [file-id label]}]
|
||||
(let [file-id (h/parse-uuid file-id)]
|
||||
(db/tx-run! system fsnap/take-file-snapshot! {:file-id file-id :label label})))
|
||||
|
||||
(defn restore-file-snapshot!
|
||||
[system & {:keys [file-id id]}]
|
||||
(db/tx-run! system
|
||||
(fn [cfg]
|
||||
(let [file-id (h/parse-uuid file-id)
|
||||
id (h/parse-uuid id)]
|
||||
|
||||
(if (and (uuid? id) (uuid? file-id))
|
||||
(fsnap/restore-file-snapshot! cfg {:id id :file-id file-id})
|
||||
(println "=> invalid parameters"))))))
|
||||
|
||||
|
||||
(defn list-file-snapshots!
|
||||
[system & {:keys [file-id limit]}]
|
||||
(db/tx-run! system (fn [system]
|
||||
(let [params {:file-id (h/parse-uuid file-id)
|
||||
:limit limit}]
|
||||
(->> (fsnap/get-file-snapshots system (d/without-nils params))
|
||||
(print-table [:id :revn :created-at :label]))))))
|
||||
|
||||
(defn notify!
|
||||
[{:keys [::mbus/msgbus ::db/pool]} & {:keys [dest code message level]
|
||||
@@ -265,18 +230,13 @@
|
||||
{:columns [:profile-id]})
|
||||
(map :profile-id)))
|
||||
|
||||
(parse-uuid [v]
|
||||
(if (uuid? v)
|
||||
v
|
||||
(d/parse-uuid v)))
|
||||
|
||||
(resolve-dest [dest]
|
||||
(cond
|
||||
(uuid? dest)
|
||||
[dest]
|
||||
|
||||
(string? dest)
|
||||
(some-> dest parse-uuid resolve-dest)
|
||||
(some-> dest h/parse-uuid resolve-dest)
|
||||
|
||||
(nil? dest)
|
||||
(resolve-dest uuid/zero)
|
||||
@@ -314,21 +274,245 @@
|
||||
(coll? param)
|
||||
(sequence (comp
|
||||
(mapcat resolve-team)
|
||||
(keep parse-uuid))
|
||||
(keep h/parse-uuid))
|
||||
param)
|
||||
|
||||
(uuid? param)
|
||||
(resolve-team param)
|
||||
|
||||
(string? param)
|
||||
(some-> param parse-uuid resolve-team))
|
||||
(some-> param h/parse-uuid resolve-team))
|
||||
|
||||
(= op :profile-id)
|
||||
(if (coll? param)
|
||||
(sequence (keep parse-uuid) param)
|
||||
(sequence (keep h/parse-uuid) param)
|
||||
(resolve-dest param))))))]
|
||||
|
||||
(->> (resolve-dest dest)
|
||||
(filter some?)
|
||||
(into #{})
|
||||
(run! send))))
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; SNAPSHOTS
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
(defn take-file-snapshot!
|
||||
"An internal helper that persist the file snapshot using non-gc
|
||||
collectable file-changes entry."
|
||||
[& {:keys [file-id label]}]
|
||||
(let [file-id (h/parse-uuid file-id)]
|
||||
(db/tx-run! main/system fsnap/take-file-snapshot! {:file-id file-id :label label})))
|
||||
|
||||
(defn restore-file-snapshot!
|
||||
[file-id label]
|
||||
(let [file-id (h/parse-uuid file-id)]
|
||||
(db/tx-run! main/system
|
||||
(fn [{:keys [::db/conn] :as system}]
|
||||
(when-let [snapshot (->> (h/get-file-snapshots conn label #{file-id})
|
||||
(map :id)
|
||||
(first))]
|
||||
(fsnap/restore-file-snapshot! system
|
||||
{:id (:id snapshot)
|
||||
:file-id file-id}))))))
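;; Sketch: taking and later restoring a labelled snapshot of a single
;; file; the uuid and the label are placeholders.
(comment
  (take-file-snapshot! :file-id "00000000-0000-0000-0000-000000000000"
                       :label "before-repair")
  (restore-file-snapshot! "00000000-0000-0000-0000-000000000000"
                          "before-repair"))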
|
||||
|
||||
(defn list-file-snapshots!
|
||||
[file-id & {:keys [limit]}]
|
||||
(let [file-id (h/parse-uuid file-id)]
|
||||
(db/tx-run! main/system
|
||||
(fn [system]
|
||||
(let [params {:file-id file-id :limit limit}]
|
||||
(->> (fsnap/get-file-snapshots system (d/without-nils params))
|
||||
(print-table [:label :id :revn :created-at])))))))
|
||||
|
||||
(defn take-team-snapshot!
|
||||
[team-id & {:keys [label rollback?] :or {rollback? true}}]
|
||||
(let [team-id (h/parse-uuid team-id)
|
||||
label (or label (fsnap/generate-snapshot-label))]
|
||||
(-> (assoc main/system ::db/rollback rollback?)
|
||||
(db/tx-run! h/take-team-snapshot! team-id label))))
|
||||
|
||||
(defn restore-team-snapshot!
|
||||
"Restore a snapshot on all files of the team. The snapshot should
|
||||
exists for all files; if is not the case, an exception is raised."
|
||||
[team-id label & {:keys [rollback?] :or {rollback? true}}]
|
||||
(let [team-id (h/parse-uuid team-id)]
|
||||
(-> (assoc main/system ::db/rollback rollback?)
|
||||
(db/tx-run! h/restore-team-snapshot! team-id label))))
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; FILE VALIDATION & REPAIR
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
(defn validate-file
|
||||
"Validate structure, referencial integrity and semantic coherence of
|
||||
all contents of a file. Returns a list of errors."
|
||||
[file-id]
|
||||
(let [file-id (h/parse-uuid file-id)]
|
||||
(db/tx-run! (assoc main/system ::db/rollback true)
|
||||
(fn [{:keys [::db/conn] :as system}]
|
||||
(let [file (h/get-file system file-id)
|
||||
libs (->> (files/get-file-libraries conn file-id)
|
||||
(into [file] (map (fn [{:keys [id]}]
|
||||
(h/get-file system id))))
|
||||
(d/index-by :id))]
|
||||
(cfv/validate-file file libs))))))
|
||||
|
||||
(defn repair-file!
|
||||
"Repair the list of errors detected by validation."
|
||||
[file-id & {:keys [rollback?] :or {rollback? true} :as opts}]
|
||||
(let [system (assoc main/system ::db/rollback rollback?)
|
||||
file-id (h/parse-uuid file-id)
|
||||
opts (assoc opts :with-libraries? true)]
|
||||
(db/tx-run! system h/process-file! file-id fixes/repair-file opts)))
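;; Sketch: a typical validate-then-repair session on a single file,
;; keeping the default rollback; the uuid and label are placeholders.
(comment
  (validate-file "00000000-0000-0000-0000-000000000000")
  (repair-file! "00000000-0000-0000-0000-000000000000"
                :rollback? true
                :label "before-repair"))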
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; PROCESSING
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
(def sql:get-files
|
||||
"SELECT id FROM file
|
||||
WHERE deleted_at is NULL
|
||||
ORDER BY created_at DESC")
|
||||
|
||||
(defn process-file!
|
||||
"Apply a function to the file. Optionally save the changes or not.
|
||||
The function receives the decoded and migrated file data."
|
||||
[file-id update-fn & {:keys [rollback?] :or {rollback? true} :as opts}]
|
||||
(db/tx-run! (assoc main/system ::db/rollback rollback?)
|
||||
(fn [system]
|
||||
(binding [h/*system* system]
|
||||
(h/process-file! system file-id update-fn opts)))))
|
||||
|
||||
(defn process-team-files!
|
||||
"Apply a function to each file of the specified team."
|
||||
[team-id update-fn & {:keys [rollback? label] :or {rollback? true} :as opts}]
|
||||
(let [team-id (h/parse-uuid team-id)
|
||||
opts (dissoc opts :label)]
|
||||
(db/tx-run! (assoc main/system ::db/rollback rollback?)
|
||||
(fn [{:keys [::db/conn] :as system}]
|
||||
(when (string? label)
|
||||
(h/take-team-snapshot! system team-id label))
|
||||
|
||||
(binding [h/*system* system]
|
||||
(->> (feat.comp-v2/get-and-lock-team-files conn team-id)
|
||||
(reduce (fn [result file-id]
|
||||
(if (h/process-file! system file-id update-fn opts)
|
||||
(inc result)
|
||||
result))
|
||||
0)))))))
|
||||
|
||||
(defn process-files!
|
||||
"Apply a function to all files in the database"
|
||||
[update-fn & {:keys [max-items
|
||||
max-jobs
|
||||
rollback?
|
||||
query]
|
||||
:or {max-jobs 1
|
||||
max-items Long/MAX_VALUE
|
||||
rollback? true
|
||||
query sql:get-files}
|
||||
:as opts}]
|
||||
|
||||
(l/dbg :hint "process:start"
|
||||
:rollback rollback?
|
||||
:max-jobs max-jobs
|
||||
:max-items max-items)
|
||||
|
||||
(let [tpoint (dt/tpoint)
|
||||
factory (px/thread-factory :virtual false :prefix "penpot/file-process/")
|
||||
executor (px/cached-executor :factory factory)
|
||||
sjobs (ps/create :permits max-jobs)
|
||||
|
||||
process-file
|
||||
(fn [file-id idx tpoint]
|
||||
(try
|
||||
(l/trc :hint "process:file:start" :file-id (str file-id) :index idx)
|
||||
(let [system (assoc main/system ::db/rollback rollback?)]
|
||||
(db/tx-run! system (fn [system]
|
||||
(binding [h/*system* system]
|
||||
(h/process-file! system file-id update-fn opts)))))
|
||||
|
||||
(catch Throwable cause
|
||||
(l/wrn :hint "unexpected error on processing file (skiping)"
|
||||
:file-id (str file-id)
|
||||
:index idx
|
||||
:cause cause))
|
||||
(finally
|
||||
(ps/release! sjobs)
|
||||
(let [elapsed (dt/format-duration (tpoint))]
|
||||
(l/trc :hint "process:file:end"
|
||||
:file-id (str file-id)
|
||||
:index idx
|
||||
:elapsed elapsed)))))
|
||||
|
||||
process-files
|
||||
(fn [{:keys [::db/conn] :as system}]
|
||||
(db/exec! conn ["SET statement_timeout = 0"])
|
||||
(db/exec! conn ["SET idle_in_transaction_session_timeout = 0"])
|
||||
|
||||
(try
|
||||
(reduce (fn [idx file-id]
|
||||
(ps/acquire! sjobs)
|
||||
(px/run! executor (partial process-file file-id idx (dt/tpoint)))
|
||||
(inc idx))
|
||||
0
|
||||
(->> (db/cursor conn [query] {:chunk-size 1})
|
||||
(take max-items)
|
||||
(map :id)))
|
||||
(finally
|
||||
;; Close and await tasks
|
||||
(pu/close! executor))))]
|
||||
|
||||
(try
|
||||
(db/tx-run! main/system process-files)
|
||||
|
||||
(catch Throwable cause
|
||||
(l/dbg :hint "process:error" :cause cause))
|
||||
|
||||
(finally
|
||||
(let [elapsed (dt/format-duration (tpoint))]
|
||||
(l/dbg :hint "process:end"
|
||||
:rollback rollback?
|
||||
:elapsed elapsed))))))
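;; Sketch: running one of the fix functions over every file with the
;; default rollback, limiting the number of processed files while
;; testing; the option values are illustrative only.
(comment
  (process-files! fixes/fix-touched-shapes-group
                  :max-jobs 2
                  :max-items 100
                  :rollback? true))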
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; MISC
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
(defn instrument-var
|
||||
[var]
|
||||
(alter-var-root var (fn [f]
|
||||
(let [mf (meta f)]
|
||||
(if (::original mf)
|
||||
f
|
||||
(with-meta
|
||||
(fn [& params]
|
||||
(tap> params)
|
||||
(let [result (apply f params)]
|
||||
(tap> result)
|
||||
result))
|
||||
{::original f}))))))
|
||||
|
||||
(defn uninstrument-var
|
||||
[var]
|
||||
(alter-var-root var (fn [f]
|
||||
(or (::original (meta f)) f))))
|
||||
|
||||
|
||||
(defn duplicate-team
|
||||
[team-id & {:keys [name]}]
|
||||
(let [team-id (h/parse-uuid team-id)]
|
||||
(db/tx-run! main/system
|
||||
(fn [{:keys [::db/conn] :as cfg}]
|
||||
(db/exec-one! conn ["SET CONSTRAINTS ALL DEFERRED"])
|
||||
(let [team (-> (assoc cfg ::bfc/timestamp (dt/now))
|
||||
(mgmt/duplicate-team :team-id team-id :name name))
|
||||
rels (db/query conn :team-profile-rel {:team-id team-id})]
|
||||
|
||||
(doseq [rel rels]
|
||||
(let [params (-> rel
|
||||
(assoc :id (uuid/next))
|
||||
(assoc :team-id (:id team)))]
|
||||
(db/insert! conn :team-profile-rel params
|
||||
{::db/return-keys false}))))))))
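;; Sketch: duplicating a team together with its team-profile relations;
;; the uuid and the name are placeholders.
(comment
  (duplicate-team "00000000-0000-0000-0000-000000000000"
                  :name "duplicated team"))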
|
||||
|
||||
@@ -9,8 +9,6 @@
|
||||
(:require
|
||||
[app.common.data :as d]
|
||||
[app.common.data.macros :as dm]
|
||||
[app.common.exceptions :as ex]
|
||||
[app.common.logging :as l]
|
||||
[app.common.spec :as us]
|
||||
[app.common.uuid :as uuid]
|
||||
[app.db :as db]
|
||||
@@ -18,10 +16,13 @@
|
||||
[app.storage.impl :as impl]
|
||||
[app.storage.s3 :as ss3]
|
||||
[app.util.time :as dt]
|
||||
[app.worker :as wrk]
|
||||
[clojure.spec.alpha :as s]
|
||||
[datoteka.fs :as fs]
|
||||
[integrant.core :as ig]
|
||||
[promesa.core :as p]))
|
||||
[promesa.core :as p])
|
||||
(:import
|
||||
java.io.InputStream))
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; Storage Module State
|
||||
@@ -78,10 +79,15 @@
|
||||
|
||||
(defn- create-database-object
|
||||
[{:keys [::backend ::db/pool-or-conn]} {:keys [::content ::expired-at ::touched-at] :as params}]
|
||||
(let [id (uuid/random)
|
||||
(let [id (or (:id params) (uuid/random))
|
||||
mdata (cond-> (get-metadata params)
|
||||
(satisfies? impl/IContentHash content)
|
||||
(assoc :hash (impl/get-hash content)))
|
||||
(assoc :hash (impl/get-hash content))
|
||||
|
||||
:always
|
||||
(dissoc :id))
|
||||
|
||||
;; FIXME: touch object on deduplicated put operation ??
|
||||
|
||||
;; NOTE: for now we don't reuse the deleted objects, but in
|
||||
;; future we can consider reusing deleted objects if we
|
||||
@@ -165,19 +171,32 @@
|
||||
(impl/put-object object content))
|
||||
object)))
|
||||
|
||||
(def ^:private default-touch-delay
|
||||
"A default delay for the asynchronous touch operation"
|
||||
(dt/duration "5m"))
|
||||
|
||||
(defn touch-object!
|
||||
"Mark object as touched."
|
||||
[{:keys [::db/pool-or-conn] :as storage} object-or-id]
|
||||
[{:keys [::db/pool-or-conn] :as storage} object-or-id & {:keys [async]}]
|
||||
(us/assert! ::storage storage)
|
||||
(let [id (if (impl/object? object-or-id) (:id object-or-id) object-or-id)
|
||||
rs (db/update! pool-or-conn :storage-object
|
||||
{:touched-at (dt/now)}
|
||||
{:id id}
|
||||
{::db/return-keys? false})]
|
||||
(pos? (db/get-update-count rs))))
|
||||
(let [id (if (impl/object? object-or-id) (:id object-or-id) object-or-id)]
|
||||
(if async
|
||||
(wrk/submit! ::wrk/conn pool-or-conn
|
||||
::wrk/task :object-update
|
||||
::wrk/delay default-touch-delay
|
||||
:object :storage-object
|
||||
:id id
|
||||
:key :touched-at
|
||||
:val (dt/now))
|
||||
(-> (db/update! pool-or-conn :storage-object
|
||||
{:touched-at (dt/now)}
|
||||
{:id id})
|
||||
(db/get-update-count)
|
||||
(pos?)))))
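;; Sketch: the synchronous call updates touched-at immediately and
;; returns a boolean, while :async true only schedules the delayed
;; :object-update task; `storage` and `object-id` are placeholders.
(comment
  (touch-object! storage object-id)
  (touch-object! storage object-id :async true))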
|
||||
|
||||
(defn get-object-data
|
||||
"Return an input stream instance of the object content."
|
||||
^InputStream
|
||||
[storage object]
|
||||
(us/assert! ::storage storage)
|
||||
(when (or (nil? (:expired-at object))
|
||||
@@ -222,231 +241,8 @@
|
||||
(let [id (if (impl/object? object-or-id) (:id object-or-id) object-or-id)
|
||||
res (db/update! pool-or-conn :storage-object
|
||||
{:deleted-at (dt/now)}
|
||||
{:id id}
|
||||
{::db/return-keys? false})]
|
||||
{:id id})]
|
||||
(pos? (db/get-update-count res))))
|
||||
|
||||
(dm/export impl/resolve-backend)
|
||||
(dm/export impl/calculate-hash)
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; Garbage Collection: Permanently delete objects
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
;; A task responsible for permanently deleting storage files already
;; marked as deleted. The storage objects are practically never marked
;; for deletion directly by the api call. The touched-gc task is
;; responsible for collecting the usage of the object and marking it as
;; deleted. Only the TMP files are created with an expiration date in
;; the future.
|
||||
|
||||

(declare sql:retrieve-deleted-objects-chunk)

(defmethod ig/pre-init-spec ::gc-deleted-task [_]
  (s/keys :req [::storage ::db/pool]))

(defmethod ig/prep-key ::gc-deleted-task
  [_ cfg]
  (assoc cfg ::min-age (dt/duration {:hours 2})))

(defmethod ig/init-key ::gc-deleted-task
  [_ {:keys [::db/pool ::storage ::min-age]}]
  (letfn [(get-to-delete-chunk [cursor]
            (let [sql  (str "select s.* "
                            " from storage_object as s "
                            " where s.deleted_at is not null "
                            " and s.deleted_at < ? "
                            " order by s.deleted_at desc "
                            " limit 25")
                  rows (db/exec! pool [sql cursor])]
              [(some-> rows peek :deleted-at)
               (some->> (seq rows) (d/group-by #(-> % :backend keyword) :id #{}) seq)]))

          (get-to-delete-chunks [min-age]
            (d/iteration get-to-delete-chunk
                         :initk (dt/minus (dt/now) min-age)
                         :vf second
                         :kf first))
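
;; A minimal sketch of the chunked-pagination pattern used above, written
;; against clojure.core/iteration (which the `d/iteration` call above is
;; assumed to mirror); the in-memory `fetch-chunk` stands in for the SQL
;; query and its data is invented for illustration only.
(comment
  (letfn [(fetch-chunk [cursor]
            ;; pretend query: rows strictly older than the cursor, newest first
            (let [rows (->> [{:id 1 :deleted-at 9} {:id 2 :deleted-at 7} {:id 3 :deleted-at 4}]
                            (filter #(< (:deleted-at %) cursor))
                            (take 2)
                            (vec))]
              (when (seq rows)
                [(-> rows peek :deleted-at) rows])))]
    (vec (iteration fetch-chunk
                    :initk 10   ;; start from "now"
                    :kf first   ;; next cursor: oldest deleted-at in the chunk
                    :vf second))))
  ;; => [[{:id 1 ...} {:id 2 ...}] [{:id 3 ...}]]
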
          (delete-in-bulk! [backend-id ids]
            (try
              (db/with-atomic [conn pool]
                (let [sql  "delete from storage_object where id = ANY(?)"
                      ids' (db/create-array conn "uuid" ids)

                      total (-> (db/exec-one! conn [sql ids'])
                                (db/get-update-count))]

                  (-> (impl/resolve-backend storage backend-id)
                      (impl/del-objects-in-bulk ids))

                  (doseq [id ids]
                    (l/dbg :hint "gc-deleted: permanently delete storage object" :backend backend-id :id id))

                  total))

              (catch Throwable cause
                (l/err :hint "gc-deleted: unexpected error on bulk deletion"
                       :ids (vec ids)
                       :cause cause)
                0)))]

    (fn [params]
      (let [min-age (or (some-> params :min-age dt/duration) min-age)]
        (loop [total 0
               chunks (get-to-delete-chunks min-age)]
          (if-let [[backend-id ids] (first chunks)]
            (let [deleted (delete-in-bulk! backend-id ids)]
              (recur (+ total deleted)
                     (rest chunks)))
            (do
              (l/inf :hint "gc-deleted: task finished"
                     :min-age (dt/format-duration min-age)
                     :total total)
              {:deleted total})))))))

;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; Garbage Collection: Analyze touched objects
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;

;; This task is part of the garbage collection process for storage
;; objects and is responsible for analyzing touched objects and
;; marking them for deletion when appropriate.
;;
;; For example: when a file_media_object is deleted, the storage_object
;; it references is marked as touched. This means that some files that
;; depended on a concrete storage_object no longer exist, so that
;; storage_object may no longer be necessary and can become eligible
;; for elimination. This task periodically analyzes touched objects and
;; marks them either as frozen (the object still has other references
;; and remains valid) or as deleted (no references remain, so the
;; object is ready to be deleted).

(declare sql:retrieve-file-media-object-nrefs)
(declare sql:retrieve-file-object-thumbnail-nrefs)
(declare sql:retrieve-profile-nrefs)
(declare sql:retrieve-team-font-variant-nrefs)
(declare sql:retrieve-touched-objects-chunk)

(defmethod ig/pre-init-spec ::gc-touched-task [_]
  (s/keys :req [::db/pool]))

(defmethod ig/init-key ::gc-touched-task
  [_ {:keys [::db/pool]}]
  (letfn [(get-team-font-variant-nrefs [conn id]
            (-> (db/exec-one! conn [sql:retrieve-team-font-variant-nrefs id id id id]) :nrefs))

          (get-file-media-object-nrefs [conn id]
            (-> (db/exec-one! conn [sql:retrieve-file-media-object-nrefs id id]) :nrefs))

          (get-profile-nrefs [conn id]
            (-> (db/exec-one! conn [sql:retrieve-profile-nrefs id id]) :nrefs))

          (get-file-object-thumbnails [conn id]
            (-> (db/exec-one! conn [sql:retrieve-file-object-thumbnail-nrefs id]) :nrefs))

          (mark-freeze-in-bulk [conn ids]
            (db/exec-one! conn ["update storage_object set touched_at=null where id = ANY(?)"
                                (db/create-array conn "uuid" ids)]))

          (mark-delete-in-bulk [conn ids]
            (db/exec-one! conn ["update storage_object set deleted_at=now(), touched_at=null where id = ANY(?)"
                                (db/create-array conn "uuid" ids)]))

          ;; NOTE: A getter that retrieves the key which will be used
          ;; for grouping ids; previously there was no value, then we
          ;; introduced the `:reference` prop, which was later renamed
          ;; to `:bucket` and is now a string. It is implemented this
          ;; way for backward compatibility.

          ;; NOTE: we use "file-media-object" as the default value for
          ;; backward compatibility because during deployment old
          ;; backend instances can run at the same time as the new one,
          ;; so we can still have storage-objects created without a
          ;; bucket value. And we know that if it has no value, it
          ;; means :file-media-object.

          (get-bucket [{:keys [metadata]}]
            (or (some-> metadata :bucket)
                (some-> metadata :reference d/name)
                "file-media-object"))

          (retrieve-touched-chunk [conn cursor]
            (let [rows (->> (db/exec! conn [sql:retrieve-touched-objects-chunk cursor])
                            (mapv #(d/update-when % :metadata db/decode-transit-pgobject)))]
              (when (seq rows)
                [(-> rows peek :created-at)
                 (d/group-by get-bucket :id #{} rows)])))

          (retrieve-touched [conn]
            (d/iteration (partial retrieve-touched-chunk conn)
                         :initk (dt/now)
                         :vf second
                         :kf first))

          (process-objects! [conn get-fn ids bucket]
            (loop [to-freeze #{}
                   to-delete #{}
                   ids (seq ids)]
              (if-let [id (first ids)]
                (let [nrefs (get-fn conn id)]
                  (if (pos? nrefs)
                    (do
                      (l/debug :hint "gc-touched: processing storage object"
                               :id id :status "freeze"
                               :bucket bucket :refs nrefs)
                      (recur (conj to-freeze id) to-delete (rest ids)))
                    (do
                      (l/debug :hint "gc-touched: processing storage object"
                               :id id :status "delete"
                               :bucket bucket :refs nrefs)
                      (recur to-freeze (conj to-delete id) (rest ids)))))
                (do
                  (some->> (seq to-freeze) (mark-freeze-in-bulk conn))
                  (some->> (seq to-delete) (mark-delete-in-bulk conn))
                  [(count to-freeze) (count to-delete)]))))]

    (fn [_]
      (db/with-atomic [conn pool]
        (loop [to-freeze 0
               to-delete 0
               groups (retrieve-touched conn)]
          (if-let [[bucket ids] (first groups)]
            (let [[f d] (case bucket
                          "file-media-object" (process-objects! conn get-file-media-object-nrefs ids bucket)
                          "team-font-variant" (process-objects! conn get-team-font-variant-nrefs ids bucket)
                          "file-object-thumbnail" (process-objects! conn get-file-object-thumbnails ids bucket)
                          "profile" (process-objects! conn get-profile-nrefs ids bucket)
                          (ex/raise :type :internal
                                    :code :unexpected-unknown-reference
                                    :hint (dm/fmt "unknown reference %" bucket)))]
              (recur (+ to-freeze (long f))
                     (+ to-delete (long d))
                     (rest groups)))
            (do
              (l/info :hint "gc-touched: task finished" :to-freeze to-freeze :to-delete to-delete)
              {:freeze to-freeze :delete to-delete})))))))

(def sql:retrieve-touched-objects-chunk
  "SELECT so.*
     FROM storage_object AS so
    WHERE so.touched_at IS NOT NULL
      AND so.created_at < ?
    ORDER by so.created_at DESC
    LIMIT 500;")

(def sql:retrieve-file-media-object-nrefs
  "select ((select count(*) from file_media_object where media_id = ?) +
           (select count(*) from file_media_object where thumbnail_id = ?)) as nrefs")

(def sql:retrieve-file-object-thumbnail-nrefs
  "select (select count(*) from file_tagged_object_thumbnail where media_id = ?) as nrefs")

(def sql:retrieve-team-font-variant-nrefs
  "select ((select count(*) from team_font_variant where woff1_file_id = ?) +
           (select count(*) from team_font_variant where woff2_file_id = ?) +
           (select count(*) from team_font_variant where otf_file_id = ?) +
           (select count(*) from team_font_variant where ttf_file_id = ?)) as nrefs")

(def sql:retrieve-profile-nrefs
  "select ((select count(*) from profile where photo_id = ?) +
           (select count(*) from team where photo_id = ?)) as nrefs")

backend/src/app/storage/gc_deleted.clj (new file, 125 lines)
@@ -0,0 +1,125 @@
;; This Source Code Form is subject to the terms of the Mozilla Public
;; License, v. 2.0. If a copy of the MPL was not distributed with this
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
;;
;; Copyright (c) KALEIDOS INC

(ns app.storage.gc-deleted
  "A task responsible for permanently deleting storage files that are
  already marked as deleted. The storage objects are practically never
  marked for deletion directly by an api call.

  The touched-gc is responsible for collecting the usage of each object
  and marking it as deleted. Only the TMP files are created with an
  expiration date in the future."
  (:require
   [app.common.data :as d]
   [app.common.logging :as l]
   [app.db :as db]
   [app.storage :as-alias sto]
   [app.storage.impl :as impl]
   [app.util.time :as dt]
   [clojure.spec.alpha :as s]
   [integrant.core :as ig]))

(def ^:private sql:lock-sobjects
  "SELECT id FROM storage_object
    WHERE id = ANY(?::uuid[])
      FOR UPDATE
     SKIP LOCKED")

(defn- lock-ids
  "Perform a SELECT before the DELETE so the objects are properly locked
  and concurrent operations are prevented; we proceed only with the
  successfully locked objects."
  [conn ids]
  (let [ids (db/create-array conn "uuid" ids)]
    (->> (db/exec! conn [sql:lock-sobjects ids])
         (into #{} (map :id))
         (not-empty))))

(def ^:private sql:delete-sobjects
  "DELETE FROM storage_object
    WHERE id = ANY(?::uuid[])")

(defn- delete-sobjects!
  [conn ids]
  (let [ids (db/create-array conn "uuid" ids)]
    (-> (db/exec-one! conn [sql:delete-sobjects ids])
        (db/get-update-count))))
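
;; A small sketch of how the two helpers above compose inside a transaction:
;; only the ids that could actually be locked get deleted, so a concurrent run
;; simply skips them. `pool` and the uuid literal are placeholders.
(comment
  (db/with-atomic [conn pool]
    (when-let [locked-ids (lock-ids conn [#uuid "00000000-0000-0000-0000-000000000001"])]
      (delete-sobjects! conn locked-ids))))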

(defn- delete-in-bulk!
  [cfg backend-id ids]
  ;; We run the deletion in a separate transaction so that, if an
  ;; exception is raised while processing one chunk, it does not
  ;; affect the rest of the chunks.
  (try
    (db/tx-run! cfg
                (fn [{:keys [::db/conn ::sto/storage]}]
                  (when-let [ids (lock-ids conn ids)]
                    (let [total (delete-sobjects! conn ids)]
                      (-> (impl/resolve-backend storage backend-id)
                          (impl/del-objects-in-bulk ids))

                      (doseq [id ids]
                        (l/dbg :hint "permanently delete storage object"
                               :id (str id)
                               :backend (name backend-id)))
                      total))))
    (catch Throwable cause
      (l/err :hint "unexpected error on bulk deletion"
             :ids ids
             :cause cause))))

(defn- group-by-backend
  [items]
  (d/group-by (comp keyword :backend) :id #{} items))

(def ^:private sql:get-deleted-sobjects
  "SELECT s.* FROM storage_object AS s
    WHERE s.deleted_at IS NOT NULL
      AND s.deleted_at < now() - ?::interval
    ORDER BY s.deleted_at ASC")

(defn- get-buckets
  [conn min-age]
  (let [age (db/interval min-age)]
    (sequence
     (comp (partition-all 25)
           (mapcat group-by-backend))
     (db/cursor conn [sql:get-deleted-sobjects age]))))
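
;; A sketch (invented rows, integer ids standing in for uuids) of the shape
;; the grouping step produces: each chunk of rows becomes a map of
;; backend -> set of ids, which `mapcat` flattens into [backend ids] pairs
;; consumed by clean-deleted! below.
(comment
  (group-by-backend [{:id 1 :backend "assets-fs"}
                     {:id 2 :backend "assets-s3"}
                     {:id 3 :backend "assets-fs"}]))
  ;; => {:assets-fs #{1 3}, :assets-s3 #{2}}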

(defn- clean-deleted!
  [{:keys [::db/conn ::min-age] :as cfg}]
  (reduce (fn [total [backend-id ids]]
            (let [deleted (delete-in-bulk! cfg backend-id ids)]
              (+ total (or deleted 0))))
          0
          (get-buckets conn min-age)))

(defmethod ig/pre-init-spec ::handler [_]
  (s/keys :req [::sto/storage ::db/pool]))

(defmethod ig/prep-key ::handler
  [_ cfg]
  (assoc cfg ::min-age (dt/duration {:hours 2})))

(defmethod ig/init-key ::handler
  [_ {:keys [::min-age] :as cfg}]
  (fn [params]
    (let [min-age (dt/duration (or (:min-age params) min-age))]
      (db/tx-run! cfg (fn [cfg]
                        (let [cfg   (assoc cfg ::min-age min-age)
                              total (clean-deleted! cfg)]

                          (l/inf :hint "task finished"
                                 :min-age (dt/format-duration min-age)
                                 :total total)

                          {:deleted total}))))))
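
;; A usage sketch: the handler produced above is a plain function of the task
;; params, so a scheduler entry (or a REPL call) can override the default
;; threshold; `gc-handler` is a placeholder for the initialized handler.
(comment
  ;; run with the default 2h min-age
  (gc-handler {})
  ;; only collect objects deleted more than one hour ago
  (gc-handler {:min-age "1h"}))
  ;; => {:deleted <count of permanently removed storage objects>}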

backend/src/app/storage/gc_touched.clj (new file, 208 lines)
@@ -0,0 +1,208 @@
;; This Source Code Form is subject to the terms of the Mozilla Public
;; License, v. 2.0. If a copy of the MPL was not distributed with this
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
;;
;; Copyright (c) KALEIDOS INC

(ns app.storage.gc-touched
  "This task is part of the garbage collection process for storage
  objects and is responsible for analyzing touched objects and marking
  them for deletion when appropriate.

  For example: when a file_media_object is deleted, the storage_object
  it references is marked as touched. This means that some files that
  depended on a concrete storage_object no longer exist, so that
  storage_object may no longer be necessary and can become eligible for
  elimination. This task periodically analyzes touched objects and marks
  them either as frozen (the object still has other references and
  remains valid) or as deleted (no references remain, so the object is
  ready to be deleted)."
  (:require
   [app.common.data :as d]
   [app.common.data.macros :as dm]
   [app.common.exceptions :as ex]
   [app.common.logging :as l]
   [app.db :as db]
   [app.storage :as-alias sto]
   [app.storage.impl :as impl]
   [clojure.spec.alpha :as s]
   [integrant.core :as ig]))

(def ^:private sql:get-team-font-variant-nrefs
  "SELECT ((SELECT count(*) FROM team_font_variant WHERE woff1_file_id = ?) +
           (SELECT count(*) FROM team_font_variant WHERE woff2_file_id = ?) +
           (SELECT count(*) FROM team_font_variant WHERE otf_file_id = ?) +
           (SELECT count(*) FROM team_font_variant WHERE ttf_file_id = ?)) AS nrefs")

(defn- get-team-font-variant-nrefs
  [conn id]
  (-> (db/exec-one! conn [sql:get-team-font-variant-nrefs id id id id])
      (get :nrefs)))

(def ^:private
  sql:get-file-media-object-nrefs
  "SELECT ((SELECT count(*) FROM file_media_object WHERE media_id = ?) +
           (SELECT count(*) FROM file_media_object WHERE thumbnail_id = ?)) AS nrefs")

(defn- get-file-media-object-nrefs
  [conn id]
  (-> (db/exec-one! conn [sql:get-file-media-object-nrefs id id])
      (get :nrefs)))

(def ^:private sql:get-profile-nrefs
  "SELECT ((SELECT count(*) FROM profile WHERE photo_id = ?) +
           (SELECT count(*) FROM team WHERE photo_id = ?)) AS nrefs")

(defn- get-profile-nrefs
  [conn id]
  (-> (db/exec-one! conn [sql:get-profile-nrefs id id])
      (get :nrefs)))

(def ^:private
  sql:get-file-object-thumbnail-nrefs
  "SELECT (SELECT count(*) FROM file_tagged_object_thumbnail WHERE media_id = ?) AS nrefs")

(defn- get-file-object-thumbnails
  [conn id]
  (-> (db/exec-one! conn [sql:get-file-object-thumbnail-nrefs id])
      (get :nrefs)))

(def ^:private
  sql:get-file-thumbnail-nrefs
  "SELECT (SELECT count(*) FROM file_thumbnail WHERE media_id = ?) AS nrefs")

(defn- get-file-thumbnails
  [conn id]
  (-> (db/exec-one! conn [sql:get-file-thumbnail-nrefs id])
      (get :nrefs)))

(def ^:private sql:mark-freeze-in-bulk
  "UPDATE storage_object
      SET touched_at = NULL
    WHERE id = ANY(?::uuid[])")

(defn- mark-freeze-in-bulk!
  [conn ids]
  (let [ids (db/create-array conn "uuid" ids)]
    (db/exec-one! conn [sql:mark-freeze-in-bulk ids])))

(def ^:private sql:mark-delete-in-bulk
  "UPDATE storage_object
      SET deleted_at = now(),
          touched_at = NULL
    WHERE id = ANY(?::uuid[])")

(defn- mark-delete-in-bulk!
  [conn ids]
  (let [ids (db/create-array conn "uuid" ids)]
    (db/exec-one! conn [sql:mark-delete-in-bulk ids])))

;; NOTE: A getter that retrieves the key which will be used for grouping
;; ids; previously there was no value, then we introduced the `:reference`
;; prop, which was later renamed to `:bucket` and is now a string. It is
;; implemented this way for backward compatibility.

;; NOTE: we use "file-media-object" as the default value for backward
;; compatibility because during deployment old backend instances can run
;; at the same time as the new one, so we can still have storage-objects
;; created without a bucket value. And we know that if it has no value,
;; it means :file-media-object.

(defn- lookup-bucket
  [{:keys [metadata]}]
  (or (some-> metadata :bucket)
      (some-> metadata :reference d/name)
      "file-media-object"))
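
;; A quick sketch (with invented metadata maps) of how the bucket is resolved
;; for the three historical storage-object shapes described above:
(comment
  (lookup-bucket {:metadata {:bucket "team-font-variant"}})  ;; => "team-font-variant"
  (lookup-bucket {:metadata {:reference :profile}})          ;; => "profile"
  (lookup-bucket {:metadata nil}))                           ;; => "file-media-object"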

(defn- process-objects!
  [conn get-fn ids bucket]
  (loop [to-freeze #{}
         to-delete #{}
         ids (seq ids)]
    (if-let [id (first ids)]
      (let [nrefs (get-fn conn id)]
        (if (pos? nrefs)
          (do
            (l/debug :hint "processing object"
                     :id (str id)
                     :status "freeze"
                     :bucket bucket :refs nrefs)
            (recur (conj to-freeze id) to-delete (rest ids)))
          (do
            (l/debug :hint "processing object"
                     :id (str id)
                     :status "delete"
                     :bucket bucket :refs nrefs)
            (recur to-freeze (conj to-delete id) (rest ids)))))
      (do
        (some->> (seq to-freeze) (mark-freeze-in-bulk! conn))
        (some->> (seq to-delete) (mark-delete-in-bulk! conn))
        [(count to-freeze) (count to-delete)]))))
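
;; A sketch of the classification (stubbed nrefs lookup, integer ids standing
;; in for uuids, `conn` assumed to be an open connection): ids that still have
;; references are frozen, the rest are marked for deletion, and the pair of
;; counts is returned.
(comment
  (process-objects! conn
                    (fn [_conn id] (get {1 2, 2 0} id))
                    [1 2]
                    "file-media-object"))
  ;; => [1 1]  ;; one object frozen, one marked as deleted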

(defn- process-bucket!
  [conn bucket ids]
  (case bucket
    "file-media-object"     (process-objects! conn get-file-media-object-nrefs ids bucket)
    "team-font-variant"     (process-objects! conn get-team-font-variant-nrefs ids bucket)
    "file-object-thumbnail" (process-objects! conn get-file-object-thumbnails ids bucket)
    "file-thumbnail"        (process-objects! conn get-file-thumbnails ids bucket)
    "profile"               (process-objects! conn get-profile-nrefs ids bucket)
    (ex/raise :type :internal
              :code :unexpected-unknown-reference
              :hint (dm/fmt "unknown reference %" bucket))))

(def ^:private
  sql:get-touched-storage-objects
  "SELECT so.*
     FROM storage_object AS so
    WHERE so.touched_at IS NOT NULL
    ORDER BY touched_at ASC
      FOR UPDATE
     SKIP LOCKED")

(defn- group-by-bucket
  [row]
  (d/group-by lookup-bucket :id #{} row))

(defn- get-buckets
  [conn]
  (sequence
   (comp (map impl/decode-row)
         (partition-all 25)
         (mapcat group-by-bucket))
   (db/cursor conn sql:get-touched-storage-objects)))

(defn- process-touched!
  [{:keys [::db/conn]}]
  (loop [buckets (get-buckets conn)
         freezed 0
         deleted 0]
    (if-let [[bucket ids] (first buckets)]
      (let [[nfo ndo] (process-bucket! conn bucket ids)]
        (recur (rest buckets)
               (+ freezed nfo)
               (+ deleted ndo)))
      (do
        (l/inf :hint "task finished"
               :to-freeze freezed
               :to-delete deleted)

        {:freeze freezed :delete deleted}))))

(defmethod ig/pre-init-spec ::handler [_]
  (s/keys :req [::db/pool]))

(defmethod ig/init-key ::handler
  [_ cfg]
  (fn [_]
    (db/tx-run! cfg process-touched!)))

@@ -9,7 +9,7 @@
   (:require
    [app.common.data.macros :as dm]
    [app.common.exceptions :as ex]
    [app.db :as-alias db]
    [app.db :as db]
    [app.storage :as-alias sto]
    [buddy.core.codecs :as bc]
    [buddy.core.hash :as bh]
@@ -22,6 +22,13 @@
    java.nio.file.Path
    java.util.UUID))

(defn decode-row
  "Decode the storage-object row fields"
  [{:keys [metadata] :as row}]
  (cond-> row
    (some? metadata)
    (assoc :metadata (db/decode-transit-pgobject metadata))))
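
;; A quick sketch of decode-row behaviour: rows without metadata pass through
;; unchanged, while a transit-encoded pgobject is replaced by the decoded map
;; (`some-transit-pgobject` and the decoded value are assumptions used only
;; for illustration).
(comment
  (decode-row {:id 1 :metadata nil})
  ;; => {:id 1 :metadata nil}
  (decode-row {:id 2 :metadata some-transit-pgobject}))
  ;; => {:id 2 :metadata {:bucket "profile"}}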

;; --- API Definition

(defmulti put-object (fn [cfg _ _] (::sto/type cfg)))

Some files were not shown because too many files have changed in this diff.