Mirror of https://github.com/penpot/penpot.git (synced 2025-12-24 06:58:34 -05:00)

Compare commits: 2.9.0...test-inner (1392 commits)
[Commit list: 1392 commits, 369979ffe6 ... e2b55d814b; only abbreviated SHA1 hashes are preserved in this view, without author, date, or message columns.]
@@ -114,7 +114,7 @@ jobs:
          # uses the same cache as this task so we prepopulate it
          command: |
            yarn install
            yarn run playwright install chromium
            yarn run playwright install chromium --with-deps

      - run:
          name: "lint scss on frontend"
@@ -207,36 +207,6 @@ jobs:
            "npx http-server storybook-static --port 6006 --silent" \
            "npx wait-on tcp:6006 && yarn test:storybook"

  test-integration:
    docker:
      - image: penpotapp/devenv:latest

    working_directory: ~/repo
    resource_class: large

    environment:
      JAVA_OPTS: -Xmx6g -Xms2g
      NODE_OPTIONS: --max-old-space-size=4096

    steps:
      - checkout

      # Download and cache dependencies
      - restore_cache:
          keys:
            - v1-dependencies-{{ checksum "frontend/deps.edn"}}-{{ checksum "frontend/yarn.lock" }}

      - run:
          name: "integration tests"
          working_directory: "./frontend"
          command: |
            yarn install
            yarn run build:app:assets
            yarn run build:app
            yarn run build:app:libs
            yarn run playwright install chromium
            yarn run test:e2e -x --workers=4

  test-backend:
    docker:
      - image: penpotapp/devenv:latest
@@ -311,24 +281,16 @@ jobs:
workflows:
  penpot:
    jobs:
      - lint
      - test-frontend:
          requires:
            - lint: success

      - test-library:
          requires:
            - test-frontend: success
            - lint: success

      - test-components:
          requires:
            - test-frontend: success
            - lint: success

      - test-integration:
          requires:
            - test-frontend: success
            - lint: success

      - test-backend:
@@ -339,4 +301,5 @@ workflows:
          requires:
            - lint: success

      - lint
      - test-render-wasm
@@ -45,10 +45,16 @@
:potok/reify-type
{:level :error}

:missing-protocol-method
{:level :off}

:unresolved-namespace
{:level :warning
 :exclude [data_readers]}

:unused-value
{:level :off}

:single-key-in
{:level :warning}
@@ -64,6 +70,9 @@
:redundant-nested-call
{:level :off}

:redundant-str-call
{:level :off}

:earmuffed-var-not-dynamic
{:level :off}
1  .github/PULL_REQUEST_TEMPLATE.md (vendored)
@@ -13,6 +13,7 @@
- [ ] Add a detailed explanation of how to reproduce the issue and/or verify the fix, if applicable.
- [ ] Include screenshots or videos, if applicable.
- [ ] Add or modify existing integration tests in case of bugs or new features, if applicable.
- [ ] Refactor any modified SCSS files following the refactor guide.
- [ ] Check CI passes successfully.
- [ ] Update the `CHANGES.md` file, referencing the related GitHub issue, if applicable.
93  .github/workflows/build-bundle.yml (vendored, new file)
@@ -0,0 +1,93 @@
name: Bundles Builder

on:
  # Create bundle from manual action
  workflow_dispatch:
    inputs:
      gh_ref:
        description: 'Name of the branch or ref'
        type: string
        required: true
        default: 'develop'
      build_wasm:
        description: 'BUILD_WASM. Valid values: yes, no'
        type: string
        required: false
        default: 'yes'
      build_storybook:
        description: 'BUILD_STORYBOOK. Valid values: yes, no'
        type: string
        required: false
        default: 'yes'
  workflow_call:
    inputs:
      gh_ref:
        description: 'Name of the branch or ref'
        type: string
        required: true
        default: 'develop'
      build_wasm:
        description: 'BUILD_WASM. Valid values: yes, no'
        type: string
        required: false
        default: 'yes'
      build_storybook:
        description: 'BUILD_STORYBOOK. Valid values: yes, no'
        type: string
        required: false
        default: 'yes'

jobs:
  build-bundle:
    name: Build and Upload Penpot Bundle
    runs-on: ubuntu-24.04
    env:
      AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
      AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
      AWS_DEFAULT_REGION: ${{ secrets.AWS_REGION }}

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          fetch-depth: 0
          ref: ${{ inputs.gh_ref }}

      - name: Extract some useful variables
        id: vars
        run: |
          echo "gh_ref=${{ inputs.gh_ref || github.ref_name }}" >> $GITHUB_OUTPUT
          echo "bundle_version=$(git describe --tags --always)" >> $GITHUB_OUTPUT

      - name: Build bundle
        env:
          BUILD_WASM: ${{ inputs.build_wasm }}
          BUILD_STORYBOOK: ${{ inputs.build_storybook }}
        run: ./manage.sh build-bundle

      - name: Prepare directories for zipping
        run: |
          mkdir zips
          mv bundles penpot

      - name: Create zip bundle
        run: |
          echo "📦 Packaging Penpot bundle..."
          zip -r zips/penpot.zip penpot

      - name: Upload Penpot bundle to S3
        run: |
          aws s3 cp zips/penpot.zip s3://${{ secrets.S3_BUCKET }}/penpot-${{ steps.vars.outputs.gh_ref }}.zip --metadata bundle-version=${{ steps.vars.outputs.bundle_version }}

      - name: Notify Mattermost
        if: failure()
        uses: mattermost/action-mattermost-notify@master
        with:
          MATTERMOST_WEBHOOK_URL: ${{ secrets.MATTERMOST_WEBHOOK }}
          MATTERMOST_CHANNEL: bot-alerts-cicd
          TEXT: |
            ❌ 📦 *[PENPOT] Error building penpot bundles.*
            📄 Triggered from ref: `${{ steps.vars.outputs.gh_ref }}`
            Bundle version: `${{ steps.vars.outputs.bundle_version }}`
            🔗 Run: https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}
            @infra
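For local debugging, the same bundle build can be reproduced from a checkout. A minimal sketch, using the workflow's own script and its default input values (this is not an officially documented invocation):

```bash
# Rough local equivalent of the "Build bundle" step, with the workflow's default inputs
BUILD_WASM=yes BUILD_STORYBOOK=yes ./manage.sh build-bundle
```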
64  .github/workflows/build-bundles.yml (vendored)
@@ -1,64 +0,0 @@
name: Build and Upload Penpot Bundles

on:
  # Create bundle from manual action
  workflow_dispatch:
  workflow_call:
    inputs:
      gh_ref:
        description: 'Name of the branch'
        type: string
        required: true

jobs:
  build-bundles:
    name: Build and Upload Penpot Bundles
    runs-on: ubuntu-24.04
    env:
      AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
      AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
      AWS_DEFAULT_REGION: ${{ secrets.AWS_REGION }}

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          fetch-depth: 0
          ref: ${{ inputs.gh_ref }}

      - name: Extract some useful variables
        id: vars
        run: |
          echo "commit_hash=$(git rev-parse --short HEAD)" >> $GITHUB_OUTPUT
          echo "gh_branch=${{ github.base_ref || github.ref_name }}" >> $GITHUB_OUTPUT

      - name: Set up Docker Buildx for multi-arch build
        uses: docker/setup-buildx-action@v3

      - name: Run manage.sh build-bundle from host
        run: ./manage.sh build-bundle

      - name: Prepare directories for zipping
        run: |
          mkdir zips
          mv bundles penpot

      - name: Create zip bundles
        run: |
          echo "📦 Packaging Penpot bundles..."
          zip -r zips/penpot.zip penpot

      - name: Upload Penpot bundle to S3
        run: |
          aws s3 cp zips/penpot.zip s3://${{ secrets.S3_BUCKET }}/penpot-${{ steps.vars.outputs.gh_branch}}-latest.zip
          aws s3 cp zips/penpot.zip s3://${{ secrets.S3_BUCKET }}/penpot-${{ steps.vars.outputs.commit_hash }}.zip

      - name: Notify Mattermost
        if: failure()
        uses: mattermost/action-mattermost-notify@master
        with:
          MATTERMOST_WEBHOOK_URL: ${{ secrets.MATTERMOST_WEBHOOK }}
          TEXT: |
            ❌ *[PENPOT] Error during the execution of the job*
            📄 Triggered from ref: `${{ steps.vars.outputs.gh_branch}}`
            🔗 Run: https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}
15  .github/workflows/build-develop.yml (vendored)
@@ -1,12 +1,21 @@
name: Build and Upload Penpot DEVELOP Bundles
name: _DEVELOP

on:
  schedule:
    - cron: '16 5-20 * * 1-5'

jobs:
  build-develop-bundle:
    uses: ./.github/workflows/build-bundles.yml
  build-bundle:
    uses: ./.github/workflows/build-bundle.yml
    secrets: inherit
    with:
      gh_ref: "develop"
      build_wasm: "yes"
      build_storybook: "yes"

  build-docker:
    needs: build-bundle
    uses: ./.github/workflows/build-docker.yml
    secrets: inherit
    with:
      gh_ref: "develop"
36  .github/workflows/build-docker-devenv.yml (vendored, new file)
@@ -0,0 +1,36 @@
name: DevEnv Docker Image Builder

on:
  workflow_dispatch:

jobs:
  build-and-push:
    name: Build and push DevEnv Docker image
    environment: release-admins
    runs-on: ubuntu-24.04

    steps:
      - name: Checkout code
        uses: actions/checkout@v4

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3

      - name: Login to Docker Registry
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.PUB_DOCKER_USERNAME }}
          password: ${{ secrets.PUB_DOCKER_PASSWORD }}

      - name: Build and push DevEnv Docker image
        uses: docker/build-push-action@v6
        env:
          DOCKER_IMAGE: 'penpotapp/devenv'
        with:
          context: ./docker/devenv/
          file: ./docker/devenv/Dockerfile
          platforms: linux/amd64,linux/arm64
          push: true
          tags: ${{ env.DOCKER_IMAGE }}:latest
          cache-from: type=registry,ref=${{ env.DOCKER_IMAGE }}:buildcache
          cache-to: type=registry,ref=${{ env.DOCKER_IMAGE }}:buildcache,mode=max
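The build step above delegates to docker/build-push-action; a roughly equivalent manual Buildx invocation would look like the sketch below, reusing the same context, platforms, tag, and registry cache as the workflow. It assumes you are already logged in to Docker Hub with push rights to the image.

```bash
# Manual Buildx equivalent of the devenv image build (assumes a prior `docker login`)
docker buildx build \
  --platform linux/amd64,linux/arm64 \
  --file ./docker/devenv/Dockerfile \
  --tag penpotapp/devenv:latest \
  --cache-from type=registry,ref=penpotapp/devenv:buildcache \
  --cache-to type=registry,ref=penpotapp/devenv:buildcache,mode=max \
  --push \
  ./docker/devenv/
```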
152  .github/workflows/build-docker.yml (vendored, new file)
@@ -0,0 +1,152 @@
name: Docker Images Builder

on:
  workflow_dispatch:
    inputs:
      gh_ref:
        description: 'Name of the branch or ref'
        type: string
        required: true
        default: 'develop'
  workflow_call:
    inputs:
      gh_ref:
        description: 'Name of the branch or ref'
        type: string
        required: true
        default: 'develop'

jobs:
  build-and-push:
    name: Build and Push Penpot Docker Images
    runs-on: ubuntu-24.04-arm

    steps:
      - name: Checkout code
        uses: actions/checkout@v4
        with:
          fetch-depth: 0
          ref: ${{ inputs.gh_ref }}

      - name: Extract some useful variables
        id: vars
        run: |
          echo "gh_ref=${{ inputs.gh_ref || github.ref_name }}" >> $GITHUB_OUTPUT

      - name: Download Penpot Bundles
        id: bundles
        env:
          FILE_NAME: penpot-${{ steps.vars.outputs.gh_ref }}.zip
          AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
          AWS_DEFAULT_REGION: ${{ secrets.AWS_REGION }}
        run: |
          tmp=$(aws s3api head-object \
            --bucket ${{ secrets.S3_BUCKET }} \
            --key "$FILE_NAME" \
            --query 'Metadata."bundle-version"' \
            --output text)
          echo "bundle_version=$tmp" >> $GITHUB_OUTPUT
          pushd docker/images
          aws s3 cp s3://${{ secrets.S3_BUCKET }}/$FILE_NAME .
          unzip $FILE_NAME > /dev/null
          mv penpot/backend bundle-backend
          mv penpot/frontend bundle-frontend
          mv penpot/exporter bundle-exporter
          mv penpot/storybook bundle-storybook
          popd

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3

      - name: Login to Docker Registry
        uses: docker/login-action@v3
        with:
          registry: ${{ secrets.DOCKER_REGISTRY }}
          username: ${{ secrets.DOCKER_USERNAME }}
          password: ${{ secrets.DOCKER_PASSWORD }}

      - name: Extract metadata (tags, labels)
        id: meta
        uses: docker/metadata-action@v5
        with:
          images:
            frontend
            backend
            exporter
            storybook
          labels: |
            bundle_version=${{ steps.bundles.outputs.bundle_version }}

      - name: Build and push Backend Docker image
        uses: docker/build-push-action@v6
        env:
          DOCKER_IMAGE: 'backend'
          BUNDLE_PATH: './bundle-backend'
        with:
          context: ./docker/images/
          file: ./docker/images/Dockerfile.backend
          platforms: linux/amd64,linux/arm64
          push: true
          tags: ${{ secrets.DOCKER_REGISTRY }}/${{ env.DOCKER_IMAGE }}:${{ steps.vars.outputs.gh_ref }}
          labels: ${{ steps.meta.outputs.labels }}
          cache-from: type=registry,ref=${{ secrets.DOCKER_REGISTRY }}/${{ env.DOCKER_IMAGE }}:buildcache
          cache-to: type=registry,ref=${{ secrets.DOCKER_REGISTRY }}/${{ env.DOCKER_IMAGE }}:buildcache,mode=max

      - name: Build and push Frontend Docker image
        uses: docker/build-push-action@v6
        env:
          DOCKER_IMAGE: 'frontend'
          BUNDLE_PATH: './bundle-frontend'
        with:
          context: ./docker/images/
          file: ./docker/images/Dockerfile.frontend
          platforms: linux/amd64,linux/arm64
          push: true
          tags: ${{ secrets.DOCKER_REGISTRY }}/${{ env.DOCKER_IMAGE }}:${{ steps.vars.outputs.gh_ref }}
          labels: ${{ steps.meta.outputs.labels }}
          cache-from: type=registry,ref=${{ secrets.DOCKER_REGISTRY }}/${{ env.DOCKER_IMAGE }}:buildcache
          cache-to: type=registry,ref=${{ secrets.DOCKER_REGISTRY }}/${{ env.DOCKER_IMAGE }}:buildcache,mode=max

      - name: Build and push Exporter Docker image
        uses: docker/build-push-action@v6
        env:
          DOCKER_IMAGE: 'exporter'
          BUNDLE_PATH: './bundle-exporter'
        with:
          context: ./docker/images/
          file: ./docker/images/Dockerfile.exporter
          platforms: linux/amd64,linux/arm64
          push: true
          tags: ${{ secrets.DOCKER_REGISTRY }}/${{ env.DOCKER_IMAGE }}:${{ steps.vars.outputs.gh_ref }}
          labels: ${{ steps.meta.outputs.labels }}
          cache-from: type=registry,ref=${{ secrets.DOCKER_REGISTRY }}/${{ env.DOCKER_IMAGE }}:buildcache
          cache-to: type=registry,ref=${{ secrets.DOCKER_REGISTRY }}/${{ env.DOCKER_IMAGE }}:buildcache,mode=max

      - name: Build and push Storybook Docker image
        uses: docker/build-push-action@v6
        env:
          DOCKER_IMAGE: 'storybook'
          BUNDLE_PATH: './bundle-storybook'
        with:
          context: ./docker/images/
          file: ./docker/images/Dockerfile.storybook
          platforms: linux/amd64,linux/arm64
          push: true
          tags: ${{ secrets.DOCKER_REGISTRY }}/${{ env.DOCKER_IMAGE }}:${{ steps.vars.outputs.gh_ref }}
          labels: ${{ steps.meta.outputs.labels }}
          cache-from: type=registry,ref=${{ secrets.DOCKER_REGISTRY }}/${{ env.DOCKER_IMAGE }}:buildcache
          cache-to: type=registry,ref=${{ secrets.DOCKER_REGISTRY }}/${{ env.DOCKER_IMAGE }}:buildcache,mode=max

      - name: Notify Mattermost
        if: failure()
        uses: mattermost/action-mattermost-notify@master
        with:
          MATTERMOST_WEBHOOK_URL: ${{ secrets.MATTERMOST_WEBHOOK }}
          MATTERMOST_CHANNEL: bot-alerts-cicd
          TEXT: |
            ❌ 🐳 *[PENPOT] Error building penpot docker images.*
            📄 Triggered from ref: `${{ steps.vars.outputs.gh_ref }}`
            📦 Bundle: `${{ steps.bundles.outputs.bundle_version }}`
            🔗 Run: https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}
            @infra
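Note how the bundle version travels between the two workflows as S3 object metadata: build-bundle.yml writes it with `aws s3 cp --metadata`, and the "Download Penpot Bundles" step above reads it back with `aws s3api head-object`. A standalone sketch of that round trip (bucket and key names are placeholders):

```bash
# Write the metadata when uploading the bundle (as build-bundle.yml does)
aws s3 cp zips/penpot.zip "s3://<bucket>/penpot-develop.zip" \
    --metadata bundle-version="$(git describe --tags --always)"

# Read it back before building the images (as build-docker.yml does)
aws s3api head-object \
    --bucket "<bucket>" \
    --key "penpot-develop.zip" \
    --query 'Metadata."bundle-version"' \
    --output text
```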
17  .github/workflows/build-staging.yml (vendored)
@@ -1,12 +1,21 @@
name: Build and Upload Penpot STAGING Bundles
name: _STAGING

on:
  schedule:
    - cron: '0 5 * * 1-5'
    - cron: '36 5-20 * * 1-5'

jobs:
  build-staging-bundle:
    uses: ./.github/workflows/build-bundles.yml
  build-bundle:
    uses: ./.github/workflows/build-bundle.yml
    secrets: inherit
    with:
      gh_ref: "staging"
      build_wasm: "yes"
      build_storybook: "yes"

  build-docker:
    needs: build-bundle
    uses: ./.github/workflows/build-docker.yml
    secrets: inherit
    with:
      gh_ref: "staging"
30  .github/workflows/build-tag.yml (vendored, new file)
@@ -0,0 +1,30 @@
name: _TAG

on:
  push:
    tags:
      - '*'

jobs:
  build-bundle:
    uses: ./.github/workflows/build-bundle.yml
    secrets: inherit
    with:
      gh_ref: ${{ github.ref_name }}
      build_wasm: "no"
      build_storybook: "yes"

  build-docker:
    needs: build-bundle
    uses: ./.github/workflows/build-docker.yml
    secrets: inherit
    with:
      gh_ref: ${{ github.ref_name }}

  publish-final-tag:
    if: ${{ !contains(github.ref_name, '-RC') && !contains(github.ref_name, '-alpha') && !contains(github.ref_name, '-beta') && contains(github.ref_name, '.') }}
    needs: build-docker
    uses: ./.github/workflows/release.yml
    secrets: inherit
    with:
      gh_ref: ${{ github.ref_name }}
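The `publish-final-tag` guard only fires for tags that contain a dot and none of the pre-release suffixes, so only final version tags reach the release workflow. A few illustrative tag names (the specific versions are examples, not taken from the repository):

```bash
# How the publish-final-tag condition treats some example tag names
#   2.12.0        -> contains '.', no -RC/-alpha/-beta  -> release is published
#   2.12.0-RC1    -> contains '-RC'                     -> skipped
#   2.12.0-beta   -> contains '-beta'                   -> skipped
#   nightly       -> no '.'                             -> skipped
```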
2  .github/workflows/commit-checker.yml (vendored)
@@ -26,7 +26,7 @@ jobs:
      - name: Check Commit Type
        uses: gsactions/commit-message-checker@v2
        with:
          pattern: '^(Merge|Revert|:(lipstick|globe_with_meridians|wrench|books|arrow_up|arrow_down|zap|ambulance|construction|boom|fire|whale|bug|sparkles|paperclip|tada|recycle):)\s[A-Z].*[^.]$'
          pattern: '^(((:(lipstick|globe_with_meridians|wrench|books|arrow_up|arrow_down|zap|ambulance|construction|boom|fire|whale|bug|sparkles|paperclip|tada|recycle|rewind|construction_worker):)\s[A-Z].*[^.])|(Merge|Revert).+[^.])$'
          flags: 'gm'
          error: 'Commit should match CONTRIBUTING.md guideline'
          checkAllCommitMessages: 'true' # optional: this checks all commits associated with a pull request
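The tightened pattern keeps the gitmoji-prefixed format (emoji code, space, capitalized summary, no trailing period) while letting `Merge` and `Revert` commits carry arbitrary text. A quick way to try messages against it locally, assuming GNU grep with PCRE support (the action itself uses its own regex engine):

```bash
pattern='^(((:(lipstick|globe_with_meridians|wrench|books|arrow_up|arrow_down|zap|ambulance|construction|boom|fire|whale|bug|sparkles|paperclip|tada|recycle|rewind|construction_worker):)\s[A-Z].*[^.])|(Merge|Revert).+[^.])$'

echo ':bug: Fix broken export dialog'    | grep -qP "$pattern" && echo accepted
echo 'Merge branch develop into staging' | grep -qP "$pattern" && echo accepted
echo ':bug: fix broken export dialog'    | grep -qP "$pattern" || echo rejected   # lowercase summary
echo ':bug: Fix broken export dialog.'   | grep -qP "$pattern" || echo rejected   # trailing period
```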
114  .github/workflows/release.yml (vendored, new file)
@@ -0,0 +1,114 @@
name: Release Publisher

on:
  workflow_dispatch:
    inputs:
      gh_ref:
        description: 'Tag to release'
        type: string
        required: true
  workflow_call:
    inputs:
      gh_ref:
        description: 'Tag to release'
        type: string
        required: true

permissions:
  contents: write

jobs:
  release:
    environment: release-admins
    runs-on: ubuntu-24.04
    outputs:
      version: ${{ steps.vars.outputs.gh_ref }}
      release_notes: ${{ steps.extract_release_notes.outputs.release_notes }}
    steps:
      - name: Extract some useful variables
        id: vars
        run: |
          echo "gh_ref=${{ inputs.gh_ref || github.ref_name }}" >> $GITHUB_OUTPUT

      - name: Checkout code
        uses: actions/checkout@v4
        with:
          fetch-depth: 0
          ref: ${{ steps.vars.outputs.gh_ref }}

      # --- Publicly release the docker images ---
      - name: Configure ECR credentials
        uses: aws-actions/configure-aws-credentials@v4
        with:
          aws-access-key-id: ${{ secrets.DOCKER_USERNAME }}
          aws-secret-access-key: ${{ secrets.DOCKER_PASSWORD }}
          aws-region: ${{ secrets.AWS_REGION }}

      - name: Install Skopeo
        run: |
          sudo apt-get update -y
          sudo apt-get install -y skopeo

      - name: Copy images from AWS ECR to Docker Hub
        env:
          AWS_REGION: ${{ secrets.AWS_REGION }}
          DOCKER_REGISTRY: ${{ secrets.DOCKER_REGISTRY }}
          PUB_DOCKER_USERNAME: ${{ secrets.PUB_DOCKER_USERNAME }}
          PUB_DOCKER_PASSWORD: ${{ secrets.PUB_DOCKER_PASSWORD }}
          TAG: ${{ steps.vars.outputs.gh_ref }}
        run: |
          aws ecr get-login-password --region $AWS_REGION | \
            skopeo login --username AWS --password-stdin \
            $DOCKER_REGISTRY

          echo "$PUB_DOCKER_PASSWORD" | skopeo login --username "$PUB_DOCKER_USERNAME" --password-stdin docker.io

          IMAGES=("frontend" "backend" "exporter" "storybook")

          for image in "${IMAGES[@]}"; do
            skopeo copy --all \
              docker://$DOCKER_REGISTRY/$image:$TAG \
              docker://docker.io/penpotapp/$image:$TAG

            for alias in main latest; do
              skopeo copy --all \
                docker://$DOCKER_REGISTRY/$image:$TAG \
                docker://docker.io/penpotapp/$image:$alias
            done
          done

      # --- Release notes extraction ---
      - name: Extract release notes from CHANGES.md
        id: extract_release_notes
        env:
          TAG: ${{ steps.vars.outputs.gh_ref }}
        run: |
          RELEASE_NOTES=$(awk "/^## $TAG$/{flag=1; next} /^## /{flag=0} flag" CHANGES.md | awk '{$1=$1};1')
          if [ -z "$RELEASE_NOTES" ]; then
            RELEASE_NOTES="No changes for $TAG according to CHANGES.md"
          fi
          echo "release_notes<<EOF" >> $GITHUB_OUTPUT
          echo "$RELEASE_NOTES" >> $GITHUB_OUTPUT
          echo "EOF" >> $GITHUB_OUTPUT

      # --- Create GitHub release ---
      - name: Create GitHub release
        uses: softprops/action-gh-release@v1
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          tag_name: ${{ steps.vars.outputs.gh_ref }}
          name: ${{ steps.vars.outputs.gh_ref }}
          body: ${{ steps.extract_release_notes.outputs.release_notes }}

      - name: Notify Mattermost
        if: failure()
        uses: mattermost/action-mattermost-notify@master
        with:
          MATTERMOST_WEBHOOK_URL: ${{ secrets.MATTERMOST_WEBHOOK }}
          MATTERMOST_CHANNEL: bot-alerts-cicd
          TEXT: |
            ❌ 🚀 *[PENPOT] Error releasing penpot.*
            📄 Triggered from ref: `${{ steps.vars.outputs.gh_ref }}`
            🔗 Run: https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}
            @infra
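The release-notes step can be dry-run locally against CHANGES.md before tagging; the sketch below reuses the workflow's awk command verbatim, with an illustrative TAG value:

```bash
# Local dry-run of the release notes extraction (TAG value is illustrative)
TAG="2.12.0"
RELEASE_NOTES=$(awk "/^## $TAG$/{flag=1; next} /^## /{flag=0} flag" CHANGES.md | awk '{$1=$1};1')
if [ -z "$RELEASE_NOTES" ]; then
  RELEASE_NOTES="No changes for $TAG according to CHANGES.md"
fi
printf '%s\n' "$RELEASE_NOTES"
```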
315
.github/workflows/tests.yml
vendored
Normal file
315
.github/workflows/tests.yml
vendored
Normal file
@@ -0,0 +1,315 @@
|
||||
name: "CI"
|
||||
|
||||
defaults:
|
||||
run:
|
||||
shell: bash
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
types:
|
||||
- opened
|
||||
- edited
|
||||
- reopened
|
||||
- synchronize
|
||||
push:
|
||||
branches:
|
||||
- develop
|
||||
- staging
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
lint:
|
||||
name: "Code Linter"
|
||||
runs-on: ubuntu-24.04
|
||||
container: penpotapp/devenv:latest
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Check clojure code format
|
||||
run: |
|
||||
corepack enable;
|
||||
corepack install;
|
||||
yarn install
|
||||
yarn run fmt:clj:check
|
||||
|
||||
test-common:
|
||||
name: "Common Tests"
|
||||
runs-on: ubuntu-24.04
|
||||
container: penpotapp/devenv:latest
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Run tests on JVM
|
||||
working-directory: ./common
|
||||
run: |
|
||||
clojure -M:dev:test
|
||||
|
||||
- name: Run tests on NODE
|
||||
working-directory: ./common
|
||||
run: |
|
||||
corepack enable;
|
||||
corepack install;
|
||||
yarn install;
|
||||
yarn run test;
|
||||
|
||||
test-frontend:
|
||||
name: "Frontend Tests"
|
||||
runs-on: ubuntu-24.04
|
||||
container: penpotapp/devenv:latest
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Unit Tests
|
||||
working-directory: ./frontend
|
||||
run: |
|
||||
corepack enable;
|
||||
corepack install;
|
||||
yarn install;
|
||||
yarn run test;
|
||||
|
||||
- name: Component Tests
|
||||
working-directory: ./frontend
|
||||
run: |
|
||||
yarn run playwright install chromium --with-deps;
|
||||
yarn run build:storybook
|
||||
|
||||
npx concurrently -k -s first -n "SB,TEST" -c "magenta,blue" \
|
||||
"npx http-server storybook-static --port 6006 --silent" \
|
||||
"npx wait-on tcp:6006 && yarn test:storybook"
|
||||
|
||||
- name: Check SCSS Format
|
||||
working-directory: ./frontend
|
||||
run: |
|
||||
yarn run lint:scss;
|
||||
|
||||
test-backend:
|
||||
name: "Backend Tests"
|
||||
runs-on: ubuntu-24.04
|
||||
container: penpotapp/devenv:latest
|
||||
|
||||
services:
|
||||
postgres:
|
||||
image: postgres:17
|
||||
# Provide the password for postgres
|
||||
env:
|
||||
POSTGRES_USER: penpot_test
|
||||
POSTGRES_PASSWORD: penpot_test
|
||||
POSTGRES_DB: penpot_test
|
||||
|
||||
        # Set health checks to wait until postgres has started
        options: >-
          --health-cmd pg_isready
          --health-interval 10s
          --health-timeout 5s
          --health-retries 5

      redis:
        image: valkey/valkey:9

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Run tests
        working-directory: ./backend
        env:
          PENPOT_TEST_DATABASE_URI: "postgresql://postgres/penpot_test"
          PENPOT_TEST_DATABASE_USERNAME: penpot_test
          PENPOT_TEST_DATABASE_PASSWORD: penpot_test
          PENPOT_TEST_REDIS_URI: "redis://redis/1"

        run: |
          clojure -M:dev:test --reporter kaocha.report/documentation

  test-library:
    name: "Library Tests"
    runs-on: ubuntu-24.04
    container: penpotapp/devenv:latest

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Run tests
        working-directory: ./library
        run: |
          corepack enable;
          corepack install;
          yarn install;
          yarn run build:bundle;
          yarn run test;

  build-integration:
    name: "Build Integration Bundle"
    runs-on: ubuntu-24.04
    container: penpotapp/devenv:latest

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Build Bundle
        working-directory: ./frontend
        run: |
          corepack enable;
          corepack install;
          yarn install
          yarn run build:app:assets
          yarn run build:app
          yarn run build:app:libs

      - name: Build WASM
        working-directory: "./render-wasm"
        run: |
          ./build release

      - name: Store Bundle Cache
        uses: actions/cache@v4
        with:
          key: "integration-bundle-${{ github.sha }}"
          path: frontend/resources/public

  test-integration-1:
    name: "Integration Tests 1/4"
    runs-on: ubuntu-24.04
    container: penpotapp/devenv:latest
    needs: build-integration

    steps:
      - name: Checkout Repository
        uses: actions/checkout@v4

      - name: Restore Cache
        uses: actions/cache/restore@v4
        with:
          key: "integration-bundle-${{ github.sha }}"
          path: frontend/resources/public

      - name: Run Tests
        working-directory: ./frontend
        run: |
          corepack enable;
          corepack install;
          yarn install;
          yarn run playwright install chromium --with-deps;
          yarn run test:e2e -x --workers=2 --reporter=list --shard="1/4";

      - name: Upload test result
        uses: actions/upload-artifact@v4
        if: always()
        with:
          name: integration-tests-result-1
          path: frontend/test-results/
          overwrite: true
          retention-days: 3

  test-integration-2:
    name: "Integration Tests 2/4"
    runs-on: ubuntu-24.04
    container: penpotapp/devenv:latest
    needs: build-integration

    steps:
      - name: Checkout Repository
        uses: actions/checkout@v4

      - name: Restore Cache
        uses: actions/cache/restore@v4
        with:
          key: "integration-bundle-${{ github.sha }}"
          path: frontend/resources/public

      - name: Run Tests
        working-directory: ./frontend
        run: |
          corepack enable;
          corepack install;
          yarn install;
          yarn run playwright install chromium --with-deps;
          yarn run test:e2e -x --workers=2 --reporter=list --shard "2/4";

      - name: Upload test result
        uses: actions/upload-artifact@v4
        if: always()
        with:
          name: integration-tests-result-2
          path: frontend/test-results/
          overwrite: true
          retention-days: 3

  test-integration-3:
    name: "Integration Tests 3/4"
    runs-on: ubuntu-24.04
    container: penpotapp/devenv:latest
    needs: build-integration

    steps:
      - name: Checkout Repository
        uses: actions/checkout@v4

      - name: Restore Cache
        uses: actions/cache/restore@v4
        with:
          key: "integration-bundle-${{ github.sha }}"
          path: frontend/resources/public

      - name: Run Tests
        working-directory: ./frontend
        run: |
          corepack enable;
          corepack install;
          yarn install;
          yarn run playwright install chromium --with-deps;
          yarn run test:e2e -x --workers=2 --reporter=list --shard "3/4";

      - name: Upload test result
        uses: actions/upload-artifact@v4
        if: always()
        with:
          name: integration-tests-result-3
          path: frontend/test-results/
          overwrite: true
          retention-days: 3

  test-integration-4:
    name: "Integration Tests 4/4"
    runs-on: ubuntu-24.04
    container: penpotapp/devenv:latest
    needs: build-integration

    steps:
      - name: Checkout Repository
        uses: actions/checkout@v4

      - name: Restore Cache
        uses: actions/cache/restore@v4
        with:
          key: "integration-bundle-${{ github.sha }}"
          path: frontend/resources/public

      - name: Run Tests
        working-directory: ./frontend
        run: |
          corepack enable;
          corepack install;
          yarn install;
          yarn run playwright install chromium --with-deps;
          yarn run test:e2e -x --workers=2 --reporter=list --shard "4/4";

      - name: Upload test result
        uses: actions/upload-artifact@v4
        if: always()
        with:
          name: integration-tests-result-4
          path: frontend/test-results/
          overwrite: true
          retention-days: 3

1 .gitignore vendored
@@ -31,6 +31,7 @@
/.clj-kondo/.cache
/_dump
/notes
/playground/
/backend/*.md
/backend/*.sql
/backend/*.txt

249 CHANGES.md
@@ -1,6 +1,250 @@
# CHANGELOG

## 2.9.0 (Unreleased)
## 2.12.0 (Unreleased)

### :boom: Breaking changes & Deprecations

#### Backend RPC API changes

The backend RPC API URLs are changed from `/api/rpc/command/<name>` to
`/api/main/methods/<name>`. The previous path is preserved for backward
compatibility; however, if you are a user of this API, it is strongly
recommended that you adapt your code to use the new path.
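
As a minimal sketch, a request against the new path might look like the
following (the access token and the `get-profile` method name are placeholders;
adjust them to your own setup):

```bash
# Hypothetical example: calling an RPC method through the new path.
# The old /api/rpc/command/<name> path still works but is deprecated.
curl -s \
  -H "Authorization: Token $PENPOT_ACCESS_TOKEN" \
  -H "Accept: application/json" \
  "https://<your_domain>/api/main/methods/get-profile"
```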

#### Updated SSO Callback URL

The OAuth / Single Sign-On (SSO) callback endpoint has changed to
align with the new OpenID Connect (OIDC) implementation.

Old callback URL:

```
https://<your_domain>/api/auth/oauth/<oauth_provider>/callback
```

New callback URL:

```
https://<your_domain>/api/auth/oidc/callback
```

**Action required:**

If you have SSO/Social-Auth configured on your on-premise instance,
the following actions are required before updating:

Update your OAuth or SSO provider configuration (e.g., Okta, Google,
Azure AD, etc.) to use the new callback URL. Failure to update may
result in authentication failures after upgrading.

**Reason for change:**

This update standardizes all authentication flows under a single URL
and makes it more modular, enabling SSO auth providers to be configured
dynamically.

#### Changes on default docker compose

We have updated the `docker/images/docker-compose.yaml` with a small
change related to the `PENPOT_SECRET_KEY`. Since this version, this
environment variable is also required by the exporter, so if you are using
Penpot on-premise you will need to apply the same change in your own
`docker-compose.yaml` file (see the sketch below).

We have removed the Minio server from the `docker/images/docker-compose.yml`
example. It's still usable as before, we just removed the example.

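A minimal sketch of that change, assuming an env-file / shell style setup (the
value is a placeholder; in `docker-compose.yaml` this goes into the exporter
service's environment and must match the backend's secret):

```bash
# Hypothetical excerpt: the exporter now needs the same PENPOT_SECRET_KEY as the
# backend/frontend containers. Use your own strong random secret.
export PENPOT_SECRET_KEY="use-a-strong-random-secret-here"
```
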
### :rocket: Epics and highlights

### :heart: Community contributions (Thank you!)

- Ensure consistent snap behavior across all zoom levels [Github #7774](https://github.com/penpot/penpot/pull/7774) by [@Tokytome](https://github.com/Tokytome)

### :sparkles: New features & Enhancements

- Add the ability to select boards to export as PDF [Taiga #12320](https://tree.taiga.io/project/penpot/issue/12320)
- Add toggle for switching boolean property values [Taiga #12341](https://tree.taiga.io/project/penpot/us/12341)
- Make the file export process more reliable [Taiga #12555](https://tree.taiga.io/project/penpot/us/12555)
- Add auth flow changes [Taiga #12333](https://tree.taiga.io/project/penpot/us/12333)
- Add new shape validation mechanism for shapes [Github #7696](https://github.com/penpot/penpot/pull/7696)
- Apply color tokens from sidebar [Taiga #11353](https://tree.taiga.io/project/penpot/us/11353)
- Display tokens in the inspect tab [Taiga #9313](https://tree.taiga.io/project/penpot/us/9313)
- Refactor clipboard behavior to assess some minor inconsistencies and make pasting binary data faster. [Taiga #12571](https://tree.taiga.io/project/penpot/task/12571)

### :bug: Bugs fixed

- Fix text line-height values are wrong [Taiga #12252](https://tree.taiga.io/project/penpot/issue/12252)
- Fix an error translation [Taiga #12402](https://tree.taiga.io/project/penpot/issue/12402)
- Fix pan cursor not disabling viewport guides [Github #6985](https://github.com/penpot/penpot/issues/6985)
- Fix viewport resize on locked shapes [Taiga #11974](https://tree.taiga.io/project/penpot/issue/11974)
- Fix nested variant in a component doesn't keep inherited overrides [Taiga #12299](https://tree.taiga.io/project/penpot/issue/12299)
- Fix on copy instance inside a components chain touched are missing [Taiga #12371](https://tree.taiga.io/project/penpot/issue/12371)
- Fix problem with multiple selection and shadows [Github #7437](https://github.com/penpot/penpot/issues/7437)
- Fix search shortcut [Taiga #10265](https://tree.taiga.io/project/penpot/issue/10265)
- Fix shortcut conflict in text editor (increase/decrease font size vs word selection)
- Fix problem with plugins generating code for pages different than current one [Taiga #12312](https://tree.taiga.io/project/penpot/issue/12312)
- Fix input confirmation behavior is not uniform [Taiga #12294](https://tree.taiga.io/project/penpot/issue/12294)
- Fix copy/pasting application/transit+json [Taiga #12721](https://tree.taiga.io/project/penpot/issue/12721)

## 2.11.1

- Fix WEBP shape export on docker images [Taiga #3838](https://tree.taiga.io/project/penpot/issue/3838)

## 2.11.0

### :boom: Breaking changes & Deprecations

- Deprecated configuration variables with the prefix `PENPOT_ASSETS_*`; they will be
  removed in future versions (see the sketch after this list):

  - The `PENPOT_ASSETS_STORAGE_BACKEND` becomes `PENPOT_OBJECTS_STORAGE_BACKEND` and its
    value changes from (`assets-fs` or `assets-s3`) to (`fs` or `s3`)
  - The `PENPOT_STORAGE_ASSETS_FS_DIRECTORY` becomes `PENPOT_OBJECTS_STORAGE_FS_DIRECTORY`
  - The `PENPOT_STORAGE_ASSETS_S3_BUCKET` becomes `PENPOT_OBJECTS_STORAGE_S3_BUCKET`
  - The `PENPOT_STORAGE_ASSETS_S3_REGION` becomes `PENPOT_OBJECTS_STORAGE_S3_REGION`
  - The `PENPOT_STORAGE_ASSETS_S3_ENDPOINT` becomes `PENPOT_OBJECTS_STORAGE_S3_ENDPOINT`
  - The `PENPOT_STORAGE_ASSETS_S3_IO_THREADS` is replaced (see below)

- Add `PENPOT_NETTY_IO_THREADS` and `PENPOT_EXECUTOR_THREADS` variables to provide
  control over the concurrency of the shared resources used by netty. Penpot uses the
  netty IO threads for AWS S3 SDK and Redis/Valkey communication, and the executor
  threads to perform tasks outside the HTTP serving threads, such as cache invalidation,
  S3 response completion, configuration reloading and many other auxiliary tasks. By
  default both use half the number of available CPUs, with a minimum of 2. You should
  not touch these variables unless you know what you are doing.

- Replace the `PENPOT_STORAGE_ASSETS_S3_IO_THREADS` with a more general configuration,
  `PENPOT_NETTY_IO_THREADS`, used to configure the netty resources shared across the
  services that use netty internally (Redis connection, S3 SDK client). This
  configuration is not commonly used, so no real impact is expected for most users.

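For operators updating an on-premise configuration, the renames above roughly translate
to the following sketch (the values are placeholders, not defaults; keep the values from
your current deployment):

```bash
# Hypothetical mapping from the old PENPOT_ASSETS_* / PENPOT_STORAGE_ASSETS_*
# variables to the new PENPOT_OBJECTS_STORAGE_* names.
export PENPOT_OBJECTS_STORAGE_BACKEND=s3                           # was PENPOT_ASSETS_STORAGE_BACKEND=assets-s3
export PENPOT_OBJECTS_STORAGE_S3_BUCKET=penpot                     # was PENPOT_STORAGE_ASSETS_S3_BUCKET
export PENPOT_OBJECTS_STORAGE_S3_REGION=eu-west-1                  # was PENPOT_STORAGE_ASSETS_S3_REGION
export PENPOT_OBJECTS_STORAGE_S3_ENDPOINT=https://s3.example.com   # was PENPOT_STORAGE_ASSETS_S3_ENDPOINT

# Optional tuning knobs; leave unset unless you know what you are doing.
# export PENPOT_NETTY_IO_THREADS=4
# export PENPOT_EXECUTOR_THREADS=4
```
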
### :sparkles: New features & Enhancements
|
||||
|
||||
- New composite token: Typography [Taiga #10200](https://tree.taiga.io/project/penpot/us/10200)
|
||||
- Show current Penpot version [Taiga #11603](https://tree.taiga.io/project/penpot/us/11603)
|
||||
- Switch several variant copies at the same time [Taiga #11411](https://tree.taiga.io/project/penpot/us/11411)
|
||||
- Invitations management improvements [Taiga #3479](https://tree.taiga.io/project/penpot/us/3479)
|
||||
- Alternative ways of creating variants - Button Viewport [Taiga #11931](https://tree.taiga.io/project/penpot/us/11931)
|
||||
- Reorder properties for a component [Taiga #10225](https://tree.taiga.io/project/penpot/us/10225)
|
||||
- File Data storage layout refactor [Github #7345](https://github.com/penpot/penpot/pull/7345)
|
||||
- Make several queries optimization on comment threads [Github #7506](https://github.com/penpot/penpot/pull/7506)
|
||||
|
||||
### :bug: Bugs fixed
|
||||
|
||||
- Fix selection problems when devtools open [Taiga #11950](https://tree.taiga.io/project/penpot/issue/11950)
|
||||
- Fix long font names overlap [Taiga #11844](https://tree.taiga.io/project/penpot/issue/11844)
|
||||
- Fix paste behavior according to the selected element [Taiga #11979](https://tree.taiga.io/project/penpot/issue/11979)
|
||||
- Fix problem with export size [Github #7160](https://github.com/penpot/penpot/issues/7160)
|
||||
- Fix multi level library dependencies [Taiga #12155](https://tree.taiga.io/project/penpot/issue/12155)
|
||||
- Fix component context menu options order in assets tab [Taiga #11941](https://tree.taiga.io/project/penpot/issue/11941)
|
||||
- Fix error updating library [Taiga #12218](https://tree.taiga.io/project/penpot/issue/12218)
|
||||
- Fix restoring a variant in another file makes it overlap the existing variant [Taiga #12049](https://tree.taiga.io/project/penpot/issue/12049)
|
||||
- Fix auto-width changes to fixed when switching variants [Taiga #12172](https://tree.taiga.io/project/penpot/issue/12172)
|
||||
- Fix component number has no singular translation string [Taiga #12106](https://tree.taiga.io/project/penpot/issue/12106)
|
||||
- Fix adding/removing identical text fills [Taiga #12287](https://tree.taiga.io/project/penpot/issue/12287)
|
||||
- Fix scroll on the inspect tab [Taiga #12293](https://tree.taiga.io/project/penpot/issue/12293)
|
||||
- Fix lock proportion tooltip [Taiga #12326](https://tree.taiga.io/project/penpot/issue/12326)
|
||||
- Fix internal Error when selecting a set by name in the token theme editor [Taiga #12310](https://tree.taiga.io/project/penpot/issue/12310)
|
||||
- Fix drag & drop functionality is swapping instead of reordering [Taiga #12254](https://tree.taiga.io/project/penpot/issue/12254)
|
||||
- Fix variants not synchronizing tokens on switch [Taiga #12290](https://tree.taiga.io/project/penpot/issue/12290)
|
||||
- Fix incorrect behavior of Alt + Drag for variants [Taiga #12309](https://tree.taiga.io/project/penpot/issue/12309)
|
||||
- Fix text override is lost after switch [Taiga #12269](https://tree.taiga.io/project/penpot/issue/12269)
|
||||
- Fix exporting a board crashing the app [Taiga #12384](https://tree.taiga.io/project/penpot/issue/12384)
|
||||
- Fix nested variant in a component doesn't keep inherited overrides [Taiga #12299](https://tree.taiga.io/project/penpot/issue/12299)
|
||||
- Fix selected colors not showing colors from children shapes in multiple selection [Taiga #12384](https://tree.taiga.io/project/penpot/issue/12385)
|
||||
- Fix scrollbar issue in design tab [Taiga #12367](https://tree.taiga.io/project/penpot/issue/12367)
|
||||
- Fix library update notifications showing when they should not [Taiga #12397](https://tree.taiga.io/project/penpot/issue/12397)
|
||||
- Fix remove flex button doesn’t work within variant [Taiga #12314](https://tree.taiga.io/project/penpot/issue/12314)
|
||||
- Fix an error translation [Taiga #12402](https://tree.taiga.io/project/penpot/issue/12402)
|
||||
- Fix problem with certain text input in some editable labels (pages, components, tokens...) being in conflict with the drag/drop functionality [Taiga #12316](https://tree.taiga.io/project/penpot/issue/12316)
|
||||
- Fix not controlled theme renaming [Taiga #12411](https://tree.taiga.io/project/penpot/issue/12411)
|
||||
- Fix paste without selection sends the new element in the back [Taiga #12382](https://tree.taiga.io/project/penpot/issue/12382)
|
||||
- Fix options button does not work for comments created in the lower part of the screen [Taiga #12422](https://tree.taiga.io/project/penpot/issue/12422)
|
||||
- Fix problem when checking usage with removed teams [Taiga #12442](https://tree.taiga.io/project/penpot/issue/12442)
|
||||
- Fix focus mode persisting across page/file navigation [Taiga #12469](https://tree.taiga.io/project/penpot/issue/12469)
|
||||
- Fix shadow color validation [Github #7705](https://github.com/penpot/penpot/pull/7705)
|
||||
- Fix exception on selection blend-mode using keyboard [Github #7710](https://github.com/penpot/penpot/pull/7710)
|
||||
- Fix crash when using decimal (floating-point) values for X/Y or width/height [Taiga #12543](https://tree.taiga.io/project/penpot/issue/12543)
|
||||
|
||||
## 2.10.1
|
||||
|
||||
### :sparkles: New features & Enhancements
|
||||
|
||||
- Improve workspace file loading [Github 7366](https://github.com/penpot/penpot/pull/7366)
|
||||
|
||||
### :bug: Bugs fixed
|
||||
|
||||
- Fix regression with text shapes creation with Plugins API [Taiga #12244](https://tree.taiga.io/project/penpot/issue/12244)
|
||||
|
||||
## 2.10.0
|
||||
|
||||
### :rocket: Epics and highlights
|
||||
|
||||
- Variants
|
||||
|
||||
### :boom: Breaking changes & Deprecations
|
||||
|
||||
### :heart: Community contributions (Thank you!)
|
||||
|
||||
### :sparkles: New features & Enhancements
|
||||
|
||||
- Add efficiency enhancements to right sidebar [Github #7182](https://github.com/penpot/penpot/pull/7182)
|
||||
- Add defaults for artboard drawing [Taiga #494](https://tree.taiga.io/project/penpot/us/494?milestone=465047)
|
||||
- Continuous display of distances between elements when moving a layer with the keyboard [Taiga #1780](https://tree.taiga.io/project/penpot/us/1780)
|
||||
- New Number token - unitless values [Taiga #10936](https://tree.taiga.io/project/penpot/us/10936)
|
||||
- New font-family token [Taiga #10937](https://tree.taiga.io/project/penpot/us/10937)
|
||||
- New text case token [Taiga #10942](https://tree.taiga.io/project/penpot/us/10942)
|
||||
- New text-decoration token [Taiga #10941](https://tree.taiga.io/project/penpot/us/10941)
|
||||
- New letter spacing token [Taiga #10940](https://tree.taiga.io/project/penpot/us/10940)
|
||||
- New font weight token [Taiga #10939](https://tree.taiga.io/project/penpot/us/10939)
|
||||
- Upgrade Node to v22.18.0 [Github #7283](https://github.com/penpot/penpot/pull/7283)
|
||||
- Upgrade the base docker image for penpot frontend to v1.29.1 [Github #7283](https://github.com/penpot/penpot/pull/7283)
|
||||
- Create variant from an existing component [Taiga #2088](https://tree.taiga.io/project/penpot/us/2088)
|
||||
- Create variant from an existing variant [Taiga #8282](https://tree.taiga.io/project/penpot/us/8282)
|
||||
- Actions over a component with variants [Taiga #10503](https://tree.taiga.io/project/penpot/us/10503)
|
||||
- Create a variant by dragging a component into a component with variants [Taiga #8134](https://tree.taiga.io/project/penpot/us/8134)
|
||||
- Transform a variant into an individual component [Taiga #8141](https://tree.taiga.io/project/penpot/us/8141)
|
||||
- Delete variant [Taiga #6890](https://tree.taiga.io/project/penpot/us/6890)
|
||||
- Restore an orphaned copy of a variant [Taiga #10446](https://tree.taiga.io/project/penpot/us/10446)
|
||||
- Add, Edit & Delete variant properties name and value [Taiga #6892](https://tree.taiga.io/project/penpot/us/6892)
|
||||
- Retrieve variants [Taiga #6888](https://tree.taiga.io/project/penpot/us/6888)
|
||||
- Retrieve variants with nested components [Taiga #10277](https://tree.taiga.io/project/penpot/us/10277)
|
||||
- Create variants in bulk from existing components [Taiga #7926](https://tree.taiga.io/project/penpot/us/7926)
|
||||
- Alternative ways of creating variants - Button Design Tab [Taiga #10316](https://tree.taiga.io/project/penpot/us/10316)
|
||||
- Fix problem with component swapping panel [Taiga #12175](https://tree.taiga.io/project/penpot/issue/12175)
|
||||
|
||||
### :bug: Bugs fixed
|
||||
|
||||
- Display strokes information in inspect tab [Taiga #11154](https://tree.taiga.io/project/penpot/issue/11154)
|
||||
- Fix problem with booleans selection [Taiga #11627](https://tree.taiga.io/project/penpot/issue/11627)
|
||||
- Fix missing font when copy&paste a chunk of text [Taiga #11522](https://tree.taiga.io/project/penpot/issue/11522)
|
||||
- Fix bad swap slot after two swaps [Taiga #11659](https://tree.taiga.io/project/penpot/issue/11659)
|
||||
- Fix missing package for the `penpot_exporter` Docker image [GitHub #7205](https://github.com/penpot/penpot/issues/7025)
|
||||
- Fix issue where multiple dropdown menus could be opened simultaneously on the dashboard page [Taiga #11500](https://tree.taiga.io/project/penpot/issue/11500)
|
||||
- Fix font size/variant not updated when editing a text [Taiga #11552](https://tree.taiga.io/project/penpot/issue/11552)
|
||||
- Fix issue where Alt + arrow keys shortcut interferes with letter-spacing when moving text layers [Taiga #11552](https://tree.taiga.io/project/penpot/issue/11771)
|
||||
- Fix consistency issues on how font variants are visualized [Taiga #11499](https://tree.taiga.io/project/penpot/us/11499)
|
||||
- Fix parsing rx and ry SVG values for rect radius [Taiga #11861](https://tree.taiga.io/project/penpot/issue/11861)
|
||||
- Fix misleading affordance in saved versions [Taiga #11887](https://tree.taiga.io/project/penpot/issue/11887)
|
||||
- Fix pasting RTF text crashes penpot [Taiga #11717](https://tree.taiga.io/project/penpot/issue/11717)
|
||||
- Fix navigation arrows in Libraries & Templates carousel [Taiga #10609](https://tree.taiga.io/project/penpot/issue/10609)
|
||||
- Fix applying tokens with zero value to size [Taiga #11618](https://tree.taiga.io/project/penpot/issue/11618)
|
||||
- Fix typo [Taiga #11969](https://tree.taiga.io/project/penpot/issue/11969)
|
||||
- Fix typo [Taiga #11970](https://tree.taiga.io/project/penpot/issue/11970)
|
||||
- Fix typos [Taiga #11971](https://tree.taiga.io/project/penpot/issue/11971)
|
||||
- Fix inconsistent naming for "Flatten" [Taiga #8371](https://tree.taiga.io/project/penpot/issue/8371)
|
||||
- Layout item tokens should be unapplied when moving out of a layout [Taiga #11012](https://tree.taiga.io/project/penpot/issue/11012)
|
||||
- Fix incorrect date displayed for support plan [Taiga #11986](https://tree.taiga.io/project/penpot/issue/11986)
|
||||
- Fix can't import 'borderWidth' type token [#132](https://github.com/tokens-studio/penpot/issues/132)
|
||||
- Fix moving elements up or down while pressing alt [Taiga Issue #11992](https://tree.taiga.io/project/penpot/issue/11992)
|
||||
- Fix conflicting shortcuts (remove dec/inc line height and letter spacing) [Taiga #12102](https://tree.taiga.io/project/penpot/issue/12102)
|
||||
- Fix conflicting shortcuts (remove text-align shortcuts) [Taiga #12047](https://tree.taiga.io/project/penpot/issue/12047)
|
||||
- Fix export file with empty tokens library [Taiga #12137](https://tree.taiga.io/project/penpot/issue/12137)
|
||||
- Fix context menu on spacing tokens [Taiga #12141](https://tree.taiga.io/project/penpot/issue/12141)
|
||||
|
||||
## 2.9.0
|
||||
|
||||
### :rocket: Epics and highlights
|
||||
|
||||
@@ -30,7 +274,6 @@
|
||||
- Add info to apply-token event [Taiga #11710](https://tree.taiga.io/project/penpot/task/11710)
|
||||
- Fix double click on set name input [Taiga #11747](https://tree.taiga.io/project/penpot/issue/11747)
|
||||
|
||||
|
||||
### :bug: Bugs fixed
|
||||
|
||||
- Copying font size does not copy the unit [Taiga #11143](https://tree.taiga.io/project/penpot/issue/11143)
|
||||
@@ -78,7 +321,7 @@
|
||||
|
||||
**Penpot Library**
|
||||
|
||||
The initial prototype is completly reworked for provide a more consistent API
|
||||
The initial prototype is completely reworked to provide a more consistent API
|
||||
and to have proper validation and params decoding. All the details can be found
|
||||
on [its own changelog](library/CHANGES.md)
|
||||
|
||||
|
||||
@@ -77,17 +77,14 @@ Provide your team or organization with a completely owned collaborative design t
|
||||
### Integrations ###
|
||||
Penpot offers integration into the development toolchain, thanks to its support for webhooks and an API accessible through access tokens.
|
||||
|
||||
### What’s great for design ###
|
||||
With Penpot you can design libraries to share and reuse; turn design elements into components and tokens to allow reusability and scalability; and build realistic user flows and interactions.
|
||||
|
||||
### Design Tokens ###
|
||||
With Penpot’s standardized [design tokens](https://penpot.dev/collaboration/design-tokens) format, you can easily reuse and sync tokens across different platforms, workflows, and disciplines.
|
||||
### Building Design Systems: design tokens, components and variants ###
|
||||
Penpot brings design systems to code-minded teams: a single source of truth with native Design Tokens, Components, and Variants for scalable, reusable, and consistent UI across projects and platforms.
|
||||
|
||||
|
||||
<br />
|
||||
|
||||
<p align="center">
|
||||
<img src="https://img.plasmic.app/img-optimizer/v1/img?src=https%3A%2F%2Fimg.plasmic.app%2Fimg-optimizer%2Fv1%2Fimg%2F9dd677c36afb477e9666ccd1d3f009ad.png" alt="Open Source" style="width: 65%;">
|
||||
<img src="https://github.com/user-attachments/assets/cce75ad6-f783-473f-8803-da9eb8255fef">
|
||||
</p>
|
||||
|
||||
<br />
|
||||
|
||||
@@ -3,10 +3,10 @@
|
||||
|
||||
:deps
|
||||
{penpot/common {:local/root "../common"}
|
||||
org.clojure/clojure {:mvn/version "1.12.1"}
|
||||
org.clojure/clojure {:mvn/version "1.12.2"}
|
||||
org.clojure/tools.namespace {:mvn/version "1.5.0"}
|
||||
|
||||
com.github.luben/zstd-jni {:mvn/version "1.5.7-3"}
|
||||
com.github.luben/zstd-jni {:mvn/version "1.5.7-4"}
|
||||
|
||||
io.prometheus/simpleclient {:mvn/version "0.16.0"}
|
||||
io.prometheus/simpleclient_hotspot {:mvn/version "0.16.0"}
|
||||
@@ -17,7 +17,7 @@
|
||||
|
||||
io.prometheus/simpleclient_httpserver {:mvn/version "0.16.0"}
|
||||
|
||||
io.lettuce/lettuce-core {:mvn/version "6.7.0.RELEASE"}
|
||||
io.lettuce/lettuce-core {:mvn/version "6.8.1.RELEASE"}
|
||||
;; Minimal dependencies required by lettuce, we need to include them
|
||||
;; explicitly because clojure dependency management does not support
|
||||
;; yet the BOM format.
|
||||
@@ -28,29 +28,30 @@
|
||||
com.google.guava/guava {:mvn/version "33.4.8-jre"}
|
||||
|
||||
funcool/yetti
|
||||
{:git/tag "v11.4"
|
||||
:git/sha "ce50d42"
|
||||
{:git/tag "v11.8"
|
||||
:git/sha "1d1b33f"
|
||||
:git/url "https://github.com/funcool/yetti.git"
|
||||
:exclusions [org.slf4j/slf4j-api]}
|
||||
|
||||
com.github.seancorfield/next.jdbc
|
||||
{:mvn/version "1.3.1002"}
|
||||
{:mvn/version "1.3.1070"}
|
||||
|
||||
metosin/reitit-core {:mvn/version "0.9.1"}
|
||||
nrepl/nrepl {:mvn/version "1.3.1"}
|
||||
nrepl/nrepl {:mvn/version "1.4.0"}
|
||||
|
||||
org.postgresql/postgresql {:mvn/version "42.7.6"}
|
||||
org.xerial/sqlite-jdbc {:mvn/version "3.49.1.0"}
|
||||
org.postgresql/postgresql {:mvn/version "42.7.7"}
|
||||
org.xerial/sqlite-jdbc {:mvn/version "3.50.3.0"}
|
||||
|
||||
com.zaxxer/HikariCP {:mvn/version "6.3.0"}
|
||||
com.zaxxer/HikariCP {:mvn/version "7.0.2"}
|
||||
|
||||
io.whitfin/siphash {:mvn/version "2.0.0"}
|
||||
|
||||
buddy/buddy-hashers {:mvn/version "2.0.167"}
|
||||
buddy/buddy-sign {:mvn/version "3.6.1-359"}
|
||||
|
||||
com.github.ben-manes.caffeine/caffeine {:mvn/version "3.2.0"}
|
||||
com.github.ben-manes.caffeine/caffeine {:mvn/version "3.2.2"}
|
||||
|
||||
org.jsoup/jsoup {:mvn/version "1.20.1"}
|
||||
org.jsoup/jsoup {:mvn/version "1.21.2"}
|
||||
org.im4java/im4java
|
||||
{:git/tag "1.4.0-penpot-2"
|
||||
:git/sha "e2b3e16"
|
||||
@@ -60,12 +61,12 @@
|
||||
|
||||
org.clojars.pntblnk/clj-ldap {:mvn/version "0.0.17"}
|
||||
|
||||
dawran6/emoji {:mvn/version "0.1.5"}
|
||||
markdown-clj/markdown-clj {:mvn/version "1.12.3"}
|
||||
dawran6/emoji {:mvn/version "0.2.0"}
|
||||
markdown-clj/markdown-clj {:mvn/version "1.12.4"}
|
||||
|
||||
;; Pretty Print specs
|
||||
pretty-spec/pretty-spec {:mvn/version "0.1.4"}
|
||||
software.amazon.awssdk/s3 {:mvn/version "2.31.55"}}
|
||||
software.amazon.awssdk/s3 {:mvn/version "2.33.10"}}
|
||||
|
||||
:paths ["src" "resources" "target/classes"]
|
||||
:aliases
|
||||
@@ -80,12 +81,14 @@
|
||||
|
||||
:build
|
||||
{:extra-deps
|
||||
{io.github.clojure/tools.build {:git/tag "v0.10.9" :git/sha "e405aac"}}
|
||||
{io.github.clojure/tools.build {:mvn/version "0.10.10"}}
|
||||
:ns-default build}
|
||||
|
||||
:test
|
||||
{:main-opts ["-m" "kaocha.runner"]
|
||||
:jvm-opts ["-Dlog4j2.configurationFile=log4j2-devenv-repl.xml"]
|
||||
:jvm-opts ["-Dlog4j2.configurationFile=log4j2-devenv-repl.xml"
|
||||
"--sun-misc-unsafe-memory-access=allow"
|
||||
"--enable-native-access=ALL-UNNAMED"]
|
||||
:extra-deps {lambdaisland/kaocha {:mvn/version "1.91.1392"}}}
|
||||
|
||||
:outdated
|
||||
|
||||
@@ -6,12 +6,14 @@
|
||||
|
||||
(ns user
|
||||
(:require
|
||||
[app.binfile.common :as bfc]
|
||||
[app.common.data :as d]
|
||||
[app.common.debug :as debug]
|
||||
[app.common.exceptions :as ex]
|
||||
[app.common.files.helpers :as cfh]
|
||||
[app.common.fressian :as fres]
|
||||
[app.common.geom.matrix :as gmt]
|
||||
[app.common.json :as json]
|
||||
[app.common.logging :as l]
|
||||
[app.common.perf :as perf]
|
||||
[app.common.pprint :as pp]
|
||||
@@ -19,20 +21,22 @@
|
||||
[app.common.schema.desc-js-like :as smdj]
|
||||
[app.common.schema.desc-native :as smdn]
|
||||
[app.common.schema.generators :as sg]
|
||||
[app.common.schema.openapi :as oapi]
|
||||
[app.common.spec :as us]
|
||||
[app.common.json :as json]
|
||||
[app.common.time :as ct]
|
||||
[app.common.transit :as t]
|
||||
[app.common.types.file :as ctf]
|
||||
[app.common.uuid :as uuid]
|
||||
[app.common.uri :as u]
|
||||
[app.config :as cf]
|
||||
[app.db :as db]
|
||||
[app.main :as main]
|
||||
[app.srepl.helpers :as srepl.helpers]
|
||||
[app.srepl.main :as srepl]
|
||||
[app.srepl.helpers :as h]
|
||||
[app.srepl.main :refer :all]
|
||||
[app.util.blob :as blob]
|
||||
[app.util.time :as dt]
|
||||
[clj-async-profiler.core :as prof]
|
||||
[clojure.contrib.humanize :as hum]
|
||||
[clojure.datafy :refer [datafy]]
|
||||
[clojure.java.io :as io]
|
||||
[clojure.pprint :refer [pprint print-table]]
|
||||
[clojure.repl :refer :all]
|
||||
|
||||
@@ -8,38 +8,41 @@
|
||||
<body>
|
||||
<p>
|
||||
<strong>Feedback from:</strong><br />
|
||||
{% if profile %}
|
||||
<span>
|
||||
<span>Name: </span>
|
||||
<span><code>{{profile.fullname|abbreviate:25}}</code></span>
|
||||
</span>
|
||||
<br />
|
||||
|
||||
<span>
|
||||
<span>Email: </span>
|
||||
<span>{{profile.email}}</span>
|
||||
</span>
|
||||
<br />
|
||||
|
||||
<span>
|
||||
<span>ID: </span>
|
||||
<span><code>{{profile.id}}</code></span>
|
||||
</span>
|
||||
{% else %}
|
||||
<span>
|
||||
<span>Email: </span>
|
||||
<span>{{profile.email}}</span>
|
||||
</span>
|
||||
{% endif %}
|
||||
<span>
|
||||
<span>Name: </span>
|
||||
<span><code>{{profile.fullname|abbreviate:25}}</code></span>
|
||||
</span>
|
||||
<br />
|
||||
<span>
|
||||
<span>Email: </span>
|
||||
<span>{{profile.email}}</span>
|
||||
</span>
|
||||
<br />
|
||||
<span>
|
||||
<span>ID: </span>
|
||||
<span><code>{{profile.id}}</code></span>
|
||||
</span>
|
||||
</p>
|
||||
<p>
|
||||
<strong>Subject:</strong><br />
|
||||
<span>{{subject|abbreviate:300}}</span>
|
||||
<span>{{feedback-subject|abbreviate:300}}</span>
|
||||
</p>
|
||||
|
||||
<p>
|
||||
<strong>Type:</strong><br />
|
||||
<span>{{feedback-type|abbreviate:300}}</span>
|
||||
</p>
|
||||
|
||||
{% if feedback-error-href %}
|
||||
<p>
|
||||
<strong>Error HREF:</strong><br />
|
||||
<span>{{feedback-error-href|abbreviate:500}}</span>
|
||||
</p>
|
||||
{% endif %}
|
||||
|
||||
<p>
|
||||
<strong>Message:</strong><br />
|
||||
{{content|linebreaks-br|safe}}
|
||||
{{feedback-content|linebreaks-br}}
|
||||
</p>
|
||||
</body>
|
||||
</html>
|
||||
|
||||
@@ -1 +1 @@
|
||||
[PENPOT FEEDBACK]: {{subject}}
|
||||
[PENPOT FEEDBACK]: {{feedback-subject}}
|
||||
|
||||
@@ -1,9 +1,10 @@
|
||||
{% if profile %}
|
||||
Feedback profile: {{profile.fullname}} <{{profile.email}}> / {{profile.id}}
|
||||
{% else %}
|
||||
Feedback from: {{email}}
|
||||
{% endif %}
|
||||
From: {{profile.fullname}} <{{profile.email}}> / {{profile.id}}
|
||||
Subject: {{feedback-subject}}
|
||||
Type: {{feedback-type}}
|
||||
{%- if feedback-error-href %}
|
||||
HREF: {{feedback-error-href}}
|
||||
{% endif -%}
|
||||
|
||||
Subject: {{subject}}
|
||||
Message:
|
||||
|
||||
{{content}}
|
||||
{{feedback-content}}
|
||||
|
||||
@@ -1 +1 @@
|
||||
Invitation to join {{team}}
|
||||
{{invited-by|abbreviate:25}} has invited you to join the team “{{ team|abbreviate:25 }}”
|
||||
@@ -1,6 +1,9 @@
|
||||
[{:id "tokens-starter-kit"
|
||||
:name "Design tokens starter kit"
|
||||
:file-uri "https://github.com/penpot/penpot-files/raw/refs/heads/main/Tokens%20starter%20kit.penpot"},
|
||||
:file-uri "https://github.com/penpot/penpot-files/raw/refs/heads/main/Tokens%20starter%20kit.penpot"}
|
||||
{:id "penpot-design-system"
|
||||
:name "Penpot Design System | Pencil"
|
||||
:file-uri "https://github.com/penpot/penpot-files/raw/refs/heads/main/penpot-app.penpot"}
|
||||
{:id "wireframing-kit"
|
||||
:name "Wireframe library"
|
||||
:file-uri "https://github.com/penpot/penpot-files/raw/refs/heads/main/Wireframing%20kit%20v1.1.penpot"}
|
||||
@@ -10,9 +13,6 @@
|
||||
{:id "plants-app"
|
||||
:name "UI mockup example"
|
||||
:file-uri "https://github.com/penpot/penpot-files/raw/main/Plants-app.penpot"}
|
||||
{:id "penpot-design-system"
|
||||
:name "Design system example"
|
||||
:file-uri "https://github.com/penpot/penpot-files/raw/refs/heads/main/Penpot%20-%20Design%20System%20v2.1.penpot"}
|
||||
{:id "tutorial-for-beginners"
|
||||
:name "Tutorial for beginners"
|
||||
:file-uri "https://github.com/penpot/penpot-files/raw/main/tutorial-for-beginners.penpot"}
|
||||
|
||||
@@ -4,7 +4,7 @@
|
||||
<meta charset="utf-8" />
|
||||
<meta name="robots" content="noindex,nofollow">
|
||||
<meta http-equiv="x-ua-compatible" content="ie=edge" />
|
||||
<title>Builtin API Documentation - Penpot</title>
|
||||
<title>{{label|upper}} API Documentation</title>
|
||||
|
||||
<link rel="preconnect" href="https://fonts.googleapis.com">
|
||||
<link rel="preconnect" href="https://fonts.gstatic.com" crossorigin>
|
||||
@@ -19,7 +19,7 @@
|
||||
<body>
|
||||
<main>
|
||||
<header>
|
||||
<h1>Penpot API Documentation (v{{version}})</h1>
|
||||
<h1>{{label|upper}}: API Documentation (v{{version}})</h1>
|
||||
<small class="menu">
|
||||
[
|
||||
<nav>
|
||||
@@ -31,9 +31,10 @@
|
||||
</header>
|
||||
<section class="doc-content">
|
||||
<h2>INTRODUCTION</h2>
|
||||
<p>This documentation is intended to be a general overview of the penpot RPC API.
|
||||
If you prefer, you can use <a href="/api/openapi.json">OpenAPI</a>
|
||||
and/or <a href="/api/openapi">SwaggerUI</a> as alternative.</p>
|
||||
<p>This documentation is intended to be a general overview of
|
||||
the {{label}} API. If you prefer, you can
|
||||
use <a href="{{openapi}}">Swagger/OpenAPI</a> as
|
||||
alternative.</p>
|
||||
|
||||
<h2>GENERAL NOTES</h2>
|
||||
|
||||
@@ -43,7 +44,7 @@
|
||||
that starts with <b>get-</b> in the name, can use GET HTTP
|
||||
method which in many cases benefits from the HTTP cache.</p>
|
||||
|
||||
|
||||
{% block auth-section %}
|
||||
<h3>Authentication</h3>
|
||||
<p>The penpot backend right now offers two ways to authenticate the request:
|
||||
<b>cookies</b> (the same mechanism that we use ourselves on accessing the API from the
|
||||
@@ -56,9 +57,10 @@
|
||||
<p>The access token can be obtained on the appropriate section on profile settings
|
||||
and it should be provided using <b>`Authorization`</b> header with <b>`Token
|
||||
<token-string>`</b> value.</p>
|
||||
{% endblock %}
|
||||
|
||||
<h3>Content Negotiation</h3>
|
||||
<p>The penpot API by default operates indistinctly with: <b>`application/json`</b>
|
||||
<p>This API operates indistinctly with: <b>`application/json`</b>
|
||||
and <b>`application/transit+json`</b> content types. You should specify the
|
||||
desired content-type on the <b>`Accept`</b> header, the transit encoding is used
|
||||
by default.</p>
|
||||
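Putting the two previous notes together (token authentication plus content negotiation),
a request might look like this sketch; the host, token and method name are placeholders,
and the `Accept` header can be switched to `application/transit+json` if preferred:

```bash
# Hypothetical request: Token auth header plus explicit content negotiation.
# Methods whose name starts with get- may also be called with plain HTTP GET.
curl -s \
  -H "Authorization: Token $PENPOT_ACCESS_TOKEN" \
  -H "Accept: application/transit+json" \
  "https://<your_domain>/api/main/methods/get-profile"
```
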
@@ -75,13 +77,16 @@
|
||||
standard <a href="https://developer.mozilla.org/en-US/docs/Web/API/Fetch_API">Fetch
|
||||
API</a></p>
|
||||
|
||||
{% block limits-section %}
|
||||
<h3>Limits</h3>
|
||||
<p>The rate limit works on a per-user basis (this means that different API keys share
the same rate limit). For now the limits are not documented because we are
studying and analyzing the data. As a general rule, the API should not be abused; if
abusive use is detected, we will proceed to block the user's access to the
API.</p>
|
||||
{% endblock %}
|
||||
|
||||
{% block webhooks-section %}
|
||||
<h3>Webhooks</h3>
|
||||
<p>All methods that emit webhook events are marked with flag <b>WEBHOOK</b>, the
|
||||
data structure defined on each method represents the <i>payload</i> of the
|
||||
@@ -97,9 +102,11 @@
|
||||
"profileId": "db601c95-045f-808b-8002-361312e63531"
|
||||
}
|
||||
</pre>
|
||||
{% endblock %}
|
||||
|
||||
</section>
|
||||
<section class="rpc-doc-content">
|
||||
<h2>RPC METHODS REFERENCE:</h2>
|
||||
<h2>METHODS REFERENCE:</h2>
|
||||
<ul class="rpc-items">
|
||||
{% for item in methods %}
|
||||
{% include "app/templates/api-doc-entry.tmpl" with item=item %}
|
||||
|
||||
@@ -45,7 +45,41 @@ Debug Main Page
|
||||
</form>
|
||||
</fieldset>
|
||||
|
||||
<fieldset>
|
||||
<legend>VIRTUAL CLOCK</legend>
|
||||
|
||||
<desc>
|
||||
<p>
|
||||
CURRENT CLOCK: <b>{{current-clock}}</b>
|
||||
<br />
|
||||
CURRENT OFFSET: <b>{{current-offset}}</b>
|
||||
<br />
|
||||
CURRENT TIME: <b>{{current-time}}</b>
|
||||
</p>
|
||||
|
||||
<p>Examples: 3h, -7h, 24h (allowed suffixes: h, s)</p>
|
||||
</desc>
|
||||
|
||||
<form method="post" action="/dbg/actions/set-virtual-clock">
|
||||
<div class="row">
|
||||
<input type="text" name="offset" placeholder="3h" value="" />
|
||||
</div>
|
||||
|
||||
<div class="row">
|
||||
<label for="force-verify">Are you sure?</label>
|
||||
<input id="force-verify" type="checkbox" name="force" />
|
||||
<br />
|
||||
<small>
|
||||
This is just a security double check to prevent unintentional submits.
|
||||
</small>
|
||||
</div>
|
||||
|
||||
<div class="row">
|
||||
<input type="submit" name="submit" value="Submit" />
|
||||
<input type="submit" name="reset" value="Reset" />
|
||||
</div>
|
||||
</form>
|
||||
</fieldset>
|
||||
|
||||
</section>
|
||||
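As a rough sketch, the same virtual-clock action could be triggered outside the debug
page with an HTTP POST; the host, port and any required admin session cookie are
assumptions and depend on your deployment:

```bash
# Hypothetical call to the debug endpoint shown in the form above.
# Field names (offset, force, submit) come from the form; auth cookies are omitted.
curl -X POST "http://localhost:6060/dbg/actions/set-virtual-clock" \
  -d "offset=3h" \
  -d "force=on" \
  -d "submit=Submit"
```
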
|
||||
|
||||
1 backend/resources/app/templates/main-api-doc.tmpl Normal file
@@ -0,0 +1 @@
|
||||
{% extends "app/templates/api-doc.tmpl" %}
|
||||
10 backend/resources/app/templates/management-api-doc.tmpl Normal file
@@ -0,0 +1,10 @@
|
||||
{% extends "app/templates/api-doc.tmpl" %}
|
||||
|
||||
{% block auth-section %}
|
||||
{% endblock %}
|
||||
|
||||
{% block limits-section %}
|
||||
{% endblock %}
|
||||
|
||||
{% block webhooks-section %}
|
||||
{% endblock %}
|
||||
@@ -7,7 +7,7 @@
|
||||
name="description"
|
||||
content="SwaggerUI"
|
||||
/>
|
||||
<title>PENPOT Swagger UI</title>
|
||||
<title>{{label|upper}} API</title>
|
||||
<style>{{swagger-css|safe}}</style>
|
||||
</head>
|
||||
<body>
|
||||
@@ -16,7 +16,7 @@
|
||||
<script>
|
||||
window.onload = () => {
|
||||
window.ui = SwaggerUIBundle({
|
||||
url: '{{public-uri}}/api/openapi.json',
|
||||
url: '{{uri}}',
|
||||
dom_id: '#swagger-ui',
|
||||
presets: [
|
||||
SwaggerUIBundle.presets.apis,
|
||||
|
||||
@@ -25,8 +25,7 @@
|
||||
<Logger name="app.storage.tmp" level="info" />
|
||||
<Logger name="app.worker" level="trace" />
|
||||
<Logger name="app.msgbus" level="info" />
|
||||
<Logger name="app.http.websocket" level="info" />
|
||||
<Logger name="app.http.sse" level="info" />
|
||||
<Logger name="app.http" level="info" />
|
||||
<Logger name="app.util.websocket" level="info" />
|
||||
<Logger name="app.redis" level="info" />
|
||||
<Logger name="app.rpc.rlimit" level="info" />
|
||||
|
||||
@@ -25,8 +25,7 @@
|
||||
<Logger name="app.storage.tmp" level="info" />
|
||||
<Logger name="app.worker" level="trace" />
|
||||
<Logger name="app.msgbus" level="info" />
|
||||
<Logger name="app.http.websocket" level="info" />
|
||||
<Logger name="app.http.sse" level="info" />
|
||||
<Logger name="app.http" level="info" />
|
||||
<Logger name="app.util.websocket" level="info" />
|
||||
<Logger name="app.redis" level="info" />
|
||||
<Logger name="app.rpc.rlimit" level="info" />
|
||||
|
||||
83 backend/scripts/_env Normal file
@@ -0,0 +1,83 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
export PENPOT_MANAGEMENT_API_KEY=super-secret-management-api-key
|
||||
export PENPOT_SECRET_KEY=super-secret-devenv-key
|
||||
export PENPOT_HOST=devenv
|
||||
|
||||
export PENPOT_FLAGS="\
|
||||
$PENPOT_FLAGS \
|
||||
enable-login-with-password
|
||||
disable-login-with-ldap \
|
||||
disable-login-with-oidc \
|
||||
disable-login-with-google \
|
||||
disable-login-with-github \
|
||||
disable-login-with-gitlab \
|
||||
enable-backend-worker \
|
||||
enable-backend-asserts \
|
||||
disable-feature-fdata-pointer-map \
|
||||
enable-feature-fdata-objects-map \
|
||||
enable-audit-log \
|
||||
enable-transit-readable-response \
|
||||
enable-demo-users \
|
||||
enable-user-feedback \
|
||||
disable-secure-session-cookies \
|
||||
enable-smtp \
|
||||
enable-prepl-server \
|
||||
enable-urepl-server \
|
||||
enable-rpc-climit \
|
||||
enable-rpc-rlimit \
|
||||
enable-quotes \
|
||||
enable-soft-rpc-rlimit \
|
||||
enable-auto-file-snapshot \
|
||||
enable-webhooks \
|
||||
enable-access-tokens \
|
||||
disable-tiered-file-data-storage \
|
||||
enable-file-validation \
|
||||
enable-file-schema-validation \
|
||||
enable-redis-cache \
|
||||
enable-subscriptions";
|
||||
|
||||
# Default deletion delay for devenv
|
||||
export PENPOT_DELETION_DELAY="24h"
|
||||
|
||||
# Setup default upload media file size to 100MiB
|
||||
export PENPOT_MEDIA_MAX_FILE_SIZE=104857600
|
||||
|
||||
# Setup default multipart upload size to 300MiB
|
||||
export PENPOT_HTTP_SERVER_MAX_MULTIPART_BODY_SIZE=314572800
|
||||
|
||||
export PENPOT_USER_FEEDBACK_DESTINATION="support@example.com"
|
||||
|
||||
export AWS_ACCESS_KEY_ID=penpot-devenv
|
||||
export AWS_SECRET_ACCESS_KEY=penpot-devenv
|
||||
export PENPOT_OBJECTS_STORAGE_BACKEND=s3
|
||||
export PENPOT_OBJECTS_STORAGE_S3_ENDPOINT=http://minio:9000
|
||||
export PENPOT_OBJECTS_STORAGE_S3_BUCKET=penpot
|
||||
|
||||
export JAVA_OPTS="\
|
||||
-Djava.util.logging.manager=org.apache.logging.log4j.jul.LogManager \
|
||||
-Djdk.attach.allowAttachSelf \
|
||||
-Dlog4j2.configurationFile=log4j2-devenv.xml \
|
||||
-Djdk.tracePinnedThreads=full \
|
||||
-Dim4java.useV7=true \
|
||||
-XX:+UnlockExperimentalVMOptions \
|
||||
-XX:+UseShenandoahGC \
|
||||
-XX:+UseCompactObjectHeaders \
|
||||
-XX:ShenandoahGCMode=generational \
|
||||
-XX:-OmitStackTraceInFastThrow \
|
||||
--sun-misc-unsafe-memory-access=allow \
|
||||
--enable-preview \
|
||||
--enable-native-access=ALL-UNNAMED";
|
||||
|
||||
function setup_minio() {
|
||||
# Initialize MINIO config
|
||||
mc alias set penpot-s3/ http://minio:9000 minioadmin minioadmin -q
|
||||
mc admin user add penpot-s3 penpot-devenv penpot-devenv -q
|
||||
mc admin user info penpot-s3 penpot-devenv |grep -F -q "readwrite"
|
||||
if [ "$?" = "1" ]; then
|
||||
mc admin policy attach penpot-s3 readwrite --user=penpot-devenv -q
|
||||
fi
|
||||
mc mb penpot-s3/penpot -p -q
|
||||
}
|
||||
|
||||
|
||||
@@ -1,112 +1,13 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
export PENPOT_SECRET_KEY=super-secret-devenv-key
|
||||
export PENPOT_HOST=devenv
|
||||
export PENPOT_FLAGS="\
|
||||
$PENPOT_FLAGS \
|
||||
enable-login-with-ldap \
|
||||
enable-login-with-password
|
||||
enable-login-with-oidc \
|
||||
enable-login-with-google \
|
||||
enable-login-with-github \
|
||||
enable-login-with-gitlab \
|
||||
enable-backend-worker \
|
||||
enable-backend-asserts \
|
||||
enable-feature-fdata-pointer-map \
|
||||
enable-feature-fdata-objects-map \
|
||||
enable-audit-log \
|
||||
enable-transit-readable-response \
|
||||
enable-demo-users \
|
||||
disable-secure-session-cookies \
|
||||
enable-smtp \
|
||||
enable-prepl-server \
|
||||
enable-urepl-server \
|
||||
enable-rpc-climit \
|
||||
enable-rpc-rlimit \
|
||||
enable-quotes \
|
||||
enable-soft-rpc-rlimit \
|
||||
enable-auto-file-snapshot \
|
||||
enable-webhooks \
|
||||
enable-access-tokens \
|
||||
enable-tiered-file-data-storage \
|
||||
enable-file-validation \
|
||||
enable-file-schema-validation \
|
||||
enable-subscriptions \
|
||||
enable-subscriptions-old";
|
||||
|
||||
# Default deletion delay for devenv
|
||||
export PENPOT_DELETION_DELAY="24h"
|
||||
|
||||
# Setup default upload media file size to 100MiB
|
||||
export PENPOT_MEDIA_MAX_FILE_SIZE=104857600
|
||||
|
||||
# Setup default multipart upload size to 300MiB
|
||||
export PENPOT_HTTP_SERVER_MAX_MULTIPART_BODY_SIZE=314572800
|
||||
|
||||
# export PENPOT_DATABASE_URI="postgresql://172.17.0.1:5432/penpot"
|
||||
# export PENPOT_DATABASE_USERNAME="penpot"
|
||||
# export PENPOT_DATABASE_PASSWORD="penpot"
|
||||
# export PENPOT_DATABASE_READONLY=true
|
||||
|
||||
# export PENPOT_DATABASE_URI="postgresql://172.17.0.1:5432/penpot_pre"
|
||||
# export PENPOT_DATABASE_USERNAME="penpot_pre"
|
||||
# export PENPOT_DATABASE_PASSWORD="penpot_pre"
|
||||
|
||||
# export PENPOT_LOGGERS_LOKI_URI="http://172.17.0.1:3100/loki/api/v1/push"
|
||||
# export PENPOT_AUDIT_LOG_ARCHIVE_URI="http://localhost:6070/api/audit"
|
||||
SCRIPT_DIR=$(dirname $0);
|
||||
source $SCRIPT_DIR/_env;
|
||||
|
||||
# Initialize MINIO config
|
||||
mc alias set penpot-s3/ http://minio:9000 minioadmin minioadmin -q
|
||||
mc admin user add penpot-s3 penpot-devenv penpot-devenv -q
|
||||
mc admin user info penpot-s3 penpot-devenv |grep -F -q "readwrite"
|
||||
if [ "$?" = "1" ]; then
|
||||
mc admin policy attach penpot-s3 readwrite --user=penpot-devenv -q
|
||||
fi
|
||||
mc mb penpot-s3/penpot -p -q
|
||||
|
||||
export AWS_ACCESS_KEY_ID=penpot-devenv
|
||||
export AWS_SECRET_ACCESS_KEY=penpot-devenv
|
||||
|
||||
export PENPOT_OBJECTS_STORAGE_BACKEND=s3
|
||||
export PENPOT_OBJECTS_STORAGE_S3_ENDPOINT=http://minio:9000
|
||||
export PENPOT_OBJECTS_STORAGE_S3_BUCKET=penpot
|
||||
export PENPOT_OBJECTS_STORAGE_FS_DIRECTORY="assets"
|
||||
|
||||
export JAVA_OPTS="\
|
||||
-Djava.util.logging.manager=org.apache.logging.log4j.jul.LogManager \
|
||||
-Djdk.attach.allowAttachSelf \
|
||||
-Dlog4j2.configurationFile=log4j2-devenv-repl.xml \
|
||||
-Djdk.tracePinnedThreads=full \
|
||||
-Dim4java.useV7=true \
|
||||
-XX:+EnableDynamicAgentLoading \
|
||||
-XX:-OmitStackTraceInFastThrow \
|
||||
-XX:+UnlockDiagnosticVMOptions \
|
||||
-XX:+DebugNonSafepoints \
|
||||
--sun-misc-unsafe-memory-access=allow \
|
||||
--enable-preview \
|
||||
--enable-native-access=ALL-UNNAMED";
|
||||
setup_minio;
|
||||
|
||||
export JAVA_OPTS="$JAVA_OPTS -Dlog4j2.configurationFile=log4j2-devenv-repl.xml"
|
||||
export OPTIONS="-A:jmx-remote -A:dev"
|
||||
|
||||
# Setup HEAP
|
||||
# export OPTIONS="$OPTIONS -J-Xms50m -J-Xmx1024m"
|
||||
# export OPTIONS="$OPTIONS -J-Xms1100m -J-Xmx1100m -J-XX:+AlwaysPreTouch"
|
||||
|
||||
# Increase virtual thread pool size
|
||||
# export OPTIONS="$OPTIONS -J-Djdk.virtualThreadScheduler.parallelism=16"
|
||||
|
||||
# Disable C2 Compiler
|
||||
# export OPTIONS="$OPTIONS -J-XX:TieredStopAtLevel=1"
|
||||
|
||||
# Disable all compilers
|
||||
# export OPTIONS="$OPTIONS -J-Xint"
|
||||
|
||||
# Setup GC
|
||||
# export OPTIONS="$OPTIONS -J-XX:+UseG1GC"
|
||||
|
||||
# Setup GC
|
||||
# export OPTIONS="$OPTIONS -J-XX:+UseZGC"
|
||||
|
||||
export OPTIONS_EVAL="nil"
|
||||
# export OPTIONS_EVAL="(set! *warn-on-reflection* true)"
|
||||
|
||||
|
||||
@@ -1,48 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
source /home/penpot/environ
|
||||
export PENPOT_FLAGS="$PENPOT_FLAGS disable-backend-worker"
|
||||
|
||||
export OPTIONS="
|
||||
-A:jmx-remote -A:dev \
|
||||
-J-Djava.util.logging.manager=org.apache.logging.log4j.jul.LogManager \
|
||||
-J-Djdk.attach.allowAttachSelf \
|
||||
-J-Dlog4j2.configurationFile=log4j2-experiments.xml \
|
||||
-J-XX:-OmitStackTraceInFastThrow \
|
||||
-J-XX:+UnlockDiagnosticVMOptions \
|
||||
-J-XX:+DebugNonSafepoints \
|
||||
-J-Djdk.tracePinnedThreads=full \
|
||||
-J-XX:+UseTransparentHugePages \
|
||||
-J-XX:ReservedCodeCacheSize=1g \
|
||||
-J-Dpolyglot.engine.WarnInterpreterOnly=false \
|
||||
-J--enable-preview";
|
||||
|
||||
# Setup HEAP
|
||||
export OPTIONS="$OPTIONS -J-Xms320g -J-Xmx320g -J-XX:+AlwaysPreTouch"
|
||||
|
||||
export PENPOT_HTTP_SERVER_IO_THREADS=2
|
||||
export PENPOT_HTTP_SERVER_WORKER_THREADS=2
|
||||
|
||||
# Increase virtual thread pool size
|
||||
# export OPTIONS="$OPTIONS -J-Djdk.virtualThreadScheduler.parallelism=16"
|
||||
|
||||
# Disable C2 Compiler
|
||||
# export OPTIONS="$OPTIONS -J-XX:TieredStopAtLevel=1"
|
||||
|
||||
# Disable all compilers
|
||||
# export OPTIONS="$OPTIONS -J-Xint"
|
||||
|
||||
# Setup GC
|
||||
export OPTIONS="$OPTIONS -J-XX:+UseG1GC -J-Xlog:gc:logs/gc.log"
|
||||
|
||||
# Setup GC
|
||||
#export OPTIONS="$OPTIONS -J-XX:+UseZGC -J-XX:+ZGenerational -J-Xlog:gc:logs/gc.log"
|
||||
|
||||
# Enable ImageMagick v7.x support
|
||||
# export OPTIONS="-J-Dim4java.useV7=true $OPTIONS";
|
||||
|
||||
export OPTIONS_EVAL="nil"
|
||||
# export OPTIONS_EVAL="(set! *warn-on-reflection* true)"
|
||||
|
||||
set -ex
|
||||
exec clojure $OPTIONS -M -e "$OPTIONS_EVAL" -m rebel-readline.main
|
||||
@@ -1,44 +1,13 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
export PENPOT_SECRET_KEY=super-secret-devenv-key
|
||||
export PENPOT_HOST=devenv
|
||||
export PENPOT_FLAGS="\
|
||||
$PENPOT_FLAGS \
|
||||
enable-backend-asserts \
|
||||
enable-feature-fdata-pointer-map \
|
||||
enable-feature-fdata-objects-map \
|
||||
enable-file-snapshot \
|
||||
enable-tiered-file-data-storage";
|
||||
SCRIPT_DIR=$(dirname $0);
|
||||
|
||||
export JAVA_OPTS="
|
||||
-Djava.util.logging.manager=org.apache.logging.log4j.jul.LogManager \
|
||||
-Djdk.attach.allowAttachSelf \
|
||||
-Dlog4j2.configurationFile=log4j2-devenv.xml \
|
||||
-XX:+EnableDynamicAgentLoading \
|
||||
-XX:-OmitStackTraceInFastThrow \
|
||||
-XX:+UnlockDiagnosticVMOptions \
|
||||
-XX:+DebugNonSafepoints";
|
||||
|
||||
export CLOJURE_OPTIONS="-A:dev"
|
||||
|
||||
|
||||
# Default deletion delay for devenv
|
||||
export PENPOT_DELETION_DELAY="24h"
|
||||
|
||||
# Setup default upload media file size to 100MiB
|
||||
export PENPOT_MEDIA_MAX_FILE_SIZE=104857600
|
||||
|
||||
# Setup default multipart upload size to 300MiB
|
||||
export PENPOT_HTTP_SERVER_MAX_MULTIPART_BODY_SIZE=314572800
|
||||
|
||||
export AWS_ACCESS_KEY_ID=penpot-devenv
|
||||
export AWS_SECRET_ACCESS_KEY=penpot-devenv
|
||||
export PENPOT_OBJECTS_STORAGE_BACKEND=s3
|
||||
export PENPOT_OBJECTS_STORAGE_S3_ENDPOINT=http://minio:9000
|
||||
export PENPOT_OBJECTS_STORAGE_S3_BUCKET=penpot
|
||||
source $SCRIPT_DIR/_env;
|
||||
export OPTIONS="-A:dev"
|
||||
|
||||
entrypoint=${1:-app.main};
|
||||
|
||||
shift 1;
|
||||
set -ex
|
||||
|
||||
clojure $CLOJURE_OPTIONS -A:dev -M -m $entrypoint "$@";
|
||||
exec clojure $OPTIONS -A:dev -M -m $entrypoint "$@";
|
||||
|
||||
@@ -1,70 +1,11 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
export PENPOT_SECRET_KEY=super-secret-devenv-key
|
||||
export PENPOT_HOST=devenv
|
||||
export PENPOT_FLAGS="\
|
||||
$PENPOT_FLAGS \
|
||||
enable-prepl-server \
|
||||
enable-urepl-server \
|
||||
enable-nrepl-server \
|
||||
enable-webhooks \
|
||||
enable-backend-asserts \
|
||||
enable-audit-log \
|
||||
enable-login-with-ldap \
|
||||
enable-transit-readable-response \
|
||||
enable-demo-users \
|
||||
enable-feature-fdata-pointer-map \
|
||||
enable-feature-fdata-objects-map \
|
||||
disable-secure-session-cookies \
|
||||
enable-rpc-climit \
|
||||
enable-smtp \
|
||||
enable-quotes \
|
||||
enable-file-snapshot \
|
||||
enable-access-tokens \
|
||||
enable-tiered-file-data-storage \
|
||||
enable-file-validation \
|
||||
enable-file-schema-validation \
|
||||
enable-subscriptions \
|
||||
enable-subscriptions-old ";
|
||||
|
||||
# Default deletion delay for devenv
|
||||
export PENPOT_DELETION_DELAY="24h"
|
||||
|
||||
# Setup default upload media file size to 100MiB
|
||||
export PENPOT_MEDIA_MAX_FILE_SIZE=104857600
|
||||
|
||||
# Setup default multipart upload size to 300MiB
|
||||
export PENPOT_HTTP_SERVER_MAX_MULTIPART_BODY_SIZE=314572800
|
||||
SCRIPT_DIR=$(dirname $0);
|
||||
source $SCRIPT_DIR/_env;
|
||||
|
||||
# Initialize MINIO config
|
||||
mc alias set penpot-s3/ http://minio:9000 minioadmin minioadmin -q
|
||||
mc admin user add penpot-s3 penpot-devenv penpot-devenv -q
|
||||
mc admin user info penpot-s3 penpot-devenv |grep -F -q "readwrite"
|
||||
if [ "$?" = "1" ]; then
|
||||
mc admin policy attach penpot-s3 readwrite --user=penpot-devenv -q
|
||||
fi
|
||||
mc mb penpot-s3/penpot -p -q
|
||||
|
||||
export AWS_ACCESS_KEY_ID=penpot-devenv
|
||||
export AWS_SECRET_ACCESS_KEY=penpot-devenv
|
||||
export PENPOT_OBJECTS_STORAGE_BACKEND=s3
|
||||
export PENPOT_OBJECTS_STORAGE_S3_ENDPOINT=http://minio:9000
|
||||
export PENPOT_OBJECTS_STORAGE_S3_BUCKET=penpot
|
||||
|
||||
entrypoint=${1:-app.main};
|
||||
|
||||
export JAVA_OPTS="\
|
||||
-Djava.util.logging.manager=org.apache.logging.log4j.jul.LogManager \
|
||||
-Djdk.attach.allowAttachSelf \
|
||||
-Dlog4j2.configurationFile=log4j2-devenv.xml \
|
||||
-Djdk.tracePinnedThreads=full \
|
||||
-Dim4java.useV7=true \
|
||||
-XX:-OmitStackTraceInFastThrow \
|
||||
--sun-misc-unsafe-memory-access=allow \
|
||||
--enable-preview \
|
||||
--enable-native-access=ALL-UNNAMED";
|
||||
|
||||
export OPTIONS="-A:jmx-remote -A:dev"
|
||||
setup_minio;
|
||||
|
||||
shift 1;
|
||||
set -ex
|
||||
clojure $OPTIONS -M -m $entrypoint;
|
||||
exec clojure -A:jmx-remote -A:dev -M -m app.main "$@";
|
||||
|
||||
File diff suppressed because it is too large
@@ -15,19 +15,21 @@
|
||||
[app.common.files.migrations :as fmg]
|
||||
[app.common.files.validate :as fval]
|
||||
[app.common.logging :as l]
|
||||
[app.common.schema :as sm]
|
||||
[app.common.time :as ct]
|
||||
[app.common.types.file :as ctf]
|
||||
[app.common.uuid :as uuid]
|
||||
[app.common.weak :as weak]
|
||||
[app.config :as cf]
|
||||
[app.db :as db]
|
||||
[app.db.sql :as sql]
|
||||
[app.features.fdata :as feat.fdata]
|
||||
[app.features.file-migrations :as feat.fmigr]
|
||||
[app.features.fdata :as fdata]
|
||||
[app.features.file-migrations :as fmigr]
|
||||
[app.loggers.audit :as-alias audit]
|
||||
[app.loggers.webhooks :as-alias webhooks]
|
||||
[app.storage :as sto]
|
||||
[app.util.blob :as blob]
|
||||
[app.util.pointer-map :as pmap]
|
||||
[app.util.time :as dt]
|
||||
[app.worker :as-alias wrk]
|
||||
[clojure.set :as set]
|
||||
[cuerdas.core :as str]
|
||||
@@ -38,6 +40,7 @@
|
||||
|
||||
(def ^:dynamic *state* nil)
|
||||
(def ^:dynamic *options* nil)
|
||||
(def ^:dynamic *reference-file* nil)
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; DEFAULTS
|
||||
@@ -53,17 +56,12 @@
|
||||
(* 1024 1024 100))
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
(declare get-resolved-file-libraries)
|
||||
(declare update-file!)
|
||||
|
||||
(def file-attrs
|
||||
#{:id
|
||||
:name
|
||||
:migrations
|
||||
:features
|
||||
:project-id
|
||||
:is-shared
|
||||
:version
|
||||
:data})
|
||||
(sm/keys ctf/schema:file))
|
||||
|
||||
(defn parse-file-format
|
||||
[template]
|
||||
@@ -143,32 +141,176 @@
|
||||
([index coll attr]
|
||||
(reduce #(index-object %1 %2 attr) index coll)))
|
||||
|
||||
(defn decode-row
|
||||
[{:keys [data changes features] :as row}]
|
||||
(defn- decode-row-features
|
||||
[{:keys [features] :as row}]
|
||||
(when row
|
||||
(cond-> row
|
||||
features (assoc :features (db/decode-pgarray features #{}))
|
||||
changes (assoc :changes (blob/decode changes))
|
||||
data (assoc :data (blob/decode data)))))
|
||||
(db/pgarray? features) (assoc :features (db/decode-pgarray features #{})))))
|
||||
|
||||
(def sql:get-minimal-file
|
||||
"SELECT f.id,
|
||||
f.revn,
|
||||
f.modified_at,
|
||||
f.deleted_at
|
||||
FROM file AS f
|
||||
WHERE f.id = ?")
|
||||
|
||||
(defn decode-file
|
||||
"A general purpose file decoding function that resolves all external
|
||||
pointers, run migrations and return plain vanilla file map"
|
||||
[cfg {:keys [id] :as file} & {:keys [migrate?] :or {migrate? true}}]
|
||||
(binding [pmap/*load-fn* (partial feat.fdata/load-pointer cfg id)]
|
||||
(let [file (->> file
|
||||
(feat.fmigr/resolve-applied-migrations cfg)
|
||||
(feat.fdata/resolve-file-data cfg))
|
||||
libs (delay (get-resolved-file-libraries cfg file))]
|
||||
(defn get-minimal-file
|
||||
[cfg id & {:as opts}]
|
||||
(db/get-with-sql cfg [sql:get-minimal-file id] opts))
|
||||
|
||||
(-> file
|
||||
(update :features db/decode-pgarray #{})
|
||||
(update :data blob/decode)
|
||||
(update :data feat.fdata/process-pointers deref)
|
||||
(update :data feat.fdata/process-objects (partial into {}))
|
||||
(update :data assoc :id id)
|
||||
(cond-> migrate? (fmg/migrate-file libs))))))
|
||||
(def sql:files-with-data
|
||||
"SELECT f.id,
|
||||
f.project_id,
|
||||
f.created_at,
|
||||
f.modified_at,
|
||||
f.deleted_at,
|
||||
f.name,
|
||||
f.is_shared,
|
||||
f.has_media_trimmed,
|
||||
f.revn,
|
||||
f.data AS legacy_data,
|
||||
f.ignore_sync_until,
|
||||
f.comment_thread_seqn,
|
||||
f.features,
|
||||
f.version,
|
||||
f.vern,
|
||||
p.team_id,
|
||||
coalesce(fd.backend, 'legacy-db') AS backend,
|
||||
fd.metadata AS metadata,
|
||||
fd.data AS data
|
||||
FROM file AS f
|
||||
LEFT JOIN file_data AS fd ON (fd.file_id = f.id AND fd.id = f.id)
|
||||
INNER JOIN project AS p ON (p.id = f.project_id)")
|
||||
|
||||
(def sql:get-file
|
||||
(str sql:files-with-data " WHERE f.id = ?"))
|
||||
|
||||
(def sql:get-file-without-data
|
||||
(str "WITH files AS (" sql:files-with-data ")"
|
||||
"SELECT f.id,
|
||||
f.project_id,
|
||||
f.created_at,
|
||||
f.modified_at,
|
||||
f.deleted_at,
|
||||
f.name,
|
||||
f.is_shared,
|
||||
f.has_media_trimmed,
|
||||
f.revn,
|
||||
f.ignore_sync_until,
|
||||
f.comment_thread_seqn,
|
||||
f.features,
|
||||
f.version,
|
||||
f.vern,
|
||||
f.team_id
|
||||
FROM files AS f
|
||||
WHERE f.id = ?"))
|
||||
|
||||
(defn- migrate-file
|
||||
[{:keys [::db/conn] :as cfg} {:keys [read-only?]} {:keys [id] :as file}]
|
||||
(binding [pmap/*load-fn* (partial fdata/load-pointer cfg id)
|
||||
pmap/*tracked* (pmap/create-tracked)]
|
||||
(let [libs (delay (get-resolved-file-libraries cfg file))
|
||||
;; To avoid the unnecessary overhead of creating multiple
;; pointers and of handling the objects map internally in the
;; worst case (when probably all shapes and all pointers
;; will be read anyway), we just realize/resolve them
;; before applying the migration to the file.
|
||||
file (-> (fdata/realize cfg file)
|
||||
(fmg/migrate-file libs))]
|
||||
|
||||
(if (or read-only? (db/read-only? conn))
|
||||
file
|
||||
(do ;; When the file is migrated, we break the rule of not
    ;; performing mutations on get operations and update the
    ;; file with all migrations applied
|
||||
(update-file! cfg file)
|
||||
(fmigr/resolve-applied-migrations cfg file))))))
|
||||
|
||||
(defn- get-file*
|
||||
[{:keys [::db/conn] :as cfg} id
|
||||
{:keys [migrate?
|
||||
realize?
|
||||
decode?
|
||||
skip-locked?
|
||||
include-deleted?
|
||||
load-data?
|
||||
throw-if-not-exists?
|
||||
lock-for-update?
|
||||
lock-for-share?]
|
||||
:or {lock-for-update? false
|
||||
lock-for-share? false
|
||||
load-data? true
|
||||
migrate? true
|
||||
decode? true
|
||||
include-deleted? false
|
||||
throw-if-not-exists? true
|
||||
realize? false}
|
||||
:as options}]
|
||||
|
||||
(assert (db/connection? conn) "expected cfg with valid connection")
|
||||
(when (and (not load-data?)
|
||||
(or lock-for-update? lock-for-share? skip-locked?))
|
||||
(throw (IllegalArgumentException. "locking is incompatible when `load-data?` is false")))
|
||||
|
||||
(let [sql
|
||||
(if load-data?
|
||||
sql:get-file
|
||||
sql:get-file-without-data)
|
||||
|
||||
sql
|
||||
(cond
|
||||
lock-for-update?
|
||||
(str sql " FOR UPDATE of f")
|
||||
|
||||
lock-for-share?
|
||||
(str sql " FOR SHARE of f")
|
||||
|
||||
:else
|
||||
sql)
|
||||
|
||||
sql
|
||||
(if skip-locked?
|
||||
(str sql " SKIP LOCKED")
|
||||
sql)
|
||||
|
||||
file
|
||||
(db/get-with-sql conn [sql id]
|
||||
{::db/throw-if-not-exists false
|
||||
::db/remove-deleted (not include-deleted?)})
|
||||
|
||||
file
|
||||
(-> file
|
||||
(d/update-when :features db/decode-pgarray #{})
|
||||
(d/update-when :metadata fdata/decode-metadata))]
|
||||
|
||||
(if file
|
||||
(if load-data?
|
||||
(let [file
|
||||
(->> file
|
||||
(fmigr/resolve-applied-migrations cfg)
|
||||
(fdata/resolve-file-data cfg))
|
||||
|
||||
will-migrate?
|
||||
(and migrate? (fmg/need-migration? file))]
|
||||
|
||||
(if decode?
|
||||
(cond->> (fdata/decode-file-data cfg file)
|
||||
(and realize? (not will-migrate?))
|
||||
(fdata/realize cfg)
|
||||
|
||||
will-migrate?
|
||||
(migrate-file cfg options))
|
||||
|
||||
file))
|
||||
file)
|
||||
|
||||
(when-not (or skip-locked? (not throw-if-not-exists?))
|
||||
(ex/raise :type :not-found
|
||||
:code :object-not-found
|
||||
:hint "database object not found"
|
||||
:table :file
|
||||
:file-id id)))))
|
||||
|
||||
(defn get-file
|
||||
"Get file, resolve all features and apply migrations.
|
||||
@@ -177,10 +319,7 @@
|
||||
operations on the file, because it removes the overhead of lazy fetching
|
||||
and decoding."
|
||||
[cfg file-id & {:as opts}]
|
||||
(db/run! cfg (fn [{:keys [::db/conn] :as cfg}]
|
||||
(some->> (db/get* conn :file {:id file-id}
|
||||
(assoc opts ::db/remove-deleted false))
|
||||
(decode-file cfg)))))
|
||||
(db/run! cfg get-file* file-id opts))
|
||||
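;; A brief usage sketch (hypothetical cfg and file-id values, not part of the
;; change above): get-file forwards its keyword options to get-file*, so the
;; option flags shown there can be combined at the call site; the locking
;; flags are only valid while load-data? keeps its default of true.
(comment
  ;; plain read: data loaded, decoded and migrated
  (get-file cfg file-id)

  ;; lock the row for update and keep soft-deleted files visible
  (get-file cfg file-id :lock-for-update? true :include-deleted? true)

  ;; metadata only, without loading or decoding file data
  (get-file cfg file-id :load-data? false))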
|
||||
(defn clean-file-features
|
||||
[file]
|
||||
@@ -204,12 +343,12 @@
|
||||
(let [conn (db/get-connection cfg)
|
||||
ids (db/create-array conn "uuid" ids)]
|
||||
(->> (db/exec! conn [sql:get-teams ids])
|
||||
(map decode-row))))
|
||||
(map decode-row-features))))
|
||||
|
||||
(defn get-team
|
||||
[cfg team-id]
|
||||
(-> (db/get cfg :team {:id team-id})
|
||||
(decode-row)))
|
||||
(decode-row-features)))
|
||||
|
||||
(defn get-fonts
|
||||
[cfg team-id]
|
||||
@@ -301,7 +440,6 @@
|
||||
(do
|
||||
(l/trc :hint "lookup index"
|
||||
:file-id (str file-id)
|
||||
:snap-id (str (:snapshot-id file))
|
||||
:id (str id)
|
||||
:result (str (get mobj :id)))
|
||||
(get mobj :id))
|
||||
@@ -318,7 +456,6 @@
|
||||
(doseq [[old-id item] missing-index]
|
||||
(l/dbg :hint "create missing references"
|
||||
:file-id (str file-id)
|
||||
:snap-id (str (:snapshot-id file))
|
||||
:old-id (str old-id)
|
||||
:id (str (:id item)))
|
||||
(db/insert! conn :file-media-object item
|
||||
@@ -329,12 +466,16 @@
|
||||
(def sql:get-file-media
|
||||
"SELECT * FROM file_media_object WHERE id = ANY(?)")
|
||||
|
||||
(defn get-file-media*
|
||||
[{:keys [::db/conn] :as cfg} {:keys [data id] :as file}]
|
||||
(let [used (cfh/collect-used-media data)
|
||||
used (db/create-array conn "uuid" used)]
|
||||
(->> (db/exec! conn [sql:get-file-media used])
|
||||
(mapv (fn [row] (assoc row :file-id id))))))
|
||||
|
||||
(defn get-file-media
|
||||
[cfg {:keys [data] :as file}]
|
||||
(db/run! cfg (fn [{:keys [::db/conn]}]
|
||||
(let [used (cfh/collect-used-media data)
|
||||
used (db/create-array conn "uuid" used)]
|
||||
(db/exec! conn [sql:get-file-media used])))))
|
||||
[cfg file]
|
||||
(db/run! cfg get-file-media* file))
|
||||
|
||||
(def ^:private sql:get-team-files-ids
|
||||
"SELECT f.id FROM file AS f
|
||||
@@ -409,7 +550,7 @@
|
||||
[cfg data file-id]
|
||||
(let [library-ids (get-libraries cfg [file-id])]
|
||||
(reduce (fn [data library-id]
|
||||
(if-let [library (get-file cfg library-id)]
|
||||
(if-let [library (get-file cfg library-id :include-deleted? true)]
|
||||
(ctf/absorb-assets data (:data library))
|
||||
data))
|
||||
data
|
||||
@@ -421,6 +562,27 @@
|
||||
(db/exec-one! conn ["SET LOCAL idle_in_transaction_session_timeout = 0"])
|
||||
(db/exec-one! conn ["SET CONSTRAINTS ALL DEFERRED"])))
|
||||
|
||||
(defn invalidate-thumbnails
|
||||
[cfg file-id]
|
||||
(let [storage (sto/resolve cfg)
|
||||
|
||||
sql-1
|
||||
(str "update file_tagged_object_thumbnail "
|
||||
" set deleted_at = now() "
|
||||
" where file_id=? returning media_id")
|
||||
|
||||
sql-2
|
||||
(str "update file_thumbnail "
|
||||
" set deleted_at = now() "
|
||||
" where file_id=? returning media_id")]
|
||||
|
||||
(run! #(sto/touch-object! storage %)
|
||||
(sequence
|
||||
(keep :media-id)
|
||||
(concat
|
||||
(db/exec! cfg [sql-1 file-id])
|
||||
(db/exec! cfg [sql-2 file-id]))))))
|
||||
|
||||
(defn process-file
|
||||
[cfg {:keys [id] :as file}]
|
||||
(let [libs (delay (get-resolved-file-libraries cfg file))]
|
||||
@@ -444,78 +606,102 @@
|
||||
;; all of them, not only the applied
|
||||
(vary-meta dissoc ::fmg/migrated))))
|
||||
|
||||
(defn encode-file
|
||||
[{:keys [::db/conn] :as cfg} {:keys [id features] :as file}]
|
||||
(let [file (if (contains? features "fdata/objects-map")
|
||||
(feat.fdata/enable-objects-map file)
|
||||
(defn- encode-file
|
||||
[cfg {:keys [id features] :as file}]
|
||||
(let [file (if (and (contains? features "fdata/objects-map")
|
||||
(:data file))
|
||||
(fdata/enable-objects-map file)
|
||||
file)
|
||||
|
||||
file (if (contains? features "fdata/pointer-map")
|
||||
(binding [pmap/*tracked* (pmap/create-tracked)]
|
||||
(let [file (feat.fdata/enable-pointer-map file)]
|
||||
(feat.fdata/persist-pointers! cfg id)
|
||||
file (if (and (contains? features "fdata/pointer-map")
|
||||
(:data file))
|
||||
|
||||
(binding [pmap/*tracked* (pmap/create-tracked :inherit true)]
|
||||
(let [file (fdata/enable-pointer-map file)]
|
||||
(fdata/persist-pointers! cfg id)
|
||||
file))
|
||||
file)]
|
||||
|
||||
(-> file
|
||||
(update :features db/encode-pgarray conn "text")
|
||||
(update :data blob/encode))))
|
||||
(d/update-when :features into-array)
|
||||
(d/update-when :data blob/encode))))
|
||||
|
||||
(defn get-params-from-file
|
||||
(defn- file->params
|
||||
[file]
|
||||
(let [params {:has-media-trimmed (:has-media-trimmed file)
|
||||
:ignore-sync-until (:ignore-sync-until file)
|
||||
:project-id (:project-id file)
|
||||
:features (:features file)
|
||||
:name (:name file)
|
||||
:is-shared (:is-shared file)
|
||||
:version (:version file)
|
||||
:data (:data file)
|
||||
:id (:id file)
|
||||
:deleted-at (:deleted-at file)
|
||||
:created-at (:created-at file)
|
||||
:modified-at (:modified-at file)
|
||||
:revn (:revn file)
|
||||
:vern (:vern file)}]
|
||||
(-> (select-keys file file-attrs)
|
||||
(assoc :data nil)
|
||||
(dissoc :team-id)
|
||||
(dissoc :migrations)))
|
||||
|
||||
(-> (d/without-nils params)
|
||||
(assoc :data-backend nil)
|
||||
(assoc :data-ref-id nil))))
|
||||
(defn- file->file-data-params
|
||||
[{:keys [id] :as file} & {:as opts}]
|
||||
(let [created-at (or (:created-at file) (ct/now))
|
||||
modified-at (or (:modified-at file) created-at)]
|
||||
(d/without-nils
|
||||
{:id id
|
||||
:type "main"
|
||||
:file-id id
|
||||
:data (:data file)
|
||||
:metadata (:metadata file)
|
||||
:created-at created-at
|
||||
:modified-at modified-at})))
|
||||
|
||||
(defn insert-file!
|
||||
"Insert a new file into the database table"
|
||||
"Insert a new file into the database table. Expectes a not-encoded file.
|
||||
Returns nil."
|
||||
[{:keys [::db/conn] :as cfg} file & {:as opts}]
|
||||
(feat.fmigr/upsert-migrations! conn file)
|
||||
(let [params (-> (encode-file cfg file)
|
||||
(get-params-from-file))]
|
||||
(db/insert! conn :file params opts)))
|
||||
(db/exec-one! conn ["SET CONSTRAINTS ALL DEFERRED"])
|
||||
|
||||
(when (:migrations file)
|
||||
(fmigr/upsert-migrations! conn file))
|
||||
|
||||
(let [file (encode-file cfg file)]
|
||||
(db/insert! conn :file
|
||||
(file->params file)
|
||||
(assoc opts ::db/return-keys false))
|
||||
|
||||
(->> (file->file-data-params file)
|
||||
(fdata/upsert! cfg))
|
||||
|
||||
nil))
|
||||
|
||||
(defn update-file!
|
||||
"Update an existing file on the database."
|
||||
[{:keys [::db/conn ::sto/storage] :as cfg} {:keys [id] :as file} & {:as opts}]
|
||||
(let [file (encode-file cfg file)
|
||||
params (-> (get-params-from-file file)
|
||||
(dissoc :id))]
|
||||
"Update an existing file on the database. Expects not encoded file."
|
||||
[{:keys [::db/conn] :as cfg} {:keys [id] :as file} & {:as opts}]
|
||||
|
||||
;; If file was already offloaded, we touch the underlying storage
|
||||
;; object for properly trigger storage-gc-touched task
|
||||
(when (feat.fdata/offloaded? file)
|
||||
(some->> (:data-ref-id file) (sto/touch-object! storage)))
|
||||
(if (::reset-migrations? opts false)
|
||||
(fmigr/reset-migrations! conn file)
|
||||
(fmigr/upsert-migrations! conn file))
|
||||
|
||||
(feat.fmigr/upsert-migrations! conn file)
|
||||
(db/update! conn :file params {:id id} opts)))
|
||||
(let [file
|
||||
(encode-file cfg file)
|
||||
|
||||
file-params
|
||||
(file->params (dissoc file :id))
|
||||
|
||||
file-data-params
|
||||
(file->file-data-params file)]
|
||||
|
||||
(db/update! conn :file file-params
|
||||
{:id id}
|
||||
{::db/return-keys false})
|
||||
|
||||
(fdata/upsert! cfg file-data-params)
|
||||
nil))
|
||||
|
||||
(defn save-file!
|
||||
"Applies all the final validations and perist the file, binfile
|
||||
specific, should not be used outside of binfile domain"
|
||||
specific, should not be used outside of binfile domain.
|
||||
Returns nil"
|
||||
[{:keys [::timestamp] :as cfg} file & {:as opts}]
|
||||
|
||||
(assert (dt/instant? timestamp) "expected valid timestamp")
|
||||
(assert (ct/inst? timestamp) "expected valid timestamp")
|
||||
|
||||
(let [file (-> file
|
||||
(assoc :created-at timestamp)
|
||||
(assoc :modified-at timestamp)
|
||||
(assoc :ignore-sync-until (dt/plus timestamp (dt/duration {:seconds 5})))
|
||||
(cond-> (not (::overwrite cfg))
|
||||
(assoc :ignore-sync-until (ct/plus timestamp (ct/duration {:seconds 5}))))
|
||||
(update :features
|
||||
(fn [features]
|
||||
(-> (::features cfg #{})
|
||||
@@ -532,8 +718,9 @@
|
||||
(when (ex/exception? result)
|
||||
(l/error :hint "file schema validation error" :cause result))))
|
||||
|
||||
(insert-file! cfg file opts)))
|
||||
|
||||
(if (::overwrite cfg)
|
||||
(update-file! cfg file (assoc opts ::reset-migrations? true))
|
||||
(insert-file! cfg file opts))))
|
||||
|
||||
(def ^:private sql:get-file-libraries
|
||||
"WITH RECURSIVE libs AS (
|
||||
@@ -558,10 +745,11 @@
|
||||
l.revn,
|
||||
l.vern,
|
||||
l.synced_at,
|
||||
l.is_shared
|
||||
l.is_shared,
|
||||
l.version
|
||||
FROM libs AS l
|
||||
INNER JOIN project AS p ON (p.id = l.project_id)
|
||||
WHERE l.deleted_at IS NULL OR l.deleted_at > now();")
|
||||
WHERE l.deleted_at IS NULL;")
|
||||
|
||||
(defn get-file-libraries
|
||||
[conn file-id]
|
||||
@@ -570,12 +758,22 @@
|
||||
;; FIXME: :is-indirect set to false on all rows looks
;; completely useless
|
||||
(map #(assoc % :is-indirect false))
|
||||
(map decode-row))
|
||||
(map decode-row-features))
|
||||
(db/exec! conn [sql:get-file-libraries file-id])))
|
||||
|
||||
(defn get-resolved-file-libraries
|
||||
"A helper for preload file libraries"
|
||||
[{:keys [::db/conn] :as cfg} file]
|
||||
(->> (get-file-libraries conn (:id file))
|
||||
(into [file] (map #(get-file cfg (:id %))))
|
||||
(d/index-by :id)))
|
||||
"Get all file libraries including itself. Returns an instance of
|
||||
LoadableWeakValueMap that allows do not have strong references to
|
||||
the loaded libraries and reduce possible memory pressure on having
|
||||
all this libraries loaded at same time on processing file validation
|
||||
or file migration.
|
||||
|
||||
This still requires at least one library at time to be loaded while
|
||||
access to it is performed, but it improves considerable not having
|
||||
the need of loading all the libraries at the same time."
|
||||
[{:keys [::db/conn] :as cfg} {:keys [id] :as file}]
|
||||
(let [library-ids (->> (get-file-libraries conn (:id file))
|
||||
(map :id)
|
||||
(cons (:id file)))
|
||||
load-fn #(get-file cfg % :migrate? false)]
|
||||
(weak/loadable-weak-value-map library-ids load-fn {id file})))
|
||||
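;; A small illustrative sketch (assumes the returned LoadableWeakValueMap
;; supports map-style lookup, as its previous plain-map counterpart did):
;; libraries are loaded lazily through get-file on first access and held only
;; weakly afterwards, so untouched libraries never need to be materialized.
(comment
  (let [libs (get-resolved-file-libraries cfg file)]
    ;; the file itself is seeded eagerly under its own id
    (get libs (:id file))
    ;; a referenced library is loaded on demand (and may be collected later)
    (get libs some-library-id)))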
|
||||
@@ -36,11 +36,6 @@
|
||||
"fdata/shape-data-type"
|
||||
nil
|
||||
|
||||
;; There is no migration needed, but we don't want to allow
;; copy-paste or import of variant files into non-variant teams
|
||||
"variants/v1"
|
||||
nil
|
||||
|
||||
(ex/raise :type :internal
|
||||
:code :no-migration-defined
|
||||
:hint (str/ffmt "no migration for feature '%' on file importation" feature)
|
||||
|
||||
@@ -17,6 +17,7 @@
|
||||
[app.common.fressian :as fres]
|
||||
[app.common.logging :as l]
|
||||
[app.common.spec :as us]
|
||||
[app.common.time :as ct]
|
||||
[app.common.types.file :as ctf]
|
||||
[app.common.uuid :as uuid]
|
||||
[app.config :as cf]
|
||||
@@ -30,7 +31,6 @@
|
||||
[app.storage.tmp :as tmp]
|
||||
[app.tasks.file-gc]
|
||||
[app.util.events :as events]
|
||||
[app.util.time :as dt]
|
||||
[app.worker :as-alias wrk]
|
||||
[clojure.java.io :as jio]
|
||||
[clojure.set :as set]
|
||||
@@ -346,7 +346,7 @@
|
||||
thumbnails (->> (bfc/get-file-object-thumbnails cfg file-id)
|
||||
(mapv #(dissoc % :file-id)))
|
||||
|
||||
file (cond-> (bfc/get-file cfg file-id)
|
||||
file (cond-> (bfc/get-file cfg file-id :realize? true)
|
||||
detach?
|
||||
(-> (ctf/detach-external-references file-id)
|
||||
(dissoc :libraries))
|
||||
@@ -434,7 +434,7 @@
|
||||
(defn read-import!
|
||||
"Do the importation of the specified resource in penpot custom binary
|
||||
format."
|
||||
[{:keys [::bfc/input ::bfc/timestamp] :or {timestamp (dt/now)} :as options}]
|
||||
[{:keys [::bfc/input ::bfc/timestamp] :or {timestamp (ct/now)} :as options}]
|
||||
|
||||
(dm/assert!
|
||||
"expected input stream"
|
||||
@@ -442,7 +442,7 @@
|
||||
|
||||
(dm/assert!
|
||||
"expected valid instant"
|
||||
(dt/instant? timestamp))
|
||||
(ct/inst? timestamp))
|
||||
|
||||
(let [version (read-header! input)]
|
||||
(read-import (assoc options ::version version ::bfc/timestamp timestamp))))
|
||||
@@ -682,7 +682,7 @@
|
||||
(io/coercible? output))
|
||||
|
||||
(let [id (uuid/next)
|
||||
tp (dt/tpoint)
|
||||
tp (ct/tpoint)
|
||||
ab (volatile! false)
|
||||
cs (volatile! nil)]
|
||||
(try
|
||||
@@ -720,7 +720,7 @@
|
||||
(satisfies? jio/IOFactory input))
|
||||
|
||||
(let [id (uuid/next)
|
||||
tp (dt/tpoint)
|
||||
tp (ct/tpoint)
|
||||
cs (volatile! nil)]
|
||||
|
||||
(l/info :hint "import: started" :id (str id))
|
||||
@@ -742,6 +742,6 @@
|
||||
(finally
|
||||
(l/info :hint "import: terminated"
|
||||
:id (str id)
|
||||
:elapsed (dt/format-duration (tp))
|
||||
:elapsed (ct/format-duration (tp))
|
||||
:error? (some? @cs))))))
|
||||
|
||||
|
||||
@@ -13,6 +13,7 @@
|
||||
[app.common.data :as d]
|
||||
[app.common.features :as cfeat]
|
||||
[app.common.logging :as l]
|
||||
[app.common.time :as ct]
|
||||
[app.common.transit :as t]
|
||||
[app.common.uuid :as uuid]
|
||||
[app.config :as cf]
|
||||
@@ -23,7 +24,6 @@
|
||||
[app.storage :as sto]
|
||||
[app.storage.tmp :as tmp]
|
||||
[app.util.events :as events]
|
||||
[app.util.time :as dt]
|
||||
[app.worker :as-alias wrk]
|
||||
[clojure.set :as set]
|
||||
[cuerdas.core :as str]
|
||||
@@ -153,7 +153,7 @@
|
||||
|
||||
(defn- write-file!
|
||||
[cfg file-id]
|
||||
(let [file (bfc/get-file cfg file-id)
|
||||
(let [file (bfc/get-file cfg file-id :realize? true)
|
||||
thumbs (bfc/get-file-object-thumbnails cfg file-id)
|
||||
media (bfc/get-file-media cfg file)
|
||||
rels (bfc/get-files-rels cfg #{file-id})]
|
||||
@@ -344,7 +344,7 @@
|
||||
(defn export-team!
|
||||
[cfg team-id]
|
||||
(let [id (uuid/next)
|
||||
tp (dt/tpoint)
|
||||
tp (ct/tpoint)
|
||||
cfg (create-database cfg)]
|
||||
|
||||
(l/inf :hint "start"
|
||||
@@ -378,15 +378,15 @@
|
||||
(l/inf :hint "end"
|
||||
:operation "export"
|
||||
:id (str id)
|
||||
:elapsed (dt/format-duration elapsed)))))))
|
||||
:elapsed (ct/format-duration elapsed)))))))
|
||||
|
||||
(defn import-team!
|
||||
[cfg path]
|
||||
(let [id (uuid/next)
|
||||
tp (dt/tpoint)
|
||||
tp (ct/tpoint)
|
||||
|
||||
cfg (-> (create-database cfg path)
|
||||
(assoc ::bfc/timestamp (dt/now)))]
|
||||
(assoc ::bfc/timestamp (ct/now)))]
|
||||
|
||||
(l/inf :hint "start"
|
||||
:operation "import"
|
||||
@@ -434,4 +434,4 @@
|
||||
(l/inf :hint "end"
|
||||
:operation "import"
|
||||
:id (str id)
|
||||
:elapsed (dt/format-duration elapsed)))))))
|
||||
:elapsed (ct/format-duration elapsed)))))))
|
||||
|
||||
@@ -20,13 +20,14 @@
|
||||
[app.common.media :as cmedia]
|
||||
[app.common.schema :as sm]
|
||||
[app.common.thumbnails :as cth]
|
||||
[app.common.time :as ct]
|
||||
[app.common.types.color :as ctcl]
|
||||
[app.common.types.component :as ctc]
|
||||
[app.common.types.file :as ctf]
|
||||
[app.common.types.page :as ctp]
|
||||
[app.common.types.plugins :as ctpg]
|
||||
[app.common.types.shape :as cts]
|
||||
[app.common.types.tokens-lib :as cto]
|
||||
[app.common.types.tokens-lib :as ctob]
|
||||
[app.common.types.typography :as cty]
|
||||
[app.common.uuid :as uuid]
|
||||
[app.config :as cf]
|
||||
@@ -35,14 +36,15 @@
|
||||
[app.storage :as sto]
|
||||
[app.storage.impl :as sto.impl]
|
||||
[app.util.events :as events]
|
||||
[app.util.time :as dt]
|
||||
[clojure.java.io :as jio]
|
||||
[cuerdas.core :as str]
|
||||
[datoteka.fs :as fs]
|
||||
[datoteka.io :as io])
|
||||
(:import
|
||||
java.io.File
|
||||
java.io.InputStream
|
||||
java.io.OutputStreamWriter
|
||||
java.lang.AutoCloseable
|
||||
java.util.zip.ZipEntry
|
||||
java.util.zip.ZipFile
|
||||
java.util.zip.ZipOutputStream))
|
||||
@@ -92,7 +94,7 @@
|
||||
|
||||
(defn- default-now
|
||||
[o]
|
||||
(or o (dt/now)))
|
||||
(or o (ct/now)))
|
||||
|
||||
;; --- ENCODERS
|
||||
|
||||
@@ -103,25 +105,25 @@
|
||||
(sm/encoder ctp/schema:page sm/json-transformer))
|
||||
|
||||
(def encode-shape
|
||||
(sm/encoder ::cts/shape sm/json-transformer))
|
||||
(sm/encoder cts/schema:shape sm/json-transformer))
|
||||
|
||||
(def encode-media
|
||||
(sm/encoder ::ctf/media sm/json-transformer))
|
||||
(sm/encoder ctf/schema:media sm/json-transformer))
|
||||
|
||||
(def encode-component
|
||||
(sm/encoder ::ctc/component sm/json-transformer))
|
||||
(sm/encoder ctc/schema:component sm/json-transformer))
|
||||
|
||||
(def encode-color
|
||||
(sm/encoder ctcl/schema:library-color sm/json-transformer))
|
||||
|
||||
(def encode-typography
|
||||
(sm/encoder ::cty/typography sm/json-transformer))
|
||||
(sm/encoder cty/schema:typography sm/json-transformer))
|
||||
|
||||
(def encode-tokens-lib
|
||||
(sm/encoder ::cto/tokens-lib sm/json-transformer))
|
||||
(sm/encoder ctob/schema:tokens-lib sm/json-transformer))
|
||||
|
||||
(def encode-plugin-data
|
||||
(sm/encoder ::ctpg/plugin-data sm/json-transformer))
|
||||
(sm/encoder ctpg/schema:plugin-data sm/json-transformer))
|
||||
|
||||
(def encode-storage-object
|
||||
(sm/encoder schema:storage-object sm/json-transformer))
|
||||
@@ -138,7 +140,7 @@
|
||||
(sm/decoder ctf/schema:media sm/json-transformer))
|
||||
|
||||
(def decode-component
|
||||
(sm/decoder ::ctc/component sm/json-transformer))
|
||||
(sm/decoder ctc/schema:component sm/json-transformer))
|
||||
|
||||
(def decode-color
|
||||
(sm/decoder ctcl/schema:library-color sm/json-transformer))
|
||||
@@ -147,19 +149,19 @@
|
||||
(sm/decoder schema:file sm/json-transformer))
|
||||
|
||||
(def decode-page
|
||||
(sm/decoder ::ctp/page sm/json-transformer))
|
||||
(sm/decoder ctp/schema:page sm/json-transformer))
|
||||
|
||||
(def decode-shape
|
||||
(sm/decoder ::cts/shape sm/json-transformer))
|
||||
(sm/decoder cts/schema:shape sm/json-transformer))
|
||||
|
||||
(def decode-typography
|
||||
(sm/decoder ::cty/typography sm/json-transformer))
|
||||
(sm/decoder cty/schema:typography sm/json-transformer))
|
||||
|
||||
(def decode-tokens-lib
|
||||
(sm/decoder cto/schema:tokens-lib sm/json-transformer))
|
||||
(sm/decoder ctob/schema:tokens-lib sm/json-transformer))
|
||||
|
||||
(def decode-plugin-data
|
||||
(sm/decoder ::ctpg/plugin-data sm/json-transformer))
|
||||
(sm/decoder ctpg/schema:plugin-data sm/json-transformer))
|
||||
|
||||
(def decode-storage-object
|
||||
(sm/decoder schema:storage-object sm/json-transformer))
|
||||
@@ -173,31 +175,31 @@
|
||||
(sm/check-fn schema:manifest))
|
||||
|
||||
(def validate-file
|
||||
(sm/check-fn ::ctf/file))
|
||||
(sm/check-fn ctf/schema:file))
|
||||
|
||||
(def validate-page
|
||||
(sm/check-fn ::ctp/page))
|
||||
(sm/check-fn ctp/schema:page))
|
||||
|
||||
(def validate-shape
|
||||
(sm/check-fn ::cts/shape))
|
||||
(sm/check-fn cts/schema:shape))
|
||||
|
||||
(def validate-media
|
||||
(sm/check-fn ::ctf/media))
|
||||
(sm/check-fn ctf/schema:media))
|
||||
|
||||
(def validate-color
|
||||
(sm/check-fn ctcl/schema:library-color))
|
||||
|
||||
(def validate-component
|
||||
(sm/check-fn ::ctc/component))
|
||||
(sm/check-fn ctc/schema:component))
|
||||
|
||||
(def validate-typography
|
||||
(sm/check-fn ::cty/typography))
|
||||
(sm/check-fn cty/schema:typography))
|
||||
|
||||
(def validate-tokens-lib
|
||||
(sm/check-fn ::cto/tokens-lib))
|
||||
(sm/check-fn ctob/schema:tokens-lib))
|
||||
|
||||
(def validate-plugin-data
|
||||
(sm/check-fn ::ctpg/plugin-data))
|
||||
(sm/check-fn ctpg/schema:plugin-data))
|
||||
|
||||
(def validate-storage-object
|
||||
(sm/check-fn schema:storage-object))
|
||||
@@ -222,9 +224,12 @@
|
||||
(throw (IllegalArgumentException.
|
||||
"the `include-libraries` and `embed-assets` are mutally excluding options")))
|
||||
|
||||
(let [detach? (and (not embed-assets) (not include-libraries))]
|
||||
(let [detach? (and (not embed-assets) (not include-libraries))]
|
||||
(db/tx-run! cfg (fn [cfg]
|
||||
(cond-> (bfc/get-file cfg file-id {::sql/for-update true})
|
||||
(cond-> (bfc/get-file cfg file-id
|
||||
{:realize? true
|
||||
:include-deleted? true
|
||||
:lock-for-update? true})
|
||||
detach?
|
||||
(-> (ctf/detach-external-references file-id)
|
||||
(dissoc :libraries))
|
||||
@@ -250,10 +255,12 @@
|
||||
|
||||
(write-entry! output path params)
|
||||
|
||||
(events/tap :progress {:section :storage-object :id id})
|
||||
|
||||
(with-open [input (sto/get-object-data storage sobject)]
|
||||
(.putNextEntry output (ZipEntry. (str "objects/" id ext)))
|
||||
(.putNextEntry ^ZipOutputStream output (ZipEntry. (str "objects/" id ext)))
|
||||
(io/copy input output :size (:size sobject))
|
||||
(.closeEntry output))))))
|
||||
(.closeEntry ^ZipOutputStream output))))))
|
||||
|
||||
(defn- export-file
|
||||
[{:keys [::file-id ::output] :as cfg}]
|
||||
@@ -274,6 +281,8 @@
|
||||
|
||||
thumbnails (bfc/get-file-object-thumbnails cfg file-id)]
|
||||
|
||||
(events/tap :progress {:section :file :id file-id})
|
||||
|
||||
(vswap! bfc/*state* update :files assoc file-id
|
||||
{:id file-id
|
||||
:name (:name file)
|
||||
@@ -281,13 +290,13 @@
|
||||
|
||||
(let [file (cond-> (select-keys file bfc/file-attrs)
|
||||
(:options data)
|
||||
(assoc :options (:options data))
|
||||
(assoc :options (:options data)))
|
||||
|
||||
:always
|
||||
(dissoc :data)
|
||||
file (-> file
|
||||
(dissoc :data)
|
||||
(dissoc :deleted-at)
|
||||
(encode-file))
|
||||
|
||||
:always
|
||||
(encode-file))
|
||||
path (str "files/" file-id ".json")]
|
||||
(write-entry! output path file))
|
||||
|
||||
@@ -345,7 +354,8 @@
|
||||
typography (encode-typography object)]
|
||||
(write-entry! output path typography)))
|
||||
|
||||
(when tokens-lib
|
||||
(when (and tokens-lib
|
||||
(not (ctob/empty-lib? tokens-lib)))
|
||||
(let [path (str "files/" file-id "/tokens.json")
|
||||
encoded-tokens (encode-tokens-lib tokens-lib)]
|
||||
(write-entry! output path encoded-tokens)))))
|
||||
@@ -445,7 +455,7 @@
|
||||
(defn- read-manifest
|
||||
[^ZipFile input]
|
||||
(let [entry (get-zip-entry input "manifest.json")]
|
||||
(with-open [reader (zip-entry-reader input entry)]
|
||||
(with-open [^AutoCloseable reader (zip-entry-reader input entry)]
|
||||
(let [manifest (json/read reader :key-fn json/read-kebab-key)]
|
||||
(decode-manifest manifest)))))
|
||||
|
||||
@@ -535,24 +545,27 @@
|
||||
|
||||
(defn- read-entry
|
||||
[^ZipFile input entry]
|
||||
(with-open [reader (zip-entry-reader input entry)]
|
||||
(with-open [^AutoCloseable reader (zip-entry-reader input entry)]
|
||||
(json/read reader :key-fn json/read-kebab-key)))
|
||||
|
||||
(defn- read-plain-entry
|
||||
[^ZipFile input entry]
|
||||
(with-open [reader (zip-entry-reader input entry)]
|
||||
(with-open [^AutoCloseable reader (zip-entry-reader input entry)]
|
||||
(json/read reader)))
|
||||
|
||||
(defn- read-file
|
||||
[{:keys [::bfc/input ::file-id]}]
|
||||
[{:keys [::bfc/input ::bfc/timestamp]} file-id]
|
||||
(let [path (str "files/" file-id ".json")
|
||||
entry (get-zip-entry input path)]
|
||||
(-> (read-entry input entry)
|
||||
(decode-file)
|
||||
(update :revn d/nilv 1)
|
||||
(update :created-at d/nilv timestamp)
|
||||
(update :modified-at d/nilv timestamp)
|
||||
(validate-file))))
|
||||
|
||||
(defn- read-file-plugin-data
|
||||
[{:keys [::bfc/input ::file-id]}]
|
||||
[{:keys [::bfc/input]} file-id]
|
||||
(let [path (str "files/" file-id "/plugin-data.json")
|
||||
entry (get-zip-entry* input path)]
|
||||
(some->> entry
|
||||
@@ -561,7 +574,7 @@
|
||||
(validate-plugin-data))))
|
||||
|
||||
(defn- read-file-media
|
||||
[{:keys [::bfc/input ::file-id ::entries]}]
|
||||
[{:keys [::bfc/input ::entries]} file-id]
|
||||
(->> (keep (match-media-entry-fn file-id) entries)
|
||||
(reduce (fn [result {:keys [id entry]}]
|
||||
(let [object (->> (read-entry input entry)
|
||||
@@ -581,7 +594,7 @@
|
||||
(not-empty)))
|
||||
|
||||
(defn- read-file-colors
|
||||
[{:keys [::bfc/input ::file-id ::entries]}]
|
||||
[{:keys [::bfc/input ::entries]} file-id]
|
||||
(->> (keep (match-color-entry-fn file-id) entries)
|
||||
(reduce (fn [result {:keys [id entry]}]
|
||||
(let [object (->> (read-entry input entry)
|
||||
@@ -594,7 +607,7 @@
|
||||
(not-empty)))
|
||||
|
||||
(defn- read-file-components
|
||||
[{:keys [::bfc/input ::file-id ::entries]}]
|
||||
[{:keys [::bfc/input ::entries]} file-id]
|
||||
(let [clean-component-post-decode
|
||||
(fn [component]
|
||||
(d/update-when component :objects
|
||||
@@ -625,7 +638,7 @@
|
||||
(not-empty))))
|
||||
|
||||
(defn- read-file-typographies
|
||||
[{:keys [::bfc/input ::file-id ::entries]}]
|
||||
[{:keys [::bfc/input ::entries]} file-id]
|
||||
(->> (keep (match-typography-entry-fn file-id) entries)
|
||||
(reduce (fn [result {:keys [id entry]}]
|
||||
(let [object (->> (read-entry input entry)
|
||||
@@ -638,14 +651,14 @@
|
||||
(not-empty)))
|
||||
|
||||
(defn- read-file-tokens-lib
|
||||
[{:keys [::bfc/input ::file-id ::entries]}]
|
||||
[{:keys [::bfc/input ::entries]} file-id]
|
||||
(when-let [entry (d/seek (match-tokens-lib-entry-fn file-id) entries)]
|
||||
(->> (read-plain-entry input entry)
|
||||
(decode-tokens-lib)
|
||||
(validate-tokens-lib))))
|
||||
|
||||
(defn- read-file-shapes
|
||||
[{:keys [::bfc/input ::file-id ::page-id ::entries] :as cfg}]
|
||||
[{:keys [::bfc/input ::entries] :as cfg} file-id page-id]
|
||||
(->> (keep (match-shape-entry-fn file-id page-id) entries)
|
||||
(reduce (fn [result {:keys [id entry]}]
|
||||
(let [object (->> (read-entry input entry)
|
||||
@@ -659,15 +672,14 @@
|
||||
(not-empty)))
|
||||
|
||||
(defn- read-file-pages
|
||||
[{:keys [::bfc/input ::file-id ::entries] :as cfg}]
|
||||
[{:keys [::bfc/input ::entries] :as cfg} file-id]
|
||||
(->> (keep (match-page-entry-fn file-id) entries)
|
||||
(keep (fn [{:keys [id entry]}]
|
||||
(let [page (->> (read-entry input entry)
|
||||
(decode-page))
|
||||
page (dissoc page :options)]
|
||||
(when (= id (:id page))
|
||||
(let [objects (-> (assoc cfg ::page-id id)
|
||||
(read-file-shapes))]
|
||||
(let [objects (read-file-shapes cfg file-id id)]
|
||||
(assoc page :objects objects))))))
|
||||
(sort-by :index)
|
||||
(reduce (fn [result {:keys [id] :as page}]
|
||||
@@ -675,7 +687,7 @@
|
||||
(d/ordered-map))))
|
||||
|
||||
(defn- read-file-thumbnails
|
||||
[{:keys [::bfc/input ::file-id ::entries] :as cfg}]
|
||||
[{:keys [::bfc/input ::entries] :as cfg} file-id]
|
||||
(->> (keep (match-thumbnail-entry-fn file-id) entries)
|
||||
(reduce (fn [result {:keys [page-id frame-id tag entry]}]
|
||||
(let [object (->> (read-entry input entry)
|
||||
@@ -690,13 +702,13 @@
|
||||
(not-empty)))
|
||||
|
||||
(defn- read-file-data
|
||||
[cfg]
|
||||
(let [colors (read-file-colors cfg)
|
||||
typographies (read-file-typographies cfg)
|
||||
tokens-lib (read-file-tokens-lib cfg)
|
||||
components (read-file-components cfg)
|
||||
plugin-data (read-file-plugin-data cfg)
|
||||
pages (read-file-pages cfg)]
|
||||
[cfg file-id]
|
||||
(let [colors (read-file-colors cfg file-id)
|
||||
typographies (read-file-typographies cfg file-id)
|
||||
tokens-lib (read-file-tokens-lib cfg file-id)
|
||||
components (read-file-components cfg file-id)
|
||||
plugin-data (read-file-plugin-data cfg file-id)
|
||||
pages (read-file-pages cfg file-id)]
|
||||
{:pages (-> pages keys vec)
|
||||
:pages-index (into {} pages)
|
||||
:colors colors
|
||||
@@ -706,11 +718,11 @@
|
||||
:plugin-data plugin-data}))
|
||||
|
||||
(defn- import-file
|
||||
[{:keys [::bfc/project-id ::file-id ::file-name] :as cfg}]
|
||||
[{:keys [::db/conn ::bfc/project-id] :as cfg} {file-id :id file-name :name}]
|
||||
(let [file-id' (bfc/lookup-index file-id)
|
||||
file (read-file cfg)
|
||||
media (read-file-media cfg)
|
||||
thumbnails (read-file-thumbnails cfg)]
|
||||
file (read-file cfg file-id)
|
||||
media (read-file-media cfg file-id)
|
||||
thumbnails (read-file-thumbnails cfg file-id)]
|
||||
|
||||
(l/dbg :hint "processing file"
|
||||
:id (str file-id')
|
||||
@@ -719,28 +731,50 @@
|
||||
:version (:version file)
|
||||
::l/sync? true)
|
||||
|
||||
(events/tap :progress {:section :file :name file-name})
|
||||
(vswap! bfc/*state* update :index bfc/update-index media :id)
|
||||
|
||||
(when media
|
||||
;; Update index with media
|
||||
(l/dbg :hint "update media index"
|
||||
:file-id (str file-id')
|
||||
:total (count media)
|
||||
::l/sync? true)
|
||||
(events/tap :progress {:section :media :file-id file-id})
|
||||
|
||||
(vswap! bfc/*state* update :index bfc/update-index (map :id media))
|
||||
(vswap! bfc/*state* update :media into media))
|
||||
(doseq [item media]
|
||||
(let [params (-> item
|
||||
(update :id bfc/lookup-index)
|
||||
(assoc :file-id file-id')
|
||||
(d/update-when :media-id bfc/lookup-index)
|
||||
(d/update-when :thumbnail-id bfc/lookup-index))]
|
||||
|
||||
(when thumbnails
|
||||
(l/dbg :hint "update thumbnails index"
|
||||
:file-id (str file-id')
|
||||
:total (count thumbnails)
|
||||
::l/sync? true)
|
||||
(l/dbg :hint "inserting media object"
|
||||
:file-id (str file-id')
|
||||
:id (str (:id params))
|
||||
:media-id (str (:media-id params))
|
||||
:thumbnail-id (str (:thumbnail-id params))
|
||||
:old-id (str (:id item))
|
||||
::l/sync? true)
|
||||
|
||||
(vswap! bfc/*state* update :index bfc/update-index (map :media-id thumbnails))
|
||||
(vswap! bfc/*state* update :thumbnails into thumbnails))
|
||||
(db/insert! conn :file-media-object params
|
||||
::db/on-conflict-do-nothing? (::bfc/overwrite cfg))))
|
||||
|
||||
(let [data (-> (read-file-data cfg)
|
||||
(events/tap :progress {:section :thumbnails :file-id file-id})
|
||||
|
||||
(doseq [item thumbnails]
|
||||
(let [media-id (bfc/lookup-index (:media-id item))
|
||||
object-id (-> (assoc item :file-id file-id')
|
||||
(cth/fmt-object-id))
|
||||
params {:file-id file-id'
|
||||
:object-id object-id
|
||||
:tag (:tag item)
|
||||
:media-id media-id}]
|
||||
|
||||
(l/dbg :hint "inserting object thumbnail"
|
||||
:file-id (str file-id')
|
||||
:media-id (str media-id)
|
||||
::l/sync? true)
|
||||
|
||||
(db/insert! conn :file-tagged-object-thumbnail params
|
||||
::db/on-conflict-do-nothing? true)))
|
||||
|
||||
(events/tap :progress {:section :file :file-id file-id})
|
||||
|
||||
(let [data (-> (read-file-data cfg file-id)
|
||||
(d/without-nils)
|
||||
(assoc :id file-id')
|
||||
(cond-> (:options file)
|
||||
@@ -757,7 +791,7 @@
|
||||
file (ctf/check-file file)]
|
||||
|
||||
(bfm/register-pending-migrations! cfg file)
|
||||
(bfc/save-file! cfg file ::db/return-keys false)
|
||||
(bfc/save-file! cfg file)
|
||||
|
||||
file-id')))
|
||||
|
||||
@@ -787,103 +821,113 @@
|
||||
entries (keep (match-storage-entry-fn) entries)]
|
||||
|
||||
(doseq [{:keys [id entry]} entries]
|
||||
(let [object (->> (read-entry input entry)
|
||||
(decode-storage-object)
|
||||
(validate-storage-object))]
|
||||
(let [object (->> (read-entry input entry)
|
||||
(decode-storage-object)
|
||||
(validate-storage-object))
|
||||
|
||||
(when (not= id (:id object))
|
||||
ext (cmedia/mtype->extension (:content-type object))
|
||||
path (str "objects/" id ext)
|
||||
content (->> path
|
||||
(get-zip-entry input)
|
||||
(zip-entry-storage-content input))]
|
||||
|
||||
(when (not= (:size object) (sto/get-size content))
|
||||
(ex/raise :type :validation
|
||||
:code :inconsistent-penpot-file
|
||||
:hint "the penpot file seems corrupt, found unexpected uuid (storage-object-id)"
|
||||
:expected-id (str id)
|
||||
:found-id (str (:id object))))
|
||||
:hint "found corrupted storage object: size does not match"
|
||||
:path path
|
||||
:expected-size (:size object)
|
||||
:found-size (sto/get-size content)))
|
||||
|
||||
(let [ext (cmedia/mtype->extension (:content-type object))
|
||||
path (str "objects/" id ext)
|
||||
content (->> path
|
||||
(get-zip-entry input)
|
||||
(zip-entry-storage-content input))]
|
||||
|
||||
(when (not= (:size object) (sto/get-size content))
|
||||
(when-let [hash (get object :hash)]
|
||||
(when (not= hash (sto/get-hash content))
|
||||
(ex/raise :type :validation
|
||||
:code :inconsistent-penpot-file
|
||||
:hint "found corrupted storage object: size does not match"
|
||||
:hint "found corrupted storage object: hash does not match"
|
||||
:path path
|
||||
:expected-size (:size object)
|
||||
:found-size (sto/get-size content)))
|
||||
:expected-hash (:hash object)
|
||||
:found-hash (sto/get-hash content))))
|
||||
|
||||
(when-let [hash (get object :hash)]
|
||||
(when (not= hash (sto/get-hash content))
|
||||
(ex/raise :type :validation
|
||||
:code :inconsistent-penpot-file
|
||||
:hint "found corrupted storage object: hash does not match"
|
||||
:path path
|
||||
:expected-hash (:hash object)
|
||||
:found-hash (sto/get-hash content))))
|
||||
(let [params (-> object
|
||||
(dissoc :id :size)
|
||||
(assoc ::sto/content content)
|
||||
(assoc ::sto/deduplicate? true)
|
||||
(assoc ::sto/touched-at timestamp))
|
||||
sobject (sto/put-object! storage params)]
|
||||
|
||||
(let [params (-> object
|
||||
(dissoc :id :size)
|
||||
(assoc ::sto/content content)
|
||||
(assoc ::sto/deduplicate? true)
|
||||
(assoc ::sto/touched-at timestamp))
|
||||
sobject (sto/put-object! storage params)]
|
||||
(l/dbg :hint "persisted storage object"
|
||||
:id (str (:id sobject))
|
||||
:prev-id (str id)
|
||||
:bucket (:bucket params)
|
||||
::l/sync? true)
|
||||
|
||||
(l/dbg :hint "persisted storage object"
|
||||
:id (str (:id sobject))
|
||||
:prev-id (str id)
|
||||
:bucket (:bucket params)
|
||||
::l/sync? true)
|
||||
(vswap! bfc/*state* update :index assoc id (:id sobject)))))))
|
||||
|
||||
(vswap! bfc/*state* update :index assoc id (:id sobject))))))))
|
||||
(defn- import-files*
|
||||
[{:keys [::manifest] :as cfg}]
|
||||
(bfc/disable-database-timeouts! cfg)
|
||||
|
||||
(defn- import-file-media
|
||||
[{:keys [::db/conn] :as cfg}]
|
||||
(events/tap :progress {:section :media})
|
||||
(vswap! bfc/*state* update :index bfc/update-index (:files manifest) :id)
|
||||
|
||||
(doseq [item (:media @bfc/*state*)]
|
||||
(let [params (-> item
|
||||
(update :id bfc/lookup-index)
|
||||
(update :file-id bfc/lookup-index)
|
||||
(d/update-when :media-id bfc/lookup-index)
|
||||
(d/update-when :thumbnail-id bfc/lookup-index))]
|
||||
(import-storage-objects cfg)
|
||||
|
||||
(l/dbg :hint "inserting file media object"
|
||||
:old-id (str (:id item))
|
||||
:id (str (:id params))
|
||||
:file-id (str (:file-id params))
|
||||
::l/sync? true)
|
||||
(let [files (get manifest :files)
|
||||
result (reduce (fn [result {:keys [id] :as file}]
|
||||
(let [name' (get file :name)
|
||||
name' (if (map? name)
|
||||
(get name id)
|
||||
name')
|
||||
file (assoc file :name name')]
|
||||
(conj result (import-file cfg file))))
|
||||
[]
|
||||
files)]
|
||||
|
||||
(db/insert! conn :file-media-object params))))
|
||||
(import-file-relations cfg)
|
||||
(bfm/apply-pending-migrations! cfg)
|
||||
|
||||
(defn- import-file-thumbnails
|
||||
[{:keys [::db/conn] :as cfg}]
|
||||
(events/tap :progress {:section :thumbnails})
|
||||
(doseq [item (:thumbnails @bfc/*state*)]
|
||||
(let [file-id (bfc/lookup-index (:file-id item))
|
||||
media-id (bfc/lookup-index (:media-id item))
|
||||
object-id (-> (assoc item :file-id file-id)
|
||||
(cth/fmt-object-id))
|
||||
params {:file-id file-id
|
||||
:object-id object-id
|
||||
:tag (:tag item)
|
||||
:media-id media-id}]
|
||||
result))
|
||||
|
||||
(l/dbg :hint "inserting file object thumbnail"
|
||||
:file-id (str file-id)
|
||||
:media-id (str media-id)
|
||||
::l/sync? true)
|
||||
(defn- import-file-and-overwrite*
|
||||
[{:keys [::manifest ::bfc/file-id] :as cfg}]
|
||||
|
||||
(db/insert! conn :file-tagged-object-thumbnail params))))
|
||||
(when (not= 1 (count (:files manifest)))
|
||||
(ex/raise :type :validation
|
||||
:code :invalid-condition
|
||||
:hint "unable to perform in-place update with binfile containing more than 1 file"
|
||||
:manifest manifest))
|
||||
|
||||
(bfc/disable-database-timeouts! cfg)
|
||||
|
||||
(let [ref-file (bfc/get-minimal-file cfg file-id ::db/for-update true)
|
||||
file (first (get manifest :files))
|
||||
cfg (assoc cfg ::bfc/overwrite true)]
|
||||
|
||||
(vswap! bfc/*state* update :index assoc (:id file) file-id)
|
||||
|
||||
(binding [bfc/*options* cfg
|
||||
bfc/*reference-file* ref-file]
|
||||
|
||||
(import-storage-objects cfg)
|
||||
(import-file cfg file)
|
||||
|
||||
(bfc/invalidate-thumbnails cfg file-id)
|
||||
(bfm/apply-pending-migrations! cfg)
|
||||
|
||||
[file-id])))
|
||||
|
||||
(defn- import-files
|
||||
[{:keys [::bfc/timestamp ::bfc/input ::bfc/name] :or {timestamp (dt/now)} :as cfg}]
|
||||
[{:keys [::bfc/timestamp ::bfc/input] :or {timestamp (ct/now)} :as cfg}]
|
||||
|
||||
(assert (instance? ZipFile input) "expected zip file")
|
||||
(assert (dt/instant? timestamp) "expected valid instant")
|
||||
(assert (ct/inst? timestamp) "expected valid instant")
|
||||
|
||||
(let [manifest (-> (read-manifest input)
|
||||
(validate-manifest))
|
||||
entries (read-zip-entries input)]
|
||||
entries (read-zip-entries input)
|
||||
cfg (-> cfg
|
||||
(assoc ::entries entries)
|
||||
(assoc ::manifest manifest)
|
||||
(assoc ::bfc/timestamp timestamp))]
|
||||
|
||||
(when-not (= "penpot/export-files" (:type manifest))
|
||||
(ex/raise :type :validation
|
||||
@@ -891,7 +935,6 @@
|
||||
:hint "unexpected type on manifest"
|
||||
:manifest manifest))
|
||||
|
||||
|
||||
;; Check that all files referenced in the manifest are present
|
||||
(doseq [{file-id :id features :features} (:files manifest)]
|
||||
(let [path (str "files/" file-id ".json")]
|
||||
@@ -907,35 +950,10 @@
|
||||
|
||||
(events/tap :progress {:section :manifest})
|
||||
|
||||
(let [index (bfc/update-index (map :id (:files manifest)))
|
||||
state {:media [] :index index}
|
||||
cfg (-> cfg
|
||||
(assoc ::entries entries)
|
||||
(assoc ::manifest manifest)
|
||||
(assoc ::bfc/timestamp timestamp))]
|
||||
|
||||
(binding [bfc/*state* (volatile! state)]
|
||||
(db/tx-run! cfg (fn [cfg]
|
||||
(bfc/disable-database-timeouts! cfg)
|
||||
(let [ids (->> (:files manifest)
|
||||
(reduce (fn [result {:keys [id] :as file}]
|
||||
(let [name' (get file :name)
|
||||
name' (if (map? name)
|
||||
(get name id)
|
||||
name')]
|
||||
(conj result (-> cfg
|
||||
(assoc ::file-id id)
|
||||
(assoc ::file-name name')
|
||||
(import-file)))))
|
||||
[]))]
|
||||
(import-file-relations cfg)
|
||||
(import-storage-objects cfg)
|
||||
(import-file-media cfg)
|
||||
(import-file-thumbnails cfg)
|
||||
|
||||
(bfm/apply-pending-migrations! cfg)
|
||||
|
||||
ids)))))))
|
||||
(binding [bfc/*state* (volatile! {:media [] :index {}})]
|
||||
(if (::bfc/file-id cfg)
|
||||
(db/tx-run! cfg import-file-and-overwrite*)
|
||||
(db/tx-run! cfg import-files*)))))
|
||||
|
||||
;; --- PUBLIC API
|
||||
|
||||
@@ -961,14 +979,14 @@
|
||||
"expected instance of jio/IOFactory for `input`")
|
||||
|
||||
(let [id (uuid/next)
|
||||
tp (dt/tpoint)
|
||||
tp (ct/tpoint)
|
||||
ab (volatile! false)
|
||||
cs (volatile! nil)]
|
||||
(try
|
||||
(l/info :hint "start exportation" :export-id (str id))
|
||||
(binding [bfc/*state* (volatile! (bfc/initial-state))]
|
||||
(with-open [output (io/output-stream output)]
|
||||
(with-open [output (ZipOutputStream. output)]
|
||||
(with-open [^AutoCloseable output (io/output-stream output)]
|
||||
(with-open [^AutoCloseable output (ZipOutputStream. output)]
|
||||
(let [cfg (assoc cfg ::output output)]
|
||||
(export-files cfg)
|
||||
(export-storage-objects cfg)))))
|
||||
@@ -1007,12 +1025,12 @@
|
||||
"expected instance of jio/IOFactory for `input`")
|
||||
|
||||
(let [id (uuid/next)
|
||||
tp (dt/tpoint)
|
||||
tp (ct/tpoint)
|
||||
cs (volatile! nil)]
|
||||
|
||||
(l/info :hint "import: started" :id (str id))
|
||||
(try
|
||||
(with-open [input (ZipFile. (fs/file input))]
|
||||
(with-open [input (ZipFile. ^File (fs/file input))]
|
||||
(import-files (assoc cfg ::bfc/input input)))
|
||||
|
||||
(catch Throwable cause
|
||||
@@ -1022,11 +1040,11 @@
|
||||
(finally
|
||||
(l/info :hint "import: terminated"
|
||||
:id (str id)
|
||||
:elapsed (dt/format-duration (tp))
|
||||
:elapsed (ct/format-duration (tp))
|
||||
:error? (some? @cs))))))
|
||||
|
||||
(defn get-manifest
|
||||
[path]
|
||||
(with-open [input (ZipFile. (fs/file path))]
|
||||
(with-open [^AutoCloseable input (ZipFile. ^File (fs/file path))]
|
||||
(-> (read-manifest input)
|
||||
(validate-manifest))))
|
||||
|
||||
@@ -5,17 +5,16 @@
|
||||
;; Copyright (c) KALEIDOS INC
|
||||
|
||||
(ns app.config
|
||||
"A configuration management."
|
||||
(:refer-clojure :exclude [get])
|
||||
(:require
|
||||
[app.common.data :as d]
|
||||
[app.common.exceptions :as ex]
|
||||
[app.common.flags :as flags]
|
||||
[app.common.schema :as sm]
|
||||
[app.common.time :as ct]
|
||||
[app.common.uri :as u]
|
||||
[app.common.version :as v]
|
||||
[app.util.overrides]
|
||||
[app.util.time :as dt]
|
||||
[clojure.core :as c]
|
||||
[clojure.java.io :as io]
|
||||
[cuerdas.core :as str]
|
||||
@@ -47,22 +46,27 @@
|
||||
:auto-file-snapshot-timeout "3h"
|
||||
|
||||
:public-uri "http://localhost:3449"
|
||||
|
||||
:host "localhost"
|
||||
:tenant "default"
|
||||
|
||||
:redis-uri "redis://redis/0"
|
||||
|
||||
:file-data-backend "legacy-db"
|
||||
|
||||
:objects-storage-backend "fs"
|
||||
:objects-storage-fs-directory "assets"
|
||||
|
||||
:auth-token-cookie-name "auth-token"
|
||||
|
||||
:assets-path "/internal/assets/"
|
||||
:smtp-default-reply-to "Penpot <no-reply@example.com>"
|
||||
:smtp-default-from "Penpot <no-reply@example.com>"
|
||||
|
||||
:profile-complaint-max-age (dt/duration {:days 7})
|
||||
:profile-complaint-max-age (ct/duration {:days 7})
|
||||
:profile-complaint-threshold 2
|
||||
|
||||
:profile-bounce-max-age (dt/duration {:days 7})
|
||||
:profile-bounce-max-age (ct/duration {:days 7})
|
||||
:profile-bounce-threshold 10
|
||||
|
||||
:telemetry-uri "https://telemetry.penpot.app/"
|
||||
@@ -88,7 +92,7 @@
|
||||
[:secret-key {:optional true} :string]
|
||||
|
||||
[:tenant {:optional false} :string]
|
||||
[:public-uri {:optional false} :string]
|
||||
[:public-uri {:optional false} ::sm/uri]
|
||||
[:host {:optional false} :string]
|
||||
|
||||
[:http-server-port {:optional true} ::sm/int]
|
||||
@@ -96,16 +100,19 @@
|
||||
[:http-server-max-body-size {:optional true} ::sm/int]
|
||||
[:http-server-max-multipart-body-size {:optional true} ::sm/int]
|
||||
[:http-server-io-threads {:optional true} ::sm/int]
|
||||
[:http-server-worker-threads {:optional true} ::sm/int]
|
||||
[:http-server-max-worker-threads {:optional true} ::sm/int]
|
||||
|
||||
[:management-api-key {:optional true} :string]
|
||||
|
||||
[:telemetry-uri {:optional true} :string]
|
||||
[:telemetry-with-taiga {:optional true} ::sm/boolean] ;; DELETE
|
||||
|
||||
[:auto-file-snapshot-every {:optional true} ::sm/int]
|
||||
[:auto-file-snapshot-timeout {:optional true} ::dt/duration]
|
||||
[:auto-file-snapshot-timeout {:optional true} ::ct/duration]
|
||||
|
||||
[:media-max-file-size {:optional true} ::sm/int]
|
||||
[:deletion-delay {:optional true} ::dt/duration] ;; REVIEW
|
||||
[:deletion-delay {:optional true} ::ct/duration]
|
||||
[:file-clean-delay {:optional true} ::ct/duration]
|
||||
[:telemetry-enabled {:optional true} ::sm/boolean]
|
||||
[:default-blob-version {:optional true} ::sm/int]
|
||||
[:allow-demo-users {:optional true} ::sm/boolean]
|
||||
@@ -146,12 +153,11 @@
|
||||
[:quotes-team-access-requests-per-team {:optional true} ::sm/int]
|
||||
[:quotes-team-access-requests-per-requester {:optional true} ::sm/int]
|
||||
|
||||
[:auth-data-cookie-domain {:optional true} :string]
|
||||
[:auth-token-cookie-name {:optional true} :string]
|
||||
[:auth-token-cookie-max-age {:optional true} ::dt/duration]
|
||||
[:auth-token-cookie-max-age {:optional true} ::ct/duration]
|
||||
|
||||
[:registration-domain-whitelist {:optional true} [::sm/set :string]]
|
||||
[:email-verify-threshold {:optional true} ::dt/duration]
|
||||
[:email-verify-threshold {:optional true} ::ct/duration]
|
||||
|
||||
[:github-client-id {:optional true} :string]
|
||||
[:github-client-secret {:optional true} :string]
|
||||
@@ -161,7 +167,7 @@
|
||||
[:google-client-id {:optional true} :string]
|
||||
[:google-client-secret {:optional true} :string]
|
||||
[:oidc-client-id {:optional true} :string]
|
||||
[:oidc-user-info-source {:optional true} :keyword]
|
||||
[:oidc-user-info-source {:optional true} [:enum "auto" "userinfo" "token"]]
|
||||
[:oidc-client-secret {:optional true} :string]
|
||||
[:oidc-base-uri {:optional true} :string]
|
||||
[:oidc-token-uri {:optional true} :string]
|
||||
@@ -186,9 +192,9 @@
|
||||
[:ldap-starttls {:optional true} ::sm/boolean]
|
||||
[:ldap-user-query {:optional true} :string]
|
||||
|
||||
[:profile-bounce-max-age {:optional true} ::dt/duration]
|
||||
[:profile-bounce-max-age {:optional true} ::ct/duration]
|
||||
[:profile-bounce-threshold {:optional true} ::sm/int]
|
||||
[:profile-complaint-max-age {:optional true} ::dt/duration]
|
||||
[:profile-complaint-max-age {:optional true} ::ct/duration]
|
||||
[:profile-complaint-threshold {:optional true} ::sm/int]
|
||||
|
||||
[:redis-uri {:optional true} ::sm/uri]
|
||||
@@ -210,24 +216,27 @@
|
||||
[:prepl-host {:optional true} :string]
|
||||
[:prepl-port {:optional true} ::sm/int]
|
||||
|
||||
[:file-data-backend {:optional true} [:enum "db" "legacy-db" "storage"]]
|
||||
|
||||
[:media-directory {:optional true} :string] ;; REVIEW
|
||||
[:media-uri {:optional true} :string]
|
||||
[:assets-path {:optional true} :string]
|
||||
|
||||
;; Legacy, will be removed in 2.5
|
||||
[:netty-io-threads {:optional true} ::sm/int]
|
||||
[:executor-threads {:optional true} ::sm/int]
|
||||
|
||||
;; DEPRECATED
|
||||
[:assets-storage-backend {:optional true} :keyword]
|
||||
[:storage-assets-fs-directory {:optional true} :string]
|
||||
[:storage-assets-s3-bucket {:optional true} :string]
|
||||
[:storage-assets-s3-region {:optional true} :keyword]
|
||||
[:storage-assets-s3-endpoint {:optional true} ::sm/uri]
|
||||
[:storage-assets-s3-io-threads {:optional true} ::sm/int]
|
||||
|
||||
[:objects-storage-backend {:optional true} :keyword]
|
||||
[:objects-storage-fs-directory {:optional true} :string]
|
||||
[:objects-storage-s3-bucket {:optional true} :string]
|
||||
[:objects-storage-s3-region {:optional true} :keyword]
|
||||
[:objects-storage-s3-endpoint {:optional true} ::sm/uri]
|
||||
[:objects-storage-s3-io-threads {:optional true} ::sm/int]]))
|
||||
[:objects-storage-s3-endpoint {:optional true} ::sm/uri]]))
|
||||
|
||||
(defn- parse-flags
|
||||
[config]
|
||||
@@ -298,7 +307,12 @@
|
||||
(defn get-deletion-delay
|
||||
[]
|
||||
(or (c/get config :deletion-delay)
|
||||
(dt/duration {:days 7})))
|
||||
(ct/duration {:days 7})))
|
||||
|
||||
(defn get-file-clean-delay
|
||||
[]
|
||||
(or (c/get config :file-clean-delay)
|
||||
(ct/duration {:days 2})))
|
||||
|
||||
(defn get
|
||||
"A configuration getter. Helps code be more testable."
|
||||
@@ -307,5 +321,9 @@
|
||||
([key default]
|
||||
(c/get config key default)))
|
||||
|
||||
(defn logging-context
|
||||
[]
|
||||
{:version/backend (:full version)})
|
||||
|
||||
;; Set value for all new threads bindings.
|
||||
(alter-var-root #'*assert* (constantly (contains? flags :backend-asserts)))
|
||||
|
||||
@@ -10,19 +10,20 @@
|
||||
[app.common.data :as d]
|
||||
[app.common.exceptions :as ex]
|
||||
[app.common.geom.point :as gpt]
|
||||
[app.common.json :as json]
|
||||
[app.common.logging :as l]
|
||||
[app.common.schema :as sm]
|
||||
[app.common.time :as ct]
|
||||
[app.common.transit :as t]
|
||||
[app.common.uuid :as uuid]
|
||||
[app.db.sql :as sql]
|
||||
[app.metrics :as mtx]
|
||||
[app.util.json :as json]
|
||||
[app.util.time :as dt]
|
||||
[clojure.java.io :as io]
|
||||
[clojure.set :as set]
|
||||
[integrant.core :as ig]
|
||||
[next.jdbc :as jdbc]
|
||||
[next.jdbc.date-time :as jdbc-dt]
|
||||
[next.jdbc.prepare :as jdbc.prepare]
|
||||
[next.jdbc.transaction])
|
||||
(:import
|
||||
com.zaxxer.hikari.HikariConfig
|
||||
@@ -33,6 +34,7 @@
|
||||
java.io.InputStream
|
||||
java.io.OutputStream
|
||||
java.sql.Connection
|
||||
java.sql.PreparedStatement
|
||||
java.sql.Savepoint
|
||||
org.postgresql.PGConnection
|
||||
org.postgresql.geometric.PGpoint
|
||||
@@ -296,7 +298,7 @@
|
||||
(defn insert!
|
||||
"A helper that builds an insert sql statement and executes it. By
|
||||
default returns the inserted row with all the fields; you can restrict
|
||||
the returned columns with the `::columns` option."
|
||||
the returned columns with the `::sql/columns` option."
|
||||
[ds table params & {:as opts}]
|
||||
(let [conn (get-connectable ds)
|
||||
sql (sql/insert table params opts)
|
||||
@@ -377,9 +379,7 @@
|
||||
|
||||
(defn is-row-deleted?
|
||||
[{:keys [deleted-at]}]
|
||||
(and (dt/instant? deleted-at)
|
||||
(< (inst-ms deleted-at)
|
||||
(inst-ms (dt/now)))))
|
||||
(some? deleted-at))
|
||||
|
||||
(defn get*
|
||||
"Retrieve a single row from database that matches a simple filters. Do
|
||||
@@ -404,6 +404,23 @@
|
||||
:hint "database object not found"))
|
||||
row))
|
||||
|
||||
(defn get-with-sql
|
||||
[ds sql & {:as opts}]
|
||||
(let [rows
|
||||
(cond->> (exec! ds sql opts)
|
||||
(::remove-deleted opts true)
|
||||
(remove is-row-deleted?)
|
||||
|
||||
:always
|
||||
(not-empty))]
|
||||
|
||||
(when (and (not rows) (::throw-if-not-exists opts true))
|
||||
(ex/raise :type :not-found
|
||||
:code :object-not-found
|
||||
:hint "database object not found"))
|
||||
|
||||
(first rows)))
|
||||
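;; A minimal usage sketch (hypothetical pool and file-id): get-with-sql works
;; like exec! but returns a single row, dropping soft-deleted rows unless
;; ::remove-deleted is set to false and raising :not-found unless
;; ::throw-if-not-exists is set to false.
(comment
  ;; raises :not-found when no matching, non-deleted row exists
  (get-with-sql pool ["select * from file where id = ?" file-id])

  ;; returns nil instead of raising and keeps soft-deleted rows
  (get-with-sql pool ["select * from file where id = ?" file-id]
                ::throw-if-not-exists false
                ::remove-deleted false))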
|
||||
(def ^:private default-plan-opts
|
||||
(-> default-opts
|
||||
(assoc :fetch-size 1000)
|
||||
@@ -558,10 +575,10 @@
|
||||
[system f & params]
|
||||
(cond
|
||||
(connection? system)
|
||||
(run! {::conn system} f)
|
||||
(apply run! {::conn system} f params)
|
||||
|
||||
(pool? system)
|
||||
(run! {::pool system} f)
|
||||
(apply run! {::pool system} f params)
|
||||
|
||||
(::conn system)
|
||||
(apply f system params)
|
||||
@@ -585,7 +602,7 @@
|
||||
(string? o)
|
||||
(pginterval o)
|
||||
|
||||
(dt/duration? o)
|
||||
(ct/duration? o)
|
||||
(interval (inst-ms o))
|
||||
|
||||
:else
|
||||
@@ -599,7 +616,7 @@
|
||||
val (.getValue o)]
|
||||
(if (or (= typ "json")
|
||||
(= typ "jsonb"))
|
||||
(json/decode val)
|
||||
(json/decode val :key-fn keyword)
|
||||
val))))
|
||||
|
||||
(defn decode-transit-pgobject
|
||||
@@ -640,7 +657,7 @@
|
||||
(when data
|
||||
(doto (org.postgresql.util.PGobject.)
|
||||
(.setType "jsonb")
|
||||
(.setValue (json/encode-str data)))))
|
||||
(.setValue (json/encode data)))))
|
||||
|
||||
;; --- Locks
|
||||
|
||||
@@ -686,3 +703,14 @@
|
||||
[cause]
|
||||
(and (sql-exception? cause)
|
||||
(= "40001" (.getSQLState ^java.sql.SQLException cause))))
|
||||
|
||||
(defn duplicate-key-error?
|
||||
[cause]
|
||||
(and (sql-exception? cause)
|
||||
(= "23505" (.getSQLState ^java.sql.SQLException cause))))
|
||||
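;; A minimal caller sketch (hypothetical table and params): the new predicate
;; lets callers tolerate unique-constraint violations explicitly instead of
;; relying on ::on-conflict-do-nothing? at insert time.
(comment
  (try
    (insert! conn :file-media-object params)
    (catch Throwable cause
      (when-not (duplicate-key-error? cause)
        (throw cause)))))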
|
||||
|
||||
(extend-protocol jdbc.prepare/SettableParameter
|
||||
clojure.lang.Keyword
|
||||
(set-parameter [^clojure.lang.Keyword v ^PreparedStatement s ^long i]
|
||||
(.setObject s i ^String (d/name v))))
|
||||
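;; A short sketch (hypothetical query and values): with the SettableParameter
;; extension above, keywords can be passed directly as statement parameters
;; and are stored as their name string via d/name.
(comment
  (exec-one! pool ["update task set status = ? where id = ?" :completed task-id]))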
|
||||
@@ -53,8 +53,15 @@
|
||||
opts (cond-> opts
|
||||
(::order-by opts) (assoc :order-by (::order-by opts))
|
||||
(::columns opts) (assoc :columns (::columns opts))
|
||||
(::for-update opts) (assoc :suffix "FOR UPDATE")
|
||||
(::for-share opts) (assoc :suffix "FOR SHARE"))]
|
||||
|
||||
(or (::db/for-update opts)
|
||||
(::for-update opts))
|
||||
(assoc :suffix "FOR UPDATE")
|
||||
|
||||
(or (::db/for-share opts)
|
||||
(::for-share opts))
|
||||
(assoc :suffix "FOR SHARE"))]
|
||||
|
||||
(sql/for-query table where-params opts))))
|
||||
|
||||
(defn update
|
||||
|
||||
@@ -7,6 +7,7 @@
|
||||
(ns app.email
|
||||
"Main api for send emails."
|
||||
(:require
|
||||
[app.common.data :as d]
|
||||
[app.common.data.macros :as dm]
|
||||
[app.common.exceptions :as ex]
|
||||
[app.common.logging :as l]
|
||||
@@ -93,36 +94,44 @@
|
||||
headers)))
|
||||
|
||||
(defn- assign-body
|
||||
[^MimeMessage mmsg {:keys [body charset] :or {charset "utf-8"}}]
|
||||
(let [mpart (MimeMultipart. "mixed")]
|
||||
[^MimeMessage mmsg {:keys [body charset attachments] :or {charset "utf-8"}}]
|
||||
(let [mixed-mpart (MimeMultipart. "mixed")]
|
||||
(cond
|
||||
(string? body)
|
||||
(let [bpart (MimeBodyPart.)]
|
||||
(.setContent bpart ^String body (str "text/plain; charset=" charset))
|
||||
(.addBodyPart mpart bpart))
|
||||
|
||||
(vector? body)
|
||||
(let [mmp (MimeMultipart. "alternative")
|
||||
mbp (MimeBodyPart.)]
|
||||
(.addBodyPart mpart mbp)
|
||||
(.setContent mbp mmp)
|
||||
(doseq [item body]
|
||||
(let [mbp (MimeBodyPart.)]
|
||||
(.setContent mbp
|
||||
^String (:content item)
|
||||
^String (str (:type item "text/plain") "; charset=" charset))
|
||||
(.addBodyPart mmp mbp))))
|
||||
(let [text-part (MimeBodyPart.)]
|
||||
(.setText text-part ^String body ^String charset)
|
||||
(.addBodyPart mixed-mpart text-part))
|
||||
|
||||
(map? body)
|
||||
(let [bpart (MimeBodyPart.)]
|
||||
(.setContent bpart
|
||||
^String (:content body)
|
||||
^String (str (:type body "text/plain") "; charset=" charset))
|
||||
(.addBodyPart mpart bpart))
|
||||
(let [content-part (MimeBodyPart.)
|
||||
alternative-mpart (MimeMultipart. "alternative")]
|
||||
|
||||
(when-let [content (get body "text/html")]
|
||||
(let [html-part (MimeBodyPart.)]
|
||||
(.setContent html-part ^String content
|
||||
(str "text/html; charset=" charset))
|
||||
(.addBodyPart alternative-mpart html-part)))
|
||||
|
||||
(when-let [content (get body "text/plain")]
|
||||
(let [text-part (MimeBodyPart.)]
|
||||
(.setText text-part ^String content ^String charset)
|
||||
(.addBodyPart alternative-mpart text-part)))
|
||||
|
||||
(.setContent content-part alternative-mpart)
|
||||
(.addBodyPart mixed-mpart content-part))
|
||||
|
||||
:else
|
||||
(throw (ex-info "Unsupported type" {:body body})))
|
||||
(.setContent mmsg mpart)
|
||||
(throw (IllegalArgumentException. "invalid email body provided")))
|
||||
|
||||
(doseq [[name content] attachments]
|
||||
|
||||
(prn "attachment" name)
|
||||
(let [attachment-part (MimeBodyPart.)]
|
||||
(.setFileName attachment-part ^String name)
|
||||
(.setContent attachment-part ^String content (str "text/plain; charset=" charset))
|
||||
(.addBodyPart mixed-mpart attachment-part)))
|
||||
|
||||
(.setContent mmsg mixed-mpart)
|
||||
mmsg))
|
||||
|
||||
(defn- opts->props
|
||||
@@ -210,24 +219,26 @@
|
||||
(ex/raise :type :internal
|
||||
:code :missing-email-templates))
|
||||
{:subject subj
|
||||
:body (into
|
||||
[{:type "text/plain"
|
||||
:content text}]
|
||||
(when html
|
||||
[{:type "text/html"
|
||||
:content html}]))}))
|
||||
:body (d/without-nils
|
||||
{"text/plain" text
|
||||
"text/html" html})}))
|
||||
|
||||
(def ^:private schema:context
|
||||
[:map
|
||||
(def ^:private schema:params
|
||||
[:map {:title "Email Params"}
|
||||
[:to [:or ::sm/email [::sm/vec ::sm/email]]]
|
||||
[:reply-to {:optional true} ::sm/email]
|
||||
[:from {:optional true} ::sm/email]
|
||||
[:lang {:optional true} ::sm/text]
|
||||
[:subject {:optional true} ::sm/text]
|
||||
[:priority {:optional true} [:enum :high :low]]
|
||||
[:extra-data {:optional true} ::sm/text]])
|
||||
[:extra-data {:optional true} ::sm/text]
|
||||
[:body {:optional true}
|
||||
[:or :string [:map-of :string :string]]]
|
||||
[:attachments {:optional true}
|
||||
[:map-of :string :string]]])
|
||||
|
||||
(def ^:private check-context
|
||||
(sm/check-fn schema:context))
|
||||
(def ^:private check-params
|
||||
(sm/check-fn schema:params))
|
||||
|
||||
(defn template-factory
|
||||
[& {:keys [id schema]}]
|
||||
@@ -235,9 +246,9 @@
|
||||
(let [check-fn (if schema
|
||||
(sm/check-fn schema)
|
||||
(constantly nil))]
|
||||
(fn [context]
|
||||
(let [context (-> context check-context check-fn)
|
||||
email (build-email-template id context)]
|
||||
(fn [params]
|
||||
(let [params (-> params check-params check-fn)
|
||||
email (build-email-template id params)]
|
||||
(when-not email
|
||||
(ex/raise :type :internal
|
||||
:code :email-template-does-not-exists
|
||||
@@ -245,35 +256,40 @@
|
||||
:template-id id))
|
||||
|
||||
(cond-> (assoc email :id (name id))
|
||||
(:extra-data context)
|
||||
(assoc :extra-data (:extra-data context))
|
||||
(:extra-data params)
|
||||
(assoc :extra-data (:extra-data params))
|
||||
|
||||
(:from context)
|
||||
(assoc :from (:from context))
|
||||
(seq (:attachments params))
|
||||
(assoc :attachments (:attachments params))
|
||||
|
||||
(:reply-to context)
|
||||
(assoc :reply-to (:reply-to context))
|
||||
(:from params)
|
||||
(assoc :from (:from params))
|
||||
|
||||
(:to context)
|
||||
(assoc :to (:to context)))))))
|
||||
(:reply-to params)
|
||||
(assoc :reply-to (:reply-to params))
|
||||
|
||||
(:to params)
|
||||
(assoc :to (:to params)))))))
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; PUBLIC HIGH-LEVEL API
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
(defn render
|
||||
[email-factory context]
|
||||
(email-factory context))
|
||||
[email-factory params]
|
||||
(email-factory params))
|
||||
|
||||
(defn send!
|
||||
"Schedule an already defined email to be sent using asynchronously
|
||||
using worker task."
|
||||
[{:keys [::conn ::factory] :as context}]
|
||||
[{:keys [::conn ::factory] :as params}]
|
||||
(assert (db/connectable? conn) "expected a valid database connection or pool")
|
||||
|
||||
(let [email (if factory
|
||||
(factory context)
|
||||
(dissoc context ::conn))]
|
||||
(factory params)
|
||||
(-> params
|
||||
(dissoc params)
|
||||
(check-params)))]
|
||||
(wrk/submit! {::wrk/task :sendmail
|
||||
::wrk/delay 0
|
||||
::wrk/max-retries 4
|
||||
@@ -343,8 +359,10 @@
|
||||
|
||||
(def ^:private schema:feedback
|
||||
[:map
|
||||
[:subject ::sm/text]
|
||||
[:content ::sm/text]])
|
||||
[:feedback-subject ::sm/text]
|
||||
[:feedback-type ::sm/text]
|
||||
[:feedback-content ::sm/text]
|
||||
[:profile :map]])
|
||||
|
||||
(def user-feedback
|
||||
"A profile feedback email."
|
||||
|
||||
@@ -9,46 +9,22 @@
|
||||
(:require
|
||||
[app.common.data :as d]
|
||||
[app.common.exceptions :as ex]
|
||||
[app.common.files.helpers :as cfh]
|
||||
[app.common.files.migrations :as fmg]
|
||||
[app.common.logging :as l]
|
||||
[app.common.types.path :as path]
|
||||
[app.common.schema :as sm]
|
||||
[app.common.time :as ct]
|
||||
[app.common.types.objects-map :as omap]
|
||||
[app.config :as cf]
|
||||
[app.db :as db]
|
||||
[app.db.sql :as-alias sql]
|
||||
[app.storage :as sto]
|
||||
[app.util.blob :as blob]
|
||||
[app.util.objects-map :as omap]
|
||||
[app.util.objects-map :as omap.legacy]
|
||||
[app.util.pointer-map :as pmap]))
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; OFFLOAD
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
(defn offloaded?
|
||||
[file]
|
||||
(= "objects-storage" (:data-backend file)))
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; OBJECTS-MAP
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
(defn enable-objects-map
|
||||
[file & _opts]
|
||||
(let [update-page
|
||||
(fn [page]
|
||||
(if (and (pmap/pointer-map? page)
|
||||
(not (pmap/loaded? page)))
|
||||
page
|
||||
(update page :objects omap/wrap)))
|
||||
|
||||
update-data
|
||||
(fn [fdata]
|
||||
(update fdata :pages-index d/update-vals update-page))]
|
||||
|
||||
(-> file
|
||||
(update :data update-data)
|
||||
(update :features conj "fdata/objects-map"))))
|
||||
|
||||
(defn process-objects
|
||||
"Apply a function to all objects-map on the file. Usualy used for convert
|
||||
the objects-map instances to plain maps"
|
||||
@@ -58,35 +34,237 @@
|
||||
(fn [page]
|
||||
(update page :objects
|
||||
(fn [objects]
|
||||
(if (omap/objects-map? objects)
|
||||
(if (or (omap/objects-map? objects)
|
||||
(omap.legacy/objects-map? objects))
|
||||
(update-fn objects)
|
||||
objects)))))
|
||||
fdata))
|
||||
|
||||
|
||||
(defn realize-objects
|
||||
"Process a file and remove all instances of objects map realizing them
|
||||
to a plain data. Used in operation where is more efficient have the
|
||||
whole file loaded in memory or we going to persist it in an
|
||||
alterantive storage."
|
||||
[_cfg file]
|
||||
(update file :data process-objects (partial into {})))
|
||||
|
||||
(defn enable-objects-map
|
||||
[file & _opts]
|
||||
(let [update-page
|
||||
(fn [page]
|
||||
(update page :objects omap/wrap))
|
||||
|
||||
update-data
|
||||
(fn [fdata]
|
||||
(update fdata :pages-index d/update-vals update-page))]
|
||||
|
||||
(-> file
|
||||
(update :data update-data)
|
||||
(update :features conj "fdata/objects-map"))))
|
||||
|
||||
(defn disable-objects-map
|
||||
[file & _opts]
|
||||
(let [update-page
|
||||
(fn [page]
|
||||
(update page :objects #(into {} %)))
|
||||
|
||||
update-data
|
||||
(fn [fdata]
|
||||
(update fdata :pages-index d/update-vals update-page))]
|
||||
|
||||
(-> file
|
||||
(update :data update-data)
|
||||
(update :features disj "fdata/objects-map"))))
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; STORAGE
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
(defmulti resolve-file-data
|
||||
(fn [_cfg file] (get file :backend "legacy-db")))
|
||||
|
||||
(defmethod resolve-file-data "legacy-db"
|
||||
[_cfg {:keys [legacy-data] :as file}]
|
||||
(-> file
|
||||
(assoc :data legacy-data)
|
||||
(dissoc :legacy-data)))
|
||||
|
||||
(defmethod resolve-file-data "db"
|
||||
[_cfg file]
|
||||
(dissoc file :legacy-data))
|
||||
|
||||
(defmethod resolve-file-data "storage"
|
||||
[cfg {:keys [metadata] :as file}]
|
||||
(let [storage (sto/resolve cfg ::db/reuse-conn true)
|
||||
ref-id (:storage-ref-id metadata)
|
||||
data (->> (sto/get-object storage ref-id)
|
||||
(sto/get-object-bytes storage))]
|
||||
(-> file
|
||||
(assoc :data data)
|
||||
(dissoc :legacy-data))))
|
||||
|
||||
(defn decode-file-data
|
||||
[_cfg {:keys [data] :as file}]
|
||||
(cond-> file
|
||||
(bytes? data)
|
||||
(assoc :data (blob/decode data))))
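A minimal sketch of how these two helpers compose when reading a file_data row. The db/get* call, its arguments, and the bound symbols are assumptions for illustration only:

(comment
  ;; Resolve the bytes from whichever backend the row points at, then decode
  ;; the blob into a plain Clojure map.
  (->> (db/get* cfg :file-data {:file-id file-id :id id})
       (resolve-file-data cfg)
       (decode-file-data cfg)))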
|
||||
|
||||
(def ^:private sql:insert-file-data
|
||||
"INSERT INTO file_data (file_id, id, created_at, modified_at, deleted_at,
|
||||
type, backend, metadata, data)
|
||||
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)")
|
||||
|
||||
(def ^:private sql:upsert-file-data
|
||||
(str sql:insert-file-data
|
||||
" ON CONFLICT (file_id, id)
|
||||
DO UPDATE SET modified_at=?,
|
||||
deleted_at=?,
|
||||
backend=?,
|
||||
metadata=?,
|
||||
data=?"))
|
||||
|
||||
(defn- upsert-in-database
|
||||
[cfg {:keys [id file-id created-at modified-at deleted-at type backend data metadata]}]
|
||||
(let [created-at (or created-at (ct/now))
|
||||
metadata (some-> metadata db/json)
|
||||
modified-at (or modified-at created-at)]
|
||||
|
||||
(db/exec-one! cfg [sql:upsert-file-data
|
||||
file-id id
|
||||
created-at
|
||||
modified-at
|
||||
deleted-at
|
||||
type
|
||||
backend
|
||||
metadata
|
||||
data
|
||||
modified-at
|
||||
deleted-at
|
||||
backend
|
||||
metadata
|
||||
data])))
|
||||
|
||||
(defn- handle-persistence
|
||||
[cfg {:keys [type backend id file-id data] :as params}]
|
||||
|
||||
(cond
|
||||
(= backend "storage")
|
||||
(let [storage (sto/resolve cfg)
|
||||
content (sto/content data)
|
||||
sobject (sto/put-object! storage
|
||||
{::sto/content content
|
||||
::sto/touch true
|
||||
:bucket "file-data"
|
||||
:content-type "application/octet-stream"
|
||||
:file-id file-id
|
||||
:id id})
|
||||
metadata {:storage-ref-id (:id sobject)}
|
||||
params (-> params
|
||||
(assoc :metadata metadata)
|
||||
(assoc :data nil))]
|
||||
(upsert-in-database cfg params))
|
||||
|
||||
(= backend "db")
|
||||
(->> (dissoc params :metadata)
|
||||
(upsert-in-database cfg))
|
||||
|
||||
(= backend "legacy-db")
|
||||
(cond
|
||||
(= type "main")
|
||||
(do
|
||||
(db/delete! cfg :file-data
|
||||
{:id id :file-id file-id :type "main"}
|
||||
{::db/return-keys false})
|
||||
(db/update! cfg :file
|
||||
{:data data}
|
||||
{:id file-id}
|
||||
{::db/return-keys false}))
|
||||
|
||||
(= type "snapshot")
|
||||
(do
|
||||
(db/delete! cfg :file-data
|
||||
{:id id :file-id file-id :type "snapshot"}
|
||||
{::db/return-keys false})
|
||||
(db/update! cfg :file-change
|
||||
{:data data}
|
||||
{:file-id file-id :id id}
|
||||
{::db/return-keys false}))
|
||||
|
||||
(= type "fragment")
|
||||
(upsert-in-database cfg
|
||||
(-> (dissoc params :metadata)
|
||||
(assoc :backend "db")))
|
||||
|
||||
:else
|
||||
(throw (RuntimeException. "not implemented")))
|
||||
|
||||
:else
|
||||
(throw (IllegalArgumentException.
|
||||
(str "backend '" backend "' not supported")))))
|
||||
|
||||
(defn process-metadata
|
||||
[cfg metadata]
|
||||
(when-let [storage-id (:storage-ref-id metadata)]
|
||||
(let [storage (sto/resolve cfg ::db/reuse-conn true)]
|
||||
(sto/touch-object! storage storage-id))))
|
||||
|
||||
(defn- default-backend
|
||||
[backend]
|
||||
(or backend (cf/get :file-data-backend)))
|
||||
|
||||
(def ^:private schema:metadata
|
||||
[:map {:title "Metadata"}
|
||||
[:storage-ref-id {:optional true} ::sm/uuid]])
|
||||
|
||||
(def decode-metadata-with-schema
|
||||
(sm/decoder schema:metadata sm/json-transformer))
|
||||
|
||||
(defn decode-metadata
|
||||
[metadata]
|
||||
(some-> metadata
|
||||
(db/decode-json-pgobject)
|
||||
(decode-metadata-with-schema)))
|
||||
|
||||
(def ^:private schema:update-params
|
||||
[:map {:closed true}
|
||||
[:id ::sm/uuid]
|
||||
[:type [:enum "main" "snapshot" "fragment"]]
|
||||
[:file-id ::sm/uuid]
|
||||
[:backend {:optional true} [:enum "db" "legacy-db" "storage"]]
|
||||
[:metadata {:optional true} [:maybe schema:metadata]]
|
||||
[:data {:optional true} bytes?]
|
||||
[:created-at {:optional true} ::ct/inst]
|
||||
[:modified-at {:optional true} [:maybe ::ct/inst]]
|
||||
[:deleted-at {:optional true} [:maybe ::ct/inst]]])
|
||||
|
||||
(def ^:private check-update-params
|
||||
(sm/check-fn schema:update-params :hint "invalid params received for update"))
|
||||
|
||||
(defn upsert!
|
||||
"Create or update file data"
|
||||
[cfg params & {:as opts}]
|
||||
(let [params (-> (check-update-params params)
|
||||
(update :backend default-backend))]
|
||||
|
||||
(some->> (:metadata params)
|
||||
(process-metadata cfg))
|
||||
|
||||
(-> (handle-persistence cfg params)
|
||||
(db/get-update-count)
|
||||
(pos?))))
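A usage sketch of upsert! for persisting a pointer fragment. The uuid, file-id and payload are placeholders, and the :backend key may be omitted to fall back to the configured default; nothing here is asserted beyond what the function above accepts.

(comment
  ;; uuid/next assumes [app.common.uuid :as uuid] is required.
  (upsert! cfg {:id      (uuid/next)
                :file-id file-id
                :type    "fragment"
                :backend "db"                     ;; or "storage" / "legacy-db"
                :data    (blob/encode {:foo 1})}))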
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; POINTER-MAP
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
(defn get-file-data
|
||||
"Get file data given a file instance."
|
||||
[system file]
|
||||
(if (offloaded? file)
|
||||
(let [storage (sto/resolve system ::db/reuse-conn true)]
|
||||
(->> (sto/get-object storage (:data-ref-id file))
|
||||
(sto/get-object-bytes storage)))
|
||||
(:data file)))
|
||||
|
||||
(defn resolve-file-data
|
||||
[system file]
|
||||
(let [data (get-file-data system file)]
|
||||
(assoc file :data data)))
|
||||
|
||||
(defn load-pointer
|
||||
"A database loader pointer helper"
|
||||
[system file-id id]
|
||||
(let [fragment (db/get* system :file-data-fragment
|
||||
{:id id :file-id file-id}
|
||||
{::sql/columns [:data :data-backend :data-ref-id :id]})]
|
||||
[cfg file-id id]
|
||||
(let [fragment (some-> (db/get* cfg :file-data
|
||||
{:id id :file-id file-id :type "fragment"}
|
||||
{::sql/columns [:data :backend :id :metadata]})
|
||||
(update :metadata decode-metadata))]
|
||||
|
||||
(l/trc :hint "load pointer"
|
||||
:file-id (str file-id)
|
||||
@@ -100,22 +278,21 @@
|
||||
:file-id file-id
|
||||
:fragment-id id))
|
||||
|
||||
(let [data (get-file-data system fragment)]
|
||||
;; FIXME: conditional thread scheduling for decoding big objects
|
||||
(blob/decode data))))
|
||||
(-> (resolve-file-data cfg fragment)
|
||||
(get :data)
|
||||
(blob/decode))))
|
||||
|
||||
(defn persist-pointers!
|
||||
"Persist all currently tracked pointer objects"
|
||||
[system file-id]
|
||||
(let [conn (db/get-connection system)]
|
||||
(doseq [[id item] @pmap/*tracked*]
|
||||
(when (pmap/modified? item)
|
||||
(l/trc :hint "persist pointer" :file-id (str file-id) :id (str id))
|
||||
(let [content (-> item deref blob/encode)]
|
||||
(db/insert! conn :file-data-fragment
|
||||
{:id id
|
||||
:file-id file-id
|
||||
:data content}))))))
|
||||
[cfg file-id]
|
||||
(doseq [[id item] @pmap/*tracked*]
|
||||
(when (pmap/modified? item)
|
||||
(l/trc :hint "persist pointer" :file-id (str file-id) :id (str id))
|
||||
(let [content (-> item deref blob/encode)]
|
||||
(upsert! cfg {:id id
|
||||
:file-id file-id
|
||||
:type "fragment"
|
||||
:data content})))))
|
||||
|
||||
(defn process-pointers
|
||||
"Apply a function to all pointers on the file. Usuly used for
|
||||
@@ -129,6 +306,14 @@
|
||||
(d/update-vals update-fn')
|
||||
(update :pages-index d/update-vals update-fn'))))
|
||||
|
||||
(defn realize-pointers
|
||||
"Process a file and remove all instances of pointers realizing them to
|
||||
a plain data. Used in operation where is more efficient have the
|
||||
whole file loaded in memory."
|
||||
[cfg {:keys [id] :as file}]
|
||||
(binding [pmap/*load-fn* (partial load-pointer cfg id)]
|
||||
(update file :data process-pointers deref)))
|
||||
|
||||
(defn get-used-pointer-ids
|
||||
"Given a file, return all pointer ids used in the data."
|
||||
[fdata]
|
||||
@@ -148,47 +333,12 @@
|
||||
(update :features conj "fdata/pointer-map")))
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; PATH-DATA
|
||||
;; GENERAL PURPOSE HELPERS
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
(defn enable-path-data
|
||||
"Enable the fdata/path-data feature on the file."
|
||||
[file & _opts]
|
||||
(letfn [(update-object [object]
|
||||
(if (or (cfh/path-shape? object)
|
||||
(cfh/bool-shape? object))
|
||||
(update object :content path/content)
|
||||
object))
|
||||
|
||||
(update-container [container]
|
||||
(d/update-when container :objects d/update-vals update-object))]
|
||||
|
||||
(-> file
|
||||
(update :data (fn [data]
|
||||
(-> data
|
||||
(update :pages-index d/update-vals update-container)
|
||||
(d/update-when :components d/update-vals update-container))))
|
||||
(update :features conj "fdata/path-data"))))
|
||||
|
||||
(defn disable-path-data
|
||||
[file & _opts]
|
||||
(letfn [(update-object [object]
|
||||
(if (or (cfh/path-shape? object)
|
||||
(cfh/bool-shape? object))
|
||||
(update object :content vec)
|
||||
object))
|
||||
|
||||
(update-container [container]
|
||||
(d/update-when container :objects d/update-vals update-object))]
|
||||
|
||||
(when-let [conn db/*conn*]
|
||||
(db/delete! conn :file-migration {:file-id (:id file)
|
||||
:name "0003-convert-path-content"}))
|
||||
(-> file
|
||||
(update :data (fn [data]
|
||||
(-> data
|
||||
(update :pages-index d/update-vals update-container)
|
||||
(d/update-when :components d/update-vals update-container))))
|
||||
(update :features disj "fdata/path-data")
|
||||
(update :migrations disj "0003-convert-path-content")
|
||||
(vary-meta update ::fmg/migrated disj "0003-convert-path-content"))))
|
||||
(defn realize
|
||||
"A helper that combines realize-pointers and realize-objects"
|
||||
[cfg file]
|
||||
(->> file
|
||||
(realize-pointers cfg)
|
||||
(realize-objects cfg)))
|
||||
|
||||
@@ -8,6 +8,7 @@
|
||||
"Backend specific code for file migrations. Implemented as permanent feature of files."
|
||||
(:require
|
||||
[app.common.data :as d]
|
||||
[app.common.exceptions :as ex]
|
||||
[app.common.files.migrations :as fmg :refer [xf:map-name]]
|
||||
[app.db :as db]
|
||||
[app.db.sql :as-alias sql]))
|
||||
@@ -26,12 +27,19 @@
|
||||
(defn upsert-migrations!
|
||||
"Persist or update file migrations. Return the updated/inserted number
|
||||
of rows"
|
||||
[conn {:keys [id] :as file}]
|
||||
(let [migrations (or (-> file meta ::fmg/migrated)
|
||||
(-> file :migrations not-empty)
|
||||
fmg/available-migrations)
|
||||
[cfg {:keys [id] :as file}]
|
||||
(let [conn (db/get-connection cfg)
|
||||
migrations (or (-> file meta ::fmg/migrated)
|
||||
(-> file :migrations))
|
||||
columns [:file-id :name]
|
||||
rows (mapv (fn [name] [id name]) migrations)]
|
||||
rows (->> migrations
|
||||
(mapv (fn [name] [id name]))
|
||||
(not-empty))]
|
||||
|
||||
(when-not rows
|
||||
(ex/raise :type :internal
|
||||
:code :missing-migrations
|
||||
:hint "no migrations available on file"))
|
||||
|
||||
(-> (db/insert-many! conn :file-migration columns rows
|
||||
{::db/return-keys false
|
||||
@@ -40,6 +48,6 @@
|
||||
|
||||
(defn reset-migrations!
|
||||
"Replace file migrations"
|
||||
[conn {:keys [id] :as file}]
|
||||
(db/delete! conn :file-migration {:file-id id})
|
||||
(upsert-migrations! conn file))
|
||||
[cfg {:keys [id] :as file}]
|
||||
(db/delete! cfg :file-migration {:file-id id})
|
||||
(upsert-migrations! cfg file))
|
||||
|
||||
446
backend/src/app/features/file_snapshots.clj
Normal file
@@ -0,0 +1,446 @@
|
||||
;; This Source Code Form is subject to the terms of the Mozilla Public
|
||||
;; License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
;;
|
||||
;; Copyright (c) KALEIDOS INC
|
||||
|
||||
(ns app.features.file-snapshots
|
||||
(:require
|
||||
[app.binfile.common :as bfc]
|
||||
[app.common.data :as d]
|
||||
[app.common.exceptions :as ex]
|
||||
[app.common.features :as-alias cfeat]
|
||||
[app.common.files.migrations :as fmg]
|
||||
[app.common.logging :as l]
|
||||
[app.common.schema :as sm]
|
||||
[app.common.time :as ct]
|
||||
[app.common.uuid :as uuid]
|
||||
[app.config :as cf]
|
||||
[app.db :as db]
|
||||
[app.db.sql :as-alias sql]
|
||||
[app.features.fdata :as fdata]
|
||||
[app.storage :as sto]
|
||||
[app.util.blob :as blob]
|
||||
[app.worker :as wrk]
|
||||
[cuerdas.core :as str]))
|
||||
|
||||
(def sql:snapshots
|
||||
"SELECT c.id,
|
||||
c.label,
|
||||
c.created_at,
|
||||
c.updated_at AS modified_at,
|
||||
c.deleted_at,
|
||||
c.profile_id,
|
||||
c.created_by,
|
||||
c.locked_by,
|
||||
c.revn,
|
||||
c.features,
|
||||
c.migrations,
|
||||
c.version,
|
||||
c.file_id,
|
||||
c.data AS legacy_data,
|
||||
fd.data AS data,
|
||||
coalesce(fd.backend, 'legacy-db') AS backend,
|
||||
fd.metadata AS metadata
|
||||
FROM file_change AS c
|
||||
LEFT JOIN file_data AS fd ON (fd.file_id = c.file_id
|
||||
AND fd.id = c.id
|
||||
AND fd.type = 'snapshot')
|
||||
WHERE c.label IS NOT NULL")
|
||||
|
||||
(defn- decode-snapshot
|
||||
[snapshot]
|
||||
(some-> snapshot
|
||||
(-> (d/update-when :metadata fdata/decode-metadata)
|
||||
(d/update-when :migrations db/decode-pgarray [])
|
||||
(d/update-when :features db/decode-pgarray #{}))))
|
||||
|
||||
(def ^:private sql:get-minimal-file
|
||||
"SELECT f.id,
|
||||
f.revn,
|
||||
f.modified_at,
|
||||
f.deleted_at,
|
||||
fd.backend AS backend,
|
||||
fd.metadata AS metadata
|
||||
FROM file AS f
|
||||
LEFT JOIN file_data AS fd ON (fd.file_id = f.id AND fd.id = f.id)
|
||||
WHERE f.id = ?")
|
||||
|
||||
(defn- get-minimal-file
|
||||
[cfg id & {:as opts}]
|
||||
(-> (db/get-with-sql cfg [sql:get-minimal-file id] opts)
|
||||
(d/update-when :metadata fdata/decode-metadata)))
|
||||
|
||||
(def ^:private sql:get-snapshot-without-data
|
||||
(str "WITH snapshots AS (" sql:snapshots ")"
|
||||
"SELECT c.id,
|
||||
c.label,
|
||||
c.revn,
|
||||
c.created_at,
|
||||
c.modified_at,
|
||||
c.deleted_at,
|
||||
c.profile_id,
|
||||
c.created_by,
|
||||
c.locked_by,
|
||||
c.features,
|
||||
c.metadata,
|
||||
c.migrations,
|
||||
c.version,
|
||||
c.file_id
|
||||
FROM snapshots AS c
|
||||
WHERE c.id = ?
|
||||
AND CASE WHEN c.created_by = 'user'
|
||||
THEN c.deleted_at IS NULL
|
||||
WHEN c.created_by = 'system'
|
||||
THEN c.deleted_at IS NULL OR c.deleted_at >= ?::timestamptz
|
||||
END"))
|
||||
|
||||
(defn get-minimal-snapshot
|
||||
[cfg snapshot-id]
|
||||
(let [now (ct/now)]
|
||||
(-> (db/get-with-sql cfg [sql:get-snapshot-without-data snapshot-id now]
|
||||
{::db/remove-deleted false})
|
||||
(decode-snapshot))))
|
||||
|
||||
(def ^:private sql:get-snapshot
|
||||
(str sql:snapshots
|
||||
" AND c.file_id = ?
|
||||
AND c.id = ?
|
||||
AND CASE WHEN c.created_by = 'user'
|
||||
THEN (c.deleted_at IS NULL)
|
||||
WHEN c.created_by = 'system'
|
||||
THEN (c.deleted_at IS NULL OR c.deleted_at >= ?::timestamptz)
|
||||
END"))
|
||||
|
||||
(defn- get-snapshot
|
||||
"Get snapshot with decoded data"
|
||||
[cfg file-id snapshot-id]
|
||||
(let [now (ct/now)]
|
||||
(->> (db/get-with-sql cfg [sql:get-snapshot file-id snapshot-id now]
|
||||
{::db/remove-deleted false})
|
||||
(decode-snapshot)
|
||||
(fdata/resolve-file-data cfg)
|
||||
(fdata/decode-file-data cfg))))
|
||||
|
||||
(def ^:private sql:get-visible-snapshots
|
||||
(str "WITH "
|
||||
"snapshots1 AS ( " sql:snapshots "),"
|
||||
"snapshots2 AS (
|
||||
SELECT c.id,
|
||||
c.label,
|
||||
c.revn,
|
||||
c.version,
|
||||
c.created_at,
|
||||
c.modified_at,
|
||||
c.created_by,
|
||||
c.locked_by,
|
||||
c.profile_id,
|
||||
c.deleted_at
|
||||
FROM snapshots1 AS c
|
||||
WHERE c.file_id = ?
|
||||
), snapshots3 AS (
|
||||
(SELECT * FROM snapshots2
|
||||
WHERE created_by = 'system'
|
||||
AND (deleted_at IS NULL OR
|
||||
deleted_at >= ?::timestamptz)
|
||||
LIMIT 500)
|
||||
UNION ALL
|
||||
(SELECT * FROM snapshots2
|
||||
WHERE created_by = 'user'
|
||||
AND deleted_at IS NULL
|
||||
LIMIT 500)
|
||||
)
|
||||
SELECT * FROM snapshots3
|
||||
ORDER BY created_at DESC"))
|
||||
|
||||
(defn get-visible-snapshots
|
||||
"Return a list of snapshots fecheable from the API, it has a limited
|
||||
set of fields and applies big but safe limits over all available
|
||||
snapshots. It return a ordered vector by the snapshot date of
|
||||
creation."
|
||||
[cfg file-id]
|
||||
(let [now (ct/now)]
|
||||
(->> (db/exec! cfg [sql:get-visible-snapshots file-id now])
|
||||
(mapv decode-snapshot))))
|
||||
|
||||
(def ^:private schema:decoded-file
|
||||
[:map {:title "DecodedFile"}
|
||||
[:id ::sm/uuid]
|
||||
[:revn :int]
|
||||
[:vern :int]
|
||||
[:data :map]
|
||||
[:version :int]
|
||||
[:features ::cfeat/features]
|
||||
[:migrations [::sm/set :string]]])
|
||||
|
||||
(def ^:private schema:snapshot
|
||||
[:map {:title "Snapshot"}
|
||||
[:id ::sm/uuid]
|
||||
[:revn [::sm/int {:min 0}]]
|
||||
[:version [::sm/int {:min 0}]]
|
||||
[:features ::cfeat/features]
|
||||
[:migrations [::sm/set ::sm/text]]
|
||||
[:profile-id {:optional true} ::sm/uuid]
|
||||
[:label ::sm/text]
|
||||
[:file-id ::sm/uuid]
|
||||
[:created-by [:enum "system" "user" "admin"]]
|
||||
[:deleted-at {:optional true} ::ct/inst]
|
||||
[:modified-at ::ct/inst]
|
||||
[:created-at ::ct/inst]])
|
||||
|
||||
(def ^:private check-snapshot
|
||||
(sm/check-fn schema:snapshot))
|
||||
|
||||
(def ^:private check-decoded-file
|
||||
(sm/check-fn schema:decoded-file))
|
||||
|
||||
(defn- generate-snapshot-label
|
||||
[]
|
||||
(let [ts (-> (ct/now)
|
||||
(ct/format-inst)
|
||||
(str/replace #"[T:\.]" "-")
|
||||
(str/rtrim "Z"))]
|
||||
(str "snapshot-" ts)))
|
||||
|
||||
(def ^:private schema:create-params
|
||||
[:map {:title "SnapshotCreateParams"}
|
||||
[:profile-id ::sm/uuid]
|
||||
[:created-by {:optional true} [:enum "user" "system"]]
|
||||
[:label {:optional true} ::sm/text]
|
||||
[:session-id {:optional true} ::sm/uuid]
|
||||
[:modified-at {:optional true} ::ct/inst]
|
||||
[:deleted-at {:optional true} ::ct/inst]])
|
||||
|
||||
(def ^:private check-create-params
|
||||
(sm/check-fn schema:create-params))
|
||||
|
||||
(defn create!
|
||||
"Create a file snapshot; expects a non-encoded file"
|
||||
[cfg file & {:as params}]
|
||||
(let [{:keys [label created-by deleted-at profile-id session-id]}
|
||||
(check-create-params params)
|
||||
|
||||
file
|
||||
(check-decoded-file file)
|
||||
|
||||
created-by
|
||||
(or created-by "system")
|
||||
|
||||
snapshot-id
|
||||
(uuid/next)
|
||||
|
||||
created-at
|
||||
(ct/now)
|
||||
|
||||
deleted-at
|
||||
(or deleted-at
|
||||
(if (= created-by "system")
|
||||
(ct/in-future (cf/get-deletion-delay))
|
||||
nil))
|
||||
|
||||
label
|
||||
(or label (generate-snapshot-label))
|
||||
|
||||
snapshot
|
||||
(cond-> {:id snapshot-id
|
||||
:revn (:revn file)
|
||||
:version (:version file)
|
||||
:file-id (:id file)
|
||||
:features (:features file)
|
||||
:migrations (:migrations file)
|
||||
:label label
|
||||
:created-at created-at
|
||||
:modified-at created-at
|
||||
:created-by created-by}
|
||||
|
||||
deleted-at
|
||||
(assoc :deleted-at deleted-at)
|
||||
|
||||
:always
|
||||
(check-snapshot))]
|
||||
|
||||
(db/insert! cfg :file-change
|
||||
(-> snapshot
|
||||
(update :features into-array)
|
||||
(update :migrations into-array)
|
||||
(assoc :updated-at created-at)
|
||||
(assoc :profile-id profile-id)
|
||||
(assoc :session-id session-id)
|
||||
(dissoc :modified-at))
|
||||
{::db/return-keys false})
|
||||
|
||||
(fdata/upsert! cfg
|
||||
{:id snapshot-id
|
||||
:file-id (:id file)
|
||||
:type "snapshot"
|
||||
:data (blob/encode (:data file))
|
||||
:created-at created-at
|
||||
:deleted-at deleted-at})
|
||||
|
||||
snapshot))
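A usage sketch for create!. The file value is a decoded file map satisfying schema:decoded-file; profile-id and the label are placeholders introduced only for illustration.

(comment
  (create! cfg file
           :profile-id profile-id
           :created-by "user"
           :label "before-big-refactor"))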
|
||||
|
||||
(def ^:private schema:update-params
|
||||
[:map {:title "SnapshotUpdateParams"}
|
||||
[:id ::sm/uuid]
|
||||
[:file-id ::sm/uuid]
|
||||
[:label ::sm/text]
|
||||
[:modified-at {:optional true} ::ct/inst]])
|
||||
|
||||
(def ^:private check-update-params
|
||||
(sm/check-fn schema:update-params))
|
||||
|
||||
(defn update!
|
||||
[cfg params]
|
||||
|
||||
(let [{:keys [id file-id label modified-at]}
|
||||
(check-update-params params)
|
||||
|
||||
modified-at
|
||||
(or modified-at (ct/now))]
|
||||
|
||||
(db/update! cfg :file-data
|
||||
{:deleted-at nil
|
||||
:modified-at modified-at}
|
||||
{:file-id file-id
|
||||
:id id
|
||||
:type "snapshot"}
|
||||
{::db/return-keys false})
|
||||
|
||||
(-> (db/update! cfg :file-change
|
||||
{:label label
|
||||
:created-by "user"
|
||||
:updated-at modified-at
|
||||
:deleted-at nil}
|
||||
{:file-id file-id
|
||||
:id id}
|
||||
{::db/return-keys false})
|
||||
(db/get-update-count)
|
||||
(pos?))))
|
||||
|
||||
(defn restore!
|
||||
[{:keys [::db/conn] :as cfg} file-id snapshot-id]
|
||||
(let [file (get-minimal-file conn file-id {::db/for-update true})
|
||||
vern (rand-int Integer/MAX_VALUE)
|
||||
|
||||
storage
|
||||
(sto/resolve cfg {::db/reuse-conn true})
|
||||
|
||||
snapshot
|
||||
(get-snapshot cfg file-id snapshot-id)]
|
||||
|
||||
(when-not snapshot
|
||||
(ex/raise :type :not-found
|
||||
:code :snapshot-not-found
|
||||
:hint "unable to find snapshot with the provided label"
|
||||
:snapshot-id snapshot-id
|
||||
:file-id file-id))
|
||||
|
||||
(when-not (:data snapshot)
|
||||
(ex/raise :type :internal
|
||||
:code :snapshot-without-data
|
||||
:hint "snapshot has no data"
|
||||
:label (:label snapshot)
|
||||
:file-id file-id))
|
||||
|
||||
(let [;; If the snapshot has applied migrations stored, we reuse
;; them; if not, we take the safest set of migrations as a
;; starting point. This is because, at the time of
;; implementing snapshots, migrations were not taken into
;; account, so we need to make this backward compatible in
;; some way.
|
||||
migrations
|
||||
(or (:migrations snapshot)
|
||||
(fmg/generate-migrations-from-version 67))
|
||||
|
||||
file
|
||||
(-> file
|
||||
(update :revn inc)
|
||||
(assoc :migrations migrations)
|
||||
(assoc :data (:data snapshot))
|
||||
(assoc :vern vern)
|
||||
(assoc :version (:version snapshot))
|
||||
(assoc :has-media-trimmed false)
|
||||
(assoc :modified-at (:modified-at snapshot))
|
||||
(assoc :features (:features snapshot)))]
|
||||
|
||||
(l/dbg :hint "restoring snapshot"
|
||||
:file-id (str file-id)
|
||||
:label (:label snapshot)
|
||||
:snapshot-id (str (:id snapshot)))
|
||||
|
||||
;; In the same way, on resetting the file data, we need to restore
;; the migrations that were applied at the moment of taking the snapshot
|
||||
(bfc/update-file! cfg file ::bfc/reset-migrations? true)
|
||||
|
||||
;; FIXME: these should be separate functions; we should not have
;; inline SQL here.
|
||||
|
||||
;; clean object thumbnails
|
||||
(let [sql (str "update file_tagged_object_thumbnail "
|
||||
" set deleted_at = now() "
|
||||
" where file_id=? returning media_id")
|
||||
res (db/exec! conn [sql file-id])]
|
||||
(doseq [media-id (into #{} (keep :media-id) res)]
|
||||
(sto/touch-object! storage media-id)))
|
||||
|
||||
;; clean file thumbnails
|
||||
(let [sql (str "update file_thumbnail "
|
||||
" set deleted_at = now() "
|
||||
" where file_id=? returning media_id")
|
||||
res (db/exec! conn [sql file-id])]
|
||||
(doseq [media-id (into #{} (keep :media-id) res)]
|
||||
(sto/touch-object! storage media-id)))
|
||||
|
||||
vern)))
|
||||
|
||||
(defn delete!
|
||||
[cfg & {:keys [id file-id deleted-at]}]
|
||||
(assert (uuid? id) "missing id")
|
||||
(assert (uuid? file-id) "missing file-id")
|
||||
(assert (ct/inst? deleted-at) "missing deleted-at")
|
||||
|
||||
(wrk/submit! {::db/conn (db/get-connection cfg)
|
||||
::wrk/task :delete-object
|
||||
::wrk/params {:object :snapshot
|
||||
:deleted-at deleted-at
|
||||
:file-id file-id
|
||||
:id id}})
|
||||
(db/update! cfg :file-change
|
||||
{:deleted-at deleted-at}
|
||||
{:id id :file-id file-id}
|
||||
{::db/return-keys false})
|
||||
true)
|
||||
|
||||
(def ^:private sql:get-snapshots
|
||||
(str sql:snapshots " AND c.file_id = ?"))
|
||||
|
||||
(defn lock-by!
|
||||
[conn id profile-id]
|
||||
(-> (db/update! conn :file-change
|
||||
{:locked-by profile-id}
|
||||
{:id id}
|
||||
{::db/return-keys false})
|
||||
(db/get-update-count)
|
||||
(pos?)))
|
||||
|
||||
(defn unlock!
|
||||
[conn id]
|
||||
(-> (db/update! conn :file-change
|
||||
{:locked-by nil}
|
||||
{:id id}
|
||||
{::db/return-keys false})
|
||||
(db/get-update-count)
|
||||
(pos?)))
|
||||
|
||||
(defn reduce-snapshots
|
||||
"Process the file snapshots using efficient reduction; the file
|
||||
reduction comes with all snapshots, including maked as deleted"
|
||||
[cfg file-id xform f init]
|
||||
(let [conn (db/get-connection cfg)
|
||||
xform (comp
|
||||
(map (partial fdata/resolve-file-data cfg))
|
||||
(map (partial fdata/decode-file-data cfg))
|
||||
xform)]
|
||||
|
||||
(->> (db/plan conn [sql:get-snapshots file-id] {:fetch-size 1})
|
||||
(transduce xform f init))))
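A usage sketch of reduce-snapshots that counts the non-deleted snapshots of a file. The transducer and reducing function are ordinary transduce arguments supplied by the caller; nothing here comes from the diff itself.

(comment
  (reduce-snapshots cfg file-id
                    (remove :deleted-at)
                    (fn ([acc] acc) ([acc _] (inc acc)))
                    0))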
|
||||
@@ -7,8 +7,8 @@
|
||||
(ns app.features.logical-deletion
|
||||
"A code related to handle logical deletion mechanism"
|
||||
(:require
|
||||
[app.config :as cf]
|
||||
[app.util.time :as dt]))
|
||||
[app.common.time :as ct]
|
||||
[app.config :as cf]))
|
||||
|
||||
(def ^:private canceled-status
|
||||
#{"canceled" "unpaid"})
|
||||
@@ -20,10 +20,10 @@
|
||||
(if-let [{:keys [type status]} (get team :subscription)]
|
||||
(cond
|
||||
(and (= "unlimited" type) (not (contains? canceled-status status)))
|
||||
(dt/duration {:days 30})
|
||||
(ct/duration {:days 30})
|
||||
|
||||
(and (= "enterprise" type) (not (contains? canceled-status status)))
|
||||
(dt/duration {:days 90})
|
||||
(ct/duration {:days 90})
|
||||
|
||||
:else
|
||||
(cf/get-deletion-delay))
|
||||
|
||||
@@ -17,17 +17,16 @@
|
||||
[app.http.awsns :as-alias awsns]
|
||||
[app.http.debug :as-alias debug]
|
||||
[app.http.errors :as errors]
|
||||
[app.http.management :as mgmt]
|
||||
[app.http.middleware :as mw]
|
||||
[app.http.security :as sec]
|
||||
[app.http.session :as session]
|
||||
[app.http.websocket :as-alias ws]
|
||||
[app.main :as-alias main]
|
||||
[app.metrics :as mtx]
|
||||
[app.rpc :as-alias rpc]
|
||||
[app.rpc.doc :as-alias rpc.doc]
|
||||
[app.setup :as-alias setup]
|
||||
[app.worker :as wrk]
|
||||
[integrant.core :as ig]
|
||||
[promesa.exec :as px]
|
||||
[reitit.core :as r]
|
||||
[reitit.middleware :as rr]
|
||||
[yetti.adapter :as yt]
|
||||
@@ -54,6 +53,8 @@
|
||||
[:map
|
||||
[::port ::sm/int]
|
||||
[::host ::sm/text]
|
||||
[::io-threads {:optional true} ::sm/int]
|
||||
[::max-worker-threads {:optional true} ::sm/int]
|
||||
[::max-body-size {:optional true} ::sm/int]
|
||||
[::max-multipart-body-size {:optional true} ::sm/int]
|
||||
[::router {:optional true} [:fn r/router?]]
|
||||
@@ -64,31 +65,41 @@
|
||||
(assert (sm/check schema:server-params params)))
|
||||
|
||||
(defmethod ig/init-key ::server
|
||||
[_ {:keys [::handler ::router ::host ::port ::wrk/executor] :as cfg}]
|
||||
[_ {:keys [::handler ::router ::host ::port ::mtx/metrics] :as cfg}]
|
||||
(l/info :hint "starting http server" :port port :host host)
|
||||
(let [options {:http/port port
|
||||
:http/host host
|
||||
:http/max-body-size (::max-body-size cfg)
|
||||
:http/max-multipart-body-size (::max-multipart-body-size cfg)
|
||||
:xnio/direct-buffers false
|
||||
:xnio/io-threads (or (::io-threads cfg)
|
||||
(max 3 (px/get-available-processors)))
|
||||
:xnio/dispatch executor
|
||||
:ring/compat :ring2
|
||||
:socket/backlog 4069}
|
||||
(let [on-dispatch
|
||||
(fn [_ start-at-ns]
|
||||
(let [timing (- (System/nanoTime) start-at-ns)
|
||||
timing (int (/ timing 1000000))]
|
||||
(mtx/run! metrics
|
||||
:id :http-server-dispatch-timing
|
||||
:val timing)))
|
||||
|
||||
handler (cond
|
||||
(some? router)
|
||||
(router-handler router)
|
||||
options
|
||||
{:http/port port
|
||||
:http/host host
|
||||
:http/max-body-size (::max-body-size cfg)
|
||||
:http/max-multipart-body-size (::max-multipart-body-size cfg)
|
||||
:xnio/direct-buffers false
|
||||
:xnio/io-threads (::io-threads cfg)
|
||||
:xnio/max-worker-threads (::max-worker-threads cfg)
|
||||
:ring/compat :ring2
|
||||
:events/on-dispatch on-dispatch
|
||||
:socket/backlog 4069}
|
||||
|
||||
(some? handler)
|
||||
handler
|
||||
handler
|
||||
(cond
|
||||
(some? router)
|
||||
(router-handler router)
|
||||
|
||||
:else
|
||||
(throw (UnsupportedOperationException. "handler or router are required")))
|
||||
(some? handler)
|
||||
handler
|
||||
|
||||
options (d/without-nils options)
|
||||
server (yt/server handler options)]
|
||||
:else
|
||||
(throw (UnsupportedOperationException. "handler or router are required")))
|
||||
|
||||
server
|
||||
(yt/server handler (d/without-nils options))]
|
||||
|
||||
(assoc cfg ::server (yt/start! server))))
|
||||
|
||||
@@ -137,12 +148,12 @@
|
||||
[:map
|
||||
[::ws/routes schema:routes]
|
||||
[::rpc/routes schema:routes]
|
||||
[::rpc.doc/routes schema:routes]
|
||||
[::oidc/routes schema:routes]
|
||||
[::assets/routes schema:routes]
|
||||
[::debug/routes schema:routes]
|
||||
[::mtx/routes schema:routes]
|
||||
[::awsns/routes schema:routes]
|
||||
[::mgmt/routes schema:routes]
|
||||
::session/manager
|
||||
::setup/props
|
||||
::db/pool])
|
||||
@@ -155,10 +166,12 @@
|
||||
[_ cfg]
|
||||
(rr/router
|
||||
[["" {:middleware [[mw/server-timing]
|
||||
[sec/sec-fetch-metadata]
|
||||
[mw/params]
|
||||
[mw/format-response]
|
||||
[session/soft-auth cfg]
|
||||
[actoken/soft-auth cfg]
|
||||
[mw/auth {:bearer (partial session/decode-token cfg)
|
||||
:cookie (partial session/decode-token cfg)
|
||||
:token (partial actoken/decode-token cfg)}]
|
||||
[mw/parse-request]
|
||||
[mw/errors errors/handle]
|
||||
[mw/restrict-methods]]}
|
||||
@@ -170,9 +183,9 @@
|
||||
["/webhooks"
|
||||
(::awsns/routes cfg)]
|
||||
|
||||
(::ws/routes cfg)
|
||||
["/management"
|
||||
(::mgmt/routes cfg)]
|
||||
|
||||
["/api" {:middleware [[mw/cors]]}
|
||||
(::oidc/routes cfg)
|
||||
(::rpc.doc/routes cfg)
|
||||
(::rpc/routes cfg)]]]))
|
||||
(::ws/routes cfg)
|
||||
(::oidc/routes cfg)
|
||||
(::rpc/routes cfg)]]))
|
||||
|
||||
@@ -9,23 +9,19 @@
|
||||
[app.common.logging :as l]
|
||||
[app.config :as cf]
|
||||
[app.db :as db]
|
||||
[app.http :as-alias http]
|
||||
[app.main :as-alias main]
|
||||
[app.setup :as-alias setup]
|
||||
[app.tokens :as tokens]
|
||||
[yetti.request :as yreq]))
|
||||
[app.tokens :as tokens]))
|
||||
|
||||
(def header-re #"^Token\s+(.*)")
|
||||
|
||||
(defn- get-token
|
||||
[request]
|
||||
(some->> (yreq/get-header request "authorization")
|
||||
(re-matches header-re)
|
||||
(second)))
|
||||
|
||||
(defn- decode-token
|
||||
[props token]
|
||||
(when token
|
||||
(tokens/verify props {:token token :iss "access-token"})))
|
||||
(defn decode-token
|
||||
[cfg token]
|
||||
(try
|
||||
(tokens/verify cfg {:token token :iss "access-token"})
|
||||
(catch Throwable cause
|
||||
(l/trc :hint "exception on decoding token"
|
||||
:token token
|
||||
:cause cause))))
|
||||
|
||||
(def sql:get-token-data
|
||||
"SELECT perms, profile_id, expires_at
|
||||
@@ -35,47 +31,28 @@
|
||||
OR (expires_at > now()));")
|
||||
|
||||
(defn- get-token-data
|
||||
[pool token-id]
|
||||
[pool claims]
|
||||
(when-not (db/read-only? pool)
|
||||
(some-> (db/exec-one! pool [sql:get-token-data token-id])
|
||||
(update :perms db/decode-pgarray #{}))))
|
||||
|
||||
(defn- wrap-soft-auth
|
||||
"Soft Authentication, will be executed synchronously on the undertow
|
||||
worker thread."
|
||||
[handler {:keys [::setup/props]}]
|
||||
(letfn [(handle-request [request]
|
||||
(try
|
||||
(let [token (get-token request)
|
||||
claims (decode-token props token)]
|
||||
(cond-> request
|
||||
(map? claims)
|
||||
(assoc ::id (:tid claims))))
|
||||
(catch Throwable cause
|
||||
(l/trace :hint "exception on decoding malformed token" :cause cause)
|
||||
request)))]
|
||||
|
||||
(fn [request]
|
||||
(handler (handle-request request)))))
|
||||
(when-let [token-id (get claims :tid)]
|
||||
(some-> (db/exec-one! pool [sql:get-token-data token-id])
|
||||
(update :perms db/decode-pgarray #{})))))
|
||||
|
||||
(defn- wrap-authz
|
||||
"Authorization middleware, will be executed synchronously on vthread."
|
||||
[handler {:keys [::db/pool]}]
|
||||
(fn [request]
|
||||
(let [{:keys [perms profile-id expires-at]} (some->> (::id request) (get-token-data pool))]
|
||||
(handler (cond-> request
|
||||
(some? perms)
|
||||
(assoc ::perms perms)
|
||||
(some? profile-id)
|
||||
(assoc ::profile-id profile-id)
|
||||
(some? expires-at)
|
||||
(assoc ::expires-at expires-at))))))
|
||||
(let [{:keys [type claims]} (get request ::http/auth-data)]
|
||||
(if (= :token type)
|
||||
(let [{:keys [perms profile-id expires-at]} (some->> claims (get-token-data pool))]
|
||||
;; FIXME: revisit this, this data looks unused
|
||||
(handler (cond-> request
|
||||
(some? perms)
|
||||
(assoc ::perms perms)
|
||||
(some? profile-id)
|
||||
(assoc ::profile-id profile-id)
|
||||
(some? expires-at)
|
||||
(assoc ::expires-at expires-at))))
|
||||
|
||||
(def soft-auth
|
||||
{:name ::soft-auth
|
||||
:compile (fn [& _]
|
||||
(when (contains? cf/flags :access-tokens)
|
||||
wrap-soft-auth))})
|
||||
(handler request)))))
|
||||
|
||||
(def authz
|
||||
{:name ::authz
|
||||
|
||||
@@ -9,18 +9,18 @@
|
||||
(:require
|
||||
[app.common.data :as d]
|
||||
[app.common.exceptions :as ex]
|
||||
[app.common.time :as ct]
|
||||
[app.common.uri :as u]
|
||||
[app.db :as db]
|
||||
[app.storage :as sto]
|
||||
[app.util.time :as dt]
|
||||
[integrant.core :as ig]
|
||||
[yetti.response :as-alias yres]))
|
||||
|
||||
(def ^:private cache-max-age
|
||||
(dt/duration {:hours 24}))
|
||||
(ct/duration {:hours 24}))
|
||||
|
||||
(def ^:private signature-max-age
|
||||
(dt/duration {:hours 24 :minutes 15}))
|
||||
(ct/duration {:hours 24 :minutes 15}))
|
||||
|
||||
(defn get-id
|
||||
[{:keys [path-params]}]
|
||||
|
||||
@@ -17,11 +17,9 @@
|
||||
[app.main :as-alias main]
|
||||
[app.setup :as-alias setup]
|
||||
[app.tokens :as tokens]
|
||||
[app.worker :as-alias wrk]
|
||||
[clojure.data.json :as j]
|
||||
[cuerdas.core :as str]
|
||||
[integrant.core :as ig]
|
||||
[promesa.exec :as px]
|
||||
[yetti.request :as yreq]
|
||||
[yetti.response :as-alias yres]))
|
||||
|
||||
@@ -40,8 +38,8 @@
|
||||
[_ cfg]
|
||||
(letfn [(handler [request]
|
||||
(let [data (-> request yreq/body slurp)]
|
||||
(px/run! :vthread (partial handle-request cfg data)))
|
||||
{::yres/status 200})]
|
||||
(handle-request cfg data)
|
||||
{::yres/status 200}))]
|
||||
["/sns" {:handler handler
|
||||
:allowed-methods #{:post}}]))
|
||||
|
||||
@@ -109,7 +107,7 @@
|
||||
[cfg headers]
|
||||
(let [tdata (get headers "x-penpot-data")]
|
||||
(when-not (str/empty? tdata)
|
||||
(let [result (tokens/verify (::setup/props cfg) {:token tdata :iss :profile-identity})]
|
||||
(let [result (tokens/verify cfg {:token tdata :iss :profile-identity})]
|
||||
(:profile-id result)))))
|
||||
|
||||
(defn- parse-notification
|
||||
|
||||
@@ -9,8 +9,7 @@
|
||||
(:require
|
||||
[app.common.schema :as sm]
|
||||
[integrant.core :as ig]
|
||||
[java-http-clj.core :as http]
|
||||
[promesa.core :as p])
|
||||
[java-http-clj.core :as http])
|
||||
(:import
|
||||
java.net.http.HttpClient))
|
||||
|
||||
@@ -29,14 +28,9 @@
|
||||
|
||||
(defn send!
|
||||
([client req] (send! client req {}))
|
||||
([client req {:keys [response-type sync?] :or {response-type :string sync? false}}]
|
||||
([client req {:keys [response-type] :or {response-type :string}}]
|
||||
(assert (client? client) "expected valid http client")
|
||||
(if sync?
|
||||
(http/send req {:client client :as response-type})
|
||||
(try
|
||||
(http/send-async req {:client client :as response-type})
|
||||
(catch Throwable cause
|
||||
(p/rejected cause))))))
|
||||
(http/send req {:client client :as response-type})))
|
||||
|
||||
(defn- resolve-client
|
||||
[params]
|
||||
@@ -56,8 +50,8 @@
|
||||
([cfg-or-client request]
|
||||
(let [client (resolve-client cfg-or-client)
|
||||
request (update request :uri str)]
|
||||
(send! client request {:sync? true})))
|
||||
(send! client request {})))
|
||||
([cfg-or-client request options]
|
||||
(let [client (resolve-client cfg-or-client)
|
||||
request (update request :uri str)]
|
||||
(send! client request (merge {:sync? true} options)))))
|
||||
(send! client request options))))
|
||||
|
||||
@@ -15,6 +15,7 @@
|
||||
[app.common.features :as cfeat]
|
||||
[app.common.logging :as l]
|
||||
[app.common.pprint :as pp]
|
||||
[app.common.time :as ct]
|
||||
[app.common.transit :as t]
|
||||
[app.common.uuid :as uuid]
|
||||
[app.config :as cf]
|
||||
@@ -26,12 +27,12 @@
|
||||
[app.rpc.commands.profile :as profile]
|
||||
[app.rpc.commands.teams :as teams]
|
||||
[app.setup :as-alias setup]
|
||||
[app.setup.clock :as clock]
|
||||
[app.srepl.main :as srepl]
|
||||
[app.storage :as-alias sto]
|
||||
[app.storage.tmp :as tmp]
|
||||
[app.util.blob :as blob]
|
||||
[app.util.template :as tmpl]
|
||||
[app.util.time :as dt]
|
||||
[cuerdas.core :as str]
|
||||
[datoteka.io :as io]
|
||||
[emoji.core :as emj]
|
||||
@@ -49,11 +50,17 @@
|
||||
|
||||
(defn index-handler
|
||||
[_cfg _request]
|
||||
{::yres/status 200
|
||||
::yres/headers {"content-type" "text/html"}
|
||||
::yres/body (-> (io/resource "app/templates/debug.tmpl")
|
||||
(tmpl/render {:version (:full cf/version)
|
||||
:supported-features cfeat/supported-features}))})
|
||||
(let [{:keys [clock offset]} @clock/current]
|
||||
{::yres/status 200
|
||||
::yres/headers {"content-type" "text/html"}
|
||||
::yres/body (-> (io/resource "app/templates/debug.tmpl")
|
||||
(tmpl/render {:version (:full cf/version)
|
||||
:current-clock (str clock)
|
||||
:current-offset (if offset
|
||||
(ct/format-duration offset)
|
||||
"NO OFFSET")
|
||||
:current-time (ct/format-inst (ct/now) :http)
|
||||
:supported-features cfeat/supported-features}))}))
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; FILE CHANGES
|
||||
@@ -137,7 +144,7 @@
|
||||
file (some-> params :file :path io/read* t/decode)]
|
||||
|
||||
(if (and file project-id)
|
||||
(let [fname (str "Imported: " (:name file) "(" (dt/now) ")")
|
||||
(let [fname (str "Imported: " (:name file) "(" (ct/now) ")")
|
||||
reuse-id? (contains? params :reuseid)
|
||||
file-id (or (and reuse-id? (ex/ignoring (-> params :file :filename parse-uuid)))
|
||||
(uuid/next))]
|
||||
@@ -222,7 +229,7 @@
|
||||
(-> (io/resource "app/templates/error-report.v3.tmpl")
|
||||
(tmpl/render (-> content
|
||||
(assoc :id id)
|
||||
(assoc :created-at (dt/format-instant created-at :rfc1123))))))]
|
||||
(assoc :created-at (ct/format-inst created-at :rfc1123))))))]
|
||||
|
||||
(if-let [report (get-report request)]
|
||||
(let [result (case (:version report)
|
||||
@@ -246,7 +253,7 @@
|
||||
(defn error-list-handler
|
||||
[{:keys [::db/pool]} _request]
|
||||
(let [items (->> (db/exec! pool [sql:error-reports])
|
||||
(map #(update % :created-at dt/format-instant :rfc1123)))]
|
||||
(map #(update % :created-at ct/format-inst :rfc1123)))]
|
||||
{::yres/status 200
|
||||
::yres/body (-> (io/resource "app/templates/error-list.tmpl")
|
||||
(tmpl/render {:items items}))
|
||||
@@ -390,34 +397,6 @@
|
||||
::yres/headers {"content-type" "text/plain"}
|
||||
::yres/body (str/ffmt "PROFILE '%' ACTIVATED" (:email profile))}))))))
|
||||
|
||||
|
||||
(defn- reset-file-version
|
||||
[cfg {:keys [params] :as request}]
|
||||
(let [file-id (some-> params :file-id d/parse-uuid)
|
||||
version (some-> params :version d/parse-integer)]
|
||||
|
||||
(when-not (contains? params :force)
|
||||
(ex/raise :type :validation
|
||||
:code :missing-force
|
||||
:hint "missing force checkbox"))
|
||||
|
||||
(when (nil? file-id)
|
||||
(ex/raise :type :validation
|
||||
:code :invalid-file-id
|
||||
:hint "provided invalid file id"))
|
||||
|
||||
(when (nil? version)
|
||||
(ex/raise :type :validation
|
||||
:code :invalid-version
|
||||
:hint "provided invalid version"))
|
||||
|
||||
(db/tx-run! cfg srepl/process-file! file-id #(assoc % :version version))
|
||||
|
||||
{::yres/status 200
|
||||
::yres/headers {"content-type" "text/plain"}
|
||||
::yres/body "OK"}))
|
||||
|
||||
|
||||
(defn- handle-team-features
|
||||
[cfg {:keys [params] :as request}]
|
||||
(let [team-id (some-> params :team-id d/parse-uuid)
|
||||
@@ -462,6 +441,24 @@
|
||||
::yres/headers {"content-type" "text/plain"}
|
||||
::yres/body "OK"}))))
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; VIRTUAL CLOCK
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
(defn- set-virtual-clock
|
||||
[_ {:keys [params] :as request}]
|
||||
(let [offset (some-> params :offset str/trim not-empty ct/duration)
|
||||
reset? (contains? params :reset)]
|
||||
(if (= "production" (cf/get :tenant))
|
||||
{::yres/status 501
|
||||
::yres/body "OPERATION NOT ALLOWED"}
|
||||
(do
|
||||
(if (or reset? (zero? (inst-ms offset)))
|
||||
(clock/set-offset! nil)
|
||||
(clock/set-offset! offset))
|
||||
{::yres/status 302
|
||||
::yres/headers {"location" "/dbg"}}))))
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; OTHER SMALL VIEWS/HANDLERS
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
@@ -548,10 +545,10 @@
|
||||
["/error/:id" {:handler (partial error-handler cfg)}]
|
||||
["/error" {:handler (partial error-list-handler cfg)}]
|
||||
["/actions" {:middleware [[errors]]}
|
||||
["/set-virtual-clock"
|
||||
{:handler (partial set-virtual-clock cfg)}]
|
||||
["/resend-email-verification"
|
||||
{:handler (partial resend-email-notification cfg)}]
|
||||
["/reset-file-version"
|
||||
{:handler (partial reset-file-version cfg)}]
|
||||
["/handle-team-features"
|
||||
{:handler (partial handle-team-features cfg)}]
|
||||
["/file-export" {:handler (partial export-handler cfg)}]
|
||||
|
||||
@@ -13,6 +13,7 @@
|
||||
[app.config :as cf]
|
||||
[app.http :as-alias http]
|
||||
[app.http.access-token :as-alias actoken]
|
||||
[app.http.auth :as-alias auth]
|
||||
[app.http.session :as-alias session]
|
||||
[app.util.inet :as inet]
|
||||
[clojure.spec.alpha :as s]
|
||||
@@ -22,18 +23,16 @@
|
||||
(defn request->context
|
||||
"Extracts error report relevant context data from request."
|
||||
[request]
|
||||
(let [claims (-> {}
|
||||
(into (::session/token-claims request))
|
||||
(into (::actoken/token-claims request)))]
|
||||
{:request/path (:path request)
|
||||
:request/method (:method request)
|
||||
:request/params (:params request)
|
||||
:request/user-agent (yreq/get-header request "user-agent")
|
||||
:request/ip-addr (inet/parse-request request)
|
||||
:request/profile-id (:uid claims)
|
||||
:version/frontend (or (yreq/get-header request "x-frontend-version") "unknown")
|
||||
:version/backend (:full cf/version)}))
|
||||
|
||||
(let [{:keys [claims] :as auth} (get request ::http/auth-data)]
|
||||
(-> (cf/logging-context)
|
||||
(assoc :request/path (:path request))
|
||||
(assoc :request/method (:method request))
|
||||
(assoc :request/params (:params request))
|
||||
(assoc :request/user-agent (yreq/get-header request "user-agent"))
|
||||
(assoc :request/ip-addr (inet/parse-request request))
|
||||
(assoc :request/profile-id (get claims :uid))
|
||||
(assoc :request/auth-data auth)
|
||||
(assoc :version/frontend (or (yreq/get-header request "x-frontend-version") "unknown")))))
|
||||
|
||||
(defmulti handle-error
|
||||
(fn [cause _ _]
|
||||
@@ -61,8 +60,6 @@
|
||||
::yres/body data}
|
||||
|
||||
(binding [l/*context* (request->context request)]
|
||||
(l/err :hint "restriction error"
|
||||
:cause err)
|
||||
{::yres/status 400
|
||||
::yres/body data}))))
|
||||
|
||||
|
||||
240
backend/src/app/http/management.clj
Normal file
@@ -0,0 +1,240 @@
|
||||
;; This Source Code Form is subject to the terms of the Mozilla Public
|
||||
;; License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
;;
|
||||
;; Copyright (c) KALEIDOS INC
|
||||
|
||||
(ns app.http.management
|
||||
"Internal mangement HTTP API"
|
||||
(:require
|
||||
[app.common.logging :as l]
|
||||
[app.common.schema :as sm]
|
||||
[app.common.schema.generators :as sg]
|
||||
[app.common.time :as ct]
|
||||
[app.config :as cf]
|
||||
[app.db :as db]
|
||||
[app.http.middleware :as mw]
|
||||
[app.main :as-alias main]
|
||||
[app.rpc.commands.profile :as cmd.profile]
|
||||
[app.setup :as-alias setup]
|
||||
[app.tokens :as tokens]
|
||||
[app.worker :as-alias wrk]
|
||||
[integrant.core :as ig]
|
||||
[yetti.response :as-alias yres]))
|
||||
|
||||
;; ---- ROUTES
|
||||
|
||||
(declare ^:private authenticate)
|
||||
(declare ^:private get-customer)
|
||||
(declare ^:private update-customer)
|
||||
|
||||
(defmethod ig/assert-key ::routes
|
||||
[_ params]
|
||||
(assert (db/pool? (::db/pool params)) "expect valid database pool"))
|
||||
|
||||
(def ^:private default-system
|
||||
{:name ::default-system
|
||||
:compile
|
||||
(fn [_ _]
|
||||
(fn [handler cfg]
|
||||
(fn [request]
|
||||
(handler cfg request))))})
|
||||
|
||||
(def ^:private transaction
|
||||
{:name ::transaction
|
||||
:compile
|
||||
(fn [data _]
|
||||
(when (:transaction data)
|
||||
(fn [handler]
|
||||
(fn [cfg request]
|
||||
(db/tx-run! cfg handler request)))))})
|
||||
|
||||
(defmethod ig/init-key ::routes
|
||||
[_ {:keys [::setup/props] :as cfg}]
|
||||
|
||||
(let [management-key (or (cf/get :management-api-key)
|
||||
(get props :management-key))]
|
||||
|
||||
["" {:middleware [[mw/shared-key-auth management-key]
|
||||
[default-system cfg]
|
||||
[transaction]]}
|
||||
["/authenticate"
|
||||
{:handler authenticate
|
||||
:allowed-methods #{:post}}]
|
||||
|
||||
["/get-customer"
|
||||
{:handler get-customer
|
||||
:transaction true
|
||||
:allowed-methods #{:post}}]
|
||||
|
||||
["/update-customer"
|
||||
{:handler update-customer
|
||||
:allowed-methods #{:post}
|
||||
:transaction true}]]))

;; ---- HELPERS

(defn- coercer
  [schema & {:as opts}]
  (let [decode-fn (sm/decoder schema sm/json-transformer)
        check-fn (sm/check-fn schema opts)]
    (fn [data]
      (-> data decode-fn check-fn))))

;; ---- API: AUTHENTICATE

(defn- authenticate
  [cfg request]
  (let [token (-> request :params :token)
        result (tokens/verify cfg {:token token :iss "authentication"})]
    {::yres/status 200
     ::yres/body result}))

;; ---- API: GET-CUSTOMER

(def ^:private schema:get-customer
  [:map [:id ::sm/uuid]])

(def ^:private coerce-get-customer-params
  (coercer schema:get-customer
           :type :validation
           :hint "invalid data provided for `get-customer` rpc call"))

(def ^:private sql:get-customer-slots
  "WITH teams AS (
     SELECT tpr.team_id AS id,
            tpr.profile_id AS profile_id
       FROM team_profile_rel AS tpr
      WHERE tpr.is_owner IS true
        AND tpr.profile_id = ?
   ), teams_with_slots AS (
     SELECT tpr.team_id AS id,
            count(*) AS total
       FROM team_profile_rel AS tpr
      WHERE tpr.team_id IN (SELECT id FROM teams)
        AND tpr.can_edit IS true
      GROUP BY 1
      ORDER BY 2
   )
   SELECT max(total) AS total FROM teams_with_slots;")

(defn- get-customer-slots
  [cfg profile-id]
  (let [result (db/exec-one! cfg [sql:get-customer-slots profile-id])]
    (:total result)))

(defn- get-customer
  [cfg request]
  (let [profile-id (-> request :params coerce-get-customer-params :id)
        profile (cmd.profile/get-profile cfg profile-id)
        result {:id (get profile :id)
                :name (get profile :fullname)
                :email (get profile :email)
                :num-editors (get-customer-slots cfg profile-id)
                :subscription (-> profile :props :subscription)}]
    {::yres/status 200
     ::yres/body result}))

;; ---- API: UPDATE-CUSTOMER

(def ^:private schema:timestamp
  (sm/type-schema
   {:type ::timestamp
    :pred ct/inst?
    :type-properties
    {:title "inst"
     :description "The same as :app.common.time/inst but encodes to epoch"
     :error/message "should be an instant"
     :gen/gen (->> (sg/small-int)
                   (sg/fmap (fn [v] (ct/inst v))))
     :decode/string ct/inst
     :encode/string inst-ms
     :decode/json ct/inst
     :encode/json inst-ms}}))

(def ^:private schema:subscription
  [:map {:title "Subscription"}
   [:id ::sm/text]
   [:customer-id ::sm/text]
   [:type [:enum
           "unlimited"
           "professional"
           "enterprise"]]
   [:status [:enum
             "active"
             "canceled"
             "incomplete"
             "incomplete_expired"
             "past_due"
             "paused"
             "trialing"
             "unpaid"]]

   [:billing-period [:enum
                     "month"
                     "day"
                     "week"
                     "year"]]
   [:quantity :int]
   [:description [:maybe ::sm/text]]
   [:created-at schema:timestamp]
   [:start-date [:maybe schema:timestamp]]
   [:ended-at [:maybe schema:timestamp]]
   [:trial-end [:maybe schema:timestamp]]
   [:trial-start [:maybe schema:timestamp]]
   [:cancel-at [:maybe schema:timestamp]]
   [:canceled-at [:maybe schema:timestamp]]
   [:current-period-end [:maybe schema:timestamp]]
   [:current-period-start [:maybe schema:timestamp]]
   [:cancel-at-period-end :boolean]

   [:cancellation-details
    [:map {:title "CancellationDetails"}
     [:comment [:maybe ::sm/text]]
     [:reason [:maybe ::sm/text]]
     [:feedback [:maybe
                 [:enum
                  "customer_service"
                  "low_quality"
                  "missing_feature"
                  "other"
                  "switched_service"
                  "too_complex"
                  "too_expensive"
                  "unused"]]]]]])

(def ^:private schema:update-customer
  [:map
   [:id ::sm/uuid]
   [:subscription [:maybe schema:subscription]]])

(def ^:private coerce-update-customer-params
  (coercer schema:update-customer
           :type :validation
           :hint "invalid data provided for `update-customer` rpc call"))

(defn- update-customer
  [cfg request]
  (let [{:keys [id subscription]}
        (-> request :params coerce-update-customer-params)

        {:keys [props] :as profile}
        (cmd.profile/get-profile cfg id ::db/for-update true)

        props
        (assoc props :subscription subscription)]

    (l/dbg :hint "update customer"
           :profile-id (str id)
           :subscription-type (get subscription :type)
           :subscription-status (get subscription :status)
           :subscription-quantity (get subscription :quantity))

    (db/update! cfg :profile
                {:props (db/tjson props)}
                {:id id}
                {::db/return-keys false})

    {::yres/status 201
     ::yres/body nil}))
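
;; Illustration added in this review (not part of the patch): a params map
;; that should pass `coerce-update-customer-params` above. All concrete
;; values are invented; timestamps are epoch milliseconds because
;; schema:timestamp decodes JSON values with ct/inst.
(comment
  (coerce-update-customer-params
   {:id #uuid "11111111-1111-1111-1111-111111111111"
    :subscription {:id "sub_123"
                   :customer-id "cus_123"
                   :type "professional"
                   :status "active"
                   :billing-period "month"
                   :quantity 5
                   :description nil
                   :created-at 1735689600000
                   :start-date nil
                   :ended-at nil
                   :trial-end nil
                   :trial-start nil
                   :cancel-at nil
                   :canceled-at nil
                   :current-period-end nil
                   :current-period-start nil
                   :cancel-at-period-end false
                   :cancellation-details {:comment nil
                                          :reason nil
                                          :feedback nil}}}))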

@@ -12,8 +12,11 @@
   [app.common.schema :as-alias sm]
   [app.common.transit :as t]
   [app.config :as cf]
   [app.http :as-alias http]
   [app.http.errors :as errors]
   [app.tokens :as tokens]
   [app.util.pointer-map :as pmap]
   [buddy.core.codecs :as bc]
   [cuerdas.core :as str]
   [yetti.adapter :as yt]
   [yetti.middleware :as ymw]
@@ -240,3 +243,77 @@
       (if (contains? allowed method)
         (handler request)
         {::yres/status 405}))))))})

(defn- wrap-auth
  [handler decoders]
  (let [token-re
        #"(?i)^(Token|Bearer)\s+(.*)"

        get-token-from-authorization
        (fn [request]
          (when-let [[_ token-type token] (some->> (yreq/get-header request "authorization")
                                                   (re-matches token-re))]
            (if (= "token" (str/lower token-type))
              {:type :token
               :token token}
              {:type :bearer
               :token token})))

        get-token-from-cookie
        (fn [request]
          (let [cname (cf/get :auth-token-cookie-name)
                token (some-> (yreq/get-cookie request cname) :value)]
            (when-not (str/empty? token)
              {:type :cookie
               :token token})))

        get-token
        (some-fn get-token-from-cookie get-token-from-authorization)

        process-request
        (fn [request]
          (if-let [{:keys [type token] :as auth} (get-token request)]
            (let [decode-fn (get decoders type)]
              (if (or (= type :cookie) (= type :bearer))
                (let [metadata (tokens/decode-header token)]
                  ;; NOTE: we only proceed to decode claims on new
                  ;; cookie tokens. The old cookies don't need to be
                  ;; decoded because they use the token string as ID
                  (if (and (= (:kid metadata) 1)
                           (= (:ver metadata) 1)
                           (some? decode-fn))
                    (assoc request ::http/auth-data (assoc auth
                                                           :claims (decode-fn token)
                                                           :metadata metadata))
                    (assoc request ::http/auth-data (assoc auth :metadata {:ver 0}))))

                (if decode-fn
                  (assoc request ::http/auth-data (assoc auth :claims (decode-fn token)))
                  (assoc request ::http/auth-data auth))))

            request))]

    (fn [request]
      (-> request process-request handler))))

(def auth
  {:name ::auth
   :compile (constantly wrap-auth)})

(defn- wrap-shared-key-auth
  [handler shared-key]
  (if shared-key
    (let [shared-key (if (string? shared-key)
                       shared-key
                       (bc/bytes->b64-str shared-key true))]
      (fn [request]
        (let [key (yreq/get-header request "x-shared-key")]
          (if (= key shared-key)
            (handler request)
            {::yres/status 403}))))
    (fn [_ _]
      {::yres/status 403})))

(def shared-key-auth
  {:name ::shared-key-auth
   :compile (constantly wrap-shared-key-auth)})
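
;; Illustrative note added in this review (not part of the patch): when the
;; configured key is a byte array it is normalized to a url-safe base64
;; string first, so clients must send that encoded value in the
;; `x-shared-key` header; with no key configured every request gets a 403.
(comment
  ;; the encoded form that a byte-array key is compared against
  (bc/bytes->b64-str (byte-array [1 2 3]) true))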

backend/src/app/http/security.clj (new file, 55 lines)
@@ -0,0 +1,55 @@
;; This Source Code Form is subject to the terms of the Mozilla Public
;; License, v. 2.0. If a copy of the MPL was not distributed with this
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
;;
;; Copyright (c) KALEIDOS INC

(ns app.http.security
  "Additional security layer middlewares"
  (:require
   [app.config :as cf]
   [yetti.request :as yreq]
   [yetti.response :as yres]))

(def ^:private safe-methods
  #{:get :head :options})

(defn- wrap-sec-fetch-metadata
  "Sec-Fetch metadata security layer middleware"
  [handler]
  (fn [request]
    (let [site (yreq/get-header request "sec-fetch-site")]
      (cond
        (= site "same-origin")
        (handler request)

        (or (= site "same-site")
            (= site "cross-site"))
        (if (contains? safe-methods (yreq/method request))
          (handler request)
          {::yres/status 403})

        :else
        (handler request)))))

(def sec-fetch-metadata
  {:name ::sec-fetch-metadata
   :compile (fn [_ _]
              (when (contains? cf/flags :sec-fetch-metadata-middleware)
                wrap-sec-fetch-metadata))})

(defn- wrap-client-header-check
  "Check for a penpot custom header to be present as additional CSRF
  protection"
  [handler]
  (fn [request]
    (let [client (yreq/get-header request "x-client")]
      (if (some? client)
        (handler request)
        {::yres/status 403}))))

(def client-header-check
  {:name ::client-header-check
   :compile (fn [_ _]
              (when (contains? cf/flags :client-header-check-middleware)
                wrap-client-header-check))})
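
;; Quick reference added in this review (not part of the original file):
;;   sec-fetch-site = "same-origin"              -> request always allowed
;;   sec-fetch-site = "same-site" / "cross-site" -> only :get, :head, :options
;;   header absent or any other value            -> allowed (older browsers)
;; Both middlewares are no-ops unless their corresponding flag
;; (:sec-fetch-metadata-middleware / :client-header-check-middleware) is set.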

@@ -10,45 +10,40 @@
   [app.common.data :as d]
   [app.common.logging :as l]
   [app.common.schema :as sm]
   [app.common.uri :as u]
   [app.common.time :as ct]
   [app.common.uuid :as uuid]
   [app.config :as cf]
   [app.db :as db]
   [app.db.sql :as sql]
   [app.http :as-alias http]
   [app.http.auth :as-alias http.auth]
   [app.http.session.tasks :as-alias tasks]
   [app.main :as-alias main]
   [app.setup :as-alias setup]
   [app.tokens :as tokens]
   [app.util.time :as dt]
   [cuerdas.core :as str]
   [integrant.core :as ig]
   [yetti.request :as yreq]))
   [yetti.request :as yreq]
   [yetti.response :as yres]))

;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; DEFAULTS
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;

;; A default cookie name for storing the session.
(def default-auth-token-cookie-name "auth-token")

;; A cookie that we can use to check from other sites of the same
;; domain if a user is authenticated.
(def default-auth-data-cookie-name "auth-data")

;; Default value for cookie max-age
(def default-cookie-max-age (dt/duration {:days 7}))
(def default-cookie-max-age (ct/duration {:days 7}))

;; Default age for automatic session renewal
(def default-renewal-max-age (dt/duration {:hours 6}))
(def default-renewal-max-age (ct/duration {:hours 6}))

;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; PROTOCOLS
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;

(defprotocol ISessionManager
  (read [_ key])
  (write! [_ key data])
  (update! [_ data])
  (delete! [_ key]))
  (read-session [_ id])
  (create-session [_ params])
  (update-session [_ session])
  (delete-session [_ id]))

(defn manager?
  [o]
@@ -63,67 +58,82 @@
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;

(def ^:private schema:params
  [:map {:title "session-params"}
   [:user-agent ::sm/text]
  [:map {:title "SessionParams" :closed true}
   [:profile-id ::sm/uuid]
   [:created-at ::sm/inst]])
   [:user-agent {:optional true} ::sm/text]
   [:sso-provider-id {:optional true} ::sm/uuid]
   [:sso-session-id {:optional true} :string]])

(def ^:private valid-params?
  (sm/validator schema:params))

(defn- prepare-session-params
  [key params]
  (assert (string? key) "expected key to be a string")
  (assert (not (str/blank? key)) "expected key to be not empty")
  (assert (valid-params? params) "expected valid params")

  {:user-agent (:user-agent params)
   :profile-id (:profile-id params)
   :created-at (:created-at params)
   :updated-at (:created-at params)
   :id key})

(defn- database-manager
  [pool]
  (reify ISessionManager
    (read [_ token]
      (db/exec-one! pool (sql/select :http-session {:id token})))
    (read-session [_ id]
      (if (string? id)
        ;; Backward compatibility
        (let [session (db/exec-one! pool (sql/select :http-session {:id id}))]
          (-> session
              (assoc :modified-at (:updated-at session))
              (dissoc :updated-at)))
        (db/exec-one! pool (sql/select :http-session-v2 {:id id}))))

    (write! [_ key params]
      (let [params (prepare-session-params key params)]
        (db/insert! pool :http-session params)
        params))
    (create-session [_ params]
      (assert (valid-params? params) "expect valid session params")

    (update! [_ params]
      (let [updated-at (dt/now)]
        (db/update! pool :http-session
                    {:updated-at updated-at}
                    {:id (:id params)})
        (assoc params :updated-at updated-at)))
      (let [now (ct/now)
            params (-> params
                       (assoc :id (uuid/next))
                       (assoc :created-at now)
                       (assoc :modified-at now))]
        (db/insert! pool :http-session-v2 params
                    {::db/return-keys true})))

    (delete! [_ token]
      (db/delete! pool :http-session {:id token})
    (update-session [_ session]
      (let [modified-at (ct/now)]
        (if (string? (:id session))
          (db/insert! pool :http-session-v2
                      (-> session
                          (assoc :id (uuid/next))
                          (assoc :created-at modified-at)
                          (assoc :modified-at modified-at)))
          (db/update! pool :http-session-v2
                      {:modified-at modified-at}
                      {:id (:id session)}
                      {::db/return-keys true}))))

    (delete-session [_ id]
      (if (string? id)
        (db/delete! pool :http-session {:id id} {::db/return-keys false})
        (db/delete! pool :http-session-v2 {:id id} {::db/return-keys false}))
      nil)))

(defn inmemory-manager
  []
  (let [cache (atom {})]
    (reify ISessionManager
      (read [_ token]
        (get @cache token))
      (read-session [_ id]
        (get @cache id))

      (write! [_ key params]
        (let [params (prepare-session-params key params)]
          (swap! cache assoc key params)
          params))
      (create-session [_ params]
        (assert (valid-params? params) "expect valid session params")

      (update! [_ params]
        (let [updated-at (dt/now)]
          (swap! cache update (:id params) assoc :updated-at updated-at)
          (assoc params :updated-at updated-at)))
        (let [now (ct/now)
              session (-> params
                          (assoc :id (uuid/next))
                          (assoc :created-at now)
                          (assoc :modified-at now))]
          (swap! cache assoc (:id session) session)
          session))

      (delete! [_ token]
        (swap! cache dissoc token)
      (update-session [_ session]
        (let [modified-at (ct/now)]
          (swap! cache update (:id session) assoc :modified-at modified-at)
          (assoc session :modified-at modified-at)))

      (delete-session [_ id]
        (swap! cache dissoc id)
        nil))))
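
;; Illustrative REPL sketch added in this review (not part of the patch):
;; exercising the new session-manager protocol with the in-memory
;; implementation; the profile id is a throwaway value.
(comment
  (let [manager (inmemory-manager)
        session (create-session manager {:profile-id (uuid/next)
                                         :user-agent "repl"})]
    (read-session manager (:id session))
    (update-session manager session)
    (delete-session manager (:id session))))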

(defmethod ig/assert-key ::manager
@@ -143,109 +153,116 @@
;; MANAGER IMPL
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;

(declare ^:private assign-auth-token-cookie)
(declare ^:private assign-auth-data-cookie)
(declare ^:private clear-auth-token-cookie)
(declare ^:private clear-auth-data-cookie)
(declare ^:private gen-token)
(declare ^:private assign-session-cookie)
(declare ^:private clear-session-cookie)

(defn- assign-token
  [cfg session]
  (let [claims {:iss "authentication"
                :aud "penpot"
                :sid (:id session)
                :iat (:modified-at session)
                :uid (:profile-id session)
                :sso-provider-id (:sso-provider-id session)
                :sso-session-id (:sso-session-id session)}
        header {:kid 1 :ver 1}
        token (tokens/generate cfg claims header)]
    (assoc session :token token)))

(defn create-fn
  [{:keys [::manager ::setup/props]} profile-id]
  [{:keys [::manager] :as cfg} {profile-id :id :as profile}
   & {:keys [sso-provider-id sso-session-id]}]

  (assert (manager? manager) "expected valid session manager")
  (assert (uuid? profile-id) "expected valid uuid for profile-id")

  (fn [request response]
    (let [uagent (yreq/get-header request "user-agent")
          params {:profile-id profile-id
                  :user-agent uagent
                  :created-at (dt/now)}
          token (gen-token props params)
          session (write! manager token params)]
      (l/trace :hint "create" :profile-id (str profile-id))
      (-> response
          (assign-auth-token-cookie session)
          (assign-auth-data-cookie session)))))
          session (->> {:user-agent uagent
                        :profile-id profile-id
                        :sso-provider-id sso-provider-id
                        :sso-session-id sso-session-id}
                       (d/without-nils)
                       (create-session manager)
                       (assign-token cfg))]

      (l/trc :hint "create" :id (str (:id session)) :profile-id (str profile-id))
      (assign-session-cookie response session))))
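
;; Sketch added in this review (not part of the patch): how a login flow
;; might use the reworked `create-fn`; `cfg`, `profile`, `request` and
;; `response` are placeholders supplied by the caller, and the SSO keyword
;; arguments are optional.
(comment
  (let [on-login (create-fn cfg profile)]
    ;; persists a session row and assigns the auth cookie on the response
    (on-login request {::yres/status 200})))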

(defn delete-fn
  [{:keys [::manager]}]
  (assert (manager? manager) "expected valid session manager")
  (fn [request response]
    (let [cname (cf/get :auth-token-cookie-name default-auth-token-cookie-name)
          cookie (yreq/get-cookie request cname)]
      (l/trace :hint "delete" :profile-id (:profile-id request))
      (some->> (:value cookie) (delete! manager))
      (-> response
          (assoc :status 204)
          (assoc :body nil)
          (clear-auth-token-cookie)
          (clear-auth-data-cookie)))))
    (some->> (get request ::id) (delete-session manager))
    (clear-session-cookie response)))

(defn- gen-token
  [props {:keys [profile-id created-at]}]
  (tokens/generate props {:iss "authentication"
                          :iat created-at
                          :uid profile-id}))
(defn- decode-token
  [props token]
  (when token
    (tokens/verify props {:token token :iss "authentication"})))
(defn decode-token
  [cfg token]
  (try
    (tokens/verify cfg {:token token :iss "authentication"})
    (catch Throwable cause
      (l/trc :hint "exception on decoding token"
             :token token
             :cause cause))))

(defn- get-token
(defn get-session
  [request]
  (let [cname (cf/get :auth-token-cookie-name default-auth-token-cookie-name)
        cookie (some-> (yreq/get-cookie request cname) :value)]
    (when-not (str/empty? cookie)
      cookie)))
  (get request ::session))

(defn- get-session
  [manager token]
  (some->> token (read manager)))
(defn invalidate-others
  [cfg session]
  (let [sql "delete from http_session_v2 where profile_id = ? and id != ?"]
    (-> (db/exec-one! cfg [sql (:profile-id session) (:id session)])
        (db/get-update-count))))

(defn- renew-session?
  [{:keys [updated-at] :as session}]
  (and (dt/instant? updated-at)
       (let [elapsed (dt/diff updated-at (dt/now))]
         (neg? (compare default-renewal-max-age elapsed)))))

(defn- wrap-soft-auth
  [handler {:keys [::manager ::setup/props]}]
  (assert (manager? manager) "expected valid session manager")
  (letfn [(handle-request [request]
            (try
              (let [token (get-token request)
                    claims (decode-token props token)]
                (cond-> request
                  (map? claims)
                  (-> (assoc ::token-claims claims)
                      (assoc ::token token))))
              (catch Throwable cause
                (l/trace :hint "exception on decoding malformed token" :cause cause)
                request)))]

    (fn [request]
      (handler (handle-request request)))))
  [{:keys [id modified-at] :as session}]
  (or (string? id)
      (and (ct/inst? modified-at)
           (let [elapsed (ct/diff modified-at (ct/now))]
             (neg? (compare default-renewal-max-age elapsed))))))
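
;; Worked example added in this review (not part of the patch): with the
;; default renewal max-age of 6 hours, a session whose :modified-at is older
;; than 6 hours (or any legacy session whose :id is still a string token)
;; gets renewed on the next authorized request.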

(defn- wrap-authz
  [handler {:keys [::manager]}]
  [handler {:keys [::manager] :as cfg}]
  (assert (manager? manager) "expected valid session manager")
  (fn [request]
    (let [session (get-session manager (::token request))
          request (cond-> request
                    (some? session)
                    (assoc ::profile-id (:profile-id session)
                           ::id (:id session)))
          response (handler request)]
    (let [{:keys [type token claims metadata]} (get request ::http/auth-data)]
      (cond
        (= type :cookie)
        (let [session (case (:ver metadata)
                        ;; BACKWARD COMPATIBILITY WITH OLD TOKENS
                        0 (read-session manager token)
                        1 (some->> (:sid claims) (read-session manager))
                        nil)

      (if (renew-session? session)
        (let [session (update! manager session)]
          (-> response
              (assign-auth-token-cookie session)
              (assign-auth-data-cookie session)))
        response))))
              request (cond-> request
                        (some? session)
                        (-> (assoc ::profile-id (:profile-id session))
                            (assoc ::session session)))

(def soft-auth
  {:name ::soft-auth
   :compile (constantly wrap-soft-auth)})
              response (handler request)]

          (if (and session (renew-session? session))
            (let [session (->> session
                               (update-session manager)
                               (assign-token cfg))]
              (assign-session-cookie response session))
            response))

        (= type :bearer)
        (let [session (case (:ver metadata)
                        ;; BACKWARD COMPATIBILITY WITH OLD TOKENS
                        0 (read-session manager token)
                        1 (some->> (:sid claims) (read-session manager))
                        nil)
              request (cond-> request
                        (some? session)
                        (-> (assoc ::profile-id (:profile-id session))
                            (assoc ::session session)))]
          (handler request))

        :else
        (handler request)))))

(def authz
  {:name ::authz
@@ -253,17 +270,17 @@

;; --- IMPL

(defn- assign-auth-token-cookie
  [response {token :id updated-at :updated-at}]
(defn- assign-session-cookie
  [response {token :token modified-at :modified-at}]
  (let [max-age (cf/get :auth-token-cookie-max-age default-cookie-max-age)
        created-at (or updated-at (dt/now))
        renewal (dt/plus created-at default-renewal-max-age)
        expires (dt/plus created-at max-age)
        created-at modified-at
        renewal (ct/plus created-at default-renewal-max-age)
        expires (ct/plus created-at max-age)
        secure? (contains? cf/flags :secure-session-cookies)
        strict? (contains? cf/flags :strict-session-cookies)
        cors? (contains? cf/flags :cors)
        name (cf/get :auth-token-cookie-name default-auth-token-cookie-name)
        comment (str "Renewal at: " (dt/format-instant renewal :rfc1123))
        name (cf/get :auth-token-cookie-name)
        comment (str "Renewal at: " (ct/format-inst renewal :rfc1123))
        cookie {:path "/"
                :http-only true
                :expires expires
@@ -271,59 +288,21 @@
                :comment comment
                :same-site (if cors? :none (if strict? :strict :lax))
                :secure secure?}]
    (update response :cookies assoc name cookie)))
    (update response ::yres/cookies assoc name cookie)))

(defn- assign-auth-data-cookie
  [response {profile-id :profile-id updated-at :updated-at}]
  (let [max-age (cf/get :auth-token-cookie-max-age default-cookie-max-age)
        domain (cf/get :auth-data-cookie-domain)
        cname default-auth-data-cookie-name

        created-at (or updated-at (dt/now))
        renewal (dt/plus created-at default-renewal-max-age)
        expires (dt/plus created-at max-age)

        comment (str "Renewal at: " (dt/format-instant renewal :rfc1123))
        secure? (contains? cf/flags :secure-session-cookies)
        strict? (contains? cf/flags :strict-session-cookies)
        cors? (contains? cf/flags :cors)

        cookie {:domain domain
                :expires expires
                :path "/"
                :comment comment
                :value (u/map->query-string {:profile-id profile-id})
                :same-site (if cors? :none (if strict? :strict :lax))
                :secure secure?}]

    (cond-> response
      (string? domain)
      (update :cookies assoc cname cookie))))

(defn- clear-auth-token-cookie
(defn- clear-session-cookie
  [response]
  (let [cname (cf/get :auth-token-cookie-name default-auth-token-cookie-name)]
    (update response :cookies assoc cname {:path "/" :value "" :max-age 0})))

(defn- clear-auth-data-cookie
  [response]
  (let [cname default-auth-data-cookie-name
        domain (cf/get :auth-data-cookie-domain)]
    (cond-> response
      (string? domain)
      (update :cookies assoc cname {:domain domain :path "/" :value "" :max-age 0}))))

  (let [cname (cf/get :auth-token-cookie-name)]
    (update response ::yres/cookies assoc cname {:path "/" :value "" :max-age 0})))

;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; TASK: SESSION GC
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;

;; FIXME: MOVE

(defmethod ig/assert-key ::tasks/gc
  [_ params]
  (assert (db/pool? (::db/pool params)) "expected valid database pool")
  (assert (dt/duration? (::tasks/max-age params))))
  (assert (ct/duration? (::tasks/max-age params))))

(defmethod ig/expand-key ::tasks/gc
  [k v]
@@ -332,22 +311,23 @@

(def ^:private
  sql:delete-expired
  "delete from http_session
   where updated_at < now() - ?::interval
  "DELETE FROM http_session
    WHERE updated_at < ?::timestamptz
      or (updated_at is null and
          created_at < now() - ?::interval)")
          created_at < ?::timestamptz)")

(defn- collect-expired-tasks
  [{:keys [::db/conn ::tasks/max-age]}]
  (let [interval (db/interval max-age)
        result (db/exec-one! conn [sql:delete-expired interval interval])
        result (:next.jdbc/update-count result)]
    (l/debug :task "gc"
             :hint "clean http sessions"
             :deleted result)
  (let [threshold (ct/minus (ct/now) max-age)
        result (-> (db/exec-one! conn [sql:delete-expired threshold threshold])
                   (db/get-update-count))]
    (l/dbg :task "gc"
           :hint "clean http sessions"
           :deleted result)
    result))

(defmethod ig/init-key ::tasks/gc
  [_ {:keys [::tasks/max-age] :as cfg}]
  (l/debug :hint "initializing session gc task" :max-age max-age)
  (fn [_] (db/tx-run! cfg collect-expired-tasks)))
  (l/dbg :hint "initializing session gc task" :max-age max-age)
  (fn [_]
    (db/tx-run! cfg collect-expired-tasks)))

@@ -33,7 +33,7 @@
              (println "event:" (d/name name))
              (println "data:" (t/encode-str data {:type :json-verbose}))
              (println))]
      (.getBytes data "UTF-8"))
      (.getBytes ^String data "UTF-8"))
    (catch Throwable cause
      (l/err :hint "unexpected error on encoding value on sse stream"
             :cause cause)
@@ -44,7 +44,8 @@
(def default-headers
  {"Content-Type" "text/event-stream;charset=UTF-8"
   "Cache-Control" "no-cache, no-store, max-age=0, must-revalidate"
   "Pragma" "no-cache"})
   "Pragma" "no-cache"
   "X-Accel-Buffering" "no"})

(defn response
  [handler & {:keys [buf] :or {buf 32} :as opts}]
@@ -54,7 +55,7 @@
   ::yres/body (yres/stream-body
                (fn [_ output]
                  (let [channel (sp/chan :buf buf :xf (keep encode))
                        listener (events/start-listener
                        listener (events/spawn-listener
                                  channel
                                  (partial write! output)
                                  (partial pu/close! output))]

@@ -11,12 +11,12 @@
   [app.common.logging :as l]
   [app.common.pprint :as pp]
   [app.common.schema :as sm]
   [app.common.time :as ct]
   [app.common.uuid :as uuid]
   [app.db :as db]
   [app.http.session :as session]
   [app.metrics :as mtx]
   [app.msgbus :as mbus]
   [app.util.time :as dt]
   [app.util.websocket :as ws]
   [integrant.core :as ig]
   [promesa.exec.csp :as sp]
@@ -239,7 +239,7 @@

(defn- on-connect
  [{:keys [::mtx/metrics]} {:keys [::ws/id] :as wsp}]
  (let [created-at (dt/now)]
  (let [created-at (ct/now)]
    (l/trace :fn "on-connect" :conn-id id)
    (swap! state assoc id wsp)
    (mtx/run! metrics
@@ -253,7 +253,7 @@
        (mtx/run! metrics :id :websocket-active-connections :dec 1)
        (mtx/run! metrics
                  :id :websocket-session-timing
                  :val (/ (inst-ms (dt/diff created-at (dt/now))) 1000.0))))))
                  :val (/ (inst-ms (ct/diff created-at (ct/now))) 1000.0))))))

(defn- on-rcv-message
  [{:keys [::mtx/metrics ::profile-id ::session-id]} message]

@@ -11,6 +11,7 @@
   [app.common.data.macros :as dm]
   [app.common.logging :as l]
   [app.common.schema :as sm]
   [app.common.time :as ct]
   [app.common.uuid :as uuid]
   [app.config :as cf]
   [app.db :as db]
@@ -23,9 +24,9 @@
   [app.setup :as-alias setup]
   [app.util.inet :as inet]
   [app.util.services :as-alias sv]
   [app.util.time :as dt]
   [app.worker :as wrk]
   [cuerdas.core :as str]))
   [cuerdas.core :as str]
   [yetti.request :as yreq]))

;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; HELPERS
@@ -90,6 +91,22 @@
     ::ip-addr (::rpc/ip-addr params)
     ::context (d/without-nils context)}))

(defn get-external-session-id
  [request]
  (when-let [session-id (yreq/get-header request "x-external-session-id")]
    (when-not (or (> (count session-id) 256)
                  (= session-id "null")
                  (str/blank? session-id))
      session-id)))

(defn- get-external-event-origin
  [request]
  (when-let [origin (yreq/get-header request "x-event-origin")]
    (when-not (or (> (count origin) 256)
                  (= origin "null")
                  (str/blank? origin))
      origin)))

;; --- SPECS

;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
@@ -108,9 +125,9 @@
   [::ip-addr {:optional true} ::sm/text]
   [::props {:optional true} [:map-of :keyword :any]]
   [::context {:optional true} [:map-of :keyword :any]]
   [::tracked-at {:optional true} ::sm/inst]
   [::tracked-at {:optional true} ::ct/inst]
   [::webhooks/event? {:optional true} ::sm/boolean]
   [::webhooks/batch-timeout {:optional true} ::dt/duration]
   [::webhooks/batch-timeout {:optional true} ::ct/duration]
   [::webhooks/batch-key {:optional true}
    [:or ::sm/fn ::sm/text :keyword]]])

@@ -126,8 +143,6 @@
                     (::rpc/profile-id params)
                     uuid/zero)

        session-id (get params ::rpc/external-session-id)
        event-origin (get params ::rpc/external-event-origin)
        props (-> (or (::replace-props resultm)
                      (-> params
                          (merge (::props resultm))
@@ -138,8 +153,10 @@

        token-id (::actoken/id request)
        context (-> (::context resultm)
                    (assoc :external-session-id session-id)
                    (assoc :external-event-origin event-origin)
                    (assoc :external-session-id
                           (get-external-session-id request))
                    (assoc :external-event-origin
                           (get-external-event-origin request))
                    (assoc :access-token-id (some-> token-id str))
                    (d/without-nils))

@@ -199,7 +216,7 @@
(defn- handle-event!
  [cfg event]
  (let [params (event->params event)
        tnow (dt/now)]
        tnow (ct/now)]

    (when (contains? cf/flags :audit-log)
      ;; NOTE: this operation may cause primary key conflicts on inserts
@@ -273,7 +290,7 @@
  (let [event (-> (d/without-nils event)
                  (check-event))]
    (db/run! cfg (fn [cfg]
                   (let [tnow (dt/now)
                   (let [tnow (ct/now)
                         params (-> (event->params event)
                                    (assoc :created-at tnow)
                                    (update :tracked-at #(or % tnow)))]

@@ -16,7 +16,6 @@
   [app.http.client :as http]
   [app.setup :as-alias setup]
   [app.tokens :as tokens]
   [app.util.time :as dt]
   [integrant.core :as ig]
   [lambdaisland.uri :as u]
   [promesa.exec :as px]))
@@ -53,9 +52,8 @@

(defn- send!
  [{:keys [::uri] :as cfg} events]
  (let [token (tokens/generate (::setup/props cfg)
  (let [token (tokens/generate cfg
                               {:iss "authentication"
                                :iat (dt/now)
                                :uid uuid/zero})
        body (t/encode {:events events})
        headers {"content-type" "application/transit+json"

@@ -49,7 +49,7 @@
        ctx (-> context
                (assoc :tenant (cf/get :tenant))
                (assoc :host (cf/get :host))
                (assoc :public-uri (cf/get :public-uri))
                (assoc :public-uri (str (cf/get :public-uri)))
                (assoc :logger/name logger)
                (assoc :logger/level level)
                (dissoc :request/params :value :params :data))]

@@ -10,13 +10,13 @@
   [app.common.data :as d]
   [app.common.data.macros :as dm]
   [app.common.logging :as l]
   [app.common.time :as ct]
   [app.common.transit :as t]
   [app.common.uri :as uri]
   [app.config :as cf]
   [app.db :as db]
   [app.http.client :as http]
   [app.loggers.audit :as audit]
   [app.util.time :as dt]
   [app.worker :as wrk]
   [clojure.data.json :as json]
   [cuerdas.core :as str]
@@ -124,7 +124,7 @@
                    {:id (:id whook)})))

       (db/update! pool :webhook
                   {:updated-at (dt/now)
                   {:updated-at (ct/now)
                    :error-code nil
                    :error-count 0}
                   {:id (:id whook)})))
@@ -132,7 +132,7 @@
   (report-delivery! [whook req rsp err]
     (db/insert! pool :webhook-delivery
                 {:webhook-id (:id whook)
                  :created-at (dt/now)
                  :created-at (ct/now)
                  :error-code err
                  :req-data (db/tjson req)
                  :rsp-data (db/tjson rsp)}))]
@@ -155,7 +155,7 @@
  (let [req {:uri (:uri whook)
             :headers {"content-type" (:mtype whook)
                       "user-agent" (str/ffmt "penpot/%" (:main cf/version))}
             :timeout (dt/duration "4s")
             :timeout (ct/duration "4s")
             :method :post
             :body body}]
    (try

@@ -11,6 +11,7 @@
   [app.auth.oidc.providers :as-alias oidc.providers]
   [app.common.exceptions :as ex]
   [app.common.logging :as l]
   [app.common.time :as ct]
   [app.config :as cf]
   [app.db :as-alias db]
   [app.email :as-alias email]
@@ -19,7 +20,8 @@
   [app.http.awsns :as http.awsns]
   [app.http.client :as-alias http.client]
   [app.http.debug :as-alias http.debug]
   [app.http.session :as-alias session]
   [app.http.management :as mgmt]
   [app.http.session :as session]
   [app.http.session.tasks :as-alias session.tasks]
   [app.http.websocket :as http.ws]
   [app.loggers.webhooks :as-alias webhooks]
@@ -29,7 +31,6 @@
   [app.redis :as-alias rds]
   [app.rpc :as-alias rpc]
   [app.rpc.climit :as-alias climit]
   [app.rpc.doc :as-alias rpc.doc]
   [app.setup :as-alias setup]
   [app.srepl :as-alias srepl]
   [app.storage :as-alias sto]
@@ -38,8 +39,9 @@
   [app.storage.gc-touched :as-alias sto.gc-touched]
   [app.storage.s3 :as-alias sto.s3]
   [app.svgo :as-alias svgo]
   [app.util.time :as dt]
   [app.util.cron]
   [app.worker :as-alias wrk]
   [app.worker.executor]
   [clojure.test :as test]
   [clojure.tools.namespace.repl :as repl]
   [cuerdas.core :as str]
@@ -146,23 +148,11 @@
    ::mdef/labels []
    ::mdef/type :histogram}

   :executors-active-threads
   {::mdef/name "penpot_executors_active_threads"
    ::mdef/help "Current number of threads available in the executor service."
    ::mdef/labels ["name"]
    ::mdef/type :gauge}

   :executors-completed-tasks
   {::mdef/name "penpot_executors_completed_tasks_total"
    ::mdef/help "Approximate number of completed tasks by the executor."
    ::mdef/labels ["name"]
    ::mdef/type :counter}

   :executors-running-threads
   {::mdef/name "penpot_executors_running_threads"
    ::mdef/help "Current number of threads with state RUNNING."
    ::mdef/labels ["name"]
    ::mdef/type :gauge}})
   :http-server-dispatch-timing
   {::mdef/name "penpot_http_server_dispatch_timing"
    ::mdef/help "Histogram of dispatch handler"
    ::mdef/labels []
    ::mdef/type :histogram}})

(def system-config
  {::db/pool
@@ -174,14 +164,12 @@
    ::db/max-size (cf/get :database-max-pool-size 60)
    ::mtx/metrics (ig/ref ::mtx/metrics)}

   ;; Default thread pool for IO operations
   ::wrk/executor
   {}
   ;; Default netty IO pool (shared between several services)
   ::wrk/netty-io-executor
   {:threads (cf/get :netty-io-threads)}

   ::wrk/monitor
   {::mtx/metrics (ig/ref ::mtx/metrics)
    ::wrk/executor (ig/ref ::wrk/executor)
    ::wrk/name "default"}
   ::wrk/netty-executor
   {:threads (cf/get :executor-threads)}

   :app.migrations/migrations
   {::db/pool (ig/ref ::db/pool)}
@@ -192,17 +180,27 @@
   ::mtx/routes
   {::mtx/metrics (ig/ref ::mtx/metrics)}

   ::rds/redis
   {::rds/uri (cf/get :redis-uri)
    ::mtx/metrics (ig/ref ::mtx/metrics)
    ::wrk/executor (ig/ref ::wrk/executor)}
   ::rds/client
   {::rds/uri
    (cf/get :redis-uri)

    ::wrk/netty-executor
    (ig/ref ::wrk/netty-executor)

    ::wrk/netty-io-executor
    (ig/ref ::wrk/netty-io-executor)}

   ::rds/pool
   {::rds/client (ig/ref ::rds/client)
    ::mtx/metrics (ig/ref ::mtx/metrics)}

   ::mbus/msgbus
   {::wrk/executor (ig/ref ::wrk/executor)
    ::rds/redis (ig/ref ::rds/redis)}
   {::wrk/executor (ig/ref ::wrk/netty-executor)
    ::rds/client (ig/ref ::rds/client)
    ::mtx/metrics (ig/ref ::mtx/metrics)}

   :app.storage.tmp/cleaner
   {::wrk/executor (ig/ref ::wrk/executor)}
   {::wrk/executor (ig/ref ::wrk/netty-executor)}

   ::sto.gc-deleted/handler
   {::db/pool (ig/ref ::db/pool)
@@ -230,9 +228,10 @@
    ::http/host (cf/get :http-server-host)
    ::http/router (ig/ref ::http/router)
    ::http/io-threads (cf/get :http-server-io-threads)
    ::http/max-worker-threads (cf/get :http-server-max-worker-threads)
    ::http/max-body-size (cf/get :http-server-max-body-size)
    ::http/max-multipart-body-size (cf/get :http-server-max-multipart-body-size)
    ::wrk/executor (ig/ref ::wrk/executor)}
    ::mtx/metrics (ig/ref ::mtx/metrics)}

   ::ldap/provider
   {:host (cf/get :ldap-host)
@@ -260,26 +259,33 @@
   ::oidc.providers/generic
   {::http.client/client (ig/ref ::http.client/client)}

   ::oidc/providers
   [(ig/ref ::oidc.providers/google)
    (ig/ref ::oidc.providers/github)
    (ig/ref ::oidc.providers/gitlab)
    (ig/ref ::oidc.providers/generic)]

   ::oidc/routes
   {::http.client/client (ig/ref ::http.client/client)
    ::db/pool (ig/ref ::db/pool)
    ::setup/props (ig/ref ::setup/props)
    ::oidc/providers {:google (ig/ref ::oidc.providers/google)
                      :github (ig/ref ::oidc.providers/github)
                      :gitlab (ig/ref ::oidc.providers/gitlab)
                      :oidc (ig/ref ::oidc.providers/generic)}
    ::oidc/providers (ig/ref ::oidc/providers)
    ::session/manager (ig/ref ::session/manager)
    ::email/blacklist (ig/ref ::email/blacklist)
    ::email/whitelist (ig/ref ::email/whitelist)}

   ::mgmt/routes
   {::db/pool (ig/ref ::db/pool)
    ::setup/props (ig/ref ::setup/props)}

   :app.http/router
   {::session/manager (ig/ref ::session/manager)
    ::db/pool (ig/ref ::db/pool)
    ::rpc/routes (ig/ref ::rpc/routes)
    ::rpc.doc/routes (ig/ref ::rpc.doc/routes)
    ::setup/props (ig/ref ::setup/props)
    ::mtx/routes (ig/ref ::mtx/routes)
    ::oidc/routes (ig/ref ::oidc/routes)
    ::mgmt/routes (ig/ref ::mgmt/routes)
    ::http.debug/routes (ig/ref ::http.debug/routes)
    ::http.assets/routes (ig/ref ::http.assets/routes)
    ::http.ws/routes (ig/ref ::http.ws/routes)
@@ -295,33 +301,35 @@
   {::db/pool (ig/ref ::db/pool)
    ::mtx/metrics (ig/ref ::mtx/metrics)
    ::mbus/msgbus (ig/ref ::mbus/msgbus)
    ::setup/props (ig/ref ::setup/props)
    ::session/manager (ig/ref ::session/manager)}

   :app.http.assets/routes
   {::http.assets/path (cf/get :assets-path)
    ::http.assets/cache-max-age (dt/duration {:hours 24})
    ::http.assets/signature-max-age (dt/duration {:hours 24 :minutes 5})
    ::http.assets/cache-max-age (ct/duration {:hours 24})
    ::http.assets/signature-max-age (ct/duration {:hours 24 :minutes 5})
    ::sto/storage (ig/ref ::sto/storage)}

   ::rpc/climit
   {::mtx/metrics (ig/ref ::mtx/metrics)
    ::wrk/executor (ig/ref ::wrk/executor)
    ::wrk/executor (ig/ref ::wrk/netty-executor)
    ::climit/config (cf/get :rpc-climit-config)
    ::climit/enabled (contains? cf/flags :rpc-climit)}

   :app.rpc/rlimit
   {::wrk/executor (ig/ref ::wrk/executor)}
   {::wrk/executor (ig/ref ::wrk/netty-executor)}

   :app.rpc/methods
   {::http.client/client (ig/ref ::http.client/client)
    ::db/pool (ig/ref ::db/pool)
    ::wrk/executor (ig/ref ::wrk/executor)
    ::rds/pool (ig/ref ::rds/pool)
    ::wrk/executor (ig/ref ::wrk/netty-executor)
    ::session/manager (ig/ref ::session/manager)
    ::ldap/provider (ig/ref ::ldap/provider)
    ::sto/storage (ig/ref ::sto/storage)
    ::mtx/metrics (ig/ref ::mtx/metrics)
    ::mbus/msgbus (ig/ref ::mbus/msgbus)
    ::rds/redis (ig/ref ::rds/redis)
    ::rds/client (ig/ref ::rds/client)

    ::rpc/climit (ig/ref ::rpc/climit)
    ::rpc/rlimit (ig/ref ::rpc/rlimit)
@@ -331,14 +339,26 @@
    ::email/blacklist (ig/ref ::email/blacklist)
    ::email/whitelist (ig/ref ::email/whitelist)}

   :app.rpc.doc/routes
   {:app.rpc/methods (ig/ref :app.rpc/methods)}
   :app.rpc/management-methods
   {::http.client/client (ig/ref ::http.client/client)
    ::db/pool (ig/ref ::db/pool)
    ::rds/pool (ig/ref ::rds/pool)
    ::wrk/executor (ig/ref ::wrk/netty-executor)
    ::session/manager (ig/ref ::session/manager)
    ::sto/storage (ig/ref ::sto/storage)
    ::mtx/metrics (ig/ref ::mtx/metrics)
    ::mbus/msgbus (ig/ref ::mbus/msgbus)
    ::rds/client (ig/ref ::rds/client)
    ::setup/props (ig/ref ::setup/props)}

   :app.rpc/routes
   {::rpc/methods (ig/ref :app.rpc/methods)
    ::db/pool (ig/ref ::db/pool)
    ::session/manager (ig/ref ::session/manager)
    ::setup/props (ig/ref ::setup/props)}
   ::rpc/routes
   {::rpc/methods (ig/ref :app.rpc/methods)
    ::rpc/management-methods (ig/ref :app.rpc/management-methods)

    ;; FIXME: revisit if db/pool is necessary here
    ::db/pool (ig/ref ::db/pool)
    ::session/manager (ig/ref ::session/manager)
    ::setup/props (ig/ref ::setup/props)}

   ::wrk/registry
   {::mtx/metrics (ig/ref ::mtx/metrics)
@@ -426,6 +446,9 @@
    ;; module requires the migrations to run before initialize.
    ::migrations (ig/ref :app.migrations/migrations)}

   ::setup/clock
   {}

   :app.loggers.audit.archive-task/handler
   {::setup/props (ig/ref ::setup/props)
    ::db/pool (ig/ref ::db/pool)
@@ -469,49 +492,50 @@
                        (cf/get :objects-storage-s3-bucket))
    ::sto.s3/io-threads (or (cf/get :storage-assets-s3-io-threads)
                            (cf/get :objects-storage-s3-io-threads))
    ::wrk/executor (ig/ref ::wrk/executor)}

    ::wrk/netty-io-executor
    (ig/ref ::wrk/netty-io-executor)}

   :app.storage.fs/backend
   {::sto.fs/directory (or (cf/get :storage-assets-fs-directory)
                           (cf/get :objects-storage-fs-directory))}})


(def worker-config
  {::wrk/cron
   {::wrk/registry (ig/ref ::wrk/registry)
    ::db/pool (ig/ref ::db/pool)
    ::wrk/entries
    [{:cron #app/cron "0 0 0 * * ?" ;; daily
    [{:cron #penpot/cron "0 0 0 * * ?" ;; daily
      :task :session-gc}

     {:cron #app/cron "0 0 0 * * ?" ;; daily
     {:cron #penpot/cron "0 0 0 * * ?" ;; daily
      :task :objects-gc}

     {:cron #app/cron "0 0 0 * * ?" ;; daily
     {:cron #penpot/cron "0 0 0 * * ?" ;; daily
      :task :storage-gc-deleted}

     {:cron #app/cron "0 0 0 * * ?" ;; daily
     {:cron #penpot/cron "0 0 0 * * ?" ;; daily
      :task :storage-gc-touched}

     {:cron #app/cron "0 0 0 * * ?" ;; daily
     {:cron #penpot/cron "0 0 0 * * ?" ;; daily
      :task :tasks-gc}

     {:cron #app/cron "0 0 2 * * ?" ;; daily
     {:cron #penpot/cron "0 0 2 * * ?" ;; daily
      :task :file-gc-scheduler}

     {:cron #app/cron "0 30 */3,23 * * ?"
     {:cron #penpot/cron "0 30 */3,23 * * ?"
      :task :telemetry}

     (when (contains? cf/flags :audit-log-archive)
       {:cron #app/cron "0 */5 * * * ?" ;; every 5m
       {:cron #penpot/cron "0 */5 * * * ?" ;; every 5m
        :task :audit-log-archive})

     (when (contains? cf/flags :audit-log-gc)
       {:cron #app/cron "30 */5 * * * ?" ;; every 5m
       {:cron #penpot/cron "30 */5 * * * ?" ;; every 5m
        :task :audit-log-gc})]}

   ::wrk/dispatcher
   {::rds/redis (ig/ref ::rds/redis)
   {::rds/client (ig/ref ::rds/client)
    ::mtx/metrics (ig/ref ::mtx/metrics)
    ::db/pool (ig/ref ::db/pool)
    ::wrk/tenant (cf/get :tenant)}
@@ -520,7 +544,7 @@
   {::wrk/parallelism (cf/get ::worker-default-parallelism 1)
    ::wrk/queue :default
    ::wrk/tenant (cf/get :tenant)
    ::rds/redis (ig/ref ::rds/redis)
    ::rds/client (ig/ref ::rds/client)
    ::wrk/registry (ig/ref ::wrk/registry)
    ::mtx/metrics (ig/ref ::mtx/metrics)
    ::db/pool (ig/ref ::db/pool)}
@@ -529,7 +553,7 @@
   {::wrk/parallelism (cf/get ::worker-webhook-parallelism 1)
    ::wrk/queue :webhooks
    ::wrk/tenant (cf/get :tenant)
    ::rds/redis (ig/ref ::rds/redis)
    ::rds/client (ig/ref ::rds/client)
    ::wrk/registry (ig/ref ::wrk/registry)
    ::mtx/metrics (ig/ref ::mtx/metrics)
    ::db/pool (ig/ref ::db/pool)}})
@@ -14,11 +14,12 @@
   [app.common.media :as cm]
   [app.common.schema :as sm]
   [app.common.schema.openapi :as-alias oapi]
   [app.common.time :as ct]
   [app.config :as cf]
   [app.db :as-alias db]
   [app.http.client :as http]
   [app.storage :as-alias sto]
   [app.storage.tmp :as tmp]
   [app.util.time :as dt]
   [buddy.core.bytes :as bb]
   [buddy.core.codecs :as bc]
   [clojure.java.shell :as sh]
@@ -37,16 +38,17 @@
   org.im4java.core.IMOperation
   org.im4java.core.Info))

(def default-max-file-size
  (* 1024 1024 10)) ; 10 MiB

(def schema:upload
  (sm/register!
   ^{::sm/type ::upload}
   [:map {:title "Upload"}
    [:filename :string]
    [:size ::sm/int]
    [:path ::fs/path]
    [:mtype {:optional true} :string]
    [:headers {:optional true}
     [:map-of :string :string]]]))
  [:map {:title "Upload"}
   [:filename :string]
   [:size ::sm/int]
   [:path ::fs/path]
   [:mtype {:optional true} :string]
   [:headers {:optional true}
    [:map-of :string :string]]])

(def ^:private schema:input
  [:map {:title "Input"}
@@ -118,7 +120,7 @@
(defn- parse-svg
  [text]
  (let [text (strip-doctype text)]
    (dm/with-open [istream (IOUtils/toInputStream text "UTF-8")]
    (dm/with-open [istream (IOUtils/toInputStream ^String text "UTF-8")]
      (xml/parse istream secure-parser-factory))))

;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
@@ -243,7 +245,7 @@
      (ex/raise :type :validation
                :code :invalid-svg-file
                :hint "uploaded svg does not provides dimensions"))
    (merge input info {:ts (dt/now)}))
    (merge input info {:ts (ct/now) :size (fs/size path)}))

  (let [instance (Info. (str path))
        mtype' (.getProperty instance "Mime type")]
@@ -263,7 +265,8 @@
        (assoc input
               :width width
               :height height
               :ts (dt/now)))))))
               :size (fs/size path)
               :ts (ct/now)))))))

(defmethod process-error org.im4java.core.InfoException
  [error]
@@ -272,6 +275,54 @@
            :hint "invalid image"
            :cause error))

;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; IMAGE HELPERS
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;

(defn download-image
  "Download an image from the provided URI and return the media input object"
  [{:keys [::http/client]} uri]
  (letfn [(parse-and-validate [{:keys [headers] :as response}]
            (let [size (some-> (get headers "content-length") d/parse-integer)
                  mtype (get headers "content-type")
                  format (cm/mtype->format mtype)
                  max-size (cf/get :media-max-file-size default-max-file-size)]

              (when-not size
                (ex/raise :type :validation
                          :code :unknown-size
                          :hint "seems like the url points to resource with unknown size"))

              (when (> size max-size)
                (ex/raise :type :validation
                          :code :file-too-large
                          :hint (str/ffmt "the file size % is greater than the maximum %"
                                          size
                                          default-max-file-size)))

              (when (nil? format)
                (ex/raise :type :validation
                          :code :media-type-not-allowed
                          :hint "seems like the url points to an invalid media object"))

              {:size size :mtype mtype :format format}))]

    (let [{:keys [body] :as response} (http/req! client
                                                 {:method :get :uri uri}
                                                 {:response-type :input-stream})
          {:keys [size mtype]} (parse-and-validate response)
          path (tmp/tempfile :prefix "penpot.media.download.")
          written (io/write* path body :size size)]

      (when (not= written size)
        (ex/raise :type :internal
                  :code :mismatch-write-size
                  :hint "unexpected state: unable to write to file"))

      {;; :size size
       :path path
       :mtype mtype})))
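
;; Illustrative usage added in this review (not part of the patch); `client`
;; stands for whatever ::http/client instance the system map provides.
(comment
  (download-image {::http/client client} "https://example.com/image.png"))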

;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; FONTS
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;

@@ -441,7 +441,22 @@
     :fn (mg/resource "app/migrations/sql/0139-mod-file-change-table.sql")}

    {:name "0140-mod-file-change-table.sql"
     :fn (mg/resource "app/migrations/sql/0140-mod-file-change-table.sql")}])
     :fn (mg/resource "app/migrations/sql/0140-mod-file-change-table.sql")}

    {:name "0140-add-locked-by-column-to-file-change-table"
     :fn (mg/resource "app/migrations/sql/0140-add-locked-by-column-to-file-change-table.sql")}

    {:name "0141-add-idx-to-file-library-rel"
     :fn (mg/resource "app/migrations/sql/0141-add-idx-to-file-library-rel.sql")}

    {:name "0141-add-file-data-table.sql"
     :fn (mg/resource "app/migrations/sql/0141-add-file-data-table.sql")}

    {:name "0142-add-sso-provider-table"
     :fn (mg/resource "app/migrations/sql/0142-add-sso-provider-table.sql")}

    {:name "0143-http-session-v2-table"
     :fn (mg/resource "app/migrations/sql/0143-add-http-session-v2-table.sql")}])

(defn apply-migrations!
  [pool name migrations]

@@ -10,8 +10,8 @@
   [app.common.exceptions :as ex]
   [app.common.logging :as l]
   [app.common.pprint]
   [app.srepl.fixes.media-refs :refer [process-file]]
   [app.srepl.main :as srepl]
   [app.srepl.procs.media-refs]
   [clojure.edn :as edn]))

(def ^:private required-services
@@ -20,7 +20,10 @@
   :app.storage/storage
   :app.metrics/metrics
   :app.db/pool
   :app.worker/executor])
   :app.worker/netty-io-executor])

(def default-options
  {:rollback? false})

(defn -main
  [& [options]]
@@ -28,22 +31,20 @@
    (let [config-var (requiring-resolve 'app.main/system-config)
          start-var (requiring-resolve 'app.main/start-custom)
          stop-var (requiring-resolve 'app.main/stop)
          config (select-keys @config-var required-services)]

          config (select-keys @config-var required-services)
          options (if (string? options)
                    (ex/ignoring (edn/read-string options))
                    {})
          options (-> (merge default-options options)
                      (assoc :proc-fn #'app.srepl.procs.media-refs/fix-media-refs))]

      (start-var config)

      (let [options (if (string? options)
                      (ex/ignoring (edn/read-string options))
                      {})]

        (l/inf :hint "executing media-refs migration" :options options)
        (srepl/process-files! process-file options))

      (l/inf :hint "executing media-refs migration" :options options)
      (srepl/process! options)
      (stop-var)
      (System/exit 0))
    (catch Throwable cause
      (ex/print-throwable cause)
      (flush)
      (System/exit -1))))

@@ -0,0 +1,11 @@
-- Add locked_by column to file_change table for version locking feature
-- This allows users to lock their own saved versions to prevent deletion by others

ALTER TABLE file_change
  ADD COLUMN locked_by uuid NULL REFERENCES profile(id) ON DELETE SET NULL DEFERRABLE;

-- Create index for locked versions queries
CREATE INDEX file_change__locked_by__idx ON file_change (locked_by) WHERE locked_by IS NOT NULL;

-- Add comment for documentation
COMMENT ON COLUMN file_change.locked_by IS 'Profile ID of user who has locked this version. Only the creator can lock/unlock their own versions. Locked versions cannot be deleted by others.';

backend/src/app/migrations/sql/0141-add-file-data-table.sql (new file, 38 lines)
@@ -0,0 +1,38 @@
CREATE TABLE file_data (
    file_id uuid NOT NULL REFERENCES file(id) DEFERRABLE,
    id uuid NOT NULL,

    created_at timestamptz NOT NULL DEFAULT now(),
    modified_at timestamptz NOT NULL DEFAULT now(),
    deleted_at timestamptz NULL,

    type text NOT NULL,
    backend text NULL,

    metadata jsonb NULL,
    data bytea NULL,

    PRIMARY KEY (file_id, id)

) PARTITION BY HASH (file_id);

CREATE TABLE file_data_00 PARTITION OF file_data FOR VALUES WITH (MODULUS 16, REMAINDER 0);
CREATE TABLE file_data_01 PARTITION OF file_data FOR VALUES WITH (MODULUS 16, REMAINDER 1);
CREATE TABLE file_data_02 PARTITION OF file_data FOR VALUES WITH (MODULUS 16, REMAINDER 2);
CREATE TABLE file_data_03 PARTITION OF file_data FOR VALUES WITH (MODULUS 16, REMAINDER 3);
CREATE TABLE file_data_04 PARTITION OF file_data FOR VALUES WITH (MODULUS 16, REMAINDER 4);
CREATE TABLE file_data_05 PARTITION OF file_data FOR VALUES WITH (MODULUS 16, REMAINDER 5);
CREATE TABLE file_data_06 PARTITION OF file_data FOR VALUES WITH (MODULUS 16, REMAINDER 6);
CREATE TABLE file_data_07 PARTITION OF file_data FOR VALUES WITH (MODULUS 16, REMAINDER 7);
CREATE TABLE file_data_08 PARTITION OF file_data FOR VALUES WITH (MODULUS 16, REMAINDER 8);
CREATE TABLE file_data_09 PARTITION OF file_data FOR VALUES WITH (MODULUS 16, REMAINDER 9);
CREATE TABLE file_data_10 PARTITION OF file_data FOR VALUES WITH (MODULUS 16, REMAINDER 10);
CREATE TABLE file_data_11 PARTITION OF file_data FOR VALUES WITH (MODULUS 16, REMAINDER 11);
CREATE TABLE file_data_12 PARTITION OF file_data FOR VALUES WITH (MODULUS 16, REMAINDER 12);
CREATE TABLE file_data_13 PARTITION OF file_data FOR VALUES WITH (MODULUS 16, REMAINDER 13);
CREATE TABLE file_data_14 PARTITION OF file_data FOR VALUES WITH (MODULUS 16, REMAINDER 14);
CREATE TABLE file_data_15 PARTITION OF file_data FOR VALUES WITH (MODULUS 16, REMAINDER 15);

CREATE INDEX file_data__deleted_at__idx
    ON file_data (deleted_at, file_id, id)
 WHERE deleted_at IS NOT NULL;
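-- Illustrative only (not part of the migration): with hash partitioning every
-- row for a given file_id lands in exactly one of the 16 partitions, so all
-- data objects of a file stay in the same child table. A quick way to see
-- where a row ended up (the UUID literal is a placeholder):
--
--   SELECT tableoid::regclass AS partition, id, type
--     FROM file_data
--    WHERE file_id = '00000000-0000-0000-0000-000000000000'::uuid;
--
-- The partial index on deleted_at contains only soft-deleted rows, which keeps
-- it small for the garbage-collection scans it is intended for.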
@@ -0,0 +1,2 @@
CREATE INDEX IF NOT EXISTS file_library_rel__library_file_id__idx
    ON file_library_rel (library_file_id);
@@ -0,0 +1,33 @@
CREATE TABLE sso_provider (
    id uuid PRIMARY KEY,

    created_at timestamptz NOT NULL DEFAULT now(),
    modified_at timestamptz NOT NULL DEFAULT now(),

    is_enabled boolean NOT NULL DEFAULT true,

    type text NOT NULL CHECK (type IN ('oidc')),
    domain text NOT NULL,

    client_id text NOT NULL,
    client_secret text NOT NULL,

    base_uri text NOT NULL,
    token_uri text NULL,
    auth_uri text NULL,
    user_uri text NULL,
    jwks_uri text NULL,
    logout_uri text NULL,

    roles_attr text NULL,
    email_attr text NULL,
    name_attr text NULL,
    user_info_source text NOT NULL DEFAULT 'token'
        CHECK (user_info_source IN ('token', 'userinfo', 'auto')),

    scopes text[] NULL,
    roles text[] NULL
);

CREATE UNIQUE INDEX sso_provider__domain__idx
    ON sso_provider(domain);
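-- Illustrative only (not part of the migration): a sketch of a provider row.
-- All values are placeholders, not a real configuration.
--
--   INSERT INTO sso_provider (id, type, domain, client_id, client_secret,
--                             base_uri, user_info_source, scopes)
--   VALUES (gen_random_uuid(), 'oidc', 'example.com',
--           'penpot-client', '<client-secret>',
--           'https://idp.example.com', 'auto',
--           ARRAY['openid', 'email', 'profile']);
--
-- The unique index on domain allows at most one provider per email domain, so
-- a login attempt can be routed to its provider by the domain of the address.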
@@ -0,0 +1,23 @@
CREATE TABLE http_session_v2 (
    id uuid PRIMARY KEY,

    created_at timestamptz NOT NULL DEFAULT now(),
    modified_at timestamptz NOT NULL DEFAULT now(),

    profile_id uuid REFERENCES profile(id) ON DELETE CASCADE,
    user_agent text NULL,

    sso_provider_id uuid NULL REFERENCES sso_provider(id) ON DELETE CASCADE,
    sso_session_id text NULL
);

CREATE INDEX http_session_v2__profile_id__idx
    ON http_session_v2(profile_id);

CREATE INDEX http_session_v2__sso_provider_id__idx
    ON http_session_v2(sso_provider_id)
 WHERE sso_provider_id IS NOT NULL;

CREATE INDEX http_session_v2__sso_session_id__idx
    ON http_session_v2(sso_session_id)
 WHERE sso_session_id IS NOT NULL;
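-- Illustrative only (not part of the migration): the partial index on
-- sso_session_id is what a back-channel logout handler could use to drop all
-- sessions opened through a given provider session (:sid is a placeholder
-- parameter):
--
--   DELETE FROM http_session_v2
--    WHERE sso_session_id = :sid;
--
-- Sessions created without SSO leave both sso_* columns NULL, so they never
-- appear in the two partial indexes above.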
@@ -10,13 +10,12 @@
|
||||
[app.common.data :as d]
|
||||
[app.common.logging :as l]
|
||||
[app.common.schema :as sm]
|
||||
[app.common.time :as ct]
|
||||
[app.common.transit :as t]
|
||||
[app.config :as cfg]
|
||||
[app.redis :as rds]
|
||||
[app.util.time :as dt]
|
||||
[app.worker :as wrk]
|
||||
[integrant.core :as ig]
|
||||
[promesa.core :as p]
|
||||
[promesa.exec :as px]
|
||||
[promesa.exec.csp :as sp]))
|
||||
|
||||
@@ -56,17 +55,19 @@
|
||||
[k v]
|
||||
{k (-> (d/without-nils v)
|
||||
(assoc ::buffer-size 128)
|
||||
(assoc ::timeout (dt/duration {:seconds 30})))})
|
||||
(assoc ::timeout (ct/duration {:seconds 30})))})
|
||||
|
||||
(def ^:private schema:params
|
||||
[:map ::rds/redis ::wrk/executor])
|
||||
[:map
|
||||
::rds/client
|
||||
::wrk/executor])
|
||||
|
||||
(defmethod ig/assert-key ::msgbus
|
||||
[_ params]
|
||||
(assert (sm/check schema:params params)))
|
||||
|
||||
(defmethod ig/init-key ::msgbus
|
||||
[_ {:keys [::buffer-size ::wrk/executor ::timeout ::rds/redis] :as cfg}]
|
||||
[_ {:keys [::buffer-size ::wrk/executor ::timeout] :as cfg}]
|
||||
(l/info :hint "initialize msgbus" :buffer-size buffer-size)
|
||||
(let [cmd-ch (sp/chan :buf buffer-size)
|
||||
rcv-ch (sp/chan :buf (sp/dropping-buffer buffer-size))
|
||||
@@ -74,8 +75,9 @@
|
||||
:xf xform-prefix-topic)
|
||||
state (agent {})
|
||||
|
||||
pconn (rds/connect redis :type :default :timeout timeout)
|
||||
sconn (rds/connect redis :type :pubsub :timeout timeout)
|
||||
;; Open persistent connections to redis
|
||||
pconn (rds/connect cfg :timeout timeout)
|
||||
sconn (rds/connect-pubsub cfg :timeout timeout)
|
||||
|
||||
_ (set-error-handler! state #(l/error :cause % :hint "unexpected error on agent" ::l/sync? true))
|
||||
_ (set-error-mode! state :continue)
|
||||
@@ -189,14 +191,13 @@
|
||||
|
||||
(defn- create-listener
|
||||
[rcv-ch]
|
||||
(rds/pubsub-listener
|
||||
:on-message (fn [_ topic message]
|
||||
{:on-message (fn [_ topic message]
|
||||
;; There is no back pressure, so we use a sliding
;; buffer for cases when the pubsub broker sends
;; more messages than we can process.
|
||||
(let [val {:topic topic :message (t/decode message)}]
|
||||
(let [val {:topic topic :message (t/decode-str message)}]
|
||||
(when-not (sp/offer! rcv-ch val)
|
||||
(l/warn :msg "dropping message on subscription loop"))))))
|
||||
(l/warn :msg "dropping message on subscription loop"))))})
|
||||
|
||||
(defn- process-input
|
||||
[{:keys [::state ::wrk/executor] :as cfg} topic message]
|
||||
@@ -216,8 +217,7 @@
|
||||
(rds/add-listener sconn (create-listener rcv-ch))
|
||||
|
||||
(px/thread
|
||||
{:name "penpot/msgbus/io-loop"
|
||||
:virtual true}
|
||||
{:name "penpot/msgbus"}
|
||||
(try
|
||||
(loop []
|
||||
(let [timeout-ch (sp/timeout-chan 1000)
|
||||
@@ -263,7 +263,7 @@
|
||||
intended to be used in core.async go blocks."
|
||||
[{:keys [::pconn] :as cfg} {:keys [topic message]}]
|
||||
(try
|
||||
(p/await! (rds/publish pconn topic (t/encode message)))
|
||||
(rds/publish pconn topic (t/encode-str message))
|
||||
(catch InterruptedException cause
|
||||
(throw cause))
|
||||
(catch Throwable cause
|
||||
|
||||
@@ -6,23 +6,22 @@
|
||||
|
||||
(ns app.redis
|
||||
"The msgbus abstraction implemented using redis as underlying backend."
|
||||
(:refer-clojure :exclude [eval])
|
||||
(:refer-clojure :exclude [eval get set run!])
|
||||
(:require
|
||||
[app.common.data :as d]
|
||||
[app.common.exceptions :as ex]
|
||||
[app.common.generic-pool :as gpool]
|
||||
[app.common.logging :as l]
|
||||
[app.common.schema :as sm]
|
||||
[app.common.time :as ct]
|
||||
[app.metrics :as mtx]
|
||||
[app.redis.script :as-alias rscript]
|
||||
[app.util.cache :as cache]
|
||||
[app.util.time :as dt]
|
||||
[app.worker :as-alias wrk]
|
||||
[app.worker :as wrk]
|
||||
[app.worker.executor]
|
||||
[clojure.core :as c]
|
||||
[clojure.java.io :as io]
|
||||
[cuerdas.core :as str]
|
||||
[integrant.core :as ig]
|
||||
[promesa.core :as p]
|
||||
[promesa.exec :as px])
|
||||
[integrant.core :as ig])
|
||||
(:import
|
||||
clojure.lang.MapEntry
|
||||
io.lettuce.core.KeyValue
|
||||
@@ -32,12 +31,10 @@
|
||||
io.lettuce.core.RedisException
|
||||
io.lettuce.core.RedisURI
|
||||
io.lettuce.core.ScriptOutputType
|
||||
io.lettuce.core.api.StatefulConnection
|
||||
io.lettuce.core.SetArgs
|
||||
io.lettuce.core.api.StatefulRedisConnection
|
||||
io.lettuce.core.api.async.RedisAsyncCommands
|
||||
io.lettuce.core.api.async.RedisScriptingAsyncCommands
|
||||
io.lettuce.core.api.sync.RedisCommands
|
||||
io.lettuce.core.codec.ByteArrayCodec
|
||||
io.lettuce.core.api.sync.RedisScriptingCommands
|
||||
io.lettuce.core.codec.RedisCodec
|
||||
io.lettuce.core.codec.StringCodec
|
||||
io.lettuce.core.pubsub.RedisPubSubListener
|
||||
@@ -45,244 +42,238 @@
|
||||
io.lettuce.core.pubsub.api.sync.RedisPubSubCommands
|
||||
io.lettuce.core.resource.ClientResources
|
||||
io.lettuce.core.resource.DefaultClientResources
|
||||
io.netty.channel.nio.NioEventLoopGroup
|
||||
io.netty.util.HashedWheelTimer
|
||||
io.netty.util.Timer
|
||||
io.netty.util.concurrent.EventExecutorGroup
|
||||
java.lang.AutoCloseable
|
||||
java.time.Duration))
|
||||
|
||||
(set! *warn-on-reflection* true)
|
||||
|
||||
(declare ^:private initialize-resources)
|
||||
(declare ^:private shutdown-resources)
|
||||
(declare ^:private impl-eval)
|
||||
(def ^:const MAX-EVAL-RETRIES 18)
|
||||
|
||||
(defprotocol IRedis
|
||||
(-connect [_ options])
|
||||
(-get-or-connect [_ key options]))
|
||||
(def default-timeout
|
||||
(ct/duration "10s"))
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; IMPL & PRIVATE API
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
(defprotocol IConnection
|
||||
(publish [_ topic message])
|
||||
(rpush [_ key payload])
|
||||
(blpop [_ timeout keys])
|
||||
(eval [_ script]))
|
||||
(-set-timeout [_ timeout] "set connection timeout")
|
||||
(-get-timeout [_] "get current timeout")
|
||||
(-reset-timeout [_] "reset to default timeout"))
|
||||
|
||||
(defprotocol IDefaultConnection
|
||||
"Public API of default redis connection"
|
||||
(-publish [_ topic message])
|
||||
(-rpush [_ key payload])
|
||||
(-blpop [_ timeout keys])
|
||||
(-eval [_ script])
|
||||
(-get [_ key])
|
||||
(-set [_ key val args])
|
||||
(-del [_ key-or-keys])
|
||||
(-ping [_]))
|
||||
|
||||
(defprotocol IPubSubConnection
|
||||
(add-listener [_ listener])
|
||||
(subscribe [_ topics])
|
||||
(unsubscribe [_ topics]))
|
||||
(-add-listener [_ listener])
|
||||
(-subscribe [_ topics])
|
||||
(-unsubscribe [_ topics]))
|
||||
|
||||
(def default-codec
|
||||
(RedisCodec/of StringCodec/UTF8 ByteArrayCodec/INSTANCE))
|
||||
|
||||
(def string-codec
|
||||
(def ^:private default-codec
|
||||
(RedisCodec/of StringCodec/UTF8 StringCodec/UTF8))
|
||||
|
||||
(sm/register!
|
||||
{:type ::connection
|
||||
:pred #(satisfies? IConnection %)
|
||||
:type-properties
|
||||
{:title "connection"
|
||||
:description "redis connection instance"}})
|
||||
(defn- impl-eval
|
||||
[cmd cache metrics script]
|
||||
(let [keys (into-array String (map str (::rscript/keys script)))
|
||||
vals (into-array String (map str (::rscript/vals script)))
|
||||
sname (::rscript/name script)
|
||||
|
||||
(sm/register!
|
||||
{:type ::pubsub-connection
|
||||
:pred #(satisfies? IPubSubConnection %)
|
||||
:type-properties
|
||||
{:title "connection"
|
||||
:description "redis connection instance"}})
|
||||
read-script
|
||||
(fn []
|
||||
(-> script ::rscript/path io/resource slurp))
|
||||
|
||||
(defn redis?
|
||||
[o]
|
||||
(satisfies? IRedis o))
|
||||
load-script
|
||||
(fn []
|
||||
(let [id (.scriptLoad ^RedisScriptingCommands cmd
|
||||
^String (read-script))]
|
||||
(swap! cache assoc sname id)
|
||||
(l/trc :hint "load script" :name sname :id id)
|
||||
|
||||
(sm/register!
|
||||
{:type ::redis
|
||||
:pred redis?})
|
||||
id))
|
||||
|
||||
(def ^:private schema:script
|
||||
[:map {:title "script"}
|
||||
[::rscript/name qualified-keyword?]
|
||||
[::rscript/path ::sm/text]
|
||||
[::rscript/keys {:optional true} [:vector :any]]
|
||||
[::rscript/vals {:optional true} [:vector :any]]])
|
||||
eval-script
|
||||
(fn [id]
|
||||
(try
|
||||
(let [tpoint (ct/tpoint)
|
||||
result (.evalsha ^RedisScriptingCommands cmd
|
||||
^String id
|
||||
^ScriptOutputType ScriptOutputType/MULTI
|
||||
^"[Ljava.lang.String;" keys
|
||||
^"[Ljava.lang.String;" vals)
|
||||
elapsed (tpoint)]
|
||||
|
||||
(def valid-script?
|
||||
(sm/lazy-validator schema:script))
|
||||
(mtx/run! metrics {:id :redis-eval-timing
|
||||
:labels [(name sname)]
|
||||
:val (inst-ms elapsed)})
|
||||
|
||||
(defmethod ig/expand-key ::redis
|
||||
[k v]
|
||||
(let [cpus (px/get-available-processors)
|
||||
threads (max 1 (int (* cpus 0.2)))]
|
||||
{k (-> (d/without-nils v)
|
||||
(assoc ::timeout (dt/duration "10s"))
|
||||
(assoc ::io-threads (max 3 threads))
|
||||
(assoc ::worker-threads (max 3 threads)))}))
|
||||
(l/trc :hint "eval script"
|
||||
:name (name sname)
|
||||
:id id
|
||||
:params (str/join "," (::rscript/vals script))
|
||||
:elapsed (ct/format-duration elapsed))
|
||||
|
||||
(def ^:private schema:redis-params
|
||||
[:map {:title "redis-params"}
|
||||
::wrk/executor
|
||||
::mtx/metrics
|
||||
[::uri ::sm/uri]
|
||||
[::worker-threads ::sm/int]
|
||||
[::io-threads ::sm/int]
|
||||
[::timeout ::dt/duration]])
|
||||
result)
|
||||
|
||||
(defmethod ig/assert-key ::redis
|
||||
[_ params]
|
||||
(assert (sm/check schema:redis-params params)))
|
||||
(catch io.lettuce.core.RedisNoScriptException _cause
|
||||
::load)
|
||||
|
||||
(defmethod ig/init-key ::redis
|
||||
[_ params]
|
||||
(initialize-resources params))
|
||||
(catch Throwable cause
|
||||
(when-let [on-error (::rscript/on-error script)]
|
||||
(on-error cause))
|
||||
(throw cause))))
|
||||
|
||||
(defmethod ig/halt-key! ::redis
|
||||
[_ instance]
|
||||
(d/close! instance))
|
||||
eval-script'
|
||||
(fn [id]
|
||||
(loop [id id
|
||||
retries 0]
|
||||
(if (> retries MAX-EVAL-RETRIES)
|
||||
(ex/raise :type :internal
|
||||
:code ::max-eval-retries-reached
|
||||
:hint (str "unable to eval redis script " sname))
|
||||
(let [result (eval-script id)]
|
||||
(if (= result ::load)
|
||||
(recur (load-script)
|
||||
(inc retries))
|
||||
result)))))]
|
||||
|
||||
(defn- initialize-resources
|
||||
"Initialize redis connection resources"
|
||||
[{:keys [::uri ::io-threads ::worker-threads ::wrk/executor ::mtx/metrics] :as params}]
|
||||
(if-let [id (c/get @cache sname)]
|
||||
(eval-script' id)
|
||||
(-> (load-script)
|
||||
(eval-script')))))
|
||||
|
||||
(l/inf :hint "initialize redis resources"
|
||||
:uri (str uri)
|
||||
:io-threads io-threads
|
||||
:worker-threads worker-threads)
|
||||
(deftype Connection [^StatefulRedisConnection conn
|
||||
^RedisCommands cmd
|
||||
^Duration timeout
|
||||
cache metrics]
|
||||
AutoCloseable
|
||||
(close [_]
|
||||
(ex/ignoring (.close conn)))
|
||||
|
||||
(let [timer (HashedWheelTimer.)
|
||||
resources (.. (DefaultClientResources/builder)
|
||||
(ioThreadPoolSize ^long io-threads)
|
||||
(computationThreadPoolSize ^long worker-threads)
|
||||
(timer ^Timer timer)
|
||||
(build))
|
||||
IConnection
|
||||
(-set-timeout [_ timeout]
|
||||
(.setTimeout conn ^Duration timeout))
|
||||
|
||||
redis-uri (RedisURI/create ^String (str uri))
|
||||
(-reset-timeout [_]
|
||||
(.setTimeout conn timeout))
|
||||
|
||||
shutdown (fn [client conn]
|
||||
(ex/ignoring (.close ^StatefulConnection conn))
|
||||
(ex/ignoring (.close ^RedisClient client))
|
||||
(l/trc :hint "disconnect" :hid (hash client)))
|
||||
(-get-timeout [_]
|
||||
(.getTimeout conn))
|
||||
|
||||
on-remove (fn [key val cause]
|
||||
(l/trace :hint "evict connection (cache)" :key key :reason cause)
|
||||
(some-> val d/close!))
|
||||
IDefaultConnection
|
||||
(-publish [_ topic message]
|
||||
(.publish cmd ^String topic ^String message))
|
||||
|
||||
cache (cache/create :executor executor
|
||||
:on-remove on-remove
|
||||
:keepalive "5m")]
|
||||
(reify
|
||||
java.lang.AutoCloseable
|
||||
(close [_]
|
||||
(ex/ignoring (cache/invalidate! cache))
|
||||
(ex/ignoring (.shutdown ^ClientResources resources))
|
||||
(ex/ignoring (.stop ^Timer timer)))
|
||||
(-rpush [_ key elements]
|
||||
(try
|
||||
(let [vals (make-array String (count elements))]
|
||||
(loop [i 0 xs (seq elements)]
|
||||
(when xs
|
||||
(aset ^"[[Ljava.lang.String;" vals i ^String (first xs))
|
||||
(recur (inc i) (next xs))))
|
||||
|
||||
IRedis
|
||||
(-get-or-connect [this key options]
|
||||
(let [create (fn [_] (-connect this options))]
|
||||
(cache/get cache key create)))
|
||||
(.rpush cmd
|
||||
^String key
|
||||
^"[[Ljava.lang.String;" vals))
|
||||
|
||||
(-connect [_ options]
|
||||
(let [timeout (or (:timeout options) (::timeout params))
|
||||
codec (get options :codec default-codec)
|
||||
type (get options :type :default)
|
||||
client (RedisClient/create ^ClientResources resources
|
||||
^RedisURI redis-uri)]
|
||||
(catch RedisCommandInterruptedException cause
|
||||
(throw (InterruptedException. (ex-message cause))))))
|
||||
|
||||
(l/trc :hint "connect" :hid (hash client))
|
||||
(if (= type :pubsub)
|
||||
(let [conn (.connectPubSub ^RedisClient client
|
||||
^RedisCodec codec)]
|
||||
(.setTimeout ^StatefulConnection conn
|
||||
^Duration timeout)
|
||||
(reify
|
||||
IPubSubConnection
|
||||
(add-listener [_ listener]
|
||||
(assert (instance? RedisPubSubListener listener) "expected listener instance")
|
||||
(.addListener ^StatefulRedisPubSubConnection conn
|
||||
^RedisPubSubListener listener))
|
||||
(-blpop [_ keys timeout]
|
||||
(try
|
||||
(let [keys (into-array String keys)]
|
||||
(when-let [res (.blpop cmd
|
||||
^double timeout
|
||||
^"[Ljava.lang.String;" keys)]
|
||||
(MapEntry/create
|
||||
(.getKey ^KeyValue res)
|
||||
(.getValue ^KeyValue res))))
|
||||
(catch RedisCommandInterruptedException cause
|
||||
(throw (InterruptedException. (ex-message cause))))))
|
||||
|
||||
(subscribe [_ topics]
|
||||
(try
|
||||
(let [topics (into-array String (map str topics))
|
||||
cmd (.sync ^StatefulRedisPubSubConnection conn)]
|
||||
(.subscribe ^RedisPubSubCommands cmd topics))
|
||||
(catch RedisCommandInterruptedException cause
|
||||
(throw (InterruptedException. (ex-message cause))))))
|
||||
(-get [_ key]
|
||||
(assert (string? key) "key expected to be string")
|
||||
(.get cmd ^String key))
|
||||
|
||||
(unsubscribe [_ topics]
|
||||
(try
|
||||
(let [topics (into-array String (map str topics))
|
||||
cmd (.sync ^StatefulRedisPubSubConnection conn)]
|
||||
(.unsubscribe ^RedisPubSubCommands cmd topics))
|
||||
(catch RedisCommandInterruptedException cause
|
||||
(throw (InterruptedException. (ex-message cause))))))
|
||||
(-set [_ key val args]
|
||||
(.set cmd
|
||||
^String key
|
||||
^String val
|
||||
^SetArgs args))
|
||||
|
||||
(-del [_ keys]
|
||||
(let [keys (into-array String keys)]
|
||||
(.del cmd ^String/1 keys)))
|
||||
|
||||
(-ping [_]
|
||||
(.ping cmd))
|
||||
|
||||
(-eval [_ script]
|
||||
(impl-eval cmd cache metrics script)))
|
||||
|
||||
|
||||
AutoCloseable
|
||||
(close [_] (shutdown client conn))))
|
||||
(deftype SubscriptionConnection [^StatefulRedisPubSubConnection conn
|
||||
^RedisPubSubCommands cmd
|
||||
^Duration timeout]
|
||||
AutoCloseable
|
||||
(close [_]
|
||||
(ex/ignoring (.close conn)))
|
||||
|
||||
(let [conn (.connect ^RedisClient client ^RedisCodec codec)]
|
||||
(.setTimeout ^StatefulConnection conn ^Duration timeout)
|
||||
(reify
|
||||
IConnection
|
||||
(publish [_ topic message]
|
||||
(assert (string? topic) "expected topic to be string")
|
||||
(assert (bytes? message) "expected message to be a byte array")
|
||||
IConnection
|
||||
(-set-timeout [_ timeout]
|
||||
(.setTimeout conn ^Duration timeout))
|
||||
|
||||
(let [pcomm (.async ^StatefulRedisConnection conn)]
|
||||
(.publish ^RedisAsyncCommands pcomm ^String topic ^bytes message)))
|
||||
(-reset-timeout [_]
|
||||
(.setTimeout conn timeout))
|
||||
|
||||
(rpush [_ key payload]
|
||||
(assert (or (and (vector? payload)
|
||||
(every? bytes? payload))
|
||||
(bytes? payload)))
|
||||
(try
|
||||
(let [cmd (.sync ^StatefulRedisConnection conn)
|
||||
data (if (vector? payload) payload [payload])
|
||||
vals (make-array (. Class (forName "[B")) (count data))]
|
||||
(-get-timeout [_]
|
||||
(.getTimeout conn))
|
||||
|
||||
(loop [i 0 xs (seq data)]
|
||||
(when xs
|
||||
(aset ^"[[B" vals i ^bytes (first xs))
|
||||
(recur (inc i) (next xs))))
|
||||
IPubSubConnection
|
||||
(-add-listener [_ listener]
|
||||
(.addListener conn ^RedisPubSubListener listener))
|
||||
|
||||
(.rpush ^RedisCommands cmd
|
||||
^String key
|
||||
^"[[B" vals))
|
||||
(-subscribe [_ topics]
|
||||
(try
|
||||
(let [topics (into-array String topics)]
|
||||
(.subscribe cmd topics))
|
||||
(catch RedisCommandInterruptedException cause
|
||||
(throw (InterruptedException. (ex-message cause))))))
|
||||
|
||||
(catch RedisCommandInterruptedException cause
|
||||
(throw (InterruptedException. (ex-message cause))))))
|
||||
(-unsubscribe [_ topics]
|
||||
(try
|
||||
(let [topics (into-array String topics)]
|
||||
(.unsubscribe cmd topics))
|
||||
(catch RedisCommandInterruptedException cause
|
||||
(throw (InterruptedException. (ex-message cause)))))))
|
||||
|
||||
(blpop [_ timeout keys]
|
||||
(try
|
||||
(let [keys (into-array Object (map str keys))
|
||||
cmd (.sync ^StatefulRedisConnection conn)
|
||||
timeout (/ (double (inst-ms timeout)) 1000.0)]
|
||||
(when-let [res (.blpop ^RedisCommands cmd
|
||||
^double timeout
|
||||
^"[Ljava.lang.String;" keys)]
|
||||
(MapEntry/create
|
||||
(.getKey ^KeyValue res)
|
||||
(.getValue ^KeyValue res))))
|
||||
(catch RedisCommandInterruptedException cause
|
||||
(throw (InterruptedException. (ex-message cause))))))
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; PUBLIC API
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
(eval [_ script]
|
||||
(assert (valid-script? script) "expected valid script")
|
||||
(impl-eval conn metrics script))
|
||||
|
||||
AutoCloseable
|
||||
(close [_] (shutdown client conn))))))))))
|
||||
|
||||
(defn connect
|
||||
[instance & {:as opts}]
|
||||
(assert (satisfies? IRedis instance) "expected valid redis instance")
|
||||
(-connect instance opts))
|
||||
|
||||
(defn get-or-connect
|
||||
[instance key & {:as opts}]
|
||||
(assert (satisfies? IRedis instance) "expected valid redis instance")
|
||||
(-get-or-connect instance key opts))
|
||||
(defn build-set-args
|
||||
[options]
|
||||
(reduce-kv (fn [^SetArgs args k v]
|
||||
(case k
|
||||
:ex (if (instance? Duration v)
|
||||
(.ex args ^Duration v)
|
||||
(.ex args (long v)))
|
||||
:px (.px args (long v))
|
||||
:nx (if v (.nx args) args)
|
||||
:keep-ttl (if v (.keepttl args) args)))
|
||||
(SetArgs.)
|
||||
options))
|
||||
|
||||
(defn pubsub-listener
|
||||
[& {:keys [on-message on-subscribe on-unsubscribe]}]
|
||||
@@ -311,61 +302,172 @@
|
||||
(when on-unsubscribe
|
||||
(on-unsubscribe nil topic count)))))
|
||||
|
||||
(def ^:private scripts-cache (atom {}))
|
||||
(defn connect
|
||||
[cfg & {:as options}]
|
||||
(assert (contains? cfg ::mtx/metrics) "missing ::mtx/metrics on provided system")
|
||||
(assert (contains? cfg ::client) "missing ::rds/client on provided system")
|
||||
|
||||
(defn- impl-eval
|
||||
[^StatefulRedisConnection connection metrics script]
|
||||
(let [cmd (.async ^StatefulRedisConnection connection)
|
||||
keys (into-array String (map str (::rscript/keys script)))
|
||||
vals (into-array String (map str (::rscript/vals script)))
|
||||
sname (::rscript/name script)]
|
||||
(let [state (::client cfg)
|
||||
|
||||
(letfn [(on-error [cause]
|
||||
(if (instance? io.lettuce.core.RedisNoScriptException cause)
|
||||
(do
|
||||
(l/error :hint "no script found" :name sname :cause cause)
|
||||
(->> (load-script)
|
||||
(p/mcat eval-script)))
|
||||
(if-let [on-error (::rscript/on-error script)]
|
||||
(on-error cause)
|
||||
(p/rejected cause))))
|
||||
cache (::cache state)
|
||||
client (::client state)
|
||||
timeout (or (some-> (:timeout options) ct/duration)
|
||||
(::timeout state))
|
||||
|
||||
(eval-script [sha]
|
||||
(let [tpoint (dt/tpoint)]
|
||||
(->> (.evalsha ^RedisScriptingAsyncCommands cmd
|
||||
^String sha
|
||||
^ScriptOutputType ScriptOutputType/MULTI
|
||||
^"[Ljava.lang.String;" keys
|
||||
^"[Ljava.lang.String;" vals)
|
||||
(p/fmap (fn [result]
|
||||
(let [elapsed (tpoint)]
|
||||
(mtx/run! metrics {:id :redis-eval-timing
|
||||
:labels [(name sname)]
|
||||
:val (inst-ms elapsed)})
|
||||
(l/trace :hint "eval script"
|
||||
:name (name sname)
|
||||
:sha sha
|
||||
:params (str/join "," (::rscript/vals script))
|
||||
:elapsed (dt/format-duration elapsed))
|
||||
result)))
|
||||
(p/merr on-error))))
|
||||
conn (.connect ^RedisClient client
|
||||
^RedisCodec default-codec)
|
||||
cmd (.sync ^StatefulRedisConnection conn)]
|
||||
|
||||
(read-script []
|
||||
(-> script ::rscript/path io/resource slurp))
|
||||
(.setTimeout ^StatefulRedisConnection conn ^Duration timeout)
|
||||
(->Connection conn cmd timeout cache (::mtx/metrics cfg))))
|
||||
|
||||
(load-script []
|
||||
(l/trace :hint "load script" :name sname)
|
||||
(->> (.scriptLoad ^RedisScriptingAsyncCommands cmd
|
||||
^String (read-script))
|
||||
(p/fmap (fn [sha]
|
||||
(swap! scripts-cache assoc sname sha)
|
||||
sha))))]
|
||||
(defn connect-pubsub
|
||||
[cfg & {:as options}]
|
||||
(let [state (::client cfg)
|
||||
client (::client state)
|
||||
|
||||
(p/await!
|
||||
(if-let [sha (get @scripts-cache sname)]
|
||||
(eval-script sha)
|
||||
(->> (load-script)
|
||||
(p/mapcat eval-script)))))))
|
||||
timeout (or (some-> (:timeout options) ct/duration)
|
||||
(::timeout state))
|
||||
conn (.connectPubSub ^RedisClient client
|
||||
^RedisCodec default-codec)
|
||||
cmd (.sync ^StatefulRedisPubSubConnection conn)]
|
||||
|
||||
|
||||
(.setTimeout ^StatefulRedisPubSubConnection conn
|
||||
^Duration timeout)
|
||||
(->SubscriptionConnection conn cmd timeout)))
|
||||
|
||||
(defn get
|
||||
[conn key]
|
||||
(assert (string? key) "key must be string instance")
|
||||
(try
|
||||
(-get conn key)
|
||||
(catch RedisCommandTimeoutException cause
|
||||
(l/err :hint "timeout on get redis key" :key key :cause cause)
|
||||
nil)))
|
||||
|
||||
(defn set
|
||||
([conn key val]
|
||||
(set conn key val nil))
|
||||
([conn key val args]
|
||||
(assert (string? key) "key must be string instance")
|
||||
(assert (string? val) "val must be string instance")
|
||||
(let [args (cond
|
||||
(or (instance? SetArgs args)
|
||||
(nil? args))
|
||||
args
|
||||
|
||||
(map? args)
|
||||
(build-set-args args)
|
||||
|
||||
:else
|
||||
(throw (IllegalArgumentException. "invalid args")))]
|
||||
|
||||
(try
|
||||
(-set conn key val args)
|
||||
(catch RedisCommandTimeoutException cause
|
||||
(l/err :hint "timeout on set redis key" :key key :cause cause)
|
||||
nil)))))
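;; Illustrative sketch, not part of the patch: how the string-based get/set API
;; above could be used from a caller. The key name, value and TTL are made-up
;; example values; cfg is assumed to be a system map that already contains
;; ::client and ::mtx/metrics.
(comment
  (let [conn (connect cfg :timeout "5s")]
    ;; NX set with a 30 second expiration: stored only when the key is absent.
    (set conn "penpot:example:lock" "1" {:ex 30 :nx true})
    ;; Returns the stored value, or nil when the command times out (the
    ;; timeout is logged by the wrapper above).
    (get conn "penpot:example:lock")))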
|
||||
|
||||
(defn del
|
||||
[conn key-or-keys]
|
||||
(let [keys (if (vector? key-or-keys) key-or-keys [key-or-keys])]
|
||||
(assert (every? string? keys) "only string keys allowed")
|
||||
(try
|
||||
(-del conn keys)
|
||||
(catch RedisCommandTimeoutException cause
|
||||
(l/err :hint "timeout on del redis key" :key key :cause cause)
|
||||
nil))))
|
||||
|
||||
(defn ping
|
||||
[conn]
|
||||
(-ping conn))
|
||||
|
||||
(defn blpop
|
||||
[conn key-or-keys timeout]
|
||||
(let [keys (if (vector? key-or-keys) key-or-keys [key-or-keys])
|
||||
timeout (cond
|
||||
(ct/duration? timeout)
|
||||
(/ (double (inst-ms timeout)) 1000.0)
|
||||
|
||||
(double? timeout)
|
||||
timeout
|
||||
|
||||
(int? timeout)
|
||||
(/ (double timeout) 1000.0)
|
||||
|
||||
:else
|
||||
0)]
|
||||
|
||||
(assert (every? string? keys) "only string keys allowed")
|
||||
(-blpop conn keys timeout)))
|
||||
|
||||
(defn rpush
|
||||
[conn key elements]
|
||||
(assert (string? key) "key must be string instance")
|
||||
(assert (every? string? elements) "elements should be all strings")
|
||||
(let [elements (vec elements)]
|
||||
(-rpush conn key elements)))
|
||||
|
||||
(defn publish
|
||||
[conn topic payload]
|
||||
(assert (string? topic) "expected topic to be string")
|
||||
(assert (string? payload) "expected payload to be a string")
|
||||
(-publish conn topic payload))
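;; Illustrative sketch, not part of the patch: rpush and blpop used as a simple
;; queue. The key name, payloads and timeout are made-up example values; conn
;; is assumed to be a default connection obtained with connect.
(comment
  (rpush conn "penpot:example:queue" ["job-1" "job-2"])
  ;; blpop accepts a duration, milliseconds or a double of seconds; it returns
  ;; a MapEntry of [key value], or nil when the timeout expires.
  (blpop conn "penpot:example:queue" (ct/duration "5s")))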
|
||||
|
||||
(def ^:private schema:script
|
||||
[:map {:title "script"}
|
||||
[::rscript/name qualified-keyword?]
|
||||
[::rscript/path ::sm/text]
|
||||
[::rscript/keys {:optional true} [:vector :any]]
|
||||
[::rscript/vals {:optional true} [:vector :any]]])
|
||||
|
||||
(def ^:private valid-script?
|
||||
(sm/lazy-validator schema:script))
|
||||
|
||||
(defn eval
|
||||
[conn script]
|
||||
(assert (valid-script? script) "expected valid script")
|
||||
(-eval conn script))
|
||||
|
||||
(defn add-listener
|
||||
[conn listener]
|
||||
(let [listener (cond
|
||||
(map? listener)
|
||||
(pubsub-listener listener)
|
||||
|
||||
(instance? RedisPubSubListener listener)
|
||||
listener
|
||||
|
||||
:else
|
||||
(throw (IllegalArgumentException. "invalid listener provided")))]
|
||||
|
||||
(-add-listener conn listener)))
|
||||
|
||||
(defn subscribe
|
||||
[conn topic-or-topics]
|
||||
(let [topics (if (vector? topic-or-topics) topic-or-topics [topic-or-topics])]
|
||||
(assert (every? string? topics))
|
||||
(-subscribe conn topics)))
|
||||
|
||||
(defn unsubscribe
|
||||
[conn topic-or-topics]
|
||||
(let [topics (if (vector? topic-or-topics) topic-or-topics [topic-or-topics])]
|
||||
(assert (every? string? topics))
|
||||
(-unsubscribe conn topics)))
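;; Illustrative sketch, not part of the patch: subscribing with a plain map,
;; which add-listener coerces into a RedisPubSubListener through
;; pubsub-listener. The topic name is a made-up example value; sconn is
;; assumed to come from connect-pubsub with the same cfg as above.
(comment
  (let [sconn (connect-pubsub cfg :timeout "30s")]
    (add-listener sconn {:on-message (fn [_ topic message]
                                       (l/trc :hint "received" :topic topic))})
    (subscribe sconn "penpot:example:events")))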
|
||||
|
||||
(defn set-timeout
|
||||
[conn timeout]
|
||||
(let [timeout (ct/duration timeout)]
|
||||
(-set-timeout conn timeout)))
|
||||
|
||||
(defn get-timeout
|
||||
[conn]
|
||||
(-get-timeout conn))
|
||||
|
||||
(defn reset-timeout
|
||||
[conn]
|
||||
(-reset-timeout conn))
|
||||
|
||||
(defn timeout-exception?
|
||||
[cause]
|
||||
@@ -374,3 +476,121 @@
|
||||
(defn exception?
|
||||
[cause]
|
||||
(instance? RedisException cause))
|
||||
|
||||
(defn get-pooled
|
||||
[cfg]
|
||||
(let [pool (::pool cfg)]
|
||||
(gpool/get pool)))
|
||||
|
||||
(defn close
|
||||
[o]
|
||||
(.close ^AutoCloseable o))
|
||||
|
||||
(defn pool
|
||||
[cfg & {:as options}]
|
||||
(gpool/create :create-fn (partial connect cfg options)
|
||||
:destroy-fn close
|
||||
:dispose-fn -reset-timeout))
|
||||
|
||||
(defn run!
|
||||
[cfg f & args]
|
||||
(if (gpool/pool? cfg)
|
||||
(apply run! {::pool cfg} f args)
|
||||
(let [pool (::pool cfg)]
|
||||
(with-open [^AutoCloseable conn (gpool/get pool)]
|
||||
(apply f (assoc cfg ::conn @conn) args)))))
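;; Illustrative sketch, not part of the patch, and it assumes the run!
;; normalization above: executing a function against a connection checked out
;; from a pool built with the pool constructor defined earlier.
(comment
  (let [pool (pool cfg {:timeout "2s"})]
    (run! {::pool pool}
          (fn [{:keys [::conn]}]
            ;; The checked-out connection is passed under ::conn.
            (ping conn)))))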
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; INITIALIZATION
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
(defmethod ig/expand-key ::client
|
||||
[k v]
|
||||
{k (-> (d/without-nils v)
|
||||
(assoc ::timeout (ct/duration "10s")))})
|
||||
|
||||
(def ^:private schema:client
|
||||
[:map {:title "RedisClient"}
|
||||
[::timer [:fn #(instance? HashedWheelTimer %)]]
|
||||
[::cache ::sm/atom]
|
||||
[::timeout ::ct/duration]
|
||||
[::resources [:fn #(instance? DefaultClientResources %)]]])
|
||||
|
||||
(def check-client
|
||||
(sm/check-fn schema:client))
|
||||
|
||||
(sm/register! ::client schema:client)
|
||||
(sm/register!
|
||||
{:type ::pool
|
||||
:pred gpool/pool?})
|
||||
|
||||
(def ^:private schema:client-params
|
||||
[:map {:title "redis-params"}
|
||||
::wrk/netty-io-executor
|
||||
::wrk/netty-executor
|
||||
[::uri ::sm/uri]
|
||||
[::timeout ::ct/duration]])
|
||||
|
||||
(def ^:private check-client-params
|
||||
(sm/check-fn schema:client-params))
|
||||
|
||||
(defmethod ig/assert-key ::client
|
||||
[_ params]
|
||||
(check-client-params params))
|
||||
|
||||
(defmethod ig/init-key ::client
|
||||
[_ {:keys [::uri ::wrk/netty-io-executor ::wrk/netty-executor] :as params}]
|
||||
|
||||
(l/inf :hint "initialize redis client" :uri (str uri))
|
||||
|
||||
(let [timer (HashedWheelTimer.)
|
||||
cache (atom {})
|
||||
|
||||
resources (.. (DefaultClientResources/builder)
|
||||
(eventExecutorGroup ^EventExecutorGroup netty-executor)
|
||||
|
||||
;; We provide lettuce with a shared event loop
;; group instance instead of letting lettuce
;; create its own
|
||||
(eventLoopGroupProvider
|
||||
(reify io.lettuce.core.resource.EventLoopGroupProvider
|
||||
(allocate [_ _] netty-io-executor)
|
||||
(threadPoolSize [_]
|
||||
(.executorCount ^NioEventLoopGroup netty-io-executor))
|
||||
(release [_ _ _ _ _]
|
||||
;; Do nothing
|
||||
)
|
||||
(shutdown [_ _ _ _]
|
||||
;; Do nothing
|
||||
)))
|
||||
|
||||
(timer ^Timer timer)
|
||||
(build))
|
||||
|
||||
redis-uri (RedisURI/create ^String (str uri))
|
||||
client (RedisClient/create ^ClientResources resources
|
||||
^RedisURI redis-uri)]
|
||||
|
||||
{::client client
|
||||
::cache cache
|
||||
::timer timer
|
||||
::timeout default-timeout
|
||||
::resources resources}))
|
||||
|
||||
(defmethod ig/halt-key! ::client
|
||||
[_ {:keys [::client ::timer ::resources]}]
|
||||
(ex/ignoring (.shutdown ^RedisClient client))
|
||||
(ex/ignoring (.shutdown ^ClientResources resources))
|
||||
(ex/ignoring (.stop ^Timer timer)))
|
||||
|
||||
(defmethod ig/assert-key ::pool
|
||||
[_ {:keys [::client]}]
|
||||
(check-client client))
|
||||
|
||||
(defmethod ig/init-key ::pool
|
||||
[_ cfg]
|
||||
(pool cfg {:timeout (ct/duration 2000)}))
|
||||
|
||||
(defmethod ig/halt-key! ::pool
|
||||
[_ instance]
|
||||
(.close ^java.lang.AutoCloseable instance))
|
||||
|
||||
@@ -12,18 +12,24 @@
|
||||
[app.common.logging :as l]
|
||||
[app.common.schema :as sm]
|
||||
[app.common.spec :as us]
|
||||
[app.common.time :as ct]
|
||||
[app.common.uri :as u]
|
||||
[app.config :as cf]
|
||||
[app.db :as db]
|
||||
[app.http :as-alias http]
|
||||
[app.http.access-token :as actoken]
|
||||
[app.http.client :as-alias http.client]
|
||||
[app.http.middleware :as mw]
|
||||
[app.http.security :as sec]
|
||||
[app.http.session :as session]
|
||||
[app.loggers.audit :as audit]
|
||||
[app.main :as-alias main]
|
||||
[app.metrics :as mtx]
|
||||
[app.msgbus :as-alias mbus]
|
||||
[app.redis :as rds]
|
||||
[app.rpc.climit :as climit]
|
||||
[app.rpc.cond :as cond]
|
||||
[app.rpc.doc :as doc]
|
||||
[app.rpc.helpers :as rph]
|
||||
[app.rpc.retry :as retry]
|
||||
[app.rpc.rlimit :as rlimit]
|
||||
@@ -31,11 +37,9 @@
|
||||
[app.storage :as-alias sto]
|
||||
[app.util.inet :as inet]
|
||||
[app.util.services :as sv]
|
||||
[app.util.time :as dt]
|
||||
[clojure.spec.alpha :as s]
|
||||
[cuerdas.core :as str]
|
||||
[integrant.core :as ig]
|
||||
[promesa.core :as p]
|
||||
[yetti.request :as yreq]
|
||||
[yetti.response :as yres]))
|
||||
|
||||
@@ -43,7 +47,7 @@
|
||||
|
||||
(defn- default-handler
|
||||
[_]
|
||||
(p/rejected (ex/error :type :not-found)))
|
||||
(ex/raise :type :not-found))
|
||||
|
||||
(defn- handle-response-transformation
|
||||
[response request mdata]
|
||||
@@ -63,74 +67,65 @@
|
||||
(let [mdata (meta result)
|
||||
response (if (fn? result)
|
||||
(result request)
|
||||
(let [result (rph/unwrap result)]
|
||||
{::yres/status (::http/status mdata 200)
|
||||
::yres/headers (::http/headers mdata {})
|
||||
(let [result (rph/unwrap result)
|
||||
status (or (::http/status mdata)
|
||||
(if (nil? result)
|
||||
204
|
||||
200))
|
||||
headers (cond-> (::http/headers mdata {})
|
||||
(yres/stream-body? result)
|
||||
(assoc "content-type" "application/octet-stream"))]
|
||||
{::yres/status status
|
||||
::yres/headers headers
|
||||
::yres/body result}))]
|
||||
|
||||
(-> response
|
||||
(handle-response-transformation request mdata)
|
||||
(handle-before-comple-hook mdata))))
|
||||
|
||||
(defn get-external-session-id
|
||||
[request]
|
||||
(when-let [session-id (yreq/get-header request "x-external-session-id")]
|
||||
(when-not (or (> (count session-id) 256)
|
||||
(= session-id "null")
|
||||
(str/blank? session-id))
|
||||
session-id)))
|
||||
|
||||
(defn- get-external-event-origin
|
||||
[request]
|
||||
(when-let [origin (yreq/get-header request "x-event-origin")]
|
||||
(when-not (or (> (count origin) 256)
|
||||
(= origin "null")
|
||||
(str/blank? origin))
|
||||
origin)))
|
||||
|
||||
(defn- rpc-handler
|
||||
(defn- make-rpc-handler
|
||||
"Ring handler that dispatches cmd requests and convert between
|
||||
internal async flow into ring async flow."
|
||||
[methods {:keys [params path-params method] :as request}]
|
||||
(let [handler-name (:type path-params)
|
||||
etag (yreq/get-header request "if-none-match")
|
||||
profile-id (or (::session/profile-id request)
|
||||
(::actoken/profile-id request))
|
||||
[methods]
|
||||
(let [methods (update-vals methods peek)]
|
||||
(fn [{:keys [params path-params method] :as request}]
|
||||
(let [handler-name (:type path-params)
|
||||
etag (yreq/get-header request "if-none-match")
|
||||
profile-id (or (::session/profile-id request)
|
||||
(::actoken/profile-id request))
|
||||
ip-addr (inet/parse-request request)
|
||||
|
||||
ip-addr (inet/parse-request request)
|
||||
session-id (get-external-session-id request)
|
||||
event-origin (get-external-event-origin request)
|
||||
data (-> params
|
||||
(assoc ::handler-name handler-name)
|
||||
(assoc ::ip-addr ip-addr)
|
||||
(assoc ::request-at (ct/now))
|
||||
(assoc ::cond/key etag)
|
||||
(cond-> (uuid? profile-id)
|
||||
(assoc ::profile-id profile-id)))
|
||||
|
||||
data (-> params
|
||||
(assoc ::handler-name handler-name)
|
||||
(assoc ::ip-addr ip-addr)
|
||||
(assoc ::request-at (dt/now))
|
||||
(assoc ::external-session-id session-id)
|
||||
(assoc ::external-event-origin event-origin)
|
||||
(assoc ::session/id (::session/id request))
|
||||
(assoc ::cond/key etag)
|
||||
(cond-> (uuid? profile-id)
|
||||
(assoc ::profile-id profile-id)))
|
||||
data (with-meta data
|
||||
{::http/request request})
|
||||
|
||||
data (vary-meta data assoc ::http/request request)
|
||||
handler-fn (get methods (keyword handler-name) default-handler)]
|
||||
handler-fn (get methods (keyword handler-name) default-handler)]
|
||||
|
||||
(when (and (or (= method :get)
|
||||
(= method :head))
|
||||
(not (str/starts-with? handler-name "get-")))
|
||||
(ex/raise :type :restriction
|
||||
:code :method-not-allowed
|
||||
:hint "method not allowed for this request"))
|
||||
(when (and (or (= method :get)
|
||||
(= method :head))
|
||||
(not (str/starts-with? handler-name "get-")))
|
||||
(ex/raise :type :restriction
|
||||
:code :method-not-allowed
|
||||
:hint "method not allowed for this request"))
|
||||
|
||||
(binding [cond/*enabled* true]
|
||||
(let [response (handler-fn data)]
|
||||
(handle-response request response)))))
|
||||
;; FIXME: why do we have this cond enabled here? We need to move it outside this handler
|
||||
(binding [cond/*enabled* true]
|
||||
(let [response (handler-fn data)]
|
||||
(handle-response request response)))))))
|
||||
|
||||
(defn- wrap-metrics
|
||||
"Wrap service method with metrics measurement."
|
||||
[{:keys [::mtx/metrics ::metrics-id]} f mdata]
|
||||
(let [labels (into-array String [(::sv/name mdata)])]
|
||||
(fn [cfg params]
|
||||
(let [tp (dt/tpoint)]
|
||||
(let [tp (ct/tpoint)]
|
||||
(try
|
||||
(f cfg params)
|
||||
(finally
|
||||
@@ -200,7 +195,7 @@
|
||||
::sm/explain (explain params)))))))
|
||||
f))
|
||||
|
||||
(defn- wrap-all
|
||||
(defn- wrap
|
||||
[cfg f mdata]
|
||||
(as-> f $
|
||||
(wrap-db-transaction cfg $ mdata)
|
||||
@@ -214,17 +209,30 @@
|
||||
(wrap-params-validation cfg $ mdata)
|
||||
(wrap-authentication cfg $ mdata)))
|
||||
|
||||
(defn- wrap
|
||||
(defn- wrap-management
|
||||
[cfg f mdata]
|
||||
(l/trc :hint "register method" :name (::sv/name mdata))
|
||||
(let [f (wrap-all cfg f mdata)]
|
||||
(partial f cfg)))
|
||||
(as-> f $
|
||||
(wrap-db-transaction cfg $ mdata)
|
||||
(retry/wrap-retry cfg $ mdata)
|
||||
(climit/wrap cfg $ mdata)
|
||||
(wrap-metrics cfg $ mdata)
|
||||
(wrap-audit cfg $ mdata)
|
||||
(wrap-spec-conform cfg $ mdata)
|
||||
(wrap-params-validation cfg $ mdata)
|
||||
(wrap-authentication cfg $ mdata)))
|
||||
|
||||
(defn- process-method
|
||||
[cfg [vfn mdata]]
|
||||
[(keyword (::sv/name mdata)) [mdata (wrap cfg vfn mdata)]])
|
||||
[cfg module wrap-fn [f mdata]]
|
||||
(l/trc :hint "add method" :module module :name (::sv/name mdata))
|
||||
(let [f (wrap-fn cfg f mdata)
|
||||
k (keyword (::sv/name mdata))]
|
||||
[k [mdata (partial f cfg)]]))
|
||||
|
||||
(defn- resolve-command-methods
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; API METHODS
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
(defn- resolve-methods
|
||||
[cfg]
|
||||
(let [cfg (assoc cfg ::type "command" ::metrics-id :rpc-command-timing)]
|
||||
(->> (sv/scan-ns
|
||||
@@ -239,7 +247,6 @@
|
||||
'app.rpc.commands.files
|
||||
'app.rpc.commands.files-create
|
||||
'app.rpc.commands.files-share
|
||||
'app.rpc.commands.files-temp
|
||||
'app.rpc.commands.files-update
|
||||
'app.rpc.commands.files-snapshot
|
||||
'app.rpc.commands.files-thumbnails
|
||||
@@ -254,7 +261,7 @@
|
||||
'app.rpc.commands.verify-token
|
||||
'app.rpc.commands.viewer
|
||||
'app.rpc.commands.webhooks)
|
||||
(map (partial process-method cfg))
|
||||
(map (partial process-method cfg "rpc" wrap))
|
||||
(into {}))))
|
||||
|
||||
(def ^:private schema:methods-params
|
||||
@@ -262,6 +269,7 @@
|
||||
::session/manager
|
||||
::http.client/client
|
||||
::db/pool
|
||||
::rds/pool
|
||||
::mbus/msgbus
|
||||
::sto/storage
|
||||
::mtx/metrics
|
||||
@@ -277,7 +285,50 @@
|
||||
(defmethod ig/init-key ::methods
|
||||
[_ cfg]
|
||||
(let [cfg (d/without-nils cfg)]
|
||||
(resolve-command-methods cfg)))
|
||||
(resolve-methods cfg)))
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; MANAGEMENT METHODS
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
(defn- resolve-management-methods
|
||||
[cfg]
|
||||
(let [cfg (assoc cfg ::type "management" ::metrics-id :rpc-management-timing)]
|
||||
(->> (sv/scan-ns
|
||||
'app.rpc.management.subscription
|
||||
'app.rpc.management.exporter)
|
||||
(map (partial process-method cfg "management" wrap-management))
|
||||
(into {}))))
|
||||
|
||||
(def ^:private schema:management-methods-params
|
||||
[:map {:title "management-methods-params"}
|
||||
::session/manager
|
||||
::http.client/client
|
||||
::db/pool
|
||||
::rds/pool
|
||||
::mbus/msgbus
|
||||
::sto/storage
|
||||
::mtx/metrics
|
||||
::setup/props])
|
||||
|
||||
(defmethod ig/assert-key ::management-methods
|
||||
[_ params]
|
||||
(assert (sm/check schema:management-methods-params params)))
|
||||
|
||||
(defmethod ig/init-key ::management-methods
|
||||
[_ cfg]
|
||||
(let [cfg (d/without-nils cfg)]
|
||||
(resolve-management-methods cfg)))
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; ROUTES
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
(defn- redirect
|
||||
[href]
|
||||
(fn [_]
|
||||
{::yres/status 308
|
||||
::yres/headers {"location" (str href)}}))
|
||||
|
||||
(def ^:private schema:methods
|
||||
[:map-of :keyword [:tuple :map ::sm/fn]])
|
||||
@@ -292,11 +343,50 @@
|
||||
(assert (db/pool? (::db/pool params)) "expect valid database pool")
|
||||
(assert (some? (::setup/props params)))
|
||||
(assert (session/manager? (::session/manager params)) "expect valid session manager")
|
||||
(assert (valid-methods? (::methods params)) "expect valid methods map"))
|
||||
(assert (valid-methods? (::methods params)) "expect valid methods map")
|
||||
(assert (valid-methods? (::management-methods params)) "expect valid management methods map"))
|
||||
|
||||
(defmethod ig/init-key ::routes
|
||||
[_ {:keys [::methods] :as cfg}]
|
||||
(let [methods (update-vals methods peek)]
|
||||
[["/rpc" {:middleware [[session/authz cfg]
|
||||
[actoken/authz cfg]]}
|
||||
["/command/:type" {:handler (partial rpc-handler methods)}]]]))
|
||||
[_ {:keys [::methods ::management-methods ::setup/props] :as cfg}]
|
||||
|
||||
(let [public-uri (cf/get :public-uri)
|
||||
management-key (or (cf/get :management-api-key)
|
||||
(get props :management-key))]
|
||||
|
||||
["/api"
|
||||
["/management"
|
||||
["/methods/:type"
|
||||
{:middleware [[mw/shared-key-auth management-key]
|
||||
[session/authz cfg]]
|
||||
:handler (make-rpc-handler management-methods)}]
|
||||
|
||||
(doc/routes :methods management-methods
|
||||
:label "management"
|
||||
:base-uri (u/join public-uri "/api/management")
|
||||
:description "MANAGEMENT API")]
|
||||
|
||||
["/main"
|
||||
["/methods/:type"
|
||||
{:middleware [[mw/cors]
|
||||
[sec/client-header-check]
|
||||
[session/authz cfg]
|
||||
[actoken/authz cfg]]
|
||||
:handler (make-rpc-handler methods)}]
|
||||
|
||||
(doc/routes :methods methods
|
||||
:label "main"
|
||||
:base-uri (u/join public-uri "/api/main")
|
||||
:description "MAIN API")]
|
||||
|
||||
;; BACKWARD COMPATIBILITY
|
||||
["/_doc" {:handler (redirect (u/join public-uri "/api/main/doc"))}]
|
||||
["/doc" {:handler (redirect (u/join public-uri "/api/main/doc"))}]
|
||||
["/openapi" {:handler (redirect (u/join public-uri "/api/main/doc/openapi"))}]
|
||||
["/openapi.join" {:handler (redirect (u/join public-uri "/api/main/doc/openapi.json"))}]
|
||||
|
||||
["/rpc/command/:type"
|
||||
{:middleware [[mw/cors]
|
||||
[sec/client-header-check]
|
||||
[session/authz cfg]
|
||||
[actoken/authz cfg]]
|
||||
:handler (make-rpc-handler methods)}]]))
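;; For reference, not part of the patch: with the route table above the RPC
;; surface ends up under the public URI roughly as follows (accepted verbs
;; still depend on the get-* naming rule enforced in make-rpc-handler):
;;
;;   /api/main/methods/:type        command methods (cors + session/token auth)
;;   /api/management/methods/:type  management methods (shared key + session auth)
;;   /api/rpc/command/:type         kept for backward compatibility
;;   /api/_doc, /api/doc, /api/openapi*  redirects to the new doc locations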
|
||||
|
||||
@@ -11,17 +11,16 @@
|
||||
[app.common.exceptions :as ex]
|
||||
[app.common.logging :as l]
|
||||
[app.common.schema :as sm]
|
||||
[app.common.time :as ct]
|
||||
[app.metrics :as mtx]
|
||||
[app.rpc :as-alias rpc]
|
||||
[app.util.cache :as cache]
|
||||
[app.util.services :as-alias sv]
|
||||
[app.util.time :as dt]
|
||||
[app.worker :as-alias wrk]
|
||||
[clojure.edn :as edn]
|
||||
[clojure.set :as set]
|
||||
[datoteka.fs :as fs]
|
||||
[integrant.core :as ig]
|
||||
[promesa.exec :as px]
|
||||
[promesa.exec.bulkhead :as pbh])
|
||||
(:import
|
||||
clojure.lang.ExceptionInfo
|
||||
@@ -154,7 +153,7 @@
|
||||
:id limit-id
|
||||
:label limit-label
|
||||
:queue queue
|
||||
:elapsed (some-> elapsed dt/format-duration)
|
||||
:elapsed (some-> elapsed ct/format-duration)
|
||||
:params @limit-params)))
|
||||
|
||||
(def ^:private idseq (AtomicLong. 0))
|
||||
@@ -171,7 +170,7 @@
|
||||
mlabels (into-array String [(id->str limit-id)])
|
||||
limit-id (id->str limit-id limit-key)
|
||||
limiter (cache/get cache limit-id (partial create-limiter config))
|
||||
tpoint (dt/tpoint)
|
||||
tpoint (ct/tpoint)
|
||||
req-id (.incrementAndGet ^AtomicLong idseq)]
|
||||
(try
|
||||
(let [stats (pbh/get-stats limiter)]
|
||||
@@ -289,13 +288,9 @@
|
||||
(get-limits cfg)))
|
||||
|
||||
(defn invoke!
|
||||
"Run a function in context of climit.
|
||||
Intended to be used in virtual threads."
|
||||
[{:keys [::executor ::rpc/climit] :as cfg} f params]
|
||||
"Run a function in context of climit."
|
||||
[{:keys [::rpc/climit] :as cfg} f params]
|
||||
(let [f (if climit
|
||||
(let [f (if (some? executor)
|
||||
(fn [cfg params] (px/await! (px/submit! executor (fn [] (f cfg params)))))
|
||||
f)]
|
||||
(build-exec-chain cfg f))
|
||||
(build-exec-chain cfg f)
|
||||
f)]
|
||||
(f cfg params)))
|
||||
|
||||
@@ -7,6 +7,7 @@
|
||||
(ns app.rpc.commands.access-token
|
||||
(:require
|
||||
[app.common.schema :as sm]
|
||||
[app.common.time :as ct]
|
||||
[app.common.uuid :as uuid]
|
||||
[app.db :as db]
|
||||
[app.main :as-alias main]
|
||||
@@ -15,22 +16,22 @@
|
||||
[app.rpc.quotes :as quotes]
|
||||
[app.setup :as-alias setup]
|
||||
[app.tokens :as tokens]
|
||||
[app.util.services :as sv]
|
||||
[app.util.time :as dt]))
|
||||
[app.util.services :as sv]))
|
||||
|
||||
(defn- decode-row
|
||||
[row]
|
||||
(dissoc row :perms))
|
||||
|
||||
(defn create-access-token
|
||||
[{:keys [::db/conn ::setup/props]} profile-id name expiration]
|
||||
(let [created-at (dt/now)
|
||||
token-id (uuid/next)
|
||||
token (tokens/generate props {:iss "access-token"
|
||||
:tid token-id
|
||||
:iat created-at})
|
||||
[{:keys [::db/conn] :as cfg} profile-id name expiration]
|
||||
(let [token-id (uuid/next)
|
||||
expires-at (some-> expiration (ct/in-future))
|
||||
created-at (ct/now)
|
||||
token (tokens/generate cfg {:iss "access-token"
|
||||
:uid profile-id
|
||||
:iat created-at
|
||||
:tid token-id})
|
||||
|
||||
expires-at (some-> expiration dt/in-future)
|
||||
token (db/insert! conn :access-token
|
||||
{:id token-id
|
||||
:name name
|
||||
@@ -49,7 +50,7 @@
|
||||
(def ^:private schema:create-access-token
|
||||
[:map {:title "create-access-token"}
|
||||
[:name [:string {:max 250 :min 1}]]
|
||||
[:expiration {:optional true} ::dt/duration]])
|
||||
[:expiration {:optional true} ::ct/duration]])
|
||||
|
||||
(sv/defmethod ::create-access-token
|
||||
{::doc/added "1.18"
|
||||
|
||||
@@ -10,6 +10,7 @@
|
||||
[app.common.data :as d]
|
||||
[app.common.logging :as l]
|
||||
[app.common.schema :as sm]
|
||||
[app.common.time :as ct]
|
||||
[app.common.uuid :as uuid]
|
||||
[app.config :as cf]
|
||||
[app.db :as db]
|
||||
@@ -20,8 +21,7 @@
|
||||
[app.rpc.doc :as-alias doc]
|
||||
[app.rpc.helpers :as rph]
|
||||
[app.util.inet :as inet]
|
||||
[app.util.services :as sv]
|
||||
[app.util.time :as dt]))
|
||||
[app.util.services :as sv]))
|
||||
|
||||
(def ^:private event-columns
|
||||
[:id
|
||||
@@ -49,7 +49,7 @@
|
||||
|
||||
(defn- adjust-timestamp
|
||||
[{:keys [timestamp created-at] :as event}]
|
||||
(let [margin (inst-ms (dt/diff timestamp created-at))]
|
||||
(let [margin (inst-ms (ct/diff timestamp created-at))]
|
||||
(if (or (neg? margin)
|
||||
(> margin 3600000))
|
||||
;; If the event is in the future or lags more than 1 hour, we reassign
|
||||
@@ -63,7 +63,7 @@
|
||||
[{:keys [::db/pool]} {:keys [::rpc/profile-id events] :as params}]
|
||||
(let [request (-> params meta ::http/request)
|
||||
ip-addr (inet/parse-request request)
|
||||
tnow (dt/now)
|
||||
tnow (ct/now)
|
||||
xform (comp
|
||||
(map (fn [event]
|
||||
(-> event
|
||||
|
||||
@@ -6,20 +6,25 @@
|
||||
|
||||
(ns app.rpc.commands.auth
|
||||
(:require
|
||||
[app.auth :as auth]
|
||||
[app.auth.oidc :as oidc]
|
||||
[app.common.data :as d]
|
||||
[app.common.data.macros :as dm]
|
||||
[app.common.exceptions :as ex]
|
||||
[app.common.features :as cfeat]
|
||||
[app.common.logging :as l]
|
||||
[app.common.schema :as sm]
|
||||
[app.common.time :as ct]
|
||||
[app.common.uri :as u]
|
||||
[app.common.uuid :as uuid]
|
||||
[app.config :as cf]
|
||||
[app.db :as db]
|
||||
[app.email :as eml]
|
||||
[app.email.blacklist :as email.blacklist]
|
||||
[app.email.whitelist :as email.whitelist]
|
||||
[app.http :as-alias http]
|
||||
[app.http.session :as session]
|
||||
[app.loggers.audit :as audit]
|
||||
[app.media :as media]
|
||||
[app.rpc :as-alias rpc]
|
||||
[app.rpc.climit :as-alias climit]
|
||||
[app.rpc.commands.profile :as profile]
|
||||
@@ -28,9 +33,9 @@
|
||||
[app.rpc.helpers :as rph]
|
||||
[app.setup :as-alias setup]
|
||||
[app.setup.welcome-file :refer [create-welcome-file]]
|
||||
[app.storage :as sto]
|
||||
[app.tokens :as tokens]
|
||||
[app.util.services :as sv]
|
||||
[app.util.time :as dt]
|
||||
[app.worker :as wrk]
|
||||
[cuerdas.core :as str]))
|
||||
|
||||
@@ -42,7 +47,7 @@
|
||||
|
||||
(defn- elapsed-verify-threshold?
|
||||
[profile]
|
||||
(let [elapsed (dt/diff (:modified-at profile) (dt/now))
|
||||
(let [elapsed (ct/diff (:modified-at profile) (ct/now))
|
||||
verify-threshold (cf/get :email-verify-threshold)]
|
||||
(pos? (compare elapsed verify-threshold))))
|
||||
|
||||
@@ -62,7 +67,7 @@
|
||||
(ex/raise :type :validation
|
||||
:code :account-without-password
|
||||
:hint "the current account does not have password")
|
||||
(let [result (profile/verify-password cfg password (:password profile))]
|
||||
(let [result (auth/verify-password password (:password profile))]
|
||||
(when (:update result)
|
||||
(l/trc :hint "updating profile password"
|
||||
:id (str (:id profile))
|
||||
@@ -85,7 +90,7 @@
|
||||
(ex/raise :type :validation
|
||||
:code :wrong-credentials))
|
||||
(when-let [deleted-at (:deleted-at profile)]
|
||||
(when (dt/is-after? (dt/now) deleted-at)
|
||||
(when (ct/is-after? (ct/now) deleted-at)
|
||||
(ex/raise :type :validation
|
||||
:code :wrong-credentials)))
|
||||
|
||||
@@ -98,7 +103,7 @@
|
||||
(profile/strip-private-attrs))
|
||||
|
||||
invitation (when-let [token (:invitation-token params)]
|
||||
(tokens/verify (::setup/props cfg) {:token token :iss :team-invitation}))
|
||||
(tokens/verify cfg {:token token :iss :team-invitation}))
|
||||
|
||||
;; If the invitation member-id does not match the profile-id, we just proceed to ignore the
;; invitation because invitations match exactly; and the user can't log in with another email and
|
||||
@@ -108,7 +113,7 @@
|
||||
(assoc profile :is-admin (let [admins (cf/get :admins)]
|
||||
(contains? admins (:email profile)))))]
|
||||
(-> response
|
||||
(rph/with-transform (session/create-fn cfg (:id profile)))
|
||||
(rph/with-transform (session/create-fn cfg profile))
|
||||
(rph/with-meta {::audit/props (audit/profile->props profile)
|
||||
::audit/profile-id (:id profile)}))))]
|
||||
|
||||
@@ -144,7 +149,24 @@
|
||||
[cfg params]
|
||||
(if (= (:profile-id params)
|
||||
(::rpc/profile-id params))
|
||||
(rph/with-transform {} (session/delete-fn cfg))
|
||||
(let [{:keys [claims]}
|
||||
(rph/get-auth-data params)
|
||||
|
||||
provider
|
||||
(some->> (get claims :sso-provider-id)
|
||||
(oidc/get-provider cfg))
|
||||
|
||||
response
|
||||
(if (and provider (:logout-uri provider))
|
||||
(let [params {"logout_hint" (get claims :sso-session-id)
|
||||
"client_id" (get provider :client-id)
|
||||
"post_logout_redirect_uri" (str (cf/get :public-uri))}
|
||||
uri (-> (u/uri (:logout-uri provider))
|
||||
(assoc :query (u/map->query-string params)))]
|
||||
{:redirect-uri uri})
|
||||
{})]
|
||||
|
||||
(rph/with-transform response (session/delete-fn cfg)))
|
||||
{}))
|
||||
|
||||
;; ---- COMMAND: Recover Profile
|
||||
@@ -152,11 +174,11 @@
|
||||
(defn recover-profile
|
||||
[{:keys [::db/conn] :as cfg} {:keys [token password]}]
|
||||
(letfn [(validate-token [token]
|
||||
(let [tdata (tokens/verify (::setup/props cfg) {:token token :iss :password-recovery})]
|
||||
(let [tdata (tokens/verify cfg {:token token :iss :password-recovery})]
|
||||
(:profile-id tdata)))
|
||||
|
||||
(update-password [conn profile-id]
|
||||
(let [pwd (profile/derive-password cfg password)]
|
||||
(let [pwd (auth/derive-password password)]
|
||||
(db/update! conn :profile {:password pwd :is-active true} {:id profile-id})
|
||||
nil))]
|
||||
|
||||
@@ -191,7 +213,7 @@
|
||||
:hint "registration disabled"))
|
||||
|
||||
(when (contains? params :invitation-token)
|
||||
(let [invitation (tokens/verify (::setup/props cfg)
|
||||
(let [invitation (tokens/verify cfg
|
||||
{:token (:invitation-token params)
|
||||
:iss :team-invitation})]
|
||||
(when-not (= (:email params) (:member-email invitation))
|
||||
@@ -244,11 +266,11 @@
|
||||
:backend "penpot"
|
||||
:iss :prepared-register
|
||||
:profile-id (:id profile)
|
||||
:exp (dt/in-future {:days 7})
|
||||
:exp (ct/in-future {:days 7})
|
||||
:props {:newsletter-updates (or accept-newsletter-updates false)}}
|
||||
|
||||
params (d/without-nils params)
|
||||
token (tokens/generate (::setup/props cfg) params)]
|
||||
token (tokens/generate cfg params)]
|
||||
|
||||
(with-meta {:token token}
|
||||
{::audit/profile-id uuid/zero})))
|
||||
@@ -270,11 +292,29 @@
|
||||
|
||||
;; ---- COMMAND: Register Profile
|
||||
|
||||
(defn create-profile!
|
||||
(defn import-profile-picture
|
||||
[cfg uri]
|
||||
(try
|
||||
(let [storage (sto/resolve cfg)
|
||||
input (media/download-image cfg uri)
|
||||
input (media/run {:cmd :info :input input})
|
||||
hash (sto/calculate-hash (:path input))
|
||||
content (-> (sto/content (:path input) (:size input))
|
||||
(sto/wrap-with-hash hash))
|
||||
sobject (sto/put-object! storage {::sto/content content
|
||||
::sto/deduplicate? true
|
||||
:bucket "profile"
|
||||
:content-type (:mtype input)})]
|
||||
(:id sobject))
|
||||
(catch Throwable cause
|
||||
(l/err :hint "unable to import profile picture"
|
||||
:cause cause)
|
||||
nil)))
|
||||
|
||||
(defn create-profile
|
||||
"Create the profile entry on the database with limited set of input
|
||||
attrs (all the other attrs are filled with default values)."
|
||||
[conn {:keys [email] :as params}]
|
||||
(dm/assert! ::sm/email email)
|
||||
[{:keys [::db/conn] :as cfg} {:keys [email] :as params}]
|
||||
(let [id (or (:id params) (uuid/next))
|
||||
props (-> (audit/extract-utm-params params)
|
||||
(merge (:props params))
|
||||
@@ -282,8 +322,7 @@
|
||||
:viewed-walkthrough? false
|
||||
:nudge {:big 10 :small 1}
|
||||
:v2-info-shown true
|
||||
:release-notes-viewed (:main cf/version)})
|
||||
(db/tjson))
|
||||
:release-notes-viewed (:main cf/version)}))
|
||||
|
||||
password (or (:password params) "!")
|
||||
|
||||
@@ -298,6 +337,12 @@
|
||||
theme (:theme params nil)
|
||||
email (str/lower email)
|
||||
|
||||
photo-id (some->> (or (:oidc/picture props)
|
||||
(:google/picture props)
|
||||
(:github/picture props)
|
||||
(:gitlab/picture props))
|
||||
(import-profile-picture cfg))
|
||||
|
||||
params {:id id
|
||||
:fullname (:fullname params)
|
||||
:email email
|
||||
@@ -305,27 +350,26 @@
|
||||
:lang locale
|
||||
:password password
|
||||
:deleted-at (:deleted-at params)
|
||||
:props props
|
||||
:props (db/tjson props)
|
||||
:theme theme
|
||||
:photo-id photo-id
|
||||
:is-active is-active
|
||||
:is-muted is-muted
|
||||
:is-demo is-demo}]
|
||||
|
||||
(try
|
||||
(-> (db/insert! conn :profile params)
|
||||
(profile/decode-row))
|
||||
(catch org.postgresql.util.PSQLException cause
|
||||
(let [state (.getSQLState cause)]
|
||||
(if (not= state "23505")
|
||||
(throw cause)
|
||||
(if (db/duplicate-key-error? cause)
|
||||
(ex/raise :type :validation
|
||||
:code :email-already-exists
|
||||
:hint "email already exists"
|
||||
:cause cause)
|
||||
(throw cause))))))
|
||||
|
||||
(do
|
||||
(l/error :hint "not an error" :cause cause)
|
||||
(ex/raise :type :validation
|
||||
:code :email-already-exists
|
||||
:hint "email already exists"
|
||||
:cause cause))))))))
|
||||
|
||||
(defn create-profile-rels!
|
||||
(defn create-profile-rels
|
||||
[conn {:keys [id] :as profile}]
|
||||
(let [features (cfeat/get-enabled-features cf/flags)
|
||||
team (teams/create-team conn
|
||||
@@ -342,17 +386,17 @@
|
||||
|
||||
(defn send-email-verification!
|
||||
[{:keys [::db/conn] :as cfg} profile]
|
||||
(let [vtoken (tokens/generate (::setup/props cfg)
|
||||
(let [vtoken (tokens/generate cfg
|
||||
{:iss :verify-email
|
||||
:exp (dt/in-future "72h")
|
||||
:exp (ct/in-future "72h")
|
||||
:profile-id (:id profile)
|
||||
:email (:email profile)})
|
||||
;; NOTE: this token is mainly used for identifying possible
;; complaints on the SNS webhook
|
||||
ptoken (tokens/generate (::setup/props cfg)
|
||||
ptoken (tokens/generate cfg
|
||||
{:iss :profile-identity
|
||||
:profile-id (:id profile)
|
||||
:exp (dt/in-future {:days 30})})]
|
||||
:exp (ct/in-future {:days 30})})]
|
||||
(eml/send! {::eml/conn conn
|
||||
::eml/factory eml/register
|
||||
:public-uri (cf/get :public-uri)
|
||||
@@ -363,7 +407,7 @@
|
||||
|
||||
(defn register-profile
|
||||
[{:keys [::db/conn ::wrk/executor] :as cfg} {:keys [token] :as params}]
|
||||
(let [claims (tokens/verify (::setup/props cfg) {:token token :iss :prepared-register})
|
||||
(let [claims (tokens/verify cfg {:token token :iss :prepared-register})
|
||||
params (into claims params)
|
||||
|
||||
profile (if-let [profile-id (:profile-id claims)]
|
||||
@@ -375,18 +419,19 @@
|
||||
;; to detect if the profile is already registered
|
||||
(or (profile/get-profile-by-email conn (:email claims))
|
||||
(let [is-active (or (boolean (:is-active claims))
|
||||
(boolean (:email-verified claims))
|
||||
(not (contains? cf/flags :email-verification)))
|
||||
params (-> params
|
||||
(assoc :is-active is-active)
|
||||
(update :password #(profile/derive-password cfg %)))
|
||||
profile (->> (create-profile! conn params)
|
||||
(create-profile-rels! conn))]
|
||||
(update :password auth/derive-password))
|
||||
profile (->> (create-profile cfg params)
|
||||
(create-profile-rels conn))]
|
||||
(vary-meta profile assoc :created true))))
|
||||
|
||||
created? (-> profile meta :created true?)
|
||||
|
||||
invitation (when-let [token (:invitation-token params)]
|
||||
(tokens/verify (::setup/props cfg) {:token token :iss :team-invitation}))
|
||||
(tokens/verify cfg {:token token :iss :team-invitation}))
|
||||
|
||||
props (-> (audit/profile->props profile)
|
||||
(assoc :from-invitation (some? invitation)))
|
||||
@@ -418,10 +463,10 @@
|
||||
(and (some? invitation)
|
||||
(= (:email profile)
|
||||
(:member-email invitation)))
|
||||
(let [claims (assoc invitation :member-id (:id profile))
|
||||
token (tokens/generate (::setup/props cfg) claims)]
|
||||
(let [invitation (assoc invitation :member-id (:id profile))
|
||||
token (tokens/generate cfg invitation)]
|
||||
(-> {:invitation-token token}
|
||||
(rph/with-transform (session/create-fn cfg (:id profile)))
|
||||
(rph/with-transform (session/create-fn cfg profile claims))
|
||||
(rph/with-meta {::audit/replace-props props
|
||||
::audit/context {:action "accept-invitation"}
|
||||
::audit/profile-id (:id profile)})))
|
||||
@@ -432,7 +477,7 @@
|
||||
created?
|
||||
(if (:is-active profile)
|
||||
(-> (profile/strip-private-attrs profile)
|
||||
(rph/with-transform (session/create-fn cfg (:id profile)))
|
||||
(rph/with-transform (session/create-fn cfg profile claims))
|
||||
(rph/with-defer create-welcome-file-when-needed)
|
||||
(rph/with-meta
|
||||
{::audit/replace-props props
|
||||
@@ -466,7 +511,7 @@
|
||||
|
||||
(when (= action "resend-email-verification")
|
||||
(db/update! conn :profile
|
||||
{:modified-at (dt/now)}
|
||||
{:modified-at (ct/now)}
|
||||
{:id (:id profile)})
|
||||
(send-email-verification! cfg profile))
|
||||
|
||||
@@ -493,17 +538,17 @@
|
||||
(defn- request-profile-recovery
|
||||
[{:keys [::db/conn] :as cfg} {:keys [email] :as params}]
|
||||
(letfn [(create-recovery-token [{:keys [id] :as profile}]
|
||||
(let [token (tokens/generate (::setup/props cfg)
|
||||
(let [token (tokens/generate cfg
|
||||
{:iss :password-recovery
|
||||
:exp (dt/in-future "15m")
|
||||
:exp (ct/in-future "15m")
|
||||
:profile-id id})]
|
||||
(assoc profile :token token)))
|
||||
|
||||
(send-email-notification [conn profile]
|
||||
(let [ptoken (tokens/generate (::setup/props cfg)
|
||||
(let [ptoken (tokens/generate cfg
|
||||
{:iss :profile-identity
|
||||
:profile-id (:id profile)
|
||||
:exp (dt/in-future {:days 30})})]
|
||||
:exp (ct/in-future {:days 30})})]
|
||||
(eml/send! {::eml/conn conn
|
||||
::eml/factory eml/password-recovery
|
||||
:public-uri (cf/get :public-uri)
|
||||
@@ -544,7 +589,7 @@
|
||||
:else
|
||||
(do
|
||||
(db/update! conn :profile
|
||||
{:modified-at (dt/now)}
|
||||
{:modified-at (ct/now)}
|
||||
{:id (:id profile)})
|
||||
(->> profile
|
||||
(create-recovery-token)
|
||||
@@ -561,4 +606,32 @@
|
||||
[cfg params]
|
||||
(db/tx-run! cfg request-profile-recovery params))
|
||||
|
||||
;; --- COMMAND: get-sso-config
|
||||
|
||||
(defn- extract-domain
|
||||
"Extract the domain part from email"
|
||||
[email]
|
||||
(let [at (str/last-index-of email "@")]
|
||||
(when (and (>= at 0)
|
||||
(< at (dec (count email))))
|
||||
(-> (subs email (inc at))
|
||||
(str/trim)
|
||||
(str/lower)))))
|
||||
|
||||
(def ^:private schema:get-sso-provider
|
||||
[:map {:title "get-sso-config"}
|
||||
[:email ::sm/email]])
|
||||
|
||||
(def ^:private schema:get-sso-provider-result
|
||||
[:map {:title "SSOProvider"}
|
||||
[:id ::sm/uuid]])
|
||||
|
||||
(sv/defmethod ::get-sso-provider
|
||||
{::rpc/auth false
|
||||
::doc/added "2.12"
|
||||
::sm/params schema:get-sso-provider
|
||||
::sm/result schema:get-sso-provider-result}
|
||||
[cfg {:keys [email]}]
|
||||
(when-let [domain (extract-domain email)]
|
||||
(when-let [config (db/get* cfg :sso-provider {:domain domain})]
|
||||
(select-keys config [:id]))))
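For reference, a minimal standalone sketch of the domain-extraction step used by get-sso-provider, written against clojure.string instead of the cuerdas helpers used above; the email values in the comments are hypothetical:

(require '[clojure.string :as string])

(defn extract-domain-sketch
  "Return the lower-cased domain part of an email, or nil."
  [email]
  (let [at (string/last-index-of email "@")]
    (when (and at (< at (dec (count email))))
      (-> (subs email (inc at))
          (string/trim)
          (string/lower-case)))))

;; (extract-domain-sketch "jane.doe@Example.COM") => "example.com"
;; (extract-domain-sketch "no-at-sign")           => nil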
|
||||
|
||||
@@ -11,8 +11,9 @@
|
||||
[app.binfile.v1 :as bf.v1]
|
||||
[app.binfile.v3 :as bf.v3]
|
||||
[app.common.features :as cfeat]
|
||||
[app.common.logging :as l]
|
||||
[app.common.schema :as sm]
|
||||
[app.common.time :as ct]
|
||||
[app.common.uri :as u]
|
||||
[app.config :as cf]
|
||||
[app.db :as db]
|
||||
[app.http.sse :as sse]
|
||||
@@ -24,12 +25,12 @@
|
||||
[app.rpc.commands.projects :as projects]
|
||||
[app.rpc.commands.teams :as teams]
|
||||
[app.rpc.doc :as-alias doc]
|
||||
[app.storage :as sto]
|
||||
[app.storage.tmp :as tmp]
|
||||
[app.tasks.file-gc]
|
||||
[app.util.services :as sv]
|
||||
[app.util.time :as dt]
|
||||
[app.worker :as-alias wrk]
|
||||
[promesa.exec :as px]
|
||||
[yetti.response :as yres]))
|
||||
[datoteka.fs :as fs]))
|
||||
|
||||
(set! *warn-on-reflection* true)
|
||||
|
||||
@@ -39,62 +40,47 @@
|
||||
schema:export-binfile
|
||||
[:map {:title "export-binfile"}
|
||||
[:file-id ::sm/uuid]
|
||||
[:version {:optional true} ::sm/int]
|
||||
[:include-libraries ::sm/boolean]
|
||||
[:embed-assets ::sm/boolean]])
|
||||
|
||||
(defn stream-export-v1
|
||||
[cfg {:keys [file-id include-libraries embed-assets] :as params}]
|
||||
(yres/stream-body
|
||||
(fn [_ output-stream]
|
||||
(try
|
||||
(-> cfg
|
||||
(assoc ::bfc/ids #{file-id})
|
||||
(assoc ::bfc/embed-assets embed-assets)
|
||||
(assoc ::bfc/include-libraries include-libraries)
|
||||
(bf.v1/export-files! output-stream))
|
||||
(catch Throwable cause
|
||||
(l/err :hint "exception on exporting file"
|
||||
:file-id (str file-id)
|
||||
:cause cause))))))
|
||||
(defn- export-binfile
|
||||
[{:keys [::sto/storage] :as cfg} {:keys [file-id include-libraries embed-assets]}]
|
||||
(let [output (tmp/tempfile*)]
|
||||
(try
|
||||
(-> cfg
|
||||
(assoc ::bfc/ids #{file-id})
|
||||
(assoc ::bfc/embed-assets embed-assets)
|
||||
(assoc ::bfc/include-libraries include-libraries)
|
||||
(bf.v3/export-files! output))
|
||||
|
||||
(defn stream-export-v3
|
||||
[cfg {:keys [file-id include-libraries embed-assets] :as params}]
|
||||
(yres/stream-body
|
||||
(fn [_ output-stream]
|
||||
(try
|
||||
(-> cfg
|
||||
(assoc ::bfc/ids #{file-id})
|
||||
(assoc ::bfc/embed-assets embed-assets)
|
||||
(assoc ::bfc/include-libraries include-libraries)
|
||||
(bf.v3/export-files! output-stream))
|
||||
(catch Throwable cause
|
||||
(l/err :hint "exception on exporting file"
|
||||
:file-id (str file-id)
|
||||
:cause cause))))))
|
||||
(let [data (sto/content output)
|
||||
object (sto/put-object! storage
|
||||
{::sto/content data
|
||||
::sto/touched-at (ct/in-future {:minutes 60})
|
||||
:content-type "application/zip"
|
||||
:bucket "tempfile"})]
|
||||
|
||||
(-> (cf/get :public-uri)
|
||||
(u/join "/assets/by-id/")
|
||||
(u/join (str (:id object)))))
|
||||
|
||||
(finally
|
||||
(fs/delete output)))))
|
||||
|
||||
(sv/defmethod ::export-binfile
|
||||
"Export a penpot file in a binary format."
|
||||
{::doc/added "1.15"
|
||||
::doc/changes [["2.12" "Remove version parameter, only one version is supported"]]
|
||||
::webhooks/event? true
|
||||
::sm/params schema:export-binfile}
|
||||
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id version file-id] :as params}]
|
||||
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id file-id] :as params}]
|
||||
(files/check-read-permissions! pool profile-id file-id)
|
||||
(fn [_]
|
||||
(let [version (or version 1)
|
||||
body (case (int version)
|
||||
1 (stream-export-v1 cfg params)
|
||||
2 (throw (ex-info "not-implemented" {}))
|
||||
3 (stream-export-v3 cfg params))]
|
||||
|
||||
{::yres/status 200
|
||||
::yres/headers {"content-type" "application/octet-stream"}
|
||||
::yres/body body})))
|
||||
(sse/response (partial export-binfile cfg params)))
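Assuming the URI join behaves like simple path concatenation, the download link returned by export-binfile has roughly this shape; a sketch with a hypothetical host and object id, using plain strings instead of app.common.uri:

(defn download-uri-sketch
  "Build the public download URI for a stored export object."
  [public-uri object-id]
  (str public-uri "/assets/by-id/" object-id))

;; (download-uri-sketch "https://design.example.com"
;;                      "3f6b2c1e-0000-0000-0000-000000000000")
;; => "https://design.example.com/assets/by-id/3f6b2c1e-0000-0000-0000-000000000000"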
|
||||
|
||||
;; --- Command: import-binfile
|
||||
|
||||
(defn- import-binfile
|
||||
[{:keys [::db/pool ::wrk/executor] :as cfg} {:keys [profile-id project-id version name file]}]
|
||||
[{:keys [::db/pool] :as cfg} {:keys [profile-id project-id version name file]}]
|
||||
(let [team (teams/get-team pool
|
||||
:profile-id profile-id
|
||||
:project-id project-id)
|
||||
@@ -105,16 +91,12 @@
|
||||
(assoc ::bfc/name name)
|
||||
(assoc ::bfc/input (:path file)))
|
||||
|
||||
;; NOTE: the import process performs some operations that are
;; not very friendly with virtual threads, so to avoid
;; unexpected blocking of other concurrent operations we
;; dispatch that work to a dedicated executor.
|
||||
result (case (int version)
|
||||
1 (px/invoke! executor (partial bf.v1/import-files! cfg))
|
||||
3 (px/invoke! executor (partial bf.v3/import-files! cfg)))]
|
||||
1 (bf.v1/import-files! cfg)
|
||||
3 (bf.v3/import-files! cfg))]
|
||||
|
||||
(db/update! pool :project
|
||||
{:modified-at (dt/now)}
|
||||
{:modified-at (ct/now)}
|
||||
{:id project-id}
|
||||
{::db/return-keys false})
|
||||
|
||||
@@ -125,21 +107,35 @@
|
||||
[:name [:or [:string {:max 250}]
|
||||
[:map-of ::sm/uuid [:string {:max 250}]]]]
|
||||
[:project-id ::sm/uuid]
|
||||
[:file-id {:optional true} ::sm/uuid]
|
||||
[:version {:optional true} ::sm/int]
|
||||
[:file ::media/upload]])
|
||||
[:file media/schema:upload]])
|
||||
|
||||
(sv/defmethod ::import-binfile
|
||||
"Import a penpot file in a binary format."
|
||||
"Import a penpot file in a binary format. If `file-id` is provided,
|
||||
an in-place import will be performed instead of creating a new file.
|
||||
|
||||
In-place import is only supported for binfile-v3 and when the
.penpot file contains only one penpot file.
"
|
||||
{::doc/added "1.15"
|
||||
::doc/changes ["1.20" "Add file-id param for in-place import"
|
||||
"1.20" "Set default version to 3"]
|
||||
|
||||
::webhooks/event? true
|
||||
::sse/stream? true
|
||||
::sm/params schema:import-binfile}
|
||||
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id project-id version file] :as params}]
|
||||
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id project-id version file-id file] :as params}]
|
||||
(projects/check-edition-permissions! pool profile-id project-id)
|
||||
(let [version (or version 1)
|
||||
(let [version (or version 3)
|
||||
params (-> params
|
||||
(assoc :profile-id profile-id)
|
||||
(assoc :version version))
|
||||
|
||||
cfg (cond-> cfg
|
||||
(uuid? file-id)
|
||||
(assoc ::bfc/file-id file-id))
|
||||
|
||||
manifest (case (int version)
|
||||
1 nil
|
||||
3 (bf.v3/get-manifest (:path file)))]
|
||||
@@ -147,5 +143,6 @@
|
||||
(with-meta
|
||||
(sse/response (partial import-binfile cfg params))
|
||||
{::audit/props {:file nil
|
||||
:file-id file-id
|
||||
:generated-by (:generated-by manifest)
|
||||
:referer (:referer manifest)}})))
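As a hedged illustration of the two modes the docstring describes, the request parameters differ only in the presence of file-id; all ids below are placeholders and the :file value stands in for the real media upload map (only its :path key is relied on above):

(def import-as-new-file-params
  {:project-id #uuid "00000000-0000-0000-0000-000000000001"
   :name "Imported file"
   ;; placeholder for the binfile-v3 upload map
   :file {:path "/tmp/upload.penpot"}})

(def import-in-place-params
  ;; same request, but targeting an existing file
  (assoc import-as-new-file-params
         :file-id #uuid "00000000-0000-0000-0000-000000000002"))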
|
||||
|
||||
@@ -6,11 +6,13 @@
|
||||
|
||||
(ns app.rpc.commands.comments
|
||||
(:require
|
||||
[app.binfile.common :as bfc]
|
||||
[app.common.data :as d]
|
||||
[app.common.data.macros :as dm]
|
||||
[app.common.exceptions :as ex]
|
||||
[app.common.geom.point :as gpt]
|
||||
[app.common.schema :as sm]
|
||||
[app.common.time :as ct]
|
||||
[app.common.uri :as uri]
|
||||
[app.common.uuid :as uuid]
|
||||
[app.config :as cf]
|
||||
@@ -29,7 +31,6 @@
|
||||
[app.rpc.retry :as rtry]
|
||||
[app.util.pointer-map :as pmap]
|
||||
[app.util.services :as sv]
|
||||
[app.util.time :as dt]
|
||||
[clojure.set :as set]
|
||||
[cuerdas.core :as str]))
|
||||
|
||||
@@ -163,34 +164,16 @@
|
||||
(def xf-decode-row
|
||||
(map decode-row))
|
||||
|
||||
(def ^:private
|
||||
sql:get-file
|
||||
"SELECT f.id, f.modified_at, f.revn, f.features, f.name,
|
||||
f.project_id, p.team_id, f.data,
|
||||
f.data_ref_id, f.data_backend
|
||||
FROM file as f
|
||||
INNER JOIN project as p on (p.id = f.project_id)
|
||||
WHERE f.id = ?
|
||||
AND (f.deleted_at IS NULL OR f.deleted_at > now())")
|
||||
|
||||
(defn- get-file
|
||||
"A specialized version of get-file for comments module."
|
||||
[cfg file-id page-id]
|
||||
(let [file (db/exec-one! cfg [sql:get-file file-id])]
|
||||
(when-not file
|
||||
(ex/raise :type :not-found
|
||||
:code :object-not-found
|
||||
:hint "file not found"))
|
||||
|
||||
(binding [pmap/*load-fn* (partial feat.fdata/load-pointer cfg file-id)]
|
||||
(let [file (->> file
|
||||
(files/decode-row)
|
||||
(feat.fdata/resolve-file-data cfg))
|
||||
data (get file :data)]
|
||||
(-> file
|
||||
(assoc :page-name (dm/get-in data [:pages-index page-id :name]))
|
||||
(assoc :page-id page-id)
|
||||
(dissoc :data))))))
|
||||
(binding [pmap/*load-fn* (partial feat.fdata/load-pointer cfg file-id)]
|
||||
(let [file (bfc/get-file cfg file-id)
|
||||
data (get file :data)]
|
||||
(-> file
|
||||
(assoc :page-name (dm/get-in data [:pages-index page-id :name]))
|
||||
(assoc :page-id page-id)
|
||||
(dissoc :data)))))
|
||||
|
||||
;; FIXME: rename
|
||||
(defn- get-comment-thread
|
||||
@@ -222,7 +205,7 @@
|
||||
|
||||
(defn upsert-comment-thread-status!
|
||||
([conn profile-id thread-id]
|
||||
(upsert-comment-thread-status! conn profile-id thread-id (dt/in-future "1s")))
|
||||
(upsert-comment-thread-status! conn profile-id thread-id (ct/in-future "1s")))
|
||||
([conn profile-id thread-id mod-at]
|
||||
(db/exec-one! conn [sql:upsert-comment-thread-status thread-id profile-id mod-at mod-at])))
|
||||
|
||||
@@ -251,34 +234,39 @@
|
||||
(files/check-comment-permissions! conn profile-id file-id share-id)
|
||||
(get-comment-threads conn profile-id file-id))))
|
||||
|
||||
(def ^:private sql:comment-threads
|
||||
"SELECT DISTINCT ON (ct.id)
|
||||
ct.*,
|
||||
pf.fullname AS owner_fullname,
|
||||
pf.email AS owner_email,
|
||||
pf.photo_id AS owner_photo_id,
|
||||
p.team_id AS team_id,
|
||||
f.name AS file_name,
|
||||
f.project_id AS project_id,
|
||||
first_value(c.content) OVER w AS content,
|
||||
(SELECT count(1)
|
||||
FROM comment AS c
|
||||
WHERE c.thread_id = ct.id) AS count_comments,
|
||||
(SELECT count(1)
|
||||
FROM comment AS c
|
||||
WHERE c.thread_id = ct.id
|
||||
AND c.created_at >= coalesce(cts.modified_at, ct.created_at)) AS count_unread_comments
|
||||
FROM comment_thread AS ct
|
||||
INNER JOIN comment AS c ON (c.thread_id = ct.id)
|
||||
INNER JOIN file AS f ON (f.id = ct.file_id)
|
||||
INNER JOIN project AS p ON (p.id = f.project_id)
|
||||
LEFT JOIN comment_thread_status AS cts ON (cts.thread_id = ct.id AND cts.profile_id = ?)
|
||||
LEFT JOIN profile AS pf ON (ct.owner_id = pf.id)
|
||||
WINDOW w AS (PARTITION BY c.thread_id ORDER BY c.created_at ASC)")
|
||||
(defn- get-comment-threads-sql
|
||||
[where]
|
||||
(str/ffmt
|
||||
"SELECT DISTINCT ON (ct.id)
|
||||
ct.*,
|
||||
pf.fullname AS owner_fullname,
|
||||
pf.email AS owner_email,
|
||||
pf.photo_id AS owner_photo_id,
|
||||
p.team_id AS team_id,
|
||||
f.name AS file_name,
|
||||
f.project_id AS project_id,
|
||||
first_value(c.content) OVER w AS content,
|
||||
(SELECT count(1)
|
||||
FROM comment AS c
|
||||
WHERE c.thread_id = ct.id) AS count_comments,
|
||||
(SELECT count(1)
|
||||
FROM comment AS c
|
||||
WHERE c.thread_id = ct.id
|
||||
AND c.created_at >= coalesce(cts.modified_at, ct.created_at)) AS count_unread_comments
|
||||
FROM comment_thread AS ct
|
||||
INNER JOIN comment AS c ON (c.thread_id = ct.id)
|
||||
INNER JOIN file AS f ON (f.id = ct.file_id)
|
||||
INNER JOIN project AS p ON (p.id = f.project_id)
|
||||
LEFT JOIN comment_thread_status AS cts ON (cts.thread_id = ct.id AND cts.profile_id = ?)
|
||||
LEFT JOIN profile AS pf ON (ct.owner_id = pf.id)
|
||||
WHERE f.deleted_at IS NULL
|
||||
AND p.deleted_at IS NULL
|
||||
%1
|
||||
WINDOW w AS (PARTITION BY c.thread_id ORDER BY c.created_at ASC)"
|
||||
where))
|
||||
|
||||
(def ^:private sql:comment-threads-by-file-id
|
||||
(str "WITH threads AS (" sql:comment-threads ")"
|
||||
"SELECT * FROM threads WHERE file_id = ?"))
|
||||
(get-comment-threads-sql "AND ct.file_id = ?"))
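The helper above splices an extra WHERE fragment into one shared query; a much-reduced sketch of the same pattern, with clojure.core/format standing in for cuerdas' str/ffmt and a deliberately simplified query:

(defn threads-sql-sketch
  "Compose the shared comment-thread query with an extra WHERE fragment."
  [where]
  (format "SELECT ct.* FROM comment_thread AS ct WHERE ct.deleted_at IS NULL %s"
          where))

;; (threads-sql-sketch "AND ct.file_id = ?")
;; => "SELECT ct.* FROM comment_thread AS ct WHERE ct.deleted_at IS NULL AND ct.file_id = ?"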
|
||||
|
||||
(defn- get-comment-threads
|
||||
[conn profile-id file-id]
|
||||
@@ -287,7 +275,30 @@
|
||||
|
||||
;; --- COMMAND: Get Unread Comment Threads
|
||||
|
||||
(declare ^:private get-unread-comment-threads)
|
||||
(def ^:private sql:unread-all-comment-threads-by-team
|
||||
(str "WITH threads AS ("
|
||||
(get-comment-threads-sql "AND p.team_id = ?")
|
||||
")"
|
||||
"SELECT t.* FROM threads AS t
|
||||
WHERE t.count_unread_comments > 0"))
|
||||
|
||||
(def ^:private sql:unread-partial-comment-threads-by-team
|
||||
(str "WITH threads AS ("
|
||||
(get-comment-threads-sql "AND p.team_id = ? AND (ct.owner_id = ? OR ? = ANY(ct.mentions))")
|
||||
")"
|
||||
"SELECT t.* FROM threads AS t
|
||||
WHERE t.count_unread_comments > 0"))
|
||||
|
||||
(defn- get-unread-comment-threads
|
||||
[cfg profile-id team-id]
|
||||
(let [profile (-> (db/get cfg :profile {:id profile-id} ::db/remove-deleted false)
|
||||
(profile/decode-row))
|
||||
notify (or (-> profile :props :notifications :dashboard-comments) :all)
|
||||
result (case notify
|
||||
:all (db/exec! cfg [sql:unread-all-comment-threads-by-team profile-id team-id])
|
||||
:partial (db/exec! cfg [sql:unread-partial-comment-threads-by-team profile-id team-id profile-id profile-id])
|
||||
[])]
|
||||
(into [] xf-decode-row result)))
|
||||
|
||||
(def ^:private
|
||||
schema:get-unread-comment-threads
|
||||
@@ -298,41 +309,8 @@
|
||||
{::doc/added "1.15"
|
||||
::sm/params schema:get-unread-comment-threads}
|
||||
[cfg {:keys [::rpc/profile-id team-id] :as params}]
|
||||
(db/run!
|
||||
cfg
|
||||
(fn [{:keys [::db/conn]}]
|
||||
(teams/check-read-permissions! conn profile-id team-id)
|
||||
(get-unread-comment-threads conn profile-id team-id))))
|
||||
|
||||
(def sql:unread-all-comment-threads-by-team
|
||||
(str "WITH threads AS (" sql:comment-threads ")"
|
||||
"SELECT * FROM threads WHERE count_unread_comments > 0 AND team_id = ?"))
|
||||
|
||||
;; The partial configuration will retrieve only comments created by the user and
|
||||
;; threads that have a mention to the user.
|
||||
(def sql:unread-partial-comment-threads-by-team
|
||||
(str "WITH threads AS (" sql:comment-threads ")"
|
||||
"SELECT * FROM threads
|
||||
WHERE count_unread_comments > 0
|
||||
AND team_id = ?
|
||||
AND (owner_id = ? OR ? = ANY(mentions))"))
|
||||
|
||||
(defn- get-unread-comment-threads
|
||||
[conn profile-id team-id]
|
||||
(let [profile (-> (db/get conn :profile {:id profile-id})
|
||||
(profile/decode-row))
|
||||
notify (or (-> profile :props :notifications :dashboard-comments) :all)]
|
||||
|
||||
(case notify
|
||||
:all
|
||||
(->> (db/exec! conn [sql:unread-all-comment-threads-by-team profile-id team-id])
|
||||
(into [] xf-decode-row))
|
||||
|
||||
:partial
|
||||
(->> (db/exec! conn [sql:unread-partial-comment-threads-by-team profile-id team-id profile-id profile-id])
|
||||
(into [] xf-decode-row))
|
||||
|
||||
[])))
|
||||
(teams/check-read-permissions! cfg profile-id team-id)
|
||||
(get-unread-comment-threads cfg profile-id team-id))
|
||||
|
||||
;; --- COMMAND: Get Single Comment Thread
|
||||
|
||||
@@ -343,16 +321,17 @@
|
||||
[:id ::sm/uuid]
|
||||
[:share-id {:optional true} [:maybe ::sm/uuid]]])
|
||||
|
||||
(def ^:private sql:get-comment-thread
|
||||
(get-comment-threads-sql "AND ct.file_id = ? AND ct.id = ?"))
|
||||
|
||||
(sv/defmethod ::get-comment-thread
|
||||
{::doc/added "1.15"
|
||||
::sm/params schema:get-comment-thread}
|
||||
[cfg {:keys [::rpc/profile-id file-id id share-id] :as params}]
|
||||
(db/run! cfg (fn [{:keys [::db/conn]}]
|
||||
(files/check-comment-permissions! conn profile-id file-id share-id)
|
||||
(let [sql (str "WITH threads AS (" sql:comment-threads ")"
|
||||
"SELECT * FROM threads WHERE id = ? AND file_id = ?")]
|
||||
(-> (db/exec-one! conn [sql profile-id id file-id])
|
||||
(decode-row))))))
|
||||
(some-> (db/exec-one! conn [sql:get-comment-thread profile-id file-id id])
|
||||
(decode-row)))))
|
||||
|
||||
;; --- COMMAND: Retrieve Comments
|
||||
|
||||
|
||||
@@ -7,16 +7,16 @@
|
||||
(ns app.rpc.commands.demo
|
||||
"A demo specific mutations."
|
||||
(:require
|
||||
[app.auth :refer [derive-password]]
|
||||
[app.common.exceptions :as ex]
|
||||
[app.common.time :as ct]
|
||||
[app.config :as cf]
|
||||
[app.db :as db]
|
||||
[app.loggers.audit :as audit]
|
||||
[app.rpc :as-alias rpc]
|
||||
[app.rpc.commands.auth :as auth]
|
||||
[app.rpc.commands.profile :as profile]
|
||||
[app.rpc.doc :as-alias doc]
|
||||
[app.util.services :as sv]
|
||||
[app.util.time :as dt]
|
||||
[buddy.core.codecs :as bc]
|
||||
[buddy.core.nonce :as bn]))
|
||||
|
||||
@@ -39,21 +39,20 @@
|
||||
fullname (str "Demo User " sem)
|
||||
|
||||
password (-> (bn/random-bytes 16)
|
||||
(bc/bytes->b64u)
|
||||
(bc/bytes->b64 true)
|
||||
(bc/bytes->str))
|
||||
|
||||
params {:email email
|
||||
:fullname fullname
|
||||
:is-active true
|
||||
:deleted-at (dt/in-future (cf/get-deletion-delay))
|
||||
:password (profile/derive-password cfg password)
|
||||
:props {}}]
|
||||
|
||||
|
||||
(let [profile (db/tx-run! cfg (fn [{:keys [::db/conn]}]
|
||||
(->> (auth/create-profile! conn params)
|
||||
(auth/create-profile-rels! conn))))]
|
||||
(with-meta {:email email
|
||||
:password password}
|
||||
{::audit/profile-id (:id profile)}))))
|
||||
:is-demo true
|
||||
:deleted-at (ct/in-future (cf/get-deletion-delay))
|
||||
:password (derive-password password)
|
||||
:props {}}
|
||||
profile (db/tx-run! cfg (fn [{:keys [::db/conn] :as cfg}]
|
||||
(->> (auth/create-profile cfg params)
|
||||
(auth/create-profile-rels conn))))]
|
||||
(with-meta {:email email
|
||||
:password password}
|
||||
{::audit/profile-id (:id profile)})))
|
||||
|
||||
|
||||
@@ -7,6 +7,7 @@
|
||||
(ns app.rpc.commands.feedback
|
||||
"A general purpose feedback module."
|
||||
(:require
|
||||
[app.common.data :as d]
|
||||
[app.common.exceptions :as ex]
|
||||
[app.common.schema :as sm]
|
||||
[app.config :as cf]
|
||||
@@ -21,8 +22,11 @@
|
||||
|
||||
(def ^:private schema:send-user-feedback
|
||||
[:map {:title "send-user-feedback"}
|
||||
[:subject [:string {:max 400}]]
|
||||
[:content [:string {:max 2500}]]])
|
||||
[:subject [:string {:max 500}]]
|
||||
[:content [:string {:max 2500}]]
|
||||
[:type {:optional true} :string]
|
||||
[:error-href {:optional true} [:string {:max 2500}]]
|
||||
[:error-report {:optional true} :string]])
|
||||
|
||||
(sv/defmethod ::send-user-feedback
|
||||
{::doc/added "1.18"
|
||||
@@ -39,16 +43,26 @@
|
||||
|
||||
(defn- send-user-feedback!
|
||||
[pool profile params]
|
||||
(let [dest (or (cf/get :user-feedback-destination)
|
||||
;; LEGACY
|
||||
(cf/get :feedback-destination))]
|
||||
(let [destination
|
||||
(or (cf/get :user-feedback-destination)
|
||||
;; LEGACY
|
||||
(cf/get :feedback-destination))
|
||||
|
||||
attachments
|
||||
(d/without-nils
|
||||
{"error-report.txt" (:error-report params)})]
|
||||
|
||||
(eml/send! {::eml/conn pool
|
||||
::eml/factory eml/user-feedback
|
||||
:from dest
|
||||
:to dest
|
||||
:profile profile
|
||||
:from (cf/get :smtp-default-from)
|
||||
:to destination
|
||||
:reply-to (:email profile)
|
||||
:email (:email profile)
|
||||
:subject (:subject params)
|
||||
:content (:content params)})
|
||||
:attachments attachments
|
||||
|
||||
:feedback-subject (:subject params)
|
||||
:feedback-type (:type params "not-specified")
|
||||
:feedback-content (:content params)
|
||||
:feedback-error-href (:error-href params)
|
||||
:profile profile})
|
||||
nil))
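The attachments map above drops the "error-report.txt" entry entirely when no error report was sent; a sketch of that step, assuming d/without-nils simply removes nil-valued entries:

(defn without-nils-sketch
  "Remove map entries whose value is nil."
  [m]
  (into {} (remove (comp nil? val)) m))

;; (without-nils-sketch {"error-report.txt" nil})       => {}
;; (without-nils-sketch {"error-report.txt" "trace..."}) => {"error-report.txt" "trace..."}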
|
||||
|
||||
@@ -16,6 +16,8 @@
|
||||
[app.common.logging :as l]
|
||||
[app.common.schema :as sm]
|
||||
[app.common.schema.desc-js-like :as-alias smdj]
|
||||
[app.common.time :as ct]
|
||||
[app.common.transit :as t]
|
||||
[app.common.types.components-list :as ctkl]
|
||||
[app.common.types.file :as ctf]
|
||||
[app.common.uri :as uri]
|
||||
@@ -23,10 +25,12 @@
|
||||
[app.db :as db]
|
||||
[app.db.sql :as-alias sql]
|
||||
[app.features.fdata :as feat.fdata]
|
||||
[app.features.file-migrations :as feat.fmigr]
|
||||
[app.features.logical-deletion :as ldel]
|
||||
[app.http.sse :as sse]
|
||||
[app.loggers.audit :as-alias audit]
|
||||
[app.loggers.webhooks :as-alias webhooks]
|
||||
[app.msgbus :as mbus]
|
||||
[app.redis :as rds]
|
||||
[app.rpc :as-alias rpc]
|
||||
[app.rpc.commands.projects :as projects]
|
||||
[app.rpc.commands.teams :as teams]
|
||||
@@ -35,12 +39,11 @@
|
||||
[app.rpc.helpers :as rph]
|
||||
[app.rpc.permissions :as perms]
|
||||
[app.util.blob :as blob]
|
||||
[app.util.events :as events]
|
||||
[app.util.pointer-map :as pmap]
|
||||
[app.util.services :as sv]
|
||||
[app.util.time :as dt]
|
||||
[app.worker :as wrk]
|
||||
[cuerdas.core :as str]
|
||||
[promesa.exec :as px]))
|
||||
[cuerdas.core :as str]))
|
||||
|
||||
;; --- FEATURES
|
||||
|
||||
@@ -52,15 +55,13 @@
|
||||
;; --- HELPERS
|
||||
|
||||
(def long-cache-duration
|
||||
(dt/duration {:days 7}))
|
||||
(ct/duration {:days 7}))
|
||||
|
||||
(defn decode-row
|
||||
[{:keys [data changes features] :as row}]
|
||||
[{:keys [features] :as row}]
|
||||
(when row
|
||||
(cond-> row
|
||||
features (assoc :features (db/decode-pgarray features #{}))
|
||||
changes (assoc :changes (blob/decode changes))
|
||||
data (assoc :data (blob/decode data)))))
|
||||
(db/pgarray? features) (assoc :features (db/decode-pgarray features #{})))))
|
||||
|
||||
(defn check-version!
|
||||
[file]
|
||||
@@ -78,13 +79,16 @@
|
||||
|
||||
;; --- FILE PERMISSIONS
|
||||
|
||||
|
||||
(def ^:private sql:file-permissions
|
||||
"select fpr.is_owner,
|
||||
fpr.is_admin,
|
||||
fpr.can_edit
|
||||
from file_profile_rel as fpr
|
||||
inner join file as f on (f.id = fpr.file_id)
|
||||
where fpr.file_id = ?
|
||||
and fpr.profile_id = ?
|
||||
and f.deleted_at is null
|
||||
union all
|
||||
select tpr.is_owner,
|
||||
tpr.is_admin,
|
||||
@@ -94,6 +98,7 @@
|
||||
inner join file as f on (p.id = f.project_id)
|
||||
where f.id = ?
|
||||
and tpr.profile_id = ?
|
||||
and f.deleted_at is null
|
||||
union all
|
||||
select ppr.is_owner,
|
||||
ppr.is_admin,
|
||||
@@ -101,7 +106,8 @@
|
||||
from project_profile_rel as ppr
|
||||
inner join file as f on (f.project_id = ppr.project_id)
|
||||
where f.id = ?
|
||||
and ppr.profile_id = ?")
|
||||
and ppr.profile_id = ?
|
||||
and f.deleted_at is null")
|
||||
|
||||
(defn get-file-permissions
|
||||
[conn profile-id file-id]
|
||||
@@ -187,15 +193,15 @@
|
||||
[:name [:string {:max 250}]]
|
||||
[:revn [::sm/int {:min 0}]]
|
||||
[:vern [::sm/int {:min 0}]]
|
||||
[:modified-at ::dt/instant]
|
||||
[:modified-at ::ct/inst]
|
||||
[:is-shared ::sm/boolean]
|
||||
[:project-id ::sm/uuid]
|
||||
[:created-at ::dt/instant]
|
||||
[:created-at ::ct/inst]
|
||||
[:data {:optional true} ::sm/any]])
|
||||
|
||||
(def schema:permissions-mixin
|
||||
[:map {:title "PermissionsMixin"}
|
||||
[:permissions ::perms/permissions]])
|
||||
[:permissions perms/schema:permissions]])
|
||||
|
||||
(def schema:file-with-permissions
|
||||
[:merge {:title "FileWithPermissions"}
|
||||
@@ -206,93 +212,11 @@
|
||||
schema:get-file
|
||||
[:map {:title "get-file"}
|
||||
[:features {:optional true} ::cfeat/features]
|
||||
[:id ::sm/uuid]
|
||||
[:project-id {:optional true} ::sm/uuid]])
|
||||
|
||||
(defn- migrate-file
|
||||
[{:keys [::db/conn] :as cfg} {:keys [id] :as file} {:keys [read-only?]}]
|
||||
(binding [pmap/*load-fn* (partial feat.fdata/load-pointer cfg id)
|
||||
pmap/*tracked* (pmap/create-tracked)]
|
||||
(let [libs (delay (bfc/get-resolved-file-libraries cfg file))
|
||||
;; To avoid the unnecessary overhead of creating multiple pointers
;; and handling objects maps internally in their worst case (when
;; probably all shapes and all pointers will be read in any case),
;; we just realize/resolve them before applying the migration to
;; the file
|
||||
file (-> file
|
||||
(update :data feat.fdata/process-pointers deref)
|
||||
(update :data feat.fdata/process-objects (partial into {}))
|
||||
(fmg/migrate-file libs))]
|
||||
|
||||
(if (or read-only? (db/read-only? conn))
|
||||
file
|
||||
(let [;; When the file is migrated, we break the rule of not
;; performing mutations on get operations and update the file
;; with all migrations applied
|
||||
file (if (contains? (:features file) "fdata/objects-map")
|
||||
(feat.fdata/enable-objects-map file)
|
||||
file)
|
||||
file (if (contains? (:features file) "fdata/pointer-map")
|
||||
(feat.fdata/enable-pointer-map file)
|
||||
file)]
|
||||
|
||||
(db/update! conn :file
|
||||
{:data (blob/encode (:data file))
|
||||
:version (:version file)
|
||||
:features (db/create-array conn "text" (:features file))}
|
||||
{:id id}
|
||||
{::db/return-keys false})
|
||||
|
||||
(when (contains? (:features file) "fdata/pointer-map")
|
||||
(feat.fdata/persist-pointers! cfg id))
|
||||
|
||||
(feat.fmigr/upsert-migrations! conn file)
|
||||
(feat.fmigr/resolve-applied-migrations cfg file))))))
|
||||
|
||||
(defn get-file
|
||||
[{:keys [::db/conn ::wrk/executor] :as cfg} id
|
||||
& {:keys [project-id
|
||||
migrate?
|
||||
include-deleted?
|
||||
lock-for-update?
|
||||
preload-pointers?]
|
||||
:or {include-deleted? false
|
||||
lock-for-update? false
|
||||
migrate? true
|
||||
preload-pointers? false}
|
||||
:as options}]
|
||||
|
||||
(assert (db/connection? conn) "expected cfg with valid connection")
|
||||
|
||||
(let [params (merge {:id id}
|
||||
(when (some? project-id)
|
||||
{:project-id project-id}))
|
||||
file (->> (db/get conn :file params
|
||||
{::db/check-deleted (not include-deleted?)
|
||||
::db/remove-deleted (not include-deleted?)
|
||||
::sql/for-update lock-for-update?})
|
||||
(feat.fmigr/resolve-applied-migrations cfg)
|
||||
(feat.fdata/resolve-file-data cfg))
|
||||
|
||||
;; NOTE: we perform the file decoding in a separate thread
;; because it involves heavy, synchronous operations for
;; decoding the file body that are not very friendly with
;; virtual threads.
|
||||
file (px/invoke! executor #(decode-row file))
|
||||
|
||||
file (if (and migrate? (fmg/need-migration? file))
|
||||
(migrate-file cfg file options)
|
||||
file)]
|
||||
|
||||
(if preload-pointers?
|
||||
(binding [pmap/*load-fn* (partial feat.fdata/load-pointer cfg id)]
|
||||
(update file :data feat.fdata/process-pointers deref))
|
||||
|
||||
file)))
|
||||
[:id ::sm/uuid]])
|
||||
|
||||
(defn get-minimal-file
|
||||
[cfg id & {:as opts}]
|
||||
(let [opts (assoc opts ::sql/columns [:id :modified-at :deleted-at :revn :vern :data-ref-id :data-backend])]
|
||||
(let [opts (assoc opts ::sql/columns [:id :modified-at :deleted-at :revn :vern])]
|
||||
(db/get cfg :file {:id id} opts)))
|
||||
|
||||
(defn- get-minimal-file-with-perms
|
||||
@@ -304,7 +228,7 @@
|
||||
(defn get-file-etag
|
||||
[{:keys [::rpc/profile-id]} {:keys [modified-at revn vern permissions]}]
|
||||
(str profile-id "/" revn "/" vern "/" (hash fmg/available-migrations) "/"
|
||||
(dt/format-instant modified-at :iso)
|
||||
(ct/format-inst modified-at :iso)
|
||||
"/"
|
||||
(uri/map->query-string permissions)))
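For orientation, the resulting etag is just a slash-separated string of those components; a sketch with placeholder values (the real profile id, migrations hash and permissions query differ):

(defn file-etag-sketch
  "Join the etag components the same way as get-file-etag."
  [profile-id revn vern migrations-hash modified-at perms-query]
  (str profile-id "/" revn "/" vern "/" migrations-hash "/"
       modified-at "/" perms-query))

;; (file-etag-sketch "9c8f0000-0000-0000-0000-000000000000"
;;                   42 3 -123456789 "2025-01-01T10:20:30.000Z" "can-edit=true")
;; => "9c8f0000-0000-0000-0000-000000000000/42/3/-123456789/2025-01-01T10:20:30.000Z/can-edit=true"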
|
||||
|
||||
@@ -332,23 +256,32 @@
|
||||
:project-id project-id
|
||||
:file-id id)
|
||||
|
||||
file (-> (get-file cfg id :project-id project-id)
|
||||
file (-> (bfc/get-file cfg id
|
||||
:project-id project-id)
|
||||
(assoc :permissions perms)
|
||||
(assoc :team-id (:id team))
|
||||
(check-version!))]
|
||||
|
||||
(-> (cfeat/get-team-enabled-features cf/flags team)
|
||||
(cfeat/check-client-features! (:features params))
|
||||
(cfeat/check-file-features! (:features file)))
|
||||
|
||||
;; This operation is needed for backward compatibility with frontends that
;; do not support the pointer-map resolution mechanism; this just resolves
;; the pointers on the backend and returns a complete file.
|
||||
(if (and (contains? (:features file) "fdata/pointer-map")
|
||||
(not (contains? (:features params) "fdata/pointer-map")))
|
||||
(binding [pmap/*load-fn* (partial feat.fdata/load-pointer cfg id)]
|
||||
(update file :data feat.fdata/process-pointers deref))
|
||||
file))))
|
||||
(as-> file file
|
||||
;; This operation is needed for backward compatibility with
;; frontends that do not support the pointer-map resolution
;; mechanism; this just resolves the pointers on the backend
;; and returns a complete file
|
||||
(if (and (contains? (:features file) "fdata/pointer-map")
|
||||
(not (contains? (:features params) "fdata/pointer-map")))
|
||||
(feat.fdata/realize-pointers cfg file)
|
||||
file)
|
||||
|
||||
;; This operation is needed for backward compatibility with
;; frontends that do not support the objects-map mechanism; this
;; just converts all objects-map instances to plain maps
|
||||
(if (and (contains? (:features file) "fdata/objects-map")
|
||||
(not (contains? (:features params) "fdata/objects-map")))
|
||||
(feat.fdata/realize-objects cfg file)
|
||||
file)))))
|
||||
|
||||
;; --- COMMAND QUERY: get-file-fragment (by id)
|
||||
|
||||
@@ -356,8 +289,8 @@
|
||||
[:map {:title "FileFragment"}
|
||||
[:id ::sm/uuid]
|
||||
[:file-id ::sm/uuid]
|
||||
[:created-at ::dt/instant]
|
||||
[:content any?]])
|
||||
[:created-at ::ct/inst]
|
||||
[:content ::sm/any]])
|
||||
|
||||
(def schema:get-file-fragment
|
||||
[:map {:title "get-file-fragment"}
|
||||
@@ -367,10 +300,8 @@
|
||||
|
||||
(defn- get-file-fragment
|
||||
[cfg file-id fragment-id]
|
||||
(let [resolve-file-data (partial feat.fdata/resolve-file-data cfg)]
|
||||
(some-> (db/get cfg :file-data-fragment {:file-id file-id :id fragment-id})
|
||||
(resolve-file-data)
|
||||
(update :data blob/decode))))
|
||||
(some-> (db/get cfg :file-data {:file-id file-id :id fragment-id :type "fragment"})
|
||||
(update :data blob/decode)))
|
||||
|
||||
(sv/defmethod ::get-file-fragment
|
||||
"Retrieve a file fragment by its ID. Only authenticated users."
|
||||
@@ -424,9 +355,8 @@
|
||||
::sm/params schema:get-project-files
|
||||
::sm/result schema:files}
|
||||
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id project-id]}]
|
||||
(dm/with-open [conn (db/open pool)]
|
||||
(projects/check-read-permissions! conn profile-id project-id)
|
||||
(get-project-files conn project-id)))
|
||||
(projects/check-read-permissions! pool profile-id project-id)
|
||||
(get-project-files pool project-id))
|
||||
|
||||
;; --- COMMAND QUERY: has-file-libraries
|
||||
|
||||
@@ -460,6 +390,39 @@
|
||||
(:has-libraries row)))
|
||||
|
||||
|
||||
;; --- COMMAND QUERY: get-library-usage
|
||||
|
||||
|
||||
(declare get-library-usage)
|
||||
|
||||
(def schema:get-library-usage
|
||||
[:map {:title "get-library-usage"}
|
||||
[:file-id ::sm/uuid]])
|
||||
|
||||
(sv/defmethod ::get-library-usage
|
||||
"Gets the number of files that use the specified library."
|
||||
{::doc/added "2.10.0"
|
||||
::sm/params schema:get-library-usage
|
||||
::sm/result ::sm/int}
|
||||
[{:keys [::db/pool] :as cfg} {:keys [::rpc/profile-id file-id]}]
|
||||
(dm/with-open [conn (db/open pool)]
|
||||
(check-read-permissions! pool profile-id file-id)
|
||||
(get-library-usage conn file-id)))
|
||||
|
||||
(def ^:private sql:get-library-usage
|
||||
"SELECT COUNT(*) AS used
|
||||
FROM file_library_rel AS flr
|
||||
JOIN file AS fl ON (flr.library_file_id = fl.id)
|
||||
WHERE flr.library_file_id = ?::uuid
|
||||
AND (fl.deleted_at IS NULL OR
|
||||
fl.deleted_at > now())")
|
||||
|
||||
(defn- get-library-usage
|
||||
[conn file-id]
|
||||
(let [row (db/exec-one! conn [sql:get-library-usage file-id])]
|
||||
{:used-in (:used row)}))
|
||||
|
||||
|
||||
;; --- QUERY COMMAND: get-page
|
||||
|
||||
(defn- prune-objects
|
||||
@@ -495,7 +458,7 @@
|
||||
|
||||
(let [perms (get-permissions conn profile-id file-id share-id)
|
||||
|
||||
file (get-file cfg file-id :read-only? true)
|
||||
file (bfc/get-file cfg file-id :read-only? true)
|
||||
|
||||
proj (db/get conn :project {:id (:project-id file)})
|
||||
|
||||
@@ -551,66 +514,136 @@
|
||||
|
||||
;; --- COMMAND QUERY: get-team-shared-files
|
||||
|
||||
(defn- get-components-with-variants
|
||||
"Return a set with all the variant-ids, and a list of components, but
|
||||
with only one component by variant.
|
||||
|
||||
Returns a vector of unique components and a set of all variant ids"
|
||||
[fdata]
|
||||
(loop [variant-ids #{}
|
||||
components' []
|
||||
components (ctkl/components-seq fdata)]
|
||||
(if-let [{:keys [variant-id] :as component} (first components)]
|
||||
(cond
|
||||
(nil? variant-id)
|
||||
(recur variant-ids
|
||||
(conj components' component)
|
||||
(rest components))
|
||||
|
||||
(contains? variant-ids variant-id)
|
||||
(recur variant-ids
|
||||
components'
|
||||
(rest components))
|
||||
|
||||
:else
|
||||
(recur (conj variant-ids variant-id)
|
||||
(conj components' component)
|
||||
(rest components)))
|
||||
|
||||
[(d/index-by :id components') variant-ids])))
|
||||
|
||||
(defn- sample-assets
|
||||
[assets limit]
|
||||
(let [assets (into [] (map val) assets)]
|
||||
{:count (count assets)
|
||||
:sample (->> assets
|
||||
(sort-by #(str/lower (:name %)))
|
||||
(into [] (take limit)))}))
|
||||
|
||||
(defn- calculate-library-summary
|
||||
"Calculate the file library summary (counters and samples)"
|
||||
[{:keys [data] :as file}]
|
||||
(let [load-objects
|
||||
(fn [sample]
|
||||
(mapv #(ctf/load-component-objects data %) sample))
|
||||
|
||||
[components variant-ids]
|
||||
(get-components-with-variants data)
|
||||
|
||||
components-sample
|
||||
(-> (sample-assets components 4)
|
||||
(update :sample load-objects))]
|
||||
|
||||
{:components components-sample
|
||||
:variants {:count (count variant-ids)}
|
||||
:colors (sample-assets (:colors data) 3)
|
||||
:typographies (sample-assets (:typographies data) 3)}))
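The sampling step above just counts everything and keeps a small name-sorted sample; a tiny self-contained sketch on a hypothetical assets map (integer keys stand in for asset ids):

(require '[clojure.string :as string])

(defn sample-assets-sketch
  "Count the assets and keep a name-sorted sample of at most `limit`."
  [assets limit]
  (let [assets (into [] (map val) assets)]
    {:count  (count assets)
     :sample (->> assets
                  (sort-by #(string/lower-case (:name %)))
                  (take limit)
                  (into []))}))

;; (sample-assets-sketch {1 {:name "Zeta"} 2 {:name "alpha"} 3 {:name "Beta"}} 2)
;; => {:count 3, :sample [{:name "alpha"} {:name "Beta"}]}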
|
||||
|
||||
(def ^:private file-summary-cache-key-ttl
|
||||
(ct/duration {:days 30}))
|
||||
|
||||
(def file-summary-cache-key-prefix
|
||||
"penpot.library-summary.")
|
||||
|
||||
(defn- get-file-with-summary
|
||||
"Get a file without data with a summary of its local library content"
|
||||
[cfg id]
|
||||
(let [get-from-cache
|
||||
(fn [{:keys [::rds/conn]} cache-key]
|
||||
(when-let [result (rds/get conn cache-key)]
|
||||
(let [file (bfc/get-file cfg id :load-data? false)
|
||||
summary (t/decode-str result)]
|
||||
(-> (assoc file :library-summary summary)
|
||||
(dissoc :data)))))
|
||||
|
||||
calculate-from-db
|
||||
(fn []
|
||||
(let [file (bfc/get-file cfg id)
|
||||
result (binding [pmap/*load-fn* (partial feat.fdata/load-pointer cfg id)]
|
||||
(calculate-library-summary file))]
|
||||
(-> file
|
||||
(assoc :library-summary result)
|
||||
(dissoc :legacy-data)
|
||||
(dissoc :data))))
|
||||
|
||||
persist-to-cache
|
||||
(fn [{:keys [::rds/conn]} data cache-key]
|
||||
(rds/set conn cache-key (t/encode-str data)
|
||||
(rds/build-set-args {:ex file-summary-cache-key-ttl})))]
|
||||
|
||||
(if (contains? cf/flags :redis-cache)
|
||||
(let [cache-key (str file-summary-cache-key-prefix id)]
|
||||
(or (rds/run! cfg get-from-cache cache-key)
|
||||
(let [file (calculate-from-db)]
|
||||
(rds/run! cfg persist-to-cache (:library-summary file) cache-key)
|
||||
file)))
|
||||
(calculate-from-db))))
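The Redis branch above keys each summary by file id under the prefix defined earlier and stores it with the 30-day TTL; a sketch of the key convention with a placeholder id:

(def file-summary-cache-key-prefix "penpot.library-summary.")

(defn summary-cache-key-sketch
  "Build the Redis key for a file's cached library summary."
  [file-id]
  (str file-summary-cache-key-prefix file-id))

;; (summary-cache-key-sketch #uuid "00000000-0000-0000-0000-0000000000aa")
;; => "penpot.library-summary.00000000-0000-0000-0000-0000000000aa"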
|
||||
|
||||
(def ^:private sql:team-shared-files
|
||||
"select f.id,
|
||||
f.revn,
|
||||
f.vern,
|
||||
f.data,
|
||||
f.project_id,
|
||||
f.created_at,
|
||||
f.modified_at,
|
||||
f.data_backend,
|
||||
f.data_ref_id,
|
||||
f.name,
|
||||
f.version,
|
||||
f.is_shared,
|
||||
ft.media_id,
|
||||
p.team_id
|
||||
from file as f
|
||||
inner join project as p on (p.id = f.project_id)
|
||||
left join file_thumbnail as ft on (ft.file_id = f.id and ft.revn = f.revn and ft.deleted_at is null)
|
||||
where f.is_shared = true
|
||||
and f.deleted_at is null
|
||||
and p.deleted_at is null
|
||||
and p.team_id = ?
|
||||
order by f.modified_at desc")
|
||||
"WITH file_library_agg AS (
|
||||
SELECT flr.file_id,
|
||||
coalesce(array_agg(flr.library_file_id) filter (WHERE flr.library_file_id IS NOT NULL), '{}') AS library_file_ids
|
||||
FROM file_library_rel flr
|
||||
GROUP BY flr.file_id
|
||||
)
|
||||
|
||||
(defn- get-library-summary
|
||||
[cfg {:keys [id data] :as file}]
|
||||
(letfn [(assets-sample [assets limit]
|
||||
(let [sorted-assets (->> (vals assets)
|
||||
(sort-by #(str/lower (:name %))))]
|
||||
{:count (count sorted-assets)
|
||||
:sample (into [] (take limit sorted-assets))}))]
|
||||
|
||||
(binding [pmap/*load-fn* (partial feat.fdata/load-pointer cfg id)]
|
||||
(let [load-objects (fn [component]
|
||||
(ctf/load-component-objects data component))
|
||||
components-sample (-> (assets-sample (ctkl/components data) 4)
|
||||
(update :sample #(mapv load-objects %)))]
|
||||
{:components components-sample
|
||||
:media (assets-sample (:media data) 3)
|
||||
:colors (assets-sample (:colors data) 3)
|
||||
:typographies (assets-sample (:typographies data) 3)}))))
|
||||
SELECT f.id,
|
||||
fla.library_file_ids,
|
||||
ft.media_id AS thumbnail_id
|
||||
FROM file AS f
|
||||
INNER JOIN project AS p ON (p.id = f.project_id)
|
||||
LEFT JOIN file_thumbnail AS ft ON (ft.file_id = f.id AND ft.revn = f.revn AND ft.deleted_at IS NULL)
|
||||
LEFT JOIN file_library_agg AS fla ON (fla.file_id = f.id)
|
||||
WHERE f.is_shared = true
|
||||
AND f.deleted_at IS NULL
|
||||
AND p.deleted_at IS NULL
|
||||
AND p.team_id = ?
|
||||
ORDER BY f.modified_at DESC")
|
||||
|
||||
(defn- get-team-shared-files
|
||||
[{:keys [::db/conn] :as cfg} {:keys [team-id profile-id]}]
|
||||
(teams/check-read-permissions! conn profile-id team-id)
|
||||
(->> (db/exec! conn [sql:team-shared-files team-id])
|
||||
(into #{} (comp
|
||||
;; NOTE: this decode operation is a quick-fix workaround; it
;; should be replaced with a more efficient implementation,
;; since for now it loads all the files in memory.
|
||||
(map (partial bfc/decode-file cfg))
|
||||
(map (fn [row]
|
||||
(if-let [media-id (:media-id row)]
|
||||
(-> row
|
||||
(dissoc :media-id)
|
||||
(assoc :thumbnail-id media-id))
|
||||
(dissoc row :media-id))))
|
||||
(map #(assoc % :library-summary (get-library-summary cfg %)))
|
||||
(map #(dissoc % :data))))))
|
||||
|
||||
(let [process-row
|
||||
(fn [{:keys [id library-file-ids]}]
|
||||
(let [file (get-file-with-summary cfg id)]
|
||||
(assoc file :library-file-ids (db/decode-pgarray library-file-ids #{}))))
|
||||
|
||||
xform
|
||||
(map process-row)]
|
||||
|
||||
(->> (db/plan conn [sql:team-shared-files team-id] {:fetch-size 1})
|
||||
(transduce xform conj #{}))))
|
||||
|
||||
(def ^:private schema:get-team-shared-files
|
||||
[:map {:title "get-team-shared-files"}
|
||||
@@ -623,6 +656,28 @@
|
||||
[cfg {:keys [::rpc/profile-id] :as params}]
|
||||
(db/tx-run! cfg get-team-shared-files (assoc params :profile-id profile-id)))
|
||||
|
||||
;; --- COMMAND QUERY: get-file-summary
|
||||
|
||||
(defn- get-file-summary
|
||||
[cfg id]
|
||||
(let [file (get-file-with-summary cfg id)]
|
||||
(-> (:library-summary file)
|
||||
(assoc :name (:name file)))))
|
||||
|
||||
(def ^:private
|
||||
schema:get-file-summary
|
||||
[:map {:title "get-file-summary"}
|
||||
[:id ::sm/uuid]])
|
||||
|
||||
(sv/defmethod ::get-file-summary
|
||||
"Retrieve a file summary by its ID. Only authenticated users."
|
||||
{::doc/added "1.20"
|
||||
::sm/params schema:get-file-summary}
|
||||
[cfg {:keys [::rpc/profile-id id] :as params}]
|
||||
(check-read-permissions! cfg profile-id id)
|
||||
(get-file-summary cfg id))
|
||||
|
||||
|
||||
;; --- COMMAND QUERY: get-file-libraries
|
||||
|
||||
(def ^:private schema:get-file-libraries
|
||||
@@ -711,46 +766,61 @@
|
||||
(get-team-recent-files conn team-id)))
|
||||
|
||||
|
||||
;; --- COMMAND QUERY: get-file-summary
|
||||
;; --- COMMAND QUERY: get-team-deleted-files
|
||||
|
||||
(defn- get-file-summary
|
||||
[{:keys [::db/conn] :as cfg} {:keys [profile-id id project-id] :as params}]
|
||||
(check-read-permissions! conn profile-id id)
|
||||
(let [team (teams/get-team conn
|
||||
:profile-id profile-id
|
||||
:project-id project-id
|
||||
:file-id id)
|
||||
(def sql:team-deleted-files
|
||||
"WITH deleted_files AS (
|
||||
SELECT f.id,
|
||||
f.revn,
|
||||
f.vern,
|
||||
f.project_id,
|
||||
f.created_at,
|
||||
f.modified_at,
|
||||
f.name,
|
||||
f.is_shared,
|
||||
f.deleted_at AS will_be_deleted_at,
|
||||
ft.media_id AS thumbnail_id,
|
||||
row_number() OVER w AS row_num,
|
||||
p.team_id
|
||||
FROM file AS f
|
||||
INNER JOIN project AS p ON (p.id = f.project_id)
|
||||
LEFT JOIN file_thumbnail AS ft on (ft.file_id = f.id
|
||||
AND ft.revn = f.revn
|
||||
AND ft.deleted_at is null)
|
||||
WHERE p.team_id = ?
|
||||
AND (p.deleted_at > ?::timestamptz OR
|
||||
f.deleted_at > ?::timestamptz)
|
||||
WINDOW w AS (PARTITION BY f.project_id
|
||||
ORDER BY f.modified_at DESC)
|
||||
ORDER BY f.modified_at DESC
|
||||
)
|
||||
SELECT * FROM deleted_files")
|
||||
|
||||
file (get-file cfg id
|
||||
:project-id project-id
|
||||
:read-only? true)]
|
||||
(defn get-team-deleted-files
|
||||
[conn team-id]
|
||||
(let [now (ct/now)]
|
||||
(db/exec! conn [sql:team-deleted-files team-id now now])))
|
||||
|
||||
(-> (cfeat/get-team-enabled-features cf/flags team)
|
||||
(cfeat/check-client-features! (:features params))
|
||||
(cfeat/check-file-features! (:features file)))
|
||||
|
||||
(binding [pmap/*load-fn* (partial feat.fdata/load-pointer cfg id)]
|
||||
{:name (:name file)
|
||||
:components-count (count (ctkl/components-seq (:data file)))
|
||||
:graphics-count (count (get-in file [:data :media] []))
|
||||
:colors-count (count (get-in file [:data :colors] []))
|
||||
:typography-count (count (get-in file [:data :typographies] []))})))
|
||||
|
||||
(sv/defmethod ::get-file-summary
|
||||
"Retrieve a file summary by its ID. Only authenticated users."
|
||||
{::doc/added "1.20"
|
||||
::sm/params schema:get-file}
|
||||
[cfg {:keys [::rpc/profile-id] :as params}]
|
||||
(db/tx-run! cfg get-file-summary (assoc params :profile-id profile-id)))
|
||||
(def ^:private schema:get-team-deleted-files
|
||||
[:map {:title "get-team-deleted-files"}
|
||||
[:team-id ::sm/uuid]])
|
||||
|
||||
(sv/defmethod ::get-team-deleted-files
|
||||
{::doc/added "2.12"
|
||||
::sm/params schema:get-team-deleted-files}
|
||||
[cfg {:keys [::rpc/profile-id team-id]}]
|
||||
(db/run! cfg (fn [{:keys [::db/conn]}]
|
||||
(teams/check-read-permissions! conn profile-id team-id)
|
||||
(get-team-deleted-files conn team-id))))
|
||||
|
||||
;; --- COMMAND QUERY: get-file-info
|
||||
|
||||
|
||||
(defn- get-file-info
|
||||
[{:keys [::db/conn] :as cfg} {:keys [id] :as params}]
|
||||
(db/get* conn :file
|
||||
{:id id}
|
||||
{::sql/columns [:id]}))
|
||||
(db/get conn :file
|
||||
{:id id}
|
||||
{::sql/columns [:id :deleted-at]}))
|
||||
|
||||
(sv/defmethod ::get-file-info
|
||||
"Retrieve minimal file info by its ID."
|
||||
@@ -770,7 +840,7 @@
|
||||
[conn {:keys [id name]}]
|
||||
(db/update! conn :file
|
||||
{:name name
|
||||
:modified-at (dt/now)}
|
||||
:modified-at (ct/now)}
|
||||
{:id id}
|
||||
{::db/return-keys true}))
|
||||
|
||||
@@ -783,8 +853,8 @@
|
||||
[:id ::sm/uuid]
|
||||
[:project-id ::sm/uuid]
|
||||
[:name [:string {:max 250}]]
|
||||
[:created-at ::dt/instant]
|
||||
[:modified-at ::dt/instant]]
|
||||
[:created-at ::ct/inst]
|
||||
[:modified-at ::ct/inst]]
|
||||
|
||||
::sm/params
|
||||
[:map {:title "RenameFileParams"}
|
||||
@@ -795,8 +865,8 @@
|
||||
[:map {:title "SimplifiedFile"}
|
||||
[:id ::sm/uuid]
|
||||
[:name [:string {:max 250}]]
|
||||
[:created-at ::dt/instant]
|
||||
[:modified-at ::dt/instant]]
|
||||
[:created-at ::ct/inst]
|
||||
[:modified-at ::ct/inst]]
|
||||
|
||||
::db/transaction true}
|
||||
[{:keys [::db/conn] :as cfg} {:keys [::rpc/profile-id id] :as params}]
|
||||
@@ -810,7 +880,7 @@
|
||||
|
||||
;; --- MUTATION COMMAND: set-file-shared
|
||||
|
||||
(def sql:get-referenced-files
|
||||
(def ^:private sql:get-referenced-files
|
||||
"SELECT f.id
|
||||
FROM file_library_rel AS flr
|
||||
INNER JOIN file AS f ON (f.id = flr.file_id)
|
||||
@@ -821,56 +891,51 @@
|
||||
(defn- absorb-library-by-file!
|
||||
[cfg ldata file-id]
|
||||
|
||||
(dm/assert!
|
||||
"expected cfg with valid connection"
|
||||
(db/connection-map? cfg))
|
||||
(assert (db/connection-map? cfg)
|
||||
"expected cfg with valid connection")
|
||||
|
||||
(binding [pmap/*load-fn* (partial feat.fdata/load-pointer cfg file-id)
|
||||
pmap/*tracked* (pmap/create-tracked)]
|
||||
(let [file (-> (get-file cfg file-id
|
||||
:include-deleted? true
|
||||
:lock-for-update? true)
|
||||
(let [file (-> (bfc/get-file cfg file-id
|
||||
:include-deleted? true
|
||||
:lock-for-update? true)
|
||||
(update :data ctf/absorb-assets ldata))]
|
||||
|
||||
(l/trc :hint "library absorbed"
|
||||
:library-id (str (:id ldata))
|
||||
:file-id (str file-id))
|
||||
|
||||
(db/update! cfg :file
|
||||
{:revn (inc (:revn file))
|
||||
:data (blob/encode (:data file))
|
||||
:modified-at (dt/now)
|
||||
:has-media-trimmed false}
|
||||
{:id file-id})
|
||||
|
||||
(feat.fdata/persist-pointers! cfg file-id))))
|
||||
(bfc/update-file! cfg {:id file-id
|
||||
:migrations (:migrations file)
|
||||
:revn (inc (:revn file))
|
||||
:data (:data file)
|
||||
:modified-at (ct/now)
|
||||
:has-media-trimmed false}))))
|
||||
|
||||
(defn- absorb-library
|
||||
"Find all files using a shared library, and absorb all library assets
|
||||
into the file local libraries"
|
||||
[cfg {:keys [id] :as library}]
|
||||
[cfg {:keys [id data] :as library}]
|
||||
|
||||
(dm/assert!
|
||||
"expected cfg with valid connection"
|
||||
(db/connection-map? cfg))
|
||||
(assert (db/connection-map? cfg)
|
||||
"expected cfg with valid connection")
|
||||
|
||||
(let [ldata (binding [pmap/*load-fn* (partial feat.fdata/load-pointer cfg id)]
|
||||
(-> library :data (feat.fdata/process-pointers deref)))
|
||||
ids (->> (db/exec! cfg [sql:get-referenced-files id])
|
||||
(map :id))]
|
||||
(let [ids (->> (db/exec! cfg [sql:get-referenced-files id])
|
||||
(sequence bfc/xf-map-id))]
|
||||
|
||||
(l/trc :hint "absorbing library"
|
||||
:library-id (str id)
|
||||
:files (str/join "," (map str ids)))
|
||||
|
||||
(run! (partial absorb-library-by-file! cfg ldata) ids)
|
||||
(run! (partial absorb-library-by-file! cfg data) ids)
|
||||
library))
|
||||
|
||||
(defn absorb-library!
|
||||
[{:keys [::db/conn] :as cfg} id]
|
||||
(let [file (-> (get-file cfg id
|
||||
:lock-for-update? true
|
||||
:include-deleted? true)
|
||||
(let [file (-> (bfc/get-file cfg id
|
||||
:realize? true
|
||||
:lock-for-update? true
|
||||
:include-deleted? true)
|
||||
(check-version!))
|
||||
|
||||
proj (db/get* conn :project {:id (:project-id file)}
|
||||
@@ -900,7 +965,7 @@
|
||||
(db/delete! conn :file-library-rel {:library-file-id id})
|
||||
(db/update! conn :file
|
||||
{:is-shared false
|
||||
:modified-at (dt/now)}
|
||||
:modified-at (ct/now)}
|
||||
{:id id})
|
||||
(select-keys file [:id :name :is-shared]))
|
||||
|
||||
@@ -909,7 +974,7 @@
|
||||
(let [file (assoc file :is-shared true)]
|
||||
(db/update! conn :file
|
||||
{:is-shared true
|
||||
:modified-at (dt/now)}
|
||||
:modified-at (ct/now)}
|
||||
{:id id})
|
||||
file)
|
||||
|
||||
@@ -945,7 +1010,7 @@
|
||||
[conn team file-id]
|
||||
(let [delay (ldel/get-deletion-delay team)
|
||||
file (db/update! conn :file
|
||||
{:deleted-at (dt/in-future delay)}
|
||||
{:deleted-at (ct/in-future delay)}
|
||||
{:id file-id}
|
||||
{::db/return-keys [:id :name :is-shared :deleted-at
|
||||
:project-id :created-at :modified-at]})]
|
||||
@@ -967,7 +1032,14 @@
|
||||
(let [team (teams/get-team conn
|
||||
:profile-id profile-id
|
||||
:file-id id)
|
||||
file (mark-file-deleted conn team id)]
|
||||
file (mark-file-deleted conn team id)
|
||||
msgbus (::mbus/msgbus cfg)]
|
||||
|
||||
(mbus/pub! msgbus
|
||||
:topic id
|
||||
:message {:type :file-deleted
|
||||
:file-id id
|
||||
:profile-id profile-id})
|
||||
|
||||
(rph/with-meta (rph/wrap)
|
||||
{::audit/props {:project-id (:project-id file)
|
||||
@@ -1000,6 +1072,7 @@
|
||||
[:library-id ::sm/uuid]])
|
||||
|
||||
(sv/defmethod ::link-file-to-library
|
||||
"Link a file to a library. Returns the recursive list of libraries used by that library"
|
||||
{::doc/added "1.17"
|
||||
::webhooks/event? true
|
||||
::sm/params schema:link-file-to-library}
|
||||
@@ -1013,7 +1086,8 @@
|
||||
(fn [{:keys [::db/conn]}]
|
||||
(check-edition-permissions! conn profile-id file-id)
|
||||
(check-edition-permissions! conn profile-id library-id)
|
||||
(link-file-to-library conn params))))
|
||||
(link-file-to-library conn params)
|
||||
(bfc/get-libraries cfg [library-id]))))
|
||||
|
||||
;; --- MUTATION COMMAND: unlink-file-from-library
|
||||
|
||||
@@ -1043,7 +1117,7 @@
|
||||
(defn update-sync
|
||||
[conn {:keys [file-id library-id] :as params}]
|
||||
(db/update! conn :file-library-rel
|
||||
{:synced-at (dt/now)}
|
||||
{:synced-at (ct/now)}
|
||||
{:file-id file-id
|
||||
:library-file-id library-id}
|
||||
{::db/return-keys true}))
|
||||
@@ -1068,14 +1142,14 @@
|
||||
[conn {:keys [file-id date] :as params}]
|
||||
(db/update! conn :file
|
||||
{:ignore-sync-until date
|
||||
:modified-at (dt/now)}
|
||||
:modified-at (ct/now)}
|
||||
{:id file-id}
|
||||
{::db/return-keys true}))
|
||||
|
||||
(def ^:private schema:ignore-file-library-sync-status
|
||||
[:map {:title "ignore-file-library-sync-status"}
|
||||
[:file-id ::sm/uuid]
|
||||
[:date ::dt/instant]])
|
||||
[:date ::ct/inst]])
|
||||
|
||||
;; TODO: improve naming
|
||||
(sv/defmethod ::ignore-file-library-sync-status
|
||||
@@ -1087,3 +1161,138 @@
|
||||
(check-edition-permissions! conn profile-id file-id)
|
||||
(-> (ignore-sync conn params)
|
||||
(update :features db/decode-pgarray #{})))
|
||||
|
||||
;; --- MUTATION COMMAND: delete-files-immediately
|
||||
|
||||
(def ^:private sql:delete-team-files
|
||||
"UPDATE file AS uf SET deleted_at = ?::timestamptz
|
||||
FROM (
|
||||
SELECT f.id
|
||||
FROM file AS f
|
||||
JOIN project AS p ON (p.id = f.project_id)
|
||||
JOIN team AS t ON (t.id = p.team_id)
|
||||
WHERE t.deleted_at IS NULL
|
||||
AND t.id = ?
|
||||
AND f.id = ANY(?::uuid[])
|
||||
) AS subquery
|
||||
WHERE uf.id = subquery.id
|
||||
RETURNING uf.id, uf.deleted_at;")
|
||||
|
||||
(def ^:private schema:permanently-delete-team-files
|
||||
[:map {:title "permanently-delete-team-files"}
|
||||
[:team-id ::sm/uuid]
|
||||
[:ids [::sm/set ::sm/uuid]]])
|
||||
|
||||
(sv/defmethod ::permanently-delete-team-files
|
||||
"Mark the specified files to be deleted immediatelly on the
|
||||
specified team. The team-id on params will be used to filter and
|
||||
check writable permissons on team."
|
||||
|
||||
{::doc/added "2.12"
|
||||
::sm/params schema:permanently-delete-team-files
|
||||
::db/transaction true}
|
||||
|
||||
[{:keys [::db/conn]} {:keys [::rpc/profile-id ::rpc/request-at team-id ids]}]
|
||||
(teams/check-edition-permissions! conn profile-id team-id)
|
||||
|
||||
(reduce (fn [acc {:keys [id deleted-at]}]
|
||||
(wrk/submit! {::db/conn conn
|
||||
::wrk/task :delete-object
|
||||
::wrk/params {:object :file
|
||||
:deleted-at deleted-at
|
||||
:id id}})
|
||||
(conj acc id))
|
||||
#{}
|
||||
(db/plan conn [sql:delete-team-files request-at team-id
|
||||
(db/create-array conn "uuid" ids)])))
|
||||
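;; Sketch of how the SQL above is driven (it mirrors the defmethod;
;; `conn`, `request-at`, `team-id` and `ids` are assumed bindings): the
;; uuid set is passed as a uuid[] parameter and the resulting rows are
;; collected into a set of affected file ids.
(comment
  (let [ids-array (db/create-array conn "uuid" ids)]
    (into #{}
          (map :id)
          (db/plan conn [sql:delete-team-files request-at team-id ids-array]))))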
;; --- MUTATION COMMAND: restore-files-immediately
|
||||
|
||||
(def ^:private sql:resolve-editable-files
|
||||
"SELECT f.id, f.project_id
|
||||
FROM file AS f
|
||||
JOIN project AS p ON (p.id = f.project_id)
|
||||
JOIN team AS t ON (t.id = p.team_id)
|
||||
WHERE t.deleted_at IS NULL
|
||||
AND t.id = ?
|
||||
AND f.id = ANY(?::uuid[])")
|
||||
|
||||
(defn- restore-file
|
||||
[conn file-id]
|
||||
(db/update! conn :file
|
||||
{:deleted-at nil
|
||||
:has-media-trimmed false}
|
||||
{:id file-id}
|
||||
{::db/return-keys false})
|
||||
|
||||
(db/update! conn :file-media-object
|
||||
{:deleted-at nil}
|
||||
{:file-id file-id}
|
||||
{::db/return-keys false})
|
||||
|
||||
(db/update! conn :file-change
|
||||
{:deleted-at nil}
|
||||
{:file-id file-id}
|
||||
{::db/return-keys false})
|
||||
|
||||
(db/update! conn :file-data
|
||||
{:deleted-at nil}
|
||||
{:file-id file-id}
|
||||
{::db/return-keys false})
|
||||
|
||||
(db/update! conn :file-thumbnail
|
||||
{:deleted-at nil}
|
||||
{:file-id file-id}
|
||||
{::db/return-keys false})
|
||||
|
||||
(db/update! conn :file-tagged-object-thumbnail
|
||||
{:deleted-at nil}
|
||||
{:file-id file-id}
|
||||
{::db/return-keys false}))
|
||||
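;; Minimal usage sketch (assuming an open transaction `conn` and the id
;; of a soft-deleted file): clears the deletion mark on the file row and
;; on its media, change, data and thumbnail rows.
(comment
  (restore-file conn file-id))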
(def ^:private sql:restore-projects
|
||||
"UPDATE project SET deleted_at = null WHERE id = ANY(?::uuid[])")
|
||||
|
||||
(defn- restore-projects
|
||||
[conn project-ids]
|
||||
(let [project-ids (db/create-array conn "uuid" project-ids)]
|
||||
(->> (db/exec-one! conn [sql:restore-projects project-ids])
|
||||
(db/get-update-count))))
|
||||
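;; Usage sketch (the project ids are hypothetical): returns the number
;; of project rows whose deleted_at was cleared.
(comment
  (restore-projects conn #{project-id-a project-id-b}))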
(defn- restore-deleted-team-files
|
||||
[{:keys [::db/conn]} {:keys [::rpc/profile-id team-id ids]}]
|
||||
(teams/check-edition-permissions! conn profile-id team-id)
|
||||
(let [total-files
|
||||
(count ids)
|
||||
|
||||
{:keys [files projects]}
|
||||
(reduce (fn [result {:keys [id project-id]}]
|
||||
(let [index (-> result :files count)]
|
||||
(events/tap :progress {:file-id id :index index :total total-files})
|
||||
(restore-file conn id)
|
||||
|
||||
(-> result
|
||||
(update :files conj id)
|
||||
(update :projects conj project-id))))
|
||||
|
||||
{:files #{} :projects #{}}
|
||||
(db/plan conn [sql:resolve-editable-files team-id
|
||||
(db/create-array conn "uuid" ids)]))]
|
||||
|
||||
(restore-projects conn projects)
|
||||
|
||||
files))
|
||||
|
||||
(def ^:private schema:restore-deleted-team-files
|
||||
[:map {:title "restore-deleted-team-files"}
|
||||
[:team-id ::sm/uuid]
|
||||
[:ids [::sm/set ::sm/uuid]]])
|
||||
|
||||
(sv/defmethod ::restore-deleted-team-files
|
||||
"Removes the deletion mark from the specified files (and respective
|
||||
projects) on the specified team."
|
||||
{::doc/added "2.12"
|
||||
::sse/stream? true
|
||||
::sm/params schema:restore-deleted-team-files}
|
||||
[cfg params]
|
||||
(sse/response #(db/tx-run! cfg restore-deleted-team-files params)))
|
||||
|
||||
@@ -8,7 +8,9 @@
|
||||
(:require
|
||||
[app.binfile.common :as bfc]
|
||||
[app.common.features :as cfeat]
|
||||
[app.common.files.migrations :as fmg]
|
||||
[app.common.schema :as sm]
|
||||
[app.common.time :as ct]
|
||||
[app.common.types.file :as ctf]
|
||||
[app.config :as cf]
|
||||
[app.db :as db]
|
||||
@@ -22,13 +24,13 @@
|
||||
[app.rpc.quotes :as quotes]
|
||||
[app.util.pointer-map :as pmap]
|
||||
[app.util.services :as sv]
|
||||
[app.util.time :as dt]
|
||||
[clojure.set :as set]))
|
||||
|
||||
(defn create-file-role!
|
||||
[conn {:keys [file-id profile-id role]}]
|
||||
(let [params {:file-id file-id
|
||||
:profile-id profile-id}]
|
||||
|
||||
(->> (perms/assign-role-flags params role)
|
||||
(db/insert! conn :file-profile-rel))))
|
||||
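;; Usage sketch (assumed bindings): assigns the :owner role to the
;; creating profile, as done right after file insertion below.
(comment
  (create-file-role! conn {:file-id file-id
                           :profile-id profile-id
                           :role :owner}))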
@@ -44,28 +46,30 @@
|
||||
|
||||
(binding [pmap/*tracked* (pmap/create-tracked)
|
||||
cfeat/*current* features]
|
||||
|
||||
(let [file (ctf/make-file {:id id
|
||||
:project-id project-id
|
||||
:name name
|
||||
:revn revn
|
||||
:is-shared is-shared
|
||||
:features features
|
||||
:migrations fmg/available-migrations
|
||||
:ignore-sync-until ignore-sync-until
|
||||
:modified-at modified-at
|
||||
:created-at modified-at
|
||||
:deleted-at deleted-at}
|
||||
{:create-page create-page
|
||||
:page-id page-id})
|
||||
file (-> (bfc/insert-file! cfg file)
|
||||
(bfc/decode-row))]
|
||||
:page-id page-id})]
|
||||
|
||||
(bfc/insert-file! cfg file)
|
||||
|
||||
(->> (assoc params :file-id (:id file) :role :owner)
|
||||
(create-file-role! conn))
|
||||
|
||||
(db/update! conn :project
|
||||
{:modified-at (dt/now)}
|
||||
{:modified-at (ct/now)}
|
||||
{:id project-id})
|
||||
|
||||
file)))
|
||||
(bfc/get-file cfg (:id file)))))
|
||||
|
||||
(def ^:private schema:create-file
|
||||
[:map {:title "create-file"}
|
||||
@@ -111,14 +115,15 @@
|
||||
;; FIXME: IMPORTANT: this code can have race conditions, because
;; we have no locks for updating the team, so creating two files
;; concurrently can lead to lost team feature updates
|
||||
|
||||
(when-let [features (-> features
|
||||
(set/difference (:features team))
|
||||
(set/difference cfeat/no-team-inheritable-features)
|
||||
(not-empty))]
|
||||
(let [features (->> features
|
||||
(set/union (:features team))
|
||||
(db/create-array conn "text"))]
|
||||
(let [features (-> features
|
||||
(set/union (:features team))
|
||||
(set/difference cfeat/no-team-inheritable-features)
|
||||
(into-array))]
|
||||
|
||||
(db/update! conn :team
|
||||
{:features features}
|
||||
{:id (:id team)}
|
||||
|
||||
@@ -8,52 +8,20 @@
|
||||
(:require
|
||||
[app.binfile.common :as bfc]
|
||||
[app.common.exceptions :as ex]
|
||||
[app.common.files.migrations :as fmg]
|
||||
[app.common.logging :as l]
|
||||
[app.common.schema :as sm]
|
||||
[app.common.uuid :as uuid]
|
||||
[app.config :as cf]
|
||||
[app.common.time :as ct]
|
||||
[app.db :as db]
|
||||
[app.db.sql :as-alias sql]
|
||||
[app.features.fdata :as feat.fdata]
|
||||
[app.features.file-migrations :refer [reset-migrations!]]
|
||||
[app.features.file-snapshots :as fsnap]
|
||||
[app.features.logical-deletion :as ldel]
|
||||
[app.main :as-alias main]
|
||||
[app.msgbus :as mbus]
|
||||
[app.rpc :as-alias rpc]
|
||||
[app.rpc.commands.files :as files]
|
||||
[app.rpc.commands.teams :as teams]
|
||||
[app.rpc.doc :as-alias doc]
|
||||
[app.rpc.quotes :as quotes]
|
||||
[app.storage :as sto]
|
||||
[app.util.blob :as blob]
|
||||
[app.util.services :as sv]
|
||||
[app.util.time :as dt]
|
||||
[cuerdas.core :as str]))
|
||||
|
||||
(defn decode-row
|
||||
[{:keys [migrations] :as row}]
|
||||
(when row
|
||||
(cond-> row
|
||||
(some? migrations)
|
||||
(assoc :migrations (db/decode-pgarray migrations)))))
|
||||
|
||||
(def sql:get-file-snapshots
|
||||
"WITH changes AS (
|
||||
SELECT id, label, revn, created_at, created_by, profile_id
|
||||
FROM file_change
|
||||
WHERE file_id = ?
|
||||
AND data IS NOT NULL
|
||||
AND (deleted_at IS NULL OR deleted_at > now())
|
||||
), versions AS (
|
||||
(SELECT * FROM changes WHERE created_by = 'system' LIMIT 1000)
|
||||
UNION ALL
|
||||
(SELECT * FROM changes WHERE created_by != 'system' LIMIT 1000)
|
||||
)
|
||||
SELECT * FROM versions
|
||||
ORDER BY created_at DESC;")
|
||||
|
||||
(defn get-file-snapshots
|
||||
[conn file-id]
|
||||
(db/exec! conn [sql:get-file-snapshots file-id]))
|
||||
[app.util.services :as sv]))
|
||||
|
||||
(def ^:private schema:get-file-snapshots
|
||||
[:map {:title "get-file-snapshots"}
|
||||
@@ -65,73 +33,7 @@
|
||||
[cfg {:keys [::rpc/profile-id file-id] :as params}]
|
||||
(db/run! cfg (fn [{:keys [::db/conn]}]
|
||||
(files/check-read-permissions! conn profile-id file-id)
|
||||
(get-file-snapshots conn file-id))))
|
||||
|
||||
(defn- generate-snapshot-label
|
||||
[]
|
||||
(let [ts (-> (dt/now)
|
||||
(dt/format-instant)
|
||||
(str/replace #"[T:\.]" "-")
|
||||
(str/rtrim "Z"))]
|
||||
(str "snapshot-" ts)))
|
||||
|
||||
(defn create-file-snapshot!
|
||||
[cfg file & {:keys [label created-by deleted-at profile-id]
|
||||
:or {deleted-at :default
|
||||
created-by :system}}]
|
||||
|
||||
(assert (#{:system :user :admin} created-by)
|
||||
"expected valid keyword for created-by")
|
||||
|
||||
(let [created-by
|
||||
(name created-by)
|
||||
|
||||
deleted-at
|
||||
(cond
|
||||
(= deleted-at :default)
|
||||
(dt/plus (dt/now) (cf/get-deletion-delay))
|
||||
|
||||
(dt/instant? deleted-at)
|
||||
deleted-at
|
||||
|
||||
:else
|
||||
nil)
|
||||
|
||||
label
|
||||
(or label (generate-snapshot-label))
|
||||
|
||||
snapshot-id
|
||||
(uuid/next)
|
||||
|
||||
data
|
||||
(blob/encode (:data file))
|
||||
|
||||
features
|
||||
(into-array (:features file))
|
||||
|
||||
migrations
|
||||
(into-array (:migrations file))]
|
||||
|
||||
(l/dbg :hint "creating file snapshot"
|
||||
:file-id (str (:id file))
|
||||
:id (str snapshot-id)
|
||||
:label label)
|
||||
|
||||
(db/insert! cfg :file-change
|
||||
{:id snapshot-id
|
||||
:revn (:revn file)
|
||||
:data data
|
||||
:version (:version file)
|
||||
:features features
|
||||
:migrations migrations
|
||||
:profile-id profile-id
|
||||
:file-id (:id file)
|
||||
:label label
|
||||
:deleted-at deleted-at
|
||||
:created-by created-by}
|
||||
{::db/return-keys false})
|
||||
|
||||
{:id snapshot-id :label label}))
|
||||
(fsnap/get-visible-snapshots conn file-id))))
|
||||
|
||||
(def ^:private schema:create-file-snapshot
|
||||
[:map
|
||||
@@ -144,7 +46,7 @@
|
||||
::db/transaction true}
|
||||
[{:keys [::db/conn] :as cfg} {:keys [::rpc/profile-id file-id label]}]
|
||||
(files/check-edition-permissions! conn profile-id file-id)
|
||||
(let [file (bfc/get-file cfg file-id)
|
||||
(let [file (bfc/get-file cfg file-id :realize? true)
|
||||
project (db/get-by-id cfg :project (:project-id file))]
|
||||
|
||||
(-> cfg
|
||||
@@ -155,96 +57,10 @@
|
||||
(quotes/check! {::quotes/id ::quotes/snapshots-per-file}
|
||||
{::quotes/id ::quotes/snapshots-per-team}))
|
||||
|
||||
(create-file-snapshot! cfg file
|
||||
{:label label
|
||||
:profile-id profile-id
|
||||
:created-by :user})))
|
||||
|
||||
(defn restore-file-snapshot!
|
||||
[{:keys [::db/conn ::mbus/msgbus] :as cfg} file-id snapshot-id]
|
||||
(let [storage (sto/resolve cfg {::db/reuse-conn true})
|
||||
file (files/get-minimal-file conn file-id {::db/for-update true})
|
||||
vern (rand-int Integer/MAX_VALUE)
|
||||
snapshot (some->> (db/get* conn :file-change
|
||||
{:file-id file-id
|
||||
:id snapshot-id}
|
||||
{::db/for-share true})
|
||||
(feat.fdata/resolve-file-data cfg)
|
||||
(decode-row))
|
||||
|
||||
;; If the snapshot has tracked applied migrations, we reuse them;
;; if not, we take the safest set of migrations as a starting
;; point. This is because, at the time of implementing
;; snapshots, migrations were not taken into account so we
;; need to make this backward compatible in some way.
|
||||
file (assoc file :migrations
|
||||
(or (:migrations snapshot)
|
||||
(fmg/generate-migrations-from-version 67)))]
|
||||
|
||||
(when-not snapshot
|
||||
(ex/raise :type :not-found
|
||||
:code :snapshot-not-found
|
||||
:hint "unable to find snapshot with the provided label"
|
||||
:snapshot-id snapshot-id
|
||||
:file-id file-id))
|
||||
|
||||
(when-not (:data snapshot)
|
||||
(ex/raise :type :validation
|
||||
:code :snapshot-without-data
|
||||
:hint "snapshot has no data"
|
||||
:label (:label snapshot)
|
||||
:file-id file-id))
|
||||
|
||||
(l/dbg :hint "restoring snapshot"
|
||||
:file-id (str file-id)
|
||||
:label (:label snapshot)
|
||||
:snapshot-id (str (:id snapshot)))
|
||||
|
||||
;; If the file was already offloaded, on restoring the snapshot we
|
||||
;; are going to replace the file data, so we need to touch the old
|
||||
;; referenced storage object and avoid possible leaks
|
||||
(when (feat.fdata/offloaded? file)
|
||||
(sto/touch-object! storage (:data-ref-id file)))
|
||||
|
||||
;; In the same way, on resetting the file data, we need to restore
|
||||
;; the applied migrations on the moment of taking the snapshot
|
||||
(reset-migrations! conn file)
|
||||
|
||||
(db/update! conn :file
|
||||
{:data (:data snapshot)
|
||||
:revn (inc (:revn file))
|
||||
:vern vern
|
||||
:version (:version snapshot)
|
||||
:data-backend nil
|
||||
:data-ref-id nil
|
||||
:has-media-trimmed false
|
||||
:features (:features snapshot)}
|
||||
{:id file-id})
|
||||
|
||||
;; clean object thumbnails
|
||||
(let [sql (str "update file_tagged_object_thumbnail "
|
||||
" set deleted_at = now() "
|
||||
" where file_id=? returning media_id")
|
||||
res (db/exec! conn [sql file-id])]
|
||||
(doseq [media-id (into #{} (keep :media-id) res)]
|
||||
(sto/touch-object! storage media-id)))
|
||||
|
||||
;; clean file thumbnails
|
||||
(let [sql (str "update file_thumbnail "
|
||||
" set deleted_at = now() "
|
||||
" where file_id=? returning media_id")
|
||||
res (db/exec! conn [sql file-id])]
|
||||
(doseq [media-id (into #{} (keep :media-id) res)]
|
||||
(sto/touch-object! storage media-id)))
|
||||
|
||||
;; Send to the clients a notification to reload the file
|
||||
(mbus/pub! msgbus
|
||||
:topic (:id file)
|
||||
:message {:type :file-restore
|
||||
:file-id (:id file)
|
||||
:vern vern})
|
||||
{:id (:id snapshot)
|
||||
:label (:label snapshot)}))
|
||||
(fsnap/create! cfg file
|
||||
{:label label
|
||||
:profile-id profile-id
|
||||
:created-by "user"})))
|
||||
|
||||
(def ^:private schema:restore-file-snapshot
|
||||
[:map {:title "restore-file-snapshot"}
|
||||
@@ -253,75 +69,152 @@
|
||||
|
||||
(sv/defmethod ::restore-file-snapshot
|
||||
{::doc/added "1.20"
|
||||
::sm/params schema:restore-file-snapshot}
|
||||
[cfg {:keys [::rpc/profile-id file-id id] :as params}]
|
||||
(db/tx-run! cfg
|
||||
(fn [{:keys [::db/conn] :as cfg}]
|
||||
(files/check-edition-permissions! conn profile-id file-id)
|
||||
(let [file (bfc/get-file cfg file-id)]
|
||||
(create-file-snapshot! cfg file
|
||||
{:profile-id profile-id
|
||||
:created-by :system})
|
||||
(restore-file-snapshot! cfg file-id id)))))
|
||||
::sm/params schema:restore-file-snapshot
|
||||
::db/transaction true}
|
||||
[{:keys [::db/conn ::mbus/msgbus] :as cfg} {:keys [::rpc/profile-id file-id id] :as params}]
|
||||
(files/check-edition-permissions! conn profile-id file-id)
|
||||
(let [file (bfc/get-file cfg file-id)
|
||||
team (teams/get-team conn
|
||||
:profile-id profile-id
|
||||
:file-id file-id)
|
||||
delay (ldel/get-deletion-delay team)]
|
||||
|
||||
(fsnap/create! cfg file
|
||||
{:profile-id profile-id
|
||||
:deleted-at (ct/in-future delay)
|
||||
:created-by "system"})
|
||||
|
||||
(let [vern (fsnap/restore! cfg file-id id)]
|
||||
;; Send to the clients a notification to reload the file
|
||||
(mbus/pub! msgbus
|
||||
:topic (:id file)
|
||||
:message {:type :file-restore
|
||||
:file-id (:id file)
|
||||
:vern vern})
|
||||
nil)))
|
||||
|
||||
(def ^:private schema:update-file-snapshot
|
||||
[:map {:title "update-file-snapshot"}
|
||||
[:id ::sm/uuid]
|
||||
[:label ::sm/text]])
|
||||
|
||||
(defn- update-file-snapshot!
|
||||
[conn snapshot-id label]
|
||||
(-> (db/update! conn :file-change
|
||||
{:label label
|
||||
:created-by "user"
|
||||
:deleted-at nil}
|
||||
{:id snapshot-id}
|
||||
{::db/return-keys true})
|
||||
(dissoc :data :features :migrations)))
|
||||
|
||||
(defn- get-snapshot
|
||||
"Get a minimal snapshot from database and lock for update"
|
||||
[conn id]
|
||||
(db/get conn :file-change
|
||||
{:id id}
|
||||
{::sql/columns [:id :file-id :created-by :deleted-at]
|
||||
::db/for-update true}))
|
||||
|
||||
(sv/defmethod ::update-file-snapshot
|
||||
{::doc/added "1.20"
|
||||
::sm/params schema:update-file-snapshot}
|
||||
[cfg {:keys [::rpc/profile-id id label]}]
|
||||
(db/tx-run! cfg
|
||||
(fn [{:keys [::db/conn]}]
|
||||
(let [snapshot (get-snapshot conn id)]
|
||||
(files/check-edition-permissions! conn profile-id (:file-id snapshot))
|
||||
(update-file-snapshot! conn id label)))))
|
||||
::sm/params schema:update-file-snapshot
|
||||
::db/transaction true}
|
||||
[{:keys [::db/conn]} {:keys [::rpc/profile-id id label]}]
|
||||
(let [snapshot (fsnap/get-minimal-snapshot conn id)]
|
||||
(files/check-edition-permissions! conn profile-id (:file-id snapshot))
|
||||
(fsnap/update! conn (assoc snapshot :label label))))
|
||||
|
||||
(def ^:private schema:remove-file-snapshot
|
||||
[:map {:title "remove-file-snapshot"}
|
||||
[:id ::sm/uuid]])
|
||||
|
||||
(defn- delete-file-snapshot!
|
||||
[conn snapshot-id]
|
||||
(db/update! conn :file-change
|
||||
{:deleted-at (dt/now)}
|
||||
{:id snapshot-id}
|
||||
{::db/return-keys false})
|
||||
nil)
|
||||
|
||||
(sv/defmethod ::delete-file-snapshot
|
||||
{::doc/added "1.20"
|
||||
::sm/params schema:remove-file-snapshot}
|
||||
[cfg {:keys [::rpc/profile-id id]}]
|
||||
(db/tx-run! cfg
|
||||
(fn [{:keys [::db/conn]}]
|
||||
(let [snapshot (get-snapshot conn id)]
|
||||
(files/check-edition-permissions! conn profile-id (:file-id snapshot))
|
||||
::sm/params schema:remove-file-snapshot
|
||||
::db/transaction true}
|
||||
[{:keys [::db/conn]} {:keys [::rpc/profile-id id]}]
|
||||
(let [snapshot (fsnap/get-minimal-snapshot conn id)]
|
||||
(files/check-edition-permissions! conn profile-id (:file-id snapshot))
|
||||
|
||||
(when (not= (:created-by snapshot) "user")
|
||||
(ex/raise :type :validation
|
||||
:code :system-snapshots-cant-be-deleted
|
||||
:snapshot-id id
|
||||
:profile-id profile-id))
|
||||
(when (not= (:created-by snapshot) "user")
|
||||
(ex/raise :type :validation
|
||||
:code :system-snapshots-cant-be-deleted
|
||||
:file-id (:file-id snapshot)
|
||||
:snapshot-id id
|
||||
:profile-id profile-id))
|
||||
|
||||
(delete-file-snapshot! conn id)))))
|
||||
(when (and (some? (:locked-by snapshot))
|
||||
(not= (:locked-by snapshot) profile-id))
|
||||
(ex/raise :type :validation
|
||||
:code :snapshot-is-locked
|
||||
:file-id (:file-id snapshot)
|
||||
:snapshot-id id
|
||||
:profile-id profile-id))
|
||||
|
||||
(let [team (teams/get-team conn
|
||||
:profile-id profile-id
|
||||
:file-id (:file-id snapshot))
|
||||
delay (ldel/get-deletion-delay team)]
|
||||
(fsnap/delete! conn (assoc snapshot :deleted-at (ct/in-future delay))))))
|
||||
|
||||
;;; Lock/unlock version endpoints
|
||||
|
||||
(def ^:private schema:lock-file-snapshot
|
||||
[:map {:title "lock-file-snapshot"}
|
||||
[:id ::sm/uuid]])
|
||||
|
||||
(sv/defmethod ::lock-file-snapshot
|
||||
{::doc/added "1.20"
|
||||
::sm/params schema:lock-file-snapshot
|
||||
::db/transaction true}
|
||||
[{:keys [::db/conn]} {:keys [::rpc/profile-id id]}]
|
||||
(let [snapshot (fsnap/get-minimal-snapshot conn id)]
|
||||
(files/check-edition-permissions! conn profile-id (:file-id snapshot))
|
||||
|
||||
(when (not= (:created-by snapshot) "user")
|
||||
(ex/raise :type :validation
|
||||
:code :system-snapshots-cant-be-locked
|
||||
:hint "Only user-created versions can be locked"
|
||||
:snapshot-id id
|
||||
:profile-id profile-id))
|
||||
|
||||
;; Only the creator can lock their own version
|
||||
(when (not= (:profile-id snapshot) profile-id)
|
||||
(ex/raise :type :validation
|
||||
:code :only-creator-can-lock
|
||||
:hint "Only the version creator can lock it"
|
||||
:snapshot-id id
|
||||
:profile-id profile-id
|
||||
:creator-id (:profile-id snapshot)))
|
||||
|
||||
;; Check if already locked
|
||||
(when (:locked-by snapshot)
|
||||
(ex/raise :type :validation
|
||||
:code :snapshot-already-locked
|
||||
:hint "Version is already locked"
|
||||
:snapshot-id id
|
||||
:profile-id profile-id
|
||||
:locked-by (:locked-by snapshot)))
|
||||
|
||||
(fsnap/lock-by! conn id profile-id)))
|
||||
|
||||
(def ^:private schema:unlock-file-snapshot
|
||||
[:map {:title "unlock-file-snapshot"}
|
||||
[:id ::sm/uuid]])
|
||||
|
||||
(sv/defmethod ::unlock-file-snapshot
|
||||
{::doc/added "1.20"
|
||||
::sm/params schema:unlock-file-snapshot
|
||||
::db/transaction true}
|
||||
[{:keys [::db/conn]} {:keys [::rpc/profile-id id]}]
|
||||
(let [snapshot (fsnap/get-minimal-snapshot conn id)]
|
||||
(files/check-edition-permissions! conn profile-id (:file-id snapshot))
|
||||
|
||||
(when (not= (:created-by snapshot) "user")
|
||||
(ex/raise :type :validation
|
||||
:code :system-snapshots-cant-be-unlocked
|
||||
:hint "Only user-created versions can be unlocked"
|
||||
:snapshot-id id
|
||||
:profile-id profile-id))
|
||||
|
||||
;; Only the creator can unlock their own version
|
||||
(when (not= (:profile-id snapshot) profile-id)
|
||||
(ex/raise :type :validation
|
||||
:code :only-creator-can-unlock
|
||||
:hint "Only the version creator can unlock it"
|
||||
:snapshot-id id
|
||||
:profile-id profile-id
|
||||
:creator-id (:profile-id snapshot)))
|
||||
|
||||
;; Check if not locked
|
||||
(when (not (:locked-by snapshot))
|
||||
(ex/raise :type :validation
|
||||
:code :snapshot-not-locked
|
||||
:hint "Version is not locked"
|
||||
:snapshot-id id
|
||||
:profile-id profile-id))
|
||||
|
||||
(fsnap/unlock! conn id)))
|
||||
|
||||
@@ -1,161 +0,0 @@
|
||||
;; This Source Code Form is subject to the terms of the Mozilla Public
|
||||
;; License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
;;
|
||||
;; Copyright (c) KALEIDOS INC
|
||||
|
||||
(ns app.rpc.commands.files-temp
|
||||
(:require
|
||||
[app.common.exceptions :as ex]
|
||||
[app.common.features :as cfeat]
|
||||
[app.common.files.changes :as cpc]
|
||||
[app.common.schema :as sm]
|
||||
[app.common.uuid :as uuid]
|
||||
[app.config :as cf]
|
||||
[app.db :as db]
|
||||
[app.db.sql :as sql]
|
||||
[app.features.fdata :as fdata]
|
||||
[app.loggers.audit :as audit]
|
||||
[app.rpc :as-alias rpc]
|
||||
[app.rpc.commands.files :as files]
|
||||
[app.rpc.commands.files-create :as files.create]
|
||||
[app.rpc.commands.files-update :as-alias files.update]
|
||||
[app.rpc.commands.projects :as projects]
|
||||
[app.rpc.commands.teams :as teams]
|
||||
[app.rpc.doc :as-alias doc]
|
||||
[app.rpc.helpers :as rph]
|
||||
[app.util.blob :as blob]
|
||||
[app.util.pointer-map :as pmap]
|
||||
[app.util.services :as sv]
|
||||
[app.util.time :as dt]
|
||||
[clojure.set :as set]))
|
||||
|
||||
;; --- MUTATION COMMAND: create-temp-file
|
||||
|
||||
(def ^:private schema:create-temp-file
|
||||
[:map {:title "create-temp-file"}
|
||||
[:name [:string {:max 250}]]
|
||||
[:project-id ::sm/uuid]
|
||||
[:id {:optional true} ::sm/uuid]
|
||||
[:is-shared ::sm/boolean]
|
||||
[:features ::cfeat/features]
|
||||
[:create-page ::sm/boolean]])
|
||||
|
||||
(sv/defmethod ::create-temp-file
|
||||
{::doc/added "1.17"
|
||||
::doc/module :files
|
||||
::sm/params schema:create-temp-file
|
||||
::db/transaction true}
|
||||
[{:keys [::db/conn] :as cfg} {:keys [::rpc/profile-id project-id] :as params}]
|
||||
(projects/check-edition-permissions! conn profile-id project-id)
|
||||
(let [team (teams/get-team conn :profile-id profile-id :project-id project-id)
|
||||
;; When we create files, we only need to respect the team
|
||||
;; features, because some features can be enabled
|
||||
;; globally, but the team is still not migrated properly.
|
||||
input-features
|
||||
(:features params #{})
|
||||
|
||||
;; If the imported project doesn't contain v2 we need to remove it
|
||||
team-features
|
||||
(cond-> (cfeat/get-team-enabled-features cf/flags team)
|
||||
(not (contains? input-features "components/v2"))
|
||||
(disj "components/v2"))
|
||||
|
||||
;; We also include all no-migration features declared by the
;; client; this makes it possible to enable a runtime feature on
;; the frontend and make it permanent on the file
|
||||
features
|
||||
(-> input-features
|
||||
(set/intersection cfeat/no-migration-features)
|
||||
(set/union team-features))
|
||||
|
||||
params
|
||||
(-> params
|
||||
(assoc :profile-id profile-id)
|
||||
(assoc :deleted-at (dt/in-future {:days 1}))
|
||||
(assoc :features features))]
|
||||
|
||||
(files.create/create-file cfg params)))
|
||||
|
||||
;; --- MUTATION COMMAND: update-temp-file
|
||||
|
||||
|
||||
(def ^:private schema:update-temp-file
|
||||
[:map {:title "update-temp-file"}
|
||||
[:changes [:vector ::cpc/change]]
|
||||
[:revn [::sm/int {:min 0}]]
|
||||
[:session-id ::sm/uuid]
|
||||
[:id ::sm/uuid]])
|
||||
|
||||
(sv/defmethod ::update-temp-file
|
||||
{::doc/added "1.17"
|
||||
::doc/module :files
|
||||
::sm/params schema:update-temp-file}
|
||||
[cfg {:keys [::rpc/profile-id session-id id revn changes] :as params}]
|
||||
(db/tx-run! cfg (fn [{:keys [::db/conn]}]
|
||||
(db/insert! conn :file-change
|
||||
{:id (uuid/next)
|
||||
:session-id session-id
|
||||
:profile-id profile-id
|
||||
:created-at (dt/now)
|
||||
:file-id id
|
||||
:revn revn
|
||||
:data nil
|
||||
:changes (blob/encode changes)})
|
||||
(rph/with-meta (rph/wrap nil)
|
||||
{::audit/replace-props {:file-id id
|
||||
:revn revn}}))))
|
||||
|
||||
;; --- MUTATION COMMAND: persist-temp-file
|
||||
|
||||
(defn persist-temp-file
|
||||
[{:keys [::db/conn] :as cfg} {:keys [id] :as params}]
|
||||
(let [file (files/get-file cfg id
|
||||
:migrate? false
|
||||
:lock-for-update? true)]
|
||||
|
||||
(when (nil? (:deleted-at file))
|
||||
(ex/raise :type :validation
|
||||
:code :cant-persist-already-persisted-file))
|
||||
|
||||
(let [changes (->> (db/cursor conn
|
||||
(sql/select :file-change {:file-id id}
|
||||
{:order-by [[:revn :asc]]})
|
||||
{:chunk-size 10})
|
||||
(sequence (mapcat (comp blob/decode :changes))))
|
||||
|
||||
file (update file :data cpc/process-changes changes)
|
||||
|
||||
file (if (contains? (:features file) "fdata/objects-map")
|
||||
(fdata/enable-objects-map file)
|
||||
file)
|
||||
|
||||
file (if (contains? (:features file) "fdata/pointer-map")
|
||||
(binding [pmap/*tracked* (pmap/create-tracked)]
|
||||
(let [file (fdata/enable-pointer-map file)]
|
||||
(fdata/persist-pointers! cfg id)
|
||||
file))
|
||||
file)]
|
||||
|
||||
;; Delete changes from the changes history
|
||||
(db/delete! conn :file-change {:file-id id})
|
||||
|
||||
(db/update! conn :file
|
||||
{:deleted-at nil
|
||||
:revn 1
|
||||
:data (blob/encode (:data file))}
|
||||
{:id id})
|
||||
nil)))
|
||||
|
||||
(def ^:private schema:persist-temp-file
|
||||
[:map {:title "persist-temp-file"}
|
||||
[:id ::sm/uuid]])
|
||||
|
||||
(sv/defmethod ::persist-temp-file
|
||||
{::doc/added "1.17"
|
||||
::doc/module :files
|
||||
::sm/params schema:persist-temp-file}
|
||||
[cfg {:keys [::rpc/profile-id id] :as params}]
|
||||
(db/tx-run! cfg (fn [{:keys [::db/conn] :as cfg}]
|
||||
(files/check-edition-permissions! conn profile-id id)
|
||||
(persist-temp-file cfg params))))
|
||||
@@ -6,6 +6,7 @@
|
||||
|
||||
(ns app.rpc.commands.files-thumbnails
|
||||
(:require
|
||||
[app.binfile.common :as bfc]
|
||||
[app.common.data :as d]
|
||||
[app.common.data.macros :as dm]
|
||||
[app.common.features :as cfeat]
|
||||
@@ -13,6 +14,7 @@
|
||||
[app.common.geom.shapes :as gsh]
|
||||
[app.common.schema :as sm]
|
||||
[app.common.thumbnails :as thc]
|
||||
[app.common.time :as ct]
|
||||
[app.common.types.shape-tree :as ctt]
|
||||
[app.config :as cf]
|
||||
[app.db :as db]
|
||||
@@ -30,13 +32,12 @@
|
||||
[app.storage :as sto]
|
||||
[app.util.pointer-map :as pmap]
|
||||
[app.util.services :as sv]
|
||||
[app.util.time :as dt]
|
||||
[cuerdas.core :as str]))
|
||||
|
||||
;; --- FEATURES
|
||||
|
||||
(def long-cache-duration
|
||||
(dt/duration {:days 7}))
|
||||
(ct/duration {:days 7}))
|
||||
|
||||
;; --- COMMAND QUERY: get-file-object-thumbnails
|
||||
|
||||
@@ -95,7 +96,7 @@
|
||||
;; loading all pages into memory to find the frame set for the thumbnail.
|
||||
|
||||
(defn get-file-data-for-thumbnail
|
||||
[{:keys [::db/conn] :as cfg} {:keys [data id] :as file}]
|
||||
[{:keys [::db/conn] :as cfg} {:keys [data id] :as file} strip-frames-with-thumbnails]
|
||||
(letfn [;; function responsible for finding the frame marked to be
;; used as thumbnail; the returned frame always has
;; the :page-id set to the page it belongs to.
|
||||
@@ -172,7 +173,7 @@
|
||||
|
||||
;; Assoc the available thumbnails and prune non-visible shapes
;; to avoid transferring unnecessary data.
|
||||
:always
|
||||
strip-frames-with-thumbnails
|
||||
(update :objects assoc-thumbnails page-id thumbs)))))
|
||||
|
||||
(def ^:private
|
||||
@@ -185,7 +186,8 @@
|
||||
[:map {:title "PartialFile"}
|
||||
[:id ::sm/uuid]
|
||||
[:revn {:min 0} ::sm/int]
|
||||
[:page [:map-of :keyword ::sm/any]]])
|
||||
[:page [:map-of :keyword ::sm/any]]
|
||||
[:strip-frames-with-thumbnails {:optional true} ::sm/boolean]])
|
||||
|
||||
(sv/defmethod ::get-file-data-for-thumbnail
|
||||
"Retrieves the data for generate the thumbnail of the file. Used
|
||||
@@ -194,7 +196,7 @@
|
||||
::doc/module :files
|
||||
::sm/params schema:get-file-data-for-thumbnail
|
||||
::sm/result schema:partial-file}
|
||||
[cfg {:keys [::rpc/profile-id file-id] :as params}]
|
||||
[cfg {:keys [::rpc/profile-id file-id strip-frames-with-thumbnails] :as params}]
|
||||
(db/run! cfg (fn [{:keys [::db/conn] :as cfg}]
|
||||
(files/check-read-permissions! conn profile-id file-id)
|
||||
|
||||
@@ -202,16 +204,20 @@
|
||||
:profile-id profile-id
|
||||
:file-id file-id)
|
||||
|
||||
file (files/get-file cfg file-id
|
||||
:preload-pointers? true
|
||||
:read-only? true)]
|
||||
file (bfc/get-file cfg file-id
|
||||
:realize? true
|
||||
:read-only? true)
|
||||
|
||||
strip-frames-with-thumbnails
|
||||
(or (nil? strip-frames-with-thumbnails) ;; if not present, default to true
|
||||
(true? strip-frames-with-thumbnails))]
|
||||
|
||||
(-> (cfeat/get-team-enabled-features cf/flags team)
|
||||
(cfeat/check-file-features! (:features file)))
|
||||
|
||||
{:file-id file-id
|
||||
:revn (:revn file)
|
||||
:page (get-file-data-for-thumbnail cfg file)}))))
|
||||
:page (get-file-data-for-thumbnail cfg file strip-frames-with-thumbnails)}))))
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; MUTATION COMMANDS
|
||||
@@ -247,7 +253,7 @@
|
||||
(defn- create-file-object-thumbnail!
|
||||
[{:keys [::sto/storage] :as cfg} file object-id media tag]
|
||||
(let [file-id (:id file)
|
||||
timestamp (dt/now)
|
||||
timestamp (ct/now)
|
||||
media (persist-thumbnail! storage media timestamp)
|
||||
[th1 th2] (db/tx-run! cfg (fn [{:keys [::db/conn]}]
|
||||
(let [th1 (db/exec-one! conn [sql:get-file-object-thumbnail file-id object-id tag])
|
||||
@@ -271,7 +277,7 @@
|
||||
[:map {:title "create-file-object-thumbnail"}
|
||||
[:file-id ::sm/uuid]
|
||||
[:object-id [:string {:max 250}]]
|
||||
[:media ::media/upload]
|
||||
[:media media/schema:upload]
|
||||
[:tag {:optional true} [:string {:max 50}]]])
|
||||
|
||||
(sv/defmethod ::create-file-object-thumbnail
|
||||
@@ -302,7 +308,7 @@
|
||||
{::sql/for-update true})]
|
||||
(sto/touch-object! storage media-id)
|
||||
(db/update! conn :file-tagged-object-thumbnail
|
||||
{:deleted-at (dt/now)}
|
||||
{:deleted-at (ct/now)}
|
||||
{:file-id file-id
|
||||
:object-id object-id
|
||||
:tag tag})))
|
||||
@@ -338,7 +344,8 @@
|
||||
hash (sto/calculate-hash path)
|
||||
data (-> (sto/content path)
|
||||
(sto/wrap-with-hash hash))
|
||||
tnow (dt/now)
|
||||
tnow (ct/now)
|
||||
|
||||
media (sto/put-object! storage
|
||||
{::sto/content data
|
||||
::sto/deduplicate? true
|
||||
@@ -381,7 +388,7 @@
|
||||
[:map {:title "create-file-thumbnail"}
|
||||
[:file-id ::sm/uuid]
|
||||
[:revn ::sm/int]
|
||||
[:media ::media/upload]])
|
||||
[:media media/schema:upload]])
|
||||
|
||||
(sv/defmethod ::create-file-thumbnail
|
||||
"Creates or updates the file thumbnail. Mainly used for paint the
|
||||
|
||||
@@ -15,31 +15,29 @@
|
||||
[app.common.files.validate :as val]
|
||||
[app.common.logging :as l]
|
||||
[app.common.schema :as sm]
|
||||
[app.common.time :as ct]
|
||||
[app.common.uuid :as uuid]
|
||||
[app.config :as cf]
|
||||
[app.db :as db]
|
||||
[app.features.fdata :as feat.fdata]
|
||||
[app.features.file-migrations :as feat.fmigr]
|
||||
[app.features.fdata :as fdata]
|
||||
[app.features.file-snapshots :as fsnap]
|
||||
[app.features.logical-deletion :as ldel]
|
||||
[app.http.errors :as errors]
|
||||
[app.loggers.audit :as audit]
|
||||
[app.loggers.webhooks :as webhooks]
|
||||
[app.metrics :as mtx]
|
||||
[app.msgbus :as mbus]
|
||||
[app.redis :as rds]
|
||||
[app.rpc :as-alias rpc]
|
||||
[app.rpc.climit :as climit]
|
||||
[app.rpc.commands.files :as files]
|
||||
[app.rpc.commands.teams :as teams]
|
||||
[app.rpc.doc :as-alias doc]
|
||||
[app.rpc.helpers :as rph]
|
||||
[app.storage :as sto]
|
||||
[app.util.blob :as blob]
|
||||
[app.util.pointer-map :as pmap]
|
||||
[app.util.services :as sv]
|
||||
[app.util.time :as dt]
|
||||
[app.worker :as wrk]
|
||||
[clojure.set :as set]
|
||||
[promesa.exec :as px]))
|
||||
[clojure.set :as set]))
|
||||
|
||||
(declare ^:private get-lagged-changes)
|
||||
(declare ^:private send-notifications!)
|
||||
@@ -47,6 +45,7 @@
|
||||
(declare ^:private update-file*)
|
||||
(declare ^:private process-changes-and-validate)
|
||||
(declare ^:private take-snapshot?)
|
||||
(declare ^:private invalidate-caches!)
|
||||
|
||||
;; PUBLIC API; intended to be used outside of this module
|
||||
(declare update-file!)
|
||||
@@ -64,10 +63,10 @@
|
||||
[:revn {:min 0} ::sm/int]
|
||||
[:vern {:min 0} ::sm/int]
|
||||
[:features {:optional true} ::cfeat/features]
|
||||
[:changes {:optional true} [:vector ::cpc/change]]
|
||||
[:changes {:optional true} [:vector cpc/schema:change]]
|
||||
[:changes-with-metadata {:optional true}
|
||||
[:vector [:map
|
||||
[:changes [:vector ::cpc/change]]
|
||||
[:changes [:vector cpc/schema:change]]
|
||||
[:hint-origin {:optional true} :keyword]
|
||||
[:hint-events {:optional true} [:vector [:string {:max 250}]]]]]]
|
||||
[:skip-validate {:optional true} ::sm/boolean]])
|
||||
@@ -76,7 +75,7 @@
|
||||
schema:update-file-result
|
||||
[:vector {:title "update-file-result"}
|
||||
[:map
|
||||
[:changes [:vector ::cpc/change]]
|
||||
[:changes [:vector cpc/schema:change]]
|
||||
[:file-id ::sm/uuid]
|
||||
[:id ::sm/uuid]
|
||||
[:revn {:min 0} ::sm/int]
|
||||
@@ -123,83 +122,84 @@
|
||||
[:update-file/global]]
|
||||
|
||||
::webhooks/event? true
|
||||
::webhooks/batch-timeout (dt/duration "2m")
|
||||
::webhooks/batch-timeout (ct/duration "2m")
|
||||
::webhooks/batch-key (webhooks/key-fn ::rpc/profile-id :id)
|
||||
|
||||
::sm/params schema:update-file
|
||||
::sm/result schema:update-file-result
|
||||
::doc/module :files
|
||||
::doc/added "1.17"}
|
||||
[{:keys [::mtx/metrics] :as cfg}
|
||||
::doc/added "1.17"
|
||||
::db/transaction true}
|
||||
[{:keys [::mtx/metrics ::db/conn] :as cfg}
|
||||
{:keys [::rpc/profile-id id changes changes-with-metadata] :as params}]
|
||||
(db/tx-run! cfg (fn [{:keys [::db/conn] :as cfg}]
|
||||
(files/check-edition-permissions! conn profile-id id)
|
||||
(db/xact-lock! conn id)
|
||||
|
||||
(let [file (get-file conn id)
|
||||
team (teams/get-team conn
|
||||
:profile-id profile-id
|
||||
:team-id (:team-id file))
|
||||
(files/check-edition-permissions! conn profile-id id)
|
||||
(db/xact-lock! conn id)
|
||||
|
||||
features (-> (cfeat/get-team-enabled-features cf/flags team)
|
||||
(cfeat/check-client-features! (:features params))
|
||||
(cfeat/check-file-features! (:features file)))
|
||||
(let [file (get-file cfg id)
|
||||
team (teams/get-team conn
|
||||
:profile-id profile-id
|
||||
:team-id (:team-id file))
|
||||
|
||||
changes (if changes-with-metadata
|
||||
(->> changes-with-metadata (mapcat :changes) vec)
|
||||
(vec changes))
|
||||
features (-> (cfeat/get-team-enabled-features cf/flags team)
|
||||
(cfeat/check-client-features! (:features params))
|
||||
(cfeat/check-file-features! (:features file)))
|
||||
|
||||
params (-> params
|
||||
(assoc :profile-id profile-id)
|
||||
(assoc :features (set/difference features cfeat/frontend-only-features))
|
||||
(assoc :team team)
|
||||
(assoc :file file)
|
||||
(assoc :changes changes))
|
||||
changes (if changes-with-metadata
|
||||
(->> changes-with-metadata (mapcat :changes) vec)
|
||||
(vec changes))
|
||||
|
||||
cfg (assoc cfg ::timestamp (dt/now))
|
||||
params (-> params
|
||||
(assoc :profile-id profile-id)
|
||||
(assoc :features (set/difference features cfeat/frontend-only-features))
|
||||
(assoc :team team)
|
||||
(assoc :file file)
|
||||
(assoc :changes changes))
|
||||
|
||||
tpoint (dt/tpoint)]
|
||||
cfg (assoc cfg ::timestamp (ct/now))
|
||||
|
||||
tpoint (ct/tpoint)]
|
||||
|
||||
(when (not= (:vern params)
|
||||
(:vern file))
|
||||
(ex/raise :type :validation
|
||||
:code :vern-conflict
|
||||
:hint "A different version has been restored for the file."
|
||||
:context {:incoming-revn (:revn params)
|
||||
:stored-revn (:revn file)}))
|
||||
|
||||
(when (> (:revn params)
|
||||
(:revn file))
|
||||
(ex/raise :type :validation
|
||||
:code :revn-conflict
|
||||
:hint "The incoming revision number is greater that stored version."
|
||||
:context {:incoming-revn (:revn params)
|
||||
:stored-revn (:revn file)}))
|
||||
|
||||
;; When the newly computed features do not match exactly the
;; features defined on the team row, we update it
|
||||
(when-let [features (-> features
|
||||
(set/difference (:features team))
|
||||
(set/difference cfeat/no-team-inheritable-features)
|
||||
(not-empty))]
|
||||
(let [features (-> features
|
||||
(set/union (:features team))
|
||||
(set/difference cfeat/no-team-inheritable-features)
|
||||
(into-array))]
|
||||
(db/update! conn :team
|
||||
{:features features}
|
||||
{:id (:id team)}
|
||||
{::db/return-keys false})))
|
||||
|
||||
|
||||
(when (not= (:vern params)
|
||||
(:vern file))
|
||||
(ex/raise :type :validation
|
||||
:code :vern-conflict
|
||||
:hint "A different version has been restored for the file."
|
||||
:context {:incoming-revn (:revn params)
|
||||
:stored-revn (:revn file)}))
|
||||
(mtx/run! metrics {:id :update-file-changes :inc (count changes)})
|
||||
|
||||
(when (> (:revn params)
|
||||
(:revn file))
|
||||
(ex/raise :type :validation
|
||||
:code :revn-conflict
|
||||
:hint "The incoming revision number is greater that stored version."
|
||||
:context {:incoming-revn (:revn params)
|
||||
:stored-revn (:revn file)}))
|
||||
|
||||
;; When the newly computed features do not match exactly
;; the features defined on the team row, we update it
|
||||
(when-let [features (-> features
|
||||
(set/difference (:features team))
|
||||
(set/difference cfeat/no-team-inheritable-features)
|
||||
(not-empty))]
|
||||
(let [features (->> features
|
||||
(set/union (:features team))
|
||||
(db/create-array conn "text"))]
|
||||
(db/update! conn :team
|
||||
{:features features}
|
||||
{:id (:id team)}
|
||||
{::db/return-keys false})))
|
||||
|
||||
|
||||
(mtx/run! metrics {:id :update-file-changes :inc (count changes)})
|
||||
|
||||
(binding [l/*context* (some-> (meta params)
|
||||
(get :app.http/request)
|
||||
(errors/request->context))]
|
||||
(-> (update-file* cfg params)
|
||||
(rph/with-defer #(let [elapsed (tpoint)]
|
||||
(l/trace :hint "update-file" :time (dt/format-duration elapsed))))))))))
|
||||
(binding [l/*context* (some-> (meta params)
|
||||
(get :app.http/request)
|
||||
(errors/request->context))]
|
||||
(-> (update-file* cfg params)
|
||||
(rph/with-defer #(let [elapsed (tpoint)]
|
||||
(l/trace :hint "update-file" :time (ct/format-duration elapsed))))))))
|
||||
|
||||
(defn- update-file*
|
||||
"Internal function, part of the update-file process, that encapsulates
|
||||
@@ -209,31 +209,41 @@
|
||||
Follow the inner implementation to `update-file-data!` function.
|
||||
|
||||
Only intended for internal use on this module."
|
||||
[{:keys [::db/conn ::wrk/executor ::timestamp] :as cfg}
|
||||
[{:keys [::db/conn ::timestamp] :as cfg}
|
||||
{:keys [profile-id file team features changes session-id skip-validate] :as params}]
|
||||
|
||||
(let [;; Retrieve the file data
|
||||
file (feat.fmigr/resolve-applied-migrations cfg file)
|
||||
file (feat.fdata/resolve-file-data cfg file)
|
||||
file (assoc file :features
|
||||
(-> features
|
||||
(set/difference cfeat/frontend-only-features)
|
||||
(set/union (:features file))))]
|
||||
(binding [pmap/*tracked* (pmap/create-tracked)
|
||||
pmap/*load-fn* (partial fdata/load-pointer cfg (:id file))]
|
||||
|
||||
;; We create a new lexical scope to clearly delimit the result of
;; executing this update file operation and all its side effects
|
||||
(let [file (px/invoke! executor
|
||||
(fn []
|
||||
;; Process the file data on a separate thread to avoid doing
;; the CPU-intensive operation on a virtual thread.
|
||||
(binding [cfeat/*current* features
|
||||
cfeat/*previous* (:features file)]
|
||||
(update-file-data! cfg file
|
||||
process-changes-and-validate
|
||||
changes skip-validate))))]
|
||||
(let [file (assoc file :features
|
||||
(-> features
|
||||
(set/difference cfeat/frontend-only-features)
|
||||
(set/union (:features file))))
|
||||
|
||||
(feat.fmigr/upsert-migrations! conn file)
|
||||
(persist-file! cfg file)
|
||||
;; We need to preserve the original revn for the response
|
||||
revn
|
||||
(get file :revn)
|
||||
|
||||
file
|
||||
(binding [cfeat/*current* features
|
||||
cfeat/*previous* (:features file)]
|
||||
(update-file-data! cfg file
|
||||
process-changes-and-validate
|
||||
changes skip-validate))
|
||||
|
||||
deleted-at
|
||||
(ct/plus timestamp (ct/duration {:hours 1}))]
|
||||
|
||||
(when-let [file (::snapshot file)]
|
||||
(let [deleted-at (ct/plus timestamp (ldel/get-deletion-delay team))
|
||||
label (str "internal/snapshot/" revn)]
|
||||
|
||||
(fsnap/create! cfg file
|
||||
{:label label
|
||||
:created-by "system"
|
||||
:deleted-at deleted-at
|
||||
:profile-id profile-id
|
||||
:session-id session-id})))
|
||||
|
||||
;; Insert change (xlog) with deleted_at set to a future date to
;; make them automatically eligible for GC once they expire
|
||||
@@ -243,87 +253,71 @@
|
||||
:profile-id profile-id
|
||||
:created-at timestamp
|
||||
:updated-at timestamp
|
||||
:deleted-at (if (::snapshot-data file)
|
||||
(dt/plus timestamp (ldel/get-deletion-delay team))
|
||||
(dt/plus timestamp (dt/duration {:hours 1})))
|
||||
:deleted-at deleted-at
|
||||
:file-id (:id file)
|
||||
:revn (:revn file)
|
||||
:version (:version file)
|
||||
:features (:features file)
|
||||
:label (::snapshot-label file)
|
||||
:data (::snapshot-data file)
|
||||
:features (into-array (:features file))
|
||||
:changes (blob/encode changes)}
|
||||
{::db/return-keys false})
|
||||
|
||||
(persist-file! cfg file)
|
||||
|
||||
(when (contains? cf/flags :redis-cache)
|
||||
(invalidate-caches! cfg file))
|
||||
|
||||
;; Send asynchronous notifications
|
||||
(send-notifications! cfg params file))
|
||||
(send-notifications! cfg params file)
|
||||
|
||||
(when (feat.fdata/offloaded? file)
|
||||
(let [storage (sto/resolve cfg ::db/reuse-conn true)]
|
||||
(some->> (:data-ref-id file) (sto/touch-object! storage))))
|
||||
|
||||
(let [response {:revn (:revn file)
|
||||
:lagged (get-lagged-changes conn params)}]
|
||||
(vary-meta response assoc ::audit/replace-props
|
||||
{:id (:id file)
|
||||
:name (:name file)
|
||||
:features (:features file)
|
||||
:project-id (:project-id file)
|
||||
:team-id (:team-id file)}))))
|
||||
|
||||
(defn update-file!
|
||||
"A public api that allows apply a transformation to a file with all context setup."
|
||||
[{:keys [::db/conn] :as cfg} file-id update-fn & args]
|
||||
(let [file (get-file cfg file-id)
|
||||
file (apply update-file-data! cfg file update-fn args)]
|
||||
(feat.fmigr/upsert-migrations! conn file)
|
||||
(persist-file! cfg file)))
|
||||
|
||||
(def ^:private sql:get-file
|
||||
"SELECT f.*, p.team_id
|
||||
FROM file AS f
|
||||
JOIN project AS p ON (p.id = f.project_id)
|
||||
WHERE f.id = ?
|
||||
AND (f.deleted_at IS NULL OR
|
||||
f.deleted_at > now())
|
||||
FOR KEY SHARE")
|
||||
(with-meta {:revn revn :lagged (get-lagged-changes conn params)}
|
||||
{::audit/replace-props
|
||||
{:id (:id file)
|
||||
:name (:name file)
|
||||
:features (:features file)
|
||||
:project-id (:project-id file)
|
||||
:team-id (:team-id file)}}))))
|
||||
|
||||
(defn get-file
|
||||
"Get not-decoded file, only decodes the features set."
|
||||
[conn id]
|
||||
(let [file (db/exec-one! conn [sql:get-file id])]
|
||||
(when-not file
|
||||
(ex/raise :type :not-found
|
||||
:code :object-not-found
|
||||
:hint (format "file with id '%s' does not exists" id)))
|
||||
(update file :features db/decode-pgarray #{})))
|
||||
[cfg id]
|
||||
(bfc/get-file cfg id :decode? false :lock-for-share? true))
|
||||
|
||||
(defn persist-file!
|
||||
"Function responsible of persisting already encoded file. Should be
|
||||
used together with `get-file` and `update-file-data!`.
|
||||
|
||||
It also updates the project modified-at attr."
|
||||
[{:keys [::db/conn ::timestamp]} file]
|
||||
[{:keys [::db/conn ::timestamp] :as cfg} file]
|
||||
(let [;; The timestamp can be nil because this function is also
|
||||
;; intended to be used outside of this module
|
||||
modified-at (or timestamp (dt/now))]
|
||||
modified-at
|
||||
(or timestamp (ct/now))
|
||||
|
||||
file
|
||||
(-> file
|
||||
(dissoc ::snapshot)
|
||||
(assoc :modified-at modified-at)
|
||||
(assoc :has-media-trimmed false))]
|
||||
|
||||
(db/update! conn :project
|
||||
{:modified-at modified-at}
|
||||
{:id (:project-id file)}
|
||||
{::db/return-keys false})
|
||||
|
||||
(db/update! conn :file
|
||||
{:revn (:revn file)
|
||||
:data (:data file)
|
||||
:version (:version file)
|
||||
:features (:features file)
|
||||
:data-backend nil
|
||||
:data-ref-id nil
|
||||
:modified-at modified-at
|
||||
:has-media-trimmed false}
|
||||
{:id (:id file)}
|
||||
{::db/return-keys false})))
|
||||
(bfc/update-file! cfg file)))
|
||||
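;; Sketch of the composition described in the docstring above (the
;; transform fn `my-update-fn` is hypothetical; it receives cfg and the
;; decoded file and returns the updated file):
(comment
  (let [file (get-file cfg file-id)
        file (update-file-data! cfg file my-update-fn)]
    (persist-file! cfg file)))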
(defn- invalidate-caches!
|
||||
[cfg {:keys [id] :as file}]
|
||||
(rds/run! cfg (fn [{:keys [::rds/conn]}]
|
||||
(let [key (str files/file-summary-cache-key-prefix id)]
|
||||
(rds/del conn key)))))
|
||||
|
||||
(defn- attach-snapshot
|
||||
"Attach snapshot data to the file. This should be called before the
|
||||
upcoming file operations are applied to the file."
|
||||
[cfg migrated? file]
|
||||
(let [snapshot (if migrated? file (fdata/realize cfg file))]
|
||||
(assoc file ::snapshot snapshot)))
|
||||
|
||||
(defn- update-file-data!
|
||||
"Perform a file data transformation in with all update context setup.
|
||||
@@ -335,52 +329,35 @@
|
||||
fdata/pointer-map modified fragments."
|
||||
|
||||
[cfg {:keys [id] :as file} update-fn & args]
|
||||
(binding [pmap/*tracked* (pmap/create-tracked)
|
||||
pmap/*load-fn* (partial feat.fdata/load-pointer cfg id)]
|
||||
(let [file (update file :data (fn [data]
|
||||
(-> data
|
||||
(blob/decode)
|
||||
(assoc :id (:id file)))))
|
||||
libs (delay (bfc/get-resolved-file-libraries cfg file))
|
||||
(let [file (update file :data (fn [data]
|
||||
(-> data
|
||||
(blob/decode)
|
||||
(assoc :id id))))
|
||||
libs (delay (bfc/get-resolved-file-libraries cfg file))
|
||||
|
||||
;; To avoid the unnecessary overhead of creating multiple pointers
;; and of handling the objects map internally in its worst
;; case (when probably all shapes and all pointers will be
;; read in any case), we just realize/resolve them before
;; applying the migration to the file
|
||||
file (if (fmg/need-migration? file)
|
||||
(-> file
|
||||
(update :data feat.fdata/process-pointers deref)
|
||||
(update :data feat.fdata/process-objects (partial into {}))
|
||||
(fmg/migrate-file libs))
|
||||
file)
|
||||
need-migration?
|
||||
(fmg/need-migration? file)
|
||||
|
||||
file (apply update-fn cfg file args)
|
||||
take-snapshot?
|
||||
(take-snapshot? file)
|
||||
|
||||
;; TODO: reuse operations if file is migrated
|
||||
;; TODO: move encoding to a separated thread
|
||||
file (if (take-snapshot? file)
|
||||
(let [tpoint (dt/tpoint)
|
||||
snapshot (-> (:data file)
|
||||
(feat.fdata/process-pointers deref)
|
||||
(feat.fdata/process-objects (partial into {}))
|
||||
(blob/encode))
|
||||
elapsed (tpoint)
|
||||
label (str "internal/snapshot/" (:revn file))]
|
||||
;; To avoid the unnecessary overhead of creating multiple
;; pointers and of handling the objects map internally in its
;; worst case (when probably all shapes and all pointers
;; will be read in any case), we just realize/resolve them
;; before applying the migration to the file
|
||||
file
|
||||
(cond-> file
|
||||
;; need-migration?
|
||||
;; (->> (fdata/realize cfg))
|
||||
|
||||
(l/trc :hint "take snapshot"
|
||||
:file-id (str (:id file))
|
||||
:revn (:revn file)
|
||||
:label label
|
||||
:elapsed (dt/format-duration elapsed))
|
||||
need-migration?
|
||||
(fmg/migrate-file libs)
|
||||
|
||||
(-> file
|
||||
(assoc ::snapshot-data snapshot)
|
||||
(assoc ::snapshot-label label)))
|
||||
file)]
|
||||
|
||||
(bfc/encode-file cfg file))))
|
||||
take-snapshot?
|
||||
(->> (attach-snapshot cfg need-migration?)))]
|
||||
|
||||
(apply update-fn cfg file args)))
|
||||
|
||||
(defn- soft-validate-file-schema!
|
||||
[file]
|
||||
@@ -408,7 +385,6 @@
|
||||
(not skip-validate))
|
||||
(bfc/get-resolved-file-libraries cfg file))
|
||||
|
||||
|
||||
;; The main purpose of this atom is to provide a contextual state
;; for the changes subsystem where optionally some hints can
;; be provided for the changes processing. Right now we are
|
||||
@@ -452,11 +428,11 @@
|
||||
(when (contains? cf/flags :auto-file-snapshot)
|
||||
(let [freq (or (cf/get :auto-file-snapshot-every) 20)
|
||||
timeout (or (cf/get :auto-file-snapshot-timeout)
|
||||
(dt/duration {:hours 1}))]
|
||||
(ct/duration {:hours 1}))]
|
||||
|
||||
(or (= 1 freq)
|
||||
(zero? (mod revn freq))
|
||||
(> (inst-ms (dt/diff modified-at (dt/now)))
|
||||
(> (inst-ms (ct/diff modified-at (ct/now)))
|
||||
(inst-ms timeout))))))
|
||||
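;; Worked example of the heuristic above (numbers are assumptions):
;; with auto-file-snapshot-every = 20, revn 40 takes a snapshot because
;; (zero? (mod 40 20)) is true; revn 41 only takes one when more time
;; than auto-file-snapshot-timeout has passed since modified-at.
(comment
  (zero? (mod 40 20))   ;; => true
  (zero? (mod 41 20)))  ;; => false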
(def ^:private sql:lagged-changes
|
||||
@@ -470,8 +446,9 @@
|
||||
(defn- get-lagged-changes
|
||||
[conn {:keys [id revn] :as params}]
|
||||
(->> (db/exec! conn [sql:lagged-changes id revn])
|
||||
(map files/decode-row)
|
||||
(vec)))
|
||||
(filter :changes)
|
||||
(mapv (fn [row]
|
||||
(update row :changes blob/decode)))))
|
||||
|
||||
(defn- send-notifications!
|
||||
[cfg {:keys [team changes session-id] :as params} file]
|
||||
@@ -496,5 +473,5 @@
|
||||
:file-id (:id file)
|
||||
:session-id session-id
|
||||
:revn (:revn file)
|
||||
:modified-at (dt/now)
|
||||
:modified-at (ct/now)
|
||||
:changes lchanges}))))
|
||||
|
||||
@@ -9,6 +9,7 @@
|
||||
[app.common.data.macros :as dm]
|
||||
[app.common.exceptions :as ex]
|
||||
[app.common.schema :as sm]
|
||||
[app.common.time :as ct]
|
||||
[app.common.uuid :as uuid]
|
||||
[app.db :as db]
|
||||
[app.db.sql :as-alias sql]
|
||||
@@ -25,10 +26,7 @@
|
||||
[app.rpc.helpers :as rph]
|
||||
[app.rpc.quotes :as quotes]
|
||||
[app.storage :as sto]
|
||||
[app.util.services :as sv]
|
||||
[app.util.time :as dt]
|
||||
[app.worker :as-alias wrk]
|
||||
[promesa.exec :as px]))
|
||||
[app.util.services :as sv]))
|
||||
|
||||
(def valid-weight #{100 200 300 400 500 600 700 800 900 950})
|
||||
(def valid-style #{"normal" "italic"})
|
||||
@@ -37,14 +35,13 @@
|
||||
|
||||
(def ^:private
|
||||
schema:get-font-variants
|
||||
[:schema {:title "get-font-variants"}
|
||||
[:and
|
||||
[:map
|
||||
[:team-id {:optional true} ::sm/uuid]
|
||||
[:file-id {:optional true} ::sm/uuid]
|
||||
[:project-id {:optional true} ::sm/uuid]
|
||||
[:share-id {:optional true} ::sm/uuid]]
|
||||
[::sm/contains-any #{:team-id :file-id :project-id}]]])
|
||||
[:and
|
||||
[:map {:title "get-font-variants"}
|
||||
[:team-id {:optional true} ::sm/uuid]
|
||||
[:file-id {:optional true} ::sm/uuid]
|
||||
[:project-id {:optional true} ::sm/uuid]
|
||||
[:share-id {:optional true} ::sm/uuid]]
|
||||
[::sm/contains-any #{:team-id :file-id :project-id}]])
|
||||
|
||||
(sv/defmethod ::get-font-variants
|
||||
{::doc/added "1.18"
|
||||
@@ -106,7 +103,7 @@
|
||||
(create-font-variant cfg (assoc params :profile-id profile-id)))))
|
||||
|
||||
(defn create-font-variant
|
||||
[{:keys [::sto/storage ::db/conn ::wrk/executor]} {:keys [data] :as params}]
|
||||
[{:keys [::sto/storage ::db/conn]} {:keys [data] :as params}]
|
||||
(letfn [(generate-missing! [data]
|
||||
(let [data (media/run {:cmd :generate-fonts :input data})]
|
||||
(when (and (not (contains? data "font/otf"))
|
||||
@@ -124,7 +121,7 @@
|
||||
content (-> (sto/content resource)
|
||||
(sto/wrap-with-hash hash))]
|
||||
{::sto/content content
|
||||
::sto/touched-at (dt/now)
|
||||
::sto/touched-at (ct/now)
|
||||
::sto/deduplicate? true
|
||||
:content-type mtype
|
||||
:bucket "team-font-variant"})))
|
||||
@@ -158,7 +155,7 @@
|
||||
:otf-file-id (:id otf)
|
||||
:ttf-file-id (:id ttf)}))]
|
||||
|
||||
(let [data (px/invoke! executor (partial generate-missing! data))
|
||||
(let [data (generate-missing! data)
|
||||
assets (persist-fonts-files! data)
|
||||
result (insert-font-variant! assets)]
|
||||
(vary-meta result assoc ::audit/replace-props (update params :data (comp vec keys))))))
|
||||
@@ -217,7 +214,7 @@
|
||||
{::sql/for-update true})
|
||||
|
||||
delay (ldel/get-deletion-delay team)
|
||||
tnow (dt/in-future delay)]
|
||||
tnow (ct/in-future delay)]
|
||||
|
||||
(teams/check-edition-permissions! (:permissions team))
|
||||
|
||||
@@ -261,7 +258,7 @@
|
||||
|
||||
(teams/check-edition-permissions! (:permissions team))
|
||||
(db/update! conn :team-font-variant
|
||||
{:deleted-at (dt/in-future delay)}
|
||||
{:deleted-at (ct/in-future delay)}
|
||||
{:id (:id variant)}
|
||||
{::db/return-keys false})
|
||||
|
||||
|
||||
@@ -38,7 +38,7 @@
|
||||
::doc/added "1.15"
|
||||
::doc/module :auth
|
||||
::sm/params schema:login-with-ldap}
|
||||
[{:keys [::setup/props ::ldap/provider] :as cfg} params]
|
||||
[{:keys [::ldap/provider] :as cfg} params]
|
||||
(when-not provider
|
||||
(ex/raise :type :restriction
|
||||
:code :ldap-not-initialized
|
||||
@@ -60,18 +60,18 @@
|
||||
;; user comes from team-invitation process; in this case,
|
||||
;; regenerate token and send back to the user a new invitation
|
||||
;; token (and mark current session as logged).
|
||||
(let [claims (tokens/verify props {:token token :iss :team-invitation})
|
||||
(let [claims (tokens/verify cfg {:token token :iss :team-invitation})
|
||||
claims (assoc claims
|
||||
:member-id (:id profile)
|
||||
:member-email (:email profile))
|
||||
token (tokens/generate props claims)]
|
||||
token (tokens/generate cfg claims)]
|
||||
(-> {:invitation-token token}
|
||||
(rph/with-transform (session/create-fn cfg (:id profile)))
|
||||
(rph/with-transform (session/create-fn cfg profile))
|
||||
(rph/with-meta {::audit/props (:props profile)
|
||||
::audit/profile-id (:id profile)})))
|
||||
|
||||
(-> (profile/strip-private-attrs profile)
|
||||
(rph/with-transform (session/create-fn cfg (:id profile)))
|
||||
(rph/with-transform (session/create-fn cfg profile))
|
||||
(rph/with-meta {::audit/props (:props profile)
|
||||
::audit/profile-id (:id profile)}))))))
|
||||
|
||||
@@ -83,6 +83,6 @@
|
||||
(profile/clean-email)
|
||||
(profile/get-profile-by-email conn))
|
||||
(->> (assoc info :is-active true :is-demo false)
|
||||
(auth/create-profile! conn)
|
||||
(auth/create-profile-rels! conn)
|
||||
(auth/create-profile cfg)
|
||||
(auth/create-profile-rels conn)
|
||||
(profile/strip-private-attrs))))))
|
||||
|
||||
@@ -13,6 +13,7 @@
[app.common.exceptions :as ex]
[app.common.features :as cfeat]
[app.common.schema :as sm]
[app.common.time :as ct]
[app.common.uuid :as uuid]
[app.config :as cf]
[app.db :as db]

@@ -27,17 +28,14 @@
[app.setup :as-alias setup]
[app.setup.templates :as tmpl]
[app.storage.tmp :as tmp]
[app.util.services :as sv]
[app.util.time :as dt]
[app.worker :as-alias wrk]
[promesa.exec :as px]))
[app.util.services :as sv]))

;; --- COMMAND: Duplicate File

(defn duplicate-file
[{:keys [::db/conn ::bfc/timestamp] :as cfg} {:keys [profile-id file-id name reset-shared-flag] :as params}]
(let [;; We don't touch the original file on duplication
file (bfc/get-file cfg file-id)
file (bfc/get-file cfg file-id :realize? true)
project-id (:project-id file)
file (-> file
(update :id bfc/lookup-index)

@@ -104,7 +102,7 @@
(db/exec-one! conn ["SET CONSTRAINTS ALL DEFERRED"])

(binding [bfc/*state* (volatile! {:index {file-id (uuid/next)}})]
(duplicate-file (assoc cfg ::bfc/timestamp (dt/now))
(duplicate-file (assoc cfg ::bfc/timestamp (ct/now))
(-> params
(assoc :profile-id profile-id)
(assoc :reset-shared-flag true)))))))

@@ -164,7 +162,7 @@
(db/tx-run! cfg (fn [cfg]
;; Defer all constraints
(db/exec-one! cfg ["SET CONSTRAINTS ALL DEFERRED"])
(-> (assoc cfg ::bfc/timestamp (dt/now))
(-> (assoc cfg ::bfc/timestamp (ct/now))
(duplicate-project (assoc params :profile-id profile-id))))))

(defn duplicate-team

@@ -313,15 +311,14 @@

;; Update the modification date of the all affected projects
;; ensuring that the destination project is the most recent one.
(doseq [project-id (into (list project-id) source)]

;; NOTE: as this is executed on virtual thread, sleeping does
;; not causes major issues, and allows an easy way to set a
;; trully different modification date to each file.
(px/sleep 10)
(db/update! conn :project
{:modified-at (dt/now)}
{:id project-id}))
(loop [project-ids (into (list project-id) source)
modified-at (ct/now)]
(when-let [project-id (first project-ids)]
(db/update! conn :project
{:modified-at modified-at}
{:id project-id})
(recur (rest project-ids)
(ct/plus modified-at 10))))

nil))

@@ -396,12 +393,7 @@
;; --- COMMAND: Clone Template

(defn clone-template
[{:keys [::db/pool ::wrk/executor] :as cfg} {:keys [project-id profile-id] :as params} template]

;; NOTE: the importation process performs some operations
;; that are not very friendly with virtual threads, and for
;; avoid unexpected blocking of other concurrent operations
;; we dispatch that operation to a dedicated executor.
[{:keys [::db/pool] :as cfg} {:keys [project-id profile-id] :as params} template]
(let [template (tmp/tempfile-from template
:prefix "penpot.template."
:suffix ""

@@ -419,13 +411,13 @@
(assoc ::bfc/features (cfeat/get-team-enabled-features cf/flags team)))

result (if (= format :binfile-v3)
(px/invoke! executor (partial bf.v3/import-files! cfg))
(px/invoke! executor (partial bf.v1/import-files! cfg)))]
(bf.v3/import-files! cfg)
(bf.v1/import-files! cfg))]

(db/tx-run! cfg
(fn [{:keys [::db/conn] :as cfg}]
(db/update! conn :project
{:modified-at (dt/now)}
{:modified-at (ct/now)}
{:id project-id}
{::db/return-keys false})
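The hunk above swaps the doseq plus px/sleep approach for a loop that threads an explicitly increasing modified-at value through each update. As a minimal sketch of that pattern only, using plain java.time rather than Penpot's ct helpers and assuming a 10 ms step (read off the (ct/plus modified-at 10) call), the idea is to pair each id with a strictly later timestamp:

;; Illustrative sketch only, not Penpot code: pair each project id with a
;; timestamp 10 ms later than the previous one, so the last id processed
;; carries the most recent modification date.
(import 'java.time.Instant)

(defn monotonic-timestamps
  [project-ids]
  (let [now (Instant/now)]
    (map (fn [id offset] [id (.plusMillis now offset)])
         project-ids
         (iterate #(+ % 10) 0))))

;; (monotonic-timestamps [:source-a :source-b :destination])
;; => three [id timestamp] pairs spaced 10 ms apart, the last one newest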
@@ -7,13 +7,10 @@
(ns app.rpc.commands.media
(:require
[app.common.data :as d]
[app.common.exceptions :as ex]
[app.common.media :as cm]
[app.common.schema :as sm]
[app.common.time :as ct]
[app.common.uuid :as uuid]
[app.config :as cf]
[app.db :as db]
[app.http.client :as http]
[app.loggers.audit :as-alias audit]
[app.media :as media]
[app.rpc :as-alias rpc]

@@ -21,16 +18,7 @@
[app.rpc.commands.files :as files]
[app.rpc.doc :as-alias doc]
[app.storage :as sto]
[app.storage.tmp :as tmp]
[app.util.services :as sv]
[app.util.time :as dt]
[app.worker :as-alias wrk]
[cuerdas.core :as str]
[datoteka.io :as io]
[promesa.exec :as px]))

(def default-max-file-size
(* 1024 1024 10)) ; 10 MiB
[app.util.services :as sv]))

(def thumbnail-options
{:width 100

@@ -48,7 +36,7 @@
[:file-id ::sm/uuid]
[:is-local ::sm/boolean]
[:name [:string {:max 250}]]
[:content ::media/upload]])
[:content media/schema:upload]])

(sv/defmethod ::upload-file-media-object
{::doc/added "1.17"

@@ -67,7 +55,7 @@
mobj (create-file-media-object cfg params)]

(db/update! conn :file
{:modified-at (dt/now)
{:modified-at (ct/now)
:has-media-trimmed false}
{:id file-id}
{::db/return-keys false})

@@ -153,9 +141,9 @@
(assoc ::image (process-main-image info)))))

(defn- create-file-media-object
[{:keys [::sto/storage ::db/conn ::wrk/executor] :as cfg}
[{:keys [::sto/storage ::db/conn] :as cfg}
{:keys [id file-id is-local name content]}]
(let [result (px/invoke! executor (partial process-image content))
(let [result (process-image content)
image (sto/put-object! storage (::image result))
thumb (when-let [params (::thumb result)]
(sto/put-object! storage params))]

@@ -192,63 +180,19 @@
mobj (create-file-media-object-from-url cfg (assoc params :profile-id profile-id))]

(db/update! pool :file
{:modified-at (dt/now)
{:modified-at (ct/now)
:has-media-trimmed false}
{:id file-id}
{::db/return-keys false})

mobj))

(defn download-image
[{:keys [::http/client]} uri]
(letfn [(parse-and-validate [{:keys [headers] :as response}]
(let [size (some-> (get headers "content-length") d/parse-integer)
mtype (get headers "content-type")
format (cm/mtype->format mtype)
max-size (cf/get :media-max-file-size default-max-file-size)]

(when-not size
(ex/raise :type :validation
:code :unknown-size
:hint "seems like the url points to resource with unknown size"))

(when (> size max-size)
(ex/raise :type :validation
:code :file-too-large
:hint (str/ffmt "the file size % is greater than the maximum %"
size
default-max-file-size)))

(when (nil? format)
(ex/raise :type :validation
:code :media-type-not-allowed
:hint "seems like the url points to an invalid media object"))

{:size size :mtype mtype :format format}))]

(let [{:keys [body] :as response} (http/req! client
{:method :get :uri uri}
{:response-type :input-stream :sync? true})
{:keys [size mtype]} (parse-and-validate response)
path (tmp/tempfile :prefix "penpot.media.download.")
written (io/write* path body :size size)]

(when (not= written size)
(ex/raise :type :internal
:code :mismatch-write-size
:hint "unexpected state: unable to write to file"))

{:filename "tempfile"
:size size
:path path
:mtype mtype})))

(defn- create-file-media-object-from-url
[cfg {:keys [url name] :as params}]
(let [content (download-image cfg url)
(let [content (media/download-image cfg url)
params (-> params
(assoc :content content)
(assoc :name (or name (:filename content))))]
(assoc :name (d/nilv name "unknown")))]

;; NOTE: we use the climit here in a dynamic invocation because we
;; don't want saturate the process-image limit with IO (download
@@ -10,6 +10,7 @@
[app.common.data :as d]
[app.common.exceptions :as ex]
[app.common.schema :as sm]
[app.common.time :as ct]
[app.common.types.plugins :refer [schema:plugin-registry]]
[app.common.uuid :as uuid]
[app.config :as cf]

@@ -28,18 +29,14 @@
[app.storage :as sto]
[app.tokens :as tokens]
[app.util.services :as sv]
[app.util.time :as dt]
[app.worker :as wrk]
[cuerdas.core :as str]
[promesa.exec :as px]))
[cuerdas.core :as str]))

(declare check-profile-existence!)
(declare decode-row)
(declare derive-password)
(declare filter-props)
(declare get-profile)
(declare strip-private-attrs)
(declare verify-password)

(def schema:props-notifications
[:map {:title "props-notifications"}

@@ -70,8 +67,8 @@
[:is-blocked {:optional true} ::sm/boolean]
[:is-demo {:optional true} ::sm/boolean]
[:is-muted {:optional true} ::sm/boolean]
[:created-at {:optional true} ::sm/inst]
[:modified-at {:optional true} ::sm/inst]
[:created-at {:optional true} ::ct/inst]
[:modified-at {:optional true} ::ct/inst]
[:default-project-id {:optional true} ::sm/uuid]
[:default-team-id {:optional true} ::sm/uuid]
[:props {:optional true} schema:props]])

@@ -110,7 +107,9 @@
(defn get-profile
"Get profile by id. Throws not-found exception if no profile found."
[conn id & {:as opts}]
(-> (db/get-by-id conn :profile id opts)
;; NOTE: We need to set ::db/remove-deleted to false because demo profiles
;; are created with a set deleted-at value
(-> (db/get-by-id conn :profile id (assoc opts ::db/remove-deleted false))
(decode-row)))

;; --- MUTATION: Update Profile (own)

@@ -131,9 +130,7 @@
;; NOTE: we need to retrieve the profile independently if we use
;; it or not for explicit locking and avoid concurrent updates of
;; the same row/object.
(let [profile (-> (db/get-by-id conn :profile profile-id ::sql/for-update true)
(decode-row))

(let [profile (get-profile conn profile-id ::db/for-update true)
;; Update the profile map with direct params
profile (-> profile
(assoc :fullname fullname)

@@ -143,9 +140,9 @@
(db/update! conn :profile
{:fullname fullname
:lang lang
:theme theme
:props (db/tjson (:props profile))}
{:id profile-id})
:theme theme}
{:id profile-id}
{::db/return-keys false})

(-> profile
(strip-private-attrs)

@@ -157,7 +154,6 @@

(declare validate-password!)
(declare update-profile-password!)
(declare invalidate-profile-session!)

(def ^:private
schema:update-profile-password

@@ -172,8 +168,7 @@
::climit/id :auth/global
::db/transaction true}
[cfg {:keys [::rpc/profile-id password] :as params}]
(let [profile (validate-password! cfg (assoc params :profile-id profile-id))
session-id (::session/id params)]
(let [profile (validate-password! cfg (assoc params :profile-id profile-id))]

(when (= (:email profile) (str/lower (:password params)))
(ex/raise :type :validation

@@ -181,20 +176,18 @@
:hint "you can't use your email as password"))

(update-profile-password! cfg (assoc profile :password password))
(invalidate-profile-session! cfg profile-id session-id)
nil))

(defn- invalidate-profile-session!
"Removes all sessions except the current one."
[{:keys [::db/conn]} profile-id session-id]
(let [sql "delete from http_session where profile_id = ? and id != ?"]
(:next.jdbc/update-count (db/exec-one! conn [sql profile-id session-id]))))
(->> (rph/get-request params)
(session/get-session)
(session/invalidate-others cfg))

nil))

(defn- validate-password!
[{:keys [::db/conn] :as cfg} {:keys [profile-id old-password] :as params}]
(let [profile (db/get-by-id conn :profile profile-id ::sql/for-update true)]
(when (and (not= (:password profile) "!")
(not (:valid (verify-password cfg old-password (:password profile)))))
(not (:valid (auth/verify-password old-password (:password profile)))))
(ex/raise :type :validation
:code :old-password-not-match))
profile))

@@ -203,7 +196,7 @@
[{:keys [::db/conn] :as cfg} {:keys [id password] :as profile}]
(when-not (db/read-only? conn)
(db/update! conn :profile
{:password (derive-password cfg password)}
{:password (auth/derive-password password)}
{:id id})
nil))

@@ -228,21 +221,22 @@

(defn- update-notifications!
[{:keys [::db/conn] :as cfg} {:keys [profile-id dashboard-comments email-comments email-invites]}]
(let [profile (get-profile conn profile-id)
(let [profile
(get-profile conn profile-id ::db/for-update true)

notifications
{:dashboard-comments dashboard-comments
:email-comments email-comments
:email-invites email-invites}]
:email-invites email-invites}

(db/update!
conn :profile
{:props
(-> (:props profile)
(assoc :notifications notifications)
(db/tjson))}
{:id (:id profile)})
props
(-> (get profile :props)
(assoc :notifications notifications))]

(db/update! conn :profile
{:props (db/tjson props)}
{:id profile-id}
{::db/return-keys false})
nil))

;; --- MUTATION: Update Photo

@@ -253,7 +247,7 @@
(def ^:private
schema:update-profile-photo
[:map {:title "update-profile-photo"}
[:file ::media/upload]])
[:file media/schema:upload]])

(sv/defmethod ::update-profile-photo
{:doc/added "1.1"

@@ -286,9 +280,9 @@
:file-path (str (:path file))
:file-mtype (:mtype file)}}))))

(defn- generate-thumbnail!
[_ file]
(let [input (media/run {:cmd :info :input file})
(defn- generate-thumbnail
[_ input]
(let [input (media/run {:cmd :info :input input})
thumb (media/run {:cmd :profile-thumbnail
:format :jpeg
:quality 85

@@ -304,13 +298,12 @@
:content-type (:mtype thumb)}))

(defn upload-photo
[{:keys [::sto/storage ::wrk/executor] :as cfg} {:keys [file] :as params}]
[{:keys [::sto/storage] :as cfg} {:keys [file] :as params}]
(let [params (-> cfg
(assoc ::climit/id [[:process-image/by-profile (:profile-id params)]
[:process-image/global]])
(assoc ::climit/label "upload-photo")
(assoc ::climit/executor executor)
(climit/invoke! generate-thumbnail! file))]
(climit/invoke! generate-thumbnail file))]
(sto/put-object! storage params)))

;; --- MUTATION: Request Email Change

@@ -350,15 +343,15 @@

(defn- request-email-change!
[{:keys [::db/conn] :as cfg} {:keys [profile email] :as params}]
(let [token (tokens/generate (::setup/props cfg)
(let [token (tokens/generate cfg
{:iss :change-email
:exp (dt/in-future "15m")
:exp (ct/in-future "15m")
:profile-id (:id profile)
:email email})
ptoken (tokens/generate (::setup/props cfg)
ptoken (tokens/generate cfg
{:iss :profile-identity
:profile-id (:id profile)
:exp (dt/in-future {:days 30})})]
:exp (ct/in-future {:days 30})})]

(when (not= email (:email profile))
(check-profile-existence! conn params))

@@ -411,7 +404,7 @@

(defn update-profile-props
[{:keys [::db/conn] :as cfg} profile-id props]
(let [profile (get-profile conn profile-id ::sql/for-update true)
(let [profile (get-profile conn profile-id ::db/for-update true)
props (reduce-kv (fn [props k v]
;; We don't accept namespaced keys
(if (simple-ident? k)

@@ -424,16 +417,17 @@

(db/update! conn :profile
{:props (db/tjson props)}
{:id profile-id})
{:id profile-id}
{::db/return-keys false})

(filter-props props)))

(sv/defmethod ::update-profile-props
{::doc/added "1.0"
::sm/params schema:update-profile-props}
::sm/params schema:update-profile-props
::db/transaction true}
[cfg {:keys [::rpc/profile-id props]}]
(db/tx-run! cfg (fn [cfg]
(update-profile-props cfg profile-id props))))
(update-profile-props cfg profile-id props))

;; --- MUTATION: Delete Profile

@@ -444,7 +438,7 @@
::db/transaction true}
[{:keys [::db/conn] :as cfg} {:keys [::rpc/profile-id] :as params}]
(let [teams (get-owned-teams conn profile-id)
deleted-at (dt/now)]
deleted-at (ct/now)]

;; If we found owned teams with participants, we don't allow
;; delete profile until the user properly transfer ownership or

@@ -471,6 +465,29 @@
(-> (rph/wrap nil)
(rph/with-transform (session/delete-fn cfg)))))

(def sql:get-subscription-editors
"SELECT DISTINCT
p.id,
p.fullname AS name,
p.email AS email
FROM team_profile_rel AS tpr1
JOIN team as t
ON tpr1.team_id = t.id
JOIN team_profile_rel AS tpr2
ON (tpr1.team_id = tpr2.team_id)
JOIN profile AS p
ON (tpr2.profile_id = p.id)
WHERE tpr1.profile_id = ?
AND tpr1.is_owner IS true
AND tpr2.can_edit IS true
AND t.deleted_at IS NULL")

(sv/defmethod ::get-subscription-usage
{::doc/added "2.9"}
[cfg {:keys [::rpc/profile-id]}]
(let [editors (db/exec! cfg [sql:get-subscription-editors profile-id])]
{:editors editors}))

;; --- HELPERS

(def sql:owned-teams

@@ -528,15 +545,6 @@
[props]
(into {} (filter (fn [[k _]] (simple-ident? k))) props))

(defn derive-password
[{:keys [::wrk/executor]} password]
(when password
(px/invoke! executor (partial auth/derive-password password))))

(defn verify-password
[{:keys [::wrk/executor]} password password-data]
(px/invoke! executor (partial auth/verify-password password password-data)))

(defn decode-row
[{:keys [props] :as row}]
(cond-> row
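The update-notifications! hunk above boils down to a read-modify-write of the profile props map, which is then serialized back to the props column with db/tjson. A minimal sketch of just the merge step, as plain Clojure data (the keys come from the hunk; the example values are invented):

;; Illustrative sketch only, not Penpot code: merge the three notification
;; flags into the existing props map under :notifications.
(defn merge-notifications
  [props {:keys [dashboard-comments email-comments email-invites]}]
  (assoc props :notifications
         {:dashboard-comments dashboard-comments
          :email-comments email-comments
          :email-invites email-invites}))

;; (merge-notifications {:lang "en"}
;;                      {:dashboard-comments :all
;;                       :email-comments :partial
;;                       :email-invites :none})
;; => {:lang "en", :notifications {:dashboard-comments :all, ...}}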
@@ -9,6 +9,7 @@
[app.common.data.macros :as dm]
[app.common.exceptions :as ex]
[app.common.schema :as sm]
[app.common.time :as ct]
[app.db :as db]
[app.db.sql :as-alias sql]
[app.features.logical-deletion :as ldel]

@@ -21,7 +22,6 @@
[app.rpc.permissions :as perms]
[app.rpc.quotes :as quotes]
[app.util.services :as sv]
[app.util.time :as dt]
[app.worker :as wrk]))

;; --- Check Project Permissions

@@ -70,7 +70,27 @@

;; --- QUERY: Get projects

(declare get-projects)
(def ^:private sql:projects
"SELECT p.*,
coalesce(tpp.is_pinned, false) as is_pinned,
(SELECT count(*) FROM file AS f
WHERE f.project_id = p.id
AND f.deleted_at is null) AS count,
(SELECT count(*) FROM file AS f
WHERE f.project_id = p.id) AS total_count
FROM project AS p
INNER JOIN team AS t ON (t.id = p.team_id)
LEFT JOIN team_project_profile_rel AS tpp
ON (tpp.project_id = p.id AND
tpp.team_id = p.team_id AND
tpp.profile_id = ?)
WHERE p.team_id = ?
AND t.deleted_at is null
ORDER BY p.modified_at DESC")

(defn get-projects
[conn profile-id team-id]
(db/exec! conn [sql:projects profile-id team-id]))

(def ^:private schema:get-projects
[:map {:title "get-projects"}

@@ -78,32 +98,11 @@

(sv/defmethod ::get-projects
{::doc/added "1.18"
::doc/changes [["2.12" "This endpoint now return deleted but recoverable projects"]]
::sm/params schema:get-projects}
[{:keys [::db/pool]} {:keys [::rpc/profile-id team-id]}]
(dm/with-open [conn (db/open pool)]
(teams/check-read-permissions! conn profile-id team-id)
(get-projects conn profile-id team-id)))

(def sql:projects
"select p.*,
coalesce(tpp.is_pinned, false) as is_pinned,
(select count(*) from file as f
where f.project_id = p.id
and deleted_at is null) as count
from project as p
inner join team as t on (t.id = p.team_id)
left join team_project_profile_rel as tpp
on (tpp.project_id = p.id and
tpp.team_id = p.team_id and
tpp.profile_id = ?)
where p.team_id = ?
and p.deleted_at is null
and t.deleted_at is null
order by p.modified_at desc")

(defn get-projects
[conn profile-id team-id]
(db/exec! conn [sql:projects profile-id team-id]))
[cfg {:keys [::rpc/profile-id team-id]}]
(teams/check-read-permissions! cfg profile-id team-id)
(get-projects cfg profile-id team-id))

;; --- QUERY: Get all projects

@@ -170,12 +169,19 @@
;; --- MUTATION: Create Project

(defn- create-project
[{:keys [::db/conn] :as cfg} {:keys [profile-id team-id] :as params}]
(let [project (teams/create-project conn params)]
[{:keys [::db/conn] :as cfg} {:keys [::rpc/request-at profile-id team-id] :as params}]
(assert (ct/inst? request-at) "expect request-at assigned")
(let [params (-> params
(assoc :created-at request-at)
(assoc :modified-at request-at))
project (teams/create-project conn params)
timestamp (::rpc/request-at params)]
(teams/create-project-role conn profile-id (:id project) :owner)
(db/insert! conn :team-project-profile-rel
{:project-id (:id project)
:profile-id profile-id
:created-at timestamp
:modified-at timestamp
:team-id team-id
:is-pinned false})
(assoc project :is-pinned false)))

@@ -218,7 +224,7 @@
(sv/defmethod ::update-project-pin
{::doc/added "1.18"
::sm/params schema:update-project-pin
::webhooks/batch-timeout (dt/duration "5s")
::webhooks/batch-timeout (ct/duration "5s")
::webhooks/batch-key (webhooks/key-fn ::rpc/profile-id :id)
::webhooks/event? true
::db/transaction true}

@@ -257,7 +263,7 @@
[conn team project-id]
(let [delay (ldel/get-deletion-delay team)
project (db/update! conn :project
{:deleted-at (dt/in-future delay)}
{:deleted-at (ct/in-future delay)}
{:id project-id}
{::db/return-keys true})]
@@ -11,7 +11,8 @@
[app.common.exceptions :as ex]
[app.common.features :as cfeat]
[app.common.schema :as sm]
[app.common.types.team :as tt]
[app.common.time :as ct]
[app.common.types.team :as types.team]
[app.common.uuid :as uuid]
[app.config :as cf]
[app.db :as db]

@@ -30,21 +31,20 @@
[app.setup :as-alias setup]
[app.storage :as sto]
[app.util.services :as sv]
[app.util.time :as dt]
[app.worker :as wrk]
[clojure.set :as set]))

;; --- Helpers & Specs

(def ^:private sql:team-permissions
"select tpr.is_owner,
"SELECT tpr.is_owner,
tpr.is_admin,
tpr.can_edit
from team_profile_rel as tpr
join team as t on (t.id = tpr.team_id)
where tpr.profile_id = ?
and tpr.team_id = ?
and t.deleted_at is null")
FROM team_profile_rel AS tpr
JOIN team AS t ON (t.id = tpr.team_id)
WHERE tpr.profile_id = ?
AND tpr.team_id = ?
AND t.deleted_at IS NULL")

(defn get-permissions
[conn profile-id team-id]

@@ -443,13 +443,18 @@
[:team-id ::sm/uuid]])

(def sql:team-invitations
"select email_to as email, role, (valid_until < now()) as expired
from team_invitation where team_id = ? order by valid_until desc, created_at desc")
"SELECT email_to AS email,
role,
(valid_until < ?::timestamptz) AS expired
FROM team_invitation
WHERE team_id = ?
ORDER BY valid_until DESC, created_at DESC")

(defn get-team-invitations
[conn team-id]
(->> (db/exec! conn [sql:team-invitations team-id])
(mapv #(update % :role keyword))))
(let [now (ct/now)]
(->> (db/exec! conn [sql:team-invitations now team-id])
(mapv #(update % :role keyword)))))

(sv/defmethod ::get-team-invitations
{::doc/added "1.17"

@@ -503,7 +508,7 @@

(let [features (-> (cfeat/get-enabled-features cf/flags)
(set/difference cfeat/frontend-only-features)
(cfeat/check-client-features! (:features params)))
(set/difference cfeat/no-team-inheritable-features))
params (-> params
(assoc :profile-id profile-id)
(assoc :features features))

@@ -629,7 +634,7 @@

;; assign owner role to new profile
(db/update! conn :team-profile-rel
(get tt/permissions-for-role :owner)
(get types.team/permissions-for-role :owner)
{:team-id id :profile-id reassign-to}))

;; and finally, if all other conditions does not match and the

@@ -666,7 +671,7 @@

(let [delay (ldel/get-deletion-delay team)
team (db/update! conn :team
{:deleted-at (dt/in-future delay)}
{:deleted-at (ct/in-future delay)}
{:id id}
{::db/return-keys true})]

@@ -742,7 +747,7 @@
:team-id team-id
:role role})

(let [params (get tt/permissions-for-role role)]
(let [params (get types.team/permissions-for-role role)]
;; Only allow single owner on team
(when (= role :owner)
(db/update! conn :team-profile-rel

@@ -760,7 +765,7 @@
[:map {:title "update-team-member-role"}
[:team-id ::sm/uuid]
[:member-id ::sm/uuid]
[:role ::tt/role]])
[:role types.team/schema:role]])

(sv/defmethod ::update-team-member-role
{::doc/added "1.17"

@@ -810,7 +815,7 @@
(def ^:private schema:update-team-photo
[:map {:title "update-team-photo"}
[:team-id ::sm/uuid]
[:file ::media/upload]])
[:file media/schema:upload]])

(sv/defmethod ::update-team-photo
{::doc/added "1.17"
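In the get-team-invitations hunk above, the expired flag is now computed against a timestamp bound as a query parameter instead of the database's now(). A rough sketch of the same call shape with plain next.jdbc (not Penpot's db wrapper; the function name and datasource argument are assumptions):

;; Illustrative sketch only, not Penpot code: bind the current timestamp
;; as a parameter rather than calling now() inside the query.
(require '[next.jdbc :as jdbc])

(defn fetch-invitations
  [ds team-id]
  (let [now (java.sql.Timestamp/from (java.time.Instant/now))]
    (jdbc/execute! ds
                   [(str "SELECT email_to AS email, role, "
                         "(valid_until < ?) AS expired "
                         "FROM team_invitation WHERE team_id = ? "
                         "ORDER BY valid_until DESC, created_at DESC")
                    now team-id])))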
Some files were not shown because too many files have changed in this diff.