mirror of
https://github.com/penpot/penpot.git
synced 2026-01-06 13:28:57 -05:00
Compare commits
560 Commits
1.10.3-bet
...
1.12.0-bet
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
fae79d67e6 | ||
|
|
271f69d59d | ||
|
|
6563cd9c8b | ||
|
|
e60b8a7aef | ||
|
|
a644599b16 | ||
|
|
5d2715dd32 | ||
|
|
1bad233e2f | ||
|
|
f64b1d3651 | ||
|
|
eb57c2f980 | ||
|
|
ecd491cd09 | ||
|
|
dead3138b3 | ||
|
|
0416082d4d | ||
|
|
05c77d0248 | ||
|
|
2fc4c30bed | ||
|
|
d2590c7651 | ||
|
|
b4c87ad0b9 | ||
|
|
37a35b1827 | ||
|
|
24a0b4445e | ||
|
|
87c1bc4bdb | ||
|
|
e15f5bb432 | ||
|
|
496ba433e9 | ||
|
|
0b0ae756a3 | ||
|
|
0ade0405f5 | ||
|
|
aeed535f1b | ||
|
|
974084a9ca | ||
|
|
88706534c2 | ||
|
|
70def21153 | ||
|
|
46bfb2aacd | ||
|
|
7cf27ac86d | ||
|
|
d24f16563f | ||
|
|
bb68838fa4 | ||
|
|
aed6a8a5ff | ||
|
|
96facc5100 | ||
|
|
6486b24c8b | ||
|
|
75a8f85ebb | ||
|
|
3d8f757712 | ||
|
|
4efd8b7d3f | ||
|
|
5d17933593 | ||
|
|
206778021f | ||
|
|
4a262de550 | ||
|
|
350663b7ce | ||
|
|
f1db0fea03 | ||
|
|
256ed7410f | ||
|
|
09a4cb30ec | ||
|
|
aa3826c389 | ||
|
|
b91042c1e5 | ||
|
|
7eed8c5ee5 | ||
|
|
3207860374 | ||
|
|
b3bb8b6692 | ||
|
|
5b8b13c94c | ||
|
|
e8426006e3 | ||
|
|
116fafd0e1 | ||
|
|
e9fe1800e0 | ||
|
|
82796822d1 | ||
|
|
ce61b783fb | ||
|
|
9b78b2a432 | ||
|
|
321b2c7c23 | ||
|
|
dee397615c | ||
|
|
ef9339f6f1 | ||
|
|
f7f32408fc | ||
|
|
d4e6992442 | ||
|
|
420ece7005 | ||
|
|
741d2b3f3c | ||
|
|
c8bf319b39 | ||
|
|
34df52be5f | ||
|
|
fc2399a885 | ||
|
|
699ec93ca4 | ||
|
|
10598063d1 | ||
|
|
db1e9574cd | ||
|
|
af74a1575b | ||
|
|
03242e1a9c | ||
|
|
dcbd89ff7c | ||
|
|
2312561041 | ||
|
|
b591fbecf0 | ||
|
|
3fbb440436 | ||
|
|
d358185a04 | ||
|
|
8babb59f75 | ||
|
|
3461ec2281 | ||
|
|
3dd94bd362 | ||
|
|
827c2140b7 | ||
|
|
5a5222a97a | ||
|
|
bea3699451 | ||
|
|
93174f54a3 | ||
|
|
e1348725c1 | ||
|
|
528839cde2 | ||
|
|
c5c331ee30 | ||
|
|
69effa37a3 | ||
|
|
4c7a781228 | ||
|
|
62a67bdb94 | ||
|
|
c5c0b36f28 | ||
|
|
0d48c758df | ||
|
|
4856413b24 | ||
|
|
a1586280a9 | ||
|
|
00950b2c97 | ||
|
|
79666bd51a | ||
|
|
ca284a86a3 | ||
|
|
ee5b341d0e | ||
|
|
85cab5031d | ||
|
|
2f7029516b | ||
|
|
a1da4d4233 | ||
|
|
24724e3340 | ||
|
|
048ab9a0fc | ||
|
|
40b005f46e | ||
|
|
ae2a99acb0 | ||
|
|
a81b6db093 | ||
|
|
39b05f5f9f | ||
|
|
979f61df99 | ||
|
|
e665f4e285 | ||
|
|
2c25dfcf1b | ||
|
|
0632028579 | ||
|
|
95b9085258 | ||
|
|
cdc91feb28 | ||
|
|
4caf278da5 | ||
|
|
809a3420c1 | ||
|
|
af8e9058a3 | ||
|
|
2b1c8cafe9 | ||
|
|
1abcd5819b | ||
|
|
76b34bb600 | ||
|
|
67c6a042a0 | ||
|
|
72c2a213b4 | ||
|
|
ec1cc8ec64 | ||
|
|
fbbb079599 | ||
|
|
b8f2f3e34d | ||
|
|
39b29ee3f0 | ||
|
|
5f6cb1e0d7 | ||
|
|
46250e6fab | ||
|
|
fc2a26f249 | ||
|
|
341caa3489 | ||
|
|
38b7474f0b | ||
|
|
c91e2d13c0 | ||
|
|
7134bbf484 | ||
|
|
6b18b258a4 | ||
|
|
86e4826e48 | ||
|
|
6461ebe2b8 | ||
|
|
bfb23ad60b | ||
|
|
637d6a0076 | ||
|
|
cbb8d13570 | ||
|
|
2a6ba79e9a | ||
|
|
1e0dacfe9b | ||
|
|
b194c0c5d8 | ||
|
|
9789b7081a | ||
|
|
03052ddd28 | ||
|
|
779f685f72 | ||
|
|
1dee767762 | ||
|
|
5cac5eb26b | ||
|
|
b26cbeccca | ||
|
|
8d4612c683 | ||
|
|
e352c70013 | ||
|
|
8c3c9a8ca4 | ||
|
|
ada837f7e4 | ||
|
|
1599b2644a | ||
|
|
acc3d00fd5 | ||
|
|
0f459ede50 | ||
|
|
105cb6fa13 | ||
|
|
1797c702a7 | ||
|
|
5f580f10ca | ||
|
|
bd359f42f5 | ||
|
|
34bf73210e | ||
|
|
f1db4aae35 | ||
|
|
7710ffcbf1 | ||
|
|
e9f45a0d0a | ||
|
|
743c2c3385 | ||
|
|
6f714facf9 | ||
|
|
5f81c7bc2d | ||
|
|
72b00fa9af | ||
|
|
449756a0e4 | ||
|
|
75930a0ce9 | ||
|
|
a2c3b0926b | ||
|
|
57666e9173 | ||
|
|
37f4b83d96 | ||
|
|
5576b7568c | ||
|
|
99e067b863 | ||
|
|
5103624fe0 | ||
|
|
26e5d57ced | ||
|
|
b586f2552c | ||
|
|
0fbcec667c | ||
|
|
f40c58c64a | ||
|
|
d66619fe6d | ||
|
|
5c1b007c1b | ||
|
|
86c394f4ce | ||
|
|
90d130a3bc | ||
|
|
f185836fd4 | ||
|
|
4c851856ff | ||
|
|
bc2a0432b9 | ||
|
|
f72e140327 | ||
|
|
a633ed3c9a | ||
|
|
a8a6882708 | ||
|
|
1b76ed97e1 | ||
|
|
04f7169aef | ||
|
|
d83b362c9f | ||
|
|
b1d55348dc | ||
|
|
f8a46c56e9 | ||
|
|
420525cdf0 | ||
|
|
686cacd5ae | ||
|
|
0092806dda | ||
|
|
2f8c63505f | ||
|
|
d892be4971 | ||
|
|
59ed833abc | ||
|
|
110fb2e8db | ||
|
|
9f7a04e330 | ||
|
|
ccbc519c04 | ||
|
|
036860b91b | ||
|
|
7ac2a55315 | ||
|
|
f6cf8d2b1b | ||
|
|
16788d7ab7 | ||
|
|
3142d48f3c | ||
|
|
e1a88ae899 | ||
|
|
a2e80cee47 | ||
|
|
5f14769abc | ||
|
|
406c4063de | ||
|
|
b4bc30e56f | ||
|
|
3482d6c303 | ||
|
|
9dfd5c0bcc | ||
|
|
b2b3de2782 | ||
|
|
50c20e2290 | ||
|
|
a10dcbd918 | ||
|
|
6e0433a34b | ||
|
|
8833e19c7f | ||
|
|
663358bdae | ||
|
|
d9b1c0e2e6 | ||
|
|
39334b81ac | ||
|
|
62f7323acf | ||
|
|
3f89baa1fe | ||
|
|
f0fd1bb40c | ||
|
|
f303d7b33e | ||
|
|
d356a3fa56 | ||
|
|
64e7cad292 | ||
|
|
0766938f98 | ||
|
|
918829ad0a | ||
|
|
540e1fc492 | ||
|
|
ac30754a96 | ||
|
|
b470a0ebbf | ||
|
|
69daee4137 | ||
|
|
3d6c903273 | ||
|
|
bc04a0b9f0 | ||
|
|
bfef94dbfb | ||
|
|
9e06275945 | ||
|
|
6410bcf3c8 | ||
|
|
20baf02726 | ||
|
|
8f6fdf361b | ||
|
|
ffa134f824 | ||
|
|
b4bf6b9235 | ||
|
|
c3e37b0e04 | ||
|
|
374bba763b | ||
|
|
2d00e68b78 | ||
|
|
9a965dc693 | ||
|
|
b96ad5b37f | ||
|
|
07a0f67b32 | ||
|
|
c754a757eb | ||
|
|
dcd53183a8 | ||
|
|
5641132eb9 | ||
|
|
b4c23f3554 | ||
|
|
7385445aa8 | ||
|
|
5409f83167 | ||
|
|
43951aad69 | ||
|
|
9681d8c805 | ||
|
|
ff4d3cfeac | ||
|
|
8e4338c1c9 | ||
|
|
c27d709b6b | ||
|
|
8caa289586 | ||
|
|
f7568f6348 | ||
|
|
6a6f079a84 | ||
|
|
0f04b86316 | ||
|
|
1dae8a0771 | ||
|
|
9bc816fc1c | ||
|
|
11ea4c7aec | ||
|
|
0c53aa158b | ||
|
|
072e4a4f98 | ||
|
|
1b3b3b0ee6 | ||
|
|
d1e4f0de3e | ||
|
|
fd3f304e07 | ||
|
|
9e7551551f | ||
|
|
36bb5cbe01 | ||
|
|
f754c12e8c | ||
|
|
6f5916e334 | ||
|
|
13dd1cb6b6 | ||
|
|
eb4e7e0f0c | ||
|
|
7afb3e2c6d | ||
|
|
9cf5258053 | ||
|
|
56dfdaecb7 | ||
|
|
1d174a4379 | ||
|
|
2aeded1940 | ||
|
|
c23691284c | ||
|
|
f7f6515561 | ||
|
|
438c14d29d | ||
|
|
87351000ae | ||
|
|
0895a69bac | ||
|
|
4285972e41 | ||
|
|
d33542c4dc | ||
|
|
bda97adf4f | ||
|
|
b6f460940f | ||
|
|
aa0e8ed8d6 | ||
|
|
b99fa16b96 | ||
|
|
630d7a3220 | ||
|
|
03c91664cb | ||
|
|
13773d829a | ||
|
|
d9e6e9b017 | ||
|
|
5d8982c734 | ||
|
|
f13c82da2a | ||
|
|
363b0ba997 | ||
|
|
a4c45942c9 | ||
|
|
a86e3a8636 | ||
|
|
db61d579e6 | ||
|
|
e6e3f2cbd5 | ||
|
|
ffdd539233 | ||
|
|
ef17af38a1 | ||
|
|
6dedfaea2f | ||
|
|
cbb3783d84 | ||
|
|
327c095d79 | ||
|
|
88e222420c | ||
|
|
045eec072b | ||
|
|
5f3c381f88 | ||
|
|
6090cf6c68 | ||
|
|
9ac4239c11 | ||
|
|
da2a3b6883 | ||
|
|
b4accaad07 | ||
|
|
edaef0096a | ||
|
|
afba5ff083 | ||
|
|
8b8d614150 | ||
|
|
090dbfda10 | ||
|
|
04f5a6a9f9 | ||
|
|
d8311ac3fa | ||
|
|
9c7f4dfd98 | ||
|
|
8da66e1599 | ||
|
|
2927b0cfc6 | ||
|
|
4663c296cd | ||
|
|
9403f8fd6e | ||
|
|
badb5c6a9b | ||
|
|
e5430259e9 | ||
|
|
50fd44d3f2 | ||
|
|
a8249b73b6 | ||
|
|
a15f867059 | ||
|
|
4216e2e92b | ||
|
|
8ef20be9bd | ||
|
|
6413c9dddd | ||
|
|
eb10f075b9 | ||
|
|
cd55ed7c8d | ||
|
|
2fb96a1b7d | ||
|
|
c48da3d316 | ||
|
|
9488a9a1ad | ||
|
|
2feb22d3bd | ||
|
|
f74569506e | ||
|
|
6633d0b4fb | ||
|
|
6fb35b40d7 | ||
|
|
614d699098 | ||
|
|
8f4fbff40f | ||
|
|
aaf8d2a233 | ||
|
|
0eb2336bc6 | ||
|
|
f9cc9164b3 | ||
|
|
238ec60f89 | ||
|
|
363a82d068 | ||
|
|
4360c1fe4b | ||
|
|
1d575ece06 | ||
|
|
d246788a35 | ||
|
|
e9fa04dd1b | ||
|
|
8e57932966 | ||
|
|
51ea354bcb | ||
|
|
6334520c66 | ||
|
|
6354883a6f | ||
|
|
477f553675 | ||
|
|
1ded4b2b28 | ||
|
|
16c4116c15 | ||
|
|
f5cfbce1c2 | ||
|
|
7bbf98dfb1 | ||
|
|
533cac7881 | ||
|
|
6afc734e91 | ||
|
|
c4fb826d89 | ||
|
|
1321bdeac5 | ||
|
|
e0b7001a09 | ||
|
|
88120b83bd | ||
|
|
a952f7369c | ||
|
|
d4fab3b46c | ||
|
|
06b3499e7d | ||
|
|
fdd66bd513 | ||
|
|
3b5aaf21fa | ||
|
|
59c46833ed | ||
|
|
aee35cb456 | ||
|
|
4a55ee2965 | ||
|
|
4b490e3ca4 | ||
|
|
6727717d1a | ||
|
|
d08891cffa | ||
|
|
799a83ba73 | ||
|
|
261724e555 | ||
|
|
10e7d660ef | ||
|
|
bdfea7cda5 | ||
|
|
fdb1c5e1f9 | ||
|
|
71734df489 | ||
|
|
071b81eadd | ||
|
|
2abe3fde71 | ||
|
|
27e64ccaa8 | ||
|
|
c9185f265c | ||
|
|
79e5716f36 | ||
|
|
9f0e156916 | ||
|
|
d24d45f4cb | ||
|
|
bf55250ae9 | ||
|
|
36016ad9ef | ||
|
|
bf66b81702 | ||
|
|
758ffbf217 | ||
|
|
f24563503a | ||
|
|
a2dbc40571 | ||
|
|
a096b0777f | ||
|
|
87690a534c | ||
|
|
a70e416b0b | ||
|
|
cd1170c543 | ||
|
|
2962dc1faa | ||
|
|
535c1fd007 | ||
|
|
2bd94aff0e | ||
|
|
9ea90c3400 | ||
|
|
0ac5d85117 | ||
|
|
d3a83142ae | ||
|
|
d5886123d8 | ||
|
|
39c7bfb49f | ||
|
|
8479a6581d | ||
|
|
e5885e83eb | ||
|
|
914b41fcd4 | ||
|
|
224aa5b89a | ||
|
|
01c89f6554 | ||
|
|
f0e1bc1d59 | ||
|
|
7b487e1bc3 | ||
|
|
c394495a26 | ||
|
|
6dae420254 | ||
|
|
c69d7f50a3 | ||
|
|
ae9b95f81b | ||
|
|
493a7680e0 | ||
|
|
c28a2acfc7 | ||
|
|
60af960f42 | ||
|
|
4c86d5cfe3 | ||
|
|
99a6142134 | ||
|
|
b2211aec59 | ||
|
|
fa09fff2b5 | ||
|
|
0204cdab83 | ||
|
|
445195e9eb | ||
|
|
7f5b0f359c | ||
|
|
2d118ecc65 | ||
|
|
0ee34637f5 | ||
|
|
9554dfbc5e | ||
|
|
0cad1a1e7e | ||
|
|
888ffa1bcd | ||
|
|
219f9c478d | ||
|
|
a9904c6ada | ||
|
|
81cbc33dbb | ||
|
|
24062beebe | ||
|
|
f3548aff8c | ||
|
|
771bb20976 | ||
|
|
8072caeff1 | ||
|
|
d5568fcc25 | ||
|
|
0feccc9d1c | ||
|
|
e18ecb8c49 | ||
|
|
f5b87a9865 | ||
|
|
3b93434dd3 | ||
|
|
d522096caf | ||
|
|
6c67110dde | ||
|
|
963efc369b | ||
|
|
0df219c3ad | ||
|
|
a0d527f795 | ||
|
|
e44ea47497 | ||
|
|
9ee5a3159c | ||
|
|
06d41c552b | ||
|
|
7874971550 | ||
|
|
9925716134 | ||
|
|
1359a1aa7a | ||
|
|
6ae36982b6 | ||
|
|
136d269605 | ||
|
|
932c0ed4ad | ||
|
|
371875440f | ||
|
|
b01a9f2f95 | ||
|
|
0d2def102f | ||
|
|
beff3fe843 | ||
|
|
7a97c94f2b | ||
|
|
ce81908f02 | ||
|
|
76dafea8a6 | ||
|
|
86bbfde19e | ||
|
|
71d6f7b1a2 | ||
|
|
0c0ab612c0 | ||
|
|
73042115e0 | ||
|
|
0f7166d34a | ||
|
|
f35f2c95f0 | ||
|
|
4d280bdb6d | ||
|
|
47acab766d | ||
|
|
1cc3819e65 | ||
|
|
16fa6259ea | ||
|
|
95717c4c32 | ||
|
|
7564f27f95 | ||
|
|
565046aaa6 | ||
|
|
fb9b023fae | ||
|
|
b05908a760 | ||
|
|
3bbcd235e1 | ||
|
|
9d66984c62 | ||
|
|
9024408ed2 | ||
|
|
2b32e864fd | ||
|
|
626d0cba46 | ||
|
|
2a11e9962d | ||
|
|
7dffddd437 | ||
|
|
6a7600fd52 | ||
|
|
b897f202dd | ||
|
|
eb396f2367 | ||
|
|
95bf3e3af4 | ||
|
|
19944202fb | ||
|
|
2596ad27c3 | ||
|
|
ece914303a | ||
|
|
7a0c12e073 | ||
|
|
14b23b491f | ||
|
|
039b03249b | ||
|
|
3919cf4f86 | ||
|
|
319a9fd2de | ||
|
|
cf1f9f93aa | ||
|
|
0dd805da7f | ||
|
|
e7a1833c44 | ||
|
|
54f8487b46 | ||
|
|
b68d721b39 | ||
|
|
b9ccb4e52c | ||
|
|
c4a11f73a0 | ||
|
|
f96d4198c3 | ||
|
|
fe6a0ec5b8 | ||
|
|
e7b4010eba | ||
|
|
c4947d3737 | ||
|
|
8a8d677f85 | ||
|
|
baf4393310 | ||
|
|
723916d930 | ||
|
|
591d66564d | ||
|
|
79a2d522bf | ||
|
|
7e5b10eb3e | ||
|
|
896a07fa9a | ||
|
|
07e8bb00fb | ||
|
|
5d2742dd37 | ||
|
|
9ae3f1eb68 | ||
|
|
8c6e0cf43a | ||
|
|
1e220fd506 | ||
|
|
4ff7855fd4 | ||
|
|
eb57354109 | ||
|
|
a82a33cecf | ||
|
|
c90fc2a9bf | ||
|
|
c1a40e4aeb | ||
|
|
9999b8bfab | ||
|
|
cf62008acf | ||
|
|
1c959a6653 | ||
|
|
f566d2a0da | ||
|
|
0be2b2791f | ||
|
|
bf51e3db60 | ||
|
|
abca69f408 | ||
|
|
6eac9102c9 | ||
|
|
0a7da1b7f2 | ||
|
|
b4361cb202 | ||
|
|
d2d4090e27 | ||
|
|
583eb53c9d | ||
|
|
39246f2beb | ||
|
|
cd2d3d5fa3 | ||
|
|
589e646023 | ||
|
|
b7ba3098ae | ||
|
|
631c5ecae3 | ||
|
|
4962e45bd9 | ||
|
|
c57219a356 | ||
|
|
03e6a187c5 | ||
|
|
66b0039566 | ||
|
|
17da51440c | ||
|
|
40326177fd | ||
|
|
4ab0272fa6 | ||
|
|
fb33366c91 | ||
|
|
75352c9afe | ||
|
|
bfb30fe68d |
@@ -47,6 +47,13 @@ jobs:
|
||||
clj-kondo --version
|
||||
clj-kondo --parallel --lint src/
|
||||
|
||||
- run:
|
||||
name: frontend styles prettier
|
||||
working_directory: "./frontend"
|
||||
command: |
|
||||
yarn install
|
||||
yarn run lint-scss
|
||||
|
||||
- run:
|
||||
name: backend lint
|
||||
working_directory: "./backend"
|
||||
@@ -74,21 +81,27 @@ jobs:
|
||||
node target/tests.js
|
||||
|
||||
environment:
|
||||
JAVA_HOME: /usr/lib/jvm/openjdk16
|
||||
PATH: /usr/local/nodejs/bin/:/usr/local/bin:/bin:/usr/bin:/usr/lib/jvm/openjdk16/bin
|
||||
PATH: /usr/local/nodejs/bin/:/usr/local/bin:/bin:/usr/bin
|
||||
|
||||
# - run:
|
||||
# working_directory: "./common"
|
||||
# name: common tests (cljs)
|
||||
# command: |
|
||||
# yarn install
|
||||
# yarn run compile-test
|
||||
# node target/test.js
|
||||
#
|
||||
# environment:
|
||||
# PATH: /usr/local/nodejs/bin/:/usr/local/bin:/bin:/usr/bin
|
||||
|
||||
- run:
|
||||
working_directory: "./common"
|
||||
name: common tests
|
||||
name: common tests (clj)
|
||||
command: |
|
||||
yarn install
|
||||
clojure -M:dev:shadow-cljs compile test
|
||||
node target/tests.js
|
||||
clojure -X:dev:test
|
||||
|
||||
environment:
|
||||
JAVA_HOME: /usr/lib/jvm/openjdk16
|
||||
PATH: /usr/local/nodejs/bin/:/usr/local/bin:/bin:/usr/bin:/usr/lib/jvm/openjdk16/bin
|
||||
PATH: /usr/local/nodejs/bin/:/usr/local/bin:/bin:/usr/bin
|
||||
|
||||
- save_cache:
|
||||
paths:
|
||||
|
||||
@@ -10,7 +10,6 @@
|
||||
{:analyze-call
|
||||
{app.common.data/export hooks.export/export
|
||||
potok.core/reify hooks.export/potok-reify
|
||||
cljs.core/specify! hooks.export/clojure-specify
|
||||
app.util.services/defmethod hooks.export/service-defmethod
|
||||
}}
|
||||
|
||||
|
||||
51
.github/ISSUE_TEMPLATE/bug_report.md
vendored
51
.github/ISSUE_TEMPLATE/bug_report.md
vendored
@@ -8,49 +8,48 @@ assignees: ''
|
||||
|
||||
---
|
||||
|
||||
**Describe the bug**
|
||||
A clear and concise description of what the bug is.
|
||||
|
||||
**To Reproduce**
|
||||
|
||||
Steps to reproduce the behavior:
|
||||
1. Go to '...'
|
||||
2. Click on '....'
|
||||
3. Scroll down to '....'
|
||||
4. See error
|
||||
|
||||
**Expected behavior**
|
||||
|
||||
A clear and concise description of what you expected to happen.
|
||||
|
||||
**Actual behavior**
|
||||
|
||||
A clear and concise description of what happens instead; what the bug is.
|
||||
|
||||
**Screenshots**
|
||||
|
||||
If applicable, add screenshots to help explain your problem.
|
||||
|
||||
**Desktop (please complete the following information):**
|
||||
|
||||
- OS: (e.g. iOS)
|
||||
- Browser (e.g. chrome, safari)
|
||||
- Version (e.g. 22)
|
||||
- OS (e.g. iOS):
|
||||
- Browser & version (e.g. Chrome 89.0):
|
||||
|
||||
**Smartphone (please complete the following information):**
|
||||
|
||||
- Device: (e.g. iPhone6)
|
||||
- OS: (e.g. iOS8.1)
|
||||
- Browser (e.g. stock browser, safari)
|
||||
- Version (e.g. 22)
|
||||
- Device & model (e.g. iPhone 6):
|
||||
- OS & version (e.g. iOS 8.1):
|
||||
- Browser & version (e.g. stock browser 22):
|
||||
|
||||
**Environment (please complete the following information):**
|
||||
Specify if using SAAS (https://design.penpot.app) or self-hosted instance.
|
||||
- Host (e.g. https://design.penpot.app, local instance):
|
||||
|
||||
If self-hosted instance, add OS and runtime information to help explain your problem.
|
||||
*If self-hosted:*
|
||||
- OS Version (e.g. Ubuntu 16.04):
|
||||
- Docker / Docker-compose version (e.g. Docker version 18.03.0-ce, build 0520e24):
|
||||
- Image version (e.g. Alpine):
|
||||
|
||||
- OS Version: (e.g. Ubuntu 16.04)
|
||||
Docker commands or docker-compose file (if possible and if proceed.x):
|
||||
```
|
||||
|
||||
Also provide Docker commands or docker-compose file if possible and if proceed.x
|
||||
|
||||
- Docker / Docker-compose Version: (e.g. Docker version 18.03.0-ce, build 0520e24)
|
||||
- Image (e.g. alpine)
|
||||
|
||||
**Frontend Stack Trace (if self-hosted)**
|
||||
```
|
||||
|
||||
Frontend Stack Trace:
|
||||
<details>
|
||||
|
||||
```
|
||||
@@ -59,8 +58,7 @@ Also provide Docker commands or docker-compose file if possible and if proceed.x
|
||||
|
||||
</details>
|
||||
|
||||
**Backend Stack Trace (if self-hosted)**
|
||||
|
||||
Backend Stack Trace:
|
||||
<details>
|
||||
|
||||
```
|
||||
@@ -69,5 +67,6 @@ Also provide Docker commands or docker-compose file if possible and if proceed.x
|
||||
|
||||
</details>
|
||||
|
||||
**Additional context**
|
||||
Add any other context about the problem here.
|
||||
**Additional context:**
|
||||
|
||||
Any other context about the problem.
|
||||
|
||||
75
.gitignore
vendored
75
.gitignore
vendored
@@ -1,45 +1,54 @@
|
||||
figwheel_server.log
|
||||
*jar
|
||||
*-init.clj
|
||||
*.jar
|
||||
*.penpot
|
||||
*.orig
|
||||
.calva
|
||||
.clj-kondo
|
||||
.cpcache
|
||||
.lein-deps-sum
|
||||
.lein-failures
|
||||
.lein-repl-history
|
||||
.lein-plugins/
|
||||
.repl
|
||||
.lein-repl-history
|
||||
.lsp
|
||||
.nrepl-port
|
||||
.cpcache
|
||||
.nyc_output
|
||||
.rebel_readline_history
|
||||
/vendor/**/target
|
||||
/cd.md
|
||||
node_modules
|
||||
/backend/target/
|
||||
/backend/resources/public/media
|
||||
/backend/resources/public/assets
|
||||
.repl
|
||||
/.clj-kondo/.cache
|
||||
/_dump
|
||||
/backend/-
|
||||
/backend/assets/
|
||||
/backend/dist/
|
||||
/backend/logs/
|
||||
/backend/-
|
||||
/telemetry/
|
||||
/frontend/npm-debug.log
|
||||
/frontend/target/
|
||||
/frontend/dist/
|
||||
/frontend/out/
|
||||
/frontend/.shadow-cljs
|
||||
/frontend/resources/public/*
|
||||
/frontend/resources/fonts/experiments
|
||||
/exporter/target
|
||||
/exporter/.shadow-cljs
|
||||
/docker/images/bundle*
|
||||
/common/.shadow-cljs
|
||||
/common/target
|
||||
/.clj-kondo/.cache
|
||||
/backend/resources/public/assets
|
||||
/backend/resources/public/media
|
||||
/backend/target/
|
||||
/bundle*
|
||||
/media
|
||||
/cd.md
|
||||
/clj-profiler/
|
||||
/common/.shadow-cljs
|
||||
/common/coverage
|
||||
/common/target
|
||||
/deploy
|
||||
/web
|
||||
/_dump
|
||||
/docker/images/bundle*
|
||||
/exporter/.shadow-cljs
|
||||
/exporter/target
|
||||
/frontend/.shadow-cljs
|
||||
/frontend/package-lock.json
|
||||
/frontend/cypress/videos/*/
|
||||
/frontend/cypress/fixtures/validuser.json
|
||||
/frontend/dist/
|
||||
/frontend/npm-debug.log
|
||||
/frontend/out/
|
||||
/frontend/resources/fonts/experiments
|
||||
/frontend/resources/public/*
|
||||
/frontend/target/
|
||||
/frontend/cypress/videos/*/
|
||||
/media
|
||||
/telemetry/
|
||||
/vendor/**/target
|
||||
/vendor/svgclean/bundle*.js
|
||||
|
||||
.calva
|
||||
.clj-kondo
|
||||
.lsp
|
||||
/web
|
||||
clj-profiler/
|
||||
figwheel_server.log
|
||||
node_modules
|
||||
|
||||
407
CHANGES.md
407
CHANGES.md
@@ -1,15 +1,181 @@
|
||||
# CHANGELOG
|
||||
|
||||
## :rocket: Next
|
||||
## 1.12.0-beta
|
||||
|
||||
### :boom: Breaking changes
|
||||
|
||||
### :sparkles: New features
|
||||
|
||||
- Open feedback in a new window [Taiga #2901](https://tree.taiga.io/project/penpot/us/2901)
|
||||
- Improve usage of file menu [Taiga #2853](https://tree.taiga.io/project/penpot/us/2853)
|
||||
- Rotation to snap to 15º intervals with shift [Taiga #2437](https://tree.taiga.io/project/penpot/issue/2437)
|
||||
- Support border radius and stroke properties for images [Taiga #497](https://tree.taiga.io/project/penpot/us/497)
|
||||
- Disallow using same password as user email [Taiga #2454](https://tree.taiga.io/project/penpot/us/2454)
|
||||
- Add configurable nudge amount [Taiga #910](https://tree.taiga.io/project/penpot/us/910)
|
||||
- Add stroke properties for image shapes [Taiga #497](https://tree.taiga.io/project/penpot/us/497)
|
||||
- On user settings, hide the theme selector as long as we only have one theme [Taiga #2610](https://tree.taiga.io/project/penpot/us/2610)
|
||||
- Automatically open comments from dashboard notifications [Taiga #2605](https://tree.taiga.io/project/penpot/us/2605)
|
||||
- Enhance the behaviour of the artboards list on view mode [Taiga #2634](https://tree.taiga.io/project/penpot/us/2634)
|
||||
- Add recent used fonts in font selection widget [Taiga #1381](https://tree.taiga.io/project/penpot/us/1381)
|
||||
- Allow to align items relative to groups [Taiga #2533](https://tree.taiga.io/project/penpot/us/2533)
|
||||
- Scroll bars [Taiga #2550](https://tree.taiga.io/project/penpot/task/2550)
|
||||
- Add select layer option to context menu [Taiga #2474](https://tree.taiga.io/project/penpot/us/2474)
|
||||
- Guides [Taiga #290](https://tree.taiga.io/project/penpot/us/290)
|
||||
- Improve file menu by adding semantically groups [Github #1203](https://github.com/penpot/penpot/issues/1203)
|
||||
- Add update components in bulk option in context menu [Taiga #1975](https://tree.taiga.io/project/penpot/us/1975)
|
||||
- Create first E2E tests [Taiga #2608](https://tree.taiga.io/project/penpot/task/2608), [Taiga #2608](https://tree.taiga.io/project/penpot/task/2608)
|
||||
- Redesign of workspace toolbars [Taiga #2319](https://tree.taiga.io/project/penpot/us/2319)
|
||||
- Graphic Tablet usability improvements [Taiga #1913](https://tree.taiga.io/project/penpot/us/1913)
|
||||
- Improved mouse collision detection for groups and text shapes [Taiga #2452](https://tree.taiga.io/project/penpot/us/2452), [Taiga #2453](https://tree.taiga.io/project/penpot/us/2453)
|
||||
- Add support for alternative S3 storage providers and all aws regions [#1267](https://github.com/penpot/penpot/issues/1267)
|
||||
|
||||
### :bug: Bugs fixed
|
||||
### :arrow_up: Deps updates
|
||||
|
||||
- Fixed ungroup typography when editing it [Taiga #2391](https://tree.taiga.io/project/penpot/issue/2391)
|
||||
- Fixed error when trying to post an empty comment [Taiga #2603](https://tree.taiga.io/project/penpot/issue/2603)
|
||||
- Fixed missing translation strings [Taiga #2786](https://tree.taiga.io/project/penpot/issue/2786)
|
||||
- Fixed color palette outside viewport [Taiga #2715](https://tree.taiga.io/project/penpot/issue/2715)
|
||||
- Fixed missing translate string [Taiga #2780](https://tree.taiga.io/project/penpot/issue/2780)
|
||||
- Fixed handoff shadow type text [Taiga #2717](https://tree.taiga.io/project/penpot/issue/2717)
|
||||
- Fixed components get "dirty" marker when moved [Taiga #2764](https://tree.taiga.io/project/penpot/issue/2764)
|
||||
- Fixed cannot align objects in a group that is not part of a frame [Taiga #2762](https://tree.taiga.io/project/penpot/issue/2762)
|
||||
- Fix problem with double click on exit path editing [Taiga #2906](https://tree.taiga.io/project/penpot/issue/2906)
|
||||
- Fixed alignment of layers with children [Taiga #2862](https://tree.taiga.io/project/penpot/issue/2862)
|
||||
|
||||
### :heart: Community contributions by (Thank you!)
|
||||
|
||||
- Cleanup unused static images (by @rhcarvalho) [#1561](https://github.com/penpot/penpot/pull/1561)
|
||||
- Compress static images to save space (by @rhcarvalho) [#1562](https://github.com/penpot/penpot/pull/1562)
|
||||
|
||||
# 1.10.3-beta
|
||||
## 1.11.2-beta
|
||||
|
||||
### :bug: Bugs fixed
|
||||
|
||||
- Fix issue on handling empty content on boolean shapes
|
||||
- Fix race condition issue on component renaming
|
||||
- Handle EOF errors on writting streamed response
|
||||
- Handle EOF errors on websocket send/ping methods
|
||||
- Disable parallel upload of file media on import (causes too much
|
||||
contention on the rlimit subsistem that does not works as expected
|
||||
on high load).
|
||||
|
||||
### :sparkles: New features
|
||||
|
||||
- Add health check endpoint on API
|
||||
- Increase default max connection pool size to 60
|
||||
- Reduce resource usage of the error reporter.
|
||||
|
||||
## 1.11.1-beta
|
||||
|
||||
### :bug: Bugs fixed
|
||||
|
||||
- Fix issue related to default http host config value.
|
||||
- Fix issue on rendering frames on firefox.
|
||||
|
||||
### :arrow_up: Deps updates
|
||||
|
||||
- Update nodejs version to 16.13.1 on docker images.
|
||||
|
||||
## 1.11.0-beta
|
||||
|
||||
### :sparkles: New features
|
||||
|
||||
- Add an option to hide artboards names on the viewport [Taiga #2034](https://tree.taiga.io/project/penpot/issue/2034)
|
||||
- Limit pasted object position to container boundaries [Taiga #2449](https://tree.taiga.io/project/penpot/us/2449)
|
||||
- Add new options for zoom widget in workspace and viewer mode [Taiga #896](https://tree.taiga.io/project/penpot/us/896)
|
||||
- Allow decimals on stroke width and positions [Taiga #2035](https://tree.taiga.io/project/penpot/issue/2035)
|
||||
- Ability to ignore background when exporting an artboard [Taiga #1395](https://tree.taiga.io/project/penpot/us/1395)
|
||||
- Show color hex or name on hover [Taiga #2413](https://tree.taiga.io/project/penpot/us/2413)
|
||||
- Add shortcut to create artboard from selected objects [Taiga #2412](https://tree.taiga.io/project/penpot/us/2412)
|
||||
- Add shortcut for opacity [Taiga #2442](https://tree.taiga.io/project/penpot/us/2442)
|
||||
- Setting fill automatically for new texts [Taiga #2441](https://tree.taiga.io/project/penpot/us/2441)
|
||||
- Add shortcut to move action [Github #1213](https://github.com/penpot/penpot/issues/1213)
|
||||
- Add alt as mod key to add stroke color from library menu [Taiga #2207](https://tree.taiga.io/project/penpot/us/2207)
|
||||
- Add detach in bulk option to context menu [Taiga #2210](https://tree.taiga.io/project/penpot/us/2210)
|
||||
- Add penpot look and feel to multiuser cursors [Taiga #1387](https://tree.taiga.io/project/penpot/us/1387)
|
||||
- Add actions to go to main component context menu option [Taiga #2053](https://tree.taiga.io/project/penpot/us/2053)
|
||||
- Add contrast between component select color and shape select color [Taiga #2121](https://tree.taiga.io/project/penpot/issue/2121)
|
||||
- Add animations in interactions [Taiga #2244](https://tree.taiga.io/project/penpot/us/2244)
|
||||
- Add performance improvements on .penpot file import process [Taiga #2497](https://tree.taiga.io/project/penpot/us/2497)
|
||||
- On team settings set color of members count to black [Taiga #2607](https://tree.taiga.io/project/penpot/us/2607)
|
||||
|
||||
### :bug: Bugs fixed
|
||||
|
||||
- Fix remove gradient if any when applying color from library [Taiga #2299](https://tree.taiga.io/project/penpot/issue/2299)
|
||||
- Fix Enter as key action to exit edit path [Taiga #2444](https://tree.taiga.io/project/penpot/issue/2444)
|
||||
- Fix add fill color from palette to groups and components [Taiga #2313](https://tree.taiga.io/project/penpot/issue/2313)
|
||||
- Fix default project name in all languages [Taiga #2280](https://tree.taiga.io/project/penpot/issue/2280)
|
||||
- Fix line-height and letter-spacing inputs to allow negative values [Taiga #2381](https://tree.taiga.io/project/penpot/issue/2381)
|
||||
- Fix typo in Handoff tooltip [Taiga #2428](https://tree.taiga.io/project/penpot/issue/2428)
|
||||
- Fix crash when pressing Shift+1 on empty file [#1435](https://github.com/penpot/penpot/issues/1435)
|
||||
- Fix masked group resize strange behavior [Taiga #2317](https://tree.taiga.io/project/penpot/issue/2317)
|
||||
- Fix problems when exporting all artboards [Taiga #2234](https://tree.taiga.io/project/penpot/issue/2234)
|
||||
- Fix problems with team management [#1353](https://github.com/penpot/penpot/issues/1353)
|
||||
- Fix problem when importing in shared libraries [#1362](https://github.com/penpot/penpot/issues/1362)
|
||||
- Fix problem with join nodes [#1422](https://github.com/penpot/penpot/issues/1422)
|
||||
- After team onboarding importing a file will import into the team drafts [Taiga #2408](https://tree.taiga.io/project/penpot/issue/2408)
|
||||
- Fix problem exporting shapes from handoff mode [Taiga #2386](https://tree.taiga.io/project/penpot/issue/2386)
|
||||
- Fix lock/hide elements in context menu when multiples shapes selected [Taiga #2340](https://tree.taiga.io/project/penpot/issue/2340)
|
||||
- Fix problem with booleans [Taiga #2356](https://tree.taiga.io/project/penpot/issue/2356)
|
||||
- Fix line-height/letter-spacing inputs behaviour [Taiga #2331](https://tree.taiga.io/project/penpot/issue/2331)
|
||||
- Fix dotted style in strokes [Taiga #2312](https://tree.taiga.io/project/penpot/issue/2312)
|
||||
- Fix problem when resizing texts inside groups [Taiga #2310](https://tree.taiga.io/project/penpot/issue/2310)
|
||||
- Fix problem with multiple exports [Taiga #2468](https://tree.taiga.io/project/penpot/issue/2468)
|
||||
- Allow import to continue from recoverable failures [#1412](https://github.com/penpot/penpot/issues/1412)
|
||||
- Improved behaviour on text options when not text is selected [Taiga #2390](https://tree.taiga.io/project/penpot/issue/2390)
|
||||
- Fix decimal numbers in export viewbox [Taiga #2290](https://tree.taiga.io/project/penpot/issue/2290)
|
||||
- Right click over artboard name to open its menu [Taiga #1679](https://tree.taiga.io/project/penpot/issue/1679)
|
||||
- Make the default session cookue use SameSite=Lax instead of Strict (causes some issues in latest versions of Chrome)
|
||||
- Fix "open in new tab" on dashboard [Taiga #2235](https://tree.taiga.io/project/penpot/issue/2355)
|
||||
- Changing pages while comments activated will not close the panel [#1350](https://github.com/penpot/penpot/issues/1350)
|
||||
- Fix navigate comments in right sidebar [Taiga #2163](https://tree.taiga.io/project/penpot/issue/2163)
|
||||
- Fix keep name of component equal to the shape name [Taiga #2341](https://tree.taiga.io/project/penpot/issue/2341)
|
||||
- Fix lossing changes when changing selection and an input was already changed [Taiga #2329](https://tree.taiga.io/project/penpot/issue/2329), [Taiga #2330](https://tree.taiga.io/project/penpot/issue/2330)
|
||||
- Fix blur input field when click on viewport [Taiga #2164](https://tree.taiga.io/project/penpot/issue/2164)
|
||||
- Fix default page id in workspace [Taiga #2205](https://tree.taiga.io/project/penpot/issue/2205)
|
||||
- Fix problem when importing a file with grids [Taiga #2314](https://tree.taiga.io/project/penpot/issue/2314)
|
||||
- Fix problem with imported svgs with filters [Taiga #2478](https://tree.taiga.io/project/penpot/issue/2478)
|
||||
- Fix issues when updating selrect in paths [Taiga #2366](https://tree.taiga.io/project/penpot/issue/2366)
|
||||
- Fix scroll jumps in handoff mode [Taiga #2383](https://tree.taiga.io/project/penpot/issue/2383)
|
||||
- Fix handoff text with opacity [Taiga #2384](https://tree.taiga.io/project/penpot/issue/2384)
|
||||
- Restored rules color [Taiga #2460](https://tree.taiga.io/project/penpot/issue/2460)
|
||||
- Fix thumbnail not taking frame blending mode [Taiga #2301](https://tree.taiga.io/project/penpot/issue/2301)
|
||||
- Fix import/export with SVG edge cases [Taiga #2389](https://tree.taiga.io/project/penpot/issue/2389)
|
||||
- Avoid modifying component when moving into a group [Taiga #2534](https://tree.taiga.io/project/penpot/issue/2534)
|
||||
- Show correctly group types label in handoff [Taiga #2482](https://tree.taiga.io/project/penpot/issue/2482)
|
||||
- Display view mode buttons always centered in viewer [#Taiga 2466](https://tree.taiga.io/project/penpot/issue/2466)
|
||||
- Fix default profile image generation issue [Taiga #2601](https://tree.taiga.io/project/penpot/issue/2601)
|
||||
- Fix edit blur attributes for multiselection [Taiga #2625](https://tree.taiga.io/project/penpot/issue/2625)
|
||||
- Fix auto hide header in viewer full screen [Taiga #2632](https://tree.taiga.io/project/penpot/issue/2632)
|
||||
- Fix zoom in/out after fit or fill [Taiga #2630](https://tree.taiga.io/project/penpot/issue/2630)
|
||||
- Normalize zoom levels in workspace and viewer [Taiga #2631](https://tree.taiga.io/project/penpot/issue/2631)
|
||||
- Avoid empty names in projects, files and pages [Taiga #2594](https://tree.taiga.io/project/penpot/issue/2594)
|
||||
- Fix "move to" menu when duplicated team or project names [Taiga #2655](https://tree.taiga.io/project/penpot/issue/2655)
|
||||
- Fix ungroup a component leaves an asterisk in layers [Taiga #2694](https://tree.taiga.io/project/penpot/issue/2694)
|
||||
|
||||
### :arrow_up: Deps updates
|
||||
|
||||
- Update devenv docker image dependencies
|
||||
|
||||
### :heart: Community contributions by (Thank you!)
|
||||
|
||||
- Spelling fixes (by @jsoref) [#1340](https://github.com/penpot/penpot/pull/1340)
|
||||
- Explain folders in components (by @candideu) [Penpot-docs #42](https://github.com/penpot/penpot-docs/pull/42)
|
||||
- Readability improvements of user guide (by @PaulSchulz) [Penpot-docs #50](https://github.com/penpot/penpot-docs/pull/50)
|
||||
|
||||
## 1.10.4-beta
|
||||
|
||||
### :sparkles: Enhacements
|
||||
|
||||
- Allow parametrice file snapshoting interval
|
||||
|
||||
### :bug: Bugs fixed
|
||||
|
||||
- Fix issue on :mov-object change impl
|
||||
- Minor fix on how file changes log is persisted
|
||||
- Fix many issues on error reporting
|
||||
|
||||
## 1.10.3-beta
|
||||
|
||||
### :sparkles: Enhacements
|
||||
|
||||
@@ -20,7 +186,7 @@
|
||||
|
||||
- Fix unexpected exception on saving pages with default grids [#2409](https://tree.taiga.io/project/penpot/issue/2409)
|
||||
- Fix react warnings on setting size 1 on row and column grids.
|
||||
- Fix minor issues on ZMQ logging listener (used in error reporting service).
|
||||
- Fix minor issues on ZMQ logging listener (used in error reporting service)
|
||||
- Remove "ALPHA" from the code.
|
||||
- Fix value and nil handling on numeric-input component. This fixes many issues related to typography, components, etc. renaming.
|
||||
- Fix NPE on email complains processing.
|
||||
@@ -31,8 +197,7 @@
|
||||
|
||||
- Update log4j2 dependency.
|
||||
|
||||
|
||||
# 1.10.2-beta
|
||||
## 1.10.2-beta
|
||||
|
||||
### :bug: Bugs fixed
|
||||
|
||||
@@ -44,14 +209,12 @@
|
||||
|
||||
- Update log4j2 dependency.
|
||||
|
||||
|
||||
# 1.10.1-beta
|
||||
## 1.10.1-beta
|
||||
|
||||
### :bug: Bugs fixed
|
||||
|
||||
- Fix problems with team management [#1353](https://github.com/penpot/penpot/issues/1353)
|
||||
|
||||
|
||||
## 1.10.0-beta
|
||||
|
||||
### :boom: Breaking changes
|
||||
@@ -63,18 +226,19 @@
|
||||
|
||||
### :sparkles: New features
|
||||
|
||||
- Enhance corner radius behavior [Taiga #2190](https://tree.taiga.io/project/penpot/issue/2190).
|
||||
- Allow preserve scroll position in interactions [Taiga #2250](https://tree.taiga.io/project/penpot/us/2250).
|
||||
- Allow ungroup groups in bulk [Taiga #2211](https://tree.taiga.io/project/penpot/us/2211)
|
||||
- Enhance corner radius behavior [Taiga #2190](https://tree.taiga.io/project/penpot/issue/2190)
|
||||
- Allow preserve scroll position in interactions [Taiga #2250](https://tree.taiga.io/project/penpot/us/2250)
|
||||
- Add new onboarding modals.
|
||||
|
||||
### :bug: Bugs fixed
|
||||
|
||||
- Fix problem with exporting before the document is saved [Taiga #2189](https://tree.taiga.io/project/penpot/issue/2189).
|
||||
- Fix undo stacking when changing color from color-picker [Taiga #2191](https://tree.taiga.io/project/penpot/issue/2191).
|
||||
- Fix pages dropdown in viewer [Taiga #2087](https://tree.taiga.io/project/penpot/issue/2087).
|
||||
- Fix problem when exporting texts with gradients or opacity [Taiga #2200](https://tree.taiga.io/project/penpot/issue/2200).
|
||||
- Fix problem with view mode comments [Taiga #2226](https://tree.taiga.io/project/penpot/issue/2226).
|
||||
- Disallow to create a component when already has one [Taiga #2237](https://tree.taiga.io/project/penpot/issue/2237).
|
||||
- Fix problem with exporting before the document is saved [Taiga #2189](https://tree.taiga.io/project/penpot/issue/2189)
|
||||
- Fix undo stacking when changing color from color-picker [Taiga #2191](https://tree.taiga.io/project/penpot/issue/2191)
|
||||
- Fix pages dropdown in viewer [Taiga #2087](https://tree.taiga.io/project/penpot/issue/2087)
|
||||
- Fix problem when exporting texts with gradients or opacity [Taiga #2200](https://tree.taiga.io/project/penpot/issue/2200)
|
||||
- Fix problem with view mode comments [Taiga #2226](https://tree.taiga.io/project/penpot/issue/2226)
|
||||
- Disallow to create a component when already has one [Taiga #2237](https://tree.taiga.io/project/penpot/issue/2237)
|
||||
- Add ellipsis in long labels for input fields [Taiga #2224](https://tree.taiga.io/project/penpot/issue/2224)
|
||||
- Fix problem with text rendering on export [Taiga #2223](https://tree.taiga.io/project/penpot/issue/2223)
|
||||
- Fix problem when flattening booleans losing styles [Taiga #2217](https://tree.taiga.io/project/penpot/issue/2217)
|
||||
@@ -87,61 +251,66 @@
|
||||
- Add placeholder to create shareable link
|
||||
- Fix project files count not refreshing correctly after import [Taiga #2216](https://tree.taiga.io/project/penpot/issue/2216)
|
||||
- Remove button after import process finish [Taiga #2215](https://tree.taiga.io/project/penpot/issue/2215)
|
||||
- Fix problem with styles in the viewer [Taiga #2467](https://tree.taiga.io/project/penpot/issue/2467)
|
||||
- Fix default state in viewer [Taiga #2465](https://tree.taiga.io/project/penpot/issue/2465)
|
||||
- Fix division by zero in bool operation [Taiga #2349](https://tree.taiga.io/project/penpot/issue/2349)
|
||||
|
||||
### :heart: Community contributions by (Thank you!)
|
||||
|
||||
- To the translation community for the hard work on making penpot
|
||||
available on so many languages.
|
||||
- Guide to integrate with Azure Directory (by @skrzyneckik) [Penpot-docs #33](https://github.com/penpot/penpot-docs/pull/33)
|
||||
- Improve libraries section readability (by @PaulSchulz) [Penpot-docs #39](https://github.com/penpot/penpot-docs/pull/39)
|
||||
|
||||
## 1.9.0-alpha
|
||||
|
||||
### :boom: Breaking changes
|
||||
|
||||
- Some stroke-caps can change behaviour.
|
||||
- Text display bug fix could potentialy make some texts jump a line.
|
||||
- Text display bug fix could potentially make some texts jump a line.
|
||||
|
||||
### :sparkles: New features
|
||||
|
||||
- Add boolean shapes: intersections, unions, difference and exclusions[Taiga #748](https://tree.taiga.io/project/penpot/us/748).
|
||||
- Add advanced prototyping [Taiga #244](https://tree.taiga.io/project/penpot/us/244).
|
||||
- Add multiple flows [Taiga #2091](https://tree.taiga.io/project/penpot/us/2091).
|
||||
- Add boolean shapes: intersections, unions, difference and exclusions[Taiga #748](https://tree.taiga.io/project/penpot/us/748)
|
||||
- Add advanced prototyping [Taiga #244](https://tree.taiga.io/project/penpot/us/244)
|
||||
- Add multiple flows [Taiga #2091](https://tree.taiga.io/project/penpot/us/2091)
|
||||
- Change order of the teams menu so it's in the joined time order.
|
||||
|
||||
### :bug: Bugs fixed
|
||||
|
||||
- Enhance duplicating prototype connections behaviour [Taiga #2093](https://tree.taiga.io/project/penpot/us/2093).
|
||||
- Ignore constraints in horizontal or vertical flip [Taiga #2038](https://tree.taiga.io/project/penpot/issue/2038).
|
||||
- Fix color and typographies refs lost when duplicated file [Taiga #2165](https://tree.taiga.io/project/penpot/issue/2165).
|
||||
- Fix problem with overflow dropdown on stroke-cap [#1216](https://github.com/penpot/penpot/issues/1216).
|
||||
- Fix menu context for single element nested in components [#1186](https://github.com/penpot/penpot/issues/1186).
|
||||
- Fix error screen when operations over comments fail [#1219](https://github.com/penpot/penpot/issues/1219).
|
||||
- Fix undo problem when changing typography/color from library [#1230](https://github.com/penpot/penpot/issues/1230).
|
||||
- Fix problem with text margin while rendering [#1231](https://github.com/penpot/penpot/issues/1231).
|
||||
- Fix problem with masked texts on exporting [Taiga #2116](https://tree.taiga.io/project/penpot/issue/2116).
|
||||
- Fix text editor enter behaviour with centered texts [Taiga #2126](https://tree.taiga.io/project/penpot/issue/2126).
|
||||
- Fix residual stroke on imported svg [Taiga #2125](https://tree.taiga.io/project/penpot/issue/2125).
|
||||
- Add links for terms of service and privacy policy in register checkbox [Taiga #2020](https://tree.taiga.io/project/penpot/issue/2020).
|
||||
- Allow three character hex and web colors in color picker hex input [#1184](https://github.com/penpot/penpot/issues/1184).
|
||||
- Allow lowercase search for fonts [#1180](https://github.com/penpot/penpot/issues/1180).
|
||||
- Fix group renaming problem [Taiga #1969](https://tree.taiga.io/project/penpot/issue/1969).
|
||||
- Fix export group with shadows on children [Taiga #2036](https://tree.taiga.io/project/penpot/issue/2036).
|
||||
- Fix zoom context menu in viewer [Taiga #2041](https://tree.taiga.io/project/penpot/issue/2041).
|
||||
- Fix stroke caps adjustments in relation with stroke size [Taiga #2123](https://tree.taiga.io/project/penpot/issue/2123).
|
||||
- Fix problem duplicating paths [Taiga #2147](https://tree.taiga.io/project/penpot/issue/2147).
|
||||
- Fix problem inheriting attributes from SVG root when importing [Taiga #2124](https://tree.taiga.io/project/penpot/issue/2124).
|
||||
- Fix problem with lines and inside/outside stroke [Taiga #2146](https://tree.taiga.io/project/penpot/issue/2146).
|
||||
- Add stroke width in selection calculation [Taiga #2146](https://tree.taiga.io/project/penpot/issue/2146).
|
||||
- Fix shift+wheel to horizontal scrolling in MacOS [#1217](https://github.com/penpot/penpot/issues/1217).
|
||||
- Fix path stroke is not working properly with high thickness [Taiga #2154](https://tree.taiga.io/project/penpot/issue/2154).
|
||||
- Fix bug with transformation operations [Taiga #2155](https://tree.taiga.io/project/penpot/issue/2155).
|
||||
- Fix bug in firefox when a text box is inside a mask [Taiga #2152](https://tree.taiga.io/project/penpot/issue/2152).
|
||||
- Enhance duplicating prototype connections behaviour [Taiga #2093](https://tree.taiga.io/project/penpot/us/2093)
|
||||
- Ignore constraints in horizontal or vertical flip [Taiga #2038](https://tree.taiga.io/project/penpot/issue/2038)
|
||||
- Fix color and typographies refs lost when duplicated file [Taiga #2165](https://tree.taiga.io/project/penpot/issue/2165)
|
||||
- Fix problem with overflow dropdown on stroke-cap [#1216](https://github.com/penpot/penpot/issues/1216)
|
||||
- Fix menu context for single element nested in components [#1186](https://github.com/penpot/penpot/issues/1186)
|
||||
- Fix error screen when operations over comments fail [#1219](https://github.com/penpot/penpot/issues/1219)
|
||||
- Fix undo problem when changing typography/color from library [#1230](https://github.com/penpot/penpot/issues/1230)
|
||||
- Fix problem with text margin while rendering [#1231](https://github.com/penpot/penpot/issues/1231)
|
||||
- Fix problem with masked texts on exporting [Taiga #2116](https://tree.taiga.io/project/penpot/issue/2116)
|
||||
- Fix text editor enter behaviour with centered texts [Taiga #2126](https://tree.taiga.io/project/penpot/issue/2126)
|
||||
- Fix residual stroke on imported svg [Taiga #2125](https://tree.taiga.io/project/penpot/issue/2125)
|
||||
- Add links for terms of service and privacy policy in register checkbox [Taiga #2020](https://tree.taiga.io/project/penpot/issue/2020)
|
||||
- Allow three character hex and web colors in color picker hex input [#1184](https://github.com/penpot/penpot/issues/1184)
|
||||
- Allow lowercase search for fonts [#1180](https://github.com/penpot/penpot/issues/1180)
|
||||
- Fix group renaming problem [Taiga #1969](https://tree.taiga.io/project/penpot/issue/1969)
|
||||
- Fix export group with shadows on children [Taiga #2036](https://tree.taiga.io/project/penpot/issue/2036)
|
||||
- Fix zoom context menu in viewer [Taiga #2041](https://tree.taiga.io/project/penpot/issue/2041)
|
||||
- Fix stroke caps adjustments in relation with stroke size [Taiga #2123](https://tree.taiga.io/project/penpot/issue/2123)
|
||||
- Fix problem duplicating paths [Taiga #2147](https://tree.taiga.io/project/penpot/issue/2147)
|
||||
- Fix problem inheriting attributes from SVG root when importing [Taiga #2124](https://tree.taiga.io/project/penpot/issue/2124)
|
||||
- Fix problem with lines and inside/outside stroke [Taiga #2146](https://tree.taiga.io/project/penpot/issue/2146)
|
||||
- Add stroke width in selection calculation [Taiga #2146](https://tree.taiga.io/project/penpot/issue/2146)
|
||||
- Fix shift+wheel to horizontal scrolling in MacOS [#1217](https://github.com/penpot/penpot/issues/1217)
|
||||
- Fix path stroke is not working properly with high thickness [Taiga #2154](https://tree.taiga.io/project/penpot/issue/2154)
|
||||
- Fix bug with transformation operations [Taiga #2155](https://tree.taiga.io/project/penpot/issue/2155)
|
||||
- Fix bug in firefox when a text box is inside a mask [Taiga #2152](https://tree.taiga.io/project/penpot/issue/2152)
|
||||
- Fix problem with stroke inside/outside [Taiga #2186](https://tree.taiga.io/project/penpot/issue/2186)
|
||||
- Fix masks export area [Taiga #2189](https://tree.taiga.io/project/penpot/issue/2189)
|
||||
- Fix paste in place in arboards [Taiga #2188](https://tree.taiga.io/project/penpot/issue/2188)
|
||||
- Fix paste in place in artboards [Taiga #2188](https://tree.taiga.io/project/penpot/issue/2188)
|
||||
- Fix font size input stuck on selection change [Taiga #2184](https://tree.taiga.io/project/penpot/issue/2184)
|
||||
- Fix stroke cut on shapes export [Taiga #2171](https://tree.taiga.io/project/penpot/issue/2171)
|
||||
- Fix no color when boolean with an SVG [Taiga #2193](https://tree.taiga.io/project/penpot/issue/2193)
|
||||
- Fix unlink color styles at strokes [Taiga #2206](https://tree.taiga.io/project/penpot/issue/2206).
|
||||
- Fix unlink color styles at strokes [Taiga #2206](https://tree.taiga.io/project/penpot/issue/2206)
|
||||
|
||||
### :arrow_up: Deps updates
|
||||
|
||||
@@ -150,13 +319,11 @@
|
||||
- To the translation community for the hard work on making penpot
|
||||
available on so many languages.
|
||||
|
||||
|
||||
|
||||
## 1.8.4-alpha
|
||||
|
||||
### :bug: Bugs fixed
|
||||
|
||||
- Fix problem importing components [Taiga #2151](https://tree.taiga.io/project/penpot/issue/2151).
|
||||
- Fix problem importing components [Taiga #2151](https://tree.taiga.io/project/penpot/issue/2151)
|
||||
|
||||
## 1.8.3-alpha
|
||||
|
||||
@@ -168,18 +335,17 @@
|
||||
|
||||
### :bug: Bugs fixed
|
||||
|
||||
- Fix problem with masking images in viewer [#1238](https://github.com/penpot/penpot/issues/1238).
|
||||
- Fix problem with masking images in viewer [#1238](https://github.com/penpot/penpot/issues/1238)
|
||||
|
||||
## 1.8.1-alpha
|
||||
|
||||
### :bug: Bugs fixed
|
||||
|
||||
- Fix project renaming issue (and some other related to the same underlying bug).
|
||||
- Fix project renaming issue (and some other related to the same underlying bug)
|
||||
- Fix internal exception on audit log persistence layer.
|
||||
- Set proper environment variable on docker images for chrome executable.
|
||||
- Fix internal metrics on websocket connections.
|
||||
|
||||
|
||||
## 1.8.0-alpha
|
||||
|
||||
### :boom: Breaking changes
|
||||
@@ -190,25 +356,25 @@
|
||||
|
||||
### :sparkles: New features
|
||||
|
||||
- Add tooltips to color picker tabs [Taiga #1814](https://tree.taiga.io/project/penpot/us/1814).
|
||||
- Add styling to the end point of any open paths [Taiga #1107](https://tree.taiga.io/project/penpot/us/1107).
|
||||
- Allow to zoom with ctrl + middle button [Taiga #1428](https://tree.taiga.io/project/penpot/us/1428).
|
||||
- Auto placement of duplicated objects [Taiga #1386](https://tree.taiga.io/project/penpot/us/1386).
|
||||
- Enable penpot SVG metadata only when exporting complete files [Taiga #1914](https://tree.taiga.io/project/penpot/us/1914?milestone=295883).
|
||||
- Export to PDF all artboards of one page [Taiga #1895](https://tree.taiga.io/project/penpot/us/1895).
|
||||
- Go to a undo step clicking on a history element of the list [Taiga #1374](https://tree.taiga.io/project/penpot/us/1374).
|
||||
- Increment font size by 10 with shift+arrows [1047](https://github.com/penpot/penpot/issues/1047).
|
||||
- New shortcut to detach components Ctrl+Shift+K [Taiga #1799](https://tree.taiga.io/project/penpot/us/1799).
|
||||
- Set email inputs to type "email", to aid keyboard entry [Taiga #1921](https://tree.taiga.io/project/penpot/issue/1921).
|
||||
- Use shift+move to move element orthogonally [#823](https://github.com/penpot/penpot/issues/823).
|
||||
- Use space + mouse drag to pan, instead of only space [Taiga #1800](https://tree.taiga.io/project/penpot/us/1800).
|
||||
- Allow navigate through pages on the viewer [Taiga #1550](https://tree.taiga.io/project/penpot/us/1550).
|
||||
- Allow create share links with specific pages [Taiga #1844](https://tree.taiga.io/project/penpot/us/1844).
|
||||
- Add tooltips to color picker tabs [Taiga #1814](https://tree.taiga.io/project/penpot/us/1814)
|
||||
- Add styling to the end point of any open paths [Taiga #1107](https://tree.taiga.io/project/penpot/us/1107)
|
||||
- Allow to zoom with ctrl + middle button [Taiga #1428](https://tree.taiga.io/project/penpot/us/1428)
|
||||
- Auto placement of duplicated objects [Taiga #1386](https://tree.taiga.io/project/penpot/us/1386)
|
||||
- Enable penpot SVG metadata only when exporting complete files [Taiga #1914](https://tree.taiga.io/project/penpot/us/1914?milestone=295883)
|
||||
- Export to PDF all artboards of one page [Taiga #1895](https://tree.taiga.io/project/penpot/us/1895)
|
||||
- Go to a undo step clicking on a history element of the list [Taiga #1374](https://tree.taiga.io/project/penpot/us/1374)
|
||||
- Increment font size by 10 with shift+arrows [1047](https://github.com/penpot/penpot/issues/1047)
|
||||
- New shortcut to detach components Ctrl+Shift+K [Taiga #1799](https://tree.taiga.io/project/penpot/us/1799)
|
||||
- Set email inputs to type "email", to aid keyboard entry [Taiga #1921](https://tree.taiga.io/project/penpot/issue/1921)
|
||||
- Use shift+move to move element orthogonally [#823](https://github.com/penpot/penpot/issues/823)
|
||||
- Use space + mouse drag to pan, instead of only space [Taiga #1800](https://tree.taiga.io/project/penpot/us/1800)
|
||||
- Allow navigate through pages on the viewer [Taiga #1550](https://tree.taiga.io/project/penpot/us/1550)
|
||||
- Allow create share links with specific pages [Taiga #1844](https://tree.taiga.io/project/penpot/us/1844)
|
||||
|
||||
### :bug: Bugs fixed
|
||||
|
||||
- Prevent adding numeric suffix to layer names when not needed [Taiga #1929](https://tree.taiga.io/project/penpot/us/1929).
|
||||
- Prevent deleting or moving the drafts project [Taiga #1935](https://tree.taiga.io/project/penpot/issue/1935).
|
||||
- Prevent adding numeric suffix to layer names when not needed [Taiga #1929](https://tree.taiga.io/project/penpot/us/1929)
|
||||
- Prevent deleting or moving the drafts project [Taiga #1935](https://tree.taiga.io/project/penpot/issue/1935)
|
||||
- Fix problem with zoom and selection [Taiga #1919](https://tree.taiga.io/project/penpot/issue/1919)
|
||||
- Fix problem with borders on shape export [#1092](https://github.com/penpot/penpot/issues/1092)
|
||||
- Fix thumbnail cropping issue [Taiga #1964](https://tree.taiga.io/project/penpot/issue/1964)
|
||||
@@ -221,11 +387,12 @@
|
||||
- Fix problem while moving imported SVG's [#1199](https://github.com/penpot/penpot/issues/1199)
|
||||
|
||||
### :arrow_up: Deps updates
|
||||
|
||||
### :boom: Breaking changes
|
||||
|
||||
### :heart: Community contributions by (Thank you!)
|
||||
|
||||
- eduayme [#1129](https://github.com/penpot/penpot/pull/1129).
|
||||
|
||||
- eduayme [#1129](https://github.com/penpot/penpot/pull/1129)
|
||||
|
||||
## 1.7.4-alpha
|
||||
|
||||
@@ -234,14 +401,12 @@
|
||||
- Fix demo user creation (self-hosted only)
|
||||
- Add better ldap response validation and reporting (self-hosted only)
|
||||
|
||||
|
||||
## 1.7.3-alpha
|
||||
|
||||
### :bug: Bugs fixed
|
||||
|
||||
- Fix font uploading issue on Windows.
|
||||
|
||||
|
||||
## 1.7.2-alpha
|
||||
|
||||
### :sparkles: New features
|
||||
@@ -250,8 +415,8 @@
|
||||
|
||||
### :bug: Bugs fixed
|
||||
|
||||
- Add scroll bar to Teams menu [Taiga #1894](https://tree.taiga.io/project/penpot/issue/1894).
|
||||
- Fix repeated names when duplicating artboards or groups [Taiga #1892](https://tree.taiga.io/project/penpot/issue/1892).
|
||||
- Add scroll bar to Teams menu [Taiga #1894](https://tree.taiga.io/project/penpot/issue/1894)
|
||||
- Fix repeated names when duplicating artboards or groups [Taiga #1892](https://tree.taiga.io/project/penpot/issue/1892)
|
||||
- Fix properly messages lifecycle on navigate.
|
||||
- Fix handling repeated names on duplicate object trees.
|
||||
- Fix group naming on group creation.
|
||||
@@ -265,7 +430,6 @@
|
||||
|
||||
- soultipsy [#1100](https://github.com/penpot/penpot/pull/1100)
|
||||
|
||||
|
||||
## 1.7.1-alpha
|
||||
|
||||
### :bug: Bugs fixed
|
||||
@@ -275,19 +439,18 @@
|
||||
- Fix issue on undo page deletion.
|
||||
- Fix some issues related to constraints.
|
||||
|
||||
|
||||
## 1.7.0-alpha
|
||||
|
||||
### :sparkles: New features
|
||||
|
||||
- Allow nested asset groups [Taiga #1716](https://tree.taiga.io/project/penpot/us/1716).
|
||||
- Allow to ungroup assets [Taiga #1719](https://tree.taiga.io/project/penpot/us/1719).
|
||||
- Allow to rename assets groups [Taiga #1721](https://tree.taiga.io/project/penpot/us/1721).
|
||||
- Component constraints (left, right, left and right, center, scale...) [Taiga #1125](https://tree.taiga.io/project/penpot/us/1125).
|
||||
- Export elements to PDF [Taiga #519](https://tree.taiga.io/project/penpot/us/519).
|
||||
- Memorize collapse state of assets in panel [Taiga #1718](https://tree.taiga.io/project/penpot/us/1718).
|
||||
- Headers button sets and menus review [Taiga #1663](https://tree.taiga.io/project/penpot/us/1663).
|
||||
- Preserve components if possible, when pasted into a different file [Taiga #1063](https://tree.taiga.io/project/penpot/issue/1063).
|
||||
- Allow nested asset groups [Taiga #1716](https://tree.taiga.io/project/penpot/us/1716)
|
||||
- Allow to ungroup assets [Taiga #1719](https://tree.taiga.io/project/penpot/us/1719)
|
||||
- Allow to rename assets groups [Taiga #1721](https://tree.taiga.io/project/penpot/us/1721)
|
||||
- Component constraints (left, right, left and right, center, scale...) [Taiga #1125](https://tree.taiga.io/project/penpot/us/1125)
|
||||
- Export elements to PDF [Taiga #519](https://tree.taiga.io/project/penpot/us/519)
|
||||
- Memorize collapse state of assets in panel [Taiga #1718](https://tree.taiga.io/project/penpot/us/1718)
|
||||
- Headers button sets and menus review [Taiga #1663](https://tree.taiga.io/project/penpot/us/1663)
|
||||
- Preserve components if possible, when pasted into a different file [Taiga #1063](https://tree.taiga.io/project/penpot/issue/1063)
|
||||
- Add the ability to offload file data to a cheaper storage when file becomes inactive.
|
||||
- Import/Export Penpot files from dashboard.
|
||||
- Double click won't make a shape a path until you change a node [Taiga #1796](https://tree.taiga.io/project/penpot/us/1796)
|
||||
@@ -296,21 +459,20 @@
|
||||
### :bug: Bugs fixed
|
||||
|
||||
- Process numeric input changes only if the value actually changed.
|
||||
- Remove unnecesary redirect from history when user goes to workspace from dashboard [Taiga #1820](https://tree.taiga.io/project/penpot/issue/1820).
|
||||
- Detach shapes from deleted assets [Taiga #1850](https://tree.taiga.io/project/penpot/issue/1850).
|
||||
- Fix tooltip position on view application [Taiga #1819](https://tree.taiga.io/project/penpot/issue/1819).
|
||||
- Fix dashboard navigation on moving file to other team [Taiga #1817](https://tree.taiga.io/project/penpot/issue/1817).
|
||||
- Fix workspace header presence styles and invalid link [Taiga #1813](https://tree.taiga.io/project/penpot/issue/1813).
|
||||
- Fix color-input wrong behavior (on workspace page color) [Taiga #1795](https://tree.taiga.io/project/penpot/issue/1795).
|
||||
- Fix file contextual menu in shared libraries at dashboard [Taiga #1865](https://tree.taiga.io/project/penpot/issue/1865).
|
||||
- Remove unnecessary redirect from history when user goes to workspace from dashboard [Taiga #1820](https://tree.taiga.io/project/penpot/issue/1820)
|
||||
- Detach shapes from deleted assets [Taiga #1850](https://tree.taiga.io/project/penpot/issue/1850)
|
||||
- Fix tooltip position on view application [Taiga #1819](https://tree.taiga.io/project/penpot/issue/1819)
|
||||
- Fix dashboard navigation on moving file to other team [Taiga #1817](https://tree.taiga.io/project/penpot/issue/1817)
|
||||
- Fix workspace header presence styles and invalid link [Taiga #1813](https://tree.taiga.io/project/penpot/issue/1813)
|
||||
- Fix color-input wrong behavior (on workspace page color) [Taiga #1795](https://tree.taiga.io/project/penpot/issue/1795)
|
||||
- Fix file contextual menu in shared libraries at dashboard [Taiga #1865](https://tree.taiga.io/project/penpot/issue/1865)
|
||||
- Fix problem with color picker and fonts [#1049](https://github.com/penpot/penpot/issues/1049)
|
||||
- Fix negative values in blur [Taiga #1815](https://tree.taiga.io/project/penpot/issue/1815)
|
||||
- Fix problem when editing color in group [Taiga #1816](https://tree.taiga.io/project/penpot/issue/1816)
|
||||
- Fix resize/rotate with mouse buttons different than left [#1060](https://github.com/penpot/penpot/issues/1060)
|
||||
- Fix header partialy visible on fullscreen viewer mode [Taiga #1875](https://tree.taiga.io/project/penpot/issue/1875)
|
||||
- Fix header partially visible on fullscreen viewer mode [Taiga #1875](https://tree.taiga.io/project/penpot/issue/1875)
|
||||
- Fix dynamic alignment enabled with hidden objects [#1063](https://github.com/penpot/penpot/issues/1063)
|
||||
|
||||
|
||||
## 1.6.5-alpha
|
||||
|
||||
### :bug: Bugs fixed
|
||||
@@ -321,8 +483,8 @@
|
||||
|
||||
### :sparkles: Minor improvements
|
||||
|
||||
- Decrease default bulk buffers on storage tasks.
|
||||
- Reduce file_change preserve interval to 24h.
|
||||
|
||||
### :bug: Bugs fixed
|
||||
|
||||
@@ -335,7 +497,6 @@
|
||||
- Properly handle nil values on `update-shapes` function.
|
||||
- Replace frame term usage by artboard on viewer app.
|
||||
|
||||
|
||||
## 1.6.3-alpha
|
||||
|
||||
### :bug: Bugs fixed
|
||||
@@ -362,42 +523,39 @@
|
||||
- Minor fix on previous commit.
|
||||
- Minor improvements on svg uploading on libraries.
|
||||
|
||||
|
||||
## 1.6.1-alpha
|
||||
|
||||
### :bug: Bugs fixed
|
||||
|
||||
- Add safety check on reg-objects change impl.
|
||||
- Fix custom fonts embbedding issue.
|
||||
- Fix custom fonts embedding issue.
|
||||
- Fix dashboard ordering issue.
|
||||
- Fix problem when creating a component with empty data.
|
||||
- Fix problem with moving shapes into frames.
|
||||
- Fix problems with mov-objects.
|
||||
- Fix unexpected excetion related to rounding integers.
|
||||
- Fix unexpected exception related to rounding integers.
|
||||
- Fix wrong type usage on libraries changes.
|
||||
- Improve editor lifecycle management.
|
||||
- Make the navigation async by default.
|
||||
|
||||
|
||||
## 1.6.0-alpha
|
||||
|
||||
### :sparkles: New features
|
||||
|
||||
- Add improved workspace font selector [Taiga US #292](https://tree.taiga.io/project/penpot/us/292).
|
||||
- Add improved workspace font selector [Taiga US #292](https://tree.taiga.io/project/penpot/us/292)
|
||||
- Add option to interactively scale text [Taiga #1527](https://tree.taiga.io/project/penpot/us/1527)
|
||||
- Add performance improvements on dashboard data loading.
|
||||
- Add performance improvements to indexes handling on workspace.
|
||||
- Add the ability to upload/use custom fonts (and automatically generate all needed webfonts) [Taiga US #292](https://tree.taiga.io/project/penpot/us/292).
|
||||
- Add the ability to upload/use custom fonts (and automatically generate all needed webfonts) [Taiga US #292](https://tree.taiga.io/project/penpot/us/292)
|
||||
- Transform shapes to path on double click
|
||||
- Translate automatic names of new files and projects.
|
||||
- Use shift instead of ctrl/cmd to keep aspect ratio [Taiga 1697](https://tree.taiga.io/project/penpot/issue/1697).
|
||||
- New translations: Portuguese (Brazil) and Romanias.
|
||||
|
||||
- Use shift instead of ctrl/cmd to keep aspect ratio [Taiga 1697](https://tree.taiga.io/project/penpot/issue/1697)
|
||||
- New translations: Portuguese (Brazil) and Romanias.
|
||||
|
||||
### :bug: Bugs fixed
|
||||
|
||||
- Remove interactions when the destination artboard is deleted [Taiga #1656](https://tree.taiga.io/project/penpot/issue/1656).
|
||||
- Fix problem with fonts that ends with numbers [#940](https://github.com/penpot/penpot/issues/940).
|
||||
- Remove interactions when the destination artboard is deleted [Taiga #1656](https://tree.taiga.io/project/penpot/issue/1656)
|
||||
- Fix problem with fonts that ends with numbers [#940](https://github.com/penpot/penpot/issues/940)
|
||||
- Fix problem with imported SVG on editing paths [#971](https://github.com/penpot/penpot/issues/971)
|
||||
- Fix problem with color picker positioning
|
||||
- Fix order on color palette [#961](https://github.com/penpot/penpot/issues/961)
|
||||
@@ -411,7 +569,6 @@
|
||||
- Update exporter dependencies (puppeteer), that fixes some unexpected exceptions.
|
||||
- Update string manipulation library.
|
||||
|
||||
|
||||
### :boom: Breaking changes
|
||||
|
||||
- The OIDC setting `PENPOT_OIDC_SCOPES` has changed the default semantics. Before this
|
||||
@@ -422,7 +579,6 @@
|
||||
|
||||
- Translations: Portuguese (Brazil) and Romanias.
|
||||
|
||||
|
||||
## 1.5.4-alpha
|
||||
|
||||
### :bug: Bugs fixed
|
||||
@@ -430,7 +586,6 @@
|
||||
- Fix issues on group rendering.
|
||||
- Fix problem with text editing auto-height [Taiga #1683](https://tree.taiga.io/project/penpot/issue/1683)
|
||||
|
||||
|
||||
## 1.5.3-alpha
|
||||
|
||||
### :bug: Bugs fixed
|
||||
@@ -454,7 +609,6 @@
|
||||
- Increase default team invitation token expiration to 48h.
|
||||
- Fix wrong error message when an expired token is used.
|
||||
|
||||
|
||||
## 1.5.0-alpha
|
||||
|
||||
### :sparkles: New features
|
||||
@@ -500,7 +654,6 @@
|
||||
- madmath03 (by [Monogramm](https://github.com/Monogramm)) [#807](https://github.com/penpot/penpot/pull/807)
|
||||
- zzkt [#814](https://github.com/penpot/penpot/pull/814)
|
||||
|
||||
|
||||
## 1.4.1-alpha
|
||||
|
||||
### :bug: Bugs fixed
|
||||
@@ -512,7 +665,6 @@
|
||||
- Fix incorrect state management of user lang selection.
|
||||
- Fix email validation usability issue on team invitation lightbox.
|
||||
|
||||
|
||||
## 1.4.0-alpha
|
||||
|
||||
### :sparkles: New features
|
||||
@@ -521,7 +673,7 @@
|
||||
- Add http caching layer on top of Query RPC.
|
||||
- Add layer opacity and blend mode to shapes [Taiga #937](https://tree.taiga.io/project/penpot/us/937)
|
||||
- Add more chinese translations [#726](https://github.com/penpot/penpot/pull/726)
|
||||
- Add native support for text-direction (RTL, LTR & auto).
|
||||
- Add native support for text-direction (RTL, LTR & auto)
|
||||
- Add several enhancements in shape selection [Taiga #1195](https://tree.taiga.io/project/penpot/us/1195)
|
||||
- Add thumbnail in memory caching mechanism.
|
||||
- Add turkish translation strings [#759](https://github.com/penpot/penpot/pull/759), [#794](https://github.com/penpot/penpot/pull/794)
|
||||
@@ -529,13 +681,12 @@
|
||||
- Hide viewer navbar on fullscreen [Taiga 1375](https://tree.taiga.io/project/penpot/us/1375)
|
||||
- Import SVG will create Penpot's shapes [Taiga #1006](https://tree.taiga.io/project/penpot/us/1066)
|
||||
- Improve french translations [#731](https://github.com/penpot/penpot/pull/731)
|
||||
- Reimplement workspace presence (remove database state).
|
||||
- Reimplement workspace presence (remove database state)
|
||||
- Remember last visited team when you re-enter the application [Taiga #1376](https://tree.taiga.io/project/penpot/us/1376)
|
||||
- Rename artboard with double click on the title [Taiga #1392](https://tree.taiga.io/project/penpot/us/1392)
|
||||
- Replace Slate-Editor with DraftJS [Taiga #1346](https://tree.taiga.io/project/penpot/us/1346)
|
||||
- Set proper page title [Taiga #1377](https://tree.taiga.io/project/penpot/us/1377)
|
||||
|
||||
|
||||
### :bug: Bugs fixed
|
||||
|
||||
- Disable buttons in view mode for users without permissions [Taiga #1328](https://tree.taiga.io/project/penpot/issue/1328)
|
||||
@@ -576,15 +727,13 @@
|
||||
|
||||
- The LDAP configuration variables interpolation starts using `:`
|
||||
(example `:username`) instead of `$`. The main reason is avoid
|
||||
unnecesary conflict with bash interpolation.
|
||||
|
||||
unnecessary conflict with bash interpolation.
|
||||
|
||||
### :arrow_up: Deps updates
|
||||
|
||||
- Update backend to JDK16.
|
||||
- Update exporter nodejs to v14.16.0
|
||||
|
||||
|
||||
### :heart: Community contributions by (Thank you!)
|
||||
|
||||
- iblueer [#726](https://github.com/penpot/penpot/pull/726)
|
||||
@@ -592,27 +741,25 @@
|
||||
- girafic [#748](https://github.com/penpot/penpot/pull/748)
|
||||
- mbrksntrk [#794](https://github.com/penpot/penpot/pull/794)
|
||||
|
||||
|
||||
## 1.3.0-alpha
|
||||
|
||||
### :sparkles: New features
|
||||
|
||||
- Add emailcatcher and ldap test containers to devenv. [#506](https://github.com/penpot/penpot/pull/506)
|
||||
- Add major refactor of internal pubsub/redis code; improves scalability and performance [#640](https://github.com/penpot/penpot/pull/640)
|
||||
- Add more chinese transtions [#687](https://github.com/penpot/penpot/pull/687)
|
||||
- Add more chinese translations [#687](https://github.com/penpot/penpot/pull/687)
|
||||
- Add more presets for artboard [#654](https://github.com/penpot/penpot/pull/654)
|
||||
- Add optional loki integration [#645](https://github.com/penpot/penpot/pull/645)
|
||||
- Add proper http session lifecycle handling.
|
||||
- Allow to set border radius of each rect corner individually
|
||||
- Bounce & Complaint handling [#635](https://github.com/penpot/penpot/pull/635)
|
||||
- Disable groups interactions when holding "Ctrl" key (deep selection)
|
||||
- New action in context menu to "edit" some shapes (binded to key "Enter")
|
||||
|
||||
- New action in context menu to "edit" some shapes (bound to key "Enter")
|
||||
|
||||
### :bug: Bugs fixed
|
||||
|
||||
- Add more improvements to french translation strings [#591](https://github.com/penpot/penpot/pull/591)
|
||||
- Add some missing database indexes (mainly improves performance on large databases on file-update rpc method, and some background tasks).
|
||||
- Add some missing database indexes (mainly improves performance on large databases on file-update rpc method, and some background tasks)
|
||||
- Disables filters in masking elements (issue with Firefox rendering)
|
||||
- Drawing tool will have priority over resize/rotate handlers [Taiga #1225](https://tree.taiga.io/project/penpot/issue/1225)
|
||||
- Fix broken bounding box on editing paths [Taiga #1254](https://tree.taiga.io/project/penpot/issue/1254)
|
||||
@@ -626,16 +773,14 @@
|
||||
- Have language change notification written in the new language [Taiga #1205](https://tree.taiga.io/project/penpot/issue/1205)
|
||||
- Hide register screen when registration is disabled [#598](https://github.com/penpot/penpot/issues/598)
|
||||
- Properly handle errors on github, gitlab and ldap auth backends.
|
||||
- Properly mark profile auth backend (on first register/ auth with 3rd party auth provider).
|
||||
- Properly mark profile auth backend (on first register/ auth with 3rd party auth provider)
|
||||
- Refactor LDAP auth backend.
|
||||
|
||||
|
||||
### :heart: Community contributions by (Thank you!)
|
||||
|
||||
- girafic [#538](https://github.com/penpot/penpot/pull/654)
|
||||
- arkhi [#591](https://github.com/penpot/penpot/pull/591)
|
||||
|
||||
|
||||
## 1.2.0-alpha
|
||||
|
||||
### :sparkles: New features
|
||||
@@ -650,7 +795,6 @@
|
||||
- Show a pixel grid when zoom greater than 800% [#519](https://github.com/penpot/penpot/discussions/519)
|
||||
- Fix behavior of select all command when there are objects outside frames [Taiga #1209](https://tree.taiga.io/project/penpot/issue/1209)
|
||||
|
||||
|
||||
### :bug: Bugs fixed
|
||||
|
||||
- Fix 404 when access shared link [#615](https://github.com/penpot/penpot/issues/615)
|
||||
@@ -686,7 +830,6 @@
|
||||
- Improved MacOS shortcuts and helpers
|
||||
- Small changes to shape creation
|
||||
|
||||
|
||||
## 1.0.0-alpha
|
||||
|
||||
Initial release
|
||||
|
||||
@@ -19,9 +19,9 @@ If you found a bug, please report it, as far as possible with:
|
||||
- a browser and the browser version used
|
||||
- a dev tools console exception stack trace (if it is available)
|
||||
|
||||
If you found a bug that you consider better discuse in private (for
|
||||
If you found a bug that you consider better discuss in private (for
|
||||
example: security bugs), consider first send an email to
|
||||
`info@penpot.app`.
|
||||
`support@penpot.app`.
|
||||
|
||||
**We don't have formal bug bounty program for security reports; this
|
||||
is an open source application and your contribution will be recognized
|
||||
@@ -54,7 +54,7 @@ We will use the `easy fix` mark for tag for indicate issues that are
|
||||
easy for beginners.
|
||||
|
||||
|
||||
## Commit Message Guidelines ##
|
||||
## Commit Guidelines ##
|
||||
|
||||
We have very precise rules over how our git commit messages can be formatted.
|
||||
|
||||
@@ -78,7 +78,6 @@ Where type is:
|
||||
- :ambulance: `:ambulance:` a commit that fixes critical bug
|
||||
- :books: `:books:` a commit that improves or adds documentation
|
||||
- :construction: `:construction:`: a wip commit
|
||||
- :construction_worker: `:construction_worker:` a commit with CI related stuff
|
||||
- :boom: `:boom:` a commit with breaking changes
|
||||
- :wrench: `:wrench:` a commit for config updates
|
||||
- :zap: `:zap:` a commit with performance improvements
|
||||
@@ -91,13 +90,14 @@ More info:
|
||||
- https://gist.github.com/parmentf/035de27d6ed1dce0b36a
|
||||
- https://gist.github.com/rxaviers/7360908
|
||||
|
||||
The subject should be:
|
||||
|
||||
- Use the imperative mood.
|
||||
- Capitalize the first letter.
|
||||
- Don't put a period at the end of the subject line.
|
||||
- Put a blank line between the subject line and the body.
|
||||
Each commit should have:
|
||||
|
||||
- A concise subject using imperative mood.
|
||||
- The subject should have capitalized the first letter and without
|
||||
period at the end.
|
||||
- A blank line between the subject line and the body.
|
||||
- An entry on the CHANGES.md file if applicable, referencing the
|
||||
github or taiga issue/user-story using the these same rules.
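For illustration, a commit header following these rules (the type, subject and issue reference here are made up) could read:

    :books: Add commit message examples to contributing guide

with a matching CHANGES.md entry pointing at the related GitHub or Taiga item.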
|
||||
|
||||
## Code of conduct ##
|
||||
|
||||
|
||||
@@ -70,9 +70,9 @@ You can ask and answer questions, have open-ended conversations, and follow alon
|
||||
|
||||
✉️ [Mail us](mailto:info@penpot.app)
|
||||
|
||||
💬 [Github discussions](https://github.com/penpot/penpot/discussions)
|
||||
💬 [GitHub discussions](https://github.com/penpot/penpot/discussions)
|
||||
|
||||
🐞 [Github issues](mailto:info@penpot.apphttps://github.com/penpot/penpot/issues)
|
||||
🐞 [GitHub issues](https://github.com/penpot/penpot/issues)
|
||||
|
||||
✍️️ [Gitter](https://gitter.im/penpot/community)
|
||||
|
||||
@@ -81,7 +81,7 @@ You can ask and answer questions, have open-ended conversations, and follow alon
|
||||
You can ask and answer questions, have open-ended conversations, and follow along on decisions affecting the project.
|
||||
Would you like to know more about Penpot? We recommend you to visit our youtube channel and learn more about the functionalities and possibilities of Penpot with our video tutorials.
|
||||
|
||||
🎞️ [Youtube channel](https://www.youtube.com/channel/UCAqS8G72uv9P5HG1IfgnQ9g)
|
||||
🎞️ [YouTube channel](https://www.youtube.com/channel/UCAqS8G72uv9P5HG1IfgnQ9g)
|
||||
|
||||
## License ##
|
||||
|
||||
|
||||
36
backend/build.clj
Normal file
@@ -0,0 +1,36 @@
|
||||
(ns build
|
||||
(:refer-clojure :exclude [compile])
|
||||
(:require
|
||||
[clojure.tools.build.api :as b]
|
||||
[clojure.java.io]))
|
||||
|
||||
(def class-dir "target/classes")
|
||||
(def basis (b/create-basis {:project "deps.edn"}))
|
||||
(def jar-file "target/penpot.jar")
|
||||
|
||||
(defn clean [_]
|
||||
(b/delete {:path "target"}))
|
||||
|
||||
(defn jar [_]
|
||||
(b/copy-dir
|
||||
{:src-dirs ["src" "resources"]
|
||||
:target-dir class-dir})
|
||||
|
||||
(b/compile-clj
|
||||
{:basis basis
|
||||
:src-dirs ["src"]
|
||||
:class-dir class-dir})
|
||||
|
||||
(b/uber
|
||||
{:class-dir class-dir
|
||||
:uber-file jar-file
|
||||
:main 'clojure.main
|
||||
:exclude [#"goog.*" #"^javasist.*"]
|
||||
:basis basis}))
|
||||
|
||||
(defn compile [_]
|
||||
(b/javac
|
||||
{:src-dirs ["dev/java"]
|
||||
:class-dir class-dir
|
||||
:basis basis
|
||||
:javac-opts ["-source" "11" "-target" "11"]}))
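Usage sketch: together with the `:build` alias added to deps.edn below (`:ns-default build`), this namespace is driven through the Clojure CLI, which is also how the reworked build script later in this diff produces the uberjar:

    clojure -T:build clean   # drop the target/ directory
    clojure -T:build jar     # write target/penpot.jar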
|
||||
@@ -6,67 +6,68 @@
|
||||
org.zeromq/jeromq {:mvn/version "0.5.2"}
|
||||
|
||||
com.taoensso/nippy {:mvn/version "3.1.1"}
|
||||
com.github.luben/zstd-jni {:mvn/version "1.5.0-4"}
|
||||
com.github.luben/zstd-jni {:mvn/version "1.5.2-1"}
|
||||
org.clojure/data.fressian {:mvn/version "1.0.0"}
|
||||
|
||||
;; NOTE: don't upgrade to latest version, breaking change is
|
||||
;; introduced on 0.10.0 that suffixes counters with _total if they
|
||||
;; are not already has this suffix.
|
||||
io.prometheus/simpleclient {:mvn/version "0.9.0"}
|
||||
io.prometheus/simpleclient_hotspot {:mvn/version "0.9.0"}
|
||||
io.prometheus/simpleclient_jetty {:mvn/version "0.9.0"
|
||||
io.prometheus/simpleclient {:mvn/version "0.14.1"}
|
||||
io.prometheus/simpleclient_hotspot {:mvn/version "0.14.1"}
|
||||
io.prometheus/simpleclient_jetty {:mvn/version "0.14.1"
|
||||
:exclusions [org.eclipse.jetty/jetty-server
|
||||
org.eclipse.jetty/jetty-servlet]}
|
||||
io.prometheus/simpleclient_httpserver {:mvn/version "0.9.0"}
|
||||
io.prometheus/simpleclient_httpserver {:mvn/version "0.14.1"}
|
||||
|
||||
io.lettuce/lettuce-core {:mvn/version "6.1.5.RELEASE"}
|
||||
io.lettuce/lettuce-core {:mvn/version "6.1.6.RELEASE"}
|
||||
java-http-clj/java-http-clj {:mvn/version "0.4.3"}
|
||||
|
||||
info.sunng/ring-jetty9-adapter {:mvn/version "0.15.2"}
|
||||
com.github.seancorfield/next.jdbc {:mvn/version "1.2.709"}
|
||||
funcool/yetti {:git/tag "v4.0" :git/sha "59ed2a7"
|
||||
:git/url "https://github.com/funcool/yetti.git"
|
||||
:exclusions [org.slf4j/slf4j-api]}
|
||||
|
||||
com.github.seancorfield/next.jdbc {:mvn/version "1.2.761"}
|
||||
metosin/reitit-ring {:mvn/version "0.5.15"}
|
||||
org.postgresql/postgresql {:mvn/version "42.2.23"}
|
||||
com.zaxxer/HikariCP {:mvn/version "5.0.0"}
|
||||
org.postgresql/postgresql {:mvn/version "42.3.2"}
|
||||
com.zaxxer/HikariCP {:mvn/version "5.0.1"}
|
||||
funcool/datoteka {:mvn/version "2.0.0"}
|
||||
|
||||
buddy/buddy-core {:mvn/version "1.10.1"}
|
||||
buddy/buddy-hashers {:mvn/version "1.8.1"}
|
||||
buddy/buddy-sign {:mvn/version "3.4.1"}
|
||||
buddy/buddy-hashers {:mvn/version "1.8.158"}
|
||||
buddy/buddy-sign {:mvn/version "3.4.333"}
|
||||
|
||||
org.jsoup/jsoup {:mvn/version "1.14.2"}
|
||||
org.jsoup/jsoup {:mvn/version "1.14.3"}
|
||||
org.im4java/im4java {:mvn/version "1.4.0"}
|
||||
org.lz4/lz4-java {:mvn/version "1.8.0"}
|
||||
|
||||
org.clojars.pntblnk/clj-ldap {:mvn/version "0.0.17"}
|
||||
integrant/integrant {:mvn/version "0.8.0"}
|
||||
|
||||
io.sentry/sentry {:mvn/version "5.1.2"}
|
||||
io.sentry/sentry {:mvn/version "5.6.1"}
|
||||
|
||||
;; Pretty Print specs
|
||||
pretty-spec/pretty-spec {:mvn/version "0.1.4"}
|
||||
software.amazon.awssdk/s3 {:mvn/version "2.17.40"}}
|
||||
software.amazon.awssdk/s3 {:mvn/version "2.17.122"}}
|
||||
|
||||
:paths ["src" "resources"]
|
||||
:paths ["src" "resources" "target/classes"]
|
||||
:aliases
|
||||
{:dev
|
||||
{:extra-deps
|
||||
{com.bhauman/rebel-readline {:mvn/version "RELEASE"}
|
||||
org.clojure/tools.namespace {:mvn/version "RELEASE"}
|
||||
org.clojure/test.check {:mvn/version "RELEASE"}
|
||||
org.clojure/data.csv {:mvn/version "1.0.0"}
|
||||
com.clojure-goes-fast/clj-async-profiler {:mvn/version "0.5.1"}
|
||||
|
||||
criterium/criterium {:mvn/version "RELEASE"}
|
||||
clojure-humanize/clojure-humanize {:mvn/version "0.2.2"}
|
||||
org.clojure/data.csv {:mvn/version "RELEASE"}
|
||||
com.clojure-goes-fast/clj-async-profiler {:mvn/version "RELEASE"}
|
||||
mockery/mockery {:mvn/version "RELEASE"}}
|
||||
:extra-paths ["test" "dev"]}
|
||||
|
||||
:kaocha
|
||||
{:extra-deps {lambdaisland/kaocha {:mvn/version "1.0.887"}}
|
||||
:main-opts ["-m" "kaocha.runner"]}
|
||||
:build
|
||||
{:extra-deps
|
||||
{io.github.clojure/tools.build {:git/tag "v0.7.5" :git/sha "34727f7"}}
|
||||
:ns-default build}
|
||||
|
||||
:test
|
||||
{:extra-deps {io.github.cognitect-labs/test-runner
|
||||
{:git/url "https://github.com/cognitect-labs/test-runner.git"
|
||||
:git/sha "dd6da11611eeb87f08780a30ac8ea6012d4c05ce"}}
|
||||
{:extra-paths ["test"]
|
||||
:extra-deps
|
||||
{io.github.cognitect-labs/test-runner
|
||||
{:git/tag "v0.5.0" :git/sha "b3fd0d2"}}
|
||||
:exec-fn cognitect.test-runner.api/test}
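Usage sketch: `:exec-fn` based aliases such as the reworked `:test` one above are run through the Clojure CLI's `-X` switch:

    clojure -X:test                          # whole backend suite
    clojure -X:test :nses '[app.some-test]'  # single namespace (name is illustrative)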
|
||||
|
||||
:outdated
|
||||
|
||||
@@ -6,13 +6,19 @@
|
||||
|
||||
(ns user
|
||||
(:require
|
||||
[app.common.data :as d]
|
||||
[app.common.exceptions :as ex]
|
||||
[app.common.geom.matrix :as gmt]
|
||||
[app.common.perf :as perf]
|
||||
[app.common.transit :as t]
|
||||
[app.config :as cfg]
|
||||
[app.main :as main]
|
||||
[app.util.blob :as blob]
|
||||
[app.util.fressian :as fres]
|
||||
[app.util.json :as json]
|
||||
[app.util.time :as dt]
|
||||
[app.util.transit :as t]
|
||||
[clj-async-profiler.core :as prof]
|
||||
[clojure.contrib.humanize :as hum]
|
||||
[clojure.java.io :as io]
|
||||
[clojure.pprint :refer [pprint print-table]]
|
||||
[clojure.repl :refer :all]
|
||||
@@ -22,31 +28,14 @@
|
||||
[clojure.test :as test]
|
||||
[clojure.tools.namespace.repl :as repl]
|
||||
[clojure.walk :refer [macroexpand-all]]
|
||||
[criterium.core :refer [quick-bench bench with-progress-reporting]]
|
||||
[datoteka.core]
|
||||
[integrant.core :as ig]))
|
||||
|
||||
(repl/disable-reload! (find-ns 'integrant.core))
|
||||
(set! *warn-on-reflection* true)
|
||||
|
||||
(defonce system nil)
|
||||
|
||||
;; --- Benchmarking Tools
|
||||
|
||||
(defmacro run-quick-bench
|
||||
[& exprs]
|
||||
`(with-progress-reporting (quick-bench (do ~@exprs) :verbose)))
|
||||
|
||||
(defmacro run-quick-bench'
|
||||
[& exprs]
|
||||
`(quick-bench (do ~@exprs)))
|
||||
|
||||
(defmacro run-bench
|
||||
[& exprs]
|
||||
`(with-progress-reporting (bench (do ~@exprs) :verbose)))
|
||||
|
||||
(defmacro run-bench'
|
||||
[& exprs]
|
||||
`(bench (do ~@exprs)))
|
||||
|
||||
;; --- Development Stuff
|
||||
|
||||
(defn- run-tests
|
||||
@@ -91,11 +80,13 @@
|
||||
|
||||
(defn compression-bench
|
||||
[data]
|
||||
(print-table
|
||||
[{:v1 (alength (blob/encode data {:version 1}))
|
||||
:v2 (alength (blob/encode data {:version 2}))
|
||||
:v3 (alength (blob/encode data {:version 3}))}]))
|
||||
|
||||
(let [humanize (fn [v] (hum/filesize v :binary true :format " %.4f "))]
|
||||
(print-table
|
||||
[{:v1 (humanize (alength (blob/encode data {:version 1})))
|
||||
:v2 (humanize (alength (blob/encode data {:version 2})))
|
||||
:v3 (humanize (alength (blob/encode data {:version 3})))
|
||||
:v4 (humanize (alength (blob/encode data {:version 4})))
|
||||
}])))
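;; Usage sketch from a backend REPL; `data` is assumed to be an already
;; loaded file data map:
(comment
  ;; prints a one-row table with humanized sizes for blob versions 1-4
  (compression-bench data))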
|
||||
|
||||
(defonce debug-tap
|
||||
(do
|
||||
|
||||
@@ -1,203 +0,0 @@
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<meta charset="utf-8" />
|
||||
<meta name="robots" content="noindex,nofollow">
|
||||
<meta http-equiv="x-ua-compatible" content="ie=edge" />
|
||||
<title>penpot - error report {{id}}</title>
|
||||
<link rel="stylesheet" href="https://fonts.googleapis.com/css2?family=JetBrains+Mono">
|
||||
<style>
|
||||
body {
|
||||
margin: 0px;
|
||||
padding: 0px;
|
||||
}
|
||||
pre {
|
||||
margin: 0px;
|
||||
}
|
||||
* {
|
||||
font-family: "JetBrains Mono", monospace;
|
||||
font-size: 12px;
|
||||
}
|
||||
.table {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
margin: 10px;
|
||||
}
|
||||
|
||||
.table-row {
|
||||
display: flex;
|
||||
/* width: 100%; */
|
||||
/* border: 1px solid red; */
|
||||
}
|
||||
|
||||
.table-key {
|
||||
font-weight: 600;
|
||||
width: 60px;
|
||||
padding: 4px;
|
||||
}
|
||||
|
||||
.table-val {
|
||||
font-weight: 200;
|
||||
color: #333;
|
||||
padding: 4px;
|
||||
}
|
||||
|
||||
.multiline {
|
||||
margin-top: 15px;
|
||||
flex-direction: column;
|
||||
}
|
||||
|
||||
.multiline .table-key {
|
||||
margin-bottom: 10px;
|
||||
border-bottom: 1px dashed #dddddd;
|
||||
/* padding: 4px; */
|
||||
width: unset;
|
||||
}
|
||||
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
<div class="table">
|
||||
<div class="table-row">
|
||||
<div class="table-key" title="Error ID">ERID: </div>
|
||||
<div class="table-val">{{id}}</div>
|
||||
</div>
|
||||
{% if profile-id %}
|
||||
<div class="table-row">
|
||||
<div class="table-key" title="Profile ID">PFID: </div>
|
||||
<div class="table-val">{{profile-id}}</div>
|
||||
</div>
|
||||
{% endif %}
|
||||
|
||||
{% if user-agent %}
|
||||
<div class="table-row">
|
||||
<div class="table-key">UAGT: </div>
|
||||
<div class="table-val">{{user-agent}}</div>
|
||||
</div>
|
||||
{% endif %}
|
||||
|
||||
{% if frontend-version %}
|
||||
<div class="table-row">
|
||||
<div class="table-key">FVER: </div>
|
||||
<div class="table-val">{{frontend-version}}</div>
|
||||
</div>
|
||||
{% endif %}
|
||||
|
||||
<div class="table-row">
|
||||
<div class="table-key">BVER: </div>
|
||||
<div class="table-val">{{version}}</div>
|
||||
</div>
|
||||
|
||||
{% if host %}
|
||||
<div class="table-row">
|
||||
<div class="table-key">HOST: </div>
|
||||
<div class="table-val">{{host}}</div>
|
||||
</div>
|
||||
{% endif %}
|
||||
|
||||
{% if tenant %}
|
||||
<div class="table-row">
|
||||
<div class="table-key">ENV: </div>
|
||||
<div class="table-val">{{tenant}}</div>
|
||||
</div>
|
||||
{% endif %}
|
||||
|
||||
{% if public-uri %}
|
||||
<div class="table-row">
|
||||
<div class="table-key">PURI: </div>
|
||||
<div class="table-val">{{public-uri}}</div>
|
||||
</div>
|
||||
{% endif %}
|
||||
|
||||
{% if type %}
|
||||
<div class="table-row">
|
||||
<div class="table-key">TYPE: </div>
|
||||
<div class="table-val">{{type}}</div>
|
||||
</div>
|
||||
{% endif %}
|
||||
|
||||
{% if code %}
|
||||
<div class="table-row">
|
||||
<div class="table-key">CODE: </div>
|
||||
<div class="table-val">{{code}}</div>
|
||||
</div>
|
||||
{% endif %}
|
||||
|
||||
{% if error %}
|
||||
<div class="table-row">
|
||||
<div class="table-key">CLSS: </div>
|
||||
<div class="table-val">{{error.class}}</div>
|
||||
</div>
|
||||
{% endif %}
|
||||
|
||||
{% if hint %}
|
||||
<div class="table-row">
|
||||
<div class="table-key">HINT: </div>
|
||||
<div class="table-val">{{hint}}</div>
|
||||
</div>
|
||||
{% endif %}
|
||||
|
||||
{% if method %}
|
||||
<div class="table-row">
|
||||
<div class="table-key">PATH: </div>
|
||||
<div class="table-val">{{method|upper}} {{path}}</div>
|
||||
</div>
|
||||
{% endif %}
|
||||
|
||||
<div>(<a href="#explain">go to explain</a>)</div>
|
||||
<div>(<a href="#edata">go to edata</a>)</div>
|
||||
<div>(<a href="#trace">go to trace</a>)</div>
|
||||
|
||||
{% if params %}
|
||||
<div id="params" class="table-row multiline">
|
||||
<div class="table-key">PARAMS: </div>
|
||||
<div class="table-val">
|
||||
<pre>{{params}}</pre>
|
||||
</div>
|
||||
</div>
|
||||
{% endif %}
|
||||
|
||||
{% if data %}
|
||||
<div id="edata" class="table-row multiline">
|
||||
<div class="table-key">ERROR DATA: </div>
|
||||
<div class="table-val">
|
||||
<pre>{{data}}</pre>
|
||||
</div>
|
||||
</div>
|
||||
{% endif %}
|
||||
|
||||
{% if spec-problems %}
|
||||
<div id="edata" class="table-row multiline">
|
||||
<div class="table-key">SPEC PROBLEMS: </div>
|
||||
<div class="table-val">
|
||||
<pre>{{spec-problems}}</pre>
|
||||
</div>
|
||||
</div>
|
||||
{% endif %}
|
||||
|
||||
{% if cause %}
|
||||
<div id="trace" class="table-row multiline">
|
||||
<div class="table-key">TRACE:</div>
|
||||
<div class="table-val">
|
||||
<pre>{{cause}}</pre>
|
||||
</div>
|
||||
</div>
|
||||
{% elif trace %}
|
||||
<div id="trace" class="table-row multiline">
|
||||
<div class="table-key">TRACE:</div>
|
||||
<div class="table-val">
|
||||
<pre>{{trace}}</pre>
|
||||
</div>
|
||||
</div>
|
||||
{% elif error %}
|
||||
<div id="trace" class="table-row multiline">
|
||||
<div class="table-key">TRACE:</div>
|
||||
<div class="table-val">
|
||||
<pre>{{error.trace}}</pre>
|
||||
</div>
|
||||
</div>
|
||||
{% endif %}
|
||||
</div>
|
||||
</body>
|
||||
</html>
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
<Configuration status="info" monitorInterval="60">
|
||||
<Appenders>
|
||||
<Console name="console" target="SYSTEM_OUT">
|
||||
<PatternLayout pattern="[%d{YYYY-MM-dd HH:mm:ss.SSS}] [%t] %level{length=1} %logger{36} - %msg%n"/>
|
||||
<PatternLayout pattern="[%d{YYYY-MM-dd HH:mm:ss.SSS}] %level{length=1} %logger{36} - %msg%n"/>
|
||||
</Console>
|
||||
</Appenders>
|
||||
|
||||
|
||||
18
backend/resources/templates/base.tmpl
Normal file
@@ -0,0 +1,18 @@
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<meta charset="utf-8" />
|
||||
<meta name="robots" content="noindex,nofollow">
|
||||
<meta http-equiv="x-ua-compatible" content="ie=edge" />
|
||||
<title>{% block title %}{% endblock %}</title>
|
||||
<link rel="stylesheet" href="https://fonts.googleapis.com/css2?family=JetBrains+Mono">
|
||||
<style>
|
||||
{% include "templates/styles.css" %}
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
{% block content %}
|
||||
{% endblock %}
|
||||
</body>
|
||||
</html>
|
||||
|
||||
32
backend/resources/templates/debug.tmpl
Normal file
@@ -0,0 +1,32 @@
|
||||
{% extends "templates/base.tmpl" %}
|
||||
|
||||
{% block title %}
|
||||
Debug Main Page
|
||||
{% endblock %}
|
||||
|
||||
{% block content %}
|
||||
<nav>
|
||||
<h1>Debug INDEX:</h1>
|
||||
<div>[<a href="/dbg/error">ERRORS</a>]</div>
|
||||
</nav>
|
||||
<main class="index">
|
||||
<section>
|
||||
<h2>Download file data:</h2>
|
||||
<desc>Given an FILE-ID, downloads the file data as file. The file data is encoded using transit.</desc>
|
||||
<form method="get" action="/dbg/file/data">
|
||||
<input type="text" style="width:300px" name="file-id" placeholder="file-id" />
|
||||
<input type="hidden" name="download" value="1" />
|
||||
<input type="submit" value="Download" />
|
||||
</form>
|
||||
</section>
|
||||
|
||||
<section>
|
||||
<h2>Upload File Data:</h2>
|
||||
<desc>Create a new file on your draft projects using the file downloaded from the previous section.</desc>
|
||||
<form method="post" enctype="multipart/form-data" action="/dbg/file/data">
|
||||
<input type="file" name="file" value="" />
|
||||
<input type="submit" value="Upload" />
|
||||
</form>
|
||||
</section>
|
||||
</main>
|
||||
{% endblock %}
|
||||
18
backend/resources/templates/error-list.tmpl
Normal file
@@ -0,0 +1,18 @@
|
||||
{% extends "templates/base.tmpl" %}
|
||||
|
||||
{% block title %}
|
||||
penpot - error list
|
||||
{% endblock %}
|
||||
|
||||
{% block content %}
|
||||
<nav>
|
||||
<h1>Latest error reports:</h1>
|
||||
</nav>
|
||||
<main class="horizontal-list">
|
||||
<ul>
|
||||
{% for item in items %}
|
||||
<li><a href="/dbg/error/{{item.id}}">{{item.created-at}}</a></li>
|
||||
{% endfor %}
|
||||
</ul>
|
||||
</main>
|
||||
{% endblock %}
|
||||
98
backend/resources/templates/error-report.tmpl
Normal file
@@ -0,0 +1,98 @@
|
||||
{% extends "templates/base.tmpl" %}
|
||||
|
||||
{% block title %}
|
||||
penpot - error report {{id}}
|
||||
{% endblock %}
|
||||
|
||||
{% block content %}
|
||||
<nav>
|
||||
<div>[<a href="/dbg/error">⮜</a>]</div>
|
||||
<div>[<a href="#context">context</a>]</div>
|
||||
<div>[<a href="#params">request params</a>]</div>
|
||||
{% if data %}
|
||||
<div>[<a href="#edata">error data</a>]</div>
|
||||
{% endif %}
|
||||
{% if spec-explain %}
|
||||
<div>[<a href="#spec-explain">spec explain</a>]</div>
|
||||
{% endif %}
|
||||
{% if spec-problems %}
|
||||
<div>[<a href="#spec-problems">spec problems</a>]</div>
|
||||
{% endif %}
|
||||
{% if spec-value %}
|
||||
<div>[<a href="#spec-value">spec value</a>]</div>
|
||||
{% endif %}
|
||||
|
||||
{% if trace %}
|
||||
<div>[<a href="#trace">error trace</a>]</div>
|
||||
{% endif %}
|
||||
</nav>
|
||||
<main>
|
||||
<div class="table">
|
||||
<div class="table-row multiline">
|
||||
<div id="context" class="table-key">CONTEXT: </div>
|
||||
|
||||
<div class="table-val">
|
||||
<h1>{{hint}}</h1>
|
||||
</div>
|
||||
|
||||
<div class="table-val">
|
||||
<pre>{{context}}</pre>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{% if params %}
|
||||
<div class="table-row multiline">
|
||||
<div id="params" class="table-key">REQUEST PARAMS: </div>
|
||||
<div class="table-val">
|
||||
<pre>{{params}}</pre>
|
||||
</div>
|
||||
</div>
|
||||
{% endif %}
|
||||
|
||||
{% if data %}
|
||||
<div class="table-row multiline">
|
||||
<div id="edata" class="table-key">ERROR DATA: </div>
|
||||
<div class="table-val">
|
||||
<pre>{{data}}</pre>
|
||||
</div>
|
||||
</div>
|
||||
{% endif %}
|
||||
|
||||
{% if spec-explain %}
|
||||
<div class="table-row multiline">
|
||||
<div id="spec-explain" class="table-key">SPEC EXPLAIN: </div>
|
||||
<div class="table-val">
|
||||
<pre>{{spec-explain}}</pre>
|
||||
</div>
|
||||
</div>
|
||||
{% endif %}
|
||||
|
||||
{% if spec-problems %}
|
||||
<div class="table-row multiline">
|
||||
<div id="spec-problems" class="table-key">SPEC PROBLEMS: </div>
|
||||
<div class="table-val">
|
||||
<pre>{{spec-problems}}</pre>
|
||||
</div>
|
||||
</div>
|
||||
{% endif %}
|
||||
|
||||
{% if spec-value %}
|
||||
<div class="table-row multiline">
|
||||
<div id="spec-value" class="table-key">SPEC VALUE: </div>
|
||||
<div class="table-val">
|
||||
<pre>{{spec-value}}</pre>
|
||||
</div>
|
||||
</div>
|
||||
{% endif %}
|
||||
|
||||
{% if trace %}
|
||||
<div class="table-row multiline">
|
||||
<div id="trace" class="table-key">TRACE:</div>
|
||||
<div class="table-val">
|
||||
<pre>{{trace}}</pre>
|
||||
</div>
|
||||
</div>
|
||||
{% endif %}
|
||||
</div>
|
||||
</main>
|
||||
{% endblock %}
|
||||
150
backend/resources/templates/styles.css
Normal file
@@ -0,0 +1,150 @@
|
||||
* {
|
||||
font-family: "JetBrains Mono", monospace;
|
||||
font-size: 12px;
|
||||
}
|
||||
|
||||
body {
|
||||
margin: 0px;
|
||||
padding: 0px;
|
||||
}
|
||||
|
||||
pre {
|
||||
margin: 0px;
|
||||
line-height: 16px;
|
||||
}
|
||||
|
||||
desc {
|
||||
display: flex;
|
||||
margin-bottom: 10px;
|
||||
font-size: 10px;
|
||||
color: #666;
|
||||
}
|
||||
|
||||
input[type=text], input[type=submit] {
|
||||
padding: 3px;
|
||||
}
|
||||
|
||||
main {
|
||||
margin: 20px;
|
||||
}
|
||||
|
||||
nav {
|
||||
position: fixed;
|
||||
width: 100vw;
|
||||
top: 0;
|
||||
left: 0;
|
||||
padding: 5px 20px;
|
||||
display: flex;
|
||||
background: #e3e3e3;
|
||||
}
|
||||
|
||||
nav > h1 {
|
||||
padding: 0px;
|
||||
margin: 0px;
|
||||
font-size: 11px;
|
||||
}
|
||||
|
||||
nav > div {
|
||||
text-transform: uppercase;
|
||||
font-weight: bold;
|
||||
}
|
||||
|
||||
nav > div:not(:last-child) {
|
||||
margin-right: 10px;
|
||||
}
|
||||
|
||||
.table {
|
||||
margin-top: 25px;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
}
|
||||
|
||||
.table-row {
|
||||
display: flex;
|
||||
padding-bottom: 15px;
|
||||
/* width: 100%; */
|
||||
/* border: 1px solid red; */
|
||||
}
|
||||
|
||||
.table-key {
|
||||
font-weight: 600;
|
||||
width: 60px;
|
||||
padding: 4px;
|
||||
|
||||
padding-top: 40px;
|
||||
margin-top: -40px;
|
||||
}
|
||||
|
||||
.table-val {
|
||||
font-weight: 200;
|
||||
color: #333;
|
||||
padding: 4px;
|
||||
}
|
||||
|
||||
.multiline {
|
||||
margin-top: 15px;
|
||||
flex-direction: column;
|
||||
}
|
||||
|
||||
.multiline .table-key {
|
||||
margin-bottom: 10px;
|
||||
border-bottom: 1px dashed #dddddd;
|
||||
/* padding: 4px; */
|
||||
width: unset;
|
||||
}
|
||||
|
||||
.index {
|
||||
margin-top: 40px;
|
||||
}
|
||||
|
||||
.index > section {
|
||||
padding: 10px;
|
||||
background-color: #e3e3e3;
|
||||
}
|
||||
|
||||
.index > section:not(:last-child) {
|
||||
margin-bottom: 10px;
|
||||
}
|
||||
|
||||
|
||||
.index > section > h2 {
|
||||
margin-top: 0px;
|
||||
}
|
||||
|
||||
.horizontal-list {
|
||||
margin: 20px;
|
||||
margin-top: 40px;
|
||||
}
|
||||
|
||||
.horizontal-list ul {
|
||||
display: flex;
|
||||
margin: 0px;
|
||||
padding: 0px;
|
||||
flex-direction: column;
|
||||
flex-wrap: wrap;
|
||||
height: calc(100vh - 75px);
|
||||
justify-content: flex-start;
|
||||
}
|
||||
|
||||
.horizontal-list li {
|
||||
list-style: none;
|
||||
padding: 0px;
|
||||
margin: 0px;
|
||||
line-height: 18px;
|
||||
min-width: 210px;
|
||||
margin: 0px 20px;
|
||||
cursor: pointer;
|
||||
display: flex;
|
||||
justify-content: center;
|
||||
border-radius: 3px;
|
||||
}
|
||||
|
||||
.horizontal-list li:hover {
|
||||
background-color: #e9e9e9;
|
||||
}
|
||||
|
||||
.horizontal-list li > a {
|
||||
text-decoration: none;
|
||||
color: inherit;
|
||||
}
|
||||
|
||||
@@ -1,79 +1,20 @@
|
||||
#!/usr/bin/env bb
|
||||
#!/usr/bin/env bash
|
||||
|
||||
;; This Source Code Form is subject to the terms of the Mozilla Public
|
||||
;; License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
;;
|
||||
;; Copyright (c) UXBOX Labs SL
|
||||
CURRENT_VERSION=$1;
|
||||
|
||||
(ns build
|
||||
(:require
|
||||
[clojure.string :as str]
|
||||
[clojure.java.io :as io]
|
||||
[clojure.pprint :refer [pprint]]
|
||||
[babashka.fs :as fs]
|
||||
[babashka.process :refer [$ check]]))
|
||||
set -ex
|
||||
|
||||
(defn split-cp
|
||||
[data]
|
||||
(str/split data #":"))
|
||||
rm -rf target;
|
||||
mkdir -p target/classes;
|
||||
mkdir -p target/dist;
|
||||
echo "$CURRENT_VERSION" > target/classes/version.txt;
|
||||
|
||||
(def classpath
|
||||
(->> ($ clojure -Spath)
|
||||
(check)
|
||||
(:out)
|
||||
(slurp)
|
||||
(split-cp)
|
||||
(map str/trim)))
|
||||
clojure -T:build jar;
|
||||
mv target/penpot.jar target/dist/penpot.jar
|
||||
cp scripts/run.template.sh target/dist/run.sh;
|
||||
cp scripts/manage.template.sh target/dist/manage.sh;
|
||||
chmod +x target/dist/run.sh;
|
||||
chmod +x target/dist/manage.sh;
|
||||
|
||||
(def classpath-jars
|
||||
(let [xfm (filter #(str/ends-with? % ".jar"))]
|
||||
(into #{} xfm classpath)))
|
||||
|
||||
(def classpath-paths
|
||||
(let [xfm (comp (remove #(str/ends-with? % ".jar"))
|
||||
(filter #(.isDirectory (io/file %))))]
|
||||
(into #{} xfm classpath)))
|
||||
|
||||
(def version
|
||||
(or (first *command-line-args*) "%version%"))
|
||||
|
||||
;; Clean previous dist
|
||||
(-> ($ rm -rf "./target/dist") check)
|
||||
|
||||
;; Create a new dist
|
||||
(-> ($ mkdir -p "./target/dist/deps") check)
|
||||
|
||||
;; Copy all jar deps into dist
|
||||
(run! (fn [item] (-> ($ cp ~item "./target/dist/deps/") check)) classpath-jars)
|
||||
|
||||
;; Create the application jar
|
||||
(spit "./target/dist/version.txt" version)
|
||||
|
||||
(-> ($ jar cvf "./target/dist/deps/app.jar" -C ~(first classpath-paths) ".") check)
|
||||
(-> ($ jar uvf "./target/dist/deps/app.jar" -C "./target/dist" "version.txt") check)
|
||||
(run! (fn [item]
|
||||
(-> ($ jar uvf "./target/dist/deps/app.jar" -C ~item ".") check))
|
||||
(rest classpath-paths))
|
||||
|
||||
;; Copy logging configuration
|
||||
(-> ($ cp "./resources/log4j2.xml" "./target/dist/") check)
|
||||
|
||||
;; Create classpath file
|
||||
(let [jars (->> (into ["app.jar"] classpath-jars)
|
||||
(map fs/file-name)
|
||||
(map #(fs/path "deps" %))
|
||||
(map str))]
|
||||
(spit "./target/dist/classpath" (str/join ":" jars)))
|
||||
|
||||
;; Copy run script template
|
||||
(-> ($ cp "./scripts/run.template.sh" "./target/dist/run.sh") check)
|
||||
|
||||
;; Copy run script template
|
||||
(-> ($ cp "./scripts/manage.template.sh" "./target/dist/manage.sh") check)
|
||||
|
||||
;; Add exec permisions to scripts.
|
||||
(-> ($ chmod +x "./target/dist/run.sh") check)
|
||||
(-> ($ chmod +x "./target/dist/manage.sh") check)
|
||||
|
||||
nil
|
||||
|
||||
@@ -16,4 +16,4 @@ if [ -f ./environ ]; then
|
||||
source ./environ
|
||||
fi
|
||||
|
||||
exec $JAVA_CMD $JVM_OPTS -classpath $(cat classpath) -Dlog4j2.configurationFile=./log4j2.xml clojure.main -m app.cli.manage "$@"
|
||||
exec $JAVA_CMD $JVM_OPTS -jar penpot.jar -m app.cli.manage "$@"
|
||||
|
||||
@@ -1,19 +1,37 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
export PENPOT_FLAGS="enable-asserts enable-audit-log $PENPOT_FLAGS"
|
||||
# export PENPOT_DATABASE_URI="postgresql://172.17.0.1:5432/penpot"
|
||||
# export PENPOT_DATABASE_USERNAME="penpot"
|
||||
# export PENPOT_DATABASE_PASSWORD="penpot"
|
||||
# export PENPOT_DATABASE_READONLY=true
|
||||
|
||||
# export PENPOT_DATABASE_URI="postgresql://172.17.0.1:5432/penpot_pre"
|
||||
# export PENPOT_DATABASE_USERNAME="penpot_pre"
|
||||
# export PENPOT_DATABASE_PASSWORD="penpot_pre"
|
||||
# export PENPOT_FLAGS="enable-asserts enable-audit-log $PENPOT_FLAGS"
|
||||
|
||||
# Initialize MINIO config
|
||||
# mc alias set penpot-s3/ http://minio:9000 minioadmin minioadmin
|
||||
# mc admin user add penpot-s3 penpot-devenv penpot-devenv
|
||||
# mc admin policy set penpot-s3 readwrite user=penpot-devenv
|
||||
# mc mb penpot-s3/penpot -p
|
||||
# export AWS_ACCESS_KEY_ID=penpot-devenv
|
||||
# export AWS_SECRET_ACCESS_KEY=penpot-devenv
|
||||
# export PENPOT_ASSETS_STORAGE_BACKEND=assets-s3
|
||||
# export PENPOT_STORAGE_ASSETS_S3_ENDPOINT=http://minio:9000
|
||||
# export PENPOT_STORAGE_ASSETS_S3_REGION=eu-central-1
|
||||
# export PENPOT_STORAGE_ASSETS_S3_BUCKET=penpot
|
||||
|
||||
export OPTIONS="
|
||||
-A:jmx-remote:dev \
|
||||
-A:dev \
|
||||
-J-Djava.util.logging.manager=org.apache.logging.log4j.jul.LogManager \
|
||||
-J-Dclojure.tools.logging.factory=clojure.tools.logging.impl/log4j2-factory \
|
||||
-J-Dlog4j2.configurationFile=log4j2-devenv.xml \
|
||||
-J-XX:+UseShenandoahGC \
|
||||
-J-XX:+UseZGC \
|
||||
-J-XX:-OmitStackTraceInFastThrow \
|
||||
-J-Xms50m -J-Xmx512m";
|
||||
|
||||
# export OPTIONS="$OPTIONS -J-XX:+UnlockDiagnosticVMOptions";
|
||||
# export OPTIONS="$OPTIONS -J-XX:-TieredCompilation -J-XX:CompileThreshold=10000";
|
||||
-J-Xms50m -J-Xmx1024m \
|
||||
-J-Djdk.attach.allowAttachSelf \
|
||||
-J-XX:+UnlockDiagnosticVMOptions \
|
||||
-J-XX:+DebugNonSafepoints";
|
||||
|
||||
export OPTIONS_EVAL="nil"
|
||||
# export OPTIONS_EVAL="(set! *warn-on-reflection* true)"
|
||||
|
||||
@@ -17,4 +17,4 @@ if [ -f ./environ ]; then
|
||||
fi
|
||||
|
||||
set -x
|
||||
exec $JAVA_CMD $JVM_OPTS -classpath "$(cat classpath)" -Dlog4j2.configurationFile=./log4j2.xml "$@" clojure.main -m app.main
|
||||
exec $JAVA_CMD $JVM_OPTS "$@" -jar penpot.jar -m app.main
|
||||
|
||||
@@ -140,7 +140,6 @@
|
||||
indicating the action the program should take and the options provided."
|
||||
[args]
|
||||
(let [{:keys [options arguments errors summary] :as opts} (parse-opts args cli-options)]
|
||||
;; (pp/pprint opts)
|
||||
(cond
|
||||
(:help options) ; help => exit OK with usage summary
|
||||
{:exit-message (usage summary) :ok? true}
|
||||
|
||||
@@ -41,8 +41,7 @@
|
||||
data))
|
||||
|
||||
(def defaults
|
||||
{:http-server-port 6060
|
||||
:host "devenv"
|
||||
{:host "devenv"
|
||||
:tenant "dev"
|
||||
:database-uri "postgresql://postgres/penpot"
|
||||
:database-username "penpot"
|
||||
@@ -51,6 +50,9 @@
|
||||
:default-blob-version 3
|
||||
:loggers-zmq-uri "tcp://localhost:45556"
|
||||
|
||||
:file-change-snapshot-every 5
|
||||
:file-change-snapshot-timeout "3h"
|
||||
|
||||
:public-uri "http://localhost:3449"
|
||||
:redis-uri "redis://redis/0"
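;; Assumption, for context: like the rest of these defaults, the new
;; :file-change-snapshot-* options would be overridden through the matching
;; PENPOT_-prefixed environment variables, e.g.
;;   PENPOT_FILE_CHANGE_SNAPSHOT_EVERY=10
;;   PENPOT_FILE_CHANGE_SNAPSHOT_TIMEOUT=6h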
|
||||
|
||||
@@ -98,12 +100,25 @@
|
||||
(s/def ::audit-log-archive-uri ::us/string)
|
||||
(s/def ::audit-log-gc-max-age ::dt/duration)
|
||||
|
||||
(s/def ::admins ::us/set-of-str)
|
||||
(s/def ::file-change-snapshot-every ::us/integer)
|
||||
(s/def ::file-change-snapshot-timeout ::dt/duration)
|
||||
|
||||
(s/def ::default-executor-parallelism ::us/integer)
|
||||
(s/def ::blocking-executor-parallelism ::us/integer)
|
||||
(s/def ::worker-executor-parallelism ::us/integer)
|
||||
|
||||
(s/def ::secret-key ::us/string)
|
||||
(s/def ::allow-demo-users ::us/boolean)
|
||||
(s/def ::assets-path ::us/string)
|
||||
(s/def ::authenticated-cookie-domain ::us/string)
|
||||
(s/def ::database-password (s/nilable ::us/string))
|
||||
(s/def ::database-uri ::us/string)
|
||||
(s/def ::database-username (s/nilable ::us/string))
|
||||
(s/def ::database-readonly ::us/boolean)
|
||||
(s/def ::database-min-pool-size ::us/integer)
|
||||
(s/def ::database-max-pool-size ::us/integer)
|
||||
|
||||
(s/def ::default-blob-version ::us/integer)
|
||||
(s/def ::error-report-webhook ::us/string)
|
||||
(s/def ::user-feedback-destination ::us/string)
|
||||
@@ -125,6 +140,9 @@
|
||||
(s/def ::oidc-roles-attr ::us/keyword)
|
||||
(s/def ::host ::us/string)
|
||||
(s/def ::http-server-port ::us/integer)
|
||||
(s/def ::http-server-host ::us/string)
|
||||
(s/def ::http-server-min-threads ::us/integer)
|
||||
(s/def ::http-server-max-threads ::us/integer)
|
||||
(s/def ::http-session-idle-max-age ::dt/duration)
|
||||
(s/def ::http-session-updater-batch-max-age ::dt/duration)
|
||||
(s/def ::http-session-updater-batch-max-size ::us/integer)
|
||||
@@ -170,9 +188,11 @@
|
||||
(s/def ::storage-assets-fs-directory ::us/string)
|
||||
(s/def ::storage-assets-s3-bucket ::us/string)
|
||||
(s/def ::storage-assets-s3-region ::us/keyword)
|
||||
(s/def ::storage-assets-s3-endpoint ::us/string)
|
||||
(s/def ::storage-fdata-s3-bucket ::us/string)
|
||||
(s/def ::storage-fdata-s3-region ::us/keyword)
|
||||
(s/def ::storage-fdata-s3-prefix ::us/string)
|
||||
(s/def ::storage-fdata-s3-endpoint ::us/string)
|
||||
(s/def ::telemetry-uri ::us/string)
|
||||
(s/def ::telemetry-with-taiga ::us/boolean)
|
||||
(s/def ::tenant ::us/string)
|
||||
@@ -185,14 +205,24 @@
|
||||
(s/def ::config
|
||||
(s/keys :opt-un [::secret-key
|
||||
::flags
|
||||
::admins
|
||||
::allow-demo-users
|
||||
::audit-log-archive-uri
|
||||
::audit-log-gc-max-age
|
||||
::authenticated-cookie-domain
|
||||
::database-password
|
||||
::database-uri
|
||||
::database-username
|
||||
::database-readonly
|
||||
::database-min-pool-size
|
||||
::database-max-pool-size
|
||||
::default-blob-version
|
||||
::error-report-webhook
|
||||
::default-executor-parallelism
|
||||
::blocking-executor-parallelism
|
||||
::worker-executor-parallelism
|
||||
::file-change-snapshot-every
|
||||
::file-change-snapshot-timeout
|
||||
::user-feedback-destination
|
||||
::github-client-id
|
||||
::github-client-secret
|
||||
@@ -211,7 +241,10 @@
|
||||
::oidc-roles-attr
|
||||
::oidc-roles
|
||||
::host
|
||||
::http-server-host
|
||||
::http-server-port
|
||||
::http-server-max-threads
|
||||
::http-server-min-threads
|
||||
::http-session-idle-max-age
|
||||
::http-session-updater-batch-max-age
|
||||
::http-session-updater-batch-max-size
|
||||
@@ -261,10 +294,12 @@
|
||||
::storage-assets-fs-directory
|
||||
::storage-assets-s3-bucket
|
||||
::storage-assets-s3-region
|
||||
::storage-assets-s3-endpoint
|
||||
::fdata-storage-backend
|
||||
::storage-fdata-s3-bucket
|
||||
::storage-fdata-s3-region
|
||||
::storage-fdata-s3-prefix
|
||||
::storage-fdata-s3-endpoint
|
||||
::telemetry-enabled
|
||||
::telemetry-uri
|
||||
::telemetry-referer
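;; A minimal sketch of exercising this spec from a REPL in this namespace;
;; every option key is :opt-un, so any well-typed subset validates:
(comment
  (s/valid? ::config {:host "devenv"
                      :database-max-pool-size 30
                      :file-change-snapshot-every 5}))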
|
||||
|
||||
@@ -47,43 +47,63 @@
|
||||
;; Initialization
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
(declare instrument-jdbc!)
|
||||
(declare apply-migrations!)
|
||||
|
||||
(s/def ::name keyword?)
|
||||
(s/def ::uri ::us/not-empty-string)
|
||||
(s/def ::min-pool-size ::us/integer)
|
||||
(s/def ::max-pool-size ::us/integer)
|
||||
(s/def ::connection-timeout ::us/integer)
|
||||
(s/def ::max-size ::us/integer)
|
||||
(s/def ::min-size ::us/integer)
|
||||
(s/def ::migrations map?)
|
||||
(s/def ::name keyword?)
|
||||
(s/def ::password ::us/string)
|
||||
(s/def ::read-only ::us/boolean)
|
||||
(s/def ::uri ::us/not-empty-string)
|
||||
(s/def ::username ::us/string)
|
||||
(s/def ::validation-timeout ::us/integer)
|
||||
|
||||
(defmethod ig/pre-init-spec ::pool [_]
|
||||
(s/keys :req-un [::uri ::name ::min-pool-size ::max-pool-size]
|
||||
:opt-un [::migrations ::mtx/metrics ::read-only]))
|
||||
(s/keys :req-un [::uri ::name
|
||||
::min-size
|
||||
::max-size
|
||||
::connection-timeout
|
||||
::validation-timeout]
|
||||
:opt-un [::migrations
|
||||
::username
|
||||
::password
|
||||
::mtx/metrics
|
||||
::read-only]))
|
||||
|
||||
(defmethod ig/prep-key ::pool
|
||||
[_ cfg]
|
||||
(merge {:name :main
|
||||
:min-size 0
|
||||
:max-size 30
|
||||
:connection-timeout 10000
|
||||
:validation-timeout 10000
|
||||
:idle-timeout 120000 ; 2min
|
||||
:max-lifetime 1800000 ; 30m
|
||||
:read-only false}
|
||||
(d/without-nils cfg)))
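;; Sketch of a pool entry in the system map; the integrant key and wiring are
;; assumptions, and with these prep-key defaults only the environment-specific
;; options need to be supplied (values mirror the config defaults above):
(comment
  {:app.db/pool
   {:uri       "postgresql://postgres/penpot"
    :username  "penpot"
    :password  "penpot"
    :read-only false
    :min-size  0
    :max-size  30}})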
|
||||
|
||||
(defmethod ig/init-key ::pool
|
||||
[_ {:keys [migrations metrics name] :as cfg}]
|
||||
(l/info :action "initialize connection pool" :name (d/name name) :uri (:uri cfg))
|
||||
(some-> metrics :registry instrument-jdbc!)
|
||||
[_ {:keys [migrations name read-only] :as cfg}]
|
||||
(l/info :hint "initialize connection pool"
|
||||
:name (d/name name)
|
||||
:uri (:uri cfg)
|
||||
:read-only read-only
|
||||
:with-credentials (and (contains? cfg :username)
|
||||
(contains? cfg :password))
|
||||
:min-size (:min-size cfg)
|
||||
:max-size (:max-size cfg))
|
||||
|
||||
(let [pool (create-pool cfg)]
|
||||
(some->> (seq migrations) (apply-migrations! pool))
|
||||
(when-not read-only
|
||||
(some->> (seq migrations) (apply-migrations! pool)))
|
||||
pool))
|
||||
|
||||
(defmethod ig/halt-key! ::pool
|
||||
[_ pool]
|
||||
(.close ^HikariDataSource pool))
|
||||
|
||||
(defn- instrument-jdbc!
|
||||
[registry]
|
||||
(mtx/instrument-vars!
|
||||
[#'next.jdbc/execute-one!
|
||||
#'next.jdbc/execute!]
|
||||
{:registry registry
|
||||
:type :counter
|
||||
:name "database_query_total"
|
||||
:help "An absolute counter of database queries."}))
|
||||
|
||||
(defn- apply-migrations!
|
||||
[pool migrations]
|
||||
(with-open [conn ^AutoCloseable (open pool)]
|
||||
@@ -96,26 +116,23 @@
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
(def initsql
|
||||
(str "SET statement_timeout = 200000;\n"
|
||||
"SET idle_in_transaction_session_timeout = 200000;"))
|
||||
(str "SET statement_timeout = 300000;\n"
|
||||
"SET idle_in_transaction_session_timeout = 300000;"))
|
||||
|
||||
(defn- create-datasource-config
|
||||
[{:keys [metrics read-only] :or {read-only false} :as cfg}]
|
||||
(let [dburi (:uri cfg)
|
||||
username (:username cfg)
|
||||
password (:password cfg)
|
||||
config (HikariConfig.)]
|
||||
[{:keys [metrics uri] :as cfg}]
|
||||
(let [config (HikariConfig.)]
|
||||
(doto config
|
||||
(.setJdbcUrl (str "jdbc:" dburi))
|
||||
(.setPoolName (d/name (:name cfg)))
|
||||
(.setJdbcUrl (str "jdbc:" uri))
|
||||
(.setPoolName (d/name (:name cfg)))
|
||||
(.setAutoCommit true)
|
||||
(.setReadOnly read-only)
|
||||
(.setConnectionTimeout 10000) ;; 10seg
|
||||
(.setValidationTimeout 10000) ;; 10seg
|
||||
(.setIdleTimeout 120000) ;; 2min
|
||||
(.setMaxLifetime 1800000) ;; 30min
|
||||
(.setMinimumIdle (:min-pool-size cfg 0))
|
||||
(.setMaximumPoolSize (:max-pool-size cfg 50))
|
||||
(.setReadOnly (:read-only cfg))
|
||||
(.setConnectionTimeout (:connection-timeout cfg))
|
||||
(.setValidationTimeout (:validation-timeout cfg))
|
||||
(.setIdleTimeout (:idle-timeout cfg))
|
||||
(.setMaxLifetime (:max-lifetime cfg))
|
||||
(.setMinimumIdle (:min-size cfg))
|
||||
(.setMaximumPoolSize (:max-size cfg))
|
||||
(.setConnectionInitSql initsql)
|
||||
(.setInitializationFailTimeout -1))
|
||||
|
||||
@@ -125,8 +142,8 @@
|
||||
(PrometheusMetricsTrackerFactory.)
|
||||
(.setMetricsTrackerFactory config)))
|
||||
|
||||
(when username (.setUsername config username))
|
||||
(when password (.setPassword config password))
|
||||
(some->> ^String (:username cfg) (.setUsername config))
|
||||
(some->> ^String (:password cfg) (.setPassword config))
|
||||
|
||||
config))
|
||||
|
||||
@@ -136,10 +153,14 @@
|
||||
|
||||
(s/def ::pool pool?)
|
||||
|
||||
(defn pool-closed?
|
||||
(defn closed?
|
||||
[pool]
|
||||
(.isClosed ^HikariDataSource pool))
|
||||
|
||||
(defn read-only?
|
||||
[pool]
|
||||
(.isReadOnly ^HikariDataSource pool))
|
||||
|
||||
(defn create-pool
|
||||
[cfg]
|
||||
(let [dsc (create-datasource-config cfg)]
|
||||
|
||||
@@ -10,6 +10,7 @@
|
||||
[app.common.exceptions :as ex]
|
||||
[app.common.logging :as l]
|
||||
[app.common.spec :as us]
|
||||
[app.config :as cf]
|
||||
[app.http.doc :as doc]
|
||||
[app.http.errors :as errors]
|
||||
[app.http.middleware :as middleware]
|
||||
@@ -17,85 +18,91 @@
|
||||
[clojure.spec.alpha :as s]
|
||||
[integrant.core :as ig]
|
||||
[reitit.ring :as rr]
|
||||
[ring.adapter.jetty9 :as jetty])
|
||||
[yetti.adapter :as yt])
|
||||
(:import
|
||||
org.eclipse.jetty.server.Server
|
||||
org.eclipse.jetty.server.handler.ErrorHandler
|
||||
org.eclipse.jetty.server.handler.StatisticsHandler))
|
||||
|
||||
(declare router-handler)
|
||||
(declare wrap-router)
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; HTTP SERVER
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
(s/def ::handler fn?)
|
||||
(s/def ::router some?)
|
||||
(s/def ::ws (s/map-of ::us/string fn?))
|
||||
(s/def ::port ::us/integer)
|
||||
(s/def ::host ::us/string)
|
||||
(s/def ::name ::us/string)
|
||||
|
||||
(defmethod ig/pre-init-spec ::server [_]
|
||||
(s/keys :req-un [::port]
|
||||
:opt-un [::ws ::name ::mtx/metrics ::router ::handler]))
|
||||
(s/def ::max-threads ::cf/http-server-max-threads)
|
||||
(s/def ::min-threads ::cf/http-server-min-threads)
|
||||
|
||||
(defmethod ig/prep-key ::server
|
||||
[_ cfg]
|
||||
(merge {:name "http"} (d/without-nils cfg)))
|
||||
(merge {:name "http"
|
||||
:min-threads 4
|
||||
:max-threads 60
|
||||
:port 6060
|
||||
:host "0.0.0.0"}
|
||||
(d/without-nils cfg)))
|
||||
|
||||
(defmethod ig/pre-init-spec ::server [_]
|
||||
(s/keys :req-un [::port ::host ::name ::min-threads ::max-threads]
|
||||
:opt-un [::mtx/metrics ::router ::handler]))
|
||||
|
||||
(defn- instrument-metrics
|
||||
[^Server server metrics]
|
||||
(let [stats (doto (StatisticsHandler.)
|
||||
(.setHandler (.getHandler server)))]
|
||||
(.setHandler server stats)
|
||||
(mtx/instrument-jetty! (:registry metrics) stats)
|
||||
server))
|
||||
|
||||
(defmethod ig/init-key ::server
|
||||
[_ {:keys [handler router ws port name metrics] :as opts}]
|
||||
(l/info :msg "starting http server" :port port :name name)
|
||||
(let [pre-start (fn [^Server server]
|
||||
(let [handler (doto (ErrorHandler.)
|
||||
(.setShowStacks true)
|
||||
(.setServer server))]
|
||||
(.setErrorHandler server ^ErrorHandler handler)
|
||||
(when metrics
|
||||
(let [stats (StatisticsHandler.)]
|
||||
(.setHandler ^StatisticsHandler stats (.getHandler server))
|
||||
(.setHandler server stats)
|
||||
(mtx/instrument-jetty! (:registry metrics) stats)))))
|
||||
|
||||
options (merge
|
||||
{:port port
|
||||
:h2c? true
|
||||
:join? false
|
||||
:allow-null-path-info true
|
||||
:configurator pre-start}
|
||||
(when (seq ws)
|
||||
{:websockets ws}))
|
||||
|
||||
handler (cond
|
||||
(fn? handler) handler
|
||||
(some? router) (router-handler router)
|
||||
:else (ex/raise :type :internal
|
||||
:code :invalid-argument
|
||||
:hint "Missing `handler` or `router` option."))
|
||||
|
||||
server (jetty/run-jetty handler options)]
|
||||
(assoc opts :server server)))
|
||||
[_ {:keys [handler router port name metrics host] :as opts}]
|
||||
(l/info :hint "starting http server"
|
||||
:port port :host host :name name
|
||||
:min-threads (:min-threads opts)
|
||||
:max-threads (:max-threads opts))
|
||||
(let [options {:http/port port
|
||||
:http/host host
|
||||
:thread-pool/max-threads (:max-threads opts)
|
||||
:thread-pool/min-threads (:min-threads opts)
|
||||
:ring/async true}
|
||||
handler (cond
|
||||
(fn? handler) handler
|
||||
(some? router) (wrap-router router)
|
||||
:else (ex/raise :type :internal
|
||||
:code :invalid-argument
|
||||
:hint "Missing `handler` or `router` option."))
|
||||
server (-> (yt/server handler (d/without-nils options))
|
||||
(cond-> metrics (instrument-metrics metrics)))]
|
||||
(assoc opts :server (yt/start! server))))
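;; Sketch of a matching entry in the system map (the integrant key and the
;; ig/ref targets are assumptions; port/host/thread values mirror the
;; prep-key defaults above):
(comment
  {:app.http/server
   {:port        6060
    :host        "0.0.0.0"
    :name        "http"
    :min-threads 4
    :max-threads 60
    :router      (ig/ref :app.http/router)
    :metrics     (ig/ref :app.metrics/metrics)}})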
|
||||
|
||||
(defmethod ig/halt-key! ::server
|
||||
[_ {:keys [server name port] :as opts}]
|
||||
(l/info :msg "stoping http server"
|
||||
:name name
|
||||
:port port)
|
||||
(jetty/stop-server server))
|
||||
(l/info :msg "stoping http server" :name name :port port)
|
||||
(yt/stop! server))
|
||||
|
||||
(defn- router-handler
|
||||
(defn- wrap-router
|
||||
[router]
|
||||
(let [handler (rr/ring-handler router
|
||||
(rr/routes
|
||||
(rr/create-resource-handler {:path "/"})
|
||||
(rr/create-default-handler))
|
||||
{:middleware [middleware/server-timing]})]
|
||||
(fn [request]
|
||||
(try
|
||||
(handler request)
|
||||
(catch Throwable e
|
||||
(l/with-context (errors/get-error-context request e)
|
||||
(l/error :hint (ex-message e) :cause e)
|
||||
{:status 500 :body "internal server error"}))))))
|
||||
(let [default (rr/routes
|
||||
(rr/create-resource-handler {:path "/"})
|
||||
(rr/create-default-handler))
|
||||
options {:middleware [middleware/wrap-server-timing]
|
||||
:inject-match? false
|
||||
:inject-router? false}
|
||||
handler (rr/ring-handler router default options)]
|
||||
(fn [request respond _]
|
||||
(handler request respond (fn [cause]
|
||||
(l/error :hint "unexpected error processing request"
|
||||
::l/context (errors/get-error-context request cause)
|
||||
:query-string (:query-string request)
|
||||
:cause cause)
|
||||
(respond {:status 500 :body "internal server error"}))))))
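;; Aside (not part of the diff): the rewritten wrap-router returns a Ring
;; async handler taking [request respond raise]. A minimal sketch, using
;; only clojure.core, of how such a handler can be exercised from a test
;; or the REPL; the demo handler and the 1s timeout are invented for
;; illustration.
(defn- demo-async-handler
  "Toy async handler: echoes the request uri back in the body."
  [request respond _raise]
  (respond {:status 200 :body (str "requested " (:uri request))}))

(defn call-async
  "Invoke an async ring handler and block until it responds or errors."
  [handler request]
  (let [result (promise)]
    (handler request #(deliver result %) #(deliver result %))
    (deref result 1000 {:status 504 :body "timed out"})))

(comment
  (call-async demo-async-handler {:request-method :get :uri "/api/health"})
  ;; => {:status 200, :body "requested /api/health"}
  )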
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; Http Main Handler (Router)
|
||||
;; HTTP ROUTER
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
(s/def ::rpc map?)
|
||||
@@ -104,17 +111,17 @@
|
||||
(s/def ::storage map?)
|
||||
(s/def ::assets map?)
|
||||
(s/def ::feedback fn?)
|
||||
(s/def ::error-report-handler fn?)
|
||||
(s/def ::ws fn?)
|
||||
(s/def ::audit-http-handler fn?)
|
||||
(s/def ::debug map?)
|
||||
|
||||
(defmethod ig/pre-init-spec ::router [_]
|
||||
(s/keys :req-un [::rpc ::session ::mtx/metrics
|
||||
(s/keys :req-un [::rpc ::session ::mtx/metrics ::ws
|
||||
::oauth ::storage ::assets ::feedback
|
||||
::error-report-handler
|
||||
::audit-http-handler]))
|
||||
::debug ::audit-http-handler]))
|
||||
|
||||
(defmethod ig/init-key ::router
|
||||
[_ {:keys [session rpc oauth metrics assets feedback] :as cfg}]
|
||||
[_ {:keys [ws session rpc oauth metrics assets feedback debug] :as cfg}]
|
||||
(rr/router
|
||||
[["/metrics" {:get (:handler metrics)}]
|
||||
["/assets" {:middleware [[middleware/format-response-body]
|
||||
@@ -125,24 +132,44 @@
|
||||
["/by-file-media-id/:id" {:get (:file-objects-handler assets)}]
|
||||
["/by-file-media-id/:id/thumbnail" {:get (:file-thumbnails-handler assets)}]]
|
||||
|
||||
["/dbg"
|
||||
["/error-by-id/:id" {:get (:error-report-handler cfg)}]]
|
||||
["/dbg" {:middleware [[middleware/multipart-params]
|
||||
[middleware/params]
|
||||
[middleware/keyword-params]
|
||||
[middleware/format-response-body]
|
||||
[middleware/errors errors/handle]
|
||||
[middleware/cookies]
|
||||
[(:middleware session)]]}
|
||||
["" {:get (:index debug)}]
|
||||
["/error-by-id/:id" {:get (:retrieve-error debug)}]
|
||||
["/error/:id" {:get (:retrieve-error debug)}]
|
||||
["/error" {:get (:retrieve-error-list debug)}]
|
||||
["/file/data" {:get (:retrieve-file-data debug)
|
||||
:post (:upload-file-data debug)}]
|
||||
["/file/changes" {:get (:retrieve-file-changes debug)}]]
|
||||
|
||||
["/webhooks"
|
||||
["/sns" {:post (:sns-webhook cfg)}]]
|
||||
|
||||
["/ws/notifications"
|
||||
{:middleware [[middleware/params]
|
||||
[middleware/keyword-params]
|
||||
[middleware/format-response-body]
|
||||
[middleware/errors errors/handle]
|
||||
[middleware/cookies]
|
||||
[(:middleware session)]]
|
||||
:get ws}]
|
||||
|
||||
["/api" {:middleware [[middleware/cors]
|
||||
[middleware/etag]
|
||||
[middleware/format-response-body]
|
||||
[middleware/params]
|
||||
[middleware/multipart-params]
|
||||
[middleware/keyword-params]
|
||||
[middleware/format-response-body]
|
||||
[middleware/parse-request-body]
|
||||
[middleware/errors errors/handle]
|
||||
[middleware/cookies]]}
|
||||
|
||||
["/health" {:get (:health-check debug)}]
|
||||
["/_doc" {:get (doc/handler rpc)}]
|
||||
|
||||
["/feedback" {:middleware [(:middleware session)]
|
||||
:post feedback}]
|
||||
["/auth/oauth/:provider" {:post (:handler oauth)}]
|
||||
|
||||
@@ -13,9 +13,12 @@
|
||||
[app.db :as db]
|
||||
[app.metrics :as mtx]
|
||||
[app.storage :as sto]
|
||||
[app.util.async :as async]
|
||||
[app.util.time :as dt]
|
||||
[app.worker :as wrk]
|
||||
[clojure.spec.alpha :as s]
|
||||
[integrant.core :as ig]))
|
||||
[integrant.core :as ig]
|
||||
[promesa.core :as p]))
|
||||
|
||||
(def ^:private cache-max-age
|
||||
(dt/duration {:hours 24}))
|
||||
@@ -52,10 +55,10 @@
|
||||
:body (sto/get-object-bytes storage obj)}
|
||||
|
||||
:s3
|
||||
(let [url (sto/get-object-url storage obj {:max-age signature-max-age})]
|
||||
(let [{:keys [host port] :as url} (sto/get-object-url storage obj {:max-age signature-max-age})]
|
||||
{:status 307
|
||||
:headers {"location" (str url)
|
||||
"x-host" (:host url)
|
||||
"x-host" (cond-> host port (str ":" port))
|
||||
"cache-control" (str "max-age=" (inst-ms cache-max-age))}
|
||||
:body ""})
|
||||
|
||||
@@ -69,29 +72,38 @@
|
||||
:body ""}))))
|
||||
|
||||
(defn- generic-handler
|
||||
[{:keys [storage] :as cfg} _request id]
|
||||
(let [obj (sto/get-object storage id)]
|
||||
(if obj
|
||||
(serve-object cfg obj)
|
||||
{:status 404 :body ""})))
|
||||
[{:keys [storage executor] :as cfg} request kf]
|
||||
(async/with-dispatch executor
|
||||
(let [id (get-in request [:path-params :id])
|
||||
mobj (get-file-media-object storage id)
|
||||
obj (sto/get-object storage (kf mobj))]
|
||||
(if obj
|
||||
(serve-object cfg obj)
|
||||
{:status 404 :body ""}))))
|
||||
|
||||
(defn objects-handler
|
||||
[cfg request]
|
||||
(let [id (get-in request [:path-params :id])]
|
||||
(generic-handler cfg request (coerce-id id))))
|
||||
[{:keys [storage executor] :as cfg} request respond raise]
|
||||
(-> (async/with-dispatch executor
|
||||
(let [id (get-in request [:path-params :id])
|
||||
id (coerce-id id)
|
||||
obj (sto/get-object storage id)]
|
||||
(if obj
|
||||
(serve-object cfg obj)
|
||||
{:status 404 :body ""})))
|
||||
(p/then respond)
|
||||
(p/catch raise)))
|
||||
|
||||
(defn file-objects-handler
|
||||
[{:keys [storage] :as cfg} request]
|
||||
(let [id (get-in request [:path-params :id])
|
||||
mobj (get-file-media-object storage id)]
|
||||
(generic-handler cfg request (:media-id mobj))))
|
||||
[cfg request respond raise]
|
||||
(-> (generic-handler cfg request :media-id)
|
||||
(p/then respond)
|
||||
(p/catch raise)))
|
||||
|
||||
(defn file-thumbnails-handler
|
||||
[{:keys [storage] :as cfg} request]
|
||||
(let [id (get-in request [:path-params :id])
|
||||
mobj (get-file-media-object storage id)]
|
||||
(generic-handler cfg request (or (:thumbnail-id mobj) (:media-id mobj)))))
|
||||
|
||||
[cfg request respond raise]
|
||||
(-> (generic-handler cfg request #(or (:thumbnail-id %) (:media-id %)))
|
||||
(p/then respond)
|
||||
(p/catch raise)))
|
||||
|
||||
;; --- Initialization
|
||||
|
||||
@@ -101,10 +113,16 @@
|
||||
(s/def ::signature-max-age ::dt/duration)
|
||||
|
||||
(defmethod ig/pre-init-spec ::handlers [_]
|
||||
(s/keys :req-un [::storage ::mtx/metrics ::assets-path ::cache-max-age ::signature-max-age]))
|
||||
(s/keys :req-un [::storage
|
||||
::wrk/executor
|
||||
::mtx/metrics
|
||||
::assets-path
|
||||
::cache-max-age
|
||||
::signature-max-age]))
|
||||
|
||||
(defmethod ig/init-key ::handlers
|
||||
[_ cfg]
|
||||
{:objects-handler #(objects-handler cfg %)
|
||||
:file-objects-handler #(file-objects-handler cfg %)
|
||||
:file-thumbnails-handler #(file-thumbnails-handler cfg %)})
|
||||
{:objects-handler (partial objects-handler cfg)
|
||||
:file-objects-handler (partial file-objects-handler cfg)
|
||||
:file-thumbnails-handler (partial file-thumbnails-handler cfg)})
|
||||
|
||||
|
||||
@@ -26,25 +26,30 @@
|
||||
|
||||
(defmethod ig/init-key ::handler
|
||||
[_ cfg]
|
||||
(fn [request]
|
||||
(let [body (parse-json (slurp (:body request)))
|
||||
mtype (get body "Type")]
|
||||
(cond
|
||||
(= mtype "SubscriptionConfirmation")
|
||||
(let [surl (get body "SubscribeURL")
|
||||
stopic (get body "TopicArn")]
|
||||
(l/info :action "subscription received" :topic stopic :url surl)
|
||||
(http/send! {:uri surl :method :post :timeout 10000}))
|
||||
(fn [request respond _]
|
||||
(try
|
||||
(let [body (parse-json (slurp (:body request)))
|
||||
mtype (get body "Type")]
|
||||
(cond
|
||||
(= mtype "SubscriptionConfirmation")
|
||||
(let [surl (get body "SubscribeURL")
|
||||
stopic (get body "TopicArn")]
|
||||
(l/info :action "subscription received" :topic stopic :url surl)
|
||||
(http/send! {:uri surl :method :post :timeout 10000}))
|
||||
|
||||
(= mtype "Notification")
|
||||
(when-let [message (parse-json (get body "Message"))]
|
||||
(let [notification (parse-notification cfg message)]
|
||||
(process-report cfg notification)))
|
||||
(= mtype "Notification")
|
||||
(when-let [message (parse-json (get body "Message"))]
|
||||
(let [notification (parse-notification cfg message)]
|
||||
(process-report cfg notification)))
|
||||
|
||||
:else
|
||||
(l/warn :hint "unexpected data received"
|
||||
:report (pr-str body)))
|
||||
{:status 200 :body ""})))
|
||||
:else
|
||||
(l/warn :hint "unexpected data received"
|
||||
:report (pr-str body))))
|
||||
(catch Throwable cause
|
||||
(l/error :hint "unexpected exception on awsns handler"
|
||||
:cause cause)))
|
||||
|
||||
(respond {:status 200 :body ""})))
|
||||
|
||||
(defn- parse-bounce
|
||||
[data]
|
||||
@@ -173,14 +178,14 @@
|
||||
|
||||
(defn- process-report
|
||||
[cfg {:keys [type profile-id] :as report}]
|
||||
(l/trace :action "procesing report" :report (pr-str report))
|
||||
(l/trace :action "processing report" :report (pr-str report))
|
||||
(cond
|
||||
;; In this case we receive a bounce/complaint notification without
|
||||
;; confirmed identity, we just emit a warning but do nothing about
|
||||
;; it because this is not a normal case. All notifications should
|
||||
;; come with profile identity.
|
||||
(nil? profile-id)
|
||||
(l/warn :msg "a notification without identity recevied from AWS"
|
||||
(l/warn :msg "a notification without identity received from AWS"
|
||||
:report (pr-str report))
|
||||
|
||||
(= "bounce" type)
|
||||
|
||||
backend/src/app/http/debug.clj (new file, 227 lines)
@@ -0,0 +1,227 @@
|
||||
;; This Source Code Form is subject to the terms of the Mozilla Public
|
||||
;; License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
;;
|
||||
;; Copyright (c) UXBOX Labs SL
|
||||
|
||||
(ns app.http.debug
|
||||
(:require
|
||||
[app.common.data :as d]
|
||||
[app.common.exceptions :as ex]
|
||||
[app.common.spec :as us]
|
||||
[app.common.uuid :as uuid]
|
||||
[app.config :as cf]
|
||||
[app.db :as db]
|
||||
[app.rpc.mutations.files :as m.files]
|
||||
[app.rpc.queries.profile :as profile]
|
||||
[app.util.async :as async]
|
||||
[app.util.blob :as blob]
|
||||
[app.util.template :as tmpl]
|
||||
[app.util.time :as dt]
|
||||
[app.worker :as wrk]
|
||||
[clojure.java.io :as io]
|
||||
[clojure.spec.alpha :as s]
|
||||
[cuerdas.core :as str]
|
||||
[datoteka.core :as fs]
|
||||
[fipp.edn :as fpp]
|
||||
[integrant.core :as ig]
|
||||
[promesa.core :as p]))
|
||||
|
||||
;; (selmer.parser/cache-off!)
|
||||
|
||||
(defn authorized?
|
||||
[pool {:keys [profile-id]}]
|
||||
(or (= "devenv" (cf/get :host))
|
||||
(let [profile (ex/ignoring (profile/retrieve-profile-data pool profile-id))
|
||||
admins (or (cf/get :admins) #{})]
|
||||
(contains? admins (:email profile)))))
|
||||
|
||||
(defn index
|
||||
[{:keys [pool]} request]
|
||||
(when-not (authorized? pool request)
|
||||
(ex/raise :type :authentication
|
||||
:code :only-admins-allowed))
|
||||
|
||||
{:status 200
|
||||
:headers {"content-type" "text/html"}
|
||||
:body (-> (io/resource "templates/debug.tmpl")
|
||||
(tmpl/render {}))})
|
||||
|
||||
|
||||
(def sql:retrieve-range-of-changes
|
||||
"select revn, changes from file_change where file_id=? and revn >= ? and revn <= ? order by revn")
|
||||
|
||||
(def sql:retrieve-single-change
|
||||
"select revn, changes, data from file_change where file_id=? and revn = ?")
|
||||
|
||||
(defn prepare-response
|
||||
[{:keys [params] :as request} body]
|
||||
(when-not body
|
||||
(ex/raise :type :not-found
|
||||
:code :empty-data
|
||||
:hint "empty response"))
|
||||
|
||||
(cond-> {:status 200
|
||||
:headers {"content-type" "application/transit+json"}
|
||||
:body body}
|
||||
(contains? params :download)
|
||||
(update :headers assoc "content-disposition" "attachment")))
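;; Aside (not part of the diff): effect of the :download query param on the
;; response built by prepare-response above; the request maps are trimmed to
;; the keys actually used and the body is an arbitrary value.
(comment
  (prepare-response {:params {}} {:some "data"})
  ;; => {:status 200
  ;;     :headers {"content-type" "application/transit+json"}
  ;;     :body {:some "data"}}

  (prepare-response {:params {:download ""}} {:some "data"})
  ;; => same map, plus "content-disposition" "attachment" in :headers
  )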
|
||||
|
||||
(defn retrieve-file-data
|
||||
[{:keys [pool]} {:keys [params] :as request}]
|
||||
(when-not (authorized? pool request)
|
||||
(ex/raise :type :authentication
|
||||
:code :only-admins-allowed))
|
||||
|
||||
(let [file-id (some-> (get-in request [:params :file-id]) uuid/uuid)
|
||||
revn (some-> (get-in request [:params :revn]) d/parse-integer)]
|
||||
(when-not file-id
|
||||
(ex/raise :type :validation
|
||||
:code :missing-arguments))
|
||||
|
||||
(let [data (if (integer? revn)
|
||||
(some-> (db/exec-one! pool [sql:retrieve-single-change file-id revn]) :data)
|
||||
(some-> (db/get-by-id pool :file file-id) :data))]
|
||||
(if (contains? params :download)
|
||||
(-> (prepare-response request data)
|
||||
(update :headers assoc "content-type" "application/octet-stream"))
|
||||
(prepare-response request (some-> data blob/decode))))))
|
||||
|
||||
(defn upload-file-data
|
||||
[{:keys [pool]} {:keys [profile-id params] :as request}]
|
||||
(let [project-id (some-> (profile/retrieve-additional-data pool profile-id) :default-project-id)
|
||||
data (some-> params :file :tempfile fs/slurp-bytes blob/decode)]
|
||||
|
||||
(if (and data project-id)
|
||||
(let [fname (str "imported-file-" (dt/now))]
|
||||
(m.files/create-file pool {:id (uuid/next)
|
||||
:name fname
|
||||
:project-id project-id
|
||||
:profile-id profile-id
|
||||
:data data})
|
||||
{:status 200
|
||||
:body "OK"})
|
||||
{:status 500
|
||||
:body "error"})))
|
||||
|
||||
(defn retrieve-file-changes
|
||||
[{:keys [pool]} request]
|
||||
(when-not (authorized? pool request)
|
||||
(ex/raise :type :authentication
|
||||
:code :only-admins-allowed))
|
||||
|
||||
(let [file-id (some-> (get-in request [:params :id]) uuid/uuid)
|
||||
revn (or (get-in request [:params :revn]) "latest")]
|
||||
|
||||
(when (or (not file-id) (not revn))
|
||||
(ex/raise :type :validation
|
||||
:code :invalid-arguments
|
||||
:hint "missing arguments"))
|
||||
|
||||
(cond
|
||||
(d/num-string? revn)
|
||||
(let [item (db/exec-one! pool [sql:retrieve-single-change file-id (d/parse-integer revn)])]
|
||||
(prepare-response request (some-> item :changes blob/decode vec)))
|
||||
|
||||
(str/includes? revn ":")
|
||||
(let [[start end] (->> (str/split revn #":")
|
||||
(map str/trim)
|
||||
(map d/parse-integer))
|
||||
items (db/exec! pool [sql:retrieve-range-of-changes file-id start end])]
|
||||
(prepare-response request
|
||||
(some->> items
|
||||
(map :changes)
|
||||
(map blob/decode)
|
||||
(mapcat identity)
|
||||
(vec))))
|
||||
:else
|
||||
(ex/raise :type :validation :code :invalid-arguments))))
|
||||
|
||||
|
||||
(defn retrieve-error
|
||||
[{:keys [pool]} request]
|
||||
(letfn [(parse-id [request]
|
||||
(let [id (get-in request [:path-params :id])
|
||||
id (us/uuid-conformer id)]
|
||||
(when (uuid? id)
|
||||
id)))
|
||||
|
||||
(retrieve-report [id]
|
||||
(ex/ignoring
|
||||
(some-> (db/get-by-id pool :server-error-report id) :content db/decode-transit-pgobject)))
|
||||
|
||||
(render-template [report]
|
||||
(let [context (dissoc report
|
||||
:trace :cause :params :data :spec-problems
|
||||
:spec-explain :spec-value :error :explain :hint)
|
||||
params {:context (with-out-str (fpp/pprint context {:width 300}))
|
||||
:hint (:hint report)
|
||||
:spec-explain (:spec-explain report)
|
||||
:spec-problems (:spec-problems report)
|
||||
:spec-value (:spec-value report)
|
||||
:data (:data report)
|
||||
:trace (or (:trace report)
|
||||
(some-> report :error :trace))
|
||||
:params (:params report)}]
|
||||
(-> (io/resource "templates/error-report.tmpl")
|
||||
(tmpl/render params))))
|
||||
]
|
||||
|
||||
(when-not (authorized? pool request)
|
||||
(ex/raise :type :authentication
|
||||
:code :only-admins-allowed))
|
||||
|
||||
(let [result (some-> (parse-id request)
|
||||
(retrieve-report)
|
||||
(render-template))]
|
||||
(if result
|
||||
{:status 200
|
||||
:headers {"content-type" "text/html; charset=utf-8"
|
||||
"x-robots-tag" "noindex"}
|
||||
:body result}
|
||||
{:status 404
|
||||
:body "not found"}))))
|
||||
|
||||
(def sql:error-reports
|
||||
"select id, created_at from server_error_report order by created_at desc limit 100")
|
||||
|
||||
(defn retrieve-error-list
|
||||
[{:keys [pool]} request]
|
||||
(when-not (authorized? pool request)
|
||||
(ex/raise :type :authentication
|
||||
:code :only-admins-allowed))
|
||||
(let [items (db/exec! pool [sql:error-reports])
|
||||
items (map #(update % :created-at dt/format-instant :rfc1123) items)]
|
||||
{:status 200
|
||||
:headers {"content-type" "text/html; charset=utf-8"
|
||||
"x-robots-tag" "noindex"}
|
||||
:body (-> (io/resource "templates/error-list.tmpl")
|
||||
(tmpl/render {:items items}))}))
|
||||
|
||||
(defn health-check
|
||||
"Mainly a task that performs a health check."
|
||||
[{:keys [pool]} _]
|
||||
(db/with-atomic [conn pool]
|
||||
(db/exec-one! conn ["select count(*) as count from server_prop;"])
|
||||
{:status 200 :body "Ok"}))
|
||||
|
||||
(defn- wrap-async
|
||||
[{:keys [executor] :as cfg} f]
|
||||
(fn [request respond raise]
|
||||
(-> (async/with-dispatch executor
|
||||
(f cfg request))
|
||||
(p/then respond)
|
||||
(p/catch raise))))
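;; Aside (not part of the diff): wrap-async above adapts a blocking 2-arity
;; handler to the async [request respond raise] contract by dispatching it
;; on an executor. A rough equivalent built only on promesa (aliases
;; [promesa.core :as p] and [promesa.exec :as px] assumed) and a plain JDK
;; executor; the pool size is arbitrary.
(defn wrap-async*
  [executor f]
  (fn [request respond raise]
    (-> (px/submit! executor #(f request))
        (p/then respond)
        (p/catch raise))))

(comment
  (let [executor (java.util.concurrent.Executors/newFixedThreadPool 2)
        handler  (wrap-async* executor (fn [req] {:status 200 :body (:uri req)}))]
    (handler {:uri "/dbg"} println println)))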
|
||||
|
||||
(defmethod ig/pre-init-spec ::handlers [_]
|
||||
(s/keys :req-un [::db/pool ::wrk/executor]))
|
||||
|
||||
(defmethod ig/init-key ::handlers
|
||||
[_ cfg]
|
||||
{:index (wrap-async cfg index)
|
||||
:health-check (wrap-async cfg health-check)
|
||||
:retrieve-file-data (wrap-async cfg retrieve-file-data)
|
||||
:retrieve-file-changes (wrap-async cfg retrieve-file-changes)
|
||||
:retrieve-error (wrap-async cfg retrieve-error)
|
||||
:retrieve-error-list (wrap-async cfg retrieve-error-list)
|
||||
:upload-file-data (wrap-async cfg upload-file-data)})
|
||||
@@ -46,8 +46,9 @@
|
||||
[rpc]
|
||||
(let [context (prepare-context rpc)]
|
||||
(if (contains? cf/flags :backend-api-doc)
|
||||
(fn [_]
|
||||
{:status 200
|
||||
:body (-> (io/resource "api-doc.tmpl")
|
||||
(tmpl/render context))})
|
||||
(constantly {:status 404 :body ""}))))
|
||||
(fn [_ respond _]
|
||||
(respond {:status 200
|
||||
:body (-> (io/resource "api-doc.tmpl")
|
||||
(tmpl/render context))}))
|
||||
(fn [_ respond _]
|
||||
(respond {:status 404 :body ""})))))
|
||||
|
||||
@@ -9,8 +9,7 @@
|
||||
(:require
|
||||
[app.common.exceptions :as ex]
|
||||
[app.common.logging :as l]
|
||||
[app.common.uuid :as uuid]
|
||||
[clojure.pprint]
|
||||
[app.common.spec :as us]
|
||||
[clojure.spec.alpha :as s]
|
||||
[cuerdas.core :as str]))
|
||||
|
||||
@@ -24,12 +23,14 @@
|
||||
[request error]
|
||||
(let [data (ex-data error)]
|
||||
(merge
|
||||
{:id (uuid/next)
|
||||
:path (:uri request)
|
||||
{:path (:uri request)
|
||||
:method (:request-method request)
|
||||
:hint (or (:hint data) (ex-message error))
|
||||
:params (l/stringify-data (:params request))
|
||||
:spec-problems (some-> data ::s/problems)
|
||||
:hint (ex-message error)
|
||||
:params (:params request)
|
||||
|
||||
:spec-problems (some->> data ::s/problems (take 10) seq vec)
|
||||
:spec-value (some->> data ::s/value)
|
||||
:data (some-> data (dissoc ::s/problems ::s/value ::s/spec))
|
||||
:ip-addr (parse-client-ip request)
|
||||
:profile-id (:profile-id request)}
|
||||
|
||||
@@ -37,7 +38,8 @@
|
||||
{:user-agent (get headers "user-agent")
|
||||
:frontend-version (get headers "x-frontend-version" "unknown")})
|
||||
|
||||
(dissoc data ::s/problems))))
|
||||
(when (and data (::s/problems data))
|
||||
{:spec-explain (us/pretty-explain data)}))))
|
||||
|
||||
(defmulti handle-exception
|
||||
(fn [err & _rest]
|
||||
@@ -54,28 +56,26 @@
|
||||
{:status 400 :body (ex-data err)})
|
||||
|
||||
(defmethod handle-exception :validation
|
||||
[err req]
|
||||
(let [header (get-in req [:headers "accept"])
|
||||
edata (ex-data err)]
|
||||
(if (and (= :spec-validation (:code edata))
|
||||
(str/starts-with? header "text/html"))
|
||||
{:status 400
|
||||
:headers {"content-type" "text/html"}
|
||||
:body (str "<pre style='font-size:16px'>"
|
||||
(:explain edata)
|
||||
"</pre>\n")}
|
||||
{:status 400
|
||||
:body (dissoc edata ::s/problems)})))
|
||||
[err _]
|
||||
(let [data (ex-data err)
|
||||
explain (us/pretty-explain data)]
|
||||
{:status 400
|
||||
:body (-> data
|
||||
(dissoc ::s/problems)
|
||||
(dissoc ::s/value)
|
||||
(cond-> explain (assoc :explain explain)))}))
|
||||
|
||||
(defmethod handle-exception :assertion
|
||||
[error request]
|
||||
(let [edata (ex-data error)]
|
||||
(l/with-context (get-error-context request error)
|
||||
(l/error :hint (ex-message error) :cause error))
|
||||
(l/error ::l/raw (ex-message error)
|
||||
::l/context (get-error-context request error)
|
||||
:cause error)
|
||||
|
||||
{:status 500
|
||||
:body {:type :server-error
|
||||
:code :assertion
|
||||
:data (dissoc edata ::s/problems)}}))
|
||||
:data (dissoc edata ::s/problems ::s/value ::s/spec)}}))
|
||||
|
||||
(defmethod handle-exception :not-found
|
||||
[err _]
|
||||
@@ -93,9 +93,9 @@
|
||||
(ex/exception? (:handling edata)))
|
||||
(handle-exception (:handling edata) request)
|
||||
(do
|
||||
(l/with-context (get-error-context request error)
|
||||
(l/error :hint (ex-message error) :cause error))
|
||||
|
||||
(l/error ::l/raw (ex-message error)
|
||||
::l/context (get-error-context request error)
|
||||
:cause error)
|
||||
{:status 500
|
||||
:body {:type :server-error
|
||||
:code :unexpected
|
||||
@@ -105,13 +105,9 @@
|
||||
(defmethod handle-exception org.postgresql.util.PSQLException
|
||||
[error request]
|
||||
(let [state (.getSQLState ^java.sql.SQLException error)]
|
||||
|
||||
(l/with-context (get-error-context request error)
|
||||
(l/error :hint "psql exception"
|
||||
:error-message (ex-message error)
|
||||
:state state
|
||||
:cause error))
|
||||
|
||||
(l/error ::l/raw (ex-message error)
|
||||
::l/context (get-error-context request error)
|
||||
:cause error)
|
||||
(cond
|
||||
(= state "57014")
|
||||
{:status 504
|
||||
|
||||
@@ -14,48 +14,55 @@
|
||||
[app.db :as db]
|
||||
[app.emails :as eml]
|
||||
[app.rpc.queries.profile :as profile]
|
||||
[app.worker :as wrk]
|
||||
[clojure.spec.alpha :as s]
|
||||
[integrant.core :as ig]))
|
||||
[integrant.core :as ig]
|
||||
[promesa.core :as p]
|
||||
[promesa.exec :as px]))
|
||||
|
||||
(declare send-feedback)
|
||||
(declare ^:private send-feedback)
|
||||
(declare ^:private handler)
|
||||
|
||||
(defmethod ig/pre-init-spec ::handler [_]
|
||||
(s/keys :req-un [::db/pool]))
|
||||
(s/keys :req-un [::db/pool ::wrk/executor]))
|
||||
|
||||
(defmethod ig/init-key ::handler
|
||||
[_ {:keys [pool] :as scfg}]
|
||||
(let [ftoken (cf/get :feedback-token ::no-token)
|
||||
enabled (contains? cf/flags :user-feedback)]
|
||||
(fn [{:keys [profile-id] :as request}]
|
||||
(let [token (get-in request [:headers "x-feedback-token"])
|
||||
params (d/merge (:params request)
|
||||
(:body-params request))]
|
||||
[_ {:keys [executor] :as cfg}]
|
||||
(let [enabled? (contains? cf/flags :user-feedback)]
|
||||
(if enabled?
|
||||
(fn [request respond raise]
|
||||
(-> (px/submit! executor #(handler cfg request))
|
||||
(p/then' respond)
|
||||
(p/catch raise)))
|
||||
(fn [_ _ raise]
|
||||
(raise (ex/error :type :validation
|
||||
:code :feedback-disabled
|
||||
:hint "feedback module is disabled"))))))
|
||||
|
||||
(when-not enabled
|
||||
(ex/raise :type :validation
|
||||
:code :feedback-disabled
|
||||
:hint "feedback module is disabled"))
|
||||
(defn- handler
|
||||
[{:keys [pool] :as cfg} {:keys [profile-id] :as request}]
|
||||
(let [ftoken (cf/get :feedback-token ::no-token)
|
||||
token (get-in request [:headers "x-feedback-token"])
|
||||
params (d/merge (:params request)
|
||||
(:body-params request))]
|
||||
(cond
|
||||
(uuid? profile-id)
|
||||
(let [profile (profile/retrieve-profile-data pool profile-id)
|
||||
params (assoc params :from (:email profile))]
|
||||
(send-feedback pool profile params))
|
||||
|
||||
(cond
|
||||
(uuid? profile-id)
|
||||
(let [profile (profile/retrieve-profile-data pool profile-id)
|
||||
params (assoc params :from (:email profile))]
|
||||
(when-not (:is-muted profile)
|
||||
(send-feedback pool profile params)))
|
||||
(= token ftoken)
|
||||
(send-feedback cfg nil params))
|
||||
|
||||
(= token ftoken)
|
||||
(send-feedback scfg nil params))
|
||||
|
||||
{:status 204 :body ""}))))
|
||||
{:status 204 :body ""}))
|
||||
|
||||
(s/def ::content ::us/string)
|
||||
(s/def ::from ::us/email)
|
||||
(s/def ::subject ::us/string)
|
||||
|
||||
(s/def ::feedback
|
||||
(s/keys :req-un [::from ::subject ::content]))
|
||||
|
||||
(defn send-feedback
|
||||
(defn- send-feedback
|
||||
[pool profile params]
|
||||
(let [params (us/conform ::feedback params)
|
||||
destination (cf/get :feedback-destination)]
|
||||
|
||||
@@ -9,24 +9,25 @@
|
||||
[app.common.logging :as l]
|
||||
[app.common.transit :as t]
|
||||
[app.config :as cf]
|
||||
[app.metrics :as mtx]
|
||||
[app.util.json :as json]
|
||||
[buddy.core.codecs :as bc]
|
||||
[buddy.core.hash :as bh]
|
||||
[ring.core.protocols :as rp]
|
||||
[ring.middleware.cookies :refer [wrap-cookies]]
|
||||
[ring.middleware.keyword-params :refer [wrap-keyword-params]]
|
||||
[ring.middleware.multipart-params :refer [wrap-multipart-params]]
|
||||
[ring.middleware.params :refer [wrap-params]]))
|
||||
[ring.middleware.params :refer [wrap-params]]
|
||||
[yetti.adapter :as yt]))
|
||||
|
||||
(defn wrap-server-timing
|
||||
[handler]
|
||||
(let [seconds-from #(float (/ (- (System/nanoTime) %) 1000000000))]
|
||||
(fn [request]
|
||||
(let [start (System/nanoTime)
|
||||
response (handler request)]
|
||||
(update response :headers
|
||||
(fn [headers]
|
||||
(assoc headers "Server-Timing" (str "total;dur=" (seconds-from start)))))))))
|
||||
(letfn [(get-age [start]
|
||||
(float (/ (- (System/nanoTime) start) 1000000000)))
|
||||
|
||||
(update-headers [headers start]
|
||||
(assoc headers "Server-Timing" (str "total;dur=" (get-age start))))]
|
||||
|
||||
(fn [request respond raise]
|
||||
(let [start (System/nanoTime)]
|
||||
(handler request #(respond (update % :headers update-headers start)) raise)))))
|
||||
|
||||
(defn wrap-parse-request-body
|
||||
[handler]
|
||||
@@ -37,63 +38,101 @@
|
||||
(parse-json [body]
|
||||
(json/read body))
|
||||
|
||||
(parse [type body]
|
||||
(try
|
||||
(case type
|
||||
:json (parse-json body)
|
||||
:transit (parse-transit body))
|
||||
(catch Exception e
|
||||
(let [data {:type :parse
|
||||
:hint "unable to parse request body"
|
||||
:message (ex-message e)}]
|
||||
{:status 400
|
||||
:headers {"content-type" "application/transit+json"}
|
||||
:body (t/encode-str data {:type :json-verbose})}))))]
|
||||
(handle-request [{:keys [headers body] :as request}]
|
||||
(let [ctype (get headers "content-type")]
|
||||
(case ctype
|
||||
"application/transit+json"
|
||||
(let [params (parse-transit body)]
|
||||
(-> request
|
||||
(assoc :body-params params)
|
||||
(update :params merge params)))
|
||||
|
||||
(fn [{:keys [headers body] :as request}]
|
||||
(let [ctype (get headers "content-type")]
|
||||
(handler
|
||||
(case ctype
|
||||
"application/transit+json"
|
||||
(let [params (parse :transit body)]
|
||||
(-> request
|
||||
(assoc :body-params params)
|
||||
(update :params merge params)))
|
||||
"application/json"
|
||||
(let [params (parse-json body)]
|
||||
(-> request
|
||||
(assoc :body-params params)
|
||||
(update :params merge params)))
|
||||
|
||||
"application/json"
|
||||
(let [params (parse :json body)]
|
||||
(-> request
|
||||
(assoc :body-params params)
|
||||
(update :params merge params)))
|
||||
request)))
|
||||
|
||||
request))))))
|
||||
(handle-exception [cause]
|
||||
(let [data {:type :validation
|
||||
:code :unable-to-parse-request-body
|
||||
:hint "malformed params"}]
|
||||
(l/error :hint (ex-message cause) :cause cause)
|
||||
{:status 400
|
||||
:headers {"content-type" "application/transit+json"}
|
||||
:body (t/encode-str data {:type :json-verbose})}))]
|
||||
|
||||
(fn [request respond raise]
|
||||
(try
|
||||
(let [request (handle-request request)]
|
||||
(handler request respond raise))
|
||||
(catch Exception cause
|
||||
(respond (handle-exception cause)))))))
|
||||
|
||||
(def parse-request-body
|
||||
{:name ::parse-request-body
|
||||
:compile (constantly wrap-parse-request-body)})
|
||||
|
||||
(defn- impl-format-response-body
|
||||
[response _request]
|
||||
(let [body (:body response)
|
||||
opts {:type :json}]
|
||||
(cond
|
||||
(coll? body)
|
||||
(-> response
|
||||
(update :headers assoc "content-type" "application/transit+json")
|
||||
(assoc :body (t/encode body opts)))
|
||||
(defn buffered-output-stream
|
||||
"Returns a buffered output stream that ignores flush calls. This is
|
||||
needed because transit-java calls flush very aggressively on each
|
||||
object write."
|
||||
[^java.io.OutputStream os ^long chunk-size]
|
||||
(proxy [java.io.BufferedOutputStream] [os (int chunk-size)]
|
||||
;; Explicitly do not forward flush
|
||||
(flush [])
|
||||
(close []
|
||||
(proxy-super flush)
|
||||
(proxy-super close))))
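;; Aside (not part of the diff): a quick REPL check of the flush-suppressing
;; behaviour described in the docstring above; writes stay buffered despite
;; flush calls and only reach the underlying stream on close.
(comment
  (let [target (java.io.ByteArrayOutputStream.)
        bos    (buffered-output-stream target 8192)]
    (.write bos (.getBytes "hello"))
    (.flush bos)                 ;; swallowed by the proxy
    (println (.size target))     ;; => 0, nothing written yet
    (.close bos)                 ;; buffered bytes are flushed here
    (println (.size target))))   ;; => 5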
|
||||
|
||||
(nil? body)
|
||||
(assoc response :status 204 :body "")
|
||||
(def ^:const buffer-size (:http/output-buffer-size yt/base-defaults))
|
||||
|
||||
:else
|
||||
response)))
|
||||
|
||||
(defn- wrap-format-response-body
|
||||
(defn wrap-format-response-body
|
||||
[handler]
|
||||
(fn [request]
|
||||
(let [response (handler request)]
|
||||
(cond-> response
|
||||
(map? response) (impl-format-response-body request)))))
|
||||
(letfn [(transit-streamable-body [data opts]
|
||||
(reify rp/StreamableResponseBody
|
||||
(write-body-to-stream [_ _ output-stream]
|
||||
;; Use the same buffer as jetty output buffer size
|
||||
(try
|
||||
(with-open [bos (buffered-output-stream output-stream buffer-size)]
|
||||
(let [tw (t/writer bos opts)]
|
||||
(t/write! tw data)))
|
||||
(catch org.eclipse.jetty.io.EofException _cause
|
||||
;; Do nothing, EOF means the client closed the connection abruptly
|
||||
nil)
|
||||
(catch Throwable cause
|
||||
(l/warn :hint "unexpected error on encoding response"
|
||||
:cause cause))))))
|
||||
|
||||
(impl-format-response-body [response {:keys [query-params] :as request}]
|
||||
(let [body (:body response)
|
||||
opts {:type (if (contains? query-params "transit_verbose") :json-verbose :json)}]
|
||||
(cond
|
||||
(:ws response)
|
||||
response
|
||||
|
||||
(coll? body)
|
||||
(-> response
|
||||
(update :headers assoc "content-type" "application/transit+json")
|
||||
(assoc :body (transit-streamable-body body opts)))
|
||||
|
||||
(nil? body)
|
||||
(assoc response :status 204 :body "")
|
||||
|
||||
:else
|
||||
response)))
|
||||
|
||||
(handle-response [response request]
|
||||
(cond-> response
|
||||
(map? response) (impl-format-response-body request)))]
|
||||
|
||||
(fn [request respond raise]
|
||||
(handler request
|
||||
(fn [response]
|
||||
(respond (handle-response response request)))
|
||||
raise))))
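;; Aside (not part of the diff): transit-streamable-body above relies on
;; ring's StreamableResponseBody protocol. A stripped-down sketch of the
;; same idea that streams plain text lines instead of transit; the names
;; are invented and [clojure.java.io :as io] is assumed in addition to the
;; ns requires above.
(defn text-streamable-body
  [lines]
  (reify rp/StreamableResponseBody
    (write-body-to-stream [_ _response output-stream]
      (with-open [w (io/writer output-stream)]
        (doseq [line lines]
          (.write ^java.io.Writer w (str line "\n")))))))

(comment
  ;; Returned from a handler like any other ring response map:
  {:status 200
   :headers {"content-type" "text/plain"}
   :body (text-streamable-body ["first line" "second line"])})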
|
||||
|
||||
(def format-response-body
|
||||
{:name ::format-response-body
|
||||
@@ -101,21 +140,14 @@
|
||||
|
||||
(defn wrap-errors
|
||||
[handler on-error]
|
||||
(fn [request]
|
||||
(try
|
||||
(handler request)
|
||||
(catch Throwable e
|
||||
(on-error e request)))))
|
||||
(fn [request respond _]
|
||||
(handler request respond (fn [cause]
|
||||
(-> cause (on-error request) respond)))))
|
||||
|
||||
(def errors
|
||||
{:name ::errors
|
||||
:compile (constantly wrap-errors)})
|
||||
|
||||
(def metrics
|
||||
{:name ::metrics
|
||||
:wrap (fn [handler]
|
||||
(mtx/wrap-counter handler {:id "http__requests_counter"
|
||||
:help "Absolute http requests counter."}))})
|
||||
(def cookies
|
||||
{:name ::cookies
|
||||
:compile (constantly wrap-cookies)})
|
||||
@@ -136,47 +168,7 @@
|
||||
{:name ::server-timing
|
||||
:compile (constantly wrap-server-timing)})
|
||||
|
||||
(defn wrap-etag
|
||||
[handler]
|
||||
(letfn [(generate-etag [{:keys [body] :as response}]
|
||||
(str "W/\"" (-> body bh/blake2b-128 bc/bytes->hex) "\""))
|
||||
(get-match [{:keys [headers] :as request}]
|
||||
(get headers "if-none-match"))]
|
||||
(fn [request]
|
||||
(let [response (handler request)]
|
||||
(if (= :get (:request-method request))
|
||||
(let [etag (generate-etag response)
|
||||
match (get-match request)
|
||||
response (update response :headers #(assoc % "ETag" etag))]
|
||||
(cond-> response
|
||||
(and (string? match)
|
||||
(= :get (:request-method request))
|
||||
(= etag match))
|
||||
(-> response
|
||||
(assoc :body "")
|
||||
(assoc :status 304))))
|
||||
response)))))
|
||||
|
||||
(def etag
|
||||
{:name ::etag
|
||||
:compile (constantly wrap-etag)})
|
||||
|
||||
(defn activity-logger
|
||||
[handler]
|
||||
(let [logger "penpot.profile-activity"]
|
||||
(fn [{:keys [headers] :as request}]
|
||||
(let [ip-addr (get headers "x-forwarded-for")
|
||||
profile-id (:profile-id request)
|
||||
qstring (:query-string request)]
|
||||
(l/info ::l/async true
|
||||
::l/logger logger
|
||||
:ip-addr ip-addr
|
||||
:profile-id profile-id
|
||||
:uri (str (:uri request) (when qstring (str "?" qstring)))
|
||||
:method (name (:request-method request)))
|
||||
(handler request)))))
|
||||
|
||||
(defn- wrap-cors
|
||||
(defn wrap-cors
|
||||
[handler]
|
||||
(if-not (contains? cf/flags :cors)
|
||||
handler
|
||||
@@ -191,12 +183,15 @@
|
||||
(assoc "access-control-allow-credentials" "true")
|
||||
(assoc "access-control-expose-headers" "x-requested-with, content-type, cookie")
|
||||
(assoc "access-control-allow-headers" "x-frontend-version, content-type, accept, x-requested-width"))))))]
|
||||
(fn [request]
|
||||
(fn [request respond raise]
|
||||
(if (= (:request-method request) :options)
|
||||
(-> {:status 200 :body ""}
|
||||
(add-cors-headers request))
|
||||
(let [response (handler request)]
|
||||
(add-cors-headers response request)))))))
|
||||
(add-cors-headers request)
|
||||
(respond))
|
||||
(handler request
|
||||
(fn [response]
|
||||
(respond (add-cors-headers response request)))
|
||||
raise))))))
|
||||
|
||||
(def cors
|
||||
{:name ::cors
|
||||
|
||||
@@ -21,7 +21,10 @@
|
||||
[clojure.set :as set]
|
||||
[clojure.spec.alpha :as s]
|
||||
[cuerdas.core :as str]
|
||||
[integrant.core :as ig]))
|
||||
[integrant.core :as ig]
|
||||
[promesa.exec :as px]))
|
||||
|
||||
;; TODO: make it fully async (?)
|
||||
|
||||
(defn- build-redirect-uri
|
||||
[{:keys [provider] :as cfg}]
|
||||
@@ -130,7 +133,7 @@
|
||||
(when-not (set/subset? provider-roles profile-roles)
|
||||
(ex/raise :type :internal
|
||||
:code :unable-to-auth
|
||||
:hint "not enought permissions"))))
|
||||
:hint "not enough permissions"))))
|
||||
|
||||
(cond-> info
|
||||
(some? (:invitation-token state))
|
||||
@@ -213,28 +216,35 @@
|
||||
(redirect-response uri))))
|
||||
|
||||
(defn- auth-handler
|
||||
[{:keys [tokens] :as cfg} {:keys [params] :as request}]
|
||||
(let [invitation (:invitation-token params)
|
||||
props (extract-utm-props params)
|
||||
state (tokens :generate
|
||||
{:iss :oauth
|
||||
:invitation-token invitation
|
||||
:props props
|
||||
:exp (dt/in-future "15m")})
|
||||
uri (build-auth-uri cfg state)]
|
||||
{:status 200
|
||||
:body {:redirect-uri uri}}))
|
||||
[{:keys [tokens executor] :as cfg} {:keys [params] :as request} respond _]
|
||||
(px/run!
|
||||
executor
|
||||
(fn []
|
||||
(let [invitation (:invitation-token params)
|
||||
props (extract-utm-props params)
|
||||
state (tokens :generate
|
||||
{:iss :oauth
|
||||
:invitation-token invitation
|
||||
:props props
|
||||
:exp (dt/in-future "15m")})
|
||||
uri (build-auth-uri cfg state)]
|
||||
|
||||
(respond
|
||||
{:status 200
|
||||
:body {:redirect-uri uri}})))))
|
||||
|
||||
(defn- callback-handler
|
||||
[cfg request]
|
||||
(try
|
||||
(let [info (retrieve-info cfg request)
|
||||
profile (retrieve-profile cfg info)]
|
||||
(generate-redirect cfg request info profile))
|
||||
(catch Exception e
|
||||
(l/warn :hint "error on oauth process"
|
||||
:cause e)
|
||||
(generate-error-redirect cfg e))))
|
||||
[{:keys [executor] :as cfg} request respond _]
|
||||
(px/run!
|
||||
executor
|
||||
(fn []
|
||||
(try
|
||||
(let [info (retrieve-info cfg request)
|
||||
profile (retrieve-profile cfg info)]
|
||||
(respond (generate-redirect cfg request info profile)))
|
||||
(catch Exception cause
|
||||
(l/warn :hint "error on oauth process" :cause cause)
|
||||
(respond (generate-error-redirect cfg cause)))))))
|
||||
|
||||
;; --- INIT
|
||||
|
||||
@@ -250,15 +260,19 @@
|
||||
|
||||
(defn wrap-handler
|
||||
[cfg handler]
|
||||
(fn [request]
|
||||
(fn [request respond raise]
|
||||
(let [provider (get-in request [:path-params :provider])
|
||||
provider (get-in @cfg [:providers provider])]
|
||||
(when-not provider
|
||||
(ex/raise :type :not-found
|
||||
:context {:provider provider}
|
||||
:hint "provider not configured"))
|
||||
(-> (assoc @cfg :provider provider)
|
||||
(handler request)))))
|
||||
(if provider
|
||||
(handler (assoc @cfg :provider provider)
|
||||
request
|
||||
respond
|
||||
raise)
|
||||
(raise
|
||||
(ex/error
|
||||
:type :not-found
|
||||
:provider provider
|
||||
:hint "provider not configured"))))))
|
||||
|
||||
(defmethod ig/init-key ::handler
|
||||
[_ cfg]
|
||||
@@ -268,14 +282,29 @@
|
||||
|
||||
(defn- discover-oidc-config
|
||||
[{:keys [base-uri] :as opts}]
|
||||
|
||||
(let [discovery-uri (u/join base-uri ".well-known/openid-configuration")
|
||||
response (http/send! {:method :get :uri (str discovery-uri)})]
|
||||
(when (= 200 (:status response))
|
||||
response (ex/try (http/send! {:method :get :uri (str discovery-uri)}))]
|
||||
(cond
|
||||
(ex/exception? response)
|
||||
(do
|
||||
(l/warn :hint "unable to discover oidc configuration"
|
||||
:discover-uri (str discovery-uri)
|
||||
:cause response)
|
||||
nil)
|
||||
|
||||
(= 200 (:status response))
|
||||
(let [data (json/read-str (:body response))]
|
||||
(assoc opts
|
||||
:token-uri (get data "token_endpoint")
|
||||
:auth-uri (get data "authorization_endpoint")
|
||||
:user-uri (get data "userinfo_endpoint"))))))
|
||||
{:token-uri (get data "token_endpoint")
|
||||
:auth-uri (get data "authorization_endpoint")
|
||||
:user-uri (get data "userinfo_endpoint")})
|
||||
|
||||
:else
|
||||
(do
|
||||
(l/warn :hint "unable to discover OIDC configuration"
|
||||
:uri (str discovery-uri)
|
||||
:response-status-code (:status response))
|
||||
nil))))
|
||||
|
||||
(defn- obfuscate-string
|
||||
[s]
|
||||
@@ -299,17 +328,23 @@
|
||||
(if (and (string? (:base-uri opts))
|
||||
(string? (:client-id opts))
|
||||
(string? (:client-secret opts)))
|
||||
(if (and (string? (:token-uri opts))
|
||||
(string? (:user-uri opts))
|
||||
(string? (:auth-uri opts)))
|
||||
(do
|
||||
(l/info :action "initialize" :provider "oidc" :method "static"
|
||||
:opts (pr-str (update opts :client-secret obfuscate-string)))
|
||||
(assoc-in cfg [:providers "oidc"] opts))
|
||||
(let [opts (discover-oidc-config opts)]
|
||||
(l/info :action "initialize" :provider "oidc" :method "discover"
|
||||
:opts (pr-str (update opts :client-secret obfuscate-string)))
|
||||
(assoc-in cfg [:providers "oidc"] opts)))
|
||||
(do
|
||||
(l/debug :hint "initialize oidc provider" :name "generic-oidc"
|
||||
:opts (update opts :client-secret obfuscate-string))
|
||||
(if (and (string? (:token-uri opts))
|
||||
(string? (:user-uri opts))
|
||||
(string? (:auth-uri opts)))
|
||||
(do
|
||||
(l/debug :hint "initialized with user provided configuration")
|
||||
(assoc-in cfg [:providers "oidc"] opts))
|
||||
(do
|
||||
(l/debug :hint "trying to discover oidc provider configuration using BASE_URI")
|
||||
(if-let [opts' (discover-oidc-config opts)]
|
||||
(do
|
||||
(l/debug :hint "discovered opts" :additional-opts opts')
|
||||
(assoc-in cfg [:providers "oidc"] (merge opts opts')))
|
||||
|
||||
cfg))))
|
||||
cfg)))
|
||||
|
||||
(defn- initialize-google-provider
|
||||
|
||||
@@ -11,96 +11,167 @@
|
||||
[app.common.logging :as l]
|
||||
[app.config :as cfg]
|
||||
[app.db :as db]
|
||||
[app.db.sql :as sql]
|
||||
[app.metrics :as mtx]
|
||||
[app.util.async :as aa]
|
||||
[app.util.time :as dt]
|
||||
[app.worker :as wrk]
|
||||
[clojure.core.async :as a]
|
||||
[clojure.spec.alpha :as s]
|
||||
[integrant.core :as ig]))
|
||||
[integrant.core :as ig]
|
||||
[ring.middleware.session.store :as rss]))
|
||||
|
||||
;; A default cookie name for storing the session. We don't allow
|
||||
;; configure it.
|
||||
(def cookie-name "auth-token")
|
||||
;; A default cookie name for storing the session. We don't allow to configure it.
|
||||
(def token-cookie-name "auth-token")
|
||||
|
||||
;; A cookie that we can use to check from other sites of the same domain if a user
|
||||
;; is registered. It is not intended for on-premise installations, although nothing
;; prevents using it if someone wants to.
|
||||
(def authenticated-cookie-name "authenticated")
|
||||
|
||||
(deftype DatabaseStore [pool tokens]
|
||||
rss/SessionStore
|
||||
(read-session [_ token]
|
||||
(db/exec-one! pool (sql/select :http-session {:id token})))
|
||||
|
||||
(write-session [_ _ data]
|
||||
(let [profile-id (:profile-id data)
|
||||
user-agent (:user-agent data)
|
||||
token (tokens :generate {:iss "authentication"
|
||||
:iat (dt/now)
|
||||
:uid profile-id})
|
||||
|
||||
now (dt/now)
|
||||
params {:user-agent user-agent
|
||||
:profile-id profile-id
|
||||
:created-at now
|
||||
:updated-at now
|
||||
:id token}]
|
||||
(db/insert! pool :http-session params)
|
||||
token))
|
||||
|
||||
(delete-session [_ token]
|
||||
(db/delete! pool :http-session {:id token})
|
||||
nil))
|
||||
|
||||
(deftype MemoryStore [cache tokens]
|
||||
rss/SessionStore
|
||||
(read-session [_ token]
|
||||
(get @cache token))
|
||||
|
||||
(write-session [_ _ data]
|
||||
(let [profile-id (:profile-id data)
|
||||
user-agent (:user-agent data)
|
||||
token (tokens :generate {:iss "authentication"
|
||||
:iat (dt/now)
|
||||
:uid profile-id})
|
||||
params {:user-agent user-agent
|
||||
:profile-id profile-id
|
||||
:id token}]
|
||||
|
||||
(swap! cache assoc token params)
|
||||
token))
|
||||
|
||||
(delete-session [_ token]
|
||||
(swap! cache dissoc token)
|
||||
nil))
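;; Aside (not part of the diff): both store types above implement ring's
;; SessionStore protocol, so a round-trip can be exercised directly against
;; a throwaway in-memory store; the token function here is a stand-in for
;; penpot's tokens service.
(comment
  (let [tokens (fn [_op _claims] (str (java.util.UUID/randomUUID)))
        store  (->MemoryStore (atom {}) tokens)
        token  (rss/write-session store nil {:profile-id (java.util.UUID/randomUUID)
                                             :user-agent "repl"})]
    [(rss/read-session store token)
     (do (rss/delete-session store token)
         (rss/read-session store token))]))
;; => [{:user-agent "repl", :profile-id #uuid "…", :id "…"} nil]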
|
||||
|
||||
;; --- IMPL
|
||||
|
||||
(defn- create-session
|
||||
[{:keys [conn tokens] :as cfg} {:keys [profile-id headers] :as request}]
|
||||
(let [token (tokens :generate {:iss "authentication"
|
||||
:iat (dt/now)
|
||||
:uid profile-id})
|
||||
params {:user-agent (get headers "user-agent")
|
||||
:profile-id profile-id
|
||||
:id token}]
|
||||
(db/insert! conn :http-session params)))
|
||||
[store request profile-id]
|
||||
(let [params {:user-agent (get-in request [:headers "user-agent"])
|
||||
:profile-id profile-id}]
|
||||
(rss/write-session store nil params)))
|
||||
|
||||
(defn- delete-session
|
||||
[{:keys [conn] :as cfg} {:keys [cookies] :as request}]
|
||||
(when-let [token (get-in cookies [cookie-name :value])]
|
||||
(db/delete! conn :http-session {:id token}))
|
||||
nil)
|
||||
[store {:keys [cookies] :as request}]
|
||||
(when-let [token (get-in cookies [token-cookie-name :value])]
|
||||
(rss/delete-session store token)))
|
||||
|
||||
(defn- retrieve-session
|
||||
[{:keys [conn] :as cfg} id]
|
||||
(when id
|
||||
(db/exec-one! conn ["select id, profile_id from http_session where id = ?" id])))
|
||||
[store token]
|
||||
(when token
|
||||
(rss/read-session store token)))
|
||||
|
||||
(defn- retrieve-from-request
|
||||
[cfg {:keys [cookies] :as request}]
|
||||
(->> (get-in cookies [cookie-name :value])
|
||||
(retrieve-session cfg)))
|
||||
[store {:keys [cookies] :as request}]
|
||||
(->> (get-in cookies [token-cookie-name :value])
|
||||
(retrieve-session store)))
|
||||
|
||||
(defn- add-cookies
|
||||
[response {:keys [id] :as session}]
|
||||
[response token]
|
||||
(let [cors? (contains? cfg/flags :cors)
|
||||
secure? (contains? cfg/flags :secure-session-cookies)]
|
||||
(assoc response :cookies {cookie-name {:path "/"
|
||||
:http-only true
|
||||
:value id
|
||||
:same-site (cond (not secure?) :lax
|
||||
cors? :none
|
||||
:else :strict)
|
||||
:secure secure?}})))
|
||||
secure? (contains? cfg/flags :secure-session-cookies)
|
||||
authenticated-cookie-domain (cfg/get :authenticated-cookie-domain)]
|
||||
(update response :cookies
|
||||
(fn [cookies]
|
||||
(cond-> cookies
|
||||
:always
|
||||
(assoc token-cookie-name {:path "/"
|
||||
:http-only true
|
||||
:value token
|
||||
:same-site (if cors? :none :lax)
|
||||
:secure secure?})
|
||||
|
||||
(some? authenticated-cookie-domain)
|
||||
(assoc authenticated-cookie-name {:domain authenticated-cookie-domain
|
||||
:path "/"
|
||||
:value true
|
||||
:same-site :strict
|
||||
:secure secure?}))))))
|
||||
|
||||
(defn- clear-cookies
|
||||
[response]
|
||||
(assoc response :cookies {cookie-name {:value "" :max-age -1}}))
|
||||
(let [authenticated-cookie-domain (cfg/get :authenticated-cookie-domain)]
|
||||
(assoc response :cookies {token-cookie-name {:path "/"
|
||||
:value ""
|
||||
:max-age -1}
|
||||
authenticated-cookie-name {:domain authenticated-cookie-domain
|
||||
:path "/"
|
||||
:value ""
|
||||
:max-age -1}})))
|
||||
|
||||
(defn- middleware
|
||||
[cfg handler]
|
||||
(fn [request]
|
||||
(if-let [{:keys [id profile-id] :as session} (retrieve-from-request cfg request)]
|
||||
[events-ch store handler]
|
||||
(fn [request respond raise]
|
||||
(if-let [{:keys [id profile-id] :as session} (retrieve-from-request store request)]
|
||||
(do
|
||||
(a/>!! (::events-ch cfg) id)
|
||||
(handler (assoc request :profile-id profile-id)))
|
||||
(handler request))))
|
||||
(a/>!! events-ch id)
|
||||
(l/set-context! {:profile-id profile-id})
|
||||
(handler (assoc request :profile-id profile-id :session-id id) respond raise))
|
||||
(handler request respond raise))))
|
||||
|
||||
;; --- STATE INIT: SESSION
|
||||
|
||||
(s/def ::tokens fn?)
|
||||
(defmethod ig/pre-init-spec ::session [_]
|
||||
(s/keys :req-un [::db/pool]))
|
||||
(s/keys :req-un [::db/pool ::tokens]))
|
||||
|
||||
(defmethod ig/prep-key ::session
|
||||
[_ cfg]
|
||||
(d/merge {:buffer-size 64}
|
||||
(d/merge {:buffer-size 128}
|
||||
(d/without-nils cfg)))
|
||||
|
||||
(defmethod ig/init-key ::session
|
||||
[_ {:keys [pool] :as cfg}]
|
||||
(let [events (a/chan (a/dropping-buffer (:buffer-size cfg)))
|
||||
cfg (-> cfg
|
||||
(assoc :conn pool)
|
||||
(assoc ::events-ch events))]
|
||||
[_ {:keys [pool tokens] :as cfg}]
|
||||
(let [events-ch (a/chan (a/dropping-buffer (:buffer-size cfg)))
|
||||
store (if (db/read-only? pool)
|
||||
(->MemoryStore (atom {}) tokens)
|
||||
(->DatabaseStore pool tokens))]
|
||||
|
||||
(when (db/read-only? pool)
|
||||
(l/warn :hint "sessions module initialized with in-memory store"))
|
||||
|
||||
(-> cfg
|
||||
(assoc :middleware #(middleware cfg %))
|
||||
(assoc ::events-ch events-ch)
|
||||
(assoc :middleware (partial middleware events-ch store))
|
||||
(assoc :create (fn [profile-id]
|
||||
(fn [request response]
|
||||
(let [request (assoc request :profile-id profile-id)
|
||||
session (create-session cfg request)]
|
||||
(add-cookies response session)))))
|
||||
(let [token (create-session store request profile-id)]
|
||||
(add-cookies response token)))))
|
||||
(assoc :delete (fn [request response]
|
||||
(delete-session cfg request)
|
||||
(delete-session store request)
|
||||
(-> response
|
||||
(assoc :status 204)
|
||||
(assoc :body "")
|
||||
@@ -137,16 +208,11 @@
|
||||
:max-batch-size (str (:max-batch-size cfg)))
|
||||
(let [input (aa/batch (::events-ch session)
|
||||
{:max-batch-size (:max-batch-size cfg)
|
||||
:max-batch-age (inst-ms (:max-batch-age cfg))})
|
||||
mcnt (mtx/create
|
||||
{:name "http_session_update_total"
|
||||
:help "A counter of session update batch events."
|
||||
:registry (:registry metrics)
|
||||
:type :counter})]
|
||||
:max-batch-age (inst-ms (:max-batch-age cfg))})]
|
||||
(a/go-loop []
|
||||
(when-let [[reason batch] (a/<! input)]
|
||||
(let [result (a/<! (update-sessions cfg batch))]
|
||||
(mcnt :inc)
|
||||
(mtx/run! metrics {:id :session-update-total :inc 1})
|
||||
(cond
|
||||
(ex/exception? result)
|
||||
(l/error :task "updater"
|
||||
@@ -155,9 +221,10 @@
|
||||
|
||||
(= :size reason)
|
||||
(l/debug :task "updater"
|
||||
:action "update sessions"
|
||||
:hint "update sessions"
|
||||
:reason (name reason)
|
||||
:count result))
|
||||
|
||||
(recur))))))
|
||||
|
||||
(defn- update-sessions
|
||||
@@ -184,17 +251,20 @@
|
||||
|
||||
(defmethod ig/init-key ::gc-task
|
||||
[_ {:keys [pool max-age] :as cfg}]
|
||||
(l/debug :hint "initializing session gc task" :max-age max-age)
|
||||
(fn [_]
|
||||
(db/with-atomic [conn pool]
|
||||
(let [interval (db/interval max-age)
|
||||
result (db/exec-one! conn [sql:delete-expired interval])
|
||||
result (db/exec-one! conn [sql:delete-expired interval interval])
|
||||
result (:next.jdbc/update-count result)]
|
||||
(l/debug :task "gc"
|
||||
:action "clean http sessions"
|
||||
:count result)
|
||||
:hint "clean http sessions"
|
||||
:deleted result)
|
||||
result))))
|
||||
|
||||
(def ^:private
|
||||
sql:delete-expired
|
||||
"delete from http_session
|
||||
where updated_at < now() - ?::interval")
|
||||
where updated_at < now() - ?::interval
|
||||
or (updated_at is null and
|
||||
created_at < now() - ?::interval)")
|
||||
|
||||
backend/src/app/http/websocket.clj (new file, 144 lines)
@@ -0,0 +1,144 @@
|
||||
;; This Source Code Form is subject to the terms of the Mozilla Public
|
||||
;; License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
;;
|
||||
;; Copyright (c) UXBOX Labs SL
|
||||
|
||||
(ns app.http.websocket
|
||||
"A penpot notification service for file cooperative edition."
|
||||
(:require
|
||||
[app.common.exceptions :as ex]
|
||||
[app.common.logging :as l]
|
||||
[app.common.spec :as us]
|
||||
[app.db :as db]
|
||||
[app.metrics :as mtx]
|
||||
[app.util.websocket :as ws]
|
||||
[clojure.core.async :as a]
|
||||
[clojure.spec.alpha :as s]
|
||||
[integrant.core :as ig]
|
||||
[yetti.websocket :as yws]))
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; WEBSOCKET HANDLER
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
(declare send-presence!)
|
||||
|
||||
(defmulti handle-message
|
||||
(fn [_wsp message] (:type message)))
|
||||
|
||||
(defmethod handle-message :connect
|
||||
[wsp _]
|
||||
(let [{:keys [msgbus file-id team-id session-id ::ws/output-ch]} @wsp
|
||||
sub-ch (a/chan (a/dropping-buffer 32))]
|
||||
|
||||
(swap! wsp assoc :sub-ch sub-ch)
|
||||
|
||||
;; Start a subscription forwarding goroutine
|
||||
(a/go-loop []
|
||||
(when-let [val (a/<! sub-ch)]
|
||||
(when-not (= (:session-id val) session-id)
|
||||
;; If we receive a connect message from another user, we need
;; to send a presence update to all participants.
|
||||
(when (= :connect (:type val))
|
||||
(a/<! (send-presence! @wsp :presence)))
|
||||
|
||||
;; Then, just forward the message
|
||||
(a/>! output-ch val))
|
||||
(recur)))
|
||||
|
||||
(a/go
|
||||
(a/<! (msgbus :sub {:topics [file-id team-id] :chan sub-ch}))
|
||||
(a/<! (send-presence! @wsp :connect)))))
|
||||
|
||||
(defmethod handle-message :disconnect
|
||||
[wsp _]
|
||||
(a/close! (:sub-ch @wsp))
|
||||
(send-presence! @wsp :disconnect))
|
||||
|
||||
(defmethod handle-message :keepalive
|
||||
[_ _]
|
||||
(a/go :nothing))
|
||||
|
||||
(defmethod handle-message :pointer-update
|
||||
[wsp message]
|
||||
(let [{:keys [profile-id file-id session-id msgbus]} @wsp]
|
||||
(msgbus :pub {:topic file-id
|
||||
:message (assoc message
|
||||
:profile-id profile-id
|
||||
:session-id session-id)})))
|
||||
|
||||
(defmethod handle-message :default
|
||||
[_ message]
|
||||
(a/go
|
||||
(l/log :level :warn
|
||||
:msg "received unexpected message"
|
||||
:message message)))
|
||||
|
||||
;; --- IMPL
|
||||
|
||||
(defn- send-presence!
|
||||
([ws] (send-presence! ws :presence))
|
||||
([{:keys [msgbus session-id profile-id file-id]} type]
|
||||
(msgbus :pub {:topic file-id
|
||||
:message {:type type
|
||||
:session-id session-id
|
||||
:profile-id profile-id}})))
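;; Aside (not part of the diff): handle-message dispatches on the message
;; :type. Exercising the :pointer-update method with a stubbed msgbus that
;; simply returns what would have been published; the uuids are invented.
(comment
  (let [wsp (atom {:msgbus     (fn [_verb payload] payload) ;; stub
                   :profile-id #uuid "00000000-0000-0000-0000-000000000001"
                   :session-id #uuid "00000000-0000-0000-0000-000000000002"
                   :file-id    #uuid "00000000-0000-0000-0000-000000000003"})]
    (handle-message wsp {:type :pointer-update :position [10 20]}))
  ;; => {:topic   #uuid "…0003"
  ;;     :message {:type :pointer-update, :position [10 20],
  ;;               :profile-id #uuid "…0001", :session-id #uuid "…0002"}}
  )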
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; HTTP HANDLER
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
(declare retrieve-file)
|
||||
|
||||
(s/def ::msgbus fn?)
|
||||
(s/def ::file-id ::us/uuid)
|
||||
(s/def ::session-id ::us/uuid)
|
||||
|
||||
(s/def ::handler-params
|
||||
(s/keys :req-un [::file-id ::session-id]))
|
||||
|
||||
(defmethod ig/pre-init-spec ::handler [_]
|
||||
(s/keys :req-un [::msgbus ::db/pool ::mtx/metrics]))
|
||||
|
||||
(defmethod ig/init-key ::handler
|
||||
[_ {:keys [pool] :as cfg}]
|
||||
(fn [{:keys [profile-id params] :as req} respond raise]
|
||||
(let [params (us/conform ::handler-params params)
|
||||
file (retrieve-file pool (:file-id params))
|
||||
cfg (-> (merge cfg params)
|
||||
(assoc :profile-id profile-id)
|
||||
(assoc :team-id (:team-id file)))]
|
||||
|
||||
(cond
|
||||
(not profile-id)
|
||||
(raise (ex/error :type :authentication
|
||||
:hint "Authentication required."))
|
||||
|
||||
(not file)
|
||||
(raise (ex/error :type :not-found
|
||||
:code :object-not-found))
|
||||
|
||||
|
||||
(not (yws/upgrade-request? req))
|
||||
(raise (ex/error :type :validation
|
||||
:code :websocket-request-expected
|
||||
:hint "this endpoint only accepts websocket connections"))
|
||||
|
||||
:else
|
||||
(->> (ws/handler handle-message cfg)
|
||||
(yws/upgrade req)
|
||||
(respond))))))
|
||||
|
||||
(def ^:private
|
||||
sql:retrieve-file
|
||||
"select f.id as id,
|
||||
p.team_id as team_id
|
||||
from file as f
|
||||
join project as p on (p.id = f.project_id)
|
||||
where f.id = ?")
|
||||
|
||||
(defn- retrieve-file
|
||||
[conn id]
|
||||
(db/exec-one! conn [sql:retrieve-file id]))
|
||||
|
||||
@@ -24,6 +24,7 @@
|
||||
[cuerdas.core :as str]
|
||||
[integrant.core :as ig]
|
||||
[lambdaisland.uri :as u]
|
||||
[promesa.core :as p]
|
||||
[promesa.exec :as px]))
|
||||
|
||||
(defn parse-client-ip
|
||||
@@ -41,33 +42,26 @@
|
||||
|
||||
(defn clean-props
|
||||
[{:keys [profile-id] :as event}]
|
||||
(letfn [(clean-common [props]
|
||||
(-> props
|
||||
(dissoc :session-id)
|
||||
(dissoc :password)
|
||||
(dissoc :old-password)
|
||||
(dissoc :token)))
|
||||
(let [invalid-keys #{:session-id
|
||||
:password
|
||||
:old-password
|
||||
:token}
|
||||
xform (comp
|
||||
(remove (fn [kv]
|
||||
(qualified-keyword? (first kv))))
|
||||
(remove (fn [kv]
|
||||
(contains? invalid-keys (first kv))))
|
||||
(remove (fn [[k v]]
|
||||
(and (= k :profile-id)
|
||||
(= v profile-id))))
|
||||
(filter (fn [[_ v]]
|
||||
(or (string? v)
|
||||
(keyword? v)
|
||||
(uuid? v)
|
||||
(boolean? v)
|
||||
(number? v)))))]
|
||||
|
||||
(clean-profile-id [props]
|
||||
(cond-> props
|
||||
(= profile-id (:profile-id props))
|
||||
(dissoc :profile-id)))
|
||||
|
||||
(clean-complex-data [props]
|
||||
(reduce-kv (fn [props k v]
|
||||
(cond-> props
|
||||
(or (string? v)
|
||||
(uuid? v)
|
||||
(boolean? v)
|
||||
(number? v))
|
||||
(assoc k v)
|
||||
|
||||
(keyword? v)
|
||||
(assoc k (name v))))
|
||||
{}
|
||||
props))]
|
||||
|
||||
(update event :props #(-> % clean-common clean-profile-id clean-complex-data))))
|
||||
(update event :props #(into {} xform %))))
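;; Aside (not part of the diff): what the transducer-based clean-props above
;; does to a sample event; the keys and values are invented for illustration.
(comment
  (clean-props
   {:profile-id 7
    :props {:password   "s3cret"   ;; dropped: blocklisted key
            :profile-id 7          ;; dropped: same as the event's profile-id
            ::internal  :x         ;; dropped: qualified keyword
            :zoom       1.5        ;; kept: plain number
            :page-name  "Cover"}}) ;; kept: string
  ;; => {:profile-id 7, :props {:zoom 1.5, :page-name "Cover"}}
  )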
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; HTTP Handler
|
||||
@@ -82,52 +76,62 @@
|
||||
(s/def ::timestamp dt/instant?)
|
||||
(s/def ::context (s/map-of ::us/keyword any?))
|
||||
|
||||
(s/def ::event
|
||||
(s/def ::frontend-event
|
||||
(s/keys :req-un [::type ::name ::props ::timestamp ::profile-id]
|
||||
:opt-un [::context]))
|
||||
|
||||
(s/def ::events (s/every ::event))
|
||||
(s/def ::frontend-events (s/every ::frontend-event))
|
||||
|
||||
(defmethod ig/init-key ::http-handler
|
||||
[_ {:keys [executor] :as cfg}]
|
||||
(fn [{:keys [params profile-id] :as request}]
|
||||
(when (contains? cf/flags :audit-log)
|
||||
(let [events (->> (:events params)
|
||||
(remove #(not= profile-id (:profile-id %)))
|
||||
(us/conform ::events))
|
||||
ip-addr (parse-client-ip request)
|
||||
cfg (-> cfg
|
||||
(assoc :source "frontend")
|
||||
(assoc :events events)
|
||||
(assoc :ip-addr ip-addr))]
|
||||
(px/run! executor #(persist-http-events cfg))))
|
||||
{:status 204 :body ""}))
|
||||
[_ {:keys [executor pool] :as cfg}]
|
||||
(if (or (db/read-only? pool) (not (contains? cf/flags :audit-log)))
|
||||
(do
|
||||
(l/warn :hint "audit log http handler disabled or db is read-only")
|
||||
(fn [_ respond _]
|
||||
(respond {:status 204 :body ""})))
|
||||
|
||||
|
||||
(letfn [(handler [{:keys [params profile-id] :as request}]
|
||||
(let [events (->> (:events params)
|
||||
(remove #(not= profile-id (:profile-id %)))
|
||||
(us/conform ::frontend-events))
|
||||
|
||||
ip-addr (parse-client-ip request)
|
||||
cfg (-> cfg
|
||||
(assoc :source "frontend")
|
||||
(assoc :events events)
|
||||
(assoc :ip-addr ip-addr))]
|
||||
(persist-http-events cfg)))
|
||||
|
||||
(handle-error [cause]
|
||||
(let [xdata (ex-data cause)]
|
||||
(if (= :spec-validation (:code xdata))
|
||||
(l/error ::l/raw (str "spec validation on persist-events:\n" (us/pretty-explain xdata)))
|
||||
(l/error :hint "error on persist-events" :cause cause))))]
|
||||
|
||||
(fn [request respond _]
|
||||
;; Fire and forget, log error in case of errro
|
||||
(-> (px/submit! executor #(handler request))
|
||||
(p/catch handle-error))
|
||||
|
||||
(respond {:status 204 :body ""})))))
|
||||
|
||||
(defn- persist-http-events
|
||||
[{:keys [pool events ip-addr source] :as cfg}]
|
||||
(try
|
||||
(let [columns [:id :name :source :type :tracked-at :profile-id :ip-addr :props :context]
|
||||
prepare-xf (map (fn [event]
|
||||
[(uuid/next)
|
||||
(:name event)
|
||||
source
|
||||
(:type event)
|
||||
(:timestamp event)
|
||||
(:profile-id event)
|
||||
(db/inet ip-addr)
|
||||
(db/tjson (:props event))
|
||||
(db/tjson (d/without-nils (:context event)))]))
|
||||
events (us/conform ::events events)]
|
||||
(when (seq events)
|
||||
(->> (into [] prepare-xf events)
|
||||
(db/insert-multi! pool :audit-log columns))))
|
||||
(catch Throwable e
|
||||
(let [xdata (ex-data e)]
|
||||
(if (= :spec-validation (:code xdata))
|
||||
(l/error ::l/raw (str "spec validation on persist-events:\n"
|
||||
(:explain xdata)))
|
||||
(l/error :hint "error on persist-events"
|
||||
:cause e))))))
|
||||
(let [columns [:id :name :source :type :tracked-at :profile-id :ip-addr :props :context]
|
||||
prepare-xf (map (fn [event]
|
||||
[(uuid/next)
|
||||
(:name event)
|
||||
source
|
||||
(:type event)
|
||||
(:timestamp event)
|
||||
(:profile-id event)
|
||||
(db/inet ip-addr)
|
||||
(db/tjson (:props event))
|
||||
(db/tjson (d/without-nils (:context event)))]))]
|
||||
(when (seq events)
|
||||
(->> (into [] prepare-xf events)
|
||||
(db/insert-multi! pool :audit-log columns)))))
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; Collector
|
||||
@@ -142,36 +146,53 @@
|
||||
(defmethod ig/pre-init-spec ::collector [_]
|
||||
(s/keys :req-un [::db/pool ::wrk/executor]))
|
||||
|
||||
(def event-xform
|
||||
(s/def ::ip-addr string?)
|
||||
(s/def ::backend-event
|
||||
(s/keys :req-un [::type ::name ::profile-id]
|
||||
:opt-un [::ip-addr ::props]))
|
||||
|
||||
(def ^:private backend-event-xform
|
||||
(comp
|
||||
(filter :profile-id)
|
||||
(filter #(us/valid? ::backend-event %))
|
||||
(map clean-props)))
|
||||
|
||||
(defmethod ig/init-key ::collector
|
||||
[_ cfg]
|
||||
(when (contains? cf/flags :audit-log)
|
||||
(l/info :msg "intializing audit log collector")
|
||||
(let [input (a/chan 512 event-xform)
|
||||
[_ {:keys [pool] :as cfg}]
|
||||
(cond
|
||||
(not (contains? cf/flags :audit-log))
|
||||
(do
|
||||
(l/info :hint "audit log collection disabled")
|
||||
(constantly nil))
|
||||
|
||||
(db/read-only? pool)
|
||||
(do
|
||||
(l/warn :hint "audit log collection disabled, db is read-only")
|
||||
(constantly nil))
|
||||
|
||||
:else
|
||||
(let [input (a/chan 512 backend-event-xform)
|
||||
buffer (aa/batch input {:max-batch-size 100
|
||||
:max-batch-age (* 10 1000) ; 10s
|
||||
:init []})]
|
||||
(l/info :hint "audit log collector initialized")
|
||||
(a/go-loop []
|
||||
(when-let [[_type events] (a/<! buffer)]
|
||||
(let [res (a/<! (persist-events cfg events))]
|
||||
(when (ex/exception? res)
|
||||
(l/error :hint "error on persiting events"
|
||||
:cause res)))
|
||||
(recur)))
|
||||
(l/error :hint "error on persisting events" :cause res))
|
||||
(recur))))
|
||||
|
||||
(fn [& {:keys [cmd] :as params}]
|
||||
(let [params (-> params
|
||||
(dissoc :cmd)
|
||||
(assoc :tracked-at (dt/now)))]
|
||||
(case cmd
|
||||
:stop (a/close! input)
|
||||
:submit (when-not (a/offer! input params)
|
||||
(l/warn :msg "activity channel is full"))))))))
|
||||
(case cmd
|
||||
:stop
|
||||
(a/close! input)
|
||||
|
||||
:submit
|
||||
(let [params (-> params
|
||||
(dissoc :cmd)
|
||||
(assoc :tracked-at (dt/now)))]
|
||||
(when-not (a/offer! input params)
|
||||
(l/warn :hint "activity channel is full"))))))))
|
||||
|
||||
(defn- persist-events
|
||||
[{:keys [pool executor] :as cfg} events]
|
||||
@@ -189,13 +210,13 @@
|
||||
(db/with-atomic [conn pool]
|
||||
(db/insert-multi! conn :audit-log
|
||||
[:id :name :type :profile-id :tracked-at :ip-addr :props :source]
|
||||
(sequence (map event->row) events)))))))
|
||||
(sequence (keep event->row) events)))))))
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; Archive Task
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
;; This is a task responsible to send the accomulated events to an
|
||||
;; This is a task responsible to send the accumulated events to an
|
||||
;; external service for archival.
|
||||
|
||||
(declare archive-events)
|
||||
@@ -216,6 +237,7 @@
|
||||
(:enabled props false))
|
||||
uri (or uri (:uri props))
|
||||
cfg (assoc cfg :uri uri)]
|
||||
|
||||
(when (and enabled (not uri))
|
||||
(ex/raise :type :internal
|
||||
:code :task-not-configured
|
||||
|
||||
@@ -7,17 +7,13 @@
(ns app.loggers.database
"A specific logger impl that persists errors on the database."
(:require
[app.common.exceptions :as ex]
[app.common.logging :as l]
[app.common.spec :as us]
[app.common.uuid :as uuid]
[app.config :as cf]
[app.db :as db]
[app.util.async :as aa]
[app.util.template :as tmpl]
[app.worker :as wrk]
[clojure.core.async :as a]
[clojure.java.io :as io]
[clojure.spec.alpha :as s]
[cuerdas.core :as str]
[integrant.core :as ig]))
@@ -32,9 +28,8 @@

(defn- persist-on-database!
[{:keys [pool] :as cfg} {:keys [id] :as event}]
(db/with-atomic [conn pool]
(db/insert! conn :server-error-report
{:id id :content (db/tjson event)})))
(when-not (db/read-only? pool)
(db/insert! pool :server-error-report {:id id :content (db/tjson event)})))

(defn- parse-event-data
[event]
@@ -54,28 +49,34 @@
(assoc :tenant (cf/get :tenant))
(assoc :host (cf/get :host))
(assoc :public-uri (cf/get :public-uri))
(assoc :version (:full cf/version))))
(assoc :version (:full cf/version))
(update :id #(or % (uuid/next)))))

(defn handle-event
[{:keys [executor] :as cfg} event]
(aa/with-thread executor
(try
(let [event (parse-event event)]
(l/debug :hint "registering error on database" :id (:id event))
(let [event (parse-event event)
uri (cf/get :public-uri)]

(l/debug :hint "registering error on database" :id (:id event)
:uri (str uri "/dbg/error/" (:id event)))

(persist-on-database! cfg event))
(catch Exception e
(l/warn :hint "unexpected exception on database error logger"
:cause e)))))
(catch Exception cause
(l/warn :hint "unexpected exception on database error logger" :cause cause)))))

(defmethod ig/pre-init-spec ::reporter [_]
(s/keys :req-un [::wrk/executor ::db/pool ::receiver]))

(defn error-event?
[event]
(= "error" (:logger/level event)))

(defmethod ig/init-key ::reporter
[_ {:keys [receiver] :as cfg}]
(l/info :msg "initializing database error persistence")
(let [output (a/chan (a/sliding-buffer 128)
(filter (fn [event]
(= (:logger/level event) "error"))))]
(let [output (a/chan (a/sliding-buffer 5) (filter error-event?))]
(receiver :sub output)
(a/go-loop []
(let [msg (a/<! output)]
@@ -89,39 +90,3 @@
(defmethod ig/halt-key! ::reporter
[_ output]
(a/close! output))

;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; Http Handler
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;

(defmethod ig/pre-init-spec ::handler [_]
(s/keys :req-un [::db/pool]))

(defmethod ig/init-key ::handler
[_ {:keys [pool] :as cfg}]
(letfn [(parse-id [request]
(let [id (get-in request [:path-params :id])
id (us/uuid-conformer id)]
(when (uuid? id)
id)))
(retrieve-report [id]
(ex/ignoring
(when-let [{:keys [content] :as row} (db/get-by-id pool :server-error-report id)]
(assoc row :content (db/decode-transit-pgobject content)))))

(render-template [{:keys [content] :as report}]
(some-> (io/resource "error-report.tmpl")
(tmpl/render content)))]

(fn [request]
(let [result (some-> (parse-id request)
(retrieve-report)
(render-template))]
(if result
{:status 200
:headers {"content-type" "text/html; charset=utf-8"
"x-robots-tag" "noindex"}
:body result}
{:status 404
:body "not found"})))))

@@ -30,7 +30,7 @@
(defmethod ig/init-key ::reporter
[_ {:keys [receiver uri] :as cfg}]
(when uri
(l/info :msg "intializing loki reporter" :uri uri)
(l/info :msg "initializing loki reporter" :uri uri)
(let [input (a/chan (a/dropping-buffer 512))]
(receiver :sub input)
(a/go-loop []

@@ -25,7 +25,7 @@
[cfg {:keys [host id public-uri] :as event}]
(try
(let [uri (:uri cfg)
text (str "Exception on (host: " host ", url: " public-uri "/dbg/error-by-id/" id ")\n"
text (str "Exception on (host: " host ", url: " public-uri "/dbg/error/" id ")\n"
(when-let [pid (:profile-id event)]
(str "- profile-id: #uuid-" pid "\n")))
rsp (http/send! {:uri uri

@@ -120,8 +120,6 @@
(.captureMessage ^IHub shub msg)
))
]
;; (clojure.pprint/pprint event)

(when @enabled
(.withScope ^IHub shub (reify ScopeCallback
(run [_ scope]

@@ -31,7 +31,7 @@

(defmethod ig/init-key ::receiver
[_ {:keys [endpoint] :as cfg}]
(l/info :msg "intializing ZMQ receiver" :bind endpoint)
(l/info :msg "initializing ZMQ receiver" :bind endpoint)
(let [buffer (a/chan 1)
output (a/chan 1 (comp (filter map?)
(keep prepare)))

@@ -9,47 +9,55 @@
|
||||
[app.common.logging :as l]
|
||||
[app.config :as cf]
|
||||
[app.util.time :as dt]
|
||||
[integrant.core :as ig]))
|
||||
[integrant.core :as ig])
|
||||
(:gen-class))
|
||||
|
||||
(def system-config
|
||||
{:app.db/pool
|
||||
{:uri (cf/get :database-uri)
|
||||
:username (cf/get :database-username)
|
||||
:password (cf/get :database-password)
|
||||
:read-only (cf/get :database-readonly false)
|
||||
:metrics (ig/ref :app.metrics/metrics)
|
||||
:migrations (ig/ref :app.migrations/all)
|
||||
:name :main
|
||||
:min-pool-size 0
|
||||
:max-pool-size 30}
|
||||
:min-size (cf/get :database-min-pool-size 0)
|
||||
:max-size (cf/get :database-max-pool-size 30)}
|
||||
|
||||
:app.metrics/metrics
|
||||
{:definitions
|
||||
{:profile-register
|
||||
{:name "actions_profile_register_count"
|
||||
:help "A global counter of user registrations."
|
||||
:type :counter}
|
||||
;; Default thread pool for IO operations
|
||||
[::default :app.worker/executor]
|
||||
{:parallelism (cf/get :default-executor-parallelism 60)
|
||||
:prefix :default}
|
||||
|
||||
:profile-activation
|
||||
{:name "actions_profile_activation_count"
|
||||
:help "A global counter of profile activations"
|
||||
:type :counter}
|
||||
;; Constrained thread pool. Should only be used from high demand
|
||||
;; RPC methods.
|
||||
[::blocking :app.worker/executor]
|
||||
{:parallelism (cf/get :blocking-executor-parallelism 20)
|
||||
:prefix :blocking}
|
||||
|
||||
:update-file-changes
|
||||
{:name "rpc_update_file_changes_total"
|
||||
:help "A total number of changes submitted to update-file."
|
||||
:type :counter}
|
||||
;; Dedicated thread pool for backround tasks execution.
|
||||
[::worker :app.worker/executor]
|
||||
{:parallelism (cf/get :worker-executor-parallelism 10)
|
||||
:prefix :worker}
|
||||
|
||||
:update-file-bytes-processed
|
||||
{:name "rpc_update_file_bytes_processed_total"
|
||||
:help "A total number of bytes processed by update-file."
|
||||
:type :counter}}}
|
||||
:app.worker/executors
|
||||
{:default (ig/ref [::default :app.worker/executor])
|
||||
:worker (ig/ref [::worker :app.worker/executor])
|
||||
:blocking (ig/ref [::blocking :app.worker/executor])}
|
||||
|
||||
:app.migrations/all
|
||||
{:main (ig/ref :app.migrations/migrations)}
|
||||
:app.worker/executors-monitor
|
||||
{:metrics (ig/ref :app.metrics/metrics)
|
||||
:executors (ig/ref :app.worker/executors)}
|
||||
|
||||
:app.migrations/migrations
|
||||
{}
|
||||
|
||||
:app.metrics/metrics
|
||||
{}
|
||||
|
||||
:app.migrations/all
|
||||
{:main (ig/ref :app.migrations/migrations)}
|
||||
|
||||
:app.msgbus/msgbus
|
||||
{:backend (cf/get :msgbus-backend :redis)
|
||||
:redis-uri (cf/get :redis-uri)}
|
||||
@@ -65,13 +73,9 @@
|
||||
:app.storage/gc-touched-task
|
||||
{:pool (ig/ref :app.db/pool)}
|
||||
|
||||
:app.storage/recheck-task
|
||||
{:pool (ig/ref :app.db/pool)
|
||||
:storage (ig/ref :app.storage/storage)}
|
||||
|
||||
:app.http.session/session
|
||||
{:pool (ig/ref :app.db/pool)
|
||||
:tokens (ig/ref :app.tokens/tokens)}
|
||||
{:pool (ig/ref :app.db/pool)
|
||||
:tokens (ig/ref :app.tokens/tokens)}
|
||||
|
||||
:app.http.session/gc-task
|
||||
{:pool (ig/ref :app.db/pool)
|
||||
@@ -80,7 +84,7 @@
|
||||
:app.http.session/updater
|
||||
{:pool (ig/ref :app.db/pool)
|
||||
:metrics (ig/ref :app.metrics/metrics)
|
||||
:executor (ig/ref :app.worker/executor)
|
||||
:executor (ig/ref [::worker :app.worker/executor])
|
||||
:session (ig/ref :app.http.session/session)
|
||||
:max-batch-age (cf/get :http-session-updater-batch-max-age)
|
||||
:max-batch-size (cf/get :http-session-updater-batch-max-size)}
|
||||
@@ -90,42 +94,58 @@
|
||||
:pool (ig/ref :app.db/pool)}
|
||||
|
||||
:app.http/server
|
||||
{:port (cf/get :http-server-port)
|
||||
:router (ig/ref :app.http/router)
|
||||
:metrics (ig/ref :app.metrics/metrics)
|
||||
:ws {"/ws/notifications" (ig/ref :app.notifications/handler)}}
|
||||
{:port (cf/get :http-server-port)
|
||||
:host (cf/get :http-server-host)
|
||||
:router (ig/ref :app.http/router)
|
||||
:metrics (ig/ref :app.metrics/metrics)
|
||||
|
||||
:max-threads (cf/get :http-server-max-threads)
|
||||
:min-threads (cf/get :http-server-min-threads)}
|
||||
|
||||
:app.http/router
|
||||
{:rpc (ig/ref :app.rpc/rpc)
|
||||
:session (ig/ref :app.http.session/session)
|
||||
:tokens (ig/ref :app.tokens/tokens)
|
||||
:public-uri (cf/get :public-uri)
|
||||
:metrics (ig/ref :app.metrics/metrics)
|
||||
:oauth (ig/ref :app.http.oauth/handler)
|
||||
:assets (ig/ref :app.http.assets/handlers)
|
||||
:storage (ig/ref :app.storage/storage)
|
||||
:sns-webhook (ig/ref :app.http.awsns/handler)
|
||||
:feedback (ig/ref :app.http.feedback/handler)
|
||||
{:assets (ig/ref :app.http.assets/handlers)
|
||||
:feedback (ig/ref :app.http.feedback/handler)
|
||||
:session (ig/ref :app.http.session/session)
|
||||
:sns-webhook (ig/ref :app.http.awsns/handler)
|
||||
:oauth (ig/ref :app.http.oauth/handler)
|
||||
:debug (ig/ref :app.http.debug/handlers)
|
||||
:ws (ig/ref :app.http.websocket/handler)
|
||||
:metrics (ig/ref :app.metrics/metrics)
|
||||
:public-uri (cf/get :public-uri)
|
||||
:storage (ig/ref :app.storage/storage)
|
||||
:tokens (ig/ref :app.tokens/tokens)
|
||||
:audit-http-handler (ig/ref :app.loggers.audit/http-handler)
|
||||
:error-report-handler (ig/ref :app.loggers.database/handler)}
|
||||
:rpc (ig/ref :app.rpc/rpc)}
|
||||
|
||||
:app.http.debug/handlers
|
||||
{:pool (ig/ref :app.db/pool)
|
||||
:executor (ig/ref [::default :app.worker/executor])}
|
||||
|
||||
:app.http.websocket/handler
|
||||
{:pool (ig/ref :app.db/pool)
|
||||
:metrics (ig/ref :app.metrics/metrics)
|
||||
:msgbus (ig/ref :app.msgbus/msgbus)}
|
||||
|
||||
:app.http.assets/handlers
|
||||
{:metrics (ig/ref :app.metrics/metrics)
|
||||
:assets-path (cf/get :assets-path)
|
||||
:storage (ig/ref :app.storage/storage)
|
||||
:executor (ig/ref [::default :app.worker/executor])
|
||||
:cache-max-age (dt/duration {:hours 24})
|
||||
:signature-max-age (dt/duration {:hours 24 :minutes 5})}
|
||||
|
||||
:app.http.feedback/handler
|
||||
{:pool (ig/ref :app.db/pool)}
|
||||
{:pool (ig/ref :app.db/pool)
|
||||
:executor (ig/ref [::default :app.worker/executor])}
|
||||
|
||||
:app.http.oauth/handler
|
||||
{:rpc (ig/ref :app.rpc/rpc)
|
||||
:session (ig/ref :app.http.session/session)
|
||||
:pool (ig/ref :app.db/pool)
|
||||
:tokens (ig/ref :app.tokens/tokens)
|
||||
:audit (ig/ref :app.loggers.audit/collector)
|
||||
:public-uri (cf/get :public-uri)}
|
||||
{:rpc (ig/ref :app.rpc/rpc)
|
||||
:session (ig/ref :app.http.session/session)
|
||||
:pool (ig/ref :app.db/pool)
|
||||
:tokens (ig/ref :app.tokens/tokens)
|
||||
:audit (ig/ref :app.loggers.audit/collector)
|
||||
:executor (ig/ref [::default :app.worker/executor])
|
||||
:public-uri (cf/get :public-uri)}
|
||||
|
||||
:app.rpc/rpc
|
||||
{:pool (ig/ref :app.db/pool)
|
||||
@@ -135,29 +155,17 @@
|
||||
:storage (ig/ref :app.storage/storage)
|
||||
:msgbus (ig/ref :app.msgbus/msgbus)
|
||||
:public-uri (cf/get :public-uri)
|
||||
:audit (ig/ref :app.loggers.audit/collector)}
|
||||
|
||||
:app.notifications/handler
|
||||
{:msgbus (ig/ref :app.msgbus/msgbus)
|
||||
:pool (ig/ref :app.db/pool)
|
||||
:session (ig/ref :app.http.session/session)
|
||||
:metrics (ig/ref :app.metrics/metrics)
|
||||
:executor (ig/ref :app.worker/executor)}
|
||||
|
||||
:app.worker/executor
|
||||
{:min-threads 0
|
||||
:max-threads 256
|
||||
:idle-timeout 60000
|
||||
:name :worker}
|
||||
:audit (ig/ref :app.loggers.audit/collector)
|
||||
:executors (ig/ref :app.worker/executors)}
|
||||
|
||||
:app.worker/worker
|
||||
{:executor (ig/ref :app.worker/executor)
|
||||
{:executor (ig/ref [::worker :app.worker/executor])
|
||||
:tasks (ig/ref :app.worker/registry)
|
||||
:metrics (ig/ref :app.metrics/metrics)
|
||||
:pool (ig/ref :app.db/pool)}
|
||||
|
||||
:app.worker/scheduler
|
||||
{:executor (ig/ref :app.worker/executor)
|
||||
{:executor (ig/ref [::worker :app.worker/executor])
|
||||
:tasks (ig/ref :app.worker/registry)
|
||||
:pool (ig/ref :app.db/pool)
|
||||
:schedule
|
||||
@@ -176,9 +184,6 @@
|
||||
{:cron #app/cron "0 0 0 * * ?" ;; daily
|
||||
:task :session-gc}
|
||||
|
||||
{:cron #app/cron "0 0 * * * ?" ;; hourly
|
||||
:task :storage-recheck}
|
||||
|
||||
{:cron #app/cron "0 0 0 * * ?" ;; daily
|
||||
:task :objects-gc}
|
||||
|
||||
@@ -190,7 +195,7 @@
|
||||
:task :file-offload})
|
||||
|
||||
(when (contains? cf/flags :audit-log-archive)
|
||||
{:cron #app/cron "0 */3 * * * ?" ;; every 3m
|
||||
{:cron #app/cron "0 */5 * * * ?" ;; every 5m
|
||||
:task :audit-log-archive})
|
||||
|
||||
(when (contains? cf/flags :audit-log-gc)
|
||||
@@ -199,7 +204,7 @@
|
||||
|
||||
(when (or (contains? cf/flags :telemetry)
|
||||
(cf/get :telemetry-enabled))
|
||||
{:cron #app/cron "0 0 */6 * * ?" ;; every 6h
|
||||
{:cron #app/cron "0 30 */3,23 * * ?"
|
||||
:task :telemetry})]}
|
||||
|
||||
:app.worker/registry
|
||||
@@ -211,7 +216,6 @@
|
||||
:file-xlog-gc (ig/ref :app.tasks.file-xlog-gc/handler)
|
||||
:storage-deleted-gc (ig/ref :app.storage/gc-deleted-task)
|
||||
:storage-touched-gc (ig/ref :app.storage/gc-touched-task)
|
||||
:storage-recheck (ig/ref :app.storage/recheck-task)
|
||||
:tasks-gc (ig/ref :app.tasks.tasks-gc/handler)
|
||||
:telemetry (ig/ref :app.tasks.telemetry/handler)
|
||||
:session-gc (ig/ref :app.http.session/gc-task)
|
||||
@@ -275,11 +279,11 @@
|
||||
|
||||
:app.loggers.audit/http-handler
|
||||
{:pool (ig/ref :app.db/pool)
|
||||
:executor (ig/ref :app.worker/executor)}
|
||||
:executor (ig/ref [::worker :app.worker/executor])}
|
||||
|
||||
:app.loggers.audit/collector
|
||||
{:pool (ig/ref :app.db/pool)
|
||||
:executor (ig/ref :app.worker/executor)}
|
||||
:executor (ig/ref [::worker :app.worker/executor])}
|
||||
|
||||
:app.loggers.audit/archive-task
|
||||
{:uri (cf/get :audit-log-archive-uri)
|
||||
@@ -293,54 +297,43 @@
|
||||
:app.loggers.loki/reporter
|
||||
{:uri (cf/get :loggers-loki-uri)
|
||||
:receiver (ig/ref :app.loggers.zmq/receiver)
|
||||
:executor (ig/ref :app.worker/executor)}
|
||||
:executor (ig/ref [::worker :app.worker/executor])}
|
||||
|
||||
:app.loggers.mattermost/reporter
|
||||
{:uri (cf/get :error-report-webhook)
|
||||
:receiver (ig/ref :app.loggers.zmq/receiver)
|
||||
:pool (ig/ref :app.db/pool)
|
||||
:executor (ig/ref :app.worker/executor)}
|
||||
:executor (ig/ref [::worker :app.worker/executor])}
|
||||
|
||||
:app.loggers.database/reporter
|
||||
{:receiver (ig/ref :app.loggers.zmq/receiver)
|
||||
:pool (ig/ref :app.db/pool)
|
||||
:executor (ig/ref :app.worker/executor)}
|
||||
|
||||
:app.loggers.database/handler
|
||||
{:pool (ig/ref :app.db/pool)}
|
||||
|
||||
:app.loggers.sentry/reporter
|
||||
{:dsn (cf/get :sentry-dsn)
|
||||
:trace-sample-rate (cf/get :sentry-trace-sample-rate 1.0)
|
||||
:attach-stack-trace (cf/get :sentry-attach-stack-trace false)
|
||||
:debug (cf/get :sentry-debug false)
|
||||
:receiver (ig/ref :app.loggers.zmq/receiver)
|
||||
:pool (ig/ref :app.db/pool)
|
||||
:executor (ig/ref :app.worker/executor)}
|
||||
:executor (ig/ref [::worker :app.worker/executor])}
|
||||
|
||||
:app.storage/storage
|
||||
{:pool (ig/ref :app.db/pool)
|
||||
:executor (ig/ref :app.worker/executor)
|
||||
:backends
|
||||
{:assets-s3 (ig/ref [::assets :app.storage.s3/backend])
|
||||
:assets-db (ig/ref [::assets :app.storage.db/backend])
|
||||
:assets-fs (ig/ref [::assets :app.storage.fs/backend])
|
||||
|
||||
:backends {
|
||||
:assets-s3 (ig/ref [::assets :app.storage.s3/backend])
|
||||
:assets-db (ig/ref [::assets :app.storage.db/backend])
|
||||
:assets-fs (ig/ref [::assets :app.storage.fs/backend])
|
||||
:tmp (ig/ref [::tmp :app.storage.fs/backend])
|
||||
:fdata-s3 (ig/ref [::fdata :app.storage.s3/backend])
|
||||
:tmp (ig/ref [::tmp :app.storage.fs/backend])
|
||||
:fdata-s3 (ig/ref [::fdata :app.storage.s3/backend])
|
||||
|
||||
;; keep this for backward compatibility
|
||||
:s3 (ig/ref [::assets :app.storage.s3/backend])
|
||||
:fs (ig/ref [::assets :app.storage.fs/backend])}}
|
||||
;; keep this for backward compatibility
|
||||
:s3 (ig/ref [::assets :app.storage.s3/backend])
|
||||
:fs (ig/ref [::assets :app.storage.fs/backend])}}
|
||||
|
||||
[::fdata :app.storage.s3/backend]
|
||||
{:region (cf/get :storage-fdata-s3-region)
|
||||
:bucket (cf/get :storage-fdata-s3-bucket)
|
||||
:prefix (cf/get :storage-fdata-s3-prefix)}
|
||||
{:region (cf/get :storage-fdata-s3-region)
|
||||
:bucket (cf/get :storage-fdata-s3-bucket)
|
||||
:endpoint (cf/get :storage-fdata-s3-endpoint)
|
||||
:prefix (cf/get :storage-fdata-s3-prefix)}
|
||||
|
||||
[::assets :app.storage.s3/backend]
|
||||
{:region (cf/get :storage-assets-s3-region)
|
||||
:bucket (cf/get :storage-assets-s3-bucket)}
|
||||
{:region (cf/get :storage-assets-s3-region)
|
||||
:endpoint (cf/get :storage-assets-s3-endpoint)
|
||||
:bucket (cf/get :storage-assets-s3-bucket)}
|
||||
|
||||
[::assets :app.storage.fs/backend]
|
||||
{:directory (cf/get :storage-assets-fs-directory)}
|
||||
|
||||
@@ -202,11 +202,9 @@
:cause error))

;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; --- Fonts Generation
;; Fonts Generation
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;

(def all-fotmats #{"font/woff2", "font/woff", "font/otf", "font/ttf"})

(defmethod process :generate-fonts
[{:keys [input] :as params}]
(letfn [(ttf->otf [data]
@@ -326,10 +324,12 @@
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;

(defn configure-assets-storage
"Given storage map, returns a storage configured with the apropriate
"Given storage map, returns a storage configured with the appropriate
backend for assets."
[storage conn]
(-> storage
(assoc :conn conn)
(assoc :backend (cf/get :assets-storage-backend :assets-fs))))
([storage]
(assoc storage :backend (cf/get :assets-storage-backend :assets-fs)))
([storage conn]
(-> storage
(assoc :conn conn)
(assoc :backend (cf/get :assets-storage-backend :assets-fs)))))

@@ -5,48 +5,132 @@
|
||||
;; Copyright (c) UXBOX Labs SL
|
||||
|
||||
(ns app.metrics
|
||||
(:refer-clojure :exclude [run!])
|
||||
(:require
|
||||
[app.common.exceptions :as ex]
|
||||
[app.common.logging :as l]
|
||||
[clojure.spec.alpha :as s]
|
||||
[integrant.core :as ig])
|
||||
(:import
|
||||
io.prometheus.client.CollectorRegistry
|
||||
io.prometheus.client.Counter
|
||||
io.prometheus.client.Counter$Child
|
||||
io.prometheus.client.Gauge
|
||||
io.prometheus.client.Gauge$Child
|
||||
io.prometheus.client.Summary
|
||||
io.prometheus.client.Summary$Child
|
||||
io.prometheus.client.Summary$Builder
|
||||
io.prometheus.client.Histogram
|
||||
io.prometheus.client.Histogram$Child
|
||||
io.prometheus.client.exporter.common.TextFormat
|
||||
io.prometheus.client.hotspot.DefaultExports
|
||||
io.prometheus.client.jetty.JettyStatisticsCollector
|
||||
org.eclipse.jetty.server.handler.StatisticsHandler
|
||||
java.io.StringWriter))
|
||||
|
||||
(declare instrument-vars!)
|
||||
(declare instrument)
|
||||
(set! *warn-on-reflection* true)
|
||||
|
||||
(declare create-registry)
|
||||
(declare create)
|
||||
(declare handler)
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; Entry Point
|
||||
;; METRICS SERVICE PROVIDER
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
(defn- handler
|
||||
[registry _request]
|
||||
(let [samples (.metricFamilySamples ^CollectorRegistry registry)
|
||||
writer (StringWriter.)]
|
||||
(TextFormat/write004 writer samples)
|
||||
{:headers {"content-type" TextFormat/CONTENT_TYPE_004}
|
||||
:body (.toString writer)}))
|
||||
(def default-metrics
|
||||
{:update-file-changes
|
||||
{:name "rpc_update_file_changes_total"
|
||||
:help "A total number of changes submitted to update-file."
|
||||
:type :counter}
|
||||
|
||||
(s/def ::definitions
|
||||
(s/map-of keyword? map?))
|
||||
:update-file-bytes-processed
|
||||
{:name "rpc_update_file_bytes_processed_total"
|
||||
:help "A total number of bytes processed by update-file."
|
||||
:type :counter}
|
||||
|
||||
(defmethod ig/pre-init-spec ::metrics [_]
|
||||
(s/keys :opt-un [::definitions]))
|
||||
:rpc-mutation-timing
|
||||
{:name "rpc_mutation_timing"
|
||||
:help "RPC mutation method call timming."
|
||||
:labels ["name"]
|
||||
:type :histogram}
|
||||
|
||||
:rpc-query-timing
|
||||
{:name "rpc_query_timing"
|
||||
:help "RPC query method call timing."
|
||||
:labels ["name"]
|
||||
:type :histogram}
|
||||
|
||||
:websocket-active-connections
|
||||
{:name "websocket_active_connections"
|
||||
:help "Active websocket connections gauge"
|
||||
:type :gauge}
|
||||
|
||||
:websocket-messages-total
|
||||
{:name "websocket_message_total"
|
||||
:help "Counter of processed messages."
|
||||
:labels ["op"]
|
||||
:type :counter}
|
||||
|
||||
:websocket-session-timing
|
||||
{:name "websocket_session_timing"
|
||||
:help "Websocket session timing (seconds)."
|
||||
:type :summary}
|
||||
|
||||
:session-update-total
|
||||
{:name "http_session_update_total"
|
||||
:help "A counter of session update batch events."
|
||||
:type :counter}
|
||||
|
||||
:tasks-timing
|
||||
{:name "penpot_tasks_timing"
|
||||
:help "Background tasks timing (milliseconds)."
|
||||
:labels ["name"]
|
||||
:type :summary}
|
||||
|
||||
:rlimit-queued-submissions
|
||||
{:name "penpot_rlimit_queued_submissions"
|
||||
:help "Current number of queued submissions on RLIMIT."
|
||||
:labels ["name"]
|
||||
:type :gauge}
|
||||
|
||||
:rlimit-used-permits
|
||||
{:name "penpot_rlimit_used_permits"
|
||||
:help "Current number of used permits on RLIMIT."
|
||||
:labels ["name"]
|
||||
:type :gauge}
|
||||
|
||||
:rlimit-acquires-total
|
||||
{:name "penpot_rlimit_acquires_total"
|
||||
:help "Total number of acquire operations on RLIMIT."
|
||||
:labels ["name"]
|
||||
:type :counter}
|
||||
|
||||
:executors-active-threads
|
||||
{:name "penpot_executors_active_threads"
|
||||
:help "Current number of threads available in the executor service."
|
||||
:labels ["name"]
|
||||
:type :gauge}
|
||||
|
||||
:executors-completed-tasks
|
||||
{:name "penpot_executors_completed_tasks_total"
|
||||
:help "Aproximate number of completed tasks by the executor."
|
||||
:labels ["name"]
|
||||
:type :counter}
|
||||
|
||||
:executors-running-threads
|
||||
{:name "penpot_executors_running_threads"
|
||||
:help "Current number of threads with state RUNNING."
|
||||
:labels ["name"]
|
||||
:type :gauge}
|
||||
|
||||
:executors-queued-submissions
|
||||
{:name "penpot_executors_queued_submissions"
|
||||
:help "Current number of queued submissions."
|
||||
:labels ["name"]
|
||||
:type :gauge}})
|
||||
|
||||
(defmethod ig/init-key ::metrics
|
||||
[_ {:keys [definitions] :as cfg}]
|
||||
[_ _]
|
||||
(l/info :action "initialize metrics")
|
||||
(let [registry (create-registry)
|
||||
definitions (reduce-kv (fn [res k v]
|
||||
@@ -54,7 +138,7 @@
|
||||
(create)
|
||||
(assoc res k)))
|
||||
{}
|
||||
definitions)]
|
||||
default-metrics)]
|
||||
{:handler (partial handler registry)
|
||||
:definitions definitions
|
||||
:registry registry}))
|
||||
@@ -64,24 +148,45 @@
|
||||
(s/def ::metrics
|
||||
(s/keys :req-un [::registry ::handler]))
|
||||
|
||||
(defn- handler
|
||||
[registry _ respond _]
|
||||
(let [samples (.metricFamilySamples ^CollectorRegistry registry)
|
||||
writer (StringWriter.)]
|
||||
(TextFormat/write004 writer samples)
|
||||
(respond {:headers {"content-type" TextFormat/CONTENT_TYPE_004}
|
||||
:body (.toString writer)})))
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; Implementation
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
(def default-empty-labels (into-array String []))
|
||||
|
||||
(def default-quantiles
|
||||
[[0.5 0.01]
|
||||
[0.90 0.01]
|
||||
[0.99 0.001]])
|
||||
|
||||
(def default-histogram-buckets
|
||||
[1 5 10 25 50 75 100 250 500 750 1000 2500 5000 7500])
|
||||
|
||||
(defn run!
|
||||
[{:keys [definitions]} {:keys [id] :as params}]
|
||||
(when-let [mobj (get definitions id)]
|
||||
((::fn mobj) params)
|
||||
true))
|
||||
|
||||
(defn create-registry
|
||||
[]
|
||||
(let [registry (CollectorRegistry.)]
|
||||
(DefaultExports/register registry)
|
||||
registry))
|
||||
|
||||
(defmacro with-measure
|
||||
[& {:keys [expr cb]}]
|
||||
`(let [start# (System/nanoTime)
|
||||
tdown# ~cb]
|
||||
(try
|
||||
~expr
|
||||
(finally
|
||||
(tdown# (/ (- (System/nanoTime) start#) 1000000))))))
|
||||
(defn- is-array?
|
||||
[o]
|
||||
(let [oc (class o)]
|
||||
(and (.isArray ^Class oc)
|
||||
(= (.getComponentType oc) String))))
|
||||
|
||||
(defn make-counter
|
||||
[{:keys [name help registry reg labels] :as props}]
|
||||
@@ -94,12 +199,9 @@
|
||||
instance (.register instance registry)]
|
||||
|
||||
{::instance instance
|
||||
::fn (fn [{:keys [by labels] :or {by 1}}]
|
||||
(if labels
|
||||
(.. ^Counter instance
|
||||
(labels (into-array String labels))
|
||||
(inc by))
|
||||
(.inc ^Counter instance by)))}))
|
||||
::fn (fn [{:keys [inc labels] :or {inc 1 labels default-empty-labels}}]
|
||||
(let [instance (.labels instance (if (is-array? labels) labels (into-array String labels)))]
|
||||
(.inc ^Counter$Child instance (double inc))))}))
|
||||
|
||||
(defn make-gauge
|
||||
[{:keys [name help registry reg labels] :as props}]
|
||||
@@ -110,48 +212,33 @@
|
||||
_ (when (seq labels)
|
||||
(.labelNames instance (into-array String labels)))
|
||||
instance (.register instance registry)]
|
||||
|
||||
{::instance instance
|
||||
::fn (fn [{:keys [cmd by labels] :or {by 1}}]
|
||||
(if labels
|
||||
(let [labels (into-array String [labels])]
|
||||
(case cmd
|
||||
:inc (.. ^Gauge instance (labels labels) (inc by))
|
||||
:dec (.. ^Gauge instance (labels labels) (dec by))))
|
||||
(case cmd
|
||||
:inc (.inc ^Gauge instance by)
|
||||
:dec (.dec ^Gauge instance by))))}))
|
||||
|
||||
(def default-quantiles
|
||||
[[0.75 0.02]
|
||||
[0.99 0.001]])
|
||||
::fn (fn [{:keys [inc dec labels val] :or {labels default-empty-labels}}]
|
||||
(let [instance (.labels ^Gauge instance (if (is-array? labels) labels (into-array String labels)))]
|
||||
(cond (number? inc) (.inc ^Gauge$Child instance (double inc))
|
||||
(number? dec) (.dec ^Gauge$Child instance (double dec))
|
||||
(number? val) (.set ^Gauge$Child instance (double val)))))}))
|
||||
|
||||
(defn make-summary
|
||||
[{:keys [name help registry reg labels max-age quantiles buckets]
|
||||
:or {max-age 3600 buckets 6 quantiles default-quantiles} :as props}]
|
||||
:or {max-age 3600 buckets 12 quantiles default-quantiles} :as props}]
|
||||
(let [registry (or registry reg)
|
||||
instance (doto (Summary/build)
|
||||
builder (doto (Summary/build)
|
||||
(.name name)
|
||||
(.help help))
|
||||
_ (when (seq quantiles)
|
||||
(.maxAgeSeconds ^Summary instance max-age)
|
||||
(.ageBuckets ^Summary instance buckets))
|
||||
(.maxAgeSeconds ^Summary$Builder builder ^long max-age)
|
||||
(.ageBuckets ^Summary$Builder builder buckets))
|
||||
_ (doseq [[q e] quantiles]
|
||||
(.quantile ^Summary instance q e))
|
||||
(.quantile ^Summary$Builder builder q e))
|
||||
_ (when (seq labels)
|
||||
(.labelNames instance (into-array String labels)))
|
||||
instance (.register instance registry)]
|
||||
(.labelNames ^Summary$Builder builder (into-array String labels)))
|
||||
instance (.register ^Summary$Builder builder registry)]
|
||||
|
||||
{::instance instance
|
||||
::fn (fn [{:keys [val labels]}]
|
||||
(if labels
|
||||
(.. ^Summary instance
|
||||
(labels (into-array String labels))
|
||||
(observe val))
|
||||
(.observe ^Summary instance val)))}))
|
||||
|
||||
(def default-histogram-buckets
|
||||
[1 5 10 25 50 75 100 250 500 750 1000 2500 5000 7500])
|
||||
::fn (fn [{:keys [val labels] :or {labels default-empty-labels}}]
|
||||
(let [instance (.labels ^Summary instance (if (is-array? labels) labels (into-array String labels)))]
|
||||
(.observe ^Summary$Child instance val)))}))
|
||||
|
||||
(defn make-histogram
|
||||
[{:keys [name help registry reg labels buckets]
|
||||
@@ -166,12 +253,9 @@
|
||||
instance (.register instance registry)]
|
||||
|
||||
{::instance instance
|
||||
::fn (fn [{:keys [val labels]}]
|
||||
(if labels
|
||||
(.. ^Histogram instance
|
||||
(labels (into-array String labels))
|
||||
(observe val))
|
||||
(.observe ^Histogram instance val)))}))
|
||||
::fn (fn [{:keys [val labels] :or {labels default-empty-labels}}]
|
||||
(let [instance (.labels ^Histogram instance (if (is-array? labels) labels (into-array String labels)))]
|
||||
(.observe ^Histogram$Child instance val)))}))
|
||||
|
||||
(defn create
|
||||
[{:keys [type] :as props}]
|
||||
@@ -181,114 +265,6 @@
|
||||
:summary (make-summary props)
|
||||
:histogram (make-histogram props)))
|
||||
|
||||
(defn wrap-counter
|
||||
([rootf mobj]
|
||||
(let [mdata (meta rootf)
|
||||
origf (::original mdata rootf)]
|
||||
(with-meta
|
||||
(fn
|
||||
([a]
|
||||
((::fn mobj) nil)
|
||||
(origf a))
|
||||
([a b]
|
||||
((::fn mobj) nil)
|
||||
(origf a b))
|
||||
([a b c]
|
||||
((::fn mobj) nil)
|
||||
(origf a b c))
|
||||
([a b c d]
|
||||
((::fn mobj) nil)
|
||||
(origf a b c d))
|
||||
([a b c d & more]
|
||||
((::fn mobj) nil)
|
||||
(apply origf a b c d more)))
|
||||
(assoc mdata ::original origf))))
|
||||
([rootf mobj labels]
|
||||
(let [mdata (meta rootf)
|
||||
origf (::original mdata rootf)]
|
||||
(with-meta
|
||||
(fn
|
||||
([a]
|
||||
((::fn mobj) {:labels labels})
|
||||
(origf a))
|
||||
([a b]
|
||||
((::fn mobj) {:labels labels})
|
||||
(origf a b))
|
||||
([a b & more]
|
||||
((::fn mobj) {:labels labels})
|
||||
(apply origf a b more)))
|
||||
(assoc mdata ::original origf)))))
|
||||
|
||||
(defn wrap-summary
|
||||
([rootf mobj]
|
||||
(let [mdata (meta rootf)
|
||||
origf (::original mdata rootf)]
|
||||
(with-meta
|
||||
(fn
|
||||
([a]
|
||||
(with-measure
|
||||
:expr (origf a)
|
||||
:cb #((::fn mobj) {:val %})))
|
||||
([a b]
|
||||
(with-measure
|
||||
:expr (origf a b)
|
||||
:cb #((::fn mobj) {:val %})))
|
||||
([a b & more]
|
||||
(with-measure
|
||||
:expr (apply origf a b more)
|
||||
:cb #((::fn mobj) {:val %}))))
|
||||
(assoc mdata ::original origf))))
|
||||
|
||||
([rootf mobj labels]
|
||||
(let [mdata (meta rootf)
|
||||
origf (::original mdata rootf)]
|
||||
(with-meta
|
||||
(fn
|
||||
([a]
|
||||
(with-measure
|
||||
:expr (origf a)
|
||||
:cb #((::fn mobj) {:val % :labels labels})))
|
||||
([a b]
|
||||
(with-measure
|
||||
:expr (origf a b)
|
||||
:cb #((::fn mobj) {:val % :labels labels})))
|
||||
([a b & more]
|
||||
(with-measure
|
||||
:expr (apply origf a b more)
|
||||
:cb #((::fn mobj) {:val % :labels labels}))))
|
||||
(assoc mdata ::original origf)))))
|
||||
|
||||
(defn instrument-vars!
|
||||
[vars {:keys [wrap] :as props}]
|
||||
(let [obj (create props)]
|
||||
(cond
|
||||
(instance? Counter (::instance obj))
|
||||
(doseq [var vars]
|
||||
(alter-var-root var (or wrap wrap-counter) obj))
|
||||
|
||||
(instance? Summary (::instance obj))
|
||||
(doseq [var vars]
|
||||
(alter-var-root var (or wrap wrap-summary) obj))
|
||||
|
||||
:else
|
||||
(ex/raise :type :not-implemented))))
|
||||
|
||||
(defn instrument
|
||||
[f {:keys [wrap] :as props}]
|
||||
(let [obj (create props)]
|
||||
(cond
|
||||
(instance? Counter (::instance obj))
|
||||
((or wrap wrap-counter) f obj)
|
||||
|
||||
(instance? Summary (::instance obj))
|
||||
((or wrap wrap-summary) f obj)
|
||||
|
||||
(instance? Histogram (::instance obj))
|
||||
((or wrap wrap-summary) f obj)
|
||||
|
||||
:else
|
||||
(ex/raise :type :not-implemented))))
|
||||
|
||||
(defn instrument-jetty!
|
||||
[^CollectorRegistry registry ^StatisticsHandler handler]
|
||||
(doto (JettyStatisticsCollector. handler)
|
||||
|
||||
@@ -202,6 +202,12 @@

{:name "0064-mod-audit-log-table"
:fn (mg/resource "app/migrations/sql/0064-mod-audit-log-table.sql")}

{:name "0065-add-trivial-spelling-fixes"
:fn (mg/resource "app/migrations/sql/0065-add-trivial-spelling-fixes.sql")}

{:name "0066-add-frame-thumbnail-table"
:fn (mg/resource "app/migrations/sql/0066-add-frame-thumbnail-table.sql")}
])

@@ -22,7 +22,7 @@ CREATE TABLE storage_data (
CREATE INDEX storage_data__id__idx ON storage_data(id);

-- Table used for store inflight upload ids, for later recheck and
-- delete possible staled files that exists on the phisical storage
-- delete possible staled files that exists on the physical storage
-- but does not exists in the 'storage_object' table.

CREATE TABLE storage_pending (

@@ -1,4 +1,4 @@
-- Fix problem with content-type inconherence
-- Fix problem with content-type incoherence

UPDATE storage_object so
SET metadata = jsonb_set(metadata, '{~:content-type}', to_jsonb(fmo.mtype))

@@ -0,0 +1,2 @@
ALTER INDEX file__modified_at__has_media_trimed__idx RENAME TO file__modified_at__has_media_trimmed__idx;
ALTER INDEX media_bject__file_id__idx RENAME TO media_object__file_id__idx;
@@ -0,0 +1,10 @@
CREATE TABLE file_frame_thumbnail (
file_id uuid NOT NULL REFERENCES file(id) ON DELETE CASCADE,
frame_id uuid NOT NULL,
created_at timestamptz NOT NULL DEFAULT now(),
updated_at timestamptz NOT NULL DEFAULT clock_timestamp(),

data text NULL,

PRIMARY KEY(file_id, frame_id)
);
@@ -1,5 +1,5 @@
--- This is a second migration but it should be applied when manual
--- migration intervention is alteady executed.
--- migration intervention is already executed.

ALTER TABLE file_media_object ALTER COLUMN media_id SET NOT NULL;
DROP TABLE file_media_thumbnail;

@@ -18,7 +18,6 @@
[integrant.core :as ig]
[promesa.core :as p])
(:import
java.time.Duration
io.lettuce.core.RedisClient
io.lettuce.core.RedisURI
io.lettuce.core.api.StatefulConnection
@@ -29,7 +28,10 @@
io.lettuce.core.codec.StringCodec
io.lettuce.core.pubsub.RedisPubSubListener
io.lettuce.core.pubsub.StatefulRedisPubSubConnection
io.lettuce.core.pubsub.api.async.RedisPubSubAsyncCommands))
io.lettuce.core.pubsub.api.async.RedisPubSubAsyncCommands
io.lettuce.core.resource.ClientResources
io.lettuce.core.resource.DefaultClientResources
java.time.Duration))

(def ^:private prefix (cfg/get :tenant))

@@ -136,27 +138,35 @@
(declare impl-redis-sub)
(declare impl-redis-unsub)

(defmethod init-backend :redis
[{:keys [redis-uri] :as cfg}]
(let [codec (RedisCodec/of StringCodec/UTF8 ByteArrayCodec/INSTANCE)

uri (RedisURI/create redis-uri)
rclient (RedisClient/create ^RedisURI uri)
resources (.. (DefaultClientResources/builder)
(ioThreadPoolSize 4)
(computationThreadPoolSize 4)
(build))

pub-conn (.connect ^RedisClient rclient ^RedisCodec codec)
sub-conn (.connectPubSub ^RedisClient rclient ^RedisCodec codec)]
uri (RedisURI/create redis-uri)
rclient (RedisClient/create ^ClientResources resources ^RedisURI uri)

pub-conn (.connect ^RedisClient rclient ^RedisCodec codec)
sub-conn (.connectPubSub ^RedisClient rclient ^RedisCodec codec)]

(.setTimeout ^StatefulRedisConnection pub-conn ^Duration (dt/duration {:seconds 10}))
(.setTimeout ^StatefulRedisPubSubConnection sub-conn ^Duration (dt/duration {:seconds 10}))

(-> cfg
(assoc ::resources resources)
(assoc ::pub-conn pub-conn)
(assoc ::sub-conn sub-conn))))

(defmethod stop-backend :redis
[{:keys [::pub-conn ::sub-conn] :as cfg}]
[{:keys [::pub-conn ::sub-conn ::resources] :as cfg}]
(.close ^StatefulRedisConnection pub-conn)
(.close ^StatefulRedisPubSubConnection sub-conn))
(.close ^StatefulRedisPubSubConnection sub-conn)
(.shutdown ^ClientResources resources))

(defmethod init-pub-loop :redis
[{:keys [::pub-conn ::pub-ch]}]
@@ -243,7 +253,7 @@
(recur))
(a/close! rcv-ch)))

;; Asyncrhonous message processing loop;x
;; Asynchronous message processing loop;x
(a/go-loop []
(if-let [{:keys [topic message]} (a/<! rcv-ch)]
;; This means we receive data from redis and we need to

@@ -1,281 +0,0 @@
|
||||
;; This Source Code Form is subject to the terms of the Mozilla Public
|
||||
;; License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
;;
|
||||
;; Copyright (c) UXBOX Labs SL
|
||||
|
||||
(ns app.notifications
|
||||
"A websocket based notifications mechanism."
|
||||
(:require
|
||||
[app.common.logging :as l]
|
||||
[app.common.spec :as us]
|
||||
[app.common.transit :as t]
|
||||
[app.db :as db]
|
||||
[app.metrics :as mtx]
|
||||
[app.util.async :as aa]
|
||||
[app.util.time :as dt]
|
||||
[app.worker :as wrk]
|
||||
[clojure.core.async :as a]
|
||||
[clojure.spec.alpha :as s]
|
||||
[integrant.core :as ig]
|
||||
[ring.adapter.jetty9 :as jetty]
|
||||
[ring.middleware.cookies :refer [wrap-cookies]]
|
||||
[ring.middleware.keyword-params :refer [wrap-keyword-params]]
|
||||
[ring.middleware.params :refer [wrap-params]]))
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; Http Handler
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
(declare retrieve-file)
|
||||
(declare websocket)
|
||||
(declare handler)
|
||||
|
||||
(s/def ::session map?)
|
||||
(s/def ::msgbus fn?)
|
||||
|
||||
(defmethod ig/pre-init-spec ::handler [_]
|
||||
(s/keys :req-un [::msgbus ::db/pool ::session ::mtx/metrics ::wrk/executor]))
|
||||
|
||||
(defmethod ig/init-key ::handler
|
||||
[_ {:keys [session metrics] :as cfg}]
|
||||
(let [wrap-session (:middleware session)
|
||||
|
||||
mtx-active-connections
|
||||
(mtx/create
|
||||
{:name "websocket_active_connections"
|
||||
:registry (:registry metrics)
|
||||
:type :gauge
|
||||
:help "Active websocket connections."})
|
||||
|
||||
mtx-messages
|
||||
(mtx/create
|
||||
{:name "websocket_message_total"
|
||||
:registry (:registry metrics)
|
||||
:labels ["op"]
|
||||
:type :counter
|
||||
:help "Counter of processed messages."})
|
||||
|
||||
mtx-sessions
|
||||
(mtx/create
|
||||
{:name "websocket_session_timing"
|
||||
:registry (:registry metrics)
|
||||
:quantiles []
|
||||
:help "Websocket session timing (seconds)."
|
||||
:type :summary})
|
||||
|
||||
cfg (assoc cfg
|
||||
:mtx-active-connections mtx-active-connections
|
||||
:mtx-messages mtx-messages
|
||||
:mtx-sessions mtx-sessions
|
||||
)]
|
||||
|
||||
(-> #(handler cfg %)
|
||||
(wrap-session)
|
||||
(wrap-keyword-params)
|
||||
(wrap-cookies)
|
||||
(wrap-params))))
|
||||
|
||||
(s/def ::file-id ::us/uuid)
|
||||
(s/def ::session-id ::us/uuid)
|
||||
|
||||
(s/def ::websocket-handler-params
|
||||
(s/keys :req-un [::file-id ::session-id]))
|
||||
|
||||
(defn- handler
|
||||
[{:keys [pool] :as cfg} {:keys [profile-id params] :as req}]
|
||||
(let [params (us/conform ::websocket-handler-params params)
|
||||
file (retrieve-file pool (:file-id params))
|
||||
cfg (merge cfg params
|
||||
{:profile-id profile-id
|
||||
:team-id (:team-id file)})]
|
||||
(cond
|
||||
(not profile-id)
|
||||
{:error {:code 403 :message "Authentication required"}}
|
||||
|
||||
(not file)
|
||||
{:error {:code 404 :message "File does not exists"}}
|
||||
|
||||
:else
|
||||
(websocket cfg))))
|
||||
|
||||
(def ^:private
|
||||
sql:retrieve-file
|
||||
"select f.id as id,
|
||||
p.team_id as team_id
|
||||
from file as f
|
||||
join project as p on (p.id = f.project_id)
|
||||
where f.id = ?")
|
||||
|
||||
(defn- retrieve-file
|
||||
[conn id]
|
||||
(db/exec-one! conn [sql:retrieve-file id]))
|
||||
|
||||
|
||||
;; --- WEBSOCKET INIT
|
||||
|
||||
(declare handle-connect)
|
||||
|
||||
(defn- ws-send
|
||||
[conn data]
|
||||
(try
|
||||
(when (jetty/connected? conn)
|
||||
(jetty/send! conn data)
|
||||
true)
|
||||
(catch java.lang.NullPointerException _e
|
||||
false)))
|
||||
|
||||
(defn websocket
|
||||
[{:keys [file-id team-id msgbus executor] :as cfg}]
|
||||
(let [rcv-ch (a/chan 32)
|
||||
out-ch (a/chan 32)
|
||||
mtx-aconn (:mtx-active-connections cfg)
|
||||
mtx-messages (:mtx-messages cfg)
|
||||
mtx-sessions (:mtx-sessions cfg)
|
||||
created-at (dt/now)
|
||||
ws-send (mtx/wrap-counter ws-send mtx-messages ["send"])]
|
||||
|
||||
(letfn [(on-connect [conn]
|
||||
((::mtx/fn mtx-aconn) {:cmd :inc :by 1})
|
||||
;; A subscription channel should use a lossy buffer
|
||||
;; because we can't penalize normal clients when one
|
||||
;; slow client is connected to the room.
|
||||
(let [sub-ch (a/chan (a/dropping-buffer 128))
|
||||
cfg (assoc cfg
|
||||
:conn conn
|
||||
:rcv-ch rcv-ch
|
||||
:out-ch out-ch
|
||||
:sub-ch sub-ch)]
|
||||
|
||||
(l/trace :event "connect" :session (:session-id cfg))
|
||||
|
||||
;; Forward all messages from out-ch to the websocket
|
||||
;; connection
|
||||
(a/go-loop []
|
||||
(let [val (a/<! out-ch)]
|
||||
(when (some? val)
|
||||
(when (a/<! (aa/thread-call executor #(ws-send conn (t/encode-str val))))
|
||||
(recur)))))
|
||||
|
||||
(a/go
|
||||
;; Subscribe to corresponding topics
|
||||
(a/<! (msgbus :sub {:topics [file-id team-id] :chan sub-ch}))
|
||||
(a/<! (handle-connect cfg))
|
||||
|
||||
;; when connection is closed
|
||||
((::mtx/fn mtx-aconn) {:cmd :dec :by 1})
|
||||
((::mtx/fn mtx-sessions) {:val (/ (inst-ms (dt/diff created-at (dt/now))) 1000.0)})
|
||||
|
||||
;; close subscription
|
||||
(a/close! sub-ch))))
|
||||
|
||||
(on-error [_conn _e]
|
||||
(l/trace :event "error" :session (:session-id cfg))
|
||||
|
||||
(a/close! out-ch)
|
||||
(a/close! rcv-ch))
|
||||
|
||||
(on-close [_conn _status _reason]
|
||||
(l/trace :event "close" :session (:session-id cfg))
|
||||
|
||||
(a/close! out-ch)
|
||||
(a/close! rcv-ch))
|
||||
|
||||
(on-message [_ws message]
|
||||
(let [message (t/decode-str message)]
|
||||
(when-not (a/offer! rcv-ch message)
|
||||
(l/warn :msg "drop messages"))))]
|
||||
|
||||
{:on-connect on-connect
|
||||
:on-error on-error
|
||||
:on-close on-close
|
||||
:on-text (mtx/wrap-counter on-message mtx-messages ["recv"])
|
||||
:on-bytes (constantly nil)})))
|
||||
|
||||
;; --- CONNECTION INIT
|
||||
|
||||
(declare send-presence)
|
||||
(declare handle-message)
|
||||
(declare start-loop!)
|
||||
|
||||
(defn- handle-connect
|
||||
[cfg]
|
||||
(a/go
|
||||
(a/<! (handle-message cfg {:type :connect}))
|
||||
(a/<! (start-loop! cfg))
|
||||
(a/<! (handle-message cfg {:type :disconnect}))))
|
||||
|
||||
(defn- start-loop!
|
||||
[{:keys [rcv-ch out-ch sub-ch session-id] :as cfg}]
|
||||
(a/go-loop []
|
||||
(let [timeout (a/timeout 30000)
|
||||
[val port] (a/alts! [rcv-ch sub-ch timeout])]
|
||||
(cond
|
||||
;; Process message coming from connected client
|
||||
(and (= port rcv-ch) (some? val))
|
||||
(do
|
||||
(a/<! (handle-message cfg val))
|
||||
(recur))
|
||||
|
||||
;; Process message coming from pubsub.
|
||||
(and (= port sub-ch) (some? val))
|
||||
(do
|
||||
(when-not (= (:session-id val) session-id)
|
||||
;; If we receive a connect message of other user, we need
|
||||
;; to send an update presence to all participants.
|
||||
(when (= :connect (:type val))
|
||||
(a/<! (send-presence cfg :presence)))
|
||||
|
||||
;; Then, just forward the message
|
||||
(a/>! out-ch val))
|
||||
(recur))
|
||||
|
||||
;; When timeout channel is signaled, we need to send a ping
|
||||
;; message to the output channel. TODO: we need to make this
|
||||
;; more smart.
|
||||
(= port timeout)
|
||||
(do
|
||||
(a/>! out-ch {:type :ping})
|
||||
(recur))))))
|
||||
|
||||
(defn send-presence
|
||||
([cfg] (send-presence cfg :presence))
|
||||
([{:keys [msgbus session-id profile-id file-id]} type]
|
||||
(a/go
|
||||
(a/<! (msgbus :pub {:topic file-id
|
||||
:message {:type type
|
||||
:session-id session-id
|
||||
:profile-id profile-id}})))))
|
||||
|
||||
;; --- INCOMING MSG PROCESSING
|
||||
|
||||
(defmulti handle-message
|
||||
(fn [_ message] (:type message)))
|
||||
|
||||
(defmethod handle-message :connect
|
||||
[cfg _]
|
||||
(send-presence cfg :connect))
|
||||
|
||||
(defmethod handle-message :disconnect
|
||||
[cfg _]
|
||||
(send-presence cfg :disconnect))
|
||||
|
||||
(defmethod handle-message :keepalive
|
||||
[_ _]
|
||||
(a/go :nothing))
|
||||
|
||||
(defmethod handle-message :pointer-update
|
||||
[{:keys [profile-id file-id session-id msgbus] :as cfg} message]
|
||||
(let [message (assoc message
|
||||
:profile-id profile-id
|
||||
:session-id session-id)]
|
||||
(msgbus :pub {:topic file-id
|
||||
:message message})))
|
||||
|
||||
(defmethod handle-message :default
|
||||
[_ws message]
|
||||
(a/go
|
||||
(l/log :level :warn
|
||||
:msg "received unexpected message"
|
||||
:message message)))
|
||||
|
||||
@@ -13,125 +13,182 @@
|
||||
[app.db :as db]
|
||||
[app.loggers.audit :as audit]
|
||||
[app.metrics :as mtx]
|
||||
[app.util.retry :as retry]
|
||||
[app.util.rlimit :as rlimit]
|
||||
[app.rpc.retry :as retry]
|
||||
[app.rpc.rlimit :as rlimit]
|
||||
[app.util.async :as async]
|
||||
[app.util.services :as sv]
|
||||
[app.worker :as wrk]
|
||||
[clojure.spec.alpha :as s]
|
||||
[integrant.core :as ig]))
|
||||
[integrant.core :as ig]
|
||||
[promesa.core :as p]
|
||||
[promesa.exec :as px]))
|
||||
|
||||
(defn- default-handler
|
||||
[_]
|
||||
(ex/raise :type :not-found))
|
||||
(p/rejected (ex/error :type :not-found)))
|
||||
|
||||
(defn- run-hook
|
||||
[hook-fn response]
|
||||
(ex/ignoring (hook-fn))
|
||||
(defn- handle-response-transformation
|
||||
[response request mdata]
|
||||
(if-let [transform-fn (:transform-response mdata)]
|
||||
(transform-fn request response)
|
||||
response))
|
||||
|
||||
(defn- handle-before-comple-hook
|
||||
[response mdata]
|
||||
(when-let [hook-fn (:before-complete mdata)]
|
||||
(ex/ignoring (hook-fn)))
|
||||
response)
|
||||
|
||||
(defn- rpc-query-handler
|
||||
[methods {:keys [profile-id] :as request}]
|
||||
(let [type (keyword (get-in request [:path-params :type]))
|
||||
"Ring handler that dispatches query requests and convert between
|
||||
internal async flow into ring async flow."
|
||||
[methods {:keys [profile-id session-id] :as request} respond raise]
|
||||
(letfn [(handle-response [result]
|
||||
(let [mdata (meta result)]
|
||||
(-> {:status 200 :body result}
|
||||
(handle-response-transformation request mdata))))]
|
||||
|
||||
data (merge (:params request)
|
||||
(:body-params request)
|
||||
(:uploads request)
|
||||
{::request request})
|
||||
(let [type (keyword (get-in request [:path-params :type]))
|
||||
data (merge (:params request)
|
||||
(:body-params request)
|
||||
(:uploads request)
|
||||
{::request request})
|
||||
|
||||
data (if profile-id
|
||||
(assoc data :profile-id profile-id)
|
||||
(dissoc data :profile-id))
|
||||
data (if profile-id
|
||||
(assoc data :profile-id profile-id ::session-id session-id)
|
||||
(dissoc data :profile-id))
|
||||
|
||||
result ((get methods type default-handler) data)
|
||||
mdata (meta result)]
|
||||
;; Get the method from methods registry and if method does
|
||||
;; not exists asigns it to the default handler.
|
||||
method (get methods type default-handler)]
|
||||
|
||||
(cond->> {:status 200 :body result}
|
||||
(fn? (:transform-response mdata))
|
||||
((:transform-response mdata) request))))
|
||||
(-> (method data)
|
||||
(p/then #(respond (handle-response %)))
|
||||
(p/catch raise)))))
|
||||
|
||||
(defn- rpc-mutation-handler
|
||||
[methods {:keys [profile-id] :as request}]
|
||||
(let [type (keyword (get-in request [:path-params :type]))
|
||||
data (merge (:params request)
|
||||
(:body-params request)
|
||||
(:uploads request)
|
||||
{::request request})
|
||||
"Ring handler that dispatches mutation requests and convert between
|
||||
internal async flow into ring async flow."
|
||||
[methods {:keys [profile-id session-id] :as request} respond raise]
|
||||
(letfn [(handle-response [result]
|
||||
(let [mdata (meta result)]
|
||||
(-> {:status 200 :body result}
|
||||
(handle-response-transformation request mdata)
|
||||
(handle-before-comple-hook mdata))))]
|
||||
|
||||
data (if profile-id
|
||||
(assoc data :profile-id profile-id)
|
||||
(dissoc data :profile-id))
|
||||
(let [type (keyword (get-in request [:path-params :type]))
|
||||
data (merge (:params request)
|
||||
(:body-params request)
|
||||
(:uploads request)
|
||||
{::request request})
|
||||
|
||||
result ((get methods type default-handler) data)
|
||||
mdata (meta result)]
|
||||
(cond->> {:status 200 :body result}
|
||||
(fn? (:transform-response mdata))
|
||||
((:transform-response mdata) request)
|
||||
data (if profile-id
|
||||
(assoc data :profile-id profile-id ::session-id session-id)
|
||||
(dissoc data :profile-id))
|
||||
|
||||
(fn? (:before-complete mdata))
|
||||
(run-hook (:before-complete mdata)))))
|
||||
method (get methods type default-handler)]
|
||||
|
||||
(defn- wrap-with-metrics
|
||||
[cfg f mdata]
|
||||
(mtx/wrap-summary f (::mobj cfg) [(::sv/name mdata)]))
|
||||
(-> (method data)
|
||||
(p/then #(respond (handle-response %)))
|
||||
(p/catch raise)))))
|
||||
|
||||
(defn- wrap-impl
|
||||
(defn- wrap-metrics
  "Wrap service method with metrics measurement."
  [{:keys [metrics ::metrics-id]} f mdata]
  (let [labels (into-array String [(::sv/name mdata)])]
    (fn [cfg params]
      (let [start (System/nanoTime)]
        (p/finally
          (f cfg params)
          (fn [_ _]
            (mtx/run! metrics
                      {:id metrics-id
                       :val (/ (- (System/nanoTime) start) 1000000)
                       :labels labels})))))))
|
||||
|
||||
(defn- wrap-dispatch
  "Wraps a service method into an async flow, with the ability to
  dispatch it to a preconfigured executor service."
  [{:keys [executors] :as cfg} f mdata]
  (let [dname (::async/dispatch mdata :none)]
    (if (= :none dname)
      (with-meta
        (fn [cfg params]
          (p/do! (f cfg params)))
        mdata)

      (let [executor (get executors dname)]
        (when-not executor
          (ex/raise :type :internal
                    :code :executor-not-configured
                    :hint (format "executor %s not configured" dname)))
        (with-meta
          (fn [cfg params]
            (-> (px/submit! executor #(f cfg params))
                (p/bind p/wrap)))
          mdata)))))
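
For reference, a minimal sketch (hypothetical method and helper names, not part of this changeset) of how a service method selects an executor through the ::async/dispatch metadata consumed by wrap-dispatch above:

;; Hypothetical method, shown only to illustrate the metadata contract;
;; ::async/dispatch picks the entry of the :executors map that runs it.
(sv/defmethod ::example-heavy-operation
  {::async/dispatch :blocking}
  [cfg params]
  ;; runs on the :blocking executor; wrap-dispatch wraps the result
  ;; into a promise via px/submit! and p/wrap
  (perform-heavy-operation cfg params))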
|
||||
|
||||
(defn- wrap-audit
  [{:keys [audit] :as cfg} f mdata]
  (if audit
    (with-meta
      (fn [cfg {:keys [::request] :as params}]
        (p/finally (f cfg params)
          (fn [result _]
            (when result
              (let [resultm    (meta result)
                    profile-id (or (:profile-id params)
                                   (:profile-id result)
                                   (::audit/profile-id resultm))
                    props      (d/merge params (::audit/props resultm))]
                (audit :cmd :submit
                       :type (or (::audit/type resultm)
                                 (::type cfg))
                       :name (or (::audit/name resultm)
                                 (::sv/name mdata))
                       :profile-id profile-id
                       :ip-addr (audit/parse-client-ip request)
                       :props (dissoc props ::request)))))))
      mdata)
    f))
|
||||
|
||||
(defn- wrap
|
||||
[cfg f mdata]
|
||||
(let [f (as-> f $
|
||||
(wrap-dispatch cfg $ mdata)
|
||||
(rlimit/wrap-rlimit cfg $ mdata)
|
||||
(retry/wrap-retry cfg $ mdata)
|
||||
(wrap-with-metrics cfg $ mdata))
|
||||
(wrap-audit cfg $ mdata)
|
||||
(wrap-metrics cfg $ mdata)
|
||||
)
|
||||
|
||||
spec (or (::sv/spec mdata) (s/spec any?))
|
||||
auth? (:auth mdata true)]
|
||||
|
||||
(l/trace :action "register" :name (::sv/name mdata))
|
||||
(with-meta
|
||||
(fn [params]
|
||||
(fn [{:keys [::request] :as params}]
|
||||
;; Raise authentication error when rpc method requires auth but
|
||||
;; no profile-id is found in the request.
|
||||
(when (and auth? (not (uuid? (:profile-id params))))
|
||||
(ex/raise :type :authentication
|
||||
:code :authentication-required
|
||||
:hint "authentication required for this endpoint"))
|
||||
(p/do!
|
||||
(if (and auth? (not (uuid? (:profile-id params))))
|
||||
(ex/raise :type :authentication
|
||||
:code :authentication-required
|
||||
:hint "authentication required for this endpoint")
|
||||
(let [params (us/conform spec (dissoc params ::request))]
|
||||
(f cfg (assoc params ::request request))))))
|
||||
|
||||
(let [params' (dissoc params ::request)
|
||||
params' (us/conform spec params')
|
||||
result (f cfg params')]
|
||||
|
||||
;; When audit log is enabled (default false).
|
||||
(when (fn? audit)
|
||||
(let [resultm (meta result)
|
||||
request (::request params)
|
||||
profile-id (or (:profile-id params')
|
||||
(:profile-id result)
|
||||
(::audit/profile-id resultm))
|
||||
props (d/merge params' (::audit/props resultm))]
|
||||
(audit :cmd :submit
|
||||
:type (or (::audit/type resultm)
|
||||
(::type cfg))
|
||||
:name (or (::audit/name resultm)
|
||||
(::sv/name mdata))
|
||||
:profile-id profile-id
|
||||
:ip-addr (audit/parse-client-ip request)
|
||||
:props props)))
|
||||
|
||||
result))
|
||||
mdata)))
|
||||
|
||||
(defn- process-method
|
||||
[cfg vfn]
|
||||
(let [mdata (meta vfn)]
|
||||
[(keyword (::sv/name mdata))
|
||||
(wrap-impl cfg (deref vfn) mdata)]))
|
||||
(wrap cfg (deref vfn) mdata)]))
|
||||
|
||||
(defn- resolve-query-methods
|
||||
[cfg]
|
||||
(let [mobj (mtx/create
|
||||
{:name "rpc_query_timing"
|
||||
:labels ["name"]
|
||||
:registry (get-in cfg [:metrics :registry])
|
||||
:type :histogram
|
||||
:help "Timing of query services."})
|
||||
cfg (assoc cfg ::mobj mobj ::type "query")]
|
||||
(let [cfg (assoc cfg ::type "query" ::metrics-id :rpc-query-timing)]
|
||||
(->> (sv/scan-ns 'app.rpc.queries.projects
|
||||
'app.rpc.queries.files
|
||||
'app.rpc.queries.teams
|
||||
@@ -144,13 +201,7 @@
|
||||
|
||||
(defn- resolve-mutation-methods
|
||||
[cfg]
|
||||
(let [mobj (mtx/create
|
||||
{:name "rpc_mutation_timing"
|
||||
:labels ["name"]
|
||||
:registry (get-in cfg [:metrics :registry])
|
||||
:type :histogram
|
||||
:help "Timing of mutation services."})
|
||||
cfg (assoc cfg ::mobj mobj ::type "mutation")]
|
||||
(let [cfg (assoc cfg ::type "mutation" ::metrics-id :rpc-mutation-timing)]
|
||||
(->> (sv/scan-ns 'app.rpc.mutations.demo
|
||||
'app.rpc.mutations.media
|
||||
'app.rpc.mutations.profile
|
||||
@@ -170,15 +221,16 @@
|
||||
(s/def ::session map?)
|
||||
(s/def ::tokens fn?)
|
||||
(s/def ::audit (s/nilable fn?))
|
||||
(s/def ::executors (s/map-of keyword? ::wrk/executor))
|
||||
|
||||
(defmethod ig/pre-init-spec ::rpc [_]
|
||||
(s/keys :req-un [::storage ::session ::tokens ::audit
|
||||
::mtx/metrics ::db/pool]))
|
||||
::executors ::mtx/metrics ::db/pool]))
|
||||
|
||||
(defmethod ig/init-key ::rpc
|
||||
[_ cfg]
|
||||
(let [mq (resolve-query-methods cfg)
|
||||
mm (resolve-mutation-methods cfg)]
|
||||
{:methods {:query mq :mutation mm}
|
||||
:query-handler #(rpc-query-handler mq %)
|
||||
:mutation-handler #(rpc-mutation-handler mm %)}))
|
||||
:query-handler (partial rpc-query-handler mq)
|
||||
:mutation-handler (partial rpc-mutation-handler mm)}))
|
||||
|
||||
@@ -7,12 +7,13 @@
|
||||
(ns app.rpc.mutations.comments
|
||||
(:require
|
||||
[app.common.exceptions :as ex]
|
||||
[app.common.geom.point :as gpt]
|
||||
[app.common.spec :as us]
|
||||
[app.db :as db]
|
||||
[app.rpc.queries.comments :as comments]
|
||||
[app.rpc.queries.files :as files]
|
||||
[app.rpc.retry :as retry]
|
||||
[app.util.blob :as blob]
|
||||
[app.util.retry :as retry]
|
||||
[app.util.services :as sv]
|
||||
[app.util.time :as dt]
|
||||
[clojure.spec.alpha :as s]))
|
||||
@@ -26,15 +27,14 @@
|
||||
(s/def ::page-id ::us/uuid)
|
||||
(s/def ::file-id ::us/uuid)
|
||||
(s/def ::profile-id ::us/uuid)
|
||||
(s/def ::position ::us/point)
|
||||
(s/def ::position ::gpt/point)
|
||||
(s/def ::content ::us/string)
|
||||
|
||||
(s/def ::create-comment-thread
|
||||
(s/keys :req-un [::profile-id ::file-id ::position ::content ::page-id]))
|
||||
|
||||
(sv/defmethod ::create-comment-thread
|
||||
{::retry/enabled true
|
||||
::retry/max-retries 3
|
||||
{::retry/max-retries 3
|
||||
::retry/matches retry/conflict-db-insert?}
|
||||
[{:keys [pool] :as cfg} {:keys [profile-id file-id] :as params}]
|
||||
(db/with-atomic [conn pool]
|
||||
@@ -174,7 +174,7 @@
|
||||
:content content})]
|
||||
|
||||
;; NOTE: this is done in SQL instead of using db/update!
|
||||
;; helper bacause currently the helper does not allow pass raw
|
||||
;; helper because currently the helper does not allow pass raw
|
||||
;; function call parameters to the underlying prepared
|
||||
;; statement; in a future when we fix/improve it, this can be
|
||||
;; changed to use the helper.
|
||||
|
||||
@@ -11,12 +11,14 @@
|
||||
[app.common.pages.migrations :as pmg]
|
||||
[app.common.spec :as us]
|
||||
[app.common.uuid :as uuid]
|
||||
[app.config :as cf]
|
||||
[app.db :as db]
|
||||
[app.metrics :as mtx]
|
||||
[app.rpc.permissions :as perms]
|
||||
[app.rpc.queries.files :as files]
|
||||
[app.rpc.queries.projects :as proj]
|
||||
[app.storage.impl :as simpl]
|
||||
[app.util.async :as async]
|
||||
[app.util.blob :as blob]
|
||||
[app.util.services :as sv]
|
||||
[app.util.time :as dt]
|
||||
@@ -26,6 +28,8 @@
|
||||
|
||||
;; --- Helpers & Specs
|
||||
|
||||
(s/def ::frame-id ::us/uuid)
|
||||
(s/def ::file-id ::us/uuid)
|
||||
(s/def ::id ::us/uuid)
|
||||
(s/def ::name ::us/string)
|
||||
(s/def ::profile-id ::us/uuid)
|
||||
@@ -181,7 +185,7 @@
|
||||
:library-file-id library-id}))
|
||||
|
||||
|
||||
;; --- Mutation: Update syncrhonization status of a link
|
||||
;; --- Mutation: Update synchronization status of a link
|
||||
|
||||
(declare update-sync)
|
||||
|
||||
@@ -269,6 +273,7 @@
|
||||
(contains? o :changes-with-metadata)))))
|
||||
|
||||
(sv/defmethod ::update-file
|
||||
{::async/dispatch :blocking}
|
||||
[{:keys [pool] :as cfg} {:keys [id profile-id] :as params}]
|
||||
(db/with-atomic [conn pool]
|
||||
(db/xact-lock! conn id)
|
||||
@@ -280,11 +285,13 @@
|
||||
(defn- take-snapshot?
|
||||
"Defines the rule when file `data` snapshot should be saved."
|
||||
[{:keys [revn modified-at] :as file}]
|
||||
;; The snapshot will be saved every 20 changes or if the last
|
||||
;; modification is older than 3 hour.
|
||||
(or (zero? (mod revn 20))
|
||||
(> (inst-ms (dt/diff modified-at (dt/now)))
|
||||
(inst-ms (dt/duration {:hours 3})))))
|
||||
(let [freq (or (cf/get :file-change-snapshot-every) 20)
|
||||
timeout (or (cf/get :file-change-snapshot-timeout)
|
||||
(dt/duration {:hours 1}))]
|
||||
(or (= 1 freq)
|
||||
(zero? (mod revn freq))
|
||||
(> (inst-ms (dt/diff modified-at (dt/now)))
|
||||
(inst-ms timeout)))))
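
As a quick illustration of the new rule (assuming the defaults shown above: a snapshot every 20 revisions, or when the last modification is older than the configured timeout):

;; Illustrative calls only; both assume the default configuration.
(take-snapshot? {:revn 40 :modified-at (dt/now)}) ;; => true, 40 is a multiple of 20
(take-snapshot? {:revn 41 :modified-at (dt/now)}) ;; => false, neither condition holds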
|
||||
|
||||
(defn- delete-from-storage
|
||||
[{:keys [storage] :as cfg} file]
|
||||
@@ -302,22 +309,21 @@
|
||||
:context {:incoming-revn (:revn params)
|
||||
:stored-revn (:revn file)}))
|
||||
|
||||
(let [mtx1 (get-in metrics [:definitions :update-file-changes])
|
||||
mtx2 (get-in metrics [:definitions :update-file-bytes-processed])
|
||||
|
||||
changes (if changes-with-metadata
|
||||
(let [changes (if changes-with-metadata
|
||||
(mapcat :changes changes-with-metadata)
|
||||
changes)
|
||||
|
||||
changes (vec changes)
|
||||
|
||||
;; Trace the number of changes processed
|
||||
_ ((::mtx/fn mtx1) {:by (count changes)})
|
||||
_ (mtx/run! metrics {:id :update-file-changes :inc (count changes)})
|
||||
|
||||
ts (dt/now)
|
||||
file (-> (files/retrieve-data cfg file)
|
||||
(update :revn inc)
|
||||
(update :data (fn [data]
|
||||
;; Trace the length of bytes of processed data
|
||||
((::mtx/fn mtx2) {:by (alength data)})
|
||||
(mtx/run! metrics {:id :update-file-bytes-processed :inc (alength data)})
|
||||
(-> data
|
||||
(blob/decode)
|
||||
(assoc :id (:id file))
|
||||
@@ -409,8 +415,9 @@
|
||||
[conn project-id]
|
||||
(:team-id (db/get-by-id conn :project project-id {:columns [:team-id]})))
|
||||
|
||||
|
||||
;; TEMPORARY FILE CREATION
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; TEMPORARY FILES (behaves differently)
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
(s/def ::create-temp-file ::create-file)
|
||||
|
||||
@@ -420,6 +427,23 @@
|
||||
(proj/check-edition-permissions! conn profile-id project-id)
|
||||
(create-file conn (assoc params :deleted-at (dt/in-future {:days 1})))))
|
||||
|
||||
(s/def ::update-temp-file
|
||||
(s/keys :req-un [::changes ::revn ::session-id ::id]))
|
||||
|
||||
(sv/defmethod ::update-temp-file
|
||||
[{:keys [pool] :as cfg} {:keys [profile-id session-id id revn changes] :as params}]
|
||||
(db/with-atomic [conn pool]
|
||||
(db/insert! conn :file-change
|
||||
{:id (uuid/next)
|
||||
:session-id session-id
|
||||
:profile-id profile-id
|
||||
:created-at (dt/now)
|
||||
:file-id id
|
||||
:revn revn
|
||||
:data nil
|
||||
:changes (blob/encode changes)})
|
||||
nil))
|
||||
|
||||
(s/def ::persist-temp-file
|
||||
(s/keys :req-un [::id ::profile-id]))
|
||||
|
||||
@@ -427,6 +451,47 @@
|
||||
[{:keys [pool] :as cfg} {:keys [id profile-id] :as params}]
|
||||
(db/with-atomic [conn pool]
|
||||
(files/check-edition-permissions! conn profile-id id)
|
||||
(db/update! conn :file
|
||||
{:deleted-at nil}
|
||||
{:id id})))
|
||||
(let [file (db/get-by-id conn :file id)
|
||||
revs (db/query conn :file-change
|
||||
{:file-id id}
|
||||
{:order-by [[:revn :asc]]})
|
||||
revn (count revs)]
|
||||
|
||||
(when (nil? (:deleted-at file))
|
||||
(ex/raise :type :validation
|
||||
:code :cant-persist-already-persisted-file))
|
||||
|
||||
(loop [revs (seq revs)
|
||||
data (blob/decode (:data file))]
|
||||
(if-let [rev (first revs)]
|
||||
(recur (rest revs)
|
||||
(->> rev :changes blob/decode (cp/process-changes data)))
|
||||
(db/update! conn :file
|
||||
{:deleted-at nil
|
||||
:revn revn
|
||||
:data (blob/encode data)}
|
||||
{:id id})))
|
||||
|
||||
nil)))
|
||||
|
||||
|
||||
;; --- Mutation: Upsert frame thumbnail
|
||||
|
||||
(def sql:upsert-frame-thumbnail
|
||||
"insert into file_frame_thumbnail(file_id, frame_id, data)
|
||||
values (?, ?, ?)
|
||||
on conflict(file_id, frame_id) do
|
||||
update set data = ?;")
|
||||
|
||||
(s/def ::data ::us/string)
|
||||
(s/def ::upsert-frame-thumbnail
|
||||
(s/keys :req-un [::profile-id ::file-id ::frame-id ::data]))
|
||||
|
||||
(sv/defmethod ::upsert-frame-thumbnail
|
||||
[{:keys [pool] :as cfg} {:keys [profile-id file-id frame-id data]}]
|
||||
(db/with-atomic [conn pool]
|
||||
(files/check-edition-permissions! conn profile-id file-id)
|
||||
(db/exec-one! conn [sql:upsert-frame-thumbnail file-id frame-id data data])
|
||||
nil))
|
||||
|
||||
|
||||
|
||||
@@ -9,12 +9,10 @@
|
||||
[app.common.exceptions :as ex]
|
||||
[app.common.spec :as us]
|
||||
[app.common.uuid :as uuid]
|
||||
[app.config :as cf]
|
||||
[app.db :as db]
|
||||
[app.media :as media]
|
||||
[app.rpc.queries.teams :as teams]
|
||||
[app.storage :as sto]
|
||||
[app.util.rlimit :as rlimit]
|
||||
[app.util.services :as sv]
|
||||
[app.util.time :as dt]
|
||||
[clojure.spec.alpha :as s]))
|
||||
@@ -39,52 +37,57 @@
|
||||
::font-id ::font-family ::font-weight ::font-style]))
|
||||
|
||||
(sv/defmethod ::create-font-variant
|
||||
{::rlimit/permits (cf/get :rlimit-font)}
|
||||
[{:keys [pool] :as cfg} {:keys [team-id profile-id] :as params}]
|
||||
(db/with-atomic [conn pool]
|
||||
(let [cfg (assoc cfg :conn conn)]
|
||||
(teams/check-edition-permissions! conn profile-id team-id)
|
||||
(create-font-variant cfg params))))
|
||||
(teams/check-edition-permissions! pool profile-id team-id)
|
||||
(create-font-variant cfg params))
|
||||
|
||||
(defn create-font-variant
|
||||
[{:keys [conn storage] :as cfg} {:keys [data] :as params}]
|
||||
[{:keys [storage pool] :as cfg} {:keys [data] :as params}]
|
||||
(let [data (media/run {:cmd :generate-fonts :input data})
|
||||
storage (media/configure-assets-storage storage conn)
|
||||
storage (media/configure-assets-storage storage)]
|
||||
|
||||
otf (when-let [fdata (get data "font/otf")]
|
||||
(sto/put-object storage {:content (sto/content fdata)
|
||||
:content-type "font/otf"}))
|
||||
|
||||
ttf (when-let [fdata (get data "font/ttf")]
|
||||
(sto/put-object storage {:content (sto/content fdata)
|
||||
:content-type "font/ttf"}))
|
||||
|
||||
woff1 (when-let [fdata (get data "font/woff")]
|
||||
(sto/put-object storage {:content (sto/content fdata)
|
||||
:content-type "font/woff"}))
|
||||
|
||||
woff2 (when-let [fdata (get data "font/woff2")]
|
||||
(sto/put-object storage {:content (sto/content fdata)
|
||||
:content-type "font/woff2"}))]
|
||||
|
||||
(when (and (nil? otf)
|
||||
(nil? ttf)
|
||||
(nil? woff1)
|
||||
(nil? woff2))
|
||||
(when (and (not (contains? data "font/otf"))
|
||||
(not (contains? data "font/ttf"))
|
||||
(not (contains? data "font/woff"))
|
||||
(not (contains? data "font/woff2")))
|
||||
(ex/raise :type :validation
|
||||
:code :invalid-font-upload))
|
||||
|
||||
(db/insert! conn :team-font-variant
|
||||
{:id (uuid/next)
|
||||
:team-id (:team-id params)
|
||||
:font-id (:font-id params)
|
||||
:font-family (:font-family params)
|
||||
:font-weight (:font-weight params)
|
||||
:font-style (:font-style params)
|
||||
:woff1-file-id (:id woff1)
|
||||
:woff2-file-id (:id woff2)
|
||||
:otf-file-id (:id otf)
|
||||
:ttf-file-id (:id ttf)})))
|
||||
(let [otf (when-let [fdata (get data "font/otf")]
|
||||
(sto/put-object storage {:content (sto/content fdata)
|
||||
:content-type "font/otf"
|
||||
:reference :team-font-variant
|
||||
:touched-at (dt/now)}))
|
||||
|
||||
ttf (when-let [fdata (get data "font/ttf")]
|
||||
(sto/put-object storage {:content (sto/content fdata)
|
||||
:content-type "font/ttf"
|
||||
:touched-at (dt/now)
|
||||
:reference :team-font-variant}))
|
||||
|
||||
woff1 (when-let [fdata (get data "font/woff")]
|
||||
(sto/put-object storage {:content (sto/content fdata)
|
||||
:content-type "font/woff"
|
||||
:touched-at (dt/now)
|
||||
:reference :team-font-variant}))
|
||||
|
||||
woff2 (when-let [fdata (get data "font/woff2")]
|
||||
(sto/put-object storage {:content (sto/content fdata)
|
||||
:content-type "font/woff2"
|
||||
:touched-at (dt/now)
|
||||
:reference :team-font-variant}))]
|
||||
|
||||
(db/insert! pool :team-font-variant
|
||||
{:id (uuid/next)
|
||||
:team-id (:team-id params)
|
||||
:font-id (:font-id params)
|
||||
:font-family (:font-family params)
|
||||
:font-weight (:font-weight params)
|
||||
:font-style (:font-style params)
|
||||
:woff1-file-id (:id woff1)
|
||||
:woff2-file-id (:id woff2)
|
||||
:otf-file-id (:id otf)
|
||||
:ttf-file-id (:id ttf)}))))
|
||||
|
||||
;; --- UPDATE FONT FAMILY
|
||||
|
||||
|
||||
@@ -56,7 +56,7 @@
|
||||
(s/keys :req-un [::email ::password]
|
||||
:opt-un [::invitation-token]))
|
||||
|
||||
(sv/defmethod ::login-with-ldap {:auth false :rlimit :password}
|
||||
(sv/defmethod ::login-with-ldap {:auth false}
|
||||
[{:keys [pool session tokens] :as cfg} params]
|
||||
(db/with-atomic [conn pool]
|
||||
(let [info (authenticate params)
|
||||
|
||||
@@ -62,7 +62,7 @@
|
||||
(= :image (:type form)))
|
||||
(update-in [:metadata :id] #(get index % %))))
|
||||
|
||||
;; A function responsible to analize all file data and
|
||||
;; A function responsible to analyze all file data and
|
||||
;; replace the old :component-file reference with the new
|
||||
;; ones, using the provided file-index
|
||||
(relink-shapes [data]
|
||||
@@ -294,7 +294,7 @@
|
||||
;; move all files to the project
|
||||
(db/exec-one! conn [sql:move-files project-id fids])
|
||||
|
||||
;; delete posible broken relations on moved files
|
||||
;; delete possible broken relations on moved files
|
||||
(db/exec-one! conn [sql:delete-broken-relations pids])
|
||||
|
||||
nil)))
|
||||
@@ -329,7 +329,7 @@
|
||||
{:team-id team-id}
|
||||
{:id project-id})
|
||||
|
||||
;; delete posible broken relations on moved files
|
||||
;; delete possible broken relations on moved files
|
||||
(db/exec-one! conn [sql:delete-broken-relations pids])
|
||||
|
||||
nil)))
|
||||
|
||||
@@ -14,9 +14,10 @@
|
||||
[app.db :as db]
|
||||
[app.media :as media]
|
||||
[app.rpc.queries.teams :as teams]
|
||||
[app.rpc.rlimit :as rlimit]
|
||||
[app.storage :as sto]
|
||||
[app.util.async :as async]
|
||||
[app.util.http :as http]
|
||||
[app.util.rlimit :as rlimit]
|
||||
[app.util.services :as sv]
|
||||
[app.util.time :as dt]
|
||||
[clojure.spec.alpha :as s]
|
||||
@@ -49,13 +50,12 @@
|
||||
:opt-un [::id]))
|
||||
|
||||
(sv/defmethod ::upload-file-media-object
|
||||
{::rlimit/permits (cf/get :rlimit-image)}
|
||||
{::rlimit/permits (cf/get :rlimit-image)
|
||||
::async/dispatch :default}
|
||||
[{:keys [pool] :as cfg} {:keys [profile-id file-id] :as params}]
|
||||
(db/with-atomic [conn pool]
|
||||
(let [file (select-file conn file-id)]
|
||||
(teams/check-edition-permissions! conn profile-id (:team-id file))
|
||||
(-> (assoc cfg :conn conn)
|
||||
(create-file-media-object params)))))
|
||||
(let [file (select-file pool file-id)]
|
||||
(teams/check-edition-permissions! pool profile-id (:team-id file))
|
||||
(create-file-media-object cfg params)))
|
||||
|
||||
(defn- big-enough-for-thumbnail?
|
||||
"Checks if the provided image info is big enough for
|
||||
@@ -77,6 +77,9 @@
|
||||
:code :unable-to-access-to-url
|
||||
:cause e))))
|
||||
|
||||
;; TODO: we need to check the size before fetching the resource; if
;; not, we can start downloading a very big object and cause OOM errors.
|
||||
|
||||
(defn- download-media
|
||||
[{:keys [storage] :as cfg} url]
|
||||
(let [result (fetch-url url)
|
||||
@@ -90,6 +93,7 @@
|
||||
(-> (assoc storage :backend :tmp)
|
||||
(sto/put-object {:content (sto/content data)
|
||||
:content-type mtype
|
||||
:reference :file-media-object
|
||||
:expired-at (dt/in-future {:minutes 30})}))))
|
||||
|
||||
;; NOTE: we use the `on conflict do update` instead of `do nothing`
|
||||
@@ -102,13 +106,27 @@
|
||||
on conflict (id) do update set created_at=file_media_object.created_at
|
||||
returning *")
|
||||
|
||||
;; NOTE: the following function executes without a transaction; this
;; means that if something fails in the middle of this function, it
;; will probably leave leaked/unreferenced objects in the database and
;; probably in the storage layer. To handle possible object leakage,
;; we create all media objects marked as touched; this ensures that if
;; something fails, all leaked (already created) storage objects will
;; eventually be marked as deleted by the touched-gc task.
;;
;; The touched-gc task performs a periodic analysis of all touched
;; storage objects and checks their references. This is the reason why
;; the `reference` metadata exists: it indicates the name of the table
;; which holds the reference to the storage object (a kind of
;; inverse, soft referential integrity).
|
||||
|
||||
(defn create-file-media-object
|
||||
[{:keys [conn storage] :as cfg} {:keys [id file-id is-local name content] :as params}]
|
||||
[{:keys [storage pool] :as cfg} {:keys [id file-id is-local name content] :as params}]
|
||||
(media/validate-media-type (:content-type content))
|
||||
(let [storage (media/configure-assets-storage storage conn)
|
||||
source-path (fs/path (:tempfile content))
|
||||
(let [source-path (fs/path (:tempfile content))
|
||||
source-mtype (:content-type content)
|
||||
source-info (media/run {:cmd :info :input {:path source-path :mtype source-mtype}})
|
||||
storage (media/configure-assets-storage storage)
|
||||
|
||||
thumb (when (and (not (svg-image? source-info))
|
||||
(big-enough-for-thumbnail? source-info))
|
||||
@@ -119,16 +137,25 @@
|
||||
|
||||
image (if (= (:mtype source-info) "image/svg+xml")
|
||||
(let [data (slurp source-path)]
|
||||
(sto/put-object storage {:content (sto/content data)
|
||||
:content-type (:mtype source-info)}))
|
||||
(sto/put-object storage {:content (sto/content source-path)
|
||||
:content-type (:mtype source-info)}))
|
||||
(sto/put-object storage
|
||||
{:content (sto/content data)
|
||||
:content-type (:mtype source-info)
|
||||
:reference :file-media-object
|
||||
:touched-at (dt/now)}))
|
||||
(sto/put-object storage
|
||||
{:content (sto/content source-path)
|
||||
:content-type (:mtype source-info)
|
||||
:reference :file-media-object
|
||||
:touched-at (dt/now)}))
|
||||
|
||||
thumb (when thumb
|
||||
(sto/put-object storage {:content (sto/content (:data thumb) (:size thumb))
|
||||
:content-type (:mtype thumb)}))]
|
||||
(sto/put-object storage
|
||||
{:content (sto/content (:data thumb) (:size thumb))
|
||||
:content-type (:mtype thumb)
|
||||
:reference :file-media-object
|
||||
:touched-at (dt/now)}))]
|
||||
|
||||
(db/exec-one! conn [sql:create-file-media-object
|
||||
(db/exec-one! pool [sql:create-file-media-object
|
||||
(or id (uuid/next))
|
||||
file-id is-local name
|
||||
(:id image)
|
||||
@@ -144,20 +171,19 @@
|
||||
:opt-un [::id ::name]))
|
||||
|
||||
(sv/defmethod ::create-file-media-object-from-url
|
||||
{::rlimit/permits (cf/get :rlimit-image)
|
||||
::async/dispatch :default}
|
||||
[{:keys [pool storage] :as cfg} {:keys [profile-id file-id url name] :as params}]
|
||||
(db/with-atomic [conn pool]
|
||||
(let [file (select-file conn file-id)]
|
||||
(teams/check-edition-permissions! conn profile-id (:team-id file))
|
||||
(let [mobj (download-media cfg url)
|
||||
content {:filename "tempfile"
|
||||
:size (:size mobj)
|
||||
:tempfile (sto/get-object-path storage mobj)
|
||||
:content-type (:content-type (meta mobj))}
|
||||
params' (merge params {:content content
|
||||
:name (or name (:filename content))})]
|
||||
(-> (assoc cfg :conn conn)
|
||||
(create-file-media-object params'))))))
|
||||
(let [file (select-file pool file-id)]
|
||||
(teams/check-edition-permissions! pool profile-id (:team-id file))
|
||||
(let [mobj (download-media cfg url)
|
||||
content {:filename "tempfile"
|
||||
:size (:size mobj)
|
||||
:tempfile (sto/get-object-path storage mobj)
|
||||
:content-type (:content-type (meta mobj))}]
|
||||
|
||||
(->> (merge params {:content content :name (or name (:filename content))})
|
||||
(create-file-media-object cfg)))))
|
||||
|
||||
;; --- Clone File Media object (Upload and create from url)
|
||||
|
||||
@@ -189,7 +215,6 @@
|
||||
:height (:height mobj)
|
||||
:mtype (:mtype mobj)})))
|
||||
|
||||
|
||||
;; --- HELPERS
|
||||
|
||||
(def ^:private
|
||||
|
||||
@@ -15,11 +15,11 @@
|
||||
[app.http.oauth :refer [extract-utm-props]]
|
||||
[app.loggers.audit :as audit]
|
||||
[app.media :as media]
|
||||
[app.metrics :as mtx]
|
||||
[app.rpc.mutations.teams :as teams]
|
||||
[app.rpc.queries.profile :as profile]
|
||||
[app.rpc.rlimit :as rlimit]
|
||||
[app.storage :as sto]
|
||||
[app.util.rlimit :as rlimit]
|
||||
[app.util.async :as async]
|
||||
[app.util.services :as sv]
|
||||
[app.util.time :as dt]
|
||||
[buddy.hashers :as hashers]
|
||||
@@ -38,7 +38,6 @@
|
||||
(s/def ::theme ::us/string)
|
||||
(s/def ::invitation-token ::us/not-empty-string)
|
||||
|
||||
(declare annotate-profile-register)
|
||||
(declare check-profile-existence!)
|
||||
(declare create-profile)
|
||||
(declare create-profile-relations)
|
||||
@@ -102,13 +101,14 @@
|
||||
(when-not (contains? cf/flags :registration)
|
||||
(ex/raise :type :restriction
|
||||
:code :registration-disabled))
|
||||
|
||||
(when-let [domains (cf/get :registration-domain-whitelist)]
|
||||
(when-not (email-domain-in-whitelist? domains (:email params))
|
||||
(ex/raise :type :validation
|
||||
:code :email-domain-is-not-allowed)))
|
||||
|
||||
;; Don't allow proceed in preparing registration if the profile is
|
||||
;; already reported as spamer.
|
||||
;; already reported as spammer.
|
||||
(when (eml/has-bounce-reports? pool (:email params))
|
||||
(ex/raise :type :validation
|
||||
:code :email-has-permanent-bounces
|
||||
@@ -116,10 +116,17 @@
|
||||
|
||||
(check-profile-existence! pool params)
|
||||
|
||||
(let [params (assoc params
|
||||
:backend "penpot"
|
||||
:iss :prepared-register
|
||||
:exp (dt/in-future "48h"))
|
||||
(when (= (str/lower (:email params))
|
||||
(str/lower (:password params)))
|
||||
(ex/raise :type :validation
|
||||
:code :email-as-password
|
||||
:hint "you can't use your email as password"))
|
||||
|
||||
(let [params {:email (:email params)
|
||||
:invitation-token (:invitation-token params)
|
||||
:backend "penpot"
|
||||
:iss :prepared-register
|
||||
:exp (dt/in-future "48h")}
|
||||
token (tokens :generate params)]
|
||||
{:token token}))
|
||||
|
||||
@@ -136,53 +143,42 @@
|
||||
(-> (assoc cfg :conn conn)
|
||||
(register-profile params))))
|
||||
|
||||
(defn- annotate-profile-register
|
||||
"A helper for properly increase the profile-register metric once the
|
||||
transaction is completed."
|
||||
[metrics]
|
||||
(fn []
|
||||
(let [mobj (get-in metrics [:definitions :profile-register])]
|
||||
((::mtx/fn mobj) {:by 1}))))
|
||||
|
||||
(defn register-profile
|
||||
[{:keys [conn tokens session metrics] :as cfg} {:keys [token] :as params}]
|
||||
[{:keys [conn tokens session] :as cfg} {:keys [token] :as params}]
|
||||
(let [claims (tokens :verify {:token token :iss :prepared-register})
|
||||
params (merge params claims)]
|
||||
|
||||
(check-profile-existence! conn params)
|
||||
|
||||
(let [is-active (or (:is-active params)
|
||||
(contains? cf/flags :insecure-register))
|
||||
profile (->> (assoc params :is-active is-active)
|
||||
(create-profile conn)
|
||||
(create-profile-relations conn)
|
||||
(decode-profile-row))]
|
||||
(let [is-active (or (:is-active params)
|
||||
(contains? cf/flags :insecure-register))
|
||||
profile (->> (assoc params :is-active is-active)
|
||||
(create-profile conn)
|
||||
(create-profile-relations conn)
|
||||
(decode-profile-row))
|
||||
|
||||
invitation (when-let [token (:invitation-token params)]
|
||||
(tokens :verify {:token token :iss :team-invitation}))]
|
||||
|
||||
(cond
|
||||
;; If invitation token comes in params, this is because the
|
||||
;; user comes from team-invitation process; in this case,
|
||||
;; regenerate token and send back to the user a new invitation
|
||||
;; token (and mark current session as logged).
|
||||
(some? (:invitation-token params))
|
||||
(let [token (:invitation-token params)
|
||||
claims (tokens :verify {:token token :iss :team-invitation})
|
||||
claims (assoc claims
|
||||
:member-id (:id profile)
|
||||
:member-email (:email profile))
|
||||
;; If invitation token comes in params, this is because the user comes from team-invitation process;
|
||||
;; in this case, regenerate token and send back to the user a new invitation token (and mark current
|
||||
;; session as logged). This happens only if the invitation email matches with the register email.
|
||||
(and (some? invitation) (= (:email profile) (:member-email invitation)))
|
||||
(let [claims (assoc invitation :member-id (:id profile))
|
||||
token (tokens :generate claims)
|
||||
resp {:invitation-token token}]
|
||||
(with-meta resp
|
||||
{:transform-response ((:create session) (:id profile))
|
||||
:before-complete (annotate-profile-register metrics)
|
||||
::audit/props (audit/profile->props profile)
|
||||
::audit/profile-id (:id profile)}))
|
||||
|
||||
;; If auth backend is different from "penpot" means user is
|
||||
;; registring using third party auth mechanism; in this case
|
||||
;; registering using third party auth mechanism; in this case
|
||||
;; we need to mark this session as logged.
|
||||
(not= "penpot" (:auth-backend profile))
|
||||
(with-meta (profile/strip-private-attrs profile)
|
||||
{:transform-response ((:create session) (:id profile))
|
||||
:before-complete (annotate-profile-register metrics)
|
||||
::audit/props (audit/profile->props profile)
|
||||
::audit/profile-id (:id profile)})
|
||||
|
||||
@@ -191,7 +187,6 @@
|
||||
(true? is-active)
|
||||
(with-meta (profile/strip-private-attrs profile)
|
||||
{:transform-response ((:create session) (:id profile))
|
||||
:before-complete (annotate-profile-register metrics)
|
||||
::audit/props (audit/profile->props profile)
|
||||
::audit/profile-id (:id profile)})
|
||||
|
||||
@@ -214,8 +209,7 @@
|
||||
:extra-data ptoken})
|
||||
|
||||
(with-meta profile
|
||||
{:before-complete (annotate-profile-register metrics)
|
||||
::audit/props (audit/profile->props profile)
|
||||
{::audit/props (audit/profile->props profile)
|
||||
::audit/profile-id (:id profile)}))))))
|
||||
|
||||
(defn create-profile
|
||||
@@ -284,7 +278,9 @@
|
||||
:opt-un [::scope ::invitation-token]))
|
||||
|
||||
(sv/defmethod ::login
|
||||
{:auth false ::rlimit/permits (cf/get :rlimit-password)}
|
||||
{:auth false
|
||||
::async/dispatch :default
|
||||
::rlimit/permits (cf/get :rlimit-password)}
|
||||
[{:keys [pool session tokens] :as cfg} {:keys [email password] :as params}]
|
||||
(letfn [(check-password [profile password]
|
||||
(when (= (:password profile) "!")
|
||||
@@ -305,32 +301,26 @@
|
||||
profile)]
|
||||
|
||||
(db/with-atomic [conn pool]
|
||||
(let [profile (->> (profile/retrieve-profile-data-by-email conn email)
|
||||
(validate-profile)
|
||||
(profile/strip-private-attrs)
|
||||
(profile/populate-additional-data conn)
|
||||
(decode-profile-row))]
|
||||
(if-let [token (:invitation-token params)]
|
||||
;; If the request comes with an invitation token, this means
|
||||
;; that user wants to accept it with different user. A very
|
||||
;; strange case but still can happen. In this case, we
|
||||
;; proceed in the same way as in register: regenerate the
|
||||
;; invitation token and return it to the user for proper
|
||||
;; invitation acceptation.
|
||||
(let [claims (tokens :verify {:token token :iss :team-invitation})
|
||||
claims (assoc claims
|
||||
:member-id (:id profile)
|
||||
:member-email (:email profile))
|
||||
token (tokens :generate claims)]
|
||||
(with-meta {:invitation-token token}
|
||||
{:transform-response ((:create session) (:id profile))
|
||||
::audit/props (audit/profile->props profile)
|
||||
::audit/profile-id (:id profile)}))
|
||||
(let [profile (->> (profile/retrieve-profile-data-by-email conn email)
|
||||
(validate-profile)
|
||||
(profile/strip-private-attrs)
|
||||
(profile/populate-additional-data conn)
|
||||
(decode-profile-row))
|
||||
|
||||
(with-meta profile
|
||||
{:transform-response ((:create session) (:id profile))
|
||||
::audit/props (audit/profile->props profile)
|
||||
::audit/profile-id (:id profile)}))))))
|
||||
invitation (when-let [token (:invitation-token params)]
|
||||
(tokens :verify {:token token :iss :team-invitation}))
|
||||
|
||||
;; If the invitation member-id does not match the profile-id, we just proceed to ignore the
;; invitation because invitations must match exactly; the user can't log in with one email and
;; accept an invitation addressed to another email.
|
||||
response (if (and (some? invitation) (= (:id profile) (:member-id invitation)))
|
||||
{:invitation-token (:invitation-token params)}
|
||||
profile)]
|
||||
|
||||
(with-meta response
|
||||
{:transform-response ((:create session) (:id profile))
|
||||
::audit/props (audit/profile->props profile)
|
||||
::audit/profile-id (:id profile)})))))
|
||||
|
||||
;; --- MUTATION: Logout
|
||||
|
||||
@@ -360,6 +350,7 @@
|
||||
:opt-un [::lang ::theme]))
|
||||
|
||||
(sv/defmethod ::update-profile
|
||||
{::async/dispatch :default}
|
||||
[{:keys [pool] :as cfg} params]
|
||||
(db/with-atomic [conn pool]
|
||||
(let [profile (update-profile conn params)]
|
||||
@@ -370,6 +361,7 @@
|
||||
|
||||
(declare validate-password!)
|
||||
(declare update-profile-password!)
|
||||
(declare invalidate-profile-session!)
|
||||
|
||||
(s/def ::update-profile-password
|
||||
(s/keys :req-un [::profile-id ::password ::old-password]))
|
||||
@@ -378,10 +370,23 @@
|
||||
{::rlimit/permits (cf/get :rlimit-password)}
|
||||
[{:keys [pool] :as cfg} {:keys [password] :as params}]
|
||||
(db/with-atomic [conn pool]
|
||||
(let [profile (validate-password! conn params)]
|
||||
(let [profile (validate-password! conn params)
|
||||
session-id (:app.rpc/session-id params)]
|
||||
(when (= (str/lower (:email profile))
|
||||
(str/lower (:password params)))
|
||||
(ex/raise :type :validation
|
||||
:code :email-as-password
|
||||
:hint "you can't use your email as password"))
|
||||
(update-profile-password! conn (assoc profile :password password))
|
||||
(invalidate-profile-session! conn (:id profile) session-id)
|
||||
nil)))
|
||||
|
||||
(defn- invalidate-profile-session!
|
||||
"Removes all sessions except the current one."
|
||||
[conn profile-id session-id]
|
||||
(let [sql "delete from http_session where profile_id = ? and id != ?"]
|
||||
(:next.jdbc/update-count (db/exec-one! conn [sql profile-id session-id]))))
|
||||
|
||||
(defn- validate-password!
|
||||
[conn {:keys [profile-id old-password] :as params}]
|
||||
(let [profile (db/get-by-id conn :profile profile-id)]
|
||||
@@ -396,7 +401,6 @@
|
||||
{:password (derive-password password)}
|
||||
{:id id}))
|
||||
|
||||
|
||||
;; --- MUTATION: Update Photo
|
||||
|
||||
(declare update-profile-photo)
|
||||
@@ -438,7 +442,7 @@
|
||||
;; --- MUTATION: Request Email Change
|
||||
|
||||
(declare request-email-change)
|
||||
(declare change-email-inmediatelly)
|
||||
(declare change-email-immediately)
|
||||
|
||||
(s/def ::request-email-change
|
||||
(s/keys :req-un [::email]))
|
||||
@@ -454,9 +458,9 @@
|
||||
(if (or (cf/get :smtp-enabled)
|
||||
(contains? cf/flags :smtp))
|
||||
(request-email-change cfg params)
|
||||
(change-email-inmediatelly cfg params)))))
|
||||
(change-email-immediately cfg params)))))
|
||||
|
||||
(defn- change-email-inmediatelly
|
||||
(defn- change-email-immediately
|
||||
[{:keys [conn]} {:keys [profile email] :as params}]
|
||||
(when (not= email (:email profile))
|
||||
(check-profile-existence! conn params))
|
||||
@@ -639,7 +643,7 @@
|
||||
(let [rows (db/exec! conn [sql:owned-teams profile-id])]
|
||||
;; If we found owned teams with more than one profile we don't
|
||||
;; allow delete profile until the user properly transfer ownership
|
||||
;; or explictly removes all participants from the team.
|
||||
;; or explicitly removes all participants from the team.
|
||||
(when (some #(> (:num-profiles %) 1) rows)
|
||||
(ex/raise :type :validation
|
||||
:code :owner-teams-with-people
|
||||
|
||||
@@ -18,8 +18,8 @@
|
||||
[app.rpc.permissions :as perms]
|
||||
[app.rpc.queries.profile :as profile]
|
||||
[app.rpc.queries.teams :as teams]
|
||||
[app.rpc.rlimit :as rlimit]
|
||||
[app.storage :as sto]
|
||||
[app.util.rlimit :as rlimit]
|
||||
[app.util.services :as sv]
|
||||
[app.util.time :as dt]
|
||||
[clojure.spec.alpha :as s]
|
||||
@@ -164,7 +164,7 @@
|
||||
(s/keys :req-un [::profile-id ::id]))
|
||||
|
||||
;; TODO: right now just don't allow delete default team, in future it
|
||||
;; should raise a speific exception for signal that this acction is
|
||||
;; should raise a specific exception to signal that this action is
|
||||
;; not allowed.
|
||||
|
||||
(sv/defmethod ::delete-team
|
||||
@@ -201,8 +201,8 @@
|
||||
(let [perms (teams/get-permissions conn profile-id team-id)
|
||||
;; We retrieve all team members instead of query the
|
||||
;; database for a single member. This is just for
|
||||
;; convenience, if this bocomes a bottleneck or problematic,
|
||||
;; we will change it to more efficient fetch mechanims.
|
||||
;; convenience, if this becomes a bottleneck or problematic,
|
||||
;; we will change it to more efficient fetch mechanisms.
|
||||
members (teams/retrieve-team-members conn team-id)
|
||||
member (d/seek #(= member-id (:id %)) members)
|
||||
|
||||
@@ -379,8 +379,7 @@
|
||||
:code :member-is-muted
|
||||
:hint "looks like the profile has reported repeatedly as spam or has permanent bounces"))
|
||||
|
||||
;; Secondly check if the invited member email is part of the
|
||||
;; global spam/bounce report.
|
||||
;; Secondly check if the invited member email is part of the global spam/bounce report.
|
||||
(when (eml/has-bounce-reports? conn email)
|
||||
(ex/raise :type :validation
|
||||
:code :email-has-permanent-bounces
|
||||
@@ -403,13 +402,21 @@
|
||||
(s/and ::create-team (s/keys :req-un [::emails ::role])))
|
||||
|
||||
(sv/defmethod ::create-team-and-invite-members
|
||||
[{:keys [pool] :as cfg} {:keys [profile-id emails role] :as params}]
|
||||
[{:keys [pool audit] :as cfg} {:keys [profile-id emails role] :as params}]
|
||||
(db/with-atomic [conn pool]
|
||||
(let [team (create-team conn params)
|
||||
profile (db/get-by-id conn :profile profile-id)]
|
||||
|
||||
;; Create invitations for all provided emails.
|
||||
(doseq [email emails]
|
||||
(audit :cmd :submit
|
||||
:type "mutation"
|
||||
:name "create-team-invitation"
|
||||
:profile-id profile-id
|
||||
:props {:email email
|
||||
:role role
|
||||
:profile-id profile-id})
|
||||
|
||||
(create-team-invitation
|
||||
(assoc cfg
|
||||
:conn conn
|
||||
|
||||
@@ -10,7 +10,6 @@
|
||||
[app.common.spec :as us]
|
||||
[app.db :as db]
|
||||
[app.loggers.audit :as audit]
|
||||
[app.metrics :as mtx]
|
||||
[app.rpc.mutations.teams :as teams]
|
||||
[app.rpc.queries.profile :as profile]
|
||||
[app.util.services :as sv]
|
||||
@@ -44,16 +43,8 @@
|
||||
::audit/props {:email email}
|
||||
::audit/profile-id profile-id}))
|
||||
|
||||
(defn- annotate-profile-activation
|
||||
"A helper for properly increase the profile-activation metric once the
|
||||
transaction is completed."
|
||||
[metrics]
|
||||
(fn []
|
||||
(let [mobj (get-in metrics [:definitions :profile-activation])]
|
||||
((::mtx/fn mobj) {:by 1}))))
|
||||
|
||||
(defmethod process-token :verify-email
|
||||
[{:keys [conn session metrics] :as cfg} _ {:keys [profile-id] :as claims}]
|
||||
[{:keys [conn session] :as cfg} _ {:keys [profile-id] :as claims}]
|
||||
(let [profile (profile/retrieve-profile conn profile-id)
|
||||
claims (assoc claims :profile profile)]
|
||||
|
||||
@@ -69,7 +60,6 @@
|
||||
|
||||
(with-meta claims
|
||||
{:transform-response ((:create session) profile-id)
|
||||
:before-complete (annotate-profile-activation metrics)
|
||||
::audit/name "verify-profile-email"
|
||||
::audit/props (audit/profile->props profile)
|
||||
::audit/profile-id (:id profile)})))
|
||||
@@ -118,77 +108,39 @@
|
||||
(assoc member :is-active true)))
|
||||
|
||||
(defmethod process-token :team-invitation
|
||||
[{:keys [session] :as cfg} {:keys [profile-id token]} {:keys [member-id] :as claims}]
|
||||
[cfg {:keys [profile-id token]} {:keys [member-id] :as claims}]
|
||||
(us/assert ::team-invitation-claims claims)
|
||||
(cond
|
||||
;; This happens when token is filled with member-id and current
|
||||
;; user is already logged in with some account.
|
||||
(and (uuid? profile-id)
|
||||
(uuid? member-id))
|
||||
;; user is already logged in with exactly invited account.
|
||||
(and (uuid? profile-id) (uuid? member-id) (= member-id profile-id))
|
||||
(let [profile (accept-invitation cfg claims)]
|
||||
(if (= member-id profile-id)
|
||||
;; If the current session is already matches the invited
|
||||
;; member, then just return the token and leave the frontend
|
||||
;; app redirect to correct team.
|
||||
(assoc claims :state :created)
|
||||
|
||||
;; If the session does not matches the invited member, replace
|
||||
;; the session with a new one matching the invited member.
|
||||
;; This techinique should be considered secure because the
|
||||
;; user clicking the link he already has access to the email
|
||||
;; account.
|
||||
(with-meta
|
||||
(assoc claims :state :created)
|
||||
{:transform-response ((:create session) member-id)
|
||||
::audit/name "accept-team-invitation"
|
||||
::audit/props (merge
|
||||
(audit/profile->props profile)
|
||||
{:team-id (:team-id claims)
|
||||
:role (:role claims)})
|
||||
::audit/profile-id profile-id})))
|
||||
|
||||
;; This happens when member-id is not filled in the invitation but
|
||||
;; the user already has an account (probably with other mail) and
|
||||
;; is already logged-in.
|
||||
(and (uuid? profile-id)
|
||||
(nil? member-id))
|
||||
(let [profile (accept-invitation cfg (assoc claims :member-id profile-id))]
|
||||
(with-meta
|
||||
(assoc claims :state :created)
|
||||
{::audit/name "accept-team-invitation"
|
||||
::audit/props (merge
|
||||
(audit/profile->props profile)
|
||||
{:team-id (:team-id claims)
|
||||
:role (:role claims)})
|
||||
::audit/profile-id profile-id}))
|
||||
|
||||
;; This happens when member-id is filled but the accessing user is
|
||||
;; not logged-in. In this case we proceed to accept invitation and
|
||||
;; leave the user logged-in.
|
||||
(and (nil? profile-id)
|
||||
(uuid? member-id))
|
||||
(let [profile (accept-invitation cfg claims)]
|
||||
(with-meta
|
||||
(assoc claims :state :created)
|
||||
{:transform-response ((:create session) member-id)
|
||||
::audit/name "accept-team-invitation"
|
||||
::audit/props (merge
|
||||
(audit/profile->props profile)
|
||||
{:team-id (:team-id claims)
|
||||
:role (:role claims)})
|
||||
::audit/profile-id member-id}))
|
||||
|
||||
;; In this case, we wait until frontend app redirect user to
|
||||
;; registeration page, the user is correctly registered and the
|
||||
;; register mutation call us again with the same token to finally
|
||||
;; create the corresponding team-profile relation from the first
|
||||
;; condition of this if.
|
||||
;; This case means that the invitation token does not match a
;; registered user, so we need to tell the frontend to redirect
;; it to the register page.
|
||||
(nil? member-id)
|
||||
{:invitation-token token
|
||||
:iss :team-invitation
|
||||
:redirect-to :auth-register
|
||||
:state :pending}
|
||||
|
||||
;; In all other cases, just tell the frontend to redirect the user
;; to the login page.
|
||||
:else
|
||||
{:invitation-token token
|
||||
:iss :team-invitation
|
||||
:redirect-to :auth-login
|
||||
:state :pending}))
|
||||
|
||||
|
||||
;; --- Default
|
||||
|
||||
(defmethod process-token :default
|
||||
|
||||
@@ -6,6 +6,8 @@
|
||||
|
||||
(ns app.rpc.queries.files
|
||||
(:require
|
||||
[app.common.data :as d]
|
||||
[app.common.pages.helpers :as cph]
|
||||
[app.common.pages.migrations :as pmg]
|
||||
[app.common.spec :as us]
|
||||
[app.common.uuid :as uuid]
|
||||
@@ -24,6 +26,7 @@
|
||||
|
||||
;; --- Helpers & Specs
|
||||
|
||||
(s/def ::frame-id ::us/uuid)
|
||||
(s/def ::id ::us/uuid)
|
||||
(s/def ::name ::us/string)
|
||||
(s/def ::project-id ::us/uuid)
|
||||
@@ -84,8 +87,8 @@
|
||||
(let [perms (get-permissions conn profile-id file-id)
|
||||
ldata (retrieve-share-link conn file-id share-id)]
|
||||
|
||||
;; NOTE: in a future when share-link becomes more powerfull and
|
||||
;; will allow us specify which parts of the app is availabel, we
|
||||
;; NOTE: in a future when share-link becomes more powerful and
|
||||
;; will allow us specify which parts of the app is available, we
|
||||
;; will probably need to tweak this function in order to expose
|
||||
;; this flags to the frontend.
|
||||
(cond
|
||||
@@ -165,6 +168,7 @@
|
||||
f.created_at,
|
||||
f.modified_at,
|
||||
f.name,
|
||||
f.revn,
|
||||
f.is_shared
|
||||
from file as f
|
||||
where f.project_id = ?
|
||||
@@ -214,11 +218,63 @@
|
||||
(some-> (retrieve-file cfg id)
|
||||
(assoc :permissions perms)))))
|
||||
|
||||
(s/def ::page
|
||||
(s/keys :req-un [::profile-id ::file-id]))
|
||||
(declare trim-file-data)
|
||||
|
||||
(defn remove-thumbnails-frames
|
||||
"Removes from data the children for frames that have a thumbnail set up"
|
||||
(s/def ::page-id ::us/uuid)
|
||||
(s/def ::object-id ::us/uuid)
|
||||
|
||||
(s/def ::trimmed-file
|
||||
(s/keys :req-un [::profile-id ::id ::object-id ::page-id]))
|
||||
|
||||
(sv/defmethod ::trimmed-file
|
||||
"Retrieve a file by its ID and trims all unnecesary content from
|
||||
it. It is mainly used for rendering a concrete object, so we don't
|
||||
need force download all shapes when only a small subset is
|
||||
necesseary."
|
||||
[{:keys [pool] :as cfg} {:keys [profile-id id] :as params}]
|
||||
(db/with-atomic [conn pool]
|
||||
(let [cfg (assoc cfg :conn conn)
|
||||
perms (get-permissions conn profile-id id)]
|
||||
(check-read-permissions! perms)
|
||||
(some-> (retrieve-file cfg id)
|
||||
(trim-file-data params)
|
||||
(assoc :permissions perms)))))
|
||||
|
||||
(defn- trim-file-data
|
||||
[file {:keys [page-id object-id]}]
|
||||
(let [page (get-in file [:data :pages-index page-id])
|
||||
objects (->> (cph/get-children-with-self (:objects page) object-id)
|
||||
(map #(dissoc % :thumbnail))
|
||||
(d/index-by :id))
|
||||
page (assoc page :objects objects)]
|
||||
(-> file
|
||||
(update :data assoc :pages-index {page-id page})
|
||||
(update :data assoc :pages [page-id]))))
|
||||
|
||||
(declare strip-frames-with-thumbnails)
|
||||
|
||||
(s/def ::strip-frames-with-thumbnails ::us/boolean)
|
||||
|
||||
(s/def ::page
|
||||
(s/keys :req-un [::profile-id ::file-id]
|
||||
:opt-un [::strip-frames-with-thumbnails]))
|
||||
|
||||
(sv/defmethod ::page
|
||||
"Retrieves the first page of the file. Used mainly for render
|
||||
thumbnails on dashboard."
|
||||
[{:keys [pool] :as cfg} {:keys [profile-id file-id] :as props}]
|
||||
(db/with-atomic [conn pool]
|
||||
(check-read-permissions! conn profile-id file-id)
|
||||
|
||||
(let [cfg (assoc cfg :conn conn)
|
||||
file (retrieve-file cfg file-id)
|
||||
page-id (get-in file [:data :pages 0])]
|
||||
(cond-> (get-in file [:data :pages-index page-id])
|
||||
(true? (:strip-frames-with-thumbnails props))
|
||||
(strip-frames-with-thumbnails)))))
|
||||
|
||||
(defn strip-frames-with-thumbnails
|
||||
"Remove unnecesary shapes from frames that have thumbnail."
|
||||
[data]
|
||||
(let [filter-shape?
|
||||
(fn [objects [id shape]]
|
||||
@@ -227,7 +283,7 @@
|
||||
(= frame-id uuid/zero)
|
||||
(not (some? (get-in objects [frame-id :thumbnail]))))))
|
||||
|
||||
;; We need to remove from the attribute :shapes its childrens because
|
||||
;; We need to remove from the attribute :shapes its children because
|
||||
;; they will not be sent in the data
|
||||
remove-frame-children
|
||||
(fn [[id shape]]
|
||||
@@ -244,22 +300,12 @@
|
||||
|
||||
(update data :objects update-objects)))
|
||||
|
||||
(sv/defmethod ::page
|
||||
[{:keys [pool] :as cfg} {:keys [profile-id file-id strip-thumbnails]}]
|
||||
(db/with-atomic [conn pool]
|
||||
(check-read-permissions! conn profile-id file-id)
|
||||
|
||||
(let [cfg (assoc cfg :conn conn)
|
||||
file (retrieve-file cfg file-id)
|
||||
page-id (get-in file [:data :pages 0])]
|
||||
(cond-> (get-in file [:data :pages-index page-id])
|
||||
strip-thumbnails
|
||||
(remove-thumbnails-frames)))))
|
||||
|
||||
;; --- Query: Shared Library Files
|
||||
|
||||
(def ^:private sql:team-shared-files
|
||||
"select f.id,
|
||||
f.revn,
|
||||
f.project_id,
|
||||
f.created_at,
|
||||
f.modified_at,
|
||||
@@ -330,6 +376,7 @@
|
||||
(def sql:team-recent-files
|
||||
"with recent_files as (
|
||||
select f.id,
|
||||
f.revn,
|
||||
f.project_id,
|
||||
f.created_at,
|
||||
f.modified_at,
|
||||
@@ -346,6 +393,7 @@
|
||||
)
|
||||
select * from recent_files where row_num <= 10;")
|
||||
|
||||
|
||||
(s/def ::team-recent-files
|
||||
(s/keys :req-un [::profile-id ::team-id]))
|
||||
|
||||
@@ -355,6 +403,25 @@
|
||||
(teams/check-read-permissions! conn profile-id team-id)
|
||||
(db/exec! conn [sql:team-recent-files team-id])))
|
||||
|
||||
|
||||
;; --- QUERY: get the thumbnail for a frame
|
||||
|
||||
(def ^:private sql:file-frame-thumbnail
|
||||
"select data
|
||||
from file_frame_thumbnail
|
||||
where file_id = ?
|
||||
and frame_id = ?")
|
||||
|
||||
(s/def ::file-frame-thumbnail
|
||||
(s/keys :req-un [::profile-id ::file-id ::frame-id]))
|
||||
|
||||
(sv/defmethod ::file-frame-thumbnail
|
||||
[{:keys [pool]} {:keys [profile-id file-id frame-id]}]
|
||||
(with-open [conn (db/open pool)]
|
||||
(check-read-permissions! conn profile-id file-id)
|
||||
(db/exec-one! conn [sql:file-frame-thumbnail file-id frame-id])))
|
||||
|
||||
|
||||
;; --- Helpers
|
||||
|
||||
(defn decode-row
|
||||
|
||||
@@ -35,9 +35,9 @@
|
||||
(s/def ::profile
|
||||
(s/keys :opt-un [::profile-id]))
|
||||
|
||||
(sv/defmethod ::profile {:auth false}
|
||||
(sv/defmethod ::profile
|
||||
{:auth false}
|
||||
[{:keys [pool] :as cfg} {:keys [profile-id] :as params}]
|
||||
|
||||
;; We need to return the anonymous profile object in two cases, when
|
||||
;; no profile-id is in session, and when db call raises not found. In all other
|
||||
;; cases we need to reraise the exception.
|
||||
@@ -111,6 +111,6 @@
|
||||
;; --- Attrs Helpers
|
||||
|
||||
(defn strip-private-attrs
|
||||
"Only selects a publicy visible profile attrs."
|
||||
"Only selects a publicly visible profile attrs."
|
||||
[row]
|
||||
(dissoc row :password :deleted-at))
|
||||
|
||||
@@ -65,7 +65,7 @@
|
||||
(ex/raise :type :not-found
|
||||
:code :object-not-found))
|
||||
|
||||
;; When we have only profile, we need to check read permissiones
|
||||
;; When we have only profile, we need to check read permissions
|
||||
;; on file.
|
||||
(when (and profile-id (not slink))
|
||||
(files/check-read-permissions! conn profile-id file-id))
|
||||
|
||||
45
backend/src/app/rpc/retry.clj
Normal file
@@ -0,0 +1,45 @@
;; This Source Code Form is subject to the terms of the Mozilla Public
;; License, v. 2.0. If a copy of the MPL was not distributed with this
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
;;
;; Copyright (c) UXBOX Labs SL

(ns app.rpc.retry
  "Fault tolerance helpers. Allows retrying some operations that we
  know are safe to retry."
  (:require
   [app.common.logging :as l]
   [app.util.services :as sv]
   [promesa.core :as p]))

(defn conflict-db-insert?
  "Check if an exception matches an insertion conflict on PostgreSQL."
  [e]
  (and (instance? org.postgresql.util.PSQLException e)
       (= "23505" (.getSQLState e))))

(defn wrap-retry
  [_ f {:keys [::matches ::sv/name]
        :or {matches (constantly false)}
        :as mdata}]

  (when (::enabled mdata)
    (l/debug :hint "wrapping retry" :name name))

  (if-let [max-retries (::max-retries mdata)]
    (fn [cfg params]
      (letfn [(run [retry]
                (-> (f cfg params)
                    (p/catch (partial handle-error retry))))

              (handle-error [retry cause]
                (if (matches cause)
                  (let [current-retry (inc retry)]
                    (l/trace :hint "running retry algorithm" :retry current-retry)
                    (if (<= current-retry max-retries)
                      (run current-retry)
                      (throw cause)))
                  (throw cause)))]
        (run 0)))
    f))

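For context, a minimal usage sketch (not part of this changeset; the method and helper names are hypothetical) showing how a service method opts into this retry wrapper through metadata, in the same way the create-comment-thread mutation above does:

;; Hypothetical method, shown only to illustrate the metadata contract.
;; On an insert conflict (SQLSTATE 23505) wrap-retry re-runs the handler
;; up to ::retry/max-retries times before rethrowing the cause.
(sv/defmethod ::create-thing
  {::retry/max-retries 3
   ::retry/matches retry/conflict-db-insert?}
  [cfg params]
  (insert-thing! cfg params))
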
67  backend/src/app/rpc/rlimit.clj  (new file)
@@ -0,0 +1,67 @@
;; This Source Code Form is subject to the terms of the Mozilla Public
;; License, v. 2.0. If a copy of the MPL was not distributed with this
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
;;
;; Copyright (c) UXBOX Labs SL

(ns app.rpc.rlimit
  "Resource usage limits (in other words: semaphores)."
  (:require
   [app.common.data :as d]
   [app.common.logging :as l]
   [app.metrics :as mtx]
   [app.util.services :as sv]
   [promesa.core :as p]))

(defprotocol IAsyncSemaphore
  (acquire! [_])
  (release! [_]))

(defn semaphore
  [{:keys [permits metrics name]}]
  (let [name   (d/name name)
        used   (volatile! 0)
        queue  (volatile! (d/queue))
        labels (into-array String [name])]
    (reify IAsyncSemaphore
      (acquire! [this]
        (let [d (p/deferred)]
          (locking this
            (if (< @used permits)
              (do
                (vswap! used inc)
                (p/resolve! d))
              (vswap! queue conj d)))

          (mtx/run! metrics {:id :rlimit-used-permits :val @used :labels labels})
          (mtx/run! metrics {:id :rlimit-queued-submissions :val (count @queue) :labels labels})
          (mtx/run! metrics {:id :rlimit-acquires-total :inc 1 :labels labels})
          d))

      (release! [this]
        (locking this
          (if-let [item (peek @queue)]
            (do
              (vswap! queue pop)
              (p/resolve! item))
            (when (pos? @used)
              (vswap! used dec))))

        (mtx/run! metrics {:id :rlimit-used-permits :val @used :labels labels})
        (mtx/run! metrics {:id :rlimit-queued-submissions :val (count @queue) :labels labels})
        ))))

(defn wrap-rlimit
  [{:keys [metrics] :as cfg} f mdata]
  (if-let [permits (::permits mdata)]
    (let [sem (semaphore {:permits permits
                          :metrics metrics
                          :name (::sv/name mdata)})]
      (l/debug :hint "wrapping rlimit" :handler (::sv/name mdata) :permits permits)
      (fn [cfg params]
        (-> (acquire! sem)
            (p/then (fn [_] (f cfg params)))
            (p/finally (fn [_ _] (release! sem))))))
    f))
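A usage sketch for the semaphore itself (illustrative, not part of the diff). The metrics value is assumed to be the app.metrics registry of the running system, and do-expensive-work is a hypothetical function standing in for the guarded body.

(require '[app.rpc.rlimit :as rlimit]
         '[promesa.core :as p])

;; at most two bodies run concurrently; further callers wait in the queue
(def sem
  (rlimit/semaphore {:permits 2
                     :metrics metrics            ;; assumed: system metrics component
                     :name    :example-limiter}))

(-> (rlimit/acquire! sem)
    (p/then (fn [_] (do-expensive-work)))        ;; hypothetical work function
    (p/finally (fn [_ _] (rlimit/release! sem))))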
@@ -7,6 +7,7 @@
|
||||
(ns app.setup
|
||||
"Initial data setup of instance."
|
||||
(:require
|
||||
[app.common.logging :as l]
|
||||
[app.common.uuid :as uuid]
|
||||
[app.db :as db]
|
||||
[buddy.core.codecs :as bc]
|
||||
@@ -14,55 +15,49 @@
|
||||
[clojure.spec.alpha :as s]
|
||||
[integrant.core :as ig]))
|
||||
|
||||
(declare initialize-instance-id!)
|
||||
(declare initialize-secret-key!)
|
||||
(declare retrieve-all)
|
||||
(defn- generate-random-key
|
||||
[]
|
||||
(-> (bn/random-bytes 64)
|
||||
(bc/bytes->b64u)
|
||||
(bc/bytes->str)))
|
||||
|
||||
(defn- retrieve-all
|
||||
[conn]
|
||||
(->> (db/query conn :server-prop {:preload true})
|
||||
(filter #(not= "secret-key" (:id %)))
|
||||
(map (fn [row]
|
||||
[(keyword (:id row))
|
||||
(db/decode-transit-pgobject (:content row))]))
|
||||
(into {})))
|
||||
|
||||
(defn- handle-instance-id
|
||||
[instance-id conn read-only?]
|
||||
(or instance-id
|
||||
(let [instance-id (uuid/random)]
|
||||
(when-not read-only?
|
||||
(try
|
||||
(db/insert! conn :server-prop
|
||||
{:id "instance-id"
|
||||
:preload true
|
||||
:content (db/tjson instance-id)})
|
||||
(catch Throwable cause
|
||||
(l/warn :hint "unable to persist instance-id"
|
||||
:instance-id instance-id
|
||||
:cause cause))))
|
||||
instance-id)))
|
||||
|
||||
(defmethod ig/pre-init-spec ::props [_]
|
||||
(s/keys :req-un [::db/pool]))
|
||||
|
||||
(defmethod ig/init-key ::props
|
||||
[_ {:keys [pool] :as cfg}]
|
||||
[_ {:keys [pool key] :as cfg}]
|
||||
(db/with-atomic [conn pool]
|
||||
(let [cfg (assoc cfg :conn conn)]
|
||||
(initialize-secret-key! cfg)
|
||||
(initialize-instance-id! cfg)
|
||||
(retrieve-all cfg))))
|
||||
(db/xact-lock! conn 0)
|
||||
(when-not key
|
||||
(l/warn :hint (str "using autogenerated secret-key, it will change on each restart and will invalidate "
|
||||
"all sessions on each restart, it is hightly recommeded setting up the "
|
||||
"PENPOT_SECRET_KEY environment variable")))
|
||||
|
||||
(def sql:upsert-secret-key
|
||||
"insert into server_prop (id, preload, content)
|
||||
values ('secret-key', true, ?::jsonb)
|
||||
on conflict (id) do update set content = ?::jsonb")
|
||||
|
||||
(def sql:insert-secret-key
|
||||
"insert into server_prop (id, preload, content)
|
||||
values ('secret-key', true, ?::jsonb)
|
||||
on conflict (id) do nothing")
|
||||
|
||||
(defn- initialize-secret-key!
|
||||
[{:keys [conn key] :as cfg}]
|
||||
(if key
|
||||
(let [key (db/tjson key)]
|
||||
(db/exec-one! conn [sql:upsert-secret-key key key]))
|
||||
(let [key (-> (bn/random-bytes 64)
|
||||
(bc/bytes->b64u)
|
||||
(bc/bytes->str))
|
||||
key (db/tjson key)]
|
||||
(db/exec-one! conn [sql:insert-secret-key key]))))
|
||||
|
||||
(defn- initialize-instance-id!
|
||||
[{:keys [conn] :as cfg}]
|
||||
(let [iid (uuid/random)]
|
||||
|
||||
(db/insert! conn :server-prop
|
||||
{:id "instance-id"
|
||||
:preload true
|
||||
:content (db/tjson iid)}
|
||||
{:on-conflict-do-nothing true})))
|
||||
|
||||
(defn- retrieve-all
|
||||
[{:keys [conn] :as cfg}]
|
||||
(reduce (fn [acc row]
|
||||
(assoc acc (keyword (:id row)) (db/decode-transit-pgobject (:content row))))
|
||||
{}
|
||||
(db/query conn :server-prop {:preload true})))
|
||||
(let [stored (-> (retrieve-all conn)
|
||||
(assoc :secret-key (or key (generate-random-key))))]
|
||||
(update stored :instance-id handle-instance-id conn (db/read-only? pool)))))
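As a side note, the generate-random-key recipe above can be reproduced at a REPL to mint a stable value for the PENPOT_SECRET_KEY environment variable. This is an illustration only; it assumes bn is buddy.core.nonce, matching the aliases used by this file.

(require '[buddy.core.codecs :as bc]
         '[buddy.core.nonce :as bn])

;; 64 random bytes, URL-safe base64, returned as a string
(-> (bn/random-bytes 64)
    (bc/bytes->b64u)
    (bc/bytes->str))
;; => a fresh random key string on every call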
|
||||
|
||||
@@ -2,9 +2,13 @@
|
||||
"A main namespace for server repl."
|
||||
#_:clj-kondo/ignore
|
||||
(:require
|
||||
[app.common.data :as d]
|
||||
[app.common.exceptions :as ex]
|
||||
[app.common.logging :as l]
|
||||
[app.common.pages :as cp]
|
||||
[app.common.uuid :as uuid]
|
||||
[app.common.pages.migrations :as pmg]
|
||||
[app.common.spec.file :as spec.file]
|
||||
[app.common.uuid :as uuid]
|
||||
[app.config :as cfg]
|
||||
[app.db :as db]
|
||||
[app.db.sql :as sql]
|
||||
@@ -12,12 +16,15 @@
|
||||
[app.rpc.queries.profile :as prof]
|
||||
[app.srepl.dev :as dev]
|
||||
[app.util.blob :as blob]
|
||||
[app.util.time :as dt]
|
||||
[fipp.edn :refer [pprint]]
|
||||
[clojure.spec.alpha :as s]
|
||||
[cuerdas.core :as str]
|
||||
[clojure.pprint :refer [pprint]]))
|
||||
[expound.alpha :as expound]))
|
||||
|
||||
(defn update-file
|
||||
([id f] (update-file id f false))
|
||||
([id f save?]
|
||||
([system id f] (update-file system id f false))
|
||||
([system id f save?]
|
||||
(db/with-atomic [conn (:app.db/pool system)]
|
||||
(let [file (db/get-by-id conn :file id {:for-update true})
|
||||
file (-> file
|
||||
@@ -32,8 +39,8 @@
|
||||
{:id (:id file)}))
|
||||
(update file :data blob/decode)))))
|
||||
|
||||
(defn update-file-raw
|
||||
[id data]
|
||||
(defn reset-file-data
|
||||
[system id data]
|
||||
(db/with-atomic [conn (:app.db/pool system)]
|
||||
(db/update! conn :file
|
||||
{:data data}
|
||||
@@ -41,36 +48,13 @@
|
||||
|
||||
(defn get-file
|
||||
[system id]
|
||||
(with-open [conn (db/open (:app.db/pool system))]
|
||||
(let [file (db/get-by-id conn :file id)]
|
||||
(-> file
|
||||
(update :data app.util.blob/decode)
|
||||
(update :data pmg/migrate-data)))))
|
||||
|
||||
|
||||
;; Examples:
|
||||
;; (def backup (update-file #uuid "1586e1f0-3e02-11eb-b1d2-556a2f641513" identity))
|
||||
;; (def x (update-file
|
||||
;; #uuid "1586e1f0-3e02-11eb-b1d2-556a2f641513"
|
||||
;; (fn [{:keys [data] :as file}]
|
||||
;; (update-in data [:pages-index #uuid "878278c0-3ef0-11eb-9d67-8551e7624f43" :objects] dissoc nil))))
|
||||
|
||||
;; Migrate
|
||||
|
||||
(defn update-file-data-blob-format
|
||||
[system]
|
||||
(db/with-atomic [conn (:app.db/pool system)]
|
||||
(doseq [id (->> (db/exec! conn ["select id from file;"]) (map :id))]
|
||||
(let [{:keys [data]} (db/get-by-id conn :file id {:columns [:id :data]})]
|
||||
(prn "Updating file:" id)
|
||||
(db/update! conn :file
|
||||
{:data (-> (blob/decode data)
|
||||
(blob/encode {:version 2}))}
|
||||
{:id id})))))
|
||||
|
||||
(-> (:app.db/pool system)
|
||||
(db/get-by-id :file id)
|
||||
(update :data app.util.blob/decode)
|
||||
(update :data pmg/migrate-data)))
|
||||
|
||||
(defn duplicate-file
|
||||
"This is a raw version of duplication of file just only for forensic analisys"
|
||||
"This is a raw version of duplication of file just only for forensic analysis"
|
||||
[system file-id email]
|
||||
(db/with-atomic [conn (:app.db/pool system)]
|
||||
(when-let [profile (some->> (prof/retrieve-profile-data-by-email conn (str/lower email))
|
||||
@@ -81,3 +65,87 @@
|
||||
:project-id (:default-project-id profile))]
|
||||
(db/insert! conn :file params)
|
||||
(:id file))))))
|
||||
|
||||
(defn verify-files
|
||||
[system {:keys [age sleep chunk-size max-chunks stop-on-error? verbose?]
|
||||
:or {sleep 1000
|
||||
age "72h"
|
||||
chunk-size 10
|
||||
verbose? false
|
||||
stop-on-error? true
|
||||
max-chunks ##Inf}}]
|
||||
|
||||
(letfn [(retrieve-chunk [conn cursor]
|
||||
(let [sql (str "select id, name, modified_at, data from file "
|
||||
" where modified_at > ? and deleted_at is null "
|
||||
" order by modified_at asc limit ?")
|
||||
age (if cursor
|
||||
cursor
|
||||
(-> (dt/now) (dt/minus age)))]
|
||||
(seq (db/exec! conn [sql age chunk-size]))))
|
||||
|
||||
(validate-item [{:keys [id data modified-at] :as file}]
|
||||
(let [data (blob/decode data)
|
||||
valid? (s/valid? ::spec.file/data data)]
|
||||
|
||||
(l/debug :hint "validated file"
|
||||
:file-id id
|
||||
:age (-> (dt/diff modified-at (dt/now))
|
||||
(dt/truncate :minutes)
|
||||
(str)
|
||||
(subs 2)
|
||||
(str/lower))
|
||||
:valid valid?)
|
||||
|
||||
(when (and (not valid?) verbose?)
|
||||
(let [edata (-> (s/explain-data ::spec.file/data data)
|
||||
(update ::s/problems #(take 5 %)))]
|
||||
(binding [s/*explain-out* expound/printer]
|
||||
(l/warn ::l/raw (with-out-str (s/explain-out edata))))))
|
||||
|
||||
(when (and (not valid?) stop-on-error?)
|
||||
(throw (ex-info "penpot/abort" {})))
|
||||
|
||||
valid?))
|
||||
|
||||
(validate-chunk [chunk]
|
||||
(loop [items chunk
|
||||
success 0
|
||||
errored 0]
|
||||
|
||||
(if-let [item (first items)]
|
||||
(if (validate-item item)
|
||||
(recur (rest items) (inc success) errored)
|
||||
(recur (rest items) success (inc errored)))
|
||||
[(:modified-at (last chunk))
|
||||
success
|
||||
errored])))
|
||||
|
||||
(fmt-result [ns ne]
|
||||
{:total (+ ns ne)
|
||||
:errors ne
|
||||
:success ns})
|
||||
|
||||
]
|
||||
|
||||
(try
|
||||
(db/with-atomic [conn (:app.db/pool system)]
|
||||
(loop [cursor nil
|
||||
chunks 0
|
||||
success 0
|
||||
errors 0]
|
||||
(if (< chunks max-chunks)
|
||||
(if-let [chunk (retrieve-chunk conn cursor)]
|
||||
(let [[cursor success' errors'] (validate-chunk chunk)]
|
||||
(Thread/sleep (inst-ms (dt/duration sleep)))
|
||||
(recur cursor
|
||||
(inc chunks)
|
||||
(+ success success')
|
||||
(+ errors errors')))
|
||||
(fmt-result success errors))
|
||||
(fmt-result success errors))))
|
||||
(catch Throwable cause
|
||||
(when (not= "penpot/abort" (ex-message cause))
|
||||
(throw cause))
|
||||
:error))))
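For context, a REPL usage sketch for verify-files (illustrative, not part of the diff); system is assumed to be the running integrant system map, as with the other helpers in this namespace, and the result shape follows fmt-result above.

;; validate files modified in the last 24h, 20 per chunk, logging spec
;; failures verbosely instead of aborting on the first invalid file
(verify-files system {:age "24h"
                      :chunk-size 20
                      :stop-on-error? false
                      :verbose? true})
;; => {:total <n> :errors <n> :success <n>}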
|
||||
|
||||
|
||||
@@ -18,11 +18,9 @@
|
||||
[app.storage.impl :as impl]
|
||||
[app.storage.s3 :as ss3]
|
||||
[app.util.time :as dt]
|
||||
[app.worker :as wrk]
|
||||
[clojure.spec.alpha :as s]
|
||||
[datoteka.core :as fs]
|
||||
[integrant.core :as ig]
|
||||
[promesa.exec :as px]))
|
||||
[integrant.core :as ig]))
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; Storage Module State
|
||||
@@ -40,7 +38,7 @@
|
||||
:db ::sdb/backend))))
|
||||
|
||||
(defmethod ig/pre-init-spec ::storage [_]
|
||||
(s/keys :req-un [::wrk/executor ::db/pool ::backends]))
|
||||
(s/keys :req-un [::db/pool ::backends]))
|
||||
|
||||
(defmethod ig/prep-key ::storage
|
||||
[_ {:keys [backends] :as cfg}]
|
||||
@@ -53,78 +51,74 @@
|
||||
(assoc :backends (d/without-nils backends))))
|
||||
|
||||
(s/def ::storage
|
||||
(s/keys :req-un [::backends ::wrk/executor ::db/pool]))
|
||||
(s/keys :req-un [::backends ::db/pool]))
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; Database Objects
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
(defrecord StorageObject [id size created-at expired-at backend])
|
||||
(defrecord StorageObject [id size created-at expired-at touched-at backend])
|
||||
|
||||
(defn storage-object?
|
||||
[v]
|
||||
(instance? StorageObject v))
|
||||
|
||||
(def ^:private
|
||||
sql:insert-storage-object
|
||||
"insert into storage_object (id, size, backend, metadata)
|
||||
values (?, ?, ?, ?::jsonb)
|
||||
returning *")
|
||||
(s/def ::storage-object storage-object?)
|
||||
(s/def ::storage-content impl/content?)
|
||||
|
||||
(def ^:private
|
||||
sql:insert-storage-object-with-expiration
|
||||
"insert into storage_object (id, size, backend, metadata, deleted_at)
|
||||
values (?, ?, ?, ?::jsonb, ?)
|
||||
returning *")
|
||||
|
||||
(defn- insert-object
|
||||
[conn id size backend mdata expiration]
|
||||
(if expiration
|
||||
(db/exec-one! conn [sql:insert-storage-object-with-expiration id size backend mdata expiration])
|
||||
(db/exec-one! conn [sql:insert-storage-object id size backend mdata])))
|
||||
(defn- clone-database-object
|
||||
;; If we in this condition branch, this means we come from the
|
||||
;; clone-object, so we just need to clone it with a new backend.
|
||||
[{:keys [conn backend]} object]
|
||||
(let [id (uuid/random)
|
||||
mdata (meta object)
|
||||
result (db/insert! conn :storage-object
|
||||
{:id id
|
||||
:size (:size object)
|
||||
:backend (name backend)
|
||||
:metadata (db/tjson mdata)
|
||||
:deleted-at (:expired-at object)
|
||||
:touched-at (:touched-at object)})]
|
||||
(assoc object
|
||||
:id (:id result)
|
||||
:backend backend
|
||||
:created-at (:created-at result)
|
||||
:touched-at (:touched-at result))))
|
||||
|
||||
(defn- create-database-object
|
||||
[{:keys [conn backend]} {:keys [content] :as object}]
|
||||
(if (instance? StorageObject object)
|
||||
;; If we in this condition branch, this means we come from the
|
||||
;; clone-object, so we just need to clone it with a new backend.
|
||||
(let [id (uuid/random)
|
||||
mdata (meta object)
|
||||
result (insert-object conn
|
||||
id
|
||||
(:size object)
|
||||
(name backend)
|
||||
(db/tjson mdata)
|
||||
(:expired-at object))]
|
||||
(assoc object
|
||||
:id (:id result)
|
||||
:backend backend
|
||||
:created-at (:created-at result)))
|
||||
(let [id (uuid/random)
|
||||
mdata (dissoc object :content :expired-at)
|
||||
result (insert-object conn
|
||||
id
|
||||
(count content)
|
||||
(name backend)
|
||||
(db/tjson mdata)
|
||||
(:expired-at object))]
|
||||
(StorageObject. (:id result)
|
||||
(:size result)
|
||||
(:created-at result)
|
||||
(:deleted-at result)
|
||||
backend
|
||||
mdata
|
||||
nil))))
|
||||
(us/assert ::storage-content content)
|
||||
(let [id (uuid/random)
|
||||
mdata (dissoc object :content :expired-at :touched-at)
|
||||
|
||||
result (db/insert! conn :storage-object
|
||||
{:id id
|
||||
:size (count content)
|
||||
:backend (name backend)
|
||||
:metadata (db/tjson mdata)
|
||||
:deleted-at (:expired-at object)
|
||||
:touched-at (:touched-at object)})]
|
||||
|
||||
(StorageObject. (:id result)
|
||||
(:size result)
|
||||
(:created-at result)
|
||||
(:deleted-at result)
|
||||
(:touched-at result)
|
||||
backend
|
||||
mdata
|
||||
nil)))
|
||||
|
||||
(def ^:private sql:retrieve-storage-object
|
||||
"select * from storage_object where id = ? and (deleted_at is null or deleted_at > now())")
|
||||
|
||||
(defn row->storage-object [res]
|
||||
(let [mdata (some-> (:metadata res) (db/decode-transit-pgobject))]
|
||||
(let [mdata (or (some-> (:metadata res) (db/decode-transit-pgobject)) {})]
|
||||
(StorageObject. (:id res)
|
||||
(:size res)
|
||||
(:created-at res)
|
||||
(:deleted-at res)
|
||||
(:touched-at res)
|
||||
(keyword (:backend res))
|
||||
mdata
|
||||
nil)))
|
||||
@@ -142,10 +136,6 @@
|
||||
(let [result (db/exec-one! conn [sql:delete-storage-object id])]
|
||||
(pos? (:next.jdbc/update-count result))))
|
||||
|
||||
(defn- register-recheck
|
||||
[{:keys [pool] :as storage} backend id]
|
||||
(db/insert! pool :storage-pending {:id id :backend (name backend)}))
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; API
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
@@ -170,17 +160,13 @@
|
||||
|
||||
(defn put-object
|
||||
"Creates a new object with the provided content."
|
||||
[{:keys [pool conn backend executor] :as storage} {:keys [content] :as params}]
|
||||
[{:keys [pool conn backend] :as storage} {:keys [content] :as params}]
|
||||
(us/assert ::storage storage)
|
||||
(us/assert impl/content? content)
|
||||
(us/assert ::storage-content content)
|
||||
(us/assert ::us/keyword backend)
|
||||
(let [storage (assoc storage :conn (or conn pool))
|
||||
object (create-database-object storage params)]
|
||||
|
||||
;; Schedule to execute in background; in an other transaction and
|
||||
;; register the currently created storage object id for a later
|
||||
;; recheck.
|
||||
(px/run! executor #(register-recheck storage backend (:id object)))
|
||||
|
||||
;; Store the data finally on the underlying storage subsystem.
|
||||
(-> (impl/resolve-backend storage backend)
|
||||
(impl/put-object object content))
|
||||
@@ -188,12 +174,14 @@
|
||||
object))
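An illustrative call for the simplified put-object above (not part of the diff). The storage value is assumed to be the initialized storage component with a default backend configured, and impl/content is assumed to accept a plain string here.

(def plain-object
  (put-object storage {:content      (impl/content "hello world")
                       :content-type "text/plain"
                       :reference    :file-media-object}))
;; extra keys such as :content-type and :reference end up in the object
;; metadata (see create-database-object above); gc-touched later uses the
;; :reference value to pick the right nrefs query.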
|
||||
|
||||
(defn clone-object
|
||||
"Creates a clone of the provided object using backend basded efficient
|
||||
"Creates a clone of the provided object using backend based efficient
|
||||
method. Always clones objects to the configured default."
|
||||
[{:keys [pool conn] :as storage} object]
|
||||
[{:keys [pool conn backend] :as storage} object]
|
||||
(us/assert ::storage storage)
|
||||
(us/assert ::storage-object object)
|
||||
(us/assert ::us/keyword backend)
|
||||
(let [storage (assoc storage :conn (or conn pool))
|
||||
object* (create-database-object storage object)]
|
||||
object* (clone-database-object storage object)]
|
||||
(if (= (:backend object) (:backend storage))
|
||||
;; if the source and destination backends are the same, we
|
||||
;; proceed to use the fast path with specific copy
|
||||
@@ -269,7 +257,7 @@
|
||||
;; A task responsible to permanently delete already marked as deleted
|
||||
;; storage files.
|
||||
|
||||
(declare sql:retrieve-deleted-objects)
|
||||
(declare sql:retrieve-deleted-objects-chunk)
|
||||
|
||||
(s/def ::min-age ::dt/duration)
|
||||
|
||||
@@ -278,44 +266,46 @@
|
||||
|
||||
(defmethod ig/init-key ::gc-deleted-task
|
||||
[_ {:keys [pool storage min-age] :as cfg}]
|
||||
(letfn [(group-by-backend [rows]
|
||||
(let [conj (fnil conj [])]
|
||||
[(reduce (fn [acc {:keys [id backend]}]
|
||||
(update acc (keyword backend) conj id))
|
||||
{}
|
||||
rows)
|
||||
(count rows)]))
|
||||
(letfn [(retrieve-deleted-objects-chunk [conn cursor]
|
||||
(let [min-age (db/interval min-age)
|
||||
rows (db/exec! conn [sql:retrieve-deleted-objects-chunk min-age cursor])]
|
||||
[(some-> rows peek :created-at)
|
||||
(some->> (seq rows) (d/group-by' #(-> % :backend keyword) :id) seq)]))
|
||||
|
||||
(retrieve-deleted-objects [conn]
|
||||
(let [min-age (db/interval min-age)
|
||||
rows (db/exec! conn [sql:retrieve-deleted-objects min-age])]
|
||||
(some-> (seq rows) (group-by-backend))))
|
||||
(->> (d/iteration (fn [cursor]
|
||||
(retrieve-deleted-objects-chunk conn cursor))
|
||||
:initk (dt/now)
|
||||
:vf second
|
||||
:kf first)
|
||||
(sequence cat)))
|
||||
|
||||
(delete-in-bulk [conn [backend ids]]
|
||||
(delete-in-bulk [conn backend ids]
|
||||
(let [backend (impl/resolve-backend storage backend)
|
||||
backend (assoc backend :conn conn)]
|
||||
(impl/del-objects-in-bulk backend ids)))]
|
||||
|
||||
(fn [_]
|
||||
(db/with-atomic [conn pool]
|
||||
(loop [n 0]
|
||||
(if-let [[groups total] (retrieve-deleted-objects conn)]
|
||||
(loop [total 0
|
||||
groups (retrieve-deleted-objects conn)]
|
||||
(if-let [[backend ids] (first groups)]
|
||||
(do
|
||||
(run! (partial delete-in-bulk conn) groups)
|
||||
(recur (+ n ^long total)))
|
||||
(delete-in-bulk conn backend ids)
|
||||
(recur (+ total (count ids))
|
||||
(rest groups)))
|
||||
(do
|
||||
(l/info :task "gc-deleted"
|
||||
:action "permanently delete items"
|
||||
:count n)
|
||||
{:deleted n})))))))
|
||||
(l/info :task "gc-deleted" :count total)
|
||||
{:deleted total})))))))
|
||||
|
||||
(def sql:retrieve-deleted-objects
|
||||
(def sql:retrieve-deleted-objects-chunk
|
||||
"with items_part as (
|
||||
select s.id
|
||||
from storage_object as s
|
||||
where s.deleted_at is not null
|
||||
and s.deleted_at < (now() - ?::interval)
|
||||
order by s.deleted_at
|
||||
and s.created_at < ?
|
||||
order by s.created_at desc
|
||||
limit 100
|
||||
)
|
||||
delete from storage_object
|
||||
@@ -323,160 +313,108 @@
|
||||
returning *;")
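For readers unfamiliar with the pattern, a minimal sketch of the cursor-driven chunking used by this task, assuming d/iteration mirrors clojure.core/iteration: each step returns [next-cursor chunk], :kf pulls the cursor for the following query and :vf the chunk to consume, and iteration stops once the cursor comes back nil. The fetch-chunk function is hypothetical.

(defn all-chunks
  "Lazily walk every chunk reachable from the starting cursor."
  [conn initial-cursor]
  (->> (iteration (fn [cursor]
                    ;; hypothetical: returns [last-created-at rows], or [nil nil]
                    ;; when there is nothing left to fetch
                    (fetch-chunk conn cursor))
                  :initk initial-cursor
                  :vf second
                  :kf first)
       (sequence cat)))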
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; Garbage Collection: Analize touched objects
|
||||
;; Garbage Collection: Analyze touched objects
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
;; This task is part of the garbage collection of storage objects and
|
||||
;; is responsible on analizing the touched objects and mark them for deletion
|
||||
;; if corresponds.
|
||||
;; This task is part of the garbage collection of storage objects and is responsible on analyzing the touched
|
||||
;; objects and mark them for deletion if corresponds.
|
||||
;;
|
||||
;; When file_media_object is deleted, the depending storage_object are
|
||||
;; marked as touched. This means that some files that depend on a
|
||||
;; concrete storage_object are no longer exists and maybe this
|
||||
;; storage_object is no longer necessary and can be ellegible for
|
||||
;; elimination. This task peridically analizes touched objects and
|
||||
;; mark them as freeze (means that has other references and the object
|
||||
;; is still valid) or deleted (no more references to this object so is
|
||||
;; ready to be deleted).
|
||||
;; For example: when file_media_object is deleted, the depending storage_object are marked as touched. This
|
||||
;; means that some files that depend on a concrete storage_object are no longer exists and maybe this
|
||||
;; storage_object is no longer necessary and can be eligible for elimination. This task periodically analyzes
|
||||
;; touched objects and mark them as freeze (means that has other references and the object is still valid) or
|
||||
;; deleted (no more references to this object so is ready to be deleted).
|
||||
|
||||
(declare sql:retrieve-touched-objects)
|
||||
(declare sql:retrieve-touched-objects-chunk)
|
||||
(declare sql:retrieve-file-media-object-nrefs)
|
||||
(declare sql:retrieve-team-font-variant-nrefs)
|
||||
|
||||
(defmethod ig/pre-init-spec ::gc-touched-task [_]
|
||||
(s/keys :req-un [::db/pool]))
|
||||
|
||||
(defmethod ig/init-key ::gc-touched-task
|
||||
[_ {:keys [pool] :as cfg}]
|
||||
(letfn [(group-results [rows]
|
||||
(let [conj (fnil conj [])]
|
||||
(reduce (fn [acc {:keys [id nrefs]}]
|
||||
(if (pos? nrefs)
|
||||
(update acc :to-freeze conj id)
|
||||
(update acc :to-delete conj id)))
|
||||
{}
|
||||
rows)))
|
||||
(letfn [(has-team-font-variant-nrefs? [conn id]
|
||||
(-> (db/exec-one! conn [sql:retrieve-team-font-variant-nrefs id id id id]) :nrefs pos?))
|
||||
|
||||
(retrieve-touched [conn]
|
||||
(let [rows (db/exec! conn [sql:retrieve-touched-objects])]
|
||||
(some-> (seq rows) (group-results))))
|
||||
|
||||
(mark-delete-in-bulk [conn ids]
|
||||
(db/exec-one! conn ["update storage_object set deleted_at=now(), touched_at=null where id = ANY(?)"
|
||||
(db/create-array conn "uuid" (into-array java.util.UUID ids))]))
|
||||
(has-file-media-object-nrefs? [conn id]
|
||||
(-> (db/exec-one! conn [sql:retrieve-file-media-object-nrefs id id]) :nrefs pos?))
|
||||
|
||||
(mark-freeze-in-bulk [conn ids]
|
||||
(db/exec-one! conn ["update storage_object set touched_at=null where id = ANY(?)"
|
||||
(db/create-array conn "uuid" (into-array java.util.UUID ids))]))]
|
||||
(db/create-array conn "uuid" ids)]))
|
||||
|
||||
(mark-delete-in-bulk [conn ids]
|
||||
(db/exec-one! conn ["update storage_object set deleted_at=now(), touched_at=null where id = ANY(?)"
|
||||
(db/create-array conn "uuid" ids)]))
|
||||
|
||||
(retrieve-touched-chunk [conn cursor]
|
||||
(let [rows (->> (db/exec! conn [sql:retrieve-touched-objects-chunk cursor])
|
||||
(mapv #(d/update-when % :metadata db/decode-transit-pgobject)))
|
||||
kw (fn [o] (if (keyword? o) o (keyword o)))]
|
||||
(when (seq rows)
|
||||
[(-> rows peek :created-at)
|
||||
;; NOTE: we use the :file-media-object as default value for backward compatibility because when we
|
||||
;; deploy it we can have old backend instances running in the same time as the new one and we can
|
||||
;; still have storage-objects created without reference value. And we know that if it does not
|
||||
;; have value, it means :file-media-object.
|
||||
(d/group-by' #(or (some-> % :metadata :reference kw) :file-media-object) :id rows)])))
|
||||
|
||||
(retrieve-touched [conn]
|
||||
(->> (d/iteration (fn [cursor]
|
||||
(retrieve-touched-chunk conn cursor))
|
||||
:initk (dt/now)
|
||||
:vf second
|
||||
:kf first)
|
||||
(sequence cat)))
|
||||
|
||||
(process-objects! [conn pred-fn ids]
|
||||
(loop [to-freeze #{}
|
||||
to-delete #{}
|
||||
ids (seq ids)]
|
||||
(if-let [id (first ids)]
|
||||
(if (pred-fn conn id)
|
||||
(recur (conj to-freeze id) to-delete (rest ids))
|
||||
(recur to-freeze (conj to-delete id) (rest ids)))
|
||||
|
||||
(do
|
||||
(some->> (seq to-freeze) (mark-freeze-in-bulk conn))
|
||||
(some->> (seq to-delete) (mark-delete-in-bulk conn))
|
||||
[(count to-freeze) (count to-delete)]))))
|
||||
]
|
||||
|
||||
(fn [_]
|
||||
(db/with-atomic [conn pool]
|
||||
(loop [cntf 0
|
||||
cntd 0]
|
||||
(if-let [{:keys [to-delete to-freeze]} (retrieve-touched conn)]
|
||||
(loop [to-freeze 0
|
||||
to-delete 0
|
||||
groups (retrieve-touched conn)]
|
||||
(if-let [[reference ids] (first groups)]
|
||||
(let [[f d] (case reference
|
||||
:file-media-object (process-objects! conn has-file-media-object-nrefs? ids)
|
||||
:team-font-variant (process-objects! conn has-team-font-variant-nrefs? ids)
|
||||
(ex/raise :type :internal
|
||||
:code :unexpected-unknown-reference
|
||||
:hint (format "unknown reference %s" (pr-str reference))))]
|
||||
(recur (+ to-freeze f)
|
||||
(+ to-delete d)
|
||||
(rest groups)))
|
||||
(do
|
||||
(when (seq to-delete) (mark-delete-in-bulk conn to-delete))
|
||||
(when (seq to-freeze) (mark-freeze-in-bulk conn to-freeze))
|
||||
(recur (+ cntf (count to-freeze))
|
||||
(+ cntd (count to-delete))))
|
||||
(do
|
||||
(l/info :task "gc-touched"
|
||||
:action "mark freeze"
|
||||
:count cntf)
|
||||
(l/info :task "gc-touched"
|
||||
:action "mark for deletion"
|
||||
:count cntd)
|
||||
{:freeze cntf :delete cntd})))))))
|
||||
(l/info :task "gc-touched" :to-freeze to-freeze :to-delete to-delete)
|
||||
{:freeze to-freeze :delete to-delete})))))))
|
||||
|
||||
(def sql:retrieve-touched-objects
|
||||
"select so.id,
|
||||
((select count(*) from file_media_object where media_id = so.id) +
|
||||
(select count(*) from file_media_object where thumbnail_id = so.id)) as nrefs
|
||||
from storage_object as so
|
||||
(def sql:retrieve-touched-objects-chunk
|
||||
"select so.* from storage_object as so
|
||||
where so.touched_at is not null
|
||||
order by so.touched_at
|
||||
limit 100;")
|
||||
and so.created_at < ?
|
||||
order by so.created_at desc
|
||||
limit 500;")
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; Recheck Stalled Task
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
(def sql:retrieve-file-media-object-nrefs
|
||||
"select ((select count(*) from file_media_object where media_id = ?) +
|
||||
(select count(*) from file_media_object where thumbnail_id = ?)) as nrefs")
|
||||
|
||||
;; Because the physical storage (filesystem, s3, ... except db) is not
|
||||
;; transactional, in some situations we can found physical object
|
||||
;; leakage. That situations happens when the transaction that writes
|
||||
;; the file aborts, leaving the file written to the underlying storage
|
||||
;; but the reference on the database is lost with the rollback.
|
||||
;;
|
||||
;; For this situations we need to write a "log" of inserted files that
|
||||
;; are checked in some time in future. If physical file exists but the
|
||||
;; database refence does not exists means that leaked file is found
|
||||
;; and is inmediatelly deleted. The responsability of this task is
|
||||
;; check that write log for possible leaked files.
|
||||
|
||||
(def recheck-min-age (dt/duration {:hours 1}))
|
||||
|
||||
(declare sql:retrieve-pending-to-recheck)
|
||||
(declare sql:exists-storage-object)
|
||||
|
||||
(defmethod ig/pre-init-spec ::recheck-task [_]
|
||||
(s/keys :req-un [::storage ::db/pool]))
|
||||
|
||||
(defmethod ig/init-key ::recheck-task
|
||||
[_ {:keys [pool storage] :as cfg}]
|
||||
(letfn [(group-results [rows]
|
||||
(let [conj (fnil conj [])]
|
||||
(reduce (fn [acc {:keys [id exist] :as row}]
|
||||
(cond-> (update acc :all conj id)
|
||||
(false? exist)
|
||||
(update :to-delete conj (dissoc row :exist))))
|
||||
{}
|
||||
rows)))
|
||||
|
||||
(group-by-backend [rows]
|
||||
(let [conj (fnil conj [])]
|
||||
(reduce (fn [acc {:keys [id backend]}]
|
||||
(update acc (keyword backend) conj id))
|
||||
{}
|
||||
rows)))
|
||||
|
||||
(retrieve-pending [conn]
|
||||
(let [rows (db/exec! conn [sql:retrieve-pending-to-recheck (db/interval recheck-min-age)])]
|
||||
(some-> (seq rows) (group-results))))
|
||||
|
||||
(delete-group [conn [backend ids]]
|
||||
(let [backend (impl/resolve-backend storage backend)
|
||||
backend (assoc backend :conn conn)]
|
||||
(impl/del-objects-in-bulk backend ids)))
|
||||
|
||||
(delete-all [conn ids]
|
||||
(let [ids (db/create-array conn "uuid" (into-array java.util.UUID ids))]
|
||||
(db/exec-one! conn ["delete from storage_pending where id = ANY(?)" ids])))]
|
||||
|
||||
(fn [_]
|
||||
(db/with-atomic [conn pool]
|
||||
(loop [n 0 d 0]
|
||||
(if-let [{:keys [all to-delete]} (retrieve-pending conn)]
|
||||
(let [groups (group-by-backend to-delete)]
|
||||
(run! (partial delete-group conn) groups)
|
||||
(delete-all conn all)
|
||||
(recur (+ n (count all))
|
||||
(+ d (count to-delete))))
|
||||
(do
|
||||
(l/info :task "recheck"
|
||||
:action "recheck items"
|
||||
:processed n
|
||||
:deleted n)
|
||||
{:processed n :deleted d})))))))
|
||||
|
||||
(def sql:retrieve-pending-to-recheck
|
||||
"select sp.id,
|
||||
sp.backend,
|
||||
sp.created_at,
|
||||
(case when count(so.id) > 0 then true
|
||||
else false
|
||||
end) as exist
|
||||
from storage_pending as sp
|
||||
left join storage_object as so
|
||||
on (so.id = sp.id)
|
||||
where sp.created_at < now() - ?::interval
|
||||
group by 1,2,3
|
||||
order by sp.created_at asc
|
||||
limit 100")
|
||||
(def sql:retrieve-team-font-variant-nrefs
|
||||
"select ((select count(*) from team_font_variant where woff1_file_id = ?) +
|
||||
(select count(*) from team_font_variant where woff2_file_id = ?) +
|
||||
(select count(*) from team_font_variant where otf_file_id = ?) +
|
||||
(select count(*) from team_font_variant where ttf_file_id = ?)) as nrefs")
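Restating the classification logic for clarity (an illustrative sketch, not code from the diff): every touched object is checked against the reference-specific nrefs query and sorted into one of two buckets. The two predicates named here are the letfn bindings defined above, shown as if they were top-level.

(defn classify-touched
  [conn reference id]
  (if (case reference
        :file-media-object (has-file-media-object-nrefs? conn id)
        :team-font-variant (has-team-font-variant-nrefs? conn id))
    :to-freeze    ;; still referenced somewhere, keep the storage object
    :to-delete))  ;; no references left, safe to mark as deleted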
|
||||
|
||||
@@ -33,6 +33,7 @@
|
||||
software.amazon.awssdk.services.s3.model.GetObjectRequest
|
||||
software.amazon.awssdk.services.s3.model.ObjectIdentifier
|
||||
software.amazon.awssdk.services.s3.model.PutObjectRequest
|
||||
software.amazon.awssdk.services.s3.model.S3Error
|
||||
;; software.amazon.awssdk.services.s3.model.GetObjectResponse
|
||||
software.amazon.awssdk.services.s3.presigner.S3Presigner
|
||||
software.amazon.awssdk.services.s3.presigner.model.GetObjectPresignRequest
|
||||
@@ -55,9 +56,10 @@
|
||||
(s/def ::region #{:eu-central-1})
|
||||
(s/def ::bucket ::us/string)
|
||||
(s/def ::prefix ::us/string)
|
||||
(s/def ::endpoint ::us/string)
|
||||
|
||||
(defmethod ig/pre-init-spec ::backend [_]
|
||||
(s/keys :opt-un [::region ::bucket ::prefix]))
|
||||
(s/keys :opt-un [::region ::bucket ::prefix ::endpoint]))
|
||||
|
||||
(defmethod ig/prep-key ::backend
|
||||
[_ {:keys [prefix] :as cfg}]
|
||||
@@ -118,20 +120,31 @@
|
||||
|
||||
(defn- ^Region lookup-region
|
||||
[region]
|
||||
(case region
|
||||
:eu-central-1 Region/EU_CENTRAL_1))
|
||||
(Region/of (name region)))
|
||||
|
||||
(defn build-s3-client
|
||||
[{:keys [region]}]
|
||||
(.. (S3Client/builder)
|
||||
(region (lookup-region region))
|
||||
(build)))
|
||||
[{:keys [region endpoint]}]
|
||||
(if (string? endpoint)
|
||||
(let [uri (java.net.URI. endpoint)]
|
||||
(.. (S3Client/builder)
|
||||
(endpointOverride uri)
|
||||
(region (lookup-region region))
|
||||
(build)))
|
||||
(.. (S3Client/builder)
|
||||
(region (lookup-region region))
|
||||
(build))))
|
||||
|
||||
(defn build-s3-presigner
|
||||
[{:keys [region]}]
|
||||
(.. (S3Presigner/builder)
|
||||
(region (lookup-region region))
|
||||
(build)))
|
||||
[{:keys [region endpoint]}]
|
||||
(if (string? endpoint)
|
||||
(let [uri (java.net.URI. endpoint)]
|
||||
(.. (S3Presigner/builder)
|
||||
(endpointOverride uri)
|
||||
(region (lookup-region region))
|
||||
(build)))
|
||||
(.. (S3Presigner/builder)
|
||||
(region (lookup-region region))
|
||||
(build))))
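Illustrative only: with the endpointOverride branch above, an S3-compatible service behind a custom endpoint can be targeted. The endpoint URL is a hypothetical example and the region keeps the only value currently allowed by the ::region spec.

(build-s3-client {:region   :eu-central-1
                  :endpoint "http://localhost:9000"})   ;; e.g. a local MinIO instance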
|
||||
|
||||
(defn put-object
|
||||
[{:keys [client bucket prefix]} {:keys [id] :as object} content]
|
||||
@@ -231,6 +244,9 @@
|
||||
^DeleteObjectsRequest dor)]
|
||||
(when (.hasErrors ^DeleteObjectsResponse dres)
|
||||
(let [errors (seq (.errors ^DeleteObjectsResponse dres))]
|
||||
(ex/raise :type :s3-error
|
||||
:code :error-on-bulk-delete
|
||||
:context errors)))))
|
||||
(ex/raise :type :internal
|
||||
:code :error-on-s3-bulk-delete
|
||||
:s3-errors (mapv (fn [^S3Error error]
|
||||
{:key (.key error)
|
||||
:msg (.message error)})
|
||||
errors))))))
|
||||
|
||||
@@ -6,10 +6,11 @@
|
||||
|
||||
(ns app.tasks.file-media-gc
|
||||
"A maintenance task that is responsible to purge the unused media
|
||||
objects from files. A file is ellegible to be garbage collected
|
||||
objects from files. A file is eligible to be garbage collected
|
||||
after some period of inactivity (the default threshold is 72h)."
|
||||
(:require
|
||||
[app.common.logging :as l]
|
||||
[app.common.pages.helpers :as cph]
|
||||
[app.common.pages.migrations :as pmg]
|
||||
[app.db :as db]
|
||||
[app.util.blob :as blob]
|
||||
@@ -52,6 +53,7 @@
|
||||
limit 10
|
||||
for update skip locked")
|
||||
|
||||
|
||||
(defn- retrieve-candidates
|
||||
[{:keys [conn max-age] :as cfg}]
|
||||
(let [interval (db/interval max-age)]
|
||||
@@ -64,12 +66,11 @@
|
||||
(comp
|
||||
(map :objects)
|
||||
(mapcat vals)
|
||||
(map (fn [{:keys [type] :as obj}]
|
||||
(case type
|
||||
:path (get-in obj [:fill-image :id])
|
||||
:image (get-in obj [:metadata :id])
|
||||
nil)))
|
||||
(filter uuid?)))
|
||||
(keep (fn [{:keys [type] :as obj}]
|
||||
(case type
|
||||
:path (get-in obj [:fill-image :id])
|
||||
:image (get-in obj [:metadata :id])
|
||||
nil)))))
|
||||
|
||||
(defn- collect-used-media
|
||||
[data]
|
||||
@@ -80,37 +81,59 @@
|
||||
(into collect-media-xf pages)
|
||||
(into (keys (:media data))))))
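A small worked example for the keep-based transducer above (illustrative): it pulls media ids straight out of a page's objects map; the shapes and the uuid are hypothetical values.

(into #{}
      collect-media-xf
      [{:objects {1 {:type :image :metadata {:id #uuid "00000000-0000-0000-0000-000000000001"}}
                  2 {:type :rect}}}])
;; => #{#uuid "00000000-0000-0000-0000-000000000001"}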
|
||||
|
||||
(def ^:private
|
||||
collect-frames-xf
|
||||
(comp
|
||||
(map :objects)
|
||||
(mapcat vals)
|
||||
(filter cph/frame-shape?)
|
||||
(keep :id)))
|
||||
|
||||
(defn- collect-frames
|
||||
[data]
|
||||
(let [pages (concat
|
||||
(vals (:pages-index data))
|
||||
(vals (:components data)))]
|
||||
(into #{} collect-frames-xf pages)))
|
||||
|
||||
(defn- process-file
|
||||
[{:keys [conn] :as cfg} {:keys [id data age] :as file}]
|
||||
(let [data (-> (blob/decode data)
|
||||
(assoc :id id)
|
||||
(pmg/migrate-data))
|
||||
(let [data (-> (blob/decode data)
|
||||
(assoc :id id)
|
||||
(pmg/migrate-data))]
|
||||
|
||||
used (collect-used-media data)
|
||||
unused (->> (db/query conn :file-media-object {:file-id id})
|
||||
(remove #(contains? used (:id %))))]
|
||||
(let [used (collect-used-media data)
|
||||
unused (->> (db/query conn :file-media-object {:file-id id})
|
||||
(remove #(contains? used (:id %))))]
|
||||
|
||||
(l/debug :action "processing file"
|
||||
:id id
|
||||
:age age
|
||||
:to-delete (count unused))
|
||||
(l/debug :hint "processing file"
|
||||
:id id
|
||||
:age age
|
||||
:to-delete (count unused))
|
||||
|
||||
;; Mark file as trimmed
|
||||
(db/update! conn :file
|
||||
{:has-media-trimmed true}
|
||||
{:id id})
|
||||
;; Mark file as trimmed
|
||||
(db/update! conn :file
|
||||
{:has-media-trimmed true}
|
||||
{:id id})
|
||||
|
||||
(doseq [mobj unused]
|
||||
(l/debug :action "deleting media object"
|
||||
:id (:id mobj)
|
||||
:media-id (:media-id mobj)
|
||||
:thumbnail-id (:thumbnail-id mobj))
|
||||
(doseq [mobj unused]
|
||||
(l/debug :hint "deleting media object"
|
||||
:id (:id mobj)
|
||||
:media-id (:media-id mobj)
|
||||
:thumbnail-id (:thumbnail-id mobj))
|
||||
|
||||
;; NOTE: deleting the file-media-object in the database
|
||||
;; automatically marks as toched the referenced storage
|
||||
;; objects. The touch mechanism is needed because many files can
|
||||
;; point to the same storage objects and we can't just delete
|
||||
;; them.
|
||||
(db/delete! conn :file-media-object {:id (:id mobj)}))
|
||||
;; NOTE: deleting the file-media-object in the database
|
||||
;; automatically marks as touched the referenced storage
|
||||
;; objects. The touch mechanism is needed because many files can
|
||||
;; point to the same storage objects and we can't just delete
|
||||
;; them.
|
||||
(db/delete! conn :file-media-object {:id (:id mobj)})))
|
||||
|
||||
(let [sql (str "delete from file_frame_thumbnail "
|
||||
" where file_id = ? and not (frame_id = ANY(?))")
|
||||
ids (->> (collect-frames data)
|
||||
(db/create-array conn "uuid"))]
|
||||
;; delete the unused frame thumbnails
|
||||
(db/exec! conn [sql (:id file) ids]))
|
||||
|
||||
nil))
|
||||
|
||||
@@ -29,7 +29,7 @@
|
||||
|
||||
(defn- offload-candidate
|
||||
[{:keys [storage conn backend] :as cfg} {:keys [id data] :as file}]
|
||||
(l/debug :action "offload file data" :id id)
|
||||
(l/debug :hint "offload file data" :id id)
|
||||
(let [backend (simpl/resolve-backend storage backend)]
|
||||
(->> (simpl/content data)
|
||||
(simpl/put-object backend file))
|
||||
|
||||
@@ -28,7 +28,8 @@
|
||||
(let [interval (db/interval max-age)
|
||||
result (db/exec-one! conn [sql:delete-files-xlog interval])
|
||||
result (:next.jdbc/update-count result)]
|
||||
(l/debug :removed result :hint "remove old file changes")
|
||||
(l/info :hint "remove old file changes"
|
||||
:removed result)
|
||||
result))))
|
||||
|
||||
(def ^:private
|
||||
|
||||
@@ -48,7 +48,7 @@
|
||||
result (db/exec! conn [sql max-age])]
|
||||
|
||||
(doseq [{:keys [id] :as item} result]
|
||||
(l/trace :action "delete object" :table table :id id))
|
||||
(l/trace :hint "delete object" :table table :id id))
|
||||
|
||||
(count result)))
|
||||
|
||||
@@ -63,7 +63,7 @@
|
||||
backend (simpl/resolve-backend storage (cf/get :fdata-storage-backend))]
|
||||
|
||||
(doseq [{:keys [id] :as item} result]
|
||||
(l/trace :action "delete object" :table table :id id)
|
||||
(l/trace :hint "delete object" :table table :id id)
|
||||
(when backend
|
||||
(simpl/del-object backend item)))
|
||||
|
||||
@@ -78,7 +78,7 @@
|
||||
fonts (db/exec! conn [sql max-age])
|
||||
storage (assoc storage :conn conn)]
|
||||
(doseq [{:keys [id] :as font} fonts]
|
||||
(l/trace :action "delete object" :table table :id id)
|
||||
(l/trace :hint "delete object" :table table :id id)
|
||||
(some->> (:woff1-file-id font) (sto/del-object storage))
|
||||
(some->> (:woff2-file-id font) (sto/del-object storage))
|
||||
(some->> (:otf-file-id font) (sto/del-object storage))
|
||||
@@ -95,7 +95,7 @@
|
||||
storage (assoc storage :conn conn)]
|
||||
|
||||
(doseq [{:keys [id] :as team} teams]
|
||||
(l/trace :action "delete object" :table table :id id)
|
||||
(l/trace :hint "delete object" :table table :id id)
|
||||
(some->> (:photo-id team) (sto/del-object storage)))
|
||||
|
||||
(count teams)))
|
||||
@@ -127,9 +127,9 @@
|
||||
storage (assoc storage :conn conn)]
|
||||
|
||||
(doseq [{:keys [id] :as profile} profiles]
|
||||
(l/trace :action "delete object" :table table :id id)
|
||||
(l/trace :hint "delete object" :table table :id id)
|
||||
|
||||
;; Mark the owned teams as deleted; this enables them to be procesed
|
||||
;; Mark the owned teams as deleted; this enables them to be processed
|
||||
;; in the same transaction in the "team" table step.
|
||||
(db/exec-one! conn [sql:mark-owned-teams-deleted id max-age])
|
||||
|
||||
|
||||
@@ -28,7 +28,7 @@
|
||||
(let [interval (db/interval max-age)
|
||||
result (db/exec-one! conn [sql:delete-completed-tasks interval])
|
||||
result (:next.jdbc/update-count result)]
|
||||
(l/debug :action "trim completed tasks table" :removed result)
|
||||
(l/debug :hint "trim completed tasks table" :removed result)
|
||||
result))))
|
||||
|
||||
(def ^:private
|
||||
|
||||
@@ -5,7 +5,7 @@
|
||||
;; Copyright (c) UXBOX Labs SL
|
||||
|
||||
(ns app.tasks.telemetry
|
||||
"A task that is reponsible to collect anonymous statistical
|
||||
"A task that is responsible to collect anonymous statistical
|
||||
information about the current instance and send it to the telemetry
|
||||
server."
|
||||
(:require
|
||||
@@ -14,15 +14,18 @@
|
||||
[app.common.spec :as us]
|
||||
[app.config :as cfg]
|
||||
[app.db :as db]
|
||||
[app.util.async :refer [thread-sleep]]
|
||||
[app.util.http :as http]
|
||||
[app.util.json :as json]
|
||||
[clojure.spec.alpha :as s]
|
||||
[integrant.core :as ig]))
|
||||
|
||||
(declare handler)
|
||||
(declare acquire-lock)
|
||||
(declare release-all-locks)
|
||||
(declare retrieve-stats)
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; TASK ENTRY POINT
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
(declare get-stats)
|
||||
(declare send!)
|
||||
|
||||
(s/def ::version ::us/string)
|
||||
(s/def ::uri ::us/string)
|
||||
@@ -34,49 +37,72 @@
|
||||
(s/keys :req-un [::db/pool ::version ::uri ::sprops]))
|
||||
|
||||
(defmethod ig/init-key ::handler
|
||||
[_ {:keys [pool] :as cfg}]
|
||||
(fn [_]
|
||||
(db/with-atomic [conn pool]
|
||||
(try
|
||||
(acquire-lock conn)
|
||||
(handler (assoc cfg :conn conn))
|
||||
(finally
|
||||
(release-all-locks conn))))))
|
||||
[_ {:keys [pool sprops version] :as cfg}]
|
||||
(fn [{:keys [send?] :or {send? true}}]
|
||||
;; Sleep randomly between 0 to 10s
|
||||
(when send?
|
||||
(thread-sleep (rand-int 10000)))
|
||||
|
||||
(defn- acquire-lock
|
||||
[conn]
|
||||
(db/exec-one! conn ["select pg_advisory_lock(87562985867332);"]))
|
||||
(let [instance-id (:instance-id sprops)
|
||||
stats (-> (get-stats pool version)
|
||||
(assoc :instance-id instance-id))]
|
||||
(when send?
|
||||
(send! stats cfg))
|
||||
stats)))
|
||||
|
||||
(defn- release-all-locks
|
||||
[conn]
|
||||
(db/exec-one! conn ["select pg_advisory_unlock_all();"]))
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;; IMPL
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
(defn- handler
|
||||
[{:keys [sprops] :as cfg}]
|
||||
(let [instance-id (:instance-id sprops)
|
||||
data (retrieve-stats cfg)
|
||||
data (assoc data :instance-id instance-id)
|
||||
response (http/send! {:method :post
|
||||
:uri (:uri cfg)
|
||||
:headers {"content-type" "application/json"}
|
||||
:body (json/write-str data)})]
|
||||
(defn- send!
|
||||
[data cfg]
|
||||
(let [response (http/send! {:method :post
|
||||
:uri (:uri cfg)
|
||||
:headers {"content-type" "application/json"}
|
||||
:body (json/write-str data)})]
|
||||
(when (> (:status response) 206)
|
||||
(ex/raise :type :internal
|
||||
:code :invalid-response
|
||||
:context {:status (:status response)
|
||||
:body (:body response)}))))
|
||||
:response-status (:status response)
|
||||
:response-body (:body response)))))
|
||||
|
||||
(defn retrieve-num-teams
|
||||
(defn- retrieve-num-teams
|
||||
[conn]
|
||||
(-> (db/exec-one! conn ["select count(*) as count from team;"]) :count))
|
||||
|
||||
(defn retrieve-num-projects
|
||||
(defn- retrieve-num-projects
|
||||
[conn]
|
||||
(-> (db/exec-one! conn ["select count(*) as count from project;"]) :count))
|
||||
|
||||
(defn retrieve-num-files
|
||||
(defn- retrieve-num-files
|
||||
[conn]
|
||||
(-> (db/exec-one! conn ["select count(*) as count from project;"]) :count))
|
||||
(-> (db/exec-one! conn ["select count(*) as count from file;"]) :count))
|
||||
|
||||
(defn- retrieve-num-file-changes
|
||||
[conn]
|
||||
(let [sql (str "select count(*) as count "
|
||||
" from file_change "
|
||||
" where date_trunc('day', created_at) = date_trunc('day', now())")]
|
||||
(-> (db/exec-one! conn [sql]) :count)))
|
||||
|
||||
(defn- retrieve-num-touched-files
|
||||
[conn]
|
||||
(let [sql (str "select count(distinct file_id) as count "
|
||||
" from file_change "
|
||||
" where date_trunc('day', created_at) = date_trunc('day', now())")]
|
||||
(-> (db/exec-one! conn [sql]) :count)))
|
||||
|
||||
(defn- retrieve-num-users
|
||||
[conn]
|
||||
(-> (db/exec-one! conn ["select count(*) as count from profile;"]) :count))
|
||||
|
||||
(defn- retrieve-num-fonts
|
||||
[conn]
|
||||
(-> (db/exec-one! conn ["select count(*) as count from team_font_variant;"]) :count))
|
||||
|
||||
(defn- retrieve-num-comments
|
||||
[conn]
|
||||
(-> (db/exec-one! conn ["select count(*) as count from comment;"]) :count))
|
||||
|
||||
(def sql:team-averages
|
||||
"with projects_by_team as (
|
||||
@@ -98,7 +124,6 @@
|
||||
select t.id, count(tp.profile_id) as num_users
|
||||
from team as t
|
||||
left join team_profile_rel as tp on(tp.team_id = t.id)
|
||||
where t.is_default = false
|
||||
group by 1
|
||||
)
|
||||
select (select avg(num_projects)::integer from projects_by_team) as avg_projects_on_team,
|
||||
@@ -110,20 +135,36 @@
|
||||
(select avg(num_users)::integer from users_by_team) as avg_users_on_team,
|
||||
(select max(num_users)::integer from users_by_team) as max_users_on_team;")
|
||||
|
||||
(defn retrieve-team-averages
|
||||
(defn- retrieve-team-averages
|
||||
[conn]
|
||||
(->> [sql:team-averages]
|
||||
(db/exec-one! conn)))
|
||||
|
||||
(defn retrieve-jvm-stats
|
||||
(defn- retrieve-enabled-auth-providers
|
||||
[conn]
|
||||
(let [sql (str "select auth_backend as backend, count(*) as total "
|
||||
" from profile group by 1")
|
||||
rows (db/exec! conn [sql])]
|
||||
(->> rows
|
||||
(map (fn [{:keys [backend total]}]
|
||||
(let [backend (or backend "penpot")]
|
||||
[(keyword (str "auth-backend-" backend))
|
||||
total])))
|
||||
(into {}))))
|
||||
|
||||
(defn- retrieve-jvm-stats
|
||||
[]
|
||||
(let [^Runtime runtime (Runtime/getRuntime)]
|
||||
{:jvm-heap-current (.totalMemory runtime)
|
||||
:jvm-heap-max (.maxMemory runtime)
|
||||
:jvm-cpus (.availableProcessors runtime)}))
|
||||
:jvm-cpus (.availableProcessors runtime)
|
||||
:os-arch (System/getProperty "os.arch")
|
||||
:os-name (System/getProperty "os.name")
|
||||
:os-version (System/getProperty "os.version")
|
||||
:user-tz (System/getProperty "user.timezone")}))
|
||||
|
||||
(defn- retrieve-stats
|
||||
[{:keys [conn version]}]
|
||||
(defn get-stats
|
||||
[conn version]
|
||||
(let [referer (if (cfg/get :telemetry-with-taiga)
|
||||
"taiga"
|
||||
(cfg/get :telemetry-referer))]
|
||||
@@ -131,9 +172,15 @@
|
||||
:referer referer
|
||||
:total-teams (retrieve-num-teams conn)
|
||||
:total-projects (retrieve-num-projects conn)
|
||||
:total-files (retrieve-num-files conn)}
|
||||
:total-files (retrieve-num-files conn)
|
||||
:total-users (retrieve-num-users conn)
|
||||
:total-fonts (retrieve-num-fonts conn)
|
||||
:total-comments (retrieve-num-comments conn)
|
||||
:total-file-changes (retrieve-num-file-changes conn)
|
||||
:total-touched-files (retrieve-num-touched-files conn)}
|
||||
(d/merge
|
||||
(retrieve-team-averages conn)
|
||||
(retrieve-jvm-stats))
|
||||
(retrieve-jvm-stats)
|
||||
(retrieve-enabled-auth-providers conn))
|
||||
(d/without-nils))))
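With the reworked handler above, the statistics can be gathered without reporting them, e.g. from a REPL. Illustrative only; handler is assumed to be the function returned by ig/init-key ::handler.

(handler {:send? false})
;; => a map shaped like
;; {:instance-id #uuid "..." :version "..." :total-teams ... :total-files ...
;;  :total-users ... :jvm-cpus ... :os-name ...}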
|
||||
|
||||
|
||||
@@ -7,6 +7,7 @@
|
||||
(ns app.tokens
|
||||
"Tokens generation service."
|
||||
(:require
|
||||
[app.common.data :as d]
|
||||
[app.common.exceptions :as ex]
|
||||
[app.common.spec :as us]
|
||||
[app.common.transit :as t]
|
||||
@@ -17,7 +18,7 @@
|
||||
|
||||
(defn- generate
|
||||
[cfg claims]
|
||||
(let [payload (t/encode claims)]
|
||||
(let [payload (-> claims d/without-nils t/encode)]
|
||||
(jwe/encrypt payload (::secret cfg) {:alg :a256kw :enc :a256gcm})))
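The practical effect of threading d/without-nils above (illustrative): claims whose value is nil no longer reach the encoded payload, so the two calls below produce tokens with identical claim sets. generate is private to this namespace; cfg and profile-id are assumed to be in scope.

(generate cfg {:iss :verify-email :profile-id profile-id :backup nil})
(generate cfg {:iss :verify-email :profile-id profile-id})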
|
||||
|
||||
(defn- verify
|
||||
|
||||
@@ -7,7 +7,8 @@
|
||||
(ns app.util.async
|
||||
(:require
|
||||
[clojure.core.async :as a]
|
||||
[clojure.spec.alpha :as s])
|
||||
[clojure.spec.alpha :as s]
|
||||
[promesa.exec :as px])
|
||||
(:import
|
||||
java.util.concurrent.Executor))
|
||||
|
||||
@@ -54,13 +55,16 @@
|
||||
(a/close! c)
|
||||
c))))
|
||||
|
||||
|
||||
(defmacro with-thread
|
||||
[executor & body]
|
||||
(if (= executor ::default)
|
||||
`(a/thread-call (^:once fn* [] (try ~@body (catch Exception e# e#))))
|
||||
`(thread-call ~executor (^:once fn* [] ~@body))))
|
||||
|
||||
(defmacro with-dispatch
|
||||
[executor & body]
|
||||
`(px/submit! ~executor (^:once fn* [] ~@body)))
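A usage sketch for the new with-dispatch macro (illustrative): the body is submitted to the supplied executor via px/submit!, which yields a CompletableFuture that promesa can chain on. The executor is assumed to be a java.util.concurrent.Executor from the system, and fetch-thumbnail and process are hypothetical functions.

(require '[app.util.async :as aa]
         '[promesa.core :as p])

(-> (aa/with-dispatch executor
      (fetch-thumbnail file-id))             ;; hypothetical blocking call
    (p/then (fn [thumb] (process thumb))))   ;; hypothetical continuation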
|
||||
|
||||
(defn batch
|
||||
[in {:keys [max-batch-size
|
||||
max-batch-age
|
||||
|
||||
@@ -10,6 +10,7 @@
|
||||
(:require
|
||||
[app.common.transit :as t]
|
||||
[app.config :as cf]
|
||||
[app.util.fressian :as fres]
|
||||
[taoensso.nippy :as n])
|
||||
(:import
|
||||
java.io.ByteArrayInputStream
|
||||
@@ -21,23 +22,28 @@
|
||||
net.jpountz.lz4.LZ4FastDecompressor
|
||||
net.jpountz.lz4.LZ4Compressor))
|
||||
|
||||
(set! *warn-on-reflection* true)
|
||||
|
||||
(def lz4-factory (LZ4Factory/fastestInstance))
|
||||
|
||||
(declare decode-v1)
|
||||
(declare decode-v2)
|
||||
(declare decode-v3)
|
||||
(declare decode-v4)
|
||||
(declare encode-v1)
|
||||
(declare encode-v2)
|
||||
(declare encode-v3)
|
||||
(declare encode-v4)
|
||||
|
||||
(defn encode
|
||||
([data] (encode data nil))
|
||||
([data {:keys [version]}]
|
||||
(let [version (or version (cf/get :default-blob-version 1))]
|
||||
(let [version (or version (cf/get :default-blob-version 3))]
|
||||
(case (long version)
|
||||
1 (encode-v1 data)
|
||||
2 (encode-v2 data)
|
||||
3 (encode-v3 data)
|
||||
4 (encode-v4 data)
|
||||
(throw (ex-info "unsupported version" {:version version}))))))
|
||||
|
||||
(defn decode
|
||||
@@ -51,6 +57,7 @@
|
||||
1 (decode-v1 data ulen)
|
||||
2 (decode-v2 data ulen)
|
||||
3 (decode-v3 data ulen)
|
||||
4 (decode-v4 data ulen)
|
||||
(throw (ex-info "unsupported version" {:version version}))))))
|
||||
|
||||
;; --- IMPL
|
||||
@@ -122,3 +129,26 @@
|
||||
(Zstd/decompressByteArray ^bytes udata 0 ulen
|
||||
^bytes cdata 6 (- (alength cdata) 6))
|
||||
(t/decode udata {:type :json})))
|
||||
|
||||
(defn- encode-v4
|
||||
[data]
|
||||
(let [data (fres/encode data)
|
||||
dlen (alength ^bytes data)
|
||||
mlen (Zstd/compressBound dlen)
|
||||
cdata (byte-array mlen)
|
||||
clen (Zstd/compressByteArray ^bytes cdata 0 mlen
|
||||
^bytes data 0 dlen
|
||||
0)]
|
||||
(with-open [^ByteArrayOutputStream baos (ByteArrayOutputStream. (+ (alength cdata) 2 4))
|
||||
^DataOutputStream dos (DataOutputStream. baos)]
|
||||
(.writeShort dos (short 4)) ;; version number
|
||||
(.writeInt dos (int dlen))
|
||||
(.write dos ^bytes cdata (int 0) clen)
|
||||
(.toByteArray baos))))
|
||||
|
||||
(defn- decode-v4
|
||||
[^bytes cdata ^long ulen]
|
||||
(let [udata (byte-array ulen)]
|
||||
(Zstd/decompressByteArray ^bytes udata 0 ulen
|
||||
^bytes cdata 6 (- (alength cdata) 6))
|
||||
(fres/decode udata)))
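A round-trip sketch for the new v4 format (illustrative): encode with an explicit version and decode back; decode reads the version from the blob header, so it needs no option.

(require '[app.util.blob :as blob])

(let [data    {:version 17 :pages-index {}}   ;; hypothetical file data
      encoded (blob/encode data {:version 4})]
  (= data (blob/decode encoded)))
;; => true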
|
||||
|
||||
@@ -206,7 +206,7 @@
|
||||
:content html}]))}))
|
||||
|
||||
(s/def ::priority #{:high :low})
|
||||
(s/def ::to (s/or :sigle ::us/email
|
||||
(s/def ::to (s/or :single ::us/email
|
||||
:multi (s/coll-of ::us/email)))
|
||||
(s/def ::from ::us/email)
|
||||
(s/def ::reply-to ::us/email)
|
||||
|
||||
281
backend/src/app/util/fressian.clj
Normal file
281
backend/src/app/util/fressian.clj
Normal file
@@ -0,0 +1,281 @@
|
||||
;; This Source Code Form is subject to the terms of the Mozilla Public
|
||||
;; License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
;;
|
||||
;; Copyright (c) UXBOX Labs SL
|
||||
|
||||
(ns app.util.fressian
|
||||
(:require
|
||||
[app.common.geom.matrix :as gmt]
|
||||
[app.common.geom.point :as gpt]
|
||||
[clojure.data.fressian :as fres])
|
||||
(:import
|
||||
app.common.geom.matrix.Matrix
|
||||
app.common.geom.point.Point
|
||||
clojure.lang.Ratio
|
||||
java.io.ByteArrayInputStream
|
||||
java.io.ByteArrayOutputStream
|
||||
java.time.Instant
|
||||
java.time.OffsetDateTime
|
||||
org.fressian.Reader
|
||||
org.fressian.StreamingWriter
|
||||
org.fressian.Writer
|
||||
org.fressian.handlers.ReadHandler
|
||||
org.fressian.handlers.WriteHandler))
|
||||
|
||||
;; --- MISC
|
||||
|
||||
(set! *warn-on-reflection* true)
|
||||
|
||||
(defn str->bytes
|
||||
([^String s]
|
||||
(str->bytes s "UTF-8"))
|
||||
([^String s, ^String encoding]
|
||||
(.getBytes s encoding)))
|
||||
|
||||
(defn write-named
|
||||
[tag ^Writer w s]
|
||||
(.writeTag w tag 2)
|
||||
(.writeObject w (namespace s) true)
|
||||
(.writeObject w (name s) true))
|
||||
|
||||
(defn write-list-like
|
||||
([^Writer w tag o]
|
||||
(.writeTag w tag 1)
|
||||
(.writeList w o)))
|
||||
|
||||
(defn read-list-like
|
||||
[^Reader rdr build-fn]
|
||||
(build-fn (.readObject rdr)))
|
||||
|
||||
(defn write-map-like
|
||||
"Writes a map as Fressian with the tag 'map' and all keys cached."
|
||||
[^Writer w tag m]
|
||||
(.writeTag w tag 1)
|
||||
(.beginClosedList ^StreamingWriter w)
|
||||
(loop [items (seq m)]
|
||||
(when-let [^clojure.lang.MapEntry item (first items)]
|
||||
(.writeObject w (.key item) true)
|
||||
(.writeObject w (.val item))
|
||||
(recur (rest items))))
|
||||
(.endList ^StreamingWriter w))
|
||||
|
||||
(defn read-map-like
|
||||
[^Reader rdr]
|
||||
(let [kvs ^java.util.List (.readObject rdr)]
|
||||
(if (< (.size kvs) 16)
|
||||
(clojure.lang.PersistentArrayMap. (.toArray kvs))
|
||||
(clojure.lang.PersistentHashMap/create (seq kvs)))))
|
||||
|
||||
(def write-handlers
|
||||
{ Character
|
||||
{"char"
|
||||
(reify WriteHandler
|
||||
(write [_ w ch]
|
||||
(.writeTag w "char" 1)
|
||||
(.writeInt w (int ch))))}
|
||||
|
||||
app.common.geom.point.Point
|
||||
{"penpot/point"
|
||||
(reify WriteHandler
|
||||
(write [_ w o]
|
||||
(.writeTag ^Writer w "penpot/point" 1)
|
||||
(.writeList ^Writer w (java.util.List/of (.-x ^Point o) (.-y ^Point o)))))}
|
||||
|
||||
app.common.geom.matrix.Matrix
|
||||
{"penpot/matrix"
|
||||
(reify WriteHandler
|
||||
(write [_ w o]
|
||||
(.writeTag ^Writer w "penpot/matrix" 1)
|
||||
(.writeList ^Writer w (java.util.List/of (.-a ^Matrix o)
|
||||
(.-b ^Matrix o)
|
||||
(.-c ^Matrix o)
|
||||
(.-d ^Matrix o)
|
||||
(.-e ^Matrix o)
|
||||
(.-f ^Matrix o)))))}
|
||||
|
||||
Instant
|
||||
{"java/instant"
|
||||
(reify WriteHandler
|
||||
(write [_ w ch]
|
||||
(.writeTag w "java/instant" 1)
|
||||
(.writeInt w (.toEpochMilli ^Instant ch))))}
|
||||
|
||||
OffsetDateTime
|
||||
{"java/instant"
|
||||
(reify WriteHandler
|
||||
(write [_ w ch]
|
||||
(.writeTag w "java/instant" 1)
|
||||
(.writeInt w (.toEpochMilli ^Instant (.toInstant ^OffsetDateTime ch)))))}
|
||||
|
||||
Ratio
|
||||
{"ratio"
|
||||
(reify WriteHandler
|
||||
(write [_ w n]
|
||||
(.writeTag w "ratio" 2)
|
||||
(.writeObject w (.numerator ^Ratio n))
|
||||
(.writeObject w (.denominator ^Ratio n))))}
|
||||
|
||||
clojure.lang.IPersistentMap
|
||||
{"clj/map"
|
||||
(reify WriteHandler
|
||||
(write [_ w d]
|
||||
(write-map-like w "clj/map" d)))}
|
||||
|
||||
clojure.lang.Keyword
|
||||
{"clj/keyword"
|
||||
(reify WriteHandler
|
||||
(write [_ w s]
|
||||
(write-named "clj/keyword" w s)))}
|
||||
|
||||
clojure.lang.BigInt
|
||||
{"bigint"
|
||||
(reify WriteHandler
|
||||
(write [_ w d]
|
||||
(let [^BigInteger bi (if (instance? clojure.lang.BigInt d)
|
||||
(.toBigInteger ^clojure.lang.BigInt d)
|
||||
d)]
|
||||
(.writeTag w "bigint" 1)
|
||||
(.writeBytes w (.toByteArray bi)))))}
|
||||
|
||||
;; Persistent set
|
||||
clojure.lang.IPersistentSet
|
||||
{"clj/set"
|
||||
(reify WriteHandler
|
||||
(write [_ w o]
|
||||
(write-list-like w "clj/set" o)))}
|
||||
|
||||
;; Persistent vector
|
||||
clojure.lang.IPersistentVector
|
||||
{"clj/vector"
|
||||
(reify WriteHandler
|
||||
(write [_ w o]
|
||||
(write-list-like w "clj/vector" o)))}
|
||||
|
||||
;; Persistent list
|
||||
clojure.lang.IPersistentList
|
||||
{"clj/list"
|
||||
(reify WriteHandler
|
||||
(write [_ w o]
|
||||
(write-list-like w "clj/list" o)))}
|
||||
|
||||
;; Persistent seq & lazy seqs
|
||||
clojure.lang.ISeq
|
||||
{"clj/seq"
|
||||
(reify WriteHandler
|
||||
(write [_ w o]
|
||||
(write-list-like w "clj/seq" o)))}
|
||||
})
|
||||
|
||||
|
||||
(def read-handlers
{"bigint"
(reify ReadHandler
(read [_ rdr _ _]
(let [^bytes bibytes (.readObject rdr)]
(bigint (BigInteger. bibytes)))))

"byte"
(reify ReadHandler
(read [_ rdr _ _]
(byte (.readObject rdr))))

"penpot/matrix"
(reify ReadHandler
(read [_ rdr _ _]
(let [^java.util.List x (.readObject rdr)]
(Matrix. (.get x 0) (.get x 1) (.get x 2) (.get x 3) (.get x 4) (.get x 5)))))

"penpot/point"
(reify ReadHandler
(read [_ rdr _ _]
(let [^java.util.List x (.readObject rdr)]
(Point. (.get x 0) (.get x 1)))))

"char"
(reify ReadHandler
(read [_ rdr _ _]
(char (.readObject rdr))))

"java/instant"
(reify ReadHandler
(read [_ rdr _ _]
(Instant/ofEpochMilli (.readInt rdr))))

"clj/ratio"
(reify ReadHandler
(read [_ rdr _ _]
(Ratio. (biginteger (.readObject rdr))
(biginteger (.readObject rdr)))))

"clj/keyword"
(reify ReadHandler
(read [_ rdr _ _]
(keyword (.readObject rdr) (.readObject rdr))))

"clj/map"
(reify ReadHandler
(read [_ rdr _ _]
(read-map-like rdr)))

"clj/set"
(reify ReadHandler
(read [_ rdr _ _]
(read-list-like rdr set)))

"clj/vector"
(reify ReadHandler
(read [_ rdr _ _]
(read-list-like rdr vec)))

"clj/list"
(reify ReadHandler
(read [_ rdr _ _]
(read-list-like rdr #(apply list %))))

"clj/seq"
(reify ReadHandler
(read [_ rdr _ _]
(read-list-like rdr sequence)))
})

(def write-handler-lookup
(-> write-handlers
fres/associative-lookup
fres/inheritance-lookup))

(def read-handler-lookup
(-> read-handlers
(fres/associative-lookup)))

;; --- Low-Level Api

(defn reader
[istream]
(fres/create-reader istream :handlers read-handler-lookup))

(defn writer
[ostream]
(fres/create-writer ostream :handlers write-handler-lookup))

(defn read!
[reader]
(fres/read-object reader))

(defn write!
[writer data]
(fres/write-object writer data))

;; --- High-Level Api

(defn encode
[data]
(with-open [out (ByteArrayOutputStream.)]
(write! (writer out) data)
(.toByteArray out)))

(defn decode
[data]
(with-open [input (ByteArrayInputStream. ^bytes data)]
(read! (reader input))))

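A minimal round-trip sketch of the high-level API above; the sample data and the bare references to `encode`, `decode` and `Point` assume this namespace's own context, so treat it as illustrative rather than as code from the commit.

(comment
  ;; custom types such as Point survive the byte round trip thanks to
  ;; the write/read handlers registered above
  (let [data  {:point (Point. 1.0 2.0) :tags #{:a :b}}
        bytes (encode data)]
    (= data (decode bytes))))
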
@@ -20,7 +20,7 @@
;; --- Implementation

(defn- registered?
"Check if concrete migration is already registred."
"Check if concrete migration is already registered."
[pool modname stepname]
(let [sql "select * from migrations where module=? and step=?"
rows (jdbc/execute! pool [sql modname stepname])]

@@ -1,43 +0,0 @@
;; This Source Code Form is subject to the terms of the Mozilla Public
;; License, v. 2.0. If a copy of the MPL was not distributed with this
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
;;
;; Copyright (c) UXBOX Labs SL

(ns app.util.retry
"A fault tolerance helpers. Allow retry some operations that we know
we can retry."
(:require
[app.common.exceptions :as ex]
[app.common.logging :as l]
[app.util.async :as aa]
[app.util.services :as sv]))

(defn conflict-db-insert?
"Check if exception matches a insertion conflict on postgresql."
[e]
(and (instance? org.postgresql.util.PSQLException e)
(= "23505" (.getSQLState e))))

(defn wrap-retry
[_ f {:keys [::max-retries ::matches ::sv/name]
:or {max-retries 3
matches (constantly false)}
:as mdata}]
(when (::enabled mdata)
(l/debug :hint "wrapping retry" :name name))
(if (::enabled mdata)
(fn [cfg params]
(loop [retry 1]
(when (> retry 1)
(l/debug :hint "retrying controlled function" :retry retry :name name))
(let [res (ex/try (f cfg params))]
(if (ex/exception? res)
(if (and (matches res) (< retry max-retries))
(do
(aa/thread-sleep (* 100 retry))
(recur (inc retry)))
(throw res))
res))))
f))

@@ -1,36 +0,0 @@
;; This Source Code Form is subject to the terms of the Mozilla Public
;; License, v. 2.0. If a copy of the MPL was not distributed with this
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
;;
;; Copyright (c) UXBOX Labs SL

(ns app.util.rlimit
"Resource usage limits (in other words: semaphores)."
(:require
[app.common.logging :as l]
[app.util.services :as sv])
(:import
java.util.concurrent.Semaphore))

(defn acquire!
[sem]
(.acquire ^Semaphore sem))

(defn release!
[sem]
(.release ^Semaphore sem))

(defn wrap-rlimit
[_cfg f mdata]
(if-let [permits (::permits mdata)]
(let [sem (Semaphore. permits)]
(l/debug :hint "wrapping rlimit" :handler (::sv/name mdata) :permits permits)
(fn [cfg params]
(try
(acquire! sem)
(f cfg params)
(finally
(release! sem)))))
f))

@@ -8,7 +8,8 @@
(:require
[app.common.exceptions :as ex]
[clojure.spec.alpha :as s]
[cuerdas.core :as str])
[cuerdas.core :as str]
[fipp.ednize :as fez])
(:import
java.time.Duration
java.time.Instant
@@ -17,6 +18,8 @@
java.time.ZonedDateTime
java.time.format.DateTimeFormatter
java.time.temporal.TemporalAmount
java.time.temporal.TemporalUnit
java.time.temporal.ChronoUnit
java.util.Date
org.apache.logging.log4j.core.util.CronExpression))

@@ -54,15 +57,30 @@
:else
(obj->duration ms-or-obj)))

(defn duration-between
{:deprecated true}
[t1 t2]
(Duration/between t1 t2))

(defn diff
[t1 t2]
(Duration/between t1 t2))

(defn truncate
[o unit]
(let [unit (if (instance? TemporalUnit unit)
unit
(case unit
:nanos ChronoUnit/NANOS
:millis ChronoUnit/MILLIS
:micros ChronoUnit/MICROS
:seconds ChronoUnit/SECONDS
:minutes ChronoUnit/MINUTES))]
(cond
(instance? Instant o)
(.truncatedTo ^Instant o ^TemporalUnit unit)

(instance? Duration o)
(.truncatedTo ^Duration o ^TemporalUnit unit)

:else
(throw (IllegalArgumentException. "only instant and duration allowed")))))

(s/def ::duration
(s/conformer
(fn [v]
@@ -94,6 +112,11 @@
(defmethod print-dup Duration [o w]
(print-method o w))

(extend-protocol fez/IEdn
Duration
(-edn [o] (pr-str o)))

;; --- INSTANT

(defn instant
@@ -158,6 +181,10 @@
(defmethod print-dup Instant [o w]
(print-method o w))

(extend-protocol fez/IEdn
Instant
(-edn [o] (pr-str o)))

;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; Cron Expression
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
@@ -268,6 +295,11 @@
(s/assert cron? cron)
(.toInstant (.getNextValidTimeAfter cron (Date/from now))))

(defn get-next
[cron tnow]
(let [nt (next-valid-instant-from cron tnow)]
(cons nt (lazy-seq (get-next cron nt)))))

(defmethod print-method CronExpression
[mv ^java.io.Writer writer]
(.write writer (str "#app/cron \"" (.toString ^CronExpression mv) "\"")))
@@ -275,3 +307,8 @@
(defmethod print-dup CronExpression
[o w]
(print-ctor o (fn [o w] (print-dup (.toString ^CronExpression o) w)) w))

(extend-protocol fez/IEdn
CronExpression
(-edn [o] (pr-str o)))

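A quick REPL sketch of the new `diff` and `truncate` helpers introduced above; the `dt` alias for app.util.time is an assumption for illustration only.

(comment
  ;; truncating to seconds drops the sub-second part, so the difference
  ;; from the original instant is a Duration below one second
  (let [t1 (dt/now)
        t2 (dt/truncate t1 :seconds)]
    (dt/diff t2 t1)))
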
backend/src/app/util/websocket.clj (new file, 207 lines)
@@ -0,0 +1,207 @@
;; This Source Code Form is subject to the terms of the Mozilla Public
;; License, v. 2.0. If a copy of the MPL was not distributed with this
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
;;
;; Copyright (c) UXBOX Labs SL

(ns app.util.websocket
"A general protocol implementation on top of websockets."
(:require
[app.common.exceptions :as ex]
[app.common.logging :as l]
[app.common.transit :as t]
[app.metrics :as mtx]
[app.util.time :as dt]
[clojure.core.async :as a]
[yetti.websocket :as yws])
(:import
java.nio.ByteBuffer
org.eclipse.jetty.io.EofException))

(declare decode-beat)
(declare encode-beat)
(declare process-heartbeat)
(declare process-input)
(declare process-output)
(declare ws-ping!)
(declare ws-send!)

(def noop (constantly nil))

(defn handler
"A WebSocket upgrade handler factory. Returns a handler that can be
used to upgrade to a websocket connection. This handler implements the
basic custom protocol on top of the websocket connection with all the
boring stuff already handled (lifecycle, heartbeat, ...).

The provided function should have the `(fn [ws msg])` signature.

It also accepts some options that allow you to parametrize the
protocol behavior. The options map will be used as-is for the
initial data of the `ws` data structure."
([handle-message] (handler handle-message {}))
([handle-message {:keys [::input-buff-size
::output-buff-size
::idle-timeout
metrics]
:or {input-buff-size 64
output-buff-size 64
idle-timeout 30000}
:as options}]
(fn [_]
(let [input-ch (a/chan input-buff-size)
output-ch (a/chan output-buff-size)
pong-ch (a/chan (a/sliding-buffer 6))
close-ch (a/chan)
options (-> options
(assoc ::input-ch input-ch)
(assoc ::output-ch output-ch)
(assoc ::close-ch close-ch)
(dissoc ::metrics))

terminated (atom false)
created-at (dt/now)

on-terminate
(fn [& _args]
(when (compare-and-set! terminated false true)
(mtx/run! metrics {:id :websocket-active-connections :dec 1})
(mtx/run! metrics {:id :websocket-session-timing :val (/ (inst-ms (dt/diff created-at (dt/now))) 1000.0)})

(a/close! close-ch)
(a/close! pong-ch)
(a/close! output-ch)
(a/close! input-ch)))

on-error
(fn [_ error]
(on-terminate)
(when-not (or (instance? org.eclipse.jetty.websocket.api.exceptions.WebSocketTimeoutException error)
(instance? java.nio.channels.ClosedChannelException error))
(l/error :hint (ex-message error) :cause error)))

on-connect
(fn [conn]
(mtx/run! metrics {:id :websocket-active-connections :inc 1})

(let [wsp (atom (assoc options ::conn conn))]
;; Handle heartbeat
(yws/idle-timeout! conn (dt/duration idle-timeout))
(-> @wsp
(assoc ::pong-ch pong-ch)
(assoc ::on-close on-terminate)
(process-heartbeat))

;; Forward all messages from output-ch to the websocket
;; connection
(a/go-loop []
(when-let [val (a/<! output-ch)]
(mtx/run! metrics {:id :websocket-messages-total :labels ["send"] :inc 1})
(a/<! (ws-send! conn (t/encode-str val)))
(recur)))

;; React on messages received from the client
(process-input wsp handle-message)))

on-message
(fn [_ message]
(mtx/run! metrics {:id :websocket-messages-total :labels ["send"] :inc 1})
(try
(let [message (t/decode-str message)]
(a/offer! input-ch message))
(catch Throwable e
(l/warn :hint "error on decoding incoming message from websocket"
:wsmsg (pr-str message)
:cause e)
(on-terminate))))

on-pong
(fn [_ buffer]
(a/>!! pong-ch buffer))]

{:on-connect on-connect
:on-error on-error
:on-close on-terminate
:on-text on-message
:on-pong on-pong}))))

(defn- ws-send!
[conn s]
(let [ch (a/chan 1)]
(try
(yws/send! conn s (fn [e]
(when e (a/offer! ch e))
(a/close! ch)))
(catch EofException cause
(a/offer! ch cause)
(a/close! ch)))
ch))

(defn- ws-ping!
[conn s]
(let [ch (a/chan 1)]
(try
(yws/ping! conn s (fn [e]
(when e (a/offer! ch e))
(a/close! ch)))
(catch EofException cause
(a/offer! ch cause)
(a/close! ch)))
ch))

(defn- encode-beat
[n]
(doto (ByteBuffer/allocate 8)
(.putLong n)
(.rewind)))

(defn- decode-beat
[^ByteBuffer buffer]
(when (= 8 (.capacity buffer))
(.rewind buffer)
(.getLong buffer)))

(defn- process-input
[wsp handler]
(let [{:keys [::input-ch ::output-ch ::close-ch]} @wsp]
(a/go
(a/<! (handler wsp {:type :connect}))
(a/<! (a/go-loop []
(when-let [request (a/<! input-ch)]
(let [[val port] (a/alts! [(handler wsp request) close-ch])]
(when-not (= port close-ch)
(cond
(ex/ex-info? val)
(a/>! output-ch {:type :error :error (ex-data val)})

(ex/exception? val)
(a/>! output-ch {:type :error :error {:message (ex-message val)}})

(map? val)
(a/>! output-ch (cond-> val (:request-id request) (assoc :request-id (:request-id request)))))

(recur))))))
(a/<! (handler wsp {:type :disconnect})))))

(defn- process-heartbeat
[{:keys [::conn ::close-ch ::on-close ::pong-ch
::heartbeat-interval ::max-missed-heartbeats]
:or {heartbeat-interval 2000
max-missed-heartbeats 4}}]
(let [beats (atom #{})]
(a/go-loop [i 0]
(let [[_ port] (a/alts! [close-ch (a/timeout heartbeat-interval)])]
(when (and (yws/connected? conn)
(not= port close-ch))
(a/<! (ws-ping! conn (encode-beat i)))
(let [issued (swap! beats conj (long i))]
(if (>= (count issued) max-missed-heartbeats)
(on-close conn -1 "heartbeat-timeout")
(recur (inc i)))))))

(a/go-loop []
(when-let [buffer (a/<! pong-ch)]
(swap! beats disj (decode-beat buffer))
(recur)))))

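A hypothetical wiring sketch for the factory above. Because `process-input` waits on the value returned by the message function with `a/alts!`, that function must return a channel (for example from `a/go`); any map taken from it is pushed back to the client through the output channel. The `ws` and `a` aliases and the `echo-handler` name are assumptions for illustration.

(comment
  (def echo-handler
    (ws/handler
     (fn [wsp message]
       (a/go
         ;; lifecycle events get no reply; everything else is echoed back
         (when-not (#{:connect :disconnect} (:type message))
           {:type :echo :payload message}))))))
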
@@ -22,44 +22,100 @@
[integrant.core :as ig]
[promesa.exec :as px])
(:import
org.eclipse.jetty.util.thread.QueuedThreadPool
java.util.concurrent.ExecutorService
java.util.concurrent.Executors
java.util.concurrent.Executor))
java.util.concurrent.ForkJoinPool
java.util.concurrent.ForkJoinWorkerThread
java.util.concurrent.ForkJoinPool$ForkJoinWorkerThreadFactory
java.util.concurrent.atomic.AtomicLong
java.util.concurrent.Executors))

(s/def ::executor #(instance? Executor %))
(set! *warn-on-reflection* true)

(s/def ::executor #(instance? ExecutorService %))

;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; Executor
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;

(s/def ::name keyword?)
(s/def ::prefix keyword?)
(s/def ::parallelism ::us/integer)
(s/def ::min-threads ::us/integer)
(s/def ::max-threads ::us/integer)
(s/def ::idle-timeout ::us/integer)

(defmethod ig/pre-init-spec ::executor [_]
(s/keys :req-un [::min-threads ::max-threads ::idle-timeout ::name]))
(s/keys :req-un [::prefix ::parallelism]))

(defn- get-thread-factory
^ForkJoinPool$ForkJoinWorkerThreadFactory
[prefix counter]
(reify ForkJoinPool$ForkJoinWorkerThreadFactory
(newThread [_ pool]
(let [^ForkJoinWorkerThread thread (.newThread ForkJoinPool/defaultForkJoinWorkerThreadFactory pool)
^String thread-name (str (name prefix) "-" (.getAndIncrement ^AtomicLong counter))]
(.setName thread thread-name)
thread))))

(defmethod ig/init-key ::executor
[_ {:keys [min-threads max-threads idle-timeout name]}]
(doto (QueuedThreadPool. (int max-threads)
(int min-threads)
(int idle-timeout))
(.setStopTimeout 500)
(.setName (d/name name))
(.start)))
[_ {:keys [parallelism prefix]}]
(let [counter (AtomicLong. 0)]
(ForkJoinPool. (int parallelism) (get-thread-factory prefix counter) nil false)))

(defmethod ig/halt-key! ::executor
[_ instance]
(.stop ^QueuedThreadPool instance))
(.shutdown ^ForkJoinPool instance))

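A configuration sketch for the reworked executor: the QueuedThreadPool keys are replaced by a prefix and a parallelism value that size a ForkJoinPool whose worker threads are named from the prefix. The integrant key and the chosen values are assumptions, not taken from the real system map.

(comment
  {:app.worker/executor
   {:prefix      :worker
    ;; size the pool from the available CPUs; purely illustrative
    :parallelism (.availableProcessors (Runtime/getRuntime))}})
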
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; Executor Monitor
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;

(s/def ::executors (s/map-of keyword? ::executor))

(defmethod ig/pre-init-spec ::executors-monitor [_]
(s/keys :req-un [::executors ::mtx/metrics]))

(defmethod ig/init-key ::executors-monitor
[_ {:keys [executors metrics interval] :or {interval 3000}}]
(letfn [(log-stats [scheduler state]
(doseq [[key ^ForkJoinPool executor] executors]
(let [labels (into-array String [(name key)])
active (.getActiveThreadCount executor)
running (.getRunningThreadCount executor)
queued (.getQueuedSubmissionCount executor)
steals (.getStealCount executor)
steals-increment (- steals (or (get-in @state [key :steals]) 0))
steals-increment (if (neg? steals-increment) 0 steals-increment)]

(mtx/run! metrics {:id :executors-active-threads :labels labels :val active})
(mtx/run! metrics {:id :executors-running-threads :labels labels :val running})
(mtx/run! metrics {:id :executors-queued-submissions :labels labels :val queued})
(mtx/run! metrics {:id :executors-completed-tasks :labels labels :inc steals-increment})

(swap! state update key assoc
:running running
:active active
:queued queued
:steals steals)))

(when-not (.isShutdown scheduler)
(px/schedule! scheduler interval (partial log-stats scheduler state))))]

(let [scheduler (px/scheduled-pool 1)
state (atom {})]
(px/schedule! scheduler interval (partial log-stats scheduler state))
{::scheduler scheduler
::state state})))

(defmethod ig/halt-key! ::executors-monitor
[_ {:keys [::scheduler]}]
(.shutdown ^ExecutorService scheduler))

;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; Worker
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;

(declare event-loop-fn)
(declare instrument-tasks)
(declare event-loop)

(s/def ::queue keyword?)
(s/def ::parallelism ::us/integer)
@@ -85,13 +141,10 @@
:queue :default}
(d/without-nils cfg)))

(defmethod ig/init-key ::worker
[_ {:keys [pool poll-interval name queue] :as cfg}]
(l/info :action "start worker"
:name (d/name name)
:queue (d/name queue))
(let [close-ch (a/chan 1)
poll-ms (inst-ms poll-interval)]
(defn- event-loop
"Main, worker eventloop"
[{:keys [pool poll-interval close-ch] :as cfg}]
(let [poll-ms (inst-ms poll-interval)]
(a/go-loop []
(let [[val port] (a/alts! [close-ch (event-loop-fn cfg)] :priority true)]
(cond
@@ -100,7 +153,7 @@
(or (= port close-ch) (nil? val))
(l/debug :hint "stop condition found")

(db/pool-closed? pool)
(db/closed? pool)
(do
(l/debug :hint "eventloop aborted because pool is closed")
(a/close! close-ch))
@@ -108,7 +161,7 @@
(and (instance? java.sql.SQLException val)
(contains? #{"08003" "08006" "08001" "08004"} (.getSQLState ^java.sql.SQLException val)))
(do
(l/error :hint "connection error, trying resume in some instants")
(l/warn :hint "connection error, trying resume in some instants")
(a/<! (a/timeout poll-interval))
(recur))

@@ -121,8 +174,8 @@

(instance? Exception val)
(do
(l/error :cause val
:hint "unexpected error ocurried on polling the database (will resume in some instants)")
(l/warn :cause val
:hint "unexpected error ocurried on polling the database (will resume in some instants)")
(a/<! (a/timeout poll-ms))
(recur))

@@ -132,14 +185,27 @@
(= ::empty val)
(do
(a/<! (a/timeout poll-ms))
(recur)))))
(recur)))))))

(defmethod ig/init-key ::worker
[_ {:keys [pool name queue] :as cfg}]
(let [close-ch (a/chan 1)
cfg (assoc cfg :close-ch close-ch)]
(if (db/read-only? pool)
(l/warn :hint "worker not started, db is read-only"
:name (d/name name)
:queue (d/name queue))
(do
(l/info :hint "worker started"
:name (d/name name)
:queue (d/name queue))
(event-loop cfg)))

(reify
java.lang.AutoCloseable
(close [_]
(a/close! close-ch)))))

(defmethod ig/halt-key! ::worker
[_ instance]
(.close ^java.lang.AutoCloseable instance))

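A sketch of how this worker might be declared in the system map; the keys mirror the destructuring above, but the integrant key name and the concrete values are illustrative assumptions.

(comment
  {:app.worker/worker
   {:pool          (ig/ref :app.db/pool)   ;; worker refuses to start when the pool is read-only
    :name          :worker
    :queue         :default
    :poll-interval (dt/duration 5000)}})
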
@@ -249,10 +315,15 @@

(defn get-error-context
[error item]
(let [edata (ex-data error)]
{:id (uuid/next)
:data edata
:params item}))
(let [data (ex-data error)]
(merge
{:hint (ex-message error)
:spec-problems (some->> data ::s/problems (take 10) seq vec)
:spec-value (some->> data ::s/value)
:data (some-> data (dissoc ::s/problems ::s/value ::s/spec))
:params item}
(when (and data (::s/problems data))
{:spec-explain (us/pretty-explain data)}))))

(defn- handle-exception
[error item]
@@ -266,8 +337,10 @@

(= ::noop (:strategy edata))
(assoc :inc-by 0))
(l/with-context (get-error-context error item)
(l/error :cause error :hint "unhandled exception on task")
(do
(l/error :hint "unhandled exception on task"
::l/context (get-error-context error item)
:cause error)
(if (>= (:retry-num item) (:max-retries item))
{:status :failed :task item :error error}
{:status :retry :task item :error error})))))

@@ -343,31 +416,35 @@
(s/keys :req-un [::executor ::db/pool ::schedule ::tasks]))

(defmethod ig/init-key ::scheduler
[_ {:keys [schedule tasks] :as cfg}]
(let [scheduler (Executors/newScheduledThreadPool (int 1))
schedule (->> schedule
(filter some?)
;; If id is not defined, use the task as id.
(map (fn [{:keys [id task] :as item}]
(if (some? id)
(assoc item :id (d/name id))
(assoc item :id (d/name task)))))
(map (fn [{:keys [task] :as item}]
(let [f (get tasks task)]
(when-not f
(ex/raise :type :internal
:code :task-not-found
:hint (str/fmt "task %s not configured" task)))
(-> item
(dissoc :task)
(assoc :fn f))))))
cfg (assoc cfg
:scheduler scheduler
:schedule schedule)]
[_ {:keys [schedule tasks pool] :as cfg}]
(let [scheduler (Executors/newScheduledThreadPool (int 1))]
(if (db/read-only? pool)
(l/warn :hint "scheduler not started, db is read-only")
(let [schedule (->> schedule
(filter some?)
;; If id is not defined, use the task as id.
(map (fn [{:keys [id task] :as item}]
(if (some? id)
(assoc item :id (d/name id))
(assoc item :id (d/name task)))))
(map (fn [{:keys [task] :as item}]
(let [f (get tasks task)]
(when-not f
(ex/raise :type :internal
:code :task-not-found
:hint (str/fmt "task %s not configured" task)))
(-> item
(dissoc :task)
(assoc :fn f))))))
cfg (assoc cfg
:scheduler scheduler
:schedule schedule)]
(l/info :hint "scheduler started"
:registred-tasks (count schedule))

(synchronize-schedule cfg)
(run! (partial schedule-task cfg)
(filter some? schedule))
(synchronize-schedule cfg)
(run! (partial schedule-task cfg)
(filter some? schedule))))

(reify
java.lang.AutoCloseable

@@ -398,29 +475,22 @@
(def sql:lock-scheduled-task
"select id from scheduled_task where id=? for update skip locked")

(defn exception->string
[error]
(with-out-str
(.printStackTrace ^Throwable error (java.io.PrintWriter. *out*))))

(defn- execute-scheduled-task
[{:keys [executor pool] :as cfg} {:keys [id] :as task}]
(letfn [(run-task [conn]
(try
(when (db/exec-one! conn [sql:lock-scheduled-task (d/name id)])
(l/debug :action "execute scheduled task" :id id)
((:fn task) task))
(catch Throwable e
e)))
(when (db/exec-one! conn [sql:lock-scheduled-task (d/name id)])
(l/debug :action "execute scheduled task" :id id)
((:fn task) task)))

(handle-task []
(db/with-atomic [conn pool]
(let [result (run-task conn)]
(when (ex/exception? result)
(l/error :cause result
:hint "unhandled exception on scheduled task"
:id id)))))]
(try
(db/with-atomic [conn pool]
(run-task conn))
(catch Throwable cause
(l/error :hint "unhandled exception on scheduled task"
::l/context (get-error-context cause task)
:task-id id
:cause cause))))]

(try
(px/run! executor handle-task)
(finally

@@ -440,59 +510,27 @@

;; --- INSTRUMENTATION

(defn instrument!
[registry]
(mtx/instrument-vars!
[#'submit!]
{:registry registry
:type :counter
:labels ["name"]
:name "tasks_submit_total"
:help "A counter of task submissions."
:wrap (fn [rootf mobj]
(let [mdata (meta rootf)
origf (::original mdata rootf)]
(with-meta
(fn [conn params]
(let [tname (:name params)]
(mobj :inc [tname])
(origf conn params)))
{::original origf})))})

(mtx/instrument-vars!
[#'app.worker/run-task]
{:registry registry
:type :summary
:quantiles []
:name "tasks_checkout_timing"
:help "Latency measured between scheduld_at and execution time."
:wrap (fn [rootf mobj]
(let [mdata (meta rootf)
origf (::original mdata rootf)]
(with-meta
(fn [tasks item]
(let [now (inst-ms (dt/now))
sat (inst-ms (:scheduled-at item))]
(mobj :observe (- now sat))
(origf tasks item)))
{::original origf})))}))

(defn- wrap-task-handler
[metrics tname f]
(let [labels (into-array String [tname])]
(fn [params]
(let [start (System/nanoTime)]
(try
(f params)
(finally
(mtx/run! metrics
{:id :tasks-timing
:val (/ (- (System/nanoTime) start) 1000000)
:labels labels})))))))

(defmethod ig/pre-init-spec ::registry [_]
(s/keys :req-un [::mtx/metrics ::tasks]))

(defmethod ig/init-key ::registry
[_ {:keys [metrics tasks]}]
(let [mobj (mtx/create
{:registry (:registry metrics)
:type :summary
:labels ["name"]
:quantiles []
:name "tasks_timing"
:help "Background task execution timing."})]
(reduce-kv (fn [res k v]
(let [tname (name k)]
(l/debug :action "register task" :name tname)
(assoc res k (mtx/wrap-summary v mobj [tname]))))
{}
tasks)))
(reduce-kv (fn [res k v]
(let [tname (name k)]
(l/debug :hint "register task" :name tname)
(assoc res k (wrap-task-handler metrics tname v))))
{}
tasks))

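A sketch of invoking a task through the registry once it is initialized: each value in the registry map is the original task function wrapped with the timing observer above. It mirrors the pattern the telemetry test uses further down; the `system` var name is illustrative.

(comment
  (let [task-fn (-> system :app.worker/registry :telemetry)]
    ;; runs the task and records its duration under the "telemetry" label
    (task-fn nil)))
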
@@ -174,12 +174,20 @@
:type :image
:metadata {:id (:id fmo1)}}}]})]

;; run the task inmediatelly

;; If we launch gc-touched-task, we should have 4 items to freeze.
(let [task (:app.storage/gc-touched-task th/*system*)
res (task {})]
(t/is (= 4 (:freeze res)))
(t/is (= 0 (:delete res))))

;; run the task immediately
(let [task (:app.tasks.file-media-gc/handler th/*system*)
res (task {})]
(t/is (= 0 (:processed res))))

;; make the file ellegible for GC waiting 300ms (configured
;; make the file eligible for GC waiting 300ms (configured
;; timeout for testing)
(th/sleep 300)

@@ -202,16 +210,22 @@
(t/is (some? (sto/get-object storage (:media-id fmo1))))
(t/is (some? (sto/get-object storage (:thumbnail-id fmo1))))

;; but if we pass the touched gc task two of them should disappear
;; now, we have deleted the unused file-media-object, if we
;; execute the touched-gc task, we should see that two of them
;; are marked to be deleted.
(let [task (:app.storage/gc-touched-task th/*system*)
res (task {})]
(t/is (= 0 (:freeze res)))
(t/is (= 2 (:delete res)))
(t/is (= 2 (:delete res))))

(t/is (nil? (sto/get-object storage (:media-id fmo2))))
(t/is (nil? (sto/get-object storage (:thumbnail-id fmo2))))
(t/is (some? (sto/get-object storage (:media-id fmo1))))
(t/is (some? (sto/get-object storage (:thumbnail-id fmo1)))))

;; Finally, check that some of the objects that are marked as
;; deleted we are unable to retrieve them using standard storage
;; public api.
(t/is (nil? (sto/get-object storage (:media-id fmo2))))
(t/is (nil? (sto/get-object storage (:thumbnail-id fmo2))))
(t/is (some? (sto/get-object storage (:media-id fmo1))))
(t/is (some? (sto/get-object storage (:thumbnail-id fmo1))))

)))

@@ -389,3 +403,73 @@
(t/is (th/ex-info? error))
(t/is (= (:type error-data) :not-found))))
))

(t/deftest query-frame-thumbnails
(let [prof (th/create-profile* 1 {:is-active true})
file (th/create-file* 1 {:profile-id (:id prof)
:project-id (:default-project-id prof)
:is-shared false})
data {::th/type :file-frame-thumbnail
:profile-id (:id prof)
:file-id (:id file)
:frame-id (uuid/next)}]

;; insert an entry on the database with a test value for the thumbnail of this frame
(db/exec-one! th/*pool*
["insert into file_frame_thumbnail(file_id, frame_id, data) values (?, ?, ?)"
(:file-id data) (:frame-id data) "testvalue"])

(let [out (th/query! data)]
(t/is (nil? (:error out)))
(let [result (:result out)]
(t/is (= 1 (count result)))
(t/is (= "testvalue" (:data result)))))))

(t/deftest insert-frame-thumbnails
(let [prof (th/create-profile* 1 {:is-active true})
file (th/create-file* 1 {:profile-id (:id prof)
:project-id (:default-project-id prof)
:is-shared false})
data {::th/type :upsert-frame-thumbnail
:profile-id (:id prof)
:file-id (:id file)
:frame-id (uuid/next)
:data "test insert new value"}
out (th/mutation! data)]

(t/is (nil? (:error out)))
(t/is (nil? (:result out)))

;; retrieve the value from the database and check its content
(let [result (db/exec-one!
th/*pool*
["select data from file_frame_thumbnail where file_id = ? and frame_id = ?"
(:file-id data) (:frame-id data)])]
(t/is (= "test insert new value" (:data result))))))

(t/deftest frame-thumbnails
(let [prof (th/create-profile* 1 {:is-active true})
file (th/create-file* 1 {:profile-id (:id prof)
:project-id (:default-project-id prof)
:is-shared false})
data {::th/type :upsert-frame-thumbnail
:profile-id (:id prof)
:file-id (:id file)
:frame-id (uuid/next)
:data "updated value"}]

;; insert an entry on the database with an old value for the thumbnail of this frame
(db/exec-one! th/*pool*
["insert into file_frame_thumbnail(file_id, frame_id, data) values (?, ?, ?)"
(:file-id data) (:frame-id data) "old value"])

(let [out (th/mutation! data)]
(t/is (nil? (:error out)))
(t/is (nil? (:result out)))

;; retrieve the value from the database and check its content
(let [result (db/exec-one!
th/*pool*
["select data from file_frame_thumbnail where file_id = ? and frame_id = ?"
(:file-id data) (:frame-id data)])]
(t/is (= "updated value" (:data result)))))))

@@ -11,6 +11,7 @@
[app.http :as http]
[app.storage :as sto]
[app.test-helpers :as th]
[app.storage-test :refer [configure-storage-backend]]
[clojure.test :as t]
[buddy.core.bytes :as b]
[datoteka.core :as fs]))
@@ -19,7 +20,9 @@
(t/use-fixtures :each th/database-reset)

(t/deftest duplicate-file
(let [storage (:app.storage/storage th/*system*)
(let [storage (-> (:app.storage/storage th/*system*)
(configure-storage-backend))

sobject (sto/put-object storage {:content (sto/content "content")
:content-type "text/plain"
:other "data"})
@@ -52,7 +55,7 @@

;; (th/print-result! out)

;; Check tha tresult is correct
;; Check that result is correct
(t/is (nil? (:error out)))
(let [result (:result out)]

@@ -90,7 +93,8 @@
))))

(t/deftest duplicate-file-with-deleted-rels
(let [storage (:app.storage/storage th/*system*)
(let [storage (-> (:app.storage/storage th/*system*)
(configure-storage-backend))
sobject (sto/put-object storage {:content (sto/content "content")
:content-type "text/plain"
:other "data"})
@@ -127,7 +131,7 @@

;; (th/print-result! out)

;; Check tha tresult is correct
;; Check that result is correct
(t/is (nil? (:error out)))
(let [result (:result out)]

@@ -151,7 +155,9 @@
))))

(t/deftest duplicate-project
(let [storage (:app.storage/storage th/*system*)
(let [storage (-> (:app.storage/storage th/*system*)
(configure-storage-backend))

sobject (sto/put-object storage {:content (sto/content "content")
:content-type "text/plain"
:other "data"})
@@ -183,7 +189,7 @@
:name "project 1 (copy)"}
out (th/mutation! data)]

;; Check tha tresult is correct
;; Check that result is correct
(t/is (nil? (:error out)))

(let [result (:result out)]
@@ -221,7 +227,8 @@
)))))

(t/deftest duplicate-project-with-deleted-files
(let [storage (:app.storage/storage th/*system*)
(let [storage (-> (:app.storage/storage th/*system*)
(configure-storage-backend))
sobject (sto/put-object storage {:content (sto/content "content")
:content-type "text/plain"
:other "data"})
@@ -254,7 +261,7 @@
:name "project 1 (copy)"}
out (th/mutation! data)]

;; Check tha tresult is correct
;; Check that result is correct
(t/is (nil? (:error out)))

(let [result (:result out)]

@@ -240,6 +240,16 @@
(t/is (nil? error))
(t/is (string? (:token result))))))

(t/deftest test-register-profile-with-email-as-password
(let [data {::th/type :prepare-register-profile
:email "user@example.com"
:password "USER@example.com"}]

(let [{:keys [result error] :as out} (th/mutation! data)]
(t/is (th/ex-info? error))
(t/is (th/ex-of-type? error :validation))
(t/is (th/ex-of-code? error :email-as-password)))))

(t/deftest test-email-change-request
(with-mocks [email-send-mock {:target 'app.emails/send! :return nil}
cfg-get-mock {:target 'app.config/get
@@ -345,3 +355,39 @@
(t/is (th/ex-of-code? error :email-has-permanent-bounces)))

)))

(t/deftest update-profile-password
(let [profile (th/create-profile* 1)
data {::th/type :update-profile-password
:profile-id (:id profile)
:old-password "123123"
:password "foobarfoobar"}
out (th/mutation! data)]
(t/is (nil? (:error out)))
(t/is (nil? (:result out)))
))

(t/deftest update-profile-password-bad-old-password
(let [profile (th/create-profile* 1)
data {::th/type :update-profile-password
:profile-id (:id profile)
:old-password "badpassword"
:password "foobarfoobar"}
{:keys [result error] :as out} (th/mutation! data)]
(t/is (th/ex-info? error))
(t/is (th/ex-of-type? error :validation))
(t/is (th/ex-of-code? error :old-password-not-match))))

(t/deftest update-profile-password-email-as-password
(let [profile (th/create-profile* 1)
data {::th/type :update-profile-password
:profile-id (:id profile)
:old-password "123123"
:password "profile1.test@nodomain.com"}
{:keys [result error] :as out} (th/mutation! data)]
(t/is (th/ex-info? error))
(t/is (th/ex-of-type? error :validation))
(t/is (th/ex-of-code? error :email-as-password))))

@@ -7,6 +7,7 @@
(ns app.storage-test
(:require
[app.common.exceptions :as ex]
[app.common.uuid :as uuid]
[app.db :as db]
[app.storage :as sto]
[app.test-helpers :as th]
@@ -22,9 +23,19 @@
th/database-reset
th/clean-storage))

(defn configure-storage-backend
"Given storage map, returns a storage configured with the appropriate
backend for assets."
([storage]
(assoc storage :backend :tmp))
([storage conn]
(-> storage
(assoc :conn conn)
(assoc :backend :tmp))))

(t/deftest put-and-retrieve-object
(let [storage (:app.storage/storage th/*system*)
(let [storage (-> (:app.storage/storage th/*system*)
(configure-storage-backend))
content (sto/content "content")
object (sto/put-object storage {:content content
:content-type "text/plain"
@@ -39,9 +50,9 @@
(t/is (= "content" (slurp (sto/get-object-path storage object))))
))

(t/deftest put-and-retrieve-expired-object
(let [storage (:app.storage/storage th/*system*)
(let [storage (-> (:app.storage/storage th/*system*)
(configure-storage-backend))
content (sto/content "content")
object (sto/put-object storage {:content content
:content-type "text/plain"
@@ -59,7 +70,8 @@
))

(t/deftest put-and-delete-object
(let [storage (:app.storage/storage th/*system*)
(let [storage (-> (:app.storage/storage th/*system*)
(configure-storage-backend))
content (sto/content "content")
object (sto/put-object storage {:content content
:content-type "text/plain"
@@ -68,7 +80,7 @@
(t/is (true? (sto/del-object storage object)))

;; retrieving the same object should be not nil because the
;; deletion is not inmediate
;; deletion is not immediate
(t/is (some? (sto/get-object-data storage object)))
(t/is (some? (sto/get-object-url storage object)))
(t/is (some? (sto/get-object-path storage object)))
@@ -79,7 +91,8 @@
))

(t/deftest test-deleted-gc-task
(let [storage (:app.storage/storage th/*system*)
(let [storage (-> (:app.storage/storage th/*system*)
(configure-storage-backend))
content (sto/content "content")
object1 (sto/put-object storage {:content content
:content-type "text/plain"
@@ -96,14 +109,17 @@
(let [res (db/exec-one! th/*pool* ["select count(*) from storage_object;"])]
(t/is (= 1 (:count res))))))

(t/deftest test-touched-gc-task
(let [storage (:app.storage/storage th/*system*)
(t/deftest test-touched-gc-task-1
(let [storage (-> (:app.storage/storage th/*system*)
(configure-storage-backend))
prof (th/create-profile* 1)
proj (th/create-project* 1 {:profile-id (:id prof)
:team-id (:default-team-id prof)})

file (th/create-file* 1 {:profile-id (:id prof)
:project-id (:default-project-id prof)
:is-shared false})

mfile {:filename "sample.jpg"
:tempfile (th/tempfile "app/test_files/sample.jpg")
:content-type "image/jpeg"
@@ -140,12 +156,12 @@

;; now check if the storage objects are touched
(let [res (db/exec-one! th/*pool* ["select count(*) from storage_object where touched_at is not null"])]
(t/is (= 2 (:count res))))
(t/is (= 4 (:count res))))

;; run the touched gc task
(let [task (:app.storage/gc-touched-task th/*system*)
res (task {})]
(t/is (= 0 (:freeze res)))
(t/is (= 2 (:freeze res)))
(t/is (= 2 (:delete res))))

;; now check that there are no touched objects
@@ -157,8 +173,85 @@
(t/is (= 2 (:count res))))
)))

(t/deftest test-touched-gc-task-2
(let [storage (-> (:app.storage/storage th/*system*)
(configure-storage-backend))
prof (th/create-profile* 1 {:is-active true})
team-id (:default-team-id prof)
proj-id (:default-project-id prof)
font-id (uuid/custom 10 1)

proj (th/create-project* 1 {:profile-id (:id prof)
:team-id team-id})

file (th/create-file* 1 {:profile-id (:id prof)
:project-id proj-id
:is-shared false})

ttfdata (-> (io/resource "app/test_files/font-1.ttf")
(fs/slurp-bytes))

mfile {:filename "sample.jpg"
:tempfile (th/tempfile "app/test_files/sample.jpg")
:content-type "image/jpeg"
:size 312043}

params1 {::th/type :upload-file-media-object
:profile-id (:id prof)
:file-id (:id file)
:is-local true
:name "testfile"
:content mfile}

params2 {::th/type :create-font-variant
:profile-id (:id prof)
:team-id team-id
:font-id font-id
:font-family "somefont"
:font-weight 400
:font-style "normal"
:data {"font/ttf" ttfdata}}

out1 (th/mutation! params1)
out2 (th/mutation! params2)]

;; (th/print-result! out)

(t/is (nil? (:error out1)))
(t/is (nil? (:error out2)))

;; run the touched gc task
(let [task (:app.storage/gc-touched-task th/*system*)
res (task {})]
(t/is (= 6 (:freeze res)))
(t/is (= 0 (:delete res)))

(let [result-1 (:result out1)
result-2 (:result out2)]

;; now we proceed to manually delete one team-font-variant
(db/exec-one! th/*pool* ["delete from team_font_variant where id = ?" (:id result-2)])

;; revert touched state to all storage objects
(db/exec-one! th/*pool* ["update storage_object set touched_at=now()"])

;; Run the task again
(let [res (task {})]
(t/is (= 2 (:freeze res)))
(t/is (= 4 (:delete res))))

;; now check that there are no touched objects
(let [res (db/exec-one! th/*pool* ["select count(*) from storage_object where touched_at is not null"])]
(t/is (= 0 (:count res))))

;; now check that all objects are marked to be deleted
(let [res (db/exec-one! th/*pool* ["select count(*) from storage_object where deleted_at is not null"])]
(t/is (= 4 (:count res))))))))

(t/deftest test-touched-gc-task-without-delete
(let [storage (:app.storage/storage th/*system*)
(let [storage (-> (:app.storage/storage th/*system*)
(configure-storage-backend))
prof (th/create-profile* 1)
proj (th/create-project* 1 {:profile-id (:id prof)
:team-id (:default-team-id prof)})
@@ -198,72 +291,3 @@
;; check that we have all object in the db
(let [res (db/exec-one! th/*pool* ["select count(*) from storage_object where deleted_at is null"])]
(t/is (= 4 (:count res)))))))

;; Recheck is the mechanism for delete leaked resources on
;; transaction failure.

(t/deftest test-recheck
(let [storage (:app.storage/storage th/*system*)
content (sto/content "content")
object (sto/put-object storage {:content content
:content-type "text/plain"})]
;; Sleep fo 50ms
(th/sleep 50)

(let [rows (db/exec! th/*pool* ["select * from storage_pending"])]
(t/is (= 1 (count rows)))
(t/is (= (:id object) (:id (first rows)))))

;; Artificially make all storage_pending object 1 hour older.
(db/exec-one! th/*pool* ["update storage_pending set created_at = created_at - '1 hour'::interval"])

;; Sleep fo 50ms
(th/sleep 50)

;; Run recheck task
(let [task (:app.storage/recheck-task th/*system*)
res (task {})]
(t/is (= 1 (:processed res)))
(t/is (= 0 (:deleted res))))

;; After recheck task, storage-pending table should be empty
(let [rows (db/exec! th/*pool* ["select * from storage_pending"])]
(t/is (= 0 (count rows))))))

(t/deftest test-recheck-with-rollback
(let [storage (:app.storage/storage th/*system*)
content (sto/content "content")]

;; check with aborted transaction
(ex/ignoring
(db/with-atomic [conn th/*pool*]
(let [storage (assoc storage :conn conn)] ; make participate storage in the transaction
(sto/put-object storage {:content content
:content-type "text/plain"})
(throw (ex-info "expected" {})))))

;; let a 200ms window for recheck registration thread
;; completion before proceed.
(th/sleep 200)

;; storage_pending table should have the object
;; registred independently of the aborted transaction.
(let [rows (db/exec! th/*pool* ["select * from storage_pending"])]
(t/is (= 1 (count rows))))

;; Artificially make all storage_pending object 1 hour older.
(db/exec-one! th/*pool* ["update storage_pending set created_at = created_at - '1 hour'::interval"])

;; Sleep fo 50ms
(th/sleep 50)

;; Run recheck task
(let [task (:app.storage/recheck-task th/*system*)
res (task {})]
(t/is (= 1 (:processed res)))
(t/is (= 1 (:deleted res))))

;; After recheck task, storage-pending table should be empty
(let [rows (db/exec! th/*pool* ["select * from storage_pending"])]
(t/is (= 0 (count rows))))))

backend/test/app/tasks_telemetry_test.clj (new file, 46 lines)
@@ -0,0 +1,46 @@
;; This Source Code Form is subject to the terms of the Mozilla Public
;; License, v. 2.0. If a copy of the MPL was not distributed with this
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
;;
;; Copyright (c) UXBOX Labs SL

(ns app.tasks-telemetry-test
(:require
[app.db :as db]
[app.emails :as emails]
[app.test-helpers :as th]
[app.util.time :as dt]
[clojure.pprint :refer [pprint]]
[clojure.test :as t]
[mockery.core :refer [with-mocks]]))

(t/use-fixtures :once th/state-init)
(t/use-fixtures :each th/database-reset)

(t/deftest test-base-report-data-structure
(with-mocks [mock {:target 'app.tasks.telemetry/send!
:return nil}]
(let [task-fn (-> th/*system* :app.worker/registry :telemetry)
prof (th/create-profile* 1 {:is-active true})]

;; run the task
(task-fn nil)

(t/is (:called? @mock))
(let [[data] (-> @mock :call-args)]
(t/is (contains? data :total-fonts))
(t/is (contains? data :total-users))
(t/is (contains? data :total-projects))
(t/is (contains? data :total-files))
(t/is (contains? data :total-teams))
(t/is (contains? data :total-comments))
(t/is (contains? data :instance-id))
(t/is (contains? data :jvm-cpus))
(t/is (contains? data :jvm-heap-max))
(t/is (contains? data :max-users-on-team))
(t/is (contains? data :avg-users-on-team))
(t/is (contains? data :max-files-on-project))
(t/is (contains? data :avg-files-on-project))
(t/is (contains? data :max-projects-on-team))
(t/is (contains? data :avg-files-on-project))
(t/is (contains? data :version))))))

@@ -52,11 +52,11 @@
(assoc-in [:app.db/pool :uri] (:database-uri config))
(assoc-in [:app.db/pool :username] (:database-username config))
(assoc-in [:app.db/pool :password] (:database-password config))
(assoc-in [[:app.main/main :app.storage.fs/backend] :directory] "/tmp/app/storage")
(dissoc :app.srepl/server
:app.http/server
:app.http/router
:app.notifications/handler
:app.loggers.sentry/reporter
:app.http.oauth/google
:app.http.oauth/gitlab
:app.http.oauth/github
@@ -64,8 +64,7 @@
:app.worker/scheduler
:app.worker/worker)
(d/deep-merge
{:app.storage/storage {:backend :tmp}
:app.tasks.file-media-gc/handler {:max-age (dt/duration 300)}}))
{:app.tasks.file-media-gc/handler {:max-age (dt/duration 300)}}))
_ (ig/load-namespaces config)
system (-> (ig/prep config)
(ig/init))]
@@ -249,7 +248,7 @@
[expr]
`(try
{:error nil
:result ~expr}
:result (deref ~expr)}
(catch Exception e#
{:error (handle-error e#)
:result nil})))

@@ -1,38 +1,39 @@
{:deps
{org.clojure/clojure {:mvn/version "1.10.3"}
org.clojure/data.json {:mvn/version "2.3.1"}
org.clojure/data.json {:mvn/version "2.4.0"}
org.clojure/tools.cli {:mvn/version "1.0.206"}
metosin/jsonista {:mvn/version "0.3.3"}
org.clojure/clojurescript {:mvn/version "1.10.844"}
metosin/jsonista {:mvn/version "0.3.5"}
org.clojure/clojurescript {:mvn/version "1.10.914"}

;; Logging
org.clojure/tools.logging {:mvn/version "1.2.3"}
org.apache.logging.log4j/log4j-api {:mvn/version "2.17.0"}
org.apache.logging.log4j/log4j-core {:mvn/version "2.17.0"}
org.apache.logging.log4j/log4j-web {:mvn/version "2.17.0"}
org.apache.logging.log4j/log4j-jul {:mvn/version "2.17.0"}
org.apache.logging.log4j/log4j-slf4j18-impl {:mvn/version "2.17.0"}
org.apache.logging.log4j/log4j-api {:mvn/version "2.17.1"}
org.apache.logging.log4j/log4j-core {:mvn/version "2.17.1"}
org.apache.logging.log4j/log4j-web {:mvn/version "2.17.1"}
org.apache.logging.log4j/log4j-jul {:mvn/version "2.17.1"}
org.apache.logging.log4j/log4j-slf4j18-impl {:mvn/version "2.17.1"}
org.slf4j/slf4j-api {:mvn/version "2.0.0-alpha1"}

selmer/selmer {:mvn/version "1.12.40"}
expound/expound {:mvn/version "0.8.9"}
selmer/selmer {:mvn/version "1.12.50"}
criterium/criterium {:mvn/version "0.4.6"}

expound/expound {:mvn/version "0.9.0"}
com.cognitect/transit-clj {:mvn/version "1.0.324"}
com.cognitect/transit-cljs {:mvn/version "0.8.269"}
java-http-clj/java-http-clj {:mvn/version "0.4.2"}
java-http-clj/java-http-clj {:mvn/version "0.4.3"}

funcool/promesa {:mvn/version "6.0.1"}
funcool/cuerdas {:mvn/version "2021.05.29-0"}
funcool/promesa {:mvn/version "7.0.444"}
funcool/cuerdas {:mvn/version "2022.01.14-391"}

lambdaisland/uri {:mvn/version "1.4.70"
lambdaisland/uri {:mvn/version "1.13.95"
:exclusions [org.clojure/data.json]}

frankiesardo/linked {:mvn/version "1.3.0"}
danlentz/clj-uuid {:mvn/version "0.1.9"}
commons-io/commons-io {:mvn/version "2.8.0"}
commons-io/commons-io {:mvn/version "2.11.0"}
com.sun.mail/jakarta.mail {:mvn/version "2.0.1"}

;; exception printing
fipp/fipp {:mvn/version "0.6.24"}
fipp/fipp {:mvn/version "0.6.25"}
io.aviso/pretty {:mvn/version "1.1.1"}
environ/environ {:mvn/version "1.2.0"}}
:paths ["src"]
@@ -41,26 +42,17 @@
{:extra-deps
{org.clojure/tools.namespace {:mvn/version "RELEASE"}
org.clojure/test.check {:mvn/version "RELEASE"}
org.clojure/tools.deps.alpha {:mvn/version "RELEASE"}
thheller/shadow-cljs {:mvn/version "2.12.6"}
thheller/shadow-cljs {:mvn/version "2.17.3"}
com.bhauman/rebel-readline {:mvn/version "RELEASE"}
criterium/criterium {:mvn/version "RELEASE"}
mockery/mockery {:mvn/version "RELEASE"}}
:extra-paths ["test" "dev"]}

:repl
{:extra-deps
{com.bhauman/rebel-readline {:mvn/version "RELEASE"}}
:main-opts ["-m" "rebel-readline.main"]}

:kaocha
{:extra-deps {lambdaisland/kaocha {:mvn/version "RELEASE"}}
:main-opts ["-m" "kaocha.runner"]}

:test
{:extra-paths ["test"]
:extra-deps {io.github.cognitect-labs/test-runner
{:git/url "https://github.com/cognitect-labs/test-runner.git"
:sha "705ad25bbf0228b1c38d0244a36001c2987d7337"}}
:extra-deps
{io.github.cognitect-labs/test-runner
{:git/tag "v0.5.0" :git/sha "b3fd0d2"}}
:exec-fn cognitect.test-runner.api/test}

:shadow-cljs

@@ -6,7 +6,14 @@
"dependencies": {
"luxon": "^1.27.0"
},
"scripts": {
"compile-and-watch-test": "clojure -M:dev:shadow-cljs watch test",
"compile-test": "clojure -M:dev:shadow-cljs compile test --config-merge '{:autorun false}'",
"run-test": "node target/test.js",
"test": "yarn run compile-test && yarn run run-test"
},
"devDependencies": {
"shadow-cljs": "2.17.3",
"source-map-support": "^0.5.19",
"ws": "^7.4.6"
}

common/scripts/repl (new executable file, 9 lines)
@@ -0,0 +1,9 @@
#!/usr/bin/env bash

export PENPOT_FLAGS="enable-asserts enable-audit-log $PENPOT_FLAGS"
export OPTIONS="-A:dev -J-Djava.util.logging.manager=org.apache.logging.log4j.jul.LogManager -J-XX:+UseZGC -J-XX:ConcGCThreads=1 -J-XX:-OmitStackTraceInFastThrow -J-Xms50m -J-Xmx512m";
export OPTIONS_EVAL="nil"
# export OPTIONS_EVAL="(set! *warn-on-reflection* true)"

set -ex
exec clojure $OPTIONS -M -e "$OPTIONS_EVAL" -m rebel-readline.main

@@ -6,12 +6,29 @@
:builds
{:test
{:target :node-test
:output-to "target/tests.js"
:output-to "target/test.js"
:output-dir "target/test/"
:ns-regexp "^app.common.*-test$"
;; :autorun true
:autorun true

:compiler-options
{:output-feature-set :es-next
:output-wrapper false
:warnings {:fn-deprecated false}}}}}
:source-map true
:source-map-include-sources-content true
:source-map-detail-level :all
:warnings {:fn-deprecated false}}}

:bench
{:target :node-script
:output-to "target/bench.js"
:output-dir "target/bench/"
:main bench/main
:devtools {:autoload false}

:compiler-options
{:output-feature-set :es-next
:output-wrapper false}}}
}

@@ -9,7 +9,7 @@

;; Extract some attributes of a list of shapes.
;; For each attribute, if the value is the same in all shapes,
;; wll take this value. If there is any shape that is different,
;; will take this value. If there is any shape that is different,
;; the value of the attribute will be the keyword :multiple.
;;
;; If some shape has the value nil in any attribute, it's

common/src/app/common/colors.cljc (new file, 19 lines)
@@ -0,0 +1,19 @@
;; This Source Code Form is subject to the terms of the Mozilla Public
;; License, v. 2.0. If a copy of the MPL was not distributed with this
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
;;
;; Copyright (c) UXBOX Labs SL

(ns app.common.colors
(:refer-clojure :exclude [test]))

(def black "#000000")
(def canvas "#E8E9EA")
(def default-layout "#DE4762")
(def gray-20 "#B1B2B5")
(def gray-30 "#7B7D85")
(def info "#59B9E2")
(def test "#fabada")
(def white "#FFFFFF")
(def primary "#31EFB8")

Some files were not shown because too many files have changed in this diff.