Mirror of https://github.com/rmcrackan/Libation.git, synced 2025-12-24 06:28:02 -05:00

Compare commits: 1731 commits
5  .cdmurls.json  Normal file
@@ -0,0 +1,5 @@
{
  "CdmUrls": [
    "https://ollj0gz40d.execute-api.us-west-2.amazonaws.com/default/AudibleCdm"
  ]
}
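To sanity-check this config locally, a minimal sketch (not part of the repository) that lists the configured CDM endpoints with `jq`:

```bash
# Illustration only: print each endpoint in the CdmUrls array.
# Assumes jq is installed and the file sits in the current directory.
jq -r '.CdmUrls[]' .cdmurls.json
```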
43  .github/ISSUE_TEMPLATE/bug_report.md  vendored  Normal file
@@ -0,0 +1,43 @@
---
name: Bug report
about: Create a report to help us improve Libation
title: ''
labels: bug
assignees: ''
---

PLEASE FILL OUT THE FOLLOWING. Bug reports with limited information or lacking an attached log file may get limited or delayed help.

___

## Describe the bug
A clear and concise description of what the bug is.

## To Reproduce
Steps to reproduce the behavior:

1. Go to '...'
2. Click on '....'
3. Scroll down to '....'
4. See error

## Expected behavior
A clear and concise description of what you expected to happen.

## Screenshots
If applicable, add screenshots to help explain your problem.

## Platform
[e.g. Windows 10, Windows 11, Mac, Linux (State distribution)]

## Log Files
Attach your Libation log file here. If your user folder contains the file "LibationCrash.log", attach that also.

**Default Log File Locations**
|Platform|Folder|
|-|-|
|Windows|`%userprofile%\Libation`|
|macOS|`~/Library/Application Support/Libation`|
|Linux|`~/.local/share/Libation`|

Alternatively, you may open the log file folder from within Libation: open Libation's settings, and on the first tab you can click the button 'Open log folder'.
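The table of default log folders above translates directly into a lookup by platform. A hypothetical helper (not part of the template or the repository) that echoes the expected folder for the current machine:

```bash
#!/usr/bin/env bash
# Hypothetical helper: print the default Libation log folder for this platform,
# mirroring the table in the bug report template.
case "$(uname -s)" in
  Darwin)               echo "$HOME/Library/Application Support/Libation" ;;
  Linux)                echo "$HOME/.local/share/Libation" ;;
  MINGW*|MSYS*|CYGWIN*) echo "$USERPROFILE/Libation" ;;
  *)                    echo "unknown platform" >&2; exit 1 ;;
esac
```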
31  .github/ISSUE_TEMPLATE/feature_request.md  vendored  Normal file
@@ -0,0 +1,31 @@
---
name: Feature request
about: Suggest an idea for this project
title: ''
labels: enhancement
assignees: ''
---

**No-go ideas**
There are lots of great ideas and many are beyond what we intend to do for Libation. Some good ideas which we do not intend to pursue:

* comprehensive api/cli
* aax/audiobook import
* bulk rename of existing files
* general metadata/tag editor
* playback features
* web gui
* supporting non-audible vendors
* official docker support

**Is your feature request related to a problem? Please describe.**
A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]

**Describe the solution you'd like**
A clear and concise description of what you want to happen.

**Describe alternatives you've considered**
A clear and concise description of any alternative solutions or features you've considered.

**Additional context**
Add any other context or screenshots about the feature request here.
8  .github/dependabot.yml  vendored  Normal file
@@ -0,0 +1,8 @@
---
version: 2
updates:
  # Maintain dependencies for GitHub Actions
  - package-ecosystem: "github-actions"
    directory: "/"
    schedule:
      interval: "daily"
83  .github/workflows/build-linux.yml  vendored  Normal file
@@ -0,0 +1,83 @@
# build-linux.yml
# Reusable workflow that builds the Linux (x64 and arm64) versions of Libation.
---
name: build

on:
  workflow_call:
    inputs:
      libation-version:
        type: string
        required: true
      dotnet-version:
        type: string
        required: true
      run-unit-tests:
        type: boolean
      publish-r2r:
        type: boolean
      retention-days:
        type: number
      architecture:
        type: string
        description: "CPU architecture targeted by the build."
        required: true
      OS:
        type: string
        description: >
          The operating system targeted by the build.

          There must be a corresponding Bundle_$OS.sh script file in ./Scripts
        required: true

jobs:
  build:
    name: "${{ inputs.OS }}-${{ inputs.architecture }}"
    runs-on: ubuntu-latest
    env:
      RUNTIME_ID: "linux-${{ inputs.architecture }}"
    steps:
      - uses: actions/checkout@v6

      - uses: actions/setup-dotnet@v5
        with:
          dotnet-version: ${{ inputs.dotnet-version }}
          dotnet-quality: "ga"

      - name: Unit test
        if: ${{ inputs.run-unit-tests }}
        working-directory: ./Source
        run: dotnet test

      - name: Publish
        id: publish
        working-directory: ./Source
        run: |
          PUBLISH_ARGS=(
            '--runtime' '${{ env.RUNTIME_ID }}'
            '--configuration' 'Release'
            '--output' '../bin'
            '-p:PublishProtocol=FileSystem'
            "-p:PublishReadyToRun=${{ inputs.publish-r2r }}"
            '-p:SelfContained=true')

          dotnet publish LibationAvalonia/LibationAvalonia.csproj "${PUBLISH_ARGS[@]}"
          dotnet publish LoadByOS/LinuxConfigApp/LinuxConfigApp.csproj "${PUBLISH_ARGS[@]}"
          dotnet publish LibationCli/LibationCli.csproj "${PUBLISH_ARGS[@]}"
          dotnet publish HangoverAvalonia/HangoverAvalonia.csproj "${PUBLISH_ARGS[@]}"

      - name: Build bundle
        id: bundle
        run: |
          SCRIPT=./Scripts/Bundle_${{ inputs.OS }}.sh
          chmod +rx ${SCRIPT}
          ${SCRIPT} ./bin "${{ inputs.libation-version }}" "${{ inputs.architecture }}"
          artifact=$(ls ./bundle)
          echo "artifact=${artifact}" >> "${GITHUB_OUTPUT}"

      - uses: actions/upload-artifact@v6
        with:
          name: ${{ steps.bundle.outputs.artifact }}
          path: ./bundle/${{ steps.bundle.outputs.artifact }}
          if-no-files-found: error
          retention-days: ${{ inputs.retention-days }}
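The OS input's description notes that each target needs a matching Bundle_$OS.sh script in ./Scripts. A sketch of running that bundle step locally, assuming the publish step has already populated ./bin; the version number is made up:

```bash
# Local approximation of the workflow's "Build bundle" step (Debian leg).
# "12.0.0" and the x64 architecture are illustrative values only.
chmod +rx ./Scripts/Bundle_Debian.sh
./Scripts/Bundle_Debian.sh ./bin "12.0.0" "x64"
ls ./bundle   # the workflow uploads whatever single artifact lands here
```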
104  .github/workflows/build-mac.yml  vendored  Normal file
@@ -0,0 +1,104 @@
# build-mac.yml
# Reusable workflow that builds the MacOS (x64 and arm64) versions of Libation.
---
name: build

on:
  workflow_call:
    inputs:
      libation-version:
        type: string
        required: true
      dotnet-version:
        type: string
        required: true
      run-unit-tests:
        type: boolean
      publish-r2r:
        type: boolean
      retention-days:
        type: number
      sign-app:
        type: boolean
        description: "Whether to sign and notarize the app bundle and dmg."
      architecture:
        type: string
        description: "CPU architecture targeted by the build."
        required: true

jobs:
  build:
    name: "macOS-${{ inputs.architecture }}"
    runs-on: macos-latest
    env:
      RUNTIME_ID: "osx-${{ inputs.architecture }}"
      WAIT_FOR_NOTARIZE: ${{ vars.WAIT_FOR_NOTARIZE == 'true' }}
    steps:
      - uses: apple-actions/import-codesign-certs@v6
        if: ${{ inputs.sign-app }}
        with:
          p12-file-base64: ${{ secrets.DISTRIBUTION_SIGNING_CERT }}
          p12-password: ${{ secrets.DISTRIBUTION_SIGNING_CERT_PW }}

      - uses: actions/checkout@v6

      - uses: actions/setup-dotnet@v5
        with:
          dotnet-version: ${{ inputs.dotnet-version }}
          dotnet-quality: "ga"

      - name: Unit test
        if: ${{ inputs.run-unit-tests }}
        working-directory: ./Source
        run: dotnet test

      - name: Publish
        id: publish
        working-directory: ./Source
        run: |
          PUBLISH_ARGS=(
            '--runtime' '${{ env.RUNTIME_ID }}'
            '--configuration' 'Release'
            '--output' '../bin'
            '-p:PublishProtocol=FileSystem'
            "-p:PublishReadyToRun=${{ inputs.publish-r2r }}"
            '-p:SelfContained=true')

          dotnet publish LibationAvalonia/LibationAvalonia.csproj "${PUBLISH_ARGS[@]}"
          dotnet publish LoadByOS/MacOSConfigApp/MacOSConfigApp.csproj "${PUBLISH_ARGS[@]}"
          dotnet publish LibationCli/LibationCli.csproj "${PUBLISH_ARGS[@]}"
          dotnet publish HangoverAvalonia/HangoverAvalonia.csproj "${PUBLISH_ARGS[@]}"

      - name: Build bundle
        id: bundle
        run: |
          SCRIPT=./Scripts/Bundle_MacOS.sh
          chmod +rx ${SCRIPT}
          ${SCRIPT} ./bin "${{ inputs.libation-version }}" "${{ inputs.architecture }}" "${{ inputs.sign-app }}"
          artifact=$(ls ./bundle)
          echo "artifact=${artifact}" >> "${GITHUB_OUTPUT}"

      - name: Notarize bundle
        if: ${{ inputs.sign-app }}
        run: |
          if [ ${{ vars.WAIT_FOR_NOTARIZE == 'true' }} ]; then
            WAIT="--wait"
          fi
          echo "::debug::Submitting the disk image for notarization"
          RESPONSE=$(xcrun notarytool submit ./bundle/${{ steps.bundle.outputs.artifact }} $WAIT --no-progress --apple-id ${{ vars.APPLE_DEV_EMAIL }} --password ${{ secrets.APPLE_DEV_PASSWORD }} --team-id ${{ secrets.APPLE_TEAM_ID }} 2>&1)
          SUBMISSION_ID=$(echo "$RESPONSE" | awk '/id: / { print $2;exit; }')

          echo "$RESPONSE"
          echo "::notice::Notary Submission Id: $SUBMISSION_ID"

          if [ ${{ vars.WAIT_FOR_NOTARIZE == 'true' }} ]; then
            echo "::debug::Stapling the notarization ticket to the disk image"
            xcrun stapler staple "./bundle/${{ steps.bundle.outputs.artifact }}"
          fi

      - uses: actions/upload-artifact@v6
        with:
          name: ${{ steps.bundle.outputs.artifact }}
          path: ./bundle/${{ steps.bundle.outputs.artifact }}
          if-no-files-found: error
          retention-days: ${{ inputs.retention-days }}
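When WAIT_FOR_NOTARIZE is disabled, the workflow only records the submission id. A hedged follow-up sketch (not part of the workflow) for checking that submission later with notarytool; the credentials shown are placeholders:

```bash
# Query the status of a previous notarization submission.
# SUBMISSION_ID comes from the "::notice::Notary Submission Id" log line.
xcrun notarytool info "$SUBMISSION_ID" \
  --apple-id "dev@example.com" \
  --password "$APP_SPECIFIC_PASSWORD" \
  --team-id "TEAMID1234"
```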
87  .github/workflows/build-windows.yml  vendored  Normal file
@@ -0,0 +1,87 @@
# build-windows.yml
# Reusable workflow that builds the Windows versions of Libation.
---
name: build

on:
  workflow_call:
    inputs:
      libation-version:
        type: string
        required: true
      dotnet-version:
        type: string
        required: true
      run-unit-tests:
        type: boolean
      publish-r2r:
        type: boolean
      retention-days:
        type: number

jobs:
  build:
    name: "Windows-${{ matrix.release_name }}-${{ matrix.architecture }} (${{ matrix.ui }})"
    runs-on: windows-latest
    strategy:
      matrix:
        ui: [Avalonia]
        architecture: [x64]
        release_name: [chardonnay]
        include:
          - architecture: x64
            ui: WinForms
            release_name: classic
            prefix: Classic-
          - architecture: arm64
            ui: Avalonia
            release_name: chardonnay
    steps:
      - uses: actions/checkout@v6

      - uses: actions/setup-dotnet@v5
        with:
          dotnet-version: ${{ inputs.dotnet-version }}
          dotnet-quality: "ga"

      - name: Unit test
        if: ${{ inputs.run-unit-tests }}
        working-directory: ./Source
        run: dotnet test

      - name: Publish
        working-directory: ./Source
        run: |
          $PUBLISH_ARGS=@(
            "--runtime", "win-${{ matrix.architecture }}",
            "--configuration", "Release",
            "--output", "../bin",
            "-p:PublishProtocol=FileSystem",
            "-p:PublishReadyToRun=${{ inputs.publish-r2r }}",
            "-p:SelfContained=true")

          dotnet publish "Libation${{ matrix.ui }}/Libation${{ matrix.ui }}.csproj" $PUBLISH_ARGS
          dotnet publish "LoadByOS/WindowsConfigApp/WindowsConfigApp.csproj" $PUBLISH_ARGS
          dotnet publish "LibationCli/LibationCli.csproj" $PUBLISH_ARGS
          dotnet publish "Hangover${{ matrix.ui }}/Hangover${{ matrix.ui }}.csproj" $PUBLISH_ARGS

      - name: Zip artifact
        id: zip
        working-directory: ./bin
        run: |
          $delfiles = @(
            "WindowsConfigApp.exe",
            "WindowsConfigApp.runtimeconfig.json",
            "WindowsConfigApp.deps.json")

          foreach ($file in $delfiles){ if (test-path $file){ Remove-Item $file } }
          $artifact="${{ matrix.prefix }}Libation.${{ inputs.libation-version }}-windows-${{ matrix.release_name }}-${{ matrix.architecture }}.zip"
          "artifact=$artifact" >> $env:GITHUB_OUTPUT
          Compress-Archive -Path * -DestinationPath "$artifact"

      - uses: actions/upload-artifact@v6
        with:
          name: ${{ steps.zip.outputs.artifact }}
          path: ./bin/${{ steps.zip.outputs.artifact }}
          if-no-files-found: error
          retention-days: ${{ inputs.retention-days }}
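The Zip artifact step produces names like Libation.<version>-windows-chardonnay-x64.zip, which is what the release asset patterns in .releaseindex.json (later in this diff) are written against. A quick check of a sample name against that pattern, rewritten for grep -E since POSIX ERE has no \d or non-capturing groups; the version number is made up:

```bash
# Illustration only: test a hypothetical artifact name against the
# WindowsAvalonia pattern, adapted for grep -E.
pattern='Libation\.[0-9]+\.[0-9]+\.[0-9]+(\.[0-9]+)?-win(dows)?-chardonnay-x64\.zip'
echo "Libation.12.0.0-windows-chardonnay-x64.zip" | grep -Eq "$pattern" && echo "match"
```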
69  .github/workflows/build.yml  vendored  Normal file
@@ -0,0 +1,69 @@
# build.yml
# Reusable workflow that builds Libation for all platforms.
---
name: build

on:
  workflow_call:
    inputs:
      libation-version:
        type: string
        description: "Libation version number"
        required: true
      dotnet-version:
        type: string
        default: "10.x"
        description: ".NET version to target"
      run-unit-tests:
        type: boolean
        description: "Whether to run unit tests prior to publishing."
      publish-r2r:
        type: boolean
        description: "Whether to publish assemblies as ReadyToRun."
      release:
        type: boolean
        description: "Whether this workflow is being called as a release"
      retention-days:
        type: number
        description: "Number of days the artifacts are to be retained."

jobs:
  windows:
    uses: ./.github/workflows/build-windows.yml
    with:
      libation-version: ${{ inputs.libation-version }}
      dotnet-version: ${{ inputs.dotnet-version }}
      run-unit-tests: ${{ inputs.run-unit-tests }}
      publish-r2r: ${{ inputs.publish-r2r }}
      retention-days: ${{ inputs.retention-days }}

  macOS:
    strategy:
      matrix:
        architecture: [x64, arm64]
    uses: ./.github/workflows/build-mac.yml
    with:
      libation-version: ${{ inputs.libation-version }}
      dotnet-version: ${{ inputs.dotnet-version }}
      run-unit-tests: ${{ inputs.run-unit-tests }}
      publish-r2r: ${{ inputs.publish-r2r }}
      retention-days: ${{ inputs.retention-days }}
      architecture: ${{ matrix.architecture }}
      sign-app: ${{ inputs.release || vars.SIGN_MAC_APP_ON_VALIDATE == 'true' }}
    secrets: inherit

  linux:
    strategy:
      matrix:
        OS: [Redhat, Debian]
        architecture: [x64, arm64]
    uses: ./.github/workflows/build-linux.yml
    with:
      libation-version: ${{ inputs.libation-version }}
      dotnet-version: ${{ inputs.dotnet-version }}
      run-unit-tests: ${{ inputs.run-unit-tests }}
      publish-r2r: ${{ inputs.publish-r2r }}
      retention-days: ${{ inputs.retention-days }}
      architecture: ${{ matrix.architecture }}
      OS: ${{ matrix.OS }}
67  .github/workflows/deploy.yml  vendored  Normal file
@@ -0,0 +1,67 @@
name: Deploy VitePress site to Pages

on:
  # Runs on pushes targeting the `master` branch (this repository's default branch).
  push:
    branches: [master]
    paths:
      - .github/workflows/deploy.yml
      - docs/**

  # Allows you to run this workflow manually from the Actions tab
  workflow_dispatch:

# Sets permissions of the GITHUB_TOKEN to allow deployment to GitHub Pages
permissions:
  contents: read
  pages: write
  id-token: write

# Allow only one concurrent deployment, skipping runs queued between the run in-progress and latest queued.
# However, do NOT cancel in-progress runs as we want to allow these production deployments to complete.
concurrency:
  group: pages
  cancel-in-progress: false

jobs:
  # Build job
  build:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v6
        with:
          fetch-depth: 0 # Not needed if lastUpdated is not enabled
      # - uses: pnpm/action-setup@v4 # Uncomment this block if you're using pnpm
      #   with:
      #     version: 9 # Not needed if you've set "packageManager" in package.json
      # - uses: oven-sh/setup-bun@v1 # Uncomment this if you're using Bun
      - name: Setup Node
        uses: actions/setup-node@v6
        with:
          node-version: 24
          cache: npm # or pnpm / yarn
      - name: Setup Pages
        uses: actions/configure-pages@v5
      - name: Install dependencies
        run: npm ci # or pnpm install / yarn install / bun install
      - name: Build with VitePress
        run: npm run docs:build # or pnpm docs:build / yarn docs:build / bun run docs:build
      - name: Upload artifact
        uses: actions/upload-pages-artifact@v4
        with:
          path: .vitepress/dist

  # Deployment job
  deploy:
    environment:
      name: github-pages
      url: ${{ steps.deployment.outputs.page_url }}
    needs: build
    runs-on: ubuntu-latest
    name: Deploy
    steps:
      - name: Deploy to GitHub Pages
        id: deployment
        uses: actions/deploy-pages@v4
63  .github/workflows/docker.yml  vendored  Normal file
@@ -0,0 +1,63 @@
# docker.yml
# Reusable workflow that builds a docker image for Libation.
---
name: docker

on:
  workflow_call:
    inputs:
      version:
        type: string
        description: "Version number"
        required: true
      release:
        type: boolean
        description: "Is this a release build?"
        required: true
    secrets:
      docker_username:
        required: true
      docker_token:
        required: true

jobs:
  build_and_push:
    runs-on: ubuntu-latest

    steps:
      - name: Checkout
        uses: actions/checkout@v6

      - name: Set up QEMU
        uses: docker/setup-qemu-action@v3

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3

      - name: Login to Docker Hub
        if: ${{ inputs.release }}
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.docker_username }}
          password: ${{ secrets.docker_token }}

      - name: Generate docker image tags
        id: metadata
        uses: docker/metadata-action@v5
        with:
          flavor: |
            latest=true
          images: |
            name=${{ secrets.docker_username }}/libation
          tags: |
            type=raw,value=${{ inputs.version }},enable=${{ inputs.release }}

      - name: Build and push image
        uses: docker/build-push-action@v6
        with:
          platforms: linux/amd64,linux/arm64
          push: ${{ steps.metadata.outputs.tags != ''}}
          cache-from: type=gha
          cache-to: type=gha,mode=max
          tags: ${{ steps.metadata.outputs.tags }}
          labels: ${{ steps.metadata.outputs.labels }}
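Outside of Actions, the Build and push image step corresponds roughly to the buildx invocation sketched below; the Docker Hub namespace, tag, and Dockerfile location are assumptions, and --push requires a prior docker login:

```bash
# Rough local equivalent of the multi-arch build-and-push step (illustrative).
docker buildx build \
  --platform linux/amd64,linux/arm64 \
  --tag exampleuser/libation:12.0.0 \
  --push \
  .
```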
60  .github/workflows/release.yml  vendored  Normal file
@@ -0,0 +1,60 @@
# release.yml
# Builds and creates the release on any tags starting with a `v`
---
name: release
on:
  push:
    tags:
      - "v*"
jobs:
  prerelease:
    runs-on: ubuntu-slim
    outputs:
      version: ${{ steps.get_version.outputs.version }}
    steps:
      - name: Get tag version
        id: get_version
        run: |
          export TAG="${{ github.ref_name }}"
          echo "version=${TAG#v}" >> "${GITHUB_OUTPUT}"

  docker:
    needs: [prerelease]
    uses: ./.github/workflows/docker.yml
    with:
      version: ${{ needs.prerelease.outputs.version }}
      release: true
    secrets:
      docker_username: ${{ secrets.DOCKERHUB_USERNAME }}
      docker_token: ${{ secrets.DOCKERHUB_TOKEN }}

  build:
    needs: [prerelease]
    uses: ./.github/workflows/build.yml
    secrets: inherit
    with:
      libation-version: ${{ needs.prerelease.outputs.version }}
      publish-r2r: true
      release: true

  release:
    needs: [prerelease, build]
    runs-on: ubuntu-latest
    steps:
      - name: Download artifacts
        uses: actions/download-artifact@v7
        with:
          path: artifacts
          pattern: "*(Classic-)Libation.*"

      - name: Release
        id: release
        uses: softprops/action-gh-release@v2
        with:
          name: Libation ${{ needs.prerelease.outputs.version }}
          body: <Put a body here>
          token: ${{ secrets.GITHUB_TOKEN }}
          draft: true
          prerelease: false
          files: |
            artifacts/*/*
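The Get tag version step relies on shell parameter expansion to strip the leading v from the tag, for example:

```bash
TAG="v12.0.0"     # illustrative tag name
echo "${TAG#v}"   # prints 12.0.0; ${TAG#v} removes a leading "v" if present
```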
22  .github/workflows/validate-appstream-metainfo.yaml  vendored  Normal file
@@ -0,0 +1,22 @@
name: Validate MetaInfo
"on":
  pull_request:
    branches: ["master"]
    paths:
      - .github/workflows/validate-appstream-metainfo.yml
      - Source/LoadByOS/LinuxConfigApp/com.getlibation.Libation.metainfo.xml
  push:
    branches: ["master"]
    paths:
      - .github/workflows/validate-appstream-metainfo.yml
      - Source/LoadByOS/LinuxConfigApp/com.getlibation.Libation.metainfo.xml

jobs:
  validate-appstream-metainfo:
    runs-on: ubuntu-latest
    container:
      image: ghcr.io/flathub/flatpak-builder-lint:latest
    steps:
      - uses: actions/checkout@v6
      - name: Check the MetaInfo file
        run: flatpak-builder-lint appstream Source/LoadByOS/LinuxConfigApp/com.getlibation.Libation.metainfo.xml
.github/workflows/validate-desktop-file.yaml (vendored, new file, 21 lines)
@@ -0,0 +1,21 @@
name: Check desktop file
"on":
  pull_request:
    branches: ["master"]
    paths:
      - .github/workflows/validate-desktop-file.yml
      - Source/LoadByOS/LinuxConfigApp/Libation.desktop
  push:
    branches: ["master"]
    paths:
      - .github/workflows/validate-desktop-file.yml
      - Source/LoadByOS/LinuxConfigApp/Libation.desktop

jobs:
  validate-desktop-file:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v6
      - run: sudo apt --yes install desktop-file-utils
      - name: Check the desktop file
        run: desktop-file-validate Source/LoadByOS/LinuxConfigApp/Libation.desktop
.github/workflows/validate.yml (vendored, new file, 47 lines)
@@ -0,0 +1,47 @@
# validate.yml
# Validates that Libation will build on a pull request or push to master.
---
name: validate

on:
  push:
    branches: [master]
    paths:
      - Source/**
      - .github/workflows/**
  pull_request:
    branches: [master]
    paths:
      - Source/**
      - .github/workflows/**

jobs:
  get_version:
    runs-on: ubuntu-slim
    outputs:
      version: ${{ steps.get_version.outputs.version }}
    steps:
      - name: Get version
        id: get_version
        run: |
          wget "https://raw.githubusercontent.com/${{ github.repository }}/${{ github.sha }}/Source/AppScaffolding/AppScaffolding.csproj"
          version="$(grep -Eio -m 1 '<Version>.*</Version>' ./AppScaffolding.csproj | sed -r 's/<\/?Version>//g')"
          echo "version=${version}" >> "${GITHUB_OUTPUT}"
  build:
    needs: [get_version]
    uses: ./.github/workflows/build.yml
    secrets: inherit
    with:
      libation-version: ${{ needs.get_version.outputs.version }}
      retention-days: 14
      run-unit-tests: true

  docker:
    needs: [get_version]
    uses: ./.github/workflows/docker.yml
    with:
      version: ${{ needs.get_version.outputs.version }}
      release: false
    secrets:
      docker_username: ${{ secrets.DOCKERHUB_USERNAME }}
      docker_token: ${{ secrets.DOCKERHUB_TOKEN }}
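Unlike release.yml, the `get_version` job above reads the version out of `AppScaffolding.csproj` rather than a tag. The same extraction can be reproduced locally against a checkout, using the identical grep/sed pipeline from the workflow step (only the `wget` is dropped, since the file is already on disk):

```bash
# Same version extraction as validate.yml's get_version step, run from the repository root.
version="$(grep -Eio -m 1 '<Version>.*</Version>' Source/AppScaffolding/AppScaffolding.csproj | sed -r 's/<\/?Version>//g')"
echo "version=${version}"
```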
.gitignore (vendored, 14 lines changed)
@@ -184,7 +184,7 @@ publish/
*.azurePubxml
# Note: Comment the next line if you want to checkin your web deploy settings,
# but database connection strings (with potential passwords) will be unencrypted
*.pubxml
#*.pubxml
*.publishproj

# Microsoft Azure Web App publish settings. Comment the next line if you want to
@@ -370,3 +370,15 @@ FodyWeavers.xsd

/__TODO.txt
/DataLayer/LibationContext.db
*/bin-Avalonia

# macOS Directory Info
.DS_Store

# JetBrains Rider Settings
**/.idea/

# VitePress
node_modules
.vitepress/cache
.vitepress/dist
.releaseindex.json (new file, 11 lines)
@@ -0,0 +1,11 @@
{
  "WindowsClassic": "Classic-Libation\\.\\d+\\.\\d+\\.\\d+(?:\\.\\d+)?-win(?:dows)?-classic-x64\\.zip",
  "WindowsAvalonia": "Libation\\.\\d+\\.\\d+\\.\\d+(?:\\.\\d+)?-win(?:dows)?-chardonnay-x64\\.zip",
  "WindowsAvalonia_Arm64": "Libation\\.\\d+\\.\\d+\\.\\d+(?:\\.\\d+)?-win(?:dows)?-chardonnay-arm64\\.zip",
  "LinuxAvalonia": "Libation\\.\\d+\\.\\d+\\.\\d+(?:\\.\\d+)?-linux-chardonnay-amd64\\.deb",
  "LinuxAvalonia_RPM": "Libation\\.\\d+\\.\\d+\\.\\d+(?:\\.\\d+)?-linux-chardonnay-amd64\\.rpm",
  "MacOSAvalonia": "Libation\\.\\d+\\.\\d+\\.\\d+(?:\\.\\d+)?-macOS-chardonnay-x64\\.dmg",
  "LinuxAvalonia_Arm64": "Libation\\.\\d+\\.\\d+\\.\\d+(?:\\.\\d+)?-linux-chardonnay-arm64\\.deb",
  "LinuxAvalonia_Arm64_RPM": "Libation\\.\\d+\\.\\d+\\.\\d+(?:\\.\\d+)?-linux-chardonnay-arm64\\.rpm",
  "MacOSAvalonia_Arm64": "Libation\\.\\d+\\.\\d+\\.\\d+(?:\\.\\d+)?-macOS-chardonnay-arm64\\.dmg"
}
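Each entry above maps a platform key to a regular expression for one release asset filename. As a rough illustration, a hypothetical asset name can be checked against the `LinuxAvalonia` pattern like this; the `\d` shorthand is rewritten as `[0-9]`, the `(?:...)` group becomes a plain group, and anchors are added, since `grep -E` supports neither `\d` nor non-capturing groups:

```bash
# Illustrative only: the asset name is made up; the pattern is adapted from .releaseindex.json.
echo "Libation.12.0.1-linux-chardonnay-amd64.deb" \
  | grep -E '^Libation\.[0-9]+\.[0-9]+\.[0-9]+(\.[0-9]+)?-linux-chardonnay-amd64\.deb$'
```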
.vitepress/config.js (new file, 90 lines)
@@ -0,0 +1,90 @@
import { defineConfig } from "vitepress";

// https://vitepress.dev/reference/site-config
export default defineConfig({
  title: "Libation",
  description: "Libation: Liberate your Library - A free application for downloading your Audible audiobooks",
  head: [["link", { rel: "icon", href: "/favicon.ico" }]],
  cleanUrls: true,
  themeConfig: {
    // https://vitepress.dev/reference/default-theme-config
    logo: {
      light: "/libation_logo_light.svg",
      dark: "/libation_logo_dark.svg",
    },

    footer: {
      message: "Released under the GPLv3 License",
    },

    editLink: {
      pattern: "https://github.com/rmcrackan/Libation/edit/main/:path",
    },

    lastUpdated: true,

    nav: [
      { text: "Getting Started", link: "/docs/getting-started" },
      { text: "Docs", link: "/docs/index" },
      { text: "Download", link: "https://github.com/rmcrackan/Libation/releases/latest" },
      { text: "Issues & Requests", link: "https://github.com/rmcrackan/Libation/issues" },
      { text: "Donate", link: "https://www.paypal.com/paypalme/mcrackan" },
    ],
    sidebar: [
      {
        items: [
          { text: "Overview", link: "/docs/index"},
          { text: "Getting Started", link: "/docs/getting-started" },
          { text: "FAQ", link: "/docs/frequently-asked-questions" },
          {
            text: "Issues & Requests",
            link: "https://github.com/rmcrackan/Libation/issues",
          },
          { text: "Donate", link: "https://www.paypal.com/paypalme/mcrackan" },
        ],
      },
      {
        text: "Installation",
        collapsed: false,
        items: [
          { text: "Linux", link: "/docs/installation/linux" },
          { text: "Mac", link: "/docs/installation/mac" },
          { text: "Docker", link: "/docs/installation/docker" },
        ],
      },
      {
        text: "Features",
        collapsed: false,
        items: [
          { text: "Audio File Formats", link: "/docs/features/audio-file-formats" },
          { text: "Naming Templates", link: "/docs/features/naming-templates" },
          {
            text: "Searching & Filtering",
            link: "/docs/features/searching-and-filtering",
          },
        ],
      },
      {
        text: "Advanced",
        collapsed: false,
        items: [
          { text: "Advanced Topics", link: "/docs/advanced/advanced" },
          {
            text: "Linux Development Setup",
            link: "/docs/advanced/linux-development-setup-using-nix",
          },
        ],
      },
    ],

    outline: {
      level: "deep",
    },

    socialLinks: [{ icon: "github", link: "https://github.com/rmcrackan/Libation" }],

    search: {
      provider: "local",
    },
  },
});
.vitepress/theme/custom.css (new file, 15 lines)
@@ -0,0 +1,15 @@
/* Custom styles for Libation documentation */

/* Hide certain nav items on tablet devices to prevent horizontal scroll */
@media (min-width: 640px) and (max-width: 959px) {
  /* Target specific nav items by their position */
  /* Hide "Issues & Requests" and "Donate" links on tablet */
  .VPNav .VPNavBar .nav .VPNavBarMenu .VPMenu:nth-child(3) {
    display: none;
  }

  /* Alternative: Use a more specific selector if needed */
  .VPNavBarMenuLink[href*="issues"] {
    display: none;
  }
}
.vitepress/theme/index.js (new file, 4 lines)
@@ -0,0 +1,4 @@
import DefaultTheme from 'vitepress/theme'
import './custom.css'

export default DefaultTheme
.vscode/launch.json (vendored, new file, 32 lines)
@@ -0,0 +1,32 @@
{
  // Use IntelliSense to learn about possible attributes.
  // Hover to view descriptions of existing attributes.
  // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
  "version": "0.2.0",
  "configurations": [

    {
      "name": ".NET Core Launch (console) Windows",
      "type": "coreclr",
      "request": "launch",
      "preLaunchTask": "build",
      "program": "${workspaceFolder}/Source/bin/Avalonia/Debug/Libation.dll",
      "args": [],
      "cwd": "${workspaceFolder}",
      "stopAtEntry": false,
      "console": "internalConsole"
    },
    {
      "name": ".NET Core Launch (console) Linux",
      "type": "coreclr",
      "request": "launch",
      "preLaunchTask": "build_linux",
      "program": "${workspaceFolder}/Source/bin/Avalonia/Debug/Libation.dll",
      "args": [],
      "cwd": "${workspaceFolder}",
      "stopAtEntry": false,
      "console": "internalConsole"
    }

  ]
}
.vscode/tasks.json (vendored, new file, 59 lines)
@@ -0,0 +1,59 @@
{
  // See https://go.microsoft.com/fwlink/?LinkId=733558
  // for the documentation about the tasks.json format
  "version": "2.0.0",
  "tasks": [
    {
      "label": "build",
      "dependsOn": [
        "build_libation",
        "build_linuxconfigapp"
      ]
    },
    {
      "label": "build_libation",
      "type": "shell",
      "command": "dotnet",
      "args": [
        "build",
        "${workspaceFolder}/Source/LibationAvalonia/LibationAvalonia.csproj"
      ],
      "group": "build",
      "presentation": {
        //"reveal": "silent"
      },
      "problemMatcher": "$msCompile"
    },
    {
      "label": "build_linuxconfigapp",
      "type": "shell",
      "command": "dotnet",
      "args": [
        "build",
        "${workspaceFolder}/Source/LoadByOS/LinuxConfigApp/LinuxConfigApp.csproj"
      ],
      "group": "build",
      "presentation": {
        //"reveal": "silent"
      },
      "problemMatcher": "$msCompile"
    },
    {
      "label": "build_linux",
      "type": "shell",
      "command": "dotnet",
      "args": [
        "build",
        "${workspaceFolder}/Source/LibationAvalonia/LibationAvalonia.csproj",
        "-p:TargetFramework=net9.0",
        "-p:TargetFrameworks=net9.0",
        "-p:RuntimeIdentifier=linux-x64"
      ],
      "group": "build",
      "presentation": {
        //"reveal": "silent"
      },
      "problemMatcher": "$msCompile"
    }
  ]
}
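For reference, the `build_linux` task above is a thin wrapper over the .NET CLI; run from the repository root (i.e. with `${workspaceFolder}` resolved), it is equivalent to:

```bash
# Same project path and MSBuild properties as the build_linux task definition above.
dotnet build Source/LibationAvalonia/LibationAvalonia.csproj \
  -p:TargetFramework=net9.0 \
  -p:TargetFrameworks=net9.0 \
  -p:RuntimeIdentifier=linux-x64
```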
Directory.Build.props (new file, 5 lines)
@@ -0,0 +1,5 @@
<Project>
  <PropertyGroup>
    <EnableMSTestRunner>true</EnableMSTestRunner>
  </PropertyGroup>
</Project>
Docker/appsettings.json (new file, 3 lines)
@@ -0,0 +1,3 @@
{
  "LibationFiles": "/config-internal"
}
Docker/liberate.sh (new executable file, 174 lines)
@@ -0,0 +1,174 @@
#!/bin/bash

error() {
    log "ERROR" "$1"
}

warn() {
    log "WARNING" "$1"
}

info() {
    log "info" "$1"
}

debug() {
    if [ "${LOG_LEVEL}" = "debug" ]; then
        log "debug" "$1"
    fi
}

log() {
    LEVEL=$1
    MESSAGE=$2
    printf "$(date '+%F %T') %s: %s\n" "${LEVEL}" "${MESSAGE}"
}

init_config_file() {
    FILE=$1
    FULLPATH=${LIBATION_CONFIG_DIR}/${FILE}
    if [ -f ${FULLPATH} ]; then
        info "loading ${FILE}"
        cp ${FULLPATH} ${LIBATION_CONFIG_INTERNAL}/
        return 0
    else
        warn "${FULLPATH} not found, creating empty file"
        echo "{}" > ${LIBATION_CONFIG_INTERNAL}/${FILE}
        return 1
    fi
}

update_settings() {
    FILE=$1
    KEY=$2
    VALUE=$3
    info "setting ${KEY} to ${VALUE}"
    echo $(jq --arg k "${KEY}" --arg v "${VALUE}" '.[$k] = $v' ${LIBATION_CONFIG_INTERNAL}/${FILE}) > ${LIBATION_CONFIG_INTERNAL}/${FILE}.tmp
    mv ${LIBATION_CONFIG_INTERNAL}/${FILE}.tmp ${LIBATION_CONFIG_INTERNAL}/${FILE}
}

is_mounted() {
    DIR=$1
    if grep -qs "${DIR} " /proc/mounts;
    then
        return 0
    else
        return 1
    fi
}

create_db() {
    DBFILE=$1
    if [ -f "${DBFILE}" ]; then
        warn "prexisting database found when creating"
        return 0
    else
        if ! touch "${DBFILE}"; then
            error "unable to create database, check permissions on host"
            exit 1
        fi
        return 1
    fi
}

setup_db() {
    DBPATH=$1
    dbpattern="*.db"

    debug "using database directory ${DBPATH}"

    # Figure out the right databse file
    if [[ -z "${LIBATION_DB_FILE}" ]];
    then
        dbCount=$(find "${DBPATH}" -maxdepth 1 -type f -name "${dbpattern}" | wc -l)
        if [ "${dbCount}" -gt 1 ];
        then
            error "too many database files found, set LIBATION_DB_FILE to the filename you wish to use"
            exit 1
        elif [ "${dbCount}" -eq 1 ];
        then
            files=( ${DBPATH}/${dbpattern} )
            FILE=${files[0]}
        else
            FILE="${DBPATH}/LibationContext.db"
        fi
    else
        FILE="${DBPATH}/${LIBATION_DB_FILE}"
    fi

    debug "planning to use database ${FILE}"

    if [ -f "${FILE}" ]; then
        info "database found at ${FILE}"
    elif [ ${LIBATION_CREATE_DB} = "true" ];
    then
        warn "database not found, creating one at ${FILE}"
        create_db ${FILE}
    else
        error "database not found and creation is disabled"
        exit 1
    fi
    ln -s "${FILE}" "${LIBATION_CONFIG_INTERNAL}/LibationContext.db"
}

run() {
    info "scanning accounts"
    /libation/LibationCli scan
    info "liberating books"
    /libation/LibationCli liberate
}

main() {
    info "initializing libation"
    init_config_file AccountsSettings.json
    init_config_file Settings.json

    info "loading settings"
    update_settings Settings.json Books "${LIBATION_BOOKS_DIR:-/data}"
    update_settings Settings.json InProgress /tmp

    info "loading database"
    # If user provides a separate database mount, use that
    if is_mounted "${LIBATION_DB_DIR}";
    then
        DB_LOCATION=${LIBATION_DB_DIR}
    # Otherwise, use the config directory
    else
        DB_LOCATION=${LIBATION_CONFIG_DIR}
    fi
    setup_db ${DB_LOCATION}

    # Try to warn if books dir wasn't mounted in
    if ! is_mounted "${LIBATION_BOOKS_DIR}";
    then
        warn "${LIBATION_BOOKS_DIR} does not appear to be mounted, books will not be saved"
    fi

    # Let the user know what the run type will be
    if [[ -z "${SLEEP_TIME}" ]]; then
        SLEEP_TIME=-1
    fi

    if [ "${SLEEP_TIME}" == -1 ]; then
        info "running once"
    else
        info "running every ${SLEEP_TIME}"
    fi

    # loop
    while true
    do
        run

        # Liberate only once if SLEEP_TIME was set to -1
        if [ "${SLEEP_TIME}" == -1 ]; then
            break
        fi

        sleep "${SLEEP_TIME}"
    done

    info "exiting"
}

main
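To make the `update_settings` helper above concrete: for each key/value pair it performs a `jq` edit and then renames the temporary file over the original. A simplified standalone sketch against a throwaway file in the current directory (the key and value are only examples; the real script operates on `${LIBATION_CONFIG_INTERNAL}`):

```bash
# Mirrors `update_settings Settings.json Books /data`, but on a local scratch file.
echo '{}' > Settings.json
jq --arg k "Books" --arg v "/data" '.[$k] = $v' Settings.json > Settings.json.tmp
mv Settings.json.tmp Settings.json
cat Settings.json   # -> { "Books": "/data" }
```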
Dockerfile (new file, 42 lines)
@@ -0,0 +1,42 @@
# Dockerfile
FROM --platform=${BUILDPLATFORM} mcr.microsoft.com/dotnet/sdk:10.0 AS build
ARG TARGETARCH

COPY Source /Source
RUN dotnet publish \
    /Source/LibationCli/LibationCli.csproj \
    --os linux \
    --arch ${TARGETARCH} \
    --configuration Release \
    --output /Source/bin/Publish/Linux-chardonnay \
    -p:PublishProtocol=FileSystem \
    -p:PublishReadyToRun=true \
    -p:SelfContained=true

FROM mcr.microsoft.com/dotnet/runtime:10.0
ARG USER_UID=1001
ARG USER_GID=1001

# Set the character set that will be used for folder and filenames when liberating
ENV LANG=C.UTF-8
ENV LC_ALL=C.UTF-8

ENV SLEEP_TIME=-1
ENV LIBATION_CONFIG_INTERNAL=/config-internal
ENV LIBATION_CONFIG_DIR=/config
ENV LIBATION_DB_DIR=/db
ENV LIBATION_DB_FILE=
ENV LIBATION_CREATE_DB=true
ENV LIBATION_BOOKS_DIR=/data


RUN apt-get update && apt-get -y upgrade && \
    apt-get install -y jq && \
    mkdir -m777 ${LIBATION_CONFIG_INTERNAL} ${LIBATION_BOOKS_DIR}

COPY --from=build /Source/bin/Publish/Linux-chardonnay /libation
COPY Docker/* /libation

USER ${USER_UID}:${USER_GID}

CMD ["/libation/liberate.sh"]
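Putting the image and the entrypoint script together: the container expects the host to mount the directories referenced by the `LIBATION_*` environment variables (at minimum the config and books locations). A plausible invocation is sketched below; the image name, tag, and host paths are placeholders, since the published image name comes from a repository secret in docker.yml:

```bash
# Hypothetical run of the Libation image built by this changeset; adjust paths to your setup.
docker run --rm \
  -v /path/to/libation/config:/config \
  -v /path/to/audiobooks:/data \
  -e SLEEP_TIME=-1 \
  <dockerhub-user>/libation:latest
```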
@@ -1,79 +0,0 @@
## [Download Libation](https://github.com/rmcrackan/Libation/releases/latest)

### If you found this useful, tell a friend. If you found this REALLY useful, you can click here to [PalPal.me](https://paypal.me/mcrackan?locale.x=en_us)
...or just tell more friends. As long as I'm maintaining this software, it will remain **free** and **open source**.



# Advanced: Table of Contents

- [Files and folders](#files-and-folders)
- [Linux and Mac (unofficial)](#linux-and-mac)
- [Settings](#settings)
- [Custom File Naming](#custom-file-naming)
- [Command Line Interface](#command-line-interface)



### Files and folders

To make upgrades and reinstalls easier, Libation separates all of its responsibilities to a few different folders. If you don't want to mess with this stuff: ignore it. Read on if you like a little more control over your files.

* In Libation's initial folder are the files that make up the program. Since nothing else is here, just copy new files here to upgrade the program. Delete this folder to delete Libation.

* In a separate folder, Libation keeps track of all of the files it creates like settings and downloaded images. After an upgrade, Libation might think that's its being run for the first time. Just click ADVANCED SETUP and point to this folder. Libation will reload your library and settings.

* The last important folder is the "books location." This is where Libation looks for your downloaded and decrypted books. This is how it knows which books still need to be downloaded. The Audible id must be somewhere in the book's file or folder name for Libation to detect your downloaded book.

### Linux and Mac

Although Libation only currently officially supports Windows, some users have had success with WINE. ([Linux](https://github.com/rmcrackan/Libation/issues/28#issuecomment-890594158), [OSX Crossover and WINE](https://github.com/rmcrackan/Libation/issues/150#issuecomment-1004918592))

### Settings

* Allow Libation to fix up audiobook metadata. After decrypting a title, Libation attempts to fix details like chapters and cover art. Some power users and/or control freaks prefer to manage this themselves. By unchecking this setting, Libation will only decrypt the book and will leave metadata as-is, warts and all.

### Custom File Naming

In Settings, on the Download/Decrypt tab, you can specify the format in which you want your files to be named. As you edit these templates, a live example will be shown. Parameters are listed for folders, files, and files split by chapter including an explanation of what each naming option means. For instance: you can use template `<title short> - <ch# 0> of <ch count> - <ch title>` to create the file `A Study in Scarlet - 04 of 10 - A Flight for Life.m4b`.

These templates apply to GUI and CLI.

### Command Line Interface

Libationcli.exe allows limited access to Libation's functionalities as a CLI.

Warnings about relying solely on on the CLI:
* CLI will not perform any upgrades.
* It will show that there is an upgrade, but that will likely scroll by too fast to notice.
* It will not perform all post-upgrade migrations. Some migrations are only be possible by launching GUI.

```
help
libationcli --help

verb-specific help
libationcli scan --help

scan all libraries
libationcli scan
scan only libraries for specific accounts
libationcli scan nickname1 nickname2

convert all m4b files to mp3
libationcli convert

liberate all books and pdfs
libationcli liberate
liberate pdfs only
libationcli liberate --pdf
libationcli liberate -p

export library to file
libationcli export --path "C:\foo\bar\my.json" --json
libationcli export -p "C:\foo\bar\my.json" -j
libationcli export -p "C:\foo\bar\my.csv" --csv
libationcli export -p "C:\foo\bar\my.csv" -c
libationcli export -p "C:\foo\bar\my.xlsx" --xlsx
libationcli export -p "C:\foo\bar\my.xlsx" -x
```
BIN  Images/Plus Minus.psd (new file; binary file not shown)
BIN  Images/Stoplight with pdf.psd (new file; binary file not shown)
BIN  Images/Stoplight.psd (new file; binary file not shown)
Images/libation_cheers.svg (new file, 32 lines)
@@ -0,0 +1,32 @@
|
||||
<svg version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" viewBox="0 0 512 512" width="512px" enable-background="new 0 0 512 512">
|
||||
<path id="slosh" transform=
|
||||
"translate(-50 23)
|
||||
scale(0.7, 0.7)
|
||||
rotate(12 256,256)"
|
||||
d=
|
||||
"M139,2
|
||||
A 192,200 0 0 0 103,84
|
||||
A 222,334 41 0 0 241,320
|
||||
V478
|
||||
H160
|
||||
A 16,16 0 0 0 160,510
|
||||
H352
|
||||
A16 16 0 0 0 352,478
|
||||
H271
|
||||
V320
|
||||
A 222,334 -41 0 0 409,84
|
||||
A 192,200 0 0 0 373,2
|
||||
M355,32
|
||||
A 192,200 0 0 1 381,127
|
||||
A 187.5,334 -35 0 1 256,286
|
||||
A 187.5,334 35 0 1 131,127
|
||||
A 192,200 0 0 1 157,32
|
||||
H355
|
||||
M146,147
|
||||
A 168,300 35 0 0 256,270
|
||||
A 168,300 -35 0 0 366,128
|
||||
S 360,50 280,110
|
||||
S 192,128 147,147
|
||||
z" />
|
||||
<use href="#slosh" transform="translate(512 0) scale(-1 1)" />
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 736 B |
Images/libation_glass.svg (new file, 41 lines)
@@ -0,0 +1,41 @@
|
||||
<svg version="1.1" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 288 288" enable-background="new 0 0 288 288">
|
||||
<defs>
|
||||
<g id="glass">
|
||||
<path transform="translate(16 16)" fill-rule="evenodd" d=
|
||||
"M177,16
|
||||
H79
|
||||
A 32.0781 63.7932 -1.5106 0 0 66 80
|
||||
A 158.789 471.1259 41.9466 0 0 90 131
|
||||
A 81.7197 122.0515 35.3745 0 0 128 143.3484
|
||||
A 81.7197 122.0515 -35.3745 0 0 166 131
|
||||
A 158.789 471.1259 -41.9466 0 0 190 80
|
||||
A 32.0781 63.7932 1.5106 0 0 177 16
|
||||
L 184 0
|
||||
A 44.7901 78.5247 1.1521 0 1 194 122
|
||||
A 97.0039 135.3148 -36.2124 0 1 136 159
|
||||
V 240
|
||||
H 176
|
||||
A 8 8 0 0 1 176 256
|
||||
H 80
|
||||
A 8 8 0 0 1 80 240
|
||||
H 120
|
||||
V 159
|
||||
A 97.0039 135.3148 36.2124 0 1 62 122
|
||||
A 44.7901 78.5247 -1.1521 0 1 72 0
|
||||
H184
|
||||
z"/>
|
||||
</g>
|
||||
<g transform="translate(16 16)" id="wine-level">
|
||||
<path d=
|
||||
"M182,64
|
||||
H 74
|
||||
A 115.9979 308.8033 38.9474 0 0 128 134.4277
|
||||
A 115.9979 308.8033 -38.9474 0 0 182,64
|
||||
z"/>
|
||||
</g>
|
||||
</defs>
|
||||
<use href="#glass" stroke="#ffffffa0" stroke-width="16" fill="Transparent" />
|
||||
<use href="#wine-level" stroke="#ffffffa0" stroke-width="16" fill="Transparent" />
|
||||
<use href="#glass" fill="Black" />
|
||||
<use href="#wine-level" fill="Black" />
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 1.2 KiB |
Images/libation_hangover.svg (new file, 31 lines)
@@ -0,0 +1,31 @@
|
||||
<svg version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" viewBox="0 0 288 288" enable-background="new 0 0 288 288">
|
||||
<g>
|
||||
<path transform="rotate(90 128,128) translate(60 -16)" fill-rule="evenodd" d=
|
||||
"M177,16
|
||||
H79
|
||||
A 32.0781 63.7932 -1.5106 0 0 66 80
|
||||
A 158.789 471.1259 41.9466 0 0 90 131
|
||||
A 81.7197 122.0515 35.3745 0 0 128 143.3484
|
||||
A 81.7197 122.0515 -35.3745 0 0 166 131
|
||||
A 158.789 471.1259 -41.9466 0 0 190 80
|
||||
A 32.0781 63.7932 1.5106 0 0 177 16
|
||||
L 184 0
|
||||
A 44.7901 78.5247 1.1521 0 1 194 122
|
||||
A 97.0039 135.3148 -36.2124 0 1 136 159
|
||||
V 240
|
||||
H 176
|
||||
A 8 8 0 0 1 176 256
|
||||
H 80
|
||||
A 8 8 0 0 1 80 240
|
||||
H 120
|
||||
V 159
|
||||
A 97.0039 135.3148 36.2124 0 1 62 122
|
||||
A 44.7901 78.5247 -1.1521 0 1 72 0
|
||||
H184
|
||||
M170,115
|
||||
V24
|
||||
A 19.5181 45.9183 -3.3549 0 1 182.4322 69.5
|
||||
A 19.5181 45.9183 3.3549 0 1 170 115
|
||||
z"/>
|
||||
</g>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 936 B |
Images/libation_slosh.svg (new file, 33 lines)
@@ -0,0 +1,33 @@
|
||||
<svg version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" viewBox="0 0 512 512" enable-background="new 0 0 512 512">
|
||||
<path
|
||||
transform=
|
||||
"rotate(15 256,256)
|
||||
translate(0 25)
|
||||
scale(0.93, 0.93)"
|
||||
d=
|
||||
"M139,2
|
||||
A 192,200 0 0 0 103,84
|
||||
A 222,334 41 0 0 241,320
|
||||
V478
|
||||
H160
|
||||
A 16,16 0 0 0 160,510
|
||||
H352
|
||||
A16 16 0 0 0 352,478
|
||||
H271
|
||||
V320
|
||||
A 222,334 -41 0 0 409,84
|
||||
A 192,200 0 0 0 373,2
|
||||
M355,32
|
||||
A 192,200 0 0 1 381,127
|
||||
A 187.5,334 -35 0 1 256,286
|
||||
A 187.5,334 35 0 1 131,127
|
||||
A 192,200 0 0 1 157,32
|
||||
H355
|
||||
M146,147
|
||||
A 168,300 35 0 0 256,270
|
||||
A 168,300 -35 0 0 366,128
|
||||
S 360,50 280,110
|
||||
S 192,128 147,147
|
||||
z" />
|
||||
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 649 B |
README.md (73 lines changed)
@@ -2,68 +2,41 @@

## [Download Libation](https://github.com/rmcrackan/Libation/releases/latest)

### If you found this useful, tell a friend. If you found this REALLY useful, you can click here to [PalPal.me](https://paypal.me/mcrackan?locale.x=en_us)
### If you found this useful, tell a friend. If you found this REALLY useful, you can click here to [PayPal.me](https://paypal.me/mcrackan?locale.x=en_us)

...or just tell more friends. As long as I'm maintaining this software, it will remain **free** and **open source**.

# Table of Contents
## Getting started with Libation

- [Audible audiobook manager](#audible-audiobook-manager)
- [The good](#the-good)
- [The bad](#the-bad)
- [The ugly](#the-ugly)
- [Getting started](Documentation/GettingStarted.md)
- [Download Libation](Documentation/GettingStarted.md#download-libation-1)
- [Installation](Documentation/GettingStarted.md#installation)
- [Create Accounts](Documentation/GettingStarted.md#create-accounts)
- [Import your library](Documentation/GettingStarted.md#import-your-library)
- [Download your books -- DRM-free!](Documentation/GettingStarted.md#download-your-books----drm-free)
- [Download PDF attachments](Documentation/GettingStarted.md#download-pdf-attachments)
- [Details of downloaded files](Documentation/GettingStarted.md#details-of-downloaded-files)
- [Export your library](Documentation/GettingStarted.md#export-your-library)
- [Searching and filtering](Documentation/SearchingAndFiltering.md)
- [Tags](Documentation/SearchingAndFiltering.md#tags)
- [Searches](Documentation/SearchingAndFiltering.md#searches)
- [Search examples](Documentation/SearchingAndFiltering.md#search-examples)
- [Filters](Documentation/SearchingAndFiltering.md#filters)
- [Advanced](Documentation/Advanced.md)
- [Files and folders](Documentation/Advanced.md#files-and-folders)
- [Linux and Mac (unofficial)](Documentation/Advanced.md#linux-and-mac)
- [Settings](Documentation/Advanced.md#settings)
- [Custom File Naming](Documentation/Advanced.md#custom-file-naming)
- [Command Line Interface](Documentation/Advanced.md#command-line-interface)

## Getting started
All documentation has been moved to our new site: [getlibation.com](https://getlibation.com). Or jump to the important bits:

* [Getting Started](https://getlibation.com/docs/getting-started)
* [Download](https://github.com/rmcrackan/Libation/releases/latest)
* [Step-by-step walk-through](Documentation/GettingStarted.md)
* [Issues, bugs, and requests](https://github.com/rmcrackan/Libation/issues)
* [Documentation](https://getlibation.com/docs/index)

## Audible audiobook manager
## Development

### The good
### Documentation

* Import library from audible, including cover art
* Download and remove DRM from all books
* Download accompanying PDFs
* Add tags to books for better organization
* Powerful advanced search built on the Lucene search engine
* Customizable saved filters for common searches
* Open source
* Supports most regions: US, UK, Canada, Germany, France, Australia, Japan, India, and Spain
The documentation is built with [VitePress](https://vitepress.dev/) and located in the `docs` directory. For more information like [markdown syntax](https://vitepress.dev/guide/markdown#advanced-configuration) and [routing](https://vitepress.dev/guide/routing) or other features, refer [VitePress documentation](https://vitepress.dev/guide).

<a name="theBad"/>
**Prerequisites**: Node.js 18+

### The bad
**Commands**:

* Windows only
* Large file size
* Made by a programmer, not a designer so the goals are function rather than beauty. And it shows
```bash
# Install dependencies
npm install

### The ugly
# Start local dev server (http://localhost:5173)
npm run docs:dev

* Documentation? Yer lookin' at it
* This is a single-developer personal passion project. Support, response, updates, enhancements, bug fixes etc are as my free time allows
* I have a full-time job, a life, and a finite attention span. Therefore a lot of time can potentially go by with no improvements of any kind
# Build for production (output: docs/.vitepress/dist)
npm run docs:build

Disclaimer: I've made every good-faith effort to include nothing insecure, malicious, anti-privacy, or destructive. That said: use at your own risk.
# Preview production build
npm run docs:preview
```

I made this for myself and I want to share it with the great programming and audible/audiobook communiites which have been so generous with their time and help.
**Note**: New pages are automatically routed based on their folder structure (e.g., `docs/docs/index.md` maps to `/docs/index`). To add them to the sidebar, update the `sidebar` configuration in `.vitepress/config.js`.

Scripts/Bundle_Debian.sh (new file, 130 lines)
@@ -0,0 +1,130 @@
|
||||
#!/bin/bash
|
||||
|
||||
BIN_DIR=$1; shift
|
||||
VERSION=$1; shift
|
||||
ARCH=$1; shift
|
||||
|
||||
if [ -z "$BIN_DIR" ]
|
||||
then
|
||||
echo "This script must be called with a the Libation Linux bins directory as an argument."
|
||||
exit
|
||||
fi
|
||||
|
||||
if [ ! -d "$BIN_DIR" ]
|
||||
then
|
||||
echo "The directory \"$BIN_DIR\" does not exist."
|
||||
exit
|
||||
fi
|
||||
|
||||
if [ -z "$VERSION" ]
|
||||
then
|
||||
echo "This script must be called with the Libation version number as an argument."
|
||||
exit
|
||||
fi
|
||||
|
||||
if [ -z "$ARCH" ]
|
||||
then
|
||||
echo "This script must be called with the Libation cpu architecture as an argument."
|
||||
exit
|
||||
fi
|
||||
|
||||
ARCH=$(echo $ARCH | sed 's/x64/amd64/')
|
||||
|
||||
DEB_DIR=./deb
|
||||
|
||||
FOLDER_EXEC=$DEB_DIR/usr/lib/libation
|
||||
echo "Exec dir: $FOLDER_EXEC"
|
||||
mkdir -p $FOLDER_EXEC
|
||||
|
||||
echo "Moving bins from $BIN_DIR to $FOLDER_EXEC"
|
||||
mv "${BIN_DIR}/"* $FOLDER_EXEC
|
||||
|
||||
if [ $? -ne 0 ]
|
||||
then echo "Error moving ${BIN_DIR} files"
|
||||
exit
|
||||
fi
|
||||
|
||||
|
||||
delfiles=('LinuxConfigApp' 'LinuxConfigApp.deps.json' 'LinuxConfigApp.runtimeconfig.json')
|
||||
|
||||
for n in "${delfiles[@]}"
|
||||
do
|
||||
echo "Deleting $n"
|
||||
rm $FOLDER_EXEC/$n
|
||||
done
|
||||
|
||||
FOLDER_ICON=$DEB_DIR/usr/share/icons/hicolor/scalable/apps/
|
||||
echo "Icon dir: $FOLDER_ICON"
|
||||
|
||||
FOLDER_DESKTOP=$DEB_DIR/usr/share/applications
|
||||
echo "Desktop dir: $FOLDER_DESKTOP"
|
||||
|
||||
FOLDER_DEBIAN=$DEB_DIR/DEBIAN
|
||||
echo "Debian dir: $FOLDER_DEBIAN"
|
||||
|
||||
mkdir -p $FOLDER_ICON
|
||||
mkdir -p $FOLDER_DESKTOP
|
||||
mkdir -p $FOLDER_DEBIAN
|
||||
|
||||
echo "Copying icon..."
|
||||
cp $FOLDER_EXEC/libation_glass.svg $FOLDER_ICON/libation.svg
|
||||
|
||||
echo "Copying desktop file..."
|
||||
cp $FOLDER_EXEC/Libation.desktop $FOLDER_DESKTOP/Libation.desktop
|
||||
|
||||
echo "Creating pre-install file..."
|
||||
echo "#!/bin/bash
|
||||
# Pre-install script, removes previous installation program files and sym links
|
||||
echo \"Removing previously created symlinks...\"
|
||||
rm /usr/bin/libation
|
||||
rm /usr/bin/hangover
|
||||
rm /usr/bin/libationcli
|
||||
echo \"Removing previously installed Libation files...\"
|
||||
rm -r /usr/lib/libation
|
||||
# making sure it won't stop installation
|
||||
exit 0
|
||||
" >> $FOLDER_DEBIAN/preinst
|
||||
|
||||
echo "Creating post-install file..."
|
||||
echo "#!/bin/bash
|
||||
gtk-update-icon-cache -f /usr/share/icons/hicolor/
|
||||
ln -s /usr/lib/libation/Libation /usr/bin/libation
|
||||
ln -s /usr/lib/libation/Hangover /usr/bin/hangover
|
||||
ln -s /usr/lib/libation/LibationCli /usr/bin/libationcli
|
||||
# Increase the maximum number of inotify instances
|
||||
if ! grep -q 'fs.inotify.max_user_instances=524288' /etc/sysctl.conf; then
|
||||
echo fs.inotify.max_user_instances=524288 | tee -a /etc/sysctl.conf && sysctl -p
|
||||
fi
|
||||
" >> $FOLDER_DEBIAN/postinst
|
||||
|
||||
echo "Creating control file..."
|
||||
echo "Package: Libation
|
||||
Version: $VERSION
|
||||
Architecture: $ARCH
|
||||
Essential: no
|
||||
Priority: optional
|
||||
Maintainer: github.com/rmcrackan
|
||||
Description: liberate your audiobooks
|
||||
Recommends: libgtk-3-0, libwebkit2gtk-4.1-0
|
||||
" >> $FOLDER_DEBIAN/control
|
||||
|
||||
echo "Changing permissions for pre- and post-install files..."
|
||||
chmod +x "$FOLDER_DEBIAN/preinst"
|
||||
chmod +x "$FOLDER_DEBIAN/postinst"
|
||||
|
||||
if [ "$(uname -s)" == "Darwin" ]; then
|
||||
echo "macOS detected, installing dpkg"
|
||||
brew install dpkg
|
||||
fi
|
||||
|
||||
DEB_FILE=Libation.${VERSION}-linux-chardonnay-${ARCH}.deb
|
||||
echo "Creating $DEB_FILE"
|
||||
dpkg-deb -Zxz --build $DEB_DIR ./$DEB_FILE
|
||||
|
||||
echo "moving to ./bundle/$DEB_FILE"
|
||||
mkdir bundle
|
||||
mv $DEB_FILE ./bundle/$DEB_FILE
|
||||
|
||||
rm -r "$BIN_DIR"
|
||||
|
||||
echo "Done!"
|
||||
Scripts/Bundle_MacOS.sh (new file, 132 lines)
@@ -0,0 +1,132 @@
|
||||
#!/bin/bash
|
||||
|
||||
BIN_DIR=$1; shift
|
||||
VERSION=$1; shift
|
||||
ARCH=$1; shift
|
||||
SIGN_WITH_KEY=$1; shift
|
||||
|
||||
if [ -z "$BIN_DIR" ]
|
||||
then
|
||||
echo "This script must be called with a the Libation macos bins directory as an argument."
|
||||
exit
|
||||
fi
|
||||
|
||||
if [ ! -d "$BIN_DIR" ]
|
||||
then
|
||||
echo "The directory \"$BIN_DIR\" does not exist."
|
||||
exit
|
||||
fi
|
||||
|
||||
if [ -z $VERSION ]
|
||||
then
|
||||
echo "This script must be called with the Libation version number as an argument."
|
||||
exit
|
||||
fi
|
||||
|
||||
if [ -z $ARCH ]
|
||||
then
|
||||
echo "This script must be called with the Libation cpu architecture as an argument."
|
||||
exit
|
||||
fi
|
||||
|
||||
if [ "$SIGN_WITH_KEY" != "true" ]
|
||||
then
|
||||
echo "::warning:: App will fail Gatekeeper verification without valid Apple Team information."
|
||||
fi
|
||||
|
||||
BUNDLE=./Libation.app
|
||||
echo "Bundle dir: $BUNDLE"
|
||||
|
||||
if [[ -d $BUNDLE ]]
|
||||
then
|
||||
echo "$BUNDLE directory already exists, aborting."
|
||||
exit
|
||||
fi
|
||||
|
||||
BUNDLE_CONTENTS=$BUNDLE/Contents
|
||||
echo "Bundle Contents dir: $BUNDLE_CONTENTS"
|
||||
|
||||
BUNDLE_RESOURCES=$BUNDLE_CONTENTS/Resources
|
||||
echo "Resources dir: $BUNDLE_RESOURCES"
|
||||
|
||||
BUNDLE_MACOS=$BUNDLE_CONTENTS/MacOS
|
||||
echo "MacOS dir: $BUNDLE_MACOS"
|
||||
|
||||
mkdir -p $BUNDLE_CONTENTS
|
||||
mkdir -p $BUNDLE_RESOURCES
|
||||
mkdir -p $BUNDLE_MACOS
|
||||
|
||||
mv "${BIN_DIR}/"* $BUNDLE_MACOS
|
||||
|
||||
if [ $? -ne 0 ]
|
||||
then echo "Error moving ${BIN_DIR} files"
|
||||
exit
|
||||
fi
|
||||
|
||||
echo "Make fileicon executable..."
|
||||
chmod +x $BUNDLE_MACOS/fileicon
|
||||
|
||||
echo "Moving icon..."
|
||||
mv $BUNDLE_MACOS/libation.icns $BUNDLE_RESOURCES/libation.icns
|
||||
|
||||
echo "Moving Info.plist file..."
|
||||
mv $BUNDLE_MACOS/Info.plist $BUNDLE_CONTENTS/Info.plist
|
||||
|
||||
echo "Moving Libation_DS_Store file..."
|
||||
mv $BUNDLE_MACOS/Libation_DS_Store ./Libation_DS_Store
|
||||
|
||||
echo "Moving background.png file..."
|
||||
mv $BUNDLE_MACOS/background.png ./background.png
|
||||
|
||||
echo "Moving background.png file..."
|
||||
mv $BUNDLE_MACOS/Libation.entitlements ./Libation.entitlements
|
||||
|
||||
PLIST_ARCH=$(echo $ARCH | sed 's/x64/x86_64/')
|
||||
echo "Set LSArchitecturePriority to $PLIST_ARCH"
|
||||
sed -i -e "s/ARCHITECTURE_STRING/$PLIST_ARCH/" $BUNDLE_CONTENTS/Info.plist
|
||||
|
||||
echo "Set CFBundleVersion to $VERSION"
|
||||
sed -i -e "s/VERSION_STRING/$VERSION/" $BUNDLE_CONTENTS/Info.plist
|
||||
|
||||
delfiles=('MacOSConfigApp' 'MacOSConfigApp.deps.json' 'MacOSConfigApp.runtimeconfig.json')
|
||||
for n in "${delfiles[@]}"
|
||||
do
|
||||
echo "Deleting $n"
|
||||
rm $BUNDLE_MACOS/$n
|
||||
done
|
||||
|
||||
DMG_FILE="Libation.${VERSION}-macOS-chardonnay-${ARCH}.dmg"
|
||||
|
||||
all_identities=$(security find-identity -v -p codesigning)
|
||||
identity=$(echo ${all_identities} | sed -n 's/.*"\(.*\)".*/\1/p')
|
||||
|
||||
if [ "$SIGN_WITH_KEY" == "true" ]; then
|
||||
echo "Signing executables in: $BUNDLE"
|
||||
codesign --force --deep --timestamp --options=runtime --entitlements "./Libation.entitlements" --sign "${identity}" "$BUNDLE"
|
||||
codesign --verify --verbose "$BUNDLE"
|
||||
else
|
||||
echo "Signing with empty key: $BUNDLE"
|
||||
codesign --force --deep -s - $BUNDLE
|
||||
fi
|
||||
|
||||
echo "Creating app disk image: $DMG_FILE"
|
||||
mkdir Libation
|
||||
mv $BUNDLE ./Libation/$BUNDLE
|
||||
mv Libation_DS_Store Libation/.DS_Store
|
||||
mkdir Libation/.background
|
||||
mv background.png Libation/.background/
|
||||
ln -s /Applications "./Libation/ "
|
||||
mkdir ./bundle
|
||||
hdiutil create -srcFolder ./Libation -o "./bundle/$DMG_FILE"
|
||||
# Create a .DS_Store by:
|
||||
# - mounting an existing image in shadow mode (hdiutil attach Libation.dmg -shadow junk.dmg)
|
||||
# - Open the folder and edit it to your liking.
|
||||
# - Copy the .DS_Store from the directory and save it to Libation_DS_Store
|
||||
|
||||
|
||||
if [ "$SIGN_WITH_KEY" == "true" ]; then
|
||||
echo "Signing $DMG_FILE"
|
||||
codesign --deep --sign "${identity}" "./bundle/$DMG_FILE"
|
||||
fi
|
||||
|
||||
echo "Done!"
|
||||
Scripts/Bundle_Redhat.sh (new file, 133 lines)
@@ -0,0 +1,133 @@
|
||||
#!/bin/bash
|
||||
|
||||
BIN_DIR=$1; shift
|
||||
VERSION=$1; shift
|
||||
ARCH=$1; shift
|
||||
|
||||
if [ -z "$BIN_DIR" ]
|
||||
then
|
||||
echo "This script must be called with a the Libation Linux bins directory as an argument."
|
||||
exit
|
||||
fi
|
||||
|
||||
if [ ! -d "$BIN_DIR" ]
|
||||
then
|
||||
echo "The directory \"$BIN_DIR\" does not exist."
|
||||
exit
|
||||
fi
|
||||
|
||||
if [ -z "$VERSION" ]
|
||||
then
|
||||
echo "This script must be called with the Libation version number as an argument."
|
||||
exit
|
||||
fi
|
||||
|
||||
if [ -z "$ARCH" ]
|
||||
then
|
||||
echo "This script must be called with the Libation cpu architecture as an argument."
|
||||
exit
|
||||
fi
|
||||
|
||||
BASEDIR=$(pwd)
|
||||
|
||||
delfiles=('LinuxConfigApp' 'LinuxConfigApp.deps.json' 'LinuxConfigApp.runtimeconfig.json')
|
||||
if [[ "$ARCH" == "x64" ]]
|
||||
then
|
||||
ARCH_RPM="x86_64"
|
||||
ARCH="amd64"
|
||||
else
|
||||
ARCH_RPM="aarch64"
|
||||
fi
|
||||
|
||||
notinstalled=('libcoreclrtraceptprovider.so' 'libation_glass.svg' 'Libation.desktop')
|
||||
|
||||
mkdir -p ~/rpmbuild/SPECS
|
||||
mkdir ~/rpmbuild/BUILD
|
||||
mkdir ~/rpmbuild/RPMS
|
||||
|
||||
echo "Name: libation
|
||||
Version: ${VERSION}
|
||||
Release: 1
|
||||
Summary: Liberate your Audible Library
|
||||
|
||||
License: GPLv3+
|
||||
URL: https://github.com/rmcrackan/Libation
|
||||
Source0: https://github.com/rmcrackan/Libation
|
||||
|
||||
Requires: bash gtk3 webkit2gtk4.1
|
||||
|
||||
|
||||
%define __os_install_post %{nil}
|
||||
|
||||
%description
|
||||
Liberate your Audible Library
|
||||
|
||||
%install
|
||||
mkdir -p %{buildroot}%{_libdir}/%{name}
|
||||
mkdir -p %{buildroot}%{_datadir}/icons/hicolor/scalable/apps
|
||||
mkdir -p %{buildroot}%{_datadir}/applications
|
||||
|
||||
if test -f 'libcoreclrtraceptprovider.so'; then
|
||||
rm 'libcoreclrtraceptprovider.so'
|
||||
fi
|
||||
|
||||
|
||||
install -m 666 libation_glass.svg %{buildroot}%{_datadir}/icons/hicolor/scalable/apps/libation.svg
|
||||
install -m 666 Libation.desktop %{buildroot}%{_datadir}/applications/Libation.desktop
|
||||
|
||||
rm libation_glass.svg
|
||||
rm Libation.desktop
|
||||
|
||||
install * %{buildroot}%{_libdir}/%{name}/
|
||||
|
||||
%post
|
||||
|
||||
if [ \$1 -eq 1 ] ; then
|
||||
# Initial installation
|
||||
|
||||
ln -s %{_libdir}/%{name}/Libation %{_bindir}/libation
|
||||
ln -s %{_libdir}/%{name}/Hangover %{_bindir}/hangover
|
||||
ln -s %{_libdir}/%{name}/LibationCli %{_bindir}/libationcli
|
||||
|
||||
gtk-update-icon-cache -f %{_datadir}/icons/hicolor/
|
||||
|
||||
if ! grep -q 'fs.inotify.max_user_instances=524288' /etc/sysctl.conf; then
|
||||
echo fs.inotify.max_user_instances=524288 | tee -a /etc/sysctl.conf && sysctl -p
|
||||
fi
|
||||
fi
|
||||
|
||||
%postun
|
||||
if [ \$1 -eq 0 ] ; then
|
||||
# Uninstall
|
||||
rm %{_bindir}/libation
|
||||
rm %{_bindir}/hangover
|
||||
rm %{_bindir}/libationcli
|
||||
fi
|
||||
|
||||
%files
|
||||
%{_datadir}/icons/hicolor/scalable/apps/libation.svg
|
||||
%{_datadir}/applications/Libation.desktop" >> ~/rpmbuild/SPECS/libation.spec
|
||||
|
||||
|
||||
cd "$BIN_DIR"
|
||||
|
||||
for f in *; do
|
||||
if [[ " ${delfiles[*]} " =~ " ${f} " ]]; then
|
||||
echo "Deleting $f"
|
||||
elif [[ ! " ${notinstalled[*]} " =~ " ${f} " ]]; then
|
||||
echo "%{_libdir}/%{name}/${f}" >> ~/rpmbuild/SPECS/libation.spec
|
||||
cp $f ~/rpmbuild/BUILD/
|
||||
else
|
||||
cp $f ~/rpmbuild/BUILD/
|
||||
fi
|
||||
done
|
||||
|
||||
cd ~/rpmbuild/SPECS/
|
||||
rpmbuild -bb --target $ARCH_RPM libation.spec
|
||||
|
||||
cd $BASEDIR
|
||||
RPM_FILE=$(ls ~/rpmbuild/RPMS/${ARCH_RPM})
|
||||
|
||||
mkdir bundle
|
||||
|
||||
mv ~/rpmbuild/RPMS/${ARCH_RPM}/$RPM_FILE "./bundle/Libation.${VERSION}-linux-chardonnay-${ARCH}.rpm"
|
||||
@@ -1,15 +1,22 @@
|
||||
<Project Sdk="Microsoft.NET.Sdk">
|
||||
|
||||
<PropertyGroup>
|
||||
<TargetFramework>net6.0-windows</TargetFramework>
|
||||
<TargetFramework>net10.0</TargetFramework>
|
||||
</PropertyGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<PackageReference Include="AAXClean" Version="0.4.7" />
|
||||
<PackageReference Include="AAXClean.Codecs" Version="0.2.7" />
|
||||
</ItemGroup>
|
||||
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|AnyCPU'">
|
||||
<DebugType>embedded</DebugType>
|
||||
</PropertyGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|AnyCPU'">
|
||||
<DebugType>embedded</DebugType>
|
||||
</PropertyGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<PackageReference Include="AAXClean.Codecs" Version="2.1.2.1" />
|
||||
</ItemGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<ProjectReference Include="..\FileManager\FileManager.csproj" />
|
||||
</ItemGroup>
|
||||
|
||||
|
||||
@@ -1,17 +1,21 @@
|
||||
using System;
|
||||
using AAXClean;
|
||||
using Dinah.Core.Net.Http;
|
||||
using AAXClean;
|
||||
using System;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Threading.Tasks;
|
||||
|
||||
#nullable enable
|
||||
namespace AaxDecrypter
|
||||
{
|
||||
{
|
||||
public abstract class AaxcDownloadConvertBase : AudiobookDownloadBase
|
||||
{
|
||||
public event EventHandler<AppleTags> RetrievedMetadata;
|
||||
public event EventHandler<AppleTags>? RetrievedMetadata;
|
||||
|
||||
protected AaxFile AaxFile;
|
||||
public Mp4File? AaxFile { get; private set; }
|
||||
protected Mp4Operation? AaxConversion { get; set; }
|
||||
|
||||
protected AaxcDownloadConvertBase(string outFileName, string cacheDirectory, DownloadOptions dlLic)
|
||||
: base(outFileName, cacheDirectory, dlLic) { }
|
||||
protected AaxcDownloadConvertBase(string outDirectory, string cacheDirectory, IDownloadOptions dlOptions)
|
||||
: base(outDirectory, cacheDirectory, dlOptions) { }
|
||||
|
||||
/// <summary>Setting cover art by this method will insert the art into the audiobook metadata</summary>
|
||||
public override void SetCoverArt(byte[] coverArt)
|
||||
@@ -21,9 +25,67 @@ namespace AaxDecrypter
|
||||
AaxFile.AppleTags.Cover = coverArt;
|
||||
}
|
||||
|
||||
public override async Task CancelAsync()
|
||||
{
|
||||
await base.CancelAsync();
|
||||
await (AaxConversion?.CancelAsync() ?? Task.CompletedTask);
|
||||
}
|
||||
|
||||
private Mp4File Open()
|
||||
{
|
||||
if (DownloadOptions.DecryptionKeys is not KeyData[] keys || keys.Length == 0)
|
||||
throw new InvalidOperationException($"{nameof(DownloadOptions.DecryptionKeys)} cannot be null or empty for a '{DownloadOptions.InputType}' file.");
|
||||
else if (DownloadOptions.InputType is FileType.Dash)
|
||||
{
|
||||
//We may have multiple keys , so use the key whose key ID matches
|
||||
//the dash files default Key ID.
|
||||
var keyIds = keys.Select(k => new Guid(k.KeyPart1, bigEndian: true)).ToArray();
|
||||
|
||||
var dash = new DashFile(InputFileStream);
|
||||
var kidIndex = Array.IndexOf(keyIds, dash.Tenc.DefaultKID);
|
||||
|
||||
if (kidIndex == -1)
|
||||
throw new InvalidOperationException($"None of the {keyIds.Length} key IDs match the dash file's default KeyID of {dash.Tenc.DefaultKID}");
|
||||
|
||||
keys[0] = keys[kidIndex];
|
||||
var keyId = keys[kidIndex].KeyPart1;
|
||||
var key = keys[kidIndex].KeyPart2 ?? throw new InvalidOperationException($"{nameof(DownloadOptions.DecryptionKeys)} for '{DownloadOptions.InputType}' must have a non-null decryption key (KeyPart2).");
|
||||
dash.SetDecryptionKey(keyId, key);
|
||||
WriteKeyFile($"KeyId={Convert.ToHexString(keyId)}{Environment.NewLine}Key={Convert.ToHexString(key)}");
|
||||
return dash;
|
||||
}
|
||||
else if (DownloadOptions.InputType is FileType.Aax)
|
||||
{
|
||||
var aax = new AaxFile(InputFileStream);
|
||||
var key = keys[0].KeyPart1;
|
||||
aax.SetDecryptionKey(keys[0].KeyPart1);
|
||||
WriteKeyFile($"ActivationBytes={Convert.ToHexString(key)}");
|
||||
return aax;
|
||||
}
|
||||
else if (DownloadOptions.InputType is FileType.Aaxc)
|
||||
{
|
||||
var aax = new AaxFile(InputFileStream);
|
||||
var key = keys[0].KeyPart1;
|
||||
var iv = keys[0].KeyPart2 ?? throw new InvalidOperationException($"{nameof(DownloadOptions.DecryptionKeys)} for '{DownloadOptions.InputType}' must have a non-null initialization vector (KeyPart2).");
|
||||
aax.SetDecryptionKey(keys[0].KeyPart1, iv);
|
||||
WriteKeyFile($"Key={Convert.ToHexString(key)}{Environment.NewLine}IV={Convert.ToHexString(iv)}");
|
||||
return aax;
|
||||
}
|
||||
else throw new InvalidOperationException($"{nameof(DownloadOptions.InputType)} of '{DownloadOptions.InputType}' is unknown.");
|
||||
|
||||
void WriteKeyFile(string contents)
|
||||
{
|
||||
var keyFile = Path.Combine(Path.ChangeExtension(InputFileStream.SaveFilePath, ".key"));
|
||||
File.WriteAllText(keyFile, contents + Environment.NewLine);
|
||||
OnTempFileCreated(new(keyFile));
|
||||
}
|
||||
}
|
||||
|
||||
protected bool Step_GetMetadata()
|
||||
{
|
||||
AaxFile = new AaxFile(InputFileStream);
|
||||
AaxFile = Open();
|
||||
|
||||
RetrievedMetadata?.Invoke(this, AaxFile.AppleTags);
|
||||
|
||||
if (DownloadOptions.StripUnabridged)
|
||||
{
|
||||
@@ -31,90 +93,52 @@ namespace AaxDecrypter
|
||||
AaxFile.AppleTags.Album = AaxFile.AppleTags.Album?.Replace(" (Unabridged)", "");
|
||||
}
|
||||
|
||||
//Finishing configuring lame encoder.
|
||||
if (DownloadOptions.OutputFormat == OutputFormat.Mp3)
|
||||
if (DownloadOptions.FixupFile)
|
||||
{
|
||||
double bitrateMultiple = 1;
|
||||
if (!string.IsNullOrWhiteSpace(AaxFile.AppleTags.Narrator))
|
||||
AaxFile.AppleTags.AppleListBox.EditOrAddTag("©wrt", AaxFile.AppleTags.Narrator);
|
||||
|
||||
if (AaxFile.AudioChannels == 2)
|
||||
if (!string.IsNullOrWhiteSpace(AaxFile.AppleTags.Copyright))
|
||||
AaxFile.AppleTags.Copyright = AaxFile.AppleTags.Copyright.Replace("(P)", "℗").Replace("©", "©");
|
||||
|
||||
//Add audiobook shelf tags
|
||||
//https://github.com/advplyr/audiobookshelf/issues/1794#issuecomment-1565050213
|
||||
const string tagDomain = "com.pilabor.tone";
|
||||
|
||||
AaxFile.AppleTags.Title = DownloadOptions.Title;
|
||||
|
||||
if (DownloadOptions.Subtitle is string subtitle)
|
||||
AaxFile.AppleTags.AppleListBox.EditOrAddFreeformTag(tagDomain, "SUBTITLE", subtitle);
|
||||
|
||||
if (DownloadOptions.Publisher is string publisher)
|
||||
AaxFile.AppleTags.AppleListBox.EditOrAddFreeformTag(tagDomain, "PUBLISHER", publisher);
|
||||
|
||||
if (DownloadOptions.Language is string language)
|
||||
AaxFile.AppleTags.AppleListBox.EditOrAddFreeformTag(tagDomain, "LANGUAGE", language);
|
||||
|
||||
if (DownloadOptions.AudibleProductId is string asin)
|
||||
{
|
||||
if (DownloadOptions.Downsample)
|
||||
bitrateMultiple = 0.5;
|
||||
else
|
||||
DownloadOptions.LameConfig.Mode = NAudio.Lame.MPEGMode.Stereo;
|
||||
AaxFile.AppleTags.Asin = asin;
|
||||
AaxFile.AppleTags.AppleListBox.EditOrAddTag("asin", asin);
|
||||
AaxFile.AppleTags.AppleListBox.EditOrAddFreeformTag(tagDomain, "AUDIBLE_ASIN", asin);
|
||||
}
|
||||
|
||||
if (DownloadOptions.MatchSourceBitrate)
|
||||
{
|
||||
int kbps = (int)(AaxFile.AverageBitrate * bitrateMultiple / 1024);
|
||||
if (DownloadOptions.SeriesName is string series)
|
||||
AaxFile.AppleTags.AppleListBox.EditOrAddFreeformTag(tagDomain, "SERIES", series);
|
||||
|
||||
if (DownloadOptions.LameConfig.VBR is null)
|
||||
DownloadOptions.LameConfig.BitRate = kbps;
|
||||
else if (DownloadOptions.LameConfig.VBR == NAudio.Lame.VBRMode.ABR)
|
||||
DownloadOptions.LameConfig.ABRRateKbps = kbps;
|
||||
}
|
||||
if (DownloadOptions.SeriesNumber is string part)
|
||||
AaxFile.AppleTags.AppleListBox.EditOrAddFreeformTag(tagDomain, "PART", part);
|
||||
}
|
||||
|
||||
OnRetrievedTitle(AaxFile.AppleTags.TitleSansUnabridged);
|
||||
OnRetrievedAuthors(AaxFile.AppleTags.FirstAuthor ?? "[unknown]");
|
||||
OnRetrievedNarrators(AaxFile.AppleTags.Narrator ?? "[unknown]");
|
||||
OnRetrievedAuthors(AaxFile.AppleTags.FirstAuthor);
|
||||
OnRetrievedNarrators(AaxFile.AppleTags.Narrator);
|
||||
OnRetrievedCoverArt(AaxFile.AppleTags.Cover);
|
||||
|
||||
RetrievedMetadata?.Invoke(this, AaxFile.AppleTags);
|
||||
OnInitialized();
|
||||
|
||||
return !IsCanceled;
|
||||
}
|
||||
|
||||
protected DownloadProgress Step_DownloadAudiobook_Start()
|
||||
{
|
||||
var zeroProgress = new DownloadProgress
|
||||
{
|
||||
BytesReceived = 0,
|
||||
ProgressPercentage = 0,
|
||||
TotalBytesToReceive = InputFileStream.Length
|
||||
};
|
||||
|
||||
OnDecryptProgressUpdate(zeroProgress);
|
||||
|
||||
AaxFile.SetDecryptionKey(DownloadOptions.AudibleKey, DownloadOptions.AudibleIV);
|
||||
return zeroProgress;
|
||||
}
|
||||
|
||||
protected void Step_DownloadAudiobook_End(DownloadProgress zeroProgress)
|
||||
{
|
||||
AaxFile.Close();
|
||||
|
||||
CloseInputFileStream();
|
||||
|
||||
OnDecryptProgressUpdate(zeroProgress);
|
||||
}
|
||||
|
||||
protected void AaxFile_ConversionProgressUpdate(object sender, ConversionProgressEventArgs e)
|
||||
{
|
||||
var duration = AaxFile.Duration;
|
||||
var remainingSecsToProcess = (duration - e.ProcessPosition).TotalSeconds;
|
||||
var estTimeRemaining = remainingSecsToProcess / e.ProcessSpeed;
|
||||
|
||||
if (double.IsNormal(estTimeRemaining))
|
||||
OnDecryptTimeRemaining(TimeSpan.FromSeconds(estTimeRemaining));
|
||||
|
||||
var progressPercent = (e.ProcessPosition / e.TotalDuration);
|
||||
|
||||
OnDecryptProgressUpdate(
|
||||
new DownloadProgress
|
||||
{
|
||||
ProgressPercentage = 100 * progressPercent,
|
||||
BytesReceived = (long)(InputFileStream.Length * progressPercent),
|
||||
TotalBytesToReceive = InputFileStream.Length
|
||||
});
|
||||
}
|
||||
|
||||
public override void Cancel()
|
||||
{
|
||||
IsCanceled = true;
|
||||
AaxFile?.Cancel();
|
||||
AaxFile?.Dispose();
|
||||
CloseInputFileStream();
|
||||
}
|
||||
protected virtual void OnInitialized() { }
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,39 +1,39 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using AAXClean;
|
||||
using AAXClean;
|
||||
using AAXClean.Codecs;
|
||||
using Dinah.Core.StepRunner;
|
||||
using FileManager;
|
||||
using System;
|
||||
using System.IO;
|
||||
using System.Threading.Tasks;
|
||||
|
||||
#nullable enable
|
||||
namespace AaxDecrypter
|
||||
{
|
||||
public class AaxcDownloadMultiConverter : AaxcDownloadConvertBase
|
||||
{
|
||||
protected override StepSequence Steps { get; }
|
||||
private static readonly TimeSpan minChapterLength = TimeSpan.FromSeconds(3);
|
||||
private FileStream? workingFileStream;
|
||||
|
||||
private Func<MultiConvertFileProperties, string> multipartFileNameCallback { get; }
|
||||
public AaxcDownloadMultiConverter(string outDirectory, string cacheDirectory, IDownloadOptions dlOptions)
|
||||
: base(outDirectory, cacheDirectory, dlOptions)
|
||||
{
|
||||
AsyncSteps.Name = $"Download, Convert Aaxc To {DownloadOptions.OutputFormat}, and Split";
|
||||
AsyncSteps["Step 1: Get Aaxc Metadata"] = () => Task.Run(Step_GetMetadata);
|
||||
AsyncSteps["Step 2: Download Decrypted Audiobook"] = Step_DownloadAndDecryptAudiobookAsync;
|
||||
}
|
||||
|
||||
private static TimeSpan minChapterLength { get; } = TimeSpan.FromSeconds(3);
|
||||
private List<string> multiPartFilePaths { get; } = new List<string>();
|
||||
protected override void OnInitialized()
|
||||
{
|
||||
//Finishing configuring lame encoder.
|
||||
if (DownloadOptions.OutputFormat == OutputFormat.Mp3)
|
||||
MpegUtil.ConfigureLameOptions(
|
||||
AaxFile,
|
||||
DownloadOptions.LameConfig,
|
||||
DownloadOptions.Downsample,
|
||||
DownloadOptions.MatchSourceBitrate,
|
||||
chapters: null);
|
||||
}
|
||||
|
||||
public AaxcDownloadMultiConverter(string outFileName, string cacheDirectory, DownloadOptions dlLic,
|
||||
Func<MultiConvertFileProperties, string> multipartFileNameCallback = null)
|
||||
: base(outFileName, cacheDirectory, dlLic)
|
||||
{
|
||||
Steps = new StepSequence
|
||||
{
|
||||
Name = "Download and Convert Aaxc To " + DownloadOptions.OutputFormat,
|
||||
|
||||
["Step 1: Get Aaxc Metadata"] = Step_GetMetadata,
|
||||
["Step 2: Download Decrypted Audiobook"] = Step_DownloadAudiobookAsMultipleFilesPerChapter,
|
||||
["Step 3: Cleanup"] = Step_Cleanup,
|
||||
};
|
||||
this.multipartFileNameCallback = multipartFileNameCallback ?? MultiConvertFileProperties.DefaultMultipartFilename;
|
||||
}
|
||||
|
||||
/*
|
||||
/*
|
||||
https://github.com/rmcrackan/Libation/pull/127#issuecomment-939088489
|
||||
|
||||
If the chapter truly is empty, that is, 0 audio frames in length, then yes it is ignored.
|
||||
@@ -56,86 +56,102 @@ The book will be split into the following files:
|
||||
01:41:00 - 02:05:00 | Book - 04 - Chapter 4.m4b
|
||||
|
||||
That naming may not be desirable for everyone, but it's an easy change to instead use the last of the combined chapter's title in the file name.
|
||||
*/
|
||||
private bool Step_DownloadAudiobookAsMultipleFilesPerChapter()
|
||||
{
|
||||
var zeroProgress = Step_DownloadAudiobook_Start();
|
||||
*/
|
||||
protected async override Task<bool> Step_DownloadAndDecryptAudiobookAsync()
|
||||
{
|
||||
if (AaxFile is null) return false;
|
||||
var chapters = DownloadOptions.ChapterInfo.Chapters;
|
||||
|
||||
var chapters = DownloadOptions.ChapterInfo.Chapters.ToList();
|
||||
// Ensure split files are at least minChapterLength in duration.
|
||||
var splitChapters = new ChapterInfo(DownloadOptions.ChapterInfo.StartOffset);
|
||||
|
||||
// Ensure split files are at least minChapterLength in duration.
|
||||
var splitChapters = new ChapterInfo(DownloadOptions.ChapterInfo.StartOffset);
|
||||
var runningTotal = TimeSpan.Zero;
|
||||
string title = "";
|
||||
|
||||
var runningTotal = TimeSpan.Zero;
|
||||
string title = "";
|
||||
for (int i = 0; i < chapters.Count; i++)
|
||||
{
|
||||
if (runningTotal == TimeSpan.Zero)
|
||||
title = chapters[i].Title;
|
||||
|
||||
for (int i = 0; i < chapters.Count; i++)
|
||||
{
|
||||
if (runningTotal == TimeSpan.Zero)
|
||||
title = chapters[i].Title;
|
||||
runningTotal += chapters[i].Duration;
|
||||
|
||||
runningTotal += chapters[i].Duration;
|
||||
if (runningTotal >= minChapterLength)
|
||||
{
|
||||
splitChapters.AddChapter(title, runningTotal);
|
||||
runningTotal = TimeSpan.Zero;
|
||||
}
|
||||
}
|
||||
|
||||
if (runningTotal >= minChapterLength)
|
||||
{
|
||||
splitChapters.AddChapter(title, runningTotal);
|
||||
runningTotal = TimeSpan.Zero;
|
||||
}
|
||||
}
|
||||
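To make the splitting rule from the pull-request comment easier to follow in isolation, here is a minimal sketch of the same merge logic, with plain tuples standing in for the chapter objects; it assumes only the ChapterInfo constructor and AddChapter(title, duration) calls used in the loop above.

```csharp
using System;
using System.Collections.Generic;
using AAXClean; // for ChapterInfo, as used in this diff

static ChapterInfo MergeShortChapters(
	IEnumerable<(string Title, TimeSpan Duration)> chapters,
	TimeSpan minLength,
	TimeSpan startOffset)
{
	var split = new ChapterInfo(startOffset);   // same ctor as the loop above
	var runningTotal = TimeSpan.Zero;
	var title = "";

	foreach (var c in chapters)
	{
		if (runningTotal == TimeSpan.Zero)
			title = c.Title;                    // merged group keeps the first chapter's title

		runningTotal += c.Duration;
		if (runningTotal >= minLength)          // emit once the group reaches the minimum
		{
			split.AddChapter(title, runningTotal);
			runningTotal = TimeSpan.Zero;
		}
	}

	// A trailing group shorter than minLength is not emitted, matching the loop above.
	return split;
}

// Example: chapters of 2 s, 2 s, and 10 s with a 3 s minimum produce two splits:
// a 4 s chapter titled after the first short chapter, then the 10 s chapter.
```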
try
|
||||
{
|
||||
await (AaxConversion = decryptMultiAsync(AaxFile, splitChapters));
|
||||
|
||||
// reset, just in case
|
||||
multiPartFilePaths.Clear();
|
||||
if (AaxConversion.IsCompletedSuccessfully)
|
||||
await moveMoovToBeginning(AaxFile, workingFileStream?.Name);
|
||||
|
||||
ConversionResult result;
|
||||
return AaxConversion.IsCompletedSuccessfully;
|
||||
}
|
||||
finally
|
||||
{
|
||||
workingFileStream?.Dispose();
|
||||
FinalizeDownload();
|
||||
}
|
||||
}
|
||||
|
||||
AaxFile.ConversionProgressUpdate += AaxFile_ConversionProgressUpdate;
|
||||
if (DownloadOptions.OutputFormat == OutputFormat.M4b)
|
||||
result = ConvertToMultiMp4a(splitChapters);
|
||||
else
|
||||
result = ConvertToMultiMp3(splitChapters);
|
||||
AaxFile.ConversionProgressUpdate -= AaxFile_ConversionProgressUpdate;
|
||||
private Mp4Operation decryptMultiAsync(Mp4File aaxFile, ChapterInfo splitChapters)
|
||||
{
|
||||
var chapterCount = 0;
|
||||
return
|
||||
DownloadOptions.OutputFormat == OutputFormat.M4b
|
||||
? aaxFile.ConvertToMultiMp4aAsync
|
||||
(
|
||||
splitChapters,
|
||||
newSplitCallback => newSplit(++chapterCount, splitChapters, newSplitCallback)
|
||||
)
|
||||
: aaxFile.ConvertToMultiMp3Async
|
||||
(
|
||||
splitChapters,
|
||||
newSplitCallback => newSplit(++chapterCount, splitChapters, newSplitCallback),
|
||||
DownloadOptions.LameConfig
|
||||
);
|
||||
|
||||
Step_DownloadAudiobook_End(zeroProgress);
|
||||
void newSplit(int currentChapter, ChapterInfo splitChapters, INewSplitCallback newSplitCallback)
|
||||
{
|
||||
moveMoovToBeginning(aaxFile, workingFileStream?.Name).GetAwaiter().GetResult();
|
||||
var newTempFile = GetNewTempFilePath(DownloadOptions.OutputFormat.ToString());
|
||||
MultiConvertFileProperties props = new()
|
||||
{
|
||||
OutputFileName = newTempFile.FilePath,
|
||||
PartsPosition = currentChapter,
|
||||
PartsTotal = splitChapters.Count,
|
||||
Title = newSplitCallback.Chapter?.Title,
|
||||
};
|
||||
|
||||
return result == ConversionResult.NoErrorsDetected;
|
||||
}
|
||||
newSplitCallback.OutputFile = workingFileStream = createOutputFileStream(props);
|
||||
newSplitCallback.TrackTitle = DownloadOptions.GetMultipartTitle(props);
|
||||
newSplitCallback.TrackNumber = currentChapter;
|
||||
newSplitCallback.TrackCount = splitChapters.Count;
|
||||
|
||||
private ConversionResult ConvertToMultiMp4a(ChapterInfo splitChapters)
|
||||
{
|
||||
var chapterCount = 0;
|
||||
return AaxFile.ConvertToMultiMp4a(splitChapters, newSplitCallback =>
|
||||
createOutputFileStream(++chapterCount, splitChapters, newSplitCallback),
|
||||
DownloadOptions.TrimOutputToChapterLength);
|
||||
}
|
||||
OnTempFileCreated(newTempFile with { PartProperties = props });
|
||||
}
|
||||
|
||||
private ConversionResult ConvertToMultiMp3(ChapterInfo splitChapters)
|
||||
{
|
||||
var chapterCount = 0;
|
||||
return AaxFile.ConvertToMultiMp3(splitChapters, newSplitCallback =>
|
||||
{
|
||||
createOutputFileStream(++chapterCount, splitChapters, newSplitCallback);
|
||||
((NAudio.Lame.LameConfig)newSplitCallback.UserState).ID3.Track = chapterCount.ToString();
|
||||
}, DownloadOptions.LameConfig, DownloadOptions.TrimOutputToChapterLength);
|
||||
}
|
||||
FileStream createOutputFileStream(MultiConvertFileProperties multiConvertFileProperties)
|
||||
{
|
||||
FileUtility.SaferDelete(multiConvertFileProperties.OutputFileName);
|
||||
return File.Open(multiConvertFileProperties.OutputFileName, FileMode.OpenOrCreate, FileAccess.ReadWrite);
|
||||
}
|
||||
}
|
||||
|
||||
private void createOutputFileStream(int currentChapter, ChapterInfo splitChapters, NewSplitCallback newSplitCallback)
|
||||
{
|
||||
var fileName = multipartFileNameCallback(new()
|
||||
{
|
||||
OutputFileName = OutputFileName,
|
||||
PartsPosition = currentChapter,
|
||||
PartsTotal = splitChapters.Count,
|
||||
Title = newSplitCallback?.Chapter?.Title
|
||||
});
|
||||
fileName = FileUtility.GetValidFilename(fileName);
|
||||
|
||||
multiPartFilePaths.Add(fileName);
|
||||
|
||||
FileUtility.SaferDelete(fileName);
|
||||
|
||||
newSplitCallback.OutputFile = File.Open(fileName, FileMode.OpenOrCreate);
|
||||
|
||||
OnFileCreated(fileName);
|
||||
}
|
||||
}
|
||||
private Mp4Operation moveMoovToBeginning(Mp4File aaxFile, string? filename)
|
||||
{
|
||||
if (DownloadOptions.OutputFormat is OutputFormat.M4b
|
||||
&& DownloadOptions.MoveMoovToBeginning
|
||||
&& filename is not null
|
||||
&& File.Exists(filename))
|
||||
{
|
||||
return Mp4File.RelocateMoovAsync(filename);
|
||||
}
|
||||
else return Mp4Operation.FromCompleted(aaxFile);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,55 +1,108 @@
|
||||
using System;
|
||||
using System.IO;
|
||||
using AAXClean;
|
||||
using AAXClean;
|
||||
using AAXClean.Codecs;
|
||||
using Dinah.Core.StepRunner;
|
||||
using Dinah.Core.Net.Http;
|
||||
using FileManager;
|
||||
using System;
|
||||
using System.IO;
|
||||
using System.Threading.Tasks;
|
||||
|
||||
#nullable enable
|
||||
namespace AaxDecrypter
|
||||
{
|
||||
public class AaxcDownloadSingleConverter : AaxcDownloadConvertBase
|
||||
{
|
||||
protected override StepSequence Steps { get; }
|
||||
private readonly AverageSpeed averageSpeed = new();
|
||||
private TempFile? outputTempFile;
|
||||
|
||||
public AaxcDownloadSingleConverter(string outFileName, string cacheDirectory, DownloadOptions dlLic)
|
||||
: base(outFileName, cacheDirectory, dlLic)
|
||||
{
|
||||
Steps = new StepSequence
|
||||
{
|
||||
Name = "Download and Convert Aaxc To " + DownloadOptions.OutputFormat,
|
||||
public AaxcDownloadSingleConverter(string outDirectory, string cacheDirectory, IDownloadOptions dlOptions)
|
||||
: base(outDirectory, cacheDirectory, dlOptions)
|
||||
{
|
||||
var step = 1;
|
||||
|
||||
["Step 1: Get Aaxc Metadata"] = Step_GetMetadata,
|
||||
["Step 2: Download Decrypted Audiobook"] = Step_DownloadAudiobookAsSingleFile,
|
||||
["Step 3: Create Cue"] = Step_CreateCue,
|
||||
["Step 4: Cleanup"] = Step_Cleanup,
|
||||
};
|
||||
}
|
||||
AsyncSteps.Name = $"Download and Convert Aaxc To {DownloadOptions.OutputFormat}";
|
||||
AsyncSteps[$"Step {step++}: Get Aaxc Metadata"] = () => Task.Run(Step_GetMetadata);
|
||||
AsyncSteps[$"Step {step++}: Download Decrypted Audiobook"] = Step_DownloadAndDecryptAudiobookAsync;
|
||||
if (DownloadOptions.MoveMoovToBeginning && DownloadOptions.OutputFormat is OutputFormat.M4b)
|
||||
AsyncSteps[$"Step {step++}: Move moov atom to beginning"] = Step_MoveMoov;
|
||||
AsyncSteps[$"Step {step++}: Create Cue"] = Step_CreateCueAsync;
|
||||
}
|
||||
|
||||
private bool Step_DownloadAudiobookAsSingleFile()
|
||||
{
|
||||
var zeroProgress = Step_DownloadAudiobook_Start();
|
||||
protected override void OnInitialized()
|
||||
{
|
||||
//Finish configuring the lame encoder.
|
||||
if (DownloadOptions.OutputFormat == OutputFormat.Mp3)
|
||||
MpegUtil.ConfigureLameOptions(
|
||||
AaxFile,
|
||||
DownloadOptions.LameConfig,
|
||||
DownloadOptions.Downsample,
|
||||
DownloadOptions.MatchSourceBitrate,
|
||||
DownloadOptions.ChapterInfo);
|
||||
}
|
||||
|
||||
FileUtility.SaferDelete(OutputFileName);
|
||||
protected async override Task<bool> Step_DownloadAndDecryptAudiobookAsync()
|
||||
{
|
||||
if (AaxFile is null) return false;
|
||||
outputTempFile = GetNewTempFilePath(DownloadOptions.OutputFormat.ToString());
|
||||
FileUtility.SaferDelete(outputTempFile.FilePath);
|
||||
|
||||
var outputFile = File.Open(OutputFileName, FileMode.OpenOrCreate, FileAccess.ReadWrite);
|
||||
OnFileCreated(OutputFileName);
|
||||
using var outputFile = File.Open(outputTempFile.FilePath, FileMode.OpenOrCreate, FileAccess.ReadWrite);
|
||||
OnTempFileCreated(outputTempFile);
|
||||
|
||||
AaxFile.ConversionProgressUpdate += AaxFile_ConversionProgressUpdate;
|
||||
var decryptionResult
|
||||
= DownloadOptions.OutputFormat == OutputFormat.M4b
|
||||
? AaxFile.ConvertToMp4a(outputFile, DownloadOptions.ChapterInfo, DownloadOptions.TrimOutputToChapterLength)
|
||||
: AaxFile.ConvertToMp3(outputFile, DownloadOptions.LameConfig, DownloadOptions.ChapterInfo, DownloadOptions.TrimOutputToChapterLength);
|
||||
AaxFile.ConversionProgressUpdate -= AaxFile_ConversionProgressUpdate;
|
||||
try
|
||||
{
|
||||
await (AaxConversion = decryptAsync(AaxFile, outputFile));
|
||||
|
||||
DownloadOptions.ChapterInfo = AaxFile.Chapters;
|
||||
return AaxConversion.IsCompletedSuccessfully;
|
||||
}
|
||||
finally
|
||||
{
|
||||
FinalizeDownload();
|
||||
}
|
||||
}
|
||||
|
||||
Step_DownloadAudiobook_End(zeroProgress);
|
||||
private async Task<bool> Step_MoveMoov()
|
||||
{
|
||||
if (outputTempFile is null) return false;
|
||||
AaxConversion = Mp4File.RelocateMoovAsync(outputTempFile.FilePath);
|
||||
AaxConversion.ConversionProgressUpdate += AaxConversion_MoovProgressUpdate;
|
||||
await AaxConversion;
|
||||
AaxConversion.ConversionProgressUpdate -= AaxConversion_MoovProgressUpdate;
|
||||
return AaxConversion.IsCompletedSuccessfully;
|
||||
}
|
||||
|
||||
var success = decryptionResult == ConversionResult.NoErrorsDetected && !IsCanceled;
|
||||
if (success)
|
||||
base.OnFileCreated(OutputFileName);
|
||||
private void AaxConversion_MoovProgressUpdate(object? sender, ConversionProgressEventArgs e)
|
||||
{
|
||||
averageSpeed.AddPosition(e.ProcessPosition.TotalSeconds);
|
||||
|
||||
return success;
|
||||
}
|
||||
}
|
||||
var remainingTimeToProcess = (e.EndTime - e.ProcessPosition).TotalSeconds;
|
||||
var estTimeRemaining = remainingTimeToProcess / averageSpeed.Average;
|
||||
|
||||
if (double.IsNormal(estTimeRemaining))
|
||||
OnDecryptTimeRemaining(TimeSpan.FromSeconds(estTimeRemaining));
|
||||
|
||||
OnDecryptProgressUpdate(
|
||||
new DownloadProgress
|
||||
{
|
||||
ProgressPercentage = 100 * e.FractionCompleted,
|
||||
BytesReceived = (long)(InputFileStream.Length * e.FractionCompleted),
|
||||
TotalBytesToReceive = InputFileStream.Length
|
||||
});
|
||||
}
|
||||
|
||||
private Mp4Operation decryptAsync(Mp4File aaxFile, Stream outputFile)
|
||||
=> DownloadOptions.OutputFormat == OutputFormat.Mp3
|
||||
? aaxFile.ConvertToMp3Async
|
||||
(
|
||||
outputFile,
|
||||
DownloadOptions.LameConfig,
|
||||
DownloadOptions.ChapterInfo
|
||||
)
|
||||
: DownloadOptions.FixupFile
|
||||
? aaxFile.ConvertToMp4aAsync
|
||||
(
|
||||
outputFile,
|
||||
DownloadOptions.ChapterInfo
|
||||
)
|
||||
: aaxFile.ConvertToMp4aAsync(outputFile);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,179 +1,228 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using Dinah.Core;
|
||||
using Dinah.Core;
|
||||
using Dinah.Core.Net.Http;
|
||||
using Dinah.Core.StepRunner;
|
||||
using FileManager;
|
||||
using System;
|
||||
using System.IO;
|
||||
using System.Threading.Tasks;
|
||||
|
||||
#nullable enable
|
||||
namespace AaxDecrypter
|
||||
{
|
||||
public enum OutputFormat { M4b, Mp3 }
|
||||
|
||||
public abstract class AudiobookDownloadBase
|
||||
{
|
||||
public event EventHandler<string> RetrievedTitle;
|
||||
public event EventHandler<string> RetrievedAuthors;
|
||||
public event EventHandler<string> RetrievedNarrators;
|
||||
public event EventHandler<byte[]> RetrievedCoverArt;
|
||||
public event EventHandler<DownloadProgress> DecryptProgressUpdate;
|
||||
public event EventHandler<TimeSpan> DecryptTimeRemaining;
|
||||
public event EventHandler<string> FileCreated;
|
||||
public event EventHandler<string?>? RetrievedTitle;
|
||||
public event EventHandler<string?>? RetrievedAuthors;
|
||||
public event EventHandler<string?>? RetrievedNarrators;
|
||||
public event EventHandler<byte[]?>? RetrievedCoverArt;
|
||||
public event EventHandler<DownloadProgress>? DecryptProgressUpdate;
|
||||
public event EventHandler<TimeSpan>? DecryptTimeRemaining;
|
||||
public event EventHandler<TempFile>? TempFileCreated;
|
||||
|
||||
public bool IsCanceled { get; set; }
|
||||
|
||||
protected string OutputFileName { get; private set; }
|
||||
protected DownloadOptions DownloadOptions { get; }
|
||||
protected NetworkFileStream InputFileStream => (nfsPersister ??= OpenNetworkFileStream()).NetworkFileStream;
|
||||
|
||||
// Don't give the property a 'set'. This should have to be an obvious choice; not accidental
|
||||
protected void SetOutputFileName(string newOutputFileName) => OutputFileName = newOutputFileName;
|
||||
|
||||
protected abstract StepSequence Steps { get; }
|
||||
private NetworkFileStreamPersister nfsPersister;
|
||||
|
||||
private string jsonDownloadState { get; }
|
||||
public string TempFilePath { get; }
|
||||
|
||||
protected AudiobookDownloadBase(string outFileName, string cacheDirectory, DownloadOptions dlLic)
|
||||
public bool IsCanceled { get; protected set; }
|
||||
protected AsyncStepSequence AsyncSteps { get; } = new();
|
||||
protected string OutputDirectory { get; }
|
||||
public IDownloadOptions DownloadOptions { get; }
|
||||
protected NetworkFileStream InputFileStream => NfsPersister.NetworkFileStream;
|
||||
protected virtual long InputFilePosition
|
||||
{
|
||||
OutputFileName = ArgumentValidator.EnsureNotNullOrWhiteSpace(outFileName, nameof(outFileName));
|
||||
get
|
||||
{
|
||||
//Use try/catch instead of checking CanRead to avoid
|
||||
//a race with the background download completing
|
||||
//between the check and the Position call.
|
||||
try { return InputFileStream.Position; }
|
||||
catch { return InputFileStream.Length; }
|
||||
}
|
||||
}
|
||||
private bool downloadFinished;
|
||||
|
||||
var outDir = Path.GetDirectoryName(OutputFileName);
|
||||
if (!Directory.Exists(outDir))
|
||||
throw new DirectoryNotFoundException($"Directory does not exist: {nameof(outDir)}");
|
||||
private NetworkFileStreamPersister? m_nfsPersister;
|
||||
private NetworkFileStreamPersister NfsPersister => m_nfsPersister ??= OpenNetworkFileStream();
|
||||
private readonly DownloadProgress zeroProgress;
|
||||
private readonly string jsonDownloadState;
|
||||
private readonly string tempFilePath;
|
||||
|
||||
protected AudiobookDownloadBase(string outDirectory, string cacheDirectory, IDownloadOptions dlOptions)
|
||||
{
|
||||
OutputDirectory = ArgumentValidator.EnsureNotNullOrWhiteSpace(outDirectory, nameof(outDirectory));
|
||||
DownloadOptions = ArgumentValidator.EnsureNotNull(dlOptions, nameof(dlOptions));
|
||||
DownloadOptions.DownloadSpeedChanged += (_, speed) => InputFileStream.SpeedLimit = speed;
|
||||
|
||||
if (!Directory.Exists(OutputDirectory))
|
||||
Directory.CreateDirectory(OutputDirectory);
|
||||
|
||||
if (!Directory.Exists(cacheDirectory))
|
||||
throw new DirectoryNotFoundException($"Directory does not exist: {nameof(cacheDirectory)}");
|
||||
Directory.CreateDirectory(cacheDirectory);
|
||||
|
||||
jsonDownloadState = Path.Combine(cacheDirectory, Path.GetFileName(Path.ChangeExtension(OutputFileName, ".json")));
|
||||
TempFilePath = Path.ChangeExtension(jsonDownloadState, ".aaxc");
|
||||
jsonDownloadState = Path.Combine(cacheDirectory, $"{DownloadOptions.AudibleProductId}.json");
|
||||
tempFilePath = Path.ChangeExtension(jsonDownloadState, ".aaxc");
|
||||
|
||||
DownloadOptions = ArgumentValidator.EnsureNotNull(dlLic, nameof(dlLic));
|
||||
zeroProgress = new DownloadProgress
|
||||
{
|
||||
BytesReceived = 0,
|
||||
ProgressPercentage = 0,
|
||||
TotalBytesToReceive = 0
|
||||
};
|
||||
|
||||
// delete file after validation is complete
|
||||
FileUtility.SaferDelete(OutputFileName);
|
||||
OnDecryptProgressUpdate(zeroProgress);
|
||||
}
|
||||
|
||||
public abstract void Cancel();
|
||||
|
||||
public virtual void SetCoverArt(byte[] coverArt)
|
||||
protected TempFile GetNewTempFilePath(string extension)
|
||||
{
|
||||
if (coverArt is not null)
|
||||
OnRetrievedCoverArt(coverArt);
|
||||
extension = FileUtility.GetStandardizedExtension(extension);
|
||||
var path = Path.Combine(OutputDirectory, Guid.NewGuid().ToString("N") + extension);
|
||||
return new(path, extension);
|
||||
}
|
||||
|
||||
public bool Run()
|
||||
public async Task<bool> RunAsync()
|
||||
{
|
||||
var (IsSuccess, Elapsed) = Steps.Run();
|
||||
await InputFileStream.BeginDownloadingAsync();
|
||||
var progressTask = Task.Run(reportProgress);
|
||||
|
||||
if (!IsSuccess)
|
||||
Serilog.Log.Logger.Error("Conversion failed");
|
||||
(bool success, var elapsed) = await AsyncSteps.RunAsync();
|
||||
|
||||
return IsSuccess;
|
||||
//Stop the downloader so it doesn't keep running in the background.
|
||||
if (!success)
|
||||
NfsPersister.Dispose();
|
||||
|
||||
await progressTask;
|
||||
|
||||
var speedup = DownloadOptions.RuntimeLength / elapsed;
|
||||
Serilog.Log.Information($"Speedup is {speedup:F0}x realtime.");
|
||||
|
||||
NfsPersister.Dispose();
|
||||
return success;
|
||||
|
||||
async Task reportProgress()
|
||||
{
|
||||
AverageSpeed averageSpeed = new();
|
||||
|
||||
while (
|
||||
InputFileStream.CanRead
|
||||
&& InputFileStream.Length > InputFilePosition
|
||||
&& !InputFileStream.IsCancelled
|
||||
&& !downloadFinished)
|
||||
{
|
||||
averageSpeed.AddPosition(InputFilePosition);
|
||||
|
||||
var estSecsRemaining = (InputFileStream.Length - InputFilePosition) / averageSpeed.Average;
|
||||
|
||||
if (double.IsNormal(estSecsRemaining))
|
||||
OnDecryptTimeRemaining(TimeSpan.FromSeconds(estSecsRemaining));
|
||||
|
||||
var progressPercent = 100d * InputFilePosition / InputFileStream.Length;
|
||||
|
||||
OnDecryptProgressUpdate(
|
||||
new DownloadProgress
|
||||
{
|
||||
ProgressPercentage = progressPercent,
|
||||
BytesReceived = InputFilePosition,
|
||||
TotalBytesToReceive = InputFileStream.Length
|
||||
});
|
||||
|
||||
await Task.Delay(200);
|
||||
}
|
||||
|
||||
OnDecryptTimeRemaining(TimeSpan.Zero);
|
||||
OnDecryptProgressUpdate(zeroProgress);
|
||||
}
|
||||
}
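The speedup logged after the steps finish is the book's runtime divided by the wall-clock time the conversion took; for example (illustrative numbers) a 12-hour audiobook that downloads and decrypts in 6 minutes logs "Speedup is 120x realtime."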
|
||||
|
||||
protected void OnRetrievedTitle(string title)
|
||||
public virtual Task CancelAsync()
|
||||
{
|
||||
IsCanceled = true;
|
||||
FinalizeDownload();
|
||||
return Task.CompletedTask;
|
||||
}
|
||||
protected abstract Task<bool> Step_DownloadAndDecryptAudiobookAsync();
|
||||
|
||||
public virtual void SetCoverArt(byte[] coverArt) { }
|
||||
protected void OnRetrievedTitle(string? title)
|
||||
=> RetrievedTitle?.Invoke(this, title);
|
||||
protected void OnRetrievedAuthors(string authors)
|
||||
protected void OnRetrievedAuthors(string? authors)
|
||||
=> RetrievedAuthors?.Invoke(this, authors);
|
||||
protected void OnRetrievedNarrators(string narrators)
|
||||
protected void OnRetrievedNarrators(string? narrators)
|
||||
=> RetrievedNarrators?.Invoke(this, narrators);
|
||||
|
||||
protected void OnRetrievedCoverArt(byte[] coverArt)
|
||||
protected void OnRetrievedCoverArt(byte[]? coverArt)
|
||||
=> RetrievedCoverArt?.Invoke(this, coverArt);
|
||||
|
||||
protected void OnDecryptProgressUpdate(DownloadProgress downloadProgress)
|
||||
protected void OnDecryptProgressUpdate(DownloadProgress downloadProgress)
|
||||
=> DecryptProgressUpdate?.Invoke(this, downloadProgress);
|
||||
protected void OnDecryptTimeRemaining(TimeSpan timeRemaining)
|
||||
=> DecryptTimeRemaining?.Invoke(this, timeRemaining);
|
||||
protected void OnFileCreated(string path)
|
||||
=> FileCreated?.Invoke(this, path);
|
||||
public void OnTempFileCreated(TempFile path)
|
||||
=> TempFileCreated?.Invoke(this, path);
|
||||
|
||||
protected void CloseInputFileStream()
|
||||
protected virtual void FinalizeDownload()
|
||||
{
|
||||
nfsPersister?.NetworkFileStream?.Close();
|
||||
nfsPersister?.Dispose();
|
||||
NfsPersister.Dispose();
|
||||
downloadFinished = true;
|
||||
}
|
||||
|
||||
protected bool Step_CreateCue()
|
||||
protected async Task<bool> Step_CreateCueAsync()
|
||||
{
|
||||
if (!DownloadOptions.CreateCueSheet) return true;
|
||||
if (!DownloadOptions.CreateCueSheet) return !IsCanceled;
|
||||
|
||||
if (DownloadOptions.ChapterInfo.Count <= 1)
|
||||
{
|
||||
Serilog.Log.Logger.Information($"Skipped creating .cue because book has no chapters.");
|
||||
return !IsCanceled;
|
||||
}
|
||||
|
||||
// not a critical step. its failure should not prevent future steps from running
|
||||
try
|
||||
{
|
||||
var path = Path.ChangeExtension(OutputFileName, ".cue");
|
||||
path = FileUtility.GetValidFilename(path);
|
||||
File.WriteAllText(path, Cue.CreateContents(Path.GetFileName(OutputFileName), DownloadOptions.ChapterInfo));
|
||||
OnFileCreated(path);
|
||||
var tempFile = GetNewTempFilePath(".cue");
|
||||
await File.WriteAllTextAsync(tempFile.FilePath, Cue.CreateContents(Path.GetFileName(tempFile.FilePath), DownloadOptions.ChapterInfo));
|
||||
OnTempFileCreated(tempFile);
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
Serilog.Log.Logger.Error(ex, $"{nameof(Step_CreateCue)}. FAILED");
|
||||
Serilog.Log.Logger.Error(ex, $"{nameof(Step_CreateCueAsync)} Failed");
|
||||
}
|
||||
return !IsCanceled;
|
||||
}
|
||||
|
||||
protected bool Step_Cleanup()
|
||||
{
|
||||
bool success = !IsCanceled;
|
||||
if (success)
|
||||
{
|
||||
FileUtility.SaferDelete(jsonDownloadState);
|
||||
|
||||
if (DownloadOptions.AudibleKey is not null &&
|
||||
DownloadOptions.AudibleIV is not null &&
|
||||
DownloadOptions.RetainEncryptedFile)
|
||||
{
|
||||
string aaxPath = Path.ChangeExtension(TempFilePath, ".aax");
|
||||
FileUtility.SaferMove(TempFilePath, aaxPath);
|
||||
|
||||
//Write aax decryption key
|
||||
string keyPath = Path.ChangeExtension(aaxPath, ".key");
|
||||
FileUtility.SaferDelete(keyPath);
|
||||
File.WriteAllText(keyPath, $"Key={DownloadOptions.AudibleKey}\r\nIV={DownloadOptions.AudibleIV}");
|
||||
|
||||
OnFileCreated(aaxPath);
|
||||
OnFileCreated(keyPath);
|
||||
}
|
||||
else
|
||||
FileUtility.SaferDelete(TempFilePath);
|
||||
}
|
||||
|
||||
return success;
|
||||
}
|
||||
|
||||
private NetworkFileStreamPersister OpenNetworkFileStream()
|
||||
{
|
||||
if (!File.Exists(jsonDownloadState))
|
||||
return NewNetworkFilePersister();
|
||||
|
||||
NetworkFileStreamPersister? nfsp = default;
|
||||
try
|
||||
{
|
||||
var nfsp = new NetworkFileStreamPersister(jsonDownloadState);
|
||||
// If More than ~1 hour has elapsed since getting the download url, it will expire.
|
||||
// The new url will be to the same file.
|
||||
if (!File.Exists(jsonDownloadState))
|
||||
return nfsp = newNetworkFilePersister();
|
||||
|
||||
nfsp = new NetworkFileStreamPersister(jsonDownloadState);
|
||||
// The download url expires after 1 hour.
|
||||
// The new url points to the same file.
|
||||
nfsp.NetworkFileStream.SetUriForSameFile(new Uri(DownloadOptions.DownloadUrl));
|
||||
return nfsp;
|
||||
}
|
||||
catch
|
||||
{
|
||||
nfsp?.Target?.Dispose();
|
||||
FileUtility.SaferDelete(jsonDownloadState);
|
||||
FileUtility.SaferDelete(TempFilePath);
|
||||
return NewNetworkFilePersister();
|
||||
FileUtility.SaferDelete(tempFilePath);
|
||||
return nfsp = newNetworkFilePersister();
|
||||
}
|
||||
}
|
||||
|
||||
private NetworkFileStreamPersister NewNetworkFilePersister()
|
||||
{
|
||||
var headers = new System.Net.WebHeaderCollection
|
||||
finally
|
||||
{
|
||||
{ "User-Agent", DownloadOptions.UserAgent }
|
||||
};
|
||||
//nfsp will only be null when an unhandled exception occurs. Let the caller handle it.
|
||||
if (nfsp is not null)
|
||||
{
|
||||
nfsp.NetworkFileStream.RequestHeaders["User-Agent"] = DownloadOptions.UserAgent;
|
||||
nfsp.NetworkFileStream.SpeedLimit = DownloadOptions.DownloadSpeedBps;
|
||||
OnTempFileCreated(new(tempFilePath, DownloadOptions.InputType.ToString()));
|
||||
OnTempFileCreated(new(jsonDownloadState));
|
||||
}
|
||||
}
|
||||
|
||||
var networkFileStream = new NetworkFileStream(TempFilePath, new Uri(DownloadOptions.DownloadUrl), 0, headers);
|
||||
return new NetworkFileStreamPersister(networkFileStream, jsonDownloadState);
|
||||
NetworkFileStreamPersister newNetworkFilePersister()
|
||||
{
|
||||
var networkFileStream = new NetworkFileStream(tempFilePath, new Uri(DownloadOptions.DownloadUrl), 0, new() { { "User-Agent", DownloadOptions.UserAgent } });
|
||||
return new NetworkFileStreamPersister(networkFileStream, jsonDownloadState);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Source/AaxDecrypter/AverageSpeed.cs (new file, 171 lines)
@@ -0,0 +1,171 @@
using Dinah.Core;
using System;
using System.Collections.Generic;
using System.Linq;

namespace AaxDecrypter;

public static class LinqStats
{
	public static (double mean, double stdDev) BasicStatisticsBy<T>(this IEnumerable<T> values, Func<T, double> selector)
	{
		var count = values.Count();
		var mean = values.Average(selector);

		return (mean, Math.Sqrt(values.Sum(s => Math.Pow(selector(s) - mean, 2)) / (count - 1)));
	}

	public static bool T_Test_2By<T>(this IEnumerable<T> values, Func<T, double> selector, IEnumerable<T> secondGroup, Significance confidence)
	{
		var n1 = values.Count();
		var n2 = secondGroup.Count();
		var n = n1 + n2;

		if (n1 < 3 || n2 < 3) return false;

		(var mean1, var stdDev1) = values.BasicStatisticsBy(selector);
		(var mean2, var stdDev2) = secondGroup.BasicStatisticsBy(selector);

		var pooledStdDev = Math.Sqrt((((n1 - 1) * (stdDev1 * stdDev1)) + ((n2 - 1) * (stdDev2 * stdDev2))) / (n1 + n2 - 2));

		var testStat = Math.Abs(mean1 - mean2) / (pooledStdDev * Math.Sqrt(1d / n1 + 1d / n2));
		var crit = T_Stat(Math.Min(n - 2, MAX_DEGREES_FREEDOM), confidence);

		return testStat > crit;
	}

	public static bool T_Test_1By<T>(this IEnumerable<T> values, Func<T, double> selector, double testMean, Significance confidence)
	{
		var n = values.Count();

		if (n < 2) return false;

		(var sampleMean, var sampleStdDev) = values.BasicStatisticsBy(selector);

		var testStat = Math.Abs(sampleMean - testMean) / (sampleStdDev / Math.Sqrt(n));
		var crit = T_Stat(Math.Min(n - 1, MAX_DEGREES_FREEDOM), confidence);

		return testStat > crit;
	}

	private static double T_Stat(int degreesFreedom, Significance confidence)
	{
		ArgumentValidator.EnsureBetweenInclusive(degreesFreedom, nameof(degreesFreedom), MIN_DEGREES_FREEDOM, MAX_DEGREES_FREEDOM);

		return T_TABLE[(int)confidence][degreesFreedom - MIN_DEGREES_FREEDOM];
	}

	static LinqStats()
	{
		T_TABLE = new double[][] { T_Table_01, T_Table_05, T_Table_10, T_Table_15, T_Table_20, T_Table_25 };
	}

	private const int MIN_DEGREES_FREEDOM = 1;
	private const int MAX_DEGREES_FREEDOM = 201;
	/// <summary>
	/// 2-tailed t-Distribution critical values at 75%, 80%, 85%,
	/// 90%, 95%, and 99% confidence for 1 - 201 degrees of freedom.
	/// </summary>
	private readonly static double[][] T_TABLE;
private readonly static double[] T_Table_25 = { 2.414213562, 1.603567451, 1.422625281, 1.344397556, 1.300949037, 1.273349309, 1.254278682, 1.240318261, 1.229659173, 1.221255395, 1.214460246, 1.208852542, 1.204146242, 1.200140298, 1.196689284, 1.193685414, 1.191047107, 1.188711483, 1.186629298, 1.184761434, 1.183076432, 1.181548697, 1.180157199, 1.178884497, 1.177716003, 1.176639425, 1.175644329, 1.174721803, 1.173864189, 1.173064871, 1.1723181, 1.17161886, 1.170962753, 1.17034591, 1.169764906, 1.169216709, 1.168698615, 1.168208212, 1.167743338, 1.167302049, 1.166882595, 1.166483396, 1.166103019, 1.165740162, 1.165393644, 1.165062385, 1.164745398, 1.164441782, 1.164150707, 1.163871412, 1.163603196, 1.163345413, 1.163097467, 1.162858803, 1.162628911, 1.162407316, 1.162193577, 1.161987283, 1.161788052, 1.161595527, 1.161409375, 1.161229286, 1.161054967, 1.160886145, 1.160722566, 1.160563987, 1.160410184, 1.160260944, 1.160116066, 1.159975363, 1.159838656, 1.159705777, 1.159576569, 1.15945088, 1.15932857, 1.159209503, 1.159093552, 1.158980598, 1.158870524, 1.158763222, 1.158658589, 1.158556526, 1.15845694, 1.158359742, 1.158264847, 1.158172173, 1.158081645, 1.157993188, 1.157906731, 1.157822209, 1.157739556, 1.157658712, 1.157579617, 1.157502216, 1.157426454, 1.157352281, 1.157279646, 1.157208502, 1.157138804, 1.157070509, 1.157003573, 1.156937958, 1.156873624, 1.156810534, 1.156748653, 1.156687945, 1.156628379, 1.156569922, 1.156512543, 1.156456213, 1.156400904, 1.156346587, 1.156293237, 1.156240827, 1.156189334, 1.156138733, 1.156089001, 1.156040117, 1.155992058, 1.155944804, 1.155898335, 1.155852631, 1.155807674, 1.155763446, 1.155719928, 1.155677105, 1.155634959, 1.155593475, 1.155552637, 1.15551243, 1.155472839, 1.155433851, 1.155395452, 1.155357629, 1.155320368, 1.155283658, 1.155247486, 1.155211841, 1.15517671, 1.155142084, 1.15510795, 1.1550743, 1.155041122, 1.155008406, 1.154976144, 1.154944326, 1.154912942, 1.154881984, 1.154851443, 1.154821311, 1.15479158, 1.154762241, 1.154733287, 1.154704711, 1.154676505, 1.154648662, 1.154621175, 1.154594037, 1.154567242, 1.154540783, 1.154514654, 1.154488849, 1.154463361, 1.154438185, 1.154413316, 1.154388747, 1.154364474, 1.15434049, 1.154316792, 1.154293373, 1.154270229, 1.154247355, 1.154224746, 1.154202398, 1.154180307, 1.154158467, 1.154136875, 1.154115526, 1.154094417, 1.154073543, 1.1540529, 1.154032485, 1.154012294, 1.153992323, 1.153972568, 1.153953027, 1.153933695, 1.15391457, 1.153895647, 1.153876925, 1.153858399, 1.153840066, 1.153821925, 1.15380397, 1.153786201, 1.153768613, 1.153751204, 1.153733972, 1.153716914, 1.153700026 };
private readonly static double[] T_Table_20 = { 3.077683537, 1.885618083, 1.637744354, 1.533206274, 1.475884049, 1.439755747, 1.414923928, 1.39681531, 1.383028738, 1.372183641, 1.363430318, 1.356217334, 1.350171289, 1.345030374, 1.340605608, 1.336757167, 1.33337939, 1.330390944, 1.327728209, 1.325340707, 1.323187874, 1.321236742, 1.31946024, 1.317835934, 1.316345073, 1.314971864, 1.313702913, 1.312526782, 1.311433647, 1.310415025, 1.309463549, 1.308572793, 1.307737124, 1.306951587, 1.306211802, 1.305513886, 1.304854381, 1.304230204, 1.303638589, 1.303077053, 1.302543359, 1.302035487, 1.301551608, 1.30109006, 1.300649332, 1.300228048, 1.299824947, 1.299438879, 1.299068785, 1.298713694, 1.298372713, 1.298045016, 1.297729843, 1.297426488, 1.2971343, 1.296852673, 1.296581044, 1.29631889, 1.296065725, 1.295821094, 1.295584571, 1.295355762, 1.295134294, 1.29491982, 1.294712013, 1.294510568, 1.294315197, 1.294125629, 1.293941609, 1.293762898, 1.293589269, 1.293420507, 1.293256413, 1.293096793, 1.292941469, 1.292790268, 1.292643029, 1.292499597, 1.292359828, 1.292223583, 1.29209073, 1.291961144, 1.291834705, 1.291711301, 1.291590824, 1.291473171, 1.291358243, 1.291245948, 1.291136195, 1.291028899, 1.290923979, 1.290821356, 1.290720956, 1.290622708, 1.290526543, 1.290432395, 1.290340202, 1.290249904, 1.290161442, 1.290074761, 1.289989809, 1.289906533, 1.289824884, 1.289744816, 1.289666283, 1.289589241, 1.289513648, 1.289439464, 1.289366649, 1.289295166, 1.289224979, 1.289156054, 1.289088355, 1.289021851, 1.28895651, 1.288892302, 1.288829199, 1.288767171, 1.288706191, 1.288646234, 1.288587273, 1.288529284, 1.288472243, 1.288416127, 1.288360913, 1.288306581, 1.288253109, 1.288200477, 1.288148665, 1.288097654, 1.288047427, 1.287997964, 1.287949248, 1.287901264, 1.287853994, 1.287807422, 1.287761534, 1.287716314, 1.287671748, 1.287627821, 1.287584521, 1.287541833, 1.287499745, 1.287458245, 1.287417319, 1.287376957, 1.287337146, 1.287297876, 1.287259135, 1.287220914, 1.2871832, 1.287145985, 1.287109259, 1.287073012, 1.287037235, 1.287001918, 1.286967053, 1.286932631, 1.286898644, 1.286865084, 1.286831942, 1.286799212, 1.286766884, 1.286734952, 1.286703409, 1.286672248, 1.286641461, 1.286611042, 1.286580985, 1.286551283, 1.286521929, 1.286492918, 1.286464244, 1.286435901, 1.286407882, 1.286380184, 1.286352799, 1.286325724, 1.286298952, 1.286272479, 1.286246299, 1.286220408, 1.286194801, 1.286169474, 1.286144421, 1.286119638, 1.286095122, 1.286070867, 1.28604687, 1.286023127, 1.285999633, 1.285976384, 1.285953377, 1.285930609, 1.285908074, 1.285885771, 1.285863694, 1.285841842, 1.285820209, 1.285798794 };
private readonly static double[] T_Table_15 = { 4.16529977, 2.281930588, 1.924319657, 1.778192164, 1.699362566, 1.650173154, 1.616591737, 1.59222144, 1.573735785, 1.559235933, 1.547559766, 1.537956495, 1.529919606, 1.523095061, 1.517227969, 1.51213017, 1.507659754, 1.503707672, 1.500188756, 1.497035518, 1.494193795, 1.491619612, 1.489276897, 1.487135783, 1.485171326, 1.483362535, 1.481691617, 1.48014339, 1.478704821, 1.477364662, 1.47611315, 1.474941772, 1.473843072, 1.47281049, 1.471838233, 1.470921166, 1.470054719, 1.469234815, 1.468457801, 1.467720399, 1.467019655, 1.466352901, 1.465717725, 1.465111933, 1.464533534, 1.463980712, 1.463451805, 1.462945295, 1.46245979, 1.461994009, 1.461546775, 1.461117, 1.460703683, 1.460305896, 1.45992278, 1.459553538, 1.45919743, 1.458853767, 1.458521908, 1.458201256, 1.457891251, 1.457591373, 1.457301133, 1.457020074, 1.456747768, 1.45648381, 1.456227824, 1.455979454, 1.455738365, 1.455504241, 1.455276784, 1.455055715, 1.454840767, 1.45463169, 1.454428246, 1.454230212, 1.454037373, 1.453849529, 1.453666487, 1.453488066, 1.453314093, 1.453144404, 1.452978842, 1.452817259, 1.452659513, 1.452505469, 1.452354998, 1.452207977, 1.452064289, 1.451923821, 1.451786468, 1.451652126, 1.451520697, 1.451392088, 1.451266209, 1.451142973, 1.451022299, 1.450904108, 1.450788323, 1.450674871, 1.450563684, 1.450454694, 1.450347836, 1.450243048, 1.450140271, 1.450039448, 1.449940523, 1.449843444, 1.449748158, 1.449654617, 1.449562773, 1.449472581, 1.449383997, 1.449296977, 1.449211481, 1.449127468, 1.449044902, 1.448963744, 1.448883959, 1.448805513, 1.448728372, 1.448652503, 1.448577876, 1.44850446, 1.448432226, 1.448361146, 1.448291192, 1.448222337, 1.448154557, 1.448087826, 1.44802212, 1.447957415, 1.447893688, 1.447830919, 1.447769085, 1.447708165, 1.44764814, 1.44758899, 1.447530695, 1.447473238, 1.447416601, 1.447360765, 1.447305715, 1.447251433, 1.447197905, 1.447145113, 1.447093044, 1.447041682, 1.446991013, 1.446941023, 1.446891698, 1.446843026, 1.446794994, 1.446747588, 1.446700797, 1.446654609, 1.446609012, 1.446563996, 1.446519548, 1.446475659, 1.446432318, 1.446389514, 1.446347238, 1.44630548, 1.44626423, 1.44622348, 1.44618322, 1.446143442, 1.446104137, 1.446065296, 1.446026911, 1.445988975, 1.44595148, 1.445914417, 1.44587778, 1.445841561, 1.445805753, 1.445770349, 1.445735343, 1.445700727, 1.445666495, 1.445632641, 1.445599159, 1.445566042, 1.445533284, 1.445500881, 1.445468825, 1.445437112, 1.445405736, 1.445374691, 1.445343973, 1.445313576, 1.445283495, 1.445253726, 1.445224264, 1.445195103, 1.445166239, 1.445137668, 1.445109385, 1.445081387 };
private readonly static double[] T_Table_10 = { 6.313751515, 2.91998558, 2.353363435, 2.131846786, 2.015048373, 1.943180281, 1.894578605, 1.859548038, 1.833112933, 1.812461123, 1.795884819, 1.782287556, 1.770933396, 1.761310136, 1.753050356, 1.745883676, 1.739606726, 1.734063607, 1.729132812, 1.724718243, 1.720742903, 1.717144374, 1.713871528, 1.71088208, 1.708140761, 1.70561792, 1.703288446, 1.701130934, 1.699127027, 1.697260887, 1.695518783, 1.693888748, 1.692360309, 1.690924255, 1.689572458, 1.688297714, 1.68709362, 1.68595446, 1.684875122, 1.683851013, 1.682878002, 1.681952357, 1.681070703, 1.680229977, 1.679427393, 1.678660414, 1.677926722, 1.677224196, 1.676550893, 1.675905025, 1.67528495, 1.674689154, 1.674116237, 1.673564906, 1.673033965, 1.672522303, 1.672028888, 1.671552762, 1.671093032, 1.670648865, 1.670219484, 1.669804163, 1.669402222, 1.669013025, 1.668635976, 1.668270514, 1.667916114, 1.667572281, 1.667238549, 1.666914479, 1.666599658, 1.666293696, 1.665996224, 1.665706893, 1.665425373, 1.665151353, 1.664884537, 1.664624645, 1.664371409, 1.664124579, 1.663883913, 1.663649184, 1.663420175, 1.663196679, 1.6629785, 1.662765449, 1.662557349, 1.662354029, 1.662155326, 1.661961084, 1.661771155, 1.661585397, 1.661403674, 1.661225855, 1.661051817, 1.66088144, 1.66071461, 1.660551217, 1.660391156, 1.660234326, 1.66008063, 1.659929976, 1.659782273, 1.659637437, 1.659495383, 1.659356034, 1.659219312, 1.659085144, 1.658953458, 1.658824187, 1.658697265, 1.658572629, 1.658450216, 1.658329969, 1.65821183, 1.658095744, 1.657981659, 1.657869522, 1.657759285, 1.657650899, 1.657544319, 1.657439499, 1.657336397, 1.65723497, 1.657135178, 1.657036982, 1.656940344, 1.656845226, 1.656751594, 1.656659413, 1.656568649, 1.65647927, 1.656391244, 1.656304542, 1.656219133, 1.656134988, 1.65605208, 1.655970382, 1.655889868, 1.655810511, 1.655732287, 1.655655173, 1.655579143, 1.655504177, 1.655430251, 1.655357345, 1.655285437, 1.655214506, 1.655144534, 1.6550755, 1.655007387, 1.654940175, 1.654873847, 1.654808385, 1.654743774, 1.654679996, 1.654617035, 1.654554875, 1.654493503, 1.654432901, 1.654373057, 1.654313957, 1.654255585, 1.654197929, 1.654140976, 1.654084713, 1.654029128, 1.653974208, 1.653919942, 1.653866317, 1.653813324, 1.653760949, 1.653709184, 1.653658017, 1.653607437, 1.653557435, 1.653508002, 1.653459126, 1.6534108, 1.653363013, 1.653315758, 1.653269024, 1.653222803, 1.653177088, 1.653131869, 1.653087138, 1.653042889, 1.652999113, 1.652955802, 1.652912949, 1.652870547, 1.652828589, 1.652787068, 1.652745977, 1.65270531, 1.652665059, 1.652625219, 1.652585784, 1.652546746, 1.652508101 };
private readonly static double[] T_Table_05 = { 12.70620474, 4.30265273, 3.182446305, 2.776445105, 2.570581836, 2.446911851, 2.364624252, 2.306004135, 2.262157163, 2.228138852, 2.20098516, 2.17881283, 2.160368656, 2.144786688, 2.131449546, 2.119905299, 2.109815578, 2.10092204, 2.093024054, 2.085963447, 2.079613845, 2.073873068, 2.06865761, 2.063898562, 2.059538553, 2.055529439, 2.051830516, 2.048407142, 2.045229642, 2.042272456, 2.039513446, 2.036933343, 2.034515297, 2.032244509, 2.030107928, 2.028094001, 2.026192463, 2.024394164, 2.02269092, 2.02107539, 2.01954097, 2.018081703, 2.016692199, 2.015367574, 2.014103389, 2.012895599, 2.011740514, 2.010634758, 2.009575237, 2.008559112, 2.00758377, 2.006646805, 2.005745995, 2.004879288, 2.004044783, 2.003240719, 2.002465459, 2.001717484, 2.000995378, 2.000297822, 1.999623585, 1.998971517, 1.998340543, 1.997729654, 1.997137908, 1.996564419, 1.996008354, 1.995468931, 1.994945415, 1.994437112, 1.993943368, 1.993463567, 1.992997126, 1.992543495, 1.992102154, 1.99167261, 1.991254395, 1.990847069, 1.99045021, 1.990063421, 1.989686323, 1.989318557, 1.98895978, 1.988609667, 1.988267907, 1.987934206, 1.987608282, 1.987289865, 1.9869787, 1.986674541, 1.986377154, 1.986086317, 1.985801814, 1.985523442, 1.985251004, 1.984984312, 1.984723186, 1.984467455, 1.984216952, 1.983971519, 1.983731003, 1.983495259, 1.983264145, 1.983037526, 1.982815274, 1.982597262, 1.98238337, 1.982173483, 1.98196749, 1.981765282, 1.981566757, 1.981371815, 1.981180359, 1.980992298, 1.980807541, 1.980626002, 1.980447599, 1.980272249, 1.980099876, 1.979930405, 1.979763763, 1.979599878, 1.979438685, 1.979280117, 1.979124109, 1.978970602, 1.978819535, 1.97867085, 1.978524491, 1.978380405, 1.978238539, 1.978098842, 1.977961264, 1.977825758, 1.977692277, 1.977560777, 1.977431212, 1.977303542, 1.977177724, 1.97705372, 1.976931489, 1.976810994, 1.976692198, 1.976575066, 1.976459563, 1.976345655, 1.976233309, 1.976122494, 1.976013178, 1.975905331, 1.975798924, 1.975693928, 1.975590315, 1.975488058, 1.975387131, 1.975287508, 1.975189163, 1.975092073, 1.974996213, 1.97490156, 1.974808092, 1.974715786, 1.974624621, 1.974534576, 1.97444563, 1.974357764, 1.974270957, 1.974185191, 1.974100447, 1.974016708, 1.973933954, 1.973852169, 1.973771337, 1.97369144, 1.973612462, 1.973534388, 1.973457202, 1.973380889, 1.973305434, 1.973230823, 1.973157042, 1.973084077, 1.973011915, 1.972940542, 1.972869946, 1.972800114, 1.972731033, 1.972662692, 1.972595079, 1.972528182, 1.97246199, 1.972396491, 1.972331676, 1.972267533, 1.972204051, 1.972141222, 1.972079034, 1.972017478, 1.971956544, 1.971896224 };
private readonly static double[] T_Table_01 = { 63.65674116, 9.924843201, 5.84090931, 4.604094871, 4.032142984, 3.707428021, 3.499483297, 3.355387331, 3.249835542, 3.169272673, 3.105806516, 3.054539589, 3.012275839, 2.976842734, 2.946712883, 2.920781622, 2.89823052, 2.878440473, 2.860934606, 2.84533971, 2.831359558, 2.818756061, 2.807335684, 2.796939505, 2.787435814, 2.778714533, 2.770682957, 2.763262455, 2.756385904, 2.749995654, 2.744041919, 2.738481482, 2.733276642, 2.728394367, 2.723805589, 2.71948463, 2.715408722, 2.711557602, 2.707913184, 2.704459267, 2.701181304, 2.698066186, 2.695102079, 2.692278266, 2.689585019, 2.687013492, 2.684555618, 2.682204027, 2.679951974, 2.677793271, 2.675722234, 2.673733631, 2.671822636, 2.669984796, 2.668215988, 2.666512398, 2.664870482, 2.663286954, 2.661758752, 2.660283029, 2.658857127, 2.657478565, 2.656145025, 2.654854337, 2.653604469, 2.652393515, 2.651219685, 2.650081299, 2.648976774, 2.647904624, 2.646863444, 2.645851913, 2.644868782, 2.643912872, 2.642983067, 2.642078313, 2.641197611, 2.640340015, 2.639504627, 2.638690596, 2.637897113, 2.63712341, 2.636368757, 2.635632458, 2.634913852, 2.634212309, 2.633527229, 2.632858038, 2.632204191, 2.631565166, 2.630940463, 2.630329608, 2.629732145, 2.629147638, 2.628575671, 2.628015844, 2.627467774, 2.626931096, 2.626405457, 2.625890521, 2.625385965, 2.624891476, 2.624406758, 2.623931523, 2.623465496, 2.623008411, 2.622560015, 2.622120061, 2.621688313, 2.621264543, 2.620848534, 2.620440073, 2.620038957, 2.619644989, 2.619257981, 2.618877749, 2.618504116, 2.618136914, 2.617775976, 2.617421145, 2.617072266, 2.616729191, 2.616391776, 2.616059883, 2.615733377, 2.615412127, 2.615096008, 2.614784899, 2.61447868, 2.614177238, 2.613880461, 2.613588242, 2.613300477, 2.613017065, 2.612737908, 2.61246291, 2.61219198, 2.611925028, 2.611661966, 2.611402711, 2.611147181, 2.610895295, 2.610646976, 2.61040215, 2.610160742, 2.609922682, 2.609687901, 2.609456331, 2.609227907, 2.609002566, 2.608780245, 2.608560883, 2.608344423, 2.608130807, 2.60791998, 2.607711886, 2.607506474, 2.607303692, 2.607103489, 2.606905817, 2.606710628, 2.606517876, 2.606327515, 2.606139501, 2.605953791, 2.605770342, 2.605589114, 2.605410067, 2.605233162, 2.605058359, 2.604885623, 2.604714916, 2.604546204, 2.60437945, 2.604214622, 2.604051686, 2.60389061, 2.603731363, 2.603573912, 2.603418229, 2.603264282, 2.603112045, 2.602961487, 2.602812582, 2.602665303, 2.602519622, 2.602375515, 2.602232955, 2.602091918, 2.60195238, 2.601814317, 2.601677705, 2.601542523, 2.601408747, 2.601276355, 2.601145327, 2.601015642, 2.600887278, 2.600760216, 2.600634436 };
}

public enum Significance
{
	P01,
	P05,
	P10,
	P15,
	P20,
	P25
}

public class AverageSpeed
{
	/// <summary>Average speed in units per second</summary>
	public double Average { get; private set; }
	public TimeSpan SlowWindow { get; }
	public TimeSpan FastWindow { get; }
	public Significance SlowSignificance { get; }
	public Significance FastSignificance { get; }

	private DateTime start;
	private TimeSpan lastTime;
	private double lastPosition = double.NaN;

	private readonly record struct Point(TimeSpan Time, double Velocity);
	private readonly LinkedList<Point> speeds = new();
	private const int MAX_SPEEDS = 200;

	public AverageSpeed() : this(TimeSpan.FromSeconds(15), Significance.P10, TimeSpan.FromSeconds(3), Significance.P01) { }

	/// <param name="slowWindow">Total moving average time window</param>
	/// <param name="slowSignificance">T-test significance level at which the newest speed will be considered different from the slow window's mean speed.</param>
	/// <param name="fastWindow">A shorter moving window of the most recent speeds. The average speed in <paramref name="fastWindow"/> is compared to the average speed in the rest of <paramref name="slowWindow"/> to quickly detect large changes in speed.</param>
	/// <param name="fastSignificance">T-test significance level at which the mean speed in <paramref name="fastWindow"/> will be considered different from the mean speed of the remainder of <paramref name="slowWindow"/>.</param>
	public AverageSpeed(TimeSpan slowWindow, Significance slowSignificance, TimeSpan fastWindow, Significance fastSignificance)
	{
		SlowWindow = ArgumentValidator.EnsureGreaterThan(slowWindow, nameof(slowWindow), fastWindow);
		FastWindow = ArgumentValidator.EnsureGreaterThan(fastWindow, nameof(fastWindow), TimeSpan.Zero);
		SlowSignificance = slowSignificance;
		FastSignificance = fastSignificance;
	}

	/// <summary>Add a new position to the moving average</summary>
	public void AddPosition(double position)
	{
		var now = DateTime.UtcNow;
		if (start == default)
			start = now;

		var time = now - start;

		while (speeds.Count > MAX_SPEEDS || (speeds.Count > 2 && time - speeds.First.Value.Time > SlowWindow))
			speeds.RemoveFirst();

		if (!double.IsNaN(lastPosition))
		{
			var newSpeed = (position - lastPosition) / (time - lastTime).TotalSeconds;
			speeds.AddLast(new Point(time, newSpeed));
		}

		lastTime = time;
		lastPosition = position;

		Average = ComputeNextAverage();
	}

	private double ComputeNextAverage()
	{
		if (speeds.Count == 0)
			return 0;
		else if (speeds.Count == 1)
			return speeds.Last.Value.Velocity;
		else
		{
			var n_newest = speeds.Count(s => s.Time > lastTime.Subtract(FastWindow));

			var n_oldest = speeds.Count - n_newest;

			if (speeds.Take(n_oldest).T_Test_2By(s => s.Velocity, speeds.TakeLast(n_newest), FastSignificance))
			{
				//Speeds in FastWindow are significantly different from the rest of the speeds in SlowWindow.
				//Discard older speeds and keep only speeds in FastWindow
				for (; n_oldest > 0; n_oldest--)
					speeds.RemoveFirst();

				return speeds.Average(s => s.Velocity);
			}
			else
				return
					speeds.T_Test_1By(s => s.Velocity, Average, SlowSignificance)
					? speeds.Average(s => s.Velocity)
					: Average;
		}
	}
}
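For readers checking the statistics: T_Test_2By above is the standard pooled-variance two-sample t-test,

$$
s_p=\sqrt{\frac{(n_1-1)s_1^2+(n_2-1)s_2^2}{n_1+n_2-2}},\qquad
t=\frac{\left|\bar x_1-\bar x_2\right|}{s_p\sqrt{\frac{1}{n_1}+\frac{1}{n_2}}},
$$

and the two groups are treated as different when t exceeds the two-tailed critical value for min(n1 + n2 - 2, 201) degrees of freedom taken from the embedded tables. AverageSpeed.ComputeNextAverage applies this with FastSignificance (P01 by default) to decide whether the last ~3 seconds of samples reflect a genuinely new speed; if so it discards the older samples, otherwise it only refreshes Average when the one-sample test against the current Average is significant at SlowSignificance. The download progress loop and the moov-relocation handler feed positions in through AddPosition and read Average to estimate time remaining.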
@@ -1,8 +1,7 @@
using System;
using AAXClean;
using Dinah.Core;
using System.IO;
using System.Text;
using AAXClean;
using Dinah.Core;

namespace AaxDecrypter
{
@@ -16,15 +15,14 @@ namespace AaxDecrypter

			var startOffset = chapters.StartOffset;

			var trackCount = 0;
			var trackCount = 1;
			foreach (var c in chapters.Chapters)
			{
				var startTime = c.StartOffset - startOffset;
				trackCount++;

				stringBuilder.AppendLine($"TRACK {trackCount} AUDIO");
				stringBuilder.AppendLine($"TRACK {trackCount++} AUDIO");
				stringBuilder.AppendLine($"  TITLE \"{c.Title}\"");
				stringBuilder.AppendLine($"  INDEX 01 {(int)startTime.TotalMinutes}:{startTime:ss}:{(int)(startTime.Milliseconds / 1000d * 75)}");
				stringBuilder.AppendLine($"  INDEX 01 {(int)startTime.TotalMinutes}:{startTime:ss}:{(int)(startTime.Milliseconds * 75d / 1000):D2}");
			}

			return stringBuilder.ToString();
@@ -46,7 +44,7 @@ namespace AaxDecrypter
			for (var i = 0; i < cueContents.Length; i++)
			{
				var line = cueContents[i];
				if (!line.Trim().StartsWith("FILE") || !line.Contains(" "))
				if (!line.Trim().StartsWith("FILE") || !line.Contains(' '))
					continue;

				var fileTypeBegins = line.LastIndexOf(" ") + 1;
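The INDEX change above is easier to see with numbers: cue INDEX positions are MM:SS:FF, where FF counts 1/75-second frames, and the new :D2 format keeps the frame field two digits wide. A quick self-contained check (values made up):

```csharp
using System;

var startTime = new TimeSpan(0, 62, 3) + TimeSpan.FromMilliseconds(500);
// 62 whole minutes, 3 seconds, 500 ms => frame 500 * 75 / 1000 = 37
var index = $"INDEX 01 {(int)startTime.TotalMinutes}:{startTime:ss}:{(int)(startTime.Milliseconds * 75d / 1000):D2}";
Console.WriteLine(index); // INDEX 01 62:03:37
```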
@@ -1,30 +0,0 @@
|
||||
using AAXClean;
|
||||
using Dinah.Core;
|
||||
|
||||
namespace AaxDecrypter
|
||||
{
|
||||
public class DownloadOptions
|
||||
{
|
||||
public string DownloadUrl { get; }
|
||||
public string UserAgent { get; }
|
||||
public string AudibleKey { get; init; }
|
||||
public string AudibleIV { get; init; }
|
||||
public OutputFormat OutputFormat { get; init; }
|
||||
public bool TrimOutputToChapterLength { get; init; }
|
||||
public bool RetainEncryptedFile { get; init; }
|
||||
public bool StripUnabridged { get; init; }
|
||||
public bool CreateCueSheet { get; init; }
|
||||
public ChapterInfo ChapterInfo { get; set; }
|
||||
public NAudio.Lame.LameConfig LameConfig { get; set; }
|
||||
public bool Downsample { get; set; }
|
||||
public bool MatchSourceBitrate { get; set; }
|
||||
|
||||
public DownloadOptions(string downloadUrl, string userAgent)
|
||||
{
|
||||
DownloadUrl = ArgumentValidator.EnsureNotNullOrEmpty(downloadUrl, nameof(downloadUrl));
|
||||
UserAgent = ArgumentValidator.EnsureNotNullOrEmpty(userAgent, nameof(userAgent));
|
||||
|
||||
// no null/empty check for key/iv. unencrypted files do not have them
|
||||
}
|
||||
}
|
||||
}
|
||||
Source/AaxDecrypter/IDownloadOptions.cs (new file, 55 lines)
@@ -0,0 +1,55 @@
using AAXClean;
using System;

#nullable enable
namespace AaxDecrypter
{
	public class KeyData
	{
		public byte[] KeyPart1 { get; }
		public byte[]? KeyPart2 { get; }

		public KeyData(byte[] keyPart1, byte[]? keyPart2 = null)
		{
			KeyPart1 = keyPart1;
			KeyPart2 = keyPart2;
		}

		[Newtonsoft.Json.JsonConstructor]
		public KeyData(string keyPart1, string? keyPart2 = null)
		{
			ArgumentNullException.ThrowIfNull(keyPart1, nameof(keyPart1));
			KeyPart1 = Convert.FromHexString(keyPart1);
			if (keyPart2 != null)
				KeyPart2 = Convert.FromHexString(keyPart2);
		}
	}

	public interface IDownloadOptions
	{
		event EventHandler<long> DownloadSpeedChanged;
		string DownloadUrl { get; }
		string UserAgent { get; }
		KeyData[]? DecryptionKeys { get; }
		TimeSpan RuntimeLength { get; }
		OutputFormat OutputFormat { get; }
		bool StripUnabridged { get; }
		bool CreateCueSheet { get; }
		long DownloadSpeedBps { get; }
		ChapterInfo ChapterInfo { get; }
		bool FixupFile { get; }
		string? AudibleProductId { get; }
		string? Title { get; }
		string? Subtitle { get; }
		string? Publisher { get; }
		string? Language { get; }
		string? SeriesName { get; }
		string? SeriesNumber { get; }
		NAudio.Lame.LameConfig? LameConfig { get; }
		bool Downsample { get; }
		bool MatchSourceBitrate { get; }
		bool MoveMoovToBeginning { get; }
		string GetMultipartTitle(MultiConvertFileProperties props);
		public FileType? InputType { get; }
	}
}
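A small sketch of the KeyData hex round-trip above (the key values are made up; real key material comes from the Audible license):

```csharp
using System;
using AaxDecrypter;

// Hypothetical hex strings; the JSON constructor parses them with Convert.FromHexString.
var key = new KeyData(keyPart1: "0123456789abcdef0123456789abcdef",
                      keyPart2: "fedcba9876543210fedcba9876543210");

Console.WriteLine(key.KeyPart1.Length);               // 16
Console.WriteLine(Convert.ToHexString(key.KeyPart2)); // FEDCBA9876543210FEDCBA9876543210
```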
Source/AaxDecrypter/MpegUtil.cs (new file, 69 lines)
@@ -0,0 +1,69 @@
using AAXClean;
using AAXClean.Codecs;
using NAudio.Lame;
using System;

namespace AaxDecrypter
{
	public static class MpegUtil
	{
		private const string TagDomain = "com.pilabor.tone";
		public static void ConfigureLameOptions(
			Mp4File mp4File,
			LameConfig lameConfig,
			bool downsample,
			bool matchSourceBitrate,
			ChapterInfo chapters)
		{
			double bitrateMultiple = 1;

			if (mp4File.TimeScale < lameConfig.OutputSampleRate)
			{
				lameConfig.OutputSampleRate = mp4File.TimeScale;
			}
			else if (mp4File.TimeScale > lameConfig.OutputSampleRate)
			{
				bitrateMultiple *= (double)lameConfig.OutputSampleRate / mp4File.TimeScale;
			}

			if (mp4File.AudioChannels == 2)
			{
				if (downsample)
					bitrateMultiple /= 2;
				else
					lameConfig.Mode = MPEGMode.Stereo;
			}

			if (matchSourceBitrate)
			{
				int kbps = (int)Math.Round(mp4File.AverageBitrate * bitrateMultiple / 1024);

				if (lameConfig.VBR is null)
					lameConfig.BitRate = kbps;
				else if (lameConfig.VBR == VBRMode.ABR)
					lameConfig.ABRRateKbps = kbps;
			}

			//Setup metadata tags
			lameConfig.ID3 = mp4File.AppleTags.ToIDTags();

			if (mp4File.AppleTags.AppleListBox.GetFreeformTagString(TagDomain, "SUBTITLE") is string subtitle)
				lameConfig.ID3.Subtitle = subtitle;

			if (mp4File.AppleTags.AppleListBox.GetFreeformTagString(TagDomain, "LANGUAGE") is string lang)
				lameConfig.ID3.UserDefinedText.Add("LANGUAGE", lang);

			if (mp4File.AppleTags.AppleListBox.GetFreeformTagString(TagDomain, "SERIES") is string series)
				lameConfig.ID3.UserDefinedText.Add("SERIES", series);

			if (mp4File.AppleTags.AppleListBox.GetFreeformTagString(TagDomain, "PART") is string part)
				lameConfig.ID3.UserDefinedText.Add("PART", part);

			if (chapters?.Count > 0)
			{
				var cue = Cue.CreateContents(lameConfig.ID3.Title + ".mp3", chapters);
				lameConfig.ID3.UserDefinedText.Add("CUESHEET", cue);
			}
		}
	}
}
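A quick numeric read of ConfigureLameOptions: bitrateMultiple starts at 1, is scaled down when the LAME output sample rate is below the source TimeScale, and is halved again when a stereo source is downmixed. So, illustratively and assuming AverageBitrate is in bits per second, a 44.1 kHz stereo source averaging about 128 kbps encoded at a 22.05 kHz output rate with downsample enabled gets a multiple of 0.5 × 0.5 = 0.25, and matchSourceBitrate targets roughly 32 kbps, written to BitRate for CBR or to ABRRateKbps when the VBR mode is ABR.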
@@ -1,6 +1,4 @@
using System;
using System.IO;
using FileManager;

namespace AaxDecrypter
{
@@ -10,16 +8,6 @@ namespace AaxDecrypter
		public int PartsPosition { get; set; }
		public int PartsTotal { get; set; }
		public string Title { get; set; }

		public static string DefaultMultipartFilename(MultiConvertFileProperties multiConvertFileProperties)
		{
			var template = Path.ChangeExtension(multiConvertFileProperties.OutputFileName, null) + " - <ch# 0> - <title>" + Path.GetExtension(multiConvertFileProperties.OutputFileName);

			var fileNamingTemplate = new FileNamingTemplate(template) { IllegalCharacterReplacements = " " };
			fileNamingTemplate.AddParameterReplacement("ch# 0", FileUtility.GetSequenceFormatted(multiConvertFileProperties.PartsPosition, multiConvertFileProperties.PartsTotal));
			fileNamingTemplate.AddParameterReplacement("title", multiConvertFileProperties.Title ?? "");

			return fileNamingTemplate.GetFilePath();
		}
		public DateTime FileDate { get; } = DateTime.Now;
	}
}
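For context, the DefaultMultipartFilename removed here is what produced the file names quoted in the pull-request comment earlier in this diff: with OutputFileName "Book.m4b", part 4 of 10, and chapter title "Chapter 4", the template "Book - <ch# 0> - <title>.m4b" renders as "Book - 04 - Chapter 4.m4b" (GetSequenceFormatted supplies the zero-padded part number, and a missing title falls back to an empty string). In the new code the equivalent properties are built in AaxcDownloadMultiConverter's newSplit callback, and the per-part track title comes from DownloadOptions.GetMultipartTitle.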
@@ -1,135 +1,111 @@
|
||||
using Dinah.Core;
|
||||
using Newtonsoft.Json;
|
||||
using Newtonsoft.Json.Linq;
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Net;
|
||||
using System.Net.Http;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
|
||||
namespace AaxDecrypter
|
||||
{
|
||||
/// <summary>
|
||||
/// A <see cref="CookieContainer"/> for a single Uri.
|
||||
/// </summary>
|
||||
public class SingleUriCookieContainer : CookieContainer
|
||||
{
|
||||
private Uri baseAddress;
|
||||
public Uri Uri
|
||||
{
|
||||
get => baseAddress;
|
||||
set
|
||||
{
|
||||
baseAddress = new UriBuilder(value.Scheme, value.Host).Uri;
|
||||
}
|
||||
}
|
||||
|
||||
public CookieCollection GetCookies()
|
||||
{
|
||||
return GetCookies(Uri);
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// A resumable, simultaneous file downloader and reader.
|
||||
/// </summary>
|
||||
/// <summary>A resumable, simultaneous file downloader and reader. </summary>
|
||||
public class NetworkFileStream : Stream, IUpdatable
|
||||
{
|
||||
public event EventHandler Updated;
|
||||
|
||||
#region Public Properties
|
||||
|
||||
/// <summary>
|
||||
/// Location to save the downloaded data.
|
||||
/// </summary>
|
||||
/// <summary> Location to save the downloaded data. </summary>
|
||||
[JsonProperty(Required = Required.Always)]
|
||||
public string SaveFilePath { get; }
|
||||
|
||||
/// <summary>
|
||||
/// Http(s) address of the file to download.
|
||||
/// </summary>
|
||||
/// <summary> Http(s) address of the file to download. </summary>
|
||||
[JsonProperty(Required = Required.Always)]
|
||||
public Uri Uri { get; private set; }
|
||||
|
||||
/// <summary>
|
||||
/// All cookies set by caller or by the remote server.
|
||||
/// </summary>
|
||||
/// <summary> Http headers to be sent to the server with the request. </summary>
|
||||
[JsonProperty(Required = Required.Always)]
|
||||
public SingleUriCookieContainer CookieContainer { get; }
|
||||
public Dictionary<string, string> RequestHeaders { get; private set; }
|
||||
|
||||
/// <summary>
|
||||
/// Http headers to be sent to the server with the request.
|
||||
/// </summary>
|
||||
[JsonProperty(Required = Required.Always)]
|
||||
public WebHeaderCollection RequestHeaders { get; private set; }
|
||||
|
||||
/// <summary>
|
||||
/// The position in <see cref="SaveFilePath"/> that has been written and flushed to disk.
|
||||
/// </summary>
|
||||
/// <summary> The position in <see cref="SaveFilePath"/> that has been written and flushed to disk. </summary>
|
||||
[JsonProperty(Required = Required.Always)]
|
||||
public long WritePosition { get; private set; }
|
||||
|
||||
/// <summary>
|
||||
/// The total length of the <see cref="Uri"/> file to download.
|
||||
/// </summary>
|
||||
/// <summary> The total length of the <see cref="Uri"/> file to download. </summary>
|
||||
[JsonProperty(Required = Required.Always)]
|
||||
public long ContentLength { get; private set; }
|
||||
|
||||
[JsonIgnore]
|
||||
public bool IsCancelled => _cancellationSource.IsCancellationRequested;
|
||||
|
||||
[JsonIgnore]
|
||||
public Task DownloadTask { get; private set; }
|
||||
|
||||
private long _speedLimit = 0;
|
||||
/// <summary>bytes per second</summary>
|
||||
public long SpeedLimit { get => _speedLimit; set => _speedLimit = value <= 0 ? 0 : Math.Max(value, MIN_BYTES_PER_SECOND); }
|
||||
|
||||
#endregion
|
||||
|
||||
#region Private Properties
|
||||
private HttpWebRequest HttpRequest { get; set; }
|
||||
private FileStream _writeFile { get; }
|
||||
private FileStream _readFile { get; }
|
||||
private Stream _networkStream { get; set; }
|
||||
private bool hasBegunDownloading { get; set; }
|
||||
public bool IsCancelled { get; private set; }
|
||||
private EventWaitHandle downloadEnded { get; set; }
|
||||
private EventWaitHandle downloadedPiece { get; set; }
|
||||
private CancellationTokenSource _cancellationSource { get; } = new();
|
||||
private EventWaitHandle _downloadedPiece { get; set; }
|
||||
|
||||
private DateTime NextUpdateTime { get; set; }
|
||||
|
||||
#endregion
|
||||
|
||||
#region Constants
|
||||
|
||||
//Download buffer size
|
||||
private const int DOWNLOAD_BUFF_SZ = 32 * 1024;
|
||||
//Download memory buffer size
|
||||
private const int DOWNLOAD_BUFF_SZ = 8 * 1024;
|
||||
|
||||
//NetworkFileStream will flush all data in _writeFile to disk after every
|
||||
//DATA_FLUSH_SZ bytes are written to the file stream.
|
||||
private const int DATA_FLUSH_SZ = 1024 * 1024;
|
||||
|
||||
//Number of times per second the download rate is checked and throttled
|
||||
private const int THROTTLE_FREQUENCY = 8;
|
||||
|
||||
//Minimum throttle rate. The minimum amount of data that can be throttled
|
||||
//on each iteration of the download loop is DOWNLOAD_BUFF_SZ.
|
||||
public const int MIN_BYTES_PER_SECOND = DOWNLOAD_BUFF_SZ * THROTTLE_FREQUENCY;
|
||||
|
||||
#endregion
|
||||
|
||||
#region Constructor
|
||||
|
||||
/// <summary>
|
||||
/// A resumable, simultaneous file downloader and reader.
|
||||
/// </summary>
|
||||
/// <summary> A resumable, simultaneous file downloader and reader. </summary>
|
||||
/// <param name="saveFilePath">Path to a location on disk to save the downloaded data from <paramref name="uri"/></param>
|
||||
/// <param name="uri">Http(s) address of the file to download.</param>
|
||||
/// <param name="writePosition">The position in <paramref name="uri"/> to begin downloading.</param>
|
||||
/// <param name="requestHeaders">Http headers to be sent to the server with the <see cref="HttpWebRequest"/>.</param>
|
||||
/// <param name="cookies">A <see cref="SingleUriCookieContainer"/> with cookies to send with the <see cref="HttpWebRequest"/>. It will also be populated with any cookies set by the server. </param>
|
||||
public NetworkFileStream(string saveFilePath, Uri uri, long writePosition = 0, WebHeaderCollection requestHeaders = null, SingleUriCookieContainer cookies = null)
|
||||
public NetworkFileStream(string saveFilePath, Uri uri, long writePosition = 0, Dictionary<string, string> requestHeaders = null)
|
||||
{
|
||||
ArgumentValidator.EnsureNotNullOrWhiteSpace(saveFilePath, nameof(saveFilePath));
|
||||
ArgumentValidator.EnsureNotNullOrWhiteSpace(uri?.AbsoluteUri, nameof(uri));
|
||||
ArgumentValidator.EnsureGreaterThan(writePosition, nameof(writePosition), -1);
|
||||
SaveFilePath = ArgumentValidator.EnsureNotNullOrWhiteSpace(saveFilePath, nameof(saveFilePath));
|
||||
Uri = ArgumentValidator.EnsureNotNull(uri, nameof(uri));
|
||||
WritePosition = ArgumentValidator.EnsureGreaterThan(writePosition, nameof(writePosition), -1);
|
||||
|
||||
if (!Directory.Exists(Path.GetDirectoryName(saveFilePath)))
|
||||
throw new ArgumentException($"Specified {nameof(saveFilePath)} directory \"{Path.GetDirectoryName(saveFilePath)}\" does not exist.");
|
||||
|
||||
SaveFilePath = saveFilePath;
|
||||
Uri = uri;
|
||||
WritePosition = writePosition;
|
||||
RequestHeaders = requestHeaders ?? new WebHeaderCollection();
|
||||
CookieContainer = cookies ?? new SingleUriCookieContainer { Uri = uri };
|
||||
RequestHeaders = requestHeaders ?? new();
|
||||
|
||||
_writeFile = new FileStream(SaveFilePath, FileMode.OpenOrCreate, FileAccess.Write, FileShare.ReadWrite)
|
||||
{
|
||||
Position = WritePosition
|
||||
};
|
||||
|
||||
if (_writeFile.Length < WritePosition)
|
||||
{
|
||||
_writeFile.Dispose();
|
||||
throw new InvalidDataException($"{SaveFilePath} file length is shorter than {WritePosition}");
|
||||
}
|
||||
|
||||
_readFile = new FileStream(SaveFilePath, FileMode.Open, FileAccess.Read, FileShare.ReadWrite);
|
||||
|
||||
SetUriForSameFile(uri);
|
||||
@@ -139,213 +115,211 @@ namespace AaxDecrypter
|
||||
|
||||
#region Downloader
|
||||
|
||||
/// <summary>
|
||||
/// Update the <see cref="JsonFilePersister"/>.
|
||||
/// </summary>
|
||||
private void Update()
|
||||
/// <summary> Update the <see cref="Dinah.Core.IO.JsonFilePersister{T}"/>. </summary>
|
||||
private void OnUpdate(bool waitForWrite = false)
|
||||
{
|
||||
RequestHeaders = HttpRequest.Headers;
|
||||
Updated?.Invoke(this, EventArgs.Empty);
|
||||
try
|
||||
{
|
||||
if (waitForWrite || DateTime.UtcNow > NextUpdateTime)
|
||||
{
|
||||
Updated?.Invoke(this, EventArgs.Empty);
|
||||
//JsonFilePersister will not allow update intervals shorter than 100 milliseconds
|
||||
//If an update is called less than 100 ms since the last update, persister will
|
||||
//sleep the thread until 100 ms has elapsed.
|
||||
NextUpdateTime = DateTime.UtcNow.AddMilliseconds(110);
|
||||
}
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
Serilog.Log.Error(ex, "An error was encountered while saving the download progress to JSON");
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Set a different <see cref="System.Uri"/> to the same file targeted by this instance of <see cref="NetworkFileStream"/>
|
||||
/// </summary>
|
||||
/// <summary> Set a different <see cref="System.Uri"/> to the same file targeted by this instance of <see cref="NetworkFileStream"/> </summary>
|
||||
/// <param name="uriToSameFile">New <see cref="System.Uri"/> host must match existing host.</param>
|
||||
public void SetUriForSameFile(Uri uriToSameFile)
|
||||
{
|
||||
ArgumentValidator.EnsureNotNullOrWhiteSpace(uriToSameFile?.AbsoluteUri, nameof(uriToSameFile));
|
||||
|
||||
if (Path.GetFileName(uriToSameFile.LocalPath) != Path.GetFileName(Uri.LocalPath))
|
||||
throw new ArgumentException($"New uri to the same file must have the same file name.");
|
||||
if (uriToSameFile.Host != Uri.Host)
|
||||
throw new ArgumentException($"New uri to the same file must have the same host.\r\n Old Host :{Uri.Host}\r\nNew Host: {uriToSameFile.Host}");
|
||||
if (hasBegunDownloading)
|
||||
if (DownloadTask is not null)
|
||||
throw new InvalidOperationException("Cannot change Uri after download has started.");
|
||||
|
||||
Uri = uriToSameFile;
|
||||
HttpRequest = WebRequest.CreateHttp(Uri);
|
||||
|
||||
HttpRequest.CookieContainer = CookieContainer;
|
||||
HttpRequest.Headers = RequestHeaders;
|
||||
//If NetworkFileStream is resuming, Header will already contain a range.
|
||||
HttpRequest.Headers.Remove("Range");
|
||||
HttpRequest.AddRange(WritePosition);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Begins downloading <see cref="Uri"/> to <see cref="SaveFilePath"/> in a background thread.
|
||||
/// </summary>
|
||||
private void BeginDownloading()
|
||||
/// <summary> Begins downloading <see cref="Uri"/> to <see cref="SaveFilePath"/> in a background thread. </summary>
|
||||
/// <returns>The downloader <see cref="Task"/></returns>
|
||||
public async Task BeginDownloadingAsync()
|
||||
{
|
||||
downloadEnded = new EventWaitHandle(false, EventResetMode.ManualReset);
|
||||
|
||||
if (ContentLength != 0 && WritePosition == ContentLength)
|
||||
{
|
||||
hasBegunDownloading = true;
|
||||
downloadEnded.Set();
|
||||
DownloadTask = Task.CompletedTask;
|
||||
return;
|
||||
}
|
||||
|
||||
if (ContentLength != 0 && WritePosition > ContentLength)
|
||||
throw new WebException($"Specified write position (0x{WritePosition:X10}) is larger than {nameof(ContentLength)} (0x{ContentLength:X10}).");
|
||||
|
||||
var response = HttpRequest.GetResponse() as HttpWebResponse;
|
||||
//Initiate connection with the first request block and
|
||||
//get the total content length before returning.
|
||||
var client = new HttpClient();
|
||||
var response = await RequestNextByteRangeAsync(client);
|
||||
|
||||
if (ContentLength != 0 && ContentLength != response.FileSize)
|
||||
throw new WebException($"Content length of 0x{response.FileSize:X10} differs from partially downloaded content length of 0x{ContentLength:X10}");
|
||||
|
||||
ContentLength = response.FileSize;
|
||||
|
||||
_downloadedPiece = new EventWaitHandle(false, EventResetMode.AutoReset);
|
||||
//Hand off the client and the open request to the downloader to download and write data to file.
|
||||
DownloadTask = Task.Run(() => DownloadLoopInternal(client , response), _cancellationSource.Token);
|
||||
}
|
||||
|
||||
private async Task DownloadLoopInternal(HttpClient client, BlockResponse blockResponse)
|
||||
{
|
||||
try
|
||||
{
|
||||
long startPosition = WritePosition;
|
||||
|
||||
while (WritePosition < ContentLength && !IsCancelled)
|
||||
{
|
||||
try
|
||||
{
|
||||
await DownloadToFile(blockResponse);
|
||||
}
|
||||
catch (HttpIOException e)
|
||||
when (e.HttpRequestError is HttpRequestError.ResponseEnded
|
||||
&& WritePosition != startPosition
|
||||
&& WritePosition < ContentLength && !IsCancelled)
|
||||
{
|
||||
Serilog.Log.Logger.Debug($"The download connection ended before the file completed downloading all 0x{ContentLength:X10} bytes");
|
||||
|
||||
//the download made *some* progress since the last attempt.
|
||||
//Try again to complete the download from where it left off.
|
||||
//Make sure to rewind file to last flush position.
|
||||
_writeFile.Position = startPosition = WritePosition;
|
||||
blockResponse.Dispose();
|
||||
blockResponse = await RequestNextByteRangeAsync(client);
|
||||
|
||||
Serilog.Log.Logger.Debug($"Resuming the file download starting at position 0x{WritePosition:X10}.");
|
||||
}
|
||||
}
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
//Don't throw from DownloadTask.
|
||||
//This task gets awaited in Dispose() and we don't want to have an unhandled exception there.
|
||||
Serilog.Log.Error(ex, "An error was encountered during the download process.");
|
||||
}
|
||||
finally
|
||||
{
|
||||
_writeFile.Dispose();
|
||||
blockResponse.Dispose();
|
||||
client.Dispose();
|
||||
}
|
||||
}
|
||||
|
||||
private async Task<BlockResponse> RequestNextByteRangeAsync(HttpClient client)
|
||||
{
|
||||
using var request = new HttpRequestMessage(HttpMethod.Get, Uri);
|
||||
|
||||
//Just in case it snuck in the saved json (Issue #1232)
|
||||
RequestHeaders.Remove("Range");
|
||||
|
||||
foreach (var header in RequestHeaders)
|
||||
request.Headers.Add(header.Key, header.Value);
|
||||
|
||||
request.Headers.Add("Range", $"bytes={WritePosition}-");
|
||||
|
||||
var response = await client.SendAsync(request, HttpCompletionOption.ResponseHeadersRead, _cancellationSource.Token);
|
||||
|
||||
if (response.StatusCode != HttpStatusCode.PartialContent)
|
||||
throw new WebException($"Server at {Uri.Host} responded with unexpected status code: {response.StatusCode}.");
|
||||
|
||||
//Content length is the length of the range request, and it is only equal
|
||||
//to the complete file length if requesting Range: bytes=0-
|
||||
if (WritePosition == 0)
|
||||
ContentLength = response.ContentLength;
|
||||
var totalSize = response.Content.Headers.ContentRange?.Length ??
|
||||
throw new WebException("The response did not contain a total content length.");
|
||||
|
||||
_networkStream = response.GetResponseStream();
|
||||
downloadedPiece = new EventWaitHandle(false, EventResetMode.AutoReset);
|
||||
var rangeSize = response.Content.Headers.ContentLength ??
|
||||
throw new WebException($"The response did not contain a {nameof(response.Content.Headers.ContentLength)};");
|
||||
|
||||
//Download the file in the background.
|
||||
new Thread(() => DownloadFile())
|
||||
{ IsBackground = true }
|
||||
.Start();
|
||||
|
||||
hasBegunDownloading = true;
|
||||
return;
|
||||
return new BlockResponse(response, rangeSize, totalSize);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Downlod <see cref="Uri"/> to <see cref="SaveFilePath"/>.
|
||||
/// </summary>
|
||||
private void DownloadFile()
|
||||
private readonly record struct BlockResponse(HttpResponseMessage Response, long BlockSize, long FileSize) : IDisposable
|
||||
{
|
||||
public void Dispose() => Response?.Dispose();
|
||||
}
|
||||
|
||||
/// <summary> Download <see cref="Uri"/> to <see cref="SaveFilePath"/>.</summary>
|
||||
private async Task DownloadToFile(BlockResponse block)
|
||||
{
|
||||
var endPosition = WritePosition + block.BlockSize;
|
||||
using var networkStream = await block.Response.Content.ReadAsStreamAsync(_cancellationSource.Token);
|
||||
|
||||
var downloadPosition = WritePosition;
|
||||
var nextFlush = downloadPosition + DATA_FLUSH_SZ;
|
||||
|
||||
var buff = new byte[DOWNLOAD_BUFF_SZ];
|
||||
do
|
||||
|
||||
try
|
||||
{
|
||||
var bytesRead = _networkStream.Read(buff, 0, DOWNLOAD_BUFF_SZ);
|
||||
_writeFile.Write(buff, 0, bytesRead);
|
||||
|
||||
downloadPosition += bytesRead;
|
||||
|
||||
if (downloadPosition > nextFlush)
|
||||
DateTime startTime = DateTime.UtcNow;
|
||||
long bytesReadSinceThrottle = 0;
|
||||
int bytesRead;
|
||||
do
|
||||
{
|
||||
_writeFile.Flush();
|
||||
WritePosition = downloadPosition;
|
||||
Update();
|
||||
nextFlush = downloadPosition + DATA_FLUSH_SZ;
|
||||
downloadedPiece.Set();
|
||||
}
|
||||
bytesRead = await networkStream.ReadAsync(buff, _cancellationSource.Token);
|
||||
await _writeFile.WriteAsync(buff, 0, bytesRead, _cancellationSource.Token);
|
||||
|
||||
} while (downloadPosition < ContentLength && !IsCancelled);
|
||||
downloadPosition += bytesRead;
|
||||
|
||||
_writeFile.Close();
|
||||
_networkStream.Close();
|
||||
WritePosition = downloadPosition;
|
||||
Update();
|
||||
if (downloadPosition > nextFlush)
|
||||
{
|
||||
await _writeFile.FlushAsync(_cancellationSource.Token);
|
||||
WritePosition = downloadPosition;
|
||||
OnUpdate();
|
||||
nextFlush = downloadPosition + DATA_FLUSH_SZ;
|
||||
_downloadedPiece.Set();
|
||||
}
|
||||
|
||||
downloadedPiece.Set();
|
||||
downloadEnded.Set();
|
||||
#region throttle
|
||||
|
||||
if (!IsCancelled && WritePosition < ContentLength)
|
||||
throw new WebException($"Downloaded size (0x{WritePosition:X10}) is less than {nameof(ContentLength)} (0x{ContentLength:X10}).");
|
||||
bytesReadSinceThrottle += bytesRead;
|
||||
|
||||
if (WritePosition > ContentLength)
|
||||
throw new WebException($"Downloaded size (0x{WritePosition:X10}) is greater than {nameof(ContentLength)} (0x{ContentLength:X10}).");
|
||||
if (SpeedLimit >= MIN_BYTES_PER_SECOND && bytesReadSinceThrottle > SpeedLimit / THROTTLE_FREQUENCY)
|
||||
{
|
||||
var delayMS = (int)(startTime.AddSeconds(1d / THROTTLE_FREQUENCY) - DateTime.UtcNow).TotalMilliseconds;
|
||||
if (delayMS > 0)
|
||||
await Task.Delay(delayMS, _cancellationSource.Token);
|
||||
|
||||
}
|
||||
startTime = DateTime.UtcNow;
|
||||
bytesReadSinceThrottle = 0;
|
||||
}
|
||||
|
||||
#endregion
|
||||
#endregion
|
||||
|
||||
#region Json Converters
|
||||
} while (downloadPosition < endPosition && !IsCancelled && bytesRead > 0);
|
||||
|
||||
public static JsonSerializerSettings GetJsonSerializerSettings()
|
||||
{
|
||||
var settings = new JsonSerializerSettings();
|
||||
settings.Converters.Add(new CookieContainerConverter());
|
||||
settings.Converters.Add(new WebHeaderCollectionConverter());
|
||||
return settings;
|
||||
}
|
||||
await _writeFile.FlushAsync(_cancellationSource.Token);
|
||||
WritePosition = downloadPosition;
|
||||
|
||||
internal class CookieContainerConverter : JsonConverter
|
||||
{
|
||||
public override bool CanConvert(Type objectType)
|
||||
=> objectType == typeof(SingleUriCookieContainer);
|
||||
if (!IsCancelled && WritePosition < endPosition)
|
||||
throw new WebException($"Downloaded size (0x{WritePosition:X10}) is less than {nameof(ContentLength)} (0x{ContentLength:X10}).");
|
||||
|
||||
public override object ReadJson(JsonReader reader, Type objectType, object existingValue, JsonSerializer serializer)
|
||||
{
|
||||
var jObj = JObject.Load(reader);
|
||||
|
||||
var result = new SingleUriCookieContainer()
|
||||
{
|
||||
Uri = new Uri(jObj["Uri"].Value<string>()),
|
||||
Capacity = jObj["Capacity"].Value<int>(),
|
||||
MaxCookieSize = jObj["MaxCookieSize"].Value<int>(),
|
||||
PerDomainCapacity = jObj["PerDomainCapacity"].Value<int>()
|
||||
};
|
||||
|
||||
var cookieList = jObj["Cookies"].ToList();
|
||||
|
||||
foreach (var cookie in cookieList)
|
||||
{
|
||||
result.Add(
|
||||
new Cookie
|
||||
{
|
||||
Comment = cookie["Comment"].Value<string>(),
|
||||
HttpOnly = cookie["HttpOnly"].Value<bool>(),
|
||||
Discard = cookie["Discard"].Value<bool>(),
|
||||
Domain = cookie["Domain"].Value<string>(),
|
||||
Expired = cookie["Expired"].Value<bool>(),
|
||||
Expires = cookie["Expires"].Value<DateTime>(),
|
||||
Name = cookie["Name"].Value<string>(),
|
||||
Path = cookie["Path"].Value<string>(),
|
||||
Port = cookie["Port"].Value<string>(),
|
||||
Secure = cookie["Secure"].Value<bool>(),
|
||||
Value = cookie["Value"].Value<string>(),
|
||||
Version = cookie["Version"].Value<int>(),
|
||||
});
|
||||
}
|
||||
|
||||
return result;
|
||||
if (WritePosition > endPosition)
|
||||
throw new WebException($"Downloaded size (0x{WritePosition:X10}) is greater than {nameof(ContentLength)} (0x{ContentLength:X10}).");
|
||||
}
|
||||
|
||||
public override bool CanWrite => true;
|
||||
|
||||
public override void WriteJson(JsonWriter writer, object value, JsonSerializer serializer)
|
||||
catch (OperationCanceledException)
|
||||
{
|
||||
var cookies = value as SingleUriCookieContainer;
|
||||
var obj = (JObject)JToken.FromObject(value);
|
||||
var container = cookies.GetCookies();
|
||||
var propertyNames = container.Select(c => JToken.FromObject(c));
|
||||
obj.AddFirst(new JProperty("Cookies", new JArray(propertyNames)));
|
||||
obj.WriteTo(writer);
|
||||
Serilog.Log.Information("Download was cancelled");
|
||||
}
|
||||
}
|
||||
|
||||
internal class WebHeaderCollectionConverter : JsonConverter
|
||||
{
|
||||
public override bool CanConvert(Type objectType)
|
||||
=> objectType == typeof(WebHeaderCollection);
|
||||
|
||||
public override object ReadJson(JsonReader reader, Type objectType, object existingValue, JsonSerializer serializer)
|
||||
finally
|
||||
{
|
||||
var jObj = JObject.Load(reader);
|
||||
var result = new WebHeaderCollection();
|
||||
|
||||
foreach (var kvp in jObj)
|
||||
result.Add(kvp.Key, kvp.Value.Value<string>());
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
public override bool CanWrite => true;
|
||||
|
||||
public override void WriteJson(JsonWriter writer, object value, JsonSerializer serializer)
|
||||
{
|
||||
var jObj = new JObject();
|
||||
var type = value.GetType();
|
||||
var headers = value as WebHeaderCollection;
|
||||
var jHeaders = headers.AllKeys.Select(k => new JProperty(k, headers[k]));
|
||||
jObj.Add(jHeaders);
|
||||
jObj.WriteTo(writer);
|
||||
_downloadedPiece.Set();
|
||||
OnUpdate(waitForWrite: true);
|
||||
}
|
||||
}
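// Worked numbers for the download throttle above (a sketch; the 1 MiB/s limit is a
// hypothetical caller-chosen value):
//   DOWNLOAD_BUFF_SZ    = 8 * 1024  = 8,192 bytes per read
//   THROTTLE_FREQUENCY  = 8         => eight 125 ms slices per second
//   SpeedLimit          = 1,048,576 bytes/s (1 MiB/s)
//   per-slice budget    = SpeedLimit / THROTTLE_FREQUENCY = 131,072 bytes
//   reads per slice     = 131,072 / 8,192 = 16 buffered reads, then Task.Delay until
//                         the 125 ms slice has elapsed.
// The smallest enforceable limit is one full buffer per slice:
//   MIN_BYTES_PER_SECOND = DOWNLOAD_BUFF_SZ * THROTTLE_FREQUENCY = 65,536 bytes/s.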
|
||||
|
||||
@@ -354,10 +328,10 @@ namespace AaxDecrypter
|
||||
#region Download Stream Reader
|
||||
|
||||
[JsonIgnore]
|
||||
public override bool CanRead => true;
|
||||
public override bool CanRead => _readFile.CanRead;
|
||||
|
||||
[JsonIgnore]
|
||||
public override bool CanSeek => true;
|
||||
public override bool CanSeek => _readFile.CanSeek;
|
||||
|
||||
[JsonIgnore]
|
||||
public override bool CanWrite => false;
|
||||
@@ -367,8 +341,8 @@ namespace AaxDecrypter
|
||||
{
|
||||
get
|
||||
{
|
||||
if (!hasBegunDownloading)
|
||||
BeginDownloading();
|
||||
if (DownloadTask is null)
|
||||
throw new InvalidOperationException($"Background downloader must first be started by calling {nameof(BeginDownloadingAsync)}");
|
||||
return ContentLength;
|
||||
}
|
||||
}
|
||||
@@ -385,18 +359,18 @@ namespace AaxDecrypter
|
||||
[JsonIgnore]
|
||||
public override int WriteTimeout { get => base.WriteTimeout; set => base.WriteTimeout = value; }
|
||||
|
||||
public override void Flush() => throw new NotImplementedException();
|
||||
public override void SetLength(long value) => throw new NotImplementedException();
|
||||
public override void Write(byte[] buffer, int offset, int count) => throw new NotImplementedException();
|
||||
public override void Flush() => throw new InvalidOperationException();
|
||||
public override void SetLength(long value) => throw new InvalidOperationException();
|
||||
public override void Write(byte[] buffer, int offset, int count) => throw new InvalidOperationException();
|
||||
|
||||
public override int Read(byte[] buffer, int offset, int count)
|
||||
{
|
||||
if (!hasBegunDownloading)
|
||||
BeginDownloading();
|
||||
if (DownloadTask is null)
|
||||
throw new InvalidOperationException($"Background downloader must first be started by calling {nameof(BeginDownloadingAsync)}");
|
||||
|
||||
var toRead = Math.Min(count, Length - Position);
|
||||
WaitToPosition(Position + toRead);
|
||||
return _readFile.Read(buffer, offset, count);
|
||||
return IsCancelled ? 0 : _readFile.Read(buffer, offset, count);
|
||||
}
|
||||
|
||||
public override long Seek(long offset, SeekOrigin origin)
|
||||
@@ -412,32 +386,42 @@ namespace AaxDecrypter
|
||||
return _readFile.Position = newPosition;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Blocks until the file has downloaded to at least <paramref name="requiredPosition"/>, then returns.
|
||||
/// </summary>
|
||||
/// <param name="requiredPosition">The minimum required flished data length in <see cref="SaveFilePath"/>.</param>
|
||||
/// <summary>Blocks until the file has downloaded to at least <paramref name="requiredPosition"/>, then returns. </summary>
|
||||
/// <param name="requiredPosition">The minimum required flushed data length in <see cref="SaveFilePath"/>.</param>
|
||||
private void WaitToPosition(long requiredPosition)
|
||||
{
|
||||
while (requiredPosition > WritePosition && !IsCancelled && hasBegunDownloading && !downloadedPiece.WaitOne(1000)) ;
|
||||
while (WritePosition < requiredPosition
|
||||
&& DownloadTask?.IsCompleted is false
|
||||
&& !IsCancelled)
|
||||
{
|
||||
_downloadedPiece.WaitOne(50);
|
||||
}
|
||||
}
|
||||
|
||||
public override void Close()
|
||||
private bool disposed = false;
|
||||
|
||||
/*
|
||||
* https://learn.microsoft.com/en-us/dotnet/api/system.io.stream.dispose?view=net-7.0
|
||||
*
|
||||
* In derived classes, do not override the Close() method, instead, put all of the
|
||||
* Stream cleanup logic in the Dispose(Boolean) method.
|
||||
*/
|
||||
protected override void Dispose(bool disposing)
|
||||
{
|
||||
IsCancelled = true;
|
||||
if (disposing && !Interlocked.CompareExchange(ref disposed, true, false))
|
||||
{
|
||||
_cancellationSource.Cancel();
|
||||
DownloadTask?.GetAwaiter().GetResult();
|
||||
_downloadedPiece?.Dispose();
|
||||
_cancellationSource?.Dispose();
|
||||
_readFile.Dispose();
|
||||
_writeFile.Dispose();
|
||||
OnUpdate(waitForWrite: true);
|
||||
}
|
||||
|
||||
while (downloadEnded is not null && !downloadEnded.WaitOne(1000)) ;
|
||||
|
||||
_readFile.Close();
|
||||
_writeFile.Close();
|
||||
_networkStream?.Close();
|
||||
Update();
|
||||
base.Dispose(disposing);
|
||||
}
|
||||
|
||||
#endregion
|
||||
~NetworkFileStream()
|
||||
{
|
||||
downloadEnded?.Close();
|
||||
downloadedPiece?.Close();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
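// A minimal consumer sketch for the reworked NetworkFileStream API above (the path,
// URL, and header values are hypothetical):
var headers = new Dictionary<string, string> { ["User-Agent"] = "Libation" };
using var nfs = new NetworkFileStream(
	@"C:\Temp\book.aaxc.part",
	new Uri("https://example.com/book.aaxc"),
	writePosition: 0,
	requestHeaders: headers);

await nfs.BeginDownloadingAsync();             // starts the background download task

var buffer = new byte[4096];
int read = nfs.Read(buffer, 0, buffer.Length); // blocks until enough bytes are flushed to disk
// ... process the stream while the rest of the file keeps downloading ...
await nfs.DownloadTask;                        // optionally wait for the download to finish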
@@ -1,11 +1,9 @@
using Dinah.Core.IO;
using Newtonsoft.Json;

namespace AaxDecrypter
{
	internal class NetworkFileStreamPersister : JsonFilePersister<NetworkFileStream>
	{

	internal class NetworkFileStreamPersister : JsonFilePersister<NetworkFileStream>
	{
		/// <summary>Alias for Target </summary>
		public NetworkFileStream NetworkFileStream => Target;

@@ -17,7 +15,11 @@ namespace AaxDecrypter
		public NetworkFileStreamPersister(string path, string jsonPath = null)
			: base(path, jsonPath) { }

		protected override JsonSerializerSettings GetSerializerSettings() => NetworkFileStream.GetJsonSerializerSettings();

	}
		protected override void Dispose(bool disposing)
		{
			if (disposing)
				NetworkFileStream?.Dispose();
			base.Dispose(disposing);
		}
	}
}

17	Source/AaxDecrypter/TempFile.cs	Normal file
@@ -0,0 +1,17 @@
using FileManager;

#nullable enable
namespace AaxDecrypter;

public record TempFile
{
	public LongPath FilePath { get; init; }
	public string Extension { get; }
	public MultiConvertFileProperties? PartProperties { get; init; }
	public TempFile(LongPath filePath, string? extension = null)
	{
		FilePath = filePath;
		extension ??= System.IO.Path.GetExtension(filePath);
		Extension = FileUtility.GetStandardizedExtension(extension).ToLowerInvariant();
	}
}
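// Usage sketch for the new TempFile record (paths are hypothetical; LongPath is
// assumed to convert implicitly from string, and GetStandardizedExtension is assumed
// to normalize to a dotted extension):
var temp = new TempFile(@"C:\Temp\abc123.tmp", extension: ".MP3");
// temp.Extension == ".mp3"  (standardized and lower-cased by the constructor)

var partTemp = new TempFile(@"C:\Temp\abc123_part01.mp3")
{
	PartProperties = new MultiConvertFileProperties { PartsPosition = 1, PartsTotal = 10 }
};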
@@ -1,76 +1,34 @@
using System;
using System.Threading;
using Dinah.Core.Net.Http;
using Dinah.Core.StepRunner;
using FileManager;

using FileManager;
using System.Threading.Tasks;
#nullable enable
namespace AaxDecrypter
{
	public class UnencryptedAudiobookDownloader : AudiobookDownloadBase
	{
		protected override StepSequence Steps { get; }
		protected override long InputFilePosition => InputFileStream.WritePosition;

		public UnencryptedAudiobookDownloader(string outFileName, string cacheDirectory, DownloadOptions dlLic)
			: base(outFileName, cacheDirectory, dlLic)
		public UnencryptedAudiobookDownloader(string outDirectory, string cacheDirectory, IDownloadOptions dlLic)
			: base(outDirectory, cacheDirectory, dlLic)
		{
			Steps = new StepSequence
			AsyncSteps.Name = "Download Unencrypted Audiobook";
			AsyncSteps["Step 1: Download Audiobook"] = Step_DownloadAndDecryptAudiobookAsync;
			AsyncSteps["Step 2: Create Cue"] = Step_CreateCueAsync;
		}

		protected override async Task<bool> Step_DownloadAndDecryptAudiobookAsync()
		{
			await InputFileStream.DownloadTask;

			if (IsCanceled)
				return false;
			else
			{
				Name = "Download Mp3 Audiobook",

				["Step 1: Get Mp3 Metadata"] = Step_GetMetadata,
				["Step 2: Download Audiobook"] = Step_DownloadAudiobookAsSingleFile,
				["Step 3: Create Cue"] = Step_CreateCue,
				["Step 4: Cleanup"] = Step_Cleanup,
			};
		}

		public override void Cancel()
		{
			IsCanceled = true;
			CloseInputFileStream();
		}

		protected bool Step_GetMetadata()
		{
			OnRetrievedCoverArt(null);

			return !IsCanceled;
		}

		private bool Step_DownloadAudiobookAsSingleFile()
		{
			DateTime startTime = DateTime.Now;

			// MUST put InputFileStream.Length first, because it starts background downloader.

			while (InputFileStream.Length > InputFileStream.WritePosition && !InputFileStream.IsCancelled)
			{
				var rate = InputFileStream.WritePosition / (DateTime.Now - startTime).TotalSeconds;

				var estTimeRemaining = (InputFileStream.Length - InputFileStream.WritePosition) / rate;

				if (double.IsNormal(estTimeRemaining))
					OnDecryptTimeRemaining(TimeSpan.FromSeconds(estTimeRemaining));

				var progressPercent = (double)InputFileStream.WritePosition / InputFileStream.Length;

				OnDecryptProgressUpdate(
					new DownloadProgress
					{
						ProgressPercentage = 100 * progressPercent,
						BytesReceived = (long)(InputFileStream.Length * progressPercent),
						TotalBytesToReceive = InputFileStream.Length
					});
				Thread.Sleep(200);
				FinalizeDownload();
				var tempFile = GetNewTempFilePath(DownloadOptions.OutputFormat.ToString());
				FileUtility.SaferMove(InputFileStream.SaveFilePath, tempFile.FilePath);
				OnTempFileCreated(tempFile);
				return true;
			}

			CloseInputFileStream();

			var realOutputFileName = FileUtility.SaferMoveToValidPath(InputFileStream.SaveFilePath, OutputFileName);
			SetOutputFileName(realOutputFileName);
			OnFileCreated(realOutputFileName);

			return !IsCanceled;
		}
	}
}

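// Worked numbers for the progress math in Step_DownloadAudiobookAsSingleFile above
// (hypothetical values): a 150 MB file with 60 MB written after 30 seconds.
//   rate             = 60,000,000 / 30                         = 2,000,000 bytes/s
//   estTimeRemaining = (150,000,000 - 60,000,000) / 2,000,000  = 45 s
//   progressPercent  = 60,000,000 / 150,000,000                = 0.4  -> reported as 40%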
@@ -1,4 +0,0 @@
{
	"//": "https://github.com/BalassaMarton/MSBump",
	BumpRevision: true
}
@@ -1,21 +1,25 @@
<?xml version="1.0" encoding="utf-8"?>
<?xml version="1.0" encoding="utf-8"?>
<Project Sdk="Microsoft.NET.Sdk">

  <PropertyGroup>
    <TargetFramework>net6.0-windows</TargetFramework>
    <Version>7.6.3.1</Version>
    <TargetFramework>net10.0</TargetFramework>
    <Version>13.0.0.1</Version>
  </PropertyGroup>

  <ItemGroup>
    <PackageReference Include="MSBump" Version="2.3.2">
      <PrivateAssets>all</PrivateAssets>
      <IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
    </PackageReference>
    <PackageReference Include="Octokit" Version="0.51.0" />
    <PackageReference Include="Octokit" Version="14.0.0" />
    <!-- Do not remove unused Serilog.Sinks -->
    <!-- Only File sink is currently used. By user request (June 2024) other packages are included for experimental use. -->
    <PackageReference Include="Serilog.Sinks.Console" Version="6.1.1" />
    <PackageReference Include="Serilog.Sinks.File" Version="7.0.0" />
  </ItemGroup>

  <ItemGroup>
    <ProjectReference Include="..\ApplicationServices\ApplicationServices.csproj" />
    <ProjectReference Include="..\AudibleUtilities\AudibleUtilities.csproj" />
  </ItemGroup>


  <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|AnyCPU'">
    <DebugType>embedded</DebugType>
  </PropertyGroup>
  <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|AnyCPU'">
    <DebugType>embedded</DebugType>
  </PropertyGroup>
</Project>

@@ -1,20 +1,50 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Reflection;
|
||||
using ApplicationServices;
|
||||
using AudibleUtilities;
|
||||
using Dinah.Core;
|
||||
using Dinah.Core.IO;
|
||||
using Dinah.Core.Logging;
|
||||
using LibationFileManager;
|
||||
using Newtonsoft.Json;
|
||||
using Newtonsoft.Json.Linq;
|
||||
using Serilog;
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Diagnostics;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Reflection;
|
||||
using System.Runtime.InteropServices;
|
||||
|
||||
namespace AppScaffolding
|
||||
{
|
||||
public enum ReleaseIdentifier
|
||||
{
|
||||
None,
|
||||
WindowsClassic = OS.Windows | Variety.Classic | Architecture.X64,
|
||||
WindowsAvalonia = OS.Windows | Variety.Chardonnay | Architecture.X64,
|
||||
LinuxAvalonia = OS.Linux | Variety.Chardonnay | Architecture.X64,
|
||||
MacOSAvalonia = OS.MacOS | Variety.Chardonnay | Architecture.X64,
|
||||
LinuxAvalonia_Arm64 = OS.Linux | Variety.Chardonnay | Architecture.Arm64,
|
||||
MacOSAvalonia_Arm64 = OS.MacOS | Variety.Chardonnay | Architecture.Arm64,
|
||||
WindowsAvalonia_Arm64 = OS.Windows | Variety.Chardonnay | Architecture.Arm64,
|
||||
}
|
||||
|
||||
// I know I'm taking the wine metaphor a bit far by naming this "Variety", but I don't know what else to call it
|
||||
[Flags]
|
||||
public enum Variety
|
||||
{
|
||||
None,
|
||||
Classic = 0x10000,
|
||||
Chardonnay = 0x20000,
|
||||
}
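	// Sketch of how these flags compose (mirrors RunPostMigrationScaffolding below;
	// Architecture comes from System.Runtime.InteropServices, OS is Libation's own enum):
	//   var releaseID = (ReleaseIdentifier)((int)Variety.Chardonnay
	//       | (int)Configuration.OS                          // e.g. OS.Linux
	//       | (int)RuntimeInformation.ProcessArchitecture);  // e.g. Architecture.Arm64
	//   => ReleaseIdentifier.LinuxAvalonia_Arm64 on a Linux arm64 host; any combination
	//      not defined above falls back to ReleaseIdentifier.None via Enum.IsDefined.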
|
||||
|
||||
public static class LibationScaffolding
|
||||
{
|
||||
public const string RepositoryUrl = "ht" + "tps://github.com/rmcrackan/Libation";
|
||||
public const string WebsiteUrl = "ht" + "tps://getlibation.com";
|
||||
public const string RepositoryLatestUrl = "ht" + "tps://github.com/rmcrackan/Libation/releases/latest";
|
||||
public static ReleaseIdentifier ReleaseIdentifier { get; private set; }
|
||||
public static Variety Variety { get; private set; }
|
||||
|
||||
// AppScaffolding
|
||||
private static Assembly _executingAssembly;
|
||||
private static Assembly ExecutingAssembly
|
||||
@@ -36,7 +66,10 @@ namespace AppScaffolding
|
||||
public static Configuration RunPreConfigMigrations()
|
||||
{
|
||||
// must occur before access to Configuration instance
|
||||
Migrations.migrate_to_v5_2_0__pre_config();
|
||||
// // outdated. kept here as an example of what belongs in this area
|
||||
// // Migrations.migrate_to_v5_2_0__pre_config();
|
||||
|
||||
Configuration.SetLibationVersion(BuildVersion);
|
||||
|
||||
//***********************************************//
|
||||
// //
|
||||
@@ -47,116 +80,112 @@ namespace AppScaffolding
|
||||
}
|
||||
|
||||
/// <summary>most migrations go in here</summary>
|
||||
public static void RunPostConfigMigrations(Configuration config)
|
||||
public static void RunPostConfigMigrations(Configuration config, bool ephemeralSettings = false)
|
||||
{
|
||||
if (ephemeralSettings)
|
||||
{
|
||||
var settings = JObject.Parse(File.ReadAllText(config.LibationFiles.SettingsFilePath));
|
||||
config.LoadEphemeralSettings(settings);
|
||||
}
|
||||
else
|
||||
{
|
||||
config.LoadPersistentSettings(config.LibationFiles.SettingsFilePath);
|
||||
}
|
||||
DeleteOpenSqliteFiles(config);
|
||||
AudibleApiStorage.EnsureAccountsSettingsFileExists();
|
||||
PopulateMissingConfigValues(config);
|
||||
|
||||
//
|
||||
// migrations go below here
|
||||
//
|
||||
|
||||
Migrations.migrate_to_v6_6_9(config);
|
||||
Migrations.migrate_to_v11_5_0(config);
|
||||
Migrations.migrate_to_v11_6_5(config);
|
||||
Migrations.migrate_to_v12_0_1(config);
|
||||
}
|
||||
|
||||
public static void PopulateMissingConfigValues(Configuration config)
|
||||
/// <summary>
|
||||
/// Delete shared memory and write-ahead log SQLite database files which may prevent access to the database.
|
||||
/// These files may or may not cause Libation to hang on CreateContext,
|
||||
/// so we try our luck by swallowing any exceptions and continuing.
|
||||
/// </summary>
|
||||
private static void DeleteOpenSqliteFiles(Configuration config)
|
||||
{
|
||||
config.InProgress ??= Configuration.WinTemp;
|
||||
|
||||
if (!config.Exists(nameof(config.AllowLibationFixup)))
|
||||
config.AllowLibationFixup = true;
|
||||
|
||||
if (!config.Exists(nameof(config.CreateCueSheet)))
|
||||
config.CreateCueSheet = true;
|
||||
|
||||
if (!config.Exists(nameof(config.RetainAaxFile)))
|
||||
config.RetainAaxFile = false;
|
||||
|
||||
if (!config.Exists(nameof(config.SplitFilesByChapter)))
|
||||
config.SplitFilesByChapter = false;
|
||||
|
||||
if (!config.Exists(nameof(config.StripUnabridged)))
|
||||
config.StripUnabridged = false;
|
||||
|
||||
if (!config.Exists(nameof(config.StripAudibleBrandAudio)))
|
||||
config.StripAudibleBrandAudio = false;
|
||||
|
||||
if (!config.Exists(nameof(config.DecryptToLossy)))
|
||||
config.DecryptToLossy = false;
|
||||
|
||||
if (!config.Exists(nameof(config.LameTargetBitrate)))
|
||||
config.LameTargetBitrate = false;
|
||||
|
||||
if (!config.Exists(nameof(config.LameDownsampleMono)))
|
||||
config.LameDownsampleMono = true;
|
||||
|
||||
if (!config.Exists(nameof(config.LameBitrate)))
|
||||
config.LameBitrate = 64;
|
||||
|
||||
if (!config.Exists(nameof(config.LameConstantBitrate)))
|
||||
config.LameConstantBitrate = false;
|
||||
|
||||
if (!config.Exists(nameof(config.LameMatchSourceBR)))
|
||||
config.LameMatchSourceBR = true;
|
||||
|
||||
if (!config.Exists(nameof(config.LameVBRQuality)))
|
||||
config.LameVBRQuality = 2;
|
||||
|
||||
if (!config.Exists(nameof(config.BadBook)))
|
||||
config.BadBook = Configuration.BadBookAction.Ask;
|
||||
|
||||
if (!config.Exists(nameof(config.ShowImportedStats)))
|
||||
config.ShowImportedStats = true;
|
||||
|
||||
if (!config.Exists(nameof(config.ImportEpisodes)))
|
||||
config.ImportEpisodes = true;
|
||||
|
||||
if (!config.Exists(nameof(config.DownloadEpisodes)))
|
||||
config.DownloadEpisodes = true;
|
||||
|
||||
if (!config.Exists(nameof(config.FolderTemplate)))
|
||||
config.FolderTemplate = Templates.Folder.DefaultTemplate;
|
||||
|
||||
if (!config.Exists(nameof(config.FileTemplate)))
|
||||
config.FileTemplate = Templates.File.DefaultTemplate;
|
||||
|
||||
if (!config.Exists(nameof(config.ChapterFileTemplate)))
|
||||
config.ChapterFileTemplate = Templates.ChapterFile.DefaultTemplate;
|
||||
|
||||
if (!config.Exists(nameof(config.AutoScan)))
|
||||
config.AutoScan = true;
|
||||
|
||||
if (!config.Exists(nameof(config.GridColumnsVisibilities)))
|
||||
config.GridColumnsVisibilities = new Dictionary<string, bool>();
|
||||
|
||||
if (!config.Exists(nameof(config.GridColumnsDisplayIndices)))
|
||||
config.GridColumnsDisplayIndices = new Dictionary<string, int>();
|
||||
|
||||
if (!config.Exists(nameof(config.GridColumnsWidths)))
|
||||
config.GridColumnsWidths = new Dictionary<string, int>();
|
||||
|
||||
if (!config.Exists(nameof(config.DownloadCoverArt)))
|
||||
config.DownloadCoverArt = true;
|
||||
var walFile = SqliteStorage.DatabasePath + "-wal";
|
||||
var shmFile = SqliteStorage.DatabasePath + "-shm";
|
||||
if (File.Exists(walFile))
|
||||
{
|
||||
try
|
||||
{
|
||||
FileManager.FileUtility.SaferDelete(walFile);
|
||||
}
|
||||
catch(Exception ex)
|
||||
{
|
||||
Log.Logger.Warning(ex, "Could not delete SQLite WAL file: {@WalFile}", walFile);
|
||||
}
|
||||
}
|
||||
if (File.Exists(shmFile))
|
||||
{
|
||||
try
|
||||
{
|
||||
FileManager.FileUtility.SaferDelete(shmFile);
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
Log.Logger.Warning(ex, "Could not delete SQLite SHM file: {@ShmFile}", shmFile);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>Initialize logging. Run after migration</summary>
|
||||
public static void RunPostMigrationScaffolding(Configuration config)
|
||||
/// <summary>Initialize logging. Wire-up events. Run after migration</summary>
|
||||
public static void RunPostMigrationScaffolding(Variety variety, Configuration config)
|
||||
{
|
||||
Variety = Enum.IsDefined(variety) ? variety : Variety.None;
|
||||
|
||||
var releaseID = (ReleaseIdentifier)((int)variety | (int)Configuration.OS | (int)RuntimeInformation.ProcessArchitecture);
|
||||
|
||||
ReleaseIdentifier = Enum.IsDefined(releaseID) ? releaseID : ReleaseIdentifier.None;
|
||||
|
||||
ensureSerilogConfig(config);
|
||||
configureLogging(config);
|
||||
logStartupState(config);
|
||||
|
||||
// all else should occur after logging
|
||||
|
||||
wireUpSystemEvents(config);
|
||||
}
|
||||
|
||||
private static void ensureSerilogConfig(Configuration config)
|
||||
{
|
||||
if (config.GetObject("Serilog") is not null)
|
||||
if (config.GetObject("Serilog") is JObject serilog)
|
||||
{
|
||||
bool fileChanged = false;
|
||||
if (serilog.SelectToken("$.WriteTo[?(@.Name == 'ZipFile')]", false) is JObject zipFileSink)
|
||||
{
|
||||
zipFileSink["Name"] = "File";
|
||||
fileChanged = true;
|
||||
}
|
||||
var hooks = typeof(FileSinkHook).AssemblyQualifiedName;
|
||||
if (serilog.SelectToken("$.WriteTo[?(@.Name == 'File')].Args", false) is JObject fileSinkArgs
|
||||
&& fileSinkArgs["hooks"]?.Value<string>() != hooks)
|
||||
{
|
||||
fileSinkArgs["hooks"] = hooks;
|
||||
fileChanged = true;
|
||||
}
|
||||
|
||||
if (fileChanged)
|
||||
config.SetNonString(serilog.DeepClone(), "Serilog");
|
||||
return;
|
||||
}
|
||||
|
||||
var serilogObj = new JObject
|
||||
{
|
||||
{ "MinimumLevel", "Information" },
|
||||
{ "WriteTo", new JArray
|
||||
{
|
||||
// ABOUT SINKS
|
||||
// Only File sink is currently used. By user request (June 2024) other packages are included for experimental use.
|
||||
|
||||
// new JObject { {"Name", "Console" } }, // this has caused more problems than it's solved
|
||||
new JObject
|
||||
{
|
||||
@@ -165,7 +194,7 @@ namespace AppScaffolding
|
||||
new JObject
|
||||
{
|
||||
// for this sink to work, a path must be provided. we override this below
|
||||
{ "path", Path.Combine(config.LibationFiles, "_Log.log") },
|
||||
{ "path", Path.Combine(config.LibationFiles.Location, "Log.log") },
|
||||
{ "rollingInterval", "Month" },
|
||||
// Serilog template formatting examples
|
||||
// - default: "{Timestamp:yyyy-MM-dd HH:mm:ss.fff zzz} [{Level:u3}] {Message:lj}{NewLine}{Exception}"
|
||||
@@ -173,7 +202,8 @@ namespace AppScaffolding
|
||||
// - with class and method info: "{Timestamp:yyyy-MM-dd HH:mm:ss.fff zzz} [{Level:u3}] (at {Caller}) {Message:lj}{NewLine}{Exception}";
|
||||
// output example: 2019-11-26 08:48:40.224 -05:00 [DBG] (at LibationWinForms.Program.init()) Begin Libation
|
||||
// {Properties:j} needed for expanded exception logging
|
||||
{ "outputTemplate", "{Timestamp:yyyy-MM-dd HH:mm:ss.fff zzz} [{Level:u3}] (at {Caller}) {Message:lj}{NewLine}{Exception} {Properties:j}" }
|
||||
{ "outputTemplate", "{Timestamp:yyyy-MM-dd HH:mm:ss.fff zzz} [{Level:u3}] (at {Caller}) {Message:lj}{NewLine}{Exception} {Properties:j}" },
|
||||
{ "hooks", typeof(FileSinkHook).AssemblyQualifiedName }, // for FileSinkHook
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -183,7 +213,7 @@ namespace AppScaffolding
|
||||
{ "Using", new JArray{ "Dinah.Core", "Serilog.Exceptions" } }, // dll's name, NOT namespace
|
||||
{ "Enrich", new JArray{ "WithCaller", "WithExceptionDetails" } },
|
||||
};
|
||||
config.SetObject("Serilog", serilogObj);
|
||||
config.SetNonString(serilogObj, "Serilog");
|
||||
}
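		// Approximate shape of the "Serilog" settings object built above (a sketch; only
		// part of the WriteTo entry is visible in this diff, and the hooks value below is
		// an assumed assembly-qualified name):
		// "Serilog": {
		//   "MinimumLevel": "Information",
		//   "WriteTo": [ { "Name": "File", "Args": {
		//       "path": "<LibationFiles>/Log.log",
		//       "rollingInterval": "Month",
		//       "outputTemplate": "{Timestamp:...} [{Level:u3}] (at {Caller}) {Message:lj}{NewLine}{Exception} {Properties:j}",
		//       "hooks": "<FileSinkHook assembly-qualified name>" } } ],
		//   "Using": [ "Dinah.Core", "Serilog.Exceptions" ],
		//   "Enrich": [ "WithCaller", "WithExceptionDetails" ]
		// }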
|
||||
|
||||
// to restore original: Console.SetOut(origOut);
|
||||
@@ -201,8 +231,8 @@ namespace AppScaffolding
|
||||
// However, empirical testing so far has shown no issues.
|
||||
Console.SetOut(new MultiTextWriter(origOut, new SerilogTextWriter()));
|
||||
|
||||
#region Console => Serilog tests
|
||||
/*
|
||||
#region Console => Serilog tests
|
||||
/*
|
||||
// all below apply to "Console." and "Console.Out."
|
||||
|
||||
// captured
|
||||
@@ -241,27 +271,44 @@ namespace AppScaffolding
|
||||
Console.Write("{0}{1}{2}", "zero|", "one|", "two");
|
||||
Console.Write("{0}", new object[] { "arr" });
|
||||
*/
|
||||
#endregion
|
||||
#endregion
|
||||
|
||||
// .Here() captures debug info via System.Runtime.CompilerServices attributes. Warning: expensive
|
||||
//var withLineNumbers_outputTemplate = "[{Timestamp:HH:mm:ss} {Level}] {SourceContext}{NewLine}{Message}{NewLine}in method {MemberName} at {FilePath}:{LineNumber}{NewLine}{Exception}{NewLine}";
|
||||
//Log.Logger.Here().Debug("Begin Libation. Debug with line numbers");
|
||||
}
|
||||
// .Here() captures debug info via System.Runtime.CompilerServices attributes. Warning: expensive
|
||||
//var withLineNumbers_outputTemplate = "[{Timestamp:HH:mm:ss} {Level}] {SourceContext}{NewLine}{Message}{NewLine}in method {MemberName} at {FilePath}:{LineNumber}{NewLine}{Exception}{NewLine}";
|
||||
//Log.Logger.Here().Debug("Begin Libation. Debug with line numbers");
|
||||
}
|
||||
|
||||
#nullable enable
|
||||
private static void logStartupState(Configuration config)
|
||||
{
|
||||
#if DEBUG
|
||||
var mode = "Debug";
|
||||
#else
|
||||
var mode = "Release";
|
||||
#endif
|
||||
if (System.Diagnostics.Debugger.IsAttached)
|
||||
mode += " (Debugger attached)";
|
||||
|
||||
// begin logging session with a form feed
|
||||
Log.Logger.Information("\r\n\f");
|
||||
Log.Logger.Information("Begin. {@DebugInfo}", new
|
||||
|
||||
static int fileCount(FileManager.LongPath? longPath)
|
||||
{
|
||||
if (longPath is null)
|
||||
return -1;
|
||||
try { return FileManager.FileUtility.SaferEnumerateFiles(longPath).Count(); }
|
||||
catch { return -1; }
|
||||
}
|
||||
|
||||
Log.Logger.Information("Begin. {@DebugInfo}", new
|
||||
{
|
||||
AppName = EntryAssembly.GetName().Name,
|
||||
Version = BuildVersion.ToString(),
|
||||
#if DEBUG
|
||||
Mode = "Debug",
|
||||
#else
|
||||
Mode = "Release",
|
||||
#endif
|
||||
|
||||
ReleaseIdentifier,
|
||||
Configuration.OS,
|
||||
Environment.OSVersion,
|
||||
InteropFactory.InteropFunctionsType,
|
||||
Mode = mode,
|
||||
LogLevel_Verbose_Enabled = Log.Logger.IsVerboseEnabled(),
|
||||
LogLevel_Debug_Enabled = Log.Logger.IsDebugEnabled(),
|
||||
LogLevel_Information_Enabled = Log.Logger.IsInformationEnabled(),
|
||||
@@ -269,54 +316,61 @@ namespace AppScaffolding
|
||||
LogLevel_Error_Enabled = Log.Logger.IsErrorEnabled(),
|
||||
LogLevel_Fatal_Enabled = Log.Logger.IsFatalEnabled(),
|
||||
|
||||
config.LibationFiles,
|
||||
config.AutoScan,
|
||||
config.BetaOptIn,
|
||||
config.UseCoverAsFolderIcon,
|
||||
config.LibationFiles,
|
||||
AudibleFileStorage.BooksDirectory,
|
||||
|
||||
config.InProgress,
|
||||
|
||||
AudibleFileStorage.DownloadsInProgressDirectory,
|
||||
DownloadsInProgressFiles = FileManager.FileUtility.SaferEnumerateFiles(AudibleFileStorage.DownloadsInProgressDirectory).Count(),
|
||||
DownloadsInProgressFiles = fileCount(AudibleFileStorage.DownloadsInProgressDirectory),
|
||||
|
||||
AudibleFileStorage.DecryptInProgressDirectory,
|
||||
DecryptInProgressFiles = FileManager.FileUtility.SaferEnumerateFiles(AudibleFileStorage.DecryptInProgressDirectory).Count(),
|
||||
DecryptInProgressFiles = fileCount(AudibleFileStorage.DecryptInProgressDirectory),
|
||||
|
||||
disableIPv6 = AppContext.TryGetSwitch("System.Net.DisableIPv6", out bool disableIPv6Value),
|
||||
});
|
||||
|
||||
if (InteropFactory.InteropFunctionsType is null)
|
||||
Serilog.Log.Logger.Warning("WARNING: OSInteropProxy.InteropFunctionsType is null");
|
||||
}
|
||||
#nullable restore
|
||||
private static void wireUpSystemEvents(Configuration configuration)
|
||||
{
|
||||
LibraryCommands.LibrarySizeChanged += (object _, List<DataLayer.LibraryBook> libraryBooks)
|
||||
=> SearchEngineCommands.FullReIndex(libraryBooks);
|
||||
|
||||
LibraryCommands.BookUserDefinedItemCommitted += (_, books)
|
||||
=> SearchEngineCommands.UpdateBooks(books);
|
||||
}
|
||||
|
||||
public static (bool hasUpgrade, string zipUrl, string htmlUrl, string zipName) GetLatestRelease()
|
||||
public static UpgradeProperties GetLatestRelease()
|
||||
{
|
||||
(bool, string, string, string) isFalse = (false, null, null, null);
|
||||
|
||||
// timed out
|
||||
var latest = getLatestRelease(TimeSpan.FromSeconds(10));
|
||||
if (latest is null)
|
||||
return isFalse;
|
||||
(var version, var latest, var zip) = getLatestRelease(TimeSpan.FromSeconds(10));
|
||||
|
||||
var latestVersionString = latest.TagName.Trim('v');
|
||||
if (!Version.TryParse(latestVersionString, out var latestRelease))
|
||||
return isFalse;
|
||||
|
||||
// we're up to date
|
||||
if (latestRelease <= BuildVersion)
|
||||
return isFalse;
|
||||
if (version is null || latest is null || zip is null)
|
||||
return null;
|
||||
|
||||
// we have an update
|
||||
var zip = latest.Assets.FirstOrDefault(a => a.BrowserDownloadUrl.EndsWith(".zip"));
|
||||
var zipUrl = zip?.BrowserDownloadUrl;
|
||||
|
||||
Log.Logger.Information("Update available: {@DebugInfo}", new
|
||||
{
|
||||
latestRelease = latestRelease.ToString(),
|
||||
latestRelease = version.ToString(),
|
||||
latest.HtmlUrl,
|
||||
zipUrl
|
||||
});
|
||||
|
||||
return (true, zipUrl, latest.HtmlUrl, zip.Name);
|
||||
return new(zipUrl, latest.HtmlUrl, zip.Name, version, latest.Body);
|
||||
}
|
||||
private static Octokit.Release getLatestRelease(TimeSpan timeout)
|
||||
private static (Version releaseVersion, Octokit.Release, Octokit.ReleaseAsset) getLatestRelease(TimeSpan timeout)
|
||||
{
|
||||
try
|
||||
{
|
||||
var task = System.Threading.Tasks.Task.Run(() => getLatestRelease());
|
||||
var task = getLatestRelease();
|
||||
if (task.Wait(timeout))
|
||||
return task.Result;
|
||||
|
||||
@@ -326,56 +380,46 @@ namespace AppScaffolding
|
||||
{
|
||||
Log.Logger.Error(aggEx, "Checking for new version too often");
|
||||
}
|
||||
return null;
|
||||
return (null, null, null);
|
||||
}
|
||||
private static Octokit.Release getLatestRelease()
|
||||
private static async System.Threading.Tasks.Task<(Version releaseVersion, Octokit.Release, Octokit.ReleaseAsset)> getLatestRelease()
|
||||
{
|
||||
var gitHubClient = new Octokit.GitHubClient(new Octokit.ProductHeaderValue("Libation"));
|
||||
const string ownerAccount = "rmcrackan";
|
||||
const string repoName = "Libation";
|
||||
|
||||
// https://octokitnet.readthedocs.io/en/latest/releases/
|
||||
var releases = gitHubClient.Repository.Release.GetAll("rmcrackan", "Libation").GetAwaiter().GetResult();
|
||||
var latest = releases.First(r => !r.Draft && !r.Prerelease);
|
||||
return latest;
|
||||
var gitHubClient = new Octokit.GitHubClient(new Octokit.ProductHeaderValue(repoName));
|
||||
|
||||
//https://docs.github.com/en/rest/releases/releases?apiVersion=2022-11-28#get-the-latest-release
|
||||
var latestRelease = await gitHubClient.Repository.Release.GetLatest(ownerAccount, repoName);
|
||||
|
||||
//Ensure that latest release is greater than the current version
|
||||
var latestVersionString = latestRelease.TagName.Trim('v');
|
||||
if (!Version.TryParse(latestVersionString, out var releaseVersion) || releaseVersion <= BuildVersion)
|
||||
return (null, null, null);
|
||||
|
||||
//Download the release index
|
||||
var bts = await gitHubClient.Repository.Content.GetRawContent(ownerAccount, repoName, ".releaseindex.json");
|
||||
var releaseIndex = JObject.Parse(System.Text.Encoding.ASCII.GetString(bts));
|
||||
|
||||
string regexPattern;
|
||||
|
||||
try
|
||||
{
|
||||
regexPattern = releaseIndex.Value<string>(InteropFactory.Create().ReleaseIdString);
|
||||
}
|
||||
catch
|
||||
{
|
||||
regexPattern = releaseIndex.Value<string>(ReleaseIdentifier.ToString());
|
||||
}
|
||||
|
||||
var regex = new System.Text.RegularExpressions.Regex(regexPattern, System.Text.RegularExpressions.RegexOptions.IgnoreCase);
|
||||
|
||||
return (releaseVersion, latestRelease, latestRelease?.Assets?.FirstOrDefault(a => regex.IsMatch(a.Name)));
|
||||
}
|
||||
}
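	// Consumer sketch for the new GetLatestRelease() shape (a sketch; UpgradeProperties
	// member names are assumptions, since only the constructor argument order is visible here):
	//   var upgrade = LibationScaffolding.GetLatestRelease();
	//   if (upgrade is not null)
	//   {
	//       // upgrade carries the matching asset's zip URL, the release HtmlUrl, the zip
	//       // file name, the parsed Version, and the release notes body; e.g. prompt the
	//       // user, then download the zip in the background.
	//   }
	//   // null means the request timed out, no release is newer than BuildVersion, or no
	//   // asset name matched this platform's pattern from .releaseindex.json.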
|
||||
|
||||
internal static class Migrations
|
||||
{
|
||||
#region migrate to v5.2.0
|
||||
// get rid of meta-directories, combine DownloadsInProgressEnum and DecryptInProgressEnum => InProgress
|
||||
public static void migrate_to_v5_2_0__pre_config()
|
||||
{
|
||||
{
|
||||
var settingsKey = "DownloadsInProgressEnum";
|
||||
if (UNSAFE_MigrationHelper.Settings_TryGet(settingsKey, out var value))
|
||||
{
|
||||
UNSAFE_MigrationHelper.Settings_Delete(settingsKey);
|
||||
UNSAFE_MigrationHelper.Settings_Insert("InProgress", translatePath(value));
|
||||
}
|
||||
}
|
||||
|
||||
{
|
||||
UNSAFE_MigrationHelper.Settings_Delete("DecryptInProgressEnum");
|
||||
}
|
||||
|
||||
{ // appsettings.json
|
||||
var appSettingsKey = UNSAFE_MigrationHelper.LIBATION_FILES_KEY;
|
||||
if (UNSAFE_MigrationHelper.APPSETTINGS_TryGet(appSettingsKey, out var value))
|
||||
UNSAFE_MigrationHelper.APPSETTINGS_Update(appSettingsKey, translatePath(value));
|
||||
}
|
||||
}
|
||||
|
||||
private static string translatePath(string path)
|
||||
=> path switch
|
||||
{
|
||||
"AppDir" => @".\LibationFiles",
|
||||
"MyDocs" => Path.GetFullPath(Path.Combine(Environment.GetFolderPath(Environment.SpecialFolder.MyDocuments), "LibationFiles")),
|
||||
"UserProfile" => Path.GetFullPath(Path.Combine(Environment.GetFolderPath(Environment.SpecialFolder.UserProfile), "Libation")),
|
||||
"WinTemp" => Path.GetFullPath(Path.Combine(Path.GetTempPath(), "Libation")),
|
||||
_ => path
|
||||
};
|
||||
#endregion
|
||||
|
||||
public static void migrate_to_v6_6_9(Configuration config)
|
||||
{
|
||||
var writeToPath = $"Serilog.WriteTo";
|
||||
@@ -422,5 +466,139 @@ namespace AppScaffolding
|
||||
UNSAFE_MigrationHelper.Settings_AddUniqueToArray("Serilog.Enrich", "WithExceptionDetails");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
class FilterState_6_6_9
|
||||
{
|
||||
public bool UseDefault { get; set; }
|
||||
public List<string> Filters { get; set; } = new();
|
||||
}
|
||||
|
||||
public static void migrate_to_v12_0_1(Configuration config)
|
||||
{
|
||||
#nullable enable
|
||||
//Migrate from version 1 file cache to the dictionary-based version 2 cache
|
||||
const string FILENAME_V1 = "FileLocations.json";
|
||||
const string FILENAME_V2 = "FileLocationsV2.json";
|
||||
|
||||
var jsonFileV1 = Path.Combine(Configuration.Instance.LibationFiles.Location, FILENAME_V1);
|
||||
var jsonFileV2 = Path.Combine(Configuration.Instance.LibationFiles.Location, FILENAME_V2);
|
||||
|
||||
if (!File.Exists(jsonFileV2) && File.Exists(jsonFileV1))
|
||||
{
|
||||
try
|
||||
{
|
||||
//FilePathCache loads the cache in its static constructor,
|
||||
//so perform migration without using FilePathCache.CacheEntry
|
||||
if (JArray.Parse(File.ReadAllText(jsonFileV1)) is not JArray v1Cache || v1Cache.Count == 0)
|
||||
return;
|
||||
|
||||
Dictionary<string, JArray> cache = new();
|
||||
|
||||
//Convert to c# objects to speed up searching by ID inside the iterator
|
||||
var allItems
|
||||
= v1Cache
|
||||
.Select(i => new
|
||||
{
|
||||
Id = i["Id"]?.Value<string>(),
|
||||
Path = i["Path"]?["Path"]?.Value<string>()
|
||||
}).Where(i => i.Id != null)
|
||||
.ToArray();
|
||||
|
||||
foreach (var id in allItems.Select(i => i.Id).OfType<string>().Distinct())
|
||||
{
|
||||
//Use this opportunity to purge non-existent files and re-classify file types
|
||||
//(due to *.aax files previously not being classified as FileType.AAXC)
|
||||
var items = allItems
|
||||
.Where(i => i.Id == id && File.Exists(i.Path))
|
||||
.Select(i => new JObject
|
||||
{
|
||||
{ "Id", i.Id },
|
||||
{ "FileType", (int)FileTypes.GetFileTypeFromPath(i.Path) },
|
||||
{ "Path", new JObject{ { "Path", i.Path } } }
|
||||
})
|
||||
.ToArray();
|
||||
|
||||
if (items.Length == 0)
|
||||
continue;
|
||||
|
||||
cache[id] = new JArray(items);
|
||||
}
|
||||
|
||||
var cacheJson = new JObject { { "Dictionary", JObject.FromObject(cache) } };
|
||||
var cacheFileText = cacheJson.ToString(Formatting.Indented);
|
||||
|
||||
void migrate()
|
||||
{
|
||||
File.WriteAllText(jsonFileV2, cacheFileText);
|
||||
File.Delete(jsonFileV1);
|
||||
}
|
||||
|
||||
try { migrate(); }
|
||||
catch (IOException)
|
||||
{
|
||||
try { migrate(); }
|
||||
catch (IOException)
|
||||
{
|
||||
migrate();
|
||||
}
|
||||
}
|
||||
}
|
||||
catch { /* eat */ }
|
||||
}
|
||||
#nullable restore
|
||||
}
|
||||
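As a reading aid for the file-cache migration above, here is a minimal sketch of the two cache layouts it converts between. The product id, file-type value, and path are hypothetical placeholders; the shapes are inferred only from the conversion code shown above (a flat v1 array becomes a per-product dictionary wrapped in a "Dictionary" property).

using System;
using Newtonsoft.Json;
using Newtonsoft.Json.Linq;

// Hypothetical entries, for illustration only.
// v1 (FileLocations.json): a flat array of entries.
var v1 = JArray.Parse("""
[
  { "Id": "PRODUCT_ID_1", "FileType": 1, "Path": { "Path": "C:/Books/example.m4b" } }
]
""");

// v2 (FileLocationsV2.json): the same entries grouped per product id under "Dictionary".
var v2 = new JObject
{
    ["Dictionary"] = new JObject
    {
        ["PRODUCT_ID_1"] = v1 // one JArray of entries per product id
    }
};

Console.WriteLine(v2.ToString(Formatting.Indented));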
|
||||
    public static void migrate_to_v11_6_5(Configuration config)
    {
        //Settings migration for unsupported sample rates (#1116)
        if (config.MaxSampleRate < AAXClean.SampleRate.Hz_8000)
            config.MaxSampleRate = AAXClean.SampleRate.Hz_8000;
        else if (config.MaxSampleRate > AAXClean.SampleRate.Hz_48000)
            config.MaxSampleRate = AAXClean.SampleRate.Hz_48000;
    }
|
||||
public static void migrate_to_v11_5_0(Configuration config)
|
||||
{
|
||||
// Read file, but convert old format to new (with Name field) as necessary.
|
||||
if (!File.Exists(QuickFilters.JsonFile))
|
||||
{
|
||||
QuickFilters.InMemoryState = new();
|
||||
return;
|
||||
}
|
||||
|
||||
try
|
||||
{
|
||||
if (JsonConvert.DeserializeObject<QuickFilters.FilterState>(File.ReadAllText(QuickFilters.JsonFile))
|
||||
is QuickFilters.FilterState inMemState)
|
||||
{
|
||||
QuickFilters.InMemoryState = inMemState;
|
||||
return;
|
||||
}
|
||||
}
|
||||
catch
|
||||
{
|
||||
// Eat
|
||||
}
|
||||
|
||||
try
|
||||
{
|
||||
if (JsonConvert.DeserializeObject<FilterState_6_6_9>(File.ReadAllText(QuickFilters.JsonFile))
|
||||
is FilterState_6_6_9 inMemState)
|
||||
{
|
||||
// Copy old structure to new.
|
||||
QuickFilters.InMemoryState = new();
|
||||
QuickFilters.InMemoryState.UseDefault = inMemState.UseDefault;
|
||||
foreach (var oldFilter in inMemState.Filters)
|
||||
QuickFilters.InMemoryState.Filters.Add(new QuickFilters.NamedFilter(oldFilter, null));
|
||||
|
||||
return;
|
||||
}
|
||||
Debug.Assert(false, "Should not get here, QuickFilters.json deserialization issue");
|
||||
}
|
||||
catch
|
||||
{
|
||||
// Eat
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -3,9 +3,11 @@ using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using Dinah.Core;
|
||||
using LibationFileManager;
|
||||
using Newtonsoft.Json;
|
||||
using Newtonsoft.Json.Linq;
|
||||
|
||||
#nullable enable
|
||||
namespace AppScaffolding
|
||||
{
|
||||
/// <summary>
|
||||
@@ -17,21 +19,23 @@ namespace AppScaffolding
|
||||
///
|
||||
///
|
||||
/// </summary>
|
||||
internal static class UNSAFE_MigrationHelper
|
||||
public static class UNSAFE_MigrationHelper
|
||||
{
|
||||
public static string? SettingsDirectory
|
||||
=> !APPSETTINGS_TryGet(LibationFiles.LIBATION_FILES_KEY, out var value) || value is null
|
||||
? null
|
||||
: value;
|
||||
|
||||
#region appsettings.json
|
||||
private static string APPSETTINGS_JSON { get; } = Path.Combine(Path.GetDirectoryName(System.Reflection.Assembly.GetEntryAssembly().Location), "appsettings.json");
|
||||
|
||||
public static bool APPSETTINGS_Json_Exists => File.Exists(APPSETTINGS_JSON);
|
||||
|
||||
public static bool APPSETTINGS_TryGet(string key, out string value)
|
||||
public static bool APPSETTINGS_TryGet(string key, out string? value)
|
||||
{
|
||||
bool success = false;
|
||||
JToken val = null;
|
||||
JToken? val = null;
|
||||
|
||||
process_APPSETTINGS_Json(jObj => success = jObj.TryGetValue(key, out val), false);
|
||||
|
||||
value = success ? val.Value<string>() : null;
|
||||
value = success ? val?.Value<string>() : null;
|
||||
return success;
|
||||
}
|
||||
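A minimal usage sketch of the TryGet pattern above. The call site is hypothetical; the method name, the "LibationFiles" key (LIBATION_FILES_KEY in this file), and the nullable out parameter are taken from the diff.

// Hypothetical top-level snippet. APPSETTINGS_TryGet returns false (with a null value)
// when appsettings.json is missing or does not contain the requested key.
using System;

if (UNSAFE_MigrationHelper.APPSETTINGS_TryGet("LibationFiles", out var libationFilesDir)
    && libationFilesDir is not null)
{
    Console.WriteLine($"Libation files directory: {libationFilesDir}");
}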
|
||||
@@ -56,11 +60,10 @@ namespace AppScaffolding
|
||||
/// <param name="save">True: save if contents changed. False: do not attempt save</param>
|
||||
private static void process_APPSETTINGS_Json(Action<JObject> action, bool save = true)
|
||||
{
|
||||
// only insert if not exists
|
||||
if (!APPSETTINGS_Json_Exists)
|
||||
if (Configuration.Instance.LibationFiles.AppsettingsJsonFile is not string appSettingsFile)
|
||||
return;
|
||||
|
||||
var startingContents = File.ReadAllText(APPSETTINGS_JSON);
|
||||
var startingContents = File.ReadAllText(appSettingsFile);
|
||||
|
||||
JObject jObj;
|
||||
try
|
||||
@@ -83,48 +86,37 @@ namespace AppScaffolding
|
||||
if (startingContents.EqualsInsensitive(endingContents_indented) || startingContents.EqualsInsensitive(endingContents_compact))
|
||||
return;
|
||||
|
||||
File.WriteAllText(APPSETTINGS_JSON, endingContents_indented);
|
||||
File.WriteAllText(Configuration.Instance.LibationFiles.AppsettingsJsonFile, endingContents_indented);
|
||||
System.Threading.Thread.Sleep(100);
|
||||
}
|
||||
#endregion
|
||||
|
||||
#region Settings.json
|
||||
public const string LIBATION_FILES_KEY = "LibationFiles";
|
||||
private const string SETTINGS_JSON = "Settings.json";
|
||||
|
||||
public static string SettingsJsonPath
|
||||
{
|
||||
get
|
||||
{
|
||||
var success = APPSETTINGS_TryGet(LIBATION_FILES_KEY, out var value);
|
||||
return !success || value is null ? null : Path.Combine(value, SETTINGS_JSON);
|
||||
}
|
||||
}
|
||||
public static bool SettingsJson_Exists => SettingsJsonPath is not null && File.Exists(SettingsJsonPath);
|
||||
public static string? SettingsJsonPath => SettingsDirectory is null ? null : Path.Combine(SettingsDirectory, LibationFiles.SETTINGS_JSON);
|
||||
|
||||
public static bool Settings_TryGet(string key, out string value)
|
||||
public static bool Settings_TryGet(string key, out string? value)
|
||||
{
|
||||
bool success = false;
|
||||
JToken val = null;
|
||||
JToken? val = null;
|
||||
|
||||
process_SettingsJson(jObj => success = jObj.TryGetValue(key, out val), false);
|
||||
|
||||
value = success ? val.Value<string>() : null;
|
||||
value = success ? val?.Value<string>() : null;
|
||||
return success;
|
||||
}
|
||||
|
||||
public static bool Settings_JsonPathIsType(string jsonPath, JTokenType jTokenType)
|
||||
{
|
||||
JToken val = null;
|
||||
JToken? val = null;
|
||||
|
||||
process_SettingsJson(jObj => val = jObj.SelectToken(jsonPath), false);
|
||||
|
||||
return val?.Type == jTokenType;
|
||||
}
|
||||
|
||||
public static bool Settings_TryGetFromJsonPath(string jsonPath, out string value)
|
||||
public static bool Settings_TryGetFromJsonPath(string jsonPath, out string? value)
|
||||
{
|
||||
JToken val = null;
|
||||
JToken? val = null;
|
||||
|
||||
process_SettingsJson(jObj => val = jObj.SelectToken(jsonPath), false);
|
||||
|
||||
@@ -166,10 +158,10 @@ namespace AppScaffolding
|
||||
if (!Settings_JsonPathIsType(jsonPath, JTokenType.Array))
|
||||
return false;
|
||||
|
||||
JArray array = null;
|
||||
process_SettingsJson(jObj => array = (JArray)jObj.SelectToken(jsonPath));
|
||||
JArray? array = null;
|
||||
process_SettingsJson(jObj => array = jObj.SelectToken(jsonPath) as JArray);
|
||||
|
||||
length = array.Count;
|
||||
length = array?.Count ?? 0;
|
||||
return true;
|
||||
}
|
||||
|
||||
@@ -180,8 +172,7 @@ namespace AppScaffolding
|
||||
|
||||
process_SettingsJson(jObj =>
|
||||
{
|
||||
var array = (JArray)jObj.SelectToken(jsonPath);
|
||||
array.Add(newValue);
|
||||
(jObj.SelectToken(jsonPath) as JArray)?.Add(newValue);
|
||||
});
|
||||
}
|
||||
|
||||
@@ -209,8 +200,7 @@ namespace AppScaffolding
|
||||
|
||||
process_SettingsJson(jObj =>
|
||||
{
|
||||
var array = (JArray)jObj.SelectToken(jsonPath);
|
||||
if (position < array.Count)
|
||||
if (jObj.SelectToken(jsonPath) is JArray array && position < array.Count)
|
||||
array.RemoveAt(position);
|
||||
});
|
||||
}
|
||||
@@ -237,7 +227,7 @@ namespace AppScaffolding
|
||||
private static void process_SettingsJson(Action<JObject> action, bool save = true)
|
||||
{
|
||||
// only insert if not exists
|
||||
if (!SettingsJson_Exists)
|
||||
if (!File.Exists(SettingsJsonPath))
|
||||
return;
|
||||
|
||||
var startingContents = File.ReadAllText(SettingsJsonPath);
|
||||
@@ -267,5 +257,10 @@ namespace AppScaffolding
|
||||
System.Threading.Thread.Sleep(100);
|
||||
}
|
||||
#endregion
|
||||
#region LibationContext.db
|
||||
public const string LIBATION_CONTEXT = "LibationContext.db";
|
||||
public static string? DatabaseFile => SettingsDirectory is null ? null : Path.Combine(SettingsDirectory, LIBATION_CONTEXT);
|
||||
public static bool DatabaseFile_Exists => DatabaseFile is not null && File.Exists(DatabaseFile);
|
||||
#endregion
|
||||
}
|
||||
}
|
||||
|
||||
26  Source/AppScaffolding/UpgradeProperties.cs  Normal file
@@ -0,0 +1,26 @@
using System;
using System.Text.RegularExpressions;

namespace AppScaffolding
{
    public partial record UpgradeProperties
    {
        public string ZipUrl { get; }
        public string HtmlUrl { get; }
        public string ZipName { get; }
        public Version LatestRelease { get; }
        public string Notes { get; }

        public UpgradeProperties(string zipUrl, string htmlUrl, string zipName, Version latestRelease, string notes)
        {
            ZipName = zipName;
            HtmlUrl = htmlUrl;
            ZipUrl = zipUrl;
            LatestRelease = latestRelease;
            Notes = LinkStripRegex().Replace(notes, "$1");
        }

        [GeneratedRegex(@"\[(.*)\]\(.*\)")]
        private static partial Regex LinkStripRegex();
    }
}
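A short usage sketch for the record above. All argument values are hypothetical; the point is only to show how the constructor's link-stripping regex rewrites markdown links in the release notes.

using System;
using AppScaffolding;

// Hypothetical values, for illustration only.
var props = new UpgradeProperties(
    zipUrl: "https://example.com/Libation.zip",
    htmlUrl: "https://example.com/release",
    zipName: "Libation.zip",
    latestRelease: new Version(12, 0, 1),
    notes: "Bug fixes. See [the changelog](https://example.com/changelog) for details.");

// LinkStripRegex() reduces markdown links to their link text:
// props.Notes == "Bug fixes. See the changelog for details."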
@@ -1,17 +1,27 @@
|
||||
<Project Sdk="Microsoft.NET.Sdk">
|
||||
<Project Sdk="Microsoft.NET.Sdk">
|
||||
|
||||
<PropertyGroup>
|
||||
<TargetFramework>net6.0-windows</TargetFramework>
|
||||
<TargetFramework>net10.0</TargetFramework>
|
||||
</PropertyGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<PackageReference Include="CsvHelper" Version="27.2.1" />
|
||||
<PackageReference Include="NPOI" Version="2.5.6" />
|
||||
<PackageReference Include="CsvHelper" Version="33.1.0" />
|
||||
<PackageReference Include="ClosedXML" Version="0.105.0" />
|
||||
</ItemGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<ProjectReference Include="..\DtoImporterService\DtoImporterService.csproj" />
|
||||
<ProjectReference Include="..\LibationSearchEngine\LibationSearchEngine.csproj" />
|
||||
<ProjectReference Include="..\DataLayer.Postgres\DataLayer.Postgres.csproj" />
|
||||
<ProjectReference Include="..\DataLayer.Sqlite\DataLayer.Sqlite.csproj" />
|
||||
</ItemGroup>
|
||||
|
||||
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|AnyCPU'">
|
||||
<DebugType>embedded</DebugType>
|
||||
</PropertyGroup>
|
||||
|
||||
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|AnyCPU'">
|
||||
<DebugType>embedded</DebugType>
|
||||
</PropertyGroup>
|
||||
|
||||
</Project>
|
||||
|
||||
78  Source/ApplicationServices/BulkSetDownloadStatus.cs  Normal file
@@ -0,0 +1,78 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using System.Threading.Tasks;
|
||||
using DataLayer;
|
||||
using Dinah.Core;
|
||||
using LibationFileManager;
|
||||
|
||||
namespace ApplicationServices
|
||||
{
|
||||
public class BulkSetDownloadStatus
|
||||
{
|
||||
private List<(string message, LiberatedStatus newStatus, IEnumerable<LibraryBook> LibraryBooks)> actionSets { get; } = new();
|
||||
|
||||
public int Count => actionSets.Count;
|
||||
|
||||
public IEnumerable<string> Messages => actionSets.Select(a => a.message);
|
||||
public string AggregateMessage => $"Are you sure you want to set {Messages.Aggregate((a, b) => $"{a} and {b}")}?";
|
||||
|
||||
private List<LibraryBook> _libraryBooks;
|
||||
private bool _setDownloaded;
|
||||
private bool _setNotDownloaded;
|
||||
|
||||
public BulkSetDownloadStatus(List<LibraryBook> libraryBooks, bool setDownloaded, bool setNotDownloaded)
|
||||
{
|
||||
_libraryBooks = libraryBooks;
|
||||
_setDownloaded = setDownloaded;
|
||||
_setNotDownloaded = setNotDownloaded;
|
||||
}
|
||||
|
||||
public int Discover()
|
||||
{
|
||||
var bookExistsList = _libraryBooks
|
||||
.Select(libraryBook => new
|
||||
{
|
||||
LibraryBook = libraryBook,
|
||||
FileExists = AudibleFileStorage.Audio.GetPath(libraryBook.Book.AudibleProductId) is not null
|
||||
})
|
||||
.ToList();
|
||||
|
||||
if (_setDownloaded)
|
||||
{
|
||||
var books2change = bookExistsList
|
||||
.Where(a => a.FileExists && a.LibraryBook.Book.UserDefinedItem.BookStatus != LiberatedStatus.Liberated)
|
||||
.Select(a => a.LibraryBook)
|
||||
.ToList();
|
||||
|
||||
if (books2change.Any())
|
||||
actionSets.Add((
|
||||
$"{"book".PluralizeWithCount(books2change.Count)} to 'Downloaded'",
|
||||
LiberatedStatus.Liberated,
|
||||
books2change));
|
||||
}
|
||||
|
||||
if (_setNotDownloaded)
|
||||
{
|
||||
var books2change = bookExistsList
|
||||
.Where(a => !a.FileExists && a.LibraryBook.Book.UserDefinedItem.BookStatus != LiberatedStatus.NotLiberated)
|
||||
.Select(a => a.LibraryBook)
|
||||
.ToList();
|
||||
|
||||
if (books2change.Any())
|
||||
actionSets.Add((
|
||||
$"{"book".PluralizeWithCount(books2change.Count)} to 'Not Downloaded'",
|
||||
LiberatedStatus.NotLiberated,
|
||||
books2change));
|
||||
}
|
||||
|
||||
return Count;
|
||||
}
|
||||
|
||||
public async Task ExecuteAsync()
|
||||
{
|
||||
foreach (var a in actionSets)
|
||||
await a.LibraryBooks.UpdateBookStatusAsync(a.newStatus);
|
||||
}
|
||||
}
|
||||
}
|
||||
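A hedged usage sketch for the BulkSetDownloadStatus class above. The library list is assumed to come from DbContexts, and the user-confirmation step is only indicated as a comment; this is an illustrative call site, not part of the diff.

// Hypothetical call site: reconcile 'Downloaded'/'Not Downloaded' statuses with files on disk.
var libraryBooks = ApplicationServices.DbContexts.GetLibrary_Flat_NoTracking();
var bulk = new ApplicationServices.BulkSetDownloadStatus(libraryBooks, setDownloaded: true, setNotDownloaded: true);

if (bulk.Discover() > 0)
{
    // e.g. show bulk.AggregateMessage to the user and ask for confirmation here
    await bulk.ExecuteAsync();
}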
@@ -1,21 +1,40 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using DataLayer;
|
||||
using DataLayer;
|
||||
using LibationFileManager;
|
||||
using Microsoft.EntityFrameworkCore;
|
||||
using System.Collections.Generic;
|
||||
|
||||
#nullable enable
|
||||
namespace ApplicationServices
|
||||
{
|
||||
public static class DbContexts
|
||||
{
|
||||
/// <summary>Use for fully functional context, incl. SaveChanges(). For query-only, use the other method</summary>
|
||||
public static LibationContext GetContext()
|
||||
=> LibationContext.Create(SqliteStorage.ConnectionString);
|
||||
public static class DbContexts
|
||||
{
|
||||
/// <summary>Use for fully functional context, incl. SaveChanges(). For query-only, use the other method</summary>
|
||||
public static LibationContext GetContext()
|
||||
{
|
||||
var context = !string.IsNullOrEmpty(Configuration.Instance.PostgresqlConnectionString)
|
||||
? LibationContextFactory.CreatePostgres(Configuration.Instance.PostgresqlConnectionString)
|
||||
: LibationContextFactory.CreateSqlite(SqliteStorage.ConnectionString);
|
||||
context.Database.Migrate();
|
||||
return context;
|
||||
}
|
||||
|
||||
/// <summary>Use for full library querying. No lazy loading</summary>
|
||||
public static List<LibraryBook> GetLibrary_Flat_NoTracking()
|
||||
{
|
||||
/// <summary>Use for full library querying. No lazy loading</summary>
|
||||
public static List<LibraryBook> GetLibrary_Flat_NoTracking(bool includeParents = false)
|
||||
{
|
||||
using var context = GetContext();
|
||||
return context.GetLibrary_Flat_NoTracking(includeParents);
|
||||
}
|
||||
|
||||
public static List<LibraryBook> GetDeletedLibraryBooks()
|
||||
{
|
||||
using var context = GetContext();
|
||||
return context.GetDeletedLibraryBooks();
|
||||
}
|
||||
|
||||
public static LibraryBook? GetLibraryBook_Flat_NoTracking(string productId, bool caseSensative = true)
|
||||
{
|
||||
using var context = GetContext();
|
||||
return context.GetLibrary_Flat_NoTracking();
|
||||
return context.GetLibraryBook_Flat_NoTracking(productId, caseSensative);
|
||||
}
|
||||
}
|
||||
}
|
||||
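A minimal usage sketch, assuming the configuration-driven backend selection shown above (a non-empty PostgresqlConnectionString selects the Postgres provider, otherwise SQLite). The product id below is a placeholder; the parameter name caseSensative is copied from the diff as-is.

// GetContext() applies pending EF Core migrations on open, so call sites don't need to.
using var context = ApplicationServices.DbContexts.GetContext();

// Flat, no-tracking queries for read-only scenarios:
var library = ApplicationServices.DbContexts.GetLibrary_Flat_NoTracking(includeParents: true);
var oneBook = ApplicationServices.DbContexts.GetLibraryBook_Flat_NoTracking("PRODUCT_ID_1", caseSensative: false);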
|
||||
@@ -1,354 +1,661 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using System.Threading.Tasks;
|
||||
using AudibleApi;
|
||||
using AudibleApi;
|
||||
using AudibleUtilities;
|
||||
using DataLayer;
|
||||
using Dinah.Core;
|
||||
using Dinah.Core.Logging;
|
||||
using DtoImporterService;
|
||||
using FileManager;
|
||||
using LibationFileManager;
|
||||
using Microsoft.Extensions.DependencyModel;
|
||||
using Newtonsoft.Json.Linq;
|
||||
using Serilog;
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using System.Text;
|
||||
using System.Threading.Tasks;
|
||||
using static DtoImporterService.PerfLogger;
|
||||
|
||||
#nullable enable
|
||||
namespace ApplicationServices
|
||||
{
|
||||
public static class LibraryCommands
|
||||
{
|
||||
public static event EventHandler<int> ScanBegin;
|
||||
public static event EventHandler ScanEnd;
|
||||
public static class LibraryCommands
|
||||
{
|
||||
public static event EventHandler<int>? ScanBegin;
|
||||
public static event EventHandler<int>? ScanEnd;
|
||||
|
||||
public static bool Scanning { get; private set; }
|
||||
private static object _lock { get; } = new();
|
||||
public static bool Scanning { get; private set; }
|
||||
private static object _lock { get; } = new();
|
||||
|
||||
static LibraryCommands()
|
||||
{
|
||||
ScanBegin += (_, __) => Scanning = true;
|
||||
ScanEnd += (_, __) => Scanning = false;
|
||||
}
|
||||
|
||||
public static async Task<List<LibraryBook>> FindInactiveBooks(Func<Account, Task<ApiExtended>> apiExtendedfunc, List<LibraryBook> existingLibrary, params Account[] accounts)
|
||||
{
|
||||
logRestart();
|
||||
|
||||
//These are the minimum response groups required for the
|
||||
//library scanner to pass all validation and filtering.
|
||||
var libraryOptions = new LibraryOptions
|
||||
{
|
||||
ResponseGroups
|
||||
= LibraryOptions.ResponseGroupOptions.ProductAttrs
|
||||
| LibraryOptions.ResponseGroupOptions.ProductDesc
|
||||
| LibraryOptions.ResponseGroupOptions.Relationships
|
||||
};
|
||||
if (accounts is null || accounts.Length == 0)
|
||||
return new List<LibraryBook>();
|
||||
|
||||
try
|
||||
{
|
||||
logTime($"pre {nameof(scanAccountsAsync)} all");
|
||||
var libraryItems = await scanAccountsAsync(apiExtendedfunc, accounts, libraryOptions);
|
||||
logTime($"post {nameof(scanAccountsAsync)} all");
|
||||
|
||||
var totalCount = libraryItems.Count;
|
||||
Log.Logger.Information($"GetAllLibraryItems: Total count {totalCount}");
|
||||
|
||||
var missingBookList = existingLibrary.Where(b => !libraryItems.Any(i => i.DtoItem.Asin == b.Book.AudibleProductId)).ToList();
|
||||
|
||||
return missingBookList;
|
||||
}
|
||||
catch (AudibleApi.Authentication.LoginFailedException lfEx)
|
||||
{
|
||||
lfEx.SaveFiles(Configuration.Instance.LibationFiles);
|
||||
|
||||
// nuget Serilog.Exceptions would automatically log custom properties
|
||||
// However, it comes with a scary warning when used with EntityFrameworkCore which I'm not yet ready to implement:
|
||||
// https://github.com/RehanSaeed/Serilog.Exceptions
|
||||
// work-around: use 3rd param. don't just put exception object in 3rd param -- info overload: stack trace, etc
|
||||
Log.Logger.Error(lfEx, "Error scanning library. Login failed. {@DebugInfo}", new
|
||||
{
|
||||
lfEx.RequestUrl,
|
||||
ResponseStatusCodeNumber = (int)lfEx.ResponseStatusCode,
|
||||
ResponseStatusCodeDesc = lfEx.ResponseStatusCode,
|
||||
lfEx.ResponseInputFields,
|
||||
lfEx.ResponseBodyFilePaths
|
||||
});
|
||||
throw;
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
Log.Logger.Error(ex, "Error scanning library");
|
||||
throw;
|
||||
}
|
||||
finally
|
||||
{
|
||||
stop();
|
||||
var putBreakPointHere = logOutput;
|
||||
}
|
||||
}
|
||||
|
||||
#region FULL LIBRARY scan and import
|
||||
public static async Task<(int totalCount, int newCount)> ImportAccountAsync(Func<Account, Task<ApiExtended>> apiExtendedfunc, params Account[] accounts)
|
||||
{
|
||||
logRestart();
|
||||
|
||||
if (accounts is null || accounts.Length == 0)
|
||||
return (0, 0);
|
||||
|
||||
try
|
||||
{
|
||||
lock (_lock)
|
||||
{
|
||||
if (Scanning)
|
||||
return (0, 0);
|
||||
ScanBegin?.Invoke(null, accounts.Length);
|
||||
}
|
||||
|
||||
logTime($"pre {nameof(scanAccountsAsync)} all");
|
||||
var libraryOptions = new LibraryOptions
|
||||
{
|
||||
ResponseGroups = LibraryOptions.ResponseGroupOptions.ALL_OPTIONS,
|
||||
ImageSizes = LibraryOptions.ImageSizeOptions._500 | LibraryOptions.ImageSizeOptions._1215
|
||||
};
|
||||
var importItems = await scanAccountsAsync(apiExtendedfunc, accounts, libraryOptions);
|
||||
logTime($"post {nameof(scanAccountsAsync)} all");
|
||||
|
||||
var totalCount = importItems.Count;
|
||||
Log.Logger.Information($"GetAllLibraryItems: Total count {totalCount}");
|
||||
|
||||
if (totalCount == 0)
|
||||
return default;
|
||||
|
||||
Log.Logger.Information("Begin long-running import");
|
||||
logTime($"pre {nameof(importIntoDbAsync)}");
|
||||
var newCount = await importIntoDbAsync(importItems);
|
||||
logTime($"post {nameof(importIntoDbAsync)}");
|
||||
Log.Logger.Information($"Import complete. New count {newCount}");
|
||||
|
||||
return (totalCount, newCount);
|
||||
}
|
||||
catch (AudibleApi.Authentication.LoginFailedException lfEx)
|
||||
{
|
||||
lfEx.SaveFiles(Configuration.Instance.LibationFiles);
|
||||
|
||||
// nuget Serilog.Exceptions would automatically log custom properties
|
||||
// However, it comes with a scary warning when used with EntityFrameworkCore which I'm not yet ready to implement:
|
||||
// https://github.com/RehanSaeed/Serilog.Exceptions
|
||||
// work-around: use 3rd param. don't just put exception object in 3rd param -- info overload: stack trace, etc
|
||||
Log.Logger.Error(lfEx, "Error importing library. Login failed. {@DebugInfo}", new
|
||||
{
|
||||
lfEx.RequestUrl,
|
||||
ResponseStatusCodeNumber = (int)lfEx.ResponseStatusCode,
|
||||
ResponseStatusCodeDesc = lfEx.ResponseStatusCode,
|
||||
lfEx.ResponseInputFields,
|
||||
lfEx.ResponseBodyFilePaths
|
||||
});
|
||||
throw;
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
Log.Logger.Error(ex, "Error importing library");
|
||||
throw;
|
||||
}
|
||||
finally
|
||||
{
|
||||
stop();
|
||||
var putBreakPointHere = logOutput;
|
||||
ScanEnd?.Invoke(null, null);
|
||||
}
|
||||
}
|
||||
|
||||
private static async Task<List<ImportItem>> scanAccountsAsync(Func<Account, Task<ApiExtended>> apiExtendedfunc, Account[] accounts, LibraryOptions libraryOptions)
|
||||
{
|
||||
var tasks = new List<Task<List<ImportItem>>>();
|
||||
foreach (var account in accounts)
|
||||
{
|
||||
// get APIs in serial b/c of logins. do NOT move inside of parallel (Task.WhenAll)
|
||||
var apiExtended = await apiExtendedfunc(account);
|
||||
|
||||
// add scanAccountAsync as a TASK: do not await
|
||||
tasks.Add(scanAccountAsync(apiExtended, account, libraryOptions));
|
||||
}
|
||||
|
||||
// import library in parallel
|
||||
var arrayOfLists = await Task.WhenAll(tasks);
|
||||
var importItems = arrayOfLists.SelectMany(a => a).ToList();
|
||||
return importItems;
|
||||
}
|
||||
|
||||
private static async Task<List<ImportItem>> scanAccountAsync(ApiExtended apiExtended, Account account, LibraryOptions libraryOptions)
|
||||
{
|
||||
ArgumentValidator.EnsureNotNull(account, nameof(account));
|
||||
|
||||
Log.Logger.Information("ImportLibraryAsync. {@DebugInfo}", new
|
||||
{
|
||||
Account = account?.MaskedLogEntry ?? "[null]"
|
||||
});
|
||||
|
||||
logTime($"pre scanAccountAsync {account.AccountName}");
|
||||
|
||||
var dtoItems = await apiExtended.GetLibraryValidatedAsync(libraryOptions, Configuration.Instance.ImportEpisodes);
|
||||
|
||||
logTime($"post scanAccountAsync {account.AccountName} qty: {dtoItems.Count}");
|
||||
|
||||
return dtoItems.Select(d => new ImportItem { DtoItem = d, AccountId = account.AccountId, LocaleName = account.Locale?.Name }).ToList();
|
||||
}
|
||||
|
||||
private static async Task<int> importIntoDbAsync(List<ImportItem> importItems)
|
||||
static LibraryCommands()
|
||||
{
|
||||
logTime("importIntoDbAsync -- pre db");
|
||||
using var context = DbContexts.GetContext();
|
||||
var libraryBookImporter = new LibraryBookImporter(context);
|
||||
var newCount = await Task.Run(() => libraryBookImporter.Import(importItems));
|
||||
logTime("importIntoDbAsync -- post Import()");
|
||||
int qtyChanges = saveChanges(context);
|
||||
logTime("importIntoDbAsync -- post SaveChanges");
|
||||
|
||||
// this is any changes at all to the database, not just new books
|
||||
if (qtyChanges > 0)
|
||||
await Task.Run(() => finalizeLibrarySizeChange());
|
||||
logTime("importIntoDbAsync -- post finalizeLibrarySizeChange");
|
||||
|
||||
return newCount;
|
||||
ScanBegin += (_, __) => Scanning = true;
|
||||
ScanEnd += (_, __) => Scanning = false;
|
||||
}
|
||||
|
||||
private static int saveChanges(LibationContext context)
|
||||
{
|
||||
public static async Task<List<LibraryBook>> FindInactiveBooks(IEnumerable<LibraryBook> existingLibrary, params Account[] accounts)
|
||||
{
|
||||
logRestart();
|
||||
|
||||
lock (_lock)
|
||||
{
|
||||
if (Scanning)
|
||||
return new();
|
||||
}
|
||||
ScanBegin?.Invoke(null, accounts.Length);
|
||||
|
||||
//These are the minimum response groups required for the
|
||||
//library scanner to pass all validation and filtering.
|
||||
var libraryOptions = new LibraryOptions
|
||||
{
|
||||
ResponseGroups
|
||||
= LibraryOptions.ResponseGroupOptions.ProductAttrs
|
||||
| LibraryOptions.ResponseGroupOptions.ProductDesc
|
||||
| LibraryOptions.ResponseGroupOptions.Relationships
|
||||
};
|
||||
if (accounts is null || accounts.Length == 0)
|
||||
return new List<LibraryBook>();
|
||||
|
||||
try
|
||||
{
|
||||
logTime($"pre {nameof(scanAccountsAsync)} all");
|
||||
var libraryItems = await scanAccountsAsync(accounts, libraryOptions);
|
||||
logTime($"post {nameof(scanAccountsAsync)} all");
|
||||
|
||||
var totalCount = libraryItems.Count;
|
||||
Log.Logger.Information($"GetAllLibraryItems: Total count {totalCount}");
|
||||
|
||||
var missingBookList = existingLibrary.Where(b => !libraryItems.Any(i => i.DtoItem.Asin == b.Book.AudibleProductId)).ToList();
|
||||
|
||||
return missingBookList;
|
||||
}
|
||||
catch (AudibleApi.Authentication.LoginFailedException lfEx)
|
||||
{
|
||||
lfEx.SaveFiles(Configuration.Instance.LibationFiles.Location);
|
||||
|
||||
// nuget Serilog.Exceptions would automatically log custom properties
|
||||
// However, it comes with a scary warning when used with EntityFrameworkCore which I'm not yet ready to implement:
|
||||
// https://github.com/RehanSaeed/Serilog.Exceptions
|
||||
// work-around: use 3rd param. don't just put exception object in 3rd param -- info overload: stack trace, etc
|
||||
Log.Logger.Error(lfEx, "Error scanning library. Login failed. {@DebugInfo}", new
|
||||
{
|
||||
lfEx.RequestUrl,
|
||||
ResponseStatusCodeNumber = (int)lfEx.ResponseStatusCode,
|
||||
ResponseStatusCodeDesc = lfEx.ResponseStatusCode,
|
||||
lfEx.ResponseInputFields,
|
||||
lfEx.ResponseBodyFilePaths
|
||||
});
|
||||
throw;
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
Log.Logger.Error(ex, "Error scanning library");
|
||||
throw;
|
||||
}
|
||||
finally
|
||||
{
|
||||
stop();
|
||||
var putBreakPointHere = logOutput;
|
||||
ScanEnd?.Invoke(null, 0);
|
||||
GC.Collect(GC.MaxGeneration, GCCollectionMode.Aggressive, true, true);
|
||||
}
|
||||
}
|
||||
|
||||
#region FULL LIBRARY scan and import
|
||||
public static async Task<(int totalCount, int newCount)> ImportAccountAsync(params Account[]? accounts)
|
||||
{
|
||||
logRestart();
|
||||
|
||||
if (accounts is null || accounts.Length == 0)
|
||||
return (0, 0);
|
||||
|
||||
int newCount = 0;
|
||||
try
|
||||
{
|
||||
return context.SaveChanges();
|
||||
}
|
||||
catch (Microsoft.EntityFrameworkCore.DbUpdateException ex)
|
||||
{
|
||||
// DbUpdateException exceptions can wreck serilog. Condense it until we can find a better solution. I suspect the culprit is the "WithExceptionDetails" serilog extension
|
||||
{
|
||||
lock (_lock)
|
||||
{
|
||||
if (Scanning)
|
||||
return (0, 0);
|
||||
}
|
||||
ScanBegin?.Invoke(null, accounts.Length);
|
||||
|
||||
static string format(Exception ex) => $"\r\nMessage: {ex.Message}\r\nStack Trace:\r\n{ex.StackTrace}";
|
||||
logTime($"pre {nameof(scanAccountsAsync)} all");
|
||||
var libraryOptions = new LibraryOptions
|
||||
{
|
||||
ResponseGroups
|
||||
= LibraryOptions.ResponseGroupOptions.Rating | LibraryOptions.ResponseGroupOptions.Media
|
||||
| LibraryOptions.ResponseGroupOptions.Relationships | LibraryOptions.ResponseGroupOptions.ProductDesc
|
||||
| LibraryOptions.ResponseGroupOptions.Contributors | LibraryOptions.ResponseGroupOptions.ProvidedReview
|
||||
| LibraryOptions.ResponseGroupOptions.ProductPlans | LibraryOptions.ResponseGroupOptions.Series
|
||||
| LibraryOptions.ResponseGroupOptions.CategoryLadders | LibraryOptions.ResponseGroupOptions.ProductExtendedAttrs
|
||||
| LibraryOptions.ResponseGroupOptions.PdfUrl | LibraryOptions.ResponseGroupOptions.OriginAsin
|
||||
| LibraryOptions.ResponseGroupOptions.IsFinished,
|
||||
ImageSizes = LibraryOptions.ImageSizeOptions._500 | LibraryOptions.ImageSizeOptions._1215
|
||||
};
|
||||
var importItems = await scanAccountsAsync(accounts, libraryOptions);
|
||||
logTime($"post {nameof(scanAccountsAsync)} all");
|
||||
|
||||
var msg = "Microsoft.EntityFrameworkCore.DbUpdateException";
|
||||
if (ex.InnerException is null)
|
||||
throw new Exception($"{msg}{format(ex)}");
|
||||
throw new Exception(
|
||||
$"{msg}{format(ex)}",
|
||||
new Exception($"Inner Exception{format(ex.InnerException)}"));
|
||||
var totalCount = importItems.Count;
|
||||
Log.Logger.Information($"GetAllLibraryItems: Total count {totalCount}");
|
||||
|
||||
if (totalCount == 0)
|
||||
return default;
|
||||
|
||||
Log.Logger.Information("Begin long-running import");
|
||||
logTime($"pre {nameof(ImportIntoDbAsync)}");
|
||||
newCount = await Task.Run(() => ImportIntoDbAsync(importItems));
|
||||
logTime($"post {nameof(ImportIntoDbAsync)}");
|
||||
Log.Logger.Information($"Import complete. New count {newCount}");
|
||||
|
||||
return (totalCount, newCount);
|
||||
}
|
||||
catch (AudibleApi.Authentication.LoginFailedException lfEx)
|
||||
{
|
||||
lfEx.SaveFiles(Configuration.Instance.LibationFiles.Location);
|
||||
|
||||
// nuget Serilog.Exceptions would automatically log custom properties
|
||||
// However, it comes with a scary warning when used with EntityFrameworkCore which I'm not yet ready to implement:
|
||||
// https://github.com/RehanSaeed/Serilog.Exceptions
|
||||
// work-around: use 3rd param. don't just put exception object in 3rd param -- info overload: stack trace, etc
|
||||
Log.Logger.Error(lfEx, "Error importing library. Login failed. {@DebugInfo}", new
|
||||
{
|
||||
lfEx.RequestUrl,
|
||||
ResponseStatusCodeNumber = (int)lfEx.ResponseStatusCode,
|
||||
ResponseStatusCodeDesc = lfEx.ResponseStatusCode,
|
||||
lfEx.ResponseInputFields,
|
||||
lfEx.ResponseBodyFilePaths
|
||||
});
|
||||
throw;
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
Log.Logger.Error(ex, "Error importing library");
|
||||
throw;
|
||||
}
|
||||
finally
|
||||
{
|
||||
stop();
|
||||
var putBreakPointHere = logOutput;
|
||||
ScanEnd?.Invoke(null, newCount);
|
||||
GC.Collect(GC.MaxGeneration, GCCollectionMode.Aggressive, true, true);
|
||||
}
|
||||
}
|
||||
|
||||
public static Task<int> ImportSingleToDbAsync(AudibleApi.Common.Item item, string accountId, string localeName) => Task.Run(() => importSingleToDb(item, accountId, localeName));
|
||||
private static int importSingleToDb(AudibleApi.Common.Item item, string accountId, string localeName)
|
||||
{
|
||||
ArgumentValidator.EnsureNotNull(item, "item");
|
||||
ArgumentValidator.EnsureNotNull(accountId, "accountId");
|
||||
ArgumentValidator.EnsureNotNull(localeName, "localeName");
|
||||
|
||||
var importItem = new ImportItem
|
||||
{
|
||||
DtoItem = item,
|
||||
AccountId = accountId,
|
||||
LocaleName = localeName
|
||||
};
|
||||
|
||||
var importItems = new List<ImportItem> { importItem };
|
||||
var validator = new LibraryValidator();
|
||||
var exceptions = validator.Validate(importItems.Select(i => i.DtoItem));
|
||||
|
||||
if (exceptions?.Any() ?? false)
|
||||
{
|
||||
Log.Logger.Error(new AggregateException(exceptions), "Error validating library book. {@DebugInfo}", new { item, accountId, localeName });
|
||||
return 0;
|
||||
}
|
||||
|
||||
return DoDbSizeChangeOperation(ctx =>
|
||||
{
|
||||
var bookImporter = new BookImporter(ctx);
|
||||
bookImporter.Import(importItems);
|
||||
var book = ctx.LibraryBooks.FirstOrDefault(lb => lb.Book.AudibleProductId == importItem.DtoItem.ProductId);
|
||||
|
||||
if (book is null)
|
||||
{
|
||||
book = new LibraryBook(bookImporter.Cache[importItem.DtoItem.ProductId], importItem.DtoItem.DateAdded, importItem.AccountId);
|
||||
ctx.LibraryBooks.Add(book);
|
||||
}
|
||||
else
|
||||
{
|
||||
book.AbsentFromLastScan = false;
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
private static LogArchiver? openLogArchive(string? archivePath)
|
||||
{
|
||||
if (string.IsNullOrWhiteSpace(archivePath))
|
||||
return null;
|
||||
|
||||
try
|
||||
{
|
||||
return new LogArchiver(archivePath);
|
||||
}
|
||||
catch (System.IO.InvalidDataException)
|
||||
{
|
||||
try
|
||||
{
|
||||
Log.Logger.Warning($"Deleting corrupted {nameof(LogArchiver)} at {archivePath}");
|
||||
FileUtility.SaferDelete(archivePath);
|
||||
return new LogArchiver(archivePath);
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
Log.Logger.Error(ex, $"Failed to open {nameof(LogArchiver)} at {archivePath}");
|
||||
}
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
Log.Logger.Error(ex, $"Failed to open {nameof(LogArchiver)} at {archivePath}");
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
private static async Task<List<ImportItem>> scanAccountsAsync(Account[] accounts, LibraryOptions libraryOptions)
|
||||
{
|
||||
var tasks = new List<Task<List<ImportItem>>>();
|
||||
|
||||
await using LogArchiver? archiver
|
||||
= Log.Logger.IsDebugEnabled()
|
||||
? openLogArchive(System.IO.Path.Combine(Configuration.Instance.LibationFiles.Location, "LibraryScans.zip"))
|
||||
: default;
|
||||
|
||||
archiver?.DeleteAllButNewestN(20);
|
||||
|
||||
foreach (var account in accounts)
|
||||
{
|
||||
try
|
||||
{
|
||||
// get APIs in serial b/c of logins. do NOT move inside of parallel (Task.WhenAll)
|
||||
var apiExtended = await ApiExtended.CreateAsync(account);
|
||||
|
||||
// add scanAccountAsync as a TASK: do not await
|
||||
tasks.Add(scanAccountAsync(apiExtended, account, libraryOptions, archiver));
|
||||
}
|
||||
catch(Exception ex)
|
||||
{
|
||||
//Catch to allow other accounts to continue scanning.
|
||||
Log.Logger.Error(ex, "Failed to scan account");
|
||||
}
|
||||
}
|
||||
|
||||
// import library in parallel
|
||||
var arrayOfLists = await Task.WhenAll(tasks);
|
||||
var importItems = arrayOfLists.SelectMany(a => a).ToList();
|
||||
return importItems;
|
||||
}
|
||||
|
||||
private static async Task<List<ImportItem>> scanAccountAsync(ApiExtended apiExtended, Account account, LibraryOptions libraryOptions, LogArchiver? archiver)
|
||||
{
|
||||
ArgumentValidator.EnsureNotNull(account, nameof(account));
|
||||
|
||||
Log.Logger.Information("ImportLibraryAsync. {@DebugInfo}", new
|
||||
{
|
||||
Account = account.MaskedLogEntry ?? "[null]"
|
||||
});
|
||||
|
||||
logTime($"pre scanAccountAsync {account.AccountName}");
|
||||
|
||||
try
|
||||
{
|
||||
var dtoItems = await apiExtended.GetLibraryValidatedAsync(libraryOptions);
|
||||
|
||||
logTime($"post scanAccountAsync {account.AccountName} qty: {dtoItems.Count}");
|
||||
|
||||
await logDtoItemsAsync(dtoItems);
|
||||
|
||||
return dtoItems.Select(d => new ImportItem { DtoItem = d, AccountId = account.AccountId, LocaleName = account.Locale?.Name }).ToList();
|
||||
}
|
||||
catch(ImportValidationException ex)
|
||||
{
|
||||
await logDtoItemsAsync(ex.Items, ex.InnerExceptions.ToArray());
|
||||
//If ImportValidationException is thrown, all Dto items get logged as part of the exception
|
||||
throw new AggregateException(ex.InnerExceptions);
|
||||
}
|
||||
|
||||
async Task logDtoItemsAsync(IEnumerable<AudibleApi.Common.Item> dtoItems, IEnumerable<Exception>? exceptions = null)
|
||||
{
|
||||
if (archiver is not null)
|
||||
{
|
||||
var fileName = $"{DateTime.Now:u} {account.MaskedLogEntry}.json";
|
||||
var items = await Task.Run(() => JArray.FromObject(dtoItems.Select(i => i.SourceJson)));
|
||||
|
||||
var scanFile = new JObject
|
||||
{
|
||||
{ "Account", account.MaskedLogEntry },
|
||||
{ "ScannedDateTime", DateTime.Now.ToString("u") },
|
||||
};
|
||||
|
||||
if (exceptions?.Any() is true)
|
||||
scanFile.Add("Exceptions", JArray.FromObject(exceptions));
|
||||
|
||||
scanFile.Add("Items", items);
|
||||
|
||||
await archiver.AddFileAsync(fileName, scanFile);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private static async Task<int> ImportIntoDbAsync(List<ImportItem> importItems) => await Task.Run(() => importIntoDb(importItems));
|
||||
private static int importIntoDb(List<ImportItem> importItems)
|
||||
{
|
||||
logTime("importIntoDbAsync -- pre db");
|
||||
|
||||
int newCount = 0;
|
||||
|
||||
DoDbSizeChangeOperation(ctx =>
|
||||
{
|
||||
var libraryBookImporter = new LibraryBookImporter(ctx);
|
||||
newCount = libraryBookImporter.Import(importItems);
|
||||
logTime("importIntoDbAsync -- post Import()");
|
||||
});
|
||||
return newCount;
|
||||
}
|
||||
|
||||
public static int SaveContext(LibationContext context)
|
||||
{
|
||||
try
|
||||
{
|
||||
return context.SaveChanges();
|
||||
}
|
||||
catch (Microsoft.EntityFrameworkCore.DbUpdateException ex)
|
||||
{
|
||||
// DbUpdateException exceptions can wreck serilog. Condense it until we can find a better solution. I suspect the culprit is the "WithExceptionDetails" serilog extension
|
||||
|
||||
static string format(Exception ex) => $"\r\nMessage: {ex.Message}\r\nStack Trace:\r\n{ex.StackTrace}";
|
||||
|
||||
var msg = "Microsoft.EntityFrameworkCore.DbUpdateException";
|
||||
if (ex.InnerException is null)
|
||||
throw new Exception($"{msg}{format(ex)}");
|
||||
throw new Exception(
|
||||
$"{msg}{format(ex)}",
|
||||
new Exception($"Inner Exception{format(ex.InnerException)}"));
|
||||
}
|
||||
}
|
||||
#endregion
|
||||
|
||||
#region remove books
|
||||
public static Task<List<LibraryBook>> RemoveBooksAsync(List<string> idsToRemove) => Task.Run(() => removeBooks(idsToRemove));
|
||||
private static List<LibraryBook> removeBooks(List<string> idsToRemove)
|
||||
#region remove/restore books
|
||||
public static Task<int> RemoveBooksAsync(this IEnumerable<LibraryBook> idsToRemove) => Task.Run(() => removeBooks(idsToRemove));
|
||||
private static int removeBooks(IEnumerable<LibraryBook> removeLibraryBooks)
|
||||
{
|
||||
using var context = DbContexts.GetContext();
|
||||
var libBooks = context.GetLibrary_Flat_NoTracking();
|
||||
if (removeLibraryBooks is null || !removeLibraryBooks.Any())
|
||||
return 0;
|
||||
|
||||
var removeLibraryBooks = libBooks.Where(lb => idsToRemove.Contains(lb.Book.AudibleProductId)).ToList();
|
||||
context.LibraryBooks.RemoveRange(removeLibraryBooks);
|
||||
context.Books.RemoveRange(removeLibraryBooks.Select(lb => lb.Book));
|
||||
return DoDbSizeChangeOperation(ctx =>
|
||||
{
|
||||
// Entry() NoTracking entities before SaveChanges()
|
||||
foreach (var lb in removeLibraryBooks)
|
||||
{
|
||||
lb.IsDeleted = true;
|
||||
ctx.Entry(lb).State = Microsoft.EntityFrameworkCore.EntityState.Modified;
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
var qtyChanges = context.SaveChanges();
|
||||
if (qtyChanges > 0)
|
||||
finalizeLibrarySizeChange();
|
||||
|
||||
return removeLibraryBooks;
|
||||
}
|
||||
#endregion
|
||||
|
||||
// call this whenever books are added or removed from library
|
||||
private static void finalizeLibrarySizeChange()
|
||||
public static Task<int> RestoreBooksAsync(this IEnumerable<LibraryBook> idsToRemove) => Task.Run(() => restoreBooks(idsToRemove));
|
||||
private static int restoreBooks(this IEnumerable<LibraryBook> libraryBooks)
|
||||
{
|
||||
SearchEngineCommands.FullReIndex();
|
||||
LibrarySizeChanged?.Invoke(null, null);
|
||||
}
|
||||
if (libraryBooks is null || !libraryBooks.Any())
|
||||
return 0;
|
||||
try
|
||||
{
|
||||
return DoDbSizeChangeOperation(ctx =>
|
||||
{
|
||||
// Entry() NoTracking entities before SaveChanges()
|
||||
foreach (var lb in libraryBooks)
|
||||
{
|
||||
lb.IsDeleted = false;
|
||||
ctx.Entry(lb).State = Microsoft.EntityFrameworkCore.EntityState.Modified;
|
||||
}
|
||||
});
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
Log.Logger.Error(ex, "Error restoring books");
|
||||
throw;
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>Occurs when the size of the library changes. ie: books are added or removed</summary>
|
||||
public static event EventHandler LibrarySizeChanged;
|
||||
|
||||
/// <summary>
|
||||
/// Occurs when the size of the library does not change but book(s) details do. Especially when <see cref="UserDefinedItem.Tags"/>, <see cref="UserDefinedItem.BookStatus"/>, or <see cref="UserDefinedItem.PdfStatus"/> changed values are successfully persisted.
|
||||
/// </summary>
|
||||
public static event EventHandler BookUserDefinedItemCommitted;
|
||||
|
||||
#region Update book details
|
||||
public static int UpdateUserDefinedItem(params Book[] books) => UpdateUserDefinedItem(books.ToList());
|
||||
public static int UpdateUserDefinedItem(IEnumerable<Book> books)
|
||||
public static Task<int> PermanentlyDeleteBooksAsync(this IEnumerable<LibraryBook> idsToRemove) => Task.Run(() => permanentlyDeleteBooks(idsToRemove));
|
||||
private static int permanentlyDeleteBooks(this IEnumerable<LibraryBook> libraryBooks)
|
||||
{
|
||||
if (libraryBooks is null || !libraryBooks.Any())
|
||||
return 0;
|
||||
try
|
||||
{
|
||||
return DoDbSizeChangeOperation(ctx =>
|
||||
{
|
||||
ctx.LibraryBooks.RemoveRange(libraryBooks);
|
||||
ctx.Books.RemoveRange(libraryBooks.Select(lb => lb.Book));
|
||||
});
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
Log.Logger.Error(ex, "Error restoring books");
|
||||
throw;
|
||||
}
|
||||
}
|
||||
|
||||
static int DoDbSizeChangeOperation(Action<LibationContext> action)
|
||||
{
|
||||
try
|
||||
{
|
||||
if (books is null || !books.Any())
|
||||
return 0;
|
||||
int qtyChanges;
|
||||
List<LibraryBook>? library;
|
||||
|
||||
using var context = DbContexts.GetContext();
|
||||
|
||||
// Attach() NoTracking entities before SaveChanges()
|
||||
foreach (var book in books)
|
||||
context.Attach(book.UserDefinedItem).State = Microsoft.EntityFrameworkCore.EntityState.Modified;
|
||||
|
||||
var qtyChanges = context.SaveChanges();
|
||||
if (qtyChanges == 0)
|
||||
return 0;
|
||||
|
||||
// semi-arbitrary. At some point it's more worth it to do a full re-index than to do one offs.
|
||||
// I did not benchmark before choosing the number here
|
||||
if (qtyChanges > 15)
|
||||
SearchEngineCommands.FullReIndex();
|
||||
else
|
||||
using (var context = DbContexts.GetContext())
|
||||
{
|
||||
foreach (var book in books)
|
||||
{
|
||||
SearchEngineCommands.UpdateLiberatedStatus(book);
|
||||
SearchEngineCommands.UpdateBookTags(book);
|
||||
}
|
||||
action?.Invoke(context);
|
||||
|
||||
qtyChanges = SaveContext(context);
|
||||
logTime("importIntoDbAsync -- post SaveChanges");
|
||||
library = qtyChanges == 0 ? null : context.GetLibrary_Flat_NoTracking(includeParents: true);
|
||||
}
|
||||
|
||||
BookUserDefinedItemCommitted?.Invoke(null, null);
|
||||
if (library is not null)
|
||||
finalizeLibrarySizeChange(library);
|
||||
logTime("importIntoDbAsync -- post finalizeLibrarySizeChange");
|
||||
|
||||
return qtyChanges;
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
Log.Logger.Error(ex, $"Error updating {nameof(Book.UserDefinedItem)}");
|
||||
Log.Logger.Error(ex, "Error performing DB Size change operation");
|
||||
throw;
|
||||
}
|
||||
}
|
||||
#endregion
|
||||
|
||||
// must be here instead of in db layer due to AaxcExists
|
||||
public static LiberatedStatus Liberated_Status(Book book)
|
||||
=> book.Audio_Exists() ? book.UserDefinedItem.BookStatus
|
||||
: AudibleFileStorage.AaxcExists(book.AudibleProductId) ? LiberatedStatus.PartialDownload
|
||||
: LiberatedStatus.NotLiberated;
|
||||
#endregion
|
||||
|
||||
// exists here for feature predictability. It makes sense for this to be where Liberated_Status is
|
||||
public static LiberatedStatus? Pdf_Status(Book book) => book.UserDefinedItem.PdfStatus;
|
||||
// call this whenever books are added or removed from library
|
||||
private static void finalizeLibrarySizeChange(List<LibraryBook> library)
|
||||
{
|
||||
LibrarySizeChanged?.Invoke(null, library);
|
||||
}
|
||||
|
||||
// below are queries, not commands. maybe I should make a LibraryQueries. except there's already one of those...
|
||||
/// <summary>Occurs when the size of the library changes. ie: books are added or removed</summary>
|
||||
public static event EventHandler<List<LibraryBook>>? LibrarySizeChanged;
|
||||
|
||||
public record LibraryStats(int booksFullyBackedUp, int booksDownloadedOnly, int booksNoProgress, int booksError, int pdfsDownloaded, int pdfsNotDownloaded) { }
|
||||
public static LibraryStats GetCounts()
|
||||
{
|
||||
var libraryBooks = DbContexts.GetLibrary_Flat_NoTracking();
|
||||
/// <summary>
|
||||
/// Occurs when the size of the library does not change but book(s) details do. Especially when <see cref="UserDefinedItem.Tags"/>, <see cref="UserDefinedItem.BookStatus"/>, or <see cref="UserDefinedItem.PdfStatus"/> changed values are successfully persisted.
|
||||
/// </summary>
|
||||
public static event EventHandler<IEnumerable<LibraryBook>>? BookUserDefinedItemCommitted;
|
||||
|
||||
var results = libraryBooks
|
||||
.AsParallel()
|
||||
.Select(lb => Liberated_Status(lb.Book))
|
||||
.ToList();
|
||||
var booksFullyBackedUp = results.Count(r => r == LiberatedStatus.Liberated);
|
||||
var booksDownloadedOnly = results.Count(r => r == LiberatedStatus.PartialDownload);
|
||||
var booksNoProgress = results.Count(r => r == LiberatedStatus.NotLiberated);
|
||||
var booksError = results.Count(r => r == LiberatedStatus.Error);
|
||||
#region Update book details
|
||||
public static async Task<int> UpdateUserDefinedItemAsync(
|
||||
this LibraryBook lb,
|
||||
string? tags = null,
|
||||
LiberatedStatus? bookStatus = null,
|
||||
LiberatedStatus? pdfStatus = null,
|
||||
Rating? rating = null)
|
||||
=> await UpdateUserDefinedItemAsync([lb], tags, bookStatus, pdfStatus, rating);
|
||||
|
||||
Log.Logger.Information("Book counts. {@DebugInfo}", new { total = results.Count, booksFullyBackedUp, booksDownloadedOnly, booksNoProgress, booksError });
|
||||
public static async Task<int> UpdateUserDefinedItemAsync(
|
||||
this IEnumerable<LibraryBook> lb,
|
||||
string? tags = null,
|
||||
LiberatedStatus? bookStatus = null,
|
||||
LiberatedStatus? pdfStatus = null,
|
||||
Rating? rating = null)
|
||||
=> await UpdateUserDefinedItemAsync(
|
||||
lb,
|
||||
udi => {
|
||||
// blank tags are expected. null tags are not
|
||||
if (tags is not null)
|
||||
udi.Tags = tags;
|
||||
|
||||
var boolResults = libraryBooks
|
||||
.AsParallel()
|
||||
.Where(lb => lb.Book.HasPdf())
|
||||
.Select(lb => Pdf_Status(lb.Book))
|
||||
.ToList();
|
||||
var pdfsDownloaded = boolResults.Count(r => r == LiberatedStatus.Liberated);
|
||||
var pdfsNotDownloaded = boolResults.Count(r => r == LiberatedStatus.NotLiberated);
|
||||
if (bookStatus.HasValue)
|
||||
udi.BookStatus = bookStatus.Value;
|
||||
|
||||
Log.Logger.Information("PDF counts. {@DebugInfo}", new { total = boolResults.Count, pdfsDownloaded, pdfsNotDownloaded });
|
||||
// method handles null logic
|
||||
udi.SetPdfStatus(pdfStatus);
|
||||
|
||||
return new(booksFullyBackedUp, booksDownloadedOnly, booksNoProgress, booksError, pdfsDownloaded, pdfsNotDownloaded);
|
||||
if (rating is not null)
|
||||
udi.UpdateRating(rating.OverallRating, rating.PerformanceRating, rating.StoryRating);
|
||||
});
|
||||
|
||||
public static async Task<int> UpdateBookStatusAsync(this LibraryBook lb, LiberatedStatus bookStatus, Version? libationVersion, AudioFormat audioFormat, string audioVersion)
|
||||
=> await lb.UpdateUserDefinedItemAsync(udi => { udi.BookStatus = bookStatus; udi.SetLastDownloaded(libationVersion, audioFormat, audioVersion); });
|
||||
|
||||
public static async Task<int> UpdateBookStatusAsync(this LibraryBook libraryBook, LiberatedStatus bookStatus)
|
||||
=> await libraryBook.UpdateUserDefinedItemAsync(udi => udi.BookStatus = bookStatus);
|
||||
public static async Task<int> UpdateBookStatusAsync(this IEnumerable<LibraryBook> libraryBooks, LiberatedStatus bookStatus)
|
||||
=> await libraryBooks.UpdateUserDefinedItemAsync(udi => udi.BookStatus = bookStatus);
|
||||
|
||||
public static async Task<int> UpdatePdfStatusAsync(this LibraryBook libraryBook, LiberatedStatus pdfStatus)
|
||||
=> await libraryBook.UpdateUserDefinedItemAsync(udi => udi.SetPdfStatus(pdfStatus));
|
||||
public static async Task<int> UpdatePdfStatusAsync(this IEnumerable<LibraryBook> libraryBooks, LiberatedStatus pdfStatus)
|
||||
=> await libraryBooks.UpdateUserDefinedItemAsync(udi => udi.SetPdfStatus(pdfStatus));
|
||||
|
||||
public static async Task<int> UpdateTagsAsync(this LibraryBook libraryBook, string tags)
|
||||
=> await libraryBook.UpdateUserDefinedItemAsync(udi => udi.Tags = tags);
|
||||
public static async Task<int> UpdateTagsAsync(this IEnumerable<LibraryBook> libraryBooks, string tags)
|
||||
=> await libraryBooks.UpdateUserDefinedItemAsync(udi => udi.Tags = tags);
|
||||
|
||||
public static async Task<int> UpdateUserDefinedItemAsync(this LibraryBook libraryBook, Action<UserDefinedItem> action)
|
||||
=> await UpdateUserDefinedItemAsync([libraryBook], action);
|
||||
|
||||
public static Task<int> UpdateUserDefinedItemAsync(this IEnumerable<LibraryBook> libraryBooks, Action<UserDefinedItem> action)
|
||||
=> Task.Run(() => libraryBooks.updateUserDefinedItem(action));
|
||||
|
||||
private static int updateUserDefinedItem(this IEnumerable<LibraryBook> libraryBooks, Action<UserDefinedItem> action)
|
||||
{
|
||||
try
|
||||
{
|
||||
if (libraryBooks is null || !libraryBooks.Any())
|
||||
return 0;
|
||||
|
||||
int qtyChanges;
|
||||
using (var context = DbContexts.GetContext())
|
||||
{
|
||||
// Entry() instead of Attach() due to possible stack overflow with large tables
|
||||
foreach (var book in libraryBooks)
|
||||
{
|
||||
action?.Invoke(book.Book.UserDefinedItem);
|
||||
|
||||
var udiEntity = context.Entry(book.Book.UserDefinedItem);
|
||||
|
||||
udiEntity.State = Microsoft.EntityFrameworkCore.EntityState.Modified;
|
||||
if (udiEntity.Reference(udi => udi.Rating).TargetEntry is Microsoft.EntityFrameworkCore.ChangeTracking.EntityEntry<Rating> ratingEntry)
|
||||
ratingEntry.State = Microsoft.EntityFrameworkCore.EntityState.Modified;
|
||||
}
|
||||
|
||||
qtyChanges = context.SaveChanges();
|
||||
}
|
||||
if (qtyChanges > 0)
|
||||
BookUserDefinedItemCommitted?.Invoke(null, libraryBooks);
|
||||
|
||||
return qtyChanges;
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
Log.Logger.Error(ex, $"Error updating {nameof(Book.UserDefinedItem)}");
|
||||
throw;
|
||||
}
|
||||
}
|
||||
#endregion
|
||||
|
||||
// must be here instead of in db layer due to AaxcExists
|
||||
public static LiberatedStatus Liberated_Status(Book book)
|
||||
=> book.AudioExists ? book.UserDefinedItem.BookStatus
|
||||
: AudibleFileStorage.AaxcExists(book.AudibleProductId) ? LiberatedStatus.PartialDownload
|
||||
: LiberatedStatus.NotLiberated;
|
||||
|
||||
// exists here for feature predictability. It makes sense for this to be where Liberated_Status is
|
||||
public static LiberatedStatus? Pdf_Status(Book book) => book.UserDefinedItem.PdfStatus;
|
||||
|
||||
// below are queries, not commands. maybe I should make a LibraryQueries. except there's already one of those...
|
||||
|
||||
public record LibraryStats(int booksFullyBackedUp, int booksDownloadedOnly, int booksNoProgress, int booksError, int booksUnavailable, int pdfsDownloaded, int pdfsNotDownloaded, int pdfsUnavailable, IEnumerable<LibraryBook> LibraryBooks)
|
||||
{
|
||||
public int PendingBooks => booksNoProgress + booksDownloadedOnly;
|
||||
public bool HasPendingBooks => PendingBooks > 0;
|
||||
|
||||
public bool HasBookResults => 0 < (booksFullyBackedUp + booksDownloadedOnly + booksNoProgress + booksError + booksUnavailable);
|
||||
public bool HasPdfResults => 0 < (pdfsNotDownloaded + pdfsDownloaded + pdfsUnavailable);
|
||||
|
||||
public string StatusString => HasPdfResults ? $"{toBookStatusString()} | {toPdfStatusString()}" : toBookStatusString();
|
||||
|
||||
private string toBookStatusString()
|
||||
{
|
||||
if (!HasBookResults) return "No books. Begin by importing your library";
|
||||
|
||||
if (!HasPendingBooks && booksError + booksUnavailable == 0) return $"All {"book".PluralizeWithCount(booksFullyBackedUp)} backed up";
|
||||
|
||||
var sb = new StringBuilder($"BACKUPS: No progress: {booksNoProgress} In process: {booksDownloadedOnly} Fully backed up: {booksFullyBackedUp}");
|
||||
|
||||
if (booksError > 0)
|
||||
sb.Append($" Errors: {booksError}");
|
||||
if (booksUnavailable > 0)
|
||||
sb.Append($" Unavailable: {booksUnavailable}");
|
||||
|
||||
return sb.ToString();
|
||||
}
|
||||
|
||||
private string toPdfStatusString()
|
||||
{
|
||||
if (pdfsNotDownloaded + pdfsUnavailable == 0) return $"All {pdfsDownloaded} PDFs downloaded";
|
||||
|
||||
var sb = new StringBuilder($"PDFs: NOT d/l'ed: {pdfsNotDownloaded} Downloaded: {pdfsDownloaded}");
|
||||
|
||||
if (pdfsUnavailable > 0)
|
||||
sb.Append($" Unavailable: {pdfsUnavailable}");
|
||||
|
||||
return sb.ToString();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public static LibraryStats GetCounts(IEnumerable<LibraryBook>? libraryBooks = null)
|
||||
{
|
||||
libraryBooks ??= DbContexts.GetLibrary_Flat_NoTracking();
|
||||
|
||||
var results = libraryBooks
|
||||
.AsParallel()
|
||||
.WithoutParents()
|
||||
.Select(lb => new { absent = lb.AbsentFromLastScan, status = Liberated_Status(lb.Book) })
|
||||
.ToList();
|
||||
|
||||
var booksFullyBackedUp = results.Count(r => r.status == LiberatedStatus.Liberated);
|
||||
var booksDownloadedOnly = results.Count(r => !r.absent && r.status == LiberatedStatus.PartialDownload);
|
||||
var booksNoProgress = results.Count(r => !r.absent && r.status == LiberatedStatus.NotLiberated);
|
||||
var booksError = results.Count(r => r.status == LiberatedStatus.Error);
|
||||
var booksUnavailable = results.Count(r => r.absent && r.status is LiberatedStatus.NotLiberated or LiberatedStatus.PartialDownload);
|
||||
|
||||
Log.Logger.Information("Book counts. {@DebugInfo}", new { total = results.Count, booksFullyBackedUp, booksDownloadedOnly, booksNoProgress, booksError, booksUnavailable });
|
||||
|
||||
var pdfResults = libraryBooks
|
||||
.AsParallel()
|
||||
.Where(lb => lb.Book.HasPdf)
|
||||
.Select(lb => new { absent = lb.AbsentFromLastScan, status = Pdf_Status(lb.Book) })
|
||||
.ToList();
|
||||
|
||||
var pdfsDownloaded = pdfResults.Count(r => r.status == LiberatedStatus.Liberated);
|
||||
var pdfsNotDownloaded = pdfResults.Count(r => !r.absent && r.status == LiberatedStatus.NotLiberated);
|
||||
var pdfsUnavailable = pdfResults.Count(r => r.absent && r.status == LiberatedStatus.NotLiberated);
|
||||
|
||||
Log.Logger.Information("PDF counts. {@DebugInfo}", new { total = pdfResults.Count, pdfsDownloaded, pdfsNotDownloaded, pdfsUnavailable });
|
||||
|
||||
return new(booksFullyBackedUp, booksDownloadedOnly, booksNoProgress, booksError, booksUnavailable, pdfsDownloaded, pdfsNotDownloaded, pdfsUnavailable, libraryBooks);
|
||||
}
|
||||
}
|
||||
}
|
||||
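A short usage sketch of the stats query above. The printed text is only an example of the StatusString format built by the LibraryStats record; the call site itself is hypothetical.

using System;

// Summarize backup progress; with no argument, GetCounts() queries the flat library itself.
var stats = ApplicationServices.LibraryCommands.GetCounts();
Console.WriteLine(stats.StatusString);   // e.g. "BACKUPS: No progress: 3 In process: 1 Fully backed up: 40 | PDFs: ..."

if (stats.HasPendingBooks)
    Console.WriteLine($"{stats.PendingBooks} book(s) still pending backup");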
|
||||
@@ -1,11 +1,12 @@
using System;
using System.Collections.Generic;
using System.Linq;
using ClosedXML.Excel;
using CsvHelper;
using CsvHelper.Configuration.Attributes;
using DataLayer;
using NPOI.XSSF.UserModel;
using Serilog;
using Newtonsoft.Json;
using System;
using System.Collections.Generic;
using System.Globalization;
using System.Linq;

namespace ApplicationServices
{
@@ -35,6 +36,9 @@ namespace ApplicationServices
[Name("Title")]
public string Title { get; set; }

[Name("Subtitle")]
public string Subtitle { get; set; }

[Name("Authors")]
public string AuthorNames { get; set; }

@@ -100,7 +104,41 @@ namespace ApplicationServices

[Name("Content Type")]
public string ContentType { get; set; }

[Name("Language")]
public string Language { get; set; }

[Name("LastDownloaded")]
public DateTime? LastDownloaded { get; set; }

[Name("LastDownloadedVersion")]
public string LastDownloadedVersion { get; set; }

[Name("IsFinished")]
public bool IsFinished { get; set; }

[Name("IsSpatial")]
public bool IsSpatial { get; set; }

[Name("Last Downloaded File Version")]
public string LastDownloadedFileVersion { get; set; }

[Ignore /* csv ignore */]
public AudioFormat LastDownloadedFormat { get; set; }

[Name("Last Downloaded Codec"), JsonIgnore]
public string CodecString => LastDownloadedFormat?.CodecString ?? "";

[Name("Last Downloaded Sample rate"), JsonIgnore]
public int? SampleRate => LastDownloadedFormat?.SampleRate;

[Name("Last Downloaded Audio Channels"), JsonIgnore]
public int? ChannelCount => LastDownloadedFormat?.ChannelCount;

[Name("Last Downloaded Bitrate"), JsonIgnore]
public int? BitRate => LastDownloadedFormat?.BitRate;
}

public static class LibToDtos
{
public static List<ExportDto> ToDtos(this IEnumerable<LibraryBook> library)
@@ -111,29 +149,39 @@ namespace ApplicationServices
AudibleProductId = a.Book.AudibleProductId,
Locale = a.Book.Locale,
Title = a.Book.Title,
AuthorNames = a.Book.AuthorNames(),
NarratorNames = a.Book.NarratorNames(),
Subtitle = a.Book.Subtitle,
AuthorNames = a.Book.AuthorNames,
NarratorNames = a.Book.NarratorNames,
LengthInMinutes = a.Book.LengthInMinutes,
Description = a.Book.Description,
Publisher = a.Book.Publisher,
HasPdf = a.Book.HasPdf(),
HasPdf = a.Book.HasPdf,
SeriesNames = a.Book.SeriesNames(),
SeriesOrder = a.Book.SeriesLink.Any() ? a.Book.SeriesLink?.Select(sl => $"{sl.Order} : {sl.Series.Name}").Aggregate((a, b) => $"{a}, {b}") : "",
CommunityRatingOverall = a.Book.Rating?.OverallRating,
CommunityRatingPerformance = a.Book.Rating?.PerformanceRating,
CommunityRatingStory = a.Book.Rating?.StoryRating,
CommunityRatingOverall = a.Book.Rating?.OverallRating.ZeroIsNull(),
CommunityRatingPerformance = a.Book.Rating?.PerformanceRating.ZeroIsNull(),
CommunityRatingStory = a.Book.Rating?.StoryRating.ZeroIsNull(),
PictureId = a.Book.PictureId,
IsAbridged = a.Book.IsAbridged,
DatePublished = a.Book.DatePublished,
CategoriesNames = a.Book.CategoriesNames().Any() ? a.Book.CategoriesNames().Aggregate((a, b) => $"{a}, {b}") : "",
MyRatingOverall = a.Book.UserDefinedItem.Rating.OverallRating,
MyRatingPerformance = a.Book.UserDefinedItem.Rating.PerformanceRating,
MyRatingStory = a.Book.UserDefinedItem.Rating.StoryRating,
CategoriesNames = string.Join("; ", a.Book.LowestCategoryNames()),
MyRatingOverall = a.Book.UserDefinedItem.Rating.OverallRating.ZeroIsNull(),
MyRatingPerformance = a.Book.UserDefinedItem.Rating.PerformanceRating.ZeroIsNull(),
MyRatingStory = a.Book.UserDefinedItem.Rating.StoryRating.ZeroIsNull(),
MyLibationTags = a.Book.UserDefinedItem.Tags,
BookStatus = a.Book.UserDefinedItem.BookStatus.ToString(),
PdfStatus = a.Book.UserDefinedItem.PdfStatus.ToString(),
ContentType = a.Book.ContentType.ToString()
ContentType = a.Book.ContentType.ToString(),
Language = a.Book.Language,
LastDownloaded = a.Book.UserDefinedItem.LastDownloaded,
LastDownloadedVersion = a.Book.UserDefinedItem.LastDownloadedVersion?.ToString() ?? "",
IsFinished = a.Book.UserDefinedItem.IsFinished,
IsSpatial = a.Book.IsSpatial,
LastDownloadedFileVersion = a.Book.UserDefinedItem.LastDownloadedFileVersion ?? "",
LastDownloadedFormat = a.Book.UserDefinedItem.LastDownloadedFormat
}).ToList();

private static float? ZeroIsNull(this float value) => value is 0 ? null : value;
}
public static class LibraryExporter
{
@@ -142,7 +190,6 @@ namespace ApplicationServices
var dtos = DbContexts.GetLibrary_Flat_NoTracking().ToDtos();
if (!dtos.Any())
return;

using var writer = new System.IO.StreamWriter(saveFilePath);
using var csv = new CsvWriter(writer, System.Globalization.CultureInfo.CurrentCulture);

@@ -154,7 +201,7 @@ namespace ApplicationServices
public static void ToJson(string saveFilePath)
{
var dtos = DbContexts.GetLibrary_Flat_NoTracking().ToDtos();
var json = Newtonsoft.Json.JsonConvert.SerializeObject(dtos, Newtonsoft.Json.Formatting.Indented);
var json = JsonConvert.SerializeObject(dtos, Formatting.Indented);
System.IO.File.WriteAllText(saveFilePath, json);
}
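For orientation, a hedged, self-contained sketch of the two serialization calls this hunk leans on, CsvHelper's WriteRecords and Newtonsoft's SerializeObject. The Dto record and file names are invented for illustration and are not part of the diff.

using System.Collections.Generic;
using System.Globalization;
using System.IO;
using CsvHelper;
using Newtonsoft.Json;

public record Dto(string Title, int LengthInMinutes);   // hypothetical stand-in for ExportDto

public static class ExportSketch
{
    public static void Main()
    {
        var dtos = new List<Dto> { new("Some Title", 90) };

        // CSV: CsvWriter derives the header from the record's public properties.
        using (var writer = new StreamWriter("library.csv"))
        using (var csv = new CsvWriter(writer, CultureInfo.CurrentCulture))
            csv.WriteRecords(dtos);

        // JSON: same objects, indented output.
        File.WriteAllText("library.json", JsonConvert.SerializeObject(dtos, Formatting.Indented));
    }
}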
@@ -162,126 +209,116 @@ namespace ApplicationServices
|
||||
{
|
||||
var dtos = DbContexts.GetLibrary_Flat_NoTracking().ToDtos();
|
||||
|
||||
var workbook = new XSSFWorkbook();
|
||||
var sheet = workbook.CreateSheet("Library");
|
||||
using var workbook = new XLWorkbook();
|
||||
var sheet = workbook.AddWorksheet("Library");
|
||||
|
||||
var detailSubtotalFont = workbook.CreateFont();
|
||||
detailSubtotalFont.IsBold = true;
|
||||
|
||||
var detailSubtotalCellStyle = workbook.CreateCellStyle();
|
||||
detailSubtotalCellStyle.SetFont(detailSubtotalFont);
|
||||
|
||||
// headers
|
||||
var rowIndex = 0;
|
||||
var row = sheet.CreateRow(rowIndex);
|
||||
|
||||
var columns = new[] {
|
||||
nameof (ExportDto.Account),
|
||||
nameof (ExportDto.DateAdded),
|
||||
nameof (ExportDto.AudibleProductId),
|
||||
nameof (ExportDto.Locale),
|
||||
nameof (ExportDto.Title),
|
||||
nameof (ExportDto.AuthorNames),
|
||||
nameof (ExportDto.NarratorNames),
|
||||
nameof (ExportDto.LengthInMinutes),
|
||||
nameof (ExportDto.Description),
|
||||
nameof (ExportDto.Publisher),
|
||||
nameof (ExportDto.HasPdf),
|
||||
nameof (ExportDto.SeriesNames),
|
||||
nameof (ExportDto.SeriesOrder),
|
||||
nameof (ExportDto.CommunityRatingOverall),
|
||||
nameof (ExportDto.CommunityRatingPerformance),
|
||||
nameof (ExportDto.CommunityRatingStory),
|
||||
nameof (ExportDto.PictureId),
|
||||
nameof (ExportDto.IsAbridged),
|
||||
nameof (ExportDto.DatePublished),
|
||||
nameof (ExportDto.CategoriesNames),
|
||||
nameof (ExportDto.MyRatingOverall),
|
||||
nameof (ExportDto.MyRatingPerformance),
|
||||
nameof (ExportDto.MyRatingStory),
|
||||
nameof (ExportDto.MyLibationTags),
|
||||
nameof (ExportDto.BookStatus),
|
||||
nameof (ExportDto.PdfStatus),
|
||||
nameof (ExportDto.ContentType)
|
||||
};
|
||||
var col = 0;
|
||||
nameof(ExportDto.Account),
|
||||
nameof(ExportDto.DateAdded),
|
||||
nameof(ExportDto.AudibleProductId),
|
||||
nameof(ExportDto.Locale),
|
||||
nameof(ExportDto.Title),
|
||||
nameof(ExportDto.Subtitle),
|
||||
nameof(ExportDto.AuthorNames),
|
||||
nameof(ExportDto.NarratorNames),
|
||||
nameof(ExportDto.LengthInMinutes),
|
||||
nameof(ExportDto.Description),
|
||||
nameof(ExportDto.Publisher),
|
||||
nameof(ExportDto.HasPdf),
|
||||
nameof(ExportDto.SeriesNames),
|
||||
nameof(ExportDto.SeriesOrder),
|
||||
nameof(ExportDto.CommunityRatingOverall),
|
||||
nameof(ExportDto.CommunityRatingPerformance),
|
||||
nameof(ExportDto.CommunityRatingStory),
|
||||
nameof(ExportDto.PictureId),
|
||||
nameof(ExportDto.IsAbridged),
|
||||
nameof(ExportDto.DatePublished),
|
||||
nameof(ExportDto.CategoriesNames),
|
||||
nameof(ExportDto.MyRatingOverall),
|
||||
nameof(ExportDto.MyRatingPerformance),
|
||||
nameof(ExportDto.MyRatingStory),
|
||||
nameof(ExportDto.MyLibationTags),
|
||||
nameof(ExportDto.BookStatus),
|
||||
nameof(ExportDto.PdfStatus),
|
||||
nameof(ExportDto.ContentType),
|
||||
nameof(ExportDto.Language),
|
||||
nameof(ExportDto.LastDownloaded),
|
||||
nameof(ExportDto.LastDownloadedVersion),
|
||||
nameof(ExportDto.IsFinished),
|
||||
nameof(ExportDto.IsSpatial),
|
||||
nameof(ExportDto.LastDownloadedFileVersion),
|
||||
nameof(ExportDto.CodecString),
|
||||
nameof(ExportDto.SampleRate),
|
||||
nameof(ExportDto.ChannelCount),
|
||||
nameof(ExportDto.BitRate)
|
||||
};
|
||||
|
||||
int rowIndex = 1, col = 1;
|
||||
var headerRow = sheet.Row(rowIndex++);
|
||||
foreach (var c in columns)
|
||||
{
|
||||
var cell = row.CreateCell(col++);
|
||||
var name = ExportDto.GetName(c);
|
||||
cell.SetCellValue(name);
|
||||
cell.CellStyle = detailSubtotalCellStyle;
|
||||
var headerCell = headerRow.Cell(col++);
|
||||
headerCell.Value = ExportDto.GetName(c);
|
||||
headerCell.Style.Font.Bold = true;
|
||||
}
|
||||
|
||||
var dateFormat = workbook.CreateDataFormat();
|
||||
var dateStyle = workbook.CreateCellStyle();
|
||||
dateStyle.DataFormat = dateFormat.GetFormat("MM/dd/yyyy HH:mm:ss");
|
||||
|
||||
rowIndex++;
|
||||
var dateFormat = CultureInfo.CurrentCulture.DateTimeFormat.ShortDatePattern + " HH:mm:ss";
|
||||
|
||||
// Add data rows
|
||||
foreach (var dto in dtos)
|
||||
{
|
||||
col = 0;
|
||||
col = 1;
|
||||
var row = sheet.Row(rowIndex++);
|
||||
|
||||
row = sheet.CreateRow(rowIndex);
|
||||
|
||||
row.CreateCell(col++).SetCellValue(dto.Account);
|
||||
|
||||
var dateAddedCell = row.CreateCell(col++);
|
||||
dateAddedCell.CellStyle = dateStyle;
|
||||
dateAddedCell.SetCellValue(dto.DateAdded);
|
||||
|
||||
row.CreateCell(col++).SetCellValue(dto.AudibleProductId);
|
||||
row.CreateCell(col++).SetCellValue(dto.Locale);
|
||||
row.CreateCell(col++).SetCellValue(dto.Title);
|
||||
row.CreateCell(col++).SetCellValue(dto.AuthorNames);
|
||||
row.CreateCell(col++).SetCellValue(dto.NarratorNames);
|
||||
row.CreateCell(col++).SetCellValue(dto.LengthInMinutes);
|
||||
row.CreateCell(col++).SetCellValue(dto.Description);
|
||||
row.CreateCell(col++).SetCellValue(dto.Publisher);
|
||||
row.CreateCell(col++).SetCellValue(dto.HasPdf);
|
||||
row.CreateCell(col++).SetCellValue(dto.SeriesNames);
|
||||
row.CreateCell(col++).SetCellValue(dto.SeriesOrder);
|
||||
|
||||
col = createCell(row, col, dto.CommunityRatingOverall);
|
||||
col = createCell(row, col, dto.CommunityRatingPerformance);
|
||||
col = createCell(row, col, dto.CommunityRatingStory);
|
||||
|
||||
row.CreateCell(col++).SetCellValue(dto.PictureId);
|
||||
row.CreateCell(col++).SetCellValue(dto.IsAbridged);
|
||||
|
||||
var datePubCell = row.CreateCell(col++);
|
||||
datePubCell.CellStyle = dateStyle;
|
||||
if (dto.DatePublished.HasValue)
|
||||
datePubCell.SetCellValue(dto.DatePublished.Value);
|
||||
else
|
||||
datePubCell.SetCellValue("");
|
||||
|
||||
row.CreateCell(col++).SetCellValue(dto.CategoriesNames);
|
||||
|
||||
col = createCell(row, col, dto.MyRatingOverall);
|
||||
col = createCell(row, col, dto.MyRatingPerformance);
|
||||
col = createCell(row, col, dto.MyRatingStory);
|
||||
|
||||
row.CreateCell(col++).SetCellValue(dto.MyLibationTags);
|
||||
row.CreateCell(col++).SetCellValue(dto.BookStatus);
|
||||
row.CreateCell(col++).SetCellValue(dto.PdfStatus);
|
||||
row.CreateCell(col++).SetCellValue(dto.ContentType);
|
||||
|
||||
rowIndex++;
|
||||
row.Cell(col++).Value = dto.Account;
|
||||
row.Cell(col++).SetDate(dto.DateAdded, dateFormat);
|
||||
row.Cell(col++).Value = dto.AudibleProductId;
|
||||
row.Cell(col++).Value = dto.Locale;
|
||||
row.Cell(col++).Value = dto.Title;
|
||||
row.Cell(col++).Value = dto.Subtitle;
|
||||
row.Cell(col++).Value = dto.AuthorNames;
|
||||
row.Cell(col++).Value = dto.NarratorNames;
|
||||
row.Cell(col++).Value = dto.LengthInMinutes;
|
||||
row.Cell(col++).Value = dto.Description;
|
||||
row.Cell(col++).Value = dto.Publisher;
|
||||
row.Cell(col++).Value = dto.HasPdf;
|
||||
row.Cell(col++).Value = dto.SeriesNames;
|
||||
row.Cell(col++).Value = dto.SeriesOrder;
|
||||
row.Cell(col++).Value = dto.CommunityRatingOverall;
|
||||
row.Cell(col++).Value = dto.CommunityRatingPerformance;
|
||||
row.Cell(col++).Value = dto.CommunityRatingStory;
|
||||
row.Cell(col++).Value = dto.PictureId;
|
||||
row.Cell(col++).Value = dto.IsAbridged;
|
||||
row.Cell(col++).SetDate(dto.DatePublished, dateFormat);
|
||||
row.Cell(col++).Value = dto.CategoriesNames;
|
||||
row.Cell(col++).Value = dto.MyRatingOverall;
|
||||
row.Cell(col++).Value = dto.MyRatingPerformance;
|
||||
row.Cell(col++).Value = dto.MyRatingStory;
|
||||
row.Cell(col++).Value = dto.MyLibationTags;
|
||||
row.Cell(col++).Value = dto.BookStatus;
|
||||
row.Cell(col++).Value = dto.PdfStatus;
|
||||
row.Cell(col++).Value = dto.ContentType;
|
||||
row.Cell(col++).Value = dto.Language;
|
||||
row.Cell(col++).SetDate(dto.LastDownloaded, dateFormat);
|
||||
row.Cell(col++).Value = dto.LastDownloadedVersion;
|
||||
row.Cell(col++).Value = dto.IsFinished;
|
||||
row.Cell(col++).Value = dto.IsSpatial;
|
||||
row.Cell(col++).Value = dto.LastDownloadedFileVersion;
|
||||
row.Cell(col++).Value = dto.CodecString;
|
||||
row.Cell(col++).Value = dto.SampleRate;
|
||||
row.Cell(col++).Value = dto.ChannelCount;
|
||||
row.Cell(col++).Value = dto.BitRate;
|
||||
}
|
||||
|
||||
using var fileData = new System.IO.FileStream(saveFilePath, System.IO.FileMode.Create);
|
||||
workbook.Write(fileData);
|
||||
workbook.SaveAs(saveFilePath);
|
||||
}
|
||||
private static int createCell(NPOI.SS.UserModel.IRow row, int col, float? nullableFloat)

private static void SetDate(this IXLCell cell, DateTime? value, string dateFormat)
{
if (nullableFloat.HasValue)
row.CreateCell(col++).SetCellValue(nullableFloat.Value);
else
row.CreateCell(col++).SetCellValue("");
return col;
cell.Value = value;
cell.Style.DateFormat.Format = dateFormat;
}
}
}
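The surrounding hunk replaces NPOI with ClosedXML. A minimal ClosedXML sketch of the same cell-level patterns, a bold header cell, a date cell with an explicit display format, and SaveAs; the file name and values are illustrative only.

using System;
using ClosedXML.Excel;

static class XlsxSketch
{
    static void Main()
    {
        using var workbook = new XLWorkbook();
        var sheet = workbook.AddWorksheet("Library");

        // Bold header cell; ClosedXML rows and columns are 1-based.
        var header = sheet.Row(1).Cell(1);
        header.Value = "Date Added";
        header.Style.Font.Bold = true;

        // Date cell with an explicit display format, as SetDate does above.
        var dateCell = sheet.Row(2).Cell(1);
        dateCell.Value = DateTime.Now;
        dateCell.Style.DateFormat.Format = "MM/dd/yyyy HH:mm:ss";

        workbook.SaveAs("sketch.xlsx");
    }
}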
|
||||
|
||||
186
Source/ApplicationServices/RecordExporter.cs
Normal file
@@ -0,0 +1,186 @@
|
||||
using AudibleApi.Common;
|
||||
using ClosedXML.Excel;
|
||||
using CsvHelper;
|
||||
using DataLayer;
|
||||
using Newtonsoft.Json.Linq;
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Globalization;
|
||||
using System.Linq;
|
||||
|
||||
namespace ApplicationServices
|
||||
{
|
||||
public static class RecordExporter
|
||||
{
|
||||
public static void ToXlsx(string saveFilePath, IEnumerable<IRecord> records)
|
||||
{
|
||||
if (!records.Any())
|
||||
return;
|
||||
|
||||
using var workbook = new XLWorkbook();
|
||||
var worksheet = workbook.AddWorksheet("Records");
|
||||
|
||||
// headers
|
||||
var columns = new List<string>
|
||||
{
|
||||
nameof(Type.Name),
|
||||
nameof(IRecord.Created),
|
||||
nameof(IRecord.Start) + "_ms",
|
||||
};
|
||||
|
||||
if (records.OfType<IAnnotation>().Any())
|
||||
{
|
||||
columns.Add(nameof(IAnnotation.AnnotationId));
|
||||
columns.Add(nameof(IAnnotation.LastModified));
|
||||
}
|
||||
if (records.OfType<IRangeAnnotation>().Any())
|
||||
{
|
||||
columns.Add(nameof(IRangeAnnotation.End) + "_ms");
|
||||
columns.Add(nameof(IRangeAnnotation.Text));
|
||||
}
|
||||
if (records.OfType<Clip>().Any())
|
||||
columns.Add(nameof(Clip.Title));
|
||||
|
||||
int rowIndex = 1, col = 1;
|
||||
var headerRow = worksheet.Row(rowIndex++);
|
||||
foreach (var c in columns)
|
||||
{
|
||||
var headerCell = headerRow.Cell(col++);
|
||||
headerCell.Value = c;
|
||||
headerCell.Style.Font.Bold = true;
|
||||
}
|
||||
|
||||
var dateFormat = CultureInfo.CurrentCulture.DateTimeFormat.ShortDatePattern + " HH:mm:ss";
|
||||
|
||||
// Add data rows
|
||||
foreach (var record in records)
|
||||
{
|
||||
col = 1;
|
||||
var row = worksheet.Row(rowIndex++);
|
||||
|
||||
row.Cell(col++).Value = record.GetType().Name;
|
||||
row.Cell(col++).SetDate(record.Created.DateTime, dateFormat);
|
||||
row.Cell(col++).Value = record.Start.TotalMilliseconds;
|
||||
|
||||
if (record is IAnnotation annotation)
|
||||
{
|
||||
|
||||
row.Cell(col++).Value = annotation.AnnotationId;
|
||||
row.Cell(col++).SetDate(annotation.LastModified.DateTime, dateFormat);
|
||||
|
||||
if (annotation is IRangeAnnotation rangeAnnotation)
|
||||
{
|
||||
row.Cell(col++).Value = rangeAnnotation.End.TotalMilliseconds;
|
||||
row.Cell(col++).Value = rangeAnnotation.Text;
|
||||
|
||||
if (rangeAnnotation is Clip clip)
|
||||
row.Cell(col++).Value = clip.Title;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
workbook.SaveAs(saveFilePath);
|
||||
}
|
||||
|
||||
private static void SetDate(this IXLCell cell, DateTime? value, string dateFormat)
|
||||
{
|
||||
cell.Value = value;
|
||||
cell.Style.DateFormat.Format = dateFormat;
|
||||
}
|
||||
public static void ToJson(string saveFilePath, LibraryBook libraryBook, IEnumerable<IRecord> records)
|
||||
{
|
||||
if (!records.Any())
|
||||
return;
|
||||
|
||||
var recordsEx = extendRecords(records);
|
||||
|
||||
var recordsObj = new JObject
|
||||
{
|
||||
{ "title", libraryBook.Book.TitleWithSubtitle},
|
||||
{ "asin", libraryBook.Book.AudibleProductId},
|
||||
{ "exportTime", DateTime.Now},
|
||||
{ "records", JArray.FromObject(recordsEx) }
|
||||
};
|
||||
|
||||
System.IO.File.WriteAllText(saveFilePath, recordsObj.ToString(Newtonsoft.Json.Formatting.Indented));
|
||||
}
|
||||
|
||||
public static void ToCsv(string saveFilePath, IEnumerable<IRecord> records)
|
||||
{
|
||||
if (!records.Any())
|
||||
return;
|
||||
|
||||
using var writer = new System.IO.StreamWriter(saveFilePath);
|
||||
using var csv = new CsvWriter(writer, System.Globalization.CultureInfo.CurrentCulture);
|
||||
|
||||
//Write headers for the present record type that has the most properties
|
||||
if (records.OfType<Clip>().Any())
|
||||
csv.WriteHeader(typeof(ClipEx));
|
||||
else if (records.OfType<Note>().Any())
|
||||
csv.WriteHeader(typeof(NoteEx));
|
||||
else if (records.OfType<Bookmark>().Any())
|
||||
csv.WriteHeader(typeof(BookmarkEx));
|
||||
else
|
||||
csv.WriteHeader(typeof(LastHeardEx));
|
||||
|
||||
var recordsEx = extendRecords(records);
|
||||
|
||||
csv.NextRecord();
|
||||
csv.WriteRecords(recordsEx.OfType<ClipEx>());
|
||||
csv.WriteRecords(recordsEx.OfType<NoteEx>());
|
||||
csv.WriteRecords(recordsEx.OfType<BookmarkEx>());
|
||||
csv.WriteRecords(recordsEx.OfType<LastHeardEx>());
|
||||
}
|
||||
|
||||
private static IEnumerable<IRecordEx> extendRecords(IEnumerable<IRecord> records)
|
||||
=> records
|
||||
.Select<IRecord, IRecordEx>(
|
||||
r => r switch
|
||||
{
|
||||
Clip c => new ClipEx(nameof(Clip), c),
|
||||
Note n => new NoteEx(nameof(Note), n),
|
||||
Bookmark b => new BookmarkEx(nameof(Bookmark), b),
|
||||
LastHeard l => new LastHeardEx(nameof(LastHeard), l),
|
||||
_ => throw new InvalidOperationException(),
|
||||
});
|
||||
|
||||
|
||||
private interface IRecordEx { string Type { get; } }
|
||||
|
||||
private record LastHeardEx : LastHeard, IRecordEx
|
||||
{
|
||||
public string Type { get; }
|
||||
public LastHeardEx(string type, LastHeard original) : base(original)
|
||||
{
|
||||
Type = type;
|
||||
}
|
||||
}
|
||||
|
||||
private record BookmarkEx : Bookmark, IRecordEx
|
||||
{
|
||||
public string Type { get; }
|
||||
public BookmarkEx(string type, Bookmark original) : base(original)
|
||||
{
|
||||
Type = type;
|
||||
}
|
||||
}
|
||||
|
||||
private record NoteEx : Note, IRecordEx
|
||||
{
|
||||
public string Type { get; }
|
||||
public NoteEx(string type, Note original) : base(original)
|
||||
{
|
||||
Type = type;
|
||||
}
|
||||
}
|
||||
|
||||
private record ClipEx : Clip, IRecordEx
|
||||
{
|
||||
public string Type { get; }
|
||||
public ClipEx(string type, Clip original) : base(original)
|
||||
{
|
||||
Type = type;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
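A hedged sketch of the CsvHelper technique ToCsv uses above: write the header once for the widest record shape present, then append the records grouped by concrete type. The record types here are placeholders, not the AudibleApi record types.

using System.Collections.Generic;
using System.Globalization;
using System.IO;
using System.Linq;
using CsvHelper;

// Placeholder hierarchy: Wide has every column that Narrow has, plus one more.
public record Narrow(string Id);
public record Wide(string Id, string Text);

public static class PolymorphicCsvSketch
{
    public static void Main()
    {
        var records = new List<object> { new Wide("a", "note"), new Narrow("b") };

        using var writer = new StreamWriter("records.csv");
        using var csv = new CsvWriter(writer, CultureInfo.CurrentCulture);

        // Header comes from the widest type present, mirroring ToCsv above.
        csv.WriteHeader(typeof(Wide));
        csv.NextRecord();

        // Then each concrete type is written in its own pass.
        csv.WriteRecords(records.OfType<Wide>());
        csv.WriteRecords(records.OfType<Narrow>());
    }
}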
|
||||
@@ -1,5 +1,7 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using DataLayer;
|
||||
using LibationSearchEngine;
|
||||
|
||||
@@ -7,40 +9,12 @@ namespace ApplicationServices
|
||||
{
|
||||
public static class SearchEngineCommands
|
||||
{
|
||||
public static void FullReIndex(SearchEngine engine = null)
|
||||
{
|
||||
engine ??= new SearchEngine();
|
||||
var library = DbContexts.GetLibrary_Flat_NoTracking();
|
||||
engine.CreateNewIndex(library);
|
||||
}
|
||||
|
||||
public static SearchResultSet Search(string searchString) => performSearchEngineFunc_safe(e =>
|
||||
#region Search
|
||||
public static SearchResultSet Search(string searchString) => performSafeQuery(e =>
|
||||
e.Search(searchString)
|
||||
);
|
||||
|
||||
public static void UpdateBookTags(Book book) => performSearchEngineAction_safe(e =>
|
||||
e.UpdateTags(book.AudibleProductId, book.UserDefinedItem.Tags)
|
||||
);
|
||||
|
||||
public static void UpdateLiberatedStatus(Book book) => performSearchEngineAction_safe(e =>
|
||||
e.UpdateLiberatedStatus(book)
|
||||
);
|
||||
|
||||
private static void performSearchEngineAction_safe(Action<SearchEngine> action)
|
||||
{
|
||||
var engine = new SearchEngine();
|
||||
try
|
||||
{
|
||||
action(engine);
|
||||
}
|
||||
catch (FileNotFoundException)
|
||||
{
|
||||
FullReIndex(engine);
|
||||
action(engine);
|
||||
}
|
||||
}
|
||||
|
||||
private static T performSearchEngineFunc_safe<T>(Func<SearchEngine, T> func)
|
||||
private static T performSafeQuery<T>(Func<SearchEngine, T> func)
|
||||
{
|
||||
var engine = new SearchEngine();
|
||||
try
|
||||
@@ -49,9 +23,84 @@ namespace ApplicationServices
|
||||
}
|
||||
catch (FileNotFoundException)
|
||||
{
|
||||
FullReIndex(engine);
|
||||
fullReIndex(engine);
|
||||
return func(engine);
|
||||
}
|
||||
}
|
||||
#endregion
|
||||
|
||||
public static event EventHandler SearchEngineUpdated;
|
||||
|
||||
#region Update
|
||||
private static bool isUpdating;
|
||||
|
||||
public static void UpdateBooks(IEnumerable<LibraryBook> books)
|
||||
{
|
||||
// Semi-arbitrary. At some point it's more worth it to do a full re-index than to do one offs.
|
||||
// I did not benchmark before choosing the number here
|
||||
if (books.Count() > 15)
|
||||
FullReIndex();
|
||||
else
|
||||
{
|
||||
foreach (var book in books)
|
||||
UpdateUserDefinedItems(book);
|
||||
}
|
||||
}
|
||||
|
||||
public static void FullReIndex() => performSafeCommand(fullReIndex);
|
||||
public static void FullReIndex(List<LibraryBook> libraryBooks)
|
||||
=> performSafeCommand(se => fullReIndex(se, libraryBooks.WithoutParents()));
|
||||
|
||||
internal static void UpdateUserDefinedItems(LibraryBook book) => performSafeCommand(e =>
|
||||
{
|
||||
e.UpdateLiberatedStatus(book);
|
||||
e.UpdateTags(book.Book.AudibleProductId, book.Book.UserDefinedItem.Tags);
|
||||
e.UpdateUserRatings(book);
|
||||
}
|
||||
);
|
||||
|
||||
private static void performSafeCommand(Action<SearchEngine> action)
|
||||
{
|
||||
try
|
||||
{
|
||||
update(action);
|
||||
}
|
||||
catch (FileNotFoundException)
|
||||
{
|
||||
fullReIndex(new SearchEngine());
|
||||
update(action);
|
||||
}
|
||||
}
|
||||
|
||||
private static void update(Action<SearchEngine> action)
|
||||
{
|
||||
if (action is null)
|
||||
return;
|
||||
|
||||
// support nesting incl recursion
|
||||
var prevIsUpdating = isUpdating;
|
||||
try
|
||||
{
|
||||
isUpdating = true;
|
||||
|
||||
action(new SearchEngine());
|
||||
if (!prevIsUpdating)
|
||||
SearchEngineUpdated?.Invoke(null, null);
|
||||
}
|
||||
finally
|
||||
{
|
||||
isUpdating = prevIsUpdating;
|
||||
}
|
||||
}
|
||||
|
||||
private static void fullReIndex(SearchEngine engine)
|
||||
{
|
||||
var library = DbContexts.GetLibrary_Flat_NoTracking();
|
||||
fullReIndex(engine, library);
|
||||
}
|
||||
|
||||
private static void fullReIndex(SearchEngine engine, IEnumerable<LibraryBook> libraryBooks)
|
||||
=> engine.CreateNewIndex(libraryBooks);
|
||||
#endregion
|
||||
}
|
||||
}
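The update() helper above saves and restores isUpdating so that nested or recursive updates raise SearchEngineUpdated only once, from the outermost call. A stripped-down sketch of that guard pattern with generic names, not Libation's:

using System;

static class ReentrancyGuardSketch
{
    static bool isUpdating;
    public static event EventHandler? Updated;

    public static void Update(Action action)
    {
        var prevIsUpdating = isUpdating;   // remember nesting state
        try
        {
            isUpdating = true;
            action();

            // Only the outermost call announces the change.
            if (!prevIsUpdating)
                Updated?.Invoke(null, EventArgs.Empty);
        }
        finally
        {
            isUpdating = prevIsUpdating;   // restore for the caller above us
        }
    }

    static void Main()
    {
        Updated += (_, _) => Console.WriteLine("updated (raised once)");
        Update(() => Update(() => Console.WriteLine("inner work")));
    }
}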
|
||||
|
||||
@@ -18,68 +18,64 @@ namespace AudibleUtilities
|
||||
public string AccountId { get; }
|
||||
|
||||
// user-friendly, non-canonical name. mutable
|
||||
private string _accountName;
|
||||
public string AccountName
|
||||
{
|
||||
get => _accountName;
|
||||
get => field;
|
||||
set
|
||||
{
|
||||
if (string.IsNullOrWhiteSpace(value))
|
||||
return;
|
||||
var v = value.Trim();
|
||||
if (v == _accountName)
|
||||
if (v == field)
|
||||
return;
|
||||
_accountName = v;
|
||||
field = v;
|
||||
update();
|
||||
}
|
||||
}
|
||||
|
||||
// whether to include this account when scanning libraries.
|
||||
// technically this is an app setting; not an attribute of account. but since it's managed with accounts, it makes sense to put this exception-to-the-rule here
|
||||
private bool _libraryScan = true;
|
||||
public bool LibraryScan
|
||||
{
|
||||
get => _libraryScan;
|
||||
get => field;
|
||||
set
|
||||
{
|
||||
if (value == _libraryScan)
|
||||
if (value == field)
|
||||
return;
|
||||
_libraryScan = value;
|
||||
field = value;
|
||||
update();
|
||||
}
|
||||
}
|
||||
|
||||
private string _decryptKey = "";
|
||||
/// <summary>aka: activation bytes</summary>
|
||||
public string DecryptKey
|
||||
{
|
||||
get => _decryptKey;
|
||||
get => field ?? "";
|
||||
set
|
||||
{
|
||||
var v = (value ?? "").Trim();
|
||||
if (v == _decryptKey)
|
||||
if (v == field)
|
||||
return;
|
||||
_decryptKey = v;
|
||||
field = v;
|
||||
update();
|
||||
}
|
||||
}
|
||||
|
||||
private Identity _identity;
|
||||
public Identity IdentityTokens
|
||||
{
|
||||
get => _identity;
|
||||
get => field;
|
||||
set
|
||||
{
|
||||
if (_identity is null && value is null)
|
||||
if (field is null && value is null)
|
||||
return;
|
||||
|
||||
if (_identity is not null)
|
||||
_identity.Updated -= update;
|
||||
if (field is not null)
|
||||
field.Updated -= update;
|
||||
|
||||
if (value is not null)
|
||||
value.Updated += update;
|
||||
|
||||
_identity = value;
|
||||
field = value;
|
||||
update();
|
||||
}
|
||||
}
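This hunk replaces the hand-written backing fields with field-backed properties, where the contextual field keyword names the compiler-generated backing field inside the accessors. A minimal, hedged example of the shape (requires a compiler and language version that support the field keyword):

public class Example
{
    // 'field' is the compiler-generated backing storage; no explicit _name field is declared.
    public string Name
    {
        get => field;
        set
        {
            var v = (value ?? "").Trim();
            if (v == field)
                return;
            field = v;
        }
    }
}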
|
||||
|
||||
@@ -6,6 +6,7 @@ using AudibleApi.Authorization;
|
||||
using Dinah.Core;
|
||||
using Newtonsoft.Json;
|
||||
|
||||
#nullable enable
|
||||
namespace AudibleUtilities
|
||||
{
|
||||
// 'AccountsSettings' is intentionally NOT IEnumerable<> so that properties can be added/extended
|
||||
@@ -14,8 +15,8 @@ namespace AudibleUtilities
|
||||
// JSON : Array (properties on the collection will not be serialized)
|
||||
public class AccountsSettings : IUpdatable
|
||||
{
|
||||
public event EventHandler Updated;
|
||||
private void update(object sender = null, EventArgs e = null)
|
||||
public event EventHandler? Updated;
|
||||
private void update(object? sender = null, EventArgs? e = null)
|
||||
{
|
||||
foreach (var account in Accounts)
|
||||
validate(account);
|
||||
@@ -47,12 +48,28 @@ namespace AudibleUtilities
|
||||
update_no_validate();
|
||||
}
|
||||
}
|
||||
|
||||
private string? _cdm;
|
||||
[JsonProperty]
|
||||
public string? Cdm
|
||||
{
|
||||
get => _cdm;
|
||||
set
|
||||
{
|
||||
if (value is null)
|
||||
return;
|
||||
|
||||
_cdm = value;
|
||||
update_no_validate();
|
||||
}
|
||||
}
|
||||
|
||||
[JsonIgnore]
|
||||
public IReadOnlyList<Account> Accounts => _accounts_json.AsReadOnly();
|
||||
#endregion
|
||||
|
||||
#region de/serialize
|
||||
public static AccountsSettings FromJson(string json)
|
||||
public static AccountsSettings? FromJson(string json)
|
||||
=> JsonConvert.DeserializeObject<AccountsSettings>(json, Identity.GetJsonSerializerSettings());
|
||||
|
||||
public string ToJson(Formatting formatting = Formatting.Indented)
|
||||
@@ -91,7 +108,7 @@ namespace AudibleUtilities
|
||||
account.Updated += update;
|
||||
}
|
||||
|
||||
public Account GetAccount(string accountId, string locale)
|
||||
public Account? GetAccount(string accountId, string? locale)
|
||||
{
|
||||
if (locale is null)
|
||||
return null;
|
||||
|
||||
@@ -1,316 +1,293 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using System.Threading.Channels;
|
||||
using System.Threading.Tasks;
|
||||
using System.Diagnostics;
|
||||
using AudibleApi;
|
||||
using AudibleApi.Common;
|
||||
using Dinah.Core;
|
||||
using Polly;
|
||||
using Polly.Retry;
|
||||
using System.Threading;
|
||||
using LibationFileManager;
|
||||
|
||||
#nullable enable
|
||||
namespace AudibleUtilities
|
||||
{
|
||||
/// <summary>USE THIS from within Libation. It wraps the call with correct JSONPath</summary>
|
||||
public class ApiExtended
|
||||
{
|
||||
public static Func<Account, ILoginChoiceEager>? LoginChoiceFactory { get; set; }
|
||||
public Api Api { get; private set; }
|
||||
|
||||
private const int MaxConcurrency = 10;
|
||||
private const int BatchSize = 50;
|
||||
|
||||
private ApiExtended(Api api) => Api = api;
|
||||
|
||||
/// <summary>Get api from existing tokens else login with 'eager' choice. External browser url is provided. Response can be external browser login or continuing with native api callbacks.</summary>
|
||||
public static async Task<ApiExtended> CreateAsync(Account account, ILoginChoiceEager loginChoiceEager)
|
||||
{
|
||||
Serilog.Log.Logger.Information("{@DebugInfo}", new
|
||||
{
|
||||
LoginType = nameof(ILoginChoiceEager),
|
||||
Account = account?.MaskedLogEntry ?? "[null]",
|
||||
LocaleName = account?.Locale?.Name
|
||||
});
|
||||
|
||||
var api = await EzApiCreator.GetApiAsync(
|
||||
loginChoiceEager,
|
||||
account.Locale,
|
||||
AudibleApiStorage.AccountsSettingsFile,
|
||||
account.GetIdentityTokensJsonPath());
|
||||
return new ApiExtended(api);
|
||||
}
|
||||
|
||||
/// <summary>Get api from existing tokens else login with native api callbacks.</summary>
|
||||
public static async Task<ApiExtended> CreateAsync(Account account, ILoginCallback loginCallback)
|
||||
{
|
||||
Serilog.Log.Logger.Information("{@DebugInfo}", new
|
||||
{
|
||||
LoginType = nameof(ILoginCallback),
|
||||
Account = account?.MaskedLogEntry ?? "[null]",
|
||||
LocaleName = account?.Locale?.Name
|
||||
});
|
||||
|
||||
var api = await EzApiCreator.GetApiAsync(
|
||||
loginCallback,
|
||||
account.Locale,
|
||||
AudibleApiStorage.AccountsSettingsFile,
|
||||
account.GetIdentityTokensJsonPath());
|
||||
return new ApiExtended(api);
|
||||
}
|
||||
|
||||
/// <summary>Get api from existing tokens else login with external browser</summary>
|
||||
public static async Task<ApiExtended> CreateAsync(Account account, ILoginExternal loginExternal)
|
||||
{
|
||||
Serilog.Log.Logger.Information("{@DebugInfo}", new
|
||||
{
|
||||
LoginType = nameof(ILoginExternal),
|
||||
Account = account?.MaskedLogEntry ?? "[null]",
|
||||
LocaleName = account?.Locale?.Name
|
||||
});
|
||||
|
||||
var api = await EzApiCreator.GetApiAsync(
|
||||
loginExternal,
|
||||
account.Locale,
|
||||
AudibleApiStorage.AccountsSettingsFile,
|
||||
account.GetIdentityTokensJsonPath());
|
||||
return new ApiExtended(api);
|
||||
}
|
||||
|
||||
/// <summary>Get api from existing tokens. Assumes you have valid login tokens. Else exception</summary>
|
||||
public static async Task<ApiExtended> CreateAsync(Account account)
|
||||
{
|
||||
ArgumentValidator.EnsureNotNull(account, nameof(account));
|
||||
ArgumentValidator.EnsureNotNull(account.AccountId, nameof(account.AccountId));
|
||||
ArgumentValidator.EnsureNotNull(account.Locale, nameof(account.Locale));
|
||||
|
||||
Serilog.Log.Logger.Information("{@DebugInfo}", new
|
||||
try
|
||||
{
|
||||
AccountMaskedLogEntry = account.MaskedLogEntry
|
||||
});
|
||||
Serilog.Log.Logger.Information("{@DebugInfo}", new
|
||||
{
|
||||
AccountMaskedLogEntry = account.MaskedLogEntry
|
||||
});
|
||||
|
||||
return await CreateAsync(account.AccountId, account.Locale.Name);
|
||||
}
|
||||
|
||||
/// <summary>Get api from existing tokens. Assumes you have valid login tokens. Else exception</summary>
|
||||
public static async Task<ApiExtended> CreateAsync(string username, string localeName)
|
||||
{
|
||||
Serilog.Log.Logger.Information("{@DebugInfo}", new
|
||||
var api = await EzApiCreator.GetApiAsync(
|
||||
account.Locale,
|
||||
AudibleApiStorage.AccountsSettingsFile,
|
||||
account.GetIdentityTokensJsonPath());
|
||||
return new ApiExtended(api);
|
||||
}
|
||||
catch
|
||||
{
|
||||
Username = username.ToMask(),
|
||||
LocaleName = localeName,
|
||||
});
|
||||
if (LoginChoiceFactory is null)
|
||||
throw new InvalidOperationException($"The UI module must first set {nameof(LoginChoiceFactory)} before attempting to create the api");
|
||||
|
||||
var api = await EzApiCreator.GetApiAsync(
|
||||
Localization.Get(localeName),
|
||||
Serilog.Log.Logger.Information("{@DebugInfo}", new
|
||||
{
|
||||
LoginType = nameof(ILoginChoiceEager),
|
||||
Account = account.MaskedLogEntry ?? "[null]",
|
||||
LocaleName = account.Locale?.Name
|
||||
});
|
||||
|
||||
var api = await EzApiCreator.GetApiAsync(
|
||||
LoginChoiceFactory(account),
|
||||
account.Locale,
|
||||
AudibleApiStorage.AccountsSettingsFile,
|
||||
AudibleApiStorage.GetIdentityTokensJsonPath(username, localeName));
|
||||
return new ApiExtended(api);
|
||||
}
|
||||
account.GetIdentityTokensJsonPath());
|
||||
|
||||
return new ApiExtended(api);
|
||||
}
|
||||
}
|
||||
|
||||
private static AsyncRetryPolicy policy { get; }
|
||||
= Policy.Handle<Exception>()
|
||||
// 2 retries == 3 total
|
||||
.RetryAsync(2);
|
||||
|
||||
public Task<List<Item>> GetLibraryValidatedAsync(LibraryOptions libraryOptions, bool importEpisodes = true)
|
||||
public Task<List<Item>> GetLibraryValidatedAsync(LibraryOptions libraryOptions)
|
||||
{
|
||||
// bug on audible's side. the 1st time after a long absence, a query to get library will return without titles or authors. a subsequent identical query will be successful. this is true whether or not tokens are refreshed
|
||||
// worse, this 1st dummy call doesn't seem to help:
|
||||
// var page = await api.GetLibraryAsync(new AudibleApi.LibraryOptions { NumberOfResultPerPage = 1, PageNumber = 1, PurchasedAfter = DateTime.Now.AddYears(-20), ResponseGroups = AudibleApi.LibraryOptions.ResponseGroupOptions.ALL_OPTIONS });
|
||||
// i don't want to incur the cost of making a full dummy call every time because it fails sometimes
|
||||
return policy.ExecuteAsync(() => getItemsAsync(libraryOptions, importEpisodes));
|
||||
return policy.ExecuteAsync(() => getItemsAsync(libraryOptions));
|
||||
}
|
||||
|
||||
private async Task<List<Item>> getItemsAsync(LibraryOptions libraryOptions, bool importEpisodes)
|
||||
private async Task<List<Item>> getItemsAsync(LibraryOptions libraryOptions)
|
||||
{
|
||||
var items = new List<Item>();
|
||||
#if DEBUG
|
||||
//// this will not work for multi accounts
|
||||
//var library_json = "library.json";
|
||||
//library_json = System.IO.Path.GetFullPath(library_json);
|
||||
//if (System.IO.File.Exists(library_json))
|
||||
//{
|
||||
// items = AudibleApi.Common.Converter.FromJson<List<Item>>(System.IO.File.ReadAllText(library_json));
|
||||
//}
|
||||
#endif
|
||||
Serilog.Log.Logger.Debug("Beginning library scan.");
|
||||
|
||||
Serilog.Log.Logger.Debug("Begin initial library scan");
|
||||
List<Item> items = new();
|
||||
var sw = Stopwatch.StartNew();
|
||||
var totalTime = TimeSpan.Zero;
|
||||
using var semaphore = new SemaphoreSlim(MaxConcurrency);
|
||||
|
||||
if (!items.Any())
|
||||
items = await Api.GetAllLibraryItemsAsync(libraryOptions);
|
||||
var episodeChannel = Channel.CreateUnbounded<string>(new UnboundedChannelOptions { SingleReader = true, SingleWriter = true });
|
||||
var batchReaderTask = readAllAsinsAsync(episodeChannel.Reader, semaphore);
|
||||
|
||||
Serilog.Log.Logger.Debug("Initial library scan complete. Begin episode scan");
|
||||
|
||||
await manageEpisodesAsync(items, importEpisodes);
|
||||
|
||||
Serilog.Log.Logger.Debug("Episode scan complete");
|
||||
|
||||
#if DEBUG
|
||||
//System.IO.File.WriteAllText(library_json, AudibleApi.Common.Converter.ToJson(items));
|
||||
#endif
|
||||
|
||||
var validators = new List<IValidator>();
|
||||
validators.AddRange(getValidators());
|
||||
foreach (var v in validators)
|
||||
//Scan the library for all added books.
|
||||
//Get relationship asins from episode-type items and write them to episodeChannel where they will be batched and queried.
|
||||
await foreach (var itemsBatch in Api.GetLibraryItemsPagesAsync(libraryOptions, BatchSize, semaphore))
|
||||
{
|
||||
var exceptions = v.Validate(items);
|
||||
if (exceptions is not null && exceptions.Any())
|
||||
throw new AggregateException(exceptions);
|
||||
if (Configuration.Instance.ImportEpisodes)
|
||||
{
|
||||
var episodes = itemsBatch.Where(i => i.IsEpisodes).ToList();
|
||||
var series = itemsBatch.Where(i => i.IsSeriesParent).ToList();
|
||||
|
||||
var parentAsins = episodes
|
||||
.SelectMany(i => i.Relationships)
|
||||
.Where(r => r.RelationshipToProduct == RelationshipToProduct.Parent)
|
||||
.Select(r => r.Asin);
|
||||
|
||||
var episodeAsins = series
|
||||
.SelectMany(i => i.Relationships)
|
||||
.Where(r => r.RelationshipToProduct == RelationshipToProduct.Child && r.RelationshipType == RelationshipType.Episode)
|
||||
.Select(r => r.Asin);
|
||||
|
||||
foreach (var asin in parentAsins.Concat(episodeAsins))
|
||||
episodeChannel.Writer.TryWrite(asin);
|
||||
|
||||
items.AddRange(episodes);
|
||||
items.AddRange(series);
|
||||
}
|
||||
|
||||
var booksInBatch
|
||||
= itemsBatch
|
||||
.Where(i => !i.IsSeriesParent && !i.IsEpisodes)
|
||||
.Where(i => i.IsAyce is not true || Configuration.Instance.ImportPlusTitles);
|
||||
items.AddRange(booksInBatch);
|
||||
}
|
||||
|
||||
sw.Stop();
|
||||
totalTime += sw.Elapsed;
|
||||
Serilog.Log.Logger.Debug("Library scan complete after {elappsed_ms} ms. Found {count} books and series. Waiting on series episode scans to complete.", sw.ElapsedMilliseconds, items.Count);
|
||||
sw.Restart();
|
||||
|
||||
//Signal that we're done adding asins
|
||||
episodeChannel.Writer.Complete();
|
||||
|
||||
//Wait for all episodes/parents to be retrieved
|
||||
var allEps = await batchReaderTask;
|
||||
|
||||
sw.Stop();
|
||||
totalTime += sw.Elapsed;
|
||||
Serilog.Log.Logger.Debug("Episode scan complete after {elappsed_ms} ms. Found {count} episodes and series .", sw.ElapsedMilliseconds, allEps.Count);
|
||||
sw.Restart();
|
||||
|
||||
Serilog.Log.Logger.Debug("Begin indexing series episodes");
|
||||
items.AddRange(allEps);
|
||||
|
||||
//Set the Item.Series info for episodes and parents.
|
||||
foreach (var parent in items.Where(i => i.IsSeriesParent))
|
||||
{
|
||||
var children = items.Where(i => i.IsEpisodes && i.Relationships.Any(r => r.Asin == parent.Asin));
|
||||
SetSeries(parent, children);
|
||||
}
|
||||
|
||||
int orphansRemoved = items.RemoveAll(i => (i.IsEpisodes || i.IsSeriesParent) && i.Series is null);
|
||||
if (orphansRemoved > 0)
|
||||
Serilog.Log.Debug("{orphansRemoved} podcast orphans not imported", orphansRemoved);
|
||||
|
||||
sw.Stop();
|
||||
totalTime += sw.Elapsed;
|
||||
Serilog.Log.Logger.Information("Completed indexing series episodes after {elappsed_ms} ms.", sw.ElapsedMilliseconds);
|
||||
Serilog.Log.Logger.Information($"Completed library scan in {totalTime.TotalMilliseconds:F0} ms.");
|
||||
|
||||
var allExceptions = IValidator.GetAllValidators().SelectMany(v => v.Validate(items)).ToList();
|
||||
if (allExceptions?.Count > 0)
|
||||
throw new ImportValidationException(items, allExceptions);
|
||||
|
||||
return items;
|
||||
}
|
||||
|
||||
#region episodes and podcasts
|
||||
private async Task manageEpisodesAsync(List<Item> items, bool importEpisodes)
|
||||
|
||||
/// <summary>
|
||||
/// Read asins from the channel and request catalog item info in batches of <see cref="BatchSize"/>. Blocks until <paramref name="channelReader"/> is closed.
|
||||
/// </summary>
|
||||
/// <param name="channelReader">Input asins to batch</param>
|
||||
/// <param name="semaphore">Shared semaphore to limit concurrency</param>
|
||||
/// <returns>All <see cref="Item"/>s of asins written to the channel.</returns>
|
||||
private async Task<List<Item>> readAllAsinsAsync(ChannelReader<string> channelReader, SemaphoreSlim semaphore)
|
||||
{
|
||||
// add podcasts and episodes to list. If fail, don't let it de-rail the rest of the import
|
||||
int batchNum = 1;
|
||||
List<Task<List<Item>>> getTasks = new();
|
||||
|
||||
while (await channelReader.WaitToReadAsync())
|
||||
{
|
||||
List<string> asins = new();
|
||||
|
||||
while (asins.Count < BatchSize && await channelReader.WaitToReadAsync())
|
||||
{
|
||||
var asin = await channelReader.ReadAsync();
|
||||
|
||||
if (!asins.Contains(asin))
|
||||
asins.Add(asin);
|
||||
}
|
||||
await semaphore.WaitAsync();
|
||||
getTasks.Add(getProductsAsync(batchNum++, asins, semaphore));
|
||||
}
|
||||
|
||||
var completed = await Task.WhenAll(getTasks);
|
||||
//We only want Series parents and Series episodes. Exclude other relationship types (e.g. 'season')
|
||||
return completed.SelectMany(l => l).Where(i => i.IsSeriesParent || i.IsEpisodes).ToList();
|
||||
}
|
||||
|
||||
private async Task<List<Item>> getProductsAsync(int batchNum, List<string> asins, SemaphoreSlim semaphore)
|
||||
{
|
||||
Serilog.Log.Logger.Debug($"Batch {batchNum} Begin: Fetching {asins.Count} asins");
|
||||
try
|
||||
{
|
||||
// get parents
|
||||
var parents = items.Where(i => i.IsEpisodes).ToList();
|
||||
#if DEBUG
|
||||
//var parentsDebug = parents.Select(i => i.ToJson()).Aggregate((a, b) => $"{a}\r\n\r\n{b}");
|
||||
//System.IO.File.WriteAllText("parents.json", parentsDebug);
|
||||
#endif
|
||||
var sw = Stopwatch.StartNew();
|
||||
var items = await Api.GetCatalogProductsAsync(asins, CatalogOptions.ResponseGroupOptions.Rating | CatalogOptions.ResponseGroupOptions.Media
|
||||
| CatalogOptions.ResponseGroupOptions.Relationships | CatalogOptions.ResponseGroupOptions.ProductDesc
|
||||
| CatalogOptions.ResponseGroupOptions.Contributors | CatalogOptions.ResponseGroupOptions.ProvidedReview
|
||||
| CatalogOptions.ResponseGroupOptions.ProductPlans | CatalogOptions.ResponseGroupOptions.Series
|
||||
| CatalogOptions.ResponseGroupOptions.CategoryLadders | CatalogOptions.ResponseGroupOptions.ProductExtendedAttrs);
|
||||
sw.Stop();
|
||||
|
||||
if (!parents.Any())
|
||||
return;
|
||||
Serilog.Log.Logger.Debug($"Batch {batchNum} End: Retrieved {items.Count} items in {sw.ElapsedMilliseconds} ms");
|
||||
|
||||
Serilog.Log.Logger.Information($"{parents.Count} series of shows/podcasts found");
|
||||
|
||||
// remove episode parents. even if the following stuff fails, these will still be removed from the collection
|
||||
items.RemoveAll(i => i.IsEpisodes);
|
||||
|
||||
if (importEpisodes)
|
||||
{
|
||||
// add children
|
||||
var children = await getEpisodesAsync(parents);
|
||||
Serilog.Log.Logger.Information($"{children.Count} episodes of shows/podcasts found");
|
||||
items.AddRange(children);
|
||||
}
|
||||
return items;
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
Serilog.Log.Logger.Error(ex, "Error adding podcasts and episodes");
|
||||
Serilog.Log.Logger.Error(ex, "Error fetching batch of episodes. {@DebugInfo}", new { asins });
|
||||
throw;
|
||||
}
|
||||
finally { semaphore.Release(); }
|
||||
}
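A compact, self-contained sketch of the producer/consumer shape used above, with plain strings standing in for asins and Items: the scan writes ids into an unbounded Channel, a reader task groups them into fixed-size batches, and a SemaphoreSlim caps how many batch fetches run concurrently. Names and sizes below are illustrative.

using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading;
using System.Threading.Channels;
using System.Threading.Tasks;

static class ChannelBatchSketch
{
    const int BatchSize = 3, MaxConcurrency = 2;

    static async Task Main()
    {
        var channel = Channel.CreateUnbounded<string>(
            new UnboundedChannelOptions { SingleReader = true, SingleWriter = true });
        using var semaphore = new SemaphoreSlim(MaxConcurrency);

        var readerTask = ReadAllAsync(channel.Reader, semaphore);

        // Producer: write ids as they are discovered, then signal completion.
        foreach (var id in Enumerable.Range(1, 10).Select(i => $"id{i}"))
            channel.Writer.TryWrite(id);
        channel.Writer.Complete();

        var all = await readerTask;
        Console.WriteLine($"fetched {all.Count} ids in batches of up to {BatchSize}");
    }

    static async Task<List<string>> ReadAllAsync(ChannelReader<string> reader, SemaphoreSlim semaphore)
    {
        var fetchTasks = new List<Task<List<string>>>();
        while (await reader.WaitToReadAsync())
        {
            // Drain up to BatchSize ids into one batch.
            var batch = new List<string>();
            while (batch.Count < BatchSize && reader.TryRead(out var id))
                batch.Add(id);

            await semaphore.WaitAsync();           // cap concurrent fetches
            fetchTasks.Add(FetchAsync(batch, semaphore));
        }
        var results = await Task.WhenAll(fetchTasks);
        return results.SelectMany(r => r).ToList();
    }

    static async Task<List<string>> FetchAsync(List<string> batch, SemaphoreSlim semaphore)
    {
        try
        {
            await Task.Delay(10);                  // stand-in for the catalog call
            return batch;
        }
        finally { semaphore.Release(); }
    }
}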
|
||||
|
||||
private async Task<List<Item>> getEpisodesAsync(List<Item> parents)
|
||||
public static void SetSeries(Item parent, IEnumerable<Item> children)
|
||||
{
|
||||
var results = new List<Item>();
|
||||
ArgumentValidator.EnsureNotNull(parent, nameof(parent));
|
||||
ArgumentValidator.EnsureNotNull(children, nameof(children));
|
||||
|
||||
foreach (var parent in parents)
|
||||
//A series parent will always have exactly 1 Series
|
||||
parent.Series = new[]
|
||||
{
|
||||
var children = await getEpisodeChildrenAsync(parent);
|
||||
|
||||
// actual individual episode, not the parent of a series.
|
||||
// for now I'm keeping it inside this method since it fits the work flow, incl. importEpisodes logic
|
||||
if (!children.Any())
|
||||
new Series
|
||||
{
|
||||
results.Add(parent);
|
||||
continue;
|
||||
Asin = parent.Asin,
|
||||
Sequence = "-1",
|
||||
Title = parent.TitleWithSubtitle
|
||||
}
|
||||
};
|
||||
|
||||
foreach (var child in children)
|
||||
if (parent.PurchaseDate == default)
|
||||
{
|
||||
parent.PurchaseDate = children.Select(c => c.PurchaseDate).Order().FirstOrDefault(d => d != default);
|
||||
|
||||
if (parent.PurchaseDate == default)
|
||||
{
|
||||
// use parent's 'DateAdded'. DateAdded is just a convenience prop for: PurchaseDate.UtcDateTime
|
||||
child.PurchaseDate = parent.PurchaseDate;
|
||||
// parent is essentially a series
|
||||
child.Series = new Series[]
|
||||
{
|
||||
new Series
|
||||
{
|
||||
Asin = parent.Asin,
|
||||
// This should properly be Single() not FirstOrDefault(), but FirstOrDefault is defensive for malformed data from audible
|
||||
Sequence = parent.Relationships.FirstOrDefault(r => r.Asin == child.Asin).Sort.ToString(),
|
||||
Title = parent.TitleWithSubtitle
|
||||
}
|
||||
};
|
||||
// overload (read: abuse) IsEpisodes flag
|
||||
child.Relationships = new Relationship[]
|
||||
{
|
||||
new Relationship
|
||||
{
|
||||
RelationshipToProduct = RelationshipToProduct.Child,
|
||||
RelationshipType = RelationshipType.Episode
|
||||
}
|
||||
};
|
||||
Serilog.Log.Logger.Warning("{series} doesn't have a purchase date. Using UtcNow", parent);
|
||||
parent.PurchaseDate = DateTimeOffset.UtcNow;
|
||||
}
|
||||
|
||||
results.AddRange(children);
|
||||
}
|
||||
|
||||
return results;
|
||||
}
|
||||
|
||||
private async Task<List<Item>> getEpisodeChildrenAsync(Item parent)
|
||||
{
|
||||
var childrenIds = parent.Relationships
|
||||
.Where(r => r.RelationshipToProduct == RelationshipToProduct.Child && r.RelationshipType == RelationshipType.Episode)
|
||||
.Select(r => r.Asin)
|
||||
.ToList();
|
||||
|
||||
// fetch children in batches
|
||||
const int batchSize = 20;
|
||||
|
||||
var results = new List<Item>();
|
||||
|
||||
for (var i = 1; ; i++)
|
||||
int lastEpNum = -1, dupeCount = 0;
|
||||
foreach (var child in children.OrderBy(i => i.EpisodeNumber).ThenBy(i => i.PublicationDateTime))
|
||||
{
|
||||
var idBatch = childrenIds.Skip((i - 1) * batchSize).Take(batchSize).ToList();
|
||||
if (!idBatch.Any())
|
||||
break;
|
||||
|
||||
List<Item> childrenBatch;
|
||||
try
|
||||
string sequence;
|
||||
if (child.EpisodeNumber is null)
|
||||
{
|
||||
childrenBatch = await Api.GetCatalogProductsAsync(idBatch, CatalogOptions.ResponseGroupOptions.ALL_OPTIONS);
|
||||
#if DEBUG
|
||||
//var childrenBatchDebug = childrenBatch.Select(i => i.ToJson()).Aggregate((a, b) => $"{a}\r\n\r\n{b}");
|
||||
//System.IO.File.WriteAllText($"children of {parent.Asin}.json", childrenBatchDebug);
|
||||
#endif
|
||||
// This should properly be Single() not FirstOrDefault(), but FirstOrDefault is defensive for malformed data from audible
|
||||
sequence = parent.Relationships.FirstOrDefault(r => r.Asin == child.Asin)?.Sort?.ToString() ?? "0";
|
||||
}
|
||||
catch (Exception ex)
|
||||
else
|
||||
{
|
||||
Serilog.Log.Logger.Error(ex, "Error fetching batch of episodes. {@DebugInfo}", new
|
||||
//multipart episodes may have the same episode number
|
||||
if (child.EpisodeNumber == lastEpNum)
|
||||
dupeCount++;
|
||||
else
|
||||
lastEpNum = child.EpisodeNumber.Value;
|
||||
|
||||
sequence = (lastEpNum + dupeCount).ToString();
|
||||
}
|
||||
|
||||
// use parent's 'DateAdded'. DateAdded is just a convenience prop for: PurchaseDate.UtcDateTime
|
||||
child.PurchaseDate = parent.PurchaseDate;
|
||||
// parent is essentially a series
|
||||
child.Series = new[]
|
||||
{
|
||||
new Series
|
||||
{
|
||||
ParentId = parent.Asin,
|
||||
ParentTitle = parent.Title,
|
||||
BatchNumber = i,
|
||||
ChildIdBatch = idBatch
|
||||
});
|
||||
throw;
|
||||
}
|
||||
|
||||
Serilog.Log.Logger.Debug($"Batch {i}: {childrenBatch.Count} results");
|
||||
// the service returned no results. probably indicates an error. stop running batches
|
||||
if (!childrenBatch.Any())
|
||||
break;
|
||||
|
||||
results.AddRange(childrenBatch);
|
||||
Asin = parent.Asin,
|
||||
Sequence = sequence,
|
||||
Title = parent.TitleWithSubtitle
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
Serilog.Log.Logger.Debug("Parent episodes/podcasts series. Children found. {@DebugInfo}", new
|
||||
{
|
||||
ParentId = parent.Asin,
|
||||
ParentTitle = parent.Title,
|
||||
ChildCount = childrenIds.Count
|
||||
});
|
||||
|
||||
if (childrenIds.Count != results.Count)
|
||||
{
|
||||
var ex = new ApplicationException($"Mis-match: Children defined by parent={childrenIds.Count}. Children returned by batches={results.Count}");
|
||||
Serilog.Log.Logger.Error(ex, "Quantity of series episodes defined by parent does not match quantity returned by batch fetching.");
|
||||
throw ex;
|
||||
}
|
||||
|
||||
return results;
|
||||
}
|
||||
#endregion
|
||||
|
||||
private static List<IValidator> getValidators()
|
||||
{
|
||||
var type = typeof(IValidator);
|
||||
var types = AppDomain.CurrentDomain.GetAssemblies()
|
||||
.SelectMany(s => s.GetTypes())
|
||||
.Where(p => type.IsAssignableFrom(p) && !p.IsInterface);
|
||||
|
||||
return types.Select(t => Activator.CreateInstance(t) as IValidator).ToList();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -5,19 +5,58 @@ using Newtonsoft.Json;
|
||||
|
||||
namespace AudibleUtilities
|
||||
{
|
||||
public class AccountSettingsLoadErrorEventArgs : ErrorEventArgs
|
||||
{
|
||||
/// <summary>
|
||||
/// Create a new, empty <see cref="AccountsSettings"/> file if true, otherwise throw
|
||||
/// </summary>
|
||||
public bool Handled { get; set; }
|
||||
/// <summary>
|
||||
/// The file path of the AccountsSettings.json file
|
||||
/// </summary>
|
||||
public string SettingsFilePath { get; }
|
||||
|
||||
public AccountSettingsLoadErrorEventArgs(string path, Exception exception)
|
||||
: base(exception)
|
||||
{
|
||||
SettingsFilePath = path;
|
||||
}
|
||||
}
|
||||
|
||||
public static class AudibleApiStorage
|
||||
{
|
||||
public static string AccountsSettingsFile => Path.Combine(Configuration.Instance.LibationFiles, "AccountsSettings.json");
|
||||
public static string AccountsSettingsFile => Path.Combine(Configuration.Instance.LibationFiles.Location, "AccountsSettings.json");
|
||||
|
||||
public static event EventHandler<AccountSettingsLoadErrorEventArgs> LoadError;
|
||||
|
||||
public static void EnsureAccountsSettingsFileExists()
|
||||
{
|
||||
// saves. BEWARE: this will overwrite an existing file
|
||||
if (!File.Exists(AccountsSettingsFile))
|
||||
_ = new AccountsSettingsPersister(new AccountsSettings(), AccountsSettingsFile);
|
||||
{
|
||||
//Save the JSON file manually so that AccountsSettingsPersister.Saving and AccountsSettingsPersister.Saved
|
||||
//are not fired. There's no need to fire those events on an empty AccountsSettings file.
|
||||
var accountSerializerSettings = AudibleApi.Authorization.Identity.GetJsonSerializerSettings();
|
||||
File.WriteAllText(AccountsSettingsFile, JsonConvert.SerializeObject(new AccountsSettings(), Formatting.Indented, accountSerializerSettings));
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>If you use this, be a good citizen and DISPOSE of it</summary>
|
||||
public static AccountsSettingsPersister GetAccountsSettingsPersister() => new AccountsSettingsPersister(AccountsSettingsFile);
|
||||
public static AccountsSettingsPersister GetAccountsSettingsPersister()
|
||||
{
|
||||
try
|
||||
{
|
||||
return new AccountsSettingsPersister(AccountsSettingsFile);
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
var args = new AccountSettingsLoadErrorEventArgs(AccountsSettingsFile, ex);
|
||||
LoadError?.Invoke(null, args);
|
||||
if (args.Handled)
|
||||
return GetAccountsSettingsPersister();
|
||||
throw;
|
||||
}
|
||||
}
|
||||
|
||||
public static string GetIdentityTokensJsonPath(this Account account)
|
||||
=> GetIdentityTokensJsonPath(account.AccountId, account.Locale?.Name);
|
||||
|
||||
@@ -8,7 +8,18 @@ namespace AudibleUtilities
|
||||
public interface IValidator
|
||||
{
|
||||
IEnumerable<Exception> Validate(IEnumerable<Item> items);
|
||||
|
||||
public static IValidator[] GetAllValidators()
|
||||
=> new IValidator[]
|
||||
{
|
||||
new LibraryValidator(),
|
||||
new BookValidator(),
|
||||
new CategoryValidator(),
|
||||
new ContributorValidator(),
|
||||
new SeriesValidator(),
|
||||
};
|
||||
}
|
||||
|
||||
public class LibraryValidator : IValidator
|
||||
{
|
||||
public IEnumerable<Exception> Validate(IEnumerable<Item> items)
|
||||
@@ -17,10 +28,11 @@ namespace AudibleUtilities
|
||||
|
||||
if (items.Any(i => string.IsNullOrWhiteSpace(i.ProductId)))
|
||||
exceptions.Add(new ArgumentException($"Collection contains item(s) with null or blank {nameof(Item.ProductId)}", nameof(items)));
|
||||
if (items.Any(i => i.DateAdded < new DateTime(1980, 1, 1)))
|
||||
exceptions.Add(new ArgumentException($"Collection contains item(s) with invalid {nameof(Item.DateAdded)}", nameof(items)));
|
||||
//// unfortunately, an actual user has a title with a beginning-of-time 'purchase_date'
|
||||
//if (items.Any(i => i.DateAdded < new DateTime(1980, 1, 1)))
|
||||
// exceptions.Add(new ArgumentException($"Collection contains item(s) with invalid {nameof(Item.DateAdded)}", nameof(items)));
|
||||
|
||||
return exceptions;
|
||||
return exceptions;
|
||||
}
|
||||
}
|
||||
public class BookValidator : IValidator
|
||||
@@ -79,8 +91,10 @@ namespace AudibleUtilities
|
||||
var distinct = items.GetSeriesDistinct();
|
||||
if (distinct.Any(s => s.SeriesId is null))
|
||||
exceptions.Add(new ArgumentException($"Collection contains {nameof(Item.Series)} with null {nameof(Series.SeriesId)}", nameof(items)));
|
||||
if (distinct.Any(s => s.SeriesName is null))
|
||||
exceptions.Add(new ArgumentException($"Collection contains {nameof(Item.Series)} with null {nameof(Series.SeriesName)}", nameof(items)));
|
||||
|
||||
//// unfortunately, an actual user has a series with no name
|
||||
//if (distinct.Any(s => s.SeriesName is null))
|
||||
// exceptions.Add(new ArgumentException($"Collection contains {nameof(Item.Series)} with null {nameof(Series.SeriesName)}", nameof(items)));
|
||||
|
||||
return exceptions;
|
||||
}
|
||||
|
||||
@@ -1,15 +1,29 @@
<Project Sdk="Microsoft.NET.Sdk">

<PropertyGroup>
<TargetFramework>net6.0-windows</TargetFramework>
<TargetFramework>net10.0</TargetFramework>
</PropertyGroup>

<ItemGroup>
<PackageReference Include="AudibleApi" Version="2.8.3.1" />
<PackageReference Include="AudibleApi" Version="10.1.2.1" />
<PackageReference Include="Google.Protobuf" Version="3.33.2" />
</ItemGroup>

<ItemGroup>
<ProjectReference Include="..\LibationFileManager\LibationFileManager.csproj" />
</ItemGroup>

<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|AnyCPU'">
<DebugType>embedded</DebugType>
</PropertyGroup>

<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|AnyCPU'">
<DebugType>embedded</DebugType>
</PropertyGroup>

<ItemGroup>
<Compile Update="Widevine\Cdm.*.cs">
<DependentUpon>Cdm.cs</DependentUpon>
</Compile>
</ItemGroup>
</Project>
15
Source/AudibleUtilities/ImportValidationException.cs
Normal file
@@ -0,0 +1,15 @@
|
||||
using AudibleApi.Common;
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
|
||||
namespace AudibleUtilities
|
||||
{
|
||||
public class ImportValidationException : AggregateException
|
||||
{
|
||||
public List<Item> Items { get; }
|
||||
public ImportValidationException(List<Item> items, IEnumerable<Exception> exceptions) : base(exceptions)
|
||||
{
|
||||
Items = items;
|
||||
}
|
||||
}
|
||||
}
|
||||
205
Source/AudibleUtilities/Mkb79Auth.cs
Normal file
@@ -0,0 +1,205 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using System.Threading.Tasks;
|
||||
using AudibleApi;
|
||||
using AudibleApi.Authorization;
|
||||
using AudibleApi.Cryptography;
|
||||
using Dinah.Core;
|
||||
using Newtonsoft.Json;
|
||||
using Newtonsoft.Json.Linq;
|
||||
|
||||
namespace AudibleUtilities
|
||||
{
|
||||
public partial class Mkb79Auth : IIdentityMaintainer
|
||||
{
|
||||
[JsonProperty("website_cookies")]
|
||||
private JObject _websiteCookies { get; set; }
|
||||
|
||||
[JsonProperty("adp_token")]
|
||||
public string AdpToken { get; private set; }
|
||||
|
||||
[JsonProperty("access_token")]
|
||||
public string AccessToken { get; private set; }
|
||||
|
||||
[JsonProperty("refresh_token")]
|
||||
public string RefreshToken { get; private set; }
|
||||
|
||||
[JsonProperty("device_private_key")]
|
||||
public string DevicePrivateKey { get; private set; }
|
||||
|
||||
[JsonProperty("store_authentication_cookie")]
|
||||
private JObject _storeAuthenticationCookie { get; set; }
|
||||
|
||||
[JsonProperty("device_info")]
|
||||
public DeviceInfo DeviceInfo { get; private set; }
|
||||
|
||||
[JsonProperty("customer_info")]
|
||||
public CustomerInfo CustomerInfo { get; private set; }
|
||||
|
||||
[JsonProperty("expires")]
|
||||
private double _expires { get; set; }
|
||||
|
||||
[JsonProperty("locale_code")]
|
||||
public string LocaleCode { get; private set; }
|
||||
|
||||
[JsonProperty("with_username")]
|
||||
public bool WithUsername { get; private set; }
|
||||
|
||||
[JsonProperty("activation_bytes")]
|
||||
public string ActivationBytes { get; private set; }
|
||||
|
||||
[JsonIgnore]
|
||||
public Dictionary<string, string> WebsiteCookies
|
||||
{
|
||||
get => _websiteCookies.ToObject<Dictionary<string, string>>();
|
||||
private set => _websiteCookies = JObject.Parse(JsonConvert.SerializeObject(value, Converter.Settings));
|
||||
}
|
||||
|
||||
[JsonIgnore]
|
||||
public string StoreAuthenticationCookie
|
||||
{
|
||||
get => _storeAuthenticationCookie.ToObject<Dictionary<string, string>>()["cookie"];
|
||||
private set => _storeAuthenticationCookie = JObject.Parse(JsonConvert.SerializeObject(new Dictionary<string, string>() { { "cookie", value } }, Converter.Settings));
|
||||
}
|
||||
|
||||
[JsonIgnore]
|
||||
public DateTime AccessTokenExpires
|
||||
{
|
||||
get => DateTimeOffset.FromUnixTimeMilliseconds((long)(_expires * 1000)).DateTime;
|
||||
private set => _expires = new DateTimeOffset(value).ToUnixTimeMilliseconds() / 1000d;
|
||||
}
|
||||
|
||||
[JsonIgnore] public ISystemDateTime SystemDateTime { get; } = new SystemDateTime();
|
||||
[JsonIgnore]
|
||||
public Locale Locale => Localization.Locales.Where(l => l.WithUsername == WithUsername).Single(l => l.CountryCode == LocaleCode);
|
||||
[JsonIgnore] public string DeviceSerialNumber => DeviceInfo.DeviceSerialNumber;
|
||||
[JsonIgnore] public string DeviceType => DeviceInfo.DeviceType;
|
||||
[JsonIgnore] public string AmazonAccountId => CustomerInfo.UserId;
|
||||
|
||||
public Task<AccessToken> GetAccessTokenAsync()
|
||||
=> Task.FromResult(new AccessToken(AccessToken, AccessTokenExpires));
|
||||
|
||||
public Task<AdpToken> GetAdpTokenAsync()
|
||||
=> Task.FromResult(new AdpToken(AdpToken));
|
||||
|
||||
public Task<PrivateKey> GetPrivateKeyAsync()
|
||||
=> Task.FromResult(new PrivateKey(DevicePrivateKey));
|
||||
}
|
||||
|
||||
public partial class CustomerInfo
|
||||
{
|
||||
[JsonProperty("account_pool")]
|
||||
public string AccountPool { get; set; }
|
||||
|
||||
[JsonProperty("user_id")]
|
||||
public string UserId { get; set; }
|
||||
|
||||
[JsonProperty("home_region")]
|
||||
public string HomeRegion { get; set; }
|
||||
|
||||
[JsonProperty("name")]
|
||||
public string Name { get; set; }
|
||||
|
||||
[JsonProperty("given_name")]
|
||||
public string GivenName { get; set; }
|
||||
}
|
||||
|
||||
public partial class DeviceInfo
|
||||
{
|
||||
[JsonProperty("device_name")]
|
||||
public string DeviceName { get; set; }
|
||||
|
||||
[JsonProperty("device_serial_number")]
|
||||
public string DeviceSerialNumber { get; set; }
|
||||
|
||||
[JsonProperty("device_type")]
|
||||
public string DeviceType { get; set; }
|
||||
}
|
||||
|
||||
public partial class Mkb79Auth
|
||||
{
|
||||
public static Mkb79Auth FromJson(string json)
|
||||
=> JsonConvert.DeserializeObject<Mkb79Auth>(json, Converter.Settings);
|
||||
|
||||
public string ToJson()
|
||||
=> JObject.Parse(JsonConvert.SerializeObject(this, Converter.Settings)).ToString(Formatting.Indented);
|
||||
|
||||
public async Task<Account> ToAccountAsync()
|
||||
{
|
||||
var refreshToken = new RefreshToken(RefreshToken);
|
||||
|
||||
var authorize = new Authorize(Locale);
|
||||
var newToken = await authorize.RefreshAccessTokenAsync(refreshToken);
|
||||
AccessToken = newToken.TokenValue;
|
||||
AccessTokenExpires = newToken.Expires;
|
||||
|
||||
var api = new Api(this);
|
||||
var email = await api.GetEmailAsync();
|
||||
var account = new Account(email)
|
||||
{
|
||||
DecryptKey = ActivationBytes,
|
||||
AccountName = $"{email} - {Locale.Name}",
|
||||
IdentityTokens = new Identity(Locale)
|
||||
};
|
||||
|
||||
account.IdentityTokens.Update(
|
||||
await GetPrivateKeyAsync(),
|
||||
await GetAdpTokenAsync(),
|
||||
await GetAccessTokenAsync(),
|
||||
refreshToken,
|
||||
WebsiteCookies.Select(c => new KeyValuePair<string, string>(c.Key, c.Value)),
|
||||
DeviceSerialNumber,
|
||||
DeviceType,
|
||||
AmazonAccountId,
|
||||
DeviceInfo.DeviceName,
|
||||
StoreAuthenticationCookie);
|
||||
|
||||
return account;
|
||||
}
|
||||
|
||||
public static Mkb79Auth FromAccount(Account account)
|
||||
=> new()
|
||||
{
|
||||
AccessToken = account.IdentityTokens.ExistingAccessToken.TokenValue,
|
||||
ActivationBytes = string.IsNullOrEmpty(account.DecryptKey) ? null : account.DecryptKey,
|
||||
AdpToken = account.IdentityTokens.AdpToken.Value,
|
||||
CustomerInfo = new CustomerInfo
|
||||
{
|
||||
AccountPool = "Amazon",
|
||||
GivenName = string.Empty,
|
||||
HomeRegion = "NA",
|
||||
Name = string.Empty,
|
||||
UserId = account.IdentityTokens.AmazonAccountId
|
||||
},
|
||||
DeviceInfo = new DeviceInfo
|
||||
{
|
||||
DeviceName = account.IdentityTokens.DeviceName,
|
||||
DeviceSerialNumber = account.IdentityTokens.DeviceSerialNumber,
|
||||
DeviceType = account.IdentityTokens.DeviceType,
|
||||
},
|
||||
DevicePrivateKey = account.IdentityTokens.PrivateKey,
|
||||
AccessTokenExpires = account.IdentityTokens.ExistingAccessToken.Expires,
|
||||
LocaleCode = account.Locale.CountryCode,
|
||||
WithUsername = account.Locale.WithUsername,
|
||||
RefreshToken = account.IdentityTokens.RefreshToken.Value,
|
||||
StoreAuthenticationCookie = account.IdentityTokens.StoreAuthenticationCookie,
|
||||
WebsiteCookies = new(account.IdentityTokens.Cookies),
|
||||
};
|
||||
}
|
||||
|
||||
public static class Serialize
|
||||
{
|
||||
public static string ToJson(this Mkb79Auth self)
|
||||
=> JObject.Parse(JsonConvert.SerializeObject(self, Converter.Settings)).ToString(Formatting.Indented);
|
||||
}
|
||||
|
||||
internal static class Converter
|
||||
{
|
||||
public static readonly JsonSerializerSettings Settings = new JsonSerializerSettings
|
||||
{
|
||||
MetadataPropertyHandling = MetadataPropertyHandling.Ignore,
|
||||
DateParseHandling = DateParseHandling.None,
|
||||
};
|
||||
}
|
||||
}
|
||||
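Mkb79Auth mirrors the JSON auth-file layout used by mkb79's audible-cli project (the property names such as website_cookies, adp_token and device_info match that format). A hedged sketch of round-tripping such a file into a Libation Account; the file path is a placeholder, everything else comes from the class above:

using System;
using System.IO;
using System.Threading.Tasks;
using AudibleApi;
using AudibleUtilities;

class AuthImportSketch
{
	static async Task Main()
	{
		// "audible_auth.json" is a placeholder path to an exported auth file.
		var json = await File.ReadAllTextAsync("audible_auth.json");
		var auth = Mkb79Auth.FromJson(json);

		// Refreshes the access token and builds a Libation Account from the stored tokens.
		var account = await auth.ToAccountAsync();
		Console.WriteLine(account.AccountName);

		// The reverse direction: serialize an existing Account back to the same JSON shape.
		var exported = Mkb79Auth.FromAccount(account).ToJson();
		Console.WriteLine(exported);
	}
}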
Source/AudibleUtilities/Widevine/Cdm.Api.cs (new file, 189 lines)
@@ -0,0 +1,189 @@
|
||||
using AudibleApi;
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using System.Threading.Tasks;
|
||||
using System.Net.Http;
|
||||
using AudibleApi.Cryptography;
|
||||
using Newtonsoft.Json.Linq;
|
||||
using Dinah.Core.Net.Http;
|
||||
using System.Text.Json.Nodes;
|
||||
|
||||
#nullable enable
|
||||
namespace AudibleUtilities.Widevine;
|
||||
|
||||
public partial class Cdm
|
||||
{
|
||||
/// <summary>
|
||||
/// Get a <see cref="Cdm"/> from <see cref="AccountsSettings"/> or from the API.
|
||||
/// </summary>
|
||||
/// <returns>A <see cref="Cdm"/> if successful, otherwise <see langword="null"/></returns>
|
||||
public static async Task<Cdm?> GetCdmAsync()
|
||||
{
|
||||
using var persister = AudibleApiStorage.GetAccountsSettingsPersister();
|
||||
|
||||
//Check if there are any Android accounts. If not, we can't use Widevine.
|
||||
if (!persister.Target.Accounts.Any(a => a.IdentityTokens.DeviceType == Resources.DeviceType))
|
||||
return null;
|
||||
|
||||
if (!string.IsNullOrEmpty(persister.Target.Cdm))
|
||||
{
|
||||
try
|
||||
{
|
||||
var cdm = Convert.FromBase64String(persister.Target.Cdm);
|
||||
return new Cdm(new Device(cdm));
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
Serilog.Log.Logger.Error(ex, "Error loading CDM from account settings.");
|
||||
persister.Target.Cdm = string.Empty;
|
||||
//Clear the stored Cdm and try getting a fresh one from the server.
|
||||
}
|
||||
}
|
||||
|
||||
if (string.IsNullOrEmpty(persister.Target.Cdm))
|
||||
{
|
||||
using var client = new HttpClient();
|
||||
if (await GetCdmUris(client) is not Uri[] uris)
|
||||
return null;
|
||||
|
||||
//try to get a CDM file for any account that's registered as an android device.
|
||||
//CDMs are not account-specific, so it doesn't matter which account we're successful with.
|
||||
foreach (var account in persister.Target.Accounts.Where(a => a.IdentityTokens.DeviceType == Resources.DeviceType))
|
||||
{
|
||||
try
|
||||
{
|
||||
var requestMessage = CreateApiRequest(account);
|
||||
|
||||
await TestApiRequest(client, new JsonObject { { "body", requestMessage.ToString() } });
|
||||
|
||||
//Try all CDM URIs until a CDM has been retrieved successfully
|
||||
foreach (var uri in uris)
|
||||
{
|
||||
try
|
||||
{
|
||||
var resp = await client.PostAsync(uri, ((HttpBody)requestMessage).Content);
|
||||
|
||||
if (!resp.IsSuccessStatusCode)
|
||||
{
|
||||
var message = await resp.Content.ReadAsStringAsync();
|
||||
throw new ApiErrorException(uri, null, message);
|
||||
}
|
||||
|
||||
var cdmBts = await resp.Content.ReadAsByteArrayAsync();
|
||||
var device = new Device(cdmBts);
|
||||
persister.Target.Cdm = Convert.ToBase64String(cdmBts);
|
||||
return new Cdm(device);
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
Serilog.Log.Logger.Error(ex, "Error getting a CDM from URI: " + uri);
|
||||
//try the next URI
|
||||
}
|
||||
}
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
Serilog.Log.Logger.Error(ex, "Error getting a CDM for account: " + account.MaskedLogEntry);
|
||||
//try the next Account
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Get a list of CDM API URIs from the main GitHub repository's .cdmurls.json file.
|
||||
/// </summary>
|
||||
/// <returns>If successful, an array of URIs to try. Otherwise null</returns>
|
||||
private static async Task<Uri[]?> GetCdmUris(HttpClient httpClient)
|
||||
{
|
||||
const string CdmUrlListFile = "https://raw.githubusercontent.com/rmcrackan/Libation/refs/heads/master/.cdmurls.json";
|
||||
|
||||
try
|
||||
{
|
||||
var fileContents = await httpClient.GetStringAsync(CdmUrlListFile);
|
||||
var releaseIndex = JObject.Parse(fileContents);
|
||||
var urlArray = releaseIndex["CdmUrls"] as JArray;
|
||||
if (urlArray is null)
|
||||
throw new System.IO.InvalidDataException("CDM url list not found in JSON: " + fileContents);
|
||||
|
||||
var uris = urlArray.Select(u => u.Value<string>()).OfType<string>().Select(u => new Uri(u)).ToArray();
|
||||
|
||||
if (uris.Length == 0)
|
||||
throw new System.IO.InvalidDataException("No CDM url found in JSON: " + fileContents);
|
||||
|
||||
return uris;
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
Serilog.Log.Logger.Error(ex, "Error getting CDM URLs");
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
static readonly string[] TLDs = ["com", "co.uk", "com.au", "com.br", "ca", "fr", "de", "in", "it", "co.jp", "es"];
|
||||
|
||||
//Ensure that the request can be made successfully before sending it to the API
|
||||
//The API uses System.Text.Json, so perform test with same.
|
||||
private static async Task TestApiRequest(HttpClient client, JsonObject input)
|
||||
{
|
||||
if (input["body"]?.GetValue<string>() is not string body
|
||||
|| JsonNode.Parse(body) is not JsonNode bodyJson)
|
||||
throw new Exception("Api request doesn't contain a body");
|
||||
|
||||
if (bodyJson?["Url"]?.GetValue<string>() is not string url
|
||||
|| !Uri.TryCreate(url, UriKind.Absolute, out var uri))
|
||||
throw new Exception("Api request doesn't contain a url");
|
||||
|
||||
if (!TLDs.Select(tld => "api.audible." + tld).Contains(uri.Host.ToLower()))
|
||||
throw new Exception($"Unknown Audible Api domain: {uri.Host}");
|
||||
|
||||
if (bodyJson?["Headers"] is not JsonObject headers)
|
||||
throw new Exception($"Api request doesn't contain any headers");
|
||||
|
||||
using var request = new HttpRequestMessage(HttpMethod.Get, uri);
|
||||
|
||||
Dictionary<string, string>? headersDict = null;
|
||||
try
|
||||
{
|
||||
headersDict = System.Text.Json.JsonSerializer.Deserialize<Dictionary<string, string>>(headers);
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
throw new Exception("Failed to read Audible Api headers.", ex);
|
||||
}
|
||||
|
||||
if (headersDict is null)
|
||||
throw new Exception("Failed to read Audible Api headers.");
|
||||
|
||||
foreach (var kvp in headersDict)
|
||||
request.Headers.Add(kvp.Key, kvp.Value);
|
||||
|
||||
using var resp = await client.SendAsync(request);
|
||||
resp.EnsureSuccessStatusCode();
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Create a request body to send to the API
|
||||
/// </summary>
|
||||
/// <param name="account">An authenticated account</param>
|
||||
private static JObject CreateApiRequest(Account account)
|
||||
{
|
||||
const string ACCOUNT_INFO_PATH = "/1.0/account/information";
|
||||
|
||||
var message = new HttpRequestMessage(HttpMethod.Get, ACCOUNT_INFO_PATH);
|
||||
message.SignRequest(
|
||||
DateTime.UtcNow,
|
||||
account.IdentityTokens.AdpToken,
|
||||
account.IdentityTokens.PrivateKey);
|
||||
|
||||
return new JObject
|
||||
{
|
||||
{ "Url", new Uri(account.Locale.AudibleApiUri(), ACCOUNT_INFO_PATH) },
|
||||
{ "Headers", JObject.FromObject(message.Headers.ToDictionary(kvp => kvp.Key, kvp => kvp.Value.Single())) }
|
||||
};
|
||||
}
|
||||
}
|
||||
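GetCdmAsync is the entry point the rest of the Widevine code hangs off of; it returns null when no account is registered as an Android device or no CDM can be loaded or fetched. A minimal hedged usage sketch:

using System;
using System.Threading.Tasks;
using AudibleUtilities.Widevine;

class CdmSketch
{
	static async Task Main()
	{
		// Null means Widevine is unavailable for every configured account; callers must handle that.
		var cdm = await Cdm.GetCdmAsync();
		if (cdm is null)
		{
			Console.WriteLine("No Widevine CDM available.");
			return;
		}

		using ISession session = cdm.OpenSession();
		// GetLicenseChallenge / ParseLicense usage is sketched after Cdm.cs below.
	}
}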
Source/AudibleUtilities/Widevine/Cdm.cs (new file, 300 lines)
@@ -0,0 +1,300 @@
|
||||
using Google.Protobuf;
|
||||
using System;
|
||||
using System.Collections.Concurrent;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Security.Cryptography;
|
||||
using System.Text;
|
||||
|
||||
#nullable enable
|
||||
namespace AudibleUtilities.Widevine;
|
||||
|
||||
public enum KeyType
|
||||
{
|
||||
/// <summary>
|
||||
/// Exactly one key of this type must appear.
|
||||
/// </summary>
|
||||
Signing = 1,
|
||||
/// <summary>
|
||||
/// Content key.
|
||||
/// </summary>
|
||||
Content = 2,
|
||||
/// <summary>
|
||||
/// Key control block for license renewals. No key.
|
||||
/// </summary>
|
||||
KeyControl = 3,
|
||||
/// <summary>
|
||||
/// wrapped keys for auxiliary crypto operations.
|
||||
/// </summary>
|
||||
OperatorSession = 4,
|
||||
/// <summary>
|
||||
/// Entitlement keys.
|
||||
/// </summary>
|
||||
Entitlement = 5,
|
||||
/// <summary>
|
||||
/// Partner-specific content key.
|
||||
/// </summary>
|
||||
OemContent = 6,
|
||||
}
|
||||
|
||||
public interface ISession : IDisposable
|
||||
{
|
||||
string? GetLicenseChallenge(MpegDash dash);
|
||||
WidevineKey[] ParseLicense(string licenseMessage);
|
||||
}
|
||||
|
||||
public class WidevineKey
|
||||
{
|
||||
public Guid Kid { get; }
|
||||
public KeyType Type { get; }
|
||||
public byte[] Key { get; }
|
||||
internal WidevineKey(Guid kid, License.Types.KeyContainer.Types.KeyType type, byte[] key)
|
||||
{
|
||||
Kid = kid;
|
||||
Type = (KeyType)type;
|
||||
Key = key;
|
||||
}
|
||||
public override string ToString() => $"{Convert.ToHexString(Kid.ToByteArray(bigEndian: true)).ToLower()}:{Convert.ToHexString(Key).ToLower()}";
|
||||
}
|
||||
|
||||
public partial class Cdm
|
||||
{
|
||||
public static Guid WidevineContentProtection { get; } = new("edef8ba9-79d6-4ace-a3c8-27dcd51d21ed");
|
||||
private const int MAX_NUM_OF_SESSIONS = 16;
|
||||
internal Device Device { get; }
|
||||
|
||||
private ConcurrentDictionary<Guid, Session> Sessions { get; } = new(-1, MAX_NUM_OF_SESSIONS);
|
||||
|
||||
internal Cdm(Device device)
|
||||
{
|
||||
Device = device;
|
||||
}
|
||||
|
||||
public ISession OpenSession()
|
||||
{
|
||||
if (Sessions.Count == MAX_NUM_OF_SESSIONS)
|
||||
throw new Exception("Too Many Sessions");
|
||||
|
||||
var session = new Session(Sessions.Count + 1, this);
|
||||
|
||||
var ddd = Sessions.TryAdd(session.Id, session);
|
||||
return session;
|
||||
}
|
||||
|
||||
#region Session
|
||||
|
||||
internal class Session : ISession
|
||||
{
|
||||
public Guid Id { get; } = Guid.NewGuid();
|
||||
private int SessionNumber { get; }
|
||||
private Cdm Cdm { get; }
|
||||
private byte[]? EncryptionContext { get; set; }
|
||||
private byte[]? AuthenticationContext { get; set; }
|
||||
|
||||
public Session(int number, Cdm cdm)
|
||||
{
|
||||
SessionNumber = number;
|
||||
Cdm = cdm;
|
||||
}
|
||||
|
||||
private string GetRequestId()
|
||||
=> $"{RandomUint():x8}00000000{Convert.ToHexString(BitConverter.GetBytes((long)SessionNumber)).ToLowerInvariant()}";
|
||||
|
||||
public void Dispose()
|
||||
{
|
||||
if (Cdm.Sessions.ContainsKey(Id))
|
||||
Cdm.Sessions.TryRemove(Id, out var session);
|
||||
}
|
||||
|
||||
public string? GetLicenseChallenge(MpegDash dash)
|
||||
{
|
||||
if (!dash.TryGetPssh(Cdm.WidevineContentProtection, out var pssh))
|
||||
return null;
|
||||
|
||||
var licRequest = new LicenseRequest
|
||||
{
|
||||
ClientId = Cdm.Device.ClientId,
|
||||
ContentId = new()
|
||||
{
|
||||
WidevinePsshData = new()
|
||||
{
|
||||
LicenseType = LicenseType.Offline,
|
||||
RequestId = ByteString.CopyFrom(GetRequestId(), Encoding.ASCII)
|
||||
}
|
||||
},
|
||||
Type = LicenseRequest.Types.RequestType.New,
|
||||
RequestTime = DateTimeOffset.UtcNow.ToUnixTimeSeconds(),
|
||||
ProtocolVersion = ProtocolVersion.Version21,
|
||||
KeyControlNonce = RandomUint()
|
||||
};
|
||||
|
||||
licRequest.ContentId.WidevinePsshData.PsshData.Add(ByteString.CopyFrom(pssh.InitData));
|
||||
|
||||
var licRequestBts = licRequest.ToByteArray();
|
||||
EncryptionContext = CreateContext("ENCRYPTION", 128, licRequestBts);
|
||||
AuthenticationContext = CreateContext("AUTHENTICATION", 512, licRequestBts);
|
||||
|
||||
var signedMessage = new SignedMessage
|
||||
{
|
||||
Type = SignedMessage.Types.MessageType.LicenseRequest,
|
||||
Msg = ByteString.CopyFrom(licRequestBts),
|
||||
Signature = ByteString.CopyFrom(Cdm.Device.SignMessage(licRequestBts))
|
||||
};
|
||||
|
||||
return Convert.ToBase64String(signedMessage.ToByteArray());
|
||||
}
|
||||
|
||||
public WidevineKey[] ParseLicense(string licenseMessage)
|
||||
{
|
||||
if (EncryptionContext is null || AuthenticationContext is null)
|
||||
throw new InvalidOperationException($"{nameof(GetLicenseChallenge)}() must be called before calling {nameof(ParseLicense)}()");
|
||||
|
||||
var signedMessage = SignedMessage.Parser.ParseFrom(Convert.FromBase64String(licenseMessage));
|
||||
if (signedMessage.Type != SignedMessage.Types.MessageType.License)
|
||||
throw new InvalidDataException("Invalid license");
|
||||
|
||||
var sessionKey = Cdm.Device.DecryptSessionKey(signedMessage.SessionKey.ToByteArray());
|
||||
|
||||
if (!VerifySignature(signedMessage, AuthenticationContext, sessionKey))
|
||||
throw new InvalidDataException("Message signature is invalid");
|
||||
|
||||
var license = License.Parser.ParseFrom(signedMessage.Msg);
|
||||
var keyToTheKeys = DeriveKey(sessionKey, EncryptionContext, 1);
|
||||
|
||||
return DecryptKeys(keyToTheKeys, license.Key);
|
||||
}
|
||||
|
||||
private static WidevineKey[] DecryptKeys(byte[] keyToTheKeys, IList<License.Types.KeyContainer> licenseKeys)
|
||||
{
|
||||
using var aes = Aes.Create();
|
||||
aes.Key = keyToTheKeys;
|
||||
var keys = new WidevineKey[licenseKeys.Count];
|
||||
|
||||
for (int i = 0; i < licenseKeys.Count; i++)
|
||||
{
|
||||
var keyContainer = licenseKeys[i];
|
||||
|
||||
var keyBytes = aes.DecryptCbc(keyContainer.Key.ToByteArray(), keyContainer.Iv.ToByteArray(), PaddingMode.PKCS7);
|
||||
var id = keyContainer.Id.ToByteArray();
|
||||
|
||||
if (id.Length > 16)
|
||||
{
|
||||
var tryB64 = new byte[id.Length * 3 / 4];
|
||||
if (Convert.TryFromBase64String(Encoding.ASCII.GetString(id), tryB64, out int bytesWritten))
|
||||
{
|
||||
id = tryB64;
|
||||
}
|
||||
Array.Resize(ref id, 16);
|
||||
}
|
||||
else if (id.Length < 16)
|
||||
{
|
||||
id = id.Append(new byte[16 - id.Length]);
|
||||
}
|
||||
|
||||
keys[i] = new WidevineKey(new Guid(id,bigEndian: true), keyContainer.Type, keyBytes);
|
||||
}
|
||||
return keys;
|
||||
}
|
||||
|
||||
private static bool VerifySignature(SignedMessage signedMessage, byte[] authContext, byte[] sessionKey)
|
||||
{
|
||||
var mac_key_server = DeriveKey(sessionKey, authContext, 1).Append(DeriveKey(sessionKey, authContext, 2));
|
||||
|
||||
var hmacData = (signedMessage.OemcryptoCoreMessage?.ToByteArray() ?? []).Append(signedMessage.Msg?.ToByteArray() ?? []);
|
||||
|
||||
var computed_signature = HMACSHA256.HashData(mac_key_server, hmacData);
|
||||
|
||||
return computed_signature.SequenceEqual(signedMessage.Signature);
|
||||
}
|
||||
|
||||
private static byte[] DeriveKey(byte[] session_key, byte[] context, int counter)
|
||||
{
|
||||
var data = new byte[context.Length + 1];
|
||||
Array.Copy(context, 0, data, 1, context.Length);
|
||||
data[0] = (byte)counter;
|
||||
|
||||
return AESCMAC(session_key, data);
|
||||
}
|
||||
|
||||
private static byte[] AESCMAC(byte[] key, byte[] data)
|
||||
{
|
||||
using var aes = Aes.Create();
|
||||
aes.Key = key;
|
||||
|
||||
// SubKey generation
|
||||
// step 1, AES-128 with key K is applied to an all-zero input block.
|
||||
byte[] subKey = aes.EncryptCbc(new byte[16], new byte[16], PaddingMode.None);
|
||||
|
||||
nextSubKey();
|
||||
|
||||
// MAC computing
|
||||
if ((data.Length == 0) || (data.Length % 16 != 0))
|
||||
{
|
||||
// If the size of the input message block is not equal to a positive
|
||||
// multiple of the block size (namely, 128 bits), the last block shall
|
||||
// be padded with 10^i
|
||||
nextSubKey();
|
||||
var padLen = 16 - data.Length % 16;
|
||||
Array.Resize(ref data, data.Length + padLen);
|
||||
data[^padLen] = 0x80;
|
||||
}
|
||||
|
||||
// the last block shall be exclusive-OR'ed with K1 before processing
|
||||
for (int j = 0; j < subKey.Length; j++)
|
||||
data[data.Length - 16 + j] ^= subKey[j];
|
||||
|
||||
// The result of the previous process will be the input of the last encryption.
|
||||
byte[] encResult = aes.EncryptCbc(data, new byte[16], PaddingMode.None);
|
||||
|
||||
byte[] HashValue = new byte[16];
|
||||
Array.Copy(encResult, encResult.Length - HashValue.Length, HashValue, 0, HashValue.Length);
|
||||
|
||||
return HashValue;
|
||||
|
||||
void nextSubKey()
|
||||
{
|
||||
const byte const_Rb = 0x87;
|
||||
if (Rol(subKey) != 0)
|
||||
subKey[15] ^= const_Rb;
|
||||
|
||||
static int Rol(byte[] b)
|
||||
{
|
||||
int carry = 0;
|
||||
|
||||
for (int i = b.Length - 1; i >= 0; i--)
|
||||
{
|
||||
ushort u = (ushort)(b[i] << 1);
|
||||
b[i] = (byte)((u & 0xff) + carry);
|
||||
carry = (u & 0xff00) >> 8;
|
||||
}
|
||||
return carry;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private static byte[] CreateContext(string label, int keySize, byte[] licRequestBts)
|
||||
{
|
||||
var contextSize = label.Length + 1 + licRequestBts.Length + sizeof(int);
|
||||
|
||||
var context = new byte[contextSize];
|
||||
var numChars = Encoding.ASCII.GetBytes(label.AsSpan(), context);
|
||||
Array.Copy(licRequestBts, 0, context, numChars + 1, licRequestBts.Length);
|
||||
|
||||
var numBts = BitConverter.GetBytes(keySize);
|
||||
if (BitConverter.IsLittleEndian)
|
||||
Array.Reverse(numBts);
|
||||
Array.Copy(numBts, 0, context, context.Length - sizeof(int), sizeof(int));
|
||||
return context;
|
||||
}
|
||||
|
||||
private static uint RandomUint()
|
||||
{
|
||||
var bts = new byte[4];
|
||||
new Random().NextBytes(bts);
|
||||
return BitConverter.ToUInt32(bts, 0);
|
||||
}
|
||||
}
|
||||
|
||||
#endregion
|
||||
}
|
||||
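The session API above is a two-step challenge/response: GetLicenseChallenge builds a signed, base64-encoded LicenseRequest for the manifest's Widevine PSSH, and ParseLicense verifies the server reply and decrypts the keys it carries. A hedged sketch of that flow; the license-server transport is an assumption and is not part of the code above:

using System;
using System.IO;
using System.Threading.Tasks;
using AudibleUtilities.Widevine;

class LicenseFlowSketch
{
	// cdm comes from Cdm.GetCdmAsync(); mpdStream is the downloaded MPEG-DASH manifest.
	static async Task<WidevineKey[]> GetKeysAsync(Cdm cdm, Stream mpdStream)
	{
		var dash = new MpegDash(mpdStream);

		using ISession session = cdm.OpenSession();
		var challenge = session.GetLicenseChallenge(dash);
		if (challenge is null)
			throw new InvalidDataException("Manifest has no Widevine PSSH box.");

		// Hypothetical helper: POST the base64 challenge to the license server
		// and return the base64 license message it sends back.
		var licenseMessage = await SendChallengeToLicenseServerAsync(challenge);

		return session.ParseLicense(licenseMessage);
	}

	static Task<string> SendChallengeToLicenseServerAsync(string challenge)
		=> throw new NotImplementedException("License-server transport is outside this sketch.");
}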
Source/AudibleUtilities/Widevine/Device.cs (new file, 155 lines)
@@ -0,0 +1,155 @@
|
||||
using System;
|
||||
using System.IO;
|
||||
using System.Numerics;
|
||||
using System.Security.Cryptography;
|
||||
|
||||
#nullable enable
|
||||
namespace AudibleUtilities.Widevine;
|
||||
|
||||
internal enum DeviceTypes : byte
|
||||
{
|
||||
Unknown = 0,
|
||||
Chrome = 1,
|
||||
Android = 2
|
||||
}
|
||||
|
||||
internal class Device
|
||||
{
|
||||
public DeviceTypes Type { get; }
|
||||
public int FileVersion { get; }
|
||||
public int SecurityLevel { get; }
|
||||
public int Flags { get; }
|
||||
|
||||
public RSA CdmKey { get; }
|
||||
internal ClientIdentification ClientId { get; }
|
||||
|
||||
public Device(Span<byte> fileData)
|
||||
{
|
||||
if (fileData.Length < 7 || fileData[0] != 'W' || fileData[1] != 'V' || fileData[2] != 'D')
|
||||
throw new InvalidDataException();
|
||||
|
||||
FileVersion = fileData[3];
|
||||
Type = (DeviceTypes)fileData[4];
|
||||
SecurityLevel = fileData[5];
|
||||
Flags = fileData[6];
|
||||
|
||||
if (FileVersion != 2)
|
||||
throw new InvalidDataException($"Unknown CDM File Version: '{FileVersion}'");
|
||||
if (Type != DeviceTypes.Android)
|
||||
throw new InvalidDataException($"Unknown CDM Type: '{Type}'");
|
||||
if (SecurityLevel != 3)
|
||||
throw new InvalidDataException($"Unknown CDM Security Level: '{SecurityLevel}'");
|
||||
|
||||
var privateKeyLength = (fileData[7] << 8) | fileData[8];
|
||||
|
||||
if (privateKeyLength <= 0 || fileData.Length < 9 + privateKeyLength + 2)
|
||||
throw new InvalidDataException($"Invalid private key length: '{privateKeyLength}'");
|
||||
|
||||
var clientIdLength = (fileData[9 + privateKeyLength] << 8) | fileData[10 + privateKeyLength];
|
||||
|
||||
if (clientIdLength <= 0 || fileData.Length < 11 + privateKeyLength + clientIdLength)
|
||||
throw new InvalidDataException($"Invalid client id length: '{clientIdLength}'");
|
||||
|
||||
ClientId = ClientIdentification.Parser.ParseFrom(fileData.Slice(11 + privateKeyLength));
|
||||
CdmKey = RSA.Create();
|
||||
CdmKey.ImportRSAPrivateKey(fileData.Slice(9, privateKeyLength), out _);
|
||||
}
|
||||
|
||||
public byte[] SignMessage(byte[] message)
|
||||
{
|
||||
var digestion = SHA1.HashData(message);
|
||||
return PssSha1Signer.SignHash(CdmKey, digestion);
|
||||
}
|
||||
|
||||
public bool VerifyMessage(byte[] message, byte[] signature)
|
||||
{
|
||||
var digestion = SHA1.HashData(message);
|
||||
return CdmKey.VerifyHash(digestion, signature, HashAlgorithmName.SHA1, RSASignaturePadding.Pss);
|
||||
}
|
||||
|
||||
public byte[] DecryptSessionKey(byte[] sessionKey)
|
||||
=> CdmKey.Decrypt(sessionKey, RSAEncryptionPadding.OaepSHA1);
|
||||
|
||||
/// <summary>
|
||||
/// Completely managed implementation of RSASSA-PSS using SHA-1.
|
||||
/// https://github.com/bcgit/bc-csharp/blob/master/crypto/src/crypto/signers/PssSigner.cs
|
||||
///
|
||||
/// Absolutely nobody anywhere should use this RSASSA-PSS implementation in anything where they care about security at all. We completely skipped the random salt part of it because libation doesn't need security; it only needs to satisfy Audible server's challenge-response requirements.
|
||||
/// </summary>
|
||||
private static class PssSha1Signer
|
||||
{
|
||||
private const int Sha1DigestSize = 20;
|
||||
private const int Trailer = 0xBC;
|
||||
|
||||
public static byte[] SignHash(RSA rsa, ReadOnlySpan<byte> hash)
|
||||
{
|
||||
ArgumentOutOfRangeException.ThrowIfNotEqual(hash.Length, Sha1DigestSize);
|
||||
|
||||
var parameters = rsa.ExportParameters(true);
|
||||
var Modulus = new BigInteger(parameters.Modulus, isUnsigned: true, isBigEndian: true);
|
||||
var Exponent = new BigInteger(parameters.D, isUnsigned: true, isBigEndian: true);
|
||||
var emBits = rsa.KeySize - 1;
|
||||
var block = new byte[(emBits + 7) / 8];
|
||||
var firstByteMask = (byte)(0xFFU >> ((block.Length * 8) - emBits));
|
||||
|
||||
Span<byte> mDash = new byte[8 + 2 * Sha1DigestSize];
|
||||
|
||||
hash.CopyTo(mDash.Slice(8));
|
||||
var h = SHA1.HashData(mDash);
|
||||
|
||||
block[^(2 * (Sha1DigestSize + 1))] = 1;
|
||||
byte[] dbMask = MaskGeneratorFunction1(h, 0, h.Length, block.Length - Sha1DigestSize - 1);
|
||||
for (int i = 0; i != dbMask.Length; i++)
|
||||
block[i] ^= dbMask[i];
|
||||
|
||||
h.CopyTo(block, block.Length - Sha1DigestSize - 1);
|
||||
|
||||
block[0] &= firstByteMask;
|
||||
block[^1] = Trailer;
|
||||
|
||||
var input = new BigInteger(block, isUnsigned: true, isBigEndian: true);
|
||||
var result = BigInteger.ModPow(input, Exponent, Modulus);
|
||||
return result.ToByteArray(isUnsigned: true, isBigEndian: true);
|
||||
}
|
||||
|
||||
private static byte[] MaskGeneratorFunction1(byte[] Z, int zOff, int zLen, int length)
|
||||
{
|
||||
byte[] mask = new byte[length];
|
||||
byte[] hashBuf = new byte[Sha1DigestSize];
|
||||
byte[] C = new byte[4];
|
||||
int counter = 0;
|
||||
|
||||
using var sha = SHA1.Create();
|
||||
|
||||
for (; counter < (length / Sha1DigestSize); counter++)
|
||||
{
|
||||
ItoOSP(counter, C);
|
||||
|
||||
sha.TransformBlock(Z, zOff, zLen, null, 0);
|
||||
sha.TransformFinalBlock(C, 0, C.Length);
|
||||
|
||||
sha.Hash!.CopyTo(mask, counter * Sha1DigestSize);
|
||||
}
|
||||
|
||||
if ((counter * Sha1DigestSize) < length)
|
||||
{
|
||||
ItoOSP(counter, C);
|
||||
|
||||
sha.TransformBlock(Z, zOff, zLen, null, 0);
|
||||
sha.TransformFinalBlock(C, 0, C.Length);
|
||||
|
||||
Array.Copy(sha.Hash!, 0, mask, counter * Sha1DigestSize, mask.Length - (counter * Sha1DigestSize));
|
||||
}
|
||||
|
||||
return mask;
|
||||
}
|
||||
|
||||
private static void ItoOSP(int i, byte[] sp)
|
||||
{
|
||||
sp[0] = (byte)((uint)i >> 24);
|
||||
sp[1] = (byte)((uint)i >> 16);
|
||||
sp[2] = (byte)((uint)i >> 8);
|
||||
sp[3] = (byte)((uint)i >> 0);
|
||||
}
|
||||
}
|
||||
}
|
||||
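Device parses the "WVD" version-2 container (magic, version, type, security level, flags, then a length-prefixed RSA private key and ClientIdentification blob) and exposes sign, verify and session-key decrypt over that key. A small hedged round-trip sketch; the .wvd path is a placeholder, and since Device is internal this only compiles inside the AudibleUtilities assembly:

using System;
using System.IO;
using System.Text;
using AudibleUtilities.Widevine;

class DeviceSketch
{
	static void Main()
	{
		// Placeholder path to an Android L3 provisioning file in the WVD v2 layout the parser expects.
		byte[] wvd = File.ReadAllBytes("device.wvd");
		var device = new Device(wvd);

		byte[] message = Encoding.UTF8.GetBytes("license request bytes");
		byte[] signature = device.SignMessage(message);               // RSASSA-PSS over SHA-1, fixed all-zero salt per the class comment above
		Console.WriteLine(device.VerifyMessage(message, signature));  // True
	}
}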
Source/AudibleUtilities/Widevine/Extensions.cs (new file, 15 lines)
@@ -0,0 +1,15 @@
using System;

#nullable enable
namespace AudibleUtilities.Widevine;

internal static class Extensions
{
	public static T[] Append<T>(this T[] message, T[] appendData)
	{
		var origLength = message.Length;
		Array.Resize(ref message, origLength + appendData.Length);
		Array.Copy(appendData, 0, message, origLength, appendData.Length);
		return message;
	}
}
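Append is a small array-concatenation helper used by the CMAC and signature code above. A tiny illustrative fragment:

byte[] head = { 0x01, 0x02 };
byte[] tail = { 0x03, 0x04 };
byte[] combined = head.Append(tail);   // { 0x01, 0x02, 0x03, 0x04 }; 'head' is not mutated, a resized copy is returned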
Source/AudibleUtilities/Widevine/LicenseProtocol.cs (new file, 14552 lines; diff suppressed because it is too large)
Source/AudibleUtilities/Widevine/MpegDash.cs (new file, 70 lines)
@@ -0,0 +1,70 @@
using Mpeg4Lib.Boxes;
using System;
using System.Diagnostics.CodeAnalysis;
using System.IO;
using System.Xml;
using System.Xml.Linq;
using System.Xml.XPath;

#nullable enable
namespace AudibleUtilities.Widevine;

public class MpegDash
{
	private const string MpegDashNamespace = "urn:mpeg:dash:schema:mpd:2011";
	private const string CencNamespace = "urn:mpeg:cenc:2013";
	private const string UuidPreamble = "urn:uuid:";
	private XElement DashMpd { get; }
	private static XmlNamespaceManager NamespaceManager { get; } = new(new NameTable());
	static MpegDash()
	{
		NamespaceManager.AddNamespace("dash", MpegDashNamespace);
		NamespaceManager.AddNamespace("cenc", CencNamespace);
	}

	public MpegDash(Stream contents)
	{
		DashMpd = XElement.Load(contents);
	}

	public bool TryGetUri(Uri baseUri, [NotNullWhen(true)] out Uri? fileUri)
	{
		foreach (var baseUrl in DashMpd.XPathSelectElements("/dash:Period/dash:AdaptationSet/dash:Representation/dash:BaseURL", NamespaceManager))
		{
			try
			{
				fileUri = new Uri(baseUri, baseUrl.Value);
				return true;
			}
			catch
			{
				fileUri = null;
				return false;
			}
		}
		fileUri = null;
		return false;
	}

	public bool TryGetPssh(Guid protectionSystemId, [NotNullWhen(true)] out PsshBox? pssh)
	{
		foreach (var psshEle in DashMpd.XPathSelectElements("/dash:Period/dash:AdaptationSet/dash:ContentProtection/cenc:pssh", NamespaceManager))
		{
			if (psshEle?.Value?.Trim() is string psshStr
				&& psshEle.Parent?.Attribute(XName.Get("schemeIdUri")) is XAttribute scheme
				&& scheme.Value is string uuid
				&& uuid.Equals(UuidPreamble + protectionSystemId.ToString(), StringComparison.OrdinalIgnoreCase))
			{
				Span<byte> buffer = new byte[psshStr.Length * 3 / 4];
				if (Convert.TryFromBase64String(psshStr, buffer, out var written))
				{
					using var ms = new MemoryStream(buffer.Slice(0, written).ToArray());
					pssh = BoxFactory.CreateBox(ms, null) as PsshBox;
					return pssh is not null;
				}
			}
		}
		pssh = null;
		return false;
	}
}
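MpegDash answers the two questions the CDM code needs from the manifest: where the media file lives and which Widevine PSSH box to build a challenge for. A hedged sketch; the manifest path and base URI are placeholders:

using System;
using System.IO;
using AudibleUtilities.Widevine;

class MpdSketch
{
	static void Main()
	{
		using var mpdStream = File.OpenRead("manifest.mpd");   // placeholder path
		var dash = new MpegDash(mpdStream);

		if (dash.TryGetUri(new Uri("https://example.com/content/"), out var fileUri))
			Console.WriteLine($"Media URL: {fileUri}");

		if (dash.TryGetPssh(Cdm.WidevineContentProtection, out var pssh))
			Console.WriteLine("Found Widevine PSSH box.");
	}
}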
Source/DataLayer.Postgres/DataLayer.Postgres.csproj (new file, 38 lines)
@@ -0,0 +1,38 @@
<Project Sdk="Microsoft.NET.Sdk">

  <PropertyGroup>
    <TargetFramework>net10.0</TargetFramework>
  </PropertyGroup>

  <PropertyGroup>
    <GenerateRuntimeConfigurationFiles>true</GenerateRuntimeConfigurationFiles>
    <OutputType>Library</OutputType>
  </PropertyGroup>
  <ItemGroup>
    <PackageReference Include="Microsoft.EntityFrameworkCore.Design" Version="10.0.0">
      <PrivateAssets>all</PrivateAssets>
      <IncludeAssets>runtime; build; native; analyzers; buildtransitive</IncludeAssets>
    </PackageReference>
    <PackageReference Include="Microsoft.EntityFrameworkCore.Tools" Version="10.0.0">
      <PrivateAssets>all</PrivateAssets>
      <IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
    </PackageReference>
  </ItemGroup>

  <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|AnyCPU'">
    <DebugType>embedded</DebugType>
  </PropertyGroup>

  <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|AnyCPU'">
    <DebugType>embedded</DebugType>
  </PropertyGroup>

  <ItemGroup>
    <ProjectReference Include="..\DataLayer\DataLayer.csproj" />
  </ItemGroup>

  <ItemGroup>
    <Folder Include="Migrations\" />
  </ItemGroup>

</Project>
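This project only references the EF Core design and tools packages; the Npgsql provider itself presumably flows in through the referenced DataLayer project, since the generated model below uses Npgsql.EntityFrameworkCore.PostgreSQL. The migration files that follow are produced at design time with `dotnet ef migrations add`. A generic, hedged sketch of the design-time factory pattern that relies on; DemoContext and the connection string are illustrative and not Libation types:

// Requires Microsoft.EntityFrameworkCore.Design and the Npgsql.EntityFrameworkCore.PostgreSQL provider.
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Design;

public class DemoContext : DbContext
{
	public DemoContext(DbContextOptions<DemoContext> options) : base(options) { }
}

public class DemoContextFactory : IDesignTimeDbContextFactory<DemoContext>
{
	// Called by the EF tools when generating or applying migrations.
	public DemoContext CreateDbContext(string[] args)
	{
		var builder = new DbContextOptionsBuilder<DemoContext>();
		builder.UseNpgsql("Host=localhost;Database=libation;Username=postgres;Password=postgres");
		return new DemoContext(builder.Options);
	}
}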
Source/DataLayer.Postgres/Migrations/20251027224441_InitialPostgres.Designer.cs (new generated file, 494 lines)
@@ -0,0 +1,494 @@
|
||||
// <auto-generated />
|
||||
using System;
|
||||
using DataLayer;
|
||||
using Microsoft.EntityFrameworkCore;
|
||||
using Microsoft.EntityFrameworkCore.Infrastructure;
|
||||
using Microsoft.EntityFrameworkCore.Migrations;
|
||||
using Microsoft.EntityFrameworkCore.Storage.ValueConversion;
|
||||
using Npgsql.EntityFrameworkCore.PostgreSQL.Metadata;
|
||||
|
||||
#nullable disable
|
||||
|
||||
namespace DataLayer.Postgres.Migrations
|
||||
{
|
||||
[DbContext(typeof(LibationContext))]
|
||||
[Migration("20251027224441_InitialPostgres")]
|
||||
partial class InitialPostgres
|
||||
{
|
||||
/// <inheritdoc />
|
||||
protected override void BuildTargetModel(ModelBuilder modelBuilder)
|
||||
{
|
||||
#pragma warning disable 612, 618
|
||||
modelBuilder
|
||||
.HasAnnotation("ProductVersion", "9.0.8")
|
||||
.HasAnnotation("Relational:MaxIdentifierLength", 63);
|
||||
|
||||
NpgsqlModelBuilderExtensions.UseIdentityByDefaultColumns(modelBuilder);
|
||||
|
||||
modelBuilder.Entity("CategoryCategoryLadder", b =>
|
||||
{
|
||||
b.Property<int>("_categoriesCategoryId")
|
||||
.HasColumnType("integer");
|
||||
|
||||
b.Property<int>("_categoryLaddersCategoryLadderId")
|
||||
.HasColumnType("integer");
|
||||
|
||||
b.HasKey("_categoriesCategoryId", "_categoryLaddersCategoryLadderId");
|
||||
|
||||
b.HasIndex("_categoryLaddersCategoryLadderId");
|
||||
|
||||
b.ToTable("CategoryCategoryLadder");
|
||||
});
|
||||
|
||||
modelBuilder.Entity("DataLayer.Book", b =>
|
||||
{
|
||||
b.Property<int>("BookId")
|
||||
.ValueGeneratedOnAdd()
|
||||
.HasColumnType("integer");
|
||||
|
||||
NpgsqlPropertyBuilderExtensions.UseIdentityByDefaultColumn(b.Property<int>("BookId"));
|
||||
|
||||
b.Property<string>("AudibleProductId")
|
||||
.HasColumnType("text");
|
||||
|
||||
b.Property<int>("ContentType")
|
||||
.HasColumnType("integer");
|
||||
|
||||
b.Property<DateTime?>("DatePublished")
|
||||
.HasColumnType("timestamp without time zone");
|
||||
|
||||
b.Property<string>("Description")
|
||||
.HasColumnType("text");
|
||||
|
||||
b.Property<bool>("IsAbridged")
|
||||
.HasColumnType("boolean");
|
||||
|
||||
b.Property<bool>("IsSpatial")
|
||||
.HasColumnType("boolean");
|
||||
|
||||
b.Property<string>("Language")
|
||||
.HasColumnType("text");
|
||||
|
||||
b.Property<int>("LengthInMinutes")
|
||||
.HasColumnType("integer");
|
||||
|
||||
b.Property<string>("Locale")
|
||||
.HasColumnType("text");
|
||||
|
||||
b.Property<string>("PictureId")
|
||||
.HasColumnType("text");
|
||||
|
||||
b.Property<string>("PictureLarge")
|
||||
.HasColumnType("text");
|
||||
|
||||
b.Property<string>("Subtitle")
|
||||
.HasColumnType("text");
|
||||
|
||||
b.Property<string>("Title")
|
||||
.HasColumnType("text");
|
||||
|
||||
b.HasKey("BookId");
|
||||
|
||||
b.HasIndex("AudibleProductId");
|
||||
|
||||
b.ToTable("Books");
|
||||
});
|
||||
|
||||
modelBuilder.Entity("DataLayer.BookCategory", b =>
|
||||
{
|
||||
b.Property<int>("BookId")
|
||||
.HasColumnType("integer");
|
||||
|
||||
b.Property<int>("CategoryLadderId")
|
||||
.HasColumnType("integer");
|
||||
|
||||
b.HasKey("BookId", "CategoryLadderId");
|
||||
|
||||
b.HasIndex("BookId");
|
||||
|
||||
b.HasIndex("CategoryLadderId");
|
||||
|
||||
b.ToTable("BookCategory");
|
||||
});
|
||||
|
||||
modelBuilder.Entity("DataLayer.BookContributor", b =>
|
||||
{
|
||||
b.Property<int>("BookId")
|
||||
.HasColumnType("integer");
|
||||
|
||||
b.Property<int>("ContributorId")
|
||||
.HasColumnType("integer");
|
||||
|
||||
b.Property<int>("Role")
|
||||
.HasColumnType("integer");
|
||||
|
||||
b.Property<byte>("Order")
|
||||
.HasColumnType("smallint");
|
||||
|
||||
b.HasKey("BookId", "ContributorId", "Role");
|
||||
|
||||
b.HasIndex("BookId");
|
||||
|
||||
b.HasIndex("ContributorId");
|
||||
|
||||
b.ToTable("BookContributor");
|
||||
});
|
||||
|
||||
modelBuilder.Entity("DataLayer.Category", b =>
|
||||
{
|
||||
b.Property<int>("CategoryId")
|
||||
.ValueGeneratedOnAdd()
|
||||
.HasColumnType("integer");
|
||||
|
||||
NpgsqlPropertyBuilderExtensions.UseIdentityByDefaultColumn(b.Property<int>("CategoryId"));
|
||||
|
||||
b.Property<string>("AudibleCategoryId")
|
||||
.HasColumnType("text");
|
||||
|
||||
b.Property<string>("Name")
|
||||
.HasColumnType("text");
|
||||
|
||||
b.HasKey("CategoryId");
|
||||
|
||||
b.HasIndex("AudibleCategoryId");
|
||||
|
||||
b.ToTable("Categories");
|
||||
});
|
||||
|
||||
modelBuilder.Entity("DataLayer.CategoryLadder", b =>
|
||||
{
|
||||
b.Property<int>("CategoryLadderId")
|
||||
.ValueGeneratedOnAdd()
|
||||
.HasColumnType("integer");
|
||||
|
||||
NpgsqlPropertyBuilderExtensions.UseIdentityByDefaultColumn(b.Property<int>("CategoryLadderId"));
|
||||
|
||||
b.HasKey("CategoryLadderId");
|
||||
|
||||
b.ToTable("CategoryLadders");
|
||||
});
|
||||
|
||||
modelBuilder.Entity("DataLayer.Contributor", b =>
|
||||
{
|
||||
b.Property<int>("ContributorId")
|
||||
.ValueGeneratedOnAdd()
|
||||
.HasColumnType("integer");
|
||||
|
||||
NpgsqlPropertyBuilderExtensions.UseIdentityByDefaultColumn(b.Property<int>("ContributorId"));
|
||||
|
||||
b.Property<string>("AudibleContributorId")
|
||||
.HasColumnType("text");
|
||||
|
||||
b.Property<string>("Name")
|
||||
.HasColumnType("text");
|
||||
|
||||
b.HasKey("ContributorId");
|
||||
|
||||
b.HasIndex("Name");
|
||||
|
||||
b.ToTable("Contributors");
|
||||
|
||||
b.HasData(
|
||||
new
|
||||
{
|
||||
ContributorId = -1,
|
||||
Name = ""
|
||||
});
|
||||
});
|
||||
|
||||
modelBuilder.Entity("DataLayer.LibraryBook", b =>
|
||||
{
|
||||
b.Property<int>("BookId")
|
||||
.HasColumnType("integer");
|
||||
|
||||
b.Property<bool>("AbsentFromLastScan")
|
||||
.HasColumnType("boolean");
|
||||
|
||||
b.Property<string>("Account")
|
||||
.HasColumnType("text");
|
||||
|
||||
b.Property<DateTime>("DateAdded")
|
||||
.HasColumnType("timestamp without time zone");
|
||||
|
||||
b.Property<DateTime?>("IncludedUntil")
|
||||
.HasColumnType("timestamp without time zone");
|
||||
|
||||
b.Property<bool>("IsDeleted")
|
||||
.HasColumnType("boolean");
|
||||
|
||||
b.HasKey("BookId");
|
||||
|
||||
b.ToTable("LibraryBooks");
|
||||
});
|
||||
|
||||
modelBuilder.Entity("DataLayer.Series", b =>
|
||||
{
|
||||
b.Property<int>("SeriesId")
|
||||
.ValueGeneratedOnAdd()
|
||||
.HasColumnType("integer");
|
||||
|
||||
NpgsqlPropertyBuilderExtensions.UseIdentityByDefaultColumn(b.Property<int>("SeriesId"));
|
||||
|
||||
b.Property<string>("AudibleSeriesId")
|
||||
.HasColumnType("text");
|
||||
|
||||
b.Property<string>("Name")
|
||||
.HasColumnType("text");
|
||||
|
||||
b.HasKey("SeriesId");
|
||||
|
||||
b.HasIndex("AudibleSeriesId");
|
||||
|
||||
b.ToTable("Series");
|
||||
});
|
||||
|
||||
modelBuilder.Entity("DataLayer.SeriesBook", b =>
|
||||
{
|
||||
b.Property<int>("SeriesId")
|
||||
.HasColumnType("integer");
|
||||
|
||||
b.Property<int>("BookId")
|
||||
.HasColumnType("integer");
|
||||
|
||||
b.Property<string>("Order")
|
||||
.HasColumnType("text");
|
||||
|
||||
b.HasKey("SeriesId", "BookId");
|
||||
|
||||
b.HasIndex("BookId");
|
||||
|
||||
b.HasIndex("SeriesId");
|
||||
|
||||
b.ToTable("SeriesBook");
|
||||
});
|
||||
|
||||
modelBuilder.Entity("CategoryCategoryLadder", b =>
|
||||
{
|
||||
b.HasOne("DataLayer.Category", null)
|
||||
.WithMany()
|
||||
.HasForeignKey("_categoriesCategoryId")
|
||||
.OnDelete(DeleteBehavior.Cascade)
|
||||
.IsRequired();
|
||||
|
||||
b.HasOne("DataLayer.CategoryLadder", null)
|
||||
.WithMany()
|
||||
.HasForeignKey("_categoryLaddersCategoryLadderId")
|
||||
.OnDelete(DeleteBehavior.Cascade)
|
||||
.IsRequired();
|
||||
});
|
||||
|
||||
modelBuilder.Entity("DataLayer.Book", b =>
|
||||
{
|
||||
b.OwnsOne("DataLayer.Rating", "Rating", b1 =>
|
||||
{
|
||||
b1.Property<int>("BookId")
|
||||
.HasColumnType("integer");
|
||||
|
||||
b1.Property<float>("OverallRating")
|
||||
.HasColumnType("real");
|
||||
|
||||
b1.Property<float>("PerformanceRating")
|
||||
.HasColumnType("real");
|
||||
|
||||
b1.Property<float>("StoryRating")
|
||||
.HasColumnType("real");
|
||||
|
||||
b1.HasKey("BookId");
|
||||
|
||||
b1.ToTable("Books");
|
||||
|
||||
b1.WithOwner()
|
||||
.HasForeignKey("BookId");
|
||||
});
|
||||
|
||||
b.OwnsMany("DataLayer.Supplement", "Supplements", b1 =>
|
||||
{
|
||||
b1.Property<int>("SupplementId")
|
||||
.ValueGeneratedOnAdd()
|
||||
.HasColumnType("integer");
|
||||
|
||||
NpgsqlPropertyBuilderExtensions.UseIdentityByDefaultColumn(b1.Property<int>("SupplementId"));
|
||||
|
||||
b1.Property<int>("BookId")
|
||||
.HasColumnType("integer");
|
||||
|
||||
b1.Property<string>("Url")
|
||||
.HasColumnType("text");
|
||||
|
||||
b1.HasKey("SupplementId");
|
||||
|
||||
b1.HasIndex("BookId");
|
||||
|
||||
b1.ToTable("Supplement");
|
||||
|
||||
b1.WithOwner("Book")
|
||||
.HasForeignKey("BookId");
|
||||
|
||||
b1.Navigation("Book");
|
||||
});
|
||||
|
||||
b.OwnsOne("DataLayer.UserDefinedItem", "UserDefinedItem", b1 =>
|
||||
{
|
||||
b1.Property<int>("BookId")
|
||||
.HasColumnType("integer");
|
||||
|
||||
b1.Property<int>("BookStatus")
|
||||
.HasColumnType("integer");
|
||||
|
||||
b1.Property<bool>("IsFinished")
|
||||
.HasColumnType("boolean");
|
||||
|
||||
b1.Property<DateTime?>("LastDownloaded")
|
||||
.HasColumnType("timestamp without time zone");
|
||||
|
||||
b1.Property<string>("LastDownloadedFileVersion")
|
||||
.HasColumnType("text");
|
||||
|
||||
b1.Property<long?>("LastDownloadedFormat")
|
||||
.HasColumnType("bigint");
|
||||
|
||||
b1.Property<string>("LastDownloadedVersion")
|
||||
.HasColumnType("text");
|
||||
|
||||
b1.Property<int?>("PdfStatus")
|
||||
.HasColumnType("integer");
|
||||
|
||||
b1.Property<string>("Tags")
|
||||
.HasColumnType("text");
|
||||
|
||||
b1.HasKey("BookId");
|
||||
|
||||
b1.ToTable("UserDefinedItem", (string)null);
|
||||
|
||||
b1.WithOwner("Book")
|
||||
.HasForeignKey("BookId");
|
||||
|
||||
b1.OwnsOne("DataLayer.Rating", "Rating", b2 =>
|
||||
{
|
||||
b2.Property<int>("UserDefinedItemBookId")
|
||||
.HasColumnType("integer");
|
||||
|
||||
b2.Property<float>("OverallRating")
|
||||
.HasColumnType("real");
|
||||
|
||||
b2.Property<float>("PerformanceRating")
|
||||
.HasColumnType("real");
|
||||
|
||||
b2.Property<float>("StoryRating")
|
||||
.HasColumnType("real");
|
||||
|
||||
b2.HasKey("UserDefinedItemBookId");
|
||||
|
||||
b2.ToTable("UserDefinedItem");
|
||||
|
||||
b2.WithOwner()
|
||||
.HasForeignKey("UserDefinedItemBookId");
|
||||
});
|
||||
|
||||
b1.Navigation("Book");
|
||||
|
||||
b1.Navigation("Rating");
|
||||
});
|
||||
|
||||
b.Navigation("Rating");
|
||||
|
||||
b.Navigation("Supplements");
|
||||
|
||||
b.Navigation("UserDefinedItem");
|
||||
});
|
||||
|
||||
modelBuilder.Entity("DataLayer.BookCategory", b =>
|
||||
{
|
||||
b.HasOne("DataLayer.Book", "Book")
|
||||
.WithMany("CategoriesLink")
|
||||
.HasForeignKey("BookId")
|
||||
.OnDelete(DeleteBehavior.Cascade)
|
||||
.IsRequired();
|
||||
|
||||
b.HasOne("DataLayer.CategoryLadder", "CategoryLadder")
|
||||
.WithMany("BooksLink")
|
||||
.HasForeignKey("CategoryLadderId")
|
||||
.OnDelete(DeleteBehavior.Cascade)
|
||||
.IsRequired();
|
||||
|
||||
b.Navigation("Book");
|
||||
|
||||
b.Navigation("CategoryLadder");
|
||||
});
|
||||
|
||||
modelBuilder.Entity("DataLayer.BookContributor", b =>
|
||||
{
|
||||
b.HasOne("DataLayer.Book", "Book")
|
||||
.WithMany("ContributorsLink")
|
||||
.HasForeignKey("BookId")
|
||||
.OnDelete(DeleteBehavior.Cascade)
|
||||
.IsRequired();
|
||||
|
||||
b.HasOne("DataLayer.Contributor", "Contributor")
|
||||
.WithMany("BooksLink")
|
||||
.HasForeignKey("ContributorId")
|
||||
.OnDelete(DeleteBehavior.Cascade)
|
||||
.IsRequired();
|
||||
|
||||
b.Navigation("Book");
|
||||
|
||||
b.Navigation("Contributor");
|
||||
});
|
||||
|
||||
modelBuilder.Entity("DataLayer.LibraryBook", b =>
|
||||
{
|
||||
b.HasOne("DataLayer.Book", "Book")
|
||||
.WithOne()
|
||||
.HasForeignKey("DataLayer.LibraryBook", "BookId")
|
||||
.OnDelete(DeleteBehavior.Cascade)
|
||||
.IsRequired();
|
||||
|
||||
b.Navigation("Book");
|
||||
});
|
||||
|
||||
modelBuilder.Entity("DataLayer.SeriesBook", b =>
|
||||
{
|
||||
b.HasOne("DataLayer.Book", "Book")
|
||||
.WithMany("SeriesLink")
|
||||
.HasForeignKey("BookId")
|
||||
.OnDelete(DeleteBehavior.Cascade)
|
||||
.IsRequired();
|
||||
|
||||
b.HasOne("DataLayer.Series", "Series")
|
||||
.WithMany("BooksLink")
|
||||
.HasForeignKey("SeriesId")
|
||||
.OnDelete(DeleteBehavior.Cascade)
|
||||
.IsRequired();
|
||||
|
||||
b.Navigation("Book");
|
||||
|
||||
b.Navigation("Series");
|
||||
});
|
||||
|
||||
modelBuilder.Entity("DataLayer.Book", b =>
|
||||
{
|
||||
b.Navigation("CategoriesLink");
|
||||
|
||||
b.Navigation("ContributorsLink");
|
||||
|
||||
b.Navigation("SeriesLink");
|
||||
});
|
||||
|
||||
modelBuilder.Entity("DataLayer.CategoryLadder", b =>
|
||||
{
|
||||
b.Navigation("BooksLink");
|
||||
});
|
||||
|
||||
modelBuilder.Entity("DataLayer.Contributor", b =>
|
||||
{
|
||||
b.Navigation("BooksLink");
|
||||
});
|
||||
|
||||
modelBuilder.Entity("DataLayer.Series", b =>
|
||||
{
|
||||
b.Navigation("BooksLink");
|
||||
});
|
||||
#pragma warning restore 612, 618
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,372 @@
|
||||
using System;
|
||||
using Microsoft.EntityFrameworkCore.Migrations;
|
||||
using Npgsql.EntityFrameworkCore.PostgreSQL.Metadata;
|
||||
|
||||
#nullable disable
|
||||
|
||||
namespace DataLayer.Postgres.Migrations
|
||||
{
|
||||
/// <inheritdoc />
|
||||
public partial class InitialPostgres : Migration
|
||||
{
|
||||
/// <inheritdoc />
|
||||
protected override void Up(MigrationBuilder migrationBuilder)
|
||||
{
|
||||
migrationBuilder.CreateTable(
|
||||
name: "Books",
|
||||
columns: table => new
|
||||
{
|
||||
BookId = table.Column<int>(type: "integer", nullable: false)
|
||||
.Annotation("Npgsql:ValueGenerationStrategy", NpgsqlValueGenerationStrategy.IdentityByDefaultColumn),
|
||||
AudibleProductId = table.Column<string>(type: "text", nullable: true),
|
||||
Title = table.Column<string>(type: "text", nullable: true),
|
||||
Subtitle = table.Column<string>(type: "text", nullable: true),
|
||||
Description = table.Column<string>(type: "text", nullable: true),
|
||||
LengthInMinutes = table.Column<int>(type: "integer", nullable: false),
|
||||
ContentType = table.Column<int>(type: "integer", nullable: false),
|
||||
Locale = table.Column<string>(type: "text", nullable: true),
|
||||
PictureId = table.Column<string>(type: "text", nullable: true),
|
||||
PictureLarge = table.Column<string>(type: "text", nullable: true),
|
||||
IsAbridged = table.Column<bool>(type: "boolean", nullable: false),
|
||||
IsSpatial = table.Column<bool>(type: "boolean", nullable: false),
|
||||
DatePublished = table.Column<DateTime>(type: "timestamp without time zone", nullable: true),
|
||||
Language = table.Column<string>(type: "text", nullable: true),
|
||||
Rating_OverallRating = table.Column<float>(type: "real", nullable: true),
|
||||
Rating_PerformanceRating = table.Column<float>(type: "real", nullable: true),
|
||||
Rating_StoryRating = table.Column<float>(type: "real", nullable: true)
|
||||
},
|
||||
constraints: table =>
|
||||
{
|
||||
table.PrimaryKey("PK_Books", x => x.BookId);
|
||||
});
|
||||
|
||||
migrationBuilder.CreateTable(
|
||||
name: "Categories",
|
||||
columns: table => new
|
||||
{
|
||||
CategoryId = table.Column<int>(type: "integer", nullable: false)
|
||||
.Annotation("Npgsql:ValueGenerationStrategy", NpgsqlValueGenerationStrategy.IdentityByDefaultColumn),
|
||||
AudibleCategoryId = table.Column<string>(type: "text", nullable: true),
|
||||
Name = table.Column<string>(type: "text", nullable: true)
|
||||
},
|
||||
constraints: table =>
|
||||
{
|
||||
table.PrimaryKey("PK_Categories", x => x.CategoryId);
|
||||
});
|
||||
|
||||
migrationBuilder.CreateTable(
|
||||
name: "CategoryLadders",
|
||||
columns: table => new
|
||||
{
|
||||
CategoryLadderId = table.Column<int>(type: "integer", nullable: false)
|
||||
.Annotation("Npgsql:ValueGenerationStrategy", NpgsqlValueGenerationStrategy.IdentityByDefaultColumn)
|
||||
},
|
||||
constraints: table =>
|
||||
{
|
||||
table.PrimaryKey("PK_CategoryLadders", x => x.CategoryLadderId);
|
||||
});
|
||||
|
||||
migrationBuilder.CreateTable(
|
||||
name: "Contributors",
|
||||
columns: table => new
|
||||
{
|
||||
ContributorId = table.Column<int>(type: "integer", nullable: false)
|
||||
.Annotation("Npgsql:ValueGenerationStrategy", NpgsqlValueGenerationStrategy.IdentityByDefaultColumn),
|
||||
Name = table.Column<string>(type: "text", nullable: true),
|
||||
AudibleContributorId = table.Column<string>(type: "text", nullable: true)
|
||||
},
|
||||
constraints: table =>
|
||||
{
|
||||
table.PrimaryKey("PK_Contributors", x => x.ContributorId);
|
||||
});
|
||||
|
||||
migrationBuilder.CreateTable(
|
||||
name: "Series",
|
||||
columns: table => new
|
||||
{
|
||||
SeriesId = table.Column<int>(type: "integer", nullable: false)
|
||||
.Annotation("Npgsql:ValueGenerationStrategy", NpgsqlValueGenerationStrategy.IdentityByDefaultColumn),
|
||||
AudibleSeriesId = table.Column<string>(type: "text", nullable: true),
|
||||
Name = table.Column<string>(type: "text", nullable: true)
|
||||
},
|
||||
constraints: table =>
|
||||
{
|
||||
table.PrimaryKey("PK_Series", x => x.SeriesId);
|
||||
});
|
||||
|
||||
migrationBuilder.CreateTable(
|
||||
name: "LibraryBooks",
|
||||
columns: table => new
|
||||
{
|
||||
BookId = table.Column<int>(type: "integer", nullable: false),
|
||||
DateAdded = table.Column<DateTime>(type: "timestamp without time zone", nullable: false),
|
||||
Account = table.Column<string>(type: "text", nullable: true),
|
||||
IsDeleted = table.Column<bool>(type: "boolean", nullable: false),
|
||||
AbsentFromLastScan = table.Column<bool>(type: "boolean", nullable: false),
|
||||
IncludedUntil = table.Column<DateTime>(type: "timestamp without time zone", nullable: true)
|
||||
},
|
||||
constraints: table =>
|
||||
{
|
||||
table.PrimaryKey("PK_LibraryBooks", x => x.BookId);
|
||||
table.ForeignKey(
|
||||
name: "FK_LibraryBooks_Books_BookId",
|
||||
column: x => x.BookId,
|
||||
principalTable: "Books",
|
||||
principalColumn: "BookId",
|
||||
onDelete: ReferentialAction.Cascade);
|
||||
});
|
||||
|
||||
migrationBuilder.CreateTable(
|
||||
name: "Supplement",
|
||||
columns: table => new
|
||||
{
|
||||
SupplementId = table.Column<int>(type: "integer", nullable: false)
|
||||
.Annotation("Npgsql:ValueGenerationStrategy", NpgsqlValueGenerationStrategy.IdentityByDefaultColumn),
|
||||
BookId = table.Column<int>(type: "integer", nullable: false),
|
||||
Url = table.Column<string>(type: "text", nullable: true)
|
||||
},
|
||||
constraints: table =>
|
||||
{
|
||||
table.PrimaryKey("PK_Supplement", x => x.SupplementId);
|
||||
table.ForeignKey(
|
||||
name: "FK_Supplement_Books_BookId",
|
||||
column: x => x.BookId,
|
||||
principalTable: "Books",
|
||||
principalColumn: "BookId",
|
||||
onDelete: ReferentialAction.Cascade);
|
||||
});
|
||||
|
||||
migrationBuilder.CreateTable(
|
||||
name: "UserDefinedItem",
|
||||
columns: table => new
|
||||
{
|
||||
BookId = table.Column<int>(type: "integer", nullable: false),
|
||||
LastDownloaded = table.Column<DateTime>(type: "timestamp without time zone", nullable: true),
|
||||
LastDownloadedVersion = table.Column<string>(type: "text", nullable: true),
|
||||
LastDownloadedFormat = table.Column<long>(type: "bigint", nullable: true),
|
||||
LastDownloadedFileVersion = table.Column<string>(type: "text", nullable: true),
|
||||
Tags = table.Column<string>(type: "text", nullable: true),
|
||||
Rating_OverallRating = table.Column<float>(type: "real", nullable: true),
|
||||
Rating_PerformanceRating = table.Column<float>(type: "real", nullable: true),
|
||||
Rating_StoryRating = table.Column<float>(type: "real", nullable: true),
|
||||
BookStatus = table.Column<int>(type: "integer", nullable: false),
|
||||
PdfStatus = table.Column<int>(type: "integer", nullable: true),
|
||||
IsFinished = table.Column<bool>(type: "boolean", nullable: false)
|
||||
},
|
||||
constraints: table =>
|
||||
{
|
||||
table.PrimaryKey("PK_UserDefinedItem", x => x.BookId);
|
||||
table.ForeignKey(
|
||||
name: "FK_UserDefinedItem_Books_BookId",
|
||||
column: x => x.BookId,
|
||||
principalTable: "Books",
|
||||
principalColumn: "BookId",
|
||||
onDelete: ReferentialAction.Cascade);
|
||||
});
|
||||
|
||||
migrationBuilder.CreateTable(
|
||||
name: "BookCategory",
|
||||
columns: table => new
|
||||
{
|
||||
BookId = table.Column<int>(type: "integer", nullable: false),
|
||||
CategoryLadderId = table.Column<int>(type: "integer", nullable: false)
|
||||
},
|
||||
constraints: table =>
|
||||
{
|
||||
table.PrimaryKey("PK_BookCategory", x => new { x.BookId, x.CategoryLadderId });
|
||||
table.ForeignKey(
|
||||
name: "FK_BookCategory_Books_BookId",
|
||||
column: x => x.BookId,
|
||||
principalTable: "Books",
|
||||
principalColumn: "BookId",
|
||||
onDelete: ReferentialAction.Cascade);
|
||||
table.ForeignKey(
|
||||
name: "FK_BookCategory_CategoryLadders_CategoryLadderId",
|
||||
column: x => x.CategoryLadderId,
|
||||
principalTable: "CategoryLadders",
|
||||
principalColumn: "CategoryLadderId",
|
||||
onDelete: ReferentialAction.Cascade);
|
||||
});
|
||||
|
||||
migrationBuilder.CreateTable(
|
||||
name: "CategoryCategoryLadder",
|
||||
columns: table => new
|
||||
{
|
||||
_categoriesCategoryId = table.Column<int>(type: "integer", nullable: false),
|
||||
_categoryLaddersCategoryLadderId = table.Column<int>(type: "integer", nullable: false)
|
||||
},
|
||||
constraints: table =>
|
||||
{
|
||||
table.PrimaryKey("PK_CategoryCategoryLadder", x => new { x._categoriesCategoryId, x._categoryLaddersCategoryLadderId });
|
||||
table.ForeignKey(
|
||||
name: "FK_CategoryCategoryLadder_Categories__categoriesCategoryId",
|
||||
column: x => x._categoriesCategoryId,
|
||||
principalTable: "Categories",
|
||||
principalColumn: "CategoryId",
|
||||
onDelete: ReferentialAction.Cascade);
|
||||
table.ForeignKey(
|
||||
name: "FK_CategoryCategoryLadder_CategoryLadders__categoryLaddersCate~",
|
||||
column: x => x._categoryLaddersCategoryLadderId,
|
||||
principalTable: "CategoryLadders",
|
||||
principalColumn: "CategoryLadderId",
|
||||
onDelete: ReferentialAction.Cascade);
|
||||
});
|
||||
|
||||
migrationBuilder.CreateTable(
|
||||
name: "BookContributor",
|
||||
columns: table => new
|
||||
{
|
||||
BookId = table.Column<int>(type: "integer", nullable: false),
|
||||
ContributorId = table.Column<int>(type: "integer", nullable: false),
|
||||
Role = table.Column<int>(type: "integer", nullable: false),
|
||||
Order = table.Column<byte>(type: "smallint", nullable: false)
|
||||
},
|
||||
constraints: table =>
|
||||
{
|
||||
table.PrimaryKey("PK_BookContributor", x => new { x.BookId, x.ContributorId, x.Role });
|
||||
table.ForeignKey(
|
||||
name: "FK_BookContributor_Books_BookId",
|
||||
column: x => x.BookId,
|
||||
principalTable: "Books",
|
||||
principalColumn: "BookId",
|
||||
onDelete: ReferentialAction.Cascade);
|
||||
table.ForeignKey(
|
||||
name: "FK_BookContributor_Contributors_ContributorId",
|
||||
column: x => x.ContributorId,
|
||||
principalTable: "Contributors",
|
||||
principalColumn: "ContributorId",
|
||||
onDelete: ReferentialAction.Cascade);
|
||||
});
|
||||
|
||||
migrationBuilder.CreateTable(
|
||||
name: "SeriesBook",
|
||||
columns: table => new
|
||||
{
|
||||
SeriesId = table.Column<int>(type: "integer", nullable: false),
|
||||
BookId = table.Column<int>(type: "integer", nullable: false),
|
||||
Order = table.Column<string>(type: "text", nullable: true)
|
||||
},
|
||||
constraints: table =>
|
||||
{
|
||||
table.PrimaryKey("PK_SeriesBook", x => new { x.SeriesId, x.BookId });
|
||||
table.ForeignKey(
|
||||
name: "FK_SeriesBook_Books_BookId",
|
||||
column: x => x.BookId,
|
||||
principalTable: "Books",
|
||||
principalColumn: "BookId",
|
||||
onDelete: ReferentialAction.Cascade);
|
||||
table.ForeignKey(
|
||||
name: "FK_SeriesBook_Series_SeriesId",
|
||||
column: x => x.SeriesId,
|
||||
principalTable: "Series",
|
||||
principalColumn: "SeriesId",
|
||||
onDelete: ReferentialAction.Cascade);
|
||||
});
|
||||
|
||||
migrationBuilder.InsertData(
|
||||
table: "Contributors",
|
||||
columns: new[] { "ContributorId", "AudibleContributorId", "Name" },
|
||||
values: new object[] { -1, null, "" });
|
||||
|
||||
migrationBuilder.CreateIndex(
|
||||
name: "IX_BookCategory_BookId",
|
||||
table: "BookCategory",
|
||||
column: "BookId");
|
||||
|
||||
migrationBuilder.CreateIndex(
|
||||
name: "IX_BookCategory_CategoryLadderId",
|
||||
table: "BookCategory",
|
||||
column: "CategoryLadderId");
|
||||
|
||||
migrationBuilder.CreateIndex(
|
||||
name: "IX_BookContributor_BookId",
|
||||
table: "BookContributor",
|
||||
column: "BookId");
|
||||
|
||||
migrationBuilder.CreateIndex(
|
||||
name: "IX_BookContributor_ContributorId",
|
||||
table: "BookContributor",
|
||||
column: "ContributorId");
|
||||
|
||||
migrationBuilder.CreateIndex(
|
||||
name: "IX_Books_AudibleProductId",
|
||||
table: "Books",
|
||||
column: "AudibleProductId");
|
||||
|
||||
migrationBuilder.CreateIndex(
|
||||
name: "IX_Categories_AudibleCategoryId",
|
||||
table: "Categories",
|
||||
column: "AudibleCategoryId");
|
||||
|
||||
migrationBuilder.CreateIndex(
|
||||
name: "IX_CategoryCategoryLadder__categoryLaddersCategoryLadderId",
|
||||
table: "CategoryCategoryLadder",
|
||||
column: "_categoryLaddersCategoryLadderId");
|
||||
|
||||
migrationBuilder.CreateIndex(
|
||||
name: "IX_Contributors_Name",
|
||||
table: "Contributors",
|
||||
column: "Name");
|
||||
|
||||
migrationBuilder.CreateIndex(
|
||||
name: "IX_Series_AudibleSeriesId",
|
||||
table: "Series",
|
||||
column: "AudibleSeriesId");
|
||||
|
||||
migrationBuilder.CreateIndex(
|
||||
name: "IX_SeriesBook_BookId",
|
||||
table: "SeriesBook",
|
||||
column: "BookId");
|
||||
|
||||
migrationBuilder.CreateIndex(
|
||||
name: "IX_SeriesBook_SeriesId",
|
||||
table: "SeriesBook",
|
||||
column: "SeriesId");
|
||||
|
||||
migrationBuilder.CreateIndex(
|
||||
name: "IX_Supplement_BookId",
|
||||
table: "Supplement",
|
||||
column: "BookId");
|
||||
}
|
||||
|
||||
/// <inheritdoc />
|
||||
protected override void Down(MigrationBuilder migrationBuilder)
|
||||
{
|
||||
migrationBuilder.DropTable(
|
||||
name: "BookCategory");
|
||||
|
||||
migrationBuilder.DropTable(
|
||||
name: "BookContributor");
|
||||
|
||||
migrationBuilder.DropTable(
|
||||
name: "CategoryCategoryLadder");
|
||||
|
||||
migrationBuilder.DropTable(
|
||||
name: "LibraryBooks");
|
||||
|
||||
migrationBuilder.DropTable(
|
||||
name: "SeriesBook");
|
||||
|
||||
migrationBuilder.DropTable(
|
||||
name: "Supplement");
|
||||
|
||||
migrationBuilder.DropTable(
|
||||
name: "UserDefinedItem");
|
||||
|
||||
migrationBuilder.DropTable(
|
||||
name: "Contributors");
|
||||
|
||||
migrationBuilder.DropTable(
|
||||
name: "Categories");
|
||||
|
||||
migrationBuilder.DropTable(
|
||||
name: "CategoryLadders");
|
||||
|
||||
migrationBuilder.DropTable(
|
||||
name: "Series");
|
||||
|
||||
migrationBuilder.DropTable(
|
||||
name: "Books");
|
||||
}
|
||||
}
|
||||
}
|
||||
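Applying this generated migration at runtime goes through the standard EF Core relational API. The snippet below is a minimal illustrative sketch, not part of this commit: it assumes LibationContextFactory.CreatePostgres accepts a connection string (mirroring its use in PostgresContextFactory later in this diff), and the MigrationRunner class name is hypothetical.

// Minimal sketch, not part of this commit: apply pending migrations at startup.
// Assumes LibationContextFactory.CreatePostgres(string) takes a connection string,
// as suggested by PostgresContextFactory later in this diff; MigrationRunner is hypothetical.
using Microsoft.EntityFrameworkCore;

namespace DataLayer.Postgres
{
    internal static class MigrationRunner
    {
        internal static void ApplyMigrations(string connectionString)
        {
            // LibationContext lives in the DataLayer namespace, which is visible
            // from DataLayer.Postgres without an extra using directive.
            using LibationContext context = LibationContextFactory.CreatePostgres(connectionString);

            // Migrate() creates the database if necessary and runs any pending
            // migrations, i.e. the Up() method shown above.
            context.Database.Migrate();
        }
    }
}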
@@ -0,0 +1,491 @@
// <auto-generated />
using System;
using DataLayer;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Infrastructure;
using Microsoft.EntityFrameworkCore.Storage.ValueConversion;
using Npgsql.EntityFrameworkCore.PostgreSQL.Metadata;

#nullable disable

namespace DataLayer.Postgres.Migrations
{
    [DbContext(typeof(LibationContext))]
    partial class LibationContextModelSnapshot : ModelSnapshot
    {
        protected override void BuildModel(ModelBuilder modelBuilder)
        {
#pragma warning disable 612, 618
            modelBuilder
                .HasAnnotation("ProductVersion", "9.0.8")
                .HasAnnotation("Relational:MaxIdentifierLength", 63);

            NpgsqlModelBuilderExtensions.UseIdentityByDefaultColumns(modelBuilder);

            modelBuilder.Entity("CategoryCategoryLadder", b =>
                {
                    b.Property<int>("_categoriesCategoryId")
                        .HasColumnType("integer");

                    b.Property<int>("_categoryLaddersCategoryLadderId")
                        .HasColumnType("integer");

                    b.HasKey("_categoriesCategoryId", "_categoryLaddersCategoryLadderId");

                    b.HasIndex("_categoryLaddersCategoryLadderId");

                    b.ToTable("CategoryCategoryLadder");
                });

            modelBuilder.Entity("DataLayer.Book", b =>
                {
                    b.Property<int>("BookId")
                        .ValueGeneratedOnAdd()
                        .HasColumnType("integer");

                    NpgsqlPropertyBuilderExtensions.UseIdentityByDefaultColumn(b.Property<int>("BookId"));

                    b.Property<string>("AudibleProductId")
                        .HasColumnType("text");

                    b.Property<int>("ContentType")
                        .HasColumnType("integer");

                    b.Property<DateTime?>("DatePublished")
                        .HasColumnType("timestamp without time zone");

                    b.Property<string>("Description")
                        .HasColumnType("text");

                    b.Property<bool>("IsAbridged")
                        .HasColumnType("boolean");

                    b.Property<bool>("IsSpatial")
                        .HasColumnType("boolean");

                    b.Property<string>("Language")
                        .HasColumnType("text");

                    b.Property<int>("LengthInMinutes")
                        .HasColumnType("integer");

                    b.Property<string>("Locale")
                        .HasColumnType("text");

                    b.Property<string>("PictureId")
                        .HasColumnType("text");

                    b.Property<string>("PictureLarge")
                        .HasColumnType("text");

                    b.Property<string>("Subtitle")
                        .HasColumnType("text");

                    b.Property<string>("Title")
                        .HasColumnType("text");

                    b.HasKey("BookId");

                    b.HasIndex("AudibleProductId");

                    b.ToTable("Books");
                });

            modelBuilder.Entity("DataLayer.BookCategory", b =>
                {
                    b.Property<int>("BookId")
                        .HasColumnType("integer");

                    b.Property<int>("CategoryLadderId")
                        .HasColumnType("integer");

                    b.HasKey("BookId", "CategoryLadderId");

                    b.HasIndex("BookId");

                    b.HasIndex("CategoryLadderId");

                    b.ToTable("BookCategory");
                });

            modelBuilder.Entity("DataLayer.BookContributor", b =>
                {
                    b.Property<int>("BookId")
                        .HasColumnType("integer");

                    b.Property<int>("ContributorId")
                        .HasColumnType("integer");

                    b.Property<int>("Role")
                        .HasColumnType("integer");

                    b.Property<byte>("Order")
                        .HasColumnType("smallint");

                    b.HasKey("BookId", "ContributorId", "Role");

                    b.HasIndex("BookId");

                    b.HasIndex("ContributorId");

                    b.ToTable("BookContributor");
                });

            modelBuilder.Entity("DataLayer.Category", b =>
                {
                    b.Property<int>("CategoryId")
                        .ValueGeneratedOnAdd()
                        .HasColumnType("integer");

                    NpgsqlPropertyBuilderExtensions.UseIdentityByDefaultColumn(b.Property<int>("CategoryId"));

                    b.Property<string>("AudibleCategoryId")
                        .HasColumnType("text");

                    b.Property<string>("Name")
                        .HasColumnType("text");

                    b.HasKey("CategoryId");

                    b.HasIndex("AudibleCategoryId");

                    b.ToTable("Categories");
                });

            modelBuilder.Entity("DataLayer.CategoryLadder", b =>
                {
                    b.Property<int>("CategoryLadderId")
                        .ValueGeneratedOnAdd()
                        .HasColumnType("integer");

                    NpgsqlPropertyBuilderExtensions.UseIdentityByDefaultColumn(b.Property<int>("CategoryLadderId"));

                    b.HasKey("CategoryLadderId");

                    b.ToTable("CategoryLadders");
                });

            modelBuilder.Entity("DataLayer.Contributor", b =>
                {
                    b.Property<int>("ContributorId")
                        .ValueGeneratedOnAdd()
                        .HasColumnType("integer");

                    NpgsqlPropertyBuilderExtensions.UseIdentityByDefaultColumn(b.Property<int>("ContributorId"));

                    b.Property<string>("AudibleContributorId")
                        .HasColumnType("text");

                    b.Property<string>("Name")
                        .HasColumnType("text");

                    b.HasKey("ContributorId");

                    b.HasIndex("Name");

                    b.ToTable("Contributors");

                    b.HasData(
                        new
                        {
                            ContributorId = -1,
                            Name = ""
                        });
                });

            modelBuilder.Entity("DataLayer.LibraryBook", b =>
                {
                    b.Property<int>("BookId")
                        .HasColumnType("integer");

                    b.Property<bool>("AbsentFromLastScan")
                        .HasColumnType("boolean");

                    b.Property<string>("Account")
                        .HasColumnType("text");

                    b.Property<DateTime>("DateAdded")
                        .HasColumnType("timestamp without time zone");

                    b.Property<DateTime?>("IncludedUntil")
                        .HasColumnType("timestamp without time zone");

                    b.Property<bool>("IsDeleted")
                        .HasColumnType("boolean");

                    b.HasKey("BookId");

                    b.ToTable("LibraryBooks");
                });

            modelBuilder.Entity("DataLayer.Series", b =>
                {
                    b.Property<int>("SeriesId")
                        .ValueGeneratedOnAdd()
                        .HasColumnType("integer");

                    NpgsqlPropertyBuilderExtensions.UseIdentityByDefaultColumn(b.Property<int>("SeriesId"));

                    b.Property<string>("AudibleSeriesId")
                        .HasColumnType("text");

                    b.Property<string>("Name")
                        .HasColumnType("text");

                    b.HasKey("SeriesId");

                    b.HasIndex("AudibleSeriesId");

                    b.ToTable("Series");
                });

            modelBuilder.Entity("DataLayer.SeriesBook", b =>
                {
                    b.Property<int>("SeriesId")
                        .HasColumnType("integer");

                    b.Property<int>("BookId")
                        .HasColumnType("integer");

                    b.Property<string>("Order")
                        .HasColumnType("text");

                    b.HasKey("SeriesId", "BookId");

                    b.HasIndex("BookId");

                    b.HasIndex("SeriesId");

                    b.ToTable("SeriesBook");
                });

            modelBuilder.Entity("CategoryCategoryLadder", b =>
                {
                    b.HasOne("DataLayer.Category", null)
                        .WithMany()
                        .HasForeignKey("_categoriesCategoryId")
                        .OnDelete(DeleteBehavior.Cascade)
                        .IsRequired();

                    b.HasOne("DataLayer.CategoryLadder", null)
                        .WithMany()
                        .HasForeignKey("_categoryLaddersCategoryLadderId")
                        .OnDelete(DeleteBehavior.Cascade)
                        .IsRequired();
                });

            modelBuilder.Entity("DataLayer.Book", b =>
                {
                    b.OwnsOne("DataLayer.Rating", "Rating", b1 =>
                        {
                            b1.Property<int>("BookId")
                                .HasColumnType("integer");

                            b1.Property<float>("OverallRating")
                                .HasColumnType("real");

                            b1.Property<float>("PerformanceRating")
                                .HasColumnType("real");

                            b1.Property<float>("StoryRating")
                                .HasColumnType("real");

                            b1.HasKey("BookId");

                            b1.ToTable("Books");

                            b1.WithOwner()
                                .HasForeignKey("BookId");
                        });

                    b.OwnsMany("DataLayer.Supplement", "Supplements", b1 =>
                        {
                            b1.Property<int>("SupplementId")
                                .ValueGeneratedOnAdd()
                                .HasColumnType("integer");

                            NpgsqlPropertyBuilderExtensions.UseIdentityByDefaultColumn(b1.Property<int>("SupplementId"));

                            b1.Property<int>("BookId")
                                .HasColumnType("integer");

                            b1.Property<string>("Url")
                                .HasColumnType("text");

                            b1.HasKey("SupplementId");

                            b1.HasIndex("BookId");

                            b1.ToTable("Supplement");

                            b1.WithOwner("Book")
                                .HasForeignKey("BookId");

                            b1.Navigation("Book");
                        });

                    b.OwnsOne("DataLayer.UserDefinedItem", "UserDefinedItem", b1 =>
                        {
                            b1.Property<int>("BookId")
                                .HasColumnType("integer");

                            b1.Property<int>("BookStatus")
                                .HasColumnType("integer");

                            b1.Property<bool>("IsFinished")
                                .HasColumnType("boolean");

                            b1.Property<DateTime?>("LastDownloaded")
                                .HasColumnType("timestamp without time zone");

                            b1.Property<string>("LastDownloadedFileVersion")
                                .HasColumnType("text");

                            b1.Property<long?>("LastDownloadedFormat")
                                .HasColumnType("bigint");

                            b1.Property<string>("LastDownloadedVersion")
                                .HasColumnType("text");

                            b1.Property<int?>("PdfStatus")
                                .HasColumnType("integer");

                            b1.Property<string>("Tags")
                                .HasColumnType("text");

                            b1.HasKey("BookId");

                            b1.ToTable("UserDefinedItem", (string)null);

                            b1.WithOwner("Book")
                                .HasForeignKey("BookId");

                            b1.OwnsOne("DataLayer.Rating", "Rating", b2 =>
                                {
                                    b2.Property<int>("UserDefinedItemBookId")
                                        .HasColumnType("integer");

                                    b2.Property<float>("OverallRating")
                                        .HasColumnType("real");

                                    b2.Property<float>("PerformanceRating")
                                        .HasColumnType("real");

                                    b2.Property<float>("StoryRating")
                                        .HasColumnType("real");

                                    b2.HasKey("UserDefinedItemBookId");

                                    b2.ToTable("UserDefinedItem");

                                    b2.WithOwner()
                                        .HasForeignKey("UserDefinedItemBookId");
                                });

                            b1.Navigation("Book");

                            b1.Navigation("Rating");
                        });

                    b.Navigation("Rating");

                    b.Navigation("Supplements");

                    b.Navigation("UserDefinedItem");
                });

            modelBuilder.Entity("DataLayer.BookCategory", b =>
                {
                    b.HasOne("DataLayer.Book", "Book")
                        .WithMany("CategoriesLink")
                        .HasForeignKey("BookId")
                        .OnDelete(DeleteBehavior.Cascade)
                        .IsRequired();

                    b.HasOne("DataLayer.CategoryLadder", "CategoryLadder")
                        .WithMany("BooksLink")
                        .HasForeignKey("CategoryLadderId")
                        .OnDelete(DeleteBehavior.Cascade)
                        .IsRequired();

                    b.Navigation("Book");

                    b.Navigation("CategoryLadder");
                });

            modelBuilder.Entity("DataLayer.BookContributor", b =>
                {
                    b.HasOne("DataLayer.Book", "Book")
                        .WithMany("ContributorsLink")
                        .HasForeignKey("BookId")
                        .OnDelete(DeleteBehavior.Cascade)
                        .IsRequired();

                    b.HasOne("DataLayer.Contributor", "Contributor")
                        .WithMany("BooksLink")
                        .HasForeignKey("ContributorId")
                        .OnDelete(DeleteBehavior.Cascade)
                        .IsRequired();

                    b.Navigation("Book");

                    b.Navigation("Contributor");
                });

            modelBuilder.Entity("DataLayer.LibraryBook", b =>
                {
                    b.HasOne("DataLayer.Book", "Book")
                        .WithOne()
                        .HasForeignKey("DataLayer.LibraryBook", "BookId")
                        .OnDelete(DeleteBehavior.Cascade)
                        .IsRequired();

                    b.Navigation("Book");
                });

            modelBuilder.Entity("DataLayer.SeriesBook", b =>
                {
                    b.HasOne("DataLayer.Book", "Book")
                        .WithMany("SeriesLink")
                        .HasForeignKey("BookId")
                        .OnDelete(DeleteBehavior.Cascade)
                        .IsRequired();

                    b.HasOne("DataLayer.Series", "Series")
                        .WithMany("BooksLink")
                        .HasForeignKey("SeriesId")
                        .OnDelete(DeleteBehavior.Cascade)
                        .IsRequired();

                    b.Navigation("Book");

                    b.Navigation("Series");
                });

            modelBuilder.Entity("DataLayer.Book", b =>
                {
                    b.Navigation("CategoriesLink");

                    b.Navigation("ContributorsLink");

                    b.Navigation("SeriesLink");
                });

            modelBuilder.Entity("DataLayer.CategoryLadder", b =>
                {
                    b.Navigation("BooksLink");
                });

            modelBuilder.Entity("DataLayer.Contributor", b =>
                {
                    b.Navigation("BooksLink");
                });

            modelBuilder.Entity("DataLayer.Series", b =>
                {
                    b.Navigation("BooksLink");
                });
#pragma warning restore 612, 618
        }
    }
}
12	Source/DataLayer.Postgres/PostgresContextFactory.cs	Normal file
@@ -0,0 +1,12 @@
using Microsoft.EntityFrameworkCore.Design;

namespace DataLayer.Postgres
{
    public class PostgresContextFactory : IDesignTimeDbContextFactory<LibationContext>
    {
        public LibationContext CreateDbContext(string[] args)
        {
            return LibationContextFactory.CreatePostgres(string.Empty);
        }
    }
}
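The design-time factory above is what lets the dotnet-ef tooling construct a Postgres-backed LibationContext without a running application. A rough sketch of the equivalent manual call (illustrative only; DesignTimeDemo is not part of this commit):

// Illustrative only: EF Core design-time tools discover IDesignTimeDbContextFactory<T>
// implementations such as PostgresContextFactory and call CreateDbContext much like this.
using DataLayer;
using DataLayer.Postgres;

internal static class DesignTimeDemo
{
    internal static void Run()
    {
        // Equivalent to what the tooling does when scaffolding a migration.
        using LibationContext context = new PostgresContextFactory().CreateDbContext(System.Array.Empty<string>());

        // The tooling then compares context.Model against the model snapshot above.
    }
}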
34	Source/DataLayer.Sqlite/DataLayer.Sqlite.csproj	Normal file
@@ -0,0 +1,34 @@
<Project Sdk="Microsoft.NET.Sdk">

  <PropertyGroup>
    <TargetFramework>net10.0</TargetFramework>
  </PropertyGroup>

  <PropertyGroup>
    <GenerateRuntimeConfigurationFiles>true</GenerateRuntimeConfigurationFiles>
    <OutputType>Library</OutputType>
  </PropertyGroup>
  <ItemGroup>
    <PackageReference Include="Microsoft.EntityFrameworkCore.Design" Version="10.0.0">
      <PrivateAssets>all</PrivateAssets>
      <IncludeAssets>runtime; build; native; analyzers; buildtransitive</IncludeAssets>
    </PackageReference>
    <PackageReference Include="Microsoft.EntityFrameworkCore.Tools" Version="10.0.0">
      <PrivateAssets>all</PrivateAssets>
      <IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
    </PackageReference>
  </ItemGroup>

  <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|AnyCPU'">
    <DebugType>embedded</DebugType>
  </PropertyGroup>

  <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|AnyCPU'">
    <DebugType>embedded</DebugType>
  </PropertyGroup>

  <ItemGroup>
    <ProjectReference Include="..\DataLayer\DataLayer.csproj" />
  </ItemGroup>

</Project>
Some files were not shown because too many files have changed in this diff.