Mirror of https://github.com/sabnzbd/sabnzbd.git (synced 2025-12-24 16:19:31 -05:00)
Compare commits
2907 Commits
[Commit table: the compare view lists 2907 commits by abbreviated SHA1 only, from a637d218c4 through 5c54b873bf; the author, date, and commit-message columns were empty in this extraction.]
43
.git-blame-ignore-revs
Normal file
@@ -0,0 +1,43 @@
|
||||
# `git blame` master ignore list.
|
||||
#
|
||||
# This file contains a list of git hashes of revisions to be ignored
|
||||
# by `git blame`. These revisions are considered "unimportant" in
|
||||
# that they are unlikely to be what you are interested in when blaming.
|
||||
# They are typically expected to be formatting-only changes.
|
||||
#
|
||||
# It can be used for `git blame` using `--ignore-revs-file` or by
|
||||
# setting `blame.ignoreRevsFile` in the `git config`[1].
|
||||
#
|
||||
# Ignore these commits when reporting with blame. Calling
|
||||
#
|
||||
# git blame --ignore-revs-file .git-blame-ignore-revs
|
||||
#
|
||||
# will tell `git blame` to ignore changes made by these revisions when
|
||||
# assigning blame, as if the change never happened.
|
||||
#
|
||||
# You can enable this as a default for your local repository by
|
||||
# running
|
||||
#
|
||||
# git config blame.ignoreRevsFile .git-blame-ignore-revs
|
||||
#
|
||||
# This will probably be automatically picked by your IDE
|
||||
# (VSCode+GitLens and JetBrains products are confirmed to do this).
|
||||
#
|
||||
# Important: if you are switching to a branch without this file,
|
||||
# `git blame` will fail with an error.
|
||||
#
|
||||
# GitHub also excludes the commits listed below from its "Blame"
|
||||
# views[2][3].
|
||||
#
|
||||
# [1]: https://git-scm.com/docs/git-blame#Documentation/git-blame.txt-blameignoreRevsFile
|
||||
# [2]: https://github.blog/changelog/2022-03-24-ignore-commits-in-the-blame-view-beta/
|
||||
# [3]: https://docs.github.com/en/repositories/working-with-files/using-files/viewing-a-file#ignore-commits-in-the-blame-view
|
||||
|
||||
|
||||
# Black changes
|
||||
465a88154152fb0607a63fa24c8446bff43ec886
|
||||
f06891926661986fff52d6eb4b4cb120c71972d1
|
||||
9bcbcaefdfecc85aedfd8e2f8aaa1ca7f959404e
|
||||
433dcab02b29f7bd3827e237434034deecc1b549
|
||||
9f6a9f991222efccc87b45a701086c95629c67b6
|
||||
f89114ca7e1b20bf8e645ecd0b52b707ec857aa9
|
||||
31
.github/ISSUE_TEMPLATE/bug.yml
vendored
Normal file
@@ -0,0 +1,31 @@
|
||||
name: Bug report
|
||||
description: >
|
||||
Did you discover a bug in SABnzbd? Report it here!
|
||||
If you are not 100% certain this is a bug please go to our forums, Reddit or Discord server first.
|
||||
labels:
|
||||
- Support
|
||||
body:
|
||||
- type: input
|
||||
attributes:
|
||||
label: SABnzbd version
|
||||
validations:
|
||||
required: true
|
||||
- type: input
|
||||
attributes:
|
||||
label: Operating system
|
||||
validations:
|
||||
required: true
|
||||
- type: dropdown
|
||||
attributes:
|
||||
label: Using Docker image
|
||||
options:
|
||||
- linuxserver
|
||||
- hotio
|
||||
- binhex
|
||||
- Other
|
||||
- type: textarea
|
||||
attributes:
|
||||
label: Description
|
||||
description: Include error logs directly or link to extended logs on https://pastebin.com/
|
||||
validations:
|
||||
required: true
|
||||
11
.github/ISSUE_TEMPLATE/config.yml
vendored
Normal file
@@ -0,0 +1,11 @@
|
||||
blank_issues_enabled: false
|
||||
contact_links:
|
||||
- name: Support forum
|
||||
url: https://forums.sabnzbd.org/
|
||||
about: Support questions can be asked on our forums, Reddit or Discord server.
|
||||
- name: Discord
|
||||
url: https://discord.sabnzbd.org
|
||||
about: Support questions can be asked on our forums, Reddit or Discord server.
|
||||
- name: Reddit - r/sabnzbd
|
||||
url: https://www.reddit.com/r/sabnzbd
|
||||
about: Support questions can be asked on our forums, Reddit or Discord server.
|
||||
10
.github/ISSUE_TEMPLATE/feature_request.yml
vendored
Normal file
@@ -0,0 +1,10 @@
|
||||
name: Feature request
|
||||
description: What new feature would you like to have added to SABnzbd?
|
||||
labels:
|
||||
- Support
|
||||
body:
|
||||
- type: textarea
|
||||
attributes:
|
||||
label: Description
|
||||
validations:
|
||||
required: true
|
||||
1
.github/funding.yml
vendored
Normal file
@@ -0,0 +1 @@
|
||||
custom: ['https://sabnzbd.org/donate/']
|
||||
47
.github/renovate.json
vendored
Normal file
@@ -0,0 +1,47 @@
|
||||
{
|
||||
"$schema": "https://docs.renovatebot.com/renovate-schema.json",
|
||||
"extends": [
|
||||
"config:base",
|
||||
":disableDependencyDashboard"
|
||||
],
|
||||
"schedule": [
|
||||
"before 8am on Monday"
|
||||
],
|
||||
"baseBranches": ["develop", "feature/uvicorn"],
|
||||
"pip_requirements": {
|
||||
"fileMatch": [
|
||||
"requirements.txt",
|
||||
"tests/requirements.txt",
|
||||
"builder/requirements.txt",
|
||||
"builder/release-requirements.txt"
|
||||
]
|
||||
},
|
||||
"ignorePaths": [],
|
||||
"ignoreDeps": [
|
||||
"jaraco.text",
|
||||
"jaraco.context",
|
||||
"jaraco.collections",
|
||||
"sabctools",
|
||||
"paho-mqtt",
|
||||
"werkzeug",
|
||||
"tavern"
|
||||
],
|
||||
"packageRules": [
|
||||
{
|
||||
"matchManagers": ["github-actions"],
|
||||
"matchPackageNames": ["windows", "macos"],
|
||||
"enabled": false
|
||||
},
|
||||
{
|
||||
"matchPackagePatterns": [
|
||||
"*"
|
||||
],
|
||||
"groupName": "all dependencies",
|
||||
"groupSlug": "all",
|
||||
"separateMajorMinor": false,
|
||||
"automerge": true
|
||||
}
|
||||
],
|
||||
"automergeStrategy": "squash",
|
||||
"platformAutomerge": true
|
||||
}
|
||||
256
.github/workflows/build_release.yml
vendored
Normal file
@@ -0,0 +1,256 @@
|
||||
name: Build binaries and source distribution
|
||||
|
||||
on: [push, pull_request]
|
||||
|
||||
# Setting PYTHONNODEBUGRANGES reduces binary size
|
||||
env:
|
||||
PYTHONNODEBUGRANGES: 1
|
||||
|
||||
jobs:
|
||||
build_windows:
|
||||
name: Build Windows binary (${{ matrix.architecture }})
|
||||
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include:
|
||||
- architecture: x64
|
||||
runs-on: windows-2022
|
||||
- architecture: arm64
|
||||
runs-on: windows-11-arm
|
||||
|
||||
runs-on: ${{ matrix.runs-on }}
|
||||
timeout-minutes: 15
|
||||
steps:
|
||||
- uses: actions/checkout@v6
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v6
|
||||
with:
|
||||
python-version: "3.14"
|
||||
architecture: ${{ matrix.architecture }}
|
||||
cache: pip
|
||||
cache-dependency-path: "**/requirements.txt"
|
||||
- name: Install Python dependencies
|
||||
# Without dependencies to make sure everything is covered in the requirements.txt
|
||||
run: |
|
||||
python --version
|
||||
python -m pip install --upgrade pip wheel
|
||||
pip install --upgrade -r requirements.txt --no-dependencies
|
||||
pip install --upgrade -r builder/requirements.txt --no-dependencies
|
||||
- name: Build Windows standalone binary
|
||||
id: windows_binary
|
||||
run: python builder/package.py binary
|
||||
- name: Upload Windows standalone binary (unsigned)
|
||||
uses: actions/upload-artifact@v6
|
||||
id: upload-unsigned-binary
|
||||
with:
|
||||
path: "*-win*-bin.zip"
|
||||
name: Windows standalone binary (${{ matrix.architecture }})
|
||||
- name: Sign Windows standalone binary
|
||||
uses: signpath/github-action-submit-signing-request@v2
|
||||
if: contains(github.ref, 'refs/tags/')
|
||||
with:
|
||||
api-token: ${{ secrets.SIGNPATH_API_TOKEN }}
|
||||
organization-id: ${{ secrets.SIGNPATH_ORG_ID }}
|
||||
project-slug: "sabnzbd"
|
||||
artifact-configuration-slug: "sabnzbd-binary"
|
||||
signing-policy-slug: "release-signing"
|
||||
github-artifact-id: ${{ steps.upload-unsigned-binary.outputs.artifact-id }}
|
||||
wait-for-completion: true
|
||||
output-artifact-directory: "signed"
|
||||
- name: Upload Windows standalone binary (signed)
|
||||
uses: actions/upload-artifact@v6
|
||||
if: contains(github.ref, 'refs/tags/')
|
||||
with:
|
||||
name: Windows standalone binary (${{ matrix.architecture }}, signed)
|
||||
path: "signed"
|
||||
- name: Build Windows installer
|
||||
if: matrix.architecture == 'x64'
|
||||
run: python builder/package.py installer
|
||||
- name: Upload Windows installer
|
||||
if: matrix.architecture == 'x64'
|
||||
uses: actions/upload-artifact@v6
|
||||
id: upload-unsigned-installer
|
||||
with:
|
||||
path: "*-win-setup.exe"
|
||||
name: Windows installer (${{ matrix.architecture }})
|
||||
- name: Sign Windows installer
|
||||
if: matrix.architecture == 'x64' && contains(github.ref, 'refs/tags/')
|
||||
uses: signpath/github-action-submit-signing-request@v2
|
||||
with:
|
||||
api-token: ${{ secrets.SIGNPATH_API_TOKEN }}
|
||||
organization-id: ${{ secrets.SIGNPATH_ORG_ID }}
|
||||
project-slug: "sabnzbd"
|
||||
artifact-configuration-slug: "sabnzbd-installer"
|
||||
signing-policy-slug: "release-signing"
|
||||
github-artifact-id: ${{ steps.upload-unsigned-installer.outputs.artifact-id }}
|
||||
wait-for-completion: true
|
||||
output-artifact-directory: "signed"
|
||||
- name: Upload Windows installer (signed)
|
||||
if: matrix.architecture == 'x64' && contains(github.ref, 'refs/tags/')
|
||||
uses: actions/upload-artifact@v6
|
||||
with:
|
||||
name: Windows installer (${{ matrix.architecture }}, signed)
|
||||
path: "signed/*-win-setup.exe"
|
||||
|
||||
build_macos:
|
||||
name: Build macOS binary
|
||||
runs-on: macos-14
|
||||
timeout-minutes: 15
|
||||
env:
|
||||
# We need the official Python, because the GA ones only support newer macOS versions
|
||||
# The deployment target is picked up by the Python build tools automatically
|
||||
# If updated, make sure to also set LSMinimumSystemVersion in SABnzbd.spec
|
||||
PYTHON_VERSION: "3.14.2"
|
||||
MACOSX_DEPLOYMENT_TARGET: "10.15"
|
||||
# We need to force compile for universal2 support
|
||||
CFLAGS: -arch x86_64 -arch arm64
|
||||
ARCHFLAGS: -arch x86_64 -arch arm64
|
||||
steps:
|
||||
- uses: actions/checkout@v6
|
||||
- name: Set up Python
|
||||
# Only use this for the caching of pip packages!
|
||||
uses: actions/setup-python@v6
|
||||
with:
|
||||
python-version: "3.14"
|
||||
cache: pip
|
||||
cache-dependency-path: "**/requirements.txt"
|
||||
- name: Cache Python download
|
||||
id: cache-python-download
|
||||
uses: actions/cache@v5
|
||||
with:
|
||||
path: ~/python.pkg
|
||||
key: cache-macOS-Python-${{ env.PYTHON_VERSION }}
|
||||
- name: Get Python from python.org
|
||||
if: steps.cache-python-download.outputs.cache-hit != 'true'
|
||||
run: curl https://www.python.org/ftp/python/${PYTHON_VERSION}/python-${PYTHON_VERSION}-macos11.pkg -o ~/python.pkg
|
||||
- name: Install Python
|
||||
run: sudo installer -pkg ~/python.pkg -target /
|
||||
- name: Install Python dependencies
|
||||
# We have to manually compile some modules as they don't automatically fetch universal2 binaries
|
||||
run: |
|
||||
python3 --version
|
||||
pip3 install --upgrade pip wheel
|
||||
pip3 install --upgrade -r requirements.txt --no-binary cffi,CT3,PyYAML,charset_normalizer --no-dependencies
|
||||
pip3 install --upgrade -r builder/requirements.txt --no-dependencies
|
||||
- name: Import macOS codesign certificates
|
||||
# Taken from https://github.com/Apple-Actions/import-codesign-certs/pull/27 (comments)
|
||||
env:
|
||||
CERTIFICATES_P12: ${{ secrets.CERTIFICATES_P12 }}
|
||||
CERTIFICATES_P12_PASSWORD: ${{ secrets.CERTIFICATES_P12_PASSWORD }}
|
||||
MACOS_KEYCHAIN_TEMP_PASSWORD: ${{ secrets.MACOS_KEYCHAIN_TEMP_PASSWORD }}
|
||||
if: env.CERTIFICATES_P12
|
||||
run: |
|
||||
echo $CERTIFICATES_P12 | base64 --decode > certificate.p12
|
||||
security create-keychain -p "$MACOS_KEYCHAIN_TEMP_PASSWORD" build.keychain
|
||||
security default-keychain -s build.keychain
|
||||
security unlock-keychain -p "$MACOS_KEYCHAIN_TEMP_PASSWORD" build.keychain
|
||||
security set-keychain-settings -lut 21600 build.keychain
|
||||
security import certificate.p12 -k build.keychain -P "$CERTIFICATES_P12_PASSWORD" -T /usr/bin/codesign -T /usr/bin/productsign -T /usr/bin/xcrun
|
||||
security set-key-partition-list -S apple-tool:,apple:,codesign: -s -k "$MACOS_KEYCHAIN_TEMP_PASSWORD" build.keychain
|
||||
- name: Build source distribution
|
||||
# Run this on macOS so the line endings are correct by default
|
||||
run: python builder/package.py source
|
||||
- name: Upload source distribution
|
||||
uses: actions/upload-artifact@v6
|
||||
with:
|
||||
path: "*-src.tar.gz"
|
||||
name: Source distribution
|
||||
- name: Build macOS binary
|
||||
env:
|
||||
SIGNING_AUTH: ${{ secrets.SIGNING_AUTH }}
|
||||
NOTARIZATION_USER: ${{ secrets.NOTARIZATION_USER }}
|
||||
NOTARIZATION_PASS: ${{ secrets.NOTARIZATION_PASS }}
|
||||
run: |
|
||||
python3 builder/package.py app
|
||||
python3 builder/make_dmg.py
|
||||
- name: Upload macOS binary
|
||||
uses: actions/upload-artifact@v6
|
||||
with:
|
||||
path: "*-macos.dmg"
|
||||
name: macOS binary
|
||||
|
||||
build-snap:
|
||||
name: Build Snap Packages (${{ matrix.linux_arch }})
|
||||
timeout-minutes: 30
|
||||
runs-on: ${{ matrix.os }}
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include:
|
||||
- os: ubuntu-latest
|
||||
linux_arch: x64
|
||||
- os: ubuntu-24.04-arm
|
||||
linux_arch: arm64
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v6
|
||||
- name: Cache par2cmdline-turbo tarball
|
||||
uses: actions/cache@v5
|
||||
id: cache-par2cmdline
|
||||
# Clearing the cache in case of new version requires manual clearing in GitHub!
|
||||
with:
|
||||
path: snap/par2cmdline.tar.gz
|
||||
key: cache-par2cmdline
|
||||
- name: Download par2cmdline-turbo tarball
|
||||
if: steps.cache-par2cmdline.outputs.cache-hit != 'true'
|
||||
run: |
|
||||
PAR2_TARBALL=$(curl -sL https://api.github.com/repos/animetosho/par2cmdline-turbo/releases/latest | jq -r '.tarball_url')
|
||||
curl -o snap/par2cmdline.tar.gz -L "$PAR2_TARBALL"
|
||||
- uses: snapcore/action-build@v1
|
||||
name: Build snap
|
||||
id: snapcraft
|
||||
- name: Test snap installation
|
||||
run: |
|
||||
sudo snap install --dangerous *.snap
|
||||
sudo snap connect sabnzbd:removable-media
|
||||
# Basic smoke test - check that the binary exists and can show help
|
||||
timeout 10s snap run sabnzbd --help || true
|
||||
sudo snap remove sabnzbd
|
||||
- name: Upload snap
|
||||
uses: actions/upload-artifact@v6
|
||||
with:
|
||||
name: Snap package (${{ matrix.linux_arch }})
|
||||
path: ${{ steps.snapcraft.outputs.snap }}
|
||||
- name: Publish snap
|
||||
uses: snapcore/action-publish@v1
|
||||
if: contains(github.ref, 'refs/tags/')
|
||||
env:
|
||||
SNAPCRAFT_STORE_CREDENTIALS: ${{ secrets.SNAP_TOKEN }}
|
||||
with:
|
||||
store_login: ${{ secrets.SNAP_TOKEN }}
|
||||
snap: ${{ steps.snapcraft.outputs.snap }}
|
||||
release: stable
|
||||
|
||||
release:
|
||||
name: Prepare Release
|
||||
runs-on: ubuntu-latest
|
||||
needs: [build_windows, build_macos]
|
||||
steps:
|
||||
- uses: actions/checkout@v6
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v6
|
||||
with:
|
||||
python-version: "3.14"
|
||||
cache: pip
|
||||
cache-dependency-path: "builder/release-requirements.txt"
|
||||
- name: Download Source distribution artifact
|
||||
uses: actions/download-artifact@v7
|
||||
with:
|
||||
name: Source distribution
|
||||
- name: Download macOS artifact
|
||||
uses: actions/download-artifact@v7
|
||||
with:
|
||||
name: macOS binary
|
||||
- name: Download Windows artifacts
|
||||
uses: actions/download-artifact@v7
|
||||
with:
|
||||
pattern: ${{ (contains(github.ref, 'refs/tags/')) && '*signed*' || '*Windows*' }}
|
||||
merge-multiple: true
|
||||
- name: Prepare official release
|
||||
env:
|
||||
AUTOMATION_GITHUB_TOKEN: ${{ secrets.AUTOMATION_GITHUB_TOKEN }}
|
||||
REDDIT_TOKEN: ${{ secrets.REDDIT_TOKEN }}
|
||||
run: |
|
||||
pip3 install -r builder/release-requirements.txt
|
||||
python3 builder/release.py
|
||||
63
.github/workflows/integration_testing.yml
vendored
Normal file
@@ -0,0 +1,63 @@
|
||||
name: CI Tests
|
||||
|
||||
on: [push, pull_request]
|
||||
|
||||
jobs:
|
||||
black:
|
||||
name: Black Code Formatter
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v6
|
||||
- name: Black Code Formatter
|
||||
uses: lgeiger/black-action@master
|
||||
with:
|
||||
args: >
|
||||
SABnzbd.py
|
||||
sabnzbd
|
||||
scripts
|
||||
tools
|
||||
builder
|
||||
builder/SABnzbd.spec
|
||||
tests
|
||||
--line-length=120
|
||||
--target-version=py39
|
||||
--check
|
||||
--diff
|
||||
|
||||
test:
|
||||
name: Test ${{ matrix.name }} - Python ${{ matrix.python-version }}
|
||||
runs-on: ${{ matrix.os }}
|
||||
timeout-minutes: 20
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
python-version: [ "3.9", "3.10", "3.11", "3.12", "3.13", "3.14" ]
|
||||
name: ["Linux"]
|
||||
os: [ubuntu-latest]
|
||||
include:
|
||||
- name: macOS
|
||||
os: macos-latest
|
||||
python-version: "3.14"
|
||||
- name: Windows
|
||||
os: windows-2022
|
||||
python-version: "3.14"
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v6
|
||||
- name: Set up Python ${{ matrix.python-version }}
|
||||
uses: actions/setup-python@v6
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
cache: pip
|
||||
cache-dependency-path: "**/requirements.txt"
|
||||
- name: Install system dependencies
|
||||
if: runner.os == 'Linux'
|
||||
run: sudo apt-get install unrar 7zip par2
|
||||
- name: Install Python dependencies
|
||||
run: |
|
||||
python --version
|
||||
python -m pip install --upgrade pip wheel
|
||||
pip install --upgrade -r requirements.txt --no-dependencies
|
||||
pip install --upgrade -r tests/requirements.txt
|
||||
- name: Test SABnzbd
|
||||
run: pytest -s
|
||||
33
.github/workflows/stale.yml
vendored
Normal file
@@ -0,0 +1,33 @@
|
||||
name: "Close and lock old issues"
|
||||
on:
|
||||
schedule:
|
||||
- cron: "30 1 * * *"
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
stale:
|
||||
name: "Close stale issues"
|
||||
if: github.repository_owner == 'sabnzbd'
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/stale@v10
|
||||
with:
|
||||
days-before-stale: 21
|
||||
days-before-close: 7
|
||||
stale-issue-label: "Stale"
|
||||
stale-issue-message: >
|
||||
This issue has been automatically marked as stale because it has not had
|
||||
recent activity. It will be closed if no further activity occurs. Thank you
|
||||
for your contributions.
|
||||
exempt-issue-labels: "Feature request, Work in progress, Bug"
|
||||
|
||||
lock:
|
||||
name: "Lock old issues"
|
||||
if: github.repository_owner == 'sabnzbd'
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: dessant/lock-threads@v6
|
||||
with:
|
||||
log-output: true
|
||||
issue-inactive-days: 60
|
||||
pr-inactive-days: 60
|
||||
43
.github/workflows/translations.yml
vendored
Normal file
@@ -0,0 +1,43 @@
|
||||
name: Update translatable texts
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- develop
|
||||
|
||||
jobs:
|
||||
translations:
|
||||
name: Update translatable texts
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
TX_TOKEN: ${{ secrets.TX_TOKEN }}
|
||||
steps:
|
||||
- uses: actions/checkout@v6
|
||||
with:
|
||||
token: ${{ secrets.AUTOMATION_GITHUB_TOKEN }}
|
||||
- name: Generate translatable texts
|
||||
run: |
|
||||
python3 tools/extract_pot.py
|
||||
- name: Push/pull Transifex translations
|
||||
if: env.TX_TOKEN
|
||||
# Add --translation to the push command in order to update Transifex using local translation edits
|
||||
# However, this prevents modifying existing translations in Transifex as they will be overwritten by the push!
|
||||
run: |
|
||||
curl -o- https://raw.githubusercontent.com/transifex/cli/master/install.sh | bash
|
||||
./tx push --source
|
||||
./tx pull --all --force
|
||||
- name: Compile translations to validate them
|
||||
run: |
|
||||
python3 tools/make_mo.py
|
||||
- name: Push translatable and translated texts back to repo
|
||||
uses: stefanzweifel/git-auto-commit-action@v7.1.0
|
||||
if: env.TX_TOKEN
|
||||
with:
|
||||
commit_message: |
|
||||
Update translatable texts
|
||||
[skip ci]
|
||||
commit_user_name: SABnzbd Automation
|
||||
commit_user_email: bugs@sabnzbd.org
|
||||
commit_author: SABnzbd Automation <bugs@sabnzbd.org>
|
||||
file_pattern: "po/*.pot po/*.po"
|
||||
push_options: --force
|
||||
24
.gitignore
vendored
@@ -1,11 +1,7 @@
|
||||
# Compiled python
|
||||
# Compiled python and venv
|
||||
*.py[cod]
|
||||
|
||||
# Working folders for Win build
|
||||
build/
|
||||
dist/
|
||||
locale/
|
||||
srcdist/
|
||||
__pycache__
|
||||
/venv
|
||||
|
||||
# Snapcraft
|
||||
parts/
|
||||
@@ -17,19 +13,31 @@ snap/.snapcraft/
|
||||
# Generated email templates
|
||||
email/*.tmpl
|
||||
|
||||
# Working folders for build
|
||||
build/
|
||||
dist/
|
||||
locale/
|
||||
srcdist/
|
||||
|
||||
# Build results
|
||||
SABnzbd*.zip
|
||||
SABnzbd*.exe
|
||||
SABnzbd*.gz
|
||||
SABnzbd*.dmg
|
||||
SABnzbd*.pkg
|
||||
SABnzbd-*/
|
||||
|
||||
# WingIDE/PyCharm project files
|
||||
*.wp[ru]
|
||||
.idea
|
||||
|
||||
# VScode
|
||||
.vscode/
|
||||
|
||||
# Testing folders
|
||||
.cache
|
||||
.xprocess
|
||||
.pytest*
|
||||
tests/cache
|
||||
|
||||
# General junk
|
||||
@@ -41,5 +49,5 @@ tests/cache
|
||||
\#*
|
||||
.\#*
|
||||
|
||||
# Apple
|
||||
.DS_Store
|
||||
/venv
|
||||
|
||||
38
.travis.yml
@@ -1,38 +0,0 @@
|
||||
# Include the host/username/password for the test-servers
|
||||
matrix:
|
||||
include:
|
||||
- os: linux
|
||||
language: python
|
||||
env:
|
||||
- secure: X5MY2HAtCxBI84IySY/XroFsFy2RIVhfsX+P1y3WXfvwBHYKCgrPV6BgwCg93ttkPmMS/IslP5Vp4F1OGqC9AZdxtxfHKpIPlIVxIHj6Lf6xwynmbGDQXjy73K13gjznK2mkGA0jcsp4Q5POS4ZKVkd6aOXnc8l8xS08+ztAvfxCC3tsMj2oMLEPP92j6zqb/1x2aX5+gVyVzrKgQQVKIk6R6jTxhIiOMPzj4+VMLXK8NEZqjV6RPwUjSoKHqJiV5URyf6/+2Ojmem3ilnpktn7xIJ/ZO1UNnZCrElOGZtmbryZFMctJvEAIQCOSdzsq/MACk0gocnOL3JQfDF5sYPOjJmc6sZI9PL78oFhwKaLkWEx565u8kdkLTMvv4A02HAmPzV1rKE1CTlEhsy0djU8mueCr9Ep1WyLJdY/igbyhR+dOd8fVo9Y1tY2o+ZisCsO5+PRfzhypK9xukqmWDJSXIWSuExUU/becXJ4IaTmlYJ+ArhKvkL90GmckH/zt9ZPIgr9Lq0OFva9uVHX+sbbsQZZ48lAmgiiiX335dONj8MxO8cDKsUT9FWQ8PzeJ8g8PErv5pmVVVODoOdKZg2Oo4jUsZG2yV8uUt9j87I2DPou4WiJ7wcTzQCPdzlaA5hdixPMyVUF/yCL+eKdJQFaKy3eaKwCxnAVp3WA2WdA=
|
||||
- secure: gzvbgg+rdM/TfPOlXGE/JOW8VEfIJxwUnzt2bH07nj/PbREXQXrImC1SreMtwQX+gvd7bjIBZU/sgNSgRYcWKaSim4uNtAbinkcKQeP+V844ZY2Fabq/xpLluSP61vJLQ+hOLbnvilxmuI0E1Divahxo5qZJlwzCdk5u2txJRT/2LKGDT1lpGgIyDj9u0/ViAcpaAyfFR2Zd6ydHKbx+eFBE21LkyH/+GJmRiO0+qLIuCa2knmOJYjwBxRcPiAEDpbrRUbYDiNyzPqEVxJfCbsGYlo/QN/SnV6kTqM1WoFzvi4d1pDxDGRFLQj+KigihF6uY4eLC1e6yVQrDy0tyWKt6E+1tc8fH5dRS7AYtWMzURn/7Ebd72AiNIYaeAL8ZPqI7vw3ovFTqBS0h8Mg2uUZ503ytUvfwKyU9MgIkzXwmGuE37MCd0bRJ/blPS2DT+IMbrbEP90K5VrDrN/AGiYHR1TZ9GKUZd6xHibulEh2nNFMMQEga8nE2CWaJ3uJrCN7ud+4OJ0zCZFF7JiJTbOGApHg/aGWD/bYfg9sIh7up4PcxVs6RFxbf+M1aB8GO2A9aEZFow+djYVxiqf6esmzrnlsTfz16f8Txmez3BRftjVULre03a3Rt7WRxwYLveNlJos1nMw3G0CnruCe+wJbHEK4tEiqIXqB8UemT4zw=
|
||||
- secure: f5QGjZESH4khjLOO1Ntgtiol4ZvwcqHLIV1sdK162dVkNT6UKOTRQflj2UmRXzwiRzWtVX/Ri0zT0j+SUJy2+aqJY/gxvisdTIWzRQ3w/CJPGgCizSkTQEWJ2V/n7DUAJ4xerme36zYi21S3d8VEWVDzU/duLu3yhlN5x0wMCY+dDPSDTFubmptGeCmyxqBqGVd7gD3PaiK7fDBB/eAXbW3QxLLQfxLHmPsx8vzPhDTQiLFtY43jfnVGEBdUbxSMXbq2NRB5eXH3bBkW8u/5y9uoyuF45CQn8f3UB6F84L+/n9M2ryCGeSJOFuZqSUHXvRF2acON40jx3t4PVocEzYguPwewoiFxfFHjRWmiI4WljiN30taK0pgstmzLTedozK+NdZ0M8vD7MCyK0yegPQolzFRngWW5Y8NY1XwlBT9W2lqGmrFge+dB86wOArMcRlY62PTOJ9Zqspbe/6mBT4Tq4O2OsXxGX/x60W/NJynva9WAz2SLEi5Pjs6r1a3tyXssw4/8KVhWl92WfpOnWrZrnZlsxOTmcS2OhLB0FQikTv9T/i3CZNcCI4PELkExeIwh4JW1MY0iGeLDHcAUKryJGrRZj1x32Nt1uUPTPBi8l8EzNyNOUvbHYTdpBr5r2JW1orvT55OhvKauc3wB7ogj673iYsqx5jeazHhgJMs=
|
||||
- os: osx
|
||||
env:
|
||||
- HOMEBREW_NO_AUTO_UPDATE=1
|
||||
- secure: RI/WxBI5kTP5v0qZ6im5BwqPurzzwKIl8GSbM2dFSEiWYEbKwHTDJ3KDDsXqS8YMNaropNVgzsdpCGXYUWRNTraKVa9GZEGNJ+fQuBWK9wkJ0MDTYfL/QFSN1CDXXlg7k26VXu6PgvEFA5kyDfSpxrxXJC6yXOUeJqmebkU2fiQo7/5Vpb1WAwpYlBP6zL5lYt2lpJ85fhYEjuAeuP/9zdVIlgCB7rDCgUX7tCKKXgwbKXfcff7lOCneB00/RCmRuNp3/tohGlgrSXh4ivHx4XEQgRoiVdeR3RCKZa5tBIXANefuJ2VopBrAbSRmVBexQP1818IU/XLlwtEEpC1vulpkx+5JolWksCrx4uJkKdlH0KA4k1m88L0Q1Rmmnp9LgRgeEl5xqt5s6RR6lS63ChQYkVFgWandwlrWu7Uenne4401KbG58PzDXEGlsKhUXnYBX+SU6gwejImCMb3vszKRAge5QAQlkiruCu31W9tWpY9ezHYrbv9ckOqdFXf9qsPEnU352v/8qHFe7jT/+7RSYdUzuo/d2aQqPKfkb7sy1VLEznmbGmv1BH4rGNpxd5inlcFKsR099Hx7PWgY8MHZcnEP3PJ2kBseFzVP3WKXHDTcv8yR0w6EgQyMzSHl9Ah3WJJ7TXZQ82gcqF8LcmuKcqXcwTkffG3ww7Vzuq4M=
|
||||
- secure: uXHKYgQAwnfhWKi7RKAEumMMZZTJBb878KpodRfs1fz0NffdPo5+Ak1ricNzOJ8wti8/lXycDS+YmnFs64lGUxL+zvbQlFv7QuKfN0uHfPlo6zux9Ha9pg1rSUI4zqZ9kmbtwc0I2mdy1VeWwHvnbQDXUIt6a+tTwYZL3MGdP6kNvtSXaYhbEoHExjqeHUtVhUTafvWGtwE7uN+sdvhwXQ0dWlz6HGub8qYjkKCmF9VG+OyLKjFHjLVDMQ7Jnng2l1ZOgHSh5g5m6r++NEwSzZ8wFVULdzv5eEcR9U+mHmonFKOA/ICcZGd8MhEuvz9BupfgDWFqSTb5JGxzlZ28YdtjcAudzrWQMSpP2R0ks2Ttxz9Kpgw1L75HMvj0smazHs7IEEiXf2Yr03bzeHg7CGXNqOYyEOxxrPaJekCjMlX/YGqT/iv/8pZPfew7k/iVJlvCam76WNXABjJncHJeMsCgkItYZAoRZJDc+7z8J4g4ys1Rk0V/difjjwc/pSeKbt6wDA/9cmZ7r4Cs1Yh9Pl/mw6kzWGGpejO7lmsayQN3Pw99QMcZByUHx5BR+ZtIfF7Sl+F0uDQJ0MntJcteF7z1Dam2jHlkLckb85j6YWup5ItLAj5Hz7V2YUwqFmQhfOWEAjxagNSNnB8we4YBWS4KDTBEVDm6ITTfddlYvCw=
|
||||
- secure: HKaT52NUQh18kllFQTjpKC64KlDkWEz0XnIEKJffumctrJjCvoFZFNC7ip3j7Bi3yp2IeD2SMsdxrrT6YFKxx5FfSdPqpQnsY34bzdEFZQomNJg4n/tmBc350PoVQ0PvLQiVoCCfVbdS/b4makNK7A+d9KED+SEsQMAqKp2mSnGhATB9MwFaZL5S4nGnEkqW5+eeAQxJ8JRawwumOOx/xhPOoEMIfHMpyTwFI1yUh1nJhZ9k1nxHzPlM78goyIuf0MjeZfSZ2fIlNZGVruYM28i9hpO4bzPFhk51uryWv8DQZiZlpCkHl6Po7rVVf5pNqm+l9SD/t0DnhS2rJHdeFSI2lM/uZtdOxaY5fTTj83LbCGhFtuZnZRwoQ73tpda8J7Z1E5Ni9bi7vOiZQ4pEIPt4LLu0X607sPWMkqrmgalKQQS13b5oliyMpkIguvmj9822BpaNVqamIrfn0z38+0Gog8iuGlMAQnRO9tGDO4kbVLcZQTRWpSwIC3niTPjPgLq/N92XQ9xmccrFT7efwemgF65FNM5ltv8+9AmI+hsuyXfqeHaAV9wmxRAAhaqvRgnSLYa3u1CPn5fF2CDvPvPcyCEIWnyxc7dYHDpzAQDcyuSejtbnL8gpkDqEHpy23hTjgZnZD7Pk7PQ7ayA8zBumTMGZ+/GAn5Wmgce+w0M=
|
||||
addons:
|
||||
chrome: stable
|
||||
|
||||
before_script:
|
||||
- if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then
|
||||
brew cask install chromedriver;
|
||||
else
|
||||
sudo add-apt-repository ppa:jcfp -y;
|
||||
sudo apt-get update -q;
|
||||
sudo apt-get install unrar p7zip-full par2 chromium-chromedriver -y;
|
||||
ln -s /usr/lib/chromium-browser/chromedriver ~/bin/chromedriver;
|
||||
fi;
|
||||
|
||||
install:
|
||||
- pip install --upgrade -r tests/requirements.txt
|
||||
|
||||
script:
|
||||
- python ./tests/test_functional.py
|
||||
|
||||
notifications:
|
||||
email:
|
||||
on_success: never
|
||||
on_failure: always
|
||||
23
.tx/config
Normal file
@@ -0,0 +1,23 @@
|
||||
[main]
|
||||
host = https://www.transifex.com
|
||||
|
||||
[o:sabnzbd:p:sabnzbd-translations:r:po-main-sabnzbd-pot--develop]
|
||||
file_filter = po/main/<lang>.po
|
||||
minimum_perc = 0
|
||||
source_file = po/main/SABnzbd.pot
|
||||
source_lang = en
|
||||
type = PO
|
||||
|
||||
[o:sabnzbd:p:sabnzbd-translations:r:po-email-sabemail-pot--develop]
|
||||
file_filter = po/email/<lang>.po
|
||||
minimum_perc = 0
|
||||
source_file = po/email/SABemail.pot
|
||||
source_lang = en
|
||||
type = PO
|
||||
|
||||
[o:sabnzbd:p:sabnzbd-translations:r:po-nsis-sabnsis-pot--develop]
|
||||
file_filter = po/nsis/<lang>.po
|
||||
minimum_perc = 0
|
||||
source_file = po/nsis/SABnsis.pot
|
||||
source_lang = en
|
||||
type = PO
|
||||
28
ABOUT.txt
@@ -1,28 +0,0 @@
|
||||
*******************************************
|
||||
*** This is SABnzbd 2.3.7 ***
|
||||
*******************************************
|
||||
SABnzbd is an open-source cross-platform binary newsreader.
|
||||
It simplifies the process of downloading from Usenet dramatically,
|
||||
thanks to its friendly web-based user interface and advanced
|
||||
built-in post-processing options that automatically verify, repair,
|
||||
extract and clean up posts downloaded from Usenet.
|
||||
SABnzbd also has a fully customizable user interface,
|
||||
and offers a complete API for third-party applications to hook into.
|
||||
|
||||
There is an extensive Wiki on the use of SABnzbd.
|
||||
https://sabnzbd.org/wiki/
|
||||
|
||||
Please also read the file "ISSUES.txt"
|
||||
|
||||
The organization of the download queue is different from 0.7.x (and older).
|
||||
1.0.0 will not finish downloading an existing queue.
|
||||
Also, your sabnzbd.ini file will be upgraded, making it
|
||||
incompatible with older releases.
|
||||
|
||||
*******************************************
|
||||
*** Upgrading from 0.7.x and below ***
|
||||
*******************************************
|
||||
Empty your current queue
|
||||
Stop SABnzbd.
|
||||
Install new version
|
||||
Start SABnzbd.
|
||||
@@ -1,24 +1,26 @@
|
||||
|
||||
(c) Copyright 2007-2019 by "The SABnzbd-team" <team@sabnzbd.org>
|
||||
(c) Copyright 2007-2025 by The SABnzbd-Team (sabnzbd.org)
|
||||
|
||||
The SABnzbd-team is:
|
||||
The SABnzbd-Team is:
|
||||
|
||||
Active team:
|
||||
ShyPike <shypike@sabnzbd.org>
|
||||
inpheaux <inpheaux@sabnzbd.org>
|
||||
zoggy <zoggy@sabnzbd.org>
|
||||
Safihre <safihre@sabnzbd.org>
|
||||
Sleeping members
|
||||
sw1tch <switch@sabnzbd.org>
|
||||
pairofdimes <pairofdimes@sabnzbd.org>
|
||||
rAf <rAf@sabnzbd.org>
|
||||
Honorary member (and original author)
|
||||
Gregor Kaufmann <tdian@users.sourceforge.net>
|
||||
Safihre
|
||||
sanderjo
|
||||
jcfp
|
||||
inpheaux
|
||||
zoggy
|
||||
Sleeping members:
|
||||
ShyPike
|
||||
sw1tch
|
||||
pairofdimes
|
||||
rAf
|
||||
Honorary member (and original author):
|
||||
Gregor Kaufmann
|
||||
|
||||
The main contributors and moderators of the translations
|
||||
The main contributors and moderators of the translations:
|
||||
Danish: Rene (nordjyden6), Scott
|
||||
Dutch: ShyPike, Safihre
|
||||
French : rAf, Fox Ace, Fred, Morback, Jih
|
||||
French: rAf, Fox Ace, Fred, Morback, Jih
|
||||
German: Severin Heiniger, Tim Hartmann, DonPizza, Alex
|
||||
Norwegian: Protx, mjelva, TomP, John
|
||||
Romanian: nicusor
|
||||
|
||||
56
INSTALL.txt
@@ -1,10 +1,10 @@
|
||||
SABnzbd 2.3.7
|
||||
SABnzbd
|
||||
|
||||
-------------------------------------------------------------------------------
|
||||
0) LICENSE
|
||||
-------------------------------------------------------------------------------
|
||||
|
||||
(c) Copyright 2007-2019 by "The SABnzbd-team" <team@sabnzbd.org>
|
||||
(c) Copyright 2007-2025 by The SABnzbd-Team (sabnzbd.org)
|
||||
|
||||
This program is free software; you can redistribute it and/or
|
||||
modify it under the terms of the GNU General Public License
|
||||
@@ -41,7 +41,7 @@ Use the "Help" button in the web-interface to be directed to the Help Wiki.
|
||||
-------------------------------------------------------------------------------
|
||||
3) INSTALL pre-built macOS binaries
|
||||
-------------------------------------------------------------------------------
|
||||
Download the DMG file, mount and drag the SABnzbd icon to Programs.
|
||||
Download the DMG file, mount and drag the SABnzbd icon to Applications.
|
||||
Just like you do with so many apps.
|
||||
|
||||
-------------------------------------------------------------------------------
|
||||
@@ -52,48 +52,26 @@ Specific guides to install from source are available for Windows and macOS:
|
||||
https://sabnzbd.org/wiki/installation/install-macos
|
||||
https://sabnzbd.org/wiki/installation/install-from-source-windows
|
||||
|
||||
You need to have Python installed plus some non-standard Python modules
|
||||
and a few tools.
|
||||
Only Python 3.9 and above is supported.
|
||||
|
||||
All platforms
|
||||
Python-2.7.latest http://www.python.org (2.7.9+ recommended)
|
||||
|
||||
Windows
|
||||
PyWin32 use "pip install pypiwin32"
|
||||
subprocessww use "pip install subprocessww"
|
||||
|
||||
Essential modules
|
||||
cheetah-2.0.1+ use "pip install cheetah"
|
||||
par2cmdline >= 0.4 https://github.com/Parchive/par2cmdline/releases
|
||||
See also: https://sabnzbd.org/wiki/installation/multicore-par2
|
||||
unrar >= 5.00+ http://www.rarlab.com/rar_add.htm
|
||||
openssl >= 1.0.0 http://www.openssl.org/
|
||||
|
||||
Optional modules
|
||||
unzip >= 6.00 http://www.info-zip.org/
|
||||
7zip >= 9.20 http://www.7zip.org/
|
||||
sabyenc == 3.3.1 use "pip install sabyenc"
|
||||
More information: https://sabnzbd.org/sabyenc
|
||||
cryptography >= 1.0 use "pip install cryptography"
|
||||
Enables certificate generation and detection of encrypted RAR-files
|
||||
|
||||
Optional modules Linux
|
||||
pynotify Should be part of GTK for Python support on Debian/Ubuntu
|
||||
If not, you cannot use the NotifyOSD feature.
|
||||
python-dbus Enable option to Shutdown/Restart/Standby PC on queue finish.
|
||||
|
||||
Embedded modules (preferably use the included version)
|
||||
CherryPy-8.1.2 with patches http://www.cherrypy.org
|
||||
On Linux systems you need to install:
|
||||
par2 unrar python3-setuptools python3-pip
|
||||
|
||||
On non-X86 platforms, for which PyPI does not provide all pre-compiled packages,
|
||||
you also need to install these development libraries (exact names might differ per platform):
|
||||
libffi-dev libssl-dev
|
||||
|
||||
Unpack the ZIP-file containing the SABnzbd sources to any folder of your liking.
|
||||
|
||||
If you want multiple languages, you need to compile the translations.
|
||||
Start this from a shell terminal (or command prompt):
|
||||
python tools/make_mo.py
|
||||
Inside the SABnzbd source directory, install all required Python packages by running:
|
||||
python3 -m pip install -r requirements.txt
|
||||
|
||||
If you want non-English languages, you need to compile the translations.
|
||||
Start this from a shell terminal (or command prompt):
|
||||
python -OO SABnzbd.py
|
||||
python3 tools/make_mo.py
|
||||
|
||||
To start SABnzbd, run this from a shell terminal (or command prompt):
|
||||
python3 -OO SABnzbd.py
|
||||
|
||||
Within a few seconds your web browser will start and show the user interface.
|
||||
Use the "Help" button in the web-interface to be directed to the Help Wiki.
|
||||
@@ -108,7 +86,7 @@ This means that SABnzbd cannot use the default port 8080 to run its web-server o
|
||||
Try to use another port; you'll need to use a command window:
|
||||
SABnzbd.exe -s localhost:7777
|
||||
or
|
||||
python SABnzbd.py -s localhost:7777
|
||||
python3 SABnzbd.py -s localhost:7777
|
||||
|
||||
You may of course try other port numbers too.
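If you are not sure which ports are free, the short Python sketch below checks a few candidates before you pick one with "-s localhost:PORT". It is only an illustration for this troubleshooting step, not part of SABnzbd, and the candidate port numbers are placeholders.

```python
# Illustrative only: check whether a TCP port on localhost is already taken
# before passing it to "SABnzbd.py -s localhost:PORT".
import socket

def port_in_use(port: int, host: str = "127.0.0.1") -> bool:
    # connect_ex() returns 0 when something is already listening on the port
    with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
        s.settimeout(1)
        return s.connect_ex((host, port)) == 0

for candidate in (8080, 7777, 9090):  # example ports, adjust as needed
    print(candidate, "in use" if port_in_use(candidate) else "free")
```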
|
||||
|
||||
|
||||
34
ISSUES.txt
@@ -14,50 +14,33 @@
|
||||
For these the server blocking method is not very favourable.
|
||||
There is an INI-only option that will limit blocks to 1 minute.
|
||||
no_penalties = 1
|
||||
See: https://sabnzbd.org/wiki/configuration/2.3/special
|
||||
See: https://sabnzbd.org/wiki/configuration/3.4/special
|
||||
|
||||
- Some third-party utilties try to probe SABnzbd API in such a way that you will
|
||||
- Some third-party utilities try to probe SABnzbd API in such a way that you will
|
||||
often see warnings about unauthenticated access.
|
||||
If you are sure these probes are harmless, you can suppress the warnings by
|
||||
setting the option "api_warnings" to 0.
|
||||
See: https://sabnzbd.org/wiki/configuration/2.3/special
|
||||
See: https://sabnzbd.org/wiki/configuration/3.4/special
|
||||
|
||||
- On OSX you may encounter downloaded files with foreign characters.
|
||||
- On macOS you may encounter downloaded files with foreign characters.
|
||||
The par2 repair may fail when the files were created on a Windows system.
|
||||
The problem is caused by the PAR2 utility and we cannot fix this now.
|
||||
This does not apply to files inside RAR files.
|
||||
|
||||
- On Linux when you download files they may have the wrong character encoding.
|
||||
You will see this only when downloaded files contain accented characters.
|
||||
You need to fix it yourself by running the convmv utility (available for most Linux platforms).
|
||||
Possibly the file system override setting 'fsys_type' might solve things:
|
||||
See: https://sabnzbd.org/wiki/configuration/2.3/special
|
||||
|
||||
- The "Watched Folder" sometimes fails to delete the NZB files it has
|
||||
processed. This happens when other software still accesses these files.
|
||||
Some third-party utilities supporting SABnzbd are known to do this.
|
||||
We cannot solve this problem, because the Operating System (read Windows)
|
||||
prevents the removal.
|
||||
|
||||
- Memory usage can sometimes have high peaks. This makes using SABnzbd on very low
|
||||
memory systems (e.g. a NAS device or a router) a challenge.
|
||||
In particular on Synology (SynoCommunity) the device may report that SABnzbd is using
|
||||
a lot of memory even when idle. In this case the memory is usually not actually used by
|
||||
SABnzbd and will be available if required by other apps or the system. More information
|
||||
can be found in the discussion here: https://github.com/SynoCommunity/spksrc/issues/2856
|
||||
|
||||
- SABnzbd is not compatible with some software firewall versions.
|
||||
The Microsoft Windows Firewall works fine, but remember to tell this
|
||||
firewall that SABnzbd is allowed to talk to other computers.
|
||||
|
||||
- When SABnzbd cannot send notification emails, check your virus scanner,
|
||||
firewall or security suite. It may be blocking outgoing email.
|
||||
|
||||
- When you are using external drives or network shares on OSX or Linux
|
||||
- When you are using external drives or network shares on macOS or Linux
|
||||
make sure that the drives are mounted.
|
||||
The operating system will simply redirect your files to alternative locations.
|
||||
You may have trouble finding the files when mounting the drive later.
|
||||
On OSX, SABnzbd will not create new folders in /Volumes.
|
||||
On macOS, SABnzbd will not create new folders in /Volumes.
|
||||
The result will be a failed job that can be retried once the volume has been mounted.
|
||||
|
||||
- If you use a mounted drive as "temporary download folder", it must be present when SABnzbd
|
||||
@@ -65,8 +48,3 @@
|
||||
You can make SABnzbd wait for a mount of the "temporary download folder" by setting
|
||||
Config->Special->wait_for_dfolder to 1.
|
||||
SABnzbd will appear to hang until the drive is mounted.
|
||||
|
||||
- If you experience speed-drops to KB/s when using a VPN, try setting the number of connections
|
||||
to your servers to a total of 7. There is a CPU-usage reduction feature in SABnzbd that
|
||||
gets confused by the way some VPNs handle the state of a connection. Below 8 connections
|
||||
this feature is not active.
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
(c) Copyright 2007-2019 by "The SABnzbd-team" <team@sabnzbd.org>
|
||||
(c) Copyright 2007-2025 by The SABnzbd-Team (sabnzbd.org)
|
||||
|
||||
This program is free software; you can redistribute it and/or
|
||||
modify it under the terms of the GNU General Public License
|
||||
|
||||
10
PKG-INFO
@@ -1,10 +0,0 @@
|
||||
Metadata-Version: 1.0
|
||||
Name: SABnzbd
|
||||
Version: 2.3.7
|
||||
Summary: SABnzbd-2.3.7
|
||||
Home-page: https://sabnzbd.org
|
||||
Author: The SABnzbd Team
|
||||
Author-email: team@sabnzbd.org
|
||||
License: GNU General Public License 2 (GPL2 or later)
|
||||
Description: Fully automated Usenet Binary Downloader
|
||||
Platform: posix
|
||||
41
README.md
@@ -1,53 +1,49 @@
|
||||
SABnzbd - The automated Usenet download tool
|
||||
============================================
|
||||
|
||||
[](https://isitmaintained.com/project/sabnzbd/sabnzbd "Average time to resolve an issue")
|
||||
[](https://travis-ci.org/sabnzbd/sabnzbd)
|
||||
[](https://ci.appveyor.com/project/Safihre/sabnzbd)
|
||||
[](https://snapcraft.io/sabnzbd)
|
||||
[](https://www.gnu.org/licenses/old-licenses/gpl-2.0.en.html)
|
||||
[](https://www.gnu.org/licenses/old-licenses/gpl-2.0.en.html)
|
||||
[](https://discord.sabnzbd.org)
|
||||
|
||||
SABnzbd is an Open Source Binary Newsreader written in Python.
|
||||
|
||||
It's totally free, incredibly easy to use, and works practically everywhere.
|
||||
It's totally free, easy to use, and works practically everywhere.
|
||||
SABnzbd makes Usenet as simple and streamlined as possible by automating everything we can. All you have to do is add an `.nzb`. SABnzbd takes over from there, where it will be automatically downloaded, verified, repaired, extracted and filed away with zero human interaction.
|
||||
SABnzbd offers an easy setup wizard and has self-analysis tools to verify your setup.
|
||||
|
||||
If you want to know more you can head over to our website: https://sabnzbd.org.
|
||||
|
||||
## Resolving Dependencies
|
||||
|
||||
SABnzbd has a good deal of dependencies you'll need before you can get running. If you've previously run SABnzbd from one of the various Linux packages, then you likely already have all the needed dependencies. If not, here's what you're looking for:
|
||||
SABnzbd has a few dependencies you'll need before you can get running. If you've previously run SABnzbd from one of the various Linux packages, then you likely already have all the needed dependencies. If not, here's what you're looking for:
|
||||
|
||||
- `python` (only 2.7.x and higher, but not 3.x.x)
|
||||
- `python-cheetah`
|
||||
- `python` (Python 3.9 and above, often called `python3`)
|
||||
- Python modules listed in `requirements.txt`. Install with `python3 -m pip install -r requirements.txt -U`
|
||||
- `par2` (Multi-threaded par2 installation guide can be found [here](https://sabnzbd.org/wiki/installation/multicore-par2))
|
||||
- `unrar` (Make sure you get the "official" non-free version of unrar)
|
||||
- `sabyenc` (installation guide can be found [here](https://sabnzbd.org/sabyenc))
|
||||
- `unrar` (make sure you get the "official" non-free version of unrar)
|
||||
|
||||
Optional:
|
||||
- `python-cryptography` (enables certificate generation and detection of encrypted RAR-files during download)
|
||||
- `python-dbus` (enable option to Shutdown/Restart/Standby PC on queue finish)
|
||||
- `7zip`
|
||||
- See `requirements.txt`
|
||||
|
||||
Your package manager should supply these. If not, we've got links in our more in-depth [installation guide](https://github.com/sabnzbd/sabnzbd/blob/master/INSTALL.txt).
|
||||
Your package manager should supply these. If not, we've got links in our [installation guide](https://github.com/sabnzbd/sabnzbd/blob/master/INSTALL.txt).
|
||||
|
||||
## Running SABnzbd from source
|
||||
|
||||
Once you've sorted out all the dependencies, simply run:
|
||||
|
||||
```
|
||||
python -OO SABnzbd.py
|
||||
python3 -OO SABnzbd.py
|
||||
```
|
||||
|
||||
Or, if you want to run in the background:
|
||||
|
||||
```
|
||||
python -OO SABnzbd.py -d -f /path/to/sabnzbd.ini
|
||||
python3 -OO SABnzbd.py -d -f /path/to/sabnzbd.ini
|
||||
```
|
||||
|
||||
If you want multi-language support, run:
|
||||
|
||||
```
|
||||
python tools/make_mo.py
|
||||
python3 tools/make_mo.py
|
||||
```
|
||||
|
||||
Our many other command line options are explained in depth [here](https://sabnzbd.org/wiki/advanced/command-line-parameters).
|
||||
@@ -70,3 +66,12 @@ Conditions:
|
||||
- Bugfixes created specifically for a release branch are done there (because they are specific, they're not cherry-picked to `develop`).
|
||||
- Bugfixes done on `develop` may be cherry-picked to a release branch.
|
||||
- We will not release a 1.0.2 if a 1.1.0 has already been released.
|
||||
|
||||
## Privacy Policy
|
||||
|
||||
This program will not transfer any information to other networked systems unless
|
||||
specifically requested by the user or the person installing or operating it.
|
||||
|
||||
## Code Signing Policy
|
||||
|
||||
For our Windows release, free code signing is provided by [SignPath.io](https://signpath.io), certificate by [SignPath Foundation](https://signpath.org).
|
||||
|
||||
71
README.mkd
@@ -1,41 +1,52 @@
|
||||
Release Notes - SABnzbd 2.3.7
|
||||
Release Notes - SABnzbd 4.6.0 Beta 2
|
||||
=========================================================
|
||||
|
||||
## Improvements and bug fixes since 2.3.6
|
||||
- Hopeless jobs will be failed faster, based on the first article of
|
||||
each file. If 80% of these first articles is missing, the job is aborted.
|
||||
- Option "fast_fail" added to disable the new behavior.
|
||||
- Added option to quickly add a verified test download
|
||||
- Readout of some RSS feeds could result in a crash
|
||||
- Direct Unpack could get stuck processing
|
||||
- Show Details on Servers page could break graph display
|
||||
- Windows: Update MultiPar to 1.3.0.3
|
||||
This is the second beta release of version 4.6.
|
||||
|
||||
Still looking for help with SABnzbd (Python 3) development!
|
||||
https://www.reddit.com/r/usenet/comments/918nxv/
|
||||
## New features in 4.6.0
|
||||
|
||||
* Added support for NNTP Pipelining which eliminates idle waiting between
|
||||
requests, significantly improving speeds on high-latency connections.
|
||||
Read more here: https://sabnzbd.org/wiki/advanced/nntp-pipelining
|
||||
* Dynamically increase Assembler limits on faster connections.
|
||||
* Improved disk speed measurement in Status window.
|
||||
* Enable `verify_xff_header` by default.
|
||||
* Reduce delays between jobs during post-processing.
|
||||
* If a download only has `.nzb` files inside, the new downloads
|
||||
will include the name of the original download.
|
||||
* Dropped support for Python 3.8.
|
||||
* Windows: Added Windows ARM (portable) release.
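The NNTP Pipelining item above is about removing the idle wait between consecutive article requests. The sketch below is a minimal illustration of that idea using plain sockets; it is not SABnzbd's implementation, it assumes a reachable news server that accepts unauthenticated STAT commands, and the host, port and message-ids are placeholders.

```python
# Minimal illustration of serial vs. pipelined NNTP requests (not SABnzbd code).
import socket

HOST, PORT = "news.example.com", 119                               # placeholder server
MESSAGE_IDS = ["<id1@example>", "<id2@example>", "<id3@example>"]  # placeholders

def read_line(reader):
    return reader.readline().decode("latin-1").rstrip("\r\n")

with socket.create_connection((HOST, PORT), timeout=30) as conn:
    reader = conn.makefile("rb")
    print(read_line(reader))  # server greeting

    # Serial: one command per round trip; the connection sits idle while
    # each reply travels back, which hurts on high-latency links.
    for msg_id in MESSAGE_IDS:
        conn.sendall(f"STAT {msg_id}\r\n".encode())
        print(read_line(reader))

    # Pipelined: send all commands up front, then read the replies in order.
    # The requests overlap in flight, so the per-request idle time disappears.
    conn.sendall("".join(f"STAT {m}\r\n" for m in MESSAGE_IDS).encode())
    for _ in MESSAGE_IDS:
        print(read_line(reader))

    conn.sendall(b"QUIT\r\n")
```

The gain comes purely from overlapping round trips: the server still answers in order, the client just stops waiting for each reply before sending the next request.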
|
||||
|
||||
## Bug fixes since 4.5.0
|
||||
|
||||
* `Check before download` could get stuck or fail to reject.
|
||||
* No error was shown in case NZB upload failed.
|
||||
* Correct mobile layout if `Full Width` is enabled.
|
||||
* Aborted Direct Unpack could result in no files being unpacked.
|
||||
* Windows: Tray icon disappears after Explorer restart.
|
||||
* macOS: Slow to start on some network setups.
|
||||
|
||||
## Upgrading from 2.2.x and older
|
||||
- Finish queue
|
||||
- Stop SABnzbd
|
||||
- Install new version
|
||||
- Start SABnzbd
|
||||
|
||||
## Upgrade notices
|
||||
- When upgrading from 2.2.0 or older the queue will be converted. Job order,
|
||||
settings and data will be preserved, but all jobs will be unpaused and
|
||||
URL's that did not finish fetching before the upgrade will be lost.
|
||||
- The organization of the download queue is different from 0.7.x releases.
|
||||
This version will not see the 0.7.x queue, but you can restore the jobs
|
||||
by going to Status page and using Queue Repair.
|
||||
|
||||
* You can directly upgrade from version 3.0.0 and newer.
|
||||
* Upgrading from older versions will require performing a `Queue repair`.
|
||||
* Downgrading from version 4.2.0 or newer to 3.7.2 or older will require
|
||||
performing a `Queue repair` due to changes in the internal data format.
|
||||
|
||||
## Known problems and solutions
|
||||
- Read the file "ISSUES.txt"
|
||||
|
||||
* Read `ISSUES.txt` or https://sabnzbd.org/wiki/introduction/known-issues
|
||||
|
||||
## Code Signing Policy
|
||||
|
||||
Windows code signing is provided by SignPath.io using a SignPath Foundation certificate.
|
||||
|
||||
## About
|
||||
SABnzbd is an open-source cross-platform binary newsreader.
|
||||
It simplifies the process of downloading from Usenet dramatically, thanks
|
||||
to its web-based user interface and advanced built-in post-processing options
|
||||
that automatically verify, repair, extract and clean up posts downloaded
|
||||
from Usenet.
|
||||
|
||||
(c) Copyright 2007-2019 by "The SABnzbd-team" \<team@sabnzbd.org\>
|
||||
SABnzbd is an open-source cross-platform binary newsreader.
|
||||
It simplifies the process of downloading from Usenet dramatically, thanks to its web-based
|
||||
user interface and advanced built-in post-processing options that automatically verify, repair,
|
||||
extract and clean up posts downloaded from Usenet.
|
||||
|
||||
(c) Copyright 2007-2025 by The SABnzbd-Team (sabnzbd.org)
|
||||
|
||||
154
SABHelper.py
@@ -1,154 +0,0 @@
|
||||
#!/usr/bin/python -OO
|
||||
# Copyright 2007-2019 The SABnzbd-Team <team@sabnzbd.org>
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
|
||||
import sys
|
||||
if sys.version_info[:2] < (2, 6) or sys.version_info[:2] >= (3, 0):
|
||||
print "Sorry, requires Python 2.6 or 2.7."
|
||||
sys.exit(1)
|
||||
|
||||
import time
|
||||
import subprocess
|
||||
|
||||
|
||||
try:
|
||||
import win32api
|
||||
import win32file
|
||||
import win32serviceutil
|
||||
import win32evtlogutil
|
||||
import win32event
|
||||
import win32service
|
||||
import pywintypes
|
||||
except ImportError:
|
||||
print "Sorry, requires Python module PyWin32."
|
||||
sys.exit(1)
|
||||
|
||||
from util.mailslot import MailSlot
|
||||
from util.apireg import del_connection_info, set_connection_info
|
||||
|
||||
|
||||
WIN_SERVICE = None
|
||||
|
||||
|
||||
def HandleCommandLine(allow_service=True):
|
||||
""" Handle command line for a Windows Service
|
||||
Prescribed name that will be called by Py2Exe.
|
||||
You MUST set 'cmdline_style':'custom' in the package.py!
|
||||
"""
|
||||
win32serviceutil.HandleCommandLine(SABHelper)
|
||||
|
||||
|
||||
def start_sab():
|
||||
return subprocess.Popen('net start SABnzbd', stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True).stdout.read()
|
||||
|
||||
|
||||
def main():
|
||||
|
||||
mail = MailSlot()
|
||||
if not mail.create(10):
|
||||
return '- Cannot create Mailslot'
|
||||
|
||||
active = False # SABnzbd should be running
|
||||
counter = 0 # Time allowed for SABnzbd to be silent
|
||||
while True:
|
||||
msg = mail.receive()
|
||||
if msg:
|
||||
if msg == 'restart':
|
||||
time.sleep(1.0)
|
||||
counter = 0
|
||||
del_connection_info(user=False)
|
||||
start_sab()
|
||||
elif msg == 'stop':
|
||||
active = False
|
||||
del_connection_info(user=False)
|
||||
elif msg == 'active':
|
||||
active = True
|
||||
counter = 0
|
||||
elif msg.startswith('api '):
|
||||
active = True
|
||||
counter = 0
|
||||
_cmd, url = msg.split()
|
||||
if url:
|
||||
set_connection_info(url.strip(), user=False)
|
||||
|
||||
if active:
|
||||
counter += 1
|
||||
if counter > 120: # 120 seconds
|
||||
counter = 0
|
||||
start_sab()
|
||||
|
||||
rc = win32event.WaitForMultipleObjects((WIN_SERVICE.hWaitStop,
|
||||
WIN_SERVICE.overlapped.hEvent), 0, 1000)
|
||||
if rc == win32event.WAIT_OBJECT_0:
|
||||
del_connection_info(user=False)
|
||||
mail.disconnect()
|
||||
return ''
|
||||
|
||||
|
||||
##############################################################################
|
||||
# Windows Service Support
|
||||
##############################################################################
|
||||
import servicemanager
|
||||
|
||||
|
||||
class SABHelper(win32serviceutil.ServiceFramework):
|
||||
""" Win32 Service Handler """
|
||||
|
||||
_svc_name_ = 'SABHelper'
|
||||
_svc_display_name_ = 'SABnzbd Helper'
|
||||
_svc_deps_ = ["EventLog", "Tcpip"]
|
||||
_svc_description_ = 'Automated downloading from Usenet. ' \
|
||||
'This service helps SABnzbd to restart itself.'
|
||||
|
||||
def __init__(self, args):
|
||||
global WIN_SERVICE
|
||||
win32serviceutil.ServiceFramework.__init__(self, args)
|
||||
|
||||
self.hWaitStop = win32event.CreateEvent(None, 0, 0, None)
|
||||
self.overlapped = pywintypes.OVERLAPPED() # @UndefinedVariable
|
||||
self.overlapped.hEvent = win32event.CreateEvent(None, 0, 0, None)
|
||||
WIN_SERVICE = self
|
||||
|
||||
def SvcDoRun(self):
|
||||
msg = 'SABHelper-service'
|
||||
self.Logger(servicemanager.PYS_SERVICE_STARTED, msg + ' has started')
|
||||
res = main()
|
||||
self.Logger(servicemanager.PYS_SERVICE_STOPPED, msg + ' has stopped' + res)
|
||||
|
||||
def SvcStop(self):
|
||||
self.ReportServiceStatus(win32service.SERVICE_STOP_PENDING)
|
||||
win32event.SetEvent(self.hWaitStop)
|
||||
|
||||
def Logger(self, state, msg):
|
||||
win32evtlogutil.ReportEvent(self._svc_display_name_,
|
||||
state, 0,
|
||||
servicemanager.EVENTLOG_INFORMATION_TYPE,
|
||||
(self._svc_name_, unicode(msg)))
|
||||
|
||||
def ErrLogger(self, msg, text):
|
||||
win32evtlogutil.ReportEvent(self._svc_display_name_,
|
||||
servicemanager.PYS_SERVICE_STOPPED, 0,
|
||||
servicemanager.EVENTLOG_ERROR_TYPE,
|
||||
(self._svc_name_, unicode(msg)),
|
||||
unicode(text))
|
||||
|
||||
|
||||
##############################################################################
|
||||
# Platform specific startup code
|
||||
##############################################################################
|
||||
if __name__ == '__main__':
|
||||
|
||||
win32serviceutil.HandleCommandLine(SABHelper, argv=sys.argv)
|
||||
1793
SABnzbd.py
File diff suppressed because it is too large
14
appveyor.yml
@@ -1,14 +0,0 @@
|
||||
environment:
|
||||
SAB_NEWSSERVER_HOST:
|
||||
secure: UNnTfVHDugC9amTucdTRyxe8RZfVBLYfI1EOTaDUjNM=
|
||||
SAB_NEWSSERVER_USER:
|
||||
secure: npe0D4TiEzXMUVMCH3+SHA==
|
||||
SAB_NEWSSERVER_PASSWORD:
|
||||
secure: 28COv3RG+KAnBLxIrR1EDw==
|
||||
|
||||
install:
|
||||
- pip install --upgrade -r tests/requirements.txt
|
||||
- pip install pypiwin32 subprocessww
|
||||
|
||||
build_script:
|
||||
- python ./tests/test_functional.py
|
||||
179
builder/SABnzbd.spec
Normal file
@@ -0,0 +1,179 @@
|
||||
# -*- mode: python -*-
|
||||
import os
|
||||
import sys
|
||||
|
||||
from PyInstaller.building.api import EXE, COLLECT, PYZ
|
||||
from PyInstaller.building.build_main import Analysis
|
||||
from PyInstaller.building.osx import BUNDLE
|
||||
from PyInstaller.utils.hooks import collect_data_files, collect_submodules
|
||||
|
||||
from builder.constants import EXTRA_FILES, EXTRA_FOLDERS, RELEASE_VERSION, RELEASE_VERSION_TUPLE
|
||||
|
||||
# Add extra files in the PyInstaller-spec
|
||||
extra_pyinstaller_files = []
|
||||
|
||||
# Add hidden imports
|
||||
extra_hiddenimports = ["Cheetah.DummyTransaction", "cheroot.ssl.builtin", "certifi"]
|
||||
extra_hiddenimports.extend(collect_submodules("apprise"))
|
||||
extra_hiddenimports.extend(collect_submodules("babelfish.converters"))
|
||||
extra_hiddenimports.extend(collect_submodules("guessit.data"))
|
||||
|
||||
# Add platform specific stuff
|
||||
if sys.platform == "darwin":
|
||||
extra_hiddenimports.extend(["objc", "PyObjCTools"])
|
||||
# macOS folders
|
||||
EXTRA_FOLDERS += ["macos/par2/", "macos/unrar/", "macos/7zip/"]
|
||||
# Add NZB-icon file
|
||||
extra_pyinstaller_files.append(("builder/macos/image/nzbfile.icns", "."))
|
||||
# Version information is set differently on macOS
|
||||
version_info = None
|
||||
else:
|
||||
# Build would fail on non-Windows
|
||||
from PyInstaller.utils.win32.versioninfo import (
|
||||
VSVersionInfo,
|
||||
FixedFileInfo,
|
||||
StringFileInfo,
|
||||
StringTable,
|
||||
StringStruct,
|
||||
VarFileInfo,
|
||||
VarStruct,
|
||||
)
|
||||
|
||||
# Windows
|
||||
extra_hiddenimports.extend(["win32timezone", "winrt.windows.foundation.collections"])
|
||||
EXTRA_FOLDERS += ["win/par2/", "win/unrar/", "win/7zip/"]
|
||||
EXTRA_FILES += ["portable.cmd"]
|
||||
|
||||
# Detailed instructions are in the PyInstaller documentation
|
||||
# We don't include the alpha/beta/rc in the counters
|
||||
version_info = VSVersionInfo(
|
||||
ffi=FixedFileInfo(
|
||||
filevers=RELEASE_VERSION_TUPLE,
|
||||
prodvers=RELEASE_VERSION_TUPLE,
|
||||
mask=0x3F,
|
||||
flags=0x0,
|
||||
OS=0x40004,
|
||||
fileType=0x1,
|
||||
subtype=0x0,
|
||||
date=(0, 0),
|
||||
),
|
||||
kids=[
|
||||
StringFileInfo(
|
||||
[
|
||||
StringTable(
|
||||
"040904B0",
|
||||
[
|
||||
StringStruct("Comments", f"SABnzbd {RELEASE_VERSION}"),
|
||||
StringStruct("CompanyName", "The SABnzbd-Team"),
|
||||
StringStruct("FileDescription", f"SABnzbd {RELEASE_VERSION}"),
|
||||
StringStruct("FileVersion", RELEASE_VERSION),
|
||||
StringStruct("LegalCopyright", "The SABnzbd-Team"),
|
||||
StringStruct("ProductName", f"SABnzbd {RELEASE_VERSION}"),
|
||||
StringStruct("ProductVersion", RELEASE_VERSION),
|
||||
],
|
||||
)
|
||||
]
|
||||
),
|
||||
VarFileInfo([VarStruct("Translation", [1033, 1200])]),
|
||||
],
|
||||
)
|
||||
|
||||
# Process the extra-files and folders
|
||||
for file_item in EXTRA_FILES:
|
||||
extra_pyinstaller_files.append((file_item, "."))
|
||||
for folder_item in EXTRA_FOLDERS:
|
||||
extra_pyinstaller_files.append((folder_item, folder_item))
|
||||
|
||||
# Add babelfish data files
|
||||
extra_pyinstaller_files.extend(collect_data_files("babelfish"))
|
||||
extra_pyinstaller_files.extend(collect_data_files("guessit"))
|
||||
extra_pyinstaller_files.extend(collect_data_files("apprise"))
|
||||
extra_pyinstaller_files.extend(collect_data_files("dateutil"))
|
||||
|
||||
pyi_analysis = Analysis(
|
||||
["SABnzbd.py"],
|
||||
datas=extra_pyinstaller_files,
|
||||
hiddenimports=extra_hiddenimports,
|
||||
excludes=["ujson", "FixTk", "tcl", "tk", "_tkinter", "tkinter", "Tkinter", "pydoc", "pydoc_data.topics"],
|
||||
module_collection_mode={"apprise.plugins": "py"},
|
||||
)
|
||||
|
||||
pyz = PYZ(pyi_analysis.pure, pyi_analysis.zipped_data)
|
||||
|
||||
codesign_identity = os.environ.get("SIGNING_AUTH")
|
||||
if not codesign_identity:
|
||||
# PyInstaller needs specifically None, not just an empty value
|
||||
codesign_identity = None
|
||||
|
||||
# macOS specific parameters are ignored on other platforms
|
||||
exe = EXE(
|
||||
pyz,
|
||||
pyi_analysis.scripts,
|
||||
[],
|
||||
exclude_binaries=True,
|
||||
name="SABnzbd",
|
||||
console=False,
|
||||
append_pkg=False,
|
||||
icon="icons/sabnzbd.ico",
|
||||
contents_directory=".",
|
||||
version=version_info,
|
||||
target_arch="universal2",
|
||||
entitlements_file="builder/macos/entitlements.plist",
|
||||
codesign_identity=codesign_identity,
|
||||
)
|
||||
|
||||
coll = COLLECT(exe, pyi_analysis.binaries, pyi_analysis.zipfiles, pyi_analysis.datas, name="SABnzbd")
|
||||
|
||||
# We need to run again for the console-app
|
||||
if sys.platform == "win32":
|
||||
# Enable console=True for this one
|
||||
console_exe = EXE(
|
||||
pyz,
|
||||
pyi_analysis.scripts,
|
||||
[],
|
||||
exclude_binaries=True,
|
||||
name="SABnzbd-console",
|
||||
append_pkg=False,
|
||||
icon="icons/sabnzbd.ico",
|
||||
contents_directory=".",
|
||||
version=version_info,
|
||||
)
|
||||
|
||||
console_coll = COLLECT(
|
||||
console_exe,
|
||||
pyi_analysis.binaries,
|
||||
pyi_analysis.zipfiles,
|
||||
pyi_analysis.datas,
|
||||
name="SABnzbd-console",
|
||||
)
|
||||
|
||||
# Build the APP on macOS
|
||||
if sys.platform == "darwin":
|
||||
info_plist = {
|
||||
"NSUIElement": 1,
|
||||
"NSPrincipalClass": "NSApplication",
|
||||
"CFBundleShortVersionString": RELEASE_VERSION,
|
||||
"NSHumanReadableCopyright": "The SABnzbd-Team",
|
||||
"CFBundleIdentifier": "org.sabnzbd.sabnzbd",
|
||||
"CFBundleDocumentTypes": [
|
||||
{
|
||||
"CFBundleTypeExtensions": ["nzb"],
|
||||
"CFBundleTypeIconFile": "nzbfile.icns",
|
||||
"CFBundleTypeMIMETypes": ["text/nzb"],
|
||||
"CFBundleTypeName": "NZB File",
|
||||
"CFBundleTypeRole": "Viewer",
|
||||
"LSTypeIsPackage": 0,
|
||||
"NSPersistentStoreTypeKey": "Binary",
|
||||
}
|
||||
],
|
||||
"LSMinimumSystemVersion": "10.13",
|
||||
"LSEnvironment": {"LANG": "en_US.UTF-8", "LC_ALL": "en_US.UTF-8"},
|
||||
}
|
||||
|
||||
app = BUNDLE(
|
||||
coll,
|
||||
name="SABnzbd.app",
|
||||
icon="builder/macos/image/sabnzbdplus.icns",
|
||||
bundle_identifier="org.sabnzbd.sabnzbd",
|
||||
info_plist=info_plist,
|
||||
)
79 builder/constants.py Normal file
@@ -0,0 +1,79 @@
|
||||
#!/usr/bin/python3 -OO
|
||||
# Copyright 2008-2025 by The SABnzbd-Team (sabnzbd.org)
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
|
||||
import os
|
||||
import platform
|
||||
import re
|
||||
|
||||
# Constants
|
||||
VERSION_FILE = "sabnzbd/version.py"
|
||||
APPDATA_FILE = "linux/org.sabnzbd.sabnzbd.appdata.xml"
|
||||
|
||||
# To draft a release or not to draft a release?
|
||||
ON_GITHUB_ACTIONS = os.environ.get("CI", False)
|
||||
RELEASE_THIS = "refs/tags/" in os.environ.get("GITHUB_REF", "")
|
||||
|
||||
# Import version.py without the sabnzbd overhead
|
||||
with open(VERSION_FILE) as version_file:
|
||||
exec(version_file.read())
|
||||
RELEASE_VERSION = __version__
|
||||
|
||||
# Pre-releases are longer than 5 characters (e.g. 3.1.0Beta1 vs 3.1.0; note that 3.0.11 also matches)
|
||||
PRERELEASE = len(RELEASE_VERSION) > 5
|
||||
|
||||
# Parse the version info for Windows file properties information
|
||||
version_regexed = re.search(r"(\d+)\.(\d+)\.(\d+)([a-zA-Z]*)(\d*)", RELEASE_VERSION)
|
||||
RELEASE_VERSION_TUPLE = (int(version_regexed.group(1)), int(version_regexed.group(2)), int(version_regexed.group(3)), 0)
|
||||
RELEASE_VERSION_BASE = f"{RELEASE_VERSION_TUPLE[0]}.{RELEASE_VERSION_TUPLE[1]}.{RELEASE_VERSION_TUPLE[2]}"
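# Illustrative example (hypothetical version string, not part of the repo): for a RELEASE_VERSION
# of "4.3.2RC1" the regex above yields RELEASE_VERSION_TUPLE == (4, 3, 2, 0) and
# RELEASE_VERSION_BASE == "4.3.2"; the alpha/beta/RC suffix only affects the PRERELEASE flag above.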
|
||||
|
||||
# Define release name
|
||||
RELEASE_NAME = "SABnzbd-%s" % RELEASE_VERSION
|
||||
RELEASE_TITLE = "SABnzbd %s" % RELEASE_VERSION
|
||||
RELEASE_SRC = RELEASE_NAME + "-src.tar.gz"
|
||||
RELEASE_WIN_BIN_X64 = RELEASE_NAME + "-win64-bin.zip"
|
||||
RELEASE_WIN_BIN_ARM64 = RELEASE_NAME + "-win-arm64-bin.zip"
|
||||
RELEASE_WIN_INSTALLER = RELEASE_NAME + "-win-setup.exe"
|
||||
RELEASE_MACOS = RELEASE_NAME + "-macos.dmg"
|
||||
RELEASE_README = "README.mkd"
|
||||
|
||||
# Detect architecture
|
||||
RELEASE_WIN_BIN = RELEASE_WIN_BIN_X64
|
||||
if platform.machine() == "ARM64":
|
||||
RELEASE_WIN_BIN = RELEASE_WIN_BIN_ARM64
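# For illustration: on a typical 64-bit Intel/AMD build host platform.machine() reports "AMD64",
# so the x64 zip name is kept, while a Windows-on-ARM host reports "ARM64" and selects the arm64 zip.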
|
||||
|
||||
# Used in package.py and SABnzbd.spec
|
||||
EXTRA_FILES = [
|
||||
RELEASE_README,
|
||||
"README.txt",
|
||||
"INSTALL.txt",
|
||||
"LICENSE.txt",
|
||||
"GPL2.txt",
|
||||
"GPL3.txt",
|
||||
"COPYRIGHT.txt",
|
||||
"ISSUES.txt",
|
||||
]
|
||||
EXTRA_FOLDERS = [
|
||||
"scripts/",
|
||||
"licenses/",
|
||||
"locale/",
|
||||
"email/",
|
||||
"interfaces/Glitter/",
|
||||
"interfaces/wizard/",
|
||||
"interfaces/Config/",
|
||||
"scripts/",
|
||||
"icons/",
|
||||
]
8 builder/macos/entitlements.plist Normal file
@@ -0,0 +1,8 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
|
||||
<plist version="1.0">
|
||||
<dict>
|
||||
<key>com.apple.security.cs.allow-unsigned-executable-memory</key>
|
||||
<true/>
|
||||
</dict>
|
||||
</plist>
BIN builder/macos/image/nzbfile.icns Normal file
Binary file not shown.
BIN builder/macos/image/sabnzbd_new_bg.png Normal file
Binary file not shown. (After: 39 KiB)
BIN builder/macos/image/sabnzbd_new_bg.psd Normal file
Binary file not shown.
BIN builder/macos/image/sabnzbdplus.icns Normal file
Binary file not shown.
206 builder/make_dmg.py Normal file
@@ -0,0 +1,206 @@
|
||||
#!/usr/bin/python3 -OO
|
||||
# Copyright 2008-2025 by The SABnzbd-Team (sabnzbd.org)
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
|
||||
import os
|
||||
from constants import RELEASE_VERSION
|
||||
|
||||
|
||||
# We need to call dmgbuild from the command line, so this block sets up how it is invoked
|
||||
if __name__ == "__main__":
|
||||
# Check for DMGBuild
|
||||
try:
|
||||
import dmgbuild
|
||||
except Exception:
|
||||
print("Requires dmgbuild-module, use pip install dmgbuild")
|
||||
exit()
|
||||
|
||||
# Make sure we are in the src folder
|
||||
if not os.path.exists("builder"):
|
||||
raise FileNotFoundError("Run from the main SABnzbd source folder: python builder/package.py")
|
||||
|
||||
# Check if signing is possible
|
||||
authority = os.environ.get("SIGNING_AUTH")
|
||||
|
||||
# Extract version info and set DMG path
|
||||
# Create sub-folder to upload later
|
||||
release = RELEASE_VERSION
|
||||
prod = "SABnzbd-" + release
|
||||
fileDmg = prod + "-macos.dmg"
|
||||
|
||||
# Path to app file
|
||||
apppath = "dist/SABnzbd.app"
|
||||
|
||||
# Copy Readme
|
||||
readmepath = os.path.join(apppath, "Contents/Resources/README.txt")
|
||||
|
||||
# Path to background and the icon
|
||||
backgroundpath = "builder/macos/image/sabnzbd_new_bg.png"
|
||||
iconpath = "builder/macos/image/sabnzbdplus.icns"
|
||||
|
||||
# Make DMG
|
||||
print("Building DMG")
|
||||
dmgbuild.build_dmg(
|
||||
filename=fileDmg,
|
||||
volume_name=prod,
|
||||
settings_file="builder/make_dmg.py",
|
||||
defines={"app": apppath, "readme": readmepath, "background": backgroundpath, "iconpath": iconpath},
|
||||
)
|
||||
|
||||
# Sign the DMG
|
||||
if authority:
|
||||
print("Siging DMG")
|
||||
os.system('codesign --deep -f -i "org.sabnzbd.SABnzbd" -s "%s" "%s"' % (authority, fileDmg))
|
||||
print("Signed!")
|
||||
else:
|
||||
print("Signing skipped, missing SIGNING_AUTH.")
|
||||
exit()
|
||||
|
||||
|
||||
### START OF DMGBUILD SETTINGS
|
||||
### COPIED AND MODIFIED FROM THE EXAMPLE ONLINE
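### Note: when make_dmg.py above calls dmgbuild.build_dmg() with settings_file pointing back at
### this same script, dmgbuild executes the remainder of the file as its settings module and
### supplies the `defines` dict used below (standard dmgbuild behaviour, per its example settings file).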
|
||||
application = defines.get("app", "AppName.app")
|
||||
readme = defines.get("readme", "ReadMe.rtf")
|
||||
appname = os.path.basename(application)
|
||||
|
||||
# .. Basics ....................................................................
|
||||
|
||||
# Volume format (see hdiutil create -help)
|
||||
format = defines.get("format", "UDBZ")
|
||||
|
||||
# Volume size (must be large enough for your files)
|
||||
size = defines.get("size", "100M")
|
||||
|
||||
# Files to include
|
||||
files = [application, readme]
|
||||
|
||||
# Symlinks to create
|
||||
symlinks = {"Applications": "/Applications"}
|
||||
|
||||
# Volume icon
|
||||
#
|
||||
# You can either define icon, in which case that icon file will be copied to the
|
||||
# image, *or* you can define badge_icon, in which case the icon file you specify
|
||||
# will be used to badge the system's Removable Disk icon
|
||||
#
|
||||
badge_icon = defines.get("iconpath", "")
|
||||
|
||||
# Where to put the icons
|
||||
icon_locations = {readme: (70, 160), appname: (295, 220), "Applications": (510, 220)}
|
||||
|
||||
# .. Window configuration ......................................................
|
||||
|
||||
# Window position in ((x, y), (w, h)) format
|
||||
window_rect = ((100, 100), (660, 360))
|
||||
|
||||
# Background
|
||||
#
|
||||
# This is a STRING containing any of the following:
|
||||
#
|
||||
# #3344ff - web-style RGB color
|
||||
# #34f - web-style RGB color, short form (#34f == #3344ff)
|
||||
# rgb(1,0,0) - RGB color, each value is between 0 and 1
|
||||
# hsl(120,1,.5) - HSL (hue saturation lightness) color
|
||||
# hwb(300,0,0) - HWB (hue whiteness blackness) color
|
||||
# cmyk(0,1,0,0) - CMYK color
|
||||
# goldenrod - X11/SVG named color
|
||||
# builtin-arrow - A simple built-in background with a blue arrow
|
||||
# /foo/bar/baz.png - The path to an image file
|
||||
#
|
||||
# Other color components may be expressed either in the range 0 to 1, or
|
||||
# as percentages (e.g. 60% is equivalent to 0.6).
|
||||
background = defines.get("background", "builtin-arrow")
|
||||
|
||||
show_status_bar = False
|
||||
show_tab_view = False
|
||||
show_toolbar = False
|
||||
show_pathbar = False
|
||||
show_sidebar = False
|
||||
sidebar_width = 0
|
||||
|
||||
# Select the default view; must be one of
|
||||
#
|
||||
# 'icon-view'
|
||||
# 'list-view'
|
||||
# 'column-view'
|
||||
# 'coverflow'
|
||||
#
|
||||
default_view = "icon-view"
|
||||
|
||||
# General view configuration
|
||||
show_icon_preview = False
|
||||
|
||||
# Set these to True to force inclusion of icon/list view settings (otherwise
|
||||
# we only include settings for the default view)
|
||||
include_icon_view_settings = "auto"
|
||||
include_list_view_settings = "auto"
|
||||
|
||||
# .. Icon view configuration ...................................................
|
||||
|
||||
arrange_by = None
|
||||
grid_offset = (0, 0)
|
||||
grid_spacing = 50
|
||||
scroll_position = (0, 0)
|
||||
label_pos = "bottom" # or 'right'
|
||||
text_size = 16
|
||||
icon_size = 64
|
||||
|
||||
# .. List view configuration ...................................................
|
||||
|
||||
# Column names are as follows:
|
||||
#
|
||||
# name
|
||||
# date-modified
|
||||
# date-created
|
||||
# date-added
|
||||
# date-last-opened
|
||||
# size
|
||||
# kind
|
||||
# label
|
||||
# version
|
||||
# comments
|
||||
#
|
||||
list_icon_size = 16
|
||||
list_text_size = 12
|
||||
list_scroll_position = (0, 0)
|
||||
list_sort_by = "name"
|
||||
list_use_relative_dates = True
|
||||
list_calculate_all_sizes = False
|
||||
list_columns = ("name", "date-modified", "size", "kind", "date-added")
|
||||
list_column_widths = {
|
||||
"name": 300,
|
||||
"date-modified": 181,
|
||||
"date-created": 181,
|
||||
"date-added": 181,
|
||||
"date-last-opened": 181,
|
||||
"size": 97,
|
||||
"kind": 115,
|
||||
"label": 100,
|
||||
"version": 75,
|
||||
"comments": 300,
|
||||
}
|
||||
list_column_sort_directions = {
|
||||
"name": "ascending",
|
||||
"date-modified": "descending",
|
||||
"date-created": "descending",
|
||||
"date-added": "descending",
|
||||
"date-last-opened": "descending",
|
||||
"size": "descending",
|
||||
"kind": "ascending",
|
||||
"label": "ascending",
|
||||
"version": "ascending",
|
||||
"comments": "ascending",
|
||||
}
511 builder/package.py Normal file
@@ -0,0 +1,511 @@
|
||||
#!/usr/bin/python3 -OO
|
||||
# Copyright 2008-2025 by The SABnzbd-Team (sabnzbd.org)
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
|
||||
import glob
|
||||
import re
|
||||
import sys
|
||||
import os
|
||||
import tempfile
|
||||
import time
|
||||
import shutil
|
||||
import subprocess
|
||||
import tarfile
|
||||
import urllib.request
|
||||
import urllib.error
|
||||
import configobj
|
||||
import packaging.version
|
||||
|
||||
from constants import (
|
||||
RELEASE_VERSION,
|
||||
RELEASE_VERSION_TUPLE,
|
||||
VERSION_FILE,
|
||||
RELEASE_README,
|
||||
RELEASE_NAME,
|
||||
RELEASE_WIN_BIN,
|
||||
RELEASE_WIN_INSTALLER,
|
||||
ON_GITHUB_ACTIONS,
|
||||
RELEASE_THIS,
|
||||
RELEASE_SRC,
|
||||
EXTRA_FILES,
|
||||
EXTRA_FOLDERS,
|
||||
)
|
||||
|
||||
|
||||
# Support functions
|
||||
def safe_remove(path):
|
||||
"""Remove file without errors if the file doesn't exist
|
||||
Can also handle folders
|
||||
"""
|
||||
if os.path.exists(path):
|
||||
if os.path.isdir(path):
|
||||
shutil.rmtree(path)
|
||||
else:
|
||||
os.remove(path)
|
||||
|
||||
|
||||
def delete_files_glob(glob_pattern: str, allow_no_matches: bool = False):
|
||||
"""Delete one file or set of files from wild-card spec.
|
||||
We expect to match at least 1 file, to force expected behavior"""
|
||||
if files_to_remove := glob.glob(glob_pattern):
|
||||
for path in files_to_remove:
|
||||
if os.path.exists(path):
|
||||
os.remove(path)
|
||||
else:
|
||||
if not allow_no_matches:
|
||||
raise FileNotFoundError(f"No files found that match '{glob_pattern}'")
|
||||
|
||||
|
||||
def run_external_command(command: list[str], print_output: bool = True, **kwargs):
|
||||
"""Wrapper to ease the use of calling external programs"""
|
||||
process = subprocess.Popen(command, text=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, **kwargs)
|
||||
output, _ = process.communicate()
|
||||
ret = process.wait()
|
||||
if (output and print_output) or ret != 0:
|
||||
print(output)
|
||||
if ret != 0:
|
||||
raise RuntimeError("Command returned non-zero exit code %s!" % ret)
|
||||
return output
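# Illustrative use (mirrors the calls below): run_external_command(["git", "log", "-1"]) returns the
# command's combined stdout/stderr as text and raises RuntimeError if the exit code is non-zero.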
|
||||
|
||||
|
||||
def run_git_command(parms):
|
||||
"""Run git command, raise error if it failed"""
|
||||
return run_external_command(["git"] + parms)
|
||||
|
||||
|
||||
def patch_version_file(release_name):
|
||||
"""Patch in the Git commit hash, but only when this is
|
||||
an unmodified checkout
|
||||
"""
|
||||
git_output = run_git_command(["log", "-1"])
|
||||
for line in git_output.split("\n"):
|
||||
if "commit " in line:
|
||||
commit = line.split(" ")[1].strip()
|
||||
break
|
||||
else:
|
||||
raise TypeError("Commit hash not found")
|
||||
|
||||
with open(VERSION_FILE, "r") as ver:
|
||||
version_file = ver.read()
|
||||
|
||||
version_file = re.sub(r'__baseline__\s*=\s*"[^"]*"', '__baseline__ = "%s"' % commit, version_file)
|
||||
version_file = re.sub(r'__version__\s*=\s*"[^"]*"', '__version__ = "%s"' % release_name, version_file)
|
||||
|
||||
with open(VERSION_FILE, "w") as ver:
|
||||
ver.write(version_file)
|
||||
|
||||
|
||||
def test_macos_min_version(binary_path: str):
|
||||
# Skip check if nothing was set
|
||||
if macos_min_version := os.environ.get("MACOSX_DEPLOYMENT_TARGET"):
|
||||
# Skip any arm64 specific files
|
||||
if "arm64" in binary_path:
|
||||
print(f"Skipping arm64 binary {binary_path}")
|
||||
return
|
||||
|
||||
# Check that the minimum macOS version is at least macOS 10.13
|
||||
# We only check the x86_64 since for arm64 it's always macOS 11+
|
||||
print(f"Checking if binary supports macOS {macos_min_version} and above: {binary_path}")
|
||||
otool_output = run_external_command(
|
||||
[
|
||||
"otool",
|
||||
"-arch",
|
||||
"x86_64",
|
||||
"-l",
|
||||
binary_path,
|
||||
],
|
||||
print_output=False,
|
||||
)
|
||||
|
||||
# Parse the output for LC_BUILD_VERSION minos
|
||||
# The output is very large, so that's why we enumerate over it
|
||||
req_version = packaging.version.parse(macos_min_version)
|
||||
bin_version = None
|
||||
lines = otool_output.split("\n")
|
||||
for line_nr, line in enumerate(lines):
|
||||
if "LC_VERSION_MIN_MACOSX" in line:
|
||||
# Display the version in the next lines
|
||||
bin_version = packaging.version.parse(lines[line_nr + 2].split()[1])
|
||||
elif "minos" in line:
|
||||
bin_version = packaging.version.parse(line.split()[1])
|
||||
|
||||
if bin_version and bin_version > req_version:
|
||||
raise ValueError(f"{binary_path} requires {bin_version}, we want {req_version}")
|
||||
else:
|
||||
# We got the information we need
|
||||
break
|
||||
else:
|
||||
print(lines)
|
||||
raise RuntimeError(f"Could not determine minimum macOS version for {binary_path}")
|
||||
else:
|
||||
print(f"Skipping macOS version check, MACOSX_DEPLOYMENT_TARGET not set")
|
||||
|
||||
|
||||
def test_sab_binary(binary_path: str):
|
||||
"""Wrapper to have a simple start-up test for the binary"""
|
||||
with tempfile.TemporaryDirectory() as config_dir:
|
||||
sabnzbd_process = subprocess.Popen(
|
||||
[binary_path, "--browser", "0", "--logging", "2", "--config", config_dir],
|
||||
text=True,
|
||||
stdout=subprocess.PIPE,
|
||||
stderr=subprocess.STDOUT,
|
||||
)
|
||||
|
||||
# Wait for SAB to respond
|
||||
base_url = "http://127.0.0.1:8080/"
|
||||
for _ in range(30):
|
||||
try:
|
||||
urllib.request.urlopen(base_url, timeout=1).read()
|
||||
break
|
||||
except Exception:
|
||||
time.sleep(1)
|
||||
else:
|
||||
# Print console output and give some time to print
|
||||
print(sabnzbd_process.stdout.read())
|
||||
time.sleep(1)
|
||||
raise urllib.error.URLError("Could not connect to SABnzbd")
|
||||
|
||||
# Open a number of API calls and pages, to see if we are really up
|
||||
pages_to_test = [
|
||||
"",
|
||||
"wizard",
|
||||
"config",
|
||||
"config/server",
|
||||
"config/categories",
|
||||
"config/scheduling",
|
||||
"config/rss",
|
||||
"config/general",
|
||||
"config/folders",
|
||||
"config/switches",
|
||||
"config/sorting",
|
||||
"config/notify",
|
||||
"config/special",
|
||||
"api?mode=version",
|
||||
]
|
||||
for url in pages_to_test:
|
||||
print("Testing: %s%s" % (base_url, url))
|
||||
if b"500 Internal Server Error" in urllib.request.urlopen(base_url + url, timeout=1).read():
|
||||
raise RuntimeError("Crash in %s" % url)
|
||||
|
||||
# Parse API-key so we can do a graceful shutdown
|
||||
sab_config = configobj.ConfigObj(os.path.join(config_dir, "sabnzbd.ini"))
|
||||
urllib.request.urlopen(base_url + "shutdown/?apikey=" + sab_config["misc"]["api_key"], timeout=10)
|
||||
sabnzbd_process.wait()
|
||||
|
||||
# Print logs for verification
|
||||
with open(os.path.join(config_dir, "logs", "sabnzbd.log"), "r") as log_file:
|
||||
# Wait after printing so the output is nicely displayed in case of problems
|
||||
print(log_text := log_file.read())
|
||||
time.sleep(5)
|
||||
|
||||
# Make sure no extra errors/warnings were reported
|
||||
if "ERROR" in log_text or "WARNING" in log_text:
|
||||
raise RuntimeError("Warning or error reported during execution")
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
# Was any option supplied?
|
||||
if len(sys.argv) < 2:
|
||||
raise TypeError("Please specify what to do")
|
||||
|
||||
# Make sure we are in the src folder
|
||||
if not os.path.exists("builder"):
|
||||
raise FileNotFoundError("Run from the main SABnzbd source folder: python builder/package.py")
|
||||
|
||||
# Check if we have the needed certificates
|
||||
try:
|
||||
import certifi
|
||||
except ImportError:
|
||||
raise FileNotFoundError("Need certifi module")
|
||||
|
||||
# Patch release file
|
||||
patch_version_file(RELEASE_VERSION)
|
||||
|
||||
# Rename release notes file
|
||||
safe_remove("README.txt")
|
||||
shutil.copyfile(RELEASE_README, "README.txt")
|
||||
|
||||
# Compile translations
|
||||
if not os.path.exists("locale"):
|
||||
run_external_command([sys.executable, "tools/make_mo.py"])
|
||||
|
||||
# Check again if translations exist, fail otherwise
|
||||
if not os.path.exists("locale"):
|
||||
raise FileNotFoundError("Failed to compile language files")
|
||||
|
||||
if "binary" in sys.argv:
|
||||
# Must be run on Windows
|
||||
if sys.platform != "win32":
|
||||
raise RuntimeError("Binary should be created on Windows")
|
||||
|
||||
# Make sure we remove any existing build-folders
|
||||
safe_remove("build")
|
||||
safe_remove("dist")
|
||||
|
||||
# Remove any leftovers
|
||||
safe_remove(RELEASE_NAME)
|
||||
safe_remove(RELEASE_WIN_BIN)
|
||||
|
||||
# Run PyInstaller and check output
|
||||
shutil.copyfile("builder/SABnzbd.spec", "SABnzbd.spec")
|
||||
run_external_command([sys.executable, "-O", "-m", "PyInstaller", "SABnzbd.spec"])
|
||||
|
||||
shutil.copytree("dist/SABnzbd-console", "dist/SABnzbd", dirs_exist_ok=True)
|
||||
safe_remove("dist/SABnzbd-console")
|
||||
|
||||
# Remove unwanted DLL's
|
||||
shutil.rmtree("dist/SABnzbd/Pythonwin")
|
||||
delete_files_glob("dist/SABnzbd/api-ms-win*.dll", allow_no_matches=True)
|
||||
delete_files_glob("dist/SABnzbd/ucrtbase.dll", allow_no_matches=True)
|
||||
|
||||
# Test the release
|
||||
test_sab_binary("dist/SABnzbd/SABnzbd.exe")
|
||||
|
||||
# Create the archive
|
||||
run_external_command(["win/7zip/7za.exe", "a", RELEASE_WIN_BIN, "SABnzbd"], cwd="dist")
|
||||
shutil.move(f"dist/{RELEASE_WIN_BIN}", RELEASE_WIN_BIN)
|
||||
|
||||
if "installer" in sys.argv:
|
||||
# Check if we have the dist folder
|
||||
if not os.path.exists("dist/SABnzbd/SABnzbd.exe"):
|
||||
raise FileNotFoundError("SABnzbd executable not found, run binary creation first")
|
||||
|
||||
# Check if we have a signed version
|
||||
if os.path.exists(f"signed/{RELEASE_WIN_BIN}"):
|
||||
print("Using signed version of SABnzbd binaries")
|
||||
safe_remove("dist/SABnzbd")
|
||||
run_external_command(["win/7zip/7za.exe", "x", "-odist", f"signed/{RELEASE_WIN_BIN}"])
|
||||
|
||||
# Make sure it exists
|
||||
if not os.path.exists("dist/SABnzbd/SABnzbd.exe"):
|
||||
raise FileNotFoundError("SABnzbd executable not found, signed zip extraction failed")
|
||||
elif RELEASE_THIS:
|
||||
raise FileNotFoundError("Signed SABnzbd executable not found, required for release!")
|
||||
else:
|
||||
print("Using unsigned version of SABnzbd binaries")
|
||||
|
||||
# Compile NSIS translations
|
||||
safe_remove("NSIS_Installer.nsi")
|
||||
safe_remove("NSIS_Installer.nsi.tmp")
|
||||
shutil.copyfile("builder/win/NSIS_Installer.nsi", "NSIS_Installer.nsi")
|
||||
run_external_command([sys.executable, "tools/make_mo.py", "nsis"])
|
||||
|
||||
# Run NSIS to build installer
|
||||
run_external_command(
|
||||
[
|
||||
"makensis.exe",
|
||||
"/V3",
|
||||
"/DSAB_VERSION=%s" % RELEASE_VERSION,
|
||||
"/DSAB_VERSIONKEY=%s" % ".".join(map(str, RELEASE_VERSION_TUPLE)),
|
||||
"/DSAB_FILE=%s" % RELEASE_WIN_INSTALLER,
|
||||
"NSIS_Installer.nsi.tmp",
|
||||
]
|
||||
)
|
||||
|
||||
if "app" in sys.argv:
|
||||
# Must be run on macOS
|
||||
if sys.platform != "darwin":
|
||||
raise RuntimeError("App should be created on macOS")
|
||||
|
||||
# Who will sign and notarize this?
|
||||
authority = os.environ.get("SIGNING_AUTH")
|
||||
notarization_user = os.environ.get("NOTARIZATION_USER")
|
||||
notarization_pass = os.environ.get("NOTARIZATION_PASS")
|
||||
|
||||
# We need to sign all the included binaries before packaging them
|
||||
# Otherwise the signature of the main application becomes invalid
|
||||
if authority:
|
||||
files_to_sign = [
|
||||
"macos/par2/par2",
|
||||
"macos/unrar/unrar",
|
||||
"macos/unrar/arm64/unrar",
|
||||
"macos/7zip/7zz",
|
||||
]
|
||||
for file_to_sign in files_to_sign:
|
||||
# Make sure it supports the macOS versions we want first
|
||||
test_macos_min_version(file_to_sign)
|
||||
|
||||
# Then sign it
|
||||
print("Signing %s with hardened runtime" % file_to_sign)
|
||||
run_external_command(
|
||||
[
|
||||
"codesign",
|
||||
"--deep",
|
||||
"--force",
|
||||
"--timestamp",
|
||||
"--options",
|
||||
"runtime",
|
||||
"--entitlements",
|
||||
"builder/macos/entitlements.plist",
|
||||
"-s",
|
||||
authority,
|
||||
file_to_sign,
|
||||
],
|
||||
print_output=False,
|
||||
)
|
||||
print("Signed %s!" % file_to_sign)
|
||||
|
||||
# Run PyInstaller and check output
|
||||
shutil.copyfile("builder/SABnzbd.spec", "SABnzbd.spec")
|
||||
run_external_command([sys.executable, "-O", "-m", "PyInstaller", "SABnzbd.spec"])
|
||||
|
||||
# Make sure we created a fully universal2 release when releasing or during CI
|
||||
if RELEASE_THIS or ON_GITHUB_ACTIONS:
|
||||
for bin_to_check in glob.glob("dist/SABnzbd.app/**/*.so", recursive=True):
|
||||
print("Checking if binary is universal2: %s" % bin_to_check)
|
||||
file_output = run_external_command(["file", bin_to_check], print_output=False)
|
||||
# Make sure we have both arm64 and x86
|
||||
if not ("x86_64" in file_output and "arm64" in file_output):
|
||||
raise RuntimeError("Non-universal2 binary found!")
|
||||
|
||||
# Make sure it supports the macOS versions we want
|
||||
test_macos_min_version(bin_to_check)
|
||||
|
||||
# Only continue if we can sign
|
||||
if authority:
|
||||
# We use PyInstaller to sign the main SABnzbd executable and the SABnzbd.app
|
||||
files_already_signed = [
|
||||
"dist/SABnzbd.app/Contents/MacOS/SABnzbd",
|
||||
"dist/SABnzbd.app",
|
||||
]
|
||||
for file_to_check in files_already_signed:
|
||||
print("Checking signature of %s" % file_to_check)
|
||||
sign_result = run_external_command(
|
||||
[
|
||||
"codesign",
|
||||
"-dv",
|
||||
"-r-",
|
||||
file_to_check,
|
||||
],
|
||||
print_output=False,
|
||||
) + run_external_command(
|
||||
[
|
||||
"codesign",
|
||||
"--verify",
|
||||
"--deep",
|
||||
file_to_check,
|
||||
],
|
||||
print_output=False,
|
||||
)
|
||||
if authority not in sign_result or "adhoc" in sign_result or "invalid" in sign_result:
|
||||
raise RuntimeError("Signature of %s seems invalid!" % file_to_check)
|
||||
|
||||
# Always notarize, as newer macOS versions don't allow any code without it
|
||||
if notarization_user and notarization_pass:
|
||||
# Prepare zip to upload to notarization service
|
||||
print("Creating zip to send to Apple notarization service")
|
||||
# We need to use ditto, otherwise the signature gets lost!
|
||||
notarization_zip = RELEASE_NAME + ".zip"
|
||||
run_external_command(
|
||||
["ditto", "-c", "-k", "--sequesterRsrc", "--keepParent", "dist/SABnzbd.app", notarization_zip]
|
||||
)
|
||||
|
||||
# Upload to Apple
|
||||
print("Sending zip to Apple notarization service")
|
||||
upload_result = run_external_command(
|
||||
[
|
||||
"xcrun",
|
||||
"notarytool",
|
||||
"submit",
|
||||
notarization_zip,
|
||||
"--apple-id",
|
||||
notarization_user,
|
||||
"--team-id",
|
||||
authority,
|
||||
"--password",
|
||||
notarization_pass,
|
||||
"--wait",
|
||||
],
|
||||
)
|
||||
|
||||
# Check if success
|
||||
if "status: accepted" not in upload_result.lower():
|
||||
raise RuntimeError("Failed to notarize..")
|
||||
|
||||
# Staple the notarization!
|
||||
print("Approved! Stapling the result to the app")
|
||||
run_external_command(["xcrun", "stapler", "staple", "dist/SABnzbd.app"])
|
||||
else:
|
||||
print("Notarization skipped, NOTARIZATION_USER or NOTARIZATION_PASS missing.")
|
||||
else:
|
||||
print("Signing skipped, missing SIGNING_AUTH.")
|
||||
|
||||
# Test the release, as the very last step to not mess with any release code
|
||||
test_sab_binary("dist/SABnzbd.app/Contents/MacOS/SABnzbd")
|
||||
|
||||
if "source" in sys.argv:
|
||||
# Prepare Source distribution package.
|
||||
# We assume the sources are freshly cloned from the repo
|
||||
# Make sure all source files are Unix format
|
||||
src_folder = "srcdist"
|
||||
safe_remove(src_folder)
|
||||
os.mkdir(src_folder)
|
||||
|
||||
# Remove any leftovers
|
||||
safe_remove(RELEASE_SRC)
|
||||
|
||||
# Add extra files and folders needed for the source dist
|
||||
EXTRA_FOLDERS.extend(["sabnzbd/", "po/", "linux/", "tools/", "tests/"])
|
||||
EXTRA_FILES.extend(["SABnzbd.py", "requirements.txt"])
|
||||
|
||||
# Copy all folders and files to the new folder
|
||||
for source_folder in EXTRA_FOLDERS:
|
||||
shutil.copytree(source_folder, os.path.join(src_folder, source_folder), dirs_exist_ok=True)
|
||||
|
||||
# Copy all files
|
||||
for source_file in EXTRA_FILES:
|
||||
shutil.copyfile(source_file, os.path.join(src_folder, source_file))
|
||||
|
||||
# Make sure all line-endings are correct
|
||||
for input_filename in glob.glob("%s/**/*.*" % src_folder, recursive=True):
|
||||
base, ext = os.path.splitext(input_filename)
|
||||
if ext.lower() not in (".py", ".txt", ".css", ".js", ".tmpl", ".sh", ".cmd"):
|
||||
continue
|
||||
print(input_filename)
|
||||
|
||||
with open(input_filename, "rb") as input_data:
|
||||
data = input_data.read()
|
||||
data = data.replace(b"\r", b"")
|
||||
with open(input_filename, "wb") as output_data:
|
||||
output_data.write(data)
|
||||
|
||||
# Create tar.gz file for source distro
|
||||
with tarfile.open(RELEASE_SRC, "w:gz") as tar_output:
|
||||
for root, dirs, files in os.walk(src_folder):
|
||||
for _file in files:
|
||||
input_path = os.path.join(root, _file)
|
||||
if sys.platform == "win32":
|
||||
tar_path = input_path.replace("srcdist\\", RELEASE_NAME + "/").replace("\\", "/")
|
||||
else:
|
||||
tar_path = input_path.replace("srcdist/", RELEASE_NAME + "/")
|
||||
tarinfo = tar_output.gettarinfo(input_path, tar_path)
|
||||
tarinfo.uid = 0
|
||||
tarinfo.gid = 0
|
||||
if _file in ("SABnzbd.py", "Sample-PostProc.sh", "make_mo.py", "msgfmt.py"):
|
||||
# Force Linux/macOS scripts as executable
|
||||
tarinfo.mode = 0o755
|
||||
else:
|
||||
tarinfo.mode = 0o644
|
||||
|
||||
with open(input_path, "rb") as f:
|
||||
tar_output.addfile(tarinfo, f)
|
||||
|
||||
# Remove source folder
|
||||
safe_remove(src_folder)
|
||||
|
||||
# Reset!
|
||||
run_git_command(["reset", "--hard"])
|
||||
run_git_command(["clean", "-f"])
2 builder/release-requirements.txt Normal file
@@ -0,0 +1,2 @@
|
||||
PyGithub==2.8.1
|
||||
praw==7.8.1
272 builder/release.py Normal file
@@ -0,0 +1,272 @@
|
||||
#!/usr/bin/python3 -OO
|
||||
# Copyright 2008-2025 by The SABnzbd-Team (sabnzbd.org)
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
|
||||
import hashlib
|
||||
import json
|
||||
import os
|
||||
import re
|
||||
import xml.etree.ElementTree as ET
|
||||
|
||||
import github
|
||||
import praw
|
||||
|
||||
from constants import (
|
||||
RELEASE_VERSION,
|
||||
RELEASE_VERSION_BASE,
|
||||
PRERELEASE,
|
||||
RELEASE_SRC,
|
||||
RELEASE_WIN_BIN_X64,
|
||||
RELEASE_WIN_BIN_ARM64,
|
||||
RELEASE_WIN_INSTALLER,
|
||||
RELEASE_MACOS,
|
||||
RELEASE_README,
|
||||
RELEASE_THIS,
|
||||
RELEASE_TITLE,
|
||||
APPDATA_FILE,
|
||||
ON_GITHUB_ACTIONS,
|
||||
)
|
||||
|
||||
# Verify we have all assets
|
||||
files_to_check = (
|
||||
RELEASE_SRC,
|
||||
RELEASE_WIN_BIN_X64,
|
||||
RELEASE_WIN_BIN_ARM64,
|
||||
RELEASE_WIN_INSTALLER,
|
||||
RELEASE_MACOS,
|
||||
RELEASE_README,
|
||||
)
|
||||
for file_to_check in files_to_check:
|
||||
if not os.path.exists(file_to_check):
|
||||
raise RuntimeError("Not all release files are present!")
|
||||
print("All release files are present")
|
||||
|
||||
# Verify that appdata file is updated
|
||||
if not isinstance(ET.parse(APPDATA_FILE).find(f"./releases/release[@version='{RELEASE_VERSION_BASE}']"), ET.Element):
|
||||
release_missing = f"Could not find {RELEASE_VERSION_BASE} in {APPDATA_FILE}"
|
||||
if RELEASE_THIS:
|
||||
raise RuntimeError(release_missing)
|
||||
elif ON_GITHUB_ACTIONS:
|
||||
print(f"::warning file={APPDATA_FILE},title=Missing release::{release_missing}")
|
||||
else:
|
||||
print(release_missing)
|
||||
|
||||
# Calculate hashes for Synology release
|
||||
with open(RELEASE_SRC, "rb") as inp_file:
|
||||
source_data = inp_file.read()
|
||||
|
||||
print("---- Synology spksrc digest hashes ---- ")
|
||||
print(RELEASE_SRC, "SHA1", hashlib.sha1(source_data).hexdigest())
|
||||
print(RELEASE_SRC, "SHA256", hashlib.sha256(source_data).hexdigest())
|
||||
print(RELEASE_SRC, "MD5", hashlib.md5(source_data).hexdigest())
|
||||
print("----")
|
||||
|
||||
# Check if tagged as release and check for token
|
||||
gh_token = os.environ.get("AUTOMATION_GITHUB_TOKEN", "")
|
||||
if RELEASE_THIS and gh_token:
|
||||
gh_obj = github.Github(auth=github.Auth.Token(gh_token))
|
||||
gh_repo = gh_obj.get_repo("sabnzbd/sabnzbd")
|
||||
|
||||
# Read the release notes
|
||||
with open(RELEASE_README, "r") as readme_file:
|
||||
readme_data = readme_file.read()
|
||||
|
||||
# We have to manually check if we already created this release
|
||||
for release in gh_repo.get_releases():
|
||||
if release.tag_name == RELEASE_VERSION:
|
||||
gh_release = release
|
||||
print("Found existing release %s" % gh_release.name)
|
||||
break
|
||||
else:
|
||||
# Did not find it, so create the release, use the GitHub tag we got as input
|
||||
print("Creating GitHub release SABnzbd %s" % RELEASE_VERSION)
|
||||
gh_release = gh_repo.create_git_release(
|
||||
tag=RELEASE_VERSION,
|
||||
name=RELEASE_TITLE,
|
||||
message=readme_data,
|
||||
draft=True,
|
||||
prerelease=PRERELEASE,
|
||||
)
|
||||
|
||||
# Fetch existing assets, as overwriting is not allowed by GitHub
|
||||
gh_assets = gh_release.get_assets()
|
||||
|
||||
# Upload the assets
|
||||
for file_to_check in files_to_check:
|
||||
if os.path.exists(file_to_check):
|
||||
# Check if this file was previously uploaded
|
||||
if gh_assets.totalCount:
|
||||
for gh_asset in gh_assets:
|
||||
if gh_asset.name == file_to_check:
|
||||
print("Removing existing asset %s " % gh_asset.name)
|
||||
gh_asset.delete_asset()
|
||||
# Upload the new one
|
||||
print("Uploading %s to release %s" % (file_to_check, gh_release.name))
|
||||
gh_release.upload_asset(file_to_check)
|
||||
|
||||
# Check if we now have all files
|
||||
gh_new_assets = gh_release.get_assets()
|
||||
if gh_new_assets.totalCount:
|
||||
all_assets = [gh_asset.name for gh_asset in gh_new_assets]
|
||||
|
||||
# Check if we have all files, using set-comparison
|
||||
if set(files_to_check) == set(all_assets):
|
||||
print("All assets present, releasing %s" % RELEASE_VERSION)
|
||||
# Publish release
|
||||
gh_release.update_release(
|
||||
tag_name=RELEASE_VERSION,
|
||||
name=RELEASE_TITLE,
|
||||
message=readme_data,
|
||||
draft=False,
|
||||
prerelease=PRERELEASE,
|
||||
)
|
||||
|
||||
# Update the website
|
||||
gh_repo_web = gh_obj.get_repo("sabnzbd/sabnzbd.github.io")
|
||||
# Check if the branch already exists, only create one if it doesn't
|
||||
skip_website_update = False
|
||||
try:
|
||||
gh_repo_web.get_branch(RELEASE_VERSION)
|
||||
print("Branch %s on sabnzbd/sabnzbd.github.io already exists, skipping update" % RELEASE_VERSION)
|
||||
skip_website_update = True
|
||||
except github.GithubException:
|
||||
# Create a new branch to have the changes
|
||||
sb = gh_repo_web.get_branch("master")
|
||||
print("Creating branch %s on sabnzbd/sabnzbd.github.io" % RELEASE_VERSION)
|
||||
new_branch = gh_repo_web.create_git_ref(ref="refs/heads/" + RELEASE_VERSION, sha=sb.commit.sha)
|
||||
|
||||
# Update the files
|
||||
if not skip_website_update:
|
||||
# We need bytes version to interact with GitHub
|
||||
RELEASE_VERSION_BYTES = RELEASE_VERSION.encode()
|
||||
|
||||
# Get all the version files
|
||||
latest_txt = gh_repo_web.get_contents("latest.txt")
|
||||
latest_txt_items = latest_txt.decoded_content.split()
|
||||
new_latest_txt_items = latest_txt_items[:2]
|
||||
config_yml = gh_repo_web.get_contents("_config.yml")
|
||||
if PRERELEASE:
|
||||
# If it's a pre-release, we append to current version in latest.txt
|
||||
new_latest_txt_items.extend([RELEASE_VERSION_BYTES, latest_txt_items[1]])
|
||||
# And replace in _config.yml
|
||||
new_config_yml = re.sub(
|
||||
b"latest_testing: '[^']*'",
|
||||
b"latest_testing: '%s'" % RELEASE_VERSION_BYTES,
|
||||
config_yml.decoded_content,
|
||||
)
|
||||
else:
|
||||
# New stable release, replace the version
|
||||
new_latest_txt_items[0] = RELEASE_VERSION_BYTES
|
||||
# And replace in _config.yml
|
||||
new_config_yml = re.sub(
|
||||
b"latest_testing: '[^']*'",
|
||||
b"latest_testing: ''",
|
||||
config_yml.decoded_content,
|
||||
)
|
||||
new_config_yml = re.sub(
|
||||
b"latest_stable: '[^']*'",
|
||||
b"latest_stable: '%s'" % RELEASE_VERSION_BYTES,
|
||||
new_config_yml,
|
||||
)
|
||||
# Also update the wiki-settings, these only use x.x notation
|
||||
new_config_yml = re.sub(
|
||||
b"wiki_version: '[^']*'",
|
||||
b"wiki_version: '%s'" % RELEASE_VERSION_BYTES[:3],
|
||||
new_config_yml,
|
||||
)
|
||||
|
||||
# Update the files
|
||||
print("Updating latest.txt")
|
||||
gh_repo_web.update_file(
|
||||
"latest.txt",
|
||||
"Release %s: latest.txt" % RELEASE_VERSION,
|
||||
b"\n".join(new_latest_txt_items),
|
||||
latest_txt.sha,
|
||||
RELEASE_VERSION,
|
||||
)
|
||||
print("Updating _config.yml")
|
||||
gh_repo_web.update_file(
|
||||
"_config.yml",
|
||||
"Release %s: _config.yml" % RELEASE_VERSION,
|
||||
new_config_yml,
|
||||
config_yml.sha,
|
||||
RELEASE_VERSION,
|
||||
)
|
||||
|
||||
# Create pull-request
|
||||
print("Creating pull request in sabnzbd/sabnzbd.github.io for the update")
|
||||
update_pr = gh_repo_web.create_pull(
|
||||
title="Release %s" % RELEASE_VERSION,
|
||||
base="master",
|
||||
body="Automated update of release files",
|
||||
head=RELEASE_VERSION,
|
||||
)
|
||||
|
||||
# Merge pull-request
|
||||
print("Merging pull request in sabnzbd/sabnzbd.github.io for the update")
|
||||
update_pr.merge(merge_method="squash")
|
||||
|
||||
# Only with GitHub success we proceed to Reddit
|
||||
if reddit_token := os.environ.get("REDDIT_TOKEN", ""):
|
||||
# Token format (without whitespace):
|
||||
# {
|
||||
# "client_id":"XXX",
|
||||
# "client_secret":"XXX",
|
||||
# "user_agent":"SABnzbd release script",
|
||||
# "username":"Safihre",
|
||||
# "password":"XXX"
|
||||
# }
|
||||
credentials = json.loads(reddit_token)
|
||||
reddit = praw.Reddit(**credentials)
|
||||
|
||||
subreddit_sabnzbd = reddit.subreddit("sabnzbd")
|
||||
subreddit_usenet = reddit.subreddit("usenet")
|
||||
|
||||
# Read the release notes
|
||||
with open(RELEASE_README, "r") as readme_file:
|
||||
readme_lines = readme_file.readlines()
|
||||
|
||||
# Put the download link after the title
|
||||
readme_lines[2] = "## https://sabnzbd.org/downloads\n\n"
|
||||
|
||||
# Use the header in the readme as title
|
||||
title = readme_lines[0]
|
||||
release_notes_text = "".join(readme_lines[2:])
|
||||
print("Posting release notes to Reddit")
|
||||
|
||||
# Only stable releases to r/usenet
|
||||
if not PRERELEASE:
|
||||
# Get correct flair-id (required by r/usenet)
|
||||
for flair in subreddit_usenet.flair.link_templates.user_selectable():
|
||||
if flair["flair_text"] == "News":
|
||||
print("Posting to r/usenet")
|
||||
submission = subreddit_usenet.submit(
|
||||
title, selftext=release_notes_text, flair_id=flair["flair_template_id"]
|
||||
)
|
||||
break
|
||||
else:
|
||||
raise ValueError("Could not locate flair_text for posting to r/usenet")
|
||||
|
||||
# Post always to r/SABnzbd
|
||||
print("Posting to r/sabnzbd")
|
||||
subreddit_sabnzbd.submit(title, selftext=release_notes_text)
|
||||
|
||||
else:
|
||||
print("Missing REDDIT_TOKEN")
|
||||
|
||||
else:
|
||||
print("To push release to GitHub, first tag the commit.")
|
||||
print("Or missing the AUTOMATION_GITHUB_TOKEN, cannot push to GitHub without it.")
19 builder/requirements.txt Normal file
@@ -0,0 +1,19 @@
|
||||
# Basic build requirements
|
||||
# Note that not all sub-dependencies are listed, but only ones we know could cause trouble
|
||||
pyinstaller==6.17.0
|
||||
packaging==25.0
|
||||
pyinstaller-hooks-contrib==2025.10
|
||||
altgraph==0.17.5
|
||||
wrapt==2.0.1
|
||||
setuptools==80.9.0
|
||||
|
||||
# For the Windows build
|
||||
pefile==2024.8.26; sys_platform == 'win32'
|
||||
pywin32-ctypes==0.2.3; sys_platform == 'win32'
|
||||
|
||||
# For the macOS build
|
||||
dmgbuild==1.6.6; sys_platform == 'darwin'
|
||||
mac-alias==2.2.3; sys_platform == 'darwin'
|
||||
macholib==1.16.4; sys_platform == 'darwin'
|
||||
ds-store==1.3.2; sys_platform == 'darwin'
|
||||
PyNaCl==1.6.1; sys_platform == 'darwin'
442 builder/win/NSIS_Installer.nsi Normal file
@@ -0,0 +1,442 @@
|
||||
; -*- coding: utf-8 -*-
|
||||
;
|
||||
; Copyright 2008-2015 The SABnzbd-Team (sabnzbd.org)
|
||||
;
|
||||
; This program is free software; you can redistribute it and/or
|
||||
; modify it under the terms of the GNU General Public License
|
||||
; as published by the Free Software Foundation; either version 2
|
||||
; of the License, or (at your option) any later version.
|
||||
;
|
||||
; This program is distributed in the hope that it will be useful,
|
||||
; but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
; GNU General Public License for more details.
|
||||
;
|
||||
; You should have received a copy of the GNU General Public License
|
||||
; along with this program; if not, write to the Free Software
|
||||
; Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
|
||||
Unicode true
|
||||
|
||||
!addplugindir builder\win\nsis\Plugins
|
||||
!addincludedir builder\win\nsis\Include
|
||||
|
||||
!include "MUI2.nsh"
|
||||
!include "registerExtension.nsh"
|
||||
!include "FileFunc.nsh"
|
||||
!include "LogicLib.nsh"
|
||||
!include "WinVer.nsh"
|
||||
!include "nsProcess.nsh"
|
||||
!include "x64.nsh"
|
||||
!include "servicelib.nsh"
|
||||
!include "StdUtils.nsh"
|
||||
|
||||
;------------------------------------------------------------------
|
||||
;
|
||||
; Macro for removing existing and the current installation
|
||||
; It is shared by the installer and the uninstaller.
|
||||
;
|
||||
!define RemovePrev "!insertmacro RemovePrev"
|
||||
!macro RemovePrev idir
|
||||
; Remove the whole dir
|
||||
; Users should not be putting stuff here!
|
||||
RMDir /r "${idir}"
|
||||
!macroend
|
||||
|
||||
!define RemovePrevShortcuts "!insertmacro RemovePrevShortcuts"
|
||||
!macro RemovePrevShortcuts
|
||||
; Remove shortcuts, starting with current user ones (from old installs)
|
||||
SetShellVarContext current
|
||||
!insertmacro MUI_STARTMENU_GETFOLDER Application $MUI_TEMP
|
||||
Delete "$SMPROGRAMS\$MUI_TEMP\SABnzbd.lnk"
|
||||
Delete "$SMPROGRAMS\$MUI_TEMP\Uninstall.lnk"
|
||||
Delete "$SMPROGRAMS\$MUI_TEMP\SABnzbd - SafeMode.lnk"
|
||||
Delete "$SMPROGRAMS\$MUI_TEMP\SABnzbd - Documentation.url"
|
||||
RMDir "$SMPROGRAMS\$MUI_TEMP"
|
||||
Delete "$SMPROGRAMS\Startup\SABnzbd.lnk"
|
||||
Delete "$DESKTOP\SABnzbd.lnk"
|
||||
|
||||
SetShellVarContext all
|
||||
!insertmacro MUI_STARTMENU_GETFOLDER Application $MUI_TEMP
|
||||
Delete "$SMPROGRAMS\$MUI_TEMP\SABnzbd.lnk"
|
||||
Delete "$SMPROGRAMS\$MUI_TEMP\Uninstall.lnk"
|
||||
Delete "$SMPROGRAMS\$MUI_TEMP\SABnzbd - SafeMode.lnk"
|
||||
Delete "$SMPROGRAMS\$MUI_TEMP\SABnzbd - Documentation.url"
|
||||
RMDir "$SMPROGRAMS\$MUI_TEMP"
|
||||
Delete "$SMPROGRAMS\Startup\SABnzbd.lnk"
|
||||
Delete "$DESKTOP\SABnzbd.lnk"
|
||||
!macroend
|
||||
|
||||
;------------------------------------------------------------------
|
||||
; Define names of the product
|
||||
Name "SABnzbd ${SAB_VERSION}"
|
||||
VIProductVersion "${SAB_VERSIONKEY}"
|
||||
VIFileVersion "${SAB_VERSIONKEY}"
|
||||
|
||||
VIAddVersionKey "Comments" "SABnzbd ${SAB_VERSION}"
|
||||
VIAddVersionKey "CompanyName" "The SABnzbd-Team"
|
||||
VIAddVersionKey "FileDescription" "SABnzbd ${SAB_VERSION}"
|
||||
VIAddVersionKey "FileVersion" "${SAB_VERSION}"
|
||||
VIAddVersionKey "LegalCopyright" "The SABnzbd-Team"
|
||||
VIAddVersionKey "ProductName" "SABnzbd ${SAB_VERSION}"
|
||||
VIAddVersionKey "ProductVersion" "${SAB_VERSION}"
|
||||
|
||||
OutFile "${SAB_FILE}"
|
||||
InstallDir "$PROGRAMFILES\SABnzbd"
|
||||
|
||||
;------------------------------------------------------------------
|
||||
; Some default compiler settings (uncomment and change at will):
|
||||
SetCompress auto ; (can be off or force)
|
||||
SetDatablockOptimize on ; (can be off)
|
||||
CRCCheck on ; (can be off)
|
||||
AutoCloseWindow false ; (can be true to make the window go away automatically at the end)
|
||||
ShowInstDetails hide ; (can be show to have them shown, or nevershow to disable)
|
||||
SetDateSave off ; (can be on to have files restored to their original date)
|
||||
WindowIcon on
|
||||
SpaceTexts none
|
||||
|
||||
|
||||
;------------------------------------------------------------------
|
||||
; Vista/Win7 redirects $SMPROGRAMS to all users without this
|
||||
RequestExecutionLevel admin
|
||||
FileErrorText "If you have no admin rights, try to install into a user directory."
|
||||
|
||||
;------------------------------------------------------------------
|
||||
;Variables
|
||||
Var MUI_TEMP
|
||||
Var STARTMENU_FOLDER
|
||||
Var PREV_INST_DIR
|
||||
|
||||
;------------------------------------------------------------------
|
||||
;Interface Settings
|
||||
|
||||
!define MUI_ABORTWARNING
|
||||
|
||||
;Show all languages, despite user's codepage
|
||||
!define MUI_LANGDLL_ALLLANGUAGES
|
||||
|
||||
!define MUI_ICON "dist\SABnzbd\icons\sabnzbd.ico"
|
||||
|
||||
|
||||
;--------------------------------
|
||||
;Pages
|
||||
|
||||
!insertmacro MUI_PAGE_LICENSE "dist\SABnzbd\LICENSE.txt"
|
||||
!define MUI_COMPONENTSPAGE_NODESC
|
||||
!insertmacro MUI_PAGE_COMPONENTS
|
||||
|
||||
!insertmacro MUI_PAGE_DIRECTORY
|
||||
|
||||
;Start Menu Folder Page Configuration
|
||||
!define MUI_STARTMENUPAGE_REGISTRY_ROOT "HKCU"
|
||||
!define MUI_STARTMENUPAGE_REGISTRY_KEY "Software\SABnzbd"
|
||||
!define MUI_STARTMENUPAGE_REGISTRY_VALUENAME "Start Menu Folder"
|
||||
!define MUI_STARTMENUPAGE_DEFAULTFOLDER "SABnzbd"
|
||||
;Remember the installer language
|
||||
!define MUI_LANGDLL_REGISTRY_ROOT "HKCU"
|
||||
!define MUI_LANGDLL_REGISTRY_KEY "Software\SABnzbd"
|
||||
!define MUI_LANGDLL_REGISTRY_VALUENAME "Installer Language"
|
||||
|
||||
!insertmacro MUI_PAGE_STARTMENU Application $STARTMENU_FOLDER
|
||||
|
||||
!insertmacro MUI_PAGE_INSTFILES
|
||||
!define MUI_FINISHPAGE_RUN
|
||||
!define MUI_FINISHPAGE_RUN_FUNCTION PageFinishRun
|
||||
!define MUI_FINISHPAGE_RUN_TEXT $(MsgRunSAB)
|
||||
!define MUI_FINISHPAGE_SHOWREADME "$INSTDIR\README.txt"
|
||||
!define MUI_FINISHPAGE_SHOWREADME_TEXT $(MsgShowRelNote)
|
||||
!define MUI_FINISHPAGE_LINK $(MsgSupportUs)
|
||||
!define MUI_FINISHPAGE_LINK_LOCATION "https://sabnzbd.org/donate"
|
||||
|
||||
!insertmacro MUI_PAGE_FINISH
|
||||
|
||||
!insertmacro MUI_UNPAGE_CONFIRM
|
||||
!define MUI_UNPAGE_COMPONENTSPAGE_NODESC
|
||||
!insertmacro MUI_UNPAGE_COMPONENTS
|
||||
!insertmacro MUI_UNPAGE_INSTFILES
|
||||
|
||||
|
||||
;------------------------------------------------------------------
|
||||
; Run as user-level at end of install
|
||||
Function PageFinishRun
|
||||
; Check if SABnzbd service is installed
|
||||
!insertmacro SERVICE "installed" "SABnzbd" ""
|
||||
Pop $0 ;response
|
||||
${If} $0 == true
|
||||
; Service is installed, start the service
|
||||
!insertmacro SERVICE "start" "SABnzbd" ""
|
||||
${Else}
|
||||
; Service not installed, run executable as user
|
||||
${StdUtils.ExecShellAsUser} $0 "$INSTDIR\SABnzbd.exe" "" ""
|
||||
${EndIf}
|
||||
FunctionEnd
|
||||
|
||||
|
||||
;------------------------------------------------------------------
|
||||
; Set supported languages
|
||||
;
|
||||
; If you edit this list you also need to edit apireg.py in SABnzbd!
|
||||
;
|
||||
!insertmacro MUI_LANGUAGE "English" ;first language is the default language
|
||||
!insertmacro MUI_LANGUAGE "French"
|
||||
!insertmacro MUI_LANGUAGE "German"
|
||||
!insertmacro MUI_LANGUAGE "Dutch"
|
||||
!insertmacro MUI_LANGUAGE "Finnish"
|
||||
!insertmacro MUI_LANGUAGE "Polish"
|
||||
!insertmacro MUI_LANGUAGE "Swedish"
|
||||
!insertmacro MUI_LANGUAGE "Danish"
|
||||
!insertmacro MUI_LANGUAGE "Italian"
|
||||
!insertmacro MUI_LANGUAGE "Norwegian"
|
||||
!insertmacro MUI_LANGUAGE "Romanian"
|
||||
!insertmacro MUI_LANGUAGE "Spanish"
|
||||
!insertmacro MUI_LANGUAGE "PortugueseBR"
|
||||
!insertmacro MUI_LANGUAGE "Serbian"
|
||||
!insertmacro MUI_LANGUAGE "Turkish"
|
||||
!insertmacro MUI_LANGUAGE "Hebrew"
|
||||
!insertmacro MUI_LANGUAGE "Russian"
|
||||
!insertmacro MUI_LANGUAGE "Czech"
|
||||
!insertmacro MUI_LANGUAGE "SimpChinese"
|
||||
|
||||
|
||||
;------------------------------------------------------------------
|
||||
;Reserve Files
|
||||
;If you are using solid compression, files that are required before
|
||||
;the actual installation should be stored first in the data block,
|
||||
;because this will make your installer start faster.
|
||||
|
||||
!insertmacro MUI_RESERVEFILE_LANGDLL
|
||||
|
||||
|
||||
;------------------------------------------------------------------
|
||||
; SECTION main program
|
||||
;
|
||||
Section "SABnzbd" SecDummy
|
||||
|
||||
SetOutPath "$INSTDIR"
|
||||
SetShellVarContext all
|
||||
|
||||
DetailPrint $(MsgShutting)
|
||||
|
||||
;------------------------------------------------------------------
|
||||
; Shutdown any running service
|
||||
|
||||
!insertmacro SERVICE "stop" "SABnzbd" ""
|
||||
|
||||
;------------------------------------------------------------------
|
||||
; Terminate SABnzbd.exe
|
||||
loop:
|
||||
${nsProcess::FindProcess} "SABnzbd.exe" $R0
|
||||
StrCmp $R0 0 0 endcheck
|
||||
${nsProcess::CloseProcess} "SABnzbd.exe" $R0
|
||||
Sleep 500
|
||||
Goto loop
|
||||
endcheck:
|
||||
${nsProcess::Unload}
|
||||
|
||||
;------------------------------------------------------------------
|
||||
; Make sure old versions are gone (reg-key already read in .onInit)
|
||||
StrCmp $PREV_INST_DIR "" noPrevInstallRemove
|
||||
${RemovePrev} "$PREV_INST_DIR"
|
||||
Goto continueSetupAfterRemove
|
||||
|
||||
;------------------------------------------------------------------
|
||||
; Add firewall rules for new installs
|
||||
noPrevInstallRemove:
|
||||
liteFirewallW::AddRule "$INSTDIR\SABnzbd.exe" "SABnzbd"
|
||||
liteFirewallW::AddRule "$INSTDIR\SABnzbd-console.exe" "SABnzbd-console"
|
||||
|
||||
continueSetupAfterRemove:
|
||||
|
||||
; add files / whatever that need to be installed here.
|
||||
File /r "dist\SABnzbd\*"
|
||||
|
||||
;------------------------------------------------------------------
|
||||
; Add to registry
|
||||
WriteRegStr HKEY_LOCAL_MACHINE "SOFTWARE\SABnzbd" "" "$INSTDIR"
|
||||
WriteRegStr HKEY_LOCAL_MACHINE "SOFTWARE\SABnzbd" "Installer Language" "$(MsgLangCode)"
|
||||
WriteRegStr HKEY_LOCAL_MACHINE "Software\Microsoft\Windows\CurrentVersion\Uninstall\SABnzbd" "DisplayName" "SABnzbd ${SAB_VERSION}"
|
||||
WriteRegStr HKEY_LOCAL_MACHINE "Software\Microsoft\Windows\CurrentVersion\Uninstall\SABnzbd" "UninstallString" '"$INSTDIR\uninstall.exe"'
|
||||
WriteRegStr HKEY_LOCAL_MACHINE "Software\Microsoft\Windows\CurrentVersion\Uninstall\SABnzbd" "DisplayVersion" '${SAB_VERSION}'
|
||||
WriteRegStr HKEY_LOCAL_MACHINE "Software\Microsoft\Windows\CurrentVersion\Uninstall\SABnzbd" "Publisher" 'The SABnzbd-Team'
|
||||
WriteRegStr HKEY_LOCAL_MACHINE "Software\Microsoft\Windows\CurrentVersion\Uninstall\SABnzbd" "HelpLink" 'https://forums.sabnzbd.org/'
|
||||
WriteRegStr HKEY_LOCAL_MACHINE "Software\Microsoft\Windows\CurrentVersion\Uninstall\SABnzbd" "URLInfoAbout" 'https://sabnzbd.org/wiki/'
|
||||
WriteRegStr HKEY_LOCAL_MACHINE "Software\Microsoft\Windows\CurrentVersion\Uninstall\SABnzbd" "URLUpdateInfo" 'https://sabnzbd.org/'
|
||||
WriteRegStr HKEY_LOCAL_MACHINE "Software\Microsoft\Windows\CurrentVersion\Uninstall\SABnzbd" "Comments" 'The automated Usenet download tool'
|
||||
WriteRegStr HKEY_LOCAL_MACHINE "Software\Microsoft\Windows\CurrentVersion\Uninstall\SABnzbd" "DisplayIcon" '$INSTDIR\icons\sabnzbd.ico'
|
||||
|
||||
WriteRegDWORD HKEY_LOCAL_MACHINE "Software\Microsoft\Windows\CurrentVersion\Uninstall\SABnzbd" "EstimatedSize" 40674
|
||||
WriteRegDWORD HKEY_LOCAL_MACHINE "Software\Microsoft\Windows\CurrentVersion\Uninstall\SABnzbd" "NoRepair" -1
|
||||
WriteRegDWORD HKEY_LOCAL_MACHINE "Software\Microsoft\Windows\CurrentVersion\Uninstall\SABnzbd" "NoModify" -1
|
||||
|
||||
WriteRegStr HKEY_CURRENT_USER "Software\Classes\AppUserModelId\SABnzbd" "DisplayName" "SABnzbd"
|
||||
WriteRegStr HKEY_CURRENT_USER "Software\Classes\AppUserModelId\SABnzbd" "IconUri" '$INSTDIR\icons\sabnzbd16_32.ico'
|
||||
|
||||
; write out uninstaller
|
||||
WriteUninstaller "$INSTDIR\Uninstall.exe"
|
||||
|
||||
!insertmacro MUI_STARTMENU_WRITE_BEGIN Application
|
||||
;Create shortcuts
|
||||
CreateDirectory "$SMPROGRAMS\$STARTMENU_FOLDER"
|
||||
CreateShortCut "$SMPROGRAMS\$STARTMENU_FOLDER\SABnzbd.lnk" "$INSTDIR\SABnzbd.exe"
|
||||
CreateShortCut "$SMPROGRAMS\$STARTMENU_FOLDER\SABnzbd - SafeMode.lnk" "$INSTDIR\SABnzbd.exe" "--server 127.0.0.1:8080 -b1 --no-login"
|
||||
WriteINIStr "$SMPROGRAMS\$STARTMENU_FOLDER\SABnzbd - Documentation.url" "InternetShortcut" "URL" "https://sabnzbd.org/wiki/"
|
||||
CreateShortCut "$SMPROGRAMS\$STARTMENU_FOLDER\Uninstall.lnk" "$INSTDIR\Uninstall.exe"
|
||||
!insertmacro MUI_STARTMENU_WRITE_END
|
||||
SectionEnd ; end of default section
|
||||
|
||||
Section $(MsgIcon) desktop
|
||||
CreateShortCut "$DESKTOP\SABnzbd.lnk" "$INSTDIR\SABnzbd.exe"
|
||||
SectionEnd ; end of desktop icon section
|
||||
|
||||
Section $(MsgAssoc) assoc
|
||||
${registerExtension} "$INSTDIR\icons\nzb.ico" "$INSTDIR\SABnzbd.exe" ".nzb" "NZB File"
|
||||
${RefreshShellIcons}
|
||||
SectionEnd ; end of file association section
|
||||
|
||||
Section /o $(MsgRunAtStart) startup
|
||||
CreateShortCut "$SMPROGRAMS\Startup\SABnzbd.lnk" "$INSTDIR\SABnzbd.exe" "-b0"
|
||||
SectionEnd ;
|
||||
|
||||
;------------------------------------------------------------------
|
||||
Function .onInit
|
||||
; We need to modify the dir here for X64
|
||||
${If} ${RunningX64}
|
||||
StrCpy $INSTDIR "$PROGRAMFILES64\SABnzbd"
|
||||
${Else}
|
||||
MessageBox MB_OK|MB_ICONSTOP $(MsgOnly64bit)
|
||||
Abort
|
||||
${EndIf}
|
||||
|
||||
; Python 3.9 no longer supports Windows 7
|
||||
${If} ${AtMostWin8}
|
||||
MessageBox MB_OK|MB_ICONSTOP $(MsgNoWin7)
|
||||
Abort
|
||||
${EndIf}
|
||||
|
||||
;------------------------------------------------------------------
|
||||
; Change settings based on if SAB was already installed
|
||||
ReadRegStr $PREV_INST_DIR HKEY_LOCAL_MACHINE "SOFTWARE\SABnzbd" ""
|
||||
StrCmp $PREV_INST_DIR "" noPrevInstall
|
||||
; Re-use the user's custom install dir if one was set
|
||||
StrCmp $PREV_INST_DIR "$PROGRAMFILES\SABnzbd" noSpecialDir
|
||||
StrCmp $PREV_INST_DIR "$PROGRAMFILES64\SABnzbd" noSpecialDir
|
||||
; Set what the user had before
|
||||
StrCpy $INSTDIR "$PREV_INST_DIR"
|
||||
noSpecialDir:
|
||||
|
||||
;------------------------------------------------------------------
|
||||
; Check what the user has currently set for install options
|
||||
SetShellVarContext current
|
||||
IfFileExists "$SMPROGRAMS\Startup\SABnzbd.lnk" 0 endCheckStartupCurrent
|
||||
SectionSetFlags ${startup} 1
|
||||
endCheckStartupCurrent:
|
||||
SetShellVarContext all
|
||||
IfFileExists "$SMPROGRAMS\Startup\SABnzbd.lnk" 0 endCheckStartup
|
||||
SectionSetFlags ${startup} 1
|
||||
endCheckStartup:
|
||||
|
||||
SetShellVarContext current
|
||||
IfFileExists "$DESKTOP\SABnzbd.lnk" endCheckDesktop 0
|
||||
; If not present for the current user, also check the all-users folder
|
||||
SetShellVarContext all
|
||||
IfFileExists "$DESKTOP\SABnzbd.lnk" endCheckDesktop 0
|
||||
SectionSetFlags ${desktop} 0 ; SAB is installed but desktop-icon not, so uncheck it
|
||||
endCheckDesktop:
|
||||
SetShellVarContext all
|
||||
|
||||
Push $1
|
||||
ReadRegStr $1 HKCR ".nzb" "" ; read current file association
|
||||
StrCmp "$1" "NZB File" noPrevInstall 0
|
||||
SectionSetFlags ${assoc} 0 ; Uncheck it when it wasn't checked before
|
||||
noPrevInstall:
|
||||
|
||||
;--------------------------------
|
||||
; Display language chooser
|
||||
!insertmacro MUI_LANGDLL_DISPLAY
|
||||
|
||||
;------------------------------------------------------------------
|
||||
; Tell users about the service change
|
||||
;
|
||||
!insertmacro SERVICE "installed" "SABHelper" ""
|
||||
Pop $0 ;response
|
||||
${If} $0 == true
|
||||
MessageBox MB_OKCANCEL|MB_ICONEXCLAMATION $(MsgServChange) IDOK removeservices IDCANCEL exitinstall
|
||||
exitinstall:
|
||||
Abort
|
||||
removeservices:
|
||||
!insertmacro SERVICE "delete" "SABHelper" ""
|
||||
!insertmacro SERVICE "delete" "SABnzbd" ""
|
||||
${EndIf}
|
||||
|
||||
FunctionEnd
|
||||
|
||||
;--------------------------------
|
||||
; begin uninstall settings/section
|
||||
UninstallText $(MsgUninstall)
|
||||
|
||||
Section "un.$(MsgDelProgram)" Uninstall
|
||||
; Make sure SABnzbd.exe isn't running; if so, shut it down
|
||||
DetailPrint $(MsgShutting)
|
||||
${nsProcess::KillProcess} "SABnzbd.exe" $R0
|
||||
${nsProcess::Unload}
|
||||
|
||||
; Add delete commands for the files/registry keys/etc. that were installed above.
|
||||
Delete "$INSTDIR\uninstall.exe"
|
||||
DeleteRegKey HKEY_LOCAL_MACHINE "SOFTWARE\SABnzbd"
|
||||
DeleteRegKey HKEY_LOCAL_MACHINE "SOFTWARE\Microsoft\Windows\CurrentVersion\Uninstall\SABnzbd"
|
||||
DeleteRegKey HKEY_CURRENT_USER "Software\Classes\AppUserModelId\SABnzbd"
|
||||
DeleteRegKey HKEY_CURRENT_USER "Software\SABnzbd"
|
||||
|
||||
${RemovePrev} "$INSTDIR"
|
||||
${RemovePrevShortcuts}
|
||||
|
||||
; Remove firewall entries
|
||||
liteFirewallW::RemoveRule "$INSTDIR\SABnzbd.exe" "SABnzbd"
|
||||
liteFirewallW::RemoveRule "$INSTDIR\SABnzbd-console.exe" "SABnzbd-console"
|
||||
|
||||
${unregisterExtension} ".nzb" "NZB File"
|
||||
${RefreshShellIcons}
|
||||
|
||||
SectionEnd ; end of uninstall section
|
||||
|
||||
Section /o "un.$(MsgDelSettings)" DelSettings
|
||||
DetailPrint "Uninstall settings $LOCALAPPDATA"
|
||||
Delete "$LOCALAPPDATA\sabnzbd\sabnzbd.ini"
|
||||
RMDir /r "$LOCALAPPDATA\sabnzbd"
|
||||
SectionEnd
|
||||
|
||||
; eof
|
||||
|
||||
;--------------------------------
|
||||
;Language strings
|
||||
LangString MsgShowRelNote ${LANG_ENGLISH} "Show Release Notes"
|
||||
|
||||
LangString MsgRunSAB ${LANG_ENGLISH} "Run SABnzbd"
|
||||
|
||||
LangString MsgSupportUs ${LANG_ENGLISH} "Support the project, Donate!"
|
||||
|
||||
LangString MsgServChange ${LANG_ENGLISH} "The SABnzbd Windows Service changed in SABnzbd 3.0.0. $\nYou will need to reinstall the SABnzbd service. $\n$\nClick `OK` to remove the existing services or `Cancel` to cancel this upgrade."
|
||||
|
||||
LangString MsgOnly64bit ${LANG_ENGLISH} "SABnzbd only supports 64-bit Windows."
|
||||
|
||||
LangString MsgNoWin7 ${LANG_ENGLISH} "SABnzbd only supports Windows 8.1 and above."
|
||||
|
||||
LangString MsgShutting ${LANG_ENGLISH} "Shutting down SABnzbd"
|
||||
|
||||
LangString MsgUninstall ${LANG_ENGLISH} "This will uninstall SABnzbd from your system"
|
||||
|
||||
LangString MsgRunAtStart ${LANG_ENGLISH} "Run at startup"
|
||||
|
||||
LangString MsgIcon ${LANG_ENGLISH} "Desktop Icon"
|
||||
|
||||
LangString MsgAssoc ${LANG_ENGLISH} "NZB File association"
|
||||
|
||||
LangString MsgDelProgram ${LANG_ENGLISH} "Delete Program"
|
||||
|
||||
LangString MsgDelSettings ${LANG_ENGLISH} "Delete Settings"
|
||||
|
||||
LangString MsgLangCode ${LANG_ENGLISH} "en"
|
||||
|
||||
Function un.onInit
|
||||
!insertmacro MUI_UNGETLANGUAGE
|
||||
FunctionEnd
|
||||
501
builder/win/nsis/Include/StdUtils.nsh
Normal file
@@ -0,0 +1,501 @@
|
||||
#################################################################################
|
||||
# StdUtils plug-in for NSIS
|
||||
# Copyright (C) 2004-2018 LoRd_MuldeR <MuldeR2@GMX.de>
|
||||
#
|
||||
# This library is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU Lesser General Public
|
||||
# License as published by the Free Software Foundation; either
|
||||
# version 2.1 of the License, or (at your option) any later version.
|
||||
#
|
||||
# This library is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
# Lesser General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public
|
||||
# License along with this library; if not, write to the Free Software
|
||||
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
|
||||
#
|
||||
# http://www.gnu.org/licenses/lgpl-2.1.txt
|
||||
#################################################################################
|
||||
|
||||
# DEVELOPER NOTES:
|
||||
# - Please see "https://github.com/lordmulder/stdutils/" for news and updates!
|
||||
# - Please see "Docs\StdUtils\StdUtils.html" for detailed function descriptions!
|
||||
# - Please see "Examples\StdUtils\StdUtilsTest.nsi" for usage examples!
|
||||
|
||||
#################################################################################
|
||||
# FUNCTION DECLARATIONS
|
||||
#################################################################################
|
||||
|
||||
!ifndef ___STDUTILS__NSH___
|
||||
!define ___STDUTILS__NSH___
|
||||
|
||||
!define StdUtils.Time '!insertmacro _StdU_Time' #time(), as in C standard library
|
||||
!define StdUtils.GetMinutes '!insertmacro _StdU_GetMinutes' #GetSystemTimeAsFileTime(), returns the number of minutes
|
||||
!define StdUtils.GetHours '!insertmacro _StdU_GetHours' #GetSystemTimeAsFileTime(), returns the number of hours
|
||||
!define StdUtils.GetDays '!insertmacro _StdU_GetDays' #GetSystemTimeAsFileTime(), returns the number of days
|
||||
!define StdUtils.Rand '!insertmacro _StdU_Rand' #rand(), as in C standard library
|
||||
!define StdUtils.RandMax '!insertmacro _StdU_RandMax' #rand(), as in C standard library, with maximum value
|
||||
!define StdUtils.RandMinMax '!insertmacro _StdU_RandMinMax' #rand(), as in C standard library, with minimum/maximum value
|
||||
!define StdUtils.RandList '!insertmacro _StdU_RandList' #rand(), as in C standard library, with list support
|
||||
!define StdUtils.RandBytes '!insertmacro _StdU_RandBytes' #Generates random bytes, returned as Base64-encoded string
|
||||
!define StdUtils.FormatStr '!insertmacro _StdU_FormatStr' #sprintf(), as in C standard library, one '%d' placeholder
|
||||
!define StdUtils.FormatStr2 '!insertmacro _StdU_FormatStr2' #sprintf(), as in C standard library, two '%d' placeholders
|
||||
!define StdUtils.FormatStr3 '!insertmacro _StdU_FormatStr3' #sprintf(), as in C standard library, three '%d' placeholders
|
||||
!define StdUtils.ScanStr '!insertmacro _StdU_ScanStr' #sscanf(), as in C standard library, one '%d' placeholder
|
||||
!define StdUtils.ScanStr2 '!insertmacro _StdU_ScanStr2' #sscanf(), as in C standard library, two '%d' placeholders
|
||||
!define StdUtils.ScanStr3 '!insertmacro _StdU_ScanStr3' #sscanf(), as in C standard library, three '%d' placeholders
|
||||
!define StdUtils.TrimStr '!insertmacro _StdU_TrimStr' #Remove whitespace from string, left and right
|
||||
!define StdUtils.TrimStrLeft '!insertmacro _StdU_TrimStrLeft' #Remove whitespace from string, left side only
|
||||
!define StdUtils.TrimStrRight '!insertmacro _StdU_TrimStrRight' #Remove whitespace from string, right side only
|
||||
!define StdUtils.RevStr '!insertmacro _StdU_RevStr' #Reverse a string, e.g. "reverse me" <-> "em esrever"
|
||||
!define StdUtils.ValidFileName '!insertmacro _StdU_ValidFileName' #Test whether string is a valid file name - no paths allowed
|
||||
!define StdUtils.ValidPathSpec '!insertmacro _StdU_ValidPathSpec' #Test whether string is a valid full(!) path specification
|
||||
!define StdUtils.ValidDomainName '!insertmacro _StdU_ValidDomain' #Test whether string is a valid host name or domain name
|
||||
!define StdUtils.StrToUtf8 '!insertmacro _StdU_StrToUtf8' #Convert string from Unicode (UTF-16) or ANSI to UTF-8 bytes
|
||||
!define StdUtils.StrFromUtf8 '!insertmacro _StdU_StrFromUtf8' #Convert string from UTF-8 bytes to Unicode (UTF-16) or ANSI
|
||||
!define StdUtils.SHFileMove '!insertmacro _StdU_SHFileMove' #SHFileOperation(), using the FO_MOVE operation
|
||||
!define StdUtils.SHFileCopy '!insertmacro _StdU_SHFileCopy' #SHFileOperation(), using the FO_COPY operation
|
||||
!define StdUtils.AppendToFile '!insertmacro _StdU_AppendToFile' #Append contents of an existing file to another file
|
||||
!define StdUtils.ExecShellAsUser '!insertmacro _StdU_ExecShlUser' #ShellExecute() as NON-elevated user from elevated installer
|
||||
!define StdUtils.InvokeShellVerb '!insertmacro _StdU_InvkeShlVrb' #Invokes a "shell verb", e.g. for pinning items to the taskbar
|
||||
!define StdUtils.ExecShellWaitEx '!insertmacro _StdU_ExecShlWaitEx' #ShellExecuteEx(), returns the handle of the new process
|
||||
!define StdUtils.WaitForProcEx '!insertmacro _StdU_WaitForProcEx' #WaitForSingleObject(), e.g. to wait for a running process
|
||||
!define StdUtils.GetParameter '!insertmacro _StdU_GetParameter' #Get the value of a specific command-line option
|
||||
!define StdUtils.TestParameter '!insertmacro _StdU_TestParameter' #Test whether a specific command-line option has been set
|
||||
!define StdUtils.ParameterCnt '!insertmacro _StdU_ParameterCnt' #Get number of command-line tokens, similar to argc in main()
|
||||
!define StdUtils.ParameterStr '!insertmacro _StdU_ParameterStr' #Get the n-th command-line token, similar to argv[i] in main()
|
||||
!define StdUtils.GetAllParameters '!insertmacro _StdU_GetAllParams' #Get complete command-line, but without executable name
|
||||
!define StdUtils.GetRealOSVersion '!insertmacro _StdU_GetRealOSVer' #Get the *real* Windows version number, even on Windows 8.1+
|
||||
!define StdUtils.GetRealOSBuildNo '!insertmacro _StdU_GetRealOSBld' #Get the *real* Windows build number, even on Windows 8.1+
|
||||
!define StdUtils.GetRealOSName '!insertmacro _StdU_GetRealOSStr' #Get the *real* Windows version, as a "friendly" name
|
||||
!define StdUtils.GetOSEdition '!insertmacro _StdU_GetOSEdition' #Get the Windows edition, i.e. "workstation" or "server"
|
||||
!define StdUtils.GetOSReleaseId '!insertmacro _StdU_GetOSRelIdNo' #Get the Windows release identifier (on Windows 10)
|
||||
!define StdUtils.GetOSReleaseName '!insertmacro _StdU_GetOSRelIdStr' #Get the Windows release (on Windows 10), as a "friendly" name
|
||||
!define StdUtils.VerifyOSVersion '!insertmacro _StdU_VrfyRealOSVer' #Compare *real* operating system to an expected version number
|
||||
!define StdUtils.VerifyOSBuildNo '!insertmacro _StdU_VrfyRealOSBld' #Compare *real* operating system to an expected build number
|
||||
!define StdUtils.HashText '!insertmacro _StdU_HashText' #Compute hash from text string (CRC32, MD5, SHA1/2/3, BLAKE2)
|
||||
!define StdUtils.HashFile '!insertmacro _StdU_HashFile' #Compute hash from file (CRC32, MD5, SHA1/2/3, BLAKE2)
|
||||
!define StdUtils.NormalizePath '!insertmacro _StdU_NormalizePath' #Simplifies the path to produce a direct, well-formed path
|
||||
!define StdUtils.GetParentPath '!insertmacro _StdU_GetParentPath' #Get parent path by removing the last component from the path
|
||||
!define StdUtils.SplitPath '!insertmacro _StdU_SplitPath' #Split the components of the given path
|
||||
!define StdUtils.GetDrivePart '!insertmacro _StdU_GetDrivePart' #Get drive component of path
|
||||
!define StdUtils.GetDirectoryPart '!insertmacro _StdU_GetDirPart' #Get directory component of path
|
||||
!define StdUtils.GetFileNamePart '!insertmacro _StdU_GetFNamePart' #Get file name component of path
|
||||
!define StdUtils.GetExtensionPart '!insertmacro _StdU_GetExtnPart' #Get file extension component of path
|
||||
!define StdUtils.TimerCreate '!insertmacro _StdU_TimerCreate' #Create a new event-timer that will be triggered periodically
|
||||
!define StdUtils.TimerDestroy '!insertmacro _StdU_TimerDestroy' #Destroy a running timer created with TimerCreate()
|
||||
!define StdUtils.ProtectStr '!insertmacro _StdU_PrtctStr' #Protect a given String using Windows' DPAPI
|
||||
!define StdUtils.UnprotectStr '!insertmacro _StdU_UnprtctStr' #Unprotect a string that was protected via ProtectStr()
|
||||
!define StdUtils.GetLibVersion '!insertmacro _StdU_GetLibVersion' #Get the current StdUtils library version (for debugging)
|
||||
!define StdUtils.SetVerbose '!insertmacro _StdU_SetVerbose' #Enable or disable "verbose" mode (for debugging)
|
||||
|
||||
|
||||
#################################################################################
|
||||
# MACRO DEFINITIONS
|
||||
#################################################################################
|
||||
|
||||
!macro _StdU_Time out
|
||||
StdUtils::Time /NOUNLOAD
|
||||
pop ${out}
|
||||
!macroend
|
||||
|
||||
!macro _StdU_GetMinutes out
|
||||
StdUtils::GetMinutes /NOUNLOAD
|
||||
pop ${out}
|
||||
!macroend
|
||||
|
||||
!macro _StdU_GetHours out
|
||||
StdUtils::GetHours /NOUNLOAD
|
||||
pop ${out}
|
||||
!macroend
|
||||
|
||||
!macro _StdU_GetDays out
|
||||
StdUtils::GetDays /NOUNLOAD
|
||||
pop ${out}
|
||||
!macroend
|
||||
|
||||
!macro _StdU_Rand out
|
||||
StdUtils::Rand /NOUNLOAD
|
||||
pop ${out}
|
||||
!macroend
|
||||
|
||||
!macro _StdU_RandMax out max
|
||||
push ${max}
|
||||
StdUtils::RandMax /NOUNLOAD
|
||||
pop ${out}
|
||||
!macroend
|
||||
|
||||
!macro _StdU_RandMinMax out min max
|
||||
push ${min}
|
||||
push ${max}
|
||||
StdUtils::RandMinMax /NOUNLOAD
|
||||
pop ${out}
|
||||
!macroend
|
||||
|
||||
!macro _StdU_RandList count max
|
||||
push ${max}
|
||||
push ${count}
|
||||
StdUtils::RandList /NOUNLOAD
|
||||
!macroend
|
||||
|
||||
!macro _StdU_RandBytes out count
|
||||
push ${count}
|
||||
StdUtils::RandBytes /NOUNLOAD
|
||||
pop ${out}
|
||||
!macroend
|
||||
|
||||
!macro _StdU_FormatStr out format val
|
||||
push `${format}`
|
||||
push ${val}
|
||||
StdUtils::FormatStr /NOUNLOAD
|
||||
pop ${out}
|
||||
!macroend
|
||||
|
||||
!macro _StdU_FormatStr2 out format val1 val2
|
||||
push `${format}`
|
||||
push ${val1}
|
||||
push ${val2}
|
||||
StdUtils::FormatStr2 /NOUNLOAD
|
||||
pop ${out}
|
||||
!macroend
|
||||
|
||||
!macro _StdU_FormatStr3 out format val1 val2 val3
|
||||
push `${format}`
|
||||
push ${val1}
|
||||
push ${val2}
|
||||
push ${val3}
|
||||
StdUtils::FormatStr3 /NOUNLOAD
|
||||
pop ${out}
|
||||
!macroend
|
||||
|
||||
!macro _StdU_ScanStr out format input default
|
||||
push `${format}`
|
||||
push `${input}`
|
||||
push ${default}
|
||||
StdUtils::ScanStr /NOUNLOAD
|
||||
pop ${out}
|
||||
!macroend
|
||||
|
||||
!macro _StdU_ScanStr2 out1 out2 format input default1 default2
|
||||
push `${format}`
|
||||
push `${input}`
|
||||
push ${default1}
|
||||
push ${default2}
|
||||
StdUtils::ScanStr2 /NOUNLOAD
|
||||
pop ${out1}
|
||||
pop ${out2}
|
||||
!macroend
|
||||
|
||||
!macro _StdU_ScanStr3 out1 out2 out3 format input default1 default2 default3
|
||||
push `${format}`
|
||||
push `${input}`
|
||||
push ${default1}
|
||||
push ${default2}
|
||||
push ${default3}
|
||||
StdUtils::ScanStr3 /NOUNLOAD
|
||||
pop ${out1}
|
||||
pop ${out2}
|
||||
pop ${out3}
|
||||
!macroend
|
||||
|
||||
!macro _StdU_TrimStr var
|
||||
push ${var}
|
||||
StdUtils::TrimStr /NOUNLOAD
|
||||
pop ${var}
|
||||
!macroend
|
||||
|
||||
!macro _StdU_TrimStrLeft var
|
||||
push ${var}
|
||||
StdUtils::TrimStrLeft /NOUNLOAD
|
||||
pop ${var}
|
||||
!macroend
|
||||
|
||||
!macro _StdU_TrimStrRight var
|
||||
push ${var}
|
||||
StdUtils::TrimStrRight /NOUNLOAD
|
||||
pop ${var}
|
||||
!macroend
|
||||
|
||||
!macro _StdU_RevStr var
|
||||
push ${var}
|
||||
StdUtils::RevStr /NOUNLOAD
|
||||
pop ${var}
|
||||
!macroend
|
||||
|
||||
!macro _StdU_ValidFileName out test
|
||||
push `${test}`
|
||||
StdUtils::ValidFileName /NOUNLOAD
|
||||
pop ${out}
|
||||
!macroend
|
||||
|
||||
!macro _StdU_ValidPathSpec out test
|
||||
push `${test}`
|
||||
StdUtils::ValidPathSpec /NOUNLOAD
|
||||
pop ${out}
|
||||
!macroend
|
||||
|
||||
!macro _StdU_ValidDomain out test
|
||||
push `${test}`
|
||||
StdUtils::ValidDomainName /NOUNLOAD
|
||||
pop ${out}
|
||||
!macroend
|
||||
|
||||
|
||||
!macro _StdU_StrToUtf8 out str
|
||||
push `${str}`
|
||||
StdUtils::StrToUtf8 /NOUNLOAD
|
||||
pop ${out}
|
||||
!macroend
|
||||
|
||||
!macro _StdU_StrFromUtf8 out trnc str
|
||||
push ${trnc}
|
||||
push `${str}`
|
||||
StdUtils::StrFromUtf8 /NOUNLOAD
|
||||
pop ${out}
|
||||
!macroend
|
||||
|
||||
!macro _StdU_SHFileMove out from to hwnd
|
||||
push `${from}`
|
||||
push `${to}`
|
||||
push ${hwnd}
|
||||
StdUtils::SHFileMove /NOUNLOAD
|
||||
pop ${out}
|
||||
!macroend
|
||||
|
||||
!macro _StdU_SHFileCopy out from to hwnd
|
||||
push `${from}`
|
||||
push `${to}`
|
||||
push ${hwnd}
|
||||
StdUtils::SHFileCopy /NOUNLOAD
|
||||
pop ${out}
|
||||
!macroend
|
||||
|
||||
!macro _StdU_AppendToFile out from dest offset maxlen
|
||||
push `${from}`
|
||||
push `${dest}`
|
||||
push ${offset}
|
||||
push ${maxlen}
|
||||
StdUtils::AppendToFile /NOUNLOAD
|
||||
pop ${out}
|
||||
!macroend
|
||||
|
||||
!macro _StdU_ExecShlUser out file verb args
|
||||
push `${file}`
|
||||
push `${verb}`
|
||||
push `${args}`
|
||||
StdUtils::ExecShellAsUser /NOUNLOAD
|
||||
pop ${out}
|
||||
!macroend
|
||||
|
||||
!macro _StdU_InvkeShlVrb out path file verb_id
|
||||
push "${path}"
|
||||
push "${file}"
|
||||
push ${verb_id}
|
||||
StdUtils::InvokeShellVerb /NOUNLOAD
|
||||
pop ${out}
|
||||
!macroend
|
||||
|
||||
!macro _StdU_ExecShlWaitEx out_res out_val file verb args
|
||||
push `${file}`
|
||||
push `${verb}`
|
||||
push `${args}`
|
||||
StdUtils::ExecShellWaitEx /NOUNLOAD
|
||||
pop ${out_res}
|
||||
pop ${out_val}
|
||||
!macroend
|
||||
|
||||
!macro _StdU_WaitForProcEx out handle
|
||||
push `${handle}`
|
||||
StdUtils::WaitForProcEx /NOUNLOAD
|
||||
pop ${out}
|
||||
!macroend
|
||||
|
||||
!macro _StdU_GetParameter out name default
|
||||
push `${name}`
|
||||
push `${default}`
|
||||
StdUtils::GetParameter /NOUNLOAD
|
||||
pop ${out}
|
||||
!macroend
|
||||
|
||||
!macro _StdU_TestParameter out name
|
||||
push `${name}`
|
||||
StdUtils::TestParameter /NOUNLOAD
|
||||
pop ${out}
|
||||
!macroend
|
||||
|
||||
!macro _StdU_ParameterCnt out
|
||||
StdUtils::ParameterCnt /NOUNLOAD
|
||||
pop ${out}
|
||||
!macroend
|
||||
|
||||
!macro _StdU_ParameterStr out index
|
||||
push ${index}
|
||||
StdUtils::ParameterStr /NOUNLOAD
|
||||
pop ${out}
|
||||
!macroend
|
||||
|
||||
!macro _StdU_GetAllParams out truncate
|
||||
push `${truncate}`
|
||||
StdUtils::GetAllParameters /NOUNLOAD
|
||||
pop ${out}
|
||||
!macroend
|
||||
|
||||
!macro _StdU_GetRealOSVer out_major out_minor out_spack
|
||||
StdUtils::GetRealOsVersion /NOUNLOAD
|
||||
pop ${out_major}
|
||||
pop ${out_minor}
|
||||
pop ${out_spack}
|
||||
!macroend
|
||||
|
||||
!macro _StdU_GetRealOSBld out
|
||||
StdUtils::GetRealOsBuildNo /NOUNLOAD
|
||||
pop ${out}
|
||||
!macroend
|
||||
|
||||
!macro _StdU_GetRealOSStr out
|
||||
StdUtils::GetRealOsName /NOUNLOAD
|
||||
pop ${out}
|
||||
!macroend
|
||||
|
||||
!macro _StdU_VrfyRealOSVer out major minor spack
|
||||
push `${major}`
|
||||
push `${minor}`
|
||||
push `${spack}`
|
||||
StdUtils::VerifyRealOsVersion /NOUNLOAD
|
||||
pop ${out}
|
||||
!macroend
|
||||
|
||||
!macro _StdU_VrfyRealOSBld out build
|
||||
push `${build}`
|
||||
StdUtils::VerifyRealOsBuildNo /NOUNLOAD
|
||||
pop ${out}
|
||||
!macroend
|
||||
|
||||
!macro _StdU_GetOSEdition out
|
||||
StdUtils::GetOsEdition /NOUNLOAD
|
||||
pop ${out}
|
||||
!macroend
|
||||
|
||||
!macro _StdU_GetOSRelIdNo out
|
||||
StdUtils::GetOsReleaseId /NOUNLOAD
|
||||
pop ${out}
|
||||
!macroend
|
||||
|
||||
!macro _StdU_GetOSRelIdStr out
|
||||
StdUtils::GetOsReleaseName /NOUNLOAD
|
||||
pop ${out}
|
||||
!macroend
|
||||
|
||||
!macro _StdU_HashText out type text
|
||||
push `${type}`
|
||||
push `${text}`
|
||||
StdUtils::HashText /NOUNLOAD
|
||||
pop ${out}
|
||||
!macroend
|
||||
|
||||
!macro _StdU_HashFile out type file
|
||||
push `${type}`
|
||||
push `${file}`
|
||||
StdUtils::HashFile /NOUNLOAD
|
||||
pop ${out}
|
||||
!macroend
|
||||
|
||||
!macro _StdU_NormalizePath out path
|
||||
push `${path}`
|
||||
StdUtils::NormalizePath /NOUNLOAD
|
||||
pop ${out}
|
||||
!macroend
|
||||
|
||||
!macro _StdU_GetParentPath out path
|
||||
push `${path}`
|
||||
StdUtils::GetParentPath /NOUNLOAD
|
||||
pop ${out}
|
||||
!macroend
|
||||
|
||||
!macro _StdU_SplitPath out_drive out_dir out_fname out_ext path
|
||||
push `${path}`
|
||||
StdUtils::SplitPath /NOUNLOAD
|
||||
pop ${out_drive}
|
||||
pop ${out_dir}
|
||||
pop ${out_fname}
|
||||
pop ${out_ext}
|
||||
!macroend
|
||||
|
||||
!macro _StdU_GetDrivePart out path
|
||||
push `${path}`
|
||||
StdUtils::GetDrivePart /NOUNLOAD
|
||||
pop ${out}
|
||||
!macroend
|
||||
|
||||
!macro _StdU_GetDirPart out path
|
||||
push `${path}`
|
||||
StdUtils::GetDirectoryPart /NOUNLOAD
|
||||
pop ${out}
|
||||
!macroend
|
||||
|
||||
!macro _StdU_GetFNamePart out path
|
||||
push `${path}`
|
||||
StdUtils::GetFileNamePart /NOUNLOAD
|
||||
pop ${out}
|
||||
!macroend
|
||||
|
||||
!macro _StdU_GetExtnPart out path
|
||||
push `${path}`
|
||||
StdUtils::GetExtensionPart /NOUNLOAD
|
||||
pop ${out}
|
||||
!macroend
|
||||
|
||||
!macro _StdU_TimerCreate out callback interval
|
||||
GetFunctionAddress ${out} ${callback}
|
||||
push ${out}
|
||||
push ${interval}
|
||||
StdUtils::TimerCreate /NOUNLOAD
|
||||
pop ${out}
|
||||
!macroend
|
||||
|
||||
!macro _StdU_TimerDestroy out timer_id
|
||||
push ${timer_id}
|
||||
StdUtils::TimerDestroy /NOUNLOAD
|
||||
pop ${out}
|
||||
!macroend
|
||||
|
||||
!macro _StdU_PrtctStr out dpsc salt text
|
||||
push `${dpsc}`
|
||||
push `${salt}`
|
||||
push `${text}`
|
||||
StdUtils::ProtectStr /NOUNLOAD
|
||||
pop ${out}
|
||||
!macroend
|
||||
|
||||
!macro _StdU_UnprtctStr out trnc salt data
|
||||
push `${trnc}`
|
||||
push `${salt}`
|
||||
push `${data}`
|
||||
StdUtils::UnprotectStr /NOUNLOAD
|
||||
pop ${out}
|
||||
!macroend
|
||||
|
||||
!macro _StdU_GetLibVersion out_ver out_tst
|
||||
StdUtils::GetLibVersion /NOUNLOAD
|
||||
pop ${out_ver}
|
||||
pop ${out_tst}
|
||||
!macroend
|
||||
|
||||
!macro _StdU_SetVerbose enable
|
||||
Push ${enable}
|
||||
StdUtils::SetVerboseMode /NOUNLOAD
|
||||
!macroend
|
||||
|
||||
|
||||
#################################################################################
|
||||
# MAGIC NUMBERS
|
||||
#################################################################################
|
||||
|
||||
!define StdUtils.Const.ShellVerb.PinToTaskbar 0
|
||||
!define StdUtils.Const.ShellVerb.UnpinFromTaskbar 1
|
||||
!define StdUtils.Const.ShellVerb.PinToStart 2
|
||||
!define StdUtils.Const.ShellVerb.UnpinFromStart 3
|
||||
|
||||
!endif # !___STDUTILS__NSH___
|
||||
28
builder/win/nsis/Include/nsProcess.nsh
Normal file
@@ -0,0 +1,28 @@
|
||||
!define nsProcess::FindProcess `!insertmacro nsProcess::FindProcess`
|
||||
|
||||
!macro nsProcess::FindProcess _FILE _ERR
|
||||
nsProcess::_FindProcess /NOUNLOAD `${_FILE}`
|
||||
Pop ${_ERR}
|
||||
!macroend
|
||||
|
||||
|
||||
!define nsProcess::KillProcess `!insertmacro nsProcess::KillProcess`
|
||||
|
||||
!macro nsProcess::KillProcess _FILE _ERR
|
||||
nsProcess::_KillProcess /NOUNLOAD `${_FILE}`
|
||||
Pop ${_ERR}
|
||||
!macroend
|
||||
|
||||
!define nsProcess::CloseProcess `!insertmacro nsProcess::CloseProcess`
|
||||
|
||||
!macro nsProcess::CloseProcess _FILE _ERR
|
||||
nsProcess::_CloseProcess /NOUNLOAD `${_FILE}`
|
||||
Pop ${_ERR}
|
||||
!macroend
|
||||
|
||||
|
||||
!define nsProcess::Unload `!insertmacro nsProcess::Unload`
|
||||
|
||||
!macro nsProcess::Unload
|
||||
nsProcess::_Unload
|
||||
!macroend
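
; Usage sketch (mirrors the main installer script, not part of this include):
; poll for SABnzbd.exe and ask it to close before its files are replaced.
; A return value of 0 from FindProcess means the process was found.
;
;   loop:
;     ${nsProcess::FindProcess} "SABnzbd.exe" $R0
;     StrCmp $R0 0 0 endcheck
;     ${nsProcess::CloseProcess} "SABnzbd.exe" $R0
;     Sleep 500
;     Goto loop
;   endcheck:
;     ${nsProcess::Unload}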
|
||||
53
builder/win/nsis/Include/registerExtension.nsh
Normal file
@@ -0,0 +1,53 @@
|
||||
!define registerExtension "!insertmacro registerExtension"
|
||||
!define unregisterExtension "!insertmacro unregisterExtension"
|
||||
!define SHCNE_ASSOCCHANGED 0x8000000
|
||||
!define SHCNF_IDLIST 0
|
||||
|
||||
; Source = http://nsis.sourceforge.net/File_Association
|
||||
; Patched for SABnzbd by swi-tch
|
||||
|
||||
!macro registerExtension icon executable extension description
|
||||
Push "${icon}" ; "full path to icon.ico"
|
||||
Push "${executable}" ; "full path to my.exe"
|
||||
Push "${extension}" ; ".mkv"
|
||||
Push "${description}" ; "MKV File"
|
||||
Call registerExtension
|
||||
!macroend
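
; Usage sketch (added for illustration, not part of the original include): this is how
; the main installer script registers the .nzb extension with these macros; the icon
; and executable paths are taken from that script, not invented here.
;
;   ${registerExtension} "$INSTDIR\icons\nzb.ico" "$INSTDIR\SABnzbd.exe" ".nzb" "NZB File"
;   ${RefreshShellIcons}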
|
||||
|
||||
; Register the extension handler (adapted from the NSIS File_Association example)
|
||||
Function registerExtension
|
||||
!define Index "Line${__LINE__}"
|
||||
pop $R0 ; ext name
|
||||
pop $R1
|
||||
pop $R2
|
||||
pop $R3
|
||||
push $1
|
||||
push $0
|
||||
DeleteRegKey HKEY_CURRENT_USER "Software\Microsoft\Windows\CurrentVersion\Explorer\FileExts\$R1"
|
||||
WriteRegStr HKCR $R1 "" $R0
|
||||
WriteRegStr HKCR $R0 "" $R0
|
||||
WriteRegStr HKCR "$R0\shell" "" "open"
|
||||
WriteRegStr HKCR "$R0\DefaultIcon" "" "$R3,0"
|
||||
WriteRegStr HKCR "$R0\shell\open\command" "" '"$R2" "%1"'
|
||||
WriteRegStr HKCR "$R0\shell\edit" "" "Edit $R0"
|
||||
WriteRegStr HKCR "$R0\shell\edit\command" "" '"$R2" "%1"'
|
||||
pop $0
|
||||
pop $1
|
||||
!undef Index
|
||||
System::Call 'Shell32::SHChangeNotify(i ${SHCNE_ASSOCCHANGED}, i ${SHCNF_IDLIST}, i 0, i 0)'
|
||||
FunctionEnd
|
||||
|
||||
!macro unregisterExtension extension description
|
||||
Push "${extension}" ; ".mkv"
|
||||
Push "${description}" ; "MKV File"
|
||||
Call un.unregisterExtension
|
||||
!macroend
|
||||
|
||||
Function un.unregisterExtension
|
||||
pop $R1 ; description
|
||||
pop $R0 ; extension
|
||||
!define Index "Line${__LINE__}"
|
||||
DeleteRegKey HKCR $R0
|
||||
!undef Index
|
||||
System::Call 'Shell32::SHChangeNotify(i ${SHCNE_ASSOCCHANGED}, i ${SHCNF_IDLIST}, i 0, i 0)'
|
||||
FunctionEnd
|
||||
411
builder/win/nsis/Include/servicelib.nsh
Normal file
@@ -0,0 +1,411 @@
|
||||
; NSIS SERVICE LIBRARY - servicelib.nsh
|
||||
; Version 1.8.1 - Jun 21st, 2013
|
||||
; Questions/Comments - dselkirk@hotmail.com
|
||||
;
|
||||
; Description:
|
||||
; Provides an interface to Windows services
|
||||
;
|
||||
; Inputs:
|
||||
; action - servicelib action, i.e. create, delete, start, stop, pause,
|
||||
; continue, installed, running, status
|
||||
; name - name of service to manipulate
|
||||
; param - action parameters; usage: var1=value1;var2=value2;...etc.
|
||||
; (don't forget to add a ';' after the last value!)
|
||||
;
|
||||
; Actions:
|
||||
; create - creates a new windows service
|
||||
; Parameters:
|
||||
; path - path to service executable
|
||||
; autostart - automatically start with system ie. 1|0
|
||||
; interact - interact with the desktop ie. 1|0
|
||||
; depend - service dependencies
|
||||
; user - user that runs the service
|
||||
; password - password of the above user
|
||||
; display - display name in service's console
|
||||
; description - Description of service
|
||||
; starttype - start type (supersedes autostart)
|
||||
; servicetype - service type (supersedes interact)
|
||||
;
|
||||
; delete - deletes a windows service
|
||||
; start - start a stopped windows service
|
||||
; stop - stops a running windows service
|
||||
; pause - pauses a running windows service
|
||||
; continue - continues a paused windows service
|
||||
; installed - is the provided service installed
|
||||
; Parameters:
|
||||
; action - if true then invokes the specified action
|
||||
; running - is the provided service running
|
||||
; Parameters:
|
||||
; action - if true then invokes the specified action
|
||||
; status - check the status of the provided service
|
||||
;
|
||||
; Usage:
|
||||
; Method 1:
|
||||
; Push "action"
|
||||
; Push "name"
|
||||
; Push "param"
|
||||
; Call Service
|
||||
; Pop $0 ;response
|
||||
;
|
||||
; Method 2:
|
||||
; !insertmacro SERVICE "action" "name" "param"
|
||||
;
|
||||
; History:
|
||||
; 1.0 - 09/15/2003 - Initial release
|
||||
; 1.1 - 09/16/2003 - Changed &l to i, thx brainsucker
|
||||
; 1.2 - 02/29/2004 - Fixed documentation.
|
||||
; 1.3 - 01/05/2006 - Fixed interactive flag and pop order (Kichik)
|
||||
; 1.4 - 12/07/2006 - Added display and depend, fixed datatypes (Vitoco)
|
||||
; 1.5 - 06/25/2008 - Added description of service.(DeSafe.com/liuqixing#gmail.com)
|
||||
; 1.5.1 - 06/12/2009 - Added use of __UNINSTALL__
|
||||
; 1.6 - 08/02/2010 - Fixed description implementation (Anders)
|
||||
; 1.7 - 04/11/2010 - Added get running service process id (Nico)
|
||||
; 1.8 - 24/03/2011 - Added starttype and servicetype (Sergius)
|
||||
; 1.8.1 - 21/06/2013 - Added dynamic ASCII & Unicode support (Zinthose)
|
||||
|
||||
!ifndef SERVICELIB
|
||||
!define SERVICELIB
|
||||
|
||||
!define SC_MANAGER_ALL_ACCESS 0x3F
|
||||
!define SC_STATUS_PROCESS_INFO 0x0
|
||||
!define SERVICE_ALL_ACCESS 0xF01FF
|
||||
|
||||
!define SERVICE_CONTROL_STOP 1
|
||||
!define SERVICE_CONTROL_PAUSE 2
|
||||
!define SERVICE_CONTROL_CONTINUE 3
|
||||
|
||||
!define SERVICE_STOPPED 0x1
|
||||
!define SERVICE_START_PENDING 0x2
|
||||
!define SERVICE_STOP_PENDING 0x3
|
||||
!define SERVICE_RUNNING 0x4
|
||||
!define SERVICE_CONTINUE_PENDING 0x5
|
||||
!define SERVICE_PAUSE_PENDING 0x6
|
||||
!define SERVICE_PAUSED 0x7
|
||||
|
||||
!define SERVICE_KERNEL_DRIVER 0x00000001
|
||||
!define SERVICE_FILE_SYSTEM_DRIVER 0x00000002
|
||||
!define SERVICE_WIN32_OWN_PROCESS 0x00000010
|
||||
!define SERVICE_WIN32_SHARE_PROCESS 0x00000020
|
||||
!define SERVICE_INTERACTIVE_PROCESS 0x00000100
|
||||
|
||||
|
||||
!define SERVICE_BOOT_START 0x00000000
|
||||
!define SERVICE_SYSTEM_START 0x00000001
|
||||
!define SERVICE_AUTO_START 0x00000002
|
||||
!define SERVICE_DEMAND_START 0x00000003
|
||||
!define SERVICE_DISABLED 0x00000004
|
||||
|
||||
## Added by Zinthose for Native Unicode Support
|
||||
!ifdef NSIS_UNICODE
|
||||
!define APITAG "W"
|
||||
!else
|
||||
!define APITAG "A"
|
||||
!endif
|
||||
|
||||
!macro SERVICE ACTION NAME PARAM
|
||||
Push '${ACTION}'
|
||||
Push '${NAME}'
|
||||
Push '${PARAM}'
|
||||
!ifdef __UNINSTALL__
|
||||
Call un.Service
|
||||
!else
|
||||
Call Service
|
||||
!endif
|
||||
!macroend
|
||||
|
||||
!macro FUNC_GETPARAM
|
||||
Push $0
|
||||
Push $1
|
||||
Push $2
|
||||
Push $3
|
||||
Push $4
|
||||
Push $5
|
||||
Push $6
|
||||
Push $7
|
||||
Exch 8
|
||||
Pop $1 ;name
|
||||
Exch 8
|
||||
Pop $2 ;source
|
||||
StrCpy $0 ""
|
||||
StrLen $7 $2
|
||||
StrCpy $3 0
|
||||
lbl_loop:
|
||||
IntCmp $3 $7 0 0 lbl_done
|
||||
StrLen $4 "$1="
|
||||
StrCpy $5 $2 $4 $3
|
||||
StrCmp $5 "$1=" 0 lbl_next
|
||||
IntOp $5 $3 + $4
|
||||
StrCpy $3 $5
|
||||
lbl_loop2:
|
||||
IntCmp $3 $7 0 0 lbl_done
|
||||
StrCpy $6 $2 1 $3
|
||||
StrCmp $6 ";" 0 lbl_next2
|
||||
IntOp $6 $3 - $5
|
||||
StrCpy $0 $2 $6 $5
|
||||
Goto lbl_done
|
||||
lbl_next2:
|
||||
IntOp $3 $3 + 1
|
||||
Goto lbl_loop2
|
||||
lbl_next:
|
||||
IntOp $3 $3 + 1
|
||||
Goto lbl_loop
|
||||
lbl_done:
|
||||
Pop $5
|
||||
Pop $4
|
||||
Pop $3
|
||||
Pop $2
|
||||
Pop $1
|
||||
Exch 2
|
||||
Pop $6
|
||||
Pop $7
|
||||
Exch $0
|
||||
!macroend
|
||||
|
||||
!macro CALL_GETPARAM VAR NAME DEFAULT LABEL
|
||||
Push $1
|
||||
Push ${NAME}
|
||||
Call ${UN}GETPARAM
|
||||
Pop $6
|
||||
StrCpy ${VAR} "${DEFAULT}"
|
||||
StrCmp $6 "" "${LABEL}" 0
|
||||
StrCpy ${VAR} $6
|
||||
!macroend
|
||||
|
||||
!macro FUNC_SERVICE UN
|
||||
Push $0
|
||||
Push $1
|
||||
Push $2
|
||||
Push $3
|
||||
Push $4
|
||||
Push $5
|
||||
Push $6
|
||||
Push $7
|
||||
Exch 8
|
||||
Pop $1 ;param
|
||||
Exch 8
|
||||
Pop $2 ;name
|
||||
Exch 8
|
||||
Pop $3 ;action
|
||||
;$0 return
|
||||
;$4 OpenSCManager
|
||||
;$5 OpenService
|
||||
|
||||
StrCpy $0 "false"
|
||||
System::Call 'advapi32::OpenSCManager${APITAG}(n, n, i ${SC_MANAGER_ALL_ACCESS}) i.r4'
|
||||
IntCmp $4 0 lbl_done
|
||||
StrCmp $3 "create" lbl_create
|
||||
System::Call 'advapi32::OpenService${APITAG}(i r4, t r2, i ${SERVICE_ALL_ACCESS}) i.r5'
|
||||
IntCmp $5 0 lbl_done
|
||||
|
||||
lbl_select:
|
||||
StrCmp $3 "delete" lbl_delete
|
||||
StrCmp $3 "start" lbl_start
|
||||
StrCmp $3 "stop" lbl_stop
|
||||
StrCmp $3 "pause" lbl_pause
|
||||
StrCmp $3 "continue" lbl_continue
|
||||
StrCmp $3 "installed" lbl_installed
|
||||
StrCmp $3 "running" lbl_running
|
||||
StrCmp $3 "status" lbl_status
|
||||
StrCmp $3 "processid" lbl_processid
|
||||
Goto lbl_done
|
||||
|
||||
; create service
|
||||
lbl_create:
|
||||
Push $R1 ;depend
|
||||
Push $R2 ;user
|
||||
Push $R3 ;password
|
||||
Push $R4 ;servicetype/interact
|
||||
Push $R5 ;starttype/autostart
|
||||
Push $R6 ;path
|
||||
Push $R7 ;display
|
||||
Push $R8 ;description
|
||||
|
||||
!insertmacro CALL_GETPARAM $R1 "depend" "n" "lbl_depend"
|
||||
StrCpy $R1 't "$R1"'
|
||||
lbl_depend:
|
||||
StrCmp $R1 "n" 0 lbl_machine ;old name of depend param
|
||||
!insertmacro CALL_GETPARAM $R1 "machine" "n" "lbl_machine"
|
||||
StrCpy $R1 't "$R1"'
|
||||
lbl_machine:
|
||||
|
||||
!insertmacro CALL_GETPARAM $R2 "user" "n" "lbl_user"
|
||||
StrCpy $R2 't "$R2"'
|
||||
lbl_user:
|
||||
|
||||
!insertmacro CALL_GETPARAM $R3 "password" "n" "lbl_password"
|
||||
StrCpy $R3 't "$R3"'
|
||||
lbl_password:
|
||||
|
||||
!insertmacro CALL_GETPARAM $R4 "interact" "${SERVICE_WIN32_OWN_PROCESS}" "lbl_interact"
|
||||
StrCpy $6 ${SERVICE_WIN32_OWN_PROCESS}
|
||||
IntCmp $R4 0 +2
|
||||
IntOp $6 $6 | ${SERVICE_INTERACTIVE_PROCESS}
|
||||
StrCpy $R4 $6
|
||||
lbl_interact:
|
||||
|
||||
!insertmacro CALL_GETPARAM $R4 "servicetype" "$R4" "lbl_servicetype"
|
||||
lbl_servicetype:
|
||||
|
||||
!insertmacro CALL_GETPARAM $R5 "autostart" "${SERVICE_DEMAND_START}" "lbl_autostart"
|
||||
StrCpy $6 ${SERVICE_DEMAND_START}
|
||||
IntCmp $R5 0 +2
|
||||
StrCpy $6 ${SERVICE_AUTO_START}
|
||||
StrCpy $R5 $6
|
||||
lbl_autostart:
|
||||
|
||||
!insertmacro CALL_GETPARAM $R5 "starttype" "$R5" "lbl_starttype"
|
||||
lbl_starttype:
|
||||
|
||||
!insertmacro CALL_GETPARAM $R6 "path" "n" "lbl_path"
|
||||
lbl_path:
|
||||
|
||||
!insertmacro CALL_GETPARAM $R7 "display" "$2" "lbl_display"
|
||||
lbl_display:
|
||||
|
||||
!insertmacro CALL_GETPARAM $R8 "description" "$2" "lbl_description"
|
||||
lbl_description:
|
||||
|
||||
System::Call 'advapi32::CreateService${APITAG}(i r4, t r2, t R7, i ${SERVICE_ALL_ACCESS}, \
|
||||
i R4, i R5, i 0, t R6, n, n, $R1, $R2, $R3) i.r6'
|
||||
|
||||
; write description of service (SERVICE_CONFIG_DESCRIPTION)
|
||||
System::Call 'advapi32::ChangeServiceConfig2${APITAG}(ir6,i1,*t "$R8")i.R7'
|
||||
strcmp $R7 "error" 0 lbl_descriptioncomplete
|
||||
WriteRegStr HKLM "SYSTEM\CurrentControlSet\Services\$2" "Description" $R8
|
||||
lbl_descriptioncomplete:
|
||||
|
||||
Pop $R8
|
||||
Pop $R7
|
||||
Pop $R6
|
||||
Pop $R5
|
||||
Pop $R4
|
||||
Pop $R3
|
||||
Pop $R2
|
||||
Pop $R1
|
||||
StrCmp $6 0 lbl_done lbl_good
|
||||
|
||||
; delete service
|
||||
lbl_delete:
|
||||
System::Call 'advapi32::DeleteService(i r5) i.r6'
|
||||
StrCmp $6 0 lbl_done lbl_good
|
||||
|
||||
; start service
|
||||
lbl_start:
|
||||
System::Call 'advapi32::StartService${APITAG}(i r5, i 0, i 0) i.r6'
|
||||
StrCmp $6 0 lbl_done lbl_good
|
||||
|
||||
; stop service
|
||||
lbl_stop:
|
||||
Push $R1
|
||||
System::Call '*(i,i,i,i,i,i,i) i.R1'
|
||||
System::Call 'advapi32::ControlService(i r5, i ${SERVICE_CONTROL_STOP}, i $R1) i'
|
||||
System::Free $R1
|
||||
Pop $R1
|
||||
StrCmp $6 0 lbl_done lbl_good
|
||||
|
||||
; pause service
|
||||
lbl_pause:
|
||||
Push $R1
|
||||
System::Call '*(i,i,i,i,i,i,i) i.R1'
|
||||
System::Call 'advapi32::ControlService(i r5, i ${SERVICE_CONTROL_PAUSE}, i $R1) i'
|
||||
System::Free $R1
|
||||
Pop $R1
|
||||
StrCmp $6 0 lbl_done lbl_good
|
||||
|
||||
; continue service
|
||||
lbl_continue:
|
||||
Push $R1
|
||||
System::Call '*(i,i,i,i,i,i,i) i.R1'
|
||||
System::Call 'advapi32::ControlService(i r5, i ${SERVICE_CONTROL_CONTINUE}, i $R1) i'
|
||||
System::Free $R1
|
||||
Pop $R1
|
||||
StrCmp $6 0 lbl_done lbl_good
|
||||
|
||||
; is installed
|
||||
lbl_installed:
|
||||
!insertmacro CALL_GETPARAM $7 "action" "" "lbl_good"
|
||||
StrCpy $3 $7
|
||||
Goto lbl_select
|
||||
|
||||
; is service running
|
||||
lbl_running:
|
||||
Push $R1
|
||||
System::Call '*(i,i,i,i,i,i,i) i.R1'
|
||||
System::Call 'advapi32::QueryServiceStatus(i r5, i $R1) i'
|
||||
System::Call '*$R1(i, i.r6)'
|
||||
System::Free $R1
|
||||
Pop $R1
|
||||
IntFmt $6 "0x%X" $6
|
||||
StrCmp $6 ${SERVICE_RUNNING} 0 lbl_done
|
||||
!insertmacro CALL_GETPARAM $7 "action" "" "lbl_good"
|
||||
StrCpy $3 $7
|
||||
Goto lbl_select
|
||||
|
||||
lbl_status:
|
||||
Push $R1
|
||||
System::Call '*(i,i,i,i,i,i,i) i.R1'
|
||||
System::Call 'advapi32::QueryServiceStatus(i r5, i $R1) i'
|
||||
System::Call '*$R1(i, i .r6)'
|
||||
System::Free $R1
|
||||
Pop $R1
|
||||
IntFmt $6 "0x%X" $6
|
||||
StrCpy $0 "running"
|
||||
IntCmp $6 ${SERVICE_RUNNING} lbl_done
|
||||
StrCpy $0 "stopped"
|
||||
IntCmp $6 ${SERVICE_STOPPED} lbl_done
|
||||
StrCpy $0 "start_pending"
|
||||
IntCmp $6 ${SERVICE_START_PENDING} lbl_done
|
||||
StrCpy $0 "stop_pending"
|
||||
IntCmp $6 ${SERVICE_STOP_PENDING} lbl_done
|
||||
StrCpy $0 "running"
|
||||
IntCmp $6 ${SERVICE_RUNNING} lbl_done
|
||||
StrCpy $0 "continue_pending"
|
||||
IntCmp $6 ${SERVICE_CONTINUE_PENDING} lbl_done
|
||||
StrCpy $0 "pause_pending"
|
||||
IntCmp $6 ${SERVICE_PAUSE_PENDING} lbl_done
|
||||
StrCpy $0 "paused"
|
||||
IntCmp $6 ${SERVICE_PAUSED} lbl_done
|
||||
StrCpy $0 "unknown"
|
||||
Goto lbl_done
|
||||
|
||||
lbl_processid:
|
||||
Push $R1
|
||||
Push $R2
|
||||
System::Call '*(i,i,i,i,i,i,i,i,i) i.R1'
|
||||
System::Call '*(i 0) i.R2'
|
||||
System::Call "advapi32::QueryServiceStatusEx(i r5, i ${SC_STATUS_PROCESS_INFO}, i $R1, i 36, i $R2) i"
|
||||
System::Call "*$R1(i,i,i,i,i,i,i, i .r0)"
|
||||
System::Free $R2
|
||||
System::Free $R1
|
||||
Pop $R2
|
||||
Pop $R1
|
||||
Goto lbl_done
|
||||
|
||||
lbl_good:
|
||||
StrCpy $0 "true"
|
||||
lbl_done:
|
||||
IntCmp $5 0 +2
|
||||
System::Call 'advapi32::CloseServiceHandle(i r5) n'
|
||||
IntCmp $4 0 +2
|
||||
System::Call 'advapi32::CloseServiceHandle(i r4) n'
|
||||
Pop $4
|
||||
Pop $3
|
||||
Pop $2
|
||||
Pop $1
|
||||
Exch 3
|
||||
Pop $5
|
||||
Pop $7
|
||||
Pop $6
|
||||
Exch $0
|
||||
!macroend
|
||||
|
||||
Function Service
|
||||
!insertmacro FUNC_SERVICE ""
|
||||
FunctionEnd
|
||||
|
||||
Function GetParam
|
||||
!insertmacro FUNC_GETPARAM
|
||||
FunctionEnd
|
||||
|
||||
!undef APITAG
|
||||
!endif
|
||||
BIN
builder/win/nsis/Plugins/StdUtils.dll
Normal file
Binary file not shown.
BIN
builder/win/nsis/Plugins/liteFirewallW.dll
Normal file
Binary file not shown.
BIN
builder/win/nsis/Plugins/nsProcess.dll
Normal file
Binary file not shown.
@@ -1,5 +0,0 @@
|
||||
CherryPy 8.1.2
|
||||
Official distribution: https://github.com/cherrypy/cherrypy/releases
|
||||
The folders 'tutorial', 'test' and 'scaffold' have been removed.
|
||||
This file has been added.
|
||||
|
||||
@@ -1,371 +0,0 @@
|
||||
"""CherryPy is a pythonic, object-oriented HTTP framework.
|
||||
|
||||
|
||||
CherryPy consists of not one, but four separate API layers.
|
||||
|
||||
The APPLICATION LAYER is the simplest. CherryPy applications are written as
|
||||
a tree of classes and methods, where each branch in the tree corresponds to
|
||||
a branch in the URL path. Each method is a 'page handler', which receives
|
||||
GET and POST params as keyword arguments, and returns or yields the (HTML)
|
||||
body of the response. The special method name 'index' is used for paths
|
||||
that end in a slash, and the special method name 'default' is used to
|
||||
handle multiple paths via a single handler. This layer also includes:
|
||||
|
||||
* the 'exposed' attribute (and cherrypy.expose)
|
||||
* cherrypy.quickstart()
|
||||
* _cp_config attributes
|
||||
* cherrypy.tools (including cherrypy.session)
|
||||
* cherrypy.url()
|
||||
|
||||
The ENVIRONMENT LAYER is used by developers at all levels. It provides
|
||||
information about the current request and response, plus the application
|
||||
and server environment, via a (default) set of top-level objects:
|
||||
|
||||
* cherrypy.request
|
||||
* cherrypy.response
|
||||
* cherrypy.engine
|
||||
* cherrypy.server
|
||||
* cherrypy.tree
|
||||
* cherrypy.config
|
||||
* cherrypy.thread_data
|
||||
* cherrypy.log
|
||||
* cherrypy.HTTPError, NotFound, and HTTPRedirect
|
||||
* cherrypy.lib
|
||||
|
||||
The EXTENSION LAYER allows advanced users to construct and share their own
|
||||
plugins. It consists of:
|
||||
|
||||
* Hook API
|
||||
* Tool API
|
||||
* Toolbox API
|
||||
* Dispatch API
|
||||
* Config Namespace API
|
||||
|
||||
Finally, there is the CORE LAYER, which uses the core API's to construct
|
||||
the default components which are available at higher layers. You can think
|
||||
of the default components as the 'reference implementation' for CherryPy.
|
||||
Megaframeworks (and advanced users) may replace the default components
|
||||
with customized or extended components. The core API's are:
|
||||
|
||||
* Application API
|
||||
* Engine API
|
||||
* Request API
|
||||
* Server API
|
||||
* WSGI API
|
||||
|
||||
These API's are described in the `CherryPy specification <https://bitbucket.org/cherrypy/cherrypy/wiki/CherryPySpec>`_.
|
||||
"""
|
||||
|
||||
try:
|
||||
import pkg_resources
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
from threading import local as _local
|
||||
|
||||
from cherrypy._cperror import HTTPError, HTTPRedirect, InternalRedirect # noqa
|
||||
from cherrypy._cperror import NotFound, CherryPyException, TimeoutError # noqa
|
||||
|
||||
from cherrypy import _cplogging
|
||||
|
||||
from cherrypy import _cpdispatch as dispatch # noqa
|
||||
|
||||
from cherrypy import _cptools
|
||||
from cherrypy._cptools import default_toolbox as tools, Tool
|
||||
|
||||
from cherrypy import _cprequest
|
||||
from cherrypy.lib import httputil as _httputil
|
||||
|
||||
from cherrypy import _cptree
|
||||
from cherrypy._cptree import Application # noqa
|
||||
from cherrypy import _cpwsgi as wsgi # noqa
|
||||
|
||||
from cherrypy import _cpserver
|
||||
from cherrypy import process
|
||||
try:
|
||||
from cherrypy.process import win32
|
||||
engine = win32.Win32Bus()
|
||||
engine.console_control_handler = win32.ConsoleCtrlHandler(engine)
|
||||
del win32
|
||||
except ImportError:
|
||||
engine = process.bus
|
||||
|
||||
|
||||
tree = _cptree.Tree()
|
||||
|
||||
|
||||
__version__ = '8.1.2'
|
||||
|
||||
|
||||
# Timeout monitor. We add two channels to the engine
|
||||
# to which cherrypy.Application will publish.
|
||||
engine.listeners['before_request'] = set()
|
||||
engine.listeners['after_request'] = set()
|
||||
|
||||
|
||||
class _TimeoutMonitor(process.plugins.Monitor):
|
||||
|
||||
def __init__(self, bus):
|
||||
self.servings = []
|
||||
process.plugins.Monitor.__init__(self, bus, self.run)
|
||||
|
||||
def before_request(self):
|
||||
self.servings.append((serving.request, serving.response))
|
||||
|
||||
def after_request(self):
|
||||
try:
|
||||
self.servings.remove((serving.request, serving.response))
|
||||
except ValueError:
|
||||
pass
|
||||
|
||||
def run(self):
|
||||
"""Check timeout on all responses. (Internal)"""
|
||||
for req, resp in self.servings:
|
||||
resp.check_timeout()
|
||||
engine.timeout_monitor = _TimeoutMonitor(engine)
|
||||
engine.timeout_monitor.subscribe()
|
||||
|
||||
engine.autoreload = process.plugins.Autoreloader(engine)
|
||||
engine.autoreload.subscribe()
|
||||
|
||||
engine.thread_manager = process.plugins.ThreadManager(engine)
|
||||
engine.thread_manager.subscribe()
|
||||
|
||||
engine.signal_handler = process.plugins.SignalHandler(engine)
|
||||
|
||||
|
||||
class _HandleSignalsPlugin(object):
|
||||
|
||||
"""Handle signals from other processes based on the configured
|
||||
platform handlers above."""
|
||||
|
||||
def __init__(self, bus):
|
||||
self.bus = bus
|
||||
|
||||
def subscribe(self):
|
||||
"""Add the handlers based on the platform"""
|
||||
if hasattr(self.bus, 'signal_handler'):
|
||||
self.bus.signal_handler.subscribe()
|
||||
if hasattr(self.bus, 'console_control_handler'):
|
||||
self.bus.console_control_handler.subscribe()
|
||||
|
||||
engine.signals = _HandleSignalsPlugin(engine)
|
||||
|
||||
|
||||
server = _cpserver.Server()
|
||||
server.subscribe()
|
||||
|
||||
|
||||
def quickstart(root=None, script_name='', config=None):
|
||||
"""Mount the given root, start the builtin server (and engine), then block.
|
||||
|
||||
root: an instance of a "controller class" (a collection of page handler
|
||||
methods) which represents the root of the application.
|
||||
script_name: a string containing the "mount point" of the application.
|
||||
This should start with a slash, and be the path portion of the URL
|
||||
at which to mount the given root. For example, if root.index() will
|
||||
handle requests to "http://www.example.com:8080/dept/app1/", then
|
||||
the script_name argument would be "/dept/app1".
|
||||
|
||||
It MUST NOT end in a slash. If the script_name refers to the root
|
||||
of the URI, it MUST be an empty string (not "/").
|
||||
config: a file or dict containing application config. If this contains
|
||||
a [global] section, those entries will be used in the global
|
||||
(site-wide) config.
|
||||
"""
|
||||
if config:
|
||||
_global_conf_alias.update(config)
|
||||
|
||||
tree.mount(root, script_name, config)
|
||||
|
||||
engine.signals.subscribe()
|
||||
engine.start()
|
||||
engine.block()
|
||||
|
||||
|
||||
class _Serving(_local):
|
||||
|
||||
"""An interface for registering request and response objects.
|
||||
|
||||
Rather than have a separate "thread local" object for the request and
|
||||
the response, this class works as a single threadlocal container for
|
||||
both objects (and any others which developers wish to define). In this
|
||||
way, we can easily dump those objects when we stop/start a new HTTP
|
||||
conversation, yet still refer to them as module-level globals in a
|
||||
thread-safe way.
|
||||
"""
|
||||
|
||||
request = _cprequest.Request(_httputil.Host('127.0.0.1', 80),
|
||||
_httputil.Host('127.0.0.1', 1111))
|
||||
"""
|
||||
The request object for the current thread. In the main thread,
|
||||
and any threads which are not receiving HTTP requests, this is None."""
|
||||
|
||||
response = _cprequest.Response()
|
||||
"""
|
||||
The response object for the current thread. In the main thread,
|
||||
and any threads which are not receiving HTTP requests, this is None."""
|
||||
|
||||
def load(self, request, response):
|
||||
self.request = request
|
||||
self.response = response
|
||||
|
||||
def clear(self):
|
||||
"""Remove all attributes of self."""
|
||||
self.__dict__.clear()
|
||||
|
||||
serving = _Serving()
|
||||
|
||||
|
||||
class _ThreadLocalProxy(object):
|
||||
|
||||
__slots__ = ['__attrname__', '__dict__']
|
||||
|
||||
def __init__(self, attrname):
|
||||
self.__attrname__ = attrname
|
||||
|
||||
def __getattr__(self, name):
|
||||
child = getattr(serving, self.__attrname__)
|
||||
return getattr(child, name)
|
||||
|
||||
def __setattr__(self, name, value):
|
||||
if name in ('__attrname__', ):
|
||||
object.__setattr__(self, name, value)
|
||||
else:
|
||||
child = getattr(serving, self.__attrname__)
|
||||
setattr(child, name, value)
|
||||
|
||||
def __delattr__(self, name):
|
||||
child = getattr(serving, self.__attrname__)
|
||||
delattr(child, name)
|
||||
|
||||
def _get_dict(self):
|
||||
child = getattr(serving, self.__attrname__)
|
||||
d = child.__class__.__dict__.copy()
|
||||
d.update(child.__dict__)
|
||||
return d
|
||||
__dict__ = property(_get_dict)
|
||||
|
||||
def __getitem__(self, key):
|
||||
child = getattr(serving, self.__attrname__)
|
||||
return child[key]
|
||||
|
||||
def __setitem__(self, key, value):
|
||||
child = getattr(serving, self.__attrname__)
|
||||
child[key] = value
|
||||
|
||||
def __delitem__(self, key):
|
||||
child = getattr(serving, self.__attrname__)
|
||||
del child[key]
|
||||
|
||||
def __contains__(self, key):
|
||||
child = getattr(serving, self.__attrname__)
|
||||
return key in child
|
||||
|
||||
def __len__(self):
|
||||
child = getattr(serving, self.__attrname__)
|
||||
return len(child)
|
||||
|
||||
def __nonzero__(self):
|
||||
child = getattr(serving, self.__attrname__)
|
||||
return bool(child)
|
||||
# Python 3
|
||||
__bool__ = __nonzero__
|
||||
|
||||
# Create request and response object (the same objects will be used
|
||||
# throughout the entire life of the webserver, but will redirect
|
||||
# to the "serving" object)
|
||||
request = _ThreadLocalProxy('request')
|
||||
response = _ThreadLocalProxy('response')
|
||||
|
||||
# Create thread_data object as a thread-specific all-purpose storage
|
||||
|
||||
|
||||
class _ThreadData(_local):
|
||||
|
||||
"""A container for thread-specific data."""
|
||||
thread_data = _ThreadData()
|
||||
|
||||
|
||||
# Monkeypatch pydoc to allow help() to go through the threadlocal proxy.
|
||||
# Jan 2007: no Googleable examples of anyone else replacing pydoc.resolve.
|
||||
# The only other way would be to change what is returned from type(request)
|
||||
# and that's not possible in pure Python (you'd have to fake ob_type).
|
||||
def _cherrypy_pydoc_resolve(thing, forceload=0):
|
||||
"""Given an object or a path to an object, get the object and its name."""
|
||||
if isinstance(thing, _ThreadLocalProxy):
|
||||
thing = getattr(serving, thing.__attrname__)
|
||||
return _pydoc._builtin_resolve(thing, forceload)
|
||||
|
||||
try:
|
||||
import pydoc as _pydoc
|
||||
_pydoc._builtin_resolve = _pydoc.resolve
|
||||
_pydoc.resolve = _cherrypy_pydoc_resolve
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
|
||||
class _GlobalLogManager(_cplogging.LogManager):
|
||||
|
||||
"""A site-wide LogManager; routes to app.log or global log as appropriate.
|
||||
|
||||
This :class:`LogManager<cherrypy._cplogging.LogManager>` implements
|
||||
cherrypy.log() and cherrypy.log.access(). If either
|
||||
function is called during a request, the message will be sent to the
|
||||
logger for the current Application. If they are called outside of a
|
||||
request, the message will be sent to the site-wide logger.
|
||||
"""
|
||||
|
||||
def __call__(self, *args, **kwargs):
|
||||
"""Log the given message to the app.log or global log as appropriate.
|
||||
"""
|
||||
# Do NOT use try/except here. See
|
||||
# https://github.com/cherrypy/cherrypy/issues/945
|
||||
if hasattr(request, 'app') and hasattr(request.app, 'log'):
|
||||
log = request.app.log
|
||||
else:
|
||||
log = self
|
||||
return log.error(*args, **kwargs)
|
||||
|
||||
def access(self):
|
||||
"""Log an access message to the app.log or global log as appropriate.
|
||||
"""
|
||||
try:
|
||||
return request.app.log.access()
|
||||
except AttributeError:
|
||||
return _cplogging.LogManager.access(self)
|
||||
|
||||
|
||||
log = _GlobalLogManager()
|
||||
# Set a default screen handler on the global log.
|
||||
log.screen = True
|
||||
log.error_file = ''
|
||||
# Using an access file makes CP about 10% slower. Leave off by default.
|
||||
log.access_file = ''
|
||||
|
||||
|
||||
def _buslog(msg, level):
|
||||
log.error(msg, 'ENGINE', severity=level)
|
||||
engine.subscribe('log', _buslog)
|
||||
|
||||
from cherrypy._helper import expose, popargs, url # noqa
|
||||
|
||||
# import _cpconfig last so it can reference other top-level objects
|
||||
from cherrypy import _cpconfig # noqa
|
||||
# Use _global_conf_alias so quickstart can use 'config' as an arg
|
||||
# without shadowing cherrypy.config.
|
||||
config = _global_conf_alias = _cpconfig.Config()
|
||||
config.defaults = {
|
||||
'tools.log_tracebacks.on': True,
|
||||
'tools.log_headers.on': True,
|
||||
'tools.trailing_slash.on': True,
|
||||
'tools.encode.on': True
|
||||
}
|
||||
config.namespaces['log'] = lambda k, v: setattr(log, k, v)
|
||||
config.namespaces['checker'] = lambda k, v: setattr(checker, k, v)
|
||||
# Must reset to get our defaults applied.
|
||||
config.reset()
|
||||
|
||||
from cherrypy import _cpchecker # noqa
|
||||
checker = _cpchecker.Checker()
|
||||
engine.subscribe('start', checker)
|
||||
@@ -1,4 +0,0 @@
|
||||
import cherrypy.daemon
|
||||
|
||||
if __name__ == '__main__':
|
||||
cherrypy.daemon.run()
|
||||
@@ -1,332 +0,0 @@
|
||||
import os
|
||||
import warnings
|
||||
|
||||
import cherrypy
|
||||
from cherrypy._cpcompat import iteritems, copykeys, builtins
|
||||
|
||||
|
||||
class Checker(object):
|
||||
|
||||
"""A checker for CherryPy sites and their mounted applications.
|
||||
|
||||
When this object is called at engine startup, it executes each
|
||||
of its own methods whose names start with ``check_``. If you wish
|
||||
to disable selected checks, simply add a line in your global
|
||||
config which sets the appropriate method to False::
|
||||
|
||||
[global]
|
||||
checker.check_skipped_app_config = False
|
||||
|
||||
You may also dynamically add or replace ``check_*`` methods in this way.
|
||||
"""
|
||||
|
||||
on = True
|
||||
"""If True (the default), run all checks; if False, turn off all checks."""
|
||||
|
||||
def __init__(self):
|
||||
self._populate_known_types()
|
||||
|
||||
def __call__(self):
|
||||
"""Run all check_* methods."""
|
||||
if self.on:
|
||||
oldformatwarning = warnings.formatwarning
|
||||
warnings.formatwarning = self.formatwarning
|
||||
try:
|
||||
for name in dir(self):
|
||||
if name.startswith('check_'):
|
||||
method = getattr(self, name)
|
||||
if method and hasattr(method, '__call__'):
|
||||
method()
|
||||
finally:
|
||||
warnings.formatwarning = oldformatwarning
|
||||
|
||||
def formatwarning(self, message, category, filename, lineno, line=None):
|
||||
"""Function to format a warning."""
|
||||
return 'CherryPy Checker:\n%s\n\n' % message
|
||||
|
||||
# This value should be set inside _cpconfig.
|
||||
global_config_contained_paths = False
|
||||
|
||||
def check_app_config_entries_dont_start_with_script_name(self):
|
||||
"""Check for Application config with sections that repeat script_name.
|
||||
"""
|
||||
for sn, app in cherrypy.tree.apps.items():
|
||||
if not isinstance(app, cherrypy.Application):
|
||||
continue
|
||||
if not app.config:
|
||||
continue
|
||||
if sn == '':
|
||||
continue
|
||||
sn_atoms = sn.strip('/').split('/')
|
||||
for key in app.config.keys():
|
||||
key_atoms = key.strip('/').split('/')
|
||||
if key_atoms[:len(sn_atoms)] == sn_atoms:
|
||||
warnings.warn(
|
||||
'The application mounted at %r has config '
|
||||
'entries that start with its script name: %r' % (sn,
|
||||
key))
|
||||
|
||||
def check_site_config_entries_in_app_config(self):
|
||||
"""Check for mounted Applications that have site-scoped config."""
|
||||
for sn, app in iteritems(cherrypy.tree.apps):
|
||||
if not isinstance(app, cherrypy.Application):
|
||||
continue
|
||||
|
||||
msg = []
|
||||
for section, entries in iteritems(app.config):
|
||||
if section.startswith('/'):
|
||||
for key, value in iteritems(entries):
|
||||
for n in ('engine.', 'server.', 'tree.', 'checker.'):
|
||||
if key.startswith(n):
|
||||
msg.append('[%s] %s = %s' %
|
||||
(section, key, value))
|
||||
if msg:
|
||||
msg.insert(0,
|
||||
'The application mounted at %r contains the '
|
||||
'following config entries, which are only allowed '
|
||||
'in site-wide config. Move them to a [global] '
|
||||
'section and pass them to cherrypy.config.update() '
|
||||
'instead of tree.mount().' % sn)
|
||||
warnings.warn(os.linesep.join(msg))
|
||||
|
||||
def check_skipped_app_config(self):
|
||||
"""Check for mounted Applications that have no config."""
|
||||
for sn, app in cherrypy.tree.apps.items():
|
||||
if not isinstance(app, cherrypy.Application):
|
||||
continue
|
||||
if not app.config:
|
||||
msg = 'The Application mounted at %r has an empty config.' % sn
|
||||
if self.global_config_contained_paths:
|
||||
msg += (' It looks like the config you passed to '
|
||||
'cherrypy.config.update() contains application-'
|
||||
'specific sections. You must explicitly pass '
|
||||
'application config via '
|
||||
'cherrypy.tree.mount(..., config=app_config)')
|
||||
warnings.warn(msg)
|
||||
return
|
||||
|
||||
def check_app_config_brackets(self):
|
||||
"""Check for Application config with extraneous brackets in section
|
||||
names.
|
||||
"""
|
||||
for sn, app in cherrypy.tree.apps.items():
|
||||
if not isinstance(app, cherrypy.Application):
|
||||
continue
|
||||
if not app.config:
|
||||
continue
|
||||
for key in app.config.keys():
|
||||
if key.startswith('[') or key.endswith(']'):
|
||||
warnings.warn(
|
||||
'The application mounted at %r has config '
|
||||
'section names with extraneous brackets: %r. '
|
||||
'Config *files* need brackets; config *dicts* '
|
||||
'(e.g. passed to tree.mount) do not.' % (sn, key))
|
||||
|
||||
def check_static_paths(self):
|
||||
"""Check Application config for incorrect static paths."""
|
||||
# Use the dummy Request object in the main thread.
|
||||
request = cherrypy.request
|
||||
for sn, app in cherrypy.tree.apps.items():
|
||||
if not isinstance(app, cherrypy.Application):
|
||||
continue
|
||||
request.app = app
|
||||
for section in app.config:
|
||||
# get_resource will populate request.config
|
||||
request.get_resource(section + '/dummy.html')
|
||||
conf = request.config.get
|
||||
|
||||
if conf('tools.staticdir.on', False):
|
||||
msg = ''
|
||||
root = conf('tools.staticdir.root')
|
||||
dir = conf('tools.staticdir.dir')
|
||||
if dir is None:
|
||||
msg = 'tools.staticdir.dir is not set.'
|
||||
else:
|
||||
fulldir = ''
|
||||
if os.path.isabs(dir):
|
||||
fulldir = dir
|
||||
if root:
|
||||
msg = ('dir is an absolute path, even '
|
||||
'though a root is provided.')
|
||||
testdir = os.path.join(root, dir[1:])
|
||||
if os.path.exists(testdir):
|
||||
msg += (
|
||||
'\nIf you meant to serve the '
|
||||
'filesystem folder at %r, remove the '
|
||||
'leading slash from dir.' % (testdir,))
|
||||
else:
|
||||
if not root:
|
||||
msg = (
|
||||
'dir is a relative path and '
|
||||
'no root provided.')
|
||||
else:
|
||||
fulldir = os.path.join(root, dir)
|
||||
if not os.path.isabs(fulldir):
|
||||
msg = ('%r is not an absolute path.' % (
|
||||
fulldir,))
|
||||
|
||||
if fulldir and not os.path.exists(fulldir):
|
||||
if msg:
|
||||
msg += '\n'
|
||||
msg += ('%r (root + dir) is not an existing '
|
||||
'filesystem path.' % fulldir)
|
||||
|
||||
if msg:
|
||||
warnings.warn('%s\nsection: [%s]\nroot: %r\ndir: %r'
|
||||
% (msg, section, root, dir))
|
||||
|
||||
# -------------------------- Compatibility -------------------------- #
|
||||
obsolete = {
|
||||
'server.default_content_type': 'tools.response_headers.headers',
|
||||
'log_access_file': 'log.access_file',
|
||||
'log_config_options': None,
|
||||
'log_file': 'log.error_file',
|
||||
'log_file_not_found': None,
|
||||
'log_request_headers': 'tools.log_headers.on',
|
||||
'log_to_screen': 'log.screen',
|
||||
'show_tracebacks': 'request.show_tracebacks',
|
||||
'throw_errors': 'request.throw_errors',
|
||||
'profiler.on': ('cherrypy.tree.mount(profiler.make_app('
|
||||
'cherrypy.Application(Root())))'),
|
||||
}
|
||||
|
||||
deprecated = {}
|
||||
|
||||
def _compat(self, config):
|
||||
"""Process config and warn on each obsolete or deprecated entry."""
|
||||
for section, conf in config.items():
|
||||
if isinstance(conf, dict):
|
||||
for k, v in conf.items():
|
||||
if k in self.obsolete:
|
||||
warnings.warn('%r is obsolete. Use %r instead.\n'
|
||||
'section: [%s]' %
|
||||
(k, self.obsolete[k], section))
|
||||
elif k in self.deprecated:
|
||||
warnings.warn('%r is deprecated. Use %r instead.\n'
|
||||
'section: [%s]' %
|
||||
(k, self.deprecated[k], section))
|
||||
else:
|
||||
if section in self.obsolete:
|
||||
warnings.warn('%r is obsolete. Use %r instead.'
|
||||
% (section, self.obsolete[section]))
|
||||
elif section in self.deprecated:
|
||||
warnings.warn('%r is deprecated. Use %r instead.'
|
||||
% (section, self.deprecated[section]))
|
||||
|
||||
def check_compatibility(self):
|
||||
"""Process config and warn on each obsolete or deprecated entry."""
|
||||
self._compat(cherrypy.config)
|
||||
for sn, app in cherrypy.tree.apps.items():
|
||||
if not isinstance(app, cherrypy.Application):
|
||||
continue
|
||||
self._compat(app.config)
|
||||
|
||||
# ------------------------ Known Namespaces ------------------------ #
|
||||
extra_config_namespaces = []
|
||||
|
||||
def _known_ns(self, app):
|
||||
ns = ['wsgi']
|
||||
ns.extend(copykeys(app.toolboxes))
|
||||
ns.extend(copykeys(app.namespaces))
|
||||
ns.extend(copykeys(app.request_class.namespaces))
|
||||
ns.extend(copykeys(cherrypy.config.namespaces))
|
||||
ns += self.extra_config_namespaces
|
||||
|
||||
for section, conf in app.config.items():
|
||||
is_path_section = section.startswith('/')
|
||||
if is_path_section and isinstance(conf, dict):
|
||||
for k, v in conf.items():
|
||||
atoms = k.split('.')
|
||||
if len(atoms) > 1:
|
||||
if atoms[0] not in ns:
|
||||
# Spit out a special warning if a known
|
||||
# namespace is preceded by "cherrypy."
|
||||
if atoms[0] == 'cherrypy' and atoms[1] in ns:
|
||||
msg = (
|
||||
'The config entry %r is invalid; '
|
||||
'try %r instead.\nsection: [%s]'
|
||||
% (k, '.'.join(atoms[1:]), section))
|
||||
else:
|
||||
msg = (
|
||||
'The config entry %r is invalid, '
|
||||
'because the %r config namespace '
|
||||
'is unknown.\n'
|
||||
'section: [%s]' % (k, atoms[0], section))
|
||||
warnings.warn(msg)
|
||||
elif atoms[0] == 'tools':
|
||||
if atoms[1] not in dir(cherrypy.tools):
|
||||
msg = (
|
||||
'The config entry %r may be invalid, '
|
||||
'because the %r tool was not found.\n'
|
||||
'section: [%s]' % (k, atoms[1], section))
|
||||
warnings.warn(msg)
|
||||
|
||||
def check_config_namespaces(self):
|
||||
"""Process config and warn on each unknown config namespace."""
|
||||
for sn, app in cherrypy.tree.apps.items():
|
||||
if not isinstance(app, cherrypy.Application):
|
||||
continue
|
||||
self._known_ns(app)
|
||||
|
||||
# -------------------------- Config Types -------------------------- #
|
||||
known_config_types = {}
|
||||
|
||||
def _populate_known_types(self):
|
||||
b = [x for x in vars(builtins).values()
|
||||
if type(x) is type(str)]
|
||||
|
||||
def traverse(obj, namespace):
|
||||
for name in dir(obj):
|
||||
# Hack for 3.2's warning about body_params
|
||||
if name == 'body_params':
|
||||
continue
|
||||
vtype = type(getattr(obj, name, None))
|
||||
if vtype in b:
|
||||
self.known_config_types[namespace + '.' + name] = vtype
|
||||
|
||||
traverse(cherrypy.request, 'request')
|
||||
traverse(cherrypy.response, 'response')
|
||||
traverse(cherrypy.server, 'server')
|
||||
traverse(cherrypy.engine, 'engine')
|
||||
traverse(cherrypy.log, 'log')
|
||||
|
||||
def _known_types(self, config):
|
||||
msg = ('The config entry %r in section %r is of type %r, '
|
||||
'which does not match the expected type %r.')
|
||||
|
||||
for section, conf in config.items():
|
||||
if isinstance(conf, dict):
|
||||
for k, v in conf.items():
|
||||
if v is not None:
|
||||
expected_type = self.known_config_types.get(k, None)
|
||||
vtype = type(v)
|
||||
if expected_type and vtype != expected_type:
|
||||
warnings.warn(msg % (k, section, vtype.__name__,
|
||||
expected_type.__name__))
|
||||
else:
|
||||
k, v = section, conf
|
||||
if v is not None:
|
||||
expected_type = self.known_config_types.get(k, None)
|
||||
vtype = type(v)
|
||||
if expected_type and vtype != expected_type:
|
||||
warnings.warn(msg % (k, section, vtype.__name__,
|
||||
expected_type.__name__))
|
||||
|
||||
def check_config_types(self):
|
||||
"""Assert that config values are of the same type as default values."""
|
||||
self._known_types(cherrypy.config)
|
||||
for sn, app in cherrypy.tree.apps.items():
|
||||
if not isinstance(app, cherrypy.Application):
|
||||
continue
|
||||
self._known_types(app.config)
|
||||
|
||||
# -------------------- Specific config warnings -------------------- #
|
||||
def check_localhost(self):
|
||||
"""Warn if any socket_host is 'localhost'. See #711."""
|
||||
for k, v in cherrypy.config.items():
|
||||
if k == 'server.socket_host' and v == 'localhost':
|
||||
warnings.warn("The use of 'localhost' as a socket host can "
|
||||
'cause problems on newer systems, since '
|
||||
"'localhost' can map to either an IPv4 or an "
|
||||
"IPv6 address. You should use '127.0.0.1' "
|
||||
"or '[::1]' instead.")
|
||||
@@ -1,334 +0,0 @@
|
||||
"""Compatibility code for using CherryPy with various versions of Python.
|
||||
|
||||
CherryPy 3.2 is compatible with Python versions 2.6+. This module provides a
|
||||
useful abstraction over the differences between Python versions, sometimes by
|
||||
preferring a newer idiom, sometimes an older one, and sometimes a custom one.
|
||||
|
||||
In particular, Python 2 uses str and '' for byte strings, while Python 3
|
||||
uses str and '' for unicode strings. We will call each of these the 'native
|
||||
string' type for each version. Because of this major difference, this module
|
||||
provides
|
||||
two functions: 'ntob', which translates native strings (of type 'str') into
|
||||
byte strings regardless of Python version, and 'ntou', which translates native
|
||||
strings to unicode strings. This also provides a 'BytesIO' name for dealing
|
||||
specifically with bytes, and a 'StringIO' name for dealing with native strings.
|
||||
It also provides a 'base64_decode' function with native strings as input and
|
||||
output.
|
||||
"""
|
||||
|
||||
import binascii
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
import threading
|
||||
|
||||
import six
|
||||
|
||||
if six.PY3:
|
||||
def ntob(n, encoding='ISO-8859-1'):
|
||||
"""Return the given native string as a byte string in the given
|
||||
encoding.
|
||||
"""
|
||||
assert_native(n)
|
||||
# In Python 3, the native string type is unicode
|
||||
return n.encode(encoding)
|
||||
|
||||
def ntou(n, encoding='ISO-8859-1'):
|
||||
"""Return the given native string as a unicode string with the given
|
||||
encoding.
|
||||
"""
|
||||
assert_native(n)
|
||||
# In Python 3, the native string type is unicode
|
||||
return n
|
||||
|
||||
def tonative(n, encoding='ISO-8859-1'):
|
||||
"""Return the given string as a native string in the given encoding."""
|
||||
# In Python 3, the native string type is unicode
|
||||
if isinstance(n, bytes):
|
||||
return n.decode(encoding)
|
||||
return n
|
||||
else:
|
||||
# Python 2
|
||||
def ntob(n, encoding='ISO-8859-1'):
|
||||
"""Return the given native string as a byte string in the given
|
||||
encoding.
|
||||
"""
|
||||
assert_native(n)
|
||||
# In Python 2, the native string type is bytes. Assume it's already
|
||||
# in the given encoding, which for ISO-8859-1 is almost always what
|
||||
# was intended.
|
||||
return n
|
||||
|
||||
def ntou(n, encoding='ISO-8859-1'):
|
||||
"""Return the given native string as a unicode string with the given
|
||||
encoding.
|
||||
"""
|
||||
assert_native(n)
|
||||
# In Python 2, the native string type is bytes.
|
||||
# First, check for the special encoding 'escape'. The test suite uses
|
||||
# this to signal that it wants to pass a string with embedded \uXXXX
|
||||
# escapes, but without having to prefix it with u'' for Python 2,
|
||||
# but no prefix for Python 3.
|
||||
if encoding == 'escape':
|
||||
return unicode(
|
||||
re.sub(r'\\u([0-9a-zA-Z]{4})',
|
||||
lambda m: unichr(int(m.group(1), 16)),
|
||||
n.decode('ISO-8859-1')))
|
||||
# Assume it's already in the given encoding, which for ISO-8859-1
|
||||
# is almost always what was intended.
|
||||
return n.decode(encoding)
|
||||
|
||||
def tonative(n, encoding='ISO-8859-1'):
|
||||
"""Return the given string as a native string in the given encoding."""
|
||||
# In Python 2, the native string type is bytes.
|
||||
if isinstance(n, unicode):
|
||||
return n.encode(encoding)
|
||||
return n
|
||||
|
||||
|
||||
def assert_native(n):
|
||||
if not isinstance(n, str):
|
||||
raise TypeError('n must be a native str (got %s)' % type(n).__name__)
|
||||
|
||||
try:
|
||||
# Python 3.1+
|
||||
from base64 import decodebytes as _base64_decodebytes
|
||||
except ImportError:
|
||||
# Python 3.0-
|
||||
# since CherryPy claims compability with Python 2.3, we must use
|
||||
# the legacy API of base64
|
||||
from base64 import decodestring as _base64_decodebytes
|
||||
|
||||
|
||||
def base64_decode(n, encoding='ISO-8859-1'):
|
||||
"""Return the native string base64-decoded (as a native string)."""
|
||||
if isinstance(n, six.text_type):
|
||||
b = n.encode(encoding)
|
||||
else:
|
||||
b = n
|
||||
b = _base64_decodebytes(b)
|
||||
if str is six.text_type:
|
||||
return b.decode(encoding)
|
||||
else:
|
||||
return b
|
||||
|
||||
|
||||
try:
|
||||
sorted = sorted
|
||||
except NameError:
|
||||
def sorted(i):
|
||||
i = i[:]
|
||||
i.sort()
|
||||
return i
|
||||
|
||||
try:
|
||||
reversed = reversed
|
||||
except NameError:
|
||||
def reversed(x):
|
||||
i = len(x)
|
||||
while i > 0:
|
||||
i -= 1
|
||||
yield x[i]
|
||||
|
||||
try:
|
||||
# Python 3
|
||||
from urllib.parse import urljoin, urlencode
|
||||
from urllib.parse import quote, quote_plus
|
||||
from urllib.request import unquote, urlopen
|
||||
from urllib.request import parse_http_list, parse_keqv_list
|
||||
except ImportError:
|
||||
# Python 2
|
||||
from urlparse import urljoin # noqa
|
||||
from urllib import urlencode, urlopen # noqa
|
||||
from urllib import quote, quote_plus # noqa
|
||||
from urllib import unquote # noqa
|
||||
from urllib2 import parse_http_list, parse_keqv_list # noqa
|
||||
|
||||
try:
|
||||
dict.iteritems
|
||||
# Python 2
|
||||
iteritems = lambda d: d.iteritems()
|
||||
copyitems = lambda d: d.items()
|
||||
except AttributeError:
|
||||
# Python 3
|
||||
iteritems = lambda d: d.items()
|
||||
copyitems = lambda d: list(d.items())
|
||||
|
||||
try:
|
||||
dict.iterkeys
|
||||
# Python 2
|
||||
iterkeys = lambda d: d.iterkeys()
|
||||
copykeys = lambda d: d.keys()
|
||||
except AttributeError:
|
||||
# Python 3
|
||||
iterkeys = lambda d: d.keys()
|
||||
copykeys = lambda d: list(d.keys())
|
||||
|
||||
try:
|
||||
dict.itervalues
|
||||
# Python 2
|
||||
itervalues = lambda d: d.itervalues()
|
||||
copyvalues = lambda d: d.values()
|
||||
except AttributeError:
|
||||
# Python 3
|
||||
itervalues = lambda d: d.values()
|
||||
copyvalues = lambda d: list(d.values())
|
||||
|
||||
try:
|
||||
# Python 3
|
||||
import builtins
|
||||
except ImportError:
|
||||
# Python 2
|
||||
import __builtin__ as builtins # noqa
|
||||
|
||||
try:
|
||||
# Python 2. We try Python 2 first clients on Python 2
|
||||
# don't try to import the 'http' module from cherrypy.lib
|
||||
from Cookie import SimpleCookie, CookieError
|
||||
from httplib import BadStatusLine, HTTPConnection, IncompleteRead
|
||||
from httplib import NotConnected
|
||||
from BaseHTTPServer import BaseHTTPRequestHandler
|
||||
except ImportError:
|
||||
# Python 3
|
||||
from http.cookies import SimpleCookie, CookieError # noqa
|
||||
from http.client import BadStatusLine, HTTPConnection, IncompleteRead # noqa
|
||||
from http.client import NotConnected # noqa
|
||||
from http.server import BaseHTTPRequestHandler # noqa
|
||||
|
||||
# Some platforms don't expose HTTPSConnection, so handle it separately
|
||||
if six.PY3:
|
||||
try:
|
||||
from http.client import HTTPSConnection
|
||||
except ImportError:
|
||||
# Some platforms which don't have SSL don't expose HTTPSConnection
|
||||
HTTPSConnection = None
|
||||
else:
|
||||
try:
|
||||
from httplib import HTTPSConnection
|
||||
except ImportError:
|
||||
HTTPSConnection = None
|
||||
|
||||
try:
|
||||
# Python 2
|
||||
xrange = xrange
|
||||
except NameError:
|
||||
# Python 3
|
||||
xrange = range
|
||||
|
||||
try:
|
||||
# Python 3
|
||||
from urllib.parse import unquote as parse_unquote
|
||||
|
||||
def unquote_qs(atom, encoding, errors='strict'):
|
||||
return parse_unquote(
|
||||
atom.replace('+', ' '),
|
||||
encoding=encoding,
|
||||
errors=errors)
|
||||
except ImportError:
|
||||
# Python 2
|
||||
from urllib import unquote as parse_unquote
|
||||
|
||||
def unquote_qs(atom, encoding, errors='strict'):
|
||||
return parse_unquote(atom.replace('+', ' ')).decode(encoding, errors)
|
||||
|
||||
try:
|
||||
# Prefer simplejson, which is usually more advanced than the builtin
|
||||
# module.
|
||||
import simplejson as json
|
||||
json_decode = json.JSONDecoder().decode
|
||||
_json_encode = json.JSONEncoder().iterencode
|
||||
except ImportError:
|
||||
if sys.version_info >= (2, 6):
|
||||
# Python >=2.6 : json is part of the standard library
|
||||
import json
|
||||
json_decode = json.JSONDecoder().decode
|
||||
_json_encode = json.JSONEncoder().iterencode
|
||||
else:
|
||||
json = None
|
||||
|
||||
def json_decode(s):
|
||||
raise ValueError('No JSON library is available')
|
||||
|
||||
def _json_encode(s):
|
||||
raise ValueError('No JSON library is available')
|
||||
finally:
|
||||
if json and six.PY3:
|
||||
# The two Python 3 implementations (simplejson/json)
|
||||
# outputs str. We need bytes.
|
||||
def json_encode(value):
|
||||
for chunk in _json_encode(value):
|
||||
yield chunk.encode('utf8')
|
||||
else:
|
||||
json_encode = _json_encode
|
||||
|
||||
text_or_bytes = six.text_type, six.binary_type
|
||||
|
||||
try:
|
||||
import cPickle as pickle
|
||||
except ImportError:
|
||||
# In Python 2, pickle is a Python version.
|
||||
# In Python 3, pickle is the sped-up C version.
|
||||
import pickle # noqa
|
||||
|
||||
def random20():
|
||||
return binascii.hexlify(os.urandom(20)).decode('ascii')
|
||||
|
||||
try:
|
||||
from _thread import get_ident as get_thread_ident
|
||||
except ImportError:
|
||||
from thread import get_ident as get_thread_ident # noqa
|
||||
|
||||
try:
|
||||
# Python 3
|
||||
next = next
|
||||
except NameError:
|
||||
# Python 2
|
||||
def next(i):
|
||||
return i.next()
|
||||
|
||||
if sys.version_info >= (3, 3):
|
||||
Timer = threading.Timer
|
||||
Event = threading.Event
|
||||
else:
|
||||
# Python 3.2 and earlier
|
||||
Timer = threading._Timer
|
||||
Event = threading._Event
|
||||
|
||||
try:
|
||||
# Python 2.7+
|
||||
from subprocess import _args_from_interpreter_flags
|
||||
except ImportError:
|
||||
def _args_from_interpreter_flags():
|
||||
"""Tries to reconstruct original interpreter args from sys.flags for Python 2.6
|
||||
|
||||
Backported from Python 3.5. Aims to return a list of
|
||||
command-line arguments reproducing the current
|
||||
settings in sys.flags and sys.warnoptions.
|
||||
"""
|
||||
flag_opt_map = {
|
||||
'debug': 'd',
|
||||
# 'inspect': 'i',
|
||||
# 'interactive': 'i',
|
||||
'optimize': 'O',
|
||||
'dont_write_bytecode': 'B',
|
||||
'no_user_site': 's',
|
||||
'no_site': 'S',
|
||||
'ignore_environment': 'E',
|
||||
'verbose': 'v',
|
||||
'bytes_warning': 'b',
|
||||
'quiet': 'q',
|
||||
'hash_randomization': 'R',
|
||||
'py3k_warning': '3',
|
||||
}
|
||||
|
||||
args = []
|
||||
for flag, opt in flag_opt_map.items():
|
||||
v = getattr(sys.flags, flag)
|
||||
if v > 0:
|
||||
if flag == 'hash_randomization':
|
||||
v = 1 # Handle specification of an exact seed
|
||||
args.append('-' + opt * v)
|
||||
for opt in sys.warnoptions:
|
||||
args.append('-W' + opt)
|
||||
|
||||
return args
|
||||
@@ -1,303 +0,0 @@
|
||||
"""
|
||||
Configuration system for CherryPy.
|
||||
|
||||
Configuration in CherryPy is implemented via dictionaries. Keys are strings
|
||||
which name the mapped value, which may be of any type.
|
||||
|
||||
|
||||
Architecture
|
||||
------------
|
||||
|
||||
CherryPy Requests are part of an Application, which runs in a global context,
|
||||
and configuration data may apply to any of those three scopes:
|
||||
|
||||
Global
|
||||
Configuration entries which apply everywhere are stored in
|
||||
cherrypy.config.
|
||||
|
||||
Application
|
||||
Entries which apply to each mounted application are stored
|
||||
on the Application object itself, as 'app.config'. This is a two-level
|
||||
dict where each key is a path, or "relative URL" (for example, "/" or
|
||||
"/path/to/my/page"), and each value is a config dict. Usually, this
|
||||
data is provided in the call to tree.mount(root(), config=conf),
|
||||
although you may also use app.merge(conf).
|
||||
|
||||
Request
|
||||
Each Request object possesses a single 'Request.config' dict.
|
||||
Early in the request process, this dict is populated by merging global
|
||||
config entries, Application entries (whose path equals or is a parent
|
||||
of Request.path_info), and any config acquired while looking up the
|
||||
page handler (see next).
|
||||
|
||||
|
||||
Declaration
|
||||
-----------
|
||||
|
||||
Configuration data may be supplied as a Python dictionary, as a filename,
|
||||
or as an open file object. When you supply a filename or file, CherryPy
|
||||
uses Python's builtin ConfigParser; you declare Application config by
|
||||
writing each path as a section header::
|
||||
|
||||
[/path/to/my/page]
|
||||
request.stream = True
|
||||
|
||||
To declare global configuration entries, place them in a [global] section.
|
||||
|
||||
You may also declare config entries directly on the classes and methods
|
||||
(page handlers) that make up your CherryPy application via the ``_cp_config``
|
||||
attribute, set with the ``cherrypy.config`` decorator. For example::
|
||||
|
||||
@cherrypy.config(**{'tools.gzip.on': True})
|
||||
class Demo:
|
||||
|
||||
@cherrypy.expose
|
||||
@cherrypy.config(**{'request.show_tracebacks': False})
|
||||
def index(self):
|
||||
return "Hello world"
|
||||
|
||||
.. note::
|
||||
|
||||
This behavior is only guaranteed for the default dispatcher.
|
||||
Other dispatchers may have different restrictions on where
|
||||
you can attach config attributes.
|
||||
|
||||
|
||||
Namespaces
|
||||
----------
|
||||
|
||||
Configuration keys are separated into namespaces by the first "." in the key.
|
||||
Current namespaces:
|
||||
|
||||
engine
|
||||
Controls the 'application engine', including autoreload.
|
||||
These can only be declared in the global config.
|
||||
|
||||
tree
|
||||
Grafts cherrypy.Application objects onto cherrypy.tree.
|
||||
These can only be declared in the global config.
|
||||
|
||||
hooks
|
||||
Declares additional request-processing functions.
|
||||
|
||||
log
|
||||
Configures the logging for each application.
|
||||
These can only be declared in the global or / config.
|
||||
|
||||
request
|
||||
Adds attributes to each Request.
|
||||
|
||||
response
|
||||
Adds attributes to each Response.
|
||||
|
||||
server
|
||||
Controls the default HTTP server via cherrypy.server.
|
||||
These can only be declared in the global config.
|
||||
|
||||
tools
|
||||
Runs and configures additional request-processing packages.
|
||||
|
||||
wsgi
|
||||
Adds WSGI middleware to an Application's "pipeline".
|
||||
These can only be declared in the app's root config ("/").
|
||||
|
||||
checker
|
||||
Controls the 'checker', which looks for common errors in
|
||||
app state (including config) when the engine starts.
|
||||
Global config only.
|
||||
|
||||
The only key that does not exist in a namespace is the "environment" entry.
|
||||
This special entry 'imports' other config entries from a template stored in
|
||||
cherrypy._cpconfig.environments[environment]. It only applies to the global
|
||||
config, and only when you use cherrypy.config.update.
|
||||
|
||||
You can define your own namespaces to be called at the Global, Application,
|
||||
or Request level, by adding a named handler to cherrypy.config.namespaces,
|
||||
app.namespaces, or app.request_class.namespaces. The name can
|
||||
be any string, and the handler must be either a callable or a (Python 2.5
|
||||
style) context manager.
|
||||
"""
|
||||
|
||||
import cherrypy
|
||||
from cherrypy._cpcompat import text_or_bytes
|
||||
from cherrypy.lib import reprconf
|
||||
|
||||
# Deprecated in CherryPy 3.2--remove in 3.3
|
||||
NamespaceSet = reprconf.NamespaceSet
|
||||
|
||||
|
||||
def merge(base, other):
|
||||
"""Merge one app config (from a dict, file, or filename) into another.
|
||||
|
||||
If the given config is a filename, it will be appended to
|
||||
the list of files to monitor for "autoreload" changes.
|
||||
"""
|
||||
if isinstance(other, text_or_bytes):
|
||||
cherrypy.engine.autoreload.files.add(other)
|
||||
|
||||
# Load other into base
|
||||
for section, value_map in reprconf.as_dict(other).items():
|
||||
if not isinstance(value_map, dict):
|
||||
raise ValueError(
|
||||
'Application config must include section headers, but the '
|
||||
"config you tried to merge doesn't have any sections. "
|
||||
'Wrap your config in another dict with paths as section '
|
||||
"headers, for example: {'/': config}.")
|
||||
base.setdefault(section, {}).update(value_map)
|
||||
|
||||
|
||||
class Config(reprconf.Config):
|
||||
|
||||
"""The 'global' configuration data for the entire CherryPy process."""
|
||||
|
||||
def update(self, config):
|
||||
"""Update self from a dict, file or filename."""
|
||||
if isinstance(config, text_or_bytes):
|
||||
# Filename
|
||||
cherrypy.engine.autoreload.files.add(config)
|
||||
reprconf.Config.update(self, config)
|
||||
|
||||
def _apply(self, config):
|
||||
"""Update self from a dict."""
|
||||
if isinstance(config.get('global'), dict):
|
||||
if len(config) > 1:
|
||||
cherrypy.checker.global_config_contained_paths = True
|
||||
config = config['global']
|
||||
if 'tools.staticdir.dir' in config:
|
||||
config['tools.staticdir.section'] = 'global'
|
||||
reprconf.Config._apply(self, config)
|
||||
|
||||
@staticmethod
|
||||
def __call__(*args, **kwargs):
|
||||
"""Decorator for page handlers to set _cp_config."""
|
||||
if args:
|
||||
raise TypeError(
|
||||
'The cherrypy.config decorator does not accept positional '
|
||||
'arguments; you must use keyword arguments.')
|
||||
|
||||
def tool_decorator(f):
|
||||
_Vars(f).setdefault('_cp_config', {}).update(kwargs)
|
||||
return f
|
||||
return tool_decorator
|
||||
|
||||
|
||||
class _Vars(object):
|
||||
"""
|
||||
Adapter that allows setting a default attribute on a function
|
||||
or class.
|
||||
"""
|
||||
def __init__(self, target):
|
||||
self.target = target
|
||||
|
||||
def setdefault(self, key, default):
|
||||
if not hasattr(self.target, key):
|
||||
setattr(self.target, key, default)
|
||||
return getattr(self.target, key)
|
||||
|
||||
|
||||
# Sphinx begin config.environments
|
||||
Config.environments = environments = {
|
||||
'staging': {
|
||||
'engine.autoreload.on': False,
|
||||
'checker.on': False,
|
||||
'tools.log_headers.on': False,
|
||||
'request.show_tracebacks': False,
|
||||
'request.show_mismatched_params': False,
|
||||
},
|
||||
'production': {
|
||||
'engine.autoreload.on': False,
|
||||
'checker.on': False,
|
||||
'tools.log_headers.on': False,
|
||||
'request.show_tracebacks': False,
|
||||
'request.show_mismatched_params': False,
|
||||
'log.screen': False,
|
||||
},
|
||||
'embedded': {
|
||||
# For use with CherryPy embedded in another deployment stack.
|
||||
'engine.autoreload.on': False,
|
||||
'checker.on': False,
|
||||
'tools.log_headers.on': False,
|
||||
'request.show_tracebacks': False,
|
||||
'request.show_mismatched_params': False,
|
||||
'log.screen': False,
|
||||
'engine.SIGHUP': None,
|
||||
'engine.SIGTERM': None,
|
||||
},
|
||||
'test_suite': {
|
||||
'engine.autoreload.on': False,
|
||||
'checker.on': False,
|
||||
'tools.log_headers.on': False,
|
||||
'request.show_tracebacks': True,
|
||||
'request.show_mismatched_params': True,
|
||||
'log.screen': False,
|
||||
},
|
||||
}
|
||||
# Sphinx end config.environments
|
||||
|
||||
|
||||
def _server_namespace_handler(k, v):
|
||||
"""Config handler for the "server" namespace."""
|
||||
atoms = k.split('.', 1)
|
||||
if len(atoms) > 1:
|
||||
# Special-case config keys of the form 'server.servername.socket_port'
|
||||
# to configure additional HTTP servers.
|
||||
if not hasattr(cherrypy, 'servers'):
|
||||
cherrypy.servers = {}
|
||||
|
||||
servername, k = atoms
|
||||
if servername not in cherrypy.servers:
|
||||
from cherrypy import _cpserver
|
||||
cherrypy.servers[servername] = _cpserver.Server()
|
||||
# On by default, but 'on = False' can unsubscribe it (see below).
|
||||
cherrypy.servers[servername].subscribe()
|
||||
|
||||
if k == 'on':
|
||||
if v:
|
||||
cherrypy.servers[servername].subscribe()
|
||||
else:
|
||||
cherrypy.servers[servername].unsubscribe()
|
||||
else:
|
||||
setattr(cherrypy.servers[servername], k, v)
|
||||
else:
|
||||
setattr(cherrypy.server, k, v)
|
||||
Config.namespaces['server'] = _server_namespace_handler
|
||||
|
||||
|
||||
def _engine_namespace_handler(k, v):
|
||||
"""Config handler for the "engine" namespace."""
|
||||
engine = cherrypy.engine
|
||||
|
||||
if k == 'SIGHUP':
|
||||
engine.subscribe('SIGHUP', v)
|
||||
elif k == 'SIGTERM':
|
||||
engine.subscribe('SIGTERM', v)
|
||||
elif '.' in k:
|
||||
plugin, attrname = k.split('.', 1)
|
||||
plugin = getattr(engine, plugin)
|
||||
if attrname == 'on':
|
||||
if v and hasattr(getattr(plugin, 'subscribe', None), '__call__'):
|
||||
plugin.subscribe()
|
||||
return
|
||||
elif (
|
||||
(not v) and
|
||||
hasattr(getattr(plugin, 'unsubscribe', None), '__call__')
|
||||
):
|
||||
plugin.unsubscribe()
|
||||
return
|
||||
setattr(plugin, attrname, v)
|
||||
else:
|
||||
setattr(engine, k, v)
|
||||
Config.namespaces['engine'] = _engine_namespace_handler
|
||||
|
||||
|
||||
def _tree_namespace_handler(k, v):
|
||||
"""Namespace handler for the 'tree' config namespace."""
|
||||
if isinstance(v, dict):
|
||||
for script_name, app in v.items():
|
||||
cherrypy.tree.graft(app, script_name)
|
||||
msg = 'Mounted: %s on %s' % (app, script_name or '/')
|
||||
cherrypy.engine.log(msg)
|
||||
else:
|
||||
cherrypy.tree.graft(v, v.script_name)
|
||||
cherrypy.engine.log('Mounted: %s on %s' % (v, v.script_name or '/'))
|
||||
Config.namespaces['tree'] = _tree_namespace_handler
|
||||
@@ -1,685 +0,0 @@
|
||||
"""CherryPy dispatchers.
|
||||
|
||||
A 'dispatcher' is the object which looks up the 'page handler' callable
|
||||
and collects config for the current request based on the path_info, other
|
||||
request attributes, and the application architecture. The core calls the
|
||||
dispatcher as early as possible, passing it a 'path_info' argument.
|
||||
|
||||
The default dispatcher discovers the page handler by matching path_info
|
||||
to a hierarchical arrangement of objects, starting at request.app.root.
|
||||
"""
|
||||
|
||||
import string
|
||||
import sys
|
||||
import types
|
||||
try:
|
||||
classtype = (type, types.ClassType)
|
||||
except AttributeError:
|
||||
classtype = type
|
||||
|
||||
import cherrypy
|
||||
|
||||
|
||||
class PageHandler(object):
|
||||
|
||||
"""Callable which sets response.body."""
|
||||
|
||||
def __init__(self, callable, *args, **kwargs):
|
||||
self.callable = callable
|
||||
self.args = args
|
||||
self.kwargs = kwargs
|
||||
|
||||
def get_args(self):
|
||||
return cherrypy.serving.request.args
|
||||
|
||||
def set_args(self, args):
|
||||
cherrypy.serving.request.args = args
|
||||
return cherrypy.serving.request.args
|
||||
|
||||
args = property(
|
||||
get_args,
|
||||
set_args,
|
||||
doc='The ordered args should be accessible from post dispatch hooks'
|
||||
)
|
||||
|
||||
def get_kwargs(self):
|
||||
return cherrypy.serving.request.kwargs
|
||||
|
||||
def set_kwargs(self, kwargs):
|
||||
cherrypy.serving.request.kwargs = kwargs
|
||||
return cherrypy.serving.request.kwargs
|
||||
|
||||
kwargs = property(
|
||||
get_kwargs,
|
||||
set_kwargs,
|
||||
doc='The named kwargs should be accessible from post dispatch hooks'
|
||||
)
|
||||
|
||||
def __call__(self):
|
||||
try:
|
||||
return self.callable(*self.args, **self.kwargs)
|
||||
except TypeError:
|
||||
x = sys.exc_info()[1]
|
||||
try:
|
||||
test_callable_spec(self.callable, self.args, self.kwargs)
|
||||
except cherrypy.HTTPError:
|
||||
raise sys.exc_info()[1]
|
||||
except:
|
||||
raise x
|
||||
raise
|
||||
|
||||
|
||||
def test_callable_spec(callable, callable_args, callable_kwargs):
|
||||
"""
|
||||
Inspect callable and test to see if the given args are suitable for it.
|
||||
|
||||
When an error occurs during the handler's invoking stage there are 2
|
||||
erroneous cases:
|
||||
1. Too many parameters passed to a function which doesn't define
|
||||
one of *args or **kwargs.
|
||||
2. Too little parameters are passed to the function.
|
||||
|
||||
There are 3 sources of parameters to a cherrypy handler.
|
||||
1. query string parameters are passed as keyword parameters to the
|
||||
handler.
|
||||
2. body parameters are also passed as keyword parameters.
|
||||
3. when partial matching occurs, the final path atoms are passed as
|
||||
positional args.
|
||||
Both the query string and path atoms are part of the URI. If they are
|
||||
incorrect, then a 404 Not Found should be raised. Conversely the body
|
||||
parameters are part of the request; if they are invalid a 400 Bad Request.
|
||||
"""
|
||||
show_mismatched_params = getattr(
|
||||
cherrypy.serving.request, 'show_mismatched_params', False)
|
||||
try:
|
||||
(args, varargs, varkw, defaults) = getargspec(callable)
|
||||
except TypeError:
|
||||
if isinstance(callable, object) and hasattr(callable, '__call__'):
|
||||
(args, varargs, varkw,
|
||||
defaults) = getargspec(callable.__call__)
|
||||
else:
|
||||
# If it wasn't one of our own types, re-raise
|
||||
# the original error
|
||||
raise
|
||||
|
||||
if args and args[0] == 'self':
|
||||
args = args[1:]
|
||||
|
||||
arg_usage = dict([(arg, 0,) for arg in args])
|
||||
vararg_usage = 0
|
||||
varkw_usage = 0
|
||||
extra_kwargs = set()
|
||||
|
||||
for i, value in enumerate(callable_args):
|
||||
try:
|
||||
arg_usage[args[i]] += 1
|
||||
except IndexError:
|
||||
vararg_usage += 1
|
||||
|
||||
for key in callable_kwargs.keys():
|
||||
try:
|
||||
arg_usage[key] += 1
|
||||
except KeyError:
|
||||
varkw_usage += 1
|
||||
extra_kwargs.add(key)
|
||||
|
||||
# figure out which args have defaults.
|
||||
args_with_defaults = args[-len(defaults or []):]
|
||||
for i, val in enumerate(defaults or []):
|
||||
# Defaults take effect only when the arg hasn't been used yet.
|
||||
if arg_usage[args_with_defaults[i]] == 0:
|
||||
arg_usage[args_with_defaults[i]] += 1
|
||||
|
||||
missing_args = []
|
||||
multiple_args = []
|
||||
for key, usage in arg_usage.items():
|
||||
if usage == 0:
|
||||
missing_args.append(key)
|
||||
elif usage > 1:
|
||||
multiple_args.append(key)
|
||||
|
||||
if missing_args:
|
||||
# In the case where the method allows body arguments
|
||||
# there are 3 potential errors:
|
||||
# 1. not enough query string parameters -> 404
|
||||
# 2. not enough body parameters -> 400
|
||||
# 3. not enough path parts (partial matches) -> 404
|
||||
#
|
||||
# We can't actually tell which case it is,
|
||||
# so I'm raising a 404 because that covers 2/3 of the
|
||||
# possibilities
|
||||
#
|
||||
# In the case where the method does not allow body
|
||||
# arguments it's definitely a 404.
|
||||
message = None
|
||||
if show_mismatched_params:
|
||||
message = 'Missing parameters: %s' % ','.join(missing_args)
|
||||
raise cherrypy.HTTPError(404, message=message)
|
||||
|
||||
# the extra positional arguments come from the path - 404 Not Found
|
||||
if not varargs and vararg_usage > 0:
|
||||
raise cherrypy.HTTPError(404)
|
||||
|
||||
body_params = cherrypy.serving.request.body.params or {}
|
||||
body_params = set(body_params.keys())
|
||||
qs_params = set(callable_kwargs.keys()) - body_params
|
||||
|
||||
if multiple_args:
|
||||
if qs_params.intersection(set(multiple_args)):
|
||||
# If any of the multiple parameters came from the query string then
|
||||
# it's a 404 Not Found
|
||||
error = 404
|
||||
else:
|
||||
# Otherwise it's a 400 Bad Request
|
||||
error = 400
|
||||
|
||||
message = None
|
||||
if show_mismatched_params:
|
||||
message = 'Multiple values for parameters: '\
|
||||
'%s' % ','.join(multiple_args)
|
||||
raise cherrypy.HTTPError(error, message=message)
|
||||
|
||||
if not varkw and varkw_usage > 0:
|
||||
|
||||
# If there were extra query string parameters, it's a 404 Not Found
|
||||
extra_qs_params = set(qs_params).intersection(extra_kwargs)
|
||||
if extra_qs_params:
|
||||
message = None
|
||||
if show_mismatched_params:
|
||||
message = 'Unexpected query string '\
|
||||
'parameters: %s' % ', '.join(extra_qs_params)
|
||||
raise cherrypy.HTTPError(404, message=message)
|
||||
|
||||
# If there were any extra body parameters, it's a 400 Not Found
|
||||
extra_body_params = set(body_params).intersection(extra_kwargs)
|
||||
if extra_body_params:
|
||||
message = None
|
||||
if show_mismatched_params:
|
||||
message = 'Unexpected body parameters: '\
|
||||
'%s' % ', '.join(extra_body_params)
|
||||
raise cherrypy.HTTPError(400, message=message)
|
||||
|
||||
|
||||
try:
|
||||
import inspect
|
||||
except ImportError:
|
||||
test_callable_spec = lambda callable, args, kwargs: None
|
||||
else:
|
||||
getargspec = inspect.getargspec
|
||||
# Python 3 requires using getfullargspec if keyword-only arguments are present
|
||||
if hasattr(inspect, 'getfullargspec'):
|
||||
def getargspec(callable):
|
||||
return inspect.getfullargspec(callable)[:4]
|
||||
|
||||
|
||||
class LateParamPageHandler(PageHandler):
|
||||
|
||||
"""When passing cherrypy.request.params to the page handler, we do not
|
||||
want to capture that dict too early; we want to give tools like the
|
||||
decoding tool a chance to modify the params dict in-between the lookup
|
||||
of the handler and the actual calling of the handler. This subclass
|
||||
takes that into account, and allows request.params to be 'bound late'
|
||||
(it's more complicated than that, but that's the effect).
|
||||
"""
|
||||
|
||||
def _get_kwargs(self):
|
||||
kwargs = cherrypy.serving.request.params.copy()
|
||||
if self._kwargs:
|
||||
kwargs.update(self._kwargs)
|
||||
return kwargs
|
||||
|
||||
def _set_kwargs(self, kwargs):
|
||||
cherrypy.serving.request.kwargs = kwargs
|
||||
self._kwargs = kwargs
|
||||
|
||||
kwargs = property(_get_kwargs, _set_kwargs,
|
||||
doc='page handler kwargs (with '
|
||||
'cherrypy.request.params copied in)')
|
||||
|
||||
|
||||
if sys.version_info < (3, 0):
|
||||
punctuation_to_underscores = string.maketrans(
|
||||
string.punctuation, '_' * len(string.punctuation))
|
||||
|
||||
def validate_translator(t):
|
||||
if not isinstance(t, str) or len(t) != 256:
|
||||
raise ValueError(
|
||||
'The translate argument must be a str of len 256.')
|
||||
else:
|
||||
punctuation_to_underscores = str.maketrans(
|
||||
string.punctuation, '_' * len(string.punctuation))
|
||||
|
||||
def validate_translator(t):
|
||||
if not isinstance(t, dict):
|
||||
raise ValueError('The translate argument must be a dict.')
|
||||
|
||||
|
||||
class Dispatcher(object):
|
||||
|
||||
"""CherryPy Dispatcher which walks a tree of objects to find a handler.
|
||||
|
||||
The tree is rooted at cherrypy.request.app.root, and each hierarchical
|
||||
component in the path_info argument is matched to a corresponding nested
|
||||
attribute of the root object. Matching handlers must have an 'exposed'
|
||||
attribute which evaluates to True. The special method name "index"
|
||||
matches a URI which ends in a slash ("/"). The special method name
|
||||
"default" may match a portion of the path_info (but only when no longer
|
||||
substring of the path_info matches some other object).
|
||||
|
||||
This is the default, built-in dispatcher for CherryPy.
|
||||
"""
|
||||
|
||||
dispatch_method_name = '_cp_dispatch'
|
||||
"""
|
||||
The name of the dispatch method that nodes may optionally implement
|
||||
to provide their own dynamic dispatch algorithm.
|
||||
"""
|
||||
|
||||
def __init__(self, dispatch_method_name=None,
|
||||
translate=punctuation_to_underscores):
|
||||
validate_translator(translate)
|
||||
self.translate = translate
|
||||
if dispatch_method_name:
|
||||
self.dispatch_method_name = dispatch_method_name
|
||||
|
||||
def __call__(self, path_info):
|
||||
"""Set handler and config for the current request."""
|
||||
request = cherrypy.serving.request
|
||||
func, vpath = self.find_handler(path_info)
|
||||
|
||||
if func:
|
||||
# Decode any leftover %2F in the virtual_path atoms.
|
||||
vpath = [x.replace('%2F', '/') for x in vpath]
|
||||
request.handler = LateParamPageHandler(func, *vpath)
|
||||
else:
|
||||
request.handler = cherrypy.NotFound()
|
||||
|
||||
def find_handler(self, path):
|
||||
"""Return the appropriate page handler, plus any virtual path.
|
||||
|
||||
This will return two objects. The first will be a callable,
|
||||
which can be used to generate page output. Any parameters from
|
||||
the query string or request body will be sent to that callable
|
||||
as keyword arguments.
|
||||
|
||||
The callable is found by traversing the application's tree,
|
||||
starting from cherrypy.request.app.root, and matching path
|
||||
components to successive objects in the tree. For example, the
|
||||
URL "/path/to/handler" might return root.path.to.handler.
|
||||
|
||||
The second object returned will be a list of names which are
|
||||
'virtual path' components: parts of the URL which are dynamic,
|
||||
and were not used when looking up the handler.
|
||||
These virtual path components are passed to the handler as
|
||||
positional arguments.
|
||||
"""
|
||||
request = cherrypy.serving.request
|
||||
app = request.app
|
||||
root = app.root
|
||||
dispatch_name = self.dispatch_method_name
|
||||
|
||||
# Get config for the root object/path.
|
||||
fullpath = [x for x in path.strip('/').split('/') if x] + ['index']
|
||||
fullpath_len = len(fullpath)
|
||||
segleft = fullpath_len
|
||||
nodeconf = {}
|
||||
if hasattr(root, '_cp_config'):
|
||||
nodeconf.update(root._cp_config)
|
||||
if '/' in app.config:
|
||||
nodeconf.update(app.config['/'])
|
||||
object_trail = [['root', root, nodeconf, segleft]]
|
||||
|
||||
node = root
|
||||
iternames = fullpath[:]
|
||||
while iternames:
|
||||
name = iternames[0]
|
||||
# map to legal Python identifiers (e.g. replace '.' with '_')
|
||||
objname = name.translate(self.translate)
|
||||
|
||||
nodeconf = {}
|
||||
subnode = getattr(node, objname, None)
|
||||
pre_len = len(iternames)
|
||||
if subnode is None:
|
||||
dispatch = getattr(node, dispatch_name, None)
|
||||
if dispatch and hasattr(dispatch, '__call__') and not \
|
||||
getattr(dispatch, 'exposed', False) and \
|
||||
pre_len > 1:
|
||||
# Don't expose the hidden 'index' token to _cp_dispatch
|
||||
# We skip this if pre_len == 1 since it makes no sense
|
||||
# to call a dispatcher when we have no tokens left.
|
||||
index_name = iternames.pop()
|
||||
subnode = dispatch(vpath=iternames)
|
||||
iternames.append(index_name)
|
||||
else:
|
||||
# We didn't find a path, but keep processing in case there
|
||||
# is a default() handler.
|
||||
iternames.pop(0)
|
||||
else:
|
||||
# We found the path, remove the vpath entry
|
||||
iternames.pop(0)
|
||||
segleft = len(iternames)
|
||||
if segleft > pre_len:
|
||||
# No path segment was removed. Raise an error.
|
||||
raise cherrypy.CherryPyException(
|
||||
'A vpath segment was added. Custom dispatchers may only '
|
||||
+ 'remove elements. While trying to process '
|
||||
+ '{0} in {1}'.format(name, fullpath)
|
||||
)
|
||||
elif segleft == pre_len:
|
||||
# Assume that the handler used the current path segment, but
|
||||
# did not pop it. This allows things like
|
||||
# return getattr(self, vpath[0], None)
|
||||
iternames.pop(0)
|
||||
segleft -= 1
|
||||
node = subnode
|
||||
|
||||
if node is not None:
|
||||
# Get _cp_config attached to this node.
|
||||
if hasattr(node, '_cp_config'):
|
||||
nodeconf.update(node._cp_config)
|
||||
|
||||
# Mix in values from app.config for this path.
|
||||
existing_len = fullpath_len - pre_len
|
||||
if existing_len != 0:
|
||||
curpath = '/' + '/'.join(fullpath[0:existing_len])
|
||||
else:
|
||||
curpath = ''
|
||||
new_segs = fullpath[fullpath_len - pre_len:fullpath_len - segleft]
|
||||
for seg in new_segs:
|
||||
curpath += '/' + seg
|
||||
if curpath in app.config:
|
||||
nodeconf.update(app.config[curpath])
|
||||
|
||||
object_trail.append([name, node, nodeconf, segleft])
|
||||
|
||||
def set_conf():
|
||||
"""Collapse all object_trail config into cherrypy.request.config.
|
||||
"""
|
||||
base = cherrypy.config.copy()
|
||||
# Note that we merge the config from each node
|
||||
# even if that node was None.
|
||||
for name, obj, conf, segleft in object_trail:
|
||||
base.update(conf)
|
||||
if 'tools.staticdir.dir' in conf:
|
||||
base['tools.staticdir.section'] = '/' + \
|
||||
'/'.join(fullpath[0:fullpath_len - segleft])
|
||||
return base
|
||||
|
||||
# Try successive objects (reverse order)
|
||||
num_candidates = len(object_trail) - 1
|
||||
for i in range(num_candidates, -1, -1):
|
||||
|
||||
name, candidate, nodeconf, segleft = object_trail[i]
|
||||
if candidate is None:
|
||||
continue
|
||||
|
||||
# Try a "default" method on the current leaf.
|
||||
if hasattr(candidate, 'default'):
|
||||
defhandler = candidate.default
|
||||
if getattr(defhandler, 'exposed', False):
|
||||
# Insert any extra _cp_config from the default handler.
|
||||
conf = getattr(defhandler, '_cp_config', {})
|
||||
object_trail.insert(
|
||||
i + 1, ['default', defhandler, conf, segleft])
|
||||
request.config = set_conf()
|
||||
# See https://github.com/cherrypy/cherrypy/issues/613
|
||||
request.is_index = path.endswith('/')
|
||||
return defhandler, fullpath[fullpath_len - segleft:-1]
|
||||
|
||||
# Uncomment the next line to restrict positional params to
|
||||
# "default".
|
||||
# if i < num_candidates - 2: continue
|
||||
|
||||
# Try the current leaf.
|
||||
if getattr(candidate, 'exposed', False):
|
||||
request.config = set_conf()
|
||||
if i == num_candidates:
|
||||
# We found the extra ".index". Mark request so tools
|
||||
# can redirect if path_info has no trailing slash.
|
||||
request.is_index = True
|
||||
else:
|
||||
# We're not at an 'index' handler. Mark request so tools
|
||||
# can redirect if path_info has NO trailing slash.
|
||||
# Note that this also includes handlers which take
|
||||
# positional parameters (virtual paths).
|
||||
request.is_index = False
|
||||
return candidate, fullpath[fullpath_len - segleft:-1]
|
||||
|
||||
# We didn't find anything
|
||||
request.config = set_conf()
|
||||
return None, []
|
||||
|
||||
|
||||
class MethodDispatcher(Dispatcher):
|
||||
|
||||
"""Additional dispatch based on cherrypy.request.method.upper().
|
||||
|
||||
Methods named GET, POST, etc will be called on an exposed class.
|
||||
The method names must be all caps; the appropriate Allow header
|
||||
will be output showing all capitalized method names as allowable
|
||||
HTTP verbs.
|
||||
|
||||
Note that the containing class must be exposed, not the methods.
|
||||
"""
|
||||
|
||||
def __call__(self, path_info):
|
||||
"""Set handler and config for the current request."""
|
||||
request = cherrypy.serving.request
|
||||
resource, vpath = self.find_handler(path_info)
|
||||
|
||||
if resource:
|
||||
# Set Allow header
|
||||
avail = [m for m in dir(resource) if m.isupper()]
|
||||
if 'GET' in avail and 'HEAD' not in avail:
|
||||
avail.append('HEAD')
|
||||
avail.sort()
|
||||
cherrypy.serving.response.headers['Allow'] = ', '.join(avail)
|
||||
|
||||
# Find the subhandler
|
||||
meth = request.method.upper()
|
||||
func = getattr(resource, meth, None)
|
||||
if func is None and meth == 'HEAD':
|
||||
func = getattr(resource, 'GET', None)
|
||||
if func:
|
||||
# Grab any _cp_config on the subhandler.
|
||||
if hasattr(func, '_cp_config'):
|
||||
request.config.update(func._cp_config)
|
||||
|
||||
# Decode any leftover %2F in the virtual_path atoms.
|
||||
vpath = [x.replace('%2F', '/') for x in vpath]
|
||||
request.handler = LateParamPageHandler(func, *vpath)
|
||||
else:
|
||||
request.handler = cherrypy.HTTPError(405)
|
||||
else:
|
||||
request.handler = cherrypy.NotFound()
|
||||
|
||||
|
||||
class RoutesDispatcher(object):
|
||||
|
||||
"""A Routes based dispatcher for CherryPy."""
|
||||
|
||||
def __init__(self, full_result=False, **mapper_options):
|
||||
"""
|
||||
Routes dispatcher
|
||||
|
||||
Set full_result to True if you wish the controller
|
||||
and the action to be passed on to the page handler
|
||||
parameters. By default they won't be.
|
||||
"""
|
||||
import routes
|
||||
self.full_result = full_result
|
||||
self.controllers = {}
|
||||
self.mapper = routes.Mapper(**mapper_options)
|
||||
self.mapper.controller_scan = self.controllers.keys
|
||||
|
||||
def connect(self, name, route, controller, **kwargs):
|
||||
self.controllers[name] = controller
|
||||
self.mapper.connect(name, route, controller=name, **kwargs)
|
||||
|
||||
def redirect(self, url):
|
||||
raise cherrypy.HTTPRedirect(url)
|
||||
|
||||
def __call__(self, path_info):
|
||||
"""Set handler and config for the current request."""
|
||||
func = self.find_handler(path_info)
|
||||
if func:
|
||||
cherrypy.serving.request.handler = LateParamPageHandler(func)
|
||||
else:
|
||||
cherrypy.serving.request.handler = cherrypy.NotFound()
|
||||
|
||||
def find_handler(self, path_info):
|
||||
"""Find the right page handler, and set request.config."""
|
||||
import routes
|
||||
|
||||
request = cherrypy.serving.request
|
||||
|
||||
config = routes.request_config()
|
||||
config.mapper = self.mapper
|
||||
if hasattr(request, 'wsgi_environ'):
|
||||
config.environ = request.wsgi_environ
|
||||
config.host = request.headers.get('Host', None)
|
||||
config.protocol = request.scheme
|
||||
config.redirect = self.redirect
|
||||
|
||||
result = self.mapper.match(path_info)
|
||||
|
||||
config.mapper_dict = result
|
||||
params = {}
|
||||
if result:
|
||||
params = result.copy()
|
||||
if not self.full_result:
|
||||
params.pop('controller', None)
|
||||
params.pop('action', None)
|
||||
request.params.update(params)
|
||||
|
||||
# Get config for the root object/path.
|
||||
request.config = base = cherrypy.config.copy()
|
||||
curpath = ''
|
||||
|
||||
def merge(nodeconf):
|
||||
if 'tools.staticdir.dir' in nodeconf:
|
||||
nodeconf['tools.staticdir.section'] = curpath or '/'
|
||||
base.update(nodeconf)
|
||||
|
||||
app = request.app
|
||||
root = app.root
|
||||
if hasattr(root, '_cp_config'):
|
||||
merge(root._cp_config)
|
||||
if '/' in app.config:
|
||||
merge(app.config['/'])
|
||||
|
||||
# Mix in values from app.config.
|
||||
atoms = [x for x in path_info.split('/') if x]
|
||||
if atoms:
|
||||
last = atoms.pop()
|
||||
else:
|
||||
last = None
|
||||
for atom in atoms:
|
||||
curpath = '/'.join((curpath, atom))
|
||||
if curpath in app.config:
|
||||
merge(app.config[curpath])
|
||||
|
||||
handler = None
|
||||
if result:
|
||||
controller = result.get('controller')
|
||||
controller = self.controllers.get(controller, controller)
|
||||
if controller:
|
||||
if isinstance(controller, classtype):
|
||||
controller = controller()
|
||||
# Get config from the controller.
|
||||
if hasattr(controller, '_cp_config'):
|
||||
merge(controller._cp_config)
|
||||
|
||||
action = result.get('action')
|
||||
if action is not None:
|
||||
handler = getattr(controller, action, None)
|
||||
# Get config from the handler
|
||||
if hasattr(handler, '_cp_config'):
|
||||
merge(handler._cp_config)
|
||||
else:
|
||||
handler = controller
|
||||
|
||||
# Do the last path atom here so it can
|
||||
# override the controller's _cp_config.
|
||||
if last:
|
||||
curpath = '/'.join((curpath, last))
|
||||
if curpath in app.config:
|
||||
merge(app.config[curpath])
|
||||
|
||||
return handler
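# A minimal usage sketch (not part of the class above; ``BlogController`` is a
# placeholder and the third-party ``routes`` package must be installed). The
# dispatcher is attached through the 'request.dispatch' config entry:
#
#     d = RoutesDispatcher()
#     d.connect(name='blog', route='/blog/{action}', controller=BlogController())
#     cherrypy.tree.mount(root=None, config={'/': {'request.dispatch': d}})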
|
||||
|
||||
|
||||
def XMLRPCDispatcher(next_dispatcher=Dispatcher()):
|
||||
from cherrypy.lib import xmlrpcutil
|
||||
|
||||
def xmlrpc_dispatch(path_info):
|
||||
path_info = xmlrpcutil.patched_path(path_info)
|
||||
return next_dispatcher(path_info)
|
||||
return xmlrpc_dispatch
|
||||
|
||||
|
||||
def VirtualHost(next_dispatcher=Dispatcher(), use_x_forwarded_host=True,
|
||||
**domains):
|
||||
"""
|
||||
Select a different handler based on the Host header.
|
||||
|
||||
This can be useful when running multiple sites within one CP server.
|
||||
It allows several domains to point to different parts of a single
|
||||
website structure. For example::
|
||||
|
||||
http://www.domain.example -> root
|
||||
http://www.domain2.example -> root/domain2/
|
||||
http://www.domain2.example:443 -> root/secure
|
||||
|
||||
can be accomplished via the following config::
|
||||
|
||||
[/]
|
||||
request.dispatch = cherrypy.dispatch.VirtualHost(
|
||||
**{'www.domain2.example': '/domain2',
|
||||
'www.domain2.example:443': '/secure',
|
||||
})
|
||||
|
||||
next_dispatcher
|
||||
The next dispatcher object in the dispatch chain.
|
||||
The VirtualHost dispatcher adds a prefix to the URL and calls
|
||||
another dispatcher. Defaults to cherrypy.dispatch.Dispatcher().
|
||||
|
||||
use_x_forwarded_host
|
||||
If True (the default), any "X-Forwarded-Host"
|
||||
request header will be used instead of the "Host" header. This
|
||||
is commonly added by HTTP servers (such as Apache) when proxying.
|
||||
|
||||
``**domains``
|
||||
A dict of {host header value: virtual prefix} pairs.
|
||||
The incoming "Host" request header is looked up in this dict,
|
||||
and, if a match is found, the corresponding "virtual prefix"
|
||||
value will be prepended to the URL path before calling the
|
||||
next dispatcher. Note that you often need separate entries
|
||||
for "example.com" and "www.example.com". In addition, "Host"
|
||||
headers may contain the port number.
|
||||
"""
|
||||
from cherrypy.lib import httputil
|
||||
|
||||
def vhost_dispatch(path_info):
|
||||
request = cherrypy.serving.request
|
||||
header = request.headers.get
|
||||
|
||||
domain = header('Host', '')
|
||||
if use_x_forwarded_host:
|
||||
domain = header('X-Forwarded-Host', domain)
|
||||
|
||||
prefix = domains.get(domain, '')
|
||||
if prefix:
|
||||
path_info = httputil.urljoin(prefix, path_info)
|
||||
|
||||
result = next_dispatcher(path_info)
|
||||
|
||||
# Touch up staticdir config. See
|
||||
# https://github.com/cherrypy/cherrypy/issues/614.
|
||||
section = request.config.get('tools.staticdir.section')
|
||||
if section:
|
||||
section = section[len(prefix):]
|
||||
request.config['tools.staticdir.section'] = section
|
||||
|
||||
return result
|
||||
return vhost_dispatch
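# A rough sketch of attaching VirtualHost from Python code rather than a
# config file (the domain names, paths and Root class are placeholders only):
#
#     conf = {'/': {'request.dispatch': cherrypy.dispatch.VirtualHost(
#         **{'www.domain2.example': '/domain2'})}}
#     cherrypy.quickstart(Root(), config=conf)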
|
||||
@@ -1,611 +0,0 @@
|
||||
"""Exception classes for CherryPy.
|
||||
|
||||
CherryPy provides (and uses) exceptions for declaring that the HTTP response
|
||||
should be a status other than the default "200 OK". You can ``raise`` them like
|
||||
normal Python exceptions. You can also call them and they will raise
|
||||
themselves; this means you can set an
|
||||
:class:`HTTPError<cherrypy._cperror.HTTPError>`
|
||||
or :class:`HTTPRedirect<cherrypy._cperror.HTTPRedirect>` as the
|
||||
:attr:`request.handler<cherrypy._cprequest.Request.handler>`.
|
||||
|
||||
.. _redirectingpost:
|
||||
|
||||
Redirecting POST
|
||||
================
|
||||
|
||||
When you GET a resource and are redirected by the server to another Location,
|
||||
there's generally no problem since GET is both a "safe method" (there should
|
||||
be no side-effects) and an "idempotent method" (multiple calls are no different
|
||||
than a single call).
|
||||
|
||||
POST, however, is neither safe nor idempotent--if you
|
||||
charge a credit card, you don't want to be charged twice by a redirect!
|
||||
|
||||
For this reason, *none* of the 3xx responses permit a user-agent (browser) to
|
||||
resubmit a POST on redirection without first confirming the action with the
|
||||
user:
|
||||
|
||||
=====    =================================    ================================
300      Multiple Choices                     Confirm with the user
301      Moved Permanently                    Confirm with the user
302      Found (Object moved temporarily)     Confirm with the user
303      See Other                            GET the new URI--no confirmation
304      Not modified                         (for conditional GET only--
                                              POST should not raise this error)
305      Use Proxy                            Confirm with the user
307      Temporary Redirect                   Confirm with the user
=====    =================================    ================================
|
||||
|
||||
However, browsers have historically implemented these restrictions poorly;
|
||||
in particular, many browsers do not force the user to confirm 301, 302
|
||||
or 307 when redirecting POST. For this reason, CherryPy defaults to 303,
|
||||
which most user-agents appear to have implemented correctly. Therefore, if
|
||||
you raise HTTPRedirect for a POST request, the user-agent will most likely
|
||||
attempt to GET the new URI (without asking for confirmation from the user).
|
||||
We realize this is confusing for developers, but it's the safest thing we
|
||||
could do. You are of course free to raise ``HTTPRedirect(uri, status=302)``
|
||||
or any other 3xx status if you know what you're doing, but given the
|
||||
environment, we couldn't let any of those be the default.
|
||||
|
||||
Custom Error Handling
|
||||
=====================
|
||||
|
||||
.. image:: /refman/cperrors.gif
|
||||
|
||||
Anticipated HTTP responses
|
||||
--------------------------
|
||||
|
||||
The 'error_page' config namespace can be used to provide custom HTML output for
|
||||
expected responses (like 404 Not Found). Supply a filename from which the
|
||||
output will be read. The contents will be interpolated with the values
|
||||
%(status)s, %(message)s, %(traceback)s, and %(version)s using plain old Python
|
||||
`string formatting <http://docs.python.org/2/library/stdtypes.html#string-formatting-operations>`_.
|
||||
|
||||
::
|
||||
|
||||
_cp_config = {
|
||||
'error_page.404': os.path.join(localDir, "static/index.html")
|
||||
}
|
||||
|
||||
|
||||
Beginning in version 3.1, you may also provide a function or other callable as
|
||||
an error_page entry. It will be passed the same status, message, traceback and
|
||||
version arguments that are interpolated into templates::
|
||||
|
||||
def error_page_402(status, message, traceback, version):
|
||||
return "Error %s - Well, I'm very sorry but you haven't paid!" % status
|
||||
cherrypy.config.update({'error_page.402': error_page_402})
|
||||
|
||||
Also in 3.1, in addition to the numbered error codes, you may also supply
|
||||
"error_page.default" to handle all codes which do not have their own error_page
|
||||
entry.
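
For instance, reusing the callable from the example above (shown only as an
illustration)::

    cherrypy.config.update({'error_page.default': error_page_402})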
|
||||
|
||||
|
||||
|
||||
Unanticipated errors
|
||||
--------------------
|
||||
|
||||
CherryPy also has a generic error handling mechanism: whenever an unanticipated
|
||||
error occurs in your code, it will call
|
||||
:func:`Request.error_response<cherrypy._cprequest.Request.error_response>` to
|
||||
set the response status, headers, and body. By default, this is the same
|
||||
output as
|
||||
:class:`HTTPError(500) <cherrypy._cperror.HTTPError>`. If you want to provide
|
||||
some other behavior, you generally replace "request.error_response".
|
||||
|
||||
Here is some sample code that shows how to display a custom error message and
|
||||
send an e-mail containing the error::
|
||||
|
||||
from cherrypy import _cperror
|
||||
|
||||
def handle_error():
|
||||
cherrypy.response.status = 500
|
||||
cherrypy.response.body = [
|
||||
"<html><body>Sorry, an error occured</body></html>"
|
||||
]
|
||||
sendMail('error@domain.com',
|
||||
'Error in your web app',
|
||||
_cperror.format_exc())
|
||||
|
||||
@cherrypy.config(**{'request.error_response': handle_error})
|
||||
class Root:
|
||||
pass
|
||||
|
||||
Note that you have to explicitly set
|
||||
:attr:`response.body <cherrypy._cprequest.Response.body>`
|
||||
and not simply return an error message as a result.
|
||||
"""
|
||||
|
||||
import contextlib
|
||||
from cgi import escape as _escape
|
||||
from sys import exc_info as _exc_info
|
||||
from traceback import format_exception as _format_exception
|
||||
from xml.sax import saxutils
|
||||
|
||||
import six
|
||||
|
||||
from cherrypy._cpcompat import text_or_bytes, iteritems, ntob
|
||||
from cherrypy._cpcompat import tonative, urljoin as _urljoin
|
||||
from cherrypy.lib import httputil as _httputil
|
||||
|
||||
|
||||
class CherryPyException(Exception):
|
||||
|
||||
"""A base class for CherryPy exceptions."""
|
||||
pass
|
||||
|
||||
|
||||
class TimeoutError(CherryPyException):
|
||||
|
||||
"""Exception raised when Response.timed_out is detected."""
|
||||
pass
|
||||
|
||||
|
||||
class InternalRedirect(CherryPyException):
|
||||
|
||||
"""Exception raised to switch to the handler for a different URL.
|
||||
|
||||
This exception will redirect processing to another path within the site
|
||||
(without informing the client). Provide the new path as an argument when
|
||||
raising the exception. Provide any params in the querystring for the new
|
||||
URL.
|
||||
"""
|
||||
|
||||
def __init__(self, path, query_string=''):
|
||||
import cherrypy
|
||||
self.request = cherrypy.serving.request
|
||||
|
||||
self.query_string = query_string
|
||||
if '?' in path:
|
||||
# Separate any params included in the path
|
||||
path, self.query_string = path.split('?', 1)
|
||||
|
||||
# Note that urljoin will "do the right thing" whether url is:
|
||||
# 1. a URL relative to root (e.g. "/dummy")
|
||||
# 2. a URL relative to the current path
|
||||
# Note that any query string will be discarded.
|
||||
path = _urljoin(self.request.path_info, path)
|
||||
|
||||
# Set a 'path' member attribute so that code which traps this
|
||||
# error can have access to it.
|
||||
self.path = path
|
||||
|
||||
CherryPyException.__init__(self, path, self.query_string)
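# A minimal usage sketch (the path and querystring are placeholders): from
# inside a page handler, hand the request to another URL's handler without
# informing the client:
#
#     raise cherrypy.InternalRedirect('/other/page', 'id=42')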
|
||||
|
||||
|
||||
class HTTPRedirect(CherryPyException):
|
||||
|
||||
"""Exception raised when the request should be redirected.
|
||||
|
||||
This exception will force an HTTP redirect to the URL or URLs you give it.
|
||||
The new URL must be passed as the first argument to the Exception,
|
||||
e.g., HTTPRedirect(newUrl). Multiple URLs are allowed in a list.
|
||||
If a URL is absolute, it will be used as-is. If it is relative, it is
|
||||
assumed to be relative to the current cherrypy.request.path_info.
|
||||
|
||||
If one of the provided URLs is a unicode object, it will be encoded
|
||||
using the default encoding or the one passed in parameter.
|
||||
|
||||
There are multiple types of redirect, from which you can select via the
|
||||
``status`` argument. If you do not provide a ``status`` arg, it defaults to
|
||||
303 (or 302 if responding with HTTP/1.0).
|
||||
|
||||
Examples::
|
||||
|
||||
raise cherrypy.HTTPRedirect("")
|
||||
raise cherrypy.HTTPRedirect("/abs/path", 307)
|
||||
raise cherrypy.HTTPRedirect(["path1", "path2?a=1&b=2"], 301)
|
||||
|
||||
See :ref:`redirectingpost` for additional caveats.
|
||||
"""
|
||||
|
||||
status = None
|
||||
"""The integer HTTP status code to emit."""
|
||||
|
||||
urls = None
|
||||
"""The list of URL's to emit."""
|
||||
|
||||
encoding = 'utf-8'
|
||||
"""The encoding when passed urls are not native strings"""
|
||||
|
||||
def __init__(self, urls, status=None, encoding=None):
|
||||
import cherrypy
|
||||
request = cherrypy.serving.request
|
||||
|
||||
if isinstance(urls, text_or_bytes):
|
||||
urls = [urls]
|
||||
|
||||
self.urls = [tonative(url, encoding or self.encoding) for url in urls]
|
||||
|
||||
# RFC 2616 indicates a 301 response code fits our goal; however,
|
||||
# browser support for 301 is quite messy. Do 302/303 instead. See
|
||||
# http://www.alanflavell.org.uk/www/post-redirect.html
|
||||
if status is None:
|
||||
if request.protocol >= (1, 1):
|
||||
status = 303
|
||||
else:
|
||||
status = 302
|
||||
else:
|
||||
status = int(status)
|
||||
if status < 300 or status > 399:
|
||||
raise ValueError('status must be between 300 and 399.')
|
||||
|
||||
self.status = status
|
||||
CherryPyException.__init__(self, self.urls, status)
|
||||
|
||||
def set_response(self):
|
||||
"""Modify cherrypy.response status, headers, and body to represent
|
||||
self.
|
||||
|
||||
CherryPy uses this internally, but you can also use it to create an
|
||||
HTTPRedirect object and set its output without *raising* the exception.
|
||||
"""
|
||||
import cherrypy
|
||||
response = cherrypy.serving.response
|
||||
response.status = status = self.status
|
||||
|
||||
if status in (300, 301, 302, 303, 307):
|
||||
response.headers['Content-Type'] = 'text/html;charset=utf-8'
|
||||
# "The ... URI SHOULD be given by the Location field
|
||||
# in the response."
|
||||
response.headers['Location'] = self.urls[0]
|
||||
|
||||
# "Unless the request method was HEAD, the entity of the response
|
||||
# SHOULD contain a short hypertext note with a hyperlink to the
|
||||
# new URI(s)."
|
||||
msg = {
|
||||
300: 'This resource can be found at ',
|
||||
301: 'This resource has permanently moved to ',
|
||||
302: 'This resource resides temporarily at ',
|
||||
303: 'This resource can be found at ',
|
||||
307: 'This resource has moved temporarily to ',
|
||||
}[status]
|
||||
msg += '<a href=%s>%s</a>.'
|
||||
msgs = [msg % (saxutils.quoteattr(u), u) for u in self.urls]
|
||||
response.body = ntob('<br />\n'.join(msgs), 'utf-8')
|
||||
# Previous code may have set C-L, so we have to reset it
|
||||
# (allow finalize to set it).
|
||||
response.headers.pop('Content-Length', None)
|
||||
elif status == 304:
|
||||
# Not Modified.
|
||||
# "The response MUST include the following header fields:
|
||||
# Date, unless its omission is required by section 14.18.1"
|
||||
# The "Date" header should have been set in Response.__init__
|
||||
|
||||
# "...the response SHOULD NOT include other entity-headers."
|
||||
for key in ('Allow', 'Content-Encoding', 'Content-Language',
|
||||
'Content-Length', 'Content-Location', 'Content-MD5',
|
||||
'Content-Range', 'Content-Type', 'Expires',
|
||||
'Last-Modified'):
|
||||
if key in response.headers:
|
||||
del response.headers[key]
|
||||
|
||||
# "The 304 response MUST NOT contain a message-body."
|
||||
response.body = None
|
||||
# Previous code may have set C-L, so we have to reset it.
|
||||
response.headers.pop('Content-Length', None)
|
||||
elif status == 305:
|
||||
# Use Proxy.
|
||||
# self.urls[0] should be the URI of the proxy.
|
||||
response.headers['Location'] = ntob(self.urls[0], 'utf-8')
|
||||
response.body = None
|
||||
# Previous code may have set C-L, so we have to reset it.
|
||||
response.headers.pop('Content-Length', None)
|
||||
else:
|
||||
raise ValueError('The %s status code is unknown.' % status)
|
||||
|
||||
def __call__(self):
|
||||
"""Use this exception as a request.handler (raise self)."""
|
||||
raise self
|
||||
|
||||
|
||||
def clean_headers(status):
|
||||
"""Remove any headers which should not apply to an error response."""
|
||||
import cherrypy
|
||||
|
||||
response = cherrypy.serving.response
|
||||
|
||||
# Remove headers which applied to the original content,
|
||||
# but do not apply to the error page.
|
||||
respheaders = response.headers
|
||||
for key in ['Accept-Ranges', 'Age', 'ETag', 'Location', 'Retry-After',
|
||||
'Vary', 'Content-Encoding', 'Content-Length', 'Expires',
|
||||
'Content-Location', 'Content-MD5', 'Last-Modified']:
|
||||
if key in respheaders:
|
||||
del respheaders[key]
|
||||
|
||||
if status != 416:
|
||||
# A server sending a response with status code 416 (Requested
|
||||
# range not satisfiable) SHOULD include a Content-Range field
|
||||
# with a byte-range-resp-spec of "*". The instance-length
|
||||
# specifies the current length of the selected resource.
|
||||
# A response with status code 206 (Partial Content) MUST NOT
|
||||
# include a Content-Range field with a byte-range- resp-spec of "*".
|
||||
if 'Content-Range' in respheaders:
|
||||
del respheaders['Content-Range']
|
||||
|
||||
|
||||
class HTTPError(CherryPyException):
|
||||
|
||||
"""Exception used to return an HTTP error code (4xx-5xx) to the client.
|
||||
|
||||
This exception can be used to automatically send a response using an
HTTP status code, with an appropriate error page. It takes an optional
|
||||
``status`` argument (which must be between 400 and 599); it defaults to 500
|
||||
("Internal Server Error"). It also takes an optional ``message`` argument,
|
||||
which will be returned in the response body. See
|
||||
`RFC2616 <http://www.w3.org/Protocols/rfc2616/rfc2616-sec10.html#sec10.4>`_
|
||||
for a complete list of available error codes and when to use them.
|
||||
|
||||
Examples::
|
||||
|
||||
raise cherrypy.HTTPError(403)
|
||||
raise cherrypy.HTTPError(
|
||||
"403 Forbidden", "You are not allowed to access this resource.")
|
||||
"""
|
||||
|
||||
status = None
|
||||
"""The HTTP status code. May be of type int or str (with a Reason-Phrase).
|
||||
"""
|
||||
|
||||
code = None
|
||||
"""The integer HTTP status code."""
|
||||
|
||||
reason = None
|
||||
"""The HTTP Reason-Phrase string."""
|
||||
|
||||
def __init__(self, status=500, message=None):
|
||||
self.status = status
|
||||
try:
|
||||
self.code, self.reason, defaultmsg = _httputil.valid_status(status)
|
||||
except ValueError:
|
||||
raise self.__class__(500, _exc_info()[1].args[0])
|
||||
|
||||
if self.code < 400 or self.code > 599:
|
||||
raise ValueError('status must be between 400 and 599.')
|
||||
|
||||
# See http://www.python.org/dev/peps/pep-0352/
|
||||
# self.message = message
|
||||
self._message = message or defaultmsg
|
||||
CherryPyException.__init__(self, status, message)
|
||||
|
||||
def set_response(self):
|
||||
"""Modify cherrypy.response status, headers, and body to represent
|
||||
self.
|
||||
|
||||
CherryPy uses this internally, but you can also use it to create an
|
||||
HTTPError object and set its output without *raising* the exception.
|
||||
"""
|
||||
import cherrypy
|
||||
|
||||
response = cherrypy.serving.response
|
||||
|
||||
clean_headers(self.code)
|
||||
|
||||
# In all cases, finalize will be called after this method,
|
||||
# so don't bother cleaning up response values here.
|
||||
response.status = self.status
|
||||
tb = None
|
||||
if cherrypy.serving.request.show_tracebacks:
|
||||
tb = format_exc()
|
||||
|
||||
response.headers.pop('Content-Length', None)
|
||||
|
||||
content = self.get_error_page(self.status, traceback=tb,
|
||||
message=self._message)
|
||||
response.body = content
|
||||
|
||||
_be_ie_unfriendly(self.code)
|
||||
|
||||
def get_error_page(self, *args, **kwargs):
|
||||
return get_error_page(*args, **kwargs)
|
||||
|
||||
def __call__(self):
|
||||
"""Use this exception as a request.handler (raise self)."""
|
||||
raise self
|
||||
|
||||
@classmethod
|
||||
@contextlib.contextmanager
|
||||
def handle(cls, exception, status=500, message=''):
|
||||
"""Translate exception into an HTTPError."""
|
||||
try:
|
||||
yield
|
||||
except exception as exc:
|
||||
raise cls(status, message or str(exc))
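# A minimal usage sketch of the ``handle`` helper (the exception type, status
# and the validate_user_input() call are illustrative): any ValueError raised
# inside the block is re-raised as a 400 error.
#
#     with cherrypy.HTTPError.handle(ValueError, 400):
#         validate_user_input()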
|
||||
|
||||
|
||||
class NotFound(HTTPError):
|
||||
|
||||
"""Exception raised when a URL could not be mapped to any handler (404).
|
||||
|
||||
This is equivalent to raising
|
||||
:class:`HTTPError("404 Not Found") <cherrypy._cperror.HTTPError>`.
|
||||
"""
|
||||
|
||||
def __init__(self, path=None):
|
||||
if path is None:
|
||||
import cherrypy
|
||||
request = cherrypy.serving.request
|
||||
path = request.script_name + request.path_info
|
||||
self.args = (path,)
|
||||
HTTPError.__init__(self, 404, "The path '%s' was not found." % path)
|
||||
|
||||
|
||||
_HTTPErrorTemplate = '''<!DOCTYPE html PUBLIC
|
||||
"-//W3C//DTD XHTML 1.0 Transitional//EN"
|
||||
"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
|
||||
<html>
|
||||
<head>
|
||||
<meta http-equiv="Content-Type" content="text/html; charset=utf-8"></meta>
|
||||
<title>%(status)s</title>
|
||||
<style type="text/css">
|
||||
#powered_by {
|
||||
margin-top: 20px;
|
||||
border-top: 2px solid black;
|
||||
font-style: italic;
|
||||
}
|
||||
|
||||
#traceback {
|
||||
color: red;
|
||||
}
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
<h2>%(status)s</h2>
|
||||
<p>%(message)s</p>
|
||||
<pre id="traceback">%(traceback)s</pre>
|
||||
<div id="powered_by">
|
||||
<span>
|
||||
Powered by <a href="http://www.cherrypy.org">CherryPy %(version)s</a>
|
||||
</span>
|
||||
</div>
|
||||
</body>
|
||||
</html>
|
||||
'''
|
||||
|
||||
|
||||
def get_error_page(status, **kwargs):
|
||||
"""Return an HTML page, containing a pretty error response.
|
||||
|
||||
status should be an int or a str.
|
||||
kwargs will be interpolated into the page template.
|
||||
"""
|
||||
import cherrypy
|
||||
|
||||
try:
|
||||
code, reason, message = _httputil.valid_status(status)
|
||||
except ValueError:
|
||||
raise cherrypy.HTTPError(500, _exc_info()[1].args[0])
|
||||
|
||||
# We can't use setdefault here, because some
|
||||
# callers send None for kwarg values.
|
||||
if kwargs.get('status') is None:
|
||||
kwargs['status'] = '%s %s' % (code, reason)
|
||||
if kwargs.get('message') is None:
|
||||
kwargs['message'] = message
|
||||
if kwargs.get('traceback') is None:
|
||||
kwargs['traceback'] = ''
|
||||
if kwargs.get('version') is None:
|
||||
kwargs['version'] = cherrypy.__version__
|
||||
|
||||
for k, v in iteritems(kwargs):
|
||||
if v is None:
|
||||
kwargs[k] = ''
|
||||
else:
|
||||
kwargs[k] = _escape(kwargs[k])
|
||||
|
||||
# Use a custom template or callable for the error page?
|
||||
pages = cherrypy.serving.request.error_page
|
||||
error_page = pages.get(code) or pages.get('default')
|
||||
|
||||
# Default template, can be overridden below.
|
||||
template = _HTTPErrorTemplate
|
||||
if error_page:
|
||||
try:
|
||||
if hasattr(error_page, '__call__'):
|
||||
# The caller function may be setting headers manually,
|
||||
# so we delegate to it completely. We may be returning
|
||||
# an iterator as well as a string here.
|
||||
#
|
||||
# We *must* make sure any content is not unicode.
|
||||
result = error_page(**kwargs)
|
||||
if cherrypy.lib.is_iterator(result):
|
||||
from cherrypy.lib.encoding import UTF8StreamEncoder
|
||||
return UTF8StreamEncoder(result)
|
||||
elif isinstance(result, six.text_type):
|
||||
return result.encode('utf-8')
|
||||
else:
|
||||
if not isinstance(result, bytes):
|
||||
raise ValueError('error page function did not '
|
||||
'return a bytestring, six.text_type or an '
|
||||
'iterator - returned object of type %s.'
|
||||
% (type(result).__name__))
|
||||
return result
|
||||
else:
|
||||
# Load the template from this path.
|
||||
template = tonative(open(error_page, 'rb').read())
|
||||
except:
|
||||
e = _format_exception(*_exc_info())[-1]
|
||||
m = kwargs['message']
|
||||
if m:
|
||||
m += '<br />'
|
||||
m += 'In addition, the custom error page failed:\n<br />%s' % e
|
||||
kwargs['message'] = m
|
||||
|
||||
response = cherrypy.serving.response
|
||||
response.headers['Content-Type'] = 'text/html;charset=utf-8'
|
||||
result = template % kwargs
|
||||
return result.encode('utf-8')
|
||||
|
||||
|
||||
|
||||
_ie_friendly_error_sizes = {
|
||||
400: 512, 403: 256, 404: 512, 405: 256,
|
||||
406: 512, 408: 512, 409: 512, 410: 256,
|
||||
500: 512, 501: 512, 505: 512,
|
||||
}
|
||||
|
||||
|
||||
def _be_ie_unfriendly(status):
|
||||
import cherrypy
|
||||
response = cherrypy.serving.response
|
||||
|
||||
# For some statuses, Internet Explorer 5+ shows "friendly error
|
||||
# messages" instead of our response.body if the body is smaller
|
||||
# than a given size. Fix this by returning a body over that size
|
||||
# (by adding whitespace).
|
||||
# See http://support.microsoft.com/kb/q218155/
|
||||
s = _ie_friendly_error_sizes.get(status, 0)
|
||||
if s:
|
||||
s += 1
|
||||
# Since we are issuing an HTTP error status, we assume that
|
||||
# the entity is short, and we should just collapse it.
|
||||
content = response.collapse_body()
|
||||
l = len(content)
|
||||
if l and l < s:
|
||||
# IN ADDITION: the response must be written to IE
|
||||
# in one chunk or it will still get replaced! Bah.
|
||||
content = content + (ntob(' ') * (s - l))
|
||||
response.body = content
|
||||
response.headers['Content-Length'] = str(len(content))
|
||||
|
||||
|
||||
def format_exc(exc=None):
|
||||
"""Return exc (or sys.exc_info if None), formatted."""
|
||||
try:
|
||||
if exc is None:
|
||||
exc = _exc_info()
|
||||
if exc == (None, None, None):
|
||||
return ''
|
||||
import traceback
|
||||
return ''.join(traceback.format_exception(*exc))
|
||||
finally:
|
||||
del exc
|
||||
|
||||
|
||||
def bare_error(extrabody=None):
|
||||
"""Produce status, headers, body for a critical error.
|
||||
|
||||
Returns a triple without calling any other questionable functions,
|
||||
so it should be as error-free as possible. Call it from an HTTP server
|
||||
if you get errors outside of the request.
|
||||
|
||||
If extrabody is None, a friendly but rather unhelpful error message
|
||||
is set in the body. If extrabody is a string, it will be appended
|
||||
as-is to the body.
|
||||
"""
|
||||
|
||||
# The whole point of this function is to be a last line-of-defense
|
||||
# in handling errors. That is, it must not raise any errors itself;
|
||||
# it cannot be allowed to fail. Therefore, don't add to it!
|
||||
# In particular, don't call any other CP functions.
|
||||
|
||||
body = ntob('Unrecoverable error in the server.')
|
||||
if extrabody is not None:
|
||||
if not isinstance(extrabody, bytes):
|
||||
extrabody = extrabody.encode('utf-8')
|
||||
body += ntob('\n') + extrabody
|
||||
|
||||
return (ntob('500 Internal Server Error'),
|
||||
[(ntob('Content-Type'), ntob('text/plain')),
|
||||
(ntob('Content-Length'), ntob(str(len(body)), 'ISO-8859-1'))],
|
||||
[body])
|
||||
@@ -1,464 +0,0 @@
|
||||
"""
|
||||
Simple config
|
||||
=============
|
||||
|
||||
Although CherryPy uses the :mod:`Python logging module <logging>`, it does so
|
||||
behind the scenes so that simple logging is simple, but complicated logging
|
||||
is still possible. "Simple" logging means that you can log to the screen
|
||||
(i.e. console/stdout) or to a file, and that you can easily have separate
|
||||
error and access log files.
|
||||
|
||||
Here are the simplified logging settings. You use these by adding lines to
|
||||
your config file or dict. You should set these at either the global level or
|
||||
per application (see next), but generally not both.
|
||||
|
||||
* ``log.screen``: Set this to True to have both "error" and "access" messages
|
||||
printed to stdout.
|
||||
* ``log.access_file``: Set this to an absolute filename where you want
|
||||
"access" messages written.
|
||||
* ``log.error_file``: Set this to an absolute filename where you want "error"
|
||||
messages written.
|
||||
|
||||
Many events are automatically logged; to log your own application events, call
|
||||
:func:`cherrypy.log`.
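
As a rough sketch, the same settings expressed as a config dict (the file
names below are placeholders only)::

    cherrypy.config.update({
        'log.screen': False,
        'log.error_file': '/path/to/error.log',
        'log.access_file': '/path/to/access.log',
    })
    cherrypy.log('My application started')   # written to the error log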
|
||||
|
||||
Architecture
|
||||
============
|
||||
|
||||
Separate scopes
|
||||
---------------
|
||||
|
||||
CherryPy provides log managers at both the global and application layers.
|
||||
This means you can have one set of logging rules for your entire site,
|
||||
and another set of rules specific to each application. The global log
|
||||
manager is found at :func:`cherrypy.log`, and the log manager for each
|
||||
application is found at :attr:`app.log<cherrypy._cptree.Application.log>`.
|
||||
If you're inside a request, the latter is reachable from
|
||||
``cherrypy.request.app.log``; if you're outside a request, you'll have to
|
||||
obtain a reference to the ``app``: either the return value of
|
||||
:func:`tree.mount()<cherrypy._cptree.Tree.mount>` or, if you used
|
||||
:func:`quickstart()<cherrypy.quickstart>` instead, via
|
||||
``cherrypy.tree.apps['/']``.
|
||||
|
||||
By default, the global logs are named "cherrypy.error" and "cherrypy.access",
|
||||
and the application logs are named "cherrypy.error.2378745" and
|
||||
"cherrypy.access.2378745" (the number is the id of the Application object).
|
||||
This means that the application logs "bubble up" to the site logs, so if your
|
||||
application has no log handlers, the site-level handlers will still log the
|
||||
messages.
|
||||
|
||||
Errors vs. Access
|
||||
-----------------
|
||||
|
||||
Each log manager handles both "access" messages (one per HTTP request) and
|
||||
"error" messages (everything else). Note that the "error" log is not just for
|
||||
errors! The format of access messages is highly formalized, but the error log
|
||||
isn't--it receives messages from a variety of sources (including full error
|
||||
tracebacks, if enabled).
|
||||
|
||||
If you are logging the access log and error log to the same source, then there
|
||||
is a possibility that a specially crafted error message may replicate an access
|
||||
log message as described in CWE-117. In this case it is the application
|
||||
developer's responsibility to manually escape data before using CherryPy's log()
|
||||
functionality, or they may create an application that is vulnerable to CWE-117.
|
||||
This can be achieved by using a custom handler that escapes any special
characters, attached as described below.
|
||||
|
||||
Custom Handlers
|
||||
===============
|
||||
|
||||
The simple settings above work by manipulating Python's standard :mod:`logging`
|
||||
module. So when you need something more complex, the full power of the standard
|
||||
module is yours to exploit. You can borrow or create custom handlers, formats,
|
||||
filters, and much more. Here's an example that skips the standard FileHandler
|
||||
and uses a RotatingFileHandler instead:
|
||||
|
||||
::
|
||||
|
||||
#python
from logging import handlers, DEBUG

from cherrypy import _cplogging

log = app.log
|
||||
|
||||
# Remove the default FileHandlers if present.
|
||||
log.error_file = ""
|
||||
log.access_file = ""
|
||||
|
||||
maxBytes = getattr(log, "rot_maxBytes", 10000000)
|
||||
backupCount = getattr(log, "rot_backupCount", 1000)
|
||||
|
||||
# Make a new RotatingFileHandler for the error log.
|
||||
fname = getattr(log, "rot_error_file", "error.log")
|
||||
h = handlers.RotatingFileHandler(fname, 'a', maxBytes, backupCount)
|
||||
h.setLevel(DEBUG)
|
||||
h.setFormatter(_cplogging.logfmt)
|
||||
log.error_log.addHandler(h)
|
||||
|
||||
# Make a new RotatingFileHandler for the access log.
|
||||
fname = getattr(log, "rot_access_file", "access.log")
|
||||
h = handlers.RotatingFileHandler(fname, 'a', maxBytes, backupCount)
|
||||
h.setLevel(DEBUG)
|
||||
h.setFormatter(_cplogging.logfmt)
|
||||
log.access_log.addHandler(h)
|
||||
|
||||
|
||||
The ``rot_*`` attributes are pulled straight from the application log object.
|
||||
Since "log.*" config entries simply set attributes on the log object, you can
|
||||
add custom attributes to your heart's content. Note that these handlers are
|
||||
used *instead* of the default, simple handlers outlined above (so don't set
|
||||
the "log.error_file" config entry, for example).
|
||||
"""
|
||||
|
||||
import datetime
|
||||
import logging
|
||||
import os
|
||||
import sys
|
||||
|
||||
import six
|
||||
|
||||
import cherrypy
|
||||
from cherrypy import _cperror
|
||||
from cherrypy._cpcompat import ntob
|
||||
|
||||
|
||||
# Silence the no-handlers "warning" (stderr write!) in stdlib logging
|
||||
logging.Logger.manager.emittedNoHandlerWarning = 1
|
||||
logfmt = logging.Formatter('%(message)s')
|
||||
|
||||
|
||||
class NullHandler(logging.Handler):
|
||||
|
||||
"""A no-op logging handler to silence the logging.lastResort handler."""
|
||||
|
||||
def handle(self, record):
|
||||
pass
|
||||
|
||||
def emit(self, record):
|
||||
pass
|
||||
|
||||
def createLock(self):
|
||||
self.lock = None
|
||||
|
||||
|
||||
class LogManager(object):
|
||||
|
||||
"""An object to assist both simple and advanced logging.
|
||||
|
||||
``cherrypy.log`` is an instance of this class.
|
||||
"""
|
||||
|
||||
appid = None
|
||||
"""The id() of the Application object which owns this log manager. If this
|
||||
is a global log manager, appid is None."""
|
||||
|
||||
error_log = None
|
||||
"""The actual :class:`logging.Logger` instance for error messages."""
|
||||
|
||||
access_log = None
|
||||
"""The actual :class:`logging.Logger` instance for access messages."""
|
||||
|
||||
access_log_format = (
|
||||
'{h} {l} {u} {t} "{r}" {s} {b} "{f}" "{a}"'
|
||||
if six.PY3 else
|
||||
'%(h)s %(l)s %(u)s %(t)s "%(r)s" %(s)s %(b)s "%(f)s" "%(a)s"'
|
||||
)
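# Because "log.*" config entries simply set attributes on a log manager, this
# format can be customised; a hedged sketch (Python 3 "{}"-style shown; the
# extra "{o}" atom is the Host header collected by access() below):
#
#     cherrypy.log.access_log_format = (
#         '{h} {l} {u} {t} "{r}" {s} {b} "{f}" "{a}" "{o}"'
#     )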
|
||||
|
||||
logger_root = None
|
||||
"""The "top-level" logger name.
|
||||
|
||||
This string will be used as the first segment in the Logger names.
|
||||
The default is "cherrypy", for example, in which case the Logger names
|
||||
will be of the form::
|
||||
|
||||
cherrypy.error.<appid>
|
||||
cherrypy.access.<appid>
|
||||
"""
|
||||
|
||||
def __init__(self, appid=None, logger_root='cherrypy'):
|
||||
self.logger_root = logger_root
|
||||
self.appid = appid
|
||||
if appid is None:
|
||||
self.error_log = logging.getLogger('%s.error' % logger_root)
|
||||
self.access_log = logging.getLogger('%s.access' % logger_root)
|
||||
else:
|
||||
self.error_log = logging.getLogger(
|
||||
'%s.error.%s' % (logger_root, appid))
|
||||
self.access_log = logging.getLogger(
|
||||
'%s.access.%s' % (logger_root, appid))
|
||||
self.error_log.setLevel(logging.INFO)
|
||||
self.access_log.setLevel(logging.INFO)
|
||||
|
||||
# Silence the no-handlers "warning" (stderr write!) in stdlib logging
|
||||
self.error_log.addHandler(NullHandler())
|
||||
self.access_log.addHandler(NullHandler())
|
||||
|
||||
cherrypy.engine.subscribe('graceful', self.reopen_files)
|
||||
|
||||
def reopen_files(self):
|
||||
"""Close and reopen all file handlers."""
|
||||
for log in (self.error_log, self.access_log):
|
||||
for h in log.handlers:
|
||||
if isinstance(h, logging.FileHandler):
|
||||
h.acquire()
|
||||
h.stream.close()
|
||||
h.stream = open(h.baseFilename, h.mode)
|
||||
h.release()
|
||||
|
||||
def error(self, msg='', context='', severity=logging.INFO,
|
||||
traceback=False):
|
||||
"""Write the given ``msg`` to the error log.
|
||||
|
||||
This is not just for errors! Applications may call this at any time
|
||||
to log application-specific information.
|
||||
|
||||
If ``traceback`` is True, the traceback of the current exception
|
||||
(if any) will be appended to ``msg``.
|
||||
"""
|
||||
exc_info = None
|
||||
if traceback:
|
||||
exc_info = _cperror._exc_info()
|
||||
|
||||
self.error_log.log(severity, ' '.join((self.time(), context, msg)), exc_info=exc_info)
|
||||
|
||||
def __call__(self, *args, **kwargs):
|
||||
"""An alias for ``error``."""
|
||||
return self.error(*args, **kwargs)
|
||||
|
||||
def access(self):
|
||||
"""Write to the access log (in Apache/NCSA Combined Log format).
|
||||
|
||||
See the
|
||||
`apache documentation <http://httpd.apache.org/docs/current/logs.html#combined>`_
|
||||
for format details.
|
||||
|
||||
CherryPy calls this automatically for you. Note there are no arguments;
|
||||
it collects the data itself from
|
||||
:class:`cherrypy.request<cherrypy._cprequest.Request>`.
|
||||
|
||||
Like Apache started doing in 2.0.46, non-printable and other special
|
||||
characters in %r (and we expand that to all parts) are escaped using
|
||||
\\xhh sequences, where hh stands for the hexadecimal representation
|
||||
of the raw byte. Exceptions from this rule are " and \\, which are
|
||||
escaped by prepending a backslash, and all whitespace characters,
|
||||
which are written in their C-style notation (\\n, \\t, etc).
|
||||
"""
|
||||
request = cherrypy.serving.request
|
||||
remote = request.remote
|
||||
response = cherrypy.serving.response
|
||||
outheaders = response.headers
|
||||
inheaders = request.headers
|
||||
if response.output_status is None:
|
||||
status = '-'
|
||||
else:
|
||||
status = response.output_status.split(ntob(' '), 1)[0]
|
||||
if six.PY3:
|
||||
status = status.decode('ISO-8859-1')
|
||||
|
||||
atoms = {'h': remote.name or remote.ip,
|
||||
'l': '-',
|
||||
'u': getattr(request, 'login', None) or '-',
|
||||
't': self.time(),
|
||||
'r': request.request_line,
|
||||
's': status,
|
||||
'b': dict.get(outheaders, 'Content-Length', '') or '-',
|
||||
'f': dict.get(inheaders, 'Referer', ''),
|
||||
'a': dict.get(inheaders, 'User-Agent', ''),
|
||||
'o': dict.get(inheaders, 'Host', '-'),
|
||||
}
|
||||
if six.PY3:
|
||||
for k, v in atoms.items():
|
||||
if not isinstance(v, str):
|
||||
v = str(v)
|
||||
v = v.replace('"', '\\"').encode('utf8')
|
||||
# Fortunately, repr(str) escapes unprintable chars, \n, \t, etc
|
||||
# and backslash for us. All we have to do is strip the quotes.
|
||||
v = repr(v)[2:-1]
|
||||
|
||||
# in python 3.0 the repr of bytes (as returned by encode)
|
||||
# uses double \'s. But then the logger escapes them yet again,
# resulting in quadruple backslashes. Remove the extra one here.
|
||||
v = v.replace('\\\\', '\\')
|
||||
|
||||
# Escape double-quote.
|
||||
atoms[k] = v
|
||||
|
||||
try:
|
||||
self.access_log.log(
|
||||
logging.INFO, self.access_log_format.format(**atoms))
|
||||
except:
|
||||
self(traceback=True)
|
||||
else:
|
||||
for k, v in atoms.items():
|
||||
if isinstance(v, six.text_type):
|
||||
v = v.encode('utf8')
|
||||
elif not isinstance(v, str):
|
||||
v = str(v)
|
||||
# Fortunately, repr(str) escapes unprintable chars, \n, \t, etc
|
||||
# and backslash for us. All we have to do is strip the quotes.
|
||||
v = repr(v)[1:-1]
|
||||
# Escape double-quote.
|
||||
atoms[k] = v.replace('"', '\\"')
|
||||
|
||||
try:
|
||||
self.access_log.log(
|
||||
logging.INFO, self.access_log_format % atoms)
|
||||
except:
|
||||
self(traceback=True)
|
||||
|
||||
def time(self):
|
||||
"""Return now() in Apache Common Log Format (no timezone)."""
|
||||
now = datetime.datetime.now()
|
||||
monthnames = ['jan', 'feb', 'mar', 'apr', 'may', 'jun',
|
||||
'jul', 'aug', 'sep', 'oct', 'nov', 'dec']
|
||||
month = monthnames[now.month - 1].capitalize()
|
||||
return ('[%02d/%s/%04d:%02d:%02d:%02d]' %
|
||||
(now.day, month, now.year, now.hour, now.minute, now.second))
|
||||
|
||||
def _get_builtin_handler(self, log, key):
|
||||
for h in log.handlers:
|
||||
if getattr(h, '_cpbuiltin', None) == key:
|
||||
return h
|
||||
|
||||
# ------------------------- Screen handlers ------------------------- #
|
||||
def _set_screen_handler(self, log, enable, stream=None):
|
||||
h = self._get_builtin_handler(log, 'screen')
|
||||
if enable:
|
||||
if not h:
|
||||
if stream is None:
|
||||
stream = sys.stderr
|
||||
h = logging.StreamHandler(stream)
|
||||
h.setFormatter(logfmt)
|
||||
h._cpbuiltin = 'screen'
|
||||
log.addHandler(h)
|
||||
elif h:
|
||||
log.handlers.remove(h)
|
||||
|
||||
def _get_screen(self):
|
||||
h = self._get_builtin_handler
|
||||
has_h = h(self.error_log, 'screen') or h(self.access_log, 'screen')
|
||||
return bool(has_h)
|
||||
|
||||
def _set_screen(self, newvalue):
|
||||
self._set_screen_handler(self.error_log, newvalue, stream=sys.stderr)
|
||||
self._set_screen_handler(self.access_log, newvalue, stream=sys.stdout)
|
||||
screen = property(_get_screen, _set_screen,
|
||||
doc="""Turn stderr/stdout logging on or off.
|
||||
|
||||
If you set this to True, it'll add the appropriate StreamHandler for
|
||||
you. If you set it to False, it will remove the handler.
|
||||
""")
|
||||
|
||||
# -------------------------- File handlers -------------------------- #
|
||||
|
||||
def _add_builtin_file_handler(self, log, fname):
|
||||
h = logging.FileHandler(fname)
|
||||
h.setFormatter(logfmt)
|
||||
h._cpbuiltin = 'file'
|
||||
log.addHandler(h)
|
||||
|
||||
def _set_file_handler(self, log, filename):
|
||||
h = self._get_builtin_handler(log, 'file')
|
||||
if filename:
|
||||
if h:
|
||||
if h.baseFilename != os.path.abspath(filename):
|
||||
h.close()
|
||||
log.handlers.remove(h)
|
||||
self._add_builtin_file_handler(log, filename)
|
||||
else:
|
||||
self._add_builtin_file_handler(log, filename)
|
||||
else:
|
||||
if h:
|
||||
h.close()
|
||||
log.handlers.remove(h)
|
||||
|
||||
def _get_error_file(self):
|
||||
h = self._get_builtin_handler(self.error_log, 'file')
|
||||
if h:
|
||||
return h.baseFilename
|
||||
return ''
|
||||
|
||||
def _set_error_file(self, newvalue):
|
||||
self._set_file_handler(self.error_log, newvalue)
|
||||
error_file = property(_get_error_file, _set_error_file,
|
||||
doc="""The filename for self.error_log.
|
||||
|
||||
If you set this to a string, it'll add the appropriate FileHandler for
|
||||
you. If you set it to ``None`` or ``''``, it will remove the handler.
|
||||
""")
|
||||
|
||||
def _get_access_file(self):
|
||||
h = self._get_builtin_handler(self.access_log, 'file')
|
||||
if h:
|
||||
return h.baseFilename
|
||||
return ''
|
||||
|
||||
def _set_access_file(self, newvalue):
|
||||
self._set_file_handler(self.access_log, newvalue)
|
||||
access_file = property(_get_access_file, _set_access_file,
|
||||
doc="""The filename for self.access_log.
|
||||
|
||||
If you set this to a string, it'll add the appropriate FileHandler for
|
||||
you. If you set it to ``None`` or ``''``, it will remove the handler.
|
||||
""")
|
||||
|
||||
# ------------------------- WSGI handlers ------------------------- #
|
||||
|
||||
def _set_wsgi_handler(self, log, enable):
|
||||
h = self._get_builtin_handler(log, 'wsgi')
|
||||
if enable:
|
||||
if not h:
|
||||
h = WSGIErrorHandler()
|
||||
h.setFormatter(logfmt)
|
||||
h._cpbuiltin = 'wsgi'
|
||||
log.addHandler(h)
|
||||
elif h:
|
||||
log.handlers.remove(h)
|
||||
|
||||
def _get_wsgi(self):
|
||||
return bool(self._get_builtin_handler(self.error_log, 'wsgi'))
|
||||
|
||||
def _set_wsgi(self, newvalue):
|
||||
self._set_wsgi_handler(self.error_log, newvalue)
|
||||
wsgi = property(_get_wsgi, _set_wsgi,
|
||||
doc="""Write errors to wsgi.errors.
|
||||
|
||||
If you set this to True, it'll add the appropriate
|
||||
:class:`WSGIErrorHandler<cherrypy._cplogging.WSGIErrorHandler>` for you
|
||||
(which writes errors to ``wsgi.errors``).
|
||||
If you set it to False, it will remove the handler.
|
||||
""")
|
||||
|
||||
|
||||
class WSGIErrorHandler(logging.Handler):
|
||||
|
||||
"A handler class which writes logging records to environ['wsgi.errors']."
|
||||
|
||||
def flush(self):
|
||||
"""Flushes the stream."""
|
||||
try:
|
||||
stream = cherrypy.serving.request.wsgi_environ.get('wsgi.errors')
|
||||
except (AttributeError, KeyError):
|
||||
pass
|
||||
else:
|
||||
stream.flush()
|
||||
|
||||
def emit(self, record):
|
||||
"""Emit a record."""
|
||||
try:
|
||||
stream = cherrypy.serving.request.wsgi_environ.get('wsgi.errors')
|
||||
except (AttributeError, KeyError):
|
||||
pass
|
||||
else:
|
||||
try:
|
||||
msg = self.format(record)
|
||||
fs = '%s\n'
|
||||
import types
|
||||
# if no unicode support...
|
||||
if not hasattr(types, 'UnicodeType'):
|
||||
stream.write(fs % msg)
|
||||
else:
|
||||
try:
|
||||
stream.write(fs % msg)
|
||||
except UnicodeError:
|
||||
stream.write(fs % msg.encode('UTF-8'))
|
||||
self.flush()
|
||||
except:
|
||||
self.handleError(record)
|
||||
@@ -1,354 +0,0 @@
|
||||
"""Native adapter for serving CherryPy via mod_python
|
||||
|
||||
Basic usage:
|
||||
|
||||
##########################################
|
||||
# Application in a module called myapp.py
|
||||
##########################################
|
||||
|
||||
import cherrypy
|
||||
|
||||
class Root:
|
||||
@cherrypy.expose
|
||||
def index(self):
|
||||
return 'Hi there, Ho there, Hey there'
|
||||
|
||||
|
||||
# We will use this method from the mod_python configuration
|
||||
# as the entry point to our application
|
||||
def setup_server():
|
||||
cherrypy.tree.mount(Root())
|
||||
cherrypy.config.update({'environment': 'production',
|
||||
'log.screen': False,
|
||||
'show_tracebacks': False})
|
||||
|
||||
##########################################
|
||||
# mod_python settings for apache2
|
||||
# This should reside in your httpd.conf
|
||||
# or a file that will be loaded at
|
||||
# apache startup
|
||||
##########################################
|
||||
|
||||
# Start
|
||||
DocumentRoot "/"
|
||||
Listen 8080
|
||||
LoadModule python_module /usr/lib/apache2/modules/mod_python.so
|
||||
|
||||
<Location "/">
|
||||
PythonPath "sys.path+['/path/to/my/application']"
|
||||
SetHandler python-program
|
||||
PythonHandler cherrypy._cpmodpy::handler
|
||||
PythonOption cherrypy.setup myapp::setup_server
|
||||
PythonDebug On
|
||||
</Location>
|
||||
# End
|
||||
|
||||
The actual path to your mod_python.so is dependent on your
|
||||
environment. In this case we suppose a global mod_python
|
||||
installation on a Linux distribution such as Ubuntu.
|
||||
|
||||
We do set the PythonPath configuration setting so that
|
||||
your application can be found by the user running
the apache2 instance. Of course, if your application
|
||||
resides in the global site-package this won't be needed.
|
||||
|
||||
Then restart apache2 and access http://127.0.0.1:8080
|
||||
"""
|
||||
|
||||
import io
|
||||
import logging
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
|
||||
import cherrypy
|
||||
from cherrypy._cpcompat import copyitems, ntob
|
||||
from cherrypy._cperror import format_exc, bare_error
|
||||
from cherrypy.lib import httputil
|
||||
|
||||
|
||||
# ------------------------------ Request-handling
|
||||
|
||||
|
||||
def setup(req):
|
||||
from mod_python import apache
|
||||
|
||||
# Run any setup functions defined by a "PythonOption cherrypy.setup"
|
||||
# directive.
|
||||
options = req.get_options()
|
||||
if 'cherrypy.setup' in options:
|
||||
for function in options['cherrypy.setup'].split():
|
||||
atoms = function.split('::', 1)
|
||||
if len(atoms) == 1:
|
||||
mod = __import__(atoms[0], globals(), locals())
|
||||
else:
|
||||
modname, fname = atoms
|
||||
mod = __import__(modname, globals(), locals(), [fname])
|
||||
func = getattr(mod, fname)
|
||||
func()
|
||||
|
||||
cherrypy.config.update({'log.screen': False,
|
||||
'tools.ignore_headers.on': True,
|
||||
'tools.ignore_headers.headers': ['Range'],
|
||||
})
|
||||
|
||||
engine = cherrypy.engine
|
||||
if hasattr(engine, 'signal_handler'):
|
||||
engine.signal_handler.unsubscribe()
|
||||
if hasattr(engine, 'console_control_handler'):
|
||||
engine.console_control_handler.unsubscribe()
|
||||
engine.autoreload.unsubscribe()
|
||||
cherrypy.server.unsubscribe()
|
||||
|
||||
def _log(msg, level):
|
||||
newlevel = apache.APLOG_ERR
|
||||
if logging.DEBUG >= level:
|
||||
newlevel = apache.APLOG_DEBUG
|
||||
elif logging.INFO >= level:
|
||||
newlevel = apache.APLOG_INFO
|
||||
elif logging.WARNING >= level:
|
||||
newlevel = apache.APLOG_WARNING
|
||||
# On Windows, req.server is required or the msg will vanish. See
|
||||
# http://www.modpython.org/pipermail/mod_python/2003-October/014291.html
|
||||
# Also, "When server is not specified...LogLevel does not apply..."
|
||||
apache.log_error(msg, newlevel, req.server)
|
||||
engine.subscribe('log', _log)
|
||||
|
||||
engine.start()
|
||||
|
||||
def cherrypy_cleanup(data):
|
||||
engine.exit()
|
||||
try:
|
||||
# apache.register_cleanup wasn't available until 3.1.4.
|
||||
apache.register_cleanup(cherrypy_cleanup)
|
||||
except AttributeError:
|
||||
req.server.register_cleanup(req, cherrypy_cleanup)
|
||||
|
||||
|
||||
class _ReadOnlyRequest:
|
||||
expose = ('read', 'readline', 'readlines')
|
||||
|
||||
def __init__(self, req):
|
||||
for method in self.expose:
|
||||
self.__dict__[method] = getattr(req, method)
|
||||
|
||||
|
||||
recursive = False
|
||||
|
||||
_isSetUp = False
|
||||
|
||||
|
||||
def handler(req):
|
||||
from mod_python import apache
|
||||
try:
|
||||
global _isSetUp
|
||||
if not _isSetUp:
|
||||
setup(req)
|
||||
_isSetUp = True
|
||||
|
||||
# Obtain a Request object from CherryPy
|
||||
local = req.connection.local_addr
|
||||
local = httputil.Host(
|
||||
local[0], local[1], req.connection.local_host or '')
|
||||
remote = req.connection.remote_addr
|
||||
remote = httputil.Host(
|
||||
remote[0], remote[1], req.connection.remote_host or '')
|
||||
|
||||
scheme = req.parsed_uri[0] or 'http'
|
||||
req.get_basic_auth_pw()
|
||||
|
||||
try:
|
||||
# apache.mpm_query only became available in mod_python 3.1
|
||||
q = apache.mpm_query
|
||||
threaded = q(apache.AP_MPMQ_IS_THREADED)
|
||||
forked = q(apache.AP_MPMQ_IS_FORKED)
|
||||
except AttributeError:
|
||||
bad_value = ("You must provide a PythonOption '%s', "
|
||||
"either 'on' or 'off', when running a version "
|
||||
'of mod_python < 3.1')

# Fetch the PythonOption directives so the fallbacks below can read them.
options = req.get_options()
|
||||
|
||||
threaded = options.get('multithread', '').lower()
|
||||
if threaded == 'on':
|
||||
threaded = True
|
||||
elif threaded == 'off':
|
||||
threaded = False
|
||||
else:
|
||||
raise ValueError(bad_value % 'multithread')
|
||||
|
||||
forked = options.get('multiprocess', '').lower()
|
||||
if forked == 'on':
|
||||
forked = True
|
||||
elif forked == 'off':
|
||||
forked = False
|
||||
else:
|
||||
raise ValueError(bad_value % 'multiprocess')
|
||||
|
||||
sn = cherrypy.tree.script_name(req.uri or '/')
|
||||
if sn is None:
|
||||
send_response(req, '404 Not Found', [], '')
|
||||
else:
|
||||
app = cherrypy.tree.apps[sn]
|
||||
method = req.method
|
||||
path = req.uri
|
||||
qs = req.args or ''
|
||||
reqproto = req.protocol
|
||||
headers = copyitems(req.headers_in)
|
||||
rfile = _ReadOnlyRequest(req)
|
||||
prev = None
|
||||
|
||||
try:
|
||||
redirections = []
|
||||
while True:
|
||||
request, response = app.get_serving(local, remote, scheme,
|
||||
'HTTP/1.1')
|
||||
request.login = req.user
|
||||
request.multithread = bool(threaded)
|
||||
request.multiprocess = bool(forked)
|
||||
request.app = app
|
||||
request.prev = prev
|
||||
|
||||
# Run the CherryPy Request object and obtain the response
|
||||
try:
|
||||
request.run(method, path, qs, reqproto, headers, rfile)
|
||||
break
|
||||
except cherrypy.InternalRedirect:
|
||||
ir = sys.exc_info()[1]
|
||||
app.release_serving()
|
||||
prev = request
|
||||
|
||||
if not recursive:
|
||||
if ir.path in redirections:
|
||||
raise RuntimeError(
|
||||
'InternalRedirector visited the same URL '
|
||||
'twice: %r' % ir.path)
|
||||
else:
|
||||
# Add the *previous* path_info + qs to
|
||||
# redirections.
|
||||
if qs:
|
||||
qs = '?' + qs
|
||||
redirections.append(sn + path + qs)
|
||||
|
||||
# Munge environment and try again.
|
||||
method = 'GET'
|
||||
path = ir.path
|
||||
qs = ir.query_string
|
||||
rfile = io.BytesIO()
|
||||
|
||||
send_response(
|
||||
req, response.output_status, response.header_list,
|
||||
response.body, response.stream)
|
||||
finally:
|
||||
app.release_serving()
|
||||
except:
|
||||
tb = format_exc()
|
||||
cherrypy.log(tb, 'MOD_PYTHON', severity=logging.ERROR)
|
||||
s, h, b = bare_error()
|
||||
send_response(req, s, h, b)
|
||||
return apache.OK
|
||||
|
||||
|
||||
def send_response(req, status, headers, body, stream=False):
|
||||
# Set response status
|
||||
req.status = int(status[:3])
|
||||
|
||||
# Set response headers
|
||||
req.content_type = 'text/plain'
|
||||
for header, value in headers:
|
||||
if header.lower() == 'content-type':
|
||||
req.content_type = value
|
||||
continue
|
||||
req.headers_out.add(header, value)
|
||||
|
||||
if stream:
|
||||
# Flush now so the status and headers are sent immediately.
|
||||
req.flush()
|
||||
|
||||
# Set response body
|
||||
if isinstance(body, text_or_bytes):
|
||||
req.write(body)
|
||||
else:
|
||||
for seg in body:
|
||||
req.write(seg)
|
||||
|
||||
|
||||
# --------------- Startup tools for CherryPy + mod_python --------------- #
|
||||
try:
|
||||
import subprocess
|
||||
|
||||
def popen(fullcmd):
|
||||
p = subprocess.Popen(fullcmd, shell=True,
|
||||
stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
|
||||
close_fds=True)
|
||||
return p.stdout
|
||||
except ImportError:
|
||||
def popen(fullcmd):
|
||||
pipein, pipeout = os.popen4(fullcmd)
|
||||
return pipeout
|
||||
|
||||
|
||||
def read_process(cmd, args=''):
|
||||
fullcmd = '%s %s' % (cmd, args)
|
||||
pipeout = popen(fullcmd)
|
||||
try:
|
||||
firstline = pipeout.readline()
|
||||
cmd_not_found = re.search(
|
||||
ntob('(not recognized|No such file|not found)'),
|
||||
firstline,
|
||||
re.IGNORECASE
|
||||
)
|
||||
if cmd_not_found:
|
||||
raise IOError('%s must be on your system path.' % cmd)
|
||||
output = firstline + pipeout.read()
|
||||
finally:
|
||||
pipeout.close()
|
||||
return output
|
||||
|
||||
|
||||
class ModPythonServer(object):
|
||||
|
||||
template = """
|
||||
# Apache2 server configuration file for running CherryPy with mod_python.
|
||||
|
||||
DocumentRoot "/"
|
||||
Listen %(port)s
|
||||
LoadModule python_module modules/mod_python.so
|
||||
|
||||
<Location %(loc)s>
|
||||
SetHandler python-program
|
||||
PythonHandler %(handler)s
|
||||
PythonDebug On
|
||||
%(opts)s
|
||||
</Location>
|
||||
"""
|
||||
|
||||
def __init__(self, loc='/', port=80, opts=None, apache_path='apache',
|
||||
handler='cherrypy._cpmodpy::handler'):
|
||||
self.loc = loc
|
||||
self.port = port
|
||||
self.opts = opts
|
||||
self.apache_path = apache_path
|
||||
self.handler = handler
|
||||
|
||||
def start(self):
|
||||
opts = ''.join([' PythonOption %s %s\n' % (k, v)
|
||||
for k, v in self.opts])
|
||||
conf_data = self.template % {'port': self.port,
|
||||
'loc': self.loc,
|
||||
'opts': opts,
|
||||
'handler': self.handler,
|
||||
}
|
||||
|
||||
mpconf = os.path.join(os.path.dirname(__file__), 'cpmodpy.conf')
|
||||
f = open(mpconf, 'wb')
|
||||
try:
|
||||
f.write(conf_data)
|
||||
finally:
|
||||
f.close()
|
||||
|
||||
response = read_process(self.apache_path, '-k start -f %s' % mpconf)
|
||||
self.ready = True
|
||||
return response
|
||||
|
||||
def stop(self):
|
||||
os.popen('apache -k stop')
|
||||
self.ready = False
|
||||
@@ -1,154 +0,0 @@
"""Native adapter for serving CherryPy via its builtin server."""

import logging
import sys
import io

import cherrypy
from cherrypy._cperror import format_exc, bare_error
from cherrypy.lib import httputil
from cherrypy import wsgiserver


class NativeGateway(wsgiserver.Gateway):

    recursive = False

    def respond(self):
        req = self.req
        try:
            # Obtain a Request object from CherryPy
            local = req.server.bind_addr
            local = httputil.Host(local[0], local[1], '')
            remote = req.conn.remote_addr, req.conn.remote_port
            remote = httputil.Host(remote[0], remote[1], '')

            scheme = req.scheme
            sn = cherrypy.tree.script_name(req.uri or '/')
            if sn is None:
                self.send_response('404 Not Found', [], [''])
            else:
                app = cherrypy.tree.apps[sn]
                method = req.method
                path = req.path
                qs = req.qs or ''
                headers = req.inheaders.items()
                rfile = req.rfile
                prev = None

                try:
                    redirections = []
                    while True:
                        request, response = app.get_serving(
                            local, remote, scheme, 'HTTP/1.1')
                        request.multithread = True
                        request.multiprocess = False
                        request.app = app
                        request.prev = prev

                        # Run the CherryPy Request object and obtain the
                        # response
                        try:
                            request.run(method, path, qs,
                                        req.request_protocol, headers, rfile)
                            break
                        except cherrypy.InternalRedirect:
                            ir = sys.exc_info()[1]
                            app.release_serving()
                            prev = request

                            if not self.recursive:
                                if ir.path in redirections:
                                    raise RuntimeError(
                                        'InternalRedirector visited the same '
                                        'URL twice: %r' % ir.path)
                                else:
                                    # Add the *previous* path_info + qs to
                                    # redirections.
                                    if qs:
                                        qs = '?' + qs
                                    redirections.append(sn + path + qs)

                            # Munge environment and try again.
                            method = 'GET'
                            path = ir.path
                            qs = ir.query_string
                            rfile = io.BytesIO()

                    self.send_response(
                        response.output_status, response.header_list,
                        response.body)
                finally:
                    app.release_serving()
        except:
            tb = format_exc()
            # print tb
            cherrypy.log(tb, 'NATIVE_ADAPTER', severity=logging.ERROR)
            s, h, b = bare_error()
            self.send_response(s, h, b)

    def send_response(self, status, headers, body):
        req = self.req

        # Set response status
        req.status = str(status or '500 Server Error')

        # Set response headers
        for header, value in headers:
            req.outheaders.append((header, value))
        if (req.ready and not req.sent_headers):
            req.sent_headers = True
            req.send_headers()

        # Set response body
        for seg in body:
            req.write(seg)


class CPHTTPServer(wsgiserver.HTTPServer):

    """Wrapper for wsgiserver.HTTPServer.

    wsgiserver has been designed to not reference CherryPy in any way,
    so that it can be used in other frameworks and applications.
    Therefore, we wrap it here, so we can apply some attributes
    from config -> cherrypy.server -> HTTPServer.
    """

    def __init__(self, server_adapter=cherrypy.server):
        self.server_adapter = server_adapter

        server_name = (self.server_adapter.socket_host or
                       self.server_adapter.socket_file or
                       None)

        wsgiserver.HTTPServer.__init__(
            self, server_adapter.bind_addr, NativeGateway,
            minthreads=server_adapter.thread_pool,
            maxthreads=server_adapter.thread_pool_max,
            server_name=server_name)

        self.max_request_header_size = (
            self.server_adapter.max_request_header_size or 0)
        self.max_request_body_size = (
            self.server_adapter.max_request_body_size or 0)
        self.request_queue_size = self.server_adapter.socket_queue_size
        self.timeout = self.server_adapter.socket_timeout
        self.shutdown_timeout = self.server_adapter.shutdown_timeout
        self.protocol = self.server_adapter.protocol_version
        self.nodelay = self.server_adapter.nodelay

        ssl_module = self.server_adapter.ssl_module or 'pyopenssl'
        if self.server_adapter.ssl_context:
            adapter_class = wsgiserver.get_ssl_adapter_class(ssl_module)
            self.ssl_adapter = adapter_class(
                self.server_adapter.ssl_certificate,
                self.server_adapter.ssl_private_key,
                self.server_adapter.ssl_certificate_chain)
            self.ssl_adapter.context = self.server_adapter.ssl_context
        elif self.server_adapter.ssl_certificate:
            adapter_class = wsgiserver.get_ssl_adapter_class(ssl_module)
            self.ssl_adapter = adapter_class(
                self.server_adapter.ssl_certificate,
                self.server_adapter.ssl_private_key,
                self.server_adapter.ssl_certificate_chain)
File diff suppressed because it is too large
@@ -1,970 +0,0 @@
|
||||
import sys
|
||||
import time
|
||||
import warnings
|
||||
|
||||
import six
|
||||
|
||||
import cherrypy
|
||||
from cherrypy._cpcompat import text_or_bytes, copykeys, ntob
|
||||
from cherrypy._cpcompat import SimpleCookie, CookieError
|
||||
from cherrypy import _cpreqbody, _cpconfig
|
||||
from cherrypy._cperror import format_exc, bare_error
|
||||
from cherrypy.lib import httputil, file_generator
|
||||
|
||||
|
||||
class Hook(object):
|
||||
|
||||
"""A callback and its metadata: failsafe, priority, and kwargs."""
|
||||
|
||||
callback = None
|
||||
"""
|
||||
The bare callable that this Hook object is wrapping, which will
|
||||
be called when the Hook is called."""
|
||||
|
||||
failsafe = False
|
||||
"""
|
||||
If True, the callback is guaranteed to run even if other callbacks
|
||||
from the same call point raise exceptions."""
|
||||
|
||||
priority = 50
|
||||
"""
|
||||
Defines the order of execution for a list of Hooks. Priority numbers
|
||||
should be limited to the closed interval [0, 100], but values outside
|
||||
this range are acceptable, as are fractional values."""
|
||||
|
||||
kwargs = {}
|
||||
"""
|
||||
A set of keyword arguments that will be passed to the
|
||||
callable on each call."""
|
||||
|
||||
def __init__(self, callback, failsafe=None, priority=None, **kwargs):
|
||||
self.callback = callback
|
||||
|
||||
if failsafe is None:
|
||||
failsafe = getattr(callback, 'failsafe', False)
|
||||
self.failsafe = failsafe
|
||||
|
||||
if priority is None:
|
||||
priority = getattr(callback, 'priority', 50)
|
||||
self.priority = priority
|
||||
|
||||
self.kwargs = kwargs
|
||||
|
||||
def __lt__(self, other):
|
||||
# Python 3
|
||||
return self.priority < other.priority
|
||||
|
||||
def __cmp__(self, other):
|
||||
# Python 2
|
||||
return cmp(self.priority, other.priority)
|
||||
|
||||
def __call__(self):
|
||||
"""Run self.callback(**self.kwargs)."""
|
||||
return self.callback(**self.kwargs)
|
||||
|
||||
def __repr__(self):
|
||||
cls = self.__class__
|
||||
return ('%s.%s(callback=%r, failsafe=%r, priority=%r, %s)'
|
||||
% (cls.__module__, cls.__name__, self.callback,
|
||||
self.failsafe, self.priority,
|
||||
', '.join(['%s=%r' % (k, v)
|
||||
for k, v in self.kwargs.items()])))
|
||||
|
||||
|
||||
class HookMap(dict):
|
||||
|
||||
"""A map of call points to lists of callbacks (Hook objects)."""
|
||||
|
||||
def __new__(cls, points=None):
|
||||
d = dict.__new__(cls)
|
||||
for p in points or []:
|
||||
d[p] = []
|
||||
return d
|
||||
|
||||
def __init__(self, *a, **kw):
|
||||
pass
|
||||
|
||||
def attach(self, point, callback, failsafe=None, priority=None, **kwargs):
|
||||
"""Append a new Hook made from the supplied arguments."""
|
||||
self[point].append(Hook(callback, failsafe, priority, **kwargs))
|
||||
|
||||
def run(self, point):
|
||||
"""Execute all registered Hooks (callbacks) for the given point."""
|
||||
exc = None
|
||||
hooks = self[point]
|
||||
hooks.sort()
|
||||
for hook in hooks:
|
||||
# Some hooks are guaranteed to run even if others at
|
||||
# the same hookpoint fail. We will still log the failure,
|
||||
# but proceed on to the next hook. The only way
|
||||
# to stop all processing from one of these hooks is
|
||||
# to raise SystemExit and stop the whole server.
|
||||
if exc is None or hook.failsafe:
|
||||
try:
|
||||
hook()
|
||||
except (KeyboardInterrupt, SystemExit):
|
||||
raise
|
||||
except (cherrypy.HTTPError, cherrypy.HTTPRedirect,
|
||||
cherrypy.InternalRedirect):
|
||||
exc = sys.exc_info()[1]
|
||||
except:
|
||||
exc = sys.exc_info()[1]
|
||||
cherrypy.log(traceback=True, severity=40)
|
||||
if exc:
|
||||
raise exc
|
||||
|
||||
def __copy__(self):
|
||||
newmap = self.__class__()
|
||||
# We can't just use 'update' because we want copies of the
|
||||
# mutable values (each is a list) as well.
|
||||
for k, v in self.items():
|
||||
newmap[k] = v[:]
|
||||
return newmap
|
||||
copy = __copy__
|
||||
|
||||
def __repr__(self):
|
||||
cls = self.__class__
|
||||
return '%s.%s(points=%r)' % (
|
||||
cls.__module__,
|
||||
cls.__name__,
|
||||
copykeys(self)
|
||||
)
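# Illustrative sketch (hypothetical callbacks): how priority and failsafe
# interact when a HookMap runs a hook point.  Lower priority numbers run
# first; a failsafe hook still runs after an earlier hook has raised, and the
# first exception is re-raised once the whole point has been processed.
#
#     def check_auth():
#         raise cherrypy.HTTPError(401)
#
#     def write_audit_log():
#         print('audited')
#     write_audit_log.failsafe = True
#
#     hooks = HookMap(['before_handler'])
#     hooks.attach('before_handler', check_auth, priority=20)
#     hooks.attach('before_handler', write_audit_log, priority=80)
#     hooks.run('before_handler')   # prints 'audited', then re-raises the 401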
|
||||
|
||||
|
||||
# Config namespace handlers
|
||||
|
||||
def hooks_namespace(k, v):
|
||||
"""Attach bare hooks declared in config."""
|
||||
# Use split again to allow multiple hooks for a single
|
||||
# hookpoint per path (e.g. "hooks.before_handler.1").
|
||||
# Little-known fact you only get from reading source ;)
|
||||
hookpoint = k.split('.', 1)[0]
|
||||
if isinstance(v, text_or_bytes):
|
||||
v = cherrypy.lib.attributes(v)
|
||||
if not isinstance(v, Hook):
|
||||
v = Hook(v)
|
||||
cherrypy.serving.request.hooks[hookpoint].append(v)
|
||||
|
||||
|
||||
def request_namespace(k, v):
|
||||
"""Attach request attributes declared in config."""
|
||||
# Provides config entries to set request.body attrs (like
|
||||
# attempt_charsets).
|
||||
if k[:5] == 'body.':
|
||||
setattr(cherrypy.serving.request.body, k[5:], v)
|
||||
else:
|
||||
setattr(cherrypy.serving.request, k, v)
|
||||
|
||||
|
||||
def response_namespace(k, v):
|
||||
"""Attach response attributes declared in config."""
|
||||
# Provides config entries to set default response headers
|
||||
# http://cherrypy.org/ticket/889
|
||||
if k[:8] == 'headers.':
|
||||
cherrypy.serving.response.headers[k.split('.', 1)[1]] = v
|
||||
else:
|
||||
setattr(cherrypy.serving.response, k, v)
|
||||
|
||||
|
||||
def error_page_namespace(k, v):
|
||||
"""Attach error pages declared in config."""
|
||||
if k != 'default':
|
||||
k = int(k)
|
||||
cherrypy.serving.request.error_page[k] = v
|
||||
|
||||
|
||||
hookpoints = ['on_start_resource', 'before_request_body',
|
||||
'before_handler', 'before_finalize',
|
||||
'on_end_resource', 'on_end_request',
|
||||
'before_error_response', 'after_error_response']
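# Illustrative sketch (hypothetical values): config entries consumed by the
# namespace handlers above.  The prefix of each key selects the handler and
# the remainder names the hook point, attribute, header, or status code.
#
#     config = {
#         'hooks.before_finalize': 'myapp.hooks.add_headers',
#         'request.show_tracebacks': False,
#         'request.body.attempt_charsets': ['utf-8', 'latin-1'],
#         'response.headers.X-Frame-Options': 'DENY',
#         'error_page.default': '/var/www/errors/default.html',
#     }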
|
||||
|
||||
|
||||
class Request(object):
|
||||
|
||||
"""An HTTP request.
|
||||
|
||||
This object represents the metadata of an HTTP request message;
|
||||
that is, it contains attributes which describe the environment
|
||||
in which the request URL, headers, and body were sent (if you
|
||||
want tools to interpret the headers and body, those are elsewhere,
|
||||
mostly in Tools). This 'metadata' consists of socket data,
|
||||
transport characteristics, and the Request-Line. This object
|
||||
also contains data regarding the configuration in effect for
|
||||
the given URL, and the execution plan for generating a response.
|
||||
"""
|
||||
|
||||
prev = None
|
||||
"""
|
||||
The previous Request object (if any). This should be None
|
||||
unless we are processing an InternalRedirect."""
|
||||
|
||||
# Conversation/connection attributes
|
||||
local = httputil.Host('127.0.0.1', 80)
|
||||
'An httputil.Host(ip, port, hostname) object for the server socket.'
|
||||
|
||||
remote = httputil.Host('127.0.0.1', 1111)
|
||||
'An httputil.Host(ip, port, hostname) object for the client socket.'
|
||||
|
||||
scheme = 'http'
|
||||
"""
|
||||
The protocol used between client and server. In most cases,
|
||||
this will be either 'http' or 'https'."""
|
||||
|
||||
server_protocol = 'HTTP/1.1'
|
||||
"""
|
||||
The HTTP version for which the HTTP server is at least
|
||||
conditionally compliant."""
|
||||
|
||||
base = ''
|
||||
"""The (scheme://host) portion of the requested URL.
|
||||
In some cases (e.g. when proxying via mod_rewrite), this may contain
|
||||
path segments which cherrypy.url uses when constructing url's, but
|
||||
which otherwise are ignored by CherryPy. Regardless, this value
|
||||
MUST NOT end in a slash."""
|
||||
|
||||
# Request-Line attributes
|
||||
request_line = ''
|
||||
"""
|
||||
The complete Request-Line received from the client. This is a
|
||||
single string consisting of the request method, URI, and protocol
|
||||
version (joined by spaces). Any final CRLF is removed."""
|
||||
|
||||
method = 'GET'
|
||||
"""
|
||||
Indicates the HTTP method to be performed on the resource identified
|
||||
by the Request-URI. Common methods include GET, HEAD, POST, PUT, and
|
||||
DELETE. CherryPy allows any extension method; however, various HTTP
|
||||
servers and gateways may restrict the set of allowable methods.
|
||||
CherryPy applications SHOULD restrict the set (on a per-URI basis)."""
|
||||
|
||||
query_string = ''
|
||||
"""
|
||||
The query component of the Request-URI, a string of information to be
|
||||
interpreted by the resource. The query portion of a URI follows the
|
||||
path component, and is separated by a '?'. For example, the URI
|
||||
'http://www.cherrypy.org/wiki?a=3&b=4' has the query component,
|
||||
'a=3&b=4'."""
|
||||
|
||||
query_string_encoding = 'utf8'
|
||||
"""
|
||||
The encoding expected for query string arguments after %XX (percent HEX HEX) decoding.
|
||||
If a query string is provided that cannot be decoded with this encoding,
|
||||
404 is raised (since technically it's a different URI). If you want
|
||||
arbitrary encodings to not error, set this to 'Latin-1'; you can then
|
||||
encode back to bytes and re-decode to whatever encoding you like later.
|
||||
"""
|
||||
|
||||
protocol = (1, 1)
|
||||
"""The HTTP protocol version corresponding to the set
|
||||
of features which should be allowed in the response. If BOTH
|
||||
the client's request message AND the server's level of HTTP
|
||||
compliance is HTTP/1.1, this attribute will be the tuple (1, 1).
|
||||
If either is 1.0, this attribute will be the tuple (1, 0).
|
||||
Lower HTTP protocol versions are not explicitly supported."""
|
||||
|
||||
params = {}
|
||||
"""
|
||||
A dict which combines query string (GET) and request entity (POST)
|
||||
variables. This is populated in two stages: GET params are added
|
||||
before the 'on_start_resource' hook, and POST params are added
|
||||
between the 'before_request_body' and 'before_handler' hooks."""
|
||||
|
||||
# Message attributes
|
||||
header_list = []
|
||||
"""
|
||||
A list of the HTTP request headers as (name, value) tuples.
|
||||
In general, you should use request.headers (a dict) instead."""
|
||||
|
||||
headers = httputil.HeaderMap()
|
||||
"""
|
||||
A dict-like object containing the request headers. Keys are header
|
||||
names (in Title-Case format); however, you may get and set them in
|
||||
a case-insensitive manner. That is, headers['Content-Type'] and
|
||||
headers['content-type'] refer to the same value. Values are header
|
||||
values (decoded according to :rfc:`2047` if necessary). See also:
|
||||
httputil.HeaderMap, httputil.HeaderElement."""
|
||||
|
||||
cookie = SimpleCookie()
|
||||
"""See help(Cookie)."""
|
||||
|
||||
rfile = None
|
||||
"""
|
||||
If the request included an entity (body), it will be available
|
||||
as a stream in this attribute. However, the rfile will normally
|
||||
be read for you between the 'before_request_body' hook and the
|
||||
'before_handler' hook, and the resulting string is placed into
|
||||
either request.params or the request.body attribute.
|
||||
|
||||
You may disable the automatic consumption of the rfile by setting
|
||||
request.process_request_body to False, either in config for the desired
|
||||
path, or in an 'on_start_resource' or 'before_request_body' hook.
|
||||
|
||||
WARNING: In almost every case, you should not attempt to read from the
|
||||
rfile stream after CherryPy's automatic mechanism has read it. If you
|
||||
turn off the automatic parsing of rfile, you should read exactly the
|
||||
number of bytes specified in request.headers['Content-Length'].
|
||||
Ignoring either of these warnings may result in a hung request thread
|
||||
or in corruption of the next (pipelined) request.
|
||||
"""
|
||||
|
||||
process_request_body = True
|
||||
"""
|
||||
If True, the rfile (if any) is automatically read and parsed,
|
||||
and the result placed into request.params or request.body."""
|
||||
|
||||
methods_with_bodies = ('POST', 'PUT')
|
||||
"""
|
||||
A sequence of HTTP methods for which CherryPy will automatically
|
||||
attempt to read a body from the rfile. If you are going to change
|
||||
this property, modify it on the configuration (recommended)
|
||||
or on the "hook point" `on_start_resource`.
|
||||
"""
|
||||
|
||||
body = None
|
||||
"""
|
||||
If the request Content-Type is 'application/x-www-form-urlencoded'
|
||||
or multipart, this will be None. Otherwise, this will be an instance
|
||||
of :class:`RequestBody<cherrypy._cpreqbody.RequestBody>` (which you
|
||||
can .read()); this value is set between the 'before_request_body' and
|
||||
'before_handler' hooks (assuming that process_request_body is True)."""
|
||||
|
||||
# Dispatch attributes
|
||||
dispatch = cherrypy.dispatch.Dispatcher()
|
||||
"""
|
||||
The object which looks up the 'page handler' callable and collects
|
||||
config for the current request based on the path_info, other
|
||||
request attributes, and the application architecture. The core
|
||||
calls the dispatcher as early as possible, passing it a 'path_info'
|
||||
argument.
|
||||
|
||||
The default dispatcher discovers the page handler by matching path_info
|
||||
to a hierarchical arrangement of objects, starting at request.app.root.
|
||||
See help(cherrypy.dispatch) for more information."""
|
||||
|
||||
script_name = ''
|
||||
"""
|
||||
The 'mount point' of the application which is handling this request.
|
||||
|
||||
This attribute MUST NOT end in a slash. If the script_name refers to
|
||||
the root of the URI, it MUST be an empty string (not "/").
|
||||
"""
|
||||
|
||||
path_info = '/'
|
||||
"""
|
||||
The 'relative path' portion of the Request-URI. This is relative
|
||||
to the script_name ('mount point') of the application which is
|
||||
handling this request."""
|
||||
|
||||
login = None
|
||||
"""
|
||||
When authentication is used during the request processing this is
|
||||
set to 'False' if it failed and to the 'username' value if it succeeded.
|
||||
The default 'None' implies that no authentication happened."""
|
||||
|
||||
# Note that cherrypy.url uses "if request.app:" to determine whether
|
||||
# the call is during a real HTTP request or not. So leave this None.
|
||||
app = None
|
||||
"""The cherrypy.Application object which is handling this request."""
|
||||
|
||||
handler = None
|
||||
"""
|
||||
The function, method, or other callable which CherryPy will call to
|
||||
produce the response. The discovery of the handler and the arguments
|
||||
it will receive are determined by the request.dispatch object.
|
||||
By default, the handler is discovered by walking a tree of objects
|
||||
starting at request.app.root, and is then passed all HTTP params
|
||||
(from the query string and POST body) as keyword arguments."""
|
||||
|
||||
toolmaps = {}
|
||||
"""
|
||||
A nested dict of all Toolboxes and Tools in effect for this request,
|
||||
of the form: {Toolbox.namespace: {Tool.name: config dict}}."""
|
||||
|
||||
config = None
|
||||
"""
|
||||
A flat dict of all configuration entries which apply to the
|
||||
current request. These entries are collected from global config,
|
||||
application config (based on request.path_info), and from handler
|
||||
config (exactly how is governed by the request.dispatch object in
|
||||
effect for this request; by default, handler config can be attached
|
||||
anywhere in the tree between request.app.root and the final handler,
|
||||
and inherits downward)."""
|
||||
|
||||
is_index = None
|
||||
"""
|
||||
This will be True if the current request is mapped to an 'index'
|
||||
resource handler (also, a 'default' handler if path_info ends with
|
||||
a slash). The value may be used to automatically redirect the
|
||||
user-agent to a 'more canonical' URL which either adds or removes
|
||||
the trailing slash. See cherrypy.tools.trailing_slash."""
|
||||
|
||||
hooks = HookMap(hookpoints)
|
||||
"""
|
||||
A HookMap (dict-like object) of the form: {hookpoint: [hook, ...]}.
|
||||
Each key is a str naming the hook point, and each value is a list
|
||||
of hooks which will be called at that hook point during this request.
|
||||
The list of hooks is generally populated as early as possible (mostly
|
||||
from Tools specified in config), but may be extended at any time.
|
||||
See also: _cprequest.Hook, _cprequest.HookMap, and cherrypy.tools."""
|
||||
|
||||
error_response = cherrypy.HTTPError(500).set_response
|
||||
"""
|
||||
The no-arg callable which will handle unexpected, untrapped errors
|
||||
during request processing. This is not used for expected exceptions
|
||||
(like NotFound, HTTPError, or HTTPRedirect) which are raised in
|
||||
response to expected conditions (those should be customized either
|
||||
via request.error_page or by overriding HTTPError.set_response).
|
||||
By default, error_response uses HTTPError(500) to return a generic
|
||||
error response to the user-agent."""
|
||||
|
||||
error_page = {}
|
||||
"""
|
||||
A dict of {error code: response filename or callable} pairs.
|
||||
|
||||
The error code must be an int representing a given HTTP error code,
|
||||
or the string 'default', which will be used if no matching entry
|
||||
is found for a given numeric code.
|
||||
|
||||
If a filename is provided, the file should contain a Python string-
|
||||
formatting template, and can expect by default to receive format
|
||||
values with the mapping keys %(status)s, %(message)s, %(traceback)s,
|
||||
and %(version)s. The set of format mappings can be extended by
|
||||
overriding HTTPError.set_response.
|
||||
|
||||
If a callable is provided, it will be called by default with keyword
|
||||
arguments 'status', 'message', 'traceback', and 'version', as for a
|
||||
string-formatting template. The callable must return a string or
|
||||
iterable of strings which will be set to response.body. It may also
|
||||
override headers or perform any other processing.
|
||||
|
||||
If no entry is given for an error code, and no 'default' entry exists,
|
||||
a default template will be used.
|
||||
"""
|
||||
|
||||
show_tracebacks = True
|
||||
"""
|
||||
If True, unexpected errors encountered during request processing will
|
||||
include a traceback in the response body."""
|
||||
|
||||
show_mismatched_params = True
|
||||
"""
|
||||
If True, mismatched parameters encountered during a PageHandler invocation
will be included in the response body.
|
||||
|
||||
throws = (KeyboardInterrupt, SystemExit, cherrypy.InternalRedirect)
|
||||
"""The sequence of exceptions which Request.run does not trap."""
|
||||
|
||||
throw_errors = False
|
||||
"""
|
||||
If True, Request.run will not trap any errors (except HTTPRedirect and
|
||||
HTTPError, which are more properly called 'exceptions', not errors)."""
|
||||
|
||||
closed = False
|
||||
"""True once the close method has been called, False otherwise."""
|
||||
|
||||
stage = None
|
||||
"""
|
||||
A string containing the stage reached in the request-handling process.
|
||||
This is useful when debugging a live server with hung requests."""
|
||||
|
||||
namespaces = _cpconfig.NamespaceSet(
|
||||
**{'hooks': hooks_namespace,
|
||||
'request': request_namespace,
|
||||
'response': response_namespace,
|
||||
'error_page': error_page_namespace,
|
||||
'tools': cherrypy.tools,
|
||||
})
|
||||
|
||||
def __init__(self, local_host, remote_host, scheme='http',
|
||||
server_protocol='HTTP/1.1'):
|
||||
"""Populate a new Request object.
|
||||
|
||||
local_host should be an httputil.Host object with the server info.
|
||||
remote_host should be an httputil.Host object with the client info.
|
||||
scheme should be a string, either "http" or "https".
|
||||
"""
|
||||
self.local = local_host
|
||||
self.remote = remote_host
|
||||
self.scheme = scheme
|
||||
self.server_protocol = server_protocol
|
||||
|
||||
self.closed = False
|
||||
|
||||
# Put a *copy* of the class error_page into self.
|
||||
self.error_page = self.error_page.copy()
|
||||
|
||||
# Put a *copy* of the class namespaces into self.
|
||||
self.namespaces = self.namespaces.copy()
|
||||
|
||||
self.stage = None
|
||||
|
||||
def close(self):
|
||||
"""Run cleanup code. (Core)"""
|
||||
if not self.closed:
|
||||
self.closed = True
|
||||
self.stage = 'on_end_request'
|
||||
self.hooks.run('on_end_request')
|
||||
self.stage = 'close'
|
||||
|
||||
def run(self, method, path, query_string, req_protocol, headers, rfile):
|
||||
r"""Process the Request. (Core)
|
||||
|
||||
method, path, query_string, and req_protocol should be pulled directly
|
||||
from the Request-Line (e.g. "GET /path?key=val HTTP/1.0").
|
||||
|
||||
path
|
||||
This should be %XX-unquoted, but query_string should not be.
|
||||
|
||||
When using Python 2, they both MUST be byte strings,
|
||||
not unicode strings.
|
||||
|
||||
When using Python 3, they both MUST be unicode strings,
|
||||
not byte strings, and preferably not bytes \x00-\xFF
|
||||
disguised as unicode.
|
||||
|
||||
headers
|
||||
A list of (name, value) tuples.
|
||||
|
||||
rfile
|
||||
A file-like object containing the HTTP request entity.
|
||||
|
||||
When run() is done, the returned object should have 3 attributes:
|
||||
|
||||
* status, e.g. "200 OK"
|
||||
* header_list, a list of (name, value) tuples
|
||||
* body, an iterable yielding strings
|
||||
|
||||
Consumer code (HTTP servers) should then access these response
|
||||
attributes to build the outbound stream.
|
||||
|
||||
"""
|
||||
response = cherrypy.serving.response
|
||||
self.stage = 'run'
|
||||
try:
|
||||
self.error_response = cherrypy.HTTPError(500).set_response
|
||||
|
||||
self.method = method
|
||||
path = path or '/'
|
||||
self.query_string = query_string or ''
|
||||
self.params = {}
|
||||
|
||||
# Compare request and server HTTP protocol versions, in case our
|
||||
# server does not support the requested protocol. Limit our output
|
||||
# to min(req, server). We want the following output:
|
||||
# request server actual written supported response
|
||||
# protocol protocol response protocol feature set
|
||||
# a 1.0 1.0 1.0 1.0
|
||||
# b 1.0 1.1 1.1 1.0
|
||||
# c 1.1 1.0 1.0 1.0
|
||||
# d 1.1 1.1 1.1 1.1
|
||||
# Notice that, in (b), the response will be "HTTP/1.1" even though
|
||||
# the client only understands 1.0. RFC 2616 10.5.6 says we should
|
||||
# only return 505 if the _major_ version is different.
|
||||
rp = int(req_protocol[5]), int(req_protocol[7])
|
||||
sp = int(self.server_protocol[5]), int(self.server_protocol[7])
|
||||
self.protocol = min(rp, sp)
|
||||
response.headers.protocol = self.protocol
|
||||
|
||||
# Rebuild first line of the request (e.g. "GET /path HTTP/1.0").
|
||||
url = path
|
||||
if query_string:
|
||||
url += '?' + query_string
|
||||
self.request_line = '%s %s %s' % (method, url, req_protocol)
|
||||
|
||||
self.header_list = list(headers)
|
||||
self.headers = httputil.HeaderMap()
|
||||
|
||||
self.rfile = rfile
|
||||
self.body = None
|
||||
|
||||
self.cookie = SimpleCookie()
|
||||
self.handler = None
|
||||
|
||||
# path_info should be the path from the
|
||||
# app root (script_name) to the handler.
|
||||
self.script_name = self.app.script_name
|
||||
self.path_info = pi = path[len(self.script_name):]
|
||||
|
||||
self.stage = 'respond'
|
||||
self.respond(pi)
|
||||
|
||||
except self.throws:
|
||||
raise
|
||||
except:
|
||||
if self.throw_errors:
|
||||
raise
|
||||
else:
|
||||
# Failure in setup, error handler or finalize. Bypass them.
|
||||
# Can't use handle_error because we may not have hooks yet.
|
||||
cherrypy.log(traceback=True, severity=40)
|
||||
if self.show_tracebacks:
|
||||
body = format_exc()
|
||||
else:
|
||||
body = ''
|
||||
r = bare_error(body)
|
||||
response.output_status, response.header_list, response.body = r
|
||||
|
||||
if self.method == 'HEAD':
|
||||
# HEAD requests MUST NOT return a message-body in the response.
|
||||
response.body = []
|
||||
|
||||
try:
|
||||
cherrypy.log.access()
|
||||
except:
|
||||
cherrypy.log.error(traceback=True)
|
||||
|
||||
if response.timed_out:
|
||||
raise cherrypy.TimeoutError()
|
||||
|
||||
return response
|
||||
|
||||
# Uncomment for stage debugging
|
||||
# stage = property(lambda self: self._stage, lambda self, v: print(v))
|
||||
|
||||
def respond(self, path_info):
|
||||
"""Generate a response for the resource at self.path_info. (Core)"""
|
||||
response = cherrypy.serving.response
|
||||
try:
|
||||
try:
|
||||
try:
|
||||
if self.app is None:
|
||||
raise cherrypy.NotFound()
|
||||
|
||||
# Get the 'Host' header, so we can HTTPRedirect properly.
|
||||
self.stage = 'process_headers'
|
||||
self.process_headers()
|
||||
|
||||
# Make a copy of the class hooks
|
||||
self.hooks = self.__class__.hooks.copy()
|
||||
self.toolmaps = {}
|
||||
|
||||
self.stage = 'get_resource'
|
||||
self.get_resource(path_info)
|
||||
|
||||
self.body = _cpreqbody.RequestBody(
|
||||
self.rfile, self.headers, request_params=self.params)
|
||||
|
||||
self.namespaces(self.config)
|
||||
|
||||
self.stage = 'on_start_resource'
|
||||
self.hooks.run('on_start_resource')
|
||||
|
||||
# Parse the querystring
|
||||
self.stage = 'process_query_string'
|
||||
self.process_query_string()
|
||||
|
||||
# Process the body
|
||||
if self.process_request_body:
|
||||
if self.method not in self.methods_with_bodies:
|
||||
self.process_request_body = False
|
||||
self.stage = 'before_request_body'
|
||||
self.hooks.run('before_request_body')
|
||||
if self.process_request_body:
|
||||
self.body.process()
|
||||
|
||||
# Run the handler
|
||||
self.stage = 'before_handler'
|
||||
self.hooks.run('before_handler')
|
||||
if self.handler:
|
||||
self.stage = 'handler'
|
||||
response.body = self.handler()
|
||||
|
||||
# Finalize
|
||||
self.stage = 'before_finalize'
|
||||
self.hooks.run('before_finalize')
|
||||
response.finalize()
|
||||
except (cherrypy.HTTPRedirect, cherrypy.HTTPError):
|
||||
inst = sys.exc_info()[1]
|
||||
inst.set_response()
|
||||
self.stage = 'before_finalize (HTTPError)'
|
||||
self.hooks.run('before_finalize')
|
||||
response.finalize()
|
||||
finally:
|
||||
self.stage = 'on_end_resource'
|
||||
self.hooks.run('on_end_resource')
|
||||
except self.throws:
|
||||
raise
|
||||
except:
|
||||
if self.throw_errors:
|
||||
raise
|
||||
self.handle_error()
|
||||
|
||||
def process_query_string(self):
|
||||
"""Parse the query string into Python structures. (Core)"""
|
||||
try:
|
||||
p = httputil.parse_query_string(
|
||||
self.query_string, encoding=self.query_string_encoding)
|
||||
except UnicodeDecodeError:
|
||||
raise cherrypy.HTTPError(
|
||||
404, 'The given query string could not be processed. Query '
|
||||
'strings for this resource must be encoded with %r.' %
|
||||
self.query_string_encoding)
|
||||
|
||||
# Python 2 only: keyword arguments must be byte strings (type 'str').
|
||||
if six.PY2:
|
||||
for key, value in p.items():
|
||||
if isinstance(key, six.text_type):
|
||||
del p[key]
|
||||
p[key.encode(self.query_string_encoding)] = value
|
||||
self.params.update(p)
|
||||
|
||||
def process_headers(self):
|
||||
"""Parse HTTP header data into Python structures. (Core)"""
|
||||
# Process the headers into self.headers
|
||||
headers = self.headers
|
||||
for name, value in self.header_list:
|
||||
# Call title() now (and use dict.__method__(headers))
|
||||
# so title doesn't have to be called twice.
|
||||
name = name.title()
|
||||
value = value.strip()
|
||||
|
||||
# Warning: if there is more than one header entry for cookies
|
||||
# (AFAIK, only Konqueror does that), only the last one will
|
||||
# remain in headers (but they will be correctly stored in
|
||||
# request.cookie).
|
||||
if '=?' in value:
|
||||
dict.__setitem__(headers, name, httputil.decode_TEXT(value))
|
||||
else:
|
||||
dict.__setitem__(headers, name, value)
|
||||
|
||||
# Handle cookies differently because on Konqueror, multiple
|
||||
# cookies come on different lines with the same key
|
||||
if name == 'Cookie':
|
||||
try:
|
||||
self.cookie.load(value)
|
||||
except CookieError:
|
||||
msg = 'Illegal cookie name %s' % value.split('=')[0]
|
||||
raise cherrypy.HTTPError(400, msg)
|
||||
|
||||
if not dict.__contains__(headers, 'Host'):
|
||||
# All Internet-based HTTP/1.1 servers MUST respond with a 400
|
||||
# (Bad Request) status code to any HTTP/1.1 request message
|
||||
# which lacks a Host header field.
|
||||
if self.protocol >= (1, 1):
|
||||
msg = "HTTP/1.1 requires a 'Host' request header."
|
||||
raise cherrypy.HTTPError(400, msg)
|
||||
host = dict.get(headers, 'Host')
|
||||
if not host:
|
||||
host = self.local.name or self.local.ip
|
||||
self.base = '%s://%s' % (self.scheme, host)
|
||||
|
||||
def get_resource(self, path):
|
||||
"""Call a dispatcher (which sets self.handler and .config). (Core)"""
|
||||
# First, see if there is a custom dispatch at this URI. Custom
|
||||
# dispatchers can only be specified in app.config, not in _cp_config
|
||||
# (since custom dispatchers may not even have an app.root).
|
||||
dispatch = self.app.find_config(
|
||||
path, 'request.dispatch', self.dispatch)
|
||||
|
||||
# dispatch() should set self.handler and self.config
|
||||
dispatch(path)
|
||||
|
||||
def handle_error(self):
|
||||
"""Handle the last unanticipated exception. (Core)"""
|
||||
try:
|
||||
self.hooks.run('before_error_response')
|
||||
if self.error_response:
|
||||
self.error_response()
|
||||
self.hooks.run('after_error_response')
|
||||
cherrypy.serving.response.finalize()
|
||||
except cherrypy.HTTPRedirect:
|
||||
inst = sys.exc_info()[1]
|
||||
inst.set_response()
|
||||
cherrypy.serving.response.finalize()
|
||||
|
||||
# ------------------------- Properties ------------------------- #
|
||||
|
||||
def _get_body_params(self):
|
||||
warnings.warn(
|
||||
'body_params is deprecated in CherryPy 3.2, will be removed in '
|
||||
'CherryPy 3.3.',
|
||||
DeprecationWarning
|
||||
)
|
||||
return self.body.params
|
||||
body_params = property(_get_body_params,
|
||||
doc="""
|
||||
If the request Content-Type is 'application/x-www-form-urlencoded' or
|
||||
multipart, this will be a dict of the params pulled from the entity
|
||||
body; that is, it will be the portion of request.params that come
|
||||
from the message body (sometimes called "POST params", although they
|
||||
can be sent with various HTTP method verbs). This value is set between
|
||||
the 'before_request_body' and 'before_handler' hooks (assuming that
|
||||
process_request_body is True).
|
||||
|
||||
Deprecated in 3.2, will be removed for 3.3 in favor of
|
||||
:attr:`request.body.params<cherrypy._cprequest.RequestBody.params>`.""")
|
||||
|
||||
|
||||
class ResponseBody(object):
|
||||
|
||||
"""The body of the HTTP response (the response entity)."""
|
||||
|
||||
if six.PY3:
|
||||
unicode_err = ('Page handlers MUST return bytes. Use tools.encode '
|
||||
'if you wish to return unicode.')
|
||||
|
||||
def __get__(self, obj, objclass=None):
|
||||
if obj is None:
|
||||
# When calling on the class instead of an instance...
|
||||
return self
|
||||
else:
|
||||
return obj._body
|
||||
|
||||
def __set__(self, obj, value):
|
||||
# Convert the given value to an iterable object.
|
||||
if six.PY3 and isinstance(value, str):
|
||||
raise ValueError(self.unicode_err)
|
||||
|
||||
if isinstance(value, text_or_bytes):
|
||||
# strings get wrapped in a list because iterating over a single
|
||||
# item list is much faster than iterating over every character
|
||||
# in a long string.
|
||||
if value:
|
||||
value = [value]
|
||||
else:
|
||||
# [''] doesn't evaluate to False, so replace it with [].
|
||||
value = []
|
||||
elif six.PY3 and isinstance(value, list):
|
||||
# every item in a list must be bytes...
|
||||
for i, item in enumerate(value):
|
||||
if isinstance(item, str):
|
||||
raise ValueError(self.unicode_err)
|
||||
# Don't use isinstance here; io.IOBase which has an ABC takes
|
||||
# 1000 times as long as, say, isinstance(value, str)
|
||||
elif hasattr(value, 'read'):
|
||||
value = file_generator(value)
|
||||
elif value is None:
|
||||
value = []
|
||||
obj._body = value
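# Illustrative sketch (an assumption): under Python 3 a page handler must hand
# back bytes (or an iterable of bytes); returning str trips the ValueError
# above unless tools.encode is turned on to do the encoding.
#
#     @cherrypy.expose
#     def index(self):
#         return 'hello'.encode('utf-8')            # ok: bytes
#     # config alternative: {'tools.encode.on': True}  # handlers may return str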
|
||||
|
||||
|
||||
class Response(object):
|
||||
|
||||
"""An HTTP Response, including status, headers, and body."""
|
||||
|
||||
status = ''
|
||||
"""The HTTP Status-Code and Reason-Phrase."""
|
||||
|
||||
header_list = []
|
||||
"""
|
||||
A list of the HTTP response headers as (name, value) tuples.
|
||||
In general, you should use response.headers (a dict) instead. This
|
||||
attribute is generated from response.headers and is not valid until
|
||||
after the finalize phase."""
|
||||
|
||||
headers = httputil.HeaderMap()
|
||||
"""
|
||||
A dict-like object containing the response headers. Keys are header
|
||||
names (in Title-Case format); however, you may get and set them in
|
||||
a case-insensitive manner. That is, headers['Content-Type'] and
|
||||
headers['content-type'] refer to the same value. Values are header
|
||||
values (decoded according to :rfc:`2047` if necessary).
|
||||
|
||||
.. seealso:: classes :class:`HeaderMap`, :class:`HeaderElement`
|
||||
"""
|
||||
|
||||
cookie = SimpleCookie()
|
||||
"""See help(Cookie)."""
|
||||
|
||||
body = ResponseBody()
|
||||
"""The body (entity) of the HTTP response."""
|
||||
|
||||
time = None
|
||||
"""The value of time.time() when created. Use in HTTP dates."""
|
||||
|
||||
timeout = 300
|
||||
"""Seconds after which the response will be aborted."""
|
||||
|
||||
timed_out = False
|
||||
"""
|
||||
Flag to indicate the response should be aborted, because it has
|
||||
exceeded its timeout."""
|
||||
|
||||
stream = False
|
||||
"""If False, buffer the response body."""
|
||||
|
||||
def __init__(self):
|
||||
self.status = None
|
||||
self.header_list = None
|
||||
self._body = []
|
||||
self.time = time.time()
|
||||
|
||||
self.headers = httputil.HeaderMap()
|
||||
# Since we know all our keys are titled strings, we can
|
||||
# bypass HeaderMap.update and get a big speed boost.
|
||||
dict.update(self.headers, {
|
||||
'Content-Type': 'text/html',
|
||||
'Server': 'CherryPy/' + cherrypy.__version__,
|
||||
'Date': httputil.HTTPDate(self.time),
|
||||
})
|
||||
self.cookie = SimpleCookie()
|
||||
|
||||
def collapse_body(self):
|
||||
"""Collapse self.body to a single string; replace it and return it."""
|
||||
if isinstance(self.body, text_or_bytes):
|
||||
return self.body
|
||||
|
||||
newbody = []
|
||||
for chunk in self.body:
|
||||
if six.PY3 and not isinstance(chunk, bytes):
|
||||
raise TypeError("Chunk %s is not of type 'bytes'." %
|
||||
repr(chunk))
|
||||
newbody.append(chunk)
|
||||
newbody = ntob('').join(newbody)
|
||||
|
||||
self.body = newbody
|
||||
return newbody
|
||||
|
||||
def finalize(self):
|
||||
"""Transform headers (and cookies) into self.header_list. (Core)"""
|
||||
try:
|
||||
code, reason, _ = httputil.valid_status(self.status)
|
||||
except ValueError:
|
||||
raise cherrypy.HTTPError(500, sys.exc_info()[1].args[0])
|
||||
|
||||
headers = self.headers
|
||||
|
||||
self.status = '%s %s' % (code, reason)
|
||||
self.output_status = ntob(str(code), 'ascii') + \
|
||||
ntob(' ') + headers.encode(reason)
|
||||
|
||||
if self.stream:
|
||||
# The upshot: wsgiserver will chunk the response if
|
||||
# you pop Content-Length (or set it explicitly to None).
|
||||
# Note that lib.static sets C-L to the file's st_size.
|
||||
if dict.get(headers, 'Content-Length') is None:
|
||||
dict.pop(headers, 'Content-Length', None)
|
||||
elif code < 200 or code in (204, 205, 304):
|
||||
# "All 1xx (informational), 204 (no content),
|
||||
# and 304 (not modified) responses MUST NOT
|
||||
# include a message-body."
|
||||
dict.pop(headers, 'Content-Length', None)
|
||||
self.body = ntob('')
|
||||
else:
|
||||
# Responses which are not streamed should have a Content-Length,
|
||||
# but allow user code to set Content-Length if desired.
|
||||
if dict.get(headers, 'Content-Length') is None:
|
||||
content = self.collapse_body()
|
||||
dict.__setitem__(headers, 'Content-Length', len(content))
|
||||
|
||||
# Transform our header dict into a list of tuples.
|
||||
self.header_list = h = headers.output()
|
||||
|
||||
cookie = self.cookie.output()
|
||||
if cookie:
|
||||
for line in cookie.split('\r\n'):
|
||||
name, value = line.split(': ', 1)
|
||||
if isinstance(name, six.text_type):
|
||||
name = name.encode('ISO-8859-1')
|
||||
if isinstance(value, six.text_type):
|
||||
value = headers.encode(value)
|
||||
h.append((name, value))
|
||||
|
||||
def check_timeout(self):
|
||||
"""If now > self.time + self.timeout, set self.timed_out.
|
||||
|
||||
This purposefully sets a flag, rather than raising an error,
|
||||
so that a monitor thread can interrupt the Response thread.
|
||||
"""
|
||||
if time.time() > self.time + self.timeout:
|
||||
self.timed_out = True
|
||||
@@ -1,226 +0,0 @@
|
||||
"""Manage HTTP servers with CherryPy."""
|
||||
|
||||
import six
|
||||
|
||||
import cherrypy
|
||||
from cherrypy.lib.reprconf import attributes
|
||||
from cherrypy._cpcompat import text_or_bytes
|
||||
|
||||
# We import * because we want to export check_port
|
||||
# et al as attributes of this module.
|
||||
from cherrypy.process.servers import *
|
||||
|
||||
|
||||
class Server(ServerAdapter):
|
||||
|
||||
"""An adapter for an HTTP server.
|
||||
|
||||
You can set attributes (like socket_host and socket_port)
|
||||
on *this* object (which is probably cherrypy.server), and call
|
||||
quickstart. For example::
|
||||
|
||||
cherrypy.server.socket_port = 80
|
||||
cherrypy.quickstart()
|
||||
"""
|
||||
|
||||
socket_port = 8080
|
||||
"""The TCP port on which to listen for connections."""
|
||||
|
||||
_socket_host = '127.0.0.1'
|
||||
|
||||
def _get_socket_host(self):
|
||||
return self._socket_host
|
||||
|
||||
def _set_socket_host(self, value):
|
||||
if value == '':
|
||||
raise ValueError("The empty string ('') is not an allowed value. "
|
||||
"Use '0.0.0.0' instead to listen on all active "
|
||||
'interfaces (INADDR_ANY).')
|
||||
self._socket_host = value
|
||||
socket_host = property(
|
||||
_get_socket_host,
|
||||
_set_socket_host,
|
||||
doc="""The hostname or IP address on which to listen for connections.
|
||||
|
||||
Host values may be any IPv4 or IPv6 address, or any valid hostname.
|
||||
The string 'localhost' is a synonym for '127.0.0.1' (or '::1', if
|
||||
your hosts file prefers IPv6). The string '0.0.0.0' is a special
|
||||
IPv4 entry meaning "any active interface" (INADDR_ANY), and '::'
|
||||
is the similar IN6ADDR_ANY for IPv6. The empty string or None are
|
||||
not allowed.""")
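# Illustrative sketch: pointing the server at an interface, either directly on
# cherrypy.server or through global config.
#
#     cherrypy.server.socket_host = '0.0.0.0'   # listen on all IPv4 interfaces
#     cherrypy.server.socket_port = 8080
#     # or: cherrypy.config.update({'server.socket_host': '::',
#     #                             'server.socket_port': 8080})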
|
||||
|
||||
socket_file = None
|
||||
"""If given, the name of the UNIX socket to use instead of TCP/IP.
|
||||
|
||||
When this option is not None, the `socket_host` and `socket_port` options
|
||||
are ignored."""
|
||||
|
||||
socket_queue_size = 5
|
||||
"""The 'backlog' argument to socket.listen(); specifies the maximum number
|
||||
of queued connections (default 5)."""
|
||||
|
||||
socket_timeout = 10
|
||||
"""The timeout in seconds for accepted connections (default 10)."""
|
||||
|
||||
accepted_queue_size = -1
|
||||
"""The maximum number of requests which will be queued up before
|
||||
the server refuses to accept more (default -1, meaning no limit).
|
||||
|
||||
accepted_queue_timeout = 10
|
||||
"""The timeout in seconds for attempting to add a request to the
|
||||
queue when the queue is full (default 10)."""
|
||||
|
||||
shutdown_timeout = 5
|
||||
"""The time to wait for HTTP worker threads to clean up."""
|
||||
|
||||
protocol_version = 'HTTP/1.1'
|
||||
"""The version string to write in the Status-Line of all HTTP responses,
|
||||
for example, "HTTP/1.1" (the default). Depending on the HTTP server used,
|
||||
this should also limit the supported features used in the response."""
|
||||
|
||||
thread_pool = 10
|
||||
"""The number of worker threads to start up in the pool."""
|
||||
|
||||
thread_pool_max = -1
|
||||
"""The maximum size of the worker-thread pool. Use -1 to indicate no limit.
|
||||
"""
|
||||
|
||||
max_request_header_size = 500 * 1024
|
||||
"""The maximum number of bytes allowable in the request headers.
|
||||
If exceeded, the HTTP server should return "413 Request Entity Too Large".
|
||||
"""
|
||||
|
||||
max_request_body_size = 100 * 1024 * 1024
|
||||
"""The maximum number of bytes allowable in the request body. If exceeded,
|
||||
the HTTP server should return "413 Request Entity Too Large"."""
|
||||
|
||||
instance = None
|
||||
"""If not None, this should be an HTTP server instance (such as
|
||||
CPWSGIServer) which cherrypy.server will control. Use this when you need
|
||||
more control over object instantiation than is available in the various
|
||||
configuration options."""
|
||||
|
||||
ssl_context = None
|
||||
"""When using PyOpenSSL, an instance of SSL.Context."""
|
||||
|
||||
ssl_certificate = None
|
||||
"""The filename of the SSL certificate to use."""
|
||||
|
||||
ssl_certificate_chain = None
|
||||
"""When using PyOpenSSL, the certificate chain to pass to
|
||||
Context.load_verify_locations."""
|
||||
|
||||
ssl_private_key = None
|
||||
"""The filename of the private key to use with SSL."""
|
||||
|
||||
if six.PY3:
|
||||
ssl_module = 'builtin'
|
||||
"""The name of a registered SSL adaptation module to use with
|
||||
the builtin WSGI server. Builtin options are: 'builtin' (to
|
||||
use the SSL library built into recent versions of Python).
|
||||
You may also register your own classes in the
|
||||
wsgiserver.ssl_adapters dict."""
|
||||
else:
|
||||
ssl_module = 'pyopenssl'
|
||||
"""The name of a registered SSL adaptation module to use with the
|
||||
builtin WSGI server. Builtin options are 'builtin' (to use the SSL
|
||||
library built into recent versions of Python) and 'pyopenssl' (to
|
||||
use the PyOpenSSL project, which you must install separately). You
|
||||
may also register your own classes in the wsgiserver.ssl_adapters
|
||||
dict."""
|
||||
|
||||
statistics = False
|
||||
"""Turns statistics-gathering on or off for aware HTTP servers."""
|
||||
|
||||
nodelay = True
|
||||
"""If True (the default since 3.1), sets the TCP_NODELAY socket option."""
|
||||
|
||||
wsgi_version = (1, 0)
|
||||
"""The WSGI version tuple to use with the builtin WSGI server.
|
||||
The provided options are (1, 0) [which includes support for PEP 3333,
|
||||
which declares it covers WSGI version 1.0.1 but still mandates the
|
||||
wsgi.version (1, 0)] and ('u', 0), an experimental unicode version.
|
||||
You may create and register your own experimental versions of the WSGI
|
||||
protocol by adding custom classes to the wsgiserver.wsgi_gateways dict."""
|
||||
|
||||
def __init__(self):
|
||||
self.bus = cherrypy.engine
|
||||
self.httpserver = None
|
||||
self.interrupt = None
|
||||
self.running = False
|
||||
|
||||
def httpserver_from_self(self, httpserver=None):
|
||||
"""Return a (httpserver, bind_addr) pair based on self attributes."""
|
||||
if httpserver is None:
|
||||
httpserver = self.instance
|
||||
if httpserver is None:
|
||||
from cherrypy import _cpwsgi_server
|
||||
httpserver = _cpwsgi_server.CPWSGIServer(self)
|
||||
if isinstance(httpserver, text_or_bytes):
|
||||
# Is anyone using this? Can I add an arg?
|
||||
httpserver = attributes(httpserver)(self)
|
||||
return httpserver, self.bind_addr
|
||||
|
||||
def start(self):
|
||||
"""Start the HTTP server."""
|
||||
if not self.httpserver:
|
||||
self.httpserver, self.bind_addr = self.httpserver_from_self()
|
||||
ServerAdapter.start(self)
|
||||
start.priority = 75
|
||||
|
||||
def _get_bind_addr(self):
|
||||
if self.socket_file:
|
||||
return self.socket_file
|
||||
if self.socket_host is None and self.socket_port is None:
|
||||
return None
|
||||
return (self.socket_host, self.socket_port)
|
||||
|
||||
def _set_bind_addr(self, value):
|
||||
if value is None:
|
||||
self.socket_file = None
|
||||
self.socket_host = None
|
||||
self.socket_port = None
|
||||
elif isinstance(value, text_or_bytes):
|
||||
self.socket_file = value
|
||||
self.socket_host = None
|
||||
self.socket_port = None
|
||||
else:
|
||||
try:
|
||||
self.socket_host, self.socket_port = value
|
||||
self.socket_file = None
|
||||
except ValueError:
|
||||
raise ValueError('bind_addr must be a (host, port) tuple '
|
||||
'(for TCP sockets) or a string (for Unix '
|
||||
'domain sockets), not %r' % value)
|
||||
bind_addr = property(
|
||||
_get_bind_addr,
|
||||
_set_bind_addr,
|
||||
doc='A (host, port) tuple for TCP sockets or '
|
||||
'a str for Unix domain sockets.')
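# Illustrative sketch: bind_addr accepts either form described above
# (the socket path is hypothetical).
#
#     cherrypy.server.bind_addr = ('127.0.0.1', 8080)    # TCP socket
#     # cherrypy.server.bind_addr = '/var/run/app.sock'  # UNIX domain socket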
|
||||
|
||||
def base(self):
|
||||
"""Return the base (scheme://host[:port] or sock file) for this server.
|
||||
"""
|
||||
if self.socket_file:
|
||||
return self.socket_file
|
||||
|
||||
host = self.socket_host
|
||||
if host in ('0.0.0.0', '::'):
|
||||
# 0.0.0.0 is INADDR_ANY and :: is IN6ADDR_ANY.
|
||||
# Look up the host name, which should be the
|
||||
# safest thing to spit out in a URL.
|
||||
import socket
|
||||
host = socket.gethostname()
|
||||
|
||||
port = self.socket_port
|
||||
|
||||
if self.ssl_certificate:
|
||||
scheme = 'https'
|
||||
if port != 443:
|
||||
host += ':%s' % port
|
||||
else:
|
||||
scheme = 'http'
|
||||
if port != 80:
|
||||
host += ':%s' % port
|
||||
|
||||
return '%s://%s' % (scheme, host)
|
||||
@@ -1,538 +0,0 @@
|
||||
"""CherryPy tools. A "tool" is any helper, adapted to CP.
|
||||
|
||||
Tools are usually designed to be used in a variety of ways (although some
|
||||
may only offer one if they choose):
|
||||
|
||||
Library calls
|
||||
All tools are callables that can be used wherever needed.
|
||||
The arguments are straightforward and should be detailed within the
|
||||
docstring.
|
||||
|
||||
Function decorators
|
||||
All tools, when called, may be used as decorators which configure
|
||||
individual CherryPy page handlers (methods on the CherryPy tree).
|
||||
That is, "@tools.anytool()" should "turn on" the tool via the
|
||||
decorated function's _cp_config attribute.
|
||||
|
||||
CherryPy config
|
||||
If a tool exposes a "_setup" callable, it will be called
|
||||
once per Request (if the feature is "turned on" via config).
|
||||
|
||||
Tools may be implemented as any object with a namespace. The builtins
|
||||
are generally either modules or instances of the tools.Tool class.
|
||||
"""
|
||||
|
||||
import sys
|
||||
import warnings
|
||||
|
||||
import cherrypy
|
||||
from cherrypy._helper import expose
|
||||
|
||||
from cherrypy.lib import cptools, encoding, auth, static, jsontools
|
||||
from cherrypy.lib import sessions as _sessions, xmlrpcutil as _xmlrpc
|
||||
from cherrypy.lib import caching as _caching
|
||||
from cherrypy.lib import auth_basic, auth_digest
|
||||
|
||||
|
||||
def _getargs(func):
|
||||
"""Return the names of all static arguments to the given function."""
|
||||
# Use this instead of importing inspect for less mem overhead.
|
||||
import types
|
||||
if sys.version_info >= (3, 0):
|
||||
if isinstance(func, types.MethodType):
|
||||
func = func.__func__
|
||||
co = func.__code__
|
||||
else:
|
||||
if isinstance(func, types.MethodType):
|
||||
func = func.im_func
|
||||
co = func.func_code
|
||||
return co.co_varnames[:co.co_argcount]
|
||||
|
||||
|
||||
_attr_error = (
|
||||
'CherryPy Tools cannot be turned on directly. Instead, turn them '
|
||||
'on via config, or use them as decorators on your page handlers.'
|
||||
)
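# Illustrative sketch: the two supported ways to turn a tool on, using the
# builtin gzip tool as an example.
#
#     # via config:    {'tools.gzip.on': True}
#     # via decorator:
#     # @cherrypy.expose
#     # @cherrypy.tools.gzip()
#     # def page(self):
#     #     return b'...'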
|
||||
|
||||
|
||||
class Tool(object):
|
||||
|
||||
"""A registered function for use with CherryPy request-processing hooks.
|
||||
|
||||
help(tool.callable) should give you more information about this Tool.
|
||||
"""
|
||||
|
||||
namespace = 'tools'
|
||||
|
||||
def __init__(self, point, callable, name=None, priority=50):
|
||||
self._point = point
|
||||
self.callable = callable
|
||||
self._name = name
|
||||
self._priority = priority
|
||||
self.__doc__ = self.callable.__doc__
|
||||
self._setargs()
|
||||
|
||||
def _get_on(self):
|
||||
raise AttributeError(_attr_error)
|
||||
|
||||
def _set_on(self, value):
|
||||
raise AttributeError(_attr_error)
|
||||
on = property(_get_on, _set_on)
|
||||
|
||||
def _setargs(self):
|
||||
"""Copy func parameter names to obj attributes."""
|
||||
try:
|
||||
for arg in _getargs(self.callable):
|
||||
setattr(self, arg, None)
|
||||
except (TypeError, AttributeError):
|
||||
if hasattr(self.callable, '__call__'):
|
||||
for arg in _getargs(self.callable.__call__):
|
||||
setattr(self, arg, None)
|
||||
# IronPython 1.0 raises NotImplementedError because
|
||||
# inspect.getargspec tries to access Python bytecode
|
||||
# in co_code attribute.
|
||||
except NotImplementedError:
|
||||
pass
|
||||
# IronPython 1B1 may raise IndexError in some cases,
|
||||
# but if we trap it here it doesn't prevent CP from working.
|
||||
except IndexError:
|
||||
pass
|
||||
|
||||
def _merged_args(self, d=None):
|
||||
"""Return a dict of configuration entries for this Tool."""
|
||||
if d:
|
||||
conf = d.copy()
|
||||
else:
|
||||
conf = {}
|
||||
|
||||
tm = cherrypy.serving.request.toolmaps[self.namespace]
|
||||
if self._name in tm:
|
||||
conf.update(tm[self._name])
|
||||
|
||||
if 'on' in conf:
|
||||
del conf['on']
|
||||
|
||||
return conf
|
||||
|
||||
def __call__(self, *args, **kwargs):
|
||||
"""Compile-time decorator (turn on the tool in config).
|
||||
|
||||
For example::
|
||||
|
||||
@expose
|
||||
@tools.proxy()
|
||||
def whats_my_base(self):
|
||||
return cherrypy.request.base
|
||||
"""
|
||||
if args:
|
||||
raise TypeError('The %r Tool does not accept positional '
|
||||
'arguments; you must use keyword arguments.'
|
||||
% self._name)
|
||||
|
||||
def tool_decorator(f):
|
||||
if not hasattr(f, '_cp_config'):
|
||||
f._cp_config = {}
|
||||
subspace = self.namespace + '.' + self._name + '.'
|
||||
f._cp_config[subspace + 'on'] = True
|
||||
for k, v in kwargs.items():
|
||||
f._cp_config[subspace + k] = v
|
||||
return f
|
||||
return tool_decorator
|
||||
|
||||
def _setup(self):
|
||||
"""Hook this tool into cherrypy.request.
|
||||
|
||||
The standard CherryPy request object will automatically call this
|
||||
method when the tool is "turned on" in config.
|
||||
"""
|
||||
conf = self._merged_args()
|
||||
p = conf.pop('priority', None)
|
||||
if p is None:
|
||||
p = getattr(self.callable, 'priority', self._priority)
|
||||
cherrypy.serving.request.hooks.attach(self._point, self.callable,
|
||||
priority=p, **conf)
|
||||
|
||||
|
||||
class HandlerTool(Tool):
|
||||
|
||||
"""Tool which is called 'before main', that may skip normal handlers.
|
||||
|
||||
If the tool successfully handles the request (by setting response.body),
|
||||
it should return True. This will cause CherryPy to skip any 'normal' page
|
||||
handler. If the tool did not handle the request, it should return False
|
||||
to tell CherryPy to continue on and call the normal page handler. If the
|
||||
tool is declared AS a page handler (see the 'handler' method), returning
|
||||
False will raise NotFound.
|
||||
"""
|
||||
|
||||
def __init__(self, callable, name=None):
|
||||
Tool.__init__(self, 'before_handler', callable, name)
|
||||
|
||||
def handler(self, *args, **kwargs):
|
||||
"""Use this tool as a CherryPy page handler.
|
||||
|
||||
For example::
|
||||
|
||||
class Root:
|
||||
nav = tools.staticdir.handler(section="/nav", dir="nav",
|
||||
root=absDir)
|
||||
"""
|
||||
@expose
|
||||
def handle_func(*a, **kw):
|
||||
handled = self.callable(*args, **self._merged_args(kwargs))
|
||||
if not handled:
|
||||
raise cherrypy.NotFound()
|
||||
return cherrypy.serving.response.body
|
||||
return handle_func
|
||||
|
||||
def _wrapper(self, **kwargs):
|
||||
if self.callable(**kwargs):
|
||||
cherrypy.serving.request.handler = None
|
||||
|
||||
def _setup(self):
|
||||
"""Hook this tool into cherrypy.request.
|
||||
|
||||
The standard CherryPy request object will automatically call this
|
||||
method when the tool is "turned on" in config.
|
||||
"""
|
||||
conf = self._merged_args()
|
||||
p = conf.pop('priority', None)
|
||||
if p is None:
|
||||
p = getattr(self.callable, 'priority', self._priority)
|
||||
cherrypy.serving.request.hooks.attach(self._point, self._wrapper,
|
||||
priority=p, **conf)
|
||||
|
||||
|
||||
class HandlerWrapperTool(Tool):
|
||||
|
||||
"""Tool which wraps request.handler in a provided wrapper function.
|
||||
|
||||
The 'newhandler' arg must be a handler wrapper function that takes a
|
||||
'next_handler' argument, plus ``*args`` and ``**kwargs``. Like all
|
||||
page handler
|
||||
functions, it must return an iterable for use as cherrypy.response.body.
|
||||
|
||||
For example, to allow your 'inner' page handlers to return dicts
|
||||
which then get interpolated into a template::
|
||||
|
||||
def interpolator(next_handler, *args, **kwargs):
|
||||
filename = cherrypy.request.config.get('template')
|
||||
cherrypy.response.template = env.get_template(filename)
|
||||
response_dict = next_handler(*args, **kwargs)
|
||||
return cherrypy.response.template.render(**response_dict)
|
||||
cherrypy.tools.jinja = HandlerWrapperTool(interpolator)
|
||||
"""
|
||||
|
||||
def __init__(self, newhandler, point='before_handler', name=None,
|
||||
priority=50):
|
||||
self.newhandler = newhandler
|
||||
self._point = point
|
||||
self._name = name
|
||||
self._priority = priority
|
||||
|
||||
def callable(self, *args, **kwargs):
|
||||
innerfunc = cherrypy.serving.request.handler
|
||||
|
||||
def wrap(*args, **kwargs):
|
||||
return self.newhandler(innerfunc, *args, **kwargs)
|
||||
cherrypy.serving.request.handler = wrap
|
||||
|
||||
|
||||
class ErrorTool(Tool):
|
||||
|
||||
"""Tool which is used to replace the default request.error_response."""
|
||||
|
||||
def __init__(self, callable, name=None):
|
||||
Tool.__init__(self, None, callable, name)
|
||||
|
||||
def _wrapper(self):
|
||||
self.callable(**self._merged_args())
|
||||
|
||||
def _setup(self):
|
||||
"""Hook this tool into cherrypy.request.
|
||||
|
||||
The standard CherryPy request object will automatically call this
|
||||
method when the tool is "turned on" in config.
|
||||
"""
|
||||
cherrypy.serving.request.error_response = self._wrapper
|
||||
|
||||
|
||||
# Builtin tools #
|
||||
|
||||
|
||||
class SessionTool(Tool):
|
||||
|
||||
"""Session Tool for CherryPy.
|
||||
|
||||
sessions.locking
|
||||
When 'implicit' (the default), the session will be locked for you,
|
||||
just before running the page handler.
|
||||
|
||||
When 'early', the session will be locked before reading the request
|
||||
body. This is off by default for safety reasons; for example,
|
||||
a large upload would block the session, denying an AJAX
|
||||
progress meter
|
||||
(`issue <https://github.com/cherrypy/cherrypy/issues/630>`_).
|
||||
|
||||
When 'explicit' (or any other value), you need to call
|
||||
cherrypy.session.acquire_lock() yourself before using
|
||||
session data.
|
||||
"""
|
||||
|
||||
def __init__(self):
|
||||
# _sessions.init must be bound after headers are read
|
||||
Tool.__init__(self, 'before_request_body', _sessions.init)
|
||||
|
||||
def _lock_session(self):
|
||||
cherrypy.serving.session.acquire_lock()
|
||||
|
||||
def _setup(self):
|
||||
"""Hook this tool into cherrypy.request.
|
||||
|
||||
The standard CherryPy request object will automatically call this
|
||||
method when the tool is "turned on" in config.
|
||||
"""
|
||||
hooks = cherrypy.serving.request.hooks
|
||||
|
||||
conf = self._merged_args()
|
||||
|
||||
p = conf.pop('priority', None)
|
||||
if p is None:
|
||||
p = getattr(self.callable, 'priority', self._priority)
|
||||
|
||||
hooks.attach(self._point, self.callable, priority=p, **conf)
|
||||
|
||||
locking = conf.pop('locking', 'implicit')
|
||||
if locking == 'implicit':
|
||||
hooks.attach('before_handler', self._lock_session)
|
||||
elif locking == 'early':
|
||||
# Lock before the request body (but after _sessions.init runs!)
|
||||
hooks.attach('before_request_body', self._lock_session,
|
||||
priority=60)
|
||||
else:
|
||||
# Don't lock
|
||||
pass
|
||||
|
||||
hooks.attach('before_finalize', _sessions.save)
|
||||
hooks.attach('on_end_request', _sessions.close)
|
||||
|
||||
def regenerate(self):
|
||||
"""Drop the current session and make a new one (with a new id)."""
|
||||
sess = cherrypy.serving.session
|
||||
sess.regenerate()
|
||||
|
||||
# Grab cookie-relevant tool args
|
||||
conf = dict([(k, v) for k, v in self._merged_args().items()
|
||||
if k in ('path', 'path_header', 'name', 'timeout',
|
||||
'domain', 'secure')])
|
||||
_sessions.set_response_cookie(**conf)
|
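For illustration only (not part of the removed file), here is one way the 'explicit' locking mode described in the SessionTool docstring might be used; the counter handler is a made-up example.

# Hypothetical sketch: explicit session locking, per the docstring above.
import cherrypy


class Root(object):
    @cherrypy.expose
    def index(self):
        # With tools.sessions.locking = 'explicit', the lock must be
        # acquired manually before touching session data.
        cherrypy.session.acquire_lock()
        count = cherrypy.session.get('count', 0) + 1
        cherrypy.session['count'] = count
        return 'visit %d' % count


if __name__ == '__main__':
    cherrypy.quickstart(Root(), '/', {
        '/': {
            'tools.sessions.on': True,
            'tools.sessions.locking': 'explicit',
        },
    })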
||||
|
||||
|
||||
class XMLRPCController(object):
|
||||
|
||||
"""A Controller (page handler collection) for XML-RPC.
|
||||
|
||||
To use it, have your controllers subclass this base class (it will
|
||||
turn on the tool for you).
|
||||
|
||||
You can also supply the following optional config entries::
|
||||
|
||||
tools.xmlrpc.encoding: 'utf-8'
|
||||
tools.xmlrpc.allow_none: 0
|
||||
|
||||
XML-RPC is a rather discontinuous layer over HTTP; dispatching to the
|
||||
appropriate handler must first be performed according to the URL, and
|
||||
then a second dispatch step must take place according to the RPC method
|
||||
specified in the request body. It also allows a superfluous "/RPC2"
|
||||
prefix in the URL, supplies its own handler args in the body, and
|
||||
requires a 200 OK "Fault" response instead of 404 when the desired
|
||||
method is not found.
|
||||
|
||||
Therefore, XML-RPC cannot be implemented for CherryPy via a Tool alone.
|
||||
This Controller acts as the dispatch target for the first half (based
|
||||
on the URL); it then reads the RPC method from the request body and
|
||||
does its own second dispatch step based on that method. It also reads
|
||||
body params, and returns a Fault on error.
|
||||
|
||||
The XMLRPCDispatcher strips any /RPC2 prefix; if you aren't using /RPC2
|
||||
in your URLs, you can safely skip turning on the XMLRPCDispatcher.
|
||||
Otherwise, you need to declare it in config::
|
||||
|
||||
request.dispatch: cherrypy.dispatch.XMLRPCDispatcher()
|
||||
"""
|
||||
|
||||
# Note we're hard-coding this into the 'tools' namespace. We could do
|
||||
# a huge amount of work to make it relocatable, but the only reason why
|
||||
# would be if someone actually disabled the default_toolbox. Meh.
|
||||
_cp_config = {'tools.xmlrpc.on': True}
|
||||
|
||||
@expose
|
||||
def default(self, *vpath, **params):
|
||||
rpcparams, rpcmethod = _xmlrpc.process_body()
|
||||
|
||||
subhandler = self
|
||||
for attr in str(rpcmethod).split('.'):
|
||||
subhandler = getattr(subhandler, attr, None)
|
||||
|
||||
if subhandler and getattr(subhandler, 'exposed', False):
|
||||
body = subhandler(*(vpath + rpcparams), **params)
|
||||
|
||||
else:
|
||||
# https://github.com/cherrypy/cherrypy/issues/533
|
||||
# if a method is not found, an xmlrpclib.Fault should be returned
|
||||
# raising an exception here will do that; see
|
||||
# cherrypy.lib.xmlrpcutil.on_error
|
||||
raise Exception('method "%s" is not supported' % attr)
|
||||
|
||||
conf = cherrypy.serving.request.toolmaps['tools'].get('xmlrpc', {})
|
||||
_xmlrpc.respond(body,
|
||||
conf.get('encoding', 'utf-8'),
|
||||
conf.get('allow_none', 0))
|
||||
return cherrypy.serving.response.body
|
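A minimal, hypothetical usage sketch (not part of the removed file) of the controller above; the class name, method and mount point are made up, and the dispatcher line mirrors the config entry shown in the docstring.

# Hypothetical sketch: exposing one XML-RPC method via XMLRPCController.
import cherrypy
from cherrypy._cptools import XMLRPCController


class Api(XMLRPCController):
    @cherrypy.expose
    def add(self, a, b):
        return a + b


if __name__ == '__main__':
    cherrypy.quickstart(Api(), '/xmlrpc', {
        '/': {'request.dispatch': cherrypy.dispatch.XMLRPCDispatcher()},
    })
    # A client could then call it with, for example:
    #   import xmlrpc.client
    #   xmlrpc.client.ServerProxy('http://127.0.0.1:8080/xmlrpc').add(1, 2)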
||||
|
||||
|
||||
class SessionAuthTool(HandlerTool):
|
||||
|
||||
def _setargs(self):
|
||||
for name in dir(cptools.SessionAuth):
|
||||
if not name.startswith('__'):
|
||||
setattr(self, name, None)
|
||||
|
||||
|
||||
class CachingTool(Tool):
|
||||
|
||||
"""Caching Tool for CherryPy."""
|
||||
|
||||
def _wrapper(self, **kwargs):
|
||||
request = cherrypy.serving.request
|
||||
if _caching.get(**kwargs):
|
||||
request.handler = None
|
||||
else:
|
||||
if request.cacheable:
|
||||
# Note the devious technique here of adding hooks on the fly
|
||||
request.hooks.attach('before_finalize', _caching.tee_output,
|
||||
priority=90)
|
||||
_wrapper.priority = 20
|
||||
|
||||
def _setup(self):
|
||||
"""Hook caching into cherrypy.request."""
|
||||
conf = self._merged_args()
|
||||
|
||||
p = conf.pop('priority', None)
|
||||
cherrypy.serving.request.hooks.attach('before_handler', self._wrapper,
|
||||
priority=p, **conf)
|
||||
|
||||
|
||||
class Toolbox(object):
|
||||
|
||||
"""A collection of Tools.
|
||||
|
||||
This object also functions as a config namespace handler for itself.
|
||||
Custom toolboxes should be added to each Application's toolboxes dict.
|
||||
"""
|
||||
|
||||
def __init__(self, namespace):
|
||||
self.namespace = namespace
|
||||
|
||||
def __setattr__(self, name, value):
|
||||
# If the Tool._name is None, supply it from the attribute name.
|
||||
if isinstance(value, Tool):
|
||||
if value._name is None:
|
||||
value._name = name
|
||||
value.namespace = self.namespace
|
||||
object.__setattr__(self, name, value)
|
||||
|
||||
def __enter__(self):
|
||||
"""Populate request.toolmaps from tools specified in config."""
|
||||
cherrypy.serving.request.toolmaps[self.namespace] = map = {}
|
||||
|
||||
def populate(k, v):
|
||||
toolname, arg = k.split('.', 1)
|
||||
bucket = map.setdefault(toolname, {})
|
||||
bucket[arg] = v
|
||||
return populate
|
||||
|
||||
def __exit__(self, exc_type, exc_val, exc_tb):
|
||||
"""Run tool._setup() for each tool in our toolmap."""
|
||||
map = cherrypy.serving.request.toolmaps.get(self.namespace)
|
||||
if map:
|
||||
for name, settings in map.items():
|
||||
if settings.get('on', False):
|
||||
tool = getattr(self, name)
|
||||
tool._setup()
|
||||
|
||||
def register(self, point, **kwargs):
|
||||
"""Return a decorator which registers the function at the given hook point."""
|
||||
def decorator(func):
|
||||
setattr(self, kwargs.get('name', func.__name__), Tool(point, func, **kwargs))
|
||||
return func
|
||||
return decorator
|
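For illustration (not part of the removed file), a small sketch of the register() decorator defined above; the hook function and header name are made-up examples.

# Hypothetical sketch: registering a hook function as a Tool via register().
import cherrypy


@cherrypy.tools.register('before_finalize', priority=60)
def add_custom_header():
    cherrypy.response.headers['X-Example'] = 'yes'


class Root(object):
    @cherrypy.expose
    def index(self):
        return 'hello'


if __name__ == '__main__':
    cherrypy.quickstart(Root(), '/', {
        '/': {'tools.add_custom_header.on': True},
    })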
||||
|
||||
|
||||
class DeprecatedTool(Tool):
|
||||
|
||||
_name = None
|
||||
warnmsg = 'This Tool is deprecated.'
|
||||
|
||||
def __init__(self, point, warnmsg=None):
|
||||
self.point = point
|
||||
if warnmsg is not None:
|
||||
self.warnmsg = warnmsg
|
||||
|
||||
def __call__(self, *args, **kwargs):
|
||||
warnings.warn(self.warnmsg)
|
||||
|
||||
def tool_decorator(f):
|
||||
return f
|
||||
return tool_decorator
|
||||
|
||||
def _setup(self):
|
||||
warnings.warn(self.warnmsg)
|
||||
|
||||
|
||||
default_toolbox = _d = Toolbox('tools')
|
||||
_d.session_auth = SessionAuthTool(cptools.session_auth)
|
||||
_d.allow = Tool('on_start_resource', cptools.allow)
|
||||
_d.proxy = Tool('before_request_body', cptools.proxy, priority=30)
|
||||
_d.response_headers = Tool('on_start_resource', cptools.response_headers)
|
||||
_d.log_tracebacks = Tool('before_error_response', cptools.log_traceback)
|
||||
_d.log_headers = Tool('before_error_response', cptools.log_request_headers)
|
||||
_d.log_hooks = Tool('on_end_request', cptools.log_hooks, priority=100)
|
||||
_d.err_redirect = ErrorTool(cptools.redirect)
|
||||
_d.etags = Tool('before_finalize', cptools.validate_etags, priority=75)
|
||||
_d.decode = Tool('before_request_body', encoding.decode)
|
||||
# the order of encoding, gzip, caching is important
|
||||
_d.encode = Tool('before_handler', encoding.ResponseEncoder, priority=70)
|
||||
_d.gzip = Tool('before_finalize', encoding.gzip, priority=80)
|
||||
_d.staticdir = HandlerTool(static.staticdir)
|
||||
_d.staticfile = HandlerTool(static.staticfile)
|
||||
_d.sessions = SessionTool()
|
||||
_d.xmlrpc = ErrorTool(_xmlrpc.on_error)
|
||||
_d.caching = CachingTool('before_handler', _caching.get, 'caching')
|
||||
_d.expires = Tool('before_finalize', _caching.expires)
|
||||
_d.tidy = DeprecatedTool(
|
||||
'before_finalize',
|
||||
'The tidy tool has been removed from the standard distribution of '
|
||||
'CherryPy. The most recent version can be found at '
|
||||
'http://tools.cherrypy.org/browser.')
|
||||
_d.nsgmls = DeprecatedTool(
|
||||
'before_finalize',
|
||||
'The nsgmls tool has been removed from the standard distribution of '
|
||||
'CherryPy. The most recent version can be found at '
|
||||
'http://tools.cherrypy.org/browser.')
|
||||
_d.ignore_headers = Tool('before_request_body', cptools.ignore_headers)
|
||||
_d.referer = Tool('before_request_body', cptools.referer)
|
||||
_d.basic_auth = Tool('on_start_resource', auth.basic_auth)
|
||||
_d.digest_auth = Tool('on_start_resource', auth.digest_auth)
|
||||
_d.trailing_slash = Tool('before_handler', cptools.trailing_slash, priority=60)
|
||||
_d.flatten = Tool('before_finalize', cptools.flatten)
|
||||
_d.accept = Tool('on_start_resource', cptools.accept)
|
||||
_d.redirect = Tool('on_start_resource', cptools.redirect)
|
||||
_d.autovary = Tool('on_start_resource', cptools.autovary, priority=0)
|
||||
_d.json_in = Tool('before_request_body', jsontools.json_in, priority=30)
|
||||
_d.json_out = Tool('before_handler', jsontools.json_out, priority=30)
|
||||
_d.auth_basic = Tool('before_handler', auth_basic.basic_auth, priority=1)
|
||||
_d.auth_digest = Tool('before_handler', auth_digest.digest_auth, priority=1)
|
||||
_d.params = Tool('before_handler', cptools.convert_params)
|
||||
|
||||
del _d, cptools, encoding, auth, static
|
||||
@@ -1,287 +0,0 @@
|
||||
"""CherryPy Application and Tree objects."""
|
||||
|
||||
import os
|
||||
|
||||
import six
|
||||
|
||||
import cherrypy
|
||||
from cherrypy._cpcompat import ntou
|
||||
from cherrypy import _cpconfig, _cplogging, _cprequest, _cpwsgi, tools
|
||||
from cherrypy.lib import httputil
|
||||
|
||||
|
||||
class Application(object):
|
||||
|
||||
"""A CherryPy Application.
|
||||
|
||||
Servers and gateways should not instantiate Request objects directly.
|
||||
Instead, they should ask an Application object for a request object.
|
||||
|
||||
An instance of this class may also be used as a WSGI callable
|
||||
(WSGI application object) for itself.
|
||||
"""
|
||||
|
||||
root = None
|
||||
"""The top-most container of page handlers for this app. Handlers should
|
||||
be arranged in a hierarchy of attributes, matching the expected URI
|
||||
hierarchy; the default dispatcher then searches this hierarchy for a
|
||||
matching handler. When using a dispatcher other than the default,
|
||||
this value may be None."""
|
||||
|
||||
config = {}
|
||||
"""A dict of {path: pathconf} pairs, where 'pathconf' is itself a dict
|
||||
of {key: value} pairs."""
|
||||
|
||||
namespaces = _cpconfig.NamespaceSet()
|
||||
toolboxes = {'tools': cherrypy.tools}
|
||||
|
||||
log = None
|
||||
"""A LogManager instance. See _cplogging."""
|
||||
|
||||
wsgiapp = None
|
||||
"""A CPWSGIApp instance. See _cpwsgi."""
|
||||
|
||||
request_class = _cprequest.Request
|
||||
response_class = _cprequest.Response
|
||||
|
||||
relative_urls = False
|
||||
|
||||
def __init__(self, root, script_name='', config=None):
|
||||
self.log = _cplogging.LogManager(id(self), cherrypy.log.logger_root)
|
||||
self.root = root
|
||||
self.script_name = script_name
|
||||
self.wsgiapp = _cpwsgi.CPWSGIApp(self)
|
||||
|
||||
self.namespaces = self.namespaces.copy()
|
||||
self.namespaces['log'] = lambda k, v: setattr(self.log, k, v)
|
||||
self.namespaces['wsgi'] = self.wsgiapp.namespace_handler
|
||||
|
||||
self.config = self.__class__.config.copy()
|
||||
if config:
|
||||
self.merge(config)
|
||||
|
||||
def __repr__(self):
|
||||
return '%s.%s(%r, %r)' % (self.__module__, self.__class__.__name__,
|
||||
self.root, self.script_name)
|
||||
|
||||
script_name_doc = """The URI "mount point" for this app. A mount point
|
||||
is that portion of the URI which is constant for all URIs that are
|
||||
serviced by this application; it does not include scheme, host, or proxy
|
||||
("virtual host") portions of the URI.
|
||||
|
||||
For example, if script_name is "/my/cool/app", then the URL
|
||||
"http://www.example.com/my/cool/app/page1" might be handled by a
|
||||
"page1" method on the root object.
|
||||
|
||||
The value of script_name MUST NOT end in a slash. If the script_name
|
||||
refers to the root of the URI, it MUST be an empty string (not "/").
|
||||
|
||||
If script_name is explicitly set to None, then the script_name will be
|
||||
provided for each call from request.wsgi_environ['SCRIPT_NAME'].
|
||||
"""
|
||||
|
||||
def _get_script_name(self):
|
||||
if self._script_name is not None:
|
||||
return self._script_name
|
||||
|
||||
# A `_script_name` with a value of None signals that the script name
|
||||
# should be pulled from WSGI environ.
|
||||
return cherrypy.serving.request.wsgi_environ['SCRIPT_NAME'].rstrip('/')
|
||||
|
||||
def _set_script_name(self, value):
|
||||
if value:
|
||||
value = value.rstrip('/')
|
||||
self._script_name = value
|
||||
script_name = property(fget=_get_script_name, fset=_set_script_name,
|
||||
doc=script_name_doc)
|
||||
|
||||
def merge(self, config):
|
||||
"""Merge the given config into self.config."""
|
||||
_cpconfig.merge(self.config, config)
|
||||
|
||||
# Handle namespaces specified in config.
|
||||
self.namespaces(self.config.get('/', {}))
|
||||
|
||||
def find_config(self, path, key, default=None):
|
||||
"""Return the most-specific value for key along path, or default."""
|
||||
trail = path or '/'
|
||||
while trail:
|
||||
nodeconf = self.config.get(trail, {})
|
||||
|
||||
if key in nodeconf:
|
||||
return nodeconf[key]
|
||||
|
||||
lastslash = trail.rfind('/')
|
||||
if lastslash == -1:
|
||||
break
|
||||
elif lastslash == 0 and trail != '/':
|
||||
trail = '/'
|
||||
else:
|
||||
trail = trail[:lastslash]
|
||||
|
||||
return default
|
||||
|
||||
def get_serving(self, local, remote, scheme, sproto):
|
||||
"""Create and return a Request and Response object."""
|
||||
req = self.request_class(local, remote, scheme, sproto)
|
||||
req.app = self
|
||||
|
||||
for name, toolbox in self.toolboxes.items():
|
||||
req.namespaces[name] = toolbox
|
||||
|
||||
resp = self.response_class()
|
||||
cherrypy.serving.load(req, resp)
|
||||
cherrypy.engine.publish('acquire_thread')
|
||||
cherrypy.engine.publish('before_request')
|
||||
|
||||
return req, resp
|
||||
|
||||
def release_serving(self):
|
||||
"""Release the current serving (request and response)."""
|
||||
req = cherrypy.serving.request
|
||||
|
||||
cherrypy.engine.publish('after_request')
|
||||
|
||||
try:
|
||||
req.close()
|
||||
except:
|
||||
cherrypy.log(traceback=True, severity=40)
|
||||
|
||||
cherrypy.serving.clear()
|
||||
|
||||
def __call__(self, environ, start_response):
|
||||
return self.wsgiapp(environ, start_response)
|
||||
|
||||
|
||||
class Tree(object):
|
||||
|
||||
"""A registry of CherryPy applications, mounted at diverse points.
|
||||
|
||||
An instance of this class may also be used as a WSGI callable
|
||||
(WSGI application object), in which case it dispatches to all
|
||||
mounted apps.
|
||||
"""
|
||||
|
||||
apps = {}
|
||||
"""
|
||||
A dict of the form {script name: application}, where "script name"
|
||||
is a string declaring the URI mount point (no trailing slash), and
|
||||
"application" is an instance of cherrypy.Application (or an arbitrary
|
||||
WSGI callable if you happen to be using a WSGI server)."""
|
||||
|
||||
def __init__(self):
|
||||
self.apps = {}
|
||||
|
||||
def mount(self, root, script_name='', config=None):
|
||||
"""Mount a new app from a root object, script_name, and config.
|
||||
|
||||
root
|
||||
An instance of a "controller class" (a collection of page
|
||||
handler methods) which represents the root of the application.
|
||||
This may also be an Application instance, or None if using
|
||||
a dispatcher other than the default.
|
||||
|
||||
script_name
|
||||
A string containing the "mount point" of the application.
|
||||
This should start with a slash, and be the path portion of the
|
||||
URL at which to mount the given root. For example, if root.index()
|
||||
will handle requests to "http://www.example.com:8080/dept/app1/",
|
||||
then the script_name argument would be "/dept/app1".
|
||||
|
||||
It MUST NOT end in a slash. If the script_name refers to the
|
||||
root of the URI, it MUST be an empty string (not "/").
|
||||
|
||||
config
|
||||
A file or dict containing application config.
|
||||
"""
|
||||
if script_name is None:
|
||||
raise TypeError(
|
||||
"The 'script_name' argument may not be None. Application "
|
||||
'objects may, however, possess a script_name of None (in '
|
||||
'order to inspect the WSGI environ for SCRIPT_NAME upon each '
|
||||
'request). You cannot mount such Applications on this Tree; '
|
||||
'you must pass them to a WSGI server interface directly.')
|
||||
|
||||
# Next line both 1) strips trailing slash and 2) maps "/" -> "".
|
||||
script_name = script_name.rstrip('/')
|
||||
|
||||
if isinstance(root, Application):
|
||||
app = root
|
||||
if script_name != '' and script_name != app.script_name:
|
||||
raise ValueError(
|
||||
'Cannot specify a different script name and pass an '
|
||||
'Application instance to cherrypy.mount')
|
||||
script_name = app.script_name
|
||||
else:
|
||||
app = Application(root, script_name)
|
||||
|
||||
# If mounted at "", add favicon.ico
|
||||
if (script_name == '' and root is not None
|
||||
and not hasattr(root, 'favicon_ico')):
|
||||
favicon = os.path.join(os.getcwd(), os.path.dirname(__file__),
|
||||
'favicon.ico')
|
||||
root.favicon_ico = tools.staticfile.handler(favicon)
|
||||
|
||||
if config:
|
||||
app.merge(config)
|
||||
|
||||
self.apps[script_name] = app
|
||||
|
||||
return app
|
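As a brief aside (not part of the removed file), a sketch of calling mount() as described in the docstring above; the two controller classes are made up, and the second mount point reuses the "/dept/app1" example from the docstring.

# Hypothetical sketch: mounting two apps on the shared tree.
import cherrypy


class Root(object):
    @cherrypy.expose
    def index(self):
        return 'main site'


class Api(object):
    @cherrypy.expose
    def status(self):
        return 'ok'


cherrypy.tree.mount(Root(), '')            # served at /
cherrypy.tree.mount(Api(), '/dept/app1')   # served at /dept/app1/...

if __name__ == '__main__':
    cherrypy.engine.start()
    cherrypy.engine.block()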
||||
|
||||
def graft(self, wsgi_callable, script_name=''):
|
||||
"""Mount a wsgi callable at the given script_name."""
|
||||
# Next line both 1) strips trailing slash and 2) maps "/" -> "".
|
||||
script_name = script_name.rstrip('/')
|
||||
self.apps[script_name] = wsgi_callable
|
||||
|
||||
def script_name(self, path=None):
|
||||
"""The script_name of the app at the given path, or None.
|
||||
|
||||
If path is None, cherrypy.request is used.
|
||||
"""
|
||||
if path is None:
|
||||
try:
|
||||
request = cherrypy.serving.request
|
||||
path = httputil.urljoin(request.script_name,
|
||||
request.path_info)
|
||||
except AttributeError:
|
||||
return None
|
||||
|
||||
while True:
|
||||
if path in self.apps:
|
||||
return path
|
||||
|
||||
if path == '':
|
||||
return None
|
||||
|
||||
# Move one node up the tree and try again.
|
||||
path = path[:path.rfind('/')]
|
||||
|
||||
def __call__(self, environ, start_response):
|
||||
# If you're calling this, then you're probably setting SCRIPT_NAME
|
||||
# to '' (some WSGI servers always set SCRIPT_NAME to '').
|
||||
# Try to look up the app using the full path.
|
||||
env1x = environ
|
||||
if six.PY2 and environ.get(ntou('wsgi.version')) == (ntou('u'), 0):
|
||||
env1x = _cpwsgi.downgrade_wsgi_ux_to_1x(environ)
|
||||
path = httputil.urljoin(env1x.get('SCRIPT_NAME', ''),
|
||||
env1x.get('PATH_INFO', ''))
|
||||
sn = self.script_name(path or '/')
|
||||
if sn is None:
|
||||
start_response('404 Not Found', [])
|
||||
return []
|
||||
|
||||
app = self.apps[sn]
|
||||
|
||||
# Correct the SCRIPT_NAME and PATH_INFO environ entries.
|
||||
environ = environ.copy()
|
||||
if six.PY2 and environ.get(ntou('wsgi.version')) == (ntou('u'), 0):
|
||||
# Python 2/WSGI u.0: all strings MUST be of type unicode
|
||||
enc = environ[ntou('wsgi.url_encoding')]
|
||||
environ[ntou('SCRIPT_NAME')] = sn.decode(enc)
|
||||
environ[ntou('PATH_INFO')] = path[len(sn.rstrip('/')):].decode(enc)
|
||||
else:
|
||||
environ['SCRIPT_NAME'] = sn
|
||||
environ['PATH_INFO'] = path[len(sn.rstrip('/')):]
|
||||
return app(environ, start_response)
|
||||
@@ -1,466 +0,0 @@
|
||||
"""WSGI interface (see PEP 333 and 3333).
|
||||
|
||||
Note that WSGI environ keys and values are 'native strings'; that is,
|
||||
whatever the type of "" is. For Python 2, that's a byte string; for Python 3,
|
||||
it's a unicode string. But PEP 3333 says: "even if Python's str type is
|
||||
actually Unicode "under the hood", the content of native strings must
|
||||
still be translatable to bytes via the Latin-1 encoding!"
|
||||
"""
|
||||
|
||||
import sys as _sys
|
||||
import io
|
||||
|
||||
import six
|
||||
|
||||
import cherrypy as _cherrypy
|
||||
from cherrypy._cpcompat import ntob, ntou
|
||||
from cherrypy import _cperror
|
||||
from cherrypy.lib import httputil
|
||||
from cherrypy.lib import is_closable_iterator
|
||||
|
||||
def downgrade_wsgi_ux_to_1x(environ):
|
||||
"""Return a new environ dict for WSGI 1.x from the given WSGI u.x environ.
|
||||
"""
|
||||
env1x = {}
|
||||
|
||||
url_encoding = environ[ntou('wsgi.url_encoding')]
|
||||
for k, v in list(environ.items()):
|
||||
if k in [ntou('PATH_INFO'), ntou('SCRIPT_NAME'), ntou('QUERY_STRING')]:
|
||||
v = v.encode(url_encoding)
|
||||
elif isinstance(v, six.text_type):
|
||||
v = v.encode('ISO-8859-1')
|
||||
env1x[k.encode('ISO-8859-1')] = v
|
||||
|
||||
return env1x
|
||||
|
||||
|
||||
class VirtualHost(object):
|
||||
|
||||
"""Select a different WSGI application based on the Host header.
|
||||
|
||||
This can be useful when running multiple sites within one CP server.
|
||||
It allows several domains to point to different applications. For example::
|
||||
|
||||
root = Root()
|
||||
RootApp = cherrypy.Application(root)
|
||||
Domain2App = cherrypy.Application(root)
|
||||
SecureApp = cherrypy.Application(Secure())
|
||||
|
||||
vhost = cherrypy._cpwsgi.VirtualHost(
|
||||
RootApp,
|
||||
domains={
|
||||
'www.domain2.example': Domain2App,
|
||||
'www.domain2.example:443': SecureApp,
|
||||
},
|
||||
)
|
||||
|
||||
cherrypy.tree.graft(vhost)
|
||||
"""
|
||||
default = None
|
||||
"""Required. The default WSGI application."""
|
||||
|
||||
use_x_forwarded_host = True
|
||||
"""If True (the default), any "X-Forwarded-Host"
|
||||
request header will be used instead of the "Host" header. This
|
||||
is commonly added by HTTP servers (such as Apache) when proxying."""
|
||||
|
||||
domains = {}
|
||||
"""A dict of {host header value: application} pairs.
|
||||
The incoming "Host" request header is looked up in this dict,
|
||||
and, if a match is found, the corresponding WSGI application
|
||||
will be called instead of the default. Note that you often need
|
||||
separate entries for "example.com" and "www.example.com".
|
||||
In addition, "Host" headers may contain the port number.
|
||||
"""
|
||||
|
||||
def __init__(self, default, domains=None, use_x_forwarded_host=True):
|
||||
self.default = default
|
||||
self.domains = domains or {}
|
||||
self.use_x_forwarded_host = use_x_forwarded_host
|
||||
|
||||
def __call__(self, environ, start_response):
|
||||
domain = environ.get('HTTP_HOST', '')
|
||||
if self.use_x_forwarded_host:
|
||||
domain = environ.get('HTTP_X_FORWARDED_HOST', domain)
|
||||
|
||||
nextapp = self.domains.get(domain)
|
||||
if nextapp is None:
|
||||
nextapp = self.default
|
||||
return nextapp(environ, start_response)
|
||||
|
||||
|
||||
class InternalRedirector(object):
|
||||
|
||||
"""WSGI middleware that handles raised cherrypy.InternalRedirect."""
|
||||
|
||||
def __init__(self, nextapp, recursive=False):
|
||||
self.nextapp = nextapp
|
||||
self.recursive = recursive
|
||||
|
||||
def __call__(self, environ, start_response):
|
||||
redirections = []
|
||||
while True:
|
||||
environ = environ.copy()
|
||||
try:
|
||||
return self.nextapp(environ, start_response)
|
||||
except _cherrypy.InternalRedirect:
|
||||
ir = _sys.exc_info()[1]
|
||||
sn = environ.get('SCRIPT_NAME', '')
|
||||
path = environ.get('PATH_INFO', '')
|
||||
qs = environ.get('QUERY_STRING', '')
|
||||
|
||||
# Add the *previous* path_info + qs to redirections.
|
||||
old_uri = sn + path
|
||||
if qs:
|
||||
old_uri += '?' + qs
|
||||
redirections.append(old_uri)
|
||||
|
||||
if not self.recursive:
|
||||
# Check to see if the new URI has been redirected to
|
||||
# already
|
||||
new_uri = sn + ir.path
|
||||
if ir.query_string:
|
||||
new_uri += '?' + ir.query_string
|
||||
if new_uri in redirections:
|
||||
ir.request.close()
|
||||
tmpl = (
|
||||
'InternalRedirector visited the same URL twice: %r'
|
||||
)
|
||||
raise RuntimeError(tmpl % new_uri)
|
||||
|
||||
# Munge the environment and try again.
|
||||
environ['REQUEST_METHOD'] = 'GET'
|
||||
environ['PATH_INFO'] = ir.path
|
||||
environ['QUERY_STRING'] = ir.query_string
|
||||
environ['wsgi.input'] = io.BytesIO()
|
||||
environ['CONTENT_LENGTH'] = '0'
|
||||
environ['cherrypy.previous_request'] = ir.request
|
||||
|
||||
|
||||
class ExceptionTrapper(object):
|
||||
|
||||
"""WSGI middleware that traps exceptions."""
|
||||
|
||||
def __init__(self, nextapp, throws=(KeyboardInterrupt, SystemExit)):
|
||||
self.nextapp = nextapp
|
||||
self.throws = throws
|
||||
|
||||
def __call__(self, environ, start_response):
|
||||
return _TrappedResponse(
|
||||
self.nextapp,
|
||||
environ,
|
||||
start_response,
|
||||
self.throws
|
||||
)
|
||||
|
||||
|
||||
class _TrappedResponse(object):
|
||||
|
||||
response = iter([])
|
||||
|
||||
def __init__(self, nextapp, environ, start_response, throws):
|
||||
self.nextapp = nextapp
|
||||
self.environ = environ
|
||||
self.start_response = start_response
|
||||
self.throws = throws
|
||||
self.started_response = False
|
||||
self.response = self.trap(
|
||||
self.nextapp, self.environ, self.start_response,
|
||||
)
|
||||
self.iter_response = iter(self.response)
|
||||
|
||||
def __iter__(self):
|
||||
self.started_response = True
|
||||
return self
|
||||
|
||||
def __next__(self):
|
||||
return self.trap(next, self.iter_response)
|
||||
|
||||
# todo: https://pythonhosted.org/six/#six.Iterator
|
||||
if six.PY2:
|
||||
next = __next__
|
||||
|
||||
def close(self):
|
||||
if hasattr(self.response, 'close'):
|
||||
self.response.close()
|
||||
|
||||
def trap(self, func, *args, **kwargs):
|
||||
try:
|
||||
return func(*args, **kwargs)
|
||||
except self.throws:
|
||||
raise
|
||||
except StopIteration:
|
||||
raise
|
||||
except:
|
||||
tb = _cperror.format_exc()
|
||||
_cherrypy.log(tb, severity=40)
|
||||
if not _cherrypy.request.show_tracebacks:
|
||||
tb = ''
|
||||
s, h, b = _cperror.bare_error(tb)
|
||||
if six.PY3:
|
||||
# What fun.
|
||||
s = s.decode('ISO-8859-1')
|
||||
h = [
|
||||
(k.decode('ISO-8859-1'), v.decode('ISO-8859-1'))
|
||||
for k, v in h
|
||||
]
|
||||
if self.started_response:
|
||||
# Empty our iterable (so future calls raise StopIteration)
|
||||
self.iter_response = iter([])
|
||||
else:
|
||||
self.iter_response = iter(b)
|
||||
|
||||
try:
|
||||
self.start_response(s, h, _sys.exc_info())
|
||||
except:
|
||||
# "The application must not trap any exceptions raised by
|
||||
# start_response, if it called start_response with exc_info.
|
||||
# Instead, it should allow such exceptions to propagate
|
||||
# back to the server or gateway."
|
||||
# But we still log and call close() to clean up ourselves.
|
||||
_cherrypy.log(traceback=True, severity=40)
|
||||
raise
|
||||
|
||||
if self.started_response:
|
||||
return ntob('').join(b)
|
||||
else:
|
||||
return b
|
||||
|
||||
|
||||
# WSGI-to-CP Adapter #
|
||||
|
||||
|
||||
class AppResponse(object):
|
||||
|
||||
"""WSGI response iterable for CherryPy applications."""
|
||||
|
||||
def __init__(self, environ, start_response, cpapp):
|
||||
self.cpapp = cpapp
|
||||
try:
|
||||
if six.PY2:
|
||||
if environ.get(ntou('wsgi.version')) == (ntou('u'), 0):
|
||||
environ = downgrade_wsgi_ux_to_1x(environ)
|
||||
self.environ = environ
|
||||
self.run()
|
||||
|
||||
r = _cherrypy.serving.response
|
||||
|
||||
outstatus = r.output_status
|
||||
if not isinstance(outstatus, bytes):
|
||||
raise TypeError('response.output_status is not a byte string.')
|
||||
|
||||
outheaders = []
|
||||
for k, v in r.header_list:
|
||||
if not isinstance(k, bytes):
|
||||
tmpl = 'response.header_list key %r is not a byte string.'
|
||||
raise TypeError(tmpl % k)
|
||||
if not isinstance(v, bytes):
|
||||
tmpl = (
|
||||
'response.header_list value %r is not a byte string.'
|
||||
)
|
||||
raise TypeError(tmpl % v)
|
||||
outheaders.append((k, v))
|
||||
|
||||
if six.PY3:
|
||||
# According to PEP 3333, when using Python 3, the response
|
||||
# status and headers must be bytes masquerading as unicode;
|
||||
# that is, they must be of type "str" but are restricted to
|
||||
# code points in the "latin-1" set.
|
||||
outstatus = outstatus.decode('ISO-8859-1')
|
||||
outheaders = [
|
||||
(k.decode('ISO-8859-1'), v.decode('ISO-8859-1'))
|
||||
for k, v in outheaders
|
||||
]
|
||||
|
||||
self.iter_response = iter(r.body)
|
||||
self.write = start_response(outstatus, outheaders)
|
||||
except:
|
||||
self.close()
|
||||
raise
|
||||
|
||||
def __iter__(self):
|
||||
return self
|
||||
|
||||
def __next__(self):
|
||||
return next(self.iter_response)
|
||||
|
||||
# todo: https://pythonhosted.org/six/#six.Iterator
|
||||
if six.PY2:
|
||||
next = __next__
|
||||
|
||||
def close(self):
|
||||
"""Close and de-reference the current request and response. (Core)"""
|
||||
streaming = _cherrypy.serving.response.stream
|
||||
self.cpapp.release_serving()
|
||||
|
||||
# We avoid the expense of examining the iterator to see if it's
|
||||
# closable unless we are streaming the response, as that's the
|
||||
# only situation where we are going to have an iterator which
|
||||
# may not have been exhausted yet.
|
||||
if streaming and is_closable_iterator(self.iter_response):
|
||||
iter_close = self.iter_response.close
|
||||
try:
|
||||
iter_close()
|
||||
except Exception:
|
||||
_cherrypy.log(traceback=True, severity=40)
|
||||
|
||||
def run(self):
|
||||
"""Create a Request object using environ."""
|
||||
env = self.environ.get
|
||||
|
||||
local = httputil.Host(
|
||||
'',
|
||||
int(env('SERVER_PORT', 80) or -1),
|
||||
env('SERVER_NAME', ''),
|
||||
)
|
||||
remote = httputil.Host(
|
||||
env('REMOTE_ADDR', ''),
|
||||
int(env('REMOTE_PORT', -1) or -1),
|
||||
env('REMOTE_HOST', ''),
|
||||
)
|
||||
scheme = env('wsgi.url_scheme')
|
||||
sproto = env('ACTUAL_SERVER_PROTOCOL', 'HTTP/1.1')
|
||||
request, resp = self.cpapp.get_serving(local, remote, scheme, sproto)
|
||||
|
||||
# LOGON_USER is served by IIS, and is the name of the
|
||||
# user after having been mapped to a local account.
|
||||
# Both IIS and Apache set REMOTE_USER, when possible.
|
||||
request.login = env('LOGON_USER') or env('REMOTE_USER') or None
|
||||
request.multithread = self.environ['wsgi.multithread']
|
||||
request.multiprocess = self.environ['wsgi.multiprocess']
|
||||
request.wsgi_environ = self.environ
|
||||
request.prev = env('cherrypy.previous_request', None)
|
||||
|
||||
meth = self.environ['REQUEST_METHOD']
|
||||
|
||||
path = httputil.urljoin(
|
||||
self.environ.get('SCRIPT_NAME', ''),
|
||||
self.environ.get('PATH_INFO', ''),
|
||||
)
|
||||
qs = self.environ.get('QUERY_STRING', '')
|
||||
|
||||
path, qs = self.recode_path_qs(path, qs) or (path, qs)
|
||||
|
||||
rproto = self.environ.get('SERVER_PROTOCOL')
|
||||
headers = self.translate_headers(self.environ)
|
||||
rfile = self.environ['wsgi.input']
|
||||
request.run(meth, path, qs, rproto, headers, rfile)
|
||||
|
||||
headerNames = {
|
||||
'HTTP_CGI_AUTHORIZATION': 'Authorization',
|
||||
'CONTENT_LENGTH': 'Content-Length',
|
||||
'CONTENT_TYPE': 'Content-Type',
|
||||
'REMOTE_HOST': 'Remote-Host',
|
||||
'REMOTE_ADDR': 'Remote-Addr',
|
||||
}
|
||||
|
||||
def recode_path_qs(self, path, qs):
|
||||
if not six.PY3:
|
||||
return
|
||||
|
||||
# This isn't perfect; if the given PATH_INFO is in the
|
||||
# wrong encoding, it may fail to match the appropriate config
|
||||
# section URI. But meh.
|
||||
old_enc = self.environ.get('wsgi.url_encoding', 'ISO-8859-1')
|
||||
new_enc = self.cpapp.find_config(
|
||||
self.environ.get('PATH_INFO', ''),
|
||||
'request.uri_encoding', 'utf-8',
|
||||
)
|
||||
if new_enc.lower() == old_enc.lower():
|
||||
return
|
||||
|
||||
# Even though the path and qs are unicode, the WSGI server
|
||||
# is required by PEP 3333 to coerce them to ISO-8859-1
|
||||
# masquerading as unicode. So we have to encode back to
|
||||
# bytes and then decode again using the "correct" encoding.
|
||||
try:
|
||||
return (
|
||||
path.encode(old_enc).decode(new_enc),
|
||||
qs.encode(old_enc).decode(new_enc),
|
||||
)
|
||||
except (UnicodeEncodeError, UnicodeDecodeError):
|
||||
# Just pass them through without transcoding and hope.
|
||||
pass
|
||||
|
||||
def translate_headers(self, environ):
|
||||
"""Translate CGI-environ header names to HTTP header names."""
|
||||
for cgiName in environ:
|
||||
# We assume all incoming header keys are uppercase already.
|
||||
if cgiName in self.headerNames:
|
||||
yield self.headerNames[cgiName], environ[cgiName]
|
||||
elif cgiName[:5] == 'HTTP_':
|
||||
# Hackish attempt at recovering original header names.
|
||||
translatedHeader = cgiName[5:].replace('_', '-')
|
||||
yield translatedHeader, environ[cgiName]
|
||||
|
||||
|
||||
class CPWSGIApp(object):
|
||||
|
||||
"""A WSGI application object for a CherryPy Application."""
|
||||
|
||||
pipeline = [
|
||||
('ExceptionTrapper', ExceptionTrapper),
|
||||
('InternalRedirector', InternalRedirector),
|
||||
]
|
||||
"""A list of (name, wsgiapp) pairs. Each 'wsgiapp' MUST be a
|
||||
constructor that takes an initial, positional 'nextapp' argument,
|
||||
plus optional keyword arguments, and returns a WSGI application
|
||||
(that takes environ and start_response arguments). The 'name' can
|
||||
be any you choose, and will correspond to keys in self.config."""
|
||||
|
||||
head = None
|
||||
"""Rather than nest all apps in the pipeline on each call, it's only
|
||||
done the first time, and the result is memoized into self.head. Set
|
||||
this to None again if you change self.pipeline after calling self."""
|
||||
|
||||
config = {}
|
||||
"""A dict whose keys match names listed in the pipeline. Each
|
||||
value is a further dict which will be passed to the corresponding
|
||||
named WSGI callable (from the pipeline) as keyword arguments."""
|
||||
|
||||
response_class = AppResponse
|
||||
"""The class to instantiate and return as the next app in the WSGI chain.
|
||||
"""
|
||||
|
||||
def __init__(self, cpapp, pipeline=None):
|
||||
self.cpapp = cpapp
|
||||
self.pipeline = self.pipeline[:]
|
||||
if pipeline:
|
||||
self.pipeline.extend(pipeline)
|
||||
self.config = self.config.copy()
|
||||
|
||||
def tail(self, environ, start_response):
|
||||
"""WSGI application callable for the actual CherryPy application.
|
||||
|
||||
You probably shouldn't call this; call self.__call__ instead,
|
||||
so that any WSGI middleware in self.pipeline can run first.
|
||||
"""
|
||||
return self.response_class(environ, start_response, self.cpapp)
|
||||
|
||||
def __call__(self, environ, start_response):
|
||||
head = self.head
|
||||
if head is None:
|
||||
# Create and nest the WSGI apps in our pipeline (in reverse order).
|
||||
# Then memoize the result in self.head.
|
||||
head = self.tail
|
||||
for name, callable in self.pipeline[::-1]:
|
||||
conf = self.config.get(name, {})
|
||||
head = callable(head, **conf)
|
||||
self.head = head
|
||||
return head(environ, start_response)
|
||||
|
||||
def namespace_handler(self, k, v):
|
||||
"""Config handler for the 'wsgi' namespace."""
|
||||
if k == 'pipeline':
|
||||
# Note this allows multiple 'wsgi.pipeline' config entries
|
||||
# (but each entry will be processed in a 'random' order).
|
||||
# It should also allow developers to set default middleware
|
||||
# in code (passed to self.__init__) that deployers can add to
|
||||
# (but not remove) via config.
|
||||
self.pipeline.extend(v)
|
||||
elif k == 'response_class':
|
||||
self.response_class = v
|
||||
else:
|
||||
name, arg = k.split('.', 1)
|
||||
bucket = self.config.setdefault(name, {})
|
||||
bucket[arg] = v
|
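For illustration only (not part of the removed file), one way the 'wsgi.pipeline' config entry handled above might be used to append middleware to an Application's pipeline; ExampleMiddleware is a made-up pass-through class.

# Hypothetical sketch: extending the WSGI pipeline through config.
import cherrypy


class ExampleMiddleware(object):
    """Pass-through middleware taking the usual positional 'nextapp'."""

    def __init__(self, nextapp):
        self.nextapp = nextapp

    def __call__(self, environ, start_response):
        # A real middleware would inspect or modify environ/response here.
        return self.nextapp(environ, start_response)


class Root(object):
    @cherrypy.expose
    def index(self):
        return 'hello'


cherrypy.tree.mount(Root(), '/', {
    '/': {'wsgi.pipeline': [('example', ExampleMiddleware)]},
})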
||||
@@ -1,70 +0,0 @@
|
||||
"""WSGI server interface (see PEP 333). This adds some CP-specific bits to
|
||||
the framework-agnostic wsgiserver package.
|
||||
"""
|
||||
import sys
|
||||
|
||||
import cherrypy
|
||||
from cherrypy import wsgiserver
|
||||
|
||||
|
||||
class CPWSGIServer(wsgiserver.CherryPyWSGIServer):
|
||||
|
||||
"""Wrapper for wsgiserver.CherryPyWSGIServer.
|
||||
|
||||
wsgiserver has been designed to not reference CherryPy in any way,
|
||||
so that it can be used in other frameworks and applications. Therefore,
|
||||
we wrap it here, so we can set our own mount points from cherrypy.tree
|
||||
and apply some attributes from config -> cherrypy.server -> wsgiserver.
|
||||
"""
|
||||
|
||||
def __init__(self, server_adapter=cherrypy.server):
|
||||
self.server_adapter = server_adapter
|
||||
self.max_request_header_size = (
|
||||
self.server_adapter.max_request_header_size or 0
|
||||
)
|
||||
self.max_request_body_size = (
|
||||
self.server_adapter.max_request_body_size or 0
|
||||
)
|
||||
|
||||
server_name = (self.server_adapter.socket_host or
|
||||
self.server_adapter.socket_file or
|
||||
None)
|
||||
|
||||
self.wsgi_version = self.server_adapter.wsgi_version
|
||||
s = wsgiserver.CherryPyWSGIServer
|
||||
s.__init__(self, server_adapter.bind_addr, cherrypy.tree,
|
||||
self.server_adapter.thread_pool,
|
||||
server_name,
|
||||
max=self.server_adapter.thread_pool_max,
|
||||
request_queue_size=self.server_adapter.socket_queue_size,
|
||||
timeout=self.server_adapter.socket_timeout,
|
||||
shutdown_timeout=self.server_adapter.shutdown_timeout,
|
||||
accepted_queue_size=self.server_adapter.accepted_queue_size,
|
||||
accepted_queue_timeout=self.server_adapter.accepted_queue_timeout,
|
||||
)
|
||||
self.protocol = self.server_adapter.protocol_version
|
||||
self.nodelay = self.server_adapter.nodelay
|
||||
|
||||
if sys.version_info >= (3, 0):
|
||||
ssl_module = self.server_adapter.ssl_module or 'builtin'
|
||||
else:
|
||||
ssl_module = self.server_adapter.ssl_module or 'pyopenssl'
|
||||
if self.server_adapter.ssl_context:
|
||||
adapter_class = wsgiserver.get_ssl_adapter_class(ssl_module)
|
||||
self.ssl_adapter = adapter_class(
|
||||
self.server_adapter.ssl_certificate,
|
||||
self.server_adapter.ssl_private_key,
|
||||
self.server_adapter.ssl_certificate_chain)
|
||||
self.ssl_adapter.context = self.server_adapter.ssl_context
|
||||
elif self.server_adapter.ssl_certificate:
|
||||
adapter_class = wsgiserver.get_ssl_adapter_class(ssl_module)
|
||||
self.ssl_adapter = adapter_class(
|
||||
self.server_adapter.ssl_certificate,
|
||||
self.server_adapter.ssl_private_key,
|
||||
self.server_adapter.ssl_certificate_chain)
|
||||
|
||||
self.stats['Enabled'] = getattr(
|
||||
self.server_adapter, 'statistics', False)
|
||||
|
||||
def error_log(self, msg='', level=20, traceback=False):
|
||||
cherrypy.engine.log(msg, level, traceback)
|
||||
@@ -1,298 +0,0 @@
|
||||
"""
|
||||
Helper functions for CP apps
|
||||
"""
|
||||
|
||||
import six
|
||||
|
||||
from cherrypy._cpcompat import urljoin as _urljoin, urlencode as _urlencode
|
||||
from cherrypy._cpcompat import text_or_bytes
|
||||
|
||||
import cherrypy
|
||||
|
||||
|
||||
def expose(func=None, alias=None):
|
||||
"""
|
||||
Expose the function or class, optionally providing an alias or set of aliases.
|
||||
"""
|
||||
def expose_(func):
|
||||
func.exposed = True
|
||||
if alias is not None:
|
||||
if isinstance(alias, text_or_bytes):
|
||||
parents[alias.replace('.', '_')] = func
|
||||
else:
|
||||
for a in alias:
|
||||
parents[a.replace('.', '_')] = func
|
||||
return func
|
||||
|
||||
import sys
|
||||
import types
|
||||
decoratable_types = types.FunctionType, types.MethodType, type,
|
||||
if six.PY2:
|
||||
# Old-style classes are of type types.ClassType.
|
||||
decoratable_types += types.ClassType,
|
||||
if isinstance(func, decoratable_types):
|
||||
if alias is None:
|
||||
# @expose
|
||||
func.exposed = True
|
||||
return func
|
||||
else:
|
||||
# func = expose(func, alias)
|
||||
parents = sys._getframe(1).f_locals
|
||||
return expose_(func)
|
||||
elif func is None:
|
||||
if alias is None:
|
||||
# @expose()
|
||||
parents = sys._getframe(1).f_locals
|
||||
return expose_
|
||||
else:
|
||||
# @expose(alias="alias") or
|
||||
# @expose(alias=["alias1", "alias2"])
|
||||
parents = sys._getframe(1).f_locals
|
||||
return expose_
|
||||
else:
|
||||
# @expose("alias") or
|
||||
# @expose(["alias1", "alias2"])
|
||||
parents = sys._getframe(1).f_locals
|
||||
alias = func
|
||||
return expose_
|
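A short illustrative sketch (not part of the removed file) of the calling forms handled by expose() above; the class and aliases are made up.

# Hypothetical sketch: the documented ways of calling expose().
import cherrypy


class Root(object):
    @cherrypy.expose                      # plain: just sets func.exposed = True
    def index(self):
        return 'index'

    @cherrypy.expose('getinfo')           # positional alias -> also reachable at /getinfo
    def info(self):
        return 'info'

    @cherrypy.expose(alias=['v1', 'v2'])  # keyword alias list -> /v1 and /v2
    def version(self):
        return 'version'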
||||
|
||||
|
||||
def popargs(*args, **kwargs):
|
||||
"""A decorator for _cp_dispatch
|
||||
(cherrypy.dispatch.Dispatcher.dispatch_method_name).
|
||||
|
||||
Optional keyword argument: handler=(Object or Function)
|
||||
|
||||
Provides a _cp_dispatch function that pops off path segments into
|
||||
cherrypy.request.params under the names specified. The dispatch
|
||||
is then forwarded on to the next vpath element.
|
||||
|
||||
Note that any existing (and exposed) member function of the class that
|
||||
popargs is applied to will override that value of the argument. For
|
||||
instance, if you have a method named "list" on the class decorated with
|
||||
popargs, then accessing "/list" will call that function instead of popping
|
||||
it off as the requested parameter. This restriction applies to all
|
||||
_cp_dispatch functions. The only way around this restriction is to create
|
||||
a "blank class" whose only function is to provide _cp_dispatch.
|
||||
|
||||
If there are path elements after the arguments, or more arguments
|
||||
are requested than are available in the vpath, then the 'handler'
|
||||
keyword argument specifies the next object to handle the parameterized
|
||||
request. If handler is not specified or is None, then self is used.
|
||||
If handler is a function rather than an instance, then that function
|
||||
will be called with the args specified and the return value from that
|
||||
function used as the next object INSTEAD of adding the parameters to
|
||||
cherrypy.request.args.
|
||||
|
||||
This decorator may be used in one of two ways:
|
||||
|
||||
As a class decorator:
|
||||
@cherrypy.popargs('year', 'month', 'day')
|
||||
class Blog:
|
||||
def index(self, year=None, month=None, day=None):
|
||||
#Process the parameters here; any url like
|
||||
#/, /2009, /2009/12, or /2009/12/31
|
||||
#will fill in the appropriate parameters.
|
||||
|
||||
def create(self):
|
||||
#This link will still be available at /create. Defined functions
|
||||
#take precedence over arguments.
|
||||
|
||||
Or as a member of a class:
|
||||
class Blog:
|
||||
_cp_dispatch = cherrypy.popargs('year', 'month', 'day')
|
||||
#...
|
||||
|
||||
The handler argument may be used to mix arguments with built in functions.
|
||||
For instance, the following setup allows different activities at the
|
||||
day, month, and year level:
|
||||
|
||||
class DayHandler:
|
||||
def index(self, year, month, day):
|
||||
#Do something with this day; probably list entries
|
||||
|
||||
def delete(self, year, month, day):
|
||||
#Delete all entries for this day
|
||||
|
||||
@cherrypy.popargs('day', handler=DayHandler())
|
||||
class MonthHandler:
|
||||
def index(self, year, month):
|
||||
#Do something with this month; probably list entries
|
||||
|
||||
def delete(self, year, month):
|
||||
#Delete all entries for this month
|
||||
|
||||
@cherrypy.popargs('month', handler=MonthHandler())
|
||||
class YearHandler:
|
||||
def index(self, year):
|
||||
#Do something with this year
|
||||
|
||||
#...
|
||||
|
||||
@cherrypy.popargs('year', handler=YearHandler())
|
||||
class Root:
|
||||
def index(self):
|
||||
#...
|
||||
|
||||
"""
|
||||
|
||||
# Since keyword arg comes after *args, we have to process it ourselves
|
||||
# for lower versions of Python.
|
||||
|
||||
handler = None
|
||||
handler_call = False
|
||||
for k, v in kwargs.items():
|
||||
if k == 'handler':
|
||||
handler = v
|
||||
else:
|
||||
raise TypeError(
|
||||
"cherrypy.popargs() got an unexpected keyword argument '{0}'"
|
||||
.format(k)
|
||||
)
|
||||
|
||||
import inspect
|
||||
|
||||
if handler is not None \
|
||||
and (hasattr(handler, '__call__') or inspect.isclass(handler)):
|
||||
handler_call = True
|
||||
|
||||
def decorated(cls_or_self=None, vpath=None):
|
||||
if inspect.isclass(cls_or_self):
|
||||
# cherrypy.popargs is a class decorator
|
||||
cls = cls_or_self
|
||||
setattr(cls, cherrypy.dispatch.Dispatcher.dispatch_method_name, decorated)
|
||||
return cls
|
||||
|
||||
# We're in the actual function
|
||||
self = cls_or_self
|
||||
parms = {}
|
||||
for arg in args:
|
||||
if not vpath:
|
||||
break
|
||||
parms[arg] = vpath.pop(0)
|
||||
|
||||
if handler is not None:
|
||||
if handler_call:
|
||||
return handler(**parms)
|
||||
else:
|
||||
cherrypy.request.params.update(parms)
|
||||
return handler
|
||||
|
||||
cherrypy.request.params.update(parms)
|
||||
|
||||
# If we are the ultimate handler, then to prevent our _cp_dispatch
|
||||
# from being called again, we will resolve remaining elements through
|
||||
# getattr() directly.
|
||||
if vpath:
|
||||
return getattr(self, vpath.pop(0), None)
|
||||
else:
|
||||
return self
|
||||
|
||||
return decorated
|
||||
|
||||
|
||||
def url(path='', qs='', script_name=None, base=None, relative=None):
|
||||
"""Create an absolute URL for the given path.
|
||||
|
||||
If 'path' starts with a slash ('/'), this will return
|
||||
(base + script_name + path + qs).
|
||||
If it does not start with a slash, this returns
|
||||
(base + script_name [+ request.path_info] + path + qs).
|
||||
|
||||
If script_name is None, cherrypy.request will be used
|
||||
to find a script_name, if available.
|
||||
|
||||
If base is None, cherrypy.request.base will be used (if available).
|
||||
Note that you can use cherrypy.tools.proxy to change this.
|
||||
|
||||
Finally, note that this function can be used to obtain an absolute URL
|
||||
for the current request path (minus the querystring) by passing no args.
|
||||
If you call url(qs=cherrypy.request.query_string), you should get the
|
||||
original browser URL (assuming no internal redirections).
|
||||
|
||||
If relative is None or not provided, request.app.relative_urls will
|
||||
be used (if available, else False). If False, the output will be an
|
||||
absolute URL (including the scheme, host, vhost, and script_name).
|
||||
If True, the output will instead be a URL that is relative to the
|
||||
current request path, perhaps including '..' atoms. If relative is
|
||||
the string 'server', the output will instead be a URL that is
|
||||
relative to the server root; i.e., it will start with a slash.
|
||||
"""
|
||||
if isinstance(qs, (tuple, list, dict)):
|
||||
qs = _urlencode(qs)
|
||||
if qs:
|
||||
qs = '?' + qs
|
||||
|
||||
if cherrypy.request.app:
|
||||
if not path.startswith('/'):
|
||||
# Append/remove trailing slash from path_info as needed
|
||||
# (this is to support mistyped URL's without redirecting;
|
||||
# if you want to redirect, use tools.trailing_slash).
|
||||
pi = cherrypy.request.path_info
|
||||
if cherrypy.request.is_index is True:
|
||||
if not pi.endswith('/'):
|
||||
pi = pi + '/'
|
||||
elif cherrypy.request.is_index is False:
|
||||
if pi.endswith('/') and pi != '/':
|
||||
pi = pi[:-1]
|
||||
|
||||
if path == '':
|
||||
path = pi
|
||||
else:
|
||||
path = _urljoin(pi, path)
|
||||
|
||||
if script_name is None:
|
||||
script_name = cherrypy.request.script_name
|
||||
if base is None:
|
||||
base = cherrypy.request.base
|
||||
|
||||
newurl = base + script_name + path + qs
|
||||
else:
|
||||
# No request.app (we're being called outside a request).
|
||||
# We'll have to guess the base from server.* attributes.
|
||||
# This will produce very different results from the above
|
||||
# if you're using vhosts or tools.proxy.
|
||||
if base is None:
|
||||
base = cherrypy.server.base()
|
||||
|
||||
path = (script_name or '') + path
|
||||
newurl = base + path + qs
|
||||
|
||||
if './' in newurl:
|
||||
# Normalize the URL by removing ./ and ../
|
||||
atoms = []
|
||||
for atom in newurl.split('/'):
|
||||
if atom == '.':
|
||||
pass
|
||||
elif atom == '..':
|
||||
atoms.pop()
|
||||
else:
|
||||
atoms.append(atom)
|
||||
newurl = '/'.join(atoms)
|
||||
|
||||
# At this point, we should have a fully-qualified absolute URL.
|
||||
|
||||
if relative is None:
|
||||
relative = getattr(cherrypy.request.app, 'relative_urls', False)
|
||||
|
||||
# See http://www.ietf.org/rfc/rfc2396.txt
|
||||
if relative == 'server':
|
||||
# "A relative reference beginning with a single slash character is
|
||||
# termed an absolute-path reference, as defined by <abs_path>..."
|
||||
# This is also sometimes called "server-relative".
|
||||
newurl = '/' + '/'.join(newurl.split('/', 3)[3:])
|
||||
elif relative:
|
||||
# "A relative reference that does not begin with a scheme name
|
||||
# or a slash character is termed a relative-path reference."
|
||||
old = url(relative=False).split('/')[:-1]
|
||||
new = newurl.split('/')
|
||||
while old and new:
|
||||
a, b = old[0], new[0]
|
||||
if a != b:
|
||||
break
|
||||
old.pop(0)
|
||||
new.pop(0)
|
||||
new = (['..'] * len(old)) + new
|
||||
newurl = '/'.join(new)
|
||||
|
||||
return newurl
|
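For illustration (not part of the removed file), a small sketch of calling url() from inside a page handler; the paths and query values are made up.

# Hypothetical sketch: building links with cherrypy.url() in a handler.
import cherrypy


class Root(object):
    @cherrypy.expose
    def index(self):
        absolute = cherrypy.url('/other')          # base + script_name + /other
        sibling = cherrypy.url('other')            # joined onto the current path_info
        with_qs = cherrypy.url('/search', qs={'q': 'x'})  # dict is urlencoded
        return '\n'.join([absolute, sibling, with_qs])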
||||
@@ -1,6 +0,0 @@
|
||||
#! /usr/bin/env python
|
||||
|
||||
import cherrypy.daemon
|
||||
|
||||
if __name__ == '__main__':
|
||||
cherrypy.daemon.run()
|
||||
@@ -1,106 +0,0 @@
|
||||
"""The CherryPy daemon."""
|
||||
|
||||
import sys
|
||||
|
||||
import cherrypy
|
||||
from cherrypy.process import plugins, servers
|
||||
from cherrypy import Application
|
||||
|
||||
|
||||
def start(configfiles=None, daemonize=False, environment=None,
|
||||
fastcgi=False, scgi=False, pidfile=None, imports=None,
|
||||
cgi=False):
|
||||
"""Subscribe all engine plugins and start the engine."""
|
||||
sys.path = [''] + sys.path
|
||||
for i in imports or []:
|
||||
exec('import %s' % i)
|
||||
|
||||
for c in configfiles or []:
|
||||
cherrypy.config.update(c)
|
||||
# If there's only one app mounted, merge config into it.
|
||||
if len(cherrypy.tree.apps) == 1:
|
||||
for app in cherrypy.tree.apps.values():
|
||||
if isinstance(app, Application):
|
||||
app.merge(c)
|
||||
|
||||
engine = cherrypy.engine
|
||||
|
||||
if environment is not None:
|
||||
cherrypy.config.update({'environment': environment})
|
||||
|
||||
# Only daemonize if asked to.
|
||||
if daemonize:
|
||||
# Don't print anything to stdout/stderr.
|
||||
cherrypy.config.update({'log.screen': False})
|
||||
plugins.Daemonizer(engine).subscribe()
|
||||
|
||||
if pidfile:
|
||||
plugins.PIDFile(engine, pidfile).subscribe()
|
||||
|
||||
if hasattr(engine, 'signal_handler'):
|
||||
engine.signal_handler.subscribe()
|
||||
if hasattr(engine, 'console_control_handler'):
|
||||
engine.console_control_handler.subscribe()
|
||||
|
||||
if (fastcgi and (scgi or cgi)) or (scgi and cgi):
|
||||
cherrypy.log.error('You may only specify one of the cgi, fastcgi, and '
|
||||
'scgi options.', 'ENGINE')
|
||||
sys.exit(1)
|
||||
elif fastcgi or scgi or cgi:
|
||||
# Turn off autoreload when using *cgi.
|
||||
cherrypy.config.update({'engine.autoreload.on': False})
|
||||
# Turn off the default HTTP server (which is subscribed by default).
|
||||
cherrypy.server.unsubscribe()
|
||||
|
||||
addr = cherrypy.server.bind_addr
|
||||
cls = (
|
||||
servers.FlupFCGIServer if fastcgi else
|
||||
servers.FlupSCGIServer if scgi else
|
||||
servers.FlupCGIServer
|
||||
)
|
||||
f = cls(application=cherrypy.tree, bindAddress=addr)
|
||||
s = servers.ServerAdapter(engine, httpserver=f, bind_addr=addr)
|
||||
s.subscribe()
|
||||
|
||||
# Always start the engine; this will start all other services
|
||||
try:
|
||||
engine.start()
|
||||
except:
|
||||
# Assume the error has been logged already via bus.log.
|
||||
sys.exit(1)
|
||||
else:
|
||||
engine.block()
|
||||
|
||||
|
||||
def run():
|
||||
from optparse import OptionParser
|
||||
|
||||
p = OptionParser()
|
||||
p.add_option('-c', '--config', action='append', dest='config',
|
||||
help='specify config file(s)')
|
||||
p.add_option('-d', action='store_true', dest='daemonize',
|
||||
help='run the server as a daemon')
|
||||
p.add_option('-e', '--environment', dest='environment', default=None,
|
||||
help='apply the given config environment')
|
||||
p.add_option('-f', action='store_true', dest='fastcgi',
|
||||
help='start a fastcgi server instead of the default HTTP '
|
||||
'server')
|
||||
p.add_option('-s', action='store_true', dest='scgi',
|
||||
help='start a scgi server instead of the default HTTP server')
|
||||
p.add_option('-x', action='store_true', dest='cgi',
|
||||
help='start a cgi server instead of the default HTTP server')
|
||||
p.add_option('-i', '--import', action='append', dest='imports',
|
||||
help='specify modules to import')
|
||||
p.add_option('-p', '--pidfile', dest='pidfile', default=None,
|
||||
help='store the process id in the given file')
|
||||
p.add_option('-P', '--Path', action='append', dest='Path',
|
||||
help='add the given paths to sys.path')
|
||||
options, args = p.parse_args()
|
||||
|
||||
if options.Path:
|
||||
for p in options.Path:
|
||||
sys.path.insert(0, p)
|
||||
|
||||
start(options.config, options.daemonize,
|
||||
options.environment, options.fastcgi, options.scgi,
|
||||
options.pidfile, options.imports, options.cgi)
|
||||
Binary file not shown.
|
@@ -1,85 +0,0 @@
|
||||
"""CherryPy Library"""
|
||||
|
||||
|
||||
def is_iterator(obj):
|
||||
'''Returns a boolean indicating if the object provided implements
|
||||
the iterator protocol (i.e. like a generator). This will return
|
||||
False for objects which are iterable, but not iterators themselves.'''
|
||||
from types import GeneratorType
|
||||
if isinstance(obj, GeneratorType):
|
||||
return True
|
||||
elif not hasattr(obj, '__iter__'):
|
||||
return False
|
||||
else:
|
||||
# Types which implement the protocol must return themselves when
|
||||
# invoking 'iter' upon them.
|
||||
return iter(obj) is obj
|
||||
|
||||
|
||||
def is_closable_iterator(obj):
|
||||
|
||||
# Not an iterator.
|
||||
if not is_iterator(obj):
|
||||
return False
|
||||
|
||||
# A generator - the easiest thing to deal with.
|
||||
import inspect
|
||||
if inspect.isgenerator(obj):
|
||||
return True
|
||||
|
||||
# A custom iterator. Look for a close method...
|
||||
if not (hasattr(obj, 'close') and callable(obj.close)):
|
||||
return False
|
||||
|
||||
# ... which doesn't require any arguments.
|
||||
try:
|
||||
inspect.getcallargs(obj.close)
|
||||
except TypeError:
|
||||
return False
|
||||
else:
|
||||
return True
|
||||
|
||||
|
||||
class file_generator(object):
|
||||
|
||||
"""Yield the given input (a file object) in chunks (default 64k). (Core)"""
|
||||
|
||||
def __init__(self, input, chunkSize=65536):
|
||||
self.input = input
|
||||
self.chunkSize = chunkSize
|
||||
|
||||
def __iter__(self):
|
||||
return self
|
||||
|
||||
def __next__(self):
|
||||
chunk = self.input.read(self.chunkSize)
|
||||
if chunk:
|
||||
return chunk
|
||||
else:
|
||||
if hasattr(self.input, 'close'):
|
||||
self.input.close()
|
||||
raise StopIteration()
|
||||
next = __next__
|
||||
|
||||
|
||||
def file_generator_limited(fileobj, count, chunk_size=65536):
|
||||
"""Yield the given file object in chunks, stopping after `count`
|
||||
bytes have been emitted. Default chunk size is 64kB. (Core)
|
||||
"""
|
||||
remaining = count
|
||||
while remaining > 0:
|
||||
chunk = fileobj.read(min(chunk_size, remaining))
|
||||
chunklen = len(chunk)
|
||||
if chunklen == 0:
|
||||
return
|
||||
remaining -= chunklen
|
||||
yield chunk
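A hedged usage sketch for the two generators above; an in-memory BytesIO stands in for a real file object.

import io

payload = io.BytesIO(b'x' * 200000)
emitted = sum(len(chunk) for chunk in
              file_generator_limited(payload, count=65536, chunk_size=16384))
assert emitted == 65536                  # stops after exactly `count` bytes

short = io.BytesIO(b'abc')
assert b''.join(file_generator(short, chunkSize=2)) == b'abc'
assert short.closed                      # file_generator closes exhausted input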
|
||||
|
||||
|
||||
def set_vary_header(response, header_name):
|
||||
'Add a Vary header to a response'
|
||||
varies = response.headers.get('Vary', '')
|
||||
varies = [x.strip() for x in varies.split(',') if x.strip()]
|
||||
if header_name not in varies:
|
||||
varies.append(header_name)
|
||||
response.headers['Vary'] = ', '.join(varies)
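A quick sketch of set_vary_header() semantics using a stand-in response object; inside a request the real cherrypy.serving.response would be passed instead.

class _FakeResponse:
    headers = {'Vary': 'Accept-Encoding'}

set_vary_header(_FakeResponse, 'Cookie')
assert _FakeResponse.headers['Vary'] == 'Accept-Encoding, Cookie'
set_vary_header(_FakeResponse, 'Cookie')    # idempotent: no duplicate entry
assert _FakeResponse.headers['Vary'] == 'Accept-Encoding, Cookie'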
|
||||
@@ -1,97 +0,0 @@
|
||||
import cherrypy
|
||||
from cherrypy.lib import httpauth
|
||||
|
||||
|
||||
def check_auth(users, encrypt=None, realm=None):
|
||||
"""If an authorization header contains credentials, return True or False.
|
||||
"""
|
||||
request = cherrypy.serving.request
|
||||
if 'authorization' in request.headers:
|
||||
# make sure the provided credentials are correctly set
|
||||
ah = httpauth.parseAuthorization(request.headers['authorization'])
|
||||
if ah is None:
|
||||
raise cherrypy.HTTPError(400, 'Bad Request')
|
||||
|
||||
if not encrypt:
|
||||
encrypt = httpauth.DIGEST_AUTH_ENCODERS[httpauth.MD5]
|
||||
|
||||
if hasattr(users, '__call__'):
|
||||
try:
|
||||
# backward compatibility
|
||||
users = users() # expect it to return a dictionary
|
||||
|
||||
if not isinstance(users, dict):
|
||||
raise ValueError(
|
||||
'Authentication users must be a dictionary')
|
||||
|
||||
# fetch the user password
|
||||
password = users.get(ah['username'], None)
|
||||
except TypeError:
|
||||
# returns a password (encrypted or clear text)
|
||||
password = users(ah['username'])
|
||||
else:
|
||||
if not isinstance(users, dict):
|
||||
raise ValueError('Authentication users must be a dictionary')
|
||||
|
||||
# fetch the user password
|
||||
password = users.get(ah['username'], None)
|
||||
|
||||
# validate the authorization by re-computing it here
|
||||
# and compare it with what the user-agent provided
|
||||
if httpauth.checkResponse(ah, password, method=request.method,
|
||||
encrypt=encrypt, realm=realm):
|
||||
request.login = ah['username']
|
||||
return True
|
||||
|
||||
request.login = False
|
||||
return False
|
||||
|
||||
|
||||
def basic_auth(realm, users, encrypt=None, debug=False):
|
||||
"""If auth fails, raise 401 with a basic authentication header.
|
||||
|
||||
realm
|
||||
A string containing the authentication realm.
|
||||
|
||||
users
|
||||
A dict of the form: {username: password} or a callable returning
|
||||
a dict.
|
||||
|
||||
encrypt
|
||||
callable used to encrypt the password returned from the user-agent.
|
||||
If None, it defaults to MD5 encryption.
|
||||
|
||||
"""
|
||||
if check_auth(users, encrypt):
|
||||
if debug:
|
||||
cherrypy.log('Auth successful', 'TOOLS.BASIC_AUTH')
|
||||
return
|
||||
|
||||
# inform the user-agent this path is protected
|
||||
cherrypy.serving.response.headers[
|
||||
'www-authenticate'] = httpauth.basicAuth(realm)
|
||||
|
||||
raise cherrypy.HTTPError(
|
||||
401, 'You are not authorized to access that resource')
|
||||
|
||||
|
||||
def digest_auth(realm, users, debug=False):
|
||||
"""If auth fails, raise 401 with a digest authentication header.
|
||||
|
||||
realm
|
||||
A string containing the authentication realm.
|
||||
users
|
||||
A dict of the form: {username: password} or a callable returning
|
||||
a dict.
|
||||
"""
|
||||
if check_auth(users, realm=realm):
|
||||
if debug:
|
||||
cherrypy.log('Auth successful', 'TOOLS.DIGEST_AUTH')
|
||||
return
|
||||
|
||||
# inform the user-agent this path is protected
|
||||
cherrypy.serving.response.headers[
|
||||
'www-authenticate'] = httpauth.digestAuth(realm)
|
||||
|
||||
raise cherrypy.HTTPError(
|
||||
401, 'You are not authorized to access that resource')
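A hedged configuration sketch (assumed, not part of this changeset) of how the tool functions above are usually enabled through config. As check_auth() allows, `users` may be a plain dict or a callable returning one; stored values are compared via httpauth.checkResponse() with the configured `encrypt` callable.

def load_users():
    # hypothetical credentials source; values must match whatever the
    # configured encrypt callable produces
    return {'admin': 'stored-credential'}

app_conf = {'/': {'tools.basic_auth.on': True,
                  'tools.basic_auth.realm': 'localhost',
                  'tools.basic_auth.users': load_users}}
# cherrypy.quickstart(Root(), '/', app_conf)   # Root is a hypothetical application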
|
||||
@@ -1,90 +0,0 @@
|
||||
# This file is part of CherryPy <http://www.cherrypy.org/>
|
||||
# -*- coding: utf-8 -*-
|
||||
# vim:ts=4:sw=4:expandtab:fileencoding=utf-8
|
||||
|
||||
import binascii
|
||||
|
||||
import cherrypy
|
||||
from cherrypy._cpcompat import base64_decode
|
||||
|
||||
|
||||
__doc__ = """This module provides a CherryPy 3.x tool which implements
|
||||
the server-side of HTTP Basic Access Authentication, as described in
|
||||
:rfc:`2617`.
|
||||
|
||||
Example usage, using the built-in checkpassword_dict function which uses a dict
|
||||
as the credentials store::
|
||||
|
||||
userpassdict = {'bird' : 'bebop', 'ornette' : 'wayout'}
|
||||
checkpassword = cherrypy.lib.auth_basic.checkpassword_dict(userpassdict)
|
||||
basic_auth = {'tools.auth_basic.on': True,
|
||||
'tools.auth_basic.realm': 'earth',
|
||||
'tools.auth_basic.checkpassword': checkpassword,
|
||||
}
|
||||
app_config = { '/' : basic_auth }
|
||||
|
||||
"""
|
||||
|
||||
__author__ = 'visteya'
|
||||
__date__ = 'April 2009'
|
||||
|
||||
|
||||
def checkpassword_dict(user_password_dict):
|
||||
"""Returns a checkpassword function which checks credentials
|
||||
against a dictionary of the form: {username : password}.
|
||||
|
||||
If you want a simple dictionary-based authentication scheme, use
|
||||
checkpassword_dict(my_credentials_dict) as the value for the
|
||||
checkpassword argument to basic_auth().
|
||||
"""
|
||||
def checkpassword(realm, user, password):
|
||||
p = user_password_dict.get(user)
|
||||
return p and p == password or False
|
||||
|
||||
return checkpassword
|
||||
|
||||
|
||||
def basic_auth(realm, checkpassword, debug=False):
|
||||
"""A CherryPy tool which hooks at before_handler to perform
|
||||
HTTP Basic Access Authentication, as specified in :rfc:`2617`.
|
||||
|
||||
If the request has an 'authorization' header with a 'Basic' scheme, this
|
||||
tool attempts to authenticate the credentials supplied in that header. If
|
||||
the request has no 'authorization' header, or if it does but the scheme is
|
||||
not 'Basic', or if authentication fails, the tool sends a 401 response with
|
||||
a 'WWW-Authenticate' Basic header.
|
||||
|
||||
realm
|
||||
A string containing the authentication realm.
|
||||
|
||||
checkpassword
|
||||
A callable which checks the authentication credentials.
|
||||
Its signature is checkpassword(realm, username, password), where
|
||||
username and password are the values obtained from the request's
|
||||
'authorization' header. If authentication succeeds, checkpassword
|
||||
returns True, else it returns False.
|
||||
|
||||
"""
|
||||
|
||||
if '"' in realm:
|
||||
raise ValueError('Realm cannot contain the " (quote) character.')
|
||||
request = cherrypy.serving.request
|
||||
|
||||
auth_header = request.headers.get('authorization')
|
||||
if auth_header is not None:
|
||||
# split() error, base64.decodestring() error
|
||||
with cherrypy.HTTPError.handle((ValueError, binascii.Error), 400, 'Bad Request'):
|
||||
scheme, params = auth_header.split(' ', 1)
|
||||
if scheme.lower() == 'basic':
|
||||
username, password = base64_decode(params).split(':', 1)
|
||||
if checkpassword(realm, username, password):
|
||||
if debug:
|
||||
cherrypy.log('Auth succeeded', 'TOOLS.AUTH_BASIC')
|
||||
request.login = username
|
||||
return # successful authentication
|
||||
|
||||
# Respond with 401 status and a WWW-Authenticate header
|
||||
cherrypy.serving.response.headers[
|
||||
'www-authenticate'] = 'Basic realm="%s"' % realm
|
||||
raise cherrypy.HTTPError(
|
||||
401, 'You are not authorized to access that resource')
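Beyond checkpassword_dict() above, any callable with the (realm, username, password) signature can serve as the credential check. A hedged sketch backed by environment variables (the variable names are hypothetical):

import os

def checkpassword_env(realm, username, password):
    """Accept a single account defined by APP_USER / APP_PASS (illustrative)."""
    return (username == os.environ.get('APP_USER') and
            password == os.environ.get('APP_PASS'))

auth_conf = {'tools.auth_basic.on': True,
             'tools.auth_basic.realm': 'earth',
             'tools.auth_basic.checkpassword': checkpassword_env}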
|
||||
@@ -1,390 +0,0 @@
|
||||
# This file is part of CherryPy <http://www.cherrypy.org/>
|
||||
# -*- coding: utf-8 -*-
|
||||
# vim:ts=4:sw=4:expandtab:fileencoding=utf-8
|
||||
|
||||
import time
|
||||
from hashlib import md5
|
||||
|
||||
import cherrypy
|
||||
from cherrypy._cpcompat import ntob, parse_http_list, parse_keqv_list
|
||||
|
||||
|
||||
__doc__ = """An implementation of the server-side of HTTP Digest Access
|
||||
Authentication, which is described in :rfc:`2617`.
|
||||
|
||||
Example usage, using the built-in get_ha1_dict_plain function which uses a dict
|
||||
of plaintext passwords as the credentials store::
|
||||
|
||||
userpassdict = {'alice' : '4x5istwelve'}
|
||||
get_ha1 = cherrypy.lib.auth_digest.get_ha1_dict_plain(userpassdict)
|
||||
digest_auth = {'tools.auth_digest.on': True,
|
||||
'tools.auth_digest.realm': 'wonderland',
|
||||
'tools.auth_digest.get_ha1': get_ha1,
|
||||
'tools.auth_digest.key': 'a565c27146791cfb',
|
||||
}
|
||||
app_config = { '/' : digest_auth }
|
||||
"""
|
||||
|
||||
__author__ = 'visteya'
|
||||
__date__ = 'April 2009'
|
||||
|
||||
|
||||
md5_hex = lambda s: md5(ntob(s)).hexdigest()
|
||||
|
||||
qop_auth = 'auth'
|
||||
qop_auth_int = 'auth-int'
|
||||
valid_qops = (qop_auth, qop_auth_int)
|
||||
|
||||
valid_algorithms = ('MD5', 'MD5-sess')
|
||||
|
||||
|
||||
def TRACE(msg):
|
||||
cherrypy.log(msg, context='TOOLS.AUTH_DIGEST')
|
||||
|
||||
# Three helper functions for users of the tool, providing three variants
|
||||
# of get_ha1() functions for three different kinds of credential stores.
|
||||
|
||||
|
||||
def get_ha1_dict_plain(user_password_dict):
|
||||
"""Returns a get_ha1 function which obtains a plaintext password from a
|
||||
dictionary of the form: {username : password}.
|
||||
|
||||
If you want a simple dictionary-based authentication scheme, with plaintext
|
||||
passwords, use get_ha1_dict_plain(my_userpass_dict) as the value for the
|
||||
get_ha1 argument to digest_auth().
|
||||
"""
|
||||
def get_ha1(realm, username):
|
||||
password = user_password_dict.get(username)
|
||||
if password:
|
||||
return md5_hex('%s:%s:%s' % (username, realm, password))
|
||||
return None
|
||||
|
||||
return get_ha1
|
||||
|
||||
|
||||
def get_ha1_dict(user_ha1_dict):
|
||||
"""Returns a get_ha1 function which obtains a HA1 password hash from a
|
||||
dictionary of the form: {username : HA1}.
|
||||
|
||||
If you want a dictionary-based authentication scheme, but with
|
||||
pre-computed HA1 hashes instead of plain-text passwords, use
|
||||
get_ha1_dict(my_userha1_dict) as the value for the get_ha1
|
||||
argument to digest_auth().
|
||||
"""
|
||||
def get_ha1(realm, username):
|
||||
return user_ha1_dict.get(username)
|
||||
|
||||
return get_ha1
|
||||
|
||||
|
||||
def get_ha1_file_htdigest(filename):
|
||||
"""Returns a get_ha1 function which obtains a HA1 password hash from a
|
||||
flat file with lines of the same format as that produced by the Apache
|
||||
htdigest utility. For example, for realm 'wonderland', username 'alice',
|
||||
and password '4x5istwelve', the htdigest line would be::
|
||||
|
||||
alice:wonderland:3238cdfe91a8b2ed8e39646921a02d4c
|
||||
|
||||
If you want to use an Apache htdigest file as the credentials store,
|
||||
then use get_ha1_file_htdigest(my_htdigest_file) as the value for the
|
||||
get_ha1 argument to digest_auth(). It is recommended that the filename
|
||||
argument be an absolute path, so lookups do not depend on the server's working directory.
|
||||
"""
|
||||
def get_ha1(realm, username):
|
||||
result = None
|
||||
f = open(filename, 'r')
|
||||
for line in f:
|
||||
u, r, ha1 = line.rstrip().split(':')
|
||||
if u == username and r == realm:
|
||||
result = ha1
|
||||
break
|
||||
f.close()
|
||||
return result
|
||||
|
||||
return get_ha1
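A worked example tying the three helpers together, reusing the credentials quoted in the docstrings above: the HA1 stored in an htdigest file is simply md5_hex('user:realm:password').

ha1 = md5_hex('alice:wonderland:4x5istwelve')
# matches the htdigest line shown above:
#   alice:wonderland:3238cdfe91a8b2ed8e39646921a02d4c
assert get_ha1_dict({'alice': ha1})('wonderland', 'alice') == ha1
assert get_ha1_dict_plain({'alice': '4x5istwelve'})('wonderland', 'alice') == ha1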
|
||||
|
||||
|
||||
def synthesize_nonce(s, key, timestamp=None):
|
||||
"""Synthesize a nonce value which resists spoofing and can be checked
|
||||
for staleness. Returns a string suitable as the value for 'nonce' in
|
||||
the www-authenticate header.
|
||||
|
||||
s
|
||||
A string related to the resource, such as the hostname of the server.
|
||||
|
||||
key
|
||||
A secret string known only to the server.
|
||||
|
||||
timestamp
|
||||
An integer seconds-since-the-epoch timestamp
|
||||
|
||||
"""
|
||||
if timestamp is None:
|
||||
timestamp = int(time.time())
|
||||
h = md5_hex('%s:%s:%s' % (timestamp, s, key))
|
||||
nonce = '%s:%s' % (timestamp, h)
|
||||
return nonce
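A short round-trip sketch: the hash part is a pure function of (timestamp, s, key), so a server holding the same secret can re-derive and verify it later, which is what HttpDigestAuthorization.validate_nonce() below does. The key value is illustrative.

nonce = synthesize_nonce('wonderland', key='a565c27146791cfb')
timestamp, hashpart = nonce.split(':', 1)
recomputed = synthesize_nonce('wonderland', 'a565c27146791cfb', timestamp)
assert recomputed.split(':', 1)[1] == hashpart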
|
||||
|
||||
|
||||
def H(s):
|
||||
"""The hash function H"""
|
||||
return md5_hex(s)
|
||||
|
||||
|
||||
class HttpDigestAuthorization (object):
|
||||
|
||||
"""Class to parse a Digest Authorization header and perform re-calculation
|
||||
of the digest.
|
||||
"""
|
||||
|
||||
def errmsg(self, s):
|
||||
return 'Digest Authorization header: %s' % s
|
||||
|
||||
def __init__(self, auth_header, http_method, debug=False):
|
||||
self.http_method = http_method
|
||||
self.debug = debug
|
||||
scheme, params = auth_header.split(' ', 1)
|
||||
self.scheme = scheme.lower()
|
||||
if self.scheme != 'digest':
|
||||
raise ValueError('Authorization scheme is not "Digest"')
|
||||
|
||||
self.auth_header = auth_header
|
||||
|
||||
# make a dict of the params
|
||||
items = parse_http_list(params)
|
||||
paramsd = parse_keqv_list(items)
|
||||
|
||||
self.realm = paramsd.get('realm')
|
||||
self.username = paramsd.get('username')
|
||||
self.nonce = paramsd.get('nonce')
|
||||
self.uri = paramsd.get('uri')
|
||||
self.method = paramsd.get('method')
|
||||
self.response = paramsd.get('response') # the response digest
|
||||
self.algorithm = paramsd.get('algorithm', 'MD5').upper()
|
||||
self.cnonce = paramsd.get('cnonce')
|
||||
self.opaque = paramsd.get('opaque')
|
||||
self.qop = paramsd.get('qop') # qop
|
||||
self.nc = paramsd.get('nc') # nonce count
|
||||
|
||||
# perform some correctness checks
|
||||
if self.algorithm not in valid_algorithms:
|
||||
raise ValueError(
|
||||
self.errmsg("Unsupported value for algorithm: '%s'" %
|
||||
self.algorithm))
|
||||
|
||||
has_reqd = (
|
||||
self.username and
|
||||
self.realm and
|
||||
self.nonce and
|
||||
self.uri and
|
||||
self.response
|
||||
)
|
||||
if not has_reqd:
|
||||
raise ValueError(
|
||||
self.errmsg('Not all required parameters are present.'))
|
||||
|
||||
if self.qop:
|
||||
if self.qop not in valid_qops:
|
||||
raise ValueError(
|
||||
self.errmsg("Unsupported value for qop: '%s'" % self.qop))
|
||||
if not (self.cnonce and self.nc):
|
||||
raise ValueError(
|
||||
self.errmsg('If qop is sent then '
|
||||
'cnonce and nc MUST be present'))
|
||||
else:
|
||||
if self.cnonce or self.nc:
|
||||
raise ValueError(
|
||||
self.errmsg('If qop is not sent, '
|
||||
'neither cnonce nor nc can be present'))
|
||||
|
||||
def __str__(self):
|
||||
return 'authorization : %s' % self.auth_header
|
||||
|
||||
def validate_nonce(self, s, key):
|
||||
"""Validate the nonce.
|
||||
Returns True if nonce was generated by synthesize_nonce() and the
|
||||
timestamp is not spoofed, else returns False.
|
||||
|
||||
s
|
||||
A string related to the resource, such as the hostname of
|
||||
the server.
|
||||
|
||||
key
|
||||
A secret string known only to the server.
|
||||
|
||||
Both s and key must be the same values which were used to synthesize
|
||||
the nonce we are trying to validate.
|
||||
"""
|
||||
try:
|
||||
timestamp, hashpart = self.nonce.split(':', 1)
|
||||
s_timestamp, s_hashpart = synthesize_nonce(
|
||||
s, key, timestamp).split(':', 1)
|
||||
is_valid = s_hashpart == hashpart
|
||||
if self.debug:
|
||||
TRACE('validate_nonce: %s' % is_valid)
|
||||
return is_valid
|
||||
except ValueError: # split() error
|
||||
pass
|
||||
return False
|
||||
|
||||
def is_nonce_stale(self, max_age_seconds=600):
|
||||
"""Returns True if a validated nonce is stale. The nonce contains a
|
||||
timestamp in plaintext and also a secure hash of the timestamp.
|
||||
You should first validate the nonce to ensure the plaintext
|
||||
timestamp is not spoofed.
|
||||
"""
|
||||
try:
|
||||
timestamp, hashpart = self.nonce.split(':', 1)
|
||||
if int(timestamp) + max_age_seconds > int(time.time()):
|
||||
return False
|
||||
except ValueError: # int() error
|
||||
pass
|
||||
if self.debug:
|
||||
TRACE('nonce is stale')
|
||||
return True
|
||||
|
||||
def HA2(self, entity_body=''):
|
||||
"""Returns the H(A2) string. See :rfc:`2617` section 3.2.2.3."""
|
||||
# RFC 2617 3.2.2.3
|
||||
# If the "qop" directive's value is "auth" or is unspecified,
|
||||
# then A2 is:
|
||||
# A2 = method ":" digest-uri-value
|
||||
#
|
||||
# If the "qop" value is "auth-int", then A2 is:
|
||||
# A2 = method ":" digest-uri-value ":" H(entity-body)
|
||||
if self.qop is None or self.qop == 'auth':
|
||||
a2 = '%s:%s' % (self.http_method, self.uri)
|
||||
elif self.qop == 'auth-int':
|
||||
a2 = '%s:%s:%s' % (self.http_method, self.uri, H(entity_body))
|
||||
else:
|
||||
# in theory, this should never happen, since I validate qop in
|
||||
# __init__()
|
||||
raise ValueError(self.errmsg('Unrecognized value for qop!'))
|
||||
return H(a2)
|
||||
|
||||
def request_digest(self, ha1, entity_body=''):
|
||||
"""Calculates the Request-Digest. See :rfc:`2617` section 3.2.2.1.
|
||||
|
||||
ha1
|
||||
The HA1 string obtained from the credentials store.
|
||||
|
||||
entity_body
|
||||
If 'qop' is set to 'auth-int', then A2 includes a hash
|
||||
of the "entity body". The entity body is the part of the
|
||||
message which follows the HTTP headers. See :rfc:`2617` section
|
||||
4.3. This refers to the entity the user agent sent in the
|
||||
request which has the Authorization header. Typically GET
|
||||
requests don't have an entity, and POST requests do.
|
||||
|
||||
"""
|
||||
ha2 = self.HA2(entity_body)
|
||||
# Request-Digest -- RFC 2617 3.2.2.1
|
||||
if self.qop:
|
||||
req = '%s:%s:%s:%s:%s' % (
|
||||
self.nonce, self.nc, self.cnonce, self.qop, ha2)
|
||||
else:
|
||||
req = '%s:%s' % (self.nonce, ha2)
|
||||
|
||||
# RFC 2617 3.2.2.2
|
||||
#
|
||||
# If the "algorithm" directive's value is "MD5" or is unspecified,
|
||||
# then A1 is:
|
||||
# A1 = unq(username-value) ":" unq(realm-value) ":" passwd
|
||||
#
|
||||
# If the "algorithm" directive's value is "MD5-sess", then A1 is
|
||||
# calculated only once - on the first request by the client following
|
||||
# receipt of a WWW-Authenticate challenge from the server.
|
||||
# A1 = H( unq(username-value) ":" unq(realm-value) ":" passwd )
|
||||
# ":" unq(nonce-value) ":" unq(cnonce-value)
|
||||
if self.algorithm == 'MD5-sess':
|
||||
ha1 = H('%s:%s:%s' % (ha1, self.nonce, self.cnonce))
|
||||
|
||||
digest = H('%s:%s' % (ha1, req))
|
||||
return digest
|
||||
|
||||
|
||||
def www_authenticate(realm, key, algorithm='MD5', nonce=None, qop=qop_auth,
|
||||
stale=False):
|
||||
"""Constructs a WWW-Authenticate header for Digest authentication."""
|
||||
if qop not in valid_qops:
|
||||
raise ValueError("Unsupported value for qop: '%s'" % qop)
|
||||
if algorithm not in valid_algorithms:
|
||||
raise ValueError("Unsupported value for algorithm: '%s'" % algorithm)
|
||||
|
||||
if nonce is None:
|
||||
nonce = synthesize_nonce(realm, key)
|
||||
s = 'Digest realm="%s", nonce="%s", algorithm="%s", qop="%s"' % (
|
||||
realm, nonce, algorithm, qop)
|
||||
if stale:
|
||||
s += ', stale="true"'
|
||||
return s
|
||||
|
||||
|
||||
def digest_auth(realm, get_ha1, key, debug=False):
|
||||
"""A CherryPy tool which hooks at before_handler to perform
|
||||
HTTP Digest Access Authentication, as specified in :rfc:`2617`.
|
||||
|
||||
If the request has an 'authorization' header with a 'Digest' scheme,
|
||||
this tool authenticates the credentials supplied in that header.
|
||||
If the request has no 'authorization' header, or if it does but the
|
||||
scheme is not "Digest", or if authentication fails, the tool sends
|
||||
a 401 response with a 'WWW-Authenticate' Digest header.
|
||||
|
||||
realm
|
||||
A string containing the authentication realm.
|
||||
|
||||
get_ha1
|
||||
A callable which looks up a username in a credentials store
|
||||
and returns the HA1 string, which is defined in the RFC to be
|
||||
MD5(username : realm : password). The function's signature is:
|
||||
``get_ha1(realm, username)``
|
||||
where username is obtained from the request's 'authorization' header.
|
||||
If username is not found in the credentials store, get_ha1() returns
|
||||
None.
|
||||
|
||||
key
|
||||
A secret string known only to the server, used in the synthesis
|
||||
of nonces.
|
||||
|
||||
"""
|
||||
request = cherrypy.serving.request
|
||||
|
||||
auth_header = request.headers.get('authorization')
|
||||
nonce_is_stale = False
|
||||
if auth_header is not None:
|
||||
with cherrypy.HTTPError.handle(ValueError, 400,
|
||||
'The Authorization header could not be parsed.'):
|
||||
auth = HttpDigestAuthorization(
|
||||
auth_header, request.method, debug=debug)
|
||||
|
||||
if debug:
|
||||
TRACE(str(auth))
|
||||
|
||||
if auth.validate_nonce(realm, key):
|
||||
ha1 = get_ha1(realm, auth.username)
|
||||
if ha1 is not None:
|
||||
# note that for request.body to be available we need to
|
||||
# hook in at before_handler, not on_start_resource like
|
||||
# 3.1.x digest_auth does.
|
||||
digest = auth.request_digest(ha1, entity_body=request.body)
|
||||
if digest == auth.response: # authenticated
|
||||
if debug:
|
||||
TRACE('digest matches auth.response')
|
||||
# Now check if nonce is stale.
|
||||
# The choice of ten minutes' lifetime for nonce is somewhat
|
||||
# arbitrary
|
||||
nonce_is_stale = auth.is_nonce_stale(max_age_seconds=600)
|
||||
if not nonce_is_stale:
|
||||
request.login = auth.username
|
||||
if debug:
|
||||
TRACE('authentication of %s successful' %
|
||||
auth.username)
|
||||
return
|
||||
|
||||
# Respond with 401 status and a WWW-Authenticate header
|
||||
header = www_authenticate(realm, key, stale=nonce_is_stale)
|
||||
if debug:
|
||||
TRACE(header)
|
||||
cherrypy.serving.response.headers['WWW-Authenticate'] = header
|
||||
raise cherrypy.HTTPError(
|
||||
401, 'You are not authorized to access that resource')
|
||||
@@ -1,470 +0,0 @@
|
||||
"""
|
||||
CherryPy implements a simple caching system as a pluggable Tool. This tool
|
||||
tries to be an (in-process) HTTP/1.1-compliant cache. It's not quite there
|
||||
yet, but it's probably good enough for most sites.
|
||||
|
||||
In general, GET responses are cached (along with selecting headers) and, if
|
||||
another request arrives for the same resource, the caching Tool will return 304
|
||||
Not Modified if possible, or serve the cached response otherwise. It also sets
|
||||
request.cached to True if serving a cached representation, and sets
|
||||
request.cacheable to False (so it doesn't get cached again).
|
||||
|
||||
If POST, PUT, or DELETE requests are made for a cached resource, they
|
||||
invalidate (delete) any cached response.
|
||||
|
||||
Usage
|
||||
=====
|
||||
|
||||
Configuration file example::
|
||||
|
||||
[/]
|
||||
tools.caching.on = True
|
||||
tools.caching.delay = 3600
|
||||
|
||||
You may use a class other than the default
|
||||
:class:`MemoryCache<cherrypy.lib.caching.MemoryCache>` by supplying the config
|
||||
entry ``cache_class``; supply the full dotted name of the replacement class
|
||||
as the config value. It must implement the basic methods ``get``, ``put``,
|
||||
``delete``, and ``clear``.
|
||||
|
||||
You may set any attribute, including overriding methods, on the cache
|
||||
instance by providing them in config. The above sets the
|
||||
:attr:`delay<cherrypy.lib.caching.MemoryCache.delay>` attribute, for example.
|
||||
"""
|
||||
|
||||
import datetime
|
||||
import sys
|
||||
import threading
|
||||
import time
|
||||
|
||||
import cherrypy
|
||||
from cherrypy.lib import cptools, httputil
|
||||
from cherrypy._cpcompat import copyitems, ntob, sorted, Event
|
||||
|
||||
|
||||
class Cache(object):
|
||||
|
||||
"""Base class for Cache implementations."""
|
||||
|
||||
def get(self):
|
||||
"""Return the current variant if in the cache, else None."""
|
||||
raise NotImplementedError
|
||||
|
||||
def put(self, obj, size):
|
||||
"""Store the current variant in the cache."""
|
||||
raise NotImplementedError
|
||||
|
||||
def delete(self):
|
||||
"""Remove ALL cached variants of the current resource."""
|
||||
raise NotImplementedError
|
||||
|
||||
def clear(self):
|
||||
"""Reset the cache to its initial, empty state."""
|
||||
raise NotImplementedError
|
||||
|
||||
|
||||
# ------------------------------ Memory Cache ------------------------------- #
|
||||
class AntiStampedeCache(dict):
|
||||
|
||||
"""A storage system for cached items which reduces stampede collisions."""
|
||||
|
||||
def wait(self, key, timeout=5, debug=False):
|
||||
"""Return the cached value for the given key, or None.
|
||||
|
||||
If timeout is not None, and the value is already
|
||||
being calculated by another thread, wait until the given timeout has
|
||||
elapsed. If the value is available before the timeout expires, it is
|
||||
returned. If not, None is returned and a sentinel is placed in the cache
|
||||
to signal other threads to wait.
|
||||
|
||||
If timeout is None, no waiting is performed and no sentinel is used.
|
||||
"""
|
||||
value = self.get(key)
|
||||
if isinstance(value, Event):
|
||||
if timeout is None:
|
||||
# Ignore the other thread and recalc it ourselves.
|
||||
if debug:
|
||||
cherrypy.log('No timeout', 'TOOLS.CACHING')
|
||||
return None
|
||||
|
||||
# Wait until it's done or times out.
|
||||
if debug:
|
||||
cherrypy.log('Waiting up to %s seconds' %
|
||||
timeout, 'TOOLS.CACHING')
|
||||
value.wait(timeout)
|
||||
if value.result is not None:
|
||||
# The other thread finished its calculation. Use it.
|
||||
if debug:
|
||||
cherrypy.log('Result!', 'TOOLS.CACHING')
|
||||
return value.result
|
||||
# Timed out. Stick an Event in the slot so other threads wait
|
||||
# on this one to finish calculating the value.
|
||||
if debug:
|
||||
cherrypy.log('Timed out', 'TOOLS.CACHING')
|
||||
e = threading.Event()
|
||||
e.result = None
|
||||
dict.__setitem__(self, key, e)
|
||||
|
||||
return None
|
||||
elif value is None:
|
||||
# Stick an Event in the slot so other threads wait
|
||||
# on this one to finish calculating the value.
|
||||
if debug:
|
||||
cherrypy.log('Timed out', 'TOOLS.CACHING')
|
||||
e = threading.Event()
|
||||
e.result = None
|
||||
dict.__setitem__(self, key, e)
|
||||
return value
|
||||
|
||||
def __setitem__(self, key, value):
|
||||
"""Set the cached value for the given key."""
|
||||
existing = self.get(key)
|
||||
dict.__setitem__(self, key, value)
|
||||
if isinstance(existing, Event):
|
||||
# Set Event.result so other threads waiting on it have
|
||||
# immediate access without needing to poll the cache again.
|
||||
existing.result = value
|
||||
existing.set()
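A compact single-threaded sketch of the anti-stampede behaviour described above: the first miss plants an Event sentinel and returns None, and a later assignment both stores the value and releases anyone waiting on that Event.

cache = AntiStampedeCache()
assert cache.wait('page-key', timeout=0.01) is None    # miss; sentinel Event planted
cache['page-key'] = 'rendered page'                    # stores the value, sets the Event
assert cache.wait('page-key') == 'rendered page'       # later readers get the value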
|
||||
|
||||
|
||||
class MemoryCache(Cache):
|
||||
|
||||
"""An in-memory cache for varying response content.
|
||||
|
||||
Each key in self.store is a URI, and each value is an AntiStampedeCache.
|
||||
The response for any given URI may vary based on the values of
|
||||
"selecting request headers"; that is, those named in the Vary
|
||||
response header. We assume the list of header names to be constant
|
||||
for each URI throughout the lifetime of the application, and store
|
||||
that list in ``self.store[uri].selecting_headers``.
|
||||
|
||||
The items contained in ``self.store[uri]`` have keys which are tuples of
|
||||
request header values (in the same order as the names in its
|
||||
selecting_headers), and values which are the actual responses.
|
||||
"""
|
||||
|
||||
maxobjects = 1000
|
||||
"""The maximum number of cached objects; defaults to 1000."""
|
||||
|
||||
maxobj_size = 100000
|
||||
"""The maximum size of each cached object in bytes; defaults to 100 KB."""
|
||||
|
||||
maxsize = 10000000
|
||||
"""The maximum size of the entire cache in bytes; defaults to 10 MB."""
|
||||
|
||||
delay = 600
|
||||
"""Seconds until the cached content expires; defaults to 600 (10 minutes).
|
||||
"""
|
||||
|
||||
antistampede_timeout = 5
|
||||
"""Seconds to wait for other threads to release a cache lock."""
|
||||
|
||||
expire_freq = 0.1
|
||||
"""Seconds to sleep between cache expiration sweeps."""
|
||||
|
||||
debug = False
|
||||
|
||||
def __init__(self):
|
||||
self.clear()
|
||||
|
||||
# Run self.expire_cache in a separate daemon thread.
|
||||
t = threading.Thread(target=self.expire_cache, name='expire_cache')
|
||||
self.expiration_thread = t
|
||||
t.daemon = True
|
||||
t.start()
|
||||
|
||||
def clear(self):
|
||||
"""Reset the cache to its initial, empty state."""
|
||||
self.store = {}
|
||||
self.expirations = {}
|
||||
self.tot_puts = 0
|
||||
self.tot_gets = 0
|
||||
self.tot_hist = 0
|
||||
self.tot_expires = 0
|
||||
self.tot_non_modified = 0
|
||||
self.cursize = 0
|
||||
|
||||
def expire_cache(self):
|
||||
"""Continuously examine cached objects, expiring stale ones.
|
||||
|
||||
This function is designed to be run in its own daemon thread,
|
||||
referenced at ``self.expiration_thread``.
|
||||
"""
|
||||
# It's possible that "time" will be set to None
|
||||
# arbitrarily, so we check "while time" to avoid exceptions.
|
||||
# See tickets #99 and #180 for more information.
|
||||
while time:
|
||||
now = time.time()
|
||||
# Must make a copy of expirations so it doesn't change size
|
||||
# during iteration
|
||||
for expiration_time, objects in copyitems(self.expirations):
|
||||
if expiration_time <= now:
|
||||
for obj_size, uri, sel_header_values in objects:
|
||||
try:
|
||||
del self.store[uri][tuple(sel_header_values)]
|
||||
self.tot_expires += 1
|
||||
self.cursize -= obj_size
|
||||
except KeyError:
|
||||
# the key may have been deleted elsewhere
|
||||
pass
|
||||
del self.expirations[expiration_time]
|
||||
time.sleep(self.expire_freq)
|
||||
|
||||
def get(self):
|
||||
"""Return the current variant if in the cache, else None."""
|
||||
request = cherrypy.serving.request
|
||||
self.tot_gets += 1
|
||||
|
||||
uri = cherrypy.url(qs=request.query_string)
|
||||
uricache = self.store.get(uri)
|
||||
if uricache is None:
|
||||
return None
|
||||
|
||||
header_values = [request.headers.get(h, '')
|
||||
for h in uricache.selecting_headers]
|
||||
variant = uricache.wait(key=tuple(sorted(header_values)),
|
||||
timeout=self.antistampede_timeout,
|
||||
debug=self.debug)
|
||||
if variant is not None:
|
||||
self.tot_hist += 1
|
||||
return variant
|
||||
|
||||
def put(self, variant, size):
|
||||
"""Store the current variant in the cache."""
|
||||
request = cherrypy.serving.request
|
||||
response = cherrypy.serving.response
|
||||
|
||||
uri = cherrypy.url(qs=request.query_string)
|
||||
uricache = self.store.get(uri)
|
||||
if uricache is None:
|
||||
uricache = AntiStampedeCache()
|
||||
uricache.selecting_headers = [
|
||||
e.value for e in response.headers.elements('Vary')]
|
||||
self.store[uri] = uricache
|
||||
|
||||
if len(self.store) < self.maxobjects:
|
||||
total_size = self.cursize + size
|
||||
|
||||
# checks if there's space for the object
|
||||
if (size < self.maxobj_size and total_size < self.maxsize):
|
||||
# add to the expirations list
|
||||
expiration_time = response.time + self.delay
|
||||
bucket = self.expirations.setdefault(expiration_time, [])
|
||||
bucket.append((size, uri, uricache.selecting_headers))
|
||||
|
||||
# add to the cache
|
||||
header_values = [request.headers.get(h, '')
|
||||
for h in uricache.selecting_headers]
|
||||
uricache[tuple(sorted(header_values))] = variant
|
||||
self.tot_puts += 1
|
||||
self.cursize = total_size
|
||||
|
||||
def delete(self):
|
||||
"""Remove ALL cached variants of the current resource."""
|
||||
uri = cherrypy.url(qs=cherrypy.serving.request.query_string)
|
||||
self.store.pop(uri, None)
|
||||
|
||||
|
||||
def get(invalid_methods=('POST', 'PUT', 'DELETE'), debug=False, **kwargs):
|
||||
"""Try to obtain cached output. If fresh enough, raise HTTPError(304).
|
||||
|
||||
If POST, PUT, or DELETE:
|
||||
* invalidates (deletes) any cached response for this resource
|
||||
* sets request.cached = False
|
||||
* sets request.cacheable = False
|
||||
|
||||
else if a cached copy exists:
|
||||
* sets request.cached = True
|
||||
* sets request.cacheable = False
|
||||
* sets response.headers to the cached values
|
||||
* checks the cached Last-Modified response header against the
|
||||
current If-(Un)Modified-Since request headers; raises 304
|
||||
if necessary.
|
||||
* sets response.status and response.body to the cached values
|
||||
* returns True
|
||||
|
||||
otherwise:
|
||||
* sets request.cached = False
|
||||
* sets request.cacheable = True
|
||||
* returns False
|
||||
"""
|
||||
request = cherrypy.serving.request
|
||||
response = cherrypy.serving.response
|
||||
|
||||
if not hasattr(cherrypy, '_cache'):
|
||||
# Make a process-wide Cache object.
|
||||
cherrypy._cache = kwargs.pop('cache_class', MemoryCache)()
|
||||
|
||||
# Take all remaining kwargs and set them on the Cache object.
|
||||
for k, v in kwargs.items():
|
||||
setattr(cherrypy._cache, k, v)
|
||||
cherrypy._cache.debug = debug
|
||||
|
||||
# POST, PUT, DELETE should invalidate (delete) the cached copy.
|
||||
# See http://www.w3.org/Protocols/rfc2616/rfc2616-sec13.html#sec13.10.
|
||||
if request.method in invalid_methods:
|
||||
if debug:
|
||||
cherrypy.log('request.method %r in invalid_methods %r' %
|
||||
(request.method, invalid_methods), 'TOOLS.CACHING')
|
||||
cherrypy._cache.delete()
|
||||
request.cached = False
|
||||
request.cacheable = False
|
||||
return False
|
||||
|
||||
if 'no-cache' in [e.value for e in request.headers.elements('Pragma')]:
|
||||
request.cached = False
|
||||
request.cacheable = True
|
||||
return False
|
||||
|
||||
cache_data = cherrypy._cache.get()
|
||||
request.cached = bool(cache_data)
|
||||
request.cacheable = not request.cached
|
||||
if request.cached:
|
||||
# Serve the cached copy.
|
||||
max_age = cherrypy._cache.delay
|
||||
for v in [e.value for e in request.headers.elements('Cache-Control')]:
|
||||
atoms = v.split('=', 1)
|
||||
directive = atoms.pop(0)
|
||||
if directive == 'max-age':
|
||||
if len(atoms) != 1 or not atoms[0].isdigit():
|
||||
raise cherrypy.HTTPError(
|
||||
400, 'Invalid Cache-Control header')
|
||||
max_age = int(atoms[0])
|
||||
break
|
||||
elif directive == 'no-cache':
|
||||
if debug:
|
||||
cherrypy.log(
|
||||
'Ignoring cache due to Cache-Control: no-cache',
|
||||
'TOOLS.CACHING')
|
||||
request.cached = False
|
||||
request.cacheable = True
|
||||
return False
|
||||
|
||||
if debug:
|
||||
cherrypy.log('Reading response from cache', 'TOOLS.CACHING')
|
||||
s, h, b, create_time = cache_data
|
||||
age = int(response.time - create_time)
|
||||
if (age > max_age):
|
||||
if debug:
|
||||
cherrypy.log('Ignoring cache due to age > %d' % max_age,
|
||||
'TOOLS.CACHING')
|
||||
request.cached = False
|
||||
request.cacheable = True
|
||||
return False
|
||||
|
||||
# Copy the response headers. See
|
||||
# https://github.com/cherrypy/cherrypy/issues/721.
|
||||
response.headers = rh = httputil.HeaderMap()
|
||||
for k in h:
|
||||
dict.__setitem__(rh, k, dict.__getitem__(h, k))
|
||||
|
||||
# Add the required Age header
|
||||
response.headers['Age'] = str(age)
|
||||
|
||||
try:
|
||||
# Note that validate_since depends on a Last-Modified header;
|
||||
# this was put into the cached copy, and should have been
|
||||
# resurrected just above (response.headers = cache_data[1]).
|
||||
cptools.validate_since()
|
||||
except cherrypy.HTTPRedirect:
|
||||
x = sys.exc_info()[1]
|
||||
if x.status == 304:
|
||||
cherrypy._cache.tot_non_modified += 1
|
||||
raise
|
||||
|
||||
# serve it & get out from the request
|
||||
response.status = s
|
||||
response.body = b
|
||||
else:
|
||||
if debug:
|
||||
cherrypy.log('request is not cached', 'TOOLS.CACHING')
|
||||
return request.cached
|
||||
|
||||
|
||||
def tee_output():
|
||||
"""Tee response output to cache storage. Internal."""
|
||||
# Used by CachingTool by attaching to request.hooks
|
||||
|
||||
request = cherrypy.serving.request
|
||||
if 'no-store' in request.headers.values('Cache-Control'):
|
||||
return
|
||||
|
||||
def tee(body):
|
||||
"""Tee response.body into a list."""
|
||||
if ('no-cache' in response.headers.values('Pragma') or
|
||||
'no-store' in response.headers.values('Cache-Control')):
|
||||
for chunk in body:
|
||||
yield chunk
|
||||
return
|
||||
|
||||
output = []
|
||||
for chunk in body:
|
||||
output.append(chunk)
|
||||
yield chunk
|
||||
|
||||
# save the cache data
|
||||
body = ntob('').join(output)
|
||||
cherrypy._cache.put((response.status, response.headers or {},
|
||||
body, response.time), len(body))
|
||||
|
||||
response = cherrypy.serving.response
|
||||
response.body = tee(response.body)
|
||||
|
||||
|
||||
def expires(secs=0, force=False, debug=False):
|
||||
"""Tool for influencing cache mechanisms using the 'Expires' header.
|
||||
|
||||
secs
|
||||
Must be either an int or a datetime.timedelta, and indicates the
|
||||
number of seconds between response.time and when the response should
|
||||
expire. The 'Expires' header will be set to response.time + secs.
|
||||
If secs is zero, the 'Expires' header is set to a fixed date in the past, and
|
||||
the following "cache prevention" headers are also set:
|
||||
|
||||
* Pragma: no-cache
|
||||
* Cache-Control: no-cache, must-revalidate
|
||||
|
||||
force
|
||||
If False, the following headers are checked:
|
||||
|
||||
* Etag
|
||||
* Last-Modified
|
||||
* Age
|
||||
* Expires
|
||||
|
||||
If any are already present, none of the above response headers are set.
|
||||
|
||||
"""
|
||||
|
||||
response = cherrypy.serving.response
|
||||
headers = response.headers
|
||||
|
||||
cacheable = False
|
||||
if not force:
|
||||
# some header names that indicate that the response can be cached
|
||||
for indicator in ('Etag', 'Last-Modified', 'Age', 'Expires'):
|
||||
if indicator in headers:
|
||||
cacheable = True
|
||||
break
|
||||
|
||||
if not cacheable and not force:
|
||||
if debug:
|
||||
cherrypy.log('request is not cacheable', 'TOOLS.EXPIRES')
|
||||
else:
|
||||
if debug:
|
||||
cherrypy.log('request is cacheable', 'TOOLS.EXPIRES')
|
||||
if isinstance(secs, datetime.timedelta):
|
||||
secs = (86400 * secs.days) + secs.seconds
|
||||
|
||||
if secs == 0:
|
||||
if force or ('Pragma' not in headers):
|
||||
headers['Pragma'] = 'no-cache'
|
||||
if cherrypy.serving.request.protocol >= (1, 1):
|
||||
if force or 'Cache-Control' not in headers:
|
||||
headers['Cache-Control'] = 'no-cache, must-revalidate'
|
||||
# Set an explicit Expires date in the past.
|
||||
expiry = httputil.HTTPDate(1169942400.0)
|
||||
else:
|
||||
expiry = httputil.HTTPDate(response.time + secs)
|
||||
if force or 'Expires' not in headers:
|
||||
headers['Expires'] = expiry
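A hedged per-path config sketch (paths and values are illustrative): the tool above is typically switched on for static assets so they may be cached for a fixed period, while dynamic pages are left untouched.

app_conf = {
    '/static': {
        'tools.expires.on': True,
        'tools.expires.secs': 3600,    # Expires: response.time + 1 hour
    },
}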
|
||||
@@ -1,391 +0,0 @@
|
||||
"""Code-coverage tools for CherryPy.
|
||||
|
||||
To use this module, or the coverage tools in the test suite,
|
||||
you need to download 'coverage.py', either Gareth Rees' `original
|
||||
implementation <http://www.garethrees.org/2001/12/04/python-coverage/>`_
|
||||
or Ned Batchelder's `enhanced version
|
||||
<http://www.nedbatchelder.com/code/modules/coverage.html>`_
|
||||
|
||||
To turn on coverage tracing, use the following code::
|
||||
|
||||
cherrypy.engine.subscribe('start', covercp.start)
|
||||
|
||||
DO NOT subscribe anything on the 'start_thread' channel, as previously
|
||||
recommended. Calling start once in the main thread should be sufficient
|
||||
to start coverage on all threads. Calling start again in each thread
|
||||
effectively clears any coverage data gathered up to that point.
|
||||
|
||||
Run your code, then use the ``covercp.serve()`` function to browse the
|
||||
results in a web browser. If you run this module from the command line,
|
||||
it will call ``serve()`` for you.
|
||||
"""
|
||||
|
||||
import re
|
||||
import sys
|
||||
import cgi
|
||||
import os
|
||||
import os.path
|
||||
|
||||
import cherrypy
|
||||
from cherrypy._cpcompat import quote_plus
|
||||
|
||||
|
||||
localFile = os.path.join(os.path.dirname(__file__), 'coverage.cache')
|
||||
|
||||
the_coverage = None
|
||||
try:
|
||||
from coverage import coverage
|
||||
the_coverage = coverage(data_file=localFile)
|
||||
|
||||
def start():
|
||||
the_coverage.start()
|
||||
except ImportError:
|
||||
# Setting the_coverage to None will raise errors
|
||||
# that need to be trapped downstream.
|
||||
the_coverage = None
|
||||
|
||||
import warnings
|
||||
warnings.warn(
|
||||
'No code coverage will be performed; '
|
||||
'coverage.py could not be imported.')
|
||||
|
||||
def start():
|
||||
pass
|
||||
start.priority = 20
|
||||
|
||||
TEMPLATE_MENU = """<html>
|
||||
<head>
|
||||
<title>CherryPy Coverage Menu</title>
|
||||
<style>
|
||||
body {font: 9pt Arial, serif;}
|
||||
#tree {
|
||||
font-size: 8pt;
|
||||
font-family: Andale Mono, monospace;
|
||||
white-space: pre;
|
||||
}
|
||||
#tree a:active, a:focus {
|
||||
background-color: black;
|
||||
padding: 1px;
|
||||
color: white;
|
||||
border: 0px solid #9999FF;
|
||||
-moz-outline-style: none;
|
||||
}
|
||||
.fail { color: red;}
|
||||
.pass { color: #888;}
|
||||
#pct { text-align: right;}
|
||||
h3 {
|
||||
font-size: small;
|
||||
font-weight: bold;
|
||||
font-style: italic;
|
||||
margin-top: 5px;
|
||||
}
|
||||
input { border: 1px solid #ccc; padding: 2px; }
|
||||
.directory {
|
||||
color: #933;
|
||||
font-style: italic;
|
||||
font-weight: bold;
|
||||
font-size: 10pt;
|
||||
}
|
||||
.file {
|
||||
color: #400;
|
||||
}
|
||||
a { text-decoration: none; }
|
||||
#crumbs {
|
||||
color: white;
|
||||
font-size: 8pt;
|
||||
font-family: Andale Mono, monospace;
|
||||
width: 100%;
|
||||
background-color: black;
|
||||
}
|
||||
#crumbs a {
|
||||
color: #f88;
|
||||
}
|
||||
#options {
|
||||
line-height: 2.3em;
|
||||
border: 1px solid black;
|
||||
background-color: #eee;
|
||||
padding: 4px;
|
||||
}
|
||||
#exclude {
|
||||
width: 100%;
|
||||
margin-bottom: 3px;
|
||||
border: 1px solid #999;
|
||||
}
|
||||
#submit {
|
||||
background-color: black;
|
||||
color: white;
|
||||
border: 0;
|
||||
margin-bottom: -9px;
|
||||
}
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
<h2>CherryPy Coverage</h2>"""
|
||||
|
||||
TEMPLATE_FORM = """
|
||||
<div id="options">
|
||||
<form action='menu' method=GET>
|
||||
<input type='hidden' name='base' value='%(base)s' />
|
||||
Show percentages
|
||||
<input type='checkbox' %(showpct)s name='showpct' value='checked' /><br />
|
||||
Hide files over
|
||||
<input type='text' id='pct' name='pct' value='%(pct)s' size='3' />%%<br />
|
||||
Exclude files matching<br />
|
||||
<input type='text' id='exclude' name='exclude'
|
||||
value='%(exclude)s' size='20' />
|
||||
<br />
|
||||
|
||||
<input type='submit' value='Change view' id="submit"/>
|
||||
</form>
|
||||
</div>"""
|
||||
|
||||
TEMPLATE_FRAMESET = """<html>
|
||||
<head><title>CherryPy coverage data</title></head>
|
||||
<frameset cols='250, 1*'>
|
||||
<frame src='menu?base=%s' />
|
||||
<frame name='main' src='' />
|
||||
</frameset>
|
||||
</html>
|
||||
"""
|
||||
|
||||
TEMPLATE_COVERAGE = """<html>
|
||||
<head>
|
||||
<title>Coverage for %(name)s</title>
|
||||
<style>
|
||||
h2 { margin-bottom: .25em; }
|
||||
p { margin: .25em; }
|
||||
.covered { color: #000; background-color: #fff; }
|
||||
.notcovered { color: #fee; background-color: #500; }
|
||||
.excluded { color: #00f; background-color: #fff; }
|
||||
table .covered, table .notcovered, table .excluded
|
||||
{ font-family: Andale Mono, monospace;
|
||||
font-size: 10pt; white-space: pre; }
|
||||
|
||||
.lineno { background-color: #eee;}
|
||||
.notcovered .lineno { background-color: #000;}
|
||||
table { border-collapse: collapse; }
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
<h2>%(name)s</h2>
|
||||
<p>%(fullpath)s</p>
|
||||
<p>Coverage: %(pc)s%%</p>"""
|
||||
|
||||
TEMPLATE_LOC_COVERED = """<tr class="covered">
|
||||
<td class="lineno">%s </td>
|
||||
<td>%s</td>
|
||||
</tr>\n"""
|
||||
TEMPLATE_LOC_NOT_COVERED = """<tr class="notcovered">
|
||||
<td class="lineno">%s </td>
|
||||
<td>%s</td>
|
||||
</tr>\n"""
|
||||
TEMPLATE_LOC_EXCLUDED = """<tr class="excluded">
|
||||
<td class="lineno">%s </td>
|
||||
<td>%s</td>
|
||||
</tr>\n"""
|
||||
|
||||
TEMPLATE_ITEM = (
|
||||
"%s%s<a class='file' href='report?name=%s' target='main'>%s</a>\n"
|
||||
)
|
||||
|
||||
|
||||
def _percent(statements, missing):
|
||||
s = len(statements)
|
||||
e = s - len(missing)
|
||||
if s > 0:
|
||||
return int(round(100.0 * e / s))
|
||||
return 0
|
||||
|
||||
|
||||
def _show_branch(root, base, path, pct=0, showpct=False, exclude='',
|
||||
coverage=the_coverage):
|
||||
|
||||
# Show the directory name and any of our children
|
||||
dirs = [k for k, v in root.items() if v]
|
||||
dirs.sort()
|
||||
for name in dirs:
|
||||
newpath = os.path.join(path, name)
|
||||
|
||||
if newpath.lower().startswith(base):
|
||||
relpath = newpath[len(base):]
|
||||
yield '| ' * relpath.count(os.sep)
|
||||
yield (
|
||||
"<a class='directory' "
|
||||
"href='menu?base=%s&exclude=%s'>%s</a>\n" %
|
||||
(newpath, quote_plus(exclude), name)
|
||||
)
|
||||
|
||||
for chunk in _show_branch(
|
||||
root[name], base, newpath, pct, showpct,
|
||||
exclude, coverage=coverage
|
||||
):
|
||||
yield chunk
|
||||
|
||||
# Now list the files
|
||||
if path.lower().startswith(base):
|
||||
relpath = path[len(base):]
|
||||
files = [k for k, v in root.items() if not v]
|
||||
files.sort()
|
||||
for name in files:
|
||||
newpath = os.path.join(path, name)
|
||||
|
||||
pc_str = ''
|
||||
if showpct:
|
||||
try:
|
||||
_, statements, _, missing, _ = coverage.analysis2(newpath)
|
||||
except:
|
||||
# Yes, we really want to pass on all errors.
|
||||
pass
|
||||
else:
|
||||
pc = _percent(statements, missing)
|
||||
pc_str = ('%3d%% ' % pc).replace(' ', ' ')
|
||||
if pc < float(pct) or pc == -1:
|
||||
pc_str = "<span class='fail'>%s</span>" % pc_str
|
||||
else:
|
||||
pc_str = "<span class='pass'>%s</span>" % pc_str
|
||||
|
||||
yield TEMPLATE_ITEM % ('| ' * (relpath.count(os.sep) + 1),
|
||||
pc_str, newpath, name)
|
||||
|
||||
|
||||
def _skip_file(path, exclude):
|
||||
if exclude:
|
||||
return bool(re.search(exclude, path))
|
||||
|
||||
|
||||
def _graft(path, tree):
|
||||
d = tree
|
||||
|
||||
p = path
|
||||
atoms = []
|
||||
while True:
|
||||
p, tail = os.path.split(p)
|
||||
if not tail:
|
||||
break
|
||||
atoms.append(tail)
|
||||
atoms.append(p)
|
||||
if p != '/':
|
||||
atoms.append('/')
|
||||
|
||||
atoms.reverse()
|
||||
for node in atoms:
|
||||
if node:
|
||||
d = d.setdefault(node, {})
|
||||
|
||||
|
||||
def get_tree(base, exclude, coverage=the_coverage):
|
||||
"""Return covered module names as a nested dict."""
|
||||
tree = {}
|
||||
runs = coverage.data.executed_files()
|
||||
for path in runs:
|
||||
if not _skip_file(path, exclude) and not os.path.isdir(path):
|
||||
_graft(path, tree)
|
||||
return tree
|
||||
|
||||
|
||||
class CoverStats(object):
|
||||
|
||||
def __init__(self, coverage, root=None):
|
||||
self.coverage = coverage
|
||||
if root is None:
|
||||
# Guess initial depth. Files outside this path will not be
|
||||
# reachable from the web interface.
|
||||
import cherrypy
|
||||
root = os.path.dirname(cherrypy.__file__)
|
||||
self.root = root
|
||||
|
||||
@cherrypy.expose
|
||||
def index(self):
|
||||
return TEMPLATE_FRAMESET % self.root.lower()
|
||||
|
||||
@cherrypy.expose
|
||||
def menu(self, base='/', pct='50', showpct='',
|
||||
exclude=r'python\d\.\d|test|tut\d|tutorial'):
|
||||
|
||||
# The coverage module uses all-lower-case names.
|
||||
base = base.lower().rstrip(os.sep)
|
||||
|
||||
yield TEMPLATE_MENU
|
||||
yield TEMPLATE_FORM % locals()
|
||||
|
||||
# Start by showing links for parent paths
|
||||
yield "<div id='crumbs'>"
|
||||
path = ''
|
||||
atoms = base.split(os.sep)
|
||||
atoms.pop()
|
||||
for atom in atoms:
|
||||
path += atom + os.sep
|
||||
yield ("<a href='menu?base=%s&exclude=%s'>%s</a> %s"
|
||||
% (path, quote_plus(exclude), atom, os.sep))
|
||||
yield '</div>'
|
||||
|
||||
yield "<div id='tree'>"
|
||||
|
||||
# Then display the tree
|
||||
tree = get_tree(base, exclude, self.coverage)
|
||||
if not tree:
|
||||
yield '<p>No modules covered.</p>'
|
||||
else:
|
||||
for chunk in _show_branch(tree, base, '/', pct,
|
||||
showpct == 'checked', exclude,
|
||||
coverage=self.coverage):
|
||||
yield chunk
|
||||
|
||||
yield '</div>'
|
||||
yield '</body></html>'
|
||||
|
||||
def annotated_file(self, filename, statements, excluded, missing):
|
||||
source = open(filename, 'r')
|
||||
buffer = []
|
||||
for lineno, line in enumerate(source.readlines()):
|
||||
lineno += 1
|
||||
line = line.strip('\n\r')
|
||||
empty_the_buffer = True
|
||||
if lineno in excluded:
|
||||
template = TEMPLATE_LOC_EXCLUDED
|
||||
elif lineno in missing:
|
||||
template = TEMPLATE_LOC_NOT_COVERED
|
||||
elif lineno in statements:
|
||||
template = TEMPLATE_LOC_COVERED
|
||||
else:
|
||||
empty_the_buffer = False
|
||||
buffer.append((lineno, line))
|
||||
if empty_the_buffer:
|
||||
for lno, pastline in buffer:
|
||||
yield template % (lno, cgi.escape(pastline))
|
||||
buffer = []
|
||||
yield template % (lineno, cgi.escape(line))
|
||||
|
||||
@cherrypy.expose
|
||||
def report(self, name):
|
||||
filename, statements, excluded, missing, _ = self.coverage.analysis2(
|
||||
name)
|
||||
pc = _percent(statements, missing)
|
||||
yield TEMPLATE_COVERAGE % dict(name=os.path.basename(name),
|
||||
fullpath=name,
|
||||
pc=pc)
|
||||
yield '<table>\n'
|
||||
for line in self.annotated_file(filename, statements, excluded,
|
||||
missing):
|
||||
yield line
|
||||
yield '</table>'
|
||||
yield '</body>'
|
||||
yield '</html>'
|
||||
|
||||
|
||||
def serve(path=localFile, port=8080, root=None):
|
||||
if coverage is None:
|
||||
raise ImportError('The coverage module could not be imported.')
|
||||
from coverage import coverage
|
||||
cov = coverage(data_file=path)
|
||||
cov.load()
|
||||
|
||||
import cherrypy
|
||||
cherrypy.config.update({'server.socket_port': int(port),
|
||||
'server.thread_pool': 10,
|
||||
'environment': 'production',
|
||||
})
|
||||
cherrypy.quickstart(CoverStats(cov, root))
|
||||
|
||||
if __name__ == '__main__':
|
||||
serve(*tuple(sys.argv[1:]))
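A hedged end-to-end sketch of the workflow the module docstring describes: subscribe start() before the engine starts, exercise the application, then browse the results with serve(). The wrapper and its arguments are illustrative, not part of this module.

def run_with_coverage(app_root, browse_port=8081):
    """Serve app_root under coverage, then report on the gathered data."""
    import cherrypy
    from cherrypy.lib import covercp
    cherrypy.engine.subscribe('start', covercp.start)
    cherrypy.quickstart(app_root)           # blocks until the engine stops
    covercp.serve(port=browse_port)         # then browse coverage.cache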
|
||||
@@ -1,690 +0,0 @@
|
||||
"""CPStats, a package for collecting and reporting on program statistics.
|
||||
|
||||
Overview
|
||||
========
|
||||
|
||||
Statistics about program operation are an invaluable monitoring and debugging
|
||||
tool. Unfortunately, the gathering and reporting of these critical values is
|
||||
usually ad-hoc. This package aims to add a centralized place for gathering
|
||||
statistical performance data, a structure for recording that data which
|
||||
provides for extrapolation of that data into more useful information,
|
||||
and a method of serving that data to both human investigators and
|
||||
monitoring software. Let's examine each of those in more detail.
|
||||
|
||||
Data Gathering
|
||||
--------------
|
||||
|
||||
Just as Python's `logging` module provides a common importable for gathering
|
||||
and sending messages, performance statistics would benefit from a similar
|
||||
common mechanism, and one that does *not* require each package which wishes
|
||||
to collect stats to import a third-party module. Therefore, we choose to
|
||||
re-use the `logging` module by adding a `statistics` object to it.
|
||||
|
||||
That `logging.statistics` object is a nested dict. It is not a custom class,
|
||||
because that would:
|
||||
|
||||
1. require libraries and applications to import a third-party module in
|
||||
order to participate
|
||||
2. inhibit innovation in extrapolation approaches and in reporting tools, and
|
||||
3. be slow.
|
||||
|
||||
There are, however, some specifications regarding the structure of the dict.::
|
||||
|
||||
{
|
||||
+----"SQLAlchemy": {
|
||||
| "Inserts": 4389745,
|
||||
| "Inserts per Second":
|
||||
| lambda s: s["Inserts"] / (time() - s["Start"]),
|
||||
| C +---"Table Statistics": {
|
||||
| o | "widgets": {-----------+
|
||||
N | l | "Rows": 1.3M, | Record
|
||||
a | l | "Inserts": 400, |
|
||||
m | e | },---------------------+
|
||||
e | c | "froobles": {
|
||||
s | t | "Rows": 7845,
|
||||
p | i | "Inserts": 0,
|
||||
a | o | },
|
||||
c | n +---},
|
||||
e | "Slow Queries":
|
||||
| [{"Query": "SELECT * FROM widgets;",
|
||||
| "Processing Time": 47.840923343,
|
||||
| },
|
||||
| ],
|
||||
+----},
|
||||
}
|
||||
|
||||
The `logging.statistics` dict has four levels. The topmost level is nothing
|
||||
more than a set of names to introduce modularity, usually along the lines of
|
||||
package names. If the SQLAlchemy project wanted to participate, for example,
|
||||
it might populate the item `logging.statistics['SQLAlchemy']`, whose value
|
||||
would be a second-layer dict we call a "namespace". Namespaces help multiple
|
||||
packages to avoid collisions over key names, and make reports easier to read,
|
||||
to boot. The maintainers of SQLAlchemy should feel free to use more than one
|
||||
namespace if needed (such as 'SQLAlchemy ORM'). Note that there are no case
|
||||
or other syntax constraints on the namespace names; they should be chosen
|
||||
to be maximally readable by humans (neither too short nor too long).
|
||||
|
||||
Each namespace, then, is a dict of named statistical values, such as
|
||||
'Requests/sec' or 'Uptime'. You should choose names which will look
|
||||
good on a report: spaces and capitalization are just fine.
|
||||
|
||||
In addition to scalars, values in a namespace MAY be a (third-layer)
|
||||
dict, or a list, called a "collection". For example, the CherryPy
|
||||
:class:`StatsTool` keeps track of what each request is doing (or has most
|
||||
recently done) in a 'Requests' collection, where each key is a thread ID; each
|
||||
value in the subdict MUST be a fourth dict (whew!) of statistical data about
|
||||
each thread. We call each subdict in the collection a "record". Similarly,
|
||||
the :class:`StatsTool` also keeps a list of slow queries, where each record
|
||||
contains data about each slow query, in order.
|
||||
|
||||
Values in a namespace or record may also be functions, which brings us to:
|
||||
|
||||
Extrapolation
|
||||
-------------
|
||||
|
||||
The collection of statistical data needs to be fast, as close to unnoticeable
|
||||
as possible to the host program. That requires us to minimize I/O, for example,
|
||||
but in Python it also means we need to minimize function calls. So when you
|
||||
are designing your namespace and record values, try to insert the most basic
|
||||
scalar values you already have on hand.
|
||||
|
||||
When it comes time to report on the gathered data, however, we usually have
|
||||
much more freedom in what we can calculate. Therefore, whenever reporting
|
||||
tools (like the provided :class:`StatsPage` CherryPy class) fetch the contents
|
||||
of `logging.statistics` for reporting, they first call
|
||||
`extrapolate_statistics` (passing the whole `statistics` dict as the only
|
||||
argument). This makes a deep copy of the statistics dict so that the
|
||||
reporting tool can both iterate over it and even change it without harming
|
||||
the original. But it also expands any functions in the dict by calling them.
|
||||
For example, you might have a 'Current Time' entry in the namespace with the
|
||||
value "lambda scope: time.time()". The "scope" parameter is the current
|
||||
namespace dict (or record, if we're currently expanding one of those
|
||||
instead), allowing you access to existing static entries. If you're truly
|
||||
evil, you can even modify more than one entry at a time.
|
||||
|
||||
However, don't try to calculate an entry and then use its value in further
|
||||
extrapolations; the order in which the functions are called is not guaranteed.
|
||||
This can lead to a certain amount of duplicated work (or a redesign of your
|
||||
schema), but that's better than complicating the spec.
|
||||
|
||||
After the whole thing has been extrapolated, it's time for:

Reporting
---------

The :class:`StatsPage` class grabs the `logging.statistics` dict, extrapolates
it all, and then transforms it to HTML for easy viewing. Each namespace gets
its own header and attribute table, plus an extra table for each collection.
This is NOT part of the statistics specification; other tools can format how
they like.

You can control which columns are output and how they are formatted by updating
StatsPage.formatting, which is a dict that mirrors the keys and nesting of
`logging.statistics`. The difference is that, instead of data values, it has
formatting values. Use None for a given key to indicate to the StatsPage that a
given column should not be output. Use a string with formatting
(such as '%.3f') to interpolate the value(s), or use a callable (such as
lambda v: v.isoformat()) for more advanced formatting. Any entry which is not
mentioned in the formatting dict is output unchanged.

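For example (a sketch; the namespace and key names are illustrative)::

    StatsPage.formatting['My Product'] = {
        'Start Time': lambda v: time.ctime(v),   # callable formatter
        'Requests/sec': '%.2f',                  # printf-style string
        'Internal Counter': None,                # hide this column entirely
    }
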
Monitoring
----------

Although the HTML output takes pains to assign unique id's to each <td> with
statistical data, you're probably better off fetching /cpstats/data, which
outputs the whole (extrapolated) `logging.statistics` dict in JSON format.
That is probably easier to parse, and doesn't have any formatting controls,
so you get the "original" data in a consistently-serialized format.
Note: there's no treatment yet for datetime objects. Try time.time() instead
for now if you can. Nagios will probably thank you.

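A monitoring script might poll it like this (a sketch; host, port and the
exact keys depend on your deployment, and Python 3's urllib is assumed)::

    import json
    from urllib.request import urlopen

    with urlopen('http://localhost:8080/cpstats/data') as resp:
        stats = json.loads(resp.read())
    print(stats['CherryPy Applications']['Requests/Second'])
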
Turning Collection Off
----------------------

It is recommended each namespace have an "Enabled" item which, if False,
stops collection (but not reporting) of statistical data. Applications
SHOULD provide controls to pause and resume collection by setting these
entries to False or True, if present.

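For example, to pause collection for the built-in application namespace from
your own code (the same thing the StatsPage pause/resume handlers do)::

    logging.statistics.get('CherryPy Applications', {})['Enabled'] = False

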
Usage
=====

To collect statistics on CherryPy applications::

    from cherrypy.lib import cpstats
    appconfig['/']['tools.cpstats.on'] = True

To collect statistics on your own code::

    import logging
    import time

    # Initialize the repository
    if not hasattr(logging, 'statistics'): logging.statistics = {}
    # Initialize my namespace
    mystats = logging.statistics.setdefault('My Stuff', {})
    # Initialize my namespace's scalars and collections
    mystats.update({
        'Enabled': True,
        'Start Time': time.time(),
        'Important Events': 0,
        'Events/Second': lambda s: (
            (s['Important Events'] / (time.time() - s['Start Time']))),
        })
    ...
    for event in events:
        ...
        # Collect stats
        if mystats.get('Enabled', False):
            mystats['Important Events'] += 1

To report statistics::

    root.cpstats = cpstats.StatsPage()

To format statistics reports::

    See 'Reporting', above.

"""
import logging
|
||||
import os
|
||||
import sys
|
||||
import threading
|
||||
import time
|
||||
|
||||
import cherrypy
|
||||
from cherrypy._cpcompat import json
|
||||
|
||||
# ------------------------------- Statistics -------------------------------- #
|
||||
|
||||
if not hasattr(logging, 'statistics'):
|
||||
logging.statistics = {}
|
||||
|
||||
|
||||
def extrapolate_statistics(scope):
|
||||
"""Return an extrapolated copy of the given scope."""
|
||||
c = {}
|
||||
for k, v in list(scope.items()):
|
||||
if isinstance(v, dict):
|
||||
v = extrapolate_statistics(v)
|
||||
elif isinstance(v, (list, tuple)):
|
||||
v = [extrapolate_statistics(record) for record in v]
|
||||
elif hasattr(v, '__call__'):
|
||||
v = v(scope)
|
||||
c[k] = v
|
||||
return c
|
||||
|
||||
|
||||
# -------------------- CherryPy Applications Statistics --------------------- #
|
||||
|
||||
appstats = logging.statistics.setdefault('CherryPy Applications', {})
|
||||
appstats.update({
|
||||
'Enabled': True,
|
||||
'Bytes Read/Request': lambda s: (
|
||||
s['Total Requests'] and
|
||||
(s['Total Bytes Read'] / float(s['Total Requests'])) or
|
||||
0.0
|
||||
),
|
||||
'Bytes Read/Second': lambda s: s['Total Bytes Read'] / s['Uptime'](s),
|
||||
'Bytes Written/Request': lambda s: (
|
||||
s['Total Requests'] and
|
||||
(s['Total Bytes Written'] / float(s['Total Requests'])) or
|
||||
0.0
|
||||
),
|
||||
'Bytes Written/Second': lambda s: (
|
||||
s['Total Bytes Written'] / s['Uptime'](s)
|
||||
),
|
||||
'Current Time': lambda s: time.time(),
|
||||
'Current Requests': 0,
|
||||
'Requests/Second': lambda s: float(s['Total Requests']) / s['Uptime'](s),
|
||||
'Server Version': cherrypy.__version__,
|
||||
'Start Time': time.time(),
|
||||
'Total Bytes Read': 0,
|
||||
'Total Bytes Written': 0,
|
||||
'Total Requests': 0,
|
||||
'Total Time': 0,
|
||||
'Uptime': lambda s: time.time() - s['Start Time'],
|
||||
'Requests': {},
|
||||
})
|
||||
|
||||
proc_time = lambda s: time.time() - s['Start Time']
|
||||
|
||||
|
||||
class ByteCountWrapper(object):
|
||||
|
||||
"""Wraps a file-like object, counting the number of bytes read."""
|
||||
|
||||
def __init__(self, rfile):
|
||||
self.rfile = rfile
|
||||
self.bytes_read = 0
|
||||
|
||||
def read(self, size=-1):
|
||||
data = self.rfile.read(size)
|
||||
self.bytes_read += len(data)
|
||||
return data
|
||||
|
||||
def readline(self, size=-1):
|
||||
data = self.rfile.readline(size)
|
||||
self.bytes_read += len(data)
|
||||
return data
|
||||
|
||||
def readlines(self, sizehint=0):
|
||||
# Shamelessly stolen from StringIO
|
||||
total = 0
|
||||
lines = []
|
||||
line = self.readline()
|
||||
while line:
|
||||
lines.append(line)
|
||||
total += len(line)
|
||||
if 0 < sizehint <= total:
|
||||
break
|
||||
line = self.readline()
|
||||
return lines
|
||||
|
||||
def close(self):
|
||||
self.rfile.close()
|
||||
|
||||
def __iter__(self):
|
||||
return self
|
||||
|
||||
def next(self):
|
||||
data = self.rfile.next()
|
||||
self.bytes_read += len(data)
|
||||
return data
|
||||
|
||||
|
||||
average_uriset_time = lambda s: s['Count'] and (s['Sum'] / s['Count']) or 0
|
||||
|
||||
|
||||
def _get_threading_ident():
|
||||
if sys.version_info >= (3, 3):
|
||||
return threading.get_ident()
|
||||
return threading._get_ident()
|
||||
|
||||
class StatsTool(cherrypy.Tool):
|
||||
|
||||
"""Record various information about the current request."""
|
||||
|
||||
def __init__(self):
|
||||
cherrypy.Tool.__init__(self, 'on_end_request', self.record_stop)
|
||||
|
||||
def _setup(self):
|
||||
"""Hook this tool into cherrypy.request.
|
||||
|
||||
The standard CherryPy request object will automatically call this
|
||||
method when the tool is "turned on" in config.
|
||||
"""
|
||||
if appstats.get('Enabled', False):
|
||||
cherrypy.Tool._setup(self)
|
||||
self.record_start()
|
||||
|
||||
def record_start(self):
|
||||
"""Record the beginning of a request."""
|
||||
request = cherrypy.serving.request
|
||||
if not hasattr(request.rfile, 'bytes_read'):
|
||||
request.rfile = ByteCountWrapper(request.rfile)
|
||||
request.body.fp = request.rfile
|
||||
|
||||
r = request.remote
|
||||
|
||||
appstats['Current Requests'] += 1
|
||||
appstats['Total Requests'] += 1
|
||||
appstats['Requests'][_get_threading_ident()] = {
|
||||
'Bytes Read': None,
|
||||
'Bytes Written': None,
|
||||
# Use a lambda so the ip gets updated by tools.proxy later
|
||||
'Client': lambda s: '%s:%s' % (r.ip, r.port),
|
||||
'End Time': None,
|
||||
'Processing Time': proc_time,
|
||||
'Request-Line': request.request_line,
|
||||
'Response Status': None,
|
||||
'Start Time': time.time(),
|
||||
}
|
||||
|
||||
def record_stop(
|
||||
self, uriset=None, slow_queries=1.0, slow_queries_count=100,
|
||||
debug=False, **kwargs):
|
||||
"""Record the end of a request."""
|
||||
resp = cherrypy.serving.response
|
||||
w = appstats['Requests'][_get_threading_ident()]
|
||||
|
||||
r = cherrypy.request.rfile.bytes_read
|
||||
w['Bytes Read'] = r
|
||||
appstats['Total Bytes Read'] += r
|
||||
|
||||
if resp.stream:
|
||||
w['Bytes Written'] = 'chunked'
|
||||
else:
|
||||
cl = int(resp.headers.get('Content-Length', 0))
|
||||
w['Bytes Written'] = cl
|
||||
appstats['Total Bytes Written'] += cl
|
||||
|
||||
w['Response Status'] = getattr(
|
||||
resp, 'output_status', None) or resp.status
|
||||
|
||||
w['End Time'] = time.time()
|
||||
p = w['End Time'] - w['Start Time']
|
||||
w['Processing Time'] = p
|
||||
appstats['Total Time'] += p
|
||||
|
||||
appstats['Current Requests'] -= 1
|
||||
|
||||
if debug:
|
||||
cherrypy.log('Stats recorded: %s' % repr(w), 'TOOLS.CPSTATS')
|
||||
|
||||
if uriset:
|
||||
rs = appstats.setdefault('URI Set Tracking', {})
|
||||
r = rs.setdefault(uriset, {
|
||||
'Min': None, 'Max': None, 'Count': 0, 'Sum': 0,
|
||||
'Avg': average_uriset_time})
|
||||
if r['Min'] is None or p < r['Min']:
|
||||
r['Min'] = p
|
||||
if r['Max'] is None or p > r['Max']:
|
||||
r['Max'] = p
|
||||
r['Count'] += 1
|
||||
r['Sum'] += p
|
||||
|
||||
if slow_queries and p > slow_queries:
|
||||
sq = appstats.setdefault('Slow Queries', [])
|
||||
sq.append(w.copy())
|
||||
if len(sq) > slow_queries_count:
|
||||
sq.pop(0)
|
||||
|
||||
|
||||
cherrypy.tools.cpstats = StatsTool()
|
||||
|
||||
|
||||
# ---------------------- CherryPy Statistics Reporting ---------------------- #
|
||||
|
||||
thisdir = os.path.abspath(os.path.dirname(__file__))
|
||||
|
||||
missing = object()
|
||||
|
||||
locale_date = lambda v: time.strftime('%c', time.gmtime(v))
|
||||
iso_format = lambda v: time.strftime('%Y-%m-%d %H:%M:%S', time.gmtime(v))
|
||||
|
||||
|
||||
def pause_resume(ns):
|
||||
def _pause_resume(enabled):
|
||||
pause_disabled = ''
|
||||
resume_disabled = ''
|
||||
if enabled:
|
||||
resume_disabled = 'disabled="disabled" '
|
||||
else:
|
||||
pause_disabled = 'disabled="disabled" '
|
||||
return """
|
||||
<form action="pause" method="POST" style="display:inline">
|
||||
<input type="hidden" name="namespace" value="%s" />
|
||||
<input type="submit" value="Pause" %s/>
|
||||
</form>
|
||||
<form action="resume" method="POST" style="display:inline">
|
||||
<input type="hidden" name="namespace" value="%s" />
|
||||
<input type="submit" value="Resume" %s/>
|
||||
</form>
|
||||
""" % (ns, pause_disabled, ns, resume_disabled)
|
||||
return _pause_resume
|
||||
|
||||
|
||||
class StatsPage(object):
|
||||
|
||||
formatting = {
|
||||
'CherryPy Applications': {
|
||||
'Enabled': pause_resume('CherryPy Applications'),
|
||||
'Bytes Read/Request': '%.3f',
|
||||
'Bytes Read/Second': '%.3f',
|
||||
'Bytes Written/Request': '%.3f',
|
||||
'Bytes Written/Second': '%.3f',
|
||||
'Current Time': iso_format,
|
||||
'Requests/Second': '%.3f',
|
||||
'Start Time': iso_format,
|
||||
'Total Time': '%.3f',
|
||||
'Uptime': '%.3f',
|
||||
'Slow Queries': {
|
||||
'End Time': None,
|
||||
'Processing Time': '%.3f',
|
||||
'Start Time': iso_format,
|
||||
},
|
||||
'URI Set Tracking': {
|
||||
'Avg': '%.3f',
|
||||
'Max': '%.3f',
|
||||
'Min': '%.3f',
|
||||
'Sum': '%.3f',
|
||||
},
|
||||
'Requests': {
|
||||
'Bytes Read': '%s',
|
||||
'Bytes Written': '%s',
|
||||
'End Time': None,
|
||||
'Processing Time': '%.3f',
|
||||
'Start Time': None,
|
||||
},
|
||||
},
|
||||
'CherryPy WSGIServer': {
|
||||
'Enabled': pause_resume('CherryPy WSGIServer'),
|
||||
'Connections/second': '%.3f',
|
||||
'Start time': iso_format,
|
||||
},
|
||||
}
|
||||
|
||||
@cherrypy.expose
|
||||
def index(self):
|
||||
# Transform the raw data into pretty output for HTML
|
||||
yield """
|
||||
<html>
|
||||
<head>
|
||||
<title>Statistics</title>
|
||||
<style>
|
||||
|
||||
th, td {
|
||||
padding: 0.25em 0.5em;
|
||||
border: 1px solid #666699;
|
||||
}
|
||||
|
||||
table {
|
||||
border-collapse: collapse;
|
||||
}
|
||||
|
||||
table.stats1 {
|
||||
width: 100%;
|
||||
}
|
||||
|
||||
table.stats1 th {
|
||||
font-weight: bold;
|
||||
text-align: right;
|
||||
background-color: #CCD5DD;
|
||||
}
|
||||
|
||||
table.stats2, h2 {
|
||||
margin-left: 50px;
|
||||
}
|
||||
|
||||
table.stats2 th {
|
||||
font-weight: bold;
|
||||
text-align: center;
|
||||
background-color: #CCD5DD;
|
||||
}
|
||||
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
"""
|
||||
for title, scalars, collections in self.get_namespaces():
|
||||
yield """
|
||||
<h1>%s</h1>
|
||||
|
||||
<table class='stats1'>
|
||||
<tbody>
|
||||
""" % title
|
||||
for i, (key, value) in enumerate(scalars):
|
||||
colnum = i % 3
|
||||
if colnum == 0:
|
||||
yield """
|
||||
<tr>"""
|
||||
yield (
|
||||
"""
|
||||
<th>%(key)s</th><td id='%(title)s-%(key)s'>%(value)s</td>""" %
|
||||
vars()
|
||||
)
|
||||
if colnum == 2:
|
||||
yield """
|
||||
</tr>"""
|
||||
|
||||
if colnum == 0:
|
||||
yield """
|
||||
<th></th><td></td>
|
||||
<th></th><td></td>
|
||||
</tr>"""
|
||||
elif colnum == 1:
|
||||
yield """
|
||||
<th></th><td></td>
|
||||
</tr>"""
|
||||
yield """
|
||||
</tbody>
|
||||
</table>"""
|
||||
|
||||
for subtitle, headers, subrows in collections:
|
||||
yield """
|
||||
<h2>%s</h2>
|
||||
<table class='stats2'>
|
||||
<thead>
|
||||
<tr>""" % subtitle
|
||||
for key in headers:
|
||||
yield """
|
||||
<th>%s</th>""" % key
|
||||
yield """
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>"""
|
||||
for subrow in subrows:
|
||||
yield """
|
||||
<tr>"""
|
||||
for value in subrow:
|
||||
yield """
|
||||
<td>%s</td>""" % value
|
||||
yield """
|
||||
</tr>"""
|
||||
yield """
|
||||
</tbody>
|
||||
</table>"""
|
||||
yield """
|
||||
</body>
|
||||
</html>
|
||||
"""
|
||||
|
||||
def get_namespaces(self):
|
||||
"""Yield (title, scalars, collections) for each namespace."""
|
||||
s = extrapolate_statistics(logging.statistics)
|
||||
for title, ns in sorted(s.items()):
|
||||
scalars = []
|
||||
collections = []
|
||||
ns_fmt = self.formatting.get(title, {})
|
||||
for k, v in sorted(ns.items()):
|
||||
fmt = ns_fmt.get(k, {})
|
||||
if isinstance(v, dict):
|
||||
headers, subrows = self.get_dict_collection(v, fmt)
|
||||
collections.append((k, ['ID'] + headers, subrows))
|
||||
elif isinstance(v, (list, tuple)):
|
||||
headers, subrows = self.get_list_collection(v, fmt)
|
||||
collections.append((k, headers, subrows))
|
||||
else:
|
||||
format = ns_fmt.get(k, missing)
|
||||
if format is None:
|
||||
# Don't output this column.
|
||||
continue
|
||||
if hasattr(format, '__call__'):
|
||||
v = format(v)
|
||||
elif format is not missing:
|
||||
v = format % v
|
||||
scalars.append((k, v))
|
||||
yield title, scalars, collections
|
||||
|
||||
def get_dict_collection(self, v, formatting):
|
||||
"""Return ([headers], [rows]) for the given collection."""
|
||||
# E.g., the 'Requests' dict.
|
||||
headers = []
|
||||
try:
|
||||
# python2
|
||||
vals = v.itervalues()
|
||||
except AttributeError:
|
||||
# python3
|
||||
vals = v.values()
|
||||
for record in vals:
|
||||
for k3 in record:
|
||||
format = formatting.get(k3, missing)
|
||||
if format is None:
|
||||
# Don't output this column.
|
||||
continue
|
||||
if k3 not in headers:
|
||||
headers.append(k3)
|
||||
headers.sort()
|
||||
|
||||
subrows = []
|
||||
for k2, record in sorted(v.items()):
|
||||
subrow = [k2]
|
||||
for k3 in headers:
|
||||
v3 = record.get(k3, '')
|
||||
format = formatting.get(k3, missing)
|
||||
if format is None:
|
||||
# Don't output this column.
|
||||
continue
|
||||
if hasattr(format, '__call__'):
|
||||
v3 = format(v3)
|
||||
elif format is not missing:
|
||||
v3 = format % v3
|
||||
subrow.append(v3)
|
||||
subrows.append(subrow)
|
||||
|
||||
return headers, subrows
|
||||
|
||||
def get_list_collection(self, v, formatting):
|
||||
"""Return ([headers], [subrows]) for the given collection."""
|
||||
# E.g., the 'Slow Queries' list.
|
||||
headers = []
|
||||
for record in v:
|
||||
for k3 in record:
|
||||
format = formatting.get(k3, missing)
|
||||
if format is None:
|
||||
# Don't output this column.
|
||||
continue
|
||||
if k3 not in headers:
|
||||
headers.append(k3)
|
||||
headers.sort()
|
||||
|
||||
subrows = []
|
||||
for record in v:
|
||||
subrow = []
|
||||
for k3 in headers:
|
||||
v3 = record.get(k3, '')
|
||||
format = formatting.get(k3, missing)
|
||||
if format is None:
|
||||
# Don't output this column.
|
||||
continue
|
||||
if hasattr(format, '__call__'):
|
||||
v3 = format(v3)
|
||||
elif format is not missing:
|
||||
v3 = format % v3
|
||||
subrow.append(v3)
|
||||
subrows.append(subrow)
|
||||
|
||||
return headers, subrows
|
||||
|
||||
if json is not None:
|
||||
@cherrypy.expose
|
||||
def data(self):
|
||||
s = extrapolate_statistics(logging.statistics)
|
||||
cherrypy.response.headers['Content-Type'] = 'application/json'
|
||||
return json.dumps(s, sort_keys=True, indent=4)
|
||||
|
||||
@cherrypy.expose
|
||||
def pause(self, namespace):
|
||||
logging.statistics.get(namespace, {})['Enabled'] = False
|
||||
raise cherrypy.HTTPRedirect('./')
|
||||
pause.cp_config = {'tools.allow.on': True,
|
||||
'tools.allow.methods': ['POST']}
|
||||
|
||||
@cherrypy.expose
|
||||
def resume(self, namespace):
|
||||
logging.statistics.get(namespace, {})['Enabled'] = True
|
||||
raise cherrypy.HTTPRedirect('./')
|
||||
resume.cp_config = {'tools.allow.on': True,
|
||||
'tools.allow.methods': ['POST']}
|
||||
@@ -1,648 +0,0 @@
|
||||
"""Functions for builtin CherryPy tools."""
|
||||
|
||||
import logging
|
||||
import re
|
||||
from hashlib import md5
|
||||
|
||||
import six
|
||||
|
||||
import cherrypy
|
||||
from cherrypy._cpcompat import text_or_bytes
|
||||
from cherrypy.lib import httputil as _httputil
|
||||
from cherrypy.lib import is_iterator
|
||||
|
||||
|
||||
# Conditional HTTP request support #
|
||||
|
||||
def validate_etags(autotags=False, debug=False):
|
||||
"""Validate the current ETag against If-Match, If-None-Match headers.
|
||||
|
||||
If autotags is True, an ETag response-header value will be provided
|
||||
from an MD5 hash of the response body (unless some other code has
|
||||
already provided an ETag header). If False (the default), the ETag
|
||||
will not be automatic.
|
||||
|
||||
WARNING: the autotags feature is not designed for URLs which allow
|
||||
methods other than GET. For example, if a POST to the same URL returns
|
||||
no content, the automatic ETag will be incorrect, breaking a fundamental
|
||||
use for entity tags in a possibly destructive fashion. Likewise, if you
|
||||
raise 304 Not Modified, the response body will be empty, the ETag hash
|
||||
will be incorrect, and your application will break.
|
||||
See :rfc:`2616` Section 14.24.
|
||||
"""
|
||||
response = cherrypy.serving.response
|
||||
|
||||
# Guard against being run twice.
|
||||
if hasattr(response, 'ETag'):
|
||||
return
|
||||
|
||||
status, reason, msg = _httputil.valid_status(response.status)
|
||||
|
||||
etag = response.headers.get('ETag')
|
||||
|
||||
# Automatic ETag generation. See warning in docstring.
|
||||
if etag:
|
||||
if debug:
|
||||
cherrypy.log('ETag already set: %s' % etag, 'TOOLS.ETAGS')
|
||||
elif not autotags:
|
||||
if debug:
|
||||
cherrypy.log('Autotags off', 'TOOLS.ETAGS')
|
||||
elif status != 200:
|
||||
if debug:
|
||||
cherrypy.log('Status not 200', 'TOOLS.ETAGS')
|
||||
else:
|
||||
etag = response.collapse_body()
|
||||
etag = '"%s"' % md5(etag).hexdigest()
|
||||
if debug:
|
||||
cherrypy.log('Setting ETag: %s' % etag, 'TOOLS.ETAGS')
|
||||
response.headers['ETag'] = etag
|
||||
|
||||
response.ETag = etag
|
||||
|
||||
# "If the request would, without the If-Match header field, result in
|
||||
# anything other than a 2xx or 412 status, then the If-Match header
|
||||
# MUST be ignored."
|
||||
if debug:
|
||||
cherrypy.log('Status: %s' % status, 'TOOLS.ETAGS')
|
||||
if status >= 200 and status <= 299:
|
||||
request = cherrypy.serving.request
|
||||
|
||||
conditions = request.headers.elements('If-Match') or []
|
||||
conditions = [str(x) for x in conditions]
|
||||
if debug:
|
||||
cherrypy.log('If-Match conditions: %s' % repr(conditions),
|
||||
'TOOLS.ETAGS')
|
||||
if conditions and not (conditions == ['*'] or etag in conditions):
|
||||
raise cherrypy.HTTPError(412, 'If-Match failed: ETag %r did '
|
||||
'not match %r' % (etag, conditions))
|
||||
|
||||
conditions = request.headers.elements('If-None-Match') or []
|
||||
conditions = [str(x) for x in conditions]
|
||||
if debug:
|
||||
cherrypy.log('If-None-Match conditions: %s' % repr(conditions),
|
||||
'TOOLS.ETAGS')
|
||||
if conditions == ['*'] or etag in conditions:
|
||||
if debug:
|
||||
cherrypy.log('request.method: %s' %
|
||||
request.method, 'TOOLS.ETAGS')
|
||||
if request.method in ('GET', 'HEAD'):
|
||||
raise cherrypy.HTTPRedirect([], 304)
|
||||
else:
|
||||
raise cherrypy.HTTPError(412, 'If-None-Match failed: ETag %r '
|
||||
'matched %r' % (etag, conditions))
|
||||
|
||||
|
||||
def validate_since():
|
||||
"""Validate the current Last-Modified against If-Modified-Since headers.
|
||||
|
||||
If no code has set the Last-Modified response header, then no validation
|
||||
will be performed.
|
||||
"""
|
||||
response = cherrypy.serving.response
|
||||
lastmod = response.headers.get('Last-Modified')
|
||||
if lastmod:
|
||||
status, reason, msg = _httputil.valid_status(response.status)
|
||||
|
||||
request = cherrypy.serving.request
|
||||
|
||||
since = request.headers.get('If-Unmodified-Since')
|
||||
if since and since != lastmod:
|
||||
if (status >= 200 and status <= 299) or status == 412:
|
||||
raise cherrypy.HTTPError(412)
|
||||
|
||||
since = request.headers.get('If-Modified-Since')
|
||||
if since and since == lastmod:
|
||||
if (status >= 200 and status <= 299) or status == 304:
|
||||
if request.method in ('GET', 'HEAD'):
|
||||
raise cherrypy.HTTPRedirect([], 304)
|
||||
else:
|
||||
raise cherrypy.HTTPError(412)
|
||||
|
||||
|
||||
# Tool code #
|
||||
|
||||
def allow(methods=None, debug=False):
|
||||
"""Raise 405 if request.method not in methods (default ['GET', 'HEAD']).
|
||||
|
||||
The given methods are case-insensitive, and may be in any order.
|
||||
If only one method is allowed, you may supply a single string;
|
||||
if more than one, supply a list of strings.
|
||||
|
||||
Regardless of whether the current method is allowed or not, this
|
||||
also emits an 'Allow' response header, containing the given methods.
|
||||
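For example, to restrict a subtree to GET/HEAD plus POST (a sketch; the
mount point is illustrative)::

    config = {'/api': {
        'tools.allow.on': True,
        'tools.allow.methods': ['GET', 'POST'],
    }}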
"""
|
||||
if not isinstance(methods, (tuple, list)):
|
||||
methods = [methods]
|
||||
methods = [m.upper() for m in methods if m]
|
||||
if not methods:
|
||||
methods = ['GET', 'HEAD']
|
||||
elif 'GET' in methods and 'HEAD' not in methods:
|
||||
methods.append('HEAD')
|
||||
|
||||
cherrypy.response.headers['Allow'] = ', '.join(methods)
|
||||
if cherrypy.request.method not in methods:
|
||||
if debug:
|
||||
cherrypy.log('request.method %r not in methods %r' %
|
||||
(cherrypy.request.method, methods), 'TOOLS.ALLOW')
|
||||
raise cherrypy.HTTPError(405)
|
||||
else:
|
||||
if debug:
|
||||
cherrypy.log('request.method %r in methods %r' %
|
||||
(cherrypy.request.method, methods), 'TOOLS.ALLOW')
|
||||
|
||||
|
||||
def proxy(base=None, local='X-Forwarded-Host', remote='X-Forwarded-For',
|
||||
scheme='X-Forwarded-Proto', debug=False):
|
||||
"""Change the base URL (scheme://host[:port][/path]).
|
||||
|
||||
For running a CP server behind Apache, lighttpd, or other HTTP server.
|
||||
|
||||
For Apache and lighttpd, you should leave the 'local' argument at the
|
||||
default value of 'X-Forwarded-Host'. For Squid, you probably want to set
|
||||
tools.proxy.local = 'Origin'.
|
||||
|
||||
If you want the new request.base to include path info (not just the host),
|
||||
you must explicitly set base to the full base path, and ALSO set 'local'
|
||||
to '', so that the X-Forwarded-Host request header (which never includes
|
||||
path info) does not override it. Regardless, the value for 'base' MUST
|
||||
NOT end in a slash.
|
||||
|
||||
cherrypy.request.remote.ip (the IP address of the client) will be
|
||||
rewritten if the header specified by the 'remote' arg is valid.
|
||||
By default, 'remote' is set to 'X-Forwarded-For'. If you do not
|
||||
want to rewrite remote.ip, set the 'remote' arg to an empty string.
|
||||
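A minimal sketch for running behind a reverse proxy that sends the default
X-Forwarded-* headers (the host name is illustrative)::

    config = {'/': {
        'tools.proxy.on': True,
        'tools.proxy.base': 'https://www.example.com',
        'tools.proxy.local': '',
    }}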
"""
|
||||
|
||||
request = cherrypy.serving.request
|
||||
|
||||
if scheme:
|
||||
s = request.headers.get(scheme, None)
|
||||
if debug:
|
||||
cherrypy.log('Testing scheme %r:%r' % (scheme, s), 'TOOLS.PROXY')
|
||||
if s == 'on' and 'ssl' in scheme.lower():
|
||||
# This handles e.g. webfaction's 'X-Forwarded-Ssl: on' header
|
||||
scheme = 'https'
|
||||
else:
|
||||
# This is for lighttpd/pound/Mongrel's 'X-Forwarded-Proto: https'
|
||||
scheme = s
|
||||
if not scheme:
|
||||
scheme = request.base[:request.base.find('://')]
|
||||
|
||||
if local:
|
||||
lbase = request.headers.get(local, None)
|
||||
if debug:
|
||||
cherrypy.log('Testing local %r:%r' % (local, lbase), 'TOOLS.PROXY')
|
||||
if lbase is not None:
|
||||
base = lbase.split(',')[0]
|
||||
if not base:
|
||||
base = request.headers.get('Host', '127.0.0.1')
|
||||
port = request.local.port
|
||||
if port != 80:
|
||||
base += ':%s' % port
|
||||
|
||||
if base.find('://') == -1:
|
||||
# add http:// or https:// if needed
|
||||
base = scheme + '://' + base
|
||||
|
||||
request.base = base
|
||||
|
||||
if remote:
|
||||
xff = request.headers.get(remote)
|
||||
if debug:
|
||||
cherrypy.log('Testing remote %r:%r' % (remote, xff), 'TOOLS.PROXY')
|
||||
if xff:
|
||||
if remote == 'X-Forwarded-For':
|
||||
# Bug #1268
|
||||
xff = xff.split(',')[0].strip()
|
||||
request.remote.ip = xff
|
||||
|
||||
|
||||
def ignore_headers(headers=('Range',), debug=False):
|
||||
"""Delete request headers whose field names are included in 'headers'.
|
||||
|
||||
This is a useful tool for working behind certain HTTP servers;
|
||||
for example, Apache duplicates the work that CP does for 'Range'
|
||||
headers, and will doubly-truncate the response.
|
||||
"""
|
||||
request = cherrypy.serving.request
|
||||
for name in headers:
|
||||
if name in request.headers:
|
||||
if debug:
|
||||
cherrypy.log('Ignoring request header %r' % name,
|
||||
'TOOLS.IGNORE_HEADERS')
|
||||
del request.headers[name]
|
||||
|
||||
|
||||
def response_headers(headers=None, debug=False):
|
||||
"""Set headers on the response."""
|
||||
if debug:
|
||||
cherrypy.log('Setting response headers: %s' % repr(headers),
|
||||
'TOOLS.RESPONSE_HEADERS')
|
||||
for name, value in (headers or []):
|
||||
cherrypy.serving.response.headers[name] = value
|
||||
response_headers.failsafe = True
|
||||
|
||||
|
||||
def referer(pattern, accept=True, accept_missing=False, error=403,
|
||||
message='Forbidden Referer header.', debug=False):
|
||||
"""Raise HTTPError if Referer header does/does not match the given pattern.
|
||||
|
||||
pattern
|
||||
A regular expression pattern to test against the Referer.
|
||||
|
||||
accept
|
||||
If True, the Referer must match the pattern; if False,
|
||||
the Referer must NOT match the pattern.
|
||||
|
||||
accept_missing
|
||||
If True, permit requests with no Referer header.
|
||||
|
||||
error
|
||||
The HTTP error code to return to the client on failure.
|
||||
|
||||
message
|
||||
A string to include in the response body on failure.
|
||||
|
||||
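For example, to only accept requests referred from your own site (a sketch;
the host name and mount point are illustrative)::

    config = {'/vote': {
        'tools.referer.on': True,
        'tools.referer.pattern': r'http://[^/]*example\.com',
    }}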
"""
|
||||
try:
|
||||
ref = cherrypy.serving.request.headers['Referer']
|
||||
match = bool(re.match(pattern, ref))
|
||||
if debug:
|
||||
cherrypy.log('Referer %r matches %r' % (ref, pattern),
|
||||
'TOOLS.REFERER')
|
||||
if accept == match:
|
||||
return
|
||||
except KeyError:
|
||||
if debug:
|
||||
cherrypy.log('No Referer header', 'TOOLS.REFERER')
|
||||
if accept_missing:
|
||||
return
|
||||
|
||||
raise cherrypy.HTTPError(error, message)
|
||||
|
||||
|
||||
class SessionAuth(object):
|
||||
|
||||
"""Assert that the user is logged in."""
|
||||
|
||||
session_key = 'username'
|
||||
debug = False
|
||||
|
||||
def check_username_and_password(self, username, password):
|
||||
pass
|
||||
|
||||
def anonymous(self):
|
||||
"""Provide a temporary user name for anonymous users."""
|
||||
pass
|
||||
|
||||
def on_login(self, username):
|
||||
pass
|
||||
|
||||
def on_logout(self, username):
|
||||
pass
|
||||
|
||||
def on_check(self, username):
|
||||
pass
|
||||
|
||||
def login_screen(self, from_page='..', username='', error_msg='',
|
||||
**kwargs):
|
||||
return (six.text_type("""<html><body>
|
||||
Message: %(error_msg)s
|
||||
<form method="post" action="do_login">
|
||||
Login: <input type="text" name="username" value="%(username)s" size="10" />
|
||||
<br />
|
||||
Password: <input type="password" name="password" size="10" />
|
||||
<br />
|
||||
<input type="hidden" name="from_page" value="%(from_page)s" />
|
||||
<br />
|
||||
<input type="submit" />
|
||||
</form>
|
||||
</body></html>""") % vars()).encode('utf-8')
|
||||
|
||||
def do_login(self, username, password, from_page='..', **kwargs):
|
||||
"""Login. May raise redirect, or return True if request handled."""
|
||||
response = cherrypy.serving.response
|
||||
error_msg = self.check_username_and_password(username, password)
|
||||
if error_msg:
|
||||
body = self.login_screen(from_page, username, error_msg)
|
||||
response.body = body
|
||||
if 'Content-Length' in response.headers:
|
||||
# Delete Content-Length header so finalize() recalcs it.
|
||||
del response.headers['Content-Length']
|
||||
return True
|
||||
else:
|
||||
cherrypy.serving.request.login = username
|
||||
cherrypy.session[self.session_key] = username
|
||||
self.on_login(username)
|
||||
raise cherrypy.HTTPRedirect(from_page or '/')
|
||||
|
||||
def do_logout(self, from_page='..', **kwargs):
|
||||
"""Logout. May raise redirect, or return True if request handled."""
|
||||
sess = cherrypy.session
|
||||
username = sess.get(self.session_key)
|
||||
sess[self.session_key] = None
|
||||
if username:
|
||||
cherrypy.serving.request.login = None
|
||||
self.on_logout(username)
|
||||
raise cherrypy.HTTPRedirect(from_page)
|
||||
|
||||
def do_check(self):
|
||||
"""Assert username. Raise redirect, or return True if request handled.
|
||||
"""
|
||||
sess = cherrypy.session
|
||||
request = cherrypy.serving.request
|
||||
response = cherrypy.serving.response
|
||||
|
||||
username = sess.get(self.session_key)
|
||||
if not username:
|
||||
sess[self.session_key] = username = self.anonymous()
|
||||
self._debug_message('No session[username], trying anonymous')
|
||||
if not username:
|
||||
url = cherrypy.url(qs=request.query_string)
|
||||
self._debug_message(
|
||||
'No username, routing to login_screen with from_page %(url)r',
|
||||
locals(),
|
||||
)
|
||||
response.body = self.login_screen(url)
|
||||
if 'Content-Length' in response.headers:
|
||||
# Delete Content-Length header so finalize() recalcs it.
|
||||
del response.headers['Content-Length']
|
||||
return True
|
||||
self._debug_message('Setting request.login to %(username)r', locals())
|
||||
request.login = username
|
||||
self.on_check(username)
|
||||
|
||||
def _debug_message(self, template, context={}):
|
||||
if not self.debug:
|
||||
return
|
||||
cherrypy.log(template % context, 'TOOLS.SESSAUTH')
|
||||
|
||||
def run(self):
|
||||
request = cherrypy.serving.request
|
||||
response = cherrypy.serving.response
|
||||
|
||||
path = request.path_info
|
||||
if path.endswith('login_screen'):
|
||||
self._debug_message('routing %(path)r to login_screen', locals())
|
||||
response.body = self.login_screen()
|
||||
return True
|
||||
elif path.endswith('do_login'):
|
||||
if request.method != 'POST':
|
||||
response.headers['Allow'] = 'POST'
|
||||
self._debug_message('do_login requires POST')
|
||||
raise cherrypy.HTTPError(405)
|
||||
self._debug_message('routing %(path)r to do_login', locals())
|
||||
return self.do_login(**request.params)
|
||||
elif path.endswith('do_logout'):
|
||||
if request.method != 'POST':
|
||||
response.headers['Allow'] = 'POST'
|
||||
raise cherrypy.HTTPError(405)
|
||||
self._debug_message('routing %(path)r to do_logout', locals())
|
||||
return self.do_logout(**request.params)
|
||||
else:
|
||||
self._debug_message('No special path, running do_check')
|
||||
return self.do_check()
|
||||
|
||||
|
||||
def session_auth(**kwargs):
|
||||
sa = SessionAuth()
|
||||
for k, v in kwargs.items():
|
||||
setattr(sa, k, v)
|
||||
return sa.run()
|
||||
session_auth.__doc__ = """Session authentication hook.
|
||||
|
||||
Any attribute of the SessionAuth class may be overridden via a keyword arg
|
||||
to this function:
|
||||
|
||||
""" + '\n'.join(['%s: %s' % (k, type(getattr(SessionAuth, k)).__name__)
|
||||
for k in dir(SessionAuth) if not k.startswith('__')])
|
||||
|
||||
|
||||
def log_traceback(severity=logging.ERROR, debug=False):
|
||||
"""Write the last error's traceback to the cherrypy error log."""
|
||||
cherrypy.log('', 'HTTP', severity=severity, traceback=True)
|
||||
|
||||
|
||||
def log_request_headers(debug=False):
|
||||
"""Write request headers to the cherrypy error log."""
|
||||
h = [' %s: %s' % (k, v) for k, v in cherrypy.serving.request.header_list]
|
||||
cherrypy.log('\nRequest Headers:\n' + '\n'.join(h), 'HTTP')
|
||||
|
||||
|
||||
def log_hooks(debug=False):
|
||||
"""Write request.hooks to the cherrypy error log."""
|
||||
request = cherrypy.serving.request
|
||||
|
||||
msg = []
|
||||
# Sort by the standard points if possible.
|
||||
from cherrypy import _cprequest
|
||||
points = _cprequest.hookpoints
|
||||
for k in request.hooks.keys():
|
||||
if k not in points:
|
||||
points.append(k)
|
||||
|
||||
for k in points:
|
||||
msg.append(' %s:' % k)
|
||||
v = request.hooks.get(k, [])
|
||||
v.sort()
|
||||
for h in v:
|
||||
msg.append(' %r' % h)
|
||||
cherrypy.log('\nRequest Hooks for ' + cherrypy.url() +
|
||||
':\n' + '\n'.join(msg), 'HTTP')
|
||||
|
||||
|
||||
def redirect(url='', internal=True, debug=False):
|
||||
"""Raise InternalRedirect or HTTPRedirect to the given url."""
|
||||
if debug:
|
||||
cherrypy.log('Redirecting %sto: %s' %
|
||||
({True: 'internal ', False: ''}[internal], url),
|
||||
'TOOLS.REDIRECT')
|
||||
if internal:
|
||||
raise cherrypy.InternalRedirect(url)
|
||||
else:
|
||||
raise cherrypy.HTTPRedirect(url)
|
||||
|
||||
|
||||
def trailing_slash(missing=True, extra=False, status=None, debug=False):
|
||||
"""Redirect if path_info has (missing|extra) trailing slash."""
|
||||
request = cherrypy.serving.request
|
||||
pi = request.path_info
|
||||
|
||||
if debug:
|
||||
cherrypy.log('is_index: %r, missing: %r, extra: %r, path_info: %r' %
|
||||
(request.is_index, missing, extra, pi),
|
||||
'TOOLS.TRAILING_SLASH')
|
||||
if request.is_index is True:
|
||||
if missing:
|
||||
if not pi.endswith('/'):
|
||||
new_url = cherrypy.url(pi + '/', request.query_string)
|
||||
raise cherrypy.HTTPRedirect(new_url, status=status or 301)
|
||||
elif request.is_index is False:
|
||||
if extra:
|
||||
# If pi == '/', don't redirect to ''!
|
||||
if pi.endswith('/') and pi != '/':
|
||||
new_url = cherrypy.url(pi[:-1], request.query_string)
|
||||
raise cherrypy.HTTPRedirect(new_url, status=status or 301)
|
||||
|
||||
|
||||
def flatten(debug=False):
|
||||
"""Wrap response.body in a generator that recursively iterates over body.
|
||||
|
||||
This allows cherrypy.response.body to consist of 'nested generators';
|
||||
that is, a set of generators that yield generators.
|
||||
"""
|
||||
def flattener(input):
|
||||
numchunks = 0
|
||||
for x in input:
|
||||
if not is_iterator(x):
|
||||
numchunks += 1
|
||||
yield x
|
||||
else:
|
||||
for y in flattener(x):
|
||||
numchunks += 1
|
||||
yield y
|
||||
if debug:
|
||||
cherrypy.log('Flattened %d chunks' % numchunks, 'TOOLS.FLATTEN')
|
||||
response = cherrypy.serving.response
|
||||
response.body = flattener(response.body)
|
||||
|
||||
|
||||
def accept(media=None, debug=False):
|
||||
"""Return the client's preferred media-type (from the given Content-Types).
|
||||
|
||||
If 'media' is None (the default), no test will be performed.
|
||||
|
||||
If 'media' is provided, it should be the Content-Type value (as a string)
|
||||
or values (as a list or tuple of strings) which the current resource
|
||||
can emit. The client's acceptable media ranges (as declared in the
|
||||
Accept request header) will be matched in order to these Content-Type
|
||||
values; the first such string is returned. That is, the return value
|
||||
will always be one of the strings provided in the 'media' arg (or None
|
||||
if 'media' is None).
|
||||
|
||||
If no match is found, then HTTPError 406 (Not Acceptable) is raised.
|
||||
Note that most web browsers send */* as a (low-quality) acceptable
|
||||
media range, which should match any Content-Type. In addition, "...if
|
||||
no Accept header field is present, then it is assumed that the client
|
||||
accepts all media types."
|
||||
|
||||
Matching types are checked in order of client preference first,
|
||||
and then in the order of the given 'media' values.
|
||||
|
||||
Note that this function does not honor accept-params (other than "q").
|
||||
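A sketch of calling it directly from a handler (the handler and the
json_body/html_body values are illustrative)::

    from cherrypy.lib import cptools

    @cherrypy.expose
    def report(self):
        best = cptools.accept(['application/json', 'text/html'])
        if best == 'application/json':
            return json_body
        return html_body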
"""
|
||||
if not media:
|
||||
return
|
||||
if isinstance(media, text_or_bytes):
|
||||
media = [media]
|
||||
request = cherrypy.serving.request
|
||||
|
||||
# Parse the Accept request header, and try to match one
|
||||
# of the requested media-ranges (in order of preference).
|
||||
ranges = request.headers.elements('Accept')
|
||||
if not ranges:
|
||||
# Any media type is acceptable.
|
||||
if debug:
|
||||
cherrypy.log('No Accept header elements', 'TOOLS.ACCEPT')
|
||||
return media[0]
|
||||
else:
|
||||
# Note that 'ranges' is sorted in order of preference
|
||||
for element in ranges:
|
||||
if element.qvalue > 0:
|
||||
if element.value == '*/*':
|
||||
# Matches any type or subtype
|
||||
if debug:
|
||||
cherrypy.log('Match due to */*', 'TOOLS.ACCEPT')
|
||||
return media[0]
|
||||
elif element.value.endswith('/*'):
|
||||
# Matches any subtype
|
||||
mtype = element.value[:-1] # Keep the slash
|
||||
for m in media:
|
||||
if m.startswith(mtype):
|
||||
if debug:
|
||||
cherrypy.log('Match due to %s' % element.value,
|
||||
'TOOLS.ACCEPT')
|
||||
return m
|
||||
else:
|
||||
# Matches exact value
|
||||
if element.value in media:
|
||||
if debug:
|
||||
cherrypy.log('Match due to %s' % element.value,
|
||||
'TOOLS.ACCEPT')
|
||||
return element.value
|
||||
|
||||
# No suitable media-range found.
|
||||
ah = request.headers.get('Accept')
|
||||
if ah is None:
|
||||
msg = 'Your client did not send an Accept header.'
|
||||
else:
|
||||
msg = 'Your client sent this Accept header: %s.' % ah
|
||||
msg += (' But this resource only emits these media types: %s.' %
|
||||
', '.join(media))
|
||||
raise cherrypy.HTTPError(406, msg)
|
||||
|
||||
|
||||
class MonitoredHeaderMap(_httputil.HeaderMap):
|
||||
|
||||
def __init__(self):
|
||||
self.accessed_headers = set()
|
||||
|
||||
def __getitem__(self, key):
|
||||
self.accessed_headers.add(key)
|
||||
return _httputil.HeaderMap.__getitem__(self, key)
|
||||
|
||||
def __contains__(self, key):
|
||||
self.accessed_headers.add(key)
|
||||
return _httputil.HeaderMap.__contains__(self, key)
|
||||
|
||||
def get(self, key, default=None):
|
||||
self.accessed_headers.add(key)
|
||||
return _httputil.HeaderMap.get(self, key, default=default)
|
||||
|
||||
if hasattr({}, 'has_key'):
|
||||
# Python 2
|
||||
def has_key(self, key):
|
||||
self.accessed_headers.add(key)
|
||||
return _httputil.HeaderMap.has_key(self, key)
|
||||
|
||||
|
||||
def autovary(ignore=None, debug=False):
|
||||
"""Auto-populate the Vary response header based on request.header access.
|
||||
"""
|
||||
request = cherrypy.serving.request
|
||||
|
||||
req_h = request.headers
|
||||
request.headers = MonitoredHeaderMap()
|
||||
request.headers.update(req_h)
|
||||
if ignore is None:
|
||||
ignore = set(['Content-Disposition', 'Content-Length', 'Content-Type'])
|
||||
|
||||
def set_response_header():
|
||||
resp_h = cherrypy.serving.response.headers
|
||||
v = set([e.value for e in resp_h.elements('Vary')])
|
||||
if debug:
|
||||
cherrypy.log(
|
||||
'Accessed headers: %s' % request.headers.accessed_headers,
|
||||
'TOOLS.AUTOVARY')
|
||||
v = v.union(request.headers.accessed_headers)
|
||||
v = v.difference(ignore)
|
||||
v = list(v)
|
||||
v.sort()
|
||||
resp_h['Vary'] = ', '.join(v)
|
||||
request.hooks.attach('before_finalize', set_response_header, 95)
|
||||
|
||||
|
||||
def convert_params(exception=ValueError, error=400):
|
||||
"""Convert request params based on function annotations, with error handling.
|
||||
|
||||
exception
|
||||
Exception class to catch.
|
||||
|
||||
error
|
||||
The HTTP error code to return to the client on failure.
|
||||
"""
|
||||
request = cherrypy.serving.request
|
||||
types = request.handler.callable.__annotations__
|
||||
with cherrypy.HTTPError.handle(exception, error):
|
||||
for key in set(types).intersection(request.params):
|
||||
request.params[key] = types[key](request.params[key])
|
||||
@@ -1,424 +0,0 @@
|
||||
import struct
|
||||
import time
|
||||
import io
|
||||
|
||||
import six
|
||||
|
||||
import cherrypy
|
||||
from cherrypy._cpcompat import text_or_bytes, ntob
|
||||
from cherrypy.lib import file_generator
|
||||
from cherrypy.lib import is_closable_iterator
|
||||
from cherrypy.lib import set_vary_header
|
||||
|
||||
|
||||
def decode(encoding=None, default_encoding='utf-8'):
|
||||
"""Replace or extend the list of charsets used to decode a request entity.
|
||||
|
||||
Either argument may be a single string or a list of strings.
|
||||
|
||||
encoding
|
||||
If not None, restricts the set of charsets attempted while decoding
|
||||
a request entity to the given set (even if a different charset is
|
||||
given in the Content-Type request header).
|
||||
|
||||
default_encoding
|
||||
Only in effect if the 'encoding' argument is not given.
|
||||
If given, the set of charsets attempted while decoding a request
|
||||
entity is *extended* with the given value(s).
|
||||
|
||||
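For example, to force Latin-1 decoding for a legacy form handler (a sketch;
the mount point and charset are illustrative)::

    config = {'/legacy': {
        'tools.decode.on': True,
        'tools.decode.encoding': 'ISO-8859-1',
    }}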
"""
|
||||
body = cherrypy.request.body
|
||||
if encoding is not None:
|
||||
if not isinstance(encoding, list):
|
||||
encoding = [encoding]
|
||||
body.attempt_charsets = encoding
|
||||
elif default_encoding:
|
||||
if not isinstance(default_encoding, list):
|
||||
default_encoding = [default_encoding]
|
||||
body.attempt_charsets = body.attempt_charsets + default_encoding
|
||||
|
||||
class UTF8StreamEncoder:
|
||||
def __init__(self, iterator):
|
||||
self._iterator = iterator
|
||||
|
||||
def __iter__(self):
|
||||
return self
|
||||
|
||||
def next(self):
|
||||
return self.__next__()
|
||||
|
||||
def __next__(self):
|
||||
res = next(self._iterator)
|
||||
if isinstance(res, six.text_type):
|
||||
res = res.encode('utf-8')
|
||||
return res
|
||||
|
||||
def close(self):
|
||||
if is_closable_iterator(self._iterator):
|
||||
self._iterator.close()
|
||||
|
||||
def __getattr__(self, attr):
|
||||
if attr.startswith('__'):
|
||||
raise AttributeError(self, attr)
|
||||
return getattr(self._iterator, attr)
|
||||
|
||||
|
||||
class ResponseEncoder:
|
||||
|
||||
default_encoding = 'utf-8'
|
||||
failmsg = 'Response body could not be encoded with %r.'
|
||||
encoding = None
|
||||
errors = 'strict'
|
||||
text_only = True
|
||||
add_charset = True
|
||||
debug = False
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
for k, v in kwargs.items():
|
||||
setattr(self, k, v)
|
||||
|
||||
self.attempted_charsets = set()
|
||||
request = cherrypy.serving.request
|
||||
if request.handler is not None:
|
||||
# Replace request.handler with self
|
||||
if self.debug:
|
||||
cherrypy.log('Replacing request.handler', 'TOOLS.ENCODE')
|
||||
self.oldhandler = request.handler
|
||||
request.handler = self
|
||||
|
||||
def encode_stream(self, encoding):
|
||||
"""Encode a streaming response body.
|
||||
|
||||
Use a generator wrapper, and just pray it works as the stream is
|
||||
being written out.
|
||||
"""
|
||||
if encoding in self.attempted_charsets:
|
||||
return False
|
||||
self.attempted_charsets.add(encoding)
|
||||
|
||||
def encoder(body):
|
||||
for chunk in body:
|
||||
if isinstance(chunk, six.text_type):
|
||||
chunk = chunk.encode(encoding, self.errors)
|
||||
yield chunk
|
||||
self.body = encoder(self.body)
|
||||
return True
|
||||
|
||||
def encode_string(self, encoding):
|
||||
"""Encode a buffered response body."""
|
||||
if encoding in self.attempted_charsets:
|
||||
return False
|
||||
self.attempted_charsets.add(encoding)
|
||||
body = []
|
||||
for chunk in self.body:
|
||||
if isinstance(chunk, six.text_type):
|
||||
try:
|
||||
chunk = chunk.encode(encoding, self.errors)
|
||||
except (LookupError, UnicodeError):
|
||||
return False
|
||||
body.append(chunk)
|
||||
self.body = body
|
||||
return True
|
||||
|
||||
def find_acceptable_charset(self):
|
||||
request = cherrypy.serving.request
|
||||
response = cherrypy.serving.response
|
||||
|
||||
if self.debug:
|
||||
cherrypy.log('response.stream %r' %
|
||||
response.stream, 'TOOLS.ENCODE')
|
||||
if response.stream:
|
||||
encoder = self.encode_stream
|
||||
else:
|
||||
encoder = self.encode_string
|
||||
if 'Content-Length' in response.headers:
|
||||
# Delete Content-Length header so finalize() recalcs it.
|
||||
# Encoded strings may be of different lengths from their
|
||||
# unicode equivalents, and even from each other. For example:
|
||||
# >>> t = u"\u7007\u3040"
|
||||
# >>> len(t)
|
||||
# 2
|
||||
# >>> len(t.encode("UTF-8"))
|
||||
# 6
|
||||
# >>> len(t.encode("utf7"))
|
||||
# 8
|
||||
del response.headers['Content-Length']
|
||||
|
||||
# Parse the Accept-Charset request header, and try to provide one
|
||||
# of the requested charsets (in order of user preference).
|
||||
encs = request.headers.elements('Accept-Charset')
|
||||
charsets = [enc.value.lower() for enc in encs]
|
||||
if self.debug:
|
||||
cherrypy.log('charsets %s' % repr(charsets), 'TOOLS.ENCODE')
|
||||
|
||||
if self.encoding is not None:
|
||||
# If specified, force this encoding to be used, or fail.
|
||||
encoding = self.encoding.lower()
|
||||
if self.debug:
|
||||
cherrypy.log('Specified encoding %r' %
|
||||
encoding, 'TOOLS.ENCODE')
|
||||
if (not charsets) or '*' in charsets or encoding in charsets:
|
||||
if self.debug:
|
||||
cherrypy.log('Attempting encoding %r' %
|
||||
encoding, 'TOOLS.ENCODE')
|
||||
if encoder(encoding):
|
||||
return encoding
|
||||
else:
|
||||
if not encs:
|
||||
if self.debug:
|
||||
cherrypy.log('Attempting default encoding %r' %
|
||||
self.default_encoding, 'TOOLS.ENCODE')
|
||||
# Any character-set is acceptable.
|
||||
if encoder(self.default_encoding):
|
||||
return self.default_encoding
|
||||
else:
|
||||
raise cherrypy.HTTPError(500, self.failmsg %
|
||||
self.default_encoding)
|
||||
else:
|
||||
for element in encs:
|
||||
if element.qvalue > 0:
|
||||
if element.value == '*':
|
||||
# Matches any charset. Try our default.
|
||||
if self.debug:
|
||||
cherrypy.log('Attempting default encoding due '
|
||||
'to %r' % element, 'TOOLS.ENCODE')
|
||||
if encoder(self.default_encoding):
|
||||
return self.default_encoding
|
||||
else:
|
||||
encoding = element.value
|
||||
if self.debug:
|
||||
cherrypy.log('Attempting encoding %s (qvalue >'
|
||||
'0)' % element, 'TOOLS.ENCODE')
|
||||
if encoder(encoding):
|
||||
return encoding
|
||||
|
||||
if '*' not in charsets:
|
||||
# If no "*" is present in an Accept-Charset field, then all
|
||||
# character sets not explicitly mentioned get a quality
|
||||
# value of 0, except for ISO-8859-1, which gets a quality
|
||||
# value of 1 if not explicitly mentioned.
|
||||
iso = 'iso-8859-1'
|
||||
if iso not in charsets:
|
||||
if self.debug:
|
||||
cherrypy.log('Attempting ISO-8859-1 encoding',
|
||||
'TOOLS.ENCODE')
|
||||
if encoder(iso):
|
||||
return iso
|
||||
|
||||
# No suitable encoding found.
|
||||
ac = request.headers.get('Accept-Charset')
|
||||
if ac is None:
|
||||
msg = 'Your client did not send an Accept-Charset header.'
|
||||
else:
|
||||
msg = 'Your client sent this Accept-Charset header: %s.' % ac
|
||||
_charsets = ', '.join(sorted(self.attempted_charsets))
|
||||
msg += ' We tried these charsets: %s.' % (_charsets,)
|
||||
raise cherrypy.HTTPError(406, msg)
|
||||
|
||||
def __call__(self, *args, **kwargs):
|
||||
response = cherrypy.serving.response
|
||||
self.body = self.oldhandler(*args, **kwargs)
|
||||
|
||||
if isinstance(self.body, text_or_bytes):
|
||||
# strings get wrapped in a list because iterating over a single
|
||||
# item list is much faster than iterating over every character
|
||||
# in a long string.
|
||||
if self.body:
|
||||
self.body = [self.body]
|
||||
else:
|
||||
# [''] doesn't evaluate to False, so replace it with [].
|
||||
self.body = []
|
||||
elif hasattr(self.body, 'read'):
|
||||
self.body = file_generator(self.body)
|
||||
elif self.body is None:
|
||||
self.body = []
|
||||
|
||||
ct = response.headers.elements('Content-Type')
|
||||
if self.debug:
|
||||
cherrypy.log('Content-Type: %r' % [str(h)
|
||||
for h in ct], 'TOOLS.ENCODE')
|
||||
if ct and self.add_charset:
|
||||
ct = ct[0]
|
||||
if self.text_only:
|
||||
if ct.value.lower().startswith('text/'):
|
||||
if self.debug:
|
||||
cherrypy.log(
|
||||
'Content-Type %s starts with "text/"' % ct,
|
||||
'TOOLS.ENCODE')
|
||||
do_find = True
|
||||
else:
|
||||
if self.debug:
|
||||
cherrypy.log('Not finding because Content-Type %s '
|
||||
'does not start with "text/"' % ct,
|
||||
'TOOLS.ENCODE')
|
||||
do_find = False
|
||||
else:
|
||||
if self.debug:
|
||||
cherrypy.log('Finding because not text_only',
|
||||
'TOOLS.ENCODE')
|
||||
do_find = True
|
||||
|
||||
if do_find:
|
||||
# Set "charset=..." param on response Content-Type header
|
||||
ct.params['charset'] = self.find_acceptable_charset()
|
||||
if self.debug:
|
||||
cherrypy.log('Setting Content-Type %s' % ct,
|
||||
'TOOLS.ENCODE')
|
||||
response.headers['Content-Type'] = str(ct)
|
||||
|
||||
return self.body
|
||||
|
||||
# GZIP
|
||||
|
||||
|
||||
def compress(body, compress_level):
|
||||
"""Compress 'body' at the given compress_level."""
|
||||
import zlib
|
||||
|
||||
# See http://www.gzip.org/zlib/rfc-gzip.html
|
||||
yield ntob('\x1f\x8b') # ID1 and ID2: gzip marker
|
||||
yield ntob('\x08') # CM: compression method
|
||||
yield ntob('\x00') # FLG: none set
|
||||
# MTIME: 4 bytes
|
||||
yield struct.pack('<L', int(time.time()) & int('FFFFFFFF', 16))
|
||||
yield ntob('\x02') # XFL: max compression, slowest algo
|
||||
yield ntob('\xff') # OS: unknown
|
||||
|
||||
crc = zlib.crc32(ntob(''))
|
||||
size = 0
|
||||
zobj = zlib.compressobj(compress_level,
|
||||
zlib.DEFLATED, -zlib.MAX_WBITS,
|
||||
zlib.DEF_MEM_LEVEL, 0)
|
||||
for line in body:
|
||||
size += len(line)
|
||||
crc = zlib.crc32(line, crc)
|
||||
yield zobj.compress(line)
|
||||
yield zobj.flush()
|
||||
|
||||
# CRC32: 4 bytes
|
||||
yield struct.pack('<L', crc & int('FFFFFFFF', 16))
|
||||
# ISIZE: 4 bytes
|
||||
yield struct.pack('<L', size & int('FFFFFFFF', 16))
|
||||
|
||||
|
||||
def decompress(body):
|
||||
import gzip
|
||||
|
||||
zbuf = io.BytesIO()
|
||||
zbuf.write(body)
|
||||
zbuf.seek(0)
|
||||
zfile = gzip.GzipFile(mode='rb', fileobj=zbuf)
|
||||
data = zfile.read()
|
||||
zfile.close()
|
||||
return data
|
||||
|
||||
|
||||
def gzip(compress_level=5, mime_types=['text/html', 'text/plain'],
|
||||
debug=False):
|
||||
"""Try to gzip the response body if Content-Type in mime_types.
|
||||
|
||||
cherrypy.response.headers['Content-Type'] must be set to one of the
|
||||
values in the mime_types arg before calling this function.
|
||||
|
||||
The provided list of mime-types must be in one of the following forms:
|
||||
* type/subtype
|
||||
* type/*
|
||||
* type/*+subtype
|
||||
|
||||
No compression is performed if any of the following hold:
|
||||
* The client sends no Accept-Encoding request header
|
||||
* No 'gzip' or 'x-gzip' is present in the Accept-Encoding header
|
||||
* No 'gzip' or 'x-gzip' with a qvalue > 0 is present
|
||||
* The 'identity' value is given with a qvalue > 0.
|
||||
|
||||
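For example, to also compress JSON responses (a sketch; adjust the list to
whatever your application actually serves)::

    config = {'/': {
        'tools.gzip.on': True,
        'tools.gzip.compress_level': 6,
        'tools.gzip.mime_types': ['text/html', 'text/plain',
                                  'application/json'],
    }}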
"""
|
||||
request = cherrypy.serving.request
|
||||
response = cherrypy.serving.response
|
||||
|
||||
set_vary_header(response, 'Accept-Encoding')
|
||||
|
||||
if not response.body:
|
||||
# Response body is empty (might be a 304 for instance)
|
||||
if debug:
|
||||
cherrypy.log('No response body', context='TOOLS.GZIP')
|
||||
return
|
||||
|
||||
# If returning cached content (which should already have been gzipped),
|
||||
# don't re-zip.
|
||||
if getattr(request, 'cached', False):
|
||||
if debug:
|
||||
cherrypy.log('Not gzipping cached response', context='TOOLS.GZIP')
|
||||
return
|
||||
|
||||
acceptable = request.headers.elements('Accept-Encoding')
|
||||
if not acceptable:
|
||||
# If no Accept-Encoding field is present in a request,
|
||||
# the server MAY assume that the client will accept any
|
||||
# content coding. In this case, if "identity" is one of
|
||||
# the available content-codings, then the server SHOULD use
|
||||
# the "identity" content-coding, unless it has additional
|
||||
# information that a different content-coding is meaningful
|
||||
# to the client.
|
||||
if debug:
|
||||
cherrypy.log('No Accept-Encoding', context='TOOLS.GZIP')
|
||||
return
|
||||
|
||||
ct = response.headers.get('Content-Type', '').split(';')[0]
|
||||
for coding in acceptable:
|
||||
if coding.value == 'identity' and coding.qvalue != 0:
|
||||
if debug:
|
||||
cherrypy.log('Non-zero identity qvalue: %s' % coding,
|
||||
context='TOOLS.GZIP')
|
||||
return
|
||||
if coding.value in ('gzip', 'x-gzip'):
|
||||
if coding.qvalue == 0:
|
||||
if debug:
|
||||
cherrypy.log('Zero gzip qvalue: %s' % coding,
|
||||
context='TOOLS.GZIP')
|
||||
return
|
||||
|
||||
if ct not in mime_types:
|
||||
# If the list of provided mime-types contains tokens
|
||||
# such as 'text/*' or 'application/*+xml',
|
||||
# we go through them and find the most appropriate one
|
||||
# based on the given content-type.
|
||||
# The pattern matching only covers the most common cases, as
# stated above, and doesn't support extra parameters.
|
||||
found = False
|
||||
if '/' in ct:
|
||||
ct_media_type, ct_sub_type = ct.split('/')
|
||||
for mime_type in mime_types:
|
||||
if '/' in mime_type:
|
||||
media_type, sub_type = mime_type.split('/')
|
||||
if ct_media_type == media_type:
|
||||
if sub_type == '*':
|
||||
found = True
|
||||
break
|
||||
elif '+' in sub_type and '+' in ct_sub_type:
|
||||
ct_left, ct_right = ct_sub_type.split('+')
|
||||
left, right = sub_type.split('+')
|
||||
if left == '*' and ct_right == right:
|
||||
found = True
|
||||
break
|
||||
|
||||
if not found:
|
||||
if debug:
|
||||
cherrypy.log('Content-Type %s not in mime_types %r' %
|
||||
(ct, mime_types), context='TOOLS.GZIP')
|
||||
return
|
||||
|
||||
if debug:
|
||||
cherrypy.log('Gzipping', context='TOOLS.GZIP')
|
||||
# Return a generator that compresses the page
|
||||
response.headers['Content-Encoding'] = 'gzip'
|
||||
response.body = compress(response.body, compress_level)
|
||||
if 'Content-Length' in response.headers:
|
||||
# Delete Content-Length header so finalize() recalcs it.
|
||||
del response.headers['Content-Length']
|
||||
|
||||
return
|
||||
|
||||
if debug:
|
||||
cherrypy.log('No acceptable encoding found.', context='GZIP')
|
||||
cherrypy.HTTPError(406, 'identity, gzip').set_response()
|
||||
@@ -1,216 +0,0 @@
|
||||
import gc
|
||||
import inspect
|
||||
import sys
|
||||
import time
|
||||
|
||||
try:
|
||||
import objgraph
|
||||
except ImportError:
|
||||
objgraph = None
|
||||
|
||||
import cherrypy
|
||||
from cherrypy import _cprequest, _cpwsgi
|
||||
from cherrypy.process.plugins import SimplePlugin
|
||||
|
||||
|
||||
class ReferrerTree(object):
|
||||
|
||||
"""An object which gathers all referrers of an object to a given depth."""
|
||||
|
||||
peek_length = 40
|
||||
|
||||
def __init__(self, ignore=None, maxdepth=2, maxparents=10):
|
||||
self.ignore = ignore or []
|
||||
self.ignore.append(inspect.currentframe().f_back)
|
||||
self.maxdepth = maxdepth
|
||||
self.maxparents = maxparents
|
||||
|
||||
def ascend(self, obj, depth=1):
|
||||
"""Return a nested list containing referrers of the given object."""
|
||||
depth += 1
|
||||
parents = []
|
||||
|
||||
# Gather all referrers in one step to minimize
|
||||
# cascading references due to repr() logic.
|
||||
refs = gc.get_referrers(obj)
|
||||
self.ignore.append(refs)
|
||||
if len(refs) > self.maxparents:
|
||||
return [('[%s referrers]' % len(refs), [])]
|
||||
|
||||
try:
|
||||
ascendcode = self.ascend.__code__
|
||||
except AttributeError:
|
||||
ascendcode = self.ascend.im_func.func_code
|
||||
for parent in refs:
|
||||
if inspect.isframe(parent) and parent.f_code is ascendcode:
|
||||
continue
|
||||
if parent in self.ignore:
|
||||
continue
|
||||
if depth <= self.maxdepth:
|
||||
parents.append((parent, self.ascend(parent, depth)))
|
||||
else:
|
||||
parents.append((parent, []))
|
||||
|
||||
return parents
|
||||
|
||||
def peek(self, s):
|
||||
"""Return s, restricted to a sane length."""
|
||||
if len(s) > (self.peek_length + 3):
|
||||
half = self.peek_length // 2
|
||||
return s[:half] + '...' + s[-half:]
|
||||
else:
|
||||
return s
|
||||
|
||||
def _format(self, obj, descend=True):
|
||||
"""Return a string representation of a single object."""
|
||||
if inspect.isframe(obj):
|
||||
filename, lineno, func, context, index = inspect.getframeinfo(obj)
|
||||
return "<frame of function '%s'>" % func
|
||||
|
||||
if not descend:
|
||||
return self.peek(repr(obj))
|
||||
|
||||
if isinstance(obj, dict):
|
||||
return '{' + ', '.join(['%s: %s' % (self._format(k, descend=False),
|
||||
self._format(v, descend=False))
|
||||
for k, v in obj.items()]) + '}'
|
||||
elif isinstance(obj, list):
|
||||
return '[' + ', '.join([self._format(item, descend=False)
|
||||
for item in obj]) + ']'
|
||||
elif isinstance(obj, tuple):
|
||||
return '(' + ', '.join([self._format(item, descend=False)
|
||||
for item in obj]) + ')'
|
||||
|
||||
r = self.peek(repr(obj))
|
||||
if isinstance(obj, (str, int, float)):
|
||||
return r
|
||||
return '%s: %s' % (type(obj), r)
|
||||
|
||||
def format(self, tree):
|
||||
"""Return a list of string reprs from a nested list of referrers."""
|
||||
output = []
|
||||
|
||||
def ascend(branch, depth=1):
|
||||
for parent, grandparents in branch:
|
||||
output.append((' ' * depth) + self._format(parent))
|
||||
if grandparents:
|
||||
ascend(grandparents, depth + 1)
|
||||
ascend(tree)
|
||||
return output
|
||||
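For orientation, a short sketch of how ReferrerTree is meant to be driven; the target object here is made up.

target = {'answer': 42}
t = ReferrerTree(maxdepth=2, maxparents=10)
tree = t.ascend(target)             # nested [(referrer, [grandparents...])]
print('\n'.join(t.format(tree)))    # one indented line per referrer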
|
||||
|
||||
def get_instances(cls):
|
||||
return [x for x in gc.get_objects() if isinstance(x, cls)]
|
||||
|
||||
|
||||
class RequestCounter(SimplePlugin):
|
||||
|
||||
def start(self):
|
||||
self.count = 0
|
||||
|
||||
def before_request(self):
|
||||
self.count += 1
|
||||
|
||||
def after_request(self):
|
||||
self.count -= 1
|
||||
request_counter = RequestCounter(cherrypy.engine)
|
||||
request_counter.subscribe()
|
||||
|
||||
|
||||
def get_context(obj):
|
||||
if isinstance(obj, _cprequest.Request):
|
||||
return 'path=%s;stage=%s' % (obj.path_info, obj.stage)
|
||||
elif isinstance(obj, _cprequest.Response):
|
||||
return 'status=%s' % obj.status
|
||||
elif isinstance(obj, _cpwsgi.AppResponse):
|
||||
return 'PATH_INFO=%s' % obj.environ.get('PATH_INFO', '')
|
||||
elif hasattr(obj, 'tb_lineno'):
|
||||
return 'tb_lineno=%s' % obj.tb_lineno
|
||||
return ''
|
||||
|
||||
|
||||
class GCRoot(object):
|
||||
|
||||
"""A CherryPy page handler for testing reference leaks."""
|
||||
|
||||
classes = [
|
||||
(_cprequest.Request, 2, 2,
|
||||
'Should be 1 in this request thread and 1 in the main thread.'),
|
||||
(_cprequest.Response, 2, 2,
|
||||
'Should be 1 in this request thread and 1 in the main thread.'),
|
||||
(_cpwsgi.AppResponse, 1, 1,
|
||||
'Should be 1 in this request thread only.'),
|
||||
]
|
||||
|
||||
@cherrypy.expose
|
||||
def index(self):
|
||||
return 'Hello, world!'
|
||||
|
||||
@cherrypy.expose
|
||||
def stats(self):
|
||||
output = ['Statistics:']
|
||||
|
||||
for trial in range(10):
|
||||
if request_counter.count > 0:
|
||||
break
|
||||
time.sleep(0.5)
|
||||
else:
|
||||
output.append('\nNot all requests closed properly.')
|
||||
|
||||
# gc_collect isn't perfectly synchronous, because it may
|
||||
# break reference cycles that then take time to fully
|
||||
# finalize. Call it thrice and hope for the best.
|
||||
gc.collect()
|
||||
gc.collect()
|
||||
unreachable = gc.collect()
|
||||
if unreachable:
|
||||
if objgraph is not None:
|
||||
final = objgraph.by_type('Nondestructible')
|
||||
if final:
|
||||
objgraph.show_backrefs(final, filename='finalizers.png')
|
||||
|
||||
trash = {}
|
||||
for x in gc.garbage:
|
||||
trash[type(x)] = trash.get(type(x), 0) + 1
|
||||
if trash:
|
||||
output.insert(0, '\n%s unreachable objects:' % unreachable)
|
||||
trash = [(v, k) for k, v in trash.items()]
|
||||
trash.sort()
|
||||
for pair in trash:
|
||||
output.append(' ' + repr(pair))
|
||||
|
||||
# Check declared classes to verify uncollected instances.
|
||||
# These don't have to be part of a cycle; they can be
|
||||
# any objects that have unanticipated referrers that keep
|
||||
# them from being collected.
|
||||
allobjs = {}
|
||||
for cls, minobj, maxobj, msg in self.classes:
|
||||
allobjs[cls] = get_instances(cls)
|
||||
|
||||
for cls, minobj, maxobj, msg in self.classes:
|
||||
objs = allobjs[cls]
|
||||
lenobj = len(objs)
|
||||
if lenobj < minobj or lenobj > maxobj:
|
||||
if minobj == maxobj:
|
||||
output.append(
|
||||
'\nExpected %s %r references, got %s.' %
|
||||
(minobj, cls, lenobj))
|
||||
else:
|
||||
output.append(
|
||||
'\nExpected %s to %s %r references, got %s.' %
|
||||
(minobj, maxobj, cls, lenobj))
|
||||
|
||||
for obj in objs:
|
||||
if objgraph is not None:
|
||||
ig = [id(objs), id(inspect.currentframe())]
|
||||
fname = 'graph_%s_%s.png' % (cls.__name__, id(obj))
|
||||
objgraph.show_backrefs(
|
||||
obj, extra_ignore=ig, max_depth=4, too_many=20,
|
||||
filename=fname, extra_info=get_context)
|
||||
output.append('\nReferrers for %s (refcount=%s):' %
|
||||
(repr(obj), sys.getrefcount(obj)))
|
||||
t = ReferrerTree(ignore=[objs], maxdepth=3)
|
||||
tree = t.ascend(obj)
|
||||
output.extend(t.format(tree))
|
||||
|
||||
return '\n'.join(output)
|
||||
@@ -1,378 +0,0 @@
"""
|
||||
This module defines functions to implement HTTP Digest Authentication
|
||||
(:rfc:`2617`).
|
||||
This has full compliance with 'Digest' and 'Basic' authentication methods. In
|
||||
'Digest' it supports both MD5 and MD5-sess algorithms.
|
||||
|
||||
Usage:
|
||||
First use 'doAuth' to request the client authentication for a
|
||||
certain resource. You should send an httplib.UNAUTHORIZED response to the
|
||||
client so he knows he has to authenticate itself.
|
||||
|
||||
Then use 'parseAuthorization' to retrieve the 'auth_map' used in
|
||||
'checkResponse'.
|
||||
|
||||
To use 'checkResponse' you must have already verified the password
|
||||
associated with the 'username' key in 'auth_map' dict. Then you use the
|
||||
'checkResponse' function to verify if the password matches the one sent
|
||||
by the client.
|
||||
|
||||
SUPPORTED_ALGORITHM - list of supported 'Digest' algorithms
|
||||
SUPPORTED_QOP - list of supported 'Digest' 'qop'.
|
||||
"""
|
||||
|
||||
import time
|
||||
from hashlib import md5
|
||||
|
||||
from cherrypy._cpcompat import (
|
||||
base64_decode, ntob,
|
||||
parse_http_list, parse_keqv_list
|
||||
)
|
||||
|
||||
|
||||
__version__ = 1, 0, 1
|
||||
__author__ = 'Tiago Cogumbreiro <cogumbreiro@users.sf.net>'
|
||||
__credits__ = """
Peter van Kampen for his recipe which implements most of Digest
authentication:
http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/302378
"""
|
||||
|
||||
__license__ = """
|
||||
Copyright (c) 2005, Tiago Cogumbreiro <cogumbreiro@users.sf.net>
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions are met:
|
||||
|
||||
* Redistributions of source code must retain the above copyright notice,
|
||||
this list of conditions and the following disclaimer.
|
||||
* Redistributions in binary form must reproduce the above copyright notice,
|
||||
this list of conditions and the following disclaimer in the documentation
|
||||
and/or other materials provided with the distribution.
|
||||
* Neither the name of Sylvain Hellegouarch nor the names of his
|
||||
contributors may be used to endorse or promote products derived from
|
||||
this software without specific prior written permission.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
|
||||
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
|
||||
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
|
||||
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
|
||||
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
|
||||
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
"""
|
||||
|
||||
__all__ = ('digestAuth', 'basicAuth', 'doAuth', 'checkResponse',
|
||||
'parseAuthorization', 'SUPPORTED_ALGORITHM', 'md5SessionKey',
|
||||
'calculateNonce', 'SUPPORTED_QOP')
|
||||
|
||||
##########################################################################
|
||||
|
||||
MD5 = 'MD5'
|
||||
MD5_SESS = 'MD5-sess'
|
||||
AUTH = 'auth'
|
||||
AUTH_INT = 'auth-int'
|
||||
|
||||
SUPPORTED_ALGORITHM = (MD5, MD5_SESS)
|
||||
SUPPORTED_QOP = (AUTH, AUTH_INT)
|
||||
|
||||
##########################################################################
|
||||
# doAuth
|
||||
#
|
||||
DIGEST_AUTH_ENCODERS = {
|
||||
MD5: lambda val: md5(ntob(val)).hexdigest(),
|
||||
MD5_SESS: lambda val: md5(ntob(val)).hexdigest(),
|
||||
# SHA: lambda val: sha.new(ntob(val)).hexdigest (),
|
||||
}
|
||||
|
||||
|
||||
def calculateNonce(realm, algorithm=MD5):
|
||||
"""This is an auxaliary function that calculates 'nonce' value. It is used
|
||||
to handle sessions."""
|
||||
|
||||
global SUPPORTED_ALGORITHM, DIGEST_AUTH_ENCODERS
|
||||
assert algorithm in SUPPORTED_ALGORITHM
|
||||
|
||||
try:
|
||||
encoder = DIGEST_AUTH_ENCODERS[algorithm]
|
||||
except KeyError:
|
||||
raise NotImplementedError('The chosen algorithm (%s) does not have '
|
||||
'an implementation yet' % algorithm)
|
||||
|
||||
return encoder('%d:%s' % (time.time(), realm))
|
||||
|
||||
|
||||
def digestAuth(realm, algorithm=MD5, nonce=None, qop=AUTH):
|
||||
"""Challenges the client for a Digest authentication."""
|
||||
global SUPPORTED_ALGORITHM, DIGEST_AUTH_ENCODERS, SUPPORTED_QOP
|
||||
assert algorithm in SUPPORTED_ALGORITHM
|
||||
assert qop in SUPPORTED_QOP
|
||||
|
||||
if nonce is None:
|
||||
nonce = calculateNonce(realm, algorithm)
|
||||
|
||||
return 'Digest realm="%s", nonce="%s", algorithm="%s", qop="%s"' % (
|
||||
realm, nonce, algorithm, qop
|
||||
)
|
||||
|
||||
|
||||
def basicAuth(realm):
|
||||
"""Challengenes the client for a Basic authentication."""
|
||||
assert '"' not in realm, "Realms cannot contain the \" (quote) character."
|
||||
|
||||
return 'Basic realm="%s"' % realm
|
||||
|
||||
|
||||
def doAuth(realm):
|
||||
"""'doAuth' function returns the challenge string b giving priority over
|
||||
Digest and fallback to Basic authentication when the browser doesn't
|
||||
support the first one.
|
||||
|
||||
This should be set in the HTTP header under the key 'WWW-Authenticate'."""
|
||||
|
||||
return digestAuth(realm) + ' ' + basicAuth(realm)
|
||||
|
||||
|
||||
##########################################################################
|
||||
# Parse authorization parameters
|
||||
#
|
||||
def _parseDigestAuthorization(auth_params):
|
||||
# Convert the auth params to a dict
|
||||
items = parse_http_list(auth_params)
|
||||
params = parse_keqv_list(items)
|
||||
|
||||
# Now validate the params
|
||||
|
||||
# Check for required parameters
|
||||
required = ['username', 'realm', 'nonce', 'uri', 'response']
|
||||
for k in required:
|
||||
if k not in params:
|
||||
return None
|
||||
|
||||
# If qop is sent then cnonce and nc MUST be present
|
||||
if 'qop' in params and not ('cnonce' in params
|
||||
and 'nc' in params):
|
||||
return None
|
||||
|
||||
# If qop is not sent, neither cnonce nor nc can be present
|
||||
if ('cnonce' in params or 'nc' in params) and \
|
||||
'qop' not in params:
|
||||
return None
|
||||
|
||||
return params
|
||||
|
||||
|
||||
def _parseBasicAuthorization(auth_params):
|
||||
username, password = base64_decode(auth_params).split(':', 1)
|
||||
return {'username': username, 'password': password}
|
||||
|
||||
AUTH_SCHEMES = {
|
||||
'basic': _parseBasicAuthorization,
|
||||
'digest': _parseDigestAuthorization,
|
||||
}
|
||||
|
||||
|
||||
def parseAuthorization(credentials):
|
||||
"""parseAuthorization will convert the value of the 'Authorization' key in
|
||||
the HTTP header to a map itself. If the parsing fails 'None' is returned.
|
||||
"""
|
||||
|
||||
global AUTH_SCHEMES
|
||||
|
||||
auth_scheme, auth_params = credentials.split(' ', 1)
|
||||
auth_scheme = auth_scheme.lower()
|
||||
|
||||
parser = AUTH_SCHEMES[auth_scheme]
|
||||
params = parser(auth_params)
|
||||
|
||||
if params is None:
|
||||
return
|
||||
|
||||
assert 'auth_scheme' not in params
|
||||
params['auth_scheme'] = auth_scheme
|
||||
return params
|
||||
|
||||
|
||||
##########################################################################
|
||||
# Check provided response for a valid password
|
||||
#
|
||||
def md5SessionKey(params, password):
|
||||
"""
|
||||
If the "algorithm" directive's value is "MD5-sess", then A1
|
||||
[the session key] is calculated only once - on the first request by the
|
||||
client following receipt of a WWW-Authenticate challenge from the server.
|
||||
|
||||
This creates a 'session key' for the authentication of subsequent
|
||||
requests and responses which is different for each "authentication
|
||||
session", thus limiting the amount of material hashed with any one
|
||||
key.
|
||||
|
||||
Because the server need only use the hash of the user
|
||||
credentials in order to create the A1 value, this construction could
|
||||
be used in conjunction with a third party authentication service so
|
||||
that the web server would not need the actual password value. The
|
||||
specification of such a protocol is beyond the scope of this
|
||||
specification.
|
||||
"""
|
||||
|
||||
keys = ('username', 'realm', 'nonce', 'cnonce')
|
||||
params_copy = {}
|
||||
for key in keys:
|
||||
params_copy[key] = params[key]
|
||||
|
||||
params_copy['algorithm'] = MD5_SESS
|
||||
return _A1(params_copy, password)
|
||||
|
||||
|
||||
def _A1(params, password):
|
||||
algorithm = params.get('algorithm', MD5)
|
||||
H = DIGEST_AUTH_ENCODERS[algorithm]
|
||||
|
||||
if algorithm == MD5:
|
||||
# If the "algorithm" directive's value is "MD5" or is
|
||||
# unspecified, then A1 is:
|
||||
# A1 = unq(username-value) ":" unq(realm-value) ":" passwd
|
||||
return '%s:%s:%s' % (params['username'], params['realm'], password)
|
||||
|
||||
elif algorithm == MD5_SESS:
|
||||
|
||||
# This is A1 if qop is set
|
||||
# A1 = H( unq(username-value) ":" unq(realm-value) ":" passwd )
|
||||
# ":" unq(nonce-value) ":" unq(cnonce-value)
|
||||
h_a1 = H('%s:%s:%s' % (params['username'], params['realm'], password))
|
||||
return '%s:%s:%s' % (h_a1, params['nonce'], params['cnonce'])
|
||||
|
||||
|
||||
def _A2(params, method, kwargs):
|
||||
# If the "qop" directive's value is "auth" or is unspecified, then A2 is:
|
||||
# A2 = Method ":" digest-uri-value
|
||||
|
||||
qop = params.get('qop', 'auth')
|
||||
if qop == 'auth':
|
||||
return method + ':' + params['uri']
|
||||
elif qop == 'auth-int':
|
||||
# If the "qop" value is "auth-int", then A2 is:
|
||||
# A2 = Method ":" digest-uri-value ":" H(entity-body)
|
||||
entity_body = kwargs.get('entity_body', '')
|
||||
H = kwargs['H']
|
||||
|
||||
return '%s:%s:%s' % (
|
||||
method,
|
||||
params['uri'],
|
||||
H(entity_body)
|
||||
)
|
||||
|
||||
else:
|
||||
raise NotImplementedError("The 'qop' method is unknown: %s" % qop)
|
||||
|
||||
|
||||
def _computeDigestResponse(auth_map, password, method='GET', A1=None,
|
||||
**kwargs):
|
||||
"""
|
||||
Generates a response respecting the algorithm defined in RFC 2617
|
||||
"""
|
||||
params = auth_map
|
||||
|
||||
algorithm = params.get('algorithm', MD5)
|
||||
|
||||
H = DIGEST_AUTH_ENCODERS[algorithm]
|
||||
KD = lambda secret, data: H(secret + ':' + data)
|
||||
|
||||
qop = params.get('qop', None)
|
||||
|
||||
H_A2 = H(_A2(params, method, kwargs))
|
||||
|
||||
if algorithm == MD5_SESS and A1 is not None:
|
||||
H_A1 = H(A1)
|
||||
else:
|
||||
H_A1 = H(_A1(params, password))
|
||||
|
||||
if qop in ('auth', 'auth-int'):
|
||||
# If the "qop" value is "auth" or "auth-int":
|
||||
# request-digest = <"> < KD ( H(A1), unq(nonce-value)
|
||||
# ":" nc-value
|
||||
# ":" unq(cnonce-value)
|
||||
# ":" unq(qop-value)
|
||||
# ":" H(A2)
|
||||
# ) <">
|
||||
request = '%s:%s:%s:%s:%s' % (
|
||||
params['nonce'],
|
||||
params['nc'],
|
||||
params['cnonce'],
|
||||
params['qop'],
|
||||
H_A2,
|
||||
)
|
||||
elif qop is None:
|
||||
# If the "qop" directive is not present (this construction is
|
||||
# for compatibility with RFC 2069):
|
||||
# request-digest =
|
||||
# <"> < KD ( H(A1), unq(nonce-value) ":" H(A2) ) > <">
|
||||
request = '%s:%s' % (params['nonce'], H_A2)
|
||||
|
||||
return KD(H_A1, request)
|
||||
|
||||
|
||||
def _checkDigestResponse(auth_map, password, method='GET', A1=None, **kwargs):
|
||||
"""This function is used to verify the response given by the client when
|
||||
he tries to authenticate.
|
||||
Optional arguments:
|
||||
entity_body - when 'qop' is set to 'auth-int' you MUST provide the
|
||||
raw data you are going to send to the client (usually the
|
||||
HTML page.
|
||||
request_uri - the uri from the request line compared with the 'uri'
|
||||
directive of the authorization map. They must represent
|
||||
the same resource (unused at this time).
|
||||
"""
|
||||
|
||||
if auth_map['realm'] != kwargs.get('realm', None):
|
||||
return False
|
||||
|
||||
response = _computeDigestResponse(
|
||||
auth_map, password, method, A1, **kwargs)
|
||||
|
||||
return response == auth_map['response']
|
||||
|
||||
|
||||
def _checkBasicResponse(auth_map, password, method='GET', encrypt=None,
|
||||
**kwargs):
|
||||
# Note that the Basic response doesn't provide the realm value so we cannot
|
||||
# test it
|
||||
pass_through = lambda password, username=None: password
|
||||
encrypt = encrypt or pass_through
|
||||
try:
|
||||
candidate = encrypt(auth_map['password'], auth_map['username'])
|
||||
except TypeError:
|
||||
# if encrypt only takes one parameter, it's the password
|
||||
candidate = encrypt(auth_map['password'])
|
||||
return candidate == password
|
||||
|
||||
AUTH_RESPONSES = {
|
||||
'basic': _checkBasicResponse,
|
||||
'digest': _checkDigestResponse,
|
||||
}
|
||||
|
||||
|
||||
def checkResponse(auth_map, password, method='GET', encrypt=None, **kwargs):
|
||||
"""'checkResponse' compares the auth_map with the password and optionally
|
||||
other arguments that each implementation might need.
|
||||
|
||||
If the response is of type 'Basic' then the function has the following
|
||||
signature::
|
||||
|
||||
checkBasicResponse(auth_map, password) -> bool
|
||||
|
||||
If the response is of type 'Digest' then the function has the following
|
||||
signature::
|
||||
|
||||
checkDigestResponse(auth_map, password, method='GET', A1=None) -> bool
|
||||
|
||||
The 'A1' argument is only used in MD5_SESS algorithm based responses.
|
||||
Check md5SessionKey() for more info.
|
||||
"""
|
||||
checker = AUTH_RESPONSES[auth_map['auth_scheme']]
|
||||
return checker(auth_map, password, method=method, encrypt=encrypt,
|
||||
**kwargs)
|
||||
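Putting the pieces of this module together, a rough sketch of the flow the module docstring describes; the request/response objects and the password lookup are hypothetical placeholders.

# 1. Challenge the client (e.g. in a 401 response):
#        response.headers['WWW-Authenticate'] = doAuth('My Realm')
# 2. On the retried request, parse the Authorization header:
#        auth_map = parseAuthorization(request.headers['Authorization'])
# 3. Look up the password for auth_map['username'] yourself, then verify:
#        ok = checkResponse(auth_map, password, method=request.method,
#                           realm='My Realm')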
@@ -1,530 +0,0 @@
|
||||
"""HTTP library functions.
|
||||
|
||||
This module contains functions for building an HTTP application
|
||||
framework: any one, not just one whose name starts with "Ch". ;) If you
|
||||
reference any modules from some popular framework inside *this* module,
|
||||
FuManChu will personally hang you up by your thumbs and submit you
|
||||
to a public caning.
|
||||
"""
|
||||
|
||||
import functools
|
||||
import email.utils
|
||||
import re
|
||||
from binascii import b2a_base64
|
||||
from cgi import parse_header
|
||||
try:
|
||||
# Python 3
|
||||
from email.header import decode_header
|
||||
except ImportError:
|
||||
from email.Header import decode_header
|
||||
|
||||
import six
|
||||
|
||||
from cherrypy._cpcompat import BaseHTTPRequestHandler, ntob, ntou
|
||||
from cherrypy._cpcompat import text_or_bytes, iteritems
|
||||
from cherrypy._cpcompat import reversed, sorted, unquote_qs
|
||||
|
||||
response_codes = BaseHTTPRequestHandler.responses.copy()
|
||||
|
||||
# From https://github.com/cherrypy/cherrypy/issues/361
|
||||
response_codes[500] = ('Internal Server Error',
|
||||
'The server encountered an unexpected condition '
|
||||
'which prevented it from fulfilling the request.')
|
||||
response_codes[503] = ('Service Unavailable',
|
||||
'The server is currently unable to handle the '
|
||||
'request due to a temporary overloading or '
|
||||
'maintenance of the server.')
|
||||
|
||||
|
||||
HTTPDate = functools.partial(email.utils.formatdate, usegmt=True)
|
||||
|
||||
|
||||
def urljoin(*atoms):
|
||||
"""Return the given path \*atoms, joined into a single URL.
|
||||
|
||||
This will correctly join a SCRIPT_NAME and PATH_INFO into the
|
||||
original URL, even if either atom is blank.
|
||||
"""
|
||||
url = '/'.join([x for x in atoms if x])
|
||||
while '//' in url:
|
||||
url = url.replace('//', '/')
|
||||
# Special-case the final url of "", and return "/" instead.
|
||||
return url or '/'
|
||||
|
||||
|
||||
def urljoin_bytes(*atoms):
|
||||
"""Return the given path *atoms, joined into a single URL.
|
||||
|
||||
This will correctly join a SCRIPT_NAME and PATH_INFO into the
|
||||
original URL, even if either atom is blank.
|
||||
"""
|
||||
url = ntob('/').join([x for x in atoms if x])
|
||||
while ntob('//') in url:
|
||||
url = url.replace(ntob('//'), ntob('/'))
|
||||
# Special-case the final url of "", and return "/" instead.
|
||||
return url or ntob('/')
|
||||
|
||||
|
||||
def protocol_from_http(protocol_str):
|
||||
"""Return a protocol tuple from the given 'HTTP/x.y' string."""
|
||||
return int(protocol_str[5]), int(protocol_str[7])
|
||||
|
||||
|
||||
def get_ranges(headervalue, content_length):
|
||||
"""Return a list of (start, stop) indices from a Range header, or None.
|
||||
|
||||
Each (start, stop) tuple will be composed of two ints, which are suitable
|
||||
for use in a slicing operation. That is, the header "Range: bytes=3-6",
|
||||
if applied against a Python string, is requesting resource[3:7]. This
|
||||
function will return the list [(3, 7)].
|
||||
|
||||
If this function returns an empty list, you should return HTTP 416.
|
||||
"""
|
||||
|
||||
if not headervalue:
|
||||
return None
|
||||
|
||||
result = []
|
||||
bytesunit, byteranges = headervalue.split('=', 1)
|
||||
for brange in byteranges.split(','):
|
||||
start, stop = [x.strip() for x in brange.split('-', 1)]
|
||||
if start:
|
||||
if not stop:
|
||||
stop = content_length - 1
|
||||
start, stop = int(start), int(stop)
|
||||
if start >= content_length:
|
||||
# From rfc 2616 sec 14.16:
|
||||
# "If the server receives a request (other than one
|
||||
# including an If-Range request-header field) with an
|
||||
# unsatisfiable Range request-header field (that is,
|
||||
# all of whose byte-range-spec values have a first-byte-pos
|
||||
# value greater than the current length of the selected
|
||||
# resource), it SHOULD return a response code of 416
|
||||
# (Requested range not satisfiable)."
|
||||
continue
|
||||
if stop < start:
|
||||
# From rfc 2616 sec 14.16:
|
||||
# "If the server ignores a byte-range-spec because it
|
||||
# is syntactically invalid, the server SHOULD treat
|
||||
# the request as if the invalid Range header field
|
||||
# did not exist. (Normally, this means return a 200
|
||||
# response containing the full entity)."
|
||||
return None
|
||||
result.append((start, stop + 1))
|
||||
else:
|
||||
if not stop:
|
||||
# See rfc quote above.
|
||||
return None
|
||||
# Negative subscript (last N bytes)
|
||||
#
|
||||
# RFC 2616 Section 14.35.1:
|
||||
# If the entity is shorter than the specified suffix-length,
|
||||
# the entire entity-body is used.
|
||||
if int(stop) > content_length:
|
||||
result.append((0, content_length))
|
||||
else:
|
||||
result.append((content_length - int(stop), content_length))
|
||||
|
||||
return result
|
||||
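A few worked calls against get_ranges, with the results that follow from the logic above (a resource of 8 bytes is assumed):

get_ranges('bytes=3-6', 8)    # [(3, 7)]  -> resource[3:7], bytes 3..6 inclusive
get_ranges('bytes=-2', 8)     # [(6, 8)]  -> suffix range, the last 2 bytes
get_ranges('bytes=500-', 8)   # []        -> unsatisfiable; answer with 416
get_ranges(None, 8)           # None      -> no Range header was sent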
|
||||
|
||||
class HeaderElement(object):
|
||||
|
||||
"""An element (with parameters) from an HTTP header's element list."""
|
||||
|
||||
def __init__(self, value, params=None):
|
||||
self.value = value
|
||||
if params is None:
|
||||
params = {}
|
||||
self.params = params
|
||||
|
||||
def __cmp__(self, other):
|
||||
return cmp(self.value, other.value)
|
||||
|
||||
def __lt__(self, other):
|
||||
return self.value < other.value
|
||||
|
||||
def __str__(self):
|
||||
p = [';%s=%s' % (k, v) for k, v in iteritems(self.params)]
|
||||
return str('%s%s' % (self.value, ''.join(p)))
|
||||
|
||||
def __bytes__(self):
|
||||
return ntob(self.__str__())
|
||||
|
||||
def __unicode__(self):
|
||||
return ntou(self.__str__())
|
||||
|
||||
@staticmethod
|
||||
def parse(elementstr):
|
||||
"""Transform 'token;key=val' to ('token', {'key': 'val'})."""
|
||||
initial_value, params = parse_header(elementstr)
|
||||
return initial_value, params
|
||||
|
||||
@classmethod
|
||||
def from_str(cls, elementstr):
|
||||
"""Construct an instance from a string of the form 'token;key=val'."""
|
||||
ival, params = cls.parse(elementstr)
|
||||
return cls(ival, params)
|
||||
|
||||
|
||||
q_separator = re.compile(r'; *q *=')
|
||||
|
||||
|
||||
class AcceptElement(HeaderElement):
|
||||
|
||||
"""An element (with parameters) from an Accept* header's element list.
|
||||
|
||||
AcceptElement objects are comparable; the more-preferred object will be
|
||||
"less than" the less-preferred object. They are also therefore sortable;
|
||||
if you sort a list of AcceptElement objects, they will be listed in
|
||||
priority order; the most preferred value will be first. Yes, it should
|
||||
have been the other way around, but it's too late to fix now.
|
||||
"""
|
||||
|
||||
@classmethod
|
||||
def from_str(cls, elementstr):
|
||||
qvalue = None
|
||||
# The first "q" parameter (if any) separates the initial
|
||||
# media-range parameter(s) (if any) from the accept-params.
|
||||
atoms = q_separator.split(elementstr, 1)
|
||||
media_range = atoms.pop(0).strip()
|
||||
if atoms:
|
||||
# The qvalue for an Accept header can have extensions. The other
|
||||
# headers cannot, but it's easier to parse them as if they did.
|
||||
qvalue = HeaderElement.from_str(atoms[0].strip())
|
||||
|
||||
media_type, params = cls.parse(media_range)
|
||||
if qvalue is not None:
|
||||
params['q'] = qvalue
|
||||
return cls(media_type, params)
|
||||
|
||||
@property
|
||||
def qvalue(self):
|
||||
'The qvalue, or priority, of this value.'
|
||||
val = self.params.get('q', '1')
|
||||
if isinstance(val, HeaderElement):
|
||||
val = val.value
|
||||
return float(val)
|
||||
|
||||
def __cmp__(self, other):
|
||||
diff = cmp(self.qvalue, other.qvalue)
|
||||
if diff == 0:
|
||||
diff = cmp(str(self), str(other))
|
||||
return diff
|
||||
|
||||
def __lt__(self, other):
|
||||
if self.qvalue == other.qvalue:
|
||||
return str(self) < str(other)
|
||||
else:
|
||||
return self.qvalue < other.qvalue
|
||||
|
||||
RE_HEADER_SPLIT = re.compile(r',(?=(?:[^"]*"[^"]*")*[^"]*$)')


def header_elements(fieldname, fieldvalue):
|
||||
"""Return a sorted HeaderElement list from a comma-separated header string.
|
||||
"""
|
||||
if not fieldvalue:
|
||||
return []
|
||||
|
||||
result = []
|
||||
for element in RE_HEADER_SPLIT.split(fieldvalue):
|
||||
if fieldname.startswith('Accept') or fieldname == 'TE':
|
||||
hv = AcceptElement.from_str(element)
|
||||
else:
|
||||
hv = HeaderElement.from_str(element)
|
||||
result.append(hv)
|
||||
|
||||
return list(reversed(sorted(result)))
|
||||
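Given the ordering documented on AcceptElement, a quick illustrative call (header values made up):

elems = header_elements('Accept', 'text/plain;q=0.5, application/json')
[e.value for e in elems]    # ['application/json', 'text/plain'] - highest q first
elems[0].qvalue             # 1.0 (no explicit q defaults to 1)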
|
||||
|
||||
def decode_TEXT(value):
|
||||
r"""Decode :rfc:`2047` TEXT (e.g. "=?utf-8?q?f=C3=BCr?=" -> "f\xfcr")."""
|
||||
atoms = decode_header(value)
|
||||
decodedvalue = ''
|
||||
for atom, charset in atoms:
|
||||
if charset is not None:
|
||||
atom = atom.decode(charset)
|
||||
decodedvalue += atom
|
||||
return decodedvalue
|
||||
|
||||
|
||||
def valid_status(status):
|
||||
"""Return legal HTTP status Code, Reason-phrase and Message.
|
||||
|
||||
The status arg must be an int, or a str that begins with an int.
|
||||
|
||||
If status is an int, or a str and no reason-phrase is supplied,
|
||||
a default reason-phrase will be provided.
|
||||
"""
|
||||
|
||||
if not status:
|
||||
status = 200
|
||||
|
||||
status = str(status)
|
||||
parts = status.split(' ', 1)
|
||||
if len(parts) == 1:
|
||||
# No reason supplied.
|
||||
code, = parts
|
||||
reason = None
|
||||
else:
|
||||
code, reason = parts
|
||||
reason = reason.strip()
|
||||
|
||||
try:
|
||||
code = int(code)
|
||||
except ValueError:
|
||||
raise ValueError('Illegal response status from server '
|
||||
'(%s is non-numeric).' % repr(code))
|
||||
|
||||
if code < 100 or code > 599:
|
||||
raise ValueError('Illegal response status from server '
|
||||
'(%s is out of range).' % repr(code))
|
||||
|
||||
if code not in response_codes:
|
||||
# code is unknown but not illegal
|
||||
default_reason, message = '', ''
|
||||
else:
|
||||
default_reason, message = response_codes[code]
|
||||
|
||||
if reason is None:
|
||||
reason = default_reason
|
||||
|
||||
return code, reason, message
|
||||
|
||||
|
||||
# NOTE: the parse_qs functions that follow are modified version of those
|
||||
# in the python3.0 source - we need to pass through an encoding to the unquote
|
||||
# method, but the default parse_qs function doesn't allow us to. These do.
|
||||
|
||||
def _parse_qs(qs, keep_blank_values=0, strict_parsing=0, encoding='utf-8'):
|
||||
"""Parse a query given as a string argument.
|
||||
|
||||
Arguments:
|
||||
|
||||
qs: URL-encoded query string to be parsed
|
||||
|
||||
keep_blank_values: flag indicating whether blank values in
|
||||
URL encoded queries should be treated as blank strings. A
|
||||
true value indicates that blanks should be retained as blank
|
||||
strings. The default false value indicates that blank values
|
||||
are to be ignored and treated as if they were not included.
|
||||
|
||||
strict_parsing: flag indicating what to do with parsing errors. If
|
||||
false (the default), errors are silently ignored. If true,
|
||||
errors raise a ValueError exception.
|
||||
|
||||
Returns a dict, as G-d intended.
|
||||
"""
|
||||
pairs = [s2 for s1 in qs.split('&') for s2 in s1.split(';')]
|
||||
d = {}
|
||||
for name_value in pairs:
|
||||
if not name_value and not strict_parsing:
|
||||
continue
|
||||
nv = name_value.split('=', 1)
|
||||
if len(nv) != 2:
|
||||
if strict_parsing:
|
||||
raise ValueError('bad query field: %r' % (name_value,))
|
||||
# Handle case of a control-name with no equal sign
|
||||
if keep_blank_values:
|
||||
nv.append('')
|
||||
else:
|
||||
continue
|
||||
if len(nv[1]) or keep_blank_values:
|
||||
name = unquote_qs(nv[0], encoding)
|
||||
value = unquote_qs(nv[1], encoding)
|
||||
if name in d:
|
||||
if not isinstance(d[name], list):
|
||||
d[name] = [d[name]]
|
||||
d[name].append(value)
|
||||
else:
|
||||
d[name] = value
|
||||
return d
|
||||
|
||||
|
||||
image_map_pattern = re.compile(r'[0-9]+,[0-9]+')
|
||||
|
||||
|
||||
def parse_query_string(query_string, keep_blank_values=True, encoding='utf-8'):
|
||||
"""Build a params dictionary from a query_string.
|
||||
|
||||
Duplicate key/value pairs in the provided query_string will be
|
||||
returned as {'key': [val1, val2, ...]}. Single key/values will
|
||||
be returned as strings: {'key': 'value'}.
|
||||
"""
|
||||
if image_map_pattern.match(query_string):
|
||||
# Server-side image map. Map the coords to 'x' and 'y'
|
||||
# (like CGI::Request does).
|
||||
pm = query_string.split(',')
|
||||
pm = {'x': int(pm[0]), 'y': int(pm[1])}
|
||||
else:
|
||||
pm = _parse_qs(query_string, keep_blank_values, encoding=encoding)
|
||||
return pm
|
||||
|
||||
|
||||
class CaseInsensitiveDict(dict):
|
||||
|
||||
"""A case-insensitive dict subclass.
|
||||
|
||||
Each key is changed on entry to str(key).title().
|
||||
"""
|
||||
|
||||
def __getitem__(self, key):
|
||||
return dict.__getitem__(self, str(key).title())
|
||||
|
||||
def __setitem__(self, key, value):
|
||||
dict.__setitem__(self, str(key).title(), value)
|
||||
|
||||
def __delitem__(self, key):
|
||||
dict.__delitem__(self, str(key).title())
|
||||
|
||||
def __contains__(self, key):
|
||||
return dict.__contains__(self, str(key).title())
|
||||
|
||||
def get(self, key, default=None):
|
||||
return dict.get(self, str(key).title(), default)
|
||||
|
||||
if hasattr({}, 'has_key'):
|
||||
def has_key(self, key):
|
||||
return str(key).title() in self
|
||||
|
||||
def update(self, E):
|
||||
for k in E.keys():
|
||||
self[str(k).title()] = E[k]
|
||||
|
||||
@classmethod
|
||||
def fromkeys(cls, seq, value=None):
|
||||
newdict = cls()
|
||||
for k in seq:
|
||||
newdict[str(k).title()] = value
|
||||
return newdict
|
||||
|
||||
def setdefault(self, key, x=None):
|
||||
key = str(key).title()
|
||||
try:
|
||||
return self[key]
|
||||
except KeyError:
|
||||
self[key] = x
|
||||
return x
|
||||
|
||||
def pop(self, key, default):
|
||||
return dict.pop(self, str(key).title(), default)
|
||||
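A tiny illustration of the title-casing behaviour described above:

d = CaseInsensitiveDict()
d['content-TYPE'] = 'text/html'
'Content-Type' in d      # True - keys are stored as str(key).title()
d.get('CONTENT-type')    # 'text/html'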
|
||||
|
||||
# TEXT = <any OCTET except CTLs, but including LWS>
|
||||
#
|
||||
# A CRLF is allowed in the definition of TEXT only as part of a header
|
||||
# field continuation. It is expected that the folding LWS will be
|
||||
# replaced with a single SP before interpretation of the TEXT value."
|
||||
if str == bytes:
|
||||
header_translate_table = ''.join([chr(i) for i in xrange(256)])
|
||||
header_translate_deletechars = ''.join(
|
||||
[chr(i) for i in xrange(32)]) + chr(127)
|
||||
else:
|
||||
header_translate_table = None
|
||||
header_translate_deletechars = bytes(range(32)) + bytes([127])
|
||||
|
||||
|
||||
class HeaderMap(CaseInsensitiveDict):
|
||||
|
||||
"""A dict subclass for HTTP request and response headers.
|
||||
|
||||
Each key is changed on entry to str(key).title(). This allows headers
|
||||
to be case-insensitive and avoid duplicates.
|
||||
|
||||
Values are header values (decoded according to :rfc:`2047` if necessary).
|
||||
"""
|
||||
|
||||
protocol = (1, 1)
|
||||
encodings = ['ISO-8859-1']
|
||||
|
||||
# Someday, when http-bis is done, this will probably get dropped
|
||||
# since few servers, clients, or intermediaries do it. But until then,
|
||||
# we're going to obey the spec as is.
|
||||
# "Words of *TEXT MAY contain characters from character sets other than
|
||||
# ISO-8859-1 only when encoded according to the rules of RFC 2047."
|
||||
use_rfc_2047 = True
|
||||
|
||||
def elements(self, key):
|
||||
"""Return a sorted list of HeaderElements for the given header."""
|
||||
key = str(key).title()
|
||||
value = self.get(key)
|
||||
return header_elements(key, value)
|
||||
|
||||
def values(self, key):
|
||||
"""Return a sorted list of HeaderElement.value for the given header."""
|
||||
return [e.value for e in self.elements(key)]
|
||||
|
||||
def output(self):
|
||||
"""Transform self into a list of (name, value) tuples."""
|
||||
return list(self.encode_header_items(self.items()))
|
||||
|
||||
@classmethod
|
||||
def encode_header_items(cls, header_items):
|
||||
"""
|
||||
Prepare the sequence of name, value tuples into a form suitable for
|
||||
transmitting on the wire for HTTP.
|
||||
"""
|
||||
for k, v in header_items:
|
||||
if isinstance(k, six.text_type):
|
||||
k = cls.encode(k)
|
||||
|
||||
if not isinstance(v, text_or_bytes):
|
||||
v = str(v)
|
||||
|
||||
if isinstance(v, six.text_type):
|
||||
v = cls.encode(v)
|
||||
|
||||
# See header_translate_* constants above.
|
||||
# Replace only if you really know what you're doing.
|
||||
k = k.translate(header_translate_table,
|
||||
header_translate_deletechars)
|
||||
v = v.translate(header_translate_table,
|
||||
header_translate_deletechars)
|
||||
|
||||
yield (k, v)
|
||||
|
||||
@classmethod
|
||||
def encode(cls, v):
|
||||
"""Return the given header name or value, encoded for HTTP output."""
|
||||
for enc in cls.encodings:
|
||||
try:
|
||||
return v.encode(enc)
|
||||
except UnicodeEncodeError:
|
||||
continue
|
||||
|
||||
if cls.protocol == (1, 1) and cls.use_rfc_2047:
|
||||
# Encode RFC-2047 TEXT
|
||||
# (e.g. u"\u8200" -> "=?utf-8?b?6IiA?=").
|
||||
# We do our own here instead of using the email module
|
||||
# because we never want to fold lines--folding has
|
||||
# been deprecated by the HTTP working group.
|
||||
v = b2a_base64(v.encode('utf-8'))
|
||||
return (ntob('=?utf-8?b?') + v.strip(ntob('\n')) + ntob('?='))
|
||||
|
||||
raise ValueError('Could not encode header part %r using '
|
||||
'any of the encodings %r.' %
|
||||
(v, cls.encodings))
|
||||
|
||||
|
||||
class Host(object):
|
||||
|
||||
"""An internet address.
|
||||
|
||||
name
|
||||
Should be the client's host name. If not available (because no DNS
|
||||
lookup is performed), the IP address should be used instead.
|
||||
|
||||
"""
|
||||
|
||||
ip = '0.0.0.0'
|
||||
port = 80
|
||||
name = 'unknown.tld'
|
||||
|
||||
def __init__(self, ip, port, name=None):
|
||||
self.ip = ip
|
||||
self.port = port
|
||||
if name is None:
|
||||
name = ip
|
||||
self.name = name
|
||||
|
||||
def __repr__(self):
|
||||
return 'httputil.Host(%r, %r, %r)' % (self.ip, self.port, self.name)
|
||||
@@ -1,94 +0,0 @@
import cherrypy
from cherrypy._cpcompat import text_or_bytes, ntou, json_encode, json_decode


def json_processor(entity):
    """Read application/json data into request.json."""
    if not entity.headers.get(ntou('Content-Length'), ntou('')):
        raise cherrypy.HTTPError(411)

    body = entity.fp.read()
    with cherrypy.HTTPError.handle(ValueError, 400, 'Invalid JSON document'):
        cherrypy.serving.request.json = json_decode(body.decode('utf-8'))


def json_in(content_type=[ntou('application/json'), ntou('text/javascript')],
            force=True, debug=False, processor=json_processor):
    """Add a processor to parse JSON request entities:
    The default processor places the parsed data into request.json.

    Incoming request entities which match the given content_type(s) will
    be deserialized from JSON to the Python equivalent, and the result
    stored at cherrypy.request.json. The 'content_type' argument may
    be a Content-Type string or a list of allowable Content-Type strings.

    If the 'force' argument is True (the default), then entities of other
    content types will not be allowed; "415 Unsupported Media Type" is
    raised instead.

    Supply your own processor to use a custom decoder, or to handle the parsed
    data differently. The processor can be configured via
    tools.json_in.processor or via the decorator method.

    Note that the deserializer requires the client send a Content-Length
    request header, or it will raise "411 Length Required". If for any
    other reason the request entity cannot be deserialized from JSON,
    it will raise "400 Bad Request: Invalid JSON document".

    You must be using Python 2.6 or greater, or have the 'simplejson'
    package importable; otherwise, ValueError is raised during processing.
    """
    request = cherrypy.serving.request
    if isinstance(content_type, text_or_bytes):
        content_type = [content_type]

    if force:
        if debug:
            cherrypy.log('Removing body processors %s' %
                         repr(request.body.processors.keys()), 'TOOLS.JSON_IN')
        request.body.processors.clear()
        request.body.default_proc = cherrypy.HTTPError(
            415, 'Expected an entity of content type %s' %
            ', '.join(content_type))

    for ct in content_type:
        if debug:
            cherrypy.log('Adding body processor for %s' % ct, 'TOOLS.JSON_IN')
        request.body.processors[ct] = processor


def json_handler(*args, **kwargs):
    value = cherrypy.serving.request._json_inner_handler(*args, **kwargs)
    return json_encode(value)


def json_out(content_type='application/json', debug=False,
             handler=json_handler):
    """Wrap request.handler to serialize its output to JSON. Sets Content-Type.

    If the given content_type is None, the Content-Type response header
    is not set.

    Provide your own handler to use a custom encoder. For example
    cherrypy.config['tools.json_out.handler'] = <function>, or
    @json_out(handler=function).

    You must be using Python 2.6 or greater, or have the 'simplejson'
    package importable; otherwise, ValueError is raised during processing.
    """
    request = cherrypy.serving.request
    # request.handler may be set to None by e.g. the caching tool
    # to signal to all components that a response body has already
    # been attached, in which case we don't need to wrap anything.
    if request.handler is None:
        return
    if debug:
        cherrypy.log('Replacing %s with JSON handler' % request.handler,
                     'TOOLS.JSON_OUT')
    request._json_inner_handler = request.handler
    request.handler = handler
    if content_type is not None:
        if debug:
            cherrypy.log('Setting Content-Type to %s' %
                         content_type, 'TOOLS.JSON_OUT')
        cherrypy.serving.response.headers['Content-Type'] = content_type
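A small sketch of how these tools are typically switched on for a handler, using the standard CherryPy tool decorators; the handler class itself is made up.

import cherrypy

class Api(object):
    @cherrypy.expose
    @cherrypy.tools.json_in()
    @cherrypy.tools.json_out()
    def echo(self):
        # json_in has already parsed the request body into request.json;
        # json_out serializes the returned value and sets Content-Type.
        return {'received': cherrypy.request.json}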
@@ -1,142 +0,0 @@
|
||||
"""
|
||||
Platform-independent file locking. Inspired by and modeled after zc.lockfile.
|
||||
"""
|
||||
|
||||
import os
|
||||
|
||||
try:
|
||||
import msvcrt
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
try:
|
||||
import fcntl
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
|
||||
class LockError(Exception):
|
||||
|
||||
'Could not obtain a lock'
|
||||
|
||||
msg = 'Unable to lock %r'
|
||||
|
||||
def __init__(self, path):
|
||||
super(LockError, self).__init__(self.msg % path)
|
||||
|
||||
|
||||
class UnlockError(LockError):
|
||||
|
||||
'Could not release a lock'
|
||||
|
||||
msg = 'Unable to unlock %r'
|
||||
|
||||
|
||||
# first, a default, naive locking implementation
|
||||
class LockFile(object):
|
||||
|
||||
"""
|
||||
A default, naive locking implementation. Always fails if the file
|
||||
already exists.
|
||||
"""
|
||||
|
||||
def __init__(self, path):
|
||||
self.path = path
|
||||
try:
|
||||
fd = os.open(path, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
|
||||
except OSError:
|
||||
raise LockError(self.path)
|
||||
os.close(fd)
|
||||
|
||||
def release(self):
|
||||
os.remove(self.path)
|
||||
|
||||
def remove(self):
|
||||
pass
|
||||
|
||||
|
||||
class SystemLockFile(object):
|
||||
|
||||
"""
|
||||
An abstract base class for platform-specific locking.
|
||||
"""
|
||||
|
||||
def __init__(self, path):
|
||||
self.path = path
|
||||
|
||||
try:
|
||||
# Open lockfile for writing without truncation:
|
||||
self.fp = open(path, 'r+')
|
||||
except IOError:
|
||||
# If the file doesn't exist, IOError is raised; use a+ instead.
# Note that there may be a race here. Multiple processes
# could fail on the r+ open and open the file a+, but only
# one will get the lock and write a pid.
|
||||
self.fp = open(path, 'a+')
|
||||
|
||||
try:
|
||||
self._lock_file()
|
||||
except:
|
||||
self.fp.seek(1)
|
||||
self.fp.close()
|
||||
del self.fp
|
||||
raise
|
||||
|
||||
self.fp.write(' %s\n' % os.getpid())
|
||||
self.fp.truncate()
|
||||
self.fp.flush()
|
||||
|
||||
def release(self):
|
||||
if not hasattr(self, 'fp'):
|
||||
return
|
||||
self._unlock_file()
|
||||
self.fp.close()
|
||||
del self.fp
|
||||
|
||||
def remove(self):
|
||||
"""
|
||||
Attempt to remove the file
|
||||
"""
|
||||
try:
|
||||
os.remove(self.path)
|
||||
except:
|
||||
pass
|
||||
|
||||
def _unlock_file(self):
|
||||
"""Attempt to obtain the lock on self.fp. Raise UnlockError if not
|
||||
released."""
|
||||
|
||||
|
||||
class WindowsLockFile(SystemLockFile):
|
||||
|
||||
def _lock_file(self):
|
||||
# Lock just the first byte
|
||||
try:
|
||||
msvcrt.locking(self.fp.fileno(), msvcrt.LK_NBLCK, 1)
|
||||
except IOError:
|
||||
raise LockError(self.fp.name)
|
||||
|
||||
def _unlock_file(self):
|
||||
try:
|
||||
self.fp.seek(0)
|
||||
msvcrt.locking(self.fp.fileno(), msvcrt.LK_UNLCK, 1)
|
||||
except IOError:
|
||||
raise UnlockError(self.fp.name)
|
||||
|
||||
if 'msvcrt' in globals():
|
||||
LockFile = WindowsLockFile
|
||||
|
||||
|
||||
class UnixLockFile(SystemLockFile):
|
||||
|
||||
def _lock_file(self):
|
||||
flags = fcntl.LOCK_EX | fcntl.LOCK_NB
|
||||
try:
|
||||
fcntl.flock(self.fp.fileno(), flags)
|
||||
except IOError:
|
||||
raise LockError(self.fp.name)
|
||||
|
||||
# no need to implement _unlock_file, it will be unlocked on close()
|
||||
|
||||
if 'fcntl' in globals():
|
||||
LockFile = UnixLockFile
|
||||
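Typical use of the resulting LockFile class, whichever platform-specific variant it resolves to; the path is just an example.

lock = LockFile('/tmp/example.lock')    # raises LockError if already locked
try:
    pass  # ... work performed while holding the lock ...
finally:
    lock.release()
    lock.remove()                       # best-effort removal of the lock file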
@@ -1,47 +0,0 @@
import datetime


class NeverExpires(object):
    def expired(self):
        return False


class Timer(object):
    """
    A simple timer that will indicate when an expiration time has passed.
    """
    def __init__(self, expiration):
        'Create a timer that expires at `expiration` (UTC datetime)'
        self.expiration = expiration

    @classmethod
    def after(cls, elapsed):
        """
        Return a timer that will expire after `elapsed` passes.
        """
        return cls(datetime.datetime.utcnow() + elapsed)

    def expired(self):
        return datetime.datetime.utcnow() >= self.expiration


class LockTimeout(Exception):
    'An exception when a lock could not be acquired before a timeout period'


class LockChecker(object):
    """
    Keep track of the time and detect if a timeout has expired
    """
    def __init__(self, session_id, timeout):
        self.session_id = session_id
        if timeout:
            self.timer = Timer.after(timeout)
        else:
            self.timer = NeverExpires()

    def expired(self):
        if self.timer.expired():
            raise LockTimeout(
                'Timeout acquiring lock for %(session_id)s' % vars(self))
        return False
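For example, guarding a session lock with a 30-second timeout (values made up):

import datetime
checker = LockChecker('session-abc123', datetime.timedelta(seconds=30))
checker.expired()   # False until 30s have elapsed; afterwards raises LockTimeout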
@@ -1,217 +0,0 @@
|
||||
"""Profiler tools for CherryPy.
|
||||
|
||||
CherryPy users
|
||||
==============
|
||||
|
||||
You can profile any of your pages as follows::
|
||||
|
||||
from cherrypy.lib import profiler
|
||||
|
||||
class Root:
|
||||
p = profiler.Profiler("/path/to/profile/dir")
|
||||
|
||||
@cherrypy.expose
|
||||
def index(self):
|
||||
self.p.run(self._index)
|
||||
|
||||
def _index(self):
|
||||
return "Hello, world!"
|
||||
|
||||
cherrypy.tree.mount(Root())
|
||||
|
||||
You can also turn on profiling for all requests
|
||||
using the ``make_app`` function as WSGI middleware.
|
||||
|
||||
CherryPy developers
|
||||
===================
|
||||
|
||||
This module can be used whenever you make changes to CherryPy,
|
||||
to get a quick sanity-check on overall CP performance. Use the
|
||||
``--profile`` flag when running the test suite. Then, use the ``serve()``
|
||||
function to browse the results in a web browser. If you run this
|
||||
module from the command line, it will call ``serve()`` for you.
|
||||
|
||||
"""
|
||||
|
||||
import io
|
||||
import os
|
||||
import os.path
|
||||
import sys
|
||||
import warnings
|
||||
|
||||
import cherrypy
|
||||
|
||||
|
||||
try:
|
||||
import profile
|
||||
import pstats
|
||||
|
||||
def new_func_strip_path(func_name):
|
||||
"""Make profiler output more readable by adding `__init__` modules' parents
|
||||
"""
|
||||
filename, line, name = func_name
|
||||
if filename.endswith('__init__.py'):
|
||||
return os.path.basename(filename[:-12]) + filename[-12:], line, name
|
||||
return os.path.basename(filename), line, name
|
||||
|
||||
pstats.func_strip_path = new_func_strip_path
|
||||
except ImportError:
|
||||
profile = None
|
||||
pstats = None
|
||||
|
||||
|
||||
_count = 0
|
||||
|
||||
|
||||
class Profiler(object):
|
||||
|
||||
def __init__(self, path=None):
|
||||
if not path:
|
||||
path = os.path.join(os.path.dirname(__file__), 'profile')
|
||||
self.path = path
|
||||
if not os.path.exists(path):
|
||||
os.makedirs(path)
|
||||
|
||||
def run(self, func, *args, **params):
|
||||
"""Dump profile data into self.path."""
|
||||
global _count
|
||||
c = _count = _count + 1
|
||||
path = os.path.join(self.path, 'cp_%04d.prof' % c)
|
||||
prof = profile.Profile()
|
||||
result = prof.runcall(func, *args, **params)
|
||||
prof.dump_stats(path)
|
||||
return result
|
||||
|
||||
def statfiles(self):
|
||||
""":rtype: list of available profiles.
|
||||
"""
|
||||
return [f for f in os.listdir(self.path)
|
||||
if f.startswith('cp_') and f.endswith('.prof')]
|
||||
|
||||
def stats(self, filename, sortby='cumulative'):
|
||||
""":rtype stats(index): output of print_stats() for the given profile.
|
||||
"""
|
||||
sio = io.StringIO()
|
||||
if sys.version_info >= (2, 5):
|
||||
s = pstats.Stats(os.path.join(self.path, filename), stream=sio)
|
||||
s.strip_dirs()
|
||||
s.sort_stats(sortby)
|
||||
s.print_stats()
|
||||
else:
|
||||
# pstats.Stats before Python 2.5 didn't take a 'stream' arg,
|
||||
# but just printed to stdout. So re-route stdout.
|
||||
s = pstats.Stats(os.path.join(self.path, filename))
|
||||
s.strip_dirs()
|
||||
s.sort_stats(sortby)
|
||||
oldout = sys.stdout
|
||||
try:
|
||||
sys.stdout = sio
|
||||
s.print_stats()
|
||||
finally:
|
||||
sys.stdout = oldout
|
||||
response = sio.getvalue()
|
||||
sio.close()
|
||||
return response
|
||||
|
||||
@cherrypy.expose
|
||||
def index(self):
|
||||
return """<html>
|
||||
<head><title>CherryPy profile data</title></head>
|
||||
<frameset cols='200, 1*'>
|
||||
<frame src='menu' />
|
||||
<frame name='main' src='' />
|
||||
</frameset>
|
||||
</html>
|
||||
"""
|
||||
|
||||
@cherrypy.expose
|
||||
def menu(self):
|
||||
yield '<h2>Profiling runs</h2>'
|
||||
yield '<p>Click on one of the runs below to see profiling data.</p>'
|
||||
runs = self.statfiles()
|
||||
runs.sort()
|
||||
for i in runs:
|
||||
yield "<a href='report?filename=%s' target='main'>%s</a><br />" % (
|
||||
i, i)
|
||||
|
||||
@cherrypy.expose
|
||||
def report(self, filename):
|
||||
cherrypy.response.headers['Content-Type'] = 'text/plain'
|
||||
return self.stats(filename)
|
||||
|
||||
|
||||
class ProfileAggregator(Profiler):
|
||||
|
||||
def __init__(self, path=None):
|
||||
Profiler.__init__(self, path)
|
||||
global _count
|
||||
self.count = _count = _count + 1
|
||||
self.profiler = profile.Profile()
|
||||
|
||||
def run(self, func, *args, **params):
|
||||
path = os.path.join(self.path, 'cp_%04d.prof' % self.count)
|
||||
result = self.profiler.runcall(func, *args, **params)
|
||||
self.profiler.dump_stats(path)
|
||||
return result
|
||||
|
||||
|
||||
class make_app:
|
||||
|
||||
def __init__(self, nextapp, path=None, aggregate=False):
|
||||
"""Make a WSGI middleware app which wraps 'nextapp' with profiling.
|
||||
|
||||
nextapp
|
||||
the WSGI application to wrap, usually an instance of
|
||||
cherrypy.Application.
|
||||
|
||||
path
|
||||
where to dump the profiling output.
|
||||
|
||||
aggregate
|
||||
if True, profile data for all HTTP requests will go in
|
||||
a single file. If False (the default), each HTTP request will
|
||||
dump its profile data into a separate file.
|
||||
|
||||
"""
|
||||
if profile is None or pstats is None:
|
||||
msg = ('Your installation of Python does not have a profile '
|
||||
"module. If you're on Debian, try "
|
||||
'`sudo apt-get install python-profiler`. '
|
||||
'See http://www.cherrypy.org/wiki/ProfilingOnDebian '
|
||||
'for details.')
|
||||
warnings.warn(msg)
|
||||
|
||||
self.nextapp = nextapp
|
||||
self.aggregate = aggregate
|
||||
if aggregate:
|
||||
self.profiler = ProfileAggregator(path)
|
||||
else:
|
||||
self.profiler = Profiler(path)
|
||||
|
||||
def __call__(self, environ, start_response):
|
||||
def gather():
|
||||
result = []
|
||||
for line in self.nextapp(environ, start_response):
|
||||
result.append(line)
|
||||
return result
|
||||
return self.profiler.run(gather)
|
||||
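Used as WSGI middleware, a sketch along these lines wraps an existing application; Root and the profile directory are placeholders.

# app = make_app(cherrypy.Application(Root()), path='/tmp/profile',
#                aggregate=False)
# Each request through 'app' dumps a cp_NNNN.prof file into that directory;
# serve('/tmp/profile') then lets you browse the collected runs.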
|
||||
|
||||
def serve(path=None, port=8080):
|
||||
if profile is None or pstats is None:
|
||||
msg = ('Your installation of Python does not have a profile module. '
|
||||
"If you're on Debian, try "
|
||||
'`sudo apt-get install python-profiler`. '
|
||||
'See http://www.cherrypy.org/wiki/ProfilingOnDebian '
|
||||
'for details.')
|
||||
warnings.warn(msg)
|
||||
|
||||
cherrypy.config.update({'server.socket_port': int(port),
|
||||
'server.thread_pool': 10,
|
||||
'environment': 'production',
|
||||
})
|
||||
cherrypy.quickstart(Profiler(path))
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
serve(*tuple(sys.argv[1:]))
|
||||
@@ -1,534 +0,0 @@
|
||||
"""Generic configuration system using unrepr.
|
||||
|
||||
Configuration data may be supplied as a Python dictionary, as a filename,
|
||||
or as an open file object. When you supply a filename or file, Python's
|
||||
builtin ConfigParser is used (with some extensions).
|
||||
|
||||
Namespaces
|
||||
----------
|
||||
|
||||
Configuration keys are separated into namespaces by the first "." in the key.
|
||||
|
||||
The only key that cannot exist in a namespace is the "environment" entry.
|
||||
This special entry 'imports' other config entries from a template stored in
|
||||
the Config.environments dict.
|
||||
|
||||
You can define your own namespaces to be called when new config is merged
|
||||
by adding a named handler to Config.namespaces. The name can be any string,
|
||||
and the handler must be either a callable or a context manager.
|
||||
"""
|
||||
|
||||
try:
|
||||
# Python 3.0+
|
||||
from configparser import ConfigParser
|
||||
except ImportError:
|
||||
from ConfigParser import ConfigParser
|
||||
|
||||
try:
|
||||
text_or_bytes
|
||||
except NameError:
|
||||
text_or_bytes = str
|
||||
|
||||
try:
|
||||
# Python 3
|
||||
import builtins
|
||||
except ImportError:
|
||||
# Python 2
|
||||
import __builtin__ as builtins
|
||||
|
||||
import operator as _operator
|
||||
import sys
|
||||
|
||||
|
||||
def as_dict(config):
|
||||
"""Return a dict from 'config' whether it is a dict, file, or filename."""
|
||||
if isinstance(config, text_or_bytes):
|
||||
config = Parser().dict_from_file(config)
|
||||
elif hasattr(config, 'read'):
|
||||
config = Parser().dict_from_file(config)
|
||||
return config
|
||||
|
||||
|
||||
class NamespaceSet(dict):
|
||||
|
||||
"""A dict of config namespace names and handlers.
|
||||
|
||||
Each config entry should begin with a namespace name; the corresponding
|
||||
namespace handler will be called once for each config entry in that
|
||||
namespace, and will be passed two arguments: the config key (with the
|
||||
namespace removed) and the config value.
|
||||
|
||||
Namespace handlers may be any Python callable; they may also be
|
||||
Python 2.5-style 'context managers', in which case their __enter__
|
||||
method should return a callable to be used as the handler.
|
||||
See cherrypy.tools (the Toolbox class) for an example.
|
||||
"""
|
||||
|
||||
def __call__(self, config):
|
||||
"""Iterate through config and pass it to each namespace handler.
|
||||
|
||||
config
|
||||
A flat dict, where keys use dots to separate
|
||||
namespaces, and values are arbitrary.
|
||||
|
||||
The first name in each config key is used to look up the corresponding
|
||||
namespace handler. For example, a config entry of {'tools.gzip.on': v}
|
||||
will call the 'tools' namespace handler with the args: ('gzip.on', v)
|
||||
"""
|
||||
# Separate the given config into namespaces
|
||||
ns_confs = {}
|
||||
for k in config:
|
||||
if '.' in k:
|
||||
ns, name = k.split('.', 1)
|
||||
bucket = ns_confs.setdefault(ns, {})
|
||||
bucket[name] = config[k]
|
||||
|
||||
# I chose __enter__ and __exit__ so someday this could be
|
||||
# rewritten using Python 2.5's 'with' statement:
|
||||
# for ns, handler in self.iteritems():
|
||||
# with handler as callable:
|
||||
# for k, v in ns_confs.get(ns, {}).iteritems():
|
||||
# callable(k, v)
|
||||
for ns, handler in self.items():
|
||||
exit = getattr(handler, '__exit__', None)
|
||||
if exit:
|
||||
callable = handler.__enter__()
|
||||
no_exc = True
|
||||
try:
|
||||
try:
|
||||
for k, v in ns_confs.get(ns, {}).items():
|
||||
callable(k, v)
|
||||
except:
|
||||
# The exceptional case is handled here
|
||||
no_exc = False
|
||||
if exit is None:
|
||||
raise
|
||||
if not exit(*sys.exc_info()):
|
||||
raise
|
||||
# The exception is swallowed if exit() returns true
|
||||
finally:
|
||||
# The normal and non-local-goto cases are handled here
|
||||
if no_exc and exit:
|
||||
exit(None, None, None)
|
||||
else:
|
||||
for k, v in ns_confs.get(ns, {}).items():
|
||||
handler(k, v)
|
||||
|
||||
def __repr__(self):
|
||||
return '%s.%s(%s)' % (self.__module__, self.__class__.__name__,
|
||||
dict.__repr__(self))
|
||||
|
||||
def __copy__(self):
|
||||
newobj = self.__class__()
|
||||
newobj.update(self)
|
||||
return newobj
|
||||
copy = __copy__
|
||||
|
||||
|
||||
class Config(dict):
|
||||
|
||||
"""A dict-like set of configuration data, with defaults and namespaces.
|
||||
|
||||
May take a file, filename, or dict.
|
||||
"""
|
||||
|
||||
defaults = {}
|
||||
environments = {}
|
||||
namespaces = NamespaceSet()
|
||||
|
||||
def __init__(self, file=None, **kwargs):
|
||||
self.reset()
|
||||
if file is not None:
|
||||
self.update(file)
|
||||
if kwargs:
|
||||
self.update(kwargs)
|
||||
|
||||
def reset(self):
|
||||
"""Reset self to default values."""
|
||||
self.clear()
|
||||
dict.update(self, self.defaults)
|
||||
|
||||
def update(self, config):
|
||||
"""Update self from a dict, file or filename."""
|
||||
if isinstance(config, text_or_bytes):
|
||||
# Filename
|
||||
config = Parser().dict_from_file(config)
|
||||
elif hasattr(config, 'read'):
|
||||
# Open file object
|
||||
config = Parser().dict_from_file(config)
|
||||
else:
|
||||
config = config.copy()
|
||||
self._apply(config)
|
||||
|
||||
def _apply(self, config):
|
||||
"""Update self from a dict."""
|
||||
which_env = config.get('environment')
|
||||
if which_env:
|
||||
env = self.environments[which_env]
|
||||
for k in env:
|
||||
if k not in config:
|
||||
config[k] = env[k]
|
||||
|
||||
dict.update(self, config)
|
||||
self.namespaces(config)
|
||||
|
||||
def __setitem__(self, k, v):
|
||||
dict.__setitem__(self, k, v)
|
||||
self.namespaces({k: v})
|
||||
|
||||
|
||||
class Parser(ConfigParser):
|
||||
|
||||
"""Sub-class of ConfigParser that keeps the case of options and that
|
||||
raises an exception if the file cannot be read.
|
||||
"""
|
||||
|
||||
def optionxform(self, optionstr):
|
||||
return optionstr
|
||||
|
||||
def read(self, filenames):
|
||||
if isinstance(filenames, text_or_bytes):
|
||||
filenames = [filenames]
|
||||
for filename in filenames:
|
||||
# try:
|
||||
# fp = open(filename)
|
||||
# except IOError:
|
||||
# continue
|
||||
fp = open(filename)
|
||||
try:
|
||||
self._read(fp, filename)
|
||||
finally:
|
||||
fp.close()
|
||||
|
||||
def as_dict(self, raw=False, vars=None):
|
||||
"""Convert an INI file to a dictionary"""
|
||||
# Load INI file into a dict
|
||||
result = {}
|
||||
for section in self.sections():
|
||||
if section not in result:
|
||||
result[section] = {}
|
||||
for option in self.options(section):
|
||||
value = self.get(section, option, raw=raw, vars=vars)
|
||||
try:
|
||||
value = unrepr(value)
|
||||
except Exception:
|
||||
x = sys.exc_info()[1]
|
||||
msg = ('Config error in section: %r, option: %r, '
|
||||
'value: %r. Config values must be valid Python.' %
|
||||
(section, option, value))
|
||||
raise ValueError(msg, x.__class__.__name__, x.args)
|
||||
result[section][option] = value
|
||||
return result
|
||||
|
||||
def dict_from_file(self, file):
|
||||
if hasattr(file, 'read'):
|
||||
self.readfp(file)
|
||||
else:
|
||||
self.read(file)
|
||||
return self.as_dict()
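
# Illustrative sketch (not part of the original source): given an INI file like
#
#     [servers]
#     host = "news.example.com"
#     connections = 8
#
# Parser().dict_from_file(path) returns
#
#     {'servers': {'host': 'news.example.com', 'connections': 8}}
#
# because every option value is passed through unrepr() below and must
# therefore be a valid Python literal or expression.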
|
||||
|
||||
|
||||
# public domain "unrepr" implementation, found on the web and then improved.
|
||||
|
||||
|
||||
class _Builder2:
|
||||
|
||||
def build(self, o):
|
||||
m = getattr(self, 'build_' + o.__class__.__name__, None)
|
||||
if m is None:
|
||||
raise TypeError('unrepr does not recognize %s' %
|
||||
repr(o.__class__.__name__))
|
||||
return m(o)
|
||||
|
||||
def astnode(self, s):
|
||||
"""Return a Python2 ast Node compiled from a string."""
|
||||
try:
|
||||
import compiler
|
||||
except ImportError:
|
||||
# Fallback to eval when compiler package is not available,
|
||||
# e.g. IronPython 1.0.
|
||||
return eval(s)
|
||||
|
||||
p = compiler.parse('__tempvalue__ = ' + s)
|
||||
return p.getChildren()[1].getChildren()[0].getChildren()[1]
|
||||
|
||||
def build_Subscript(self, o):
|
||||
expr, flags, subs = o.getChildren()
|
||||
expr = self.build(expr)
|
||||
subs = self.build(subs)
|
||||
return expr[subs]
|
||||
|
||||
def build_CallFunc(self, o):
|
||||
children = o.getChildren()
|
||||
# Build callee from first child
|
||||
callee = self.build(children[0])
|
||||
# Build args and kwargs from remaining children
|
||||
args = []
|
||||
kwargs = {}
|
||||
for child in children[1:]:
|
||||
class_name = child.__class__.__name__
|
||||
# None is ignored
|
||||
if class_name == 'NoneType':
|
||||
continue
|
||||
# Keywords become kwargs
|
||||
if class_name == 'Keyword':
|
||||
kwargs.update(self.build(child))
|
||||
# Everything else becomes args
|
||||
else:
|
||||
args.append(self.build(child))
|
||||
|
||||
return callee(*args, **kwargs)
|
||||
|
||||
def build_Keyword(self, o):
|
||||
key, value_obj = o.getChildren()
|
||||
value = self.build(value_obj)
|
||||
kw_dict = {key: value}
|
||||
return kw_dict
|
||||
|
||||
def build_List(self, o):
|
||||
return map(self.build, o.getChildren())
|
||||
|
||||
def build_Const(self, o):
|
||||
return o.value
|
||||
|
||||
def build_Dict(self, o):
|
||||
d = {}
|
||||
i = iter(map(self.build, o.getChildren()))
|
||||
for el in i:
|
||||
d[el] = i.next()
|
||||
return d
|
||||
|
||||
def build_Tuple(self, o):
|
||||
return tuple(self.build_List(o))
|
||||
|
||||
def build_Name(self, o):
|
||||
name = o.name
|
||||
if name == 'None':
|
||||
return None
|
||||
if name == 'True':
|
||||
return True
|
||||
if name == 'False':
|
||||
return False
|
||||
|
||||
# See if the Name is a package or module. If it is, import it.
|
||||
try:
|
||||
return modules(name)
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
# See if the Name is in builtins.
|
||||
try:
|
||||
return getattr(builtins, name)
|
||||
except AttributeError:
|
||||
pass
|
||||
|
||||
raise TypeError('unrepr could not resolve the name %s' % repr(name))
|
||||
|
||||
def build_Add(self, o):
|
||||
left, right = map(self.build, o.getChildren())
|
||||
return left + right
|
||||
|
||||
def build_Mul(self, o):
|
||||
left, right = map(self.build, o.getChildren())
|
||||
return left * right
|
||||
|
||||
def build_Getattr(self, o):
|
||||
parent = self.build(o.expr)
|
||||
return getattr(parent, o.attrname)
|
||||
|
||||
def build_NoneType(self, o):
|
||||
return None
|
||||
|
||||
def build_UnarySub(self, o):
|
||||
return -self.build(o.getChildren()[0])
|
||||
|
||||
def build_UnaryAdd(self, o):
|
||||
return self.build(o.getChildren()[0])
|
||||
|
||||
|
||||
class _Builder3:
|
||||
|
||||
def build(self, o):
|
||||
m = getattr(self, 'build_' + o.__class__.__name__, None)
|
||||
if m is None:
|
||||
raise TypeError('unrepr does not recognize %s' %
|
||||
repr(o.__class__.__name__))
|
||||
return m(o)
|
||||
|
||||
def astnode(self, s):
|
||||
"""Return a Python3 ast Node compiled from a string."""
|
||||
try:
|
||||
import ast
|
||||
except ImportError:
|
||||
# Fallback to eval when ast package is not available,
|
||||
# e.g. IronPython 1.0.
|
||||
return eval(s)
|
||||
|
||||
p = ast.parse('__tempvalue__ = ' + s)
|
||||
return p.body[0].value
|
||||
|
||||
def build_Subscript(self, o):
|
||||
return self.build(o.value)[self.build(o.slice)]
|
||||
|
||||
def build_Index(self, o):
|
||||
return self.build(o.value)
|
||||
|
||||
def _build_call35(self, o):
|
||||
"""
|
||||
Workaround for the Python 3.5 _ast.Call signature; docs found here:
https://greentreesnakes.readthedocs.org/en/latest/nodes.html
|
||||
"""
|
||||
import ast
|
||||
callee = self.build(o.func)
|
||||
args = []
|
||||
if o.args is not None:
|
||||
for a in o.args:
|
||||
if isinstance(a, ast.Starred):
|
||||
args.append(self.build(a.value))
|
||||
else:
|
||||
args.append(self.build(a))
|
||||
kwargs = {}
|
||||
for kw in o.keywords:
|
||||
if kw.arg is None:  # double asterisk `**`
|
||||
rst = self.build(kw.value)
|
||||
if not isinstance(rst, dict):
|
||||
raise TypeError('Invalid argument for call. '
                'Must be a mapping object.')
|
||||
# give preference to the keys set directly from arg=value
|
||||
for k, v in rst.items():
|
||||
if k not in kwargs:
|
||||
kwargs[k] = v
|
||||
else: # defined on the call as: arg=value
|
||||
kwargs[kw.arg] = self.build(kw.value)
|
||||
return callee(*args, **kwargs)
|
||||
|
||||
def build_Call(self, o):
|
||||
if sys.version_info >= (3, 5):
|
||||
return self._build_call35(o)
|
||||
|
||||
callee = self.build(o.func)
|
||||
|
||||
if o.args is None:
|
||||
args = ()
|
||||
else:
|
||||
args = tuple([self.build(a) for a in o.args])
|
||||
|
||||
if o.starargs is None:
|
||||
starargs = ()
|
||||
else:
|
||||
starargs = tuple(self.build(o.starargs))
|
||||
|
||||
if o.kwargs is None:
|
||||
kwargs = {}
|
||||
else:
|
||||
kwargs = self.build(o.kwargs)
|
||||
if o.keywords is not None: # direct a=b keywords
|
||||
for kw in o.keywords:
|
||||
# direct keywords (arg=value) take preference over **kwargs
|
||||
kwargs[kw.arg] = self.build(kw.value)
|
||||
return callee(*(args + starargs), **kwargs)
|
||||
|
||||
def build_List(self, o):
|
||||
return list(map(self.build, o.elts))
|
||||
|
||||
def build_Str(self, o):
|
||||
return o.s
|
||||
|
||||
def build_Num(self, o):
|
||||
return o.n
|
||||
|
||||
def build_Dict(self, o):
|
||||
return dict([(self.build(k), self.build(v))
|
||||
for k, v in zip(o.keys, o.values)])
|
||||
|
||||
def build_Tuple(self, o):
|
||||
return tuple(self.build_List(o))
|
||||
|
||||
def build_Name(self, o):
|
||||
name = o.id
|
||||
if name == 'None':
|
||||
return None
|
||||
if name == 'True':
|
||||
return True
|
||||
if name == 'False':
|
||||
return False
|
||||
|
||||
# See if the Name is a package or module. If it is, import it.
|
||||
try:
|
||||
return modules(name)
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
# See if the Name is in builtins.
|
||||
try:
|
||||
import builtins
|
||||
return getattr(builtins, name)
|
||||
except AttributeError:
|
||||
pass
|
||||
|
||||
raise TypeError('unrepr could not resolve the name %s' % repr(name))
|
||||
|
||||
def build_NameConstant(self, o):
|
||||
return o.value
|
||||
|
||||
def build_UnaryOp(self, o):
|
||||
op, operand = map(self.build, [o.op, o.operand])
|
||||
return op(operand)
|
||||
|
||||
def build_BinOp(self, o):
|
||||
left, op, right = map(self.build, [o.left, o.op, o.right])
|
||||
return op(left, right)
|
||||
|
||||
def build_Add(self, o):
|
||||
return _operator.add
|
||||
|
||||
def build_Mult(self, o):
|
||||
return _operator.mul
|
||||
|
||||
def build_USub(self, o):
|
||||
return _operator.neg
|
||||
|
||||
def build_Attribute(self, o):
|
||||
parent = self.build(o.value)
|
||||
return getattr(parent, o.attr)
|
||||
|
||||
def build_NoneType(self, o):
|
||||
return None
|
||||
|
||||
|
||||
def unrepr(s):
|
||||
"""Return a Python object compiled from a string."""
|
||||
if not s:
|
||||
return s
|
||||
if sys.version_info < (3, 0):
|
||||
b = _Builder2()
|
||||
else:
|
||||
b = _Builder3()
|
||||
obj = b.astnode(s)
|
||||
return b.build(obj)
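
# Illustrative sketch (not part of the original source): unrepr() turns the
# string values read from an INI file into Python objects, for example
#
#     unrepr("1800")                    -> 1800
#     unrepr("[1, 2, 3]")               -> [1, 2, 3]
#     unrepr("{'on': True}")            -> {'on': True}
#     unrepr("os.path.join('a', 'b')")  -> 'a/b' (on POSIX; names are resolved
#                                         as modules or builtins)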
|
||||
|
||||
|
||||
def modules(modulePath):
|
||||
"""Load a module and retrieve a reference to that module."""
|
||||
__import__(modulePath)
|
||||
return sys.modules[modulePath]
|
||||
|
||||
|
||||
def attributes(full_attribute_name):
|
||||
"""Load a module and retrieve an attribute of that module."""
|
||||
|
||||
# Parse out the path, module, and attribute
|
||||
last_dot = full_attribute_name.rfind('.')
|
||||
attr_name = full_attribute_name[last_dot + 1:]
|
||||
mod_path = full_attribute_name[:last_dot]
|
||||
|
||||
mod = modules(mod_path)
|
||||
# Let an AttributeError propagate outward.
|
||||
try:
|
||||
attr = getattr(mod, attr_name)
|
||||
except AttributeError:
|
||||
raise AttributeError("'%s' object has no attribute '%s'"
|
||||
% (mod_path, attr_name))
|
||||
|
||||
# Return a reference to the attribute.
|
||||
return attr
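
# Illustrative sketch (not part of the original source): attributes() resolves
# a dotted path to an object, importing the module part first, e.g.
#
#     attributes('logging.handlers.RotatingFileHandler')
#
# imports 'logging.handlers' and returns the RotatingFileHandler class.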
|
||||
@@ -1,883 +0,0 @@
|
||||
"""Session implementation for CherryPy.
|
||||
|
||||
You need to edit your config file to use sessions. Here's an example::
|
||||
|
||||
[/]
|
||||
tools.sessions.on = True
|
||||
tools.sessions.storage_class = cherrypy.lib.sessions.FileSession
|
||||
tools.sessions.storage_path = "/home/site/sessions"
|
||||
tools.sessions.timeout = 60
|
||||
|
||||
This sets the session to be stored in files in the directory
|
||||
/home/site/sessions, and the session timeout to 60 minutes. If you omit
|
||||
``storage_class``, the sessions will be saved in RAM.
|
||||
``tools.sessions.on`` is the only required line for working sessions,
|
||||
the rest are optional.
|
||||
|
||||
By default, the session ID is passed in a cookie, so the client's browser must
|
||||
have cookies enabled for your site.
|
||||
|
||||
To set data for the current session, use
|
||||
``cherrypy.session['fieldname'] = 'fieldvalue'``;
|
||||
to get data use ``cherrypy.session.get('fieldname')``.
|
||||
|
||||
================
|
||||
Locking sessions
|
||||
================
|
||||
|
||||
By default, the ``'locking'`` mode of sessions is ``'implicit'``, which means
|
||||
the session is locked early and unlocked late. Be mindful of this default mode
|
||||
for any requests that take a long time to process (streaming responses,
|
||||
expensive calculations, database lookups, API calls, etc), as other concurrent
|
||||
requests that also utilize sessions will hang until the session is unlocked.
|
||||
|
||||
If you want to control when the session data is locked and unlocked,
|
||||
set ``tools.sessions.locking = 'explicit'``. Then call
|
||||
``cherrypy.session.acquire_lock()`` and ``cherrypy.session.release_lock()``.
|
||||
Regardless of which mode you use, the session is guaranteed to be unlocked when
|
||||
the request is complete.
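
For instance, with explicit locking enabled (an illustrative sketch, not part
of the original documentation)::

    def index(self):
        cherrypy.session.acquire_lock()
        try:
            count = cherrypy.session.get('count', 0) + 1
            cherrypy.session['count'] = count
        finally:
            cherrypy.session.release_lock()
        return 'Visit number %d' % count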

=================
Expiring Sessions
=================

You can force a session to expire with :func:`cherrypy.lib.sessions.expire`.
Simply call that function at the point you want the session to expire, and it
will cause the session cookie to expire client-side.

===========================
Session Fixation Protection
===========================

If CherryPy receives, via a request cookie, a session id that it does not
recognize, it will reject that id and create a new one to return in the
response cookie. This `helps prevent session fixation attacks
<http://en.wikipedia.org/wiki/Session_fixation#Regenerate_SID_on_each_request>`_.
However, CherryPy "recognizes" a session id by looking up the saved session
data for that id. Therefore, if you never save any session data,
**you will get a new session id for every request**.

================
Sharing Sessions
================

If you run multiple instances of CherryPy (for example via mod_python behind
Apache prefork), you most likely cannot use the RAM session backend, since each
instance of CherryPy will have its own memory space. Use a different backend
instead, and verify that all instances are pointing at the same file or db
location. Alternatively, you might try a load balancer which makes sessions
"sticky". Google is your friend there.

================
Expiration Dates
================

The response cookie will possess an expiration date to inform the client at
which point to stop sending the cookie back in requests. If the server time
and client time differ, expect sessions to be unreliable. **Make sure the
system time of your server is accurate**.

CherryPy defaults to a 60-minute session timeout, which also applies to the
cookie which is sent to the client. Unfortunately, some versions of Safari
("4 public beta" on Windows XP at least) appear to have a bug in their parsing
of the GMT expiration date--they appear to interpret the date as one hour in
the past. Sixty minutes minus one hour is pretty close to zero, so you may
experience this bug as a new session id for every request, unless the requests
are less than one second apart. To fix, try increasing the session.timeout.

On the other extreme, some users report Firefox sending cookies after their
expiration date, although this was on a system with an inaccurate system time.
Maybe FF doesn't trust system time.
"""
|
||||
import sys
|
||||
import datetime
|
||||
import os
|
||||
import time
|
||||
import threading
|
||||
|
||||
import cherrypy
|
||||
from cherrypy._cpcompat import copyitems, pickle, random20
|
||||
from cherrypy.lib import httputil
|
||||
from cherrypy.lib import lockfile
|
||||
from cherrypy.lib import locking
|
||||
from cherrypy.lib import is_iterator
|
||||
|
||||
missing = object()
|
||||
|
||||
|
||||
class Session(object):
|
||||
|
||||
"""A CherryPy dict-like Session object (one per request)."""
|
||||
|
||||
_id = None
|
||||
|
||||
id_observers = None
|
||||
"A list of callbacks to which to pass new id's."
|
||||
|
||||
def _get_id(self):
|
||||
return self._id
|
||||
|
||||
def _set_id(self, value):
|
||||
self._id = value
|
||||
for o in self.id_observers:
|
||||
o(value)
|
||||
id = property(_get_id, _set_id, doc='The current session ID.')
|
||||
|
||||
timeout = 60
|
||||
'Number of minutes after which to delete session data.'
|
||||
|
||||
locked = False
|
||||
"""
|
||||
If True, this session instance has exclusive read/write access
|
||||
to session data."""
|
||||
|
||||
loaded = False
|
||||
"""
|
||||
If True, data has been retrieved from storage. This should happen
|
||||
automatically on the first attempt to access session data."""
|
||||
|
||||
clean_thread = None
|
||||
'Class-level Monitor which calls self.clean_up.'
|
||||
|
||||
clean_freq = 5
|
||||
'The poll rate for expired session cleanup in minutes.'
|
||||
|
||||
originalid = None
|
||||
'The session id passed by the client. May be missing or unsafe.'
|
||||
|
||||
missing = False
|
||||
'True if the session requested by the client did not exist.'
|
||||
|
||||
regenerated = False
|
||||
"""
|
||||
True if the application called session.regenerate(). This is not set by
|
||||
internal calls to regenerate the session id."""
|
||||
|
||||
debug = False
|
||||
'If True, log debug information.'
|
||||
|
||||
# --------------------- Session management methods --------------------- #
|
||||
|
||||
def __init__(self, id=None, **kwargs):
|
||||
self.id_observers = []
|
||||
self._data = {}
|
||||
|
||||
for k, v in kwargs.items():
|
||||
setattr(self, k, v)
|
||||
|
||||
self.originalid = id
|
||||
self.missing = False
|
||||
if id is None:
|
||||
if self.debug:
|
||||
cherrypy.log('No id given; making a new one', 'TOOLS.SESSIONS')
|
||||
self._regenerate()
|
||||
else:
|
||||
self.id = id
|
||||
if self._exists():
|
||||
if self.debug:
|
||||
cherrypy.log('Set id to %s.' % id, 'TOOLS.SESSIONS')
|
||||
else:
|
||||
if self.debug:
|
||||
cherrypy.log('Expired or malicious session %r; '
|
||||
'making a new one' % id, 'TOOLS.SESSIONS')
|
||||
# Expired or malicious session. Make a new one.
|
||||
# See https://github.com/cherrypy/cherrypy/issues/709.
|
||||
self.id = None
|
||||
self.missing = True
|
||||
self._regenerate()
|
||||
|
||||
def now(self):
|
||||
"""Generate the session specific concept of 'now'.
|
||||
|
||||
Other session providers can override this to use alternative,
|
||||
possibly timezone aware, versions of 'now'.
|
||||
"""
|
||||
return datetime.datetime.now()
|
||||
|
||||
def regenerate(self):
|
||||
"""Replace the current session (with a new id)."""
|
||||
self.regenerated = True
|
||||
self._regenerate()
|
||||
|
||||
def _regenerate(self):
|
||||
if self.id is not None:
|
||||
if self.debug:
|
||||
cherrypy.log(
|
||||
'Deleting the existing session %r before '
|
||||
'regeneration.' % self.id,
|
||||
'TOOLS.SESSIONS')
|
||||
self.delete()
|
||||
|
||||
old_session_was_locked = self.locked
|
||||
if old_session_was_locked:
|
||||
self.release_lock()
|
||||
if self.debug:
|
||||
cherrypy.log('Old lock released.', 'TOOLS.SESSIONS')
|
||||
|
||||
self.id = None
|
||||
while self.id is None:
|
||||
self.id = self.generate_id()
|
||||
# Assert that the generated id is not already stored.
|
||||
if self._exists():
|
||||
self.id = None
|
||||
if self.debug:
|
||||
cherrypy.log('Set id to generated %s.' % self.id,
|
||||
'TOOLS.SESSIONS')
|
||||
|
||||
if old_session_was_locked:
|
||||
self.acquire_lock()
|
||||
if self.debug:
|
||||
cherrypy.log('Regenerated lock acquired.', 'TOOLS.SESSIONS')
|
||||
|
||||
def clean_up(self):
|
||||
"""Clean up expired sessions."""
|
||||
pass
|
||||
|
||||
def generate_id(self):
|
||||
"""Return a new session id."""
|
||||
return random20()
|
||||
|
||||
def save(self):
|
||||
"""Save session data."""
|
||||
try:
|
||||
# If session data has never been loaded then it's never been
|
||||
# accessed: no need to save it
|
||||
if self.loaded:
|
||||
t = datetime.timedelta(seconds=self.timeout * 60)
|
||||
expiration_time = self.now() + t
|
||||
if self.debug:
|
||||
cherrypy.log('Saving session %r with expiry %s' %
|
||||
(self.id, expiration_time),
|
||||
'TOOLS.SESSIONS')
|
||||
self._save(expiration_time)
|
||||
else:
|
||||
if self.debug:
|
||||
cherrypy.log(
|
||||
'Skipping save of session %r (no session loaded).' %
|
||||
self.id, 'TOOLS.SESSIONS')
|
||||
finally:
|
||||
if self.locked:
|
||||
# Always release the lock if the user didn't release it
|
||||
self.release_lock()
|
||||
if self.debug:
|
||||
cherrypy.log('Lock released after save.', 'TOOLS.SESSIONS')
|
||||
|
||||
def load(self):
|
||||
"""Copy stored session data into this session instance."""
|
||||
data = self._load()
|
||||
# data is either None or a tuple (session_data, expiration_time)
|
||||
if data is None or data[1] < self.now():
|
||||
if self.debug:
|
||||
cherrypy.log('Expired session %r, flushing data.' % self.id,
|
||||
'TOOLS.SESSIONS')
|
||||
self._data = {}
|
||||
else:
|
||||
if self.debug:
|
||||
cherrypy.log('Data loaded for session %r.' % self.id,
|
||||
'TOOLS.SESSIONS')
|
||||
self._data = data[0]
|
||||
self.loaded = True
|
||||
|
||||
# Stick the clean_thread in the class, not the instance.
|
||||
# The instances are created and destroyed per-request.
|
||||
cls = self.__class__
|
||||
if self.clean_freq and not cls.clean_thread:
|
||||
# clean_up is an instancemethod and not a classmethod,
|
||||
# so that tool config can be accessed inside the method.
|
||||
t = cherrypy.process.plugins.Monitor(
|
||||
cherrypy.engine, self.clean_up, self.clean_freq * 60,
|
||||
name='Session cleanup')
|
||||
t.subscribe()
|
||||
cls.clean_thread = t
|
||||
t.start()
|
||||
if self.debug:
|
||||
cherrypy.log('Started cleanup thread.', 'TOOLS.SESSIONS')
|
||||
|
||||
def delete(self):
|
||||
"""Delete stored session data."""
|
||||
self._delete()
|
||||
if self.debug:
|
||||
cherrypy.log('Deleted session %s.' % self.id,
|
||||
'TOOLS.SESSIONS')
|
||||
|
||||
# -------------------- Application accessor methods -------------------- #
|
||||
|
||||
def __getitem__(self, key):
|
||||
if not self.loaded:
|
||||
self.load()
|
||||
return self._data[key]
|
||||
|
||||
def __setitem__(self, key, value):
|
||||
if not self.loaded:
|
||||
self.load()
|
||||
self._data[key] = value
|
||||
|
||||
def __delitem__(self, key):
|
||||
if not self.loaded:
|
||||
self.load()
|
||||
del self._data[key]
|
||||
|
||||
def pop(self, key, default=missing):
|
||||
"""Remove the specified key and return the corresponding value.
|
||||
If key is not found, default is returned if given,
|
||||
otherwise KeyError is raised.
|
||||
"""
|
||||
if not self.loaded:
|
||||
self.load()
|
||||
if default is missing:
|
||||
return self._data.pop(key)
|
||||
else:
|
||||
return self._data.pop(key, default)
|
||||
|
||||
def __contains__(self, key):
|
||||
if not self.loaded:
|
||||
self.load()
|
||||
return key in self._data
|
||||
|
||||
if hasattr({}, 'has_key'):
|
||||
def has_key(self, key):
|
||||
"""D.has_key(k) -> True if D has a key k, else False."""
|
||||
if not self.loaded:
|
||||
self.load()
|
||||
return key in self._data
|
||||
|
||||
def get(self, key, default=None):
|
||||
"""D.get(k[,d]) -> D[k] if k in D, else d. d defaults to None."""
|
||||
if not self.loaded:
|
||||
self.load()
|
||||
return self._data.get(key, default)
|
||||
|
||||
def update(self, d):
|
||||
"""D.update(E) -> None. Update D from E: for k in E: D[k] = E[k]."""
|
||||
if not self.loaded:
|
||||
self.load()
|
||||
self._data.update(d)
|
||||
|
||||
def setdefault(self, key, default=None):
|
||||
"""D.setdefault(k[,d]) -> D.get(k,d), also set D[k]=d if k not in D."""
|
||||
if not self.loaded:
|
||||
self.load()
|
||||
return self._data.setdefault(key, default)
|
||||
|
||||
def clear(self):
|
||||
"""D.clear() -> None. Remove all items from D."""
|
||||
if not self.loaded:
|
||||
self.load()
|
||||
self._data.clear()
|
||||
|
||||
def keys(self):
|
||||
"""D.keys() -> list of D's keys."""
|
||||
if not self.loaded:
|
||||
self.load()
|
||||
return self._data.keys()
|
||||
|
||||
def items(self):
|
||||
"""D.items() -> list of D's (key, value) pairs, as 2-tuples."""
|
||||
if not self.loaded:
|
||||
self.load()
|
||||
return self._data.items()
|
||||
|
||||
def values(self):
|
||||
"""D.values() -> list of D's values."""
|
||||
if not self.loaded:
|
||||
self.load()
|
||||
return self._data.values()
|
||||
|
||||
|
||||
class RamSession(Session):
|
||||
|
||||
# Class-level objects. Don't rebind these!
|
||||
cache = {}
|
||||
locks = {}
|
||||
|
||||
def clean_up(self):
|
||||
"""Clean up expired sessions."""
|
||||
|
||||
now = self.now()
|
||||
for _id, (data, expiration_time) in copyitems(self.cache):
|
||||
if expiration_time <= now:
|
||||
try:
|
||||
del self.cache[_id]
|
||||
except KeyError:
|
||||
pass
|
||||
try:
|
||||
if self.locks[_id].acquire(blocking=False):
|
||||
lock = self.locks.pop(_id)
|
||||
lock.release()
|
||||
except KeyError:
|
||||
pass
|
||||
|
||||
# added to remove obsolete lock objects
|
||||
for _id in list(self.locks):
|
||||
if _id not in self.cache and self.locks[_id].acquire(blocking=False):
|
||||
lock = self.locks.pop(_id)
|
||||
lock.release()
|
||||
|
||||
def _exists(self):
|
||||
return self.id in self.cache
|
||||
|
||||
def _load(self):
|
||||
return self.cache.get(self.id)
|
||||
|
||||
def _save(self, expiration_time):
|
||||
self.cache[self.id] = (self._data, expiration_time)
|
||||
|
||||
def _delete(self):
|
||||
self.cache.pop(self.id, None)
|
||||
|
||||
def acquire_lock(self):
|
||||
"""Acquire an exclusive lock on the currently-loaded session data."""
|
||||
self.locked = True
|
||||
self.locks.setdefault(self.id, threading.RLock()).acquire()
|
||||
|
||||
def release_lock(self):
|
||||
"""Release the lock on the currently-loaded session data."""
|
||||
self.locks[self.id].release()
|
||||
self.locked = False
|
||||
|
||||
def __len__(self):
|
||||
"""Return the number of active sessions."""
|
||||
return len(self.cache)
|
||||
|
||||
|
||||
class FileSession(Session):
|
||||
|
||||
"""Implementation of the File backend for sessions
|
||||
|
||||
storage_path
|
||||
The folder where session data will be saved. Each session
|
||||
will be saved as pickle.dump(data, expiration_time) in its own file;
|
||||
the filename will be self.SESSION_PREFIX + self.id.
|
||||
|
||||
lock_timeout
|
||||
A timedelta or numeric seconds indicating how long
|
||||
to block acquiring a lock. If None (default), acquiring a lock
|
||||
will block indefinitely.
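
For example (an illustrative sketch; the path below is hypothetical)::

    tools.sessions.storage_class = cherrypy.lib.sessions.FileSession
    tools.sessions.storage_path = "/var/lib/myapp/sessions"
    tools.sessions.lock_timeout = 30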
"""
|
||||
|
||||
SESSION_PREFIX = 'session-'
|
||||
LOCK_SUFFIX = '.lock'
|
||||
pickle_protocol = pickle.HIGHEST_PROTOCOL
|
||||
|
||||
def __init__(self, id=None, **kwargs):
|
||||
# The 'storage_path' arg is required for file-based sessions.
|
||||
kwargs['storage_path'] = os.path.abspath(kwargs['storage_path'])
|
||||
kwargs.setdefault('lock_timeout', None)
|
||||
|
||||
Session.__init__(self, id=id, **kwargs)
|
||||
|
||||
# validate self.lock_timeout
|
||||
if isinstance(self.lock_timeout, (int, float)):
|
||||
self.lock_timeout = datetime.timedelta(seconds=self.lock_timeout)
|
||||
if not isinstance(self.lock_timeout, (datetime.timedelta, type(None))):
|
||||
raise ValueError('Lock timeout must be numeric seconds or '
|
||||
'a timedelta instance.')
|
||||
|
||||
@classmethod
|
||||
def setup(cls, **kwargs):
|
||||
"""Set up the storage system for file-based sessions.
|
||||
|
||||
This should only be called once per process; this will be done
|
||||
automatically when using sessions.init (as the built-in Tool does).
|
||||
"""
|
||||
# The 'storage_path' arg is required for file-based sessions.
|
||||
kwargs['storage_path'] = os.path.abspath(kwargs['storage_path'])
|
||||
|
||||
for k, v in kwargs.items():
|
||||
setattr(cls, k, v)
|
||||
|
||||
def _get_file_path(self):
|
||||
f = os.path.join(self.storage_path, self.SESSION_PREFIX + self.id)
|
||||
if not os.path.abspath(f).startswith(self.storage_path):
|
||||
raise cherrypy.HTTPError(400, 'Invalid session id in cookie.')
|
||||
return f
|
||||
|
||||
def _exists(self):
|
||||
path = self._get_file_path()
|
||||
return os.path.exists(path)
|
||||
|
||||
def _load(self, path=None):
|
||||
assert self.locked, ('The session was loaded without being locked. '
                     "Check your tools' priority levels.")
|
||||
if path is None:
|
||||
path = self._get_file_path()
|
||||
try:
|
||||
f = open(path, 'rb')
|
||||
try:
|
||||
return pickle.load(f)
|
||||
finally:
|
||||
f.close()
|
||||
except (IOError, EOFError):
|
||||
e = sys.exc_info()[1]
|
||||
if self.debug:
|
||||
cherrypy.log('Error loading the session pickle: %s' %
|
||||
e, 'TOOLS.SESSIONS')
|
||||
return None
|
||||
|
||||
def _save(self, expiration_time):
|
||||
assert self.locked, ('The session was saved without being locked. '
|
||||
"Check your tools' priority levels.")
|
||||
f = open(self._get_file_path(), 'wb')
|
||||
try:
|
||||
pickle.dump((self._data, expiration_time), f, self.pickle_protocol)
|
||||
finally:
|
||||
f.close()
|
||||
|
||||
def _delete(self):
|
||||
assert self.locked, ('The session was deleted without being locked. '
                     "Check your tools' priority levels.")
|
||||
try:
|
||||
os.unlink(self._get_file_path())
|
||||
except OSError:
|
||||
pass
|
||||
|
||||
def acquire_lock(self, path=None):
|
||||
"""Acquire an exclusive lock on the currently-loaded session data."""
|
||||
if path is None:
|
||||
path = self._get_file_path()
|
||||
path += self.LOCK_SUFFIX
|
||||
checker = locking.LockChecker(self.id, self.lock_timeout)
|
||||
while not checker.expired():
|
||||
try:
|
||||
self.lock = lockfile.LockFile(path)
|
||||
except lockfile.LockError:
|
||||
time.sleep(0.1)
|
||||
else:
|
||||
break
|
||||
self.locked = True
|
||||
if self.debug:
|
||||
cherrypy.log('Lock acquired.', 'TOOLS.SESSIONS')
|
||||
|
||||
def release_lock(self, path=None):
|
||||
"""Release the lock on the currently-loaded session data."""
|
||||
self.lock.release()
|
||||
self.lock.remove()
|
||||
self.locked = False
|
||||
|
||||
def clean_up(self):
|
||||
"""Clean up expired sessions."""
|
||||
now = self.now()
|
||||
# Iterate over all session files in self.storage_path
|
||||
for fname in os.listdir(self.storage_path):
|
||||
if (fname.startswith(self.SESSION_PREFIX)
|
||||
and not fname.endswith(self.LOCK_SUFFIX)):
|
||||
# We have a session file: lock and load it and check
|
||||
# if it's expired. If loading fails, never mind.
|
||||
path = os.path.join(self.storage_path, fname)
|
||||
self.acquire_lock(path)
|
||||
if self.debug:
|
||||
# This is a bit of a hack, since we're calling clean_up
|
||||
# on the first instance rather than the entire class,
|
||||
# so depending on whether you have "debug" set on the
|
||||
# path of the first session called, this may not run.
|
||||
cherrypy.log('Cleanup lock acquired.', 'TOOLS.SESSIONS')
|
||||
|
||||
try:
|
||||
contents = self._load(path)
|
||||
# _load returns None on IOError
|
||||
if contents is not None:
|
||||
data, expiration_time = contents
|
||||
if expiration_time < now:
|
||||
# Session expired: deleting it
|
||||
os.unlink(path)
|
||||
finally:
|
||||
self.release_lock(path)
|
||||
|
||||
def __len__(self):
|
||||
"""Return the number of active sessions."""
|
||||
return len([fname for fname in os.listdir(self.storage_path)
|
||||
if (fname.startswith(self.SESSION_PREFIX)
|
||||
and not fname.endswith(self.LOCK_SUFFIX))])
|
||||
|
||||
|
||||
class MemcachedSession(Session):
|
||||
|
||||
# The most popular memcached client for Python isn't thread-safe.
|
||||
# Wrap all .get and .set operations in a single lock.
|
||||
mc_lock = threading.RLock()
|
||||
|
||||
# This is a separate set of locks per session id.
|
||||
locks = {}
|
||||
|
||||
servers = ['127.0.0.1:11211']
|
||||
|
||||
@classmethod
|
||||
def setup(cls, **kwargs):
|
||||
"""Set up the storage system for memcached-based sessions.
|
||||
|
||||
This should only be called once per process; this will be done
|
||||
automatically when using sessions.init (as the built-in Tool does).
|
||||
"""
|
||||
for k, v in kwargs.items():
|
||||
setattr(cls, k, v)
|
||||
|
||||
import memcache
|
||||
cls.cache = memcache.Client(cls.servers)
|
||||
|
||||
def _exists(self):
|
||||
self.mc_lock.acquire()
|
||||
try:
|
||||
return bool(self.cache.get(self.id))
|
||||
finally:
|
||||
self.mc_lock.release()
|
||||
|
||||
def _load(self):
|
||||
self.mc_lock.acquire()
|
||||
try:
|
||||
return self.cache.get(self.id)
|
||||
finally:
|
||||
self.mc_lock.release()
|
||||
|
||||
def _save(self, expiration_time):
|
||||
# Send the expiration time as "Unix time" (seconds since 1/1/1970)
|
||||
td = int(time.mktime(expiration_time.timetuple()))
|
||||
self.mc_lock.acquire()
|
||||
try:
|
||||
if not self.cache.set(self.id, (self._data, expiration_time), td):
|
||||
raise AssertionError(
|
||||
'Session data for id %r not set.' % self.id)
|
||||
finally:
|
||||
self.mc_lock.release()
|
||||
|
||||
def _delete(self):
|
||||
self.cache.delete(self.id)
|
||||
|
||||
def acquire_lock(self):
|
||||
"""Acquire an exclusive lock on the currently-loaded session data."""
|
||||
self.locked = True
|
||||
self.locks.setdefault(self.id, threading.RLock()).acquire()
|
||||
if self.debug:
|
||||
cherrypy.log('Lock acquired.', 'TOOLS.SESSIONS')
|
||||
|
||||
def release_lock(self):
|
||||
"""Release the lock on the currently-loaded session data."""
|
||||
self.locks[self.id].release()
|
||||
self.locked = False
|
||||
|
||||
def __len__(self):
|
||||
"""Return the number of active sessions."""
|
||||
raise NotImplementedError
|
||||
|
||||
|
||||
# Hook functions (for CherryPy tools)
|
||||
|
||||
def save():
|
||||
"""Save any changed session data."""
|
||||
|
||||
if not hasattr(cherrypy.serving, 'session'):
|
||||
return
|
||||
request = cherrypy.serving.request
|
||||
response = cherrypy.serving.response
|
||||
|
||||
# Guard against running twice
|
||||
if hasattr(request, '_sessionsaved'):
|
||||
return
|
||||
request._sessionsaved = True
|
||||
|
||||
if response.stream:
|
||||
# If the body is being streamed, we have to save the data
|
||||
# *after* the response has been written out
|
||||
request.hooks.attach('on_end_request', cherrypy.session.save)
|
||||
else:
|
||||
# If the body is not being streamed, we save the data now
|
||||
# (so we can release the lock).
|
||||
if is_iterator(response.body):
|
||||
response.collapse_body()
|
||||
cherrypy.session.save()
|
||||
save.failsafe = True
|
||||
|
||||
|
||||
def close():
|
||||
"""Close the session object for this request."""
|
||||
sess = getattr(cherrypy.serving, 'session', None)
|
||||
if getattr(sess, 'locked', False):
|
||||
# If the session is still locked we release the lock
|
||||
sess.release_lock()
|
||||
if sess.debug:
|
||||
cherrypy.log('Lock released on close.', 'TOOLS.SESSIONS')
|
||||
close.failsafe = True
|
||||
close.priority = 90
|
||||
|
||||
|
||||
def init(storage_type=None, path=None, path_header=None, name='session_id',
|
||||
timeout=60, domain=None, secure=False, clean_freq=5,
|
||||
persistent=True, httponly=False, debug=False,
|
||||
# Py27 compat
|
||||
# *, storage_class=RamSession,
|
||||
**kwargs):
|
||||
"""Initialize session object (using cookies).
|
||||
|
||||
storage_class
|
||||
The Session subclass to use. Defaults to RamSession.
|
||||
|
||||
storage_type
|
||||
(deprecated)
|
||||
One of 'ram', 'file', 'memcached'. This will be
|
||||
used to look up the corresponding class in cherrypy.lib.sessions
|
||||
globals. For example, 'file' will use the FileSession class.
|
||||
|
||||
path
|
||||
The 'path' value to stick in the response cookie metadata.
|
||||
|
||||
path_header
|
||||
If 'path' is None (the default), then the response
|
||||
cookie 'path' will be pulled from request.headers[path_header].
|
||||
|
||||
name
|
||||
The name of the cookie.
|
||||
|
||||
timeout
|
||||
The expiration timeout (in minutes) for the stored session data.
|
||||
If 'persistent' is True (the default), this is also the timeout
|
||||
for the cookie.
|
||||
|
||||
domain
|
||||
The cookie domain.
|
||||
|
||||
secure
|
||||
If False (the default) the cookie 'secure' value will not
|
||||
be set. If True, the cookie 'secure' value will be set (to 1).
|
||||
|
||||
clean_freq (minutes)
|
||||
The poll rate for expired session cleanup.
|
||||
|
||||
persistent
|
||||
If True (the default), the 'timeout' argument will be used
|
||||
to expire the cookie. If False, the cookie will not have an expiry,
|
||||
and the cookie will be a "session cookie" which expires when the
|
||||
browser is closed.
|
||||
|
||||
httponly
|
||||
If False (the default) the cookie 'httponly' value will not be set.
|
||||
If True, the cookie 'httponly' value will be set (to 1).
|
||||
|
||||
Any additional kwargs will be bound to the new Session instance,
|
||||
and may be specific to the storage type. See the subclass of Session
|
||||
you're using for more information.
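
For example (an illustrative sketch; the path and values are hypothetical)::

    cherrypy.config.update({
        'tools.sessions.on': True,
        'tools.sessions.storage_class': cherrypy.lib.sessions.FileSession,
        'tools.sessions.storage_path': '/tmp/myapp-sessions',
        'tools.sessions.timeout': 30,
        'tools.sessions.httponly': True,
    })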
|
||||
"""
|
||||
|
||||
# Py27 compat
|
||||
storage_class = kwargs.pop('storage_class', RamSession)
|
||||
|
||||
request = cherrypy.serving.request
|
||||
|
||||
# Guard against running twice
|
||||
if hasattr(request, '_session_init_flag'):
|
||||
return
|
||||
request._session_init_flag = True
|
||||
|
||||
# Check if request came with a session ID
|
||||
id = None
|
||||
if name in request.cookie:
|
||||
id = request.cookie[name].value
|
||||
if debug:
|
||||
cherrypy.log('ID obtained from request.cookie: %r' % id,
|
||||
'TOOLS.SESSIONS')
|
||||
|
||||
first_time = not hasattr(cherrypy, 'session')
|
||||
|
||||
if storage_type:
|
||||
if first_time:
|
||||
msg = 'storage_type is deprecated. Supply storage_class instead'
|
||||
cherrypy.log(msg)
|
||||
storage_class = storage_type.title() + 'Session'
|
||||
storage_class = globals()[storage_class]
|
||||
|
||||
# call setup first time only
|
||||
if first_time:
|
||||
if hasattr(storage_class, 'setup'):
|
||||
storage_class.setup(**kwargs)
|
||||
|
||||
# Create and attach a new Session instance to cherrypy.serving.
|
||||
# It will possess a reference to (and lock, and lazily load)
|
||||
# the requested session data.
|
||||
kwargs['timeout'] = timeout
|
||||
kwargs['clean_freq'] = clean_freq
|
||||
cherrypy.serving.session = sess = storage_class(id, **kwargs)
|
||||
sess.debug = debug
|
||||
|
||||
def update_cookie(id):
|
||||
"""Update the cookie every time the session id changes."""
|
||||
cherrypy.serving.response.cookie[name] = id
|
||||
sess.id_observers.append(update_cookie)
|
||||
|
||||
# Create cherrypy.session which will proxy to cherrypy.serving.session
|
||||
if not hasattr(cherrypy, 'session'):
|
||||
cherrypy.session = cherrypy._ThreadLocalProxy('session')
|
||||
|
||||
if persistent:
|
||||
cookie_timeout = timeout
|
||||
else:
|
||||
# See http://support.microsoft.com/kb/223799/EN-US/
|
||||
# and http://support.mozilla.com/en-US/kb/Cookies
|
||||
cookie_timeout = None
|
||||
set_response_cookie(path=path, path_header=path_header, name=name,
|
||||
timeout=cookie_timeout, domain=domain, secure=secure,
|
||||
httponly=httponly)
|
||||
|
||||
|
||||
def set_response_cookie(path=None, path_header=None, name='session_id',
|
||||
timeout=60, domain=None, secure=False, httponly=False):
|
||||
"""Set a response cookie for the client.
|
||||
|
||||
path
|
||||
the 'path' value to stick in the response cookie metadata.
|
||||
|
||||
path_header
|
||||
if 'path' is None (the default), then the response
|
||||
cookie 'path' will be pulled from request.headers[path_header].
|
||||
|
||||
name
|
||||
the name of the cookie.
|
||||
|
||||
timeout
|
||||
the expiration timeout for the cookie. If 0 or other boolean
|
||||
False, no 'expires' param will be set, and the cookie will be a
|
||||
"session cookie" which expires when the browser is closed.
|
||||
|
||||
domain
|
||||
the cookie domain.
|
||||
|
||||
secure
|
||||
if False (the default) the cookie 'secure' value will not
|
||||
be set. If True, the cookie 'secure' value will be set (to 1).
|
||||
|
||||
httponly
|
||||
If False (the default) the cookie 'httponly' value will not be set.
|
||||
If True, the cookie 'httponly' value will be set (to 1).
|
||||
|
||||
"""
|
||||
# Set response cookie
|
||||
cookie = cherrypy.serving.response.cookie
|
||||
cookie[name] = cherrypy.serving.session.id
|
||||
cookie[name]['path'] = (
|
||||
path or
|
||||
cherrypy.serving.request.headers.get(path_header) or
|
||||
'/'
|
||||
)
|
||||
|
||||
# We'd like to use the "max-age" param as indicated in
|
||||
# http://www.faqs.org/rfcs/rfc2109.html but IE doesn't
|
||||
# save it to disk and the session is lost if people close
|
||||
# the browser. So we have to use the old "expires" ... sigh ...
|
||||
## cookie[name]['max-age'] = timeout * 60
|
||||
if timeout:
|
||||
e = time.time() + (timeout * 60)
|
||||
cookie[name]['expires'] = httputil.HTTPDate(e)
|
||||
if domain is not None:
|
||||
cookie[name]['domain'] = domain
|
||||
if secure:
|
||||
cookie[name]['secure'] = 1
|
||||
if httponly:
|
||||
if not cookie[name].isReservedKey('httponly'):
|
||||
raise ValueError('The httponly cookie token is not supported.')
|
||||
cookie[name]['httponly'] = 1
|
||||
|
||||
|
||||
def expire():
|
||||
"""Expire the current session cookie."""
|
||||
name = cherrypy.serving.request.config.get(
|
||||
'tools.sessions.name', 'session_id')
|
||||
one_year = 60 * 60 * 24 * 365
|
||||
e = time.time() - one_year
|
||||
cherrypy.serving.response.cookie[name]['expires'] = httputil.HTTPDate(e)
|
||||
@@ -1,381 +0,0 @@
|
||||
import os
|
||||
import re
|
||||
import stat
|
||||
import mimetypes
|
||||
|
||||
try:
|
||||
from io import UnsupportedOperation
|
||||
except ImportError:
|
||||
UnsupportedOperation = object()
|
||||
|
||||
import cherrypy
|
||||
from cherrypy._cpcompat import ntob, unquote
|
||||
from cherrypy.lib import cptools, httputil, file_generator_limited
|
||||
|
||||
|
||||
mimetypes.init()
|
||||
mimetypes.types_map['.dwg'] = 'image/x-dwg'
|
||||
mimetypes.types_map['.ico'] = 'image/x-icon'
|
||||
mimetypes.types_map['.bz2'] = 'application/x-bzip2'
|
||||
mimetypes.types_map['.gz'] = 'application/x-gzip'
|
||||
|
||||
|
||||
def serve_file(path, content_type=None, disposition=None, name=None,
|
||||
debug=False):
|
||||
"""Set status, headers, and body in order to serve the given path.
|
||||
|
||||
The Content-Type header will be set to the content_type arg, if provided.
|
||||
If not provided, the Content-Type will be guessed by the file extension
|
||||
of the 'path' argument.
|
||||
|
||||
If disposition is not None, the Content-Disposition header will be set
|
||||
to "<disposition>; filename=<name>". If name is None, it will be set
|
||||
to the basename of path. If disposition is None, no Content-Disposition
|
||||
header will be written.
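
A typical use (an illustrative sketch; the path and handler are hypothetical)::

    @cherrypy.expose
    def report(self):
        return serve_file('/srv/reports/latest.pdf',
                          content_type='application/pdf',
                          disposition='attachment', name='report.pdf')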
|
||||
"""
|
||||
|
||||
response = cherrypy.serving.response
|
||||
|
||||
# If path is relative, users should fix it by making path absolute.
|
||||
# That is, CherryPy should not guess where the application root is.
|
||||
# It certainly should *not* use cwd (since CP may be invoked from a
|
||||
# variety of paths). If using tools.staticdir, you can make your relative
|
||||
# paths become absolute by supplying a value for "tools.staticdir.root".
|
||||
if not os.path.isabs(path):
|
||||
msg = "'%s' is not an absolute path." % path
|
||||
if debug:
|
||||
cherrypy.log(msg, 'TOOLS.STATICFILE')
|
||||
raise ValueError(msg)
|
||||
|
||||
try:
|
||||
st = os.stat(path)
|
||||
except (OSError, TypeError, ValueError):
|
||||
# OSError when file fails to stat
|
||||
# TypeError on Python 2 when there's a null byte
|
||||
# ValueError on Python 3 when there's a null byte
|
||||
if debug:
|
||||
cherrypy.log('os.stat(%r) failed' % path, 'TOOLS.STATIC')
|
||||
raise cherrypy.NotFound()
|
||||
|
||||
# Check if path is a directory.
|
||||
if stat.S_ISDIR(st.st_mode):
|
||||
# Let the caller deal with it as they like.
|
||||
if debug:
|
||||
cherrypy.log('%r is a directory' % path, 'TOOLS.STATIC')
|
||||
raise cherrypy.NotFound()
|
||||
|
||||
# Set the Last-Modified response header, so that
|
||||
# modified-since validation code can work.
|
||||
response.headers['Last-Modified'] = httputil.HTTPDate(st.st_mtime)
|
||||
cptools.validate_since()
|
||||
|
||||
if content_type is None:
|
||||
# Set content-type based on filename extension
|
||||
ext = ''
|
||||
i = path.rfind('.')
|
||||
if i != -1:
|
||||
ext = path[i:].lower()
|
||||
content_type = mimetypes.types_map.get(ext, None)
|
||||
if content_type is not None:
|
||||
response.headers['Content-Type'] = content_type
|
||||
if debug:
|
||||
cherrypy.log('Content-Type: %r' % content_type, 'TOOLS.STATIC')
|
||||
|
||||
cd = None
|
||||
if disposition is not None:
|
||||
if name is None:
|
||||
name = os.path.basename(path)
|
||||
cd = '%s; filename="%s"' % (disposition, name)
|
||||
response.headers['Content-Disposition'] = cd
|
||||
if debug:
|
||||
cherrypy.log('Content-Disposition: %r' % cd, 'TOOLS.STATIC')
|
||||
|
||||
# Set Content-Length and use an iterable (file object)
|
||||
# this way CP won't load the whole file in memory
|
||||
content_length = st.st_size
|
||||
fileobj = open(path, 'rb')
|
||||
return _serve_fileobj(fileobj, content_type, content_length, debug=debug)
|
||||
|
||||
|
||||
def serve_fileobj(fileobj, content_type=None, disposition=None, name=None,
|
||||
debug=False):
|
||||
"""Set status, headers, and body in order to serve the given file object.
|
||||
|
||||
The Content-Type header will be set to the content_type arg, if provided.
|
||||
|
||||
If disposition is not None, the Content-Disposition header will be set
|
||||
to "<disposition>; filename=<name>". If name is None, 'filename' will
|
||||
not be set. If disposition is None, no Content-Disposition header will
|
||||
be written.
|
||||
|
||||
CAUTION: If the request contains a 'Range' header, one or more seek()s will
|
||||
be performed on the file object. This may cause undesired behavior if
|
||||
the file object is not seekable. It could also produce undesired results
|
||||
if the caller set the read position of the file object prior to calling
|
||||
serve_fileobj(), expecting that the data would be served starting from that
|
||||
position.
|
||||
"""
|
||||
|
||||
response = cherrypy.serving.response
|
||||
|
||||
try:
|
||||
st = os.fstat(fileobj.fileno())
|
||||
except AttributeError:
|
||||
if debug:
|
||||
cherrypy.log('os has no fstat attribute', 'TOOLS.STATIC')
|
||||
content_length = None
|
||||
except UnsupportedOperation:
|
||||
content_length = None
|
||||
else:
|
||||
# Set the Last-Modified response header, so that
|
||||
# modified-since validation code can work.
|
||||
response.headers['Last-Modified'] = httputil.HTTPDate(st.st_mtime)
|
||||
cptools.validate_since()
|
||||
content_length = st.st_size
|
||||
|
||||
if content_type is not None:
|
||||
response.headers['Content-Type'] = content_type
|
||||
if debug:
|
||||
cherrypy.log('Content-Type: %r' % content_type, 'TOOLS.STATIC')
|
||||
|
||||
cd = None
|
||||
if disposition is not None:
|
||||
if name is None:
|
||||
cd = disposition
|
||||
else:
|
||||
cd = '%s; filename="%s"' % (disposition, name)
|
||||
response.headers['Content-Disposition'] = cd
|
||||
if debug:
|
||||
cherrypy.log('Content-Disposition: %r' % cd, 'TOOLS.STATIC')
|
||||
|
||||
return _serve_fileobj(fileobj, content_type, content_length, debug=debug)
|
||||
|
||||
|
||||
def _serve_fileobj(fileobj, content_type, content_length, debug=False):
|
||||
"""Internal. Set response.body to the given file object, perhaps ranged."""
|
||||
response = cherrypy.serving.response
|
||||
|
||||
# HTTP/1.0 didn't have Range/Accept-Ranges headers, or the 206 code
|
||||
request = cherrypy.serving.request
|
||||
if request.protocol >= (1, 1):
|
||||
response.headers['Accept-Ranges'] = 'bytes'
|
||||
r = httputil.get_ranges(request.headers.get('Range'), content_length)
|
||||
if r == []:
|
||||
response.headers['Content-Range'] = 'bytes */%s' % content_length
|
||||
message = ('Invalid Range (first-byte-pos greater than '
|
||||
'Content-Length)')
|
||||
if debug:
|
||||
cherrypy.log(message, 'TOOLS.STATIC')
|
||||
raise cherrypy.HTTPError(416, message)
|
||||
|
||||
if r:
|
||||
if len(r) == 1:
|
||||
# Return a single-part response.
|
||||
start, stop = r[0]
|
||||
if stop > content_length:
|
||||
stop = content_length
|
||||
r_len = stop - start
|
||||
if debug:
|
||||
cherrypy.log(
|
||||
'Single part; start: %r, stop: %r' % (start, stop),
|
||||
'TOOLS.STATIC')
|
||||
response.status = '206 Partial Content'
|
||||
response.headers['Content-Range'] = (
|
||||
'bytes %s-%s/%s' % (start, stop - 1, content_length))
|
||||
response.headers['Content-Length'] = r_len
|
||||
fileobj.seek(start)
|
||||
response.body = file_generator_limited(fileobj, r_len)
|
||||
else:
|
||||
# Return a multipart/byteranges response.
|
||||
response.status = '206 Partial Content'
|
||||
try:
|
||||
# Python 3
|
||||
from email.generator import _make_boundary as make_boundary
|
||||
except ImportError:
|
||||
# Python 2
|
||||
from mimetools import choose_boundary as make_boundary
|
||||
boundary = make_boundary()
|
||||
ct = 'multipart/byteranges; boundary=%s' % boundary
|
||||
response.headers['Content-Type'] = ct
|
||||
if 'Content-Length' in response.headers:
|
||||
# Delete Content-Length header so finalize() recalcs it.
|
||||
del response.headers['Content-Length']
|
||||
|
||||
def file_ranges():
|
||||
# Apache compatibility:
|
||||
yield ntob('\r\n')
|
||||
|
||||
for start, stop in r:
|
||||
if debug:
|
||||
cherrypy.log(
|
||||
'Multipart; start: %r, stop: %r' % (
|
||||
start, stop),
|
||||
'TOOLS.STATIC')
|
||||
yield ntob('--' + boundary, 'ascii')
|
||||
yield ntob('\r\nContent-type: %s' % content_type,
|
||||
'ascii')
|
||||
yield ntob(
|
||||
'\r\nContent-range: bytes %s-%s/%s\r\n\r\n' % (
|
||||
start, stop - 1, content_length),
|
||||
'ascii')
|
||||
fileobj.seek(start)
|
||||
gen = file_generator_limited(fileobj, stop - start)
|
||||
for chunk in gen:
|
||||
yield chunk
|
||||
yield ntob('\r\n')
|
||||
# Final boundary
|
||||
yield ntob('--' + boundary + '--', 'ascii')
|
||||
|
||||
# Apache compatibility:
|
||||
yield ntob('\r\n')
|
||||
response.body = file_ranges()
|
||||
return response.body
|
||||
else:
|
||||
if debug:
|
||||
cherrypy.log('No byteranges requested', 'TOOLS.STATIC')
|
||||
|
||||
# Set Content-Length and use an iterable (file object)
|
||||
# this way CP won't load the whole file in memory
|
||||
response.headers['Content-Length'] = content_length
|
||||
response.body = fileobj
|
||||
return response.body
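
# Illustrative sketch (not part of the original source): for a 1000-byte file,
# a request carrying
#
#     Range: bytes=0-99,500-599
#
# is answered with '206 Partial Content' and a multipart/byteranges body
# containing one part per range; a single range produces a plain 206 response
# with a Content-Range header instead.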
|
||||
|
||||
|
||||
def serve_download(path, name=None):
|
||||
"""Serve 'path' as an application/x-download attachment."""
|
||||
# This is such a common idiom I felt it deserved its own wrapper.
|
||||
return serve_file(path, 'application/x-download', 'attachment', name)
|
||||
|
||||
|
||||
def _attempt(filename, content_types, debug=False):
|
||||
if debug:
|
||||
cherrypy.log('Attempting %r (content_types %r)' %
|
||||
(filename, content_types), 'TOOLS.STATICDIR')
|
||||
try:
|
||||
# you can set the content types for a
|
||||
# complete directory per extension
|
||||
content_type = None
|
||||
if content_types:
|
||||
r, ext = os.path.splitext(filename)
|
||||
content_type = content_types.get(ext[1:], None)
|
||||
serve_file(filename, content_type=content_type, debug=debug)
|
||||
return True
|
||||
except cherrypy.NotFound:
|
||||
# If we didn't find the static file, continue handling the
|
||||
# request. We might find a dynamic handler instead.
|
||||
if debug:
|
||||
cherrypy.log('NotFound', 'TOOLS.STATICFILE')
|
||||
return False
|
||||
|
||||
|
||||
def staticdir(section, dir, root='', match='', content_types=None, index='',
|
||||
debug=False):
|
||||
"""Serve a static resource from the given (root +) dir.
|
||||
|
||||
match
|
||||
If given, request.path_info will be searched for the given
|
||||
regular expression before attempting to serve static content.
|
||||
|
||||
content_types
|
||||
If given, it should be a Python dictionary of
|
||||
{file-extension: content-type} pairs, where 'file-extension' is
|
||||
a string (e.g. "gif") and 'content-type' is the value to write
|
||||
out in the Content-Type response header (e.g. "image/gif").
|
||||
|
||||
index
|
||||
If provided, it should be the (relative) name of a file to
|
||||
serve for directory requests. For example, if the dir argument is
|
||||
'/home/me', the Request-URI is 'myapp', and the index arg is
|
||||
'index.html', the file '/home/me/myapp/index.html' will be sought.
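
A typical configuration (an illustrative sketch; the paths are hypothetical)::

    [/static]
    tools.staticdir.on = True
    tools.staticdir.root = "/home/me"
    tools.staticdir.dir = "myapp"
    tools.staticdir.index = "index.html"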
|
||||
"""
|
||||
request = cherrypy.serving.request
|
||||
if request.method not in ('GET', 'HEAD'):
|
||||
if debug:
|
||||
cherrypy.log('request.method not GET or HEAD', 'TOOLS.STATICDIR')
|
||||
return False
|
||||
|
||||
if match and not re.search(match, request.path_info):
|
||||
if debug:
|
||||
cherrypy.log('request.path_info %r does not match pattern %r' %
|
||||
(request.path_info, match), 'TOOLS.STATICDIR')
|
||||
return False
|
||||
|
||||
# Allow the use of '~' to refer to a user's home directory.
|
||||
dir = os.path.expanduser(dir)
|
||||
|
||||
# If dir is relative, make absolute using "root".
|
||||
if not os.path.isabs(dir):
|
||||
if not root:
|
||||
msg = 'Static dir requires an absolute dir (or root).'
|
||||
if debug:
|
||||
cherrypy.log(msg, 'TOOLS.STATICDIR')
|
||||
raise ValueError(msg)
|
||||
dir = os.path.join(root, dir)
|
||||
|
||||
# Determine where we are in the object tree relative to 'section'
|
||||
# (where the static tool was defined).
|
||||
if section == 'global':
|
||||
section = '/'
|
||||
section = section.rstrip(r'\/')
|
||||
branch = request.path_info[len(section) + 1:]
|
||||
branch = unquote(branch.lstrip(r'\/'))
|
||||
|
||||
# If branch is "", filename will end in a slash
|
||||
filename = os.path.join(dir, branch)
|
||||
if debug:
|
||||
cherrypy.log('Checking file %r to fulfill %r' %
|
||||
(filename, request.path_info), 'TOOLS.STATICDIR')
|
||||
|
||||
# There's a chance that the branch pulled from the URL might
|
||||
# have ".." or similar uplevel attacks in it. Check that the final
|
||||
# filename is a child of dir.
|
||||
if not os.path.normpath(filename).startswith(os.path.normpath(dir)):
|
||||
raise cherrypy.HTTPError(403) # Forbidden
|
||||
|
||||
handled = _attempt(filename, content_types)
|
||||
if not handled:
|
||||
# Check for an index file if a folder was requested.
|
||||
if index:
|
||||
handled = _attempt(os.path.join(filename, index), content_types)
|
||||
if handled:
|
||||
request.is_index = filename[-1] in (r'\/')
|
||||
return handled
|
||||
|
||||
|
||||
def staticfile(filename, root=None, match='', content_types=None, debug=False):
    """Serve a static resource from the given (root +) filename.

    match
        If given, request.path_info will be searched for the given
        regular expression before attempting to serve static content.

    content_types
        If given, it should be a Python dictionary of
        {file-extension: content-type} pairs, where 'file-extension' is
        a string (e.g. "gif") and 'content-type' is the value to write
        out in the Content-Type response header (e.g. "image/gif").

    """
    request = cherrypy.serving.request
    if request.method not in ('GET', 'HEAD'):
        if debug:
            cherrypy.log('request.method not GET or HEAD', 'TOOLS.STATICFILE')
        return False

    if match and not re.search(match, request.path_info):
        if debug:
            cherrypy.log('request.path_info %r does not match pattern %r' %
                         (request.path_info, match), 'TOOLS.STATICFILE')
        return False

    # If filename is relative, make absolute using "root".
    if not os.path.isabs(filename):
        if not root:
            msg = "Static tool requires an absolute filename (got '%s')." % (
                filename,)
            if debug:
                cherrypy.log(msg, 'TOOLS.STATICFILE')
            raise ValueError(msg)
        filename = os.path.join(root, filename)

    return _attempt(filename, content_types, debug=debug)
|
||||
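The two tools above are normally switched on through CherryPy config rather than called directly. A minimal sketch of such a config, assuming a hypothetical app layout (the mount points, directory names and favicon path are illustrative, not taken from this diff)::

    import os.path
    import cherrypy

    class Root(object):
        @cherrypy.expose
        def index(self):
            return 'Static files live under /static/.'

    config = {
        '/static': {
            'tools.staticdir.on': True,
            # 'dir' is relative, so an absolute 'root' is required (see staticdir above).
            'tools.staticdir.root': os.path.abspath(os.path.dirname(__file__)),
            'tools.staticdir.dir': 'static',
            'tools.staticdir.index': 'index.html',
        },
        '/favicon.ico': {
            'tools.staticfile.on': True,
            'tools.staticfile.filename': '/var/www/favicon.ico',  # hypothetical path
        },
    }

    cherrypy.quickstart(Root(), '/', config)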
@@ -1,57 +0,0 @@
|
||||
import sys
|
||||
|
||||
import cherrypy
|
||||
from cherrypy._cpcompat import ntob
|
||||
|
||||
|
||||
def get_xmlrpclib():
|
||||
try:
|
||||
import xmlrpc.client as x
|
||||
except ImportError:
|
||||
import xmlrpclib as x
|
||||
return x
|
||||
|
||||
|
||||
def process_body():
|
||||
"""Return (params, method) from request body."""
|
||||
try:
|
||||
return get_xmlrpclib().loads(cherrypy.request.body.read())
|
||||
except Exception:
|
||||
return ('ERROR PARAMS', ), 'ERRORMETHOD'
|
||||
|
||||
|
||||
def patched_path(path):
|
||||
"""Return 'path', doctored for RPC."""
|
||||
if not path.endswith('/'):
|
||||
path += '/'
|
||||
if path.startswith('/RPC2/'):
|
||||
# strip the first /rpc2
|
||||
path = path[5:]
|
||||
return path
|
||||
|
||||
|
||||
def _set_response(body):
|
||||
# The XML-RPC spec (http://www.xmlrpc.com/spec) says:
|
||||
# "Unless there's a lower-level error, always return 200 OK."
|
||||
# Since Python's xmlrpclib interprets a non-200 response
|
||||
# as a "Protocol Error", we'll just return 200 every time.
|
||||
response = cherrypy.response
|
||||
response.status = '200 OK'
|
||||
response.body = ntob(body, 'utf-8')
|
||||
response.headers['Content-Type'] = 'text/xml'
|
||||
response.headers['Content-Length'] = len(body)
|
||||
|
||||
|
||||
def respond(body, encoding='utf-8', allow_none=0):
|
||||
xmlrpclib = get_xmlrpclib()
|
||||
if not isinstance(body, xmlrpclib.Fault):
|
||||
body = (body,)
|
||||
_set_response(xmlrpclib.dumps(body, methodresponse=1,
|
||||
encoding=encoding,
|
||||
allow_none=allow_none))
|
||||
|
||||
|
||||
def on_error(*args, **kwargs):
|
||||
body = str(sys.exc_info()[1])
|
||||
xmlrpclib = get_xmlrpclib()
|
||||
_set_response(xmlrpclib.dumps(xmlrpclib.Fault(1, body)))
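process_body() and respond() above are thin wrappers around the standard library's XML-RPC marshalling. A minimal stdlib-only sketch of the same round trip (the method name and parameters are illustrative)::

    try:
        import xmlrpc.client as xmlrpclib   # Python 3
    except ImportError:
        import xmlrpclib                    # Python 2

    # What process_body() recovers from a request body: (params, methodname).
    request_body = xmlrpclib.dumps((1, 2), methodname='add')
    params, method = xmlrpclib.loads(request_body)

    # What respond() sends back: the handler's result wrapped in a methodResponse.
    response_body = xmlrpclib.dumps((sum(params),), methodresponse=1,
                                    encoding='utf-8', allow_none=0)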
|
||||
@@ -1,14 +0,0 @@
|
||||
"""Site container for an HTTP server.
|
||||
|
||||
A Web Site Process Bus object is used to connect applications, servers,
|
||||
and frameworks with site-wide services such as daemonization, process
|
||||
reload, signal handling, drop privileges, PID file management, logging
|
||||
for all of these, and many more.
|
||||
|
||||
The 'plugins' module defines a few abstract and concrete services for
|
||||
use with the bus. Some use tool-specific channels; see the documentation
|
||||
for each class.
|
||||
"""
|
||||
|
||||
from cherrypy.process.wspbus import bus # noqa
|
||||
from cherrypy.process import plugins, servers # noqa
|
||||
@@ -1,740 +0,0 @@
|
||||
"""Site services for use with a Web Site Process Bus."""
|
||||
|
||||
import os
|
||||
import re
|
||||
import signal as _signal
|
||||
import sys
|
||||
import time
|
||||
import threading
|
||||
|
||||
from cherrypy._cpcompat import text_or_bytes, get_thread_ident
|
||||
from cherrypy._cpcompat import ntob, Timer
|
||||
|
||||
# _module__file__base is used by Autoreload to make
|
||||
# absolute any filenames retrieved from sys.modules which are not
|
||||
# already absolute paths. This is to work around Python's quirk
|
||||
# of importing the startup script and using a relative filename
|
||||
# for it in sys.modules.
|
||||
#
|
||||
# Autoreload examines sys.modules afresh every time it runs. If an application
|
||||
# changes the current directory by executing os.chdir(), then the next time
|
||||
# Autoreload runs, it will not be able to find any filenames which are
|
||||
# not absolute paths, because the current directory is not the same as when the
|
||||
# module was first imported. Autoreload will then wrongly conclude the file
|
||||
# has "changed", and initiate the shutdown/re-exec sequence.
|
||||
# See ticket #917.
|
||||
# For this workaround to have a decent probability of success, this module
|
||||
# needs to be imported as early as possible, before the app has much chance
|
||||
# to change the working directory.
|
||||
_module__file__base = os.getcwd()
|
||||
|
||||
|
||||
class SimplePlugin(object):
|
||||
|
||||
"""Plugin base class which auto-subscribes methods for known channels."""
|
||||
|
||||
bus = None
|
||||
"""A :class:`Bus <cherrypy.process.wspbus.Bus>`, usually cherrypy.engine.
|
||||
"""
|
||||
|
||||
def __init__(self, bus):
|
||||
self.bus = bus
|
||||
|
||||
def subscribe(self):
|
||||
"""Register this object as a (multi-channel) listener on the bus."""
|
||||
for channel in self.bus.listeners:
|
||||
# Subscribe self.start, self.exit, etc. if present.
|
||||
method = getattr(self, channel, None)
|
||||
if method is not None:
|
||||
self.bus.subscribe(channel, method)
|
||||
|
||||
def unsubscribe(self):
|
||||
"""Unregister this object as a listener on the bus."""
|
||||
for channel in self.bus.listeners:
|
||||
# Unsubscribe self.start, self.exit, etc. if present.
|
||||
method = getattr(self, channel, None)
|
||||
if method is not None:
|
||||
self.bus.unsubscribe(channel, method)
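Because subscribe() hooks up any method named after a bus channel, a plugin only defines the channels it cares about. A minimal sketch against cherrypy.engine (the plugin and its stand-in connection are illustrative)::

    import cherrypy
    from cherrypy.process.plugins import SimplePlugin

    class DatabasePlugin(SimplePlugin):
        """Open a connection on 'start', close it on 'stop'."""

        def start(self):
            self.bus.log('Connecting to the database.')
            self.conn = object()  # stand-in for a real connection

        def stop(self):
            self.bus.log('Closing the database connection.')
            self.conn = None

    DatabasePlugin(cherrypy.engine).subscribe()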
|
||||
|
||||
|
||||
class SignalHandler(object):
|
||||
|
||||
"""Register bus channels (and listeners) for system signals.
|
||||
|
||||
You can modify what signals your application listens for, and what it does
|
||||
when it receives signals, by modifying :attr:`SignalHandler.handlers`,
|
||||
a dict of {signal name: callback} pairs. The default set is::
|
||||
|
||||
handlers = {'SIGTERM': self.bus.exit,
|
||||
'SIGHUP': self.handle_SIGHUP,
|
||||
'SIGUSR1': self.bus.graceful,
|
||||
}
|
||||
|
||||
    The :func:`SignalHandler.handle_SIGHUP` method calls
|
||||
:func:`bus.restart()<cherrypy.process.wspbus.Bus.restart>`
|
||||
if the process is daemonized, but
|
||||
:func:`bus.exit()<cherrypy.process.wspbus.Bus.exit>`
|
||||
if the process is attached to a TTY. This is because Unix window
|
||||
managers tend to send SIGHUP to terminal windows when the user closes them.
|
||||
|
||||
Feel free to add signals which are not available on every platform.
|
||||
The :class:`SignalHandler` will ignore errors raised from attempting
|
||||
to register handlers for unknown signals.
|
||||
"""
|
||||
|
||||
handlers = {}
|
||||
"""A map from signal names (e.g. 'SIGTERM') to handlers (e.g. bus.exit)."""
|
||||
|
||||
signals = {}
|
||||
"""A map from signal numbers to names."""
|
||||
|
||||
for k, v in vars(_signal).items():
|
||||
if k.startswith('SIG') and not k.startswith('SIG_'):
|
||||
signals[v] = k
|
||||
del k, v
|
||||
|
||||
def __init__(self, bus):
|
||||
self.bus = bus
|
||||
# Set default handlers
|
||||
self.handlers = {'SIGTERM': self.bus.exit,
|
||||
'SIGHUP': self.handle_SIGHUP,
|
||||
'SIGUSR1': self.bus.graceful,
|
||||
}
|
||||
|
||||
if sys.platform[:4] == 'java':
|
||||
del self.handlers['SIGUSR1']
|
||||
self.handlers['SIGUSR2'] = self.bus.graceful
|
||||
self.bus.log('SIGUSR1 cannot be set on the JVM platform. '
|
||||
'Using SIGUSR2 instead.')
|
||||
self.handlers['SIGINT'] = self._jython_SIGINT_handler
|
||||
|
||||
self._previous_handlers = {}
|
||||
        # used to determine if the process is a daemon in `self._is_daemonized`
|
||||
self._original_pid = os.getpid()
|
||||
|
||||
|
||||
def _jython_SIGINT_handler(self, signum=None, frame=None):
|
||||
# See http://bugs.jython.org/issue1313
|
||||
self.bus.log('Keyboard Interrupt: shutting down bus')
|
||||
self.bus.exit()
|
||||
|
||||
def _is_daemonized(self):
|
||||
"""Return boolean indicating if the current process is
|
||||
running as a daemon.
|
||||
|
||||
The criteria to determine the `daemon` condition is to verify
|
||||
if the current pid is not the same as the one that got used on
|
||||
the initial construction of the plugin *and* the stdin is not
|
||||
connected to a terminal.
|
||||
|
||||
The sole validation of the tty is not enough when the plugin
|
||||
is executing inside other process like in a CI tool
|
||||
(Buildbot, Jenkins).
|
||||
"""
|
||||
if (self._original_pid != os.getpid() and
|
||||
not os.isatty(sys.stdin.fileno())):
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
|
||||
|
||||
def subscribe(self):
|
||||
"""Subscribe self.handlers to signals."""
|
||||
for sig, func in self.handlers.items():
|
||||
try:
|
||||
self.set_handler(sig, func)
|
||||
except ValueError:
|
||||
pass
|
||||
|
||||
def unsubscribe(self):
|
||||
"""Unsubscribe self.handlers from signals."""
|
||||
for signum, handler in self._previous_handlers.items():
|
||||
signame = self.signals[signum]
|
||||
|
||||
if handler is None:
|
||||
self.bus.log('Restoring %s handler to SIG_DFL.' % signame)
|
||||
handler = _signal.SIG_DFL
|
||||
else:
|
||||
self.bus.log('Restoring %s handler %r.' % (signame, handler))
|
||||
|
||||
try:
|
||||
our_handler = _signal.signal(signum, handler)
|
||||
if our_handler is None:
|
||||
self.bus.log('Restored old %s handler %r, but our '
|
||||
'handler was not registered.' %
|
||||
(signame, handler), level=30)
|
||||
except ValueError:
|
||||
self.bus.log('Unable to restore %s handler %r.' %
|
||||
(signame, handler), level=40, traceback=True)
|
||||
|
||||
def set_handler(self, signal, listener=None):
|
||||
"""Subscribe a handler for the given signal (number or name).
|
||||
|
||||
If the optional 'listener' argument is provided, it will be
|
||||
subscribed as a listener for the given signal's channel.
|
||||
|
||||
If the given signal name or number is not available on the current
|
||||
platform, ValueError is raised.
|
||||
"""
|
||||
if isinstance(signal, text_or_bytes):
|
||||
signum = getattr(_signal, signal, None)
|
||||
if signum is None:
|
||||
raise ValueError('No such signal: %r' % signal)
|
||||
signame = signal
|
||||
else:
|
||||
try:
|
||||
signame = self.signals[signal]
|
||||
except KeyError:
|
||||
raise ValueError('No such signal: %r' % signal)
|
||||
signum = signal
|
||||
|
||||
prev = _signal.signal(signum, self._handle_signal)
|
||||
self._previous_handlers[signum] = prev
|
||||
|
||||
if listener is not None:
|
||||
self.bus.log('Listening for %s.' % signame)
|
||||
self.bus.subscribe(signame, listener)
|
||||
|
||||
def _handle_signal(self, signum=None, frame=None):
|
||||
"""Python signal handler (self.set_handler subscribes it for you)."""
|
||||
signame = self.signals[signum]
|
||||
self.bus.log('Caught signal %s.' % signame)
|
||||
self.bus.publish(signame)
|
||||
|
||||
def handle_SIGHUP(self):
|
||||
"""Restart if daemonized, else exit."""
|
||||
if self._is_daemonized():
|
||||
self.bus.log('SIGHUP caught while daemonized. Restarting.')
|
||||
self.bus.restart()
|
||||
else:
|
||||
# not daemonized (may be foreground or background)
|
||||
self.bus.log('SIGHUP caught but not daemonized. Exiting.')
|
||||
self.bus.exit()
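As the class docstring says, the handler map can be edited before subscribing. A hedged sketch; it assumes the running bus exposes its SignalHandler as cherrypy.engine.signal_handler, which is how CherryPy wires it up on platforms with signal support (the SIGUSR2 callback is illustrative)::

    import cherrypy

    def reopen_logs():
        cherrypy.log('SIGUSR2 received; reopening log files.')

    cherrypy.engine.signal_handler.handlers['SIGUSR2'] = reopen_logs
    # Registers (or re-registers) every handler in the map with the OS.
    cherrypy.engine.signal_handler.subscribe()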
|
||||
|
||||
|
||||
try:
|
||||
import pwd
|
||||
import grp
|
||||
except ImportError:
|
||||
pwd, grp = None, None
|
||||
|
||||
|
||||
class DropPrivileges(SimplePlugin):
|
||||
|
||||
"""Drop privileges. uid/gid arguments not available on Windows.
|
||||
|
||||
Special thanks to `Gavin Baker <http://antonym.org/2005/12/dropping-privileges-in-python.html>`_
|
||||
"""
|
||||
|
||||
def __init__(self, bus, umask=None, uid=None, gid=None):
|
||||
SimplePlugin.__init__(self, bus)
|
||||
self.finalized = False
|
||||
self.uid = uid
|
||||
self.gid = gid
|
||||
self.umask = umask
|
||||
|
||||
def _get_uid(self):
|
||||
return self._uid
|
||||
|
||||
def _set_uid(self, val):
|
||||
if val is not None:
|
||||
if pwd is None:
|
||||
self.bus.log('pwd module not available; ignoring uid.',
|
||||
level=30)
|
||||
val = None
|
||||
elif isinstance(val, text_or_bytes):
|
||||
val = pwd.getpwnam(val)[2]
|
||||
self._uid = val
|
||||
uid = property(_get_uid, _set_uid,
|
||||
doc='The uid under which to run. Availability: Unix.')
|
||||
|
||||
def _get_gid(self):
|
||||
return self._gid
|
||||
|
||||
def _set_gid(self, val):
|
||||
if val is not None:
|
||||
if grp is None:
|
||||
self.bus.log('grp module not available; ignoring gid.',
|
||||
level=30)
|
||||
val = None
|
||||
elif isinstance(val, text_or_bytes):
|
||||
val = grp.getgrnam(val)[2]
|
||||
self._gid = val
|
||||
gid = property(_get_gid, _set_gid,
|
||||
doc='The gid under which to run. Availability: Unix.')
|
||||
|
||||
def _get_umask(self):
|
||||
return self._umask
|
||||
|
||||
def _set_umask(self, val):
|
||||
if val is not None:
|
||||
try:
|
||||
os.umask
|
||||
except AttributeError:
|
||||
self.bus.log('umask function not available; ignoring umask.',
|
||||
level=30)
|
||||
val = None
|
||||
self._umask = val
|
||||
umask = property(
|
||||
_get_umask,
|
||||
_set_umask,
|
||||
doc="""The default permission mode for newly created files and
|
||||
directories.
|
||||
|
||||
Usually expressed in octal format, for example, ``0644``.
|
||||
Availability: Unix, Windows.
|
||||
""")
|
||||
|
||||
def start(self):
|
||||
# uid/gid
|
||||
def current_ids():
|
||||
"""Return the current (uid, gid) if available."""
|
||||
name, group = None, None
|
||||
if pwd:
|
||||
name = pwd.getpwuid(os.getuid())[0]
|
||||
if grp:
|
||||
group = grp.getgrgid(os.getgid())[0]
|
||||
return name, group
|
||||
|
||||
if self.finalized:
|
||||
if not (self.uid is None and self.gid is None):
|
||||
self.bus.log('Already running as uid: %r gid: %r' %
|
||||
current_ids())
|
||||
else:
|
||||
if self.uid is None and self.gid is None:
|
||||
if pwd or grp:
|
||||
self.bus.log('uid/gid not set', level=30)
|
||||
else:
|
||||
self.bus.log('Started as uid: %r gid: %r' % current_ids())
|
||||
if self.gid is not None:
|
||||
os.setgid(self.gid)
|
||||
os.setgroups([])
|
||||
if self.uid is not None:
|
||||
os.setuid(self.uid)
|
||||
self.bus.log('Running as uid: %r gid: %r' % current_ids())
|
||||
|
||||
# umask
|
||||
if self.finalized:
|
||||
if self.umask is not None:
|
||||
self.bus.log('umask already set to: %03o' % self.umask)
|
||||
else:
|
||||
if self.umask is None:
|
||||
self.bus.log('umask not set', level=30)
|
||||
else:
|
||||
old_umask = os.umask(self.umask)
|
||||
self.bus.log('umask old: %03o, new: %03o' %
|
||||
(old_umask, self.umask))
|
||||
|
||||
self.finalized = True
|
||||
# This is slightly higher than the priority for server.start
|
||||
# in order to facilitate the most common use: starting on a low
|
||||
# port (which requires root) and then dropping to another user.
|
||||
start.priority = 77
|
||||
|
||||
|
||||
class Daemonizer(SimplePlugin):
|
||||
|
||||
"""Daemonize the running script.
|
||||
|
||||
Use this with a Web Site Process Bus via::
|
||||
|
||||
Daemonizer(bus).subscribe()
|
||||
|
||||
When this component finishes, the process is completely decoupled from
|
||||
the parent environment. Please note that when this component is used,
|
||||
the return code from the parent process will still be 0 if a startup
|
||||
error occurs in the forked children. Errors in the initial daemonizing
|
||||
process still return proper exit codes. Therefore, if you use this
|
||||
plugin to daemonize, don't use the return code as an accurate indicator
|
||||
of whether the process fully started. In fact, that return code only
|
||||
    indicates if the process successfully finished the first fork.
|
||||
"""
|
||||
|
||||
def __init__(self, bus, stdin='/dev/null', stdout='/dev/null',
|
||||
stderr='/dev/null'):
|
||||
SimplePlugin.__init__(self, bus)
|
||||
self.stdin = stdin
|
||||
self.stdout = stdout
|
||||
self.stderr = stderr
|
||||
self.finalized = False
|
||||
|
||||
def start(self):
|
||||
if self.finalized:
|
||||
            self.bus.log('Already daemonized.')
|
||||
|
||||
# forking has issues with threads:
|
||||
# http://www.opengroup.org/onlinepubs/000095399/functions/fork.html
|
||||
# "The general problem with making fork() work in a multi-threaded
|
||||
# world is what to do with all of the threads..."
|
||||
# So we check for active threads:
|
||||
if threading.activeCount() != 1:
|
||||
self.bus.log('There are %r active threads. '
|
||||
'Daemonizing now may cause strange failures.' %
|
||||
threading.enumerate(), level=30)
|
||||
|
||||
# See http://www.erlenstar.demon.co.uk/unix/faq_2.html#SEC16
|
||||
# (or http://www.faqs.org/faqs/unix-faq/programmer/faq/ section 1.7)
|
||||
# and http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/66012
|
||||
|
||||
# Finish up with the current stdout/stderr
|
||||
sys.stdout.flush()
|
||||
sys.stderr.flush()
|
||||
|
||||
# Do first fork.
|
||||
try:
|
||||
pid = os.fork()
|
||||
if pid == 0:
|
||||
# This is the child process. Continue.
|
||||
pass
|
||||
else:
|
||||
# This is the first parent. Exit, now that we've forked.
|
||||
self.bus.log('Forking once.')
|
||||
os._exit(0)
|
||||
except OSError:
|
||||
# Python raises OSError rather than returning negative numbers.
|
||||
exc = sys.exc_info()[1]
|
||||
sys.exit('%s: fork #1 failed: (%d) %s\n'
|
||||
% (sys.argv[0], exc.errno, exc.strerror))
|
||||
|
||||
os.setsid()
|
||||
|
||||
# Do second fork
|
||||
try:
|
||||
pid = os.fork()
|
||||
if pid > 0:
|
||||
self.bus.log('Forking twice.')
|
||||
os._exit(0) # Exit second parent
|
||||
except OSError:
|
||||
exc = sys.exc_info()[1]
|
||||
sys.exit('%s: fork #2 failed: (%d) %s\n'
|
||||
% (sys.argv[0], exc.errno, exc.strerror))
|
||||
|
||||
os.chdir('/')
|
||||
os.umask(0)
|
||||
|
||||
si = open(self.stdin, 'r')
|
||||
so = open(self.stdout, 'a+')
|
||||
se = open(self.stderr, 'a+')
|
||||
|
||||
# os.dup2(fd, fd2) will close fd2 if necessary,
|
||||
# so we don't explicitly close stdin/out/err.
|
||||
# See http://docs.python.org/lib/os-fd-ops.html
|
||||
os.dup2(si.fileno(), sys.stdin.fileno())
|
||||
os.dup2(so.fileno(), sys.stdout.fileno())
|
||||
os.dup2(se.fileno(), sys.stderr.fileno())
|
||||
|
||||
self.bus.log('Daemonized to PID: %s' % os.getpid())
|
||||
self.finalized = True
|
||||
start.priority = 65
|
||||
|
||||
|
||||
class PIDFile(SimplePlugin):
|
||||
|
||||
"""Maintain a PID file via a WSPBus."""
|
||||
|
||||
def __init__(self, bus, pidfile):
|
||||
SimplePlugin.__init__(self, bus)
|
||||
self.pidfile = pidfile
|
||||
self.finalized = False
|
||||
|
||||
def start(self):
|
||||
pid = os.getpid()
|
||||
if self.finalized:
|
||||
self.bus.log('PID %r already written to %r.' % (pid, self.pidfile))
|
||||
else:
|
||||
open(self.pidfile, 'wb').write(ntob('%s\n' % pid, 'utf8'))
|
||||
self.bus.log('PID %r written to %r.' % (pid, self.pidfile))
|
||||
self.finalized = True
|
||||
start.priority = 70
|
||||
|
||||
def exit(self):
|
||||
try:
|
||||
os.remove(self.pidfile)
|
||||
self.bus.log('PID file removed: %r.' % self.pidfile)
|
||||
except (KeyboardInterrupt, SystemExit):
|
||||
raise
|
||||
except:
|
||||
pass
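The two plugins above are typically combined for unattended operation: daemonize first (start priority 65), write the PID file next (priority 70), and only then start the servers. A minimal sketch; the log and PID file paths are illustrative::

    import cherrypy
    from cherrypy.process.plugins import Daemonizer, PIDFile

    Daemonizer(cherrypy.engine, stdout='/tmp/app.log',
               stderr='/tmp/app.err').subscribe()
    PIDFile(cherrypy.engine, '/tmp/app.pid').subscribe()

    cherrypy.engine.start()
    cherrypy.engine.block()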
|
||||
|
||||
|
||||
class PerpetualTimer(Timer):
|
||||
|
||||
"""A responsive subclass of threading.Timer whose run() method repeats.
|
||||
|
||||
Use this timer only when you really need a very interruptible timer;
|
||||
this checks its 'finished' condition up to 20 times a second, which can
|
||||
    result in pretty high CPU usage.
|
||||
"""
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
"Override parent constructor to allow 'bus' to be provided."
|
||||
self.bus = kwargs.pop('bus', None)
|
||||
super(PerpetualTimer, self).__init__(*args, **kwargs)
|
||||
|
||||
def run(self):
|
||||
while True:
|
||||
self.finished.wait(self.interval)
|
||||
if self.finished.isSet():
|
||||
return
|
||||
try:
|
||||
self.function(*self.args, **self.kwargs)
|
||||
except Exception:
|
||||
if self.bus:
|
||||
self.bus.log(
|
||||
'Error in perpetual timer thread function %r.' %
|
||||
self.function, level=40, traceback=True)
|
||||
# Quit on first error to avoid massive logs.
|
||||
raise
|
||||
|
||||
|
||||
class BackgroundTask(threading.Thread):
|
||||
|
||||
"""A subclass of threading.Thread whose run() method repeats.
|
||||
|
||||
Use this class for most repeating tasks. It uses time.sleep() to wait
|
||||
for each interval, which isn't very responsive; that is, even if you call
|
||||
self.cancel(), you'll have to wait until the sleep() call finishes before
|
||||
the thread stops. To compensate, it defaults to being daemonic, which means
|
||||
it won't delay stopping the whole process.
|
||||
"""
|
||||
|
||||
def __init__(self, interval, function, args=[], kwargs={}, bus=None):
|
||||
super(BackgroundTask, self).__init__()
|
||||
self.interval = interval
|
||||
self.function = function
|
||||
self.args = args
|
||||
self.kwargs = kwargs
|
||||
self.running = False
|
||||
self.bus = bus
|
||||
|
||||
# default to daemonic
|
||||
self.daemon = True
|
||||
|
||||
def cancel(self):
|
||||
self.running = False
|
||||
|
||||
def run(self):
|
||||
self.running = True
|
||||
while self.running:
|
||||
time.sleep(self.interval)
|
||||
if not self.running:
|
||||
return
|
||||
try:
|
||||
self.function(*self.args, **self.kwargs)
|
||||
except Exception:
|
||||
if self.bus:
|
||||
self.bus.log('Error in background task thread function %r.'
|
||||
% self.function, level=40, traceback=True)
|
||||
# Quit on first error to avoid massive logs.
|
||||
raise
|
||||
|
||||
|
||||
class Monitor(SimplePlugin):
|
||||
|
||||
"""WSPBus listener to periodically run a callback in its own thread."""
|
||||
|
||||
callback = None
|
||||
"""The function to call at intervals."""
|
||||
|
||||
frequency = 60
|
||||
"""The time in seconds between callback runs."""
|
||||
|
||||
thread = None
|
||||
"""A :class:`BackgroundTask<cherrypy.process.plugins.BackgroundTask>`
|
||||
thread.
|
||||
"""
|
||||
|
||||
def __init__(self, bus, callback, frequency=60, name=None):
|
||||
SimplePlugin.__init__(self, bus)
|
||||
self.callback = callback
|
||||
self.frequency = frequency
|
||||
self.thread = None
|
||||
self.name = name
|
||||
|
||||
def start(self):
|
||||
"""Start our callback in its own background thread."""
|
||||
if self.frequency > 0:
|
||||
threadname = self.name or self.__class__.__name__
|
||||
if self.thread is None:
|
||||
self.thread = BackgroundTask(self.frequency, self.callback,
|
||||
bus=self.bus)
|
||||
self.thread.setName(threadname)
|
||||
self.thread.start()
|
||||
self.bus.log('Started monitor thread %r.' % threadname)
|
||||
else:
|
||||
self.bus.log('Monitor thread %r already started.' % threadname)
|
||||
start.priority = 70
|
||||
|
||||
def stop(self):
|
||||
"""Stop our callback's background task thread."""
|
||||
if self.thread is None:
|
||||
            self.bus.log('No thread running for %s.' %
                         (self.name or self.__class__.__name__))
|
||||
else:
|
||||
if self.thread is not threading.currentThread():
|
||||
name = self.thread.getName()
|
||||
self.thread.cancel()
|
||||
if not self.thread.daemon:
|
||||
self.bus.log('Joining %r' % name)
|
||||
self.thread.join()
|
||||
self.bus.log('Stopped thread %r.' % name)
|
||||
self.thread = None
|
||||
|
||||
def graceful(self):
|
||||
"""Stop the callback's background task thread and restart it."""
|
||||
self.stop()
|
||||
self.start()
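Monitor is the usual way to run periodic housekeeping on the bus. A minimal sketch that runs a callback every five minutes (the callback and thread name are illustrative)::

    import cherrypy
    from cherrypy.process.plugins import Monitor

    def flush_caches():
        cherrypy.log('Flushing caches.')

    Monitor(cherrypy.engine, flush_caches,
            frequency=300, name='CacheFlusher').subscribe()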
|
||||
|
||||
|
||||
class Autoreloader(Monitor):
|
||||
|
||||
"""Monitor which re-executes the process when files change.
|
||||
|
||||
This :ref:`plugin<plugins>` restarts the process (via :func:`os.execv`)
|
||||
if any of the files it monitors change (or is deleted). By default, the
|
||||
autoreloader monitors all imported modules; you can add to the
|
||||
set by adding to ``autoreload.files``::
|
||||
|
||||
cherrypy.engine.autoreload.files.add(myFile)
|
||||
|
||||
If there are imported files you do *not* wish to monitor, you can
|
||||
adjust the ``match`` attribute, a regular expression. For example,
|
||||
to stop monitoring cherrypy itself::
|
||||
|
||||
cherrypy.engine.autoreload.match = r'^(?!cherrypy).+'
|
||||
|
||||
Like all :class:`Monitor<cherrypy.process.plugins.Monitor>` plugins,
|
||||
the autoreload plugin takes a ``frequency`` argument. The default is
|
||||
1 second; that is, the autoreloader will examine files once each second.
|
||||
"""
|
||||
|
||||
files = None
|
||||
"""The set of files to poll for modifications."""
|
||||
|
||||
frequency = 1
|
||||
"""The interval in seconds at which to poll for modified files."""
|
||||
|
||||
match = '.*'
|
||||
"""A regular expression by which to match filenames."""
|
||||
|
||||
def __init__(self, bus, frequency=1, match='.*'):
|
||||
self.mtimes = {}
|
||||
self.files = set()
|
||||
self.match = match
|
||||
Monitor.__init__(self, bus, self.run, frequency)
|
||||
|
||||
def start(self):
|
||||
"""Start our own background task thread for self.run."""
|
||||
if self.thread is None:
|
||||
self.mtimes = {}
|
||||
Monitor.start(self)
|
||||
start.priority = 70
|
||||
|
||||
def sysfiles(self):
|
||||
"""Return a Set of sys.modules filenames to monitor."""
|
||||
files = set()
|
||||
for k, m in list(sys.modules.items()):
|
||||
if re.match(self.match, k):
|
||||
if (
|
||||
hasattr(m, '__loader__') and
|
||||
hasattr(m.__loader__, 'archive')
|
||||
):
|
||||
f = m.__loader__.archive
|
||||
else:
|
||||
f = getattr(m, '__file__', None)
|
||||
if f is not None and not os.path.isabs(f):
|
||||
# ensure absolute paths so a os.chdir() in the app
|
||||
# doesn't break me
|
||||
f = os.path.normpath(
|
||||
os.path.join(_module__file__base, f))
|
||||
files.add(f)
|
||||
return files
|
||||
|
||||
def run(self):
|
||||
"""Reload the process if registered files have been modified."""
|
||||
for filename in self.sysfiles() | self.files:
|
||||
if filename:
|
||||
if filename.endswith('.pyc'):
|
||||
filename = filename[:-1]
|
||||
|
||||
oldtime = self.mtimes.get(filename, 0)
|
||||
if oldtime is None:
|
||||
# Module with no .py file. Skip it.
|
||||
continue
|
||||
|
||||
try:
|
||||
mtime = os.stat(filename).st_mtime
|
||||
except OSError:
|
||||
# Either a module with no .py file, or it's been deleted.
|
||||
mtime = None
|
||||
|
||||
if filename not in self.mtimes:
|
||||
# If a module has no .py file, this will be None.
|
||||
self.mtimes[filename] = mtime
|
||||
else:
|
||||
if mtime is None or mtime > oldtime:
|
||||
# The file has been deleted or modified.
|
||||
self.bus.log('Restarting because %s changed.' %
|
||||
filename)
|
||||
self.thread.cancel()
|
||||
self.bus.log('Stopped thread %r.' %
|
||||
self.thread.getName())
|
||||
self.bus.restart()
|
||||
return
|
||||
|
||||
|
||||
class ThreadManager(SimplePlugin):
|
||||
|
||||
"""Manager for HTTP request threads.
|
||||
|
||||
If you have control over thread creation and destruction, publish to
|
||||
the 'acquire_thread' and 'release_thread' channels (for each thread).
|
||||
This will register/unregister the current thread and publish to
|
||||
'start_thread' and 'stop_thread' listeners in the bus as needed.
|
||||
|
||||
If threads are created and destroyed by code you do not control
|
||||
(e.g., Apache), then, at the beginning of every HTTP request,
|
||||
publish to 'acquire_thread' only. You should not publish to
|
||||
'release_thread' in this case, since you do not know whether
|
||||
the thread will be re-used or not. The bus will call
|
||||
'stop_thread' listeners for you when it stops.
|
||||
"""
|
||||
|
||||
threads = None
|
||||
"""A map of {thread ident: index number} pairs."""
|
||||
|
||||
def __init__(self, bus):
|
||||
self.threads = {}
|
||||
SimplePlugin.__init__(self, bus)
|
||||
self.bus.listeners.setdefault('acquire_thread', set())
|
||||
self.bus.listeners.setdefault('start_thread', set())
|
||||
self.bus.listeners.setdefault('release_thread', set())
|
||||
self.bus.listeners.setdefault('stop_thread', set())
|
||||
|
||||
def acquire_thread(self):
|
||||
"""Run 'start_thread' listeners for the current thread.
|
||||
|
||||
If the current thread has already been seen, any 'start_thread'
|
||||
listeners will not be run again.
|
||||
"""
|
||||
thread_ident = get_thread_ident()
|
||||
if thread_ident not in self.threads:
|
||||
# We can't just use get_ident as the thread ID
|
||||
# because some platforms reuse thread ID's.
|
||||
i = len(self.threads) + 1
|
||||
self.threads[thread_ident] = i
|
||||
self.bus.publish('start_thread', i)
|
||||
|
||||
def release_thread(self):
|
||||
"""Release the current thread and run 'stop_thread' listeners."""
|
||||
thread_ident = get_thread_ident()
|
||||
i = self.threads.pop(thread_ident, None)
|
||||
if i is not None:
|
||||
self.bus.publish('stop_thread', i)
|
||||
|
||||
def stop(self):
|
||||
"""Release all threads and run all 'stop_thread' listeners."""
|
||||
for thread_ident, i in self.threads.items():
|
||||
self.bus.publish('stop_thread', i)
|
||||
self.threads.clear()
|
||||
graceful = stop
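When the worker threads are owned by someone else (mod_wsgi, for example), the embedding code publishes to 'acquire_thread' at the start of each request, exactly as described above. A minimal sketch of both sides (the listener is illustrative)::

    import cherrypy

    def on_start_thread(thread_index):
        cherrypy.log('Worker thread #%s seen for the first time.' % thread_index)

    cherrypy.engine.subscribe('start_thread', on_start_thread)

    # Inside the externally managed request-handling code:
    cherrypy.engine.publish('acquire_thread')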
|
||||
@@ -1,470 +0,0 @@
|
||||
"""
|
||||
Starting in CherryPy 3.1, cherrypy.server is implemented as an
|
||||
:ref:`Engine Plugin<plugins>`. It's an instance of
|
||||
:class:`cherrypy._cpserver.Server`, which is a subclass of
|
||||
:class:`cherrypy.process.servers.ServerAdapter`. The ``ServerAdapter`` class
|
||||
is designed to control other servers, as well.
|
||||
|
||||
Multiple servers/ports
|
||||
======================
|
||||
|
||||
If you need to start more than one HTTP server (to serve on multiple ports, or
|
||||
protocols, etc.), you can manually register each one and then start them all
|
||||
with engine.start::
|
||||
|
||||
s1 = ServerAdapter(cherrypy.engine, MyWSGIServer(host='0.0.0.0', port=80))
|
||||
s2 = ServerAdapter(cherrypy.engine,
|
||||
another.HTTPServer(host='127.0.0.1',
|
||||
SSL=True))
|
||||
s1.subscribe()
|
||||
s2.subscribe()
|
||||
cherrypy.engine.start()
|
||||
|
||||
.. index:: SCGI
|
||||
|
||||
FastCGI/SCGI
|
||||
============
|
||||
|
||||
There are also Flup\ **F**\ CGIServer and Flup\ **S**\ CGIServer classes in
|
||||
:mod:`cherrypy.process.servers`. To start an fcgi server, for example,
|
||||
wrap an instance of it in a ServerAdapter::
|
||||
|
||||
addr = ('0.0.0.0', 4000)
|
||||
f = servers.FlupFCGIServer(application=cherrypy.tree, bindAddress=addr)
|
||||
s = servers.ServerAdapter(cherrypy.engine, httpserver=f, bind_addr=addr)
|
||||
s.subscribe()
|
||||
|
||||
The :doc:`cherryd</deployguide/cherryd>` startup script will do the above for
|
||||
you via its `-f` flag.
|
||||
Note that you need to download and install `flup <http://trac.saddi.com/flup>`_
|
||||
yourself, whether you use ``cherryd`` or not.
|
||||
|
||||
.. _fastcgi:
|
||||
.. index:: FastCGI
|
||||
|
||||
FastCGI
|
||||
-------
|
||||
|
||||
A very simple setup lets your CherryPy application run with FastCGI.
|
||||
You just need the flup library,
|
||||
plus a running Apache server (with ``mod_fastcgi``) or lighttpd server.
|
||||
|
||||
CherryPy code
|
||||
^^^^^^^^^^^^^
|
||||
|
||||
hello.py::
|
||||
|
||||
#!/usr/bin/python
|
||||
import cherrypy
|
||||
|
||||
class HelloWorld:
|
||||
\"""Sample request handler class.\"""
|
||||
@cherrypy.expose
|
||||
def index(self):
|
||||
return "Hello world!"
|
||||
|
||||
cherrypy.tree.mount(HelloWorld())
|
||||
# CherryPy autoreload must be disabled for the flup server to work
|
||||
cherrypy.config.update({'engine.autoreload.on':False})
|
||||
|
||||
Then run :doc:`/deployguide/cherryd` with the '-f' arg::
|
||||
|
||||
cherryd -c <myconfig> -d -f -i hello.py
|
||||
|
||||
Apache
|
||||
^^^^^^
|
||||
|
||||
At the top level in httpd.conf::
|
||||
|
||||
FastCgiIpcDir /tmp
|
||||
FastCgiServer /path/to/cherry.fcgi -idle-timeout 120 -processes 4
|
||||
|
||||
And inside the relevant VirtualHost section::
|
||||
|
||||
# FastCGI config
|
||||
AddHandler fastcgi-script .fcgi
|
||||
ScriptAliasMatch (.*$) /path/to/cherry.fcgi$1
|
||||
|
||||
Lighttpd
|
||||
^^^^^^^^
|
||||
|
||||
For `Lighttpd <http://www.lighttpd.net/>`_ you can follow these
|
||||
instructions. Within ``lighttpd.conf`` make sure ``mod_fastcgi`` is
|
||||
active within ``server.modules``. Then, within your ``$HTTP["host"]``
|
||||
directive, configure your fastcgi script like the following::
|
||||
|
||||
$HTTP["url"] =~ "" {
|
||||
fastcgi.server = (
|
||||
"/" => (
|
||||
"script.fcgi" => (
|
||||
"bin-path" => "/path/to/your/script.fcgi",
|
||||
"socket" => "/tmp/script.sock",
|
||||
"check-local" => "disable",
|
||||
"disable-time" => 1,
|
||||
"min-procs" => 1,
|
||||
"max-procs" => 1, # adjust as needed
|
||||
),
|
||||
),
|
||||
)
|
||||
    } # end of $HTTP["url"] =~ ""
|
||||
|
||||
Please see `Lighttpd FastCGI Docs
|
||||
<http://redmine.lighttpd.net/wiki/lighttpd/Docs:ModFastCGI>`_ for
|
||||
an explanation of the possible configuration options.
|
||||
"""
|
||||
|
||||
import os
|
||||
import sys
|
||||
import time
|
||||
import warnings
|
||||
|
||||
|
||||
class ServerAdapter(object):
|
||||
|
||||
"""Adapter for an HTTP server.
|
||||
|
||||
If you need to start more than one HTTP server (to serve on multiple
|
||||
ports, or protocols, etc.), you can manually register each one and then
|
||||
start them all with bus.start::
|
||||
|
||||
s1 = ServerAdapter(bus, MyWSGIServer(host='0.0.0.0', port=80))
|
||||
s2 = ServerAdapter(bus, another.HTTPServer(host='127.0.0.1', SSL=True))
|
||||
s1.subscribe()
|
||||
s2.subscribe()
|
||||
bus.start()
|
||||
"""
|
||||
|
||||
def __init__(self, bus, httpserver=None, bind_addr=None):
|
||||
self.bus = bus
|
||||
self.httpserver = httpserver
|
||||
self.bind_addr = bind_addr
|
||||
self.interrupt = None
|
||||
self.running = False
|
||||
|
||||
def subscribe(self):
|
||||
self.bus.subscribe('start', self.start)
|
||||
self.bus.subscribe('stop', self.stop)
|
||||
|
||||
def unsubscribe(self):
|
||||
self.bus.unsubscribe('start', self.start)
|
||||
self.bus.unsubscribe('stop', self.stop)
|
||||
|
||||
def start(self):
|
||||
"""Start the HTTP server."""
|
||||
if self.bind_addr is None:
|
||||
on_what = 'unknown interface (dynamic?)'
|
||||
elif isinstance(self.bind_addr, tuple):
|
||||
on_what = self._get_base()
|
||||
else:
|
||||
on_what = 'socket file: %s' % self.bind_addr
|
||||
|
||||
if self.running:
|
||||
self.bus.log('Already serving on %s' % on_what)
|
||||
return
|
||||
|
||||
self.interrupt = None
|
||||
if not self.httpserver:
|
||||
raise ValueError('No HTTP server has been created.')
|
||||
|
||||
if not os.environ.get('LISTEN_PID', None):
|
||||
# Start the httpserver in a new thread.
|
||||
if isinstance(self.bind_addr, tuple):
|
||||
wait_for_free_port(*self.bind_addr)
|
||||
|
||||
import threading
|
||||
t = threading.Thread(target=self._start_http_thread)
|
||||
t.setName('HTTPServer ' + t.getName())
|
||||
t.start()
|
||||
|
||||
self.wait()
|
||||
self.running = True
|
||||
self.bus.log('Serving on %s' % on_what)
|
||||
start.priority = 75
|
||||
|
||||
def _get_base(self):
|
||||
if not self.httpserver:
|
||||
return ''
|
||||
host, port = self.bind_addr
|
||||
if getattr(self.httpserver, 'ssl_adapter', None):
|
||||
scheme = 'https'
|
||||
if port != 443:
|
||||
host += ':%s' % port
|
||||
else:
|
||||
scheme = 'http'
|
||||
if port != 80:
|
||||
host += ':%s' % port
|
||||
|
||||
return '%s://%s' % (scheme, host)
|
||||
|
||||
def _start_http_thread(self):
|
||||
"""HTTP servers MUST be running in new threads, so that the
|
||||
main thread persists to receive KeyboardInterrupt's. If an
|
||||
exception is raised in the httpserver's thread then it's
|
||||
trapped here, and the bus (and therefore our httpserver)
|
||||
are shut down.
|
||||
"""
|
||||
try:
|
||||
self.httpserver.start()
|
||||
except KeyboardInterrupt:
|
||||
self.bus.log('<Ctrl-C> hit: shutting down HTTP server')
|
||||
self.interrupt = sys.exc_info()[1]
|
||||
self.bus.exit()
|
||||
except SystemExit:
|
||||
self.bus.log('SystemExit raised: shutting down HTTP server')
|
||||
self.interrupt = sys.exc_info()[1]
|
||||
self.bus.exit()
|
||||
raise
|
||||
except:
|
||||
self.interrupt = sys.exc_info()[1]
|
||||
self.bus.log('Error in HTTP server: shutting down',
|
||||
traceback=True, level=40)
|
||||
self.bus.exit()
|
||||
raise
|
||||
|
||||
def wait(self):
|
||||
"""Wait until the HTTP server is ready to receive requests."""
|
||||
while not getattr(self.httpserver, 'ready', False):
|
||||
if self.interrupt:
|
||||
raise self.interrupt
|
||||
time.sleep(.1)
|
||||
|
||||
# Wait for port to be occupied
|
||||
if not os.environ.get('LISTEN_PID', None):
|
||||
# Wait for port to be occupied if not running via socket-activation
|
||||
            # (for socket-activation the port will be managed by systemd)
|
||||
if isinstance(self.bind_addr, tuple):
|
||||
host, port = self.bind_addr
|
||||
wait_for_occupied_port(host, port)
|
||||
|
||||
def stop(self):
|
||||
"""Stop the HTTP server."""
|
||||
if self.running:
|
||||
# stop() MUST block until the server is *truly* stopped.
|
||||
self.httpserver.stop()
|
||||
# Wait for the socket to be truly freed.
|
||||
if isinstance(self.bind_addr, tuple):
|
||||
wait_for_free_port(*self.bind_addr)
|
||||
self.running = False
|
||||
self.bus.log('HTTP Server %s shut down' % self.httpserver)
|
||||
else:
|
||||
self.bus.log('HTTP Server %s already shut down' % self.httpserver)
|
||||
stop.priority = 25
|
||||
|
||||
def restart(self):
|
||||
"""Restart the HTTP server."""
|
||||
self.stop()
|
||||
self.start()
|
||||
|
||||
|
||||
class FlupCGIServer(object):
|
||||
|
||||
"""Adapter for a flup.server.cgi.WSGIServer."""
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
self.args = args
|
||||
self.kwargs = kwargs
|
||||
self.ready = False
|
||||
|
||||
def start(self):
|
||||
"""Start the CGI server."""
|
||||
# We have to instantiate the server class here because its __init__
|
||||
# starts a threadpool. If we do it too early, daemonize won't work.
|
||||
from flup.server.cgi import WSGIServer
|
||||
|
||||
self.cgiserver = WSGIServer(*self.args, **self.kwargs)
|
||||
self.ready = True
|
||||
self.cgiserver.run()
|
||||
|
||||
def stop(self):
|
||||
"""Stop the HTTP server."""
|
||||
self.ready = False
|
||||
|
||||
|
||||
class FlupFCGIServer(object):
|
||||
|
||||
"""Adapter for a flup.server.fcgi.WSGIServer."""
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
if kwargs.get('bindAddress', None) is None:
|
||||
import socket
|
||||
if not hasattr(socket, 'fromfd'):
|
||||
raise ValueError(
|
||||
'Dynamic FCGI server not available on this platform. '
|
||||
'You must use a static or external one by providing a '
|
||||
'legal bindAddress.')
|
||||
self.args = args
|
||||
self.kwargs = kwargs
|
||||
self.ready = False
|
||||
|
||||
def start(self):
|
||||
"""Start the FCGI server."""
|
||||
# We have to instantiate the server class here because its __init__
|
||||
# starts a threadpool. If we do it too early, daemonize won't work.
|
||||
from flup.server.fcgi import WSGIServer
|
||||
self.fcgiserver = WSGIServer(*self.args, **self.kwargs)
|
||||
# TODO: report this bug upstream to flup.
|
||||
# If we don't set _oldSIGs on Windows, we get:
|
||||
# File "C:\Python24\Lib\site-packages\flup\server\threadedserver.py",
|
||||
# line 108, in run
|
||||
# self._restoreSignalHandlers()
|
||||
# File "C:\Python24\Lib\site-packages\flup\server\threadedserver.py",
|
||||
# line 156, in _restoreSignalHandlers
|
||||
# for signum,handler in self._oldSIGs:
|
||||
# AttributeError: 'WSGIServer' object has no attribute '_oldSIGs'
|
||||
self.fcgiserver._installSignalHandlers = lambda: None
|
||||
self.fcgiserver._oldSIGs = []
|
||||
self.ready = True
|
||||
self.fcgiserver.run()
|
||||
|
||||
def stop(self):
|
||||
"""Stop the HTTP server."""
|
||||
# Forcibly stop the fcgi server main event loop.
|
||||
self.fcgiserver._keepGoing = False
|
||||
# Force all worker threads to die off.
|
||||
self.fcgiserver._threadPool.maxSpare = (
|
||||
self.fcgiserver._threadPool._idleCount)
|
||||
self.ready = False
|
||||
|
||||
|
||||
class FlupSCGIServer(object):
|
||||
|
||||
"""Adapter for a flup.server.scgi.WSGIServer."""
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
self.args = args
|
||||
self.kwargs = kwargs
|
||||
self.ready = False
|
||||
|
||||
def start(self):
|
||||
"""Start the SCGI server."""
|
||||
# We have to instantiate the server class here because its __init__
|
||||
# starts a threadpool. If we do it too early, daemonize won't work.
|
||||
from flup.server.scgi import WSGIServer
|
||||
self.scgiserver = WSGIServer(*self.args, **self.kwargs)
|
||||
# TODO: report this bug upstream to flup.
|
||||
# If we don't set _oldSIGs on Windows, we get:
|
||||
# File "C:\Python24\Lib\site-packages\flup\server\threadedserver.py",
|
||||
# line 108, in run
|
||||
# self._restoreSignalHandlers()
|
||||
# File "C:\Python24\Lib\site-packages\flup\server\threadedserver.py",
|
||||
# line 156, in _restoreSignalHandlers
|
||||
# for signum,handler in self._oldSIGs:
|
||||
# AttributeError: 'WSGIServer' object has no attribute '_oldSIGs'
|
||||
self.scgiserver._installSignalHandlers = lambda: None
|
||||
self.scgiserver._oldSIGs = []
|
||||
self.ready = True
|
||||
self.scgiserver.run()
|
||||
|
||||
def stop(self):
|
||||
"""Stop the HTTP server."""
|
||||
self.ready = False
|
||||
# Forcibly stop the scgi server main event loop.
|
||||
self.scgiserver._keepGoing = False
|
||||
# Force all worker threads to die off.
|
||||
self.scgiserver._threadPool.maxSpare = 0
|
||||
|
||||
|
||||
def client_host(server_host):
|
||||
"""Return the host on which a client can connect to the given listener."""
|
||||
if server_host == '0.0.0.0':
|
||||
# 0.0.0.0 is INADDR_ANY, which should answer on localhost.
|
||||
return '127.0.0.1'
|
||||
if server_host in ('::', '::0', '::0.0.0.0'):
|
||||
# :: is IN6ADDR_ANY, which should answer on localhost.
|
||||
# ::0 and ::0.0.0.0 are non-canonical but common
|
||||
# ways to write IN6ADDR_ANY.
|
||||
return '::1'
|
||||
return server_host
|
||||
|
||||
|
||||
def check_port(host, port, timeout=1.0):
|
||||
"""Raise an error if the given port is not free on the given host."""
|
||||
if not host:
|
||||
raise ValueError("Host values of '' or None are not allowed.")
|
||||
host = client_host(host)
|
||||
port = int(port)
|
||||
|
||||
import socket
|
||||
|
||||
# AF_INET or AF_INET6 socket
|
||||
# Get the correct address family for our host (allows IPv6 addresses)
|
||||
try:
|
||||
info = socket.getaddrinfo(host, port, socket.AF_UNSPEC,
|
||||
socket.SOCK_STREAM)
|
||||
except socket.gaierror:
|
||||
if ':' in host:
|
||||
info = [(
|
||||
socket.AF_INET6, socket.SOCK_STREAM, 0, '', (host, port, 0, 0)
|
||||
)]
|
||||
else:
|
||||
info = [(socket.AF_INET, socket.SOCK_STREAM, 0, '', (host, port))]
|
||||
|
||||
for res in info:
|
||||
af, socktype, proto, canonname, sa = res
|
||||
s = None
|
||||
try:
|
||||
s = socket.socket(af, socktype, proto)
|
||||
# See http://groups.google.com/group/cherrypy-users/
|
||||
# browse_frm/thread/bbfe5eb39c904fe0
|
||||
s.settimeout(timeout)
|
||||
s.connect((host, port))
|
||||
s.close()
|
||||
except socket.error:
|
||||
if s:
|
||||
s.close()
|
||||
else:
|
||||
raise IOError('Port %s is in use on %s; perhaps the previous '
|
||||
'httpserver did not shut down properly.' %
|
||||
(repr(port), repr(host)))
|
||||
|
||||
|
||||
# Feel free to increase these defaults on slow systems:
|
||||
free_port_timeout = 0.1
|
||||
occupied_port_timeout = 1.0
|
||||
|
||||
|
||||
def wait_for_free_port(host, port, timeout=None):
|
||||
"""Wait for the specified port to become free (drop requests)."""
|
||||
if not host:
|
||||
raise ValueError("Host values of '' or None are not allowed.")
|
||||
if timeout is None:
|
||||
timeout = free_port_timeout
|
||||
|
||||
for trial in range(50):
|
||||
try:
|
||||
# we are expecting a free port, so reduce the timeout
|
||||
check_port(host, port, timeout=timeout)
|
||||
except IOError:
|
||||
# Give the old server thread time to free the port.
|
||||
time.sleep(timeout)
|
||||
else:
|
||||
return
|
||||
|
||||
raise IOError('Port %r not free on %r' % (port, host))
|
||||
|
||||
|
||||
def wait_for_occupied_port(host, port, timeout=None):
|
||||
"""Wait for the specified port to become active (receive requests)."""
|
||||
if not host:
|
||||
raise ValueError("Host values of '' or None are not allowed.")
|
||||
if timeout is None:
|
||||
timeout = occupied_port_timeout
|
||||
|
||||
for trial in range(50):
|
||||
try:
|
||||
check_port(host, port, timeout=timeout)
|
||||
except IOError:
|
||||
# port is occupied
|
||||
return
|
||||
else:
|
||||
time.sleep(timeout)
|
||||
|
||||
if host == client_host(host):
|
||||
raise IOError('Port %r not bound on %r' % (port, host))
|
||||
|
||||
# On systems where a loopback interface is not available and the
|
||||
# server is bound to all interfaces, it's difficult to determine
|
||||
# whether the server is in fact occupying the port. In this case,
|
||||
# just issue a warning and move on. See issue #1100.
|
||||
msg = 'Unable to verify that the server is bound on %r' % port
|
||||
warnings.warn(msg)
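These helpers are what ServerAdapter.start() and stop() call around binding. A small sketch of calling them directly (host and port are illustrative)::

    from cherrypy.process import servers

    host, port = '127.0.0.1', 8080
    try:
        servers.check_port(host, port, timeout=1.0)
    except IOError:
        # Something is still listening; wait for it to release the port
        # (this raises IOError again if it never does).
        servers.wait_for_free_port(host, port)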
|
||||
@@ -1,180 +0,0 @@
|
||||
"""Windows service. Requires pywin32."""
|
||||
|
||||
import os
|
||||
import win32api
|
||||
import win32con
|
||||
import win32event
|
||||
import win32service
|
||||
import win32serviceutil
|
||||
|
||||
from cherrypy.process import wspbus, plugins
|
||||
|
||||
|
||||
class ConsoleCtrlHandler(plugins.SimplePlugin):
|
||||
|
||||
"""A WSPBus plugin for handling Win32 console events (like Ctrl-C)."""
|
||||
|
||||
def __init__(self, bus):
|
||||
self.is_set = False
|
||||
plugins.SimplePlugin.__init__(self, bus)
|
||||
|
||||
def start(self):
|
||||
if self.is_set:
|
||||
self.bus.log('Handler for console events already set.', level=40)
|
||||
return
|
||||
|
||||
result = win32api.SetConsoleCtrlHandler(self.handle, 1)
|
||||
if result == 0:
|
||||
self.bus.log('Could not SetConsoleCtrlHandler (error %r)' %
|
||||
win32api.GetLastError(), level=40)
|
||||
else:
|
||||
self.bus.log('Set handler for console events.', level=40)
|
||||
self.is_set = True
|
||||
|
||||
def stop(self):
|
||||
if not self.is_set:
|
||||
self.bus.log('Handler for console events already off.', level=40)
|
||||
return
|
||||
|
||||
try:
|
||||
result = win32api.SetConsoleCtrlHandler(self.handle, 0)
|
||||
except ValueError:
|
||||
# "ValueError: The object has not been registered"
|
||||
result = 1
|
||||
|
||||
if result == 0:
|
||||
self.bus.log('Could not remove SetConsoleCtrlHandler (error %r)' %
|
||||
win32api.GetLastError(), level=40)
|
||||
else:
|
||||
self.bus.log('Removed handler for console events.', level=40)
|
||||
self.is_set = False
|
||||
|
||||
def handle(self, event):
|
||||
"""Handle console control events (like Ctrl-C)."""
|
||||
if event in (win32con.CTRL_C_EVENT, win32con.CTRL_LOGOFF_EVENT,
|
||||
win32con.CTRL_BREAK_EVENT, win32con.CTRL_SHUTDOWN_EVENT,
|
||||
win32con.CTRL_CLOSE_EVENT):
|
||||
self.bus.log('Console event %s: shutting down bus' % event)
|
||||
|
||||
# Remove self immediately so repeated Ctrl-C doesn't re-call it.
|
||||
try:
|
||||
self.stop()
|
||||
except ValueError:
|
||||
pass
|
||||
|
||||
self.bus.exit()
|
||||
# 'First to return True stops the calls'
|
||||
return 1
|
||||
return 0
|
||||
|
||||
|
||||
class Win32Bus(wspbus.Bus):
|
||||
|
||||
"""A Web Site Process Bus implementation for Win32.
|
||||
|
||||
Instead of time.sleep, this bus blocks using native win32event objects.
|
||||
"""
|
||||
|
||||
def __init__(self):
|
||||
self.events = {}
|
||||
wspbus.Bus.__init__(self)
|
||||
|
||||
def _get_state_event(self, state):
|
||||
"""Return a win32event for the given state (creating it if needed)."""
|
||||
try:
|
||||
return self.events[state]
|
||||
except KeyError:
|
||||
event = win32event.CreateEvent(None, 0, 0,
|
||||
'WSPBus %s Event (pid=%r)' %
|
||||
(state.name, os.getpid()))
|
||||
self.events[state] = event
|
||||
return event
|
||||
|
||||
def _get_state(self):
|
||||
return self._state
|
||||
|
||||
def _set_state(self, value):
|
||||
self._state = value
|
||||
event = self._get_state_event(value)
|
||||
win32event.PulseEvent(event)
|
||||
state = property(_get_state, _set_state)
|
||||
|
||||
def wait(self, state, interval=0.1, channel=None):
|
||||
"""Wait for the given state(s), KeyboardInterrupt or SystemExit.
|
||||
|
||||
Since this class uses native win32event objects, the interval
|
||||
argument is ignored.
|
||||
"""
|
||||
if isinstance(state, (tuple, list)):
|
||||
# Don't wait for an event that beat us to the punch ;)
|
||||
if self.state not in state:
|
||||
events = tuple([self._get_state_event(s) for s in state])
|
||||
win32event.WaitForMultipleObjects(
|
||||
events, 0, win32event.INFINITE)
|
||||
else:
|
||||
# Don't wait for an event that beat us to the punch ;)
|
||||
if self.state != state:
|
||||
event = self._get_state_event(state)
|
||||
win32event.WaitForSingleObject(event, win32event.INFINITE)
|
||||
|
||||
|
||||
class _ControlCodes(dict):
|
||||
|
||||
"""Control codes used to "signal" a service via ControlService.
|
||||
|
||||
User-defined control codes are in the range 128-255. We generally use
|
||||
the standard Python value for the Linux signal and add 128. Example:
|
||||
|
||||
>>> signal.SIGUSR1
|
||||
10
|
||||
control_codes['graceful'] = 128 + 10
|
||||
"""
|
||||
|
||||
def key_for(self, obj):
|
||||
"""For the given value, return its corresponding key."""
|
||||
for key, val in self.items():
|
||||
if val is obj:
|
||||
return key
|
||||
raise ValueError('The given object could not be found: %r' % obj)
|
||||
|
||||
control_codes = _ControlCodes({'graceful': 138})
|
||||
|
||||
|
||||
def signal_child(service, command):
|
||||
if command == 'stop':
|
||||
win32serviceutil.StopService(service)
|
||||
elif command == 'restart':
|
||||
win32serviceutil.RestartService(service)
|
||||
else:
|
||||
win32serviceutil.ControlService(service, control_codes[command])
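Given the control-code map above, a running PyWebService can be signalled from another process. A hedged sketch; the service name must match the installed _svc_name_ ('Python Web Service' below is the default defined further down)::

    from cherrypy.process.win32 import signal_child

    # 'stop' and 'restart' use the normal SCM verbs; anything else is
    # looked up in control_codes and delivered via ControlService.
    signal_child('Python Web Service', 'graceful')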
|
||||
|
||||
|
||||
class PyWebService(win32serviceutil.ServiceFramework):
|
||||
|
||||
"""Python Web Service."""
|
||||
|
||||
_svc_name_ = 'Python Web Service'
|
||||
_svc_display_name_ = 'Python Web Service'
|
||||
_svc_deps_ = None # sequence of service names on which this depends
|
||||
_exe_name_ = 'pywebsvc'
|
||||
_exe_args_ = None # Default to no arguments
|
||||
|
||||
    # Only exists on Windows 2000 or later, ignored on Windows NT
|
||||
_svc_description_ = 'Python Web Service'
|
||||
|
||||
def SvcDoRun(self):
|
||||
from cherrypy import process
|
||||
process.bus.start()
|
||||
process.bus.block()
|
||||
|
||||
def SvcStop(self):
|
||||
from cherrypy import process
|
||||
self.ReportServiceStatus(win32service.SERVICE_STOP_PENDING)
|
||||
process.bus.exit()
|
||||
|
||||
def SvcOther(self, control):
|
||||
process.bus.publish(control_codes.key_for(control))
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
win32serviceutil.HandleCommandLine(PyWebService)
|
||||
@@ -1,519 +0,0 @@
|
||||
"""An implementation of the Web Site Process Bus.
|
||||
|
||||
This module is completely standalone, depending only on the stdlib.
|
||||
|
||||
Web Site Process Bus
|
||||
--------------------
|
||||
|
||||
A Bus object is used to contain and manage site-wide behavior:
|
||||
daemonization, HTTP server start/stop, process reload, signal handling,
|
||||
drop privileges, PID file management, logging for all of these,
|
||||
and many more.
|
||||
|
||||
In addition, a Bus object provides a place for each web framework
|
||||
to register code that runs in response to site-wide events (like
|
||||
process start and stop), or which controls or otherwise interacts with
|
||||
the site-wide components mentioned above. For example, a framework which
|
||||
uses file-based templates would add known template filenames to an
|
||||
autoreload component.
|
||||
|
||||
Ideally, a Bus object will be flexible enough to be useful in a variety
|
||||
of invocation scenarios:
|
||||
|
||||
1. The deployer starts a site from the command line via a
|
||||
framework-neutral deployment script; applications from multiple frameworks
|
||||
are mixed in a single site. Command-line arguments and configuration
|
||||
files are used to define site-wide components such as the HTTP server,
|
||||
WSGI component graph, autoreload behavior, signal handling, etc.
|
||||
2. The deployer starts a site via some other process, such as Apache;
|
||||
applications from multiple frameworks are mixed in a single site.
|
||||
Autoreload and signal handling (from Python at least) are disabled.
|
||||
3. The deployer starts a site via a framework-specific mechanism;
|
||||
for example, when running tests, exploring tutorials, or deploying
|
||||
single applications from a single framework. The framework controls
|
||||
which site-wide components are enabled as it sees fit.
|
||||
|
||||
The Bus object in this package uses topic-based publish-subscribe
|
||||
messaging to accomplish all this. A few topic channels are built in
|
||||
('start', 'stop', 'exit', 'graceful', 'log', and 'main'). Frameworks and
|
||||
site containers are free to define their own. If a message is sent to a
|
||||
channel that has not been defined or has no listeners, there is no effect.
|
||||
|
||||
In general, there should only ever be a single Bus object per process.
|
||||
Frameworks and site containers share a single Bus object by publishing
|
||||
messages and subscribing listeners.
|
||||
|
||||
The Bus object works as a finite state machine which models the current
|
||||
state of the process. Bus methods move it from one state to another;
|
||||
those methods then publish to subscribed listeners on the channel for
|
||||
the new state.::
|
||||
|
||||
                        O
                        |
                        V
       STOPPING --> STOPPED --> EXITING -> X
          A   A         |
          |    \___     |
          |        \    |
          |         V   V
        STARTED <-- STARTING
|
||||
|
||||
"""
|
||||
|
||||
import atexit
|
||||
import ctypes
|
||||
import operator
|
||||
import os
|
||||
import subprocess
|
||||
import sys
|
||||
import threading
|
||||
import time
|
||||
import traceback as _traceback
|
||||
import warnings
|
||||
|
||||
import six
|
||||
|
||||
from cherrypy._cpcompat import _args_from_interpreter_flags
|
||||
|
||||
|
||||
# Here I save the value of os.getcwd(), which, if I am imported early enough,
|
||||
# will be the directory from which the startup script was run. This is needed
|
||||
# by _do_execv(), to change back to the original directory before execv()ing a
|
||||
# new process. This is a defense against the application having changed the
|
||||
# current working directory (which could make sys.executable "not found" if
|
||||
# sys.executable is a relative-path, and/or cause other problems).
|
||||
_startup_cwd = os.getcwd()
|
||||
|
||||
|
||||
class ChannelFailures(Exception):
|
||||
|
||||
"""Exception raised when errors occur in a listener during Bus.publish().
|
||||
"""
|
||||
delimiter = '\n'
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super(Exception, self).__init__(*args, **kwargs)
|
||||
self._exceptions = list()
|
||||
|
||||
def handle_exception(self):
|
||||
"""Append the current exception to self."""
|
||||
self._exceptions.append(sys.exc_info()[1])
|
||||
|
||||
def get_instances(self):
|
||||
"""Return a list of seen exception instances."""
|
||||
return self._exceptions[:]
|
||||
|
||||
def __str__(self):
|
||||
exception_strings = map(repr, self.get_instances())
|
||||
return self.delimiter.join(exception_strings)
|
||||
|
||||
__repr__ = __str__
|
||||
|
||||
def __bool__(self):
|
||||
return bool(self._exceptions)
|
||||
__nonzero__ = __bool__
|
||||
|
||||
# Use a flag to indicate the state of the bus.
|
||||
|
||||
|
||||
class _StateEnum(object):
|
||||
|
||||
class State(object):
|
||||
name = None
|
||||
|
||||
def __repr__(self):
|
||||
return 'states.%s' % self.name
|
||||
|
||||
def __setattr__(self, key, value):
|
||||
if isinstance(value, self.State):
|
||||
value.name = key
|
||||
object.__setattr__(self, key, value)
|
||||
states = _StateEnum()
|
||||
states.STOPPED = states.State()
|
||||
states.STARTING = states.State()
|
||||
states.STARTED = states.State()
|
||||
states.STOPPING = states.State()
|
||||
states.EXITING = states.State()
|
||||
|
||||
|
||||
try:
|
||||
import fcntl
|
||||
except ImportError:
|
||||
max_files = 0
|
||||
else:
|
||||
try:
|
||||
max_files = os.sysconf('SC_OPEN_MAX')
|
||||
except AttributeError:
|
||||
max_files = 1024
|
||||
|
||||
|
||||
class Bus(object):
|
||||
|
||||
"""Process state-machine and messenger for HTTP site deployment.
|
||||
|
||||
All listeners for a given channel are guaranteed to be called even
|
||||
if others at the same channel fail. Each failure is logged, but
|
||||
execution proceeds on to the next listener. The only way to stop all
|
||||
processing from inside a listener is to raise SystemExit and stop the
|
||||
whole server.
|
||||
"""
|
||||
|
||||
states = states
|
||||
state = states.STOPPED
|
||||
execv = False
|
||||
max_cloexec_files = max_files
|
||||
|
||||
def __init__(self):
|
||||
self.execv = False
|
||||
self.state = states.STOPPED
|
||||
channels = 'start', 'stop', 'exit', 'graceful', 'log', 'main'
|
||||
self.listeners = dict(
|
||||
(channel, set())
|
||||
for channel in channels
|
||||
)
|
||||
self._priorities = {}
|
||||
|
||||
def subscribe(self, channel, callback, priority=None):
|
||||
"""Add the given callback at the given channel (if not present)."""
|
||||
ch_listeners = self.listeners.setdefault(channel, set())
|
||||
ch_listeners.add(callback)
|
||||
|
||||
if priority is None:
|
||||
priority = getattr(callback, 'priority', 50)
|
||||
self._priorities[(channel, callback)] = priority
|
||||
|
||||
def unsubscribe(self, channel, callback):
|
||||
"""Discard the given callback (if present)."""
|
||||
listeners = self.listeners.get(channel)
|
||||
if listeners and callback in listeners:
|
||||
listeners.discard(callback)
|
||||
del self._priorities[(channel, callback)]
|
||||
|
||||
def publish(self, channel, *args, **kwargs):
|
||||
"""Return output of all subscribers for the given channel."""
|
||||
if channel not in self.listeners:
|
||||
return []
|
||||
|
||||
exc = ChannelFailures()
|
||||
output = []
|
||||
|
||||
raw_items = (
|
||||
(self._priorities[(channel, listener)], listener)
|
||||
for listener in self.listeners[channel]
|
||||
)
|
||||
items = sorted(raw_items, key=operator.itemgetter(0))
|
||||
for priority, listener in items:
|
||||
try:
|
||||
output.append(listener(*args, **kwargs))
|
||||
except KeyboardInterrupt:
|
||||
raise
|
||||
except SystemExit:
|
||||
e = sys.exc_info()[1]
|
||||
# If we have previous errors ensure the exit code is non-zero
|
||||
if exc and e.code == 0:
|
||||
e.code = 1
|
||||
raise
|
||||
except:
|
||||
exc.handle_exception()
|
||||
if channel == 'log':
|
||||
# Assume any further messages to 'log' will fail.
|
||||
pass
|
||||
else:
|
||||
self.log('Error in %r listener %r' % (channel, listener),
|
||||
level=40, traceback=True)
|
||||
if exc:
|
||||
raise exc
|
||||
return output
|
||||
|
||||
def _clean_exit(self):
|
||||
"""An atexit handler which asserts the Bus is not running."""
|
||||
if self.state != states.EXITING:
|
||||
warnings.warn(
|
||||
'The main thread is exiting, but the Bus is in the %r state; '
|
||||
'shutting it down automatically now. You must either call '
|
||||
'bus.block() after start(), or call bus.exit() before the '
|
||||
'main thread exits.' % self.state, RuntimeWarning)
|
||||
self.exit()
|
||||
|
||||
def start(self):
|
||||
"""Start all services."""
|
||||
atexit.register(self._clean_exit)
|
||||
|
||||
self.state = states.STARTING
|
||||
self.log('Bus STARTING')
|
||||
try:
|
||||
self.publish('start')
|
||||
self.state = states.STARTED
|
||||
self.log('Bus STARTED')
|
||||
except (KeyboardInterrupt, SystemExit):
|
||||
raise
|
||||
except:
|
||||
self.log('Shutting down due to error in start listener:',
|
||||
level=40, traceback=True)
|
||||
e_info = sys.exc_info()[1]
|
||||
try:
|
||||
self.exit()
|
||||
except:
|
||||
# Any stop/exit errors will be logged inside publish().
|
||||
pass
|
||||
# Re-raise the original error
|
||||
raise e_info
|
||||
|
||||
def exit(self):
|
||||
"""Stop all services and prepare to exit the process."""
|
||||
exitstate = self.state
|
||||
try:
|
||||
self.stop()
|
||||
|
||||
self.state = states.EXITING
|
||||
self.log('Bus EXITING')
|
||||
self.publish('exit')
|
||||
# This isn't strictly necessary, but it's better than seeing
|
||||
# "Waiting for child threads to terminate..." and then nothing.
|
||||
self.log('Bus EXITED')
|
||||
except:
|
||||
# This method is often called asynchronously (whether thread,
|
||||
# signal handler, console handler, or atexit handler), so we
|
||||
# can't just let exceptions propagate out unhandled.
|
||||
# Assume it's been logged and just die.
|
||||
os._exit(70) # EX_SOFTWARE
|
||||
|
||||
if exitstate == states.STARTING:
|
||||
# exit() was called before start() finished, possibly due to
|
||||
# Ctrl-C because a start listener got stuck. In this case,
|
||||
# we could get stuck in a loop where Ctrl-C never exits the
|
||||
# process, so we just call os._exit here.
|
||||
os._exit(70) # EX_SOFTWARE
|
||||
|
||||
def restart(self):
|
||||
"""Restart the process (may close connections).
|
||||
|
||||
This method does not restart the process from the calling thread;
|
||||
instead, it stops the bus and asks the main thread to call execv.
|
||||
"""
|
||||
self.execv = True
|
||||
self.exit()
|
||||
|
||||
def graceful(self):
|
||||
"""Advise all services to reload."""
|
||||
self.log('Bus graceful')
|
||||
self.publish('graceful')
|
||||
|
||||
def block(self, interval=0.1):
|
||||
"""Wait for the EXITING state, KeyboardInterrupt or SystemExit.
|
||||
|
||||
This function is intended to be called only by the main thread.
|
||||
After waiting for the EXITING state, it also waits for all threads
|
||||
to terminate, and then calls os.execv if self.execv is True. This
|
||||
design allows another thread to call bus.restart, yet have the main
|
||||
thread perform the actual execv call (required on some platforms).
|
||||
"""
|
||||
try:
|
||||
self.wait(states.EXITING, interval=interval, channel='main')
|
||||
except (KeyboardInterrupt, IOError):
|
||||
# The time.sleep call might raise
|
||||
# "IOError: [Errno 4] Interrupted function call" on KBInt.
|
||||
self.log('Keyboard Interrupt: shutting down bus')
|
||||
self.exit()
|
||||
except SystemExit:
|
||||
self.log('SystemExit raised: shutting down bus')
|
||||
self.exit()
|
||||
raise
|
||||
|
||||
# Waiting for ALL child threads to finish is necessary on OS X.
|
||||
# See https://github.com/cherrypy/cherrypy/issues/581.
|
||||
# It's also good to let them all shut down before allowing
|
||||
# the main thread to call atexit handlers.
|
||||
# See https://github.com/cherrypy/cherrypy/issues/751.
|
||||
self.log('Waiting for child threads to terminate...')
|
||||
for t in threading.enumerate():
|
||||
# Validate that we're not trying to join the MainThread, which
# would cause a deadlock. This can happen when running as a
# Windows service, or when another thread executes
# cherrypy.engine.exit().
|
||||
if (
|
||||
t != threading.currentThread() and
|
||||
t.isAlive() and
|
||||
not isinstance(t, threading._MainThread)
|
||||
):
|
||||
# Note that any dummy (external) threads are always daemonic.
|
||||
if hasattr(threading.Thread, 'daemon'):
|
||||
# Python 2.6+
|
||||
d = t.daemon
|
||||
else:
|
||||
d = t.isDaemon()
|
||||
if not d:
|
||||
self.log('Waiting for thread %s.' % t.getName())
|
||||
t.join()
|
||||
|
||||
if self.execv:
|
||||
self._do_execv()
|
||||
|
||||
def wait(self, state, interval=0.1, channel=None):
|
||||
"""Poll for the given state(s) at intervals; publish to channel."""
|
||||
if isinstance(state, (tuple, list)):
|
||||
states = state
|
||||
else:
|
||||
states = [state]
|
||||
|
||||
def _wait():
|
||||
while self.state not in states:
|
||||
time.sleep(interval)
|
||||
self.publish(channel)
|
||||
|
||||
# From http://psyco.sourceforge.net/psycoguide/bugs.html:
|
||||
# "The compiled machine code does not include the regular polling
|
||||
# done by Python, meaning that a KeyboardInterrupt will not be
|
||||
# detected before execution comes back to the regular Python
|
||||
# interpreter. Your program cannot be interrupted if caught
|
||||
# into an infinite Psyco-compiled loop."
|
||||
try:
|
||||
sys.modules['psyco'].cannotcompile(_wait)
|
||||
except (KeyError, AttributeError):
|
||||
pass
|
||||
|
||||
_wait()
|
||||
|
||||
def _do_execv(self):
|
||||
"""Re-execute the current process.
|
||||
|
||||
This must be called from the main thread, because certain platforms
|
||||
(OS X) don't allow execv to be called in a child thread very well.
|
||||
"""
|
||||
try:
|
||||
args = self._get_true_argv()
|
||||
except NotImplementedError:
|
||||
"""It's probably win32"""
|
||||
# For the SABnzbd.exe binary we don't want interpreter flags
|
||||
# https://github.com/cherrypy/cherrypy/issues/1526
|
||||
if getattr(sys, 'frozen', False):
|
||||
args = [sys.executable] + sys.argv
|
||||
else:
|
||||
args = [sys.executable] + _args_from_interpreter_flags() + sys.argv
|
||||
|
||||
self.log('Re-spawning %s' % ' '.join(args))
|
||||
|
||||
self._extend_pythonpath(os.environ)
|
||||
|
||||
if sys.platform[:4] == 'java':
|
||||
from _systemrestart import SystemRestart
|
||||
raise SystemRestart
|
||||
else:
|
||||
if sys.platform == 'win32':
|
||||
args = ['"%s"' % arg for arg in args]
|
||||
|
||||
os.chdir(_startup_cwd)
|
||||
if self.max_cloexec_files:
|
||||
self._set_cloexec()
|
||||
os.execv(sys.executable, args)
|
||||
|
||||
@staticmethod
|
||||
def _get_true_argv():
|
||||
"""Retrieves all real arguments of the python interpreter
|
||||
|
||||
...even those not listed in ``sys.argv``
|
||||
|
||||
:seealso: http://stackoverflow.com/a/28338254/595220
|
||||
:seealso: http://stackoverflow.com/a/6683222/595220
|
||||
:seealso: http://stackoverflow.com/a/28414807/595220
|
||||
"""
|
||||
|
||||
try:
|
||||
char_p = ctypes.c_char_p if six.PY2 else ctypes.c_wchar_p
|
||||
|
||||
argv = ctypes.POINTER(char_p)()
|
||||
argc = ctypes.c_int()
|
||||
|
||||
ctypes.pythonapi.Py_GetArgcArgv(ctypes.byref(argc), ctypes.byref(argv))
|
||||
except AttributeError:
|
||||
"""It looks Py_GetArgcArgv is completely absent in MS Windows
|
||||
|
||||
:seealso: https://github.com/cherrypy/cherrypy/issues/1506
|
||||
:ref: https://chromium.googlesource.com/infra/infra/+/69eb0279c12bcede5937ce9298020dd4581e38dd%5E!/
|
||||
"""
|
||||
raise NotImplementedError
|
||||
else:
|
||||
return argv[:argc.value]
|
||||
|
||||
@staticmethod
|
||||
def _extend_pythonpath(env):
|
||||
"""
|
||||
If sys.path[0] is an empty string, the interpreter was likely
|
||||
invoked with -m and the effective path is about to change on
|
||||
re-exec. Add the current directory to $PYTHONPATH to ensure
|
||||
that the new process sees the same path.
|
||||
|
||||
This issue cannot be addressed in the general case because
|
||||
Python cannot reliably reconstruct the
|
||||
original command line (http://bugs.python.org/issue14208).
|
||||
|
||||
(This idea filched from tornado.autoreload)
|
||||
"""
|
||||
path_prefix = '.' + os.pathsep
|
||||
existing_path = env.get('PYTHONPATH', '')
|
||||
needs_patch = (
|
||||
sys.path[0] == '' and
|
||||
not existing_path.startswith(path_prefix)
|
||||
)
|
||||
|
||||
if needs_patch:
|
||||
env['PYTHONPATH'] = path_prefix + existing_path
|
||||
|
||||
def _set_cloexec(self):
|
||||
"""Set the CLOEXEC flag on all open files (except stdin/out/err).
|
||||
|
||||
If self.max_cloexec_files is an integer (the default), then on
|
||||
platforms which support it, it represents the max open files setting
|
||||
for the operating system. This function will be called just before
|
||||
the process is restarted via os.execv() to prevent open files
|
||||
from persisting into the new process.
|
||||
|
||||
Set self.max_cloexec_files to 0 to disable this behavior.
|
||||
"""
|
||||
for fd in range(3, self.max_cloexec_files): # skip stdin/out/err
|
||||
try:
|
||||
flags = fcntl.fcntl(fd, fcntl.F_GETFD)
|
||||
except IOError:
|
||||
continue
|
||||
fcntl.fcntl(fd, fcntl.F_SETFD, flags | fcntl.FD_CLOEXEC)
|
||||
|
||||
def stop(self):
|
||||
"""Stop all services."""
|
||||
self.state = states.STOPPING
|
||||
self.log('Bus STOPPING')
|
||||
self.publish('stop')
|
||||
self.state = states.STOPPED
|
||||
self.log('Bus STOPPED')
|
||||
|
||||
def start_with_callback(self, func, args=None, kwargs=None):
|
||||
"""Start 'func' in a new thread T, then start self (and return T)."""
|
||||
if args is None:
|
||||
args = ()
|
||||
if kwargs is None:
|
||||
kwargs = {}
|
||||
args = (func,) + args
|
||||
|
||||
def _callback(func, *a, **kw):
|
||||
self.wait(states.STARTED)
|
||||
func(*a, **kw)
|
||||
t = threading.Thread(target=_callback, args=args, kwargs=kwargs)
|
||||
t.setName('Bus Callback ' + t.getName())
|
||||
t.start()
|
||||
|
||||
self.start()
|
||||
|
||||
return t
|
||||
|
||||
def log(self, msg='', level=20, traceback=False):
|
||||
"""Log the given message. Append the last traceback if requested."""
|
||||
if traceback:
|
||||
# Work-around for bug in Python's traceback implementation
|
||||
# which crashes when the error message contains %1, %2 etc.
|
||||
errors = sys.exc_info()
|
||||
if '%' in errors[1].message:
|
||||
errors[1].message = errors[1].message.replace('%', '#')
|
||||
errors[1].args = [item.replace('%', '#') for item in errors[1].args]
|
||||
msg += "\n" + "".join(_traceback.format_exception(*errors))
|
||||
self.publish('log', msg, level)
|
||||
|
||||
bus = Bus()
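
# --- Illustrative usage sketch (not part of the module above) ---
# A minimal example of how a hypothetical caller might drive this Bus:
# subscribe a few listeners, start the bus, then shut it down. The channel
# names ('log', 'start', 'stop') come from Bus.__init__ above; the listener
# functions below are invented purely for illustration.

def _print_log(msg, level):
    # 'log' listeners receive (msg, level), as published by Bus.log().
    print('[%s] %s' % (level, msg))

def _on_start():
    print('services starting')

def _on_stop():
    print('services stopping')

if __name__ == '__main__':
    bus.subscribe('log', _print_log)
    bus.subscribe('start', _on_start)
    bus.subscribe('stop', _on_stop)
    bus.start()   # STOPPED -> STARTING -> STARTED, publishing on 'start'
    bus.exit()    # stops and publishes 'stop' and 'exit'; a real program
                  # would usually call bus.block() instead and let another
                  # thread trigger exit() or restart()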
|
||||
File diff suppressed because it is too large
@@ -1,124 +0,0 @@
|
||||
"""A library for integrating Python's builtin ``ssl`` library with CherryPy.
|
||||
|
||||
The ssl module must be importable for SSL functionality.
|
||||
|
||||
To use this module, set ``CherryPyWSGIServer.ssl_adapter`` to an instance of
|
||||
``BuiltinSSLAdapter``.
|
||||
"""
|
||||
|
||||
try:
|
||||
import ssl
|
||||
except ImportError:
|
||||
ssl = None
|
||||
|
||||
try:
|
||||
from _pyio import DEFAULT_BUFFER_SIZE
|
||||
except ImportError:
|
||||
try:
|
||||
from io import DEFAULT_BUFFER_SIZE
|
||||
except ImportError:
|
||||
DEFAULT_BUFFER_SIZE = -1
|
||||
|
||||
import sys
|
||||
|
||||
from cherrypy import wsgiserver
|
||||
|
||||
|
||||
class BuiltinSSLAdapter(wsgiserver.SSLAdapter):
|
||||
|
||||
"""A wrapper for integrating Python's builtin ssl module with CherryPy."""
|
||||
|
||||
certificate = None
|
||||
"""The filename of the server SSL certificate."""
|
||||
|
||||
private_key = None
|
||||
"""The filename of the server's private key file."""
|
||||
|
||||
certificate_chain = None
|
||||
"""The filename of the certificate chain file."""
|
||||
|
||||
"""The ssl.SSLContext that will be used to wrap sockets where available
|
||||
(on Python > 2.7.9 / 3.3)
|
||||
"""
|
||||
context = None
|
||||
|
||||
def __init__(self, certificate, private_key, certificate_chain=None):
|
||||
if ssl is None:
|
||||
raise ImportError('You must install the ssl module to use HTTPS.')
|
||||
self.certificate = certificate
|
||||
self.private_key = private_key
|
||||
self.certificate_chain = certificate_chain
|
||||
if hasattr(ssl, 'create_default_context'):
|
||||
self.context = ssl.create_default_context(
|
||||
purpose=ssl.Purpose.CLIENT_AUTH,
|
||||
cafile=certificate_chain
|
||||
)
|
||||
self.context.load_cert_chain(certificate, private_key)
|
||||
|
||||
def bind(self, sock):
|
||||
"""Wrap and return the given socket."""
|
||||
return sock
|
||||
|
||||
def wrap(self, sock):
|
||||
"""Wrap and return the given socket, plus WSGI environ entries."""
|
||||
try:
|
||||
if self.context is not None:
|
||||
s = self.context.wrap_socket(sock, do_handshake_on_connect=True,
|
||||
server_side=True)
|
||||
else:
|
||||
s = ssl.wrap_socket(sock, do_handshake_on_connect=True,
|
||||
server_side=True, certfile=self.certificate,
|
||||
keyfile=self.private_key,
|
||||
ssl_version=ssl.PROTOCOL_SSLv23,
|
||||
ca_certs=self.certificate_chain)
|
||||
except ssl.SSLError:
|
||||
e = sys.exc_info()[1]
|
||||
if e.errno == ssl.SSL_ERROR_EOF:
|
||||
# This is almost certainly due to the cherrypy engine
|
||||
# 'pinging' the socket to assert it's connectable;
|
||||
# the 'ping' isn't SSL.
|
||||
return None, {}
|
||||
elif e.errno == ssl.SSL_ERROR_SSL:
|
||||
if 'http request' in e.args[1]:
|
||||
# The client is speaking HTTP to an HTTPS server.
|
||||
raise wsgiserver.NoSSLError
|
||||
|
||||
# Check if it's one of the known errors
|
||||
# Errors that are caught by PyOpenSSL, but thrown by built-in ssl
|
||||
_block_errors = ('unknown protocol', 'unknown ca', 'unknown_ca', 'unknown error',
|
||||
'https proxy request', 'inappropriate fallback', 'wrong version number',
|
||||
'no shared cipher', 'certificate unknown', 'ccs received early')
|
||||
for error_text in _block_errors:
|
||||
if error_text in e.args[1].lower():
|
||||
# Accepted error, let's pass
|
||||
return None, {}
|
||||
elif 'handshake operation timed out' in e.args[0]:
|
||||
# This error is thrown by builtin SSL after a timeout
|
||||
# when client is speaking HTTP to an HTTPS server.
|
||||
# The connection can safely be dropped.
|
||||
return None, {}
|
||||
raise
|
||||
except:
|
||||
# Temporary fix for https://github.com/cherrypy/cherrypy/issues/1618
|
||||
e = sys.exc_info()[1]
|
||||
if e.args == (0, 'Error'):
|
||||
return None, {}
|
||||
raise
|
||||
return s, self.get_environ(s)
|
||||
|
||||
# TODO: fill this out more with mod ssl env
|
||||
def get_environ(self, sock):
|
||||
"""Create WSGI environ entries to be merged into each request."""
|
||||
cipher = sock.cipher()
|
||||
ssl_environ = {
|
||||
'wsgi.url_scheme': 'https',
|
||||
'HTTPS': 'on',
|
||||
'SSL_PROTOCOL': cipher[1],
|
||||
'SSL_CIPHER': cipher[0]
|
||||
# SSL_VERSION_INTERFACE string The mod_ssl program version
|
||||
# SSL_VERSION_LIBRARY string The OpenSSL program version
|
||||
}
|
||||
return ssl_environ
|
||||
|
||||
def makefile(self, sock, mode='r', bufsize=DEFAULT_BUFFER_SIZE):
|
||||
return wsgiserver.CP_makefile(sock, mode, bufsize)
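
# --- Illustrative wiring sketch (not part of this file) ---
# The module docstring says to set ``CherryPyWSGIServer.ssl_adapter`` to a
# ``BuiltinSSLAdapter`` instance. A minimal sketch, assuming a trivial WSGI
# app and certificate/key files at hypothetical paths:
#
#     from cherrypy import wsgiserver
#
#     def app(environ, start_response):
#         start_response('200 OK', [('Content-Type', 'text/plain')])
#         return [b'hello over https\n']
#
#     server = wsgiserver.CherryPyWSGIServer(('0.0.0.0', 8443), app)
#     server.ssl_adapter = BuiltinSSLAdapter('/path/to/server.crt',
#                                            '/path/to/server.key')
#     server.start()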
|
||||
@@ -1,253 +0,0 @@
|
||||
"""A library for integrating pyOpenSSL with CherryPy.
|
||||
|
||||
The OpenSSL module must be importable for SSL functionality.
|
||||
You can obtain it from `here <https://launchpad.net/pyopenssl>`_.
|
||||
|
||||
To use this module, set CherryPyWSGIServer.ssl_adapter to an instance of
|
||||
SSLAdapter. There are two ways to use SSL:
|
||||
|
||||
Method One
|
||||
----------
|
||||
|
||||
* ``ssl_adapter.context``: an instance of SSL.Context.
|
||||
|
||||
If this is not None, it is assumed to be an SSL.Context instance,
|
||||
and will be passed to SSL.Connection on bind(). The developer is
|
||||
responsible for forming a valid Context object. This approach is
|
||||
to be preferred for more flexibility, e.g. if the cert and key are
|
||||
streams instead of files, or need decryption, or SSL.SSLv3_METHOD
|
||||
is desired instead of the default SSL.SSLv23_METHOD, etc. Consult
|
||||
the pyOpenSSL documentation for complete options.
|
||||
|
||||
Method Two (shortcut)
|
||||
---------------------
|
||||
|
||||
* ``ssl_adapter.certificate``: the filename of the server SSL certificate.
|
||||
* ``ssl_adapter.private_key``: the filename of the server's private key file.
|
||||
|
||||
Both are None by default. If ssl_adapter.context is None, but .private_key
|
||||
and .certificate are both given and valid, they will be read, and the
|
||||
context will be automatically created from them.
|
||||
"""
|
||||
|
||||
import socket
|
||||
import threading
|
||||
import time
|
||||
|
||||
from cherrypy import wsgiserver
|
||||
|
||||
try:
|
||||
from OpenSSL import SSL
|
||||
from OpenSSL import crypto
|
||||
except ImportError:
|
||||
SSL = None
|
||||
|
||||
|
||||
class SSL_fileobject(wsgiserver.CP_makefile):
|
||||
|
||||
"""SSL file object attached to a socket object."""
|
||||
|
||||
ssl_timeout = 3
|
||||
ssl_retry = .01
|
||||
|
||||
def _safe_call(self, is_reader, call, *args, **kwargs):
|
||||
"""Wrap the given call with SSL error-trapping.
|
||||
|
||||
is_reader: if False EOF errors will be raised. If True, EOF errors
|
||||
will return "" (to emulate normal sockets).
|
||||
"""
|
||||
start = time.time()
|
||||
while True:
|
||||
try:
|
||||
return call(*args, **kwargs)
|
||||
except SSL.WantReadError:
|
||||
# Sleep and try again. This is dangerous, because it means
|
||||
# the rest of the stack has no way of differentiating
|
||||
# between a "new handshake" error and "client dropped".
|
||||
# Note this isn't an endless loop: there's a timeout below.
|
||||
time.sleep(self.ssl_retry)
|
||||
except SSL.WantWriteError:
|
||||
time.sleep(self.ssl_retry)
|
||||
except SSL.SysCallError as e:
|
||||
if is_reader and e.args == (-1, 'Unexpected EOF'):
|
||||
return ''
|
||||
|
||||
errnum = e.args[0]
|
||||
if is_reader and errnum in wsgiserver.socket_errors_to_ignore:
|
||||
return ''
|
||||
raise socket.error(errnum)
|
||||
except SSL.Error as e:
|
||||
if is_reader and e.args == (-1, 'Unexpected EOF'):
|
||||
return ''
|
||||
|
||||
thirdarg = None
|
||||
try:
|
||||
thirdarg = e.args[0][0][2]
|
||||
except IndexError:
|
||||
pass
|
||||
|
||||
if thirdarg == 'http request':
|
||||
# The client is talking HTTP to an HTTPS server.
|
||||
raise wsgiserver.NoSSLError()
|
||||
|
||||
raise wsgiserver.FatalSSLAlert(*e.args)
|
||||
except:
|
||||
raise
|
||||
|
||||
if time.time() - start > self.ssl_timeout:
|
||||
raise socket.timeout('timed out')
|
||||
|
||||
def recv(self, size):
|
||||
return self._safe_call(True, super(SSL_fileobject, self).recv, size)
|
||||
|
||||
def sendall(self, *args, **kwargs):
|
||||
return self._safe_call(False, super(SSL_fileobject, self).sendall,
|
||||
*args, **kwargs)
|
||||
|
||||
def send(self, *args, **kwargs):
|
||||
return self._safe_call(False, super(SSL_fileobject, self).send,
|
||||
*args, **kwargs)
|
||||
|
||||
|
||||
class SSLConnection:
|
||||
|
||||
"""A thread-safe wrapper for an SSL.Connection.
|
||||
|
||||
``*args``: the arguments to create the wrapped ``SSL.Connection(*args)``.
|
||||
"""
|
||||
|
||||
def __init__(self, *args):
|
||||
self._ssl_conn = SSL.Connection(*args)
|
||||
self._lock = threading.RLock()
|
||||
|
||||
for f in ('get_context', 'pending', 'send', 'write', 'recv', 'read',
|
||||
'renegotiate', 'bind', 'listen', 'connect', 'accept',
|
||||
'setblocking', 'fileno', 'close', 'get_cipher_list',
|
||||
'getpeername', 'getsockname', 'getsockopt', 'setsockopt',
|
||||
'makefile', 'get_app_data', 'set_app_data', 'state_string',
|
||||
'sock_shutdown', 'get_peer_certificate', 'want_read',
|
||||
'want_write', 'set_connect_state', 'set_accept_state',
|
||||
'connect_ex', 'sendall', 'settimeout', 'gettimeout'):
|
||||
exec("""def %s(self, *args):
|
||||
self._lock.acquire()
|
||||
try:
|
||||
return self._ssl_conn.%s(*args)
|
||||
finally:
|
||||
self._lock.release()
|
||||
""" % (f, f))
|
||||
|
||||
def shutdown(self, *args):
|
||||
self._lock.acquire()
|
||||
try:
|
||||
# pyOpenSSL.socket.shutdown takes no args
|
||||
return self._ssl_conn.shutdown()
|
||||
finally:
|
||||
self._lock.release()
|
||||
|
||||
|
||||
class pyOpenSSLAdapter(wsgiserver.SSLAdapter):
|
||||
|
||||
"""A wrapper for integrating pyOpenSSL with CherryPy."""
|
||||
|
||||
context = None
|
||||
"""An instance of SSL.Context."""
|
||||
|
||||
certificate = None
|
||||
"""The filename of the server SSL certificate."""
|
||||
|
||||
private_key = None
|
||||
"""The filename of the server's private key file."""
|
||||
|
||||
certificate_chain = None
|
||||
"""Optional. The filename of CA's intermediate certificate bundle.
|
||||
|
||||
This is needed for cheaper "chained root" SSL certificates, and should be
|
||||
left as None if not required."""
|
||||
|
||||
def __init__(self, certificate, private_key, certificate_chain=None):
|
||||
if SSL is None:
|
||||
raise ImportError('You must install pyOpenSSL to use HTTPS.')
|
||||
|
||||
self.context = None
|
||||
self.certificate = certificate
|
||||
self.private_key = private_key
|
||||
self.certificate_chain = certificate_chain
|
||||
self._environ = None
|
||||
|
||||
def bind(self, sock):
|
||||
"""Wrap and return the given socket."""
|
||||
if self.context is None:
|
||||
self.context = self.get_context()
|
||||
conn = SSLConnection(self.context, sock)
|
||||
self._environ = self.get_environ()
|
||||
return conn
|
||||
|
||||
def wrap(self, sock):
|
||||
"""Wrap and return the given socket, plus WSGI environ entries."""
|
||||
return sock, self._environ.copy()
|
||||
|
||||
def get_context(self):
|
||||
"""Return an SSL.Context from self attributes."""
|
||||
# See http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/442473
|
||||
c = SSL.Context(SSL.SSLv23_METHOD)
|
||||
c.use_privatekey_file(self.private_key)
|
||||
if self.certificate_chain:
|
||||
c.load_verify_locations(self.certificate_chain)
|
||||
c.use_certificate_file(self.certificate)
|
||||
return c
|
||||
|
||||
def get_environ(self):
|
||||
"""Return WSGI environ entries to be merged into each request."""
|
||||
ssl_environ = {
|
||||
'HTTPS': 'on',
|
||||
# pyOpenSSL doesn't provide access to any of these AFAICT
|
||||
# 'SSL_PROTOCOL': 'SSLv2',
|
||||
# SSL_CIPHER string The cipher specification name
|
||||
# SSL_VERSION_INTERFACE string The mod_ssl program version
|
||||
# SSL_VERSION_LIBRARY string The OpenSSL program version
|
||||
}
|
||||
|
||||
if self.certificate:
|
||||
# Server certificate attributes
|
||||
cert = open(self.certificate, 'rb').read()
|
||||
cert = crypto.load_certificate(crypto.FILETYPE_PEM, cert)
|
||||
ssl_environ.update({
|
||||
'SSL_SERVER_M_VERSION': cert.get_version(),
|
||||
'SSL_SERVER_M_SERIAL': cert.get_serial_number(),
|
||||
# 'SSL_SERVER_V_START':
|
||||
# Validity of server's certificate (start time),
|
||||
# 'SSL_SERVER_V_END':
|
||||
# Validity of server's certificate (end time),
|
||||
})
|
||||
|
||||
for prefix, dn in [('I', cert.get_issuer()),
|
||||
('S', cert.get_subject())]:
|
||||
# X509Name objects don't seem to have a way to get the
|
||||
# complete DN string. Use str() and slice it instead,
|
||||
# because str(dn) == "<X509Name object '/C=US/ST=...'>"
|
||||
dnstr = str(dn)[18:-2]
|
||||
|
||||
wsgikey = 'SSL_SERVER_%s_DN' % prefix
|
||||
ssl_environ[wsgikey] = dnstr
|
||||
|
||||
# The DN should be of the form: /k1=v1/k2=v2, but we must allow
|
||||
# for any value to contain slashes itself (in a URL).
|
||||
while dnstr:
|
||||
pos = dnstr.rfind('=')
|
||||
dnstr, value = dnstr[:pos], dnstr[pos + 1:]
|
||||
pos = dnstr.rfind('/')
|
||||
dnstr, key = dnstr[:pos], dnstr[pos + 1:]
|
||||
if key and value:
|
||||
wsgikey = 'SSL_SERVER_%s_DN_%s' % (prefix, key)
|
||||
ssl_environ[wsgikey] = value
|
||||
|
||||
return ssl_environ
|
||||
|
||||
def makefile(self, sock, mode='r', bufsize=-1):
|
||||
if SSL and isinstance(sock, SSL.ConnectionType):
|
||||
timeout = sock.gettimeout()
|
||||
f = SSL_fileobject(sock, mode, bufsize)
|
||||
f.ssl_timeout = timeout
|
||||
return f
|
||||
else:
|
||||
return wsgiserver.CP_fileobject(sock, mode, bufsize)
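
# --- Illustrative sketch of the two configuration methods from the module
# --- docstring (hypothetical paths, not part of this file) ---
#
# Method Two (shortcut): let get_context() build the SSL.Context from files.
#
#     adapter = pyOpenSSLAdapter('/path/to/server.crt', '/path/to/server.key')
#
# Method One: supply a ready-made SSL.Context for full control.
#
#     adapter = pyOpenSSLAdapter('/path/to/server.crt', '/path/to/server.key')
#     ctx = SSL.Context(SSL.SSLv23_METHOD)
#     ctx.use_privatekey_file('/path/to/server.key')
#     ctx.use_certificate_file('/path/to/server.crt')
#     adapter.context = ctx
#
# Either way, the adapter is then attached to a CherryPyWSGIServer:
#
#     server.ssl_adapter = adapter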
|
||||
@@ -1,7 +1,7 @@
|
||||
##
|
||||
## Bad URL Fetch Email template for SABnzbd
|
||||
## This a Cheetah template
|
||||
## Documentation: http://sabnzbd.wikidot.com/email-templates
|
||||
## Documentation: https://sabnzbd.org/wiki/extra/email-templates
|
||||
##
|
||||
## Newlines and whitespace are significant!
|
||||
##
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
##
|
||||
## Default Email template for SABnzbd
|
||||
## This a Cheetah template
|
||||
## Documentation: http://sabnzbd.wikidot.com/email-templates
|
||||
## Documentation: https://sabnzbd.org/wiki/extra/email-templates
|
||||
##
|
||||
## Newlines and whitespace are significant!
|
||||
##
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
##
|
||||
## RSS Email template for SABnzbd
|
||||
## This a Cheetah template
|
||||
## Documentation: http://sabnzbd.wikidot.com/email-templates
|
||||
## Documentation: https://sabnzbd.org/wiki/extra/email-templates
|
||||
##
|
||||
## Newlines and whitespace are significant!
|
||||
##
|
||||
|
||||
gntp/cli.py (141 lines removed)
@@ -1,141 +0,0 @@
|
||||
# Copyright: 2013 Paul Traylor
|
||||
# These sources are released under the terms of the MIT license: see LICENSE
|
||||
|
||||
import logging
|
||||
import os
|
||||
import sys
|
||||
from optparse import OptionParser, OptionGroup
|
||||
|
||||
from gntp.notifier import GrowlNotifier
|
||||
from gntp.shim import RawConfigParser
|
||||
from gntp.version import __version__
|
||||
|
||||
DEFAULT_CONFIG = os.path.expanduser('~/.gntp')
|
||||
|
||||
config = RawConfigParser({
|
||||
'hostname': 'localhost',
|
||||
'password': None,
|
||||
'port': 23053,
|
||||
})
|
||||
config.read([DEFAULT_CONFIG])
|
||||
if not config.has_section('gntp'):
|
||||
config.add_section('gntp')
|
||||
|
||||
|
||||
class ClientParser(OptionParser):
|
||||
def __init__(self):
|
||||
OptionParser.__init__(self, version="%%prog %s" % __version__)
|
||||
|
||||
group = OptionGroup(self, "Network Options")
|
||||
group.add_option("-H", "--host",
|
||||
dest="host", default=config.get('gntp', 'hostname'),
|
||||
help="Specify a hostname to which to send a remote notification. [%default]")
|
||||
group.add_option("--port",
|
||||
dest="port", default=config.getint('gntp', 'port'), type="int",
|
||||
help="port to listen on [%default]")
|
||||
group.add_option("-P", "--password",
|
||||
dest='password', default=config.get('gntp', 'password'),
|
||||
help="Network password")
|
||||
self.add_option_group(group)
|
||||
|
||||
group = OptionGroup(self, "Notification Options")
|
||||
group.add_option("-n", "--name",
|
||||
dest="app", default='Python GNTP Test Client',
|
||||
help="Set the name of the application [%default]")
|
||||
group.add_option("-s", "--sticky",
|
||||
dest='sticky', default=False, action="store_true",
|
||||
help="Make the notification sticky [%default]")
|
||||
group.add_option("--image",
|
||||
dest="icon", default=None,
|
||||
help="Icon for notification (URL or /path/to/file)")
|
||||
group.add_option("-m", "--message",
|
||||
dest="message", default=None,
|
||||
help="Sets the message instead of using stdin")
|
||||
group.add_option("-p", "--priority",
|
||||
dest="priority", default=0, type="int",
|
||||
help="-2 to 2 [%default]")
|
||||
group.add_option("-d", "--identifier",
|
||||
dest="identifier",
|
||||
help="Identifier for coalescing")
|
||||
group.add_option("-t", "--title",
|
||||
dest="title", default=None,
|
||||
help="Set the title of the notification [%default]")
|
||||
group.add_option("-N", "--notification",
|
||||
dest="name", default='Notification',
|
||||
help="Set the notification name [%default]")
|
||||
group.add_option("--callback",
|
||||
dest="callback",
|
||||
help="URL callback")
|
||||
self.add_option_group(group)
|
||||
|
||||
# Extra Options
|
||||
self.add_option('-v', '--verbose',
|
||||
dest='verbose', default=0, action='count',
|
||||
help="Verbosity levels")
|
||||
|
||||
def parse_args(self, args=None, values=None):
|
||||
values, args = OptionParser.parse_args(self, args, values)
|
||||
|
||||
if values.message is None:
|
||||
print('Enter a message followed by Ctrl-D')
|
||||
try:
|
||||
message = sys.stdin.read()
|
||||
except KeyboardInterrupt:
|
||||
exit()
|
||||
else:
|
||||
message = values.message
|
||||
|
||||
if values.title is None:
|
||||
values.title = ' '.join(args)
|
||||
|
||||
# If we still have an empty title, use the
|
||||
# first bit of the message as the title
|
||||
if values.title == '':
|
||||
values.title = message[:20]
|
||||
|
||||
values.verbose = logging.WARNING - values.verbose * 10
|
||||
|
||||
return values, message
|
||||
|
||||
|
||||
def main():
|
||||
(options, message) = ClientParser().parse_args()
|
||||
logging.basicConfig(level=options.verbose)
|
||||
if not os.path.exists(DEFAULT_CONFIG):
|
||||
logging.info('No config read found at %s', DEFAULT_CONFIG)
|
||||
|
||||
growl = GrowlNotifier(
|
||||
applicationName=options.app,
|
||||
notifications=[options.name],
|
||||
defaultNotifications=[options.name],
|
||||
hostname=options.host,
|
||||
password=options.password,
|
||||
port=options.port,
|
||||
)
|
||||
result = growl.register()
|
||||
if result is not True:
|
||||
exit(result)
|
||||
|
||||
# This would likely be better placed within the growl notifier
|
||||
# class but until I make _checkIcon smarter this is "easier"
|
||||
if options.icon and growl._checkIcon(options.icon) is False:
|
||||
logging.info('Loading image %s', options.icon)
|
||||
f = open(options.icon, 'rb')
|
||||
options.icon = f.read()
|
||||
f.close()
|
||||
|
||||
result = growl.notify(
|
||||
noteType=options.name,
|
||||
title=options.title,
|
||||
description=message,
|
||||
icon=options.icon,
|
||||
sticky=options.sticky,
|
||||
priority=options.priority,
|
||||
callback=options.callback,
|
||||
identifier=options.identifier,
|
||||
)
|
||||
if result is not True:
|
||||
exit(result)
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
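
# --- Illustrative invocations (not part of this file) ---
# Assuming the gntp package is importable and this script is run directly,
# the option names below come from ClientParser above:
#
#     python cli.py -H localhost -n "My App" -t "Hello" -m "It works"
#     echo "piped message" | python cli.py -t "From stdin" -s -p 1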
|
||||
@@ -1,77 +0,0 @@
|
||||
# Copyright: 2013 Paul Traylor
|
||||
# These sources are released under the terms of the MIT license: see LICENSE
|
||||
|
||||
"""
|
||||
The gntp.config module is provided as an extended GrowlNotifier object that takes
|
||||
advantage of the ConfigParser module to allow us to setup some default values
|
||||
(such as hostname, password, and port) in a more global way to be shared among
|
||||
programs using gntp
|
||||
"""
|
||||
import logging
|
||||
import os
|
||||
|
||||
import gntp.notifier
|
||||
import gntp.shim
|
||||
|
||||
__all__ = [
|
||||
'mini',
|
||||
'GrowlNotifier'
|
||||
]
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class GrowlNotifier(gntp.notifier.GrowlNotifier):
|
||||
"""
|
||||
ConfigParser enhanced GrowlNotifier object
|
||||
|
||||
For right now, we are only interested in letting users override certain
|
||||
values from ~/.gntp
|
||||
|
||||
::
|
||||
|
||||
[gntp]
|
||||
hostname = ?
|
||||
password = ?
|
||||
port = ?
|
||||
"""
|
||||
def __init__(self, *args, **kwargs):
|
||||
config = gntp.shim.RawConfigParser({
|
||||
'hostname': kwargs.get('hostname', 'localhost'),
|
||||
'password': kwargs.get('password'),
|
||||
'port': kwargs.get('port', 23053),
|
||||
})
|
||||
|
||||
config.read([os.path.expanduser('~/.gntp')])
|
||||
|
||||
# If the file does not exist, then there will be no gntp section defined
|
||||
# and the config.get() lines below will get confused. Since we are not
|
||||
# saving the config, it should be safe to just add it here so the
|
||||
# code below doesn't complain
|
||||
if not config.has_section('gntp'):
|
||||
logger.info('Error reading ~/.gntp config file')
|
||||
config.add_section('gntp')
|
||||
|
||||
kwargs['password'] = config.get('gntp', 'password')
|
||||
kwargs['hostname'] = config.get('gntp', 'hostname')
|
||||
kwargs['port'] = config.getint('gntp', 'port')
|
||||
|
||||
super(GrowlNotifier, self).__init__(*args, **kwargs)
|
||||
|
||||
|
||||
def mini(description, **kwargs):
|
||||
"""Single notification function
|
||||
|
||||
Simple notification function in one line. Has only one required parameter
|
||||
and attempts to use reasonable defaults for everything else
|
||||
:param string description: Notification message
|
||||
"""
|
||||
kwargs['notifierFactory'] = GrowlNotifier
|
||||
gntp.notifier.mini(description, **kwargs)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
# If we're running this module directly we're likely running it as a test
|
||||
# so extra debugging is useful
|
||||
logging.basicConfig(level=logging.INFO)
|
||||
mini('Testing mini notification')
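
# --- Illustrative ~/.gntp file (values are placeholders) ---
# The [gntp] section read above might look like this on disk:
#
#     [gntp]
#     hostname = 192.168.1.5
#     password = secret
#     port = 23053
#
# With such a file in place, GrowlNotifier() and mini() pick up these
# defaults without the caller passing hostname/password/port explicitly.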
|
||||
gntp/core.py (518 lines removed)
@@ -1,518 +0,0 @@
|
||||
# Copyright: 2013 Paul Traylor
|
||||
# These sources are released under the terms of the MIT license: see LICENSE
|
||||
|
||||
import hashlib
|
||||
import re
|
||||
import time
|
||||
|
||||
import gntp.shim
|
||||
import gntp.errors as errors
|
||||
|
||||
__all__ = [
|
||||
'GNTPRegister',
|
||||
'GNTPNotice',
|
||||
'GNTPSubscribe',
|
||||
'GNTPOK',
|
||||
'GNTPError',
|
||||
'parse_gntp',
|
||||
]
|
||||
|
||||
#GNTP/<version> <messagetype> <encryptionAlgorithmID>[:<ivValue>][ <keyHashAlgorithmID>:<keyHash>.<salt>]
|
||||
GNTP_INFO_LINE = re.compile(
|
||||
'GNTP/(?P<version>\d+\.\d+) (?P<messagetype>REGISTER|NOTIFY|SUBSCRIBE|\-OK|\-ERROR)' +
|
||||
' (?P<encryptionAlgorithmID>[A-Z0-9]+(:(?P<ivValue>[A-F0-9]+))?) ?' +
|
||||
'((?P<keyHashAlgorithmID>[A-Z0-9]+):(?P<keyHash>[A-F0-9]+).(?P<salt>[A-F0-9]+))?\r\n',
|
||||
re.IGNORECASE
|
||||
)
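
# Illustrative info lines that GNTP_INFO_LINE is meant to match (examples
# made up here, following the format in the comment above):
#   "GNTP/1.0 REGISTER NONE\r\n"
#   "GNTP/1.0 NOTIFY NONE MD5:0A1B2C3D.4E5F6071\r\n"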
|
||||
|
||||
GNTP_INFO_LINE_SHORT = re.compile(
|
||||
'GNTP/(?P<version>\d+\.\d+) (?P<messagetype>REGISTER|NOTIFY|SUBSCRIBE|\-OK|\-ERROR)',
|
||||
re.IGNORECASE
|
||||
)
|
||||
|
||||
GNTP_HEADER = re.compile('([\w-]+):(.+)')
|
||||
|
||||
GNTP_EOL = gntp.shim.b('\r\n')
|
||||
GNTP_SEP = gntp.shim.b(': ')
|
||||
|
||||
|
||||
class _GNTPBuffer(gntp.shim.StringIO):
|
||||
"""GNTP Buffer class"""
|
||||
def writeln(self, value=None):
|
||||
if value:
|
||||
self.write(gntp.shim.b(value))
|
||||
self.write(GNTP_EOL)
|
||||
|
||||
def writeheader(self, key, value):
|
||||
if not isinstance(value, str):
|
||||
value = str(value)
|
||||
self.write(gntp.shim.b(key))
|
||||
self.write(GNTP_SEP)
|
||||
self.write(gntp.shim.b(value))
|
||||
self.write(GNTP_EOL)
|
||||
|
||||
|
||||
class _GNTPBase(object):
|
||||
"""Base initilization
|
||||
|
||||
:param string messagetype: GNTP Message type
|
||||
:param string version: GNTP Protocol version
|
||||
:param string encryption: Encryption protocol
|
||||
"""
|
||||
def __init__(self, messagetype=None, version='1.0', encryption=None):
|
||||
self.info = {
|
||||
'version': version,
|
||||
'messagetype': messagetype,
|
||||
'encryptionAlgorithmID': encryption
|
||||
}
|
||||
self.hash_algo = {
|
||||
'MD5': hashlib.md5,
|
||||
'SHA1': hashlib.sha1,
|
||||
'SHA256': hashlib.sha256,
|
||||
'SHA512': hashlib.sha512,
|
||||
}
|
||||
self.headers = {}
|
||||
self.resources = {}
|
||||
|
||||
# For Python2 we can just return the bytes as is without worry
|
||||
# but on Python3 we want to make sure we return the packet as
|
||||
# a unicode string so that things like logging won't get confused
|
||||
if gntp.shim.PY2:
|
||||
def __str__(self):
|
||||
return self.encode()
|
||||
else:
|
||||
def __str__(self):
|
||||
return gntp.shim.u(self.encode())
|
||||
|
||||
def _parse_info(self, data):
|
||||
"""Parse the first line of a GNTP message to get security and other info values
|
||||
|
||||
:param string data: GNTP Message
|
||||
:return dict: Parsed GNTP Info line
|
||||
"""
|
||||
|
||||
match = GNTP_INFO_LINE.match(data)
|
||||
|
||||
if not match:
|
||||
raise errors.ParseError('ERROR_PARSING_INFO_LINE')
|
||||
|
||||
info = match.groupdict()
|
||||
if info['encryptionAlgorithmID'] == 'NONE':
|
||||
info['encryptionAlgorithmID'] = None
|
||||
|
||||
return info
|
||||
|
||||
def set_password(self, password, encryptAlgo='MD5'):
|
||||
"""Set a password for a GNTP Message
|
||||
|
||||
:param string password: Null to clear password
|
||||
:param string encryptAlgo: Supports MD5, SHA1, SHA256, SHA512
|
||||
"""
|
||||
if not password:
|
||||
self.info['encryptionAlgorithmID'] = None
|
||||
self.info['keyHashAlgorithm'] = None
|
||||
return
|
||||
|
||||
self.password = gntp.shim.b(password)
|
||||
self.encryptAlgo = encryptAlgo.upper()
|
||||
|
||||
if self.encryptAlgo not in self.hash_algo:
|
||||
raise errors.UnsupportedError('INVALID HASH "%s"' % self.encryptAlgo)
|
||||
|
||||
hashfunction = self.hash_algo.get(self.encryptAlgo)
|
||||
|
||||
password = password.encode('utf8')
|
||||
seed = time.ctime().encode('utf8')
|
||||
salt = hashfunction(seed).hexdigest()
|
||||
saltHash = hashfunction(seed).digest()
|
||||
keyBasis = password + saltHash
|
||||
key = hashfunction(keyBasis).digest()
|
||||
keyHash = hashfunction(key).hexdigest()
|
||||
|
||||
self.info['keyHashAlgorithmID'] = self.encryptAlgo
|
||||
self.info['keyHash'] = keyHash.upper()
|
||||
self.info['salt'] = salt.upper()
|
||||
|
||||
def _decode_hex(self, value):
|
||||
"""Helper function to decode hex string to `proper` hex string
|
||||
|
||||
:param string value: Human readable hex string
|
||||
:return string: Hex string
|
||||
"""
|
||||
result = ''
|
||||
for i in range(0, len(value), 2):
|
||||
tmp = int(value[i:i + 2], 16)
|
||||
result += chr(tmp)
|
||||
return result
|
||||
|
||||
def _decode_binary(self, rawIdentifier, identifier):
|
||||
rawIdentifier += '\r\n\r\n'
|
||||
dataLength = int(identifier['Length'])
|
||||
pointerStart = self.raw.find(rawIdentifier) + len(rawIdentifier)
|
||||
pointerEnd = pointerStart + dataLength
|
||||
data = self.raw[pointerStart:pointerEnd]
|
||||
if not len(data) == dataLength:
|
||||
raise errors.ParseError('INVALID_DATA_LENGTH Expected: %s Received %s' % (dataLength, len(data)))
|
||||
return data
|
||||
|
||||
def _validate_password(self, password):
|
||||
"""Validate GNTP Message against stored password"""
|
||||
self.password = password
|
||||
if password is None:
|
||||
raise errors.AuthError('Missing password')
|
||||
keyHash = self.info.get('keyHash', None)
|
||||
if keyHash is None and self.password is None:
|
||||
return True
|
||||
if keyHash is None:
|
||||
raise errors.AuthError('Invalid keyHash')
|
||||
if self.password is None:
|
||||
raise errors.AuthError('Missing password')
|
||||
|
||||
keyHashAlgorithmID = self.info.get('keyHashAlgorithmID','MD5')
|
||||
|
||||
password = self.password.encode('utf8')
|
||||
saltHash = self._decode_hex(self.info['salt'])
|
||||
|
||||
keyBasis = password + saltHash
|
||||
self.key = self.hash_algo[keyHashAlgorithmID](keyBasis).digest()
|
||||
keyHash = self.hash_algo[keyHashAlgorithmID](self.key).hexdigest()
|
||||
|
||||
if not keyHash.upper() == self.info['keyHash'].upper():
|
||||
raise errors.AuthError('Invalid Hash')
|
||||
return True
|
||||
|
||||
def validate(self):
|
||||
"""Verify required headers"""
|
||||
for header in self._requiredHeaders:
|
||||
if not self.headers.get(header, False):
|
||||
raise errors.ParseError('Missing Notification Header: ' + header)
|
||||
|
||||
def _format_info(self):
|
||||
"""Generate info line for GNTP Message
|
||||
|
||||
:return string:
|
||||
"""
|
||||
info = 'GNTP/%s %s' % (
|
||||
self.info.get('version'),
|
||||
self.info.get('messagetype'),
|
||||
)
|
||||
if self.info.get('encryptionAlgorithmID', None):
|
||||
info += ' %s:%s' % (
|
||||
self.info.get('encryptionAlgorithmID'),
|
||||
self.info.get('ivValue'),
|
||||
)
|
||||
else:
|
||||
info += ' NONE'
|
||||
|
||||
if self.info.get('keyHashAlgorithmID', None):
|
||||
info += ' %s:%s.%s' % (
|
||||
self.info.get('keyHashAlgorithmID'),
|
||||
self.info.get('keyHash'),
|
||||
self.info.get('salt')
|
||||
)
|
||||
|
||||
return info
|
||||
|
||||
def _parse_dict(self, data):
|
||||
"""Helper function to parse blocks of GNTP headers into a dictionary
|
||||
|
||||
:param string data:
|
||||
:return dict: Dictionary of parsed GNTP Headers
|
||||
"""
|
||||
d = {}
|
||||
for line in data.split('\r\n'):
|
||||
match = GNTP_HEADER.match(line)
|
||||
if not match:
|
||||
continue
|
||||
|
||||
key = match.group(1).strip()
|
||||
val = match.group(2).strip()
|
||||
d[key] = val
|
||||
return d
|
||||
|
||||
def add_header(self, key, value):
|
||||
self.headers[key] = value
|
||||
|
||||
def add_resource(self, data):
|
||||
"""Add binary resource
|
||||
|
||||
:param string data: Binary Data
|
||||
"""
|
||||
data = gntp.shim.b(data)
|
||||
identifier = hashlib.md5(data).hexdigest()
|
||||
self.resources[identifier] = data
|
||||
return 'x-growl-resource://%s' % identifier
|
||||
|
||||
def decode(self, data, password=None):
|
||||
"""Decode GNTP Message
|
||||
|
||||
:param string data:
|
||||
"""
|
||||
self.password = password
|
||||
self.raw = gntp.shim.u(data)
|
||||
parts = self.raw.split('\r\n\r\n')
|
||||
self.info = self._parse_info(self.raw)
|
||||
self.headers = self._parse_dict(parts[0])
|
||||
|
||||
def encode(self):
|
||||
"""Encode a generic GNTP Message
|
||||
|
||||
:return string: GNTP Message ready to be sent. Returned as a byte string
|
||||
"""
|
||||
|
||||
buff = _GNTPBuffer()
|
||||
|
||||
buff.writeln(self._format_info())
|
||||
|
||||
#Headers
|
||||
for k, v in self.headers.items():
|
||||
buff.writeheader(k, v)
|
||||
buff.writeln()
|
||||
|
||||
#Resources
|
||||
for resource, data in self.resources.items():
|
||||
buff.writeheader('Identifier', resource)
|
||||
buff.writeheader('Length', len(data))
|
||||
buff.writeln()
|
||||
buff.write(data)
|
||||
buff.writeln()
|
||||
buff.writeln()
|
||||
|
||||
return buff.getvalue()
|
||||
|
||||
|
||||
class GNTPRegister(_GNTPBase):
|
||||
"""Represents a GNTP Registration Command
|
||||
|
||||
:param string data: (Optional) See decode()
|
||||
:param string password: (Optional) Password to use while encoding/decoding messages
|
||||
"""
|
||||
_requiredHeaders = [
|
||||
'Application-Name',
|
||||
'Notifications-Count'
|
||||
]
|
||||
_requiredNotificationHeaders = ['Notification-Name']
|
||||
|
||||
def __init__(self, data=None, password=None):
|
||||
_GNTPBase.__init__(self, 'REGISTER')
|
||||
self.notifications = []
|
||||
|
||||
if data:
|
||||
self.decode(data, password)
|
||||
else:
|
||||
self.set_password(password)
|
||||
self.add_header('Application-Name', 'pygntp')
|
||||
self.add_header('Notifications-Count', 0)
|
||||
|
||||
def validate(self):
|
||||
'''Validate required headers and validate notification headers'''
|
||||
for header in self._requiredHeaders:
|
||||
if not self.headers.get(header, False):
|
||||
raise errors.ParseError('Missing Registration Header: ' + header)
|
||||
for notice in self.notifications:
|
||||
for header in self._requiredNotificationHeaders:
|
||||
if not notice.get(header, False):
|
||||
raise errors.ParseError('Missing Notification Header: ' + header)
|
||||
|
||||
def decode(self, data, password):
|
||||
"""Decode existing GNTP Registration message
|
||||
|
||||
:param string data: Message to decode
|
||||
"""
|
||||
self.raw = gntp.shim.u(data)
|
||||
parts = self.raw.split('\r\n\r\n')
|
||||
self.info = self._parse_info(self.raw)
|
||||
self._validate_password(password)
|
||||
self.headers = self._parse_dict(parts[0])
|
||||
|
||||
for i, part in enumerate(parts):
|
||||
if i == 0:
|
||||
continue # Skip Header
|
||||
if part.strip() == '':
|
||||
continue
|
||||
notice = self._parse_dict(part)
|
||||
if notice.get('Notification-Name', False):
|
||||
self.notifications.append(notice)
|
||||
elif notice.get('Identifier', False):
|
||||
notice['Data'] = self._decode_binary(part, notice)
|
||||
#open('register.png','wblol').write(notice['Data'])
|
||||
self.resources[notice.get('Identifier')] = notice
|
||||
|
||||
def add_notification(self, name, enabled=True):
|
||||
"""Add new Notification to Registration message
|
||||
|
||||
:param string name: Notification Name
|
||||
:param boolean enabled: Enable this notification by default
|
||||
"""
|
||||
notice = {}
|
||||
notice['Notification-Name'] = name
|
||||
notice['Notification-Enabled'] = enabled
|
||||
|
||||
self.notifications.append(notice)
|
||||
self.add_header('Notifications-Count', len(self.notifications))
|
||||
|
||||
def encode(self):
|
||||
"""Encode a GNTP Registration Message
|
||||
|
||||
:return string: Encoded GNTP Registration message. Returned as a byte string
|
||||
"""
|
||||
|
||||
buff = _GNTPBuffer()
|
||||
|
||||
buff.writeln(self._format_info())
|
||||
|
||||
#Headers
|
||||
for k, v in self.headers.items():
|
||||
buff.writeheader(k, v)
|
||||
buff.writeln()
|
||||
|
||||
#Notifications
|
||||
if len(self.notifications) > 0:
|
||||
for notice in self.notifications:
|
||||
for k, v in notice.items():
|
||||
buff.writeheader(k, v)
|
||||
buff.writeln()
|
||||
|
||||
#Resources
|
||||
for resource, data in self.resources.items():
|
||||
buff.writeheader('Identifier', resource)
|
||||
buff.writeheader('Length', len(data))
|
||||
buff.writeln()
|
||||
buff.write(data)
|
||||
buff.writeln()
|
||||
buff.writeln()
|
||||
|
||||
return buff.getvalue()
|
||||
|
||||
|
||||
class GNTPNotice(_GNTPBase):
|
||||
"""Represents a GNTP Notification Command
|
||||
|
||||
:param string data: (Optional) See decode()
|
||||
:param string app: (Optional) Set Application-Name
|
||||
:param string name: (Optional) Set Notification-Name
|
||||
:param string title: (Optional) Set Notification Title
|
||||
:param string password: (Optional) Password to use while encoding/decoding messages
|
||||
"""
|
||||
_requiredHeaders = [
|
||||
'Application-Name',
|
||||
'Notification-Name',
|
||||
'Notification-Title'
|
||||
]
|
||||
|
||||
def __init__(self, data=None, app=None, name=None, title=None, password=None):
|
||||
_GNTPBase.__init__(self, 'NOTIFY')
|
||||
|
||||
if data:
|
||||
self.decode(data, password)
|
||||
else:
|
||||
self.set_password(password)
|
||||
if app:
|
||||
self.add_header('Application-Name', app)
|
||||
if name:
|
||||
self.add_header('Notification-Name', name)
|
||||
if title:
|
||||
self.add_header('Notification-Title', title)
|
||||
|
||||
def decode(self, data, password):
|
||||
"""Decode existing GNTP Notification message
|
||||
|
||||
:param string data: Message to decode.
|
||||
"""
|
||||
self.raw = gntp.shim.u(data)
|
||||
parts = self.raw.split('\r\n\r\n')
|
||||
self.info = self._parse_info(self.raw)
|
||||
self._validate_password(password)
|
||||
self.headers = self._parse_dict(parts[0])
|
||||
|
||||
for i, part in enumerate(parts):
|
||||
if i == 0:
|
||||
continue # Skip Header
|
||||
if part.strip() == '':
|
||||
continue
|
||||
notice = self._parse_dict(part)
|
||||
if notice.get('Identifier', False):
|
||||
notice['Data'] = self._decode_binary(part, notice)
|
||||
#open('notice.png','wblol').write(notice['Data'])
|
||||
self.resources[notice.get('Identifier')] = notice
|
||||
|
||||
|
||||
class GNTPSubscribe(_GNTPBase):
|
||||
"""Represents a GNTP Subscribe Command
|
||||
|
||||
:param string data: (Optional) See decode()
|
||||
:param string password: (Optional) Password to use while encoding/decoding messages
|
||||
"""
|
||||
_requiredHeaders = [
|
||||
'Subscriber-ID',
|
||||
'Subscriber-Name',
|
||||
]
|
||||
|
||||
def __init__(self, data=None, password=None):
|
||||
_GNTPBase.__init__(self, 'SUBSCRIBE')
|
||||
if data:
|
||||
self.decode(data, password)
|
||||
else:
|
||||
self.set_password(password)
|
||||
|
||||
|
||||
class GNTPOK(_GNTPBase):
|
||||
"""Represents a GNTP OK Response
|
||||
|
||||
:param string data: (Optional) See _GNTPResponse.decode()
|
||||
:param string action: (Optional) Set type of action the OK Response is for
|
||||
"""
|
||||
_requiredHeaders = ['Response-Action']
|
||||
|
||||
def __init__(self, data=None, action=None):
|
||||
_GNTPBase.__init__(self, '-OK')
|
||||
if data:
|
||||
self.decode(data)
|
||||
if action:
|
||||
self.add_header('Response-Action', action)
|
||||
|
||||
|
||||
class GNTPError(_GNTPBase):
|
||||
"""Represents a GNTP Error response
|
||||
|
||||
:param string data: (Optional) See _GNTPResponse.decode()
|
||||
:param string errorcode: (Optional) Error code
|
||||
:param string errordesc: (Optional) Error Description
|
||||
"""
|
||||
_requiredHeaders = ['Error-Code', 'Error-Description']
|
||||
|
||||
def __init__(self, data=None, errorcode=None, errordesc=None):
|
||||
_GNTPBase.__init__(self, '-ERROR')
|
||||
if data:
|
||||
self.decode(data)
|
||||
if errorcode:
|
||||
self.add_header('Error-Code', errorcode)
|
||||
self.add_header('Error-Description', errordesc)
|
||||
|
||||
def error(self):
|
||||
return (self.headers.get('Error-Code', None),
|
||||
self.headers.get('Error-Description', None))
|
||||
|
||||
|
||||
def parse_gntp(data, password=None):
|
||||
"""Attempt to parse a message as a GNTP message
|
||||
|
||||
:param string data: Message to be parsed
|
||||
:param string password: Optional password to be used to verify the message
|
||||
"""
|
||||
data = gntp.shim.u(data)
|
||||
match = GNTP_INFO_LINE_SHORT.match(data)
|
||||
if not match:
|
||||
raise errors.ParseError('INVALID_GNTP_INFO')
|
||||
info = match.groupdict()
|
||||
if info['messagetype'] == 'REGISTER':
|
||||
return GNTPRegister(data, password=password)
|
||||
elif info['messagetype'] == 'NOTIFY':
|
||||
return GNTPNotice(data, password=password)
|
||||
elif info['messagetype'] == 'SUBSCRIBE':
|
||||
return GNTPSubscribe(data, password=password)
|
||||
elif info['messagetype'] == '-OK':
|
||||
return GNTPOK(data)
|
||||
elif info['messagetype'] == '-ERROR':
|
||||
return GNTPError(data)
|
||||
raise errors.ParseError('INVALID_GNTP_MESSAGE')
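
# --- Illustrative round trip (not part of this file; Python 2 era API) ---
# A minimal sketch: build a notification, encode it to wire format, then feed
# the result back through parse_gntp(). Decoding goes through
# _validate_password(), which raises AuthError when no password is supplied,
# so a password is used on both sides here.
#
#     notice = GNTPNotice(app='ExampleApp', name='Notification',
#                         title='Hello', password='secret')
#     notice.add_header('Notification-Text', 'It works')
#     wire = notice.encode()                       # info line, headers, CRLF
#     parsed = parse_gntp(wire, password='secret') # returns a GNTPNotice
#     print(parsed.headers['Notification-Title'])  # -> 'Hello'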
Some files were not shown because too many files have changed in this diff