Compare commits: update-pyt...master (787 commits)
@@ -1,8 +1,8 @@
# Configuring Azure Pipelines with Certbot

Let's begin. All pipelines are defined in `.azure-pipelines`. Currently there are two:
* `.azure-pipelines/main.yml` is the main one, executed on PRs for master and pushes to master,
* `.azure-pipelines/advanced.yml` adds installer testing on top of the main pipeline, and is executed for `test-*` branches, release branches, and the nightly run for master.
* `.azure-pipelines/main.yml` is the main one, executed on PRs for main and pushes to main,
* `.azure-pipelines/advanced.yml` adds installer testing on top of the main pipeline, and is executed for `test-*` branches, release branches, and the nightly run for main.

Several templates are defined in `.azure-pipelines/templates`. These YAML files aggregate common job configuration that can be reused in several pipelines.
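To make that layering concrete, a pipeline in this layout is usually little more than triggers, a few variables, and references to the shared templates. The following is only an illustrative sketch (the file name, trigger choices, and variable values are assumptions for the example, not a copy of the real `main.yml`):

```yaml
# Hypothetical .azure-pipelines/example.yml, shown only to illustrate template reuse.
trigger:
- main
pr:
- main

variables:
  # Consumed by the shared templates, mirroring how the real pipelines pass settings down.
  dockerTag: nightly

stages:
# All concrete job definitions live in the shared template, so every pipeline
# that includes it runs the same test-and-package stage.
- template: templates/stages/test-and-package-stage.yml
```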
@@ -64,7 +64,7 @@ Azure Pipeline needs RW on code, RO on metadata, RW on checks, commit statuses,
RW access here is required to allow updates of the pipeline YAML files from the Azure DevOps interface, and to
update the status of builds and PRs on the GitHub side when Azure Pipelines are triggered.
Note however that no admin access is defined here: this means that Azure Pipelines cannot do anything with
protected branches, like master, and cannot modify the security context around this on GitHub.
protected branches, like main, and cannot modify the security context around this on GitHub.
Access can be defined for all or only selected repositories, which is nice.
```
@@ -91,11 +91,11 @@ grant permissions from Azure Pipelines to GitHub in order to setup a GitHub OAut
then are way too large (admin level on almost everything), while the classic approach does not add any more
permissions, and works perfectly well.__

- Select GitHub in the "Select your repository" section, choose certbot/certbot as the Repository and master as the default branch.
- Select GitHub in the "Select your repository" section, choose certbot/certbot as the Repository and main as the default branch.
- Click on the YAML option for "Select a template".
- Choose a name for the pipeline (e.g. test-pipeline), and browse to the actual pipeline YAML definition in the
  "YAML file path" input (e.g. `.azure-pipelines/test-pipeline.yml`).
- Click "Save & queue", choose the master branch to build the first pipeline, and click the "Save and run" button.
- Click "Save & queue", choose the main branch to build the first pipeline, and click the "Save and run" button.

_Done. Pipeline is operational. Repeat to add more pipelines from existing YAML files in `.azure-pipelines`._
@@ -9,6 +9,7 @@ variables:
  # We don't publish our Docker images in this pipeline, but when building them
  # for testing, let's use the nightly tag.
  dockerTag: nightly
  snapBuildTimeout: 5400

stages:
- template: templates/stages/test-and-package-stage.yml
@@ -1,8 +1,18 @@
trigger: none
# We run the test suite on commits to main so codecov gets coverage data
# about the main branch and can use it to track coverage changes.
trigger:
- main
pr:
- master
- main
- '*.x'

variables:
  # We set this here to avoid coverage data being uploaded from things like our
  # nightly pipeline. This is done because codecov (helpfully) keeps track of
  # the number of coverage uploads for a commit and displays a warning when
  # comparing two commits with an unequal number of uploads. Only uploading
  # coverage here should keep the number of uploads it sees consistent.
  uploadCoverage: true

jobs:
- template: templates/jobs/standard-tests-jobs.yml
@@ -1,4 +1,4 @@
# Nightly pipeline running each day for master.
# Nightly pipeline running each day for main.
trigger: none
pr: none
schedules:
@@ -6,13 +6,14 @@ schedules:
  displayName: Nightly build
  branches:
    include:
    - master
    - main
  always: true

variables:
  dockerTag: nightly
  snapBuildTimeout: 19800

stages:
- template: templates/stages/test-and-package-stage.yml
- template: templates/stages/deploy-stage.yml
- template: templates/stages/nightly-deploy-stage.yml
- template: templates/stages/notify-failure-stage.yml
@@ -8,11 +8,10 @@ pr: none

variables:
  dockerTag: ${{variables['Build.SourceBranchName']}}
  snapBuildTimeout: 19800

stages:
- template: templates/stages/test-and-package-stage.yml
- template: templates/stages/changelog-stage.yml
- template: templates/stages/deploy-stage.yml
  parameters:
    snapReleaseChannel: beta
- template: templates/stages/release-deploy-stage.yml
- template: templates/stages/notify-failure-stage.yml
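For orientation, the stage templates referenced by these pipelines are thin wrappers that forward settings down to job templates. Here is a rough, illustrative sketch of that parameter flow (this is not a verbatim copy of any template in `.azure-pipelines/templates`; the real release and nightly stage templates hard-code `beta` and `edge` respectively):

```yaml
# Hypothetical templates/stages/generic-deploy-stage.yml
parameters:
- name: snapReleaseChannel  # no default, so callers must pick a channel explicitly
  type: string
  values:
  - edge
  - beta

stages:
- stage: Deploy
  jobs:
  - template: ../jobs/common-deploy-jobs.yml
    parameters:
      snapReleaseChannel: ${{ parameters.snapReleaseChannel }}
```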
.azure-pipelines/templates/jobs/common-deploy-jobs.yml (new file, 128 lines)
@@ -0,0 +1,128 @@
|
||||
# As (somewhat) described at
|
||||
# https://docs.microsoft.com/en-us/azure/devops/pipelines/process/templates?view=azure-devops#context,
|
||||
# each template only has access to the parameters passed into it. To help make
|
||||
# use of this design, we define snapReleaseChannel without a default value
|
||||
# which requires the user of this template to define it as described at
|
||||
# https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema/parameters-name?view=azure-pipelines#remarks.
|
||||
# This makes the user of this template be explicit while allowing them to
|
||||
# define their own parameters with defaults that make sense for that context.
|
||||
parameters:
|
||||
- name: snapReleaseChannel
|
||||
type: string
|
||||
values:
|
||||
- edge
|
||||
- beta
|
||||
|
||||
jobs:
|
||||
# This job relies on credentials used to publish the Certbot snaps. This
|
||||
# credential file was created by running:
|
||||
#
|
||||
# snapcraft logout
|
||||
# snapcraft export-login --channels=beta,edge snapcraft.cfg
|
||||
# (provide the shared snapcraft credentials when prompted)
|
||||
#
|
||||
# Then the file was added as a secure file in Azure pipelines
|
||||
# with the name snapcraft.cfg by following the instructions at
|
||||
# https://docs.microsoft.com/en-us/azure/devops/pipelines/library/secure-files?view=azure-devops
|
||||
# including authorizing the file for use in the "nightly" and "release"
|
||||
# pipelines as described at
|
||||
# https://docs.microsoft.com/en-us/azure/devops/pipelines/library/secure-files?view=azure-devops#q-how-do-i-authorize-a-secure-file-for-use-in-a-specific-pipeline.
|
||||
#
|
||||
# This file has a maximum lifetime of one year and the current file will
|
||||
# expire on 2024-02-10. The file will need to be updated before then to
|
||||
# prevent automated deploys from breaking.
|
||||
#
|
||||
# Revoking these credentials can be done by changing the password of the
|
||||
# account used to generate the credentials. See
|
||||
# https://forum.snapcraft.io/t/revoking-exported-credentials/19031 for
|
||||
# more info.
|
||||
- job: publish_snap
|
||||
pool:
|
||||
vmImage: ubuntu-22.04
|
||||
variables:
|
||||
- group: certbot-common
|
||||
strategy:
|
||||
matrix:
|
||||
amd64:
|
||||
SNAP_ARCH: amd64
|
||||
arm32v6:
|
||||
SNAP_ARCH: armhf
|
||||
arm64v8:
|
||||
SNAP_ARCH: arm64
|
||||
steps:
|
||||
- bash: |
|
||||
set -e
|
||||
sudo apt-get update
|
||||
sudo apt-get install -y --no-install-recommends snapd
|
||||
sudo snap install --classic snapcraft
|
||||
displayName: Install dependencies
|
||||
- task: DownloadPipelineArtifact@2
|
||||
inputs:
|
||||
artifact: snaps_$(SNAP_ARCH)
|
||||
path: $(Build.SourcesDirectory)/snap
|
||||
displayName: Retrieve Certbot snaps
|
||||
- task: DownloadSecureFile@1
|
||||
name: snapcraftCfg
|
||||
inputs:
|
||||
secureFile: snapcraft.cfg
|
||||
- bash: |
|
||||
set -e
|
||||
export SNAPCRAFT_STORE_CREDENTIALS=$(cat "$(snapcraftCfg.secureFilePath)")
|
||||
for SNAP_FILE in snap/*.snap; do
|
||||
tools/retry.sh eval snapcraft upload --release=${{ parameters.snapReleaseChannel }} "${SNAP_FILE}"
|
||||
done
|
||||
displayName: Publish to Snap store
|
||||
# The credentials used in the following jobs are for the shared
|
||||
# certbotbot account on Docker Hub. The credentials are stored
|
||||
# in a service account which was created by following the
|
||||
# instructions at
|
||||
# https://docs.microsoft.com/en-us/azure/devops/pipelines/library/service-endpoints?view=azure-devops&tabs=yaml#sep-docreg.
|
||||
# The name given to this service account must match the value
|
||||
# given to containerRegistry below. The authentication used when
|
||||
# creating this service account was a personal access token
|
||||
# rather than a password to bypass 2FA. When Brad set this up,
|
||||
# Azure Pipelines failed to verify the credentials with an error
|
||||
# like "access is forbidden with a JWT issued from a personal
|
||||
# access token", but after saving them without verification, the
|
||||
# access token worked when the pipeline actually ran. "Grant
|
||||
# access to all pipelines" should also be checked on the service
|
||||
# account. The access token can be deleted on Docker Hub if
|
||||
# these credentials need to be revoked.
|
||||
- job: publish_docker_by_arch
|
||||
pool:
|
||||
vmImage: ubuntu-22.04
|
||||
strategy:
|
||||
matrix:
|
||||
arm32v6:
|
||||
DOCKER_ARCH: arm32v6
|
||||
arm64v8:
|
||||
DOCKER_ARCH: arm64v8
|
||||
amd64:
|
||||
DOCKER_ARCH: amd64
|
||||
steps:
|
||||
- task: DownloadPipelineArtifact@2
|
||||
inputs:
|
||||
artifact: docker_$(DOCKER_ARCH)
|
||||
path: $(Build.SourcesDirectory)
|
||||
displayName: Retrieve Docker images
|
||||
- bash: set -e && docker load --input $(Build.SourcesDirectory)/images.tar
|
||||
displayName: Load Docker images
|
||||
- task: Docker@2
|
||||
inputs:
|
||||
command: login
|
||||
containerRegistry: docker-hub
|
||||
displayName: Login to Docker Hub
|
||||
- bash: set -e && tools/docker/deploy_images.sh $(dockerTag) $DOCKER_ARCH
|
||||
displayName: Deploy the Docker images by architecture
|
||||
- job: publish_docker_multiarch
|
||||
dependsOn: publish_docker_by_arch
|
||||
pool:
|
||||
vmImage: ubuntu-22.04
|
||||
steps:
|
||||
- task: Docker@2
|
||||
inputs:
|
||||
command: login
|
||||
containerRegistry: docker-hub
|
||||
displayName: Login to Docker Hub
|
||||
- bash: set -e && tools/docker/deploy_manifests.sh $(dockerTag) all
|
||||
displayName: Deploy the Docker multiarch manifests
|
||||
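The `publish_docker_multiarch` job above delegates the actual work to `tools/docker/deploy_manifests.sh`, whose contents are not shown in this diff. As a hedged illustration of what a multi-arch publish generally involves (the image names and tag below are made up and are not necessarily what that script does), the step boils down to creating and pushing a manifest list that points at the already-pushed per-architecture images:

```yaml
# Illustrative step only; certbot/certbot:nightly and the per-arch tags are hypothetical,
# and the per-arch images are assumed to have been pushed to Docker Hub already.
- bash: |
    set -e
    docker manifest create certbot/certbot:nightly \
      certbot/certbot:amd64-nightly \
      certbot/certbot:arm32v6-nightly \
      certbot/certbot:arm64v8-nightly
    docker manifest push certbot/certbot:nightly
  displayName: Sketch of a multiarch manifest push
```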
@@ -2,98 +2,55 @@ jobs:
|
||||
- job: extended_test
|
||||
variables:
|
||||
- name: IMAGE_NAME
|
||||
value: ubuntu-18.04
|
||||
value: ubuntu-22.04
|
||||
- name: PYTHON_VERSION
|
||||
value: 3.9
|
||||
value: 3.12
|
||||
- group: certbot-common
|
||||
strategy:
|
||||
matrix:
|
||||
linux-py36:
|
||||
PYTHON_VERSION: 3.6
|
||||
TOXENV: py36
|
||||
linux-py37:
|
||||
PYTHON_VERSION: 3.7
|
||||
TOXENV: py37
|
||||
linux-py38:
|
||||
linux-py39:
|
||||
PYTHON_VERSION: 3.9
|
||||
TOXENV: py39
|
||||
linux-py310:
|
||||
PYTHON_VERSION: 3.10
|
||||
TOXENV: py310
|
||||
linux-py311:
|
||||
PYTHON_VERSION: 3.11
|
||||
TOXENV: py311
|
||||
linux-isolated:
|
||||
TOXENV: 'isolated-acme,isolated-certbot,isolated-apache,isolated-cloudflare,isolated-digitalocean,isolated-dnsimple,isolated-dnsmadeeasy,isolated-gehirn,isolated-google,isolated-linode,isolated-luadns,isolated-nsone,isolated-ovh,isolated-rfc2136,isolated-route53,isolated-sakuracloud,isolated-nginx'
|
||||
linux-integration-certbot-oldest:
|
||||
PYTHON_VERSION: 3.8
|
||||
TOXENV: py38
|
||||
linux-py37-nopin:
|
||||
PYTHON_VERSION: 3.7
|
||||
TOXENV: py37
|
||||
CERTBOT_NO_PIN: 1
|
||||
linux-external-mock:
|
||||
TOXENV: external-mock
|
||||
linux-boulder-v1-integration-certbot-oldest:
|
||||
PYTHON_VERSION: 3.6
|
||||
TOXENV: integration-certbot-oldest
|
||||
ACME_SERVER: boulder-v1
|
||||
linux-boulder-v2-integration-certbot-oldest:
|
||||
PYTHON_VERSION: 3.6
|
||||
TOXENV: integration-certbot-oldest
|
||||
ACME_SERVER: boulder-v2
|
||||
linux-boulder-v1-integration-nginx-oldest:
|
||||
PYTHON_VERSION: 3.6
|
||||
TOXENV: integration-nginx-oldest
|
||||
ACME_SERVER: boulder-v1
|
||||
linux-boulder-v2-integration-nginx-oldest:
|
||||
PYTHON_VERSION: 3.6
|
||||
TOXENV: integration-nginx-oldest
|
||||
ACME_SERVER: boulder-v2
|
||||
linux-boulder-v1-py36-integration:
|
||||
PYTHON_VERSION: 3.6
|
||||
TOXENV: integration
|
||||
ACME_SERVER: boulder-v1
|
||||
linux-boulder-v2-py36-integration:
|
||||
PYTHON_VERSION: 3.6
|
||||
TOXENV: integration
|
||||
ACME_SERVER: boulder-v2
|
||||
linux-boulder-v1-py37-integration:
|
||||
PYTHON_VERSION: 3.7
|
||||
TOXENV: integration
|
||||
ACME_SERVER: boulder-v1
|
||||
linux-boulder-v2-py37-integration:
|
||||
PYTHON_VERSION: 3.7
|
||||
TOXENV: integration
|
||||
ACME_SERVER: boulder-v2
|
||||
linux-boulder-v1-py38-integration:
|
||||
linux-integration-nginx-oldest:
|
||||
PYTHON_VERSION: 3.8
|
||||
TOXENV: integration
|
||||
ACME_SERVER: boulder-v1
|
||||
linux-boulder-v2-py38-integration:
|
||||
PYTHON_VERSION: 3.8
|
||||
TOXENV: integration
|
||||
ACME_SERVER: boulder-v2
|
||||
linux-boulder-v1-py39-integration:
|
||||
TOXENV: integration-nginx-oldest
|
||||
# python 3.8 integration tests are not run here because they're run as
|
||||
# part of the standard test suite
|
||||
linux-py39-integration:
|
||||
PYTHON_VERSION: 3.9
|
||||
TOXENV: integration
|
||||
ACME_SERVER: boulder-v1
|
||||
linux-boulder-v2-py39-integration:
|
||||
PYTHON_VERSION: 3.9
|
||||
linux-py310-integration:
|
||||
PYTHON_VERSION: 3.10
|
||||
TOXENV: integration
|
||||
linux-py311-integration:
|
||||
PYTHON_VERSION: 3.11
|
||||
TOXENV: integration
|
||||
linux-py312-integration:
|
||||
PYTHON_VERSION: 3.12
|
||||
TOXENV: integration
|
||||
ACME_SERVER: boulder-v2
|
||||
nginx-compat:
|
||||
TOXENV: nginx_compat
|
||||
linux-integration-rfc2136:
|
||||
IMAGE_NAME: ubuntu-18.04
|
||||
IMAGE_NAME: ubuntu-22.04
|
||||
PYTHON_VERSION: 3.8
|
||||
TOXENV: integration-dns-rfc2136
|
||||
docker-dev:
|
||||
TOXENV: docker_dev
|
||||
macos-farmtest-apache2:
|
||||
# We run one of these test farm tests on macOS to help ensure the
|
||||
# tests continue to work on the platform.
|
||||
IMAGE_NAME: macOS-10.15
|
||||
le-modification:
|
||||
IMAGE_NAME: ubuntu-22.04
|
||||
TOXENV: modification
|
||||
farmtest-apache2:
|
||||
PYTHON_VERSION: 3.8
|
||||
TOXENV: test-farm-apache2
|
||||
farmtest-leauto-upgrades:
|
||||
PYTHON_VERSION: 3.7
|
||||
TOXENV: test-farm-leauto-upgrades
|
||||
farmtest-certonly-standalone:
|
||||
PYTHON_VERSION: 3.7
|
||||
TOXENV: test-farm-certonly-standalone
|
||||
farmtest-sdists:
|
||||
PYTHON_VERSION: 3.7
|
||||
TOXENV: test-farm-sdists
|
||||
pool:
|
||||
vmImage: $(IMAGE_NAME)
|
||||
steps:
|
||||
|
||||
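Each matrix entry above only sets environment variables (`PYTHON_VERSION`, `TOXENV`, and sometimes `ACME_SERVER`) that the shared install-and-test steps hand to tox, so a single cell effectively reduces to a step like the following sketch (simplified; the real shared template also installs system dependencies, pins packages via `tools/pip_install.py`, and handles coverage upload):

```yaml
# Simplified sketch of what one matrix cell (e.g. linux-py312-integration) ends up running.
- bash: |
    set -e
    export TOXENV=integration      # chosen by the matrix entry
    export ACME_SERVER=pebble      # some entries override this, e.g. boulder-v2
    python3 -m tox run             # tox picks the environment up from TOXENV
  displayName: Run the tox environment selected by the matrix
```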
@@ -1,17 +1,15 @@
|
||||
jobs:
|
||||
- job: docker_build
|
||||
pool:
|
||||
vmImage: ubuntu-18.04
|
||||
vmImage: ubuntu-22.04
|
||||
strategy:
|
||||
matrix:
|
||||
arm32v6:
|
||||
DOCKER_ARCH: arm32v6
|
||||
arm64v8:
|
||||
DOCKER_ARCH: arm64v8
|
||||
amd64:
|
||||
DOCKER_ARCH: amd64
|
||||
# Do not run the heavy non-amd64 builds for test branches
|
||||
${{ if not(startsWith(variables['Build.SourceBranchName'], 'test-')) }}:
|
||||
arm32v6:
|
||||
DOCKER_ARCH: arm32v6
|
||||
arm64v8:
|
||||
DOCKER_ARCH: arm64v8
|
||||
# The default timeout of 60 minutes is a little low for compiling
|
||||
# cryptography on ARM architectures.
|
||||
timeoutInMinutes: 180
|
||||
@@ -34,107 +32,40 @@ jobs:
|
||||
path: $(Build.ArtifactStagingDirectory)
|
||||
artifact: docker_$(DOCKER_ARCH)
|
||||
displayName: Store Docker artifact
|
||||
- job: docker_run
|
||||
- job: docker_test
|
||||
dependsOn: docker_build
|
||||
pool:
|
||||
vmImage: ubuntu-18.04
|
||||
vmImage: ubuntu-22.04
|
||||
strategy:
|
||||
matrix:
|
||||
arm32v6:
|
||||
DOCKER_ARCH: arm32v6
|
||||
arm64v8:
|
||||
DOCKER_ARCH: arm64v8
|
||||
amd64:
|
||||
DOCKER_ARCH: amd64
|
||||
steps:
|
||||
- task: DownloadPipelineArtifact@2
|
||||
inputs:
|
||||
artifact: docker_amd64
|
||||
artifact: docker_$(DOCKER_ARCH)
|
||||
path: $(Build.SourcesDirectory)
|
||||
displayName: Retrieve Docker images
|
||||
- bash: set -e && docker load --input $(Build.SourcesDirectory)/images.tar
|
||||
displayName: Load Docker images
|
||||
- bash: |
|
||||
set -ex
|
||||
DOCKER_IMAGES=$(docker images --filter reference='*/certbot' --filter reference='*/dns-*' --format '{{.Repository}}:{{.Tag}}')
|
||||
for DOCKER_IMAGE in ${DOCKER_IMAGES}
|
||||
do docker run --rm "${DOCKER_IMAGE}" plugins --prepare
|
||||
done
|
||||
set -e && tools/docker/test.sh $(dockerTag) $DOCKER_ARCH
|
||||
displayName: Run integration tests for Docker images
|
||||
- job: installer_build
|
||||
pool:
|
||||
vmImage: vs2017-win2016
|
||||
steps:
|
||||
- task: UsePythonVersion@0
|
||||
inputs:
|
||||
versionSpec: 3.8
|
||||
architecture: x86
|
||||
addToPath: true
|
||||
- script: |
|
||||
python -m venv venv
|
||||
venv\Scripts\python tools\pipstrap.py
|
||||
venv\Scripts\python tools\pip_install.py -e windows-installer
|
||||
displayName: Prepare Windows installer build environment
|
||||
- script: |
|
||||
venv\Scripts\construct-windows-installer
|
||||
displayName: Build Certbot installer
|
||||
- task: CopyFiles@2
|
||||
inputs:
|
||||
sourceFolder: $(System.DefaultWorkingDirectory)/windows-installer/build/nsis
|
||||
contents: '*.exe'
|
||||
targetFolder: $(Build.ArtifactStagingDirectory)
|
||||
- task: PublishPipelineArtifact@1
|
||||
inputs:
|
||||
path: $(Build.ArtifactStagingDirectory)
|
||||
# If we change the artifact's name, it should also be changed in tools/create_github_release.py
|
||||
artifact: windows-installer
|
||||
displayName: Publish Windows installer
|
||||
- job: installer_run
|
||||
dependsOn: installer_build
|
||||
strategy:
|
||||
matrix:
|
||||
win2019:
|
||||
imageName: windows-2019
|
||||
win2016:
|
||||
imageName: vs2017-win2016
|
||||
pool:
|
||||
vmImage: $(imageName)
|
||||
steps:
|
||||
- powershell: |
|
||||
if ($PSVersionTable.PSVersion.Major -ne 5) {
|
||||
throw "Powershell version is not 5.x"
|
||||
}
|
||||
condition: eq(variables['imageName'], 'vs2017-win2016')
|
||||
displayName: Check Powershell 5.x is used in vs2017-win2016
|
||||
- task: UsePythonVersion@0
|
||||
inputs:
|
||||
versionSpec: 3.8
|
||||
addToPath: true
|
||||
- task: DownloadPipelineArtifact@2
|
||||
inputs:
|
||||
artifact: windows-installer
|
||||
path: $(Build.SourcesDirectory)/bin
|
||||
displayName: Retrieve Windows installer
|
||||
- script: |
|
||||
python -m venv venv
|
||||
venv\Scripts\python tools\pipstrap.py
|
||||
venv\Scripts\python tools\pip_install.py -e certbot-ci
|
||||
env:
|
||||
PIP_NO_BUILD_ISOLATION: no
|
||||
displayName: Prepare Certbot-CI
|
||||
- script: |
|
||||
set PATH=%ProgramFiles(x86)%\Certbot\bin;%PATH%
|
||||
venv\Scripts\python -m pytest certbot-ci\windows_installer_integration_tests --allow-persistent-changes --installer-path $(Build.SourcesDirectory)\bin\certbot-beta-installer-win32.exe
|
||||
displayName: Run windows installer integration tests
|
||||
- script: |
|
||||
set PATH=%ProgramFiles(x86)%\Certbot\bin;%PATH%
|
||||
venv\Scripts\python -m pytest certbot-ci\certbot_integration_tests\certbot_tests -n 4
|
||||
displayName: Run certbot integration tests
|
||||
- job: snaps_build
|
||||
pool:
|
||||
vmImage: ubuntu-18.04
|
||||
vmImage: ubuntu-22.04
|
||||
strategy:
|
||||
matrix:
|
||||
amd64:
|
||||
SNAP_ARCH: amd64
|
||||
# Do not run the heavy non-amd64 builds for test branches
|
||||
${{ if not(startsWith(variables['Build.SourceBranchName'], 'test-')) }}:
|
||||
armhf:
|
||||
SNAP_ARCH: armhf
|
||||
arm64:
|
||||
SNAP_ARCH: arm64
|
||||
armhf:
|
||||
SNAP_ARCH: armhf
|
||||
arm64:
|
||||
SNAP_ARCH: arm64
|
||||
timeoutInMinutes: 0
|
||||
steps:
|
||||
- script: |
|
||||
@@ -145,7 +76,7 @@ jobs:
|
||||
displayName: Install dependencies
|
||||
- task: UsePythonVersion@0
|
||||
inputs:
|
||||
versionSpec: 3.8
|
||||
versionSpec: 3.12
|
||||
addToPath: true
|
||||
- task: DownloadSecureFile@1
|
||||
name: credentials
|
||||
@@ -157,7 +88,7 @@ jobs:
|
||||
git config --global user.name "$(Build.RequestedFor)"
|
||||
mkdir -p ~/.local/share/snapcraft/provider/launchpad
|
||||
cp $(credentials.secureFilePath) ~/.local/share/snapcraft/provider/launchpad/credentials
|
||||
python3 tools/snap/build_remote.py ALL --archs ${SNAP_ARCH} --timeout 19800
|
||||
python3 tools/snap/build_remote.py ALL --archs ${SNAP_ARCH} --timeout $(snapBuildTimeout)
|
||||
displayName: Build snaps
|
||||
- script: |
|
||||
set -e
|
||||
@@ -172,18 +103,17 @@ jobs:
|
||||
- job: snap_run
|
||||
dependsOn: snaps_build
|
||||
pool:
|
||||
vmImage: ubuntu-18.04
|
||||
vmImage: ubuntu-22.04
|
||||
steps:
|
||||
- task: UsePythonVersion@0
|
||||
inputs:
|
||||
versionSpec: 3.8
|
||||
versionSpec: 3.12
|
||||
addToPath: true
|
||||
- script: |
|
||||
set -e
|
||||
sudo apt-get update
|
||||
sudo apt-get install -y --no-install-recommends nginx-light snapd
|
||||
python3 -m venv venv
|
||||
venv/bin/python tools/pipstrap.py
|
||||
venv/bin/python tools/pip_install.py -U tox
|
||||
displayName: Install dependencies
|
||||
- task: DownloadPipelineArtifact@2
|
||||
@@ -197,12 +127,12 @@ jobs:
|
||||
displayName: Install Certbot snap
|
||||
- script: |
|
||||
set -e
|
||||
venv/bin/python -m tox -e integration-external,apacheconftest-external-with-pebble
|
||||
venv/bin/python -m tox run -e integration-external,apacheconftest-external-with-pebble
|
||||
displayName: Run tox
|
||||
- job: snap_dns_run
|
||||
dependsOn: snaps_build
|
||||
pool:
|
||||
vmImage: ubuntu-18.04
|
||||
vmImage: ubuntu-22.04
|
||||
steps:
|
||||
- script: |
|
||||
set -e
|
||||
@@ -211,7 +141,7 @@ jobs:
|
||||
displayName: Install dependencies
|
||||
- task: UsePythonVersion@0
|
||||
inputs:
|
||||
versionSpec: 3.8
|
||||
versionSpec: 3.12
|
||||
addToPath: true
|
||||
- task: DownloadPipelineArtifact@2
|
||||
inputs:
|
||||
@@ -221,7 +151,6 @@ jobs:
|
||||
- script: |
|
||||
set -e
|
||||
python3 -m venv venv
|
||||
venv/bin/python tools/pipstrap.py
|
||||
venv/bin/python tools/pip_install.py -e certbot-ci
|
||||
displayName: Prepare Certbot-CI
|
||||
- script: |
|
||||
|
||||
@@ -1,71 +1,50 @@
|
||||
jobs:
|
||||
- job: test
|
||||
variables:
|
||||
PYTHON_VERSION: 3.9
|
||||
PYTHON_VERSION: 3.12
|
||||
strategy:
|
||||
matrix:
|
||||
macos-py36:
|
||||
IMAGE_NAME: macOS-10.15
|
||||
PYTHON_VERSION: 3.6
|
||||
TOXENV: py36
|
||||
macos-py39:
|
||||
IMAGE_NAME: macOS-10.15
|
||||
PYTHON_VERSION: 3.9
|
||||
TOXENV: py39
|
||||
windows-py36:
|
||||
IMAGE_NAME: vs2017-win2016
|
||||
PYTHON_VERSION: 3.6
|
||||
TOXENV: py36
|
||||
windows-py38-cover:
|
||||
IMAGE_NAME: vs2017-win2016
|
||||
macos-py38-cover:
|
||||
IMAGE_NAME: macOS-12
|
||||
PYTHON_VERSION: 3.8
|
||||
TOXENV: py38-cover
|
||||
windows-integration-certbot:
|
||||
IMAGE_NAME: vs2017-win2016
|
||||
TOXENV: cover
|
||||
# As of pip 23.1.0, builds started failing on macOS unless this flag was set.
|
||||
# See https://github.com/certbot/certbot/pull/9717#issuecomment-1610861794.
|
||||
PIP_USE_PEP517: "true"
|
||||
macos-cover:
|
||||
IMAGE_NAME: macOS-13
|
||||
TOXENV: cover
|
||||
# See explanation under macos-py38-cover.
|
||||
PIP_USE_PEP517: "true"
|
||||
linux-oldest:
|
||||
IMAGE_NAME: ubuntu-22.04
|
||||
PYTHON_VERSION: 3.8
|
||||
TOXENV: integration-certbot
|
||||
linux-oldest-tests-1:
|
||||
IMAGE_NAME: ubuntu-18.04
|
||||
PYTHON_VERSION: 3.6
|
||||
TOXENV: '{acme,apache,apache-v2,certbot}-oldest'
|
||||
linux-oldest-tests-2:
|
||||
IMAGE_NAME: ubuntu-18.04
|
||||
PYTHON_VERSION: 3.6
|
||||
TOXENV: '{dns,nginx}-oldest'
|
||||
linux-py36:
|
||||
IMAGE_NAME: ubuntu-18.04
|
||||
PYTHON_VERSION: 3.6
|
||||
TOXENV: py36
|
||||
linux-py39-cover:
|
||||
IMAGE_NAME: ubuntu-18.04
|
||||
PYTHON_VERSION: 3.9
|
||||
TOXENV: py39-cover
|
||||
linux-py39-lint:
|
||||
IMAGE_NAME: ubuntu-18.04
|
||||
PYTHON_VERSION: 3.9
|
||||
TOXENV: lint
|
||||
linux-py39-mypy:
|
||||
IMAGE_NAME: ubuntu-18.04
|
||||
PYTHON_VERSION: 3.9
|
||||
TOXENV: oldest
|
||||
linux-py38:
|
||||
IMAGE_NAME: ubuntu-22.04
|
||||
PYTHON_VERSION: 3.8
|
||||
TOXENV: py38
|
||||
linux-cover:
|
||||
IMAGE_NAME: ubuntu-22.04
|
||||
TOXENV: cover
|
||||
linux-lint:
|
||||
IMAGE_NAME: ubuntu-22.04
|
||||
TOXENV: lint-posix
|
||||
linux-mypy:
|
||||
IMAGE_NAME: ubuntu-22.04
|
||||
TOXENV: mypy
|
||||
linux-integration:
|
||||
IMAGE_NAME: ubuntu-18.04
|
||||
IMAGE_NAME: ubuntu-22.04
|
||||
PYTHON_VERSION: 3.8
|
||||
TOXENV: integration
|
||||
ACME_SERVER: pebble
|
||||
apache-compat:
|
||||
IMAGE_NAME: ubuntu-18.04
|
||||
IMAGE_NAME: ubuntu-22.04
|
||||
TOXENV: apache_compat
|
||||
le-modification:
|
||||
IMAGE_NAME: ubuntu-18.04
|
||||
TOXENV: modification
|
||||
apacheconftest:
|
||||
IMAGE_NAME: ubuntu-18.04
|
||||
PYTHON_VERSION: 3.6
|
||||
IMAGE_NAME: ubuntu-22.04
|
||||
TOXENV: apacheconftest-with-pebble
|
||||
nginxroundtrip:
|
||||
IMAGE_NAME: ubuntu-18.04
|
||||
PYTHON_VERSION: 3.6
|
||||
IMAGE_NAME: ubuntu-22.04
|
||||
TOXENV: nginxroundtrip
|
||||
pool:
|
||||
vmImage: $(IMAGE_NAME)
|
||||
|
||||
@@ -3,7 +3,7 @@ stages:
  jobs:
  - job: prepare
    pool:
      vmImage: vs2017-win2016
      vmImage: windows-2019
    steps:
    # If we change the output filename from `release_notes.md`, it should also be changed in tools/create_github_release.py
    - bash: |
@@ -1,106 +0,0 @@
|
||||
parameters:
|
||||
- name: snapReleaseChannel
|
||||
type: string
|
||||
default: edge
|
||||
values:
|
||||
- edge
|
||||
- beta
|
||||
|
||||
stages:
|
||||
- stage: Deploy
|
||||
jobs:
|
||||
# This job relies on credentials used to publish the Certbot snaps. This
|
||||
# credential file was created by running:
|
||||
#
|
||||
# snapcraft logout
|
||||
# snapcraft login (provide the shared snapcraft credentials when prompted)
|
||||
# snapcraft export-login --channels=beta,edge snapcraft.cfg
|
||||
#
|
||||
# Then the file was added as a secure file in Azure pipelines
|
||||
# with the name snapcraft.cfg by following the instructions at
|
||||
# https://docs.microsoft.com/en-us/azure/devops/pipelines/library/secure-files?view=azure-devops
|
||||
# including authorizing the file in all pipelines as described at
|
||||
# https://docs.microsoft.com/en-us/azure/devops/pipelines/library/secure-files?view=azure-devops#how-do-i-authorize-a-secure-file-for-use-in-all-pipelines.
|
||||
#
|
||||
# This file has a maximum lifetime of one year and the current
|
||||
# file will expire on 2021-07-28 which is also tracked by
|
||||
# https://github.com/certbot/certbot/issues/7931. The file will
|
||||
# need to be updated before then to prevent automated deploys
|
||||
# from breaking.
|
||||
#
|
||||
# Revoking these credentials can be done by changing the password of the
|
||||
# account used to generate the credentials. See
|
||||
# https://forum.snapcraft.io/t/revoking-exported-credentials/19031 for
|
||||
# more info.
|
||||
- job: publish_snap
|
||||
pool:
|
||||
vmImage: ubuntu-18.04
|
||||
variables:
|
||||
- group: certbot-common
|
||||
strategy:
|
||||
matrix:
|
||||
amd64:
|
||||
SNAP_ARCH: amd64
|
||||
arm32v6:
|
||||
SNAP_ARCH: armhf
|
||||
arm64v8:
|
||||
SNAP_ARCH: arm64
|
||||
steps:
|
||||
- bash: |
|
||||
set -e
|
||||
sudo apt-get update
|
||||
sudo apt-get install -y --no-install-recommends snapd
|
||||
sudo snap install --classic snapcraft
|
||||
displayName: Install dependencies
|
||||
- task: DownloadPipelineArtifact@2
|
||||
inputs:
|
||||
artifact: snaps_$(SNAP_ARCH)
|
||||
path: $(Build.SourcesDirectory)/snap
|
||||
displayName: Retrieve Certbot snaps
|
||||
- task: DownloadSecureFile@1
|
||||
name: snapcraftCfg
|
||||
inputs:
|
||||
secureFile: snapcraft.cfg
|
||||
- bash: |
|
||||
set -e
|
||||
snapcraft login --with $(snapcraftCfg.secureFilePath)
|
||||
for SNAP_FILE in snap/*.snap; do
|
||||
tools/retry.sh eval snapcraft upload --release=${{ parameters.snapReleaseChannel }} "${SNAP_FILE}"
|
||||
done
|
||||
displayName: Publish to Snap store
|
||||
- job: publish_docker
|
||||
pool:
|
||||
vmImage: ubuntu-18.04
|
||||
strategy:
|
||||
matrix:
|
||||
amd64:
|
||||
DOCKER_ARCH: amd64
|
||||
arm32v6:
|
||||
DOCKER_ARCH: arm32v6
|
||||
arm64v8:
|
||||
DOCKER_ARCH: arm64v8
|
||||
steps:
|
||||
- task: DownloadPipelineArtifact@2
|
||||
inputs:
|
||||
artifact: docker_$(DOCKER_ARCH)
|
||||
path: $(Build.SourcesDirectory)
|
||||
displayName: Retrieve Docker images
|
||||
- bash: set -e && docker load --input $(Build.SourcesDirectory)/images.tar
|
||||
displayName: Load Docker images
|
||||
- task: Docker@2
|
||||
inputs:
|
||||
command: login
|
||||
# The credentials used here are for the shared certbotbot account
|
||||
# on Docker Hub. The credentials are stored in a service account
|
||||
# which was created by following the instructions at
|
||||
# https://docs.microsoft.com/en-us/azure/devops/pipelines/library/service-endpoints?view=azure-devops&tabs=yaml#sep-docreg.
|
||||
# The name given to this service account must match the value
|
||||
# given to containerRegistry below. "Grant access to all
|
||||
# pipelines" should also be checked. To revoke these
|
||||
# credentials, we can change the password on the certbotbot
|
||||
# Docker Hub account or remove the account from the
|
||||
# Certbot organization on Docker Hub.
|
||||
containerRegistry: docker-hub
|
||||
displayName: Login to Docker Hub
|
||||
- bash: set -e && tools/docker/deploy.sh $(dockerTag) $DOCKER_ARCH
|
||||
displayName: Deploy the Docker images
|
||||
@@ -0,0 +1,6 @@
stages:
- stage: Deploy
  jobs:
  - template: ../jobs/common-deploy-jobs.yml
    parameters:
      snapReleaseChannel: edge
.azure-pipelines/templates/stages/release-deploy-stage.yml (new file, 38 lines)
@@ -0,0 +1,38 @@
stages:
- stage: Deploy
  jobs:
  - template: ../jobs/common-deploy-jobs.yml
    parameters:
      snapReleaseChannel: beta
  - job: create_github_release
    pool:
      vmImage: ubuntu-22.04
    steps:
    - task: DownloadPipelineArtifact@2
      inputs:
        artifact: changelog
        path: '$(Pipeline.Workspace)'
    - task: GitHubRelease@1
      inputs:
        # this "github-releases" credential is what azure pipelines calls a
        # "service connection". it was created using the instructions at
        # https://learn.microsoft.com/en-us/azure/devops/pipelines/library/service-endpoints?view=azure-devops&tabs=yaml#github-service-connection
        # with a fine-grained personal access token from github to limit
        # the permissions given to azure pipelines. the connection on azure
        # needs permissions for the "release" pipeline (and maybe the
        # "full-test-suite" pipeline to simplify testing it). information
        # on how to set up these permissions can be found at
        # https://learn.microsoft.com/en-us/azure/devops/pipelines/library/service-endpoints?view=azure-devops&tabs=yaml#secure-a-service-connection.
        # the github token that is used needs "contents:write" and
        # "workflows:write" permissions for the certbot repo
        #
        # as of writing this, the current token will expire on 3/15/2025.
        # when recreating it, you may also want to create it using the
        # shared "certbotbot" github account so the credentials aren't tied
        # to any one dev's github account and their access to the certbot
        # repo
        gitHubConnection: github-releases
        title: ${{ format('Certbot {0}', replace(variables['Build.SourceBranchName'], 'v', '')) }}
        releaseNotesFilePath: '$(Pipeline.Workspace)/release_notes.md'
        assets: '$(Build.SourcesDirectory)/packages/{*.tar.gz,SHA256SUMS*}'
        addChangeLog: false
@@ -1,15 +1,16 @@
steps:
- bash: |
    set -e
    sudo apt-get update
    sudo apt-get install -y --no-install-recommends libaugeas0
    FINAL_STATUS=0
    declare -a FAILED_BUILDS
    python3 -m venv .venv
    source .venv/bin/activate
    python tools/pipstrap.py
    tools/venv.py
    source venv/bin/activate
    for doc_path in */docs
    do
      echo ""
      echo "##[group]Building $doc_path"
      pip install -q -e $doc_path/..[docs]
      if ! sphinx-build -W --keep-going -b html $doc_path $doc_path/_build/html; then
        FINAL_STATUS=1
        FAILED_BUILDS[${#FAILED_BUILDS[@]}]="${doc_path%/docs}"
@@ -1,6 +1,18 @@
|
||||
# This does not include the dependencies needed to build cryptography. See
|
||||
# https://cryptography.io/en/latest/installation/
|
||||
steps:
|
||||
# We run brew update because we've seen attempts to install an older version
|
||||
# of a package fail. See
|
||||
# https://github.com/actions/virtual-environments/issues/3165.
|
||||
#
|
||||
# We untap homebrew/core and homebrew/cask and unset HOMEBREW_NO_INSTALL_FROM_API (which
|
||||
# is set by the CI macOS env) because GitHub has been having issues, making these jobs
|
||||
# fail on git clones: https://github.com/orgs/Homebrew/discussions/4612.
|
||||
- bash: |
|
||||
set -e
|
||||
unset HOMEBREW_NO_INSTALL_FROM_API
|
||||
brew untap homebrew/core homebrew/cask
|
||||
brew update
|
||||
brew install augeas
|
||||
condition: startswith(variables['IMAGE_NAME'], 'macOS')
|
||||
displayName: Install MacOS dependencies
|
||||
@@ -8,32 +20,19 @@ steps:
|
||||
set -e
|
||||
sudo apt-get update
|
||||
sudo apt-get install -y --no-install-recommends \
|
||||
python-dev \
|
||||
gcc \
|
||||
libaugeas0 \
|
||||
libssl-dev \
|
||||
libffi-dev \
|
||||
ca-certificates \
|
||||
nginx-light \
|
||||
openssl
|
||||
nginx-light
|
||||
sudo systemctl stop nginx
|
||||
sudo sysctl net.ipv4.ip_unprivileged_port_start=0
|
||||
condition: startswith(variables['IMAGE_NAME'], 'ubuntu')
|
||||
displayName: Install Linux dependencies
|
||||
- task: UsePythonVersion@0
|
||||
inputs:
|
||||
versionSpec: $(PYTHON_VERSION)
|
||||
addToPath: true
|
||||
# tools/pip_install.py is used to pin packages to a known working version
|
||||
# except in tests where the environment variable CERTBOT_NO_PIN is set.
|
||||
# virtualenv is listed here explicitly to make sure it is upgraded when
|
||||
# CERTBOT_NO_PIN is set to work around failures we've seen when using an older
|
||||
# version of virtualenv. The option "-I" is set so when CERTBOT_NO_PIN is also
|
||||
# set, pip updates dependencies it thinks are already satisfied to avoid some
|
||||
# problems with its lack of real dependency resolution.
|
||||
- bash: |
|
||||
set -e
|
||||
python tools/pipstrap.py
|
||||
python tools/pip_install.py -I tox virtualenv
|
||||
python3 tools/pip_install.py tox
|
||||
displayName: Install runtime dependencies
|
||||
- task: DownloadSecureFile@1
|
||||
name: testFarmPem
|
||||
@@ -45,9 +44,34 @@ steps:
|
||||
export TARGET_BRANCH="`echo "${BUILD_SOURCEBRANCH}" | sed -E 's!refs/(heads|tags)/!!g'`"
|
||||
[ -z "${SYSTEM_PULLREQUEST_TARGETBRANCH}" ] || export TARGET_BRANCH="${SYSTEM_PULLREQUEST_TARGETBRANCH}"
|
||||
env
|
||||
python -m tox
|
||||
python3 -m tox run
|
||||
env:
|
||||
AWS_ACCESS_KEY_ID: $(AWS_ACCESS_KEY_ID)
|
||||
AWS_SECRET_ACCESS_KEY: $(AWS_SECRET_ACCESS_KEY)
|
||||
AWS_EC2_PEM_FILE: $(testFarmPem.secureFilePath)
|
||||
displayName: Run tox
|
||||
# For now, let's omit `set -e` and avoid the script exiting with a nonzero
|
||||
# status code to prevent problems here from causing build failures. If
|
||||
# this turns out to work well, we can change this.
|
||||
- bash: |
|
||||
python3 tools/pip_install.py -I coverage
|
||||
case "$AGENT_OS" in
|
||||
Darwin)
|
||||
CODECOV_URL="https://uploader.codecov.io/latest/macos/codecov"
|
||||
;;
|
||||
Linux)
|
||||
CODECOV_URL="https://uploader.codecov.io/latest/linux/codecov"
|
||||
;;
|
||||
Windows_NT)
|
||||
CODECOV_URL="https://uploader.codecov.io/latest/windows/codecov.exe"
|
||||
;;
|
||||
*)
|
||||
echo "Unexpected OS"
|
||||
exit 0
|
||||
esac
|
||||
curl --retry 3 -o codecov "$CODECOV_URL"
|
||||
chmod +x codecov
|
||||
coverage xml
|
||||
./codecov || echo "Uploading coverage data failed"
|
||||
condition: and(eq(variables['uploadCoverage'], true), or(startsWith(variables['TOXENV'], 'cover'), startsWith(variables['TOXENV'], 'integration')))
|
||||
displayName: Upload coverage data
|
||||
|
||||
.coveragerc (19 lines changed)
@@ -1,5 +1,24 @@
[run]
omit = */setup.py
source =
    acme
    certbot
    certbot-apache
    certbot-dns-cloudflare
    certbot-dns-digitalocean
    certbot-dns-dnsimple
    certbot-dns-dnsmadeeasy
    certbot-dns-gehirn
    certbot-dns-google
    certbot-dns-linode
    certbot-dns-luadns
    certbot-dns-nsone
    certbot-dns-ovh
    certbot-dns-rfc2136
    certbot-dns-route53
    certbot-dns-sakuracloud
    certbot-nginx

[report]
omit = */setup.py
show_missing = True
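The `source` list above is what lets one coverage run attribute data to every distributed Certbot package; the pipeline step shown earlier then converts the results with `coverage xml` before calling the codecov uploader. A simplified sketch of that flow as a pipeline step (the real logic lives in the install-and-test steps template and is gated on the `uploadCoverage` variable):

```yaml
# Simplified sketch; assumes tox and the coverage package are already installed.
- bash: |
    set -e
    python3 -m tox run -e cover   # the cover environment writes .coverage using this .coveragerc
    coverage xml                  # produce the coverage.xml the codecov uploader expects
  displayName: Produce coverage data for upload
  condition: eq(variables['uploadCoverage'], true)
```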
.envrc (deleted file, 12 lines)
@@ -1,12 +0,0 @@
# This file is just a nicety for developers who use direnv. When you cd under
# the Certbot repo, Certbot's virtual environment will be automatically
# activated and then deactivated when you cd elsewhere. Developers have to have
# direnv set up and run `direnv allow` to allow this file to execute on their
# system. You can find more information at https://direnv.net/.
. venv/bin/activate
# direnv doesn't support modifying PS1 so we unset it to squelch the error
# it'll otherwise print about this being done in the activate script. See
# https://github.com/direnv/direnv/wiki/PS1. If you would like your shell
# prompt to change like it normally does, see
# https://github.com/direnv/direnv/wiki/Python#restoring-the-ps1.
unset PS1
.github/FUNDING.yml (new file, 1 line)
@@ -0,0 +1 @@
custom: https://supporters.eff.org/donate/support-work-on-certbot
.github/codecov.yml (new file, 7 lines)
@@ -0,0 +1,7 @@
# This disables all reporting from codecov. Let's just set it up to collect
# data for now and then we can play with the settings here.
comment: false
coverage:
  status:
    project: off
    patch: off
.github/pull_request_template.md (new file, 6 lines)
@@ -0,0 +1,6 @@
## Pull Request Checklist

- [ ] The Certbot team has recently expressed interest in reviewing a PR for this. If not, this PR may be closed due to our limited resources and the need to prioritize how we spend them.
- [ ] If the change being made is to a [distributed component](https://certbot.eff.org/docs/contributing.html#code-components-and-layout), edit the `main` section of `certbot/CHANGELOG.md` to include a description of the change being made.
- [ ] Add or update any documentation as needed to support the changes in this PR.
- [ ] Include your name in `AUTHORS.md` if you like.
.github/stale.yml (deleted file, 35 lines)
@@ -1,35 +0,0 @@
# Configuration for https://github.com/marketplace/stale

# Number of days of inactivity before an Issue or Pull Request becomes stale
daysUntilStale: 365

# Number of days of inactivity before an Issue or Pull Request with the stale label is closed.
# Set to false to disable. If disabled, issues still need to be closed manually, but will remain marked as stale.
# When changing this value, be sure to also update markComment below.
daysUntilClose: 30

# Ignore issues with an assignee (defaults to false)
exemptAssignees: true

# Label to use when marking as stale
staleLabel: needs-update

# Comment to post when marking as stale. Set to `false` to disable
markComment: >
  We've made a lot of changes to Certbot since this issue was opened. If you
  still have this issue with an up-to-date version of Certbot, can you please
  add a comment letting us know? This helps us to better see what issues are
  still affecting our users. If there is no activity in the next 30 days, this
  issue will be automatically closed.

# Comment to post when closing a stale Issue or Pull Request.
closeComment: >
  This issue has been closed due to lack of activity, but if you think it
  should be reopened, please open a new issue with a link to this one and we'll
  take a look.

# Limit the number of actions per hour, from 1-30. Default is 30
limitPerRun: 1

# Don't mark pull requests as stale.
only: issues
21 .github/workflows/merged.yaml vendored Normal file
@@ -0,0 +1,21 @@
name: Merge Event

on:
  pull_request:
    types:
      - closed

jobs:
  if_merged:
    # Forked repos can not access Mattermost secret.
    if: github.event.pull_request.merged == true && !github.event.pull_request.head.repo.fork
    runs-on: ubuntu-latest
    steps:
      - uses: mattermost/action-mattermost-notify@main
        with:
          MATTERMOST_WEBHOOK_URL: ${{ secrets.MATTERMOST_MERGE_WEBHOOK }}
          TEXT: >
            [${{ github.repository }}] |
            [${{ github.event.pull_request.title }}
            #${{ github.event.number }}](https://github.com/${{ github.repository }}/pull/${{ github.event.number }})
            was merged into main by ${{ github.actor }}

27 .github/workflows/notify_weekly.yaml vendored Normal file
@@ -0,0 +1,27 @@
name: Weekly Github Update

on:
  schedule:
    # Every week on Thursday @ 13:00
    - cron: "0 13 * * 4"
  workflow_dispatch:
jobs:
  send-mattermost-message:
    runs-on: ubuntu-latest

    steps:
      - name: Create Mattermost Message
        run: |
          DATE=$(date --date="7 days ago" +"%Y-%m-%d")
          echo "MERGED_URL=https://github.com/pulls?q=merged%3A%3E${DATE}+org%3Acertbot" >> $GITHUB_ENV
          echo "UPDATED_URL=https://github.com/pulls?q=updated%3A%3E${DATE}+org%3Acertbot" >> $GITHUB_ENV
      - uses: mattermost/action-mattermost-notify@main
        with:
          MATTERMOST_WEBHOOK_URL: ${{ secrets.MATTERMOST_WEBHOOK_URL }}
          MATTERMOST_CHANNEL: private-certbot
          TEXT: |
            ## Updates Across Certbot Repos
            - Certbot team members SHOULD look at: [link](${{ env.MERGED_URL }})
            - Certbot team members MAY also want to look at: [link](${{ env.UPDATED_URL }})
            - Want to discuss something today? Place it [here](https://docs.google.com/document/d/17YMUbtC1yg6MfiTMwT8zVm9LmO-cuGVBom0qFn8XJBM/edit?usp=sharing) and we can meet today on Zoom.
            - The key words SHOULD and MAY in this message are to be interpreted as described in [RFC 8174](https://www.rfc-editor.org/rfc/rfc8174).

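As a rough illustration of what the run step above produces (a local sketch using an assumed date, not part of the workflow):

    # if the job were to run on 2024-05-16:
    DATE=$(date --date="7 days ago" +"%Y-%m-%d")    # -> 2024-05-09
    echo "https://github.com/pulls?q=merged%3A%3E${DATE}+org%3Acertbot"
    # -> https://github.com/pulls?q=merged%3A%3E2024-05-09+org%3Acertbot
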
48 .github/workflows/stale.yml vendored Normal file
@@ -0,0 +1,48 @@
name: Update Stale Issues
on:
  schedule:
    # Run 1:24AM every night
    - cron: '24 1 * * *'
  workflow_dispatch:
permissions:
  issues: write
jobs:
  stale:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/stale@v6
        with:
          # Idle number of days before marking issues stale
          days-before-issue-stale: 365

          # Never mark PRs as stale
          days-before-pr-stale: -1

          # Idle number of days before closing stale issues
          days-before-issue-close: 30

          # Never close PRs
          days-before-pr-close: -1

          # Ignore issues with an assignee
          exempt-all-issue-assignees: true

          # Label to use when marking as stale
          stale-issue-label: needs-update

          stale-issue-message: >
            We've made a lot of changes to Certbot since this issue was opened. If you
            still have this issue with an up-to-date version of Certbot, can you please
            add a comment letting us know? This helps us to better see what issues are
            still affecting our users. If there is no activity in the next 30 days, this
            issue will be automatically closed.

          close-issue-message: >
            This issue has been closed due to lack of activity, but if you think it
            should be reopened, please open a new issue with a link to this one and we'll
            take a look.

          # Limit the number of actions per run. As of writing this, GitHub's
          # rate limit is 1000 requests per hour so we're still a ways off. See
          # https://docs.github.com/en/rest/overview/resources-in-the-rest-api?apiVersion=2022-11-28#rate-limits-for-requests-from-github-actions.
          operations-per-run: 180

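Because the workflow above declares workflow_dispatch alongside its cron schedule, it can also be run on demand; for example with the GitHub CLI (a sketch, assuming gh is authenticated for the repository):

    gh workflow run stale.yml            # start a manual run
    gh run list --workflow=stale.yml     # check the status of recent runs
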
10 .gitignore vendored
@@ -4,17 +4,16 @@
build/
dist*/
/venv*/
/kgs/
/.tox/
/releases*/
/log*
letsencrypt.log
certbot.log
letsencrypt-auto-source/letsencrypt-auto.sig.lzma.base64
poetry.lock

# coverage
.coverage
.coverage.*
/htmlcov/

/.vagrant
@@ -32,12 +31,6 @@ tags
# auth --cert-path --chain-path
/*.pem

# letstest
tests/letstest/letest-*/
tests/letstest/*.pem
tests/letstest/venv/
tests/letstest/venv3/

.venv

# pytest cache
@@ -66,3 +59,4 @@ certbot-dns*/certbot-dns*_arm*.txt
/certbot_amd64*.txt
/certbot_arm*.txt
certbot-dns*/snap
snapcraft.cfg

@@ -1,7 +1,7 @@
[settings]
skip_glob=venv*
skip=letsencrypt-auto-source
force_sort_within_sections=True
force_single_line=True
order_by_type=False
line_length=400
src_paths=acme/acme,acme/tests,certbot*/certbot*,certbot*/tests

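These isort settings are picked up automatically when isort runs from the repository root; a typical local check might look like the following (a sketch, assuming isort is installed in the active environment; the target paths are chosen for illustration):

    python -m isort --check-only --diff acme certbot    # report import-ordering problems without rewriting files
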
733 .pylintrc
@@ -1,54 +1,373 @@
|
||||
[MASTER]
|
||||
[MAIN]
|
||||
|
||||
# use as many jobs as there are cores
|
||||
jobs=0
|
||||
# Analyse import fallback blocks. This can be used to support both Python 2 and
|
||||
# 3 compatible code, which means that the block might have code that exists
|
||||
# only in one or another interpreter, leading to false positives when analysed.
|
||||
analyse-fallback-blocks=no
|
||||
|
||||
# Specify a configuration file.
|
||||
#rcfile=
|
||||
# Load and enable all available extensions. Use --list-extensions to see a list
|
||||
# all available extensions.
|
||||
#enable-all-extensions=
|
||||
|
||||
# In error mode, messages with a category besides ERROR or FATAL are
|
||||
# suppressed, and no reports are done by default. Error mode is compatible with
|
||||
# disabling specific errors.
|
||||
#errors-only=
|
||||
|
||||
# Always return a 0 (non-error) status code, even if lint errors are found.
|
||||
# This is primarily useful in continuous integration scripts.
|
||||
#exit-zero=
|
||||
|
||||
# A comma-separated list of package or module names from where C extensions may
|
||||
# be loaded. Extensions are loading into the active Python interpreter and may
|
||||
# run arbitrary code.
|
||||
extension-pkg-allow-list=
|
||||
|
||||
# A comma-separated list of package or module names from where C extensions may
|
||||
# be loaded. Extensions are loading into the active Python interpreter and may
|
||||
# run arbitrary code. (This is an alternative name to extension-pkg-allow-list
|
||||
# for backward compatibility.)
|
||||
extension-pkg-whitelist=pywintypes,win32api,win32file,win32security
|
||||
|
||||
# Return non-zero exit code if any of these messages/categories are detected,
|
||||
# even if score is above --fail-under value. Syntax same as enable. Messages
|
||||
# specified are enabled, while categories only check already-enabled messages.
|
||||
fail-on=
|
||||
|
||||
# Specify a score threshold under which the program will exit with error.
|
||||
fail-under=10
|
||||
|
||||
# Interpret the stdin as a python script, whose filename needs to be passed as
|
||||
# the module_or_package argument.
|
||||
#from-stdin=
|
||||
|
||||
# Files or directories to be skipped. They should be base names, not paths.
|
||||
ignore=CVS
|
||||
|
||||
# Add files or directories matching the regular expressions patterns to the
|
||||
# ignore-list. The regex matches against paths and can be in Posix or Windows
|
||||
# format. Because '\' represents the directory delimiter on Windows systems, it
|
||||
# can't be used as an escape character.
|
||||
# CERTBOT COMMENT
|
||||
# Changing this line back to the default of `ignore-paths=` is being tracked by
|
||||
# https://github.com/certbot/certbot/issues/7908.
|
||||
ignore-paths=.*/_internal/tests/
|
||||
|
||||
# Files or directories matching the regular expression patterns are skipped.
|
||||
# The regex matches against base names, not paths. The default value ignores
|
||||
# Emacs file locks
|
||||
ignore-patterns=^\.#
|
||||
|
||||
# List of module names for which member attributes should not be checked
|
||||
# (useful for modules/projects where namespaces are manipulated during runtime
|
||||
# and thus existing member attributes cannot be deduced by static analysis). It
|
||||
# supports qualified module names, as well as Unix pattern matching.
|
||||
ignored-modules=
|
||||
|
||||
# Python code to execute, usually for sys.path manipulation such as
|
||||
# pygtk.require().
|
||||
# CERTBOT COMMENT
|
||||
# This is needed for pylint to import linter_plugin.py since
|
||||
# https://github.com/PyCQA/pylint/pull/3396.
|
||||
init-hook="import pylint.config, os, sys; sys.path.append(os.path.dirname(pylint.config.PYLINTRC))"
|
||||
init-hook="import pylint.config, os, sys; sys.path.append(os.path.dirname(next(pylint.config.find_default_config_files())))"
|
||||
|
||||
# Profiled execution.
|
||||
profile=no
|
||||
# Use multiple processes to speed up Pylint. Specifying 0 will auto-detect the
|
||||
# number of processors available to use, and will cap the count on Windows to
|
||||
# avoid hangs.
|
||||
jobs=0
|
||||
|
||||
# Add files or directories to the blacklist. They should be base names, not
|
||||
# paths.
|
||||
ignore=CVS
|
||||
# Control the amount of potential inferred values when inferring a single
|
||||
# object. This can help the performance when dealing with large functions or
|
||||
# complex, nested conditions.
|
||||
limit-inference-results=100
|
||||
|
||||
# List of plugins (as comma separated values of python module names) to load,
|
||||
# usually to register additional checkers.
|
||||
load-plugins=linter_plugin
|
||||
|
||||
# Pickle collected data for later comparisons.
|
||||
persistent=yes
|
||||
|
||||
# List of plugins (as comma separated values of python modules names) to load,
|
||||
# usually to register additional checkers.
|
||||
load-plugins=linter_plugin
|
||||
# Minimum Python version to use for version dependent checks. Will default to
|
||||
# the version used to run pylint.
|
||||
py-version=3.10
|
||||
|
||||
# A comma-separated list of package or module names from where C extensions may
|
||||
# be loaded. Extensions are loading into the active Python interpreter and may
|
||||
# run arbitrary code.
|
||||
extension-pkg-whitelist=pywintypes,win32api,win32file,win32security
|
||||
# Discover python modules and packages in the file system subtree.
|
||||
recursive=no
|
||||
|
||||
# When enabled, pylint would attempt to guess common misconfiguration and emit
|
||||
# user-friendly hints instead of false-positive error messages.
|
||||
suggestion-mode=yes
|
||||
|
||||
# Allow loading of arbitrary C extensions. Extensions are imported into the
|
||||
# active Python interpreter and may run arbitrary code.
|
||||
unsafe-load-any-extension=no
|
||||
|
||||
# In verbose mode, extra non-checker-related info will be displayed.
|
||||
#verbose=
|
||||
|
||||
|
||||
[BASIC]
|
||||
|
||||
# Naming style matching correct argument names.
|
||||
argument-naming-style=snake_case
|
||||
|
||||
# Regular expression matching correct argument names. Overrides argument-
|
||||
# naming-style. If left empty, argument names will be checked with the set
|
||||
# naming style.
|
||||
#argument-rgx=
|
||||
|
||||
# Naming style matching correct attribute names.
|
||||
attr-naming-style=snake_case
|
||||
|
||||
# Regular expression matching correct attribute names. Overrides attr-naming-
|
||||
# style. If left empty, attribute names will be checked with the set naming
|
||||
# style.
|
||||
#attr-rgx=
|
||||
|
||||
# Bad variable names which should always be refused, separated by a comma.
|
||||
bad-names=foo,
|
||||
bar,
|
||||
baz,
|
||||
toto,
|
||||
tutu,
|
||||
tata
|
||||
|
||||
# Bad variable names regexes, separated by a comma. If names match any regex,
|
||||
# they will always be refused
|
||||
bad-names-rgxs=
|
||||
|
||||
# Naming style matching correct class attribute names.
|
||||
class-attribute-naming-style=any
|
||||
|
||||
# Regular expression matching correct class attribute names. Overrides class-
|
||||
# attribute-naming-style. If left empty, class attribute names will be checked
|
||||
# with the set naming style.
|
||||
#class-attribute-rgx=
|
||||
|
||||
# Naming style matching correct class constant names.
|
||||
class-const-naming-style=UPPER_CASE
|
||||
|
||||
# Regular expression matching correct class constant names. Overrides class-
|
||||
# const-naming-style. If left empty, class constant names will be checked with
|
||||
# the set naming style.
|
||||
#class-const-rgx=
|
||||
|
||||
# Naming style matching correct class names.
|
||||
class-naming-style=PascalCase
|
||||
|
||||
# Regular expression matching correct class names. Overrides class-naming-
|
||||
# style. If left empty, class names will be checked with the set naming style.
|
||||
#class-rgx=
|
||||
|
||||
# Naming style matching correct constant names.
|
||||
const-naming-style=UPPER_CASE
|
||||
|
||||
# Regular expression matching correct constant names. Overrides const-naming-
|
||||
# style. If left empty, constant names will be checked with the set naming
|
||||
# style.
|
||||
#const-rgx=
|
||||
|
||||
# Minimum line length for functions/classes that require docstrings, shorter
|
||||
# ones are exempt.
|
||||
docstring-min-length=-1
|
||||
|
||||
# Naming style matching correct function names.
|
||||
function-naming-style=snake_case
|
||||
|
||||
# Regular expression matching correct function names. Overrides function-
|
||||
# naming-style. If left empty, function names will be checked with the set
|
||||
# naming style.
|
||||
function-rgx=[a-z_][a-z0-9_]{2,40}$
|
||||
|
||||
# Good variable names which should always be accepted, separated by a comma.
|
||||
good-names=i,
|
||||
j,
|
||||
k,
|
||||
ex,
|
||||
Run,
|
||||
_,
|
||||
fd,
|
||||
logger
|
||||
|
||||
# Good variable names regexes, separated by a comma. If names match any regex,
|
||||
# they will always be accepted
|
||||
good-names-rgxs=
|
||||
|
||||
# Include a hint for the correct naming format with invalid-name.
|
||||
include-naming-hint=no
|
||||
|
||||
# Naming style matching correct inline iteration names.
|
||||
inlinevar-naming-style=any
|
||||
|
||||
# Regular expression matching correct inline iteration names. Overrides
|
||||
# inlinevar-naming-style. If left empty, inline iteration names will be checked
|
||||
# with the set naming style.
|
||||
#inlinevar-rgx=
|
||||
|
||||
# Naming style matching correct method names.
|
||||
method-naming-style=snake_case
|
||||
|
||||
# Regular expression matching correct method names. Overrides method-naming-
|
||||
# style. If left empty, method names will be checked with the set naming style.
|
||||
method-rgx=[a-z_][a-z0-9_]{2,50}$
|
||||
|
||||
# Naming style matching correct module names.
|
||||
module-naming-style=snake_case
|
||||
|
||||
# Regular expression matching correct module names. Overrides module-naming-
|
||||
# style. If left empty, module names will be checked with the set naming style.
|
||||
#module-rgx=
|
||||
|
||||
# Colon-delimited sets of names that determine each other's naming style when
|
||||
# the name regexes allow several styles.
|
||||
name-group=
|
||||
|
||||
# Regular expression which should only match function or class names that do
|
||||
# not require a docstring.
|
||||
no-docstring-rgx=(__.*__)|(test_[A-Za-z0-9_]*)|(_.*)|(.*Test$)
|
||||
|
||||
# List of decorators that produce properties, such as abc.abstractproperty. Add
|
||||
# to this list to register other decorators that produce valid properties.
|
||||
# These decorators are taken in consideration only for invalid-name.
|
||||
property-classes=abc.abstractproperty
|
||||
|
||||
# Regular expression matching correct type variable names. If left empty, type
|
||||
# variable names will be checked with the set naming style.
|
||||
#typevar-rgx=
|
||||
|
||||
# Naming style matching correct variable names.
|
||||
variable-naming-style=snake_case
|
||||
|
||||
# Regular expression matching correct variable names. Overrides variable-
|
||||
# naming-style. If left empty, variable names will be checked with the set
|
||||
# naming style.
|
||||
variable-rgx=[a-z_][a-z0-9_]{1,30}$
|
||||
|
||||
|
||||
[CLASSES]
|
||||
|
||||
# Warn about protected attribute access inside special methods
|
||||
check-protected-access-in-special-methods=no
|
||||
|
||||
# List of method names used to declare (i.e. assign) instance attributes.
|
||||
defining-attr-methods=__init__,
|
||||
__new__,
|
||||
setUp,
|
||||
__post_init__
|
||||
|
||||
# List of valid names for the first argument in a class method.
|
||||
valid-classmethod-first-arg=cls
|
||||
|
||||
# List of valid names for the first argument in a metaclass class method.
|
||||
valid-metaclass-classmethod-first-arg=cls
|
||||
|
||||
|
||||
[EXCEPTIONS]
|
||||
|
||||
# Exceptions that will emit a warning when caught.
|
||||
overgeneral-exceptions=builtins.BaseException,
|
||||
builtins.Exception
|
||||
|
||||
|
||||
[FORMAT]
|
||||
|
||||
# Expected format of line ending, e.g. empty (any line ending), LF or CRLF.
|
||||
expected-line-ending-format=
|
||||
|
||||
# Regexp for a line that is allowed to be longer than the limit.
|
||||
ignore-long-lines=^\s*(# )?<?https?://\S+>?$
|
||||
|
||||
# Number of spaces of indent required inside a hanging or continued line.
|
||||
# git history told me that "This does something silly/broken..."
|
||||
#indent-after-paren=4
|
||||
|
||||
# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1
|
||||
# tab).
|
||||
indent-string=' '
|
||||
|
||||
# Maximum number of characters on a single line.
|
||||
max-line-length=100
|
||||
|
||||
# Maximum number of lines in a module.
|
||||
max-module-lines=1250
|
||||
|
||||
# Allow the body of a class to be on the same line as the declaration if body
|
||||
# contains single statement.
|
||||
single-line-class-stmt=no
|
||||
|
||||
# Allow the body of an if to be on the same line as the test if there is no
|
||||
# else.
|
||||
single-line-if-stmt=no
|
||||
|
||||
|
||||
[IMPORTS]
|
||||
|
||||
# List of modules that can be imported at any level, not just the top level
|
||||
# one.
|
||||
allow-any-import-level=
|
||||
|
||||
# Allow wildcard imports from modules that define __all__.
|
||||
allow-wildcard-with-all=no
|
||||
|
||||
# Deprecated modules which should not be used, separated by a comma.
|
||||
deprecated-modules=
|
||||
|
||||
# Output a graph (.gv or any supported image format) of external dependencies
|
||||
# to the given file (report RP0402 must not be disabled).
|
||||
ext-import-graph=
|
||||
|
||||
# Output a graph (.gv or any supported image format) of all (i.e. internal and
|
||||
# external) dependencies to the given file (report RP0402 must not be
|
||||
# disabled).
|
||||
import-graph=
|
||||
|
||||
# Output a graph (.gv or any supported image format) of internal dependencies
|
||||
# to the given file (report RP0402 must not be disabled).
|
||||
int-import-graph=
|
||||
|
||||
# Force import order to recognize a module as part of the standard
|
||||
# compatibility libraries.
|
||||
known-standard-library=
|
||||
|
||||
# Force import order to recognize a module as part of a third party library.
|
||||
known-third-party=enchant
|
||||
|
||||
# Couples of modules and preferred modules, separated by a comma.
|
||||
preferred-modules=
|
||||
|
||||
|
||||
[LOGGING]
|
||||
|
||||
# The type of string formatting that logging methods do. `old` means using %
|
||||
# formatting, `new` is for `{}` formatting.
|
||||
logging-format-style=old
|
||||
|
||||
# Logging modules to check that the string format arguments are in logging
|
||||
# function parameter format.
|
||||
logging-modules=logging,logger
|
||||
|
||||
|
||||
[MESSAGES CONTROL]
|
||||
|
||||
# Enable the message, report, category or checker with the given id(s). You can
|
||||
# either give multiple identifier separated by comma (,) or put this option
|
||||
# multiple time. See also the "--disable" option for examples.
|
||||
#enable=
|
||||
# Only show warnings with the listed confidence levels. Leave empty to show
|
||||
# all. Valid levels: HIGH, CONTROL_FLOW, INFERENCE, INFERENCE_FAILURE,
|
||||
# UNDEFINED.
|
||||
confidence=HIGH,
|
||||
CONTROL_FLOW,
|
||||
INFERENCE,
|
||||
INFERENCE_FAILURE,
|
||||
UNDEFINED
|
||||
|
||||
# Disable the message, report, category or checker with the given id(s). You
|
||||
# can either give multiple identifiers separated by comma (,) or put this
|
||||
# option multiple times (only on the command line, not in the configuration
|
||||
# file where it should appear only once).You can also use "--disable=all" to
|
||||
# disable everything first and then reenable specific checks. For example, if
|
||||
# file where it should appear only once). You can also use "--disable=all" to
|
||||
# disable everything first and then re-enable specific checks. For example, if
|
||||
# you want to run only the similarities checker, you can use "--disable=all
|
||||
# --enable=similarities". If you want to run only the classes checker, but have
|
||||
# no Warning level messages displayed, use"--disable=all --enable=classes
|
||||
# --disable=W"
|
||||
# no Warning level messages displayed, use "--disable=all --enable=classes
|
||||
# --disable=W".
|
||||
# CERTBOT COMMENT
|
||||
# 1) Once certbot codebase is claimed to be compatible exclusively with Python 3,
|
||||
# the useless-object-inheritance check can be enabled again, and code fixed accordingly.
|
||||
@@ -56,261 +375,203 @@ extension-pkg-whitelist=pywintypes,win32api,win32file,win32security
|
||||
# See https://github.com/PyCQA/pylint/issues/1498.
|
||||
# 3) Same as point 2 for no-value-for-parameter.
|
||||
# See https://github.com/PyCQA/pylint/issues/2820.
|
||||
disable=fixme,locally-disabled,locally-enabled,bad-continuation,no-self-use,invalid-name,cyclic-import,duplicate-code,design,import-outside-toplevel,useless-object-inheritance,unsubscriptable-object,no-value-for-parameter,no-else-return,no-else-raise,no-else-break,no-else-continue
|
||||
# 4) raise-missing-from makes it an error to raise an exception from except
|
||||
# block without using explicit exception chaining. While explicit exception
|
||||
# chaining results in a slightly more informative traceback, I don't think
|
||||
# it's beneficial enough for us to change all of our current instances and
|
||||
# give Certbot developers errors about this when they're working on new code
|
||||
# in the future. You can read more about exception chaining and this pylint
|
||||
# check at
|
||||
# https://blog.ram.rachum.com/post/621791438475296768/improving-python-exception-chaining-with.
|
||||
# 5) wrong-import-order generates false positives and a pylint developer
|
||||
# suggests that people using isort should disable this check at
|
||||
# https://github.com/PyCQA/pylint/issues/3817#issuecomment-687892090.
|
||||
# 6) unspecified-encoding generates errors when encoding is not specified in
|
||||
# in a call to the built-in open function. This relates more to a design decision
|
||||
# (unspecified encoding makes the open function use the default encoding of the system)
|
||||
# than a clear flaw on which a check should be enforced. Anyway the project does
|
||||
# not need to enforce encoding on files so we disable this check.
|
||||
# 7) consider-using-f-string is "suggesting" to move to f-string when possible with an error. This
|
||||
# clearly relates to code design and not to potential defects in the code, let's just ignore that.
|
||||
disable=fixme,locally-disabled,invalid-name,cyclic-import,duplicate-code,design,import-outside-toplevel,useless-object-inheritance,unsubscriptable-object,no-value-for-parameter,no-else-return,no-else-raise,no-else-break,no-else-continue,raise-missing-from,wrong-import-order,unspecified-encoding,consider-using-f-string,raw-checker-failed,bad-inline-option,file-ignored,suppressed-message,useless-suppression,deprecated-pragma,use-symbolic-message-instead
|
||||
|
||||
[REPORTS]
|
||||
|
||||
# Set the output format. Available formats are text, parseable, colorized, msvs
|
||||
# (visual studio) and html. You can also give a reporter class, eg
|
||||
# mypackage.mymodule.MyReporterClass.
|
||||
output-format=text
|
||||
|
||||
# Put messages in a separate file for each module / package specified on the
|
||||
# command line instead of printing them on stdout. Reports (if any) will be
|
||||
# written in a file name "pylint_global.[txt|html]".
|
||||
files-output=no
|
||||
|
||||
# Tells whether to display a full report or only the messages
|
||||
reports=yes
|
||||
|
||||
# Python expression which should return a note less than 10 (10 is the highest
|
||||
# note). You have access to the variables errors warning, statement which
|
||||
# respectively contain the number of errors / warnings messages and the total
|
||||
# number of statements analyzed. This is used by the global evaluation report
|
||||
# (RP0004).
|
||||
evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)
|
||||
|
||||
# Add a comment according to your evaluation note. This is used by the global
|
||||
# evaluation report (RP0004).
|
||||
comment=no
|
||||
|
||||
# Template used to display messages. This is a python new-style format string
|
||||
# used to format the message information. See doc for all details
|
||||
#msg-template=
|
||||
# Enable the message, report, category or checker with the given id(s). You can
|
||||
# either give multiple identifier separated by comma (,) or put this option
|
||||
# multiple time (only on the command line, not in the configuration file where
|
||||
# it should appear only once). See also the "--disable" option for examples.
|
||||
enable=c-extension-no-member
|
||||
|
||||
|
||||
[BASIC]
|
||||
[METHOD_ARGS]
|
||||
|
||||
# Required attributes for module, separated by a comma
|
||||
required-attributes=
|
||||
|
||||
# List of builtins function names that should not be used, separated by a comma
|
||||
bad-functions=map,filter,apply,input,file
|
||||
|
||||
# Good variable names which should always be accepted, separated by a comma
|
||||
good-names=f,i,j,k,ex,Run,_,fd,logger
|
||||
|
||||
# Bad variable names which should always be refused, separated by a comma
|
||||
bad-names=foo,bar,baz,toto,tutu,tata
|
||||
|
||||
# Colon-delimited sets of names that determine each other's naming style when
|
||||
# the name regexes allow several styles.
|
||||
name-group=
|
||||
|
||||
# Include a hint for the correct naming format with invalid-name
|
||||
include-naming-hint=no
|
||||
|
||||
# Regular expression matching correct function names
|
||||
function-rgx=[a-z_][a-z0-9_]{2,40}$
|
||||
|
||||
# Naming hint for function names
|
||||
function-name-hint=[a-z_][a-z0-9_]{2,40}$
|
||||
|
||||
# Regular expression matching correct variable names
|
||||
variable-rgx=[a-z_][a-z0-9_]{1,30}$
|
||||
|
||||
# Naming hint for variable names
|
||||
variable-name-hint=[a-z_][a-z0-9_]{2,30}$
|
||||
|
||||
# Regular expression matching correct constant names
|
||||
const-rgx=(([A-Z_][A-Z0-9_]*)|(__.*__))$
|
||||
|
||||
# Naming hint for constant names
|
||||
const-name-hint=(([A-Z_][A-Z0-9_]*)|(__.*__))$
|
||||
|
||||
# Regular expression matching correct attribute names
|
||||
attr-rgx=[a-z_][a-z0-9_]{2,30}$
|
||||
|
||||
# Naming hint for attribute names
|
||||
attr-name-hint=[a-z_][a-z0-9_]{2,30}$
|
||||
|
||||
# Regular expression matching correct argument names
|
||||
argument-rgx=[a-z_][a-z0-9_]{2,30}$
|
||||
|
||||
# Naming hint for argument names
|
||||
argument-name-hint=[a-z_][a-z0-9_]{2,30}$
|
||||
|
||||
# Regular expression matching correct class attribute names
|
||||
class-attribute-rgx=([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$
|
||||
|
||||
# Naming hint for class attribute names
|
||||
class-attribute-name-hint=([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$
|
||||
|
||||
# Regular expression matching correct inline iteration names
|
||||
inlinevar-rgx=[A-Za-z_][A-Za-z0-9_]*$
|
||||
|
||||
# Naming hint for inline iteration names
|
||||
inlinevar-name-hint=[A-Za-z_][A-Za-z0-9_]*$
|
||||
|
||||
# Regular expression matching correct class names
|
||||
class-rgx=[A-Z_][a-zA-Z0-9]+$
|
||||
|
||||
# Naming hint for class names
|
||||
class-name-hint=[A-Z_][a-zA-Z0-9]+$
|
||||
|
||||
# Regular expression matching correct module names
|
||||
module-rgx=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$
|
||||
|
||||
# Naming hint for module names
|
||||
module-name-hint=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$
|
||||
|
||||
# Regular expression matching correct method names
|
||||
method-rgx=[a-z_][a-z0-9_]{2,50}$
|
||||
|
||||
# Naming hint for method names
|
||||
method-name-hint=[a-z_][a-z0-9_]{2,50}$
|
||||
|
||||
# Regular expression which should only match function or class names that do
|
||||
# not require a docstring.
|
||||
no-docstring-rgx=(__.*__)|(test_[A-Za-z0-9_]*)|(_.*)|(.*Test$)
|
||||
|
||||
# Minimum line length for functions/classes that require docstrings, shorter
|
||||
# ones are exempt.
|
||||
docstring-min-length=-1
|
||||
# List of qualified names (i.e., library.method) which require a timeout
|
||||
# parameter e.g. 'requests.api.get,requests.api.post'
|
||||
timeout-methods=requests.api.delete,requests.api.get,requests.api.head,requests.api.options,requests.api.patch,requests.api.post,requests.api.put,requests.api.request
|
||||
|
||||
|
||||
[MISCELLANEOUS]
|
||||
|
||||
# List of note tags to take in consideration, separated by a comma.
|
||||
notes=FIXME,XXX,TODO
|
||||
notes=FIXME,
|
||||
XXX,
|
||||
TODO
|
||||
|
||||
# Regular expression of note tags to take in consideration.
|
||||
notes-rgx=
|
||||
|
||||
|
||||
[LOGGING]
|
||||
[REFACTORING]
|
||||
|
||||
# Logging modules to check that the string format arguments are in logging
|
||||
# function parameter format
|
||||
logging-modules=logging,logger
|
||||
# Maximum number of nested blocks for function / method body
|
||||
max-nested-blocks=5
|
||||
|
||||
# Complete name of functions that never returns. When checking for
|
||||
# inconsistent-return-statements if a never returning function is called then
|
||||
# it will be considered as an explicit return statement and no message will be
|
||||
# printed.
|
||||
never-returning-functions=sys.exit,argparse.parse_error
|
||||
|
||||
|
||||
[VARIABLES]
|
||||
[REPORTS]
|
||||
|
||||
# Tells whether we should check for unused import in __init__ files.
|
||||
init-import=no
|
||||
# Python expression which should return a score less than or equal to 10. You
|
||||
# have access to the variables 'fatal', 'error', 'warning', 'refactor',
|
||||
# 'convention', and 'info' which contain the number of messages in each
|
||||
# category, as well as 'statement' which is the total number of statements
|
||||
# analyzed. This score is used by the global evaluation report (RP0004).
|
||||
evaluation=max(0, 0 if fatal else 10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10))
|
||||
|
||||
# A regular expression matching the name of dummy variables (i.e. expectedly
|
||||
# not used).
|
||||
dummy-variables-rgx=(unused)?_.*|dummy
|
||||
# Template used to display messages. This is a python new-style format string
|
||||
# used to format the message information. See doc for all details.
|
||||
msg-template=
|
||||
|
||||
# List of additional names supposed to be defined in builtins. Remember that
|
||||
# you should avoid to define new builtins when possible.
|
||||
additional-builtins=
|
||||
# Set the output format. Available formats are text, parseable, colorized, json
|
||||
# and msvs (visual studio). You can also give a reporter class, e.g.
|
||||
# mypackage.mymodule.MyReporterClass.
|
||||
#output-format=
|
||||
|
||||
# Tells whether to display a full report or only the messages.
|
||||
reports=no
|
||||
|
||||
# Activate the evaluation score.
|
||||
score=yes
|
||||
|
||||
|
||||
[SIMILARITIES]
|
||||
|
||||
# Comments are removed from the similarity computation
|
||||
ignore-comments=yes
|
||||
|
||||
# Docstrings are removed from the similarity computation
|
||||
ignore-docstrings=yes
|
||||
|
||||
# Imports are removed from the similarity computation
|
||||
ignore-imports=yes
|
||||
|
||||
# Signatures are removed from the similarity computation
|
||||
ignore-signatures=yes
|
||||
|
||||
# Minimum lines number of a similarity.
|
||||
min-similarity-lines=6
|
||||
|
||||
# Ignore comments when computing similarities.
|
||||
ignore-comments=yes
|
||||
|
||||
# Ignore docstrings when computing similarities.
|
||||
ignore-docstrings=yes
|
||||
[STRING]
|
||||
|
||||
# Ignore imports when computing similarities.
|
||||
ignore-imports=yes
|
||||
# This flag controls whether inconsistent-quotes generates a warning when the
|
||||
# character used as a quote delimiter is used inconsistently within a module.
|
||||
check-quote-consistency=no
|
||||
|
||||
|
||||
[FORMAT]
|
||||
|
||||
# Maximum number of characters on a single line.
|
||||
max-line-length=100
|
||||
|
||||
# Regexp for a line that is allowed to be longer than the limit.
|
||||
ignore-long-lines=^\s*(# )?<?https?://\S+>?$
|
||||
|
||||
# Allow the body of an if to be on the same line as the test if there is no
|
||||
# else.
|
||||
single-line-if-stmt=no
|
||||
|
||||
# List of optional constructs for which whitespace checking is disabled
|
||||
no-space-check=trailing-comma
|
||||
|
||||
# Maximum number of lines in a module
|
||||
max-module-lines=1250
|
||||
|
||||
# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1
|
||||
# tab).
|
||||
indent-string=' '
|
||||
|
||||
# Number of spaces of indent required inside a hanging or continued line.
|
||||
# This does something silly/broken...
|
||||
#indent-after-paren=4
|
||||
# This flag controls whether the implicit-str-concat should generate a warning
|
||||
# on implicit string concatenation in sequences defined over several lines.
|
||||
check-str-concat-over-line-jumps=no
|
||||
|
||||
|
||||
[TYPECHECK]
|
||||
|
||||
# Tells whether missing members accessed in mixin class should be ignored. A
|
||||
# mixin class is detected if its name ends with "mixin" (case insensitive).
|
||||
ignore-mixin-members=yes
|
||||
# List of decorators that produce context managers, such as
|
||||
# contextlib.contextmanager. Add to this list to register other decorators that
|
||||
# produce valid context managers.
|
||||
contextmanager-decorators=contextlib.contextmanager
|
||||
|
||||
# List of members which are set dynamically and missed by pylint inference
|
||||
# system, and so shouldn't trigger E1101 when accessed. Python regular
|
||||
# expressions are accepted.
|
||||
generated-members=
|
||||
|
||||
# Tells whether to warn about missing members when the owner of the attribute
|
||||
# is inferred to be None.
|
||||
ignore-none=yes
|
||||
|
||||
# This flag controls whether pylint should warn about no-member and similar
|
||||
# checks whenever an opaque object is returned when inferring. The inference
|
||||
# can return multiple potential results while evaluating a Python object, but
|
||||
# some branches might not be evaluated, which results in partial inference. In
|
||||
# that case, it might be useful to still emit no-member and other checks for
|
||||
# the rest of the inferred objects.
|
||||
ignore-on-opaque-inference=yes
|
||||
|
||||
# List of symbolic message names to ignore for Mixin members.
|
||||
ignored-checks-for-mixins=no-member,
|
||||
not-async-context-manager,
|
||||
not-context-manager,
|
||||
attribute-defined-outside-init
|
||||
|
||||
# List of class names for which member attributes should not be checked (useful
|
||||
# for classes with dynamically set attributes). This supports the use of
|
||||
# qualified names.
|
||||
ignored-classes=optparse.Values,thread._local,_thread._local,argparse.Namespace,Field,Header,JWS,closing
|
||||
|
||||
# List of module names for which member attributes should not be checked
|
||||
# (useful for modules/projects where namespaces are manipulated during runtime
|
||||
# and thus existing member attributes cannot be deduced by static analysis
|
||||
ignored-modules=pkg_resources,confargparse,argparse
|
||||
# import errors ignored only in 1.4.4
|
||||
# https://bitbucket.org/logilab/pylint/commits/cd000904c9e2
|
||||
ignored-modules=confargparse,argparse
|
||||
|
||||
# List of classes names for which member attributes should not be checked
|
||||
# (useful for classes with attributes dynamically set).
|
||||
ignored-classes=Field,Header,JWS,closing
|
||||
# Show a hint with possible names when a member name was not found. The aspect
|
||||
# of finding the hint is based on edit distance.
|
||||
missing-member-hint=yes
|
||||
|
||||
# When zope mode is activated, add a predefined set of Zope acquired attributes
|
||||
# to generated-members.
|
||||
zope=yes
|
||||
# The minimum edit distance a name should have in order to be considered a
|
||||
# similar match for a missing member name.
|
||||
missing-member-hint-distance=1
|
||||
|
||||
# List of members which are set dynamically and missed by pylint inference
|
||||
# system, and so shouldn't trigger E0201 when accessed. Python regular
|
||||
# expressions are accepted.
|
||||
generated-members=REQUEST,acl_users,aq_parent
|
||||
# The total number of similar names that should be taken in consideration when
|
||||
# showing a hint for a missing member.
|
||||
missing-member-max-choices=1
|
||||
|
||||
# Regex pattern to define which classes are considered mixins.
|
||||
mixin-class-rgx=.*[Mm]ixin
|
||||
|
||||
# List of decorators that change the signature of a decorated function.
|
||||
signature-mutators=
|
||||
|
||||
|
||||
[IMPORTS]
|
||||
[VARIABLES]
|
||||
|
||||
# Deprecated modules which should not be used, separated by a comma
|
||||
deprecated-modules=regsub,TERMIOS,Bastion,rexec
|
||||
# List of additional names supposed to be defined in builtins. Remember that
|
||||
# you should avoid defining new builtins when possible.
|
||||
additional-builtins=
|
||||
|
||||
# Create a graph of every (i.e. internal and external) dependencies in the
|
||||
# given file (report RP0402 must not be disabled)
|
||||
import-graph=
|
||||
# Tells whether unused global variables should be treated as a violation.
|
||||
allow-global-unused-variables=yes
|
||||
|
||||
# Create a graph of external dependencies in the given file (report RP0402 must
|
||||
# not be disabled)
|
||||
ext-import-graph=
|
||||
# List of names allowed to shadow builtins
|
||||
allowed-redefined-builtins=
|
||||
|
||||
# Create a graph of internal dependencies in the given file (report RP0402 must
|
||||
# not be disabled)
|
||||
int-import-graph=
|
||||
# List of strings which can identify a callback function by name. A callback
|
||||
# name must start or end with one of those strings.
|
||||
callbacks=cb_,
|
||||
_cb
|
||||
|
||||
# A regular expression matching the name of dummy variables (i.e. expected to
|
||||
# not be used).
|
||||
dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_
|
||||
|
||||
[CLASSES]
|
||||
# Argument names that match this expression will be ignored.
|
||||
ignored-argument-names=_.*|^ignored_|^unused_
|
||||
|
||||
# List of interface methods to ignore, separated by a comma. This is used for
|
||||
# instance to not check methods defined in Zope's Interface base class.
|
||||
ignore-iface-methods=isImplementedBy,deferred,extends,names,namesAndDescriptions,queryDescriptionFor,getBases,getDescriptionFor,getDoc,getName,getTaggedValue,getTaggedValueTags,isEqualOrExtendedBy,setTaggedValue,isImplementedByInstancesOf,adaptWith,is_implemented_by,implementedBy,providedBy
|
||||
# Tells whether we should check for unused import in __init__ files.
|
||||
init-import=no
|
||||
|
||||
# List of method names used to declare (i.e. assign) instance attributes.
|
||||
defining-attr-methods=__init__,__new__,setUp
|
||||
|
||||
# List of valid names for the first argument in a class method.
|
||||
valid-classmethod-first-arg=cls
|
||||
|
||||
# List of valid names for the first argument in a metaclass class method.
|
||||
valid-metaclass-classmethod-first-arg=mcs
|
||||
|
||||
|
||||
[EXCEPTIONS]
|
||||
|
||||
# Exceptions that will emit a warning when being caught. Defaults to
|
||||
# "Exception"
|
||||
overgeneral-exceptions=Exception
|
||||
# List of qualified module names which can have objects that can redefine
|
||||
# builtins.
|
||||
redefining-builtins-modules=six.moves,past.builtins,future.builtins,builtins,io
|
||||
|
||||
20 AUTHORS.md
@@ -17,9 +17,13 @@ Authors
|
||||
* [Alex Halderman](https://github.com/jhalderm)
|
||||
* [Alex Jordan](https://github.com/strugee)
|
||||
* [Alex Zorin](https://github.com/alexzorin)
|
||||
* [Alexis Hancock](https://github.com/zoracon)
|
||||
* [Amir Omidi](https://github.com/aaomidi)
|
||||
* [Amjad Mashaal](https://github.com/TheNavigat)
|
||||
* [amplifi](https://github.com/amplifi)
|
||||
* [Andrew Murray](https://github.com/radarhere)
|
||||
* [Andrzej Górski](https://github.com/andrzej3393)
|
||||
* [Anna Glasgall](https://github.com/aglasgall)
|
||||
* [Anselm Levskaya](https://github.com/levskaya)
|
||||
* [Antoine Jacoutot](https://github.com/ajacoutot)
|
||||
* [April King](https://github.com/april)
|
||||
@@ -65,6 +69,7 @@ Authors
|
||||
* [Daniel Convissor](https://github.com/convissor)
|
||||
* [Daniel "Drex" Drexler](https://github.com/aeturnum)
|
||||
* [Daniel Huang](https://github.com/dhuang)
|
||||
* [Daniel McMahon](https://github.com/igloodan)
|
||||
* [Dave Guarino](https://github.com/daguar)
|
||||
* [David cz](https://github.com/dave-cz)
|
||||
* [David Dworken](https://github.com/ddworken)
|
||||
@@ -89,6 +94,7 @@ Authors
|
||||
* [Felix Yan](https://github.com/felixonmars)
|
||||
* [Filip Ochnik](https://github.com/filipochnik)
|
||||
* [Florian Klink](https://github.com/flokli)
|
||||
* [Francesco Colista](https://github.com/fcolista)
|
||||
* [Francois Marier](https://github.com/fmarier)
|
||||
* [Frank](https://github.com/Frankkkkk)
|
||||
* [Frederic BLANC](https://github.com/fblanc)
|
||||
@@ -115,8 +121,10 @@ Authors
|
||||
* [Jacob Sachs](https://github.com/jsachs)
|
||||
* [Jairo Llopis](https://github.com/Yajo)
|
||||
* [Jakub Warmuz](https://github.com/kuba)
|
||||
* [James Balazs](https://github.com/jamesbalazs)
|
||||
* [James Kasten](https://github.com/jdkasten)
|
||||
* [Jason Grinblat](https://github.com/ptychomancer)
|
||||
* [Jawshua](https://github.com/jawshua)
|
||||
* [Jay Faulkner](https://github.com/jayofdoom)
|
||||
* [J.C. Jones](https://github.com/jcjones)
|
||||
* [Jeff Hodges](https://github.com/jmhodges)
|
||||
@@ -138,6 +146,7 @@ Authors
|
||||
* [Joubin Jabbari](https://github.com/joubin)
|
||||
* [Juho Juopperi](https://github.com/jkjuopperi)
|
||||
* [Kane York](https://github.com/riking)
|
||||
* [Katsuyoshi Ozaki](https://github.com/moratori)
|
||||
* [Kenichi Maehashi](https://github.com/kmaehashi)
|
||||
* [Kenneth Skovhede](https://github.com/kenkendk)
|
||||
* [Kevin Burke](https://github.com/kevinburke)
|
||||
@@ -146,6 +155,7 @@ Authors
|
||||
* [LeCoyote](https://github.com/LeCoyote)
|
||||
* [Lee Watson](https://github.com/TheReverend403)
|
||||
* [Leo Famulari](https://github.com/lfam)
|
||||
* [Leon G](https://github.com/LeonGr)
|
||||
* [lf](https://github.com/lf-)
|
||||
* [Liam Marshall](https://github.com/liamim)
|
||||
* [Lior Sabag](https://github.com/liorsbg)
|
||||
@@ -156,6 +166,7 @@ Authors
|
||||
* [Luca Ebach](https://github.com/lucebac)
|
||||
* [Luca Olivetti](https://github.com/olivluca)
|
||||
* [Luke Rogers](https://github.com/lukeroge)
|
||||
* [Lukhnos Liu](https://github.com/lukhnos)
|
||||
* [Maarten](https://github.com/mrtndwrd)
|
||||
* [Mads Jensen](https://github.com/atombrella)
|
||||
* [Maikel Martens](https://github.com/krukas)
|
||||
@@ -173,6 +184,7 @@ Authors
|
||||
* [Mathieu Leduc-Hamel](https://github.com/mlhamel)
|
||||
* [Matt Bostock](https://github.com/mattbostock)
|
||||
* [Matthew Ames](https://github.com/SuperMatt)
|
||||
* [Matthew W. Thomas](https://github.com/mwt)
|
||||
* [Michael Schumacher](https://github.com/schumaml)
|
||||
* [Michael Strache](https://github.com/Jarodiv)
|
||||
* [Michael Sverdlin](https://github.com/sveder)
|
||||
@@ -197,24 +209,30 @@ Authors
|
||||
* [osirisinferi](https://github.com/osirisinferi)
|
||||
* Patrick Figel
|
||||
* [Patrick Heppler](https://github.com/PatrickHeppler)
|
||||
* [Paul Buonopane](https://github.com/Zenexer)
|
||||
* [Paul Feitzinger](https://github.com/pfeyz)
|
||||
* [Paulo Dias](https://github.com/paulojmdias)
|
||||
* [Pavan Gupta](https://github.com/pavgup)
|
||||
* [Pavel Pavlov](https://github.com/ghost355)
|
||||
* [Peter Conrad](https://github.com/pconrad-fb)
|
||||
* [Peter Eckersley](https://github.com/pde)
|
||||
* [Peter Mosmans](https://github.com/PeterMosmans)
|
||||
* [Phil Martin](https://github.com/frillip)
|
||||
* [Philippe Langlois](https://github.com/langloisjp)
|
||||
* [Philipp Spitzer](https://github.com/spitza)
|
||||
* [Piero Steinger](https://github.com/Jadaw1n)
|
||||
* [Pierre Jaury](https://github.com/kaiyou)
|
||||
* [Piotr Kasprzyk](https://github.com/kwadrat)
|
||||
* [Prayag Verma](https://github.com/pra85)
|
||||
* [Preston Locke](https://github.com/Preston12321)
|
||||
* [Q Misell](https://magicalcodewit.ch)
|
||||
* [Rasesh Patel](https://github.com/raspat1)
|
||||
* [Reinaldo de Souza Jr](https://github.com/juniorz)
|
||||
* [Remi Rampin](https://github.com/remram44)
|
||||
* [Rémy HUBSCHER](https://github.com/Natim)
|
||||
* [Rémy Léone](https://github.com/sieben)
|
||||
* [Richard Barnes](https://github.com/r-barnes)
|
||||
* [Richard Harman](https://github.com/warewolf)
|
||||
* [Richard Panek](https://github.com/kernelpanek)
|
||||
* [Robert Buchholz](https://github.com/rbu)
|
||||
* [Robert Dailey](https://github.com/pahrohfit)
|
||||
@@ -272,6 +290,7 @@ Authors
|
||||
* [Wilfried Teiken](https://github.com/wteiken)
|
||||
* [Willem Fibbe](https://github.com/fibbers)
|
||||
* [William Budington](https://github.com/Hainish)
|
||||
* [Will Greenberg](https://github.com/wgreenberg)
|
||||
* [Will Newby](https://github.com/willnewby)
|
||||
* [Will Oller](https://github.com/willoller)
|
||||
* [Yan](https://github.com/diracdeltas)
|
||||
@@ -282,3 +301,4 @@ Authors
|
||||
* [Yuseong Cho](https://github.com/g6123)
|
||||
* [Zach Shepherd](https://github.com/zjs)
|
||||
* [陈三](https://github.com/chenxsan)
|
||||
* [Shahar Naveh](https://github.com/ShaharNaveh)
|
||||
|
||||
@@ -1,21 +0,0 @@
# This Dockerfile builds an image for development.
FROM ubuntu:focal

# Note: this only exposes the port to other docker containers.
EXPOSE 80 443

WORKDIR /opt/certbot/src

COPY . .
RUN apt-get update && \
    DEBIAN_FRONTEND=noninteractive apt-get install apache2 git python3-dev \
        python3-venv gcc libaugeas0 libssl-dev libffi-dev ca-certificates \
        openssl nginx-light -y --no-install-recommends && \
    apt-get clean && \
    rm -rf /var/lib/apt/lists/* \
        /tmp/* \
        /var/tmp/*

RUN VENV_NAME="../venv" python3 tools/venv.py

ENV PATH /opt/certbot/venv/bin:$PATH

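For reference, an image built from the development Dockerfile removed above would have been used roughly as follows (a sketch; the image tag is illustrative):

    docker build -t certbot-dev .                                  # build the development image from the repo root
    docker run -it --rm -p 80:80 -p 443:443 certbot-dev bash       # open a shell with the HTTP/HTTPS ports published
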
5 SECURITY.md Normal file
@@ -0,0 +1,5 @@
# Security Policy

## Reporting a Vulnerability

Security vulnerabilities can be reported using GitHub's [private vulnerability reporting tool](https://github.com/certbot/certbot/security/advisories/new).

33 acme/.readthedocs.yaml Normal file
@@ -0,0 +1,33 @@
# Read the Docs configuration file for Sphinx projects
# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details

# Required
version: 2

# Set the OS, Python version and other tools you might need
build:
  os: ubuntu-22.04
  tools:
    python: "3.11"
    # You can also specify other tool versions:

# Build documentation in the "docs/" directory with Sphinx
sphinx:
  configuration: acme/docs/conf.py
  # You can configure Sphinx to use a different builder, for instance use the dirhtml builder for simpler URLs
  # builder: "dirhtml"
  # Fail on all warnings to avoid broken references
  fail_on_warning: true

# Optionally build your docs in additional formats such as PDF and ePub
formats:
  - pdf
  - epub

# Optional but recommended, declare the Python requirements required
# to build your documentation
# See https://docs.readthedocs.io/en/stable/guides/reproducible-builds.html
python:
  install:
    - requirements: acme/readthedocs.org.requirements.txt

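To approximate the Read the Docs build above locally, something like the following should be close (a sketch; it assumes a virtualenv with pip available and that Sphinx is pulled in by the requirements file):

    pip install -r acme/readthedocs.org.requirements.txt
    sphinx-build -W acme/docs acme/docs/_build/html    # -W mirrors fail_on_warning: true
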
@@ -3,6 +3,7 @@ include README.rst
include pytest.ini
recursive-include docs *
recursive-include examples *
recursive-include tests *
recursive-include acme/_internal/tests/testdata *
include acme/py.typed
global-exclude __pycache__
global-exclude *.py[cod]

@@ -2,7 +2,7 @@

This module is an implementation of the `ACME protocol`_.

.. _`ACME protocol`: https://ietf-wg-acme.github.io/acme
.. _`ACME protocol`: https://datatracker.ietf.org/doc/html/rfc8555

"""
import sys

1 acme/acme/_internal/__init__.py Normal file
@@ -0,0 +1 @@
"""acme's internal implementation"""

1 acme/acme/_internal/tests/__init__.py Normal file
@@ -0,0 +1 @@
"""acme tests"""

@@ -1,15 +1,17 @@
"""Tests for acme.challenges."""
import urllib.parse as urllib_parse
import sys
import unittest
from unittest import mock
import urllib.parse as urllib_parse

import josepy as jose
from josepy.jwk import JWKEC
import OpenSSL
import pytest
import requests

from acme import errors

import test_util
from acme._internal.tests import test_util

CERT = test_util.load_comparable_cert('cert.pem')
KEY = jose.JWKRSA(key=test_util.load_rsa_private_key('rsa512_key.pem'))
@@ -21,7 +23,7 @@ class ChallengeTest(unittest.TestCase):
|
||||
from acme.challenges import Challenge
|
||||
from acme.challenges import UnrecognizedChallenge
|
||||
chall = UnrecognizedChallenge({"type": "foo"})
|
||||
self.assertEqual(chall, Challenge.from_json(chall.jobj))
|
||||
assert chall == Challenge.from_json(chall.jobj)
|
||||
|
||||
|
||||
class UnrecognizedChallengeTest(unittest.TestCase):
|
||||
@@ -32,12 +34,11 @@ class UnrecognizedChallengeTest(unittest.TestCase):
|
||||
self.chall = UnrecognizedChallenge(self.jobj)
|
||||
|
||||
def test_to_partial_json(self):
|
||||
self.assertEqual(self.jobj, self.chall.to_partial_json())
|
||||
assert self.jobj == self.chall.to_partial_json()
|
||||
|
||||
def test_from_json(self):
|
||||
from acme.challenges import UnrecognizedChallenge
|
||||
self.assertEqual(
|
||||
self.chall, UnrecognizedChallenge.from_json(self.jobj))
|
||||
assert self.chall == UnrecognizedChallenge.from_json(self.jobj)
|
||||
|
||||
|
||||
class KeyAuthorizationChallengeResponseTest(unittest.TestCase):
|
||||
@@ -53,26 +54,26 @@ class KeyAuthorizationChallengeResponseTest(unittest.TestCase):
|
||||
from acme.challenges import KeyAuthorizationChallengeResponse
|
||||
response = KeyAuthorizationChallengeResponse(
|
||||
key_authorization='foo.oKGqedy-b-acd5eoybm2f-NVFxvyOoET5CNy3xnv8WY')
|
||||
self.assertTrue(response.verify(self.chall, KEY.public_key()))
|
||||
assert response.verify(self.chall, KEY.public_key())
|
||||
|
||||
def test_verify_wrong_token(self):
|
||||
from acme.challenges import KeyAuthorizationChallengeResponse
|
||||
response = KeyAuthorizationChallengeResponse(
|
||||
key_authorization='bar.oKGqedy-b-acd5eoybm2f-NVFxvyOoET5CNy3xnv8WY')
|
||||
self.assertFalse(response.verify(self.chall, KEY.public_key()))
|
||||
assert not response.verify(self.chall, KEY.public_key())
|
||||
|
||||
def test_verify_wrong_thumbprint(self):
|
||||
from acme.challenges import KeyAuthorizationChallengeResponse
|
||||
response = KeyAuthorizationChallengeResponse(
|
||||
key_authorization='foo.oKGqedy-b-acd5eoybm2f-NVFxv')
|
||||
self.assertFalse(response.verify(self.chall, KEY.public_key()))
|
||||
assert not response.verify(self.chall, KEY.public_key())
|
||||
|
||||
def test_verify_wrong_form(self):
|
||||
from acme.challenges import KeyAuthorizationChallengeResponse
|
||||
response = KeyAuthorizationChallengeResponse(
|
||||
key_authorization='.foo.oKGqedy-b-acd5eoybm2f-'
|
||||
'NVFxvyOoET5CNy3xnv8WY')
|
||||
self.assertFalse(response.verify(self.chall, KEY.public_key()))
|
||||
assert not response.verify(self.chall, KEY.public_key())
|
||||
|
||||
|
||||
class DNS01ResponseTest(unittest.TestCase):
|
||||
@@ -91,12 +92,11 @@ class DNS01ResponseTest(unittest.TestCase):
|
||||
self.response = self.chall.response(KEY)
|
||||
|
||||
def test_to_partial_json(self):
|
||||
self.assertEqual({k: v for k, v in self.jmsg.items() if k != 'keyAuthorization'},
|
||||
self.msg.to_partial_json())
|
||||
assert {} == self.msg.to_partial_json()
|
||||
|
||||
def test_from_json(self):
|
||||
from acme.challenges import DNS01Response
|
||||
self.assertEqual(self.msg, DNS01Response.from_json(self.jmsg))
|
||||
assert self.msg == DNS01Response.from_json(self.jmsg)
|
||||
|
||||
def test_from_json_hashable(self):
|
||||
from acme.challenges import DNS01Response
|
||||
@@ -106,12 +106,12 @@ class DNS01ResponseTest(unittest.TestCase):
|
||||
key2 = jose.JWKRSA.load(test_util.load_vector('rsa256_key.pem'))
|
||||
public_key = key2.public_key()
|
||||
verified = self.response.simple_verify(self.chall, "local", public_key)
|
||||
self.assertFalse(verified)
|
||||
assert not verified
|
||||
|
||||
def test_simple_verify_success(self):
|
||||
public_key = KEY.public_key()
|
||||
verified = self.response.simple_verify(self.chall, "local", public_key)
|
||||
self.assertTrue(verified)
|
||||
assert verified
|
||||
|
||||
|
||||
class DNS01Test(unittest.TestCase):
|
||||
@@ -126,20 +126,19 @@ class DNS01Test(unittest.TestCase):
|
||||
}
|
||||
|
||||
def test_validation_domain_name(self):
|
||||
self.assertEqual('_acme-challenge.www.example.com',
|
||||
self.msg.validation_domain_name('www.example.com'))
|
||||
assert '_acme-challenge.www.example.com' == \
|
||||
self.msg.validation_domain_name('www.example.com')
|
||||
|
||||
def test_validation(self):
|
||||
self.assertEqual(
|
||||
"rAa7iIg4K2y63fvUhCfy8dP1Xl7wEhmQq0oChTcE3Zk",
|
||||
self.msg.validation(KEY))
|
||||
assert "rAa7iIg4K2y63fvUhCfy8dP1Xl7wEhmQq0oChTcE3Zk" == \
|
||||
self.msg.validation(KEY)
|
||||
|
||||
def test_to_partial_json(self):
|
||||
self.assertEqual(self.jmsg, self.msg.to_partial_json())
|
||||
assert self.jmsg == self.msg.to_partial_json()
|
||||
|
||||
def test_from_json(self):
|
||||
from acme.challenges import DNS01
|
||||
self.assertEqual(self.msg, DNS01.from_json(self.jmsg))
|
||||
assert self.msg == DNS01.from_json(self.jmsg)
|
||||
|
||||
def test_from_json_hashable(self):
|
||||
from acme.challenges import DNS01
|
||||
@@ -162,13 +161,11 @@ class HTTP01ResponseTest(unittest.TestCase):
|
||||
self.response = self.chall.response(KEY)
|
||||
|
||||
def test_to_partial_json(self):
|
||||
self.assertEqual({k: v for k, v in self.jmsg.items() if k != 'keyAuthorization'},
|
||||
self.msg.to_partial_json())
|
||||
assert {} == self.msg.to_partial_json()
|
||||
|
||||
def test_from_json(self):
|
||||
from acme.challenges import HTTP01Response
|
||||
self.assertEqual(
|
||||
self.msg, HTTP01Response.from_json(self.jmsg))
|
||||
assert self.msg == HTTP01Response.from_json(self.jmsg)
|
||||
|
||||
def test_from_json_hashable(self):
|
||||
from acme.challenges import HTTP01Response
|
||||
@@ -182,15 +179,16 @@ class HTTP01ResponseTest(unittest.TestCase):
|
||||
def test_simple_verify_good_validation(self, mock_get):
|
||||
validation = self.chall.validation(KEY)
|
||||
mock_get.return_value = mock.MagicMock(text=validation)
|
||||
self.assertTrue(self.response.simple_verify(
|
||||
self.chall, "local", KEY.public_key()))
|
||||
mock_get.assert_called_once_with(self.chall.uri("local"), verify=False)
|
||||
assert self.response.simple_verify(
|
||||
self.chall, "local", KEY.public_key())
|
||||
mock_get.assert_called_once_with(self.chall.uri("local"), verify=False,
|
||||
timeout=mock.ANY)
|
||||
|
||||
@mock.patch("acme.challenges.requests.get")
|
||||
def test_simple_verify_bad_validation(self, mock_get):
|
||||
mock_get.return_value = mock.MagicMock(text="!")
|
||||
self.assertFalse(self.response.simple_verify(
|
||||
self.chall, "local", KEY.public_key()))
|
||||
assert not self.response.simple_verify(
|
||||
self.chall, "local", KEY.public_key())
|
||||
|
||||
@mock.patch("acme.challenges.requests.get")
|
||||
def test_simple_verify_whitespace_validation(self, mock_get):
|
||||
@@ -198,23 +196,34 @@ class HTTP01ResponseTest(unittest.TestCase):
|
||||
mock_get.return_value = mock.MagicMock(
|
||||
text=(self.chall.validation(KEY) +
|
||||
HTTP01Response.WHITESPACE_CUTSET))
|
||||
self.assertTrue(self.response.simple_verify(
|
||||
self.chall, "local", KEY.public_key()))
|
||||
mock_get.assert_called_once_with(self.chall.uri("local"), verify=False)
|
||||
assert self.response.simple_verify(
|
||||
self.chall, "local", KEY.public_key())
|
||||
mock_get.assert_called_once_with(self.chall.uri("local"), verify=False,
|
||||
timeout=mock.ANY)
|
||||
|
||||
@mock.patch("acme.challenges.requests.get")
|
||||
def test_simple_verify_connection_error(self, mock_get):
|
||||
mock_get.side_effect = requests.exceptions.RequestException
|
||||
self.assertFalse(self.response.simple_verify(
|
||||
self.chall, "local", KEY.public_key()))
|
||||
assert not self.response.simple_verify(
|
||||
self.chall, "local", KEY.public_key())
|
||||
|
||||
@mock.patch("acme.challenges.requests.get")
|
||||
def test_simple_verify_port(self, mock_get):
|
||||
self.response.simple_verify(
|
||||
self.chall, domain="local",
|
||||
account_public_key=KEY.public_key(), port=8080)
|
||||
self.assertEqual("local:8080", urllib_parse.urlparse(
|
||||
mock_get.mock_calls[0][1][0]).netloc)
|
||||
assert "local:8080" == urllib_parse.urlparse(
|
||||
mock_get.mock_calls[0][1][0]).netloc
|
||||
|
||||
@mock.patch("acme.challenges.requests.get")
|
||||
def test_simple_verify_timeout(self, mock_get):
|
||||
self.response.simple_verify(self.chall, "local", KEY.public_key())
|
||||
mock_get.assert_called_once_with(self.chall.uri("local"), verify=False,
|
||||
timeout=30)
|
||||
mock_get.reset_mock()
|
||||
self.response.simple_verify(self.chall, "local", KEY.public_key(), timeout=1234)
|
||||
mock_get.assert_called_once_with(self.chall.uri("local"), verify=False,
|
||||
timeout=1234)
|
||||
|
||||
|
||||
class HTTP01Test(unittest.TestCase):
|
||||
@@ -230,30 +239,28 @@ class HTTP01Test(unittest.TestCase):
|
||||
}
|
||||
|
||||
def test_path(self):
|
||||
self.assertEqual(self.msg.path, '/.well-known/acme-challenge/'
|
||||
'evaGxfADs6pSRb2LAv9IZf17Dt3juxGJ-PCt92wr-oA')
|
||||
assert self.msg.path == '/.well-known/acme-challenge/' \
|
||||
'evaGxfADs6pSRb2LAv9IZf17Dt3juxGJ-PCt92wr-oA'
|
||||
|
||||
def test_uri(self):
|
||||
self.assertEqual(
|
||||
'http://example.com/.well-known/acme-challenge/'
|
||||
'evaGxfADs6pSRb2LAv9IZf17Dt3juxGJ-PCt92wr-oA',
|
||||
self.msg.uri('example.com'))
|
||||
assert 'http://example.com/.well-known/acme-challenge/' \
|
||||
'evaGxfADs6pSRb2LAv9IZf17Dt3juxGJ-PCt92wr-oA' == \
|
||||
self.msg.uri('example.com')
|
||||
|
||||
def test_to_partial_json(self):
|
||||
self.assertEqual(self.jmsg, self.msg.to_partial_json())
|
||||
assert self.jmsg == self.msg.to_partial_json()
|
||||
|
||||
def test_from_json(self):
|
||||
from acme.challenges import HTTP01
|
||||
self.assertEqual(self.msg, HTTP01.from_json(self.jmsg))
|
||||
assert self.msg == HTTP01.from_json(self.jmsg)
|
||||
|
||||
def test_from_json_hashable(self):
|
||||
from acme.challenges import HTTP01
|
||||
hash(HTTP01.from_json(self.jmsg))
|
||||
|
||||
def test_good_token(self):
|
||||
self.assertTrue(self.msg.good_token)
|
||||
self.assertFalse(
|
||||
self.msg.update(token=b'..').good_token)
|
||||
assert self.msg.good_token
|
||||
assert not self.msg.update(token=b'..').good_token
|
||||
|
||||
|
||||
class TLSALPN01ResponseTest(unittest.TestCase):
|
||||
@@ -273,12 +280,11 @@ class TLSALPN01ResponseTest(unittest.TestCase):
|
||||
}
|
||||
|
||||
def test_to_partial_json(self):
|
||||
self.assertEqual({k: v for k, v in self.jmsg.items() if k != 'keyAuthorization'},
|
||||
self.response.to_partial_json())
|
||||
assert {} == self.response.to_partial_json()
|
||||
|
||||
def test_from_json(self):
|
||||
from acme.challenges import TLSALPN01Response
|
||||
self.assertEqual(self.response, TLSALPN01Response.from_json(self.jmsg))
|
||||
assert self.response == TLSALPN01Response.from_json(self.jmsg)
|
||||
|
||||
def test_from_json_hashable(self):
|
||||
from acme.challenges import TLSALPN01Response
|
||||
@@ -287,23 +293,23 @@ class TLSALPN01ResponseTest(unittest.TestCase):
|
||||
def test_gen_verify_cert(self):
|
||||
key1 = test_util.load_pyopenssl_private_key('rsa512_key.pem')
|
||||
cert, key2 = self.response.gen_cert(self.domain, key1)
|
||||
self.assertEqual(key1, key2)
|
||||
self.assertTrue(self.response.verify_cert(self.domain, cert))
|
||||
assert key1 == key2
|
||||
assert self.response.verify_cert(self.domain, cert)
|
||||
|
||||
def test_gen_verify_cert_gen_key(self):
|
||||
cert, key = self.response.gen_cert(self.domain)
|
||||
self.assertTrue(isinstance(key, OpenSSL.crypto.PKey))
|
||||
self.assertTrue(self.response.verify_cert(self.domain, cert))
|
||||
assert isinstance(key, OpenSSL.crypto.PKey)
|
||||
assert self.response.verify_cert(self.domain, cert)
|
||||
|
||||
def test_verify_bad_cert(self):
|
||||
self.assertFalse(self.response.verify_cert(self.domain,
|
||||
test_util.load_cert('cert.pem')))
|
||||
assert not self.response.verify_cert(self.domain,
|
||||
test_util.load_cert('cert.pem'))
|
||||
|
||||
def test_verify_bad_domain(self):
|
||||
key1 = test_util.load_pyopenssl_private_key('rsa512_key.pem')
|
||||
cert, key2 = self.response.gen_cert(self.domain, key1)
|
||||
self.assertEqual(key1, key2)
|
||||
self.assertFalse(self.response.verify_cert(self.domain2, cert))
|
||||
assert key1 == key2
|
||||
assert not self.response.verify_cert(self.domain2, cert)
|
||||
|
||||
def test_simple_verify_bad_key_authorization(self):
|
||||
key2 = jose.JWKRSA.load(test_util.load_vector('rsa256_key.pem'))
|
||||
@@ -312,10 +318,9 @@ class TLSALPN01ResponseTest(unittest.TestCase):
|
||||
@mock.patch('acme.challenges.TLSALPN01Response.verify_cert', autospec=True)
|
||||
def test_simple_verify(self, mock_verify_cert):
|
||||
mock_verify_cert.return_value = mock.sentinel.verification
|
||||
self.assertEqual(
|
||||
mock.sentinel.verification, self.response.simple_verify(
|
||||
assert mock.sentinel.verification == self.response.simple_verify(
|
||||
self.chall, self.domain, KEY.public_key(),
|
||||
cert=mock.sentinel.cert))
|
||||
cert=mock.sentinel.cert)
|
||||
mock_verify_cert.assert_called_once_with(
|
||||
self.response, self.domain, mock.sentinel.cert)
|
||||
|
||||
@@ -326,19 +331,19 @@ class TLSALPN01ResponseTest(unittest.TestCase):
|
||||
self.response.probe_cert('foo.com')
mock_gethostbyname.assert_called_once_with('foo.com')
mock_probe_sni.assert_called_once_with(
host='127.0.0.1', port=self.response.PORT, name='foo.com',
alpn_protocols=['acme-tls/1'])
host=b'127.0.0.1', port=self.response.PORT, name=b'foo.com',
alpn_protocols=[b'acme-tls/1'])

self.response.probe_cert('foo.com', host='8.8.8.8')
mock_probe_sni.assert_called_with(
host='8.8.8.8', port=mock.ANY, name='foo.com',
alpn_protocols=['acme-tls/1'])
host=b'8.8.8.8', port=mock.ANY, name=b'foo.com',
alpn_protocols=[b'acme-tls/1'])
|
||||
@mock.patch('acme.challenges.TLSALPN01Response.probe_cert')
|
||||
def test_simple_verify_false_on_probe_error(self, mock_probe_cert):
|
||||
mock_probe_cert.side_effect = errors.Error
|
||||
self.assertFalse(self.response.simple_verify(
|
||||
self.chall, self.domain, KEY.public_key()))
|
||||
assert not self.response.simple_verify(
|
||||
self.chall, self.domain, KEY.public_key())
|
||||
|
||||
|
||||
class TLSALPN01Test(unittest.TestCase):
|
||||
@@ -353,11 +358,11 @@ class TLSALPN01Test(unittest.TestCase):
|
||||
}
|
||||
|
||||
def test_to_partial_json(self):
|
||||
self.assertEqual(self.jmsg, self.msg.to_partial_json())
|
||||
assert self.jmsg == self.msg.to_partial_json()
|
||||
|
||||
def test_from_json(self):
|
||||
from acme.challenges import TLSALPN01
|
||||
self.assertEqual(self.msg, TLSALPN01.from_json(self.jmsg))
|
||||
assert self.msg == TLSALPN01.from_json(self.jmsg)
|
||||
|
||||
def test_from_json_hashable(self):
|
||||
from acme.challenges import TLSALPN01
|
||||
@@ -366,14 +371,14 @@ class TLSALPN01Test(unittest.TestCase):
|
||||
def test_from_json_invalid_token_length(self):
|
||||
from acme.challenges import TLSALPN01
|
||||
self.jmsg['token'] = jose.encode_b64jose(b'abcd')
|
||||
self.assertRaises(
|
||||
jose.DeserializationError, TLSALPN01.from_json, self.jmsg)
|
||||
with pytest.raises(jose.DeserializationError):
|
||||
TLSALPN01.from_json(self.jmsg)
|
||||
|
||||
@mock.patch('acme.challenges.TLSALPN01Response.gen_cert')
|
||||
def test_validation(self, mock_gen_cert):
|
||||
mock_gen_cert.return_value = ('cert', 'key')
|
||||
self.assertEqual(('cert', 'key'), self.msg.validation(
|
||||
KEY, cert_key=mock.sentinel.cert_key, domain=mock.sentinel.domain))
|
||||
assert ('cert', 'key') == self.msg.validation(
|
||||
KEY, cert_key=mock.sentinel.cert_key, domain=mock.sentinel.domain)
|
||||
mock_gen_cert.assert_called_once_with(key=mock.sentinel.cert_key,
|
||||
domain=mock.sentinel.domain)
|
||||
|
||||
@@ -390,24 +395,27 @@ class DNSTest(unittest.TestCase):
|
||||
}
|
||||
|
||||
def test_to_partial_json(self):
|
||||
self.assertEqual(self.jmsg, self.msg.to_partial_json())
|
||||
assert self.jmsg == self.msg.to_partial_json()
|
||||
|
||||
def test_from_json(self):
|
||||
from acme.challenges import DNS
|
||||
self.assertEqual(self.msg, DNS.from_json(self.jmsg))
|
||||
assert self.msg == DNS.from_json(self.jmsg)
|
||||
|
||||
def test_from_json_hashable(self):
|
||||
from acme.challenges import DNS
|
||||
hash(DNS.from_json(self.jmsg))
|
||||
|
||||
def test_gen_check_validation(self):
|
||||
self.assertTrue(self.msg.check_validation(
|
||||
self.msg.gen_validation(KEY), KEY.public_key()))
|
||||
ec_key_secp384r1 = JWKEC(key=test_util.load_ecdsa_private_key('ec_secp384r1_key.pem'))
|
||||
for key, alg in [(KEY, jose.RS256), (ec_key_secp384r1, jose.ES384)]:
|
||||
with self.subTest(key=key, alg=alg):
|
||||
assert self.msg.check_validation(
|
||||
self.msg.gen_validation(key, alg=alg), key.public_key())
|
||||
|
||||
def test_gen_check_validation_wrong_key(self):
|
||||
key2 = jose.JWKRSA.load(test_util.load_vector('rsa1024_key.pem'))
|
||||
self.assertFalse(self.msg.check_validation(
|
||||
self.msg.gen_validation(KEY), key2.public_key()))
|
||||
assert not self.msg.check_validation(
|
||||
self.msg.gen_validation(KEY), key2.public_key())
|
||||
|
||||
def test_check_validation_wrong_payload(self):
|
||||
validations = tuple(
|
||||
@@ -415,28 +423,32 @@ class DNSTest(unittest.TestCase):
|
||||
for payload in (b'', b'{}')
|
||||
)
|
||||
for validation in validations:
|
||||
self.assertFalse(self.msg.check_validation(
|
||||
validation, KEY.public_key()))
|
||||
assert not self.msg.check_validation(
|
||||
validation, KEY.public_key())
|
||||
|
||||
def test_check_validation_wrong_fields(self):
|
||||
bad_validation = jose.JWS.sign(
|
||||
payload=self.msg.update(
|
||||
token=b'x' * 20).json_dumps().encode('utf-8'),
|
||||
alg=jose.RS256, key=KEY)
|
||||
self.assertFalse(self.msg.check_validation(
|
||||
bad_validation, KEY.public_key()))
|
||||
assert not self.msg.check_validation(bad_validation, KEY.public_key())
|
||||
|
||||
def test_gen_response(self):
|
||||
with mock.patch('acme.challenges.DNS.gen_validation') as mock_gen:
|
||||
mock_gen.return_value = mock.sentinel.validation
|
||||
response = self.msg.gen_response(KEY)
|
||||
from acme.challenges import DNSResponse
|
||||
self.assertTrue(isinstance(response, DNSResponse))
|
||||
self.assertEqual(response.validation, mock.sentinel.validation)
|
||||
assert isinstance(response, DNSResponse)
|
||||
assert response.validation == mock.sentinel.validation
|
||||
|
||||
def test_validation_domain_name(self):
|
||||
self.assertEqual(
|
||||
'_acme-challenge.le.wtf', self.msg.validation_domain_name('le.wtf'))
|
||||
assert '_acme-challenge.le.wtf' == self.msg.validation_domain_name('le.wtf')
|
||||
|
||||
def test_validation_domain_name_ecdsa(self):
|
||||
ec_key_secp384r1 = JWKEC(key=test_util.load_ecdsa_private_key('ec_secp384r1_key.pem'))
|
||||
assert self.msg.check_validation(
|
||||
self.msg.gen_validation(ec_key_secp384r1, alg=jose.ES384),
|
||||
ec_key_secp384r1.public_key()) is True
|
||||
|
||||
|
||||
class DNSResponseTest(unittest.TestCase):
|
||||
@@ -452,8 +464,6 @@ class DNSResponseTest(unittest.TestCase):
|
||||
from acme.challenges import DNSResponse
|
||||
self.msg = DNSResponse(validation=self.validation)
|
||||
self.jmsg_to = {
|
||||
'resource': 'challenge',
|
||||
'type': 'dns',
|
||||
'validation': self.validation,
|
||||
}
|
||||
self.jmsg_from = {
|
||||
@@ -463,19 +473,18 @@ class DNSResponseTest(unittest.TestCase):
|
||||
}
|
||||
|
||||
def test_to_partial_json(self):
|
||||
self.assertEqual(self.jmsg_to, self.msg.to_partial_json())
|
||||
assert self.jmsg_to == self.msg.to_partial_json()
|
||||
|
||||
def test_from_json(self):
|
||||
from acme.challenges import DNSResponse
|
||||
self.assertEqual(self.msg, DNSResponse.from_json(self.jmsg_from))
|
||||
assert self.msg == DNSResponse.from_json(self.jmsg_from)
|
||||
|
||||
def test_from_json_hashable(self):
|
||||
from acme.challenges import DNSResponse
|
||||
hash(DNSResponse.from_json(self.jmsg_from))
|
||||
|
||||
def test_check_validation(self):
|
||||
self.assertTrue(
|
||||
self.msg.check_validation(self.chall, KEY.public_key()))
|
||||
assert self.msg.check_validation(self.chall, KEY.public_key())
|
||||
|
||||
|
||||
class JWSPayloadRFC8555Compliant(unittest.TestCase):
@@ -484,12 +493,11 @@ class JWSPayloadRFC8555Compliant(unittest.TestCase):
from acme.challenges import HTTP01Response

challenge_body = HTTP01Response()
challenge_body.le_acme_version = 2

jobj = challenge_body.json_dumps(indent=2).encode()
# RFC8555 states that challenge responses must have an empty payload.
self.assertEqual(jobj, b'{}')
assert jobj == b'{}'


if __name__ == '__main__':
unittest.main() # pragma: no cover
sys.exit(pytest.main(sys.argv[1:] + [__file__])) # pragma: no cover
794
acme/acme/_internal/tests/client_test.py
Normal file
@@ -0,0 +1,794 @@
"""Tests for acme.client."""
# pylint: disable=too-many-lines
import copy
import datetime
import http.client as http_client
import json
import sys
from typing import Dict
import unittest
from unittest import mock

import josepy as jose
import pytest
import requests

from acme import challenges
from acme import errors
from acme import jws as acme_jws
from acme import messages
from acme._internal.tests import messages_test
from acme._internal.tests import test_util
from acme.client import ClientNetwork
from acme.client import ClientV2

CERT_SAN_PEM = test_util.load_vector('cert-san.pem')
|
||||
CSR_MIXED_PEM = test_util.load_vector('csr-mixed.pem')
|
||||
KEY = jose.JWKRSA.load(test_util.load_vector('rsa512_key.pem'))
|
||||
|
||||
DIRECTORY_V2 = messages.Directory({
|
||||
'newAccount': 'https://www.letsencrypt-demo.org/acme/new-account',
|
||||
'newNonce': 'https://www.letsencrypt-demo.org/acme/new-nonce',
|
||||
'newOrder': 'https://www.letsencrypt-demo.org/acme/new-order',
|
||||
'revokeCert': 'https://www.letsencrypt-demo.org/acme/revoke-cert',
|
||||
'meta': messages.Directory.Meta(),
|
||||
})
|
||||
|
||||
|
||||
class ClientV2Test(unittest.TestCase):
|
||||
"""Tests for acme.client.ClientV2."""
|
||||
|
||||
def setUp(self):
|
||||
self.response = mock.MagicMock(
|
||||
ok=True, status_code=http_client.OK, headers={}, links={})
|
||||
self.net = mock.MagicMock()
|
||||
self.net.post.return_value = self.response
|
||||
self.net.get.return_value = self.response
|
||||
|
||||
self.identifier = messages.Identifier(
|
||||
typ=messages.IDENTIFIER_FQDN, value='example.com')
|
||||
|
||||
# Registration
|
||||
self.contact = ('mailto:cert-admin@example.com', 'tel:+12025551212')
|
||||
reg = messages.Registration(
|
||||
contact=self.contact, key=KEY.public_key())
|
||||
the_arg: Dict = dict(reg)
|
||||
self.new_reg = messages.NewRegistration(**the_arg)
|
||||
self.regr = messages.RegistrationResource(
|
||||
body=reg, uri='https://www.letsencrypt-demo.org/acme/reg/1')
|
||||
|
||||
# Authorization
|
||||
authzr_uri = 'https://www.letsencrypt-demo.org/acme/authz/1'
|
||||
challb = messages.ChallengeBody(
|
||||
uri=(authzr_uri + '/1'), status=messages.STATUS_VALID,
|
||||
chall=challenges.DNS(token=jose.b64decode(
|
||||
'evaGxfADs6pSRb2LAv9IZf17Dt3juxGJ-PCt92wr-oA')))
|
||||
self.challr = messages.ChallengeResource(
|
||||
body=challb, authzr_uri=authzr_uri)
|
||||
self.authz = messages.Authorization(
|
||||
identifier=messages.Identifier(
|
||||
typ=messages.IDENTIFIER_FQDN, value='example.com'),
|
||||
challenges=(challb,))
|
||||
self.authzr = messages.AuthorizationResource(
|
||||
body=self.authz, uri=authzr_uri)
|
||||
|
||||
# Reason code for revocation
|
||||
self.rsn = 1
|
||||
|
||||
self.directory = DIRECTORY_V2
|
||||
|
||||
self.client = ClientV2(self.directory, self.net)
|
||||
|
||||
self.new_reg = self.new_reg.update(terms_of_service_agreed=True)
|
||||
|
||||
self.authzr_uri2 = 'https://www.letsencrypt-demo.org/acme/authz/2'
|
||||
self.authz2 = self.authz.update(identifier=messages.Identifier(
|
||||
typ=messages.IDENTIFIER_FQDN, value='www.example.com'),
|
||||
status=messages.STATUS_PENDING)
|
||||
self.authzr2 = messages.AuthorizationResource(
|
||||
body=self.authz2, uri=self.authzr_uri2)
|
||||
|
||||
self.order = messages.Order(
|
||||
identifiers=(self.authz.identifier, self.authz2.identifier),
|
||||
status=messages.STATUS_PENDING,
|
||||
authorizations=(self.authzr.uri, self.authzr_uri2),
|
||||
finalize='https://www.letsencrypt-demo.org/acme/acct/1/order/1/finalize')
|
||||
self.orderr = messages.OrderResource(
|
||||
body=self.order,
|
||||
uri='https://www.letsencrypt-demo.org/acme/acct/1/order/1',
|
||||
authorizations=[self.authzr, self.authzr2], csr_pem=CSR_MIXED_PEM)
|
||||
|
||||
def test_new_account(self):
|
||||
self.response.status_code = http_client.CREATED
|
||||
self.response.json.return_value = self.regr.body.to_json()
|
||||
self.response.headers['Location'] = self.regr.uri
|
||||
|
||||
assert self.regr == self.client.new_account(self.new_reg)
|
||||
|
||||
def test_new_account_tos_link(self):
|
||||
self.response.status_code = http_client.CREATED
|
||||
self.response.json.return_value = self.regr.body.to_json()
|
||||
self.response.headers['Location'] = self.regr.uri
|
||||
self.response.links.update({
|
||||
'terms-of-service': {'url': 'https://www.letsencrypt-demo.org/tos'},
|
||||
})
|
||||
|
||||
assert self.client.new_account(self.new_reg).terms_of_service == \
|
||||
'https://www.letsencrypt-demo.org/tos'
|
||||
|
||||
|
||||
def test_new_account_conflict(self):
|
||||
self.response.status_code = http_client.OK
|
||||
self.response.headers['Location'] = self.regr.uri
|
||||
with pytest.raises(errors.ConflictError):
|
||||
self.client.new_account(self.new_reg)
|
||||
|
||||
def test_deactivate_account(self):
|
||||
deactivated_regr = self.regr.update(
|
||||
body=self.regr.body.update(status='deactivated'))
|
||||
self.response.json.return_value = deactivated_regr.body.to_json()
|
||||
self.response.status_code = http_client.OK
|
||||
self.response.headers['Location'] = self.regr.uri
|
||||
assert self.client.deactivate_registration(self.regr) == deactivated_regr
|
||||
|
||||
def test_deactivate_authorization(self):
|
||||
deactivated_authz = self.authzr.update(
|
||||
body=self.authzr.body.update(status=messages.STATUS_DEACTIVATED))
|
||||
self.response.json.return_value = deactivated_authz.body.to_json()
|
||||
authzr = self.client.deactivate_authorization(self.authzr)
|
||||
assert deactivated_authz.body == authzr.body
|
||||
assert self.client.net.post.call_count == 1
|
||||
assert self.authzr.uri in self.net.post.call_args_list[0][0]
|
||||
|
||||
def test_new_order(self):
|
||||
order_response = copy.deepcopy(self.response)
|
||||
order_response.status_code = http_client.CREATED
|
||||
order_response.json.return_value = self.order.to_json()
|
||||
order_response.headers['Location'] = self.orderr.uri
|
||||
self.net.post.return_value = order_response
|
||||
|
||||
authz_response = copy.deepcopy(self.response)
|
||||
authz_response.json.return_value = self.authz.to_json()
|
||||
authz_response.headers['Location'] = self.authzr.uri
|
||||
authz_response2 = self.response
|
||||
authz_response2.json.return_value = self.authz2.to_json()
|
||||
authz_response2.headers['Location'] = self.authzr2.uri
|
||||
|
||||
with mock.patch('acme.client.ClientV2._post_as_get') as mock_post_as_get:
|
||||
mock_post_as_get.side_effect = (authz_response, authz_response2)
|
||||
assert self.client.new_order(CSR_MIXED_PEM) == self.orderr
|
||||
|
||||
def test_answer_challenge(self):
|
||||
self.response.links['up'] = {'url': self.challr.authzr_uri}
|
||||
self.response.json.return_value = self.challr.body.to_json()
|
||||
chall_response = challenges.DNSResponse(validation=None)
|
||||
self.client.answer_challenge(self.challr.body, chall_response)
|
||||
|
||||
with pytest.raises(errors.UnexpectedUpdate):
|
||||
self.client.answer_challenge(self.challr.body.update(uri='foo'), chall_response)
|
||||
|
||||
def test_answer_challenge_missing_next(self):
|
||||
with pytest.raises(errors.ClientError):
|
||||
self.client.answer_challenge(self.challr.body, challenges.DNSResponse(validation=None))
|
||||
|
||||
@mock.patch('acme.client.datetime')
|
||||
def test_poll_and_finalize(self, mock_datetime):
|
||||
mock_datetime.datetime.now.return_value = datetime.datetime(2018, 2, 15)
|
||||
mock_datetime.timedelta = datetime.timedelta
|
||||
expected_deadline = mock_datetime.datetime.now() + datetime.timedelta(seconds=90)
|
||||
|
||||
self.client.poll_authorizations = mock.Mock(return_value=self.orderr)
|
||||
self.client.finalize_order = mock.Mock(return_value=self.orderr)
|
||||
|
||||
assert self.client.poll_and_finalize(self.orderr) == self.orderr
|
||||
self.client.poll_authorizations.assert_called_once_with(self.orderr, expected_deadline)
|
||||
self.client.finalize_order.assert_called_once_with(self.orderr, expected_deadline)
|
||||
|
||||
@mock.patch('acme.client.datetime')
|
||||
def test_poll_authorizations_timeout(self, mock_datetime):
|
||||
now_side_effect = [datetime.datetime(2018, 2, 15),
|
||||
datetime.datetime(2018, 2, 16),
|
||||
datetime.datetime(2018, 2, 17)]
|
||||
mock_datetime.datetime.now.side_effect = now_side_effect
|
||||
self.response.json.side_effect = [
|
||||
self.authz.to_json(), self.authz2.to_json(), self.authz2.to_json()]
|
||||
|
||||
with pytest.raises(errors.TimeoutError):
|
||||
self.client.poll_authorizations(self.orderr, now_side_effect[1])
|
||||
|
||||
def test_poll_authorizations_failure(self):
|
||||
deadline = datetime.datetime(9999, 9, 9)
|
||||
challb = self.challr.body.update(status=messages.STATUS_INVALID,
|
||||
error=messages.Error.with_code('unauthorized'))
|
||||
authz = self.authz.update(status=messages.STATUS_INVALID, challenges=(challb,))
|
||||
self.response.json.return_value = authz.to_json()
|
||||
|
||||
with pytest.raises(errors.ValidationError):
|
||||
self.client.poll_authorizations(self.orderr, deadline)
|
||||
|
||||
def test_poll_authorizations_success(self):
|
||||
deadline = datetime.datetime(9999, 9, 9)
|
||||
updated_authz2 = self.authz2.update(status=messages.STATUS_VALID)
|
||||
updated_authzr2 = messages.AuthorizationResource(
|
||||
body=updated_authz2, uri=self.authzr_uri2)
|
||||
updated_orderr = self.orderr.update(authorizations=[self.authzr, updated_authzr2])
|
||||
|
||||
self.response.json.side_effect = (
|
||||
self.authz.to_json(), self.authz2.to_json(), updated_authz2.to_json())
|
||||
assert self.client.poll_authorizations(self.orderr, deadline) == updated_orderr
|
||||
|
||||
def test_poll_unexpected_update(self):
|
||||
updated_authz = self.authz.update(identifier=self.identifier.update(value='foo'))
|
||||
self.response.json.return_value = updated_authz.to_json()
|
||||
with pytest.raises(errors.UnexpectedUpdate):
|
||||
self.client.poll(self.authzr)
|
||||
|
||||
def test_finalize_order_success(self):
|
||||
updated_order = self.order.update(
|
||||
certificate='https://www.letsencrypt-demo.org/acme/cert/',
|
||||
status=messages.STATUS_VALID)
|
||||
updated_orderr = self.orderr.update(body=updated_order, fullchain_pem=CERT_SAN_PEM)
|
||||
|
||||
self.response.json.return_value = updated_order.to_json()
|
||||
self.response.text = CERT_SAN_PEM
|
||||
|
||||
deadline = datetime.datetime(9999, 9, 9)
|
||||
assert self.client.finalize_order(self.orderr, deadline) == updated_orderr
|
||||
|
||||
def test_finalize_order_error(self):
|
||||
updated_order = self.order.update(
|
||||
error=messages.Error.with_code('unauthorized'),
|
||||
status=messages.STATUS_INVALID)
|
||||
self.response.json.return_value = updated_order.to_json()
|
||||
|
||||
deadline = datetime.datetime(9999, 9, 9)
|
||||
with pytest.raises(errors.IssuanceError):
|
||||
self.client.finalize_order(self.orderr, deadline)
|
||||
|
||||
def test_finalize_order_invalid_status(self):
|
||||
# https://github.com/certbot/certbot/issues/9296
|
||||
order = self.order.update(error=None, status=messages.STATUS_INVALID)
|
||||
self.response.json.return_value = order.to_json()
|
||||
with pytest.raises(errors.Error, match="The certificate order failed"):
|
||||
self.client.finalize_order(self.orderr, datetime.datetime(9999, 9, 9))
|
||||
|
||||
def test_finalize_order_timeout(self):
|
||||
deadline = datetime.datetime.now() - datetime.timedelta(seconds=60)
|
||||
with pytest.raises(errors.TimeoutError):
|
||||
self.client.finalize_order(self.orderr, deadline)
|
||||
|
||||
def test_finalize_order_alt_chains(self):
|
||||
updated_order = self.order.update(
|
||||
certificate='https://www.letsencrypt-demo.org/acme/cert/',
|
||||
status=messages.STATUS_VALID
|
||||
)
|
||||
updated_orderr = self.orderr.update(body=updated_order,
|
||||
fullchain_pem=CERT_SAN_PEM,
|
||||
alternative_fullchains_pem=[CERT_SAN_PEM,
|
||||
CERT_SAN_PEM])
|
||||
self.response.json.return_value = updated_order.to_json()
|
||||
self.response.text = CERT_SAN_PEM
|
||||
self.response.headers['Link'] = '<https://example.com/acme/cert/1>;rel="alternate", ' + \
|
||||
'<https://example.com/dir>;rel="index", ' + \
|
||||
'<https://example.com/acme/cert/2>;title="foo";rel="alternate"'
|
||||
|
||||
deadline = datetime.datetime(9999, 9, 9)
|
||||
resp = self.client.finalize_order(self.orderr, deadline, fetch_alternative_chains=True)
|
||||
self.net.post.assert_any_call('https://example.com/acme/cert/1',
|
||||
mock.ANY, new_nonce_url=mock.ANY)
|
||||
self.net.post.assert_any_call('https://example.com/acme/cert/2',
|
||||
mock.ANY, new_nonce_url=mock.ANY)
|
||||
assert resp == updated_orderr
|
||||
|
||||
del self.response.headers['Link']
|
||||
resp = self.client.finalize_order(self.orderr, deadline, fetch_alternative_chains=True)
|
||||
assert resp == updated_orderr.update(alternative_fullchains_pem=[])
|
||||
|
||||
def test_revoke(self):
|
||||
self.client.revoke(messages_test.CERT, self.rsn)
|
||||
self.net.post.assert_called_once_with(
|
||||
self.directory["revokeCert"], mock.ANY, new_nonce_url=DIRECTORY_V2['newNonce'])
|
||||
|
||||
def test_revoke_bad_status_raises_error(self):
|
||||
self.response.status_code = http_client.METHOD_NOT_ALLOWED
|
||||
with pytest.raises(errors.ClientError):
|
||||
self.client.revoke(messages_test.CERT,
|
||||
self.rsn)
|
||||
|
||||
def test_update_registration(self):
|
||||
# "Instance of 'Field' has no to_json/update member" bug:
|
||||
self.response.headers['Location'] = self.regr.uri
|
||||
self.response.json.return_value = self.regr.body.to_json()
|
||||
assert self.regr == self.client.update_registration(self.regr)
|
||||
assert self.client.net.account is not None
|
||||
assert self.client.net.post.call_count == 2
|
||||
assert DIRECTORY_V2.newAccount in self.net.post.call_args_list[0][0]
|
||||
|
||||
self.response.json.return_value = self.regr.body.update(
|
||||
contact=()).to_json()
|
||||
|
||||
def test_external_account_required_true(self):
|
||||
self.client.directory = messages.Directory({
|
||||
'meta': messages.Directory.Meta(external_account_required=True)
|
||||
})
|
||||
|
||||
assert self.client.external_account_required()
|
||||
|
||||
def test_external_account_required_false(self):
|
||||
self.client.directory = messages.Directory({
|
||||
'meta': messages.Directory.Meta(external_account_required=False)
|
||||
})
|
||||
|
||||
assert not self.client.external_account_required()
|
||||
|
||||
def test_external_account_required_default(self):
|
||||
assert not self.client.external_account_required()
|
||||
|
||||
def test_query_registration_client(self):
|
||||
self.response.json.return_value = self.regr.body.to_json()
|
||||
self.response.headers['Location'] = 'https://www.letsencrypt-demo.org/acme/reg/1'
|
||||
assert self.regr == self.client.query_registration(self.regr)
|
||||
|
||||
def test_post_as_get(self):
|
||||
with mock.patch('acme.client.ClientV2._authzr_from_response') as mock_client:
|
||||
mock_client.return_value = self.authzr2
|
||||
|
||||
self.client.poll(self.authzr2) # pylint: disable=protected-access
|
||||
|
||||
self.client.net.post.assert_called_once_with(
|
||||
self.authzr2.uri, None,
|
||||
new_nonce_url='https://www.letsencrypt-demo.org/acme/new-nonce')
|
||||
self.client.net.get.assert_not_called()
|
||||
|
||||
def test_retry_after_date(self):
|
||||
self.response.headers['Retry-After'] = 'Fri, 31 Dec 1999 23:59:59 GMT'
|
||||
assert datetime.datetime(1999, 12, 31, 23, 59, 59) == \
|
||||
self.client.retry_after(response=self.response, default=10)
|
||||
|
||||
@mock.patch('acme.client.datetime')
|
||||
def test_retry_after_invalid(self, dt_mock):
|
||||
dt_mock.datetime.now.return_value = datetime.datetime(2015, 3, 27)
|
||||
dt_mock.timedelta = datetime.timedelta
|
||||
|
||||
self.response.headers['Retry-After'] = 'foooo'
|
||||
assert datetime.datetime(2015, 3, 27, 0, 0, 10) == \
|
||||
self.client.retry_after(response=self.response, default=10)
|
||||
|
||||
@mock.patch('acme.client.datetime')
|
||||
def test_retry_after_overflow(self, dt_mock):
|
||||
dt_mock.datetime.now.return_value = datetime.datetime(2015, 3, 27)
|
||||
dt_mock.timedelta = datetime.timedelta
|
||||
dt_mock.datetime.side_effect = datetime.datetime
|
||||
|
||||
self.response.headers['Retry-After'] = "Tue, 116 Feb 2016 11:50:00 MST"
|
||||
assert datetime.datetime(2015, 3, 27, 0, 0, 10) == \
|
||||
self.client.retry_after(response=self.response, default=10)
|
||||
|
||||
@mock.patch('acme.client.datetime')
|
||||
def test_retry_after_seconds(self, dt_mock):
|
||||
dt_mock.datetime.now.return_value = datetime.datetime(2015, 3, 27)
|
||||
dt_mock.timedelta = datetime.timedelta
|
||||
|
||||
self.response.headers['Retry-After'] = '50'
|
||||
assert datetime.datetime(2015, 3, 27, 0, 0, 50) == \
|
||||
self.client.retry_after(response=self.response, default=10)
|
||||
|
||||
@mock.patch('acme.client.datetime')
|
||||
def test_retry_after_missing(self, dt_mock):
|
||||
dt_mock.datetime.now.return_value = datetime.datetime(2015, 3, 27)
|
||||
dt_mock.timedelta = datetime.timedelta
|
||||
|
||||
assert datetime.datetime(2015, 3, 27, 0, 0, 10) == \
|
||||
self.client.retry_after(response=self.response, default=10)
|
||||
|
||||
def test_get_directory(self):
|
||||
self.response.json.return_value = DIRECTORY_V2.to_json()
|
||||
assert DIRECTORY_V2.to_partial_json() == \
|
||||
ClientV2.get_directory('https://example.com/dir', self.net).to_partial_json()
|
||||
|
||||
|
||||
class MockJSONDeSerializable(jose.JSONDeSerializable):
|
||||
# pylint: disable=missing-docstring
|
||||
def __init__(self, value):
|
||||
self.value = value
|
||||
|
||||
def to_partial_json(self):
|
||||
return {'foo': self.value}
|
||||
|
||||
@classmethod
|
||||
def from_json(cls, jobj):
|
||||
pass # pragma: no cover
|
||||
|
||||
|
||||
class ClientNetworkTest(unittest.TestCase):
|
||||
"""Tests for acme.client.ClientNetwork."""
|
||||
|
||||
def setUp(self):
|
||||
self.verify_ssl = mock.MagicMock()
|
||||
self.wrap_in_jws = mock.MagicMock(return_value=mock.sentinel.wrapped)
|
||||
|
||||
self.net = ClientNetwork(
|
||||
key=KEY, alg=jose.RS256, verify_ssl=self.verify_ssl,
|
||||
user_agent='acme-python-test')
|
||||
|
||||
self.response = mock.MagicMock(ok=True, status_code=http_client.OK)
|
||||
self.response.headers = {}
|
||||
self.response.links = {}
|
||||
|
||||
def test_init(self):
|
||||
assert self.net.verify_ssl is self.verify_ssl
|
||||
|
||||
def test_wrap_in_jws(self):
|
||||
# pylint: disable=protected-access
|
||||
jws_dump = self.net._wrap_in_jws(
|
||||
MockJSONDeSerializable('foo'), nonce=b'Tg', url="url")
|
||||
jws = acme_jws.JWS.json_loads(jws_dump)
|
||||
assert json.loads(jws.payload.decode()) == {'foo': 'foo'}
|
||||
assert jws.signature.combined.nonce == b'Tg'
|
||||
|
||||
def test_wrap_in_jws_v2(self):
|
||||
self.net.account = {'uri': 'acct-uri'}
|
||||
# pylint: disable=protected-access
|
||||
jws_dump = self.net._wrap_in_jws(
|
||||
MockJSONDeSerializable('foo'), nonce=b'Tg', url="url")
|
||||
jws = acme_jws.JWS.json_loads(jws_dump)
|
||||
assert json.loads(jws.payload.decode()) == {'foo': 'foo'}
|
||||
assert jws.signature.combined.nonce == b'Tg'
|
||||
assert jws.signature.combined.kid == u'acct-uri'
|
||||
assert jws.signature.combined.url == u'url'
|
||||
|
||||
def test_check_response_not_ok_jobj_no_error(self):
|
||||
self.response.ok = False
|
||||
self.response.json.return_value = {}
|
||||
with mock.patch('acme.client.messages.Error.from_json') as from_json:
|
||||
from_json.side_effect = jose.DeserializationError
|
||||
# pylint: disable=protected-access
|
||||
with pytest.raises(errors.ClientError):
|
||||
self.net._check_response(self.response)
|
||||
|
||||
def test_check_response_not_ok_jobj_error(self):
|
||||
self.response.ok = False
|
||||
self.response.json.return_value = messages.Error.with_code(
|
||||
'serverInternal', detail='foo', title='some title').to_json()
|
||||
# pylint: disable=protected-access
|
||||
with pytest.raises(messages.Error):
|
||||
self.net._check_response(self.response)
|
||||
|
||||
def test_check_response_not_ok_no_jobj(self):
|
||||
self.response.ok = False
|
||||
self.response.json.side_effect = ValueError
|
||||
# pylint: disable=protected-access
|
||||
with pytest.raises(errors.ClientError):
|
||||
self.net._check_response(self.response)
|
||||
|
||||
def test_check_response_ok_no_jobj_ct_required(self):
|
||||
self.response.json.side_effect = ValueError
|
||||
for response_ct in [self.net.JSON_CONTENT_TYPE, 'foo']:
|
||||
self.response.headers['Content-Type'] = response_ct
|
||||
# pylint: disable=protected-access
|
||||
with pytest.raises(errors.ClientError):
|
||||
self.net._check_response(self.response,
|
||||
content_type=self.net.JSON_CONTENT_TYPE)
|
||||
|
||||
def test_check_response_ok_no_jobj_no_ct(self):
|
||||
self.response.json.side_effect = ValueError
|
||||
for response_ct in [self.net.JSON_CONTENT_TYPE, 'foo']:
|
||||
self.response.headers['Content-Type'] = response_ct
|
||||
# pylint: disable=protected-access
|
||||
assert self.response == self.net._check_response(self.response)
|
||||
|
||||
@mock.patch('acme.client.logger')
|
||||
def test_check_response_ok_ct_with_charset(self, mock_logger):
|
||||
self.response.json.return_value = {}
|
||||
self.response.headers['Content-Type'] = 'application/json; charset=utf-8'
|
||||
# pylint: disable=protected-access
|
||||
assert self.response == self.net._check_response(
|
||||
self.response, content_type='application/json')
|
||||
try:
|
||||
mock_logger.debug.assert_called_with(
|
||||
'Ignoring wrong Content-Type (%r) for JSON decodable response',
|
||||
'application/json; charset=utf-8'
|
||||
)
|
||||
except AssertionError:
|
||||
return
|
||||
raise AssertionError('Expected Content-Type warning ' #pragma: no cover
|
||||
'to not have been logged')
|
||||
|
||||
@mock.patch('acme.client.logger')
|
||||
def test_check_response_ok_bad_ct(self, mock_logger):
|
||||
self.response.json.return_value = {}
|
||||
self.response.headers['Content-Type'] = 'text/plain'
|
||||
# pylint: disable=protected-access
|
||||
assert self.response == self.net._check_response(
|
||||
self.response, content_type='application/json')
|
||||
mock_logger.debug.assert_called_with(
|
||||
'Ignoring wrong Content-Type (%r) for JSON decodable response',
|
||||
'text/plain'
|
||||
)
|
||||
|
||||
def test_check_response_conflict(self):
|
||||
self.response.ok = False
|
||||
self.response.status_code = 409
|
||||
# pylint: disable=protected-access
|
||||
with pytest.raises(errors.ConflictError):
|
||||
self.net._check_response(self.response)
|
||||
|
||||
def test_check_response_jobj(self):
|
||||
self.response.json.return_value = {}
|
||||
for response_ct in [self.net.JSON_CONTENT_TYPE, 'foo']:
|
||||
self.response.headers['Content-Type'] = response_ct
|
||||
# pylint: disable=protected-access
|
||||
assert self.response == self.net._check_response(self.response)
|
||||
|
||||
def test_send_request(self):
|
||||
self.net.session = mock.MagicMock()
|
||||
self.net.session.request.return_value = self.response
|
||||
# pylint: disable=protected-access
|
||||
assert self.response == self.net._send_request(
|
||||
'HEAD', 'http://example.com/', 'foo', bar='baz')
|
||||
self.net.session.request.assert_called_once_with(
|
||||
'HEAD', 'http://example.com/', 'foo',
|
||||
headers=mock.ANY, verify=mock.ANY, timeout=mock.ANY, bar='baz')
|
||||
|
||||
@mock.patch('acme.client.logger')
|
||||
def test_send_request_get_der(self, mock_logger):
|
||||
self.net.session = mock.MagicMock()
|
||||
self.net.session.request.return_value = mock.MagicMock(
|
||||
ok=True, status_code=http_client.OK,
|
||||
content=b"hi")
|
||||
# pylint: disable=protected-access
|
||||
self.net._send_request('HEAD', 'http://example.com/', 'foo',
|
||||
timeout=mock.ANY, bar='baz', headers={'Accept': 'application/pkix-cert'})
|
||||
mock_logger.debug.assert_called_with(
|
||||
'Received response:\nHTTP %d\n%s\n\n%s', 200,
|
||||
'', b'aGk=')
|
||||
|
||||
def test_send_request_post(self):
|
||||
self.net.session = mock.MagicMock()
|
||||
self.net.session.request.return_value = self.response
|
||||
# pylint: disable=protected-access
|
||||
assert self.response == self.net._send_request(
|
||||
'POST', 'http://example.com/', 'foo', data='qux', bar='baz')
|
||||
self.net.session.request.assert_called_once_with(
|
||||
'POST', 'http://example.com/', 'foo',
|
||||
headers=mock.ANY, verify=mock.ANY, timeout=mock.ANY, data='qux', bar='baz')
|
||||
|
||||
def test_send_request_verify_ssl(self):
|
||||
# pylint: disable=protected-access
|
||||
for verify in True, False:
|
||||
self.net.session = mock.MagicMock()
|
||||
self.net.session.request.return_value = self.response
|
||||
self.net.verify_ssl = verify
|
||||
# pylint: disable=protected-access
|
||||
assert self.response == \
|
||||
self.net._send_request('GET', 'http://example.com/')
|
||||
self.net.session.request.assert_called_once_with(
|
||||
'GET', 'http://example.com/', verify=verify,
|
||||
timeout=mock.ANY, headers=mock.ANY)
|
||||
|
||||
def test_send_request_user_agent(self):
|
||||
self.net.session = mock.MagicMock()
|
||||
# pylint: disable=protected-access
|
||||
self.net._send_request('GET', 'http://example.com/',
|
||||
headers={'bar': 'baz'})
|
||||
self.net.session.request.assert_called_once_with(
|
||||
'GET', 'http://example.com/', verify=mock.ANY,
|
||||
timeout=mock.ANY,
|
||||
headers={'User-Agent': 'acme-python-test', 'bar': 'baz'})
|
||||
|
||||
self.net._send_request('GET', 'http://example.com/',
|
||||
headers={'User-Agent': 'foo2'})
|
||||
self.net.session.request.assert_called_with(
|
||||
'GET', 'http://example.com/',
|
||||
verify=mock.ANY, timeout=mock.ANY, headers={'User-Agent': 'foo2'})
|
||||
|
||||
def test_send_request_timeout(self):
|
||||
self.net.session = mock.MagicMock()
|
||||
# pylint: disable=protected-access
|
||||
self.net._send_request('GET', 'http://example.com/',
|
||||
headers={'bar': 'baz'})
|
||||
self.net.session.request.assert_called_once_with(
|
||||
mock.ANY, mock.ANY, verify=mock.ANY, headers=mock.ANY,
|
||||
timeout=45)
|
||||
|
||||
def test_del(self, close_exception=None):
|
||||
sess = mock.MagicMock()
|
||||
|
||||
if close_exception is not None:
|
||||
sess.close.side_effect = close_exception
|
||||
|
||||
self.net.session = sess
|
||||
del self.net
|
||||
sess.close.assert_called_once_with()
|
||||
|
||||
def test_del_error(self):
|
||||
self.test_del(ReferenceError)
|
||||
|
||||
@mock.patch('acme.client.requests')
|
||||
def test_requests_error_passthrough(self, mock_requests):
|
||||
mock_requests.exceptions = requests.exceptions
|
||||
mock_requests.request.side_effect = requests.exceptions.RequestException
|
||||
# pylint: disable=protected-access
|
||||
with pytest.raises(requests.exceptions.RequestException):
|
||||
self.net._send_request('GET', 'uri')
|
||||
|
||||
def test_urllib_error(self):
|
||||
# Using a connection error to test a properly formatted error message
|
||||
try:
|
||||
# pylint: disable=protected-access
|
||||
self.net._send_request('GET', "http://localhost:19123/nonexistent.txt")
|
||||
|
||||
# Value Error Generated Exceptions
|
||||
except ValueError as y:
|
||||
assert "Requesting localhost/nonexistent: " \
|
||||
"Connection refused" == str(y)
|
||||
|
||||
# Requests Library Exceptions
|
||||
except requests.exceptions.ConnectionError as z: #pragma: no cover
|
||||
assert "'Connection aborted.'" in str(z) or "[WinError 10061]" in str(z)
|
||||
|
||||
|
||||
class ClientNetworkWithMockedResponseTest(unittest.TestCase):
|
||||
"""Tests for acme.client.ClientNetwork which mock out response."""
|
||||
|
||||
def setUp(self):
|
||||
self.net = ClientNetwork(key=None, alg=None)
|
||||
|
||||
self.response = mock.MagicMock(ok=True, status_code=http_client.OK)
|
||||
self.response.headers = {}
|
||||
self.response.links = {}
|
||||
self.response.checked = False
|
||||
self.acmev1_nonce_response = mock.MagicMock(
|
||||
ok=False, status_code=http_client.METHOD_NOT_ALLOWED)
|
||||
self.acmev1_nonce_response.headers = {}
|
||||
self.obj = mock.MagicMock()
|
||||
self.wrapped_obj = mock.MagicMock()
|
||||
self.content_type = mock.sentinel.content_type
|
||||
|
||||
self.all_nonces = [
|
||||
jose.b64encode(b'Nonce'),
|
||||
jose.b64encode(b'Nonce2'), jose.b64encode(b'Nonce3')]
|
||||
self.available_nonces = self.all_nonces[:]
|
||||
|
||||
def send_request(*args, **kwargs):
|
||||
# pylint: disable=unused-argument,missing-docstring
|
||||
assert "new_nonce_url" not in kwargs
|
||||
method = args[0]
|
||||
uri = args[1]
|
||||
if method == 'HEAD' and uri != "new_nonce_uri":
|
||||
response = self.acmev1_nonce_response
|
||||
else:
|
||||
response = self.response
|
||||
|
||||
if self.available_nonces:
|
||||
response.headers = {
|
||||
self.net.REPLAY_NONCE_HEADER:
|
||||
self.available_nonces.pop().decode()}
|
||||
else:
|
||||
response.headers = {}
|
||||
return response
|
||||
|
||||
# pylint: disable=protected-access
|
||||
self.net._send_request = self.send_request = mock.MagicMock(
|
||||
side_effect=send_request)
|
||||
self.net._check_response = self.check_response
|
||||
self.net._wrap_in_jws = mock.MagicMock(return_value=self.wrapped_obj)
|
||||
|
||||
def check_response(self, response, content_type):
|
||||
# pylint: disable=missing-docstring
|
||||
assert self.response == response
|
||||
assert self.content_type == content_type
|
||||
assert self.response.ok
|
||||
self.response.checked = True
|
||||
return self.response
|
||||
|
||||
def test_head(self):
|
||||
assert self.acmev1_nonce_response == self.net.head(
|
||||
'http://example.com/', 'foo', bar='baz')
|
||||
self.send_request.assert_called_once_with(
|
||||
'HEAD', 'http://example.com/', 'foo', bar='baz')
|
||||
|
||||
def test_head_v2(self):
|
||||
assert self.response == self.net.head(
|
||||
'new_nonce_uri', 'foo', bar='baz')
|
||||
self.send_request.assert_called_once_with(
|
||||
'HEAD', 'new_nonce_uri', 'foo', bar='baz')
|
||||
|
||||
def test_get(self):
|
||||
assert self.response == self.net.get(
|
||||
'http://example.com/', content_type=self.content_type, bar='baz')
|
||||
assert self.response.checked
|
||||
self.send_request.assert_called_once_with(
|
||||
'GET', 'http://example.com/', bar='baz')
|
||||
|
||||
def test_post_no_content_type(self):
|
||||
self.content_type = self.net.JOSE_CONTENT_TYPE
|
||||
assert self.response == self.net.post('uri', self.obj)
|
||||
assert self.response.checked
|
||||
|
||||
def test_post(self):
|
||||
# pylint: disable=protected-access
|
||||
assert self.response == self.net.post(
|
||||
'uri', self.obj, content_type=self.content_type)
|
||||
assert self.response.checked
|
||||
self.net._wrap_in_jws.assert_called_once_with(
|
||||
self.obj, jose.b64decode(self.all_nonces.pop()), "uri")
|
||||
|
||||
self.available_nonces = []
|
||||
with pytest.raises(errors.MissingNonce):
|
||||
self.net.post('uri', self.obj, content_type=self.content_type)
|
||||
self.net._wrap_in_jws.assert_called_with(
|
||||
self.obj, jose.b64decode(self.all_nonces.pop()), "uri")
|
||||
|
||||
def test_post_wrong_initial_nonce(self): # HEAD
|
||||
self.available_nonces = [b'f', jose.b64encode(b'good')]
|
||||
with pytest.raises(errors.BadNonce):
|
||||
self.net.post('uri',
|
||||
self.obj, content_type=self.content_type)
|
||||
|
||||
def test_post_wrong_post_response_nonce(self):
|
||||
self.available_nonces = [jose.b64encode(b'good'), b'f']
|
||||
with pytest.raises(errors.BadNonce):
|
||||
self.net.post('uri',
|
||||
self.obj, content_type=self.content_type)
|
||||
|
||||
def test_post_failed_retry(self):
|
||||
check_response = mock.MagicMock()
|
||||
check_response.side_effect = messages.Error.with_code('badNonce')
|
||||
|
||||
# pylint: disable=protected-access
|
||||
self.net._check_response = check_response
|
||||
with pytest.raises(messages.Error):
|
||||
self.net.post('uri',
|
||||
self.obj, content_type=self.content_type)
|
||||
|
||||
def test_post_not_retried(self):
|
||||
check_response = mock.MagicMock()
|
||||
check_response.side_effect = [messages.Error.with_code('malformed'),
|
||||
self.response]
|
||||
|
||||
# pylint: disable=protected-access
|
||||
self.net._check_response = check_response
|
||||
with pytest.raises(messages.Error):
|
||||
self.net.post('uri',
|
||||
self.obj, content_type=self.content_type)
|
||||
|
||||
def test_post_successful_retry(self):
|
||||
post_once = mock.MagicMock()
|
||||
post_once.side_effect = [messages.Error.with_code('badNonce'),
|
||||
self.response]
|
||||
|
||||
# pylint: disable=protected-access
|
||||
assert self.response == self.net.post(
|
||||
'uri', self.obj, content_type=self.content_type)
|
||||
|
||||
def test_head_get_post_error_passthrough(self):
|
||||
self.send_request.side_effect = requests.exceptions.RequestException
|
||||
for method in self.net.head, self.net.get:
|
||||
with pytest.raises(requests.exceptions.RequestException):
|
||||
method('GET', 'uri')
|
||||
with pytest.raises(requests.exceptions.RequestException):
|
||||
self.net.post('uri', obj=self.obj)
|
||||
|
||||
def test_post_bad_nonce_head(self):
|
||||
# pylint: disable=protected-access
|
||||
# regression test for https://github.com/certbot/certbot/issues/6092
|
||||
bad_response = mock.MagicMock(ok=False, status_code=http_client.SERVICE_UNAVAILABLE)
|
||||
self.net._send_request = mock.MagicMock()
|
||||
self.net._send_request.return_value = bad_response
|
||||
self.content_type = None
|
||||
check_response = mock.MagicMock()
|
||||
self.net._check_response = check_response
|
||||
with pytest.raises(errors.ClientError):
|
||||
self.net.post('uri',
|
||||
self.obj, content_type=self.content_type,
|
||||
new_nonce_url='new_nonce_uri')
|
||||
assert check_response.call_count == 1
|
||||
|
||||
def test_new_nonce_uri_removed(self):
|
||||
self.content_type = None
|
||||
self.net.post('uri', self.obj, content_type=None, new_nonce_url='new_nonce_uri')
|
||||
|
||||
|
||||
if __name__ == '__main__':
sys.exit(pytest.main(sys.argv[1:] + [__file__])) # pragma: no cover
@@ -1,17 +1,20 @@
"""Tests for acme.crypto_util."""
import ipaddress
import itertools
import socket
import socketserver
import sys
import threading
import time
import unittest
from typing import List
import unittest

import josepy as jose
import OpenSSL
import pytest

from acme import errors
import test_util
from acme._internal.tests import test_util


class SSLSocketAndProbeSNITest(unittest.TestCase):
@@ -26,11 +29,9 @@ class SSLSocketAndProbeSNITest(unittest.TestCase):
|
||||
from acme.crypto_util import SSLSocket
|
||||
|
||||
class _TestServer(socketserver.TCPServer):
|
||||
|
||||
def server_bind(self): # pylint: disable=missing-docstring
|
||||
self.socket = SSLSocket(socket.socket(),
|
||||
certs)
|
||||
socketserver.TCPServer.server_bind(self)
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__(*args, **kwargs)
|
||||
self.socket = SSLSocket(self.socket, certs)
|
||||
|
||||
self.server = _TestServer(('', 0), socketserver.BaseRequestHandler)
|
||||
self.port = self.server.socket.getsockname()[1]
|
||||
@@ -41,6 +42,7 @@ class SSLSocketAndProbeSNITest(unittest.TestCase):
|
||||
if self.server_thread.is_alive():
|
||||
# The thread may have already terminated.
|
||||
self.server_thread.join() # pragma: no cover
|
||||
self.server.server_close()
|
||||
|
||||
def _probe(self, name):
|
||||
from acme.crypto_util import probe_sni
|
||||
@@ -53,18 +55,20 @@ class SSLSocketAndProbeSNITest(unittest.TestCase):
|
||||
|
||||
def test_probe_ok(self):
|
||||
self._start_server()
|
||||
self.assertEqual(self.cert, self._probe(b'foo'))
|
||||
assert self.cert == self._probe(b'foo')
|
||||
|
||||
def test_probe_not_recognized_name(self):
|
||||
self._start_server()
|
||||
self.assertRaises(errors.Error, self._probe, b'bar')
|
||||
with pytest.raises(errors.Error):
|
||||
self._probe(b'bar')
|
||||
|
||||
def test_probe_connection_error(self):
|
||||
self.server.server_close()
|
||||
original_timeout = socket.getdefaulttimeout()
|
||||
try:
|
||||
socket.setdefaulttimeout(1)
|
||||
self.assertRaises(errors.Error, self._probe, b'bar')
|
||||
with pytest.raises(errors.Error):
|
||||
self._probe(b'bar')
|
||||
finally:
|
||||
socket.setdefaulttimeout(original_timeout)
|
||||
|
||||
@@ -74,10 +78,10 @@ class SSLSocketTest(unittest.TestCase):
|
||||
|
||||
def test_ssl_socket_invalid_arguments(self):
|
||||
from acme.crypto_util import SSLSocket
|
||||
with self.assertRaises(ValueError):
|
||||
with pytest.raises(ValueError):
|
||||
_ = SSLSocket(None, {'sni': ('key', 'cert')},
|
||||
cert_selection=lambda _: None)
|
||||
with self.assertRaises(ValueError):
|
||||
with pytest.raises(ValueError):
|
||||
_ = SSLSocket(None)
|
||||
|
||||
|
||||
@@ -94,21 +98,20 @@ class PyOpenSSLCertOrReqAllNamesTest(unittest.TestCase):
|
||||
return self._call(test_util.load_cert, name)
|
||||
|
||||
def test_cert_one_san_no_common(self):
|
||||
self.assertEqual(self._call_cert('cert-nocn.der'),
|
||||
['no-common-name.badssl.com'])
|
||||
assert self._call_cert('cert-nocn.der') == \
|
||||
['no-common-name.badssl.com']
|
||||
|
||||
def test_cert_no_sans_yes_common(self):
|
||||
self.assertEqual(self._call_cert('cert.pem'), ['example.com'])
|
||||
assert self._call_cert('cert.pem') == ['example.com']
|
||||
|
||||
def test_cert_two_sans_yes_common(self):
|
||||
self.assertEqual(self._call_cert('cert-san.pem'),
|
||||
['example.com', 'www.example.com'])
|
||||
assert self._call_cert('cert-san.pem') == \
|
||||
['example.com', 'www.example.com']
|
||||
|
||||
|
||||
class PyOpenSSLCertOrReqSANTest(unittest.TestCase):
|
||||
"""Test for acme.crypto_util._pyopenssl_cert_or_req_san."""
|
||||
|
||||
|
||||
@classmethod
|
||||
def _call(cls, loader, name):
|
||||
# pylint: disable=protected-access
|
||||
@@ -131,52 +134,93 @@ class PyOpenSSLCertOrReqSANTest(unittest.TestCase):
|
||||
return self._call(test_util.load_csr, name)
|
||||
|
||||
def test_cert_no_sans(self):
|
||||
self.assertEqual(self._call_cert('cert.pem'), [])
|
||||
assert self._call_cert('cert.pem') == []
|
||||
|
||||
def test_cert_two_sans(self):
|
||||
self.assertEqual(self._call_cert('cert-san.pem'),
|
||||
['example.com', 'www.example.com'])
|
||||
assert self._call_cert('cert-san.pem') == \
|
||||
['example.com', 'www.example.com']
|
||||
|
||||
def test_cert_hundred_sans(self):
|
||||
self.assertEqual(self._call_cert('cert-100sans.pem'),
|
||||
['example{0}.com'.format(i) for i in range(1, 101)])
|
||||
assert self._call_cert('cert-100sans.pem') == \
|
||||
['example{0}.com'.format(i) for i in range(1, 101)]
|
||||
|
||||
def test_cert_idn_sans(self):
|
||||
self.assertEqual(self._call_cert('cert-idnsans.pem'),
|
||||
self._get_idn_names())
|
||||
assert self._call_cert('cert-idnsans.pem') == \
|
||||
self._get_idn_names()
|
||||
|
||||
def test_csr_no_sans(self):
|
||||
self.assertEqual(self._call_csr('csr-nosans.pem'), [])
|
||||
assert self._call_csr('csr-nosans.pem') == []
|
||||
|
||||
def test_csr_one_san(self):
|
||||
self.assertEqual(self._call_csr('csr.pem'), ['example.com'])
|
||||
assert self._call_csr('csr.pem') == ['example.com']
|
||||
|
||||
def test_csr_two_sans(self):
|
||||
self.assertEqual(self._call_csr('csr-san.pem'),
|
||||
['example.com', 'www.example.com'])
|
||||
assert self._call_csr('csr-san.pem') == \
|
||||
['example.com', 'www.example.com']
|
||||
|
||||
def test_csr_six_sans(self):
|
||||
self.assertEqual(self._call_csr('csr-6sans.pem'),
|
||||
assert self._call_csr('csr-6sans.pem') == \
|
||||
['example.com', 'example.org', 'example.net',
|
||||
'example.info', 'subdomain.example.com',
|
||||
'other.subdomain.example.com'])
|
||||
'other.subdomain.example.com']
|
||||
|
||||
def test_csr_hundred_sans(self):
|
||||
self.assertEqual(self._call_csr('csr-100sans.pem'),
|
||||
['example{0}.com'.format(i) for i in range(1, 101)])
|
||||
assert self._call_csr('csr-100sans.pem') == \
|
||||
['example{0}.com'.format(i) for i in range(1, 101)]
|
||||
|
||||
def test_csr_idn_sans(self):
|
||||
self.assertEqual(self._call_csr('csr-idnsans.pem'),
|
||||
self._get_idn_names())
|
||||
assert self._call_csr('csr-idnsans.pem') == \
|
||||
self._get_idn_names()
|
||||
|
||||
def test_critical_san(self):
|
||||
self.assertEqual(self._call_cert('critical-san.pem'),
|
||||
['chicago-cubs.venafi.example', 'cubs.venafi.example'])
|
||||
assert self._call_cert('critical-san.pem') == \
|
||||
['chicago-cubs.venafi.example', 'cubs.venafi.example']
|
||||
|
||||
|
||||
class PyOpenSSLCertOrReqSANIPTest(unittest.TestCase):
|
||||
"""Test for acme.crypto_util._pyopenssl_cert_or_req_san_ip."""
|
||||
|
||||
class RandomSnTest(unittest.TestCase):
|
||||
"""Test for random certificate serial numbers."""
|
||||
@classmethod
|
||||
def _call(cls, loader, name):
|
||||
# pylint: disable=protected-access
|
||||
from acme.crypto_util import _pyopenssl_cert_or_req_san_ip
|
||||
return _pyopenssl_cert_or_req_san_ip(loader(name))
|
||||
|
||||
def _call_cert(self, name):
|
||||
return self._call(test_util.load_cert, name)
|
||||
|
||||
def _call_csr(self, name):
|
||||
return self._call(test_util.load_csr, name)
|
||||
|
||||
def test_cert_no_sans(self):
|
||||
assert self._call_cert('cert.pem') == []
|
||||
|
||||
def test_csr_no_sans(self):
|
||||
assert self._call_csr('csr-nosans.pem') == []
|
||||
|
||||
def test_cert_domain_sans(self):
|
||||
assert self._call_cert('cert-san.pem') == []
|
||||
|
||||
def test_csr_domain_sans(self):
|
||||
assert self._call_csr('csr-san.pem') == []
|
||||
|
||||
def test_cert_ip_two_sans(self):
|
||||
assert self._call_cert('cert-ipsans.pem') == ['192.0.2.145', '203.0.113.1']
|
||||
|
||||
def test_csr_ip_two_sans(self):
|
||||
assert self._call_csr('csr-ipsans.pem') == ['192.0.2.145', '203.0.113.1']
|
||||
|
||||
def test_csr_ipv6_sans(self):
|
||||
assert self._call_csr('csr-ipv6sans.pem') == \
|
||||
['0:0:0:0:0:0:0:1', 'A3BE:32F3:206E:C75D:956:CEE:9858:5EC5']
|
||||
|
||||
def test_cert_ipv6_sans(self):
|
||||
assert self._call_cert('cert-ipv6sans.pem') == \
|
||||
['0:0:0:0:0:0:0:1', 'A3BE:32F3:206E:C75D:956:CEE:9858:5EC5']
|
||||
|
||||
|
||||
class GenSsCertTest(unittest.TestCase):
|
||||
"""Test for gen_ss_cert (generation of self-signed cert)."""
|
||||
|
||||
|
||||
def setUp(self):
|
||||
@@ -187,11 +231,19 @@ class RandomSnTest(unittest.TestCase):
|
||||
|
||||
def test_sn_collisions(self):
|
||||
from acme.crypto_util import gen_ss_cert
|
||||
|
||||
for _ in range(self.cert_count):
|
||||
cert = gen_ss_cert(self.key, ['dummy'], force_san=True)
|
||||
cert = gen_ss_cert(self.key, ['dummy'], force_san=True,
|
||||
ips=[ipaddress.ip_address("10.10.10.10")])
|
||||
self.serial_num.append(cert.get_serial_number())
|
||||
self.assertTrue(len(set(self.serial_num)) > 1)
|
||||
assert len(set(self.serial_num)) >= self.cert_count
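As an aside, the collision-freedom asserted here comes from serial numbers being drawn at random from a large space. A minimal sketch of the usual approach (illustrative only, not gen_ss_cert's actual code):

import secrets

# A fresh 64-bit (or larger) random serial per certificate makes repeats
# across a handful of certs astronomically unlikely (though not impossible).
serials = {secrets.randbits(64) for _ in range(50)}
assert len(serials) == 50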
|
||||
|
||||
|
||||
def test_no_name(self):
|
||||
from acme.crypto_util import gen_ss_cert
|
||||
with pytest.raises(AssertionError):
|
||||
gen_ss_cert(self.key, ips=[ipaddress.ip_address("1.1.1.1")])
|
||||
gen_ss_cert(self.key)
|
||||
|
||||
|
||||
class MakeCSRTest(unittest.TestCase):
|
||||
"""Test for standalone functions."""
|
||||
@@ -206,22 +258,41 @@ class MakeCSRTest(unittest.TestCase):
|
||||
|
||||
def test_make_csr(self):
|
||||
csr_pem = self._call_with_key(["a.example", "b.example"])
|
||||
self.assertTrue(b'--BEGIN CERTIFICATE REQUEST--' in csr_pem)
|
||||
self.assertTrue(b'--END CERTIFICATE REQUEST--' in csr_pem)
|
||||
assert b'--BEGIN CERTIFICATE REQUEST--' in csr_pem
|
||||
assert b'--END CERTIFICATE REQUEST--' in csr_pem
|
||||
csr = OpenSSL.crypto.load_certificate_request(
|
||||
OpenSSL.crypto.FILETYPE_PEM, csr_pem)
|
||||
# In pyopenssl 0.13 (used with TOXENV=py27-oldest), csr objects don't
|
||||
# have a get_extensions() method, so we skip this test if the method
|
||||
# isn't available.
|
||||
if hasattr(csr, 'get_extensions'):
|
||||
self.assertEqual(len(csr.get_extensions()), 1)
|
||||
self.assertEqual(csr.get_extensions()[0].get_data(),
|
||||
assert len(csr.get_extensions()) == 1
|
||||
assert csr.get_extensions()[0].get_data() == \
|
||||
OpenSSL.crypto.X509Extension(
|
||||
b'subjectAltName',
|
||||
critical=False,
|
||||
value=b'DNS:a.example, DNS:b.example',
|
||||
).get_data(),
|
||||
)
|
||||
).get_data()
|
||||
|
||||
def test_make_csr_ip(self):
|
||||
csr_pem = self._call_with_key(["a.example"], False, [ipaddress.ip_address('127.0.0.1'), ipaddress.ip_address('::1')])
|
||||
assert b'--BEGIN CERTIFICATE REQUEST--' in csr_pem
|
||||
assert b'--END CERTIFICATE REQUEST--' in csr_pem
|
||||
csr = OpenSSL.crypto.load_certificate_request(
|
||||
OpenSSL.crypto.FILETYPE_PEM, csr_pem)
|
||||
# In pyopenssl 0.13 (used with TOXENV=py27-oldest), csr objects don't
|
||||
# have a get_extensions() method, so we skip this test if the method
|
||||
# isn't available.
|
||||
if hasattr(csr, 'get_extensions'):
|
||||
assert len(csr.get_extensions()) == 1
|
||||
assert csr.get_extensions()[0].get_data() == \
|
||||
OpenSSL.crypto.X509Extension(
|
||||
b'subjectAltName',
|
||||
critical=False,
|
||||
value=b'DNS:a.example, IP:127.0.0.1, IP:::1',
|
||||
).get_data()
|
||||
# For an IP SAN the value actually needs to be an octet string,
# but something downstream thankfully handles that for us
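For context, a rough sketch of building a CSR that mixes a DNS name and an IP-address SAN, the kind of fixture exercised here; it uses the cryptography library rather than the helper under test, so all names and parameters are illustrative:

import ipaddress
from cryptography import x509
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import rsa

key = rsa.generate_private_key(public_exponent=65537, key_size=2048)
csr = (
    x509.CertificateSigningRequestBuilder()
    .subject_name(x509.Name([]))  # empty subject; the SANs carry the identifiers
    .add_extension(
        x509.SubjectAlternativeName([
            x509.DNSName("a.example"),
            x509.IPAddress(ipaddress.ip_address("127.0.0.1")),
        ]),
        critical=False,
    )
    .sign(key, hashes.SHA256())
)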
|
||||
|
||||
def test_make_csr_must_staple(self):
|
||||
csr_pem = self._call_with_key(["a.example"], must_staple=True)
|
||||
@@ -232,14 +303,26 @@ class MakeCSRTest(unittest.TestCase):
|
||||
# have a get_extensions() method, so we skip this test if the method
|
||||
# isn't available.
|
||||
if hasattr(csr, 'get_extensions'):
|
||||
self.assertEqual(len(csr.get_extensions()), 2)
|
||||
assert len(csr.get_extensions()) == 2
|
||||
# NOTE: Ideally we would filter by the TLS Feature OID, but
|
||||
# OpenSSL.crypto.X509Extension doesn't give us the extension's raw OID,
|
||||
# and the shortname field is just "UNDEF"
|
||||
must_staple_exts = [e for e in csr.get_extensions()
|
||||
if e.get_data() == b"0\x03\x02\x01\x05"]
|
||||
self.assertEqual(len(must_staple_exts), 1,
|
||||
"Expected exactly one Must Staple extension")
|
||||
assert len(must_staple_exts) == 1, \
|
||||
"Expected exactly one Must Staple extension"
|
||||
|
||||
def test_make_csr_without_hostname(self):
|
||||
with pytest.raises(ValueError):
|
||||
self._call_with_key()
|
||||
|
||||
def test_make_csr_correct_version(self):
|
||||
csr_pem = self._call_with_key(["a.example"])
|
||||
csr = OpenSSL.crypto.load_certificate_request(
|
||||
OpenSSL.crypto.FILETYPE_PEM, csr_pem)
|
||||
|
||||
assert csr.get_version() == 0, \
|
||||
"Expected CSR version to be v1 (encoded as 0), per RFC 2986, section 4"
|
||||
|
||||
|
||||
class DumpPyopensslChainTest(unittest.TestCase):
|
||||
@@ -257,7 +340,7 @@ class DumpPyopensslChainTest(unittest.TestCase):
|
||||
length = sum(
|
||||
len(OpenSSL.crypto.dump_certificate(OpenSSL.crypto.FILETYPE_PEM, cert))
|
||||
for cert in loaded)
|
||||
self.assertEqual(len(self._call(loaded)), length)
|
||||
assert len(self._call(loaded)) == length
|
||||
|
||||
def test_dump_pyopenssl_chain_wrapped(self):
|
||||
names = ['cert.pem', 'cert-san.pem', 'cert-idnsans.pem']
|
||||
@@ -266,8 +349,8 @@ class DumpPyopensslChainTest(unittest.TestCase):
|
||||
wrapped = [wrap_func(cert) for cert in loaded]
|
||||
dump_func = OpenSSL.crypto.dump_certificate
|
||||
length = sum(len(dump_func(OpenSSL.crypto.FILETYPE_PEM, cert)) for cert in loaded)
|
||||
self.assertEqual(len(self._call(wrapped)), length)
|
||||
assert len(self._call(wrapped)) == length
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main() # pragma: no cover
|
||||
sys.exit(pytest.main(sys.argv[1:] + [__file__])) # pragma: no cover
@@ -1,7 +1,10 @@
"""Tests for acme.errors."""
import sys
import unittest
from unittest import mock

import pytest


class BadNonceTest(unittest.TestCase):
"""Tests for acme.errors.BadNonce."""
@@ -11,7 +14,7 @@ class BadNonceTest(unittest.TestCase):
self.error = BadNonce(nonce="xxx", error="error")

def test_str(self):
self.assertEqual("Invalid nonce ('xxx'): error", str(self.error))
assert "Invalid nonce ('xxx'): error" == str(self.error)


class MissingNonceTest(unittest.TestCase):
@@ -24,8 +27,8 @@ class MissingNonceTest(unittest.TestCase):
self.error = MissingNonce(self.response)

def test_str(self):
self.assertTrue("FOO" in str(self.error))
self.assertTrue("{}" in str(self.error))
assert "FOO" in str(self.error)
assert "{}" in str(self.error)


class PollErrorTest(unittest.TestCase):
@@ -40,13 +43,13 @@ class PollErrorTest(unittest.TestCase):
mock.sentinel.AR: mock.sentinel.AR2})

def test_timeout(self):
self.assertTrue(self.timeout.timeout)
self.assertFalse(self.invalid.timeout)
assert self.timeout.timeout
assert not self.invalid.timeout

def test_repr(self):
self.assertEqual('PollError(exhausted=%s, updated={sentinel.AR: '
'sentinel.AR2})' % repr(set()), repr(self.invalid))
assert 'PollError(exhausted=%s, updated={sentinel.AR: ' \
'sentinel.AR2})' % repr(set()) == repr(self.invalid)


if __name__ == "__main__":
unittest.main()  # pragma: no cover
sys.exit(pytest.main(sys.argv[1:] + [__file__]))  # pragma: no cover
acme/acme/_internal/tests/fields_test.py (new file, 60 lines)
@@ -0,0 +1,60 @@
"""Tests for acme.fields."""
import datetime
import sys
import unittest
import warnings

import josepy as jose
import pytest
import pytz


class FixedTest(unittest.TestCase):
"""Tests for acme.fields.Fixed."""

def setUp(self):
from acme.fields import fixed
self.field = fixed('name', 'x')

def test_decode(self):
assert 'x' == self.field.decode('x')

def test_decode_bad(self):
with pytest.raises(jose.DeserializationError):
self.field.decode('y')

def test_encode(self):
assert 'x' == self.field.encode('x')

def test_encode_override(self):
assert 'y' == self.field.encode('y')


class RFC3339FieldTest(unittest.TestCase):
"""Tests for acme.fields.RFC3339Field."""

def setUp(self):
self.decoded = datetime.datetime(2015, 3, 27, tzinfo=pytz.UTC)
self.encoded = '2015-03-27T00:00:00Z'

def test_default_encoder(self):
from acme.fields import RFC3339Field
assert self.encoded == RFC3339Field.default_encoder(self.decoded)

def test_default_encoder_naive_fails(self):
from acme.fields import RFC3339Field
with pytest.raises(ValueError):
RFC3339Field.default_encoder(datetime.datetime.now())

def test_default_decoder(self):
from acme.fields import RFC3339Field
assert self.decoded == RFC3339Field.default_decoder(self.encoded)

def test_default_decoder_raises_deserialization_error(self):
from acme.fields import RFC3339Field
with pytest.raises(jose.DeserializationError):
RFC3339Field.default_decoder('')


if __name__ == '__main__':
sys.exit(pytest.main(sys.argv[1:] + [__file__]))  # pragma: no cover
acme/acme/_internal/tests/jose_test.py (new file, 54 lines)
@@ -0,0 +1,54 @@
|
||||
"""Tests for acme.jose shim."""
|
||||
import importlib
|
||||
import sys
|
||||
import unittest
|
||||
|
||||
import pytest
|
||||
|
||||
|
||||
def _test_it(submodule, attribute):
|
||||
if submodule:
|
||||
acme_jose_path = 'acme.jose.' + submodule
|
||||
josepy_path = 'josepy.' + submodule
|
||||
else:
|
||||
acme_jose_path = 'acme.jose'
|
||||
josepy_path = 'josepy'
|
||||
acme_jose_mod = importlib.import_module(acme_jose_path)
|
||||
josepy_mod = importlib.import_module(josepy_path)
|
||||
|
||||
assert acme_jose_mod is josepy_mod
|
||||
assert getattr(acme_jose_mod, attribute) is getattr(josepy_mod, attribute)
|
||||
|
||||
# We use the imports below with eval, but pylint doesn't
|
||||
# understand that.
|
||||
import josepy # pylint: disable=unused-import
|
||||
|
||||
import acme # pylint: disable=unused-import
|
||||
acme_jose_mod = eval(acme_jose_path) # pylint: disable=eval-used
|
||||
josepy_mod = eval(josepy_path) # pylint: disable=eval-used
|
||||
assert acme_jose_mod is josepy_mod
|
||||
assert getattr(acme_jose_mod, attribute) is getattr(josepy_mod, attribute)
|
||||
|
||||
def test_top_level():
|
||||
_test_it('', 'RS512')
|
||||
|
||||
def test_submodules():
|
||||
# This test ensures that the modules in josepy that were
|
||||
# available at the time it was moved into its own package are
|
||||
# available under acme.jose. Backwards compatibility with new
|
||||
# modules or testing code is not maintained.
|
||||
mods_and_attrs = [('b64', 'b64decode',),
|
||||
('errors', 'Error',),
|
||||
('interfaces', 'JSONDeSerializable',),
|
||||
('json_util', 'Field',),
|
||||
('jwa', 'HS256',),
|
||||
('jwk', 'JWK',),
|
||||
('jws', 'JWS',),
|
||||
('util', 'ImmutableMap',),]
|
||||
|
||||
for mod, attr in mods_and_attrs:
|
||||
_test_it(mod, attr)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
sys.exit(pytest.main(sys.argv[1:] + [__file__])) # pragma: no cover
|
||||
@@ -1,9 +1,11 @@
|
||||
"""Tests for acme.jws."""
|
||||
import sys
|
||||
import unittest
|
||||
|
||||
import josepy as jose
|
||||
import pytest
|
||||
|
||||
import test_util
|
||||
from acme._internal.tests import test_util
|
||||
|
||||
KEY = jose.JWKRSA.load(test_util.load_vector('rsa512_key.pem'))
|
||||
|
||||
@@ -25,9 +27,9 @@ class HeaderTest(unittest.TestCase):
|
||||
from acme.jws import Header
|
||||
nonce_field = Header._fields['nonce']
|
||||
|
||||
self.assertRaises(
|
||||
jose.DeserializationError, nonce_field.decode, self.wrong_nonce)
|
||||
self.assertEqual(b'foo', nonce_field.decode(self.good_nonce))
|
||||
with pytest.raises(jose.DeserializationError):
|
||||
nonce_field.decode(self.wrong_nonce)
|
||||
assert b'foo' == nonce_field.decode(self.good_nonce)
|
||||
|
||||
|
||||
class JWSTest(unittest.TestCase):
|
||||
@@ -45,22 +47,22 @@ class JWSTest(unittest.TestCase):
|
||||
jws = JWS.sign(payload=b'foo', key=self.privkey,
|
||||
alg=jose.RS256, nonce=self.nonce,
|
||||
url=self.url, kid=self.kid)
|
||||
self.assertEqual(jws.signature.combined.nonce, self.nonce)
|
||||
self.assertEqual(jws.signature.combined.url, self.url)
|
||||
self.assertEqual(jws.signature.combined.kid, self.kid)
|
||||
self.assertEqual(jws.signature.combined.jwk, None)
|
||||
assert jws.signature.combined.nonce == self.nonce
|
||||
assert jws.signature.combined.url == self.url
|
||||
assert jws.signature.combined.kid == self.kid
|
||||
assert jws.signature.combined.jwk is None
|
||||
# TODO: check that nonce is in protected header
|
||||
|
||||
self.assertEqual(jws, JWS.from_json(jws.to_json()))
|
||||
assert jws == JWS.from_json(jws.to_json())
|
||||
|
||||
def test_jwk_serialize(self):
|
||||
from acme.jws import JWS
|
||||
jws = JWS.sign(payload=b'foo', key=self.privkey,
|
||||
alg=jose.RS256, nonce=self.nonce,
|
||||
url=self.url)
|
||||
self.assertEqual(jws.signature.combined.kid, None)
|
||||
self.assertEqual(jws.signature.combined.jwk, self.pubkey)
|
||||
assert jws.signature.combined.kid is None
|
||||
assert jws.signature.combined.jwk == self.pubkey
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main() # pragma: no cover
|
||||
sys.exit(pytest.main(sys.argv[1:] + [__file__])) # pragma: no cover
|
||||
@@ -1,12 +1,16 @@
|
||||
"""Tests for acme.messages."""
|
||||
import contextlib
|
||||
import sys
|
||||
from typing import Dict
|
||||
import unittest
|
||||
from unittest import mock
|
||||
import warnings
|
||||
|
||||
import josepy as jose
|
||||
import pytest
|
||||
|
||||
from acme import challenges
|
||||
import test_util
|
||||
from acme._internal.tests import test_util
|
||||
|
||||
CERT = test_util.load_comparable_cert('cert.der')
|
||||
CSR = test_util.load_comparable_csr('csr.der')
|
||||
@@ -17,7 +21,10 @@ class ErrorTest(unittest.TestCase):
|
||||
"""Tests for acme.messages.Error."""
|
||||
|
||||
def setUp(self):
|
||||
from acme.messages import Error, ERROR_PREFIX
|
||||
from acme.messages import Error
|
||||
from acme.messages import ERROR_PREFIX
|
||||
from acme.messages import Identifier
|
||||
from acme.messages import IDENTIFIER_FQDN
|
||||
self.error = Error.with_code('malformed', detail='foo', title='title')
|
||||
self.jobj = {
|
||||
'detail': 'foo',
|
||||
@@ -25,54 +32,84 @@ class ErrorTest(unittest.TestCase):
|
||||
'type': ERROR_PREFIX + 'malformed',
|
||||
}
|
||||
self.error_custom = Error(typ='custom', detail='bar')
|
||||
self.identifier = Identifier(typ=IDENTIFIER_FQDN, value='example.com')
|
||||
self.subproblem = Error.with_code('caa', detail='bar', title='title', identifier=self.identifier)
|
||||
self.error_with_subproblems = Error.with_code('malformed', detail='foo', title='title', subproblems=[self.subproblem])
|
||||
self.empty_error = Error()
|
||||
|
||||
def test_default_typ(self):
|
||||
from acme.messages import Error
|
||||
self.assertEqual(Error().typ, 'about:blank')
|
||||
assert Error().typ == 'about:blank'
|
||||
|
||||
def test_from_json_empty(self):
|
||||
from acme.messages import Error
|
||||
self.assertEqual(Error(), Error.from_json('{}'))
|
||||
assert Error() == Error.from_json('{}')
|
||||
|
||||
def test_from_json_hashable(self):
|
||||
from acme.messages import Error
|
||||
hash(Error.from_json(self.error.to_json()))
|
||||
|
||||
def test_from_json_with_subproblems(self):
|
||||
from acme.messages import Error
|
||||
|
||||
parsed_error = Error.from_json(self.error_with_subproblems.to_json())
|
||||
|
||||
assert 1 == len(parsed_error.subproblems)
|
||||
assert self.subproblem == parsed_error.subproblems[0]
|
||||
|
||||
def test_description(self):
|
||||
self.assertEqual('The request message was malformed', self.error.description)
|
||||
self.assertTrue(self.error_custom.description is None)
|
||||
assert 'The request message was malformed' == self.error.description
|
||||
assert self.error_custom.description is None
|
||||
|
||||
def test_code(self):
|
||||
from acme.messages import Error
|
||||
self.assertEqual('malformed', self.error.code)
|
||||
self.assertEqual(None, self.error_custom.code)
|
||||
self.assertEqual(None, Error().code)
|
||||
assert 'malformed' == self.error.code
|
||||
assert self.error_custom.code is None
|
||||
assert Error().code is None
|
||||
|
||||
def test_is_acme_error(self):
|
||||
from acme.messages import is_acme_error, Error
|
||||
self.assertTrue(is_acme_error(self.error))
|
||||
self.assertFalse(is_acme_error(self.error_custom))
|
||||
self.assertFalse(is_acme_error(Error()))
|
||||
self.assertFalse(is_acme_error(self.empty_error))
|
||||
self.assertFalse(is_acme_error("must pet all the {dogs|rabbits}"))
|
||||
from acme.messages import Error
|
||||
from acme.messages import is_acme_error
|
||||
assert is_acme_error(self.error)
|
||||
assert not is_acme_error(self.error_custom)
|
||||
assert not is_acme_error(Error())
|
||||
assert not is_acme_error(self.empty_error)
|
||||
assert not is_acme_error("must pet all the {dogs|rabbits}")
|
||||
|
||||
def test_unicode_error(self):
|
||||
from acme.messages import Error, is_acme_error
|
||||
from acme.messages import Error
|
||||
from acme.messages import is_acme_error
|
||||
arabic_error = Error.with_code(
|
||||
'malformed', detail=u'\u0639\u062f\u0627\u0644\u0629', title='title')
|
||||
self.assertTrue(is_acme_error(arabic_error))
|
||||
assert is_acme_error(arabic_error)
|
||||
|
||||
def test_with_code(self):
|
||||
from acme.messages import Error, is_acme_error
|
||||
self.assertTrue(is_acme_error(Error.with_code('badCSR')))
|
||||
self.assertRaises(ValueError, Error.with_code, 'not an ACME error code')
|
||||
from acme.messages import Error
|
||||
from acme.messages import is_acme_error
|
||||
assert is_acme_error(Error.with_code('badCSR'))
|
||||
with pytest.raises(ValueError):
|
||||
Error.with_code('not an ACME error code')
|
||||
|
||||
def test_str(self):
|
||||
self.assertEqual(
|
||||
str(self.error),
|
||||
u"{0.typ} :: {0.description} :: {0.detail} :: {0.title}"
|
||||
.format(self.error))
|
||||
assert str(self.error) == \
|
||||
u"{0.typ} :: {0.description} :: {0.detail} :: {0.title}" \
|
||||
.format(self.error)
|
||||
assert str(self.error_with_subproblems) == \
|
||||
(u"{0.typ} :: {0.description} :: {0.detail} :: {0.title}\n"+
|
||||
u"Problem for {1.identifier.value}: {1.typ} :: {1.description} :: {1.detail} :: {1.title}").format(
|
||||
self.error_with_subproblems, self.subproblem)
|
||||
|
||||
# this test is based on a minimal reproduction of a contextmanager/immutable
|
||||
# exception related error: https://github.com/python/cpython/issues/99856
|
||||
def test_setting_traceback(self):
|
||||
assert self.error_custom.__traceback__ is None
|
||||
|
||||
try:
|
||||
1/0
|
||||
except ZeroDivisionError as e:
|
||||
self.error_custom.__traceback__ = e.__traceback__
|
||||
|
||||
assert self.error_custom.__traceback__ is not None
|
||||
|
||||
|
||||
class ConstantTest(unittest.TestCase):
|
||||
@@ -89,28 +126,28 @@ class ConstantTest(unittest.TestCase):
|
||||
self.const_b = MockConstant('b')
|
||||
|
||||
def test_to_partial_json(self):
|
||||
self.assertEqual('a', self.const_a.to_partial_json())
|
||||
self.assertEqual('b', self.const_b.to_partial_json())
|
||||
assert 'a' == self.const_a.to_partial_json()
|
||||
assert 'b' == self.const_b.to_partial_json()
|
||||
|
||||
def test_from_json(self):
|
||||
self.assertEqual(self.const_a, self.MockConstant.from_json('a'))
|
||||
self.assertRaises(
|
||||
jose.DeserializationError, self.MockConstant.from_json, 'c')
|
||||
assert self.const_a == self.MockConstant.from_json('a')
|
||||
with pytest.raises(jose.DeserializationError):
|
||||
self.MockConstant.from_json('c')
|
||||
|
||||
def test_from_json_hashable(self):
|
||||
hash(self.MockConstant.from_json('a'))
|
||||
|
||||
def test_repr(self):
|
||||
self.assertEqual('MockConstant(a)', repr(self.const_a))
|
||||
self.assertEqual('MockConstant(b)', repr(self.const_b))
|
||||
assert 'MockConstant(a)' == repr(self.const_a)
|
||||
assert 'MockConstant(b)' == repr(self.const_b)
|
||||
|
||||
def test_equality(self):
|
||||
const_a_prime = self.MockConstant('a')
|
||||
self.assertNotEqual(self.const_a, self.const_b)
|
||||
self.assertEqual(self.const_a, const_a_prime)
|
||||
assert self.const_a != self.const_b
|
||||
assert self.const_a == const_a_prime
|
||||
|
||||
self.assertNotEqual(self.const_a, self.const_b)
|
||||
self.assertEqual(self.const_a, const_a_prime)
|
||||
assert self.const_a != self.const_b
|
||||
assert self.const_a == const_a_prime
|
||||
|
||||
|
||||
class DirectoryTest(unittest.TestCase):
|
||||
@@ -119,8 +156,8 @@ class DirectoryTest(unittest.TestCase):
|
||||
def setUp(self):
|
||||
from acme.messages import Directory
|
||||
self.dir = Directory({
|
||||
'new-reg': 'reg',
|
||||
mock.MagicMock(resource_type='new-cert'): 'cert',
|
||||
'newReg': 'reg',
|
||||
'newCert': 'cert',
|
||||
'meta': Directory.Meta(
|
||||
terms_of_service='https://example.com/acme/terms',
|
||||
website='https://www.example.com/',
|
||||
@@ -133,30 +170,29 @@ class DirectoryTest(unittest.TestCase):
|
||||
Directory({'foo': 'bar'})
|
||||
|
||||
def test_getitem(self):
|
||||
self.assertEqual('reg', self.dir['new-reg'])
|
||||
from acme.messages import NewRegistration
|
||||
self.assertEqual('reg', self.dir[NewRegistration])
|
||||
self.assertEqual('reg', self.dir[NewRegistration()])
|
||||
assert 'reg' == self.dir['newReg']
|
||||
|
||||
def test_getitem_fails_with_key_error(self):
|
||||
self.assertRaises(KeyError, self.dir.__getitem__, 'foo')
|
||||
with pytest.raises(KeyError):
|
||||
self.dir.__getitem__('foo')
|
||||
|
||||
def test_getattr(self):
|
||||
self.assertEqual('reg', self.dir.new_reg)
|
||||
assert 'reg' == self.dir.newReg
|
||||
|
||||
def test_getattr_fails_with_attribute_error(self):
|
||||
self.assertRaises(AttributeError, self.dir.__getattr__, 'foo')
|
||||
with pytest.raises(AttributeError):
|
||||
self.dir.__getattr__('foo')
|
||||
|
||||
def test_to_json(self):
|
||||
self.assertEqual(self.dir.to_json(), {
|
||||
'new-reg': 'reg',
|
||||
'new-cert': 'cert',
|
||||
assert self.dir.to_json() == {
|
||||
'newReg': 'reg',
|
||||
'newCert': 'cert',
|
||||
'meta': {
|
||||
'terms-of-service': 'https://example.com/acme/terms',
|
||||
'termsOfService': 'https://example.com/acme/terms',
|
||||
'website': 'https://www.example.com/',
|
||||
'caaIdentities': ['example.com'],
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
def test_from_json_deserialization_unknown_key_success(self): # pylint: disable=no-self-use
|
||||
from acme.messages import Directory
|
||||
@@ -167,7 +203,7 @@ class DirectoryTest(unittest.TestCase):
|
||||
for k in self.dir.meta:
|
||||
if k == 'terms_of_service':
|
||||
result = self.dir.meta[k] == 'https://example.com/acme/terms'
|
||||
self.assertTrue(result)
|
||||
assert result
|
||||
|
||||
|
||||
class ExternalAccountBindingTest(unittest.TestCase):
|
||||
@@ -184,8 +220,8 @@ class ExternalAccountBindingTest(unittest.TestCase):
|
||||
from acme.messages import ExternalAccountBinding
|
||||
eab = ExternalAccountBinding.from_data(self.key, self.kid, self.hmac_key, self.dir)
|
||||
|
||||
self.assertEqual(len(eab), 3)
|
||||
self.assertEqual(sorted(eab.keys()), sorted(['protected', 'payload', 'signature']))
|
||||
assert len(eab) == 3
|
||||
assert sorted(eab.keys()) == sorted(['protected', 'payload', 'signature'])
|
||||
|
||||
|
||||
class RegistrationTest(unittest.TestCase):
|
||||
@@ -214,13 +250,15 @@ class RegistrationTest(unittest.TestCase):
|
||||
def test_from_data(self):
|
||||
from acme.messages import Registration
|
||||
reg = Registration.from_data(phone='1234', email='admin@foo.com')
|
||||
self.assertEqual(reg.contact, (
|
||||
assert reg.contact == (
|
||||
'tel:1234',
|
||||
'mailto:admin@foo.com',
|
||||
))
|
||||
)
|
||||
|
||||
def test_new_registration_from_data_with_eab(self):
|
||||
from acme.messages import NewRegistration, ExternalAccountBinding, Directory
|
||||
from acme.messages import Directory
|
||||
from acme.messages import ExternalAccountBinding
|
||||
from acme.messages import NewRegistration
|
||||
key = jose.jwk.JWKRSA(key=KEY.public_key())
|
||||
kid = "kid-for-testing"
|
||||
hmac_key = "hmac-key-for-testing"
|
||||
@@ -229,24 +267,24 @@ class RegistrationTest(unittest.TestCase):
|
||||
})
|
||||
eab = ExternalAccountBinding.from_data(key, kid, hmac_key, directory)
|
||||
reg = NewRegistration.from_data(email='admin@foo.com', external_account_binding=eab)
|
||||
self.assertEqual(reg.contact, (
|
||||
assert reg.contact == (
|
||||
'mailto:admin@foo.com',
|
||||
))
|
||||
self.assertEqual(sorted(reg.external_account_binding.keys()),
|
||||
sorted(['protected', 'payload', 'signature']))
|
||||
)
|
||||
assert sorted(reg.external_account_binding.keys()) == \
|
||||
sorted(['protected', 'payload', 'signature'])
|
||||
|
||||
def test_phones(self):
|
||||
self.assertEqual(('1234',), self.reg.phones)
|
||||
assert ('1234',) == self.reg.phones
|
||||
|
||||
def test_emails(self):
|
||||
self.assertEqual(('admin@foo.com',), self.reg.emails)
|
||||
assert ('admin@foo.com',) == self.reg.emails
|
||||
|
||||
def test_to_partial_json(self):
|
||||
self.assertEqual(self.jobj_to, self.reg.to_partial_json())
|
||||
assert self.jobj_to == self.reg.to_partial_json()
|
||||
|
||||
def test_from_json(self):
|
||||
from acme.messages import Registration
|
||||
self.assertEqual(self.reg, Registration.from_json(self.jobj_from))
|
||||
assert self.reg == Registration.from_json(self.jobj_from)
|
||||
|
||||
def test_from_json_hashable(self):
|
||||
from acme.messages import Registration
|
||||
@@ -257,13 +295,13 @@ class RegistrationTest(unittest.TestCase):
|
||||
empty_new_reg = NewRegistration()
|
||||
new_reg_with_contact = NewRegistration(contact=())
|
||||
|
||||
self.assertEqual(empty_new_reg.contact, ())
|
||||
self.assertEqual(new_reg_with_contact.contact, ())
|
||||
assert empty_new_reg.contact == ()
|
||||
assert new_reg_with_contact.contact == ()
|
||||
|
||||
self.assertTrue('contact' not in empty_new_reg.to_partial_json())
|
||||
self.assertTrue('contact' not in empty_new_reg.fields_to_partial_json())
|
||||
self.assertTrue('contact' in new_reg_with_contact.to_partial_json())
|
||||
self.assertTrue('contact' in new_reg_with_contact.fields_to_partial_json())
|
||||
assert 'contact' not in empty_new_reg.to_partial_json()
|
||||
assert 'contact' not in empty_new_reg.fields_to_partial_json()
|
||||
assert 'contact' in new_reg_with_contact.to_partial_json()
|
||||
assert 'contact' in new_reg_with_contact.fields_to_partial_json()
|
||||
|
||||
|
||||
class UpdateRegistrationTest(unittest.TestCase):
|
||||
@@ -272,9 +310,8 @@ class UpdateRegistrationTest(unittest.TestCase):
|
||||
def test_empty(self):
|
||||
from acme.messages import UpdateRegistration
|
||||
jstring = '{"resource": "reg"}'
|
||||
self.assertEqual(jstring, UpdateRegistration().json_dumps())
|
||||
self.assertEqual(
|
||||
UpdateRegistration(), UpdateRegistration.json_loads(jstring))
|
||||
assert '{}' == UpdateRegistration().json_dumps()
|
||||
assert UpdateRegistration() == UpdateRegistration.json_loads(jstring)
|
||||
|
||||
|
||||
class RegistrationResourceTest(unittest.TestCase):
|
||||
@@ -287,11 +324,11 @@ class RegistrationResourceTest(unittest.TestCase):
|
||||
terms_of_service=mock.sentinel.terms_of_service)
|
||||
|
||||
def test_to_partial_json(self):
|
||||
self.assertEqual(self.regr.to_json(), {
|
||||
assert self.regr.to_json() == {
|
||||
'body': mock.sentinel.body,
|
||||
'uri': mock.sentinel.uri,
|
||||
'terms_of_service': mock.sentinel.terms_of_service,
|
||||
})
|
||||
}
|
||||
|
||||
|
||||
class ChallengeResourceTest(unittest.TestCase):
|
||||
@@ -299,8 +336,8 @@ class ChallengeResourceTest(unittest.TestCase):
|
||||
|
||||
def test_uri(self):
|
||||
from acme.messages import ChallengeResource
|
||||
self.assertEqual('http://challb', ChallengeResource(body=mock.MagicMock(
|
||||
uri='http://challb'), authzr_uri='http://authz').uri)
|
||||
assert 'http://challb' == ChallengeResource(body=mock.MagicMock(
|
||||
uri='http://challb'), authzr_uri='http://authz').uri
|
||||
|
||||
|
||||
class ChallengeBodyTest(unittest.TestCase):
|
||||
@@ -320,7 +357,7 @@ class ChallengeBodyTest(unittest.TestCase):
|
||||
error=error)
|
||||
|
||||
self.jobj_to = {
|
||||
'uri': 'http://challb',
|
||||
'url': 'http://challb',
|
||||
'status': self.status,
|
||||
'type': 'dns',
|
||||
'token': 'evaGxfADs6pSRb2LAv9IZf17Dt3juxGJ-PCt92wr-oA',
|
||||
@@ -334,22 +371,22 @@ class ChallengeBodyTest(unittest.TestCase):
|
||||
}
|
||||
|
||||
def test_encode(self):
|
||||
self.assertEqual(self.challb.encode('uri'), self.challb.uri)
|
||||
assert self.challb.encode('uri') == self.challb.uri
|
||||
|
||||
def test_to_partial_json(self):
|
||||
self.assertEqual(self.jobj_to, self.challb.to_partial_json())
|
||||
assert self.jobj_to == self.challb.to_partial_json()
|
||||
|
||||
def test_from_json(self):
|
||||
from acme.messages import ChallengeBody
|
||||
self.assertEqual(self.challb, ChallengeBody.from_json(self.jobj_from))
|
||||
assert self.challb == ChallengeBody.from_json(self.jobj_from)
|
||||
|
||||
def test_from_json_hashable(self):
|
||||
from acme.messages import ChallengeBody
|
||||
hash(ChallengeBody.from_json(self.jobj_from))
|
||||
|
||||
def test_proxy(self):
|
||||
self.assertEqual(jose.b64decode(
|
||||
'evaGxfADs6pSRb2LAv9IZf17Dt3juxGJ-PCt92wr-oA'), self.challb.token)
|
||||
assert jose.b64decode(
|
||||
'evaGxfADs6pSRb2LAv9IZf17Dt3juxGJ-PCt92wr-oA') == self.challb.token
|
||||
|
||||
|
||||
class AuthorizationTest(unittest.TestCase):
|
||||
@@ -367,20 +404,17 @@ class AuthorizationTest(unittest.TestCase):
|
||||
chall=challenges.DNS(
|
||||
token=b'DGyRejmCefe7v4NfDGDKfA')),
|
||||
)
|
||||
combinations = ((0,), (1,))
|
||||
|
||||
from acme.messages import Authorization
|
||||
from acme.messages import Identifier
|
||||
from acme.messages import IDENTIFIER_FQDN
|
||||
identifier = Identifier(typ=IDENTIFIER_FQDN, value='example.com')
|
||||
self.authz = Authorization(
|
||||
identifier=identifier, combinations=combinations,
|
||||
challenges=self.challbs)
|
||||
identifier=identifier, challenges=self.challbs)
|
||||
|
||||
self.jobj_from = {
|
||||
'identifier': identifier.to_json(),
|
||||
'challenges': [challb.to_json() for challb in self.challbs],
|
||||
'combinations': combinations,
|
||||
}
|
||||
|
||||
def test_from_json(self):
|
||||
@@ -391,12 +425,6 @@ class AuthorizationTest(unittest.TestCase):
|
||||
from acme.messages import Authorization
|
||||
hash(Authorization.from_json(self.jobj_from))
|
||||
|
||||
def test_resolved_combinations(self):
|
||||
self.assertEqual(self.authz.resolved_combinations, (
|
||||
(self.challbs[0],),
|
||||
(self.challbs[1],),
|
||||
))
|
||||
|
||||
|
||||
class AuthorizationResourceTest(unittest.TestCase):
|
||||
"""Tests for acme.messages.AuthorizationResource."""
|
||||
@@ -406,7 +434,7 @@ class AuthorizationResourceTest(unittest.TestCase):
|
||||
authzr = AuthorizationResource(
|
||||
uri=mock.sentinel.uri,
|
||||
body=mock.sentinel.body)
|
||||
self.assertTrue(isinstance(authzr, jose.JSONDeSerializable))
|
||||
assert isinstance(authzr, jose.JSONDeSerializable)
|
||||
|
||||
|
||||
class CertificateRequestTest(unittest.TestCase):
|
||||
@@ -417,10 +445,9 @@ class CertificateRequestTest(unittest.TestCase):
|
||||
self.req = CertificateRequest(csr=CSR)
|
||||
|
||||
def test_json_de_serializable(self):
|
||||
self.assertTrue(isinstance(self.req, jose.JSONDeSerializable))
|
||||
assert isinstance(self.req, jose.JSONDeSerializable)
|
||||
from acme.messages import CertificateRequest
|
||||
self.assertEqual(
|
||||
self.req, CertificateRequest.from_json(self.req.to_json()))
|
||||
assert self.req == CertificateRequest.from_json(self.req.to_json())
|
||||
|
||||
|
||||
class CertificateResourceTest(unittest.TestCase):
|
||||
@@ -433,10 +460,9 @@ class CertificateResourceTest(unittest.TestCase):
|
||||
cert_chain_uri=mock.sentinel.cert_chain_uri)
|
||||
|
||||
def test_json_de_serializable(self):
|
||||
self.assertTrue(isinstance(self.certr, jose.JSONDeSerializable))
|
||||
assert isinstance(self.certr, jose.JSONDeSerializable)
|
||||
from acme.messages import CertificateResource
|
||||
self.assertEqual(
|
||||
self.certr, CertificateResource.from_json(self.certr.to_json()))
|
||||
assert self.certr == CertificateResource.from_json(self.certr.to_json())
|
||||
|
||||
|
||||
class RevocationTest(unittest.TestCase):
|
||||
@@ -460,12 +486,42 @@ class OrderResourceTest(unittest.TestCase):
|
||||
body=mock.sentinel.body, uri=mock.sentinel.uri)
|
||||
|
||||
def test_to_partial_json(self):
|
||||
self.assertEqual(self.regr.to_json(), {
|
||||
assert self.regr.to_json() == {
|
||||
'body': mock.sentinel.body,
|
||||
'uri': mock.sentinel.uri,
|
||||
'authorizations': None,
|
||||
})
|
||||
}
|
||||
|
||||
def test_json_de_serializable(self):
|
||||
from acme.messages import ChallengeBody
|
||||
from acme.messages import STATUS_PENDING
|
||||
challbs = (
|
||||
ChallengeBody(
|
||||
uri='http://challb1', status=STATUS_PENDING,
|
||||
chall=challenges.HTTP01(token=b'IlirfxKKXAsHtmzK29Pj8A')),
|
||||
ChallengeBody(uri='http://challb2', status=STATUS_PENDING,
|
||||
chall=challenges.DNS(
|
||||
token=b'DGyRejmCefe7v4NfDGDKfA')),
|
||||
)
|
||||
|
||||
from acme.messages import Authorization
|
||||
from acme.messages import AuthorizationResource
|
||||
from acme.messages import Identifier
|
||||
from acme.messages import IDENTIFIER_FQDN
|
||||
identifier = Identifier(typ=IDENTIFIER_FQDN, value='example.com')
|
||||
authz = AuthorizationResource(uri="http://authz1",
|
||||
body=Authorization(
|
||||
identifier=identifier,
|
||||
challenges=challbs))
|
||||
from acme.messages import Order
|
||||
body = Order(identifiers=(identifier,), status=STATUS_PENDING,
|
||||
authorizations=tuple(challb.uri for challb in challbs))
|
||||
from acme.messages import OrderResource
|
||||
orderr = OrderResource(uri="http://order1", body=body,
|
||||
csr_pem=b'test blob',
|
||||
authorizations=(authz,))
|
||||
self.assertEqual(orderr,
|
||||
OrderResource.from_json(orderr.to_json()))
|
||||
|
||||
class NewOrderTest(unittest.TestCase):
|
||||
"""Tests for acme.messages.NewOrder."""
|
||||
@@ -476,9 +532,9 @@ class NewOrderTest(unittest.TestCase):
|
||||
identifiers=mock.sentinel.identifiers)
|
||||
|
||||
def test_to_partial_json(self):
|
||||
self.assertEqual(self.reg.to_json(), {
|
||||
assert self.reg.to_json() == {
|
||||
'identifiers': mock.sentinel.identifiers,
|
||||
})
|
||||
}
|
||||
|
||||
|
||||
class JWSPayloadRFC8555Compliant(unittest.TestCase):
|
||||
@@ -487,12 +543,11 @@ class JWSPayloadRFC8555Compliant(unittest.TestCase):
|
||||
from acme.messages import NewAuthorization
|
||||
|
||||
new_order = NewAuthorization()
|
||||
new_order.le_acme_version = 2
|
||||
|
||||
jobj = new_order.json_dumps(indent=2).encode()
|
||||
# RFC8555 states that JWS bodies must not have a resource field.
|
||||
self.assertEqual(jobj, b'{}')
|
||||
assert jobj == b'{}'
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main() # pragma: no cover
|
||||
sys.exit(pytest.main(sys.argv[1:] + [__file__])) # pragma: no cover
|
||||
@@ -2,19 +2,20 @@
|
||||
import http.client as http_client
|
||||
import socket
|
||||
import socketserver
|
||||
import sys
|
||||
import threading
|
||||
import unittest
|
||||
from typing import Set
|
||||
import unittest
|
||||
from unittest import mock
|
||||
|
||||
import josepy as jose
|
||||
import pytest
|
||||
import requests
|
||||
|
||||
from acme import challenges
|
||||
from acme import crypto_util
|
||||
from acme import errors
|
||||
|
||||
import test_util
|
||||
from acme._internal.tests import test_util
|
||||
|
||||
|
||||
class TLSServerTest(unittest.TestCase):
|
||||
@@ -54,18 +55,18 @@ class HTTP01ServerTest(unittest.TestCase):
|
||||
def tearDown(self):
|
||||
self.server.shutdown()
|
||||
self.thread.join()
|
||||
self.server.server_close()
|
||||
|
||||
def test_index(self):
|
||||
response = requests.get(
|
||||
'http://localhost:{0}'.format(self.port), verify=False)
|
||||
self.assertEqual(
|
||||
response.text, 'ACME client standalone challenge solver')
|
||||
self.assertTrue(response.ok)
|
||||
assert response.text == 'ACME client standalone challenge solver'
|
||||
assert response.ok
|
||||
|
||||
def test_404(self):
|
||||
response = requests.get(
|
||||
'http://localhost:{0}/foo'.format(self.port), verify=False)
|
||||
self.assertEqual(response.status_code, http_client.NOT_FOUND)
|
||||
assert response.status_code == http_client.NOT_FOUND
|
||||
|
||||
def _test_http01(self, add):
|
||||
chall = challenges.HTTP01(token=(b'x' * 16))
|
||||
@@ -81,32 +82,32 @@ class HTTP01ServerTest(unittest.TestCase):
|
||||
port=self.port)
|
||||
|
||||
def test_http01_found(self):
|
||||
self.assertTrue(self._test_http01(add=True))
|
||||
assert self._test_http01(add=True)
|
||||
|
||||
def test_http01_not_found(self):
|
||||
self.assertFalse(self._test_http01(add=False))
|
||||
assert not self._test_http01(add=False)
|
||||
|
||||
def test_timely_shutdown(self):
|
||||
from acme.standalone import HTTP01Server
|
||||
server = HTTP01Server(('', 0), resources=set(), timeout=0.05)
|
||||
server_thread = threading.Thread(target=server.serve_forever)
|
||||
server_thread.start()
|
||||
with HTTP01Server(('', 0), resources=set(), timeout=0.05) as server:
|
||||
server_thread = threading.Thread(target=server.serve_forever)
|
||||
server_thread.start()
|
||||
|
||||
client = socket.socket()
|
||||
client.connect(('localhost', server.socket.getsockname()[1]))
|
||||
with socket.socket() as client:
|
||||
client.connect(('localhost', server.socket.getsockname()[1]))
|
||||
|
||||
stop_thread = threading.Thread(target=server.shutdown)
|
||||
stop_thread.start()
|
||||
server_thread.join(5.)
|
||||
stop_thread = threading.Thread(target=server.shutdown)
|
||||
stop_thread.start()
|
||||
server_thread.join(5.)
|
||||
|
||||
is_hung = server_thread.is_alive()
|
||||
try:
|
||||
client.shutdown(socket.SHUT_RDWR)
|
||||
except: # pragma: no cover, pylint: disable=bare-except
|
||||
# may raise error because socket could already be closed
|
||||
pass
|
||||
is_hung = server_thread.is_alive()
|
||||
try:
|
||||
client.shutdown(socket.SHUT_RDWR)
|
||||
except: # pragma: no cover, pylint: disable=bare-except
|
||||
# may raise error because socket could already be closed
|
||||
pass
|
||||
|
||||
self.assertFalse(is_hung, msg='Server shutdown should not be hung')
|
||||
assert not is_hung, 'Server shutdown should not be hung'
|
||||
|
||||
|
||||
@unittest.skipIf(not challenges.TLSALPN01.is_supported(), "pyOpenSSL too old")
|
||||
@@ -133,6 +134,7 @@ class TLSALPN01ServerTest(unittest.TestCase):
|
||||
def tearDown(self):
|
||||
self.server.shutdown() # pylint: disable=no-member
|
||||
self.thread.join()
|
||||
self.server.server_close()
|
||||
|
||||
# TODO: This is not implemented yet, see comments in standalone.py
|
||||
# def test_certs(self):
|
||||
@@ -149,14 +151,12 @@ class TLSALPN01ServerTest(unittest.TestCase):
|
||||
b'localhost', host=host, port=port, timeout=1,
|
||||
alpn_protocols=[b"acme-tls/1"])
|
||||
# Expect challenge cert when connecting with ALPN.
|
||||
self.assertEqual(
|
||||
jose.ComparableX509(cert),
|
||||
assert jose.ComparableX509(cert) == \
|
||||
jose.ComparableX509(self.challenge_certs[b'localhost'][1])
|
||||
)
|
||||
|
||||
def test_bad_alpn(self):
|
||||
host, port = self.server.socket.getsockname()[:2]
|
||||
with self.assertRaises(errors.Error):
|
||||
with pytest.raises(errors.Error):
|
||||
crypto_util.probe_sni(
|
||||
b'localhost', host=host, port=port, timeout=1,
|
||||
alpn_protocols=[b"bad-alpn"])
|
||||
@@ -165,7 +165,6 @@ class TLSALPN01ServerTest(unittest.TestCase):
|
||||
class BaseDualNetworkedServersTest(unittest.TestCase):
|
||||
"""Test for acme.standalone.BaseDualNetworkedServers."""
|
||||
|
||||
|
||||
class SingleProtocolServer(socketserver.TCPServer):
|
||||
"""Server that only serves on a single protocol. FreeBSD has this behavior for AF_INET6."""
|
||||
def __init__(self, *args, **kwargs):
|
||||
@@ -175,7 +174,7 @@ class BaseDualNetworkedServersTest(unittest.TestCase):
|
||||
kwargs["bind_and_activate"] = False
|
||||
else:
|
||||
self.address_family = socket.AF_INET
|
||||
socketserver.TCPServer.__init__(self, *args, **kwargs)
|
||||
super().__init__(*args, **kwargs)
|
||||
if ipv6:
|
||||
# NB: On Windows, socket.IPPROTO_IPV6 constant may be missing.
|
||||
# We use the corresponding value (41) instead.
|
||||
@@ -190,12 +189,18 @@ class BaseDualNetworkedServersTest(unittest.TestCase):
|
||||
|
||||
@mock.patch("socket.socket.bind")
|
||||
def test_fail_to_bind(self, mock_bind):
|
||||
mock_bind.side_effect = socket.error
|
||||
from errno import EADDRINUSE
|
||||
|
||||
from acme.standalone import BaseDualNetworkedServers
|
||||
self.assertRaises(socket.error, BaseDualNetworkedServers,
|
||||
BaseDualNetworkedServersTest.SingleProtocolServer,
|
||||
('', 0),
|
||||
socketserver.BaseRequestHandler)
|
||||
|
||||
mock_bind.side_effect = socket.error(EADDRINUSE, "Fake addr in use error")
|
||||
|
||||
with pytest.raises(socket.error) as exc_info:
|
||||
BaseDualNetworkedServers(
|
||||
BaseDualNetworkedServersTest.SingleProtocolServer,
|
||||
('', 0), socketserver.BaseRequestHandler)
|
||||
|
||||
assert exc_info.value.errno == EADDRINUSE
|
||||
|
||||
def test_ports_equal(self):
|
||||
from acme.standalone import BaseDualNetworkedServers
|
||||
@@ -209,8 +214,10 @@ class BaseDualNetworkedServersTest(unittest.TestCase):
|
||||
for sockname in socknames:
|
||||
port = sockname[1]
|
||||
if prev_port:
|
||||
self.assertEqual(prev_port, port)
|
||||
assert prev_port == port
|
||||
prev_port = port
|
||||
for server in servers.servers:
|
||||
server.server_close()
|
||||
|
||||
|
||||
class HTTP01DualNetworkedServersTest(unittest.TestCase):
|
||||
@@ -233,14 +240,13 @@ class HTTP01DualNetworkedServersTest(unittest.TestCase):
|
||||
def test_index(self):
|
||||
response = requests.get(
|
||||
'http://localhost:{0}'.format(self.port), verify=False)
|
||||
self.assertEqual(
|
||||
response.text, 'ACME client standalone challenge solver')
|
||||
self.assertTrue(response.ok)
|
||||
assert response.text == 'ACME client standalone challenge solver'
|
||||
assert response.ok
|
||||
|
||||
def test_404(self):
|
||||
response = requests.get(
|
||||
'http://localhost:{0}/foo'.format(self.port), verify=False)
|
||||
self.assertEqual(response.status_code, http_client.NOT_FOUND)
|
||||
assert response.status_code == http_client.NOT_FOUND
|
||||
|
||||
def _test_http01(self, add):
|
||||
chall = challenges.HTTP01(token=(b'x' * 16))
|
||||
@@ -256,11 +262,11 @@ class HTTP01DualNetworkedServersTest(unittest.TestCase):
|
||||
port=self.port)
|
||||
|
||||
def test_http01_found(self):
|
||||
self.assertTrue(self._test_http01(add=True))
|
||||
assert self._test_http01(add=True)
|
||||
|
||||
def test_http01_not_found(self):
|
||||
self.assertFalse(self._test_http01(add=False))
|
||||
assert not self._test_http01(add=False)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
unittest.main() # pragma: no cover
|
||||
sys.exit(pytest.main(sys.argv[1:] + [__file__])) # pragma: no cover
|
||||
@@ -4,19 +4,25 @@

"""
import os
import sys

from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import serialization
import josepy as jose
from josepy.util import ComparableECKey
from OpenSSL import crypto
import pkg_resources

if sys.version_info >= (3, 9):  # pragma: no cover
import importlib.resources as importlib_resources
else:  # pragma: no cover
import importlib_resources


def load_vector(*names):
"""Load contents of a test vector."""
# luckily, resource_string opens file in binary mode
return pkg_resources.resource_string(
__name__, os.path.join('testdata', *names))
vector_ref = importlib_resources.files(__package__).joinpath('testdata', *names)
return vector_ref.read_bytes()
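The switch from pkg_resources to importlib.resources above follows the standard pattern for reading package data. A minimal self-contained sketch (package and file names here are only illustrative, not taken from the diff):

import importlib.resources

def read_testdata(name: str) -> bytes:
    # files() returns a Traversable for the package; joinpath/read_bytes
    # replaces pkg_resources.resource_string without importing setuptools.
    pkg = importlib.resources.files("acme._internal.tests")
    return pkg.joinpath("testdata").joinpath(name).read_bytes()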
def _guess_loader(filename, loader_pem, loader_der):
|
||||
@@ -60,6 +66,14 @@ def load_rsa_private_key(*names):
|
||||
load_vector(*names), password=None, backend=default_backend()))
|
||||
|
||||
|
||||
def load_ecdsa_private_key(*names):
|
||||
"""Load ECDSA private key."""
|
||||
loader = _guess_loader(names[-1], serialization.load_pem_private_key,
|
||||
serialization.load_der_private_key)
|
||||
return ComparableECKey(loader(
|
||||
load_vector(*names), password=None, backend=default_backend()))
|
||||
|
||||
|
||||
def load_pyopenssl_private_key(*names):
|
||||
"""Load pyOpenSSL private key."""
|
||||
loader = _guess_loader(
|
||||
@@ -15,3 +15,7 @@ and for the certificates:
openssl req -key rsa2048_key.pem -new -subj '/CN=example.com' -x509 -outform DER > cert.der
openssl req -key rsa2048_key.pem -new -subj '/CN=example.com' -x509 > rsa2048_cert.pem
openssl req -key rsa1024_key.pem -new -subj '/CN=example.com' -x509 > rsa1024_cert.pem

and for the elliptic key curves:

openssl genpkey -algorithm EC -out ec_secp384r1.pem -pkeyopt ec_paramgen_curve:P-384 -pkeyopt ec_param_enc:named_curve
acme/acme/_internal/tests/testdata/cert-ipsans.pem (new vendored file, 21 lines)
@@ -0,0 +1,21 @@
|
||||
-----BEGIN CERTIFICATE-----
|
||||
MIIDizCCAnOgAwIBAgIIPNBLQXwhoUkwDQYJKoZIhvcNAQELBQAwKDEmMCQGA1UE
|
||||
AxMdUGViYmxlIEludGVybWVkaWF0ZSBDQSAxNzNiMjYwHhcNMjAwNTI5MTkxODA5
|
||||
WhcNMjUwNTI5MTkxODA5WjAWMRQwEgYDVQQDEwsxOTIuMC4yLjE0NTCCASIwDQYJ
|
||||
KoZIhvcNAQEBBQADggEPADCCAQoCggEBALyChb+NDA26GF1AfC0nzEdfOTchKw0h
|
||||
q41xEjonvg5UXgZf/aH/ntvugIkYP0MaFifNAjebOVVsemEVEtyWcUKTfBHKZGbZ
|
||||
ukTDwFIjfTccCfo6U/B2H7ZLzJIywl8DcUw9DypadeQBm8PS0VVR2ncy73dvaqym
|
||||
crhAwlASyXU0mhLqRDMMxfg5Bn/FWpcsIcDpLmPn8Q/FvdRc2t5ryBNw/aWOlwqT
|
||||
Oy16nbfLj2T0zG1A3aPuD+eT/JFUe/o3K7R+FAx7wt+RziQO46wLVVF1SueZUrIU
|
||||
zqN04Gl8Kt1WM2SniZ0gq/rORUNcPtT0NAEsEslTQfA+Trq6j2peqyMCAwEAAaOB
|
||||
yjCBxzAOBgNVHQ8BAf8EBAMCBaAwHQYDVR0lBBYwFAYIKwYBBQUHAwEGCCsGAQUF
|
||||
BwMCMAwGA1UdEwEB/wQCMAAwHQYDVR0OBBYEFHj1mwZzP//nMIH2i58NRUl/arHn
|
||||
MB8GA1UdIwQYMBaAFF5DVAKabvIUvKFHGouscA2Qdpe6MDEGCCsGAQUFBwEBBCUw
|
||||
IzAhBggrBgEFBQcwAYYVaHR0cDovLzEyNy4wLjAuMTo0MDAyMBUGA1UdEQQOMAyH
|
||||
BMAAApGHBMsAcQEwDQYJKoZIhvcNAQELBQADggEBAHjSgDg76/UCIMSYddyhj18r
|
||||
LdNKjA7p8ovnErSkebFT4lIZ9f3Sma9moNr0w64M33NamuFyHe/KTdk90mvoW8Uu
|
||||
26aDekiRIeeMakzbAtDKn67tt2tbedKIYRATcSYVwsV46uZKbM621dZKIjjxOWpo
|
||||
IY6rZYrku8LYhoXJXOqRduV3cTRVuTm5bBa9FfVNtt6N1T5JOtKKDEhuSaF4RSug
|
||||
PDy3hQIiHrVvhPfVrXU3j6owz/8UCS5549inES9ONTFrvM9o0H1R/MsmGNXR5hF5
|
||||
iJqHKC7n8LZujhVnoFIpHu2Dsiefbfr+yRYJS4I+ezy6Nq/Ok8rc8zp0eoX+uyY=
|
||||
-----END CERTIFICATE-----
|
||||
acme/acme/_internal/tests/testdata/cert-ipv6sans.pem (new vendored file, 22 lines)
@@ -0,0 +1,22 @@
|
||||
-----BEGIN CERTIFICATE-----
|
||||
MIIDmzCCAoOgAwIBAgIIFdxeZP+v2rgwDQYJKoZIhvcNAQELBQAwKDEmMCQGA1UE
|
||||
AxMdUGViYmxlIEludGVybWVkaWF0ZSBDQSA0M2M5NTcwHhcNMjAwNTMwMDQwNzMw
|
||||
WhcNMjUwNTMwMDQwNzMwWjAOMQwwCgYDVQQDEwM6OjEwggEiMA0GCSqGSIb3DQEB
|
||||
AQUAA4IBDwAwggEKAoIBAQC7VidVduJvqKtrSH0fw6PjE0cqL4Kfzo7klexWUkHG
|
||||
KVAa0fRVZFZ462jxKOt417V2U4WJQ6WHHO9PJ+3gW62d/MhCw8FRtUQS4nYFjqB6
|
||||
32+RFU21VRN7cWoQEqSwnEPbh/v/zv/KS5JhQ+swWUo79AOLm1kjnZWCKtcqh1Lc
|
||||
Ug5Tkpot6luoxTKp52MkchvXDpj0q2B/XpLJ8/pw5cqjv7mH12EDOK2HXllA+WwX
|
||||
ZpstcEhaA4FqtaHOW/OHnwTX5MUbINXE5YYHVEDR6moVM31/W/3pe9NDUMTDE7Si
|
||||
lVQnZbXM9NYbzZqlh+WhemDWwnIfGI6rtsfNEiirVEOlAgMBAAGjgeIwgd8wDgYD
|
||||
VR0PAQH/BAQDAgWgMB0GA1UdJQQWMBQGCCsGAQUFBwMBBggrBgEFBQcDAjAMBgNV
|
||||
HRMBAf8EAjAAMB0GA1UdDgQWBBS8DL+MZfDIy6AKky69Tgry2Vxq5DAfBgNVHSME
|
||||
GDAWgBRAsFqVenRRKgB1YPzWKzb9bzZ/ozAxBggrBgEFBQcBAQQlMCMwIQYIKwYB
|
||||
BQUHMAGGFWh0dHA6Ly8xMjcuMC4wLjE6NDAwMjAtBgNVHREEJjAkhxAAAAAAAAAA
|
||||
AAAAAAAAAAABhxCjvjLzIG7HXQlWDO6YWF7FMA0GCSqGSIb3DQEBCwUAA4IBAQBY
|
||||
M9UTZ3uaKMQ+He9kWR3p9jh6hTSD0FNi79ZdfkG0lgSzhhduhN7OhzQH2ihUUfa6
|
||||
rtKTw74fGbszhizCd9UB8YPKlm3si1Xbg6ZUQlA1RtoQo7RUGEa6ZbR68PKGm9Go
|
||||
hTTFIl/JS8jzxBR8jywZdyqtprUx+nnNUDiNk0hJtFLhw7OJH0AHlAUNqHsfD08m
|
||||
HXRdaV6q14HXU5g31slBat9H4D6tCU/2uqBURwW0wVdnqh4QeRfAeqiatJS9EmSF
|
||||
ctbc7n894Idy2Xce7NFoIy5cht3m6Rd42o/LmBsJopBmQcDPZT70/XzRtc2qE0cS
|
||||
CzBIGQHUJ6BfmBjrCQnp
|
||||
-----END CERTIFICATE-----
|
||||
acme/acme/_internal/tests/testdata/csr-ipsans.pem (new vendored file, 16 lines)
@@ -0,0 +1,16 @@
|
||||
-----BEGIN CERTIFICATE REQUEST-----
|
||||
MIICbTCCAVUCAQIwADCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAKT/
|
||||
CE7Y5EYBvI4p7Frt763upIKHDHO/R5/TWMjG8Jm9qTMui8sbMgyh2Yh+lR/j/5Xd
|
||||
tQrhgC6wx10MrW2+3JtYS88HP1p6si8zU1dbK34n3NyyklR2RivW0R7dXgnYNy7t
|
||||
5YcDYLCrbRMIPINV/uHrmzIHWYUDNcZVdAfIM2AHfKYuV6Mepcn///5GR+l4GcAh
|
||||
Nkf9CW8OdAIuKdbyLCxVr0mUW/vJz1b12uxPsgUdax9sjXgZdT4pfMXADsFd1NeF
|
||||
atpsXU073inqtHru+2F9ijHTQ75TC+u/rr6eYl3BnBntac0gp/ADtDBii7/Q1JOO
|
||||
Bhq7xJNqqxIEdiyM7zcCAwEAAaAoMCYGCSqGSIb3DQEJDjEZMBcwFQYDVR0RBA4w
|
||||
DIcEwAACkYcEywBxATANBgkqhkiG9w0BAQsFAAOCAQEADG5g3zdbSCaXpZhWHkzE
|
||||
Mek3f442TUE1pB+ITRpthmM4N3zZWETYmbLCIAO624uMrRnbCCMvAoLs/L/9ETg/
|
||||
XMMFtonQC8u9i9tV8B1ceBh8lpIfa+8b9TMWH3bqnrbWQ+YIl+Yd0gXiCZWJ9vK4
|
||||
eM1Gddu/2bR6s/k4h/XAWRgEexqk57EHr1z0N+T9OoX939n3mVcNI+u9kfd5VJ0z
|
||||
VyA3R8WR6T6KlEl5P5pcWe5Kuyhi7xMmLVImXqBtvKq4O1AMfM+gQr/yn9aE8IRq
|
||||
khP7JrMBLUIub1c/qu2TfvnynNPSM/ZcOX+6PHdHmRkR3nI0Ndpv7Ntv31FTplAm
|
||||
Dw==
|
||||
-----END CERTIFICATE REQUEST-----
|
||||
acme/acme/_internal/tests/testdata/csr-ipv6sans.pem (new vendored file, 16 lines)
@@ -0,0 +1,16 @@
|
||||
-----BEGIN CERTIFICATE REQUEST-----
|
||||
MIIChTCCAW0CAQIwADCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAOIc
|
||||
UAppcqJfTkSqqOFqGt1v7lIJZPOcF4bcKI3d5cHAGbOuVxbC7uMaDuObwYLzoiED
|
||||
qnvs1NaEq2phO6KsgGESB7IE2LUjJivO7OnSZjNRpL5si/9egvBiNCn/50lULaWG
|
||||
gLEuyMfk3awZy2mVAymy7Grhbx069A4TH8TqsHuq2RpKyuDL27e/jUt6yYecb3pu
|
||||
hWMiWy3segif4tI46pkOW0/I6DpxyYD2OqOvzxm/voS9RMqE2+7YJA327H7bEi3N
|
||||
lJZEZ1zy7clZ9ga5fBQaetzbg2RyxTrZ7F919NQXSFoXgxb10Eg64wIpz0L3ooCm
|
||||
GEHehsZZexa3J5ccIvMCAwEAAaBAMD4GCSqGSIb3DQEJDjExMC8wLQYDVR0RBCYw
|
||||
JIcQAAAAAAAAAAAAAAAAAAAAAYcQo74y8yBux10JVgzumFhexTANBgkqhkiG9w0B
|
||||
AQsFAAOCAQEALvwVn0A/JPTCiNzcozHFnp5M23C9PXCplWc5u4k34d4XXzpSeFDz
|
||||
fL4gy7NpYIueme2K2ppw2j3PNQUdR6vQ5a75sriegWYrosL+7Q6Joh51ZyEUZQoD
|
||||
mNl4M4S4oX85EaChR6NFGBywTfjFarYi32XBTbFE7rK8N8KM+DQkNdwL1MXqaHWz
|
||||
F1obQKpNXlLedbCBOteV5Eg4zG3565zu/Gw/NhwzzV3mQmgxUcd1sMJxAfHQz4Vl
|
||||
ImLL+xMcR03nDsH2bgtDbK2tJm7WszSxA9tC+Xp2lRewxrnQloRWPYDz177WGQ5Q
|
||||
SoGDzTTtA6uWZxG8h7CkNLOGvA8LtU2rNA==
|
||||
-----END CERTIFICATE REQUEST-----
|
||||
acme/acme/_internal/tests/testdata/csr-mixed.pem (new vendored file, 16 lines)
@@ -0,0 +1,16 @@
|
||||
-----BEGIN CERTIFICATE REQUEST-----
|
||||
MIICdjCCAV4CAQIwADCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMXq
|
||||
v1y8EIcCbaUIzCtOcLkLS0MJ35oS+6DmV5WB1A0cIk6YrjsHIsY2lwMm13BWIvmw
|
||||
tY+Y6n0rr7eViNx5ZRGHpHEI/TL3Neb+VefTydL5CgvK3dd4ex2kSbTaed3fmpOx
|
||||
qMajEduwNcZPCcmoEXPkfrCP8w2vKQUkQ+JRPcdX1nTuzticeRP5B7YCmJsmxkEh
|
||||
Y0tzzZ+NIRDARoYNofefY86h3e5q66gtJxccNchmIM3YQahhg5n3Xoo8hGfM/TIc
|
||||
R7ncCBCLO6vtqo0QFva/NQODrgOmOsmgvqPkUWQFdZfWM8yIaU826dktx0CPB78t
|
||||
TudnJ1rBRvGsjHMsZikCAwEAAaAxMC8GCSqGSIb3DQEJDjEiMCAwHgYDVR0RBBcw
|
||||
FYINYS5leGVtcGxlLmNvbYcEwAACbzANBgkqhkiG9w0BAQsFAAOCAQEAdGMcRCxq
|
||||
1X09gn1TNdMt64XUv+wdJCKDaJ+AgyIJj7QvVw8H5k7dOnxS4I+a/yo4jE+LDl2/
|
||||
AuHcBLFEI4ddewdJSMrTNZjuRYuOdr3KP7fL7MffICSBi45vw5EOXg0tnjJCEiKu
|
||||
6gcJgbLSP5JMMd7Haf33Q/VWsmHofR3VwOMdrnakwAU3Ff5WTuXTNVhL1kT/uLFX
|
||||
yW1ru6BF4unwNqSR2UeulljpNfRBsiN4zJK11W6n9KT0NkBr9zY5WCM4sW7i8k9V
|
||||
TeypWGo3jBKzYAGeuxZsB97U77jZ2lrGdBLZKfbcjnTeRVqCvCRrui4El7UGYFmj
|
||||
7s6OJyWx5DSV8w==
|
||||
-----END CERTIFICATE REQUEST-----
|
||||
acme/acme/_internal/tests/testdata/ec_secp384r1_key.pem (new vendored file, 6 lines)
@@ -0,0 +1,6 @@
|
||||
-----BEGIN PRIVATE KEY-----
|
||||
MIG2AgEAMBAGByqGSM49AgEGBSuBBAAiBIGeMIGbAgEBBDArTn0pbFk3xHfKeXte
|
||||
xJgS4JVdJQ8mqvezhaNpULZPnwb+mcKLlrj6f5SRM52yREGhZANiAAQcrMoPMVqV
|
||||
rHnDGGz5HUKLNmXfChlNgsrwsruawXF+M283CA6eckAjTXNyiC/ounWmvtoKsZG0
|
||||
2UQOfQUNSCANId/r986yRGc03W6RJSkcRp86qBYjNsLgbZpber/3+M4=
|
||||
-----END PRIVATE KEY-----
|
||||
acme/acme/_internal/tests/util_test.py (new file, 16 lines)
@@ -0,0 +1,16 @@
"""Tests for acme.util."""
import sys
import unittest

import pytest


def test_it():
from acme.util import map_keys
assert {'a': 'b', 'c': 'd'} == \
map_keys({'a': 'b', 'c': 'd'}, lambda key: key)
assert {2: 2, 4: 4} == map_keys({1: 2, 3: 4}, lambda x: x + 1)


if __name__ == '__main__':
sys.exit(pytest.main(sys.argv[1:] + [__file__]))  # pragma: no cover
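A minimal sketch of what map_keys plausibly does (illustrative, not copied from acme.util): apply a function to every key while leaving the values alone.

def map_keys(dikt, func):
    # Build a new dict whose keys are func(key); the values are unchanged.
    return {func(key): value for key, value in dikt.items()}

assert map_keys({1: 2, 3: 4}, lambda x: x + 1) == {2: 2, 4: 4}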
@@ -5,43 +5,56 @@ import functools
|
||||
import hashlib
|
||||
import logging
|
||||
import socket
|
||||
from typing import Any
|
||||
from typing import cast
|
||||
from typing import Dict
|
||||
from typing import Mapping
|
||||
from typing import Optional
|
||||
from typing import Tuple
|
||||
from typing import Type
|
||||
from typing import TypeVar
|
||||
from typing import Union
|
||||
|
||||
from cryptography.hazmat.primitives import hashes # type: ignore
|
||||
from cryptography.hazmat.primitives import hashes
|
||||
import josepy as jose
|
||||
from OpenSSL import crypto
|
||||
from OpenSSL import SSL # type: ignore # https://github.com/python/typeshed/issues/2052
|
||||
from OpenSSL import SSL
|
||||
import requests
|
||||
|
||||
from acme import crypto_util
|
||||
from acme import errors
|
||||
from acme import fields
|
||||
from acme.mixins import ResourceMixin
|
||||
from acme.mixins import TypeMixin
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
GenericChallenge = TypeVar('GenericChallenge', bound='Challenge')
|
||||
|
||||
|
||||
class Challenge(jose.TypedJSONObjectWithFields):
|
||||
# _fields_to_partial_json
|
||||
"""ACME challenge."""
|
||||
TYPES: dict = {}
|
||||
TYPES: Dict[str, Type['Challenge']] = {}
|
||||
|
||||
@classmethod
|
||||
def from_json(cls, jobj):
|
||||
def from_json(cls: Type[GenericChallenge],
|
||||
jobj: Mapping[str, Any]) -> Union[GenericChallenge, 'UnrecognizedChallenge']:
|
||||
try:
|
||||
return super(Challenge, cls).from_json(jobj)
|
||||
return cast(GenericChallenge, super().from_json(jobj))
|
||||
except jose.UnrecognizedTypeError as error:
|
||||
logger.debug(error)
|
||||
return UnrecognizedChallenge.from_json(jobj)
|
||||
|
||||
|
||||
class ChallengeResponse(ResourceMixin, TypeMixin, jose.TypedJSONObjectWithFields):
|
||||
class ChallengeResponse(jose.TypedJSONObjectWithFields):
|
||||
# _fields_to_partial_json
|
||||
"""ACME challenge response."""
|
||||
TYPES: dict = {}
|
||||
resource_type = 'challenge'
|
||||
resource = fields.Resource(resource_type)
|
||||
TYPES: Dict[str, Type['ChallengeResponse']] = {}
|
||||
|
||||
def to_partial_json(self) -> Dict[str, Any]:
|
||||
# Removes the `type` field which is inserted by TypedJSONObjectWithFields.to_partial_json.
|
||||
# This field breaks RFC8555 compliance.
|
||||
jobj = super().to_partial_json()
|
||||
jobj.pop(self.type_field_name, None)
|
||||
return jobj
|
||||
|
||||
|
||||
class UnrecognizedChallenge(Challenge):
|
||||
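The annotated Challenge.from_json above keeps the existing behaviour: an unknown "type" falls back to UnrecognizedChallenge instead of raising. A hedged usage sketch (the JSON object is a made-up example):

# Illustrative sketch, not part of the diff: unknown challenge types deserialize
# to UnrecognizedChallenge, preserving the original JSON object.
from acme import challenges

jobj = {"type": "made-up-01", "token": "evaGxfADs6pSRb2LAv9IZf17Dt3juxGJ-PCt92wr-oA"}
chall = challenges.Challenge.from_json(jobj)
assert isinstance(chall, challenges.UnrecognizedChallenge)
assert chall.to_partial_json() == jobj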
@@ -56,16 +69,17 @@ class UnrecognizedChallenge(Challenge):
|
||||
:ivar jobj: Original JSON decoded object.
|
||||
|
||||
"""
|
||||
jobj: Dict[str, Any]
|
||||
|
||||
def __init__(self, jobj):
|
||||
super(UnrecognizedChallenge, self).__init__()
|
||||
def __init__(self, jobj: Mapping[str, Any]) -> None:
|
||||
super().__init__()
|
||||
object.__setattr__(self, "jobj", jobj)
|
||||
|
||||
def to_partial_json(self):
|
||||
def to_partial_json(self) -> Dict[str, Any]:
|
||||
return self.jobj # pylint: disable=no-member
|
||||
|
||||
@classmethod
|
||||
def from_json(cls, jobj):
|
||||
def from_json(cls, jobj: Mapping[str, Any]) -> 'UnrecognizedChallenge':
|
||||
return cls(jobj)
|
||||
|
||||
|
||||
@@ -79,13 +93,13 @@ class _TokenChallenge(Challenge):
|
||||
"""Minimum size of the :attr:`token` in bytes."""
|
||||
|
||||
# TODO: acme-spec doesn't specify token as base64-encoded value
|
||||
token = jose.Field(
|
||||
token: bytes = jose.field(
|
||||
"token", encoder=jose.encode_b64jose, decoder=functools.partial(
|
||||
jose.decode_b64jose, size=TOKEN_SIZE, minimum=True))
|
||||
|
||||
# XXX: rename to ~token_good_for_url
|
||||
@property
|
||||
def good_token(self): # XXX: @token.decoder
|
||||
def good_token(self) -> bool: # XXX: @token.decoder
|
||||
"""Is `token` good?
|
||||
|
||||
.. todo:: acme-spec wants "It MUST NOT contain any non-ASCII
|
||||
@@ -102,13 +116,13 @@ class _TokenChallenge(Challenge):
|
||||
class KeyAuthorizationChallengeResponse(ChallengeResponse):
|
||||
"""Response to Challenges based on Key Authorization.
|
||||
|
||||
:param unicode key_authorization:
|
||||
:param str key_authorization:
|
||||
|
||||
"""
|
||||
key_authorization = jose.Field("keyAuthorization")
|
||||
key_authorization: str = jose.field("keyAuthorization")
|
||||
thumbprint_hash_function = hashes.SHA256
|
||||
|
||||
def verify(self, chall, account_public_key):
|
||||
def verify(self, chall: 'KeyAuthorizationChallenge', account_public_key: jose.JWK) -> bool:
|
||||
"""Verify the key authorization.
|
||||
|
||||
:param KeyAuthorization chall: Challenge that corresponds to
|
||||
@@ -120,7 +134,7 @@ class KeyAuthorizationChallengeResponse(ChallengeResponse):
|
||||
:rtype: bool
|
||||
|
||||
"""
|
||||
parts = self.key_authorization.split('.')
|
||||
parts = self.key_authorization.split('.') # pylint: disable=no-member
|
||||
if len(parts) != 2:
|
||||
logger.debug("Key authorization (%r) is not well formed",
|
||||
self.key_authorization)
|
||||
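For context, the key authorization that verify() above splits on "." is the challenge token joined to the base64url-encoded SHA-256 thumbprint of the account JWK. A hedged sketch of that construction with josepy (the token value is made up):

# Illustrative sketch, not part of the diff: the "token.thumbprint" string that
# KeyAuthorizationChallengeResponse.verify parses and checks.
import josepy as jose
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import rsa

account_key = jose.JWKRSA(key=rsa.generate_private_key(public_exponent=65537, key_size=2048))
token_b64 = "evaGxfADs6pSRb2LAv9IZf17Dt3juxGJ-PCt92wr-oA"  # base64url-encoded challenge token
thumbprint = jose.b64encode(account_key.thumbprint(hash_function=hashes.SHA256)).decode()
key_authorization = token_b64 + "." + thumbprint
assert len(key_authorization.split(".")) == 2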
@@ -140,12 +154,15 @@ class KeyAuthorizationChallengeResponse(ChallengeResponse):
|
||||
|
||||
return True
|
||||
|
||||
def to_partial_json(self):
|
||||
jobj = super(KeyAuthorizationChallengeResponse, self).to_partial_json()
|
||||
def to_partial_json(self) -> Dict[str, Any]:
|
||||
jobj = super().to_partial_json()
|
||||
jobj.pop('keyAuthorization', None)
|
||||
return jobj
|
||||
|
||||
|
||||
# TODO: Make this method a generic of K (bound=KeyAuthorizationChallenge), response_cls of type
|
||||
# Type[K] and use it in response/response_and_validation return types once Python 3.6 support is
|
||||
# dropped (do not support generic ABC classes, see https://github.com/python/typing/issues/449).
|
||||
class KeyAuthorizationChallenge(_TokenChallenge, metaclass=abc.ABCMeta):
|
||||
"""Challenge based on Key Authorization.
|
||||
|
||||
@@ -158,18 +175,18 @@ class KeyAuthorizationChallenge(_TokenChallenge, metaclass=abc.ABCMeta):
|
||||
thumbprint_hash_function = (
|
||||
KeyAuthorizationChallengeResponse.thumbprint_hash_function)
|
||||
|
||||
def key_authorization(self, account_key):
|
||||
def key_authorization(self, account_key: jose.JWK) -> str:
|
||||
"""Generate Key Authorization.
|
||||
|
||||
:param JWK account_key:
|
||||
:rtype unicode:
|
||||
:rtype str:
|
||||
|
||||
"""
|
||||
return self.encode("token") + "." + jose.b64encode(
|
||||
account_key.thumbprint(
|
||||
hash_function=self.thumbprint_hash_function)).decode()
|
||||
|
||||
def response(self, account_key):
|
||||
def response(self, account_key: jose.JWK) -> KeyAuthorizationChallengeResponse:
|
||||
"""Generate response to the challenge.
|
||||
|
||||
:param JWK account_key:
|
||||
@@ -182,7 +199,7 @@ class KeyAuthorizationChallenge(_TokenChallenge, metaclass=abc.ABCMeta):
|
||||
key_authorization=self.key_authorization(account_key))
|
||||
|
||||
@abc.abstractmethod
|
||||
def validation(self, account_key, **kwargs):
|
||||
def validation(self, account_key: jose.JWK, **kwargs: Any) -> Any:
|
||||
"""Generate validation for the challenge.
|
||||
|
||||
Subclasses must implement this method, but they are likely to
|
||||
@@ -196,7 +213,8 @@ class KeyAuthorizationChallenge(_TokenChallenge, metaclass=abc.ABCMeta):
|
||||
"""
|
||||
raise NotImplementedError() # pragma: no cover
|
||||
|
||||
def response_and_validation(self, account_key, *args, **kwargs):
|
||||
def response_and_validation(self, account_key: jose.JWK, *args: Any, **kwargs: Any
|
||||
) -> Tuple[KeyAuthorizationChallengeResponse, Any]:
|
||||
"""Generate response and validation.
|
||||
|
||||
Convenience function that return results of `response` and
|
||||
@@ -215,14 +233,14 @@ class DNS01Response(KeyAuthorizationChallengeResponse):
|
||||
"""ACME dns-01 challenge response."""
|
||||
typ = "dns-01"
|
||||
|
||||
def simple_verify(self, chall, domain, account_public_key): # pylint: disable=unused-argument
|
||||
def simple_verify(self, chall: 'DNS01', domain: str, account_public_key: jose.JWK) -> bool: # pylint: disable=unused-argument
|
||||
"""Simple verify.
|
||||
|
||||
This method no longer checks DNS records and is a simple wrapper
|
||||
around `KeyAuthorizationChallengeResponse.verify`.
|
||||
|
||||
:param challenges.DNS01 chall: Corresponding challenge.
|
||||
:param unicode domain: Domain name being verified.
|
||||
:param str domain: Domain name being verified.
|
||||
:param JWK account_public_key: Public key for the key pair
|
||||
being authorized.
|
||||
|
||||
@@ -246,23 +264,24 @@ class DNS01(KeyAuthorizationChallenge):
|
||||
LABEL = "_acme-challenge"
|
||||
"""Label clients prepend to the domain name being validated."""
|
||||
|
||||
def validation(self, account_key, **unused_kwargs):
|
||||
def validation(self, account_key: jose.JWK, **unused_kwargs: Any) -> str:
|
||||
"""Generate validation.
|
||||
|
||||
:param JWK account_key:
|
||||
:rtype: unicode
|
||||
:rtype: str
|
||||
|
||||
"""
|
||||
return jose.b64encode(hashlib.sha256(self.key_authorization(
|
||||
account_key).encode("utf-8")).digest()).decode()
|
||||
|
||||
def validation_domain_name(self, name):
|
||||
def validation_domain_name(self, name: str) -> str:
|
||||
"""Domain name for TXT validation record.
|
||||
|
||||
:param unicode name: Domain name being validated.
|
||||
:param str name: Domain name being validated.
|
||||
:rtype: str
|
||||
|
||||
"""
|
||||
return "{0}.{1}".format(self.LABEL, name)
|
||||
return f"{self.LABEL}.{name}"
|
||||
|
||||
|
||||
@ChallengeResponse.register
|
||||
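The DNS01.validation shown above is the base64url-encoded SHA-256 digest of the key authorization, published at _acme-challenge.<domain>. A standard-library sketch of that computation (the key authorization is a made-up example):

# Illustrative sketch, not part of the diff: computing the TXT record name and
# value that DNS01.validation and DNS01.validation_domain_name produce.
import base64
import hashlib

key_authorization = "evaGxfADs6pSRb2LAv9IZf17Dt3juxGJ-PCt92wr-oA.9jg46WB3rR_AHD-EBXdN7cBkH1WOu0tA3M9fm21mqTI"
digest = hashlib.sha256(key_authorization.encode("utf-8")).digest()
txt_value = base64.urlsafe_b64encode(digest).rstrip(b"=").decode()  # jose.b64encode drops padding
txt_name = "_acme-challenge.example.com"
print(txt_name, "TXT", txt_value)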
@@ -281,14 +300,16 @@ class HTTP01Response(KeyAuthorizationChallengeResponse):
|
||||
WHITESPACE_CUTSET = "\n\r\t "
|
||||
"""Whitespace characters which should be ignored at the end of the body."""
|
||||
|
||||
def simple_verify(self, chall, domain, account_public_key, port=None):
|
||||
def simple_verify(self, chall: 'HTTP01', domain: str, account_public_key: jose.JWK,
|
||||
port: Optional[int] = None, timeout: int = 30) -> bool:
|
||||
"""Simple verify.
|
||||
|
||||
:param challenges.SimpleHTTP chall: Corresponding challenge.
|
||||
:param unicode domain: Domain name being verified.
|
||||
:param str domain: Domain name being verified.
|
||||
:param JWK account_public_key: Public key for the key pair
|
||||
being authorized.
|
||||
:param int port: Port used in the validation.
|
||||
:param int timeout: Timeout in seconds.
|
||||
|
||||
:returns: ``True`` iff validation with the files currently served by the
|
||||
HTTP server is successful.
|
||||
@@ -310,10 +331,19 @@ class HTTP01Response(KeyAuthorizationChallengeResponse):
|
||||
uri = chall.uri(domain)
|
||||
logger.debug("Verifying %s at %s...", chall.typ, uri)
|
||||
try:
|
||||
http_response = requests.get(uri, verify=False)
|
||||
http_response = requests.get(uri, verify=False, timeout=timeout)
|
||||
except requests.exceptions.RequestException as error:
|
||||
logger.error("Unable to reach %s: %s", uri, error)
|
||||
return False
|
||||
# By default, http_response.text will try to guess the encoding to use
|
||||
# when decoding the response to Python unicode strings. This guesswork
|
||||
# is error prone. RFC 8555 specifies that HTTP-01 responses should be
|
||||
# key authorizations with possible trailing whitespace. Since key
|
||||
# authorizations must be composed entirely of the base64url alphabet
|
||||
# plus ".", we tell requests that the response should be ASCII. See
|
||||
# https://datatracker.ietf.org/doc/html/rfc8555#section-8.3 for more
|
||||
# info.
|
||||
http_response.encoding = "ascii"
|
||||
logger.debug("Received %s: %s. Headers: %s", http_response,
|
||||
http_response.text, http_response.headers)
|
||||
|
||||
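The hunk above threads a new timeout parameter through to requests.get and forces ASCII decoding of the body. A hedged sketch of the fetch-and-compare step simple_verify performs (the URL and expected value are made up):

# Illustrative sketch, not part of the diff: fetching the provisioned file with
# a timeout and comparing it to the expected key authorization.
import requests

uri = "http://example.com/.well-known/acme-challenge/evaGxfADs6pSRb2LAv9IZf17Dt3juxGJ-PCt92wr-oA"
expected = "evaGxfADs6pSRb2LAv9IZf17Dt3juxGJ-PCt92wr-oA.9jg46WB3rR_AHD-EBXdN7cBkH1WOu0tA3M9fm21mqTI"
resp = requests.get(uri, verify=False, timeout=30)
resp.encoding = "ascii"  # key authorizations are base64url characters plus ".", so ASCII is safe
matches = resp.text.rstrip("\n\r\t ") == expected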
@@ -337,31 +367,31 @@ class HTTP01(KeyAuthorizationChallenge):
|
||||
"""URI root path for the server provisioned resource."""
|
||||
|
||||
@property
|
||||
def path(self):
|
||||
def path(self) -> str:
|
||||
"""Path (starting with '/') for provisioned resource.
|
||||
|
||||
:rtype: string
|
||||
:rtype: str
|
||||
|
||||
"""
|
||||
return '/' + self.URI_ROOT_PATH + '/' + self.encode('token')
|
||||
|
||||
def uri(self, domain):
|
||||
def uri(self, domain: str) -> str:
|
||||
"""Create an URI to the provisioned resource.
|
||||
|
||||
Forms a URI to the HTTPS server provisioned resource
|
||||
(containing :attr:`~SimpleHTTP.token`).
|
||||
|
||||
:param unicode domain: Domain name being verified.
|
||||
:rtype: string
|
||||
:param str domain: Domain name being verified.
|
||||
:rtype: str
|
||||
|
||||
"""
|
||||
return "http://" + domain + self.path
|
||||
|
||||
def validation(self, account_key, **unused_kwargs):
|
||||
def validation(self, account_key: jose.JWK, **unused_kwargs: Any) -> str:
|
||||
"""Generate validation.
|
||||
|
||||
:param JWK account_key:
|
||||
:rtype: unicode
|
||||
:rtype: str
|
||||
|
||||
"""
|
||||
return self.key_authorization(account_key)
|
||||
@@ -381,17 +411,18 @@ class TLSALPN01Response(KeyAuthorizationChallengeResponse):
|
||||
"""
|
||||
|
||||
ID_PE_ACME_IDENTIFIER_V1 = b"1.3.6.1.5.5.7.1.30.1"
|
||||
ACME_TLS_1_PROTOCOL = "acme-tls/1"
|
||||
ACME_TLS_1_PROTOCOL = b"acme-tls/1"
|
||||
|
||||
@property
|
||||
def h(self):
|
||||
def h(self) -> bytes:
|
||||
"""Hash value stored in challenge certificate"""
|
||||
return hashlib.sha256(self.key_authorization.encode('utf-8')).digest()
|
||||
|
||||
def gen_cert(self, domain, key=None, bits=2048):
|
||||
def gen_cert(self, domain: str, key: Optional[crypto.PKey] = None, bits: int = 2048
|
||||
) -> Tuple[crypto.X509, crypto.PKey]:
|
||||
"""Generate tls-alpn-01 certificate.
|
||||
|
||||
:param unicode domain: Domain verified by the challenge.
|
||||
:param str domain: Domain verified by the challenge.
|
||||
:param OpenSSL.crypto.PKey key: Optional private key used in
|
||||
certificate generation. If not provided (``None``), then
|
||||
fresh key will be generated.
|
||||
@@ -404,19 +435,19 @@ class TLSALPN01Response(KeyAuthorizationChallengeResponse):
|
||||
key = crypto.PKey()
|
||||
key.generate_key(crypto.TYPE_RSA, bits)
|
||||
|
||||
|
||||
der_value = b"DER:" + codecs.encode(self.h, 'hex')
|
||||
acme_extension = crypto.X509Extension(self.ID_PE_ACME_IDENTIFIER_V1,
|
||||
critical=True, value=der_value)
|
||||
critical=True, value=der_value)
|
||||
|
||||
return crypto_util.gen_ss_cert(key, [domain], force_san=True,
|
||||
extensions=[acme_extension]), key
|
||||
extensions=[acme_extension]), key
|
||||
|
||||
def probe_cert(self, domain, host=None, port=None):
|
||||
def probe_cert(self, domain: str, host: Optional[str] = None,
|
||||
port: Optional[int] = None) -> crypto.X509:
|
||||
"""Probe tls-alpn-01 challenge certificate.
|
||||
|
||||
:param unicode domain: domain being validated, required.
|
||||
:param string host: IP address used to probe the certificate.
|
||||
:param str domain: domain being validated, required.
|
||||
:param str host: IP address used to probe the certificate.
|
||||
:param int port: Port used to probe the certificate.
|
||||
|
||||
"""
|
||||
@@ -426,13 +457,13 @@ class TLSALPN01Response(KeyAuthorizationChallengeResponse):
|
||||
if port is None:
|
||||
port = self.PORT
|
||||
|
||||
return crypto_util.probe_sni(host=host, port=port, name=domain,
|
||||
alpn_protocols=[self.ACME_TLS_1_PROTOCOL])
|
||||
return crypto_util.probe_sni(host=host.encode(), port=port, name=domain.encode(),
|
||||
alpn_protocols=[self.ACME_TLS_1_PROTOCOL])
|
||||
|
||||
def verify_cert(self, domain, cert):
|
||||
def verify_cert(self, domain: str, cert: crypto.X509) -> bool:
|
||||
"""Verify tls-alpn-01 challenge certificate.
|
||||
|
||||
:param unicode domain: Domain name being validated.
|
||||
:param str domain: Domain name being validated.
|
||||
:param OpenSSL.crypto.X509 cert: Challenge certificate.
|
||||
|
||||
:returns: Whether the certificate was successfully verified.
|
||||
@@ -441,7 +472,10 @@ class TLSALPN01Response(KeyAuthorizationChallengeResponse):
|
||||
"""
|
||||
# pylint: disable=protected-access
|
||||
names = crypto_util._pyopenssl_cert_or_req_all_names(cert)
|
||||
logger.debug('Certificate %s. SANs: %s', cert.digest('sha256'), names)
|
||||
# Type ignore needed due to
|
||||
# https://github.com/pyca/pyopenssl/issues/730.
|
||||
logger.debug('Certificate %s. SANs: %s',
|
||||
cert.digest('sha256'), names)
|
||||
if len(names) != 1 or names[0].lower() != domain.lower():
|
||||
return False
|
||||
|
||||
@@ -456,8 +490,9 @@ class TLSALPN01Response(KeyAuthorizationChallengeResponse):
|
||||
return False
|
||||
|
||||
# pylint: disable=too-many-arguments
|
||||
def simple_verify(self, chall, domain, account_public_key,
|
||||
cert=None, host=None, port=None):
|
||||
def simple_verify(self, chall: 'TLSALPN01', domain: str, account_public_key: jose.JWK,
|
||||
cert: Optional[crypto.X509] = None, host: Optional[str] = None,
|
||||
port: Optional[int] = None) -> bool:
|
||||
"""Simple verify.
|
||||
|
||||
Verify ``validation`` using ``account_public_key``, optionally
|
||||
@@ -497,11 +532,11 @@ class TLSALPN01(KeyAuthorizationChallenge):
|
||||
response_cls = TLSALPN01Response
|
||||
typ = response_cls.typ
|
||||
|
||||
def validation(self, account_key, **kwargs):
|
||||
def validation(self, account_key: jose.JWK, **kwargs: Any) -> Tuple[crypto.X509, crypto.PKey]:
|
||||
"""Generate validation.
|
||||
|
||||
:param JWK account_key:
|
||||
:param unicode domain: Domain verified by the challenge.
|
||||
:param str domain: Domain verified by the challenge.
|
||||
:param OpenSSL.crypto.PKey cert_key: Optional private key used
|
||||
in certificate generation. If not provided (``None``), then
|
||||
fresh key will be generated.
|
||||
@@ -509,12 +544,13 @@ class TLSALPN01(KeyAuthorizationChallenge):
|
||||
:rtype: `tuple` of `OpenSSL.crypto.X509` and `OpenSSL.crypto.PKey`
|
||||
|
||||
"""
|
||||
return self.response(account_key).gen_cert(
|
||||
# TODO: Remove cast when response() is generic.
|
||||
return cast(TLSALPN01Response, self.response(account_key)).gen_cert(
|
||||
key=kwargs.get('cert_key'),
|
||||
domain=kwargs.get('domain'))
|
||||
domain=cast(str, kwargs.get('domain')))
|
||||
|
||||
@staticmethod
|
||||
def is_supported():
|
||||
def is_supported() -> bool:
|
||||
"""
|
||||
Check if TLS-ALPN-01 challenge is supported on this machine.
|
||||
This implies that a recent version of OpenSSL is installed (>= 1.0.2),
|
||||
@@ -536,7 +572,8 @@ class DNS(_TokenChallenge):
|
||||
LABEL = "_acme-challenge"
|
||||
"""Label clients prepend to the domain name being validated."""
|
||||
|
||||
def gen_validation(self, account_key, alg=jose.RS256, **kwargs):
|
||||
def gen_validation(self, account_key: jose.JWK, alg: jose.JWASignature = jose.RS256,
|
||||
**kwargs: Any) -> jose.JWS:
|
||||
"""Generate validation.
|
||||
|
||||
:param .JWK account_key: Private account key.
|
||||
@@ -550,7 +587,7 @@ class DNS(_TokenChallenge):
|
||||
payload=self.json_dumps(sort_keys=True).encode('utf-8'),
|
||||
key=account_key, alg=alg, **kwargs)
|
||||
|
||||
def check_validation(self, validation, account_public_key):
|
||||
def check_validation(self, validation: jose.JWS, account_public_key: jose.JWK) -> bool:
|
||||
"""Check validation.
|
||||
|
||||
:param JWS validation:
|
||||
@@ -567,7 +604,7 @@ class DNS(_TokenChallenge):
|
||||
logger.debug("Checking validation for DNS failed: %s", error)
|
||||
return False
|
||||
|
||||
def gen_response(self, account_key, **kwargs):
|
||||
def gen_response(self, account_key: jose.JWK, **kwargs: Any) -> 'DNSResponse':
|
||||
"""Generate response.
|
||||
|
||||
:param .JWK account_key: Private account key.
|
||||
@@ -576,13 +613,12 @@ class DNS(_TokenChallenge):
|
||||
:rtype: DNSResponse
|
||||
|
||||
"""
|
||||
return DNSResponse(validation=self.gen_validation(
|
||||
account_key, **kwargs))
|
||||
return DNSResponse(validation=self.gen_validation(account_key, **kwargs))
|
||||
|
||||
def validation_domain_name(self, name):
|
||||
def validation_domain_name(self, name: str) -> str:
|
||||
"""Domain name for TXT validation record.
|
||||
|
||||
:param unicode name: Domain name being validated.
|
||||
:param str name: Domain name being validated.
|
||||
|
||||
"""
|
||||
return "{0}.{1}".format(self.LABEL, name)
|
||||
@@ -597,9 +633,9 @@ class DNSResponse(ChallengeResponse):
|
||||
"""
|
||||
typ = "dns"
|
||||
|
||||
validation = jose.Field("validation", decoder=jose.JWS.from_json)
|
||||
validation: jose.JWS = jose.field("validation", decoder=jose.JWS.from_json)
|
||||
|
||||
def check_validation(self, chall, account_public_key):
|
||||
def check_validation(self, chall: 'DNS', account_public_key: jose.JWK) -> bool:
|
||||
"""Check validation.
|
||||
|
||||
:param challenges.DNS chall:
|
||||
|
||||
1180
acme/acme/client.py
File diff suppressed because it is too large
@@ -1,17 +1,24 @@
|
||||
"""Crypto utilities."""
|
||||
import binascii
|
||||
import contextlib
|
||||
import ipaddress
|
||||
import logging
|
||||
import os
|
||||
import re
|
||||
import socket
|
||||
from typing import Any
|
||||
from typing import Callable
|
||||
from typing import List
|
||||
from typing import Mapping
|
||||
from typing import Optional
|
||||
from typing import Sequence
|
||||
from typing import Set
|
||||
from typing import Tuple
|
||||
from typing import Union
|
||||
|
||||
import josepy as jose
|
||||
from OpenSSL import crypto
|
||||
from OpenSSL import SSL # type: ignore # https://github.com/python/typeshed/issues/2052
|
||||
from OpenSSL import SSL
|
||||
|
||||
from acme import errors
|
||||
|
||||
@@ -24,16 +31,18 @@ logger = logging.getLogger(__name__)
|
||||
# https://www.openssl.org/docs/ssl/SSLv23_method.html). _serve_sni
|
||||
# should be changed to use "set_options" to disable SSLv2 and SSLv3,
|
||||
# in case it's used for things other than probing/serving!
|
||||
_DEFAULT_SSL_METHOD = SSL.SSLv23_METHOD # type: ignore
|
||||
_DEFAULT_SSL_METHOD = SSL.SSLv23_METHOD
|
||||
|
||||
|
||||
class _DefaultCertSelection:
|
||||
def __init__(self, certs):
|
||||
def __init__(self, certs: Mapping[bytes, Tuple[crypto.PKey, crypto.X509]]):
|
||||
self.certs = certs
|
||||
|
||||
def __call__(self, connection):
|
||||
def __call__(self, connection: SSL.Connection) -> Optional[Tuple[crypto.PKey, crypto.X509]]:
|
||||
server_name = connection.get_servername()
|
||||
return self.certs.get(server_name, None)
|
||||
if server_name:
|
||||
return self.certs.get(server_name, None)
|
||||
return None # pragma: no cover
|
||||
|
||||
|
||||
class SSLSocket: # pylint: disable=too-few-public-methods
|
||||
@@ -49,9 +58,14 @@ class SSLSocket: # pylint: disable=too-few-public-methods
|
||||
`certs` parameter would be ignored, and therefore must be empty.
|
||||
|
||||
"""
|
||||
def __init__(self, sock, certs=None,
|
||||
method=_DEFAULT_SSL_METHOD, alpn_selection=None,
|
||||
cert_selection=None):
|
||||
def __init__(self, sock: socket.socket,
|
||||
certs: Optional[Mapping[bytes, Tuple[crypto.PKey, crypto.X509]]] = None,
|
||||
method: int = _DEFAULT_SSL_METHOD,
|
||||
alpn_selection: Optional[Callable[[SSL.Connection, List[bytes]], bytes]] = None,
|
||||
cert_selection: Optional[Callable[[SSL.Connection],
|
||||
Optional[Tuple[crypto.PKey,
|
||||
crypto.X509]]]] = None
|
||||
) -> None:
|
||||
self.sock = sock
|
||||
self.alpn_selection = alpn_selection
|
||||
self.method = method
|
||||
@@ -59,14 +73,18 @@ class SSLSocket: # pylint: disable=too-few-public-methods
|
||||
raise ValueError("Neither cert_selection or certs specified.")
|
||||
if cert_selection and certs:
|
||||
raise ValueError("Both cert_selection and certs specified.")
|
||||
if cert_selection is None:
|
||||
cert_selection = _DefaultCertSelection(certs)
|
||||
self.cert_selection = cert_selection
|
||||
actual_cert_selection: Union[_DefaultCertSelection,
|
||||
Optional[Callable[[SSL.Connection],
|
||||
Optional[Tuple[crypto.PKey,
|
||||
crypto.X509]]]]] = cert_selection
|
||||
if actual_cert_selection is None:
|
||||
actual_cert_selection = _DefaultCertSelection(certs if certs else {})
|
||||
self.cert_selection = actual_cert_selection
|
||||
|
||||
def __getattr__(self, name):
|
||||
def __getattr__(self, name: str) -> Any:
|
||||
return getattr(self.sock, name)
|
||||
|
||||
def _pick_certificate_cb(self, connection):
|
||||
def _pick_certificate_cb(self, connection: SSL.Connection) -> None:
|
||||
"""SNI certificate callback.
|
||||
|
||||
This method will set a new OpenSSL context object for this
|
||||
@@ -98,43 +116,58 @@ class SSLSocket: # pylint: disable=too-few-public-methods
|
||||
|
||||
# pylint: disable=missing-function-docstring
|
||||
|
||||
def __init__(self, connection):
|
||||
def __init__(self, connection: SSL.Connection) -> None:
|
||||
self._wrapped = connection
|
||||
|
||||
def __getattr__(self, name):
|
||||
def __getattr__(self, name: str) -> Any:
|
||||
return getattr(self._wrapped, name)
|
||||
|
||||
def shutdown(self, *unused_args):
|
||||
def shutdown(self, *unused_args: Any) -> bool:
|
||||
# OpenSSL.SSL.Connection.shutdown doesn't accept any args
|
||||
return self._wrapped.shutdown()
|
||||
try:
|
||||
return self._wrapped.shutdown()
|
||||
except SSL.Error as error:
|
||||
# We wrap the error so we raise the same error type as sockets
|
||||
# in the standard library. This is useful when this object is
|
||||
# used by code which expects a standard socket such as
|
||||
# socketserver in the standard library.
|
||||
raise socket.error(error)
|
||||
|
||||
def accept(self): # pylint: disable=missing-function-docstring
|
||||
def accept(self) -> Tuple[FakeConnection, Any]: # pylint: disable=missing-function-docstring
|
||||
sock, addr = self.sock.accept()
|
||||
|
||||
context = SSL.Context(self.method)
|
||||
context.set_options(SSL.OP_NO_SSLv2)
|
||||
context.set_options(SSL.OP_NO_SSLv3)
|
||||
context.set_tlsext_servername_callback(self._pick_certificate_cb)
|
||||
if self.alpn_selection is not None:
|
||||
context.set_alpn_select_callback(self.alpn_selection)
|
||||
|
||||
ssl_sock = self.FakeConnection(SSL.Connection(context, sock))
|
||||
ssl_sock.set_accept_state()
|
||||
|
||||
logger.debug("Performing handshake with %s", addr)
|
||||
try:
|
||||
ssl_sock.do_handshake()
|
||||
except SSL.Error as error:
|
||||
# _pick_certificate_cb might have returned without
|
||||
# creating SSL context (wrong server name)
|
||||
raise socket.error(error)
|
||||
context = SSL.Context(self.method)
|
||||
context.set_options(SSL.OP_NO_SSLv2)
|
||||
context.set_options(SSL.OP_NO_SSLv3)
|
||||
context.set_tlsext_servername_callback(self._pick_certificate_cb)
|
||||
if self.alpn_selection is not None:
|
||||
context.set_alpn_select_callback(self.alpn_selection)
|
||||
|
||||
return ssl_sock, addr
|
||||
ssl_sock = self.FakeConnection(SSL.Connection(context, sock))
|
||||
ssl_sock.set_accept_state()
|
||||
|
||||
# This log line is especially desirable because without it requests to
|
||||
# our standalone TLSALPN server would not be logged.
|
||||
logger.debug("Performing handshake with %s", addr)
|
||||
try:
|
||||
ssl_sock.do_handshake()
|
||||
except SSL.Error as error:
|
||||
# _pick_certificate_cb might have returned without
|
||||
# creating SSL context (wrong server name)
|
||||
raise socket.error(error)
|
||||
|
||||
return ssl_sock, addr
|
||||
except:
|
||||
# If we encounter any error, close the new socket before reraising
|
||||
# the exception.
|
||||
sock.close()
|
||||
raise
|
||||
|
||||
|
||||
def probe_sni(name, host, port=443, timeout=300, # pylint: disable=too-many-arguments
|
||||
method=_DEFAULT_SSL_METHOD, source_address=('', 0),
|
||||
alpn_protocols=None):
|
||||
def probe_sni(name: bytes, host: bytes, port: int = 443, timeout: int = 300, # pylint: disable=too-many-arguments
|
||||
method: int = _DEFAULT_SSL_METHOD, source_address: Tuple[str, int] = ('', 0),
|
||||
alpn_protocols: Optional[Sequence[bytes]] = None) -> crypto.X509:
|
||||
"""Probe SNI server for SSL certificate.
|
||||
|
||||
:param bytes name: Byte string to send as the server name in the
|
||||
@@ -147,7 +180,7 @@ def probe_sni(name, host, port=443, timeout=300, # pylint: disable=too-many-argu
|
||||
of source interface). See `socket.create_connection` for more
|
||||
info. Available only in Python 2.7+.
|
||||
:param alpn_protocols: Protocols to request using ALPN.
|
||||
:type alpn_protocols: `list` of `bytes`
|
||||
:type alpn_protocols: `Sequence` of `bytes`
|
||||
|
||||
:raises acme.errors.Error: In case of any problems.
|
||||
|
||||
@@ -168,8 +201,8 @@ def probe_sni(name, host, port=443, timeout=300, # pylint: disable=too-many-argu
|
||||
source_address[1]
|
||||
) if any(source_address) else ""
|
||||
)
|
||||
socket_tuple: Tuple[str, int] = (host, port)
|
||||
sock = socket.create_connection(socket_tuple, **socket_kwargs) # type: ignore
|
||||
socket_tuple: Tuple[bytes, int] = (host, port)
|
||||
sock = socket.create_connection(socket_tuple, **socket_kwargs) # type: ignore[arg-type]
|
||||
except socket.error as error:
|
||||
raise errors.Error(error)
|
||||
|
||||
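With the annotations above, probe_sni now takes the SNI name and host as bytes and optionally a list of ALPN protocols, also as bytes. A hedged usage sketch (host and name are made-up values):

# Illustrative sketch, not part of the diff: probing a server certificate over
# SNI with the bytes-typed probe_sni signature shown above.
from acme import crypto_util

cert = crypto_util.probe_sni(
    name=b"example.com",          # SNI name, as bytes
    host=b"203.0.113.10",         # address to connect to, as bytes
    port=443,
    alpn_protocols=[b"acme-tls/1"],
)
print(cert.digest("sha256"))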
@@ -184,26 +217,50 @@ def probe_sni(name, host, port=443, timeout=300, # pylint: disable=too-many-argu
|
||||
client_ssl.shutdown()
|
||||
except SSL.Error as error:
|
||||
raise errors.Error(error)
|
||||
return client_ssl.get_peer_certificate()
|
||||
cert = client_ssl.get_peer_certificate()
|
||||
assert cert # Appease mypy. We would have crashed out by now if there was no certificate.
|
||||
return cert
|
||||
|
||||
|
||||
def make_csr(private_key_pem, domains, must_staple=False):
|
||||
"""Generate a CSR containing a list of domains as subjectAltNames.
|
||||
def make_csr(private_key_pem: bytes, domains: Optional[Union[Set[str], List[str]]] = None,
|
||||
must_staple: bool = False,
|
||||
ipaddrs: Optional[List[Union[ipaddress.IPv4Address, ipaddress.IPv6Address]]] = None
|
||||
) -> bytes:
|
||||
"""Generate a CSR containing domains or IPs as subjectAltNames.
|
||||
|
||||
:param buffer private_key_pem: Private key, in PEM PKCS#8 format.
|
||||
:param list domains: List of DNS names to include in subjectAltNames of CSR.
|
||||
:param bool must_staple: Whether to include the TLS Feature extension (aka
|
||||
OCSP Must Staple: https://tools.ietf.org/html/rfc7633).
|
||||
:param list ipaddrs: List of IP address objects (ipaddress.IPv4Address or ipaddress.IPv6Address)
|
||||
to include in the subjectAltNames of the CSR.
|
||||
Parameters are ordered this way for backward compatibility when called with positional arguments.
|
||||
:returns: buffer PEM-encoded Certificate Signing Request.
|
||||
"""
|
||||
private_key = crypto.load_privatekey(
|
||||
crypto.FILETYPE_PEM, private_key_pem)
|
||||
csr = crypto.X509Req()
|
||||
sanlist = []
|
||||
# If the domain or IP list is not supplied, default it to an empty list so it's easier to iterate
|
||||
if domains is None:
|
||||
domains = []
|
||||
if ipaddrs is None:
|
||||
ipaddrs = []
|
||||
if len(domains)+len(ipaddrs) == 0:
|
||||
raise ValueError("At least one of domains or ipaddrs parameter need to be not empty")
|
||||
for address in domains:
|
||||
sanlist.append('DNS:' + address)
|
||||
for ips in ipaddrs:
|
||||
sanlist.append('IP:' + ips.exploded)
|
||||
# make sure it's ASCII-encoded
|
||||
san_string = ', '.join(sanlist).encode('ascii')
|
||||
# For an IP SAN the value actually needs to be an octet string,
|
||||
# but something downstream thankfully handles that for us
|
||||
extensions = [
|
||||
crypto.X509Extension(
|
||||
b'subjectAltName',
|
||||
critical=False,
|
||||
value=', '.join('DNS:' + d for d in domains).encode('ascii')
|
||||
value=san_string
|
||||
),
|
||||
]
|
||||
if must_staple:
|
||||
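The extended make_csr above accepts DNS names, IP addresses, or both, and raises if neither is given. A hedged usage sketch; the key is generated with the cryptography library here, but any PEM-encoded private key would do:

# Illustrative sketch, not part of the diff: building a CSR with both DNS and
# IP subjectAltNames via the extended make_csr.
import ipaddress
from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.primitives.asymmetric import rsa

from acme import crypto_util

key = rsa.generate_private_key(public_exponent=65537, key_size=2048)
key_pem = key.private_bytes(
    encoding=serialization.Encoding.PEM,
    format=serialization.PrivateFormat.PKCS8,
    encryption_algorithm=serialization.NoEncryption(),
)
csr_pem = crypto_util.make_csr(
    key_pem,
    domains=["example.com", "www.example.com"],
    ipaddrs=[ipaddress.ip_address("203.0.113.10")],
)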
@@ -213,13 +270,16 @@ def make_csr(private_key_pem, domains, must_staple=False):
|
||||
value=b"DER:30:03:02:01:05"))
|
||||
csr.add_extensions(extensions)
|
||||
csr.set_pubkey(private_key)
|
||||
csr.set_version(2)
|
||||
# RFC 2986 Section 4.1 only defines version 0
|
||||
csr.set_version(0)
|
||||
csr.sign(private_key, 'sha256')
|
||||
return crypto.dump_certificate_request(
|
||||
crypto.FILETYPE_PEM, csr)
|
||||
|
||||
|
||||
def _pyopenssl_cert_or_req_all_names(loaded_cert_or_req):
|
||||
def _pyopenssl_cert_or_req_all_names(loaded_cert_or_req: Union[crypto.X509, crypto.X509Req]
|
||||
) -> List[str]:
|
||||
# Despite its name, this only outputs DNS names; other identifier types are ignored.
|
||||
common_name = loaded_cert_or_req.get_subject().CN
|
||||
sans = _pyopenssl_cert_or_req_san(loaded_cert_or_req)
|
||||
|
||||
@@ -228,7 +288,7 @@ def _pyopenssl_cert_or_req_all_names(loaded_cert_or_req):
|
||||
return [common_name] + [d for d in sans if d != common_name]
|
||||
|
||||
|
||||
def _pyopenssl_cert_or_req_san(cert_or_req):
|
||||
def _pyopenssl_cert_or_req_san(cert_or_req: Union[crypto.X509, crypto.X509Req]) -> List[str]:
|
||||
"""Get Subject Alternative Names from certificate or CSR using pyOpenSSL.
|
||||
|
||||
.. todo:: Implement directly in PyOpenSSL!
|
||||
@@ -239,47 +299,87 @@ def _pyopenssl_cert_or_req_san(cert_or_req):
|
||||
:param cert_or_req: Certificate or CSR.
|
||||
:type cert_or_req: `OpenSSL.crypto.X509` or `OpenSSL.crypto.X509Req`.
|
||||
|
||||
:returns: A list of Subject Alternative Names.
|
||||
:rtype: `list` of `unicode`
|
||||
:returns: A list of Subject Alternative Names that are DNS names.
|
||||
:rtype: `list` of `str`
|
||||
|
||||
"""
|
||||
# This function finds SANs by dumping the certificate/CSR to text and
|
||||
# searching for "X509v3 Subject Alternative Name" in the text. This method
|
||||
# is used to support PyOpenSSL version 0.13 where the
|
||||
# `_subjectAltNameString` and `get_extensions` methods are not available
|
||||
# for CSRs.
|
||||
# This function finds SANs that are DNS names
|
||||
|
||||
# constants based on PyOpenSSL certificate/CSR text dump
|
||||
part_separator = ":"
|
||||
parts_separator = ", "
|
||||
prefix = "DNS" + part_separator
|
||||
|
||||
if isinstance(cert_or_req, crypto.X509):
|
||||
# pylint: disable=line-too-long
|
||||
func: Union[Callable[[int, crypto.X509Req], bytes], Callable[[int, crypto.X509], bytes]] = crypto.dump_certificate
|
||||
else:
|
||||
func = crypto.dump_certificate_request
|
||||
text = func(crypto.FILETYPE_TEXT, cert_or_req).decode("utf-8")
|
||||
# WARNING: this function does not support multiple SANs extensions.
|
||||
# Multiple X509v3 extensions of the same type is disallowed by RFC 5280.
|
||||
match = re.search(r"X509v3 Subject Alternative Name:(?: critical)?\s*(.*)", text)
|
||||
# WARNING: this function assumes that no SAN can include
|
||||
# parts_separator, hence the split!
|
||||
sans_parts = [] if match is None else match.group(1).split(parts_separator)
|
||||
sans_parts = _pyopenssl_extract_san_list_raw(cert_or_req)
|
||||
|
||||
return [part.split(part_separator)[1]
|
||||
for part in sans_parts if part.startswith(prefix)]
|
||||
|
||||
|
||||
def gen_ss_cert(key, domains, not_before=None,
|
||||
validity=(7 * 24 * 60 * 60), force_san=True, extensions=None):
|
||||
def _pyopenssl_cert_or_req_san_ip(cert_or_req: Union[crypto.X509, crypto.X509Req]) -> List[str]:
|
||||
"""Get Subject Alternative Names IPs from certificate or CSR using pyOpenSSL.
|
||||
|
||||
:param cert_or_req: Certificate or CSR.
|
||||
:type cert_or_req: `OpenSSL.crypto.X509` or `OpenSSL.crypto.X509Req`.
|
||||
|
||||
:returns: A list of Subject Alternative Names that are IP Addresses.
|
||||
:rtype: `list` of `str`. Note that the addresses are returned as strings, not ipaddress objects.
|
||||
|
||||
"""
|
||||
|
||||
# constants based on PyOpenSSL certificate/CSR text dump
|
||||
part_separator = ":"
|
||||
prefix = "IP Address" + part_separator
|
||||
|
||||
sans_parts = _pyopenssl_extract_san_list_raw(cert_or_req)
|
||||
|
||||
return [part[len(prefix):] for part in sans_parts if part.startswith(prefix)]
|
||||
|
||||
|
||||
def _pyopenssl_extract_san_list_raw(cert_or_req: Union[crypto.X509, crypto.X509Req]) -> List[str]:
|
||||
"""Get raw SAN string from cert or csr, parse it as UTF-8 and return.
|
||||
|
||||
:param cert_or_req: Certificate or CSR.
|
||||
:type cert_or_req: `OpenSSL.crypto.X509` or `OpenSSL.crypto.X509Req`.
|
||||
|
||||
:returns: Raw SAN strings, with the bytes decoded as UTF-8.
|
||||
:rtype: `list` of `str`
|
||||
|
||||
"""
|
||||
# This function finds SANs by dumping the certificate/CSR to text and
|
||||
# searching for "X509v3 Subject Alternative Name" in the text. This method
|
||||
# is used because in PyOpenSSL versions <0.17 the `_subjectAltNameString` method is
|
||||
# not able to parse IP addresses in the subjectAltName string.
|
||||
|
||||
if isinstance(cert_or_req, crypto.X509):
|
||||
# pylint: disable=line-too-long
|
||||
text = crypto.dump_certificate(crypto.FILETYPE_TEXT, cert_or_req).decode('utf-8')
|
||||
else:
|
||||
text = crypto.dump_certificate_request(crypto.FILETYPE_TEXT, cert_or_req).decode('utf-8')
|
||||
# WARNING: this function does not support multiple SANs extensions.
|
||||
# Multiple X509v3 extensions of the same type are disallowed by RFC 5280.
|
||||
raw_san = re.search(r"X509v3 Subject Alternative Name:(?: critical)?\s*(.*)", text)
|
||||
|
||||
parts_separator = ", "
|
||||
# WARNING: this function assumes that no SAN can include
|
||||
# parts_separator, hence the split!
|
||||
sans_parts = [] if raw_san is None else raw_san.group(1).split(parts_separator)
|
||||
return sans_parts
|
||||
|
||||
|
||||
def gen_ss_cert(key: crypto.PKey, domains: Optional[List[str]] = None,
|
||||
not_before: Optional[int] = None,
|
||||
validity: int = (7 * 24 * 60 * 60), force_san: bool = True,
|
||||
extensions: Optional[List[crypto.X509Extension]] = None,
|
||||
ips: Optional[List[Union[ipaddress.IPv4Address, ipaddress.IPv6Address]]] = None
|
||||
) -> crypto.X509:
|
||||
"""Generate new self-signed certificate.
|
||||
|
||||
:type domains: `list` of `unicode`
|
||||
:type domains: `list` of `str`
|
||||
:param OpenSSL.crypto.PKey key:
|
||||
:param bool force_san:
|
||||
:param extensions: List of additional extensions to include in the cert.
|
||||
:type extensions: `list` of `OpenSSL.crypto.X509Extension`
|
||||
:type ips: `list` of (`ipaddress.IPv4Address` or `ipaddress.IPv6Address`)
|
||||
|
||||
If more than one domain is provided, all of the domains are put into
|
||||
``subjectAltName`` X.509 extension and first domain is set as the
|
||||
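The helpers above still find SANs by dumping the certificate or CSR to text and parsing the "X509v3 Subject Alternative Name" line. A standalone sketch of that parsing applied to a made-up fragment of such a dump:

# Illustrative sketch, not part of the diff: the text-dump parsing that
# _pyopenssl_extract_san_list_raw and its callers rely on.
import re

text = """
        X509v3 Subject Alternative Name:
            DNS:example.com, DNS:www.example.com, IP Address:203.0.113.10
"""
match = re.search(r"X509v3 Subject Alternative Name:(?: critical)?\s*(.*)", text)
parts = [] if match is None else match.group(1).split(", ")
dns_names = [p.split(":")[1] for p in parts if p.startswith("DNS:")]
ips = [p[len("IP Address:"):] for p in parts if p.startswith("IP Address:")]
assert dns_names == ["example.com", "www.example.com"]
assert ips == ["203.0.113.10"]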
@@ -287,28 +387,39 @@ def gen_ss_cert(key, domains, not_before=None,
|
||||
extension is used, unless `force_san` is ``True``.
|
||||
|
||||
"""
|
||||
assert domains, "Must provide one or more hostnames for the cert."
|
||||
assert domains or ips, "Must provide one or more hostnames or IPs for the cert."
|
||||
|
||||
cert = crypto.X509()
|
||||
cert.set_serial_number(int(binascii.hexlify(os.urandom(16)), 16))
|
||||
cert.set_version(2)
|
||||
|
||||
if extensions is None:
|
||||
extensions = []
|
||||
|
||||
if domains is None:
|
||||
domains = []
|
||||
if ips is None:
|
||||
ips = []
|
||||
extensions.append(
|
||||
crypto.X509Extension(
|
||||
b"basicConstraints", True, b"CA:TRUE, pathlen:0"),
|
||||
)
|
||||
|
||||
cert.get_subject().CN = domains[0]
|
||||
if len(domains) > 0:
|
||||
cert.get_subject().CN = domains[0]
|
||||
# TODO: what to put into cert.get_subject()?
|
||||
cert.set_issuer(cert.get_subject())
|
||||
|
||||
if force_san or len(domains) > 1:
|
||||
sanlist = []
|
||||
for address in domains:
|
||||
sanlist.append('DNS:' + address)
|
||||
for ip in ips:
|
||||
sanlist.append('IP:' + ip.exploded)
|
||||
san_string = ', '.join(sanlist).encode('ascii')
|
||||
if force_san or len(domains) > 1 or len(ips) > 0:
|
||||
extensions.append(crypto.X509Extension(
|
||||
b"subjectAltName",
|
||||
critical=False,
|
||||
value=b", ".join(b"DNS:" + d.encode() for d in domains)
|
||||
value=san_string
|
||||
))
|
||||
|
||||
cert.add_extensions(extensions)
|
||||
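gen_ss_cert above now also accepts IP addresses and only requires that at least one name or IP be provided. A hedged usage sketch:

# Illustrative sketch, not part of the diff: a self-signed certificate covering
# a DNS name and an IP address via the extended gen_ss_cert.
import ipaddress
from OpenSSL import crypto

from acme import crypto_util

key = crypto.PKey()
key.generate_key(crypto.TYPE_RSA, 2048)
cert = crypto_util.gen_ss_cert(
    key,
    domains=["example.com"],
    ips=[ipaddress.ip_address("203.0.113.10")],
)
print(cert.digest("sha256"))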
@@ -321,7 +432,8 @@ def gen_ss_cert(key, domains, not_before=None,
|
||||
return cert
|
||||
|
||||
|
||||
def dump_pyopenssl_chain(chain, filetype=crypto.FILETYPE_PEM):
|
||||
def dump_pyopenssl_chain(chain: Union[List[jose.ComparableX509], List[crypto.X509]],
|
||||
filetype: int = crypto.FILETYPE_PEM) -> bytes:
|
||||
"""Dump certificate chain into a bundle.
|
||||
|
||||
:param list chain: List of `OpenSSL.crypto.X509` (or wrapped in
|
||||
@@ -334,8 +446,10 @@ def dump_pyopenssl_chain(chain, filetype=crypto.FILETYPE_PEM):
|
||||
# XXX: returns empty string when no chain is available, which
|
||||
# shuts up RenewableCert, but might not be the best solution...
|
||||
|
||||
def _dump_cert(cert):
|
||||
def _dump_cert(cert: Union[jose.ComparableX509, crypto.X509]) -> bytes:
|
||||
if isinstance(cert, jose.ComparableX509):
|
||||
if isinstance(cert.wrapped, crypto.X509Req):
|
||||
raise errors.Error("Unexpected CSR provided.") # pragma: no cover
|
||||
cert = cert.wrapped
|
||||
return crypto.dump_certificate(filetype, cert)
|
||||
|
||||
|
||||
@@ -1,5 +1,17 @@
|
||||
"""ACME errors."""
|
||||
import typing
|
||||
from typing import Any
|
||||
from typing import List
|
||||
from typing import Mapping
|
||||
from typing import Set
|
||||
|
||||
from josepy import errors as jose_errors
|
||||
import requests
|
||||
|
||||
# We import acme.messages only during type check to avoid circular dependencies. Type references
|
||||
# to acme.message.* must be quoted to be lazily initialized and avoid compilation errors.
|
||||
if typing.TYPE_CHECKING:
|
||||
from acme import messages # pragma: no cover
|
||||
|
||||
|
||||
class Error(Exception):
|
||||
@@ -28,12 +40,12 @@ class NonceError(ClientError):
|
||||
|
||||
class BadNonce(NonceError):
|
||||
"""Bad nonce error."""
|
||||
def __init__(self, nonce, error, *args):
|
||||
super(BadNonce, self).__init__(*args)
|
||||
def __init__(self, nonce: str, error: Exception, *args: Any) -> None:
|
||||
super().__init__(*args)
|
||||
self.nonce = nonce
|
||||
self.error = error
|
||||
|
||||
def __str__(self):
|
||||
def __str__(self) -> str:
|
||||
return 'Invalid nonce ({0!r}): {1}'.format(self.nonce, self.error)
|
||||
|
||||
|
||||
@@ -47,11 +59,11 @@ class MissingNonce(NonceError):
|
||||
:ivar requests.Response ~.response: HTTP Response
|
||||
|
||||
"""
|
||||
def __init__(self, response, *args):
|
||||
super(MissingNonce, self).__init__(*args)
|
||||
def __init__(self, response: requests.Response, *args: Any) -> None:
|
||||
super().__init__(*args)
|
||||
self.response = response
|
||||
|
||||
def __str__(self):
|
||||
def __str__(self) -> str:
|
||||
return ('Server {0} response did not include a replay '
|
||||
'nonce, headers: {1} (This may be a service outage)'.format(
|
||||
self.response.request.method, self.response.headers))
|
||||
@@ -69,17 +81,20 @@ class PollError(ClientError):
|
||||
to the most recently updated one
|
||||
|
||||
"""
|
||||
def __init__(self, exhausted, updated):
|
||||
def __init__(self, exhausted: Set['messages.AuthorizationResource'],
|
||||
updated: Mapping['messages.AuthorizationResource',
|
||||
'messages.AuthorizationResource']
|
||||
) -> None:
|
||||
self.exhausted = exhausted
|
||||
self.updated = updated
|
||||
super(PollError, self).__init__()
|
||||
super().__init__()
|
||||
|
||||
@property
|
||||
def timeout(self):
|
||||
def timeout(self) -> bool:
|
||||
"""Was the error caused by timeout?"""
|
||||
return bool(self.exhausted)
|
||||
|
||||
def __repr__(self):
|
||||
def __repr__(self) -> str:
|
||||
return '{0}(exhausted={1!r}, updated={2!r})'.format(
|
||||
self.__class__.__name__, self.exhausted, self.updated)
|
||||
|
||||
@@ -88,9 +103,9 @@ class ValidationError(Error):
|
||||
"""Error for authorization failures. Contains a list of authorization
|
||||
resources, each of which is invalid and should have an error field.
|
||||
"""
|
||||
def __init__(self, failed_authzrs):
|
||||
def __init__(self, failed_authzrs: List['messages.AuthorizationResource']) -> None:
|
||||
self.failed_authzrs = failed_authzrs
|
||||
super(ValidationError, self).__init__()
|
||||
super().__init__()
|
||||
|
||||
|
||||
class TimeoutError(Error): # pylint: disable=redefined-builtin
|
||||
@@ -100,13 +115,13 @@ class TimeoutError(Error): # pylint: disable=redefined-builtin
|
||||
class IssuanceError(Error):
|
||||
"""Error sent by the server after requesting issuance of a certificate."""
|
||||
|
||||
def __init__(self, error):
|
||||
def __init__(self, error: 'messages.Error') -> None:
|
||||
"""Initialize.
|
||||
|
||||
:param messages.Error error: The error provided by the server.
|
||||
"""
|
||||
self.error = error
|
||||
super(IssuanceError, self).__init__()
|
||||
super().__init__()
|
||||
|
||||
|
||||
class ConflictError(ClientError):
|
||||
@@ -117,9 +132,9 @@ class ConflictError(ClientError):
|
||||
|
||||
Also used in V2 of the ACME client for the same purpose.
|
||||
"""
|
||||
def __init__(self, location):
|
||||
def __init__(self, location: str) -> None:
|
||||
self.location = location
|
||||
super(ConflictError, self).__init__()
|
||||
super().__init__()
|
||||
|
||||
|
||||
class WildcardUnsupportedError(Error):
|
||||
|
||||
@@ -1,5 +1,7 @@
|
||||
"""ACME JSON fields."""
|
||||
import datetime
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
import josepy as jose
|
||||
import pyrfc3339
|
||||
@@ -10,17 +12,17 @@ logger = logging.getLogger(__name__)
|
||||
class Fixed(jose.Field):
|
||||
"""Fixed field."""
|
||||
|
||||
def __init__(self, json_name, value):
|
||||
def __init__(self, json_name: str, value: Any) -> None:
|
||||
self.value = value
|
||||
super(Fixed, self).__init__(
|
||||
super().__init__(
|
||||
json_name=json_name, default=value, omitempty=False)
|
||||
|
||||
def decode(self, value):
|
||||
def decode(self, value: Any) -> Any:
|
||||
if value != self.value:
|
||||
raise jose.DeserializationError('Expected {0!r}'.format(self.value))
|
||||
return self.value
|
||||
|
||||
def encode(self, value):
|
||||
def encode(self, value: Any) -> Any:
|
||||
if value != self.value:
|
||||
logger.warning(
|
||||
'Overriding fixed field (%s) with %r', self.json_name, value)
|
||||
@@ -32,33 +34,27 @@ class RFC3339Field(jose.Field):
|
||||
|
||||
Handles decoding/encoding between RFC3339 strings and aware (not
|
||||
naive) `datetime.datetime` objects
|
||||
(e.g. ``datetime.datetime.now(pytz.utc)``).
|
||||
(e.g. ``datetime.datetime.now(pytz.UTC)``).
|
||||
|
||||
"""
|
||||
|
||||
@classmethod
|
||||
def default_encoder(cls, value):
|
||||
def default_encoder(cls, value: datetime.datetime) -> str:
|
||||
return pyrfc3339.generate(value)
|
||||
|
||||
@classmethod
|
||||
def default_decoder(cls, value):
|
||||
def default_decoder(cls, value: str) -> datetime.datetime:
|
||||
try:
|
||||
return pyrfc3339.parse(value)
|
||||
except ValueError as error:
|
||||
raise jose.DeserializationError(error)
|
||||
|
||||
|
||||
class Resource(jose.Field):
|
||||
"""Resource MITM field."""
|
||||
def fixed(json_name: str, value: Any) -> Any:
|
||||
"""Generates a type-friendly Fixed field."""
|
||||
return Fixed(json_name, value)
|
||||
|
||||
def __init__(self, resource_type, *args, **kwargs):
|
||||
self.resource_type = resource_type
|
||||
super(Resource, self).__init__(
|
||||
'resource', default=resource_type, *args, **kwargs)
|
||||
|
||||
def decode(self, value):
|
||||
if value != self.resource_type:
|
||||
raise jose.DeserializationError(
|
||||
'Wrong resource type: {0} instead of {1}'.format(
|
||||
value, self.resource_type))
|
||||
return value
|
||||
def rfc3339(json_name: str, omitempty: bool = False) -> Any:
|
||||
"""Generates a type-friendly RFC3339 field."""
|
||||
return RFC3339Field(json_name, omitempty=omitempty)
|
||||
|
||||
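The new fields.fixed and fields.rfc3339 helpers exist so that field attributes can carry ordinary type annotations. A hedged sketch of how a josepy resource class would use them (the class itself is made up):

# Illustrative sketch, not part of the diff: annotated fields built with the
# new fields.fixed / fields.rfc3339 helpers.
import datetime

import josepy as jose

from acme import fields


class ExampleResource(jose.JSONObjectWithFields):
    """Hypothetical resource with a fixed field and an RFC 3339 timestamp."""
    resource: str = fields.fixed('resource', 'example')
    expires: datetime.datetime = fields.rfc3339('expires', omitempty=True)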
@@ -4,20 +4,22 @@ The JWS implementation in josepy only implements the base JOSE standard. In
|
||||
order to support the new header fields defined in ACME, this module defines some
|
||||
ACME-specific classes that layer on top of josepy.
|
||||
"""
|
||||
from typing import Optional
|
||||
|
||||
import josepy as jose
|
||||
|
||||
|
||||
class Header(jose.Header):
|
||||
"""ACME-specific JOSE Header. Implements nonce, kid, and url.
|
||||
"""
|
||||
nonce = jose.Field('nonce', omitempty=True, encoder=jose.encode_b64jose)
|
||||
kid = jose.Field('kid', omitempty=True)
|
||||
url = jose.Field('url', omitempty=True)
|
||||
nonce: Optional[bytes] = jose.field('nonce', omitempty=True, encoder=jose.encode_b64jose)
|
||||
kid: Optional[str] = jose.field('kid', omitempty=True)
|
||||
url: Optional[str] = jose.field('url', omitempty=True)
|
||||
|
||||
# Mypy does not understand the josepy magic happening here, and falsely claims
|
||||
# that nonce is redefined. Let's ignore the type check here.
|
||||
@nonce.decoder # type: ignore
|
||||
def nonce(value): # pylint: disable=no-self-argument,missing-function-docstring
|
||||
@nonce.decoder # type: ignore[no-redef,union-attr]
|
||||
def nonce(value: str) -> bytes: # type: ignore[misc] # pylint: disable=no-self-argument,missing-function-docstring
|
||||
try:
|
||||
return jose.decode_b64jose(value)
|
||||
except jose.DeserializationError as error:
|
||||
@@ -27,12 +29,12 @@ class Header(jose.Header):
|
||||
|
||||
class Signature(jose.Signature):
|
||||
"""ACME-specific Signature. Uses ACME-specific Header for customer fields."""
|
||||
__slots__ = jose.Signature._orig_slots # pylint: disable=no-member
|
||||
__slots__ = jose.Signature._orig_slots # pylint: disable=protected-access,no-member
|
||||
|
||||
# TODO: decoder/encoder should accept cls? Otherwise, subclassing
|
||||
# JSONObjectWithFields is tricky...
|
||||
header_cls = Header
|
||||
header = jose.Field(
|
||||
header: Header = jose.field(
|
||||
'header', omitempty=True, default=header_cls(),
|
||||
decoder=header_cls.from_json)
|
||||
|
||||
@@ -42,15 +44,16 @@ class Signature(jose.Signature):
|
||||
class JWS(jose.JWS):
|
||||
"""ACME-specific JWS. Includes none, url, and kid in protected header."""
|
||||
signature_cls = Signature
|
||||
__slots__ = jose.JWS._orig_slots
|
||||
__slots__ = jose.JWS._orig_slots # pylint: disable=protected-access
|
||||
|
||||
@classmethod
|
||||
# pylint: disable=arguments-differ
|
||||
def sign(cls, payload, key, alg, nonce, url=None, kid=None):
|
||||
# type: ignore[override] # pylint: disable=arguments-differ
|
||||
def sign(cls, payload: bytes, key: jose.JWK, alg: jose.JWASignature, nonce: Optional[bytes],
|
||||
url: Optional[str] = None, kid: Optional[str] = None) -> jose.JWS:
|
||||
# Per ACME spec, jwk and kid are mutually exclusive, so only include a
|
||||
# jwk field if kid is not provided.
|
||||
include_jwk = kid is None
|
||||
return super(JWS, cls).sign(payload, key=key, alg=alg,
|
||||
protect=frozenset(['nonce', 'url', 'kid', 'jwk', 'alg']),
|
||||
nonce=nonce, url=url, kid=kid,
|
||||
include_jwk=include_jwk)
|
||||
return super().sign(payload, key=key, alg=alg,
|
||||
protect=frozenset(['nonce', 'url', 'kid', 'jwk', 'alg']),
|
||||
nonce=nonce, url=url, kid=kid,
|
||||
include_jwk=include_jwk)
|
||||
|
||||
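The typed JWS.sign above still builds the ACME protected header (nonce, url, kid, and jwk only when kid is absent). A hedged usage sketch with made-up key, nonce, and URL:

# Illustrative sketch, not part of the diff: signing an ACME payload with the
# JWS.sign classmethod shown above.
import josepy as jose
from cryptography.hazmat.primitives.asymmetric import rsa

from acme import jws as acme_jws

account_key = jose.JWKRSA(key=rsa.generate_private_key(public_exponent=65537, key_size=2048))
signed = acme_jws.JWS.sign(
    payload=b'{"termsOfServiceAgreed": true}',
    key=account_key,
    alg=jose.RS256,
    nonce=b"example-nonce",
    url="https://acme.example/acme/new-account",
    kid=None,  # no account URL yet, so the full JWK goes into the protected header
)
print(signed.json_dumps())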
@@ -1,17 +0,0 @@
|
||||
"""Simple shim around the typing module.
|
||||
|
||||
This was useful when this code supported Python 2 and typing wasn't always
|
||||
available. This code is being kept for now for backwards compatibility.
|
||||
|
||||
"""
|
||||
import warnings
|
||||
from typing import * # pylint: disable=wildcard-import, unused-wildcard-import
|
||||
from typing import Collection, IO # type: ignore
|
||||
|
||||
warnings.warn("acme.magic_typing is deprecated and will be removed in a future release.",
|
||||
DeprecationWarning)
|
||||
|
||||
class TypingClass:
|
||||
"""Ignore import errors by getting anything"""
|
||||
def __getattr__(self, name):
|
||||
return None # pragma: no cover
|
||||
@@ -1,9 +1,17 @@
|
||||
"""ACME protocol messages."""
|
||||
from collections.abc import Hashable
|
||||
import datetime
|
||||
import json
|
||||
from typing import Any
|
||||
from typing import Dict
|
||||
from typing import Iterator
|
||||
from typing import List
|
||||
from typing import Mapping
|
||||
from typing import MutableMapping
|
||||
from typing import Optional
|
||||
from typing import Tuple
|
||||
from typing import Type
|
||||
from typing import TypeVar
|
||||
|
||||
import josepy as jose
|
||||
|
||||
@@ -12,9 +20,7 @@ from acme import errors
|
||||
from acme import fields
|
||||
from acme import jws
|
||||
from acme import util
|
||||
from acme.mixins import ResourceMixin
|
||||
|
||||
OLD_ERROR_PREFIX = "urn:acme:error:"
|
||||
ERROR_PREFIX = "urn:ietf:params:acme:error:"
|
||||
|
||||
ERROR_CODES = {
|
||||
@@ -50,39 +56,101 @@ ERROR_CODES = {
|
||||
'externalAccountRequired': 'The server requires external account binding',
|
||||
}
|
||||
|
||||
ERROR_TYPE_DESCRIPTIONS = dict(
|
||||
(ERROR_PREFIX + name, desc) for name, desc in ERROR_CODES.items())
|
||||
|
||||
ERROR_TYPE_DESCRIPTIONS.update(dict( # add errors with old prefix, deprecate me
|
||||
(OLD_ERROR_PREFIX + name, desc) for name, desc in ERROR_CODES.items()))
|
||||
ERROR_TYPE_DESCRIPTIONS = {**{
|
||||
ERROR_PREFIX + name: desc for name, desc in ERROR_CODES.items()
|
||||
}}
|
||||
|
||||
|
||||
def is_acme_error(err):
|
||||
def is_acme_error(err: BaseException) -> bool:
|
||||
"""Check if argument is an ACME error."""
|
||||
if isinstance(err, Error) and (err.typ is not None):
|
||||
return (ERROR_PREFIX in err.typ) or (OLD_ERROR_PREFIX in err.typ)
|
||||
return ERROR_PREFIX in err.typ
|
||||
return False
|
||||
|
||||
|
||||
class _Constant(jose.JSONDeSerializable, Hashable):
|
||||
"""ACME constant."""
|
||||
__slots__ = ('name',)
|
||||
POSSIBLE_NAMES: Dict[str, '_Constant'] = NotImplemented
|
||||
|
||||
def __init__(self, name: str) -> None:
|
||||
super().__init__()
|
||||
self.POSSIBLE_NAMES[name] = self # pylint: disable=unsupported-assignment-operation
|
||||
self.name = name
|
||||
|
||||
def to_partial_json(self) -> str:
|
||||
return self.name
|
||||
|
||||
@classmethod
|
||||
def from_json(cls, jobj: str) -> '_Constant':
|
||||
if jobj not in cls.POSSIBLE_NAMES: # pylint: disable=unsupported-membership-test
|
||||
raise jose.DeserializationError(f'{cls.__name__} not recognized')
|
||||
return cls.POSSIBLE_NAMES[jobj]
|
||||
|
||||
def __repr__(self) -> str:
|
||||
return f'{self.__class__.__name__}({self.name})'
|
||||
|
||||
def __eq__(self, other: Any) -> bool:
|
||||
return isinstance(other, type(self)) and other.name == self.name
|
||||
|
||||
def __hash__(self) -> int:
|
||||
return hash((self.__class__, self.name))
|
||||
|
||||
|
||||
class IdentifierType(_Constant):
|
||||
"""ACME identifier type."""
|
||||
POSSIBLE_NAMES: Dict[str, _Constant] = {}
|
||||
|
||||
|
||||
IDENTIFIER_FQDN = IdentifierType('dns') # IdentifierDNS in Boulder
|
||||
IDENTIFIER_IP = IdentifierType('ip') # IdentifierIP in pebble - not in Boulder yet
|
||||
|
||||
|
||||
class Identifier(jose.JSONObjectWithFields):
|
||||
"""ACME identifier.
|
||||
|
||||
:ivar IdentifierType typ:
|
||||
:ivar str value:
|
||||
|
||||
"""
|
||||
typ: IdentifierType = jose.field('type', decoder=IdentifierType.from_json)
|
||||
value: str = jose.field('value')
|
||||
|
||||
|
||||
class Error(jose.JSONObjectWithFields, errors.Error):
|
||||
"""ACME error.
|
||||
|
||||
https://tools.ietf.org/html/draft-ietf-appsawg-http-problem-00
|
||||
https://datatracker.ietf.org/doc/html/rfc7807
|
||||
|
||||
:ivar unicode typ:
|
||||
:ivar unicode title:
|
||||
:ivar unicode detail:
|
||||
Note: Although Error inherits from JSONObjectWithFields, which is immutable,
|
||||
we add mutability for Error to comply with the Python exception API.
|
||||
|
||||
:ivar str typ:
|
||||
:ivar str title:
|
||||
:ivar str detail:
|
||||
:ivar Identifier identifier:
|
||||
:ivar tuple subproblems: An array of ACME Errors which may be present when the CA
|
||||
returns multiple errors related to the same request, `tuple` of `Error`.
|
||||
|
||||
"""
|
||||
typ = jose.Field('type', omitempty=True, default='about:blank')
|
||||
title = jose.Field('title', omitempty=True)
|
||||
detail = jose.Field('detail', omitempty=True)
|
||||
typ: str = jose.field('type', omitempty=True, default='about:blank')
|
||||
title: str = jose.field('title', omitempty=True)
|
||||
detail: str = jose.field('detail', omitempty=True)
|
||||
identifier: Optional['Identifier'] = jose.field(
|
||||
'identifier', decoder=Identifier.from_json, omitempty=True)
|
||||
subproblems: Optional[Tuple['Error', ...]] = jose.field('subproblems', omitempty=True)
|
||||
|
||||
# Mypy does not understand the josepy magic happening here, and falsely claims
|
||||
# that subproblems is redefined. Let's ignore the type check here.
|
||||
@subproblems.decoder # type: ignore
|
||||
def subproblems(value: List[Dict[str, Any]]) -> Tuple['Error', ...]: # pylint: disable=no-self-argument,missing-function-docstring
|
||||
return tuple(Error.from_json(subproblem) for subproblem in value)
|
||||
|
||||
@classmethod
|
||||
def with_code(cls, code, **kwargs):
|
||||
def with_code(cls, code: str, **kwargs: Any) -> 'Error':
|
||||
"""Create an Error instance with an ACME Error code.
|
||||
|
||||
:unicode code: An ACME error code, like 'dnssec'.
|
||||
:str code: An ACME error code, like 'dnssec'.
|
||||
:kwargs: kwargs to pass to Error.
|
||||
|
||||
"""
|
||||
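The relocated _Constant, IdentifierType, and Identifier definitions above keep their JSON behaviour. A hedged round-trip sketch:

# Illustrative sketch, not part of the diff: serializing and deserializing an
# ACME identifier with the classes defined above.
import json

from acme import messages

ident = messages.Identifier(typ=messages.IDENTIFIER_FQDN, value="example.com")
serialized = ident.json_dumps()          # e.g. '{"type": "dns", "value": "example.com"}'
assert messages.Identifier.from_json(json.loads(serialized)) == ident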
@@ -92,73 +160,55 @@ class Error(jose.JSONObjectWithFields, errors.Error):
|
||||
typ = ERROR_PREFIX + code
|
||||
# Mypy will not understand that the Error constructor accepts a named argument
|
||||
# "typ" because of josepy magic. Let's ignore the type check here.
|
||||
return cls(typ=typ, **kwargs) # type: ignore
|
||||
return cls(typ=typ, **kwargs)
|
||||
|
||||
@property
|
||||
def description(self):
|
||||
def description(self) -> Optional[str]:
|
||||
"""Hardcoded error description based on its type.
|
||||
|
||||
:returns: Description if standard ACME error or ``None``.
|
||||
:rtype: unicode
|
||||
:rtype: str
|
||||
|
||||
"""
|
||||
return ERROR_TYPE_DESCRIPTIONS.get(self.typ)
|
||||
|
||||
@property
|
||||
def code(self):
|
||||
def code(self) -> Optional[str]:
|
||||
"""ACME error code.
|
||||
|
||||
Basically self.typ without the ERROR_PREFIX.
|
||||
|
||||
:returns: error code if standard ACME code or ``None``.
|
||||
:rtype: unicode
|
||||
:rtype: str
|
||||
|
||||
"""
|
||||
code = str(self.typ).split(':')[-1]
|
||||
code = str(self.typ).rsplit(':', maxsplit=1)[-1]
|
||||
if code in ERROR_CODES:
|
||||
return code
|
||||
return None
|
||||
|
||||
def __str__(self):
|
||||
return b' :: '.join(
|
||||
# Hack to allow mutability on Errors (see GH #9539)
|
||||
def __setattr__(self, name: str, value: Any) -> None:
|
||||
return object.__setattr__(self, name, value)
|
||||
|
||||
def __str__(self) -> str:
|
||||
result = b' :: '.join(
|
||||
part.encode('ascii', 'backslashreplace') for part in
|
||||
(self.typ, self.description, self.detail, self.title)
|
||||
if part is not None).decode()
|
||||
|
||||
|
||||
class _Constant(jose.JSONDeSerializable, Hashable): # type: ignore
|
||||
"""ACME constant."""
|
||||
__slots__ = ('name',)
|
||||
POSSIBLE_NAMES: Dict[str, '_Constant'] = NotImplemented
|
||||
|
||||
def __init__(self, name):
|
||||
super(_Constant, self).__init__()
|
||||
self.POSSIBLE_NAMES[name] = self # pylint: disable=unsupported-assignment-operation
|
||||
self.name = name
|
||||
|
||||
def to_partial_json(self):
|
||||
return self.name
|
||||
|
||||
@classmethod
|
||||
def from_json(cls, jobj):
|
||||
if jobj not in cls.POSSIBLE_NAMES: # pylint: disable=unsupported-membership-test
|
||||
raise jose.DeserializationError(
|
||||
'{0} not recognized'.format(cls.__name__))
|
||||
return cls.POSSIBLE_NAMES[jobj]
|
||||
|
||||
def __repr__(self):
|
||||
return '{0}({1})'.format(self.__class__.__name__, self.name)
|
||||
|
||||
def __eq__(self, other):
|
||||
return isinstance(other, type(self)) and other.name == self.name
|
||||
|
||||
def __hash__(self):
|
||||
return hash((self.__class__, self.name))
|
||||
if self.identifier:
|
||||
result = f'Problem for {self.identifier.value}: ' + result # pylint: disable=no-member
|
||||
if self.subproblems and len(self.subproblems) > 0:
|
||||
for subproblem in self.subproblems:
|
||||
result += f'\n{subproblem}'
|
||||
return result
|
||||
|
||||
|
||||
class Status(_Constant):
|
||||
"""ACME "status" field."""
|
||||
POSSIBLE_NAMES: dict = {}
|
||||
POSSIBLE_NAMES: Dict[str, _Constant] = {}
|
||||
|
||||
|
||||
STATUS_UNKNOWN = Status('unknown')
|
||||
STATUS_PENDING = Status('pending')
|
||||
STATUS_PROCESSING = Status('processing')
|
||||
@@ -169,90 +219,57 @@ STATUS_READY = Status('ready')
|
||||
STATUS_DEACTIVATED = Status('deactivated')
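# Tiny sketch (not part of the diff) of the round-trip _Constant provides:
# JSON strings map onto the singleton instances above and back again.
assert Status.from_json('pending') is STATUS_PENDING
assert STATUS_PENDING.to_partial_json() == 'pending'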
|
||||
|
||||
|
||||
class IdentifierType(_Constant):
|
||||
"""ACME identifier type."""
|
||||
POSSIBLE_NAMES: Dict[str, 'IdentifierType'] = {}
|
||||
IDENTIFIER_FQDN = IdentifierType('dns') # IdentifierDNS in Boulder
|
||||
|
||||
|
||||
class Identifier(jose.JSONObjectWithFields):
|
||||
"""ACME identifier.
|
||||
|
||||
:ivar IdentifierType typ:
|
||||
:ivar unicode value:
|
||||
|
||||
"""
|
||||
typ = jose.Field('type', decoder=IdentifierType.from_json)
|
||||
value = jose.Field('value')
|
||||
|
||||
|
||||
class Directory(jose.JSONDeSerializable):
|
||||
"""Directory."""
|
||||
"""Directory.
|
||||
|
||||
_REGISTERED_TYPES: Dict[str, Type[Any]] = {}
|
||||
Directory resources must be accessed by the exact field name in RFC8555 (section 9.7.5).
|
||||
"""
|
||||
|
||||
class Meta(jose.JSONObjectWithFields):
|
||||
"""Directory Meta."""
|
||||
_terms_of_service = jose.Field('terms-of-service', omitempty=True)
|
||||
_terms_of_service_v2 = jose.Field('termsOfService', omitempty=True)
|
||||
website = jose.Field('website', omitempty=True)
|
||||
caa_identities = jose.Field('caaIdentities', omitempty=True)
|
||||
external_account_required = jose.Field('externalAccountRequired', omitempty=True)
|
||||
_terms_of_service: str = jose.field('termsOfService', omitempty=True)
|
||||
website: str = jose.field('website', omitempty=True)
|
||||
caa_identities: List[str] = jose.field('caaIdentities', omitempty=True)
|
||||
external_account_required: bool = jose.field('externalAccountRequired', omitempty=True)
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
def __init__(self, **kwargs: Any) -> None:
|
||||
kwargs = {self._internal_name(k): v for k, v in kwargs.items()}
|
||||
super(Directory.Meta, self).__init__(**kwargs)
|
||||
super().__init__(**kwargs)
|
||||
|
||||
@property
|
||||
def terms_of_service(self):
|
||||
def terms_of_service(self) -> str:
|
||||
"""URL for the CA TOS"""
|
||||
return self._terms_of_service or self._terms_of_service_v2
|
||||
return self._terms_of_service
|
||||
|
||||
def __iter__(self):
|
||||
def __iter__(self) -> Iterator[str]:
|
||||
# When iterating over fields, use the external name 'terms_of_service' instead of
|
||||
# the internal '_terms_of_service'.
|
||||
for name in super(Directory.Meta, self).__iter__():
|
||||
for name in super().__iter__():
|
||||
yield name[1:] if name == '_terms_of_service' else name
|
||||
|
||||
def _internal_name(self, name):
|
||||
def _internal_name(self, name: str) -> str:
|
||||
return '_' + name if name == 'terms_of_service' else name
|
||||
|
||||
def __init__(self, jobj: Mapping[str, Any]) -> None:
|
||||
self._jobj = jobj
|
||||
|
||||
@classmethod
|
||||
def _canon_key(cls, key):
|
||||
return getattr(key, 'resource_type', key)
|
||||
|
||||
@classmethod
|
||||
def register(cls, resource_body_cls: Type[Any]) -> Type[Any]:
|
||||
"""Register resource."""
|
||||
resource_type = resource_body_cls.resource_type
|
||||
assert resource_type not in cls._REGISTERED_TYPES
|
||||
cls._REGISTERED_TYPES[resource_type] = resource_body_cls
|
||||
return resource_body_cls
|
||||
|
||||
def __init__(self, jobj):
|
||||
canon_jobj = util.map_keys(jobj, self._canon_key)
|
||||
# TODO: check that everything is an absolute URL; acme-spec is
|
||||
# not clear on that
|
||||
self._jobj = canon_jobj
|
||||
|
||||
def __getattr__(self, name):
|
||||
def __getattr__(self, name: str) -> Any:
|
||||
try:
|
||||
return self[name.replace('_', '-')]
|
||||
return self[name]
|
||||
except KeyError as error:
|
||||
raise AttributeError(str(error))
|
||||
|
||||
def __getitem__(self, name):
|
||||
def __getitem__(self, name: str) -> Any:
|
||||
try:
|
||||
return self._jobj[self._canon_key(name)]
|
||||
return self._jobj[name]
|
||||
except KeyError:
|
||||
raise KeyError('Directory field "' + self._canon_key(name) + '" not found')
|
||||
raise KeyError(f'Directory field "{name}" not found')
|
||||
|
||||
def to_partial_json(self):
|
||||
return self._jobj
|
||||
def to_partial_json(self) -> Dict[str, Any]:
|
||||
return util.map_keys(self._jobj, lambda k: k)
|
||||
|
||||
@classmethod
|
||||
def from_json(cls, jobj):
|
||||
def from_json(cls, jobj: MutableMapping[str, Any]) -> 'Directory':
|
||||
jobj['meta'] = cls.Meta.from_json(jobj.pop('meta', {}))
|
||||
return cls(jobj)
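# Hedged sketch (not part of the diff) of the exact-name access the class
# docstring describes: RFC 8555 field names are used verbatim for both
# attribute and item access. The URLs below are made up.
directory = Directory.from_json({
    'newNonce': 'https://example.com/acme/new-nonce',
    'newOrder': 'https://example.com/acme/new-order',
    'meta': {'termsOfService': 'https://example.com/acme/terms'},
})
assert directory.newOrder == 'https://example.com/acme/new-order'
assert directory['newNonce'] == 'https://example.com/acme/new-nonce'
assert directory.meta.terms_of_service == 'https://example.com/acme/terms'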
|
||||
|
||||
@@ -263,16 +280,16 @@ class Resource(jose.JSONObjectWithFields):
|
||||
:ivar acme.messages.ResourceBody body: Resource body.
|
||||
|
||||
"""
|
||||
body = jose.Field('body')
|
||||
body: "ResourceBody" = jose.field('body')
|
||||
|
||||
|
||||
class ResourceWithURI(Resource):
|
||||
"""ACME Resource with URI.
|
||||
|
||||
:ivar unicode ~.uri: Location of the resource.
|
||||
:ivar str uri: Location of the resource.
|
||||
|
||||
"""
|
||||
uri = jose.Field('uri') # no ChallengeResource.uri
|
||||
uri: str = jose.field('uri') # no ChallengeResource.uri
|
||||
|
||||
|
||||
class ResourceBody(jose.JSONObjectWithFields):
|
||||
@@ -283,7 +300,8 @@ class ExternalAccountBinding:
|
||||
"""ACME External Account Binding"""
|
||||
|
||||
@classmethod
|
||||
def from_data(cls, account_public_key, kid, hmac_key, directory):
|
||||
def from_data(cls, account_public_key: jose.JWK, kid: str, hmac_key: str,
|
||||
directory: Directory) -> Dict[str, Any]:
|
||||
"""Create External Account Binding Resource from contact details, kid and hmac."""
|
||||
|
||||
key_json = json.dumps(account_public_key.to_partial_json()).encode()
|
||||
@@ -297,33 +315,40 @@ class ExternalAccountBinding:
|
||||
return eab.to_partial_json()
|
||||
|
||||
|
||||
GenericRegistration = TypeVar('GenericRegistration', bound='Registration')
|
||||
|
||||
|
||||
class Registration(ResourceBody):
|
||||
"""Registration Resource Body.
|
||||
|
||||
:ivar josepy.jwk.JWK key: Public key.
|
||||
:ivar jose.JWK key: Public key.
|
||||
:ivar tuple contact: Contact information following ACME spec,
|
||||
`tuple` of `unicode`.
|
||||
:ivar unicode agreement:
|
||||
`tuple` of `str`.
|
||||
:ivar str agreement:
|
||||
|
||||
"""
|
||||
# on new-reg key server ignores 'key' and populates it based on
|
||||
# JWS.signature.combined.jwk
|
||||
key = jose.Field('key', omitempty=True, decoder=jose.JWK.from_json)
|
||||
key: jose.JWK = jose.field('key', omitempty=True, decoder=jose.JWK.from_json)
|
||||
# Contact field implements special behavior to allow messages that clear existing
|
||||
# contacts while not expecting the `contact` field when loading from json.
|
||||
# This is implemented in the constructor and *_json methods.
|
||||
contact = jose.Field('contact', omitempty=True, default=())
|
||||
agreement = jose.Field('agreement', omitempty=True)
|
||||
status = jose.Field('status', omitempty=True)
|
||||
terms_of_service_agreed = jose.Field('termsOfServiceAgreed', omitempty=True)
|
||||
only_return_existing = jose.Field('onlyReturnExisting', omitempty=True)
|
||||
external_account_binding = jose.Field('externalAccountBinding', omitempty=True)
|
||||
contact: Tuple[str, ...] = jose.field('contact', omitempty=True, default=())
|
||||
agreement: str = jose.field('agreement', omitempty=True)
|
||||
status: Status = jose.field('status', omitempty=True)
|
||||
terms_of_service_agreed: bool = jose.field('termsOfServiceAgreed', omitempty=True)
|
||||
only_return_existing: bool = jose.field('onlyReturnExisting', omitempty=True)
|
||||
external_account_binding: Dict[str, Any] = jose.field('externalAccountBinding',
|
||||
omitempty=True)
|
||||
|
||||
phone_prefix = 'tel:'
|
||||
email_prefix = 'mailto:'
|
||||
|
||||
@classmethod
|
||||
def from_data(cls, phone=None, email=None, external_account_binding=None, **kwargs):
|
||||
def from_data(cls: Type[GenericRegistration], phone: Optional[str] = None,
|
||||
email: Optional[str] = None,
|
||||
external_account_binding: Optional[Dict[str, Any]] = None,
|
||||
**kwargs: Any) -> GenericRegistration:
|
||||
"""
|
||||
Create registration resource from contact details.
|
||||
|
||||
@@ -352,19 +377,19 @@ class Registration(ResourceBody):
|
||||
|
||||
return cls(**kwargs)
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
def __init__(self, **kwargs: Any) -> None:
|
||||
"""Note if the user provides a value for the `contact` member."""
|
||||
if 'contact' in kwargs:
|
||||
if 'contact' in kwargs and kwargs['contact'] is not None:
|
||||
# Avoid the __setattr__ used by jose.TypedJSONObjectWithFields
|
||||
object.__setattr__(self, '_add_contact', True)
|
||||
super(Registration, self).__init__(**kwargs)
|
||||
super().__init__(**kwargs)
|
||||
|
||||
def _filter_contact(self, prefix):
|
||||
def _filter_contact(self, prefix: str) -> Tuple[str, ...]:
|
||||
return tuple(
|
||||
detail[len(prefix):] for detail in self.contact # pylint: disable=not-an-iterable
|
||||
if detail.startswith(prefix))
|
||||
|
||||
def _add_contact_if_appropriate(self, jobj):
|
||||
def _add_contact_if_appropriate(self, jobj: Dict[str, Any]) -> Dict[str, Any]:
|
||||
"""
|
||||
The `contact` member of Registration objects should not be required when
|
||||
de-serializing (as it would be if the Fields' `omitempty` flag were `False`), but
|
||||
@@ -381,51 +406,46 @@ class Registration(ResourceBody):
|
||||
|
||||
return jobj
|
||||
|
||||
def to_partial_json(self):
|
||||
def to_partial_json(self) -> Dict[str, Any]:
|
||||
"""Modify josepy.JSONDeserializable.to_partial_json()"""
|
||||
jobj = super(Registration, self).to_partial_json()
|
||||
jobj = super().to_partial_json()
|
||||
return self._add_contact_if_appropriate(jobj)
|
||||
|
||||
def fields_to_partial_json(self):
|
||||
def fields_to_partial_json(self) -> Dict[str, Any]:
|
||||
"""Modify josepy.JSONObjectWithFields.fields_to_partial_json()"""
|
||||
jobj = super(Registration, self).fields_to_partial_json()
|
||||
jobj = super().fields_to_partial_json()
|
||||
return self._add_contact_if_appropriate(jobj)
|
||||
|
||||
@property
|
||||
def phones(self):
|
||||
def phones(self) -> Tuple[str, ...]:
|
||||
"""All phones found in the ``contact`` field."""
|
||||
return self._filter_contact(self.phone_prefix)
|
||||
|
||||
@property
|
||||
def emails(self):
|
||||
def emails(self) -> Tuple[str, ...]:
|
||||
"""All emails found in the ``contact`` field."""
|
||||
return self._filter_contact(self.email_prefix)
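# Hedged sketch (not part of the diff) of the contact behavior described above:
# an explicitly passed contact (even an empty one, which clears contacts
# server-side) stays in the payload, while an omitted contact is left out.
clearing = Registration(contact=())
assert 'contact' in clearing.to_partial_json()

untouched = Registration(agreement='https://example.com/acme/terms')
assert 'contact' not in untouched.to_partial_json()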
|
||||
|
||||
|
||||
@Directory.register
|
||||
class NewRegistration(ResourceMixin, Registration):
|
||||
class NewRegistration(Registration):
|
||||
"""New registration."""
|
||||
resource_type = 'new-reg'
|
||||
resource = fields.Resource(resource_type)
|
||||
|
||||
|
||||
class UpdateRegistration(ResourceMixin, Registration):
|
||||
class UpdateRegistration(Registration):
|
||||
"""Update registration."""
|
||||
resource_type = 'reg'
|
||||
resource = fields.Resource(resource_type)
|
||||
|
||||
|
||||
class RegistrationResource(ResourceWithURI):
|
||||
"""Registration Resource.
|
||||
|
||||
:ivar acme.messages.Registration body:
|
||||
:ivar unicode new_authzr_uri: Deprecated. Do not use.
|
||||
:ivar unicode terms_of_service: URL for the CA TOS.
|
||||
:ivar str new_authzr_uri: Deprecated. Do not use.
|
||||
:ivar str terms_of_service: URL for the CA TOS.
|
||||
|
||||
"""
|
||||
body = jose.Field('body', decoder=Registration.from_json)
|
||||
new_authzr_uri = jose.Field('new_authzr_uri', omitempty=True)
|
||||
terms_of_service = jose.Field('terms_of_service', omitempty=True)
|
||||
body: Registration = jose.field('body', decoder=Registration.from_json)
|
||||
new_authzr_uri: str = jose.field('new_authzr_uri', omitempty=True)
|
||||
terms_of_service: str = jose.field('terms_of_service', omitempty=True)
|
||||
|
||||
|
||||
class ChallengeBody(ResourceBody):
|
||||
@@ -450,64 +470,63 @@ class ChallengeBody(ResourceBody):
|
||||
# challenge object supports either one, but should be accessed through the
|
||||
# name "uri". In Client.answer_challenge, whichever one is set will be
|
||||
# used.
|
||||
_uri = jose.Field('uri', omitempty=True, default=None)
|
||||
_url = jose.Field('url', omitempty=True, default=None)
|
||||
status = jose.Field('status', decoder=Status.from_json,
|
||||
_url: str = jose.field('url', omitempty=True, default=None)
|
||||
status: Status = jose.field('status', decoder=Status.from_json,
|
||||
omitempty=True, default=STATUS_PENDING)
|
||||
validated = fields.RFC3339Field('validated', omitempty=True)
|
||||
error = jose.Field('error', decoder=Error.from_json,
|
||||
validated: datetime.datetime = fields.rfc3339('validated', omitempty=True)
|
||||
error: Error = jose.field('error', decoder=Error.from_json,
|
||||
omitempty=True, default=None)
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
def __init__(self, **kwargs: Any) -> None:
|
||||
kwargs = {self._internal_name(k): v for k, v in kwargs.items()}
|
||||
super(ChallengeBody, self).__init__(**kwargs)
|
||||
super().__init__(**kwargs)
|
||||
|
||||
def encode(self, name):
|
||||
return super(ChallengeBody, self).encode(self._internal_name(name))
|
||||
def encode(self, name: str) -> Any:
|
||||
return super().encode(self._internal_name(name))
|
||||
|
||||
def to_partial_json(self):
|
||||
jobj = super(ChallengeBody, self).to_partial_json()
|
||||
def to_partial_json(self) -> Dict[str, Any]:
|
||||
jobj = super().to_partial_json()
|
||||
jobj.update(self.chall.to_partial_json())
|
||||
return jobj
|
||||
|
||||
@classmethod
|
||||
def fields_from_json(cls, jobj):
|
||||
jobj_fields = super(ChallengeBody, cls).fields_from_json(jobj)
|
||||
def fields_from_json(cls, jobj: Mapping[str, Any]) -> Dict[str, Any]:
|
||||
jobj_fields = super().fields_from_json(jobj)
|
||||
jobj_fields['chall'] = challenges.Challenge.from_json(jobj)
|
||||
return jobj_fields
|
||||
|
||||
@property
|
||||
def uri(self):
|
||||
def uri(self) -> str:
|
||||
"""The URL of this challenge."""
|
||||
return self._url or self._uri
|
||||
return self._url
|
||||
|
||||
def __getattr__(self, name):
|
||||
def __getattr__(self, name: str) -> Any:
|
||||
return getattr(self.chall, name)
|
||||
|
||||
def __iter__(self):
|
||||
def __iter__(self) -> Iterator[str]:
|
||||
# When iterating over fields, use the external name 'uri' instead of
|
||||
# the internal '_uri'.
|
||||
for name in super(ChallengeBody, self).__iter__():
|
||||
yield name[1:] if name == '_uri' else name
|
||||
for name in super().__iter__():
|
||||
yield 'uri' if name == '_url' else name
|
||||
|
||||
def _internal_name(self, name):
|
||||
return '_' + name if name == 'uri' else name
|
||||
def _internal_name(self, name: str) -> str:
|
||||
return '_url' if name == 'uri' else name
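# Hedged sketch (not part of the diff) of the renaming handled above: the
# RFC 8555 "url" member is exposed as .uri in Python and serializes back to
# "url". The URL and token values are made up (the token is valid base64url).
chall_body = ChallengeBody.from_json({
    'type': 'http-01',
    'url': 'https://example.com/acme/chall/1',
    'token': 'evaGxfADs6pSRb2LAv9IZf17Dt3juxGJ-PCt92wr-oA',
    'status': 'pending',
})
assert chall_body.uri == 'https://example.com/acme/chall/1'
assert chall_body.to_partial_json()['url'] == chall_body.uri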
|
||||
|
||||
|
||||
class ChallengeResource(Resource):
|
||||
"""Challenge Resource.
|
||||
|
||||
:ivar acme.messages.ChallengeBody body:
|
||||
:ivar unicode authzr_uri: URI found in the 'up' ``Link`` header.
|
||||
:ivar str authzr_uri: URI found in the 'up' ``Link`` header.
|
||||
|
||||
"""
|
||||
body = jose.Field('body', decoder=ChallengeBody.from_json)
|
||||
authzr_uri = jose.Field('authzr_uri')
|
||||
body: ChallengeBody = jose.field('body', decoder=ChallengeBody.from_json)
|
||||
authzr_uri: str = jose.field('authzr_uri')
|
||||
|
||||
@property
|
||||
def uri(self):
|
||||
def uri(self) -> str:
|
||||
"""The URL of the challenge body."""
|
||||
return self.body.uri
|
||||
return self.body.uri # pylint: disable=no-member
|
||||
|
||||
|
||||
class Authorization(ResourceBody):
|
||||
@@ -515,72 +534,55 @@ class Authorization(ResourceBody):
|
||||
|
||||
:ivar acme.messages.Identifier identifier:
|
||||
:ivar list challenges: `list` of `.ChallengeBody`
|
||||
:ivar tuple combinations: Challenge combinations (`tuple` of `tuple`
|
||||
of `int`, as opposed to `list` of `list` from the spec).
|
||||
:ivar acme.messages.Status status:
|
||||
:ivar datetime.datetime expires:
|
||||
|
||||
"""
|
||||
identifier = jose.Field('identifier', decoder=Identifier.from_json, omitempty=True)
|
||||
challenges = jose.Field('challenges', omitempty=True)
|
||||
combinations = jose.Field('combinations', omitempty=True)
|
||||
identifier: Identifier = jose.field('identifier', decoder=Identifier.from_json, omitempty=True)
|
||||
challenges: List[ChallengeBody] = jose.field('challenges', omitempty=True)
|
||||
|
||||
status = jose.Field('status', omitempty=True, decoder=Status.from_json)
|
||||
status: Status = jose.field('status', omitempty=True, decoder=Status.from_json)
|
||||
# TODO: 'expires' is allowed for Authorization Resources in
|
||||
# general, but for Key Authorization '[t]he "expires" field MUST
|
||||
# be absent'... then acme-spec gives example with 'expires'
|
||||
# present... That's confusing!
|
||||
expires = fields.RFC3339Field('expires', omitempty=True)
|
||||
wildcard = jose.Field('wildcard', omitempty=True)
|
||||
expires: datetime.datetime = fields.rfc3339('expires', omitempty=True)
|
||||
wildcard: bool = jose.field('wildcard', omitempty=True)
|
||||
|
||||
# Mypy does not understand the josepy magic happening here, and falsely claims
|
||||
# that challenges is redefined. Let's ignore the type check here.
|
||||
@challenges.decoder # type: ignore
|
||||
def challenges(value): # pylint: disable=no-self-argument,missing-function-docstring
|
||||
def challenges(value: List[Dict[str, Any]]) -> Tuple[ChallengeBody, ...]: # pylint: disable=no-self-argument,missing-function-docstring
|
||||
return tuple(ChallengeBody.from_json(chall) for chall in value)
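# Hedged sketch (not part of the diff) of this decoder at work: raw challenge
# dicts inside an authorization document come back as ChallengeBody instances.
# The URL and token values are made up (the token is valid base64url).
authz = Authorization.from_json({
    'identifier': {'type': 'dns', 'value': 'example.com'},
    'status': 'pending',
    'challenges': [{
        'type': 'http-01',
        'url': 'https://example.com/acme/chall/1',
        'token': 'evaGxfADs6pSRb2LAv9IZf17Dt3juxGJ-PCt92wr-oA',
    }],
})
assert isinstance(authz.challenges[0], ChallengeBody)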
|
||||
|
||||
@property
|
||||
def resolved_combinations(self):
|
||||
"""Combinations with challenges instead of indices."""
|
||||
return tuple(tuple(self.challenges[idx] for idx in combo)
|
||||
for combo in self.combinations) # pylint: disable=not-an-iterable
|
||||
|
||||
|
||||
@Directory.register
|
||||
class NewAuthorization(ResourceMixin, Authorization):
|
||||
class NewAuthorization(Authorization):
|
||||
"""New authorization."""
|
||||
resource_type = 'new-authz'
|
||||
resource = fields.Resource(resource_type)
|
||||
|
||||
|
||||
class UpdateAuthorization(ResourceMixin, Authorization):
|
||||
class UpdateAuthorization(Authorization):
|
||||
"""Update authorization."""
|
||||
resource_type = 'authz'
|
||||
resource = fields.Resource(resource_type)
|
||||
|
||||
|
||||
class AuthorizationResource(ResourceWithURI):
|
||||
"""Authorization Resource.
|
||||
|
||||
:ivar acme.messages.Authorization body:
|
||||
:ivar unicode new_cert_uri: Deprecated. Do not use.
|
||||
:ivar str new_cert_uri: Deprecated. Do not use.
|
||||
|
||||
"""
|
||||
body = jose.Field('body', decoder=Authorization.from_json)
|
||||
new_cert_uri = jose.Field('new_cert_uri', omitempty=True)
|
||||
body: Authorization = jose.field('body', decoder=Authorization.from_json)
|
||||
new_cert_uri: str = jose.field('new_cert_uri', omitempty=True)
|
||||
|
||||
|
||||
@Directory.register
|
||||
class CertificateRequest(ResourceMixin, jose.JSONObjectWithFields):
|
||||
"""ACME new-cert request.
|
||||
class CertificateRequest(jose.JSONObjectWithFields):
|
||||
"""ACME newOrder request.
|
||||
|
||||
:ivar josepy.util.ComparableX509 csr:
|
||||
:ivar jose.ComparableX509 csr:
|
||||
`OpenSSL.crypto.X509Req` wrapped in `.ComparableX509`
|
||||
|
||||
"""
|
||||
resource_type = 'new-cert'
|
||||
resource = fields.Resource(resource_type)
|
||||
csr = jose.Field('csr', decoder=jose.decode_csr, encoder=jose.encode_csr)
|
||||
csr: jose.ComparableX509 = jose.field('csr', decoder=jose.decode_csr, encoder=jose.encode_csr)
|
||||
|
||||
|
||||
class CertificateResource(ResourceWithURI):
|
||||
@@ -588,27 +590,24 @@ class CertificateResource(ResourceWithURI):
|
||||
|
||||
:ivar josepy.util.ComparableX509 body:
|
||||
`OpenSSL.crypto.X509` wrapped in `.ComparableX509`
|
||||
:ivar unicode cert_chain_uri: URI found in the 'up' ``Link`` header
|
||||
:ivar str cert_chain_uri: URI found in the 'up' ``Link`` header
|
||||
:ivar tuple authzrs: `tuple` of `AuthorizationResource`.
|
||||
|
||||
"""
|
||||
cert_chain_uri = jose.Field('cert_chain_uri')
|
||||
authzrs = jose.Field('authzrs')
|
||||
cert_chain_uri: str = jose.field('cert_chain_uri')
|
||||
authzrs: Tuple[AuthorizationResource, ...] = jose.field('authzrs')
|
||||
|
||||
|
||||
@Directory.register
|
||||
class Revocation(ResourceMixin, jose.JSONObjectWithFields):
|
||||
class Revocation(jose.JSONObjectWithFields):
|
||||
"""Revocation message.
|
||||
|
||||
:ivar .ComparableX509 certificate: `OpenSSL.crypto.X509` wrapped in
|
||||
`.ComparableX509`
|
||||
:ivar jose.ComparableX509 certificate: `OpenSSL.crypto.X509` wrapped in
|
||||
`jose.ComparableX509`
|
||||
|
||||
"""
|
||||
resource_type = 'revoke-cert'
|
||||
resource = fields.Resource(resource_type)
|
||||
certificate = jose.Field(
|
||||
certificate: jose.ComparableX509 = jose.field(
|
||||
'certificate', decoder=jose.decode_cert, encoder=jose.encode_cert)
|
||||
reason = jose.Field('reason')
|
||||
reason: int = jose.field('reason')
|
||||
|
||||
|
||||
class Order(ResourceBody):
|
||||
@@ -625,26 +624,26 @@ class Order(ResourceBody):
|
||||
:ivar datetime.datetime expires: When the order expires.
|
||||
:ivar ~.Error error: Any error that occurred during finalization, if applicable.
|
||||
"""
|
||||
identifiers = jose.Field('identifiers', omitempty=True)
|
||||
status = jose.Field('status', decoder=Status.from_json,
|
||||
omitempty=True)
|
||||
authorizations = jose.Field('authorizations', omitempty=True)
|
||||
certificate = jose.Field('certificate', omitempty=True)
|
||||
finalize = jose.Field('finalize', omitempty=True)
|
||||
expires = fields.RFC3339Field('expires', omitempty=True)
|
||||
error = jose.Field('error', omitempty=True, decoder=Error.from_json)
|
||||
identifiers: List[Identifier] = jose.field('identifiers', omitempty=True)
|
||||
status: Status = jose.field('status', decoder=Status.from_json, omitempty=True)
|
||||
authorizations: List[str] = jose.field('authorizations', omitempty=True)
|
||||
certificate: str = jose.field('certificate', omitempty=True)
|
||||
finalize: str = jose.field('finalize', omitempty=True)
|
||||
expires: datetime.datetime = fields.rfc3339('expires', omitempty=True)
|
||||
error: Error = jose.field('error', omitempty=True, decoder=Error.from_json)
|
||||
|
||||
# Mypy does not understand the josepy magic happening here, and falsely claims
|
||||
# that identifiers is redefined. Let's ignore the type check here.
|
||||
@identifiers.decoder # type: ignore
|
||||
def identifiers(value): # pylint: disable=no-self-argument,missing-function-docstring
|
||||
def identifiers(value: List[Dict[str, Any]]) -> Tuple[Identifier, ...]: # pylint: disable=no-self-argument,missing-function-docstring
|
||||
return tuple(Identifier.from_json(identifier) for identifier in value)
|
||||
|
||||
|
||||
class OrderResource(ResourceWithURI):
|
||||
"""Order Resource.
|
||||
|
||||
:ivar acme.messages.Order body:
|
||||
:ivar str csr_pem: The CSR this Order will be finalized with.
|
||||
:ivar bytes csr_pem: The CSR this Order will be finalized with.
|
||||
:ivar authorizations: Fully-fetched AuthorizationResource objects.
|
||||
:vartype authorizations: `list` of `acme.messages.AuthorizationResource`
|
||||
:ivar str fullchain_pem: The fetched contents of the certificate URL
|
||||
@@ -654,13 +653,29 @@ class OrderResource(ResourceWithURI):
|
||||
finalization.
|
||||
:vartype alternative_fullchains_pem: `list` of `str`
|
||||
"""
|
||||
body = jose.Field('body', decoder=Order.from_json)
|
||||
csr_pem = jose.Field('csr_pem', omitempty=True)
|
||||
authorizations = jose.Field('authorizations')
|
||||
fullchain_pem = jose.Field('fullchain_pem', omitempty=True)
|
||||
alternative_fullchains_pem = jose.Field('alternative_fullchains_pem', omitempty=True)
|
||||
body: Order = jose.field('body', decoder=Order.from_json)
|
||||
csr_pem: bytes = jose.field('csr_pem', omitempty=True,
|
||||
# This looks backwards, but it's not -
|
||||
# we want the deserialized value to be
|
||||
# `bytes`, but anything we put into
|
||||
# JSON needs to be `str`, so we encode
|
||||
# to decode and decode to
|
||||
# encode. Otherwise we end up with an
|
||||
# array of ints on serialization
|
||||
decoder=lambda s: s.encode("utf-8"),
|
||||
encoder=lambda b: b.decode("utf-8"))
|
||||
|
||||
authorizations: List[AuthorizationResource] = jose.field('authorizations')
|
||||
fullchain_pem: str = jose.field('fullchain_pem', omitempty=True)
|
||||
alternative_fullchains_pem: List[str] = jose.field('alternative_fullchains_pem',
|
||||
omitempty=True)
|
||||
|
||||
# Mypy does not understand the josepy magic happening here, and falsely claims
|
||||
# that authorizations is redefined. Let's ignore the type check here.
|
||||
@authorizations.decoder # type: ignore
|
||||
def authorizations(value: List[Dict[str, Any]]) -> Tuple[AuthorizationResource, ...]: # pylint: disable=no-self-argument,missing-function-docstring
|
||||
return tuple(AuthorizationResource.from_json(authz) for authz in value)
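# Hedged sketch (not part of the diff) of the bytes/str handling explained in
# the csr_pem comment above: the in-memory value stays bytes while the JSON
# representation is a str. The PEM content is a placeholder.
order = OrderResource(
    body=Order(),
    uri='https://example.com/acme/order/1',
    authorizations=(),
    csr_pem=b'-----BEGIN CERTIFICATE REQUEST-----\n...\n-----END CERTIFICATE REQUEST-----\n',
)
assert isinstance(order.csr_pem, bytes)
assert isinstance(order.to_partial_json()['csr_pem'], str)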
|
||||
|
||||
|
||||
@Directory.register
|
||||
class NewOrder(Order):
|
||||
"""New order."""
|
||||
resource_type = 'new-order'
|
||||
|
||||
@@ -1,65 +0,0 @@
|
||||
"""Useful mixins for Challenge and Resource objects"""
|
||||
|
||||
|
||||
class VersionedLEACMEMixin:
|
||||
"""This mixin stores the version of Let's Encrypt's endpoint being used."""
|
||||
@property
|
||||
def le_acme_version(self):
|
||||
"""Define the version of ACME protocol to use"""
|
||||
return getattr(self, '_le_acme_version', 1)
|
||||
|
||||
@le_acme_version.setter
|
||||
def le_acme_version(self, version):
|
||||
# We need to use object.__setattr__ to not depend on the specific implementation of
|
||||
# __setattr__ in current class (eg. jose.TypedJSONObjectWithFields raises AttributeError
|
||||
# for any attempt to set an attribute to make objects immutable).
|
||||
object.__setattr__(self, '_le_acme_version', version)
|
||||
|
||||
def __setattr__(self, key, value):
|
||||
if key == 'le_acme_version':
|
||||
# Required for @property to operate properly. See comment above.
|
||||
object.__setattr__(self, key, value)
|
||||
else:
|
||||
super(VersionedLEACMEMixin, self).__setattr__(key, value) # pragma: no cover
|
||||
|
||||
|
||||
class ResourceMixin(VersionedLEACMEMixin):
|
||||
"""
|
||||
This mixin generates a RFC8555 compliant JWS payload
|
||||
by removing the `resource` field if needed (eg. ACME v2 protocol).
|
||||
"""
|
||||
def to_partial_json(self):
|
||||
"""See josepy.JSONDeserializable.to_partial_json()"""
|
||||
return _safe_jobj_compliance(super(ResourceMixin, self),
|
||||
'to_partial_json', 'resource')
|
||||
|
||||
def fields_to_partial_json(self):
|
||||
"""See josepy.JSONObjectWithFields.fields_to_partial_json()"""
|
||||
return _safe_jobj_compliance(super(ResourceMixin, self),
|
||||
'fields_to_partial_json', 'resource')
|
||||
|
||||
|
||||
class TypeMixin(VersionedLEACMEMixin):
|
||||
"""
|
||||
This mixin allows generation of a RFC8555 compliant JWS payload
|
||||
by removing the `type` field if needed (eg. ACME v2 protocol).
|
||||
"""
|
||||
def to_partial_json(self):
|
||||
"""See josepy.JSONDeserializable.to_partial_json()"""
|
||||
return _safe_jobj_compliance(super(TypeMixin, self),
|
||||
'to_partial_json', 'type')
|
||||
|
||||
def fields_to_partial_json(self):
|
||||
"""See josepy.JSONObjectWithFields.fields_to_partial_json()"""
|
||||
return _safe_jobj_compliance(super(TypeMixin, self),
|
||||
'fields_to_partial_json', 'type')
|
||||
|
||||
|
||||
def _safe_jobj_compliance(instance, jobj_method, uncompliant_field):
|
||||
if hasattr(instance, jobj_method):
|
||||
jobj = getattr(instance, jobj_method)()
|
||||
if instance.le_acme_version == 2:
|
||||
jobj.pop(uncompliant_field, None)
|
||||
return jobj
|
||||
|
||||
raise AttributeError('Method {0}() is not implemented.'.format(jobj_method)) # pragma: no cover
|
||||
@@ -7,7 +7,17 @@ import logging
|
||||
import socket
|
||||
import socketserver
|
||||
import threading
|
||||
from typing import Any
|
||||
from typing import cast
|
||||
from typing import List
|
||||
from typing import Mapping
|
||||
from typing import Optional
|
||||
from typing import Set
|
||||
from typing import Tuple
|
||||
from typing import Type
|
||||
|
||||
from OpenSSL import crypto
|
||||
from OpenSSL import SSL
|
||||
|
||||
from acme import challenges
|
||||
from acme import crypto_util
|
||||
@@ -18,30 +28,32 @@ logger = logging.getLogger(__name__)
|
||||
class TLSServer(socketserver.TCPServer):
|
||||
"""Generic TLS Server."""
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
def __init__(self, *args: Any, **kwargs: Any) -> None:
|
||||
self.ipv6 = kwargs.pop("ipv6", False)
|
||||
if self.ipv6:
|
||||
self.address_family = socket.AF_INET6
|
||||
else:
|
||||
self.address_family = socket.AF_INET
|
||||
self.certs = kwargs.pop("certs", {})
|
||||
self.method = kwargs.pop(
|
||||
"method", crypto_util._DEFAULT_SSL_METHOD)
|
||||
self.method = kwargs.pop("method", crypto_util._DEFAULT_SSL_METHOD)
|
||||
self.allow_reuse_address = kwargs.pop("allow_reuse_address", True)
|
||||
socketserver.TCPServer.__init__(self, *args, **kwargs)
|
||||
super().__init__(*args, **kwargs)
|
||||
|
||||
def _wrap_sock(self):
|
||||
self.socket = crypto_util.SSLSocket(
|
||||
def _wrap_sock(self) -> None:
|
||||
self.socket = cast(socket.socket, crypto_util.SSLSocket(
|
||||
self.socket, cert_selection=self._cert_selection,
|
||||
alpn_selection=getattr(self, '_alpn_selection', None),
|
||||
method=self.method)
|
||||
method=self.method))
|
||||
|
||||
def _cert_selection(self, connection): # pragma: no cover
|
||||
def _cert_selection(self, connection: SSL.Connection
|
||||
) -> Optional[Tuple[crypto.PKey, crypto.X509]]: # pragma: no cover
|
||||
"""Callback selecting certificate for connection."""
|
||||
server_name = connection.get_servername()
|
||||
return self.certs.get(server_name, None)
|
||||
if server_name:
|
||||
return self.certs.get(server_name, None)
|
||||
return None
|
||||
|
||||
def server_bind(self):
|
||||
def server_bind(self) -> None:
|
||||
self._wrap_sock()
|
||||
return socketserver.TCPServer.server_bind(self)
|
||||
|
||||
@@ -61,11 +73,15 @@ class BaseDualNetworkedServers:
|
||||
If two servers are instantiated, they will serve on the same port.
|
||||
"""
|
||||
|
||||
def __init__(self, ServerClass, server_address, *remaining_args, **kwargs):
|
||||
def __init__(self, ServerClass: Type[socketserver.TCPServer], server_address: Tuple[str, int],
|
||||
*remaining_args: Any, **kwargs: Any) -> None:
|
||||
port = server_address[1]
|
||||
self.threads: List[threading.Thread] = []
|
||||
self.servers: List[socketserver.BaseServer] = []
|
||||
|
||||
# Preserve socket error for re-raising, if no servers can be started
|
||||
last_socket_err: Optional[socket.error] = None
|
||||
|
||||
# Must try True first.
|
||||
# Ubuntu, for example, will fail to bind to IPv4 if we've already bound
|
||||
# to IPv6. But that's ok, since it will accept IPv4 connections on the IPv6
|
||||
@@ -82,7 +98,8 @@ class BaseDualNetworkedServers:
|
||||
logger.debug(
|
||||
"Successfully bound to %s:%s using %s", new_address[0],
|
||||
new_address[1], "IPv6" if ip_version else "IPv4")
|
||||
except socket.error:
|
||||
except socket.error as e:
|
||||
last_socket_err = e
|
||||
if self.servers:
|
||||
# Already bound using IPv6.
|
||||
logger.debug(
|
||||
@@ -101,9 +118,12 @@ class BaseDualNetworkedServers:
|
||||
# bind to the same port for both servers.
|
||||
port = server.socket.getsockname()[1]
|
||||
if not self.servers:
|
||||
raise socket.error("Could not bind to IPv4 or IPv6.")
|
||||
if last_socket_err:
|
||||
raise last_socket_err
|
||||
else: # pragma: no cover
|
||||
raise socket.error("Could not bind to IPv4 or IPv6.")
|
||||
|
||||
def serve_forever(self):
|
||||
def serve_forever(self) -> None:
|
||||
"""Wraps socketserver.TCPServer.serve_forever"""
|
||||
for server in self.servers:
|
||||
thread = threading.Thread(
|
||||
@@ -111,11 +131,11 @@ class BaseDualNetworkedServers:
|
||||
thread.start()
|
||||
self.threads.append(thread)
|
||||
|
||||
def getsocknames(self):
|
||||
def getsocknames(self) -> List[Tuple[str, int]]:
|
||||
"""Wraps socketserver.TCPServer.socket.getsockname"""
|
||||
return [server.socket.getsockname() for server in self.servers]
|
||||
|
||||
def shutdown_and_server_close(self):
|
||||
def shutdown_and_server_close(self) -> None:
|
||||
"""Wraps socketserver.TCPServer.shutdown, socketserver.TCPServer.server_close, and
|
||||
threading.Thread.join"""
|
||||
for server in self.servers:
|
||||
@@ -131,13 +151,20 @@ class TLSALPN01Server(TLSServer, ACMEServerMixin):
|
||||
|
||||
ACME_TLS_1_PROTOCOL = b"acme-tls/1"
|
||||
|
||||
def __init__(self, server_address, certs, challenge_certs, ipv6=False):
|
||||
def __init__(self, server_address: Tuple[str, int],
|
||||
certs: List[Tuple[crypto.PKey, crypto.X509]],
|
||||
challenge_certs: Mapping[bytes, Tuple[crypto.PKey, crypto.X509]],
|
||||
ipv6: bool = False) -> None:
|
||||
# We don't need to implement a request handler here because the work
|
||||
# (including logging) is being done by the wrapped socket set up in the
|
||||
# parent TLSServer class.
|
||||
TLSServer.__init__(
|
||||
self, server_address, _BaseRequestHandlerWithLogging, certs=certs,
|
||||
self, server_address, socketserver.BaseRequestHandler, certs=certs,
|
||||
ipv6=ipv6)
|
||||
self.challenge_certs = challenge_certs
|
||||
|
||||
def _cert_selection(self, connection):
|
||||
def _cert_selection(self, connection: SSL.Connection) -> Optional[Tuple[crypto.PKey,
|
||||
crypto.X509]]:
|
||||
# TODO: We would like to serve challenge cert only if asked for it via
|
||||
# ALPN. To do this, we need to retrieve the list of protos from client
|
||||
# hello, but this is currently impossible with openssl [0], and ALPN
|
||||
@@ -146,10 +173,12 @@ class TLSALPN01Server(TLSServer, ACMEServerMixin):
|
||||
# handshake in alpn_selection() if ALPN protos are not what we expect.
|
||||
# [0] https://github.com/openssl/openssl/issues/4952
|
||||
server_name = connection.get_servername()
|
||||
logger.debug("Serving challenge cert for server name %s", server_name)
|
||||
return self.challenge_certs.get(server_name, None)
|
||||
if server_name:
|
||||
logger.debug("Serving challenge cert for server name %s", server_name)
|
||||
return self.challenge_certs[server_name]
|
||||
return None # pragma: no cover
|
||||
|
||||
def _alpn_selection(self, _connection, alpn_protos):
|
||||
def _alpn_selection(self, _connection: SSL.Connection, alpn_protos: List[bytes]) -> bytes:
|
||||
"""Callback to select alpn protocol."""
|
||||
if len(alpn_protos) == 1 and alpn_protos[0] == self.ACME_TLS_1_PROTOCOL:
|
||||
logger.debug("Agreed on %s ALPN", self.ACME_TLS_1_PROTOCOL)
|
||||
@@ -163,21 +192,22 @@ class TLSALPN01Server(TLSServer, ACMEServerMixin):
|
||||
class HTTPServer(BaseHTTPServer.HTTPServer):
|
||||
"""Generic HTTP Server."""
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
def __init__(self, *args: Any, **kwargs: Any) -> None:
|
||||
self.ipv6 = kwargs.pop("ipv6", False)
|
||||
if self.ipv6:
|
||||
self.address_family = socket.AF_INET6
|
||||
else:
|
||||
self.address_family = socket.AF_INET
|
||||
BaseHTTPServer.HTTPServer.__init__(self, *args, **kwargs)
|
||||
super().__init__(*args, **kwargs)
|
||||
|
||||
|
||||
class HTTP01Server(HTTPServer, ACMEServerMixin):
|
||||
"""HTTP01 Server."""
|
||||
|
||||
def __init__(self, server_address, resources, ipv6=False, timeout=30):
|
||||
HTTPServer.__init__(
|
||||
self, server_address, HTTP01RequestHandler.partial_init(
|
||||
def __init__(self, server_address: Tuple[str, int], resources: Set[challenges.HTTP01],
|
||||
ipv6: bool = False, timeout: int = 30) -> None:
|
||||
super().__init__(
|
||||
server_address, HTTP01RequestHandler.partial_init(
|
||||
simple_http_resources=resources, timeout=timeout), ipv6=ipv6)
|
||||
|
||||
|
||||
@@ -185,8 +215,8 @@ class HTTP01DualNetworkedServers(BaseDualNetworkedServers):
|
||||
"""HTTP01Server Wrapper. Tries everything for both. Failures for one don't
|
||||
affect the other."""
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
BaseDualNetworkedServers.__init__(self, HTTP01Server, *args, **kwargs)
|
||||
def __init__(self, *args: Any, **kwargs: Any) -> None:
|
||||
super().__init__(HTTP01Server, *args, **kwargs)
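# Hypothetical usage sketch (not part of the diff): bind on every address
# family that works, inspect the bound sockets, then tear everything down.
servers = HTTP01DualNetworkedServers(('', 0), set())  # port 0: let the OS pick
servers.serve_forever()
print(servers.getsocknames())  # one sockname tuple per bound address family
servers.shutdown_and_server_close()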
|
||||
|
||||
|
||||
class HTTP01RequestHandler(BaseHTTPServer.BaseHTTPRequestHandler):
|
||||
@@ -201,10 +231,10 @@ class HTTP01RequestHandler(BaseHTTPServer.BaseHTTPRequestHandler):
|
||||
HTTP01Resource = collections.namedtuple(
|
||||
"HTTP01Resource", "chall response validation")
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
def __init__(self, *args: Any, **kwargs: Any) -> None:
|
||||
self.simple_http_resources = kwargs.pop("simple_http_resources", set())
|
||||
self._timeout = kwargs.pop('timeout', 30)
|
||||
BaseHTTPServer.BaseHTTPRequestHandler.__init__(self, *args, **kwargs)
|
||||
super().__init__(*args, **kwargs)
|
||||
self.server: HTTP01Server
|
||||
|
||||
# In parent class BaseHTTPRequestHandler, 'timeout' is a class-level property but we
|
||||
@@ -214,7 +244,7 @@ class HTTP01RequestHandler(BaseHTTPServer.BaseHTTPRequestHandler):
|
||||
# everyone happy, we statically redefine 'timeout' as a method property, and set the
|
||||
# timeout value in a new internal instance-level property _timeout.
|
||||
@property
|
||||
def timeout(self):
|
||||
def timeout(self) -> int: # type: ignore[override]
|
||||
"""
|
||||
The default timeout this server should apply to requests.
|
||||
:return: timeout to apply
|
||||
@@ -222,16 +252,16 @@ class HTTP01RequestHandler(BaseHTTPServer.BaseHTTPRequestHandler):
|
||||
"""
|
||||
return self._timeout
|
||||
|
||||
def log_message(self, format, *args): # pylint: disable=redefined-builtin
|
||||
def log_message(self, format: str, *args: Any) -> None: # pylint: disable=redefined-builtin
|
||||
"""Log arbitrary message."""
|
||||
logger.debug("%s - - %s", self.client_address[0], format % args)
|
||||
|
||||
def handle(self):
|
||||
def handle(self) -> None:
|
||||
"""Handle request."""
|
||||
self.log_message("Incoming request")
|
||||
BaseHTTPServer.BaseHTTPRequestHandler.handle(self)
|
||||
|
||||
def do_GET(self): # pylint: disable=invalid-name,missing-function-docstring
|
||||
def do_GET(self) -> None: # pylint: disable=invalid-name,missing-function-docstring
|
||||
if self.path == "/":
|
||||
self.handle_index()
|
||||
elif self.path.startswith("/" + challenges.HTTP01.URI_ROOT_PATH):
|
||||
@@ -239,21 +269,21 @@ class HTTP01RequestHandler(BaseHTTPServer.BaseHTTPRequestHandler):
|
||||
else:
|
||||
self.handle_404()
|
||||
|
||||
def handle_index(self):
|
||||
def handle_index(self) -> None:
|
||||
"""Handle index page."""
|
||||
self.send_response(200)
|
||||
self.send_header("Content-Type", "text/html")
|
||||
self.end_headers()
|
||||
self.wfile.write(self.server.server_version.encode())
|
||||
|
||||
def handle_404(self):
|
||||
def handle_404(self) -> None:
|
||||
"""Handler 404 Not Found errors."""
|
||||
self.send_response(http_client.NOT_FOUND, message="Not Found")
|
||||
self.send_header("Content-type", "text/html")
|
||||
self.end_headers()
|
||||
self.wfile.write(b"404")
|
||||
|
||||
def handle_simple_http_resource(self):
|
||||
def handle_simple_http_resource(self) -> None:
|
||||
"""Handle HTTP01 provisioned resources."""
|
||||
for resource in self.simple_http_resources:
|
||||
if resource.chall.path == self.path:
|
||||
@@ -269,7 +299,8 @@ class HTTP01RequestHandler(BaseHTTPServer.BaseHTTPRequestHandler):
|
||||
self.path)
|
||||
|
||||
@classmethod
|
||||
def partial_init(cls, simple_http_resources, timeout):
|
||||
def partial_init(cls, simple_http_resources: Set[challenges.HTTP01],
|
||||
timeout: int) -> 'functools.partial[HTTP01RequestHandler]':
|
||||
"""Partially initialize this handler.
|
||||
|
||||
This is useful because `socketserver.BaseServer` takes
|
||||
@@ -280,16 +311,3 @@ class HTTP01RequestHandler(BaseHTTPServer.BaseHTTPRequestHandler):
|
||||
return functools.partial(
|
||||
cls, simple_http_resources=simple_http_resources,
|
||||
timeout=timeout)
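# Hedged sketch (not part of the diff): socketserver instantiates the handler
# class itself, so the extra arguments are bound ahead of time here.
handler_cls = HTTP01RequestHandler.partial_init(simple_http_resources=set(), timeout=30)
server = HTTPServer(('', 0), handler_cls)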
|
||||
|
||||
|
||||
class _BaseRequestHandlerWithLogging(socketserver.BaseRequestHandler):
|
||||
"""BaseRequestHandler with logging."""
|
||||
|
||||
def log_message(self, format, *args): # pylint: disable=redefined-builtin
|
||||
"""Log arbitrary message."""
|
||||
logger.debug("%s - - %s", self.client_address[0], format % args)
|
||||
|
||||
def handle(self):
|
||||
"""Handle request."""
|
||||
self.log_message("Incoming request")
|
||||
socketserver.BaseRequestHandler.handle(self)
|
||||
|
||||
@@ -1,6 +1,10 @@
|
||||
"""ACME utilities."""
|
||||
from typing import Any
|
||||
from typing import Callable
|
||||
from typing import Dict
|
||||
from typing import Mapping
|
||||
|
||||
|
||||
def map_keys(dikt, func):
|
||||
def map_keys(dikt: Mapping[Any, Any], func: Callable[[Any], Any]) -> Dict[Any, Any]:
|
||||
"""Map dictionary keys."""
|
||||
return {func(key): value for key, value in dikt.items()}
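# Tiny sketch (not part of the diff): the callable is applied to every key,
# values are left untouched.
assert map_keys({'new-reg': 1, 'reg': 2}, str.upper) == {'NEW-REG': 1, 'REG': 2}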
|
||||
|
||||
@@ -37,6 +37,7 @@ extensions = [
|
||||
'sphinx.ext.todo',
|
||||
'sphinx.ext.coverage',
|
||||
'sphinx.ext.viewcode',
|
||||
'sphinx_rtd_theme',
|
||||
]
|
||||
|
||||
autodoc_member_order = 'bysource'
|
||||
@@ -58,7 +59,7 @@ master_doc = 'index'
|
||||
|
||||
# General information about the project.
|
||||
project = u'acme-python'
|
||||
copyright = u'2015-2015, Let\'s Encrypt Project'
|
||||
copyright = u'2015, Let\'s Encrypt Project'
|
||||
author = u'Let\'s Encrypt Project'
|
||||
|
||||
# The version info for the project you're documenting, acts as replacement for
|
||||
@@ -122,14 +123,7 @@ todo_include_todos = False
|
||||
# The theme to use for HTML and HTML Help pages. See the documentation for
|
||||
# a list of builtin themes.
|
||||
|
||||
# https://docs.readthedocs.io/en/stable/faq.html#i-want-to-use-the-read-the-docs-theme-locally
|
||||
# on_rtd is whether we are on readthedocs.org
|
||||
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
|
||||
if not on_rtd: # only import and set the theme if we're building docs locally
|
||||
import sphinx_rtd_theme
|
||||
html_theme = 'sphinx_rtd_theme'
|
||||
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
|
||||
# otherwise, readthedocs.org uses their theme by default, so no need to specify it
|
||||
html_theme = 'sphinx_rtd_theme'
|
||||
|
||||
# Theme options are theme-specific and customize the look and feel of a theme
|
||||
# further. For a list of options available for each theme, see the
|
||||
|
||||
@@ -3,6 +3,6 @@ usage: jws [-h] [--compact] {sign,verify} ...
|
||||
positional arguments:
|
||||
{sign,verify}
|
||||
|
||||
optional arguments:
|
||||
options:
|
||||
-h, --help show this help message and exit
|
||||
--compact
|
||||
|
||||
@@ -163,7 +163,7 @@ def example_http():
|
||||
# Register account and accept TOS
|
||||
|
||||
net = client.ClientNetwork(acc_key, user_agent=USER_AGENT)
|
||||
directory = messages.Directory.from_json(net.get(DIRECTORY_URL).json())
|
||||
directory = client.ClientV2.get_directory(DIRECTORY_URL, net)
|
||||
client_acme = client.ClientV2(directory, net=net)
|
||||
|
||||
# Terms of Service URL is in client_acme.directory.meta.terms_of_service
|
||||
@@ -215,8 +215,7 @@ def example_http():
|
||||
try:
|
||||
regr = client_acme.query_registration(regr)
|
||||
except errors.Error as err:
|
||||
if err.typ == messages.OLD_ERROR_PREFIX + 'unauthorized' \
|
||||
or err.typ == messages.ERROR_PREFIX + 'unauthorized':
|
||||
if err.typ == messages.ERROR_PREFIX + 'unauthorized':
|
||||
# Status is deactivated.
|
||||
pass
|
||||
raise
|
||||
|
||||
@@ -1,2 +0,0 @@
|
||||
python -m acme.standalone -p 1234
|
||||
curl -k https://localhost:1234
|
||||
@@ -1 +0,0 @@
|
||||
../../../acme/testdata/rsa2048_cert.pem
|
||||
@@ -1 +0,0 @@
|
||||
../../../acme/testdata/rsa2048_key.pem
|
||||
@@ -7,4 +7,7 @@
|
||||
# in --editable mode (-e), just "pip install acme[docs]" does not work as
|
||||
# expected and "pip install -e acme[docs]" must be used instead
|
||||
|
||||
# We also pin our dependencies for increased stability.
|
||||
|
||||
-c ../tools/requirements.txt
|
||||
-e acme[docs]
|
||||
|
||||
@@ -1,2 +0,0 @@
|
||||
[bdist_wheel]
|
||||
universal = 1
|
||||
@@ -3,27 +3,19 @@ import sys
|
||||
from setuptools import find_packages
|
||||
from setuptools import setup
|
||||
|
||||
version = '1.15.0.dev0'
|
||||
version = '2.12.0.dev0'
|
||||
|
||||
# Please update tox.ini when modifying dependency version requirements
|
||||
install_requires = [
|
||||
'cryptography>=2.1.4',
|
||||
# formerly known as acme.jose:
|
||||
# 1.1.0+ is required to avoid the warnings described at
|
||||
# https://github.com/certbot/josepy/issues/13.
|
||||
'josepy>=1.1.0',
|
||||
'PyOpenSSL>=17.3.0',
|
||||
'cryptography>=3.2.1',
|
||||
# Josepy 2+ may introduce backward incompatible changes by dropping usage of
|
||||
# deprecated PyOpenSSL APIs.
|
||||
'josepy>=1.13.0, <2',
|
||||
# pyOpenSSL 23.1.0 is a bad release: https://github.com/pyca/pyopenssl/issues/1199
|
||||
'PyOpenSSL>=17.5.0,!=23.1.0',
|
||||
'pyrfc3339',
|
||||
'pytz',
|
||||
'requests>=2.6.0',
|
||||
'requests-toolbelt>=0.3.0',
|
||||
'setuptools>=39.0.1',
|
||||
]
|
||||
|
||||
dev_extras = [
|
||||
'pytest',
|
||||
'pytest-xdist',
|
||||
'tox',
|
||||
'pytz>=2019.3',
|
||||
'requests>=2.20.0',
|
||||
'setuptools>=41.6.0',
|
||||
]
|
||||
|
||||
docs_extras = [
|
||||
@@ -31,25 +23,41 @@ docs_extras = [
|
||||
'sphinx_rtd_theme',
|
||||
]
|
||||
|
||||
test_extras = [
|
||||
# In theory we could scope importlib_resources to env marker 'python_version<"3.9"'. But this
|
||||
# makes the pinning mechanism emit warnings when running `poetry lock` because in the corner
|
||||
# case of an extra dependency with env marker coming from a setup.py file, it generates the
|
||||
# invalid requirement 'importlib_resource>=1.3.1;python<=3.9;extra=="test"'.
|
||||
# To fix the issue, we do not pass the env marker. This is fine because:
|
||||
# - importlib_resources can be applied to any Python version,
|
||||
# - this is a "test" extra dependency for limited audience,
|
||||
# - it does not change anything at the end for the generated requirement files.
|
||||
'importlib_resources>=1.3.1',
|
||||
'pytest',
|
||||
'pytest-xdist',
|
||||
'typing-extensions',
|
||||
]
|
||||
|
||||
setup(
|
||||
name='acme',
|
||||
version=version,
|
||||
description='ACME protocol implementation in Python',
|
||||
url='https://github.com/letsencrypt/letsencrypt',
|
||||
url='https://github.com/certbot/certbot',
|
||||
author="Certbot Project",
|
||||
author_email='client-dev@letsencrypt.org',
|
||||
author_email='certbot-dev@eff.org',
|
||||
license='Apache License 2.0',
|
||||
python_requires='>=3.6',
|
||||
python_requires='>=3.8',
|
||||
classifiers=[
|
||||
'Development Status :: 5 - Production/Stable',
|
||||
'Intended Audience :: Developers',
|
||||
'License :: OSI Approved :: Apache Software License',
|
||||
'Programming Language :: Python',
|
||||
'Programming Language :: Python :: 3',
|
||||
'Programming Language :: Python :: 3.6',
|
||||
'Programming Language :: Python :: 3.7',
|
||||
'Programming Language :: Python :: 3.8',
|
||||
'Programming Language :: Python :: 3.9',
|
||||
'Programming Language :: Python :: 3.10',
|
||||
'Programming Language :: Python :: 3.11',
|
||||
'Programming Language :: Python :: 3.12',
|
||||
'Topic :: Internet :: WWW/HTTP',
|
||||
'Topic :: Security',
|
||||
],
|
||||
@@ -58,7 +66,7 @@ setup(
|
||||
include_package_data=True,
|
||||
install_requires=install_requires,
|
||||
extras_require={
|
||||
'dev': dev_extras,
|
||||
'docs': docs_extras,
|
||||
'test': test_extras,
|
||||
},
|
||||
)
|
||||
|
||||
@@ -1,72 +0,0 @@
|
||||
"""Tests for acme.fields."""
|
||||
import datetime
|
||||
import unittest
|
||||
|
||||
import josepy as jose
|
||||
import pytz
|
||||
|
||||
|
||||
class FixedTest(unittest.TestCase):
|
||||
"""Tests for acme.fields.Fixed."""
|
||||
|
||||
def setUp(self):
|
||||
from acme.fields import Fixed
|
||||
self.field = Fixed('name', 'x')
|
||||
|
||||
def test_decode(self):
|
||||
self.assertEqual('x', self.field.decode('x'))
|
||||
|
||||
def test_decode_bad(self):
|
||||
self.assertRaises(jose.DeserializationError, self.field.decode, 'y')
|
||||
|
||||
def test_encode(self):
|
||||
self.assertEqual('x', self.field.encode('x'))
|
||||
|
||||
def test_encode_override(self):
|
||||
self.assertEqual('y', self.field.encode('y'))
|
||||
|
||||
|
||||
class RFC3339FieldTest(unittest.TestCase):
|
||||
"""Tests for acme.fields.RFC3339Field."""
|
||||
|
||||
def setUp(self):
|
||||
self.decoded = datetime.datetime(2015, 3, 27, tzinfo=pytz.utc)
|
||||
self.encoded = '2015-03-27T00:00:00Z'
|
||||
|
||||
def test_default_encoder(self):
|
||||
from acme.fields import RFC3339Field
|
||||
self.assertEqual(
|
||||
self.encoded, RFC3339Field.default_encoder(self.decoded))
|
||||
|
||||
def test_default_encoder_naive_fails(self):
|
||||
from acme.fields import RFC3339Field
|
||||
self.assertRaises(
|
||||
ValueError, RFC3339Field.default_encoder, datetime.datetime.now())
|
||||
|
||||
def test_default_decoder(self):
|
||||
from acme.fields import RFC3339Field
|
||||
self.assertEqual(
|
||||
self.decoded, RFC3339Field.default_decoder(self.encoded))
|
||||
|
||||
def test_default_decoder_raises_deserialization_error(self):
|
||||
from acme.fields import RFC3339Field
|
||||
self.assertRaises(
|
||||
jose.DeserializationError, RFC3339Field.default_decoder, '')
|
||||
|
||||
|
||||
class ResourceTest(unittest.TestCase):
|
||||
"""Tests for acme.fields.Resource."""
|
||||
|
||||
def setUp(self):
|
||||
from acme.fields import Resource
|
||||
self.field = Resource('x')
|
||||
|
||||
def test_decode_good(self):
|
||||
self.assertEqual('x', self.field.decode('x'))
|
||||
|
||||
def test_decode_wrong(self):
|
||||
self.assertRaises(jose.DeserializationError, self.field.decode, 'y')
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main() # pragma: no cover
|
||||