Compare commits: test-power...test-pytho (466 commits)
@@ -69,12 +69,12 @@ Access can be defined for all or only selected repositories, which is nice.
 ```
 
 - Redirected to Azure DevOps, select the account created in _Having an Azure DevOps account_ section.
-- Select the organization, and click "Create a new project" (let's name it the same than the targetted github repo)
+- Select the organization, and click "Create a new project" (let's name it the same than the targeted github repo)
 - The Visibility is public, to profit from 10 parallel jobs
 
 ```
 !!! ACCESS !!!
-Azure Pipelines needs access to the GitHub account (in term of beeing able to check it is valid), and the Resources shared between the GitHub account and Azure Pipelines.
+Azure Pipelines needs access to the GitHub account (in term of being able to check it is valid), and the Resources shared between the GitHub account and Azure Pipelines.
 ```
 
 _Done. We can move to pipelines configuration._
.azure-pipelines/advanced-test.yml (new file)
@@ -0,0 +1,14 @@
# Advanced pipeline for running our full test suite on demand.
trigger:
  # When changing these triggers, please ensure the documentation under
  # "Running tests in CI" is still correct.
  - test-*
pr: none

variables:
  # We don't publish our Docker images in this pipeline, but when building them
  # for testing, let's use the nightly tag.
  dockerTag: nightly

stages:
  - template: templates/stages/test-and-package-stage.yml
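A quick usage sketch (the branch name below is hypothetical, not from the diff): because of the `test-*` trigger and `pr: none` above, this pipeline fires only when a matching branch is pushed:

```bash
# Push the current commit as a test-* branch; the advanced pipeline
# picks it up on push. Opening a PR alone would not run it.
git push origin HEAD:test-my-feature
```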
@@ -1,19 +0,0 @@
-# Advanced pipeline for isolated checks and release purpose
-trigger:
-  - test-*
-  - '*.x'
-pr:
-  - test-*
-# This pipeline is also nightly run on master
-schedules:
-  - cron: "0 4 * * *"
-    displayName: Nightly build
-    branches:
-      include:
-        - master
-    always: true
-
-jobs:
-  # Any addition here should be reflected in the release pipeline.
-  # It is advised to declare all jobs here as templates to improve maintainability.
-  - template: templates/installer-tests.yml
@@ -1,12 +1,7 @@
-trigger:
-  # apache-parser-v2 is a temporary branch for doing work related to
-  # rewriting the parser in the Apache plugin.
-  - apache-parser-v2
-  - master
+trigger: none
 pr:
-  - apache-parser-v2
   - master
   - '*.x'
 
 jobs:
-  - template: templates/tests-suite.yml
+  - template: templates/jobs/standard-tests-jobs.yml
.azure-pipelines/nightly.yml (new file)
@@ -0,0 +1,18 @@
# Nightly pipeline running each day for master.
trigger: none
pr: none
schedules:
  - cron: "30 4 * * *"
    displayName: Nightly build
    branches:
      include:
        - master
    always: true

variables:
  dockerTag: nightly

stages:
  - template: templates/stages/test-and-package-stage.yml
  - template: templates/stages/deploy-stage.yml
  - template: templates/stages/notify-failure-stage.yml
@@ -1,13 +1,18 @@
-# Release pipeline to build and deploy Certbot for Windows for GitHub release tags
+# Release pipeline to run our full test suite, build artifacts, and deploy them
+# for GitHub release tags.
 trigger:
   tags:
     include:
      - v*
 pr: none
 
-jobs:
-  # Any addition here should be reflected in the advanced pipeline.
-  # It is advised to declare all jobs here as templates to improve maintainability.
-  - template: templates/tests-suite.yml
-  - template: templates/installer-tests.yml
-  - template: templates/changelog.yml
+variables:
+  dockerTag: ${{variables['Build.SourceBranchName']}}
+
+stages:
+  - template: templates/stages/test-and-package-stage.yml
+  - template: templates/stages/changelog-stage.yml
+  - template: templates/stages/deploy-stage.yml
+    parameters:
+      snapReleaseChannel: beta
+  - template: templates/stages/notify-failure-stage.yml
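As a usage sketch for the trigger above (the version number is hypothetical): this release pipeline starts only when a `v*` tag is pushed, for example:

```bash
# Tagging a release and pushing the tag is what starts the pipeline;
# ordinary branch pushes and PRs are excluded by "pr: none".
git tag v1.9.0
git push origin v1.9.0
```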
.azure-pipelines/templates/changelog.yml (deleted file)
@@ -1,14 +0,0 @@
-jobs:
-  - job: changelog
-    pool:
-      vmImage: vs2017-win2016
-    steps:
-      - bash: |
-          CERTBOT_VERSION="$(cd certbot && python -c "import certbot; print(certbot.__version__)" && cd ~-)"
-          "${BUILD_REPOSITORY_LOCALPATH}\tools\extract_changelog.py" "${CERTBOT_VERSION}" >> "${BUILD_ARTIFACTSTAGINGDIRECTORY}/release_notes.md"
-        displayName: Prepare changelog
-      - task: PublishPipelineArtifact@1
-        inputs:
-          path: $(Build.ArtifactStagingDirectory)
-          artifact: changelog
-        displayName: Publish changelog
.azure-pipelines/templates/installer-tests.yml (deleted file)
@@ -1,15 +0,0 @@
-jobs:
-  - job: installer_run
-    strategy:
-      matrix:
-        win2019:
-          imageName: windows-2019
-        win2016:
-          imageName: vs2017-win2016
-        win2012r2:
-          imageName: vs2015-win2012r2
-    pool:
-      vmImage: $(imageName)
-    steps:
-      - script: wusa /uninstall /kb:3134758 /quiet /norestart & exit 0
-      - script: powershell -Command "$PSVersionTable.PSVersion"
.azure-pipelines/templates/jobs/extended-tests-jobs.yml (new file)
@@ -0,0 +1,100 @@
jobs:
  - job: extended_test
    variables:
      - name: IMAGE_NAME
        value: ubuntu-18.04
      - name: PYTHON_VERSION
        value: 3.9
      - group: certbot-common
    strategy:
      matrix:
        linux-py36:
          PYTHON_VERSION: 3.6
          TOXENV: py36
        linux-py37:
          PYTHON_VERSION: 3.7
          TOXENV: py37
        linux-py38:
          PYTHON_VERSION: 3.8
          TOXENV: py38
        linux-py39-nopin:
          PYTHON_VERSION: 3.9
          TOXENV: py39
          CERTBOT_NO_PIN: 1
        linux-boulder-v1-integration-certbot-oldest:
          TOXENV: integration-certbot-oldest
          ACME_SERVER: boulder-v1
        linux-boulder-v2-integration-certbot-oldest:
          TOXENV: integration-certbot-oldest
          ACME_SERVER: boulder-v2
        linux-boulder-v1-integration-nginx-oldest:
          TOXENV: integration-nginx-oldest
          ACME_SERVER: boulder-v1
        linux-boulder-v2-integration-nginx-oldest:
          TOXENV: integration-nginx-oldest
          ACME_SERVER: boulder-v2
        linux-boulder-v1-py27-integration:
          PYTHON_VERSION: 2.7
          TOXENV: integration
          ACME_SERVER: boulder-v1
        linux-boulder-v2-py27-integration:
          PYTHON_VERSION: 2.7
          TOXENV: integration
          ACME_SERVER: boulder-v2
        linux-boulder-v1-py36-integration:
          PYTHON_VERSION: 3.6
          TOXENV: integration
          ACME_SERVER: boulder-v1
        linux-boulder-v2-py36-integration:
          PYTHON_VERSION: 3.6
          TOXENV: integration
          ACME_SERVER: boulder-v2
        linux-boulder-v1-py37-integration:
          PYTHON_VERSION: 3.7
          TOXENV: integration
          ACME_SERVER: boulder-v1
        linux-boulder-v2-py37-integration:
          PYTHON_VERSION: 3.7
          TOXENV: integration
          ACME_SERVER: boulder-v2
        linux-boulder-v1-py38-integration:
          PYTHON_VERSION: 3.8
          TOXENV: integration
          ACME_SERVER: boulder-v1
        linux-boulder-v2-py38-integration:
          PYTHON_VERSION: 3.8
          TOXENV: integration
          ACME_SERVER: boulder-v2
        linux-boulder-v1-py39-integration:
          PYTHON_VERSION: 3.9
          TOXENV: integration
          ACME_SERVER: boulder-v1
        linux-boulder-v2-py39-integration:
          PYTHON_VERSION: 3.9
          TOXENV: integration
          ACME_SERVER: boulder-v2
        nginx-compat:
          TOXENV: nginx_compat
        le-auto-oraclelinux6:
          TOXENV: le_auto_oraclelinux6
        docker-dev:
          TOXENV: docker_dev
        macos-farmtest-apache2:
          # We run one of these test farm tests on macOS to help ensure the
          # tests continue to work on the platform.
          IMAGE_NAME: macOS-10.15
          PYTHON_VERSION: 3.9
          TOXENV: test-farm-apache2
        farmtest-leauto-upgrades:
          PYTHON_VERSION: 3.9
          TOXENV: test-farm-leauto-upgrades
        farmtest-certonly-standalone:
          PYTHON_VERSION: 3.9
          TOXENV: test-farm-certonly-standalone
        farmtest-sdists:
          PYTHON_VERSION: 3.9
          TOXENV: test-farm-sdists
    pool:
      vmImage: $(IMAGE_NAME)
    steps:
      - template: ../steps/tox-steps.yml
.azure-pipelines/templates/jobs/packaging-jobs.yml (new file)
@@ -0,0 +1,202 @@
jobs:
  - job: docker_build
    pool:
      vmImage: ubuntu-18.04
    strategy:
      matrix:
        amd64:
          DOCKER_ARCH: amd64
        # Do not run the heavy non-amd64 builds for test branches
        ${{ if not(startsWith(variables['Build.SourceBranchName'], 'test-')) }}:
          arm32v6:
            DOCKER_ARCH: arm32v6
          arm64v8:
            DOCKER_ARCH: arm64v8
    steps:
      - bash: set -e && tools/docker/build.sh $(dockerTag) $DOCKER_ARCH
        displayName: Build the Docker images
      # We don't filter for the Docker Hub organization to continue to allow
      # easy testing of these scripts on forks.
      - bash: |
          set -e
          DOCKER_IMAGES=$(docker images --filter reference='*/certbot' --filter reference='*/dns-*' --format '{{.Repository}}')
          docker save --output images.tar $DOCKER_IMAGES
        displayName: Save the Docker images
      # If the name of the tar file or artifact changes, the deploy stage will
      # also need to be updated.
      - bash: set -e && mv images.tar $(Build.ArtifactStagingDirectory)
        displayName: Prepare Docker artifact
      - task: PublishPipelineArtifact@1
        inputs:
          path: $(Build.ArtifactStagingDirectory)
          artifact: docker_$(DOCKER_ARCH)
        displayName: Store Docker artifact
  - job: installer_build
    pool:
      vmImage: vs2017-win2016
    steps:
      - task: UsePythonVersion@0
        inputs:
          versionSpec: 3.7
          architecture: x86
          addToPath: true
      - script: python windows-installer/construct.py
        displayName: Build Certbot installer
      - task: CopyFiles@2
        inputs:
          sourceFolder: $(System.DefaultWorkingDirectory)/windows-installer/build/nsis
          contents: '*.exe'
          targetFolder: $(Build.ArtifactStagingDirectory)
      - task: PublishPipelineArtifact@1
        inputs:
          path: $(Build.ArtifactStagingDirectory)
          # If we change the artifact's name, it should also be changed in tools/create_github_release.py
          artifact: windows-installer
        displayName: Publish Windows installer
  - job: installer_run
    dependsOn: installer_build
    strategy:
      matrix:
        win2019:
          imageName: windows-2019
        win2016:
          imageName: vs2017-win2016
    pool:
      vmImage: $(imageName)
    steps:
      - powershell: |
          if ($PSVersionTable.PSVersion.Major -ne 5) {
            throw "Powershell version is not 5.x"
          }
        condition: eq(variables['imageName'], 'vs2017-win2016')
        displayName: Check Powershell 5.x is used in vs2017-win2016
      - task: UsePythonVersion@0
        inputs:
          versionSpec: 3.9
          addToPath: true
      - task: DownloadPipelineArtifact@2
        inputs:
          artifact: windows-installer
          path: $(Build.SourcesDirectory)/bin
        displayName: Retrieve Windows installer
      # pip 9.0 provided by pipstrap is not able to properly resolve the pywin32 dependency
      # required by certbot-ci: as a temporary workaround until pipstrap is updated, we install
      # a recent version of pip, but we also need to disable the build isolation feature as
      # described in https://github.com/certbot/certbot/issues/8256
      - script: |
          py -3 -m venv venv
          venv\Scripts\python -m pip install pip==20.2.3 setuptools==50.3.0 wheel==0.35.1
          venv\Scripts\python tools\pip_install.py -e certbot-ci
        env:
          PIP_NO_BUILD_ISOLATION: no
        displayName: Prepare Certbot-CI
      - script: |
          set PATH=%ProgramFiles(x86)%\Certbot\bin;%PATH%
          venv\Scripts\python -m pytest certbot-ci\windows_installer_integration_tests --allow-persistent-changes --installer-path $(Build.SourcesDirectory)\bin\certbot-beta-installer-win32.exe
        displayName: Run windows installer integration tests
      - script: |
          set PATH=%ProgramFiles(x86)%\Certbot\bin;%PATH%
          venv\Scripts\python -m pytest certbot-ci\certbot_integration_tests\certbot_tests -n 4
        displayName: Run certbot integration tests
  - job: snaps_build
    pool:
      vmImage: ubuntu-18.04
    timeoutInMinutes: 0
    variables:
      # Do not run the heavy non-amd64 builds for test branches
      ${{ if not(startsWith(variables['Build.SourceBranchName'], 'test-')) }}:
        ARCHS: amd64 arm64 armhf
      ${{ if startsWith(variables['Build.SourceBranchName'], 'test-') }}:
        ARCHS: amd64
    steps:
      - script: |
          set -e
          sudo apt-get update
          sudo apt-get install -y --no-install-recommends snapd
          sudo snap install --classic snapcraft
        displayName: Install dependencies
      - task: UsePythonVersion@0
        inputs:
          versionSpec: 3.9
          addToPath: true
      - task: DownloadSecureFile@1
        name: credentials
        inputs:
          secureFile: launchpad-credentials
      - script: |
          set -e
          git config --global user.email "$(Build.RequestedForEmail)"
          git config --global user.name "$(Build.RequestedFor)"
          mkdir -p ~/.local/share/snapcraft/provider/launchpad
          cp $(credentials.secureFilePath) ~/.local/share/snapcraft/provider/launchpad/credentials
          python3 tools/snap/build_remote.py ALL --archs ${ARCHS}
        displayName: Build snaps
      - script: |
          set -e
          mv *.snap $(Build.ArtifactStagingDirectory)
          mv certbot-dns-*/*.snap $(Build.ArtifactStagingDirectory)
        displayName: Prepare artifacts
      - task: PublishPipelineArtifact@1
        inputs:
          path: $(Build.ArtifactStagingDirectory)
          artifact: snaps
        displayName: Store snaps artifacts
  - job: snap_run
    dependsOn: snaps_build
    pool:
      vmImage: ubuntu-18.04
    steps:
      - task: UsePythonVersion@0
        inputs:
          versionSpec: 3.9
          addToPath: true
      - script: |
          set -e
          sudo apt-get update
          sudo apt-get install -y --no-install-recommends nginx-light snapd
          python3 -m venv venv
          venv/bin/python letsencrypt-auto-source/pieces/pipstrap.py
          venv/bin/python tools/pip_install.py -U tox
        displayName: Install dependencies
      - task: DownloadPipelineArtifact@2
        inputs:
          artifact: snaps
          path: $(Build.SourcesDirectory)/snap
        displayName: Retrieve Certbot snaps
      - script: |
          set -e
          sudo snap install --dangerous --classic snap/certbot_*_amd64.snap
        displayName: Install Certbot snap
      - script: |
          set -e
          venv/bin/python -m tox -e integration-external,apacheconftest-external-with-pebble
        displayName: Run tox
  - job: snap_dns_run
    dependsOn: snaps_build
    pool:
      vmImage: ubuntu-18.04
    steps:
      - script: |
          set -e
          sudo apt-get update
          sudo apt-get install -y --no-install-recommends snapd
        displayName: Install dependencies
      - task: UsePythonVersion@0
        inputs:
          versionSpec: 3.9
          addToPath: true
      - task: DownloadPipelineArtifact@2
        inputs:
          artifact: snaps
          path: $(Build.SourcesDirectory)/snap
        displayName: Retrieve Certbot snaps
      - script: |
          set -e
          python3 -m venv venv
          venv/bin/python letsencrypt-auto-source/pieces/pipstrap.py
          venv/bin/python tools/pip_install.py -e certbot-ci
        displayName: Prepare Certbot-CI
      - script: |
          set -e
          sudo -E venv/bin/pytest certbot-ci/snap_integration_tests/dns_tests --allow-persistent-changes --snap-folder $(Build.SourcesDirectory)/snap --snap-arch amd64
        displayName: Test DNS plugins snaps
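The docker_build job above hands images to the deploy stage as a pipeline artifact (a tar archive) rather than pushing to a registry. A minimal local sketch of the same round trip (image names are whatever matches the reference filters above):

```bash
# Save every locally built certbot image into one archive...
DOCKER_IMAGES=$(docker images --filter reference='*/certbot' --format '{{.Repository}}')
docker save --output images.tar $DOCKER_IMAGES
# ...then restore them on another machine, exactly as the deploy stage does.
docker load --input images.tar
```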
.azure-pipelines/templates/jobs/standard-tests-jobs.yml (new file)
@@ -0,0 +1,72 @@
jobs:
  - job: test
    variables:
      PYTHON_VERSION: 3.9
    strategy:
      matrix:
        macos-py27:
          IMAGE_NAME: macOS-10.15
          PYTHON_VERSION: 2.7
          TOXENV: py27
        macos-py39:
          IMAGE_NAME: macOS-10.15
          PYTHON_VERSION: 3.9
          TOXENV: py39
        windows-py36:
          IMAGE_NAME: vs2017-win2016
          PYTHON_VERSION: 3.6
          TOXENV: py36
        windows-py37-cover:
          IMAGE_NAME: vs2017-win2016
          PYTHON_VERSION: 3.7
          TOXENV: py37-cover
        windows-integration-certbot:
          IMAGE_NAME: vs2017-win2016
          PYTHON_VERSION: 3.7
          TOXENV: integration-certbot
        linux-oldest-tests-1:
          IMAGE_NAME: ubuntu-18.04
          TOXENV: py27-{acme,apache,apache-v2,certbot}-oldest
        linux-oldest-tests-2:
          IMAGE_NAME: ubuntu-18.04
          TOXENV: py27-{dns,nginx}-oldest
        linux-py27:
          IMAGE_NAME: ubuntu-18.04
          PYTHON_VERSION: 2.7
          TOXENV: py27
        linux-py36:
          IMAGE_NAME: ubuntu-18.04
          PYTHON_VERSION: 3.6
          TOXENV: py36
        linux-py39-cover:
          IMAGE_NAME: ubuntu-18.04
          PYTHON_VERSION: 3.9
          TOXENV: py38-cover
        linux-py39-lint:
          IMAGE_NAME: ubuntu-18.04
          PYTHON_VERSION: 3.9
          TOXENV: lint
        linux-py36-mypy:
          IMAGE_NAME: ubuntu-18.04
          PYTHON_VERSION: 3.6
          TOXENV: mypy
        linux-integration:
          IMAGE_NAME: ubuntu-18.04
          TOXENV: integration
          ACME_SERVER: pebble
        apache-compat:
          IMAGE_NAME: ubuntu-18.04
          TOXENV: apache_compat
        le-auto-centos6:
          IMAGE_NAME: ubuntu-18.04
          TOXENV: le_auto_centos6
        apacheconftest:
          IMAGE_NAME: ubuntu-18.04
          TOXENV: apacheconftest-with-pebble
        nginxroundtrip:
          IMAGE_NAME: ubuntu-18.04
          TOXENV: nginxroundtrip
    pool:
      vmImage: $(IMAGE_NAME)
    steps:
      - template: ../steps/tox-steps.yml
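Each matrix entry above just selects a `TOXENV` for the shared tox steps, so a hedged sketch of reproducing one of these jobs locally is:

```bash
# Reproduce the linux-py36 job outside CI; tox reads the environment
# name from TOXENV. (The brace-style "oldest" entries are not plain tox
# envs; tox-steps.yml routes those to tools/run_oldest_tests.sh instead.)
TOXENV=py36 python -m tox
```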
.azure-pipelines/templates/stages/changelog-stage.yml (new file)
@@ -0,0 +1,19 @@
stages:
  - stage: Changelog
    jobs:
      - job: prepare
        pool:
          vmImage: vs2017-win2016
        steps:
          # If we change the output filename from `release_notes.md`, it should also be changed in tools/create_github_release.py
          - bash: |
              set -e
              CERTBOT_VERSION="$(cd certbot && python -c "import certbot; print(certbot.__version__)" && cd ~-)"
              "${BUILD_REPOSITORY_LOCALPATH}\tools\extract_changelog.py" "${CERTBOT_VERSION}" >> "${BUILD_ARTIFACTSTAGINGDIRECTORY}/release_notes.md"
            displayName: Prepare changelog
          - task: PublishPipelineArtifact@1
            inputs:
              path: $(Build.ArtifactStagingDirectory)
              # If we change the artifact's name, it should also be changed in tools/create_github_release.py
              artifact: changelog
            displayName: Publish changelog
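A hedged sketch of running the same extraction locally (paths assume the repository layout used in the step above):

```bash
# Compute the version exactly as the pipeline step does, then write the
# matching changelog section to release_notes.md.
CERTBOT_VERSION="$(cd certbot && python -c "import certbot; print(certbot.__version__)" && cd ~-)"
python tools/extract_changelog.py "${CERTBOT_VERSION}" > release_notes.md
```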
.azure-pipelines/templates/stages/deploy-stage.yml (new file)
@@ -0,0 +1,99 @@
parameters:
  - name: snapReleaseChannel
    type: string
    default: edge
    values:
      - edge
      - beta

stages:
  - stage: Deploy
    jobs:
      # This job relies on credentials used to publish the Certbot snaps. This
      # credential file was created by running:
      #
      # snapcraft logout
      # snapcraft login (provide the shared snapcraft credentials when prompted)
      # snapcraft export-login --channels=beta,edge snapcraft.cfg
      #
      # Then the file was added as a secure file in Azure pipelines
      # with the name snapcraft.cfg by following the instructions at
      # https://docs.microsoft.com/en-us/azure/devops/pipelines/library/secure-files?view=azure-devops
      # including authorizing the file in all pipelines as described at
      # https://docs.microsoft.com/en-us/azure/devops/pipelines/library/secure-files?view=azure-devops#how-do-i-authorize-a-secure-file-for-use-in-all-pipelines.
      #
      # This file has a maximum lifetime of one year and the current
      # file will expire on 2021-07-28 which is also tracked by
      # https://github.com/certbot/certbot/issues/7931. The file will
      # need to be updated before then to prevent automated deploys
      # from breaking.
      #
      # Revoking these credentials can be done by changing the password of the
      # account used to generate the credentials. See
      # https://forum.snapcraft.io/t/revoking-exported-credentials/19031 for
      # more info.
      - job: publish_snap
        pool:
          vmImage: ubuntu-18.04
        variables:
          - group: certbot-common
        steps:
          - bash: |
              set -e
              sudo apt-get update
              sudo apt-get install -y --no-install-recommends snapd
              sudo snap install --classic snapcraft
            displayName: Install dependencies
          - task: DownloadPipelineArtifact@2
            inputs:
              artifact: snaps
              path: $(Build.SourcesDirectory)/snap
            displayName: Retrieve Certbot snaps
          - task: DownloadSecureFile@1
            name: snapcraftCfg
            inputs:
              secureFile: snapcraft.cfg
          - bash: |
              set -e
              mkdir -p .snapcraft
              ln -s $(snapcraftCfg.secureFilePath) .snapcraft/snapcraft.cfg
              for SNAP_FILE in snap/*.snap; do
                tools/retry.sh eval snapcraft upload --release=${{ parameters.snapReleaseChannel }} "${SNAP_FILE}"
              done
            displayName: Publish to Snap store
      - job: publish_docker
        pool:
          vmImage: ubuntu-18.04
        strategy:
          matrix:
            amd64:
              DOCKER_ARCH: amd64
            arm32v6:
              DOCKER_ARCH: arm32v6
            arm64v8:
              DOCKER_ARCH: arm64v8
        steps:
          - task: DownloadPipelineArtifact@2
            inputs:
              artifact: docker_$(DOCKER_ARCH)
              path: $(Build.SourcesDirectory)
            displayName: Retrieve Docker images
          - bash: set -e && docker load --input $(Build.SourcesDirectory)/images.tar
            displayName: Load Docker images
          - task: Docker@2
            inputs:
              command: login
              # The credentials used here are for the shared certbotbot account
              # on Docker Hub. The credentials are stored in a service account
              # which was created by following the instructions at
              # https://docs.microsoft.com/en-us/azure/devops/pipelines/library/service-endpoints?view=azure-devops&tabs=yaml#sep-docreg.
              # The name given to this service account must match the value
              # given to containerRegistry below. "Grant access to all
              # pipelines" should also be checked. To revoke these
              # credentials, we can change the password on the certbotbot
              # Docker Hub account or remove the account from the
              # Certbot organization on Docker Hub.
              containerRegistry: docker-hub
            displayName: Login to Docker Hub
          - bash: set -e && tools/docker/deploy.sh $(dockerTag) $DOCKER_ARCH
            displayName: Deploy the Docker images
.azure-pipelines/templates/stages/notify-failure-stage.yml (new file)
@@ -0,0 +1,19 @@
stages:
  - stage: On_Failure
    jobs:
      - job: notify_mattermost
        variables:
          - group: certbot-common
        pool:
          vmImage: ubuntu-latest
        steps:
          - bash: |
              set -e
              MESSAGE="\
              ---\n\
              ##### Azure Pipeline\n\
              *Repo* $(Build.Repository.ID) - *Pipeline* $(Build.DefinitionName) #$(Build.BuildNumber) - *Branch/PR* $(Build.SourceBranchName)\n\
              :warning: __Pipeline has failed__: [Link to the build](https://dev.azure.com/$(Build.Repository.ID)/_build/results?buildId=$(Build.BuildId)&view=results)\n\n\
              ---"
              curl -i -X POST --data-urlencode "payload={\"text\":\"${MESSAGE}\"}" "$(MATTERMOST_URL)"
            condition: failed()
.azure-pipelines/templates/stages/test-and-package-stage.yml (new file)
@@ -0,0 +1,6 @@
stages:
  - stage: TestAndPackage
    jobs:
      - template: ../jobs/standard-tests-jobs.yml
      - template: ../jobs/extended-tests-jobs.yml
      - template: ../jobs/packaging-jobs.yml
.azure-pipelines/templates/steps/tox-steps.yml (new file)
@@ -0,0 +1,57 @@
steps:
  - bash: |
      set -e
      brew install augeas
    condition: startswith(variables['IMAGE_NAME'], 'macOS')
    displayName: Install MacOS dependencies
  - bash: |
      set -e
      sudo apt-get update
      sudo apt-get install -y --no-install-recommends \
        python-dev \
        gcc \
        libaugeas0 \
        libssl-dev \
        libffi-dev \
        ca-certificates \
        nginx-light \
        openssl
      sudo systemctl stop nginx
    condition: startswith(variables['IMAGE_NAME'], 'ubuntu')
    displayName: Install Linux dependencies
  - task: UsePythonVersion@0
    inputs:
      versionSpec: $(PYTHON_VERSION)
      addToPath: true
  # tools/pip_install.py is used to pin packages to a known working version
  # except in tests where the environment variable CERTBOT_NO_PIN is set.
  # virtualenv is listed here explicitly to make sure it is upgraded when
  # CERTBOT_NO_PIN is set to work around failures we've seen when using an older
  # version of virtualenv. The option "-I" is set so when CERTBOT_NO_PIN is also
  # set, pip updates dependencies it thinks are already satisfied to avoid some
  # problems with its lack of real dependency resolution.
  - bash: |
      set -e
      python letsencrypt-auto-source/pieces/pipstrap.py
      python tools/pip_install.py -I tox virtualenv
    displayName: Install runtime dependencies
  - task: DownloadSecureFile@1
    name: testFarmPem
    inputs:
      secureFile: azure-test-farm.pem
    condition: contains(variables['TOXENV'], 'test-farm')
  - bash: |
      set -e
      export TARGET_BRANCH="`echo "${BUILD_SOURCEBRANCH}" | sed -E 's!refs/(heads|tags)/!!g'`"
      [ -z "${SYSTEM_PULLREQUEST_TARGETBRANCH}" ] || export TARGET_BRANCH="${SYSTEM_PULLREQUEST_TARGETBRANCH}"
      env
      if [[ "${TOXENV}" == *"oldest"* ]]; then
        tools/run_oldest_tests.sh
      else
        python -m tox
      fi
    env:
      AWS_ACCESS_KEY_ID: $(AWS_ACCESS_KEY_ID)
      AWS_SECRET_ACCESS_KEY: $(AWS_SECRET_ACCESS_KEY)
      AWS_EC2_PEM_FILE: $(testFarmPem.secureFilePath)
    displayName: Run tox
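For clarity on the `TARGET_BRANCH` computation in the last step, a small sketch of what the sed expression does (the ref values are illustrative):

```bash
# refs/heads/test-foo -> test-foo ; refs/tags/v1.9.0 -> v1.9.0
echo "refs/heads/test-foo" | sed -E 's!refs/(heads|tags)/!!g'
# On PR builds the computed value is then overridden by
# SYSTEM_PULLREQUEST_TARGETBRANCH, which already holds a bare branch name.
```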
.azure-pipelines/templates/tests-suite.yml (deleted file)
@@ -1,38 +0,0 @@
-jobs:
-  - job: test
-    pool:
-      vmImage: vs2017-win2016
-    strategy:
-      matrix:
-        py35:
-          PYTHON_VERSION: 3.5
-          TOXENV: py35
-        py37-cover:
-          PYTHON_VERSION: 3.7
-          TOXENV: py37-cover
-        integration-certbot:
-          PYTHON_VERSION: 3.7
-          TOXENV: integration-certbot
-          PYTEST_ADDOPTS: --numprocesses 4
-    variables:
-      - group: certbot-common
-    steps:
-      - task: UsePythonVersion@0
-        inputs:
-          versionSpec: $(PYTHON_VERSION)
-          addToPath: true
-      - script: python tools/pip_install.py -U tox coverage
-        displayName: Install dependencies
-      - script: python -m tox
-        displayName: Run tox
-      # We do not require codecov report upload to succeed. So to avoid to break the pipeline if
-      # something goes wrong, each command is suffixed with a command that hides any non zero exit
-      # codes and echoes an informative message instead.
-      - bash: |
-          curl -s https://codecov.io/bash -o codecov-bash || echo "Failed to download codecov-bash"
-          chmod +x codecov-bash || echo "Failed to apply execute permissions on codecov-bash"
-          ./codecov-bash -F windows || echo "Codecov did not collect coverage reports"
-        condition: in(variables['TOXENV'], 'py37-cover', 'integration-certbot')
-        env:
-          CODECOV_TOKEN: $(codecov_token)
-        displayName: Publish coverage
.codecov.yml (deleted file)
@@ -1,18 +0,0 @@
-coverage:
-  status:
-    project:
-      default: off
-      linux:
-        flags: linux
-        # Fixed target instead of auto set by #7173, can
-        # be removed when flags in Codecov are added back.
-        target: 97.4
-        threshold: 0.1
-        base: auto
-      windows:
-        flags: windows
-        # Fixed target instead of auto set by #7173, can
-        # be removed when flags in Codecov are added back.
-        target: 97.4
-        threshold: 0.1
-        base: auto
.editorconfig (new file)
@@ -0,0 +1,18 @@
# https://editorconfig.org/

root = true

[*]
insert_final_newline = true
trim_trailing_whitespace = true
end_of_line = lf

[*.py]
indent_style = space
indent_size = 4
charset = utf-8
max_line_length = 100

[*.yaml]
indent_style = space
indent_size = 2
.gitignore
@@ -26,6 +26,7 @@ tags
 \#*#
 .idea
 .ropeproject
+.vscode
 
 # auth --cert-path --chain-path
 /*.pem
@@ -34,6 +35,7 @@ tags
 tests/letstest/letest-*/
 tests/letstest/*.pem
 tests/letstest/venv/
+tests/letstest/venv3/
 
 .venv
 
@@ -49,3 +51,16 @@ tests/letstest/venv/
 .certbot_test_workspace
 **/assets/pebble*
 **/assets/challtestsrv*
+
+# snap files
+.snapcraft
+parts
+prime
+stage
+*.snap
+snap-constraints.txt
+qemu-*
+certbot-dns*/certbot-dns*_amd64*.txt
+certbot-dns*/certbot-dns*_arm*.txt
+/certbot_amd64*.txt
+/certbot_arm*.txt
.travis.yml (deleted file)
@@ -1,75 +0,0 @@
-language: python
-dist: xenial
-
-cache:
-  directories:
-    - $HOME/.cache/pip
-
-before_script:
-  - 'if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then ulimit -n 1024 ; fi'
-  # On Travis, the fastest parallelization for integration tests has proved to be 4.
-  - 'if [[ "$TOXENV" == *"integration"* ]]; then export PYTEST_ADDOPTS="--numprocesses 4"; fi'
-  # Use Travis retry feature for farm tests since they are flaky
-  - 'if [[ "$TOXENV" == "travis-test-farm"* ]]; then export TRAVIS_RETRY=travis_retry; fi'
-  - export TOX_TESTENV_PASSENV=TRAVIS
-
-# Only build pushes to the master branch, PRs, and branches beginning with
-# `test-` or of the form `digit(s).digit(s).x`. This reduces the number of
-# simultaneous Travis runs, which speeds turnaround time on review since there
-# is a cap on the number of simultaneous runs.
-branches:
-  only:
-    # apache-parser-v2 is a temporary branch for doing work related to
-    # rewriting the parser in the Apache plugin.
-    - apache-parser-v2
-    - master
-    - /^\d+\.\d+\.x$/
-    - /^test-.*$/
-
-# Jobs for the main test suite are always executed (including on PRs) except for pushes on master.
-not-on-master: &not-on-master
-  if: NOT (type = push AND branch = master)
-
-# Jobs for the extended test suite are executed for cron jobs and pushes to
-# non-development branches. See the explanation for apache-parser-v2 above.
-extended-test-suite: &extended-test-suite
-  if: type = cron OR (type = push AND branch NOT IN (apache-parser-v2, master))
-
-matrix:
-  include:
-    # This job is always executed, including on master
-    - python: "2.7"
-      env: TOXENV=py27-cover FYI="py27 tests + code coverage"
-
-# container-based infrastructure
-sudo: false
-
-addons:
-  apt:
-    packages:  # Keep in sync with letsencrypt-auto-source/pieces/bootstrappers/deb_common.sh and Boulder.
-      - python-dev
-      - gcc
-      - libaugeas0
-      - libssl-dev
-      - libffi-dev
-      - ca-certificates
-      # For certbot-nginx integration testing
-      - nginx-light
-      - openssl
-
-# tools/pip_install.py is used to pin packages to a known working version
-# except in tests where the environment variable CERTBOT_NO_PIN is set.
-# virtualenv is listed here explicitly to make sure it is upgraded when
-# CERTBOT_NO_PIN is set to work around failures we've seen when using an older
-# version of virtualenv.
-install: 'tools/pip_install.py -U codecov tox virtualenv'
-# Most of the time TRAVIS_RETRY is an empty string, and has no effect on the
-# script command. It is set only to `travis_retry` during farm tests, in
-# order to trigger the Travis retry feature, and compensate the inherent
-# flakiness of these specific tests.
-script: '$TRAVIS_RETRY tox'
-
-after_success: '[ "$TOXENV" == "py27-cover" ] && codecov -F linux'
-
-notifications:
-  email: false
AUTHORS.md
@@ -21,6 +21,7 @@ Authors
 * [Andrzej Górski](https://github.com/andrzej3393)
 * [Anselm Levskaya](https://github.com/levskaya)
 * [Antoine Jacoutot](https://github.com/ajacoutot)
 * [April King](https://github.com/april)
 * [asaph](https://github.com/asaph)
 * [Axel Beckert](https://github.com/xtaran)
 * [Bas](https://github.com/Mechazawa)
@@ -35,7 +36,9 @@ Authors
 * [Blake Griffith](https://github.com/cowlicks)
 * [Brad Warren](https://github.com/bmw)
 * [Brandon Kraft](https://github.com/kraftbj)
-* [Brandon Kreisel](https://github.com/kraftbj)
+* [Brandon Kreisel](https://github.com/BKreisel)
 * [Brian Heim](https://github.com/brianlheim)
 * [Cameron Steel](https://github.com/Tugzrida)
 * [Ceesjan Luiten](https://github.com/quinox)
 * [Chad Whitacre](https://github.com/whit537)
 * [Chhatoi Pritam Baral](https://github.com/pritambaral)
@@ -58,6 +61,7 @@ Authors
 * [Daniel Albers](https://github.com/AID)
 * [Daniel Aleksandersen](https://github.com/da2x)
 * [Daniel Convissor](https://github.com/convissor)
 * [Daniel "Drex" Drexler](https://github.com/aeturnum)
 * [Daniel Huang](https://github.com/dhuang)
 * [Dave Guarino](https://github.com/daguar)
 * [David cz](https://github.com/dave-cz)
@@ -82,6 +86,7 @@ Authors
 * [Felix Schwarz](https://github.com/FelixSchwarz)
 * [Felix Yan](https://github.com/felixonmars)
 * [Filip Ochnik](https://github.com/filipochnik)
 * [Florian Klink](https://github.com/flokli)
 * [Francois Marier](https://github.com/fmarier)
 * [Frank](https://github.com/Frankkkkk)
 * [Frederic BLANC](https://github.com/fblanc)
@@ -100,7 +105,9 @@ Authors
 * [Harlan Lieberman-Berg](https://github.com/hlieberman)
 * [Henri Salo](https://github.com/fgeek)
 * [Henry Chen](https://github.com/henrychen95)
 * [Hugo van Kemenade](https://github.com/hugovk)
 * [Ingolf Becker](https://github.com/watercrossing)
 * [Ivan Nejgebauer](https://github.com/inejge)
 * [Jaap Eldering](https://github.com/eldering)
 * [Jacob Hoffman-Andrews](https://github.com/jsha)
 * [Jacob Sachs](https://github.com/jsachs)
@@ -124,6 +131,7 @@ Authors
 * [Jonathan Herlin](https://github.com/Jonher937)
 * [Jon Walsh](https://github.com/code-tree)
 * [Joona Hoikkala](https://github.com/joohoi)
 * [Josh McCullough](https://github.com/JoshMcCullough)
 * [Josh Soref](https://github.com/jsoref)
 * [Joubin Jabbari](https://github.com/joubin)
 * [Juho Juopperi](https://github.com/jkjuopperi)
@@ -197,6 +205,7 @@ Authors
 * [Pierre Jaury](https://github.com/kaiyou)
 * [Piotr Kasprzyk](https://github.com/kwadrat)
 * [Prayag Verma](https://github.com/pra85)
 * [Rasesh Patel](https://github.com/raspat1)
 * [Reinaldo de Souza Jr](https://github.com/juniorz)
 * [Remi Rampin](https://github.com/remram44)
 * [Rémy HUBSCHER](https://github.com/Natim)
@@ -229,9 +238,11 @@ Authors
 * [Spencer Bliven](https://github.com/sbliven)
 * [Stacey Sheldon](https://github.com/solidgoldbomb)
 * [Stavros Korokithakis](https://github.com/skorokithakis)
 * [Ștefan Talpalaru](https://github.com/stefantalpalaru)
 * [Stefan Weil](https://github.com/stweil)
 * [Steve Desmond](https://github.com/stevedesmond-ca)
 * [sydneyli](https://github.com/sydneyli)
 * [taixx046](https://github.com/taixx046)
 * [Tan Jay Jun](https://github.com/jayjun)
 * [Tapple Gao](https://github.com/tapple)
 * [Telepenin Nikolay](https://github.com/telepenin)
@@ -263,5 +274,6 @@ Authors
 * [Yomna](https://github.com/ynasser)
 * [Yoni Jah](https://github.com/yonjah)
 * [YourDaddyIsHere](https://github.com/YourDaddyIsHere)
 * [Yuseong Cho](https://github.com/g6123)
 * [Zach Shepherd](https://github.com/zjs)
 * [陈三](https://github.com/chenxsan)
@@ -11,7 +11,7 @@ to the Sphinx generated docs is provided below.
 
 
 [1] https://github.com/blog/1184-contributing-guidelines
-[2] http://docutils.sourceforge.net/docs/user/rst/quickref.html#hyperlink-targets
+[2] https://docutils.sourceforge.io/docs/user/rst/quickref.html#hyperlink-targets
 
 -->
acme/challenges.py
@@ -1,15 +1,22 @@
 """ACME Identifier Validation Challenges."""
 import abc
+import codecs
 import functools
+import hashlib
 import logging
+import socket
 
 from cryptography.hazmat.primitives import hashes  # type: ignore
 import josepy as jose
 import requests
 import six
 from OpenSSL import SSL  # type: ignore # https://github.com/python/typeshed/issues/2052
+from OpenSSL import crypto
 
+from acme import crypto_util
 from acme import errors
 from acme import fields
+from acme.mixins import ResourceMixin, TypeMixin
 
 logger = logging.getLogger(__name__)
@@ -28,7 +35,7 @@ class Challenge(jose.TypedJSONObjectWithFields):
         return UnrecognizedChallenge.from_json(jobj)
 
 
-class ChallengeResponse(jose.TypedJSONObjectWithFields):
+class ChallengeResponse(ResourceMixin, TypeMixin, jose.TypedJSONObjectWithFields):
     # _fields_to_partial_json
     """ACME challenge response."""
     TYPES = {}  # type: dict
@@ -303,7 +310,7 @@ class HTTP01Response(KeyAuthorizationChallengeResponse):
         uri = chall.uri(domain)
         logger.debug("Verifying %s at %s...", chall.typ, uri)
         try:
-            http_response = requests.get(uri)
+            http_response = requests.get(uri, verify=False)
         except requests.exceptions.RequestException as error:
             logger.error("Unable to reach %s: %s", uri, error)
             return False
@@ -362,29 +369,163 @@ class HTTP01(KeyAuthorizationChallenge):
 
 @ChallengeResponse.register
 class TLSALPN01Response(KeyAuthorizationChallengeResponse):
-    """ACME TLS-ALPN-01 challenge response.
-
-    This class only allows initiating a TLS-ALPN-01 challenge returned from the
-    CA. Full support for responding to TLS-ALPN-01 challenges by generating and
-    serving the expected response certificate is not currently provided.
-    """
+    """ACME tls-alpn-01 challenge response."""
     typ = "tls-alpn-01"
 
+    PORT = 443
+    """Verification port as defined by the protocol.
+
+    You can override it (e.g. for testing) by passing ``port`` to
+    `simple_verify`.
+
+    """
+
+    ID_PE_ACME_IDENTIFIER_V1 = b"1.3.6.1.5.5.7.1.30.1"
+    ACME_TLS_1_PROTOCOL = "acme-tls/1"
+
+    @property
+    def h(self):
+        """Hash value stored in challenge certificate"""
+        return hashlib.sha256(self.key_authorization.encode('utf-8')).digest()
+
+    def gen_cert(self, domain, key=None, bits=2048):
+        """Generate tls-alpn-01 certificate.
+
+        :param unicode domain: Domain verified by the challenge.
+        :param OpenSSL.crypto.PKey key: Optional private key used in
+            certificate generation. If not provided (``None``), then
+            fresh key will be generated.
+        :param int bits: Number of bits for newly generated key.
+
+        :rtype: `tuple` of `OpenSSL.crypto.X509` and `OpenSSL.crypto.PKey`
+
+        """
+        if key is None:
+            key = crypto.PKey()
+            key.generate_key(crypto.TYPE_RSA, bits)
+
+        der_value = b"DER:" + codecs.encode(self.h, 'hex')
+        acme_extension = crypto.X509Extension(self.ID_PE_ACME_IDENTIFIER_V1,
+                                              critical=True, value=der_value)
+
+        return crypto_util.gen_ss_cert(key, [domain], force_san=True,
+                                       extensions=[acme_extension]), key
+
+    def probe_cert(self, domain, host=None, port=None):
+        """Probe tls-alpn-01 challenge certificate.
+
+        :param unicode domain: domain being validated, required.
+        :param string host: IP address used to probe the certificate.
+        :param int port: Port used to probe the certificate.
+
+        """
+        if host is None:
+            host = socket.gethostbyname(domain)
+            logger.debug('%s resolved to %s', domain, host)
+        if port is None:
+            port = self.PORT
+
+        return crypto_util.probe_sni(host=host, port=port, name=domain,
+                                     alpn_protocols=[self.ACME_TLS_1_PROTOCOL])
+
+    def verify_cert(self, domain, cert):
+        """Verify tls-alpn-01 challenge certificate.
+
+        :param unicode domain: Domain name being validated.
+        :param OpensSSL.crypto.X509 cert: Challenge certificate.
+
+        :returns: Whether the certificate was successfully verified.
+        :rtype: bool
+
+        """
+        # pylint: disable=protected-access
+        names = crypto_util._pyopenssl_cert_or_req_all_names(cert)
+        logger.debug('Certificate %s. SANs: %s', cert.digest('sha256'), names)
+        if len(names) != 1 or names[0].lower() != domain.lower():
+            return False
+
+        for i in range(cert.get_extension_count()):
+            ext = cert.get_extension(i)
+            # FIXME: assume this is the ACME extension. Currently there is no
+            # way to get full OID of an unknown extension from pyopenssl.
+            if ext.get_short_name() == b'UNDEF':
+                data = ext.get_data()
+                return data == self.h
+
+        return False
+
+    # pylint: disable=too-many-arguments
+    def simple_verify(self, chall, domain, account_public_key,
+                      cert=None, host=None, port=None):
+        """Simple verify.
+
+        Verify ``validation`` using ``account_public_key``, optionally
+        probe tls-alpn-01 certificate and check using `verify_cert`.
+
+        :param .challenges.TLSALPN01 chall: Corresponding challenge.
+        :param str domain: Domain name being validated.
+        :param JWK account_public_key:
+        :param OpenSSL.crypto.X509 cert: Optional certificate. If not
+            provided (``None``) certificate will be retrieved using
+            `probe_cert`.
+        :param string host: IP address used to probe the certificate.
+        :param int port: Port used to probe the certificate.
+
+        :returns: ``True`` if and only if client's control of the domain has been verified.
+        :rtype: bool
+
+        """
+        if not self.verify(chall, account_public_key):
+            logger.debug("Verification of key authorization in response failed")
+            return False
+
+        if cert is None:
+            try:
+                cert = self.probe_cert(domain=domain, host=host, port=port)
+            except errors.Error as error:
+                logger.debug(str(error), exc_info=True)
+                return False
+
+        return self.verify_cert(domain, cert)
+
 
-@Challenge.register
+@Challenge.register  # pylint: disable=too-many-ancestors
 class TLSALPN01(KeyAuthorizationChallenge):
-    """ACME tls-alpn-01 challenge.
-
-    This class simply allows parsing the TLS-ALPN-01 challenge returned from
-    the CA. Full TLS-ALPN-01 support is not currently provided.
-
-    """
-    typ = "tls-alpn-01"
+    """ACME tls-alpn-01 challenge."""
+    response_cls = TLSALPN01Response
+    typ = response_cls.typ
 
     def validation(self, account_key, **kwargs):
-        """Generate validation for the challenge."""
-        raise NotImplementedError()
+        """Generate validation.
+
+        :param JWK account_key:
+        :param unicode domain: Domain verified by the challenge.
+        :param OpenSSL.crypto.PKey cert_key: Optional private key used
+            in certificate generation. If not provided (``None``), then
+            fresh key will be generated.
+
+        :rtype: `tuple` of `OpenSSL.crypto.X509` and `OpenSSL.crypto.PKey`
+
+        """
+        return self.response(account_key).gen_cert(
+            key=kwargs.get('cert_key'),
+            domain=kwargs.get('domain'))
+
+    @staticmethod
+    def is_supported():
+        """
+        Check if TLS-ALPN-01 challenge is supported on this machine.
+        This implies that a recent version of OpenSSL is installed (>= 1.0.2),
+        or a recent cryptography version shipped with the OpenSSL library is installed.
+
+        :returns: ``True`` if TLS-ALPN-01 is supported on this machine, ``False`` otherwise.
+        :rtype: bool
+
+        """
+        return (hasattr(SSL.Connection, "set_alpn_protos")
+                and hasattr(SSL.Context, "set_alpn_select_callback"))
 
 
 @Challenge.register
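A minimal sketch (not part of the diff; the key authorization string is a placeholder) of how the expanded response class round-trips: `gen_cert` embeds the SHA-256 digest of the key authorization in the critical acmeIdentifier extension, and `verify_cert` checks it back:

```python
from acme import challenges

# Placeholder key authorization; a real one comes from the ACME challenge
# token plus the account key thumbprint.
response = challenges.TLSALPN01Response(key_authorization=u'token.thumbprint')

# Self-signed certificate carrying the critical acmeIdentifier extension.
cert, _key = response.gen_cert(u'example.com')

# True: the single SAN matches the domain and the extension data equals
# the SHA-256 digest of the key authorization.
print(response.verify_cert(u'example.com', cert))
```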
@@ -13,18 +13,20 @@ import josepy as jose
|
||||
import OpenSSL
|
||||
import requests
|
||||
from requests.adapters import HTTPAdapter
|
||||
from requests.utils import parse_header_links
|
||||
from requests_toolbelt.adapters.source import SourceAddressAdapter
|
||||
import six
|
||||
from six.moves import http_client # pylint: disable=import-error
|
||||
from six.moves import http_client
|
||||
|
||||
from acme import crypto_util
|
||||
from acme import errors
|
||||
from acme import jws
|
||||
from acme import messages
|
||||
from acme.magic_typing import Dict # pylint: disable=unused-import, no-name-in-module
|
||||
from acme.magic_typing import List # pylint: disable=unused-import, no-name-in-module
|
||||
from acme.magic_typing import Set # pylint: disable=unused-import, no-name-in-module
|
||||
from acme.magic_typing import Text # pylint: disable=unused-import, no-name-in-module
|
||||
from acme.magic_typing import Dict
|
||||
from acme.magic_typing import List
|
||||
from acme.magic_typing import Set
|
||||
from acme.magic_typing import Text
|
||||
from acme.mixins import VersionedLEACMEMixin
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@@ -36,7 +38,7 @@ if sys.version_info < (2, 7, 9): # pragma: no cover
|
||||
try:
|
||||
requests.packages.urllib3.contrib.pyopenssl.inject_into_urllib3() # type: ignore
|
||||
except AttributeError:
|
||||
import urllib3.contrib.pyopenssl # pylint: disable=import-error
|
||||
import urllib3.contrib.pyopenssl
|
||||
urllib3.contrib.pyopenssl.inject_into_urllib3()
|
||||
|
||||
DEFAULT_NETWORK_TIMEOUT = 45

@@ -446,7 +448,7 @@ class Client(ClientBase):
        heapq.heapify(waiting)
        # mapping between original Authorization Resource and the most
        # recently updated one
-        updated = dict((authzr, authzr) for authzr in authzrs)
+        updated = {authzr: authzr for authzr in authzrs}

        while waiting:
            # find the smallest Retry-After, and sleep if necessary

@@ -666,7 +668,7 @@ class ClientV2(ClientBase):
        response = self._post(self.directory['newOrder'], order)
        body = messages.Order.from_json(response.json())
        authorizations = []
-        for url in body.authorizations:  # pylint: disable=not-an-iterable
+        for url in body.authorizations:
            authorizations.append(self._authzr_from_response(self._post_as_get(url), uri=url))
        return messages.OrderResource(
            body=body,

@@ -732,11 +734,13 @@ class ClientV2(ClientBase):
            raise errors.ValidationError(failed)
        return orderr.update(authorizations=responses)

-    def finalize_order(self, orderr, deadline):
+    def finalize_order(self, orderr, deadline, fetch_alternative_chains=False):
        """Finalize an order and obtain a certificate.

        :param messages.OrderResource orderr: order to finalize
        :param datetime.datetime deadline: when to stop polling and timeout
+        :param bool fetch_alternative_chains: whether to also fetch alternative
+            certificate chains

        :returns: finalized order
        :rtype: messages.OrderResource

@@ -753,8 +757,13 @@ class ClientV2(ClientBase):
            if body.error is not None:
                raise errors.IssuanceError(body.error)
            if body.certificate is not None:
-                certificate_response = self._post_as_get(body.certificate).text
-                return orderr.update(body=body, fullchain_pem=certificate_response)
+                certificate_response = self._post_as_get(body.certificate)
+                orderr = orderr.update(body=body, fullchain_pem=certificate_response.text)
+                if fetch_alternative_chains:
+                    alt_chains_urls = self._get_links(certificate_response, 'alternate')
+                    alt_chains = [self._post_as_get(url).text for url in alt_chains_urls]
+                    orderr = orderr.update(alternative_fullchains_pem=alt_chains)
+                return orderr
        raise errors.TimeoutError()
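A usage sketch of the new flag, assuming an already-configured ClientV2 named `client` and an `orderr` as documented above:

    import datetime

    deadline = datetime.datetime.now() + datetime.timedelta(seconds=90)
    orderr = client.finalize_order(orderr, deadline,
                                   fetch_alternative_chains=True)
    primary_chain = orderr.fullchain_pem
    alt_chains = orderr.alternative_fullchains_pem  # [] if the CA offers none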

    def revoke(self, cert, rsn):

@@ -784,6 +793,20 @@ class ClientV2(ClientBase):
        new_args = args[:1] + (None,) + args[1:]
        return self._post(*new_args, **kwargs)

+    def _get_links(self, response, relation_type):
+        """
+        Retrieves all Link URIs of relation_type from the response.
+        :param requests.Response response: The requests HTTP response.
+        :param str relation_type: The relation type to filter by.
+        """
+        # Can't use response.links directly because it drops multiple links
+        # of the same relation type, which is possible in RFC8555 responses.
+        if 'Link' not in response.headers:
+            return []
+        links = parse_header_links(response.headers['Link'])
+        return [l['url'] for l in links
+                if 'rel' in l and 'url' in l and l['rel'] == relation_type]
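For illustration, `requests.utils.parse_header_links` keeps one dict per link, so duplicate relation types survive, whereas `response.links` keys by rel and silently keeps only one of them:

    from requests.utils import parse_header_links

    header = ('<https://example.com/acme/cert/1>;rel="alternate", '
              '<https://example.com/acme/cert/2>;rel="alternate"')
    links = parse_header_links(header)
    urls = [l['url'] for l in links if l.get('rel') == 'alternate']
    # urls contains both alternate chain URLs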


class BackwardsCompatibleClientV2(object):
    """ACME client wrapper that tends towards V2-style calls, but

@@ -862,11 +885,13 @@ class BackwardsCompatibleClientV2(object):
            return messages.OrderResource(authorizations=authorizations, csr_pem=csr_pem)
        return self.client.new_order(csr_pem)

-    def finalize_order(self, orderr, deadline):
+    def finalize_order(self, orderr, deadline, fetch_alternative_chains=False):
        """Finalize an order and obtain a certificate.

        :param messages.OrderResource orderr: order to finalize
        :param datetime.datetime deadline: when to stop polling and timeout
+        :param bool fetch_alternative_chains: whether to also fetch alternative
+            certificate chains

        :returns: finalized order
        :rtype: messages.OrderResource

@@ -897,7 +922,7 @@ class BackwardsCompatibleClientV2(object):
            chain = crypto_util.dump_pyopenssl_chain(chain).decode()

            return orderr.update(fullchain_pem=(cert + chain))
-        return self.client.finalize_order(orderr, deadline)
+        return self.client.finalize_order(orderr, deadline, fetch_alternative_chains)

    def revoke(self, cert, rsn):
        """Revoke certificate.

@@ -942,7 +967,7 @@ class ClientNetwork(object):
    :param messages.RegistrationResource account: Account object. Required if you are
        planning to use .post() with acme_version=2 for anything other than
        creating a new account; may be set later after registering.
-    :param josepy.JWASignature alg: Algoritm to use in signing JWS.
+    :param josepy.JWASignature alg: Algorithm to use in signing JWS.
    :param bool verify_ssl: Whether to verify certificates on SSL connections.
    :param str user_agent: String to send as User-Agent header.
    :param float timeout: Timeout for requests.

@@ -987,6 +1012,8 @@ class ClientNetwork(object):
        :rtype: `josepy.JWS`

        """
+        if isinstance(obj, VersionedLEACMEMixin):
+            obj.le_acme_version = acme_version
        jobj = obj.json_dumps(indent=2).encode() if obj else b''
        logger.debug('JWS payload:\n%s', jobj)
        kwargs = {

@@ -1022,6 +1049,9 @@ class ClientNetwork(object):

        """
        response_ct = response.headers.get('Content-Type')
+        # Strip parameters from the media-type (rfc2616#section-3.7)
+        if response_ct:
+            response_ct = response_ct.split(';')[0].strip()
        try:
            # TODO: response.json() is called twice, once here, and
            # once in _get and _post clients
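The parameter stripping is plain string handling; for example:

    # 'application/json; charset=utf-8' -> 'application/json'
    response_ct = 'application/json; charset=utf-8'
    response_ct = response_ct.split(';')[0].strip()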

@@ -1117,8 +1147,8 @@ class ClientNetwork(object):
        debug_content = response.content.decode("utf-8")
        logger.debug('Received response:\nHTTP %d\n%s\n\n%s',
                     response.status_code,
-                     "\n".join(["{0}: {1}".format(k, v)
-                                for k, v in response.headers.items()]),
+                     "\n".join("{0}: {1}".format(k, v)
+                               for k, v in response.headers.items()),
                     debug_content)
        return response


@@ -11,10 +11,9 @@ from OpenSSL import crypto
from OpenSSL import SSL  # type: ignore # https://github.com/python/typeshed/issues/2052

from acme import errors
-from acme.magic_typing import Callable  # pylint: disable=unused-import, no-name-in-module
-from acme.magic_typing import Optional  # pylint: disable=unused-import, no-name-in-module
-from acme.magic_typing import Tuple  # pylint: disable=unused-import, no-name-in-module
-from acme.magic_typing import Union  # pylint: disable=unused-import, no-name-in-module
+from acme.magic_typing import Callable
+from acme.magic_typing import Tuple
+from acme.magic_typing import Union

logger = logging.getLogger(__name__)

@@ -28,19 +27,41 @@ logger = logging.getLogger(__name__)
_DEFAULT_SSL_METHOD = SSL.SSLv23_METHOD  # type: ignore


-class SSLSocket(object):
+class _DefaultCertSelection(object):
+    def __init__(self, certs):
+        self.certs = certs
+
+    def __call__(self, connection):
+        server_name = connection.get_servername()
+        return self.certs.get(server_name, None)
+
+
+class SSLSocket(object):  # pylint: disable=too-few-public-methods
    """SSL wrapper for sockets.

    :ivar socket sock: Original wrapped socket.
    :ivar dict certs: Mapping from domain names (`bytes`) to
        `OpenSSL.crypto.X509`.
    :ivar method: See `OpenSSL.SSL.Context` for allowed values.
+    :ivar alpn_selection: Hook to select negotiated ALPN protocol for
+        connection.
+    :ivar cert_selection: Hook to select certificate for connection. If given,
+        `certs` parameter would be ignored, and therefore must be empty.

    """
-    def __init__(self, sock, certs, method=_DEFAULT_SSL_METHOD):
+    def __init__(self, sock, certs=None,
+                 method=_DEFAULT_SSL_METHOD, alpn_selection=None,
+                 cert_selection=None):
        self.sock = sock
        self.certs = certs
+        self.alpn_selection = alpn_selection
        self.method = method
+        if not cert_selection and not certs:
+            raise ValueError("Neither cert_selection or certs specified.")
+        if cert_selection and certs:
+            raise ValueError("Both cert_selection and certs specified.")
+        if cert_selection is None:
+            cert_selection = _DefaultCertSelection(certs)
+        self.cert_selection = cert_selection
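A sketch of the two mutually exclusive construction paths (names are illustrative; `raw_sock`, `key`, `cert` and `table` stand for a plain socket, a pyOpenSSL key/certificate pair, and a lookup mapping):

    from acme.crypto_util import SSLSocket

    # Default selection: look the (key, cert) pair up by SNI name.
    sock = SSLSocket(raw_sock, certs={b'example.com': (key, cert)})

    # Custom selection hook; certs must then be left unset.
    def pick(connection):
        return table.get(connection.get_servername())  # (key, cert) or None
    sock = SSLSocket(raw_sock, cert_selection=pick)

    # Passing both, or neither, raises ValueError per __init__ above.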

    def __getattr__(self, name):
        return getattr(self.sock, name)

@@ -57,24 +78,25 @@ class SSLSocket(object):
        :type connection: :class:`OpenSSL.Connection`

        """
-        server_name = connection.get_servername()
-        try:
-            key, cert = self.certs[server_name]
-        except KeyError:
-            logger.debug("Server name (%s) not recognized, dropping SSL",
-                         server_name)
+        pair = self.cert_selection(connection)
+        if pair is None:
+            logger.debug("Certificate selection for server name %s failed, dropping SSL",
+                         connection.get_servername())
            return
+        key, cert = pair
        new_context = SSL.Context(self.method)
        new_context.set_options(SSL.OP_NO_SSLv2)
        new_context.set_options(SSL.OP_NO_SSLv3)
        new_context.use_privatekey(key)
        new_context.use_certificate(cert)
+        if self.alpn_selection is not None:
+            new_context.set_alpn_select_callback(self.alpn_selection)
        connection.set_context(new_context)

    class FakeConnection(object):
        """Fake OpenSSL.SSL.Connection."""

-        # pylint: disable=missing-docstring
+        # pylint: disable=missing-function-docstring

        def __init__(self, connection):
            self._wrapped = connection

@@ -86,13 +108,15 @@ class SSLSocket(object):
            # OpenSSL.SSL.Connection.shutdown doesn't accept any args
            return self._wrapped.shutdown()

-    def accept(self):  # pylint: disable=missing-docstring
+    def accept(self):  # pylint: disable=missing-function-docstring
        sock, addr = self.sock.accept()

        context = SSL.Context(self.method)
        context.set_options(SSL.OP_NO_SSLv2)
        context.set_options(SSL.OP_NO_SSLv3)
        context.set_tlsext_servername_callback(self._pick_certificate_cb)
+        if self.alpn_selection is not None:
+            context.set_alpn_select_callback(self.alpn_selection)

        ssl_sock = self.FakeConnection(SSL.Connection(context, sock))
        ssl_sock.set_accept_state()

@@ -108,8 +132,9 @@ class SSLSocket(object):
        return ssl_sock, addr


-def probe_sni(name, host, port=443, timeout=300,
-              method=_DEFAULT_SSL_METHOD, source_address=('', 0)):
+def probe_sni(name, host, port=443, timeout=300,  # pylint: disable=too-many-arguments
+              method=_DEFAULT_SSL_METHOD, source_address=('', 0),
+              alpn_protocols=None):
    """Probe SNI server for SSL certificate.

    :param bytes name: Byte string to send as the server name in the

@@ -121,6 +146,8 @@ def probe_sni(name, host, port=443, timeout=300,
    :param tuple source_address: Enables multi-path probing (selection
        of source interface). See `socket.creation_connection` for more
        info. Available only in Python 2.7+.
+    :param alpn_protocols: Protocols to request using ALPN.
+    :type alpn_protocols: `list` of `bytes`

    :raises acme.errors.Error: In case of any problems.

@@ -150,6 +177,8 @@ def probe_sni(name, host, port=443, timeout=300,
    client_ssl = SSL.Connection(context, client)
    client_ssl.set_connect_state()
    client_ssl.set_tlsext_host_name(name)  # pyOpenSSL>=0.13
+    if alpn_protocols is not None:
+        client_ssl.set_alpn_protos(alpn_protocols)
    try:
        client_ssl.do_handshake()
        client_ssl.shutdown()
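A hedged example of probing with ALPN the way the tls-alpn-01 verifier does (host and name values illustrative):

    from acme import crypto_util

    cert = crypto_util.probe_sni(
        name=b'example.com', host='127.0.0.1', port=443,
        alpn_protocols=[b'acme-tls/1'])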

@@ -240,12 +269,14 @@ def _pyopenssl_cert_or_req_san(cert_or_req):


def gen_ss_cert(key, domains, not_before=None,
-                validity=(7 * 24 * 60 * 60), force_san=True):
+                validity=(7 * 24 * 60 * 60), force_san=True, extensions=None):
    """Generate new self-signed certificate.

    :type domains: `list` of `unicode`
    :param OpenSSL.crypto.PKey key:
    :param bool force_san:
+    :param extensions: List of additional extensions to include in the cert.
+    :type extensions: `list` of `OpenSSL.crypto.X509Extension`

    If more than one domain is provided, all of the domains are put into
    ``subjectAltName`` X.509 extension and first domain is set as the

@@ -258,10 +289,13 @@ def gen_ss_cert(key, domains, not_before=None,
    cert.set_serial_number(int(binascii.hexlify(os.urandom(16)), 16))
    cert.set_version(2)

-    extensions = [
+    if extensions is None:
+        extensions = []
+
+    extensions.append(
        crypto.X509Extension(
            b"basicConstraints", True, b"CA:TRUE, pathlen:0"),
-    ]
+    )

    cert.get_subject().CN = domains[0]
    # TODO: what to put into cert.get_subject()?
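With the new parameter a caller can pass its own extensions ahead of the default basicConstraints one; a sketch (the key object and the extension choice are illustrative):

    from OpenSSL import crypto

    ku = crypto.X509Extension(b'keyUsage', critical=True,
                              value=b'digitalSignature, keyEncipherment')
    cert = gen_ss_cert(key, ['example.com'], extensions=[ku])  # key: crypto.PKey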

@@ -298,7 +332,6 @@ def dump_pyopenssl_chain(chain, filetype=crypto.FILETYPE_PEM):

    def _dump_cert(cert):
        if isinstance(cert, jose.ComparableX509):
-            # pylint: disable=protected-access
            cert = cert.wrapped
        return crypto.dump_certificate(filetype, cert)


@@ -15,7 +15,7 @@ class Header(jose.Header):
    url = jose.Field('url', omitempty=True)

    @nonce.decoder
-    def nonce(value):  # pylint: disable=missing-docstring,no-self-argument
+    def nonce(value):  # pylint: disable=no-self-argument,missing-function-docstring
        try:
            return jose.decode_b64jose(value)
        except jose.DeserializationError as error:

@@ -10,8 +10,6 @@ class TypingClass(object):
try:
    # mypy doesn't respect modifying sys.modules
    from typing import *  # pylint: disable=wildcard-import, unused-wildcard-import
-    # pylint: disable=unused-import
    from typing import Collection, IO  # type: ignore
-    # pylint: enable=unused-import
except ImportError:
    sys.modules[__name__] = TypingClass()

@@ -9,9 +9,10 @@ from acme import errors
from acme import fields
from acme import jws
from acme import util
+from acme.mixins import ResourceMixin

try:
-    from collections.abc import Hashable  # pylint: disable=no-name-in-module
+    from collections.abc import Hashable
except ImportError:  # pragma: no cover
    from collections import Hashable

@@ -36,7 +37,7 @@ ERROR_CODES = {
        ' domain'),
    'dns': 'There was a problem with a DNS query during identifier validation',
    'dnssec': 'The server could not validate a DNSSEC signed domain',
-    'incorrectResponse': 'Response recieved didn\'t match the challenge\'s requirements',
+    'incorrectResponse': 'Response received didn\'t match the challenge\'s requirements',
    # deprecate invalidEmail
    'invalidEmail': 'The provided email for a registration was invalid',
    'invalidContact': 'The provided contact URI was invalid',

@@ -205,7 +206,7 @@ class Directory(jose.JSONDeSerializable):
        external_account_required = jose.Field('externalAccountRequired', omitempty=True)

        def __init__(self, **kwargs):
-            kwargs = dict((self._internal_name(k), v) for k, v in kwargs.items())
+            kwargs = {self._internal_name(k): v for k, v in kwargs.items()}
            super(Directory.Meta, self).__init__(**kwargs)

    @property

@@ -245,13 +246,13 @@ class Directory(jose.JSONDeSerializable):
        try:
            return self[name.replace('_', '-')]
        except KeyError as error:
-            raise AttributeError(str(error) + ': ' + name)
+            raise AttributeError(str(error))

    def __getitem__(self, name):
        try:
            return self._jobj[self._canon_key(name)]
        except KeyError:
-            raise KeyError('Directory field not found')
+            raise KeyError('Directory field "' + self._canon_key(name) + '" not found')

    def to_partial_json(self):
        return self._jobj

@@ -314,6 +315,9 @@ class Registration(ResourceBody):
    # on new-reg key server ignores 'key' and populates it based on
    # JWS.signature.combined.jwk
    key = jose.Field('key', omitempty=True, decoder=jose.JWK.from_json)
+    # Contact field implements special behavior to allow messages that clear existing
+    # contacts while not expecting the `contact` field when loading from json.
+    # This is implemented in the constructor and *_json methods.
    contact = jose.Field('contact', omitempty=True, default=())
    agreement = jose.Field('agreement', omitempty=True)
    status = jose.Field('status', omitempty=True)

@@ -326,24 +330,73 @@ class Registration(ResourceBody):

    @classmethod
    def from_data(cls, phone=None, email=None, external_account_binding=None, **kwargs):
-        """Create registration resource from contact details."""
+        """
+        Create registration resource from contact details.
+
+        The `contact` keyword being passed to a Registration object is meaningful, so
+        this function represents empty iterables in its kwargs by passing on an empty
+        `tuple`.
+        """
+
+        # Note if `contact` was in kwargs.
+        contact_provided = 'contact' in kwargs
+
+        # Pop `contact` from kwargs and add formatted email or phone numbers
        details = list(kwargs.pop('contact', ()))
        if phone is not None:
            details.append(cls.phone_prefix + phone)
        if email is not None:
            details.extend([cls.email_prefix + mail for mail in email.split(',')])
-        kwargs['contact'] = tuple(details)
+
+        # Insert formatted contact information back into kwargs
+        # or insert an empty tuple if `contact` provided.
+        if details or contact_provided:
+            kwargs['contact'] = tuple(details)

        if external_account_binding:
            kwargs['external_account_binding'] = external_account_binding

        return cls(**kwargs)
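Sketch of the resulting contact semantics (values illustrative):

    from acme import messages

    # Formatted from email; 'contact' is set and will be serialized.
    r1 = messages.Registration.from_data(email='a@example.com,b@example.com')
    # r1.contact == ('mailto:a@example.com', 'mailto:b@example.com')

    # Explicit empty contact: still serialized, clearing contacts server-side.
    r2 = messages.Registration.from_data(contact=())

    # No contact information at all: the 'contact' key is omitted entirely.
    r3 = messages.Registration.from_data()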

+    def __init__(self, **kwargs):
+        """Note if the user provides a value for the `contact` member."""
+        if 'contact' in kwargs:
+            # Avoid the __setattr__ used by jose.TypedJSONObjectWithFields
+            object.__setattr__(self, '_add_contact', True)
+        super(Registration, self).__init__(**kwargs)

    def _filter_contact(self, prefix):
        return tuple(
            detail[len(prefix):] for detail in self.contact  # pylint: disable=not-an-iterable
            if detail.startswith(prefix))

+    def _add_contact_if_appropriate(self, jobj):
+        """
+        The `contact` member of Registration objects should not be required when
+        de-serializing (as it would be if the Fields' `omitempty` flag were `False`), but
+        it should be included in serializations if it was provided.
+
+        :param jobj: Dictionary containing this Registrations' data
+        :type jobj: dict
+
+        :returns: Dictionary containing Registrations data to transmit to the server
+        :rtype: dict
+        """
+        if getattr(self, '_add_contact', False):
+            jobj['contact'] = self.encode('contact')
+
+        return jobj

+    def to_partial_json(self):
+        """Modify josepy.JSONDeserializable.to_partial_json()"""
+        jobj = super(Registration, self).to_partial_json()
+        return self._add_contact_if_appropriate(jobj)

+    def fields_to_partial_json(self):
+        """Modify josepy.JSONObjectWithFields.fields_to_partial_json()"""
+        jobj = super(Registration, self).fields_to_partial_json()
+        return self._add_contact_if_appropriate(jobj)

    @property
    def phones(self):
        """All phones found in the ``contact`` field."""

@@ -356,13 +409,13 @@ class Registration(ResourceBody):


@Directory.register
-class NewRegistration(Registration):
+class NewRegistration(ResourceMixin, Registration):
    """New registration."""
    resource_type = 'new-reg'
    resource = fields.Resource(resource_type)


-class UpdateRegistration(Registration):
+class UpdateRegistration(ResourceMixin, Registration):
    """Update registration."""
    resource_type = 'reg'
    resource = fields.Resource(resource_type)

@@ -412,7 +465,7 @@ class ChallengeBody(ResourceBody):
        omitempty=True, default=None)

    def __init__(self, **kwargs):
-        kwargs = dict((self._internal_name(k), v) for k, v in kwargs.items())
+        kwargs = {self._internal_name(k): v for k, v in kwargs.items()}
        super(ChallengeBody, self).__init__(**kwargs)

    def encode(self, name):

@@ -460,7 +513,6 @@ class ChallengeResource(Resource):
    @property
    def uri(self):
        """The URL of the challenge body."""
-        # pylint: disable=function-redefined,no-member
        return self.body.uri


@@ -488,7 +540,7 @@ class Authorization(ResourceBody):
    wildcard = jose.Field('wildcard', omitempty=True)

    @challenges.decoder
-    def challenges(value):  # pylint: disable=missing-docstring,no-self-argument
+    def challenges(value):  # pylint: disable=no-self-argument,missing-function-docstring
        return tuple(ChallengeBody.from_json(chall) for chall in value)

    @property

@@ -499,13 +551,13 @@ class Authorization(ResourceBody):


@Directory.register
-class NewAuthorization(Authorization):
+class NewAuthorization(ResourceMixin, Authorization):
    """New authorization."""
    resource_type = 'new-authz'
    resource = fields.Resource(resource_type)


-class UpdateAuthorization(Authorization):
+class UpdateAuthorization(ResourceMixin, Authorization):
    """Update authorization."""
    resource_type = 'authz'
    resource = fields.Resource(resource_type)

@@ -523,7 +575,7 @@ class AuthorizationResource(ResourceWithURI):


@Directory.register
-class CertificateRequest(jose.JSONObjectWithFields):
+class CertificateRequest(ResourceMixin, jose.JSONObjectWithFields):
    """ACME new-cert request.

    :ivar josepy.util.ComparableX509 csr:

@@ -549,7 +601,7 @@ class CertificateResource(ResourceWithURI):


@Directory.register
-class Revocation(jose.JSONObjectWithFields):
+class Revocation(ResourceMixin, jose.JSONObjectWithFields):
    """Revocation message.

    :ivar .ComparableX509 certificate: `OpenSSL.crypto.X509` wrapped in

@@ -566,9 +618,11 @@ class Revocation(jose.JSONObjectWithFields):
class Order(ResourceBody):
    """Order Resource Body.

-    :ivar list of .Identifier: List of identifiers for the certificate.
+    :ivar identifiers: List of identifiers for the certificate.
+    :vartype identifiers: `list` of `.Identifier`
    :ivar acme.messages.Status status:
-    :ivar list of str authorizations: URLs of authorizations.
+    :ivar authorizations: URLs of authorizations.
+    :vartype authorizations: `list` of `str`
    :ivar str certificate: URL to download certificate as a fullchain PEM.
    :ivar str finalize: URL to POST to to request issuance once all
        authorizations have "valid" status.

@@ -585,7 +639,7 @@ class Order(ResourceBody):
    error = jose.Field('error', omitempty=True, decoder=Error.from_json)

    @identifiers.decoder
-    def identifiers(value):  # pylint: disable=missing-docstring,no-self-argument
+    def identifiers(value):  # pylint: disable=no-self-argument,missing-function-docstring
        return tuple(Identifier.from_json(identifier) for identifier in value)

class OrderResource(ResourceWithURI):

@@ -593,15 +647,20 @@ class OrderResource(ResourceWithURI):

    :ivar acme.messages.Order body:
    :ivar str csr_pem: The CSR this Order will be finalized with.
-    :ivar list of acme.messages.AuthorizationResource authorizations:
-        Fully-fetched AuthorizationResource objects.
+    :ivar authorizations: Fully-fetched AuthorizationResource objects.
+    :vartype authorizations: `list` of `acme.messages.AuthorizationResource`
    :ivar str fullchain_pem: The fetched contents of the certificate URL
        produced once the order was finalized, if it's present.
+    :ivar alternative_fullchains_pem: The fetched contents of alternative certificate
+        chain URLs produced once the order was finalized, if present and requested during
+        finalization.
+    :vartype alternative_fullchains_pem: `list` of `str`
    """
    body = jose.Field('body', decoder=Order.from_json)
    csr_pem = jose.Field('csr_pem', omitempty=True)
    authorizations = jose.Field('authorizations')
    fullchain_pem = jose.Field('fullchain_pem', omitempty=True)
+    alternative_fullchains_pem = jose.Field('alternative_fullchains_pem', omitempty=True)

@Directory.register
class NewOrder(Order):


acme/acme/mixins.py (new file, 65 additions)
@@ -0,0 +1,65 @@
+"""Useful mixins for Challenge and Resource objects"""
+
+
+class VersionedLEACMEMixin(object):
+    """This mixin stores the version of Let's Encrypt's endpoint being used."""
+    @property
+    def le_acme_version(self):
+        """Define the version of ACME protocol to use"""
+        return getattr(self, '_le_acme_version', 1)
+
+    @le_acme_version.setter
+    def le_acme_version(self, version):
+        # We need to use object.__setattr__ to not depend on the specific implementation of
+        # __setattr__ in current class (eg. jose.TypedJSONObjectWithFields raises AttributeError
+        # for any attempt to set an attribute to make objects immutable).
+        object.__setattr__(self, '_le_acme_version', version)
+
+    def __setattr__(self, key, value):
+        if key == 'le_acme_version':
+            # Required for @property to operate properly. See comment above.
+            object.__setattr__(self, key, value)
+        else:
+            super(VersionedLEACMEMixin, self).__setattr__(key, value)  # pragma: no cover
+
+
+class ResourceMixin(VersionedLEACMEMixin):
+    """
+    This mixin generates a RFC8555 compliant JWS payload
+    by removing the `resource` field if needed (eg. ACME v2 protocol).
+    """
+    def to_partial_json(self):
+        """See josepy.JSONDeserializable.to_partial_json()"""
+        return _safe_jobj_compliance(super(ResourceMixin, self),
+                                     'to_partial_json', 'resource')
+
+    def fields_to_partial_json(self):
+        """See josepy.JSONObjectWithFields.fields_to_partial_json()"""
+        return _safe_jobj_compliance(super(ResourceMixin, self),
+                                     'fields_to_partial_json', 'resource')
+
+
+class TypeMixin(VersionedLEACMEMixin):
+    """
+    This mixin allows generation of a RFC8555 compliant JWS payload
+    by removing the `type` field if needed (eg. ACME v2 protocol).
+    """
+    def to_partial_json(self):
+        """See josepy.JSONDeserializable.to_partial_json()"""
+        return _safe_jobj_compliance(super(TypeMixin, self),
+                                     'to_partial_json', 'type')
+
+    def fields_to_partial_json(self):
+        """See josepy.JSONObjectWithFields.fields_to_partial_json()"""
+        return _safe_jobj_compliance(super(TypeMixin, self),
+                                     'fields_to_partial_json', 'type')
+
+
+def _safe_jobj_compliance(instance, jobj_method, uncompliant_field):
+    if hasattr(instance, jobj_method):
+        jobj = getattr(instance, jobj_method)()
+        if instance.le_acme_version == 2:
+            jobj.pop(uncompliant_field, None)
+        return jobj
+
+    raise AttributeError('Method {0}() is not implemented.'.format(jobj_method))  # pragma: no cover
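Sketch of the mixin's effect on serialization (NewRegistration is one of the classes gaining ResourceMixin in this change):

    from acme import messages

    reg = messages.NewRegistration()
    reg.le_acme_version = 2
    assert b'resource' not in reg.json_dumps().encode()  # RFC8555 payload

    reg.le_acme_version = 1
    assert b'"resource"' in reg.json_dumps().encode()  # legacy v1 payload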

@@ -5,19 +5,16 @@ import logging
import socket
import threading

-from six.moves import BaseHTTPServer  # type: ignore # pylint: disable=import-error
-from six.moves import http_client  # pylint: disable=import-error
-from six.moves import socketserver  # type: ignore # pylint: disable=import-error
+from six.moves import BaseHTTPServer  # type: ignore
+from six.moves import http_client
+from six.moves import socketserver  # type: ignore

from acme import challenges
from acme import crypto_util
-from acme.magic_typing import List  # pylint: disable=unused-import, no-name-in-module
+from acme.magic_typing import List

logger = logging.getLogger(__name__)

-# six.moves.* # pylint: disable=no-member,attribute-defined-outside-init
-# pylint: disable=no-init


class TLSServer(socketserver.TCPServer):
    """Generic TLS Server."""

@@ -30,16 +27,22 @@ class TLSServer(socketserver.TCPServer):
            self.address_family = socket.AF_INET
        self.certs = kwargs.pop("certs", {})
        self.method = kwargs.pop(
-            # pylint: disable=protected-access
            "method", crypto_util._DEFAULT_SSL_METHOD)
        self.allow_reuse_address = kwargs.pop("allow_reuse_address", True)
        socketserver.TCPServer.__init__(self, *args, **kwargs)

    def _wrap_sock(self):
        self.socket = crypto_util.SSLSocket(
-            self.socket, certs=self.certs, method=self.method)
+            self.socket, cert_selection=self._cert_selection,
+            alpn_selection=getattr(self, '_alpn_selection', None),
+            method=self.method)

-    def server_bind(self):  # pylint: disable=missing-docstring
+    def _cert_selection(self, connection):  # pragma: no cover
+        """Callback selecting certificate for connection."""
+        server_name = connection.get_servername()
+        return self.certs.get(server_name, None)
+
+    def server_bind(self):
        self._wrap_sock()
        return socketserver.TCPServer.server_bind(self)

@@ -124,6 +127,40 @@ class BaseDualNetworkedServers(object):
        self.threads = []


+class TLSALPN01Server(TLSServer, ACMEServerMixin):
+    """TLSALPN01 Server."""
+
+    ACME_TLS_1_PROTOCOL = b"acme-tls/1"
+
+    def __init__(self, server_address, certs, challenge_certs, ipv6=False):
+        TLSServer.__init__(
+            self, server_address, _BaseRequestHandlerWithLogging, certs=certs,
+            ipv6=ipv6)
+        self.challenge_certs = challenge_certs
+
+    def _cert_selection(self, connection):
+        # TODO: We would like to serve challenge cert only if asked for it via
+        # ALPN. To do this, we need to retrieve the list of protos from client
+        # hello, but this is currently impossible with openssl [0], and ALPN
+        # negotiation is done after cert selection.
+        # Therefore, currently we always return challenge cert, and terminate
+        # handshake in alpn_selection() if ALPN protos are not what we expect.
+        # [0] https://github.com/openssl/openssl/issues/4952
+        server_name = connection.get_servername()
+        logger.debug("Serving challenge cert for server name %s", server_name)
+        return self.challenge_certs.get(server_name, None)
+
+    def _alpn_selection(self, _connection, alpn_protos):
+        """Callback to select alpn protocol."""
+        if len(alpn_protos) == 1 and alpn_protos[0] == self.ACME_TLS_1_PROTOCOL:
+            logger.debug("Agreed on %s ALPN", self.ACME_TLS_1_PROTOCOL)
+            return self.ACME_TLS_1_PROTOCOL
+        logger.debug("Cannot agree on ALPN proto. Got: %s", str(alpn_protos))
+        # Explicitly close the connection now, by returning an empty string.
+        # See https://www.pyopenssl.org/en/stable/api/ssl.html#OpenSSL.SSL.Context.set_alpn_select_callback  # pylint: disable=line-too-long
+        return b""


class HTTPServer(BaseHTTPServer.HTTPServer):
    """Generic HTTP Server."""

@@ -139,10 +176,10 @@ class HTTPServer(BaseHTTPServer.HTTPServer):
class HTTP01Server(HTTPServer, ACMEServerMixin):
    """HTTP01 Server."""

-    def __init__(self, server_address, resources, ipv6=False):
+    def __init__(self, server_address, resources, ipv6=False, timeout=30):
        HTTPServer.__init__(
            self, server_address, HTTP01RequestHandler.partial_init(
-                simple_http_resources=resources), ipv6=ipv6)
+                simple_http_resources=resources, timeout=timeout), ipv6=ipv6)


class HTTP01DualNetworkedServers(BaseDualNetworkedServers):

@@ -167,6 +204,7 @@ class HTTP01RequestHandler(BaseHTTPServer.BaseHTTPRequestHandler):

    def __init__(self, *args, **kwargs):
        self.simple_http_resources = kwargs.pop("simple_http_resources", set())
+        self.timeout = kwargs.pop('timeout', 30)
        BaseHTTPServer.BaseHTTPRequestHandler.__init__(self, *args, **kwargs)

    def log_message(self, format, *args):  # pylint: disable=redefined-builtin

@@ -178,7 +216,7 @@ class HTTP01RequestHandler(BaseHTTPServer.BaseHTTPRequestHandler):
        self.log_message("Incoming request")
        BaseHTTPServer.BaseHTTPRequestHandler.handle(self)

-    def do_GET(self):  # pylint: disable=invalid-name,missing-docstring
+    def do_GET(self):  # pylint: disable=invalid-name,missing-function-docstring
        if self.path == "/":
            self.handle_index()
        elif self.path.startswith("/" + challenges.HTTP01.URI_ROOT_PATH):

@@ -216,7 +254,7 @@ class HTTP01RequestHandler(BaseHTTPServer.BaseHTTPRequestHandler):
            self.path)

    @classmethod
-    def partial_init(cls, simple_http_resources):
+    def partial_init(cls, simple_http_resources, timeout):
        """Partially initialize this handler.

        This is useful because `socketserver.BaseServer` takes

@@ -225,4 +263,18 @@ class HTTP01RequestHandler(BaseHTTPServer.BaseHTTPRequestHandler):

        """
        return functools.partial(
-            cls, simple_http_resources=simple_http_resources)
+            cls, simple_http_resources=simple_http_resources,
+            timeout=timeout)
+
+
+class _BaseRequestHandlerWithLogging(socketserver.BaseRequestHandler):
+    """BaseRequestHandler with logging."""
+
+    def log_message(self, format, *args):  # pylint: disable=redefined-builtin
+        """Log arbitrary message."""
+        logger.debug("%s - - %s", self.client_address[0], format % args)
+
+    def handle(self):
+        """Handle request."""
+        self.log_message("Incoming request")
+        socketserver.BaseRequestHandler.handle(self)

@@ -4,4 +4,4 @@ import six

def map_keys(dikt, func):
    """Map dictionary keys."""
-    return dict((func(key), value) for key, value in six.iteritems(dikt))
+    return {func(key): value for key, value in six.iteritems(dikt)}


@@ -9,7 +9,7 @@ BUILDDIR = _build

# User-friendly check for sphinx-build
ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1)
-$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/)
+$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from https://www.sphinx-doc.org/)
endif

# Internal variables.

@@ -13,7 +13,6 @@
# serve to show the default.

import os
-import shlex
import sys

here = os.path.abspath(os.path.dirname(__file__))

@@ -41,7 +40,7 @@ extensions = [
]

autodoc_member_order = 'bysource'
-autodoc_default_flags = ['show-inheritance', 'private-members']
+autodoc_default_flags = ['show-inheritance']

# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']

@@ -113,7 +112,7 @@ pygments_style = 'sphinx'
#keep_warnings = False

# If true, `todo` and `todoList` produce output, else they produce nothing.
-todo_include_todos = True
+todo_include_todos = False


# -- Options for HTML output ----------------------------------------------

@@ -121,7 +120,7 @@ todo_include_todos = True
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.

-# http://docs.readthedocs.org/en/latest/theme.html#how-do-i-use-this-locally-and-on-read-the-docs
+# https://docs.readthedocs.io/en/stable/faq.html#i-want-to-use-the-read-the-docs-theme-locally
# on_rtd is whether we are on readthedocs.org
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
if not on_rtd:  # only import and set the theme if we're building docs locally

@@ -65,7 +65,7 @@ if errorlevel 9009 (
echo.may add the Sphinx directory to PATH.
echo.
echo.If you don't have Sphinx installed, grab it from
-echo.http://sphinx-doc.org/
+echo.https://www.sphinx-doc.org/
exit /b 1
)


@@ -1,10 +1,11 @@
+from distutils.version import LooseVersion
import sys

+from setuptools import __version__ as setuptools_version
from setuptools import find_packages
from setuptools import setup
-from setuptools.command.test import test as TestCommand

-version = '1.1.0.dev0'
+version = '1.10.0.dev0'

# Please update tox.ini when modifying dependency version requirements
install_requires = [

@@ -15,9 +16,8 @@ install_requires = [
    # 1.1.0+ is required to avoid the warnings described at
    # https://github.com/certbot/josepy/issues/13.
    'josepy>=1.1.0',
-    'mock',
-    # Connection.set_tlsext_host_name (>=0.13)
-    'PyOpenSSL>=0.13.1',
+    # Connection.set_tlsext_host_name (>=0.13) + matching Xenial requirements (>=0.15.1)
+    'PyOpenSSL>=0.15.1',
    'pyrfc3339',
    'pytz',
    'requests[security]>=2.6.0',  # security extras added in 2.4.1

@@ -26,6 +26,15 @@ install_requires = [
    'six>=1.9.0',  # needed for python_2_unicode_compatible
]

+setuptools_known_environment_markers = (LooseVersion(setuptools_version) >= LooseVersion('36.2'))
+if setuptools_known_environment_markers:
+    install_requires.append('mock ; python_version < "3.3"')
+elif 'bdist_wheel' in sys.argv[1:]:
+    raise RuntimeError('Error, you are trying to build certbot wheels using an old version '
+                       'of setuptools. Version 36.2+ of setuptools is required.')
+elif sys.version_info < (3,3):
+    install_requires.append('mock')
+
dev_extras = [
    'pytest',
    'pytest-xdist',

@@ -37,22 +46,6 @@ docs_extras = [
    'sphinx_rtd_theme',
]


-class PyTest(TestCommand):
-    user_options = []
-
-    def initialize_options(self):
-        TestCommand.initialize_options(self)
-        self.pytest_args = ''
-
-    def run_tests(self):
-        import shlex
-        # import here, cause outside the eggs aren't loaded
-        import pytest
-        errno = pytest.main(shlex.split(self.pytest_args))
-        sys.exit(errno)
-
-
setup(
    name='acme',
    version=version,

@@ -61,7 +54,7 @@ setup(
    author="Certbot Project",
    author_email='client-dev@letsencrypt.org',
    license='Apache License 2.0',
-    python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*',
+    python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*',
    classifiers=[
        'Development Status :: 5 - Production/Stable',
        'Intended Audience :: Developers',

@@ -70,11 +63,10 @@ setup(
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
-        'Programming Language :: Python :: 3.4',
-        'Programming Language :: Python :: 3.5',
        'Programming Language :: Python :: 3.6',
        'Programming Language :: Python :: 3.7',
        'Programming Language :: Python :: 3.8',
+        'Programming Language :: Python :: 3.9',
        'Topic :: Internet :: WWW/HTTP',
        'Topic :: Security',
    ],

@@ -86,7 +78,4 @@ setup(
        'dev': dev_extras,
        'docs': docs_extras,
    },
-    test_suite='acme',
-    tests_require=["pytest"],
-    cmdclass={"test": PyTest},
)


@@ -2,10 +2,16 @@
import unittest

import josepy as jose
-import mock
import OpenSSL
+try:
+    import mock
+except ImportError:  # pragma: no cover
+    from unittest import mock  # type: ignore
import requests
from six.moves.urllib import parse as urllib_parse

+from acme import errors
+
import test_util

CERT = test_util.load_comparable_cert('cert.pem')

@@ -181,7 +187,7 @@ class HTTP01ResponseTest(unittest.TestCase):
        mock_get.return_value = mock.MagicMock(text=validation)
        self.assertTrue(self.response.simple_verify(
            self.chall, "local", KEY.public_key()))
-        mock_get.assert_called_once_with(self.chall.uri("local"))
+        mock_get.assert_called_once_with(self.chall.uri("local"), verify=False)

    @mock.patch("acme.challenges.requests.get")
    def test_simple_verify_bad_validation(self, mock_get):

@@ -197,7 +203,7 @@ class HTTP01ResponseTest(unittest.TestCase):
            HTTP01Response.WHITESPACE_CUTSET))
        self.assertTrue(self.response.simple_verify(
            self.chall, "local", KEY.public_key()))
-        mock_get.assert_called_once_with(self.chall.uri("local"))
+        mock_get.assert_called_once_with(self.chall.uri("local"), verify=False)

    @mock.patch("acme.challenges.requests.get")
    def test_simple_verify_connection_error(self, mock_get):

@@ -256,30 +262,87 @@ class HTTP01Test(unittest.TestCase):
class TLSALPN01ResponseTest(unittest.TestCase):

    def setUp(self):
-        from acme.challenges import TLSALPN01Response
-        self.msg = TLSALPN01Response(key_authorization=u'foo')
+        from acme.challenges import TLSALPN01
+        self.chall = TLSALPN01(
+            token=jose.b64decode(b'a82d5ff8ef740d12881f6d3c2277ab2e'))
+        self.domain = u'example.com'
+        self.domain2 = u'example2.com'
+
+        self.response = self.chall.response(KEY)
        self.jmsg = {
            'resource': 'challenge',
            'type': 'tls-alpn-01',
-            'keyAuthorization': u'foo',
+            'keyAuthorization': self.response.key_authorization,
        }

-        from acme.challenges import TLSALPN01
-        self.chall = TLSALPN01(token=(b'x' * 16))
-        self.response = self.chall.response(KEY)

    def test_to_partial_json(self):
        self.assertEqual({k: v for k, v in self.jmsg.items() if k != 'keyAuthorization'},
-                         self.msg.to_partial_json())
+                         self.response.to_partial_json())

    def test_from_json(self):
        from acme.challenges import TLSALPN01Response
-        self.assertEqual(self.msg, TLSALPN01Response.from_json(self.jmsg))
+        self.assertEqual(self.response, TLSALPN01Response.from_json(self.jmsg))

    def test_from_json_hashable(self):
        from acme.challenges import TLSALPN01Response
        hash(TLSALPN01Response.from_json(self.jmsg))

+    def test_gen_verify_cert(self):
+        key1 = test_util.load_pyopenssl_private_key('rsa512_key.pem')
+        cert, key2 = self.response.gen_cert(self.domain, key1)
+        self.assertEqual(key1, key2)
+        self.assertTrue(self.response.verify_cert(self.domain, cert))
+
+    def test_gen_verify_cert_gen_key(self):
+        cert, key = self.response.gen_cert(self.domain)
+        self.assertTrue(isinstance(key, OpenSSL.crypto.PKey))
+        self.assertTrue(self.response.verify_cert(self.domain, cert))
+
+    def test_verify_bad_cert(self):
+        self.assertFalse(self.response.verify_cert(self.domain,
+                                                   test_util.load_cert('cert.pem')))
+
+    def test_verify_bad_domain(self):
+        key1 = test_util.load_pyopenssl_private_key('rsa512_key.pem')
+        cert, key2 = self.response.gen_cert(self.domain, key1)
+        self.assertEqual(key1, key2)
+        self.assertFalse(self.response.verify_cert(self.domain2, cert))
+
+    def test_simple_verify_bad_key_authorization(self):
+        key2 = jose.JWKRSA.load(test_util.load_vector('rsa256_key.pem'))
+        self.response.simple_verify(self.chall, "local", key2.public_key())
+
+    @mock.patch('acme.challenges.TLSALPN01Response.verify_cert', autospec=True)
+    def test_simple_verify(self, mock_verify_cert):
+        mock_verify_cert.return_value = mock.sentinel.verification
+        self.assertEqual(
+            mock.sentinel.verification, self.response.simple_verify(
+                self.chall, self.domain, KEY.public_key(),
+                cert=mock.sentinel.cert))
+        mock_verify_cert.assert_called_once_with(
+            self.response, self.domain, mock.sentinel.cert)
+
+    @mock.patch('acme.challenges.socket.gethostbyname')
+    @mock.patch('acme.challenges.crypto_util.probe_sni')
+    def test_probe_cert(self, mock_probe_sni, mock_gethostbyname):
+        mock_gethostbyname.return_value = '127.0.0.1'
+        self.response.probe_cert('foo.com')
+        mock_gethostbyname.assert_called_once_with('foo.com')
+        mock_probe_sni.assert_called_once_with(
+            host='127.0.0.1', port=self.response.PORT, name='foo.com',
+            alpn_protocols=['acme-tls/1'])
+
+        self.response.probe_cert('foo.com', host='8.8.8.8')
+        mock_probe_sni.assert_called_with(
+            host='8.8.8.8', port=mock.ANY, name='foo.com',
+            alpn_protocols=['acme-tls/1'])
+
+    @mock.patch('acme.challenges.TLSALPN01Response.probe_cert')
+    def test_simple_verify_false_on_probe_error(self, mock_probe_cert):
+        mock_probe_cert.side_effect = errors.Error
+        self.assertFalse(self.response.simple_verify(
+            self.chall, self.domain, KEY.public_key()))


class TLSALPN01Test(unittest.TestCase):

@@ -309,8 +372,13 @@ class TLSALPN01Test(unittest.TestCase):
        self.assertRaises(
            jose.DeserializationError, TLSALPN01.from_json, self.jmsg)

-    def test_validation(self):
-        self.assertRaises(NotImplementedError, self.msg.validation, KEY)
+    @mock.patch('acme.challenges.TLSALPN01Response.gen_cert')
+    def test_validation(self, mock_gen_cert):
+        mock_gen_cert.return_value = ('cert', 'key')
+        self.assertEqual(('cert', 'key'), self.msg.validation(
+            KEY, cert_key=mock.sentinel.cert_key, domain=mock.sentinel.domain))
+        mock_gen_cert.assert_called_once_with(key=mock.sentinel.cert_key,
+                                              domain=mock.sentinel.domain)


class DNSTest(unittest.TestCase):

@@ -413,5 +481,18 @@ class DNSResponseTest(unittest.TestCase):
            self.msg.check_validation(self.chall, KEY.public_key()))


+class JWSPayloadRFC8555Compliant(unittest.TestCase):
+    """Test for RFC8555 compliance of JWS generated from resources/challenges"""
+    def test_challenge_payload(self):
+        from acme.challenges import HTTP01Response
+
+        challenge_body = HTTP01Response()
+        challenge_body.le_acme_version = 2
+
+        jobj = challenge_body.json_dumps(indent=2).encode()
+        # RFC8555 states that challenge responses must have an empty payload.
+        self.assertEqual(jobj, b'{}')


if __name__ == '__main__':
    unittest.main()  # pragma: no cover


@@ -6,7 +6,10 @@ import json
import unittest

import josepy as jose
-import mock
+try:
+    import mock
+except ImportError:  # pragma: no cover
+    from unittest import mock  # type: ignore
import OpenSSL
import requests
from six.moves import http_client  # pylint: disable=import-error

@@ -15,7 +18,7 @@ from acme import challenges
from acme import errors
from acme import jws as acme_jws
from acme import messages
-from acme.magic_typing import Dict  # pylint: disable=unused-import, no-name-in-module
+from acme.mixins import VersionedLEACMEMixin
import messages_test
import test_util

@@ -260,7 +263,7 @@ class BackwardsCompatibleClientV2Test(ClientTestBase):
        with mock.patch('acme.client.ClientV2') as mock_client:
            client = self._init()
            client.finalize_order(mock_orderr, mock_deadline)
-            mock_client().finalize_order.assert_called_once_with(mock_orderr, mock_deadline)
+            mock_client().finalize_order.assert_called_once_with(mock_orderr, mock_deadline, False)

    def test_revoke(self):
        self.response.json.return_value = DIRECTORY_V1.to_json()

@@ -839,6 +842,32 @@ class ClientV2Test(ClientTestBase):
        deadline = datetime.datetime.now() - datetime.timedelta(seconds=60)
        self.assertRaises(errors.TimeoutError, self.client.finalize_order, self.orderr, deadline)

+    def test_finalize_order_alt_chains(self):
+        updated_order = self.order.update(
+            certificate='https://www.letsencrypt-demo.org/acme/cert/',
+        )
+        updated_orderr = self.orderr.update(body=updated_order,
+                                            fullchain_pem=CERT_SAN_PEM,
+                                            alternative_fullchains_pem=[CERT_SAN_PEM,
+                                                                        CERT_SAN_PEM])
+        self.response.json.return_value = updated_order.to_json()
+        self.response.text = CERT_SAN_PEM
+        self.response.headers['Link'] = '<https://example.com/acme/cert/1>;rel="alternate", ' + \
+            '<https://example.com/dir>;rel="index", ' + \
+            '<https://example.com/acme/cert/2>;title="foo";rel="alternate"'
+
+        deadline = datetime.datetime(9999, 9, 9)
+        resp = self.client.finalize_order(self.orderr, deadline, fetch_alternative_chains=True)
+        self.net.post.assert_any_call('https://example.com/acme/cert/1',
+                                      mock.ANY, acme_version=2, new_nonce_url=mock.ANY)
+        self.net.post.assert_any_call('https://example.com/acme/cert/2',
+                                      mock.ANY, acme_version=2, new_nonce_url=mock.ANY)
+        self.assertEqual(resp, updated_orderr)
+
+        del self.response.headers['Link']
+        resp = self.client.finalize_order(self.orderr, deadline, fetch_alternative_chains=True)
+        self.assertEqual(resp, updated_orderr.update(alternative_fullchains_pem=[]))

    def test_revoke(self):
        self.client.revoke(messages_test.CERT, self.rsn)
        self.net.post.assert_called_once_with(

@@ -886,7 +915,7 @@ class ClientV2Test(ClientTestBase):
        self.client.net.get.assert_not_called()


-class MockJSONDeSerializable(jose.JSONDeSerializable):
+class MockJSONDeSerializable(VersionedLEACMEMixin, jose.JSONDeSerializable):
    # pylint: disable=missing-docstring
    def __init__(self, value):
        self.value = value

@@ -980,6 +1009,35 @@ class ClientNetworkTest(unittest.TestCase):
        self.assertEqual(
            self.response, self.net._check_response(self.response))

+    @mock.patch('acme.client.logger')
+    def test_check_response_ok_ct_with_charset(self, mock_logger):
+        self.response.json.return_value = {}
+        self.response.headers['Content-Type'] = 'application/json; charset=utf-8'
+        # pylint: disable=protected-access
+        self.assertEqual(self.response, self.net._check_response(
+            self.response, content_type='application/json'))
+        try:
+            mock_logger.debug.assert_called_with(
+                'Ignoring wrong Content-Type (%r) for JSON decodable response',
+                'application/json; charset=utf-8'
+            )
+        except AssertionError:
+            return
+        raise AssertionError('Expected Content-Type warning '  # pragma: no cover
+                             'to not have been logged')
+
+    @mock.patch('acme.client.logger')
+    def test_check_response_ok_bad_ct(self, mock_logger):
+        self.response.json.return_value = {}
+        self.response.headers['Content-Type'] = 'text/plain'
+        # pylint: disable=protected-access
+        self.assertEqual(self.response, self.net._check_response(
+            self.response, content_type='application/json'))
+        mock_logger.debug.assert_called_with(
+            'Ignoring wrong Content-Type (%r) for JSON decodable response',
+            'text/plain'
+        )

    def test_check_response_conflict(self):
        self.response.ok = False
        self.response.status_code = 409

@@ -1284,7 +1342,7 @@ class ClientNetworkSourceAddressBindingTest(unittest.TestCase):
        # test should fail if the default adapter type is changed by requests
        net = ClientNetwork(key=None, alg=None)
        session = requests.Session()
-        for scheme in session.adapters.keys():
+        for scheme in session.adapters:
            client_network_adapter = net.session.adapters.get(scheme)
            default_adapter = session.adapters.get(scheme)
            self.assertEqual(client_network_adapter.__class__, default_adapter.__class__)


@@ -11,14 +11,12 @@ import six
from six.moves import socketserver  # type: ignore # pylint: disable=import-error

from acme import errors
-from acme.magic_typing import List  # pylint: disable=unused-import, no-name-in-module
import test_util


class SSLSocketAndProbeSNITest(unittest.TestCase):
    """Tests for acme.crypto_util.SSLSocket/probe_sni."""

    def setUp(self):
        self.cert = test_util.load_comparable_cert('rsa2048_cert.pem')
        key = test_util.load_pyopenssl_private_key('rsa2048_key.pem')

@@ -32,7 +30,8 @@ class SSLSocketAndProbeSNITest(unittest.TestCase):
            # six.moves.* # pylint: disable=attribute-defined-outside-init,no-init

            def server_bind(self):  # pylint: disable=missing-docstring
-                self.socket = SSLSocket(socket.socket(), certs=certs)
+                self.socket = SSLSocket(socket.socket(),
+                                        certs)
                socketserver.TCPServer.server_bind(self)

        self.server = _TestServer(('', 0), socketserver.BaseRequestHandler)

@@ -73,6 +72,18 @@ class SSLSocketAndProbeSNITest(unittest.TestCase):
            socket.setdefaulttimeout(original_timeout)


+class SSLSocketTest(unittest.TestCase):
+    """Tests for acme.crypto_util.SSLSocket."""
+
+    def test_ssl_socket_invalid_arguments(self):
+        from acme.crypto_util import SSLSocket
+        with self.assertRaises(ValueError):
+            _ = SSLSocket(None, {'sni': ('key', 'cert')},
+                          cert_selection=lambda _: None)
+        with self.assertRaises(ValueError):
+            _ = SSLSocket(None)


class PyOpenSSLCertOrReqAllNamesTest(unittest.TestCase):
    """Test for acme.crypto_util._pyopenssl_cert_or_req_all_names."""


@@ -1,7 +1,10 @@
"""Tests for acme.errors."""
import unittest

-import mock
+try:
+    import mock
+except ImportError:  # pragma: no cover
+    from unittest import mock  # type: ignore


class BadNonceTest(unittest.TestCase):

@@ -35,7 +38,7 @@ class PollErrorTest(unittest.TestCase):
    def setUp(self):
        from acme.errors import PollError
        self.timeout = PollError(
-            exhausted=set([mock.sentinel.AR]),
+            exhausted={mock.sentinel.AR},
            updated={})
        self.invalid = PollError(exhausted=set(), updated={
            mock.sentinel.AR: mock.sentinel.AR2})


@@ -2,7 +2,10 @@
import sys
import unittest

-import mock
+try:
+    import mock
+except ImportError:  # pragma: no cover
+    from unittest import mock  # type: ignore


class MagicTypingTest(unittest.TestCase):

@@ -18,7 +21,7 @@ class MagicTypingTest(unittest.TestCase):
        sys.modules['typing'] = typing_class_mock
        if 'acme.magic_typing' in sys.modules:
            del sys.modules['acme.magic_typing']  # pragma: no cover
-        from acme.magic_typing import Text  # pylint: disable=no-name-in-module
+        from acme.magic_typing import Text
        self.assertEqual(Text, text_mock)
        del sys.modules['acme.magic_typing']
        sys.modules['typing'] = temp_typing

@@ -31,7 +34,7 @@ class MagicTypingTest(unittest.TestCase):
        sys.modules['typing'] = None
        if 'acme.magic_typing' in sys.modules:
            del sys.modules['acme.magic_typing']  # pragma: no cover
-        from acme.magic_typing import Text  # pylint: disable=no-name-in-module
+        from acme.magic_typing import Text
        self.assertTrue(Text is None)
        del sys.modules['acme.magic_typing']
        sys.modules['typing'] = temp_typing
|
||||
|
||||
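The tests above juggle sys.modules to simulate an interpreter without typing available. A condensed sketch of the fallback pattern that acme.magic_typing relies on (simplified; the real module covers the full typing namespace):

    import sys

    class TypingClass(object):
        """Stand-in that resolves any requested typing name to None."""
        def __getattr__(self, name):
            return None

    try:
        import typing
    except ImportError:  # pragma: no cover
        typing = TypingClass()

    # Imports of this module now resolve against `typing` (or the stand-in),
    # so `from acme.magic_typing import Text` yields None when typing is gone.
    sys.modules[__name__] = typing
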
@@ -2,10 +2,12 @@
 import unittest

 import josepy as jose
-import mock
+try:
+    import mock
+except ImportError:  # pragma: no cover
+    from unittest import mock  # type: ignore

 from acme import challenges
 from acme.magic_typing import Dict  # pylint: disable=unused-import, no-name-in-module
 import test_util

 CERT = test_util.load_comparable_cert('cert.der')
@@ -252,6 +254,19 @@ class RegistrationTest(unittest.TestCase):
         from acme.messages import Registration
         hash(Registration.from_json(self.jobj_from))

+    def test_default_not_transmitted(self):
+        from acme.messages import NewRegistration
+        empty_new_reg = NewRegistration()
+        new_reg_with_contact = NewRegistration(contact=())
+
+        self.assertEqual(empty_new_reg.contact, ())
+        self.assertEqual(new_reg_with_contact.contact, ())
+
+        self.assertTrue('contact' not in empty_new_reg.to_partial_json())
+        self.assertTrue('contact' not in empty_new_reg.fields_to_partial_json())
+        self.assertTrue('contact' in new_reg_with_contact.to_partial_json())
+        self.assertTrue('contact' in new_reg_with_contact.fields_to_partial_json())
+

 class UpdateRegistrationTest(unittest.TestCase):
     """Tests for acme.messages.UpdateRegistration."""
@@ -453,6 +468,7 @@ class OrderResourceTest(unittest.TestCase):
             'authorizations': None,
         })

+
 class NewOrderTest(unittest.TestCase):
     """Tests for acme.messages.NewOrder."""

@@ -467,5 +483,18 @@ class NewOrderTest(unittest.TestCase):
         })


+class JWSPayloadRFC8555Compliant(unittest.TestCase):
+    """Test for RFC8555 compliance of JWS generated from resources/challenges"""
+    def test_message_payload(self):
+        from acme.messages import NewAuthorization
+
+        new_order = NewAuthorization()
+        new_order.le_acme_version = 2
+
+        jobj = new_order.json_dumps(indent=2).encode()
+        # RFC8555 states that JWS bodies must not have a resource field.
+        self.assertEqual(jobj, b'{}')
+
+
 if __name__ == '__main__':
     unittest.main()  # pragma: no cover

@@ -4,13 +4,18 @@ import threading
 import unittest

 import josepy as jose
-import mock
+try:
+    import mock
+except ImportError:  # pragma: no cover
+    from unittest import mock  # type: ignore
 import requests
 from six.moves import http_client  # pylint: disable=import-error
 from six.moves import socketserver  # type: ignore  # pylint: disable=import-error

 from acme import challenges
-from acme.magic_typing import Set  # pylint: disable=unused-import, no-name-in-module
 from acme import crypto_util
 from acme import errors

 import test_util

@@ -83,6 +88,81 @@ class HTTP01ServerTest(unittest.TestCase):
     def test_http01_not_found(self):
         self.assertFalse(self._test_http01(add=False))

+    def test_timely_shutdown(self):
+        from acme.standalone import HTTP01Server
+        server = HTTP01Server(('', 0), resources=set(), timeout=0.05)
+        server_thread = threading.Thread(target=server.serve_forever)
+        server_thread.start()
+
+        client = socket.socket()
+        client.connect(('localhost', server.socket.getsockname()[1]))
+
+        stop_thread = threading.Thread(target=server.shutdown)
+        stop_thread.start()
+        server_thread.join(5.)
+
+        is_hung = server_thread.is_alive()
+        try:
+            client.shutdown(socket.SHUT_RDWR)
+        except:  # pragma: no cover, pylint: disable=bare-except
+            # may raise error because socket could already be closed
+            pass
+
+        self.assertFalse(is_hung, msg='Server shutdown should not be hung')
+
+
+@unittest.skipIf(not challenges.TLSALPN01.is_supported(), "pyOpenSSL too old")
+class TLSALPN01ServerTest(unittest.TestCase):
+    """Test for acme.standalone.TLSALPN01Server."""
+
+    def setUp(self):
+        self.certs = {b'localhost': (
+            test_util.load_pyopenssl_private_key('rsa2048_key.pem'),
+            test_util.load_cert('rsa2048_cert.pem'),
+        )}
+        # Use different certificate for challenge.
+        self.challenge_certs = {b'localhost': (
+            test_util.load_pyopenssl_private_key('rsa4096_key.pem'),
+            test_util.load_cert('rsa4096_cert.pem'),
+        )}
+        from acme.standalone import TLSALPN01Server
+        self.server = TLSALPN01Server(("localhost", 0), certs=self.certs,
+                                      challenge_certs=self.challenge_certs)
+        # pylint: disable=no-member
+        self.thread = threading.Thread(target=self.server.serve_forever)
+        self.thread.start()
+
+    def tearDown(self):
+        self.server.shutdown()  # pylint: disable=no-member
+        self.thread.join()
+
+    # TODO: This is not implemented yet, see comments in standalone.py
+    # def test_certs(self):
+    #     host, port = self.server.socket.getsockname()[:2]
+    #     cert = crypto_util.probe_sni(
+    #         b'localhost', host=host, port=port, timeout=1)
+    #     # Expect normal cert when connecting without ALPN.
+    #     self.assertEqual(jose.ComparableX509(cert),
+    #                      jose.ComparableX509(self.certs[b'localhost'][1]))
+
+    def test_challenge_certs(self):
+        host, port = self.server.socket.getsockname()[:2]
+        cert = crypto_util.probe_sni(
+            b'localhost', host=host, port=port, timeout=1,
+            alpn_protocols=[b"acme-tls/1"])
+        # Expect challenge cert when connecting with ALPN.
+        self.assertEqual(
+            jose.ComparableX509(cert),
+            jose.ComparableX509(self.challenge_certs[b'localhost'][1])
+        )
+
+    def test_bad_alpn(self):
+        host, port = self.server.socket.getsockname()[:2]
+        with self.assertRaises(errors.Error):
+            crypto_util.probe_sni(
+                b'localhost', host=host, port=port, timeout=1,
+                alpn_protocols=[b"bad-alpn"])
+
+
 class BaseDualNetworkedServersTest(unittest.TestCase):
     """Test for acme.standalone.BaseDualNetworkedServers."""
@@ -138,7 +218,6 @@ class BaseDualNetworkedServersTest(unittest.TestCase):
 class HTTP01DualNetworkedServersTest(unittest.TestCase):
     """Tests for acme.standalone.HTTP01DualNetworkedServers."""

-
     def setUp(self):
         self.account_key = jose.JWK.load(
             test_util.load_vector('rsa1024_key.pem'))

acme/tests/testdata/README (vendored, 8 lines changed)
@@ -4,12 +4,14 @@ to use appropriate extension for vector filenames: .pem for PEM and

 The following command has been used to generate test keys:

-  for x in 256 512 1024 2048; do openssl genrsa -out rsa${k}_key.pem $k; done
+  for k in 256 512 1024 2048 4096; do openssl genrsa -out rsa${k}_key.pem $k; done

 and for the CSR:

   openssl req -key rsa2048_key.pem -new -subj '/CN=example.com' -outform DER > csr.der

-and for the certificate:
+and for the certificates:

-  openssl req -key rsa2047_key.pem -new -subj '/CN=example.com' -x509 -outform DER > cert.der
+  openssl req -key rsa2048_key.pem -new -subj '/CN=example.com' -x509 -outform DER > cert.der
+  openssl req -key rsa2048_key.pem -new -subj '/CN=example.com' -x509 > rsa2048_cert.pem
+  openssl req -key rsa1024_key.pem -new -subj '/CN=example.com' -x509 > rsa1024_cert.pem

acme/tests/testdata/rsa1024_cert.pem (vendored, new file, 13 lines)
@@ -0,0 +1,13 @@
-----BEGIN CERTIFICATE-----
MIIB/TCCAWagAwIBAgIJAOyRIBs3QT8QMA0GCSqGSIb3DQEBCwUAMBYxFDASBgNV
BAMMC2V4YW1wbGUuY29tMB4XDTE4MDQyMzEwMzE0NFoXDTE4MDUyMzEwMzE0NFow
FjEUMBIGA1UEAwwLZXhhbXBsZS5jb20wgZ8wDQYJKoZIhvcNAQEBBQADgY0AMIGJ
AoGBAJqJ87R8aVwByONxgQA9hwgvQd/QqI1r1UInXhEF2VnEtZGtUWLi100IpIqr
Mq4qusDwNZ3g8cUPtSkvJGs89djoajMDIJP7lQUEKUYnYrI0q755Tr/DgLWSk7iW
l5ezym0VzWUD0/xXUz8yRbNMTjTac80rS5SZk2ja2wWkYlRJAgMBAAGjUzBRMB0G
A1UdDgQWBBSsaX0IVZ4XXwdeffVAbG7gnxSYjTAfBgNVHSMEGDAWgBSsaX0IVZ4X
XwdeffVAbG7gnxSYjTAPBgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3DQEBCwUAA4GB
ADe7SVmvGH2nkwVfONk8TauRUDkePN1CJZKFb2zW1uO9ANJ2v5Arm/OQp0BG/xnI
Djw/aLTNVESF89oe15dkrUErtcaF413MC1Ld5lTCaJLHLGqDKY69e02YwRuxW7jY
qarpt7k7aR5FbcfO5r4V/FK/Gvp4Dmoky8uap7SJIW6x
-----END CERTIFICATE-----
acme/tests/testdata/rsa4096_cert.pem (vendored, new file, 30 lines)
@@ -0,0 +1,30 @@
-----BEGIN CERTIFICATE-----
MIIFDTCCAvWgAwIBAgIUImqDrP53V69vFROsjP/gL0YtoA4wDQYJKoZIhvcNAQEL
BQAwFjEUMBIGA1UEAwwLZXhhbXBsZS5jb20wHhcNMjAwNTI3MjMyNDE0WhcNMjAw
NjI2MjMyNDE0WjAWMRQwEgYDVQQDDAtleGFtcGxlLmNvbTCCAiIwDQYJKoZIhvcN
AQEBBQADggIPADCCAgoCggIBANY9LKLk9Dxn0MUMQFHwBoTN4ehDSWBws2KcytpF
mc8m9Mfk1wmb4fQSKYtK3wIFMfIyo9HQu0nKqMkkUw52o3ZXyOv+oWwF5qNy2BKu
lh5OMSkaZ0o13zoPpW42e+IUnyxvg70+0urD+sUue4cyTHh/nBIUjrM/05ZJ/ac8
HR0RK3H41YoqBjq69JjMZczZZhbNFit3s6p0R1TbVAgc3ckqbtX5BDyQMQQCP4Ed
m4DgbAFVqdcPUCC5W3F3fmuQiPKHiADzONZnXpy6lUvLDWqcd6loKp+nKHM6OkXX
8hmD7pE1PYMQo4hqOfhBR2IgMjAShwd5qUFjl1m2oo0Qm3PFXOk6i2ZQdS6AA/yd
B5/mX0RnM2oIdFZPb6UZFSmtEgs9sTzn+hMUyNSZQRE54px1ur1xws2R+vbsCyM5
+KoFVxDjVjU9TlZx3GvDvnqz/tbHjji6l8VHZYOBMBUXbKHu2U6pJFZ5Zp7k68/z
a3Fb9Pjtn3iRkXEyC0N5kLgqO4QTlExnxebV8aMvQpWd/qefnMn9qPYIZPEXSQAR
mEBIahkcACb60s+acG0WFFluwBPtBqEr8Q67XlSF0Ibf4iBiRzpPobhlWta1nrFg
4IWHMSoZ0PE75bhIGBEkhrpcXQCAxXmAfxfjKDH7jdJ1fRdnZ/9+OzwYGVX5GH/l
0QDtAgMBAAGjUzBRMB0GA1UdDgQWBBQh3xiz/o1nEU2ySylZ9gxCXvIPGzAfBgNV
HSMEGDAWgBQh3xiz/o1nEU2ySylZ9gxCXvIPGzAPBgNVHRMBAf8EBTADAQH/MA0G
CSqGSIb3DQEBCwUAA4ICAQAELoXz31oR9pdAwidlv9ZBOKiC7KBWy8VMqXNVkfTn
bVRxAUex7zleLFIOkWnqadsMesU9sIwrbLzBcZ8Q/vBY+z2xOPdXcgcAoAmdKWoq
YBQNiqng9r54sqlzB/77QZCf5fdktESe7NTxhCifgx5SAWq7IUQs/lm3tnMUSAfE
5ctuN6M+w8K54y3WDprcfMHpnc3ZHeSPhVQApHM0h/bDvXq0bRS7kmq27Hb153Qm
nH3TwYB5pPSWW38NbUc+s/a7mItO7S8ly8yGbA0j9c/IbN5lM+OCdk06asz3+c8E
uo8nuCBoYO5+6AqC2N7WJ3Tdr/pFA8jTbd6VNVlgCWTIR8ZosL5Fgkfv+4fUBrHt
zdVUqMUzvga5rvZnwnJ5Qfu/drHeAAo9MTNFQNe2QgDlYfWBh5GweolgmFSwrpkY
v/5wLtIyv/ASHKswybbqMIlpttcLTXjx5yuh8swttT6Wh+FQqqQ32KSRB3StiwyK
oH0ZhrwYHiFYNlPxecGX6XUta6rFtTlEdkBGSnXzgiTzL2l+Nc0as0V5B9RninZG
qJ+VOChSQ0OFvg1riSXv7tMvbLdGQnxwTRL3t6BMS8I4LA2m3ZfWUcuXT783ODTH
16f1Q1AgXd2csstTWO9cv+N/0fpX31nqrm6+CrGduSr2u4HjYYnlLIUhmdTvK3fX
Fg==
-----END CERTIFICATE-----
acme/tests/testdata/rsa4096_key.pem (vendored, new file, 51 lines)
@@ -0,0 +1,51 @@
-----BEGIN RSA PRIVATE KEY-----
MIIJKgIBAAKCAgEA1j0souT0PGfQxQxAUfAGhM3h6ENJYHCzYpzK2kWZzyb0x+TX
CZvh9BIpi0rfAgUx8jKj0dC7ScqoySRTDnajdlfI6/6hbAXmo3LYEq6WHk4xKRpn
SjXfOg+lbjZ74hSfLG+DvT7S6sP6xS57hzJMeH+cEhSOsz/Tlkn9pzwdHRErcfjV
iioGOrr0mMxlzNlmFs0WK3ezqnRHVNtUCBzdySpu1fkEPJAxBAI/gR2bgOBsAVWp
1w9QILlbcXd+a5CI8oeIAPM41mdenLqVS8sNapx3qWgqn6coczo6RdfyGYPukTU9
gxCjiGo5+EFHYiAyMBKHB3mpQWOXWbaijRCbc8Vc6TqLZlB1LoAD/J0Hn+ZfRGcz
agh0Vk9vpRkVKa0SCz2xPOf6ExTI1JlBETninHW6vXHCzZH69uwLIzn4qgVXEONW
NT1OVnHca8O+erP+1seOOLqXxUdlg4EwFRdsoe7ZTqkkVnlmnuTrz/NrcVv0+O2f
eJGRcTILQ3mQuCo7hBOUTGfF5tXxoy9ClZ3+p5+cyf2o9ghk8RdJABGYQEhqGRwA
JvrSz5pwbRYUWW7AE+0GoSvxDrteVIXQht/iIGJHOk+huGVa1rWesWDghYcxKhnQ
8TvluEgYESSGulxdAIDFeYB/F+MoMfuN0nV9F2dn/347PBgZVfkYf+XRAO0CAwEA
AQKCAgEA0hZdTkQtCYtYm9LexDsXeWYX8VcCfrMmBj7xYcg9A3oVMmzDPuYBVwH0
gWbjd6y2hOaJ5TfGYZ99kvmvBRDsTSHaoyopC7BhssjtAKz6Ay/0X3VH8usPQ3WS
aZi+NT65tK6KRqtz08ppgLGLa1G00bl5x/Um1rpxeACI4FU/y4BJ1VMJvJpnT3KE
Z86Qyagqx5NH+UpCApZSWPFX3zjHePzGgcfXErjniCHYOnpZQrFQ2KIzkfSvQ9fg
x01ByKOM2CB2C1B33TCzBAioXRH6zyAu7A59NeCK9ywTduhDvie1a+oEryFC7IQW
4s7I/H3MGX4hsf/pLXlHMy+5CZJOjRaC2h+pypfbbcuiXu6Sn64kHNpiI7SxI5DI
MIRjyG7MdUcrzq0Rt8ogwwpbCoRqrl/w3bhxtqmeZaEZtyxbjlm7reK2YkIFDgyz
JMqiJK5ZAi+9L/8c0xhjjAQQ0sIzrjmjA8U+6YnWL9jU5qXTVnBB8XQucyeeZGgk
yRHyMur71qOXN8z3UEva7MHkDTUBlj8DgTz6sEjqCipaWl0CXfDNa4IhHIXD5qiF
wplhq7OeS0v6EGG/UFa3Q/lFntxtrayxJX7uvvSccGzjPKXTjpWUELLi/FdnIsum
eXT3RgIEYozj4BibDXaBLfHTCVzxOr7AAEvKM9XWSUgLA0paSWECggEBAO9ZBeE1
GWzd1ejTTkcxBC9AK2rNsYG8PdNqiof/iTbuJWNeRqpG+KB/0CNIpjZ2X5xZd0tM
FDpHTFehlP26Roxuq50iRAFc+SN5KoiO0A3JuJAidreIgRTia1saUUrypHqWrYEA
VZVj2AI8Pyg3s1OkR2frFskY7hXBVb/pJNDP/m9xTXXIYiIXYkHYe+4RIJCnAxRv
q5YHKaX+0Ull9YCZJCxmwvcHat8sgu8qkiwUMEM6QSNEkrEbdnWYBABvC1AR6sws
7MP1h9+j22n4Zc/3D6kpFZEL9Erx8nNyhbOZ6q2Tdnf6YKVVjZdyVa8VyNnR0ROl
3BjkFaHb/bg4e4kCggEBAOUk8ZJS3qBeGCOjug384zbHGcnhUBYtYJiOz+RXBtP+
PRksbFtTkgk1sHuSGO8YRddU4Qv7Av1xL8o+DEsLBSD0YQ7pmLrR/LK+iDQ5N63O
Fve9uJH0ybxAOkiua7G24+lTsVUP//KWToL4Wh5zbHBBjL5D2Z9zoeVbcE87xhva
lImMVr4Ex252DqNP9wkZxBjudFyJ/C/TnXrjPcgwhxWTC7sLQMhE5p+490G7c4hX
PywkIKrANbu37KDiAvVS+dC66ZgpL/NUDkeloAmGNO08LGzbV6YKchlvDyWU/AvW
0hYjbL0FUq7K/wp1G9fumolB+fbI25K9c13X93STzUUCggEBAJDsNFUyk5yJjbYW
C/WrRj9d+WwH9Az77+uNPSgvn+O0usq6EMuVgYGdImfa21lqv2Wp/kOHY1AOT7lX
yyD+oyzw7dSNJOQ2aVwDR6+72Vof5DLRy1RBwPbmSd61xrc8yD658YCEtU1pUSe5
VvyBDYH9nIbdn8RP5gkiMUusXXBaIFNWJXLFzDWcNxBrhk6V7EPp/EFphFmpKJyr
+AkbRVWCZJbF+hMdWKadCwLJogwyhS6PnVU/dhrq6AU38GRa2Fy5HJRYN1xH1Oej
DX3Su8L6c28Xw0k6FcczTHx+wVoIPkKvYTIwVkiFzt/+iMckx6KsGo5tBSHFKRwC
WlQrTxECggEBALjUruLnY1oZ7AC7bTUhOimSOfQEgTQSUCtebsRxijlvhtsKYTDd
XRt+qidStjgN7S/+8DRYuZWzOeg5WnMhpXZqiOudcyume922IGl3ibjxVsdoyjs5
J4xohlrgDlBgBMDNWGoTqNGFejjcmNydH+gAh8VlN2INxJYbxqCyx17qVgwJHmLR
uggYxD/pHYvCs9GkbknCp5/wYsOgDtKuihfV741lS1D/esN1UEQ+LrfYIEW7snno
5q7Pcdhn1hkKYCWEzy2Ec4Aj2gzixQ9JqOF/OxpnZvCw1k47rg0TeqcWFYnz8x8Y
7xO8/DH0OoxXk2GJzVXJuItJs4gLzzfCjL0CggEAJFHfC9jisdy7CoWiOpNCSF1B
S0/CWDz77cZdlWkpTdaXGGp1MA/UKUFPIH8sOHfvpKS660+X4G/1ZBHmFb4P5kFF
Qy8UyUMKtSOEdZS6KFlRlfSCAMd5aSTmCvq4OSjYEpMRwUhU/iEJNkn9Z1Soehe0
U3dxJ8KiT1071geO6rRquSHoSJs6Y0WQKriYYQJOhh4Axs3PQihER2eyh+WGk8YJ
02m0mMsjntqnXtdc6IcdKaHp9ko+OpM9QZLsvt19fxBcrXj/i21uUXrzuNtKfO6M
JqGhsOrO2dh8lMhvodENvgKA0DmYDC9N7ogo7bxTNSedcjBF46FhJoqii8m70Q==
-----END RSA PRIVATE KEY-----
@@ -1,8 +1,7 @@
 include LICENSE.txt
 include README.rst
 recursive-include tests *
-include certbot_apache/_internal/centos-options-ssl-apache.conf
-include certbot_apache/_internal/options-ssl-apache.conf
 recursive-include certbot_apache/_internal/augeas_lens *.aug
+recursive-include certbot_apache/_internal/tls_configs *.conf
 global-exclude __pycache__
 global-exclude *.py[cod]

@@ -1,9 +1,19 @@
 """ Utility functions for certbot-apache plugin """
 import binascii
+import fnmatch
+import logging
+import re
+import subprocess
+
+import pkg_resources
+
+from certbot import errors
+from certbot import util

 from certbot.compat import os

+logger = logging.getLogger(__name__)
+

 def get_mod_deps(mod_name):
     """Get known module dependencies.
@@ -105,3 +115,143 @@ def parse_define_file(filepath, varname):
def unique_id():
    """ Returns a unique id to be used as a VirtualHost identifier"""
    return binascii.hexlify(os.urandom(16)).decode("utf-8")


def included_in_paths(filepath, paths):
    """
    Returns true if the filepath is included in the list of paths
    that may contain full paths or wildcard paths that need to be
    expanded.

    :param str filepath: Filepath to check
    :param list paths: List of paths to check against

    :returns: True if included
    :rtype: bool
    """

    return any(fnmatch.fnmatch(filepath, path) for path in paths)


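Because included_in_paths delegates to fnmatch, membership is tested with shell-style wildcards rather than string equality; a quick self-contained check:

    import fnmatch

    paths = ["/etc/apache2/apache2.conf", "/etc/apache2/sites-enabled/*.conf"]
    filepath = "/etc/apache2/sites-enabled/example.conf"
    # Matches via the wildcard entry, not an exact string comparison.
    assert any(fnmatch.fnmatch(filepath, path) for path in paths)
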
def parse_defines(apachectl):
    """
    Gets Defines from httpd process and returns a dictionary of
    the defined variables.

    :param str apachectl: Path to apachectl executable

    :returns: dictionary of defined variables
    :rtype: dict
    """

    variables = {}
    define_cmd = [apachectl, "-t", "-D",
                  "DUMP_RUN_CFG"]
    matches = parse_from_subprocess(define_cmd, r"Define: ([^ \n]*)")
    try:
        matches.remove("DUMP_RUN_CFG")
    except ValueError:
        return {}

    for match in matches:
        if match.count("=") > 1:
            logger.error("Unexpected number of equal signs in "
                         "runtime config dump.")
            raise errors.PluginError(
                "Error parsing Apache runtime variables")
        parts = match.partition("=")
        variables[parts[0]] = parts[2]

    return variables


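To make the parsing above concrete, here is the same regex and partition logic run over a hypothetical `apachectl -t -D DUMP_RUN_CFG` dump (the sample output is invented for illustration):

    import re

    stdout = "Define: DUMP_RUN_CFG\nDefine: FOO\nDefine: BAR=baz\n"
    matches = re.compile(r"Define: ([^ \n]*)").findall(stdout)
    matches.remove("DUMP_RUN_CFG")  # marker proving the dump actually ran

    variables = {}
    for match in matches:
        parts = match.partition("=")  # ('BAR', '=', 'baz') or ('FOO', '', '')
        variables[parts[0]] = parts[2]

    print(variables)  # {'FOO': '', 'BAR': 'baz'}
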
def parse_includes(apachectl):
    """
    Gets Include directives from httpd process and returns a list of
    their values.

    :param str apachectl: Path to apachectl executable

    :returns: list of found Include directive values
    :rtype: list of str
    """

    inc_cmd = [apachectl, "-t", "-D",
               "DUMP_INCLUDES"]
    return parse_from_subprocess(inc_cmd, r"\(.*\) (.*)")


def parse_modules(apachectl):
    """
    Get loaded modules from httpd process, and return the list
    of loaded module names.

    :param str apachectl: Path to apachectl executable

    :returns: list of found LoadModule module names
    :rtype: list of str
    """

    mod_cmd = [apachectl, "-t", "-D",
               "DUMP_MODULES"]
    return parse_from_subprocess(mod_cmd, r"(.*)_module")


def parse_from_subprocess(command, regexp):
    """Get values from stdout of subprocess command

    :param list command: Command to run
    :param str regexp: Regexp for parsing

    :returns: list parsed from command output
    :rtype: list

    """
    stdout = _get_runtime_cfg(command)
    return re.compile(regexp).findall(stdout)


def _get_runtime_cfg(command):
    """
    Get runtime configuration info.

    :param command: Command to run

    :returns: stdout from command

    """
    try:
        proc = subprocess.Popen(
            command,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            universal_newlines=True,
            env=util.env_no_snap_for_external_calls())
        stdout, stderr = proc.communicate()

    except (OSError, ValueError):
        logger.error(
            "Error running command %s for runtime parameters!%s",
            command, os.linesep)
        raise errors.MisconfigurationError(
            "Error accessing loaded Apache parameters: {0}".format(
                command))
    # Small errors that do not impede
    if proc.returncode != 0:
        logger.warning("Error in checking parameter list: %s", stderr)
        raise errors.MisconfigurationError(
            "Apache is unable to check whether or not the module is "
            "loaded because Apache is misconfigured.")

    return stdout

def find_ssl_apache_conf(prefix):
    """
    Find a TLS Apache config file in the dedicated storage.
    :param str prefix: prefix of the TLS Apache config file to find
    :return: the path to the TLS Apache config file
    :rtype: str
    """
    return pkg_resources.resource_filename(
        "certbot_apache",
        os.path.join("_internal", "tls_configs", "{0}-options-ssl-apache.conf".format(prefix)))

certbot-apache/certbot_apache/_internal/apacheparser.py (new file, 172 lines)
@@ -0,0 +1,172 @@
""" apacheconfig implementation of the ParserNode interfaces """

from certbot_apache._internal import assertions
from certbot_apache._internal import interfaces
from certbot_apache._internal import parsernode_util as util


class ApacheParserNode(interfaces.ParserNode):
    """ apacheconfig implementation of ParserNode interface.

    Expects metadata `ac_ast` to be passed in, where `ac_ast` is the AST provided
    by parsing the equivalent configuration text using the apacheconfig library.
    """

    def __init__(self, **kwargs):
        ancestor, dirty, filepath, metadata = util.parsernode_kwargs(kwargs)  # pylint: disable=unused-variable
        super(ApacheParserNode, self).__init__(**kwargs)
        self.ancestor = ancestor
        self.filepath = filepath
        self.dirty = dirty
        self.metadata = metadata
        self._raw = self.metadata["ac_ast"]

    def save(self, msg):  # pragma: no cover
        pass

    def find_ancestors(self, name):  # pylint: disable=unused-variable
        """Find ancestor BlockNodes with a given name"""
        return [ApacheBlockNode(name=assertions.PASS,
                                parameters=assertions.PASS,
                                ancestor=self,
                                filepath=assertions.PASS,
                                metadata=self.metadata)]


class ApacheCommentNode(ApacheParserNode):
    """ apacheconfig implementation of CommentNode interface """

    def __init__(self, **kwargs):
        comment, kwargs = util.commentnode_kwargs(kwargs)  # pylint: disable=unused-variable
        super(ApacheCommentNode, self).__init__(**kwargs)
        self.comment = comment

    def __eq__(self, other):  # pragma: no cover
        if isinstance(other, self.__class__):
            return (self.comment == other.comment and
                    self.dirty == other.dirty and
                    self.ancestor == other.ancestor and
                    self.metadata == other.metadata and
                    self.filepath == other.filepath)
        return False


class ApacheDirectiveNode(ApacheParserNode):
    """ apacheconfig implementation of DirectiveNode interface """

    def __init__(self, **kwargs):
        name, parameters, enabled, kwargs = util.directivenode_kwargs(kwargs)
        super(ApacheDirectiveNode, self).__init__(**kwargs)
        self.name = name
        self.parameters = parameters
        self.enabled = enabled
        self.include = None

    def __eq__(self, other):  # pragma: no cover
        if isinstance(other, self.__class__):
            return (self.name == other.name and
                    self.filepath == other.filepath and
                    self.parameters == other.parameters and
                    self.enabled == other.enabled and
                    self.dirty == other.dirty and
                    self.ancestor == other.ancestor and
                    self.metadata == other.metadata)
        return False

    def set_parameters(self, _parameters):  # pragma: no cover
        """Sets the parameters for DirectiveNode"""
        return


class ApacheBlockNode(ApacheDirectiveNode):
    """ apacheconfig implementation of BlockNode interface """

    def __init__(self, **kwargs):
        super(ApacheBlockNode, self).__init__(**kwargs)
        self.children = ()

    def __eq__(self, other):  # pragma: no cover
        if isinstance(other, self.__class__):
            return (self.name == other.name and
                    self.filepath == other.filepath and
                    self.parameters == other.parameters and
                    self.children == other.children and
                    self.enabled == other.enabled and
                    self.dirty == other.dirty and
                    self.ancestor == other.ancestor and
                    self.metadata == other.metadata)
        return False

    # pylint: disable=unused-argument
    def add_child_block(self, name, parameters=None, position=None):  # pragma: no cover
        """Adds a new BlockNode to the sequence of children"""
        new_block = ApacheBlockNode(name=assertions.PASS,
                                    parameters=assertions.PASS,
                                    ancestor=self,
                                    filepath=assertions.PASS,
                                    metadata=self.metadata)
        self.children += (new_block,)
        return new_block

    # pylint: disable=unused-argument
    def add_child_directive(self, name, parameters=None, position=None):  # pragma: no cover
        """Adds a new DirectiveNode to the sequence of children"""
        new_dir = ApacheDirectiveNode(name=assertions.PASS,
                                      parameters=assertions.PASS,
                                      ancestor=self,
                                      filepath=assertions.PASS,
                                      metadata=self.metadata)
        self.children += (new_dir,)
        return new_dir

    # pylint: disable=unused-argument
    def add_child_comment(self, comment="", position=None):  # pragma: no cover
        """Adds a new CommentNode to the sequence of children"""
        new_comment = ApacheCommentNode(comment=assertions.PASS,
                                        ancestor=self,
                                        filepath=assertions.PASS,
                                        metadata=self.metadata)
        self.children += (new_comment,)
        return new_comment

    def find_blocks(self, name, exclude=True):  # pylint: disable=unused-argument
        """Recursive search of BlockNodes from the sequence of children"""
        return [ApacheBlockNode(name=assertions.PASS,
                                parameters=assertions.PASS,
                                ancestor=self,
                                filepath=assertions.PASS,
                                metadata=self.metadata)]

    def find_directives(self, name, exclude=True):  # pylint: disable=unused-argument
        """Recursive search of DirectiveNodes from the sequence of children"""
        return [ApacheDirectiveNode(name=assertions.PASS,
                                    parameters=assertions.PASS,
                                    ancestor=self,
                                    filepath=assertions.PASS,
                                    metadata=self.metadata)]

    # pylint: disable=unused-argument
    def find_comments(self, comment, exact=False):  # pragma: no cover
        """Recursive search of CommentNodes from the sequence of children"""
        return [ApacheCommentNode(comment=assertions.PASS,
                                  ancestor=self,
                                  filepath=assertions.PASS,
                                  metadata=self.metadata)]

    def delete_child(self, child):  # pragma: no cover
        """Deletes a ParserNode from the sequence of children"""
        return

    def unsaved_files(self):  # pragma: no cover
        """Returns a list of unsaved filepaths"""
        return [assertions.PASS]

    def parsed_paths(self):  # pragma: no cover
        """Returns a list of parsed configuration file paths"""
        return [assertions.PASS]


interfaces.CommentNode.register(ApacheCommentNode)
interfaces.DirectiveNode.register(ApacheDirectiveNode)
interfaces.BlockNode.register(ApacheBlockNode)
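The register() calls at the end of this file use ABCMeta virtual subclassing: the ParserNode interfaces are abstract base classes, so registering a concrete class makes isinstance() checks pass without inheritance. A generic, certbot-independent sketch of that mechanism:

    import abc

    class Node(abc.ABC):
        pass

    class ConcreteNode(object):  # note: does not inherit from Node
        pass

    Node.register(ConcreteNode)
    assert isinstance(ConcreteNode(), Node)  # passes via virtual subclassing
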
certbot-apache/certbot_apache/_internal/assertions.py (new file, 142 lines)
@@ -0,0 +1,142 @@
"""Dual parser node assertions"""
import fnmatch

from certbot_apache._internal import interfaces


PASS = "CERTBOT_PASS_ASSERT"


def assertEqual(first, second):
    """ Equality assertion """

    if isinstance(first, interfaces.CommentNode):
        assertEqualComment(first, second)
    elif isinstance(first, interfaces.DirectiveNode):
        assertEqualDirective(first, second)

    # Do an extra interface implementation assertion, as the contents were
    # already checked for BlockNode in the assertEqualDirective
    if isinstance(first, interfaces.BlockNode):
        assert isinstance(second, interfaces.BlockNode)

    # Skip tests if filepath includes the pass value. This is done
    # because filepath is variable of the base ParserNode interface, and
    # unless the implementation is actually done, we cannot assume getting
    # correct results from boolean assertion for dirty
    if not isPass(first.filepath) and not isPass(second.filepath):
        assert first.dirty == second.dirty
        # We might want to disable this later if testing with two separate
        # (but identical) directory structures.
        assert first.filepath == second.filepath


def assertEqualComment(first, second):  # pragma: no cover
    """ Equality assertion for CommentNode """

    assert isinstance(first, interfaces.CommentNode)
    assert isinstance(second, interfaces.CommentNode)

    if not isPass(first.comment) and not isPass(second.comment):  # type: ignore
        assert first.comment == second.comment  # type: ignore


def _assertEqualDirectiveComponents(first, second):  # pragma: no cover
    """ Handles assertion for instance variables for DirectiveNode and BlockNode"""

    # Enabled value cannot be asserted, because Augeas implementation
    # is unable to figure that out.
    # assert first.enabled == second.enabled
    if not isPass(first.name) and not isPass(second.name):
        assert first.name == second.name

    if not isPass(first.parameters) and not isPass(second.parameters):
        assert first.parameters == second.parameters


def assertEqualDirective(first, second):
    """ Equality assertion for DirectiveNode """

    assert isinstance(first, interfaces.DirectiveNode)
    assert isinstance(second, interfaces.DirectiveNode)
    _assertEqualDirectiveComponents(first, second)


def isPass(value):  # pragma: no cover
    """Checks if the value is set to PASS"""
    if isinstance(value, bool):
        return True
    return PASS in value


def isPassDirective(block):
    """ Checks if BlockNode or DirectiveNode should pass the assertion """

    if isPass(block.name):
        return True
    if isPass(block.parameters):  # pragma: no cover
        return True
    if isPass(block.filepath):  # pragma: no cover
        return True
    return False


def isPassComment(comment):
    """ Checks if CommentNode should pass the assertion """

    if isPass(comment.comment):
        return True
    if isPass(comment.filepath):  # pragma: no cover
        return True
    return False


def isPassNodeList(nodelist):  # pragma: no cover
    """ Checks if a ParserNode in the nodelist should pass the assertion,
    this function is used for results of find_* methods. Unimplemented find_*
    methods should return a sequence containing a single ParserNode instance
    with assertion pass string."""

    try:
        node = nodelist[0]
    except IndexError:
        node = None

    if not node:  # pragma: no cover
        return False

    if isinstance(node, interfaces.DirectiveNode):
        return isPassDirective(node)
    return isPassComment(node)


def assertEqualSimple(first, second):
    """ Simple assertion """
    if not isPass(first) and not isPass(second):
        assert first == second


def isEqualVirtualHost(first, second):
    """
    Checks that two VirtualHost objects are similar. There are some built
    in differences with the implementations: VirtualHost created by ParserNode
    implementation doesn't have "path" defined, as it was used for Augeas path
    and that cannot obviously be used in the future. Similarly the legacy
    version lacks "node" variable, that has a reference to the BlockNode for the
    VirtualHost.
    """
    return (
        first.name == second.name and
        first.aliases == second.aliases and
        first.filep == second.filep and
        first.addrs == second.addrs and
        first.ssl == second.ssl and
        first.enabled == second.enabled and
        first.modmacro == second.modmacro and
        first.ancestor == second.ancestor
    )


def assertEqualPathsList(first, second):  # pragma: no cover
    """
    Checks that the two lists of file paths match. This assertion allows for wildcard
    paths.
    """
    if any(isPass(path) for path in first):
        return
    if any(isPass(path) for path in second):
        return
    for fpath in first:
        assert any(fnmatch.fnmatch(fpath, spath) for spath in second)
    for spath in second:
        assert any(fnmatch.fnmatch(fpath, spath) for fpath in first)
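Everything in this module funnels through the PASS sentinel, which lets an unimplemented side of the dual-parser comparison opt out of an assertion while both parsers are still partial. A small usage sketch (assuming the module imports as packaged above):

    from certbot_apache._internal import assertions

    # Skipped: one side is an unimplemented stub that returned PASS.
    assertions.assertEqualSimple(assertions.PASS, "anything")
    # Compared for real: both sides come from implemented parsers.
    assertions.assertEqualSimple("ServerName", "ServerName")
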
@@ -6,7 +6,7 @@ Authors:
   Raphael Pinson <raphink@gmail.com>

 About: Reference
-  Online Apache configuration manual: http://httpd.apache.org/docs/trunk/
+  Online Apache configuration manual: https://httpd.apache.org/docs/trunk/

 About: License
   This file is licensed under the LGPL v2+.

certbot-apache/certbot_apache/_internal/augeasparser.py (new file, 538 lines)
@@ -0,0 +1,538 @@
"""
Augeas implementation of the ParserNode interfaces.

Augeas works internally by using XPATH notation. The following is a short example
of how this all works internally, to better understand what's going on under the
hood.

A configuration file /etc/apache2/apache2.conf with the following content:

    # First comment line
    # Second comment line
    WhateverDirective whatevervalue
    <ABlock>
        DirectiveInABlock dirvalue
    </ABlock>
    SomeDirective somedirectivevalue
    <ABlock>
        AnotherDirectiveInABlock dirvalue
    </ABlock>
    # Yet another comment

Translates over to Augeas path notation (of immediate children), when calling
for example: aug.match("/files/etc/apache2/apache2.conf/*")

    [
        "/files/etc/apache2/apache2.conf/#comment[1]",
        "/files/etc/apache2/apache2.conf/#comment[2]",
        "/files/etc/apache2/apache2.conf/directive[1]",
        "/files/etc/apache2/apache2.conf/ABlock[1]",
        "/files/etc/apache2/apache2.conf/directive[2]",
        "/files/etc/apache2/apache2.conf/ABlock[2]",
        "/files/etc/apache2/apache2.conf/#comment[3]"
    ]

Regardless of the directive's name, its key in the Augeas tree is always
"directive", with an index where needed. Comments work similarly, while blocks
have their own key in the Augeas XPATH notation.

It's important to note that all of the unique keys have their own indices.

Augeas paths are case sensitive, while Apache configuration is case insensitive.
It looks like this:

    <block>
        directive value
    </block>
    <Block>
        Directive Value
    </Block>
    <block>
        directive value
    </block>
    <bLoCk>
        DiReCtiVe VaLuE
    </bLoCk>

Translates over to:

    [
        "/files/etc/apache2/apache2.conf/block[1]",
        "/files/etc/apache2/apache2.conf/Block[1]",
        "/files/etc/apache2/apache2.conf/block[2]",
        "/files/etc/apache2/apache2.conf/bLoCk[1]",
    ]
"""
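# As a quick illustration of the notation above (a sketch assuming the
# python-augeas bindings and the Httpd lens are available), the first match
# list in the docstring can be reproduced with:
#
#     import augeas
#     aug = augeas.Augeas()
#     for path in aug.match("/files/etc/apache2/apache2.conf/*"):
#         print(path, aug.get(path))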
from acme.magic_typing import Set
from certbot import errors
from certbot.compat import os

from certbot_apache._internal import apache_util
from certbot_apache._internal import assertions
from certbot_apache._internal import interfaces
from certbot_apache._internal import parser
from certbot_apache._internal import parsernode_util as util


class AugeasParserNode(interfaces.ParserNode):
    """ Augeas implementation of ParserNode interface """

    def __init__(self, **kwargs):
        ancestor, dirty, filepath, metadata = util.parsernode_kwargs(kwargs)  # pylint: disable=unused-variable
        super(AugeasParserNode, self).__init__(**kwargs)
        self.ancestor = ancestor
        self.filepath = filepath
        self.dirty = dirty
        self.metadata = metadata
        self.parser = self.metadata.get("augeasparser")
        try:
            if self.metadata["augeaspath"].endswith("/"):
                raise errors.PluginError(
                    "Augeas path: {} has a trailing slash".format(
                        self.metadata["augeaspath"]
                    )
                )
        except KeyError:
            raise errors.PluginError("Augeas path is required")

    def save(self, msg):
        self.parser.save(msg)

    def find_ancestors(self, name):
        """
        Searches for ancestor BlockNodes with a given name.

        :param str name: Name of the BlockNode parent to search for

        :returns: List of matching ancestor nodes.
        :rtype: list of AugeasBlockNode
        """

        ancestors = []

        parent = self.metadata["augeaspath"]
        while True:
            # Get the path of ancestor node
            parent = parent.rpartition("/")[0]
            # Root of the tree
            if not parent or parent == "/files":
                break
            anc = self._create_blocknode(parent)
            if anc.name.lower() == name.lower():
                ancestors.append(anc)

        return ancestors

    def _create_blocknode(self, path):
        """
        Helper function to create a BlockNode from an Augeas path. This is used
        by AugeasParserNode.find_ancestors and AugeasBlockNode.find_blocks.
        """

        name = self._aug_get_name(path)
        metadata = {"augeasparser": self.parser, "augeaspath": path}

        # Check if the file was included from the root config or initial state
        enabled = self.parser.parsed_in_original(
            apache_util.get_file_path(path)
        )

        return AugeasBlockNode(name=name,
                               enabled=enabled,
                               ancestor=assertions.PASS,
                               filepath=apache_util.get_file_path(path),
                               metadata=metadata)

    def _aug_get_name(self, path):
        """
        Helper function to get name of a configuration block or variable from path.
        """

        # Remove the ending slash if any
        if path[-1] == "/":  # pragma: no cover
            path = path[:-1]

        # Get the block name
        name = path.split("/")[-1]

        # remove [...], it's not allowed in Apache configuration and is used
        # for indexing within Augeas
        name = name.split("[")[0]
        return name


class AugeasCommentNode(AugeasParserNode):
    """ Augeas implementation of CommentNode interface """

    def __init__(self, **kwargs):
        comment, kwargs = util.commentnode_kwargs(kwargs)  # pylint: disable=unused-variable
        super(AugeasCommentNode, self).__init__(**kwargs)
        self.comment = comment

    def __eq__(self, other):
        if isinstance(other, self.__class__):
            return (self.comment == other.comment and
                    self.filepath == other.filepath and
                    self.dirty == other.dirty and
                    self.ancestor == other.ancestor and
                    self.metadata == other.metadata)
        return False


class AugeasDirectiveNode(AugeasParserNode):
    """ Augeas implementation of DirectiveNode interface """

    def __init__(self, **kwargs):
        name, parameters, enabled, kwargs = util.directivenode_kwargs(kwargs)
        super(AugeasDirectiveNode, self).__init__(**kwargs)
        self.name = name
        self.enabled = enabled
        if parameters:
            self.set_parameters(parameters)

    def __eq__(self, other):
        if isinstance(other, self.__class__):
            return (self.name == other.name and
                    self.filepath == other.filepath and
                    self.parameters == other.parameters and
                    self.enabled == other.enabled and
                    self.dirty == other.dirty and
                    self.ancestor == other.ancestor and
                    self.metadata == other.metadata)
        return False

    def set_parameters(self, parameters):
        """
        Sets parameters of a DirectiveNode or BlockNode object.

        :param list parameters: List of all parameters for the node to set.
        """
        orig_params = self._aug_get_params(self.metadata["augeaspath"])

        # Clear out old parameters
        for _ in orig_params:
            # When the first parameter is removed, the indices get updated
            param_path = "{}/arg[1]".format(self.metadata["augeaspath"])
            self.parser.aug.remove(param_path)
        # Insert new ones
        for pi, param in enumerate(parameters):
            param_path = "{}/arg[{}]".format(self.metadata["augeaspath"], pi+1)
            self.parser.aug.set(param_path, param)

    @property
    def parameters(self):
        """
        Fetches the parameters from the Augeas tree, ensuring that the sequence
        always represents the current state.

        :returns: Tuple of parameters for this DirectiveNode
        :rtype: tuple
        """
        return tuple(self._aug_get_params(self.metadata["augeaspath"]))

    def _aug_get_params(self, path):
        """Helper function to get parameters for DirectiveNodes and BlockNodes"""

        arg_paths = self.parser.aug.match(path + "/arg")
        return [self.parser.get_arg(apath) for apath in arg_paths]


class AugeasBlockNode(AugeasDirectiveNode):
    """ Augeas implementation of BlockNode interface """

    def __init__(self, **kwargs):
        super(AugeasBlockNode, self).__init__(**kwargs)
        self.children = ()

    def __eq__(self, other):
        if isinstance(other, self.__class__):
            return (self.name == other.name and
                    self.filepath == other.filepath and
                    self.parameters == other.parameters and
                    self.children == other.children and
                    self.enabled == other.enabled and
                    self.dirty == other.dirty and
                    self.ancestor == other.ancestor and
                    self.metadata == other.metadata)
        return False

    # pylint: disable=unused-argument
    def add_child_block(self, name, parameters=None, position=None):  # pragma: no cover
        """Adds a new BlockNode to the sequence of children"""

        insertpath, realpath, before = self._aug_resolve_child_position(
            name,
            position
        )
        new_metadata = {"augeasparser": self.parser, "augeaspath": realpath}

        # Create the new block
        self.parser.aug.insert(insertpath, name, before)
        # Check if the file was included from the root config or initial state
        enabled = self.parser.parsed_in_original(
            apache_util.get_file_path(realpath)
        )

        # Parameters will be set at the initialization of the new object
        new_block = AugeasBlockNode(name=name,
                                    parameters=parameters,
                                    enabled=enabled,
                                    ancestor=assertions.PASS,
                                    filepath=apache_util.get_file_path(realpath),
                                    metadata=new_metadata)
        return new_block

    # pylint: disable=unused-argument
    def add_child_directive(self, name, parameters=None, position=None):  # pragma: no cover
        """Adds a new DirectiveNode to the sequence of children"""

        if not parameters:
            raise errors.PluginError("Directive requires parameters and none were set.")

        insertpath, realpath, before = self._aug_resolve_child_position(
            "directive",
            position
        )
        new_metadata = {"augeasparser": self.parser, "augeaspath": realpath}

        # Create the new directive
        self.parser.aug.insert(insertpath, "directive", before)
        # Set the directive key
        self.parser.aug.set(realpath, name)
        # Check if the file was included from the root config or initial state
        enabled = self.parser.parsed_in_original(
            apache_util.get_file_path(realpath)
        )

        new_dir = AugeasDirectiveNode(name=name,
                                      parameters=parameters,
                                      enabled=enabled,
                                      ancestor=assertions.PASS,
                                      filepath=apache_util.get_file_path(realpath),
                                      metadata=new_metadata)
        return new_dir

    def add_child_comment(self, comment="", position=None):
        """Adds a new CommentNode to the sequence of children"""

        insertpath, realpath, before = self._aug_resolve_child_position(
            "#comment",
            position
        )
        new_metadata = {"augeasparser": self.parser, "augeaspath": realpath}

        # Create the new comment
        self.parser.aug.insert(insertpath, "#comment", before)
        # Set the comment content
        self.parser.aug.set(realpath, comment)

        new_comment = AugeasCommentNode(comment=comment,
                                        ancestor=assertions.PASS,
                                        filepath=apache_util.get_file_path(realpath),
                                        metadata=new_metadata)
        return new_comment

    def find_blocks(self, name, exclude=True):
        """Recursive search of BlockNodes from the sequence of children"""

        nodes = []
        paths = self._aug_find_blocks(name)
        if exclude:
            paths = self.parser.exclude_dirs(paths)
        for path in paths:
            nodes.append(self._create_blocknode(path))

        return nodes

    def find_directives(self, name, exclude=True):
        """Recursive search of DirectiveNodes from the sequence of children"""

        nodes = []
        ownpath = self.metadata.get("augeaspath")

        directives = self.parser.find_dir(name, start=ownpath, exclude=exclude)
        already_parsed = set()  # type: Set[str]
        for directive in directives:
            # Remove the /arg part from the Augeas path
            directive = directive.partition("/arg")[0]
            # find_dir returns an object for each _parameter_ of a directive
            # so we need to filter out duplicates.
            if directive not in already_parsed:
                nodes.append(self._create_directivenode(directive))
                already_parsed.add(directive)

        return nodes

    def find_comments(self, comment):
        """
        Recursive search of CommentNodes from the sequence of children.

        :param str comment: Comment content to search for.
        """

        nodes = []
        ownpath = self.metadata.get("augeaspath")

        comments = self.parser.find_comments(comment, start=ownpath)
        for com in comments:
            nodes.append(self._create_commentnode(com))

        return nodes

    def delete_child(self, child):
        """
        Deletes a ParserNode from the sequence of children, and raises an
        exception if it's unable to do so.

        :param AugeasParserNode child: A node to delete.
        """
        if not self.parser.aug.remove(child.metadata["augeaspath"]):
            raise errors.PluginError(
                ("Could not delete child node, the Augeas path: {} doesn't " +
                 "seem to exist.").format(child.metadata["augeaspath"])
            )

    def unsaved_files(self):
        """Returns a list of unsaved filepaths"""
        return self.parser.unsaved_files()

    def parsed_paths(self):
        """
        Returns a list of file paths that have currently been parsed into the parser
        tree. The returned list may include paths with wildcard characters, for
        example: ['/etc/apache2/conf.d/*.load']

        This is typically called on the root node of the ParserNode tree.

        :returns: list of file paths of files that have been parsed
        """

        res_paths = []

        paths = self.parser.existing_paths
        for directory in paths:
            for filename in paths[directory]:
                res_paths.append(os.path.join(directory, filename))

        return res_paths

    def _create_commentnode(self, path):
        """Helper function to create a CommentNode from Augeas path"""

        comment = self.parser.aug.get(path)
        metadata = {"augeasparser": self.parser, "augeaspath": path}

        # Because of the dynamic nature of AugeasParser and the fact that we're
        # not populating the complete node tree, the ancestor has a dummy value
        return AugeasCommentNode(comment=comment,
                                 ancestor=assertions.PASS,
                                 filepath=apache_util.get_file_path(path),
                                 metadata=metadata)

    def _create_directivenode(self, path):
        """Helper function to create a DirectiveNode from Augeas path"""

        name = self.parser.get_arg(path)
        metadata = {"augeasparser": self.parser, "augeaspath": path}

        # Check if the file was included from the root config or initial state
        enabled = self.parser.parsed_in_original(
            apache_util.get_file_path(path)
        )
        return AugeasDirectiveNode(name=name,
                                   ancestor=assertions.PASS,
                                   enabled=enabled,
                                   filepath=apache_util.get_file_path(path),
                                   metadata=metadata)

    def _aug_find_blocks(self, name):
        """Helper function to perform a search to Augeas DOM tree to search
        configuration blocks with a given name"""

        # The code here is modified from configurator.get_virtual_hosts()
        blk_paths = set()
        for vhost_path in list(self.parser.parser_paths):
            paths = self.parser.aug.match(
                ("/files%s//*[label()=~regexp('%s')]" %
                 (vhost_path, parser.case_i(name))))
            blk_paths.update([path for path in paths if
                              name.lower() in os.path.basename(path).lower()])
        return blk_paths

    def _aug_resolve_child_position(self, name, position):
        """
        Helper function that iterates through the immediate children and figures
        out the insertion path for a new AugeasParserNode.

        Augeas also generalizes indices for directives and comments, simply by
        using "directive" or "comment" respectively as their names.

        This function iterates over the existing children of the AugeasBlockNode,
        returning their insertion path, resulting Augeas path and if the new node
        should be inserted before or after the returned insertion path.

        Note: while Apache is case insensitive, Augeas is not, and blocks like
        Nameofablock and NameOfABlock have different indices.

        :param str name: Name of the AugeasBlockNode to insert, "directive" for
            AugeasDirectiveNode or "comment" for AugeasCommentNode
        :param int position: The position to insert the child AugeasParserNode to

        :returns: Tuple of insert path, resulting path and a boolean if the new
            node should be inserted before it.
        :rtype: tuple of str, str, bool
        """

        # Default to appending
        before = False

        all_children = self.parser.aug.match("{}/*".format(
            self.metadata["augeaspath"])
        )

        # Calculate resulting_path
        # Augeas indices start at 1. We use counter to calculate the index to
        # be used in resulting_path.
        counter = 1
        for i, child in enumerate(all_children):
            if position is not None and i >= position:
                # We're not going to insert the new node to an index after this
                break
            childname = self._aug_get_name(child)
            if name == childname:
                counter += 1

        resulting_path = "{}/{}[{}]".format(
            self.metadata["augeaspath"],
            name,
            counter
        )

        # Form the correct insert_path
        # Inserting the only child and appending as the last child work
        # similarly in Augeas.
        append = not all_children or position is None or position >= len(all_children)
        if append:
            insert_path = "{}/*[last()]".format(
                self.metadata["augeaspath"]
            )
        elif position == 0:
            # Insert as the first child, before the current first one.
            insert_path = all_children[0]
            before = True
        else:
            insert_path = "{}/*[{}]".format(
                self.metadata["augeaspath"],
                position
            )

        return (insert_path, resulting_path, before)
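
# A sketch of the underlying Augeas call (python-augeas signature:
# aug.insert(path, label, before)): inserting a new "#comment" before the
# current first child of a block would look like
#
#     aug.insert("/files/etc/apache2/apache2.conf/ABlock[1]/*[1]", "#comment", True)
#
# which corresponds to the position == 0 branch above.
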
interfaces.CommentNode.register(AugeasCommentNode)
interfaces.DirectiveNode.register(AugeasDirectiveNode)
interfaces.BlockNode.register(AugeasBlockNode)
@@ -1,25 +0,0 @@
# This file contains important security parameters. If you modify this file
# manually, Certbot will be unable to automatically provide future security
# updates. Instead, Certbot will print and log an error message with a path to
# the up-to-date file that you will need to refer to when manually updating
# this file.

SSLEngine on

# Intermediate configuration, tweak to your needs
SSLProtocol all -SSLv2 -SSLv3
SSLCipherSuite ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305:ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:DHE-RSA-AES128-GCM-SHA256:DHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-AES128-SHA256:ECDHE-RSA-AES128-SHA256:ECDHE-ECDSA-AES128-SHA:ECDHE-RSA-AES256-SHA384:ECDHE-RSA-AES128-SHA:ECDHE-ECDSA-AES256-SHA384:ECDHE-ECDSA-AES256-SHA:ECDHE-RSA-AES256-SHA:DHE-RSA-AES128-SHA256:DHE-RSA-AES128-SHA:DHE-RSA-AES256-SHA256:DHE-RSA-AES256-SHA:ECDHE-ECDSA-DES-CBC3-SHA:ECDHE-RSA-DES-CBC3-SHA:EDH-RSA-DES-CBC3-SHA:AES128-GCM-SHA256:AES256-GCM-SHA384:AES128-SHA256:AES256-SHA256:AES128-SHA:AES256-SHA:DES-CBC3-SHA:!DSS
SSLHonorCipherOrder on

SSLOptions +StrictRequire

# Add vhost name to log entries:
LogFormat "%h %l %u %t \"%r\" %>s %b \"%{Referer}i\" \"%{User-agent}i\"" vhost_combined
LogFormat "%v %h %l %u %t \"%r\" %>s %b" vhost_common

#CustomLog /var/log/apache2/access.log vhost_combined
#LogLevel warn
#ErrorLog /var/log/apache2/error.log

# Always ensure Cookies have "Secure" set (JAH 2012/1)
#Header edit Set-Cookie (?i)^(.*)(;\s*secure)??((\s*;)?(.*)) "$1; Secure$3$4"
@@ -1,6 +1,7 @@
 """Apache Configurator."""
 # pylint: disable=too-many-lines
 from collections import defaultdict
+from distutils.version import LooseVersion
 import copy
 import fnmatch
 import logging
@@ -8,17 +9,21 @@ import re
 import socket
 import time

 import pkg_resources
 import six
 import zope.component
 import zope.interface
+try:
+    import apacheconfig
+    HAS_APACHECONFIG = True
+except ImportError:  # pragma: no cover
+    HAS_APACHECONFIG = False

 from acme import challenges
-from acme.magic_typing import DefaultDict  # pylint: disable=unused-import, no-name-in-module
-from acme.magic_typing import Dict  # pylint: disable=unused-import, no-name-in-module
-from acme.magic_typing import List  # pylint: disable=unused-import, no-name-in-module
-from acme.magic_typing import Set  # pylint: disable=unused-import, no-name-in-module
-from acme.magic_typing import Union  # pylint: disable=unused-import, no-name-in-module
+from acme.magic_typing import DefaultDict
+from acme.magic_typing import Dict
+from acme.magic_typing import List
+from acme.magic_typing import Set
+from acme.magic_typing import Union
 from certbot import errors
 from certbot import interfaces
 from certbot import util
@@ -29,8 +34,10 @@ from certbot.plugins import common
 from certbot.plugins.enhancements import AutoHSTSEnhancement
 from certbot.plugins.util import path_surgery
 from certbot_apache._internal import apache_util
+from certbot_apache._internal import assertions
 from certbot_apache._internal import constants
 from certbot_apache._internal import display_ops
+from certbot_apache._internal import dualparser
 from certbot_apache._internal import http_01
 from certbot_apache._internal import obj
 from certbot_apache._internal import parser
@@ -108,14 +115,30 @@ class ApacheConfigurator(common.Installer):
|
||||
handle_modules=False,
|
||||
handle_sites=False,
|
||||
challenge_location="/etc/apache2",
|
||||
MOD_SSL_CONF_SRC=pkg_resources.resource_filename(
|
||||
"certbot_apache", os.path.join("_internal", "options-ssl-apache.conf"))
|
||||
bin=None
|
||||
)
|
||||
|
||||
    def option(self, key):
        """Get a value from options"""
        return self.options.get(key)

    def pick_apache_config(self, warn_on_no_mod_ssl=True):
        """
        Pick the appropriate TLS Apache configuration file for the current version of Apache and OS.

        :param bool warn_on_no_mod_ssl: True if we should warn if mod_ssl is not found.

        :return: the path to the TLS Apache configuration file to use
        :rtype: str
        """
        # Disabling TLS session tickets is supported by Apache 2.4.11+ and OpenSSL 1.0.2l+.
        # So for old versions of Apache we pick a configuration without this option.
        openssl_version = self.openssl_version(warn_on_no_mod_ssl)
        if self.version < (2, 4, 11) or not openssl_version or \
                LooseVersion(openssl_version) < LooseVersion('1.0.2l'):
            return apache_util.find_ssl_apache_conf("old")
        return apache_util.find_ssl_apache_conf("current")
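The gate above reads naturally as a standalone predicate combining an Apache version tuple with a LooseVersion comparison (the same distutils class the diff imports). A minimal sketch, with a hypothetical function name that is not part of the plugin:

from distutils.version import LooseVersion

def supports_session_ticket_directive(apache_version, openssl_version):
    # Mirrors the gate above: SSLSessionTickets needs Apache >= 2.4.11
    # and OpenSSL >= 1.0.2l; an unknown OpenSSL version counts as unsupported.
    if apache_version < (2, 4, 11):
        return False
    if not openssl_version:
        return False
    return LooseVersion(openssl_version) >= LooseVersion('1.0.2l')

assert not supports_session_ticket_directive((2, 4, 8), '1.1.1')
assert supports_session_ticket_directive((2, 4, 29), '1.1.1d')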
|
||||
def _prepare_options(self):
|
||||
"""
|
||||
Set the values possibly changed by command line parameters to
|
||||
@@ -123,7 +146,7 @@ class ApacheConfigurator(common.Installer):
|
||||
"""
|
||||
opts = ["enmod", "dismod", "le_vhost_ext", "server_root", "vhost_root",
|
||||
"logs_root", "challenge_location", "handle_modules", "handle_sites",
|
||||
"ctl"]
|
||||
"ctl", "bin"]
|
||||
for o in opts:
|
||||
# Config options use dashes instead of underscores
|
||||
if self.conf(o.replace("_", "-")) is not None:
|
||||
@@ -172,6 +195,8 @@ class ApacheConfigurator(common.Installer):
|
||||
"(Only Ubuntu/Debian currently)")
|
||||
add("ctl", default=DEFAULTS["ctl"],
|
||||
help="Full path to Apache control script")
|
||||
add("bin", default=DEFAULTS["bin"],
|
||||
help="Full path to apache2/httpd binary")
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
"""Initialize an Apache Configurator.
|
||||
@@ -181,26 +206,34 @@ class ApacheConfigurator(common.Installer):
|
||||
|
||||
"""
|
||||
version = kwargs.pop("version", None)
|
||||
use_parsernode = kwargs.pop("use_parsernode", False)
|
||||
openssl_version = kwargs.pop("openssl_version", None)
|
||||
super(ApacheConfigurator, self).__init__(*args, **kwargs)
|
||||
|
||||
# Add name_server association dict
|
||||
self.assoc = dict() # type: Dict[str, obj.VirtualHost]
|
||||
self.assoc = {} # type: Dict[str, obj.VirtualHost]
|
||||
# Outstanding challenges
|
||||
self._chall_out = set() # type: Set[KeyAuthorizationAnnotatedChallenge]
|
||||
# List of vhosts configured per wildcard domain on this run.
|
||||
# used by deploy_cert() and enhance()
|
||||
self._wildcard_vhosts = dict() # type: Dict[str, List[obj.VirtualHost]]
|
||||
self._wildcard_vhosts = {} # type: Dict[str, List[obj.VirtualHost]]
|
||||
# Maps enhancements to vhosts we've enabled the enhancement for
|
||||
self._enhanced_vhosts = defaultdict(set) # type: DefaultDict[str, Set[obj.VirtualHost]]
|
||||
# Temporary state for AutoHSTS enhancement
|
||||
self._autohsts = {} # type: Dict[str, Dict[str, Union[int, float]]]
|
||||
# Reverter save notes
|
||||
self.save_notes = ""
|
||||
|
||||
# Should we use ParserNode implementation instead of the old behavior
|
||||
self.USE_PARSERNODE = use_parsernode
|
||||
# Saves the list of file paths that were parsed initially, and
|
||||
# not added to parser tree by self.conf("vhost-root") for example.
|
||||
self.parsed_paths = [] # type: List[str]
|
||||
# These will be set in the prepare function
|
||||
self._prepared = False
|
||||
self.parser = None
|
||||
self.parser_root = None
|
||||
self.version = version
|
||||
self._openssl_version = openssl_version
|
||||
self.vhosts = None
|
||||
self.options = copy.deepcopy(self.OS_DEFAULTS)
|
||||
self._enhance_func = {"redirect": self._enable_redirect,
|
||||
@@ -217,6 +250,59 @@ class ApacheConfigurator(common.Installer):
|
||||
"""Full absolute path to digest of updated SSL configuration file."""
|
||||
return os.path.join(self.config.config_dir, constants.UPDATED_MOD_SSL_CONF_DIGEST)
|
||||
|
||||
    def _open_module_file(self, ssl_module_location):
        """Read the raw contents of the ssl module file; isolated so tests can stub it out."""
        try:
            with open(ssl_module_location, mode="rb") as f:
                contents = f.read()
        except IOError as error:
            logger.debug(str(error), exc_info=True)
            return None
        return contents
|
||||
    def openssl_version(self, warn_on_no_mod_ssl=True):
        """Lazily retrieve openssl version

        :param bool warn_on_no_mod_ssl: `True` if we should warn if mod_ssl is not found. Set to
            `False` when we know we'll try to enable mod_ssl later. This is currently debian/ubuntu,
            when called from `prepare`.

        :return: the OpenSSL version as a string, or None.
        :rtype: str or None
        """
        if self._openssl_version:
            return self._openssl_version
        # Step 1. Determine the location of ssl_module
        try:
            ssl_module_location = self.parser.modules['ssl_module']
        except KeyError:
            if warn_on_no_mod_ssl:
                logger.warning("Could not find ssl_module; not disabling session tickets.")
            return None
        if ssl_module_location:
            # Possibility A: ssl_module is a DSO
            ssl_module_location = self.parser.standard_path_from_server_root(ssl_module_location)
        else:
            # Possibility B: ssl_module is statically linked into Apache
            if self.option("bin"):
                ssl_module_location = self.option("bin")
            else:
                logger.warning("ssl_module is statically linked but --apache-bin is "
                               "missing; not disabling session tickets.")
                return None
        # Step 2. Grep in the binary for openssl version
        contents = self._open_module_file(ssl_module_location)
        if not contents:
            logger.warning("Unable to read ssl_module file; not disabling session tickets.")
            return None
        # looks like: OpenSSL 1.0.2s 28 May 2019
        matches = re.findall(br"OpenSSL ([0-9]\.[^ ]+) ", contents)
        if not matches:
            logger.warning("Could not find OpenSSL version; not disabling session tickets.")
            return None
        self._openssl_version = matches[0].decode('UTF-8')
        return self._openssl_version
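Step 2 boils down to one regular expression over the raw module bytes. A self-contained sketch of just that extraction step (helper name hypothetical):

import re

def openssl_version_from_blob(contents):
    # Mirrors the regex used above: b"OpenSSL 1.0.2s 28 May 2019" -> "1.0.2s"
    matches = re.findall(br"OpenSSL ([0-9]\.[^ ]+) ", contents)
    return matches[0].decode('UTF-8') if matches else None

assert openssl_version_from_blob(b"...OpenSSL 1.0.2s 28 May 2019...") == "1.0.2s"
assert openssl_version_from_blob(b"no version string here") is None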
|
||||
def prepare(self):
|
||||
"""Prepare the authenticator/installer.
|
||||
|
||||
@@ -249,14 +335,26 @@ class ApacheConfigurator(common.Installer):
|
||||
# Perform the actual Augeas initialization to be able to react
|
||||
self.parser = self.get_parser()
|
||||
|
||||
# Set up ParserNode root
|
||||
pn_meta = {"augeasparser": self.parser,
|
||||
"augeaspath": self.parser.get_root_augpath(),
|
||||
"ac_ast": None}
|
||||
if self.USE_PARSERNODE:
|
||||
self.parser_root = self.get_parsernode_root(pn_meta)
|
||||
self.parsed_paths = self.parser_root.parsed_paths()
|
||||
|
||||
# Check for errors in parsing files with Augeas
|
||||
self.parser.check_parsing_errors("httpd.aug")
|
||||
|
||||
# Get all of the available vhosts
|
||||
self.vhosts = self.get_virtual_hosts()
|
||||
|
||||
# We may try to enable mod_ssl later. If so, we shouldn't warn if we can't find it now.
|
||||
# This is currently only true for debian/ubuntu.
|
||||
warn_on_no_mod_ssl = not self.option("handle_modules")
|
||||
self.install_ssl_options_conf(self.mod_ssl_conf,
|
||||
self.updated_mod_ssl_conf_digest)
|
||||
self.updated_mod_ssl_conf_digest,
|
||||
warn_on_no_mod_ssl)
|
||||
|
||||
# Prevent two Apache plugins from modifying a config at once
|
||||
try:
|
||||
@@ -344,6 +442,28 @@ class ApacheConfigurator(common.Installer):
|
||||
self.option("server_root"), self.conf("vhost-root"),
|
||||
self.version, configurator=self)
|
||||
|
||||
def get_parsernode_root(self, metadata):
|
||||
"""Initializes the ParserNode parser root instance."""
|
||||
|
||||
if HAS_APACHECONFIG:
|
||||
apache_vars = {}
|
||||
apache_vars["defines"] = apache_util.parse_defines(self.option("ctl"))
|
||||
apache_vars["includes"] = apache_util.parse_includes(self.option("ctl"))
|
||||
apache_vars["modules"] = apache_util.parse_modules(self.option("ctl"))
|
||||
metadata["apache_vars"] = apache_vars
|
||||
|
||||
with open(self.parser.loc["root"]) as f:
|
||||
with apacheconfig.make_loader(writable=True,
|
||||
**apacheconfig.flavors.NATIVE_APACHE) as loader:
|
||||
metadata["ac_ast"] = loader.loads(f.read())
|
||||
|
||||
return dualparser.DualBlockNode(
|
||||
name=assertions.PASS,
|
||||
ancestor=None,
|
||||
filepath=self.parser.loc["root"],
|
||||
metadata=metadata
|
||||
)
|
||||
|
||||
def _wildcard_domain(self, domain):
|
||||
"""
|
||||
Checks if domain is a wildcard domain
|
||||
@@ -438,7 +558,7 @@ class ApacheConfigurator(common.Installer):
|
||||
|
||||
# Go through the vhosts, making sure that we cover all the names
|
||||
# present, but preferring the SSL vhosts
|
||||
filtered_vhosts = dict()
|
||||
filtered_vhosts = {}
|
||||
for vhost in vhosts:
|
||||
for name in vhost.get_names():
|
||||
if vhost.ssl:
|
||||
@@ -464,7 +584,7 @@ class ApacheConfigurator(common.Installer):
|
||||
|
||||
# Make sure we create SSL vhosts for the ones that are HTTP only
|
||||
# if requested.
|
||||
return_vhosts = list()
|
||||
return_vhosts = []
|
||||
for vhost in dialog_output:
|
||||
if not vhost.ssl:
|
||||
return_vhosts.append(self.make_vhost_ssl(vhost))
|
||||
@@ -485,6 +605,11 @@ class ApacheConfigurator(common.Installer):
|
||||
# cert_key... can all be parsed appropriately
|
||||
self.prepare_server_https("443")
|
||||
|
||||
# If we haven't managed to enable mod_ssl by this point, error out
|
||||
if "ssl_module" not in self.parser.modules:
|
||||
raise errors.MisconfigurationError("Could not find ssl_module; "
|
||||
"not installing certificate.")
|
||||
|
||||
# Add directives and remove duplicates
|
||||
self._add_dummy_ssl_directives(vhost.path)
|
||||
self._clean_vhost(vhost)
|
||||
@@ -499,21 +624,6 @@ class ApacheConfigurator(common.Installer):
|
||||
path["chain_path"] = self.parser.find_dir(
|
||||
"SSLCertificateChainFile", None, vhost.path)
|
||||
|
||||
# Handle errors when certificate/key directives cannot be found
|
||||
if not path["cert_path"]:
|
||||
logger.warning(
|
||||
"Cannot find an SSLCertificateFile directive in %s. "
|
||||
"VirtualHost was not modified", vhost.path)
|
||||
raise errors.PluginError(
|
||||
"Unable to find an SSLCertificateFile directive")
|
||||
elif not path["cert_key"]:
|
||||
logger.warning(
|
||||
"Cannot find an SSLCertificateKeyFile directive for "
|
||||
"certificate in %s. VirtualHost was not modified", vhost.path)
|
||||
raise errors.PluginError(
|
||||
"Unable to find an SSLCertificateKeyFile directive for "
|
||||
"certificate")
|
||||
|
||||
logger.info("Deploying Certificate to VirtualHost %s", vhost.filep)
|
||||
|
||||
if self.version < (2, 4, 8) or (chain_path and not fullchain_path):
|
||||
@@ -760,7 +870,7 @@ class ApacheConfigurator(common.Installer):
|
||||
|
||||
return util.get_filtered_names(all_names)
|
||||
|
||||
def get_name_from_ip(self, addr): # pylint: disable=no-self-use
|
||||
def get_name_from_ip(self, addr):
|
||||
"""Returns a reverse dns name if available.
|
||||
|
||||
:param addr: IP Address
|
||||
@@ -868,6 +978,29 @@ class ApacheConfigurator(common.Installer):
|
||||
return vhost
|
||||
|
||||
    def get_virtual_hosts(self):
        """
        Temporary wrapper around the legacy and ParserNode versions of
        get_virtual_hosts. This should be replaced with the ParserNode
        implementation when it is ready.
        """

        v1_vhosts = self.get_virtual_hosts_v1()
        if self.USE_PARSERNODE and HAS_APACHECONFIG:
            v2_vhosts = self.get_virtual_hosts_v2()

            for v1_vh in v1_vhosts:
                found = False
                for v2_vh in v2_vhosts:
                    if assertions.isEqualVirtualHost(v1_vh, v2_vh):
                        found = True
                        break
                if not found:
                    raise AssertionError("Equivalent for {} was not found".format(v1_vh.path))

            return v2_vhosts
        return v1_vhosts
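The loop above is an instance of a general cross-validation pattern: run the old and the new implementation side by side and fail loudly if the new one misses anything the old one found. A generic sketch of the same idea (names hypothetical):

def cross_validate(legacy_results, new_results, same):
    # Every legacy result must have an equivalent in the new results;
    # only then is the new implementation trusted.
    for old in legacy_results:
        if not any(same(old, new) for new in new_results):
            raise AssertionError("Equivalent for {0} was not found".format(old))
    return new_results

assert cross_validate([1, 2], [2, 1], lambda a, b: a == b) == [2, 1]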
|
||||
def get_virtual_hosts_v1(self):
|
||||
"""Returns list of virtual hosts found in the Apache configuration.
|
||||
|
||||
:returns: List of :class:`~certbot_apache._internal.obj.VirtualHost`
|
||||
@@ -920,6 +1053,80 @@ class ApacheConfigurator(common.Installer):
|
||||
vhs.append(new_vhost)
|
||||
return vhs
|
||||
|
||||
def get_virtual_hosts_v2(self):
|
||||
"""Returns list of virtual hosts found in the Apache configuration using
|
||||
ParserNode interface.
|
||||
:returns: List of :class:`~certbot_apache.obj.VirtualHost`
|
||||
objects found in configuration
|
||||
:rtype: list
|
||||
"""
|
||||
|
||||
vhs = []
|
||||
vhosts = self.parser_root.find_blocks("VirtualHost", exclude=False)
|
||||
for vhblock in vhosts:
|
||||
vhs.append(self._create_vhost_v2(vhblock))
|
||||
return vhs
|
||||
|
||||
def _create_vhost_v2(self, node):
|
||||
"""Used by get_virtual_hosts_v2 to create vhost objects using ParserNode
|
||||
interfaces.
|
||||
:param interfaces.BlockNode node: The BlockNode object of VirtualHost block
|
||||
:returns: newly created vhost
|
||||
:rtype: :class:`~certbot_apache.obj.VirtualHost`
|
||||
"""
|
||||
addrs = set()
|
||||
for param in node.parameters:
|
||||
addrs.add(obj.Addr.fromstring(param))
|
||||
|
||||
is_ssl = False
|
||||
# Searching with exclude=False to match the behavior of the legacy get_virtual_hosts
|
||||
sslengine = node.find_directives("SSLEngine", exclude=False)
|
||||
if sslengine:
|
||||
for directive in sslengine:
|
||||
if directive.parameters[0].lower() == "on":
|
||||
is_ssl = True
|
||||
break
|
||||
|
||||
# "SSLEngine on" might be set outside of <VirtualHost>
|
||||
# Treat vhosts with port 443 as ssl vhosts
|
||||
for addr in addrs:
|
||||
if addr.get_port() == "443":
|
||||
is_ssl = True
|
||||
|
||||
enabled = apache_util.included_in_paths(node.filepath, self.parsed_paths)
|
||||
|
||||
macro = False
|
||||
# Check if the VirtualHost is contained in a mod_macro block
|
||||
if node.find_ancestors("Macro"):
|
||||
macro = True
|
||||
vhost = obj.VirtualHost(
|
||||
node.filepath, None, addrs, is_ssl, enabled, modmacro=macro, node=node
|
||||
)
|
||||
self._populate_vhost_names_v2(vhost)
|
||||
return vhost
|
||||
|
||||
def _populate_vhost_names_v2(self, vhost):
|
||||
"""Helper function that populates the VirtualHost names.
|
||||
:param vhost: In-progress vhost whose names will be added
:type vhost: :class:`~certbot_apache.obj.VirtualHost`
|
||||
"""
|
||||
|
||||
servername_match = vhost.node.find_directives("ServerName",
|
||||
exclude=False)
|
||||
serveralias_match = vhost.node.find_directives("ServerAlias",
|
||||
exclude=False)
|
||||
|
||||
servername = None
|
||||
if servername_match:
|
||||
servername = servername_match[-1].parameters[-1]
|
||||
|
||||
if not vhost.modmacro:
|
||||
for alias in serveralias_match:
|
||||
for serveralias in alias.parameters:
|
||||
vhost.aliases.add(serveralias)
|
||||
vhost.name = servername
|
||||
|
||||
|
||||
def is_name_vhost(self, target_addr):
|
||||
"""Returns if vhost is a name based vhost
|
||||
|
||||
@@ -1112,6 +1319,14 @@ class ApacheConfigurator(common.Installer):
|
||||
self.enable_mod("socache_shmcb", temp=temp)
|
||||
if "ssl_module" not in self.parser.modules:
|
||||
self.enable_mod("ssl", temp=temp)
|
||||
# Make sure we're not throwing away any unwritten changes to the config
|
||||
self.parser.ensure_augeas_state()
|
||||
self.parser.aug.load()
|
||||
self.parser.reset_modules() # Reset to load the new ssl_module path
|
||||
# Call again because now we can gate on openssl version
|
||||
self.install_ssl_options_conf(self.mod_ssl_conf,
|
||||
self.updated_mod_ssl_conf_digest,
|
||||
warn_on_no_mod_ssl=True)
|
||||
|
||||
def make_vhost_ssl(self, nonssl_vhost):
|
||||
"""Makes an ssl_vhost version of a nonssl_vhost.
|
||||
@@ -1247,7 +1462,7 @@ class ApacheConfigurator(common.Installer):
|
||||
if not line.lower().lstrip().startswith("rewriterule"):
|
||||
return False
|
||||
|
||||
# According to: http://httpd.apache.org/docs/2.4/rewrite/flags.html
|
||||
# According to: https://httpd.apache.org/docs/2.4/rewrite/flags.html
|
||||
# The syntax of a RewriteRule is:
|
||||
# RewriteRule pattern target [Flag1,Flag2,Flag3]
|
||||
# i.e. target is required, so it must exist.
|
||||
@@ -1364,7 +1579,7 @@ class ApacheConfigurator(common.Installer):
|
||||
result.append(comment)
|
||||
sift = True
|
||||
|
||||
result.append('\n'.join(['# ' + l for l in chunk]))
|
||||
result.append('\n'.join('# ' + l for l in chunk))
|
||||
else:
|
||||
result.append('\n'.join(chunk))
|
||||
return result, sift
|
||||
@@ -1504,7 +1719,7 @@ class ApacheConfigurator(common.Installer):
|
||||
for addr in vhost.addrs:
|
||||
# In Apache 2.2, when a NameVirtualHost directive is not
|
||||
# set, "*" and "_default_" will conflict when sharing a port
|
||||
addrs = set((addr,))
|
||||
addrs = {addr,}
|
||||
if addr.get_addr() in ("*", "_default_"):
|
||||
addrs.update(obj.Addr((a, addr.get_port(),))
|
||||
for a in ("*", "_default_"))
|
||||
@@ -1597,7 +1812,7 @@ class ApacheConfigurator(common.Installer):
|
||||
######################################################################
|
||||
# Enhancements
|
||||
######################################################################
|
||||
def supported_enhancements(self): # pylint: disable=no-self-use
|
||||
def supported_enhancements(self):
|
||||
"""Returns currently supported enhancements."""
|
||||
return ["redirect", "ensure-http-header", "staple-ocsp"]
|
||||
|
||||
@@ -1695,7 +1910,7 @@ class ApacheConfigurator(common.Installer):
|
||||
try:
|
||||
self._autohsts = self.storage.fetch("autohsts")
|
||||
except KeyError:
|
||||
self._autohsts = dict()
|
||||
self._autohsts = {}
|
||||
|
||||
def _autohsts_save_state(self):
|
||||
"""
|
||||
@@ -1817,7 +2032,7 @@ class ApacheConfigurator(common.Installer):
|
||||
ssl_vhost.filep)
|
||||
|
||||
def _verify_no_matching_http_header(self, ssl_vhost, header_substring):
|
||||
"""Checks to see if an there is an existing Header directive that
|
||||
"""Checks to see if there is an existing Header directive that
|
||||
contains the string header_substring.
|
||||
|
||||
:param ssl_vhost: vhost to check
|
||||
@@ -2163,7 +2378,7 @@ class ApacheConfigurator(common.Installer):
|
||||
vhost.enabled = True
|
||||
return
|
||||
|
||||
def enable_mod(self, mod_name, temp=False): # pylint: disable=unused-argument
|
||||
def enable_mod(self, mod_name, temp=False):
|
||||
"""Enables module in Apache.
|
||||
|
||||
Both enables and reloads Apache so module is active.
|
||||
@@ -2221,7 +2436,7 @@ class ApacheConfigurator(common.Installer):
|
||||
error = str(err)
|
||||
raise errors.MisconfigurationError(error)
|
||||
|
||||
def config_test(self): # pylint: disable=no-self-use
|
||||
def config_test(self):
|
||||
"""Check the configuration of Apache for errors.
|
||||
|
||||
:raises .errors.MisconfigurationError: If config_test fails
|
||||
@@ -2256,7 +2471,7 @@ class ApacheConfigurator(common.Installer):
|
||||
if len(matches) != 1:
|
||||
raise errors.PluginError("Unable to find Apache version")
|
||||
|
||||
return tuple([int(i) for i in matches[0].split(".")])
|
||||
return tuple(int(i) for i in matches[0].split("."))
|
||||
|
||||
def more_info(self):
|
||||
"""Human-readable string to help understand the module"""
|
||||
@@ -2271,7 +2486,7 @@ class ApacheConfigurator(common.Installer):
|
||||
###########################################################################
|
||||
# Challenges Section
|
||||
###########################################################################
|
||||
def get_chall_pref(self, unused_domain): # pylint: disable=no-self-use
|
||||
def get_chall_pref(self, unused_domain):
|
||||
"""Return list of challenge preferences."""
|
||||
return [challenges.HTTP01]
|
||||
|
||||
@@ -2325,14 +2540,19 @@ class ApacheConfigurator(common.Installer):
|
||||
self.restart()
|
||||
self.parser.reset_modules()
|
||||
|
||||
def install_ssl_options_conf(self, options_ssl, options_ssl_digest):
|
||||
"""Copy Certbot's SSL options file into the system's config dir if required."""
|
||||
def install_ssl_options_conf(self, options_ssl, options_ssl_digest, warn_on_no_mod_ssl=True):
|
||||
"""Copy Certbot's SSL options file into the system's config dir if required.
|
||||
|
||||
:param bool warn_on_no_mod_ssl: True if we should warn if mod_ssl is not found.
|
||||
"""
|
||||
|
||||
# XXX if we ever try to enforce a local privilege boundary (eg, running
|
||||
# certbot for unprivileged users via setuid), this function will need
|
||||
# to be modified.
|
||||
return common.install_version_controlled_file(options_ssl, options_ssl_digest,
|
||||
self.option("MOD_SSL_CONF_SRC"), constants.ALL_SSL_OPTIONS_HASHES)
|
||||
apache_config_path = self.pick_apache_config(warn_on_no_mod_ssl)
|
||||
|
||||
return common.install_version_controlled_file(
|
||||
options_ssl, options_ssl_digest, apache_config_path, constants.ALL_SSL_OPTIONS_HASHES)
|
||||
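ALL_SSL_OPTIONS_HASHES is what makes this install safe: roughly, the installed copy is only replaced when its digest matches a known previous release, i.e. the user has not edited it by hand. A sketch of the underlying idea (helper name hypothetical, not the actual certbot API):

import hashlib

def safe_to_overwrite(installed_bytes, known_hashes):
    # A file whose SHA256 matches a shipped version was never hand-edited.
    digest = hashlib.sha256(installed_bytes).hexdigest()
    return digest in known_hashes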
|
||||
def enable_autohsts(self, _unused_lineage, domains):
|
||||
"""
|
||||
@@ -2342,7 +2562,7 @@ class ApacheConfigurator(common.Installer):
|
||||
:type _unused_lineage: certbot._internal.storage.RenewableCert
|
||||
|
||||
:param domains: List of domains in certificate to enhance
|
||||
:type domains: str
|
||||
:type domains: `list` of `str`
|
||||
"""
|
||||
|
||||
self._autohsts_fetch_state()
|
||||
|
||||
@@ -24,6 +24,9 @@ ALL_SSL_OPTIONS_HASHES = [
|
||||
'0fcdc81280cd179a07ec4d29d3595068b9326b455c488de4b09f585d5dafc137',
|
||||
'86cc09ad5415cd6d5f09a947fe2501a9344328b1e8a8b458107ea903e80baa6c',
|
||||
'06675349e457eae856120cdebb564efe546f0b87399f2264baeb41e442c724c7',
|
||||
'5cc003edd93fb9cd03d40c7686495f8f058f485f75b5e764b789245a386e6daf',
|
||||
'007cd497a56a3bb8b6a2c1aeb4997789e7e38992f74e44cc5d13a625a738ac73',
|
||||
'34783b9e2210f5c4a23bced2dfd7ec289834716673354ed7c7abf69fe30192a3',
|
||||
]
|
||||
"""SHA256 hashes of the contents of previous versions of all versions of MOD_SSL_CONF_SRC"""
|
||||
|
||||
|
||||
@@ -21,7 +21,7 @@ def select_vhost_multiple(vhosts):
|
||||
:rtype: :class:`list` of type `~obj.Vhost`
|
||||
"""
|
||||
if not vhosts:
|
||||
return list()
|
||||
return []
|
||||
tags_list = [vhost.display_repr()+"\n" for vhost in vhosts]
|
||||
# Remove the extra newline from the last entry
|
||||
if tags_list:
|
||||
@@ -37,7 +37,7 @@ def select_vhost_multiple(vhosts):
|
||||
def _reversemap_vhosts(names, vhosts):
|
||||
"""Helper function for select_vhost_multiple for mapping string
|
||||
representations back to actual vhost objects"""
|
||||
return_vhosts = list()
|
||||
return_vhosts = []
|
||||
|
||||
for selection in names:
|
||||
for vhost in vhosts:
|
||||
|
||||
certbot-apache/certbot_apache/_internal/dualparser.py (new file, 306 lines)
@@ -0,0 +1,306 @@
|
||||
""" Dual ParserNode implementation """
|
||||
from certbot_apache._internal import assertions
|
||||
from certbot_apache._internal import augeasparser
|
||||
from certbot_apache._internal import apacheparser
|
||||
|
||||
|
||||
class DualNodeBase(object):
|
||||
""" Dual parser interface for in development testing. This is used as the
|
||||
base class for dual parser interface classes. This class handles runtime
|
||||
attribute value assertions."""
|
||||
|
||||
def save(self, msg): # pragma: no cover
|
||||
""" Call save for both parsers """
|
||||
self.primary.save(msg)
|
||||
self.secondary.save(msg)
|
||||
|
||||
def __getattr__(self, aname):
|
||||
""" Attribute value assertion """
|
||||
firstval = getattr(self.primary, aname)
|
||||
secondval = getattr(self.secondary, aname)
|
||||
exclusions = [
|
||||
# Metadata will inherently be different, as ApacheParserNode does
|
||||
# not have Augeas paths and so on.
|
||||
aname == "metadata",
|
||||
callable(firstval)
|
||||
]
|
||||
if not any(exclusions):
|
||||
assertions.assertEqualSimple(firstval, secondval)
|
||||
return firstval
|
||||
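This __getattr__ is the heart of the dual-parser design: every plain attribute read becomes an equality check between the two implementations. A minimal standalone sketch of the same dispatch-and-assert idea (class and names hypothetical):

class Dual(object):
    """Read an attribute from both implementations and assert that they
    agree before handing the primary value back."""
    def __init__(self, primary, secondary):
        self.primary = primary
        self.secondary = secondary

    def __getattr__(self, aname):
        first = getattr(self.primary, aname)
        second = getattr(self.secondary, aname)
        if not callable(first):
            assert first == second
        return first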
|
||||
def find_ancestors(self, name):
|
||||
""" Traverses the ancestor tree and returns ancestors matching name """
|
||||
return self._find_helper(DualBlockNode, "find_ancestors", name)
|
||||
|
||||
def _find_helper(self, nodeclass, findfunc, search, **kwargs):
|
||||
"""A helper for find_* functions. The function specific attributes should
|
||||
be passed as keyword arguments.
|
||||
|
||||
:param interfaces.ParserNode nodeclass: The node class for results.
|
||||
:param str findfunc: Name of the find function to call
|
||||
:param str search: The search term
|
||||
"""
|
||||
|
||||
primary_res = getattr(self.primary, findfunc)(search, **kwargs)
|
||||
secondary_res = getattr(self.secondary, findfunc)(search, **kwargs)
|
||||
|
||||
# The order of search results for Augeas implementation cannot be
|
||||
# assured.
|
||||
|
||||
pass_primary = assertions.isPassNodeList(primary_res)
|
||||
pass_secondary = assertions.isPassNodeList(secondary_res)
|
||||
new_nodes = []
|
||||
|
||||
if pass_primary and pass_secondary:
|
||||
# Both unimplemented
|
||||
new_nodes.append(nodeclass(primary=primary_res[0],
|
||||
secondary=secondary_res[0])) # pragma: no cover
|
||||
elif pass_primary:
|
||||
for c in secondary_res:
|
||||
new_nodes.append(nodeclass(primary=primary_res[0],
|
||||
secondary=c))
|
||||
elif pass_secondary:
|
||||
for c in primary_res:
|
||||
new_nodes.append(nodeclass(primary=c,
|
||||
secondary=secondary_res[0]))
|
||||
else:
|
||||
assert len(primary_res) == len(secondary_res)
|
||||
matches = self._create_matching_list(primary_res, secondary_res)
|
||||
for p, s in matches:
|
||||
new_nodes.append(nodeclass(primary=p, secondary=s))
|
||||
|
||||
return new_nodes
|
||||
|
||||
|
||||
class DualCommentNode(DualNodeBase):
|
||||
""" Dual parser implementation of CommentNode interface """
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
""" This initialization implementation allows ordinary initialization
|
||||
of CommentNode objects as well as creating a DualCommentNode object
|
||||
using precreated or fetched CommentNode objects if provided as optional
|
||||
arguments primary and secondary.
|
||||
|
||||
Parameters other than the following are from interfaces.CommentNode:
|
||||
|
||||
:param CommentNode primary: Primary pre-created CommentNode, mainly
|
||||
used when creating new DualParser nodes using add_* methods.
|
||||
:param CommentNode secondary: Secondary pre-created CommentNode
|
||||
"""
|
||||
|
||||
kwargs.setdefault("primary", None)
|
||||
kwargs.setdefault("secondary", None)
|
||||
primary = kwargs.pop("primary")
|
||||
secondary = kwargs.pop("secondary")
|
||||
|
||||
if primary or secondary:
|
||||
assert primary and secondary
|
||||
self.primary = primary
|
||||
self.secondary = secondary
|
||||
else:
|
||||
self.primary = augeasparser.AugeasCommentNode(**kwargs)
|
||||
self.secondary = apacheparser.ApacheCommentNode(**kwargs)
|
||||
|
||||
assertions.assertEqual(self.primary, self.secondary)
|
||||
|
||||
|
||||
class DualDirectiveNode(DualNodeBase):
|
||||
""" Dual parser implementation of DirectiveNode interface """
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
""" This initialization implementation allows ordinary initialization
|
||||
of DirectiveNode objects as well as creating a DualDirectiveNode object
|
||||
using precreated or fetched DirectiveNode objects if provided as optional
|
||||
arguments primary and secondary.
|
||||
|
||||
Parameters other than the following are from interfaces.DirectiveNode:
|
||||
|
||||
:param DirectiveNode primary: Primary pre-created DirectiveNode, mainly
|
||||
used when creating new DualParser nodes using add_* methods.
|
||||
:param DirectiveNode secondary: Secondary pre-created DirectiveNode
|
||||
|
||||
|
||||
"""
|
||||
|
||||
kwargs.setdefault("primary", None)
|
||||
kwargs.setdefault("secondary", None)
|
||||
primary = kwargs.pop("primary")
|
||||
secondary = kwargs.pop("secondary")
|
||||
|
||||
if primary or secondary:
|
||||
assert primary and secondary
|
||||
self.primary = primary
|
||||
self.secondary = secondary
|
||||
else:
|
||||
self.primary = augeasparser.AugeasDirectiveNode(**kwargs)
|
||||
self.secondary = apacheparser.ApacheDirectiveNode(**kwargs)
|
||||
|
||||
assertions.assertEqual(self.primary, self.secondary)
|
||||
|
||||
def set_parameters(self, parameters):
|
||||
""" Sets parameters and asserts that both implementation successfully
|
||||
set the parameter sequence """
|
||||
|
||||
self.primary.set_parameters(parameters)
|
||||
self.secondary.set_parameters(parameters)
|
||||
assertions.assertEqual(self.primary, self.secondary)
|
||||
|
||||
|
||||
class DualBlockNode(DualNodeBase):
|
||||
""" Dual parser implementation of BlockNode interface """
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
""" This initialization implementation allows ordinary initialization
|
||||
of BlockNode objects as well as creating a DualBlockNode object
|
||||
using precreated or fetched BlockNode objects if provided as optional
|
||||
arguments primary and secondary.
|
||||
|
||||
Parameters other than the following are from interfaces.BlockNode:
|
||||
|
||||
:param BlockNode primary: Primary pre-created BlockNode, mainly
|
||||
used when creating new DualParser nodes using add_* methods.
|
||||
:param BlockNode secondary: Secondary pre-created BlockNode
|
||||
"""
|
||||
|
||||
kwargs.setdefault("primary", None)
|
||||
kwargs.setdefault("secondary", None)
|
||||
primary = kwargs.pop("primary")
|
||||
secondary = kwargs.pop("secondary")
|
||||
|
||||
if primary or secondary:
|
||||
assert primary and secondary
|
||||
self.primary = primary
|
||||
self.secondary = secondary
|
||||
else:
|
||||
self.primary = augeasparser.AugeasBlockNode(**kwargs)
|
||||
self.secondary = apacheparser.ApacheBlockNode(**kwargs)
|
||||
|
||||
assertions.assertEqual(self.primary, self.secondary)
|
||||
|
||||
def add_child_block(self, name, parameters=None, position=None):
|
||||
""" Creates a new child BlockNode, asserts that both implementations
|
||||
did it in a similar way, and returns a newly created DualBlockNode object
|
||||
encapsulating both of the newly created objects """
|
||||
|
||||
primary_new = self.primary.add_child_block(name, parameters, position)
|
||||
secondary_new = self.secondary.add_child_block(name, parameters, position)
|
||||
assertions.assertEqual(primary_new, secondary_new)
|
||||
new_block = DualBlockNode(primary=primary_new, secondary=secondary_new)
|
||||
return new_block
|
||||
|
||||
def add_child_directive(self, name, parameters=None, position=None):
|
||||
""" Creates a new child DirectiveNode, asserts that both implementations
|
||||
did it in a similar way, and returns a newly created DualDirectiveNode
|
||||
object encapsulating both of the newly created objects """
|
||||
|
||||
primary_new = self.primary.add_child_directive(name, parameters, position)
|
||||
secondary_new = self.secondary.add_child_directive(name, parameters, position)
|
||||
assertions.assertEqual(primary_new, secondary_new)
|
||||
new_dir = DualDirectiveNode(primary=primary_new, secondary=secondary_new)
|
||||
return new_dir
|
||||
|
||||
def add_child_comment(self, comment="", position=None):
|
||||
""" Creates a new child CommentNode, asserts that both implementations
|
||||
did it in a similar way, and returns a newly created DualCommentNode
|
||||
object encapsulating both of the newly created objects """
|
||||
|
||||
primary_new = self.primary.add_child_comment(comment, position)
|
||||
secondary_new = self.secondary.add_child_comment(comment, position)
|
||||
assertions.assertEqual(primary_new, secondary_new)
|
||||
new_comment = DualCommentNode(primary=primary_new, secondary=secondary_new)
|
||||
return new_comment
|
||||
|
||||
def _create_matching_list(self, primary_list, secondary_list):
|
||||
""" Matches the list of primary_list to a list of secondary_list and
|
||||
returns a list of tuples. This is used to create results for find_
|
||||
methods.
|
||||
|
||||
This helper function exists, because we cannot ensure that the list of
|
||||
search results returned by primary.find_* and secondary.find_* are ordered
|
||||
in a same way. The function pairs the same search results from both
|
||||
implementations to a list of tuples.
|
||||
"""
|
||||
|
||||
matched = []
|
||||
for p in primary_list:
|
||||
match = None
|
||||
for s in secondary_list:
|
||||
try:
|
||||
assertions.assertEqual(p, s)
|
||||
match = s
|
||||
break
|
||||
except AssertionError:
|
||||
continue
|
||||
if match:
|
||||
matched.append((p, match))
|
||||
else:
|
||||
raise AssertionError("Could not find a matching node.")
|
||||
return matched
|
||||
|
||||
def find_blocks(self, name, exclude=True):
|
||||
"""
|
||||
Performs a search for BlockNodes using both implementations and does simple
|
||||
checks for results. This is built upon the assumption that unimplemented
|
||||
find_* methods return a list with a single assertion passing object.
|
||||
After the assertion, it creates a list of newly created DualBlockNode
|
||||
instances that encapsulate the pairs of returned BlockNode objects.
|
||||
"""
|
||||
|
||||
return self._find_helper(DualBlockNode, "find_blocks", name,
|
||||
exclude=exclude)
|
||||
|
||||
def find_directives(self, name, exclude=True):
|
||||
"""
|
||||
Performs a search for DirectiveNodes using both implementations and
|
||||
checks the results. This is built upon the assumption that unimplemented
|
||||
find_* methods return a list with a single assertion passing object.
|
||||
After the assertion, it creates a list of newly created DualDirectiveNode
|
||||
instances that encapsulate the pairs of returned DirectiveNode objects.
|
||||
"""
|
||||
|
||||
return self._find_helper(DualDirectiveNode, "find_directives", name,
|
||||
exclude=exclude)
|
||||
|
||||
def find_comments(self, comment):
|
||||
"""
|
||||
Performs a search for CommentNodes using both implementations and
|
||||
checks the results. This is built upon the assumption that unimplemented
|
||||
find_* methods return a list with a single assertion passing object.
|
||||
After the assertion, it creates a list of newly created DualCommentNode
|
||||
instances that encapsulate the pairs of returned CommentNode objects.
|
||||
"""
|
||||
|
||||
return self._find_helper(DualCommentNode, "find_comments", comment)
|
||||
|
||||
def delete_child(self, child):
|
||||
"""Deletes a child from the ParserNode implementations. The actual
|
||||
ParserNode implementations are used here directly in order to be able
|
||||
to match a child to the list of children."""
|
||||
|
||||
self.primary.delete_child(child.primary)
|
||||
self.secondary.delete_child(child.secondary)
|
||||
|
||||
def unsaved_files(self):
|
||||
""" Fetches the list of unsaved file paths and asserts that the lists
|
||||
match """
|
||||
primary_files = self.primary.unsaved_files()
|
||||
secondary_files = self.secondary.unsaved_files()
|
||||
assertions.assertEqualSimple(primary_files, secondary_files)
|
||||
|
||||
return primary_files
|
||||
|
||||
def parsed_paths(self):
|
||||
"""
|
||||
Returns a list of file paths that have currently been parsed into the parser
|
||||
tree. The returned list may include paths with wildcard characters, for
|
||||
example: ['/etc/apache2/conf.d/*.load']
|
||||
|
||||
This is typically called on the root node of the ParserNode tree.
|
||||
|
||||
:returns: list of file paths of files that have been parsed
|
||||
"""
|
||||
|
||||
primary_paths = self.primary.parsed_paths()
|
||||
secondary_paths = self.secondary.parsed_paths()
|
||||
assertions.assertEqualPathsList(primary_paths, secondary_paths)
|
||||
return primary_paths
|
||||
@@ -1,7 +1,5 @@
|
||||
""" Entry point for Apache Plugin """
|
||||
# Pylint does not like disutils.version when running inside a venv.
|
||||
# See: https://github.com/PyCQA/pylint/issues/73
|
||||
from distutils.version import LooseVersion # pylint: disable=no-name-in-module,import-error
|
||||
from distutils.version import LooseVersion
|
||||
|
||||
from certbot import util
|
||||
from certbot_apache._internal import configurator
|
||||
|
||||
@@ -1,8 +1,9 @@
|
||||
"""A class that performs HTTP-01 challenges for Apache"""
|
||||
import logging
|
||||
import errno
|
||||
|
||||
from acme.magic_typing import List # pylint: disable=unused-import, no-name-in-module
|
||||
from acme.magic_typing import Set # pylint: disable=unused-import, no-name-in-module
|
||||
from acme.magic_typing import List
|
||||
from acme.magic_typing import Set
|
||||
from certbot import errors
|
||||
from certbot.compat import filesystem
|
||||
from certbot.compat import os
|
||||
@@ -168,7 +169,15 @@ class ApacheHttp01(common.ChallengePerformer):
|
||||
|
||||
def _set_up_challenges(self):
|
||||
if not os.path.isdir(self.challenge_dir):
|
||||
filesystem.makedirs(self.challenge_dir, 0o755)
|
||||
old_umask = filesystem.umask(0o022)
|
||||
try:
|
||||
filesystem.makedirs(self.challenge_dir, 0o755)
|
||||
except OSError as exception:
|
||||
if exception.errno not in (errno.EEXIST, errno.EISDIR):
|
||||
raise errors.PluginError(
|
||||
"Couldn't create root for http-01 challenge")
|
||||
finally:
|
||||
filesystem.umask(old_umask)
|
||||
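The temporary umask above matters because makedirs masks the requested mode with the process umask: under a 0o077 umask, the 0o755 request would silently become 0o700 and the challenge directory would not be world-readable. A standalone demonstration using plain os for illustration (POSIX only; the path is throwaway):

import os
import tempfile

path = os.path.join(tempfile.mkdtemp(), "challenge")
old_umask = os.umask(0o022)  # clamp the umask for the duration
try:
    os.makedirs(path, 0o755)
finally:
    os.umask(old_umask)  # always restore the caller's umask
assert os.stat(path).st_mode & 0o777 == 0o755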
|
||||
responses = []
|
||||
for achall in self.achalls:
|
||||
|
||||
certbot-apache/certbot_apache/_internal/interfaces.py (new file, 515 lines)
@@ -0,0 +1,515 @@
|
||||
"""ParserNode interface for interacting with configuration tree.
|
||||
|
||||
General description
|
||||
-------------------
|
||||
|
||||
The ParserNode interfaces are designed to be able to contain all the parsing logic,
|
||||
while allowing their users to interact with the configuration tree in a Pythonic
|
||||
and well structured manner.
|
||||
|
||||
The structure allows easy traversal of the tree of ParserNodes. Each ParserNode
|
||||
stores a reference to its ancestor and immediate children, allowing the user to
|
||||
traverse the tree using built in interface methods as well as accessing the interface
|
||||
properties directly.
|
||||
|
||||
The ParserNode interface implementation should stand between the actual underlying
parser functionality and the business logic within Configurator code, interfacing
with both. The ParserNode tree is the result of a configuration parsing action.

The ParserNode tree will be in charge of maintaining the parser state and hence the
abstract syntax tree (AST). Interactions between the ParserNode tree and the underlying
parser should involve only parsing the configuration files into this structure, and
writing it back to the filesystem - while preserving the format, including whitespaces.
|
||||
|
||||
For some implementations (Apache, for example) it's important to keep track of and
to use state information while parsing conditional blocks and directives. This
allows the implementation to flag parts of the parsed configuration structure as
not being in effect in the case of an unmatched conditional block. It's important
to store these blocks in the tree as well, in order not to conduct destructive
actions (failing to write back parts of the configuration) when writing the AST
back to the filesystem.
|
||||
|
||||
The ParserNode tree is in charge of maintaining its own structure, while every
child node fetched with find_* methods or by iterating its list of children can be
changed in place. When making changes, the affected nodes should be flagged as "dirty"
in order for the parser implementation to figure out the parts of the configuration
that need to be written back to disk during the save() operation.
|
||||
|
||||
|
||||
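As a concrete illustration of the dirty-flag mechanism just described, a save() implementation could collect exactly the files that need rewriting. A minimal sketch against the attributes this interface defines below (ancestor, children, dirty, filepath); the function name is hypothetical:

def dirty_filepaths(node):
    # Walk the tree and gather the filepath of every node flagged dirty,
    # so only those files are written back to disk.
    paths = set()
    if node.dirty and node.filepath is not None:
        paths.add(node.filepath)
    for child in getattr(node, "children", ()):
        paths.update(dirty_filepaths(child))
    return paths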
Metadata
|
||||
--------
|
||||
|
||||
The metadata holds all the implementation specific attributes of the ParserNodes -
|
||||
things like the positional information related to the AST, file paths, whitespacing,
|
||||
and any other information relevant to the underlying parser engine.
|
||||
|
||||
Access to the metadata should be handled by implementation specific methods, allowing
|
||||
the Configurator functionality to access the underlying information where needed.
|
||||
|
||||
For some implementations the node can be initialized using the information carried
|
||||
in metadata alone. This is useful especially when populating the ParserNode tree
|
||||
while parsing the configuration.
|
||||
|
||||
|
||||
Apache implementation
|
||||
---------------------
|
||||
|
||||
The Apache implementation of ParserNode interface requires some implementation
|
||||
specific functionalities that are not described by the interface itself.
|
||||
|
||||
Initialization
|
||||
|
||||
When the user of a ParserNode class is creating these objects, they must specify
|
||||
the parameters as described in the documentation for the __init__ methods below.
|
||||
When these objects are created internally, however, some parameters may not be
|
||||
needed because (possibly more detailed) information is included in the metadata
|
||||
parameter. In this case, implementations can deviate from the required parameters
of __init__; however, they should still behave the same when metadata is not
provided.
|
||||
|
||||
For consistency internally, if an argument is provided directly in the ParserNode
|
||||
initialization parameters as well as within metadata, it's recommended to establish
|
||||
clear behavior around this scenario within the implementation.
|
||||
|
||||
Conditional blocks
|
||||
|
||||
Apache configuration can have conditional blocks, for example: <IfModule ...>,
|
||||
resulting the directives and subblocks within it being either enabled or disabled.
|
||||
While find_* interface methods allow including the disabled parts of the configuration
|
||||
tree in searches a special care needs to be taken while parsing the structure in
|
||||
order to reflect the active state of configuration.
|
||||
|
||||
Whitespaces

Each ParserNode object is responsible for storing its prepending whitespace characters
in order to be able to write the AST back to the filesystem as it was, preserving the
format; this applies to the parameters of BlockNode and DirectiveNode as well.
When the parameters of a ParserNode are changed, the pre-existing whitespaces in the
parameter sequence are discarded, as the general reason for storing them is to
maintain the ability to write the configuration back to the filesystem exactly as
it was. This loses its meaning when we have to change the directive or block
parameters for other reasons.
|
||||
|
||||
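One way to honor this requirement is to store the leading whitespace separately from the token value; a minimal sketch (class name hypothetical, not part of the interface):

class PrefixedToken(object):
    """Retain leading whitespace so the AST can be rendered byte-for-byte."""
    def __init__(self, raw):
        stripped = raw.lstrip()
        self.whitespace_prefix = raw[:len(raw) - len(stripped)]
        self.value = stripped

    def render(self):
        return self.whitespace_prefix + self.value

assert PrefixedToken("    SSLEngine on").render() == "    SSLEngine on"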
Searches and matching
|
||||
|
||||
Apache configuration is largely case insensitive, so the Apache implementation of
the ParserNode interface needs to provide the user with the means to match block and
directive names and parameters in a case-insensitive manner. This does not apply to
everything, however; for example, the parameters of a conditional statement may be
case sensitive. For this reason the internal representation of the data should not
ignore case.
|
||||
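A minimal sketch of the matching rule just described (helper name hypothetical): compare names case-insensitively while leaving the stored representation untouched.

def name_matches(stored, search):
    # Case-insensitive comparison; the stored value keeps its original case.
    return stored.lower() == search.lower()

assert name_matches("SSLEngine", "sslengine")
assert not name_matches("ServerName", "ServerAlias")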
"""
|
||||
|
||||
import abc
|
||||
import six
|
||||
|
||||
|
||||
|
||||
@six.add_metaclass(abc.ABCMeta)
|
||||
class ParserNode(object):
|
||||
"""
|
||||
ParserNode is the basic building block of the tree of such nodes,
|
||||
representing the structure of the configuration. It is largely meant to keep
|
||||
the structure information intact and idiomatically accessible.
|
||||
|
||||
The root node as well as the child nodes of it should be instances of ParserNode.
|
||||
Nodes keep track of their differences to on-disk representation of configuration
|
||||
by marking modified ParserNodes as dirty to enable partial write-to-disk for
|
||||
different files in the configuration structure.
|
||||
|
||||
While for the most parts the usage and the child types are obvious, "include"-
|
||||
and similar directives are an exception to this rule. This is because of the
|
||||
nature of include directives - which unroll the contents of another file or
|
||||
configuration block to their place. While we could unroll the included nodes
|
||||
to the parent tree, it remains important to keep the context of include nodes
|
||||
separate in order to write back the original configuration as it was.
|
||||
|
||||
For parsers that require the implementation to keep track of the whitespacing,
it is the responsibility of each ParserNode object itself to store its prepending
whitespaces in order to be able to reconstruct the complete configuration file
as it was when originally read from the disk.
|
||||
|
||||
ParserNode objects should have the following attributes:
|
||||
|
||||
# Reference to ancestor node, or None if the node is the root node of the
|
||||
# configuration tree.
|
||||
ancestor: Optional[ParserNode]
|
||||
|
||||
# True if this node has been modified since last save.
|
||||
dirty: bool
|
||||
|
||||
# Filepath of the file where the configuration element for this ParserNode
|
||||
# object resides. For root node, the value for filepath is the httpd root
|
||||
# configuration file. Filepath can be None if a configuration directive is
|
||||
# defined in for example the httpd command line.
|
||||
filepath: Optional[str]
|
||||
|
||||
# Metadata dictionary holds all the implementation specific key-value pairs
|
||||
# for the ParserNode instance.
|
||||
metadata: Dict[str, Any]
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def __init__(self, **kwargs):
|
||||
"""
|
||||
Initializes the ParserNode instance, and sets the ParserNode specific
|
||||
instance variables. This is not meant to be used directly, but through
|
||||
specific classes implementing ParserNode interface.
|
||||
|
||||
:param ancestor: BlockNode ancestor for this CommentNode. Required.
|
||||
:type ancestor: BlockNode or None
|
||||
|
||||
:param filepath: Filesystem path for the file where this CommentNode
|
||||
does or should exist in the filesystem. Required.
|
||||
:type filepath: str or None
|
||||
|
||||
:param dirty: Boolean flag for denoting if this CommentNode has been
|
||||
created or changed after the last save. Default: False.
|
||||
:type dirty: bool
|
||||
|
||||
:param metadata: Dictionary of metadata values for this ParserNode object.
|
||||
Metadata information should be used only internally in the implementation.
|
||||
Default: {}
|
||||
:type metadata: dict
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def save(self, msg):
|
||||
"""
|
||||
Save traverses the children, and attempts to write the AST to disk for
|
||||
all the objects that are marked dirty. The actual operation of course
|
||||
depends on the underlying implementation. save() shouldn't be called
|
||||
from the Configurator outside of its designated save() method in order
|
||||
to ensure that the Reverter checkpoints are created properly.
|
||||
|
||||
Note: this approach of keeping internal structure of the configuration
|
||||
within the ParserNode tree does not represent the file inclusion structure
|
||||
of actual configuration files that reside in the filesystem. To handle
|
||||
file writes properly, the file specific temporary trees should be extracted
|
||||
from the full ParserNode tree where necessary when writing to disk.
|
||||
|
||||
:param str msg: Message describing the reason for the save.
|
||||
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def find_ancestors(self, name):
|
||||
"""
|
||||
Traverses the ancestor tree up, searching for BlockNodes with a specific
|
||||
name.
|
||||
|
||||
:param str name: Name of the ancestor BlockNode to search for
|
||||
|
||||
:returns: A list of ancestor BlockNodes that match the name
|
||||
:rtype: list of BlockNode
|
||||
"""
|
||||
|
||||
|
||||
# Linter rule exclusion done because of https://github.com/PyCQA/pylint/issues/179
|
||||
@six.add_metaclass(abc.ABCMeta) # pylint: disable=abstract-method
|
||||
class CommentNode(ParserNode):
|
||||
"""
|
||||
CommentNode class is used for representation of comments within the parsed
|
||||
configuration structure. Because of the nature of comments, it is not able
|
||||
to have child nodes and hence it is always treated as a leaf node.
|
||||
|
||||
CommentNode stores its contents in class variable 'comment' and does not
|
||||
have a specific name.
|
||||
|
||||
CommentNode objects should have the following attributes in addition to
|
||||
the ones described in ParserNode:
|
||||
|
||||
# Contains the contents of the comment without the directive notation
|
||||
# (typically # or /* ... */).
|
||||
comment: str
|
||||
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def __init__(self, **kwargs):
|
||||
"""
|
||||
Initializes the CommentNode instance and sets its instance variables.
|
||||
|
||||
:param comment: Contents of the comment. Required.
|
||||
:type comment: str
|
||||
|
||||
:param ancestor: BlockNode ancestor for this CommentNode. Required.
|
||||
:type ancestor: BlockNode or None
|
||||
|
||||
:param filepath: Filesystem path for the file where this CommentNode
|
||||
does or should exist in the filesystem. Required.
|
||||
:type filepath: str or None
|
||||
|
||||
:param dirty: Boolean flag for denoting if this CommentNode has been
|
||||
created or changed after the last save. Default: False.
|
||||
:type dirty: bool
|
||||
"""
|
||||
super(CommentNode, self).__init__(ancestor=kwargs['ancestor'],
|
||||
dirty=kwargs.get('dirty', False),
|
||||
filepath=kwargs['filepath'],
|
||||
metadata=kwargs.get('metadata', {})) # pragma: no cover
|
||||
|
||||
|
||||
@six.add_metaclass(abc.ABCMeta)
|
||||
class DirectiveNode(ParserNode):
|
||||
"""
|
||||
DirectiveNode class represents a configuration directive within the configuration.
|
||||
It can have zero or more parameters attached to it. Because of the nature of
|
||||
single directives, it is not able to have child nodes and hence it is always
|
||||
treated as a leaf node.
|
||||
|
||||
If this directive was defined on the httpd command line, the ancestor instance
variable for this DirectiveNode should be None, and it should be inserted at the
beginning of the root BlockNode's children sequence.
|
||||
|
||||
DirectiveNode objects should have the following attributes in addition to
|
||||
the ones described in ParserNode:
|
||||
|
||||
# True if this DirectiveNode is enabled and False if it is inside of an
|
||||
# inactive conditional block.
|
||||
enabled: bool
|
||||
|
||||
# Name, or key of the configuration directive. If BlockNode subclass of
|
||||
# DirectiveNode is the root configuration node, the name should be None.
|
||||
name: Optional[str]
|
||||
|
||||
# Tuple of parameters of this ParserNode object, excluding whitespaces.
|
||||
parameters: Tuple[str, ...]
|
||||
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def __init__(self, **kwargs):
|
||||
"""
|
||||
Initializes the DirectiveNode instance and sets its instance variables.
|
||||
|
||||
:param name: Name or key of the DirectiveNode object. Required.
|
||||
:type name: str or None
|
||||
|
||||
:param tuple parameters: Tuple of str parameters for this DirectiveNode.
|
||||
Default: ().
|
||||
:type parameters: tuple
|
||||
|
||||
:param ancestor: BlockNode ancestor for this DirectiveNode, or None for
|
||||
root configuration node. Required.
|
||||
:type ancestor: BlockNode or None
|
||||
|
||||
:param filepath: Filesystem path for the file where this DirectiveNode
|
||||
does or should exist in the filesystem, or None for directives introduced
|
||||
in the httpd command line. Required.
|
||||
:type filepath: str or None
|
||||
|
||||
:param dirty: Boolean flag for denoting if this DirectiveNode has been
|
||||
created or changed after the last save. Default: False.
|
||||
:type dirty: bool
|
||||
|
||||
:param enabled: True if this DirectiveNode object is parsed in the active
|
||||
configuration of the httpd. False if the DirectiveNode exists within a
|
||||
unmatched conditional configuration block. Default: True.
|
||||
:type enabled: bool
|
||||
|
||||
"""
|
||||
super(DirectiveNode, self).__init__(ancestor=kwargs['ancestor'],
|
||||
dirty=kwargs.get('dirty', False),
|
||||
filepath=kwargs['filepath'],
|
||||
metadata=kwargs.get('metadata', {})) # pragma: no cover
|
||||
|
||||
@abc.abstractmethod
|
||||
def set_parameters(self, parameters):
|
||||
"""
|
||||
Sets the sequence of parameters for this ParserNode object without
|
||||
whitespaces. While the whitespaces for parameters are discarded when using
|
||||
this method, the whitespacing preceding the ParserNode itself should be
|
||||
kept intact.
|
||||
|
||||
:param list parameters: sequence of parameters
|
||||
"""
|
||||
|
||||
|
||||
@six.add_metaclass(abc.ABCMeta)
|
||||
class BlockNode(DirectiveNode):
|
||||
"""
|
||||
BlockNode class represents a block of nested configuration directives, comments
|
||||
and other blocks as its children. A BlockNode can have zero or more parameters
|
||||
attached to it.
|
||||
|
||||
Configuration blocks typically consist of one or more child nodes of all possible
|
||||
types. Because of this, the BlockNode class has various discovery and structure
|
||||
management methods.
|
||||
|
||||
Lists of parameters used as an optional argument for some of the methods should
|
||||
be lists of strings that are applicable parameters for each specific BlockNode
|
||||
or DirectiveNode type. As an example, for a following configuration example:
|
||||
|
||||
<VirtualHost *:80>
|
||||
...
|
||||
</VirtualHost>
|
||||
|
||||
The node type would be BlockNode, name would be 'VirtualHost' and its parameters
|
||||
would be: ['*:80'].
|
||||
|
||||
While for the following example:
|
||||
|
||||
LoadModule alias_module /usr/lib/apache2/modules/mod_alias.so
|
||||
|
||||
The node type would be DirectiveNode, name would be 'LoadModule' and its
|
||||
parameters would be: ['alias_module', '/usr/lib/apache2/modules/mod_alias.so']
|
||||
|
||||
The applicable parameters are dependent on the underlying configuration language
|
||||
and its grammar.
|
||||
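As a toy illustration of the mapping just described, both example lines reduce to a (name, parameters) pair (helper name hypothetical, not part of the interface):

def split_directive(line):
    # First token is the name; the rest are its parameters.
    parts = line.split()
    return parts[0], parts[1:]

assert split_directive(
    "LoadModule alias_module /usr/lib/apache2/modules/mod_alias.so") == (
    "LoadModule", ["alias_module", "/usr/lib/apache2/modules/mod_alias.so"])
assert split_directive("<VirtualHost *:80>".strip("<>")) == ("VirtualHost", ["*:80"])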
|
||||
BlockNode objects should have the following attributes in addition to
|
||||
the ones described in DirectiveNode:
|
||||
|
||||
# Tuple of direct children of this BlockNode object. The order of children
|
||||
# in this tuple retain the order of elements in the parsed configuration
|
||||
# block.
|
||||
children: Tuple[ParserNode, ...]
|
||||
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def add_child_block(self, name, parameters=None, position=None):
|
||||
"""
|
||||
Adds a new BlockNode child node with provided values and marks the callee
|
||||
BlockNode dirty. This is used to add new children to the AST. The preceding
|
||||
whitespaces should not be added based on the ancestor or siblings for the
|
||||
newly created object. This is to match the current behavior of the legacy
|
||||
parser implementation.
|
||||
|
||||
:param str name: The name of the child node to add
|
||||
:param list parameters: list of parameters for the node
|
||||
:param int position: Position in the list of children to add the new child
|
||||
node to. Defaults to None, which appends the newly created node to the list.
|
||||
If an integer is given, the child is inserted before that index in the
|
||||
list similar to list().insert.
|
||||
|
||||
:returns: BlockNode instance of the created child block
|
||||
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def add_child_directive(self, name, parameters=None, position=None):
|
||||
"""
|
||||
Adds a new DirectiveNode child node with provided values and marks the
|
||||
callee BlockNode dirty. This is used to add new children to the AST. The
|
||||
preceding whitespaces should not be added based on the ancestor or siblings
|
||||
for the newly created object. This is to match the current behavior of the
|
||||
legacy parser implementation.
|
||||
|
||||
|
||||
:param str name: The name of the child node to add
|
||||
:param list parameters: list of parameters for the node
|
||||
:param int position: Position in the list of children to add the new child
|
||||
node to. Defaults to None, which appends the newly created node to the list.
|
||||
If an integer is given, the child is inserted before that index in the
|
||||
list similar to list().insert.
|
||||
|
||||
:returns: DirectiveNode instance of the created child directive
|
||||
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def add_child_comment(self, comment="", position=None):
|
||||
"""
|
||||
Adds a new CommentNode child node with provided value and marks the
|
||||
callee BlockNode dirty. This is used to add new children to the AST. The
|
||||
preceeding whitespaces should not be added based on the ancestor or siblings
|
||||
for the newly created object. This is to match the current behavior of the
|
||||
legacy parser implementation.
|
||||
|
||||
|
||||
:param str comment: Comment contents
|
||||
:param int position: Position in the list of children to add the new child
|
||||
node to. Defaults to None, which appends the newly created node to the list.
|
||||
If an integer is given, the child is inserted before that index in the
|
||||
list similar to list().insert.
|
||||
|
||||
:returns: CommentNode instance of the created child comment
|
||||
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def find_blocks(self, name, exclude=True):
|
||||
"""
|
||||
Find a configuration block by name. This method walks the child tree of
|
||||
ParserNodes under the instance it was called from. This way it is possible
|
||||
to search for the whole configuration tree, when starting from root node or
|
||||
to do a partial search when starting from a specified branch. The lookup
|
||||
should be case insensitive.
|
||||
|
||||
:param str name: The name of the directive to search for
|
||||
:param bool exclude: If the search results should exclude the contents of
|
||||
ParserNode objects that reside within conditional blocks and because
|
||||
of current state are not enabled.
|
||||
|
||||
:returns: A list of found BlockNode objects.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def find_directives(self, name, exclude=True):
|
||||
"""
|
||||
Find a directive by name. This method walks the child tree of ParserNodes
|
||||
under the instance it was called from. This way it is possible to search
|
||||
for the whole configuration tree, when starting from root node, or to do
|
||||
a partial search when starting from a specified branch. The lookup should
|
||||
be case insensitive.
|
||||
|
||||
:param str name: The name of the directive to search for
|
||||
:param bool exclude: If the search results should exclude the contents of
|
||||
ParserNode objects that reside within conditional blocks and because
|
||||
of current state are not enabled.
|
||||
|
||||
:returns: A list of found DirectiveNode objects.
|
||||
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def find_comments(self, comment):
|
||||
"""
|
||||
Find comments with value containing the search term.
|
||||
|
||||
This method walks the child tree of ParserNodes under the instance it was
|
||||
called from. This way it is possible to search for the whole configuration
|
||||
tree, when starting from root node, or to do a partial search when starting
|
||||
from a specified branch. The lookup should be case sensitive.
|
||||
|
||||
:param str comment: The content of comment to search for
|
||||
|
||||
:returns: A list of found CommentNode objects.
|
||||
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def delete_child(self, child):
|
||||
"""
|
||||
Remove a specified child node from the list of children of the called
|
||||
BlockNode object.
|
||||
|
||||
:param ParserNode child: Child ParserNode object to remove from the list
|
||||
of children of the callee.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def unsaved_files(self):
|
||||
"""
|
||||
Returns a list of file paths that have been changed since the last save
|
||||
(or the initial configuration parse). The intended use for this method
|
||||
is to tell the Reverter which files need to be included in a checkpoint.
|
||||
|
||||
This is typically called for the root of the ParserNode tree.
|
||||
|
||||
:returns: list of file paths of files that have been changed but not yet
|
||||
saved to disk.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def parsed_paths(self):
|
||||
"""
|
||||
Returns a list of file paths that have currently been parsed into the parser
|
||||
tree. The returned list may include paths with wildcard characters, for
|
||||
example: ['/etc/apache2/conf.d/*.load']
|
||||
|
||||
This is typically called on the root node of the ParserNode tree.
|
||||
|
||||
:returns: list of file paths of files that have been parsed
|
||||
"""
|
||||
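
Taken together, the abstract methods above form a small tree manipulation API. The sketch below shows how a configurator might drive it; parser_root stands for any concrete BlockNode implementation, and the block, directive and parameter values are illustrative assumptions rather than part of the interface:

# A minimal usage sketch of the BlockNode interface, assuming `parser_root`
# is the root BlockNode of an already parsed configuration tree.
def ensure_ssl_vhost(parser_root):
    # find_blocks() walks the tree case-insensitively from the callee down.
    for vhost in parser_root.find_blocks("VirtualHost"):
        if "*:443" in vhost.parameters:
            return vhost
    # Nothing found: append a new block and populate it. Each add_child_*
    # call marks the callee dirty, so unsaved_files() reports the file.
    vhost = parser_root.add_child_block("VirtualHost", parameters=["*:443"])
    vhost.add_child_comment(comment="Added by the configurator")
    vhost.add_child_directive("SSLEngine", parameters=["on"])
    return vhost
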
@@ -1,7 +1,7 @@
"""Module contains classes used by the Apache Configurator."""
import re

from acme.magic_typing import Set # pylint: disable=unused-import, no-name-in-module
from acme.magic_typing import Set
from certbot.plugins import common


@@ -124,7 +124,7 @@ class VirtualHost(object):
strip_name = re.compile(r"^(?:.+://)?([^ :$]*)")

def __init__(self, filep, path, addrs, ssl, enabled, name=None,
aliases=None, modmacro=False, ancestor=None):
aliases=None, modmacro=False, ancestor=None, node=None):

"""Initialize a VH."""
self.filep = filep
@@ -136,6 +136,7 @@ class VirtualHost(object):
self.enabled = enabled
self.modmacro = modmacro
self.ancestor = ancestor
self.node = node

def get_names(self):
"""Return a set of all names."""

@@ -1,26 +0,0 @@
# This file contains important security parameters. If you modify this file
# manually, Certbot will be unable to automatically provide future security
# updates. Instead, Certbot will print and log an error message with a path to
# the up-to-date file that you will need to refer to when manually updating
# this file.

SSLEngine on

# Intermediate configuration, tweak to your needs
SSLProtocol all -SSLv2 -SSLv3
SSLCipherSuite ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305:ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:DHE-RSA-AES128-GCM-SHA256:DHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-AES128-SHA256:ECDHE-RSA-AES128-SHA256:ECDHE-ECDSA-AES128-SHA:ECDHE-RSA-AES256-SHA384:ECDHE-RSA-AES128-SHA:ECDHE-ECDSA-AES256-SHA384:ECDHE-ECDSA-AES256-SHA:ECDHE-RSA-AES256-SHA:DHE-RSA-AES128-SHA256:DHE-RSA-AES128-SHA:DHE-RSA-AES256-SHA256:DHE-RSA-AES256-SHA:ECDHE-ECDSA-DES-CBC3-SHA:ECDHE-RSA-DES-CBC3-SHA:EDH-RSA-DES-CBC3-SHA:AES128-GCM-SHA256:AES256-GCM-SHA384:AES128-SHA256:AES256-SHA256:AES128-SHA:AES256-SHA:DES-CBC3-SHA:!DSS
SSLHonorCipherOrder on
SSLCompression off

SSLOptions +StrictRequire

# Add vhost name to log entries:
LogFormat "%h %l %u %t \"%r\" %>s %b \"%{Referer}i\" \"%{User-agent}i\"" vhost_combined
LogFormat "%v %h %l %u %t \"%r\" %>s %b" vhost_common

#CustomLog /var/log/apache2/access.log vhost_combined
#LogLevel warn
#ErrorLog /var/log/apache2/error.log

# Always ensure Cookies have "Secure" set (JAH 2012/1)
#Header edit Set-Cookie (?i)^(.*)(;\s*secure)??((\s*;)?(.*)) "$1; Secure$3$4"
@@ -1,9 +1,7 @@
""" Distribution specific override class for Arch Linux """
import pkg_resources
import zope.interface

from certbot import interfaces
from certbot.compat import os
from certbot_apache._internal import configurator


@@ -26,6 +24,5 @@ class ArchConfigurator(configurator.ApacheConfigurator):
handle_modules=False,
handle_sites=False,
challenge_location="/etc/httpd/conf",
MOD_SSL_CONF_SRC=pkg_resources.resource_filename(
"certbot_apache", os.path.join("_internal", "options-ssl-apache.conf"))
bin=None,
)

@@ -1,14 +1,12 @@
""" Distribution specific override class for CentOS family (RHEL, Fedora) """
import logging

import pkg_resources
import zope.interface

from acme.magic_typing import List # pylint: disable=unused-import, no-name-in-module
from acme.magic_typing import List
from certbot import errors
from certbot import interfaces
from certbot import util
from certbot.compat import os
from certbot.errors import MisconfigurationError
from certbot_apache._internal import apache_util
from certbot_apache._internal import configurator
@@ -37,8 +35,7 @@ class CentOSConfigurator(configurator.ApacheConfigurator):
handle_modules=False,
handle_sites=False,
challenge_location="/etc/httpd/conf.d",
MOD_SSL_CONF_SRC=pkg_resources.resource_filename(
"certbot_apache", os.path.join("_internal", "centos-options-ssl-apache.conf"))
bin=None,
)

def config_test(self):

@@ -1,9 +1,7 @@
""" Distribution specific override class for macOS """
import pkg_resources
import zope.interface

from certbot import interfaces
from certbot.compat import os
from certbot_apache._internal import configurator


@@ -26,6 +24,5 @@ class DarwinConfigurator(configurator.ApacheConfigurator):
handle_modules=False,
handle_sites=False,
challenge_location="/etc/apache2/other",
MOD_SSL_CONF_SRC=pkg_resources.resource_filename(
"certbot_apache", os.path.join("_internal", "options-ssl-apache.conf"))
bin=None,
)

@@ -1,7 +1,6 @@
""" Distribution specific override class for Debian family (Ubuntu/Debian) """
import logging

import pkg_resources
import zope.interface

from certbot import errors
@@ -34,8 +33,7 @@ class DebianConfigurator(configurator.ApacheConfigurator):
handle_modules=True,
handle_sites=True,
challenge_location="/etc/apache2",
MOD_SSL_CONF_SRC=pkg_resources.resource_filename(
"certbot_apache", os.path.join("_internal", "options-ssl-apache.conf"))
bin=None,
)

def enable_site(self, vhost):

@@ -1,11 +1,9 @@
""" Distribution specific override class for Fedora 29+ """
import pkg_resources
import zope.interface

from certbot import errors
from certbot import interfaces
from certbot import util
from certbot.compat import os
from certbot_apache._internal import apache_util
from certbot_apache._internal import configurator
from certbot_apache._internal import parser
@@ -31,9 +29,7 @@ class FedoraConfigurator(configurator.ApacheConfigurator):
handle_modules=False,
handle_sites=False,
challenge_location="/etc/httpd/conf.d",
MOD_SSL_CONF_SRC=pkg_resources.resource_filename(
# TODO: eventually newest version of Fedora will need their own config
"certbot_apache", os.path.join("_internal", "centos-options-ssl-apache.conf"))
bin=None,
)

def config_test(self):

@@ -1,9 +1,7 @@
""" Distribution specific override class for Gentoo Linux """
import pkg_resources
import zope.interface

from certbot import interfaces
from certbot.compat import os
from certbot_apache._internal import apache_util
from certbot_apache._internal import configurator
from certbot_apache._internal import parser
@@ -29,8 +27,7 @@ class GentooConfigurator(configurator.ApacheConfigurator):
handle_modules=False,
handle_sites=False,
challenge_location="/etc/apache2/vhosts.d",
MOD_SSL_CONF_SRC=pkg_resources.resource_filename(
"certbot_apache", os.path.join("_internal", "options-ssl-apache.conf"))
bin=None,
)

def _prepare_options(self):
@@ -70,6 +67,6 @@ class GentooParser(parser.ApacheParser):
def update_modules(self):
"""Get loaded modules from httpd process, and add them to DOM"""
mod_cmd = [self.configurator.option("ctl"), "modules"]
matches = self.parse_from_subprocess(mod_cmd, r"(.*)_module")
matches = apache_util.parse_from_subprocess(mod_cmd, r"(.*)_module")
for mod in matches:
self.add_mod(mod.strip())

@@ -1,9 +1,7 @@
""" Distribution specific override class for OpenSUSE """
import pkg_resources
import zope.interface

from certbot import interfaces
from certbot.compat import os
from certbot_apache._internal import configurator


@@ -26,6 +24,5 @@ class OpenSUSEConfigurator(configurator.ApacheConfigurator):
handle_modules=False,
handle_sites=False,
challenge_location="/etc/apache2/vhosts.d",
MOD_SSL_CONF_SRC=pkg_resources.resource_filename(
"certbot_apache", os.path.join("_internal", "options-ssl-apache.conf"))
bin=None,
)

@@ -3,16 +3,15 @@ import copy
import fnmatch
import logging
import re
import subprocess
import sys

import six

from acme.magic_typing import Dict # pylint: disable=unused-import, no-name-in-module
from acme.magic_typing import List # pylint: disable=unused-import, no-name-in-module
from acme.magic_typing import Set # pylint: disable=unused-import, no-name-in-module
from acme.magic_typing import Dict
from acme.magic_typing import List
from certbot import errors
from certbot.compat import os
from certbot_apache._internal import apache_util
from certbot_apache._internal import constants

logger = logging.getLogger(__name__)
@@ -31,7 +30,7 @@ class ApacheParser(object):

"""
arg_var_interpreter = re.compile(r"\$\{[^ \}]*}")
fnmatch_chars = set(["*", "?", "\\", "[", "]"])
fnmatch_chars = {"*", "?", "\\", "[", "]"}

def __init__(self, root, vhostroot=None, version=(2, 4),
configurator=None):
@@ -52,7 +51,7 @@ class ApacheParser(object):
"version 1.2.0 or higher, please make sure you have "
"those installed.")

self.modules = set() # type: Set[str]
self.modules = {} # type: Dict[str, str]
self.parser_paths = {} # type: Dict[str, List[str]]
self.variables = {} # type: Dict[str, str]

@@ -249,14 +248,14 @@ class ApacheParser(object):
def add_mod(self, mod_name):
"""Shortcut for updating parser modules."""
if mod_name + "_module" not in self.modules:
self.modules.add(mod_name + "_module")
self.modules[mod_name + "_module"] = None
if "mod_" + mod_name + ".c" not in self.modules:
self.modules.add("mod_" + mod_name + ".c")
self.modules["mod_" + mod_name + ".c"] = None

def reset_modules(self):
"""Reset the loaded modules list. This is called from cleanup to clear
temporarily loaded modules."""
self.modules = set()
self.modules = {}
self.update_modules()
self.parse_modules()

@@ -267,7 +266,7 @@ class ApacheParser(object):
the iteration issue. Else... parse and enable mods at same time.

"""
mods = set() # type: Set[str]
mods = {} # type: Dict[str, str]
matches = self.find_dir("LoadModule")
iterator = iter(matches)
# Make sure prev_size != cur_size for do: while: iteration
@@ -281,8 +280,8 @@ class ApacheParser(object):
mod_name = self.get_arg(match_name)
mod_filename = self.get_arg(match_filename)
if mod_name and mod_filename:
mods.add(mod_name)
mods.add(os.path.basename(mod_filename)[:-2] + "c")
mods[mod_name] = mod_filename
mods[os.path.basename(mod_filename)[:-2] + "c"] = mod_filename
else:
logger.debug("Could not read LoadModule directive from Augeas path: %s",
match_name[6:])
@@ -290,32 +289,15 @@ class ApacheParser(object):

def update_runtime_variables(self):
"""Update Defines, Includes and Modules from httpd config dump data"""

self.update_defines()
self.update_includes()
self.update_modules()

def update_defines(self):
"""Get Defines from httpd process"""
"""Updates the dictionary of known variables in the configuration"""

variables = dict()
define_cmd = [self.configurator.option("ctl"), "-t", "-D",
"DUMP_RUN_CFG"]
matches = self.parse_from_subprocess(define_cmd, r"Define: ([^ \n]*)")
try:
matches.remove("DUMP_RUN_CFG")
except ValueError:
return

for match in matches:
if match.count("=") > 1:
logger.error("Unexpected number of equal signs in "
"runtime config dump.")
raise errors.PluginError(
"Error parsing Apache runtime variables")
parts = match.partition("=")
variables[parts[0]] = parts[2]

self.variables = variables
self.variables = apache_util.parse_defines(self.configurator.option("ctl"))
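
The inline parsing removed above (now delegated to apache_util.parse_defines) extracts Define: NAME=value entries from the httpd config dump and splits each on its first equals sign. A standalone sketch of that behavior, using made-up dump output:

import re

# Hypothetical stdout of `apachectl -t -D DUMP_RUN_CFG`.
sample_stdout = "Define: DUMP_RUN_CFG\nDefine: MODPERL2\nDefine: FOO=bar\n"

matches = re.findall(r"Define: ([^ \n]*)", sample_stdout)
matches.remove("DUMP_RUN_CFG")  # the dump flag itself is not a variable

variables = {}
for match in matches:
    # partition("=") leaves the value empty for flag-style defines
    name, _, value = match.partition("=")
    variables[name] = value

print(variables)  # {'MODPERL2': '', 'FOO': 'bar'}
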
def update_includes(self):
"""Get includes from httpd process, and add them to DOM if needed"""
@@ -325,9 +307,7 @@ class ApacheParser(object):
# configuration files
_ = self.find_dir("Include")

inc_cmd = [self.configurator.option("ctl"), "-t", "-D",
"DUMP_INCLUDES"]
matches = self.parse_from_subprocess(inc_cmd, r"\(.*\) (.*)")
matches = apache_util.parse_includes(self.configurator.option("ctl"))
if matches:
for i in matches:
if not self.parsed_in_current(i):
@@ -336,57 +316,11 @@ class ApacheParser(object):
def update_modules(self):
"""Get loaded modules from httpd process, and add them to DOM"""

mod_cmd = [self.configurator.option("ctl"), "-t", "-D",
"DUMP_MODULES"]
matches = self.parse_from_subprocess(mod_cmd, r"(.*)_module")
matches = apache_util.parse_modules(self.configurator.option("ctl"))
for mod in matches:
self.add_mod(mod.strip())

def parse_from_subprocess(self, command, regexp):
"""Get values from stdout of subprocess command

:param list command: Command to run
:param str regexp: Regexp for parsing

:returns: list parsed from command output
:rtype: list

"""
stdout = self._get_runtime_cfg(command)
return re.compile(regexp).findall(stdout)

def _get_runtime_cfg(self, command): # pylint: disable=no-self-use
"""Get runtime configuration info.
:param command: Command to run

:returns: stdout from command

"""
try:
proc = subprocess.Popen(
command,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
universal_newlines=True)
stdout, stderr = proc.communicate()

except (OSError, ValueError):
logger.error(
"Error running command %s for runtime parameters!%s",
command, os.linesep)
raise errors.MisconfigurationError(
"Error accessing loaded Apache parameters: {0}".format(
command))
# Small errors that do not impede
if proc.returncode != 0:
logger.warning("Error in checking parameter list: %s", stderr)
raise errors.MisconfigurationError(
"Apache is unable to check whether or not the module is "
"loaded because Apache is misconfigured.")

return stdout

def filter_args_num(self, matches, args): # pylint: disable=no-self-use
def filter_args_num(self, matches, args):
"""Filter out directives with specific number of arguments.

This function makes the assumption that all related arguments are given
@@ -612,7 +546,7 @@ class ApacheParser(object):
"%s//*[self::directive=~regexp('%s')]" % (start, regex))

if exclude:
matches = self._exclude_dirs(matches)
matches = self.exclude_dirs(matches)

if arg is None:
arg_suffix = "/arg"
@@ -678,9 +612,15 @@ class ApacheParser(object):

return value

def _exclude_dirs(self, matches):
def get_root_augpath(self):
"""
Returns the Augeas path of root configuration.
"""
return get_aug_path(self.loc["root"])

def exclude_dirs(self, matches):
"""Exclude directives that are not loaded into the configuration."""
filters = [("ifmodule", self.modules), ("ifdefine", self.variables)]
filters = [("ifmodule", self.modules.keys()), ("ifdefine", self.variables)]

valid_matches = []

@@ -721,6 +661,25 @@ class ApacheParser(object):

return True

def standard_path_from_server_root(self, arg):
"""Ensure paths are consistent and absolute

:param str arg: Argument of directive

:returns: Standardized argument path
:rtype: str
"""
# Remove beginning and ending quotes
arg = arg.strip("'\"")

# Standardize the include argument based on server root
if not arg.startswith("/"):
# Normpath will condense ../
arg = os.path.normpath(os.path.join(self.root, arg))
else:
arg = os.path.normpath(arg)
return arg

def _get_include_path(self, arg):
"""Converts an Apache Include directive into Augeas path.

@@ -741,16 +700,7 @@ class ApacheParser(object):
# if matchObj.group() != arg:
# logger.error("Error: Invalid regexp characters in %s", arg)
# return []

# Remove beginning and ending quotes
arg = arg.strip("'\"")

# Standardize the include argument based on server root
if not arg.startswith("/"):
# Normpath will condense ../
arg = os.path.normpath(os.path.join(self.root, arg))
else:
arg = os.path.normpath(arg)
arg = self.standard_path_from_server_root(arg)

# Attempts to add a transform to the file if one does not already exist
if os.path.isdir(arg):
@@ -764,7 +714,7 @@ class ApacheParser(object):
split_arg = arg.split("/")
for idx, split in enumerate(split_arg):
if any(char in ApacheParser.fnmatch_chars for char in split):
# Turn it into a augeas regex
# Turn it into an augeas regex
# TODO: Can this instead be an augeas glob instead of regex
split_arg[idx] = ("* [label()=~regexp('%s')]" %
self.fnmatch_to_re(split))
@@ -774,14 +724,13 @@ class ApacheParser(object):

return get_aug_path(arg)

def fnmatch_to_re(self, clean_fn_match): # pylint: disable=no-self-use
def fnmatch_to_re(self, clean_fn_match):
"""Method converts Apache's basic fnmatch to regular expression.

Assumption - Configs are assumed to be well-formed and only writable by
privileged users.

https://apr.apache.org/docs/apr/2.0/apr__fnmatch_8h_source.html
http://apache2.sourcearchive.com/documentation/2.2.16-6/apr__fnmatch_8h_source.html

:param str clean_fn_match: Apache style filename match, like globs

@@ -791,7 +740,7 @@ class ApacheParser(object):
"""
if sys.version_info < (3, 6):
# This strips off final /Z(?ms)
return fnmatch.translate(clean_fn_match)[:-7]
return fnmatch.translate(clean_fn_match)[:-7] # pragma: no cover
# Since Python 3.6, it returns a different pattern like (?s:.*\.load)\Z
return fnmatch.translate(clean_fn_match)[4:-3] # pragma: no cover
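
The version check above exists because fnmatch.translate changed its output format: before Python 3.6 the generated pattern ended in \Z(?ms) (seven characters to strip), while 3.6+ wraps the body as (?s:...)\Z. A quick illustration of the slicing on a modern interpreter (the exact pattern text is version dependent):

import fnmatch

# On Python 3.6+ translate() returns something like '(?s:.*\\.load)\\Z'.
pattern = fnmatch.translate("*.load")
print(pattern)        # (?s:.*\.load)\Z
print(pattern[4:-3])  # .*\.load  -- the bare regex body handed to Augeas
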
@@ -849,7 +798,7 @@ class ApacheParser(object):
def _parsed_by_parser_paths(self, filep, paths):
"""Helper function that searches through provided paths and returns
True if file path is found in the set"""
for directory in paths.keys():
for directory in paths:
for filename in paths[directory]:
if fnmatch.fnmatch(filep, os.path.join(directory, filename)):
return True
@@ -995,8 +944,8 @@ def case_i(string):
:param str string: string to make case i regex

"""
return "".join(["[" + c.upper() + c.lower() + "]"
if c.isalpha() else c for c in re.escape(string)])
return "".join("[" + c.upper() + c.lower() + "]"
if c.isalpha() else c for c in re.escape(string))


def get_aug_path(file_path):

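case_i() builds a case-insensitive regex by hand because Augeas regexes have no case-insensitivity flag; every letter is replaced with a two-character class. For example:

import re

def case_i(string):
    # Same logic as the function above: wrap each letter in [Xx].
    return "".join("[" + c.upper() + c.lower() + "]"
                   if c.isalpha() else c for c in re.escape(string))

print(case_i("Include"))  # [Ii][Nn][Cc][Ll][Uu][Dd][Ee]
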
129 certbot-apache/certbot_apache/_internal/parsernode_util.py Normal file
@@ -0,0 +1,129 @@
"""ParserNode utils"""


def validate_kwargs(kwargs, required_names):
"""
Ensures that the kwargs dict has all the expected values. This function
modifies the kwargs dictionary, and hence the returned dictionary should be
used in the caller function instead of the original kwargs.

:param dict kwargs: Dictionary of keyword arguments to validate.
:param list required_names: List of required parameter names.
"""

validated_kwargs = {}
for name in required_names:
try:
validated_kwargs[name] = kwargs.pop(name)
except KeyError:
raise TypeError("Required keyword argument: {} undefined.".format(name))

# Raise exception if unknown keyword arguments are found.
if kwargs:
unknown = ", ".join(kwargs.keys())
raise TypeError("Unknown keyword argument(s): {}".format(unknown))
return validated_kwargs


def parsernode_kwargs(kwargs):
"""
Validates keyword arguments for ParserNode. This function modifies the kwargs
dictionary, and hence the returned dictionary should be used in the caller
function instead of the original kwargs.

If metadata is provided, the otherwise required argument "filepath" may be
omitted if the implementation is able to extract its value from the metadata.
This use case is handled within this function. Filepath defaults to None.

:param dict kwargs: Keyword argument dictionary to validate.

:returns: Tuple of validated and prepared arguments.
"""

# As many values of ParserNode instances can be derived from the metadata,
# (ancestor being a common exception here) make sure we permit it here as well.
if "metadata" in kwargs:
# Filepath can be derived from the metadata in Augeas implementation.
# Default is None, as in this case the responsibility of populating this
# variable lies on the implementation.
kwargs.setdefault("filepath", None)

kwargs.setdefault("dirty", False)
kwargs.setdefault("metadata", {})

kwargs = validate_kwargs(kwargs, ["ancestor", "dirty", "filepath", "metadata"])
return kwargs["ancestor"], kwargs["dirty"], kwargs["filepath"], kwargs["metadata"]


def commentnode_kwargs(kwargs):
"""
Validates keyword arguments for CommentNode and sets the default values for
optional kwargs. This function modifies the kwargs dictionary, and hence the
returned dictionary should be used in the caller function instead of the
original kwargs.

If metadata is provided, the otherwise required argument "comment" may be
omitted if the implementation is able to extract its value from the metadata.
This use case is handled within this function.

:param dict kwargs: Keyword argument dictionary to validate.

:returns: Tuple of validated and prepared arguments and ParserNode kwargs.
"""

# As many values of ParserNode instances can be derived from the metadata,
# (ancestor being a common exception here) make sure we permit it here as well.
if "metadata" in kwargs:
kwargs.setdefault("comment", None)
# Filepath can be derived from the metadata in Augeas implementation.
# Default is None, as in this case the responsibility of populating this
# variable lies on the implementation.
kwargs.setdefault("filepath", None)

kwargs.setdefault("dirty", False)
kwargs.setdefault("metadata", {})

kwargs = validate_kwargs(kwargs, ["ancestor", "dirty", "filepath", "comment",
"metadata"])

comment = kwargs.pop("comment")
return comment, kwargs


def directivenode_kwargs(kwargs):
"""
Validates keyword arguments for DirectiveNode and BlockNode and sets the
default values for optional kwargs. This function modifies the kwargs
dictionary, and hence the returned dictionary should be used in the caller
function instead of the original kwargs.

If metadata is provided, the otherwise required argument "name" may be
omitted if the implementation is able to extract its value from the metadata.
This use case is handled within this function.

:param dict kwargs: Keyword argument dictionary to validate.

:returns: Tuple of validated and prepared arguments and ParserNode kwargs.
"""

# As many values of ParserNode instances can be derived from the metadata,
# (ancestor being a common exception here) make sure we permit it here as well.
if "metadata" in kwargs:
kwargs.setdefault("name", None)
# Filepath can be derived from the metadata in Augeas implementation.
# Default is None, as in this case the responsibility of populating this
# variable lies on the implementation.
kwargs.setdefault("filepath", None)

kwargs.setdefault("dirty", False)
kwargs.setdefault("enabled", True)
kwargs.setdefault("parameters", ())
kwargs.setdefault("metadata", {})

kwargs = validate_kwargs(kwargs, ["ancestor", "dirty", "filepath", "name",
"parameters", "enabled", "metadata"])

name = kwargs.pop("name")
parameters = kwargs.pop("parameters")
enabled = kwargs.pop("enabled")
return name, parameters, enabled, kwargs
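
As a quick illustration of the helpers above, this is how a node constructor might consume its keyword arguments; the consuming function here is hypothetical, only directivenode_kwargs comes from the file itself:

# Hypothetical consumer of directivenode_kwargs() defined above.
def directive_node_init(**kwargs):
    name, parameters, enabled, parsernode_kw = directivenode_kwargs(kwargs)
    # parsernode_kw now holds only ancestor/dirty/filepath/metadata.
    print(name, parameters, enabled, sorted(parsernode_kw))

directive_node_init(name="LoadModule",
                    parameters=["alias_module", "mod_alias.so"],
                    ancestor=None, filepath="/etc/apache2/apache2.conf")
# LoadModule ['alias_module', 'mod_alias.so'] True
# ['ancestor', 'dirty', 'filepath', 'metadata']
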
@@ -0,0 +1,19 @@
# This file contains important security parameters. If you modify this file
# manually, Certbot will be unable to automatically provide future security
# updates. Instead, Certbot will print and log an error message with a path to
# the up-to-date file that you will need to refer to when manually updating
# this file.

SSLEngine on

# Intermediate configuration, tweak to your needs
SSLProtocol all -SSLv2 -SSLv3 -TLSv1 -TLSv1.1
SSLCipherSuite ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305:DHE-RSA-AES128-GCM-SHA256:DHE-RSA-AES256-GCM-SHA384
SSLHonorCipherOrder off
SSLSessionTickets off

SSLOptions +StrictRequire

# Add vhost name to log entries:
LogFormat "%h %l %u %t \"%r\" %>s %b \"%{Referer}i\" \"%{User-agent}i\"" vhost_combined
LogFormat "%v %h %l %u %t \"%r\" %>s %b" vhost_common

@@ -0,0 +1,18 @@
# This file contains important security parameters. If you modify this file
# manually, Certbot will be unable to automatically provide future security
# updates. Instead, Certbot will print and log an error message with a path to
# the up-to-date file that you will need to refer to when manually updating
# this file.

SSLEngine on

# Intermediate configuration, tweak to your needs
SSLProtocol all -SSLv2 -SSLv3 -TLSv1 -TLSv1.1
SSLCipherSuite ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305:DHE-RSA-AES128-GCM-SHA256:DHE-RSA-AES256-GCM-SHA384
SSLHonorCipherOrder off

SSLOptions +StrictRequire

# Add vhost name to log entries:
LogFormat "%h %l %u %t \"%r\" %>s %b \"%{Referer}i\" \"%{User-agent}i\"" vhost_combined
LogFormat "%v %h %l %u %t \"%r\" %>s %b" vhost_common

@@ -1,3 +1,3 @@
# Remember to update setup.py to match the package versions below.
acme[dev]==0.29.0
-e certbot[dev]
certbot[dev]==1.6.0
@@ -1,38 +1,35 @@
from distutils.version import LooseVersion
import sys

from setuptools import __version__ as setuptools_version
from setuptools import find_packages
from setuptools import setup
from setuptools.command.test import test as TestCommand

version = '1.1.0.dev0'
version = '1.10.0.dev0'

# Remember to update local-oldest-requirements.txt when changing the minimum
# acme/certbot version.
install_requires = [
'acme>=0.29.0',
'certbot>=1.0.0.dev0',
'mock',
'certbot>=1.6.0',
'python-augeas',
'setuptools',
'zope.component',
'zope.interface',
]

setuptools_known_environment_markers = (LooseVersion(setuptools_version) >= LooseVersion('36.2'))
if setuptools_known_environment_markers:
install_requires.append('mock ; python_version < "3.3"')
elif 'bdist_wheel' in sys.argv[1:]:
raise RuntimeError('Error, you are trying to build certbot wheels using an old version '
'of setuptools. Version 36.2+ of setuptools is required.')
elif sys.version_info < (3,3):
install_requires.append('mock')

class PyTest(TestCommand):
user_options = []

def initialize_options(self):
TestCommand.initialize_options(self)
self.pytest_args = ''

def run_tests(self):
import shlex
# import here, cause outside the eggs aren't loaded
import pytest
errno = pytest.main(shlex.split(self.pytest_args))
sys.exit(errno)

dev_extras = [
'apacheconfig>=0.3.2',
]

setup(
name='certbot-apache',
@@ -42,7 +39,7 @@ setup(
author="Certbot Project",
author_email='client-dev@letsencrypt.org',
license='Apache License 2.0',
python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*',
python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Plugins',
@@ -53,11 +50,10 @@ setup(
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Security',
'Topic :: System :: Installation/Setup',
@@ -69,12 +65,12 @@ setup(
packages=find_packages(),
include_package_data=True,
install_requires=install_requires,
extras_require={
'dev': dev_extras,
},
entry_points={
'certbot.plugins': [
'apache = certbot_apache._internal.entrypoint:ENTRYPOINT',
],
},
test_suite='certbot_apache',
tests_require=["pytest"],
cmdclass={"test": PyTest},
)

335 certbot-apache/tests/augeasnode_test.py Normal file
@@ -0,0 +1,335 @@
"""Tests for AugeasParserNode classes"""
try:
import mock
except ImportError: # pragma: no cover
from unittest import mock # type: ignore

import os
import util

from certbot import errors

from certbot_apache._internal import assertions
from certbot_apache._internal import augeasparser


def _get_augeasnode_mock(filepath):
""" Helper function for mocking out DualNode instance with an AugeasNode """
def augeasnode_mock(metadata):
return augeasparser.AugeasBlockNode(
name=assertions.PASS,
ancestor=None,
filepath=filepath,
metadata=metadata)
return augeasnode_mock

class AugeasParserNodeTest(util.ApacheTest): # pylint: disable=too-many-public-methods
"""Test AugeasParserNode using available test configurations"""

def setUp(self): # pylint: disable=arguments-differ
super(AugeasParserNodeTest, self).setUp()

with mock.patch("certbot_apache._internal.configurator.ApacheConfigurator.get_parsernode_root") as mock_parsernode:
mock_parsernode.side_effect = _get_augeasnode_mock(
os.path.join(self.config_path, "apache2.conf"))
self.config = util.get_apache_configurator(
self.config_path, self.vhost_path, self.config_dir, self.work_dir, use_parsernode=True)
self.vh_truth = util.get_vh_truth(
self.temp_dir, "debian_apache_2_4/multiple_vhosts")

def test_save(self):
with mock.patch('certbot_apache._internal.parser.ApacheParser.save') as mock_save:
self.config.parser_root.save("A save message")
self.assertTrue(mock_save.called)
self.assertEqual(mock_save.call_args[0][0], "A save message")

def test_unsaved_files(self):
with mock.patch('certbot_apache._internal.parser.ApacheParser.unsaved_files') as mock_uf:
mock_uf.return_value = ["first", "second"]
files = self.config.parser_root.unsaved_files()
self.assertEqual(files, ["first", "second"])

def test_get_block_node_name(self):
from certbot_apache._internal.augeasparser import AugeasBlockNode
block = AugeasBlockNode(
name=assertions.PASS,
ancestor=None,
filepath=assertions.PASS,
metadata={"augeasparser": mock.Mock(), "augeaspath": "/files/anything"}
)
testcases = {
"/some/path/FirstNode/SecondNode": "SecondNode",
"/some/path/FirstNode/SecondNode/": "SecondNode",
"OnlyPathItem": "OnlyPathItem",
"/files/etc/apache2/apache2.conf/VirtualHost": "VirtualHost",
"/Anything": "Anything",
}
for test in testcases:
self.assertEqual(block._aug_get_name(test), testcases[test]) # pylint: disable=protected-access

def test_find_blocks(self):
blocks = self.config.parser_root.find_blocks("VirtualHost", exclude=False)
self.assertEqual(len(blocks), 12)

def test_find_blocks_case_insensitive(self):
vhs = self.config.parser_root.find_blocks("VirtualHost")
vhs2 = self.config.parser_root.find_blocks("viRtuAlHoST")
self.assertEqual(len(vhs), len(vhs2))

def test_find_directive_found(self):
directives = self.config.parser_root.find_directives("Listen")
self.assertEqual(len(directives), 1)
self.assertTrue(directives[0].filepath.endswith("/apache2/ports.conf"))
self.assertEqual(directives[0].parameters, (u'80',))

def test_find_directive_notfound(self):
directives = self.config.parser_root.find_directives("Nonexistent")
self.assertEqual(len(directives), 0)

def test_find_directive_from_block(self):
blocks = self.config.parser_root.find_blocks("virtualhost")
found = False
for vh in blocks:
if vh.filepath.endswith("sites-enabled/certbot.conf"):
servername = vh.find_directives("servername")
self.assertEqual(servername[0].parameters[0], "certbot.demo")
found = True
self.assertTrue(found)

def test_find_comments(self):
rootcomment = self.config.parser_root.find_comments(
"This is the main Apache server configuration file. "
)
self.assertEqual(len(rootcomment), 1)
self.assertTrue(rootcomment[0].filepath.endswith(
"debian_apache_2_4/multiple_vhosts/apache2/apache2.conf"
))

def test_set_parameters(self):
servernames = self.config.parser_root.find_directives("servername")
names = [] # type: List[str]
for servername in servernames:
names += servername.parameters
self.assertFalse("going_to_set_this" in names)
servernames[0].set_parameters(["something", "going_to_set_this"])
servernames = self.config.parser_root.find_directives("servername")
names = []
for servername in servernames:
names += servername.parameters
self.assertTrue("going_to_set_this" in names)

def test_set_parameters_atinit(self):
from certbot_apache._internal.augeasparser import AugeasDirectiveNode
servernames = self.config.parser_root.find_directives("servername")
setparam = "certbot_apache._internal.augeasparser.AugeasDirectiveNode.set_parameters"
with mock.patch(setparam) as mock_set:
AugeasDirectiveNode(
name=servernames[0].name,
parameters=["test", "setting", "these"],
ancestor=assertions.PASS,
metadata=servernames[0].metadata
)
self.assertTrue(mock_set.called)
self.assertEqual(
mock_set.call_args_list[0][0][0],
["test", "setting", "these"]
)

def test_set_parameters_delete(self):
# Set params
servername = self.config.parser_root.find_directives("servername")[0]
servername.set_parameters(["thisshouldnotexistpreviously", "another",
"third"])

# Delete params
servernames = self.config.parser_root.find_directives("servername")
found = False
for servername in servernames:
if "thisshouldnotexistpreviously" in servername.parameters:
self.assertEqual(len(servername.parameters), 3)
servername.set_parameters(["thisshouldnotexistpreviously"])
found = True
self.assertTrue(found)

# Verify params
servernames = self.config.parser_root.find_directives("servername")
found = False
for servername in servernames:
if "thisshouldnotexistpreviously" in servername.parameters:
self.assertEqual(len(servername.parameters), 1)
servername.set_parameters(["thisshouldnotexistpreviously"])
found = True
self.assertTrue(found)

def test_add_child_comment(self):
newc = self.config.parser_root.add_child_comment("The content")
comments = self.config.parser_root.find_comments("The content")
self.assertEqual(len(comments), 1)
self.assertEqual(
newc.metadata["augeaspath"],
comments[0].metadata["augeaspath"]
)
self.assertEqual(newc.comment, comments[0].comment)

def test_delete_child(self):
listens = self.config.parser_root.find_directives("Listen")
self.assertEqual(len(listens), 1)
self.config.parser_root.delete_child(listens[0])

listens = self.config.parser_root.find_directives("Listen")
self.assertEqual(len(listens), 0)

def test_delete_child_not_found(self):
listen = self.config.parser_root.find_directives("Listen")[0]
listen.metadata["augeaspath"] = "/files/something/nonexistent"

self.assertRaises(
errors.PluginError,
self.config.parser_root.delete_child,
listen
)

def test_add_child_block(self):
nb = self.config.parser_root.add_child_block(
"NewBlock",
["first", "second"]
)
rpath, _, directive = nb.metadata["augeaspath"].rpartition("/")
self.assertEqual(
rpath,
self.config.parser_root.metadata["augeaspath"]
)
self.assertTrue(directive.startswith("NewBlock"))

def test_add_child_block_beginning(self):
self.config.parser_root.add_child_block(
"Beginning",
position=0
)
parser = self.config.parser_root.parser
root_path = self.config.parser_root.metadata["augeaspath"]
# Get first child
first = parser.aug.match("{}/*[1]".format(root_path))
self.assertTrue(first[0].endswith("Beginning"))

def test_add_child_block_append(self):
self.config.parser_root.add_child_block(
"VeryLast",
)
parser = self.config.parser_root.parser
root_path = self.config.parser_root.metadata["augeaspath"]
# Get last child
last = parser.aug.match("{}/*[last()]".format(root_path))
self.assertTrue(last[0].endswith("VeryLast"))

def test_add_child_block_append_alt(self):
self.config.parser_root.add_child_block(
"VeryLastAlt",
position=99999
)
parser = self.config.parser_root.parser
root_path = self.config.parser_root.metadata["augeaspath"]
# Get last child
last = parser.aug.match("{}/*[last()]".format(root_path))
self.assertTrue(last[0].endswith("VeryLastAlt"))

def test_add_child_block_middle(self):
self.config.parser_root.add_child_block(
"Middle",
position=5
)
parser = self.config.parser_root.parser
root_path = self.config.parser_root.metadata["augeaspath"]
# Augeas indices start at 1 :(
middle = parser.aug.match("{}/*[6]".format(root_path))
self.assertTrue(middle[0].endswith("Middle"))

def test_add_child_block_existing_name(self):
parser = self.config.parser_root.parser
root_path = self.config.parser_root.metadata["augeaspath"]
# There already exists a single VirtualHost in the base config
new_block = parser.aug.match("{}/VirtualHost[2]".format(root_path))
self.assertEqual(len(new_block), 0)
vh = self.config.parser_root.add_child_block(
"VirtualHost",
)
new_block = parser.aug.match("{}/VirtualHost[2]".format(root_path))
self.assertEqual(len(new_block), 1)
self.assertTrue(vh.metadata["augeaspath"].endswith("VirtualHost[2]"))

def test_node_init_error_bad_augeaspath(self):
from certbot_apache._internal.augeasparser import AugeasBlockNode
parameters = {
"name": assertions.PASS,
"ancestor": None,
"filepath": assertions.PASS,
"metadata": {
"augeasparser": mock.Mock(),
"augeaspath": "/files/path/endswith/slash/"
}
}
self.assertRaises(
errors.PluginError,
AugeasBlockNode,
**parameters
)

def test_node_init_error_missing_augeaspath(self):
from certbot_apache._internal.augeasparser import AugeasBlockNode
parameters = {
"name": assertions.PASS,
"ancestor": None,
"filepath": assertions.PASS,
"metadata": {
"augeasparser": mock.Mock(),
}
}
self.assertRaises(
errors.PluginError,
AugeasBlockNode,
**parameters
)

def test_add_child_directive(self):
self.config.parser_root.add_child_directive(
"ThisWasAdded",
["with", "parameters"],
position=0
)
dirs = self.config.parser_root.find_directives("ThisWasAdded")
self.assertEqual(len(dirs), 1)
self.assertEqual(dirs[0].parameters, ("with", "parameters"))
# The new directive was added to the very first line of the config
self.assertTrue(dirs[0].metadata["augeaspath"].endswith("[1]"))

def test_add_child_directive_exception(self):
self.assertRaises(
errors.PluginError,
self.config.parser_root.add_child_directive,
"ThisRaisesErrorBecauseMissingParameters"
)

def test_parsed_paths(self):
paths = self.config.parser_root.parsed_paths()
self.assertEqual(len(paths), 6)

def test_find_ancestors(self):
vhsblocks = self.config.parser_root.find_blocks("VirtualHost")
macro_test = False
nonmacro_test = False
for vh in vhsblocks:
if "/macro/" in vh.metadata["augeaspath"].lower():
ancs = vh.find_ancestors("Macro")
self.assertEqual(len(ancs), 1)
macro_test = True
else:
ancs = vh.find_ancestors("Macro")
self.assertEqual(len(ancs), 0)
nonmacro_test = True
self.assertTrue(macro_test)
self.assertTrue(nonmacro_test)

def test_find_ancestors_bad_path(self):
self.config.parser_root.metadata["augeaspath"] = ""
ancs = self.config.parser_root.find_ancestors("Anything")
self.assertEqual(len(ancs), 0)
@@ -3,7 +3,10 @@
import re
import unittest

import mock
try:
import mock
except ImportError: # pragma: no cover
from unittest import mock # type: ignore
import six # pylint: disable=unused-import # six is used in mock.patch()

from certbot import errors
@@ -20,10 +23,10 @@ class AutoHSTSTest(util.ApacheTest):

self.config = util.get_apache_configurator(
self.config_path, self.vhost_path, self.config_dir, self.work_dir)
self.config.parser.modules.add("headers_module")
self.config.parser.modules.add("mod_headers.c")
self.config.parser.modules.add("ssl_module")
self.config.parser.modules.add("mod_ssl.c")
self.config.parser.modules["headers_module"] = None
self.config.parser.modules["mod_headers.c"] = None
self.config.parser.modules["ssl_module"] = None
self.config.parser.modules["mod_ssl.c"] = None

self.vh_truth = util.get_vh_truth(
self.temp_dir, "debian_apache_2_4/multiple_vhosts")
@@ -42,8 +45,8 @@ class AutoHSTSTest(util.ApacheTest):
@mock.patch("certbot_apache._internal.configurator.ApacheConfigurator.restart")
@mock.patch("certbot_apache._internal.configurator.ApacheConfigurator.enable_mod")
def test_autohsts_enable_headers_mod(self, mock_enable, _restart):
self.config.parser.modules.discard("headers_module")
self.config.parser.modules.discard("mod_header.c")
self.config.parser.modules.pop("headers_module", None)
self.config.parser.modules.pop("mod_header.c", None)
self.config.enable_autohsts(mock.MagicMock(), ["ocspvhost.com"])
self.assertTrue(mock_enable.called)


@@ -19,12 +19,12 @@ def get_vh_truth(temp_dir, config_name):
obj.VirtualHost(
os.path.join(prefix, "test.example.com.conf"),
os.path.join(aug_pre, "test.example.com.conf/VirtualHost"),
set([obj.Addr.fromstring("*:80")]),
{obj.Addr.fromstring("*:80")},
False, True, "test.example.com"),
obj.VirtualHost(
os.path.join(prefix, "ssl.conf"),
os.path.join(aug_pre, "ssl.conf/VirtualHost"),
set([obj.Addr.fromstring("_default_:443")]),
{obj.Addr.fromstring("_default_:443")},
True, True, None)
]
return vh_truth
@@ -104,7 +104,7 @@ class CentOS6Tests(util.ApacheTest):
pre_loadmods = self.config.parser.find_dir(
"LoadModule", "ssl_module", exclude=False)
# LoadModules are not within IfModule blocks
self.assertFalse(any(["ifmodule" in m.lower() for m in pre_loadmods]))
self.assertFalse(any("ifmodule" in m.lower() for m in pre_loadmods))
self.config.assoc["test.example.com"] = self.vh_truth[0]
self.config.deploy_cert(
"random.demo", "example/cert.pem", "example/key.pem",

@@ -1,7 +1,10 @@
"""Test for certbot_apache._internal.configurator for Centos overrides"""
import unittest

import mock
try:
import mock
except ImportError: # pragma: no cover
from unittest import mock # type: ignore

from certbot import errors
from certbot.compat import filesystem
@@ -21,12 +24,12 @@ def get_vh_truth(temp_dir, config_name):
obj.VirtualHost(
os.path.join(prefix, "centos.example.com.conf"),
os.path.join(aug_pre, "centos.example.com.conf/VirtualHost"),
set([obj.Addr.fromstring("*:80")]),
{obj.Addr.fromstring("*:80")},
False, True, "centos.example.com"),
obj.VirtualHost(
os.path.join(prefix, "ssl.conf"),
os.path.join(aug_pre, "ssl.conf/VirtualHost"),
set([obj.Addr.fromstring("_default_:443")]),
{obj.Addr.fromstring("_default_:443")},
True, True, None)
]
return vh_truth
@@ -106,7 +109,7 @@ class MultipleVhostsTestCentOS(util.ApacheTest):
def test_get_parser(self):
self.assertIsInstance(self.config.parser, override_centos.CentOSParser)

@mock.patch("certbot_apache._internal.parser.ApacheParser._get_runtime_cfg")
@mock.patch("certbot_apache._internal.apache_util._get_runtime_cfg")
def test_opportunistic_httpd_runtime_parsing(self, mock_get):
define_val = (
'Define: TEST1\n'
@@ -126,7 +129,7 @@ class MultipleVhostsTestCentOS(util.ApacheTest):
return mod_val
return ""
mock_get.side_effect = mock_get_cfg
self.config.parser.modules = set()
self.config.parser.modules = {}
self.config.parser.variables = {}

with mock.patch("certbot.util.get_os_info") as mock_osi:
@@ -137,7 +140,7 @@ class MultipleVhostsTestCentOS(util.ApacheTest):
self.assertEqual(mock_get.call_count, 3)
self.assertEqual(len(self.config.parser.modules), 4)
self.assertEqual(len(self.config.parser.variables), 2)
self.assertTrue("TEST2" in self.config.parser.variables.keys())
self.assertTrue("TEST2" in self.config.parser.variables)
self.assertTrue("mod_another.c" in self.config.parser.modules)

def test_get_virtual_hosts(self):
@@ -155,7 +158,7 @@ class MultipleVhostsTestCentOS(util.ApacheTest):
raise Exception("Missed: %s" % vhost) # pragma: no cover
self.assertEqual(found, 2)

@mock.patch("certbot_apache._internal.parser.ApacheParser._get_runtime_cfg")
@mock.patch("certbot_apache._internal.apache_util._get_runtime_cfg")
def test_get_sysconfig_vars(self, mock_cfg):
"""Make sure we read the sysconfig OPTIONS variable correctly"""
# Return nothing for the process calls
@@ -169,11 +172,11 @@ class MultipleVhostsTestCentOS(util.ApacheTest):
mock_osi.return_value = ("centos", "7")
self.config.parser.update_runtime_variables()

self.assertTrue("mock_define" in self.config.parser.variables.keys())
self.assertTrue("mock_define_too" in self.config.parser.variables.keys())
self.assertTrue("mock_value" in self.config.parser.variables.keys())
self.assertTrue("mock_define" in self.config.parser.variables)
self.assertTrue("mock_define_too" in self.config.parser.variables)
self.assertTrue("mock_value" in self.config.parser.variables)
self.assertEqual("TRUE", self.config.parser.variables["mock_value"])
self.assertTrue("MOCK_NOSEP" in self.config.parser.variables.keys())
self.assertTrue("MOCK_NOSEP" in self.config.parser.variables)
self.assertEqual("NOSEP_VAL", self.config.parser.variables["NOSEP_TWO"])

@mock.patch("certbot_apache._internal.configurator.util.run_script")

@@ -2,7 +2,10 @@
|
||||
import shutil
|
||||
import unittest
|
||||
|
||||
import mock
|
||||
try:
|
||||
import mock
|
||||
except ImportError: # pragma: no cover
|
||||
from unittest import mock # type: ignore
|
||||
|
||||
from certbot import errors
|
||||
import util
|
||||
|
||||
@@ -6,7 +6,10 @@ import socket
|
||||
import tempfile
|
||||
import unittest
|
||||
|
||||
import mock
|
||||
try:
|
||||
import mock
|
||||
except ImportError: # pragma: no cover
|
||||
from unittest import mock # type: ignore
|
||||
import six # pylint: disable=unused-import # six is used in mock.patch()
|
||||
|
||||
from acme import challenges

@@ -75,7 +78,8 @@ class MultipleVhostsTest(util.ApacheTest):
 
     @mock.patch("certbot_apache._internal.parser.ApacheParser")
     @mock.patch("certbot_apache._internal.configurator.util.exe_exists")
-    def _test_prepare_locked(self, unused_parser, unused_exe_exists):
+    @mock.patch("certbot_apache._internal.configurator.ApacheConfigurator.get_parsernode_root")
+    def _test_prepare_locked(self, _node, _exists, _parser):
         try:
             self.config.prepare()
         except errors.PluginError as err:
@@ -98,7 +102,7 @@ class MultipleVhostsTest(util.ApacheTest):
         parserargs = ["server_root", "enmod", "dismod", "le_vhost_ext",
                       "vhost_root", "logs_root", "challenge_location",
                       "handle_modules", "handle_sites", "ctl"]
-        exp = dict()
+        exp = {}
 
         for k in ApacheConfigurator.OS_DEFAULTS:
             if k in parserargs:
@@ -139,11 +143,9 @@ class MultipleVhostsTest(util.ApacheTest):
         mock_utility = mock_getutility()
         mock_utility.notification = mock.MagicMock(return_value=True)
         names = self.config.get_all_names()
-        self.assertEqual(names, set(
-            ["certbot.demo", "ocspvhost.com", "encryption-example.demo",
+        self.assertEqual(names, {"certbot.demo", "ocspvhost.com", "encryption-example.demo",
             "nonsym.link", "vhost.in.rootconf", "www.certbot.demo",
-            "duplicate.example.com"]
-        ))
+            "duplicate.example.com"})
 
     @certbot_util.patch_get_utility()
     @mock.patch("certbot_apache._internal.configurator.socket.gethostbyaddr")
@@ -153,9 +155,9 @@ class MultipleVhostsTest(util.ApacheTest):
         mock_utility.notification.return_value = True
         vhost = obj.VirtualHost(
             "fp", "ap",
-            set([obj.Addr(("8.8.8.8", "443")),
+            {obj.Addr(("8.8.8.8", "443")),
              obj.Addr(("zombo.com",)),
-             obj.Addr(("192.168.1.2"))]),
+             obj.Addr(("192.168.1.2"))},
             True, False)
 
         self.config.vhosts.append(vhost)
@@ -184,7 +186,7 @@ class MultipleVhostsTest(util.ApacheTest):
 
     def test_bad_servername_alias(self):
         ssl_vh1 = obj.VirtualHost(
-            "fp1", "ap1", set([obj.Addr(("*", "443"))]),
+            "fp1", "ap1", {obj.Addr(("*", "443"))},
             True, False)
         # pylint: disable=protected-access
         self.config._add_servernames(ssl_vh1)
@@ -197,7 +199,7 @@ class MultipleVhostsTest(util.ApacheTest):
         # pylint: disable=protected-access
         self.config._add_servernames(self.vh_truth[2])
         self.assertEqual(
-            self.vh_truth[2].get_names(), set(["*.le.co", "ip-172-30-0-17"]))
+            self.vh_truth[2].get_names(), {"*.le.co", "ip-172-30-0-17"})
 
     def test_get_virtual_hosts(self):
         """Make sure all vhosts are being properly found."""
@@ -268,7 +270,7 @@ class MultipleVhostsTest(util.ApacheTest):
     def test_choose_vhost_select_vhost_conflicting_non_ssl(self, mock_select):
         mock_select.return_value = self.vh_truth[3]
         conflicting_vhost = obj.VirtualHost(
-            "path", "aug_path", set([obj.Addr.fromstring("*:443")]),
+            "path", "aug_path", {obj.Addr.fromstring("*:443")},
             True, True)
         self.config.vhosts.append(conflicting_vhost)
 
@@ -277,14 +279,14 @@ class MultipleVhostsTest(util.ApacheTest):
 
     def test_find_best_http_vhost_default(self):
         vh = obj.VirtualHost(
-            "fp", "ap", set([obj.Addr.fromstring("_default_:80")]), False, True)
+            "fp", "ap", {obj.Addr.fromstring("_default_:80")}, False, True)
         self.config.vhosts = [vh]
         self.assertEqual(self.config.find_best_http_vhost("foo.bar", False), vh)
 
     def test_find_best_http_vhost_port(self):
         port = "8080"
         vh = obj.VirtualHost(
-            "fp", "ap", set([obj.Addr.fromstring("*:" + port)]),
+            "fp", "ap", {obj.Addr.fromstring("*:" + port)},
             False, True, "encryption-example.demo")
         self.config.vhosts.append(vh)
         self.assertEqual(self.config.find_best_http_vhost("foo.bar", False, port), vh)
@@ -312,8 +314,8 @@ class MultipleVhostsTest(util.ApacheTest):
     def test_find_best_vhost_variety(self):
         # pylint: disable=protected-access
         ssl_vh = obj.VirtualHost(
-            "fp", "ap", set([obj.Addr(("*", "443")),
-                             obj.Addr(("zombo.com",))]),
+            "fp", "ap", {obj.Addr(("*", "443")),
+                         obj.Addr(("zombo.com",))},
             True, False)
         self.config.vhosts.append(ssl_vh)
         self.assertEqual(self.config._find_best_vhost("zombo.com"), ssl_vh)
@@ -340,9 +342,9 @@ class MultipleVhostsTest(util.ApacheTest):
     def test_deploy_cert_enable_new_vhost(self):
         # Create
         ssl_vhost = self.config.make_vhost_ssl(self.vh_truth[0])
-        self.config.parser.modules.add("ssl_module")
-        self.config.parser.modules.add("mod_ssl.c")
-        self.config.parser.modules.add("socache_shmcb_module")
+        self.config.parser.modules["ssl_module"] = None
+        self.config.parser.modules["mod_ssl.c"] = None
+        self.config.parser.modules["socache_shmcb_module"] = None
 
         self.assertFalse(ssl_vhost.enabled)
         self.config.deploy_cert(
@@ -376,9 +378,9 @@ class MultipleVhostsTest(util.ApacheTest):
             # pragma: no cover
 
     def test_deploy_cert(self):
-        self.config.parser.modules.add("ssl_module")
-        self.config.parser.modules.add("mod_ssl.c")
-        self.config.parser.modules.add("socache_shmcb_module")
+        self.config.parser.modules["ssl_module"] = None
+        self.config.parser.modules["mod_ssl.c"] = None
+        self.config.parser.modules["socache_shmcb_module"] = None
         # Patch _add_dummy_ssl_directives to make sure we write them correctly
         # pylint: disable=protected-access
         orig_add_dummy = self.config._add_dummy_ssl_directives
@@ -453,41 +455,6 @@ class MultipleVhostsTest(util.ApacheTest):
                 "SSLCertificateChainFile", "two/cert_chain.pem",
                 self.vh_truth[1].path))
 
-    def test_deploy_cert_invalid_vhost(self):
-        """For test cases where the `ApacheConfigurator` class' `_deploy_cert`
-        method is called with an invalid vhost parameter. Currently this tests
-        that a PluginError is appropriately raised when important directives
-        are missing in an SSL module."""
-        self.config.parser.modules.add("ssl_module")
-        self.config.parser.modules.add("mod_ssl.c")
-        self.config.parser.modules.add("socache_shmcb_module")
-
-        def side_effect(*args):
-            """Mocks case where an SSLCertificateFile directive can be found
-            but an SSLCertificateKeyFile directive is missing."""
-            if "SSLCertificateFile" in args:
-                return ["example/cert.pem"]
-            return []
-
-        mock_find_dir = mock.MagicMock(return_value=[])
-        mock_find_dir.side_effect = side_effect
-
-        self.config.parser.find_dir = mock_find_dir
-
-        # Get the default 443 vhost
-        self.config.assoc["random.demo"] = self.vh_truth[1]
-
-        self.assertRaises(
-            errors.PluginError, self.config.deploy_cert, "random.demo",
-            "example/cert.pem", "example/key.pem", "example/cert_chain.pem")
-
-        # Remove side_effect to mock case where both SSLCertificateFile
-        # and SSLCertificateKeyFile directives are missing
-        self.config.parser.find_dir.side_effect = None
-        self.assertRaises(
-            errors.PluginError, self.config.deploy_cert, "random.demo",
-            "example/cert.pem", "example/key.pem", "example/cert_chain.pem")
-
     def test_is_name_vhost(self):
         addr = obj.Addr.fromstring("*:80")
         self.assertTrue(self.config.is_name_vhost(addr))
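
The removed test above uses an idiom worth keeping in mind: giving a MagicMock a `side_effect` function makes its return value depend on the call arguments, and clearing `side_effect` falls back to the plain `return_value`. A standalone sketch of that pattern (names are hypothetical):

    try:
        import mock
    except ImportError:  # pragma: no cover
        from unittest import mock

    def fake_find_dir(*args):
        """Return a hit only for SSLCertificateFile lookups."""
        if "SSLCertificateFile" in args:
            return ["example/cert.pem"]
        return []

    find_dir = mock.MagicMock(return_value=[])
    find_dir.side_effect = fake_find_dir
    assert find_dir("SSLCertificateFile") == ["example/cert.pem"]
    assert find_dir("SSLCertificateKeyFile") == []

    find_dir.side_effect = None        # back to the plain return_value
    assert find_dir("SSLCertificateFile") == []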

@@ -543,7 +510,8 @@ class MultipleVhostsTest(util.ApacheTest):
                 call_found = True
         self.assertTrue(call_found)
 
-    def test_prepare_server_https(self):
+    @mock.patch("certbot_apache._internal.parser.ApacheParser.reset_modules")
+    def test_prepare_server_https(self, mock_reset):
         mock_enable = mock.Mock()
         self.config.enable_mod = mock_enable
 
@@ -569,7 +537,8 @@ class MultipleVhostsTest(util.ApacheTest):
 
         self.assertEqual(mock_add_dir.call_count, 2)
 
-    def test_prepare_server_https_named_listen(self):
+    @mock.patch("certbot_apache._internal.parser.ApacheParser.reset_modules")
+    def test_prepare_server_https_named_listen(self, mock_reset):
         mock_find = mock.Mock()
         mock_find.return_value = ["test1", "test2", "test3"]
         mock_get = mock.Mock()
@@ -607,7 +576,8 @@ class MultipleVhostsTest(util.ApacheTest):
         # self.config.prepare_server_https("8080", temp=True)
         # self.assertEqual(self.listens, 0)
 
-    def test_prepare_server_https_needed_listen(self):
+    @mock.patch("certbot_apache._internal.parser.ApacheParser.reset_modules")
+    def test_prepare_server_https_needed_listen(self, mock_reset):
         mock_find = mock.Mock()
         mock_find.return_value = ["test1", "test2"]
         mock_get = mock.Mock()
@@ -623,8 +593,8 @@ class MultipleVhostsTest(util.ApacheTest):
         self.config.prepare_server_https("443")
         self.assertEqual(mock_add_dir.call_count, 1)
 
-    def test_prepare_server_https_mixed_listen(self):
-
+    @mock.patch("certbot_apache._internal.parser.ApacheParser.reset_modules")
+    def test_prepare_server_https_mixed_listen(self, mock_reset):
         mock_find = mock.Mock()
         mock_find.return_value = ["test1", "test2"]
         mock_get = mock.Mock()
@@ -681,7 +651,7 @@ class MultipleVhostsTest(util.ApacheTest):
         self.assertEqual(ssl_vhost.path,
                          "/files" + ssl_vhost.filep + "/IfModule/Virtualhost")
         self.assertEqual(len(ssl_vhost.addrs), 1)
-        self.assertEqual(set([obj.Addr.fromstring("*:443")]), ssl_vhost.addrs)
+        self.assertEqual({obj.Addr.fromstring("*:443")}, ssl_vhost.addrs)
         self.assertEqual(ssl_vhost.name, "encryption-example.demo")
         self.assertTrue(ssl_vhost.ssl)
         self.assertFalse(ssl_vhost.enabled)
@@ -799,7 +769,7 @@ class MultipleVhostsTest(util.ApacheTest):
         self.assertEqual(mock_restart.call_count, 1)
 
     @mock.patch("certbot_apache._internal.configurator.ApacheConfigurator.restart")
-    @mock.patch("certbot_apache._internal.parser.ApacheParser._get_runtime_cfg")
+    @mock.patch("certbot_apache._internal.apache_util._get_runtime_cfg")
     def test_cleanup(self, mock_cfg, mock_restart):
         mock_cfg.return_value = ""
         _, achalls = self.get_key_and_achalls()
@@ -815,7 +785,7 @@ class MultipleVhostsTest(util.ApacheTest):
         self.assertFalse(mock_restart.called)
 
     @mock.patch("certbot_apache._internal.configurator.ApacheConfigurator.restart")
-    @mock.patch("certbot_apache._internal.parser.ApacheParser._get_runtime_cfg")
+    @mock.patch("certbot_apache._internal.apache_util._get_runtime_cfg")
     def test_cleanup_no_errors(self, mock_cfg, mock_restart):
         mock_cfg.return_value = ""
         _, achalls = self.get_key_and_achalls()
@@ -903,10 +873,10 @@ class MultipleVhostsTest(util.ApacheTest):
     @mock.patch("certbot_apache._internal.display_ops.select_vhost")
     @mock.patch("certbot.util.exe_exists")
     def test_enhance_unknown_vhost(self, mock_exe, mock_sel_vhost, mock_get):
-        self.config.parser.modules.add("rewrite_module")
+        self.config.parser.modules["rewrite_module"] = None
         mock_exe.return_value = True
         ssl_vh1 = obj.VirtualHost(
-            "fp1", "ap1", set([obj.Addr(("*", "443"))]),
+            "fp1", "ap1", {obj.Addr(("*", "443"))},
             True, False)
         ssl_vh1.name = "satoshi.com"
         self.config.vhosts.append(ssl_vh1)
@@ -941,8 +911,8 @@ class MultipleVhostsTest(util.ApacheTest):
     @mock.patch("certbot.util.exe_exists")
     def test_ocsp_stapling(self, mock_exe):
         self.config.parser.update_runtime_variables = mock.Mock()
-        self.config.parser.modules.add("mod_ssl.c")
-        self.config.parser.modules.add("socache_shmcb_module")
+        self.config.parser.modules["mod_ssl.c"] = None
+        self.config.parser.modules["socache_shmcb_module"] = None
         self.config.get_version = mock.Mock(return_value=(2, 4, 7))
         mock_exe.return_value = True
 
@@ -968,8 +938,8 @@ class MultipleVhostsTest(util.ApacheTest):
     @mock.patch("certbot.util.exe_exists")
     def test_ocsp_stapling_twice(self, mock_exe):
         self.config.parser.update_runtime_variables = mock.Mock()
-        self.config.parser.modules.add("mod_ssl.c")
-        self.config.parser.modules.add("socache_shmcb_module")
+        self.config.parser.modules["mod_ssl.c"] = None
+        self.config.parser.modules["socache_shmcb_module"] = None
         self.config.get_version = mock.Mock(return_value=(2, 4, 7))
         mock_exe.return_value = True
 
@@ -996,8 +966,8 @@ class MultipleVhostsTest(util.ApacheTest):
     def test_ocsp_unsupported_apache_version(self, mock_exe):
         mock_exe.return_value = True
         self.config.parser.update_runtime_variables = mock.Mock()
-        self.config.parser.modules.add("mod_ssl.c")
-        self.config.parser.modules.add("socache_shmcb_module")
+        self.config.parser.modules["mod_ssl.c"] = None
+        self.config.parser.modules["socache_shmcb_module"] = None
         self.config.get_version = mock.Mock(return_value=(2, 2, 0))
         self.config.choose_vhost("certbot.demo")
 
@@ -1007,7 +977,7 @@ class MultipleVhostsTest(util.ApacheTest):
 
     def test_get_http_vhost_third_filter(self):
         ssl_vh = obj.VirtualHost(
-            "fp", "ap", set([obj.Addr(("*", "443"))]),
+            "fp", "ap", {obj.Addr(("*", "443"))},
             True, False)
         ssl_vh.name = "satoshi.com"
         self.config.vhosts.append(ssl_vh)
@@ -1020,8 +990,8 @@ class MultipleVhostsTest(util.ApacheTest):
     @mock.patch("certbot.util.exe_exists")
     def test_http_header_hsts(self, mock_exe, _):
         self.config.parser.update_runtime_variables = mock.Mock()
-        self.config.parser.modules.add("mod_ssl.c")
-        self.config.parser.modules.add("headers_module")
+        self.config.parser.modules["mod_ssl.c"] = None
+        self.config.parser.modules["headers_module"] = None
         mock_exe.return_value = True
 
         # This will create an ssl vhost for certbot.demo
@@ -1041,9 +1011,9 @@ class MultipleVhostsTest(util.ApacheTest):
         self.assertEqual(len(hsts_header), 4)
 
     def test_http_header_hsts_twice(self):
-        self.config.parser.modules.add("mod_ssl.c")
+        self.config.parser.modules["mod_ssl.c"] = None
         # skip the enable mod
-        self.config.parser.modules.add("headers_module")
+        self.config.parser.modules["headers_module"] = None
 
         # This will create an ssl vhost for encryption-example.demo
         self.config.choose_vhost("encryption-example.demo")
@@ -1059,8 +1029,8 @@ class MultipleVhostsTest(util.ApacheTest):
     @mock.patch("certbot.util.exe_exists")
     def test_http_header_uir(self, mock_exe, _):
         self.config.parser.update_runtime_variables = mock.Mock()
-        self.config.parser.modules.add("mod_ssl.c")
-        self.config.parser.modules.add("headers_module")
+        self.config.parser.modules["mod_ssl.c"] = None
+        self.config.parser.modules["headers_module"] = None
 
         mock_exe.return_value = True
 
@@ -1083,9 +1053,9 @@ class MultipleVhostsTest(util.ApacheTest):
         self.assertEqual(len(uir_header), 4)
 
     def test_http_header_uir_twice(self):
-        self.config.parser.modules.add("mod_ssl.c")
+        self.config.parser.modules["mod_ssl.c"] = None
         # skip the enable mod
-        self.config.parser.modules.add("headers_module")
+        self.config.parser.modules["headers_module"] = None
 
         # This will create an ssl vhost for encryption-example.demo
         self.config.choose_vhost("encryption-example.demo")
@@ -1100,7 +1070,7 @@ class MultipleVhostsTest(util.ApacheTest):
     @mock.patch("certbot.util.run_script")
     @mock.patch("certbot.util.exe_exists")
     def test_redirect_well_formed_http(self, mock_exe, _):
-        self.config.parser.modules.add("rewrite_module")
+        self.config.parser.modules["rewrite_module"] = None
         self.config.parser.update_runtime_variables = mock.Mock()
         mock_exe.return_value = True
         self.config.get_version = mock.Mock(return_value=(2, 2))
@@ -1126,7 +1096,7 @@ class MultipleVhostsTest(util.ApacheTest):
 
     def test_rewrite_rule_exists(self):
         # Skip the enable mod
-        self.config.parser.modules.add("rewrite_module")
+        self.config.parser.modules["rewrite_module"] = None
         self.config.get_version = mock.Mock(return_value=(2, 3, 9))
         self.config.parser.add_dir(
             self.vh_truth[3].path, "RewriteRule", ["Unknown"])
@@ -1135,7 +1105,7 @@ class MultipleVhostsTest(util.ApacheTest):
 
     def test_rewrite_engine_exists(self):
         # Skip the enable mod
-        self.config.parser.modules.add("rewrite_module")
+        self.config.parser.modules["rewrite_module"] = None
         self.config.get_version = mock.Mock(return_value=(2, 3, 9))
         self.config.parser.add_dir(
             self.vh_truth[3].path, "RewriteEngine", "on")
@@ -1145,7 +1115,7 @@ class MultipleVhostsTest(util.ApacheTest):
     @mock.patch("certbot.util.run_script")
     @mock.patch("certbot.util.exe_exists")
     def test_redirect_with_existing_rewrite(self, mock_exe, _):
-        self.config.parser.modules.add("rewrite_module")
+        self.config.parser.modules["rewrite_module"] = None
         self.config.parser.update_runtime_variables = mock.Mock()
         mock_exe.return_value = True
         self.config.get_version = mock.Mock(return_value=(2, 2, 0))
@@ -1179,7 +1149,7 @@ class MultipleVhostsTest(util.ApacheTest):
     @mock.patch("certbot.util.run_script")
     @mock.patch("certbot.util.exe_exists")
     def test_redirect_with_old_https_redirection(self, mock_exe, _):
-        self.config.parser.modules.add("rewrite_module")
+        self.config.parser.modules["rewrite_module"] = None
         self.config.parser.update_runtime_variables = mock.Mock()
         mock_exe.return_value = True
         self.config.get_version = mock.Mock(return_value=(2, 2, 0))
@@ -1208,10 +1178,10 @@ class MultipleVhostsTest(util.ApacheTest):
 
 
     def test_redirect_with_conflict(self):
-        self.config.parser.modules.add("rewrite_module")
+        self.config.parser.modules["rewrite_module"] = None
         ssl_vh = obj.VirtualHost(
-            "fp", "ap", set([obj.Addr(("*", "443")),
-                             obj.Addr(("zombo.com",))]),
+            "fp", "ap", {obj.Addr(("*", "443")),
+                         obj.Addr(("zombo.com",))},
             True, False)
         # No names ^ this guy should conflict.
 
@@ -1221,7 +1191,7 @@ class MultipleVhostsTest(util.ApacheTest):
 
     def test_redirect_two_domains_one_vhost(self):
         # Skip the enable mod
-        self.config.parser.modules.add("rewrite_module")
+        self.config.parser.modules["rewrite_module"] = None
         self.config.get_version = mock.Mock(return_value=(2, 3, 9))
 
         # Creates ssl vhost for the domain
@@ -1236,7 +1206,7 @@ class MultipleVhostsTest(util.ApacheTest):
 
     def test_redirect_from_previous_run(self):
         # Skip the enable mod
-        self.config.parser.modules.add("rewrite_module")
+        self.config.parser.modules["rewrite_module"] = None
         self.config.get_version = mock.Mock(return_value=(2, 3, 9))
         self.config.choose_vhost("red.blue.purple.com")
         self.config.enhance("red.blue.purple.com", "redirect")
@@ -1249,22 +1219,22 @@ class MultipleVhostsTest(util.ApacheTest):
             self.config.enhance, "green.blue.purple.com", "redirect")
 
     def test_create_own_redirect(self):
-        self.config.parser.modules.add("rewrite_module")
+        self.config.parser.modules["rewrite_module"] = None
         self.config.get_version = mock.Mock(return_value=(2, 3, 9))
         # For full testing... give names...
         self.vh_truth[1].name = "default.com"
-        self.vh_truth[1].aliases = set(["yes.default.com"])
+        self.vh_truth[1].aliases = {"yes.default.com"}
 
         # pylint: disable=protected-access
         self.config._enable_redirect(self.vh_truth[1], "")
         self.assertEqual(len(self.config.vhosts), 13)
 
     def test_create_own_redirect_for_old_apache_version(self):
-        self.config.parser.modules.add("rewrite_module")
+        self.config.parser.modules["rewrite_module"] = None
         self.config.get_version = mock.Mock(return_value=(2, 2))
         # For full testing... give names...
         self.vh_truth[1].name = "default.com"
-        self.vh_truth[1].aliases = set(["yes.default.com"])
+        self.vh_truth[1].aliases = {"yes.default.com"}
 
         # pylint: disable=protected-access
         self.config._enable_redirect(self.vh_truth[1], "")
@@ -1325,9 +1295,9 @@ class MultipleVhostsTest(util.ApacheTest):
     def test_deploy_cert_not_parsed_path(self):
         # Make sure that we add include to root config for vhosts when
         # handle-sites is false
-        self.config.parser.modules.add("ssl_module")
-        self.config.parser.modules.add("mod_ssl.c")
-        self.config.parser.modules.add("socache_shmcb_module")
+        self.config.parser.modules["ssl_module"] = None
+        self.config.parser.modules["mod_ssl.c"] = None
+        self.config.parser.modules["socache_shmcb_module"] = None
         tmp_path = filesystem.realpath(tempfile.mkdtemp("vhostroot"))
         filesystem.chmod(tmp_path, 0o755)
         mock_p = "certbot_apache._internal.configurator.ApacheConfigurator._get_ssl_vhost_path"

@@ -1344,6 +1314,16 @@ class MultipleVhostsTest(util.ApacheTest):
         self.assertTrue(mock_add.called)
         shutil.rmtree(tmp_path)
 
+    def test_deploy_cert_no_mod_ssl(self):
+        # Create
+        ssl_vhost = self.config.make_vhost_ssl(self.vh_truth[0])
+        self.config.parser.modules["socache_shmcb_module"] = None
+        self.config.prepare_server_https = mock.Mock()
+
+        self.assertRaises(errors.MisconfigurationError, self.config.deploy_cert,
+                          "encryption-example.demo", "example/cert.pem", "example/key.pem",
+                          "example/cert_chain.pem", "example/fullchain.pem")
+
     @mock.patch("certbot_apache._internal.parser.ApacheParser.parsed_in_original")
     def test_choose_vhost_and_servername_addition_parsed(self, mock_parsed):
         ret_vh = self.vh_truth[8]
@@ -1440,8 +1420,8 @@ class MultipleVhostsTest(util.ApacheTest):
     @mock.patch("certbot_apache._internal.configurator.ApacheConfigurator._choose_vhosts_wildcard")
     def test_enhance_wildcard_after_install(self, mock_choose):
         # pylint: disable=protected-access
-        self.config.parser.modules.add("mod_ssl.c")
-        self.config.parser.modules.add("headers_module")
+        self.config.parser.modules["mod_ssl.c"] = None
+        self.config.parser.modules["headers_module"] = None
         self.vh_truth[3].ssl = True
         self.config._wildcard_vhosts["*.certbot.demo"] = [self.vh_truth[3]]
         self.config.enhance("*.certbot.demo", "ensure-http-header",
@@ -1452,8 +1432,8 @@ class MultipleVhostsTest(util.ApacheTest):
     def test_enhance_wildcard_no_install(self, mock_choose):
         self.vh_truth[3].ssl = True
         mock_choose.return_value = [self.vh_truth[3]]
-        self.config.parser.modules.add("mod_ssl.c")
-        self.config.parser.modules.add("headers_module")
+        self.config.parser.modules["mod_ssl.c"] = None
+        self.config.parser.modules["headers_module"] = None
         self.config.enhance("*.certbot.demo", "ensure-http-header",
                             "Upgrade-Insecure-Requests")
         self.assertTrue(mock_choose.called)
@@ -1605,7 +1585,7 @@ class MultiVhostsTest(util.ApacheTest):
         self.assertEqual(ssl_vhost.path,
                          "/files" + ssl_vhost.filep + "/IfModule/VirtualHost")
         self.assertEqual(len(ssl_vhost.addrs), 1)
-        self.assertEqual(set([obj.Addr.fromstring("*:443")]), ssl_vhost.addrs)
+        self.assertEqual({obj.Addr.fromstring("*:443")}, ssl_vhost.addrs)
         self.assertEqual(ssl_vhost.name, "banana.vomit.com")
         self.assertTrue(ssl_vhost.ssl)
         self.assertFalse(ssl_vhost.enabled)
@@ -1637,7 +1617,7 @@ class MultiVhostsTest(util.ApacheTest):
 
     @certbot_util.patch_get_utility()
     def test_make_vhost_ssl_with_existing_rewrite_rule(self, mock_get_utility):
-        self.config.parser.modules.add("rewrite_module")
+        self.config.parser.modules["rewrite_module"] = None
 
         ssl_vhost = self.config.make_vhost_ssl(self.vh_truth[4])
 
@@ -1657,7 +1637,7 @@ class MultiVhostsTest(util.ApacheTest):
 
     @certbot_util.patch_get_utility()
     def test_make_vhost_ssl_with_existing_rewrite_conds(self, mock_get_utility):
-        self.config.parser.modules.add("rewrite_module")
+        self.config.parser.modules["rewrite_module"] = None
 
         ssl_vhost = self.config.make_vhost_ssl(self.vh_truth[3])
 
@@ -1699,7 +1679,7 @@ class InstallSslOptionsConfTest(util.ApacheTest):
             self.config.updated_mod_ssl_conf_digest)
 
     def _current_ssl_options_hash(self):
-        return crypto_util.sha256sum(self.config.option("MOD_SSL_CONF_SRC"))
+        return crypto_util.sha256sum(self.config.pick_apache_config())
 
     def _assert_current_file(self):
         self.assertTrue(os.path.isfile(self.config.mod_ssl_conf))
@@ -1735,7 +1715,7 @@ class InstallSslOptionsConfTest(util.ApacheTest):
         self.assertFalse(mock_logger.warning.called)
         self.assertTrue(os.path.isfile(self.config.mod_ssl_conf))
         self.assertEqual(crypto_util.sha256sum(
-            self.config.option("MOD_SSL_CONF_SRC")),
+            self.config.pick_apache_config()),
             self._current_ssl_options_hash())
         self.assertNotEqual(crypto_util.sha256sum(self.config.mod_ssl_conf),
                             self._current_ssl_options_hash())
@@ -1751,19 +1731,118 @@ class InstallSslOptionsConfTest(util.ApacheTest):
             "%s has been manually modified; updated file "
             "saved to %s. We recommend updating %s for security purposes.")
         self.assertEqual(crypto_util.sha256sum(
-            self.config.option("MOD_SSL_CONF_SRC")),
+            self.config.pick_apache_config()),
            self._current_ssl_options_hash())
         # only print warning once
         with mock.patch("certbot.plugins.common.logger") as mock_logger:
             self._call()
             self.assertFalse(mock_logger.warning.called)
 
-    def test_current_file_hash_in_all_hashes(self):
+    def test_ssl_config_files_hash_in_all_hashes(self):
+        """
+        It is really critical that all TLS Apache config files have their SHA256 hash registered in
+        constants.ALL_SSL_OPTIONS_HASHES. Otherwise Certbot will mistakenly assume that the config
+        file has been manually edited by the user, and will refuse to update it.
+        This test ensures that all necessary hashes are present.
+        """
         from certbot_apache._internal.constants import ALL_SSL_OPTIONS_HASHES
-        self.assertTrue(self._current_ssl_options_hash() in ALL_SSL_OPTIONS_HASHES,
-                        "Constants.ALL_SSL_OPTIONS_HASHES must be appended"
-                        " with the sha256 hash of self.config.mod_ssl_conf when it is updated.")
+        import pkg_resources
+
+        tls_configs_dir = pkg_resources.resource_filename(
+            "certbot_apache", os.path.join("_internal", "tls_configs"))
+        all_files = [os.path.join(tls_configs_dir, name) for name in os.listdir(tls_configs_dir)
+                     if name.endswith('options-ssl-apache.conf')]
+        self.assertTrue(all_files)
+        for one_file in all_files:
+            file_hash = crypto_util.sha256sum(one_file)
+            self.assertTrue(file_hash in ALL_SSL_OPTIONS_HASHES,
+                            "Constants.ALL_SSL_OPTIONS_HASHES must be appended with the sha256 "
+                            "hash of {0} when it is updated.".format(one_file))
+
+    def test_openssl_version(self):
+        self.config._openssl_version = None
+        some_string_contents = b"""
+SSLOpenSSLConfCmd
+OpenSSL configuration command
+SSLv3 not supported by this version of OpenSSL
+'%s': invalid OpenSSL configuration command
+OpenSSL 1.0.2g 1 Mar 2016
+OpenSSL
+AH02407: "SSLOpenSSLConfCmd %s %s" failed for %s
+AH02556: "SSLOpenSSLConfCmd %s %s" applied to %s
+OpenSSL 1.0.2g 1 Mar 2016
+"""
+        # ssl_module as a DSO
+        self.config.parser.modules['ssl_module'] = '/fake/path'
+        with mock.patch("certbot_apache._internal.configurator."
+                        "ApacheConfigurator._open_module_file") as mock_omf:
+            mock_omf.return_value = some_string_contents
+            self.assertEqual(self.config.openssl_version(), "1.0.2g")
+
+        # ssl_module statically linked
+        self.config._openssl_version = None
+        self.config.parser.modules['ssl_module'] = None
+        self.config.options['bin'] = '/fake/path/to/httpd'
+        with mock.patch("certbot_apache._internal.configurator."
+                        "ApacheConfigurator._open_module_file") as mock_omf:
+            mock_omf.return_value = some_string_contents
+            self.assertEqual(self.config.openssl_version(), "1.0.2g")
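
What this exercises: `openssl_version()` apparently scans the raw bytes of mod_ssl.so (or the httpd binary itself, when ssl_module is statically linked) for an OpenSSL banner like the one in `some_string_contents`. A minimal sketch of that extraction, assuming a regex of roughly this shape; the exact pattern in configurator.py may differ:

    import re

    contents = b'AH02556: "SSLOpenSSLConfCmd %s %s" applied to %s\nOpenSSL 1.0.2g 1 Mar 2016\n'
    match = re.search(rb"OpenSSL ([0-9]\.[^ ]+) ", contents)
    openssl_version = match.group(1).decode("ascii") if match else None
    assert openssl_version == "1.0.2g"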
+
+    def test_current_version(self):
+        self.config.version = (2, 4, 10)
+        self.config._openssl_version = '1.0.2m'
+        self.assertTrue('old' in self.config.pick_apache_config())
+
+        self.config.version = (2, 4, 11)
+        self.config._openssl_version = '1.0.2m'
+        self.assertTrue('current' in self.config.pick_apache_config())
+
+        self.config._openssl_version = '1.0.2a'
+        self.assertTrue('old' in self.config.pick_apache_config())
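
Read together, the three cases suggest `pick_apache_config()` serves the "current" options-ssl-apache.conf only when both the Apache and the detected OpenSSL versions are new enough, and falls back to the "old" file otherwise. The exact thresholds are inferred from these fixtures, not stated in the hunk; a hedged sketch (naive string comparison, adequate for the versions exercised here):

    def pick_config_variant(apache_version, openssl_version):
        """Sketch of the assumed selection logic in pick_apache_config()."""
        if apache_version >= (2, 4, 11) and openssl_version and openssl_version >= "1.0.2l":
            return "current-options-ssl-apache.conf"
        return "old-options-ssl-apache.conf"

    assert "old" in pick_config_variant((2, 4, 10), "1.0.2m")
    assert "current" in pick_config_variant((2, 4, 11), "1.0.2m")
    assert "old" in pick_config_variant((2, 4, 11), "1.0.2a")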
+
+    def test_openssl_version_warns(self):
+        self.config._openssl_version = '1.0.2a'
+        self.assertEqual(self.config.openssl_version(), '1.0.2a')
+
+        self.config._openssl_version = None
+        with mock.patch("certbot_apache._internal.configurator.logger.warning") as mock_log:
+            self.assertEqual(self.config.openssl_version(), None)
+            self.assertTrue("Could not find ssl_module" in mock_log.call_args[0][0])
+
+        # When no ssl_module is present at all
+        self.config._openssl_version = None
+        self.assertTrue("ssl_module" not in self.config.parser.modules)
+        with mock.patch("certbot_apache._internal.configurator.logger.warning") as mock_log:
+            self.assertEqual(self.config.openssl_version(), None)
+            self.assertTrue("Could not find ssl_module" in mock_log.call_args[0][0])
+
+        # When ssl_module is statically linked but --apache-bin not provided
+        self.config._openssl_version = None
+        self.config.options['bin'] = None
+        self.config.parser.modules['ssl_module'] = None
+        with mock.patch("certbot_apache._internal.configurator.logger.warning") as mock_log:
+            self.assertEqual(self.config.openssl_version(), None)
+            self.assertTrue("ssl_module is statically linked but" in mock_log.call_args[0][0])
+
+        self.config.parser.modules['ssl_module'] = "/fake/path"
+        with mock.patch("certbot_apache._internal.configurator.logger.warning") as mock_log:
+            # Check that correct logger.warning was printed
+            self.assertEqual(self.config.openssl_version(), None)
+            self.assertTrue("Unable to read" in mock_log.call_args[0][0])
+
+        contents_missing_openssl = b"these contents won't match the regex"
+        with mock.patch("certbot_apache._internal.configurator."
+                        "ApacheConfigurator._open_module_file") as mock_omf:
+            mock_omf.return_value = contents_missing_openssl
+            with mock.patch("certbot_apache._internal.configurator.logger.warning") as mock_log:
+                # Check that correct logger.warning was printed
+                self.assertEqual(self.config.openssl_version(), None)
+                self.assertTrue("Could not find OpenSSL" in mock_log.call_args[0][0])
+
+    def test_open_module_file(self):
+        mock_open = mock.mock_open(read_data="testing 12 3")
+        with mock.patch("six.moves.builtins.open", mock_open):
+            self.assertEqual(self.config._open_module_file("/nonsense/"), "testing 12 3")
 
 if __name__ == "__main__":
     unittest.main()  # pragma: no cover
 
@@ -2,7 +2,10 @@
 import shutil
 import unittest
 
-import mock
+try:
+    import mock
+except ImportError:  # pragma: no cover
+    from unittest import mock  # type: ignore
 
 from certbot import errors
 from certbot.compat import os
@@ -46,7 +49,7 @@ class MultipleVhostsTestDebian(util.ApacheTest):
 
     @mock.patch("certbot.util.run_script")
     @mock.patch("certbot.util.exe_exists")
-    @mock.patch("certbot_apache._internal.parser.subprocess.Popen")
+    @mock.patch("certbot_apache._internal.apache_util.subprocess.Popen")
     def test_enable_mod(self, mock_popen, mock_exe_exists, mock_run_script):
         mock_popen().communicate.return_value = ("Define: DUMP_RUN_CFG", "")
         mock_popen().returncode = 0
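
The `mock_popen()` lines rely on a MagicMock property worth spelling out: every call to the patched `Popen` returns the same child mock, so canned output can be wired up before the code under test ever runs. A standalone sketch of the same setup:

    import subprocess
    try:
        import mock
    except ImportError:  # pragma: no cover
        from unittest import mock

    with mock.patch("subprocess.Popen") as mock_popen:
        # Configure the shared return value up front.
        mock_popen().communicate.return_value = ("Define: DUMP_RUN_CFG", "")
        mock_popen().returncode = 0

        # Code under test sees the canned process output.
        proc = subprocess.Popen(["apachectl", "-t", "-D", "DUMP_RUN_CFG"])
        stdout, _ = proc.communicate()
        assert stdout == "Define: DUMP_RUN_CFG"
        assert proc.returncode == 0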
@@ -61,8 +64,8 @@ class MultipleVhostsTestDebian(util.ApacheTest):
     def test_deploy_cert_enable_new_vhost(self):
         # Create
         ssl_vhost = self.config.make_vhost_ssl(self.vh_truth[0])
-        self.config.parser.modules.add("ssl_module")
-        self.config.parser.modules.add("mod_ssl.c")
+        self.config.parser.modules["ssl_module"] = None
+        self.config.parser.modules["mod_ssl.c"] = None
         self.assertFalse(ssl_vhost.enabled)
         self.config.deploy_cert(
             "encryption-example.demo", "example/cert.pem", "example/key.pem",
@@ -92,8 +95,8 @@ class MultipleVhostsTestDebian(util.ApacheTest):
             self.config_path, self.vhost_path, self.config_dir,
             self.work_dir, version=(2, 4, 16))
         self.config = self.mock_deploy_cert(self.config)
-        self.config.parser.modules.add("ssl_module")
-        self.config.parser.modules.add("mod_ssl.c")
+        self.config.parser.modules["ssl_module"] = None
+        self.config.parser.modules["mod_ssl.c"] = None
 
         # Get the default 443 vhost
         self.config.assoc["random.demo"] = self.vh_truth[1]
@@ -128,8 +131,8 @@ class MultipleVhostsTestDebian(util.ApacheTest):
             self.config_path, self.vhost_path, self.config_dir,
             self.work_dir, version=(2, 4, 16))
         self.config = self.mock_deploy_cert(self.config)
-        self.config.parser.modules.add("ssl_module")
-        self.config.parser.modules.add("mod_ssl.c")
+        self.config.parser.modules["ssl_module"] = None
+        self.config.parser.modules["mod_ssl.c"] = None
 
         # Get the default 443 vhost
         self.config.assoc["random.demo"] = self.vh_truth[1]
@@ -143,8 +146,8 @@ class MultipleVhostsTestDebian(util.ApacheTest):
             self.config_path, self.vhost_path, self.config_dir,
             self.work_dir, version=(2, 4, 7))
         self.config = self.mock_deploy_cert(self.config)
-        self.config.parser.modules.add("ssl_module")
-        self.config.parser.modules.add("mod_ssl.c")
+        self.config.parser.modules["ssl_module"] = None
+        self.config.parser.modules["mod_ssl.c"] = None
 
         # Get the default 443 vhost
         self.config.assoc["random.demo"] = self.vh_truth[1]
@@ -157,7 +160,7 @@ class MultipleVhostsTestDebian(util.ApacheTest):
     @mock.patch("certbot.util.exe_exists")
     def test_ocsp_stapling_enable_mod(self, mock_exe, _):
         self.config.parser.update_runtime_variables = mock.Mock()
-        self.config.parser.modules.add("mod_ssl.c")
+        self.config.parser.modules["mod_ssl.c"] = None
         self.config.get_version = mock.Mock(return_value=(2, 4, 7))
         mock_exe.return_value = True
         # This will create an ssl vhost for certbot.demo
@@ -169,7 +172,7 @@ class MultipleVhostsTestDebian(util.ApacheTest):
     @mock.patch("certbot.util.exe_exists")
     def test_ensure_http_header_enable_mod(self, mock_exe, _):
         self.config.parser.update_runtime_variables = mock.Mock()
-        self.config.parser.modules.add("mod_ssl.c")
+        self.config.parser.modules["mod_ssl.c"] = None
         mock_exe.return_value = True
 
         # This will create an ssl vhost for certbot.demo
 
@@ -1,7 +1,10 @@
 """Test certbot_apache._internal.display_ops."""
 import unittest
 
-import mock
+try:
+    import mock
+except ImportError:  # pragma: no cover
+    from unittest import mock  # type: ignore
 
 from certbot import errors
 from certbot.display import util as display_util
@@ -93,9 +96,9 @@ class SelectVhostTest(unittest.TestCase):
 
         self.vhosts.append(
             obj.VirtualHost(
-                "path", "aug_path", set([obj.Addr.fromstring("*:80")]),
+                "path", "aug_path", {obj.Addr.fromstring("*:80")},
                 False, False,
-                "wildcard.com", set(["*.wildcard.com"])))
+                "wildcard.com", {"*.wildcard.com"}))
 
         self.assertEqual(self.vhosts[5], self._call(self.vhosts))
 
certbot-apache/tests/dualnode_test.py (new file, 445 lines)
@@ -0,0 +1,445 @@
+"""Tests for DualParserNode implementation"""
+import unittest
+
+try:
+    import mock
+except ImportError:  # pragma: no cover
+    from unittest import mock  # type: ignore
+
+from certbot_apache._internal import assertions
+from certbot_apache._internal import augeasparser
+from certbot_apache._internal import dualparser
+
+
+class DualParserNodeTest(unittest.TestCase):  # pylint: disable=too-many-public-methods
+    """DualParserNode tests"""
+
+    def setUp(self):  # pylint: disable=arguments-differ
+        parser_mock = mock.MagicMock()
+        parser_mock.aug.match.return_value = []
+        parser_mock.get_arg.return_value = []
+        self.metadata = {"augeasparser": parser_mock, "augeaspath": "/invalid", "ac_ast": None}
+        self.block = dualparser.DualBlockNode(name="block",
+                                              ancestor=None,
+                                              filepath="/tmp/something",
+                                              metadata=self.metadata)
+        self.block_two = dualparser.DualBlockNode(name="block",
+                                                  ancestor=self.block,
+                                                  filepath="/tmp/something",
+                                                  metadata=self.metadata)
+        self.directive = dualparser.DualDirectiveNode(name="directive",
+                                                      ancestor=self.block,
+                                                      filepath="/tmp/something",
+                                                      metadata=self.metadata)
+        self.comment = dualparser.DualCommentNode(comment="comment",
+                                                  ancestor=self.block,
+                                                  filepath="/tmp/something",
+                                                  metadata=self.metadata)
+
+    def test_create_with_precreated(self):
+        cnode = dualparser.DualCommentNode(comment="comment",
+                                           ancestor=self.block,
+                                           filepath="/tmp/something",
+                                           primary=self.comment.secondary,
+                                           secondary=self.comment.primary)
+        dnode = dualparser.DualDirectiveNode(name="directive",
+                                             ancestor=self.block,
+                                             filepath="/tmp/something",
+                                             primary=self.directive.secondary,
+                                             secondary=self.directive.primary)
+        bnode = dualparser.DualBlockNode(name="block",
+                                         ancestor=self.block,
+                                         filepath="/tmp/something",
+                                         primary=self.block.secondary,
+                                         secondary=self.block.primary)
+        # Switched around
+        self.assertTrue(cnode.primary is self.comment.secondary)
+        self.assertTrue(cnode.secondary is self.comment.primary)
+        self.assertTrue(dnode.primary is self.directive.secondary)
+        self.assertTrue(dnode.secondary is self.directive.primary)
+        self.assertTrue(bnode.primary is self.block.secondary)
+        self.assertTrue(bnode.secondary is self.block.primary)
+
+    def test_set_params(self):
+        params = ("first", "second")
+        self.directive.primary.set_parameters = mock.Mock()
+        self.directive.secondary.set_parameters = mock.Mock()
+        self.directive.set_parameters(params)
+        self.assertTrue(self.directive.primary.set_parameters.called)
+        self.assertTrue(self.directive.secondary.set_parameters.called)
+
+    def test_set_parameters(self):
+        pparams = mock.MagicMock()
+        sparams = mock.MagicMock()
+        pparams.parameters = ("a", "b")
+        sparams.parameters = ("a", "b")
+        self.directive.primary.set_parameters = pparams
+        self.directive.secondary.set_parameters = sparams
+        self.directive.set_parameters(("param", "seq"))
+        self.assertTrue(pparams.called)
+        self.assertTrue(sparams.called)
+
+    def test_delete_child(self):
+        pdel = mock.MagicMock()
+        sdel = mock.MagicMock()
+        self.block.primary.delete_child = pdel
+        self.block.secondary.delete_child = sdel
+        self.block.delete_child(self.comment)
+        self.assertTrue(pdel.called)
+        self.assertTrue(sdel.called)
+
+    def test_unsaved_files(self):
+        puns = mock.MagicMock()
+        suns = mock.MagicMock()
+        puns.return_value = assertions.PASS
+        suns.return_value = assertions.PASS
+        self.block.primary.unsaved_files = puns
+        self.block.secondary.unsaved_files = suns
+        self.block.unsaved_files()
+        self.assertTrue(puns.called)
+        self.assertTrue(suns.called)
+
+    def test_getattr_equality(self):
+        self.directive.primary.variableexception = "value"
+        self.directive.secondary.variableexception = "not_value"
+        with self.assertRaises(AssertionError):
+            _ = self.directive.variableexception
+
+        self.directive.primary.variable = "value"
+        self.directive.secondary.variable = "value"
+        try:
+            self.directive.variable
+        except AssertionError:  # pragma: no cover
+            self.fail("getattr check raised an AssertionError where it shouldn't have")
+
+    def test_parsernode_dirty_assert(self):
+        # disable assertion pass
+        self.comment.primary.comment = "value"
+        self.comment.secondary.comment = "value"
+        self.comment.primary.filepath = "x"
+        self.comment.secondary.filepath = "x"
+
+        self.comment.primary.dirty = False
+        self.comment.secondary.dirty = True
+        with self.assertRaises(AssertionError):
+            assertions.assertEqual(self.comment.primary, self.comment.secondary)
+
+    def test_parsernode_filepath_assert(self):
+        # disable assertion pass
+        self.comment.primary.comment = "value"
+        self.comment.secondary.comment = "value"
+
+        self.comment.primary.filepath = "first"
+        self.comment.secondary.filepath = "second"
+        with self.assertRaises(AssertionError):
+            assertions.assertEqual(self.comment.primary, self.comment.secondary)
+
+    def test_add_child_block(self):
+        mock_first = mock.MagicMock(return_value=self.block.primary)
+        mock_second = mock.MagicMock(return_value=self.block.secondary)
+        self.block.primary.add_child_block = mock_first
+        self.block.secondary.add_child_block = mock_second
+        self.block.add_child_block("Block")
+        self.assertTrue(mock_first.called)
+        self.assertTrue(mock_second.called)
+
+    def test_add_child_directive(self):
+        mock_first = mock.MagicMock(return_value=self.directive.primary)
+        mock_second = mock.MagicMock(return_value=self.directive.secondary)
+        self.block.primary.add_child_directive = mock_first
+        self.block.secondary.add_child_directive = mock_second
+        self.block.add_child_directive("Directive")
+        self.assertTrue(mock_first.called)
+        self.assertTrue(mock_second.called)
+
+    def test_add_child_comment(self):
+        mock_first = mock.MagicMock(return_value=self.comment.primary)
+        mock_second = mock.MagicMock(return_value=self.comment.secondary)
+        self.block.primary.add_child_comment = mock_first
+        self.block.secondary.add_child_comment = mock_second
+        self.block.add_child_comment("Comment")
+        self.assertTrue(mock_first.called)
+        self.assertTrue(mock_second.called)
+
+    def test_find_comments(self):
+        pri_comments = [augeasparser.AugeasCommentNode(comment="some comment",
+                                                       ancestor=self.block,
+                                                       filepath="/path/to/whatever",
+                                                       metadata=self.metadata)]
+        sec_comments = [augeasparser.AugeasCommentNode(comment=assertions.PASS,
+                                                       ancestor=self.block,
+                                                       filepath=assertions.PASS,
+                                                       metadata=self.metadata)]
+        find_coms_primary = mock.MagicMock(return_value=pri_comments)
+        find_coms_secondary = mock.MagicMock(return_value=sec_comments)
+        self.block.primary.find_comments = find_coms_primary
+        self.block.secondary.find_comments = find_coms_secondary
+
+        dcoms = self.block.find_comments("comment")
+        p_dcoms = [d.primary for d in dcoms]
+        s_dcoms = [d.secondary for d in dcoms]
+        p_coms = self.block.primary.find_comments("comment")
+        s_coms = self.block.secondary.find_comments("comment")
+        # Check that every comment response is represented in the list of
+        # DualParserNode instances.
+        for p in p_dcoms:
+            self.assertTrue(p in p_coms)
+        for s in s_dcoms:
+            self.assertTrue(s in s_coms)
+
+    def test_find_blocks_first_passing(self):
+        youshallnotpass = [augeasparser.AugeasBlockNode(name="notpassing",
+                                                        ancestor=self.block,
+                                                        filepath="/path/to/whatever",
+                                                        metadata=self.metadata)]
+        youshallpass = [augeasparser.AugeasBlockNode(name=assertions.PASS,
+                                                     ancestor=self.block,
+                                                     filepath=assertions.PASS,
+                                                     metadata=self.metadata)]
+        find_blocks_primary = mock.MagicMock(return_value=youshallpass)
+        find_blocks_secondary = mock.MagicMock(return_value=youshallnotpass)
+        self.block.primary.find_blocks = find_blocks_primary
+        self.block.secondary.find_blocks = find_blocks_secondary
+
+        blocks = self.block.find_blocks("something")
+        for block in blocks:
+            try:
+                assertions.assertEqual(block.primary, block.secondary)
+            except AssertionError:  # pragma: no cover
+                self.fail("Assertion should have passed")
+            self.assertTrue(assertions.isPassDirective(block.primary))
+            self.assertFalse(assertions.isPassDirective(block.secondary))
+
+    def test_find_blocks_second_passing(self):
+        youshallnotpass = [augeasparser.AugeasBlockNode(name="notpassing",
+                                                        ancestor=self.block,
+                                                        filepath="/path/to/whatever",
+                                                        metadata=self.metadata)]
+        youshallpass = [augeasparser.AugeasBlockNode(name=assertions.PASS,
+                                                     ancestor=self.block,
+                                                     filepath=assertions.PASS,
+                                                     metadata=self.metadata)]
+        find_blocks_primary = mock.MagicMock(return_value=youshallnotpass)
+        find_blocks_secondary = mock.MagicMock(return_value=youshallpass)
+        self.block.primary.find_blocks = find_blocks_primary
+        self.block.secondary.find_blocks = find_blocks_secondary
+
+        blocks = self.block.find_blocks("something")
+        for block in blocks:
+            try:
+                assertions.assertEqual(block.primary, block.secondary)
+            except AssertionError:  # pragma: no cover
+                self.fail("Assertion should have passed")
+            self.assertFalse(assertions.isPassDirective(block.primary))
+            self.assertTrue(assertions.isPassDirective(block.secondary))
+
+    def test_find_dirs_first_passing(self):
+        notpassing = [augeasparser.AugeasDirectiveNode(name="notpassing",
+                                                       ancestor=self.block,
+                                                       filepath="/path/to/whatever",
+                                                       metadata=self.metadata)]
+        passing = [augeasparser.AugeasDirectiveNode(name=assertions.PASS,
+                                                    ancestor=self.block,
+                                                    filepath=assertions.PASS,
+                                                    metadata=self.metadata)]
+        find_dirs_primary = mock.MagicMock(return_value=passing)
+        find_dirs_secondary = mock.MagicMock(return_value=notpassing)
+        self.block.primary.find_directives = find_dirs_primary
+        self.block.secondary.find_directives = find_dirs_secondary
+
+        directives = self.block.find_directives("something")
+        for directive in directives:
+            try:
+                assertions.assertEqual(directive.primary, directive.secondary)
+            except AssertionError:  # pragma: no cover
+                self.fail("Assertion should have passed")
+            self.assertTrue(assertions.isPassDirective(directive.primary))
+            self.assertFalse(assertions.isPassDirective(directive.secondary))
+
+    def test_find_dirs_second_passing(self):
+        notpassing = [augeasparser.AugeasDirectiveNode(name="notpassing",
+                                                       ancestor=self.block,
+                                                       filepath="/path/to/whatever",
+                                                       metadata=self.metadata)]
+        passing = [augeasparser.AugeasDirectiveNode(name=assertions.PASS,
+                                                    ancestor=self.block,
+                                                    filepath=assertions.PASS,
+                                                    metadata=self.metadata)]
+        find_dirs_primary = mock.MagicMock(return_value=notpassing)
+        find_dirs_secondary = mock.MagicMock(return_value=passing)
+        self.block.primary.find_directives = find_dirs_primary
+        self.block.secondary.find_directives = find_dirs_secondary
+
+        directives = self.block.find_directives("something")
+        for directive in directives:
+            try:
+                assertions.assertEqual(directive.primary, directive.secondary)
+            except AssertionError:  # pragma: no cover
+                self.fail("Assertion should have passed")
+            self.assertFalse(assertions.isPassDirective(directive.primary))
+            self.assertTrue(assertions.isPassDirective(directive.secondary))
+
+    def test_find_coms_first_passing(self):
+        notpassing = [augeasparser.AugeasCommentNode(comment="notpassing",
+                                                     ancestor=self.block,
+                                                     filepath="/path/to/whatever",
+                                                     metadata=self.metadata)]
+        passing = [augeasparser.AugeasCommentNode(comment=assertions.PASS,
+                                                  ancestor=self.block,
+                                                  filepath=assertions.PASS,
+                                                  metadata=self.metadata)]
+        find_coms_primary = mock.MagicMock(return_value=passing)
+        find_coms_secondary = mock.MagicMock(return_value=notpassing)
+        self.block.primary.find_comments = find_coms_primary
+        self.block.secondary.find_comments = find_coms_secondary
+
+        comments = self.block.find_comments("something")
+        for comment in comments:
+            try:
+                assertions.assertEqual(comment.primary, comment.secondary)
+            except AssertionError:  # pragma: no cover
+                self.fail("Assertion should have passed")
+            self.assertTrue(assertions.isPassComment(comment.primary))
+            self.assertFalse(assertions.isPassComment(comment.secondary))
+
+    def test_find_coms_second_passing(self):
+        notpassing = [augeasparser.AugeasCommentNode(comment="notpassing",
+                                                     ancestor=self.block,
+                                                     filepath="/path/to/whatever",
+                                                     metadata=self.metadata)]
+        passing = [augeasparser.AugeasCommentNode(comment=assertions.PASS,
+                                                  ancestor=self.block,
+                                                  filepath=assertions.PASS,
+                                                  metadata=self.metadata)]
+        find_coms_primary = mock.MagicMock(return_value=notpassing)
+        find_coms_secondary = mock.MagicMock(return_value=passing)
+        self.block.primary.find_comments = find_coms_primary
+        self.block.secondary.find_comments = find_coms_secondary
+
+        comments = self.block.find_comments("something")
+        for comment in comments:
+            try:
+                assertions.assertEqual(comment.primary, comment.secondary)
+            except AssertionError:  # pragma: no cover
+                self.fail("Assertion should have passed")
+            self.assertFalse(assertions.isPassComment(comment.primary))
+            self.assertTrue(assertions.isPassComment(comment.secondary))
+
+    def test_find_blocks_no_pass_equal(self):
+        notpassing1 = [augeasparser.AugeasBlockNode(name="notpassing",
+                                                    ancestor=self.block,
+                                                    filepath="/path/to/whatever",
+                                                    metadata=self.metadata)]
+        notpassing2 = [augeasparser.AugeasBlockNode(name="notpassing",
+                                                    ancestor=self.block,
+                                                    filepath="/path/to/whatever",
+                                                    metadata=self.metadata)]
+        find_blocks_primary = mock.MagicMock(return_value=notpassing1)
+        find_blocks_secondary = mock.MagicMock(return_value=notpassing2)
+        self.block.primary.find_blocks = find_blocks_primary
+        self.block.secondary.find_blocks = find_blocks_secondary
+
+        blocks = self.block.find_blocks("anything")
+        for block in blocks:
+            self.assertEqual(block.primary, block.secondary)
+            self.assertTrue(block.primary is not block.secondary)
+
+    def test_find_dirs_no_pass_equal(self):
+        notpassing1 = [augeasparser.AugeasDirectiveNode(name="notpassing",
+                                                        ancestor=self.block,
+                                                        filepath="/path/to/whatever",
+                                                        metadata=self.metadata)]
+        notpassing2 = [augeasparser.AugeasDirectiveNode(name="notpassing",
+                                                        ancestor=self.block,
+                                                        filepath="/path/to/whatever",
+                                                        metadata=self.metadata)]
+        find_dirs_primary = mock.MagicMock(return_value=notpassing1)
+        find_dirs_secondary = mock.MagicMock(return_value=notpassing2)
+        self.block.primary.find_directives = find_dirs_primary
+        self.block.secondary.find_directives = find_dirs_secondary
+
+        directives = self.block.find_directives("anything")
+        for directive in directives:
+            self.assertEqual(directive.primary, directive.secondary)
+            self.assertTrue(directive.primary is not directive.secondary)
+
+    def test_find_comments_no_pass_equal(self):
+        notpassing1 = [augeasparser.AugeasCommentNode(comment="notpassing",
+                                                      ancestor=self.block,
+                                                      filepath="/path/to/whatever",
+                                                      metadata=self.metadata)]
+        notpassing2 = [augeasparser.AugeasCommentNode(comment="notpassing",
+                                                      ancestor=self.block,
+                                                      filepath="/path/to/whatever",
+                                                      metadata=self.metadata)]
+        find_coms_primary = mock.MagicMock(return_value=notpassing1)
+        find_coms_secondary = mock.MagicMock(return_value=notpassing2)
+        self.block.primary.find_comments = find_coms_primary
+        self.block.secondary.find_comments = find_coms_secondary
+
+        comments = self.block.find_comments("anything")
+        for comment in comments:
+            self.assertEqual(comment.primary, comment.secondary)
+            self.assertTrue(comment.primary is not comment.secondary)
+
+    def test_find_blocks_no_pass_notequal(self):
+        notpassing1 = [augeasparser.AugeasBlockNode(name="notpassing",
+                                                    ancestor=self.block,
+                                                    filepath="/path/to/whatever",
+                                                    metadata=self.metadata)]
+        notpassing2 = [augeasparser.AugeasBlockNode(name="different",
+                                                    ancestor=self.block,
+                                                    filepath="/path/to/whatever",
+                                                    metadata=self.metadata)]
+        find_blocks_primary = mock.MagicMock(return_value=notpassing1)
+        find_blocks_secondary = mock.MagicMock(return_value=notpassing2)
+        self.block.primary.find_blocks = find_blocks_primary
+        self.block.secondary.find_blocks = find_blocks_secondary
+
+        with self.assertRaises(AssertionError):
+            _ = self.block.find_blocks("anything")
+
+    def test_parsernode_notequal(self):
+        ne_block = augeasparser.AugeasBlockNode(name="different",
+                                                ancestor=self.block,
+                                                filepath="/path/to/whatever",
+                                                metadata=self.metadata)
+        ne_directive = augeasparser.AugeasDirectiveNode(name="different",
+                                                        ancestor=self.block,
+                                                        filepath="/path/to/whatever",
+                                                        metadata=self.metadata)
+        ne_comment = augeasparser.AugeasCommentNode(comment="different",
+                                                    ancestor=self.block,
+                                                    filepath="/path/to/whatever",
+                                                    metadata=self.metadata)
+        self.assertFalse(self.block == ne_block)
+        self.assertFalse(self.directive == ne_directive)
+        self.assertFalse(self.comment == ne_comment)
+
+    def test_parsed_paths(self):
+        mock_p = mock.MagicMock(return_value=['/path/file.conf',
+                                              '/another/path',
+                                              '/path/other.conf'])
+        mock_s = mock.MagicMock(return_value=['/path/*.conf', '/another/path'])
+        self.block.primary.parsed_paths = mock_p
+        self.block.secondary.parsed_paths = mock_s
+        self.block.parsed_paths()
+        self.assertTrue(mock_p.called)
+        self.assertTrue(mock_s.called)
+
+    def test_parsed_paths_error(self):
+        mock_p = mock.MagicMock(return_value=['/path/file.conf'])
+        mock_s = mock.MagicMock(return_value=['/path/*.conf', '/another/path'])
+        self.block.primary.parsed_paths = mock_p
+        self.block.secondary.parsed_paths = mock_s
+        with self.assertRaises(AssertionError):
+            self.block.parsed_paths()
+
+    def test_find_ancestors(self):
+        primarymock = mock.MagicMock(return_value=[])
+        secondarymock = mock.MagicMock(return_value=[])
+        self.block.primary.find_ancestors = primarymock
+        self.block.secondary.find_ancestors = secondarymock
+        self.block.find_ancestors("anything")
+        self.assertTrue(primarymock.called)
+        self.assertTrue(secondarymock.called)
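
What this new file exercises, in brief: each Dual*Node wraps two parser implementations (an Augeas-backed primary and a secondary), runs every operation through both, and asserts that their answers agree, with `assertions.PASS` acting as a wildcard that always matches. A compressed sketch of the delegation idea; illustrative only, the real classes live in dualparser.py:

    class DualNodeSketch:
        """Minimal model of the dual-delegation pattern under test."""
        def __init__(self, primary, secondary):
            self.primary = primary
            self.secondary = secondary

        def find_directives(self, name):
            first = self.primary.find_directives(name)
            second = self.secondary.find_directives(name)
            # The real implementation compares results pairwise with
            # assertions.assertEqual and raises AssertionError on mismatch
            # (unless one side is the PASS sentinel).
            assert len(first) == len(second)
            return [DualNodeSketch(f, s) for f, s in zip(first, second)]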
|
||||
@@ -1,7 +1,10 @@
|
||||
"""Test for certbot_apache._internal.entrypoint for override class resolution"""
|
||||
import unittest
|
||||
|
||||
import mock
|
||||
try:
|
||||
import mock
|
||||
except ImportError: # pragma: no cover
|
||||
from unittest import mock # type: ignore
|
||||
|
||||
from certbot_apache._internal import configurator
|
||||
from certbot_apache._internal import entrypoint
|
||||
|
||||
@@ -1,7 +1,10 @@
 """Test for certbot_apache._internal.configurator for Fedora 29+ overrides"""
 import unittest

-import mock
+try:
+    import mock
+except ImportError: # pragma: no cover
+    from unittest import mock # type: ignore

 from certbot import errors
 from certbot.compat import filesystem
@@ -100,7 +103,7 @@ class MultipleVhostsTestFedora(util.ApacheTest):
     def test_get_parser(self):
         self.assertIsInstance(self.config.parser, override_fedora.FedoraParser)

-    @mock.patch("certbot_apache._internal.parser.ApacheParser._get_runtime_cfg")
+    @mock.patch("certbot_apache._internal.apache_util._get_runtime_cfg")
     def test_opportunistic_httpd_runtime_parsing(self, mock_get):
         define_val = (
             'Define: TEST1\n'
@@ -120,7 +123,7 @@ class MultipleVhostsTestFedora(util.ApacheTest):
                 return mod_val
             return ""
         mock_get.side_effect = mock_get_cfg
-        self.config.parser.modules = set()
+        self.config.parser.modules = {}
         self.config.parser.variables = {}

         with mock.patch("certbot.util.get_os_info") as mock_osi:
@@ -131,7 +134,7 @@ class MultipleVhostsTestFedora(util.ApacheTest):
         self.assertEqual(mock_get.call_count, 3)
         self.assertEqual(len(self.config.parser.modules), 4)
         self.assertEqual(len(self.config.parser.variables), 2)
-        self.assertTrue("TEST2" in self.config.parser.variables.keys())
+        self.assertTrue("TEST2" in self.config.parser.variables)
         self.assertTrue("mod_another.c" in self.config.parser.modules)

     @mock.patch("certbot_apache._internal.configurator.util.run_script")
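Two mechanical changes repeat through the hunks above and below: parser.modules turns from a set into a dict, so each module name can carry an associated value (these tests store None or a fake path), and redundant .keys() calls are dropped, since the in operator on a dict already tests its keys. A small self-contained illustration; the value semantics are an assumption inferred from the tests themselves:

# parser.modules as a dict: keys are module identifiers, values carry
# extra data (the tests in this diff use None or a fake path).
modules = {}
modules["mod_ssl.c"] = "/fake/path"
modules["ssl_module"] = None

# "in" on a dict tests its keys, so the explicit .keys() is redundant:
assert "mod_ssl.c" in modules
assert "ssl_module" in modules.keys()  # same result, extra noise

# set.remove(x) becomes del d[x] once the set is a dict:
del modules["ssl_module"]
assert "ssl_module" not in modules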
@@ -155,7 +158,7 @@ class MultipleVhostsTestFedora(util.ApacheTest):
                 raise Exception("Missed: %s" % vhost) # pragma: no cover
         self.assertEqual(found, 2)

-    @mock.patch("certbot_apache._internal.parser.ApacheParser._get_runtime_cfg")
+    @mock.patch("certbot_apache._internal.apache_util._get_runtime_cfg")
     def test_get_sysconfig_vars(self, mock_cfg):
         """Make sure we read the sysconfig OPTIONS variable correctly"""
         # Return nothing for the process calls
@@ -169,11 +172,11 @@ class MultipleVhostsTestFedora(util.ApacheTest):
             mock_osi.return_value = ("fedora", "29")
             self.config.parser.update_runtime_variables()

-        self.assertTrue("mock_define" in self.config.parser.variables.keys())
-        self.assertTrue("mock_define_too" in self.config.parser.variables.keys())
-        self.assertTrue("mock_value" in self.config.parser.variables.keys())
+        self.assertTrue("mock_define" in self.config.parser.variables)
+        self.assertTrue("mock_define_too" in self.config.parser.variables)
+        self.assertTrue("mock_value" in self.config.parser.variables)
         self.assertEqual("TRUE", self.config.parser.variables["mock_value"])
-        self.assertTrue("MOCK_NOSEP" in self.config.parser.variables.keys())
+        self.assertTrue("MOCK_NOSEP" in self.config.parser.variables)
         self.assertEqual("NOSEP_VAL", self.config.parser.variables["NOSEP_TWO"])

     @mock.patch("certbot_apache._internal.configurator.util.run_script")
@@ -1,7 +1,10 @@
 """Test for certbot_apache._internal.configurator for Gentoo overrides"""
 import unittest

-import mock
+try:
+    import mock
+except ImportError: # pragma: no cover
+    from unittest import mock # type: ignore

 from certbot import errors
 from certbot.compat import filesystem
@@ -21,19 +24,19 @@ def get_vh_truth(temp_dir, config_name):
         obj.VirtualHost(
             os.path.join(prefix, "gentoo.example.com.conf"),
             os.path.join(aug_pre, "gentoo.example.com.conf/VirtualHost"),
-            set([obj.Addr.fromstring("*:80")]),
+            {obj.Addr.fromstring("*:80")},
             False, True, "gentoo.example.com"),
         obj.VirtualHost(
             os.path.join(prefix, "00_default_vhost.conf"),
             os.path.join(aug_pre, "00_default_vhost.conf/IfDefine/VirtualHost"),
-            set([obj.Addr.fromstring("*:80")]),
+            {obj.Addr.fromstring("*:80")},
             False, True, "localhost"),
         obj.VirtualHost(
             os.path.join(prefix, "00_default_ssl_vhost.conf"),
             os.path.join(aug_pre,
                          "00_default_ssl_vhost.conf" +
                          "/IfDefine/IfDefine/IfModule/VirtualHost"),
-            set([obj.Addr.fromstring("_default_:443")]),
+            {obj.Addr.fromstring("_default_:443")},
             True, True, "localhost")
     ]
     return vh_truth
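The set([...]) to {...} rewrites in this hunk and in the obj_test.py hunks below are pure syntax modernization: the set-literal form (available since Python 2.7) builds the same object without allocating a throwaway list first. For instance:

# Set literals build the same object as set([...]), minus the extra list:
assert {"*:80", "_default_:443"} == set(["*:80", "_default_:443"])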
@@ -88,9 +91,9 @@ class MultipleVhostsTestGentoo(util.ApacheTest):
         with mock.patch("certbot_apache._internal.override_gentoo.GentooParser.update_modules"):
             self.config.parser.update_runtime_variables()
         for define in defines:
-            self.assertTrue(define in self.config.parser.variables.keys())
+            self.assertTrue(define in self.config.parser.variables)

-    @mock.patch("certbot_apache._internal.parser.ApacheParser.parse_from_subprocess")
+    @mock.patch("certbot_apache._internal.apache_util.parse_from_subprocess")
     def test_no_binary_configdump(self, mock_subprocess):
         """Make sure we don't call binary dumps other than modules from Apache
         as this is not supported in Gentoo currently"""
@@ -104,7 +107,7 @@ class MultipleVhostsTestGentoo(util.ApacheTest):
         self.config.parser.reset_modules()
         self.assertTrue(mock_subprocess.called)

-    @mock.patch("certbot_apache._internal.parser.ApacheParser._get_runtime_cfg")
+    @mock.patch("certbot_apache._internal.apache_util._get_runtime_cfg")
     def test_opportunistic_httpd_runtime_parsing(self, mock_get):
         mod_val = (
             'Loaded Modules:\n'
@@ -117,7 +120,7 @@ class MultipleVhostsTestGentoo(util.ApacheTest):
                 return mod_val
             return None # pragma: no cover
         mock_get.side_effect = mock_get_cfg
-        self.config.parser.modules = set()
+        self.config.parser.modules = {}

         with mock.patch("certbot.util.get_os_info") as mock_osi:
             # Make sure we have the Gentoo httpd constants
@@ -1,10 +1,13 @@
 """Test for certbot_apache._internal.http_01."""
 import unittest
 import errno

-import mock
+try:
+    import mock
+except ImportError: # pragma: no cover
+    from unittest import mock # type: ignore

 from acme import challenges
 from acme.magic_typing import List # pylint: disable=unused-import, no-name-in-module
 from certbot import achallenges
 from certbot import errors
 from certbot.compat import filesystem
@@ -40,8 +43,8 @@ class ApacheHttp01Test(util.ApacheTest):

         modules = ["ssl", "rewrite", "authz_core", "authz_host"]
         for mod in modules:
-            self.config.parser.modules.add("mod_{0}.c".format(mod))
-            self.config.parser.modules.add(mod + "_module")
+            self.config.parser.modules["mod_{0}.c".format(mod)] = None
+            self.config.parser.modules[mod + "_module"] = None

         from certbot_apache._internal.http_01 import ApacheHttp01
         self.http = ApacheHttp01(self.config)
@@ -52,24 +55,24 @@ class ApacheHttp01Test(util.ApacheTest):
     @mock.patch("certbot_apache._internal.configurator.ApacheConfigurator.enable_mod")
     def test_enable_modules_apache_2_2(self, mock_enmod):
         self.config.version = (2, 2)
-        self.config.parser.modules.remove("authz_host_module")
-        self.config.parser.modules.remove("mod_authz_host.c")
+        del self.config.parser.modules["authz_host_module"]
+        del self.config.parser.modules["mod_authz_host.c"]

         enmod_calls = self.common_enable_modules_test(mock_enmod)
         self.assertEqual(enmod_calls[0][0][0], "authz_host")

     @mock.patch("certbot_apache._internal.configurator.ApacheConfigurator.enable_mod")
     def test_enable_modules_apache_2_4(self, mock_enmod):
-        self.config.parser.modules.remove("authz_core_module")
-        self.config.parser.modules.remove("mod_authz_core.c")
+        del self.config.parser.modules["authz_core_module"]
+        del self.config.parser.modules["mod_authz_core.c"]

         enmod_calls = self.common_enable_modules_test(mock_enmod)
         self.assertEqual(enmod_calls[0][0][0], "authz_core")

     def common_enable_modules_test(self, mock_enmod):
         """Tests enabling mod_rewrite and other modules."""
-        self.config.parser.modules.remove("rewrite_module")
-        self.config.parser.modules.remove("mod_rewrite.c")
+        del self.config.parser.modules["rewrite_module"]
+        del self.config.parser.modules["mod_rewrite.c"]

         self.http.prepare_http01_modules()

@@ -197,6 +200,12 @@ class ApacheHttp01Test(util.ApacheTest):

         self.assertTrue(os.path.exists(challenge_dir))

+    @mock.patch("certbot_apache._internal.http_01.filesystem.makedirs")
+    def test_failed_makedirs(self, mock_makedirs):
+        mock_makedirs.side_effect = OSError(errno.EACCES, "msg")
+        self.http.add_chall(self.achalls[0])
+        self.assertRaises(errors.PluginError, self.http.perform)
+
     def _test_challenge_conf(self):
         with open(self.http.challenge_conf_pre) as f:
             pre_conf_contents = f.read()
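The new test_failed_makedirs test relies on MagicMock's side_effect: when set to an exception instance, every call to the mock raises it, which simulates a permissions failure while creating the challenge directory. The same mechanism in isolation:

import errno

try:
    import mock
except ImportError:  # pragma: no cover
    from unittest import mock

# A mock with an exception side_effect raises on every call; this is how
# the test above fakes a permission error from filesystem.makedirs.
fake_makedirs = mock.MagicMock(side_effect=OSError(errno.EACCES, "msg"))
try:
    fake_makedirs("/var/www/.well-known/acme-challenge")
except OSError as err:
    assert err.errno == errno.EACCES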
@@ -14,13 +14,13 @@ class VirtualHostTest(unittest.TestCase):
         self.addr_default = Addr.fromstring("_default_:443")

         self.vhost1 = VirtualHost(
-            "filep", "vh_path", set([self.addr1]), False, False, "localhost")
+            "filep", "vh_path", {self.addr1}, False, False, "localhost")

         self.vhost1b = VirtualHost(
-            "filep", "vh_path", set([self.addr1]), False, False, "localhost")
+            "filep", "vh_path", {self.addr1}, False, False, "localhost")

         self.vhost2 = VirtualHost(
-            "fp", "vhp", set([self.addr2]), False, False, "localhost")
+            "fp", "vhp", {self.addr2}, False, False, "localhost")

     def test_repr(self):
         self.assertEqual(repr(self.addr2),
@@ -42,7 +42,7 @@ class VirtualHostTest(unittest.TestCase):

         complex_vh = VirtualHost(
             "fp", "vhp",
-            set([Addr.fromstring("*:443"), Addr.fromstring("1.2.3.4:443")]),
+            {Addr.fromstring("*:443"), Addr.fromstring("1.2.3.4:443")},
             False, False)
         self.assertTrue(complex_vh.conflicts([self.addr1]))
         self.assertTrue(complex_vh.conflicts([self.addr2]))
@@ -57,14 +57,14 @@ class VirtualHostTest(unittest.TestCase):
     def test_same_server(self):
         from certbot_apache._internal.obj import VirtualHost
         no_name1 = VirtualHost(
-            "fp", "vhp", set([self.addr1]), False, False, None)
+            "fp", "vhp", {self.addr1}, False, False, None)
         no_name2 = VirtualHost(
-            "fp", "vhp", set([self.addr2]), False, False, None)
+            "fp", "vhp", {self.addr2}, False, False, None)
         no_name3 = VirtualHost(
-            "fp", "vhp", set([self.addr_default]),
+            "fp", "vhp", {self.addr_default},
             False, False, None)
         no_name4 = VirtualHost(
-            "fp", "vhp", set([self.addr2, self.addr_default]),
+            "fp", "vhp", {self.addr2, self.addr_default},
             False, False, None)

         self.assertTrue(self.vhost1.same_server(self.vhost2))
@@ -2,7 +2,10 @@
 import shutil
 import unittest

-import mock
+try:
+    import mock
+except ImportError: # pragma: no cover
+    from unittest import mock # type: ignore

 from certbot import errors
 from certbot.compat import os
@@ -114,7 +117,7 @@ class BasicParserTest(util.ParserTest):
         """
         from certbot_apache._internal.parser import get_aug_path
         # This makes sure that find_dir will work
-        self.parser.modules.add("mod_ssl.c")
+        self.parser.modules["mod_ssl.c"] = "/fake/path"

         self.parser.add_dir_to_ifmodssl(
             get_aug_path(self.parser.loc["default"]),
@@ -128,7 +131,7 @@ class BasicParserTest(util.ParserTest):
     def test_add_dir_to_ifmodssl_multiple(self):
         from certbot_apache._internal.parser import get_aug_path
         # This makes sure that find_dir will work
-        self.parser.modules.add("mod_ssl.c")
+        self.parser.modules["mod_ssl.c"] = "/fake/path"

         self.parser.add_dir_to_ifmodssl(
             get_aug_path(self.parser.loc["default"]),
@@ -165,7 +168,7 @@ class BasicParserTest(util.ParserTest):
         self.assertTrue(mock_logger.debug.called)

     @mock.patch("certbot_apache._internal.parser.ApacheParser.find_dir")
-    @mock.patch("certbot_apache._internal.parser.ApacheParser._get_runtime_cfg")
+    @mock.patch("certbot_apache._internal.apache_util._get_runtime_cfg")
     def test_update_runtime_variables(self, mock_cfg, _):
         define_val = (
             'ServerRoot: "/etc/apache2"\n'
@@ -260,7 +263,7 @@ class BasicParserTest(util.ParserTest):
         expected_vars = {"TEST": "", "U_MICH": "", "TLS": "443",
                          "example_path": "Documents/path"}

-        self.parser.modules = set()
+        self.parser.modules = {}
         with mock.patch(
             "certbot_apache._internal.parser.ApacheParser.parse_file") as mock_parse:
             self.parser.update_runtime_variables()
@@ -271,7 +274,7 @@ class BasicParserTest(util.ParserTest):
             self.assertEqual(mock_parse.call_count, 25)

     @mock.patch("certbot_apache._internal.parser.ApacheParser.find_dir")
-    @mock.patch("certbot_apache._internal.parser.ApacheParser._get_runtime_cfg")
+    @mock.patch("certbot_apache._internal.apache_util._get_runtime_cfg")
     def test_update_runtime_variables_alt_values(self, mock_cfg, _):
         inc_val = (
             'Included configuration files:\n'
@@ -282,7 +285,7 @@ class BasicParserTest(util.ParserTest):
                                os.path.dirname(self.parser.loc["root"]))

         mock_cfg.return_value = inc_val
-        self.parser.modules = set()
+        self.parser.modules = {}

         with mock.patch(
             "certbot_apache._internal.parser.ApacheParser.parse_file") as mock_parse:
@@ -293,7 +296,7 @@ class BasicParserTest(util.ParserTest):
             # path derived from root configuration Include statements
             self.assertEqual(mock_parse.call_count, 1)

-    @mock.patch("certbot_apache._internal.parser.ApacheParser._get_runtime_cfg")
+    @mock.patch("certbot_apache._internal.apache_util._get_runtime_cfg")
     def test_update_runtime_vars_bad_output(self, mock_cfg):
         mock_cfg.return_value = "Define: TLS=443=24"
         self.parser.update_runtime_variables()
@@ -303,7 +306,7 @@ class BasicParserTest(util.ParserTest):
             errors.PluginError, self.parser.update_runtime_variables)

     @mock.patch("certbot_apache._internal.configurator.ApacheConfigurator.option")
-    @mock.patch("certbot_apache._internal.parser.subprocess.Popen")
+    @mock.patch("certbot_apache._internal.apache_util.subprocess.Popen")
     def test_update_runtime_vars_bad_ctl(self, mock_popen, mock_opt):
         mock_popen.side_effect = OSError
         mock_opt.return_value = "nonexistent"
@@ -311,7 +314,7 @@ class BasicParserTest(util.ParserTest):
             errors.MisconfigurationError,
             self.parser.update_runtime_variables)

-    @mock.patch("certbot_apache._internal.parser.subprocess.Popen")
+    @mock.patch("certbot_apache._internal.apache_util.subprocess.Popen")
     def test_update_runtime_vars_bad_exit(self, mock_popen):
         mock_popen().communicate.return_value = ("", "")
         mock_popen.returncode = -1
@@ -355,7 +358,7 @@ class ParserInitTest(util.ApacheTest):
             ApacheParser, os.path.relpath(self.config_path),
             "/dummy/vhostpath", version=(2, 4, 22), configurator=self.config)

-    @mock.patch("certbot_apache._internal.parser.ApacheParser._get_runtime_cfg")
+    @mock.patch("certbot_apache._internal.apache_util._get_runtime_cfg")
     def test_unparseable(self, mock_cfg):
         from certbot_apache._internal.parser import ApacheParser
         mock_cfg.return_value = ('Define: TEST')
certbot-apache/tests/parsernode_configurator_test.py (new file, 47 lines)
@@ -0,0 +1,47 @@
+"""Tests for ApacheConfigurator for AugeasParserNode classes"""
+import unittest
+
+try:
+    import mock
+except ImportError: # pragma: no cover
+    from unittest import mock # type: ignore
+
+import util
+
+try:
+    import apacheconfig
+    HAS_APACHECONFIG = True
+except ImportError: # pragma: no cover
+    HAS_APACHECONFIG = False
+
+
+@unittest.skipIf(not HAS_APACHECONFIG, reason='Tests require apacheconfig dependency')
+class ConfiguratorParserNodeTest(util.ApacheTest): # pylint: disable=too-many-public-methods
+    """Test AugeasParserNode using available test configurations"""
+
+    def setUp(self): # pylint: disable=arguments-differ
+        super(ConfiguratorParserNodeTest, self).setUp()
+
+        self.config = util.get_apache_configurator(
+            self.config_path, self.vhost_path, self.config_dir,
+            self.work_dir, use_parsernode=True)
+        self.vh_truth = util.get_vh_truth(
+            self.temp_dir, "debian_apache_2_4/multiple_vhosts")
+
+    def test_parsernode_get_vhosts(self):
+        self.config.USE_PARSERNODE = True
+        vhosts = self.config.get_virtual_hosts()
+        # Legacy get_virtual_hosts() does not set the node
+        self.assertTrue(vhosts[0].node is not None)
+
+    def test_parsernode_get_vhosts_mismatch(self):
+        vhosts = self.config.get_virtual_hosts_v2()
+        # One of the returned VirtualHost objects differs
+        vhosts[0].name = "IdidntExpectThat"
+        self.config.get_virtual_hosts_v2 = mock.MagicMock(return_value=vhosts)
+        with self.assertRaises(AssertionError):
+            _ = self.config.get_virtual_hosts()
+
+
+if __name__ == "__main__":
+    unittest.main() # pragma: no cover
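This new file also shows the standard guard for an optional test dependency: the import is attempted once at module load, and the whole TestCase is skipped, rather than erroring out, when the package is absent. The same pattern in isolation:

import unittest

try:
    import apacheconfig
    HAS_APACHECONFIG = True
except ImportError:
    HAS_APACHECONFIG = False

@unittest.skipIf(not HAS_APACHECONFIG, reason='Tests require apacheconfig dependency')
class OptionalDepTest(unittest.TestCase):
    def test_something(self):
        # Only runs when apacheconfig was importable above.
        self.assertTrue(hasattr(apacheconfig, "__name__"))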
certbot-apache/tests/parsernode_test.py (new file, 128 lines)
@@ -0,0 +1,128 @@
+""" Tests for ParserNode interface """
+
+import unittest
+
+from certbot_apache._internal import interfaces
+from certbot_apache._internal import parsernode_util as util
+
+
+class DummyParserNode(interfaces.ParserNode):
+    """ A dummy class implementing ParserNode interface """
+
+    def __init__(self, **kwargs):
+        """
+        Initializes the ParserNode instance.
+        """
+        ancestor, dirty, filepath, metadata = util.parsernode_kwargs(kwargs)
+        self.ancestor = ancestor
+        self.dirty = dirty
+        self.filepath = filepath
+        self.metadata = metadata
+        super(DummyParserNode, self).__init__(**kwargs)
+
+    def save(self, msg): # pragma: no cover
+        """Save"""
+        pass
+
+    def find_ancestors(self, name): # pragma: no cover
+        """ Find ancestors """
+        return []
+
+
+class DummyCommentNode(DummyParserNode):
+    """ A dummy class implementing CommentNode interface """
+
+    def __init__(self, **kwargs):
+        """
+        Initializes the CommentNode instance and sets its instance variables.
+        """
+        comment, kwargs = util.commentnode_kwargs(kwargs)
+        self.comment = comment
+        super(DummyCommentNode, self).__init__(**kwargs)
+
+
+class DummyDirectiveNode(DummyParserNode):
+    """ A dummy class implementing DirectiveNode interface """
+
+    # pylint: disable=too-many-arguments
+    def __init__(self, **kwargs):
+        """
+        Initializes the DirectiveNode instance and sets its instance variables.
+        """
+        name, parameters, enabled, kwargs = util.directivenode_kwargs(kwargs)
+        self.name = name
+        self.parameters = parameters
+        self.enabled = enabled
+
+        super(DummyDirectiveNode, self).__init__(**kwargs)
+
+    def set_parameters(self, parameters): # pragma: no cover
+        """Set parameters"""
+        pass
+
+
+class DummyBlockNode(DummyDirectiveNode):
+    """ A dummy class implementing BlockNode interface """
+
+    def add_child_block(self, name, parameters=None, position=None): # pragma: no cover
+        """Add child block"""
+        pass
+
+    def add_child_directive(self, name, parameters=None, position=None): # pragma: no cover
+        """Add child directive"""
+        pass
+
+    def add_child_comment(self, comment="", position=None): # pragma: no cover
+        """Add child comment"""
+        pass
+
+    def find_blocks(self, name, exclude=True): # pragma: no cover
+        """Find blocks"""
+        pass
+
+    def find_directives(self, name, exclude=True): # pragma: no cover
+        """Find directives"""
+        pass
+
+    def find_comments(self, comment, exact=False): # pragma: no cover
+        """Find comments"""
+        pass
+
+    def delete_child(self, child): # pragma: no cover
+        """Delete child"""
+        pass
+
+    def unsaved_files(self): # pragma: no cover
+        """Unsaved files"""
+        pass
+
+
+interfaces.CommentNode.register(DummyCommentNode)
+interfaces.DirectiveNode.register(DummyDirectiveNode)
+interfaces.BlockNode.register(DummyBlockNode)
+
+class ParserNodeTest(unittest.TestCase):
+    """Dummy placeholder test case for ParserNode interfaces"""
+
+    def test_dummy(self):
+        dummyblock = DummyBlockNode(
+            name="None",
+            parameters=(),
+            ancestor=None,
+            dirty=False,
+            filepath="/some/random/path"
+        )
+        dummydirective = DummyDirectiveNode(
+            name="Name",
+            ancestor=None,
+            filepath="/another/path"
+        )
+        dummycomment = DummyCommentNode(
+            comment="Comment",
+            ancestor=dummyblock,
+            filepath="/some/file"
+        )
+
+
+if __name__ == "__main__":
+    unittest.main() # pragma: no cover
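The register() calls at the bottom of this file use abc virtual subclassing: a class is declared a subclass of an abstract base class without inheriting from it, so isinstance and issubclass checks pass while the dummy keeps its own hierarchy. A minimal standalone demonstration:

import abc

class Node(abc.ABC):
    """An abstract base class with no concrete subclasses of its own."""

class Standalone:
    """Unrelated by inheritance."""

# Virtual registration: no methods are inherited or checked, but type
# introspection now treats Standalone as a Node.
Node.register(Standalone)
assert isinstance(Standalone(), Node)
assert issubclass(Standalone, Node)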
certbot-apache/tests/parsernode_util_test.py (new file, 115 lines)
@@ -0,0 +1,115 @@
+""" Tests for ParserNode utils """
+import unittest
+
+from certbot_apache._internal import parsernode_util as util
+
+
+class ParserNodeUtilTest(unittest.TestCase):
+    """Tests for ParserNode utils"""
+
+    def _setup_parsernode(self):
+        """ Sets up kwargs dict for ParserNode """
+        return {
+            "ancestor": None,
+            "dirty": False,
+            "filepath": "/tmp",
+        }
+
+    def _setup_commentnode(self):
+        """ Sets up kwargs dict for CommentNode """
+
+        pn = self._setup_parsernode()
+        pn["comment"] = "x"
+        return pn
+
+    def _setup_directivenode(self):
+        """ Sets up kwargs dict for DirectiveNode """
+
+        pn = self._setup_parsernode()
+        pn["name"] = "Name"
+        pn["parameters"] = ("first",)
+        pn["enabled"] = True
+        return pn
+
+    def test_unknown_parameter(self):
+        params = self._setup_parsernode()
+        params["unknown"] = "unknown"
+        self.assertRaises(TypeError, util.parsernode_kwargs, params)
+
+        params = self._setup_commentnode()
+        params["unknown"] = "unknown"
+        self.assertRaises(TypeError, util.commentnode_kwargs, params)
+
+        params = self._setup_directivenode()
+        params["unknown"] = "unknown"
+        self.assertRaises(TypeError, util.directivenode_kwargs, params)
+
+    def test_parsernode(self):
+        params = self._setup_parsernode()
+        ctrl = self._setup_parsernode()
+
+        ancestor, dirty, filepath, metadata = util.parsernode_kwargs(params)
+        self.assertEqual(ancestor, ctrl["ancestor"])
+        self.assertEqual(dirty, ctrl["dirty"])
+        self.assertEqual(filepath, ctrl["filepath"])
+        self.assertEqual(metadata, {})
+
+    def test_parsernode_from_metadata(self):
+        params = self._setup_parsernode()
+        params.pop("filepath")
+        md = {"some": "value"}
+        params["metadata"] = md
+
+        # Just testing that error from missing required parameters is not raised
+        _, _, _, metadata = util.parsernode_kwargs(params)
+        self.assertEqual(metadata, md)
+
+    def test_commentnode(self):
+        params = self._setup_commentnode()
+        ctrl = self._setup_commentnode()
+
+        comment, _ = util.commentnode_kwargs(params)
+        self.assertEqual(comment, ctrl["comment"])
+
+    def test_commentnode_from_metadata(self):
+        params = self._setup_commentnode()
+        params.pop("comment")
+        params["metadata"] = {}
+
+        # Just testing that error from missing required parameters is not raised
+        util.commentnode_kwargs(params)
+
+    def test_directivenode(self):
+        params = self._setup_directivenode()
+        ctrl = self._setup_directivenode()
+
+        name, parameters, enabled, _ = util.directivenode_kwargs(params)
+        self.assertEqual(name, ctrl["name"])
+        self.assertEqual(parameters, ctrl["parameters"])
+        self.assertEqual(enabled, ctrl["enabled"])
+
+    def test_directivenode_from_metadata(self):
+        params = self._setup_directivenode()
+        params.pop("filepath")
+        params.pop("name")
+        params["metadata"] = {"irrelevant": "value"}
+
+        # Just testing that error from missing required parameters is not raised
+        util.directivenode_kwargs(params)
+
+    def test_missing_required(self):
+        c_params = self._setup_commentnode()
+        c_params.pop("comment")
+        self.assertRaises(TypeError, util.commentnode_kwargs, c_params)
+
+        d_params = self._setup_directivenode()
+        d_params.pop("ancestor")
+        self.assertRaises(TypeError, util.directivenode_kwargs, d_params)
+
+        p_params = self._setup_parsernode()
+        p_params.pop("filepath")
+        self.assertRaises(TypeError, util.parsernode_kwargs, p_params)
+
+
+if __name__ == "__main__":
+    unittest.main() # pragma: no cover
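Taken together, these tests pin down the contract of the kwargs helpers: required keys raise TypeError when missing, unknown keys raise TypeError, and a "metadata" argument relaxes the other requirements. The sketch below is a hedged illustration of a helper satisfying exactly that contract; it is an assumption derived from the tests, not the certbot_apache implementation of parsernode_kwargs.

def parsernode_kwargs_sketch(kwargs):
    # "metadata" relaxes the other requirements: a node built from parser
    # internals can recover them from the metadata later.
    if "metadata" in kwargs:
        kwargs.setdefault("ancestor", None)
        kwargs.setdefault("dirty", False)
        kwargs.setdefault("filepath", None)
    kwargs.setdefault("metadata", {})

    known = {"ancestor", "dirty", "filepath", "metadata"}
    unknown = set(kwargs) - known
    if unknown:
        raise TypeError("unknown parameters: {0}".format(unknown))
    try:
        return (kwargs["ancestor"], kwargs["dirty"],
                kwargs["filepath"], kwargs["metadata"])
    except KeyError as err:
        raise TypeError("required parameter missing: {0}".format(err))

# Mirrors test_missing_required: no filepath and no metadata is an error.
try:
    parsernode_kwargs_sketch({"ancestor": None, "dirty": False})
except TypeError:
    pass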
@@ -26,7 +26,7 @@ Listen 443

 # Pass Phrase Dialog:
 # Configure the pass phrase gathering process.
-# The filtering dialog program (`builtin' is a internal
+# The filtering dialog program (`builtin' is an internal
 # terminal dialog) has to provide the pass phrase on stdout.
 SSLPassPhraseDialog builtin

@@ -702,7 +702,7 @@ IndexIgnore .??* *~ *# HEADER* README* RCS CVS *,v *,t
 # English (en) - Esperanto (eo) - Estonian (et) - French (fr) - German (de)
 # Greek-Modern (el) - Hebrew (he) - Italian (it) - Japanese (ja)
 # Korean (ko) - Luxembourgeois* (ltz) - Norwegian Nynorsk (nn)
-# Norwegian (no) - Polish (pl) - Portugese (pt)
+# Norwegian (no) - Polish (pl) - Portuguese (pt)
 # Brazilian Portuguese (pt-BR) - Russian (ru) - Swedish (sv)
 # Simplified Chinese (zh-CN) - Spanish (es) - Traditional Chinese (zh-TW)
 #
@@ -13,7 +13,7 @@ Listen 443 https

 # Pass Phrase Dialog:
 # Configure the pass phrase gathering process.
-# The filtering dialog program (`builtin' is a internal
+# The filtering dialog program (`builtin' is an internal
 # terminal dialog) has to provide the pass phrase on stdout.
 SSLPassPhraseDialog exec:/usr/libexec/httpd-ssl-pass-dialog

@@ -31,7 +31,7 @@

 # Pass Phrase Dialog:
 # Configure the pass phrase gathering process.
-# The filtering dialog program (`builtin' is a internal
+# The filtering dialog program (`builtin' is an internal
 # terminal dialog) has to provide the pass phrase on stdout.
 SSLPassPhraseDialog exec:/usr/share/apache2/ask-for-passphrase

@@ -31,7 +31,7 @@

 # Pass Phrase Dialog:
 # Configure the pass phrase gathering process.
-# The filtering dialog program (`builtin' is a internal
+# The filtering dialog program (`builtin' is an internal
 # terminal dialog) has to provide the pass phrase on stdout.
 SSLPassPhraseDialog exec:/usr/share/apache2/ask-for-passphrase

@@ -31,7 +31,7 @@

 # Pass Phrase Dialog:
 # Configure the pass phrase gathering process.
-# The filtering dialog program (`builtin' is a internal
+# The filtering dialog program (`builtin' is an internal
 # terminal dialog) has to provide the pass phrase on stdout.
 SSLPassPhraseDialog exec:/usr/share/apache2/ask-for-passphrase

Some files were not shown because too many files have changed in this diff.